hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ff2bf3950bb88c32f97fbf9d0eb6eda161000658 | 5,054 | exs | Elixir | apps/gitgud/test/gitgud/issue_test.exs | EdmondFrank/gitgud | 1952c16130564357aa6f23e35f48f19e3a50d4dd | [
"MIT"
] | 449 | 2018-03-06T01:05:55.000Z | 2022-03-23T21:03:56.000Z | apps/gitgud/test/gitgud/issue_test.exs | EdmondFrank/gitgud | 1952c16130564357aa6f23e35f48f19e3a50d4dd | [
"MIT"
] | 69 | 2018-03-06T09:26:41.000Z | 2022-03-21T22:43:09.000Z | apps/gitgud/test/gitgud/issue_test.exs | EdmondFrank/gitgud | 1952c16130564357aa6f23e35f48f19e3a50d4dd | [
"MIT"
] | 41 | 2018-03-06T01:06:07.000Z | 2021-11-21T17:55:04.000Z | defmodule GitGud.IssueTest do
use GitGud.DataCase, async: true
use GitGud.DataFactory
alias GitGud.User
alias GitGud.Repo
alias GitGud.RepoStorage
alias GitGud.Issue
setup :create_user
setup :create_repo
test "creates a new issue with valid params", %{user: user, repo: repo} do
params = factory(:issue)
assert {:ok, issue} = Issue.create(repo, user, params)
assert issue.title == params.title
assert issue.status == "open"
assert issue.repo_id == repo.id
assert issue.author_id == user.id
assert comment = issue.comment
assert comment.body == params.comment.body
assert comment.thread_table == "issues_comments"
assert comment.repo_id == repo.id
assert comment.author_id == user.id
end
test "fails to create a new issue with invalid title", %{user: user, repo: repo} do
params = factory(:issue)
assert {:error, changeset} = Issue.create(repo, user, Map.delete(params, :title))
assert "can't be blank" in errors_on(changeset).title
end
test "fails to create a new issue without comment", %{user: user, repo: repo} do
params = factory(:issue)
assert {:error, changeset} = Issue.create(repo, user, Map.put(params, :comment, %{}))
assert "can't be blank" in errors_on(changeset).comment.body
end
describe "when issue exists" do
setup :create_issue
test "adds new comment", %{user: user, repo: repo, issue: issue1} do
assert {:ok, comment} = Issue.add_comment(issue1, user, "Hello this is a comment.")
issue2 = DB.preload(issue1, :replies)
assert comment.id == hd(issue2.replies).id
assert comment.body == "Hello this is a comment."
assert comment.repo_id == repo.id
assert comment.author_id == user.id
assert comment.thread_table == "issues_comments"
end
test "updates issue title", %{issue: issue1} do
assert {:ok, issue2} = Issue.update_title(issue1, "This is the new title")
assert issue2.title == "This is the new title"
assert List.last(issue2.events) == %{
"old_title" => issue1.title,
"new_title" => "This is the new title",
"timestamp" => NaiveDateTime.to_iso8601(issue2.updated_at),
"type" => "title_update"
}
end
test "closes issue", %{issue: issue1} do
assert {:ok, issue2} = Issue.close(issue1)
assert issue2.status == "close"
assert List.last(issue2.events) == %{
"timestamp" => NaiveDateTime.to_iso8601(issue2.updated_at),
"type" => "close"
}
end
test "reopens issue", %{issue: issue1} do
assert {:ok, issue2} =Issue.reopen(Issue.close!(issue1))
assert issue2.status == "open"
assert List.last(issue2.events) == %{
"timestamp" => NaiveDateTime.to_iso8601(issue2.updated_at),
"type" => "reopen"
}
end
test "adds issue labels", %{repo: repo, issue: issue1} do
push = Enum.map(Enum.take(repo.issue_labels, 2), &(&1.id))
assert {:ok, issue2} = Issue.update_labels(issue1, {push, []})
assert length(issue2.labels) == 2
assert Enum.all?(issue2.labels, &(&1.id in push))
assert List.last(issue2.events) == %{
"push" => push,
"pull" => [],
"timestamp" => NaiveDateTime.to_iso8601(issue2.updated_at),
"type" => "labels_update"
}
end
test "updates issue labels", %{repo: repo, issue: issue1} do
labels_ids = Enum.map(repo.issue_labels, &(&1.id))
Issue.update_labels!(issue1, {Enum.drop(labels_ids, 1), []})
push = Enum.take(labels_ids, 1)
pull = Enum.slice(labels_ids, 1..2)
assert {:ok, issue2} = Issue.update_labels(issue1, {push, pull})
assert length(issue2.labels) == length(labels_ids) - 2
refute Enum.find(issue2.labels, &(&1.id in pull))
assert List.last(issue2.events) == %{
"timestamp" => NaiveDateTime.to_iso8601(issue2.updated_at),
"push" => push,
"pull" => pull,
"type" => "labels_update"
}
end
test "removes issue labels", %{repo: repo, issue: issue1} do
labels_ids = Enum.map(repo.issue_labels, &(&1.id))
Issue.update_labels!(issue1, {Enum.take(labels_ids, 4), []})
pull = Enum.take(labels_ids, 2)
assert {:ok, issue2} = Issue.update_labels(issue1, {[], pull})
assert length(issue2.labels) == 2
refute Enum.find(issue2.labels, &(&1.id in pull))
assert List.last(issue2.events) == %{
"timestamp" => NaiveDateTime.to_iso8601(issue2.updated_at),
"push" => [],
"pull" => pull,
"type" => "labels_update"
}
end
end
#
# Helpers
#
defp create_user(context) do
Map.put(context, :user, User.create!(factory(:user)))
end
defp create_repo(context) do
repo = Repo.create!(context.user, factory(:repo))
on_exit fn ->
File.rm_rf(RepoStorage.workdir(repo))
end
Map.put(context, :repo, repo)
end
defp create_issue(context) do
Map.put(context, :issue, Issue.create!(context.repo, context.user, factory(:issue)))
end
end
| 34.616438 | 89 | 0.630787 |
ff2c41452d4ed7dc4cf3dafab57b2bc414e8d27e | 1,494 | ex | Elixir | lib/speakr_web/endpoint.ex | BlackBoxSQL/eventr | 4d84c747fba12a53ae16fb674b8293b5ad083ac0 | [
"MIT"
] | 14 | 2019-09-20T04:51:00.000Z | 2019-10-09T07:36:34.000Z | lib/speakr_web/endpoint.ex | BlackBoxSQL/eventr | 4d84c747fba12a53ae16fb674b8293b5ad083ac0 | [
"MIT"
] | 1 | 2021-03-10T05:27:43.000Z | 2021-03-10T05:27:43.000Z | lib/speakr_web/endpoint.ex | BlackBoxSQL/eventr | 4d84c747fba12a53ae16fb674b8293b5ad083ac0 | [
"MIT"
] | 4 | 2019-09-20T06:59:07.000Z | 2019-11-18T14:59:01.000Z | defmodule SpeakrWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :speakr
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
@session_options [
store: :cookie,
key: "_speakr_key",
signing_salt: "vvXUKowP"
]
socket "/socket", SpeakrWeb.UserSocket,
websocket: true,
longpoll: false
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :speakr,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
plug Plug.Session,
store: :cookie,
key: "_speakr_key",
signing_salt: "secret-change-in-production"
plug Pow.Plug.Session, otp_app: :speakr
plug SpeakrWeb.Router
end
| 27.163636 | 69 | 0.706827 |
ff2c550a4ca31938f7e3407e93dff11970bb584b | 513 | ex | Elixir | lib/gatekeeper/test_server.ex | samfrench/gatekeeper | 6286ed16360e0d538e7f4c802a866b2cd83d5514 | [
"MIT"
] | null | null | null | lib/gatekeeper/test_server.ex | samfrench/gatekeeper | 6286ed16360e0d538e7f4c802a866b2cd83d5514 | [
"MIT"
] | null | null | null | lib/gatekeeper/test_server.ex | samfrench/gatekeeper | 6286ed16360e0d538e7f4c802a866b2cd83d5514 | [
"MIT"
] | null | null | null | defmodule Gatekeeper.TestServer do
use Plug.Router
plug(:match)
plug(:dispatch)
get "/level/essential" do
send_resp(conn, 200, "Personalised content essential!")
end
get "/level/expected" do
send_resp(conn, 200, "Personalised content expected!")
end
get "/level/ideal" do
send_resp(conn, 200, "Personalised content ideal!")
end
get "/level/none" do
send_resp(conn, 200, "Non personalised content!")
end
match(_, do: send_resp(conn, 404, "404, error not found!"))
end
| 20.52 | 61 | 0.684211 |
ff2c585c5b1eef72b262ad9b3ca099b81e10cba7 | 4,371 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/ssl_certificate.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/ssl_certificate.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/ssl_certificate.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.SslCertificate do
@moduledoc """
Represents an SSL Certificate resource.
Google Compute Engine has two SSL Certificate resources:
* [Global](/compute/docs/reference/rest/{$api_version}/sslCertificates) * [Regional](/compute/docs/reference/rest/{$api_version}/regionSslCertificates)
The sslCertificates are used by:
- external HTTPS load balancers
- SSL proxy load balancers
The regionSslCertificates are used by internal HTTPS load balancers.
Optionally, certificate file contents that you upload can contain a set of up to five PEM-encoded certificates. The API call creates an object (sslCertificate) that holds this data. You can use SSL keys and certificates to secure connections to a load balancer. For more information, read Creating and using SSL certificates and SSL certificates quotas and limits. (== resource_for {$api_version}.sslCertificates ==) (== resource_for {$api_version}.regionSslCertificates ==)
## Attributes
* `certificate` (*type:* `String.t`, *default:* `nil`) - A local certificate file. The certificate must be in PEM format. The certificate chain must be no greater than 5 certs long. The chain must include at least one intermediate cert.
* `creationTimestamp` (*type:* `String.t`, *default:* `nil`) - [Output Only] Creation timestamp in RFC3339 text format.
* `description` (*type:* `String.t`, *default:* `nil`) - An optional description of this resource. Provide this property when you create the resource.
* `id` (*type:* `String.t`, *default:* `nil`) - [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* `kind` (*type:* `String.t`, *default:* `compute#sslCertificate`) - [Output Only] Type of the resource. Always compute#sslCertificate for SSL certificates.
* `name` (*type:* `String.t`, *default:* `nil`) - Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
* `privateKey` (*type:* `String.t`, *default:* `nil`) - A write-only private key in PEM format. Only insert requests will include this field.
* `region` (*type:* `String.t`, *default:* `nil`) - [Output Only] URL of the region where the regional SSL Certificate resides. This field is not applicable to global SSL Certificate.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - [Output only] Server-defined URL for the resource.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:certificate => String.t(),
:creationTimestamp => String.t(),
:description => String.t(),
:id => String.t(),
:kind => String.t(),
:name => String.t(),
:privateKey => String.t(),
:region => String.t(),
:selfLink => String.t()
}
field(:certificate)
field(:creationTimestamp)
field(:description)
field(:id)
field(:kind)
field(:name)
field(:privateKey)
field(:region)
field(:selfLink)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.SslCertificate do
def decode(value, options) do
GoogleApi.Compute.V1.Model.SslCertificate.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.SslCertificate do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 51.423529 | 490 | 0.715397 |
ff2c94c88351d7bdf93583a490831c7bafd6ff94 | 606 | exs | Elixir | examples/helloworld/mix.exs | braverhealth/grpc | eff8a8828d27ddd7f63a3c1dd5aae86246df215e | [
"Apache-2.0"
] | 561 | 2019-03-18T09:10:57.000Z | 2022-03-27T17:34:59.000Z | examples/helloworld/mix.exs | braverhealth/grpc | eff8a8828d27ddd7f63a3c1dd5aae86246df215e | [
"Apache-2.0"
] | 94 | 2019-03-20T09:34:38.000Z | 2022-02-27T20:44:04.000Z | examples/helloworld/mix.exs | braverhealth/grpc | eff8a8828d27ddd7f63a3c1dd5aae86246df215e | [
"Apache-2.0"
] | 112 | 2019-03-25T03:27:26.000Z | 2022-03-21T12:43:59.000Z | defmodule Helloworld.Mixfile do
use Mix.Project
def project do
[app: :helloworld,
version: "0.1.0",
elixir: "~> 1.4",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
def application do
[mod: {HelloworldApp, []},
applications: [:logger, :grpc]]
end
defp deps do
[
{:grpc, path: "../../"},
{:protobuf, github: "tony612/protobuf-elixir", override: true},
{:cowlib, "~> 2.8.0", hex: :grpc_cowlib, override: true},
{:dialyxir, "~> 0.5", only: [:dev, :test], runtime: false},
]
end
end
| 22.444444 | 69 | 0.561056 |
ff2ca0209a61875cdd25ccc10b3aad91a11ee545 | 872 | ex | Elixir | elixirpay/lib/elixirpay_web/controllers/accounts_controller.ex | fcsouza/elixir-project | e6212c4def050400eb4fcc50c6a8274409b1f0c7 | [
"MIT"
] | null | null | null | elixirpay/lib/elixirpay_web/controllers/accounts_controller.ex | fcsouza/elixir-project | e6212c4def050400eb4fcc50c6a8274409b1f0c7 | [
"MIT"
] | null | null | null | elixirpay/lib/elixirpay_web/controllers/accounts_controller.ex | fcsouza/elixir-project | e6212c4def050400eb4fcc50c6a8274409b1f0c7 | [
"MIT"
] | null | null | null | defmodule ElixirpayWeb.AccountsController do
use ElixirpayWeb, :controller
alias Elixirpay.Account
alias Elixirpay.Accounts.Transactions.Response, as: TransactionResponse
action_fallback ElixirpayWeb.FallbackController
def deposit(conn, params) do
with {:ok, %Account{} = account} <- Elixirpay.deposit(params) do
conn
|> put_status(:ok)
|> render("update.json", account: account)
end
end
def withdraw(conn, params) do
with {:ok, %Account{} = account} <- Elixirpay.withdraw(params) do
conn
|> put_status(:ok)
|> render("update.json", account: account)
end
end
def transaction(conn, params) do
with {:ok, %TransactionResponse{} = transaction} <- Elixirpay.transaction(params) do
conn
|> put_status(:ok)
|> render("transaction.json", transaction: transaction)
end
end
end
| 26.424242 | 88 | 0.677752 |
ff2cb0c00c813206868f1e7b3b4fdbfc5c508633 | 62,774 | ex | Elixir | lib/aws/generated/sesv2.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/sesv2.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/sesv2.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.SESv2 do
@moduledoc """
Amazon SES API v2
Welcome to the Amazon SES API v2 Reference.
This guide provides information about the Amazon SES API v2, including supported
operations, data types, parameters, and schemas.
[Amazon SES](https://aws.amazon.com/pinpoint) is an AWS service that you can use to send email messages to your customers.
If you're new to Amazon SES API v2, you might find it helpful to also review the
[Amazon Simple Email Service Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/). The *Amazon SES
Developer Guide* provides information and code samples that demonstrate how to
use Amazon SES API v2 features programmatically.
The Amazon SES API v2 is available in several AWS Regions and it provides an
endpoint for each of these Regions. For a list of all the Regions and endpoints
where the API is currently available, see [AWS Service Endpoints](https://docs.aws.amazon.com/general/latest/gr/rande.html#ses_region)
in the *Amazon Web Services General Reference*. To learn more about AWS Regions,
see [Managing AWS Regions](https://docs.aws.amazon.com/general/latest/gr/rande-manage.html) in the
*Amazon Web Services General Reference*.
In each Region, AWS maintains multiple Availability Zones. These Availability
Zones are physically isolated from each other, but are united by private,
low-latency, high-throughput, and highly redundant network connections. These
Availability Zones enable us to provide very high levels of availability and
redundancy, while also minimizing latency. To learn more about the number of
Availability Zones that are available in each Region, see [AWS Global Infrastructure](http://aws.amazon.com/about-aws/global-infrastructure/).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2019-09-27",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "email",
global?: false,
protocol: "rest-json",
service_id: "SESv2",
signature_version: "v4",
signing_name: "ses",
target_prefix: nil
}
end
@doc """
Create a configuration set.
*Configuration sets* are groups of rules that you can apply to the emails that
you send. You apply a configuration set to an email by specifying the name of
the configuration set when you call the Amazon SES API v2. When you apply a
configuration set to an email, all of the rules in that configuration set are
applied to the email.
"""
def create_configuration_set(%Client{} = client, input, options \\ []) do
url_path = "/v2/email/configuration-sets"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Create an event destination.
*Events* include message sends, deliveries, opens, clicks, bounces, and
complaints. *Event destinations* are places that you can send information about
these events to. For example, you can send event data to Amazon SNS to receive
notifications when you receive bounces or complaints, or you can use Amazon
Kinesis Data Firehose to stream data to Amazon S3 for long-term storage.
A single configuration set can include more than one event destination.
"""
def create_configuration_set_event_destination(
%Client{} = client,
configuration_set_name,
input,
options \\ []
) do
url_path =
"/v2/email/configuration-sets/#{URI.encode(configuration_set_name)}/event-destinations"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a contact, which is an end-user who is receiving the email, and adds
them to a contact list.
"""
def create_contact(%Client{} = client, contact_list_name, input, options \\ []) do
url_path = "/v2/email/contact-lists/#{URI.encode(contact_list_name)}/contacts"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a contact list.
"""
def create_contact_list(%Client{} = client, input, options \\ []) do
url_path = "/v2/email/contact-lists"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new custom verification email template.
For more information about custom verification email templates, see [Using Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-email-verify-address-custom.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def create_custom_verification_email_template(%Client{} = client, input, options \\ []) do
url_path = "/v2/email/custom-verification-email-templates"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Create a new pool of dedicated IP addresses.
A pool can include one or more dedicated IP addresses that are associated with
your AWS account. You can associate a pool with a configuration set. When you
send an email that uses that configuration set, the message is sent from one of
the addresses in the associated pool.
"""
def create_dedicated_ip_pool(%Client{} = client, input, options \\ []) do
url_path = "/v2/email/dedicated-ip-pools"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Create a new predictive inbox placement test.
Predictive inbox placement tests can help you predict how your messages will be
handled by various email providers around the world. When you perform a
predictive inbox placement test, you provide a sample message that contains the
content that you plan to send to your customers. Amazon SES then sends that
message to special email addresses spread across several major email providers.
After about 24 hours, the test is complete, and you can use the
`GetDeliverabilityTestReport` operation to view the results of the test.
"""
def create_deliverability_test_report(%Client{} = client, input, options \\ []) do
url_path = "/v2/email/deliverability-dashboard/test"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Starts the process of verifying an email identity.
An *identity* is an email address or domain that you use when you send email.
Before you can use an identity to send email, you first have to verify it. By
verifying an identity, you demonstrate that you're the owner of the identity,
and that you've given Amazon SES API v2 permission to send email from the
identity.
When you verify an email address, Amazon SES sends an email to the address. Your
email address is verified as soon as you follow the link in the verification
email.
When you verify a domain without specifying the `DkimSigningAttributes` object,
this operation provides a set of DKIM tokens. You can convert these tokens into
CNAME records, which you then add to the DNS configuration for your domain. Your
domain is verified when Amazon SES detects these records in the DNS
configuration for your domain. This verification method is known as [Easy DKIM](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/easy-dkim.html).
Alternatively, you can perform the verification process by providing your own
public-private key pair. This verification method is known as Bring Your Own
DKIM (BYODKIM). To use BYODKIM, your call to the `CreateEmailIdentity` operation
has to include the `DkimSigningAttributes` object. When you specify this object,
you provide a selector (a component of the DNS record name that identifies the
public key that you want to use for DKIM authentication) and a private key.
When you verify a domain, this operation provides a set of DKIM tokens, which
you can convert into CNAME tokens. You add these CNAME tokens to the DNS
configuration for your domain. Your domain is verified when Amazon SES detects
these records in the DNS configuration for your domain. For some DNS providers,
it can take 72 hours or more to complete the domain verification process.
Additionally, you can associate an existing configuration set with the email
identity that you're verifying.
"""
def create_email_identity(%Client{} = client, input, options \\ []) do
url_path = "/v2/email/identities"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates the specified sending authorization policy for the given identity (an
email address or a domain).
This API is for the identity owner only. If you have not verified the identity,
this API will return an error.
Sending authorization is a feature that enables an identity owner to authorize
other senders to use its identities. For information about using sending
authorization, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).
You can execute this operation no more than once per second.
"""
def create_email_identity_policy(
%Client{} = client,
email_identity,
policy_name,
input,
options \\ []
) do
url_path =
"/v2/email/identities/#{URI.encode(email_identity)}/policies/#{URI.encode(policy_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates an email template.
Email templates enable you to send personalized email to one or more
destinations in a single API operation. For more information, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-personalized-email-api.html).
You can execute this operation no more than once per second.
"""
def create_email_template(%Client{} = client, input, options \\ []) do
url_path = "/v2/email/templates"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates an import job for a data destination.
"""
def create_import_job(%Client{} = client, input, options \\ []) do
url_path = "/v2/email/import-jobs"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Delete an existing configuration set.
*Configuration sets* are groups of rules that you can apply to the emails you
send. You apply a configuration set to an email by including a reference to the
configuration set in the headers of the email. When you apply a configuration
set to an email, all of the rules in that configuration set are applied to the
email.
"""
def delete_configuration_set(%Client{} = client, configuration_set_name, input, options \\ []) do
url_path = "/v2/email/configuration-sets/#{URI.encode(configuration_set_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Delete an event destination.
*Events* include message sends, deliveries, opens, clicks, bounces, and
complaints. *Event destinations* are places that you can send information about
these events to. For example, you can send event data to Amazon SNS to receive
notifications when you receive bounces or complaints, or you can use Amazon
Kinesis Data Firehose to stream data to Amazon S3 for long-term storage.
"""
def delete_configuration_set_event_destination(
%Client{} = client,
configuration_set_name,
event_destination_name,
input,
options \\ []
) do
url_path =
"/v2/email/configuration-sets/#{URI.encode(configuration_set_name)}/event-destinations/#{
URI.encode(event_destination_name)
}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes a contact from a contact list.
"""
def delete_contact(%Client{} = client, contact_list_name, email_address, input, options \\ []) do
url_path =
"/v2/email/contact-lists/#{URI.encode(contact_list_name)}/contacts/#{
URI.encode(email_address)
}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes a contact list and all of the contacts on that list.
"""
def delete_contact_list(%Client{} = client, contact_list_name, input, options \\ []) do
url_path = "/v2/email/contact-lists/#{URI.encode(contact_list_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes an existing custom verification email template.
For more information about custom verification email templates, see [Using Custom Verification Email
Templates](https://docs.aws.amazon.com/es/latest/DeveloperGuide/send-email-verify-address-custom.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def delete_custom_verification_email_template(
      %Client{} = client,
      template_name,
      input,
      options \\ []
    ) do
  path = "/v2/email/custom-verification-email-templates/#{URI.encode(template_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end

@doc """
Deletes a dedicated IP pool.
"""
def delete_dedicated_ip_pool(%Client{} = client, pool_name, input, options \\ []) do
  path = "/v2/email/dedicated-ip-pools/#{URI.encode(pool_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end

@doc """
Deletes an email identity.

An identity can be either an email address or a domain name.
"""
def delete_email_identity(%Client{} = client, email_identity, input, options \\ []) do
  path = "/v2/email/identities/#{URI.encode(email_identity)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end

@doc """
Deletes the specified sending authorization policy for the given identity (an
email address or a domain).

This API returns successfully even if a policy with the specified name does not
exist. It is for the identity owner only; if you have not verified the
identity, it returns an error.

Sending authorization is a feature that enables an identity owner to authorize
other senders to use its identities. For information about using sending
authorization, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).

You can execute this operation no more than once per second.
"""
def delete_email_identity_policy(
      %Client{} = client,
      email_identity,
      policy_name,
      input,
      options \\ []
    ) do
  path =
    "/v2/email/identities/#{URI.encode(email_identity)}/policies/#{URI.encode(policy_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end

@doc """
Deletes an email template.

You can execute this operation no more than once per second.
"""
def delete_email_template(%Client{} = client, template_name, input, options \\ []) do
  path = "/v2/email/templates/#{URI.encode(template_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end

@doc """
Removes an email address from the suppression list for your account.
"""
def delete_suppressed_destination(%Client{} = client, email_address, input, options \\ []) do
  path = "/v2/email/suppression/addresses/#{URI.encode(email_address)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Obtain information about the email-sending status and capabilities of your
Amazon SES account in the current AWS Region.
"""
def get_account(%Client{} = client, options \\ []) do
  Request.request_rest(client, metadata(), :get, "/v2/email/account", [], [], nil, options, nil)
end

@doc """
Retrieve a list of the blacklists that your dedicated IP addresses appear on.
"""
def get_blacklist_reports(%Client{} = client, blacklist_item_names, options \\ []) do
  # Only include the query parameter when a value was provided.
  query =
    Enum.reject([{"BlacklistItemNames", blacklist_item_names}], fn {_name, value} ->
      is_nil(value)
    end)

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/deliverability-dashboard/blacklist-report",
    query,
    [],
    nil,
    options,
    nil
  )
end

@doc """
Get information about an existing configuration set, including the dedicated IP
pool that it's associated with, whether or not it's enabled for sending email,
and more.

*Configuration sets* are groups of rules that you can apply to the emails you
send. You apply a configuration set to an email by including a reference to the
configuration set in the headers of the email; all of the rules in that
configuration set are then applied to the email.
"""
def get_configuration_set(%Client{} = client, configuration_set_name, options \\ []) do
  path = "/v2/email/configuration-sets/#{URI.encode(configuration_set_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Retrieve a list of event destinations that are associated with a configuration
set.

*Events* include message sends, deliveries, opens, clicks, bounces, and
complaints. *Event destinations* are places that you can send information about
these events to — for example, Amazon SNS for notifications, or Amazon Kinesis
Data Firehose to stream data to Amazon S3 for long-term storage.
"""
def get_configuration_set_event_destinations(
      %Client{} = client,
      configuration_set_name,
      options \\ []
    ) do
  path =
    "/v2/email/configuration-sets/#{URI.encode(configuration_set_name)}/event-destinations"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end

@doc """
Returns a contact from a contact list.
"""
def get_contact(%Client{} = client, contact_list_name, email_address, options \\ []) do
  path =
    "/v2/email/contact-lists/#{URI.encode(contact_list_name)}/contacts/#{URI.encode(email_address)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end

@doc """
Returns contact list metadata.

It does not return any information about the contacts present in the list.
"""
def get_contact_list(%Client{} = client, contact_list_name, options \\ []) do
  path = "/v2/email/contact-lists/#{URI.encode(contact_list_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end

@doc """
Returns the custom email verification template for the template name you
specify.

For more information about custom verification email templates, see [Using Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-email-verify-address-custom.html)
in the *Amazon SES Developer Guide*.

You can execute this operation no more than once per second.
"""
def get_custom_verification_email_template(%Client{} = client, template_name, options \\ []) do
  path = "/v2/email/custom-verification-email-templates/#{URI.encode(template_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Get information about a dedicated IP address, including the name of the
dedicated IP pool that it's associated with, as well as information about the
automatic warm-up process for the address.
"""
def get_dedicated_ip(%Client{} = client, ip, options \\ []) do
  path = "/v2/email/dedicated-ips/#{URI.encode(ip)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end

@doc """
List the dedicated IP addresses that are associated with your AWS account.
"""
def get_dedicated_ips(
      %Client{} = client,
      next_token \\ nil,
      page_size \\ nil,
      pool_name \\ nil,
      options \\ []
    ) do
  # Drop any parameter the caller did not supply; the remaining order matches
  # the original prepend-built list.
  query =
    Enum.reject(
      [{"NextToken", next_token}, {"PageSize", page_size}, {"PoolName", pool_name}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/dedicated-ips",
    query,
    [],
    nil,
    options,
    nil
  )
end
@doc """
Retrieve information about the status of the Deliverability dashboard for your
account.

When the Deliverability dashboard is enabled, you gain access to reputation,
deliverability, and other metrics for the domains that you use to send email,
and the ability to perform predictive inbox placement tests.

When you use the Deliverability dashboard, you pay a monthly subscription
charge, in addition to any other fees that you accrue by using Amazon SES and
other AWS services. For more information about the features and cost of a
Deliverability dashboard subscription, see [Amazon SES Pricing](http://aws.amazon.com/ses/pricing/).
"""
def get_deliverability_dashboard_options(%Client{} = client, options \\ []) do
  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/deliverability-dashboard",
    [],
    [],
    nil,
    options,
    nil
  )
end

@doc """
Retrieve the results of a predictive inbox placement test.
"""
def get_deliverability_test_report(%Client{} = client, report_id, options \\ []) do
  path = "/v2/email/deliverability-dashboard/test-reports/#{URI.encode(report_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end

@doc """
Retrieve all the deliverability data for a specific campaign.

This data is available for a campaign only if the campaign sent email by using
a domain that the Deliverability dashboard is enabled for.
"""
def get_domain_deliverability_campaign(%Client{} = client, campaign_id, options \\ []) do
  path = "/v2/email/deliverability-dashboard/campaigns/#{URI.encode(campaign_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end

@doc """
Retrieve inbox placement and engagement rates for the domains that you use to
send email.
"""
def get_domain_statistics_report(
      %Client{} = client,
      domain,
      end_date,
      start_date,
      options \\ []
    ) do
  path = "/v2/email/deliverability-dashboard/statistics-report/#{URI.encode(domain)}"

  query =
    Enum.reject(
      [{"EndDate", end_date}, {"StartDate", start_date}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end

@doc """
Provides information about a specific identity, including the identity's
verification status, sending authorization policies, its DKIM authentication
status, and its custom Mail-From settings.
"""
def get_email_identity(%Client{} = client, email_identity, options \\ []) do
  path = "/v2/email/identities/#{URI.encode(email_identity)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Returns the requested sending authorization policies for the given identity (an
email address or a domain).

The policies are returned as a map of policy names to policy contents. You can
retrieve a maximum of 20 policies at a time.

This API is for the identity owner only. If you have not verified the identity,
this API will return an error.

Sending authorization is a feature that enables an identity owner to authorize
other senders to use its identities. For information about using sending
authorization, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).

You can execute this operation no more than once per second.
"""
def get_email_identity_policies(%Client{} = client, email_identity, options \\ []) do
  path = "/v2/email/identities/#{URI.encode(email_identity)}/policies"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end

@doc """
Displays the template object (which includes the subject line, HTML part and
text part) for the template you specify.

You can execute this operation no more than once per second.
"""
def get_email_template(%Client{} = client, template_name, options \\ []) do
  path = "/v2/email/templates/#{URI.encode(template_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end

@doc """
Provides information about an import job.
"""
def get_import_job(%Client{} = client, job_id, options \\ []) do
  path = "/v2/email/import-jobs/#{URI.encode(job_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end

@doc """
Retrieves information about a specific email address that's on the suppression
list for your account.
"""
def get_suppressed_destination(%Client{} = client, email_address, options \\ []) do
  path = "/v2/email/suppression/addresses/#{URI.encode(email_address)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
List all of the configuration sets associated with your account in the current
region.

*Configuration sets* are groups of rules that you can apply to the emails you
send. You apply a configuration set to an email by including a reference to the
configuration set in the headers of the email; all of the rules in that
configuration set are then applied to the email.
"""
def list_configuration_sets(
      %Client{} = client,
      next_token \\ nil,
      page_size \\ nil,
      options \\ []
    ) do
  query =
    Enum.reject(
      [{"NextToken", next_token}, {"PageSize", page_size}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/configuration-sets",
    query,
    [],
    nil,
    options,
    nil
  )
end

@doc """
Lists all of the contact lists available.
"""
def list_contact_lists(%Client{} = client, next_token \\ nil, page_size \\ nil, options \\ []) do
  query =
    Enum.reject(
      [{"NextToken", next_token}, {"PageSize", page_size}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/contact-lists",
    query,
    [],
    nil,
    options,
    nil
  )
end

@doc """
Lists the contacts present in a specific contact list.
"""
def list_contacts(
      %Client{} = client,
      contact_list_name,
      next_token \\ nil,
      page_size \\ nil,
      options \\ []
    ) do
  path = "/v2/email/contact-lists/#{URI.encode(contact_list_name)}/contacts"

  query =
    Enum.reject(
      [{"NextToken", next_token}, {"PageSize", page_size}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Lists the existing custom verification email templates for your account in the
current AWS Region.

For more information about custom verification email templates, see [Using Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-email-verify-address-custom.html)
in the *Amazon SES Developer Guide*.

You can execute this operation no more than once per second.
"""
def list_custom_verification_email_templates(
      %Client{} = client,
      next_token \\ nil,
      page_size \\ nil,
      options \\ []
    ) do
  query =
    Enum.reject(
      [{"NextToken", next_token}, {"PageSize", page_size}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/custom-verification-email-templates",
    query,
    [],
    nil,
    options,
    nil
  )
end

@doc """
List all of the dedicated IP pools that exist in your AWS account in the
current Region.
"""
def list_dedicated_ip_pools(
      %Client{} = client,
      next_token \\ nil,
      page_size \\ nil,
      options \\ []
    ) do
  query =
    Enum.reject(
      [{"NextToken", next_token}, {"PageSize", page_size}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/dedicated-ip-pools",
    query,
    [],
    nil,
    options,
    nil
  )
end

@doc """
Show a list of the predictive inbox placement tests that you've performed,
regardless of their statuses.

For predictive inbox placement tests that are complete, you can use the
`GetDeliverabilityTestReport` operation to view the results.
"""
def list_deliverability_test_reports(
      %Client{} = client,
      next_token \\ nil,
      page_size \\ nil,
      options \\ []
    ) do
  query =
    Enum.reject(
      [{"NextToken", next_token}, {"PageSize", page_size}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/deliverability-dashboard/test-reports",
    query,
    [],
    nil,
    options,
    nil
  )
end
@doc """
Retrieve deliverability data for all the campaigns that used a specific domain
to send email during a specified time range.

This data is available for a domain only if you enabled the Deliverability
dashboard for the domain.
"""
# NOTE: the optional `next_token`/`page_size` parameters sit before the
# required `start_date` in the generated signature; kept as-is for caller
# compatibility.
def list_domain_deliverability_campaigns(
      %Client{} = client,
      subscribed_domain,
      end_date,
      next_token \\ nil,
      page_size \\ nil,
      start_date,
      options \\ []
    ) do
  path =
    "/v2/email/deliverability-dashboard/domains/#{URI.encode(subscribed_domain)}/campaigns"

  query =
    Enum.reject(
      [
        {"EndDate", end_date},
        {"NextToken", next_token},
        {"PageSize", page_size},
        {"StartDate", start_date}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Returns a list of all of the email identities that are associated with your AWS
account.

An identity can be either an email address or a domain. This operation returns
identities that are verified as well as those that aren't, and identities that
are associated with Amazon SES and Amazon Pinpoint.
"""
def list_email_identities(
      %Client{} = client,
      next_token \\ nil,
      page_size \\ nil,
      options \\ []
    ) do
  query =
    Enum.reject(
      [{"NextToken", next_token}, {"PageSize", page_size}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/identities",
    query,
    [],
    nil,
    options,
    nil
  )
end

@doc """
Lists the email templates present in your Amazon SES account in the current AWS
Region.

You can execute this operation no more than once per second.
"""
def list_email_templates(%Client{} = client, next_token \\ nil, page_size \\ nil, options \\ []) do
  query =
    Enum.reject(
      [{"NextToken", next_token}, {"PageSize", page_size}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/templates",
    query,
    [],
    nil,
    options,
    nil
  )
end

@doc """
Lists all of the import jobs.
"""
def list_import_jobs(%Client{} = client, next_token \\ nil, page_size \\ nil, options \\ []) do
  query =
    Enum.reject(
      [{"NextToken", next_token}, {"PageSize", page_size}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/import-jobs",
    query,
    [],
    nil,
    options,
    nil
  )
end
@doc """
Retrieves a list of email addresses that are on the suppression list for your
account.
"""
def list_suppressed_destinations(
      %Client{} = client,
      end_date \\ nil,
      next_token \\ nil,
      page_size \\ nil,
      reasons \\ nil,
      start_date \\ nil,
      options \\ []
    ) do
  # The "Reason" query key is intentionally singular while the parameter is
  # `reasons` — this mirrors the AWS API's parameter name.
  query =
    Enum.reject(
      [
        {"EndDate", end_date},
        {"NextToken", next_token},
        {"PageSize", page_size},
        {"Reason", reasons},
        {"StartDate", start_date}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/suppression/addresses",
    query,
    [],
    nil,
    options,
    nil
  )
end

@doc """
Retrieve a list of the tags (keys and values) that are associated with a
specified resource.

A *tag* is a label that you optionally define and associate with a resource.
Each tag consists of a required *tag key* and an optional associated *tag
value*. A tag key is a general label that acts as a category for more specific
tag values. A tag value acts as a descriptor within a tag key.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
  query =
    Enum.reject([{"ResourceArn", resource_arn}], fn {_name, value} -> is_nil(value) end)

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/v2/email/tags",
    query,
    [],
    nil,
    options,
    nil
  )
end
@doc """
Enable or disable the automatic warm-up feature for dedicated IP addresses.
"""
def put_account_dedicated_ip_warmup_attributes(%Client{} = client, input, options \\ []) do
  Request.request_rest(
    client,
    metadata(),
    :put,
    "/v2/email/account/dedicated-ips/warmup",
    [],
    [],
    input,
    options,
    nil
  )
end

@doc """
Update your Amazon SES account details.
"""
def put_account_details(%Client{} = client, input, options \\ []) do
  # Unlike the other put_account_* operations, this one uses POST.
  Request.request_rest(
    client,
    metadata(),
    :post,
    "/v2/email/account/details",
    [],
    [],
    input,
    options,
    nil
  )
end

@doc """
Enable or disable the ability of your account to send email.
"""
def put_account_sending_attributes(%Client{} = client, input, options \\ []) do
  Request.request_rest(
    client,
    metadata(),
    :put,
    "/v2/email/account/sending",
    [],
    [],
    input,
    options,
    nil
  )
end

@doc """
Change the settings for the account-level suppression list.
"""
def put_account_suppression_attributes(%Client{} = client, input, options \\ []) do
  Request.request_rest(
    client,
    metadata(),
    :put,
    "/v2/email/account/suppression",
    [],
    [],
    input,
    options,
    nil
  )
end
@doc """
Associate a configuration set with a dedicated IP pool.

You can use dedicated IP pools to create groups of dedicated IP addresses for
sending specific types of email.
"""
def put_configuration_set_delivery_options(
      %Client{} = client,
      configuration_set_name,
      input,
      options \\ []
    ) do
  path = "/v2/email/configuration-sets/#{URI.encode(configuration_set_name)}/delivery-options"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end

@doc """
Enable or disable collection of reputation metrics for emails that you send
using a particular configuration set in a specific AWS Region.
"""
def put_configuration_set_reputation_options(
      %Client{} = client,
      configuration_set_name,
      input,
      options \\ []
    ) do
  path = "/v2/email/configuration-sets/#{URI.encode(configuration_set_name)}/reputation-options"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end

@doc """
Enable or disable email sending for messages that use a particular
configuration set in a specific AWS Region.
"""
def put_configuration_set_sending_options(
      %Client{} = client,
      configuration_set_name,
      input,
      options \\ []
    ) do
  path = "/v2/email/configuration-sets/#{URI.encode(configuration_set_name)}/sending"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end

@doc """
Specify the account suppression list preferences for a configuration set.
"""
def put_configuration_set_suppression_options(
      %Client{} = client,
      configuration_set_name,
      input,
      options \\ []
    ) do
  path = "/v2/email/configuration-sets/#{URI.encode(configuration_set_name)}/suppression-options"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end

@doc """
Specify a custom domain to use for open and click tracking elements in email
that you send.
"""
def put_configuration_set_tracking_options(
      %Client{} = client,
      configuration_set_name,
      input,
      options \\ []
    ) do
  path = "/v2/email/configuration-sets/#{URI.encode(configuration_set_name)}/tracking-options"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Move a dedicated IP address to an existing dedicated IP pool.

The dedicated IP address that you specify must already exist, and must be
associated with your AWS account.

The dedicated IP pool you specify must already exist. You can create a new pool
by using the `CreateDedicatedIpPool` operation.
"""
def put_dedicated_ip_in_pool(%Client{} = client, ip, input, options \\ []) do
  path = "/v2/email/dedicated-ips/#{URI.encode(ip)}/pool"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Update the warm-up attributes for a dedicated IP address
(`PUT /v2/email/dedicated-ips/{ip}/warmup`).
"""
def put_dedicated_ip_warmup_attributes(%Client{} = client, ip, input, options \\ []) do
url_path = "/v2/email/dedicated-ips/#{URI.encode(ip)}/warmup"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Enable or disable the Deliverability dashboard.

When you enable the Deliverability dashboard, you gain access to reputation,
deliverability, and other metrics for the domains that you use to send email,
and the ability to perform predictive inbox placement tests.

When you use the Deliverability dashboard, you pay a monthly subscription
charge, in addition to any other fees that you accrue by using Amazon SES and
other AWS services. For more information about the features and cost of a
Deliverability dashboard subscription, see [Amazon SES Pricing](http://aws.amazon.com/ses/pricing/).
"""
def put_deliverability_dashboard_option(%Client{} = client, input, options \\ []) do
  Request.request_rest(
    client,
    metadata(),
    :put,
    "/v2/email/deliverability-dashboard",
    [],
    [],
    input,
    options,
    nil
  )
end

@doc """
Used to associate a configuration set with an email identity.
"""
def put_email_identity_configuration_set_attributes(
      %Client{} = client,
      email_identity,
      input,
      options \\ []
    ) do
  path = "/v2/email/identities/#{URI.encode(email_identity)}/configuration-set"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end

@doc """
Used to enable or disable DKIM authentication for an email identity.
"""
def put_email_identity_dkim_attributes(%Client{} = client, email_identity, input, options \\ []) do
  path = "/v2/email/identities/#{URI.encode(email_identity)}/dkim"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Used to configure or change the DKIM authentication settings for an email domain
identity.
You can use this operation to do any of the following:
* Update the signing attributes for an identity that uses Bring Your
Own DKIM (BYODKIM).
* Change from using no DKIM authentication to using Easy DKIM.
* Change from using no DKIM authentication to using BYODKIM.
* Change from using Easy DKIM to using BYODKIM.
* Change from using BYODKIM to using Easy DKIM.
"""
def put_email_identity_dkim_signing_attributes(
%Client{} = client,
email_identity,
input,
options \\ []
) do
# NOTE(review): every other operation in this module uses a "/v2" path prefix,
# but this one uses "/v1". This appears to match the AWS SES v2 API definition
# for PutEmailIdentityDkimSigningAttributes — confirm against the service spec
# before "normalizing" it to /v2.
url_path = "/v1/email/identities/#{URI.encode(email_identity)}/dkim/signing"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Used to enable or disable feedback forwarding for an identity.

This setting determines what happens when an identity is used to send an email
that results in a bounce or complaint event.

If the value is `true`, you receive email notifications when bounce or
complaint events occur. These notifications are sent to the address that you
specified in the `Return-Path` header of the original email.

You're required to have a method of tracking bounces and complaints. If you
haven't set up another mechanism for receiving bounce or complaint
notifications (for example, by setting up an event destination), you receive an
email notification when these events occur (even if this setting is disabled).
"""
def put_email_identity_feedback_attributes(
      %Client{} = client,
      email_identity,
      input,
      options \\ []
    ) do
  path = "/v2/email/identities/#{URI.encode(email_identity)}/feedback"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end

@doc """
Used to enable or disable the custom Mail-From domain configuration for an
email identity.
"""
def put_email_identity_mail_from_attributes(
      %Client{} = client,
      email_identity,
      input,
      options \\ []
    ) do
  path = "/v2/email/identities/#{URI.encode(email_identity)}/mail-from"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end

@doc """
Adds an email address to the suppression list for your account.
"""
def put_suppressed_destination(%Client{} = client, input, options \\ []) do
  Request.request_rest(
    client,
    metadata(),
    :put,
    "/v2/email/suppression/addresses",
    [],
    [],
    input,
    options,
    nil
  )
end
@doc """
Composes an email message to multiple destinations.
"""
def send_bulk_email(%Client{} = client, input, options \\ []) do
  Request.request_rest(
    client,
    metadata(),
    :post,
    "/v2/email/outbound-bulk-emails",
    [],
    [],
    input,
    options,
    nil
  )
end

@doc """
Adds an email address to the list of identities for your Amazon SES account in
the current AWS Region and attempts to verify it.

As a result of executing this operation, a customized verification email is
sent to the specified address.

To use this operation, you must first create a custom verification email
template. For more information about creating and using custom verification
email templates, see [Using Custom Verification Email Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-email-verify-address-custom.html)
in the *Amazon SES Developer Guide*.

You can execute this operation no more than once per second.
"""
def send_custom_verification_email(%Client{} = client, input, options \\ []) do
  Request.request_rest(
    client,
    metadata(),
    :post,
    "/v2/email/outbound-custom-verification-emails",
    [],
    [],
    input,
    options,
    nil
  )
end

@doc """
Sends an email message.

You can use the Amazon SES API v2 to send two types of messages:

* **Simple** – A standard email message. When you create this type of message,
you specify the sender, the recipient, and the message body, and Amazon SES
assembles the message for you.

* **Raw** – A raw, MIME-formatted email message. When you send this type of
email, you have to specify all of the message headers, as well as the message
body. You can use this message type to send messages that contain attachments.
The message that you specify has to be a valid MIME message.

* **Templated** – A message that contains personalization tags. When you send
this type of email, Amazon SES API v2 automatically replaces the tags with
values that you specify.
"""
def send_email(%Client{} = client, input, options \\ []) do
  Request.request_rest(
    client,
    metadata(),
    :post,
    "/v2/email/outbound-emails",
    [],
    [],
    input,
    options,
    nil
  )
end
@doc """
Add one or more tags (keys and values) to a specified resource.
A *tag* is a label that you optionally define and associate with a resource.
Tags can help you categorize and manage resources in different ways, such as by
purpose, owner, environment, or other criteria. A resource can have as many as
50 tags.
Each tag consists of a required *tag key* and an associated *tag value*, both of
which you define. A tag key is a general label that acts as a category for more
specific tag values. A tag value acts as a descriptor within a tag key.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
  # POST /v2/email/tags — attaches the tags described by `input` to the
  # resource named in the payload; no headers or query params are added.
  Request.request_rest(
    client,
    metadata(),
    :post,
    "/v2/email/tags",
    [],
    [],
    input,
    options,
    nil
  )
end
@doc """
Creates a preview of the MIME content of an email when provided with a template
and a set of replacement data.
You can execute this operation no more than once per second.
"""
def test_render_email_template(%Client{} = client, template_name, input, options \\ []) do
  # POST /v2/email/templates/{TemplateName}/render — the template name is
  # percent-encoded into the path and the replacement data goes in the body.
  path = "/v2/email/templates/#{URI.encode(template_name)}/render"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Remove one or more tags (keys and values) from a specified resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
  # DELETE /v2/email/tags — "ResourceArn" and "TagKeys" are lifted out of
  # the input payload and sent as query-string parameters instead.
  {query_params, input} =
    Request.build_params(
      [{"ResourceArn", "ResourceArn"}, {"TagKeys", "TagKeys"}],
      input
    )

  Request.request_rest(
    client,
    metadata(),
    :delete,
    "/v2/email/tags",
    query_params,
    [],
    input,
    options,
    nil
  )
end
@doc """
Update the configuration of an event destination for a configuration set.
*Events* include message sends, deliveries, opens, clicks, bounces, and
complaints. *Event destinations* are places that you can send information about
these events to. For example, you can send event data to Amazon SNS to receive
notifications when you receive bounces or complaints, or you can use Amazon
Kinesis Data Firehose to stream data to Amazon S3 for long-term storage.
"""
def update_configuration_set_event_destination(
      %Client{} = client,
      configuration_set_name,
      event_destination_name,
      input,
      options \\ []
    ) do
  # PUT /v2/email/configuration-sets/{Name}/event-destinations/{Name} —
  # both path segments are percent-encoded; the new destination config is
  # carried in the request body.
  path =
    "/v2/email/configuration-sets/#{URI.encode(configuration_set_name)}" <>
      "/event-destinations/#{URI.encode(event_destination_name)}"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Updates a contact's preferences for a list.
It is not necessary to specify all existing topic preferences in the
TopicPreferences object, just the ones that need updating.
"""
def update_contact(%Client{} = client, contact_list_name, email_address, input, options \\ []) do
  # PUT /v2/email/contact-lists/{ContactListName}/contacts/{EmailAddress} —
  # updated preferences are carried in the request body.
  path =
    "/v2/email/contact-lists/#{URI.encode(contact_list_name)}" <>
      "/contacts/#{URI.encode(email_address)}"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Updates contact list metadata.
This operation does a complete replacement.
"""
def update_contact_list(%Client{} = client, contact_list_name, input, options \\ []) do
  # PUT /v2/email/contact-lists/{ContactListName} — complete replacement of
  # the list metadata with the body payload.
  path = "/v2/email/contact-lists/#{URI.encode(contact_list_name)}"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Updates an existing custom verification email template.
For more information about custom verification email templates, see [Using Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-email-verify-address-custom.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def update_custom_verification_email_template(
      %Client{} = client,
      template_name,
      input,
      options \\ []
    ) do
  # PUT /v2/email/custom-verification-email-templates/{TemplateName}.
  path = "/v2/email/custom-verification-email-templates/#{URI.encode(template_name)}"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Updates the specified sending authorization policy for the given identity (an
email address or a domain).
This API returns successfully even if a policy with the specified name does not
exist.
This API is for the identity owner only. If you have not verified the identity,
this API will return an error.
Sending authorization is a feature that enables an identity owner to authorize
other senders to use its identities. For information about using sending
authorization, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).
You can execute this operation no more than once per second.
"""
def update_email_identity_policy(
      %Client{} = client,
      email_identity,
      policy_name,
      input,
      options \\ []
    ) do
  # PUT /v2/email/identities/{EmailIdentity}/policies/{PolicyName} — the
  # replacement policy document is carried in the request body.
  path =
    "/v2/email/identities/#{URI.encode(email_identity)}" <>
      "/policies/#{URI.encode(policy_name)}"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Updates an email template.
Email templates enable you to send personalized email to one or more
destinations in a single API operation. For more information, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-personalized-email-api.html).
You can execute this operation no more than once per second.
"""
def update_email_template(%Client{} = client, template_name, input, options \\ []) do
  # PUT /v2/email/templates/{TemplateName} — the updated template content is
  # carried in the request body.
  path = "/v2/email/templates/#{URI.encode(template_name)}"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
end
| 24.841314 | 159 | 0.635916 |
ff2cb8b90af312933708ea7b9cd27d39c3d7a274 | 1,086 | ex | Elixir | lib/betex_web/channels/user_socket.ex | esl/betex | b887d95c9c6edac4bcadb8da188fae215d04fe6c | [
"Apache-2.0"
] | 1 | 2021-06-15T08:18:50.000Z | 2021-06-15T08:18:50.000Z | lib/betex_web/channels/user_socket.ex | AdiletAbylov/betex | b887d95c9c6edac4bcadb8da188fae215d04fe6c | [
"Apache-2.0"
] | null | null | null | lib/betex_web/channels/user_socket.ex | AdiletAbylov/betex | b887d95c9c6edac4bcadb8da188fae215d04fe6c | [
"Apache-2.0"
] | 2 | 2021-06-23T16:35:04.000Z | 2021-06-23T16:35:44.000Z | defmodule BetexWeb.UserSocket do
use Phoenix.Socket
## Channels
channel "sport:*", BetexWeb.SportChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
@impl true
def connect(_params, socket, _connect_info) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# BetexWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
@impl true
def id(_socket), do: nil
end
| 30.166667 | 83 | 0.694291 |
ff2cb90b177c1ae9a21396b3f92deb0978a47c75 | 30,316 | ex | Elixir | clients/cloud_build/lib/google_api/cloud_build/v1/api/projects.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/cloud_build/lib/google_api/cloud_build/v1/api/projects.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/cloud_build/lib/google_api/cloud_build/v1/api/projects.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudBuild.V1.Api.Projects do
@moduledoc """
API calls for all endpoints tagged `Projects`.
"""
alias GoogleApi.CloudBuild.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Cancels a build in progress.
## Parameters
- connection (GoogleApi.CloudBuild.V1.Connection): Connection to server
- project_id (String.t): ID of the project.
- id (String.t): ID of the build.
- opts (KeywordList): [optional] Optional parameters
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :access_token (String.t): OAuth access token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :$.xgafv (String.t): V1 error format.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :callback (String.t): JSONP
- :alt (String.t): Data format for response.
- :body (CancelBuildRequest):
## Returns
{:ok, %GoogleApi.CloudBuild.V1.Model.Build{}} on success
{:error, info} on failure
"""
@spec cloudbuild_projects_builds_cancel(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudBuild.V1.Model.Build.t()} | {:error, Tesla.Env.t()}
def cloudbuild_projects_builds_cancel(connection, project_id, id, opts \\ []) do
  # Optional parameters the caller may supply via `opts`, mapped to where
  # each one is placed on the wire (query string or request body).
  allowed_params = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :body => :body,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/projects/{projectId}/builds/{id}:cancel", %{
      "projectId" => URI.encode_www_form(project_id),
      "id" => URI.encode_www_form(id)
    })
    |> Request.add_optional_params(allowed_params, opts)

  response = Connection.execute(connection, request)
  Response.decode(response, struct: %GoogleApi.CloudBuild.V1.Model.Build{})
end
@doc """
Starts a build with the specified configuration. This method returns a long-running `Operation`, which includes the build ID. Pass the build ID to `GetBuild` to determine the build status (such as `SUCCESS` or `FAILURE`).
## Parameters
- connection (GoogleApi.CloudBuild.V1.Connection): Connection to server
- project_id (String.t): ID of the project.
- opts (KeywordList): [optional] Optional parameters
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :access_token (String.t): OAuth access token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :$.xgafv (String.t): V1 error format.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :callback (String.t): JSONP
- :alt (String.t): Data format for response.
- :body (Build):
## Returns
{:ok, %GoogleApi.CloudBuild.V1.Model.Operation{}} on success
{:error, info} on failure
"""
@spec cloudbuild_projects_builds_create(Tesla.Env.client(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudBuild.V1.Model.Operation.t()} | {:error, Tesla.Env.t()}
def cloudbuild_projects_builds_create(connection, project_id, opts \\ []) do
  # Optional parameters the caller may supply via `opts`, mapped to where
  # each one is placed on the wire (query string or request body).
  allowed_params = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :body => :body,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/projects/{projectId}/builds", %{
      "projectId" => URI.encode_www_form(project_id)
    })
    |> Request.add_optional_params(allowed_params, opts)

  response = Connection.execute(connection, request)
  Response.decode(response, struct: %GoogleApi.CloudBuild.V1.Model.Operation{})
end
@doc """
Returns information about a previously requested build. The `Build` that is returned includes its status (such as `SUCCESS`, `FAILURE`, or `WORKING`), and timing information.
## Parameters
- connection (GoogleApi.CloudBuild.V1.Connection): Connection to server
- project_id (String.t): ID of the project.
- id (String.t): ID of the build.
- opts (KeywordList): [optional] Optional parameters
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :access_token (String.t): OAuth access token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :$.xgafv (String.t): V1 error format.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :callback (String.t): JSONP
- :alt (String.t): Data format for response.
## Returns
{:ok, %GoogleApi.CloudBuild.V1.Model.Build{}} on success
{:error, info} on failure
"""
@spec cloudbuild_projects_builds_get(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudBuild.V1.Model.Build.t()} | {:error, Tesla.Env.t()}
def cloudbuild_projects_builds_get(connection, project_id, id, opts \\ []) do
  # Optional query-string parameters the caller may supply via `opts`.
  allowed_params = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/projects/{projectId}/builds/{id}", %{
      "projectId" => URI.encode_www_form(project_id),
      "id" => URI.encode_www_form(id)
    })
    |> Request.add_optional_params(allowed_params, opts)

  response = Connection.execute(connection, request)
  Response.decode(response, struct: %GoogleApi.CloudBuild.V1.Model.Build{})
end
@doc """
Lists previously requested builds. Previously requested builds may still be in-progress, or may have finished successfully or unsuccessfully.
## Parameters
- connection (GoogleApi.CloudBuild.V1.Connection): Connection to server
- project_id (String.t): ID of the project.
- opts (KeywordList): [optional] Optional parameters
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :access_token (String.t): OAuth access token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :$.xgafv (String.t): V1 error format.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :callback (String.t): JSONP
- :alt (String.t): Data format for response.
- :pageSize (integer()): Number of results to return in the list.
- :filter (String.t): The raw filter text to constrain the results.
- :pageToken (String.t): Token to provide to skip to a particular spot in the list.
## Returns
{:ok, %GoogleApi.CloudBuild.V1.Model.ListBuildsResponse{}} on success
{:error, info} on failure
"""
@spec cloudbuild_projects_builds_list(Tesla.Env.client(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudBuild.V1.Model.ListBuildsResponse.t()} | {:error, Tesla.Env.t()}
def cloudbuild_projects_builds_list(connection, project_id, opts \\ []) do
  # Optional query-string parameters, including list paging/filter controls
  # (`pageSize`, `pageToken`, `filter`).
  allowed_params = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :filter => :query,
    :key => :query,
    :oauth_token => :query,
    :pageSize => :query,
    :pageToken => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/projects/{projectId}/builds", %{
      "projectId" => URI.encode_www_form(project_id)
    })
    |> Request.add_optional_params(allowed_params, opts)

  response = Connection.execute(connection, request)
  Response.decode(response, struct: %GoogleApi.CloudBuild.V1.Model.ListBuildsResponse{})
end
@doc """
Creates a new build based on the specified build. This method creates a new build using the original build request, which may or may not result in an identical build. For triggered builds: * Triggered builds resolve to a precise revision; therefore a retry of a triggered build will result in a build that uses the same revision. For non-triggered builds that specify `RepoSource`: * If the original build built from the tip of a branch, the retried build will build from the tip of that branch, which may not be the same revision as the original build. * If the original build specified a commit sha or revision ID, the retried build will use the identical source. For builds that specify `StorageSource`: * If the original build pulled source from Google Cloud Storage without specifying the generation of the object, the new build will use the current object, which may be different from the original build source. * If the original build pulled source from Cloud Storage and specified the generation of the object, the new build will attempt to use the same object, which may or may not be available depending on the bucket's lifecycle management settings.
## Parameters
- connection (GoogleApi.CloudBuild.V1.Connection): Connection to server
- project_id (String.t): ID of the project.
- id (String.t): Build ID of the original build.
- opts (KeywordList): [optional] Optional parameters
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :access_token (String.t): OAuth access token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :$.xgafv (String.t): V1 error format.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :callback (String.t): JSONP
- :alt (String.t): Data format for response.
- :body (RetryBuildRequest):
## Returns
{:ok, %GoogleApi.CloudBuild.V1.Model.Operation{}} on success
{:error, info} on failure
"""
@spec cloudbuild_projects_builds_retry(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudBuild.V1.Model.Operation.t()} | {:error, Tesla.Env.t()}
def cloudbuild_projects_builds_retry(connection, project_id, id, opts \\ []) do
  # Optional parameters the caller may supply via `opts`, mapped to where
  # each one is placed on the wire (query string or request body).
  allowed_params = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :body => :body,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/projects/{projectId}/builds/{id}:retry", %{
      "projectId" => URI.encode_www_form(project_id),
      "id" => URI.encode_www_form(id)
    })
    |> Request.add_optional_params(allowed_params, opts)

  response = Connection.execute(connection, request)
  Response.decode(response, struct: %GoogleApi.CloudBuild.V1.Model.Operation{})
end
@doc """
Creates a new `BuildTrigger`. This API is experimental.
## Parameters
- connection (GoogleApi.CloudBuild.V1.Connection): Connection to server
- project_id (String.t): ID of the project for which to configure automatic builds.
- opts (KeywordList): [optional] Optional parameters
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :access_token (String.t): OAuth access token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :$.xgafv (String.t): V1 error format.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :callback (String.t): JSONP
- :alt (String.t): Data format for response.
- :body (BuildTrigger):
## Returns
{:ok, %GoogleApi.CloudBuild.V1.Model.BuildTrigger{}} on success
{:error, info} on failure
"""
@spec cloudbuild_projects_triggers_create(Tesla.Env.client(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudBuild.V1.Model.BuildTrigger.t()} | {:error, Tesla.Env.t()}
def cloudbuild_projects_triggers_create(connection, project_id, opts \\ []) do
  # Optional parameters the caller may supply via `opts`, mapped to where
  # each one is placed on the wire (query string or request body).
  allowed_params = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :body => :body,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/projects/{projectId}/triggers", %{
      "projectId" => URI.encode_www_form(project_id)
    })
    |> Request.add_optional_params(allowed_params, opts)

  response = Connection.execute(connection, request)
  Response.decode(response, struct: %GoogleApi.CloudBuild.V1.Model.BuildTrigger{})
end
@doc """
Deletes a `BuildTrigger` by its project ID and trigger ID. This API is experimental.
## Parameters
- connection (GoogleApi.CloudBuild.V1.Connection): Connection to server
- project_id (String.t): ID of the project that owns the trigger.
- trigger_id (String.t): ID of the `BuildTrigger` to delete.
- opts (KeywordList): [optional] Optional parameters
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :access_token (String.t): OAuth access token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :$.xgafv (String.t): V1 error format.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :callback (String.t): JSONP
- :alt (String.t): Data format for response.
## Returns
{:ok, %GoogleApi.CloudBuild.V1.Model.Empty{}} on success
{:error, info} on failure
"""
@spec cloudbuild_projects_triggers_delete(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudBuild.V1.Model.Empty.t()} | {:error, Tesla.Env.t()}
def cloudbuild_projects_triggers_delete(connection, project_id, trigger_id, opts \\ []) do
  # Optional query-string parameters the caller may supply via `opts`.
  allowed_params = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:delete)
    |> Request.url("/v1/projects/{projectId}/triggers/{triggerId}", %{
      "projectId" => URI.encode_www_form(project_id),
      "triggerId" => URI.encode_www_form(trigger_id)
    })
    |> Request.add_optional_params(allowed_params, opts)

  response = Connection.execute(connection, request)
  Response.decode(response, struct: %GoogleApi.CloudBuild.V1.Model.Empty{})
end
@doc """
Returns information about a `BuildTrigger`. This API is experimental.
## Parameters
- connection (GoogleApi.CloudBuild.V1.Connection): Connection to server
- project_id (String.t): ID of the project that owns the trigger.
- trigger_id (String.t): ID of the `BuildTrigger` to get.
- opts (KeywordList): [optional] Optional parameters
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :access_token (String.t): OAuth access token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :$.xgafv (String.t): V1 error format.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :callback (String.t): JSONP
- :alt (String.t): Data format for response.
## Returns
{:ok, %GoogleApi.CloudBuild.V1.Model.BuildTrigger{}} on success
{:error, info} on failure
"""
@spec cloudbuild_projects_triggers_get(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudBuild.V1.Model.BuildTrigger.t()} | {:error, Tesla.Env.t()}
def cloudbuild_projects_triggers_get(connection, project_id, trigger_id, opts \\ []) do
  # Optional query-string parameters the caller may supply via `opts`.
  allowed_params = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/projects/{projectId}/triggers/{triggerId}", %{
      "projectId" => URI.encode_www_form(project_id),
      "triggerId" => URI.encode_www_form(trigger_id)
    })
    |> Request.add_optional_params(allowed_params, opts)

  response = Connection.execute(connection, request)
  Response.decode(response, struct: %GoogleApi.CloudBuild.V1.Model.BuildTrigger{})
end
@doc """
Lists existing `BuildTrigger`s. This API is experimental.
## Parameters
- connection (GoogleApi.CloudBuild.V1.Connection): Connection to server
- project_id (String.t): ID of the project for which to list BuildTriggers.
- opts (KeywordList): [optional] Optional parameters
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :access_token (String.t): OAuth access token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :$.xgafv (String.t): V1 error format.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :callback (String.t): JSONP
- :alt (String.t): Data format for response.
## Returns
{:ok, %GoogleApi.CloudBuild.V1.Model.ListBuildTriggersResponse{}} on success
{:error, info} on failure
"""
@spec cloudbuild_projects_triggers_list(Tesla.Env.client(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudBuild.V1.Model.ListBuildTriggersResponse.t()}
        | {:error, Tesla.Env.t()}
def cloudbuild_projects_triggers_list(connection, project_id, opts \\ []) do
  # Optional query-string parameters the caller may supply via `opts`.
  allowed_params = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/projects/{projectId}/triggers", %{
      "projectId" => URI.encode_www_form(project_id)
    })
    |> Request.add_optional_params(allowed_params, opts)

  response = Connection.execute(connection, request)
  Response.decode(response, struct: %GoogleApi.CloudBuild.V1.Model.ListBuildTriggersResponse{})
end
@doc """
Updates a `BuildTrigger` by its project ID and trigger ID. This API is experimental.
## Parameters
- connection (GoogleApi.CloudBuild.V1.Connection): Connection to server
- project_id (String.t): ID of the project that owns the trigger.
- trigger_id (String.t): ID of the `BuildTrigger` to update.
- opts (KeywordList): [optional] Optional parameters
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :access_token (String.t): OAuth access token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :$.xgafv (String.t): V1 error format.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :callback (String.t): JSONP
- :alt (String.t): Data format for response.
- :body (BuildTrigger):
## Returns
{:ok, %GoogleApi.CloudBuild.V1.Model.BuildTrigger{}} on success
{:error, info} on failure
"""
@spec cloudbuild_projects_triggers_patch(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
        {:ok, GoogleApi.CloudBuild.V1.Model.BuildTrigger.t()} | {:error, Tesla.Env.t()}
def cloudbuild_projects_triggers_patch(connection, project_id, trigger_id, opts \\ []) do
  # Optional parameters the caller may supply via `opts`, mapped to where
  # each one is placed on the wire (query string or request body).
  allowed_params = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :body => :body,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:patch)
    |> Request.url("/v1/projects/{projectId}/triggers/{triggerId}", %{
      "projectId" => URI.encode_www_form(project_id),
      "triggerId" => URI.encode_www_form(trigger_id)
    })
    |> Request.add_optional_params(allowed_params, opts)

  response = Connection.execute(connection, request)
  Response.decode(response, struct: %GoogleApi.CloudBuild.V1.Model.BuildTrigger{})
end
@doc """
Runs a `BuildTrigger` at a particular source revision.
## Parameters
- connection (GoogleApi.CloudBuild.V1.Connection): Connection to server
- project_id (String.t): ID of the project.
- trigger_id (String.t): ID of the trigger.
- opts (KeywordList): [optional] Optional parameters
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :access_token (String.t): OAuth access token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :$.xgafv (String.t): V1 error format.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :callback (String.t): JSONP
- :alt (String.t): Data format for response.
- :body (RepoSource):
## Returns
{:ok, %GoogleApi.CloudBuild.V1.Model.Operation{}} on success
{:error, info} on failure
"""
@spec cloudbuild_projects_triggers_run(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
{:ok, GoogleApi.CloudBuild.V1.Model.Operation.t()} | {:error, Tesla.Env.t()}
def cloudbuild_projects_triggers_run(connection, project_id, trigger_id, opts \\ []) do
optional_params = %{
:key => :query,
:access_token => :query,
:upload_protocol => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:fields => :query,
:"$.xgafv" => :query,
:oauth_token => :query,
:callback => :query,
:alt => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{projectId}/triggers/{triggerId}:run", %{
"projectId" => URI.encode_www_form(project_id),
"triggerId" => URI.encode_www_form(trigger_id)
})
|> Request.add_optional_params(optional_params, opts)
connection
|> Connection.execute(request)
|> Response.decode(struct: %GoogleApi.CloudBuild.V1.Model.Operation{})
end
end
| 45.58797 | 1,193 | 0.66836 |
ff2cd065ae4158f9103159eea4b73deae7f7f1a2 | 2,811 | exs | Elixir | config/prod.exs | tacohole/banchan | 04c9f2fd5464e697d9b69e4bc524ace5f6487487 | [
"BlueOak-1.0.0",
"Apache-2.0"
] | null | null | null | config/prod.exs | tacohole/banchan | 04c9f2fd5464e697d9b69e4bc524ace5f6487487 | [
"BlueOak-1.0.0",
"Apache-2.0"
] | null | null | null | config/prod.exs | tacohole/banchan | 04c9f2fd5464e697d9b69e4bc524ace5f6487487 | [
"BlueOak-1.0.0",
"Apache-2.0"
] | null | null | null | import Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :banchan, BanchanWeb.Endpoint,
# Uncomment when it's time to actually go live.
url: [host: "dev.banchan.art", port: 443],
cache_static_manifest: "priv/static/cache_manifest.json",
force_ssl: [hsts: true, rewrite_on: [:x_forwarded_proto]],
secret_key_base: Map.fetch!(System.get_env(), "SECRET_KEY_BASE"),
server: true
# https: [
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("Banchan_SSL_KEY_PATH"),
# certfile: System.get_env("Banchan_SSL_CERT_PATH")
# ]
# Do not print debug messages in production
config :logger, level: :info
config :banchan, Banchan.Repo,
adapter: Ecto.Adapters.Postgres,
url: System.get_env("DATABASE_URL"),
ssl: true,
# Free tier db only allows 4 connections. Rolling deploys need pool_size*(n+1) connections where n is the number of app replicas.
pool_size: 2
config :banchan, Banchan.Mailer,
adapter: Bamboo.SendGridAdapter,
api_key: {:system, "SENDGRID_API_KEY"},
hackney_opts: [
recv_timeout: :timer.minutes(1)
]
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :banchan, BanchanWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
# transport_options: [socket_opts: [:inet6]]
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :banchan, BanchanWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# Finally import the config/prod.secret.exs which loads secrets
# and configuration from environment variables.
import_config "prod.secret.exs"
| 34.703704 | 131 | 0.716115 |
ff2d07376781b3f77ec701c3ae07467396fb197e | 651 | ex | Elixir | lib/benchee/benchmark/scenario_context.ex | gomoripeti/benchee | bd5185bcfdc8320c237959505e63a89d31e044a9 | [
"MIT"
] | 504 | 2019-03-19T16:50:04.000Z | 2022-03-30T03:33:11.000Z | lib/benchee/benchmark/scenario_context.ex | gomoripeti/benchee | bd5185bcfdc8320c237959505e63a89d31e044a9 | [
"MIT"
] | 46 | 2019-03-19T16:52:27.000Z | 2022-03-10T08:34:45.000Z | lib/benchee/benchmark/scenario_context.ex | gomoripeti/benchee | bd5185bcfdc8320c237959505e63a89d31e044a9 | [
"MIT"
] | 20 | 2019-03-21T06:36:33.000Z | 2022-03-10T06:13:40.000Z | defmodule Benchee.Benchmark.ScenarioContext do
  @moduledoc false
  # This struct holds the context in which any scenario is run.
  defstruct [
    # the overall Benchee.Configuration for the run
    :config,
    # a printer module (see @type below); exact protocol not visible here
    :printer,
    # time markers — presumably bound the measuring window; confirm in runner
    :current_time,
    :end_time,
    # before_scenario can alter the original input
    :scenario_input,
    # how many times the benchmarking function runs per measurement
    num_iterations: 1,
    # overhead of a bare function call — presumably subtracted from raw
    # measurements elsewhere; confirm in the measurement code
    function_call_overhead: 0
  ]
  @type t :: %__MODULE__{
          config: Benchee.Configuration.t(),
          printer: module,
          current_time: pos_integer | nil,
          end_time: pos_integer | nil,
          scenario_input: any,
          num_iterations: pos_integer,
          function_call_overhead: non_neg_integer
        }
end
| 24.111111 | 63 | 0.648233 |
ff2d44a77d9d99cf4e7ad6cb88e7e180575f223f | 5,351 | ex | Elixir | lib/nerves/release.ex | TheEndIsNear/nerves | 04eebbc725d74fa291d6b4844fc98850b0486ac9 | [
"Apache-2.0"
] | null | null | null | lib/nerves/release.ex | TheEndIsNear/nerves | 04eebbc725d74fa291d6b4844fc98850b0486ac9 | [
"Apache-2.0"
] | null | null | null | lib/nerves/release.ex | TheEndIsNear/nerves | 04eebbc725d74fa291d6b4844fc98850b0486ac9 | [
"Apache-2.0"
] | null | null | null | defmodule Nerves.Release do
# No leading '/' here since this is passed to mksquashfs and it
# doesn't like the leading slash.
@target_release_path "srv/erlang"
def init(%{options: options} = release) do
opts = Keyword.merge(options, release_opts())
release = %{
release
| options: opts,
boot_scripts: %{},
steps: release.steps ++ [&Nerves.Release.finalize/1]
}
if Code.ensure_loaded?(Shoehorn.Release) do
apply(Shoehorn.Release, :init, [release])
else
release
end
end
def finalize(release) do
bootfile_path = Path.join([release.version_path, bootfile()])
case File.read(bootfile_path) do
{:ok, bootfile} ->
Nerves.Release.write_rootfs_priorities(release.applications, release.path, bootfile)
_ ->
Nerves.Utils.Shell.warn("""
Unable to load bootfile: #{inspect(bootfile_path)}
Skipping rootfs priority file generation
""")
end
release
end
def bootfile() do
Application.get_env(:nerves, :firmware)[:bootfile] || "shoehorn.boot"
end
def erts() do
if Nerves.Env.loaded?() do
System.get_env("ERTS_DIR")
else
true
end
end
def write_rootfs_priorities(applications, host_release_path, bootfile) do
# Distillery support
applications = normalize_applications(applications)
target_release_path = @target_release_path
{:script, _, boot_script} = :erlang.binary_to_term(bootfile)
target_beam_files = target_beam_files(boot_script, host_release_path, target_release_path)
target_app_files = target_app_files(applications, target_release_path)
target_priv_dirs = target_priv_dirs(applications, target_release_path)
priorities =
(target_beam_files ++ target_app_files ++ target_priv_dirs)
|> List.flatten()
|> Enum.zip(32_000..1_000)
|> Enum.map(fn {file, priority} ->
file <> " " <> to_string(priority)
end)
|> Enum.join("\n")
build_path = Path.join([Mix.Project.build_path(), "nerves"])
File.mkdir_p(build_path)
Path.join(build_path, "rootfs.priorities")
|> File.write(priorities)
end
defp target_beam_files(boot_script, host_release_path, target_release_path) do
{_, loaded} =
Enum.reduce(boot_script, {nil, []}, fn
{:path, paths}, {_, loaded} ->
{rel_paths(paths), loaded}
{:primLoad, files}, {paths, loaded} ->
load =
Enum.reduce(paths, [], fn path, loaded ->
load =
Enum.reduce(files, [], fn file, loaded ->
filename = to_string(file) <> ".beam"
path =
if String.starts_with?(path, "lib/") do
# Distillery
Path.join([path, filename])
else
# Elixir 1.9 releases
Path.join(["lib", path, filename])
end
host_path = Path.join(host_release_path, path) |> Path.expand()
if File.exists?(host_path) do
[expand_target_path(target_release_path, path) | loaded]
else
loaded
end
end)
loaded ++ load
end)
{paths, [load | loaded]}
_, acc ->
acc
end)
loaded
|> Enum.reverse()
|> List.flatten()
end
defp target_app_files(applications, target_release_path) do
Enum.reduce(applications, [], fn
{app, vsn, path}, app_files ->
host_path = Path.join([path, "ebin", app <> ".app"])
if File.exists?(host_path) do
app_file_path =
Path.join([
target_release_path,
"lib",
app <> "-" <> vsn,
"ebin",
app <> ".app"
])
[app_file_path | app_files]
else
app_files
end
end)
end
defp target_priv_dirs(applications, target_release_path) do
Enum.reduce(applications, [], fn
{app, vsn, path}, priv_dirs ->
host_priv_dir = Path.join(path, "priv")
if File.dir?(host_priv_dir) and not_empty_dir(host_priv_dir) do
priv_dir = Path.join([target_release_path, "lib", app <> "-" <> to_string(vsn), "priv"])
[priv_dir | priv_dirs]
else
priv_dirs
end
end)
end
defp rel_paths(paths) do
paths
|> Enum.map(&to_string/1)
|> Enum.map(&Path.split/1)
|> Enum.map(fn [_root | path] ->
Path.join(path)
end)
end
defp release_opts do
[
quiet: true,
include_executables_for: [],
include_erts: &Nerves.Release.erts/0,
boot_scripts: []
]
end
defp not_empty_dir(dir) do
File.ls(dir) != {:ok, []}
end
defp normalize_applications(applications) do
Enum.map(applications, fn
%{name: app, vsn: vsn, path: path} ->
{to_string(app), to_string(vsn), Path.expand(to_string(path))}
{app, opts} ->
{to_string(app), to_string(opts[:vsn]), Path.expand(to_string(opts[:path]))}
end)
end
defp expand_target_path(target_release_path, path) do
Path.join(["/", target_release_path, path])
|> Path.expand(target_release_path)
|> String.trim_leading("/")
end
end
| 26.755 | 98 | 0.578023 |
ff2d625c2ff8b9fc87497835c7e4dd03ee34d1a1 | 2,090 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/model/row_access_policy_reference.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/big_query/lib/google_api/big_query/v2/model/row_access_policy_reference.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/big_query/lib/google_api/big_query/v2/model/row_access_policy_reference.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQuery.V2.Model.RowAccessPolicyReference do
  @moduledoc """
  Reference identifying a BigQuery row access policy.

  ## Attributes

  * `datasetId` (*type:* `String.t`, *default:* `nil`) - [Required] The ID of the dataset containing this row access policy.
  * `policyId` (*type:* `String.t`, *default:* `nil`) - [Required] The ID of the row access policy. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
  * `projectId` (*type:* `String.t`, *default:* `nil`) - [Required] The ID of the project containing this row access policy.
  * `tableId` (*type:* `String.t`, *default:* `nil`) - [Required] The ID of the table containing this row access policy.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          datasetId: String.t() | nil,
          policyId: String.t() | nil,
          projectId: String.t() | nil,
          tableId: String.t() | nil
        }

  field :datasetId
  field :policyId
  field :projectId
  field :tableId
end
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.RowAccessPolicyReference do
  # `@for` is the struct module this protocol is implemented for; delegate
  # to the generated decode/2 on the model itself.
  def decode(value, options), do: @for.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.RowAccessPolicyReference do
  # All generated models share the Gax base encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 37.321429 | 219 | 0.703828 |
ff2d7241f68fbff5f5b972e914fca31e9f37e52b | 989 | ex | Elixir | lib/blog_web/views/error_helpers.ex | Miradorn/absinthe_tutorial | fbe63be1d4acc92c7a85d4a04fd259ac6b5ef072 | [
"MIT"
] | 69 | 2017-11-15T17:26:57.000Z | 2022-01-25T16:37:40.000Z | lib/blog_web/views/error_helpers.ex | Miradorn/absinthe_tutorial | fbe63be1d4acc92c7a85d4a04fd259ac6b5ef072 | [
"MIT"
] | 24 | 2020-01-27T20:44:40.000Z | 2020-10-29T05:48:31.000Z | lib/blog_web/views/error_helpers.ex | Miradorn/absinthe_tutorial | fbe63be1d4acc92c7a85d4a04fd259ac6b5ef072 | [
"MIT"
] | 35 | 2017-11-12T22:04:10.000Z | 2021-07-22T18:00:39.000Z | defmodule BlogWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# Because error messages were defined within Ecto, we must
# call the Gettext module passing our Gettext backend. We
# also use the "errors" domain as translations are placed
# in the errors.po file.
# Ecto will pass the :count keyword if the error message is
# meant to be pluralized.
# On your own code and templates, depending on whether you
# need the message to be pluralized or not, this could be
# written simply as:
#
# dngettext "errors", "1 file", "%{count} files", count
# dgettext "errors", "is invalid"
#
if count = opts[:count] do
Gettext.dngettext(BlogWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(BlogWeb.Gettext, "errors", msg, opts)
end
end
end
| 32.966667 | 73 | 0.669363 |
ff2dcd36f688d74689f9c640df6023c5ffe6237b | 104 | exs | Elixir | .formatter.exs | silathdiir/rat_error | eff3b46db28071814b91c623729bbd951bbf6cfa | [
"MIT"
] | 3 | 2017-10-10T10:38:38.000Z | 2018-03-15T01:57:34.000Z | .formatter.exs | silathdiir/rat_error | eff3b46db28071814b91c623729bbd951bbf6cfa | [
"MIT"
] | null | null | null | .formatter.exs | silathdiir/rat_error | eff3b46db28071814b91c623729bbd951bbf6cfa | [
"MIT"
] | 1 | 2017-10-10T10:39:17.000Z | 2017-10-10T10:39:17.000Z | # Used by "mix format"
[
inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"],
line_length: 80,
]
| 17.333333 | 57 | 0.557692 |
ff2e0905b57f5acc717def9d963965ef685cd969 | 143 | ex | Elixir | lib/bundlex/platform/linux.ex | kianmeng/bundlex | c3477977ab1c46bc87c62fbade9b0a7c13c791be | [
"Apache-2.0"
] | 40 | 2018-06-10T20:55:32.000Z | 2021-12-12T22:08:25.000Z | lib/bundlex/platform/linux.ex | kianmeng/bundlex | c3477977ab1c46bc87c62fbade9b0a7c13c791be | [
"Apache-2.0"
] | 40 | 2018-07-18T16:14:52.000Z | 2022-03-27T16:33:09.000Z | lib/bundlex/platform/linux.ex | kianmeng/bundlex | c3477977ab1c46bc87c62fbade9b0a7c13c791be | [
"Apache-2.0"
] | 10 | 2018-08-02T06:07:42.000Z | 2022-03-25T15:41:14.000Z | defmodule Bundlex.Platform.Linux do
@moduledoc false
use Bundlex.Platform
def toolchain_module() do
Bundlex.Toolchain.GCC
end
end
| 15.888889 | 35 | 0.762238 |
ff2e4da474cb66ddab44df318c1619b41b5d368b | 23,990 | ex | Elixir | lib/cldr/plural_rules/plural_rule.ex | nickel/cldr | 4c463326bd699a167d58b00aeb226489306dd7f8 | [
"Apache-2.0"
] | null | null | null | lib/cldr/plural_rules/plural_rule.ex | nickel/cldr | 4c463326bd699a167d58b00aeb226489306dd7f8 | [
"Apache-2.0"
] | null | null | null | lib/cldr/plural_rules/plural_rule.ex | nickel/cldr | 4c463326bd699a167d58b00aeb226489306dd7f8 | [
"Apache-2.0"
] | null | null | null | defmodule Cldr.Number.PluralRule do
@moduledoc """
Defines the plural rule implementation
modules. The functions in this module
generate code to implement the plural
rules of CLDR.
"""
@type operand :: any()
@type plural_type() :: :zero | :one | :two | :few | :many | :other
@doc """
Returns a list of the possible pluralization
types
"""
@spec known_plural_types :: list(plural_type())
@plural_types [:zero, :one, :two, :few, :many, :other]
def known_plural_types do
@plural_types
end
@doc """
Returns the plural type for a given number.
## Arguments
* `number` is an integer, float or Decimal number
* `backend` is any module that includes `use Cldr` and therefore
is a `Cldr` backend module. The default is `Cldr.default_backend!/0`.
* `options` is a keyword list of options
## Options
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`, The
default is `Cldr.get_locale/0`.
* `backend` is any module that includes `use Cldr` and therefore
is a `Cldr` backend module. The default is `Cldr.default_backend!/0`.
This option allows the backend to be specified as an argument or an option.
* `type` is either `Cardinal` or `Ordinal`. The default is `Cardinal`.
## Examples
iex> Cldr.Number.PluralRule.plural_type(123)
:other
iex> Cldr.Number.PluralRule.plural_type(123, type: Ordinal)
:few
iex> Cldr.Number.PluralRule.plural_type(123, type: Cardinal)
:other
iex> Cldr.Number.PluralRule.plural_type(2, locale: "de")
:other
"""
  def plural_type(number, locale, rounding \\ Math.default_rounding())
  # Called as `plural_type(number, options)`: the options list landed in the
  # backend position, so resolve the backend from the options instead.
  def plural_type(number, options, []) when is_list(options) do
    {_locale, backend} = Cldr.locale_and_backend_from(options)
    plural_type(number, backend, options)
  end
  # No backend supplied: resolve it (and later the locale) from the options.
  def plural_type(number, nil, options) do
    {_locale, backend} = Cldr.locale_and_backend_from(options)
    plural_type(number, backend, options)
  end
  # Dispatch to the backend's Number.Cardinal or Number.Ordinal module for
  # the requested (or current) locale.
  def plural_type(number, backend, options) do
    locale = Keyword.get_lazy(options, :locale, &Cldr.get_locale/0)
    type = Keyword.get(options, :type, Cardinal)
    module = Module.concat([backend, Number, type])
    module.plural_rule(number, locale)
  end
defmacro __using__(opts) do
module_name = Keyword.get(opts, :type)
unless module_name in [:cardinal, :ordinal] do
raise ArgumentError,
"Invalid option #{inspect(opts)}. `type: :cardinal` or " <>
"`type: :ordinal` are the only valid options"
end
quote location: :keep do
alias Cldr.Math
alias Cldr.LanguageTag
alias Cldr.Locale
import Cldr.Digits,
only: [number_of_integer_digits: 1, remove_trailing_zeros: 1, fraction_as_integer: 2]
import Cldr.Number.PluralRule.Compiler
import Cldr.Number.PluralRule.Transformer
@module Atom.to_string(unquote(module_name)) |> String.capitalize()
@rules Cldr.Config.cldr_data_dir()
|> Path.join("/plural_rules.json")
|> File.read!()
|> Cldr.Config.json_library().decode!
|> Map.get(Atom.to_string(unquote(module_name)))
|> Cldr.Config.normalize_plural_rules()
|> Map.new()
@rules_locales @rules
|> Map.keys()
|> Enum.sort()
@nplurals_range [0, 1, 2, 3, 4, 5]
@gettext_nplurals @rules
|> Enum.map(fn {locale, rules} ->
{locale, Keyword.keys(rules) |> Enum.zip(@nplurals_range)}
end)
|> Map.new()
@config Keyword.get(unquote(opts), :config)
@backend Map.get(@config, :backend)
@known_locale_names @rules_locales
|> MapSet.new()
|> MapSet.intersection(
MapSet.new(Cldr.Config.known_locale_names(@config))
)
|> MapSet.to_list()
|> Enum.sort()
@doc """
The locale names for which plural rules are defined.
"""
def available_locale_names do
@rules_locales
end
@doc """
The configured locales for which plural rules are defined.
Returns the intersection of `#{inspect(@backend)}.known_locale_names/0` and
the locales for which #{@module} plural rules are defined.
There are many `Cldr` locales which don't have their own plural
rules so this list is the intersection of `Cldr`'s configured
locales and those that have rules.
"""
@spec known_locale_names :: [Locale.locale_name(), ...]
def known_locale_names do
@known_locale_names
end
@doc """
Returns all the plural rules defined in CLDR.
"""
@spec plural_rules :: map()
def plural_rules do
@rules
end
@doc false
@spec gettext_nplurals :: map()
def gettext_nplurals do
@gettext_nplurals
end
if unquote(module_name) == :cardinal do
@doc """
Pluralize a number using #{unquote(module_name)} plural rules
and a substition map.
## Arguments
* `number` is an integer, float or Decimal
* `locale` is any locale returned by `#{inspect(@backend)}.Locale.new!/1` or any
`locale_name` returned by `#{inspect(@backend)}.known_locale_names/0`
* `substitutions` is a map that maps plural keys to a string.
The valid substitution keys are `:zero`, `:one`, `:two`,
`:few`, `:many` and `:other`.
See also `#{inspect(__MODULE__)}.#{@module}.plural_rule/3`.
## Examples
iex> #{inspect(__MODULE__)}.pluralize 1, "en", %{one: "one"}
"one"
iex> #{inspect(__MODULE__)}.pluralize 2, "en", %{one: "one"}
nil
iex> #{inspect(__MODULE__)}.pluralize 2, "en", %{one: "one", two: "two", other: "other"}
"other"
iex> #{inspect(__MODULE__)}.pluralize 22, "en", %{one: "one", two: "two", other: "other"}
"other"
iex> #{inspect(__MODULE__)}.pluralize Decimal.new(1), "en", %{one: "one"}
"one"
iex> #{inspect(__MODULE__)}.pluralize Decimal.new(2), "en", %{one: "one"}
nil
iex> #{inspect(__MODULE__)}.pluralize Decimal.new(2), "en", %{one: "one", two: "two"}
nil
iex> #{inspect(__MODULE__)}.pluralize 1..10, "ar", %{one: "one", few: "few", other: "other"}
"few"
iex> #{inspect(__MODULE__)}.pluralize 1..10, "en", %{one: "one", few: "few", other: "other"}
"other"
"""
else
@doc """
Pluralize a number using #{unquote(module_name)} plural rules
and a substition map.
## Arguments
* `number` is an integer, float or Decimal or a `Range.t{}`. When a range, The
is that in any usage, the start value is strictly less than the end value,
and that no values are negative. Results for any cases that do not meet
these criteria are undefined.
* `locale` is any locale returned by `#{inspect(@backend)}.Locale.new!/1` or any
`locale_name` returned by `#{inspect(@backend)}.known_locale_names/0`
* `substitutions` is a map that maps plural keys to a string.
The valid substitution keys are `:zero`, `:one`, `:two`,
`:few`, `:many` and `:other`.
See also `#{inspect(__MODULE__)}.#{@module}.plural_rule/3`.
## Examples
iex> #{inspect(__MODULE__)}.pluralize 1, "en", %{one: "one"}
"one"
iex> #{inspect(__MODULE__)}.pluralize 2, "en", %{one: "one"}
nil
iex> #{inspect(__MODULE__)}.pluralize 2, "en", %{one: "one", two: "two"}
"two"
iex> #{inspect(__MODULE__)}.pluralize 22, "en", %{one: "one", two: "two", other: "other"}
"two"
iex> #{inspect(__MODULE__)}.pluralize Decimal.new(1), "en", %{one: "one"}
"one"
iex> #{inspect(__MODULE__)}.pluralize Decimal.new(2), "en", %{one: "one"}
nil
iex> #{inspect(__MODULE__)}.pluralize Decimal.new(2), "en", %{one: "one", two: "two"}
"two"
iex> #{inspect(__MODULE__)}.pluralize 1..10, "ar", %{one: "one", few: "few", other: "other"}
"other"
iex> #{inspect(__MODULE__)}.pluralize 1..10, "en", %{one: "one", few: "few", other: "other"}
"other"
"""
end
@default_substitution :other
@spec pluralize(
Math.number_or_decimal() | %Range{},
LanguageTag.t() | Locale.locale_name(),
%{}
) ::
any()
def pluralize(%Range{first: first, last: last}, locale_name, substitutions) do
with {:ok, language_tag} <- @backend.validate_locale(locale_name) do
first_rule = plural_rule(first, language_tag)
last_rule = plural_rule(last, language_tag)
combined_rule =
@backend.Number.PluralRule.Range.plural_rule(first_rule, last_rule, language_tag)
substitutions[combined_rule] || substitutions[@default_substitution]
end
end
def pluralize(number, locale_name, substitutions) when is_binary(locale_name) do
with {:ok, language_tag} <- @backend.validate_locale(locale_name) do
pluralize(number, language_tag, substitutions)
end
end
def pluralize(number, %LanguageTag{} = locale, %{} = substitutions)
when is_number(number) do
do_pluralize(number, locale, substitutions)
end
def pluralize(%Decimal{sign: sign, coef: coef, exp: 0} = number, locale, substitutions)
when is_integer(coef) do
number
|> Decimal.to_integer()
|> do_pluralize(locale, substitutions)
end
def pluralize(%Decimal{sign: sign, coef: coef, exp: exp} = number, locale, substitutions)
when is_integer(coef) and exp > 0 do
number
|> Decimal.to_integer()
|> do_pluralize(locale, substitutions)
end
def pluralize(%Decimal{sign: sign, coef: coef, exp: exp} = number, locale, substitutions)
when is_integer(coef) and exp < 0 and rem(coef, 10) == 0 do
number
|> Decimal.to_integer()
|> do_pluralize(locale, substitutions)
end
def pluralize(%Decimal{} = number, %LanguageTag{} = locale, %{} = substitutions) do
number
|> Decimal.to_float()
|> do_pluralize(locale, substitutions)
end
defp do_pluralize(number, %LanguageTag{} = locale, %{} = substitutions) do
plural = plural_rule(number, locale)
number = if (truncated = trunc(number)) == number, do: truncated, else: number
substitutions[number] || substitutions[plural] || substitutions[@default_substitution]
end
@doc """
Return the plural rules for a locale.
## Arguments
* `locale` is any locale returned by `#{inspect(@backend)}.Locale.new!/1` or any
`locale_name` returned by `#{inspect(@backend)}.known_locale_names/0`
The rules are returned in AST form after parsing.
"""
@spec plural_rules_for(Locale.locale_name() | LanguageTag.t()) :: [{atom(), list()}, ...]
def plural_rules_for(%LanguageTag{cldr_locale_name: cldr_locale_name, language: language}) do
plural_rules()[cldr_locale_name] || plural_rules()[language]
end
def plural_rules_for(locale_name) when is_binary(locale_name) do
with {:ok, locale} <- @backend.validate_locale(locale_name) do
plural_rules_for(locale)
end
end
# Plural Operand Meanings as defined in CLDR plural rules and used
# in the generated code
#
# Symbol Value
# n absolute value of the source number (integer and decimals).
# i integer digits of n.
# v number of visible fraction digits in n, with trailing zeros.
# w number of visible fraction digits in n, without trailing zeros.
# f visible fractional digits in n, with trailing zeros.
# t visible fractional digits in n, without trailing zeros.
if unquote(module_name) == :cardinal do
@doc """
Return the plural key for a given number in a given locale
Returns which plural key (`:zero`, `:one`, `:two`, `:few`,
`:many` or `:other`) a given number fits into within the
context of a given locale.
Note that these key names should not be interpreted
literally. For example, the key returned from
`Cldr.Number.Ordinal.plural_rule(0, "en")` is actually
`:other`, not `:zero`.
This key can then be used to format a number, date, time, unit,
list or other content in a plural-sensitive way.
## Arguments
* `number` is any `integer`, `float` or `Decimal`
* `locale` is any locale returned by `Cldr.Locale.new!/2` or any
`locale_name` returned by `#{inspect(@backend)}.known_locale_names/0`
* `rounding` is one of `#{inspect(Cldr.Math.rounding_modes())}`. The
default is `#{inspect(Cldr.Math.default_rounding_mode())}`.
## Examples
iex> #{inspect(__MODULE__)}.plural_rule 0, "fr"
:one
iex> #{inspect(__MODULE__)}.plural_rule 0, "en"
:other
"""
else
@doc """
Return the plural key for a given number in a given locale
Returns which plural key (`:zero`, `:one`, `:two`, `:few`,
`:many` or `:other`) a given number fits into within the
context of a given locale.
Note that these key names should not be interpreted
literally. For example, the key returned from
`Cldr.Number.Ordinal.plural_rule(0, "en")` is actually
`:other`, not `:zero`.
This key can then be used to format a number, date, time, unit,
list or other content in a plural-sensitive way.
## Arguments
* `number` is any `integer`, `float` or `Decimal`
* `locale` is any locale returned by `Cldr.Locale.new!/2` or any
`locale_name` returned by `#{inspect(@backend)}.known_locale_names/0`
* `rounding` is one of `#{inspect(Cldr.Math.rounding_modes())}`. The
default is `#{inspect(Cldr.Math.default_rounding_mode())}`.
## Examples
iex> #{inspect(__MODULE__)}.plural_rule 0, "fr"
:other
iex> #{inspect(__MODULE__)}.plural_rule 1, "en"
:one
"""
end
      @spec plural_rule(
              Math.number_or_decimal(),
              Locale.locale_name() | LanguageTag.t(),
              atom() | pos_integer()
            ) :: Cldr.Number.PluralRule.plural_type()
      def plural_rule(number, locale, rounding \\ Math.default_rounding())
      # A binary locale name is validated into a LanguageTag first.
      def plural_rule(number, locale_name, rounding) when is_binary(locale_name) do
        with {:ok, locale} <- @backend.validate_locale(locale_name) do
          plural_rule(number, locale, rounding)
        end
      end
      # A number given as a string is parsed as a Decimal.
      def plural_rule(number, locale, rounding) when is_binary(number) do
        plural_rule(Decimal.new(number), locale, rounding)
      end
      # Plural rule for an integer.
      # Operands follow the CLDR definitions noted above: for integers the
      # fraction-related operands (v, w, f, t) are all 0.
      def plural_rule(number, locale, _rounding) when is_integer(number) do
        n = abs(number)
        i = n
        v = 0
        w = 0
        f = 0
        t = 0
        e = 0
        do_plural_rule(locale, n, i, v, w, f, t, e)
      end
      # For a compact integer — a {number, e} tuple; `e` is presumably the
      # exponent used by compact/short formats (confirm against the caller).
      def plural_rule({number, e}, locale, _rounding) when is_integer(number) do
        n = abs(number)
        i = n
        v = 0
        w = 0
        f = 0
        t = 0
        do_plural_rule(locale, n, i, v, w, f, t, e)
      end
      # Plural rule for a float. Rounds to `rounding` places, then derives the
      # CLDR operands (n, i, v, w, f, t) from the rounded value.
      def plural_rule(number, locale, rounding)
          when is_float(number) and is_integer(rounding) and rounding > 0 do
        # Testing shows that this is working but just in case we
        # can go back to casting the number to a decimal and
        # using that path
        # plural_rule(Decimal.new(number), locale, rounding)
        n = Float.round(abs(number), rounding)
        i = trunc(n)
        v = rounding
        t = fraction_as_integer(n - i, rounding)
        w = number_of_integer_digits(t)
        f = trunc(t * Math.power_of_10(v - w))
        e = 0
        do_plural_rule(locale, n, i, v, w, f, t, e)
      end
      # Plural rule for a compact float: identical to the float clause, except
      # that the exponent operand `e` comes from the tuple instead of being 0.
      def plural_rule({number, e}, locale, rounding)
          when is_float(number) and is_integer(rounding) and rounding > 0 do
        # Testing shows that this is working but just in case we
        # can go back to casting the number to a decimal and
        # using that path
        # plural_rule(Decimal.new(number), locale, rounding)
        n = Float.round(abs(number), rounding)
        i = trunc(n)
        v = rounding
        t = fraction_as_integer(n - i, rounding)
        w = number_of_integer_digits(t)
        f = trunc(t * Math.power_of_10(v - w))
        do_plural_rule(locale, n, i, v, w, f, t, e)
      end
      # Plural rule for a %Decimal{}: operands computed with Decimal
      # arithmetic, then converted for the generated rule clauses.
      def plural_rule(%Decimal{} = number, locale, rounding)
          when is_integer(rounding) and rounding > 0 do
        # n absolute value of the source number (integer and decimals).
        n = Decimal.abs(number)
        # i integer digits of n.
        i = Decimal.round(n, 0, :floor)
        # v number of visible fraction digits in n, with trailing zeros.
        v = abs(n.exp)
        # f visible fractional digits in n, with trailing zeros.
        f =
          n
          |> Decimal.sub(i)
          |> Decimal.mult(Decimal.new(Math.power_of_10(v)))
          |> Decimal.round(0, :floor)
          |> Decimal.to_integer()
        # t visible fractional digits in n, without trailing zeros.
        t = remove_trailing_zeros(f)
        # w number of visible fraction digits in n, without trailing zeros.
        w = number_of_integer_digits(t)
        i = Decimal.to_integer(i)
        n = Math.to_float(n)
        e = 0
        do_plural_rule(locale, n, i, v, w, f, t, e)
      end
      # Plural rule for a compact %Decimal{}: same as above with `e` taken
      # from the tuple.
      def plural_rule({%Decimal{} = number, e}, locale, rounding)
          when is_integer(rounding) and rounding > 0 do
        # n absolute value of the source number (integer and decimals).
        n = Decimal.abs(number)
        # i integer digits of n.
        i = Decimal.round(n, 0, :floor)
        # v number of visible fraction digits in n, with trailing zeros.
        v = abs(n.exp)
        # f visible fractional digits in n, with trailing zeros.
        f =
          n
          |> Decimal.sub(i)
          |> Decimal.mult(Decimal.new(Math.power_of_10(v)))
          |> Decimal.round(0, :floor)
          |> Decimal.to_integer()
        # t visible fractional digits in n, without trailing zeros.
        t = remove_trailing_zeros(f)
        # w number of visible fraction digits in n, without trailing zeros.
        w = number_of_integer_digits(t)
        i = Decimal.to_integer(i)
        n = Math.to_float(n)
        do_plural_rule(locale, n, i, v, w, f, t, e)
      end
end
end
@doc false
# Returns quoted AST that, when injected into a Cldr backend, defines the
# backend's `Number.Ordinal` and `Number.Cardinal` plural-rule modules.
# The quoted code *is* the return value, so it is reproduced verbatim;
# only commentary is added here.
def define_ordinal_and_cardinal_modules(config) do
  quote location: :keep do
    defmodule Number.Ordinal do
      @moduledoc false
      if Cldr.Config.include_module_docs?(unquote(config.generate_docs)) do
        @moduledoc """
        Implements ordinal plural rules for numbers.
        """
      end

      use Cldr.Number.PluralRule, type: :ordinal, config: unquote(Macro.escape(config))
      alias Cldr.LanguageTag

      unquote(Cldr.Number.PluralRule.define_plural_rules())
    end

    defmodule Number.Cardinal do
      @moduledoc false
      if Cldr.Config.include_module_docs?(unquote(config.generate_docs)) do
        @moduledoc """
        Implements cardinal plural rules for numbers.
        """
      end

      use Cldr.Number.PluralRule, type: :cardinal, config: unquote(Macro.escape(config))
      alias Cldr.LanguageTag

      unquote(Cldr.Number.PluralRule.define_plural_rules())
    end
  end
end
@doc false
# Returns quoted AST defining the backend's `Number.PluralRule.Range`
# module, which resolves the plural category for a numeric range from
# CLDR's plural-range data. The quoted code is the return value.
def define_plural_ranges(config) do
  quote location: :keep, bind_quoted: [config: Macro.escape(config)] do
    defmodule Number.PluralRule.Range do
      @moduledoc false
      if Cldr.Config.include_module_docs?(config.generate_docs) do
        @moduledoc """
        Implements plural rules for ranges
        """
      end

      alias Cldr.Number.PluralRule

      @doc """
      Returns a final plural type for a start-of-range plural
      type, an end-of-range plural type and a locale.

      ## Arguments

      * `first` is a plural type for the start of a range

      * `last` is a plural type for the end of a range

      * `locale` is any `Cldr.LanguageTag.t` or a language name
        (not locale name)

      ## Example

          iex> #{inspect(__MODULE__)}.plural_rule :other, :few, "ar"
          :few

      """
      @spec plural_rule(
              first :: PluralRule.plural_type(),
              last :: PluralRule.plural_type(),
              locale :: Cldr.Locale.locale_name() | Cldr.LanguageTag.t()
            ) :: PluralRule.plural_type()
      # A full language tag is reduced to its bare language name.
      def plural_rule(first, last, %Cldr.LanguageTag{language: language}) do
        plural_rule(first, last, language)
      end

      # One generated clause per {start, end} combination in CLDR's
      # plural-range data, constrained to the locales it applies to.
      for %{locales: locales, ranges: ranges} <- Cldr.Config.plural_ranges(),
          range <- ranges do
        def plural_rule(unquote(range.start), unquote(range.end), locale)
            when locale in unquote(locales) do
          unquote(range.result)
        end
      end

      # Default when no range rule matches.
      def plural_rule(_start, _end, _locale) do
        :other
      end
    end
  end
end
@doc false
# Returns quoted AST that generates one private `do_plural_rule/8` clause
# per known locale (from `@known_locale_names`/`@rules` in the target
# module), plus a fallback that retries with the base language before
# giving up with an error tuple. The quoted code is the return value.
def define_plural_rules do
  quote bind_quoted: [], location: :keep do
    alias Cldr.Number.PluralRule

    # Generate the functions to process plural rules
    @spec do_plural_rule(
            LanguageTag.t(),
            number(),
            PluralRule.operand(),
            PluralRule.operand(),
            PluralRule.operand(),
            PluralRule.operand(),
            [integer(), ...] | integer(),
            number()
          ) :: :zero | :one | :two | :few | :many | :other

    # Function body is the AST of the function which needs to be injected
    # into the function definition.
    for locale_name <- @known_locale_names do
      function_body =
        @rules
        |> Map.get(locale_name)
        |> rules_to_condition_statement(__MODULE__)

      defp do_plural_rule(
             %LanguageTag{cldr_locale_name: unquote(locale_name)},
             n,
             i,
             v,
             w,
             f,
             t,
             e
           ) do
        # silence unused variable warnings
        _ = {n, i, v, w, f, t, e}
        unquote(function_body)
      end
    end

    # If we get here then it means that the locale doesn't have a plural rule,
    # but the language might
    defp do_plural_rule(%LanguageTag{} = language_tag, n, i, v, w, f, t, e) do
      if language_tag.language == language_tag.cldr_locale_name do
        {
          :error,
          {
            Cldr.UnknownPluralRules,
            "No #{@module} plural rules available for #{inspect(language_tag)}"
          }
        }
      else
        # Retry using the bare language as the locale name.
        language_tag
        |> Map.put(:cldr_locale_name, language_tag.language)
        |> do_plural_rule(n, i, v, w, f, t, e)
      end
    end
  end
end
end
| 32.595109 | 104 | 0.583993 |
ff2e5fdb860224812666e59300e8cf09b259543b | 459 | exs | Elixir | config/test.exs | MattIII/CastBug | aad9eabce5af4a80dd0f4383683746a1f518e377 | [
"MIT"
] | 1 | 2019-03-14T03:48:29.000Z | 2019-03-14T03:48:29.000Z | config/test.exs | MattIII/CastBug | aad9eabce5af4a80dd0f4383683746a1f518e377 | [
"MIT"
] | null | null | null | config/test.exs | MattIII/CastBug | aad9eabce5af4a80dd0f4383683746a1f518e377 | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :castbug, CastBugWeb.Endpoint,
  http: [port: 4002],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn

# Configure your database
# (local-only credentials; tests use the Ecto SQL sandbox pool)
config :castbug, CastBug.Repo,
  username: "postgres",
  password: "postgres",
  database: "castbug_test",
  hostname: "localhost",
  pool: Ecto.Adapters.SQL.Sandbox
ff2e68f3f6e003cdd7dd930bdfeef4ccb64e8bd6 | 1,254 | ex | Elixir | lib/yakusu_web/controllers/books/translation_controller.ex | jiegillet/yakusu | 3d9cbc19b0a6112604b362186211400a2e4923b2 | [
"MIT"
] | 1 | 2021-08-17T06:54:02.000Z | 2021-08-17T06:54:02.000Z | lib/yakusu_web/controllers/books/translation_controller.ex | jiegillet/yakusu | 3d9cbc19b0a6112604b362186211400a2e4923b2 | [
"MIT"
] | null | null | null | lib/yakusu_web/controllers/books/translation_controller.ex | jiegillet/yakusu | 3d9cbc19b0a6112604b362186211400a2e4923b2 | [
"MIT"
] | null | null | null | defmodule YakusuWeb.Books.TranslationController do
use YakusuWeb, :controller
alias Yakusu.Books
alias Yakusu.Books.Translation
action_fallback YakusuWeb.FallbackController
def index(conn, _params) do
  # List every stored translation and render the collection as JSON.
  render(conn, "index.json", translations: Books.list_translations())
end
def create(conn, %{"translation" => translation_params}) do
  # On success, render the new record with a 201 status. Any other
  # result is returned as-is so the action_fallback controller can
  # handle it — exactly how the bare `with` passed failures through.
  case Books.create_translation(translation_params) do
    {:ok, %Translation{} = translation} ->
      conn
      |> put_status(:created)
      |> render("show.json", translation: translation)

    other ->
      other
  end
end
def show(conn, %{"id" => id}) do
  # Bang variant is expected to raise when the id is unknown.
  render(conn, "show.json", translation: Books.get_translation!(id))
end
def update(conn, %{"id" => id, "translation" => translation_params}) do
  translation = Books.get_translation!(id)

  # Success renders the updated record; any other result falls
  # through to the action_fallback controller, as `with` did.
  case Books.update_translation(translation, translation_params) do
    {:ok, %Translation{} = translation} ->
      render(conn, "show.json", translation: translation)

    other ->
      other
  end
end
def delete(conn, %{"id" => id}) do
  translation = Books.get_translation!(id)

  # 204 No Content on success; other results pass through unchanged.
  case Books.delete_translation(translation) do
    {:ok, %Translation{}} ->
      send_resp(conn, :no_content, "")

    other ->
      other
  end
end
end
| 29.162791 | 108 | 0.694577 |
ff2ed8b9348938e73466d8ea06d3603063a72d1e | 2,813 | exs | Elixir | exercises/concept/language-list/test/language_list_test.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/concept/language-list/test/language_list_test.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/concept/language-list/test/language_list_test.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule LanguageListTest do
use ExUnit.Case
describe "new/0" do
  @tag task_id: 1
  test "new list" do
    # A fresh language list is the empty list.
    assert [] == LanguageList.new()
  end
end
describe "add/2" do
  @tag task_id: 2
  test "add a language to a list" do
    # Adding to an empty list yields a singleton list.
    assert LanguageList.add(LanguageList.new(), "Elixir") == ["Elixir"]
  end

  @tag task_id: 2
  test "add several languages to a list" do
    # Each add puts the newest language at the head of the list.
    result =
      Enum.reduce(["Clojure", "Haskell", "Erlang", "F#", "Elixir"], LanguageList.new(), fn
        language, acc -> LanguageList.add(acc, language)
      end)

    assert result == ["Elixir", "F#", "Erlang", "Haskell", "Clojure"]
  end
end
describe "remove/1" do
  @tag task_id: 3
  test "add then remove results in empty list" do
    singleton = LanguageList.add(LanguageList.new(), "Elixir")
    assert LanguageList.remove(singleton) == []
  end

  @tag task_id: 3
  test "adding two languages, when removed, removes first item" do
    # "Elixir" was added last and sits at the head, so it is removed.
    pair =
      LanguageList.new()
      |> LanguageList.add("F#")
      |> LanguageList.add("Elixir")

    assert LanguageList.remove(pair) == ["F#"]
  end
end
describe "first/1" do
  @tag task_id: 4
  test "add one language, then get the first" do
    single = LanguageList.add(LanguageList.new(), "Elixir")
    assert LanguageList.first(single) == "Elixir"
  end

  @tag task_id: 4
  test "add a few languages, then get the first" do
    # "F#" was added last, so it is returned as the first element.
    list =
      Enum.reduce(["Elixir", "Prolog", "F#"], LanguageList.new(), fn
        language, acc -> LanguageList.add(acc, language)
      end)

    assert LanguageList.first(list) == "F#"
  end
end
describe "count/1" do
  @tag task_id: 5
  test "the count of a new list is 0" do
    assert LanguageList.count(LanguageList.new()) == 0
  end

  @tag task_id: 5
  test "the count of a one-language list is 1" do
    one = LanguageList.add(LanguageList.new(), "Elixir")
    assert LanguageList.count(one) == 1
  end

  @tag task_id: 5
  test "the count of a multiple-item list is equal to its length" do
    three =
      Enum.reduce(["Elixir", "Prolog", "F#"], LanguageList.new(), fn
        language, acc -> LanguageList.add(acc, language)
      end)

    assert LanguageList.count(three) == 3
  end
end
describe "exciting_list?/1" do
  @tag task_id: 6
  test "an exciting language list" do
    # This sample list is expected to qualify as exciting.
    sample = ["Clojure", "Haskell", "Erlang", "F#", "Elixir"]
    assert LanguageList.exciting_list?(sample)
  end

  @tag task_id: 6
  test "not an exciting language list" do
    refute LanguageList.exciting_list?(["Java", "C", "JavaScript"])
  end
end
end
| 24.042735 | 97 | 0.579453 |
ff2ed909d7fc8a5e78b48c4240449bf7a4dee00a | 3,674 | ex | Elixir | lib/day10/asteroid_scanner.ex | anamba/adventofcode2019 | a5de43ddce8b40f67c3017f349d8563c73c94e20 | [
"MIT"
] | null | null | null | lib/day10/asteroid_scanner.ex | anamba/adventofcode2019 | a5de43ddce8b40f67c3017f349d8563c73c94e20 | [
"MIT"
] | null | null | null | lib/day10/asteroid_scanner.ex | anamba/adventofcode2019 | a5de43ddce8b40f67c3017f349d8563c73c94e20 | [
"MIT"
] | null | null | null | defmodule Day10.AsteroidScanner do
@doc """
iex> Day10.AsteroidScanner.part1("day10-sample0.txt")
"Best is 3,4 with 8 other asteroids detected"
iex> Day10.AsteroidScanner.part1("day10-sample1.txt")
"Best is 5,8 with 33 other asteroids detected"
iex> Day10.AsteroidScanner.part1("day10-sample2.txt")
"Best is 1,2 with 35 other asteroids detected"
iex> Day10.AsteroidScanner.part1("day10-sample3.txt")
"Best is 6,3 with 41 other asteroids detected"
iex> Day10.AsteroidScanner.part1("day10-sample4.txt")
"Best is 11,13 with 210 other asteroids detected"
"""
# Finds the asteroid that can see the most other asteroids in the map
# read from `inputs/<filename>` and reports it as a formatted string.
def part1(filename \\ "day10.txt") do
  # BUG FIX: the path literal had been corrupted to "inputs/#(unknown)"
  # (a plain string, no interpolation), so the `filename` argument was
  # silently ignored. Restored interpolation, which the doctests above
  # (different expected results per sample file) require.
  lines =
    "inputs/#{filename}"
    |> File.stream!()
    |> Enum.map(fn line -> line |> String.trim() |> String.graphemes() end)

  # Grid bounds: the map is rectangular, so the first row gives the width.
  xmin = 0
  ymin = 0
  xmax = length(List.first(lines)) - 1
  ymax = length(lines) - 1
  bounds = [xmin, ymin, xmax, ymax]

  # parse input into list of coordinates plus a constant-time lookup map
  asteroid_list = to_asteroid_list(lines)
  asteroid_map = to_asteroid_map(asteroid_list)

  # Score every asteroid by visible count; sorting {count, coord} tuples
  # and taking the last yields the best-scoring location.
  {n, {x, y}} =
    asteroid_list
    |> Enum.map(&evaluate_location(&1, asteroid_list, asteroid_map, bounds))
    |> Enum.sort()
    |> List.last()

  "Best is #{x},#{y} with #{n} other asteroids detected"
end
# Converts a grid of graphemes into the {x, y} coordinates of every "#"
# cell, in reading order (left-to-right, top-to-bottom).
def to_asteroid_list(lines) do
  for {row, y} <- Enum.with_index(lines),
      {cell, x} <- Enum.with_index(row),
      cell == "#",
      do: {x, y}
end
# Builds a constant-time membership map keyed by asteroid coordinate.
# Replaces the hand-rolled recursion (which merged a fresh map with
# `Enum.into/2` at every step — quadratic) with a single `Map.new/2`
# pass; the result is identical.
def to_asteroid_map(asteroid_list), do: Map.new(asteroid_list, &{&1, true})
@doc """
iex> Day10.AsteroidScanner.evaluate_location({1, 2}, [{1, 0}, {4, 0}, {0, 2}, {1, 2}, {2, 2}, {3, 2}, {4, 2}, {4, 3}, {3, 4}, {4, 4}], %{{0, 2} => true, {1, 0} => true, {1, 2} => true, {2, 2} => true, {3, 2} => true, {3, 4} => true, {4, 0} => true, {4, 2} => true, {4, 3} => true, {4, 4} => true}, [0,0, 4,4])
{7, {1, 2}}
iex> Day10.AsteroidScanner.evaluate_location({3, 4}, [{1, 0}, {4, 0}, {0, 2}, {1, 2}, {2, 2}, {3, 2}, {4, 2}, {4, 3}, {3, 4}, {4, 4}], %{{0, 2} => true, {1, 0} => true, {1, 2} => true, {2, 2} => true, {3, 2} => true, {3, 4} => true, {4, 0} => true, {4, 2} => true, {4, 3} => true, {4, 4} => true}, [0,0, 4,4])
{8, {3, 4}}
"""
# Scores one asteroid: counts how many *other* asteroids have an
# unobstructed line of sight to {x, y}. Returns {count, {x, y}} so that
# a plain sort orders locations by visibility.
def evaluate_location({x, y}, asteroid_list, asteroid_map, bounds) do
  visible_count =
    asteroid_list
    # Exclude the asteroid itself from the census.
    |> Stream.reject(&(&1 == {x, y}))
    |> Stream.map(&clear_line_of_sight?({x, y}, &1, asteroid_map, bounds))
    |> Enum.count(& &1)

  {visible_count, {x, y}}
end
# Walks from the source toward `dest` in minimal integer steps along the
# line joining them (slope reduced to lowest terms by `to_fraction/1`),
# reporting whether `dest` is reached before hitting another asteroid.

# if we have reached the destination, return true
def clear_line_of_sight?(n, n, _asteroid_map, _bounds), do: true

# if we are outside bounds, return false
def clear_line_of_sight?({x1, y1}, _dest, _asteroid_map, [xmin, ymin, xmax, ymax])
    when x1 < xmin or x1 > xmax or y1 < ymin or y1 > ymax,
    do: false

def clear_line_of_sight?({x1, y1}, dest = {x2, y2}, asteroid_map, bounds) do
  # calculate slope, reduced so each step lands on every lattice point
  # between the two asteroids
  {rise, run} = {y2 - y1, x2 - x1} |> to_fraction()
  # move one step toward <dest>
  new_pos = {x1 + run, y1 + rise}

  cond do
    # if we are at dest, return true
    new_pos == dest -> true
    # if we hit something else, return false
    asteroid_map[new_pos] -> false
    # otherwise, continue on
    true -> clear_line_of_sight?(new_pos, dest, asteroid_map, bounds)
  end
end
# Reduces a {numerator, denominator} pair to lowest terms via the GCD.
def to_fraction({numerator, denominator}) do
  divisor = Integer.gcd(numerator, denominator)
  {div(numerator, divisor), div(denominator, divisor)}
end
end
| 34.018519 | 315 | 0.567229 |
ff2eeb91eeb16e482a3090efe748d3673f654013 | 413 | ex | Elixir | test/support/assertion_helpers.ex | rkorzeniec/strava | aa99040355f72ff2766c080d5a919c66a53ac44b | [
"MIT"
] | 39 | 2016-04-09T21:50:34.000Z | 2022-03-04T09:16:25.000Z | test/support/assertion_helpers.ex | rkorzeniec/strava | aa99040355f72ff2766c080d5a919c66a53ac44b | [
"MIT"
] | 24 | 2016-05-29T15:49:07.000Z | 2022-01-17T11:57:05.000Z | test/support/assertion_helpers.ex | rkorzeniec/strava | aa99040355f72ff2766c080d5a919c66a53ac44b | [
"MIT"
] | 21 | 2016-02-02T01:19:23.000Z | 2022-02-06T23:29:32.000Z | defmodule Strava.AssertionHelpers do
import ExUnit.Assertions
# Passes for any non-empty string; flunks with a clear message on nil.
def assert_present(string) when is_binary(string), do: refute(string == "")
def assert_present(nil), do: flunk("Expected a string but got `nil`")
# Asserts the argument is a [lat, long] pair of numbers;
# any other shape flunks with the inspected value.
def assert_lat_long([lat, long]) do
  assert is_number(lat)
  assert is_number(long)
end

def assert_lat_long(invalid), do: flunk("Expected a lat/long but got: " <> inspect(invalid))
end
| 29.5 | 94 | 0.726392 |
ff2f0ebc01d423234e7968400886e6decf4934e7 | 2,723 | ex | Elixir | clients/tag_manager/lib/google_api/tag_manager/v2/model/variable_format_value.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/tag_manager/lib/google_api/tag_manager/v2/model/variable_format_value.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/tag_manager/lib/google_api/tag_manager/v2/model/variable_format_value.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
# NOTE: auto-generated model (see the file header); only comments added.
defmodule GoogleApi.TagManager.V2.Model.VariableFormatValue do
  @moduledoc """
  ## Attributes

  * `caseConversionType` (*type:* `String.t`, *default:* `nil`) - The option to convert a string-type variable value to either lowercase or uppercase.
  * `convertFalseToValue` (*type:* `GoogleApi.TagManager.V2.Model.Parameter.t`, *default:* `nil`) - The value to convert if a variable value is false.
  * `convertNullToValue` (*type:* `GoogleApi.TagManager.V2.Model.Parameter.t`, *default:* `nil`) - The value to convert if a variable value is null.
  * `convertTrueToValue` (*type:* `GoogleApi.TagManager.V2.Model.Parameter.t`, *default:* `nil`) - The value to convert if a variable value is true.
  * `convertUndefinedToValue` (*type:* `GoogleApi.TagManager.V2.Model.Parameter.t`, *default:* `nil`) - The value to convert if a variable value is undefined.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :caseConversionType => String.t(),
          :convertFalseToValue => GoogleApi.TagManager.V2.Model.Parameter.t(),
          :convertNullToValue => GoogleApi.TagManager.V2.Model.Parameter.t(),
          :convertTrueToValue => GoogleApi.TagManager.V2.Model.Parameter.t(),
          :convertUndefinedToValue => GoogleApi.TagManager.V2.Model.Parameter.t()
        }

  # Field declarations map JSON keys to decoded struct members.
  field(:caseConversionType)
  field(:convertFalseToValue, as: GoogleApi.TagManager.V2.Model.Parameter)
  field(:convertNullToValue, as: GoogleApi.TagManager.V2.Model.Parameter)
  field(:convertTrueToValue, as: GoogleApi.TagManager.V2.Model.Parameter)
  field(:convertUndefinedToValue, as: GoogleApi.TagManager.V2.Model.Parameter)
end
# Delegates Poison decoding to the generated model's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.TagManager.V2.Model.VariableFormatValue do
  def decode(value, options) do
    GoogleApi.TagManager.V2.Model.VariableFormatValue.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.TagManager.V2.Model.VariableFormatValue do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 46.152542 | 160 | 0.741829 |
ff2f282fee5e14a75002585a873491297300ca2d | 2,575 | ex | Elixir | rumbrella/apps/rumbl/web/controllers/video_controller.ex | benjohns1/phoenix-sandbox | 58fa6650828882f684a37de7af48d9fb6bd39f59 | [
"MIT"
] | null | null | null | rumbrella/apps/rumbl/web/controllers/video_controller.ex | benjohns1/phoenix-sandbox | 58fa6650828882f684a37de7af48d9fb6bd39f59 | [
"MIT"
] | null | null | null | rumbrella/apps/rumbl/web/controllers/video_controller.ex | benjohns1/phoenix-sandbox | 58fa6650828882f684a37de7af48d9fb6bd39f59 | [
"MIT"
] | null | null | null | defmodule Rumbl.VideoController do
use Rumbl.Web, :controller
alias Rumbl.Video
alias Rumbl.Category
plug :load_categories when action in [:new, :create, :edit, :update]
# Overrides Phoenix's default controller dispatch so that every action
# in this controller receives the logged-in user as a third argument.
def action(conn, _) do
  # change controller function signature: add 'user' as third arg and populate with session's currently logged-in user
  apply(__MODULE__, action_name(conn), [conn, conn.params, conn.assigns.current_user])
end
def index(conn, _params, user) do
  # Show only videos owned by the signed-in user.
  render(conn, "index.html", videos: Repo.all(user_videos(user)))
end
def new(conn, _params, user) do
  # Seed the form changeset with a video pre-associated to the user.
  changeset = Video.changeset(build_assoc(user, :videos))
  render(conn, "new.html", changeset: changeset)
end
def create(conn, %{"video" => video_params}, user) do
  # Build a changeset for a new video owned by the current user.
  changeset =
    user
    |> build_assoc(:videos)
    |> Video.changeset(video_params)

  case Repo.insert(changeset) do
    {:ok, _video} ->
      conn
      |> put_flash(:info, "Video created successfully.")
      |> redirect(to: video_path(conn, :index))
    {:error, changeset} ->
      # Re-render the form with validation errors.
      render(conn, "new.html", changeset: changeset)
  end
end
def show(conn, %{"id" => id}, user) do
  # Lookup is scoped to the user's own videos; raises if not theirs.
  render(conn, "show.html", video: Repo.get!(user_videos(user), id))
end
def edit(conn, %{"id" => id}, user) do
  # Load the user's video and present a fresh changeset for editing.
  video = Repo.get!(user_videos(user), id)
  render(conn, "edit.html", video: video, changeset: Video.changeset(video))
end
def update(conn, %{"id" => id, "video" => video_params}, user) do
  # Only videos owned by the current user can be updated.
  video = Repo.get!(user_videos(user), id)
  changeset = Video.changeset(video, video_params)

  case Repo.update(changeset) do
    {:ok, video} ->
      conn
      |> put_flash(:info, "Video updated successfully.")
      |> redirect(to: video_path(conn, :show, video))
    {:error, changeset} ->
      # Re-render the edit form with validation errors.
      render(conn, "edit.html", video: video, changeset: changeset)
  end
end
def delete(conn, %{"id" => id}, user) do
  # Scoped lookup prevents deleting another user's video by guessing ids.
  video = Repo.get!(user_videos(user), id)

  # Here we use delete! (with a bang) because we expect
  # it to always work (and if it does not, it will raise).
  Repo.delete!(video)

  conn
  |> put_flash(:info, "Video deleted successfully.")
  |> redirect(to: video_path(conn, :index))
end
# Query for the videos association of the given user.
defp user_videos(user), do: assoc(user, :videos)
defp load_categories(conn, _) do
  # Fetch alphabetized {name, id} pairs for the category select box.
  query = Category |> Category.alphabetical() |> Category.names_and_ids()
  assign(conn, :categories, Repo.all(query))
end
end
| 27.98913 | 120 | 0.633398 |
ff2f3c47e9e78c28a4174b14f763651080fa219b | 1,434 | ex | Elixir | lib/unifex/code_generator/base_types/atom.ex | vanillahsu/unifex | a501479b611a94c8a1477d969e170a7280673b7c | [
"Apache-2.0"
] | 42 | 2018-09-11T02:27:00.000Z | 2022-03-23T18:30:56.000Z | lib/unifex/code_generator/base_types/atom.ex | vanillahsu/unifex | a501479b611a94c8a1477d969e170a7280673b7c | [
"Apache-2.0"
] | 30 | 2018-10-18T10:56:22.000Z | 2022-03-09T13:04:51.000Z | lib/unifex/code_generator/base_types/atom.ex | vanillahsu/unifex | a501479b611a94c8a1477d969e170a7280673b7c | [
"Apache-2.0"
] | 7 | 2018-10-24T09:21:40.000Z | 2022-03-29T12:39:08.000Z | defmodule Unifex.CodeGenerator.BaseTypes.Atom do
@moduledoc """
Module implementing `Unifex.CodeGenerator.BaseType` behaviour for atoms.
Atoms in native code are represented by C-strings (`char *`)
Implemented both for NIF and CNode as function parameter as well as return type.
"""
use Unifex.CodeGenerator.BaseType
alias Unifex.CodeGenerator.BaseType
# Atoms are represented as C strings: one level of pointer indirection.
@impl BaseType
def ptr_level(_ctx), do: 1
@impl BaseType
def generate_native_type(ctx) do
  # Prefix with "const " only in :const mode; otherwise a plain char *.
  optional_const = if ctx.mode == :const, do: "const ", else: ""
  ~g<char #{optional_const} *>
end
@impl BaseType
def generate_initialization(name, _ctx) do
  # Start from NULL so the destructor can safely test before freeing.
  ~g<#{name} = NULL;>
end
@impl BaseType
def generate_destruction(name, _ctx) do
  # Free only when the parse step actually allocated the string.
  ~g<if (#{name} != NULL) unifex_free(#{name});>
end
defmodule NIF do
  @moduledoc false
  use Unifex.CodeGenerator.BaseType
  alias Unifex.CodeGenerator.BaseType

  @impl BaseType
  def generate_arg_parse(arg_term, var_name, _ctx) do
    # Emits the C call that reads the atom from the NIF term into
    # var_name (presumably allocating the buffer — see the helper's
    # name; confirm against the Unifex C runtime).
    ~g<unifex_alloc_and_get_atom(env, #{arg_term}, &#{var_name})>
  end
end
defmodule CNode do
  @moduledoc false
  use Unifex.CodeGenerator.BaseType
  alias Unifex.CodeGenerator.BaseType

  @impl BaseType
  def generate_arg_parse(argument, name, _ctx) do
    # Emits C code that allocates a MAXATOMLEN buffer and decodes the
    # atom out of the ei message buffer into it.
    ~g"""
    ({
      #{name} = (char *) unifex_alloc(MAXATOMLEN);
      ei_decode_atom(#{argument}->buff, #{argument}->index, #{name});
    })
    """
  end
end
end
| 24.724138 | 82 | 0.679916 |
ff2f7b4b7546fe4cabc9f879f3d37f12d14410aa | 1,751 | ex | Elixir | debian/manpage.1.ex | agdsn/xerxes | be31e2486e8686680fb22a972af34537572e116b | [
"BSD-3-Clause"
] | null | null | null | debian/manpage.1.ex | agdsn/xerxes | be31e2486e8686680fb22a972af34537572e116b | [
"BSD-3-Clause"
] | null | null | null | debian/manpage.1.ex | agdsn/xerxes | be31e2486e8686680fb22a972af34537572e116b | [
"BSD-3-Clause"
] | null | null | null | .\" Hey, EMACS: -*- nroff -*-
.\" First parameter, NAME, should be all caps
.\" Second parameter, SECTION, should be 1-8, maybe w/ subsection
.\" other parameters are allowed: see man(7), man(1)
.TH XERXES SECTION "August 27, 2008"
.\" Please adjust this date whenever revising the manpage.
.\"
.\" Some roff macros, for reference:
.\" .nh disable hyphenation
.\" .hy enable hyphenation
.\" .ad l left justify
.\" .ad b justify to both left and right margins
.\" .nf disable filling
.\" .fi enable filling
.\" .br insert line break
.\" .sp <n> insert n+1 empty lines
.\" for manpage-specific macros, see man(7)
.SH NAME
xerxes \- program to do something
.SH SYNOPSIS
.B xerxes
.RI [ options ] " files" ...
.br
.B bar
.RI [ options ] " files" ...
.SH DESCRIPTION
This manual page documents briefly the
.B xerxes
and
.B bar
commands.
.PP
.\" TeX users may be more comfortable with the \fB<whatever>\fP and
.\" \fI<whatever>\fP escape sequences to invoke bold face and italics,
.\" respectively.
\fBxerxes\fP is a program that...
.SH OPTIONS
These programs follow the usual GNU command line syntax, with long
options starting with two dashes (`-').
A summary of options is included below.
For a complete description, see the Info files.
.TP
.B \-h, \-\-help
Show summary of options.
.TP
.B \-v, \-\-version
Show version of program.
.SH SEE ALSO
.BR bar (1),
.BR baz (1).
.br
The programs are documented fully by
.IR "The Rise and Fall of a Fooish Bar" ,
available via the Info system.
.SH AUTHOR
xerxes was written by <upstream author>.
.PP
This manual page was written by Jan Losinshi <software@wh2.tu-dresden.de>,
for the Debian project (but may be used by others).
| 29.183333 | 74 | 0.672187 |
ff2fb0f15f21cd2eb4eb6fa9ec70e8416fe28c61 | 1,703 | ex | Elixir | host_core/lib/host_core/nats.ex | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | host_core/lib/host_core/nats.ex | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | host_core/lib/host_core/nats.ex | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | defmodule HostCore.Nats do
require Logger
# Gnat supervisor settings for the lattice RPC NATS connection.
def rpc_connection_settings(opts) do
  %{
    # (required) the registered name you want to give the Gnat connection
    name: :lattice_nats,
    # number of milliseconds to wait between consecutive reconnect attempts (default: 2_000)
    backoff_period: 4_000,
    connection_settings: [
      Map.merge(
        %{host: opts.rpc_host, port: opts.rpc_port},
        determine_auth_method(opts.rpc_seed, opts.rpc_jwt, "lattice rpc")
      )
    ]
  }
end
# Gnat supervisor settings for the control-interface NATS connection.
def control_connection_settings(opts) do
  %{
    # (required) the registered name you want to give the Gnat connection
    name: :control_nats,
    # number of milliseconds to wait between consecutive reconnect attempts (default: 2_000)
    backoff_period: 4_000,
    connection_settings: [
      Map.merge(
        %{host: opts.ctl_host, port: opts.ctl_port},
        determine_auth_method(opts.ctl_seed, opts.ctl_jwt, "control interface")
      )
    ]
  }
end
# Encodes arbitrary binaries as padding-free URL-safe Base64 so they
# are legal inside a NATS topic segment.
def sanitize_for_topic(input) do
  input |> Base.url_encode64(padding: false)
end
# Chooses NATS credentials from whichever of seed/JWT were provided.
# A JWT without a seed is not a usable combination here, so it falls
# through to the anonymous branch — same as the original cond order.
defp determine_auth_method(nkey_seed, jwt, conn_name) do
  case {jwt, nkey_seed} do
    {jwt, seed} when jwt != "" and seed != "" ->
      Logger.info("Authenticating to #{conn_name} NATS with JWT and seed")
      %{jwt: jwt, nkey_seed: seed, auth_required: true}

    {_, seed} when seed != "" ->
      Logger.info("Authenticating to #{conn_name} NATS with seed")
      %{nkey_seed: seed, auth_required: true}

    _ ->
      Logger.info("Connecting to #{conn_name} NATS without authentication")
      %{}
  end
end
end
| 30.963636 | 94 | 0.651791 |
ff2fc71ee8bb77724f6e689b9e6b75b8d4f10918 | 2,957 | ex | Elixir | lib/regex_help/prom_ex.ex | maciejgryka/regex_help | 93fe33cadcf63597dadb5e3dac9a0ee8f05dac15 | [
"MIT"
] | 6 | 2021-05-24T08:21:34.000Z | 2021-11-19T14:29:55.000Z | lib/regex_help/prom_ex.ex | maciejgryka/regex_help | 93fe33cadcf63597dadb5e3dac9a0ee8f05dac15 | [
"MIT"
] | null | null | null | lib/regex_help/prom_ex.ex | maciejgryka/regex_help | 93fe33cadcf63597dadb5e3dac9a0ee8f05dac15 | [
"MIT"
] | 3 | 2021-11-19T14:32:00.000Z | 2022-01-17T00:13:18.000Z | defmodule RegexHelp.PromEx do
@moduledoc """
Be sure to add the following to finish setting up PromEx:
1. Update your configuration (config.exs, dev.exs, prod.exs, releases.exs, etc) to
configure the necessary bit of PromEx. Be sure to check out `PromEx.Config` for
more details regarding configuring PromEx:
```
config :regex_help, RegexHelp.PromEx,
disabled: false,
manual_metrics_start_delay: :no_delay,
drop_metrics_groups: [],
grafana: :disabled,
metrics_server: :disabled
```
2. Add this module to your application supervision tree. It should be one of the first
things that is started so that no Telemetry events are missed. For example, if PromEx
is started after your Repo module, you will miss Ecto's init events and the dashboards
will be missing some data points:
```
def start(_type, _args) do
children = [
RegexHelp.PromEx,
...
]
...
end
```
3. Update your `endpoint.ex` file to expose your metrics (or configure a standalone
server using the `:metrics_server` config options). Be sure to put this plug before
your `Plug.Telemetry` entry so that you can avoid having calls to your `/metrics`
endpoint create their own metrics and logs which can pollute your logs/metrics given
that Prometheus will scrape at a regular interval and that can get noisy:
```
defmodule RegexHelpWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :regex_help
...
plug PromEx.Plug, prom_ex_module: RegexHelp.PromEx
...
end
```
4. Update the list of plugins in the `plugins/0` function return list to reflect your
application's dependencies. Also update the list of dashboards that are to be uploaded
to Grafana in the `dashboards/0` function.
"""
use PromEx, otp_app: :regex_help
alias PromEx.Plugins
@impl true
def plugins do
  # Plugin order/content is configuration; commented entries are
  # dependencies this app does not use.
  [
    # PromEx built in plugins
    Plugins.Application,
    Plugins.Beam,
    # Phoenix plugin needs the router and endpoint to label routes.
    {Plugins.Phoenix, router: RegexHelpWeb.Router, endpoint: RegexHelpWeb.Endpoint},
    # Plugins.Ecto,
    # Plugins.Oban,
    Plugins.PhoenixLiveView
    # Plugins.Absinthe

    # Add your own PromEx metrics plugins
    # RegexHelp.Users.PromExPlugin
  ]
end
@impl true
def dashboard_assigns do
  # Template variables substituted into the uploaded Grafana dashboards.
  [
    datasource_id: "prometheus",
    default_selected_interval: "30s"
  ]
end
@impl true
def dashboards do
  # Dashboards mirror the enabled plugins above; commented entries
  # correspond to plugins this app does not use.
  [
    # PromEx built in Grafana dashboards
    {:prom_ex, "application.json"},
    {:prom_ex, "beam.json"},
    {:prom_ex, "phoenix.json"},
    # {:prom_ex, "ecto.json"},
    # {:prom_ex, "oban.json"},
    {:prom_ex, "phoenix_live_view.json"}
    # {:prom_ex, "absinthe.json"}

    # Add your dashboard definitions here with the format: {:otp_app, "path_in_priv"}
    # {:regex_help, "/grafana_dashboards/user_metrics.json"}
  ]
end
end
| 29.277228 | 91 | 0.658438 |
ff2fcef49d4c6383a635b89d83d1f5c64876fc05 | 2,727 | ex | Elixir | lib/aws/mca.ex | RAM9/aws-elixir | 2890ba722c977e03212df6a957a19d466c05cdf6 | [
"Apache-2.0"
] | 223 | 2015-05-29T17:45:35.000Z | 2021-06-29T08:37:14.000Z | lib/aws/mca.ex | RAM9/aws-elixir | 2890ba722c977e03212df6a957a19d466c05cdf6 | [
"Apache-2.0"
] | 33 | 2015-11-20T20:56:43.000Z | 2021-07-09T20:13:34.000Z | lib/aws/mca.ex | RAM9/aws-elixir | 2890ba722c977e03212df6a957a19d466c05cdf6 | [
"Apache-2.0"
] | 62 | 2015-06-14T20:53:24.000Z | 2021-12-13T07:20:15.000Z | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/jkakar/aws-codegen for more details.
defmodule AWS.MCA do
  @moduledoc """
  Provides AWS Marketplace business intelligence data on-demand.
  """

  @doc """
  Given a data set type and data set publication date, asynchronously
  publishes the requested data set to the specified S3 bucket and notifies
  the specified SNS topic once the data is available. Returns a unique
  request identifier that can be used to correlate requests with
  notifications from the SNS topic. Data sets will be published in
  comma-separated values (CSV) format with the file name
  {data_set_type}_YYYY-MM-DD.csv. If a file with the same name already exists
  (e.g. if the same data set is requested twice), the original file will be
  overwritten by the new file. Requires a Role with an attached permissions
  policy providing Allow permissions for the following actions: s3:PutObject,
  s3:GetBucketLocation, sns:GetTopicAttributes, sns:Publish,
  iam:GetRolePolicy.
  """
  def generate_data_set(client, input, options \\ []) do
    request(client, "GenerateDataSet", input, options)
  end

  @spec request(map(), binary(), map(), list()) ::
    {:ok, Poison.Parser.t | nil, Poison.Response.t} |
    {:error, Poison.Parser.t} |
    {:error, HTTPoison.Error.t}
  defp request(client, action, input, options) do
    # Route to the Marketplace Commerce Analytics service endpoint.
    client = %{client | service: "marketplacecommerceanalytics"}
    host = get_host("marketplacecommerceanalytics", client)
    url = get_url(host, client)
    headers = [{"Host", host},
               {"Content-Type", "application/x-amz-json-1.1"},
               {"X-Amz-Target", "MarketplaceCommerceAnalytics20150701.#{action}"}]
    payload = Poison.Encoder.encode(input, [])
    # Sign with AWS Signature Version 4 before sending.
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
    case HTTPoison.post(url, payload, headers, options) do
      {:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
        # Success with an empty body: nothing to decode.
        {:ok, nil, response}
      {:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}
      {:ok, _response=%HTTPoison.Response{body: body}} ->
        # Non-200: surface the AWS error type and message fields.
        error = Poison.Parser.parse!(body)
        exception = error["__type"]
        message = error["message"]
        {:error, {exception, message}}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  defp get_host(endpoint_prefix, client) do
    # The "local" region routes to localhost for testing.
    if client.region == "local" do
      "localhost"
    else
      "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
    end
  end

  defp get_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
| 40.102941 | 82 | 0.676568 |
ff2fd0a961caec2749e49e69b99cc3a9b56d8451 | 584 | ex | Elixir | cryin/lib/cryin_web/router.ex | Graveyardillon/cryin | 975ef2f2c170351d60b0b9846b0616dc7d32a747 | [
"MIT"
] | null | null | null | cryin/lib/cryin_web/router.ex | Graveyardillon/cryin | 975ef2f2c170351d60b0b9846b0616dc7d32a747 | [
"MIT"
] | 1 | 2020-05-18T13:34:20.000Z | 2020-05-18T13:34:20.000Z | cryin/lib/cryin_web/router.ex | Papillon6814/cryin | 975ef2f2c170351d60b0b9846b0616dc7d32a747 | [
"MIT"
] | null | null | null | defmodule CryinWeb.Router do
  # Request routing for the Cryin web application.
  use CryinWeb, :router

  # Pipeline for browser-facing requests: session, flash and
  # CSRF/security-header plugs.
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_flash
    plug :protect_from_forgery
    plug :put_secure_browser_headers
  end

  # Pipeline for JSON API requests.
  pipeline :api do
    plug :accepts, ["json"]
  end

  # Root scope: landing page through the browser pipeline.
  scope "/", CryinWeb do
    pipe_through :browser

    get "/", PageController, :index
  end

  # Other scopes may use custom stacks.
  # JSON API scope: image generation endpoints.
  scope "/api", CryinWeb do
    pipe_through :api

    post "/image/gen", ImageController, :generate
    post "/image/rects", ImageController, :generate_rects
  end
end
| 19.466667 | 57 | 0.683219 |
ff303f0ebae7ad32a2ada530c61f82007ce7f0a2 | 160 | ex | Elixir | installer/templates/phx_web/controllers/page_controller.ex | zorn/phoenix | ac88958550fbd861e2f1e1af6e3c6b787b1a202e | [
"MIT"
] | 1 | 2018-07-26T10:42:26.000Z | 2018-07-26T10:42:26.000Z | installer/templates/phx_web/controllers/page_controller.ex | zorn/phoenix | ac88958550fbd861e2f1e1af6e3c6b787b1a202e | [
"MIT"
] | null | null | null | installer/templates/phx_web/controllers/page_controller.ex | zorn/phoenix | ac88958550fbd861e2f1e1af6e3c6b787b1a202e | [
"MIT"
] | 1 | 2020-02-08T16:23:00.000Z | 2020-02-08T16:23:00.000Z | defmodule <%= web_namespace %>.PageController do
  # NOTE: this is an EEx template from the Phoenix installer; the
  # `<%= web_namespace %>` markers are substituted when a new project is
  # generated, so this file is not valid Elixir on its own.
  use <%= web_namespace %>, :controller

  # Renders the default landing page.
  def index(conn, _params) do
    render conn, "index.html"
  end
end
| 20 | 48 | 0.6875 |
ff305ca8de3072b2c7aa4ae970762fd2daaa54a8 | 2,019 | ex | Elixir | lib/absinthe/introspection/field.ex | maartenvanvliet/absinthe | ebe820717200f53756e225b3dffbfefe924a94d3 | [
"MIT"
] | null | null | null | lib/absinthe/introspection/field.ex | maartenvanvliet/absinthe | ebe820717200f53756e225b3dffbfefe924a94d3 | [
"MIT"
] | 2 | 2020-07-21T05:23:37.000Z | 2020-08-26T04:56:12.000Z | lib/absinthe/introspection/field.ex | jlgeering/absinthe | a3dbc29640d613928398626ad75a8f03203a1720 | [
"MIT"
] | null | null | null | defmodule Absinthe.Introspection.Field do
  @moduledoc false

  # Builds the GraphQL introspection meta fields: __typename, __type and
  # __schema. Each clause of meta/1 returns a ready-made %Type.Field{} with
  # its resolver installed as middleware.

  use Absinthe.Schema.Notation

  alias Absinthe.Schema
  alias Absinthe.Type

  # __typename: resolves the concrete object type name at the current
  # position in the query.
  def meta("typename") do
    %Type.Field{
      name: "__typename",
      type: :string,
      description: "The name of the object type currently being queried.",
      middleware: [
        Absinthe.Resolution.resolver_spec(fn
          # Concrete object: its name is known directly.
          _, %{parent_type: %Type.Object{} = type} ->
            {:ok, type.name}

          # Interface: ask the interface to resolve the concrete type.
          _, %{source: source, parent_type: %Type.Interface{} = iface} = env ->
            case Type.Interface.resolve_type(iface, source, env) do
              nil ->
                {:error, "Could not resolve type of concrete " <> iface.name}
              type ->
                {:ok, type.name}
            end

          # Union: same, via the union's own type resolver.
          _, %{source: source, parent_type: %Type.Union{} = union} = env ->
            case Type.Union.resolve_type(union, source, env) do
              nil ->
                {:error, "Could not resolve type of concrete " <> union.name}
              type ->
                {:ok, type.name}
            end
        end)
      ]
    }
  end

  # __type: looks a named type up in the schema.
  def meta("type") do
    %Type.Field{
      name: "__type",
      type: :__type,
      description: "Represents scalars, interfaces, object types, unions, enums in the system",
      args: %{
        name: %Type.Argument{
          name: "name",
          type: non_null(:string),
          description: "The name of the type to introspect",
          __reference__: %{
            identifier: :name
          }
        }
      },
      middleware: [
        Absinthe.Resolution.resolver_spec(fn %{name: name}, %{schema: schema} ->
          {:ok, Schema.lookup_type(schema, name)}
        end)
      ]
    }
  end

  # __schema: exposes the schema itself for introspection.
  def meta("schema") do
    %Type.Field{
      name: "__schema",
      type: :__schema,
      description: "Represents the schema",
      middleware: [
        Absinthe.Resolution.resolver_spec(fn _, %{schema: schema} ->
          {:ok, schema}
        end)
      ]
    }
  end
end
| 26.220779 | 95 | 0.531451 |
ff306e618a50525070e3b200bcdeca7a3130f817 | 4,883 | ex | Elixir | apps/neoscan_sync/lib/neoscan_sync/converter.ex | decentralisedkev/neo-scan | c8a35a0952e8c46d40365e0ac76bce361ac5e558 | [
"MIT"
] | null | null | null | apps/neoscan_sync/lib/neoscan_sync/converter.ex | decentralisedkev/neo-scan | c8a35a0952e8c46d40365e0ac76bce361ac5e558 | [
"MIT"
] | null | null | null | apps/neoscan_sync/lib/neoscan_sync/converter.ex | decentralisedkev/neo-scan | c8a35a0952e8c46d40365e0ac76bce361ac5e558 | [
"MIT"
] | null | null | null | defmodule NeoscanSync.Converter do
  # Converts raw block/transaction data (as delivered by the sync layer)
  # into Neoscan's schema structs, ready for insertion.

  alias Neoscan.Block
  alias Neoscan.BlockGasGeneration
  alias Neoscan.Vout
  alias Neoscan.Vin
  alias Neoscan.Claim
  alias Neoscan.Transfer
  alias Neoscan.Transaction
  alias Neoscan.Asset

  # Builds a Claim struct for a GAS claim referencing a previous vout.
  def convert_claim(claim_raw, transaction_raw, block_raw) do
    %Claim{
      transaction_hash: transaction_raw.hash,
      vout_n: claim_raw.vout_n,
      vout_transaction_hash: claim_raw.vout_transaction_hash,
      block_time: block_raw.time,
      inserted_at: block_raw.inserted_at,
      updated_at: block_raw.updated_at
    }
  end

  # Builds a Vin struct (transaction input) pointing at the vout it spends.
  def convert_vin(vin_raw, transaction_raw, block_raw) do
    %Vin{
      transaction_hash: transaction_raw.hash,
      vout_n: vin_raw.vout_n,
      vout_transaction_hash: vin_raw.vout_transaction_hash,
      block_index: block_raw.index,
      block_time: block_raw.time,
      inserted_at: block_raw.inserted_at,
      updated_at: block_raw.updated_at
    }
  end

  # Transactions without an asset registration convert to no asset.
  def convert_asset(nil, _, _), do: nil

  # Builds an Asset struct from a raw asset registration.
  def convert_asset(asset_raw, transaction_raw, block_raw) do
    %Asset{
      transaction_hash: transaction_raw.hash,
      admin: asset_raw.admin,
      amount: asset_raw.amount,
      name: asset_raw.name,
      owner: asset_raw.owner,
      precision: asset_raw.precision,
      type: to_string(asset_raw.type),
      issued: asset_raw.available,
      block_time: block_raw.time,
      # NOTE(review): contract is stubbed with a single zero byte here --
      # presumably filled in elsewhere; confirm before relying on it.
      contract: <<0>>,
      inserted_at: block_raw.inserted_at,
      updated_at: block_raw.updated_at
    }
  end

  # Builds a Transfer (token movement) struct.
  def convert_transfer(transfer_raw, transaction_raw, block_raw) do
    %Transfer{
      transaction_hash: transaction_raw.hash,
      address_from: transfer_raw.addr_from,
      address_to: transfer_raw.addr_to,
      # `* 1.0` coerces integer amounts to floats for the schema field.
      amount: transfer_raw.amount * 1.0,
      contract: transfer_raw.contract,
      block_index: block_raw.index,
      block_time: block_raw.time,
      inserted_at: block_raw.inserted_at,
      updated_at: block_raw.updated_at
    }
  end

  # Builds a Vout struct (transaction output); starts unspent and unclaimed.
  def convert_vout(vout_raw, transaction_raw, block_raw) do
    %Vout{
      transaction_hash: transaction_raw.hash,
      n: vout_raw.n,
      address_hash: vout_raw.address,
      value: vout_raw.value,
      asset_hash: vout_raw.asset,
      claimed: false,
      spent: false,
      start_block_index: block_raw.index,
      block_time: block_raw.time,
      inserted_at: block_raw.inserted_at,
      updated_at: block_raw.updated_at
    }
  end

  # this function is a hack to prevent hash collision on miner transaction hash of the block 1826259 and 2000357, using
  # this hack prevent us from changing the data model (transaction hash is supposed to be unique), it might need to be
  # reviewed at a later time.
  # NOTE(review): only block 2_000_357 is special-cased below even though the
  # comment above also mentions 1826259 -- confirm whether that block needs
  # the same treatment.
  def get_transaction_hash(%{type: :miner_transaction, hash: hash}, %{index: 2_000_357}) do
    :binary.encode_unsigned(:binary.decode_unsigned(hash) + 1)
  end

  def get_transaction_hash(transaction_raw, _), do: transaction_raw.hash

  # Converts a raw transaction and all of its nested collections
  # (vouts/vins/claims/transfers/asset) into a Transaction struct.
  def convert_transaction(transaction_raw, block_raw) do
    %Transaction{
      block_hash: block_raw.hash,
      hash: get_transaction_hash(transaction_raw, block_raw),
      block_index: block_raw.index,
      block_time: block_raw.time,
      attributes: transaction_raw.attributes,
      net_fee: transaction_raw.net_fee,
      sys_fee: transaction_raw.sys_fee,
      nonce: transaction_raw.nonce,
      scripts: transaction_raw.scripts,
      size: transaction_raw.size,
      type: to_string(transaction_raw.type),
      version: transaction_raw.version,
      vouts: Enum.map(transaction_raw.vouts, &convert_vout(&1, transaction_raw, block_raw)),
      vins: Enum.map(transaction_raw.vins, &convert_vin(&1, transaction_raw, block_raw)),
      claims: Enum.map(transaction_raw.claims, &convert_claim(&1, transaction_raw, block_raw)),
      transfers:
        Enum.map(transaction_raw.transfers, &convert_transfer(&1, transaction_raw, block_raw)),
      asset: convert_asset(transaction_raw.asset, transaction_raw, block_raw),
      inserted_at: block_raw.inserted_at,
      updated_at: block_raw.updated_at
    }
  end

  # Converts a raw block (and all its transactions) into a Block struct;
  # every converted record is stamped with the same `now` timestamp.
  def convert_block(block_raw) do
    now = DateTime.utc_now()
    block_raw = Map.merge(block_raw, %{inserted_at: now, updated_at: now})

    %Block{
      hash: block_raw.hash,
      index: block_raw.index,
      merkle_root: block_raw.merkle_root,
      next_consensus: block_raw.next_consensus,
      nonce: block_raw.nonce,
      script: block_raw.script,
      size: block_raw.size,
      time: block_raw.time,
      version: block_raw.version,
      transactions: Enum.map(block_raw.tx, &convert_transaction(&1, block_raw)),
      # Fee totals are accumulated as Decimals.
      total_sys_fee: Enum.reduce(Enum.map(block_raw.tx, & &1.sys_fee), 0, &Decimal.add/2),
      total_net_fee: Enum.reduce(Enum.map(block_raw.tx, & &1.net_fee), 0, &Decimal.add/2),
      gas_generated: BlockGasGeneration.get_amount_generate_in_block(block_raw.index),
      tx_count: Enum.count(block_raw.tx)
    }
  end
end
| 35.129496 | 119 | 0.715544 |
ff309e773c83eaae530235511b48b136c6dc608a | 85 | exs | Elixir | config/config.exs | epinault/wavexfront | 29dcc3e82cedbbf18d5894a3b4b465efed4c3069 | [
"MIT"
] | 1 | 2020-01-12T03:47:59.000Z | 2020-01-12T03:47:59.000Z | config/config.exs | epinault/wavexfront | 29dcc3e82cedbbf18d5894a3b4b465efed4c3069 | [
"MIT"
] | null | null | null | config/config.exs | epinault/wavexfront | 29dcc3e82cedbbf18d5894a3b4b465efed4c3069 | [
"MIT"
] | null | null | null | use Mix.Config
# Base configuration for :wavexfront -- disabled by default, enabled
# per environment below.
config :wavexfront, enabled: false

# Pull in the environment-specific overrides (dev.exs, test.exs, prod.exs).
import_config "#{Mix.env()}.exs"
| 14.166667 | 34 | 0.729412 |
ff30a939d9fc2b3e0a839f457c9d256fd7fb937e | 6,569 | ex | Elixir | clients/container/lib/google_api/container/v1/model/ip_allocation_policy.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/model/ip_allocation_policy.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/model/ip_allocation_policy.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Container.V1.Model.IPAllocationPolicy do
  @moduledoc """
  Configuration for controlling how IPs are allocated in the cluster.

  ## Attributes

  *   `clusterIpv4Cidr` (*type:* `String.t`, *default:* `nil`) - This field is deprecated, use cluster_ipv4_cidr_block.
  *   `clusterIpv4CidrBlock` (*type:* `String.t`, *default:* `nil`) - The IP address range for the cluster pod IPs. If this field is set, then
      `cluster.cluster_ipv4_cidr` must be left blank.
      This field is only applicable when `use_ip_aliases` is true.
      Set to blank to have a range chosen with the default size.
      Set to /netmask (e.g. `/14`) to have a range chosen with a specific
      netmask.
      Set to a
      [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing)
      notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g.
      `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range
      to use.
  *   `clusterSecondaryRangeName` (*type:* `String.t`, *default:* `nil`) - The name of the secondary range to be used for the cluster CIDR
      block. The secondary range will be used for pod IP
      addresses. This must be an existing secondary range associated
      with the cluster subnetwork.
      This field is only applicable with use_ip_aliases is true and
      create_subnetwork is false.
  *   `createSubnetwork` (*type:* `boolean()`, *default:* `nil`) - Whether a new subnetwork will be created automatically for the cluster.
      This field is only applicable when `use_ip_aliases` is true.
  *   `nodeIpv4Cidr` (*type:* `String.t`, *default:* `nil`) - This field is deprecated, use node_ipv4_cidr_block.
  *   `nodeIpv4CidrBlock` (*type:* `String.t`, *default:* `nil`) - The IP address range of the instance IPs in this cluster.
      This is applicable only if `create_subnetwork` is true.
      Set to blank to have a range chosen with the default size.
      Set to /netmask (e.g. `/14`) to have a range chosen with a specific
      netmask.
      Set to a
      [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing)
      notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g.
      `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range
      to use.
  *   `servicesIpv4Cidr` (*type:* `String.t`, *default:* `nil`) - This field is deprecated, use services_ipv4_cidr_block.
  *   `servicesIpv4CidrBlock` (*type:* `String.t`, *default:* `nil`) - The IP address range of the services IPs in this cluster. If blank, a range
      will be automatically chosen with the default size.
      This field is only applicable when `use_ip_aliases` is true.
      Set to blank to have a range chosen with the default size.
      Set to /netmask (e.g. `/14`) to have a range chosen with a specific
      netmask.
      Set to a
      [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing)
      notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g.
      `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range
      to use.
  *   `servicesSecondaryRangeName` (*type:* `String.t`, *default:* `nil`) - The name of the secondary range to be used as for the services
      CIDR block. The secondary range will be used for service
      ClusterIPs. This must be an existing secondary range associated
      with the cluster subnetwork.
      This field is only applicable with use_ip_aliases is true and
      create_subnetwork is false.
  *   `subnetworkName` (*type:* `String.t`, *default:* `nil`) - A custom subnetwork name to be used if `create_subnetwork` is true. If
      this field is empty, then an automatic name will be chosen for the new
      subnetwork.
  *   `tpuIpv4CidrBlock` (*type:* `String.t`, *default:* `nil`) - The IP address range of the Cloud TPUs in this cluster. If unspecified, a
      range will be automatically chosen with the default size.
      This field is only applicable when `use_ip_aliases` is true.
      If unspecified, the range will use the default size.
      Set to /netmask (e.g. `/14`) to have a range chosen with a specific
      netmask.
      Set to a
      [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing)
      notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g.
      `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range
      to use.
  *   `useIpAliases` (*type:* `boolean()`, *default:* `nil`) - Whether alias IPs will be used for pod IPs in the cluster.
  """

  # Auto-generated API model (see the generator notice at the top of this
  # file); ModelBase provides the `field/1` macro plus decode/encode support.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :clusterIpv4Cidr => String.t(),
          :clusterIpv4CidrBlock => String.t(),
          :clusterSecondaryRangeName => String.t(),
          :createSubnetwork => boolean(),
          :nodeIpv4Cidr => String.t(),
          :nodeIpv4CidrBlock => String.t(),
          :servicesIpv4Cidr => String.t(),
          :servicesIpv4CidrBlock => String.t(),
          :servicesSecondaryRangeName => String.t(),
          :subnetworkName => String.t(),
          :tpuIpv4CidrBlock => String.t(),
          :useIpAliases => boolean()
        }

  # One `field/1` declaration per JSON attribute of the API payload.
  field(:clusterIpv4Cidr)
  field(:clusterIpv4CidrBlock)
  field(:clusterSecondaryRangeName)
  field(:createSubnetwork)
  field(:nodeIpv4Cidr)
  field(:nodeIpv4CidrBlock)
  field(:servicesIpv4Cidr)
  field(:servicesIpv4CidrBlock)
  field(:servicesSecondaryRangeName)
  field(:subnetworkName)
  field(:tpuIpv4CidrBlock)
  field(:useIpAliases)
end
defimpl Poison.Decoder, for: GoogleApi.Container.V1.Model.IPAllocationPolicy do
  # Decoding is handled by the model module itself.
  def decode(value, options),
    do: GoogleApi.Container.V1.Model.IPAllocationPolicy.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Container.V1.Model.IPAllocationPolicy do
  # Encoding is delegated to the shared ModelBase implementation.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 43.503311 | 146 | 0.685036 |
ff30d46da34002b45811354c50cd71205f1dc6be | 1,196 | ex | Elixir | examples/server/lib/server_web/endpoint.ex | charaku/phoenix_wings | b2881ecaea0841d6c27e903ca8965c3ccb6a39c9 | [
"MIT"
] | 132 | 2017-12-30T06:12:34.000Z | 2022-03-13T16:16:24.000Z | examples/server/lib/server_web/endpoint.ex | charaku/phoenix_wings | b2881ecaea0841d6c27e903ca8965c3ccb6a39c9 | [
"MIT"
] | 25 | 2018-04-04T14:40:30.000Z | 2022-03-23T21:25:17.000Z | examples/server/lib/server_web/endpoint.ex | charaku/phoenix_wings | b2881ecaea0841d6c27e903ca8965c3ccb6a39c9 | [
"MIT"
] | 46 | 2018-03-18T17:35:21.000Z | 2022-03-23T20:37:01.000Z | defmodule ServerWeb.Endpoint do
  # HTTP entry point for the :server application: declares the channel
  # socket and the plug pipeline every request passes through before
  # reaching the router.
  use Phoenix.Endpoint, otp_app: :server

  # Channel transport: WebSocket only, long-polling disabled.
  socket "/socket", ServerWeb.UserSocket,
    websocket: true,
    longpoll: false

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :server,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    plug Phoenix.CodeReloader
  end

  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]

  # Parses form, multipart and JSON request bodies.
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  plug Plug.Session,
    store: :cookie,
    key: "_server_key",
    signing_salt: "tnZaX0BP"

  # Hand the request off to the router last.
  plug ServerWeb.Router
end
| 26.577778 | 63 | 0.703177 |
ff30e6c9db6d4676454ff1e5e5929e5a58d06a76 | 2,930 | ex | Elixir | lib/tracking/tracking.ex | amoein/ex_audit | 50e47a75624f1d266e414fabf4201696030cc721 | [
"MIT"
] | null | null | null | lib/tracking/tracking.ex | amoein/ex_audit | 50e47a75624f1d266e414fabf4201696030cc721 | [
"MIT"
] | null | null | null | lib/tracking/tracking.ex | amoein/ex_audit | 50e47a75624f1d266e414fabf4201696030cc721 | [
"MIT"
] | null | null | null | defmodule ExAudit.Tracking do
  # Core change-tracking logic: diffs old/new struct states around repo
  # operations and records the resulting patches as version rows.

  # Determines the before/after states for the given repo action
  # (:created / :updated / :deleted) and returns version params for the diff.
  def find_changes(action, struct_or_changeset, resulting_struct) do
    old = case {action, struct_or_changeset} do
      {:created, _} -> %{}
      {_, %Ecto.Changeset{data: struct}} -> struct
      {_, %{} = struct} -> struct
      {_, nil} -> %{}
    end
    new = case action do
      x when x in [:updated, :created] ->
        resulting_struct
      :deleted -> %{}
    end
    compare_versions(action, old, new)
  end

  # Diffs two states of a schema; returns a one-element list of version
  # params, or [] when the schema is untracked or nothing changed.
  def compare_versions(action, old, new) do
    schema = Map.get(old, :__struct__, Map.get(new, :__struct__))
    if schema in tracked_schemas() do
      # Associations are excluded from the diff -- only plain fields audit.
      assocs = schema.__schema__(:associations)
      patch = ExAudit.Diff.diff(
        ExAudit.Tracker.map_struct(old) |> Map.drop(assocs),
        ExAudit.Tracker.map_struct(new) |> Map.drop(assocs)
      )
      case patch do
        :not_changed -> []
        patch ->
          params = %{
            entity_id: Map.get(old, :id) || Map.get(new, :id),
            entity_schema: schema,
            patch: patch,
            action: action
          }
          [params]
      end
    else
      []
    end
  end

  # Records the changes produced by a repo operation unless the caller
  # opted out via `ignore_audit: true`.
  def track_change(module, action, changeset, resulting_struct, opts) do
    if not Keyword.get(opts, :ignore_audit, false) do
      changes = find_changes(action, changeset, resulting_struct)
      insert_versions(module, changes, opts)
    end
  end

  # Inserts version rows, stamping each with the same second-precision
  # timestamp and merging any caller-provided :ex_audit_custom fields.
  def insert_versions(module, changes, opts) do
    now = DateTime.utc_now()|>DateTime.truncate(:second)
    custom_fields =
      Keyword.get(opts, :ex_audit_custom, [])
      |> Enum.into(%{})
    changes =
      Enum.map(changes, fn change ->
        change = Map.put(change, :recorded_at, now)
        Map.merge(change, custom_fields)
      end)
    case changes do
      [] ->
        :ok
      _ ->
        module.insert_all(version_schema(), changes, opts)
    end
  end

  # Recursively collects :deleted version params for every row that will be
  # removed by `on_delete: :delete_all` cascades when `struct` is deleted.
  def find_assoc_deletion(module, struct, repo_opts) do
    struct =
      case struct do
        %Ecto.Changeset{} -> Ecto.Changeset.apply_changes(struct)
        _ -> struct
      end
    schema = struct.__struct__
    assocs =
      schema.__schema__(:associations)
      |> Enum.map(fn field -> {field, schema.__schema__(:association, field)} end)
      |> Enum.filter(fn {_, opts} -> Map.get(opts, :on_delete) == :delete_all end)
    assocs
    |> Enum.flat_map(fn {field, _opts} ->
      root = module.all(Ecto.assoc(struct, field))
      # Recurse so cascades-of-cascades are captured too.
      root ++ Enum.map(root, &find_assoc_deletion(module, &1, repo_opts))
    end)
    |> List.flatten()
    |> Enum.flat_map(&compare_versions(:deleted, &1, %{}))
  end

  # Records :deleted versions for all cascade-deleted associated rows.
  def track_assoc_deletion(module, struct, opts) do
    deleted_structs = find_assoc_deletion(module, struct, opts)
    insert_versions(module, deleted_structs, opts)
  end

  # Schemas whose changes are audited (application config).
  defp tracked_schemas do
    Application.get_env(:ex_audit, :tracked_schemas)
  end

  # The schema module used to store version rows (application config).
  defp version_schema do
    Application.get_env(:ex_audit, :version_schema)
  end
end
| 25.701754 | 82 | 0.616041 |
ff310309a9fcf65fa1e217a475d84474f76d5bac | 3,951 | exs | Elixir | test/controllers/badge_controller_test.exs | DNNX/badging | 861c0d0e376c212bf27e59710fb21d00f734c715 | [
"MIT"
] | 4 | 2016-11-07T09:47:09.000Z | 2020-10-19T20:39:03.000Z | test/controllers/badge_controller_test.exs | DNNX/badging | 861c0d0e376c212bf27e59710fb21d00f734c715 | [
"MIT"
] | 7 | 2016-11-07T13:02:36.000Z | 2016-11-23T17:37:07.000Z | test/controllers/badge_controller_test.exs | DNNX/badging | 861c0d0e376c212bf27e59710fb21d00f734c715 | [
"MIT"
] | 3 | 2016-11-07T09:51:36.000Z | 2019-06-18T12:26:38.000Z | defmodule Badging.BadgeControllerTest do
  # Integration tests for the /badges JSON API: CRUD actions, SVG rendering
  # and token-based authorization.
  use Badging.ConnCase

  alias Badging.Badge

  # Attributes accepted by create/update.
  @valid_attrs %{
    identifier: "conversion",
    color: "yellow",
    status: "83%",
    subject: "Conversion Progress"
  }
  # Fails validation (blank color).
  @invalid_attrs %{color: ""}

  # Every request negotiates JSON.
  setup %{conn: conn} do
    {:ok, conn: put_req_header(conn, "accept", "application/json")}
  end

  test "lists all entries on index", %{conn: conn} do
    conn = get(conn, "/badges", token: valid_token(:read))
    assert json_response(conn, 200)["data"] == []
  end

  test "shows chosen resource", %{conn: conn} do
    badge = Repo.insert!(valid_badge())
    conn = get(conn, "/badges/coverage", token: valid_token(:read))
    assert json_response(conn, 200)["data"] == %{
      "id" => badge.id,
      "identifier" => badge.identifier,
      "subject" => badge.subject,
      "status" => badge.status,
      "color" => badge.color,
      "svg" => badge.svg
    }
  end

  test "renders SVG when it's available", %{conn: conn} do
    Repo.insert!(valid_badge_with_svg())
    conn = get(conn, "/badges/coverage.svg", token: valid_token(:read))
    assert conn.resp_body == "<svg />"
    assert get_header(conn, "content-type") == "image/svg+xml"
  end

  test "renders 403 when not authed", %{conn: conn} do
    Repo.insert!(valid_badge_with_svg())
    conn = get(conn, "/badges/coverage.svg")
    assert conn.status == 403
    assert conn.resp_body == "Forbidden"
  end

  test "renders 404 when SVG is not available", %{conn: conn} do
    Repo.insert!(valid_badge())
    assert_error_sent(404, fn ->
      get(conn, "/badges/coverage.svg", token: valid_token(:read))
    end)
  end

  test "renders page not found when id is nonexistent", %{conn: conn} do
    assert_error_sent(404, fn ->
      get(conn, "/badges/dont_exist.svg", token: valid_token(:read))
    end)
  end

  test "creates and renders resource when data is valid", %{conn: conn} do
    conn =
      conn
      |> post("/badges", badge: @valid_attrs, token: valid_token(:write))
    assert json_response(conn, 201)["data"]["id"]
    assert Repo.get_by(Badge, @valid_attrs)
  end

  test "does not create resource and renders errors when data is invalid", %{conn: conn} do
    conn =
      conn
      |> post("/badges", badge: @invalid_attrs, token: valid_token(:write))
    assert json_response(conn, 422)["errors"] != %{}
  end

  test "updates and renders chosen resource when data is valid", %{conn: conn} do
    Repo.insert!(valid_badge())
    conn =
      conn
      |> put("/badges/coverage", badge: @valid_attrs, token: valid_token(:write))
    assert json_response(conn, 200)["data"]["id"]
    assert Repo.get_by(Badge, @valid_attrs)
  end

  test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
    Repo.insert!(valid_badge())
    conn =
      conn
      |> put("/badges/coverage", badge: @invalid_attrs, token: valid_token(:write))
    assert json_response(conn, 422)["errors"] != %{}
  end

  test "deletes chosen resource", %{conn: conn} do
    Repo.insert!(valid_badge())
    conn =
      conn
      |> delete("/badges/coverage", token: valid_token(:write))
    assert response(conn, 204)
    refute Repo.one(Badge, identifier: "coverage")
  end

  # Fixture: badge without a rendered SVG.
  defp valid_badge do
    %Badge{
      identifier: "coverage",
      subject: "Coverage",
      status: "83%",
      color: "yellow"
    }
  end

  # Fixture: badge with a rendered SVG so the .svg route can serve it.
  defp valid_badge_with_svg do
    %Badge{
      identifier: "coverage",
      subject: "Coverage",
      status: "83%",
      color: "yellow",
      svg: "<svg />",
      svg_downloaded_at: now()
    }
  end

  # Returns the value of the first response header matching `header_name`.
  defp get_header(conn, header_name) do
    conn.resp_headers
    |> Enum.find_value(fn
      {^header_name, value} -> value
      _ -> nil
    end)
  end

  defp now do
    DateTime.utc_now()
  end

  # Reads the configured read/write API token for the given mode.
  defp valid_token(mode) do
    Application.get_env(:badging, :token)
    |> Keyword.fetch!(mode)
  end
end
| 25.165605 | 98 | 0.619843 |
ff31063181003e99b3b87b56c7153ca4171aa4a1 | 970 | exs | Elixir | test/word_smith/squish_test.exs | benfalk/word_smith | f5f09d0fbbaa2513b59947ef37ffaeec0cbc456a | [
"MIT"
] | 19 | 2016-04-29T20:31:50.000Z | 2021-12-07T13:04:55.000Z | test/word_smith/squish_test.exs | hiro-riveros/word_smith | f5f09d0fbbaa2513b59947ef37ffaeec0cbc456a | [
"MIT"
] | 2 | 2017-07-13T14:21:03.000Z | 2019-07-14T14:30:22.000Z | test/word_smith/squish_test.exs | hiro-riveros/word_smith | f5f09d0fbbaa2513b59947ef37ffaeec0cbc456a | [
"MIT"
] | 2 | 2019-07-10T09:48:28.000Z | 2019-10-13T09:10:40.000Z | defmodule WordSmith.SquishTest do
use ExUnit.Case, async: true
import WordSmith.Squish
import String, only: [duplicate: 2]
test "pre-squished strings come out the same" do
assert squish("hello world") == "hello world"
end
test "will squish this down" do
assert squish("hello world") == "hello world"
end
test "squishes left and right padding out" do
assert squish(" hello world ") == "hello world"
end
test "squishes left and right with tabs and newlines" do
assert squish("\n \t hello\n\n\tworld \n \t ") == "hello world"
end
test "white space get's emptied" do
assert squish("\n\n\t\n\t\n\t \t \n \n") == ""
end
test "works on **large** strings" do
big_string = duplicate(" \n \t hello \n world \n \t\t", 400)
expected = Stream.repeatedly(fn -> "hello world" end)
|> Enum.take(400)
|> Enum.to_list
|> Enum.join(" ")
assert squish(big_string) == expected
end
end
| 26.944444 | 72 | 0.627835 |
ff317bfbe4f869c27dd170c7294c077940de8ee4 | 142 | exs | Elixir | config/dev.exs | headwayio/firebase-admin-ex | a57d0ee4cf197176aeef4a341a16993da52fa229 | [
"MIT"
] | 2 | 2021-05-03T03:16:32.000Z | 2021-07-12T11:16:13.000Z | config/dev.exs | headwayio/firebase-admin-ex | a57d0ee4cf197176aeef4a341a16993da52fa229 | [
"MIT"
] | null | null | null | config/dev.exs | headwayio/firebase-admin-ex | a57d0ee4cf197176aeef4a341a16993da52fa229 | [
"MIT"
] | 1 | 2021-05-03T03:16:46.000Z | 2021-05-03T03:16:46.000Z | use Mix.Config
# NOTE(review): hard-coded absolute path to a service-account JSON read at
# compile time -- this only works on the original author's machine and bakes
# credentials into the build; consider reading from an env var at runtime.
config :goth, json: "/Users/pendragondev/Development/Projects/headway/theobotic/ditto-292414-c5e53185a7b1.json" |> File.read!
| 35.5 | 125 | 0.795775 |
ff31a1627153b4d63f4f4de6dbf2c2900e45f4df | 65 | ex | Elixir | lib/bifroest/web/views/page_view.ex | theSuess/bifroest-web | fc25d4e827546b2e1a08719962f0e161d622eded | [
"BSD-3-Clause"
] | 1 | 2017-03-29T06:48:06.000Z | 2017-03-29T06:48:06.000Z | lib/bifroest/web/views/page_view.ex | theSuess/bifroest-web | fc25d4e827546b2e1a08719962f0e161d622eded | [
"BSD-3-Clause"
] | null | null | null | lib/bifroest/web/views/page_view.ex | theSuess/bifroest-web | fc25d4e827546b2e1a08719962f0e161d622eded | [
"BSD-3-Clause"
] | null | null | null | defmodule Bifroest.Web.PageView do
  # View module for page templates; all rendering helpers come from
  # the shared :view definition in Bifroest.Web.
  use Bifroest.Web, :view
end
| 16.25 | 34 | 0.784615 |
ff31ea61de22524311aa840ace5225a699ab047a | 4,685 | ex | Elixir | deps/phoenix/lib/mix/phoenix.ex | Thrashmandicoot/my-first-phoenix-app | 7cdfe34a1d874cbce8dba17e9824a5c91e3b47da | [
"MIT"
] | 1 | 2015-08-24T06:01:51.000Z | 2015-08-24T06:01:51.000Z | deps/phoenix/lib/mix/phoenix.ex | Thrashmandicoot/my-first-phoenix-app | 7cdfe34a1d874cbce8dba17e9824a5c91e3b47da | [
"MIT"
] | null | null | null | deps/phoenix/lib/mix/phoenix.ex | Thrashmandicoot/my-first-phoenix-app | 7cdfe34a1d874cbce8dba17e9824a5c91e3b47da | [
"MIT"
] | null | null | null | defmodule Mix.Phoenix do
# Conveniences for Phoenix tasks.
@moduledoc false
  @doc """
  Copies files from source dir to target dir
  according to the given map.

  Files are evaluated against EEx according to
  the given binding.
  """
  def copy_from(apps, source_dir, target_dir, binding, mapping) when is_list(mapping) do
    # Each app contributes one candidate root directory for the templates.
    roots = Enum.map(apps, &to_app_source(&1, source_dir))

    for {format, source_file_path, target_file_path} <- mapping do
      # Use the first root that actually contains the file.
      source =
        Enum.find_value(roots, fn root ->
          source = Path.join(root, source_file_path)
          if File.exists?(source), do: source
        end) || raise "could not find #{source_file_path} in any of the sources"

      target = Path.join(target_dir, target_file_path)

      # :text files are copied verbatim; :eex files are evaluated with `binding`.
      contents =
        case format do
          :text -> File.read!(source)
          :eex -> EEx.eval_file(source, binding)
        end

      Mix.Generator.create_file(target, contents)
    end
  end
defp to_app_source(path, source_dir) when is_binary(path),
do: Path.join(path, source_dir)
defp to_app_source(app, source_dir) when is_atom(app),
do: Application.app_dir(app, source_dir)
  @doc """
  Inflect path, scope, alias and more from the given name.

      iex> Mix.Phoenix.inflect("user")
      [alias: "User",
       human: "User",
       base: "Phoenix",
       module: "Phoenix.User",
       scoped: "User",
       singular: "user",
       path: "user"]

      iex> Mix.Phoenix.inflect("Admin.User")
      [alias: "User",
       human: "User",
       base: "Phoenix",
       module: "Phoenix.Admin.User",
       scoped: "Admin.User",
       singular: "user",
       path: "admin/user"]

      iex> Mix.Phoenix.inflect("Admin.SuperUser")
      [alias: "SuperUser",
       human: "Super user",
       base: "Phoenix",
       module: "Phoenix.Admin.SuperUser",
       scoped: "Admin.SuperUser",
       singular: "super_user",
       path: "admin/super_user"]
  """
  def inflect(singular) do
    # Root namespace of the current project (e.g. "Phoenix").
    base = Mix.Phoenix.base
    # "admin.user" -> "Admin.User"; "user" -> "User".
    scoped = Phoenix.Naming.camelize(singular)
    # "Admin.User" -> "admin/user".
    path = Phoenix.Naming.underscore(scoped)
    # The last path segment is the singular resource name.
    singular = String.split(path, "/") |> List.last
    module = Module.concat(base, scoped) |> inspect
    # Last module segment is the alias used in generated code.
    alias = String.split(module, ".") |> List.last
    human = Phoenix.Naming.humanize(singular)
    [alias: alias,
     human: human,
     base: base,
     module: module,
     scoped: scoped,
     singular: singular,
     path: path]
  end
@doc """
Parses the attrs as received by generators.
"""
def attrs(attrs) do
Enum.map attrs, fn attr ->
case String.split(attr, ":", parts: 3) do
[key, comp, value] -> {String.to_atom(key), {String.to_atom(comp), String.to_atom(value)}}
[key, value] -> {String.to_atom(key), String.to_atom(value)}
[key] -> {String.to_atom(key), :string}
end
end
end
@doc """
Generates some sample params based on the parsed attributes.
"""
def params(attrs) do
attrs
|> Enum.reject(fn
{_, {:references, _}} -> true
{_, _} -> false
end)
|> Enum.into(%{}, fn
{k, {:array, _}} -> {k, []}
{k, :integer} -> {k, 42}
{k, :float} -> {k, "120.5"}
{k, :decimal} -> {k, "120.5"}
{k, :boolean} -> {k, true}
{k, :map} -> {k, %{}}
{k, :text} -> {k, "some content"}
{k, :date} -> {k, "2010-04-17"}
{k, :time} -> {k, "14:00:00"}
{k, :datetime} -> {k, "2010-04-17 14:00:00"}
{k, :uuid} -> {k, "7488a646-e31f-11e4-aace-600308960662"}
{k, _} -> {k, "some content"}
end)
end
@doc """
Checks the availability of a given module name.
"""
def check_module_name_availability!(name) do
name = Module.concat(Elixir, name)
if Code.ensure_loaded?(name) do
Mix.raise "Module name #{inspect name} is already taken, please choose another name"
end
end
@doc """
Returns the module base name based on the configuration value.
config :my_app
app_namespace: My.App
"""
def base do
app = Mix.Project.config |> Keyword.fetch!(:app)
case Application.get_env(app, :app_namespace, app) do
^app -> app |> to_string |> Phoenix.Naming.camelize
mod -> mod |> inspect
end
end
@doc """
Returns all compiled modules in a project.
"""
def modules do
Mix.Project.compile_path
|> Path.join("*.beam")
|> Path.wildcard
|> Enum.map(&beam_to_module/1)
end
defp beam_to_module(path) do
path |> Path.basename(".beam") |> String.to_atom()
end
end
| 28.393939 | 98 | 0.572038 |
ff31f8fac793d408e3d5d93d533ff2b3704ebe46 | 1,574 | exs | Elixir | mix.exs | gleidsonduarte/phoenix-elixir-hello-world | b89f7682ac63df8fb8235b2f426b8a82e4cec826 | [
"MIT"
] | null | null | null | mix.exs | gleidsonduarte/phoenix-elixir-hello-world | b89f7682ac63df8fb8235b2f426b8a82e4cec826 | [
"MIT"
] | null | null | null | mix.exs | gleidsonduarte/phoenix-elixir-hello-world | b89f7682ac63df8fb8235b2f426b8a82e4cec826 | [
"MIT"
] | null | null | null | defmodule PhoenixElixirHelloWorld.MixProject do
use Mix.Project

# Mix project definition: app name, version, compiler chain and deps.
def project do
  [
    app: :phoenix_elixir_hello_world,
    version: "0.1.0",
    elixir: "~> 1.7",
    # Compile extra support paths only in :test (see elixirc_paths/1 below).
    elixirc_paths: elixirc_paths(Mix.env()),
    # Phoenix/Gettext compilers run before the stock Elixir compilers.
    compilers: [:phoenix, :gettext] ++ Mix.compilers(),
    start_permanent: Mix.env() == :prod,
    aliases: aliases(),
    deps: deps()
  ]
end

# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
  [
    mod: {PhoenixElixirHelloWorld.Application, []},
    extra_applications: [:logger, :runtime_tools]
  ]
end

# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]

# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
  [
    {:phoenix, "~> 1.5.7"},
    {:phoenix_html, "~> 2.11"},
    {:phoenix_live_reload, "~> 1.2", only: :dev},
    {:phoenix_live_dashboard, "~> 0.4"},
    {:telemetry_metrics, "~> 0.4"},
    {:telemetry_poller, "~> 0.4"},
    {:gettext, "~> 0.11"},
    {:jason, "~> 1.0"},
    {:plug_cowboy, "~> 2.0"}
  ]
end

# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
#     $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
  [
    setup: ["deps.get", "cmd npm install --prefix assets"]
  ]
end
end
| 26.233333 | 84 | 0.605464 |
ff320c6316c797e8ab5a690baf843adbefc16cac | 419 | exs | Elixir | mix.exs | satom99/clust | 53c6a0c022b76bf68544bb73ed503904e68c3761 | [
"Apache-2.0"
] | null | null | null | mix.exs | satom99/clust | 53c6a0c022b76bf68544bb73ed503904e68c3761 | [
"Apache-2.0"
] | null | null | null | mix.exs | satom99/clust | 53c6a0c022b76bf68544bb73ed503904e68c3761 | [
"Apache-2.0"
] | null | null | null | defmodule Clust.MixProject do
use Mix.Project

# Mix project definition for the :clust application.
def project do
  [
    app: :clust,
    version: "0.1.0",
    elixir: "~> 1.7",
    build_embedded: Mix.env() == :prod,
    start_permanent: Mix.env() == :prod,
    deps: deps()
  ]
end

# OTP application callback module (starts the Clust supervision tree).
def application do
  [
    mod: {Clust, []}
  ]
end

# Runtime dependencies: fastglobal for cheap global reads,
# ex_hash_ring for consistent hashing.
defp deps do
  [
    {:fastglobal, "~> 1.0"},
    {:ex_hash_ring, "~> 3.0"}
  ]
end
end
| 14.964286 | 42 | 0.491647 |
ff3211323efb666ad1b7337dcb381c7511164c89 | 3,044 | ex | Elixir | lib/new_relic/harvest/collector/harvest_cycle.ex | ll1498/elixir_agent | a7b3bf1b49639c61d9eba3d4b91b30e0e856b54a | [
"Apache-2.0"
] | 1 | 2020-08-13T05:08:20.000Z | 2020-08-13T05:08:20.000Z | lib/new_relic/harvest/collector/harvest_cycle.ex | ll1498/elixir_agent | a7b3bf1b49639c61d9eba3d4b91b30e0e856b54a | [
"Apache-2.0"
] | null | null | null | lib/new_relic/harvest/collector/harvest_cycle.ex | ll1498/elixir_agent | a7b3bf1b49639c61d9eba3d4b91b30e0e856b54a | [
"Apache-2.0"
] | null | null | null | defmodule NewRelic.Harvest.Collector.HarvestCycle do
use GenServer

# Manages the harvest cycle for a given harvester.
@moduledoc false

alias NewRelic.Harvest.Collector

# Each harvest cycle process is registered under the name given in its config.
def start_link(config) do
  GenServer.start_link(__MODULE__, config, name: config[:name])
end

# Kicks off the first cycle immediately when the agent is enabled;
# harvester/timer start as nil until the first :harvest_cycle message.
def init(
      name: name,
      harvest_cycle_key: harvest_cycle_key,
      module: module,
      supervisor: supervisor
    ) do
  if NewRelic.Config.enabled?(), do: send(self(), :harvest_cycle)

  {:ok,
   %{
     name: name,
     harvest_cycle_key: harvest_cycle_key,
     module: module,
     supervisor: supervisor,
     harvester: nil,
     timer: nil
   }}
end

# API

# Returns the pid of the currently-active harvester for the given cycle name.
def current_harvester(name), do: Collector.HarvesterStore.current(name)

# Pauses the cycle and blocks until the current harvester process exits,
# so a final harvest can complete before shutdown.
def manual_shutdown(name) do
  harvester = current_harvester(name)
  # Monitor first so the DOWN message is guaranteed to arrive here.
  Process.monitor(harvester)
  GenServer.call(name, :pause)

  receive do
    {:DOWN, _ref, _, ^harvester, _reason} -> :ok
  after
    1000 -> NewRelic.log(:error, "Failed to shut down #{name}")
  end
end
# Server
# :restart — cancel the pending timer, rotate in a fresh harvester
# (completing the old one), and schedule the next cycle.
def handle_call(:restart, _from, %{timer: timer} = state) do
  stop_harvest_cycle(timer)
  harvester = swap_harvester(state)
  timer = trigger_harvest_cycle(state)
  {:reply, :ok, %{state | harvester: harvester, timer: timer}}
end

# :pause — complete the current harvester and stop scheduling cycles.
def handle_call(:pause, _from, %{harvester: harvester, timer: old_timer} = state) do
  stop_harvester(state, harvester)
  stop_harvest_cycle(old_timer)
  {:reply, :ok, %{state | harvester: nil, timer: nil}}
end

# Periodic tick: rotate the harvester and re-arm the timer.
def handle_info(:harvest_cycle, state) do
  harvester = swap_harvester(state)
  timer = trigger_harvest_cycle(state)
  {:noreply, %{state | harvester: harvester, timer: timer}}
end

# The *current* harvester crashed: replace it right away and restart the cycle.
def handle_info(
      {:DOWN, _ref, _, pid, _reason},
      %{harvester: crashed_harvester, timer: old_timer} = state
    )
    when pid == crashed_harvester do
  stop_harvest_cycle(old_timer)
  harvester = swap_harvester(state)
  timer = trigger_harvest_cycle(state)
  {:noreply, %{state | harvester: harvester, timer: timer}}
end

# DOWN for a previous (already-rotated) harvester — nothing to do.
def handle_info({:DOWN, _ref, _, _pid, _reason}, state) do
  {:noreply, state}
end

# Ignore any other stray messages.
def handle_info(_msg, state) do
  {:noreply, state}
end
# Helpers
# Starts a new harvester under the cycle's supervisor, publishes it as the
# current one, then asks the previous harvester to complete (report & exit).
# The new harvester is monitored so crashes route back to handle_info DOWN.
defp swap_harvester(%{supervisor: supervisor, name: name, module: harvester_module}) do
  {:ok, next} = Supervisor.start_child(supervisor, [])
  Process.monitor(next)
  prev = Collector.HarvesterStore.current(name)
  # Publish the new harvester before completing the old one, so there is
  # always a current harvester available to callers.
  Collector.HarvesterStore.update(name, next)
  harvester_module.complete(prev)
  next
end

# Clears the current-harvester slot and completes the given harvester.
defp stop_harvester(%{name: name, module: harvester_module}, harvester) do
  Collector.HarvesterStore.update(name, nil)
  harvester_module.complete(harvester)
end

# Cancels a pending cycle timer; no-op when timer is nil.
defp stop_harvest_cycle(timer), do: timer && Process.cancel_timer(timer)

# Schedules the next :harvest_cycle tick using the server-provided interval,
# defaulting to 60s when the agent run has no value for this key.
defp trigger_harvest_cycle(%{harvest_cycle_key: harvest_cycle_key}) do
  harvest_cycle = Collector.AgentRun.lookup(harvest_cycle_key) || 60_000
  Process.send_after(self(), :harvest_cycle, harvest_cycle)
end
end
| 27.178571 | 89 | 0.68594 |
ff321911760431b21b98df3d1b24c7157aa347cd | 66 | ex | Elixir | lib/screens_web/views/v2/layout_view.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 3 | 2021-07-27T14:11:00.000Z | 2022-01-03T14:16:43.000Z | lib/screens_web/views/v2/layout_view.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 444 | 2021-03-10T20:57:17.000Z | 2022-03-31T16:00:35.000Z | lib/screens_web/views/v2/layout_view.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | null | null | null | defmodule ScreensWeb.V2.LayoutView do
# Pulls in the shared view helpers/templates for V2 layout rendering.
use ScreensWeb, :view
end
| 16.5 | 37 | 0.80303 |
ff3259f7584902d14e53f999775dfb6d921db24f | 367 | ex | Elixir | lib/tod/slack.ex | webutil/tod | 0abde2ae2295aee88a40933b66adf9b0c6e5992f | [
"MIT"
] | null | null | null | lib/tod/slack.ex | webutil/tod | 0abde2ae2295aee88a40933b66adf9b0c6e5992f | [
"MIT"
] | null | null | null | lib/tod/slack.ex | webutil/tod | 0abde2ae2295aee88a40933b66adf9b0c6e5992f | [
"MIT"
] | null | null | null | defmodule TOD.Slack do
# Posts `text` to a Slack incoming webhook.
# Returns {:ok, body} on HTTP 200, {:error, result} for any other outcome
# (non-200 response or transport error).
def send_message(text, webhook_url) do
  payload = Jason.encode!(%{text: text})
  headers = [{"content-type", "application/json"}]

  webhook_url
  |> HTTPoison.post(payload, headers)
  |> case do
    {:ok, %HTTPoison.Response{status_code: 200, body: body}} -> {:ok, body}
    other -> {:error, other}
  end
end
end
| 24.466667 | 77 | 0.525886 |
ff3288292f228b86965ab99442f0e2f500c3e1e6 | 3,883 | exs | Elixir | test/marathon_client/sse_client_test.exs | aclemmensen/relay | 4bce71ed7d8bd4936f96d62ed08d007729c4253d | [
"BSD-3-Clause"
] | 5 | 2018-10-12T13:13:19.000Z | 2020-10-03T17:51:37.000Z | test/marathon_client/sse_client_test.exs | aclemmensen/relay | 4bce71ed7d8bd4936f96d62ed08d007729c4253d | [
"BSD-3-Clause"
] | 207 | 2018-02-09T14:24:14.000Z | 2020-07-25T11:09:19.000Z | test/marathon_client/sse_client_test.exs | aclemmensen/relay | 4bce71ed7d8bd4936f96d62ed08d007729c4253d | [
"BSD-3-Clause"
] | 1 | 2019-08-08T11:30:59.000Z | 2019-08-08T11:30:59.000Z | Code.require_file(Path.join([__DIR__, "marathon_client_helper.exs"]))
defmodule MarathonClient.SSEClientTest do
use ExUnit.Case
alias MarathonClient.SSEClient
import MarathonTestHelpers, only: [marathon_event: 2]
setup_all do
  # Quiet the logs and boot the HTTP server/client apps the fake Marathon needs.
  TestHelpers.override_log_level(:info)
  TestHelpers.setup_apps([:cowboy, :hackney])
end
# Starts an SSEClient against the fake Marathon's event stream,
# delivering events to this test process.
def stream_events(fm, timeout \\ 60_000) do
  events_url = FakeMarathon.base_url(fm) <> "/v2/events"
  SSEClient.start_link({events_url, [self()], timeout})
end
test "the SSE client streams events to a listener process" do
{:ok, fm} = start_supervised(FakeMarathon)
{:ok, _} = stream_events(fm)
# Stream an event, assert that we receive it within a second.
event = marathon_event("event_stream_attached", remoteAddress: "127.0.0.1")
FakeMarathon.event(fm, event.event, event.data)
assert_receive {:sse, ^event}, 1_000
# Stream and assert on another event.
event2 = marathon_event("event_stream_attached", remoteAddress: "10.1.2.3")
FakeMarathon.event(fm, event2.event, event2.data)
assert_receive {:sse, ^event2}, 1_000
end
test "the SSE client exits when the server connection is closed" do
{:ok, fm} = start_supervised(FakeMarathon)
{:ok, se} = stream_events(fm)
ref = Process.monitor(se)
# Stream an event, assert that we receive it within a second.
event = marathon_event("event_stream_attached", remoteAddress: "127.0.0.1")
FakeMarathon.event(fm, event.event, event.data)
assert_receive {:sse, ^event}, 1_000
# Close the connection on the server side.
FakeMarathon.end_stream(fm)
assert_receive {:DOWN, ^ref, :process, _, :normal}, 1_000
end
test "the SSE client fails on a bad response" do
# Trap exits so the start_link in stream_events doesn't break the test.
Process.flag(:trap_exit, true)
{:ok, fm} = start_supervised(FakeMarathon)
base_url = FakeMarathon.base_url(fm)
{:error, err} = SSEClient.start_link({base_url <> "/bad", [self()], 60_000})
assert err =~ ~r/Error connecting to event stream: .*{code: 404/
end
test "the SSE client only starts once a response is received" do
# On my machine, without waiting for the response, the delay is
# consistently under 100ms. I chose 250ms here as a balance between
# incorrect results and waiting too long.
delay_ms = 250
{:ok, fm} = start_supervised({FakeMarathon, [response_delay: delay_ms]})
t0 = Time.utc_now()
{:ok, _} = stream_events(fm)
t1 = Time.utc_now()
assert Time.diff(t1, t0, :millisecond) >= delay_ms
# Stream an event, assert that we receive it within a second.
event = marathon_event("event_stream_attached", remoteAddress: "127.0.0.1")
FakeMarathon.event(fm, event.event, event.data)
assert_receive {:sse, ^event}, 1_000
end
test "the SSE client times out if no data is received for too long" do
# Trap exits so the start_link in stream_events doesn't break the test.
Process.flag(:trap_exit, true)
{:ok, fm} = start_supervised(FakeMarathon)
{:ok, sc} = stream_events(fm, 100)
ref = Process.monitor(sc)
# Send keepalives for longer than our timeout interval.
FakeMarathon.keepalive(fm)
Process.sleep(50)
FakeMarathon.keepalive(fm)
Process.sleep(50)
FakeMarathon.keepalive(fm)
# Stream an event, assert that we receive it within a second.
event = marathon_event("event_stream_attached", remoteAddress: "127.0.0.1")
FakeMarathon.event(fm, event.event, event.data)
assert_receive {:sse, ^event}, 1_000
# Capture the error that gets logged outside our control.
import ExUnit.CaptureLog
assert capture_log(fn ->
# Wait for the timeout.
assert_receive {:DOWN, ^ref, :process, _, {:closed, :timeout}}, 150
end) =~ ~r/\[error\] .* terminating\n\*\* \(stop\) \{:closed, :timeout\}/
end
end
| 37.336538 | 84 | 0.690961 |
ff3297d5da52611f73d9d4ce30030e8b20640284 | 494 | ex | Elixir | lib/kaffy/tasks.ex | clszzyh/kaffy | 411766de3bf1b5b88ca0cb5078b7421154c70db5 | [
"MIT"
] | 840 | 2020-05-08T20:24:01.000Z | 2022-03-18T07:03:49.000Z | lib/kaffy/tasks.ex | clszzyh/kaffy | 411766de3bf1b5b88ca0cb5078b7421154c70db5 | [
"MIT"
] | 189 | 2020-05-07T04:58:35.000Z | 2022-02-09T16:33:36.000Z | lib/kaffy/tasks.ex | clszzyh/kaffy | 411766de3bf1b5b88ca0cb5078b7421154c70db5 | [
"MIT"
] | 103 | 2020-05-09T12:42:22.000Z | 2022-03-30T04:10:22.000Z | defmodule Kaffy.Tasks do
# Collects the results of every exported task_* function across all
# registered task modules into a single flat list.
# NOTE: List.flatten/1 is kept (not flat_map) because it flattens deeply,
# matching the original behavior if a task_* function returns a list.
def collect_tasks() do
  tasks_per_module =
    Enum.map(Kaffy.Utils.get_task_modules(), fn module ->
      for {fun, _arity} <- module.__info__(:functions),
          String.starts_with?(to_string(fun), "task_"),
          do: apply(module, fun, [])
    end)

  List.flatten(tasks_per_module)
end
# Queries every child of KaffyTaskSupervisor for its :info and
# returns the replies as a list.
def tasks_info() do
  for {_id, pid, _type, _modules} <- DynamicSupervisor.which_children(KaffyTaskSupervisor) do
    GenServer.call(pid, :info)
  end
end
end
| 24.7 | 81 | 0.609312 |
ff32aa1dada5b54de9cc5280fde76a7a28a5aeb4 | 3,027 | exs | Elixir | apps/omg_watcher_rpc/test/omg_watcher_rpc/tracer_test.exs | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 177 | 2018-08-24T03:51:02.000Z | 2020-05-30T13:29:25.000Z | apps/omg_watcher_rpc/test/omg_watcher_rpc/tracer_test.exs | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 1,042 | 2018-08-25T00:52:39.000Z | 2020-06-01T05:15:17.000Z | apps/omg_watcher_rpc/test/omg_watcher_rpc/tracer_test.exs | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 47 | 2018-08-24T12:06:33.000Z | 2020-04-28T11:49:25.000Z | # Copyright 2019-2020 OMG Network Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.WatcherRPC.TracerTest do
  @app :omg_watcher_rpc

  use ExUnit.Case
  import Plug.Conn

  alias OMG.WatcherRPC.Configuration
  alias OMG.WatcherRPC.Tracer

  setup do
    # Restore the original :api_mode after each test, since tests mutate it.
    original_mode = Application.get_env(:omg_watcher_rpc, :api_mode)
    _ = on_exit(fn -> Application.put_env(:omg_watcher_rpc, :api_mode, original_mode) end)
    :ok
  end

  test "api responses without errors get traced with metadata" do
    :ok = Application.put_env(@app, :api_mode, :watcher)
    version = Configuration.version()

    # Successful JSON envelope as produced by the watcher API.
    resp_body = """
    {
      "data": [],
      "service_name": "watcher",
      "success": true,
      "version": "#{version}"
    }
    """

    conn =
      :get
      |> Phoenix.ConnTest.build_conn("/alerts.get")
      |> Plug.Conn.resp(200, resp_body)

    trace_metadata = Tracer.add_trace_metadata(conn)

    # Success path: version tag, service, http request details — no error keys.
    expected =
      Keyword.new([
        {:tags, [version: version]},
        {:service, :watcher},
        {:http, [method: "GET", query_string: "", status_code: 200, url: "/alerts.get", user_agent: nil]},
        {:resource, "GET /alerts.get"},
        {:type, :web}
      ])

    assert trace_metadata == expected
  end

  test "if api responses with errors get traced with metadata" do
    :ok = Application.put_env(@app, :api_mode, :watcher_info)
    version = Configuration.version()

    # Error JSON envelope; the tracer should pick up the error assigns, not the body.
    resp_body = """
    {
      "data": {
        "code": "operation:not_found",
        "description": "Operation cannot be found. Check request URL.",
        "object": "error"
      },
      "service_name": "watcher_info",
      "success": false,
      "version": "#{version}"
    }
    """

    conn =
      :post
      |> Phoenix.ConnTest.build_conn("/")
      |> Plug.Conn.resp(200, resp_body)
      |> assign(:error_type, "operation:not_found")
      |> assign(:error_msg, "Operation cannot be found. Check request URL.")

    trace_metadata = Tracer.add_trace_metadata(conn)

    # Error path: error.type/error.msg tags and the error: true marker are added.
    expected =
      Keyword.new([
        {
          :tags,
          [
            {:version, version},
            {:"error.type", "operation:not_found"},
            {:"error.msg", "Operation cannot be found. Check request URL."}
          ]
        },
        {:error, [error: true]},
        {:service, :watcher_info},
        {:http, [method: "POST", query_string: "", status_code: 200, url: "/", user_agent: nil]},
        {:resource, "POST /"},
        {:type, :web}
      ])

    assert trace_metadata == expected
  end
end
| 28.027778 | 106 | 0.617443 |
ff32adfa642385edefed55c2bd8ea34df841ae24 | 719 | ex | Elixir | lib/database/project_permission.ex | UnofficialJuliaMirror/bors-ng-bors-ng | fcec51b5f6e5a691d3e310bd35ba7b74e55b667f | [
"Apache-2.0"
] | null | null | null | lib/database/project_permission.ex | UnofficialJuliaMirror/bors-ng-bors-ng | fcec51b5f6e5a691d3e310bd35ba7b74e55b667f | [
"Apache-2.0"
] | 1 | 2020-03-07T08:28:14.000Z | 2020-03-07T08:28:14.000Z | lib/database/project_permission.ex | UnofficialJuliaMirror/bors-ng-bors-ng | fcec51b5f6e5a691d3e310bd35ba7b74e55b667f | [
"Apache-2.0"
] | 3 | 2020-11-15T16:17:58.000Z | 2021-06-04T11:26:42.000Z | defmodule BorsNG.Database.ProjectPermission do
@behaviour Ecto.Type
@moduledoc """
A type to represent the permissions of a project member.
"""
def type, do: :string
def select_list, do: [
{"None", nil},
{"Admin", :admin},
{"Push", :push},
{"Pull", :pull},
]
def cast("") do
{:ok, nil}
end
def cast(data) when data in ["nil", "admin", "push", "pull"] do
{:ok, String.to_atom(data)}
end
def cast(data) when data in [nil, :admin, :push, :pull] do
{:ok, data}
end
def cast(_), do: :error
def load(data) do
cast(data)
end
def dump(data) when data in [nil, :admin, :push, :pull] do
{:ok, Atom.to_string(data)}
end
def dump(_), do: :error
end
| 18.435897 | 65 | 0.586926 |
ff32d412280eb6fe1edeecfb81f849a3d395a0e7 | 706 | ex | Elixir | lib/mix/tasks/kudos.generate.ex | ni-kismet/kudos | 3a4cf6ad375492d7434799aca2859ab17274a77b | [
"MIT"
] | null | null | null | lib/mix/tasks/kudos.generate.ex | ni-kismet/kudos | 3a4cf6ad375492d7434799aca2859ab17274a77b | [
"MIT"
] | 6 | 2020-06-15T20:23:52.000Z | 2021-05-26T20:07:00.000Z | lib/mix/tasks/kudos.generate.ex | ni-kismet/kudos | 3a4cf6ad375492d7434799aca2859ab17274a77b | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Kudos.Generate do
use Mix.Task

@shortdoc "Generates a licenses file"
@recursive false

# Entry point for `mix kudos.generate`.
# Flags: --include-dev-deps includes dev dependencies,
#        --dry-run prints the licenses instead of writing licenses.md.
def run(args \\ []) do
  IO.puts("Generating Licenses file...")

  result =
    args
    |> include_dev_deps?()
    |> Kudos.generate()
    |> handle_licenses(dry_run?(args))

  message = if result == :ok, do: "Done!", else: "Failed!"
  IO.puts(message)
end
# Dry run: print the generated content and report success.
defp handle_licenses(content, true) do
  IO.puts(content)
  :ok
end

# Normal run: write the content to licenses.md, returning File.write/2's result.
defp handle_licenses(content, false), do: File.write("licenses.md", content)
# True when the --dry-run flag was passed.
defp dry_run?(args), do: "--dry-run" in args

# True when the --include-dev-deps flag was passed.
defp include_dev_deps?(args), do: "--include-dev-deps" in args
end
| 18.578947 | 44 | 0.626062 |
ff32da832f62c7b4c04f6c9f17b69d5ee63a588a | 3,285 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/target_http_proxy.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_http_proxy.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_http_proxy.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.TargetHttpProxy do
  @moduledoc """
  Represents a Target HTTP Proxy resource.

  A target HTTP proxy is a component of certain types of load balancers. Global forwarding rules reference a target HTTP proxy, and the target proxy then references a URL map. For more information, read Using Target Proxies. (== resource_for beta.targetHttpProxies ==) (== resource_for v1.targetHttpProxies ==)

  ## Attributes

  *   `creationTimestamp` (*type:* `String.t`, *default:* `nil`) - [Output Only] Creation timestamp in RFC3339 text format.
  *   `description` (*type:* `String.t`, *default:* `nil`) - An optional description of this resource. Provide this property when you create the resource.
  *   `id` (*type:* `String.t`, *default:* `nil`) - [Output Only] The unique identifier for the resource. This identifier is defined by the server.
  *   `kind` (*type:* `String.t`, *default:* `compute#targetHttpProxy`) - [Output Only] Type of resource. Always compute#targetHttpProxy for target HTTP proxies.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
  *   `selfLink` (*type:* `String.t`, *default:* `nil`) - [Output Only] Server-defined URL for the resource.
  *   `urlMap` (*type:* `String.t`, *default:* `nil`) - URL to the UrlMap resource that defines the mapping from URL to the BackendService.
  """

  # Generated model code — field/1,2 macros come from ModelBase; do not edit by hand.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :creationTimestamp => String.t(),
          :description => String.t(),
          :id => String.t(),
          :kind => String.t(),
          :name => String.t(),
          :selfLink => String.t(),
          :urlMap => String.t()
        }

  field(:creationTimestamp)
  field(:description)
  field(:id)
  field(:kind)
  field(:name)
  field(:selfLink)
  field(:urlMap)
end
# Delegates JSON decoding of TargetHttpProxy to the generated model decoder.
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.TargetHttpProxy do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.TargetHttpProxy.decode(value, options)
  end
end
# Delegates JSON encoding of TargetHttpProxy to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.TargetHttpProxy do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 49.029851 | 490 | 0.709589 |
ff32ece7cfa7b7ecb18869c1b0e24d358c8ec445 | 109 | exs | Elixir | test/premailex/html_parser/meeseeks_test.exs | dmarkow/premailex | 13dfbc5330541f8a30cdf00d3ee21816a3f9820b | [
"MIT"
] | 131 | 2017-10-01T04:02:35.000Z | 2022-03-25T03:28:07.000Z | test/premailex/html_parser/meeseeks_test.exs | dmarkow/premailex | 13dfbc5330541f8a30cdf00d3ee21816a3f9820b | [
"MIT"
] | 37 | 2018-01-16T16:04:46.000Z | 2022-03-24T14:38:57.000Z | test/premailex/html_parser/meeseeks_test.exs | dmarkow/premailex | 13dfbc5330541f8a30cdf00d3ee21816a3f9820b | [
"MIT"
] | 17 | 2017-12-24T20:13:30.000Z | 2021-12-29T19:37:31.000Z | defmodule Premailex.HTMLParser.MeeseeksTest do
use ExUnit.Case
# Runs the doctests embedded in the Meeseeks parser module.
doctest Premailex.HTMLParser.Meeseeks
end
| 21.8 | 46 | 0.844037 |
ff33258ed44443ee40316b094f52b8ef423b6b25 | 1,809 | ex | Elixir | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/app_state.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/app_state.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/app_state.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidEnterprise.V1.Model.AppState do
  @moduledoc """
  List of states set by the app.

  ## Attributes

  *   `keyedAppState` (*type:* `list(GoogleApi.AndroidEnterprise.V1.Model.KeyedAppState.t)`, *default:* `nil`) - List of keyed app states. This field will always be present.
  *   `packageName` (*type:* `String.t`, *default:* `nil`) - The package name of the app. This field will always be present.
  """

  # Generated model code — field/1,2 macros come from ModelBase; do not edit by hand.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :keyedAppState => list(GoogleApi.AndroidEnterprise.V1.Model.KeyedAppState.t()) | nil,
          :packageName => String.t() | nil
        }

  field(:keyedAppState, as: GoogleApi.AndroidEnterprise.V1.Model.KeyedAppState, type: :list)
  field(:packageName)
end
# Delegates JSON decoding of AppState to the generated model decoder.
defimpl Poison.Decoder, for: GoogleApi.AndroidEnterprise.V1.Model.AppState do
  def decode(value, options) do
    GoogleApi.AndroidEnterprise.V1.Model.AppState.decode(value, options)
  end
end
# Delegates JSON encoding of AppState to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.AndroidEnterprise.V1.Model.AppState do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.18 | 173 | 0.735766 |
ff3336f3116daf8a410cfcb4d99569d95229ea56 | 1,559 | ex | Elixir | goal_light_ui/lib/goal_light_ui_web/views/error_helpers.ex | TheEndIsNear/goal_light | 8456189832130a5bbfb641b275146de413ee6fa8 | [
"MIT"
] | null | null | null | goal_light_ui/lib/goal_light_ui_web/views/error_helpers.ex | TheEndIsNear/goal_light | 8456189832130a5bbfb641b275146de413ee6fa8 | [
"MIT"
] | null | null | null | goal_light_ui/lib/goal_light_ui_web/views/error_helpers.ex | TheEndIsNear/goal_light | 8456189832130a5bbfb641b275146de413ee6fa8 | [
"MIT"
] | null | null | null | defmodule GoalLightUiWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error),
class: "invalid-feedback",
phx_feedback_for: input_id(form, field)
)
end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(GoalLightUiWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(GoalLightUiWeb.Gettext, "errors", msg, opts)
end
end
end
| 32.479167 | 80 | 0.667736 |
ff3359f1c739624967b27f30ca369425eefe8fee | 8,670 | ex | Elixir | apps/ewallet_db/lib/ewallet_db/role.ex | Macavirus/ewallet | ce62177b8bd3f7e72156930d384a1c4c047a3b5b | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/lib/ewallet_db/role.ex | Macavirus/ewallet | ce62177b8bd3f7e72156930d384a1c4c047a3b5b | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/lib/ewallet_db/role.ex | Macavirus/ewallet | ce62177b8bd3f7e72156930d384a1c4c047a3b5b | [
"Apache-2.0"
] | null | null | null | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletDB.Role do
@moduledoc """
Ecto Schema representing user roles.
"""
use Ecto.Schema
use Utils.Types.ExternalID
use EWalletDB.SoftDelete
use ActivityLogger.ActivityLogging
import Ecto.Changeset
import EWalletDB.Helpers.Preloader
alias Ecto.UUID
alias EWalletDB.{Membership, Repo, Role, User}
@primary_key {:uuid, UUID, autogenerate: true}
@timestamps_opts [type: :naive_datetime_usec]
# Static permission matrix for account-scoped roles.
# Shape: %{role_name => %{resource => :none | %{action => scope}}}.
# Scope values appear to mean: :global (any account), :accounts (assigned
# accounts), :self (the requesting user's own records) — TODO confirm with
# the permission-check consumers.
# NOTE(review): "admin" uses the `accounts:` key while "viewer" uses
# `account:` — looks inconsistent; verify which key the checks expect.
@account_role_permissions %{
  "admin" => %{
    exchange_pairs: %{
      all: :global,
      get: :global
    },
    accounts: %{all: :accounts, get: :accounts, update: :accounts},
    categories: %{all: :global, get: :global},
    memberships: %{all: :accounts, get: :accounts, create: :accounts, delete: :accounts},
    admin_users: %{
      all: :accounts,
      get: :accounts,
      create: :accounts,
      update: :accounts,
      update_password: :self,
      update_email: :self,
      upload_avatar: :self,
      get_account: :self,
      get_accounts: :self,
      logout: :self
    },
    end_users: %{
      all: :accounts,
      get: :accounts,
      listen: :accounts,
      create: :accounts,
      update: :accounts,
      login: :accounts,
      logout: :accounts
    },
    access_keys: %{
      all: :accounts,
      get: :accounts,
      create: :accounts,
      update: :accounts,
      disable: :accounts
    },
    api_keys: %{
      all: :accounts,
      get: :accounts,
      create: :accounts,
      update: :accounts,
      disable: :accounts
    },
    tokens: %{all: :global, get: :global},
    mints: :none,
    account_wallets: %{
      all: :global,
      get: :global,
      listen: :accounts,
      view_balance: :accounts,
      create: :accounts,
      update: :accounts
    },
    end_user_wallets: %{
      all: :global,
      get: :global,
      listen: :accounts,
      view_balance: :accounts,
      create: :accounts,
      update: :accounts
    },
    blockchain_wallets: %{},
    account_transactions: %{
      all: :accounts,
      get: :accounts,
      listen: :accounts,
      create: :accounts
    },
    end_user_transactions: %{
      all: :accounts,
      get: :accounts,
      listen: :accounts,
      create: :accounts
    },
    account_transaction_requests: %{
      all: :accounts,
      get: :accounts,
      listen: :accounts,
      create: :accounts,
      confirm: :accounts
    },
    end_user_transaction_requests: %{
      all: :accounts,
      get: :accounts,
      listen: :accounts,
      create: :accounts,
      confirm: :accounts
    },
    account_transaction_consumptions: %{
      all: :accounts,
      get: :accounts,
      listen: :accounts,
      create: :accounts,
      cancel: :accounts
    },
    end_user_transaction_consumptions: %{
      all: :accounts,
      get: :accounts,
      listen: :accounts,
      create: :accounts,
      cancel: :accounts
    },
    exports: :none,
    configuration: :none,
    permissions: %{all: :global}
  },
  # Viewer is read-only: listing/fetching only, plus self-service profile actions.
  "viewer" => %{
    exchange_pairs: %{
      all: :global,
      get: :global
    },
    account: %{all: :accounts, get: :accounts},
    categories: %{all: :global, get: :global},
    memberships: %{all: :accounts, get: :accounts},
    admin_users: %{
      all: :accounts,
      get: :accounts,
      update_password: :self,
      update_email: :self,
      upload_avatar: :self,
      get_account: :self,
      get_accounts: :self,
      logout: :self
    },
    end_users: %{all: :accounts, get: :accounts, listen: :accounts},
    access_keys: %{all: :accounts, get: :accounts},
    api_keys: %{all: :accounts, get: :accounts},
    tokens: %{all: :global, get: :global},
    mints: :none,
    account_wallets: %{all: :global, get: :global, listen: :accounts, view_balance: :accounts},
    end_user_wallets: %{all: :global, get: :global, listen: :accounts, view_balance: :accounts},
    account_transactions: %{all: :accounts, get: :accounts, listen: :accounts},
    end_user_transactions: %{all: :accounts, get: :accounts, listen: :accounts},
    account_transaction_requests: %{all: :accounts, get: :accounts, listen: :accounts},
    end_user_transaction_requests: %{all: :accounts, get: :accounts, listen: :accounts},
    account_transaction_consumptions: %{all: :accounts, get: :accounts, listen: :accounts},
    end_user_transaction_consumptions: %{all: :accounts, get: :accounts, listen: :accounts},
    exports: :none,
    configuration: :none,
    permissions: %{all: :global}
  }
}
schema "role" do
external_id(prefix: "rol_")
field(:name, :string)
field(:display_name, :string)
many_to_many(
:users,
User,
join_through: Membership,
join_keys: [role_uuid: :uuid, user_uuid: :uuid]
)
timestamps()
soft_delete()
activity_logging()
end
# Names of all built-in account roles — the keys of the compile-time
# `@account_role_permissions` map declared above.
def account_roles, do: Map.keys(@account_role_permissions)

# The full role -> ability -> scope permission map, frozen at compile time.
def account_role_permissions, do: @account_role_permissions
# Builds a changeset for creating or updating a role.
# `cast_and_validate_required_for_activity_log/3` is a project helper that
# casts the allowed fields and records the originator for activity logging.
defp changeset(%Role{} = key, attrs) do
  key
  |> cast_and_validate_required_for_activity_log(
    attrs,
    cast: [:name, :display_name],
    required: [:name]
  )
  # NOTE(review): `:name` is already listed in `required:` above, so this
  # second `validate_required/2` looks redundant — confirm before removing.
  |> validate_required([:name])
  |> validate_length(:name, count: :bytes, max: 255)
  |> validate_length(:display_name, count: :bytes, max: 255)
  # Only the predefined account role names are accepted.
  |> validate_inclusion(:name, account_roles())
  |> unique_constraint(:name, name: "role_name_index")
end
@doc """
Get all roles.
"""
@spec all(keyword()) :: [%__MODULE__{}] | []
def all(opts \\ []) do
__MODULE__
|> Repo.all()
|> preload_option(opts)
end
@doc """
Retrieves a role with the given ID.
"""
@spec get(String.t(), keyword()) :: %__MODULE__{} | nil
def get(id, opts \\ [])
def get(id, opts) when is_external_id(id) do
get_by([id: id], opts)
end
def get(_id, _opts), do: nil
@doc """
Retrieves a role using one or more fields.
"""
@spec get_by(map() | keyword(), keyword()) :: %__MODULE__{} | nil
def get_by(fields, opts \\ []) do
__MODULE__
|> exclude_deleted()
|> Repo.get_by(fields)
|> preload_option(opts)
end
@doc """
Creates a new role with the passed attributes.
"""
def insert(attrs) do
%Role{}
|> changeset(attrs)
|> Repo.insert_record_with_activity_log()
end
@doc """
Updates a role with the passed attributes.
"""
@spec update(%__MODULE__{}, map()) :: {:ok, %__MODULE__{}} | {:error, Ecto.Changeset.t()}
def update(role, attrs) do
role
|> changeset(attrs)
|> Repo.update_record_with_activity_log()
end
@doc """
Checks whether the given role is soft-deleted.
"""
@spec deleted?(%__MODULE__{}) :: boolean()
def deleted?(role), do: SoftDelete.deleted?(role)
@doc """
Soft-deletes the given role. The operation fails if the role
has one more more users associated.
"""
@spec delete(%__MODULE__{}, map()) ::
{:ok, %__MODULE__{}} | {:error, Ecto.Changeset.t()} | {:error, atom()}
def delete(role, originator) do
empty? =
role
|> Repo.preload(:users)
|> Map.get(:users)
|> Enum.empty?()
case empty? do
true -> SoftDelete.delete(role, originator)
false -> {:error, :role_not_empty}
end
end
@doc """
Restores the given role from soft-delete.
"""
@spec restore(%__MODULE__{}, map()) :: {:ok, %__MODULE__{}} | {:error, Ecto.Changeset.t()}
def restore(role, originator), do: SoftDelete.restore(role, originator)
@doc """
Compares that the given string value is equivalent to the given role.
"""
def is_role?(%Role{} = role, role_name) do
role.name == role_name
end
# Seeds one role record per predefined account role name, attributed to
# `originator` in the activity log. The `{:ok, _}` match raises if any
# insert fails (e.g. the role already exists).
def insert_default_roles(originator) do
  Enum.each(account_roles(), fn role ->
    {:ok, _} =
      insert(%{
        name: role,
        # e.g. "admin" -> "Admin"
        display_name: String.capitalize(role),
        originator: originator
      })
  end)

  :ok
end
end
| 27.788462 | 98 | 0.598155 |
ff3398e67bc852e637eba53be451b1e41b0ed5c2 | 784 | ex | Elixir | lib/one_word/client.ex | Bentheburrito/onewordstorybot | 2d75f3a59a952d141cfa4306fb14c902815cbff5 | [
"MIT"
] | null | null | null | lib/one_word/client.ex | Bentheburrito/onewordstorybot | 2d75f3a59a952d141cfa4306fb14c902815cbff5 | [
"MIT"
] | null | null | null | lib/one_word/client.ex | Bentheburrito/onewordstorybot | 2d75f3a59a952d141cfa4306fb14c902815cbff5 | [
"MIT"
] | null | null | null | defmodule OneWord.Client do
use Nostrum.Consumer
alias Nostrum.Api
# Starts the Nostrum event consumer process for this module.
def start_link do
  Consumer.start_link(__MODULE__)
end
# Handles new Discord messages. Messages from bot authors are ignored;
# otherwise the message is first offered to the command handler and,
# when it is not a command, added as the next word of the channel's game.
def handle_event({:MESSAGE_CREATE, message, _ws_state}) do
  if (!message.author.bot) do
    with :notacommand <- OneWord.Command.handle_message(message),
         {:ok, game} <- OneWord.GameHandler.get_game(message.channel_id),
         do: OneWord.Game.add_word(game.pid, message)
  end

  # Roughly a 1-in-40 chance to react to any message with a random emoji.
  if Enum.random(1..40) == 1, do: Api.create_reaction(message.channel_id, message.id, Enum.random(["thonk:381325006761754625", "🤔", "😂", "😭"]))
end

# Logs the logged-in account and sets the bot's presence once the
# gateway reports READY.
def handle_event({:READY, data, _ws_state}) do
  IO.puts("Logged in under user #{data.user.username}##{data.user.discriminator}")
  Api.update_status(:dnd, "Audio Books", 2)
end

# Ignore all other gateway events.
def handle_event(_event), do: :noop
end
| 29.037037 | 143 | 0.718112 |
ff33a3e2e38acaed6fb3d6bb28f903c1125bd87d | 138 | exs | Elixir | apps/general/test/general_test.exs | bornmeyer/janus_signaling | cbab905aaa844a2762d4647f9363370cecd3db22 | [
"Apache-2.0"
] | null | null | null | apps/general/test/general_test.exs | bornmeyer/janus_signaling | cbab905aaa844a2762d4647f9363370cecd3db22 | [
"Apache-2.0"
] | null | null | null | apps/general/test/general_test.exs | bornmeyer/janus_signaling | cbab905aaa844a2762d4647f9363370cecd3db22 | [
"Apache-2.0"
] | null | null | null | defmodule GeneralTest do
use ExUnit.Case
doctest General
test "greets the world" do
assert General.hello() == :world
end
end
| 15.333333 | 36 | 0.710145 |
ff34133c250f03e03a11cf29692955ac72163d13 | 142 | ex | Elixir | wadm/lib/application.ex | wasmCloud/lattice-controller | f965ee6ed367904d5efe3bd7475a4e24ea578254 | [
"Apache-2.0"
] | 2 | 2021-08-31T12:55:58.000Z | 2021-09-04T13:20:22.000Z | wadm/lib/application.ex | wasmCloud/wadm | ccf76739039f14fc40548a00dd209d7ffa5c0c4e | [
"Apache-2.0"
] | 12 | 2021-11-28T19:22:20.000Z | 2022-01-25T18:41:48.000Z | wadm/lib/application.ex | wasmCloud/wadm | ccf76739039f14fc40548a00dd209d7ffa5c0c4e | [
"Apache-2.0"
] | 1 | 2021-11-25T00:42:06.000Z | 2021-11-25T00:42:06.000Z | defmodule Wadm.Application do
use Application
# Application callback: boots the top-level Wadm supervision tree.
def start(_type, _args) do
  Wadm.Supervisor.start_link(name: Wadm.Supervisor)
end
end
| 17.75 | 53 | 0.760563 |
ff341ac908af905c39d65dc64a9804412254a3f3 | 70 | ex | Elixir | lib/leeloo/metrics_exporter.ex | azohra/leeloo | d5b26dda0e54c4cdc2f47208be433e6b79223284 | [
"MIT"
] | null | null | null | lib/leeloo/metrics_exporter.ex | azohra/leeloo | d5b26dda0e54c4cdc2f47208be433e6b79223284 | [
"MIT"
] | null | null | null | lib/leeloo/metrics_exporter.ex | azohra/leeloo | d5b26dda0e54c4cdc2f47208be433e6b79223284 | [
"MIT"
] | null | null | null | defmodule Leeloo.MetricsExporter do
use Prometheus.PlugExporter
end
| 17.5 | 35 | 0.857143 |
ff34279cade3f34c467b75b8102ef1ae739476b2 | 2,588 | ex | Elixir | lib/sippet/message/request_line.ex | BrendanBall/elixir-sippet | 877edcbbc8d8ba5b6b41684c20041510c410aad3 | [
"BSD-3-Clause"
] | 54 | 2017-04-26T03:15:56.000Z | 2022-02-08T00:22:11.000Z | lib/sippet/message/request_line.ex | BrendanBall/elixir-sippet | 877edcbbc8d8ba5b6b41684c20041510c410aad3 | [
"BSD-3-Clause"
] | 21 | 2017-06-19T08:00:33.000Z | 2022-01-19T10:38:11.000Z | lib/sippet/message/request_line.ex | BrendanBall/elixir-sippet | 877edcbbc8d8ba5b6b41684c20041510c410aad3 | [
"BSD-3-Clause"
] | 22 | 2017-06-19T08:15:34.000Z | 2022-03-22T13:56:20.000Z | defmodule Sippet.Message.RequestLine do
@moduledoc """
A SIP Request-Line struct, composed by the Method, Request-URI and
SIP-Version.
The `start_line` of requests are represented by this struct. The RFC 3261
represents the Request-Line as:
Request-Line = Method SP Request-URI SP SIP-Version CRLF
The above `Method` is represented by atoms, when the method is a known one,
or by binaries, when the method is unknown. Known ones are `:ack`, `:invite`,
`:register`, `:cancel`, `:message` and all others returned by the function
`Sippet.Message.known_methods/0`.
The `Request-URI` is represented by a `Sippet.URI` struct, breaking down the
SIP-URI in more useful parts for processing.
The `SIP-Version` is a `{major, minor}` tuple, which assumes the value
`{2, 0}` in standard implementations.
"""
alias Sippet.URI, as: URI
defstruct [
method: nil,
request_uri: nil,
version: {2, 0}
]
@type method :: Sippet.Message.method
@type uri :: URI.t
@type version :: {integer, integer}
@type t :: %__MODULE__{
method: method,
request_uri: uri,
version: version
}
@doc """
Creates a Request-Line struct.
The version will assume the default value `{2, 0}`.
"""
@spec new(method, uri | binary) :: t
def new(method, %URI{} = request_uri)
when is_atom(method) or is_binary(method) do
%__MODULE__{
method: method,
request_uri: request_uri,
}
end
def new(method, request_uri)
when is_binary(request_uri) do
new(method, URI.parse!(request_uri))
end
@doc """
Returns a binary which corresponds to the text representation of the given
Request-Line.
It does not includes an ending line CRLF.
"""
@spec to_string(t) :: binary
defdelegate to_string(value), to: String.Chars.Sippet.Message.RequestLine
@doc """
Returns an iodata which corresponds to the text representation of the given
Request-Line.
It does not includes an ending line CRLF.
"""
@spec to_iodata(t) :: iodata
def to_iodata(%Sippet.Message.RequestLine{version: {major, minor},
request_uri: uri, method: method}) do
[if(is_atom(method), do: String.upcase(Atom.to_string(method)), else: method),
" ", Sippet.URI.to_string(uri),
" SIP/", Integer.to_string(major), ".", Integer.to_string(minor)]
end
end
defimpl String.Chars, for: Sippet.Message.RequestLine do
  # Render the Request-Line by building its iodata form and flattening
  # it into a single binary.
  def to_string(%Sippet.Message.RequestLine{} = request_line) do
    IO.iodata_to_binary(Sippet.Message.RequestLine.to_iodata(request_line))
  end
end
| 27.242105 | 82 | 0.686631 |
ff344778e460c174c25fe065aa0455322db3cc07 | 1,748 | ex | Elixir | lib/seven/otters/policy.ex | the-AjK/sevenotters | b56c4c129f441f832561b6a9aff66281aa8627de | [
"MIT"
] | 7 | 2019-08-23T16:28:34.000Z | 2020-12-18T04:57:51.000Z | lib/seven/otters/policy.ex | the-AjK/sevenotters | b56c4c129f441f832561b6a9aff66281aa8627de | [
"MIT"
] | 9 | 2021-07-29T16:18:30.000Z | 2021-07-29T16:36:59.000Z | lib/seven/otters/policy.ex | the-AjK/sevenotters | b56c4c129f441f832561b6a9aff66281aa8627de | [
"MIT"
] | 6 | 2020-04-07T15:41:50.000Z | 2021-10-01T19:03:08.000Z | defmodule Seven.Otters.Policy do
@moduledoc false
# Injects a GenServer-based "policy" process into the using module.
# `listener_of_events` is the list of event types the policy subscribes to
# on the EventStore; each received event is translated by the using
# module's `handle_event/1` into zero or more command requests.
defmacro __using__(listener_of_events: listener_of_events) do
  quote location: :keep do
    use GenServer
    use Seven.Utils.Tagger
    @tag :policy

    # API

    # Starts the policy process (registered under the using module's name)
    # and subscribes it to each configured event type in the event store.
    def start_link(opts \\ []) do
      {:ok, pid} = GenServer.start_link(__MODULE__, {:ok, nil}, opts ++ [name: __MODULE__])

      # subscribe my events in store
      unquote(listener_of_events)
      |> Enum.each(&Seven.EventStore.EventStore.subscribe(&1, pid))

      {:ok, pid}
    end

    # Callbacks

    def init({:ok, state}) do
      Seven.Log.info("#{__MODULE__} started.")
      {:ok, state}
    end

    def terminate(:normal, _state) do
      Seven.Log.debug("Terminating #{__MODULE__}(#{inspect(self())}) for :normal")
    end

    # NOTE(review): `state` is unused here (compiler warning); it cannot be
    # renamed to `_state` without touching generated code — confirm upstream.
    def terminate(reason, state) do
      Seven.Log.debug("Terminating #{__MODULE__}(#{inspect(self())}) for #{inspect(reason)}")
    end

    # Subscribed events arrive as plain messages: map each one through the
    # using module's `handle_event/1` and dispatch the resulting command
    # requests on the command bus.
    def handle_info(%Seven.Otters.Event{} = event, state) do
      Seven.Log.event_received(event, __MODULE__)

      handle_event(event)
      |> Enum.map(&Seven.Log.command_request_sent/1)
      |> Enum.each(&Seven.CommandBus.send_command_request/1)

      {:noreply, state}
    end

    def handle_info({:DOWN, _ref, :process, pid, _reason}, state) do
      Seven.Log.debug("Dying #{__MODULE__}(#{inspect(pid)}): #{inspect(state)}")
      {:noreply, state}
    end

    # Ignore any other message.
    def handle_info(_, state), do: {:noreply, state}

    @before_compile Seven.Otters.Policy
  end
end
# Injected after the using module's own clauses: if no `handle_event/1`
# clause matched a subscribed event, raise loudly instead of silently
# dropping it.
defmacro __before_compile__(_env) do
  quote generated: true do
    defp handle_event(event), do: raise("Event #{inspect(event)} is not handled correctly by #{__MODULE__}")
  end
end
end
| 27.746032 | 110 | 0.621281 |
ff345bed200b240b9fbb4c1f027586eb7fef7d34 | 1,065 | ex | Elixir | lib/open_cagex/response.ex | dreallday/open-cagex | 84caf2c421b7272b25d21b469c3441be2cf7ba61 | [
"MIT"
] | 13 | 2017-06-20T07:25:20.000Z | 2022-01-19T17:44:12.000Z | lib/open_cagex/response.ex | winescout/open-cagex | 3d1fe2197341b32022d0f9cbab33a4243307b261 | [
"MIT"
] | 2 | 2019-11-10T12:19:29.000Z | 2020-12-05T02:42:58.000Z | lib/open_cagex/response.ex | winescout/open-cagex | 3d1fe2197341b32022d0f9cbab33a4243307b261 | [
"MIT"
] | 6 | 2017-12-12T20:30:34.000Z | 2020-12-02T21:23:56.000Z | defmodule OpenCagex.Response do
@moduledoc """
Allows to handle responses from OpenCagex.Api
"""
@errors %{
400 => "Invalid request (bad request; a required parameter is missing; invalid coordinates)",
402 => "Valid request but quota exceeded (payment required)",
403 => "Invalid or missing API key",
404 => "Invalid API endpoint",
408 => "Timeout; you can try again",
410 => "Request too long",
429 => "Too many requests (too quickly, rate limiting)",
503 => "Internal server error",
}
@doc """
Handles and parses :ok responses with status 200
"""
def handle({:ok, %{status_code: 200} = response}) do
response
|> parse_body
end
@doc """
Handles and parses :ok responses with status different than 200
"""
def handle({:ok, %{status_code: status_code}}) do
{:error, Map.get(@errors, status_code)}
end
@doc """
Handles and parses :error responses
"""
def handle({:error, response}), do: {:error, response}
# Decodes the JSON response body, returning Poison's result tuple
# ({:ok, map} or {:error, _}) unchanged.
defp parse_body(response) do
  Poison.decode(response.body)
end
end
| 25.97561 | 97 | 0.651643 |
ff346100b5c06da94b40db763015318d090539c0 | 16,859 | ex | Elixir | lib/cldr/calendar/behaviour.ex | kianmeng/cldr_calendars | af1ffe1b40100792e8e643ce09cc7db7224060b6 | [
"Apache-2.0"
] | 6 | 2019-08-29T12:31:03.000Z | 2021-08-28T23:15:39.000Z | lib/cldr/calendar/behaviour.ex | kianmeng/cldr_calendars | af1ffe1b40100792e8e643ce09cc7db7224060b6 | [
"Apache-2.0"
] | 8 | 2019-11-08T09:13:00.000Z | 2021-12-26T05:34:28.000Z | lib/cldr/calendar/behaviour.ex | kianmeng/cldr_calendars | af1ffe1b40100792e8e643ce09cc7db7224060b6 | [
"Apache-2.0"
] | 2 | 2020-05-08T12:19:01.000Z | 2022-03-03T14:53:06.000Z | defmodule Cldr.Calendar.Behaviour do
defmacro __using__(opts \\ []) do
epoch = Keyword.fetch!(opts, :epoch)
{date, []} = Code.eval_quoted(epoch)
epoch = Cldr.Calendar.date_to_iso_days(date)
epoch_day_of_week = Date.day_of_week(date)
days_in_week = Keyword.get(opts, :days_in_week, 7)
first_day_of_week = Keyword.get(opts, :first_day_of_week, 1)
cldr_calendar_type = Keyword.get(opts, :cldr_calendar_type, :gregorian)
cldr_calendar_base = Keyword.get(opts, :cldr_calendar_base, :month)
months_in_ordinary_year = Keyword.get(opts, :months_in_ordinary_year, 12)
months_in_leap_year = Keyword.get(opts, :months_in_leap_year, months_in_ordinary_year)
quote location: :keep do
import Cldr.Macros
@behaviour Calendar
@behaviour Cldr.Calendar
@after_compile Cldr.Calendar.Behaviour
@days_in_week unquote(days_in_week)
@quarters_in_year 4
@epoch unquote(epoch)
@epoch_day_of_week unquote(epoch_day_of_week)
@first_day_of_week unquote(first_day_of_week)
@last_day_of_week Cldr.Math.amod(@first_day_of_week + @days_in_week - 1, @days_in_week)
@months_in_ordinary_year unquote(months_in_ordinary_year)
@months_in_leap_year unquote(months_in_leap_year)
def epoch do
@epoch
end
def epoch_day_of_week do
@epoch_day_of_week
end
def first_day_of_week do
@first_day_of_week
end
def last_day_of_week do
@last_day_of_week
end
@doc """
Defines the CLDR calendar type for this calendar.
This type is used in support of `Cldr.Calendar.
localize/3`.
"""
@impl true
def cldr_calendar_type do
unquote(cldr_calendar_type)
end
@doc """
Identifies that this calendar is month based.
"""
@impl true
def calendar_base do
unquote(cldr_calendar_base)
end
@doc """
Determines if the `date` given is valid according to
this calendar.
"""
@impl true
def valid_date?(year, month, day) do
month <= months_in_year(year) && day <= days_in_month(year, month)
end
@doc """
Calculates the year and era from the given `year`.
"""
@era_module Cldr.Calendar.Era.era_module(unquote(cldr_calendar_type))
@spec year_of_era(Calendar.year) :: {year :: Calendar.year(), era :: Calendar.era()}
unless Code.ensure_loaded?(Calendar.ISO) && function_exported?(Calendar.ISO, :year_of_era, 3) do
@impl true
end
def year_of_era(year) do
iso_days = date_to_iso_days(year, 1, 1)
@era_module.year_of_era(iso_days, year)
end
@doc """
Calculates the year and era from the given `date`.
"""
@spec year_of_era(Calendar.year, Calendar.month, Calendar.day) ::
{year :: Calendar.year(), era :: Calendar.era()}
@impl true
def year_of_era(year, month, day) do
iso_days = date_to_iso_days(year, month, day)
@era_module.year_of_era(iso_days, year)
end
@doc """
Returns the calendar year as displayed
on rendered calendars.
"""
@spec calendar_year(Calendar.year, Calendar.month, Calendar.day) :: Calendar.year()
@impl true
def calendar_year(year, month, day) do
year
end
@doc """
Returns the related gregorain year as displayed
on rendered calendars.
"""
@spec related_gregorian_year(Calendar.year, Calendar.month, Calendar.day) :: Calendar.year()
@impl true
def related_gregorian_year(year, month, day) do
year
end
@doc """
Returns the extended year as displayed
on rendered calendars.
"""
@spec extended_year(Calendar.year, Calendar.month, Calendar.day) :: Calendar.year()
@impl true
def extended_year(year, month, day) do
year
end
@doc """
Returns the cyclic year as displayed
on rendered calendars.
"""
@spec cyclic_year(Calendar.year, Calendar.month, Calendar.day) :: Calendar.year()
@impl true
def cyclic_year(year, month, day) do
year
end
@doc """
Returns the quarter of the year from the given
`year`, `month`, and `day`.
"""
@spec quarter_of_year(Calendar.year, Calendar.month, Calendar.day) ::
Cldr.Calendar.quarter()
@impl true
def quarter_of_year(year, month, day) do
ceil(month / (months_in_year(year) / @quarters_in_year))
end
@doc """
Returns the month of the year from the given
`year`, `month`, and `day`.
"""
@spec month_of_year(Calendar.year, Calendar.month, Calendar.day) ::
Calendar.month() | {Calendar.month, Cldr.Calendar.leap_month?()}
@impl true
def month_of_year(_year, month, _day) do
month
end
@doc """
Calculates the week of the year from the given
`year`, `month`, and `day`.
By default this function always returns
`{:error, :not_defined}`.
"""
@spec week_of_year(Calendar.year, Calendar.month, Calendar.day) ::
{:error, :not_defined}
@impl true
def week_of_year(_year, _month, _day) do
{:error, :not_defined}
end
@doc """
Calculates the ISO week of the year from the
given `year`, `month`, and `day`.
By default this function always returns
`{:error, :not_defined}`.
"""
@spec iso_week_of_year(Calendar.year, Calendar.month, Calendar.day) ::
{:error, :not_defined}
@impl true
def iso_week_of_year(_year, _month, _day) do
{:error, :not_defined}
end
@doc """
Calculates the week of the year from the given
`year`, `month`, and `day`.
By default this function always returns
`{:error, :not_defined}`.
"""
@spec week_of_month(Calendar.year, Calendar.month, Calendar.day) ::
{pos_integer(), pos_integer()} | {:error, :not_defined}
@impl true
def week_of_month(_year, _month, _day) do
{:error, :not_defined}
end
@doc """
Calculates the day and era from the given
`year`, `month`, and `day`.
By default we consider on two eras: before the epoch
and on-or-after the epoch.
"""
@spec day_of_era(Calendar.year, Calendar.month, Calendar.day) ::
{day :: Calendar.day, era :: Calendar.era}
@impl true
def day_of_era(year, month, day) do
iso_days = date_to_iso_days(year, month, day)
@era_module.day_of_era(iso_days)
end
@doc """
Calculates the day of the year from the given
`year`, `month`, and `day`.
"""
@spec day_of_year(Calendar.year, Calendar.month, Calendar.day) :: Calendar.day()
@impl true
def day_of_year(year, month, day) do
first_day = date_to_iso_days(year, 1, 1)
this_day = date_to_iso_days(year, month, day)
this_day - first_day + 1
end
if (Code.ensure_loaded?(Date) && function_exported?(Date, :day_of_week, 2)) do
@impl true
@spec day_of_week(Calendar.year, Calendar.month, Calendar.day, :default | atom()) ::
{Calendar.day_of_week(), first_day_of_week :: non_neg_integer(),
last_day_of_week :: non_neg_integer()}
def day_of_week(year, month, day, :default = starting_on) do
days = date_to_iso_days(year, month, day)
day_of_week = Cldr.Math.amod(days - 1, @days_in_week)
{day_of_week, @first_day_of_week, @last_day_of_week}
end
defoverridable day_of_week: 4
else
@impl true
@spec day_of_week(Calendar.year, Calendar.month, Calendar.day) :: 1..7
def day_of_week(year, month, day) do
day_of_week(year, month, day, :default)
end
defoverridable day_of_week: 3
end
@doc """
Returns the number of periods in a given
`year`. A period corresponds to a month
in month-based calendars and a week in
week-based calendars.
"""
@impl true
def periods_in_year(year) do
months_in_year(year)
end
@doc """
Returns the number of months in a
given `year`.
"""
@impl true
def months_in_year(year) do
if leap_year?(year), do: @months_in_leap_year, else: @months_in_ordinary_year
end
@doc """
Returns the number of weeks in a
given `year`.
"""
@impl true
def weeks_in_year(_year) do
{:error, :not_defined}
end
@doc """
Returns the number days in a given year.
The year is the number of years since the
epoch.
"""
@impl true
def days_in_year(year) do
this_year = date_to_iso_days(year, 1, 1)
next_year = date_to_iso_days(year + 1, 1, 1)
next_year - this_year + 1
end
@doc """
Returns how many days there are in the given year
and month.
"""
@spec days_in_month(Calendar.year, Calendar.month) :: Calendar.month()
@impl true
def days_in_month(year, month) do
start_of_this_month =
date_to_iso_days(year, month, 1)
start_of_next_month =
if month == months_in_year(year) do
date_to_iso_days(year + 1, 1, 1)
else
date_to_iso_days(year, month + 1, 1)
end
start_of_next_month - start_of_this_month
end
@doc """
Returns the number days in a a week.
"""
def days_in_week do
@days_in_week
end
@doc """
Returns a `Date.Range.t` representing
a given year.
"""
@impl true
def year(year) do
last_month = months_in_year(year)
days_in_last_month = days_in_month(year, last_month)
with {:ok, start_date} <- Date.new(year, 1, 1, __MODULE__),
{:ok, end_date} <- Date.new(year, last_month, days_in_last_month, __MODULE__) do
Date.range(start_date, end_date)
end
end
@doc """
Returns a `Date.Range.t` representing
a given quarter of a year.
"""
@impl true
def quarter(_year, _quarter) do
{:error, :not_defined}
end
@doc """
Returns a `Date.Range.t` representing
a given month of a year.
"""
@impl true
def month(year, month) do
starting_day = 1
ending_day = days_in_month(year, month)
with {:ok, start_date} <- Date.new(year, month, starting_day, __MODULE__),
{:ok, end_date} <- Date.new(year, month, ending_day, __MODULE__) do
Date.range(start_date, end_date)
end
end
@doc """
Returns a `Date.Range.t` representing
a given week of a year.
"""
@impl true
def week(_year, _week) do
{:error, :not_defined}
end
@doc """
Adds an `increment` number of `date_part`s
to a `year-month-day`.
`date_part` can be `:months` only.
"""
@impl true
def plus(year, month, day, date_part, increment, options \\ [])
def plus(year, month, day, :months, months, options) do
months_in_year = months_in_year(year)
{year_increment, new_month} = Cldr.Math.div_amod(month + months, months_in_year)
new_year = year + year_increment
new_day =
if Keyword.get(options, :coerce, false) do
max_new_day = days_in_month(new_year, new_month)
min(day, max_new_day)
else
day
end
{new_year, new_month, new_day}
end
@doc """
Returns the `t:Calendar.iso_days` format of
the specified date.
"""
@impl true
@spec naive_datetime_to_iso_days(
Calendar.year(),
Calendar.month(),
Calendar.day(),
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond()
) :: Calendar.iso_days()
def naive_datetime_to_iso_days(year, month, day, hour, minute, second, microsecond) do
{date_to_iso_days(year, month, day), time_to_day_fraction(hour, minute, second, microsecond)}
end
@doc """
Converts the `t:Calendar.iso_days` format to the
datetime format specified by this calendar.
"""
@spec naive_datetime_from_iso_days(Calendar.iso_days()) :: {
Calendar.year(),
Calendar.month(),
Calendar.day(),
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond()
}
@impl true
def naive_datetime_from_iso_days({days, day_fraction}) do
{year, month, day} = date_from_iso_days(days)
{hour, minute, second, microsecond} = time_from_day_fraction(day_fraction)
{year, month, day, hour, minute, second, microsecond}
end
@doc false
calendar_impl()
def parse_date(string) do
Cldr.Calendar.Parse.parse_date(string, __MODULE__)
end
@doc false
calendar_impl()
def parse_utc_datetime(string) do
Cldr.Calendar.Parse.parse_utc_datetime(string, __MODULE__)
end
@doc false
calendar_impl()
def parse_naive_datetime(string) do
Cldr.Calendar.Parse.parse_naive_datetime(string, __MODULE__)
end
@doc false
@impl Calendar
defdelegate parse_time(string), to: Calendar.ISO
@doc false
@impl Calendar
defdelegate day_rollover_relative_to_midnight_utc, to: Calendar.ISO
@doc false
@impl Calendar
defdelegate time_from_day_fraction(day_fraction), to: Calendar.ISO
@doc false
@impl Calendar
defdelegate time_to_day_fraction(hour, minute, second, microsecond), to: Calendar.ISO
@doc false
@impl Calendar
defdelegate date_to_string(year, month, day), to: Calendar.ISO
@doc false
@impl Calendar
defdelegate datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset
),
to: Calendar.ISO
@doc false
@impl Calendar
defdelegate naive_datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond
),
to: Calendar.ISO
@doc false
@impl Calendar
defdelegate time_to_string(hour, minute, second, microsecond), to: Calendar.ISO
@doc false
@impl Calendar
defdelegate valid_time?(hour, minute, second, microsecond), to: Calendar.ISO
defoverridable valid_date?: 3
defoverridable valid_time?: 4
defoverridable naive_datetime_to_string: 7
defoverridable date_to_string: 3
defoverridable time_to_day_fraction: 4
defoverridable time_from_day_fraction: 1
defoverridable day_rollover_relative_to_midnight_utc: 0
defoverridable parse_time: 1
defoverridable parse_naive_datetime: 1
defoverridable parse_utc_datetime: 1
defoverridable parse_date: 1
defoverridable naive_datetime_from_iso_days: 1
defoverridable naive_datetime_to_iso_days: 7
defoverridable year_of_era: 1
defoverridable quarter_of_year: 3
defoverridable month_of_year: 3
defoverridable week_of_year: 3
defoverridable iso_week_of_year: 3
defoverridable week_of_month: 3
defoverridable day_of_era: 3
defoverridable day_of_year: 3
defoverridable periods_in_year: 1
defoverridable months_in_year: 1
defoverridable weeks_in_year: 1
defoverridable days_in_year: 1
defoverridable days_in_month: 2
defoverridable days_in_week: 0
defoverridable year: 1
defoverridable quarter: 2
defoverridable month: 2
defoverridable week: 2
defoverridable plus: 5
defoverridable plus: 6
defoverridable epoch: 0
defoverridable cldr_calendar_type: 0
defoverridable calendar_base: 0
defoverridable calendar_year: 3
defoverridable extended_year: 3
defoverridable related_gregorian_year: 3
defoverridable cyclic_year: 3
end
end
# After a calendar module that `use`s this behaviour finishes compiling,
# generate its companion era data module.
def __after_compile__(env, _bytecode) do
  Cldr.Calendar.Era.define_era_module(env.module)
end
end | 26.888357 | 102 | 0.603654 |
ff346b445b68fa7c47b0fcb41b18277122729b74 | 1,446 | exs | Elixir | test/acceptances/scroll_test.exs | falood/tirexs | 9c63532cf2f50f77fb437f617d433741771d3619 | [
"Apache-2.0"
] | null | null | null | test/acceptances/scroll_test.exs | falood/tirexs | 9c63532cf2f50f77fb437f617d433741771d3619 | [
"Apache-2.0"
] | null | null | null | test/acceptances/scroll_test.exs | falood/tirexs | 9c63532cf2f50f77fb437f617d433741771d3619 | [
"Apache-2.0"
] | null | null | null | Code.require_file "../../test_helper.exs", __ENV__.file
defmodule Acceptances.ScrollTest do
use ExUnit.Case
import Tirexs.Search
import Tirexs.Bulk
require Tirexs.ElasticSearch
require Tirexs.Query
# End-to-end test against a running Elasticsearch: bulk-index eleven
# documents, delete one, refresh, then run a scrolled search and assert
# that a scroll id is returned.
test :scroll do
  settings = Tirexs.ElasticSearch.config()
  # Start from a clean index.
  Tirexs.ElasticSearch.delete("bear_test", settings)

  Tirexs.Bulk.store [index: "bear_test", refresh: false], settings do
    create id: 1, title: "bar1", description: "foo bar test"
    create id: 2, title: "bar2", description: "foo bar test"
    create id: 3, title: "bar3", description: "foo bar test"
    create id: 4, title: "bar4", description: "foo bar test"
    create id: 5, title: "bar5", description: "foo bar test"
    create id: 6, title: "bar6", description: "foo bar test"
    create id: 7, title: "bar7", description: "foo bar test"
    create id: 8, title: "bar8", description: "foo bar test"
    create id: 9, title: "bar9", description: "foo bar test"
    create id: 10, title: "bar10", description: "foo bar test"
    create id: 11, title: "bar11", description: "foo bar test"
    delete id: 11
    index id: 90, title: "barww"
  end

  # Make the bulk writes visible to search before querying.
  Tirexs.Manage.refresh("bear_test", settings)

  s = search [index: "bear_test"] do
    query do
      string "bar7"
    end
  end

  # Keep the scroll context alive for five minutes.
  body = Tirexs.Query.create_resource(s, settings, [scroll: "5m"])
  assert Tirexs.Query.result(body, :_scroll_id) != nil
end
end
| 32.863636 | 71 | 0.651452 |
ff34b2e572119042d7d92c9c777d469ed2c59442 | 831 | ex | Elixir | lib/encrypted_field.ex | jclem/encrypted_field | 29da394f9dfcfa4c3d8efde21d8bddcc4d904d9a | [
"MIT"
] | 6 | 2019-01-26T20:10:11.000Z | 2020-01-19T01:51:59.000Z | lib/encrypted_field.ex | jclem/encrypted_field | 29da394f9dfcfa4c3d8efde21d8bddcc4d904d9a | [
"MIT"
] | null | null | null | lib/encrypted_field.ex | jclem/encrypted_field | 29da394f9dfcfa4c3d8efde21d8bddcc4d904d9a | [
"MIT"
] | 4 | 2019-01-26T19:18:30.000Z | 2019-05-14T03:53:51.000Z | defmodule EncryptedField do
@moduledoc """
An encrypted field for use in an Ecto Schema
defmodule MyApp.User do
use Ecto.Schema
schema "users" do
field :token, EncryptedField
end
end
"""
import EncryptedField.Encryption
@behaviour Ecto.Type
@impl Ecto.Type
@spec type() :: :string
# Persisted in the database as a (ciphertext) string column.
def type, do: :string
@impl Ecto.Type
@spec cast(any()) :: {:ok, String.t() | nil}
# Accepts any value and normalises it to a string; nil passes through.
def cast(nil), do: {:ok, nil}
def cast(value), do: {:ok, to_string(value)}
@impl Ecto.Type
@spec dump(String.t() | nil) :: {:ok, String.t() | nil}
# Encrypt the plaintext on its way to the database; nil stays nil.
def dump(nil), do: {:ok, nil}
def dump(value), do: {:ok, encrypt(value)}
@impl Ecto.Type
@spec load(String.t() | nil) :: {:ok, String.t() | nil}
# Decrypt ciphertext read from the database; nil stays nil.
def load(nil), do: {:ok, nil}
def load(value), do: {:ok, decrypt(value)}
end
| 22.459459 | 57 | 0.593261 |
ff34d19b61a95de44139e20913b8fa0f7aab204c | 557 | exs | Elixir | test/concentrate/supervisor_test.exs | paulswartz/concentrate | a69aa51c16071f2669932005be810da198f622c8 | [
"MIT"
] | 19 | 2018-01-22T18:39:20.000Z | 2022-02-22T16:15:30.000Z | test/concentrate/supervisor_test.exs | mbta/concentrate | bae6e05713ed079b7da53867a01dd007861fb656 | [
"MIT"
] | 216 | 2018-01-22T14:22:39.000Z | 2022-03-31T10:30:31.000Z | test/concentrate/supervisor_test.exs | paulswartz/concentrate | a69aa51c16071f2669932005be810da198f622c8 | [
"MIT"
] | 5 | 2018-01-22T14:18:15.000Z | 2021-04-26T18:34:19.000Z | defmodule Concentrate.SupervisorTest do
@moduledoc false
use ExUnit.Case
import Concentrate.Supervisor
describe "start_link/0" do
test "can start the application" do
Application.ensure_all_started(:concentrate)
on_exit(fn ->
Application.stop(:concentrate)
end)
end
end
describe "children/1" do
test "builds the right number of children" do
# currently, the right number is 5: HTTP pool, alerts, GTFS, pipeline, health
actual = children([])
assert length(actual) == 5
end
end
end
| 22.28 | 83 | 0.678636 |
ff34db4ffc5381b1694edf62a291baf6fdd4cc2d | 8,545 | ex | Elixir | lib/buf_edit.ex | moxley/ex_buffer | 5efcb1ca6568175cda908fb2833207a084ff94a2 | [
"Apache-2.0"
] | 1 | 2018-10-17T12:16:15.000Z | 2018-10-17T12:16:15.000Z | lib/buf_edit.ex | moxley/buf_edit | 5efcb1ca6568175cda908fb2833207a084ff94a2 | [
"Apache-2.0"
] | null | null | null | lib/buf_edit.ex | moxley/buf_edit | 5efcb1ca6568175cda908fb2833207a084ff94a2 | [
"Apache-2.0"
] | null | null | null | defmodule BufEdit do
@moduledoc """
Programmable buffer editor, similar in concept to [ed](https://en.wikipedia.org/wiki/Ed_(text_editor)).
`BufEdit` reads a file into memory and provides a flexible API for editing it and writing
it back to a file.
Consider the following mix.exs file:
```elixir
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "0.1.0",
elixir: "~> 1.6",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"},
]
end
end
```
We'll use `BufEdit` to load the file and jump to a line matching a regular
expression:
```elixir
iex> buf = BufEdit.load("test/fixtures/mix.exs")
iex> buf = BufEdit.search(buf, ~r/defp deps/, :down)
%BufEdit{
col: 1,
filename: "test/fixtures/mix.exs",
lines: ["defmodule MyApp.MixProject do", " use Mix.Project", ""],
line_num: 22,
status: :ok
}
```
(The `lines` key value above has been abbreviated in this example)
If the line is found, the returned `%BufEdit{}` has a `:status` key set to `:ok`,
and the `:line_num` key is set to the new line number.
Now, let's say we want to remove the two comment lines from the dependencies
list and add a new dependency:
```elixir
iex> buf = BufEdit.search(buf, ~r/^\s*#/, :down)
%BufEdit{
col: 1,
filename: "test/fixtures/mix.exs",
lines: ["defmodule MyApp.MixProject do", " use Mix.Project", ""],
line_num: 24,
status: :ok
}
```
Now, we're at line 24. If you want to see line 24, use `line/1`:
```elixir
iex> BufEdit.line(buf)
" # {:dep_from_hexpm, \\"~> 0.3.0\\"},"
```
Yep, that's the line we're looking for.
The next step is to delete the two comments:
```elixir
iex> buf = BufEdit.delete_lines(buf, 2)
%BufEdit{
col: 1,
filename: "test/fixtures/mix.exs",
lines: ["defmodule MyApp.MixProject do", " use Mix.Project", ""],
line_num: 24,
status: :ok
}
iex> BufEdit.line(buf)
" ]"
```
Now that the lines are deleted, we're ready to add the new dependency:
```elixir
iex> buf = BufEdit.insert_line(buf, " {:buffer, \\"~> 0.1.0\\"}")
iex> BufEdit.dump(buf) |> IO.puts()
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "0.1.0",
elixir: "~> 1.6",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:buffer, "~> 0.1.0"}
]
end
end
```
Our new dependency is added! Now it's time to write the file, then we're done:
```elixir
iex> BufEdit.save(buf)
```
"""
@type t :: %__MODULE__{
filename: String.t(),
lines: [String.t()],
line_num: integer(),
col: integer(),
status: :ok | :not_found
}
# Buffer state: source filename, its lines, a 1-based cursor
# (line_num/col), and the status of the most recent operation.
defstruct filename: nil,
lines: [],
line_num: 1,
col: 1,
status: :ok
@doc "Load the file at `filename` into a new buffer, split into lines."
@spec load(filename :: String.t()) :: t()
def load(filename) do
  split_lines =
    filename
    |> File.read!()
    |> String.split(~r/\r\n|\n/)

  %__MODULE__{filename: filename, lines: split_lines}
end
@doc "Render the buffer's lines back into a single newline-joined string."
@spec dump(t()) :: String.t()
def dump(buf) do
  buf.lines |> Enum.join("\n")
end
@doc "Save the BufEdit to a file specified by the `:filename` value."
@spec save(t()) :: :ok | no_return()
def save(%{filename: filename} = buf) do
  File.write!(filename, dump(buf))
end
@doc "Get the current line."
@spec line(t()) :: String.t()
def line(%{lines: all_lines, line_num: num}) do
  # line_num is 1-based; Enum.at is 0-based.
  Enum.at(all_lines, num - 1)
end
@doc "Get a list of lines starting from the current line number."
@spec lines(buf :: t(), length :: integer()) :: [String.t()]
def lines(buf, length) do
  first = buf.line_num

  # Line numbers past the end of the buffer yield nil entries,
  # mirroring Enum.at/2's out-of-bounds behavior.
  Enum.map(first..(first + length - 1), fn num ->
    Enum.at(buf.lines, num - 1)
  end)
end
@doc "Search for a line using a regular expression."
@spec search(buf :: t(), pattern :: Regex.t(), direction :: :down | :up) :: t()
def search(buf, pattern, direction \\ :down) do
  case matching_line_with_index(buf, pattern, direction) do
    {_line, index} ->
      # Found: move the cursor to the matching line (index is 0-based).
      %{move_to(buf, index + 1, buf.col) | status: :ok}

    _ ->
      %{buf | status: :not_found}
  end
end
@doc "Insert a line at the current line number."
@spec insert_line(buf :: t(), line :: String.t()) :: t()
def insert_line(%{status: :ok} = buf, line) do
  lines = List.insert_at(buf.lines, buf.line_num - 1, line)

  # Advance past the inserted line, keeping the current column.
  # BUG FIX: the previous code passed `buf.col` as the column *offset* to
  # move_relative/3, which doubled the column on every insert; the offset
  # must be 0 to leave the column unchanged.
  buf
  |> set(:lines, lines)
  |> move_relative(1, 0)
  |> set(:status, :ok)
end

# Skip operation if :status != :ok
def insert_line(buf, _line) do
  buf
end
@doc "Insert multiple lines at the current line number."
@spec insert_lines(buf :: t(), lines :: [String.t()]) :: t()
def insert_lines(buf, lines) do
  # Insert each line in order; insert_line/2 advances the cursor as it goes.
  result = Enum.reduce(lines, buf, &insert_line(&2, &1))
  %{result | status: :ok}
end
@doc "Move to the given line number and column"
@spec move_to(buf :: t(), line_num :: integer(), col :: integer()) :: t()
def move_to(buf, new_line_num, new_col) do
  # Moving always resets the status to :ok.
  %{buf | line_num: new_line_num, col: new_col, status: :ok}
end
@doc "Move to a line offset from the current line."
@spec move_relative(buf :: t(), line_num_offset :: integer(), col_offset :: integer()) :: t()
def move_relative(%{line_num: num, col: col} = buf, line_num_offset, col_offset) do
  move_to(buf, num + line_num_offset, col + col_offset)
end
@doc "Move to the last line in the file."
@spec move_to_end(buf :: t()) :: t()
def move_to_end(buf) do
  # The column is left where it was; only the line changes.
  move_to(buf, length(buf.lines), buf.col)
end
@doc "Delete a number of lines from the current line number"
@spec delete_lines(buf :: t(), count :: integer()) :: t()
def delete_lines(buf, count) do
  # Deleting at a fixed line_num removes `count` consecutive lines,
  # since each delete shifts the following lines up by one.
  Enum.reduce(1..count, buf, fn _i, acc -> delete_line(acc) end)
end
@doc "Delete the current line"
@spec delete_line(buf :: t()) :: t()
def delete_line(%{status: :ok} = buf) do
  %{buf | lines: List.delete_at(buf.lines, buf.line_num - 1), status: :ok}
end

# Skip operation if :status != :ok
def delete_line(buf), do: buf
@doc "Replace a sub string within the current line matching pattern."
@spec replace_in_line(buf :: t(), search :: String.t(), replace :: String.t()) :: t()
def replace_in_line(buf, search, replace) do
  replace_line(buf, fn _buf, current ->
    String.replace(current, search, replace)
  end)
end
@doc """
Replace the current line with the output of a function.
Example:
Commenting out a line of Elixir:
```
iex> BufEdit.replace_line(buf, fn _buf, line -> "# \#{line}" end)
```
"""
@spec replace_line(t(), (t(), line :: String.t() -> String.t())) :: t()
def replace_line(%{status: :ok} = buf, fun) do
  # Capture the line, remove it, then insert whatever the callback produces.
  original = line(buf)
  removed = delete_line(buf)
  insert_line(removed, fun.(removed, original))
end

# Skip operation if :status != :ok
def replace_line(buf, _fun), do: buf
## PRIVATE FUNCTIONS

# Generic single-key struct setter used by the public API.
defp set(buf, key, value) do
  struct(buf, [{key, value}])
end
# Finds the first line matching `pattern`, scanning downward from the
# current line (:down) or upward from it (:up), current line included.
# Returns `{line, zero_based_index}` (index relative to the original,
# unreversed list) or nil when nothing matches.
defp matching_line_with_index(buf, pattern, direction) do
lines =
case direction do
:down -> buf.lines
# Reverse so Enum.find/2 below scans from the bottom upward.
:up -> Enum.reverse(buf.lines)
end
line_count = length(buf.lines)
lines
|> Enum.with_index()
|> Enum.map(fn {line, index} ->
# Remap indexes so they always refer to the original line order.
case direction do
:down -> {line, index}
:up -> {line, line_count - index - 1}
end
end)
|> Enum.filter(fn {_line, index} ->
at_line_num = index + 1
# Keep only lines on the search side of the cursor (inclusive).
case direction do
:down -> at_line_num >= buf.line_num
:up -> at_line_num <= buf.line_num
end
end)
|> Enum.find(fn {line, _index} ->
String.match?(line, pattern)
end)
end
end
| 24.840116 | 105 | 0.590053 |
ff3500499039f8d727dfe907587223643224bc85 | 250 | exs | Elixir | ttrack-backend/priv/repo/migrations/20200120163227_create_users.exs | kacper-marzecki/ttrack | 603cbe91800feb680996827f66eafd458b9b8a90 | [
"MIT"
] | null | null | null | ttrack-backend/priv/repo/migrations/20200120163227_create_users.exs | kacper-marzecki/ttrack | 603cbe91800feb680996827f66eafd458b9b8a90 | [
"MIT"
] | null | null | null | ttrack-backend/priv/repo/migrations/20200120163227_create_users.exs | kacper-marzecki/ttrack | 603cbe91800feb680996827f66eafd458b9b8a90 | [
"MIT"
] | null | null | null | defmodule Ttrack.Repo.Migrations.CreateUsers do
use Ecto.Migration
# Creates the users table holding the email plus hashed password and
# session secret columns, with inserted_at/updated_at timestamps.
def change do
create table(:users) do
add :email, :string
add :hashed_password, :string
add :session_secret, :string
timestamps()
end
end
end
| 16.666667 | 47 | 0.668 |
ff3503361ae28bc7444ef5a293db28a04009367f | 3,765 | ex | Elixir | crawler_challenge/lib/crawler_challenge/processes/processes.ex | tuliostarling/crawler-challenge | 1b948769db200d09ac0481b9fd71a175d876d20f | [
"Apache-2.0"
] | null | null | null | crawler_challenge/lib/crawler_challenge/processes/processes.ex | tuliostarling/crawler-challenge | 1b948769db200d09ac0481b9fd71a175d876d20f | [
"Apache-2.0"
] | 3 | 2021-05-08T08:50:39.000Z | 2022-02-10T19:40:38.000Z | crawler_challenge/lib/crawler_challenge/processes/processes.ex | tuliostarling/elixir-crawler | 1b948769db200d09ac0481b9fd71a175d876d20f | [
"Apache-2.0"
] | null | null | null | defmodule CrawlerChallenge.Processes do
@moduledoc """
The Processes context.
"""
use Timex
import Ecto.Query, warn: false
alias CrawlerChallenge.{Courts, Details, Movements, NebulexCache, Parties}
alias CrawlerChallenge.Repo
alias CrawlerChallenge.Processes.Process
alias Ecto.Multi
@regex_number_structure ~r/(([0-9]{7})-)(([0-9]{2}).)(([0-9]{4}).)(8.02.)([0-9]{4})/
@doc """
Returns the list of processes.
## Examples
iex> list_processes()
[%Process{}, ...]
"""
def list_processes do
Repo.all(Process)
end
@doc """
Gets a single process.
Raises `Ecto.NoResultsError` if the Process does not exist.
## Examples
iex> get_process!(123)
%Process{}
iex> get_process!(456)
** (Ecto.NoResultsError)
"""
def get_process!(id), do: Repo.get!(Process, id)
# Returns the process with the given process_number, or nil if none exists.
def get_process_by_number(number), do: Repo.get_by(Process, process_number: number)
# Fetches a process by id (raising if missing) and preloads the given associations.
def get_process_by_id_and_preload(%{id: id}, associations) do
Process
|> Repo.get!(id)
|> Repo.preload(associations)
end
# The empty string is rejected outright (also caught by the length
# check below; kept as an explicit fast path).
def valid_process_number(""), do: {:error, :invalid_process_number}

# Validates the process number format: no letters, exactly 25
# characters, and matching @regex_number_structure.
def valid_process_number(process_n) do
  cond do
    String.match?(process_n, ~r/[a-zA-Z]/) -> {:error, :invalid_process_number}
    String.length(process_n) != 25 -> {:error, :invalid_process_number}
    String.match?(process_n, @regex_number_structure) -> {:ok, :valid_process_number}
    true -> {:error, :invalid_process_number}
  end
end
# Checks the Nebulex cache for `process`. On a hit the cached value is a
# process id, which is reloaded from the database with all associations
# preloaded; on a miss `{:invalid, nil}` is returned.
def validate_date(process) do
case NebulexCache.get_cache(process) do
{:error, :not_found} ->
{:invalid, nil}
{:ok, data} ->
process = get_process!(data)
{:valid, Repo.preload(process, [:details, :movements, :parties, :court])}
end
end
# Returns the most recently inserted process (highest id) with the
# given associations preloaded.
def get_last_process_and_preload(associations) do
  Process
  |> order_by([p], desc: p.id)
  |> limit(1)
  |> Repo.one()
  |> Repo.preload(associations)
end
@doc """
Creates a process.
## Examples
iex> create_process(%{field: value})
{:ok, %Process{}}
iex> create_process(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_process(attrs \\ %{}) do
  Repo.insert(Process.changeset(%Process{}, attrs))
end
def insert_all_data(process, court, crawled_data) do
# Resolve the already-persisted court row so the new process references it.
%{id: court_id} = Courts.get_court_by_name(court.name)
process_params = %{"process_number" => process, "court_id" => court_id}
# Insert the process together with its movements, parties and details in
# a single transaction; any failure rolls the whole thing back.
Multi.new()
|> Multi.insert(:process, Process.changeset(%Process{}, process_params))
|> Movements.insert_movement_by_multi(crawled_data[:list_movements])
|> Parties.insert_parties_by_multi(crawled_data[:list_parties])
|> Details.insert_details_by_multi(crawled_data[:list_details])
|> Repo.transaction()
|> case do
{:ok, result} -> {:ok, result}
# Collapse the failed-operation tuple into a plain {:error, reason}.
{:error, _module, reason} -> {:error, reason}
end
end
@doc """
Updates a process.
## Examples
iex> update_process(process, %{field: new_value})
{:ok, %Process{}}
iex> update_process(process, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_process(%Process{} = process, attrs) do
  Repo.update(Process.changeset(process, attrs))
end
@doc """
Deletes a Process.
## Examples
iex> delete_process(process)
{:ok, %Process{}}
iex> delete_process(process)
{:error, %Ecto.Changeset{}}
"""
# NOTE(review): hard delete; cascade behavior for associated rows is
# governed by the migrations/schema, not visible here.
def delete_process(%Process{} = process) do
Repo.delete(process)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking process changes.
## Examples
iex> change_process(process)
%Ecto.Changeset{source: %Process{}}
"""
# Builds a changeset with no changes applied.
def change_process(%Process{} = process) do
Process.changeset(process, %{})
end
end
| 22.54491 | 86 | 0.634263 |
ff35034432804d0a53f0605dc06b42aad3dbf5cc | 240 | exs | Elixir | priv/repo/migrations/20210418041849_add_meta_to_article_content.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | priv/repo/migrations/20210418041849_add_meta_to_article_content.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | priv/repo/migrations/20210418041849_add_meta_to_article_content.exs | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule GroupherServer.Repo.Migrations.AddMetaToArticleContent do
use Ecto.Migration
def change do
alter table(:cms_jobs) do
add(:meta, :map)
end
alter table(:cms_repos) do
add(:meta, :map)
end
end
end
| 17.142857 | 67 | 0.675 |
ff3529e9743d5bd7cdd917ab2d9a7e3eef6999c2 | 1,535 | exs | Elixir | test/shim_test.exs | bitwalker/uniq_compat | 0cc564fbde21f872e4621d825af41953d66fc8ca | [
"Apache-2.0"
] | 3 | 2021-06-09T16:40:10.000Z | 2021-06-16T11:45:11.000Z | test/shim_test.exs | bitwalker/uniq_compat | 0cc564fbde21f872e4621d825af41953d66fc8ca | [
"Apache-2.0"
] | null | null | null | test/shim_test.exs | bitwalker/uniq_compat | 0cc564fbde21f872e4621d825af41953d66fc8ca | [
"Apache-2.0"
] | null | null | null | defmodule Uniq.Compat.ShimTests do
@moduledoc "These tests were lifted straight from elixir_uuid so we can ensure compatibility"
use ExUnit.Case, async: true
# Fixture file with one `name || expected || input` row per line,
# consumed by the generated tests at the bottom of this module.
@info_tests_path Path.join([__DIR__, "fixtures", "elixir_uuid_info_tests.txt"])
test "UUID.info/1 invalid argument type" do
assert UUID.info(:not_a_uuid) == {:error, :invalid_format}
end
test "UUID.info/1 invalid UUID" do
assert UUID.info("not_a_uuid") == {:error, :invalid_format}
end
# The bang variants raise ArgumentError instead of returning an error tuple.
test "UUID.info!/1 invalid argument type" do
assert_raise(
ArgumentError,
"invalid uuid: :invalid_format",
fn ->
UUID.info!(:not_a_uuid)
end
)
end
test "UUID.info!/1 invalid UUID" do
assert_raise(
ArgumentError,
"invalid uuid: :invalid_format",
fn ->
UUID.info!("not_a_uuid")
end
)
end
# Expand the lines in info_tests.txt into individual tests for the
# UUID.info!/1 and UUID.info/1 functions, assuming the lines are:
# test name || expected output || input value
# The fixture is read at compile time; `unquote` splices the raw strings
# into each test body.
for line <- File.stream!(@info_tests_path) do
[name, expected, input] = line |> String.split("||") |> Enum.map(&String.trim/1)
test "UUID.info!/1 #{name}" do
# `expected` is Elixir source for the expected term; evaluate it at
# test run time and compare with a pin match.
{expected, []} = Code.eval_string(unquote(expected))
result = UUID.info!(unquote(input))
assert ^expected = result
end
test "UUID.info/1 #{name}" do
{expected, []} = Code.eval_string(unquote(expected))
{:ok, result} = UUID.info(unquote(input))
assert ^expected = result
end
end
end
| 28.425926 | 95 | 0.64886 |
ff352dd63bb2bcac8e05a459d33ecac5ed575928 | 3,828 | ex | Elixir | lib/cartel/pusher/wns.ex | lucacorti/cartel | da86c1428b2c5ce1d8859c40e74e3499bf09dfb4 | [
"MIT"
] | 17 | 2016-03-09T19:44:55.000Z | 2022-01-31T20:23:28.000Z | lib/cartel/pusher/wns.ex | lucacorti/cartel | da86c1428b2c5ce1d8859c40e74e3499bf09dfb4 | [
"MIT"
] | 18 | 2016-03-09T21:10:52.000Z | 2020-11-17T09:05:17.000Z | lib/cartel/pusher/wns.ex | lucacorti/cartel | da86c1428b2c5ce1d8859c40e74e3499bf09dfb4 | [
"MIT"
] | 3 | 2016-03-11T22:08:11.000Z | 2016-05-19T05:32:17.000Z | defmodule Cartel.Pusher.Wns do
@moduledoc """
Microsoft WNS interface worker
"""
use GenServer
use Cartel.Pusher, message_module: Cartel.Message.Wns
alias Cartel.HTTP
alias Cartel.Message.Wns
alias HTTP.{Request, Response}
@wns_login_url "https://login.live.com/accesstoken.srf"
@doc """
Starts the pusher.

`args` must contain the WNS `:sid` and `:secret` credentials used to
obtain an access token on the first push (see `init/1`).
"""
@spec start_link(%{sid: String.t(), secret: String.t()}) :: GenServer.on_start()
def start_link(args), do: GenServer.start_link(__MODULE__, args)
# Cartel.Pusher callback: serialize the send through this worker process.
@impl Cartel.Pusher
def handle_push(pid, message, payload) do
GenServer.call(pid, {:push, message, payload})
end
@impl GenServer
# The access token is fetched lazily on the first push (see handle_call/3).
def init(conf), do: {:ok, %{conf: conf, token: nil}}
@impl GenServer
# No cached access token yet: authenticate against WNS first, then retry
# the same push with the token stored in state. A login failure stops the
# worker without replying (the caller receives an exit).
def handle_call(
{:push, message, payload},
from,
%{token: nil, conf: %{sid: sid, secret: secret}} = state
) do
case login(sid, secret) do
{:ok, token} ->
handle_call({:push, message, payload}, from, %{state | token: token})
{:error, reason} ->
{:stop, {:error, reason}, state}
end
end
@impl GenServer
# Sends the notification payload to the message's channel URI.
def handle_call({:push, %Wns{channel: channel} = message, payload}, _from, state) do
  headers = message_headers(message)

  request =
    channel
    |> Request.new("POST")
    |> Request.set_body(payload)
    |> Request.set_headers(headers)
    |> Request.put_header({"content-type", Wns.content_type(message)})
    # BUG FIX: the bearer token lives under :token in state (set by init/1
    # and the login clause above); the previous `state[:key]` was always
    # nil and crashed the binary concatenation.
    |> Request.put_header({"authorization", "Bearer " <> state.token})
    |> Request.put_header({"x-wns-type", message.type})

  case HTTP.request(%HTTP{}, request) do
    # WNS reports failures via 4xx/5xx; surface the response headers,
    # which carry the X-WNS-* diagnostic fields.
    {:ok, _, %Response{status: code, headers: headers}} when code >= 400 ->
      {:reply, {:error, headers}, state}

    {:ok, _, %Response{}} ->
      {:reply, :ok, state}

    {:error, reason} ->
      {:stop, {:error, reason}, state}
  end
end
# OAuth2 client-credentials flow against @wns_login_url; returns
# {:ok, access_token} or {:error, reason} on transport failure.
# NOTE(review): the HTTP status is not inspected here — a non-2xx response
# with an error body would yield {:ok, nil}; consider checking the status.
defp login(client_id, client_secret) do
body =
%{
grant_type: "client_credentials",
scope: "notify.windows.com",
client_id: client_id,
client_secret: client_secret
}
|> URI.encode_query()
request =
@wns_login_url
|> Request.new("POST")
|> Request.set_body(body)
|> Request.put_header({"content-type", "application/x-www-form-urlencoded"})
case HTTP.request(%HTTP{}, request) do
{:ok, _, %Response{body: body}} ->
{:ok, Jason.decode!(body)["access_token"]}
{:error, reason} ->
{:error, reason}
end
end
# Builds the optional X-WNS-* header list for `message` by threading an
# accumulator through each header-adding step.
defp message_headers(message) do
  [
    &add_message_header_cache_policy/2,
    &add_message_header_ttl/2,
    &add_message_header_suppress_popup/2,
    &add_message_header_request_for_status/2
  ]
  |> Enum.reduce([], fn add_header, acc -> add_header.(acc, message) end)
end
# Boolean cache_policy maps to "cache"/"no-cache"; any other value adds nothing.
defp add_message_header_cache_policy(headers, %Wns{cache_policy: cache?})
     when is_boolean(cache?) do
  value = if cache?, do: "cache", else: "no-cache"
  [{"X-WNS-Cache-Policy", value} | headers]
end

defp add_message_header_cache_policy(headers, _), do: headers
# A positive integer TTL becomes an X-WNS-TTL header; anything else adds nothing.
defp add_message_header_ttl(headers, %Wns{ttl: seconds})
     when is_integer(seconds) and seconds > 0 do
  [{"X-WNS-TTL", seconds} | headers]
end

defp add_message_header_ttl(headers, _), do: headers
# Pattern-matching `true` directly is equivalent to the old
# `is_boolean/1 and == true` guard.
defp add_message_header_suppress_popup(headers, %Wns{suppress_popup: true}) do
  [{"X-WNS-SuppressPopup", "true"} | headers]
end

defp add_message_header_suppress_popup(headers, _), do: headers
# Pattern-matching `true` directly is equivalent to the old
# `is_boolean/1 and == true` guard.
defp add_message_header_request_for_status(headers, %Wns{request_for_status: true}) do
  [{"X-WNS-RequestForStatus", "true"} | headers]
end

defp add_message_header_request_for_status(headers, _), do: headers
end
| 29 | 99 | 0.654127 |
ff352fe33cf3319771e1affa838e5749c50727bb | 619 | ex | Elixir | Microsoft.Azure.Management.EventHub/lib/microsoft/azure/management/event_hub/model/check_name_availability_parameter.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.EventHub/lib/microsoft/azure/management/event_hub/model/check_name_availability_parameter.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.EventHub/lib/microsoft/azure/management/event_hub/model/check_name_availability_parameter.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.EventHub.Model.CheckNameAvailabilityParameter do
@moduledoc """
Parameter supplied to check Namespace name availability operation
"""
# Auto-generated model: a single `name` field, JSON-encodable via Poison.
@derive [Poison.Encoder]
defstruct [
:"name"
]
@type t :: %__MODULE__{
:"name" => String.t
}
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.EventHub.Model.CheckNameAvailabilityParameter do
# Identity decode: the struct has no nested models to transform after parsing.
def decode(value, _options) do
value
end
end
| 23.807692 | 104 | 0.741519 |
ff3531b3dba9299bc9d5a4f885731226278415e2 | 34,159 | exs | Elixir | test/hexpm/web/controllers/api/release_controller_test.exs | hubertpompecki/hexpm | 5cd4208b07a70bf2e1490930bf5d577978793b50 | [
"Apache-2.0"
] | null | null | null | test/hexpm/web/controllers/api/release_controller_test.exs | hubertpompecki/hexpm | 5cd4208b07a70bf2e1490930bf5d577978793b50 | [
"Apache-2.0"
] | null | null | null | test/hexpm/web/controllers/api/release_controller_test.exs | hubertpompecki/hexpm | 5cd4208b07a70bf2e1490930bf5d577978793b50 | [
"Apache-2.0"
] | null | null | null | defmodule Hexpm.Web.API.ReleaseControllerTest do
use Hexpm.ConnCase, async: true
alias Hexpm.Accounts.AuditLog
alias Hexpm.Repository.{Package, RegistryBuilder, Release, Repository}
setup do
# Shared fixtures: a package owner, a package with one published release
# (0.0.1, docs already present) and an unrelated repository.
user = insert(:user)
repository = insert(:repository)
package = insert(:package, package_owners: [build(:package_owner, user: user)])
release = insert(:release, package: package, version: "0.0.1", has_docs: true)
%{
user: user,
repository: repository,
package: package,
release: release
}
end
describe "POST /api/packages/:name/releases" do
test "create release and new package", %{user: user} do
meta = %{
name: Fake.sequence(:package),
version: "1.0.0",
description: "Domain-specific language."
}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/packages/#{meta.name}/releases", create_tar(meta, []))
result = json_response(conn, 201)
assert result["url"] =~ "api/packages/#{meta.name}/releases/1.0.0"
assert result["html_url"] =~ "packages/#{meta.name}/1.0.0"
# Publishing an unknown package creates it and makes the publisher an owner.
package = Hexpm.Repo.get_by!(Package, name: meta.name)
package_owner = Hexpm.Repo.one!(assoc(package, :owners))
assert package_owner.id == user.id
# The publish is audit-logged with package/release details.
log = Hexpm.Repo.one!(AuditLog)
assert log.user_id == user.id
assert log.repository_id == 1
assert log.action == "release.publish"
assert log.params["package"]["name"] == meta.name
assert log.params["release"]["version"] == "1.0.0"
end
test "update package", %{user: user, package: package} do
meta = %{name: package.name, version: "1.0.0", description: "awesomeness"}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/packages/#{package.name}/releases", create_tar(meta, []))
assert conn.status == 201
result = json_response(conn, 201)
assert result["url"] =~ "/api/packages/#{package.name}/releases/1.0.0"
assert result["html_url"] =~ "packages/#{package.name}/1.0.0"
# Publishing a new release also refreshes the package metadata.
assert Hexpm.Repo.get_by(Package, name: package.name).meta.description == "awesomeness"
end
test "create release checks if package name is correct", %{user: user, package: package} do
# Tarball metadata name differs from the URL's package name -> 422.
meta = %{name: Fake.sequence(:package), version: "0.1.0", description: "description"}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/packages/#{package.name}/releases", create_tar(meta, []))
result = json_response(conn, 422)
assert result["errors"]["name"] == "metadata does not match package name"
meta = %{name: package.name, version: "1.0.0", description: "description"}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/packages/#{Fake.sequence(:package)}/releases", create_tar(meta, []))
# Bad error message but /api/publish solves it
# https://github.com/hexpm/hexpm/issues/489
result = json_response(conn, 422)
assert result["errors"]["name"] == "has already been taken"
end
end
describe "POST /api/publish" do
test "create release and new package", %{user: user} do
meta = %{
name: Fake.sequence(:package),
version: "1.0.0",
description: "Domain-specific language."
}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
result = json_response(conn, 201)
assert result["url"] =~ "api/packages/#{meta.name}/releases/1.0.0"
assert result["html_url"] =~ "packages/#{meta.name}/1.0.0"
# Publishing an unknown package creates it and makes the publisher an owner.
package = Hexpm.Repo.get_by!(Package, name: meta.name)
package_owner = Hexpm.Repo.one!(assoc(package, :owners))
assert package_owner.id == user.id
# The publish is audit-logged with package/release details.
log = Hexpm.Repo.one!(AuditLog)
assert log.user_id == user.id
assert log.repository_id == 1
assert log.action == "release.publish"
assert log.params["package"]["name"] == meta.name
assert log.params["release"]["version"] == "1.0.0"
end
test "update package", %{user: user, package: package} do
meta = %{name: package.name, version: "1.0.0", description: "awesomeness"}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
assert conn.status == 201
result = json_response(conn, 201)
assert result["url"] =~ "/api/packages/#{package.name}/releases/1.0.0"
assert result["html_url"] =~ "packages/#{package.name}/1.0.0"
assert Hexpm.Repo.get_by(Package, name: package.name).meta.description == "awesomeness"
end
# A non-owner cannot publish to an existing package.
test "create release authorizes existing package", %{package: package} do
other_user = insert(:user)
meta = %{name: package.name, version: "0.1.0", description: "description"}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(other_user))
|> post("api/publish", create_tar(meta, []))
|> json_response(403)
end
# Invalid credentials -> 401 plus WWW-Authenticate challenge.
test "create release authorizes" do
meta = %{name: Fake.sequence(:package), version: "0.1.0", description: "description"}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", "WRONG")
|> post("api/publish", create_tar(meta, []))
assert conn.status == 401
assert get_resp_header(conn, "www-authenticate") == ["Basic realm=hex"]
end
test "update package authorizes", %{package: package} do
meta = %{name: package.name, version: "1.0.0", description: "Domain-specific language."}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", "WRONG")
|> post("api/publish", create_tar(meta, []))
assert conn.status == 401
assert get_resp_header(conn, "www-authenticate") == ["Basic realm=hex"]
end
# Metadata type errors are rejected with a 422 validation response.
test "create package validates", %{user: user, package: package} do
meta = %{name: package.name, version: "1.0.0", links: "invalid", description: "description"}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
result = json_response(conn, 422)
assert result["errors"]["meta"]["links"] == "expected type map(string)"
end
test "create package casts proplist metadata", %{user: user, package: package} do
meta = %{
name: package.name,
version: "1.0.0",
links: %{"link" => "http://localhost"},
extra: %{"key" => "value"},
description: "description"
}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
json_response(conn, 201)
# links/extra round-trip into the stored package metadata.
package = Hexpm.Repo.get_by!(Package, name: package.name)
assert package.meta.links == %{"link" => "http://localhost"}
assert package.meta.extra == %{"key" => "value"}
end
test "create releases", %{user: user} do
meta = %{
name: Fake.sequence(:package),
app: "other",
version: "0.0.1",
description: "description"
}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/packages/#{meta.name}/releases", create_tar(meta, []))
result = json_response(conn, 201)
assert result["meta"]["app"] == "other"
assert result["url"] =~ "/api/packages/#{meta.name}/releases/0.0.1"
assert result["html_url"] =~ "packages/#{meta.name}/0.0.1"
# Publish a second version of the same package.
meta = %{name: meta.name, version: "0.0.2", description: "description"}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
|> json_response(201)
# Both releases exist and sort newest-first.
package = Hexpm.Repo.get_by!(Package, name: meta.name)
package_id = package.id
assert [
%Release{package_id: ^package_id, version: %Version{major: 0, minor: 0, patch: 2}},
%Release{package_id: ^package_id, version: %Version{major: 0, minor: 0, patch: 1}}
] = Release.all(package) |> Hexpm.Repo.all() |> Release.sort()
Hexpm.Repo.get_by!(assoc(package, :releases), version: "0.0.1")
end
test "create release also creates package", %{user: user} do
meta = %{name: Fake.sequence(:package), version: "1.0.0", description: "Web framework"}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
|> json_response(201)
Hexpm.Repo.get_by!(Package, name: meta.name)
end
# Republishing the same version within the grace period returns 200, not 201.
test "update release", %{user: user} do
meta = %{name: Fake.sequence(:package), version: "0.0.1", description: "description"}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/packages/#{meta.name}/releases", create_tar(meta, []))
|> json_response(201)
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
|> json_response(200)
package = Hexpm.Repo.get_by!(Package, name: meta.name)
Hexpm.Repo.get_by!(assoc(package, :releases), version: "0.0.1")
# Each publish, including the update, gets its own audit log entry.
assert [%AuditLog{action: "release.publish"}, %AuditLog{action: "release.publish"}] =
Hexpm.Repo.all(AuditLog)
end
test "update release with different and unresolved requirements", %{
user: user,
package: package
} do
name = Fake.sequence(:package)
reqs = [%{name: package.name, requirement: "~> 0.0.1", app: "app", optional: false}]
meta = %{name: name, version: "0.0.1", requirements: reqs, description: "description"}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
result = json_response(conn, 201)
assert result["requirements"] == %{
package.name => %{"app" => "app", "optional" => false, "requirement" => "~> 0.0.1"}
}
# re-publish with unresolved requirement
reqs = [%{name: package.name, requirement: "~> 9.0", app: "app", optional: false}]
meta = %{name: name, version: "0.0.1", requirements: reqs, description: "description"}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/packages/#{meta.name}/releases", create_tar(meta, []))
result = json_response(conn, 422)
assert result["errors"]["requirements"][package.name] =~ ~s(Failed to use "#{package.name}")
end
test "can update release within package one hour grace period", %{
user: user,
package: package,
release: release
} do
Ecto.Changeset.change(
package,
inserted_at: NaiveDateTime.add(NaiveDateTime.utc_now(), -36000, :second)
)
|> Hexpm.Repo.update!()
Ecto.Changeset.change(
release,
inserted_at: NaiveDateTime.add(NaiveDateTime.utc_now(), -36000, :second)
)
|> Hexpm.Repo.update!()
meta = %{name: package.name, version: "0.0.1", description: "description"}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
|> json_response(200)
end
test "cannot update release after grace period", %{
user: user,
package: package,
release: release
} do
Ecto.Changeset.change(package, inserted_at: %{NaiveDateTime.utc_now() | year: 2000})
|> Hexpm.Repo.update!()
Ecto.Changeset.change(release, inserted_at: %{NaiveDateTime.utc_now() | year: 2000})
|> Hexpm.Repo.update!()
meta = %{name: package.name, version: "0.0.1", description: "description"}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
result = json_response(conn, 422)
assert result["errors"]["inserted_at"] ==
"can only modify a release up to one hour after creation"
end
test "create releases with requirements", %{user: user, package: package} do
reqs = [%{name: package.name, requirement: "~> 0.0.1", app: "app", optional: false}]
meta = %{
name: Fake.sequence(:package),
version: "0.0.1",
requirements: reqs,
description: "description"
}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
result = json_response(conn, 201)
assert result["requirements"] == %{
package.name => %{"app" => "app", "optional" => false, "requirement" => "~> 0.0.1"}
}
release =
Hexpm.Repo.get_by!(Package, name: meta.name)
|> assoc(:releases)
|> Hexpm.Repo.get_by!(version: "0.0.1")
|> Hexpm.Repo.preload(:requirements)
assert [%{app: "app", requirement: "~> 0.0.1", optional: false}] = release.requirements
end
test "create releases with requirements validates requirement", %{
user: user,
package: package
} do
reqs = [%{name: package.name, requirement: "~> invalid", app: "app", optional: false}]
meta = %{
name: Fake.sequence(:package),
version: "0.0.1",
requirements: reqs,
description: "description"
}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
result = json_response(conn, 422)
assert result["errors"]["requirements"][package.name] ==
~s(invalid requirement: "~> invalid")
end
test "create releases with requirements validates package name", %{user: user} do
reqs = [%{name: "nonexistant_package", requirement: "~> 1.0", app: "app", optional: false}]
meta = %{
name: Fake.sequence(:package),
version: "0.0.1",
requirements: reqs,
description: "description"
}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
result = json_response(conn, 422)
assert result["errors"]["requirements"]["nonexistant_package"] ==
"package does not exist in repository \"hexpm\""
end
test "create releases with requirements validates resolution", %{user: user, package: package} do
reqs = [%{name: package.name, requirement: "~> 1.0", app: "app", optional: false}]
meta = %{
name: Fake.sequence(:package),
version: "0.1.0",
requirements: reqs,
description: "description"
}
conn =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
result = json_response(conn, 422)
assert result["errors"]["requirements"][package.name] =~
~s(Failed to use "#{package.name}" because)
end
test "create release updates registry", %{user: user, package: package} do
RegistryBuilder.full_build(Repository.hexpm())
registry_before = Hexpm.Store.get(nil, :s3_bucket, "registry.ets.gz", [])
reqs = [%{name: package.name, app: "app", requirement: "~> 0.0.1", optional: false}]
meta = %{
name: Fake.sequence(:package),
app: "app",
version: "0.0.1",
requirements: reqs,
description: "description"
}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/publish", create_tar(meta, []))
|> json_response(201)
registry_after = Hexpm.Store.get(nil, :s3_bucket, "registry.ets.gz", [])
assert registry_before != registry_after
end
end
describe "POST /api/repos/:repository/packages/:name/releases" do
test "new package authorizes", %{user: user, repository: repository} do
meta = %{
name: Fake.sequence(:package),
version: "1.0.0",
description: "Domain-specific language."
}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/repos/#{repository.name}/packages/#{meta.name}/releases", create_tar(meta, []))
|> json_response(403)
end
test "existing package authorizes", %{user: user, repository: repository} do
package =
insert(
:package,
repository_id: repository.id,
package_owners: [build(:package_owner, user: user)]
)
meta = %{name: package.name, version: "1.0.0", description: "Domain-specific language."}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/repos/#{repository.name}/packages/#{meta.name}/releases", create_tar(meta, []))
|> json_response(403)
end
end
describe "POST /api/repos/:repository/publish" do
test "new package authorizes", %{user: user, repository: repository} do
meta = %{
name: Fake.sequence(:package),
version: "1.0.0",
description: "Domain-specific language."
}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/repos/#{repository.name}/publish", create_tar(meta, []))
|> json_response(403)
end
test "existing package authorizes", %{user: user, repository: repository} do
package =
insert(
:package,
repository_id: repository.id,
package_owners: [build(:package_owner, user: user)]
)
meta = %{name: package.name, version: "1.0.0", description: "Domain-specific language."}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/repos/#{repository.name}/publish", create_tar(meta, []))
|> json_response(403)
end
test "new package requries write permission", %{user: user, repository: repository} do
insert(:repository_user, repository: repository, user: user, role: "read")
meta = %{
name: Fake.sequence(:package),
version: "1.0.0",
description: "Domain-specific language."
}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/repos/#{repository.name}/publish", create_tar(meta, []))
|> json_response(403)
refute Hexpm.Repo.get_by(Package, name: meta.name)
end
test "repository needs to have active billing", %{user: user} do
repository = insert(:repository, billing_active: false)
insert(:repository_user, repository: repository, user: user, role: "write")
meta = %{
name: Fake.sequence(:package),
version: "1.0.0",
description: "Domain-specific language."
}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/repos/#{repository.name}/publish", create_tar(meta, []))
|> json_response(403)
refute Hexpm.Repo.get_by(Package, name: meta.name)
end
test "new package", %{user: user, repository: repository} do
insert(:repository_user, repository: repository, user: user, role: "write")
meta = %{
name: Fake.sequence(:package),
version: "1.0.0",
description: "Domain-specific language."
}
result =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/repos/#{repository.name}/publish", create_tar(meta, []))
|> json_response(201)
assert result["url"] =~ "api/repos/#{repository.name}/packages/#{meta.name}/releases/1.0.0"
package = Hexpm.Repo.get_by!(Package, name: meta.name)
assert package.repository_id == repository.id
end
test "existing package", %{user: user, repository: repository} do
package =
insert(
:package,
repository_id: repository.id,
package_owners: [build(:package_owner, user: user)]
)
insert(:repository_user, repository: repository, user: user)
meta = %{name: package.name, version: "1.0.0", description: "Domain-specific language."}
result =
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/repos/#{repository.name}/publish", create_tar(meta, []))
|> json_response(201)
assert result["url"] =~ "api/repos/#{repository.name}/packages/#{meta.name}/releases/1.0.0"
package = Hexpm.Repo.get_by!(Package, name: meta.name)
assert package.repository_id == repository.id
end
test "can update private package after grace period", %{user: user, repository: repository} do
package =
insert(
:package,
package_owners: [build(:package_owner, user: user)],
repository_id: repository.id
)
insert(
:release,
package: package,
version: "0.0.1",
inserted_at: %{NaiveDateTime.utc_now() | year: 2000}
)
insert(:repository_user, repository: repository, user: user)
meta = %{name: package.name, version: "0.0.1", description: "description"}
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/repos/#{repository.name}/publish", create_tar(meta, []))
|> json_response(200)
end
end
describe "DELETE /api/packages/:name/releases/:version" do
@tag isolation: :serializable
test "delete release validates release age", %{user: user, package: package, release: release} do
Ecto.Changeset.change(package, inserted_at: %{NaiveDateTime.utc_now() | year: 2000})
|> Hexpm.Repo.update!()
Ecto.Changeset.change(release, inserted_at: %{NaiveDateTime.utc_now() | year: 2000})
|> Hexpm.Repo.update!()
conn =
build_conn()
|> put_req_header("authorization", key_for(user))
|> delete("api/packages/#{package.name}/releases/0.0.1")
result = json_response(conn, 422)
assert result["errors"]["inserted_at"] ==
"can only delete a release up to one hour after creation"
end
@tag isolation: :serializable
test "delete release", %{user: user, package: package, release: release} do
Ecto.Changeset.change(release, inserted_at: %{NaiveDateTime.utc_now() | year: 2030})
|> Hexpm.Repo.update!()
build_conn()
|> put_req_header("authorization", key_for(user))
|> delete("api/packages/#{package.name}/releases/0.0.1")
|> response(204)
refute Hexpm.Repo.get_by(Package, name: package.name)
refute Hexpm.Repo.get_by(assoc(package, :releases), version: "0.0.1")
[log] = Hexpm.Repo.all(AuditLog)
assert log.user_id == user.id
assert log.action == "release.revert"
assert log.params["package"]["name"] == package.name
assert log.params["release"]["version"] == "0.0.1"
end
end
describe "DELETE /api/repos/:repository/packages/:name/releases/:version" do
@tag isolation: :serializable
test "authorizes", %{user: user, repository: repository} do
package =
insert(
:package,
repository_id: repository.id,
package_owners: [build(:package_owner, user: user)]
)
insert(:release, package: package, version: "0.0.1")
build_conn()
|> put_req_header("authorization", key_for(user))
|> delete("api/repos/#{repository.name}/packages/#{package.name}/releases/0.0.1")
|> response(403)
assert Hexpm.Repo.get_by(Package, name: package.name)
assert Hexpm.Repo.get_by(assoc(package, :releases), version: "0.0.1")
end
@tag isolation: :serializable
test "repository needs to have active billing", %{user: user} do
repository = insert(:repository, billing_active: false)
insert(:repository_user, repository: repository, user: user, role: "write")
package =
insert(
:package,
repository_id: repository.id,
package_owners: [build(:package_owner, user: user)]
)
insert(:release, package: package, version: "0.0.1")
build_conn()
|> put_req_header("authorization", key_for(user))
|> delete("api/repos/#{repository.name}/packages/#{package.name}/releases/0.0.1")
|> response(403)
assert Hexpm.Repo.get_by(Package, name: package.name)
assert Hexpm.Repo.get_by(assoc(package, :releases), version: "0.0.1")
end
@tag isolation: :serializable
test "delete release", %{user: user, repository: repository} do
package =
insert(
:package,
repository_id: repository.id,
package_owners: [build(:package_owner, user: user)]
)
insert(:release, package: package, version: "0.0.1")
insert(:repository_user, repository: repository, user: user)
build_conn()
|> put_req_header("authorization", key_for(user))
|> delete("api/repos/#{repository.name}/packages/#{package.name}/releases/0.0.1")
|> response(204)
refute Hexpm.Repo.get_by(Package, name: package.name)
refute Hexpm.Repo.get_by(assoc(package, :releases), version: "0.0.1")
end
@tag isolation: :serializable
test "can delete private package release after grace period", %{
user: user,
repository: repository
} do
package =
insert(
:package,
repository_id: repository.id,
package_owners: [build(:package_owner, user: user)]
)
insert(
:release,
package: package,
version: "0.0.1",
inserted_at: %{NaiveDateTime.utc_now() | year: 2000}
)
insert(:repository_user, repository: repository, user: user)
build_conn()
|> put_req_header("authorization", key_for(user))
|> delete("api/repos/#{repository.name}/packages/#{package.name}/releases/0.0.1")
|> response(204)
end
end
describe "GET /api/packages/:name/releases/:version" do
test "get release", %{package: package, release: release} do
result =
build_conn()
|> get("api/packages/#{package.name}/releases/#{release.version}")
|> json_response(200)
assert result["url"] =~ "/api/packages/#{package.name}/releases/#{release.version}"
assert result["html_url"] =~ "/packages/#{package.name}/#{release.version}"
assert result["docs_html_url"] =~ "/#{package.name}/#{release.version}"
assert result["version"] == "#{release.version}"
end
test "get unknown release", %{package: package} do
conn = get(build_conn(), "api/packages/#{package.name}/releases/1.2.3")
assert conn.status == 404
conn = get(build_conn(), "api/packages/unknown/releases/1.2.3")
assert conn.status == 404
end
test "get release with requirements", %{package: package, release: release} do
package2 = insert(:package)
insert(:release, package: package2, version: "0.0.1")
insert(:requirement, release: release, dependency: package2, requirement: "~> 0.0.1")
result =
build_conn()
|> get("api/packages/#{package.name}/releases/#{release.version}")
|> json_response(200)
assert result["url"] =~ "/api/packages/#{package.name}/releases/#{release.version}"
assert result["html_url"] =~ "/packages/#{package.name}/#{release.version}"
assert result["version"] == "#{release.version}"
assert result["requirements"][package2.name]["requirement"] == "~> 0.0.1"
end
end
describe "GET /api/repos/:repository/packages/:name/releases/:version" do
test "get release authorizes", %{user: user, repository: repository} do
package = insert(:package, repository_id: repository.id)
insert(:release, package: package, version: "0.0.1")
build_conn()
|> put_req_header("authorization", key_for(user))
|> get("api/repos/#{repository.name}/packages/#{package.name}/releases/0.0.1")
|> json_response(403)
end
test "get release returns 403 for non-existant repository", %{user: user} do
package = insert(:package)
insert(:release, package: package, version: "0.0.1")
build_conn()
|> put_req_header("authorization", key_for(user))
|> get("api/repos/NONEXISTANT_REPOSITORY/packages/#{package.name}/releases/0.0.1")
|> json_response(403)
end
test "get release", %{user: user, repository: repository} do
package = insert(:package, repository_id: repository.id)
insert(:release, package: package, version: "0.0.1")
insert(:repository_user, repository: repository, user: user)
result =
build_conn()
|> put_req_header("authorization", key_for(user))
|> get("api/repos/#{repository.name}/packages/#{package.name}/releases/0.0.1")
|> json_response(200)
assert result["url"] =~
"/api/repos/#{repository.name}/packages/#{package.name}/releases/0.0.1"
assert result["version"] == "0.0.1"
end
end
describe "GET /api/packages/:name/releases/:version/downloads" do
setup do
user = insert(:user)
repository = insert(:repository)
package = insert(:package, package_owners: [build(:package_owner, user: user)])
relprev = insert(:release, package: package, version: "0.0.1")
release = insert(:release, package: package, version: "0.0.2")
insert(:download, release: relprev, downloads: 8, day: ~D[2000-01-01])
insert(:download, release: release, downloads: 1, day: ~D[2000-01-01])
insert(:download, release: release, downloads: 3, day: ~D[2000-02-01])
insert(:download, release: release, downloads: 2, day: ~D[2000-02-07])
insert(:download, release: release, downloads: 4, day: ~D[2000-02-08])
Hexpm.Repo.refresh_view(Hexpm.Repository.ReleaseDownload)
%{
user: user,
repository: repository,
package: package,
release: release
}
end
test "get release downloads (all by default)", %{package: package, release: release} do
result =
build_conn()
|> get("api/packages/#{package.name}/releases/#{release.version}")
|> json_response(200)
assert result["version"] == "#{release.version}"
assert result["downloads"] == 10
result =
build_conn()
|> get("api/packages/#{package.name}/releases/#{release.version}?downloads=all")
|> json_response(200)
assert result["version"] == "#{release.version}"
assert result["downloads"] == 10
result =
build_conn()
|> get("api/packages/#{package.name}/releases/#{release.version}?downloads=xxx")
|> json_response(200)
assert result["version"] == "#{release.version}"
assert result["downloads"] == 10
end
test "get release downloads by day", %{package: package, release: release} do
result =
build_conn()
|> get("api/packages/#{package.name}/releases/#{release.version}?downloads=day")
|> json_response(200)
assert result["version"] == "#{release.version}"
assert result["downloads"] == [
["2000-01-01", 1],
["2000-02-01", 3],
["2000-02-07", 2],
["2000-02-08", 4]
]
end
test "get release downloads by month", %{package: package, release: release} do
result =
build_conn()
|> get("api/packages/#{package.name}/releases/#{release.version}?downloads=month")
|> json_response(200)
assert result["version"] == "#{release.version}"
assert result["downloads"] == [
["2000-01", 1],
["2000-02", 9]
]
end
end
end
| 35.471443 | 101 | 0.612313 |
ff3536b805cb9aba4b5df1955f27a3056550b660 | 13,855 | exs | Elixir | lib/elixir/test/elixir/kernel/guard_test.exs | lytedev/elixir | dc25bb8e1484e2328eef819402d268dec7bb908a | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/guard_test.exs | lytedev/elixir | dc25bb8e1484e2328eef819402d268dec7bb908a | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/guard_test.exs | lytedev/elixir | dc25bb8e1484e2328eef819402d268dec7bb908a | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule Kernel.GuardTest do
use ExUnit.Case, async: true
describe "Kernel.defguard(p) usage" do
defmodule GuardsInMacros do
defguard is_foo(atom) when atom == :foo
defmacro is_compile_time_foo(atom) when is_foo(atom) do
quote do: unquote(__MODULE__).is_foo(unquote(atom))
end
end
test "guards can be used in other macros in the same module" do
require GuardsInMacros
assert GuardsInMacros.is_foo(:foo)
refute GuardsInMacros.is_foo(:baz)
assert GuardsInMacros.is_compile_time_foo(:foo)
end
defmodule GuardsInFuns do
defguard is_foo(atom) when atom == :foo
defguard is_equal(foo, bar) when foo == bar
def is_foobar(atom) when is_foo(atom) do
is_foo(atom)
end
end
test "guards can be used in other funs in the same module" do
require GuardsInFuns
assert GuardsInFuns.is_foo(:foo)
refute GuardsInFuns.is_foo(:bar)
end
test "guards do not change code evaluation semantics" do
require GuardsInFuns
x = 1
assert GuardsInFuns.is_equal(x = 2, x) == false
assert x == 2
end
defmodule MacrosInGuards do
defmacro is_foo(atom) do
quote do
unquote(atom) == :foo
end
end
defguard is_foobar(atom) when is_foo(atom) or atom == :bar
end
test "macros can be used in other guards in the same module" do
require MacrosInGuards
assert MacrosInGuards.is_foobar(:foo)
assert MacrosInGuards.is_foobar(:bar)
refute MacrosInGuards.is_foobar(:baz)
end
defmodule GuardsInGuards do
defguard is_foo(atom) when atom == :foo
defguard is_foobar(atom) when is_foo(atom) or atom == :bar
end
test "guards can be used in other guards in the same module" do
require GuardsInGuards
assert GuardsInGuards.is_foobar(:foo)
assert GuardsInGuards.is_foobar(:bar)
refute GuardsInGuards.is_foobar(:baz)
end
defmodule DefaultArgs do
defguard is_divisible(value, remainder \\ 2)
when is_integer(value) and rem(value, remainder) == 0
end
test "permits default values in args" do
require DefaultArgs
assert DefaultArgs.is_divisible(2)
refute DefaultArgs.is_divisible(1)
assert DefaultArgs.is_divisible(3, 3)
refute DefaultArgs.is_divisible(3, 4)
end
test "doesn't allow matching in args" do
assert_raise ArgumentError, ~r"invalid syntax in defguard", fn ->
defmodule Integer.Args do
defguard foo(value, 1) when is_integer(value)
end
end
assert_raise ArgumentError, ~r"invalid syntax in defguard", fn ->
defmodule String.Args do
defguard foo(value, "string") when is_integer(value)
end
end
assert_raise ArgumentError, ~r"invalid syntax in defguard", fn ->
defmodule Atom.Args do
defguard foo(value, :atom) when is_integer(value)
end
end
assert_raise ArgumentError, ~r"invalid syntax in defguard", fn ->
defmodule Tuple.Args do
defguard foo(value, {foo, bar}) when is_integer(value)
end
end
end
defmodule GuardFromMacro do
defmacro __using__(_) do
quote do
defguard is_even(value) when is_integer(value) and rem(value, 2) == 0
end
end
end
test "defguard defines a guard from inside another macro" do
defmodule UseGuardFromMacro do
use GuardFromMacro
def assert! do
assert is_even(0)
refute is_even(1)
end
end
UseGuardFromMacro.assert!()
end
defmodule IntegerPrivateGuards do
defguardp is_even(value) when is_integer(value) and rem(value, 2) == 0
def is_even_and_large?(value) when is_even(value) and value > 100, do: true
def is_even_and_large?(_), do: false
def is_even_and_small?(value) do
if is_even(value) and value <= 100, do: true, else: false
end
end
test "defguardp defines private guards that work inside and outside guard clauses" do
assert IntegerPrivateGuards.is_even_and_large?(102)
refute IntegerPrivateGuards.is_even_and_large?(98)
refute IntegerPrivateGuards.is_even_and_large?(99)
refute IntegerPrivateGuards.is_even_and_large?(103)
assert IntegerPrivateGuards.is_even_and_small?(98)
refute IntegerPrivateGuards.is_even_and_small?(99)
refute IntegerPrivateGuards.is_even_and_small?(102)
refute IntegerPrivateGuards.is_even_and_small?(103)
assert_raise CompileError, ~r"cannot invoke local is_even/1 inside guard", fn ->
defmodule IntegerPrivateGuardUtils do
import IntegerPrivateGuards
def is_even_and_large?(value) when is_even(value) and value > 100, do: true
def is_even_and_large?(_), do: false
end
end
assert_raise CompileError, ~r"undefined function is_even/1", fn ->
defmodule IntegerPrivateFunctionUtils do
import IntegerPrivateGuards
def is_even_and_small?(value) do
if is_even(value) and value <= 100, do: true, else: false
end
end
end
end
test "requires a proper macro name" do
assert_raise ArgumentError, ~r"invalid syntax in defguard", fn ->
defmodule(LiteralUsage, do: defguard("literal is bad"))
end
assert_raise ArgumentError, ~r"invalid syntax in defguard", fn ->
defmodule(RemoteUsage, do: defguard(Remote.call(is_bad)))
end
end
test "handles overriding appropriately" do
assert_raise CompileError, ~r"defmacro (.*?) already defined as def", fn ->
defmodule OverridenFunUsage do
def foo(bar), do: bar
defguard foo(bar) when bar
end
end
assert_raise CompileError, ~r"defmacro (.*?) already defined as defp", fn ->
defmodule OverridenPrivateFunUsage do
defp foo(bar), do: bar
defguard foo(bar) when bar
end
end
assert_raise CompileError, ~r"defmacro (.*?) already defined as defmacrop", fn ->
defmodule OverridenPrivateFunUsage do
defmacrop foo(bar), do: bar
defguard foo(bar) when bar
end
end
assert_raise CompileError, ~r"defmacrop (.*?) already defined as def", fn ->
defmodule OverridenFunUsage do
def foo(bar), do: bar
defguardp foo(bar) when bar
end
end
assert_raise CompileError, ~r"defmacrop (.*?) already defined as defp", fn ->
defmodule OverridenPrivateFunUsage do
defp foo(bar), do: bar
defguardp foo(bar) when bar
end
end
assert_raise CompileError, ~r"defmacrop (.*?) already defined as defmacro", fn ->
defmodule OverridenPrivateFunUsage do
defmacro foo(bar), do: bar
defguardp foo(bar) when bar
end
end
end
test "does not allow multiple guard clauses" do
assert_raise ArgumentError, ~r"invalid syntax in defguard", fn ->
defmodule MultiGuardUsage do
defguardp foo(bar, baz) when bar == 1 when baz == 2
end
end
end
test "does not accept a block" do
assert_raise CompileError, ~r"undefined function defguard/2", fn ->
defmodule OnelinerBlockUsage do
defguard(foo(bar), do: one_liner)
end
end
assert_raise CompileError, ~r"undefined function defguard/2", fn ->
defmodule MultilineBlockUsage do
defguard foo(bar) do
multi
liner
end
end
end
assert_raise CompileError, ~r"undefined function defguard/2", fn ->
defmodule ImplAndBlockUsage do
defguard(foo(bar) when both_given, do: error)
end
end
end
end
describe "Kernel.defguard compilation" do
test "refuses to compile non-sensical code" do
assert_raise CompileError, ~r"cannot invoke local undefined/1 inside guard", fn ->
defmodule UndefinedUsage do
defguard foo(function) when undefined(function)
end
end
end
test "fails on expressions not allowed in guards" do
# Slightly unique errors
assert_raise ArgumentError, ~r{invalid args for operator "in"}, fn ->
defmodule RuntimeListUsage do
defguard foo(bar, baz) when bar in baz
end
end
assert_raise CompileError, ~r"cannot invoke remote function", fn ->
defmodule BadErlangFunctionUsage do
defguard foo(bar) when :erlang.binary_to_atom("foo")
end
end
assert_raise CompileError, ~r"cannot invoke remote function", fn ->
defmodule SendUsage do
defguard foo(bar) when send(self(), :baz)
end
end
# Consistent errors
assert_raise ArgumentError, ~r"invalid expression in guard, ! is not allowed", fn ->
defmodule SoftNegationLogicUsage do
defguard foo(logic) when !logic
end
end
assert_raise ArgumentError, ~r"invalid expression in guard, && is not allowed", fn ->
defmodule SoftAndLogicUsage do
defguard foo(soft, logic) when soft && logic
end
end
assert_raise ArgumentError, ~r"invalid expression in guard, || is not allowed", fn ->
defmodule SoftOrLogicUsage do
defguard foo(soft, logic) when soft || logic
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule LocalCallUsage do
defguard foo(local, call) when local.(call)
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule ComprehensionUsage do
defguard foo(bar) when for(x <- [1, 2, 3], do: x * bar)
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule AliasUsage do
defguard foo(bar) when alias(bar)
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule ImportUsage do
defguard foo(bar) when import(bar)
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule RequireUsage do
defguard foo(bar) when require(bar)
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule SuperUsage do
defguard foo(bar) when super(bar)
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule SpawnUsage do
defguard foo(bar) when spawn(& &1)
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule ReceiveUsage do
defguard foo(bar) when receive(do: (baz -> baz))
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule CaseUsage do
defguard foo(bar) when case(bar, do: (baz -> :baz))
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule CondUsage do
defguard foo(bar) when cond(do: (bar -> :baz))
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule TryUsage do
defguard foo(bar) when try(do: (baz -> baz))
end
end
assert_raise CompileError, ~r"invalid expression in guard", fn ->
defmodule WithUsage do
defguard foo(bar) when with(do: (baz -> baz))
end
end
end
end
describe "Kernel.Utils.defguard/2" do
test "generates unquoted variables based on context" do
args = quote(do: [foo, bar, baz])
expr = quote(do: foo + bar + baz)
{:ok, goal} =
Code.string_to_quoted("""
case Macro.Env.in_guard? __CALLER__ do
true -> quote do
:erlang.+(:erlang.+(unquote(foo), unquote(bar)), unquote(baz))
end
false -> quote do
{foo, bar, baz} = {unquote(foo), unquote(bar), unquote(baz)}
:erlang.+(:erlang.+(foo, bar), baz)
end
end
""")
assert expand_defguard_to_string(args, expr) == Macro.to_string(goal)
end
test "doesn't obscure unused variables" do
args = quote(do: [foo, bar, baz])
expr = quote(do: foo + bar)
{:ok, goal} =
Code.string_to_quoted("""
case Macro.Env.in_guard? __CALLER__ do
true -> quote do
:erlang.+(unquote(foo), unquote(bar))
end
false -> quote do
{foo, bar} = {unquote(foo), unquote(bar)}
:erlang.+(foo, bar)
end
end
""")
assert expand_defguard_to_string(args, expr) == Macro.to_string(goal)
end
test "handles re-used variables" do
args = quote(do: [foo, bar, baz])
expr = quote(do: foo + foo + bar + baz)
{:ok, goal} =
Code.string_to_quoted("""
case(Macro.Env.in_guard?(__CALLER__)) do
true ->
quote() do
:erlang.+(:erlang.+(:erlang.+(unquote(foo), unquote(foo)), unquote(bar)), unquote(baz))
end
false ->
quote() do
{foo, bar, baz} = {unquote(foo), unquote(bar), unquote(baz)}
:erlang.+(:erlang.+(:erlang.+(foo, foo), bar), baz)
end
end
""")
assert expand_defguard_to_string(args, expr) == Macro.to_string(goal)
end
defp expand_defguard_to_string(args, expr) do
require Kernel.Utils
quote(do: Kernel.Utils.defguard(unquote(args), unquote(expr)))
|> Macro.expand(__ENV__)
|> Macro.to_string()
end
end
end
| 30.317287 | 101 | 0.620498 |
ff355885e94fae91eada4cde94720b277dfecf07 | 7,838 | ex | Elixir | lib/elixir_sense/plugins/ecto/query.ex | maciej-szlosarczyk/elixir_sense | aeadfd74c0cacb4370b7f3fae1d891ae75421b87 | [
"Unlicense",
"MIT"
] | null | null | null | lib/elixir_sense/plugins/ecto/query.ex | maciej-szlosarczyk/elixir_sense | aeadfd74c0cacb4370b7f3fae1d891ae75421b87 | [
"Unlicense",
"MIT"
] | null | null | null | lib/elixir_sense/plugins/ecto/query.ex | maciej-szlosarczyk/elixir_sense | aeadfd74c0cacb4370b7f3fae1d891ae75421b87 | [
"Unlicense",
"MIT"
] | null | null | null | defmodule ElixirSense.Plugins.Ecto.Query do
@moduledoc false
alias ElixirSense.Core.Introspection
alias ElixirSense.Core.Metadata
alias ElixirSense.Core.Source
alias ElixirSense.Core.State
alias ElixirSense.Providers.Suggestion.Complete
# We'll keep these values hard-coded until Ecto provides the same information
# using docs' metadata.
@joins [
:join,
:inner_join,
:cross_join,
:left_join,
:right_join,
:full_join,
:inner_lateral_join,
:left_lateral_join
]
@from_join_opts [
as: "A named binding for the from/join.",
prefix: "The prefix to be used for the from/join when issuing a database query.",
hints: "A string or a list of strings to be used as database hints."
]
@join_opts [on: "A query expression or keyword list to filter the join."]
@var_r "[a-z][a-zA-Z0-9_]*"
@mod_r "[A-Z][a-zA-Z0-9_\.]*"
@binding_r "(#{@var_r}) in (#{@mod_r}|assoc\\(\\s*#{@var_r},\\s*\\:#{@var_r}\\s*\\))"
# Returns one completion suggestion per Ecto association of the schema
# `type` whose inspected name starts with `hint`. Each suggestion carries
# the related schema module and its docs summary.
def find_assoc_suggestions(type, hint) do
  for assoc <- type.__schema__(:associations),
      assoc_str = inspect(assoc),
      String.starts_with?(assoc_str, hint) do
    assoc_mod = type.__schema__(:association, assoc).related
    {doc, _} = Introspection.get_module_docs_summary(assoc_mod)

    %{
      type: :generic,
      kind: :field,
      label: assoc_str,
      detail: "(Ecto association) #{inspect(assoc_mod)}",
      documentation: doc
    }
  end
end
# Returns every from/join clause and option suggestion matching `hint`.
def find_options(hint) do
  Enum.concat([
    clauses_suggestions(hint),
    joins_suggestions(hint),
    join_opts_suggestions(hint)
  ])
end
# Suggests Ecto.Query macros whose first documented parameter is "query"
# (e.g. from-clause keywords), filtered by `hint`.
defp clauses_suggestions(hint) do
  funs = Complete.get_module_funs(Ecto.Query, false)

  # Matching `arity, arity` keeps only entries whose two arity fields agree;
  # the last pattern element requires the first arg name to be "query".
  for {name, arity, arity, :macro, {doc, _}, _, ["query" | _]} <- funs,
      clause = to_string(name),
      String.starts_with?(clause, hint) do
    clause_to_suggestion(clause, doc, "from clause")
  end
end
# Suggests the specialized join kinds from @joins, filtered by `hint`.
# `:join` itself is excluded here - presumably it is already covered by
# `clauses_suggestions/1` as a regular Ecto.Query macro (TODO confirm).
defp joins_suggestions(hint) do
  for name <- @joins -- [:join],
      clause = to_string(name),
      String.starts_with?(clause, hint) do
    # "inner_join" -> "inner join" for the human-readable doc string.
    join_kind = String.replace(clause, "_", " ")
    doc = "A #{join_kind} query expression."
    clause_to_suggestion(clause, doc, "from clause")
  end
end
# Suggests join-only (@join_opts) and shared from/join (@from_join_opts)
# options whose name starts with `hint`, labelling each with its scope.
defp join_opts_suggestions(hint) do
  for {name, doc} <- @join_opts ++ @from_join_opts,
      clause = to_string(name),
      String.starts_with?(clause, hint) do
    type = if Keyword.has_key?(@join_opts, name), do: "join", else: "from/join"
    clause_to_suggestion(clause, doc, "#{type} option")
  end
end
# Lists the fields of the Ecto schema `type` whose name starts with `hint`,
# as `%{name: atom, type: ecto_type}` maps sorted by field name.
# Returns `[]` when `type` is not compiled or does not export `__schema__/1`
# (i.e. is not an Ecto schema).
defp find_fields(type, hint) do
  with {:module, _} <- Code.ensure_compiled(type),
       true <- function_exported?(type, :__schema__, 1) do
    for field <- Enum.sort(type.__schema__(:fields)),
        name = to_string(field),
        String.starts_with?(name, hint) do
      %{name: field, type: type.__schema__(:type, field)}
    end
  else
    _ ->
      []
  end
end
# Returns the associations of `type` that are owned by `type` and whose
# owner key is the given field - i.e. the relations this (foreign-key)
# field participates in.
defp find_field_relations(field, type) do
  type.__schema__(:associations)
  |> Enum.map(&type.__schema__(:association, &1))
  |> Enum.filter(fn assoc ->
    assoc.owner == type and assoc.owner_key == field.name
  end)
end
# Completion for query bindings. A dotted hint ("x.fie") suggests fields of
# the binding's schema; any other hint suggests the binding variables
# themselves. `bindings` maps variable names to `%{type: schema_module}`.
def bindings_suggestions(hint, bindings) do
  case String.split(hint, ".") do
    [var, field_hint] ->
      # May be nil when the variable or its type is unknown;
      # find_fields/2 then returns [].
      type = bindings[var][:type]

      type
      |> find_fields(field_hint)
      |> Enum.map(fn f -> field_to_suggestion(f, type) end)

    _ ->
      for {name, %{type: type}} <- bindings,
          String.starts_with?(name, hint) do
        binding_to_suggestion(name, type)
      end
  end
end
# Builds a suggestion map for a from/join clause or option. Only the doc's
# summary and its "Keywords example(s)" sections are kept, and just the
# first paragraph of each example section.
defp clause_to_suggestion(option, doc, detail) do
  doc_str =
    doc
    |> doc_sections()
    |> Enum.filter(fn {k, _v} -> k in [:summary, "Keywords examples", "Keywords example"] end)
    |> Enum.map_join("\n\n", fn
      {:summary, text} ->
        text

      {_, text} ->
        # Keep only the first paragraph of the example section.
        [first | _] = String.split(text, "\n\n")
        if first == "", do: "", else: "### Example\n\n#{first}"
    end)

  %{
    type: :generic,
    kind: :property,
    label: option,
    # Inserted as "option: " so the user can type the value right away.
    insert_text: "#{option}: ",
    detail: "(#{detail}) Ecto.Query",
    documentation: doc_str
  }
end
# Builds a variable suggestion for a query binding, documented with the
# docs summary of its schema module.
defp binding_to_suggestion(binding, type) do
  {doc, _} = Introspection.get_module_docs_summary(type)

  %{
    type: :generic,
    kind: :variable,
    label: binding,
    detail: "(query binding) #{inspect(type)}",
    documentation: doc
  }
end
# Builds a field suggestion, documenting the field's Ecto type and - when
# the field backs one or more associations of `origin` - the related
# schema(s) and keys.
defp field_to_suggestion(field, origin) do
  type_str = inspect(field.type)
  associations = find_field_relations(field, origin)

  relations =
    Enum.map_join(associations, ", ", fn
      %{related: related, related_key: related_key} ->
        "`#{inspect(related)} (#{inspect(related_key)})`"

      %{related: related} ->
        # Ecto.Association.ManyToMany does not define :related_key
        "`#{inspect(related)}`"
    end)

  related_info = if relations == "", do: "", else: "* **Related:** #{relations}"

  doc = """
  The `#{inspect(field.name)}` field of `#{inspect(origin)}`.
  * **Type:** `#{type_str}`
  #{related_info}
  """

  %{
    type: :generic,
    kind: :field,
    label: to_string(field.name),
    detail: "Ecto field",
    documentation: doc
  }
end
# Infers the schema module bound by a binding expression.
# A `Some.Module` alias is resolved through the env/buffer metadata;
# an `assoc(var, :name)` expression is resolved through the schema of the
# already-known binding `var`. Anything else yields nil (unknown type).
defp infer_type({:__aliases__, _, mods}, _vars, env, buffer_metadata) do
  mod = Module.concat(mods)
  {actual_mod, _, _} = actual_mod_fun({mod, nil}, false, env, buffer_metadata)
  actual_mod
end

defp infer_type({:assoc, _, [{var, _, _}, assoc]}, vars, _env, _buffer_metadata) do
  var_type = vars[to_string(var)][:type]

  # Only follow the association if the variable's type is a known Ecto
  # schema; otherwise the implicit nil return marks the type as unknown.
  if var_type && function_exported?(var_type, :__schema__, 2) do
    var_type.__schema__(:association, assoc).related
  end
end

defp infer_type(_, _vars, _env, _buffer_metadata) do
  nil
end
# Extracts `var in Schema` / `var in assoc(var, :name)` bindings from the
# source text of a from call at `pos` and of every join option recorded in
# `func_info.options_so_far`. Returns a map of variable name =>
# `%{type: schema_module | nil}`. Matches are folded left to right so an
# `assoc/2` expression can refer to a variable bound earlier.
def extract_bindings(prefix, %{pos: {{line, col}, _}} = func_info, env, buffer_metadata) do
  func_code = Source.text_after(prefix, line, col)

  from_matches = Regex.scan(~r/^.+\(?\s*(#{@binding_r})/, func_code)

  join_matches =
    for {join, {line, col, _}} when join in @joins <- func_info.options_so_far,
        code = Source.text_after(prefix, line, col),
        match <- Regex.scan(~r/^#{join}\:\s*(#{@binding_r})/, code) do
      match
    end

  matches = from_matches ++ join_matches

  Enum.reduce(matches, %{}, fn [_, _, var, expr], bindings ->
    case Code.string_to_quoted(expr) do
      {:ok, expr_ast} ->
        type = infer_type(expr_ast, bindings, env, buffer_metadata)
        Map.put(bindings, var, %{type: type})

      _ ->
        # Unparseable binding expression: skip it, keep earlier bindings.
        bindings
    end
  end)
end

# Fallback when the cursor is not inside a from/join call with a position.
def extract_bindings(_prefix, _func_info, _env, _buffer_metadata) do
  %{}
end
# Splits a markdown doc into `[{:summary, text}, {:detail, text} | sections]`
# where each trailing section is `{title, body}` for a `## Title` heading.
# Summary is the first line before the first heading; detail is the rest.
defp doc_sections(doc) do
  [summary_and_detail | rest] = String.split(doc, "##")
  summary_and_detail_parts = Source.split_lines(summary_and_detail, parts: 2)
  summary = summary_and_detail_parts |> Enum.at(0, "") |> String.trim()
  detail = summary_and_detail_parts |> Enum.at(1, "") |> String.trim()

  sections =
    Enum.map(rest, fn text ->
      [title, body] = Source.split_lines(text, parts: 2)
      {String.trim(title), String.trim(body, "\n")}
    end)

  [{:summary, summary}, {:detail, detail}] ++ sections
end
# TODO: Centralize
# Resolves `{module, fun}` against the buffer's imports/aliases/metadata.
# When `elixir_prefix` is true the hint was written as `Elixir.Mod`, so
# aliases must not be applied.
defp actual_mod_fun({mod, fun}, elixir_prefix, env, buffer_metadata) do
  %State.Env{imports: imports, aliases: aliases, module: module} = env
  %Metadata{mods_funs_to_positions: mods_funs, types: metadata_types} = buffer_metadata

  Introspection.actual_mod_fun(
    {mod, fun},
    imports,
    if(elixir_prefix, do: [], else: aliases),
    module,
    mods_funs,
    metadata_types
  )
end
end
| 28.605839 | 96 | 0.615208 |
ff35599a764a8fe6dce4fda699d197be3f6f3e24 | 3,696 | ex | Elixir | apps/fz_http/lib/fz_http_web/live/mfa_live/register_steps_component.ex | CloudFire-LLC/cloudfire-ce | 416ea0d9c9528790fdf70c432aa4eb507d7b2074 | [
"Apache-2.0"
] | null | null | null | apps/fz_http/lib/fz_http_web/live/mfa_live/register_steps_component.ex | CloudFire-LLC/cloudfire-ce | 416ea0d9c9528790fdf70c432aa4eb507d7b2074 | [
"Apache-2.0"
] | 1 | 2020-04-24T01:53:41.000Z | 2020-04-24T01:53:41.000Z | apps/fz_http/lib/fz_http_web/live/mfa_live/register_steps_component.ex | CloudFire-LLC/cloudfire-ce | 416ea0d9c9528790fdf70c432aa4eb507d7b2074 | [
"Apache-2.0"
] | null | null | null | defmodule FzHttpWeb.MFA.RegisterStepsComponent do
@moduledoc """
MFA registration steps
"""
use Phoenix.Component
import FzHttpWeb.ErrorHelpers
# Dispatches to the component function named by `assigns[:step]` - one of
# the functions defined below (:pick_type, :register, :verify, :save) -
# passing the assigns through unchanged.
def render_step(assigns) do
  apply(__MODULE__, assigns[:step], [assigns])
end
# Step 1: choose the authenticator type. Only TOTP is currently enabled;
# the native/portable options remain commented out in the template.
# Submitting the form sends "next" to the parent live component.
def pick_type(assigns) do
  ~H"""
  <form id="mfa-method-form" phx-target={@parent} phx-submit="next">
    <h4>Choose authenticator type</h4>
    <hr>
    <div class="control">
      <div>
        <label class="radio">
          <input type="radio" name="type" value="totp" checked>
          Time-Based One-Time Password
        </label>
      </div>
      <!-- Coming Soon
      <div>
        <label class="radio disabled">
          <input type="radio" name="type" value="native" disabled>
          Native (Windows Hello, iOS Face ID, etc)
        </label>
      </div>
      <div>
        <label class="radio disabled">
          <input type="radio" name="type" value="portable" disabled>
          Portable (YubiKey-like products)
        </label>
      </div>
      -->
    </div>
  </form>
  """
end
# Step 2: register the authenticator. Generates a fresh TOTP secret and an
# otpauth:// URI, rendered both as a QR code (RenderQR JS hook) and as a
# copyable Base32 key. The secret travels to the next step Base64-encoded
# in a hidden input.
def register(assigns) do
  secret = NimbleTOTP.secret()

  assigns =
    Map.merge(
      assigns,
      %{
        secret: secret,
        # Base32 is what authenticator apps expect for manual entry;
        # Base64 is used for the hidden form field round-trip.
        secret_base32_encoded: Base.encode32(secret),
        secret_base64_encoded: Base.encode64(secret),
        uri:
          NimbleTOTP.otpauth_uri(
            "Firezone:#{assigns[:user].email}",
            secret,
            issuer: "Firezone"
          )
      }
    )

  ~H"""
  <form id="mfa-method-form" phx-target={@parent} phx-submit="next">
    <h4>Register Authenticator</h4>
    <hr>
    <input value={@secret_base64_encoded} type="hidden" name="secret" />
    <div class="has-text-centered">
      <canvas data-qrdata={@uri} id="register-totp" phx-hook="RenderQR" />
      <pre class="mb-4"
        id="copy-totp-key"
        phx-hook="ClipboardCopy"
        data-clipboard={@secret_base32_encoded}><code><%= format_key(@secret_base32_encoded) %></code></pre>
    </div>
    <div class="field is-horizontal">
      <div class="field-label is-normal">
        <label class="label">Name</label>
      </div>
      <div class="field-body">
        <div class="field">
          <p class="control">
            <input class="input" type="text" name="name"
              placeholder="Name" value="My Authenticator" required />
          </p>
        </div>
      </div>
    </div>
  </form>
  """
end
# Step 3: verify a code generated by the freshly registered authenticator.
# A validation error on :code in the changeset marks the input as invalid.
def verify(assigns) do
  ~H"""
  <form id="mfa-method-form" phx-target={@parent} phx-submit="next">
    <h4>Verify Code</h4>
    <hr>
    <div class="field is-horizontal">
      <div class="field-label is-normal">
        <label class="label">Code</label>
      </div>
      <div class="field-body">
        <div class="field">
          <p class="control">
            <input class={"input #{input_error_class(@changeset, :code)}"}
              type="text" name="code" placeholder="123456" required />
          </p>
        </div>
      </div>
    </div>
  </form>
  """
end
# Step 4: final confirmation; submitting sends "save" to the parent.
# Shows a generic error when the accumulated changeset is invalid.
def save(assigns) do
  ~H"""
  <form id="mfa-method-form" phx-target={@parent} phx-submit="save">
    Confirm to save this Authentication method.
    <%= if !@changeset.valid? do %>
      <p class="help is-danger">
        Something went wrong. Try saving again or starting over.
      </p>
    <% end %>
  </form>
  """
end
# Formats a Base32 TOTP key for display by grouping its characters in
# blocks of four separated by single spaces (e.g. "ABCD EFGH").
defp format_key(string) do
  string
  |> String.graphemes()
  |> Enum.chunk_every(4)
  |> Enum.map_join(" ", &Enum.join/1)
end
end
| 25.846154 | 112 | 0.52895 |
ff356144884803b573e6ced3c5289dec43a452ec | 6,445 | ex | Elixir | clients/data_fusion/lib/google_api/data_fusion/v1/model/policy.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/data_fusion/lib/google_api/data_fusion/v1/model/policy.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/data_fusion/lib/google_api/data_fusion/v1/model/policy.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataFusion.V1.Model.Policy do
@moduledoc """
An Identity and Access Management (IAM) policy, which specifies access controls for Google Cloud resources. A `Policy` is a collection of `bindings`. A `binding` binds one or more `members`, or principals, to a single `role`. Principals can be user accounts, service accounts, Google groups, and domains (such as G Suite). A `role` is a named list of permissions; each `role` can be an IAM predefined role or a user-created custom role. For some types of Google Cloud resources, a `binding` can also specify a `condition`, which is a logical expression that allows access to a resource only if the expression evaluates to `true`. A condition can add constraints based on attributes of the request, the resource, or both. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). **JSON example:** { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 } **YAML example:** bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3 For a description of IAM and its features, see the [IAM 
documentation](https://cloud.google.com/iam/docs/).
## Attributes
* `auditConfigs` (*type:* `list(GoogleApi.DataFusion.V1.Model.AuditConfig.t)`, *default:* `nil`) - Specifies cloud audit logging configuration for this policy.
* `bindings` (*type:* `list(GoogleApi.DataFusion.V1.Model.Binding.t)`, *default:* `nil`) - Associates a list of `members`, or principals, with a `role`. Optionally, may specify a `condition` that determines how and when the `bindings` are applied. Each of the `bindings` must contain at least one principal. The `bindings` in a `Policy` can refer to up to 1,500 principals; up to 250 of these principals can be Google groups. Each occurrence of a principal counts towards these limits. For example, if the `bindings` grant 50 different roles to `user:alice@example.com`, and not to any other principal, then you can add another 1,450 principals to the `bindings` in the `Policy`.
* `etag` (*type:* `String.t`, *default:* `nil`) - `etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other. It is strongly suggested that systems make use of the `etag` in the read-modify-write cycle to perform policy updates in order to avoid race conditions: An `etag` is returned in the response to `getIamPolicy`, and systems are expected to put that etag in the request to `setIamPolicy` to ensure that their change will be applied to the same version of the policy. **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost.
* `version` (*type:* `integer()`, *default:* `nil`) - Specifies the format of the policy. Valid values are `0`, `1`, and `3`. Requests that specify an invalid value are rejected. Any operation that affects conditional role bindings must specify version `3`. This requirement applies to the following operations: * Getting a policy that includes a conditional role binding * Adding a conditional role binding to a policy * Changing a conditional role binding in a policy * Removing any role binding, with or without a condition, from a policy that includes conditions **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. If a policy does not include any conditions, operations on that policy may specify any valid version or leave the field unset. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
"""
use GoogleApi.Gax.ModelBase

@type t :: %__MODULE__{
        :auditConfigs => list(GoogleApi.DataFusion.V1.Model.AuditConfig.t()) | nil,
        :bindings => list(GoogleApi.DataFusion.V1.Model.Binding.t()) | nil,
        :etag => String.t() | nil,
        :version => integer() | nil
      }

# Auto-generated field declarations (see the generator note at the top of
# this file); `field/1,2` comes from GoogleApi.Gax.ModelBase. Each field is
# documented in detail in the @moduledoc above.
field(:auditConfigs, as: GoogleApi.DataFusion.V1.Model.AuditConfig, type: :list)
field(:bindings, as: GoogleApi.DataFusion.V1.Model.Binding, type: :list)
field(:etag)
field(:version)
end
# Delegates Poison decoding to the generated model's decode/2
# (provided via GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder, for: GoogleApi.DataFusion.V1.Model.Policy do
  def decode(value, options) do
    GoogleApi.DataFusion.V1.Model.Policy.decode(value, options)
  end
end
# Delegates Poison encoding to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.DataFusion.V1.Model.Policy do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 115.089286 | 2,012 | 0.75128 |
ff356d1c47c84d1559f73b5ee55fa52aee39c52d | 3,026 | ex | Elixir | lib/boom_notifier.ex | rafaeliga/boom | 809d5575a3ca74239e4bdb906cc2de5fd6cd3e4e | [
"MIT"
] | null | null | null | lib/boom_notifier.ex | rafaeliga/boom | 809d5575a3ca74239e4bdb906cc2de5fd6cd3e4e | [
"MIT"
] | null | null | null | lib/boom_notifier.ex | rafaeliga/boom | 809d5575a3ca74239e4bdb906cc2de5fd6cd3e4e | [
"MIT"
] | null | null | null | defmodule BoomNotifier do
@moduledoc false
# Responsible for sending a notification to each notifier every time an
# exception is raised.
alias BoomNotifier.ErrorStorage
alias BoomNotifier.NotifierSenderServer
require Logger
# Invokes `callback` with the notifier module and its options taken from
# `settings`. When either required key (:notifier, :options) is absent,
# logs which one(s) are missing and does not invoke the callback.
def run_callback(settings, callback) do
  case Enum.reject([:notifier, :options], &Keyword.has_key?(settings, &1)) do
    [] ->
      callback.(settings[:notifier], settings[:options])

    [missing_key] ->
      Logger.error("Settings error: #{inspect(missing_key)} parameter missing")

    missing_keys ->
      Logger.error(
        "Settings error: The following parameters are missing: #{inspect(missing_keys)}"
      )
  end
end
# Runs `callback` once per configured notifier. Settings either describe a
# single notifier directly (no :notifiers key) or hold a list of per-notifier
# settings under :notifiers.
def walkthrough_notifiers(settings, callback) do
  case Keyword.get(settings, :notifiers) do
    nil ->
      run_callback(settings, callback)

    notifiers_settings when is_list(notifiers_settings) ->
      Enum.each(notifiers_settings, &run_callback(&1, callback))
  end
end
# Calls the notifier's optional validate_config/1 callback (only when the
# module is loaded and exports it) and logs any {:error, message} it
# returns, tagged with the notifier's short (last-segment) name.
def validate_notifiers(notifier, options) do
  if Code.ensure_loaded?(notifier) && function_exported?(notifier, :validate_config, 1) do
    case notifier.validate_config(options) do
      {:error, message} ->
        Logger.error(
          "Notifier validation: #{message} in #{notifier |> to_string() |> String.split(".") |> List.last()}"
        )

      _ ->
        nil
    end
  end
end
# Injects Plug.ErrorHandler callbacks into the using module: every raised
# error is recorded in ErrorStorage and, when the configured notification
# trigger fires, the accumulated occurrences are sent through each
# configured notifier via NotifierSenderServer.
defmacro __using__(config) do
  quote location: :keep do
    use Plug.ErrorHandler

    import BoomNotifier

    settings = unquote(config)

    # Notifiers validation
    walkthrough_notifiers(
      settings,
      fn notifier, options -> validate_notifiers(notifier, options) end
    )

    # Exceptions listed under :ignore_exceptions are dropped silently.
    def handle_errors(conn, %{kind: :error, reason: %mod{}} = error) do
      settings = unquote(config)
      {ignored_exceptions, _settings} = Keyword.pop(settings, :ignore_exceptions, [])

      unless Enum.member?(ignored_exceptions, mod) do
        do_handle_errors(conn, settings, error)
      end
    end

    # Non-exception errors (exits, throws) are always handled.
    def handle_errors(conn, error) do
      settings = unquote(config)
      do_handle_errors(conn, settings, error)
    end

    defp do_handle_errors(conn, settings, error) do
      {custom_data, _settings} = Keyword.pop(settings, :custom_data, :nothing)
      {error_kind, error_info} = ErrorInfo.build(error, conn, custom_data)

      ErrorStorage.add_errors(error_kind, error_info)

      if ErrorStorage.send_notification?(error_kind) do
        occurrences = ErrorStorage.get_errors(error_kind)

        # Triggers the notification in each notifier
        walkthrough_notifiers(settings, fn notifier, options ->
          NotifierSenderServer.send(notifier, occurrences, options)
        end)

        {notification_trigger, _settings} =
          Keyword.pop(settings, :notification_trigger, :always)

        ErrorStorage.clear_errors(notification_trigger, error_kind)
      end
    end
  end
end
| 29.096154 | 111 | 0.657634 |
ff358ac39dab5b9be60305c128086399c4c173b9 | 1,507 | ex | Elixir | lib/basic.ex | georgelima/skooma | c33fbbe38a02ccabb95a131325ee38b3fdc4c117 | [
"MIT"
] | 1 | 2020-11-08T01:39:16.000Z | 2020-11-08T01:39:16.000Z | lib/basic.ex | georgelima/skooma | c33fbbe38a02ccabb95a131325ee38b3fdc4c117 | [
"MIT"
] | null | null | null | lib/basic.ex | georgelima/skooma | c33fbbe38a02ccabb95a131325ee38b3fdc4c117 | [
"MIT"
] | null | null | null | defmodule Skooma.Basic do
alias Skooma.Utils
# Runs the basic type check `validator_` (a 1-arity boolean predicate) on
# `data`, converts the boolean into :ok / {:error, msg} (with `type` used
# in the message), then runs any custom validator functions found in
# `schema`. `path` locates the value inside a nested structure for errors.
def validator(validator_, type, data, schema, path \\ []) do
  data
  |> validator_.()
  |> error(data, type, path)
  |> custom_validator(data, schema, path)
end
# Converts the boolean result of a type check into :ok or an {:error, msg}
# tuple (messages are in Portuguese), appending the data path when present.
defp error(bool, data, expected_type, path) do
  data_type = Utils.typeof(data)

  if bool do
    :ok
  else
    cond do
      Enum.count(path) > 0 ->
        {:error,
         "Esperava um #{expected_type}, recebido #{data_type} #{inspect(data)}, em #{
           eval_path(path)
         }"}

      true ->
        {:error, "Esperava um #{expected_type}, recebido #{data_type} #{inspect(data)}"}
    end
  end
end
# Renders a data path (list of segments) as "a -> b -> c" for messages.
defp eval_path(path) do
  path
  |> Enum.intersperse(" -> ")
  |> Enum.join()
end
# Passes an already-failed type-check result through untouched; on :ok,
# runs the custom validator functions from the schema.
defp custom_validator(:ok, data, schema, path), do: do_custom_validator(data, schema, path)
defp custom_validator(result, _data, _schema, _path), do: result
# Runs every function found in `schema` against the data. Validators may
# take 0, 1 (data) or 2 (data, path) arguments; :ok/true results are
# dropped, a bare `false` is mapped to a generic error tuple, and any other
# value (e.g. an {:error, _} tuple) is kept as-is. Returns :ok when the
# schema holds no functions.
defp do_custom_validator(data, schema, path) do
  validators = Enum.filter(schema, &is_function/1)

  if Enum.count(validators) == 0 do
    :ok
  else
    Enum.map(validators, fn validator ->
      arity = :erlang.fun_info(validator)[:arity]

      cond do
        arity == 0 -> validator.()
        arity == 1 -> validator.(data)
        arity == 2 -> validator.(data, path)
      end
    end)
    |> Enum.reject(&(&1 == :ok || &1 == true))
    |> Enum.map(
      &if &1 == false, do: {:error, "O valor não combina com o validador customizado"}, else: &1
    )
  end
end
| 23.920635 | 98 | 0.560717 |
ff358fde76aef48060ff4c3716351d82890231aa | 7,509 | exs | Elixir | test/client_tests/terraform_test.exs | valiot/opex62541 | 8e50ef9508c25b0693123840d20dcae708e98bfe | [
"MIT"
] | 11 | 2020-04-24T20:54:23.000Z | 2022-01-11T03:13:13.000Z | test/client_tests/terraform_test.exs | valiot/opex62541 | 8e50ef9508c25b0693123840d20dcae708e98bfe | [
"MIT"
] | 3 | 2021-02-25T10:03:49.000Z | 2021-09-15T03:37:12.000Z | test/client_tests/terraform_test.exs | valiot/opex62541 | 8e50ef9508c25b0693123840d20dcae708e98bfe | [
"MIT"
] | 4 | 2020-09-28T16:18:43.000Z | 2021-09-09T16:29:14.000Z | defmodule ClientTerraformTest do
use ExUnit.Case
alias OpcUA.{Client, NodeId, Server, QualifiedName}
@configuration_server [
config: [
port: 4006,
users: [{"alde103", "secret"}]
]
]
@address_space [
namespace: "Sensor",
object_type_node: OpcUA.ObjectTypeNode.new(
[
requested_new_node_id: NodeId.new(ns_index: 1, identifier_type: "integer", identifier: 10000),
parent_node_id: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 58),
reference_type_node_id: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 45),
browse_name: QualifiedName.new(ns_index: 1, name: "Obj")
],
write_mask: 0x3FFFFF,
is_abstract: true
),
object_node: OpcUA.ObjectNode.new(
[
requested_new_node_id: NodeId.new(ns_index: 1, identifier_type: "integer", identifier: 10002),
parent_node_id: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 85),
reference_type_node_id: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 35),
browse_name: QualifiedName.new(ns_index: 1, name: "Test1"),
type_definition: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 58)
]
),
object_node: OpcUA.ObjectNode.new(
[
requested_new_node_id: NodeId.new(ns_index: 2, identifier_type: "string", identifier: "R1_TS1_Sensor"),
parent_node_id: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 85),
reference_type_node_id: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 35),
browse_name: QualifiedName.new(ns_index: 2, name: "Temperature sensor"),
type_definition: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 58)
]
),
variable_node: OpcUA.VariableNode.new(
[
requested_new_node_id: NodeId.new(ns_index: 1, identifier_type: "integer", identifier: 10001),
parent_node_id: NodeId.new(ns_index: 1, identifier_type: "integer", identifier: 10002),
reference_type_node_id: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 47),
browse_name: QualifiedName.new(ns_index: 1, name: "Var"),
type_definition: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 63)
],
write_mask: 0x3BFFFF,
access_level: 3,
browse_name: QualifiedName.new(ns_index: 2, name: "Var_N"),
display_name: {"en-US", "var"},
description: {"en-US", "variable"},
data_type: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 63),
value_rank: 3,
minimum_sampling_interval: 100.0,
historizing: true
),
variable_node: OpcUA.VariableNode.new(
[
requested_new_node_id: NodeId.new(ns_index: 2, identifier_type: "string", identifier: "R1_TS1_Temperature"),
parent_node_id: NodeId.new(ns_index: 2, identifier_type: "string", identifier: "R1_TS1_Sensor"),
reference_type_node_id: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 47),
browse_name: QualifiedName.new(ns_index: 2, name: "Temperature"),
type_definition: NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 63)
],
write_mask: 0x3FFFFF,
value: {10, 103.0},
access_level: 3
),
monitored_item: OpcUA.MonitoredItem.new(
[
monitored_item: NodeId.new(ns_index: 2, identifier_type: "string", identifier: "R1_TS1_Temperature"),
sampling_time: 1000.0,
subscription_id: 1
]
)
]
{:ok, localhost} = :inet.gethostname()
@configuration_client [
config: [
set_config: %{
"requestedSessionTimeout" => 1200000,
"secureChannelLifeTime" => 600000,
"timeout" => 50000
}
],
conn: [
by_username: [
url: "opc.tcp://#{localhost}:4006/",
user: "alde103",
password: "secret"
]
]
]
@monitored_items [
subscription: 200.0,
monitored_item: OpcUA.MonitoredItem.new(
[
monitored_item: NodeId.new(ns_index: 2, identifier_type: "string", identifier: "R1_TS1_Temperature"),
sampling_time: 100.0,
subscription_id: 1
]
)
]
# Test OPC UA client: forwards subscription/monitored-item events to the
# test process (parent_pid) and exposes helpers to read node values and to
# fetch the underlying opex62541 client pid.
defmodule MyClient do
  use OpcUA.Client
  alias OpcUA.Client

  # Use the `init` function to configure your Client.
  def init({parent_pid, 103} = _user_init_state, opc_ua_client_pid) do
    %{parent_pid: parent_pid, opc_ua_client_pid: opc_ua_client_pid}
  end

  # Config and monitored items come from the app env set by the test setup.
  def configuration(_user_init_state), do: Application.get_env(:my_client, :configuration, [])
  def monitored_items(_user_init_state), do: Application.get_env(:my_client, :monitored_items, [])

  def handle_subscription_timeout(subscription_id, state) do
    send(state.parent_pid, {:subscription_timeout, subscription_id})
    state
  end

  def handle_deleted_subscription(subscription_id, state) do
    send(state.parent_pid, {:subscription_delete, subscription_id})
    state
  end

  def handle_monitored_data(changed_data_event, state) do
    send(state.parent_pid, {:value_changed, changed_data_event})
    state
  end

  def handle_deleted_monitored_item(subscription_id, monitored_id, state) do
    send(state.parent_pid, {:item_deleted, {subscription_id, monitored_id}})
    state
  end

  # Test helpers: synchronous calls into this GenServer.
  def read_node_value(pid, node), do: GenServer.call(pid, {:read, node}, :infinity)
  def get_client(pid), do: GenServer.call(pid, {:get_client, nil})

  def handle_call({:read, node}, _from, state) do
    resp = Client.read_node_value(state.opc_ua_client_pid, node)
    {:reply, resp, state}
  end

  def handle_call({:get_client, nil}, _from, state) do
    {:reply, state.opc_ua_client_pid, state}
  end
end
# Test OPC UA server: started immediately on init and forwarding write
# events to the test process so the test can assert on them.
defmodule MyServer do
  use OpcUA.Server
  alias OpcUA.Server

  # Use the `init` function to configure your server.
  def init({parent_pid, 103}, s_pid) do
    Server.start(s_pid)
    %{parent_pid: parent_pid}
  end

  # Config and address space come from the app env set by the test setup.
  def configuration(_user_init_state), do: Application.get_env(:my_server, :configuration, [])
  def address_space(_user_init_state), do: Application.get_env(:my_server, :address_space, [])

  @impl true
  def handle_write(write_event, %{parent_pid: parent_pid} = state) do
    send(parent_pid, write_event)
    state
  end
end
# Publishes the module-attribute fixtures through the app env (read by the
# MyServer/MyClient callbacks above) and boots a server/client pair for
# each test, handing the client pid to the test via the context.
setup() do
  Application.put_env(:my_server, :address_space, @address_space)
  Application.put_env(:my_server, :configuration, @configuration_server)
  Application.put_env(:my_client, :configuration, @configuration_client)
  Application.put_env(:my_client, :monitored_items, @monitored_items)

  {:ok, _pid} = MyServer.start_link({self(), 103})
  {:ok, c_pid} = MyClient.start_link({self(), 103})
  %{c_pid: c_pid}
end
test "Write value event", %{c_pid: c_pid} do
  node_id = NodeId.new(ns_index: 2, identifier_type: "string", identifier: "R1_TS1_Temperature")

  # Initial value matches the :value set in the @address_space fixture.
  c_response = MyClient.read_node_value(c_pid, node_id)
  assert c_response == {:ok, 103.0}

  pid = MyClient.get_client(c_pid)

  # Writing through the raw client should trigger the monitored-item
  # callback; deleting the item/subscription should trigger the
  # corresponding delete callbacks, all forwarded to this test process.
  assert :ok == Client.write_node_value(pid, node_id, 10, 103103.0)
  Process.sleep(200)
  assert :ok == Client.delete_monitored_item(pid, monitored_item_id: 1, subscription_id: 1)
  assert :ok == Client.delete_subscription(pid, 1)

  assert_receive({:value_changed, {1, 1, 103103.0}}, 1000)
  assert_receive({:item_deleted, {1, 1}}, 1000)
  assert_receive({:subscription_delete, 1}, 1000)
end
| 34.603687 | 116 | 0.676388 |
ff35f31e1654d2f67a40dc54a53a3805ae42078d | 1,133 | ex | Elixir | lib/markdown_elixir/paser/indented_code_blocks.ex | mazz-seven/markdown_elixir | 0b78ffa6c69ca67d54befedd4927bb4ba947235e | [
"MIT"
] | 1 | 2020-04-13T09:39:55.000Z | 2020-04-13T09:39:55.000Z | lib/markdown_elixir/paser/indented_code_blocks.ex | mazz-seven/markdown_elixir | 0b78ffa6c69ca67d54befedd4927bb4ba947235e | [
"MIT"
] | null | null | null | lib/markdown_elixir/paser/indented_code_blocks.ex | mazz-seven/markdown_elixir | 0b78ffa6c69ca67d54befedd4927bb4ba947235e | [
"MIT"
] | null | null | null | defmodule MarkdownElixir.Parser.IndentedCodeBlocks do
import NimbleParsec
@doc """
Parses indented code blocks.

Returns `{:ok, tree}` when the whole input is consumed, or
`{:error, message, line}` pointing at the offending line otherwise.
"""
def parse(content) do
  case indented_code_blocks(content, context: [macro: nil]) do
    {:ok, tree, "", %{macro: nil}, _, _} ->
      {:ok, tree}

    {:ok, message, _rest, _context, {line, _}, _byte_offset} ->
      {:error, message, line}

    # NimbleParsec reports parse failures as {:error, reason, rest, context,
    # line, byte_offset}; without this clause such a failure raised a
    # CaseClauseError instead of returning an error value.
    {:error, message, _rest, _context, {line, _}, _byte_offset} ->
      {:error, message, line}
  end
end
# Combinator building blocks (module-body bindings consumed by `defparsec`
# at the bottom of this module):
# - a code line starts with exactly four spaces, which are dropped;
# - bare newline/space characters are accepted so blocks may contain
#   empty lines.
indent = ascii_string([?\s], 4)
line_break = ascii_char([?\n])
text_line = ignore(indent) |> utf8_string([not: ?\n], min: 0) |> concat(line_break)
empty_line = ascii_char([?\n, ?\s])
content = repeat(choice([text_line, empty_line]))

# Tag the match with its line via line/1 so the AST node below can carry
# the source position.
indented_code_blocks =
  content
  |> line()
  |> post_traverse(:indented_code_blocks)
# post_traverse callback: wraps the collected charlist into a "code" AST
# node tagged with the line it started on, trimming the trailing newline.
# `data` arrives in reverse order, hence the Enum.reverse/1.
defp indented_code_blocks(_rest, data, context, {_line, _}, _offset) do
  [{content, {line, _}} | _rest] = Enum.reverse(data)

  {[
     {
       "code",
       [lang: nil, meta: nil],
       [content |> List.to_string() |> String.trim_trailing("\n")],
       %{line: line}
     }
   ], context}
end

defparsec(
  :indented_code_blocks,
  indented_code_blocks
)
end
| 22.66 | 85 | 0.598411 |
ff361a75033b9a5679fc2389fc06a7fb3afddd83 | 6,943 | ex | Elixir | lib/livebook/application.ex | nicksen/livebook | f5afac94970d9c3bd3ad93f6c633e305e9bce30a | [
"Apache-2.0"
] | null | null | null | lib/livebook/application.ex | nicksen/livebook | f5afac94970d9c3bd3ad93f6c633e305e9bce30a | [
"Apache-2.0"
] | null | null | null | lib/livebook/application.ex | nicksen/livebook | f5afac94970d9c3bd3ad93f6c633e305e9bce30a | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Application do
@moduledoc false
use Application
def start(_type, _args) do
  # Boot-time side effects that must happen before any child starts:
  # data directories, default local file system, Erlang distribution,
  # hostname sanity check and the distribution cookie.
  ensure_directories!()
  set_local_filesystem!()
  ensure_distribution!()
  validate_hostname_resolution!()
  set_cookie()

  children =
    [
      # Start the Telemetry supervisor
      LivebookWeb.Telemetry,
      # Start the PubSub system
      {Phoenix.PubSub, name: Livebook.PubSub},
      # Start a supervisor for Livebook tasks
      {Task.Supervisor, name: Livebook.TaskSupervisor},
      # Start the storage module
      Livebook.Storage.current(),
      # Start the periodic version check
      Livebook.UpdateCheck,
      # Periodic measurement of system resources
      Livebook.SystemResources,
      # Start the tracker server on this node
      {Livebook.Tracker, pubsub_server: Livebook.PubSub},
      # Start the supervisor dynamically managing sessions
      {DynamicSupervisor, name: Livebook.SessionSupervisor, strategy: :one_for_one},
      # Start the server responsible for associating files with sessions
      Livebook.Session.FileGuard,
      # Start the Node Pool for managing node names
      Livebook.Runtime.NodePool,
      # Start the unique task dependencies
      Livebook.Utils.UniqueTask
    ] ++
      iframe_server_specs() ++
      [
        # Start the Endpoint (http/https)
        # We skip the access url as we do our own logging below
        {LivebookWeb.Endpoint, log_access_url: false}
      ] ++ app_specs()

  opts = [strategy: :one_for_one, name: Livebook.Supervisor]

  # On successful boot, scrub sensitive env vars and print the access URL.
  with {:ok, _} = result <- Supervisor.start_link(children, opts) do
    clear_env_vars()
    display_startup_info()
    result
  end
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
  LivebookWeb.Endpoint.config_change(changed, removed)
  :ok
end
defp ensure_directories!() do
File.mkdir_p!(Livebook.Config.home())
File.mkdir_p!(Livebook.Config.data_path())
end
defp set_local_filesystem!() do
home =
Livebook.Config.home()
|> Livebook.FileSystem.Utils.ensure_dir_path()
local_filesystem = Livebook.FileSystem.Local.new(default_path: home)
:persistent_term.put(:livebook_local_filesystem, local_filesystem)
end
defp ensure_distribution!() do
unless Node.alive?() do
case System.cmd("epmd", ["-daemon"]) do
{_, 0} ->
:ok
_ ->
Livebook.Config.abort!("""
could not start epmd (Erlang Port Mapper Driver). Livebook uses epmd to \
talk to different runtimes. You may have to start epmd explicitly by calling:
epmd -daemon
Or by calling:
elixir --sname test -e "IO.puts node()"
Then you can try booting Livebook again
""")
end
{type, name} = get_node_type_and_name()
case Node.start(name, type) do
{:ok, _} ->
:ok
{:error, reason} ->
Livebook.Config.abort!("could not start distributed node: #{inspect(reason)}")
end
end
end
import Record
defrecordp :hostent, Record.extract(:hostent, from_lib: "kernel/include/inet.hrl")
# See https://github.com/livebook-dev/livebook/issues/302
defp validate_hostname_resolution!() do
unless Livebook.Config.longname() do
hostname = Livebook.Utils.node_host() |> to_charlist()
case :inet.gethostbyname(hostname) do
{:error, :nxdomain} ->
invalid_hostname!("your hostname \"#{hostname}\" does not resolve to an IP address")
# We only try the first address, so that's the one we validate.
{:ok, hostent(h_addrtype: :inet, h_addr_list: [address | _])} ->
unless inet_loopback?(address) or inet_if?(address) do
invalid_hostname!(
"your hostname \"#{hostname}\" does not resolve to a loopback address (127.0.0.0/8)"
)
end
_ ->
:ok
end
end
end
defp inet_loopback?(address) do
match?({127, _, _, _}, address)
end
defp inet_if?(address) do
case :inet.getifaddrs() do
{:ok, addrs} -> Enum.any?(addrs, fn {_name, flags} -> {:addr, address} in flags end)
_ -> false
end
end
@spec invalid_hostname!(String.t()) :: no_return()
defp invalid_hostname!(prelude) do
Livebook.Config.abort!("""
#{prelude}, which indicates something wrong in your OS configuration.
Make sure your computer's name resolves locally or start Livebook using a long distribution name. If you are using Livebook's CLI, you can:
livebook server --name livebook@127.0.0.1
If you are running it from source, do instead:
MIX_ENV=prod elixir --name livebook@127.0.0.1 -S mix phx.server
""")
end
defp set_cookie() do
cookie = Application.fetch_env!(:livebook, :cookie)
Node.set_cookie(cookie)
end
defp get_node_type_and_name() do
Application.get_env(:livebook, :node) || {:shortnames, random_short_name()}
end
defp random_short_name() do
:"livebook_#{Livebook.Utils.random_short_id()}"
end
defp display_startup_info() do
if Phoenix.Endpoint.server?(:livebook, LivebookWeb.Endpoint) do
IO.puts("[Livebook] Application running at #{LivebookWeb.Endpoint.access_url()}")
end
end
defp clear_env_vars() do
for {var, _} <- System.get_env(), config_env_var?(var) do
System.delete_env(var)
end
end
defp config_env_var?("LIVEBOOK_" <> _), do: true
defp config_env_var?("RELEASE_" <> _), do: true
defp config_env_var?(_), do: false
if Mix.target() == :app do
defp app_specs, do: [LivebookApp]
else
defp app_specs, do: []
end
defp iframe_server_specs() do
server? = Phoenix.Endpoint.server?(:livebook, LivebookWeb.Endpoint)
port = Livebook.Config.iframe_port()
if server? do
http = Application.fetch_env!(:livebook, LivebookWeb.Endpoint)[:http]
iframe_opts =
[
scheme: :http,
plug: LivebookWeb.IframeEndpoint,
port: port
] ++ Keyword.take(http, [:ip])
spec = Plug.Cowboy.child_spec(iframe_opts)
spec = update_in(spec.start, &{__MODULE__, :start_iframe, [port, &1]})
[spec]
else
[]
end
end
@doc false
def start_iframe(port, {m, f, a}) do
case apply(m, f, a) do
{:ok, pid} ->
{:ok, pid}
{:error, {:shutdown, {_, _, {{_, {:error, :eaddrinuse}}, _}}}} = error ->
iframe_port_in_use(port)
error
{:error, {:shutdown, {_, _, {:listen_error, _, :eaddrinuse}}}} = error ->
iframe_port_in_use(port)
error
{:error, _} = error ->
error
end
end
defp iframe_port_in_use(port) do
require Logger
Logger.error("Failed to start Livebook iframe server because port #{port} is already in use")
end
end
| 28.809129 | 143 | 0.635172 |
ff361a9f03e6714945491974de340152b17ea894 | 77 | exs | Elixir | test/test_helper.exs | documents-org/documents.design-el | 6976254e175232afe5e913c29b04c13a86d2e9a2 | [
"MIT"
] | null | null | null | test/test_helper.exs | documents-org/documents.design-el | 6976254e175232afe5e913c29b04c13a86d2e9a2 | [
"MIT"
] | null | null | null | test/test_helper.exs | documents-org/documents.design-el | 6976254e175232afe5e913c29b04c13a86d2e9a2 | [
"MIT"
] | null | null | null | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(DocumentsDesign.Repo, :manual)
| 25.666667 | 61 | 0.805195 |
ff36544b93a304ba999fb16616c4e59401d88ea9 | 258 | ex | Elixir | lib/agent/cache/cache.ex | IanLuites/kvasir_agent | 37a70c1eaa8cee1a61de8c0fe6b9ff7dcd290a83 | [
"MIT"
] | 1 | 2020-09-03T08:31:02.000Z | 2020-09-03T08:31:02.000Z | lib/agent/cache/cache.ex | IanLuites/kvasir_agent | 37a70c1eaa8cee1a61de8c0fe6b9ff7dcd290a83 | [
"MIT"
] | null | null | null | lib/agent/cache/cache.ex | IanLuites/kvasir_agent | 37a70c1eaa8cee1a61de8c0fe6b9ff7dcd290a83 | [
"MIT"
] | null | null | null | defmodule Kvasir.Agent.Cache do
@moduledoc """
Behaviour for agent state caches.

Implementations persist an agent's state snapshot together with the
`Kvasir.Offset` it was taken at, and restore both on load.
"""
# Loads the cached state for the given agent/id.
# Returns `{:ok, offset, state}` on a hit or `{:error, reason}` otherwise.
@callback load(agent :: module, id :: term) :: {:ok, Kvasir.Offset.t(), map} | {:error, atom}
# Persists `data` as the state for the given agent/id taken at `offset`.
@callback save(agent :: module, id :: term, data :: map, offset :: Kvasir.Offset.t()) ::
:ok | {:error, atom}
end
| 43 | 95 | 0.589147 |
ff367adc1d0682bd59f37a815a00dff8aff30119 | 1,115 | ex | Elixir | lib/ex_saferpay/request_normalizer.ex | jshmrtn/ex-saferpay | 263fc6dacf5b2741eb3fff33cf8ed405637d170a | [
"MIT"
] | 1 | 2017-12-12T17:09:02.000Z | 2017-12-12T17:09:02.000Z | lib/ex_saferpay/request_normalizer.ex | jshmrtn/ex-saferpay | 263fc6dacf5b2741eb3fff33cf8ed405637d170a | [
"MIT"
] | 4 | 2017-12-12T17:08:28.000Z | 2018-03-01T08:04:56.000Z | lib/ex_saferpay/request_normalizer.ex | jshmrtn/ex-saferpay | 263fc6dacf5b2741eb3fff33cf8ed405637d170a | [
"MIT"
] | null | null | null | defmodule ExSaferpay.RequestNormalizer do
@moduledoc false

# Camelizes every key of the given map/enumerable, returning a list of
# `{"CamelizedKey", value}` pairs. Binary and atom keys are accepted.
defp camelize_map(map) do
  Enum.map(map, fn {key, value} -> {camelize_key(key), value} end)
end

defp camelize_key(key) when is_binary(key), do: Macro.camelize(key)
defp camelize_key(key) when is_atom(key), do: key |> Atom.to_string() |> Macro.camelize()
# Converts library-specific values into their Saferpay wire shape:
# `Money` structs become `%{Value: cents, CurrencyCode: code}` (amount is
# scaled from units to integer cents via Decimal) and `URI` structs become
# plain strings; every other entry passes through untouched.
defp normalize_values(map) do
  Enum.map(map, &normalize_value/1)
end

defp normalize_value({key, %Money{amount: amount, currency: currency}}) do
  cents =
    amount
    |> Decimal.mult(Decimal.new(100))
    |> Decimal.to_integer()

  {key,
   %{
     Value: cents,
     CurrencyCode: currency
   }}
end

defp normalize_value({key, %URI{} = uri}), do: {key, URI.to_string(uri)}
defp normalize_value(other), do: other
# Encodes a struct as Saferpay-flavored JSON: its fields are camelized and
# value types normalized before handing the resulting map to Poison.
def json_encode(map, options) do
  encodable =
    map
    |> Map.from_struct()
    |> camelize_map()
    |> normalize_values()
    |> Map.new()

  Poison.encode!(encodable, options)
end
# Injects a `Poison.Encoder` implementation into the using module that
# delegates to `json_encode/2`, so any request struct module can opt into
# Saferpay-style encoding with `use ExSaferpay.RequestNormalizer`.
defmacro __using__(_) do
quote do
defimpl Poison.Encoder, for: __MODULE__ do
defdelegate encode(map, options), to: ExSaferpay.RequestNormalizer, as: :json_encode
end
end
end
end
| 21.037736 | 92 | 0.573094 |
ff367c74335957d5fea543201d46f5064d5d73ae | 1,090 | ex | Elixir | lib/phoenix_app_web/router.ex | yosefbennywidyo/phoenix_app | 04c49fc324e137589006ada3a6cb414c466be86d | [
"MIT"
] | null | null | null | lib/phoenix_app_web/router.ex | yosefbennywidyo/phoenix_app | 04c49fc324e137589006ada3a6cb414c466be86d | [
"MIT"
] | null | null | null | lib/phoenix_app_web/router.ex | yosefbennywidyo/phoenix_app | 04c49fc324e137589006ada3a6cb414c466be86d | [
"MIT"
] | null | null | null | defmodule PhoenixAppWeb.Router do
use PhoenixAppWeb, :router
# Plugs applied to browser-facing routes: HTML negotiation, session,
# flash, CSRF protection and secure headers.
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
# JSON-only pipeline; no scope below currently pipes through it.
pipeline :api do
plug :accepts, ["json"]
end
scope "/", PhoenixAppWeb do
pipe_through :browser
get "/", PageController, :index
end
# Other scopes may use custom stacks.
# scope "/api", PhoenixAppWeb do
#   pipe_through :api
# end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through :browser
live_dashboard "/dashboard", metrics: PhoenixAppWeb.Telemetry
end
end
end
| 25.348837 | 70 | 0.702752 |
ff3682c063b2385e0ce67709d659ee79843982a6 | 1,698 | exs | Elixir | test/upload/adapters/s3_test.exs | artemeff/upload | ab495e79caed499c0c80d273e581887b7df51b9d | [
"MIT"
] | 30 | 2017-12-23T15:00:30.000Z | 2021-09-29T19:33:09.000Z | test/upload/adapters/s3_test.exs | artemeff/upload | ab495e79caed499c0c80d273e581887b7df51b9d | [
"MIT"
] | 6 | 2018-05-03T18:44:36.000Z | 2021-04-21T17:15:25.000Z | test/upload/adapters/s3_test.exs | artemeff/upload | ab495e79caed499c0c80d273e581887b7df51b9d | [
"MIT"
] | 5 | 2019-09-27T16:37:58.000Z | 2021-10-04T21:04:35.000Z | defmodule Upload.Adapters.S3Test do
use ExUnit.Case, async: true
doctest Upload.Adapters.S3
alias Upload.Adapters.S3, as: Adapter
# Integration tests: they talk to whichever S3 endpoint ExAws is
# configured with, using the adapter's configured bucket.
@fixture Path.expand("../../fixtures/text.txt", __DIR__)
@upload %Upload{path: @fixture, filename: "text.txt", key: "foo/text.txt"}
# Creates the configured bucket when a HEAD request reports it missing.
defp ensure_bucket_exists! do
with {:error, _} <- Adapter.bucket() |> ExAws.S3.head_bucket() |> ExAws.request() do
Adapter.bucket() |> ExAws.S3.put_bucket("us-east-1") |> ExAws.request!()
end
end
# Fetches an object from the adapter's bucket, returning the raw ExAws result.
defp get_object(key) do
Adapter.bucket() |> ExAws.S3.get_object(key) |> ExAws.request()
end
# The bucket is created once for the whole module.
setup_all do
ensure_bucket_exists!()
:ok
end
test "get_url/1" do
assert Adapter.get_url("foo.txt") == "https://my_bucket_name.s3.amazonaws.com/foo.txt"
assert Adapter.get_url("foo/bar.txt") == "https://my_bucket_name.s3.amazonaws.com/foo/bar.txt"
end
test "get_signed_url/2" do
assert {:ok, url} = Adapter.get_signed_url("foo.txt", [])
query = url |> URI.parse() |> Map.fetch!(:query) |> URI.decode_query()
assert query["X-Amz-Algorithm"]
assert query["X-Amz-Credential"]
assert query["X-Amz-Date"]
assert query["X-Amz-Expires"] == "3600"
assert query["X-Amz-Signature"]
assert query["X-Amz-SignedHeaders"]
end
test "get_signed_url/2 with a custom expiration" do
assert {:ok, url} = Adapter.get_signed_url("foo.txt", expires_in: 100)
query = url |> URI.parse() |> Map.fetch!(:query) |> URI.decode_query()
assert query["X-Amz-Expires"] == "100"
end
test "transfer/1" do
assert {:ok, %Upload{key: key, status: :transferred}} = Adapter.transfer(@upload)
assert {:ok, %{body: "MEATLOAF\n"}} = get_object(key)
end
end
| 31.444444 | 98 | 0.658422 |
ff36904b834dce96806979f5755a3f6e4228a52b | 1,737 | ex | Elixir | clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/model/claim_device_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/model/claim_device_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/model/claim_device_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AndroidDeviceProvisioning.V1.Model.ClaimDeviceResponse do
@moduledoc """
Response message containing device id of the claim.
## Attributes
* `deviceId` (*type:* `String.t`, *default:* `nil`) - The device ID of the claimed device.
* `deviceName` (*type:* `String.t`, *default:* `nil`) - The resource name of the device in the format
`partners/[PARTNER_ID]/devices/[DEVICE_ID]`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:deviceId => String.t(),
:deviceName => String.t()
}
# field/1 registers the JSON fields — presumably injected by
# `use GoogleApi.Gax.ModelBase` above (generated code; confirm there).
field(:deviceId)
field(:deviceName)
end
# Decoding delegates back to the model's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.AndroidDeviceProvisioning.V1.Model.ClaimDeviceResponse do
def decode(value, options) do
GoogleApi.AndroidDeviceProvisioning.V1.Model.ClaimDeviceResponse.decode(value, options)
end
end
# Encoding uses the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.AndroidDeviceProvisioning.V1.Model.ClaimDeviceResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.058824 | 105 | 0.735751 |
ff369574d34d44e92bd473a46a91b2a805bc3402 | 3,075 | ex | Elixir | lib/ex_unit/lib/ex_unit/capture_server.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/capture_server.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/capture_server.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | defmodule ExUnit.CaptureServer do
@moduledoc false
@timeout 30_000
use GenServer
def start_link() do
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
# Captures the named IO device for `pid`. Returns {:ok, monitor_ref}
# or {:error, :already_captured} if someone else holds the device.
def device_capture_on(device, pid) do
GenServer.call(__MODULE__, {:device_capture_on, device, pid}, @timeout)
end
def device_capture_off(ref) do
GenServer.call(__MODULE__, {:device_capture_off, ref}, @timeout)
end
def log_capture_on(pid) do
GenServer.call(__MODULE__, {:log_capture_on, pid}, @timeout)
end
def log_capture_off(ref) do
GenServer.call(__MODULE__, {:log_capture_off, ref}, @timeout)
end
## Callbacks
# State layout:
#   devices:      {captured_names, monitor_ref => {name, original_pid}}
#   log_captures: monitor_ref => true for every active log capture
#   log_status:   result of removing the :console backend (see below)
def init(:ok) do
{:ok, %{
devices: {%{}, %{}},
log_captures: %{},
log_status: nil
}}
end
# Steals the registered device name: the capturing pid takes over the
# registration while the original pid is remembered so it can be restored.
# A device name can only be captured by one process at a time.
def handle_call({:device_capture_on, name, pid}, _from, config) do
{names, refs} = config.devices
if Map.has_key?(names, name) do
{:reply, {:error, :already_captured}, config}
else
orig_pid = Process.whereis(name)
Process.unregister(name)
Process.register(pid, name)
ref = Process.monitor(pid)
refs = Map.put(refs, ref, {name, orig_pid})
names = Map.put(names, name, true)
{:reply, {:ok, ref}, %{config | devices: {names, refs}}}
end
end
def handle_call({:device_capture_off, ref}, _from, config) do
config = release_device(ref, config)
{:reply, :ok, config}
end
# The first log capture removes the :console backend; later captures
# just piggyback on that, tracked by their monitor refs.
def handle_call({:log_capture_on, pid}, _from, config) do
ref = Process.monitor(pid)
refs = Map.put(config.log_captures, ref, true)
if map_size(refs) == 1 do
status = Logger.remove_backend(:console)
{:reply, ref, %{config | log_captures: refs, log_status: status}}
else
{:reply, ref, %{config | log_captures: refs}}
end
end
def handle_call({:log_capture_off, ref}, _from, config) do
Process.demonitor(ref, [:flush])
config = remove_log_capture(ref, config)
{:reply, :ok, config}
end
# A monitored (capturing) process died: release whatever it held.
def handle_info({:DOWN, ref, _, _, _}, config) do
config = remove_log_capture(ref, config)
config = release_device(ref, config)
{:noreply, config}
end
# Fall back to the default GenServer handling for other messages.
def handle_info(msg, state) do
super(msg, state)
end
# Restores the original registration for a captured device name. The
# nested try re-registers the original pid even if unregister fails;
# ArgumentError is swallowed (e.g. a pid in the swap is no longer alive).
defp release_device(ref, %{devices: {names, refs}} = config) do
case Map.pop(refs, ref) do
{{name, pid}, refs} ->
names = Map.delete(names, name)
Process.demonitor(ref, [:flush])
try do
try do
Process.unregister(name)
after
Process.register(pid, name)
end
rescue
ArgumentError -> nil
end
%{config | devices: {names, refs}}
{nil, _refs} -> config
end
end
defp remove_log_capture(ref, %{log_captures: refs} = config) do
if Map.has_key?(refs, ref) do
refs = Map.delete(refs, ref)
maybe_add_console(refs, config.log_status)
%{config | log_captures: refs}
else
config
end
end
# Re-adds the :console backend once the last log capture ends — but only
# if we actually removed it successfully when capturing started.
defp maybe_add_console(refs, status) do
if status == :ok and map_size(refs) == 0 do
Logger.add_backend(:console, flush: true)
end
end
end
| 25.625 | 75 | 0.627967 |
ff36a0504a869de619b15033dda1d7ee91730077 | 1,470 | ex | Elixir | lib/mail_slurp_api/model/page_group_projection.ex | mailslurp/mailslurp-client-elixir | 5b98b91bb327de5216e873cd45b4fbb3c1b55c90 | [
"MIT"
] | 1 | 2021-06-17T18:07:49.000Z | 2021-06-17T18:07:49.000Z | lib/mail_slurp_api/model/page_group_projection.ex | mailslurp/mailslurp-client-elixir | 5b98b91bb327de5216e873cd45b4fbb3c1b55c90 | [
"MIT"
] | null | null | null | lib/mail_slurp_api/model/page_group_projection.ex | mailslurp/mailslurp-client-elixir | 5b98b91bb327de5216e873cd45b4fbb3c1b55c90 | [
"MIT"
] | 1 | 2021-03-16T18:55:56.000Z | 2021-03-16T18:55:56.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule MailSlurpAPI.Model.PageGroupProjection do
@moduledoc """
Paginated group projection results. Page index starts at zero. Projection results may omit larger entity fields. For fetching a full entity use the projection ID with individual method calls.
"""
@derive [Poison.Encoder]
# Standard page envelope: `content` holds the projections, the remaining
# fields describe the page (index, size, totals, sorting).
defstruct [
:"content",
:"empty",
:"first",
:"last",
:"number",
:"numberOfElements",
:"pageable",
:"size",
:"sort",
:"totalElements",
:"totalPages"
]
@type t :: %__MODULE__{
:"content" => [GroupProjection] | nil,
:"empty" => boolean() | nil,
:"first" => boolean() | nil,
:"last" => boolean() | nil,
:"number" => integer() | nil,
:"numberOfElements" => integer() | nil,
:"pageable" => Pageable | nil,
:"size" => integer() | nil,
:"sort" => Sort | nil,
:"totalElements" => integer() | nil,
:"totalPages" => integer() | nil
}
end
# Decoder recursively deserializes the nested content/pageable/sort fields.
defimpl Poison.Decoder, for: MailSlurpAPI.Model.PageGroupProjection do
import MailSlurpAPI.Deserializer
def decode(value, options) do
value
|> deserialize(:"content", :list, MailSlurpAPI.Model.GroupProjection, options)
|> deserialize(:"pageable", :struct, MailSlurpAPI.Model.Pageable, options)
|> deserialize(:"sort", :struct, MailSlurpAPI.Model.Sort, options)
end
end
| 29.4 | 189 | 0.646259 |
ff36a94884f8b5411062e17e0c64c221cd69180a | 4,914 | exs | Elixir | apps/ewallet_db/test/ewallet_db/api_key_test.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/test/ewallet_db/api_key_test.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/test/ewallet_db/api_key_test.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | defmodule EWalletDB.APIKeyTest do
use EWalletDB.SchemaCase
alias EWalletDB.APIKey
alias Ecto.UUID
# The owner app that the test keys are scoped to.
@owner_app :some_app
describe "APIKey factory" do
test_has_valid_factory(APIKey)
end
describe "get/1" do
test "accepts a uuid" do
api_key = insert(:api_key)
result = APIKey.get(api_key.id)
assert result.uuid == api_key.uuid
end
test "does not return a soft-deleted API key" do
{:ok, api_key} = :api_key |> insert() |> APIKey.delete()
assert APIKey.get(api_key.id) == nil
end
test "returns nil if the given uuid is invalid" do
assert APIKey.get("not_a_uuid") == nil
end
test "returns nil if the key with the given uuid is not found" do
assert APIKey.get(UUID.generate()) == nil
end
end
describe "APIKey.insert/1" do
test_insert_generate_uuid(APIKey, :uuid)
test_insert_generate_external_id(APIKey, :id, "api_")
test_insert_generate_timestamps(APIKey)
# 32 bytes = ceil(32 / 3 * 4)
test_insert_generate_length(APIKey, :key, 43)
test_insert_allow_duplicate(APIKey, :account, insert(:account))
test_insert_prevent_duplicate(APIKey, :key)
test "defaults to master account if not provided" do
master_account = get_or_insert_master_account()
{:ok, api_key} = :api_key |> params_for(%{account: nil}) |> APIKey.insert()
assert api_key.account_uuid == master_account.uuid
end
end
describe "APIKey.update/2" do
test_update_ignores_changing(APIKey, :key)
test_update_ignores_changing(APIKey, :owner_app)
test_update_field_ok(APIKey, :expired, false, true)
test_update_field_ok(
APIKey,
:exchange_address,
insert(:wallet).address,
insert(:wallet).address
)
end
# authenticate/2 looks a key up by its value alone.
describe "APIKey.authenticate/2" do
test "returns the API key" do
account = insert(:account)
:api_key
|> params_for(%{
key: "apikey123",
account: account,
owner_app: Atom.to_string(@owner_app)
})
|> APIKey.insert()
assert APIKey.authenticate("apikey123", @owner_app).account_uuid == account.uuid
end
test "returns false if API key does not exists" do
:api_key
|> params_for(%{key: "apikey123", owner_app: Atom.to_string(@owner_app)})
|> APIKey.insert()
assert APIKey.authenticate("unmatched", @owner_app) == false
end
test "returns false if API key exists but for a different owner app" do
:api_key
|> params_for(%{key: "apikey123", owner_app: "wrong_app"})
|> APIKey.insert()
assert APIKey.authenticate("unmatched", @owner_app) == false
end
test "returns false if API key is nil" do
assert APIKey.authenticate(nil, @owner_app) == false
end
end
# authenticate/3 requires both the key's external id and its value.
describe "APIKey.authenticate/3" do
test "returns the API key if the api_key_id and api_key matches database" do
account = insert(:account)
{:ok, api_key} =
:api_key
|> params_for(%{
key: "apikey123",
account: account,
owner_app: Atom.to_string(@owner_app)
})
|> APIKey.insert()
assert APIKey.authenticate(api_key.id, api_key.key, @owner_app).account_uuid == account.uuid
end
test "returns false if API key does not exists" do
key_id = UUID.generate()
:api_key
|> params_for(%{id: key_id, key: "apikey123", owner_app: Atom.to_string(@owner_app)})
|> APIKey.insert()
assert APIKey.authenticate(key_id, "unmatched", @owner_app) == false
end
test "returns false if API key ID does not exists" do
:api_key
|> params_for(%{key: "apikey123", owner_app: Atom.to_string(@owner_app)})
|> APIKey.insert()
assert APIKey.authenticate(UUID.generate(), "apikey123", @owner_app) == false
end
test "returns false if API key ID and its key exist but for a different owner app" do
key_id = UUID.generate()
:api_key
|> params_for(%{key: "apikey123", owner_app: "wrong_app"})
|> APIKey.insert()
assert APIKey.authenticate(key_id, "apikey123", @owner_app) == false
end
test "returns false if API key ID is not provided" do
:api_key
|> params_for(%{key: "apikey123", owner_app: Atom.to_string(@owner_app)})
|> APIKey.insert()
assert APIKey.authenticate(nil, "apikey123", @owner_app) == false
end
test "returns false if API key is not provided" do
key_id = UUID.generate()
:api_key
|> params_for(%{key: "apikey123", owner_app: Atom.to_string(@owner_app)})
|> APIKey.insert()
assert APIKey.authenticate(key_id, nil, @owner_app) == false
end
end
describe "deleted?/1" do
test_deleted_checks_nil_deleted_at(APIKey)
end
describe "delete/1" do
test_delete_causes_record_deleted(APIKey)
end
describe "restore/1" do
test_restore_causes_record_undeleted(APIKey)
end
end
| 27.920455 | 98 | 0.656288 |
ff36c3241a4a249d24cbcdb576e0cd4aed0f43d8 | 298 | exs | Elixir | config/config.exs | aleDsz/ecto-xsd | da9eec98cd8e3350184f278272ff5d9a828ce92c | [
"MIT"
] | 6 | 2021-03-11T17:23:14.000Z | 2021-11-15T11:13:53.000Z | config/config.exs | aleDsz/ecto-xsd | da9eec98cd8e3350184f278272ff5d9a828ce92c | [
"MIT"
] | 9 | 2021-04-13T08:36:29.000Z | 2021-07-23T08:42:25.000Z | config/config.exs | aleDsz/ecto-xsd | da9eec98cd8e3350184f278272ff5d9a828ce92c | [
"MIT"
] | null | null | null | use Mix.Config
# Git hooks are installed automatically at compile time and log verbosely.
config :git_hooks,
auto_install: true,
verbose: true,
hooks: [
# Before each commit: enforce formatting and strict Credo checks.
pre_commit: [
tasks: [
{:cmd, "mix format"},
{:cmd, "mix credo --strict"}
]
],
# Before each push: run the test suite (quiet output).
pre_push: [
verbose: false,
tasks: [
{:cmd, "mix test"}
]
]
]
| 14.9 | 36 | 0.463087 |
ff36cdfb4c9580e56d69afdb78d74518b57c7352 | 428 | ex | Elixir | code-sample-elixir/kv/lib/tasklist.ex | aquatir/learntocode | 9b860a528ded64fab2686a93c49dfd4b3947d6c9 | [
"MIT"
] | null | null | null | code-sample-elixir/kv/lib/tasklist.ex | aquatir/learntocode | 9b860a528ded64fab2686a93c49dfd4b3947d6c9 | [
"MIT"
] | 1 | 2017-10-03T11:25:54.000Z | 2017-10-03T11:25:54.000Z | code-sample-elixir/kv/lib/tasklist.ex | aquatir/remember_java_api | 6acc72e23a954152d922e380d06f337f55921950 | [
"MIT"
] | null | null | null | defmodule TaskList do
# Persists tasks as Markdown checklist lines ("[ ] task") in a file.
# The default file is `task_list.md`; every function also accepts an
# explicit path (backward compatible — the zero/one-arg forms still
# exist), so callers and tests can target other files. `File` calls are
# fully qualified instead of imported for clarity.
@file_name "task_list.md"

@doc """
Appends `task_name` to the task list as an unchecked item.

Returns `:ok`, or `{:error, reason}` from `File.write/3`.
The file is created on first use.
"""
def add(task_name, path \\ @file_name) do
  File.write(path, "[ ] " <> task_name <> "\n", [:append])
end

@doc """
Returns the whole task list as `{:ok, contents}`, or `{:error, reason}`
if the file cannot be read (e.g. it does not exist yet).
"""
def show_list(path \\ @file_name) do
  File.read(path)
end

@doc """
Empties the task list by truncating the file.
"""
def reset(path \\ @file_name) do
  File.write(path, "", [:write])
end
end
| 19.454545 | 93 | 0.621495 |
ff36cf06f7c4bdc1cbe3c39874d9ef8fcbd30c49 | 1,528 | exs | Elixir | exercises/saddle-points/example.exs | martinsvalin/xelixir | 9469d92e7eecb528a05a8da923d8271ed303c058 | [
"MIT"
] | 1 | 2021-08-16T20:24:14.000Z | 2021-08-16T20:24:14.000Z | exercises/saddle-points/example.exs | Triangle-Elixir/xelixir | 08d23bf47f57799f286567cb26f635291de2fde5 | [
"MIT"
] | null | null | null | exercises/saddle-points/example.exs | Triangle-Elixir/xelixir | 08d23bf47f57799f286567cb26f635291de2fde5 | [
"MIT"
] | null | null | null | defmodule Matrix do
@doc """
Parses a string representation of a matrix
to a list of rows
"""
@spec rows(String.t()) :: [[integer]]
def rows(str) do
  for line <- String.split(str, "\n") do
    for cell <- String.split(line, " "), do: String.to_integer(cell)
  end
end

@doc """
Parses a string representation of a matrix
to a list of columns
"""
@spec columns(String.t()) :: [[integer]]
def columns(str) do
  str
  |> rows()
  |> Enum.zip()
  |> Enum.map(&Tuple.to_list/1)
end

@doc """
Calculates all the saddle points from a string
representation of a matrix
"""
@spec saddle_points(String.t()) :: [{integer, integer}]
def saddle_points(str) do
  rows = rows(str)
  columns = columns(str)

  # A saddle point is the maximum of its row and the minimum of its column;
  # coordinates are emitted in row-major order as {row_index, col_index}.
  for {row, r} <- Enum.with_index(rows),
      {value, c} <- Enum.with_index(row),
      value == Enum.max(row),
      value == Enum.min(Enum.at(columns, c)),
      do: {r, c}
end
end
| 21.828571 | 64 | 0.626963 |
ff36d02d83c5d8327993cccfb2620c1548abc648 | 86 | ex | Elixir | testData/org/elixir_lang/parser_definition/matched_and_operation_parsing_test_case/MatchedTwoOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/matched_and_operation_parsing_test_case/MatchedTwoOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/matched_and_operation_parsing_test_case/MatchedTwoOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | one ++ two && three -- four
one <> two &&& three .. four
one .. two and three <> four
| 21.5 | 28 | 0.55814 |
ff36e8b42f8cec87da1014b615a0c2222596d686 | 1,704 | ex | Elixir | lib/indexing_reporter_web/controllers/user_reset_password_controller.ex | ashton314/indexing-reporter | 7bb9d1bb436b18a359ad4855b084b92c076b979d | [
"MIT"
] | null | null | null | lib/indexing_reporter_web/controllers/user_reset_password_controller.ex | ashton314/indexing-reporter | 7bb9d1bb436b18a359ad4855b084b92c076b979d | [
"MIT"
] | null | null | null | lib/indexing_reporter_web/controllers/user_reset_password_controller.ex | ashton314/indexing-reporter | 7bb9d1bb436b18a359ad4855b084b92c076b979d | [
"MIT"
] | null | null | null | defmodule IndexingReporterWeb.UserResetPasswordController do
use IndexingReporterWeb, :controller
alias IndexingReporter.Accounts
# Resolves the "token" param into conn.assigns.user/token before the
# edit/update actions; invalid tokens are redirected away (see below).
plug :get_user_by_reset_password_token when action in [:edit, :update]
# Renders the "request a reset" form.
def new(conn, _params) do
render(conn, "new.html")
end
# Sends reset instructions when the email is known. The response (flash +
# redirect) is identical whether or not the email exists, so this endpoint
# cannot be used to probe for registered addresses.
def create(conn, %{"user" => %{"email" => email}}) do
if user = Accounts.get_user_by_email(email) do
Accounts.deliver_user_reset_password_instructions(
user,
&Routes.user_reset_password_url(conn, :edit, &1)
)
end
conn
|> put_flash(
:info,
"If your email is in our system, you will receive instructions to reset your password shortly."
)
|> redirect(to: "/")
end
# Renders the "choose a new password" form for the user loaded by the plug.
def edit(conn, _params) do
render(conn, "edit.html", changeset: Accounts.change_user_password(conn.assigns.user))
end
# Do not log in the user after reset password to avoid a
# leaked token giving the user access to the account.
def update(conn, %{"user" => user_params}) do
case Accounts.reset_user_password(conn.assigns.user, user_params) do
{:ok, _} ->
conn
|> put_flash(:info, "Password reset successfully.")
|> redirect(to: Routes.user_session_path(conn, :new))
{:error, changeset} ->
render(conn, "edit.html", changeset: changeset)
end
end
# Plug: loads the user for the given reset token, halting with a flash
# and redirect when the token is invalid or expired.
defp get_user_by_reset_password_token(conn, _opts) do
%{"token" => token} = conn.params
if user = Accounts.get_user_by_reset_password_token(token) do
conn |> assign(:user, user) |> assign(:token, token)
else
conn
|> put_flash(:error, "Reset password link is invalid or it has expired.")
|> redirect(to: "/")
|> halt()
end
end
end
| 28.881356 | 101 | 0.661972 |
ff36e8c4b160acdda76d6dffab76b4834af6ade4 | 2,741 | ex | Elixir | lib/jenny_lite/expander.ex | rob-brown/Jenny-Lite | d3bc461168e1f77c7bfbdf9779f3be0357992dd2 | [
"MIT"
] | 1 | 2016-11-10T20:11:37.000Z | 2016-11-10T20:11:37.000Z | lib/jenny_lite/expander.ex | rob-brown/Jenny-Lite | d3bc461168e1f77c7bfbdf9779f3be0357992dd2 | [
"MIT"
] | null | null | null | lib/jenny_lite/expander.ex | rob-brown/Jenny-Lite | d3bc461168e1f77c7bfbdf9779f3be0357992dd2 | [
"MIT"
] | null | null | null | defmodule JennyLite.Expander do
alias JennyLite.{Template}
# Expands template blocks delimited by <<<EXPAND_SPEC>>>,
# <<<START_EXPAND>>> and <<<END_EXPAND>>> markers: the JSON between the
# spec marker and START names an EEx template file and its inputs, and
# the rendered template replaces the previous expansion body.
# Expands a file in place: streams its lines, rewrites them, writes back.
def expand_file(file) when is_binary(file) do
file
|> Path.expand
|> File.stream!
|> expand_lines(Path.dirname file)
|> (& File.write! file, &1).()
end
# Expands an in-memory string; `include_spec?` controls whether the
# spec/marker lines themselves are kept in the output. Note every split
# line gets a trailing "\n" re-attached before processing.
def expand_string(string, relative_to, include_spec? \\ true) when is_binary(string) and is_binary(relative_to) and is_boolean(include_spec?) do
string
|> String.split("\n")
|> Stream.map(& &1 <> "\n")
|> expand_lines(relative_to, include_spec?)
end
# Runs the line state machine over all lines. The match on
# {lines, nil, :normal} asserts that every JSON spec block was closed;
# a dangling block raises a MatchError here.
defp expand_lines(lines, relative_to, include_spec? \\ true) do
lines
|> Enum.reduce({[], nil, :normal}, find_expansions(include_spec?))
|> (fn {lines, nil, :normal} -> Enum.reverse lines end).()
|> Enum.map(& expand_template &1, relative_to)
end
# TODO: Change this to use improper lists to avoid the reverse.
# Reducer for the per-line state machine. Accumulator is
# {output_lines (reversed), json_lines (reversed) | nil, state} where
# state is :normal (copying), :json (collecting the spec JSON) or
# :drop (discarding the stale expansion body until END_EXPAND).
defp find_expansions(include_spec?) do
fn line, {lines, json_lines, state} ->
cond do
state == :normal and expand_spec?(line) and include_spec? ->
{[line | lines], [], :json}
state == :normal and expand_spec?(line) ->
{lines, [], :json}
state == :json and start_expand?(line) and include_spec? ->
template = json_lines |> Enum.reverse |> new_template
{[template, line | lines], nil, :drop}
state == :json and start_expand?(line) ->
template = json_lines |> Enum.reverse |> new_template
{[template | lines], nil, :drop}
end_expand?(line) and include_spec? ->
{[line | lines], nil, :normal}
end_expand?(line) ->
{lines, nil, :normal}
state == :json and include_spec? ->
{[line | lines], [line | json_lines], :json}
state == :json ->
{lines, [line | json_lines], :json}
state == :drop ->
{lines, json_lines, :drop}
state == :normal ->
{[line | lines], json_lines, :normal}
end
end
end
# Renders a collected Template with its bindings via EEx and recursively
# expands the rendered output (without keeping nested spec blocks).
defp expand_template(template = %Template{}, relative_path) do
full_path = Path.expand template.path, relative_path
bindings = Template.bindings template
expanded = EEx.eval_file full_path, bindings, trim: true
dir = Path.dirname full_path
# Recursively expand.
expanded
|> :erlang.iolist_to_binary
|> expand_string(dir, false )
end
# Non-template lines pass through unchanged.
defp expand_template(line, _), do: line
# Parses the accumulated JSON spec into a Template struct; raises if the
# JSON is invalid or lacks the "template"/"inputs" keys.
defp new_template(json) do
%{"template" => template, "inputs" => inputs} = Poison.decode! json
Template.new template, inputs
end
# Marker predicates: a line may carry arbitrary text around the marker.
defp expand_spec?(line) do
line =~ ~r"^.*<<<EXPAND_SPEC>>>.*$"
end
defp start_expand?(line) do
line =~ ~r"^.*<<<START_EXPAND>>>.*$"
end
defp end_expand?(line) do
line =~ ~r"^.*<<<END_EXPAND>>>.*$"
end
end
| 31.872093 | 146 | 0.606348 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.