hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
08258c9587fd833d1cc2bd55d6b5393786ef5bf7 | 270 | ex | Elixir | lib/discuss_web/models/topic.ex | shahnCM/discuss | a880950e090ecf9309d495e81c31d589d3655881 | [
"MIT"
] | null | null | null | lib/discuss_web/models/topic.ex | shahnCM/discuss | a880950e090ecf9309d495e81c31d589d3655881 | [
"MIT"
] | null | null | null | lib/discuss_web/models/topic.ex | shahnCM/discuss | a880950e090ecf9309d495e81c31d589d3655881 | [
"MIT"
] | null | null | null | defmodule DiscussWeb.Topic do
use DiscussWeb, :model
# Ecto schema backed by the "topics" table; a topic currently only carries a
# title (no timestamps, no associations declared here).
schema "topics" do
  field :title, :string
end
@doc """
Builds a changeset for `struct`, casting the `:title` param and requiring
it to be present.
"""
def changeset(struct, params \\ %{}) do
  permitted = [:title]

  struct
  |> cast(params, permitted)
  |> validate_required(permitted)
end
end
| 19.285714 | 43 | 0.562963 |
082598936100fe90ef8ceefbbd118e8e0467430a | 292 | ex | Elixir | lib/potionx/middleware/mutation_middleware.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | 31 | 2021-02-16T20:50:46.000Z | 2022-02-03T10:38:07.000Z | lib/potionx/middleware/mutation_middleware.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | 6 | 2021-04-07T21:50:20.000Z | 2022-02-06T21:54:04.000Z | lib/potionx/middleware/mutation_middleware.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | 4 | 2021-03-25T17:59:44.000Z | 2021-04-25T16:28:22.000Z | defmodule Potionx.Middleware.Mutation do
@behaviour Absinthe.Middleware
# Normalizes a mutation resolution value: struct results are wrapped in a
# `%{node: ...}` map and nil/false values become `%{}`, so the payload always
# resolves to a map shape.
def call(res, _) do
  value = res.value

  wrapped =
    if is_map(value) && Map.has_key?(value, :__struct__) do
      %{node: value}
    else
      value || %{}
    end

  %{res | value: wrapped}
end
end
| 22.461538 | 66 | 0.592466 |
0825b9d95d713dcae3bf6d6dd4bc913cf8757916 | 669 | ex | Elixir | test/support/factory.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | test/support/factory.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | test/support/factory.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | defmodule Rockelivery.Factory do
use ExMachina.Ecto, repo: Rockelivery.Repo
alias Rockelivery.User.User
# Raw request params (string keys) for creating a user through the API.
def user_params_factory do
  %{
    "age" => 22,
    "address" => "Rua Paulista 20º",
    "cep" => "12345678",
    "cpf" => "12345678900",
    "email" => "henriquecidoz@hotmail.com",
    "password" => "123456",
    "name" => "Joabe Henrique"
  }
end
# Persisted user fixture; the id is fixed so tests can assert on it directly.
def user_factory do
  %User{
    id: "13691c3b-3a27-4a19-bd1a-48f350bfb080",
    name: "Joabe Henrique",
    age: 22,
    address: "Rua Paulista 20º",
    cep: "12345678",
    cpf: "12345678900",
    email: "henriquecidoz@hotmail.com",
    password: "123456"
  }
end
end
| 23.892857 | 48 | 0.58296 |
0825e81c0238adad42c70216ca28ae5f740c9ade | 6,277 | ex | Elixir | lib/mix/tasks/hex.info.ex | hrzndhrn/hex | f74e2ed979e74130bdc4a6974660aa986333f33f | [
"Apache-2.0"
] | 824 | 2015-01-05T09:12:36.000Z | 2022-03-28T12:02:29.000Z | lib/mix/tasks/hex.info.ex | hrzndhrn/hex | f74e2ed979e74130bdc4a6974660aa986333f33f | [
"Apache-2.0"
] | 737 | 2015-01-01T05:48:46.000Z | 2022-03-29T12:56:12.000Z | lib/mix/tasks/hex.info.ex | hrzndhrn/hex | f74e2ed979e74130bdc4a6974660aa986333f33f | [
"Apache-2.0"
] | 220 | 2015-03-14T17:55:11.000Z | 2022-03-23T22:17:07.000Z | defmodule Mix.Tasks.Hex.Info do
use Mix.Task

@shortdoc "Prints Hex information"

@moduledoc """
Prints Hex package or system information.

    $ mix hex.info [PACKAGE [VERSION]]

If `package` is not given, print system information. This includes when
registry was last updated and current system version.

If `package` is given, print information about the package. This includes all
released versions and package metadata.

If `package` and `version` is given, print release information.

## Command line options

  * `--organization ORGANIZATION` - Set this for private packages belonging to an organization
"""

@behaviour Hex.Mix.TaskDescription

# Only --organization is accepted; the strict parser in run/1 rejects anything else.
@switches [organization: :string]
@impl true
def run(args) do
  Hex.start()

  {opts, positional} = Hex.OptionParser.parse!(args, strict: @switches)

  # Dispatch on the number of positional arguments: none prints system
  # information, one prints package information, two prints release
  # information.
  case positional do
    [] ->
      general()

    [package] ->
      package(opts[:organization], package)

    [package, version] ->
      release(opts[:organization], package, version)

    _ ->
      Mix.raise("""
      Invalid arguments, expected:
      mix hex.info [PACKAGE [VERSION]]
      """)
  end
end
@impl true
def tasks() do
  # One entry per usage form shown by `mix hex`.
  system_info = {"", "Prints Hex information"}
  package_info = {"PACKAGE [VERSION]", "Prints package information"}

  [system_info, package_info]
end
# Prints system information: the locally running Hex/Elixir/OTP versions and
# the Elixir/OTP versions Hex itself was built with, then checks for updates.
defp general() do
  Hex.Shell.info("Hex: #{Hex.version()}")
  Hex.Shell.info("Elixir: #{System.version()}")
  Hex.Shell.info("OTP: #{Hex.Utils.otp_version()}")
  Hex.Shell.info("")
  Hex.Shell.info("Built with: Elixir #{Hex.elixir_version()} and OTP #{Hex.otp_version()}")

  # The update checker needs the registry to be open; close it again when done.
  Hex.Registry.Server.open()
  Hex.UpdateChecker.check()
  Hex.Registry.Server.close()
end
# Fetches a package from the API and prints its details. Prints an error and
# sets a non-zero exit code when the package is unknown or the request fails.
defp package(organization, name) do
  # Authentication is only needed for private (organization) packages.
  auth = organization && Mix.Tasks.Hex.auth_info(:read)
  response = Hex.API.Package.get(organization, name, auth)

  case response do
    {:ok, {status, body, _headers}} when status in 200..299 ->
      print_package(body, locked_dep(name))

    {:ok, {404, _body, _headers}} ->
      Hex.Shell.error("No package with name #{name}")
      Mix.Tasks.Hex.set_exit_code(1)

    error ->
      Hex.Shell.error("Failed to retrieve package information")
      Hex.Utils.print_error_result(error)
      Mix.Tasks.Hex.set_exit_code(1)
  end
end
# Fetches a specific release and prints its details. Prints an error and sets
# a non-zero exit code when the release is unknown or the request fails.
defp release(organization, package, version) do
  # Authentication is only needed for private (organization) packages.
  auth = organization && Mix.Tasks.Hex.auth_info(:read)
  response = Hex.API.Release.get(organization, package, version, auth)

  case response do
    {:ok, {status, body, _headers}} when status in 200..299 ->
      print_release(organization, package, body)

    {:ok, {404, _body, _headers}} ->
      Hex.Shell.error("No release with name #{package} #{version}")
      Mix.Tasks.Hex.set_exit_code(1)

    error ->
      Hex.Shell.error("Failed to retrieve release information")
      Hex.Utils.print_error_result(error)
      Mix.Tasks.Hex.set_exit_code(1)
  end
end
# Prints a package summary: description, mix.exs config snippet, locked
# version (when the package is in the project's lock file), recent releases
# and metadata.
defp print_package(package, locked_package) do
  metadata = package["meta"]

  description = metadata["description"] || "No description provided"
  Hex.Shell.info(description <> "\n")

  Hex.Shell.info("Config: " <> package["configs"]["mix.exs"])
  print_locked_package(locked_package)

  releases = package["releases"] || []
  retired_versions = Map.keys(package["retirements"] || %{})
  Hex.Shell.info(["Releases: "] ++ format_releases(releases, retired_versions) ++ ["\n"])

  print_meta(metadata)
end
# Formats up to eight release versions for shell output, appending an
# ellipsis when more releases exist than are shown.
defp format_releases(releases, retirements) do
  {shown, hidden} = Enum.split(releases, 8)

  shown
  |> Enum.map(&format_version(&1, retirements))
  |> Enum.intersperse([", "])
  |> add_ellipsis(hidden)
end
# Renders a single release version as shell output chunks, highlighting
# retired versions in yellow with a "(retired)" suffix.
defp format_version(%{"version" => version}, retirements) do
  retired? = version in retirements

  if retired? do
    [:yellow, version, " (retired)", :reset]
  else
    [version]
  end
end
# Appends ", ..." when some releases were hidden from the listing.
defp add_ellipsis(output, hidden) do
  case hidden do
    [] -> output
    _more -> output ++ [", ..."]
  end
end
# Prints package metadata: licenses as a comma-separated list and links as a
# key/value block. Empty sections are omitted by the helpers.
defp print_meta(meta) do
  print_list(meta, "licenses")
  print_dict(meta, "links")
end
# Prints the details of a single release: retirement warning (if any),
# mix.exs config snippet, documentation URL, dependencies and publisher.
defp print_release(organization, package, release) do
  version = release["version"]

  print_retirement(release)
  Hex.Shell.info("Config: " <> release["configs"]["mix.exs"])

  if release["has_docs"] do
    Hex.Shell.info("Documentation at: #{Hex.Utils.hexdocs_url(organization, package, version)}")
  end

  if requirements = release["requirements"] do
    Hex.Shell.info("Dependencies:")
    Enum.each(requirements, &print_requirement/1)
  end

  print_publisher(release)
end

# Prints one dependency line, noting a differing OTP app name and optionality.
defp print_requirement({name, req}) do
  app_suffix =
    case req["app"] do
      nil -> nil
      ^name -> nil
      app -> " (app: #{app})"
    end

  optional_suffix = if req["optional"], do: " (optional)"

  Hex.Shell.info("  #{name} #{req["requirement"]}#{app_suffix}#{optional_suffix}")
end
# Prints the version from the project's lock file; silent when the package
# is not locked.
defp print_locked_package(nil), do: nil

defp print_locked_package(%{version: version}) do
  Hex.Shell.info(["Locked version: #{version}"])
end
# Warns about a retired release; does nothing visible for active releases.
defp print_retirement(%{"retirement" => nil}), do: ""

defp print_retirement(release) do
  info = release["retirement"]

  retirement = %{reason: info["reason"], message: info["message"]}

  Hex.Shell.warn([
    [:bright, "This version has been retired"],
    [:normal, ": "],
    [:normal, Hex.Utils.package_retirement_message(retirement)]
  ])
end
# Prints the release publisher's username, with the email in parentheses
# when the API exposed it.
defp print_publisher(release) do
  publisher = release["publisher"]
  username = publisher["username"]

  email_suffix =
    case publisher["email"] do
      nil -> ""
      email -> " (#{email})"
    end

  Hex.Shell.info("Published by: #{username}#{email_suffix}")
end
# Prints a metadata list entry (e.g. licenses) as "Name: a, b"; prints
# nothing when the list is absent or empty.
defp print_list(meta, name) do
  case Map.get(meta, name, []) do
    [] ->
      nil

    values ->
      Hex.Shell.info(String.capitalize(name) <> ": " <> Enum.join(values, ", "))
  end
end
# Prints a metadata dictionary entry (e.g. links) as an indented key/value
# block under a capitalized title; prints nothing when absent or empty.
defp print_dict(meta, name) do
  case Map.get(meta, name, []) do
    [] ->
      nil

    entries ->
      Hex.Shell.info(String.capitalize(name) <> ":")

      Enum.each(entries, fn {key, val} ->
        Hex.Shell.info("  #{key}: #{val}")
      end)
  end
end
# Pulls the locked entry for `package_name` out of the project's mix.lock,
# or nil when the dependency is not locked from Hex.
defp locked_dep(package_name) do
  Mix.Dep.Lock.read()
  |> Enum.find_value(fn {_app, info} ->
    locked = Hex.Utils.lock(info)

    if locked && locked.name == package_name, do: locked
  end)
end
end
| 27.897778 | 98 | 0.632308 |
0825eb78236fab5113f92a3ca36c9849fdce58df | 2,293 | ex | Elixir | clients/webmaster/lib/google_api/webmaster/v3/model/url_crawl_errors_sample.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/webmaster/lib/google_api/webmaster/v3/model/url_crawl_errors_sample.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/webmaster/lib/google_api/webmaster/v3/model/url_crawl_errors_sample.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Webmaster.V3.Model.UrlCrawlErrorsSample do
  @moduledoc """
  Contains information about specific crawl errors.

  ## Attributes

  - first_detected (DateTime.t): The time the error was first detected, in RFC 3339 format. Defaults to: `null`.
  - last_crawled (DateTime.t): The time when the URL was last crawled, in RFC 3339 format. Defaults to: `null`.
  - pageUrl (String.t): The URL of an error, relative to the site. Defaults to: `null`.
  - responseCode (integer()): The HTTP response code, if any. Defaults to: `null`.
  - urlDetails (UrlSampleDetails): Additional details about the URL, set only when calling get(). Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :first_detected => DateTime.t(),
          :last_crawled => DateTime.t(),
          :pageUrl => any(),
          :responseCode => any(),
          :urlDetails => GoogleApi.Webmaster.V3.Model.UrlSampleDetails.t()
        }

  # Timestamps are decoded into DateTime structs via `as:`; the remaining
  # fields keep the raw decoded JSON values. (Generated code — do not edit.)
  field(:first_detected, as: DateTime)
  field(:last_crawled, as: DateTime)
  field(:pageUrl)
  field(:responseCode)
  field(:urlDetails, as: GoogleApi.Webmaster.V3.Model.UrlSampleDetails)
end
# Delegates JSON decoding to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Webmaster.V3.Model.UrlCrawlErrorsSample do
  def decode(value, options) do
    GoogleApi.Webmaster.V3.Model.UrlCrawlErrorsSample.decode(value, options)
  end
end
# Delegates JSON encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Webmaster.V3.Model.UrlCrawlErrorsSample do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.216667 | 118 | 0.730048 |
0825ec553993a44626461f75da840d41d40ddf08 | 1,970 | ex | Elixir | clients/safe_browsing/lib/google_api/safe_browsing/v4/model/threat_list_descriptor.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/safe_browsing/lib/google_api/safe_browsing/v4/model/threat_list_descriptor.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/safe_browsing/lib/google_api/safe_browsing/v4/model/threat_list_descriptor.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.SafeBrowsing.V4.Model.ThreatListDescriptor do
  @moduledoc """
  Describes an individual threat list. A list is defined by three parameters:
  the type of threat posed, the type of platform targeted by the threat, and
  the type of entries in the list.

  ## Attributes

  * `platformType` (*type:* `String.t`, *default:* `nil`) - The platform type targeted by the list's entries.
  * `threatEntryType` (*type:* `String.t`, *default:* `nil`) - The entry types contained in the list.
  * `threatType` (*type:* `String.t`, *default:* `nil`) - The threat type posed by the list's entries.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :platformType => String.t(),
          :threatEntryType => String.t(),
          :threatType => String.t()
        }

  # All three fields are plain string enums from the Safe Browsing v4 API.
  # (Generated code — do not edit.)
  field(:platformType)
  field(:threatEntryType)
  field(:threatType)
end
# Delegates JSON decoding to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.SafeBrowsing.V4.Model.ThreatListDescriptor do
  def decode(value, options) do
    GoogleApi.SafeBrowsing.V4.Model.ThreatListDescriptor.decode(value, options)
  end
end
# Delegates JSON encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.SafeBrowsing.V4.Model.ThreatListDescriptor do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 35.818182 | 111 | 0.725381 |
0825fe6a6d29b52245be990cf44ac18582451b03 | 151 | ex | Elixir | lib/cgrates_web_jsonapi_web/controllers/tp_filter_import_job_controller.ex | max-konin/cgrates_web_jsonapi | e82690e343d790b0f77dea6699483fcb6fd8a162 | [
"MIT"
] | 2 | 2018-10-03T07:41:32.000Z | 2021-03-21T11:27:27.000Z | lib/cgrates_web_jsonapi_web/controllers/tp_filter_import_job_controller.ex | max-konin/cgrates_web_jsonapi | e82690e343d790b0f77dea6699483fcb6fd8a162 | [
"MIT"
] | 1 | 2018-10-31T04:55:59.000Z | 2018-10-31T04:55:59.000Z | lib/cgrates_web_jsonapi_web/controllers/tp_filter_import_job_controller.ex | max-konin/cgrates_web_jsonapi | e82690e343d790b0f77dea6699483fcb6fd8a162 | [
"MIT"
] | 5 | 2018-09-27T11:30:44.000Z | 2021-01-16T08:28:58.000Z | defmodule CgratesWebJsonapiWeb.TpFilterImportJobController do
use CgratesWebJsonapi.CsvImportJob, module: CgratesWebJsonapi.TariffPlans.TpFilter
end
| 37.75 | 84 | 0.89404 |
0826150390ec85c41f3bfe85d358adf37cd4dde8 | 279 | exs | Elixir | priv/repo/seeds.exs | emerayo/challenge | 93d41a667bf32aa4982cc0e19dbf0b7b2d9ec800 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | emerayo/challenge | 93d41a667bf32aa4982cc0e19dbf0b7b2d9ec800 | [
"MIT"
] | 3 | 2019-01-20T23:17:20.000Z | 2019-01-21T11:46:09.000Z | priv/repo/seeds.exs | emerayo/challenge | 93d41a667bf32aa4982cc0e19dbf0b7b2d9ec800 | [
"MIT"
] | null | null | null | alias Challenge.Account
alias Challenge.Repo

# Credentials for the default admin account created by this seed script.
email = "admin@bankapi.com"
password = "1234"

# NOTE(review): the raw password is stored verbatim in `encrypted_password` —
# presumably hashing happens elsewhere (or not at all); confirm before using
# this seed outside local development.
result = Repo.get_by Account, %{email: email, encrypted_password: password}

# Only insert the admin account when it does not already exist, so the seed
# script stays idempotent.
if result == nil do
  %Account{email: email, encrypted_password: password, admin: true} |> Repo.insert!()
end
| 25.363636 | 85 | 0.74552 |
082675216aab62b47fccc8c6f90d0ad62522d182 | 1,762 | exs | Elixir | test/oban/plugins/stager_test.exs | iautom8things/oban | 5f1dfc277c2933fdc0dada812dbbca31c6d55fa0 | [
"Apache-2.0"
] | null | null | null | test/oban/plugins/stager_test.exs | iautom8things/oban | 5f1dfc277c2933fdc0dada812dbbca31c6d55fa0 | [
"Apache-2.0"
] | null | null | null | test/oban/plugins/stager_test.exs | iautom8things/oban | 5f1dfc277c2933fdc0dada812dbbca31c6d55fa0 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Plugins.StagerTest do
use Oban.Case
alias Oban.Plugins.Stager
alias Oban.{PluginTelemetryHandler, Registry}
@moduletag :integration
test "descheduling jobs to make them available for execution" do
  events = [
    [:oban, :plugin, :start],
    [:oban, :plugin, :stop],
    [:oban, :plugin, :exception]
  ]

  # Forward plugin telemetry events to this test process so they can be
  # asserted on below.
  :telemetry.attach_many(
    "plugin-stager-handler",
    events,
    &PluginTelemetryHandler.handle/4,
    self()
  )

  then = DateTime.add(DateTime.utc_now(), -30)

  # Two jobs are already past their scheduled time; the third is 10s in the
  # future and must stay in the "scheduled" state.
  job_1 = insert!([ref: 1, action: "OK"], inserted_at: then, schedule_in: -9, queue: :alpha)
  job_2 = insert!([ref: 2, action: "OK"], inserted_at: then, schedule_in: -5, queue: :alpha)
  job_3 = insert!([ref: 3, action: "OK"], inserted_at: then, schedule_in: 10, queue: :alpha)

  start_supervised_oban!(plugins: [{Stager, interval: 10}])

  # Staging happens asynchronously, so retry the assertions with backoff.
  with_backoff(fn ->
    assert %{state: "available"} = Repo.reload(job_1)
    assert %{state: "available"} = Repo.reload(job_2)
    assert %{state: "scheduled"} = Repo.reload(job_3)
  end)

  # The stop event must report exactly the two past-due jobs as staged.
  assert_receive {:event, :start, %{system_time: _}, %{config: _, plugin: Stager}}
  assert_receive {:event, :stop, %{duration: _}, %{config: _, plugin: Stager, staged_count: 2}}
after
  :telemetry.detach("plugin-stager-handler")
end
test "translating poll_interval config into plugin usage" do
  # With default options the Stager plugin is started automatically.
  assert []
         |> start_supervised_oban!()
         |> Registry.whereis({:plugin, Stager})

  # A legacy :poll_interval option still results in a running Stager plugin.
  assert [poll_interval: 2000]
         |> start_supervised_oban!()
         |> Registry.whereis({:plugin, Stager})

  # Disabling plugins entirely wins over :poll_interval.
  refute [plugins: false, poll_interval: 2000]
         |> start_supervised_oban!()
         |> Registry.whereis({:plugin, Stager})
end
end
| 30.37931 | 97 | 0.632236 |
0826790ebead7625a9dbf167d75a70033a36c3c6 | 20,768 | ex | Elixir | lib/asteroid/token/access_token.ex | tanguilp/asteroid | 8e03221d365da7f03f82df192c535d3ba2101f4d | [
"Apache-2.0"
] | 36 | 2019-07-23T20:01:05.000Z | 2021-08-05T00:52:34.000Z | lib/asteroid/token/access_token.ex | tanguilp/asteroid | 8e03221d365da7f03f82df192c535d3ba2101f4d | [
"Apache-2.0"
] | 19 | 2019-08-23T19:04:50.000Z | 2021-05-07T22:12:25.000Z | lib/asteroid/token/access_token.ex | tanguilp/asteroid | 8e03221d365da7f03f82df192c535d3ba2101f4d | [
"Apache-2.0"
] | 3 | 2019-09-06T10:47:20.000Z | 2020-09-09T03:43:31.000Z | defmodule Asteroid.Token.AccessToken do
import Asteroid.Utils
alias Asteroid.Context
alias Asteroid.Client
alias Asteroid.Crypto
alias Asteroid.Token
@moduledoc """
Access token structure
## Field naming
The `data` field holds the token data. The following field names are standard and are used
by Asteroid:
- `"exp"`: the expiration unix timestamp of the access token
- `"sub"`: the `t:Asteroid.Subject.id()` of the access token
- `"client_id"`: the `t:Asteroid.Client.id()` of the access token
- `"scope"`: a list of `OAuth2Utils.Scope.scope()` scopes granted to the refresh token
- `"device_id"`: the `t:Asteroid.Device.id()` of the access token
- `"status"`: a `String.t()` for the status of the token. A token that has been revoked is not
necessarily still present in the token store (e.g. for stateful tokens it will be probably
deleted). Optionally one of:
- `"active"`: active token
- `"revoked"`: revoked token
- `"__asteroid_oauth2_initial_flow"`: the initial `t:Asteroid.OAuth2.flow_str/0` that led to
the issuance of this token
- `"__asteroid_oidc_authenticated_session_id"`: the `t:Asteroid.OIDC.AuthenticatedSession.id/0`
, if any
- `"__asteroid_oidc_claims"`: the claims that were requested, if any
"""
# An access token cannot exist without an id, a serialization format and a
# data map; the signing fields are only used for the :jws format.
@enforce_keys [:id, :serialization_format, :data]

defstruct [:id, :refresh_token_id, :serialization_format, :signing_key, :signing_alg, :data]

@type id :: binary()

@type t :: %__MODULE__{
        id: __MODULE__.id(),
        refresh_token_id: binary() | nil,
        serialization_format: Asteroid.Token.serialization_format(),
        signing_key: Asteroid.Crypto.Key.name() | nil,
        signing_alg: Asteroid.Crypto.Key.jws_alg() | nil,
        data: map()
      }
@doc """
Creates a new access token

## Options
- `:id`: `String.t()` id, **mandatory**
- `:refresh_token_id`: the `t:Asteroid.Token.RefreshToken.id/0` of the refresh token associated
to this access token if any. Defaults to `nil`
- `:data`: a data `map()`
- `:serialization_format`: an `t:Asteroid.Token.serialization_format/0` atom, defaults to
`:opaque`
- `:signing_key`: an `Asteroid.Crypto.Key.name()` for the signing key
"""
@spec new(Keyword.t()) :: t()
def new(opts) do
  # The id is the only option without a usable default.
  id = opts[:id] || raise("Missing access token id")

  %__MODULE__{
    id: id,
    refresh_token_id: opts[:refresh_token_id] || nil,
    data: opts[:data] || %{},
    serialization_format: opts[:serialization_format] || :opaque,
    signing_key: opts[:signing_key]
  }
end
@doc """
Generates a new access token with a random id

## Options
- `:refresh_token_id`: the `t:Asteroid.Token.RefreshToken.id/0` of the refresh token associated
to this access token if any. Defaults to `nil`
- `:serialization_format`: an `t:Asteroid.Token.serialization_format/0` atom, defaults to
`:opaque`
- `:signing_key`: an `Asteroid.Crypto.Key.name()` for the signing key
- `:signing_alg`: an `Asteroid.Crypto.Key.jws_alg()` for the signing algorithm
"""
@spec gen_new(Keyword.t()) :: t()
def gen_new(opts \\ []) do
  %__MODULE__{
    id: secure_random_b64(20),
    # The documented option is `:refresh_token_id` (as accepted by `new/1`);
    # `:refresh_token` is still read as a fallback for backward compatibility
    # with callers relying on the previous behaviour.
    refresh_token_id: opts[:refresh_token_id] || opts[:refresh_token],
    data: %{},
    serialization_format: opts[:serialization_format] || :opaque,
    signing_key: opts[:signing_key],
    signing_alg: opts[:signing_alg]
  }
end
@doc """
Gets an access token from the access token store

Unlike the `c:Asteroid.ObjectStore.AccessToken.get/2`, this function returns
`{:error, Exception.t()}` if the access token is not found in the token store.

## Options
- `:check_active`: determines whether the validity of the access token should be checked.
Defaults to `true`. For validity checking details, see `active?/1`
"""
@spec get(id(), Keyword.t()) :: {:ok, t()} | {:error, Exception.t()}
def get(access_token_id, opts \\ [check_active: true]) do
  store_module = astrenv(:object_store_access_token)[:module]
  store_opts = astrenv(:object_store_access_token)[:opts] || []

  case store_module.get(access_token_id, store_opts) do
    {:ok, nil} ->
      {:error, invalid_token_error(access_token_id, "not found in the token store")}

    {:ok, access_token} ->
      # The liveness check is only skipped when explicitly disabled.
      if opts[:check_active] != true or active?(access_token) do
        {:ok, access_token}
      else
        {:error, invalid_token_error(access_token_id, "inactive token")}
      end

    {:error, _} = error ->
      error
  end
end

# Builds the error returned for missing or inactive access tokens.
defp invalid_token_error(id, reason) do
  Token.InvalidTokenError.exception(sort: "access token", reason: reason, id: id)
end
@doc """
Stores an access token

Only access tokens with the `:opaque` serialization format are persisted; any
other format is returned unchanged.
"""
@spec store(t(), Context.t()) :: {:ok, t()} | {:error, any()}
def store(access_token, ctx \\ %{})

def store(%__MODULE__{serialization_format: :opaque} = access_token, ctx) do
  store_module = astrenv(:object_store_access_token)[:module]
  store_opts = astrenv(:object_store_access_token)[:opts] || []

  # Let the configured callback transform the token before persisting it.
  callback = astrenv(:object_store_access_token_before_store_callback)
  access_token = callback.(access_token, ctx)

  case store_module.put(access_token, store_opts) do
    :ok ->
      {:ok, access_token}

    {:error, _} = error ->
      error
  end
end

def store(access_token, _ctx), do: {:ok, access_token}
@doc """
Deletes an access token from the access token store
"""
@spec delete(t() | id()) :: :ok | {:error, any()}
def delete(%__MODULE__{id: id}), do: delete(id)

def delete(access_token_id) do
  store_module = astrenv(:object_store_access_token)[:module]
  store_opts = astrenv(:object_store_access_token)[:opts] || []

  store_module.delete(access_token_id, store_opts)
end
@doc """
Puts a value into the `data` field of an access token

If the value is `nil`, the access token is returned unchanged and no field
is added.
"""
@spec put_value(t(), any(), any()) :: t()
def put_value(access_token, _key, nil), do: access_token

def put_value(access_token, key, value) do
  update_in(access_token.data, &Map.put(&1, key, value))
end
@doc """
Removes a value from the `data` field of an access token

If the value does not exist, the access token is returned unchanged.
"""
@spec delete_value(t(), any()) :: t()
def delete_value(access_token, key) do
  update_in(access_token.data, &Map.delete(&1, key))
end
@doc """
Serializes the access token, using its inner `t:Asteroid.Token.serialization_format/0`
information

Supports serialization to `:opaque` and `:jws` serialization formats.

In case of the serialization to the `jws` format:
- if the signing algorithm was set, uses this algorithm
- otherwise uses the default signer of `JOSE.JWT.sign/2`
"""
@spec serialize(t()) :: String.t()
def serialize(%__MODULE__{id: id, serialization_format: :opaque}), do: id

def serialize(%__MODULE__{serialization_format: :jws} = access_token) do
  # Internal `"__asteroid..."` bookkeeping entries are never emitted as claims.
  claims =
    for {key, value} <- access_token.data,
        not match?("__asteroid" <> _, key),
        into: %{},
        do: {key, value}

  {:ok, jwk} = Crypto.Key.get(access_token.signing_key)

  signed =
    case access_token.signing_alg do
      nil ->
        # No algorithm configured: let JOSE pick its default signer.
        JOSE.JWT.sign(jwk, claims)

      alg ->
        JOSE.JWT.sign(jwk, JOSE.JWS.from_map(%{"alg" => alg}), claims)
    end

  signed
  |> JOSE.JWS.compact()
  |> elem(1)
end
@doc """
Returns `true` if the token is active, `false` otherwise

The following data, *when set*, are used to determine that a token is active:
- `"nbf"`: must be lower than current time
- `"exp"`: must be higher than current time
- `"status"`: must not be `"revoked"`
"""
@spec active?(t()) :: boolean()
def active?(access_token) do
  data = access_token.data

  # `and` preserves short-circuiting: later checks only run when the
  # earlier ones pass.
  nbf_ok?(data) and exp_ok?(data) and not_revoked?(data)

  # FIXME: implement the following items from https://tools.ietf.org/html/rfc7662#section-4
  #    o  If the token has been signed, the authorization server MUST
  #  validate the signature.
  #    o  If the token can be used only at certain resource servers, the
  #  authorization server MUST determine whether or not the token can
  #  be used at the resource server making the introspection call.
end

# "nbf" (not before), when present, must be in the past.
defp nbf_ok?(data), do: is_nil(data["nbf"]) or data["nbf"] < now()

# "exp" (expiration), when present, must be in the future.
defp exp_ok?(data), do: is_nil(data["exp"]) or data["exp"] > now()

# A "status" of "revoked" marks the token as inactive.
defp not_revoked?(data), do: is_nil(data["status"]) or data["status"] != "revoked"
@doc """
Returns the access token lifetime

The base lifetime is resolved by `lifetime_for_client/1`: a per-client
attribute for the flow, then the per-flow configuration option, then
#{Asteroid.Config.link_to_option(:oauth2_access_token_lifetime)} (defaulting
to `0`).

When the context carries granted scopes, the result is additionally capped by
the maximum access token lifetime allowed by the scope configuration of the
flow.
"""
@spec lifetime(Context.t()) :: non_neg_integer()
def lifetime(%{flow: flow, granted_scopes: granted_scopes} = ctx) do
  scope_config = Asteroid.OAuth2.Scope.configuration_for_flow(flow)

  case Asteroid.OAuth2.Scope.max_access_token_lifetime(granted_scopes, scope_config) do
    nil ->
      lifetime_for_client(ctx)

    cap when is_integer(cap) ->
      min(lifetime_for_client(ctx), cap)
  end
end

# Context without granted scopes: only the client/flow configuration applies.
def lifetime(ctx) do
  lifetime_for_client(ctx)
end
@doc """
Returns the access token lifetime configured for the client

Looks up the per-client flow attribute first, then falls back to the
per-flow configuration option, then to
#{Asteroid.Config.link_to_option(:oauth2_access_token_lifetime)} (defaulting
to `0`).
"""
@spec lifetime_for_client(Context.t()) :: non_neg_integer()
def lifetime_for_client(%{flow: flow, client: client}) do
  attr = client_lifetime_attribute(flow)

  client = Client.fetch_attributes(client, [attr])

  case client.attrs[attr] do
    lifetime when is_integer(lifetime) ->
      lifetime

    _ ->
      astrenv(flow_lifetime_conf_opt(flow), astrenv(:oauth2_access_token_lifetime, 0))
  end
end

# Client attribute holding a per-client access token lifetime override for
# the given flow.
defp client_lifetime_attribute(:ropc), do: "__asteroid_oauth2_flow_ropc_access_token_lifetime"

defp client_lifetime_attribute(:client_credentials),
  do: "__asteroid_oauth2_flow_client_credentials_access_token_lifetime"

defp client_lifetime_attribute(:authorization_code),
  do: "__asteroid_oauth2_flow_authorization_code_access_token_lifetime"

defp client_lifetime_attribute(:implicit),
  do: "__asteroid_oauth2_flow_implicit_access_token_lifetime"

defp client_lifetime_attribute(:device_authorization),
  do: "__asteroid_oauth2_flow_device_authorization_access_token_lifetime"

defp client_lifetime_attribute(:oidc_authorization_code),
  do: "__asteroid_oidc_flow_authorization_code_access_token_lifetime"

defp client_lifetime_attribute(:oidc_implicit),
  do: "__asteroid_oidc_flow_implicit_access_token_lifetime"

defp client_lifetime_attribute(:oidc_hybrid),
  do: "__asteroid_oidc_flow_hybrid_access_token_lifetime"

# Configuration option holding the per-flow access token lifetime.
defp flow_lifetime_conf_opt(:ropc), do: :oauth2_flow_ropc_access_token_lifetime

defp flow_lifetime_conf_opt(:client_credentials),
  do: :oauth2_flow_client_credentials_access_token_lifetime

defp flow_lifetime_conf_opt(:authorization_code),
  do: :oauth2_flow_authorization_code_access_token_lifetime

defp flow_lifetime_conf_opt(:implicit), do: :oauth2_flow_implicit_access_token_lifetime

defp flow_lifetime_conf_opt(:device_authorization),
  do: :oauth2_flow_device_authorization_access_token_lifetime

defp flow_lifetime_conf_opt(:oidc_authorization_code),
  do: :oidc_flow_authorization_code_access_token_lifetime

defp flow_lifetime_conf_opt(:oidc_implicit), do: :oidc_flow_implicit_access_token_lifetime

defp flow_lifetime_conf_opt(:oidc_hybrid), do: :oidc_flow_hybrid_access_token_lifetime
@doc """
Returns the serialization format for an access token

Serialization format is necessarily `:opaque`, except for access tokens for which the
following rules apply (<FLOW> is to be replaced by a `t:Asteroid.OAuth2.flow_str/0`):
- if the `__asteroid_oauth2_flow_<FLOW>_access_token_serialization_format` client attribute
(`__asteroid_oidc_flow_<FLOW>_...` for OIDC flows) is set to `"jws"`, returns `:jws`
- otherwise, if the `:oauth2_flow_<FLOW>_access_token_serialization_format` configuration
option is set, returns this value
- otherwise, returns the value of the
#{Asteroid.Config.link_to_option(:oauth2_access_token_serialization_format)} configuration
option
- otherwise, returns `:opaque`
"""
@spec serialization_format(Context.t()) :: Asteroid.Token.serialization_format()
def serialization_format(%{flow: flow, client: client}) do
  # The attribute name must be selected per flow: OIDC flows use the
  # "__asteroid_oidc_flow_..." prefix, so interpolating the flow atom into a
  # single "__asteroid_oauth2_flow_#{flow}_..." template would produce wrong
  # names (e.g. "...oauth2_flow_oidc_authorization_code..." instead of
  # "...oidc_flow_authorization_code..."). Previously this case expression's
  # result was discarded and the interpolated name was used instead.
  attr =
    case flow do
      :ropc ->
        "__asteroid_oauth2_flow_ropc_access_token_serialization_format"

      :client_credentials ->
        "__asteroid_oauth2_flow_client_credentials_access_token_serialization_format"

      :authorization_code ->
        "__asteroid_oauth2_flow_authorization_code_access_token_serialization_format"

      :implicit ->
        "__asteroid_oauth2_flow_implicit_access_token_serialization_format"

      :device_authorization ->
        "__asteroid_oauth2_flow_device_authorization_access_token_serialization_format"

      :oidc_authorization_code ->
        "__asteroid_oidc_flow_authorization_code_access_token_serialization_format"

      :oidc_implicit ->
        "__asteroid_oidc_flow_implicit_access_token_serialization_format"

      :oidc_hybrid ->
        "__asteroid_oidc_flow_hybrid_access_token_serialization_format"
    end

  client = Client.fetch_attributes(client, [attr])

  if client.attrs[attr] == "jws" do
    :jws
  else
    conf_opt =
      case flow do
        :ropc ->
          :oauth2_flow_ropc_access_token_serialization_format

        :client_credentials ->
          :oauth2_flow_client_credentials_access_token_serialization_format

        :authorization_code ->
          :oauth2_flow_authorization_code_access_token_serialization_format

        :implicit ->
          :oauth2_flow_implicit_access_token_serialization_format

        :device_authorization ->
          :oauth2_flow_device_authorization_access_token_serialization_format

        :oidc_authorization_code ->
          :oidc_flow_authorization_code_access_token_serialization_format

        :oidc_implicit ->
          :oidc_flow_implicit_access_token_serialization_format

        :oidc_hybrid ->
          :oidc_flow_hybrid_access_token_serialization_format
      end

    astrenv(conf_opt, astrenv(:oauth2_access_token_serialization_format, :opaque))
  end
end
@doc """
Returns the signing key name for an access token
The following rules apply (<FLOW> is to be replace by a `t:Asteroid.OAuth2.flow_str()/0`):
- if the `__asteroid_oauth2_flow_<FLOW>_access_token_signing_key` is set, returns
this value
- otherwise, if the `:oauth2_flow_<FLOW>_access_token_signing_key` is set, returns
this value
- otherwise, returns the value of the
#{Asteroid.Config.link_to_option(:oauth2_access_token_signing_key)} configuration
option
- otherwise, returns `nil`
"""
@spec signing_key(Context.t()) :: Asteroid.Crypto.Key.name()
def signing_key(%{flow: flow, client: client}) do
attr =
case flow do
:ropc ->
"__asteroid_oauth2_flow_ropc_access_token_signing_key"
:client_credentials ->
"__asteroid_oauth2_flow_client_credentials_access_token_signing_key"
:authorization_code ->
"__asteroid_oauth2_flow_authorization_code_access_token_signing_key"
:implicit ->
"__asteroid_oauth2_flow_implicit_access_token_signing_key"
:device_authorization ->
"__asteroid_oauth2_flow_device_authorization_access_token_signing_key"
:oidc_authorization_code ->
"__asteroid_oidc_flow_authorization_code_access_token_signing_key"
:oidc_implicit ->
"__asteroid_oidc_flow_implicit_access_token_signing_key"
:oidc_hybrid ->
"__asteroid_oidc_flow_hybrid_access_token_signing_key"
end
client = Client.fetch_attributes(client, [attr])
if client.attrs[attr] != nil do
client.attrs[attr]
else
conf_opt =
case flow do
:ropc ->
:oauth2_flow_ropc_access_token_signing_key
:client_credentials ->
:oauth2_flow_client_credentials_access_token_signing_key
:authorization_code ->
:oauth2_flow_authorization_code_access_token_signing_key
:implicit ->
:oauth2_flow_implicit_access_token_signing_key
:device_authorization ->
:oauth2_flow_device_authorization_access_token_signing_key
:oidc_authorization_code ->
:oidc_flow_authorization_code_access_token_signing_key
:oidc_implicit ->
:oidc_flow_implicit_access_token_signing_key
:oidc_hybrid ->
:oidc_flow_hybrid_access_token_signing_key
end
astrenv(conf_opt, astrenv(:oauth2_access_token_signing_key))
end
end
@doc """
Returns the signing algortihm for an access token
The following rules apply (<FLOW> is to be replace by a `t:Asteroid.OAuth2.flow_str()/0`):
- if the `__asteroid_oauth2_flow_<FLOW>_access_token_signing_alg` is set, returns
this value
- otherwise, if the `:oauth2_flow_<FLOW>_access_token_signing_alg` is set, returns
this value
- otherwise, returns the value of the
#{Asteroid.Config.link_to_option(:oauth2_access_token_signing_alg)} configuration
option
- otherwise, returns `nil`
"""
@spec signing_alg(Context.t()) :: Asteroid.Crypto.Key.jws_alg()
def signing_alg(%{flow: flow, client: client}) do
attr =
case flow do
:ropc ->
"__asteroid_oauth2_flow_ropc_access_token_signing_alg"
:client_credentials ->
"__asteroid_oauth2_flow_client_credentials_access_token_signing_alg"
:authorization_code ->
"__asteroid_oauth2_flow_authorization_code_access_token_signing_alg"
:implicit ->
"__asteroid_oauth2_flow_implicit_access_token_signing_alg"
:device_authorization ->
"__asteroid_oauth2_flow_device_authorization_access_token_signing_alg"
:oidc_authorization_code ->
"__asteroid_oidc_flow_authorization_code_access_token_signing_alg"
:oidc_implicit ->
"__asteroid_oidc_flow_implicit_access_token_signing_alg"
:oidc_hybrid ->
"__asteroid_oidc_flow_hybrid_access_token_signing_alg"
end
client = Client.fetch_attributes(client, [attr])
if client.attrs[attr] != nil do
client.attrs[attr]
else
conf_opt =
case flow do
:ropc ->
:oauth2_flow_ropc_access_token_signing_alg
:client_credentials ->
:oauth2_flow_client_credentials_access_token_signing_alg
:authorization_code ->
:oauth2_flow_authorization_code_access_token_signing_alg
:implicit ->
:oauth2_flow_implicit_access_token_signing_alg
:device_authorization ->
:oauth2_flow_device_authorization_access_token_signing_alg
:oidc_authorization_code ->
:oidc_flow_authorization_code_access_token_signing_alg
:oidc_implicit ->
:oidc_flow_implicit_access_token_signing_alg
:oidc_hybrid ->
:oidc_flow_hybrid_access_token_signing_alg
end
astrenv(conf_opt, astrenv(:oauth2_access_token_signing_alg))
end
end
end
| 32.45 | 97 | 0.692363 |
08269615e6b12c9d8e7ce4560dc7873f958a506e | 1,783 | exs | Elixir | mix.exs | Qqwy/elixir-map_diff | 7a0e0292d44bfc9a87bccfe1430c3867caab2592 | [
"MIT"
] | 55 | 2019-08-19T04:14:39.000Z | 2022-03-28T08:10:04.000Z | mix.exs | Qqwy/elixir-map_diff | 7a0e0292d44bfc9a87bccfe1430c3867caab2592 | [
"MIT"
] | 46 | 2019-08-17T20:52:52.000Z | 2022-03-25T11:15:12.000Z | mix.exs | Qqwy/elixir_map_diff | 7a0e0292d44bfc9a87bccfe1430c3867caab2592 | [
"MIT"
] | 7 | 2016-12-20T20:39:16.000Z | 2019-08-05T19:05:49.000Z | defmodule MapDiff.Mixfile do
use Mix.Project
# Mix project configuration for the :map_diff Hex package.
def project do
  [
    app: :map_diff,
    version: "1.3.3",
    # The paths below are only needed when building inside an umbrella project.
    # build_path: "../../_build",
    # config_path: "../../config/config.exs",
    # deps_path: "../../deps",
    # lockfile: "../../mix.lock",
    elixir: "~> 1.3",
    # `Mix.env/0` is a function: call it with parentheses to avoid the
    # variable/zero-arity-call ambiguity warning on modern Elixir.
    build_embedded: Mix.env() == :prod,
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    description: description(),
    package: package()
  ]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
  # Specify extra applications you'll use from Erlang/Elixir
  # (:logger is needed so log output works at runtime).
  [extra_applications: [:logger]]
end
# Dependencies can be Hex packages:
#
#   {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
#   {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# To depend on another app inside the umbrella:
#
#   {:my_app, in_umbrella: true}
#
# Type "mix help deps" for more examples and options
defp deps do
  # All dependencies here are development/documentation tooling only; the
  # library itself has no runtime dependencies.
  [
    {:earmark, ">= 0.0.0", only: [:dev, :docs]}, # Markdown, dependency of ex_doc
    {:ex_doc, "~> 0.11", only: [:dev, :docs]}, # Documentation for Hex.pm
    {:inch_ex, ">= 0.0.0", only: [:docs]}, # Inch CI documentation quality test.
  ]
end
# Short package description shown on Hex.pm.
defp description do
  "Calculates the difference between two (nested) maps,\n" <>
    "and returns a map representing the patch of changes.\n"
end
# Hex package metadata: files shipped in the package, maintainers,
# license and project links.
defp package do
  [# These are the default files included in the package
   name: :map_diff,
   files: ["lib", "mix.exs", "README*", "LICENSE"],
   maintainers: ["Wiebe-Marten Wijnja/Qqwy"],
   licenses: ["MIT"],
   links: %{"GitHub" => "https://github.com/Qqwy/elixir-map_diff/"}
  ]
end
end
| 25.471429 | 86 | 0.591139 |
0826b8cf03805ad2586ec052a250878cd7656c5b | 707 | ex | Elixir | lib/brahman/dns/header.ex | shun159/brahman | dfa04d757c4e4422b00bdc97a694d2d6637708f9 | [
"Beerware"
] | null | null | null | lib/brahman/dns/header.ex | shun159/brahman | dfa04d757c4e4422b00bdc97a694d2d6637708f9 | [
"Beerware"
] | null | null | null | lib/brahman/dns/header.ex | shun159/brahman | dfa04d757c4e4422b00bdc97a694d2d6637708f9 | [
"Beerware"
] | null | null | null | defmodule Brahman.Dns.Header do
@moduledoc false
# Injects, into the using module, Elixir record definitions for every record
# found in the :dns library headers, plus DNS RCODE constants as module
# attributes. All of this happens at the caller's compile time.
defmacro __using__(_which) do
  quote location: :keep do
    require Record

    # Header files (from the :dns dependency) to extract records from.
    @include_libs [
      "dns/include/dns_terms.hrl",
      "dns/include/dns_records.hrl"
    ]

    # Define one Elixir record per Erlang record found in each header.
    for lib_path <- @include_libs do
      for {name, fields} <- Record.extract_all(from_lib: lib_path) do
        Record.defrecord(name, fields)
      end
    end

    # Standard DNS response codes (RCODEs) as numeric constants.
    @dns_rcode_formerr 1
    @dns_rcode_servfail 2
    @dns_rcode_nxdomain 3
    @dns_rcode_notimp 4
    @dns_rcode_refused 5
    @dns_rcode_yxdomain 6
    @dns_rcode_yxrrset 7
    @dns_rcode_nxrrset 8
    @dns_rcode_notauth 9
    @dns_rcode_notzone 10
  end
end
end
| 22.09375 | 71 | 0.639321 |
0826c9817471c7789f5ea36b82ea93aa53727e0e | 3,297 | exs | Elixir | test/flix_web/controllers/user_confirmation_controller_test.exs | conradwt/flix-elixir | e4d6bf6fd79be12fbed6fb6250f78e929247c1a4 | [
"MIT"
] | 3 | 2021-03-21T23:52:16.000Z | 2021-06-02T03:47:00.000Z | test/flix_web/controllers/user_confirmation_controller_test.exs | conradwt/flix-elixir | e4d6bf6fd79be12fbed6fb6250f78e929247c1a4 | [
"MIT"
] | 44 | 2021-04-09T04:04:13.000Z | 2022-03-29T06:29:37.000Z | test/flix_web/controllers/user_confirmation_controller_test.exs | conradwt/flix-elixir | e4d6bf6fd79be12fbed6fb6250f78e929247c1a4 | [
"MIT"
] | null | null | null | defmodule FlixWeb.UserConfirmationControllerTest do
use FlixWeb.ConnCase, async: true
alias Flix.Accounts
alias Flix.Repo
import Flix.AccountsFixtures
# Every test in this module gets a freshly created user in its context.
setup do
  %{user: user_fixture()}
end
describe "GET /users/confirm" do
test "renders the confirmation page", %{conn: conn} do
conn = get(conn, Routes.user_confirmation_path(conn, :new))
response = html_response(conn, 200)
assert response =~ "<h1>Resend confirmation instructions</h1>"
end
end
describe "POST /users/confirm" do
@tag :capture_log
test "sends a new confirmation token", %{conn: conn, user: user} do
conn =
post(conn, Routes.user_confirmation_path(conn, :create), %{
"user" => %{"email" => user.email}
})
assert redirected_to(conn) == "/"
assert get_flash(conn, :notice) =~ "If your email is in our system"
assert Repo.get_by!(Accounts.UserToken, user_id: user.id).context == "confirm"
end
test "does not send confirmation token if account is confirmed", %{conn: conn, user: user} do
Repo.update!(Accounts.User.confirm_changeset(user))
conn =
post(conn, Routes.user_confirmation_path(conn, :create), %{
"user" => %{"email" => user.email}
})
assert redirected_to(conn) == "/"
assert get_flash(conn, :notice) =~ "If your email is in our system"
refute Repo.get_by(Accounts.UserToken, user_id: user.id)
end
test "does not send confirmation token if email is invalid", %{conn: conn} do
conn =
post(conn, Routes.user_confirmation_path(conn, :create), %{
"user" => %{"email" => "unknown@example.com"}
})
assert redirected_to(conn) == "/"
assert get_flash(conn, :notice) =~ "If your email is in our system"
assert Repo.all(Accounts.UserToken) == []
end
end
describe "GET /users/confirm/:token" do
test "confirms the given token once", %{conn: conn, user: user} do
token =
extract_user_token(fn url ->
Accounts.deliver_user_confirmation_instructions(user, url)
end)
conn = get(conn, Routes.user_confirmation_path(conn, :confirm, token))
assert redirected_to(conn) == "/"
assert get_flash(conn, :notice) =~ "Account confirmed successfully"
assert Accounts.get_user!(user.id).confirmed_at
refute get_session(conn, :user_token)
assert Repo.all(Accounts.UserToken) == []
# When not logged in
conn = get(conn, Routes.user_confirmation_path(conn, :confirm, token))
assert redirected_to(conn) == "/"
assert get_flash(conn, :error) =~ "Account confirmation link is invalid or it has expired"
# When logged in
conn =
build_conn()
|> log_in_user(user)
|> get(Routes.user_confirmation_path(conn, :confirm, token))
assert redirected_to(conn) == "/"
refute get_flash(conn, :error)
end
test "does not confirm email with invalid token", %{conn: conn, user: user} do
conn = get(conn, Routes.user_confirmation_path(conn, :confirm, "oops"))
assert redirected_to(conn) == "/"
assert get_flash(conn, :error) =~ "Account confirmation link is invalid or it has expired"
refute Accounts.get_user!(user.id).confirmed_at
end
end
end
| 34.705263 | 97 | 0.646648 |
0826f4525c5acc1e9bdf3187eba8d7d2383c3ae2 | 5,513 | ex | Elixir | lib/application_auth/accounts.ex | oscarjg/application_auth | c4f5bb3eb4a29db2e11714829e495c880b2adf28 | [
"MIT"
] | null | null | null | lib/application_auth/accounts.ex | oscarjg/application_auth | c4f5bb3eb4a29db2e11714829e495c880b2adf28 | [
"MIT"
] | null | null | null | lib/application_auth/accounts.ex | oscarjg/application_auth | c4f5bb3eb4a29db2e11714829e495c880b2adf28 | [
"MIT"
] | null | null | null | defmodule ApplicationAuth.Accounts do
@moduledoc """
Accounts context to handle accounts actions as abstraction
"""
import Ecto.Query
alias ApplicationAuth.Accounts.{User, Application, ApplicationCredential}
alias ApplicationAuth.Repo
@spec create_registered_user(Map.t()) ::
{:ok, ApplicationAuth.Accounts.User.t()} | {:error, Ecto.Changeset.t()}
@spec get_user(Integer.t()) :: ApplicationAuth.Accounts.User.t() | nil
@spec get_user_by_email_credential(String.t()) :: ApplicationAuth.Accounts.User.t() | nil
@spec authenticate_user_by_email_and_password(String.t(), String.t()) ::
{:ok, ApplicationAuth.Accounts.User.t()} | {:error, Atom.t()}
@spec get_user_application!(ApplicationAuth.Accounts.User.t(), Integer.t()) ::
{:ok, ApplicationAuth.Accounts.Application.t()} | Ecto.NoResultsError.t()
@spec create_application(ApplicationAuth.Accounts.User.t(), Map.t()) ::
{:ok, ApplicationAuth.Accounts.Application.t()} | {:error, Ecto.Changeset.t()}
@spec list_user_applications(ApplicationAuth.Accounts.User.t()) :: Enum.t()
@spec authenticate_application(String.t()) ::
{:ok, ApplicationAuth.Accounts.Application.t()} | {:error, Atom.t()}
@spec authenticate_application(String.t(), String.t()) ::
{:ok, ApplicationAuth.Accounts.Application.t()} | {:error, Atom.t()}
@spec update_application(ApplicationAuth.Accounts.Application.t(), Map.t()) :: {:ok, ApplicationAuth.Accounts.Application.t()} | {:error, Ecto.Changeset.t()}
########### User #####################################################################################################
@doc """
Create a new user with credentials
"""
def create_registered_user(attr \\ %{}) do
%User{}
|> User.registration_changeset(attr)
|> Repo.insert()
end
@doc """
Get a user by id
"""
def get_user(id) when is_integer(id) do
Repo.get(User, id)
end
@doc """
Find a user by email
"""
def get_user_by_email_credential(email) when is_binary(email) do
from(u in User, join: uc in assoc(u, :credential), where: uc.email == ^email)
|> Repo.one()
end
@doc """
Find a user by email and password
"""
def authenticate_user_by_email_and_password(email, password)
when is_binary(email) and is_binary(password) do
user =
get_user_by_email_credential(email)
|> preload_credential()
cond do
user && user.is_active && check_password(password, user.credential.password_hash) ->
{:ok, user}
user ->
{:error, :unauthorized}
true ->
{:error, :not_found}
end
end
########### Application ##############################################################################################
@doc """
Create a new application with credentials
"""
def create_application(%User{} = user, attr \\ %{}) do
%Application{}
|> Application.registration_changeset(attr)
|> put_user_assoc(user)
|> Repo.insert()
end
@doc """
List all user applications
"""
def list_user_applications(%User{} = user) do
Application
|> user_application_query(user)
|> Repo.all()
|> preload_credential()
end
@doc """
Get application by user
"""
def get_user_application!(%User{} = user, app_id) do
from(a in Application, where: a.id == ^app_id)
|> user_application_query(user)
|> Repo.one!()
|> preload_credential()
end
# Narrows an application query to rows owned by the given user.
defp user_application_query(query, %User{id: user_id}) do
  from(a in query, where: a.user_id == ^user_id)
end
@doc """
Update application
"""
def update_application(%Application{} = application, attrs) do
application
|> Application.updated_changeset(attrs)
|> Repo.update()
end
@doc """
Find an application with app_key and allowed hosts
"""
def authenticate_application(app_key, allowed_hosts)
when is_binary(app_key) and is_binary(allowed_hosts) do
case authenticate_application(app_key) do
{:ok, app} ->
cond do
app && check_application_hosts(app.credential, allowed_hosts) ->
{:ok, app}
app ->
{:error, :unauthorized}
true ->
{:error, :not_found}
end
_ -> {:error, :not_found}
end
end
@doc """
Find an application with app_key and allowed hosts
"""
def authenticate_application(app_key) when is_binary(app_key) do
app =
from(u in Application,
join: ac in assoc(u, :credential),
where: ac.app_key == ^app_key
)
|> Repo.one()
|> preload_credential()
|> preload_user()
cond do
app ->
{:ok, app}
true ->
{:error, :not_found}
end
end
######################################################################################################################
# Checks whether a request host is allowed by the credential's host list.
# An empty list means the application accepts any host.
defp check_application_hosts(%ApplicationCredential{} = credential, hosts_to_match) do
  hosts = ApplicationCredential.allowed_hosts_to_enum(credential)
  Enum.empty?(hosts) or ApplicationCredential.is_allowed_host?(hosts, hosts_to_match)
end
# Verifies a plaintext password against a stored Pbkdf2 hash.
defp check_password(pass, check_pass) do
  Comeonin.Pbkdf2.checkpw(pass, check_pass)
end
# Preloads the :credential association (no-op for nil).
defp preload_credential(schema) do
  Repo.preload(schema, :credential)
end
# Preloads the :user association (no-op for nil).
defp preload_user(schema) do
  Repo.preload(schema, :user)
end
# Attaches the owning user to an application changeset.
defp put_user_assoc(changeset, %User{} = user) do
  changeset
  |> Ecto.Changeset.put_assoc(:user, user)
end
end
| 28.713542 | 159 | 0.60965 |
08271d59321ef2d48b536c90ef19437d165e35d4 | 1,559 | ex | Elixir | lib/mix/tasks.ex | msimonborg/odd_job | 99c662b21e7a6ed34ebae6bdd79cb6ad997e9724 | [
"MIT"
] | 3 | 2022-02-01T13:49:51.000Z | 2022-02-04T05:56:34.000Z | lib/mix/tasks.ex | msimonborg/odd_job | 99c662b21e7a6ed34ebae6bdd79cb6ad997e9724 | [
"MIT"
] | null | null | null | lib/mix/tasks.ex | msimonborg/odd_job | 99c662b21e7a6ed34ebae6bdd79cb6ad997e9724 | [
"MIT"
] | null | null | null | if Mix.env() == :test do
defmodule Mix.Tasks.OddJob do
  @moduledoc """
  Mix task for development testing and code linting.

  Runs the Mix code formatter and all tests, generates an ExCoveralls test coverage report, and builds
  documentation locally.

  Use `$ mix odd_job` in the project's root directory to run the task.
  """

  import IO.ANSI, only: [cyan: 0, bright: 0]

  alias Mix.Tasks.{Coveralls, Credo, Docs, Format}

  use Mix.Task

  @preferred_cli_env :test
  @required_elixir_version "1.13.0"

  @doc since: "0.2.0"
  @spec run(argv :: [String.t()]) :: nil
  def run(argv) do
    {opts, argv, _} = OptionParser.parse(argv, switches: [format: :boolean])
    # Formatting runs by default; pass --no-format to skip it.
    if Keyword.get(opts, :format, true), do: run_formatter()
    do_run(argv)
  end

  # Fix: the original declared both `@doc false` and `@doc since: "0.2.0"` on
  # this function; Elixir keeps only the last `@doc` and warns about the
  # redefinition. The intended `@doc false` (hide from docs) is kept.
  @doc false
  @spec run_formatter :: any()
  def run_formatter do
    # Refuse to run the formatter on Elixir versions older than the project
    # minimum, since formatter output differs across versions.
    case Version.compare(System.version(), @required_elixir_version) do
      :lt ->
        raise RuntimeError, """
        #{bright()}Elixir version must be >= #{@required_elixir_version}. Detected version:

            * #{System.version()}

        Please upgrade to Elixir #{@required_elixir_version} or above to continue development on this project.
        """

      _ ->
        Mix.shell().info("#{cyan()}#{bright()}Running formatter")
        Format.run([])
    end
  end

  # Runs coverage, docs generation and strict Credo linting in sequence.
  @spec do_run([binary()]) :: nil
  def do_run(argv) do
    Coveralls.Html.run(argv)
    Docs.run(argv)
    Credo.run(["--strict" | argv])
  end
end
end
| 29.415094 | 112 | 0.612572 |
0827364961b221bedd5c718d79bafebfa0481aef | 7,642 | ex | Elixir | apps/api_web/lib/api_web/controllers/service_controller.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 62 | 2019-01-17T12:34:39.000Z | 2022-03-20T21:49:47.000Z | apps/api_web/lib/api_web/controllers/service_controller.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 375 | 2019-02-13T15:30:50.000Z | 2022-03-30T18:50:41.000Z | apps/api_web/lib/api_web/controllers/service_controller.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 14 | 2019-01-16T19:35:57.000Z | 2022-02-26T18:55:54.000Z | defmodule ApiWeb.ServiceController do
@moduledoc """
Controller for Services. Filterable by:
* id (multiple)
"""
use ApiWeb.Web, :api_controller
alias State.Service
plug(ApiWeb.Plugs.ValidateDate)
@filters ~w(id route)s
@pagination_opts ~w(offset limit order_by)a
def state_module, do: State.Service
# Swagger/OpenAPI documentation for GET /services.
swagger_path :index do
  get(path(__MODULE__, :index))

  description("""
  List of services. Service represents the days of the week, as well as extra days, that a trip \
  is valid.
  """)

  common_index_parameters(__MODULE__, :service)

  parameter(
    "filter[id]",
    :query,
    :string,
    "Filter by multiple IDs. #{comma_separated_list()}.",
    example: "1,2"
  )

  parameter(
    "filter[route]",
    :query,
    :string,
    "Filter by route. Multiple `route` #{comma_separated_list()}."
  )

  consumes("application/vnd.api+json")
  produces("application/vnd.api+json")

  response(200, "OK", Schema.ref(:Services))
  response(400, "Bad Request", Schema.ref(:BadRequest))
  response(403, "Forbidden", Schema.ref(:Forbidden))
  response(429, "Too Many Requests", Schema.ref(:TooManyRequests))
end
# Resolves the index request: at least one supported filter is required.
# Returns matching services, a params error, or {:error, :filter_required}.
def index_data(conn, params) do
  case Params.filter_params(params, @filters, conn) do
    {:ok, filters} when map_size(filters) > 0 ->
      pagination = Params.filter_opts(params, @pagination_opts, conn)

      filters
      |> apply_filters()
      |> State.all(pagination)

    {:error, _, _} = error ->
      error

    _ ->
      # Valid request but no filter given: listing every service is not allowed.
      {:error, :filter_required}
  end
end
# Looks up services for the given filter map. Clause order matters: when both
# "id" and "route" are present, the "id" clause matches first and the route
# filter is ignored — NOTE(review): confirm this precedence is intentional.
defp apply_filters(%{"id" => id}) do
  id
  |> Params.split_on_comma()
  |> Service.by_ids()
end

defp apply_filters(%{"route" => route}) do
  route
  |> Params.split_on_comma()
  |> Service.by_route_ids()
end
# Swagger/OpenAPI documentation for GET /services/:id.
swagger_path :show do
  get(path(__MODULE__, :show))

  description("""
  Single service, which represents the days of the week, as well as extra days, that a trip \
  is valid.
  """)

  parameter(:id, :path, :string, "Unique identifier for a service")
  common_show_parameters(:service)

  consumes("application/vnd.api+json")
  produces("application/vnd.api+json")

  response(200, "OK", Schema.ref(:Service))
  response(403, "Forbidden", Schema.ref(:Forbidden))
  response(404, "Not Found", Schema.ref(:NotFound))
  response(406, "Not Acceptable", Schema.ref(:NotAcceptable))
  response(429, "Too Many Requests", Schema.ref(:TooManyRequests))
end
# Resolves the show request; returns the service or nil when unknown.
def show_data(_conn, %{"id" => id}), do: Service.by_id(id)
# Builds the JSON:API swagger schema definitions for services.
# The helpers below (array_attribute/5, valid_dates_attribute/1) receive the
# schema implicitly through the PhoenixSwagger `resource` DSL.
def swagger_definitions do
  import PhoenixSwagger.JsonApi, except: [page: 1]

  %{
    ServiceResource:
      resource do
        description("Service represents a set of dates on which trips run.")

        attributes do
          start_date(
            :string,
            "Earliest date which is valid for this service. Format is ISO8601.",
            format: :date,
            example: "2018-11-19"
          )

          end_date(
            :string,
            "Latest date which is valid for this service. Format is ISO8601.",
            format: :date,
            example: "2018-12-24"
          )

          description(
            :string,
            "Human-readable description of the service, as it should appear on public-facing websites and applications.",
            "x-nullable": true,
            example: "Weekday schedule (no school)"
          )

          schedule_name(
            :string,
            "Description of when the `service_id` is in effect.",
            "x-nullable": true,
            example: "Weekday (no school)"
          )

          schedule_type(
            :string,
            """
            Description of the schedule type the service_id can be applied.
            For example, on a holiday, the schedule_type value may be "Saturday" or "Sunday".
            Current valid values are "Weekday", "Saturday", "Sunday", or "Other"
            """,
            "x-nullable": true,
            example: "Sunday"
          )

          schedule_typicality(
            :integer,
            """
            Describes how well this schedule represents typical service for the listed `schedule_type`

            | Value | Description |
            |-------|-----------------------------------------------------------------------------|
            | `0` | Not defined. |
            | `1` | Typical service with perhaps minor modifications |
            | `2` | Extra service supplements typical schedules |
            | `3` | Reduced holiday service is provided by typical Saturday or Sunday schedule |
            | `4` | Major changes in service due to a planned disruption, such as construction |
            | `5` | Major reductions in service for weather events or other atypical situations |
            """,
            enum: Enum.to_list(0..5),
            example: 1
          )

          rating_start_date(
            :string,
            "Earliest date which is a part of the rating (season) which contains this service. Format is ISO8601.",
            "x-nullable": true,
            format: :date,
            example: "2018-12-22"
          )

          rating_end_date(
            :string,
            "Latest date which is a part of the rating (season) which contains this service. Format is ISO8601.",
            "x-nullable": true,
            format: :date,
            example: "2019-03-14"
          )

          rating_description(
            :string,
            "Human-readable description of the rating (season), as it should appear on public-facing websites and applications.",
            "x-nullable": true,
            example: "Winter"
          )
        end

        # NOTE(review): "Aditional" below is a typo in the public description
        # string; left untouched here since it is runtime data.
        array_attribute(
          :added_dates,
          :date,
          "Aditional dates when the service is valid. Format is ISO8601.",
          "2018-11-21"
        )

        array_attribute(
          :added_dates_notes,
          :string,
          "Extra information about additional dates (e.g. holiday name)",
          "New Year Day"
        )

        array_attribute(
          :removed_dates,
          :date,
          "Exceptional dates when the service is not valid. Format is ISO8601.",
          "2018-12-17"
        )

        array_attribute(
          :removed_dates_notes,
          :string,
          "Extra information about exceptional dates (e.g. holiday name)",
          "New Year Day"
        )

        valid_dates_attribute()
      end,
    Services: page(:ServiceResource),
    Service: single(:ServiceResource)
  }
end
# Adds an array-of-string attribute named `property` to the swagger schema,
# annotating the items with the given format, description and example.
defp array_attribute(schema, property, type, description, example) do
  nested = Schema.array(:string)
  nested = put_in(nested.items.description, description)
  nested = put_in(nested.items.format, type)
  nested = put_in(nested.items.example, example)

  put_in(schema.properties.attributes.properties[property], nested)
end
# Adds the :valid_days attribute (array of ISO weekday numbers, Monday=1
# through Sunday=7) to the swagger schema.
defp valid_dates_attribute(schema) do
  nested = Schema.array(:number)

  nested =
    put_in(nested.items.description, """
    Day of week. From Monday as 1 to Sunday as 7.
    """)

  nested = put_in(nested.items.example, "1")

  put_in(schema.properties.attributes.properties[:valid_days], nested)
end
end
| 30.939271 | 131 | 0.551295 |
082775979a2c54e1bdb000ba7e93f5c400739c55 | 8,861 | ex | Elixir | lib/livebook/evaluator/io_proxy.ex | benjreinhart/livebook | 0500ad5c6237167ce9769d8cc78fca360834f576 | [
"Apache-2.0"
] | null | null | null | lib/livebook/evaluator/io_proxy.ex | benjreinhart/livebook | 0500ad5c6237167ce9769d8cc78fca360834f576 | [
"Apache-2.0"
] | null | null | null | lib/livebook/evaluator/io_proxy.ex | benjreinhart/livebook | 0500ad5c6237167ce9769d8cc78fca360834f576 | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Evaluator.IOProxy do
@moduledoc false
# An IO device process used by `Evaluator` as its `:stdio`.
#
# The process implements [The Erlang I/O Protocol](https://erlang.org/doc/apps/stdlib/io_protocol.html)
# and can be thought of as a *virtual* IO device.
#
# Upon receiving an IO requests, the process sends a message
# the `target` process specified during initialization.
# Currently only output requests are supported.
#
# The implementation is based on the built-in `StringIO`,
# so check it out for more reference.
use GenServer
alias Livebook.Evaluator
## API
@doc """
Starts the IO device process.
Make sure to use `configure/3` to actually proxy the requests.
"""
@spec start_link() :: GenServer.on_start()
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts)
end
@doc """
Sets IO proxy destination and the reference to be attached to all messages.
For all supported requests a message is sent to `target`,
so this device serves as a proxy. The given evaluation
reference (`ref`) is also sent in all messages.
The possible messages are:
* `{:evaluation_output, ref, string}` - for output requests,
where `ref` is the given evaluation reference and `string` is the output.
"""
@spec configure(pid(), pid(), Evaluator.ref()) :: :ok
def configure(pid, target, ref) do
GenServer.cast(pid, {:configure, target, ref})
end
@doc """
Synchronously sends all buffer contents to the configured target process.
"""
@spec flush(pid()) :: :ok
def flush(pid) do
GenServer.call(pid, :flush)
end
@doc """
Asynchronously clears all buffered inputs, so next time they
are requested again.
"""
@spec clear_input_buffers(pid()) :: :ok
def clear_input_buffers(pid) do
GenServer.cast(pid, :clear_input_buffers)
end
@doc """
Returns the accumulated widget pids and clears the accumulator.
"""
@spec flush_widgets(pid()) :: MapSet.t(pid())
def flush_widgets(pid) do
GenServer.call(pid, :flush_widgets)
end
## Callbacks
@impl true
def init(_opts) do
  # State fields:
  #   encoding      - :unicode or :latin1, set via the :setopts request
  #   target/ref    - destination process and evaluation reference (configure/3)
  #   buffer        - pending output chunks, most recent first
  #   input_buffers - unread input leftovers keyed by prompt
  #   widget_pids   - widget processes seen in outputs since the last flush
  {:ok,
   %{
     encoding: :unicode,
     target: nil,
     ref: nil,
     buffer: [],
     input_buffers: %{},
     widget_pids: MapSet.new()
   }}
end
@impl true
# Points the proxy at a new target process / evaluation reference.
def handle_cast({:configure, target, ref}, state) do
  {:noreply, %{state | target: target, ref: ref}}
end

# Drops all buffered input leftovers so future reads request input again.
def handle_cast(:clear_input_buffers, state) do
  {:noreply, %{state | input_buffers: %{}}}
end
@impl true
# Synchronously pushes buffered output to the target.
def handle_call(:flush, _from, state) do
  {:reply, :ok, flush_buffer(state)}
end

# Hands back the accumulated widget pids and resets the accumulator.
def handle_call(:flush_widgets, _from, state) do
  {:reply, state.widget_pids, %{state | widget_pids: MapSet.new()}}
end
@impl true
# Entry point of the Erlang I/O protocol: handle the request and reply
# to the process that sent it.
def handle_info({:io_request, from, reply_as, req}, state) do
  {reply, state} = io_request(req, state)
  io_reply(from, reply_as, reply)
  {:noreply, state}
end

# Timer message scheduled by put_chars/4 to batch output sends.
def handle_info(:flush, state) do
  {:noreply, flush_buffer(state)}
end
# Dispatches a single Erlang I/O protocol request. Each clause returns
# {reply, new_state}. Unsupported requests answer {:error, :enotsup}.

# Output requests, in all four protocol shapes (direct chars or MFA,
# with and without an explicit encoding).
defp io_request({:put_chars, chars} = req, state) do
  put_chars(:latin1, chars, req, state)
end

defp io_request({:put_chars, mod, fun, args} = req, state) do
  put_chars(:latin1, apply(mod, fun, args), req, state)
end

defp io_request({:put_chars, encoding, chars} = req, state) do
  put_chars(encoding, chars, req, state)
end

defp io_request({:put_chars, encoding, mod, fun, args} = req, state) do
  put_chars(encoding, apply(mod, fun, args), req, state)
end

# Character-count reads are not supported, only line-based reads.
defp io_request({:get_chars, _prompt, count}, state) when count >= 0 do
  {{:error, :enotsup}, state}
end

defp io_request({:get_chars, _encoding, _prompt, count}, state) when count >= 0 do
  {{:error, :enotsup}, state}
end

defp io_request({:get_line, prompt}, state) do
  get_line(:latin1, prompt, state)
end

defp io_request({:get_line, encoding, prompt}, state) do
  get_line(encoding, prompt, state)
end

defp io_request({:get_until, _prompt, _mod, _fun, _args}, state) do
  {{:error, :enotsup}, state}
end

defp io_request({:get_until, _encoding, _prompt, _mod, _fun, _args}, state) do
  {{:error, :enotsup}, state}
end

defp io_request({:get_password, _encoding}, state) do
  {{:error, :enotsup}, state}
end

# Only the encoding option may be changed.
defp io_request({:setopts, [encoding: encoding]}, state) when encoding in [:latin1, :unicode] do
  {:ok, %{state | encoding: encoding}}
end

defp io_request({:setopts, _opts}, state) do
  {{:error, :enotsup}, state}
end

defp io_request(:getopts, state) do
  {[binary: true, encoding: state.encoding], state}
end

defp io_request({:get_geometry, :columns}, state) do
  {{:error, :enotsup}, state}
end

defp io_request({:get_geometry, :rows}, state) do
  {{:error, :enotsup}, state}
end

# Batched requests: process in order, stopping on the first error.
defp io_request({:requests, reqs}, state) do
  io_requests(reqs, {:ok, state})
end

# Livebook custom request type, handled in a special manner
# by IOProxy and safely failing for any other IO device
# (resulting in the {:error, :request} response).
defp io_request({:livebook_put_output, output}, state) do
  # Flush first so regular output keeps its ordering relative to this one.
  state = flush_buffer(state)
  send(state.target, {:evaluation_output, state.ref, output})

  # Track widget processes embedded in the output for later collection.
  state =
    case Evaluator.widget_pid_from_output(output) do
      {:ok, pid} -> update_in(state.widget_pids, &MapSet.put(&1, pid))
      :error -> state
    end

  {:ok, state}
end

# Anything else is not a recognized request.
defp io_request(_, state) do
  {{:error, :request}, state}
end
# Runs a list of requests sequentially; short-circuits as soon as
# a request produces a non-:ok result, returning that result.
defp io_requests([req | rest], {:ok, state}) do
  io_requests(rest, io_request(req, state))
end

defp io_requests(_, result) do
  result
end
# Converts output to the device encoding and appends it to the buffer.
# Schedules a deferred :flush when the buffer transitions from empty, so
# bursts of output are sent to the target in batches.
# On conversion failure, replies with the original request as the error term.
defp put_chars(encoding, chars, req, state) do
  case :unicode.characters_to_binary(chars, encoding, state.encoding) do
    string when is_binary(string) ->
      if state.buffer == [] do
        Process.send_after(self(), :flush, 50)
      end

      {:ok, update_in(state.buffer, &buffer_append(&1, string))}

    {_, _, _} ->
      {{:error, req}, state}
  end
rescue
  # Invalid chardata raises in :unicode.characters_to_binary/3.
  ArgumentError -> {{:error, req}, state}
end
# Serves a line-read request: fetches input for the prompt (buffered or
# freshly requested), returns the first line in the caller's encoding and
# stores the remainder for subsequent reads with the same prompt.
defp get_line(encoding, prompt, state) do
  prompt = :unicode.characters_to_binary(prompt, encoding, state.encoding)

  case get_input(prompt, state) do
    input when is_binary(input) ->
      {line, rest} = line_from_input(input)

      line =
        if is_binary(line) do
          :unicode.characters_to_binary(line, state.encoding, encoding)
        else
          # :eof passes through unconverted.
          line
        end

      state = put_in(state.input_buffers[prompt], rest)
      {line, state}

    error ->
      {error, state}
  end
end
# Returns buffered input for the prompt if any; otherwise asks the
# target process for fresh input.
defp get_input(prompt, state) do
  case state.input_buffers do
    %{^prompt => buffered} -> buffered
    _ -> request_input(prompt, state)
  end
end
# Asks the target process for input matching the prompt and blocks until a
# reply arrives. Monitors the target so a crash unblocks this process with
# {:error, :terminated} instead of hanging forever.
defp request_input(prompt, state) do
  send(state.target, {:evaluation_input, state.ref, self(), prompt})

  ref = Process.monitor(state.target)

  receive do
    {:evaluation_input_reply, {:ok, string}} ->
      Process.demonitor(ref, [:flush])
      string

    {:evaluation_input_reply, :error} ->
      Process.demonitor(ref, [:flush])
      {:error, "no matching Livebook input found"}

    {:DOWN, ^ref, :process, _object, _reason} ->
      {:error, :terminated}
  end
end
defp line_from_input(""), do: {:eof, ""}
defp line_from_input(input) do
case :binary.match(input, ["\r\n", "\n"]) do
:nomatch ->
{input, ""}
{pos, len} ->
size = byte_size(input)
line = binary_part(input, 0, pos + len)
rest = binary_part(input, pos + len, size - pos - len)
{line, rest}
end
end
# Sends the protocol-mandated :io_reply message back to the requester.
defp io_reply(from, reply_as, reply), do: send(from, {:io_reply, reply_as, reply})
# Joins the (reversed) buffered binaries and, when both a target is
# configured and there is output, sends it as a single message.
defp flush_buffer(state) do
  string =
    state.buffer
    |> Enum.reverse()
    |> IO.iodata_to_binary()

  if state.target != nil and string != "" do
    send(state.target, {:evaluation_output, state.ref, string})
  end

  %{state | buffer: []}
end
# Appends a chunk to the buffer (most recent first).
defp buffer_append(buffer, text) do
  # Sometimes there are intensive outputs that use \r
  # to dynamically refresh the printd text.
  # Since we buffer the messages anyway, it makes
  # sense to send only the latest of these outputs.
  # Note that \r works per-line, so if there are newlines
  # we keep the buffer, but for \r-intensive operations
  # there are usually no newlines involved, so this optimisation works fine.
  if has_rewind?(text) and not has_newline?(text) and not Enum.any?(buffer, &has_newline?/1) do
    # The new chunk rewrites the current line, so earlier chunks are moot.
    [text]
  else
    [text | buffer]
  end
end
# Checks for a [\r][not \r] sequence in the given string, i.e. a carriage
# return actually used to rewrite the current line.
defp has_rewind?(text), do: Regex.match?(~r/\r[^\r]/, text)
defp has_newline?(text), do: String.contains?(text, "\n")
end
| 27.015244 | 105 | 0.646315 |
0827b314b61ef5abd100c747ad42fe025a2a4596 | 559 | ex | Elixir | web/web.ex | stwf/notsense | 4ed9d27682f0f0a0ba0a9c12ab7df622001052ac | [
"Apache-2.0"
] | null | null | null | web/web.ex | stwf/notsense | 4ed9d27682f0f0a0ba0a9c12ab7df622001052ac | [
"Apache-2.0"
] | null | null | null | web/web.ex | stwf/notsense | 4ed9d27682f0f0a0ba0a9c12ab7df622001052ac | [
"Apache-2.0"
] | 1 | 2019-10-12T13:23:21.000Z | 2019-10-12T13:23:21.000Z | defmodule Notsense.Web do
@moduledoc false
# Returns the quoted code injected into a module via
# `use Notsense.Web, :view` (dispatched by `__using__/1`).
def view do
quote do
require Logger
# use Phoenix.View, root: file_path
use Phoenix.View, root: "web/templates"
# Import convenience functions from controllers
import Phoenix.Controller, only: [view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.

`use Notsense.Web, :view` expands to the quoted block returned by `view/0`.
"""
defmacro __using__(which) when is_atom(which), do: apply(__MODULE__, which, [])
end | 22.36 | 61 | 0.670841 |
0827c6c241ac412c7947f984a663d3a42267c077 | 993 | exs | Elixir | mix.exs | johninvictus/mpesa_elixir | 1c4715c247ae05805cd4aae2497af5e7c2eea289 | [
"Apache-2.0"
] | 11 | 2018-04-10T15:27:54.000Z | 2020-12-04T07:48:52.000Z | mix.exs | johninvictus/mpesa_elixir | 1c4715c247ae05805cd4aae2497af5e7c2eea289 | [
"Apache-2.0"
] | 1 | 2020-05-22T05:13:25.000Z | 2020-05-22T05:13:39.000Z | mix.exs | johninvictus/mpesa_elixir | 1c4715c247ae05805cd4aae2497af5e7c2eea289 | [
"Apache-2.0"
] | 7 | 2018-05-09T20:31:44.000Z | 2020-11-08T15:21:27.000Z | defmodule MpesaElixir.MixProject do
use Mix.Project

# Package description shown on hex.pm.
# NOTE(review): "Safricom" looks like a typo for "Safaricom" — confirm
# before changing, since this is published package metadata.
@description """
Elixir wrapper for Safricom Mpesa API
"""

# Mix project configuration; see "mix help Mix.Project" for the options.
def project do
[
app: :mpesa_elixir,
version: "0.1.1",
elixir: "~> 1.6",
start_permanent: Mix.env() == :prod,
description: @description,
package: package(),
deps: deps(),
name: "MpesaElixir"
]
end
# Run "mix help compile.app" to learn about applications.
# :httpotion and :timex are listed explicitly so they are started with
# the application (both are declared in deps/0).
def application do
[
extra_applications: [:logger, :httpotion, :timex]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:httpotion, "~> 3.1.0"},
{:poison, "~> 3.1"},
# Fixed: the requirement string had a stray trailing space ("~> 0.0.1 "),
# which is not a valid Version requirement.
{:rsa, "~> 0.0.1"},
{:ex_crypto, "~> 0.9.0"},
{:timex, "~> 3.1"},
{:ex_doc, ">= 0.0.0", only: :dev}
]
end
# Hex package metadata: maintainers, license and source repository link.
defp package do
[
maintainers: ["John Invictus"],
licenses: ["Apache 2.0"],
links: %{"Github" => "https://github.com/johninvictus/mpesa_elixir"}
]
end
end
| 20.6875 | 74 | 0.543807 |
0827c74b5e3b399bf67fc2b1205d0bef2611314d | 3,256 | ex | Elixir | apps/ewallet_db/lib/ewallet_db/minted_token.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 2 | 2019-07-13T05:49:03.000Z | 2021-08-19T23:58:23.000Z | apps/ewallet_db/lib/ewallet_db/minted_token.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/lib/ewallet_db/minted_token.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 3 | 2018-05-08T17:15:42.000Z | 2021-11-10T04:08:33.000Z | defmodule EWalletDB.MintedToken do
@moduledoc """
Ecto Schema representing minted tokens.
"""
use Ecto.Schema
import Ecto.{Changeset, Query}
alias Ecto.UUID
alias EWalletDB.{Repo, Account, MintedToken}
# Primary key is a client-side generated UUID rather than a serial integer
# (set_friendly_id/1 below generates it alongside the friendly ID).
@primary_key {:id, Ecto.UUID, autogenerate: true}
schema "minted_token" do
field :friendly_id, :string # "EUR:123"
field :symbol, :string # "eur"
field :iso_code, :string # "EUR"
field :name, :string # "Euro"
field :description, :string # Official currency of the European Union
field :short_symbol, :string # "€"
field :subunit, :string # "Cent"
field :subunit_to_unit, EWalletDB.Types.Integer # 100
field :symbol_first, :boolean # true
field :html_entity, :string # "€"
field :iso_numeric, :string # "978"
field :smallest_denomination, :integer # 1
field :locked, :boolean # false
field :metadata, :map, default: %{}
field :encrypted_metadata, Cloak.EncryptedMapField, default: %{}
field :encryption_version, :binary
# Owning account; stored as a UUID foreign key.
belongs_to :account, Account, foreign_key: :account_id,
references: :id,
type: UUID
timestamps()
end
# Builds the insert/update changeset for a minted token.
#
# Note the ordering: set_friendly_id/1 runs after the first
# validate_required/2 so a missing :friendly_id can be derived from
# :symbol, and :friendly_id is then re-validated separately.
defp changeset(%MintedToken{} = minted_token, attrs) do
minted_token
|> cast(attrs, [
:symbol, :iso_code, :name, :description, :short_symbol,
:subunit, :subunit_to_unit, :symbol_first, :html_entity,
:iso_numeric, :smallest_denomination, :locked, :account_id,
:metadata, :encrypted_metadata, :friendly_id
])
|> validate_required([
:symbol, :name, :subunit_to_unit, :account_id,
:metadata, :encrypted_metadata
])
|> set_friendly_id()
|> validate_required([:friendly_id])
|> unique_constraint(:symbol)
|> unique_constraint(:iso_code)
|> unique_constraint(:name)
|> unique_constraint(:short_symbol)
|> unique_constraint(:iso_numeric)
|> assoc_constraint(:account)
# Record which encryption key version wrote :encrypted_metadata.
|> put_change(:encryption_version, Cloak.version)
end
# Ensures the changeset carries both an :id and a :friendly_id.
#
# When no :friendly_id is present, a UUID is generated and used both as
# the primary key and as part of the "<symbol>:<uuid>" friendly ID.
# An existing :friendly_id leaves the changeset untouched.
defp set_friendly_id(changeset) do
  case get_field(changeset, :friendly_id) do
    nil ->
      uuid = UUID.generate()
      symbol = get_field(changeset, :symbol)

      changeset
      |> put_change(:id, uuid)
      |> put_change(:friendly_id, build_friendly_id(symbol, uuid))

    _existing ->
      changeset
  end
end
@doc """
Builds the friendly ID string for a minted token, in the
`"<symbol>:<uuid>"` format.
"""
def build_friendly_id(symbol, uuid), do: Enum.join([symbol, uuid], ":")
@doc """
Returns all minted tokens in the system.
"""
def all do
Repo.all(MintedToken)
end
@doc """
Create a new minted token with the passed attributes.

On success the token is re-read via `get/1` so the returned struct is
the persisted record; on failure the invalid changeset is returned.
"""
def insert(attrs) do
  %MintedToken{}
  |> changeset(attrs)
  |> Repo.insert()
  |> case do
    {:ok, minted_token} ->
      {:ok, get(minted_token.friendly_id)}

    {:error, _changeset} = error ->
      error
  end
end
@doc """
Retrieve a minted token by friendly_id.

Returns `nil` when given `nil` (short-circuits without querying) or when
no token matches.
"""
def get(nil), do: nil
def get(friendly_id) do
Repo.get_by(MintedToken, friendly_id: friendly_id)
end
@doc """
Retrieve a list of minted tokens by supplying a list of friendly IDs.
"""
def get_all(friendly_ids) do
  query = from(m in MintedToken, where: m.friendly_id in ^friendly_ids)
  Repo.all(query)
end
end
| 29.071429 | 73 | 0.641585 |
0827cf7cb8997eb47ddf7b8f560862af8211ec8b | 371 | ex | Elixir | lib/ash/error/query/no_such_operator.ex | regularfellow/ash | 179495fecb141e0eb8eaf35bbb4d04000eab694b | [
"MIT"
] | null | null | null | lib/ash/error/query/no_such_operator.ex | regularfellow/ash | 179495fecb141e0eb8eaf35bbb4d04000eab694b | [
"MIT"
] | null | null | null | lib/ash/error/query/no_such_operator.ex | regularfellow/ash | 179495fecb141e0eb8eaf35bbb4d04000eab694b | [
"MIT"
] | null | null | null | defmodule Ash.Error.Query.NoSuchOperator do
@moduledoc "Used when an operator that doesn't exist is used in a query"
use Ash.Error

# Defines the exception struct with a :name field (the unknown operator)
# and classifies it in the :invalid error class.
def_ash_error([:name], class: :invalid)
defimpl Ash.ErrorKind do
  # Every occurrence gets its own UUID so individual errors are distinguishable.
  def id(_), do: Ecto.UUID.generate()

  # Stable, machine-readable identifier for this error kind.
  def code(_), do: "no_such_operator"

  # Human-readable message naming the unknown operator.
  def message(%{name: name}), do: "No such operator #{name}"
end
end
| 21.823529 | 74 | 0.684636 |
0827e6181a96ae4033c3bb651e7487c924d008fe | 43 | exs | Elixir | test/test_helper.exs | giraphme/sendgrid_elixir | 53eaa58486cd052d19a5df58dbb3c944fd54b635 | [
"MIT"
] | null | null | null | test/test_helper.exs | giraphme/sendgrid_elixir | 53eaa58486cd052d19a5df58dbb3c944fd54b635 | [
"MIT"
] | null | null | null | test/test_helper.exs | giraphme/sendgrid_elixir | 53eaa58486cd052d19a5df58dbb3c944fd54b635 | [
"MIT"
] | null | null | null | ExUnit.start(exclude: [integration: true])
| 21.5 | 42 | 0.767442 |
0827f0414fea25bc7694ad589fcf2526b6a4c84f | 44,142 | exs | Elixir | test/ecto/type_test.exs | fertapric/ecto | c7e144a2fd54d2ecc2df2f85e70cb6113116a2a4 | [
"Apache-2.0"
] | null | null | null | test/ecto/type_test.exs | fertapric/ecto | c7e144a2fd54d2ecc2df2f85e70cb6113116a2a4 | [
"Apache-2.0"
] | 2 | 2020-04-23T00:19:07.000Z | 2020-04-23T00:24:25.000Z | test/ecto/type_test.exs | fertapric/ecto | c7e144a2fd54d2ecc2df2f85e70cb6113116a2a4 | [
"Apache-2.0"
] | 1 | 2020-10-07T16:52:00.000Z | 2020-10-07T16:52:00.000Z | defmodule Ecto.TypeTest do
use ExUnit.Case, async: true
# Minimal custom Ecto.Type used throughout these tests; each callback
# returns a fixed atom so assertions can tell which callback ran.
defmodule Custom do
use Ecto.Type
def type, do: :custom
def load(_), do: {:ok, :load}
def dump(_), do: {:ok, :dump}
def cast(_), do: {:ok, :cast}
# Values are "equal" only when the left side is literally true.
def equal?(true, _), do: true
def equal?(_, _), do: false
# Embedded values are stored in their dumped representation.
def embed_as(_), do: :dump
end
# Custom type whose underlying type is :any — used to check matching
# against arbitrary base types and the default embed_as (:self).
defmodule CustomAny do
use Ecto.Type
def type, do: :any
def load(_), do: {:ok, :load}
def dump(_), do: {:ok, :dump}
def cast(_), do: {:ok, :cast}
end
# Test schema (empty table name) used as the target of embed tests:
# :a reads from source column :abc, :b is virtual, :c defaults to 0.
defmodule Schema do
use Ecto.Schema
@primary_key {:id, :binary_id, autogenerate: true}
schema "" do
field :a, :integer, source: :abc
field :b, :integer, virtual: true
field :c, :integer, default: 0
end
# Changeset that casts only the :a field.
def changeset(params, schema) do
Ecto.Changeset.cast(schema, params, ~w(a))
end
end
import Kernel, except: [match?: 2], warn: false
import Ecto.Type
doctest Ecto.Type
# Base types embed as themselves; Custom declares embed_as/1 -> :dump,
# while CustomAny falls back to the default (:self).
test "embed_as" do
assert embed_as(:string, :json) == :self
assert embed_as(:integer, :json) == :self
assert embed_as(Custom, :json) == :dump
assert embed_as(CustomAny, :json) == :self
end
# embedded_load/3 loads values from their embedded (:json) representation,
# delegating to a custom type's load/1.
test "embedded_load" do
assert embedded_load(:decimal, "1", :json) == {:ok, Decimal.new("1")}
assert embedded_load(:decimal, "oops", :json) == :error
assert embedded_load(Custom, :value, :json) == {:ok, :load}
end
# embedded_dump/3 encodes values for embedding; Custom goes through its
# dump/1 because it declares embed_as(_) -> :dump.
test "embedded_dump" do
assert embedded_dump(:decimal, Decimal.new("1"), :json) == {:ok, Decimal.new("1")}
assert embedded_dump(Custom, :value, :json) == {:ok, :dump}
end
test "custom types" do
assert load(Custom, "foo") == {:ok, :load}
assert dump(Custom, "foo") == {:ok, :dump}
assert cast(Custom, "foo") == {:ok, :cast}
assert load(Custom, nil) == {:ok, nil}
assert dump(Custom, nil) == {:ok, nil}
assert cast(Custom, nil) == {:ok, nil}
assert match?(Custom, :any)
assert match?(:any, Custom)
assert match?(CustomAny, :boolean)
end
# A bare :map accepts any map (keys/values untyped) and rejects non-maps.
test "untyped maps" do
assert load(:map, %{"a" => 1}) == {:ok, %{"a" => 1}}
assert load(:map, 1) == :error
assert dump(:map, %{a: 1}) == {:ok, %{a: 1}}
assert dump(:map, 1) == :error
end
test "typed maps" do
assert load({:map, :integer}, %{"a" => 1, "b" => 2}) == {:ok, %{"a" => 1, "b" => 2}}
assert dump({:map, :integer}, %{"a" => 1, "b" => 2}) == {:ok, %{"a" => 1, "b" => 2}}
assert cast({:map, :integer}, %{"a" => "1", "b" => "2"}) == {:ok, %{"a" => 1, "b" => 2}}
assert load({:map, {:array, :integer}}, %{"a" => [0, 0], "b" => [1, 1]}) == {:ok, %{"a" => [0, 0], "b" => [1, 1]}}
assert dump({:map, {:array, :integer}}, %{"a" => [0, 0], "b" => [1, 1]}) == {:ok, %{"a" => [0, 0], "b" => [1, 1]}}
assert cast({:map, {:array, :integer}}, %{"a" => [0, 0], "b" => [1, 1]}) == {:ok, %{"a" => [0, 0], "b" => [1, 1]}}
assert load({:map, :integer}, %{"a" => ""}) == :error
assert dump({:map, :integer}, %{"a" => ""}) == :error
assert cast({:map, :integer}, %{"a" => ""}) == :error
assert load({:map, :integer}, 1) == :error
assert dump({:map, :integer}, 1) == :error
assert cast({:map, :integer}, 1) == :error
end
test "custom types with array" do
assert load({:array, Custom}, ["foo"]) == {:ok, [:load]}
assert dump({:array, Custom}, ["foo"]) == {:ok, [:dump]}
assert cast({:array, Custom}, ["foo"]) == {:ok, [:cast]}
assert load({:array, Custom}, [nil]) == {:ok, [nil]}
assert dump({:array, Custom}, [nil]) == {:ok, [nil]}
assert cast({:array, Custom}, [nil]) == {:ok, [nil]}
assert load({:array, Custom}, nil) == {:ok, nil}
assert dump({:array, Custom}, nil) == {:ok, nil}
assert cast({:array, Custom}, nil) == {:ok, nil}
assert load({:array, Custom}, 1) == :error
assert dump({:array, Custom}, 1) == :error
assert cast({:array, Custom}, 1) == :error
assert load({:array, Custom}, [:unused], fn Custom, _ -> {:ok, :used} end) == {:ok, [:used]}
assert dump({:array, Custom}, [:unused], fn Custom, _ -> {:ok, :used} end) == {:ok, [:used]}
end
test "custom types with map" do
assert load({:map, Custom}, %{"x" => "foo"}) == {:ok, %{"x" => :load}}
assert dump({:map, Custom}, %{"x" => "foo"}) == {:ok, %{"x" => :dump}}
assert cast({:map, Custom}, %{"x" => "foo"}) == {:ok, %{"x" => :cast}}
assert load({:map, Custom}, %{"x" => nil}) == {:ok, %{"x" => nil}}
assert dump({:map, Custom}, %{"x" => nil}) == {:ok, %{"x" => nil}}
assert cast({:map, Custom}, %{"x" => nil}) == {:ok, %{"x" => nil}}
assert load({:map, Custom}, nil) == {:ok, nil}
assert dump({:map, Custom}, nil) == {:ok, nil}
assert cast({:map, Custom}, nil) == {:ok, nil}
assert load({:map, Custom}, 1) == :error
assert dump({:map, Custom}, 1) == :error
assert cast({:map, Custom}, 1) == :error
assert load({:map, Custom}, %{"a" => :unused}, fn Custom, _ -> {:ok, :used} end) == {:ok, %{"a" => :used}}
assert dump({:map, Custom}, %{"a" => :unused}, fn Custom, _ -> {:ok, :used} end) == {:ok, %{"a" => :used}}
end
# dump/3 threads a custom dumper function through array and map element types.
test "dump with custom function" do
dumper = fn :integer, term -> {:ok, term * 2} end
assert dump({:array, :integer}, [1, 2], dumper) == {:ok, [2, 4]}
assert dump({:map, :integer}, %{x: 1, y: 2}, dumper) == {:ok, %{x: 2, y: 4}}
end
# {:in, type} casts each list element; nil is not a valid list.
test "in" do
assert cast({:in, :integer}, ["1", "2", "3"]) == {:ok, [1, 2, 3]}
assert cast({:in, :integer}, nil) == :error
end
test "{:param, :any_datetime}" do
value = ~N[2010-04-17 14:00:00]
assert cast({:param, :any_datetime}, value) == {:ok, value}
value = ~N[2010-04-17 14:00:00.123]
assert cast({:param, :any_datetime}, value) == {:ok, value}
value = DateTime.utc_now()
assert cast({:param, :any_datetime}, value) == {:ok, value}
value = "2010-04-17 14:00:00"
assert cast({:param, :any_datetime}, value) == {:ok, ~N[2010-04-17 14:00:00]}
value = Map.from_struct(~N[2010-04-17 14:00:00])
assert cast({:param, :any_datetime}, value) == {:ok, ~N[2010-04-17 14:00:00]}
assert match?(:naive_datetime, {:param, :any_datetime})
assert match?(:naive_datetime_usec, {:param, :any_datetime})
assert match?(:utc_datetime, {:param, :any_datetime})
assert match?(:utc_datetime_usec, {:param, :any_datetime})
refute match?(:string, {:param, :any_datetime})
end
test "decimal" do
assert cast(:decimal, "1.0") == {:ok, Decimal.new("1.0")}
assert cast(:decimal, 1.0) == {:ok, Decimal.new("1.0")}
assert cast(:decimal, 1) == {:ok, Decimal.new("1")}
assert cast(:decimal, Decimal.new("1")) == {:ok, Decimal.new("1")}
assert cast(:decimal, "nan") == :error
assert_raise ArgumentError, ~r"#Decimal<NaN> is not allowed for type :decimal", fn ->
cast(:decimal, Decimal.new("NaN"))
end
assert dump(:decimal, Decimal.new("1")) == {:ok, Decimal.new("1")}
assert dump(:decimal, 1.0) == {:ok, Decimal.new("1.0")}
assert dump(:decimal, 1) == {:ok, Decimal.new("1")}
assert dump(:decimal, "1.0") == :error
assert dump(:decimal, "bad") == :error
assert_raise ArgumentError, ~r"#Decimal<NaN> is not allowed for type :decimal", fn ->
dump(:decimal, Decimal.new("nan"))
end
assert load(:decimal, 1) == {:ok, Decimal.new(1)}
assert load(:decimal, 1.0) == {:ok, Decimal.new("1.0")}
assert load(:decimal, Decimal.new("1.0")) == {:ok, Decimal.new("1.0")}
assert load(:decimal, "1.0") == :error
end
# {:maybe, type} applies the inner type when possible and otherwise
# passes the raw value through unchanged.
test "maybe" do
assert dump({:maybe, :decimal}, Decimal.new(1)) == {:ok, Decimal.new(1)}
assert dump({:maybe, :decimal}, "not decimal") == {:ok, "not decimal"}
assert load({:maybe, :decimal}, 1) == {:ok, Decimal.new(1)}
assert load({:maybe, :decimal}, "not decimal") == {:ok, "not decimal"}
assert cast({:maybe, :decimal}, 1) == {:ok, Decimal.new(1)}
assert cast({:maybe, :decimal}, "not decimal") == {:ok, "not decimal"}
end
describe "embeds" do
@uuid_string "bfe0888c-5c59-4bb3-adfd-71f0b85d3db7"
@uuid_binary <<191, 224, 136, 140, 92, 89, 75, 179, 173, 253, 113, 240, 184, 93, 61, 183>>
test "one" do
embed = %Ecto.Embedded{field: :embed, cardinality: :one,
owner: __MODULE__, related: Schema}
type = {:embed, embed}
assert {:ok, %Schema{id: @uuid_string, a: 1, c: 0}} =
adapter_load(Ecto.TestAdapter, type, %{"id" => @uuid_binary, "abc" => 1})
assert {:ok, nil} == adapter_load(Ecto.TestAdapter, type, nil)
assert :error == adapter_load(Ecto.TestAdapter, type, 1)
assert {:ok, %{abc: 1, c: 0, id: @uuid_binary}} ==
adapter_dump(Ecto.TestAdapter, type, %Schema{id: @uuid_string, a: 1})
assert {:ok, nil} = adapter_dump(Ecto.TestAdapter, type, nil)
assert :error = adapter_dump(Ecto.TestAdapter, type, 1)
assert :error == cast(type, %{"a" => 1})
assert cast(type, %Schema{}) == {:ok, %Schema{}}
assert cast(type, nil) == {:ok, nil}
assert match?(:any, type)
end
test "many" do
embed = %Ecto.Embedded{field: :embed, cardinality: :many,
owner: __MODULE__, related: Schema}
type = {:embed, embed}
assert {:ok, [%Schema{id: @uuid_string, a: 1, c: 0}]} =
adapter_load(Ecto.TestAdapter, type, [%{"id" => @uuid_binary, "abc" => 1}])
assert {:ok, []} == adapter_load(Ecto.TestAdapter, type, nil)
assert :error == adapter_load(Ecto.TestAdapter, type, 1)
assert {:ok, [%{id: @uuid_binary, abc: 1, c: 0}]} ==
adapter_dump(Ecto.TestAdapter, type, [%Schema{id: @uuid_string, a: 1}])
assert {:ok, nil} = adapter_dump(Ecto.TestAdapter, type, nil)
assert :error = adapter_dump(Ecto.TestAdapter, type, 1)
assert cast(type, [%{"abc" => 1}]) == :error
assert cast(type, [%Schema{}]) == {:ok, [%Schema{}]}
assert cast(type, []) == {:ok, []}
assert match?({:array, :any}, type)
end
end
@date ~D[2015-12-31]
@leap_date ~D[2000-02-29]
@date_unix_epoch ~D[1970-01-01]
describe "date" do
test "cast" do
assert Ecto.Type.cast(:date, @date) == {:ok, @date}
assert Ecto.Type.cast(:date, "2015-12-31") == {:ok, @date}
assert Ecto.Type.cast(:date, "2000-02-29") == {:ok, @leap_date}
assert Ecto.Type.cast(:date, "2015-00-23") == :error
assert Ecto.Type.cast(:date, "2015-13-23") == :error
assert Ecto.Type.cast(:date, "2015-01-00") == :error
assert Ecto.Type.cast(:date, "2015-01-32") == :error
assert Ecto.Type.cast(:date, "2015-02-29") == :error
assert Ecto.Type.cast(:date, "1900-02-29") == :error
assert Ecto.Type.cast(:date, %{"year" => "2015", "month" => "12", "day" => "31"}) ==
{:ok, @date}
assert Ecto.Type.cast(:date, %{year: 2015, month: 12, day: 31}) ==
{:ok, @date}
assert Ecto.Type.cast(:date, %{"year" => "", "month" => "", "day" => ""}) ==
{:ok, nil}
assert Ecto.Type.cast(:date, %{year: nil, month: nil, day: nil}) ==
{:ok, nil}
assert Ecto.Type.cast(:date, %{"year" => "2015", "month" => "", "day" => "31"}) ==
:error
assert Ecto.Type.cast(:date, %{"year" => "2015", "month" => nil, "day" => "31"}) ==
:error
assert Ecto.Type.cast(:date, %{"year" => "2015", "month" => nil}) ==
:error
assert Ecto.Type.cast(:date, %{"year" => "", "month" => "01", "day" => "30"}) ==
:error
assert Ecto.Type.cast(:date, %{"year" => nil, "month" => "01", "day" => "30"}) ==
:error
assert Ecto.Type.cast(:date, DateTime.from_unix!(10)) ==
{:ok, @date_unix_epoch}
assert Ecto.Type.cast(:date, ~N[1970-01-01 12:23:34]) ==
{:ok, @date_unix_epoch}
assert Ecto.Type.cast(:date, @date) ==
{:ok, @date}
assert Ecto.Type.cast(:date, ~T[12:23:34]) ==
:error
assert Ecto.Type.cast(:date, "2015-12-31T00:00:00") == {:ok, @date}
assert Ecto.Type.cast(:date, "2015-12-31 00:00:00") == {:ok, @date}
end
# Date values dump through unchanged.
test "dump" do
assert Ecto.Type.dump(:date, @date) == {:ok, @date}
assert Ecto.Type.dump(:date, @leap_date) == {:ok, @leap_date}
assert Ecto.Type.dump(:date, @date_unix_epoch) == {:ok, @date_unix_epoch}
end
# Date values load through unchanged.
test "load" do
assert Ecto.Type.load(:date, @date) == {:ok, @date}
assert Ecto.Type.load(:date, @leap_date) == {:ok, @leap_date}
assert Ecto.Type.load(:date, @date_unix_epoch) == {:ok, @date_unix_epoch}
end
end
@time ~T[23:50:07]
@time_zero ~T[23:50:00]
@time_zero_usec ~T[23:50:00.000000]
@time_usec ~T[23:50:07.030000]
describe "time" do
test "cast" do
assert Ecto.Type.cast(:time, @time) == {:ok, @time}
assert Ecto.Type.cast(:time, @time_usec) == {:ok, @time}
assert Ecto.Type.cast(:time, @time_zero) == {:ok, @time_zero}
assert Ecto.Type.cast(:time, "23:50") == {:ok, @time_zero}
assert Ecto.Type.cast(:time, "23:50:07") == {:ok, @time}
assert Ecto.Type.cast(:time, "23:50:07Z") == {:ok, @time}
assert Ecto.Type.cast(:time, "23:50:07.030000") == {:ok, @time}
assert Ecto.Type.cast(:time, "23:50:07.030000Z") == {:ok, @time}
assert Ecto.Type.cast(:time, "24:01") == :error
assert Ecto.Type.cast(:time, "00:61") == :error
assert Ecto.Type.cast(:time, "00:00.123") == :error
assert Ecto.Type.cast(:time, "00:00Z") == :error
assert Ecto.Type.cast(:time, "24:01:01") == :error
assert Ecto.Type.cast(:time, "00:61:00") == :error
assert Ecto.Type.cast(:time, "00:00:61") == :error
assert Ecto.Type.cast(:time, "00:00:009") == :error
assert Ecto.Type.cast(:time, "00:00:00.A00") == :error
assert Ecto.Type.cast(:time, %{"hour" => "23", "minute" => "50", "second" => "07"}) ==
{:ok, @time}
assert Ecto.Type.cast(:time, %{hour: 23, minute: 50, second: 07}) ==
{:ok, @time}
assert Ecto.Type.cast(:time, %{"hour" => "", "minute" => ""}) ==
{:ok, nil}
assert Ecto.Type.cast(:time, %{hour: nil, minute: nil}) ==
{:ok, nil}
assert Ecto.Type.cast(:time, %{"hour" => "23", "minute" => "50"}) ==
{:ok, @time_zero}
assert Ecto.Type.cast(:time, %{hour: 23, minute: 50}) ==
{:ok, @time_zero}
assert Ecto.Type.cast(:time, %{hour: 23, minute: 50, second: 07, microsecond: 30_000}) ==
{:ok, @time}
assert Ecto.Type.cast(:time, %{"hour" => 23, "minute" => 50, "second" => 07, "microsecond" => 30_000}) ==
{:ok, @time}
assert Ecto.Type.cast(:time, %{"hour" => "", "minute" => "50"}) ==
:error
assert Ecto.Type.cast(:time, %{hour: 23, minute: nil}) ==
:error
assert Ecto.Type.cast(:time, ~N[2016-11-11 23:30:10]) ==
{:ok, ~T[23:30:10]}
assert Ecto.Type.cast(:time, ~D[2016-11-11]) ==
:error
end
test "dump" do
assert Ecto.Type.dump(:time, @time) == {:ok, @time}
assert Ecto.Type.dump(:time, @time_zero) == {:ok, @time_zero}
assert_raise ArgumentError, ~r":time expects microseconds to be empty", fn ->
Ecto.Type.dump(:time, @time_usec)
end
end
test "load" do
assert Ecto.Type.load(:time, @time) == {:ok, @time}
assert Ecto.Type.load(:time, @time_usec) == {:ok, @time}
assert Ecto.Type.load(:time, @time_zero) == {:ok, @time_zero}
end
end
describe "time_usec" do
test "cast from Time" do
assert Ecto.Type.cast(:time_usec, @time_usec) == {:ok, @time_usec}
assert Ecto.Type.cast(:time_usec, @time_zero) == {:ok, @time_zero_usec}
end
test "cast from binary" do
assert Ecto.Type.cast(:time_usec, "23:50:00") == {:ok, @time_zero_usec}
assert Ecto.Type.cast(:time_usec, "23:50:00Z") == {:ok, @time_zero_usec}
assert Ecto.Type.cast(:time_usec, "23:50:07.03") == {:ok, @time_usec}
assert Ecto.Type.cast(:time_usec, "23:50:07.03Z") == {:ok, @time_usec}
assert Ecto.Type.cast(:time_usec, "23:50:07.030000") == {:ok, @time_usec}
assert Ecto.Type.cast(:time_usec, "23:50:07.030000Z") == {:ok, @time_usec}
assert Ecto.Type.cast(:time_usec, "24:01:01") == :error
assert Ecto.Type.cast(:time_usec, "00:61:00") == :error
assert Ecto.Type.cast(:time_usec, "00:00:61") == :error
assert Ecto.Type.cast(:time_usec, "00:00:009") == :error
assert Ecto.Type.cast(:time_usec, "00:00:00.A00") == :error
end
test "cast from map" do
assert Ecto.Type.cast(:time_usec, %{"hour" => "23", "minute" => "50", "second" => "00"}) == {:ok, @time_zero_usec}
assert Ecto.Type.cast(:time_usec, %{hour: 23, minute: 50, second: 0}) == {:ok, @time_zero_usec}
assert Ecto.Type.cast(:time_usec, %{"hour" => "", "minute" => ""}) == {:ok, nil}
assert Ecto.Type.cast(:time_usec, %{hour: nil, minute: nil}) == {:ok, nil}
assert Ecto.Type.cast(:time_usec, %{"hour" => "23", "minute" => "50"}) == {:ok, @time_zero_usec}
assert Ecto.Type.cast(:time_usec, %{hour: 23, minute: 50}) == {:ok, @time_zero_usec}
assert Ecto.Type.cast(:time_usec, %{hour: 23, minute: 50, second: 07, microsecond: 30_000}) == {:ok, @time_usec}
assert Ecto.Type.cast(:time_usec, %{"hour" => 23, "minute" => 50, "second" => 07, "microsecond" => 30_000}) == {:ok, @time_usec}
assert Ecto.Type.cast(:time_usec, %{"hour" => "", "minute" => "50"}) == :error
assert Ecto.Type.cast(:time_usec, %{hour: 23, minute: nil}) == :error
end
test "cast from NaiveDateTime" do
assert Ecto.Type.cast(:time_usec, ~N[2016-11-11 23:30:10]) == {:ok, ~T[23:30:10.000000]}
end
test "cast from DateTime" do
utc_datetime = DateTime.from_naive!(~N[2016-11-11 23:30:10], "Etc/UTC")
assert Ecto.Type.cast(:time_usec, utc_datetime) == {:ok, ~T[23:30:10.000000]}
end
test "cast from Date" do
assert Ecto.Type.cast(:time_usec, ~D[2016-11-11]) == :error
end
test "dump" do
assert Ecto.Type.dump(:time_usec, @time_usec) == {:ok, @time_usec}
assert_raise ArgumentError, ~r":time_usec expects microsecond precision", fn ->
Ecto.Type.dump(:time_usec, @time)
end
end
test "load" do
assert Ecto.Type.load(:time_usec, @time_usec) == {:ok, @time_usec}
assert Ecto.Type.load(:time_usec, @time_zero) == {:ok, @time_zero_usec}
end
end
@datetime ~N[2015-01-23 23:50:07]
@datetime_zero ~N[2015-01-23 23:50:00]
@datetime_zero_usec ~N[2015-01-23 23:50:00.000000]
@datetime_usec ~N[2015-01-23 23:50:07.008000]
@datetime_leapyear ~N[2000-02-29 23:50:07]
@datetime_leapyear_usec ~N[2000-02-29 23:50:07.000000]
describe "naive_datetime" do
test "casting naive datetime" do
assert Ecto.Type.cast(:naive_datetime, @datetime) == {:ok, @datetime}
assert Ecto.Type.cast(:naive_datetime, @datetime_usec) == {:ok, @datetime}
assert Ecto.Type.cast(:naive_datetime, @datetime_leapyear) == {:ok, @datetime_leapyear}
assert Ecto.Type.cast(:naive_datetime, "2015-01-23 23:50") == {:ok, @datetime_zero}
assert Ecto.Type.cast(:naive_datetime, "2015-01-23 23:50:07") == {:ok, @datetime}
assert Ecto.Type.cast(:naive_datetime, "2015-01-23T23:50:07") == {:ok, @datetime}
assert Ecto.Type.cast(:naive_datetime, "2015-01-23T23:50:07Z") == {:ok, @datetime}
assert Ecto.Type.cast(:naive_datetime, "2000-02-29T23:50:07") == {:ok, @datetime_leapyear}
assert Ecto.Type.cast(:naive_datetime, "2015-01-23T23:50:07.008000") == {:ok, @datetime}
assert Ecto.Type.cast(:naive_datetime, "2015-01-23T23:50:07.008000Z") == {:ok, @datetime}
assert Ecto.Type.cast(:naive_datetime, "2015-01-23P23:50:07") == :error
assert Ecto.Type.cast(:naive_datetime, "2015-01-23 23:50.123") == :error
assert Ecto.Type.cast(:naive_datetime, "2015-01-23 23:50Z") == :error
assert Ecto.Type.cast(:naive_datetime, %{"year" => "2015", "month" => "1", "day" => "23",
"hour" => "23", "minute" => "50", "second" => "07"}) ==
{:ok, @datetime}
assert Ecto.Type.cast(:naive_datetime, %{year: 2015, month: 1, day: 23, hour: 23, minute: 50, second: 07}) ==
{:ok, @datetime}
assert Ecto.Type.cast(:naive_datetime, %{"year" => "", "month" => "", "day" => "",
"hour" => "", "minute" => ""}) ==
{:ok, nil}
assert Ecto.Type.cast(:naive_datetime, %{year: nil, month: nil, day: nil, hour: nil, minute: nil}) ==
{:ok, nil}
assert Ecto.Type.cast(:naive_datetime, %{"year" => "2015", "month" => "1", "day" => "23",
"hour" => "23", "minute" => "50"}) ==
{:ok, @datetime_zero}
assert Ecto.Type.cast(:naive_datetime, %{year: 2015, month: 1, day: 23, hour: 23, minute: 50}) ==
{:ok, @datetime_zero}
assert Ecto.Type.cast(:naive_datetime, %{year: 2015, month: 1, day: 23, hour: 23,
minute: 50, second: 07, microsecond: 8_000}) ==
{:ok, @datetime}
assert Ecto.Type.cast(:naive_datetime, %{"year" => 2015, "month" => 1, "day" => 23,
"hour" => 23, "minute" => 50, "second" => 07,
"microsecond" => 8_000}) ==
{:ok, @datetime}
assert Ecto.Type.cast(:naive_datetime, %{"year" => "2015", "month" => "1", "day" => "23",
"hour" => "", "minute" => "50"}) ==
:error
assert Ecto.Type.cast(:naive_datetime, %{year: 2015, month: 1, day: 23, hour: 23, minute: nil}) ==
:error
assert Ecto.Type.cast(:naive_datetime, %{"year" => "", "month" => "", "day" => "",
"hour" => "23", "minute" => "50", "second" => "07"}) ==
:error
assert Ecto.Type.cast(:naive_datetime, %{year: nil, month: nil, day: nil, hour: 23, minute: 50, second: 07}) ==
:error
assert Ecto.Type.cast(:naive_datetime, %{"year" => "2015", "month" => "1", "day" => "23",
"hour" => "", "minute" => ""}) ==
:error
assert Ecto.Type.cast(:naive_datetime, %{year: 2015, month: 1, day: 23, hour: nil, minute: nil}) ==
:error
assert Ecto.Type.cast(:naive_datetime, DateTime.from_unix!(10, :second)) ==
{:ok, ~N[1970-01-01 00:00:10]}
assert Ecto.Type.cast(:naive_datetime, @time) == :error
assert Ecto.Type.cast(:naive_datetime, 1) == :error
end
if Version.match?(System.version(), ">= 1.7.0") do
test "cast negative datetime" do
datetime = NaiveDateTime.from_iso8601!("-2015-01-23 23:50:07Z")
datetime_zero = NaiveDateTime.from_iso8601!("-2015-01-23 23:50:00Z")
assert Ecto.Type.cast(:naive_datetime, "-2015-01-23 23:50") == {:ok, datetime_zero}
assert Ecto.Type.cast(:naive_datetime, "-2015-01-23 23:50:07") == {:ok, datetime}
assert Ecto.Type.cast(:naive_datetime, "-2015-01-23 23:50:07bad") == :error
end
end
test "dump" do
assert Ecto.Type.dump(:naive_datetime, @datetime) == {:ok, @datetime}
assert Ecto.Type.dump(:naive_datetime, @datetime_zero) == {:ok, @datetime_zero}
assert Ecto.Type.dump(:naive_datetime, @datetime_leapyear) == {:ok, @datetime_leapyear}
assert_raise ArgumentError, ~r":naive_datetime expects microseconds to be empty", fn ->
Ecto.Type.dump(:naive_datetime, @datetime_usec)
end
end
test "load" do
assert Ecto.Type.load(:naive_datetime, @datetime) == {:ok, @datetime}
assert Ecto.Type.load(:naive_datetime, @datetime_zero) == {:ok, @datetime_zero}
assert Ecto.Type.load(:naive_datetime, @datetime_usec) == {:ok, @datetime}
assert Ecto.Type.load(:naive_datetime, @datetime_leapyear) == {:ok, @datetime_leapyear}
assert Ecto.Type.load(:naive_datetime, DateTime.from_naive!(@datetime, "Etc/UTC")) ==
{:ok, @datetime}
assert Ecto.Type.load(:naive_datetime, DateTime.from_naive!(@datetime_zero, "Etc/UTC")) ==
{:ok, @datetime_zero}
assert Ecto.Type.load(:naive_datetime, DateTime.from_naive!(@datetime_usec, "Etc/UTC")) ==
{:ok, @datetime}
assert Ecto.Type.load(:naive_datetime, DateTime.from_naive!(@datetime_leapyear, "Etc/UTC")) ==
{:ok, @datetime_leapyear}
end
end
describe "naive_datetime_usec" do
test "cast from NaiveDateTime" do
assert Ecto.Type.cast(:naive_datetime_usec, @datetime_zero) == {:ok, @datetime_zero_usec}
assert Ecto.Type.cast(:naive_datetime_usec, @datetime_usec) == {:ok, @datetime_usec}
assert Ecto.Type.cast(:naive_datetime_usec, @datetime_leapyear) == {:ok, @datetime_leapyear_usec}
end
test "cast from binary" do
assert Ecto.Type.cast(:naive_datetime_usec, "2015-01-23 23:50:00") == {:ok, @datetime_zero_usec}
assert Ecto.Type.cast(:naive_datetime_usec, "2015-01-23T23:50:00") == {:ok, @datetime_zero_usec}
assert Ecto.Type.cast(:naive_datetime_usec, "2015-01-23T23:50:00Z") == {:ok, @datetime_zero_usec}
assert Ecto.Type.cast(:naive_datetime_usec, "2000-02-29T23:50:07") == {:ok, @datetime_leapyear_usec}
assert Ecto.Type.cast(:naive_datetime_usec, "2015-01-23T23:50:07.008000") == {:ok, @datetime_usec}
assert Ecto.Type.cast(:naive_datetime_usec, "2015-01-23T23:50:07.008000Z") == {:ok, @datetime_usec}
assert Ecto.Type.cast(:naive_datetime_usec, "2015-01-23P23:50:07") == :error
end
test "cast from map" do
term = %{"year" => "2015", "month" => "1", "day" => "23", "hour" => "23", "minute" => "50", "second" => "00"}
assert Ecto.Type.cast(:naive_datetime_usec, term) == {:ok, @datetime_zero_usec}
term = %{year: 2015, month: 1, day: 23, hour: 23, minute: 50, second: 0}
assert Ecto.Type.cast(:naive_datetime_usec, term) == {:ok, @datetime_zero_usec}
term = %{"year" => "", "month" => "", "day" => "", "hour" => "", "minute" => ""}
assert Ecto.Type.cast(:naive_datetime_usec, term) == {:ok, nil}
term = %{year: nil, month: nil, day: nil, hour: nil, minute: nil}
assert Ecto.Type.cast(:naive_datetime_usec, term) == {:ok, nil}
term = %{"year" => "2015", "month" => "1", "day" => "23", "hour" => "23", "minute" => "50"}
assert Ecto.Type.cast(:naive_datetime_usec, term) == {:ok, @datetime_zero_usec}
term = %{year: 2015, month: 1, day: 23, hour: 23, minute: 50}
assert Ecto.Type.cast(:naive_datetime_usec, term) == {:ok, @datetime_zero_usec}
term = %{year: 2015, month: 1, day: 23, hour: 23, minute: 50, second: 07, microsecond: 8_000}
assert Ecto.Type.cast(:naive_datetime_usec, term) == {:ok, @datetime_usec}
term = %{
"year" => 2015, "month" => 1, "day" => 23,
"hour" => 23, "minute" => 50, "second" => 07, "microsecond" => 8_000
}
assert Ecto.Type.cast(:naive_datetime_usec, term) == {:ok, @datetime_usec}
term = %{
"year" => "2015", "month" => "1", "day" => "23",
"hour" => "", "minute" => "50"
}
assert Ecto.Type.cast(:naive_datetime_usec, term) == :error
term = %{year: 2015, month: 1, day: 23, hour: 23, minute: nil}
assert Ecto.Type.cast(:naive_datetime_usec, term) == :error
end
test "cast from DateTime" do
assert Ecto.Type.cast(:naive_datetime_usec, DateTime.from_unix!(10, :second)) == {:ok, ~N[1970-01-01 00:00:10.000000]}
end
test "cast from Time" do
assert Ecto.Type.cast(:naive_datetime_usec, ~T[23:50:07]) == :error
end
test "cast from integer" do
assert Ecto.Type.cast(:naive_datetime_usec, 1) == :error
end
test "dump" do
assert Ecto.Type.dump(:naive_datetime_usec, @datetime_usec) == {:ok, @datetime_usec}
assert Ecto.Type.dump(:naive_datetime_usec, @datetime_leapyear_usec) == {:ok, @datetime_leapyear_usec}
assert_raise ArgumentError, ~r":naive_datetime_usec expects microsecond precision", fn ->
Ecto.Type.dump(:naive_datetime_usec, @datetime)
end
end
test "load" do
assert Ecto.Type.load(:naive_datetime_usec, @datetime_usec) == {:ok, @datetime_usec}
assert Ecto.Type.load(:naive_datetime_usec, @datetime_leapyear_usec) == {:ok, @datetime_leapyear_usec}
end
end
  # Shared fixtures for the :utc_datetime(_usec) suites below. The assertions
  # in those suites compare these against "2015-01-23 23:50[:07]" strings, at
  # second and microsecond precision, plus a leap-year (2000-02-29) variant.
  @datetime DateTime.from_unix!(1422057007, :second)
  @datetime_zero DateTime.from_unix!(1422057000, :second)
  @datetime_zero_usec DateTime.from_unix!(1422057000000000, :microsecond)
  @datetime_usec DateTime.from_unix!(1422057007008000, :microsecond)
  # The same instant as @datetime_usec, but expressed in Europe/Berlin (CET);
  # used to check that casting normalizes back to UTC.
  @datetime_usec_tz %DateTime{
    calendar: Calendar.ISO,
    day: 24,
    hour: 0,
    microsecond: {8000, 6},
    minute: 50,
    month: 1,
    second: 7,
    std_offset: 0,
    time_zone: "Europe/Berlin",
    utc_offset: 3600,
    year: 2015,
    zone_abbr: "CET"
  }
  @datetime_leapyear DateTime.from_unix!(951868207, :second)
  @datetime_leapyear_usec DateTime.from_unix!(951868207008000, :microsecond)
  describe "utc_datetime" do
    test "cast" do
      # DateTimes, ISO-8601-ish strings (with/without zone offsets), and
      # year/month/day maps (string or atom keys) all cast; any offset is
      # normalized to UTC and sub-second precision is truncated.
      assert Ecto.Type.cast(:utc_datetime, @datetime) == {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, @datetime_usec) == {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, @datetime_leapyear) == {:ok, @datetime_leapyear}
      assert Ecto.Type.cast(:utc_datetime, "2015-01-23 23:50") == {:ok, @datetime_zero}
      assert Ecto.Type.cast(:utc_datetime, "2015-01-23 23:50:07") == {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, "2015-01-23T23:50:07") == {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, "2015-01-23T23:50:07Z") == {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, "2015-01-24T09:50:07+10:00") == {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, "2000-02-29T23:50:07") == {:ok, @datetime_leapyear}
      assert Ecto.Type.cast(:utc_datetime, "2015-01-23T23:50:07.008000") == {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, "2015-01-23T23:50:07.008000Z") == {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, "2015-01-23T17:50:07.008000-06:00") == {:ok, @datetime}
      # Malformed separators / fractional minutes / zone on a minute-only
      # string are rejected.
      assert Ecto.Type.cast(:utc_datetime, "2015-01-23P23:50:07") == :error
      assert Ecto.Type.cast(:utc_datetime, "2015-01-23 23:50.123") == :error
      assert Ecto.Type.cast(:utc_datetime, "2015-01-23 23:50Z") == :error
      assert Ecto.Type.cast(:utc_datetime, %{"year" => "2015", "month" => "1", "day" => "23",
                                             "hour" => "23", "minute" => "50", "second" => "07"}) ==
             {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, %{year: 2015, month: 1, day: 23, hour: 23, minute: 50, second: 07}) ==
             {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, %DateTime{calendar: Calendar.ISO, year: 2015, month: 1, day: 24,
                                                     hour: 9, minute: 50, second: 7, microsecond: {0, 0},
                                                     std_offset: 0, utc_offset: 36000,
                                                     time_zone: "Etc/GMT-10", zone_abbr: "+10"}) ==
             {:ok, @datetime}
      # All-empty/all-nil component maps cast to {:ok, nil}; partially empty
      # ones are an error.
      assert Ecto.Type.cast(:utc_datetime, %{"year" => "", "month" => "", "day" => "",
                                             "hour" => "", "minute" => ""}) ==
             {:ok, nil}
      assert Ecto.Type.cast(:utc_datetime, %{year: nil, month: nil, day: nil, hour: nil, minute: nil}) ==
             {:ok, nil}
      assert Ecto.Type.cast(:utc_datetime, %{"year" => "2015", "month" => "1", "day" => "23",
                                             "hour" => "23", "minute" => "50"}) ==
             {:ok, @datetime_zero}
      assert Ecto.Type.cast(:utc_datetime, %{year: 2015, month: 1, day: 23, hour: 23, minute: 50}) ==
             {:ok, @datetime_zero}
      assert Ecto.Type.cast(:utc_datetime, %{year: 2015, month: 1, day: 23, hour: 23,
                                             minute: 50, second: 07, microsecond: 8_000}) ==
             {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, %{"year" => 2015, "month" => 1, "day" => 23,
                                             "hour" => 23, "minute" => 50, "second" => 07,
                                             "microsecond" => 8_000}) ==
             {:ok, @datetime}
      assert Ecto.Type.cast(:utc_datetime, %{"year" => "2015", "month" => "1", "day" => "23",
                                             "hour" => "", "minute" => "50"}) ==
             :error
      assert Ecto.Type.cast(:utc_datetime, %{year: 2015, month: 1, day: 23, hour: 23, minute: nil}) ==
             :error
      assert Ecto.Type.cast(:utc_datetime, ~T[12:23:34]) == :error
      assert Ecto.Type.cast(:utc_datetime, 1) == :error
    end

    # Negative (BCE-style) years are only parseable on Elixir >= 1.7, so the
    # test is compiled conditionally.
    if Version.match?(System.version(), ">= 1.7.0") do
      test "cast negative datetime" do
        {:ok, datetime, 0} = DateTime.from_iso8601("-2015-01-23 23:50:07Z")
        {:ok, datetime_zero, 0} = DateTime.from_iso8601("-2015-01-23 23:50:00Z")
        assert Ecto.Type.cast(:utc_datetime, "-2015-01-23 23:50") == {:ok, datetime_zero}
        assert Ecto.Type.cast(:utc_datetime, "-2015-01-23 23:50:07") == {:ok, datetime}
        assert Ecto.Type.cast(:utc_datetime, "-2015-01-23 23:50:07bad") == :error
      end
    end

    test "dump" do
      assert Ecto.Type.dump(:utc_datetime, @datetime) == DateTime.from_naive(~N[2015-01-23 23:50:07], "Etc/UTC")
      assert Ecto.Type.dump(:utc_datetime, @datetime_zero) == DateTime.from_naive(~N[2015-01-23 23:50:00], "Etc/UTC")
      assert Ecto.Type.dump(:utc_datetime, @datetime_leapyear) == DateTime.from_naive(~N[2000-02-29 23:50:07], "Etc/UTC")
      # The second-precision type refuses values carrying microseconds.
      assert_raise ArgumentError, ~r":utc_datetime expects microseconds to be empty", fn ->
        Ecto.Type.dump(:utc_datetime, @datetime_usec)
      end
    end

    test "load" do
      # Both NaiveDateTime and DateTime rows load, with microseconds dropped.
      assert Ecto.Type.load(:utc_datetime, ~N[2015-01-23 23:50:07]) == {:ok, @datetime}
      assert Ecto.Type.load(:utc_datetime, ~N[2015-01-23 23:50:00]) == {:ok, @datetime_zero}
      assert Ecto.Type.load(:utc_datetime, ~N[2015-01-23 23:50:07.008000]) == {:ok, @datetime}
      assert Ecto.Type.load(:utc_datetime, ~N[2000-02-29 23:50:07]) == {:ok, @datetime_leapyear}
      assert Ecto.Type.load(:utc_datetime, @datetime) == {:ok, @datetime}
      assert Ecto.Type.load(:utc_datetime, @datetime_zero) == {:ok, @datetime_zero}
      assert Ecto.Type.load(:utc_datetime, @datetime_usec) == {:ok, @datetime}
      assert Ecto.Type.load(:utc_datetime, @datetime_leapyear) == {:ok, @datetime_leapyear}
    end
  end
  describe "utc_datetime_usec" do
    test "cast from DateTime" do
      # Second-precision values are widened to microseconds; zoned values
      # are normalized to UTC.
      assert Ecto.Type.cast(:utc_datetime_usec, @datetime_zero) == {:ok, @datetime_zero_usec}
      assert Ecto.Type.cast(:utc_datetime_usec, @datetime_usec) == {:ok, @datetime_usec}
      assert Ecto.Type.cast(:utc_datetime_usec, @datetime_usec_tz) == {:ok, @datetime_usec}
    end

    test "cast from binary" do
      assert Ecto.Type.cast(:utc_datetime_usec, "2015-01-23 23:50:00") == {:ok, @datetime_zero_usec}
      assert Ecto.Type.cast(:utc_datetime_usec, "2015-01-23T23:50:00") == {:ok, @datetime_zero_usec}
      assert Ecto.Type.cast(:utc_datetime_usec, "2015-01-23T23:50:00Z") == {:ok, @datetime_zero_usec}
      assert Ecto.Type.cast(:utc_datetime_usec, "2015-01-24T09:50:00+10:00") == {:ok, @datetime_zero_usec}
      assert Ecto.Type.cast(:utc_datetime_usec, "2015-01-23T23:50:07.008000") == {:ok, @datetime_usec}
      assert Ecto.Type.cast(:utc_datetime_usec, "2015-01-23T23:50:07.008000Z") == {:ok, @datetime_usec}
      assert Ecto.Type.cast(:utc_datetime_usec, "2015-01-23T17:50:07.008000-06:00") == {:ok, @datetime_usec}
      assert Ecto.Type.cast(:utc_datetime_usec, "2000-02-29T23:50:07.008") == {:ok, @datetime_leapyear_usec}
      assert Ecto.Type.cast(:utc_datetime_usec, "2015-01-23P23:50:07") == :error
    end

    test "cast from map" do
      # Component maps with string or atom keys; missing second/microsecond
      # components default to zero, all-empty maps cast to {:ok, nil}, and
      # partially-empty maps are rejected.
      term = %{
        "year" => "2015", "month" => "1", "day" => "23",
        "hour" => "23", "minute" => "50", "second" => "00"
      }
      assert Ecto.Type.cast(:utc_datetime_usec, term) == {:ok, @datetime_zero_usec}
      term = %{year: 2015, month: 1, day: 23, hour: 23, minute: 50, second: 0}
      assert Ecto.Type.cast(:utc_datetime_usec, term) == {:ok, @datetime_zero_usec}
      term = %DateTime{
        calendar: Calendar.ISO, year: 2015, month: 1, day: 24,
        hour: 9, minute: 50, second: 0, microsecond: {0, 0},
        std_offset: 0, utc_offset: 36000,
        time_zone: "Etc/GMT-10", zone_abbr: "+10"
      }
      assert Ecto.Type.cast(:utc_datetime_usec, term) == {:ok, @datetime_zero_usec}
      term = %DateTime{
        calendar: Calendar.ISO, year: 2015, month: 1, day: 24,
        hour: 9, minute: 50, second: 7, microsecond: {8000, 6},
        std_offset: 0, utc_offset: 36000,
        time_zone: "Etc/GMT-10", zone_abbr: "+10"
      }
      assert Ecto.Type.cast(:utc_datetime_usec, term) == {:ok, @datetime_usec}
      term = %{"year" => "", "month" => "", "day" => "", "hour" => "", "minute" => ""}
      assert Ecto.Type.cast(:utc_datetime_usec, term) == {:ok, nil}
      term = %{year: nil, month: nil, day: nil, hour: nil, minute: nil}
      assert Ecto.Type.cast(:utc_datetime_usec, term) == {:ok, nil}
      term = %{"year" => "2015", "month" => "1", "day" => "23", "hour" => "23", "minute" => "50"}
      assert Ecto.Type.cast(:utc_datetime_usec, term) == {:ok, @datetime_zero_usec}
      term = %{year: 2015, month: 1, day: 23, hour: 23, minute: 50}
      assert Ecto.Type.cast(:utc_datetime_usec, term) == {:ok, @datetime_zero_usec}
      term = %{year: 2015, month: 1, day: 23, hour: 23, minute: 50, second: 07, microsecond: 8_000}
      assert Ecto.Type.cast(:utc_datetime_usec, term) == {:ok, @datetime_usec}
      term = %{
        "year" => 2015, "month" => 1, "day" => 23,
        "hour" => 23, "minute" => 50, "second" => 07, "microsecond" => 8_000
      }
      assert Ecto.Type.cast(:utc_datetime_usec, term) == {:ok, @datetime_usec}
      term = %{"year" => "2015", "month" => "1", "day" => "23", "hour" => "", "minute" => "50"}
      assert Ecto.Type.cast(:utc_datetime_usec, term) == :error
      term = %{year: 2015, month: 1, day: 23, hour: 23, minute: nil}
      assert Ecto.Type.cast(:utc_datetime_usec, term) == :error
    end

    test "cast from Time" do
      assert Ecto.Type.cast(:utc_datetime_usec, ~T[12:23:34]) == :error
    end

    test "cast from integer" do
      assert Ecto.Type.cast(:utc_datetime_usec, 1) == :error
    end

    test "dump" do
      assert Ecto.Type.dump(:utc_datetime_usec, @datetime_usec) == DateTime.from_naive(~N[2015-01-23 23:50:07.008000], "Etc/UTC")
      # Second-precision input is refused by the usec type.
      assert_raise ArgumentError, ~r":utc_datetime_usec expects microsecond precision", fn ->
        Ecto.Type.dump(:utc_datetime_usec, @datetime)
      end
    end

    test "load" do
      assert Ecto.Type.load(:utc_datetime_usec, @datetime_usec) == {:ok, @datetime_usec}
      assert Ecto.Type.load(:utc_datetime_usec, ~N[2015-01-23 23:50:07.008000]) == {:ok, @datetime_usec}
      assert Ecto.Type.load(:utc_datetime_usec, ~N[2000-02-29 23:50:07.008000]) == {:ok, @datetime_leapyear_usec}
      assert Ecto.Type.load(:utc_datetime_usec, @datetime_leapyear_usec) == {:ok, @datetime_leapyear_usec}
      assert Ecto.Type.load(:utc_datetime_usec, @datetime_zero) == {:ok, @datetime_zero_usec}
      # A bare Date is not loadable as a datetime.
      assert Ecto.Type.load(:utc_datetime_usec, ~D[2018-01-01]) == :error
    end
  end
  describe "equal?/3" do
    test "primitive" do
      # Primitive types compare structurally; no coercion across types.
      assert Ecto.Type.equal?(:integer, 1, 1)
      refute Ecto.Type.equal?(:integer, 1, 2)
      refute Ecto.Type.equal?(:integer, 1, "1")
      refute Ecto.Type.equal?(:integer, 1, nil)
    end

    test "composite primitive" do
      assert Ecto.Type.equal?({:array, :integer}, [1], [1])
      refute Ecto.Type.equal?({:array, :integer}, [1], [2])
      refute Ecto.Type.equal?({:array, :integer}, [1, 1], [1])
      refute Ecto.Type.equal?({:array, :integer}, [1], [1, 1])
    end

    test "semantical comparison" do
      # Decimal and date/time types compare by value, not representation:
      # 1 == 1.0 for decimals, and second vs usec precision of the same
      # instant compare equal.
      assert Ecto.Type.equal?(:decimal, d(1), d("1.0"))
      refute Ecto.Type.equal?(:decimal, d(1), 1)
      refute Ecto.Type.equal?(:decimal, d(1), d("1.1"))
      refute Ecto.Type.equal?(:decimal, d(1), nil)
      assert Ecto.Type.equal?(:time, ~T[09:00:00], ~T[09:00:00.000000])
      refute Ecto.Type.equal?(:time, ~T[09:00:00], ~T[09:00:00.999999])
      assert Ecto.Type.equal?(:time_usec, ~T[09:00:00], ~T[09:00:00.000000])
      refute Ecto.Type.equal?(:time_usec, ~T[09:00:00], ~T[09:00:00.999999])
      assert Ecto.Type.equal?(:naive_datetime, ~N[2018-01-01 09:00:00], ~N[2018-01-01 09:00:00.000000])
      refute Ecto.Type.equal?(:naive_datetime, ~N[2018-01-01 09:00:00], ~N[2018-01-01 09:00:00.999999])
      assert Ecto.Type.equal?(:naive_datetime_usec, ~N[2018-01-01 09:00:00], ~N[2018-01-01 09:00:00.000000])
      refute Ecto.Type.equal?(:naive_datetime_usec, ~N[2018-01-01 09:00:00], ~N[2018-01-01 09:00:00.999999])
      assert Ecto.Type.equal?(:utc_datetime, utc("2018-01-01 09:00:00"), utc("2018-01-01 09:00:00.000000"))
      refute Ecto.Type.equal?(:utc_datetime, utc("2018-01-01 09:00:00"), utc("2018-01-01 09:00:00.999999"))
      assert Ecto.Type.equal?(:utc_datetime_usec, utc("2018-01-01 09:00:00"), utc("2018-01-01 09:00:00.000000"))
      refute Ecto.Type.equal?(:utc_datetime_usec, utc("2018-01-01 09:00:00"), utc("2018-01-01 09:00:00.999999"))
    end

    test "composite semantical comparison" do
      # Semantic equality recurses through arrays and maps.
      assert Ecto.Type.equal?({:array, :decimal}, [d(1)], [d("1.0")])
      refute Ecto.Type.equal?({:array, :decimal}, [d(1)], [d("1.1")])
      refute Ecto.Type.equal?({:array, :decimal}, [d(1), d(1)], [d(1)])
      refute Ecto.Type.equal?({:array, :decimal}, [d(1)], [d(1), d(1)])
      assert Ecto.Type.equal?({:array, {:array, :decimal}}, [[d(1)]], [[d("1.0")]])
      refute Ecto.Type.equal?({:array, {:array, :decimal}}, [[d(1)]], [[d("1.1")]])
      assert Ecto.Type.equal?({:map, :decimal}, %{x: d(1)}, %{x: d("1.0")})
    end

    test "custom structural comparison" do
      uuid = "00000000-0000-0000-0000-000000000000"
      assert Ecto.Type.equal?(Ecto.UUID, uuid, uuid)
      refute Ecto.Type.equal?(Ecto.UUID, uuid, "")
    end

    test "custom semantical comparison" do
      # The Custom test type defines its own (deliberately inverted) equal?/2.
      assert Ecto.Type.equal?(Custom, true, false)
      refute Ecto.Type.equal?(Custom, false, false)
    end

    test "nil type" do
      # With no type, terms are compared with ==.
      assert Ecto.Type.equal?(nil, 1, 1.0)
      refute Ecto.Type.equal?(nil, 1, 2)
    end

    test "nil values" do
      # nil equals nil for every built-in type, including nested in
      # composites and for custom types.
      assert Ecto.Type.equal?(:any, nil, nil)
      assert Ecto.Type.equal?(:boolean, nil, nil)
      assert Ecto.Type.equal?(:binary, nil, nil)
      assert Ecto.Type.equal?(:binary_id, nil, nil)
      assert Ecto.Type.equal?(:date, nil, nil)
      assert Ecto.Type.equal?(:decimal, nil, nil)
      assert Ecto.Type.equal?(:float, nil, nil)
      assert Ecto.Type.equal?(:id, nil, nil)
      assert Ecto.Type.equal?(:integer, nil, nil)
      assert Ecto.Type.equal?(:map, nil, nil)
      assert Ecto.Type.equal?(:naive_datetime, nil, nil)
      assert Ecto.Type.equal?(:naive_datetime_usec, nil, nil)
      assert Ecto.Type.equal?(:string, nil, nil)
      assert Ecto.Type.equal?(:time, nil, nil)
      assert Ecto.Type.equal?(:time_usec, nil, nil)
      assert Ecto.Type.equal?(:utc_datetime, nil, nil)
      assert Ecto.Type.equal?(:utc_datetime_usec, nil, nil)
      term = [~T[10:10:10], nil]
      assert Ecto.Type.equal?({:array, :time}, term, term)
      term = %{one: nil, two: ~T[10:10:10]}
      assert Ecto.Type.equal?({:map, :time}, term, term)
      assert Ecto.Type.equal?(Custom, nil, nil)
    end

    test "bad type" do
      assert_raise ArgumentError, ~r"cannot use :foo as Ecto.Type", fn ->
        Ecto.Type.equal?(:foo, 1, 1.0)
      end
    end
  end
  # Shorthand for building a Decimal from an integer or string.
  defp d(decimal), do: Decimal.new(decimal)

  # Parses an ISO-8601 naive timestamp and tags it as UTC.
  defp utc(string) do
    string
    |> NaiveDateTime.from_iso8601!()
    |> DateTime.from_naive!("Etc/UTC")
  end
end
| 44.632963 | 134 | 0.575416 |
08283e6b5a1a69caa04e27e6d863926d26a95b3b | 1,058 | ex | Elixir | lib/level/resolvers/reaction_connection.ex | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 928 | 2018-04-03T16:18:11.000Z | 2019-09-09T17:59:55.000Z | lib/level/resolvers/reaction_connection.ex | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 74 | 2018-04-03T00:46:50.000Z | 2019-03-10T18:57:27.000Z | lib/level/resolvers/reaction_connection.ex | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 89 | 2018-04-03T17:33:20.000Z | 2019-08-19T03:40:20.000Z | defmodule Level.Resolvers.ReactionConnection do
@moduledoc """
A paginated connection for fetching a post or reply's reactions.
"""
alias Level.Pagination
alias Level.Pagination.Args
alias Level.Schemas.Post
alias Level.Schemas.Reply
  # Cursor-pagination arguments with their defaults; reactions are returned
  # oldest-first (:inserted_at ascending) unless the caller overrides
  # :order_by.
  defstruct first: nil,
            last: nil,
            before: nil,
            after: nil,
            order_by: %{
              field: :inserted_at,
              direction: :asc
            }

  @type t :: %__MODULE__{
          first: integer() | nil,
          last: integer() | nil,
          before: String.t() | nil,
          after: String.t() | nil,
          order_by: %{field: :inserted_at, direction: :asc | :desc}
        }
@doc """
Executes a paginated query for reactions.
"""
def get(%Post{} = post, args, _info) do
query = Ecto.assoc(post, :post_reactions)
Pagination.fetch_result(query, Args.build(args))
end
def get(%Reply{} = reply, args, _info) do
query = Ecto.assoc(reply, :reply_reactions)
Pagination.fetch_result(query, Args.build(args))
end
end
| 25.804878 | 67 | 0.594518 |
08284b7dc35352b6ed26593a3db697f943f5bd70 | 5,786 | ex | Elixir | lib/hexpm/accounts/audit_log.ex | pragmaticivan/hexpm | 7845d1baaf14e8811df00db550b59e51ac9675c6 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/accounts/audit_log.ex | pragmaticivan/hexpm | 7845d1baaf14e8811df00db550b59e51ac9675c6 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/accounts/audit_log.ex | pragmaticivan/hexpm | 7845d1baaf14e8811df00db550b59e51ac9675c6 | [
"Apache-2.0"
] | null | null | null | defmodule Hexpm.Accounts.AuditLog do
use Hexpm.Web, :schema
  # One log row: who (actor) performed which action with which payload
  # (params). Only inserted_at is stamped; no updated_at column is kept.
  schema "audit_logs" do
    belongs_to :actor, User
    field :user_agent, :string
    field :action, :string
    field :params, :map
    timestamps(updated_at: false)
  end
  # Builds (without inserting) an %AuditLog{} for the given actor.
  #
  # A nil actor is only accepted for the password-reset actions, which
  # happen before any user is authenticated.
  def build(nil, user_agent, action, params)
      when action in ~w(password.reset.init password.reset.finish) do
    %AuditLog{
      actor_id: nil,
      user_agent: user_agent,
      action: action,
      params: extract_params(action, params)
    }
  end

  def build(%User{id: user_id}, user_agent, action, params) do
    %AuditLog{
      actor_id: user_id,
      user_agent: user_agent,
      action: action,
      params: extract_params(action, params)
    }
  end
  # Appends the insert of one audit-log entry to an Ecto.Multi.
  #
  # When given a 1-arity function, the insert is deferred via Multi.merge/2
  # so the log params can be derived from the results of earlier steps.
  def audit(%Multi{} = multi, {user, user_agent}, action, fun) when is_function(fun, 1) do
    Multi.merge(multi, fn data ->
      Multi.insert(Multi.new(), multi_key(action), build(user, user_agent, action, fun.(data)))
    end)
  end

  def audit(%Multi{} = multi, {user, user_agent}, action, params) do
    Multi.insert(multi, multi_key(action), build(user, user_agent, action, params))
  end
def audit_many(multi, {user, user_agent}, action, list, opts \\ []) do
fields = AuditLog.__schema__(:fields) -- [:id]
extra = %{inserted_at: NaiveDateTime.utc_now()}
entries =
Enum.map(list, fn entry ->
build(user, user_agent, action, entry)
|> Map.take(fields)
|> Map.merge(extra)
end)
Multi.insert_all(multi, multi_key(action), AuditLog, entries, opts)
end
def audit_with_user(multi, {_user, user_agent}, action, fun) do
Multi.merge(multi, fn %{user: user} = data ->
Multi.insert(Multi.new(), multi_key(action), build(user, user_agent, action, fun.(data)))
end)
end
  # Normalizes the per-action payload into a plain map suitable for the
  # :params column; structs are reduced to whitelisted fields via
  # serialize/1 and fields/1 below. One clause per supported action string.
  defp extract_params("docs.publish", {package, release}),
    do: %{package: serialize(package), release: serialize(release)}

  defp extract_params("docs.revert", {package, release}),
    do: %{package: serialize(package), release: serialize(release)}

  defp extract_params("key.generate", key), do: serialize(key)
  defp extract_params("key.remove", key), do: serialize(key)

  defp extract_params("owner.add", {package, user}),
    do: %{package: serialize(package), user: serialize(user)}

  defp extract_params("owner.remove", {package, user}),
    do: %{package: serialize(package), user: serialize(user)}

  defp extract_params("release.publish", {package, release}),
    do: %{package: serialize(package), release: serialize(release)}

  defp extract_params("release.revert", {package, release}),
    do: %{package: serialize(package), release: serialize(release)}

  defp extract_params("release.retire", {package, release}),
    do: %{package: serialize(package), release: serialize(release)}

  defp extract_params("release.unretire", {package, release}),
    do: %{package: serialize(package), release: serialize(release)}

  defp extract_params("email.add", email), do: serialize(email)
  defp extract_params("email.remove", email), do: serialize(email)

  defp extract_params("email.primary", {old_email, new_email}),
    do: %{old_email: serialize(old_email), new_email: serialize(new_email)}

  defp extract_params("email.public", {old_email, new_email}),
    do: %{old_email: serialize(old_email), new_email: serialize(new_email)}

  defp extract_params("email.gravatar", {old_email, new_email}),
    do: %{old_email: serialize(old_email), new_email: serialize(new_email)}

  defp extract_params("user.create", user), do: serialize(user)
  defp extract_params("user.update", user), do: serialize(user)
  defp extract_params("repository.create", repository), do: serialize(repository)

  defp extract_params("repository.member.add", {repository, user}),
    do: %{repository: serialize(repository), user: serialize(user)}

  defp extract_params("repository.member.remove", {repository, user}),
    do: %{repository: serialize(repository), user: serialize(user)}

  defp extract_params("repository.member.role", {repository, user, role}),
    do: %{repository: serialize(repository), user: serialize(user), role: role}

  # Password flows carry no payload at all.
  defp extract_params("password.reset.init", nil), do: %{}
  defp extract_params("password.reset.finish", nil), do: %{}
  defp extract_params("password.update", nil), do: %{}
  # Reduces a struct to its whitelisted fields (see fields/1), additionally
  # serializing nested data: key permissions, package/release metadata,
  # release retirement and user handles.
  defp serialize(%Key{} = key) do
    key
    |> do_serialize()
    |> Map.put(:permissions, Enum.map(key.permissions, &serialize/1))
  end

  defp serialize(%Package{} = package) do
    package
    |> do_serialize()
    |> Map.put(:meta, serialize(package.meta))
  end

  defp serialize(%Release{} = release) do
    release
    |> do_serialize()
    |> Map.put(:meta, serialize(release.meta))
    |> Map.put(:retirement, serialize(release.retirement))
  end

  defp serialize(%User{} = user) do
    user
    |> do_serialize()
    |> Map.put(:handles, serialize(user.handles))
  end

  defp serialize(nil), do: nil
  defp serialize(schema), do: do_serialize(schema)
  defp do_serialize(schema), do: Map.take(schema, fields(schema))

  # Whitelist of fields stored per struct type; anything not listed here is
  # deliberately left out of the audit trail.
  defp fields(%Email{}), do: [:email, :primary, :public, :primary, :gravatar]
  defp fields(%Key{}), do: [:id, :name]
  defp fields(%KeyPermission{}), do: [:resource, :domain]
  defp fields(%Package{}), do: [:id, :name, :repository_id]
  defp fields(%PackageMetadata{}), do: [:description, :licenses, :links, :maintainers, :extra]
  defp fields(%Release{}), do: [:id, :version, :checksum, :has_docs, :package_id]
  defp fields(%ReleaseMetadata{}), do: [:app, :build_tools, :elixir]
  defp fields(%ReleaseRetirement{}), do: [:status, :message]
  defp fields(%Repository{}), do: [:name, :public, :active, :billing_active]
  defp fields(%User{}), do: [:id, :username]
  defp fields(%UserHandles{}), do: [:github, :twitter, :freenode]

  # Names the Ecto.Multi step for an action, e.g. :"log.release.publish".
  defp multi_key(action), do: :"log.#{action}"
end
| 35.937888 | 95 | 0.680781 |
08285ef571e4dbb35b4699ceca6f3d6ee361d177 | 691 | ex | Elixir | lib/phx_api_web/controllers/fallback_controller.ex | trevligare/phx_api | ea622cfe15b2c3248374a70aaa65edca53cc3171 | [
"MIT"
] | null | null | null | lib/phx_api_web/controllers/fallback_controller.ex | trevligare/phx_api | ea622cfe15b2c3248374a70aaa65edca53cc3171 | [
"MIT"
] | null | null | null | lib/phx_api_web/controllers/fallback_controller.ex | trevligare/phx_api | ea622cfe15b2c3248374a70aaa65edca53cc3171 | [
"MIT"
] | null | null | null | defmodule PhxApiWeb.FallbackController do
@moduledoc """
Translates controller action results into valid `Plug.Conn` responses.
See `Phoenix.Controller.action_fallback/1` for more details.
"""
use PhxApiWeb, :controller
def call(conn, {:error, %Ecto.Changeset{} = changeset}) do
conn
|> put_status(:unprocessable_entity)
|> render(PhxApiWeb.ChangesetView, "error.json", changeset: changeset)
end
def call(conn, {:error, :not_found}) do
conn
|> put_status(:not_found)
|> render(PhxApiWeb.ErrorView, :"404")
end
def call(conn, {:error, :forbidden}) do
conn
|> put_status(:forbidden)
|> render(PhxApiWeb.ErrorView, :"403")
end
end
| 24.678571 | 74 | 0.684515 |
08287134b38aef5cb2de8803d432cd1d24bb293b | 102 | ex | Elixir | apps/naive/lib/naive/repo.ex | itsemilano/orbex | 301dfaad1369acfd68055f1868d9a1dcd7e51e16 | [
"Apache-2.0"
] | 65 | 2020-07-07T01:51:27.000Z | 2021-09-27T00:13:59.000Z | apps/naive/lib/naive/repo.ex | itsemilano/orbex | 301dfaad1369acfd68055f1868d9a1dcd7e51e16 | [
"Apache-2.0"
] | 5 | 2021-02-12T08:21:15.000Z | 2021-09-01T21:17:27.000Z | apps/naive/lib/naive/repo.ex | itsemilano/orbex | 301dfaad1369acfd68055f1868d9a1dcd7e51e16 | [
"Apache-2.0"
] | 10 | 2020-08-13T13:39:31.000Z | 2021-09-14T12:46:51.000Z | defmodule Naive.Repo do
  # Standard Ecto repository for the :naive OTP app, backed by PostgreSQL;
  # connection settings come from the application environment.
  use Ecto.Repo,
    otp_app: :naive,
    adapter: Ecto.Adapters.Postgres
end
| 17 | 35 | 0.715686 |
0828899014c9bdc17aa5e79c4a024ca0c96034df | 1,347 | exs | Elixir | test/support/conn_case.exs | jcarlos7121/ex_admin | 60a07320efaf549814d29f3593715543aef51e6b | [
"MIT"
] | 1,347 | 2015-10-05T18:23:49.000Z | 2022-01-09T18:38:36.000Z | test/support/conn_case.exs | leonardzhou/ex_admin | c241e956503c548a472e3ee89751e64a16477638 | [
"MIT"
] | 402 | 2015-10-03T13:53:32.000Z | 2021-07-08T09:52:22.000Z | test/support/conn_case.exs | leonardzhou/ex_admin | c241e956503c548a472e3ee89751e64a16477638 | [
"MIT"
] | 333 | 2015-10-12T22:56:57.000Z | 2021-05-26T18:40:24.000Z | defmodule TestExAdmin.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
  using do
    # Injected into every test module that does `use TestExAdmin.ConnCase`.
    quote do
      # Import conveniences for testing with connections
      use Phoenix.ConnTest

      alias TestExAdmin.Repo
      import Ecto
      import Ecto.Changeset
      import Ecto.Query, only: [from: 1, from: 2]

      import TestExAdmin.Router.Helpers
      import TestExAdmin.TestHelpers
      import TestExAdmin.ErrorView
      import ExAdmin.Utils

      # The default endpoint for testing
      @endpoint TestExAdmin.Endpoint

      # When Phoenix.ConnTest does not export build_conn/0 (older Phoenix
      # exposes conn/0 instead), shim it so tests can use one name.
      unless function_exported?(Phoenix.ConnTest, :build_conn, 0) do
        def build_conn, do: Phoenix.ConnTest.conn()
      end
    end
  end
setup _tags do
conn =
if function_exported?(Phoenix.ConnTest, :build_conn, 0) do
Phoenix.ConnTest.build_conn()
else
Phoenix.ConnTest.conn()
end
{:ok, conn: conn}
end
end
| 25.903846 | 68 | 0.694135 |
0828b5879c4c3564d20c7c5668d4055f6459e7c8 | 280 | ex | Elixir | lib/blue_jet/app/file_storage/external/cloudfront_client.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 44 | 2018-05-09T01:08:57.000Z | 2021-01-19T07:25:26.000Z | lib/blue_jet/app/file_storage/external/cloudfront_client.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 36 | 2018-05-08T23:59:54.000Z | 2018-09-28T13:50:30.000Z | lib/blue_jet/app/file_storage/external/cloudfront_client.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 9 | 2018-05-09T14:09:19.000Z | 2021-03-21T21:04:04.000Z | defmodule BlueJet.FileStorage.CloudfrontClient do
  @moduledoc false

  # Resolved at compile time so it can serve as the defdelegate target below;
  # the concrete module comes from the :file_storage app config.
  @cloudfront_client Application.get_env(:blue_jet, :file_storage)[:cloudfront_client]

  @callback get_presigned_url(String.t()) :: String.t()

  # Forwards to the configured client implementing the callback above.
  defdelegate get_presigned_url(key), to: @cloudfront_client
end
| 28 | 86 | 0.796429 |
0828b73dff8d317baeb562ba382334d7df181cde | 496 | ex | Elixir | lib/tweet_source.ex | stphnrdmr/twittrix | c0a1d76d6bd7d6c43d011ed9776c8514e6dde215 | [
"MIT"
] | 1 | 2017-12-29T16:35:45.000Z | 2017-12-29T16:35:45.000Z | lib/tweet_source.ex | stphnrdmr/twittrix | c0a1d76d6bd7d6c43d011ed9776c8514e6dde215 | [
"MIT"
] | null | null | null | lib/tweet_source.ex | stphnrdmr/twittrix | c0a1d76d6bd7d6c43d011ed9776c8514e6dde215 | [
"MIT"
] | null | null | null | alias Experimental.GenStage
defmodule Twittrix.TweetSource do
  @moduledoc """
  GenStage producer that emits tweets matching a search query.

  The query passed to `start_link/1` is also the producer's (immutable)
  state; each demand triggers a fresh Twitter search.
  """
  use GenStage

  def start_link(query) do
    # Registered under the module name, so only one source per node.
    GenStage.start_link(__MODULE__, query, name: __MODULE__)
  end

  def init(query) do
    {:producer, query}
  end

  def handle_demand(demand, query) when demand > 0 do
    # NOTE(review): always fetches a fixed batch of 100 regardless of the
    # requested demand — confirm consumers tolerate over-delivery.
    result = twitter_search(query, 100)
    {:noreply, result, query}
  end

  defp twitter_search(query, count) do
    # Raises a MatchError on anything but {:ok, %{"statuses" => _}}.
    {:ok, %{"statuses" => result}} = Twittex.Client.search(query, count: count)
    result
  end
end
| 20.666667 | 79 | 0.699597 |
0828baa842296e93b9f94af9d3e9c4dd221342cc | 574 | ex | Elixir | exercises/concept/guessing-game/.meta/exemplar.ex | herminiotorres/exercism-elixir | 0464a968f3e37680b2bf34868b889d6f8de5581e | [
"MIT"
] | 2 | 2019-07-09T05:23:38.000Z | 2019-07-29T01:39:59.000Z | exercises/concept/guessing-game/.meta/exemplar.ex | herminiotorres/exercism-elixir | 0464a968f3e37680b2bf34868b889d6f8de5581e | [
"MIT"
] | 6 | 2022-03-04T13:05:25.000Z | 2022-03-30T18:36:49.000Z | exercises/concept/guessing-game/.meta/exemplar.ex | herminiotorres/exercism-elixir | 0464a968f3e37680b2bf34868b889d6f8de5581e | [
"MIT"
] | null | null | null | defmodule GuessingGame do
def compare(secret_number, guess \\ :no_guess)
def compare(_secret_number, guess) when guess == :no_guess do
"Make a guess"
end
def compare(secret_number, guess) when guess == secret_number do
"Correct"
end
def compare(secret_number, guess)
when guess == secret_number + 1 or
guess == secret_number - 1 do
"So close"
end
def compare(secret_number, guess) when guess > secret_number do
"Too High"
end
def compare(secret_number, guess) when guess < secret_number do
"Too Low"
end
end
| 22.076923 | 66 | 0.682927 |
0828bfd4a5fb68d99c2a040a5ed1a66e62bc7191 | 5,353 | exs | Elixir | test/stopsel/builder_test.exs | Awlexus/stopsel | 0d207a65e786a5a057aea76cd7fde72d22ddfd78 | [
"MIT"
] | 3 | 2021-01-07T15:59:01.000Z | 2021-08-16T00:46:55.000Z | test/stopsel/builder_test.exs | Awlexus/Stopsel | 0d207a65e786a5a057aea76cd7fde72d22ddfd78 | [
"MIT"
] | null | null | null | test/stopsel/builder_test.exs | Awlexus/Stopsel | 0d207a65e786a5a057aea76cd7fde72d22ddfd78 | [
"MIT"
] | null | null | null | defmodule Stopsel.BuilderTest do
use ExUnit.Case
import ExUnit.CaptureIO
alias Stopsel.Command
  describe "router/2" do
    test "allows only one router per module" do
      # capture_io silences compiler diagnostics from the throwaway module;
      # the second router/1 call must raise.
      capture_io(:stderr, fn ->
        assert_raise(Stopsel.RouterAlreadyDefinedError, fn ->
          defmodule Invalid do
            import Stopsel.Builder

            router do
            end

            router do
            end
          end
        end)
      end)
    end

    test "defines __commands__" do
      # An empty router still exposes an (empty) __commands__/0 reflection.
      capture_io(:stderr, fn ->
        defmodule Test do
          import Stopsel.Builder

          router do
          end
        end

        assert Test.__commands__() == []
      end)
    end

    test "adds initial alias" do
      # The module given to router/2 becomes the dispatch module of every
      # command declared inside.
      capture_io(:stderr, fn ->
        defmodule Test do
          import Stopsel.Builder

          router MyApp do
            command(:hello)
          end
        end

        assert [%Command{path: ~w"hello", function: :hello, module: MyApp}] == Test.__commands__()
      end)
    end
  end
  describe "scope/3" do
    test "cannot be called outside of router" do
      capture_io(:stderr, fn ->
        assert_raise Stopsel.OutsideOfRouterError, fn ->
          defmodule Invalid do
            import Stopsel.Builder

            scope do
            end
          end
        end
      end)
    end

    test "adds alias" do
      # A scope with a nil path but an alias only changes the dispatch
      # module, not the command path.
      capture_io(:stderr, fn ->
        defmodule Test do
          import Stopsel.Builder

          router do
            scope nil, MyApp do
              command(:hello)
            end
          end
        end

        assert [%Command{path: ~w"hello", function: :hello, module: MyApp}] == Test.__commands__()
      end)
    end

    test "adds to path" do
      # A scope path is prepended to every command path declared inside.
      capture_io(:stderr, fn ->
        defmodule Test do
          import Stopsel.Builder

          router MyApp do
            scope "scope" do
              command(:hello)
            end
          end
        end

        assert [%Command{path: ~w"scope hello", function: :hello, module: MyApp}] ==
                 Test.__commands__()
      end)
    end

    test "scopes stopsel" do
      # A stopsel declared inside a scope does not apply to commands
      # declared after the scope closes.
      capture_io(:stderr, fn ->
        defmodule Test do
          import Stopsel.Builder
          import MyApp.NumberUtils, warn: false

          router MyApp do
            scope do
              stopsel(:parse_number, :a)
            end

            command(:hello)
          end
        end

        assert [%Command{path: ~w"hello", function: :hello, module: MyApp}] ==
                 Test.__commands__()
      end)
    end
  end
  describe "command/3" do
    test "cannot be called outside of router" do
      capture_io(:stderr, fn ->
        assert_raise Stopsel.OutsideOfRouterError, fn ->
          defmodule Invalid do
            import Stopsel.Builder

            command(:hello)
          end
        end
      end)
    end

    test "warns when the function doesn't exist" do
      # Declaring a command for a function the router module does not
      # export only warns (on stderr); it does not raise.
      io =
        ExUnit.CaptureIO.capture_io(:stderr, fn ->
          defmodule Invalid do
            import Stopsel.Builder

            router MyApp do
              command(:helloooo)
            end
          end
        end)

      assert io =~ "Elixir.MyApp.helloooo/2 does not exist"
    end

    test "uses path when given" do
      capture_io(:stderr, fn ->
        defmodule Test do
          import Stopsel.Builder

          router MyApp do
            command(:hello, path: "hihi")
          end
        end

        assert [%Command{path: ~w"hihi", function: :hello, module: MyApp}] == Test.__commands__()
      end)
    end

    test "adds name as path if none was given" do
      capture_io(:stderr, fn ->
        defmodule Test do
          import Stopsel.Builder

          router MyApp do
            command(:hello)
          end
        end

        assert [%Command{path: ~w"hello", function: :hello, module: MyApp}] == Test.__commands__()
      end)
    end
  end
  describe "stopsel/2" do
    test "cannot be called outside of router" do
      capture_io(:stderr, fn ->
        assert_raise Stopsel.OutsideOfRouterError, fn ->
          defmodule Invalid do
            import Stopsel.Builder
            import MyApp.NumberUtils, warn: false

            stopsel(:parse_number, :a)
          end
        end
      end)
    end

    test "adds stopsel to pipeline" do
      # Stopsel declared before a command end up in that command's
      # pipeline as {module_or_fun, opts} pairs.
      capture_io(:stderr, fn ->
        defmodule Test do
          import Stopsel.Builder

          router MyApp do
            stopsel(MyApp.PseudoStopsel)
            command(:hello)
          end
        end

        command = %Command{
          path: ~w"hello",
          function: :hello,
          module: MyApp,
          stopsel: [{MyApp.PseudoStopsel, []}]
        }

        assert [command] == Test.__commands__()
      end)
    end

    test "supports module stopsel" do
      capture_io(:stderr, fn ->
        defmodule Test do
          import Stopsel.Builder

          router MyApp do
            stopsel(MyApp.PseudoStopsel)
            command(:hello)
          end
        end
      end)
    end

    # NOTE(review): duplicated word in the test name ("supports supports").
    test "supports supports imported functions" do
      capture_io(:stderr, fn ->
        defmodule Test do
          import Stopsel.Builder
          import MyApp.NumberUtils

          router MyApp do
            stopsel(:parse_number)
            command(:hello)
          end
        end
      end)
    end
  end
end
| 22.304167 | 98 | 0.533346 |
0828c27a1969e16b5b2a7bba6a2a1781c6500025 | 431 | ex | Elixir | lib/error.ex | maartenvanvliet/absinthe_linter | ebfb5879aa1638d29d789dda47350febc1848dc8 | [
"MIT"
] | 1 | 2022-01-20T20:16:15.000Z | 2022-01-20T20:16:15.000Z | lib/error.ex | maartenvanvliet/absinthe_linter | ebfb5879aa1638d29d789dda47350febc1848dc8 | [
"MIT"
] | 1 | 2022-02-24T04:30:37.000Z | 2022-02-24T04:30:37.000Z | lib/error.ex | maartenvanvliet/absinthe_linter | ebfb5879aa1638d29d789dda47350febc1848dc8 | [
"MIT"
defmodule AbsintheLinter.Error do
  @moduledoc false

  # A linter finding: the human-readable message and the phase (module) that
  # produced it are mandatory; locations, path and extra metadata are optional.
  @enforce_keys [:message, :phase]
  defstruct message: nil,
            phase: nil,
            locations: [],
            extra: %{},
            path: []

  @typedoc "A source location carrying at least `:line` and `:column`."
  @type loc_t :: %{optional(any) => any, line: pos_integer, column: pos_integer}

  @type t :: %__MODULE__{
          message: String.t(),
          phase: module,
          locations: [loc_t],
          path: [],
          extra: map
        }
end
| 18.73913 | 80 | 0.535963 |
0828cb0d5c2225d43117aa4c992a1d030e08e2f0 | 1,749 | ex | Elixir | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/result.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/result.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/result.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.FirebaseRules.V1.Model.Result do
  @moduledoc """
  Possible result values from the function mock invocation.

  ## Attributes

  *   `undefined` (*type:* `GoogleApi.FirebaseRules.V1.Model.Empty.t`, *default:* `nil`) - The result is undefined, meaning the result could not be computed.
  *   `value` (*type:* `any()`, *default:* `nil`) - The result is an actual value. The type of the value must match that of the type declared by the service.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :undefined => GoogleApi.FirebaseRules.V1.Model.Empty.t(),
          :value => any()
        }

  # `field/1,2` comes from GoogleApi.Gax.ModelBase and declares how each JSON
  # attribute is (de)serialized; `as:` names the nested model module.
  field(:undefined, as: GoogleApi.FirebaseRules.V1.Model.Empty)
  field(:value)
end

# Delegates Poison JSON decoding to the generated model helper.
defimpl Poison.Decoder, for: GoogleApi.FirebaseRules.V1.Model.Result do
  def decode(value, options) do
    GoogleApi.FirebaseRules.V1.Model.Result.decode(value, options)
  end
end

# Delegates Poison JSON encoding to the shared ModelBase implementation.
defimpl Poison.Encoder, for: GoogleApi.FirebaseRules.V1.Model.Result do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.98 | 157 | 0.72956 |
0828ceff5056735e4068b8ee7380ba4cf4ca8bb1 | 264 | exs | Elixir | hoge/test/hoge_web/views/layout_view_test.exs | t-yamanashi/testex | 2b51788fc71f7b60c40316b9a9b6166953a060ba | [
"Apache-2.0"
] | null | null | null | hoge/test/hoge_web/views/layout_view_test.exs | t-yamanashi/testex | 2b51788fc71f7b60c40316b9a9b6166953a060ba | [
"Apache-2.0"
] | null | null | null | hoge/test/hoge_web/views/layout_view_test.exs | t-yamanashi/testex | 2b51788fc71f7b60c40316b9a9b6166953a060ba | [
"Apache-2.0"
defmodule HogeWeb.LayoutViewTest do
  use HogeWeb.ConnCase, async: true

  # This module currently contains no tests; it is the generated placeholder
  # for LayoutView helper tests.
  #
  # When testing helpers, you may want to import Phoenix.HTML and
  # use functions such as safe_to_string() to convert the helper
  # result into an HTML string.
  # import Phoenix.HTML
end
| 29.333333 | 65 | 0.761364 |
0828d1b21adc471ab153d28224af8742958384c7 | 833 | ex | Elixir | lib/today/entry.ex | rockwood/today | 4ce3a92d1bee5bb32040e1d2d333df50cd0c24ff | [
"MIT"
] | null | null | null | lib/today/entry.ex | rockwood/today | 4ce3a92d1bee5bb32040e1d2d333df50cd0c24ff | [
"MIT"
] | null | null | null | lib/today/entry.ex | rockwood/today | 4ce3a92d1bee5bb32040e1d2d333df50cd0c24ff | [
"MIT"
defmodule Today.Entry do
  @moduledoc """
  A single journal entry: when it was written (`:timestamp`), the directory
  it is stored in (`:dir`), and its markdown `:body`.
  """

  defstruct [:timestamp, :dir, :body]

  @doc """
  Builds an entry for `body`, timestamped now (UTC) and stored under
  `~/today` (derived from `System.user_home/0`).
  """
  def from_body(body) do
    struct(__MODULE__, %{
      timestamp: DateTime.utc_now(),
      dir: "#{System.user_home()}/today",
      body: body
    })
  end

  @doc ~S|Returns the markdown file path for the entry's date, e.g. `<dir>/2020-01-02.md`.|
  def path(entry) do
    "#{entry.dir}/#{padded_date(entry.timestamp)}.md"
  end

  @doc "A `YYYY-MM-DD` heading underlined with `=`."
  def date_heading(entry) do
    """
    #{padded_date(entry.timestamp)}
    ==================================
    """
  end

  @doc "A zero-padded `HH:MM` heading underlined with `-`."
  def time_heading(entry) do
    # Pad hour and minute so e.g. 9:05 renders as "09:05" rather than "9:5",
    # matching the zero-padded date format used elsewhere in this module.
    """
    #{left_pad(entry.timestamp.hour)}:#{left_pad(entry.timestamp.minute)}
    ---------------
    """
  end

  # NOTE: keeps the original (misspelled) public name `formated_body/1` so
  # existing callers continue to work.
  @doc "The entry body followed by a trailing newline."
  def formated_body(entry) do
    """
    #{entry.body}
    """
  end

  # "YYYY-MM-DD" with zero-padded month and day.
  defp padded_date(date) do
    "#{date.year}-#{left_pad(date.month)}-#{left_pad(date.day)}"
  end

  # Zero-pads an integer to two digits. `String.pad_leading/3` replaces the
  # long-deprecated `String.rjust/3`.
  defp left_pad(integer) do
    integer
    |> Integer.to_string()
    |> String.pad_leading(2, "0")
  end
end
| 17 | 64 | 0.558223 |
0828e39cce67091f38238cac586a6d1af079e2ba | 1,178 | exs | Elixir | test/combineLatest_test.exs | macoene/observables | 6e43ab9b7ea2afa60160a13ef5c37eb20dc597b7 | [
"MIT"
] | 4 | 2018-04-16T20:43:37.000Z | 2019-07-18T21:50:49.000Z | test/combineLatest_test.exs | macoene/observables | 6e43ab9b7ea2afa60160a13ef5c37eb20dc597b7 | [
"MIT"
] | null | null | null | test/combineLatest_test.exs | macoene/observables | 6e43ab9b7ea2afa60160a13ef5c37eb20dc597b7 | [
"MIT"
defmodule CombineLatestTest do
  use ExUnit.Case
  alias Observables.{Obs, Subject}
  require Logger

  # combinelatest emits a pair only once BOTH sources have produced at least
  # one value; afterwards every update on either side emits with the other
  # side's latest value.
  @tag :combinelatest
  test "Combine Latest" do
    testproc = self()

    xs = Subject.create()
    ys = Subject.create()

    # Forward every combined pair to the test process mailbox.
    Obs.combinelatest(xs, ys, left: nil, right: nil)
    |> Obs.map(fn v -> send(testproc, v) end)

    # Send first value, should not produce.
    Subject.next(xs, :x0)

    receive do
      x -> flunk("Mailbox was supposed to be empty, got: #{inspect(x)}")
    after
      0 -> :ok
    end

    # Send second value, should produce.
    Subject.next(ys, :y0)
    assert_receive({:x0, :y0}, 1000, "did not get this message!")

    # Update the left observable. Should produce with history.
    Subject.next(xs, :x1)
    assert_receive({:x1, :y0}, 1000, "did not get this message!")

    Subject.next(ys, :y1)
    assert_receive({:x1, :y1}, 1000, "did not get this message!")

    # Send a final value, should produce.
    Subject.next(xs, :xfinal)
    assert_receive({:xfinal, :y1}, 1000, "did not get this message!")

    # No further values were pushed, so the mailbox must be drained.
    receive do
      x -> flunk("Mailbox was supposed to be empty, got: #{inspect(x)}")
    after
      0 -> :ok
    end
  end
end
| 24.541667 | 72 | 0.623939 |
08291da30080e08328d564d05a812c9f4730db80 | 1,176 | ex | Elixir | lib/hl7/2.5/segments/con.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5/segments/con.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5/segments/con.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
defmodule HL7.V2_5.Segments.CON do
  @moduledoc false

  require Logger
  alias HL7.V2_5.{DataTypes}

  # HL7 v2.5 CON (Consent) segment. The order of this `fields:` list maps
  # one-to-one onto the segment's positional fields (CON-1, CON-2, ...), so
  # entries must NOT be reordered. A `nil` value means the field is a plain
  # string; a DataTypes module names the composite type used to parse it.
  use HL7.Segment,
    fields: [
      segment: nil,
      set_id_con: nil,
      consent_type: DataTypes.Cwe,
      consent_form_id: nil,
      consent_form_number: DataTypes.Ei,
      consent_text: nil,
      subject_specific_consent_text: nil,
      consent_background: nil,
      subject_specific_consent_background: nil,
      consenter_imposed_limitations: nil,
      consent_mode: DataTypes.Cne,
      consent_status: DataTypes.Cne,
      consent_discussion_date_time: DataTypes.Ts,
      consent_decision_date_time: DataTypes.Ts,
      consent_effective_date_time: DataTypes.Ts,
      consent_end_date_time: DataTypes.Ts,
      subject_competence_indicator: nil,
      translator_assistance_indicator: nil,
      language_translated_to: nil,
      informational_material_supplied_indicator: nil,
      consent_bypass_reason: DataTypes.Cwe,
      consent_disclosure_level: nil,
      consent_non_disclosure_reason: DataTypes.Cwe,
      non_subject_consenter_reason: DataTypes.Cwe,
      consenter_id: DataTypes.Xpn,
      relationship_to_subject_table: nil
    ]
end
| 31.783784 | 53 | 0.730442 |
082932d264650d70a24f6d58c30d92c6c20b6291 | 342 | exs | Elixir | test/covid19_questionnaire_web/controllers/bienvenue_controller/index_test.exs | betagouv/covid19-algorithme-orientation-elixir | 7d99c0b79551438bd763ae4293b495096bc8d9ad | [
"MIT"
] | 3 | 2020-04-08T19:15:22.000Z | 2020-05-24T22:37:54.000Z | test/covid19_questionnaire_web/controllers/bienvenue_controller/index_test.exs | betagouv/covid19-algorithme-orientation-elixir | 7d99c0b79551438bd763ae4293b495096bc8d9ad | [
"MIT"
] | 10 | 2020-04-05T17:31:49.000Z | 2020-06-10T11:09:17.000Z | test/covid19_questionnaire_web/controllers/bienvenue_controller/index_test.exs | betagouv/covid19-algorithme-orientation-elixir | 7d99c0b79551438bd763ae4293b495096bc8d9ad | [
"MIT"
defmodule Covid19QuestionnaireWeb.BienvenueController.IndexQuestionnaire do
  use Covid19QuestionnaireWeb.ConnCase, async: true

  # GET / returns a JSON body whose "bienvenue" key contains the API's
  # French welcome message.
  test "bienvenue", %{conn: conn} do
    body =
      conn
      |> get("/")
      |> response(200)
      |> Jason.decode!()

    assert body["bienvenue"] =~ "Bienvenue à l'API d'orientation du COVID19"
  end
end
| 24.428571 | 76 | 0.672515 |
08294e3eb84a7b97661fb58bdfc419acbbc1fc11 | 2,083 | ex | Elixir | clients/slides/lib/google_api/slides/v1/model/table_cell_properties.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/table_cell_properties.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/table_cell_properties.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Slides.V1.Model.TableCellProperties do
  @moduledoc """
  The properties of the TableCell.

  ## Attributes

  - contentAlignment (String.t): The alignment of the content in the table cell. The default alignment matches the alignment for newly created table cells in the Slides editor. Defaults to: `null`.
    - Enum - one of [CONTENT_ALIGNMENT_UNSPECIFIED, CONTENT_ALIGNMENT_UNSUPPORTED, TOP, MIDDLE, BOTTOM]
  - tableCellBackgroundFill (TableCellBackgroundFill): The background fill of the table cell. The default fill matches the fill for newly created table cells in the Slides editor. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :contentAlignment => any(),
          :tableCellBackgroundFill => GoogleApi.Slides.V1.Model.TableCellBackgroundFill.t()
        }

  # `field/1,2` comes from GoogleApi.Gax.ModelBase and declares how each JSON
  # attribute is (de)serialized; `as:` names the nested model module.
  field(:contentAlignment)
  field(:tableCellBackgroundFill, as: GoogleApi.Slides.V1.Model.TableCellBackgroundFill)
end

# Delegates Poison JSON decoding to the generated model helper.
defimpl Poison.Decoder, for: GoogleApi.Slides.V1.Model.TableCellProperties do
  def decode(value, options) do
    GoogleApi.Slides.V1.Model.TableCellProperties.decode(value, options)
  end
end

# Delegates Poison JSON encoding to the shared ModelBase implementation.
defimpl Poison.Encoder, for: GoogleApi.Slides.V1.Model.TableCellProperties do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.057692 | 200 | 0.763322 |
08295e0a50ed6e262a00efaa656ca57612e59261 | 1,092 | exs | Elixir | braccino_ui/config/config.exs | darcros/braccino | 33f4d945daf8eac36e4e88ef412dd53cb1389376 | [
"MIT"
] | null | null | null | braccino_ui/config/config.exs | darcros/braccino | 33f4d945daf8eac36e4e88ef412dd53cb1389376 | [
"MIT"
] | null | null | null | braccino_ui/config/config.exs | darcros/braccino | 33f4d945daf8eac36e4e88ef412dd53cb1389376 | [
"MIT"
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.

# General application configuration
use Mix.Config

# Configures the endpoint.
# NOTE(review): `secret_key_base` and `signing_salt` are committed to source
# control here — acceptable for dev/test defaults, but production should
# override them via environment-specific/runtime config.
config :braccino_ui, BraccinoUiWeb.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "si8T9MZO2mRsjpibSLQRPtvauVcJPnkAEeYm08A/43M/njK+no6uu54/GCCGcF5g",
  render_errors: [view: BraccinoUiWeb.ErrorView, accepts: ~w(html json), layout: false],
  pubsub_server: BraccinoUi.PubSub,
  live_view: [signing_salt: "9oML2jIS"]

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"

# Braccino config
# NOTE(review): this line runs AFTER import_config, so per-environment files
# cannot override it — confirm that is intended.
config :braccino, Braccino.Braccio, implementation: Braccino.Braccio.Mock
| 34.125 | 88 | 0.776557 |
08295ed45785af6753407c99f620f4afd06d25f2 | 1,113 | exs | Elixir | config/config.exs | rockerBOO/master-slave-failover | 7db82af3a5a4f1ee83c7bdbe13982958c385ab39 | [
"MIT"
] | null | null | null | config/config.exs | rockerBOO/master-slave-failover | 7db82af3a5a4f1ee83c7bdbe13982958c385ab39 | [
"MIT"
] | null | null | null | config/config.exs | rockerBOO/master-slave-failover | 7db82af3a5a4f1ee83c7bdbe13982958c385ab39 | [
"MIT"
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for third-
# party users, it should be done in your mix.exs file.

# Sample configuration:
#
# Console logger: info level and above, with the user_id metadata field.
config :logger, :console,
  level: :info,
  format: "$date $time [$level] $metadata$message\n",
  metadata: [:user_id]

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# config :logger, handle_sasl_reports: true
import_config "#{Mix.env}.exs"

# import_config "server1.exs"

# etcd endpoint used for master/slave coordination.
# NOTE(review): placed after import_config, so per-environment files cannot
# override it — confirm that is intended.
config :etcd, :uri, "127.0.0.1:2379"
0829742d9fa2ddeea3e45c9646a503323889af61 | 1,744 | ex | Elixir | clients/content/lib/google_api/content/v21/model/orders_update_shipment_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v21/model/orders_update_shipment_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v21/model/orders_update_shipment_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.OrdersUpdateShipmentResponse do
  @moduledoc """
  Response model for the orders.updateshipment call.

  ## Attributes

  *   `executionStatus` (*type:* `String.t`, *default:* `nil`) - The status of the execution.

      Acceptable values are:
      - "`duplicate`"
      - "`executed`"
  *   `kind` (*type:* `String.t`, *default:* `content#ordersUpdateShipmentResponse`) - Identifies what kind of resource this is. Value: the fixed string "content#ordersUpdateShipmentResponse".
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :executionStatus => String.t(),
          :kind => String.t()
        }

  # `field/1` comes from GoogleApi.Gax.ModelBase and declares how each JSON
  # attribute is (de)serialized.
  field(:executionStatus)
  field(:kind)
end

# Delegates Poison JSON decoding to the generated model helper.
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.OrdersUpdateShipmentResponse do
  def decode(value, options) do
    GoogleApi.Content.V21.Model.OrdersUpdateShipmentResponse.decode(value, options)
  end
end

# Delegates Poison JSON encoding to the shared ModelBase implementation.
defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.OrdersUpdateShipmentResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.296296 | 192 | 0.727064 |
0829889499b56515bcfd06cdd386529e6bfa5fad | 397 | exs | Elixir | priv/repo/migrations/20181023142819_create_posts_viewers.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | priv/repo/migrations/20181023142819_create_posts_viewers.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | priv/repo/migrations/20181023142819_create_posts_viewers.exs | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
defmodule GroupherServer.Repo.Migrations.CreatePostsViewers do
  use Ecto.Migration

  # Join table recording which user viewed which post. Rows are removed when
  # either the post or the user is deleted, and each (post, user) pair is
  # recorded at most once via the unique index.
  def change do
    create table(:posts_viewers) do
      add(:post_id, references(:cms_posts, on_delete: :delete_all), null: false)
      add(:user_id, references(:users, on_delete: :delete_all), null: false)

      timestamps()
    end

    create(unique_index(:posts_viewers, [:post_id, :user_id]))
  end
end
| 26.466667 | 80 | 0.715365 |
0829df9416d6d83925d376c979f805f362054353 | 2,030 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3beta1_page_info_form_info.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3beta1_page_info_form_info.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3beta1_page_info_form_info.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1PageInfoFormInfo do
  @moduledoc """
  Represents form information.

  ## Attributes

  *   `parameterInfo` (*type:* `list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1PageInfoFormInfoParameterInfo.t)`, *default:* `nil`) - Optional for both WebhookRequest and WebhookResponse.
      The parameters contained in the form. Note that the webhook cannot add
      or remove any form parameter.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :parameterInfo =>
            list(
              GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1PageInfoFormInfoParameterInfo.t()
            )
        }

  # `field/2` comes from GoogleApi.Gax.ModelBase; `type: :list` marks a
  # repeated JSON attribute of the nested model named by `as:`.
  field(:parameterInfo,
    as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1PageInfoFormInfoParameterInfo,
    type: :list
  )
end

# Delegates Poison JSON decoding to the generated model helper.
defimpl Poison.Decoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1PageInfoFormInfo do
  def decode(value, options) do
    GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1PageInfoFormInfo.decode(
      value,
      options
    )
  end
end

# Delegates Poison JSON encoding to the shared ModelBase implementation.
defimpl Poison.Encoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1PageInfoFormInfo do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.833333 | 205 | 0.755172 |
0829f68ba2597104132bb4e2fab9dad91873d4a8 | 641 | exs | Elixir | test/fast_ts/stream/pipeline_test.exs | processone/fast_ts | ac7e42c17753f1399a12e2c5531570466debfaba | [
"Apache-2.0"
] | 15 | 2016-01-13T09:46:17.000Z | 2020-02-07T20:12:19.000Z | test/fast_ts/stream/pipeline_test.exs | processone/fast_ts | ac7e42c17753f1399a12e2c5531570466debfaba | [
"Apache-2.0"
] | null | null | null | test/fast_ts/stream/pipeline_test.exs | processone/fast_ts | ac7e42c17753f1399a12e2c5531570466debfaba | [
"Apache-2.0"
defmodule FastTS.Stream.PipelineTest do
  use ExUnit.Case

  test "Ignore empty pipeline" do
    # A pipeline with no steps is never started.
    assert FastTS.Stream.Pipeline.start_link(:empty, []) == :ignore
  end

  test "Create a one step pipeline" do
    # Bare `self` is deprecated; use `self()` (here: the test process).
    test_pid = self()

    # A stateful step: the outer fn runs inside the pipeline process when the
    # step starts, so `self()` there is the step's pid, not the test's.
    # `next_pid` was unused — underscore it to silence the compiler warning.
    start_fun = fn _ets_table, _next_pid ->
      send(test_pid, {:from, :started, self()})
      fn event -> event end
    end

    {:ok, _pid} = FastTS.Stream.Pipeline.start_link(:pipe1, [{:stateful, start_fun}])

    receive do
      {:from, :started, step1_pid} ->
        assert Process.alive?(step1_pid)
    after
      5000 ->
        # `flunk/1` is the idiomatic replacement for `assert false, msg`.
        flunk("pipeline process not properly started")
    end
  end
end
| 24.653846 | 85 | 0.644306 |
082a116827df3446dc3383d7e6f9de49c5ad55fb | 1,994 | ex | Elixir | lib/hl7/2.4/segments/in1.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/segments/in1.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/segments/in1.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
defmodule HL7.V2_4.Segments.IN1 do
  @moduledoc false

  require Logger
  alias HL7.V2_4.{DataTypes}

  # HL7 v2.4 IN1 (Insurance) segment. The order of this `fields:` list maps
  # one-to-one onto the segment's positional fields (IN1-1, IN1-2, ...), so
  # entries must NOT be reordered. A `nil` value means the field is a plain
  # string; a DataTypes module names the composite type used to parse it.
  use HL7.Segment,
    fields: [
      segment: nil,
      set_id_in1: nil,
      insurance_plan_id: DataTypes.Ce,
      insurance_company_id: DataTypes.Cx,
      insurance_company_name: DataTypes.Xon,
      insurance_company_address: DataTypes.Xad,
      insurance_co_contact_person: DataTypes.Xpn,
      insurance_co_phone_number: DataTypes.Xtn,
      group_number: nil,
      group_name: DataTypes.Xon,
      insureds_group_emp_id: DataTypes.Cx,
      insureds_group_emp_name: DataTypes.Xon,
      plan_effective_date: nil,
      plan_expiration_date: nil,
      authorization_information: DataTypes.Aui,
      plan_type: nil,
      name_of_insured: DataTypes.Xpn,
      insureds_relationship_to_patient: DataTypes.Ce,
      insureds_date_of_birth: DataTypes.Ts,
      insureds_address: DataTypes.Xad,
      assignment_of_benefits: nil,
      coordination_of_benefits: nil,
      coord_of_ben_priority: nil,
      notice_of_admission_flag: nil,
      notice_of_admission_date: nil,
      report_of_eligibility_flag: nil,
      report_of_eligibility_date: nil,
      release_information_code: nil,
      pre_admit_cert_pac: nil,
      verification_date_time: DataTypes.Ts,
      verification_by: DataTypes.Xcn,
      type_of_agreement_code: nil,
      billing_status: nil,
      lifetime_reserve_days: nil,
      delay_before_l_r_day: nil,
      company_plan_code: nil,
      policy_number: nil,
      policy_deductible: DataTypes.Cp,
      policy_limit_amount: DataTypes.Cp,
      policy_limit_days: nil,
      room_rate_semi_private: DataTypes.Cp,
      room_rate_private: DataTypes.Cp,
      insureds_employment_status: DataTypes.Ce,
      insureds_administrative_sex: nil,
      insureds_employers_address: DataTypes.Xad,
      verification_status: nil,
      prior_insurance_plan_id: nil,
      coverage_type: nil,
      handicap: nil,
      insureds_id_number: DataTypes.Cx
    ]
end
| 32.688525 | 53 | 0.715145 |
082a2407a65d198a585ca0607255b3e87c49e2fb | 707 | ex | Elixir | lib/word_smith/remove_accents.ex | benfalk/word_smith | f5f09d0fbbaa2513b59947ef37ffaeec0cbc456a | [
"MIT"
] | 19 | 2016-04-29T20:31:50.000Z | 2021-12-07T13:04:55.000Z | lib/word_smith/remove_accents.ex | hiro-riveros/word_smith | f5f09d0fbbaa2513b59947ef37ffaeec0cbc456a | [
"MIT"
] | 2 | 2017-07-13T14:21:03.000Z | 2019-07-14T14:30:22.000Z | lib/word_smith/remove_accents.ex | hiro-riveros/word_smith | f5f09d0fbbaa2513b59947ef37ffaeec0cbc456a | [
"MIT"
defmodule WordSmith.RemoveAccents do
  @accents_file Path.join([__DIR__, "../../priv/data/unaccent.rules"])
  # Recompile this module whenever the rules file changes.
  @external_resource @accents_file

  @moduledoc false

  @doc false
  def remove_accents(str) when is_binary(str), do: remove_accents(str, [])

  # At compile time, unroll one private function clause per
  # "<char>\t<replacement>" line of the rules file, so every accented
  # sequence is matched by a dedicated binary-prefix clause.
  for pair <- File.stream!(@accents_file, [], :line) do
    [char, replacement] = pair |> String.trim() |> String.split("\t")

    defp remove_accents(unquote(char) <> rest, acc) do
      remove_accents(rest, [unquote(replacement) | acc])
    end
  end

  # No rule matched: copy the next byte through unchanged.
  # NOTE(review): `binary-1` consumes one *byte*, not one grapheme — safe only
  # because unmatched multi-byte UTF-8 sequences are copied byte-by-byte
  # unchanged; confirm that is the intent.
  defp remove_accents(<<char::binary-1, rest::binary>>, acc) do
    remove_accents(rest, [char | acc])
  end

  # End of input: the accumulator was built in reverse via prepends.
  defp remove_accents("", acc) do
    Enum.reverse(acc) |> IO.iodata_to_binary()
  end
end
| 28.28 | 74 | 0.677511 |
082a3f06b1ff76f42e6f406290f224fa6978a574 | 19,537 | exs | Elixir | apps/language_server/test/dialyzer_test.exs | maciej-szlosarczyk/elixir-ls | f9e3a969a32212482a7625deec9e0fd0f533f991 | [
"Apache-2.0"
] | 865 | 2018-10-31T20:29:13.000Z | 2022-03-29T11:13:39.000Z | apps/language_server/test/dialyzer_test.exs | maciej-szlosarczyk/elixir-ls | f9e3a969a32212482a7625deec9e0fd0f533f991 | [
"Apache-2.0"
] | 441 | 2019-01-05T02:33:52.000Z | 2022-03-30T20:56:50.000Z | apps/language_server/test/dialyzer_test.exs | maciej-szlosarczyk/elixir-ls | f9e3a969a32212482a7625deec9e0fd0f533f991 | [
"Apache-2.0"
] | 126 | 2018-11-12T19:16:53.000Z | 2022-03-26T13:27:50.000Z | defmodule ElixirLS.LanguageServer.DialyzerTest do
# TODO: Test loading and saving manifest
alias ElixirLS.LanguageServer.{Dialyzer, Server, Protocol, SourceFile, JsonRpc}
import ExUnit.CaptureLog
use ElixirLS.Utils.MixTest.Case, async: false
use Protocol
  setup_all do
    # Build the Elixir PLT once for the whole suite.
    # This will generate a large PLT file and will take a long time, so we need to make sure that
    # Mix.Utils.home() is in the saved build artifacts for any automated testing
    Dialyzer.Manifest.load_elixir_plt()
    {:ok, %{}}
  end
  setup do
    # Start a fresh language server for each test; it is placed in the test
    # context under :server.
    server = ElixirLS.LanguageServer.Test.ServerTestHelpers.start_server()

    {:ok, %{server: server}}
  end
  # End-to-end: after initialize + enabling the dialyzer, lib/a.ex gets two
  # warnings; once lib/b.ex is fixed on disk and saved, the diagnostics for
  # a.ex are cleared and only the two affected modules are re-analyzed.
  @tag slow: true, fixture: true
  test "reports diagnostics then clears them once problems are fixed", %{server: server} do
    in_fixture(__DIR__, "dialyzer", fn ->
      file_a = SourceFile.path_to_uri(Path.absname("lib/a.ex"))

      capture_log(fn ->
        root_uri = SourceFile.path_to_uri(File.cwd!())
        Server.receive_packet(server, initialize_req(1, root_uri, %{}))

        Server.receive_packet(
          server,
          did_change_configuration(%{
            "elixirLS" => %{"dialyzerEnabled" => true, "dialyzerFormat" => "dialyxir_long"}
          })
        )

        message = assert_receive %{"method" => "textDocument/publishDiagnostics"}, 20000

        # Two dialyxir_long warnings are expected for file a.
        assert publish_diagnostics_notif(^file_a, [
                 %{
                   "message" => error_message1,
                   "range" => %{
                     "end" => %{"character" => 0, "line" => _},
                     "start" => %{"character" => 0, "line" => _}
                   },
                   "severity" => 2,
                   "source" => "ElixirLS Dialyzer"
                 },
                 %{
                   "message" => error_message2,
                   "range" => %{
                     "end" => %{"character" => 0, "line" => _},
                     "start" => %{"character" => 0, "line" => _}
                   },
                   "severity" => 2,
                   "source" => "ElixirLS Dialyzer"
                 }
               ]) = message

        assert error_message1 == "Function fun/0 has no local return."

        assert error_message2 ==
                 "The pattern can never match the type.\n\nPattern:\n:ok\n\nType:\n:error\n"

        # Fix file B. It should recompile and re-analyze A and B only
        b_text = """
        defmodule B do
          def fun do
            :ok
          end
        end
        """

        b_uri = SourceFile.path_to_uri("lib/b.ex")
        Server.receive_packet(server, did_open(b_uri, "elixir", 1, b_text))
        Process.sleep(1500)
        File.write!("lib/b.ex", b_text)
        Server.receive_packet(server, did_save(b_uri))

        # Diagnostics for a.ex must now be cleared.
        assert_receive publish_diagnostics_notif(^file_a, []), 20000

        assert_receive notification("window/logMessage", %{
                         "message" => "[ElixirLS Dialyzer] Analyzing 2 modules: [A, B]"
                       }),
                       40000

        # Stop while we're still capturing logs to avoid log leakage
        GenServer.stop(server)
      end)
    end)
  end
  # Touching a single file (lib/c.ex) must trigger re-analysis of only that
  # module, verified via the dialyzer's "Analyzing 1 modules: [C]" log line.
  @tag slow: true, fixture: true
  test "only analyzes the changed files", %{server: server} do
    in_fixture(__DIR__, "dialyzer", fn ->
      file_c = SourceFile.path_to_uri(Path.absname("lib/c.ex"))

      capture_log(fn ->
        root_uri = SourceFile.path_to_uri(File.cwd!())
        Server.receive_packet(server, initialize_req(1, root_uri, %{}))

        Server.receive_packet(
          server,
          did_change_configuration(%{
            "elixirLS" => %{"dialyzerEnabled" => true, "dialyzerFormat" => "dialyxir_long"}
          })
        )

        # Wait for the initial full analysis to publish before changing files.
        assert_receive %{"method" => "textDocument/publishDiagnostics"}, 20_000

        c_text = """
        defmodule C do
        end
        """

        c_uri = SourceFile.path_to_uri("lib/c.ex")

        assert_receive notification("window/logMessage", %{
                         "message" => "[ElixirLS Dialyzer] Found " <> _
                       })

        assert_receive notification("window/logMessage", %{
                         "message" => "[ElixirLS Dialyzer] Done writing manifest" <> _
                       }),
                       3_000

        Server.receive_packet(server, did_open(c_uri, "elixir", 1, c_text))

        # The dialyzer process checks a second back since mtime only has second
        # granularity, so we need to wait a second.
        File.write!("lib/c.ex", c_text)
        Process.sleep(1_500)
        Server.receive_packet(server, did_save(c_uri))

        # Only module C should be re-analyzed.
        assert_receive notification("window/logMessage", %{
                         "message" => "[ElixirLS Dialyzer] Analyzing 1 modules: [C]"
                       }),
                       3_000

        assert_receive publish_diagnostics_notif(^file_c, []), 20_000

        assert_receive notification("window/logMessage", %{
                         "message" => "[ElixirLS Dialyzer] Done writing manifest" <> _
                       }),
                       3_000

        # Stop while we're still capturing logs to avoid log leakage
        GenServer.stop(server)
      end)
    end)
  end
  # dialyxir_long format: multi-line messages with labeled Pattern/Type blocks.
  @tag slow: true, fixture: true
  test "reports dialyxir_long formatted error", %{server: server} do
    in_fixture(__DIR__, "dialyzer", fn ->
      file_a = SourceFile.path_to_uri(Path.absname("lib/a.ex"))

      capture_log(fn ->
        root_uri = SourceFile.path_to_uri(File.cwd!())
        Server.receive_packet(server, initialize_req(1, root_uri, %{}))

        Server.receive_packet(
          server,
          did_change_configuration(%{
            "elixirLS" => %{"dialyzerEnabled" => true, "dialyzerFormat" => "dialyxir_long"}
          })
        )

        message = assert_receive %{"method" => "textDocument/publishDiagnostics"}, 20000

        assert publish_diagnostics_notif(^file_a, [
                 %{
                   "message" => error_message1,
                   "range" => %{
                     "end" => %{"character" => 0, "line" => _},
                     "start" => %{"character" => 0, "line" => _}
                   },
                   "severity" => 2,
                   "source" => "ElixirLS Dialyzer"
                 },
                 %{
                   "message" => error_message2,
                   "range" => %{
                     "end" => %{"character" => 0, "line" => _},
                     "start" => %{"character" => 0, "line" => _}
                   },
                   "severity" => 2,
                   "source" => "ElixirLS Dialyzer"
                 }
               ]) = message

        assert error_message1 == "Function fun/0 has no local return."

        assert error_message2 == """
               The pattern can never match the type.

               Pattern:
               :ok

               Type:
               :error
               """
      end)
    end)
  end
  # dialyxir_short format: the same warnings collapsed into single-line text.
  @tag slow: true, fixture: true
  test "reports dialyxir_short formatted error", %{server: server} do
    in_fixture(__DIR__, "dialyzer", fn ->
      file_a = SourceFile.path_to_uri(Path.absname("lib/a.ex"))

      capture_log(fn ->
        root_uri = SourceFile.path_to_uri(File.cwd!())
        Server.receive_packet(server, initialize_req(1, root_uri, %{}))

        Server.receive_packet(
          server,
          did_change_configuration(%{
            "elixirLS" => %{"dialyzerEnabled" => true, "dialyzerFormat" => "dialyxir_short"}
          })
        )

        message = assert_receive %{"method" => "textDocument/publishDiagnostics"}, 20000

        assert publish_diagnostics_notif(^file_a, [
                 %{
                   "message" => error_message1,
                   "range" => %{
                     "end" => %{"character" => 0, "line" => _},
                     "start" => %{"character" => 0, "line" => _}
                   },
                   "severity" => 2,
                   "source" => "ElixirLS Dialyzer"
                 },
                 %{
                   "message" => error_message2,
                   "range" => %{
                     "end" => %{"character" => 0, "line" => _},
                     "start" => %{"character" => 0, "line" => _}
                   },
                   "severity" => 2,
                   "source" => "ElixirLS Dialyzer"
                 }
               ]) = message

        assert error_message1 == "Function fun/0 has no local return."
        assert error_message2 == "The pattern can never match the type :error."
      end)
    end)
  end
  @tag slow: true, fixture: true
  # Verifies that with dialyzerFormat "dialyzer" the server publishes raw
  # dialyzer-style messages (note: no trailing period, quoted function name).
  test "reports dialyzer_formatted error", %{server: server} do
    in_fixture(__DIR__, "dialyzer", fn ->
      file_a = SourceFile.path_to_uri(Path.absname("lib/a.ex"))
      capture_log(fn ->
        root_uri = SourceFile.path_to_uri(File.cwd!())
        Server.receive_packet(server, initialize_req(1, root_uri, %{}))
        Server.receive_packet(
          server,
          did_change_configuration(%{
            "elixirLS" => %{"dialyzerEnabled" => true, "dialyzerFormat" => "dialyzer"}
          })
        )
        message = assert_receive %{"method" => "textDocument/publishDiagnostics"}, 20000
        assert publish_diagnostics_notif(^file_a, [
                 %{
                   "message" => error_message1,
                   "range" => %{
                     "end" => %{"character" => 0, "line" => _},
                     "start" => %{"character" => 0, "line" => _}
                   },
                   "severity" => 2,
                   "source" => "ElixirLS Dialyzer"
                 },
                 %{
                   "message" => _error_message2,
                   "range" => %{
                     "end" => %{"character" => 0, "line" => _},
                     "start" => %{"character" => 0, "line" => _}
                   },
                   "severity" => 2,
                   "source" => "ElixirLS Dialyzer"
                 }
               ]) = message
        # Raw dialyzer output quotes the function name and omits the period.
        assert error_message1 == "Function 'fun'/0 has no local return"
        # Note: Don't assert on error_message2 because the message is not stable across OTP versions
      end)
    end)
  end
  @tag slow: true, fixture: true
  # Same short-format assertions as above, but inside an umbrella project:
  # diagnostics must be attributed to the app-relative file URI.
  test "reports dialyxir_short error in umbrella", %{server: server} do
    in_fixture(__DIR__, "umbrella_dialyzer", fn ->
      file_a = SourceFile.path_to_uri(Path.absname("apps/app1/lib/app1.ex"))
      capture_log(fn ->
        root_uri = SourceFile.path_to_uri(File.cwd!())
        Server.receive_packet(server, initialize_req(1, root_uri, %{}))
        Server.receive_packet(
          server,
          did_change_configuration(%{
            "elixirLS" => %{"dialyzerEnabled" => true, "dialyzerFormat" => "dialyxir_short"}
          })
        )
        message = assert_receive %{"method" => "textDocument/publishDiagnostics"}, 20000
        assert publish_diagnostics_notif(^file_a, [
                 %{
                   "message" => error_message1,
                   "range" => %{
                     "end" => %{"character" => 0, "line" => _},
                     "start" => %{"character" => 0, "line" => _}
                   },
                   "severity" => 2,
                   "source" => "ElixirLS Dialyzer"
                 },
                 %{
                   "message" => error_message2,
                   "range" => %{
                     "end" => %{"character" => 0, "line" => _},
                     "start" => %{"character" => 0, "line" => _}
                   },
                   "severity" => 2,
                   "source" => "ElixirLS Dialyzer"
                 }
               ]) = message
        assert error_message1 == "Function check_error/0 has no local return."
        assert error_message2 == "The pattern can never match the type :error."
      end)
    end)
  end
test "clears diagnostics when source files are deleted", %{server: server} do
in_fixture(__DIR__, "dialyzer", fn ->
file_a = SourceFile.path_to_uri(Path.absname("lib/a.ex"))
capture_log(fn ->
root_uri = SourceFile.path_to_uri(File.cwd!())
Server.receive_packet(server, initialize_req(1, root_uri, %{}))
Server.receive_packet(
server,
did_change_configuration(%{"elixirLS" => %{"dialyzerEnabled" => true}})
)
assert_receive publish_diagnostics_notif(^file_a, [_, _]), 20000
# Delete file, warning diagnostics should be cleared
File.rm("lib/a.ex")
Server.receive_packet(server, did_change_watched_files([%{"uri" => file_a, "type" => 3}]))
assert_receive publish_diagnostics_notif(^file_a, []), 20000
# Stop while we're still capturing logs to avoid log leakage
GenServer.stop(server)
end)
end)
end
  @tag slow: true, fixture: true
  # Recompiling a file that adds a protocol implementation must not surface
  # "protocol has already been consolidated" warnings as diagnostics.
  test "protocol rebuild does not trigger consolidation warnings", %{server: server} do
    in_fixture(__DIR__, "protocols", fn ->
      root_uri = SourceFile.path_to_uri(File.cwd!())
      uri = SourceFile.path_to_uri(Path.absname("lib/implementations.ex"))
      Server.receive_packet(server, initialize_req(1, root_uri, %{}))
      Server.receive_packet(server, notification("initialized"))
      Server.receive_packet(
        server,
        did_change_configuration(%{"elixirLS" => %{"dialyzerEnabled" => true}})
      )
      # Wait for the initial compile and the dialyzer manifest write to finish.
      assert_receive notification("window/logMessage", %{"message" => "Compile took" <> _}), 5000
      assert_receive notification("window/logMessage", %{
                       "message" => "[ElixirLS Dialyzer] Done writing manifest" <> _
                     }),
                     30000
      v2_text = """
      defimpl Protocols.Example, for: List do
        def some(t), do: t
      end
      defimpl Protocols.Example, for: String do
        def some(t), do: t
      end
      defimpl Protocols.Example, for: Map do
        def some(t), do: t
      end
      """
      # First edit: save the file as-is rewritten; expect a clean recompile
      # with an empty diagnostics push.
      Server.receive_packet(server, did_open(uri, "elixir", 1, v2_text))
      File.write!("lib/implementations.ex", v2_text)
      Server.receive_packet(server, did_save(uri))
      assert_receive notification("window/logMessage", %{"message" => "Compile took" <> _}), 5000
      assert_receive notification("textDocument/publishDiagnostics", %{"diagnostics" => []}),
                     30000
      # Give consolidation time to complete before the second edit.
      Process.sleep(2000)
      v2_text = """
      defimpl Protocols.Example, for: List do
        def some(t), do: t
      end
      defimpl Protocols.Example, for: String do
        def some(t), do: t
      end
      defimpl Protocols.Example, for: Map do
        def some(t), do: t
      end
      defimpl Protocols.Example, for: Atom do
        def some(t), do: t
      end
      """
      # Second edit adds a new impl (Atom), forcing protocol re-consolidation.
      File.write!("lib/implementations.ex", v2_text)
      Server.receive_packet(server, did_save(uri))
      assert_receive notification("window/logMessage", %{"message" => "Compile took" <> _}), 5000
      # we should not receive Protocol has already been consolidated warnings here
      refute_receive notification("textDocument/publishDiagnostics", _), 3000
    end)
  end
  @tag slow: true, fixture: true
  # With suggestSpecs left at its default (off), the codeLens request
  # must return an empty lens list even though dialyzer is enabled.
  test "do not suggests contracts if not enabled", %{server: server} do
    in_fixture(__DIR__, "dialyzer", fn ->
      file_c = SourceFile.path_to_uri(Path.absname("lib/c.ex"))
      capture_log(fn ->
        root_uri = SourceFile.path_to_uri(File.cwd!())
        Server.receive_packet(server, initialize_req(1, root_uri, %{}))
        Server.receive_packet(
          server,
          did_change_configuration(%{
            "elixirLS" => %{"dialyzerEnabled" => true, "dialyzerFormat" => "dialyxir_long"}
          })
        )
        # Wait for the dialyzer run to finish before asking for lenses.
        message = assert_receive %{"method" => "textDocument/publishDiagnostics"}, 20000
        assert publish_diagnostics_notif(_, _) = message
        Server.receive_packet(
          server,
          did_open(file_c, "elixir", 2, File.read!(Path.absname("lib/c.ex")))
        )
        Server.receive_packet(
          server,
          code_lens_req(3, file_c)
        )
        resp = assert_receive(%{"id" => 3}, 5000)
        # No spec suggestions expected when the feature is disabled.
        assert response(3, []) == resp
      end)
    end)
  end
  @tag slow: true, fixture: true
  # End-to-end spec suggestion: request the code lens, execute its command,
  # and verify the server issues a workspace/applyEdit inserting the @spec.
  test "suggests contracts if enabled and applies suggestion", %{server: server} do
    in_fixture(__DIR__, "dialyzer", fn ->
      file_c = SourceFile.path_to_uri(Path.absname("lib/c.ex"))
      capture_log(fn ->
        root_uri = SourceFile.path_to_uri(File.cwd!())
        Server.receive_packet(server, initialize_req(1, root_uri, %{}))
        Server.receive_packet(
          server,
          did_change_configuration(%{
            "elixirLS" => %{
              "dialyzerEnabled" => true,
              "dialyzerFormat" => "dialyxir_long",
              "suggestSpecs" => true
            }
          })
        )
        # Wait for the dialyzer run to finish before asking for lenses.
        message = assert_receive %{"method" => "textDocument/publishDiagnostics"}, 20000
        assert publish_diagnostics_notif(_, _) = message
        Server.receive_packet(
          server,
          did_open(file_c, "elixir", 2, File.read!(Path.absname("lib/c.ex")))
        )
        Server.receive_packet(
          server,
          code_lens_req(3, file_c)
        )
        resp = assert_receive(%{"id" => 3}, 5000)
        # One lens suggesting `@spec myfun :: 1` for C.myfun/0; capture the
        # lens command and arguments for the execute-command step below.
        assert response(3, [
                 %{
                   "command" => %{
                     "arguments" =>
                       args = [
                         %{
                           "arity" => 0,
                           "fun" => "myfun",
                           "line" => 2,
                           "mod" => "Elixir.C",
                           "spec" => "myfun :: 1",
                           "uri" => ^file_c
                         }
                       ],
                     "command" => command = "spec:" <> _,
                     "title" => "@spec myfun :: 1"
                   },
                   "range" => %{
                     "end" => %{"character" => 0, "line" => 1},
                     "start" => %{"character" => 0, "line" => 1}
                   }
                 }
               ]) = resp
        # Execute the suggestion; the server should request a workspace edit.
        Server.receive_packet(
          server,
          execute_command_req(4, command, args)
        )
        assert_receive(%{
          "id" => 1,
          "method" => "workspace/applyEdit",
          "params" => %{
            "edit" => %{
              "changes" => %{
                ^file_c => [
                  %{
                    "newText" => "  @spec myfun :: 1\n",
                    "range" => %{
                      "end" => %{"character" => 0, "line" => 1},
                      "start" => %{"character" => 0, "line" => 1}
                    }
                  }
                ]
              }
            },
            "label" => "Add @spec to Elixir.C.myfun/0"
          }
        })
        # TODO something is broken in packet capture
        # using JsonRpc.receive_packet causes the packet to be delivered to LanguageServer
        # which crashes with no match error
        # JsonRpc.receive_packet(
        #   server,
        #   response(1, %{"applied" => true})
        # )
        # instead we fake a callback in JsonRpc server that forwards the response as needed
        JsonRpc.handle_call(
          {:packet, response(1, %{"applied" => true})},
          nil,
          :sys.get_state(JsonRpc)
        )
        # The execute-command request completes once the edit is acknowledged.
        assert_receive(%{"id" => 4, "result" => nil}, 5000)
      end)
    end)
  end
end
| 33.22619 | 102 | 0.507959 |
082a4018b4ca6307a2621398872399edf0dbe1e6 | 994 | ex | Elixir | phoenix_tutorial/test/support/channel_case.ex | metabrain/elixir-playground | 0c114ee8a8cb2d610f54b9cca83cbe6917226c33 | [
"MIT"
] | null | null | null | phoenix_tutorial/test/support/channel_case.ex | metabrain/elixir-playground | 0c114ee8a8cb2d610f54b9cca83cbe6917226c33 | [
"MIT"
] | null | null | null | phoenix_tutorial/test/support/channel_case.ex | metabrain/elixir-playground | 0c114ee8a8cb2d610f54b9cca83cbe6917226c33 | [
"MIT"
] | null | null | null | defmodule PhoenixTutorial.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
  # Injected into every test module that does `use PhoenixTutorial.ChannelCase`.
  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest
      alias PhoenixTutorial.Repo
      import Ecto
      import Ecto.Changeset
      import Ecto.Query, only: [from: 1, from: 2]
      # The default endpoint for testing
      @endpoint PhoenixTutorial.Endpoint
    end
  end
  # Restarts the shared test transaction before each synchronous test so
  # database changes from the previous test are rolled back.
  setup tags do
    unless tags[:async] do
      Ecto.Adapters.SQL.restart_test_transaction(PhoenixTutorial.Repo, [])
    end
    :ok
  end
end
| 23.666667 | 74 | 0.710262 |
082a64cc508785d7bf143565071dac41a749ceb0 | 686 | ex | Elixir | backendElixir/lib/backendElixir_web/router.ex | deibsoncogo/NLW7Impulse | d530204a01085668d29e7bc303ba6263783283a1 | [
"MIT"
] | null | null | null | backendElixir/lib/backendElixir_web/router.ex | deibsoncogo/NLW7Impulse | d530204a01085668d29e7bc303ba6263783283a1 | [
"MIT"
] | null | null | null | backendElixir/lib/backendElixir_web/router.ex | deibsoncogo/NLW7Impulse | d530204a01085668d29e7bc303ba6263783283a1 | [
"MIT"
] | null | null | null | defmodule BackendElixirWeb.Router do
use BackendElixirWeb, :router
  # Pipeline for JSON API requests.
  pipeline :api do
    plug :accepts, ["json"]
  end
  # We create the API routes in here.
  scope "/api", BackendElixirWeb do
    pipe_through :api
    post "/message", MessageController, :create
  end
  # LiveDashboard is mounted only in dev/test builds (compile-time check).
  if Mix.env() in [:dev, :test] do
    import Phoenix.LiveDashboard.Router
    scope "/" do
      pipe_through [:fetch_session, :protect_from_forgery]
      live_dashboard "/dashboard", metrics: BackendElixirWeb.Telemetry
    end
  end
  # Swoosh mailbox preview for locally inspecting outgoing email, dev only.
  if Mix.env() == :dev do
    scope "/dev" do
      pipe_through [:fetch_session, :protect_from_forgery]
      forward "/mailbox", Plug.Swoosh.MailboxPreview
    end
  end
end
| 21.4375 | 70 | 0.685131 |
082a788454501db5f8016dcc2988cb5ca960cb5f | 73 | exs | Elixir | config/config.exs | mvalitov/phone_number | 15dc3cb4354a128a00fa40a5f6ee4d041f04dae4 | [
"MIT"
] | 9 | 2017-12-07T09:22:40.000Z | 2022-01-09T06:27:24.000Z | config/config.exs | mvalitov/phone_number | 15dc3cb4354a128a00fa40a5f6ee4d041f04dae4 | [
"MIT"
] | 6 | 2017-12-13T11:13:02.000Z | 2022-02-19T02:52:47.000Z | config/config.exs | mvalitov/phone_number | 15dc3cb4354a128a00fa40a5f6ee4d041f04dae4 | [
"MIT"
] | 3 | 2018-04-30T23:55:30.000Z | 2022-01-09T00:02:56.000Z | use Mix.Config
# Path (relative to the app) where the phone_number data files live.
config :phone_number,
  files_path: "/priv/phone_number"
| 14.6 | 34 | 0.767123 |
082a7be96c34ecbee69d0aae50a8a47431722122 | 264 | exs | Elixir | test/axon_web/views/layout_view_test.exs | MattFerraro/Axon | 9c41c617ffd4a8dac89319d2f26f5736d9f96ca4 | [
"MIT"
] | null | null | null | test/axon_web/views/layout_view_test.exs | MattFerraro/Axon | 9c41c617ffd4a8dac89319d2f26f5736d9f96ca4 | [
"MIT"
] | null | null | null | test/axon_web/views/layout_view_test.exs | MattFerraro/Axon | 9c41c617ffd4a8dac89319d2f26f5736d9f96ca4 | [
"MIT"
] | null | null | null | defmodule AxonWeb.LayoutViewTest do
  use AxonWeb.ConnCase, async: true
  # No tests are defined here yet — this module only wires up the ConnCase.
  # When testing helpers, you may want to import Phoenix.HTML and
  # use functions such as safe_to_string() to convert the helper
  # result into an HTML string.
  # import Phoenix.HTML
end
| 29.333333 | 65 | 0.761364 |
082a7f0d25eb3bb2cf3b04a8210298ff154e48ec | 1,234 | ex | Elixir | test/support/conn_case.ex | girorme/starer | 4e3840291da6a7d47c0b94aacb456794fbee091b | [
"MIT"
] | null | null | null | test/support/conn_case.ex | girorme/starer | 4e3840291da6a7d47c0b94aacb456794fbee091b | [
"MIT"
] | null | null | null | test/support/conn_case.ex | girorme/starer | 4e3840291da6a7d47c0b94aacb456794fbee091b | [
"MIT"
] | null | null | null | defmodule StarerWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use StarerWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
  # Injected into every test module that does `use StarerWeb.ConnCase`.
  using do
    quote do
      # Import conveniences for testing with connections
      import Plug.Conn
      import Phoenix.ConnTest
      import StarerWeb.ConnCase
      alias StarerWeb.Router.Helpers, as: Routes
      # The default endpoint for testing
      @endpoint StarerWeb.Endpoint
    end
  end
  # Checks out a SQL sandbox connection per test; async tests get their own
  # connection, synchronous tests share one via :shared mode.
  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Starer.Repo)
    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Starer.Repo, {:shared, self()})
    end
    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end
| 28.045455 | 68 | 0.722853 |
082a926b340cb8b8c5a77e0c33aaa7f94ac6715e | 2,789 | ex | Elixir | lib/hangman/player_logger.ex | brpandey/elixir-hangman | 458502af766b42e492ebb9ca543fc8b855687b09 | [
"MIT"
] | 1 | 2016-12-19T00:10:34.000Z | 2016-12-19T00:10:34.000Z | lib/hangman/player_logger.ex | brpandey/elixir-hangman | 458502af766b42e492ebb9ca543fc8b855687b09 | [
"MIT"
] | null | null | null | lib/hangman/player_logger.ex | brpandey/elixir-hangman | 458502af766b42e492ebb9ca543fc8b855687b09 | [
"MIT"
] | null | null | null | defmodule Hangman.Player.Logger.Handler do
@moduledoc """
Module implements event logger handler for `Hangman.Events.Manager`.
Each `event` is logged to a file named after the player `id`.
"""
use Experimental.GenStage
alias Experimental.GenStage
require Logger
  # Per-player log files are written under the application's priv directory.
  @root_path :code.priv_dir(:hangman_game)
  @doc """
  Starts the logger handler. `options` must include `:id` (the player id)
  — `init/1` aborts without it.
  """
  @spec start_link(Keyword.t()) :: GenServer.on_start()
  def start_link(options) do
    GenStage.start_link(__MODULE__, options)
  end
  @doc """
  Synchronously stops the handler process.
  """
  @spec stop(pid) :: tuple
  def stop(pid) when is_pid(pid) do
    GenStage.call(pid, :stop)
  end
# Callbacks
@callback init(term) :: {GenStage.type(), tuple, GenStage.options()} | {:stop, :normal}
def init(options) do
# Starts a permanent subscription to the broadcaster
# which will automatically start requesting items.
with {:ok, key} <- Keyword.fetch(options, :id) do
file_name = "#{@root_path}/#{key}_hangman_games.txt"
{:ok, logger_pid} = File.open(file_name, [:append])
{:consumer, {key, logger_pid}, subscribe_to: [Hangman.Game.Event.Manager]}
else
## ABORT if display output not true
_ ->
{:stop, :normal}
end
end
@callback handle_call(atom, tuple, term) :: tuple
def handle_call(:stop, _from, state) do
{:stop, :normal, :ok, state}
end
@doc """
The handle_events callback handles various events
which ultimately write to `player` logger file.
Only those that match the player id key are selected
"""
@callback handle_events(term, term, tuple) :: tuple
def handle_events(events, _from, {key, logger_pid}) do
for event <- events, key == Kernel.elem(event, 1) do
process_event(event, logger_pid)
end
{:noreply, [], {key, logger_pid}}
end
@spec process_event({atom, term, tuple | binary}, pid) :: :ok
defp process_event(event, logger_pid) do
msg =
case event do
{:register, _, {game_no, length}} ->
"\n# new game #{game_no}! secret length --> #{length}\n"
{:guess, _, {{:guess_letter, letter}, _game_no}} ->
"# letter --> #{letter} "
{:guess, _, {{:guess_word, word}, _game_no}} ->
"# word --> #{word} "
{:status, _, {_game_no, round_no, text}} ->
"# round #{round_no} status --> #{text}\n"
{:finished, _, text} ->
"\n# games over! --> #{text} \n"
end
IO.write(logger_pid, msg)
:ok
end
@doc """
Terminate callback. Closes player `logger` file
"""
@callback terminate(term, term) :: :ok
def terminate(_reason, state) do
_ = Logger.debug("Terminating Player Logger Handler")
_ =
case state do
val when is_tuple(val) ->
{_key, logger_pid} = val
File.close(logger_pid)
_ ->
""
end
:ok
end
end
| 25.587156 | 89 | 0.614557 |
082aa02a85a95af24881b4d2e51501370196c07d | 4,213 | ex | Elixir | lib/plumbapius/abstract_plug.ex | Amuhar/plumbapius | a9066512f520f2ad97e677b04d70cc62695f2def | [
"Apache-2.0"
] | 10 | 2020-08-25T07:52:23.000Z | 2020-12-06T12:44:44.000Z | lib/plumbapius/abstract_plug.ex | Amuhar/plumbapius | a9066512f520f2ad97e677b04d70cc62695f2def | [
"Apache-2.0"
] | 3 | 2020-10-13T11:49:32.000Z | 2021-05-28T08:34:41.000Z | lib/plumbapius/abstract_plug.ex | Amuhar/plumbapius | a9066512f520f2ad97e677b04d70cc62695f2def | [
"Apache-2.0"
] | 2 | 2020-09-03T14:29:00.000Z | 2021-05-26T11:07:37.000Z | defmodule Plumbapius.AbstractPlug do
alias Plumbapius.{ContentType, Request, Response, ConnHelper}
alias Plumbapius.Plug.Options
alias Plumbapius.Coverage.CoverageTracker
alias Plumbapius.Coverage.CoverageTracker.CoveredCase
alias Plumbapius.Coverage.NullCoverageTracker
  @spec init(json_schema: String.t(), coverage_tracker: CoverageTracker.t()) :: Options.t()
  def init(options) do
    Options.new(options)
  end

  @spec call(Plug.Conn.t(), Options.t(), function, function) ::
          Plug.Conn.t()
  # Connections explicitly marked with `plumbapius_ignore` bypass validation.
  def call(
        %{private: %{plumbapius_ignore: true}} = conn,
        _options,
        _handle_request_error,
        _handle_response_error
      ) do
    conn
  end

  # Looks up the schema matching this method/path/content-type and validates
  # the request (and, lazily, the response). Lookup failures are reported
  # through the caller-supplied request error handler.
  def call(conn, options, handle_request_error, handle_response_error) do
    case find_request_schema(options.schema, conn) do
      {:ok, request_schema} ->
        validate_schema(request_schema, conn, handle_request_error, handle_response_error)

      {:error, _} = error ->
        handle_validation_result(error, handle_request_error, conn, Request.ErrorDescription)
    end
  end
defp find_request_schema(request_schemas, conn) do
case Enum.filter(request_schemas, &Request.match?(&1, conn.method, conn.request_path)) do
[] ->
{:error,
%Request.NotFoundError{
method: conn.method,
path: conn.request_path
}}
[_ | _] = schema_candidates ->
find_schema_by_content_type(conn, schema_candidates)
end
end
  # Among path-matching schemas, picks the one whose content type matches
  # the request's; a miss is reported as UnknownContentTypeError.
  defp find_schema_by_content_type(conn, schema_candidates) do
    with {:ok, content_type} <- content_type_for(conn) do
      request_schema = Enum.find(schema_candidates, &ContentType.match?(content_type, &1.content_type))

      if request_schema do
        {:ok, request_schema}
      else
        {:error,
         %Request.UnknownContentTypeError{
           method: conn.method,
           path: conn.request_path,
           content_type: content_type
         }}
      end
    end
  end
  # Validates the request body immediately; response validation is deferred
  # to a before_send callback so it can inspect the final status/body.
  defp validate_schema(request_schema, conn, handle_request_error, handle_response_error) do
    new_conn =
      Request.validate_body(request_schema, conn.body_params)
      |> handle_validation_result(handle_request_error, conn, Request.ErrorDescription)

    register_before_send = fn conn ->
      # A JSON-decode failure of the response body short-circuits the `with`
      # and the callback returns the decode error tuple unchanged.
      with {:ok, resp_body} <- parse_resp_body(conn.resp_body) do
        request_schema
        |> Response.validate_response(conn.status, ConnHelper.get_resp_header(conn, "content-type"), resp_body)
        |> handle_resp_validation_result(request_schema, handle_response_error, conn, resp_body)
      end
    end

    # If the request error handler already sent a response, registering a
    # before_send callback would raise — return the conn as-is.
    if new_conn.state == :sent do
      new_conn
    else
      Plug.Conn.register_before_send(new_conn, register_before_send)
    end
  end
defp content_type_for(conn) do
if has_body?(conn) do
case ConnHelper.get_req_header(conn, "content-type") do
nil -> {:error, %Request.NoContentTypeError{method: conn.method, path: conn.request_path}}
content_type -> {:ok, content_type}
end
else
{:ok, nil}
end
end
defp has_body?(conn) do
conn.method in ["POST", "PUT", "PATCH"]
end
defp parse_resp_body(""), do: {:ok, %{}}
defp parse_resp_body(body), do: Jason.decode(body)
  # On successful response validation, records the (request, response) pair
  # with the coverage tracker and passes the conn through unchanged.
  defp handle_resp_validation_result(
         {:ok, %Response.Schema{} = response_schema},
         request_schema,
         _error_handler,
         conn,
         resp_body
       ) do
    coverage_tracker().response_covered(
      CoveredCase.new(
        {request_schema, response_schema},
        conn.body_params,
        resp_body
      )
    )

    conn
  end

  # On validation failure, delegates to the response error handler.
  defp handle_resp_validation_result(
         {:error, _} = error,
         _request_schema,
         error_handler,
         conn,
         _resp_body
       ) do
    handle_validation_result(error, error_handler, conn, Response.ErrorDescription)
  end

  # :ok — nothing to report; the conn flows through untouched.
  defp handle_validation_result(:ok, _error_handler, conn, _error_module) do
    conn
  end

  # Wraps the raw error in the given description struct and hands it to the
  # caller-supplied error handler (which may halt or modify the conn).
  defp handle_validation_result({:error, error}, error_handler, conn, error_module) do
    error_module.new(conn, error)
    |> error_handler.(conn)
  end

  # Coverage tracking is optional; defaults to the no-op tracker.
  defp coverage_tracker do
    Application.get_env(:plumbapius, :coverage_tracker, NullCoverageTracker)
  end
end
| 29.256944 | 111 | 0.676715 |
082af2af49fd5001d3d446fadb2fe460ccaada25 | 1,025 | ex | Elixir | apps/sms_code/lib/sms_code/generator.ex | jeffweiss/openrobby | 9fed2024e6ce87a6fe27ef3af85558f3116aca2a | [
"Apache-2.0"
] | null | null | null | apps/sms_code/lib/sms_code/generator.ex | jeffweiss/openrobby | 9fed2024e6ce87a6fe27ef3af85558f3116aca2a | [
"Apache-2.0"
] | null | null | null | apps/sms_code/lib/sms_code/generator.ex | jeffweiss/openrobby | 9fed2024e6ce87a6fe27ef3af85558f3116aca2a | [
"Apache-2.0"
] | null | null | null | defmodule SmsCode.Generator do
require Logger
use GenServer
  # Starts the singleton code generator registered under the module name.
  def start_link do
    GenServer.start_link __MODULE__, [], name: __MODULE__
  end
  # The server keeps no meaningful state; codes live in the :sms_code ConCache.
  def init(_) do
    {:ok, []}
  end
  # Generates a fresh code for `id` and texts it to `mobile_number` (synchronous).
  def send_new_code(id, mobile_number) do
    GenServer.call(__MODULE__, {:send_new_code, id, mobile_number})
  end
  # True when `code` equals the cached code for `id`.
  # NOTE(review): `==` is not a constant-time comparison — confirm whether
  # timing-safe comparison matters for these short-lived codes.
  def validate_code(id, code) do
    code == ConCache.get(:sms_code, id)
  end
  # Generates a code, sends it via Twilio, caches it under `id`, replies :ok.
  # NOTE(review): the Twilio HTTP call happens inside handle_call, so it
  # blocks the GenServer (and crashes it on send failure via the `{:ok, _}`
  # match) — confirm this serialization is intended.
  def handle_call({:send_new_code, id, mobile}, _from, state) do
    new_code = generate_code()
    {:ok, message} = send_reset(new_code, mobile)
    Logger.debug "We got this from Twilio: #{inspect message}"
    ConCache.put(:sms_code, id, new_code)
    {:reply, :ok, state}
  end
def twilio_api do
Application.get_env(:sms_code, :twilio_api)
end
def generate_code do
:crypto.strong_rand_bytes(3)
|> Base.encode16
end
  # Sends the reset code to `mobile` through the configured Twilio module.
  # NOTE(review): the sender number "+15558675309" is hard-coded (and looks
  # like a placeholder) — confirm it should come from config instead.
  def send_reset(code, mobile) do
    payload = [to: mobile, from: "+15558675309", body: "Your password reset code is #{code}"]
    Module.concat(twilio_api(), Message).create(payload)
  end
end
| 22.282609 | 93 | 0.683902 |
082b2bc4cfebece3b1108e1cd0043aa05bf37b50 | 1,568 | exs | Elixir | test/parser_test.exs | suexcxine/jaxon | fb638f76945236822e8e015ee4b4d79b8255b71e | [
"Apache-2.0"
] | null | null | null | test/parser_test.exs | suexcxine/jaxon | fb638f76945236822e8e015ee4b4d79b8255b71e | [
"Apache-2.0"
] | null | null | null | test/parser_test.exs | suexcxine/jaxon | fb638f76945236822e8e015ee4b4d79b8255b71e | [
"Apache-2.0"
] | 1 | 2020-01-29T01:50:12.000Z | 2020-01-29T01:50:12.000Z | defmodule ParseTest do
use ExUnit.Case
alias Jaxon.Parser
doctest Parser
  # Table of {json_input, expected_events}: each entry pairs a JSON string
  # with the exact token/event list Parser.parse/1 must emit, including the
  # {:incomplete, _} and {:error, _} terminal events for truncated/invalid input.
  @tests [
    {~s({ "name": "john", "test": {"number": 5.1}, "tags":[null,true,false,1]}),
     [
       :start_object,
       {:string, "name"},
       :colon,
       {:string, "john"},
       :comma,
       {:string, "test"},
       :colon,
       :start_object,
       {:string, "number"},
       :colon,
       {:decimal, 5.1},
       :end_object,
       :comma,
       {:string, "tags"},
       :colon,
       :start_array,
       nil,
       :comma,
       {:boolean, true},
       :comma,
       {:boolean, false},
       :comma,
       {:integer, 1},
       :end_array,
       :end_object
     ]},
    {~s("string"),
     [
       {:string, "string"}
     ]},
    {~s({"key":"va),
     [
       :start_object,
       {:string, "key"},
       :colon,
       {:incomplete, "\"va"}
     ]},
    {~s("hello" "hello" 1.5 true),
     [
       {:string, "hello"},
       {:string, "hello"},
       {:decimal, 1.5},
       {:boolean, true}
     ]},
    {~s(}}),
     [
       :end_object,
       :end_object
     ]},
    {~s(5e ),
     [
       {:error, "5e "}
     ]},
    {~s(5e),
     [
       {:incomplete, "5e"}
     ]},
    {~s(5..),
     [
       {:error, "5.."}
     ]},
    {~s(5. ),
     [
       {:error, "5. "}
     ]},
    {~s(5.),
     [
       {:incomplete, "5."}
     ]},
    {~s("\\u00),
     [
       {:incomplete, ~s("\\u00)}
     ]}
  ]
test "parser tests" do
Enum.each(@tests, fn {json, events} ->
assert Parser.parse(json) == events
end)
end
end
| 17.422222 | 80 | 0.384566 |
082b5e8032aeb05d849bf8f58442fe089d635b4a | 136 | exs | Elixir | .formatter.exs | utrustdev/commanded-scheduler | 7532f192d39df2d0b7d1d500973ceaa8ec987ed3 | [
"MIT"
] | 31 | 2017-11-25T00:18:12.000Z | 2022-03-03T20:11:17.000Z | .formatter.exs | utrustdev/commanded-scheduler | 7532f192d39df2d0b7d1d500973ceaa8ec987ed3 | [
"MIT"
] | 12 | 2018-01-09T21:18:30.000Z | 2021-09-06T10:35:43.000Z | .formatter.exs | utrustdev/commanded-scheduler | 7532f192d39df2d0b7d1d500973ceaa8ec987ed3 | [
"MIT"
] | 13 | 2018-02-08T15:15:11.000Z | 2020-08-13T12:25:18.000Z | # Used by "mix format"
[
locals_without_parens: [project: 2],
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]
| 22.666667 | 69 | 0.610294 |
082b62a3b396be5f248414856cf0b1efc2ef7de1 | 4,642 | ex | Elixir | web/models/user.ex | albertchan/phoenix_admin | 74a9bbb01d7574db887b19a13f2934b7372f7d20 | [
"MIT"
] | 1 | 2018-03-07T17:21:58.000Z | 2018-03-07T17:21:58.000Z | web/models/user.ex | albertchan/phoenix_admin | 74a9bbb01d7574db887b19a13f2934b7372f7d20 | [
"MIT"
] | null | null | null | web/models/user.ex | albertchan/phoenix_admin | 74a9bbb01d7574db887b19a13f2934b7372f7d20 | [
"MIT"
] | null | null | null | defmodule PhoenixAdmin.User do
use PhoenixAdmin.Web, :model
use Ecto.Schema
alias PhoenixAdmin.Repo
import Comeonin.Bcrypt, only: [hashpwsalt: 1]
import Ecto.Changeset
  schema "users" do
    field :email, :string
    field :name, :string
    # Bcrypt hash; the two password fields below are virtual and never persisted.
    field :encrypted_password, :string
    field :password, :string, virtual: true
    field :password_confirmation, :string, virtual: true
    field :last_login, Ecto.DateTime
    # Email-verification bookkeeping.
    field :verified_at, Ecto.DateTime
    field :verification_sent_at, Ecto.DateTime
    field :verification_token, :string
    # Password-reset bookkeeping.
    field :reset_sent_at, Ecto.DateTime
    field :reset_token, :string
    timestamps()
    # relationships
    many_to_many :roles, PhoenixAdmin.Role, join_through: "users_roles"
  end
  @doc """
  Changeset for basic profile fields (`:email`, `:name`); `:email` required.
  """
  def changeset(struct, params \\ %{}) do
    struct
    |> cast(params, [:email, :name])
    |> validate_required([:email])
  end
  @doc """
  Changeset for creating a user: requires email, name and password,
  enforces unique email, and stores the bcrypt hash of the password.
  """
  def changeset_create(struct, params \\ %{}) do
    struct
    |> cast(params, [:email, :name, :password])
    |> validate_required([:email, :name, :password])
    |> unique_constraint(:email)
    |> put_encrypted_password()
  end
  @doc """
  Changeset for updating a user, including login/verification timestamps.
  """
  def changeset_update(struct, params \\ %{}) do
    struct
    |> cast(params, [:email, :name, :last_login, :verified_at, :verification_sent_at])
    |> validate_required([:email, :name])
    |> unique_constraint(:email)
  end
  @doc """
  Changeset for login: casts and requires email and password only.
  """
  def changeset_login(model, params \\ %{}) do
    model
    |> cast(params, [:email, :password])
    |> validate_required([:email, :password])
  end
  @doc """
  Changeset for setting a new password (no confirmation or hashing here).
  """
  def changeset_password(model, params \\ %{}) do
    model
    |> cast(params, [:password])
    |> validate_required([:password])
  end
  @doc """
  Builds a user changeset for user registration.

  Requires matching password/confirmation (6-100 chars), a unique email,
  hashes the password, and stores `token` as the verification token.
  """
  # NOTE(review): the default token "verysecret" looks like a placeholder —
  # callers should always pass a real random token; confirm and consider
  # removing the default.
  def changeset_registration(model, params \\ %{}, token \\ "verysecret") do
    model
    |> cast(params, [:email, :name, :password, :password_confirmation])
    |> validate_required([:email, :name, :password, :password_confirmation])
    |> unique_constraint(:email)
    |> validate_length(:password, min: 6, max: 100)
    |> validate_length(:password_confirmation, min: 6, max: 100)
    |> validate_confirmation(:password, message: "does not match password!")
    |> put_encrypted_password()
    |> put_verification_token(token)
  end
  @doc """
  Builds a user changeset for resetting password: stamps the reset token
  and its send time on top of the base changeset.
  """
  def changeset_reset(model, params \\ %{}, token) do
    model
    |> changeset(params)
    |> put_reset_token(token)
  end
  @doc """
  Builds a user changeset for verifying a new user: clears `verified_at`
  and stamps the verification token and its send time.
  """
  def changeset_verification(model, params \\ %{}, token) do
    model
    |> changeset(params)
    |> put_verification_token(token)
  end
  @doc """
  Function used to check if a token has expired.

  Returns `true` when `sent_at` plus `valid_seconds` lies in the past (UTC).
  """
  # NOTE(review): a nil `sent_at` is reported as "not expired" (false) —
  # confirm callers treat never-sent tokens separately.
  def check_expiry(nil, _), do: false
  def check_expiry(sent_at, valid_seconds) do
    # Compare gregorian seconds of (sent_at + validity window) against "now".
    (sent_at |> Ecto.DateTime.to_erl |> :calendar.datetime_to_gregorian_seconds) + valid_seconds <
      (:calendar.universal_time |> :calendar.datetime_to_gregorian_seconds)
  end
@doc """
Find the user, using the user id, in the database.
"""
def find_user_by_id(id) do
Repo.get(User, id)
end
@doc """
Find the user by email in the database.
"""
def find_user_by_email(email) do
Repo.get_by(PhoenixAdmin.User, email: email)
end
def put_encrypted_password(changeset) do
case changeset do
%Ecto.Changeset{valid?: true, changes: %{password: password}} ->
put_change(changeset, :encrypted_password, hashpwsalt(password))
_ ->
changeset
end
end
  @doc """
  Change the `verified_at` value in the database to the current time.
  Returns the `Repo.update` result tuple.
  """
  # NOTE(review): Ecto.DateTime is deprecated in later Ecto versions; it
  # matches this schema's field types, so leave until the schema migrates.
  def put_verified_at(user) do
    change(user, %{verified_at: Ecto.DateTime.utc})
    |> Repo.update
  end
defp put_reset_token(changeset, token) do
case changeset do
%Ecto.Changeset{valid?: true} ->
changeset
|> put_change(:reset_token, token)
|> put_change(:reset_sent_at, Ecto.DateTime.utc)
_ ->
changeset
end
end
defp put_verification_token(changeset, token) do
case changeset do
%Ecto.Changeset{valid?: true} ->
changeset
|> put_change(:verified_at, nil)
|> put_change(:verification_token, token)
|> put_change(:verification_sent_at, Ecto.DateTime.utc)
_ ->
changeset
end
end
end
| 27.146199 | 98 | 0.658768 |
082b8c6ef9ee67e533f71e528a1dd8c6326c55b5 | 584 | ex | Elixir | apps/ex_wire/test/support/ex_wire/fake_kademlia.ex | unozerocode/mana | d4b16f16020fd512255004ad537ec604513c2589 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 152 | 2018-10-27T04:52:03.000Z | 2022-03-26T10:34:00.000Z | apps/ex_wire/test/support/ex_wire/fake_kademlia.ex | Jacekdaa/mana | 4b24b05fbf4033c6b57d30e1cd2dc5890ff6b3e5 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 270 | 2018-04-14T07:34:57.000Z | 2018-10-25T18:10:45.000Z | apps/ex_wire/test/support/ex_wire/fake_kademlia.ex | Jacekdaa/mana | 4b24b05fbf4033c6b57d30e1cd2dc5890ff6b3e5 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 25 | 2018-10-27T12:15:13.000Z | 2022-01-25T20:31:14.000Z | defmodule ExWire.FakeKademlia do
@moduledoc """
Kademlia interface mock.
"""
  use GenServer
  # API
  # Mock get_peers: pings the server (so a test can observe the call via
  # :setup_get_peers_call) and always returns an empty peer list.
  def get_peers() do
    _ = GenServer.call(__MODULE__, :get_peers_call)
    []
  end
  def start_link(_), do: GenServer.start_link(__MODULE__, [], name: __MODULE__)
  def init(_), do: {:ok, %{}}
  # Registers the calling test process as the reporter for get_peers calls.
  def handle_call(:setup_get_peers_call, {reporter, _ref}, _state) do
    {:reply, :ok, %{setup_get_peers_call: reporter}}
  end
  # Notifies the registered reporter that get_peers was invoked, then clears it.
  def handle_call(:get_peers_call, _, %{setup_get_peers_call: reporter}) do
    _ = send(reporter, :get_peers_call)
    {:reply, :ok, %{}}
  end
end
| 24.333333 | 79 | 0.669521 |
082bbf4e3cc160d209764b5a9197192c29486557 | 3,038 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_moderator_list_response.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_moderator_list_response.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_moderator_list_response.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.LiveChatModeratorListResponse do
  @moduledoc """
  ## Attributes

  - etag (String.t): Etag of this resource. Defaults to: `null`.
  - eventId (String.t): Serialized EventId of the request which produced this response. Defaults to: `null`.
  - items ([LiveChatModerator]): A list of moderators that match the request criteria. Defaults to: `null`.
  - kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"youtube#liveChatModeratorListResponse\". Defaults to: `null`.
  - nextPageToken (String.t): The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set. Defaults to: `null`.
  - pageInfo (PageInfo): Defaults to: `null`.
  - prevPageToken (String.t): The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set. Defaults to: `null`.
  - tokenPagination (TokenPagination): Defaults to: `null`.
  - visitorId (String.t): The visitorId identifies the visitor. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  # NOTE: auto-generated code (see file header) — keep the field/1 calls
  # below in sync with this typespec rather than editing either by hand.
  @type t :: %__MODULE__{
          :etag => any(),
          :eventId => any(),
          :items => list(GoogleApi.YouTube.V3.Model.LiveChatModerator.t()),
          :kind => any(),
          :nextPageToken => any(),
          :pageInfo => GoogleApi.YouTube.V3.Model.PageInfo.t(),
          :prevPageToken => any(),
          :tokenPagination => GoogleApi.YouTube.V3.Model.TokenPagination.t(),
          :visitorId => any()
        }

  field(:etag)
  field(:eventId)
  field(:items, as: GoogleApi.YouTube.V3.Model.LiveChatModerator, type: :list)
  field(:kind)
  field(:nextPageToken)
  field(:pageInfo, as: GoogleApi.YouTube.V3.Model.PageInfo)
  field(:prevPageToken)
  field(:tokenPagination, as: GoogleApi.YouTube.V3.Model.TokenPagination)
  field(:visitorId)
end
# Delegate Poison decoding to the generated decode/2 from ModelBase.
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.LiveChatModeratorListResponse do
  def decode(value, options) do
    GoogleApi.YouTube.V3.Model.LiveChatModeratorListResponse.decode(value, options)
  end
end

# Encoding is generic for all Gax models.
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.LiveChatModeratorListResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 42.194444 | 166 | 0.722844 |
082bc791b5558efdecf7ae9f3123d6dcc02bb5b5 | 262 | exs | Elixir | config/test.exs | bigbassroller/ueberauth_example | 5e889abaf060b6a37add2eb8a3cf1938f394c4af | [
"MIT"
] | null | null | null | config/test.exs | bigbassroller/ueberauth_example | 5e889abaf060b6a37add2eb8a3cf1938f394c4af | [
"MIT"
] | null | null | null | config/test.exs | bigbassroller/ueberauth_example | 5e889abaf060b6a37add2eb8a3cf1938f394c4af | [
"MIT"
] | null | null | null | import Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
# Port 4002 keeps test traffic off the dev port when the server is enabled.
config :my_app, MyAppWeb.Endpoint,
  http: [port: 4002],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn
| 23.818182 | 56 | 0.736641 |
082bd40686c61aa01f6866a5dbf20b90ac9c52b4 | 416 | ex | Elixir | test/support/tenancy/person.ex | sb8244/ecto_tenancy_enforcer | 7dac523d0560ec4a57bcf3800b788d312d70d6e6 | [
"MIT"
] | 36 | 2019-12-30T23:02:59.000Z | 2022-03-26T14:38:41.000Z | test/support/tenancy/person.ex | sb8244/ecto_tenancy_enforcer | 7dac523d0560ec4a57bcf3800b788d312d70d6e6 | [
"MIT"
] | 1 | 2021-01-13T05:01:04.000Z | 2021-01-13T05:01:04.000Z | test/support/tenancy/person.ex | sb8244/ecto_tenancy_enforcer | 7dac523d0560ec4a57bcf3800b788d312d70d6e6 | [
"MIT"
] | 1 | 2021-04-25T16:50:16.000Z | 2021-04-25T16:50:16.000Z | defmodule Tenancy.Person do
use Ecto.Schema
import Ecto.Changeset
schema "people" do
field :name, :string
field :tenant_id, :id
belongs_to :company, Tenancy.Company
belongs_to :unenforced_resource, Tenancy.UnenforcedResource
timestamps()
end
@doc false
def changeset(person, attrs) do
person
|> cast(attrs, [:name, :company_id])
|> validate_required([:name])
end
end
| 19.809524 | 63 | 0.692308 |
082bde7c3969a1b8e4f2a68d9849f56989770322 | 3,897 | ex | Elixir | lib/identicon.ex | mauricius/elixir-identicon | ebf967f18a5bb6a39d03969e9dd045d318e95d30 | [
"MIT"
] | null | null | null | lib/identicon.ex | mauricius/elixir-identicon | ebf967f18a5bb6a39d03969e9dd045d318e95d30 | [
"MIT"
] | null | null | null | lib/identicon.ex | mauricius/elixir-identicon | ebf967f18a5bb6a39d03969e9dd045d318e95d30 | [
"MIT"
] | null | null | null | defmodule Identicon do
alias IO.ANSI, as: Ansi
@moduledoc """
Identicon generator written in Elixir
Resources:
* https://en.wikipedia.org/wiki/Identicon
* https://github.com/mauricius/identicon
"""
@doc """
Exports a string into an Identicon PNG image.
## Parameters
- input: The input string.
"""
  def export(input) do
    input
    # hash the input into 16 bytes
    |> hash_input
    # first three bytes become the RGB fill color
    |> pick_color
    # 5x5 grid of {byte, index} cells
    |> build_grid
    # keep only even-valued cells (the ones that get colored)
    |> filter_odd_squares
    # map grid indexes to 50x50 px rectangles
    |> build_pixel_map
    |> draw_image
    |> save_image(input)
  end
@doc """
Outputs the Identicon directly to console.
## Parameters
- input: The input string.
"""
  # Same pipeline as export/1 up to the grid, but renders with ANSI escapes
  # instead of producing a PNG; odd squares are rendered white, so no filter.
  def console(input) do
    input
    |> hash_input
    |> pick_color
    |> build_grid
    |> output_image
  end
@doc """
Outputs the Identicon.Image to console.
## Parameters
- image: The Identicon.Image struct.
"""
  def output_image(%Identicon.Image{color: color, grid: grid} = _image) do
    # Map the RGB tuple to the nearest ANSI palette color (external Color module).
    color = Color.rgb_to_ansi(color)

    Enum.each(grid, fn {code, index} ->
      # Grid is 5 cells wide: break the line at the start of each row.
      if rem(index, 5) == 0 do
        IO.write("\n")
      end

      # Even codes are "filled" cells (matches filter_odd_squares/1 in the
      # PNG path); odd codes render as white background.
      if rem(code, 2) == 0 do
        IO.write(Ansi.color_background(color) <> "  " <> Ansi.reset())
      else
        IO.write(Ansi.white_background() <> "  " <> Ansi.reset())
      end
    end)

    IO.write("\n")
  end
@doc """
Draws the Identicon.Image as binary image
http://erlang.org/documentation/doc-6.1/lib/percept-0.8.9/doc/html/egd.html
## Parameters
- image: The Identicon.Image struct.
"""
  # Renders the 250x250 identicon with :egd, filling each pixel-map rectangle
  # with the picked color, and returns the rendered binary (PNG data).
  def draw_image(%Identicon.Image{color: color, pixel_map: pixel_map}) do
    image = :egd.create(250, 250)
    fill = :egd.color(color)

    Enum.each(pixel_map, fn {start, stop} ->
      :egd.filledRectangle(image, start, stop, fill)
    end)

    :egd.render(image)
  end
@doc """
Saves the image as PNG.
## Parameters
- image: The binary Image.
- input: the input string
"""
  # Writes the rendered binary to "<input>.png" in the current directory.
  # Returns :ok or {:error, posix} from File.write/2.
  def save_image(image, input) do
    File.write("#{input}.png", image)
  end
@doc """
Generates the pixel map from the Image grid
## Parameters
- image: The Identicon.Image struct
"""
  def build_pixel_map(%Identicon.Image{grid: grid} = image) do
    pixel_map =
      Enum.map(grid, fn {_, index} ->
        # Each grid cell is a 50x50 px square in a 5-column layout.
        horizontal = rem(index, 5) * 50
        vertical = div(index, 5) * 50

        top_left = {horizontal, vertical}
        bottom_right = {horizontal + 50, vertical + 50}

        {top_left, bottom_right}
      end)

    %Identicon.Image{image | pixel_map: pixel_map}
  end
  @doc """
  Drops grid cells with odd byte values, keeping only the even-valued
  cells that will be colored in the final image.

  ## Parameters

    - image: The Identicon.Image struct
  """
  def filter_odd_squares(%Identicon.Image{grid: grid} = image) do
    grid =
      Enum.filter(grid, fn {code, _index} ->
        rem(code, 2) == 0
      end)

    %Identicon.Image{image | grid: grid}
  end
@doc """
Builds the Identicon grid
## Parameters
- image: The Identicon.Image struct
"""
  def build_grid(%Identicon.Image{hex: hex} = image) do
    grid =
      hex
      # Enum.chunk has been deprecated; :discard drops the trailing
      # partial chunk (16 md5 bytes -> 5 rows of 3).
      |> Enum.chunk_every(3, 3, :discard)
      # Mirror each 3-byte row into a symmetric 5-byte row.
      |> Enum.map(&mirror_row/1)
      |> List.flatten()
      |> Enum.with_index()

    %Identicon.Image{image | grid: grid}
  end
@doc """
Mirrors an enumerable with 3 elements
## Parameters
- row: An enumerable
## Examples
iex> Identicon.mirror_row([1,2,3])
[1,2,3,2,1]
"""
def mirror_row(row) do
[first, second | _tail] = row
row ++ [second, first]
end
@doc """
Picks the first three elements as the RGB color for the identicon
## Parameters
- image: The Identicon.Image struct
"""
  # Destructures the first three hash bytes straight into an RGB tuple.
  def pick_color(%Identicon.Image{hex: [r, g, b | _tail]} = image) do
    %Identicon.Image{image | color: {r, g, b}}
  end
@doc """
Hashes the input and converts it into a list of bytes().
## Parameters
- input: The input string
"""
def hash_input(input) do
hex =
:crypto.hash(:md5, input)
|> :binary.bin_to_list()
%Identicon.Image{hex: hex}
end
end
| 20.19171 | 77 | 0.60662 |
082c31b4b64297260fded58093d30125b4c4a182 | 3,352 | ex | Elixir | apps/nerves_hub_www/lib/nerves_hub_www_web/controllers/account_controller.ex | Gazler/nerves_hub_web | 9a636a17310382819eaa6cee590e053cb47f0dcc | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_www/lib/nerves_hub_www_web/controllers/account_controller.ex | Gazler/nerves_hub_web | 9a636a17310382819eaa6cee590e053cb47f0dcc | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_www/lib/nerves_hub_www_web/controllers/account_controller.ex | Gazler/nerves_hub_web | 9a636a17310382819eaa6cee590e053cb47f0dcc | [
"Apache-2.0"
] | null | null | null | defmodule NervesHubWWWWeb.AccountController do
use NervesHubWWWWeb, :controller
alias Ecto.Changeset
alias NervesHubWebCore.Accounts
alias NervesHubWebCore.Accounts.User
alias NervesHubWWW.Accounts.Email
alias NervesHubWWW.Mailer
plug(NervesHubWWWWeb.Plugs.AllowUninvitedSignups when action in [:new, :create])
def new(conn, _params) do
render(conn, "new.html", changeset: %Changeset{data: %User{}})
end
@doc """
This action is triggered by a "Create Account" UI request. There is no
organization involved save the one created for the user.
"""
def create(conn, %{"user" => user_params}) do
user_params
|> whitelist([:password, :username, :email])
|> Accounts.create_user()
|> case do
{:ok, _new_user} ->
redirect(conn, to: "/login")
{:error, changeset} ->
render(conn, "new.html", changeset: changeset)
end
end
  # Renders the account-edit form seeded with the currently signed-in user.
  def edit(conn, _params) do
    conn
    |> render(
      "edit.html",
      changeset: %Changeset{data: conn.assigns.user}
    )
  end
def update(conn, params) do
cleaned =
params["user"]
|> whitelist([:current_password, :password, :username, :email, :orgs])
conn.assigns.user
|> Accounts.update_user(cleaned)
|> case do
{:ok, user} ->
conn
|> put_flash(:info, "Account updated")
|> redirect(to: Routes.account_path(conn, :edit, user.username))
{:error, changeset} ->
conn
|> render("edit.html", changeset: changeset)
end
end
def invite(conn, %{"token" => token} = _) do
with {:ok, invite} <- Accounts.get_valid_invite(token),
{:ok, org} <- Accounts.get_org(invite.org_id) do
render(
conn,
"invite.html",
changeset: %Changeset{data: invite},
org: org,
token: token
)
else
_ ->
conn
|> put_flash(:error, "Invalid or expired invite")
|> redirect(to: "/")
end
end
def accept_invite(conn, %{"user" => user_params, "token" => token} = _) do
clean_params = whitelist(user_params, [:password, :username])
with {:ok, invite} <- Accounts.get_valid_invite(token),
{:ok, org} <- Accounts.get_org(invite.org_id) do
with {:ok, {:ok, new_org_user}} <-
Accounts.create_user_from_invite(invite, org, clean_params) do
# Now let everyone in the organization - except the new guy -
# know about this new user.
instigator = conn.assigns.user.username
email =
Email.tell_org_user_added(
org,
Accounts.get_org_users(org),
instigator,
new_org_user.user
)
email
|> Mailer.deliver_later()
conn
|> put_flash(:info, "Account successfully created, login below")
|> redirect(to: "/login")
else
{:error, %Changeset{} = changeset} ->
render(
conn,
"invite.html",
changeset: changeset,
org: org,
token: token
)
end
else
{:error, :invite_not_found} ->
conn
|> put_flash(:error, "Invalid or expired invite")
|> redirect(to: "/")
{:error, :org_not_found} ->
conn
|> put_flash(:error, "Invalid org")
|> redirect(to: "/")
end
end
end
| 26.603175 | 82 | 0.579654 |
082c4d0c2b303d5236dc1e49d4e7196445afb44b | 4,901 | ex | Elixir | lib/sue/models/message.ex | alwayswimmin/Sue | 33dfd860e7d5b6dce11e2dc202924efad6a9474c | [
"MIT"
] | 1 | 2020-06-21T01:50:12.000Z | 2020-06-21T01:50:12.000Z | lib/sue/models/message.ex | alwayswimmin/Sue | 33dfd860e7d5b6dce11e2dc202924efad6a9474c | [
"MIT"
] | null | null | null | lib/sue/models/message.ex | alwayswimmin/Sue | 33dfd860e7d5b6dce11e2dc202924efad6a9474c | [
"MIT"
] | null | null | null | defmodule Sue.Models.Message do
alias __MODULE__
alias Sue.Models.{Account, Buddy, Chat}
@type t() :: %__MODULE__{}
defstruct [
:platform,
:id,
:sue_id,
:buddy,
:body,
:time,
:chat,
:account,
:from_me,
:ignorable,
:attachments,
:has_attachments,
:command,
:args
]
@spec new(:imessage | :telegram, Keyword.t() | Map.t()) :: Message.t()
def new(:imessage, kw) do
[
id: handle_id,
person_centric_id: handle_person_centric_id,
cache_has_attachments: has_attachments,
text: body,
ROWID: message_id,
cache_roomnames: chat_id,
is_from_me: from_me,
utc_date: utc_date
] = kw
from_me = from_me == 1
%Message{
platform: :imessage,
id: message_id,
sue_id: make_ref(),
buddy: %Buddy{id: handle_id, guid: handle_person_centric_id},
body: body,
time: DateTime.from_unix!(utc_date),
chat: %Chat{
platform: :imessage,
id: chat_id || "direct;#{handle_id}",
is_direct: chat_id == nil
},
from_me: from_me,
ignorable: is_ignorable?(:imessage, from_me, body),
has_attachments: has_attachments == 1
}
|> augment_one()
end
  # Telegram update that already carries a parsed command (no body parsing /
  # augment_one needed here).
  def new(:telegram, %{msg: msg, command: command, context: context}) do
    # When multiple bots are in the same chat, telegram sometimes suffixes
    # commands with bot names
    botnameSuffix = "@" <> context.bot_info.username

    command =
      cond do
        String.ends_with?(command, botnameSuffix) ->
          # Strip the "@BotName" suffix so "/cmd@BotName" becomes "cmd".
          String.slice(command, 0, String.length(command) - String.length(botnameSuffix))

        true ->
          command
      end

    %Message{
      platform: :telegram,
      id: msg.chat.id,
      sue_id: make_ref(),
      buddy: %Buddy{id: msg.from.id},
      body: context.update.message.text |> String.trim(),
      time: DateTime.from_unix!(msg.date),
      chat: %Chat{
        platform: :telegram,
        id: msg.chat.id,
        is_direct: msg.chat.type == "private"
      },
      # account:
      from_me: false,
      # A parsed command is never ignorable.
      ignorable: false,
      # attachments:
      # Attachments may live on the message being replied to.
      has_attachments:
        Map.get(msg, :reply_to_message, %{})[:photo] != nil or
          Map.get(msg, :reply_to_message, %{})[:document] != nil,
      command: command,
      args: msg.text
    }
  end
  def new(:telegram, %{msg: msg, context: _context}) do
    # No command specified, so we have to parse it from the body.
    # For media messages the text lives in :caption (may be absent).
    body = Map.get(msg, :caption, "")

    %Message{
      platform: :telegram,
      id: msg.chat.id,
      sue_id: make_ref(),
      buddy: %Buddy{id: msg.from.id},
      body: body,
      time: DateTime.from_unix!(msg.date),
      chat: %Chat{
        platform: :telegram,
        id: msg.chat.id,
        is_direct: msg.chat.type == "private"
      },
      from_me: false,
      ignorable: is_ignorable?(:telegram, false, body),
      # Attachments may be on this message or on the one being replied to.
      has_attachments:
        msg[:photo] != nil or msg[:document] != nil or
          Map.get(msg, :reply_to_message, %{})[:photo] != nil or
          Map.get(msg, :reply_to_message, %{})[:document] != nil
    }
    |> augment_one()
  end
# First stage of adding new fields to our Message. Primarily concerned with
# parsing commands, args, stripping whitespace.
  # Ignorable messages pass through untouched.
  defp augment_one(%Message{ignorable: true} = msg), do: msg

  # iMessage commands are prefixed with "!"; strip it and parse.
  defp augment_one(%Message{platform: :imessage} = msg) do
    "!" <> newbody = msg.body |> String.trim()
    parse_command(msg, newbody)
  end

  # Telegram commands are prefixed with "/"; strip it and parse.
  defp augment_one(%Message{platform: :telegram} = msg) do
    "/" <> newbody = msg.body |> String.trim()
    parse_command(msg, newbody)
  end
# Second stage of adding new fields to our Message. Primarily concerned with
# resolving fields that map to elements in our database (Accounts, etc.)
def augment_two(%Message{} = msg) do
account = Account.resolve_and_relate(msg)
%Message{msg | account: account}
end
defp parse_command(msg, newbody) do
[command | args] = String.split(newbody, " ", parts: 2)
%Message{
msg
| body: newbody,
command: command |> String.downcase(),
args: if(args == [], do: "", else: args |> hd())
}
end
# This binary classifier will grow in complexity over time.
  # Our own outgoing messages are always ignorable.
  defp is_ignorable?(_platform, true, _body), do: true
  # Messages with no body are always ignorable.
  defp is_ignorable?(_platform, _from_me, nil), do: true

  # Ignorable unless the body starts with "!" followed immediately by a
  # non-space character (i.e. looks like a command).
  defp is_ignorable?(:imessage, _from_me, body) do
    not Regex.match?(~r/^!(?! )./u, body |> String.trim_leading())
  end

  # Same as above, but Telegram commands start with "/".
  defp is_ignorable?(:telegram, _from_me, body) do
    not Regex.match?(~r/^\/(?! )./u, body |> String.trim_leading())
  end
# to_string override
  # Compact debug representation: "#Message<platform,buddy,chat,sue_ref>".
  defimpl String.Chars, for: Message do
    def to_string(%Message{
          platform: protocol,
          buddy: %Buddy{id: bid},
          chat: %Chat{id: cid},
          sue_id: sid
        }) do
      "#Message<#{protocol},#{bid},#{cid},#{sid |> inspect()}>"
    end
  end
| 27.533708 | 89 | 0.600898 |
082c557e5d3e4213e60168df49a0c7ff4b9d7123 | 6,047 | ex | Elixir | lib/talan/counting_bloom_filter.ex | preciz/talan | 97aa357987ab4237d695f9542d48bce9ba30c7ba | [
"MIT"
] | 4 | 2019-10-24T22:27:47.000Z | 2020-11-13T09:11:38.000Z | lib/talan/counting_bloom_filter.ex | preciz/talan | 97aa357987ab4237d695f9542d48bce9ba30c7ba | [
"MIT"
] | 9 | 2020-09-14T04:57:30.000Z | 2021-08-03T04:48:34.000Z | lib/talan/counting_bloom_filter.ex | preciz/talan | 97aa357987ab4237d695f9542d48bce9ba30c7ba | [
"MIT"
] | 2 | 2020-02-22T17:08:44.000Z | 2021-11-05T17:36:47.000Z | defmodule Talan.CountingBloomFilter do
@moduledoc """
Counting bloom filter implementation with **concurrent accessibility**,
powered by [:atomics](http://erlang.org/doc/man/atomics.html) module.
## Features
* Fixed size Counting Bloom filter
* Concurrent reads & writes
* Custom & default hash functions
* Estimate number of unique elements
* Estimate false positive probability
Counting bloom filters support probabilistic deletion
of elements but have higher memory consumption because
they need to store a counter of N bits for every bloom filter bit.
"""
alias Talan.BloomFilter, as: BF
alias Talan.CountingBloomFilter, as: CBF
@enforce_keys [:bloom_filter, :counter]
defstruct [:bloom_filter, :counter]
@type t :: %__MODULE__{
bloom_filter: reference,
counter: Abit.Counter.t()
}
@doc """
Returns a new `%Talan.CountingBloomFilter{}` struct.
`cardinality` is the expected number of unique items. Duplicated items
can be infinite.
## Options
* `:counters_bit_size` - bit size of counters, defaults to `8`
* `:signed` - to have signed or unsigned counters, defaults to `true`
* `:false_positive_probability` - a float, defaults to `0.01`
* `:hash_functions` - a list of hash functions, defaults to randomly seeded murmur
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("phone")
:ok
iex> cbf |> Talan.CountingBloomFilter.count("hat")
2
iex> cbf |> Talan.CountingBloomFilter.count("phone")
1
"""
@spec new(pos_integer, list) :: t
def new(cardinality, options \\ []) do
bloom_filter = BF.new(cardinality, options)
counters_bit_size = options |> Keyword.get(:counters_bit_size, 8)
signed = options |> Keyword.get(:signed, true)
counter =
Abit.Counter.new(
bloom_filter.filter_length * counters_bit_size,
counters_bit_size,
signed: signed
)
%CBF{
bloom_filter: bloom_filter,
counter: counter
}
end
@doc """
Puts `term` into `bloom_filter` and increments counters in `counter`.
After this the `member?/2` function will return `true`
for the membership of `term` unless bits representing
membership are modified by the `delete/2` function.
Returns `:ok`.
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
:ok
"""
@spec put(t, any) :: :ok
def put(%CBF{bloom_filter: bloom_filter, counter: counter}, term) do
hashes = BF.hash_term(bloom_filter, term)
BF.put_hashes(bloom_filter, hashes)
hashes
|> Enum.each(fn hash ->
Abit.Counter.add(counter, hash, 1)
end)
:ok
end
@doc """
Probabilistically delete `term` from `bloom_filter` and
decrement counters in `counter`.
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.count("hat")
1
iex> cbf |> Talan.CountingBloomFilter.delete("hat")
:ok
iex> cbf |> Talan.CountingBloomFilter.count("hat")
0
iex> cbf |> Talan.CountingBloomFilter.delete("this wasn't there")
iex> cbf |> Talan.CountingBloomFilter.count("this wasn't there")
-1
"""
@spec delete(t, any) :: :ok
def delete(%CBF{bloom_filter: bloom_filter, counter: counter}, term) do
hashes = BF.hash_term(bloom_filter, term)
hashes
|> Enum.each(fn hash ->
Abit.Counter.add(counter, hash, -1)
if Abit.Counter.get(counter, hash) <= 0 do
Abit.set_bit_at(bloom_filter.atomics_ref, hash, 0)
end
end)
:ok
end
@doc """
See `Talan.BloomFilter.member?/2` for docs.
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.member?("hat")
true
"""
@spec member?(t, any) :: boolean
def member?(%CBF{bloom_filter: bloom_filter}, term) do
BF.member?(bloom_filter, term)
end
@doc """
Returns probabilistic count of term in `counter`.
This means that (given no hash collisions) it returns how many times
the item was put into the CountingBloomFilter. A few hash collisions
should be also fine since it returns the average count of the counters.
An single item is hashed with multiple counters.
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.count("hat")
3
"""
@spec count(t, any) :: non_neg_integer
def count(%CBF{bloom_filter: bloom_filter, counter: counter}, term) do
hashes = BF.hash_term(bloom_filter, term)
counters =
hashes
|> Enum.map(fn hash ->
Abit.Counter.get(counter, hash)
end)
round(Enum.sum(counters) / length(counters))
end
@doc """
See `Talan.BloomFilter.cardinality/1` for docs.
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("car keys")
iex> cbf |> Talan.CountingBloomFilter.cardinality()
2
"""
@spec cardinality(t) :: non_neg_integer
def cardinality(%CBF{bloom_filter: bloom_filter}) do
BF.cardinality(bloom_filter)
end
@doc """
See `Talan.BloomFilter.false_positive_probability/1` for
docs.
"""
@spec false_positive_probability(t) :: float
def false_positive_probability(%CBF{bloom_filter: bloom_filter}) do
BF.false_positive_probability(bloom_filter)
end
end
| 28.933014 | 86 | 0.668927 |
082c56a5b72b5a36d0f0e72041fef5a2bc2d83fe | 180 | exs | Elixir | test/basic_benchmarks_elixir_test.exs | zacky1972/basic_benchmarks_elixir | 670585c5f4413c1e9d689d482a022e3a88978540 | [
"Apache-2.0"
] | null | null | null | test/basic_benchmarks_elixir_test.exs | zacky1972/basic_benchmarks_elixir | 670585c5f4413c1e9d689d482a022e3a88978540 | [
"Apache-2.0"
] | null | null | null | test/basic_benchmarks_elixir_test.exs | zacky1972/basic_benchmarks_elixir | 670585c5f4413c1e9d689d482a022e3a88978540 | [
"Apache-2.0"
] | null | null | null | defmodule BasicBenchmarksElixirTest do
use ExUnit.Case
doctest BasicBenchmarksElixir
test "greets the world" do
assert BasicBenchmarksElixir.hello() == :world
end
end
| 20 | 50 | 0.777778 |
082c5d431751c283b0cb77d6c67098e7f960a8ba | 1,022 | exs | Elixir | rel/config.exs | ConnorRigby/turtle_fw | b9f2da3ef63fd62b4de00457c135bf9f095ee7e0 | [
"MIT"
] | null | null | null | rel/config.exs | ConnorRigby/turtle_fw | b9f2da3ef63fd62b4de00457c135bf9f095ee7e0 | [
"MIT"
] | null | null | null | rel/config.exs | ConnorRigby/turtle_fw | b9f2da3ef63fd62b4de00457c135bf9f095ee7e0 | [
"MIT"
] | null | null | null | use Mix.Releases.Config,
# This sets the default release built by `mix release`
default_release: :default,
# This sets the default environment used by `mix release`
default_environment: :dev
# For a full list of config options for both releases
# and environments, visit https://hexdocs.pm/distillery/config/distillery.html
# You may define one or more environments in this file,
# an environment's settings will override those of a release
# when building in that environment, this combination of release
# and environment configuration is called a profile
environment :dev do
  # Cookie is irrelevant on Nerves targets; distribution is configured in vm.args.
  set(cookie: :"Unused with Nerves. See vm.args")
end

environment :prod do
  set(cookie: :"Unused with Nerves. See vm.args")
end
# You may define one or more releases in this file.
# If you have not set a default release, or selected one
# when running `mix release`, the first release in the file
# will be used by default
# The firmware release: versioned from the :turtle_fw app in mix.exs, with
# the Nerves and Shoehorn Distillery plugins applied.
release :turtle_fw do
  set(version: current_version(:turtle_fw))
  plugin(Nerves)
  plugin(Shoehorn)
end
| 30.969697 | 78 | 0.762231 |
082d370b5f6c4084553c39fd9558fba564488f42 | 1,979 | exs | Elixir | test/membrane/integration/timer_test.exs | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | test/membrane/integration/timer_test.exs | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | test/membrane/integration/timer_test.exs | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.Integration.TimerTest do
use ExUnit.Case, async: true
import Membrane.Testing.Assertions
alias Membrane.{Pipeline, Testing, Time}
  # Push source that starts a 100 ms timer when entering :playing, then on the
  # first tick notifies its parent and stops its own timer.
  defmodule Element do
    use Membrane.Source

    def_output_pad :output, mode: :push, caps: :any

    @impl true
    def handle_prepared_to_playing(_ctx, state) do
      {{:ok, start_timer: {:timer, Time.milliseconds(100)}}, state}
    end

    @impl true
    def handle_tick(:timer, _ctx, state) do
      {{:ok, notify: :tick, stop_timer: :timer}, state}
    end
  end
  # Bin with the same timer behavior as Element, to cover timers inside bins.
  defmodule Bin do
    use Membrane.Bin

    def_input_pad :input, mode: :push, caps: :any

    @impl true
    def handle_prepared_to_playing(_ctx, state) do
      {{:ok, start_timer: {:timer, Time.milliseconds(100)}}, state}
    end

    @impl true
    def handle_tick(:timer, _ctx, state) do
      {{:ok, notify: :tick, stop_timer: :timer}, state}
    end
  end
  # Pipeline wiring Element into Bin; also runs its own timer and reports the
  # tick back to the test process (passed in as custom_args).
  defmodule Pipeline do
    use Membrane.Pipeline

    @impl true
    def handle_init(pid) do
      spec = %ParentSpec{
        children: [element: Element, bin: Bin],
        links: [link(:element) |> to(:bin)]
      }

      {{:ok, spec: spec}, %{pid: pid}}
    end

    @impl true
    def handle_prepared_to_playing(_ctx, state) do
      {{:ok, start_timer: {:timer, Time.milliseconds(100)}}, state}
    end

    @impl true
    def handle_tick(:timer, _ctx, state) do
      # Report directly to the test process, then stop the timer from the
      # tick callback itself — the behavior under test.
      send(state.pid, :pipeline_tick)
      {{:ok, stop_timer: :timer}, state}
    end
  end
test "Stopping timer from handle_tick" do
{:ok, pipeline} =
Testing.Pipeline.start_link(%Testing.Pipeline.Options{
module: Pipeline,
custom_args: self()
})
Pipeline.play(pipeline)
assert_pipeline_playback_changed(pipeline, _, :playing)
assert_pipeline_notified(pipeline, :element, :tick)
assert_pipeline_notified(pipeline, :bin, :tick)
assert_receive :pipeline_tick
Pipeline.stop(pipeline)
assert_pipeline_playback_changed(pipeline, _, :stopped)
end
end
| 25.050633 | 67 | 0.654371 |
082d418a814757adbf647a26de0b85bc53635c98 | 1,869 | exs | Elixir | test/db/old_reservation_sink_test.exs | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
] | 10 | 2016-07-15T15:57:33.000Z | 2018-06-09T00:40:46.000Z | test/db/old_reservation_sink_test.exs | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
] | null | null | null | test/db/old_reservation_sink_test.exs | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
] | 6 | 2016-07-15T15:57:41.000Z | 2018-03-22T16:38:00.000Z | defmodule Eecrit.OldReservationSinkTest do
use Eecrit.ModelCase
alias Eecrit.OldReservationSink
alias Eecrit.OldReservation
alias Eecrit.OldGroup
alias Eecrit.OldUse
alias Eecrit.OldAnimal
alias Eecrit.OldProcedure
test "complete_reservation" do
animal = insert_old_animal()
procedure = insert_old_procedure()
reservation_fields = %OldReservation{course: "vcm333",
first_date: Ecto.Date.cast!("2001-01-01"),
last_date: Ecto.Date.cast!("2012-12-12"),
time_bits: "001"}
OldReservationSink.make_full!(reservation_fields, [animal], [procedure])
use = OldRepo.one(OldUse)
group = OldRepo.one(OldGroup)
reservation = OldRepo.one(OldReservation)
assert use.group_id == group.id
assert use.animal_id == animal.id
assert use.procedure_id == procedure.id
assert group.reservation_id == reservation.id
assert reservation.course == reservation_fields.course
assert reservation.first_date == reservation_fields.first_date
# Check the nesting, just in case
reservation = reservation |> OldRepo.preload([:animals, :procedures])
assert reservation.animals == [animal]
assert reservation.procedures == [procedure]
end
test "uses are NxM" do
animals = [insert_old_animal(name: "a1"), insert_old_animal(name: "a2")]
procedures = [insert_old_procedure(name: "p1"), insert_old_procedure(name: "p2")]
OldReservationSink.make_full!(make_old_reservation_fields(), animals, procedures)
reservation =
OldRepo.one(OldReservation)
|> OldRepo.preload([:animals, :procedures])
assert OldAnimal.alphabetical_names(reservation.animals) == ["a1", "a2"]
assert OldProcedure.alphabetical_names(reservation.procedures) == ["p1", "p2"]
end
end
| 33.981818 | 85 | 0.684323 |
082d641f853164ef8ead6b01b534d1b5b2555823 | 561 | ex | Elixir | Samples/BarNecking/BarNecking-Axi/Abaqus/Extract.ex | pantale/DynELA | f346c0888059784c3f56b853e8593b71fc3dd708 | [
"BSD-3-Clause"
] | 5 | 2019-12-13T14:12:43.000Z | 2020-09-21T18:27:39.000Z | Samples/BarNecking/BarNecking-Axi/Abaqus/Extract.ex | pantale/DynELA-v.-4.0 | 2c366859b68df6243a1e128a7839e4fb23888820 | [
"BSD-3-Clause"
] | 1 | 2021-06-28T16:54:58.000Z | 2021-06-28T16:54:58.000Z | Samples/BarNecking/BarNecking-Axi/Abaqus/Extract.ex | pantale/DynELA-v.-4.0 | 2c366859b68df6243a1e128a7839e4fb23888820 | [
"BSD-3-Clause"
] | 2 | 2021-03-15T07:13:28.000Z | 2021-06-28T16:46:26.000Z | # Built-in model
TimeHistory, job=BarNecking, value=MISES, name=vonMises, region=Element CYLINDER-1.1 Int Point 1
TimeHistory, job=BarNecking, value=PEEQ, name=PlasticStrain, region=Element CYLINDER-1.1 Int Point 1
TimeHistory, job=BarNecking, value=TEMP, name=Temperature, region=Element CYLINDER-1.1 Int Point 1
TimeHistory, job=BarNecking, value=DT, name=timeStep, region=Assembly ASSEMBLY
TimeHistory, job=BarNecking, value=ALLKE, name=kineticEnergy, region=Assembly ASSEMBLY
TimeHistory, job=BarNecking, value=COOR1, name=radius, region=Node CYLINDER-1.11
| 70.125 | 100 | 0.811052 |
082d651f63205e30e8a5e5185331455fe8f9a11c | 1,131 | ex | Elixir | exercism/elixir/beer-song/lib/beer_song.ex | rusucosmin/til | 1687b89454b22e14c5c720f41199a5d2badf7db2 | [
"MIT"
] | 14 | 2016-02-19T22:14:31.000Z | 2022-02-06T21:59:46.000Z | exercism/elixir/beer-song/lib/beer_song.ex | rusucosmin/til | 1687b89454b22e14c5c720f41199a5d2badf7db2 | [
"MIT"
] | null | null | null | exercism/elixir/beer-song/lib/beer_song.ex | rusucosmin/til | 1687b89454b22e14c5c720f41199a5d2badf7db2 | [
"MIT"
] | 2 | 2020-01-07T11:27:26.000Z | 2022-02-06T21:59:54.000Z | defmodule BeerSong do
@doc """
Get a single verse of the beer song
"""
@spec verse(integer) :: String.t()
def verse(number) do
# Your implementation here...
if number == 0 do
"No more bottles of beer on the wall, no more bottles of beer.\n" <>
"Go to the store and buy some more, 99 bottles of beer on the wall.\n"
else
"#{pluralize_bottle(number)} of beer on the wall, #{pluralize_bottle(number)} of beer.\n" <>
"Take #{bottle_pronoun(number)} down and pass it around, #{pluralize_bottle(number - 1)} of beer on the wall.\n"
end
end
def pluralize_bottle(number) do
cond do
number == 0 ->
"no more bottles"
number > 1 ->
"#{number} bottles"
true ->
"1 bottle"
end
end
def bottle_pronoun(number) do
if number == 1 do
"it"
else
"one"
end
end
@doc """
Get the entire beer song for a given range of numbers of bottles.
"""
@spec lyrics(Range.t()) :: String.t()
def lyrics(range) do
range |> Enum.map(&verse/1) |> Enum.join("\n")
end
def lyrics do
lyrics(99..0)
end
end
| 23.5625 | 120 | 0.592396 |
082dbcadd6041e44f289751770bbc6571ab55510 | 215 | exs | Elixir | farmbot_core/priv/config/migrations/20190110180643_add_eap_settings.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | 1 | 2021-08-23T13:36:14.000Z | 2021-08-23T13:36:14.000Z | farmbot_core/priv/config/migrations/20190110180643_add_eap_settings.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | farmbot_core/priv/config/migrations/20190110180643_add_eap_settings.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | defmodule FarmbotCore.Config.Migrations.AddEapSettings do
use Ecto.Migration
  # Adds WPA-EAP credential columns to the network_interfaces table.
  def change do
    alter table("network_interfaces") do
      add(:identity, :string)
      add(:password, :string)
    end
  end
end
| 19.545455 | 57 | 0.706977 |
082e154deacad3349243c41652f3ef2ff97097f4 | 2,552 | exs | Elixir | test/controllers/github_controller_test.exs | joakimk/exremit | 6c0a5fb32208b98cc1baac11d6a7bd248a1aa3bc | [
"Unlicense",
"MIT"
] | 27 | 2016-09-21T09:11:25.000Z | 2020-12-16T04:04:50.000Z | test/controllers/github_controller_test.exs | barsoom/exremit | 6c0a5fb32208b98cc1baac11d6a7bd248a1aa3bc | [
"Unlicense",
"MIT"
] | 2 | 2016-12-02T08:05:13.000Z | 2020-03-27T08:07:59.000Z | test/controllers/github_controller_test.exs | barsoom/exremit | 6c0a5fb32208b98cc1baac11d6a7bd248a1aa3bc | [
"Unlicense",
"MIT"
] | 4 | 2016-09-25T09:58:17.000Z | 2020-04-27T15:07:36.000Z | defmodule Review.GithubControllerTest do
use Review.ConnCase
use Phoenix.ChannelTest
test "can handle a ping" do
conn = build_conn()
|> put_req_header("x-github-event", "ping")
|> post("/webhooks/github?secret=webhook_secret", %{ zen: "Yo.", hook_id: 123 })
assert text_response(conn, 200) =~ "pong"
end
test "can handle a commit_comment update" do
{:ok, _, _socket} = socket("", %{})
|> subscribe_and_join(Review.ReviewChannel, "review")
conn = build_conn()
|> put_req_header("x-github-event", "commit_comment")
|> post("/webhooks/github?secret=webhook_secret", %{ comment: Poison.decode!(Review.Factory.comment_payload) })
assert text_response(conn, 200) =~ "ok"
assert_broadcast "new_or_updated_comment", %{}
comment = Review.Repo.comments |> Review.Repo.one
assert comment.commit_sha == "2be829b9163897e8bb57ceea9709a5d5e61faee1"
end
test "can handle a push update" do
{:ok, _, _socket} = socket("", %{})
|> subscribe_and_join(Review.ReviewChannel, "review")
conn = build_conn()
|> put_req_header("x-github-event", "push")
|> post("/webhooks/github?secret=webhook_secret", Poison.decode!(Review.Factory.push_payload))
assert text_response(conn, 200) =~ "ok"
assert_broadcast "new_or_updated_commit", %{}
commit = Review.Repo.commits |> Review.Repo.one
assert commit.sha == "c5472c5276f564621afe4b56b14f50e7c298dff9"
assert Review.CommitSerializer.serialize(commit).repository == "gridlook"
end
test "can handle a push update caused by a pair commit" do
conn = build_conn()
|> put_req_header("x-github-event", "push")
|> post("/webhooks/github?secret=webhook_secret", Poison.decode!(Review.Factory.pair_commit_push_payload))
assert text_response(conn, 200) =~ "ok"
end
test "ignores non-master commits in push updates" do
data =
Poison.decode!(Review.Factory.push_payload)
|> Map.put("ref", "refs/heads/lab")
conn = build_conn()
|> put_req_header("x-github-event", "push")
|> post("/webhooks/github?secret=webhook_secret", data)
assert text_response(conn, 200) =~ "ok"
commit_count = Review.Repo.commits |> Review.Repo.all |> Enum.count
assert commit_count == 0
end
test "requires a valid key" do
conn = build_conn()
|> put_req_header("x-github-event", "ping")
|> post("/webhooks/github?secret=invalid", %{ zen: "Yo.", hook_id: 123 })
assert response(conn, 403) =~ "Denied (probably an invalid key?)"
end
end
| 33.142857 | 117 | 0.672806 |
082e1c84114b136509722ab5f47fa064040261c4 | 404 | exs | Elixir | mix.exs | Kyuden/heiseiex | 7ecc951cab70462b58ffb4be76f08e239eff4ecd | [
"MIT"
] | null | null | null | mix.exs | Kyuden/heiseiex | 7ecc951cab70462b58ffb4be76f08e239eff4ecd | [
"MIT"
] | null | null | null | mix.exs | Kyuden/heiseiex | 7ecc951cab70462b58ffb4be76f08e239eff4ecd | [
"MIT"
] | null | null | null | defmodule TrotExample.Mixfile do
use Mix.Project
def project do
[app: :trot_example,
version: "0.0.1",
elixir: "~> 1.0",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
def application do
[applications: [:logger, :trot]]
end
defp deps do
[{:trot, github: "kyuden/trot", branch: "add_trot.server_task"}]
end
end
| 17.565217 | 68 | 0.616337 |
082e3cd6f3f7add4e59073b306a68f7a91a6d688 | 49,101 | ex | Elixir | lib/elixir/lib/code.ex | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/code.ex | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/code.ex | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | null | null | null | defmodule Code do
@moduledoc ~S"""
Utilities for managing code compilation, code evaluation, and code loading.
This module complements Erlang's [`:code` module](http://www.erlang.org/doc/man/code.html)
to add behaviour which is specific to Elixir. Almost all of the functions in this module
have global side effects on the behaviour of Elixir.
## Working with files
This module contains three functions for compiling and evaluating files.
Here is a summary of them and their behaviour:
* `require_file/2` - compiles a file and tracks its name. It does not
compile the file again if it has been previously required.
* `compile_file/2` - compiles a file without tracking its name. Compiles the
file multiple times when invoked multiple times.
* `eval_file/2` - evaluates the file contents without tracking its name. It
returns the result of the last expression in the file, instead of the modules
defined in it. Evaluated files do not trigger the compilation tracers described
in the next section.
In a nutshell, the first must be used when you want to keep track of the files
handled by the system, to avoid the same file from being compiled multiple
times. This is common in scripts.
`compile_file/2` must be used when you are interested in the modules defined in a
file, without tracking. `eval_file/2` should be used when you are interested in
the result of evaluating the file rather than the modules it defines.
## Compilation tracers
Elixir supports compilation tracers, which allows modules to observe constructs
handled by the Elixir compiler when compiling files. A tracer is a module
that implements the `trace/2` function. The function receives the event name
as first argument and `Macro.Env` as second and it must return `:ok`. It is
very important for a tracer to do as little work as possible synchronously
and dispatch the bulk of the work to a separate process. **Slow tracers will
slow down compilation**.
You can configure your list of tracers via `put_compiler_option/2`. The
following events are available to tracers:
* `{:import, meta, module, opts}` - traced whenever `module` is imported.
`meta` is the import AST metadata and `opts` are the import options.
* `{:imported_function, meta, module, name, arity}` and
`{:imported_macro, meta, module, name, arity}` - traced whenever an
imported function or macro is invoked. `meta` is the call AST metadata,
`module` is the module the import is from, followed by the `name` and `arity`
of the imported function/macro.
* `{:alias, meta, alias, as, opts}` - traced whenever `alias` is aliased
to `as`. `meta` is the alias AST metadata and `opts` are the alias options.
* `{:alias_expansion, meta, as, alias}` traced whenever there is an alias
expansion for a previously defined `alias`, i.e. when the user writes `as`
which is expanded to `alias`. `meta` is the alias expansion AST metadata.
* `{:alias_reference, meta, module}` - traced whenever there is an alias
in the code, i.e. whenever the user writes `MyModule.Foo.Bar` in the code,
regardless if it was expanded or not.
* `{:require, meta, module, opts}` - traced whenever `module` is required.
`meta` is the require AST metadata and `opts` are the require options.
* `{:struct_expansion, meta, module, keys}` - traced whenever `module`'s struct
is expanded. `meta` is the struct AST metadata and `keys` are the keys being
used by expansion
* `{:remote_function, meta, module, name, arity}` and
`{:remote_macro, meta, module, name, arity}` - traced whenever a remote
function or macro is referenced. `meta` is the call AST metadata, `module`
is the invoked module, followed by the `name` and `arity`.
* `{:local_function, meta, module, name, arity}` and
`{:local_macro, meta, module, name, arity}` - traced whenever a local
function or macro is referenced. `meta` is the call AST metadata, `module`
is the invoked module, followed by the `name` and `arity`.
* `{:compile_env, app, path, return}` - traced whenever `Application.compile_env/3`
or `Application.compile_env!/2` are called. `app` is an atom, `path` is a list
    of keys to traverse in the application environment and `return` is either
`{:ok, value}` or `:error`.
The `:tracers` compiler option can be combined with the `:parser_options`
compiler option to enrich the metadata of the traced events above.
New events may be added at any time in the future, therefore it is advised
for the `trace/2` function to have a "catch-all" clause.
Below is an example tracer that prints all remote function invocations:
defmodule MyTracer do
def trace({:remote_function, _meta, module, name, arity}, env) do
IO.puts "#{env.file}:#{env.line} #{inspect(module)}.#{name}/#{arity}"
:ok
end
def trace(_event, _env) do
:ok
end
end
"""
  @typedoc """
  A list with all variable bindings.
  The binding keys are usually atoms, but they may be a tuple for variables
  defined in a different context.
  """
  @type binding :: [{atom() | tuple(), any}]
  # Compiler options whose value is a boolean flag.
  @boolean_compiler_options [
    :docs,
    :debug_info,
    :ignore_module_conflict,
    :relative_paths,
    :warnings_as_errors
  ]
  # Compiler options whose value is a list.
  @list_compiler_options [:no_warn_undefined, :tracers, :parser_options]
  # Every option recognized by compiler_options/0 and related functions.
  @available_compiler_options @boolean_compiler_options ++ @list_compiler_options
  @doc """
  Lists all required files.
  ## Examples
      Code.require_file("../eex/test/eex_test.exs")
      List.first(Code.required_files()) =~ "eex_test.exs"
      #=> true
  """
  @doc since: "1.7.0"
  @spec required_files() :: [binary]
  def required_files do
    # The elixir code server process tracks every path passed to
    # require_file/2; this is a synchronous read of that set.
    :elixir_code_server.call(:required)
  end
  @deprecated "Use Code.required_files/0 instead"
  @doc false
  # Backwards-compatible alias kept only for old callers.
  def loaded_files do
    required_files()
  end
@doc """
Removes files from the required files list.
The modules defined in the file are not removed;
calling this function only removes them from the list,
allowing them to be required again.
## Examples
# Require EEx test code
Code.require_file("../eex/test/eex_test.exs")
# Now unrequire all files
Code.unrequire_files(Code.required_files())
# Notice modules are still available
function_exported?(EExTest.Compiled, :before_compile, 0)
#=> true
"""
  @doc since: "1.7.0"
  @spec unrequire_files([binary]) :: :ok
  def unrequire_files(files) do
    # Fire-and-forget: the code server drops the paths from its
    # "already required" set asynchronously; modules stay loaded.
    :elixir_code_server.cast({:unrequire_files, files})
  end
  @deprecated "Use Code.unrequire_files/1 instead"
  @doc false
  # Backwards-compatible alias kept only for old callers.
  def unload_files(files) do
    unrequire_files(files)
  end
@doc """
Appends a path to the end of the Erlang VM code path list.
This is the list of directories the Erlang VM uses for
finding module code.
The path is expanded with `Path.expand/1` before being appended.
If this path does not exist, an error is returned.
## Examples
Code.append_path(".")
#=> true
Code.append_path("/does_not_exist")
#=> {:error, :bad_directory}
"""
@spec append_path(Path.t()) :: true | {:error, :bad_directory}
def append_path(path) do
:code.add_pathz(to_charlist(Path.expand(path)))
end
@doc """
Prepends a path to the beginning of the Erlang VM code path list.
This is the list of directories the Erlang VM uses for finding
module code.
The path is expanded with `Path.expand/1` before being prepended.
If this path does not exist, an error is returned.
## Examples
Code.prepend_path(".")
#=> true
Code.prepend_path("/does_not_exist")
#=> {:error, :bad_directory}
"""
@spec prepend_path(Path.t()) :: true | {:error, :bad_directory}
def prepend_path(path) do
:code.add_patha(to_charlist(Path.expand(path)))
end
@doc """
Deletes a path from the Erlang VM code path list. This is the list of
directories the Erlang VM uses for finding module code.
The path is expanded with `Path.expand/1` before being deleted. If the
path does not exist, this function returns `false`.
## Examples
Code.prepend_path(".")
Code.delete_path(".")
#=> true
Code.delete_path("/does_not_exist")
#=> false
"""
@spec delete_path(Path.t()) :: boolean
def delete_path(path) do
:code.del_path(to_charlist(Path.expand(path)))
end
@doc """
Evaluates the contents given by `string`.
The `binding` argument is a list of variable bindings.
The `opts` argument is a keyword list of environment options.
**Warning**: `string` can be any Elixir code and will be executed with
the same privileges as the Erlang VM: this means that such code could
compromise the machine (for example by executing system commands).
Don't use `eval_string/3` with untrusted input (such as strings coming
from the network).
## Options
Options can be:
* `:file` - the file to be considered in the evaluation
* `:line` - the line on which the script starts
Additionally, the following scope values can be configured:
* `:aliases` - a list of tuples with the alias and its target
* `:requires` - a list of modules required
* `:functions` - a list of tuples where the first element is a module
and the second a list of imported function names and arity; the list
of function names and arity must be sorted
* `:macros` - a list of tuples where the first element is a module
and the second a list of imported macro names and arity; the list
of function names and arity must be sorted
Notice that setting any of the values above overrides Elixir's default
values. For example, setting `:requires` to `[]` will no longer
automatically require the `Kernel` module. In the same way setting
`:macros` will no longer auto-import `Kernel` macros like `Kernel.if/2`,
`Kernel.SpecialForms.case/2`, and so on.
Returns a tuple of the form `{value, binding}`,
where `value` is the value returned from evaluating `string`.
If an error occurs while evaluating `string` an exception will be raised.
`binding` is a list with all variable bindings
after evaluating `string`. The binding keys are usually atoms, but they
may be a tuple for variables defined in a different context.
## Examples
iex> Code.eval_string("a + b", [a: 1, b: 2], file: __ENV__.file, line: __ENV__.line)
{3, [a: 1, b: 2]}
iex> Code.eval_string("c = a + b", [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2, c: 3]}
iex> Code.eval_string("a = a + b", [a: 1, b: 2])
{3, [a: 3, b: 2]}
For convenience, you can pass `__ENV__/0` as the `opts` argument and
all imports, requires and aliases defined in the current environment
will be automatically carried over:
iex> Code.eval_string("a + b", [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2]}
"""
  @spec eval_string(List.Chars.t(), binding, Macro.Env.t() | keyword) :: {term, binding}
  def eval_string(string, binding \\ [], opts \\ [])
  # A full macro environment may be passed directly; it is converted to
  # the keyword options understood by the evaluator.
  def eval_string(string, binding, %Macro.Env{} = env) do
    eval_string_with_error_handling(string, binding, Map.to_list(env))
  end
  # Keyword options are validated (functions/macros/aliases/requires
  # shapes) before evaluation.
  def eval_string(string, binding, opts) when is_list(opts) do
    validate_eval_opts(opts)
    eval_string_with_error_handling(string, binding, opts)
  end
defp eval_string_with_error_handling(string, binding, opts) do
%{line: line, file: file} = env = :elixir.env_for_eval(opts)
forms = :elixir.string_to_quoted!(to_charlist(string), line, file, [])
{value, binding, _env} = :elixir.eval_forms(forms, binding, env)
{value, binding}
end
@doc ~S"""
Formats the given code `string`.
The formatter receives a string representing Elixir code and
returns iodata representing the formatted code according to
pre-defined rules.
## Options
* `:file` - the file which contains the string, used for error
reporting
* `:line` - the line the string starts, used for error reporting
* `:line_length` - the line length to aim for when formatting
the document. Defaults to 98. Note this value is used as
reference but it is not enforced by the formatter as sometimes
user intervention is required. See "Running the formatter"
section
* `:locals_without_parens` - a keyword list of name and arity
pairs that should be kept without parens whenever possible.
The arity may be the atom `:*`, which implies all arities of
that name. The formatter already includes a list of functions
and this option augments this list.
* `:rename_deprecated_at` - rename all known deprecated functions
at the given version to their non-deprecated equivalent. It
expects a valid `Version` which is usually the minimum Elixir
version supported by the project.
* `:force_do_end_blocks` (since v1.9.0) - when `true`, converts all
inline usages of `do: ...`, `else: ...` and friends into `do/end`
blocks. Defaults to `false`. Notice this option is convergent:
once you set it to `true`, all keywords will be converted. If you
set it to `false` later on, `do/end` blocks won't be converted
back to keywords.
## Design principles
The formatter was designed under three principles.
First, the formatter never changes the semantics of the code by
default. This means the input AST and the output AST are equivalent.
Optional behaviour, such as `:rename_deprecated_at`, is allowed to
break this guarantee.
The second principle is to provide as little configuration as possible.
This eases the formatter adoption by removing contention points while
making sure a single style is followed consistently by the community as
a whole.
The formatter does not hard code names. The formatter will not behave
specially because a function is named `defmodule`, `def`, or the like. This
principle mirrors Elixir's goal of being an extensible language where
developers can extend the language with new constructs as if they were
part of the language. When it is absolutely necessary to change behaviour
based on the name, this behaviour should be configurable, such as the
`:locals_without_parens` option.
## Running the formatter
The formatter attempts to fit the most it can on a single line and
introduces line breaks wherever possible when it cannot.
In some cases, this may lead to undesired formatting. Therefore, **some
code generated by the formatter may not be aesthetically pleasing and
may require explicit intervention from the developer**. That's why we
do not recommend to run the formatter blindly in an existing codebase.
Instead you should format and sanity check each formatted file.
Let's see some examples. The code below:
"this is a very long string ... #{inspect(some_value)}"
may be formatted as:
"this is a very long string ... #{
inspect(some_value)
}"
This happens because the only place the formatter can introduce a
new line without changing the code semantics is in the interpolation.
In those scenarios, we recommend developers to directly adjust the
code. Here we can use the binary concatenation operator `<>/2`:
"this is a very long string " <>
"... #{inspect(some_value)}"
The string concatenation makes the code fit on a single line and also
gives more options to the formatter.
A similar example is when the formatter breaks a function definition
over multiple clauses:
def my_function(
%User{name: name, age: age, ...},
arg1,
arg2
) do
...
end
While the code above is completely valid, you may prefer to match on
the struct variables inside the function body in order to keep the
definition on a single line:
def my_function(%User{} = user, arg1, arg2) do
%{name: name, age: age, ...} = user
...
end
In some situations, you can use the fact the formatter does not generate
elegant code as a hint for refactoring. Take this code:
def board?(board_id, %User{} = user, available_permissions, required_permissions) do
Tracker.OrganizationMembers.user_in_organization?(user.id, board.organization_id) and
required_permissions == Enum.to_list(MapSet.intersection(MapSet.new(required_permissions), MapSet.new(available_permissions)))
end
The code above has very long lines and running the formatter is not going
to address this issue. In fact, the formatter may make it more obvious that
you have complex expressions:
def board?(board_id, %User{} = user, available_permissions, required_permissions) do
Tracker.OrganizationMembers.user_in_organization?(user.id, board.organization_id) and
required_permissions ==
Enum.to_list(
MapSet.intersection(
MapSet.new(required_permissions),
MapSet.new(available_permissions)
)
)
end
Take such cases as a suggestion that your code should be refactored:
def board?(board_id, %User{} = user, available_permissions, required_permissions) do
Tracker.OrganizationMembers.user_in_organization?(user.id, board.organization_id) and
matching_permissions?(required_permissions, available_permissions)
end
defp matching_permissions?(required_permissions, available_permissions) do
intersection =
required_permissions
|> MapSet.new()
|> MapSet.intersection(MapSet.new(available_permissions))
|> Enum.to_list()
required_permissions == intersection
end
To sum it up: since the formatter cannot change the semantics of your
code, sometimes it is necessary to tweak or refactor the code to get
optimal formatting. To help better understand how to control the formatter,
we describe in the next sections the cases where the formatter keeps the
user encoding and how to control multiline expressions.
## Keeping user's formatting
The formatter respects the input format in some cases. Those are
listed below:
* Insignificant digits in numbers are kept as is. The formatter
however always inserts underscores for decimal numbers with more
than 5 digits and converts hexadecimal digits to uppercase
* Strings, charlists, atoms and sigils are kept as is. No character
is automatically escaped or unescaped. The choice of delimiter is
also respected from the input
* Newlines inside blocks are kept as in the input except for:
1) expressions that take multiple lines will always have an empty
line before and after and 2) empty lines are always squeezed
together into a single empty line
* The choice between `:do` keyword and `do/end` blocks is left
to the user
* Lists, tuples, bitstrings, maps, structs and function calls will be
broken into multiple lines if they are followed by a newline in the
opening bracket and preceded by a new line in the closing bracket
* Newlines before certain operators (such as the pipeline operators)
and before other operators (such as comparison operators)
The behaviours above are not guaranteed. We may remove or add new
rules in the future. The goal of documenting them is to provide better
understanding on what to expect from the formatter.
### Multi-line lists, maps, tuples, and the like
You can force lists, tuples, bitstrings, maps, structs and function
calls to have one entry per line by adding a newline after the opening
bracket and a new line before the closing bracket lines. For example:
[
foo,
bar
]
If there are no newlines around the brackets, then the formatter will
try to fit everything on a single line, such that the snippet below
[foo,
bar]
will be formatted as
[foo, bar]
You can also force function calls and keywords to be rendered on multiple
lines by having each entry on its own line:
defstruct name: nil,
age: 0
The code above will be kept with one keyword entry per line by the
formatter. To avoid that, just squash everything into a single line.
### Parens and no parens in function calls
Elixir has two syntaxes for function calls. With parens and no parens.
By default, Elixir will add parens to all calls except for:
1. calls that have do/end blocks
2. local calls without parens where the name and arity of the local
call is also listed under `:locals_without_parens` (except for
calls with arity 0, where the compiler always require parens)
The choice of parens and no parens also affects indentation. When a
function call with parens doesn't fit on the same line, the formatter
introduces a newline around parens and indents the arguments with two
spaces:
some_call(
arg1,
arg2,
arg3
)
On the other hand, function calls without parens are always indented
by the function call length itself, like this:
some_call arg1,
arg2,
arg3
If the last argument is a data structure, such as maps and lists, and
the beginning of the data structure fits on the same line as the function
call, then no indentation happens, this allows code like this:
Enum.reduce(some_collection, initial_value, fn element, acc ->
# code
end)
some_function_without_parens %{
foo: :bar,
baz: :bat
}
## Code comments
The formatter also handles code comments in a way to guarantee a space
is always added between the beginning of the comment (#) and the next
character.
The formatter also extracts all trailing comments to their previous line.
For example, the code below
hello #world
will be rewritten to
# world
hello
Because code comments are handled apart from the code representation (AST),
there are some situations where code comments are seen as ambiguous by the
code formatter. For example, the comment in the anonymous function below
fn
arg1 ->
body1
# comment
arg2 ->
body2
end
and in this one
fn
arg1 ->
body1
# comment
arg2 ->
body2
end
are considered equivalent (the nesting is discarded alongside most of
user formatting). In such cases, the code formatter will always format to
the latter.
"""
@doc since: "1.6.0"
@spec format_string!(binary, keyword) :: iodata
def format_string!(string, opts \\ []) when is_binary(string) and is_list(opts) do
line_length = Keyword.get(opts, :line_length, 98)
algebra = Code.Formatter.to_algebra!(string, opts)
Inspect.Algebra.format(algebra, line_length)
end
@doc """
Formats a file.
See `format_string!/2` for more information on code formatting and
available options.
"""
@doc since: "1.6.0"
@spec format_file!(binary, keyword) :: iodata
def format_file!(file, opts \\ []) when is_binary(file) and is_list(opts) do
string = File.read!(file)
formatted = format_string!(string, [file: file, line: 1] ++ opts)
[formatted, ?\n]
end
@doc """
Evaluates the quoted contents.
**Warning**: Calling this function inside a macro is considered bad
practice as it will attempt to evaluate runtime values at compile time.
Macro arguments are typically transformed by unquoting them into the
returned quoted expressions (instead of evaluated).
See `eval_string/3` for a description of `binding` and options.
## Examples
iex> contents = quote(do: var!(a) + var!(b))
iex> Code.eval_quoted(contents, [a: 1, b: 2], file: __ENV__.file, line: __ENV__.line)
{3, [a: 1, b: 2]}
For convenience, you can pass `__ENV__/0` as the `opts` argument and
all options will be automatically extracted from the current environment:
iex> contents = quote(do: var!(a) + var!(b))
iex> Code.eval_quoted(contents, [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2]}
"""
  @spec eval_quoted(Macro.t(), binding, Macro.Env.t() | keyword) :: {term, binding}
  def eval_quoted(quoted, binding \\ [], opts \\ [])
  # A full macro environment may be passed directly; it is converted to
  # the keyword options understood by the evaluator.
  def eval_quoted(quoted, binding, %Macro.Env{} = env) do
    {value, binding, _env} = :elixir.eval_quoted(quoted, binding, Map.to_list(env))
    {value, binding}
  end
  # Keyword options are validated before evaluation.
  def eval_quoted(quoted, binding, opts) when is_list(opts) do
    validate_eval_opts(opts)
    {value, binding, _env} = :elixir.eval_quoted(quoted, binding, opts)
    {value, binding}
  end
  # Validates the scope-related eval options, raising ArgumentError on
  # malformed values. Options that are absent (or falsy) are skipped.
  defp validate_eval_opts(opts) do
    if f = opts[:functions], do: validate_imports(:functions, f)
    if m = opts[:macros], do: validate_imports(:macros, m)
    if a = opts[:aliases], do: validate_aliases(:aliases, a)
    if r = opts[:requires], do: validate_requires(:requires, r)
  end
defp validate_requires(kind, requires) do
valid = is_list(requires) and Enum.all?(requires, &is_atom(&1))
unless valid do
raise ArgumentError, "expected :#{kind} option given to eval in the format: [module]"
end
end
defp validate_aliases(kind, aliases) do
valid = is_list(aliases) and Enum.all?(aliases, fn {k, v} -> is_atom(k) and is_atom(v) end)
unless valid do
raise ArgumentError,
"expected :#{kind} option given to eval in the format: [{module, module}]"
end
end
defp validate_imports(kind, imports) do
valid =
is_list(imports) and
Enum.all?(imports, fn {k, v} ->
is_atom(k) and is_list(v) and
Enum.all?(v, fn {name, arity} -> is_atom(name) and is_integer(arity) end)
end)
unless valid do
raise ArgumentError,
"expected :#{kind} option given to eval in the format: [{module, [{name, arity}]}]"
end
end
@doc ~S"""
Converts the given string to its quoted form.
Returns `{:ok, quoted_form}` if it succeeds,
`{:error, {line, error, token}}` otherwise.
## Options
* `:file` - the filename to be reported in case of parsing errors.
Defaults to "nofile".
* `:line` - the starting line of the string being parsed.
Defaults to 1.
* `:columns` - when `true`, attach a `:column` key to the quoted
metadata. Defaults to `false`.
* `:existing_atoms_only` - when `true`, raises an error
when non-existing atoms are found by the tokenizer.
Defaults to `false`.
* `:token_metadata` (since v1.10.0) - when `true`, includes token-related
metadata in the expression AST, such as metadata for `do` and `end`
tokens, for closing tokens, end of expressions, as well as delimiters
for sigils. See `t:Macro.metadata/0`. Defaults to `false`.
* `:literal_encoder` (since v1.10.0) - how to encode literals in the AST.
It must be a function that receives two arguments, the literal and its
metadata, and it must return `{:ok, ast :: Macro.t}` or
    `{:error, reason :: binary}`. If you return anything other than the
    literal itself as the `term`, then the AST is no longer valid. This
    option may still be useful for textual analysis of the source code.
* `:static_atoms_encoder` - the static atom encoder function, see
"The `:static_atoms_encoder` function" section below. Note this
option overrides the `:existing_atoms_only` behaviour for static
atoms but `:existing_atoms_only` is still used for dynamic atoms,
such as atoms with interpolations.
* `:warn_on_unnecessary_quotes` - when `false`, does not warn
when atoms, keywords or calls have unnecessary quotes on
them. Defaults to `true`.
## `Macro.to_string/2`
The opposite of converting a string to its quoted form is
`Macro.to_string/2`, which converts a quoted form to a string/binary
representation.
## The `:static_atoms_encoder` function
When `static_atoms_encoder: &my_encoder/2` is passed as an argument,
`my_encoder/2` is called every time the tokenizer needs to create a
"static" atom. Static atoms are atoms in the AST that function as
aliases, remote calls, local calls, variable names, regular atoms
and keyword lists.
The encoder function will receive the atom name (as a binary) and a
keyword list with the current file, line and column. It must return
`{:ok, token :: term} | {:error, reason :: binary}`.
The encoder function is supposed to create an atom from the given
string. To produce a valid AST, it is required to return `{:ok, term}`,
where `term` is an atom. It is possible to return something other than an atom,
however, in that case the AST is no longer "valid" in that it cannot
be used to compile or evaluate Elixir code. A use case for this is
if you want to use the Elixir parser in a user-facing situation, but
you don't want to exhaust the atom table.
The atom encoder is not called for *all* atoms that are present in
the AST. It won't be invoked for the following atoms:
* operators (`:+`, `:-`, and so on)
* syntax keywords (`fn`, `do`, `else`, and so on)
* atoms containing interpolation (`:"#{1 + 1} is two"`), as these
atoms are constructed at runtime.
"""
@spec string_to_quoted(List.Chars.t(), keyword) ::
{:ok, Macro.t()} | {:error, {line :: pos_integer, term, term}}
def string_to_quoted(string, opts \\ []) when is_list(opts) do
file = Keyword.get(opts, :file, "nofile")
line = Keyword.get(opts, :line, 1)
case :elixir.string_to_tokens(to_charlist(string), line, file, opts) do
{:ok, tokens} ->
:elixir.tokens_to_quoted(tokens, file, opts)
{:error, _error_msg} = error ->
error
end
end
  @doc """
  Converts the given string to its quoted form.
  It returns the ast if it succeeds,
  raises an exception otherwise. The exception is a `TokenMissingError`
  in case a token is missing (usually because the expression is incomplete),
  `SyntaxError` otherwise.
  Check `string_to_quoted/2` for options information.
  """
  @spec string_to_quoted!(List.Chars.t(), keyword) :: Macro.t()
  def string_to_quoted!(string, opts \\ []) when is_list(opts) do
    file = Keyword.get(opts, :file, "nofile")
    line = Keyword.get(opts, :line, 1)
    # Tokenizing and parsing happen in one step; any failure raises.
    :elixir.string_to_quoted!(to_charlist(string), line, file, opts)
  end
@doc """
Evals the given file.
Accepts `relative_to` as an argument to tell where the file is located.
While `require_file/2` and `compile_file/2` return the loaded modules and their
bytecode, `eval_file/2` simply evaluates the file contents and returns the
evaluation result and its binding (exactly the same return value as `eval_string/3`).
"""
@spec eval_file(binary, nil | binary) :: {term, binding}
def eval_file(file, relative_to \\ nil) when is_binary(file) do
file = find_file(file, relative_to)
eval_string(File.read!(file), [], file: file, line: 1)
end
  @deprecated "Use Code.require_file/2 or Code.compile_file/2 instead"
  @doc false
  def load_file(file, relative_to \\ nil) when is_binary(file) do
    file = find_file(file, relative_to)
    # Acquire the per-file compilation lock, compile unconditionally,
    # then mark the file as required so require_file/2 skips it later.
    :elixir_code_server.call({:acquire, file})
    loaded = :elixir_compiler.file(file, fn _, _ -> :ok end)
    :elixir_code_server.cast({:required, file})
    verify_loaded(loaded)
  end
@doc """
Requires the given `file`.

Accepts `relative_to` as an argument to tell where the file is located.

If the file was already required, `require_file/2` doesn't do anything and
returns `nil`.

Notice that if `require_file/2` is invoked by different processes concurrently,
the first process to invoke `require_file/2` acquires a lock and the remaining
ones will block until the file is available. This means that if `require_file/2`
is called more than once with a given file, that file will be compiled only once.
The first process to call `require_file/2` will get the list of loaded modules,
others will get `nil`.

See `compile_file/2` if you would like to compile a file without tracking its
filenames. Finally, if you would like to get the result of evaluating a file rather
than the modules defined in it, see `eval_file/2`.

## Examples

If the file has not been required, it returns the list of modules:

    modules = Code.require_file("eex_test.exs", "../eex/test")
    List.first(modules)
    #=> {EExTest.Compiled, <<70, 79, 82, 49, ...>>}

If the file has been required, it returns `nil`:

    Code.require_file("eex_test.exs", "../eex/test")
    #=> nil

"""
@spec require_file(binary, nil | binary) :: [{module, binary}] | nil
def require_file(file, relative_to \\ nil) when is_binary(file) do
  file = find_file(file, relative_to)

  case :elixir_code_server.call({:acquire, file}) do
    # Another process already required this file; nothing to do.
    :required ->
      nil

    # We hold the lock: compile, then notify the code server so that any
    # processes blocked on the same file are released.
    :proceed ->
      loaded = :elixir_compiler.file(file, fn _, _ -> :ok end)
      :elixir_code_server.cast({:required, file})
      verify_loaded(loaded)
  end
end
@doc """
Gets all compilation options from the code server.

To get individual options, see `get_compiler_option/1`.
For a description of all options, see `put_compiler_option/2`.

## Examples

    Code.compiler_options()
    #=> %{debug_info: true, docs: true, ...}

"""
@spec compiler_options :: map
def compiler_options do
  # Snapshot every known option from the global config store into a map.
  for key <- @available_compiler_options, into: %{} do
    {key, :elixir_config.get(key)}
  end
end
@doc """
Stores all given compilation options.

To store individual options, see `put_compiler_option/2`.
For a description of all options, see `put_compiler_option/2`.

## Examples

    Code.compiler_options(debug_info: true)
    #=> %{debug_info: true}

"""
@spec compiler_options(Enumerable.t()) :: %{optional(atom) => boolean}
def compiler_options(opts) do
  # Validate and store each option; returns a map of the options that were
  # just set (put_compiler_option/2 raises on invalid keys or values).
  for {key, value} <- opts, into: %{} do
    put_compiler_option(key, value)
    {key, value}
  end
end
@doc """
Returns the value of a given compiler option.

For a description of all options, see `put_compiler_option/2`.

## Examples

    Code.get_compiler_option(:debug_info)
    #=> true

"""
@doc since: "1.10.0"
@spec get_compiler_option(atom) :: term
# The guard restricts lookups to known options; an unknown key raises
# FunctionClauseError.
def get_compiler_option(key) when key in @available_compiler_options do
  :elixir_config.get(key)
end
@doc """
Returns a list with all available compiler options.

For a description of all options, see `put_compiler_option/2`.

## Examples

    Code.available_compiler_options()
    #=> [:docs, :debug_info, ...]

"""
@spec available_compiler_options() :: [atom]
def available_compiler_options do
  # Simple accessor over the compile-time list of known options.
  @available_compiler_options
end
@doc """
Stores a compilation option.

These options are global since they are stored by Elixir's code server.

Available options are:

  * `:docs` - when `true`, retain documentation in the compiled module.
    Defaults to `true`.

  * `:debug_info` - when `true`, retain debug information in the compiled
    module. This allows a developer to reconstruct the original source
    code. Defaults to `true`.

  * `:ignore_module_conflict` - when `true`, override modules that were
    already defined without raising errors. Defaults to `false`.

  * `:relative_paths` - when `true`, use relative paths in quoted nodes,
    warnings and errors generated by the compiler. Note disabling this option
    won't affect runtime warnings and errors. Defaults to `true`.

  * `:warnings_as_errors` - causes compilation to fail when warnings are
    generated. Defaults to `false`.

  * `:no_warn_undefined` (since v1.10.0) - list of modules and `{Mod, fun, arity}`
    tuples that will not emit warnings that the module or function does not exist
    at compilation time. This can be useful when doing dynamic compilation.
    Defaults to `[]`.

  * `:tracers` (since v1.10.0) - a list of tracers (modules) to be used during
    compilation. See the module docs for more information. Defaults to `[]`.

  * `:parser_options` (since v1.10.0) - a keyword list of options to be given
    to the parser when compiling files. It accepts the same options as
    `string_to_quoted/2` (except by the options that change the AST itself).
    This can be used in combination with the tracer to retrieve localized
    information about events happening during compilation. Defaults to `[]`.

It always returns `:ok`. Raises an error for invalid options.

## Examples

    Code.put_compiler_option(:debug_info, true)
    #=> :ok

"""
@doc since: "1.10.0"
@spec put_compiler_option(atom, term) :: :ok
def put_compiler_option(key, value) when key in @boolean_compiler_options do
  if not is_boolean(value) do
    raise "compiler option #{inspect(key)} should be a boolean, got: #{inspect(value)}"
  end

  :elixir_config.put(key, value)
  :ok
end

def put_compiler_option(key, value) when key in @list_compiler_options do
  # All list-valued options share a generic list check; two of them carry an
  # extra key-specific shape check. The branches are mutually exclusive by
  # key, so one cond mirrors the original sequential checks exactly.
  cond do
    not is_list(value) ->
      raise "compiler option #{inspect(key)} should be a list, got: #{inspect(value)}"

    key == :parser_options and not Keyword.keyword?(value) ->
      raise "compiler option #{inspect(key)} should be a keyword list, " <>
              "got: #{inspect(value)}"

    key == :tracers and not Enum.all?(value, &is_atom/1) ->
      raise "compiler option #{inspect(key)} should be a list of modules, " <>
              "got: #{inspect(value)}"

    true ->
      :ok
  end

  :elixir_config.put(key, value)
  :ok
end

# Any key outside the known boolean/list option sets is rejected.
def put_compiler_option(key, _value) do
  raise "unknown compiler option: #{inspect(key)}"
end
@doc """
Purge compiler modules.

The compiler utilizes temporary modules to compile code. For example,
`elixir_compiler_1`, `elixir_compiler_2`, and so on. In case the compiled code
stores references to anonymous functions or similar, the Elixir compiler
may be unable to reclaim those modules, keeping an unnecessary amount of
code in memory and eventually leading to modules such as `elixir_compiler_12345`.

This function purges all modules currently kept by the compiler, allowing
old compiler module names to be reused. If there are any processes running
any code from such modules, they will be terminated too.

It returns `{:ok, number_of_modules_purged}`.
"""
@doc since: "1.7.0"
@spec purge_compiler_modules() :: {:ok, non_neg_integer()}
def purge_compiler_modules() do
  # The code server owns the bookkeeping of temporary compiler modules.
  :elixir_code_server.call(:purge_compiler_modules)
end
@doc """
Compiles the given string.

Returns a list of tuples where the first element is the module name
and the second one is its bytecode (as a binary). A `file` can be
given as a second argument which will be used for reporting warnings
and errors.

**Warning**: `string` can be any Elixir code and code can be executed with
the same privileges as the Erlang VM: this means that such code could
compromise the machine (for example by executing system commands).
Don't use `compile_string/2` with untrusted input (such as strings coming
from the network).
"""
@spec compile_string(List.Chars.t(), binary) :: [{module, binary}]
def compile_string(string, file \\ "nofile") when is_binary(file) do
  string
  |> to_charlist()
  |> :elixir_compiler.string(file, fn _, _ -> :ok end)
  |> Enum.map(fn {module, _map, binary} -> {module, binary} end)
end
@doc """
Compiles the quoted expression.

Returns a list of tuples where the first element is the module name and
the second one is its bytecode (as a binary). A `file` can be
given as a second argument which will be used for reporting warnings
and errors.
"""
@spec compile_quoted(Macro.t(), binary) :: [{module, binary}]
def compile_quoted(quoted, file \\ "nofile") when is_binary(file) do
  quoted
  |> :elixir_compiler.quoted(file, fn _, _ -> :ok end)
  |> Enum.map(fn {module, _map, binary} -> {module, binary} end)
end
@doc """
Compiles the given file.

Accepts `relative_to` as an argument to tell where the file is located.

Returns a list of tuples where the first element is the module name and
the second one is its bytecode (as a binary). Opposite to `require_file/2`,
it does not track the filename of the compiled file.

If you would like to get the result of evaluating file rather than the
modules defined in it, see `eval_file/2`.

For compiling many files concurrently, see `Kernel.ParallelCompiler.compile/2`.
"""
@doc since: "1.7.0"
@spec compile_file(binary, nil | binary) :: [{module, binary}]
def compile_file(file, relative_to \\ nil) when is_binary(file) do
  file
  |> find_file(relative_to)
  |> :elixir_compiler.file(fn _, _ -> :ok end)
  |> verify_loaded()
end
@doc """
Ensures the given module is loaded.

If the module is already loaded, this works as no-op. If the module
was not yet loaded, it tries to load it.

If it succeeds in loading the module, it returns `{:module, module}`.
If not, returns `{:error, reason}` with the error reason.

## Code loading on the Erlang VM

Erlang has two modes to load code: interactive and embedded.

By default, the Erlang VM runs in interactive mode, where modules
are loaded as needed. In embedded mode the opposite happens, as all
modules need to be loaded upfront or explicitly.

Therefore, this function is used to check if a module is loaded
before using it and allows one to react accordingly. For example, the `URI`
module uses this function to check if a specific parser exists for a given
URI scheme.

## `ensure_compiled/1`

Elixir also contains an `ensure_compiled/1` function that is a
superset of `ensure_loaded/1`.

Since Elixir's compilation happens in parallel, in some situations
you may need to use a module that was not yet compiled, therefore
it can't even be loaded.

When invoked, `ensure_compiled/1` halts the compilation of the caller
until the module given to `ensure_compiled/1` becomes available or
all files for the current project have been compiled. If compilation
finishes and the module is not available, an error tuple is returned.

`ensure_compiled/1` does not apply to dependencies, as dependencies
must be compiled upfront.

In most cases, `ensure_loaded/1` is enough. `ensure_compiled/1`
must be used in rare cases, usually involving macros that need to
invoke a module for callback information.

## Examples

    iex> Code.ensure_loaded(Atom)
    {:module, Atom}

    iex> Code.ensure_loaded(DoesNotExist)
    {:error, :nofile}

"""
@spec ensure_loaded(module) ::
        {:module, module} | {:error, :embedded | :badfile | :nofile | :on_load_failure}
def ensure_loaded(module) when is_atom(module) do
  # Delegates directly to the Erlang code server.
  :code.ensure_loaded(module)
end
@doc """
Ensures the given module is loaded.

Similar to `ensure_loaded/1`, but returns `true` if the module
is already loaded or was successfully loaded. Returns `false`
otherwise.

## Examples

    iex> Code.ensure_loaded?(Atom)
    true

"""
@spec ensure_loaded?(module) :: boolean
def ensure_loaded?(module) when is_atom(module) do
  case ensure_loaded(module) do
    {:module, ^module} -> true
    _other -> false
  end
end
@doc """
Ensures the given module is compiled and loaded.

If the module is already loaded, it works as no-op. If the module was
not compiled yet, `ensure_compiled/1` halts the compilation of the caller
until the module given to `ensure_compiled/1` becomes available or
all files for the current project have been compiled. If compilation
finishes and the module is not available, an error tuple is returned.

Given this function halts compilation, use it carefully. In particular,
avoid using it to guess which modules are in the system. Overuse of this
function can also lead to deadlocks, where two modules check at the same time
if the other is compiled. This returns a specific unavailable error code,
where we cannot successfully verify a module is available or not.

If it succeeds in loading the module, it returns `{:module, module}`.
If not, returns `{:error, reason}` with the error reason.

If the module being checked is currently in a compiler deadlock,
this function returns `{:error, :unavailable}`. Unavailable doesn't
necessarily mean the module doesn't exist, just that it is not currently
available, but it may (or may not) become available in the future.

Check `ensure_loaded/1` for more information on module loading
and when to use `ensure_loaded/1` or `ensure_compiled/1`.
"""
@spec ensure_compiled(module) ::
        {:module, module}
        | {:error, :embedded | :badfile | :nofile | :on_load_failure | :unavailable}
def ensure_compiled(module) when is_atom(module) do
  case :code.ensure_loaded(module) do
    {:error, :nofile} = error ->
      # :elixir_compiler_pid is present in the process dictionary only when
      # running under the compiler, in which case we may block and wait for
      # the module to be compiled by another process.
      if is_pid(:erlang.get(:elixir_compiler_pid)) do
        case Kernel.ErrorHandler.ensure_compiled(module, :module, :soft) do
          :found -> {:module, module}
          :deadlock -> {:error, :unavailable}
          :not_found -> {:error, :nofile}
        end
      else
        # Outside compilation there is nothing to wait for.
        error
      end

    other ->
      other
  end
end
@doc false
@deprecated "Use Code.ensure_compiled/1 instead (see the proper disclaimers in its docs)"
def ensure_compiled?(module) when is_atom(module) do
  case ensure_compiled(module) do
    {:module, ^module} -> true
    _other -> false
  end
end
@doc ~S"""
Returns the docs for the given module or path to `.beam` file.

When given a module name, it finds its BEAM code and reads the docs from it.

When given a path to a `.beam` file, it will load the docs directly from that
file.

It returns the term stored in the documentation chunk in the format defined by
[EEP 48](http://erlang.org/eep/eeps/eep-0048.html) or `{:error, reason}` if
the chunk is not available.

## Examples

    # Module documentation of an existing module
    iex> {:docs_v1, _, :elixir, _, %{"en" => module_doc}, _, _} = Code.fetch_docs(Atom)
    iex> module_doc |> String.split("\n") |> Enum.at(0)
    "Atoms are constants whose values are their own name."

    # A module that doesn't exist
    iex> Code.fetch_docs(ModuleNotGood)
    {:error, :module_not_found}

"""
@doc since: "1.7.0"
@spec fetch_docs(module | String.t()) ::
        {:docs_v1, annotation, beam_language, format, module_doc :: doc_content, metadata,
         docs :: [doc_element]}
        | {:error, :module_not_found | :chunk_not_found | {:invalid_chunk, binary}}
      when annotation: :erl_anno.anno(),
           beam_language: :elixir | :erlang | :lfe | :alpaca | atom(),
           doc_content: %{required(binary) => binary} | :none | :hidden,
           doc_element:
             {{kind :: atom, function_name :: atom, arity}, annotation, signature, doc_content,
              metadata},
           format: binary,
           signature: [binary],
           metadata: map
def fetch_docs(module_or_path)

def fetch_docs(module) when is_atom(module) do
  # Locate the module's object code; :code.get_object_code/1 returns :error
  # when the BEAM file cannot be found on the code path.
  with {_module, bin, _beam_path} <- :code.get_object_code(module) do
    do_fetch_docs(bin)
  else
    :error -> {:error, :module_not_found}
  end
end

def fetch_docs(path) when is_binary(path) do
  path
  |> String.to_charlist()
  |> do_fetch_docs()
end
# Name of the BEAM chunk where documentation is stored, per EEP 48.
@docs_chunk 'Docs'

defp do_fetch_docs(bin_or_path) do
  # :beam_lib.chunks/2 accepts either a binary or a charlist path.
  case :beam_lib.chunks(bin_or_path, [@docs_chunk]) do
    {:ok, {_module, [{@docs_chunk, bin}]}} ->
      try do
        :erlang.binary_to_term(bin)
      rescue
        # The chunk exists but its payload is not a valid external term.
        _ -> {:error, {:invalid_chunk, bin}}
      end

    {:error, :beam_lib, {:missing_chunk, _, @docs_chunk}} ->
      {:error, :chunk_not_found}
  end
end
@doc ~S"""
Deprecated function to retrieve old documentation format.

Elixir v1.7 adopts [EEP 48](http://erlang.org/eep/eeps/eep-0048.html)
which is a new documentation format meant to be shared across all
BEAM languages. The old format, used by `Code.get_docs/2`, is no
longer available, and therefore this function always returns `nil`.
Use `Code.fetch_docs/1` instead.
"""
@deprecated "Code.get_docs/2 always returns nil as its outdated documentation is no longer stored on BEAM files. Use Code.fetch_docs/1 instead"
@spec get_docs(module, :moduledoc | :docs | :callback_docs | :type_docs | :all) :: nil
# Kept only for backwards compatibility; the legacy docs chunk no longer
# exists, so this is a constant stub.
def get_docs(_module, _kind) do
  nil
end
## Helpers

# Finds the file given the relative_to path.
#
# If the file is found, returns its absolute path as a binary;
# raises Code.LoadError otherwise.
defp find_file(file, relative_to) do
  path =
    if relative_to,
      do: Path.expand(file, relative_to),
      else: Path.expand(file)

  if File.regular?(path) do
    path
  else
    raise Code.LoadError, file: path
  end
end
# Runs the compiler's parallel checker over the freshly compiled modules
# (surfacing any warnings) and shapes the result for the public API.
#
# Callers (require_file/2, compile_file/2) document and @spec their return
# as `[{module, binary}]` — "the second one is its bytecode (as a binary)".
# The previous version returned the checker's internal map instead of the
# bytecode, contradicting those specs; return the binary.
defp verify_loaded(loaded) do
  maps_binaries = Enum.map(loaded, fn {_module, map, binary} -> {map, binary} end)
  Module.ParallelChecker.verify(maps_binaries, [])
  Enum.map(loaded, fn {module, _map, binary} -> {module, binary} end)
end
end
| 35.37536 | 145 | 0.690332 |
082e5c183e5ec68850a73c3aab8692392f7b118b | 67 | exs | Elixir | test/ex_orient_test.exs | syncrypt/ex_orient | 966823f5c1823c5c5ed3b9ed5e0a61280ca44e40 | [
"MIT"
] | null | null | null | test/ex_orient_test.exs | syncrypt/ex_orient | 966823f5c1823c5c5ed3b9ed5e0a61280ca44e40 | [
"MIT"
] | null | null | null | test/ex_orient_test.exs | syncrypt/ex_orient | 966823f5c1823c5c5ed3b9ed5e0a61280ca44e40 | [
"MIT"
] | 1 | 2021-07-21T07:45:22.000Z | 2021-07-21T07:45:22.000Z | defmodule ExOrientTest do
# Doctest-only suite: runs the iex> examples embedded in ExOrient's docs.
use ExUnit.Case
doctest ExOrient
end
| 13.4 | 25 | 0.80597 |
082e60f74ca8ad57522426c35d90b5867bbc6f91 | 354 | ex | Elixir | apps/castles_game/lib/castles_game.ex | sjanota/castles | a890a1acf8c33a7cf7d6c49759387ea4dd4ce37a | [
"Apache-2.0"
] | null | null | null | apps/castles_game/lib/castles_game.ex | sjanota/castles | a890a1acf8c33a7cf7d6c49759387ea4dd4ce37a | [
"Apache-2.0"
] | null | null | null | apps/castles_game/lib/castles_game.ex | sjanota/castles | a890a1acf8c33a7cf7d6c49759387ea4dd4ce37a | [
"Apache-2.0"
] | null | null | null | defmodule CastlesGame do
use Application
# Application callback: starts the root supervision tree with the game and
# lobby supervisors under a one_for_one strategy.
def start(_start_type, _start_args) do
  import Supervisor.Spec

  # NOTE(review): Supervisor.Spec helpers are deprecated in modern Elixir;
  # plain child specs could replace supervisor/2 — confirm target version.
  children = [
    supervisor(CastlesGame.GameSupervisor, []),
    supervisor(CastlesGame.LobbySupervisor, [])
  ]

  opts = [strategy: :one_for_one, name: CastlesGame.Supervisor]
  Supervisor.start_link(children, opts)
end
end
| 23.6 | 65 | 0.714689 |
082ecd49fa524b5cc0d5bb808160032bbcd061fd | 954 | exs | Elixir | api/config/test.exs | Feggah/university-api | 4cd6bf3f9cbfe0e5c27338e25c406a548ff3bf72 | [
"MIT"
] | 1 | 2021-10-04T23:26:37.000Z | 2021-10-04T23:26:37.000Z | api/config/test.exs | Feggah/university-api | 4cd6bf3f9cbfe0e5c27338e25c406a548ff3bf72 | [
"MIT"
] | 5 | 2021-10-04T23:09:14.000Z | 2022-02-25T09:35:25.000Z | api/config/test.exs | Feggah/university-api | 4cd6bf3f9cbfe0e5c27338e25c406a548ff3bf72 | [
"MIT"
] | null | null | null | import Config
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :api, Api.Repo,
  username: "postgres",
  password: "postgres",
  database: "api_test#{System.get_env("MIX_TEST_PARTITION")}",
  hostname: "localhost",
  # Sandbox pool wraps each test in a transaction that is rolled back.
  pool: Ecto.Adapters.SQL.Sandbox,
  pool_size: 10

# We don't run a server during test. If one is required,
# you can enable the server option below.
config :api, ApiWeb.Endpoint,
  http: [ip: {127, 0, 0, 1}, port: 4002],
  secret_key_base: "/6m9vGqJQ63P8A6rNxB26L1C9rHvBJHAP7E6uoty/D8OxbBx0M/Zbh49RAxh2Q9o",
  server: false

# In test we don't send emails.
config :api, Api.Mailer, adapter: Swoosh.Adapters.Test

# Print only warnings and errors during test
config :logger, level: :warn

# Initialize plugs at runtime for faster test compilation
config :phoenix, :plug_init_mode, :runtime
| 30.774194 | 86 | 0.751572 |
082ed2ce771a854f77e732c4aa6985c2a4647d14 | 787 | exs | Elixir | test/regressions/i114_ordered_list_test.exs | RichMorin/earmark | e65fcf67345c84c23d237c732e5c174246662c68 | [
"Apache-1.1"
] | null | null | null | test/regressions/i114_ordered_list_test.exs | RichMorin/earmark | e65fcf67345c84c23d237c732e5c174246662c68 | [
"Apache-1.1"
] | null | null | null | test/regressions/i114_ordered_list_test.exs | RichMorin/earmark | e65fcf67345c84c23d237c732e5c174246662c68 | [
"Apache-1.1"
] | null | null | null | defmodule Regressions.I114OrderedListTest do
use ExUnit.Case

# NOTE(review): the nested-list fixtures below normally rely on leading
# indentation, which appears to have been stripped in this copy of the
# file — verify against the upstream earmark test before trusting the
# expected HTML for the "scoped" case.

@default_start """
1. hello
1. world
"""

test "default" do
  assert Earmark.as_html!(@default_start) == "<ol>\n<li><p>hello</p>\n</li>\n<li><p>world</p>\n</li>\n</ol>\n"
end

@explicit_start """
2. hello
1. world
"""

test "explicit" do
  assert Earmark.as_html!(@explicit_start) == ~s{<ol start="2">\n<li><p>hello</p>\n</li>\n<li><p>world</p>\n</li>\n</ol>\n}
end

@scoped """
2. hello
4. world
42. again
"""

test "scoped" do
  assert Earmark.as_html!(@scoped) == ~s{<ol start="2">\n<li><p>hello</p>\n<ol start="4">\n<li><p>world</p>\n<ol start="42">\n<li>again\n</li>\n</ol>\n</li>\n</ol>\n</li>\n</ol>\n}
end
end
# SPDX-License-Identifier: Apache-2.0
| 23.848485 | 182 | 0.570521 |
082ee5dbd56da5224f421326f0da3504867655d8 | 667 | exs | Elixir | mix.exs | jonnystorm/elexir | ef197f7c592f1eaa628fcd0970bec856e933284f | [
"WTFPL"
] | null | null | null | mix.exs | jonnystorm/elexir | ef197f7c592f1eaa628fcd0970bec856e933284f | [
"WTFPL"
] | null | null | null | mix.exs | jonnystorm/elexir | ef197f7c592f1eaa628fcd0970bec856e933284f | [
"WTFPL"
] | 2 | 2021-03-06T19:04:53.000Z | 2021-03-07T12:27:54.000Z | defmodule Elexir.Mixfile do
use Mix.Project

# Project definition consumed by Mix (app name, version, deps).
def project do
  [app: :elexir,
   version: "0.0.1",
   elixir: "~> 1.3",
   build_embedded: Mix.env == :prod,
   start_permanent: Mix.env == :prod,
   deps: deps()]
end

# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
  [applications: [:logger]]
end

# Dependencies can be Hex packages:
#
#   {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
#   {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
  # No external dependencies.
  []
end
end
| 20.212121 | 77 | 0.605697 |
082f04cefa9cc7e9cf6e3f7756cc1b8e3c93c652 | 673 | ex | Elixir | lib/avrolixr.ex | avvo/avrolixr | e5f5dd663597d2ab062ec9fea2d1da38897a4c42 | [
"MIT"
] | 3 | 2016-09-13T07:14:59.000Z | 2017-07-20T07:47:48.000Z | lib/avrolixr.ex | avvo/avrolixr | e5f5dd663597d2ab062ec9fea2d1da38897a4c42 | [
"MIT"
] | 3 | 2016-10-12T21:24:11.000Z | 2017-06-29T08:35:55.000Z | lib/avrolixr.ex | avvo/avrolixr | e5f5dd663597d2ab062ec9fea2d1da38897a4c42 | [
"MIT"
] | 5 | 2017-02-25T18:06:21.000Z | 2018-10-24T14:28:50.000Z | defmodule Avrolixr do
use Application

# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
  import Supervisor.Spec, warn: false

  # Define workers and child supervisors to be supervised.
  # The tree is currently empty: the application only starts a bare
  # supervisor with no children.
  children = [
    # Starts a worker by calling: Avrolixr.Worker.start_link(arg1, arg2, arg3)
    # worker(Avrolixr.Worker, [arg1, arg2, arg3]),
  ]

  # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
  # for other strategies and supported options
  opts = [strategy: :one_for_one, name: Avrolixr.Supervisor]
  Supervisor.start_link(children, opts)
end
end
| 32.047619 | 80 | 0.716196 |
082f5b086a6b7c59ed90d74ece3f4aea2dcd05f9 | 2,259 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_likelihood_adjustment.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_likelihood_adjustment.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_likelihood_adjustment.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2LikelihoodAdjustment do
  @moduledoc """
  Message for specifying an adjustment to the likelihood of a finding as
  part of a detection rule.

  ## Attributes

  * `fixedLikelihood` (*type:* `String.t`, *default:* `nil`) - Set the likelihood of a finding to a fixed value.
  * `relativeLikelihood` (*type:* `integer()`, *default:* `nil`) - Increase or decrease the likelihood by the specified number of
      levels. For example, if a finding would be `POSSIBLE` without the
      detection rule and `relative_likelihood` is 1, then it is upgraded to
      `LIKELY`, while a value of -1 would downgrade it to `UNLIKELY`.
      Likelihood may never drop below `VERY_UNLIKELY` or exceed
      `VERY_LIKELY`, so applying an adjustment of 1 followed by an
      adjustment of -1 when base likelihood is `VERY_LIKELY` will result in
      a final likelihood of `LIKELY`.
  """

  # `field/1` is provided by GoogleApi.Gax.ModelBase (pulled in via `use`)
  # and declares the struct fields described in @type t above.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :fixedLikelihood => String.t(),
          :relativeLikelihood => integer()
        }

  field(:fixedLikelihood)
  field(:relativeLikelihood)
end
# Poison protocol implementations: decoding delegates to the generated
# model helper, encoding to the shared Gax base encoder.
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2LikelihoodAdjustment do
  def decode(value, options) do
    GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2LikelihoodAdjustment.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2LikelihoodAdjustment do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.948276 | 131 | 0.73838 |
082f8616b4058c53e904f7fa542642a16b401fd1 | 10,679 | exs | Elixir | test/kane/subscription_test.exs | peburrows/murdoch | 731d73b4c0c0734a777991f330638c89ed1d16ec | [
"MIT"
] | 99 | 2016-04-28T13:45:44.000Z | 2021-12-03T06:48:28.000Z | test/kane/subscription_test.exs | peburrows/murdoch | 731d73b4c0c0734a777991f330638c89ed1d16ec | [
"MIT"
] | 29 | 2016-04-29T23:22:35.000Z | 2022-03-15T16:41:34.000Z | test/kane/subscription_test.exs | peburrows/murdoch | 731d73b4c0c0734a777991f330638c89ed1d16ec | [
"MIT"
] | 46 | 2016-03-21T18:21:38.000Z | 2022-03-23T07:49:42.000Z | defmodule Kane.SubscriptionTest do
use ExUnit.Case

alias Kane.Subscription
alias Kane.Topic
alias Kane.Message

# Each test gets a fresh Bypass server standing in for the Pub/Sub HTTP
# endpoint, plus the project id from the Goth config.
setup do
  bypass = Bypass.open()
  Application.put_env(:kane, :endpoint, "http://localhost:#{bypass.port}")
  {:ok, project} = Goth.Config.get(:project_id)
  {:ok, bypass: bypass, project: project}
end
# Pure functions on Subscription — no HTTP involved in these two tests.
test "generating the create path", %{project: project} do
  name = "path-sub"
  sub = %Subscription{name: name}
  assert "projects/#{project}/subscriptions/#{name}" == Subscription.path(sub, :create)
end

test "creating the JSON for creating a subscription", %{project: project} do
  name = "sub-json"
  topic = "sub-json-topic"

  sub = %Subscription{
    name: name,
    topic: %Topic{name: topic},
    filter: "attributes:domain"
  }

  # The request body uses the fully-qualified topic name and the default
  # ack deadline, and carries the filter through verbatim.
  assert %{
           "topic" => "projects/#{project}/topics/#{topic}",
           "ackDeadlineSeconds" => 10,
           "filter" => "attributes:domain"
         } == Subscription.data(sub, :create)
end
test "finding a subscription", %{bypass: bypass, project: project} do
name = "found-sub"
topic = "found-sub-topic"
Bypass.expect(bypass, fn conn ->
assert conn.method == "GET"
assert Regex.match?(~r{projects/#{project}/subscriptions/#{name}}, conn.request_path)
Plug.Conn.send_resp(
conn,
200,
Jason.encode!(%{name: name, topic: topic, ackDeadlineSeconds: 20})
)
end)
assert {:ok,
%Subscription{
name: ^name,
topic: %Topic{name: ^topic},
ack_deadline: 20
}} = Subscription.find(name)
end
test "creating a subscription", %{bypass: bypass} do
name = "create-sub"
topic = "topic-to-sub"
sub = %Subscription{name: name, topic: %Topic{name: topic}}
sname = Subscription.full_name(sub)
tname = Topic.full_name(sub.topic)
Bypass.expect(bypass, fn conn ->
{:ok, body, conn} = Plug.Conn.read_body(conn)
assert body ==
%{"topic" => tname, "ackDeadlineSeconds" => sub.ack_deadline} |> Jason.encode!()
assert conn.method == "PUT"
assert_content_type(conn, "application/json")
Plug.Conn.send_resp(conn, 201, ~s({
"name": "#{sname}",
"topic": "#{tname}",
"ackDeadlineSeconds": 10
}))
end)
assert {:ok, %Subscription{topic: %Topic{name: ^topic}, name: ^name, ack_deadline: 10}} =
Subscription.create(sub)
end
test "creating a subscription with filter includes a filter in the request body", %{
bypass: bypass
} do
name = "create-sub"
topic = "topic-to-sub"
filter = "attributes:domain"
sub = %Subscription{name: name, topic: %Topic{name: topic}, filter: filter}
sname = Subscription.full_name(sub)
tname = Topic.full_name(sub.topic)
Bypass.expect(bypass, fn conn ->
{:ok, body, conn} = Plug.Conn.read_body(conn)
assert body ==
%{
"topic" => tname,
"ackDeadlineSeconds" => sub.ack_deadline,
"filter" => filter
}
|> Jason.encode!()
assert conn.method == "PUT"
assert_content_type(conn, "application/json")
Plug.Conn.send_resp(conn, 201, ~s({
"name": "#{sname}",
"topic": "#{tname}",
"ackDeadlineSeconds": 10,
"filter": "#{filter}"
}))
end)
assert {:ok,
%Subscription{
topic: %Topic{name: ^topic},
name: ^name,
ack_deadline: 10,
filter: ^filter
}} = Subscription.create(sub)
end
test "deleting a subscription", %{bypass: bypass, project: project} do
name = "delete-me"
Bypass.expect(bypass, fn conn ->
assert conn.method == "DELETE"
assert Regex.match?(~r{projects/#{project}/subscriptions/#{name}}, conn.request_path)
Plug.Conn.send_resp(conn, 200, "{}\n")
end)
Subscription.delete(name)
end
test "pulling from a subscription", %{bypass: bypass} do
Bypass.expect(bypass, fn conn ->
assert conn.method == "POST"
assert_content_type(conn, "application/json")
assert Regex.match?(~r(:pull$), conn.request_path)
Plug.Conn.send_resp(conn, 200, ~s({"receivedMessages": [
{"ackId":"123",
"message": {
"messageId": "messId",
"publishTime": "2015-10-02T15:01:23.045123456Z",
"attributes": {
"key" : "val"
},
"data": "eyJoZWxsbyI6IndvcmxkIn0="
}
}
]}))
end)
assert {:ok, messages} =
Subscription.pull(%Subscription{name: "tasty", topic: %Topic{name: "messages"}})
assert is_list(messages)
Enum.each(messages, fn m ->
assert %Message{} = m
end)
end
test "pulling from a subscription passes the correct maxMessages value", %{bypass: bypass} do
Bypass.expect(bypass, fn conn ->
assert conn.method == "POST"
assert_content_type(conn, "application/json")
assert Regex.match?(~r(:pull$), conn.request_path)
{:ok, body, conn} = Plug.Conn.read_body(conn)
data = Jason.decode!(body)
assert data["maxMessages"] == 2
assert data["returnImmediately"] == true
Plug.Conn.send_resp(conn, 200, ~s({"recievedMessages": []}))
end)
assert {:ok, []} = Subscription.pull(%Subscription{name: "capped", topic: "sure"}, 2)
end
test "pulling from a subscription passes the correct options", %{bypass: bypass} do
Bypass.expect(bypass, fn conn ->
assert conn.method == "POST"
assert_content_type(conn, "application/json")
assert Regex.match?(~r(:pull$), conn.request_path)
{:ok, body, conn} = Plug.Conn.read_body(conn)
data = Jason.decode!(body)
assert data["maxMessages"] == 5
assert data["returnImmediately"] == false
Plug.Conn.send_resp(conn, 200, ~s({"receivedMessages": []}))
end)
assert {:ok, []} =
Subscription.pull(%Subscription{name: "capped", topic: "sure"},
max_messages: 5,
return_immediately: false
)
end
test "streaming messages from subscription", %{bypass: bypass} do
pid = self()
Bypass.expect(bypass, fn conn ->
assert conn.method == "POST"
assert_content_type(conn, "application/json")
assert Regex.match?(~r(:pull$), conn.request_path)
send(pid, :subscription_pull)
{:ok, _body, conn} = Plug.Conn.read_body(conn)
Plug.Conn.send_resp(conn, 200, ~s({"receivedMessages": [
{"ackId":"123",
"message": {
"messageId": "messId",
"publishTime": "2015-10-02T15:01:23.045123456Z",
"attributes": {
"key" : "val"
},
"data": "eyJoZWxsbyI6IndvcmxkIn0="
}
},
{"ackId":"456",
"message": {
"messageId": "messId",
"publishTime": "2015-10-02T15:01:23.045123456Z",
"attributes": {
"key" : "val"
},
"data": "eyJoZWxsbyI6IndvcmxkIn0="
}
}
]}))
end)
messages =
%Subscription{name: "capped", topic: "sure"}
|> Subscription.stream()
|> Enum.take(3)
assert length(messages) == 3
assert_received :subscription_pull
assert_received :subscription_pull
refute_received :subscription_pull
end
test "no acknowledgement when no messages given" do
  # This implicitly tests that ByPass does not receive any request
  assert :ok == Subscription.ack(%Subscription{name: "ack-my-sub"}, [])
end

test "acknowledging a message", %{bypass: bypass} do
  Bypass.expect(bypass, fn conn ->
    assert conn.method == "POST"
    assert_content_type(conn, "application/json")

    # The ack request must carry the ackIds of every given message.
    {:ok, body, conn} = Plug.Conn.read_body(conn)
    body = body |> Jason.decode!()
    assert ["123", "321"] = body["ackIds"]
    Plug.Conn.send_resp(conn, 200, "{}\n")
  end)

  messages = [
    %Message{ack_id: "123"},
    %Message{ack_id: "321"}
  ]

  assert :ok == Subscription.ack(%Subscription{name: "ack-my-sub"}, messages)
end

test "no-op when no messages are given to extend" do
  # This implicitly tests that ByPass does not receive any request
  assert :ok == Subscription.extend(%Subscription{name: "extend-ack-deadlines"}, [], 600)
end

test "extending a message ack deadline", %{bypass: bypass} do
  Bypass.expect(bypass, fn conn ->
    assert conn.method == "POST"
    assert_content_type(conn, "application/json")

    # Both ackIds and the new deadline must be present in the payload.
    {:ok, body, conn} = Plug.Conn.read_body(conn)
    body = body |> Jason.decode!()
    assert ["123", "321"] = body["ackIds"]
    assert 600 = body["ackDeadlineSeconds"]
    Plug.Conn.send_resp(conn, 200, "{}\n")
  end)

  messages = [
    %Message{ack_id: "123"},
    %Message{ack_id: "321"}
  ]

  assert :ok == Subscription.extend(%Subscription{name: "extend-ack-deadlines"}, messages, 600)
end
# Asserts that the request's content-type header contains `type`.
# Crashes with MatchError if the header is absent (same as before).
defp assert_content_type(conn, type) do
  {"content-type", content_type} = List.keyfind(conn.req_headers, "content-type", 0)
  assert String.contains?(content_type, type)
end
end
| 33.687697 | 97 | 0.506414 |
082f8bdca8e393b73e5b738a5c0c169418da1cf4 | 2,798 | exs | Elixir | mix.exs | alexfreska/ash_graphql | d7e56bec11b4e714c19ccebfa7ba5821669b19cc | [
"MIT"
] | null | null | null | mix.exs | alexfreska/ash_graphql | d7e56bec11b4e714c19ccebfa7ba5821669b19cc | [
"MIT"
] | null | null | null | mix.exs | alexfreska/ash_graphql | d7e56bec11b4e714c19ccebfa7ba5821669b19cc | [
"MIT"
# Mix project definition for the `ash_graphql` package — an Absinthe-backed
# GraphQL extension for the Ash framework. Controls compilation, docs,
# packaging, and dependencies.
defmodule AshGraphql.MixProject do
use Mix.Project
# One-line summary published to Hex (see package/0 and project/0).
@description """
An absinthe-backed graphql extension for Ash
"""
@version "0.9.1"
# Core project configuration: coverage via ExCoveralls, Dialyzer with the
# :ash PLT, and ExDoc settings delegated to docs/0.
def project do
[
app: :ash_graphql,
version: @version,
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
package: package(),
aliases: aliases(),
deps: deps(),
elixirc_paths: elixirc_paths(Mix.env()),
dialyzer: [plt_add_apps: [:ash]],
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.github": :test
],
docs: docs(),
description: @description,
source_url: "https://github.com/ash-project/ash_graphql",
homepage_url: "https://github.com/ash-project/ash_graphql"
]
end
# Tests also compile the support helpers under test/support.
defp elixirc_paths(:test) do
elixirc_paths(:dev) ++ ["test/support"]
end
defp elixirc_paths(_) do
["lib"]
end
# ExDoc configuration: guides under documentation/ and module grouping by DSL.
defp docs do
[
main: "AshGraphql",
source_ref: "v#{@version}",
logo: "logos/small-logo.png",
extra_section: "GUIDES",
extras: [
"documentation/introduction/getting_started.md",
"documentation/multitenancy.md"
],
groups_for_extras: [
Introduction: Path.wildcard("documentation/introduction/*.md")
],
groups_for_modules: [
"Resource DSL": ~r/AshGraphql.Resource/,
"Api DSL": ~r/AshGraphql.Api/
]
]
end
# Hex package metadata.
defp package do
[
name: :ash_graphql,
licenses: ["MIT"],
links: %{
GitHub: "https://github.com/ash-project/ash_graphql"
}
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ash, ash_version("~> 1.26.2")},
{:absinthe_plug, "~> 1.4"},
{:absinthe, "~> 1.5.3"},
{:dataloader, "~> 1.0"},
{:jason, "~> 1.2"},
{:ex_doc, "~> 0.22", only: :dev, runtime: false},
{:ex_check, "~> 0.12.0", only: :dev},
{:credo, ">= 0.0.0", only: :dev, runtime: false},
{:dialyxir, ">= 0.0.0", only: :dev, runtime: false},
{:sobelow, ">= 0.0.0", only: :dev, runtime: false},
{:git_ops, "~> 2.0.1", only: :dev},
{:excoveralls, "~> 0.13.0", only: [:dev, :test]}
]
end
# The ASH_VERSION env var can pin the Ash dependency to a local checkout,
# the git master branch, or a specific version string.
defp ash_version(default_version) do
case System.get_env("ASH_VERSION") do
nil -> default_version
"local" -> [path: "../ash"]
"master" -> [git: "https://github.com/ash-project/ash.git"]
version -> "~> #{version}"
end
end
# Convenience task aliases (used by ex_check and CI).
defp aliases do
[
sobelow: "sobelow --skip",
credo: "credo --strict",
"ash.formatter": "ash.formatter --extensions AshGraphql.Resource,AshGraphql.Api"
]
end
end
| 24.761062 | 86 | 0.564332 |
082f9725773258ea3ecbda254bb03ce6f3a5015f | 1,105 | exs | Elixir | apps/definition_load_persist/test/load/persist_test.exs | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/definition_load_persist/test/load/persist_test.exs | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/definition_load_persist/test/load/persist_test.exs | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
defmodule Load.PersistTest do
  use ExUnit.Case
  import Checkov

  describe "new/1" do
    data_test "validates #{field} against bad input" do
      # A single-key map exercises exactly one field per table row.
      input = %{field => value}

      assert {:error, [%{input: value, path: [field]} | _]} = Load.Persist.new(input)

      where([
        [:field, :value],
        [:version, "1"],
        [:id, ""],
        [:id, nil],
        [:dataset_id, ""],
        [:dataset_id, nil],
        [:subset_id, ""],
        [:subset_id, nil],
        [:source, nil],
        [:source, ""],
        [:destination, nil],
        [:destination, ""]
      ])
    end

    test "will convert the schema back to structs" do
      original =
        Load.Persist.new!(
          id: "load1",
          dataset_id: "ds1",
          subset_id: "joe",
          source: "topic",
          destination: "table",
          schema: [
            %Dictionary.Type.String{name: "name"},
            %Dictionary.Type.Integer{name: "age"}
          ]
        )

      # Round-trip through JSON; the schema entries must be rehydrated into
      # their Dictionary struct types for the comparison to hold.
      round_tripped = original |> Jason.encode!() |> Load.Persist.from_json() |> elem(1)
      assert original == round_tripped
    end
  end
end
| 24.021739 | 85 | 0.495928 |
082fb3dc354e8ee5e90ac16ead4ccaf27e80994e | 5,649 | ex | Elixir | clients/fitness/lib/google_api/fitness/v1/model/data_source.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/fitness/lib/google_api/fitness/v1/model/data_source.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/fitness/lib/google_api/fitness/v1/model/data_source.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Fitness.V1.Model.DataSource do
@moduledoc """
Definition of a unique source of sensor data. Data sources can expose raw data coming from hardware sensors on local or companion devices. They can also expose derived data, created by transforming or merging other data sources. Multiple data sources can exist for the same data type. Every data point inserted into or read from this service has an associated data source. The data source contains enough information to uniquely identify its data, including the hardware device and the application that collected and/or transformed the data. It also holds useful metadata, such as the hardware and application versions, and the device type. Each data source produces a unique stream of data, with a unique identifier. Not all changes to data source affect the stream identifier, so that data collected by updated versions of the same application/device can still be considered to belong to the same data stream.
## Attributes
- application (Application): Information about an application which feeds sensor data into the platform. Defaults to: `null`.
- dataQualityStandard ([String.t]): Defaults to: `null`.
- Enum - one of
- dataStreamId (String.t): A unique identifier for the data stream produced by this data source. The identifier includes: - The physical device's manufacturer, model, and serial number (UID). - The application's package name or name. Package name is used when the data source was created by an Android application. The developer project number is used when the data source was created by a REST client. - The data source's type. - The data source's stream name. Note that not all attributes of the data source are used as part of the stream identifier. In particular, the version of the hardware/the application isn't used. This allows us to preserve the same stream through version updates. This also means that two DataSource objects may represent the same data stream even if they're not equal. The exact format of the data stream ID created by an Android application is: type:dataType.name:application.packageName:device.manufacturer:device.model:device.uid:dataStreamName The exact format of the data stream ID created by a REST client is: type:dataType.name:developer project number:device.manufacturer:device.model:device.uid:dataStreamName When any of the optional fields that comprise of the data stream ID are blank, they will be omitted from the data stream ID. The minimum viable data stream ID would be: type:dataType.name:developer project number Finally, the developer project number is obfuscated when read by any REST or Android client that did not create the data source. Only the data source creator will see the developer project number in clear and normal form. Defaults to: `null`.
- dataStreamName (String.t): The stream name uniquely identifies this particular data source among other data sources of the same type from the same underlying producer. Setting the stream name is optional, but should be done whenever an application exposes two streams for the same data type, or when a device has two equivalent sensors. Defaults to: `null`.
- dataType (DataType): The data type defines the schema for a stream of data being collected by, inserted into, or queried from the Fitness API. Defaults to: `null`.
- device (Device): Representation of an integrated device (such as a phone or a wearable) that can hold sensors. Defaults to: `null`.
- name (String.t): An end-user visible name for this data source. Defaults to: `null`.
- type (String.t): A constant describing the type of this data source. Indicates whether this data source produces raw or derived data. Defaults to: `null`.
- Enum - one of [derived, raw]
"""
# NOTE: generated code — the field/2 macros below come from
# GoogleApi.Gax.ModelBase and register each attribute with the Poison
# encode/decode pipeline implemented by the defimpls at the end of the file.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:application => GoogleApi.Fitness.V1.Model.Application.t(),
:dataQualityStandard => list(any()),
:dataStreamId => any(),
:dataStreamName => any(),
:dataType => GoogleApi.Fitness.V1.Model.DataType.t(),
:device => GoogleApi.Fitness.V1.Model.Device.t(),
:name => any(),
:type => any()
}
field(:application, as: GoogleApi.Fitness.V1.Model.Application)
field(:dataQualityStandard, type: :list)
field(:dataStreamId)
field(:dataStreamName)
field(:dataType, as: GoogleApi.Fitness.V1.Model.DataType)
field(:device, as: GoogleApi.Fitness.V1.Model.Device)
field(:name)
field(:type)
end
# Delegates Poison decoding to the model's generated decoder.
defimpl Poison.Decoder, for: GoogleApi.Fitness.V1.Model.DataSource do
  def decode(value, options), do: GoogleApi.Fitness.V1.Model.DataSource.decode(value, options)
end
# Delegates Poison encoding to the shared model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.Fitness.V1.Model.DataSource do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 79.56338 | 1,642 | 0.759958 |
082fbae788b0340949f4af7eb6a211677dc58baf | 2,750 | ex | Elixir | clients/spanner/lib/google_api/spanner/v1/model/commit_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/spanner/lib/google_api/spanner/v1/model/commit_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/spanner/lib/google_api/spanner/v1/model/commit_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Spanner.V1.Model.CommitRequest do
@moduledoc """
The request for Commit.
## Attributes
* `mutations` (*type:* `list(GoogleApi.Spanner.V1.Model.Mutation.t)`, *default:* `nil`) - The mutations to be executed when this transaction commits. All mutations are applied atomically, in the order they appear in this list.
* `returnCommitStats` (*type:* `boolean()`, *default:* `nil`) - If `true`, then statistics related to the transaction will be included in the CommitResponse. Default value is `false`.
* `singleUseTransaction` (*type:* `GoogleApi.Spanner.V1.Model.TransactionOptions.t`, *default:* `nil`) - Execute mutations in a temporary transaction. Note that unlike commit of a previously-started transaction, commit with a temporary transaction is non-idempotent. That is, if the `CommitRequest` is sent to Cloud Spanner more than once (for instance, due to retries in the application, or in the transport library), it is possible that the mutations are executed more than once. If this is undesirable, use BeginTransaction and Commit instead.
* `transactionId` (*type:* `String.t`, *default:* `nil`) - Commit a previously-started transaction.
"""
# NOTE: generated code — field/3 comes from GoogleApi.Gax.ModelBase and
# wires each attribute into the Poison encode/decode pipeline below.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:mutations => list(GoogleApi.Spanner.V1.Model.Mutation.t()),
:returnCommitStats => boolean(),
:singleUseTransaction => GoogleApi.Spanner.V1.Model.TransactionOptions.t(),
:transactionId => String.t()
}
field(:mutations, as: GoogleApi.Spanner.V1.Model.Mutation, type: :list)
field(:returnCommitStats)
field(:singleUseTransaction, as: GoogleApi.Spanner.V1.Model.TransactionOptions)
field(:transactionId)
end
# Delegates Poison decoding to the model's generated decoder.
defimpl Poison.Decoder, for: GoogleApi.Spanner.V1.Model.CommitRequest do
  def decode(value, options), do: GoogleApi.Spanner.V1.Model.CommitRequest.decode(value, options)
end
# Delegates Poison encoding to the shared model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.Spanner.V1.Model.CommitRequest do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 49.107143 | 550 | 0.742909 |
082fd909369b755a844a9eedef6ebb8faef27844 | 646 | ex | Elixir | lib/coherence/plugs/require_login.ex | harmon25/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | 1 | 2022-03-06T16:30:21.000Z | 2022-03-06T16:30:21.000Z | lib/coherence/plugs/require_login.ex | ysbaddaden/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | null | null | null | lib/coherence/plugs/require_login.ex | ysbaddaden/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
defmodule Coherence.RequireLogin do
  @moduledoc """
  Plug that lets authenticated connections through untouched; otherwise it
  sets an error flash, redirects to the logged-out URL and halts the
  pipeline.
  """
  @behaviour Plug

  import Coherence.ControllerHelpers, only: [logged_out_url: 1]
  import Plug.Conn
  import Phoenix.Controller, only: [put_flash: 3, redirect: 2]

  @dialyzer [
    {:nowarn_function, call: 2},
    {:nowarn_function, init: 1},
  ]

  # Wraps the plug options in a map once, at pipeline-build time.
  # FIX: the previous spec claimed `[tuple]`, but the function has always
  # returned a map — the spec now matches the implementation.
  @spec init(Keyword.t) :: map
  def init(options) do
    %{option: options}
  end

  @spec call(Plug.Conn.t, any) :: Plug.Conn.t
  def call(conn, _opts) do
    if Coherence.current_user(conn) do
      conn
    else
      conn
      |> put_flash(:error, "Invalid Request.")
      |> redirect(to: logged_out_url(conn))
      |> halt
    end
  end
end
| 20.83871 | 63 | 0.647059 |
083009277b9c0073abad19ed276b45c9da68942b | 1,222 | ex | Elixir | lib/speediview_ui/layout.ex | ConnorRigby/speediview | d840bcc5563314302ff3c99bbeeeace56519feea | [
"Apache-2.0"
] | 2 | 2020-06-25T22:28:19.000Z | 2020-08-27T02:36:20.000Z | lib/speediview_ui/layout.ex | ConnorRigby/megasquirt-dashboard | d840bcc5563314302ff3c99bbeeeace56519feea | [
"Apache-2.0"
] | null | null | null | lib/speediview_ui/layout.ex | ConnorRigby/megasquirt-dashboard | d840bcc5563314302ff3c99bbeeeace56519feea | [
"Apache-2.0"
defmodule SpeediViewUI.Layout do
  @moduledoc """
  Persists the dashboard layout to disk so it can be restored on the next
  startup.
  """

  @doc "Persist `layout` to disk as an Erlang external term."
  def save(layout) do
    # remote_reload(layout)
    encoded = :erlang.term_to_binary(layout)
    File.write!(layout_file(), encoded)
  end

  @doc "Read the layout from disk, falling back to an empty map."
  def load do
    # NOTE(review): the layout file is trusted local state;
    # :erlang.binary_to_term/1 would be unsafe on untrusted input.
    with {:ok, encoded} <- File.read(layout_file()) do
      :erlang.binary_to_term(encoded)
    else
      _ -> %{}
    end
  end

  # Resolves the layout path from application config, defaulting to
  # "layout.etf" in the current working directory.
  defp layout_file do
    config = Application.get_env(:speediview, __MODULE__, [])
    config[:layout_file] || "layout.etf"
  end

  # if Mix.Project.config()[:target] == "host" do
  # defp remote_reload(layout) do
  # Node.stop()
  # config = Mix.Project.config()
  # cookie = config[:releases][config[:app]][:cookie]
  # node_name = :"dash@sv-fd86.local"
  # IO.puts "reloading layout on #{node_name}"
  # {:ok, _} = Node.start(:"reload@0.0.0.0")
  # Node.set_cookie(String.to_atom(cookie))
  # true = Node.connect(node_name)
  # :rpc.call(node_name, __MODULE__, :save, [layout])
  # :rpc.call(node_name, Application, :stop, [:speediview])
  # :rpc.call(node_name, Application, :start, [:speediview])
  # end
  # else
  # defp remote_reload(_), do: :ok
  # end
end
| 29.095238 | 82 | 0.62275 |
08301dcf07fd0802dbba36a62efd234669b38a13 | 2,150 | exs | Elixir | test/interactive/rill/framework_at_glance.exs | rill-project/rill | 16d35f9dda1ec58190fa6bda9923372834ab456b | [
"MIT"
] | 1 | 2022-01-26T15:06:14.000Z | 2022-01-26T15:06:14.000Z | test/interactive/rill/framework_at_glance.exs | rill-project/rill | 16d35f9dda1ec58190fa6bda9923372834ab456b | [
"MIT"
] | null | null | null | test/interactive/rill/framework_at_glance.exs | rill-project/rill | 16d35f9dda1ec58190fa6bda9923372834ab456b | [
"MIT"
# Plain entity struct; the projection below folds stream events onto it.
defmodule Person do
  defstruct name: nil, age: nil
end
# Event emitted when a person's name changes.
defmodule Renamed do
# NOTE(review): `use Rill, :message` presumably injects message plumbing
# (serialization etc.) — confirm against the Rill framework docs.
use Rill, :message
defstruct [:name]
end
# Folds message-stream events onto a person entity.
defmodule Person.Projection do
use Rill, :projection
@impl Rill.EntityProjection
# A Renamed event overwrites the entity's name. Map.put also works if the
# entity is a plain map rather than a %Person{} struct.
def apply(%Renamed{} = renamed, person) do
Map.put(person, :name, renamed.name)
end
end
# Entity store for person entities.
defmodule Store do
# `use Rill, :store` wires this module to the message store:
# - entity: a blank %Person{} used as the projection seed
# - category: "person" — presumably the stream-name prefix (see the
#   "person-123"/"person-456" streams in Run.run/0) — TODO confirm
# - projection: the module that folds events onto the entity
use Rill, [
:store,
entity: %Person{},
category: "person",
projection: Person.Projection
]
end
# Consumer-side message handler, registered with Rill.Consumer in Run.run/0.
defmodule Handler do
use Rill, :handler
@impl Rill.Messaging.Handler
# Prints every Renamed message it receives; the session argument is unused.
def handle(%Renamed{} = renamed, _session) do
IO.inspect(renamed, label: :renamed)
end
end
# End-to-end demo of the Rill framework: writes events, projects them via
# Store, consumes them with Handler, then truncates the first store.
defmodule Run do
def run do
# {:ok, pid1} = Rill.MessageStore.Memory.start_link()
Rill.MessageStore.Mnesia.start()
# session = Rill.MessageStore.Memory.Session.new(pid1)
# Two independent Mnesia-backed store sessions.
session = Rill.MessageStore.Mnesia.Session.new("MemoryMnesia")
session2 = Rill.MessageStore.Mnesia.Session.new("MemoryMnesia2")
renamed = %Renamed{name: "Joe"}
renamed2 = %Renamed{name: "John"}
# Two events on person-123 (first session), one on person-456 (second).
Rill.MessageStore.write(session, renamed, "person-123")
Rill.MessageStore.write(session, renamed2, "person-123")
Rill.MessageStore.write(session2, renamed, "person-456")
# Projected entity plus its version after both writes.
[person, version] = Store.fetch(session, "123", include: [:version])
# Poll the "person" category and dispatch each message to Handler.
Supervisor.start_link(
[
{Rill.Consumer,
[
handlers: [Handler],
stream_name: "person",
identifier: "personIdentifier",
session: session,
poll_interval_milliseconds: 10000,
batch_size: 1
]}
],
strategy: :one_for_one
)
# Give the consumer time to pick up messages before inspecting.
:timer.sleep(1500)
# IO.inspect will output `renamed` content
IO.inspect(person, label: :person)
IO.inspect(version, label: :version)
Rill.MessageStore.Mnesia.info(session)
# Wipe the first session's data; the final fetches compare the emptied
# first store against the untouched second store.
Rill.MessageStore.Mnesia.truncate(session)
[empty_person, empty_version] =
Store.fetch(session, "123", include: [:version])
[person2, version2] = Store.fetch(session2, "456", include: [:version])
IO.inspect({empty_person, empty_version}, label: :empty_person)
IO.inspect({person2, version2}, label: :person2)
end
end
# Kick off the demo when this script is executed (e.g. via `mix run`).
Run.run()
| 25 | 75 | 0.662326 |
08302e6b3f5e678c1322acabf663eef96424589e | 700 | ex | Elixir | lib/Domain/Post.ex | bruh-boys/anonpost | 1828f45443408183ce184e90e5108e6c8fd88f3a | [
"MIT"
] | 7 | 2021-08-10T16:53:05.000Z | 2021-12-04T18:35:22.000Z | lib/Domain/Post.ex | bruh-boys/anonpost | 1828f45443408183ce184e90e5108e6c8fd88f3a | [
"MIT"
] | 1 | 2021-08-19T03:30:13.000Z | 2021-08-19T03:30:13.000Z | lib/Domain/Post.ex | bruh-boys/anonpost | 1828f45443408183ce184e90e5108e6c8fd88f3a | [
"MIT"
defmodule Anonpost.Domain.Post do
  defmodule Publ do
    # NOTE(review): `replygin_to` is a typo for `replying_to`, but the key is
    # part of the struct's public shape, so it is kept as-is.
    # NOTE(review): the `time` default is evaluated once at compile time, so
    # a bare %Publ{} carries the build timestamp; getAttr/2 always overwrites
    # it with the current time.
    defstruct replygin_to: "no one",
              board: "",
              username: "anon",
              title: "404 not found",
              body: "404 not found",
              time: DateTime.utc_now |> DateTime.to_unix,
              comments: []
  end

  # Builds a %Publ{} for `board` from the request params; a blank or
  # whitespace-only username falls back to "anon".
  def getAttr(conn, board) do
    params = conn.params
    trimmed_name = String.trim(params["username"])

    %Publ{
      board: board,
      username: if(trimmed_name == "", do: "anon", else: trimmed_name),
      title: params["title"],
      body: params["body"],
      time: DateTime.utc_now() |> DateTime.to_unix()
    }
  end

  # Converts a %Publ{} into a plain map (drops the struct tag).
  def struct_to_map(publ), do: Map.from_struct(publ)
end
| 24.137931 | 63 | 0.562857 |
08304ebab23c57a4b0d540184f153cabfcda9862 | 1,042 | ex | Elixir | lib/mix/tasks/cmake/clean.ex | shoz-f/mix_cmake | af37dc09986b3930d2265fde1783f0d5de0ff42c | [
"Apache-2.0"
] | 2 | 2021-10-04T09:49:27.000Z | 2021-10-06T05:14:26.000Z | lib/mix/tasks/cmake/clean.ex | shoz-f/mix_cmake | af37dc09986b3930d2265fde1783f0d5de0ff42c | [
"Apache-2.0"
] | 1 | 2021-12-01T01:25:21.000Z | 2021-12-01T01:25:21.000Z | lib/mix/tasks/cmake/clean.ex | shoz-f/mix_cmake | af37dc09986b3930d2265fde1783f0d5de0ff42c | [
"Apache-2.0"
defmodule Mix.Tasks.Cmake.Clean do
  use Mix.Task

  alias Mix.Tasks.Cmake
  require Cmake

  @shortdoc "Clean outputs of Cmake"

  @moduledoc """
  Clean outputs of Cmake.

      $ mix cmake.clean [opt]

  ## Command line options

  * `--all` - remove cmake build directory.
  * `--verbose` - print process detail
  """

  @switches [
    all: :boolean,
    verbose: :boolean
  ]

  @doc false
  def run(argv) do
    # Any parse error is returned unchanged, exactly as the old `with` did.
    case Cmake.parse_argv(argv, strict: @switches) do
      {:ok, opts, dirs, _cmake_args} -> cmd(dirs, opts, [])
      other -> other
    end
  end

  @doc false
  def cmd(), do: cmd([], [], [])

  @doc false
  def cmd(dirs, opts, _cmake_args \\ []) do
    cmake_config = Cmake.get_config()
    [build_dir, _source_dir] = Cmake.get_dirs(dirs, cmake_config)

    if opts[:all] do
      # --all: drop the whole build directory instead of running a clean.
      Cmake.remove_build(build_dir)
    else
      clean_args =
        ["--target", "clean"]
        |> Cmake.conj_front(opts[:verbose], ["--verbose"])

      cmake_env = Cmake.default_env()
      Cmake.cmake(build_dir, ["--build", "."] ++ clean_args, cmake_env)
    end
  end
end
| 20.84 | 85 | 0.603647 |
08305e5fc1b33e09a8473c9276e9493e04d0342a | 2,569 | ex | Elixir | lib/brave/character_generator.ex | st23am/brave | fb61e52f82e206843af1199e3830b03d6bc04f0e | [
"CC-BY-4.0"
] | null | null | null | lib/brave/character_generator.ex | st23am/brave | fb61e52f82e206843af1199e3830b03d6bc04f0e | [
"CC-BY-4.0"
] | 1 | 2021-03-09T19:28:25.000Z | 2021-03-09T19:28:25.000Z | lib/brave/character_generator.ex | st23am/brave | fb61e52f82e206843af1199e3830b03d6bc04f0e | [
"CC-BY-4.0"
defmodule Brave.CharacterGenerator do
  alias Brave.Character
  alias Brave.Equiptment

  # Alias for random/0 — builds a brand-new random character.
  def new, do: random()

  # Rolls a character from scratch: attributes, traits, name, HP, then gear.
  def random do
    %Character{}
    |> roll_attributes()
    |> determine_traits()
    |> generate_name()
    |> generate_hit_points()
    |> add_inventory()
  end

  # Rolls a full starting kit, derives armor stats from it, and stores the
  # gear list on the character.
  def add_inventory(character) do
    gear =
      [
        %{name: "2 days worth of Rations", slots: 1},
        Equiptment.random_armor(),
        Equiptment.random_helmets_and_shields(),
        Equiptment.random_general_gear(),
        Equiptment.random_general_gear_two(),
        Equiptment.random_dungeoneering_gear(),
        Equiptment.random_dungeoneering_gear(),
        Equiptment.random_weapon()
      ]
      |> List.flatten()
      |> maybe_add_ammo()

    gear
    |> Enum.reduce(character, &apply_armor/2)
    |> Map.put(:inventory, gear)
  end

  # Bare skin: defense 11 with an armor bonus of 1.
  def apply_armor(%{name: "No Armor", bonus: _value}, character) do
    Map.put(character, :armor, %{defense: 11, bonus: 1})
  end

  # Any other armored item stacks its bonus on the current armor bonus;
  # defense is always 10 plus the accumulated bonus.
  def apply_armor(%{name: _name, bonus: value}, character) do
    new_bonus = character.armor.bonus + value
    Map.put(character, :armor, %{defense: 10 + new_bonus, bonus: new_bonus})
  end

  # Items without a :bonus key leave the armor untouched.
  def apply_armor(_item, character), do: character

  # Appends ammo when the kit contains a ranged weapon.
  def maybe_add_ammo(gear) do
    ranged? =
      gear
      |> Enum.filter(&is_map/1)
      |> Enum.any?(fn %{name: name} -> name in ["Bow", "Crossbow"] end)

    if ranged? do
      gear
      |> Enum.concat(Equiptment.ammo())
      |> List.flatten()
    else
      gear
    end
  end

  def generate_name(character), do: Map.put(character, :name, Brave.Names.random())

  # 1d8 hit points.
  def generate_hit_points(character), do: Map.put(character, :hp, Enum.random(1..8))

  # Rolls every attribute listed by Character.attributes/0.
  def roll_attributes(%Character{} = character) do
    Enum.reduce(Character.attributes(), character, fn attribute, acc ->
      Map.put(acc, attribute, attribute_roll())
    end)
  end

  # The bonus is the lowest of three d6 rolls; defense is bonus + 10.
  def attribute_roll do
    lowest = Enum.min([Enum.random(1..6), Enum.random(1..6), Enum.random(1..6)])
    %{defense: lowest + 10, bonus: lowest}
  end

  # Picks one random option for every trait defined by Character.traits/0.
  def determine_traits(character) do
    traits =
      Enum.reduce(Character.traits(), %Brave.Character.Traits{}, fn {trait, options}, acc ->
        Map.put(acc, trait, Enum.random(options))
      end)

    Map.put(character, :traits, traits)
  end
end
| 23.787037 | 75 | 0.619696 |
08306be5368dcbf87a51cd10d9a546963babcb6c | 340 | ex | Elixir | lib/salty/stream_salsa208.ex | benknowles/libsalty | 38a10812865cb855bfa46cf266bb68d51a296f39 | [
"Apache-2.0"
] | 23 | 2017-07-04T19:29:43.000Z | 2021-02-16T19:44:38.000Z | lib/salty/stream_salsa208.ex | benknowles/libsalty | 38a10812865cb855bfa46cf266bb68d51a296f39 | [
"Apache-2.0"
] | 16 | 2017-08-13T15:31:25.000Z | 2019-06-19T14:44:13.000Z | lib/salty/stream_salsa208.ex | benknowles/libsalty | 38a10812865cb855bfa46cf266bb68d51a296f39 | [
"Apache-2.0"
defmodule Salty.Stream.Salsa208 do
  use Salty.Stream

  # Thin wrappers around the native bindings exposed through the `C` module
  # (brought into scope by `use Salty.Stream`).

  def noncebytes, do: C.stream_salsa208_NONCEBYTES()

  def keybytes, do: C.stream_salsa208_KEYBYTES()

  def stream(outlen, nonce, key), do: C.stream_salsa208(outlen, nonce, key)

  def xor(m, nonce, key), do: C.stream_salsa208_xor(m, nonce, key)
end
| 16.190476 | 41 | 0.705882 |
08307731ade2ed2918e5f0bd05b566034bd09f04 | 2,034 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/channel_audit_details.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/channel_audit_details.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/you_tube/lib/google_api/you_tube/v3/model/channel_audit_details.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.YouTube.V3.Model.ChannelAuditDetails do
@moduledoc """
The auditDetails object encapsulates channel data that is relevant for YouTube Partners during the audit process.
## Attributes
* `communityGuidelinesGoodStanding` (*type:* `boolean()`, *default:* `nil`) - Whether or not the channel respects the community guidelines.
* `contentIdClaimsGoodStanding` (*type:* `boolean()`, *default:* `nil`) - Whether or not the channel has any unresolved claims.
* `copyrightStrikesGoodStanding` (*type:* `boolean()`, *default:* `nil`) - Whether or not the channel has any copyright strikes.
"""
# NOTE: generated code — field/1 comes from GoogleApi.Gax.ModelBase and
# wires each attribute into the Poison encode/decode pipeline below.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:communityGuidelinesGoodStanding => boolean(),
:contentIdClaimsGoodStanding => boolean(),
:copyrightStrikesGoodStanding => boolean()
}
field(:communityGuidelinesGoodStanding)
field(:contentIdClaimsGoodStanding)
field(:copyrightStrikesGoodStanding)
end
# Delegates Poison decoding to the model's generated decoder.
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.ChannelAuditDetails do
  def decode(value, options), do: GoogleApi.YouTube.V3.Model.ChannelAuditDetails.decode(value, options)
end
# Delegates Poison encoding to the shared model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.ChannelAuditDetails do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 38.377358 | 143 | 0.747296 |
0830bcd05e6ff1f6da980a18db400ceb6f1187c7 | 1,803 | ex | Elixir | chromoid_web/lib/chromoid_web/router.ex | ConnorRigby/chromoid | 6424a9234227d18d7c287ded869caeb31511bb97 | [
"Apache-2.0"
] | 7 | 2020-11-18T11:29:20.000Z | 2022-01-16T03:16:14.000Z | chromoid_web/lib/chromoid_web/router.ex | ConnorRigby/chromoid | 6424a9234227d18d7c287ded869caeb31511bb97 | [
"Apache-2.0"
] | null | null | null | chromoid_web/lib/chromoid_web/router.ex | ConnorRigby/chromoid | 6424a9234227d18d7c287ded869caeb31511bb97 | [
"Apache-2.0"
defmodule ChromoidWeb.Router do
use ChromoidWeb, :router
import ChromoidWeb.UserAuth
import Phoenix.LiveDashboard.Router
# Standard browser stack: content negotiation, session + flash (plain and
# LiveView), root layout, CSRF protection, security headers, and finally
# the current-user lookup from ChromoidWeb.UserAuth.
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :fetch_live_flash
plug :put_root_layout, {ChromoidWeb.LayoutView, :root}
plug :protect_from_forgery
plug :put_secure_browser_headers
plug :fetch_current_user
end
# Public pages: landing page and the Discord OAuth callback.
scope "/", ChromoidWeb do
pipe_through :browser
get "/", PageController, :index
get "/discord/oauth", DiscordOauthController, :oauth
end
## Authentication routes
# Registration is only reachable while logged out.
scope "/", ChromoidWeb do
pipe_through [:browser, :redirect_if_user_is_authenticated]
get "/users/register", UserRegistrationController, :new
post "/users/register", UserRegistrationController, :create
end
# Requires an authenticated user: logout, device LiveViews, NFC
# webhook/action screens, and the MJPEG camera endpoints.
scope "/", ChromoidWeb do
pipe_through [:browser, :require_authenticated_user]
get "/logout", DiscordOauthController, :logout
live "/devices", DeviceLive, :index
live "/devices/:id", DeviceLive, :show
live "/devices/:id/nfc", DeviceNFCLive, :show
live "/devices/:device_id/nfc/:nfc_id/webhooks", NFCWebhookLive, :show
live "/devices/:device_id/nfc/:nfc_id/actions", NFCActionLive, :show
get "/devices/:id/stream", DeviceController, :stream
get "/devices/:id/live.mjpg", DeviceController, :live
end
# LiveDashboard is restricted to admin users.
scope "/", ChromoidWeb do
pipe_through [:browser, :require_authenticated_user, :require_admin_user]
live_dashboard "/admin/dashboard", metrics: ChromoidWeb.Telemetry
end
# Account confirmation carries no auth requirement, so the links from
# confirmation emails work regardless of login state.
scope "/", ChromoidWeb do
pipe_through [:browser]
get "/users/confirm", UserConfirmationController, :new
post "/users/confirm", UserConfirmationController, :create
get "/users/confirm/:token", UserConfirmationController, :confirm
end
end
| 31.086207 | 77 | 0.724903 |
0830de31d352546f0e64cfd1231caa342eb9a14e | 1,603 | ex | Elixir | test/support/model_case.ex | Project-ShangriLa/sana_server_phoenix | d2ea4cc023d02e7249ae9267bb2b41a212b79ce7 | [
"Apache-2.0"
] | 5 | 2015-11-07T11:27:08.000Z | 2017-06-23T00:54:20.000Z | test/support/model_case.ex | Project-ShangriLa/sana_server_phoenix | d2ea4cc023d02e7249ae9267bb2b41a212b79ce7 | [
"Apache-2.0"
] | null | null | null | test/support/model_case.ex | Project-ShangriLa/sana_server_phoenix | d2ea4cc023d02e7249ae9267bb2b41a212b79ce7 | [
"Apache-2.0"
defmodule SanaServerPhoenix.ModelCase do
  @moduledoc """
  Test-case template for model tests.

  `use SanaServerPhoenix.ModelCase` pulls in the repo alias, `Ecto.Model`,
  `Ecto.Query.from/2` and the helpers defined below. Unless a test is tagged
  `async: true`, it runs inside a transaction that is restarted before the
  test begins, so database state never leaks between tests.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      alias SanaServerPhoenix.Repo

      import Ecto.Model
      import Ecto.Query, only: [from: 2]
      import SanaServerPhoenix.ModelCase
    end
  end

  setup tags do
    # Async tests cannot share the SQL sandbox, so the transaction is only
    # restarted for synchronous tests. Either way the setup returns :ok.
    if tags[:async] do
      :ok
    else
      Ecto.Adapters.SQL.restart_test_transaction(SanaServerPhoenix.Repo, [])
      :ok
    end
  end

  @doc """
  Returns the list of changeset errors produced when `data` is applied to
  `model`.

  Given a User model that lists `:name` as a required field and validates
  `:password` to be safe, it would return:

      iex> errors_on(%User{}, %{password: "password"})
      [password: "is unsafe", name: "is blank"]

  You could then write your assertion like:

      assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})

  You can also create the changeset manually and read its errors field
  directly:

      iex> changeset = User.changeset(%User{}, password: "password")
      iex> {:password, "is unsafe"} in changeset.errors
      true
  """
  def errors_on(model, data), do: model.__struct__.changeset(model, data).errors
end
| 26.716667 | 84 | 0.694323 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.