hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ffcebbab06176fc90b4b90bcf3026ef635140592 | 1,839 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/model/range_partitioning_range.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/big_query/lib/google_api/big_query/v2/model/range_partitioning_range.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/big_query/lib/google_api/big_query/v2/model/range_partitioning_range.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQuery.V2.Model.RangePartitioningRange do
  @moduledoc """
  [TrustedTester] [Required] Defines the ranges for range partitioning.

  ## Attributes

  *   `end` (*type:* `String.t`, *default:* `nil`) - [TrustedTester] [Required] The end of range partitioning, exclusive.
  *   `interval` (*type:* `String.t`, *default:* `nil`) - [TrustedTester] [Required] The width of each interval.
  *   `start` (*type:* `String.t`, *default:* `nil`) - [TrustedTester] [Required] The start of range partitioning, inclusive.
  """
  # Generated model: ModelBase supplies the struct, decode/2 and field handling.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :end => String.t() | nil,
          :interval => String.t() | nil,
          :start => String.t() | nil
        }
  # JSON field registrations consumed by the ModelBase macros.
  field(:end)
  field(:interval)
  field(:start)
end
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.RangePartitioningRange do
  # Decoding is implemented by the model's generated `decode/2`; delegate to it.
  defdelegate decode(value, options), to: GoogleApi.BigQuery.V2.Model.RangePartitioningRange
end
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.RangePartitioningRange do
  # Encoding is shared by all generated models through ModelBase.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 34.698113 | 125 | 0.712344 |
ffcec8e93a90f813d2b1e3f5f21a3824f3e44ec2 | 216 | ex | Elixir | exercises/practice/resistor-color-trio/lib/resistor_color_trio.ex | ryanzidago/elixir | d00ca743340fcc328c70ee351274f91b57fd4c8d | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/resistor-color-trio/lib/resistor_color_trio.ex | ryanzidago/elixir | d00ca743340fcc328c70ee351274f91b57fd4c8d | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/resistor-color-trio/lib/resistor_color_trio.ex | ryanzidago/elixir | d00ca743340fcc328c70ee351274f91b57fd4c8d | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule ResistorColorTrio do
@doc """
Calculate the resistance value in ohm or kiloohm from resistor colors
"""
@spec label(colors :: [atom]) :: {number, :ohms | :kiloohms}
def label(colors) do
end
end
| 24 | 71 | 0.689815 |
ffceecff6c41ddd11d73d342effe2bc7ab76edf8 | 1,495 | ex | Elixir | clients/dataflow/lib/google_api/dataflow/v1b3/model/file_io_details.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/file_io_details.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/file_io_details.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dataflow.V1b3.Model.FileIoDetails do
  @moduledoc """
  Metadata for a File connector used by the job.

  ## Attributes

  - filePattern (String.t): File Pattern used to access files by the connector. Defaults to: `null`.
  """
  # Generated model: ModelBase supplies the struct, decode/2 and field handling.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :filePattern => any()
        }
  # JSON field registration consumed by the ModelBase macros.
  field(:filePattern)
end
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.FileIoDetails do
  # Decoding is implemented by the model's generated `decode/2`; delegate to it.
  defdelegate decode(value, options), to: GoogleApi.Dataflow.V1b3.Model.FileIoDetails
end
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.FileIoDetails do
  # Encoding is shared by all generated models through ModelBase.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 31.145833 | 100 | 0.745151 |
ffcf01cfb88c638e23951335fc4f0299fd6cb0c3 | 213 | exs | Elixir | apps/bookmarker/priv/repo/migrations/20161013144009_add_bookmarks_tags_unique_index.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 4 | 2019-10-04T16:11:15.000Z | 2021-08-18T21:00:13.000Z | apps/bookmarker/priv/repo/migrations/20161013144009_add_bookmarks_tags_unique_index.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 5 | 2020-03-16T23:52:25.000Z | 2021-09-03T16:52:17.000Z | apps/bookmarker/priv/repo/migrations/20161013144009_add_bookmarks_tags_unique_index.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | null | null | null | defmodule Bookmarker.Repo.Migrations.AddBookmarksTagsUniqueIndex do
use Ecto.Migration
  def change do
    # A bookmark may carry a given tag at most once; the named composite
    # unique index enforces that at the database level.
    create unique_index(:bookmarks_tags, [:bookmark_id, :tag_id], name: "bookmark_tag_composite")
  end
end
| 26.625 | 96 | 0.798122 |
ffcf1bdfd9c0f5bdb499e06e1c14c3b87fe6bfe2 | 303 | ex | Elixir | hippo-backend/lib/graphql/types/types.ex | Tmw/Hippo | 6b100f4eb43c74dffff38dd974a00266f3dc99a2 | [
"MIT"
] | 8 | 2019-05-18T10:14:06.000Z | 2021-07-31T20:58:55.000Z | hippo-backend/lib/graphql/types/types.ex | Tmw/Hippo | 6b100f4eb43c74dffff38dd974a00266f3dc99a2 | [
"MIT"
] | 7 | 2019-07-20T17:48:03.000Z | 2022-02-26T10:41:56.000Z | hippo-backend/lib/graphql/types/types.ex | Tmw/Hippo | 6b100f4eb43c74dffff38dd974a00266f3dc99a2 | [
"MIT"
] | 1 | 2019-07-04T02:30:09.000Z | 2019-07-04T02:30:09.000Z | defmodule Hippo.GraphQL.Types do
  @moduledoc false
  use Absinthe.Schema.Notation
  # Aggregates every GraphQL type module so schemas only import this module.
  import_types(Hippo.GraphQL.Types.Identifier)
  import_types(Hippo.GraphQL.Types.Project)
  import_types(Hippo.GraphQL.Types.Lane)
  import_types(Hippo.GraphQL.Types.Card)
  import_types(Hippo.GraphQL.Types.Event)
end
| 27.545455 | 46 | 0.808581 |
ffcf1dbbcd1a8533b815751edef8f8e4e3b7e041 | 1,997 | exs | Elixir | test/support/migrations.exs | ArthurPai/ex_admin | 8894a5e2d4a8ca0c0bfeceded16df3b7058ca665 | [
"MIT"
] | null | null | null | test/support/migrations.exs | ArthurPai/ex_admin | 8894a5e2d4a8ca0c0bfeceded16df3b7058ca665 | [
"MIT"
] | null | null | null | test/support/migrations.exs | ArthurPai/ex_admin | 8894a5e2d4a8ca0c0bfeceded16df3b7058ca665 | [
"MIT"
] | 1 | 2021-04-14T16:18:31.000Z | 2021-04-14T16:18:31.000Z | defmodule TestExAdmin.Migrations do
use Ecto.Migration
  def change do
    # Users plus a roles join table (users_roles) for many-to-many roles.
    create table(:users) do
      add :name, :string
      add :email, :string
      add :active, :boolean
    end
    create table(:roles) do
      add :name, :string
    end
    create table(:users_roles) do
      add :user_id, references(:users, on_delete: :delete_all)
      add :role_id, references(:roles, on_delete: :delete_all)
      timestamps()
    end
    create index(:users_roles, [:user_id])
    create index(:users_roles, [:role_id])
    # Products belong to a user and are removed with them.
    create table(:products) do
      add :title, :string
      add :price, :decimal
      add :user_id, references(:users, on_delete: :delete_all)
    end
    # Table using a string primary key (:name) instead of a serial id.
    create table(:noids, primary_key: false) do
      add :name, :string, primary_key: true
      add :description, :text
      add :company, :string
      add :user_id, references(:users, on_delete: :nothing)
    end
    create index(:noids, [:user_id])
    # Table with no primary key at all.
    create table(:noprimarys, primary_key: false) do
      add :index, :integer
      add :name, :string
      add :description, :string
      timestamps()
    end
    create table(:simple) do
      add :name, :string
      add :description, :string
    end
    create table(:restricteds) do
      add :name, :string
      add :description, :string
    end
    # Contacts and phone numbers, linked many-to-many below.
    create table(:contacts) do
      add :first_name, :string
      add :last_name, :string
      timestamps()
    end
    create table(:phone_numbers) do
      add :number, :string
      add :label, :string
      timestamps()
    end
    create table(:contacts_phone_numbers) do
      add :contact_id, references(:contacts, on_delete: :delete_all)
      add :phone_number_id, references(:phone_numbers, on_delete: :delete_all)
    end
    create index(:contacts_phone_numbers, [:contact_id])
    create index(:contacts_phone_numbers, [:phone_number_id])
    # Table keyed by a UUID primary key named :key.
    create table(:uuid_schemas, primary_key: false) do
      add :key, :uuid, primary_key: true
      add :name, :string
    end
  end
| 24.654321 | 78 | 0.641462 |
ffcf2ebc70bf4d78a0adea93d6c7be977738758e | 5,843 | ex | Elixir | lib/oban/migrations.ex | stefanchrobot/oban | 961015b92b9cae442d2834ad220a85ccdcc9da2d | [
"Apache-2.0"
] | null | null | null | lib/oban/migrations.ex | stefanchrobot/oban | 961015b92b9cae442d2834ad220a85ccdcc9da2d | [
"Apache-2.0"
] | null | null | null | lib/oban/migrations.ex | stefanchrobot/oban | 961015b92b9cae442d2834ad220a85ccdcc9da2d | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Migrations do
@moduledoc """
Migrations create and modify the database tables Oban needs to function.
## Usage
To use migrations in your application you'll need to generate an `Ecto.Migration` that wraps
calls to `Oban.Migrations`:
```bash
mix ecto.gen.migration add_oban
```
Open the generated migration in your editor and call the `up` and `down` functions on
`Oban.Migrations`:
```elixir
defmodule MyApp.Repo.Migrations.AddOban do
use Ecto.Migration
def up, do: Oban.Migrations.up()
def down, do: Oban.Migrations.down()
end
```
This will run all of Oban's versioned migrations for your database.
Now, run the migration to create the table:
```bash
mix ecto.migrate
```
Migrations between versions are idempotent. As new versions are released, you
may need to run additional migrations. To do this, generate a new migration:
```bash
mix ecto.gen.migration upgrade_oban_to_v11
```
Open the generated migration in your editor and call the `up` and `down`
functions on `Oban.Migrations`, passing a version number:
```elixir
defmodule MyApp.Repo.Migrations.UpgradeObanToV11 do
use Ecto.Migration
def up, do: Oban.Migrations.up(version: 11)
def down, do: Oban.Migrations.down(version: 11)
end
```
## Isolation with Prefixes
Oban supports namespacing through PostgreSQL schemas, also called "prefixes" in Ecto. With
prefixes your jobs table can reside outside of your primary schema (usually public) and you can
have multiple separate job tables.
To use a prefix you first have to specify it within your migration:
```elixir
defmodule MyApp.Repo.Migrations.AddPrefixedObanJobsTable do
use Ecto.Migration
def up, do: Oban.Migrations.up(prefix: "private")
def down, do: Oban.Migrations.down(prefix: "private")
end
```
The migration will create the "private" schema and all tables, functions and triggers within
that schema. With the database migrated you'll then specify the prefix in your configuration:
```elixir
config :my_app, Oban,
prefix: "private",
...
```
In some cases, for example if your "private" schema already exists and your database user in
production doesn't have permissions to create a new schema, trying to create the schema from the
migration will result in an error. In such situations, it may be useful to inhibit the creation
of the "private" schema:
```elixir
defmodule MyApp.Repo.Migrations.AddPrefixedObanJobsTable do
use Ecto.Migration
def up, do: Oban.Migrations.up(prefix: "private", create_schema: false)
def down, do: Oban.Migrations.down(prefix: "private")
end
```
"""
  use Ecto.Migration
  alias Oban.{Config, Repo}
  # First and latest migration versions shipped with this release.
  @initial_version 1
  @current_version 11
  # Schema used when the caller does not pass a :prefix option.
  @default_prefix "public"
@doc """
Run the `up` changes for all migrations between the initial version and the current version.
## Example
Run all migrations up to the current version:
Oban.Migrations.up()
Run migrations up to a specified version:
Oban.Migrations.up(version: 2)
Run migrations in an alternate prefix:
Oban.Migrations.up(prefix: "payments")
Run migrations in an alternate prefix but don't try to create the schema:
Oban.Migrations.up(prefix: "payments", create_schema: false)
"""
def up(opts \\ []) when is_list(opts) do
prefix = Keyword.get(opts, :prefix, @default_prefix)
version = Keyword.get(opts, :version, @current_version)
create_schema = Keyword.get(opts, :create_schema, prefix != "public")
initial = migrated_version(repo(), prefix)
cond do
initial == 0 ->
change(@initial_version..version, :up, %{prefix: prefix, create_schema: create_schema})
initial < version ->
change((initial + 1)..version, :up, %{prefix: prefix})
true ->
:ok
end
end
@doc """
Run the `down` changes for all migrations between the current version and the initial version.
## Example
Run all migrations from current version down to the first:
Oban.Migrations.down()
Run migrations down to and including a specified version:
Oban.Migrations.down(version: 5)
Run migrations in an alternate prefix:
Oban.Migrations.down(prefix: "payments")
"""
def down(opts \\ []) when is_list(opts) do
prefix = Keyword.get(opts, :prefix, @default_prefix)
version = Keyword.get(opts, :version, @initial_version)
initial = max(migrated_version(repo(), prefix), @initial_version)
if initial >= version do
change(initial..version, :down, %{prefix: prefix})
end
end
@doc false
def initial_version, do: @initial_version
@doc false
def current_version, do: @current_version
  @doc false
  # Reads the version stamped as a SQL comment on the oban_jobs table
  # (written by record_version/2). Returns 0 when the table does not exist
  # in the prefix or carries no version comment yet.
  def migrated_version(repo, prefix) do
    query = """
    SELECT description
    FROM pg_class
    LEFT JOIN pg_description ON pg_description.objoid = pg_class.oid
    LEFT JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace
    WHERE pg_class.relname = 'oban_jobs'
    AND pg_namespace.nspname = '#{prefix}'
    """
    conf = Config.new(repo: repo, prefix: prefix)
    case Repo.query(conf, query) do
      {:ok, %{rows: [[version]]}} when is_binary(version) -> String.to_integer(version)
      _ -> 0
    end
  end
defp change(range, direction, opts) do
for index <- range do
pad_idx = String.pad_leading(to_string(index), 2, "0")
[__MODULE__, "V#{pad_idx}"]
|> Module.concat()
|> apply(direction, [opts])
end
case direction do
:up -> record_version(opts, Enum.max(range))
:down -> record_version(opts, Enum.min(range) - 1)
end
end
  # Version 0 means fully rolled back — nothing to record.
  defp record_version(_opts, 0), do: :ok
  # The migrated version is stored as a comment on the oban_jobs table,
  # which is what migrated_version/2 reads back.
  defp record_version(%{prefix: prefix}, version) do
    execute "COMMENT ON TABLE #{prefix}.oban_jobs IS '#{version}'"
  end
end
| 26.802752 | 98 | 0.693308 |
ffcf5d315f63ef63f79c2b216d39e738400123b9 | 1,404 | exs | Elixir | lib/perspective/utilities/identifier/tests/identifier_macro_test.exs | backmath/perspective | a0a577d0ffb06805b64e4dcb171a093e051884b0 | [
"MIT"
] | 2 | 2020-04-24T19:43:06.000Z | 2020-04-24T19:52:27.000Z | lib/perspective/utilities/identifier/tests/identifier_macro_test.exs | backmath/perspective | a0a577d0ffb06805b64e4dcb171a093e051884b0 | [
"MIT"
] | null | null | null | lib/perspective/utilities/identifier/tests/identifier_macro_test.exs | backmath/perspective | a0a577d0ffb06805b64e4dcb171a093e051884b0 | [
"MIT"
] | null | null | null | defmodule Perspective.IdentifierMacro.Test do
  use Perspective.TestCase
  # Fixture module that takes the macro's defaults (no custom path).
  defmodule DefaultExample do
    use Perspective.IdentifierMacro
  end
  # Fixture module that customizes its path via the `path/1` macro.
  defmodule UsageExample do
    use Perspective.IdentifierMacro
    path([id: id]) do
      "usage-example/#{id}"
    end
  end
test "using Perspective.IdentifierMacro installs path/0, defaulting to the module's name" do
assert "Perspective.IdentifierMacro.Test.DefaultExample" == DefaultExample.path()
end
test "using Perspective.IdentifierMacro installs uri/0, defaulting to the module's appID-based URI", %{app_id: app_id} do
assert "#{app_id}/Perspective.IdentifierMacro.Test.DefaultExample" == DefaultExample.uri()
end
test "using Perspective.IdentifierMacro installs name/0, defaulting to a globally-addressed uri", %{app_id: app_id} do
assert {:global, "#{app_id}/Perspective.IdentifierMacro.Test.DefaultExample"} == DefaultExample.name()
end
test "calling IdentifierMacro.path(state) installs path/1" do
assert "usage-example/abc" == UsageExample.path([id: "abc"])
end
test "calling IdentifierMacro.path(state) installs uri/1", %{app_id: app_id} do
assert "#{app_id}/usage-example/abc" == UsageExample.uri([id: "abc"])
end
test "calling IdentifierMacro.path(state) installs name/1", %{app_id: app_id} do
assert {:global, "#{app_id}/usage-example/abc"} == UsageExample.name([id: "abc"])
end
end
| 35.1 | 123 | 0.731481 |
ffcf99adaf555e3fae30aaf8a43a7ae6ac53ccfc | 1,170 | ex | Elixir | lib/codes/codes_z39.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_z39.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_z39.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_Z39 do
alias IcdCode.ICDCode
  # Each function below returns the fully-populated ICDCode struct for one
  # Z39.x diagnosis code. The data is generated; do not hand-edit the values.
  def _Z390 do
    %ICDCode{full_code: "Z390",
        category_code: "Z39",
        short_code: "0",
        full_name: "Encounter for care and examination of mother immediately after delivery",
        short_name: "Encounter for care and examination of mother immediately after delivery",
        category_name: "Encounter for care and examination of mother immediately after delivery"
    }
  end
  def _Z391 do
    %ICDCode{full_code: "Z391",
        category_code: "Z39",
        short_code: "1",
        full_name: "Encounter for care and examination of lactating mother",
        short_name: "Encounter for care and examination of lactating mother",
        category_name: "Encounter for care and examination of lactating mother"
    }
  end
  def _Z392 do
    %ICDCode{full_code: "Z392",
        category_code: "Z39",
        short_code: "2",
        full_name: "Encounter for routine postpartum follow-up",
        short_name: "Encounter for routine postpartum follow-up",
        category_name: "Encounter for routine postpartum follow-up"
    }
  end
end
| 34.411765 | 98 | 0.65812 |
ffcfccc836706dc64c8c323f839ff39a770d53d5 | 385 | ex | Elixir | lib/elsol/query/terms.ex | boonious/elsol | ad983b7dddae74f78411334d850dfe37ac6ffbb2 | [
"Apache-2.0"
] | 9 | 2016-06-30T16:49:49.000Z | 2021-04-06T12:12:36.000Z | lib/elsol/query/terms.ex | boonious/elsol | ad983b7dddae74f78411334d850dfe37ac6ffbb2 | [
"Apache-2.0"
] | 11 | 2016-10-11T04:07:01.000Z | 2017-05-10T23:38:17.000Z | lib/elsol/query/terms.ex | boonious/elsol | ad983b7dddae74f78411334d850dfe37ac6ffbb2 | [
"Apache-2.0"
] | 4 | 2016-04-16T02:20:12.000Z | 2020-05-09T04:43:10.000Z | defmodule Elsol.Query.Terms do
  # Fields appear to map 1:1 onto Solr TermsComponent request parameters
  # (underscores presumably become dots when the query string is built —
  # TODO confirm against the query builder).
  defstruct url: nil, name: "/terms", terms: true, terms_fl: nil,
            terms_limit: nil, terms_lower: nil, terms_lower_incl: nil, terms_mincount: nil,
            terms_maxcount: nil, terms_prefix: nil, terms_raw: nil,
            terms_regex: nil, terms_regex_flag: nil, terms_sort: nil,
            terms_upper: nil, terms_upper_incl: nil
end | 42.777778 | 91 | 0.675325 |
ffcfd4199e7d435e3bced89c9223cf0c6a0c3a18 | 223 | exs | Elixir | priv/repo/migrations/20190818154452_add_group_id_to_ideas.exs | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | [
"MIT"
] | 523 | 2017-03-15T15:21:11.000Z | 2022-03-14T03:04:18.000Z | priv/repo/migrations/20190818154452_add_group_id_to_ideas.exs | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | [
"MIT"
] | 524 | 2017-03-16T18:31:09.000Z | 2022-02-26T10:02:06.000Z | priv/repo/migrations/20190818154452_add_group_id_to_ideas.exs | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | [
"MIT"
] | 60 | 2017-05-01T18:02:28.000Z | 2022-03-04T21:04:56.000Z | defmodule RemoteRetro.Repo.Migrations.AddGroupIdToIdeas do
use Ecto.Migration
  def change do
    # Link each idea to the group it belongs to.
    alter table(:ideas) do
      add :group_id, references("groups")
    end
    # Speed up "ideas in group" lookups.
    create index(:ideas, [:group_id])
  end
end
| 18.583333 | 58 | 0.704036 |
ffcff48b32d9aeae985e71c68e0f159d557a42ca | 176 | ex | Elixir | lib/extensions/persistent_session/store/persistent_session_cache.ex | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | 1 | 2021-06-25T10:36:01.000Z | 2021-06-25T10:36:01.000Z | lib/extensions/persistent_session/store/persistent_session_cache.ex | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | null | null | null | lib/extensions/persistent_session/store/persistent_session_cache.ex | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | 1 | 2020-07-13T01:11:17.000Z | 2020-07-13T01:11:17.000Z | defmodule PowPersistentSession.Store.PersistentSessionCache do
  @moduledoc false
  # Persistent-session tokens live for 30 days under their own cache namespace.
  use Pow.Store.Base,
    ttl: :timer.hours(24) * 30,
    namespace: "persistent_session"
end
| 25.142857 | 62 | 0.761364 |
ffd002a580dd5ba2d5af68872029f0c9123d5be3 | 21,454 | ex | Elixir | lib/phoenix/test/conn_test.ex | sjBao/phoenix | e0be71759d83c15f6d1a852d9872d6de1ea0db24 | [
"MIT"
] | 1 | 2019-03-30T16:26:27.000Z | 2019-03-30T16:26:27.000Z | lib/phoenix/test/conn_test.ex | sjBao/phoenix | e0be71759d83c15f6d1a852d9872d6de1ea0db24 | [
"MIT"
] | null | null | null | lib/phoenix/test/conn_test.ex | sjBao/phoenix | e0be71759d83c15f6d1a852d9872d6de1ea0db24 | [
"MIT"
] | null | null | null | defmodule Phoenix.ConnTest do
@moduledoc """
Conveniences for testing Phoenix endpoints and connection related helpers.
You likely want to use this module or make it part of your `ExUnit.CaseTemplate`.
Once used, this module automatically imports all functions defined here as
well as the functions in `Plug.Conn`.
## Endpoint testing
`Phoenix.ConnTest` typically works against endpoints. That's the preferred way
to test anything that your router dispatches to:
@endpoint MyAppWeb.Endpoint
test "says welcome on the home page" do
conn = get(build_conn(), "/")
assert conn.resp_body =~ "Welcome!"
end
test "logs in" do
conn = post(build_conn(), "/login", [username: "john", password: "doe"])
assert conn.resp_body =~ "Logged in!"
end
The `@endpoint` module attribute contains the endpoint under testing,
most commonly your application endpoint itself. If you are using the
MyApp.ConnCase generated by Phoenix, it is automatically set for you.
As in your router and controllers, the connection is the main abstraction
in testing. `build_conn()` returns a new connection and functions in this
module can be used to manipulate the connection before dispatching
to the endpoint.
For example, one could set the accepts header for json requests as
follows:
build_conn()
|> put_req_header("accept", "application/json")
|> get("/")
You can also create your own helpers, such as `json_conn()` that uses
`build_conn/0` and `put_req_header/3`, so you avoid repeating the connection
setup throughout your tests.
## Controller testing
The functions in this module can also be used for controller testing.
While endpoint testing is preferred over controller testing, especially
since the controller in Phoenix plays an integration role between your
domain and your views, unit testing controllers may be helpful in some
situations.
For such cases, you need to set the `@endpoint` attribute to your controller
and pass an atom representing the action to dispatch:
@endpoint MyAppWeb.HomeController
test "says welcome on the home page" do
conn = get(build_conn(), :index)
assert conn.resp_body =~ "Welcome!"
end
Keep in mind that, once the `@endpoint` variable is set, all tests after
setting it will be affected.
## Views testing
Under other circumstances, you may be testing a view or another layer that
requires a connection for processing. For such cases, a connection can be
created using the `build_conn/3` helper:
MyApp.UserView.render("hello.html", conn: build_conn(:get, "/"))
While `build_conn/0` returns a connection with no request information to it,
`build_conn/3` returns a connection with the given request information already
filled in.
## Recycling
Browsers implement a storage by using cookies. When a cookie is set in the
response, the browser stores it and sends it in the next request.
To emulate this behaviour, this module provides the idea of recycling.
The `recycle/1` function receives a connection and returns a new connection,
similar to the one returned by `build_conn/0` with all the response cookies
from the previous connection defined as request headers. This is useful when
testing multiple routes that require cookies or session to work.
Keep in mind Phoenix will automatically recycle the connection between
dispatches. This usually works out well most times, but it may discard
information if you are modifying the connection before the next dispatch:
# No recycling as the connection is fresh
conn = get(build_conn(), "/")
# The connection is recycled, creating a new one behind the scenes
conn = post(conn, "/login")
# We can also recycle manually in case we want custom headers
conn =
conn
|> recycle()
|> put_req_header("x-special", "nice")
# No recycling as we did it explicitly
conn = delete(conn, "/logout")
Recycling also recycles the "accept" and "authorization" headers,
as well as peer data information.
"""
  @doc false
  # Imports Plug.Conn helpers and this module's helpers into the test case.
  defmacro __using__(_) do
    quote do
      import Plug.Conn
      import Phoenix.ConnTest
    end
  end
alias Plug.Conn
import ExUnit.Assertions, only: [flunk: 1]
@doc """
Creates a connection to be used in upcoming requests.
"""
@spec build_conn() :: Conn.t
def build_conn() do
build_conn(:get, "/", nil)
end
@doc """
Deprecated version of `conn/0`. Use `build_conn/0` instead.
"""
@spec conn() :: Conn.t
def conn() do
IO.warn "using conn/0 to build a connection is deprecated. Use build_conn/0 instead"
build_conn()
end
@doc """
Creates a connection to be used in upcoming requests
with a preset method, path and body.
This is useful when a specific connection is required
for testing a plug or a particular function.
"""
@spec build_conn(atom | binary, binary, binary | list | map | nil) :: Conn.t
def build_conn(method, path, params_or_body \\ nil) do
Plug.Adapters.Test.Conn.conn(%Conn{}, method, path, params_or_body)
|> Conn.put_private(:plug_skip_csrf_protection, true)
|> Conn.put_private(:phoenix_recycled, true)
end
@doc """
Deprecated version of `conn/3`. Use `build_conn/3` instead.
"""
@spec conn(atom | binary, binary, binary | list | map | nil) :: Conn.t
def conn(method, path, params_or_body \\ nil) do
IO.warn """
using conn/3 to build a connection is deprecated. Use build_conn/3 instead.
#{Exception.format_stacktrace}
"""
build_conn(method, path, params_or_body)
end
  # HTTP verbs for which a convenience macro (get/3, post/3, ...) is generated.
  @http_methods [:get, :post, :put, :patch, :delete, :options, :connect, :trace, :head]
  for method <- @http_methods do
    @doc """
    Dispatches to the current endpoint.
    See `dispatch/5` for more information.
    """
    # Each generated macro expands into dispatch/5 against the caller's @endpoint.
    defmacro unquote(method)(conn, path_or_action, params_or_body \\ nil) do
      method = unquote(method)
      quote do
        Phoenix.ConnTest.dispatch(unquote(conn), @endpoint, unquote(method),
          unquote(path_or_action), unquote(params_or_body))
      end
    end
  end
@doc """
Dispatches the connection to the given endpoint.
When invoked via `get/3`, `post/3` and friends, the endpoint
is automatically retrieved from the `@endpoint` module
attribute, otherwise it must be given as an argument.
The connection will be configured with the given `method`,
`path_or_action` and `params_or_body`.
If `path_or_action` is a string, it is considered to be the
request path and stored as so in the connection. If an atom,
it is assumed to be an action and the connection is dispatched
to the given action.
## Parameters and body
This function, as well as `get/3`, `post/3` and friends, accepts the
request body or parameters as last argument:
get(build_conn(), "/", some: "param")
get(build_conn(), "/", "some=param&url=encoded")
The allowed values are:
* `nil` - meaning there is no body
* a binary - containing a request body. For such cases, `:headers`
must be given as option with a content-type
* a map or list - containing the parameters which will automatically
set the content-type to multipart. The map or list may contain
other lists or maps and all entries will be normalized to string
keys
* a struct - unlike other maps, a struct will be passed through as-is
without normalizing its entries
"""
def dispatch(conn, endpoint, method, path_or_action, params_or_body \\ nil)
def dispatch(%Plug.Conn{} = conn, endpoint, method, path_or_action, params_or_body) do
if is_nil(endpoint) do
raise "no @endpoint set in test case"
end
if is_binary(params_or_body) and is_nil(List.keyfind(conn.req_headers, "content-type", 0)) do
raise ArgumentError, "a content-type header is required when setting " <>
"a binary body in a test connection"
end
conn
|> ensure_recycled()
|> dispatch_endpoint(endpoint, method, path_or_action, params_or_body)
|> Conn.put_private(:phoenix_recycled, false)
|> from_set_to_sent()
end
def dispatch(conn, _endpoint, method, _path_or_action, _params_or_body) do
raise ArgumentError, "expected first argument to #{method} to be a " <>
"%Plug.Conn{}, got #{inspect conn}"
end
defp dispatch_endpoint(conn, endpoint, method, path, params_or_body) when is_binary(path) do
conn
|> Plug.Adapters.Test.Conn.conn(method, path, params_or_body)
|> endpoint.call(endpoint.init([]))
end
defp dispatch_endpoint(conn, endpoint, method, action, params_or_body) when is_atom(action) do
conn
|> Plug.Adapters.Test.Conn.conn(method, "/", params_or_body)
|> endpoint.call(endpoint.init(action))
end
  # A conn whose response was only "set" must be sent before assertions run.
  defp from_set_to_sent(%Conn{state: :set} = conn), do: Conn.send_resp(conn)
  defp from_set_to_sent(conn), do: conn
@doc """
Puts a request cookie.
"""
@spec put_req_cookie(Conn.t, binary, binary) :: Conn.t
defdelegate put_req_cookie(conn, key, value), to: Plug.Test
@doc """
Deletes a request cookie.
"""
@spec delete_req_cookie(Conn.t, binary) :: Conn.t
defdelegate delete_req_cookie(conn, key), to: Plug.Test
@doc """
Fetches the flash storage.
"""
@spec fetch_flash(Conn.t) :: Conn.t
defdelegate fetch_flash(conn), to: Phoenix.Controller
@doc """
Gets the whole flash storage.
"""
@spec get_flash(Conn.t) :: Conn.t
defdelegate get_flash(conn), to: Phoenix.Controller
@doc """
Gets the given key from the flash storage.
"""
@spec get_flash(Conn.t, term) :: Conn.t
defdelegate get_flash(conn, key), to: Phoenix.Controller
@doc """
Puts the given value under key in the flash storage.
"""
@spec put_flash(Conn.t, term, term) :: Conn.t
defdelegate put_flash(conn, key, value), to: Phoenix.Controller
@doc """
Clears up the flash storage.
"""
@spec clear_flash(Conn.t) :: Conn.t
defdelegate clear_flash(conn), to: Phoenix.Controller
@doc """
Returns the content type as long as it matches the given format.
## Examples
# Assert we have an html response with utf-8 charset
assert response_content_type(conn, :html) =~ "charset=utf-8"
"""
@spec response_content_type(Conn.t, atom) :: String.t | no_return
def response_content_type(conn, format) when is_atom(format) do
  # Exactly one "content-type" response header must be present and it must
  # match the requested format; every other situation raises.
  case Conn.get_resp_header(conn, "content-type") do
    [header] ->
      if response_content_type?(header, format) do
        header
      else
        raise "expected content-type for #{format}, got: #{inspect header}"
      end

    [] ->
      raise "no content-type was set, expected a #{format} response"

    _multiple ->
      raise "more than one content-type was set, expected a #{format} response"
  end
end
# True when `header` parses as a media type whose extension, subtype, or
# "+format" suffix matches `format`. A failed parse returns `false` through
# the non-matching `with` clause (parse_content_type/1 returns `false`).
defp response_content_type?(header, format) do
  with {part, subpart} <- parse_content_type(header) do
    format_string = Atom.to_string(format)
    mime_type = part <> "/" <> subpart

    format_string in MIME.extensions(mime_type) or
      format_string == subpart or
      String.ends_with?(subpart, "+" <> format_string)
  end
end
# Parses a content-type header into `{type, subtype}`, dropping any parameters
# (e.g. charset); returns `false` when the header is not a valid media type.
defp parse_content_type(header) do
  with {:ok, part, subpart, _params} <- Plug.Conn.Utils.content_type(header) do
    {part, subpart}
  else
    _invalid -> false
  end
end
@doc """
Asserts the given status code and returns the response body
if one was set or sent.
## Examples
conn = get(build_conn(), "/")
assert response(conn, 200) =~ "hello world"
"""
@spec response(Conn.t, status :: integer | atom) :: binary | no_return
# A conn whose response was never set nor sent is almost always a test that
# forgot to rebind `conn`; raise with a hint.
def response(%Conn{state: :unset}, _status) do
  raise """
  expected connection to have a response but no response was set/sent.
  Please verify that you assign to "conn" after a request:
  conn = get(conn, "/")
  assert html_response(conn) =~ "Hello"
  """
end

def response(%Conn{status: status, resp_body: body}, given) do
  # Normalize atoms such as :ok / :not_found into integer codes, then require
  # an exact status match before handing back the body.
  case Plug.Conn.Status.code(given) do
    ^status ->
      body

    given ->
      raise "expected response with status #{given}, got: #{status}, with body:\n#{inspect(body)}"
  end
end
@doc """
Asserts the given status code, that we have an html response and
returns the response body if one was set or sent.
## Examples
assert html_response(conn, 200) =~ "<html>"
"""
@spec html_response(Conn.t, status :: integer | atom) :: String.t | no_return
def html_response(conn, status) do
body = response(conn, status)
_ = response_content_type(conn, :html)
body
end
@doc """
Asserts the given status code, that we have an text response and
returns the response body if one was set or sent.
## Examples
assert text_response(conn, 200) =~ "hello"
"""
@spec text_response(Conn.t, status :: integer | atom) :: String.t | no_return
def text_response(conn, status) do
body = response(conn, status)
_ = response_content_type(conn, :text)
body
end
@doc """
Asserts the given status code, that we have an json response and
returns the decoded JSON response if one was set or sent.
## Examples
body = json_response(conn, 200)
assert "can't be blank" in body["errors"]
"""
@spec json_response(Conn.t, status :: integer | atom) :: map | no_return
def json_response(conn, status) do
body = response(conn, status)
_ = response_content_type(conn, :json)
Phoenix.json_library().decode!(body)
end
@doc """
Returns the location header from the given redirect response.
Raises if the response does not match the redirect status code
(defaults to 302).
## Examples
assert redirected_to(conn) =~ "/foo/bar"
assert redirected_to(conn, 301) =~ "/foo/bar"
assert redirected_to(conn, :moved_permanently) =~ "/foo/bar"
"""
@spec redirected_to(Conn.t, status :: non_neg_integer) :: String.t
def redirected_to(conn, status \\ 302)
def redirected_to(%Conn{state: :unset}, _status) do
raise "expected connection to have redirected but no response was set/sent"
end
def redirected_to(conn, status) when is_atom(status) do
redirected_to(conn, Plug.Conn.Status.code(status))
end
def redirected_to(%Conn{status: status} = conn, status) do
location = Conn.get_resp_header(conn, "location") |> List.first
location || raise "no location header was set on redirected_to"
end
def redirected_to(conn, status) do
raise "expected redirection with status #{status}, got: #{conn.status}"
end
@doc """
Recycles the connection.
Recycling receives a connection and returns a new connection,
containing cookies and relevant information from the given one.
This emulates behaviour performed by browsers where cookies
returned in the response are available in following requests.
By default, only the headers "accept" and "authorization" are
recycled. However, a custom set of headers can be specified by
passing a list of strings representing its names as the second
argument of the function.
Note `recycle/1` is automatically invoked when dispatching
to the endpoint, unless the connection has already been
recycled.
"""
@spec recycle(Conn.t, [String.t]) :: Conn.t
def recycle(conn, headers \\ ~w(accept authorization)) do
  # Start from a fresh conn that keeps the original host, then carry over
  # cookies, peer data, and the requested request headers.
  fresh_conn = %{build_conn() | host: conn.host}

  fresh_conn
  |> Plug.Test.recycle_cookies(conn)
  |> Plug.Test.put_peer_data(Plug.Conn.get_peer_data(conn))
  |> copy_headers(conn.req_headers, headers)
end
# Prepends to `conn.req_headers` every `{name, value}` pair from `headers`
# whose name is in the `copy` allow-list.
defp copy_headers(conn, headers, copy) do
  kept = Enum.filter(headers, fn {name, _value} -> name in copy end)
  %{conn | req_headers: kept ++ conn.req_headers}
end
@doc """
Ensures the connection is recycled if it wasn't already.
See `recycle/1` for more information.
"""
@spec ensure_recycled(Conn.t) :: Conn.t
def ensure_recycled(conn) do
  # The :phoenix_recycled private flag is truthy once a conn has been recycled
  # (dispatch resets it to false after each request).
  already_recycled? = Map.get(conn.private, :phoenix_recycled, false)

  if already_recycled? do
    conn
  else
    recycle(conn)
  end
end
@doc """
Calls the Endpoint and bypasses Router match.
Useful for unit testing Plugs where Endpoint and/or
router pipeline plugs are required for proper setup.
Note the use of `get("/")` following `bypass_through` in the examples below.
To execute the plug pipelines, you must issue a request against the router.
Most often, you can simpy send a GET request against the root path, but you
may also specify a different method or path which your pipelines may operate
against. If you ommit the request you may find that your tests return
a `flash not fetched, call fetch_flash/2` or similar error.
## Examples
For example, imagine you are testing an authentication
plug in isolation, but you need to invoke the Endpoint plugs
and `:browser` pipeline of your Router for session and flash
related dependencies:
conn =
conn
|> bypass_through(MyAppWeb.Router, [:browser])
|> get("/")
|> MyApp.RequireAuthentication.call([])
assert conn.halted
Alternatively, you could invoke only the Endpoint, and Router:
conn =
conn
|> bypass_through(MyAppWeb.Router, [])
|> get("/")
|> MyApp.RequireAuthentication.call([])
assert conn.halted
Or only invoke the Endpoint's plugs:
conn =
conn
|> bypass_through()
|> get("/")
|> MyApp.RequireAuthentication.call([])
assert conn.halted
"""
@spec bypass_through(Conn.t) :: Conn.t
def bypass_through(conn) do
  # :all instructs the endpoint's test instrumentation to skip router matching
  # entirely, running only the endpoint plugs.
  Plug.Conn.put_private(conn, :phoenix_bypass, :all)
end

@doc """
Calls the Endpoint and bypasses Router match.

See `bypass_through/1`.
"""
@spec bypass_through(Conn.t, module, atom | list) :: Conn.t
def bypass_through(conn, router, pipelines \\ []) do
  # Runs the given router pipelines (a single atom is wrapped into a list)
  # without dispatching to any route.
  Plug.Conn.put_private(conn, :phoenix_bypass, {router, List.wrap(pipelines)})
end
@doc """
Returns the matched params from the URL the connection was redirected to.
Uses the provided `%Plug.Conn{}`s router matched in the previous request.
Raises if the response's location header is not set.
## Examples
assert redirected_to(conn) =~ "/posts/123"
assert %{id: "123"} = redirected_params(conn)
"""
@spec redirected_params(Conn.t) :: map
def redirected_params(%Plug.Conn{} = conn) do
  # Re-match the redirect location against the router used by the previous
  # request; raises (via redirected_to/1) when no location header is set.
  router = Phoenix.Controller.router_module(conn)
  %URI{path: path, host: host} = conn |> redirected_to() |> URI.parse()
  path_info = split_path(path)
  {conn, _pipes, _dispatch} = router.__match_route__(conn, "GET", path_info, host || conn.host)
  # Keys come from route definitions, so String.to_atom/1 operates on a
  # bounded set here.
  Enum.into(conn.path_params, %{}, fn {key, val} -> {String.to_atom(key), val} end)
end
# Splits a request path into its non-empty segments, e.g. "/a//b" -> ["a", "b"].
defp split_path(path) do
  path
  |> String.split("/")
  |> Enum.reject(&(&1 == ""))
end
@doc """
Asserts an error was wrapped and sent with the given status.
Useful for testing actions that you expect raise an error and have
the response wrapped in an HTTP status, with content usually rendered
by your MyApp.ErrorView.
The function accepts a status either as an integer HTTP status or
atom, such as `404` or `:not_found`. If an error is raised, a
3-tuple of the wrapped response is returned matching the
status, headers, and body of the response:
{404, [{"content-type", "text/html"} | _], "Page not found"}
## Examples
assert_error_sent :not_found, fn ->
get(build_conn(), "/users/not-found")
end
response = assert_error_sent 404, fn ->
get(build_conn(), "/users/not-found")
end
assert {404, [_h | _t], "Page not found"} = response
"""
@spec assert_error_sent(integer | atom, function) :: {integer, list, term}
def assert_error_sent(status_int_or_atom, func) do
  # Normalize atom statuses (e.g. :not_found) to integer codes.
  expected_status = Plug.Conn.Status.code(status_int_or_atom)
  # Drain any response messages left in the mailbox by earlier requests so
  # they are not mistaken for this request's response.
  discard_previously_sent()

  result =
    func
    |> wrap_request()
    |> receive_response(expected_status)

  # Drain again so this request's leftovers do not leak into later assertions.
  discard_previously_sent()
  result
end
# The request completed without raising: that is a failure for this assertion,
# distinguished by whether a response was actually sent.
defp receive_response({:ok, conn}, expected_status) do
  if conn.state == :sent do
    flunk "expected error to be sent as #{expected_status} status, but response sent #{conn.status} without error"
  else
    flunk "expected error to be sent as #{expected_status} status, but no error happened"
  end
end

# The request raised: look in the mailbox for the response the test adapter
# sent (tagged with a reference) and check its status.
defp receive_response({:error, {_kind, exception, stack}}, expected_status) do
  receive do
    {ref, {^expected_status, headers, body}} when is_reference(ref) ->
      {expected_status, headers, body}

    {ref, {sent_status, _headers, _body}} when is_reference(ref) ->
      reraise ExUnit.AssertionError.exception("""
      expected error to be sent as #{expected_status} status, but got #{sent_status} from:

      #{Exception.format_banner(:error, exception)}
      """), stack
  after 0 ->
    # No response message at all: the error occurred before anything was sent.
    reraise ExUnit.AssertionError.exception("""
    expected error to be sent as #{expected_status} status, but got an error with no response from:

    #{Exception.format_banner(:error, exception)}
    """), stack
  end
end
# Recursively drains response messages ({ref, {status, headers, body}}) and
# Plug's {:plug_conn, :sent} notifications from the mailbox; returns :ok as
# soon as neither kind of message is waiting (after 0 = non-blocking).
defp discard_previously_sent() do
  receive do
    {ref, {_, _, _}} when is_reference(ref) -> discard_previously_sent()
    {:plug_conn, :sent} -> discard_previously_sent()
  after
    0 -> :ok
  end
end
# Runs the request function, converting any raise/throw/exit into an
# {:error, {kind, error, stacktrace}} tuple for receive_response/2.
defp wrap_request(func) do
  try do
    {:ok, func.()}
  catch
    # __STACKTRACE__ replaces the deprecated System.stacktrace/0 (deprecated
    # since Elixir 1.7) and is only valid inside rescue/catch, as here.
    kind, error -> {:error, {kind, error, __STACKTRACE__}}
  end
end
end
| 32.020896 | 116 | 0.67978 |
ffd0146802d465c3e89ef63ceb380211feac843a | 1,800 | ex | Elixir | lib/mux/alarm.ex | fishcakez/elixir-mux | 8be71643a79d8eddd93b78b9c74fd14cac436a21 | [
"Apache-2.0"
] | 2 | 2017-07-26T07:28:33.000Z | 2017-08-08T16:26:05.000Z | lib/mux/alarm.ex | fishcakez/elixir-mux | 8be71643a79d8eddd93b78b9c74fd14cac436a21 | [
"Apache-2.0"
] | null | null | null | lib/mux/alarm.ex | fishcakez/elixir-mux | 8be71643a79d8eddd93b78b9c74fd14cac436a21 | [
"Apache-2.0"
] | 1 | 2021-03-13T23:02:29.000Z | 2021-03-13T23:02:29.000Z | defmodule Mux.Alarm do
@moduledoc false
@behaviour :gen_event
@spec new_table(atom) :: :ets.tab
def new_table(table),
do: :ets.new(table, [:named_table, :public, {:read_concurrency, true}])
@spec delete_table(:ets.tab) :: true
def delete_table(table),
do: :ets.delete(table)
@spec swap_sup_handler(:ets.tab) :: :ok
def swap_sup_handler(table) do
handler = {__MODULE__, table}
remove = {handler, :swap}
add = {handler, table}
:ok = :gen_event.swap_sup_handler(:alarm_handler, remove, add)
end
@spec register(:ets.tab, any, reference) :: boolean
def register(table, id, ref) do
{:ok, _} = Registry.register(Module.concat(table, Registry), id, ref)
handler = {__MODULE__, table}
# sync with handler to ensure no missed set/clears
{:ok, set?} = :gen_event.call(:alarm_handler, handler, {:set?, id})
set?
end
def init({table, _}),
do: {:ok, {table, Module.concat(table, Registry)}}
def handle_event({:set_alarm, {id, _} = alarm}, {table, registry} = state) do
:ets.insert(table, alarm)
# need to serialize notify to prevent out of order set/clear on same alarm
Registry.dispatch(registry, id, ¬ify(&1, :SET, id), [parallel: false])
{:ok, state}
end
def handle_event({:clear_alarm, id}, {table, registry} = state) do
:ets.delete(table, id)
Registry.dispatch(registry, id, ¬ify(&1, :CLEAR, id), [parallel: false])
{:ok, state}
end
def handle_call({:set?, id}, {table, _} = state),
do: {:ok, {:ok, :ets.member(table, id)}, state}
def handle_info(_, state),
do: {:ok, state}
def code_change(_, state, _),
do: {:ok, state}
def terminate(_, _),
do: :ok
defp notify(subs, tag, id),
do: Enum.each(subs, fn {pid, ref} -> send(pid, {tag, ref, id}) end)
end
| 29.508197 | 79 | 0.635556 |
ffd0378a8e389c25858d81fb34aee3a444a77d03 | 161 | exs | Elixir | config/prod.exs | r26D/elixir-rasa-action-server | df41d36189f4ed737752343457dfe06d37a8e758 | [
"MIT"
] | 1 | 2021-04-19T18:05:10.000Z | 2021-04-19T18:05:10.000Z | config/prod.exs | r26D/elixir-rasa-action-server | df41d36189f4ed737752343457dfe06d37a8e758 | [
"MIT"
] | null | null | null | config/prod.exs | r26D/elixir-rasa-action-server | df41d36189f4ed737752343457dfe06d37a8e758 | [
"MIT"
] | null | null | null | use Mix.Config
config :rasa_action_server,
port: String.to_integer(System.get_env("PORT") || "4444"),
redirect_url: System.get_env("REDIRECT_URL") | 32.2 | 65 | 0.708075 |
ffd038ec1db3be26e138b98eb1623ca394f09f15 | 9,088 | ex | Elixir | lib/floki/selector/parser.ex | rzane/floki | 16595deef0e6ec7f472ace31347b44c4e8bdc36c | [
"MIT"
] | 1,778 | 2015-01-07T14:12:31.000Z | 2022-03-29T22:42:48.000Z | lib/floki/selector/parser.ex | rzane/floki | 16595deef0e6ec7f472ace31347b44c4e8bdc36c | [
"MIT"
] | 279 | 2015-01-01T15:54:50.000Z | 2022-03-28T18:06:03.000Z | deps/floki/lib/floki/selector/parser.ex | adrianomota/blog | ef3b2d2ed54f038368ead8234d76c18983caa75b | [
"MIT"
] | 166 | 2015-04-24T20:48:02.000Z | 2022-03-28T17:29:05.000Z | defmodule Floki.Selector.Parser do
require Logger
@moduledoc false
# Parses a list of tokens returned from `Tokenizer` and transform into a `Selector`.
alias Floki.Selector
alias Selector.{Functional, Tokenizer, PseudoClass, AttributeSelector, Combinator}
@attr_match_types [
:equal,
:dash_match,
:includes,
:prefix_match,
:sufix_match,
:substring_match
]
# Returns a list of `Selector` structs with the parsed selectors.
def parse(selector) when is_binary(selector) do
token_list = Tokenizer.tokenize(selector)
parse(token_list)
end
def parse(tokens) do
do_parse_all(tokens, [])
end
defp do_parse_all([], selectors) do
Enum.reverse(selectors)
end
defp do_parse_all(tokens, selectors) do
{selector, remaining_tokens} = do_parse(tokens, %Selector{})
do_parse_all(remaining_tokens, [selector | selectors])
end
defp do_parse([], selector), do: {selector, []}
defp do_parse([{:close_parentesis, _} | t], selector), do: {selector, t}
defp do_parse([{:comma, _} | t], selector), do: {selector, t}
defp do_parse([{:identifier, _, namespace}, {:namespace_pipe, _} | t], selector) do
do_parse(t, %{selector | namespace: to_string(namespace)})
end
defp do_parse([{:identifier, _, type} | t], selector) do
do_parse(t, %{selector | type: to_string(type)})
end
defp do_parse([{'*', _} | t], selector) do
do_parse(t, %{selector | type: "*"})
end
defp do_parse([{:hash, _, id} | t], selector) do
do_parse(t, %{selector | id: to_string(id) |> String.replace("\\.", ".")})
end
defp do_parse([{:class, _, class} | t], selector) do
do_parse(t, %{selector | classes: [to_string(class) | selector.classes]})
end
defp do_parse([{'[', _} | t], selector) do
{t, result} = consume_attribute(t)
do_parse(t, %{selector | attributes: [result | selector.attributes]})
end
defp do_parse([{:pseudo_not, _} | t], selector) do
{t, pseudo_not_class} = do_parse_pseudo_not(t, %PseudoClass{name: "not", value: []})
pseudo_classes = Enum.reject([pseudo_not_class | selector.pseudo_classes], &is_nil(&1))
do_parse(t, %{selector | pseudo_classes: pseudo_classes})
end
defp do_parse([{:pseudo, _, pseudo_class} | t], selector) do
pseudo_classes = [%PseudoClass{name: to_string(pseudo_class)} | selector.pseudo_classes]
do_parse(t, %{selector | pseudo_classes: pseudo_classes})
end
defp do_parse([{:pseudo_class_int, _, pseudo_class_int} | t], selector) do
[pseudo_class | pseudo_classes] = selector.pseudo_classes
do_parse(t, %{
selector
| pseudo_classes: [%{pseudo_class | value: pseudo_class_int} | pseudo_classes]
})
end
defp do_parse([{:pseudo_class_even, _} | t], selector) do
[pseudo_class | pseudo_classes] = selector.pseudo_classes
do_parse(t, %{selector | pseudo_classes: [%{pseudo_class | value: "even"} | pseudo_classes]})
end
defp do_parse([{:pseudo_class_odd, _} | t], selector) do
[pseudo_class | pseudo_classes] = selector.pseudo_classes
do_parse(t, %{selector | pseudo_classes: [%{pseudo_class | value: "odd"} | pseudo_classes]})
end
defp do_parse([{:pseudo_class_pattern, _, pattern} | t], selector) do
[pseudo_class | pseudo_classes] = selector.pseudo_classes
value =
case Functional.parse(pattern) do
:invalid -> to_string(pattern)
{:ok, value} -> value
end
do_parse(t, %{selector | pseudo_classes: [%{pseudo_class | value: value} | pseudo_classes]})
end
defp do_parse([{:pseudo_class_quoted, _, pattern} | t], selector) do
[pseudo_class | pseudo_classes] = selector.pseudo_classes
do_parse(t, %{
selector
| pseudo_classes: [%{pseudo_class | value: to_string(pattern)} | pseudo_classes]
})
end
defp do_parse([{:space, _} | t], selector) do
{remaining_tokens, combinator} = consume_combinator(t, :descendant)
{%{selector | combinator: combinator}, remaining_tokens}
end
defp do_parse([{:greater, _} | t], selector) do
{remaining_tokens, combinator} = consume_combinator(t, :child)
{%{selector | combinator: combinator}, remaining_tokens}
end
defp do_parse([{:plus, _} | t], selector) do
{remaining_tokens, combinator} = consume_combinator(t, :sibling)
{%{selector | combinator: combinator}, remaining_tokens}
end
defp do_parse([{:tilde, _} | t], selector) do
{remaining_tokens, combinator} = consume_combinator(t, :general_sibling)
{%{selector | combinator: combinator}, remaining_tokens}
end
defp do_parse([{:unknown, _, unknown} | t], selector) do
Logger.info(fn -> "Unknown token #{inspect(unknown)}. Ignoring." end)
do_parse(t, selector)
end
defp consume_attribute(tokens), do: consume_attribute(:consuming, tokens, %AttributeSelector{})
defp consume_attribute(_, [], attr_selector), do: {[], attr_selector}
defp consume_attribute(:done, tokens, attr_selector), do: {tokens, attr_selector}
defp consume_attribute(:consuming, [{:identifier, _, identifier} | t], attr_selector) do
new_selector = set_attribute_name_or_value(attr_selector, identifier)
consume_attribute(:consuming, t, new_selector)
end
defp consume_attribute(:consuming, [{match_type, _} | t], attr_selector)
when match_type in @attr_match_types do
new_selector = %{attr_selector | match_type: match_type}
consume_attribute(:consuming, t, new_selector)
end
defp consume_attribute(:consuming, [{:quoted, _, value} | t], attr_selector) do
new_selector = %{attr_selector | value: to_string(value)}
consume_attribute(:consuming, t, new_selector)
end
defp consume_attribute(:consuming, [{:attribute_identifier, _, value} | t], attr_selector) do
flag = String.at(to_string(value), -2)
new_selector = %{attr_selector | flag: flag}
consume_attribute(:done, t, new_selector)
end
defp consume_attribute(:consuming, [{']', _} | t], attr_selector) do
consume_attribute(:done, t, attr_selector)
end
defp consume_attribute(:consuming, [unknown | t], attr_selector) do
Logger.info(fn -> "Unknown token #{inspect(unknown)}. Ignoring." end)
consume_attribute(:consuming, t, attr_selector)
end
defp set_attribute_name_or_value(attr_selector, identifier) do
# When match type is not defined, this is an attribute name.
# Otherwise, it is an attribute value.
case attr_selector.match_type do
nil -> %{attr_selector | attribute: to_string(identifier)}
_ -> %{attr_selector | value: to_string(identifier)}
end
end
defp consume_combinator(tokens, combinator_type) when is_atom(combinator_type) do
consume_combinator(tokens, %Combinator{match_type: combinator_type, selector: %Selector{}})
end
defp consume_combinator([], combinator), do: {[], combinator}
defp consume_combinator(tokens, combinator) do
{selector, remaining_tokens} = do_parse(tokens, %Selector{})
{remaining_tokens, %{combinator | selector: selector}}
end
defp do_parse_pseudo_not([], pseudo_class) do
{[], pseudo_class}
end
defp do_parse_pseudo_not([{:close_parentesis, _} | t], pseudo_class) do
{t, pseudo_class}
end
defp do_parse_pseudo_not([{:space, _} | t], pseudo_class) do
do_parse_pseudo_not(t, pseudo_class)
end
defp do_parse_pseudo_not(tokens, pseudo_class) do
do_parse_pseudo_not(tokens, %Selector{}, pseudo_class)
end
defp do_parse_pseudo_not([], pseudo_not_selector, pseudo_class) do
pseudo_class = update_pseudo_not_value(pseudo_class, pseudo_not_selector)
{[], pseudo_class}
end
defp do_parse_pseudo_not([{:close_parentesis, _} | t], pseudo_not_selector, pseudo_class) do
pseudo_class = update_pseudo_not_value(pseudo_class, pseudo_not_selector)
{t, pseudo_class}
end
defp do_parse_pseudo_not([{:comma, _} | t], pseudo_not_selector, pseudo_class) do
pseudo_class = update_pseudo_not_value(pseudo_class, pseudo_not_selector)
do_parse_pseudo_not(t, pseudo_class)
end
defp do_parse_pseudo_not([{:space, _} | t], pseudo_not_selector, pseudo_class) do
do_parse_pseudo_not(t, pseudo_not_selector, pseudo_class)
end
defp do_parse_pseudo_not(tokens = [{'[', _} | _t], pseudo_not_selector, pseudo_class) do
{pseudo_not_selector, remaining_tokens} = do_parse(tokens, pseudo_not_selector)
pseudo_class = update_pseudo_not_value(pseudo_class, pseudo_not_selector)
do_parse_pseudo_not(remaining_tokens, pseudo_class)
end
defp do_parse_pseudo_not([next_token | t], pseudo_not_selector, pseudo_class) do
{pseudo_not_selector, _} = do_parse([next_token], pseudo_not_selector)
do_parse_pseudo_not(t, pseudo_not_selector, pseudo_class)
end
defp update_pseudo_not_value(pseudo_class, pseudo_not_selector = %Selector{combinator: nil}) do
pseudo_not_value = [pseudo_not_selector | Map.get(pseudo_class, :value, [])]
%{pseudo_class | value: pseudo_not_value}
end
defp update_pseudo_not_value(_pseudo_class, _pseudo_not_selector) do
Logger.info("Only simple selectors are allowed in :not() pseudo-class. Ignoring.")
nil
end
end
| 34.555133 | 97 | 0.708187 |
ffd0663c216e49a71d547e30df383354bb0f542f | 1,509 | exs | Elixir | mix.exs | matthewess/boilertalk | f8382108e6e029e5cc04f6420d60dd1adafa4c30 | [
"MIT"
] | null | null | null | mix.exs | matthewess/boilertalk | f8382108e6e029e5cc04f6420d60dd1adafa4c30 | [
"MIT"
] | null | null | null | mix.exs | matthewess/boilertalk | f8382108e6e029e5cc04f6420d60dd1adafa4c30 | [
"MIT"
] | null | null | null | defmodule Boilertalk.Mixfile do
use Mix.Project
def project do
[
app: :boilertalk,
version: "0.0.1",
elixir: "~> 1.4",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Boilertalk.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.3.0"},
{:phoenix_pubsub, "~> 1.0"},
{:phoenix_ecto, "~> 3.2"},
{:postgrex, ">= 0.0.0"},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"test": ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
end
| 25.576271 | 79 | 0.59112 |
ffd07d8d6c23389064f30f349fb88cbcb64738d1 | 1,943 | ex | Elixir | clients/area120_tables/lib/google_api/area120_tables/v1alpha1/model/table.ex | jamesvl/elixir-google-api | 6c87fb31d996f08fb42ce6066317e9d652a87acc | [
"Apache-2.0"
] | null | null | null | clients/area120_tables/lib/google_api/area120_tables/v1alpha1/model/table.ex | jamesvl/elixir-google-api | 6c87fb31d996f08fb42ce6066317e9d652a87acc | [
"Apache-2.0"
] | null | null | null | clients/area120_tables/lib/google_api/area120_tables/v1alpha1/model/table.ex | jamesvl/elixir-google-api | 6c87fb31d996f08fb42ce6066317e9d652a87acc | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated model module — keep edits limited to documentation.
defmodule GoogleApi.Area120Tables.V1alpha1.Model.Table do
  @moduledoc """
  A single table.

  ## Attributes

  *   `columns` (*type:* `list(GoogleApi.Area120Tables.V1alpha1.Model.ColumnDescription.t)`, *default:* `nil`) - List of columns in this table. Order of columns matches the display order.
  *   `displayName` (*type:* `String.t`, *default:* `nil`) - The human readable title of the table.
  *   `name` (*type:* `String.t`, *default:* `nil`) - The resource name of the table. Table names have the form `tables/{table}`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :columns => list(GoogleApi.Area120Tables.V1alpha1.Model.ColumnDescription.t()),
          :displayName => String.t(),
          :name => String.t()
        }

  field(:columns, as: GoogleApi.Area120Tables.V1alpha1.Model.ColumnDescription, type: :list)
  field(:displayName)
  field(:name)
end

# JSON decoding support: delegates to the generated decode/2 of the model.
defimpl Poison.Decoder, for: GoogleApi.Area120Tables.V1alpha1.Model.Table do
  def decode(value, options) do
    GoogleApi.Area120Tables.V1alpha1.Model.Table.decode(value, options)
  end
end

# JSON encoding support: uses the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Area120Tables.V1alpha1.Model.Table do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.660377 | 187 | 0.723109 |
ffd0a4b872ea6ceda019a9f66e3d21431b3eec88 | 1,789 | ex | Elixir | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/authorization_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/authorization_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/authorization_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# Auto-generated model module — keep edits limited to documentation.
defmodule GoogleApi.ServiceConsumerManagement.V1.Model.AuthorizationConfig do
  @moduledoc """
  Configuration of authorization. This section determines the authorization provider; if unspecified, then no authorization check will be done. Example: experimental: authorization: provider: firebaserules.googleapis.com

  ## Attributes

  - provider (String.t): The name of the authorization provider, such as firebaserules.googleapis.com. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :provider => any()
        }

  field(:provider)
end

# JSON decoding support: delegates to the generated decode/2 of the model.
defimpl Poison.Decoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.AuthorizationConfig do
  def decode(value, options) do
    GoogleApi.ServiceConsumerManagement.V1.Model.AuthorizationConfig.decode(value, options)
  end
end

# JSON encoding support: uses the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.AuthorizationConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.270833 | 241 | 0.76076 |
ffd0d4f25aa6c97f68116d5badd7c5a7f043fb5f | 892 | ex | Elixir | clients/play_custom_app/lib/google_api/play_custom_app/v1/metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/play_custom_app/lib/google_api/play_custom_app/v1/metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/play_custom_app/lib/google_api/play_custom_app/v1/metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PlayCustomApp.V1 do
  @moduledoc """
  API client metadata for GoogleApi.PlayCustomApp.V1.
  """

  # Revision of the Google discovery document this client was generated from.
  @discovery_revision "20211022"

  def discovery_revision(), do: @discovery_revision
end
| 33.037037 | 74 | 0.761211 |
ffd10b77ce6081df93d76aa215e3014a67a1c6a6 | 916 | ex | Elixir | test/support/apps/phx1_4/lib/phx1_4/application.ex | nhphuc412/torch | d2145b9fdb889517c0a36a5367f0b116ee7a6844 | [
"MIT"
] | 528 | 2019-09-13T15:10:36.000Z | 2022-03-31T10:28:27.000Z | test/support/apps/phx1_4/lib/phx1_4/application.ex | nhphuc412/torch | d2145b9fdb889517c0a36a5367f0b116ee7a6844 | [
"MIT"
] | 133 | 2019-09-13T17:46:59.000Z | 2022-03-01T13:37:10.000Z | test/support/apps/phx1_4/lib/phx1_4/application.ex | nhphuc412/torch | d2145b9fdb889517c0a36a5367f0b116ee7a6844 | [
"MIT"
] | 38 | 2019-10-29T20:37:13.000Z | 2022-03-03T05:19:33.000Z | defmodule Phx14.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
# List all child processes to be supervised
children = [
# Start the Ecto repository
Phx14.Repo,
# Start the endpoint when the application starts
Phx14Web.Endpoint
# Starts a worker by calling: Phx14.Worker.start_link(arg)
# {Phx14.Worker, arg},
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Phx14.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
Phx14Web.Endpoint.config_change(changed, removed)
:ok
end
end
| 28.625 | 64 | 0.709607 |
ffd10e51d935640da76cad64e0d5838ccc93c875 | 795 | ex | Elixir | lib/vocabulary/udhr/multigram.ex | kipcole9/text_corpus_udhr | 8fb948444ebfe6160572bd907617bd1a270e090c | [
"Apache-2.0"
] | 1 | 2020-07-08T11:19:03.000Z | 2020-07-08T11:19:03.000Z | lib/vocabulary/udhr/multigram.ex | kipcole9/text_corpus_udhr | 8fb948444ebfe6160572bd907617bd1a270e090c | [
"Apache-2.0"
] | null | null | null | lib/vocabulary/udhr/multigram.ex | kipcole9/text_corpus_udhr | 8fb948444ebfe6160572bd907617bd1a270e090c | [
"Apache-2.0"
defmodule Text.Vocabulary.Udhr.Multigram do
  @moduledoc """
  An n-gram vocabulary built from n-grams between 2 and 4 characters long.
  """
  @behaviour Text.Vocabulary

  alias Text.Vocabulary

  # Application name resolved at compile time; used to locate priv/ assets.
  @app Mix.Project.config[:app]
  @ngram_range 2..4

  @impl Text.Vocabulary
  def load_vocabulary!, do: Vocabulary.load_vocabulary!(__MODULE__)

  @impl Text.Vocabulary
  def ngram_range, do: @ngram_range

  @impl Text.Vocabulary
  def get_vocabulary(language), do: Vocabulary.get_vocabulary(__MODULE__, language)

  @impl Text.Vocabulary
  def filename, do: Path.join(:code.priv_dir(@app), "vocabulary/udhr_multigram.etf")

  @impl Text.Vocabulary
  def calculate_ngrams(text), do: Vocabulary.calculate_ngrams(text, ngram_range())
end
ffd12bab88c2cb6b7455a195f71d018df4202176 | 210 | exs | Elixir | priv/repo/migrations/20180415220954_add_real_names_to_neighbors.exs | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | 1 | 2021-01-13T15:46:49.000Z | 2021-01-13T15:46:49.000Z | priv/repo/migrations/20180415220954_add_real_names_to_neighbors.exs | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | 1 | 2021-01-13T16:42:24.000Z | 2021-01-13T16:42:24.000Z | priv/repo/migrations/20180415220954_add_real_names_to_neighbors.exs | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
defmodule Litelist.Repo.Migrations.AddRealNamesToNeighbors do
  # Adds nullable first/last name columns to the neighbors table.
  # Reversible: Ecto can derive the corresponding `remove` on rollback.
  use Ecto.Migration

  def change do
    alter table(:neighbors) do
      add :first_name, :string
      add :last_name, :string
    end
  end
end
ffd15f3dead0aa1cee5d9a2354629b33cac0e4e1 | 797 | ex | Elixir | lib/mix/tasks/yacto.gen.migration.ex | fossabot/yacto | 07ed65ebe2ab030dcc0abe468600d6b06e8696e9 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/yacto.gen.migration.ex | fossabot/yacto | 07ed65ebe2ab030dcc0abe468600d6b06e8696e9 | [
"Apache-2.0"
] | 1 | 2019-03-06T20:05:53.000Z | 2019-03-06T20:05:54.000Z | lib/mix/tasks/yacto.gen.migration.ex | fossabot/yacto | 07ed65ebe2ab030dcc0abe468600d6b06e8696e9 | [
"Apache-2.0"
defmodule Mix.Tasks.Yacto.Gen.Migration do
  use Mix.Task

  @shortdoc "Generate migration file"

  @switches [version: :integer]

  # Entry point for `mix yacto.gen.migration`. Loads the project code,
  # then generates a migration for every schema found in the application.
  def run(args) do
    Mix.Task.run("loadpaths", args)
    Mix.Task.run("app.start", args)

    case OptionParser.parse(args, switches: @switches) do
      {opts, [], _} ->
        generate(opts)

      {_, [_ | _], _} ->
        Mix.raise("Args error")

      {_, _, invalids} ->
        Mix.raise("Invalid arguments #{inspect(invalids)}")
    end
  end

  # Collects all schemas for the current Mix application and delegates
  # to Yacto's migration generator.
  defp generate(opts) do
    app = Keyword.fetch!(Mix.Project.config(), :app)
    version = Keyword.get(opts, :version)
    _ = Application.load(app)
    schemas = Yacto.Migration.Util.get_all_schema(app)

    Yacto.Migration.GenMigration.generate_migration(
      app,
      schemas,
      [],
      version,
      nil,
      Application.get_env(:yacto, :migration, [])
    )
  end
end
ffd1892c2e14b77d4c588173085dbd14cd4369ad | 5,444 | exs | Elixir | apps/core/test/services/rollable/versions_test.exs | pluralsh/plural | d33d08dee6348126334810e044b3699ef3f74819 | [
"Apache-2.0"
] | 59 | 2021-09-16T19:29:39.000Z | 2022-03-31T20:44:24.000Z | apps/core/test/services/rollable/versions_test.exs | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 111 | 2021-08-15T09:56:37.000Z | 2022-03-31T23:59:32.000Z | apps/core/test/services/rollable/versions_test.exs | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
defmodule Core.Rollable.VersionsTest do
  # Exercises rollout execution for version created/updated pub-sub events:
  # installations with auto_upgrade should be bumped to the new version and
  # have an upgrade queued; others must stay on the old version.
  use Core.SchemaCase, async: true
  alias Core.PubSub
  alias Core.Services.Rollouts

  describe "VersionCreated" do
    test "it will execute a rollout for a version created event" do
      %{chart: chart} = chart_version = insert(:version, version: "0.1.0")
      # Three installations opted into auto-upgrade, each with its own user.
      auto_upgraded = for _ <- 1..3 do
        insert(:chart_installation,
          installation: insert(:installation, auto_upgrade: true),
          chart: chart,
          version: chart_version
        )
      end
      # One upgrade queue per auto-upgrading user, to receive upgrade records.
      queues = for %{installation: %{user: user}} <- auto_upgraded do
        insert(:upgrade_queue, user: user)
      end
      # Installations without auto_upgrade must not be touched by the rollout.
      ignored = insert_list(2, :chart_installation, chart: chart, version: chart_version)
      version = insert(:version, version: "0.1.1", chart: chart)
      insert(:version_tag, version: version, chart: chart, tag: "latest")
      event = %PubSub.VersionCreated{item: version}
      {:ok, rollout} = Rollouts.create_rollout(chart.repository_id, event)
      {:ok, rolled} = Rollouts.execute(rollout)
      assert rolled.status == :finished
      # Count reflects only the auto-upgraded installations.
      assert rolled.count == 3
      for bumped <- auto_upgraded,
        do: assert refetch(bumped).version_id == version.id
      for ignore <- ignored,
        do: assert refetch(ignore).version_id == chart_version.id
      # Each queue should have received an upgrade record.
      for queue <- queues do
        assert Core.Schema.Upgrade.for_queue(queue.id) |> Core.Repo.exists?()
      end
    end

    test "it will defer updates if a version's dependencies aren't satisfied" do
      dep_chart = insert(:chart)
      %{chart: chart} = chart_version = insert(:version, version: "0.1.0")
      inst = insert(:chart_installation,
        installation: insert(:installation, auto_upgrade: true),
        chart: chart,
        version: chart_version
      )
      ignored = insert_list(2, :chart_installation, chart: chart, version: chart_version)
      # New version depends on a chart the installation's user has not installed.
      version = insert(:version, version: "0.1.1", chart: chart, dependencies: %{dependencies: [
        %{type: :helm, repo: dep_chart.repository.name, name: dep_chart.name}
      ]})
      insert(:version_tag, version: version, chart: chart, tag: "latest")
      event = %PubSub.VersionCreated{item: version}
      {:ok, rollout} = Rollouts.create_rollout(chart.repository_id, event)
      {:ok, rolled} = Rollouts.execute(rollout)
      assert rolled.status == :finished
      assert rolled.count == 1
      # The installation is NOT bumped -- the upgrade is deferred instead.
      for bumped <- [inst],
        do: assert refetch(bumped).version_id == chart_version.id
      for ignore <- ignored,
        do: assert refetch(ignore).version_id == chart_version.id
      # A single DeferredUpdate is recorded, linking installation and version.
      [deferred] = Core.Repo.all(Core.Schema.DeferredUpdate)
      assert deferred.chart_installation_id == inst.id
      assert deferred.version_id == version.id
    end
  end

  describe "VersionUpdated" do
    # Same behavior as VersionCreated, triggered by the updated event.
    test "it can execute version updated rollouts too" do
      %{chart: chart} = chart_version = insert(:version, version: "0.1.0")
      auto_upgraded = for _ <- 1..3 do
        insert(:chart_installation,
          installation: insert(:installation, auto_upgrade: true),
          chart: chart,
          version: chart_version
        )
      end
      queues = for %{installation: %{user: user}} <- auto_upgraded do
        insert(:upgrade_queue, user: user)
      end
      ignored = insert_list(2, :chart_installation, chart: chart, version: chart_version)
      version = insert(:version, version: "0.1.1", chart: chart)
      insert(:version_tag, version: version, chart: chart, tag: "latest")
      event = %PubSub.VersionUpdated{item: version}
      {:ok, rollout} = Rollouts.create_rollout(chart.repository_id, event)
      {:ok, rolled} = Rollouts.execute(rollout)
      assert rolled.status == :finished
      assert rolled.count == 3
      for bumped <- auto_upgraded,
        do: assert refetch(bumped).version_id == version.id
      for ignore <- ignored,
        do: assert refetch(ignore).version_id == chart_version.id
      for queue <- queues do
        assert Core.Schema.Upgrade.for_queue(queue.id) |> Core.Repo.exists?()
      end
    end

    # Terraform installations roll out the same way as chart installations.
    test "it can execute version updated rollouts for terraform" do
      tf = insert(:terraform)
      tf_version = insert(:version, terraform: tf, chart: nil, chart_id: nil, version: "0.1.0")
      auto_upgraded = for _ <- 1..3 do
        insert(:terraform_installation,
          installation: insert(:installation, auto_upgrade: true),
          terraform: tf,
          version: tf_version
        )
      end
      queues = for %{installation: %{user: user}} <- auto_upgraded do
        insert(:upgrade_queue, user: user)
      end
      ignored = insert_list(2, :terraform_installation, terraform: tf, version: tf_version)
      version = insert(:version, chart: nil, chart_id: nil, version: "0.1.1", terraform: tf)
      insert(:version_tag, version: version, terraform: tf, tag: "latest")
      event = %PubSub.VersionUpdated{item: version}
      {:ok, rollout} = Rollouts.create_rollout(tf.repository_id, event)
      {:ok, rolled} = Rollouts.execute(rollout)
      assert rolled.status == :finished
      assert rolled.count == 3
      for bumped <- auto_upgraded,
        do: assert refetch(bumped).version_id == version.id
      for ignore <- ignored,
        do: assert refetch(ignore).version_id == tf_version.id
      for queue <- queues do
        assert Core.Schema.Upgrade.for_queue(queue.id) |> Core.Repo.exists?()
      end
    end
  end
end
ffd19ba1c1db409f9ddc5f3099a744ff4cb53092 | 6,271 | ex | Elixir | lib/wallaby/query/error_message.ex | carl-al/wallaby | a4c9811902370b15db17fc62d451035ee7464eea | [
"MIT"
] | null | null | null | lib/wallaby/query/error_message.ex | carl-al/wallaby | a4c9811902370b15db17fc62d451035ee7464eea | [
"MIT"
] | null | null | null | lib/wallaby/query/error_message.ex | carl-al/wallaby | a4c9811902370b15db17fc62d451035ee7464eea | [
"MIT"
defmodule Wallaby.Query.ErrorMessage do
  # Renders human-readable failure messages for Wallaby queries.
  # Clause order on message/2 matters: the %Query{} clauses must come before
  # the looser map-pattern clauses.
  @moduledoc false

  alias Wallaby.Query

  @doc """
  Compose an error message based on the error method and query information
  """
  @spec message(Query.t, any()) :: String.t
  def message(%Query{} = query, :not_found) do
    "Expected to find #{found_error_message(query)}"
  end
  def message(%Query{} = query, :found) do
    "Expected not to find #{found_error_message(query)}"
  end
  def message(%{method: method, selector: selector}, :label_with_no_for) do
    """
    The text '#{selector}' matched a label but the label has no 'for'
    attribute and can't be used to find the correct #{method(method)}.
    You can fix this by including the `for="YOUR_INPUT_ID"` attribute on the
    appropriate label.
    """
  end
  def message(%{method: method, selector: selector}, {:label_does_not_find_field, for_text}) do
    """
    The text '#{selector}' matched a label but the label's 'for' attribute
    doesn't match the id of any #{method(method)}.
    Make sure that id on your #{method(method)} is `id="#{for_text}"`.
    """
  end
  def message(%{selector: selector}, :button_with_bad_type) do
    """
    The text '#{selector}' matched a button but the button has an invalid 'type' attribute.
    You can fix this by including `type="[submit|reset|button|image]"` on the appropriate button.
    """
  end
  def message(_, :cannot_set_text_with_invisible_elements) do
    """
    Cannot set the `text` filter when `visible` is set to `false`.
    Text is based on visible text on the page. This is a limitation of webdriver.
    Since the element isn't visible the text isn't visible. Because of that I
    can't apply both filters correctly.
    """
  end
  def message(_, {:at_number, query}) do
    # The query is invalid. the 'at' number requested is not within the results list (1-#{size}).
    """
    The element at index #{Query.at_number(query)} is not available because #{result_count(query.result)} #{method(query)} #{result_expectation(query.result)}
    """
  end
  def message(_, :min_max) do
    """
    The query is invalid. Cannot set the minimum greater than the maximum.
    """
  end
  def message(%{method: method, selector: selector}, :invalid_selector) do
    """
    The #{method} '#{selector}' is not a valid query.
    """
  end
  def message(_, :unexpected_alert) do
    """
    There was an unexpected alert.
    """
  end

  @doc """
  Suggests the `count:` option when multiple elements matched unexpectedly.
  """
  def help(elements) do
    """
    If you expect to find the selector #{times(length(elements))} then you
    should include the `count: #{length(elements)}` option in your finder.
    """
  end

  # Core "expected X but found Y" sentence shared by :not_found and :found.
  defp found_error_message(query) do
    """
    #{expected_count(query)}, #{visibility(query)} #{method(query)} '#{query.selector}' but #{result_count(query.result)}, #{visibility(query)} #{short_method(query.method, Enum.count(query.result))} #{result_expectation(query.result)}.
    """
  end

  @doc """
  Extracts the selector method from the selector and converts it into a human
  readable format
  """
  @spec method(Query.t) :: String.t
  @spec method({atom(), boolean()}) :: String.t
  def method(%Query{conditions: conditions} = query) do
    method(query.method, conditions[:count] > 1)
  end
  def method(_), do: "element"

  # method/2: second argument is "plural?" (true when expecting > 1 element).
  def method(:css, true), do: "elements that matched the css"
  def method(:css, false), do: "element that matched the css"
  def method(:select, true), do: "selects"
  def method(:select, false), do: "select"
  def method(:option, true), do: "option fields"
  def method(:option, false), do: "option"
  def method(:fillable_field, true), do: "text inputs or textareas"
  def method(:fillable_field, false), do: "text input or textarea"
  def method(:checkbox, true), do: "checkboxes"
  def method(:checkbox, false), do: "checkbox"
  def method(:radio_button, true), do: "radio buttons"
  def method(:radio_button, false), do: "radio button"
  def method(:link, true), do: "links"
  def method(:link, false), do: "link"
  def method(:xpath, true), do: "elements that matched the xpath"
  def method(:xpath, false), do: "element that matched the xpath"
  def method(:button, true), do: "buttons"
  def method(:button, false), do: "button"
  def method(:file_field, true), do: "file fields"
  def method(:file_field, false), do: "file field"
  def method(:text, true), do: "elements with the text"
  def method(:text, false), do: "element with the text"

  # Shorter phrasing used on the "found" side; takes the actual result count.
  def short_method(:css, count) when count > 1, do: "elements"
  def short_method(:css, count) when count == 0, do: "elements"
  def short_method(:css, _), do: "element"
  def short_method(:xpath, count) when count == 1, do: "element"
  def short_method(:xpath, _), do: "elements"
  def short_method(method, count), do: method(method, count != 1)

  @doc """
  Generates failure conditions based on query conditions.
  """
  @spec conditions(Keyword.t) :: list(String.t)
  def conditions(opts) do
    opts
    |> Keyword.delete(:visible)
    |> Keyword.delete(:count)
    |> Enum.map(&condition/1)
    |> Enum.reject(& &1 == nil)
  end

  @doc """
  Converts a condition into a human readable failure message.
  """
  @spec condition({atom(), String.t}) :: String.t | nil
  def condition({:text, text}) when is_binary(text) do
    "text: '#{text}'"
  end
  def condition(_), do: nil

  @doc """
  Converts the visibility attribute into a human readable form.
  """
  @spec visibility(Query.t) :: String.t
  def visibility(query) do
    if Query.visible?(query) do
      "visible"
    else
      "invisible"
    end
  end

  defp result_count([_]), do: "only 1"
  defp result_count(result), do: "#{Enum.count(result)}"

  defp times(1), do: "1 time"
  defp times(count), do: "#{count} times"

  # Describes the expected count from the query's count/minimum/maximum filters.
  # NOTE(review): "no more then" is a typo for "no more than"; left as-is since
  # downstream tests may assert on the exact message text.
  defp expected_count(query) do
    conditions = query.conditions
    cond do
      conditions[:count] ->
        "#{conditions[:count]}"
      conditions[:minimum] && Enum.count(query.result) < conditions[:minimum] ->
        "at least #{conditions[:minimum]}"
      conditions[:maximum] && Enum.count(query.result) > conditions[:maximum] ->
        "no more then #{conditions[:maximum]}"
      true -> ""
    end
  end

  def result_expectation(result) when length(result) == 1, do: "was found"
  def result_expectation(_), do: "were found"
end
ffd1a0cc18959d9d950881a1c97ee8ab88fb6b9d | 551 | ex | Elixir | core/ets/ets.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | core/ets/ets.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | core/ets/ets.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | # Copyright(c) 2015-2018 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule AntikytheraCore.Ets do
  # Initializes every core ETS table at boot. Uses Croma's `defun`, so the
  # declared return type (`:ok`) is validated at runtime.
  defun init_all() :: :ok do
    AntikytheraCore.Ets.SystemCache.init()
    AntikytheraCore.Ets.ConfigCache.init()
    AntikytheraCore.Ets.GearActionRunnerPools.init()
    AntikytheraCore.Ets.TenantToGearsMapping.init()
    # NOTE(review): the defun contract requires this last init() to return :ok.
    AntikytheraCore.Ets.Memcache.init()
  end

  # Creates a public named ETS table tuned for concurrent reads
  # (read_concurrency: true); intended for read-mostly caches.
  defun create_read_optimized_table(table_name :: v[atom]) :: :ok do
    _table_id = :ets.new(table_name, [:public, :named_table, {:read_concurrency, true}])
    :ok
  end
end
| 29 | 88 | 0.735027 |
ffd1bcbace48a7ec7e0a2dc118ec9bfc8e6b70ce | 23,925 | ex | Elixir | deps/ecto/lib/ecto/adapters/mysql/connection.ex | scouten/crash_esqlite_case | 986f0b0721399c7ed520f6b9df133980906e3f51 | [
"MIT"
] | null | null | null | deps/ecto/lib/ecto/adapters/mysql/connection.ex | scouten/crash_esqlite_case | 986f0b0721399c7ed520f6b9df133980906e3f51 | [
"MIT"
] | null | null | null | deps/ecto/lib/ecto/adapters/mysql/connection.ex | scouten/crash_esqlite_case | 986f0b0721399c7ed520f6b9df133980906e3f51 | [
"MIT"
] | null | null | null | if Code.ensure_loaded?(Mariaex.Connection) do
defmodule Ecto.Adapters.MySQL.Connection do
@moduledoc false
@default_port 3306
@behaviour Ecto.Adapters.SQL.Connection
## Connection
# Opens a Mariaex connection. A port supplied as a string (e.g. read from an
# environment variable) is converted to an integer first.
def connect(opts) do
  opts
  |> Keyword.update(:port, @default_port, &normalize_port/1)
  |> Mariaex.Connection.start_link()
end
# Runs `sql` on `conn`. Tagged Ecto values are unwrapped, structs (e.g.
# Decimal) pass through untouched, and plain maps are JSON-encoded.
# The clause order below is significant: struct check must precede the
# bare-map check.
def query(conn, sql, params, opts \\ []) do
  params =
    for param <- params do
      case param do
        %Ecto.Query.Tagged{value: value} -> value
        %{__struct__: _} -> param
        %{} -> json_library().encode!(param)
        _ -> param
      end
    end

  Mariaex.Connection.query(conn, sql, params, [decode: :manual] ++ opts)
end
# Decodes a raw Mariaex result into a plain map; errors pass through.
def decode({:ok, res}, mapper) do
  decoded = Mariaex.Connection.decode(res, mapper)
  {:ok, Map.from_struct(decoded)}
end

def decode({:error, _} = error, _mapper), do: error
# Accepts ports given as either integers or strings (guards are disjoint,
# so clause order is irrelevant).
defp normalize_port(port) when is_integer(port), do: port
defp normalize_port(port) when is_binary(port), do: String.to_integer(port)

# The JSON encoder module is configurable application-wide.
defp json_library, do: Application.get_env(:ecto, :json_library)
# Maps MariaDB error codes onto Ecto constraint violations:
# 1062 -> unique constraint, 1451/1452 -> foreign-key constraint.
# The constraint name is parsed out of the server's error message text.
def to_constraints(%Mariaex.Error{mariadb: %{code: 1062, message: message}}) do
  case :binary.split(message, " for key ") do
    [_, quoted] -> [unique: strip_quotes(quoted)]
    _ -> []
  end
end
def to_constraints(%Mariaex.Error{mariadb: %{code: code, message: message}})
    when code in [1451, 1452] do
  case :binary.split(message, [" CONSTRAINT ", " FOREIGN KEY "], [:global]) do
    [_, quoted, _] -> [foreign_key: strip_quotes(quoted)]
    _ -> []
  end
end
# Any other MariaDB error carries no constraint information.
def to_constraints(%Mariaex.Error{}),
  do: []
# Removes the single leading and trailing quote characters around an
# identifier taken from a server error message.
defp strip_quotes(quoted) do
  inner_size = byte_size(quoted) - 2
  <<_open, inner::binary-size(inner_size), _close>> = quoted
  inner
end
## Transaction
# Plain SQL statements driving the adapter's transaction machinery.
def begin_transaction, do: "BEGIN"

def rollback, do: "ROLLBACK"

def commit, do: "COMMIT"

def savepoint(savepoint), do: "SAVEPOINT " <> savepoint

def rollback_to_savepoint(savepoint), do: "ROLLBACK TO SAVEPOINT " <> savepoint
## Query
alias Ecto.Query
alias Ecto.Query.SelectExpr
alias Ecto.Query.QueryExpr
alias Ecto.Query.JoinExpr
# Renders a complete SELECT statement for `query`. Each clause is built
# separately (in this exact order, since clause builders may raise on
# unsupported constructs) and assemble/1 combines the rendered parts.
def all(query) do
  sources = create_names(query)

  from_sql = from(sources)
  select_sql = select(query, sources)
  join_sql = join(query, sources)
  where_sql = where(query, sources)
  group_by_sql = group_by(query, sources)
  having_sql = having(query, sources)
  order_by_sql = order_by(query, sources)
  limit_sql = limit(query, sources)
  offset_sql = offset(query, sources)
  lock_sql = lock(query.lock)

  assemble([
    select_sql,
    from_sql,
    join_sql,
    where_sql,
    group_by_sql,
    having_sql,
    order_by_sql,
    limit_sql,
    offset_sql,
    lock_sql
  ])
end
# Renders an UPDATE ... SET statement covering every row matched by `query`.
# The primary source (index 0) names the table being updated.
def update_all(query) do
  sources = create_names(query)
  {table, alias_name, _model} = elem(sources, 0)

  update_sql = "UPDATE #{table} AS #{alias_name}"
  set_sql = update_fields(query, sources)
  join_sql = join(query, sources)
  where_sql = where(query, sources)

  assemble([update_sql, join_sql, "SET", set_sql, where_sql])
end
# Renders a multi-table DELETE; "DELETE alias.*" removes rows only from
# the primary source even when joins are present.
def delete_all(query) do
  sources = create_names(query)
  {_table, alias_name, _model} = elem(sources, 0)

  delete_sql = "DELETE #{alias_name}.*"
  from_sql = from(sources)
  join_sql = join(query, sources)
  where_sql = where(query, sources)

  assemble([delete_sql, from_sql, join_sql, where_sql])
end
# Builds an INSERT statement for the given rows.
#
# Fix: the column-quoting capture was corrupted in this file ("e_name/1 is
# an HTML-entity mangling of &quote_name/1); restored so the header columns
# are properly quoted.
#
# `nil` cells render as DEFAULT (so column defaults apply); every other
# cell becomes a `?` placeholder bound at execution time.
def insert(prefix, table, header, rows, _returning) do
  fields = Enum.map_join(header, ",", &quote_name/1)
  "INSERT INTO #{quote_table(prefix, table)} (" <> fields <> ") VALUES " <> insert_all(rows)
end

defp insert_all(rows) do
  Enum.map_join(rows, ",", fn row ->
    values =
      Enum.map_join(row, ",", fn
        nil -> "DEFAULT"
        _ -> "?"
      end)

    "(" <> values <> ")"
  end)
end
# Builds a single-row UPDATE with `?` placeholders for both the SET values
# and the WHERE filters.
def update(prefix, table, fields, filters, _returning) do
  set_clause = Enum.map_join(fields, ", ", &"#{quote_name(&1)} = ?")
  where_clause = Enum.map_join(filters, " AND ", &"#{quote_name(&1)} = ?")

  "UPDATE #{quote_table(prefix, table)} SET " <> set_clause <>
    " WHERE " <> where_clause
end
# Builds a single-row DELETE keyed by the given filter columns.
def delete(prefix, table, filters, _returning) do
  where_clause = Enum.map_join(filters, " AND ", &"#{quote_name(&1)} = ?")
  "DELETE FROM #{quote_table(prefix, table)} WHERE " <> where_clause
end
## Query generation
# Compile-time codegen: for each known binary operator a handle_call/2
# clause is unquoted into the module, mapping the Elixir operator atom to
# its SQL spelling. Anything else falls through to the generic function
# clause below. Do not reorder: the generated clauses must precede the
# catch-all.
binary_ops =
  [==: "=", !=: "!=", <=: "<=", >=: ">=", <: "<", >: ">",
   and: "AND", or: "OR",
   ilike: "ILIKE", like: "LIKE"]

@binary_ops Keyword.keys(binary_ops)

Enum.map(binary_ops, fn {op, str} ->
  defp handle_call(unquote(op), 2), do: {:binary_op, unquote(str)}
end)

# Fallback: treat the call as a plain SQL function of the same name.
defp handle_call(fun, _arity), do: {:fun, Atom.to_string(fun)}
# Renders the SELECT clause, including an optional DISTINCT prefix.
defp select(%Query{select: %SelectExpr{fields: fields}, distinct: distinct} = query, sources) do
  "SELECT #{distinct(distinct, sources, query)}#{select(fields, sources, query)}"
end
# DISTINCT handling: MySQL can only de-duplicate whole rows, so a column
# list (distinct on expressions) is rejected with an error.
defp distinct(nil, _sources, _query), do: ""
defp distinct(%QueryExpr{expr: true}, _sources, _query), do: "DISTINCT "
defp distinct(%QueryExpr{expr: false}, _sources, _query), do: ""

defp distinct(%QueryExpr{expr: exprs}, _sources, query) when is_list(exprs) do
  error!(query, "DISTINCT with multiple columns is not supported by MySQL")
end
# An empty field list still needs something to project; TRUE is a valid
# constant expression in MySQL.
defp select([], _sources, _query), do: "TRUE"
defp select(fields, sources, query), do: Enum.map_join(fields, ", ", &expr(&1, sources, query))
# Renders the FROM clause from the primary (index 0) source.
defp from(sources) do
  {table, alias_name, _model} = elem(sources, 0)
  "FROM " <> table <> " AS " <> alias_name
end
# Flattens every update expression ({op, key, value} triples) into a
# comma-separated "col = expr" list. The %{expr: expr} generator pattern
# intentionally skips non-matching entries.
defp update_fields(%Query{updates: updates} = query, sources) do
  rendered =
    for %{expr: expr} <- updates,
        {op, kw} <- expr,
        {key, value} <- kw,
        do: update_op(op, key, value, sources, query)

  Enum.join(rendered, ", ")
end
# Renders one SET fragment. :set assigns, :inc adds to the current value;
# anything else is unsupported on MySQL.
defp update_op(:set, key, value, sources, query) do
  "#{quote_name(key)} = #{expr(value, sources, query)}"
end

defp update_op(:inc, key, value, sources, query) do
  field = quote_name(key)
  "#{field} = #{field} + #{expr(value, sources, query)}"
end

defp update_op(command, _key, _value, _sources, query) do
  error!(query, "Unknown update operation #{inspect(command)} for MySQL")
end
# Renders all JOIN clauses. A source's precomputed table string is used when
# present; fragment joins (table is nil) render their source expression in
# parentheses instead.
defp join(%Query{joins: []}, _sources), do: []
defp join(%Query{joins: joins} = query, sources) do
  Enum.map_join(joins, " ", fn
    %JoinExpr{on: %QueryExpr{expr: expr}, qual: qual, ix: ix, source: source} ->
      {join, name, _model} = elem(sources, ix)
      qual = join_qual(qual)
      # `join` is nil for fragment sources; fall back to the rendered expression.
      join = join || "(" <> expr(source, sources, query) <> ")"
      "#{qual} JOIN " <> join <> " AS #{name} ON " <> expr(expr, sources, query)
  end)
end
# Maps Ecto join qualifiers to their SQL keywords.
defp join_qual(qual) do
  case qual do
    :inner -> "INNER"
    :left -> "LEFT OUTER"
    :right -> "RIGHT OUTER"
    :full -> "FULL OUTER"
  end
end
# WHERE and HAVING share the same AND-joined boolean rendering.
defp where(%Query{wheres: wheres} = query, sources), do: boolean("WHERE", wheres, sources, query)

defp having(%Query{havings: havings} = query, sources), do: boolean("HAVING", havings, sources, query)
# Renders the GROUP BY clause, or [] when there is nothing to group by.
defp group_by(%Query{group_bys: group_bys} = query, sources) do
  rendered =
    Enum.map_join(group_bys, ", ", fn %QueryExpr{expr: expr} ->
      Enum.map_join(expr, ", ", &expr(&1, sources, query))
    end)

  if rendered == "", do: [], else: "GROUP BY " <> rendered
end
# Renders the ORDER BY clause, or [] when there is nothing to order by.
defp order_by(%Query{order_bys: order_bys} = query, sources) do
  rendered =
    Enum.map_join(order_bys, ", ", fn %QueryExpr{expr: expr} ->
      Enum.map_join(expr, ", ", &order_by_expr(&1, sources, query))
    end)

  if rendered == "", do: [], else: "ORDER BY " <> rendered
end
# Renders one ordering term; ascending is MySQL's default, so only DESC is
# spelled out.
defp order_by_expr({dir, expr}, sources, query) do
  rendered = expr(expr, sources, query)

  case dir do
    :asc -> rendered
    :desc -> rendered <> " DESC"
  end
end
# LIMIT / OFFSET clauses; absent ones render as [] and are dropped by
# assemble/1.
defp limit(%Query{limit: nil}, _sources), do: []
defp limit(%Query{limit: %QueryExpr{expr: expr}} = query, sources), do: "LIMIT " <> expr(expr, sources, query)

defp offset(%Query{offset: nil}, _sources), do: []
defp offset(%Query{offset: %QueryExpr{expr: expr}} = query, sources), do: "OFFSET " <> expr(expr, sources, query)

# The lock clause is passed through verbatim (e.g. "FOR UPDATE").
defp lock(nil), do: []
defp lock(lock_clause), do: lock_clause
# Joins a list of boolean query expressions with AND under the given clause
# keyword (WHERE / HAVING); each sub-expression is parenthesized.
defp boolean(_name, [], _sources, _query), do: []

defp boolean(name, query_exprs, sources, query) do
  clauses =
    Enum.map_join(query_exprs, " AND ", fn %QueryExpr{expr: expr} ->
      "(" <> expr(expr, sources, query) <> ")"
    end)

  name <> " " <> clauses
end
# Renders one Ecto AST node to SQL. Clause order is load-bearing: more
# specific tuple shapes (:^, field access, :in, :fragment, ...) must match
# before the generic {fun, _, args} clause, and tagged/struct literals
# before plain literals.

# Placeholder for an interpolated parameter.
defp expr({:^, [], [_ix]}, _sources, _query) do
  "?"
end
# Qualified field access: source.field
defp expr({{:., _, [{:&, _, [idx]}, field]}, _, []}, sources, _query)
    when is_atom(field) do
  {_, name, _} = elem(sources, idx)
  "#{name}.#{quote_name(field)}"
end
# Whole-source selection; requires a schema so the field list is known.
defp expr({:&, _, [idx, fields]}, sources, query) do
  {table, name, model} = elem(sources, idx)
  unless model do
    error!(query, "MySQL requires a schema module when using selector " <>
      "#{inspect name} but only the table #{inspect table} was given. " <>
      "Please specify a model or specify exactly which fields from " <>
      "#{inspect name} you desire")
  end
  Enum.map_join(fields, ", ", &"#{name}.#{quote_name(&1)}")
end
# IN with an empty list can never match.
defp expr({:in, _, [_left, []]}, _sources, _query) do
  "false"
end
defp expr({:in, _, [left, right]}, sources, query) when is_list(right) do
  args = Enum.map_join right, ",", &expr(&1, sources, query)
  expr(left, sources, query) <> " IN (" <> args <> ")"
end
# IN over an interpolated list: one `?` per element at positions ix+1..ix+length.
defp expr({:in, _, [left, {:^, _, [ix, length]}]}, sources, query) do
  args = Enum.map_join(ix+1..ix+length, ",", fn (_) -> "?" end)
  expr(left, sources, query) <> " IN (" <> args <> ")"
end
defp expr({:in, _, [left, right]}, sources, query) do
  expr(left, sources, query) <> " = ANY(" <> expr(right, sources, query) <> ")"
end
defp expr({:is_nil, _, [arg]}, sources, query) do
  "#{expr(arg, sources, query)} IS NULL"
end
defp expr({:not, _, [expr]}, sources, query) do
  "NOT (" <> expr(expr, sources, query) <> ")"
end
defp expr({:fragment, _, [kw]}, _sources, query) when is_list(kw) or tuple_size(kw) == 3 do
  error!(query, "MySQL adapter does not support keyword or interpolated fragments")
end
# Raw fragment: concatenate literal parts with rendered sub-expressions.
defp expr({:fragment, _, parts}, sources, query) do
  Enum.map_join(parts, "", fn
    {:raw, part} -> part
    {:expr, expr} -> expr(expr, sources, query)
  end)
end
defp expr({:datetime_add, _, [datetime, count, interval]}, sources, query) do
  "CAST(date_add(" <> expr(datetime, sources, query) <> ", "
    <> interval(count, interval, sources, query) <> ") AS datetime)"
end
defp expr({:date_add, _, [date, count, interval]}, sources, query) do
  "CAST(date_add(" <> expr(date, sources, query) <> ", "
    <> interval(count, interval, sources, query) <> ") AS date)"
end
# Generic call: either a known binary operator or a plain SQL function.
# A trailing :distinct argument becomes a DISTINCT modifier (e.g. count).
defp expr({fun, _, args}, sources, query) when is_atom(fun) and is_list(args) do
  {modifier, args} =
    case args do
      [rest, :distinct] -> {"DISTINCT ", [rest]}
      _ -> {"", args}
    end
  case handle_call(fun, length(args)) do
    {:binary_op, op} ->
      [left, right] = args
      op_to_binary(left, sources, query) <>
        " #{op} "
        <> op_to_binary(right, sources, query)
    {:fun, fun} ->
      "#{fun}(" <> modifier <> Enum.map_join(args, ", ", &expr(&1, sources, query)) <> ")"
  end
end
defp expr(list, _sources, query) when is_list(list) do
  error!(query, "Array type is not supported by MySQL")
end
defp expr(%Decimal{} = decimal, _sources, _query) do
  Decimal.to_string(decimal, :normal)
end
# Binary literals render as hex: x'...'.
defp expr(%Ecto.Query.Tagged{value: binary, type: :binary}, _sources, _query)
    when is_binary(binary) do
  hex = Base.encode16(binary, case: :lower)
  "x'#{hex}'"
end
# Numeric-ish tags need no CAST; render the value directly.
defp expr(%Ecto.Query.Tagged{value: other, type: type}, sources, query)
    when type in [:id, :integer, :float] do
  expr(other, sources, query)
end
defp expr(%Ecto.Query.Tagged{value: other, type: type}, sources, query) do
  "CAST(#{expr(other, sources, query)} AS " <> ecto_to_db(type, query) <> ")"
end
defp expr(nil, _sources, _query), do: "NULL"
defp expr(true, _sources, _query), do: "TRUE"
defp expr(false, _sources, _query), do: "FALSE"
defp expr(literal, _sources, _query) when is_binary(literal) do
  "'#{escape_string(literal)}'"
end
defp expr(literal, _sources, _query) when is_integer(literal) do
  String.Chars.Integer.to_string(literal)
end
defp expr(literal, _sources, _query) when is_float(literal) do
  # MySQL doesn't support float cast
  expr = String.Chars.Float.to_string(literal)
  "(0 + #{expr})"
end
# MySQL intervals have no millisecond unit, so milliseconds are rescaled
# to microseconds; any other unit name is passed through verbatim.
defp interval(count, "millisecond", sources, query) do
  "INTERVAL (#{expr(count, sources, query)} * 1000) microsecond"
end

defp interval(count, interval, sources, query) do
  "INTERVAL #{expr(count, sources, query)} #{interval}"
end
# Parenthesizes nested binary-operator expressions so SQL operator
# precedence matches the Ecto AST.
defp op_to_binary({op, _, [_, _]} = expr, sources, query) when op in @binary_ops do
  "(#{expr(expr, sources, query)})"
end

defp op_to_binary(expr, sources, query), do: expr(expr, sources, query)
# Builds the tuple of {quoted_table, alias, model} triples for each query
# source. Aliases are the table's first letter plus the source index
# (e.g. "u0"); fragment sources get "f" plus the index and no table/model.
defp create_names(%{prefix: prefix, sources: sources}) do
  create_names(prefix, sources, 0, tuple_size(sources)) |> List.to_tuple()
end

defp create_names(prefix, sources, pos, limit) when pos < limit do
  current =
    case elem(sources, pos) do
      {table, model} ->
        name = String.first(table) <> Integer.to_string(pos)
        {quote_table(prefix, table), name, model}
      {:fragment, _, _} ->
        {nil, "f" <> Integer.to_string(pos), nil}
    end
  [current|create_names(prefix, sources, pos + 1, limit)]
end

# Recursion terminates when pos reaches the source count.
defp create_names(_prefix, _sources, pos, pos) do
  []
end
## DDL
alias Ecto.Migration.Table
alias Ecto.Migration.Index
alias Ecto.Migration.Reference
# Translates Ecto migration commands into MySQL DDL. Returns either a
# single SQL string or (for column rename) a list of statements executed
# in order.

def execute_ddl({command, %Table{} = table, columns}) when command in [:create, :create_if_not_exists] do
  engine = engine_expr(table.engine)
  options = options_expr(table.options)
  if_not_exists = if command == :create_if_not_exists, do: " IF NOT EXISTS", else: ""
  "CREATE TABLE" <> if_not_exists <>
    " #{quote_table(table.prefix, table.name)} (#{column_definitions(table, columns)})" <> engine <> options
end
def execute_ddl({command, %Table{} = table}) when command in [:drop, :drop_if_exists] do
  if_exists = if command == :drop_if_exists, do: " IF EXISTS", else: ""
  "DROP TABLE" <> if_exists <> " #{quote_table(table.prefix, table.name)}"
end
def execute_ddl({:alter, %Table{}=table, changes}) do
  "ALTER TABLE #{quote_table(table.prefix, table.name)} #{column_changes(table, changes)}"
end
def execute_ddl({:create, %Index{}=index}) do
  create = "CREATE#{if index.unique, do: " UNIQUE"} INDEX"
  using = if index.using, do: "USING #{index.using}", else: []
  # Partial (WHERE) indexes are not supported by MySQL.
  # NOTE(review): message reads "does not where in indexes" -- missing the
  # word "support"; left as-is since callers may assert on it.
  if index.where do
    error!(nil, "MySQL adapter does not where in indexes")
  end
  # LOCK=NONE approximates "concurrent" index creation on MySQL.
  assemble([create,
            quote_name(index.name),
            "ON",
            quote_table(index.prefix, index.table),
            "(#{Enum.map_join(index.columns, ", ", &index_expr/1)})",
            using,
            if_do(index.concurrently, "LOCK=NONE")])
end
def execute_ddl({:create_if_not_exists, %Index{}}),
  do: error!(nil, "MySQL adapter does not support create if not exists for index")
def execute_ddl({:drop, %Index{}=index}) do
  assemble(["DROP INDEX",
            quote_name(index.name),
            "ON #{quote_table(index.prefix, index.table)}",
            if_do(index.concurrently, "LOCK=NONE")])
end
def execute_ddl({:drop_if_exists, %Index{}}),
  do: error!(nil, "MySQL adapter does not support drop if exists for index")
def execute_ddl({:rename, %Table{}=current_table, %Table{}=new_table}) do
  "RENAME TABLE #{quote_table(current_table.prefix, current_table.name)} TO #{quote_table(new_table.prefix, new_table.name)}"
end
# Column rename: MySQL's CHANGE COLUMN requires the column type, so it is
# looked up from INFORMATION_SCHEMA and spliced in via a prepared statement.
def execute_ddl({:rename, %Table{}=table, current_column, new_column}) do
  [
    "SELECT @column_type := COLUMN_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '#{table.name}' AND COLUMN_NAME = '#{current_column}' LIMIT 1",
    "SET @rename_stmt = concat('ALTER TABLE #{quote_table(table.prefix, table.name)} CHANGE COLUMN `#{current_column}` `#{new_column}` ', @column_type)",
    "PREPARE rename_stmt FROM @rename_stmt",
    "EXECUTE rename_stmt"
  ]
end
# Raw SQL strings pass through untouched.
def execute_ddl(string) when is_binary(string), do: string
def execute_ddl(keyword) when is_list(keyword),
  do: error!(nil, "MySQL adapter does not support keyword lists in execute")
# Joins the per-column definitions for a CREATE TABLE body.
defp column_definitions(table, columns) do
Enum.map_join(columns, ", ", &column_definition(table, &1))
end
# A column backed by a %Reference{} also emits the foreign-key clause.
defp column_definition(table, {:add, name, %Reference{} = ref, opts}) do
assemble([quote_name(name), reference_column_type(ref.type, opts),
column_options(name, opts), reference_expr(ref, table, name)])
end
# Plain column: quoted name, SQL type, then DEFAULT/NULL/PRIMARY KEY options.
defp column_definition(_table, {:add, name, type, opts}) do
assemble([quote_name(name), column_type(type, opts), column_options(name, opts)])
end
# Joins the per-column clauses of an ALTER TABLE statement.
defp column_changes(table, columns) do
Enum.map_join(columns, ", ", &column_change(table, &1))
end
# ADD with a foreign-key reference; constraint_expr/3 supplies the
# ", ADD CONSTRAINT ... FOREIGN KEY ..." suffix.
defp column_change(table, {:add, name, %Reference{} = ref, opts}) do
assemble(["ADD", quote_name(name), reference_column_type(ref.type, opts),
column_options(name, opts), constraint_expr(ref, table, name)])
end
# Plain ADD COLUMN.
defp column_change(_table, {:add, name, type, opts}) do
assemble(["ADD", quote_name(name), column_type(type, opts), column_options(name, opts)])
end
# MODIFY with a foreign-key reference.
defp column_change(table, {:modify, name, %Reference{} = ref, opts}) do
assemble([
"MODIFY", quote_name(name), reference_column_type(ref.type, opts),
column_options(name, opts), constraint_expr(ref, table, name)
])
end
# Plain MODIFY COLUMN.
defp column_change(_table, {:modify, name, type, opts}) do
assemble(["MODIFY", quote_name(name), column_type(type, opts), column_options(name, opts)])
end
# DROP COLUMN.
defp column_change(_table, {:remove, name}), do: "DROP #{quote_name(name)}"
# Collects the DEFAULT / NULL / PRIMARY KEY fragments for one column.
# `Keyword.fetch/2` distinguishes an explicit `default: nil` from no default.
defp column_options(name, opts) do
  [
    default_expr(Keyword.fetch(opts, :default)),
    null_expr(Keyword.get(opts, :null)),
    pk_expr(Keyword.get(opts, :primary_key), name)
  ]
end

defp pk_expr(true, name), do: ", PRIMARY KEY(#{quote_name(name)})"
defp pk_expr(_other, _name), do: []

defp null_expr(false), do: "NOT NULL"
defp null_expr(true), do: "NULL"
defp null_expr(_other), do: []
# Renders the DEFAULT clause from a `Keyword.fetch/2` result:
# `{:ok, value}` means a default was given, `:error` means none.
defp default_expr({:ok, nil}),
do: "DEFAULT NULL"
defp default_expr({:ok, literal}) when is_binary(literal),
do: "DEFAULT '#{escape_string(literal)}'"
defp default_expr({:ok, literal}) when is_number(literal) or is_boolean(literal),
do: "DEFAULT #{literal}"
# Fragments are raw SQL supplied by the migration author and are NOT escaped.
defp default_expr({:ok, {:fragment, expr}}),
do: "DEFAULT #{expr}"
defp default_expr(:error),
do: []
# Index columns are plain names (no expression support here).
defp index_expr(literal), do: quote_name(literal)
# Storage engine clause; defaults to InnoDB when none is configured.
defp engine_expr(nil),
do: " ENGINE = INNODB"
defp engine_expr(storage_engine),
do: String.upcase(" ENGINE = #{storage_engine}")
# Extra table options: nil -> nothing, string -> appended verbatim,
# keyword lists are rejected (no serialization defined for them).
defp options_expr(nil),
do: ""
defp options_expr(keyword) when is_list(keyword),
do: error!(nil, "MySQL adapter does not support keyword lists in :options")
defp options_expr(options),
do: " #{options}"
# Maps an Ecto type plus :size/:precision/:scale options onto a MySQL
# column type. :size wins over :precision; bare :string defaults to (255).
defp column_type(type, opts) do
  base = ecto_to_db(type)

  cond do
    size = Keyword.get(opts, :size) ->
      "#{base}(#{size})"

    precision = Keyword.get(opts, :precision) ->
      "#{base}(#{precision},#{Keyword.get(opts, :scale) || 0})"

    type == :string ->
      "#{base}(255)"

    true ->
      base
  end
end
# Foreign-key clause used by ALTER TABLE (leads with ", ADD CONSTRAINT").
defp constraint_expr(%Reference{} = ref, table, name),
do: ", ADD CONSTRAINT #{reference_name(ref, table, name)} " <>
"FOREIGN KEY (#{quote_name(name)}) " <>
"REFERENCES #{quote_table(table.prefix, ref.table)}(#{quote_name(ref.column)})" <>
reference_on_delete(ref.on_delete)
# Foreign-key clause used inside CREATE TABLE (leads with ", CONSTRAINT").
defp reference_expr(%Reference{} = ref, table, name),
do: ", CONSTRAINT #{reference_name(ref, table, name)} FOREIGN KEY " <>
"(#{quote_name(name)}) REFERENCES " <>
"#{quote_table(table.prefix, ref.table)}(#{quote_name(ref.column)})" <>
reference_on_delete(ref.on_delete)
# Constraint name defaults to "<table>_<column>_fkey" when not given.
defp reference_name(%Reference{name: nil}, table, column),
do: quote_name("#{table.name}_#{column}_fkey")
defp reference_name(%Reference{name: name}, _table, _column),
do: quote_name(name)
# :serial references must match the referenced auto-increment column type.
defp reference_column_type(:serial, _opts), do: "BIGINT UNSIGNED"
defp reference_column_type(type, opts), do: column_type(type, opts)
defp reference_on_delete(:nilify_all), do: " ON DELETE SET NULL"
defp reference_on_delete(:delete_all), do: " ON DELETE CASCADE"
defp reference_on_delete(_), do: ""
## Helpers
# Wraps a field name in MySQL backtick quotes; atoms are stringified first.
# Names containing a backtick are rejected (cannot be quoted safely).
defp quote_name(name)

defp quote_name(name) when is_atom(name) do
  name |> Atom.to_string() |> quote_name()
end

defp quote_name(name) do
  if String.contains?(name, "`") do
    error!(nil, "bad field name #{inspect name}")
  end

  "`" <> name <> "`"
end
# Quotes an optionally-prefixed table name, e.g. `prefix`.`table`.
defp quote_table(nil, name), do: quote_table(name)

defp quote_table(prefix, name) do
  quote_table(prefix) <> "." <> quote_table(name)
end

defp quote_table(name) when is_atom(name) do
  name |> Atom.to_string() |> quote_table()
end

defp quote_table(name) do
  if String.contains?(name, "`") do
    error!(nil, "bad table name #{inspect name}")
  end

  "`" <> name <> "`"
end
# Flattens nested SQL fragments and space-joins them; `[]` entries (from
# if_do/2 and friends) disappear during flattening.
defp assemble(parts) do
  parts
  |> List.flatten()
  |> Enum.join(" ")
end

# Returns `value` when the condition is truthy, otherwise an empty fragment.
defp if_do(condition, value) do
  case condition do
    falsy when falsy in [nil, false] -> []
    _truthy -> value
  end
end
# Escapes a string literal for embedding in single quotes: doubles single
# quotes, then doubles backslashes. The order is safe because neither
# replacement produces the other's target character.
defp escape_string(value) when is_binary(value) do
value
|> :binary.replace("'", "''", [:global])
|> :binary.replace("\\", "\\\\", [:global])
end
# Maps Ecto primitive types to MySQL column types; unknown atoms fall
# through verbatim (e.g. :text, :date). Arrays are not supported by MySQL.
defp ecto_to_db(type, query \\ nil)
defp ecto_to_db({:array, _}, query),
do: error!(query, "Array type is not supported by MySQL")
defp ecto_to_db(:id, _query), do: "integer"
defp ecto_to_db(:binary_id, _query), do: "binary(16)"
defp ecto_to_db(:string, _query), do: "varchar"
defp ecto_to_db(:float, _query), do: "double"
defp ecto_to_db(:binary, _query), do: "blob"
defp ecto_to_db(:uuid, _query), do: "binary(16)" # MySQL does not support uuid
defp ecto_to_db(:map, _query), do: "text"
defp ecto_to_db(other, _query), do: Atom.to_string(other)
# Raises ArgumentError when no query context is available, otherwise an
# Ecto.QueryError carrying the offending query.
defp error!(nil, message), do: raise(ArgumentError, message)

defp error!(query, message),
  do: raise(Ecto.QueryError, query: query, message: message)
end
end
| 33 | 158 | 0.594316 |
ffd1e3567572dd2b2f635115830d2a219c7acb56 | 692 | ex | Elixir | lib/store_card_web/controllers/fallback_controller.ex | manojsamanta/stripe-store-card | 5acc474240fe0eb85cfa8a3ef1d696d12694ad14 | [
"MIT"
] | null | null | null | lib/store_card_web/controllers/fallback_controller.ex | manojsamanta/stripe-store-card | 5acc474240fe0eb85cfa8a3ef1d696d12694ad14 | [
"MIT"
] | null | null | null | lib/store_card_web/controllers/fallback_controller.ex | manojsamanta/stripe-store-card | 5acc474240fe0eb85cfa8a3ef1d696d12694ad14 | [
"MIT"
] | null | null | null | defmodule StoreCardWeb.FallbackController do
use Phoenix.Controller
alias StoreCardWeb.Router.Helpers, as: Routes
# Renders a 404 for controller actions that returned {:error, :not_found}.
def call(conn, {:error, :not_found}) do
conn
|> put_status(:not_found)
|> put_view(StoreCardWeb.ErrorView)
|> render(:"404")
end
# Renders a 403 for actions that returned {:error, :unauthorized}.
def call(conn, {:error, :unauthorized}) do
conn
|> put_status(403)
|> put_view(StoreCardWeb.ErrorView)
|> render(:"403")
end
# Redirects to the new-payment page for the given post.
#
# A leftover `IO.inspect post` debug statement was removed; it only echoed
# the post struct to stdout and played no part in the response.
def call(conn, {:route, post}) do
  redirect(conn, to: Routes.payment_path(conn, :new, %{id: post.id}))
end
# Catch-all: any other {:error, _} is rendered as a 404.
def call(conn, {:error, _}) do
conn
|> put_status(:not_found)
|> put_view(StoreCardWeb.ErrorView)
|> render(:"404")
end
end
| 21.625 | 71 | 0.648844 |
ffd1e998fca1a4a2aa411dd3a60fa4ca7c37851c | 3,999 | ex | Elixir | lib/brady.ex | dbernazal/brady | bd8165dc775ca57b9ad38f46a49b215e3d0c4369 | [
"MIT"
] | null | null | null | lib/brady.ex | dbernazal/brady | bd8165dc775ca57b9ad38f46a49b215e3d0c4369 | [
"MIT"
] | 1 | 2021-02-24T08:26:38.000Z | 2021-02-24T08:26:38.000Z | lib/brady.ex | isabella232/brady | 7e5e1a8a3d6cbcff90191410ae00f1f7211308b9 | [
"MIT"
] | null | null | null | defmodule Brady do
alias Phoenix.Controller
require Logger
@doc """
Derives CSS classes for the `<body>` tag from the connection's Phoenix
controller and action.

For a request handled by `CoolWidgetsController#show` this returns
`"cool-widgets cool-widgets-show"`, optionally preceded by a dasherized
version of the request path. Connections that were not dispatched through
a Phoenix controller yield an empty string.
"""
@spec body_class(%Plug.Conn{}) :: String.t
def body_class(conn = %Plug.Conn{private: %{phoenix_controller: _}}) do
  controller = format_controller_name(conn)

  [format_path(conn), controller, "#{controller}-#{Controller.action_name(conn)}"]
  |> Enum.join(" ")
  |> String.trim()
end

def body_class(_), do: ""
@doc """
Embeds an html safe raw SVG in the markup. Also takes an optional list of CSS
attributes and applies those to the SVG.
Ex:
Brady.inline_svg("test", class: "foo", "data-role": "bar") =>
{:safe,
"<svg class=\"foo\" data-role=\"bar\" height=\"100\" width=\"100\"><desc>This is a test svg</desc><circle cx=\"50\" cy=\"50\" r=\"40\" stroke=\"black\" stroke-width=\"3\" fill=\"red\"></circle></svg>"}
"""
@spec inline_svg(String.t, keyword) :: String.t
def inline_svg(file_name, options \\ []) do
# Looks the file up under the configured svg path (see static_path/1).
path = static_path(file_name)
case File.read(path) do
{:ok, file} -> render_with_options(file, options)
# Raises rather than silently rendering nothing if the asset is missing.
{:error, _} -> raise "No SVG found at #{path}"
end
end
@doc """
Encodes an image to base64-encoded data uri, compatible for img src attributes. Only recommended
for files less than 2kb. This threshold is configurable with mix config:
config :brady, inline_threshold: 10_240
Ex:
Brady.data_uri("placeholder.gif")
# => "data:image/gif;base64,iVBORw0KGgoAAAA"
"""
def data_uri(path) do
# Resolves `path` relative to the host app's priv/static directory;
# the owning OTP app must be configured under :brady, :otp_app.
app_dir = Application.app_dir(Application.get_env(:brady, :otp_app))
base64 =
[app_dir, "priv/static", path]
|> Path.join()
|> Path.expand()
|> File.read!()
|> Base.encode64()
|> maybe_warn_about_size(path)
# MIME type is inferred from the file extension.
mime = MIME.from_path(path)
"data:#{mime};base64,#{base64}"
end
# Logs a warning when the encoded payload exceeds the configured inline
# threshold (bytes, default 2048), then returns the encoding unchanged.
#
# Uses `byte_size/1` instead of `String.length/1`: Base64 output is pure
# ASCII so the two agree, but `byte_size/1` is O(1) while `String.length/1`
# walks the entire binary counting graphemes.
defp maybe_warn_about_size(base64, path) do
  limit = Application.get_env(:brady, :inline_threshold, 2048)

  if byte_size(base64) > limit do
    Logger.warn("""
    Warning: The file "#{path}" is large and not recommended for inlining in templates. Please reconsider inlining this image, or increase the inline threshold by setting:
    config :brady, inline_threshold: size_in_bytes
    """)
  end

  base64
end
# With no options the raw markup is marked html-safe as-is.
defp render_with_options(markup, []), do: {:safe, markup}
# With options, the markup is parsed and the attributes are injected onto
# the <svg> element before re-serializing.
defp render_with_options(markup, options) do
markup
|> Floki.parse_fragment!()
|> Floki.find("svg")
|> add_attributes(options)
|> Floki.raw_html
|> render_with_options([])
end
# Prepends the given options (atom keys stringified) to the element's
# existing attribute list.
defp add_attributes([{tag_name, existing_attributes, contents}], attributes) do
attributes = Enum.map(attributes, fn{key, value} -> {to_string(key), value} end)
{tag_name, attributes ++ existing_attributes, contents}
end
# Builds the absolute path to an SVG asset; the directory defaults to
# priv/static/svg but can be overridden via config :brady, :svg_path.
defp static_path(file_name) do
app_dir = Application.app_dir(Application.get_env(:brady, :otp_app))
path = Application.get_env(:brady, :svg_path) || "priv/static/svg"
[app_dir, path, "#{file_name}.svg"] |> Path.join() |> Path.expand
end
# Dash-joins the request path segments, dropping purely numeric ones
# (typically resource ids) so the class name stays stable per route.
defp format_path(conn) do
  conn.path_info
  |> remove_numbers()
  |> Enum.join("-")
end

defp remove_numbers(segments) do
  Enum.reject(segments, fn segment -> Integer.parse(segment) != :error end)
end
# Turns the controller module (e.g. MyApp.CoolWidgetsController) into a
# dasherized, lowercase class name ("cool-widgets"): drops the first two
# module segments, strips the "Controller" suffix, then dasherizes.
defp format_controller_name(conn) do
conn
|> Controller.controller_module
|> to_string
|> String.split(".")
|> Enum.slice(2..-1)
|> Enum.join("")
|> remove_controller
|> dasherize
|> String.downcase
end
# Inserts dashes before capital letters: "CoolWidgets" -> "Cool-Widgets".
# The lookahead split leaves an empty leading chunk, which is discarded.
defp dasherize(name) do
  name
  |> String.split(~r/(?=[A-Z])/)
  |> Enum.reject(&(&1 == ""))
  |> Enum.join("-")
end
# Length of the "Controller" suffix stripped from module basenames.
@controller_string_length 10

# Drops the trailing 10 characters — for controller modules this is the
# literal "Controller" suffix.
defp remove_controller(name) do
  keep = String.length(name) - @controller_string_length
  String.slice(name, 0, keep)
end
end
| 28.769784 | 208 | 0.653163 |
ffd21136ace31179d02cc3646a60eeee74e7f275 | 11,328 | ex | Elixir | lib/mix/lib/releases/errors.ex | PhillippOhlandt/distillery | fa6777fdc0c61aa8fcad54ffaabbb6829dd4fb38 | [
"MIT"
] | null | null | null | lib/mix/lib/releases/errors.ex | PhillippOhlandt/distillery | fa6777fdc0c61aa8fcad54ffaabbb6829dd4fb38 | [
"MIT"
] | null | null | null | lib/mix/lib/releases/errors.ex | PhillippOhlandt/distillery | fa6777fdc0c61aa8fcad54ffaabbb6829dd4fb38 | [
"MIT"
] | null | null | null | defmodule Mix.Releases.Errors do
@moduledoc false
@doc """
Formats a list of errors into a human-friendly message.

Expects a list of `{:error, _}` tuples. A single-element list renders as
one message; multiple errors are combined under a "Multiple errors
detected" header with one bullet per error.
"""
@spec format_errors(list(term())) :: String.t()
def format_errors([err]), do: format_error(err)

def format_errors(errs) when is_list(errs) do
  Enum.reduce(errs, "Multiple errors detected:\n", fn err, acc ->
    acc <> "\n- " <> format_error(err)
  end)
end
@doc """
Formats errors produced during a release into human-friendly messages
This expects an `{:error, _}` tuple, and will convert it to a String
"""
@spec format_error(term()) :: String.t()
def format_error(err)
# -- File / template / filesystem errors ------------------------------------
def format_error({:error, {:write_terms, mod, err}}) do
"Failed to write file: #{mod.format_error(err)}"
end
def format_error({:error, {:read_terms, mod, err}}) do
"Failed to parse file: #{mod.format_error(err)}"
end
def format_error({:error, {:template, err}}) do
"Template failed: #{Exception.message(err)}"
end
def format_error({:error, {:template_str, {str, description}}}) do
"Template failed, #{description}:\n template: #{str}"
end
def format_error({:error, {:mkdir_temp, mod, err}}) do
"Failed to create temp directory: #{mod.format_error(err)}"
end
# -- Application metadata / appup errors ------------------------------------
def format_error({:error, {:apps, {:missing_required_lib, app, lib_dir}}}) do
"You have included a version of ERTS which does not contain a required library\n" <>
" required: #{inspect(app)}\n search path: #{Path.relative_to_cwd(lib_dir)}"
end
def format_error({:error, {:apps, {:invalid_start_type, app, start_type}}}) do
"Invalid start type for #{app}: #{start_type}"
end
def format_error({:error, {:apps, err}}) do
"Failed to get app metadata:\n #{format_error(err)}"
end
def format_error({:error, {:appups, mod, {:invalid_dotapp, reason}}}) do
"Invalid .app file for appup generation:\n #{mod.format_error(reason)}"
end
def format_error({:error, {:appups, {:mismatched_versions, meta}}}) do
"Invalid appup specification, mismatched versions found:\n" <>
Enum.join(Enum.map(meta, fn {k, v} -> " #{k}: #{v}" end), "\n")
end
# -- Plugin errors ----------------------------------------------------------
def format_error({:error, {:plugin, {:plugin_failed, :bad_return_value, value}}}) do
"Plugin failed: invalid result returned\n" <>
" expected: nil or Release.t\n got: #{inspect(value)}"
end
def format_error({:error, {:plugin, {kind, err}}}) do
"Plugin failed: #{Exception.format(kind, err, System.stacktrace())}"
end
def format_error({:error, {:plugin, e}}) when is_map(e) do
"Plugin failed: #{Exception.message(e)}"
end
# -- Overlay errors ---------------------------------------------------------
def format_error({:error, {:invalid_overlay, overlay}}) do
"Invalid overlay, please check to make sure it is a valid overlay type:\n" <>
" overlay: #{inspect(overlay)}"
end
def format_error({:error, {:overlay_failed, mod, {reason, file, overlay}}}) do
"Overlay failed, #{mod.format_error(reason)}:\n" <>
" file: #{Path.relative_to_cwd(file)}\n overlay: #{inspect(overlay)}"
end
# -- Configuration errors ---------------------------------------------------
def format_error({:error, :missing_environment}) do
"Release failed, unable to load selected environment\n" <>
" - Make sure `rel/config.exs` has environments configured\n" <>
" - Make sure at least one is set as default OR\n" <>
" - Pass --env=<env_name> to `mix release`"
end
def format_error({:error, :missing_release}) do
"Release failed, unable to load selected release\n" <>
" - Make sure `rel/config.exs` has at least one release configured\n" <>
" - Make sure at least one is set as default OR\n" <>
" - Pass --name=<rel_name> to `mix release`"
end
# -- Assembler errors -------------------------------------------------------
def format_error({:error, {:assembler, {:missing_rel, name, version, path}}}) do
"Release failed, missing .rel file for #{name}:#{version}:\n" <>
" path: #{Path.relative_to_cwd(path)}"
end
def format_error({:error, {:assembler, {:missing_rels, name, v1, v2, path1, path2}}}) do
"Release failed, missing .rel files for:\n" <>
" #{name}:#{v1} @ #{Path.relative_to_cwd(path1)}\n" <>
" #{name}:#{v2} @ #{Path.relative_to_cwd(path2)}"
end
def format_error({:error, {:assembler, {:bad_upgrade_spec, :upfrom_is_current, vsn}}}) do
"Upgrade failed, the current version and upfrom version are the same: #{vsn}"
end
def format_error({:error, {:assembler, {:bad_upgrade_spec, :doesnt_exist, vsn, path}}}) do
"Upgrade failed, version #{vsn} does not exist:\n" <>
" expected at: #{Path.relative_to_cwd(path)}"
end
def format_error({:error, {:assembler, {:malformed_relfile, path, rel}}}) do
"Malformed .rel file:\n" <>
" path: #{Path.relative_to_cwd(path)}\n contents: #{inspect(rel)}"
end
def format_error({:error, {:assembler, {:invalid_sys_config, {{line, col}, mod, err}}}}) do
"Could not parse sys.config starting at #{line}:#{col}:\n #{mod.format_error(err)}"
end
def format_error({:error, {:assembler, {:invalid_sys_config, {line, mod, err}}}}) do
"Could not parse sys.config starting at line #{line}:\n #{mod.format_error(err)}"
end
def format_error({:error, {:assembler, {:invalid_sys_config, :invalid_terms}}}) do
"Invalid sys.config: must be a list of {:app_name, [{:key, value}]} tuples"
end
def format_error({:error, {:assembler, :erts_missing_for_upgrades}}) do
"Invalid configuration:\n" <>
" Hot upgrades will fail when include_erts: false is set,\n" <>
" you need to set include_erts to true or a path if you plan to use them!"
end
def format_error({:error, {:assembler, {:invalid_erts_path, path, maybe_path}}}) do
"Invalid ERTS path, did you mean #{maybe_path} instead of #{path}?"
end
def format_error({:error, {:assembler, {:make_boot_script, {:unknown, file}}}}) do
"Release failed, unable to generate boot script for an unknown reason\n" <>
" Please open an issue and include the contents of #{file}"
end
def format_error({:error, {:assembler, {:make_boot_script, reason}}}) do
"Release failed, during .boot generation:\n #{reason}"
end
def format_error({:error, {:assembler, mod, {:start_clean, reason}}}) do
"Release failed during start_clean.boot generation:\n #{mod.format_error(reason)}"
end
def format_error({:error, {:assembler, {:start_clean, :unknown}}}) do
"Release failed, unable to generate start_clean.boot for unknown reasons\n" <>
" Please open an issue for this problem."
end
def format_error({:error, {:assembler, {:start_clean, reason}}}) do
"Release failed, unable to generate start_clean.boot:\n" <> " #{reason}"
end
def format_error({:error, {:assembler, mod, {:copy_app, app_dir, target_dir, reason}}}) do
"Failed to copy application: #{mod.format_error(reason)}\n" <>
" app dir: #{Path.relative_to_cwd(app_dir)}\n" <>
" target dir: #{Path.relative_to_cwd(target_dir)}"
end
def format_error({:error, {:assembler, mod, {:copy_app, target_dir, reason}}}) do
"Failed to copy application: #{mod.format_error(reason)}\n" <>
" target dir: #{Path.relative_to_cwd(target_dir)}"
end
def format_error({:error, {:assembler, mod, {:copy_consolidated, src_dir, target_dir, reason}}}) do
"Failed to copy consolidated protocols: #{mod.format_error(reason)}\n" <>
" src dir: #{Path.relative_to_cwd(src_dir)}\n" <>
" target dir: #{Path.relative_to_cwd(target_dir)}"
end
def format_error({:error, {:assembler, mod, {:include_erts, reason, file}}}) do
"Failed to include ERTS: #{mod.format_error(reason)}\n" <>
" file: #{Path.relative_to_cwd(file)}"
end
def format_error({:error, {:assembler, mod, {:include_erts, reason}}}) do
"Failed to include ERTS: #{mod.format_error(reason)}"
end
# Generic assembler fallbacks: delegate formatting to the reporting module
# or Exception where possible.
def format_error({:error, {:assembler, mod, {reason, file}}}) do
"Release failed, #{mod.format_error(reason)}:\n file: #{Path.relative_to_cwd(file)}"
end
def format_error({:error, {:assembler, mod, reason}}) do
"Release failed: #{mod.format_error(reason)}"
end
def format_error({:error, {:assembler, err}}) when is_binary(err) do
"Release failed with multiple errors:\n" <> err
end
def format_error({:error, {:assembler, e}}) when is_map(e) do
"Release failed during assembly:\n #{Exception.message(e)}"
end
def format_error({:error, {:assembler, {:error, reason}}}) do
"Release failed: #{Exception.format(:error, reason, System.stacktrace())}"
end
def format_error({:error, {:assembler, {area, err}}}) when is_map(err) do
"Release failed (#{area}): #{Exception.message(err)}"
end
# -- Packaging / archiving errors -------------------------------------------
def format_error({:error, {:tar_generation_warn, mod, warnings}}) do
"Release packaging failed due to warnings:\n #{mod.format_warning(warnings)}"
end
def format_error({:error, {:tar_generation_error, mod, errors}}) do
"Release packaging failed due to errors:\n #{mod.format_error(errors)}"
end
def format_error({:error, {:tar_generation_error, reason}}) do
"Release packaging failed unexpectedly: #{inspect(reason)}"
end
def format_error({:error, {:executable, {mod, reason}}}) do
"Failed to generate executable: #{mod.format_error(reason)}"
end
def format_error({:error, {:archiver, {mod, reason}}}) do
"Failed to archive release: #{mod.format_error(reason)}"
end
def format_error({:error, {:archiver, {mod, reason, file}}}) do
"Failed to archive release: #{mod.format_error(reason)}\n" <>
" file: #{Path.relative_to_cwd(file)}"
end
def format_error({:error, {:archiver, e}}) when is_map(e) do
"Failed to archive release: #{Exception.message(e)}"
end
# -- ERTS validation errors -------------------------------------------------
def format_error({:error, {:invalid_erts, :missing_directory}}) do
"Invalid ERTS: missing erts-* directory:\n" <>
" Please check the path you provided to the `include_erts` option."
end
def format_error({:error, {:invalid_erts, :too_many}}) do
"Invalid ERTS: ambiguous path, too many erts-* directories found\n" <>
" Please ensure the path you provided to `include_erts` contains only a single erts-* directory."
end
def format_error({:error, {:invalid_erts, :missing_bin}}) do
"Invalid ERTS: missing bin directory\n" <>
" The path you provided to `include_erts` does not contain\n" <>
" `erts-*/bin`, please confirm the path is correct."
end
def format_error({:error, {:invalid_erts, :missing_lib}}) do
"Invalid ERTS: missing lib directory\n" <>
" The path you provided to `include_erts` does not contain\n" <>
" `erts-*/lib`, please confirm the path is correct."
end
def format_error({:error, {:invalid_erts, :cannot_determine_version}}) do
"Invalid ERTS: unable to locate erts-* directory\n" <>
" The path you provided to `include_erts` does not contain\n" <>
" `erts-*`, please confirm the path is correct."
end
# -- Final fallbacks --------------------------------------------------------
def format_error({:error, errors}) when is_list(errors), do: format_errors(errors)
def format_error({:error, reason}) do
e = Exception.message(Exception.normalize(:error, reason))
"#{e}:\n#{Exception.format_stacktrace(System.stacktrace())}"
end
end
| 38.927835 | 106 | 0.65828 |
ffd235f0e3cec199166db53ba79ed984dac8d851 | 63 | exs | Elixir | test/test_helper.exs | chess4ever/mailroom | 314b5f954453a9c86ff1aeebca4a941124f78cd6 | [
"MIT"
] | 15 | 2016-10-03T18:03:29.000Z | 2021-11-26T14:36:14.000Z | test/test_helper.exs | chess4ever/mailroom | 314b5f954453a9c86ff1aeebca4a941124f78cd6 | [
"MIT"
] | 7 | 2016-11-03T21:51:13.000Z | 2021-12-07T14:45:37.000Z | test/test_helper.exs | chess4ever/mailroom | 314b5f954453a9c86ff1aeebca4a941124f78cd6 | [
"MIT"
] | 5 | 2016-11-03T21:20:27.000Z | 2021-11-18T13:37:52.000Z | Mailroom.TestServer.Application.start(nil, nil)
ExUnit.start()
| 21 | 47 | 0.809524 |
ffd257879f67c2d4f70bf36f9daaf8d26b01ed18 | 24,486 | ex | Elixir | lib/cadet/assessments/assessments.ex | geshuming/cadet | 783cc84173b8f4b504db3d3b1ca91037d51b5c8e | [
"MIT"
] | null | null | null | lib/cadet/assessments/assessments.ex | geshuming/cadet | 783cc84173b8f4b504db3d3b1ca91037d51b5c8e | [
"MIT"
] | null | null | null | lib/cadet/assessments/assessments.ex | geshuming/cadet | 783cc84173b8f4b504db3d3b1ca91037d51b5c8e | [
"MIT"
] | null | null | null | defmodule Cadet.Assessments do
@moduledoc """
Assessments context contains domain logic for assessments management such as
missions, sidequests, paths, etc.
"""
use Cadet, [:context, :display]
import Ecto.Query
alias Cadet.Accounts.User
alias Cadet.Assessments.{Answer, Assessment, Query, Question, Submission}
alias Cadet.Autograder.GradingJob
alias Ecto.Multi
@xp_early_submission_max_bonus 100
@xp_bonus_assessment_type ~w(mission sidequest)a
@submit_answer_roles ~w(student)a
@unsubmit_assessment_role ~w(staff admin)a
@grading_roles ~w()a
@see_all_submissions_roles ~w(staff admin)a
@open_all_assessment_roles ~w(staff admin)a
@spec user_total_xp(%User{}) :: integer()
# Sums a user's total XP: the flat per-submission xp_bonus plus the graded
# (xp + xp_adjustment) totals from all their answers.
def user_total_xp(%User{id: user_id}) when is_ecto_id(user_id) do
total_xp_bonus =
Submission
|> where(student_id: ^user_id)
|> Repo.aggregate(:sum, :xp_bonus)
|> case do
# SUM over zero rows yields nil; treat as 0.
nil -> 0
xp when is_integer(xp) -> xp
end
total_xp =
Query.all_submissions_with_xp()
|> subquery()
|> where(student_id: ^user_id)
|> select([q], fragment("? + ?", sum(q.xp), sum(q.xp_adjustment)))
|> Repo.one()
# Converts the Decimal SUM (or nil) into an integer, defaulting to 0.
|> decimal_to_integer()
total_xp_bonus + total_xp
end
@spec user_max_grade(%User{}) :: integer()
# Sums the maximum attainable grade across all assessments the user has
# actually submitted (status :submitted only).
def user_max_grade(%User{id: user_id}) when is_ecto_id(user_id) do
Submission
|> where(status: ^:submitted)
|> where(student_id: ^user_id)
|> join(
:inner,
[s],
a in subquery(Query.all_assessments_with_max_grade()),
on: s.assessment_id == a.id
)
|> select([_, a], sum(a.max_grade))
|> Repo.one()
# nil (no submissions) collapses to 0.
|> decimal_to_integer()
end
# Sums `grade + adjustment` over all of the user's graded submissions.
# Returns 0 when the user has no submissions (SQL SUM yields NULL, which
# `decimal_to_integer/1` maps to 0).
#
# Added @spec for consistency with `user_total_xp/1` and `user_max_grade/1`.
@spec user_total_grade(%User{}) :: integer()
def user_total_grade(%User{id: user_id}) do
  Query.all_submissions_with_grade()
  |> subquery()
  |> where(student_id: ^user_id)
  |> select([q], fragment("? + ?", sum(q.grade), sum(q.adjustment)))
  |> Repo.one()
  |> decimal_to_integer()
end
# Converts a `Decimal` into an integer; anything else (e.g. the nil that an
# SQL SUM over zero rows produces) collapses to 0.
defp decimal_to_integer(value) do
  case Decimal.decimal?(value) do
    true -> Decimal.to_integer(value)
    false -> 0
  end
end
# Picks the story to show the user: an unattempted assessment's story is
# preferred (and should be played); otherwise falls back to the most
# recently closed attempted assessment's story without playing it.
# Returns %{play_story?: boolean, story: String.t | nil}.
def user_current_story(user = %User{}) do
{:ok, %{result: story}} =
Multi.new()
|> Multi.run(:unattempted, fn _repo, _ ->
{:ok, get_user_story_by_type(user, :unattempted)}
end)
|> Multi.run(:result, fn _repo, %{unattempted: unattempted_story} ->
if unattempted_story do
{:ok, %{play_story?: true, story: unattempted_story}}
else
{:ok, %{play_story?: false, story: get_user_story_by_type(user, :attempted)}}
end
end)
|> Repo.transaction()
story
end
@spec get_user_story_by_type(%User{}, :unattempted | :attempted) :: String.t() | nil
# Finds the story of a currently-open, published assessment that carries a
# story, picked differently per type:
# :unattempted — no submission exists; earliest-opening first
# :attempted — latest-closing first
def get_user_story_by_type(%User{id: user_id}, type)
when is_atom(type) do
filter_and_sort = fn query ->
case type do
:unattempted ->
# The left join below leaves s.id nil when no submission exists.
query
|> where([_, s], is_nil(s.id))
|> order_by([a], asc: a.open_at)
:attempted ->
query |> order_by([a], desc: a.close_at)
end
end
Assessment
|> where(is_published: true)
|> where([a], not is_nil(a.story))
|> where([a], a.open_at <= from_now(0, "second") and a.close_at >= from_now(0, "second"))
|> join(:left, [a], s in Submission, on: s.assessment_id == a.id and s.student_id == ^user_id)
|> filter_and_sort.()
|> order_by([a], a.type)
|> select([a], a.story)
|> first()
|> Repo.one()
end
# Looks up a published assessment by id and delegates to the struct clause;
# returns {:error, {:bad_request, _}} when no such assessment exists.
def assessment_with_questions_and_answers(id, user = %User{}) when is_ecto_id(id) do
  Assessment
  |> where(id: ^id)
  |> where(is_published: true)
  |> Repo.one()
  |> case do
    nil -> {:error, {:bad_request, "Assessment not found"}}
    assessment -> assessment_with_questions_and_answers(assessment, user)
  end
end
# Loads the assessment's questions with the user's answers (and graders)
# attached. Staff/admin may view before the open date; students may not.
def assessment_with_questions_and_answers(
assessment = %Assessment{id: id},
user = %User{role: role}
) do
if Timex.after?(Timex.now(), assessment.open_at) or role in @open_all_assessment_roles do
answer_query =
Answer
|> join(:inner, [a], s in assoc(a, :submission))
|> where([_, s], s.student_id == ^user.id)
questions =
Question
|> where(assessment_id: ^id)
|> join(:left, [q], a in subquery(answer_query), on: q.id == a.question_id)
|> join(:left, [_, a], g in assoc(a, :grader))
|> select([q, a, g], %{q | answer: %Answer{a | grader: g}})
|> order_by(:display_order)
|> Repo.all()
assessment = Map.put(assessment, :questions, questions)
{:ok, assessment}
else
{:error, {:unauthorized, "Assessment not open"}}
end
end
@doc """
Returns a list of assessments with all fields and an indicator showing whether it has been attempted
by the supplied user
"""
def all_published_assessments(user = %User{}) do
assessments =
Query.all_assessments_with_max_xp_and_grade()
|> subquery()
# The user's own submission (if any) supplies xp/grade/status.
|> join(
:left,
[a],
s in subquery(Query.all_submissions_with_xp_and_grade()),
on: a.id == s.assessment_id and s.student_id == ^user.id
)
# Total question count per assessment, for grading progress.
|> join(
:left,
[a, _],
q_count in subquery(Query.assessments_question_count()),
on: a.id == q_count.assessment_id
)
# Number of already-graded answers on the user's submission.
|> join(
:left,
[_, s, _],
a_count in subquery(Query.submissions_graded_count()),
on: s.id == a_count.submission_id
)
|> select([a, s, q_count, a_count], %{
a
| xp: fragment("? + ? + ?", s.xp, s.xp_adjustment, s.xp_bonus),
grade: fragment("? + ?", s.grade, s.adjustment),
user_status: s.status,
question_count: q_count.count,
graded_count: a_count.count
})
|> where(is_published: true)
|> order_by(:open_at)
|> Repo.all()
# Derive :grading/:graded/:none from the two counts, post-query.
|> Enum.map(fn assessment = %Assessment{} ->
%{
assessment
| grading_status:
build_grading_status(assessment.question_count, assessment.graded_count)
}
end)
{:ok, assessments}
end
# Derives an assessment's grading status from its question count and the
# number of graded answers: all graded -> :graded, some remaining ->
# :grading, otherwise (e.g. nil counts) -> :none.
defp build_grading_status(question_count, graded_count) do
  cond do
    graded_count == question_count -> :graded
    graded_count < question_count -> :grading
    true -> :none
  end
end
# Inserts a new assessment from the given params; returns
# {:ok, assessment} or {:error, changeset}.
def create_assessment(params) do
  changeset = Assessment.changeset(%Assessment{}, params)
  Repo.insert(changeset)
end
@spec insert_or_update_assessments_and_questions(map(), [map()]) ::
{:ok, any()}
| {:error, Ecto.Multi.name(), any(), %{optional(Ecto.Multi.name()) => any()}}
# Upserts an assessment and (re)inserts its questions in one transaction,
# assigning 1-based display_order from list position.
def insert_or_update_assessments_and_questions(assessment_params, questions_params) do
assessment_multi =
Multi.insert_or_update(
Multi.new(),
:assessment,
insert_or_update_assessment_changeset(assessment_params)
)
questions_params
|> Enum.with_index(1)
|> Enum.reduce(assessment_multi, fn {question_params, index}, multi ->
# NOTE(review): String.to_atom creates atoms dynamically; bounded here by
# the number of questions, but worth keeping in mind.
Multi.run(multi, String.to_atom("question#{index}"), fn _repo,
%{assessment: %Assessment{id: id}} ->
question_params
|> Map.put(:display_order, index)
|> build_question_changeset_for_assessment_id(id)
|> Repo.insert()
end)
end)
|> Repo.transaction()
end
@spec insert_or_update_assessment_changeset(map()) :: Ecto.Changeset.t()
# Builds a changeset keyed on the assessment :number: creates a new one,
# or — if the existing assessment has not yet opened — wipes its questions
# and updates it. Already-open assessments yield an invalid changeset.
defp insert_or_update_assessment_changeset(params = %{number: number}) do
Assessment
|> where(number: ^number)
|> Repo.one()
|> case do
nil ->
Assessment.changeset(%Assessment{}, params)
assessment ->
if Timex.after?(assessment.open_at, Timex.now()) do
# Delete all existing questions
%{id: assessment_id} = assessment
Question
|> where(assessment_id: ^assessment_id)
|> Repo.delete_all()
Assessment.changeset(assessment, params)
else
# if the assessment is already open, don't mess with it
create_invalid_changeset_with_error(:assessment, "is already open")
end
end
end
@spec build_question_changeset_for_assessment_id(map(), number() | String.t()) ::
        Ecto.Changeset.t()
# Builds a Question changeset with the assessment id merged into the
# params (existing :assessment_id keys are left untouched).
defp build_question_changeset_for_assessment_id(params, assessment_id)
     when is_ecto_id(assessment_id) do
  merged_params = Map.put_new(params, :assessment_id, assessment_id)
  Question.changeset(%Question{}, merged_params)
end
# Applies a partial update to an assessment via the shared simple_update/3
# helper (provided by `use Cadet`).
def update_assessment(id, params) when is_ecto_id(id) do
simple_update(
Assessment,
id,
using: &Assessment.changeset/2,
params: params
)
end
# Same, for questions.
def update_question(id, params) when is_ecto_id(id) do
simple_update(
Question,
id,
using: &Question.changeset/2,
params: params
)
end
# Marks the given assessment as published.
def publish_assessment(id), do: update_assessment(id, %{is_published: true})
# Appends a question to an assessment, placing it after any existing
# questions via put_display_order/2. Returns {:error, _} when the
# assessment does not exist.
def create_question_for_assessment(params, assessment_id) when is_ecto_id(assessment_id) do
assessment =
Assessment
|> where(id: ^assessment_id)
|> join(:left, [a], q in assoc(a, :questions))
|> preload([_, q], questions: q)
|> Repo.one()
if assessment do
params_with_assessment_id = Map.put_new(params, :assessment_id, assessment.id)
%Question{}
|> Question.changeset(params_with_assessment_id)
|> put_display_order(assessment.questions)
|> Repo.insert()
else
{:error, "Assessment not found"}
end
end
# Deletes the question with the given id.
#
# Previously a missing id crashed with a FunctionClauseError (Repo.get/2
# returned nil, which was fed straight into Repo.delete/1); it now returns
# {:error, :not_found} instead.
def delete_question(id) when is_ecto_id(id) do
  case Repo.get(Question, id) do
    nil -> {:error, :not_found}
    question -> Repo.delete(question)
  end
end
@doc """
Public internal api to submit new answers for a question. Possible return values are:
`{:ok, nil}` -> success
`{:error, error}` -> failed. `error` is in the format of `{http_response_code, error message}`
Note: In the event of `find_or_create_submission` failing due to a race condition, error will be:
`{:bad_request, "Missing or invalid parameter(s)"}`
"""
def answer_question(id, user = %User{role: role}, raw_answer) when is_ecto_id(id) do
# Only roles in @submit_answer_roles (students) may answer.
if role in @submit_answer_roles do
question =
Question
|> where(id: ^id)
|> join(:inner, [q], assessment in assoc(q, :assessment))
|> preload([_, a], assessment: a)
|> Repo.one()
# Guard chain: question exists, assessment is open, submission obtainable,
# not already finalised, and the answer upserts cleanly.
with {:question_found?, true} <- {:question_found?, is_map(question)},
{:is_open?, true} <- is_open?(question.assessment),
{:ok, submission} <- find_or_create_submission(user, question.assessment),
{:status, true} <- {:status, submission.status != :submitted},
{:ok, _} <- insert_or_update_answer(submission, question, raw_answer) do
update_submission_status(submission, question.assessment)
{:ok, nil}
else
{:question_found?, false} -> {:error, {:not_found, "Question not found"}}
{:is_open?, false} -> {:error, {:forbidden, "Assessment not open"}}
{:status, _} -> {:error, {:forbidden, "Assessment submission already finalised"}}
{:error, :race_condition} -> {:error, {:internal_server_error, "Please try again later."}}
_ -> {:error, {:bad_request, "Missing or invalid parameter(s)"}}
end
else
{:error, {:forbidden, "User is not permitted to answer questions"}}
end
end
@doc """
Finalises the calling student's submission for the given assessment.
Returns `{:ok, nil}` on success, or `{:error, {http_status, message}}`.
"""
def finalise_submission(assessment_id, %User{role: role, id: user_id})
    when is_ecto_id(assessment_id) do
  if role in @submit_answer_roles do
    submission =
      Submission
      |> where(assessment_id: ^assessment_id)
      |> where(student_id: ^user_id)
      |> join(:inner, [s], a in assoc(s, :assessment))
      |> preload([_, a], assessment: a)
      |> Repo.one()

    # Tagged steps; the `else` clauses translate the failing step into a
    # specific error tuple. Only :attempted submissions can be finalised.
    with {:submission_found?, true} <- {:submission_found?, is_map(submission)},
         {:is_open?, true} <- is_open?(submission.assessment),
         {:status, :attempted} <- {:status, submission.status},
         {:ok, updated_submission} <- update_submission_status_and_xp_bonus(submission) do
      # Kick off grading for the now-submitted submission.
      GradingJob.force_grade_individual_submission(updated_submission)
      {:ok, nil}
    else
      {:submission_found?, false} ->
        {:error, {:not_found, "Submission not found"}}

      {:is_open?, false} ->
        {:error, {:forbidden, "Assessment not open"}}

      {:status, :attempting} ->
        {:error, {:bad_request, "Some questions have not been attempted"}}

      {:status, :submitted} ->
        {:error, {:forbidden, "Assessment has already been submitted"}}

      _ ->
        {:error, {:internal_server_error, "Please try again later."}}
    end
  else
    {:error, {:forbidden, "User is not permitted to answer questions"}}
  end
end
@doc """
Reverts a :submitted submission back to :attempted, clearing its XP bonus and
all grading information on its answers. Only an admin, or the Avenger of the
submitting student, may unsubmit.
"""
def unsubmit_submission(submission_id, user = %User{id: user_id, role: role})
    when is_ecto_id(submission_id) do
  if role in @unsubmit_assessment_role do
    submission =
      Submission
      |> join(:inner, [s], a in assoc(s, :assessment))
      |> preload([_, a], assessment: a)
      |> Repo.get(submission_id)

    with {:submission_found?, true} <- {:submission_found?, is_map(submission)},
         {:is_open?, true} <- is_open?(submission.assessment),
         {:status, :submitted} <- {:status, submission.status},
         {:allowed_to_unsubmit?, true} <-
           {:allowed_to_unsubmit?,
            role == :admin || Cadet.Accounts.Query.avenger_of?(user, submission.student_id)} do
      # Two-step transaction: first revert the submission itself, then wipe
      # grading data from every answer belonging to it.
      Multi.new()
      |> Multi.run(
        :rollback_submission,
        fn _repo, _ ->
          submission
          |> Submission.changeset(%{
            status: :attempted,
            xp_bonus: 0,
            unsubmitted_by_id: user_id,
            unsubmitted_at: Timex.now()
          })
          |> Repo.update()
        end
      )
      |> Multi.run(:rollback_answers, fn _repo, _ ->
        Answer
        |> join(:inner, [a], q in assoc(a, :question))
        |> join(:inner, [a, _], s in assoc(a, :submission))
        |> preload([_, q, s], question: q, submission: s)
        |> where(submission_id: ^submission.id)
        |> Repo.all()
        |> Enum.reduce_while({:ok, nil}, fn answer, acc ->
          # Stop resetting answers as soon as one update fails.
          case acc do
            {:error, _} ->
              {:halt, acc}

            {:ok, _} ->
              {:cont,
               answer
               |> Answer.grading_changeset(%{
                 grade: 0,
                 adjustment: 0,
                 xp: 0,
                 xp_adjustment: 0,
                 autograding_status: :none,
                 autograding_results: [],
                 comment: nil,
                 grader_id: nil
               })
               |> Repo.update()}
          end
        end)
      end)
      |> Repo.transaction()

      # NOTE(review): the transaction result is discarded here — a failed
      # rollback still returns {:ok, nil} to the caller. Confirm intended.
      {:ok, nil}
    else
      {:submission_found?, false} ->
        {:error, {:not_found, "Submission not found"}}

      {:is_open?, false} ->
        {:error, {:forbidden, "Assessment not open"}}

      {:status, :attempting} ->
        {:error, {:bad_request, "Some questions have not been attempted"}}

      {:status, :attempted} ->
        {:error, {:bad_request, "Assessment has not been submitted"}}

      {:allowed_to_unsubmit?, false} ->
        {:error, {:forbidden, "Only Avenger of student or Admin is permitted to unsubmit"}}

      _ ->
        {:error, {:internal_server_error, "Please try again later."}}
    end
  else
    {:error, {:forbidden, "User is not permitted to unsubmit questions"}}
  end
end
@spec update_submission_status_and_xp_bonus(%Submission{}) ::
        {:ok, %Submission{}} | {:error, Ecto.Changeset.t()}
# Marks the submission as :submitted and awards an early-submission XP bonus:
# full bonus within 48 hours of opening, then one point less per additional
# hour (never below zero). Assessment types outside
# @xp_bonus_assessment_type earn no bonus.
defp update_submission_status_and_xp_bonus(submission = %Submission{}) do
  assessment = submission.assessment

  bonus =
    cond do
      assessment.type not in @xp_bonus_assessment_type ->
        0

      Timex.before?(Timex.now(), Timex.shift(assessment.open_at, hours: 48)) ->
        @xp_early_submission_max_bonus

      true ->
        hours_late = Timex.diff(Timex.now(), assessment.open_at, :hours) - 48
        max(0, @xp_early_submission_max_bonus - hours_late)
    end

  submission
  |> Submission.changeset(%{status: :submitted, xp_bonus: bonus})
  |> Repo.update()
end
# Marks the submission as :attempted once it has one answer per question in
# its assessment. Counts are taken inside a transaction so the comparison is
# consistent.
def update_submission_status(submission = %Submission{}, assessment = %Assessment{}) do
  # Counts the associated records (e.g. questions of an assessment) for a
  # single parent row.
  count_assoc = fn schema, assoc_name, record_id ->
    schema
    |> where(id: ^record_id)
    |> join(:inner, [m], a in assoc(m, ^assoc_name))
    |> select([_, a], count(a.id))
    |> Repo.one()
  end

  Multi.new()
  |> Multi.run(:assessment, fn _repo, _changes ->
    {:ok, count_assoc.(Assessment, :questions, assessment.id)}
  end)
  |> Multi.run(:submission, fn _repo, _changes ->
    {:ok, count_assoc.(Submission, :answers, submission.id)}
  end)
  |> Multi.run(:update, fn _repo, %{submission: answer_count, assessment: question_count} ->
    if answer_count == question_count do
      submission
      |> Submission.changeset(%{status: :attempted})
      |> Repo.update()
    else
      {:ok, nil}
    end
  end)
  |> Repo.transaction()
end
@doc """
Function returning submissions under a grader.
The input parameters are the user and group_only.
group_only is used to check whether only the groups under the grader should be returned.
The parameter is a boolean which is false by default.
The return value is {:ok, submissions} if no errors else its
{:error, {:unauthorized, "User is not permitted to grade."}}
"""
@spec all_submissions_by_grader(%User{}) ::
        {:ok, [%Submission{}]} | {:error, {:unauthorized, String.t()}}
def all_submissions_by_grader(grader = %User{role: role}, group_only \\ false) do
  # One query joining per-submission XP/grade aggregates, the student (with
  # their group), the user who unsubmitted (if any), and the assessment with
  # its max XP/grade; everything is projected onto the returned
  # %Submission{} structs.
  submission_query =
    Submission
    |> join(
      :inner,
      [s],
      x in subquery(Query.submissions_xp_and_grade()),
      on: s.id == x.submission_id
    )
    |> join(:inner, [s, _], st in assoc(s, :student))
    |> join(:inner, [_, _, st], g in assoc(st, :group))
    |> join(:left, [s, _, _, g], u in assoc(s, :unsubmitted_by))
    |> join(
      :inner,
      [s, _, _, _, _],
      a in subquery(Query.all_assessments_with_max_xp_and_grade()),
      on: s.assessment_id == a.id
    )
    |> select([s, x, st, g, u, a], %Submission{
      s
      | grade: x.grade,
        adjustment: x.adjustment,
        xp: x.xp,
        xp_adjustment: x.xp_adjustment,
        student: st,
        assessment: a,
        group_name: g.name,
        unsubmitted_by: u
    })

  cond do
    role in @grading_roles ->
      # These roles only ever see their own group's submissions.
      {:ok, submissions_by_group(grader, submission_query)}

    role in @see_all_submissions_roles ->
      submissions =
        if group_only do
          submissions_by_group(grader, submission_query)
        else
          Repo.all(submission_query)
        end

      {:ok, submissions}

    true ->
      {:error, {:unauthorized, "User is not permitted to grade."}}
  end
end
@spec get_answers_in_submission(integer() | String.t(), %User{}) ::
        {:ok, [%Answer{}]} | {:error, {:unauthorized, String.t()}}
# Returns the answers of one submission (sorted by question display order),
# preloaded with question, grader, and the submission's student. Graders in
# @grading_roles are restricted to their own students' submissions.
def get_answers_in_submission(id, grader = %User{role: role}) when is_ecto_id(id) do
  answer_query =
    Answer
    |> where(submission_id: ^id)
    |> join(:inner, [a], q in assoc(a, :question))
    |> join(:left, [a, ...], g in assoc(a, :grader))
    |> join(:inner, [a, ...], s in assoc(a, :submission))
    |> join(:inner, [a, ..., s], st in assoc(s, :student))
    |> preload([_, q, g, s, st], question: q, grader: g, submission: {s, student: st})

  cond do
    role in @grading_roles ->
      # Narrow the query to students supervised by this grader.
      students = Cadet.Accounts.Query.students_of(grader)

      answers =
        answer_query
        |> join(:inner, [..., s, _], t in subquery(students), on: t.id == s.student_id)
        |> Repo.all()
        |> Enum.sort_by(& &1.question.display_order)

      {:ok, answers}

    role in @see_all_submissions_roles ->
      answers =
        answer_query
        |> Repo.all()
        |> Enum.sort_by(& &1.question.display_order)

      {:ok, answers}

    true ->
      {:error, {:unauthorized, "User is not permitted to grade."}}
  end
end
@spec update_grading_info(
        %{submission_id: integer() | String.t(), question_id: integer() | String.t()},
        %{},
        %User{}
      ) ::
        {:ok, nil}
        | {:error, {:unauthorized | :bad_request | :internal_server_error, String.t()}}
# Applies grading attributes to the answer identified by submission and
# question id, recording the grader. The guard restricts this clause to
# permitted roles; anything else falls through to the clause below.
def update_grading_info(
      %{submission_id: submission_id, question_id: question_id},
      attrs,
      grader = %User{id: grader_id, role: role}
    )
    when is_ecto_id(submission_id) and is_ecto_id(question_id) and
           (role in @grading_roles or role in @see_all_submissions_roles) do
  # Record who performed this grading action.
  attrs = Map.put(attrs, "grader_id", grader_id)

  answer_query =
    Answer
    |> where(submission_id: ^submission_id)
    |> where(question_id: ^question_id)

  # checks if role is in @grading_roles or @see_all_submissions_roles
  # @grading_roles may only grade answers belonging to their own students;
  # @see_all_submissions_roles may grade any answer.
  answer_query =
    if role in @grading_roles do
      students = Cadet.Accounts.Query.students_of(grader)

      answer_query
      |> join(:inner, [a], s in assoc(a, :submission))
      |> join(:inner, [a, s], t in subquery(students), on: t.id == s.student_id)
    else
      answer_query
    end

  answer = Repo.one(answer_query)

  with {:answer_found?, true} <- {:answer_found?, is_map(answer)},
       {:valid, changeset = %Ecto.Changeset{valid?: true}} <-
         {:valid, Answer.grading_changeset(answer, attrs)},
       {:ok, _} <- Repo.update(changeset) do
    {:ok, nil}
  else
    {:answer_found?, false} ->
      {:error, {:bad_request, "Answer not found or user not permitted to grade."}}

    {:valid, changeset} ->
      {:error, {:bad_request, full_error_messages(changeset.errors)}}

    {:error, _} ->
      {:error, {:internal_server_error, "Please try again later."}}
  end
end

# Fallback clause: the guard above failed (invalid ids or a role without
# grading permission).
def update_grading_info(
      _,
      _,
      _
    ) do
  {:error, {:unauthorized, "User is not permitted to grade."}}
end
# Looks up the student's submission for the given assessment.
# Returns {:ok, submission} when one exists, {:error, nil} otherwise.
defp find_submission(user = %User{}, assessment = %Assessment{}) do
  query =
    Submission
    |> where(student_id: ^user.id)
    |> where(assessment_id: ^assessment.id)

  case Repo.one(query) do
    nil -> {:error, nil}
    submission -> {:ok, submission}
  end
end
# Tagged predicate used inside `with` chains: returns `{:is_open?, boolean}`
# where the boolean is true only for published assessments whose open/close
# window contains the current time.
defp is_open?(%Assessment{open_at: open_at, close_at: close_at, is_published: is_published}) do
  currently_open? = Timex.between?(Timex.now(), open_at, close_at)
  {:is_open?, currently_open? and is_published}
end
# Inserts a new, answer-less submission for the student/assessment pair.
# An insert failure is reported as :race_condition, since the only expected
# cause is a concurrent request creating the same submission first.
defp create_empty_submission(user = %User{}, assessment = %Assessment{}) do
  changeset = Submission.changeset(%Submission{}, %{student: user, assessment: assessment})

  case Repo.insert(changeset) do
    {:ok, _submission} = ok -> ok
    {:error, _changeset} -> {:error, :race_condition}
  end
end
# Returns the existing submission for this student/assessment, creating an
# empty one when none exists yet.
defp find_or_create_submission(user = %User{}, assessment = %Assessment{}) do
  # A successful find falls through `with` untouched; only the {:error, _}
  # case triggers creation.
  with {:error, _} <- find_submission(user, assessment) do
    create_empty_submission(user, assessment)
  end
end
# Upserts the student's answer for one question, keyed on
# (submission_id, question_id): on conflict only the answer content is
# replaced.
defp insert_or_update_answer(submission = %Submission{}, question = %Question{}, raw_answer) do
  changeset =
    Answer.changeset(%Answer{}, %{
      answer: build_answer_content(raw_answer, question.type),
      question_id: question.id,
      submission_id: submission.id,
      type: question.type
    })

  Repo.insert(
    changeset,
    on_conflict: [set: [answer: get_change(changeset, :answer)]],
    conflict_target: [:submission_id, :question_id]
  )
end
# Wraps the raw answer in the map shape expected for the question type:
# MCQ answers carry a choice id, programming answers carry code.
defp build_answer_content(raw_answer, question_type) do
  key =
    case question_type do
      :mcq -> :choice_id
      :programming -> :code
    end

  %{key => raw_answer}
end
# Staff only see submissions made by students they supervise.
defp submissions_by_group(grader = %User{role: :staff}, submission_query) do
  student_query = Cadet.Accounts.Query.students_of(grader)

  submission_query
  |> join(:inner, [s], st in subquery(student_query), on: s.student_id == st.id)
  |> Repo.all()
end

# Admins see every submission matched by the base query.
defp submissions_by_group(%User{role: :admin}, submission_query) do
  submission_query |> Repo.all()
end
end
| 31.554124 | 102 | 0.595851 |
ffd25e289b3120aff9c71289493009b6db7d0741 | 1,167 | ex | Elixir | apps/my_app_web/lib/my_app_web/channels/user_socket.ex | robmckinnon/phoenix-umbrella-with-node-js-example | 48cce2d9d9fc4564bc5983840c66d09c6594462d | [
"MIT"
] | null | null | null | apps/my_app_web/lib/my_app_web/channels/user_socket.ex | robmckinnon/phoenix-umbrella-with-node-js-example | 48cce2d9d9fc4564bc5983840c66d09c6594462d | [
"MIT"
] | null | null | null | apps/my_app_web/lib/my_app_web/channels/user_socket.ex | robmckinnon/phoenix-umbrella-with-node-js-example | 48cce2d9d9fc4564bc5983840c66d09c6594462d | [
"MIT"
] | null | null | null | defmodule MyAppWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", MyAppWeb.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
# Accepts every connection as-is; no token verification or default assigns
# are performed here.
def connect(_params, socket), do: {:ok, socket}
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# MyAppWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket) do
  # Anonymous sockets: no per-user id, so user-targeted broadcasts are not
  # possible (see the comments above).
  nil
end
end
| 30.710526 | 83 | 0.701799 |
ffd2665294cd17e002fcdc3cc3ffeb0272fba66a | 918 | ex | Elixir | robotica_common/lib/eventbus.ex | brianmay/robotica-elixir | 8656510e54b7e32a547e3a54bf946f0e327911c9 | [
"RSA-MD"
] | 1 | 2019-04-23T09:16:44.000Z | 2019-04-23T09:16:44.000Z | robotica_common/lib/eventbus.ex | brianmay/robotica-elixir | 8656510e54b7e32a547e3a54bf946f0e327911c9 | [
"RSA-MD"
] | 107 | 2019-05-26T08:03:26.000Z | 2022-02-03T19:13:56.000Z | robotica_common/lib/eventbus.ex | brianmay/robotica-elixir | 8656510e54b7e32a547e3a54bf946f0e327911c9 | [
"RSA-MD"
] | 1 | 2019-08-10T20:44:24.000Z | 2019-08-10T20:44:24.000Z | defmodule RoboticaCommon.EventBus do
@moduledoc """
Defines helper functions from EventBus
"""
alias EventBus.Model.Event
alias EventBus.Util.Base62
alias EventBus.Util.MonotonicTime
defmacro __using__(_) do
  # Injected into the caller: requires this module (so the `notify/2` macro
  # can be expanded there) and records the caller's module name — with the
  # "Elixir." prefix stripped — in @eb_source, used as the event source.
  quote do
    require RoboticaCommon.EventBus
    @eb_source String.replace("#{__MODULE__}", "Elixir.", "")
  end
end
@spec notify_with_source(String.t(), atom(), any()) :: :ok
# Builds an EventBus event with a fresh unique id (also used as the
# transaction id) and publishes it under the given topic and source.
def notify_with_source(source, topic, data) do
  event_id = Base62.unique_id()
  started_at = MonotonicTime.now()

  event = %Event{
    id: event_id,
    topic: topic,
    transaction_id: event_id,
    data: data,
    initialized_at: started_at,
    occurred_at: MonotonicTime.now(),
    source: source
  }

  EventBus.notify(event)
end
defmacro notify(topic, data) do
  # Expands in the caller's context, where @eb_source was set by __using__/1,
  # so events are automatically attributed to the calling module.
  quote do
    RoboticaCommon.EventBus.notify_with_source(@eb_source, unquote(topic), unquote(data))
  end
end
end
| 22.95 | 91 | 0.669935 |
ffd26d5b30370c4def32a6af8cf73ef4e07b7022 | 13,418 | ex | Elixir | lib/mix/tasks/dialyzer.ex | gabrielpra1/dialyxir | b3fb59c566fce5dd2975a5abf01556b29bf53a38 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/dialyzer.ex | gabrielpra1/dialyxir | b3fb59c566fce5dd2975a5abf01556b29bf53a38 | [
"Apache-2.0"
] | 2 | 2019-11-19T15:45:11.000Z | 2019-11-21T22:38:34.000Z | lib/mix/tasks/dialyzer.ex | kenny-evitt/dialyxir | 62310a9cbcbb88a9ec08061d93946bf0f096bbbc | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Dialyzer do
@shortdoc "Runs dialyzer with default or project-defined flags."
@moduledoc """
This task compiles the mix project, creates a PLT with dependencies if needed and runs `dialyzer`. Much of its behavior can be managed in configuration as described below.
If executed outside of a mix project, it will build the core PLT files and exit.
## Command line options
* `--no-compile` - do not compile even if needed.
* `--no-check` - do not perform (quick) check to see if PLT needs update.
* `--force-check` - force PLT check also if lock file is unchanged.
useful when dealing with local deps.
* `--ignore-exit-status` - display warnings but do not halt the VM or return an exit status code
* `--list-unused-filters` - list unused ignore filters
useful for CI. do not use with `mix do`.
* `--plt` - only build the required plt(s) and exit.
* `--format short` - format the warnings in a compact format.
* `--format raw` - format the warnings in format returned before Dialyzer formatting
* `--format dialyxir` - format the warnings in a pretty printed format
* `--format dialyzer` - format the warnings in the original Dialyzer format
* `--quiet` - suppress all informational messages
Warning flags passed to this task are passed on to `:dialyzer`.
e.g.
`mix dialyzer --unmatched_returns`
## Configuration
All configuration is included under a dialyzer key in the mix project keyword list.
### Flags
You can specify any `dialyzer` command line argument with the :flags keyword.
Dialyzer supports a number of warning flags used to enable or disable certain kinds of analysis features. Until version 0.4, `dialyxir` used by default the additional warning flags shown in the example below. However some of these create warnings that are often more confusing than helpful, particularly to new users of Dialyzer. As of 0.4, there are no longer any flags used by default. To get the old behavior, specify them in your Mix project file. For compatibility reasons you can use either the `-Wwarning` convention of the dialyzer CLI, or (preferred) the `WarnOpts` atoms supported by the [API](http://erlang.org/doc/man/dialyzer.html#gui-1). e.g.
```elixir
def project do
[
app: :my_app,
version: "0.0.1",
deps: deps,
dialyzer: [flags: ["-Wunmatched_returns", :error_handling, :underspecs]]
]
end
```
### PLT Configuration
The task will build a PLT with default core Erlang applications: `:erts :kernel :stdlib :crypto` and re-use this core file in multiple projects - another core file is created for Elixir.
OTP application dependencies are (transitively) added to your project's PLT by default. The applications added are the same as you would see displayed with the command `mix app.tree`. There is also a `:plt_add_deps` option you can set to control the dependencies added. The following options are supported:
* :project - Direct Mix and OTP dependencies
* :apps_direct - Only Direct OTP application dependencies - not the entire tree
* :transitive - Include Mix and OTP application dependencies recursively
* :app_tree - Transitive OTP application dependencies e.g. `mix app.tree` (default)
```
def project do
[
app: :my_app,
version: "0.0.1",
deps: deps,
dialyzer: [plt_add_deps: :apps_direct, plt_add_apps: [:wx]]
]
end
```
You can also configure applications to include in the PLT more directly:
* `dialyzer: :plt_add_apps` - applications to include
*in addition* to the core applications and project dependencies.
* `dialyzer: :plt_ignore_apps` - applications to ignore from the list of core
applications and dependencies.
* `dialyzer: :plt_apps` - a list of applications to include that will replace the default,
include all the apps you need e.g.
### Other Configuration
* `dialyzer: :plt_file` - Deprecated - specify the plt file name to create and use - default is to create one in the project's current build environment (e.g. _build/dev/) specific to the Erlang/Elixir version used. Note that use of this key in version 0.4 or later will produce a deprecation warning - you can silence the warning by providing a pair with key :no_warn e.g. `plt_file: {:no_warn,"filename"}`.
* `dialyzer: :plt_core_path` - specify an alternative to MIX_HOME to use to store the Erlang and Elixir core files.
* `dialyzer: :ignore_warnings` - specify file path to filter well-known warnings.
"""
use Mix.Task
import System, only: [user_home!: 0]
import Dialyxir.Output, only: [info: 1, error: 1]
alias Dialyxir.Project
alias Dialyxir.Plt
alias Dialyxir.Dialyzer
defmodule Build do
  @shortdoc "Build the required plt(s) and exit."

  @moduledoc """
  This task compiles the mix project and creates a PLT with dependencies if needed.
  It is equivalent to running `mix dialyzer --plt`

  ## Command line options

  * `--no-compile` - do not compile even if needed.
  """

  use Mix.Task

  # Delegates to the main task with the --plt flag prepended.
  def run(args), do: Mix.Tasks.Dialyzer.run(["--plt" | args])
end
defmodule Clean do
  @shortdoc "Delete plt(s) and exit."

  @moduledoc """
  This task deletes PLT files and hash files.

  ## Command line options

  * `--all` - delete also core PLTs.
  """

  use Mix.Task

  @command_options [all: :boolean]

  # Parses the --all switch and delegates deletion to the main task.
  def run(args) do
    {opts, _argv, _errors} = OptionParser.parse(args, strict: @command_options)
    Mix.Tasks.Dialyzer.clean(opts)
  end
end
# Warnings enabled in addition to whatever the project configures; projects
# can opt out via dialyzer_removed_defaults (see dialyzer_warnings/1).
@default_warnings [:unknown]

# Switches from older releases, kept only so warn_old_options/1 can tell
# users they are no longer valid.
@old_options [
  halt_exit_status: :boolean
]

# All recognised CLI switches (old ones included so parsing still accepts
# them and we can warn instead of erroring).
@command_options Keyword.merge(@old_options,
                   force_check: :boolean,
                   ignore_exit_status: :boolean,
                   list_unused_filters: :boolean,
                   no_check: :boolean,
                   no_compile: :boolean,
                   plt: :boolean,
                   quiet: :boolean,
                   raw: :boolean,
                   format: :string
                 )
# Entry point for `mix dialyzer`. Parses CLI switches, optionally silences
# output (--quiet), verifies :dialyzer is installed, checks/builds PLTs and
# finally runs the analysis unless --plt was given. Outside a mix project,
# only the core PLTs are checked.
def run(args) do
  {opts, _, dargs} = OptionParser.parse(args, strict: @command_options)
  original_shell = Mix.shell()
  # --quiet swaps in the quiet shell for the duration of the task; the
  # original shell is restored at the very end.
  if opts[:quiet], do: Mix.shell(Mix.Shell.Quiet)
  opts = Keyword.delete(opts, :quiet)
  check_dialyzer()
  compatibility_notice()

  if Mix.Project.get() do
    Project.check_config()
    unless opts[:no_compile], do: Mix.Project.compile([])

    _ =
      unless no_check?(opts) do
        info("Finding suitable PLTs")
        force_check? = Keyword.get(opts, :force_check, false)
        check_plt(force_check?)
      end

    default = Dialyxir.Project.default_ignore_warnings()
    ignore_warnings = Dialyxir.Project.dialyzer_ignore_warnings()

    # Report which ignore-warnings file (if any) will be used.
    cond do
      !ignore_warnings && File.exists?(default) ->
        info("""
        No :ignore_warnings opt specified in mix.exs. Using default: #{default}.
        """)

      ignore_warnings && File.exists?(ignore_warnings) ->
        info("""
        ignore_warnings: #{ignore_warnings}
        """)

      ignore_warnings ->
        info("""
        :ignore_warnings opt specified in mix.exs: #{ignore_warnings}, but file does not exist.
        """)

      true ->
        info("""
        No :ignore_warnings opt specified in mix.exs and default does not exist.
        """)
    end

    warn_old_options(opts)
    unless opts[:plt], do: run_dialyzer(opts, dargs)
  else
    info("No mix project found - checking core PLTs...")
    Project.plts_list([], false) |> Plt.check()
  end

  Mix.shell(original_shell)
end
@doc """
Deletes the project PLTs and the dependency-hash file; with `all: true` the
shared core PLTs are removed as well. `fun` is invoked once per PLT and
defaults to `delete_plt/4`.
"""
def clean(opts, fun \\ &delete_plt/4) do
  check_dialyzer()
  compatibility_notice()
  # Core PLTs are shared across projects, so they are only removed on --all.
  if opts[:all], do: Project.plts_list([], false) |> Plt.check(fun)

  if Mix.Project.get() do
    {apps, _hash} = dependency_hash()
    info("Deleting PLTs")
    Project.plts_list(apps, true, true) |> Plt.check(fun)
    info("About to delete PLT hash file: #{plt_hash_file()}")
    File.rm(plt_hash_file())
  end
end
# Default deletion callback used by `clean/2` (passed to `Plt.check/2`):
# logs, then removes a single PLT file. The trailing arguments are unused.
def delete_plt(plt, _arg2, _arg3, _arg4) do
  "About to delete PLT file: #{plt}" |> info()
  File.rm(plt)
end
# Decides whether the PLT check should be skipped. Umbrella children never
# check their own PLT; if the parent PLT is missing it is built first.
# Otherwise the --no-check flag decides.
defp no_check?(opts) do
  child? = in_child?()
  plt_missing? = no_plt?()

  cond do
    child? and plt_missing? ->
      info("In an Umbrella child and no PLT found - building that first.")
      build_parent_plt()
      true

    child? ->
      info("In an Umbrella child, not checking PLT...")
      true

    true ->
      opts[:no_check]
  end
end
# Rebuilds the PLT when forced or when the dependency hash no longer matches
# the stored one; otherwise reports that the PLT is current.
defp check_plt(force_check?) do
  info("Checking PLT...")
  {apps, hash} = dependency_hash()

  if force_check? or not check_hash?(hash) do
    Project.plts_list(apps) |> Plt.check()
    File.write(plt_hash_file(), hash)
  else
    info("PLT is up to date!")
  end
end
# Assembles the argument list for Dialyxir.Dialyzer.dialyze/1, prints the
# timing line and every finding, and halts the VM with dialyzer's exit
# status unless --ignore-exit-status was given.
defp run_dialyzer(opts, dargs) do
  args = [
    {:check_plt, opts[:force_check] || false},
    {:init_plt, String.to_charlist(Project.plt_file())},
    {:files, Project.dialyzer_files()},
    {:warnings, dialyzer_warnings(dargs)},
    {:format, opts[:format]},
    {:raw, opts[:raw]},
    {:list_unused_filters, opts[:list_unused_filters]},
    {:ignore_exit_status, opts[:ignore_exit_status]}
  ]

  {status, exit_status, [time | result]} = Dialyzer.dialyze(args)
  info(time)

  # Clean runs report via info/1; runs with findings via error/1.
  report = if status == :ok, do: &info/1, else: &error/1
  Enum.each(result, report)

  unless exit_status == 0 || opts[:ignore_exit_status] do
    error("Halting VM with exit status #{exit_status}")
    System.halt(exit_status)
  end
end
# Combines project-configured flags and extra CLI switches (normalised to
# WarnOpts atoms) with the default warnings minus any the project removed.
defp dialyzer_warnings(dargs) do
  cli_flags = Enum.map(dargs, fn switch -> elem(switch, 0) end)
  configured = transform(Project.dialyzer_flags() ++ cli_flags)
  defaults = @default_warnings -- Project.dialyzer_removed_defaults()
  configured ++ defaults
end
# Normalises warning flags to WarnOpts atoms: lists are mapped element-wise,
# atoms pass through, and strings have the "-W" prefix and "--" stripped
# before conversion.
defp transform(options) when is_list(options) do
  Enum.map(options, &transform/1)
end

defp transform(option) when is_atom(option), do: option

defp transform(option) when is_binary(option) do
  trimmed = String.replace_leading(option, "-W", "")
  String.to_atom(String.replace(trimmed, "--", ""))
end
# An umbrella child's lockfile points outside the project directory, so a
# ".." in the configured lockfile path identifies it.
defp in_child? do
  Mix.Project.config()[:lockfile] |> String.contains?("..")
end
# True when the dependency PLT has not been built yet.
defp no_plt? do
  not File.exists?(Project.deps_plt())
end
# Builds the umbrella parent's PLT by shelling out to `mix dialyzer --plt`
# in the parent directory (derived from the lockfile location).
defp build_parent_plt() do
  parent_dir =
    Mix.Project.config()[:lockfile]
    |> Path.expand()
    |> Path.dirname()

  cmd_opts = [into: IO.stream(:stdio, :line), stderr_to_stdout: true, cd: parent_dir]

  # It would seem more natural to use Mix.in_project here to start in our parent project.
  # However part of the app.tree resolution includes loading all sub apps, and we will
  # hit an exception when we try to do that for *this* child, which is already loaded.
  {output, exit_code} = System.cmd("mix", ["dialyzer", "--plt"], cmd_opts)

  if exit_code != 0 do
    info("Error building parent PLT, process returned code: #{exit_code}\n#{output}")
  end
end
# Aborts with exit code 3 and installation instructions when the Erlang
# :dialyzer application is unavailable (some OS packages split it out of
# the base Erlang install).
defp check_dialyzer do
  if not Code.ensure_loaded?(:dialyzer) do
    error("""
    DEPENDENCY MISSING
    ------------------------
    If you are reading this message, then Elixir and Erlang are installed but the
    Erlang Dialyzer is not available. Probably this is because you installed Erlang
    with your OS package manager and the Dialyzer package is separate.
    On Debian/Ubuntu:
    `apt-get install erlang-dialyzer`
    Fedora:
    `yum install erlang-dialyzer`
    Arch and Homebrew include Dialyzer in their base erlang packages. Please report a Github
    issue to add or correct distribution-specific information.
    """)

    :erlang.halt(3)
  end
end
# Emits an error line for every parsed switch that belongs to the retired
# @old_options set. Always returns nil.
defp warn_old_options(opts) do
  for {opt, _value} <- opts, Keyword.has_key?(@old_options, opt) do
    error("#{opt} is no longer a valid CLI argument.")
  end

  nil
end
# One-time migration notice: pre-0.4 Dialyxir stored core PLTs as
# ~/.dialyxir_core_*.plt. If such a file exists while the new-style core
# PLTs are missing, explain what changed in 0.4.
defp compatibility_notice do
  old_plt = "#{user_home!()}/.dialyxir_core_*.plt"

  if File.exists?(old_plt) &&
       (!File.exists?(Project.erlang_plt()) || !File.exists?(Project.elixir_plt())) do
    info("""
    COMPATIBILITY NOTICE
    ------------------------
    Previous usage of a pre-0.4 version of Dialyxir detected. Please be aware that the 0.4 release
    makes a number of changes to previous defaults. Among other things, the PLT task is automatically
    run when dialyzer is run, PLT paths have changed,
    transitive dependencies are included by default in the PLT, and no additional warning flags
    beyond the dialyzer defaults are included. All these properties can be changed in configuration.
    (see `mix help dialyzer`).
    If you no longer use the older Dialyxir in any projects and do not want to see this notice each time you upgrade your Erlang/Elixir distribution, you can delete your old pre-0.4 PLT files. ( rm ~/.dialyxir_core_*.plt )
    """)
  end
end
@spec check_hash?(binary()) :: boolean()
# True only when the hash file exists and its contents equal `hash`.
defp check_hash?(hash) do
  match?({:ok, ^hash}, File.read(plt_hash_file()))
end
# The dependency-hash file lives next to the PLT, with a ".hash" suffix.
defp plt_hash_file do
  Project.plt_file() <> ".hash"
end
@spec dependency_hash :: {[atom()], binary()}
# Returns the application list for the PLT together with a SHA-1 digest over
# the lockfile and that list, used to detect when the PLT is stale.
def dependency_hash do
  lock_binary = Mix.Dep.Lock.read() |> :erlang.term_to_binary()
  apps = Project.cons_apps()
  info(inspect(apps))
  digest = :crypto.hash(:sha, lock_binary <> :erlang.term_to_binary(apps))
  {apps, digest}
end
end
| 34.582474 | 659 | 0.654047 |
ffd288c9e7f5777a30621ffc8d510587378a3fb6 | 1,133 | ex | Elixir | api/lib/designhubapi_web/controllers/user_controller.ex | rchatrath7/designhubapi | c38dafd3ba3a2b86ec3a12e71cbf95a5d366134d | [
"MIT"
] | null | null | null | api/lib/designhubapi_web/controllers/user_controller.ex | rchatrath7/designhubapi | c38dafd3ba3a2b86ec3a12e71cbf95a5d366134d | [
"MIT"
] | null | null | null | api/lib/designhubapi_web/controllers/user_controller.ex | rchatrath7/designhubapi | c38dafd3ba3a2b86ec3a12e71cbf95a5d366134d | [
"MIT"
] | null | null | null | defmodule DesignhubapiWeb.UserController do
use DesignhubapiWeb, :controller
alias Designhubapi.Accounts
alias Designhubapi.Accounts.User
action_fallback DesignhubapiWeb.FallbackController
# GET /users — renders every user.
def index(conn, _params) do
  render(conn, "index.json", users: Accounts.list_users())
end
# POST /users — on success responds 201 with a location header; anything
# else (e.g. an {:error, changeset}) is returned unchanged for the
# FallbackController configured on this module.
def create(conn, %{"user" => user_params}) do
  case Accounts.create_user(user_params) do
    {:ok, %User{} = user} ->
      conn
      |> put_status(:created)
      |> put_resp_header("location", user_path(conn, :show, user))
      |> render("show.json", user: user)

    error ->
      error
  end
end
# GET /users/:id — raises (via get_user!/1) when the id does not exist.
def show(conn, %{"id" => id}) do
  render(conn, "show.json", user: Accounts.get_user!(id))
end
# PUT/PATCH /users/:id — non-{:ok, %User{}} results are passed through
# unchanged for the FallbackController.
def update(conn, %{"id" => id, "user" => user_params}) do
  user = Accounts.get_user!(id)

  case Accounts.update_user(user, user_params) do
    {:ok, %User{} = updated} -> render(conn, "show.json", user: updated)
    error -> error
  end
end
# DELETE /users/:id — 204 No Content on success; other results fall through
# to the FallbackController.
def delete(conn, %{"id" => id}) do
  user = Accounts.get_user!(id)

  case Accounts.delete_user(user) do
    {:ok, %User{}} -> send_resp(conn, :no_content, "")
    error -> error
  end
end
end
| 26.348837 | 76 | 0.643425 |
ffd28928911f6e598fbdfc2f003a9649f362961b | 2,402 | exs | Elixir | apps/alert_processor/mix.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | null | null | null | apps/alert_processor/mix.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | 21 | 2021-03-12T17:05:30.000Z | 2022-02-16T21:48:35.000Z | apps/alert_processor/mix.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | 1 | 2021-12-09T15:09:53.000Z | 2021-12-09T15:09:53.000Z | defmodule AlertProcessor.Mixfile do
use Mix.Project
# Umbrella-app project definition: build, config, deps and lock paths all
# point two levels up to the umbrella root.
def project do
  [
    app: :alert_processor,
    version: "0.1.0",
    build_path: "../../_build",
    config_path: "../../config/config.exs",
    deps_path: "../../deps",
    lockfile: "../../mix.lock",
    elixir: "~> 1.6",
    elixirc_paths: elixirc_paths(Mix.env()),
    compilers: [:gettext] ++ Mix.compilers(),
    build_embedded: Mix.env() == :prod,
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    # https://github.com/dariodf/lcov_ex/issues/2
    test_coverage: [tool: LcovEx]
  ]
end
# Configuration for the OTP application.
# Type `mix help compile.app` for more information.
def application do
  [
    mod: {AlertProcessor, []},
    extra_applications: [:logger, :runtime_tools]
  ]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test) do
  # Tests also compile the support helpers.
  ["lib", "test/support"]
end

defp elixirc_paths(_env) do
  ["lib"]
end
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
# Project dependencies. Entries tagged `only: :test` (bypass, ex_machina)
# and `only: [:dev]` (eflame) are excluded from other environments.
defp deps do
  [
    {:bcrypt_elixir, "~> 1.0"},
    {:bypass, "~> 0.9.0", only: :test},
    {:calendar, "~> 1.0.0"},
    {:comeonin, "~> 3.0"},
    {:con_cache, "~> 0.12.1"},
    {:cowboy, "~> 1.0"},
    {:ecto, "~> 2.2.0"},
    {:exactor, "~> 2.2.0"},
    {:ex_aws, "~> 2.1.0"},
    {:ex_aws_sns, "~> 2.2.0"},
    {:ex_machina, "~> 2.2.0", only: :test},
    {:exvcr, "~> 0.10.1", runtime: false},
    {:fast_local_datetime, "~> 1.0.0"},
    {:gettext, "~> 0.11"},
    {:hackney, "~> 1.17.0"},
    {:httpoison, "~> 1.1.1"},
    {:paper_trail, "~> 0.7.5"},
    {:poison, "~> 2.0"},
    {:poolboy, ">= 0.0.0"},
    {:postgrex, ">= 0.0.0"},
    {:scrivener_ecto, "~> 1.0"},
    {:sentry, "~> 7.0"},
    {:sweet_xml, "~> 0.6"},
    {:eflame, "~> 1.0", only: [:dev]},
    {:tzdata, "~> 1.1.0"},
    {:uuid, "~> 1.1.8"}
  ]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
# defp aliases do
# ["ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
# "ecto.reset": ["ecto.drop", "ecto.setup"],
# "test": ["ecto.create --quiet", "ecto.migrate", "coveralls.json"]]
# end
end
| 30.405063 | 80 | 0.534555 |
ffd2914e360dc4b8d94a2b1afa32af9b73e56975 | 2,706 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta3_batch_process_request.ex | jamesvl/elixir-google-api | 6c87fb31d996f08fb42ce6066317e9d652a87acc | [
"Apache-2.0"
] | null | null | null | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta3_batch_process_request.ex | jamesvl/elixir-google-api | 6c87fb31d996f08fb42ce6066317e9d652a87acc | [
"Apache-2.0"
] | null | null | null | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta3_batch_process_request.ex | jamesvl/elixir-google-api | 6c87fb31d996f08fb42ce6066317e9d652a87acc | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE: auto-generated model module (see the header of this file) — edits
# here will be lost on regeneration.
defmodule GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BatchProcessRequest do
  @moduledoc """
  Request message for batch process document method.

  ## Attributes

  * `inputConfigs` (*type:* `list(GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BatchProcessRequestBatchInputConfig.t)`, *default:* `nil`) - The input config for each single document in the batch process.
  * `outputConfig` (*type:* `GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BatchProcessRequestBatchOutputConfig.t`, *default:* `nil`) - The overall output config for batch process.
  * `skipHumanReview` (*type:* `boolean()`, *default:* `nil`) - Whether Human Review feature should be skipped for this request. Default to false.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :inputConfigs =>
            list(
              GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BatchProcessRequestBatchInputConfig.t()
            ),
          :outputConfig =>
            GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BatchProcessRequestBatchOutputConfig.t(),
          :skipHumanReview => boolean()
        }

  # field/1..3 is presumably provided by GoogleApi.Gax.ModelBase (via `use`
  # above) — it declares each JSON field and its decoding target.
  field(:inputConfigs,
    as:
      GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BatchProcessRequestBatchInputConfig,
    type: :list
  )

  field(:outputConfig,
    as:
      GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BatchProcessRequestBatchOutputConfig
  )

  field(:skipHumanReview)
end
defimpl Poison.Decoder,
  for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BatchProcessRequest do
  # Delegates JSON decoding to the model's generated decode/2
  # (provided by GoogleApi.Gax.ModelBase).
  def decode(value, options) do
    GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BatchProcessRequest.decode(
      value,
      options
    )
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BatchProcessRequest do
  # Encodes the struct through the shared ModelBase encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.112676 | 223 | 0.757206 |
ffd2b672aecdb40a526128c435ad65c12e604edb | 12,241 | ex | Elixir | lib/sourceror/range.ex | kianmeng/sourceror | 4d67ce5b3e795032a4df85266db1cb216841fc20 | [
"Apache-2.0"
] | null | null | null | lib/sourceror/range.ex | kianmeng/sourceror | 4d67ce5b3e795032a4df85266db1cb216841fc20 | [
"Apache-2.0"
] | null | null | null | lib/sourceror/range.ex | kianmeng/sourceror | 4d67ce5b3e795032a4df85266db1cb216841fc20 | [
"Apache-2.0"
] | null | null | null | defmodule Sourceror.Range do
@moduledoc false
import Sourceror.Identifier, only: [is_unary_op: 1, is_binary_op: 1]
defp split_on_newline(string) do
String.split(string, ~r/\n|\r\n|\r/)
end
def get_range(quoted, _opts \\ []) do
range = do_get_range(quoted)
comments =
case quoted do
{_, meta, _} ->
meta[:leading_comments] || []
_ ->
[]
end
first_comment = List.first(comments)
last_comment = List.last(comments)
{start_line, start_column} =
if first_comment do
{first_comment.line, min(range.start[:column], first_comment.column || 1)}
else
{range.start[:line], range.start[:column]}
end
end_column =
if last_comment && last_comment.line == range.start[:line] do
comment_length = String.length(last_comment.text)
max(range.end[:column], (last_comment.column || 1) + comment_length)
else
range.end[:column]
end
%{
start: [line: start_line, column: start_column],
end: [line: range.end[:line], column: end_column]
}
end
@spec get_range(Macro.t()) :: Sourceror.range()
defp do_get_range(quoted)
# Module aliases
defp do_get_range({:__aliases__, meta, segments}) do
start_pos = Keyword.take(meta, [:line, :column])
last_segment_length = List.last(segments) |> to_string() |> String.length()
end_pos = meta[:last] |> Keyword.update!(:column, &(&1 + last_segment_length))
%{start: start_pos, end: end_pos}
end
# Strings
defp do_get_range({:__block__, meta, [string]}) when is_binary(string) do
lines = split_on_newline(string)
last_line = List.last(lines) || ""
end_line = meta[:line] + length(lines)
end_line =
if meta[:delimiter] in [~S/"""/, ~S/'''/] do
end_line
else
end_line - 1
end
end_column =
if meta[:delimiter] in [~S/"""/, ~S/'''/] do
meta[:column] + String.length(meta[:delimiter])
else
count = meta[:column] + String.length(last_line) + String.length(meta[:delimiter])
if end_line == meta[:line] do
count + 1
else
count
end
end
%{
start: Keyword.take(meta, [:line, :column]),
end: [line: end_line, column: end_column]
}
end
# Integers, Floats
defp do_get_range({:__block__, meta, [number]}) when is_integer(number) or is_float(number) do
%{
start: Keyword.take(meta, [:line, :column]),
end: [line: meta[:line], column: meta[:column] + String.length(meta[:token])]
}
end
# Atoms
defp do_get_range({:__block__, meta, [atom]}) when is_atom(atom) do
start_pos = Keyword.take(meta, [:line, :column])
string = Atom.to_string(atom)
delimiter = meta[:delimiter] || ""
lines = split_on_newline(string)
last_line = List.last(lines) || ""
end_line = meta[:line] + length(lines) - 1
end_column = meta[:column] + String.length(last_line) + String.length(delimiter)
end_column =
cond do
end_line == meta[:line] && meta[:delimiter] ->
# Column and first delimiter
end_column + 2
end_line == meta[:line] ->
# Just the colon
end_column + 1
end_line != meta[:line] ->
# You're beautiful as you are, Courage
end_column
end
%{
start: start_pos,
end: [line: end_line, column: end_column]
}
end
# Block with no parenthesis
defp do_get_range({:__block__, _, args} = quoted) do
if Sourceror.has_closing_line?(quoted) do
get_range_for_node_with_closing_line(quoted)
else
{first, rest} = List.pop_at(args, 0)
{last, _} = List.pop_at(rest, -1, first)
%{
start: get_range(first).start,
end: get_range(last).end
}
end
end
# Variables
defp do_get_range({form, meta, context}) when is_atom(form) and is_atom(context) do
start_pos = Keyword.take(meta, [:line, :column])
end_pos = [
line: start_pos[:line],
column: start_pos[:column] + String.length(Atom.to_string(form))
]
%{start: start_pos, end: end_pos}
end
# 2-tuples from keyword lists
defp do_get_range({left, right}) do
left_range = get_range(left)
right_range = get_range(right)
%{start: left_range.start, end: right_range.end}
end
# Handles arguments. Lists are always wrapped in `:__block__`, so the only case
# in which we can have a naked list is in partial keyword lists, as in `[:a, :b, c: d, e: f]`,
# or stabs like `:foo -> :bar`
defp do_get_range(list) when is_list(list) do
first_range = List.first(list) |> get_range()
start_pos = first_range.start
end_pos =
if last = List.last(list) do
get_range(last).end
else
first_range.end
end
%{start: start_pos, end: end_pos}
end
# Stabs
# a -> b
defp do_get_range({:->, _, [left_args, right]}) do
start_pos = get_range(left_args).start
end_pos = get_range(right).end
%{start: start_pos, end: end_pos}
end
# Access syntax
defp do_get_range({{:., _, [Access, :get]}, _, _} = quoted) do
get_range_for_node_with_closing_line(quoted)
end
# Qualified tuple
defp do_get_range({{:., _, [_, :{}]}, _, _} = quoted) do
get_range_for_node_with_closing_line(quoted)
end
# Interpolated atoms
defp do_get_range({{:., _, [:erlang, :binary_to_atom]}, meta, [interpolation, :utf8]}) do
interpolation =
Macro.update_meta(interpolation, &Keyword.put(&1, :delimiter, meta[:delimiter]))
get_range_for_interpolation(interpolation)
end
# Qualified call
defp do_get_range({{:., _, [left, right]}, meta, []} = quoted) when is_atom(right) do
if Sourceror.has_closing_line?(quoted) do
get_range_for_node_with_closing_line(quoted)
else
start_pos = get_range(left).start
identifier_pos = Keyword.take(meta, [:line, :column])
parens_length =
if meta[:no_parens] do
0
else
2
end
end_pos = [
line: identifier_pos[:line],
column:
identifier_pos[:column] + String.length(Atom.to_string(right)) +
parens_length
]
%{start: start_pos, end: end_pos}
end
end
# Qualified call with arguments
defp do_get_range({{:., _, [left, _]}, _meta, args} = quoted) do
if Sourceror.has_closing_line?(quoted) do
get_range_for_node_with_closing_line(quoted)
else
start_pos = get_range(left).start
end_pos = get_range(List.last(args) || left).end
%{start: start_pos, end: end_pos}
end
end
# Unary operators
defp do_get_range({op, meta, [arg]}) when is_unary_op(op) do
start_pos = Keyword.take(meta, [:line, :column])
arg_range = get_range(arg)
end_column =
if arg_range.end[:line] == meta[:line] do
arg_range.end[:column]
else
arg_range.end[:column] + String.length(to_string(op))
end
%{start: start_pos, end: [line: arg_range.end[:line], column: end_column]}
end
# Binary operators
defp do_get_range({op, _, [left, right]}) when is_binary_op(op) do
%{
start: get_range(left).start,
end: get_range(right).end
}
end
# Stepped ranges
defp do_get_range({:"..//", _, [left, _middle, right]}) do
%{
start: get_range(left).start,
end: get_range(right).end
}
end
# Bitstrings and interpolations
defp do_get_range({:<<>>, meta, _} = quoted) do
if meta[:delimiter] do
get_range_for_interpolation(quoted)
else
get_range_for_bitstring(quoted)
end
end
# Sigils
defp do_get_range({sigil, meta, [{:<<>>, _, segments}, modifiers]} = quoted)
when is_list(modifiers) do
case Atom.to_string(sigil) do
<<"sigil_", _name>> ->
# Congratulations, it's a sigil!
start_pos = Keyword.take(meta, [:line, :column])
end_pos =
get_end_pos_for_interpolation_segments(segments, meta[:delimiter], start_pos)
|> Keyword.update!(:column, &(&1 + length(modifiers)))
end_pos =
cond do
multiline_delimiter?(meta[:delimiter]) and !has_interpolations?(segments) ->
# If it has no interpolations and is a multiline sigil, then the first
# line will be incorrectly reported because the first string in the
# segments(which is the only one) won't have a leading newline, so
# we're compensating for that here. The end column will be at the same
# indentation as the start column, plus the length of the multiline
# delimiter
[line: end_pos[:line] + 1, column: start_pos[:column] + 3]
multiline_delimiter?(meta[:delimiter]) or has_interpolations?(segments) ->
# If it's a multiline sigil or has interpolations, then the positions
# will already be correctly calculated
end_pos
true ->
# If it's a single line sigil, add the offset for the ~x
Keyword.update!(end_pos, :column, &(&1 + 2))
end
%{
start: start_pos,
end: end_pos
}
_ ->
get_range_for_unqualified_call(quoted)
end
end
# Unqualified calls
defp do_get_range({call, _, _} = quoted) when is_atom(call) do
get_range_for_unqualified_call(quoted)
end
defp get_range_for_unqualified_call({_call, meta, args} = quoted) do
if Sourceror.has_closing_line?(quoted) do
get_range_for_node_with_closing_line(quoted)
else
start_pos = Keyword.take(meta, [:line, :column])
end_pos = get_range(List.last(args)).end
%{start: start_pos, end: end_pos}
end
end
defp get_range_for_node_with_closing_line({_, meta, _} = quoted) do
start_position = Sourceror.get_start_position(quoted)
end_position = Sourceror.get_end_position(quoted)
end_position =
if Keyword.has_key?(meta, :end) do
Keyword.update!(end_position, :column, &(&1 + 3))
else
# If it doesn't have an end token, then it has either a ), a ] or a }
Keyword.update!(end_position, :column, &(&1 + 1))
end
%{start: start_position, end: end_position}
end
defp get_range_for_interpolation({:<<>>, meta, segments}) do
start_pos = Keyword.take(meta, [:line, :column])
end_pos =
get_end_pos_for_interpolation_segments(segments, meta[:delimiter] || "\"", start_pos)
%{start: start_pos, end: end_pos}
end
def get_end_pos_for_interpolation_segments(segments, delimiter, start_pos) do
end_pos =
Enum.reduce(segments, start_pos, fn
string, pos when is_binary(string) ->
lines = split_on_newline(string)
length = String.length(List.last(lines) || "")
line_count = length(lines) - 1
column =
if line_count > 0 do
start_pos[:column] + length
else
pos[:column] + length
end
[
line: pos[:line] + line_count,
column: column
]
{:"::", _, [{_, meta, _}, {:binary, _, _}]}, _pos ->
meta
|> Keyword.get(:closing)
|> Keyword.take([:line, :column])
# Add the closing }
|> Keyword.update!(:column, &(&1 + 1))
end)
cond do
multiline_delimiter?(delimiter) and has_interpolations?(segments) ->
[line: end_pos[:line], column: String.length(delimiter) + 1]
has_interpolations?(segments) ->
Keyword.update!(end_pos, :column, &(&1 + 1))
true ->
Keyword.update!(end_pos, :column, &(&1 + 2))
end
end
defp has_interpolations?(segments) do
Enum.any?(segments, &match?({:"::", _, _}, &1))
end
defp multiline_delimiter?(delimiter) do
delimiter in ~w[""" ''']
end
defp get_range_for_bitstring(quoted) do
range = get_range_for_node_with_closing_line(quoted)
# get_range_for_node_with_closing_line/1 will add 1 to the ending column
# because it assumes it ends with ), ] or }, but bitstring closing token is
# >>, so we need to add another 1
update_in(range, [:end, :column], &(&1 + 1))
end
end
| 28.075688 | 96 | 0.610816 |
ffd2f7132bcf31fa82a0f12418181c9ef478804a | 255 | ex | Elixir | apps/cashtrail/test/support/factory.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | 6 | 2020-05-02T01:12:24.000Z | 2020-06-23T12:31:32.000Z | apps/cashtrail/test/support/factory.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | 16 | 2020-05-02T00:07:51.000Z | 2021-06-11T01:54:11.000Z | apps/cashtrail/test/support/factory.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | null | null | null | defmodule Cashtrail.Factory do
@moduledoc false
use ExMachina.Ecto, repo: Cashtrail.Repo
use Cashtrail.Factory.EntitiesFactory
use Cashtrail.Factory.BankingFactory
use Cashtrail.Factory.ContactsFactory
use Cashtrail.Factory.UsersFactory
end
| 23.181818 | 42 | 0.819608 |
ffd34c0f5592dbdd906a2a0ff68a587af5093259 | 2,914 | ex | Elixir | clients/service_networking/lib/google_api/service_networking/v1/model/google_cloud_servicenetworking_v1beta_connection.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/service_networking/lib/google_api/service_networking/v1/model/google_cloud_servicenetworking_v1beta_connection.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/service_networking/lib/google_api/service_networking/v1/model/google_cloud_servicenetworking_v1beta_connection.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceNetworking.V1.Model.GoogleCloudServicenetworkingV1betaConnection do
  @moduledoc """
  Represents a private connection resource. A private connection is implemented as a VPC Network Peering connection between a service producer's VPC network and a service consumer's VPC network.
  ## Attributes
  * `network` (*type:* `String.t`, *default:* `nil`) - The name of service consumer's VPC network that's connected with service producer network, in the following format: `projects/{project}/global/networks/{network}`. `{project}` is a project number, such as in `12345` that includes the VPC service consumer's VPC network. `{network}` is the name of the service consumer's VPC network.
  * `peering` (*type:* `String.t`, *default:* `nil`) - Output only. The name of the VPC Network Peering connection that was created by the service producer.
  * `reservedPeeringRanges` (*type:* `list(String.t)`, *default:* `nil`) - The name of one or more allocated IP address ranges for this service producer of type `PEERING`. Note that invoking this method with a different range when connection is already established will not modify already provisioned service producer subnetworks.
  * `service` (*type:* `String.t`, *default:* `nil`) - Output only. The name of the peering service that's associated with this connection, in the following format: `services/{service name}`.
  """
  # Auto-generated model: ModelBase supplies the struct and the field/1,2
  # macro describing JSON (de)serialization below.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :network => String.t(),
          :peering => String.t(),
          :reservedPeeringRanges => list(String.t()),
          :service => String.t()
        }
  field(:network)
  field(:peering)
  field(:reservedPeeringRanges, type: :list)
  field(:service)
end
defimpl Poison.Decoder,
  for: GoogleApi.ServiceNetworking.V1.Model.GoogleCloudServicenetworkingV1betaConnection do
  # Delegates JSON decoding to the model's generated decode/2.
  def decode(value, options) do
    GoogleApi.ServiceNetworking.V1.Model.GoogleCloudServicenetworkingV1betaConnection.decode(
      value,
      options
    )
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.ServiceNetworking.V1.Model.GoogleCloudServicenetworkingV1betaConnection do
  # Encodes the struct through the shared ModelBase encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 47.770492 | 389 | 0.742279 |
ffd37e9cbd20f2c6c42d99dfda5a996200b69f51 | 1,691 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/target_https_proxies_set_ssl_certificates_request.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_https_proxies_set_ssl_certificates_request.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_https_proxies_set_ssl_certificates_request.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.TargetHttpsProxiesSetSslCertificatesRequest do
  @moduledoc """
  ## Attributes
  - sslCertificates ([String.t]): New set of SslCertificate resources to associate with this TargetHttpsProxy resource. Currently exactly one SslCertificate resource must be specified. Defaults to: `null`.
  """
  # Auto-generated model: a single repeated string field.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :sslCertificates => list(any())
        }
  field(:sslCertificates, type: :list)
end
defimpl Poison.Decoder,
  for: GoogleApi.Compute.V1.Model.TargetHttpsProxiesSetSslCertificatesRequest do
  # Delegates JSON decoding to the model's generated decode/2.
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.TargetHttpsProxiesSetSslCertificatesRequest.decode(value, options)
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.Compute.V1.Model.TargetHttpsProxiesSetSslCertificatesRequest do
  # Encodes the struct through the shared ModelBase encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.82 | 205 | 0.767002 |
ffd3930305bd776130160c923b7bf64b90910161 | 82 | exs | Elixir | phoenix_crud/test/views/page_view_test.exs | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | phoenix_crud/test/views/page_view_test.exs | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | phoenix_crud/test/views/page_view_test.exs | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | defmodule PhoenixCrud.PageViewTest do
use PhoenixCrud.ConnCase, async: true
end
| 20.5 | 39 | 0.829268 |
ffd3ae3857220dd4d5b3f68c22378935c1872a00 | 3,192 | exs | Elixir | test/api/worker_definitions_test.exs | mathiaHT/ex_step_flow | 6496e9511239de64f00119428476338dfcde9dea | [
"MIT"
] | 4 | 2019-12-07T05:18:26.000Z | 2020-11-06T23:28:43.000Z | test/api/worker_definitions_test.exs | mathiaHT/ex_step_flow | 6496e9511239de64f00119428476338dfcde9dea | [
"MIT"
] | 53 | 2020-01-06T11:23:09.000Z | 2021-06-25T15:30:07.000Z | test/api/worker_definitions_test.exs | mathiaHT/ex_step_flow | 6496e9511239de64f00119428476338dfcde9dea | [
"MIT"
] | 3 | 2020-01-30T15:37:40.000Z | 2020-10-27T14:10:02.000Z | defmodule StepFlow.Api.WorkerDefinitionsTest do
use ExUnit.Case
use Plug.Test
alias Ecto.Adapters.SQL.Sandbox
alias StepFlow.Router
alias StepFlow.WorkerDefinitions.WorkerDefinition
doctest StepFlow
@opts Router.init([])
setup do
# Explicitly get a connection before each test
:ok = Sandbox.checkout(StepFlow.Repo)
# Setting the shared mode
Sandbox.mode(StepFlow.Repo, {:shared, self()})
for model <- [WorkerDefinition], do: StepFlow.Repo.delete_all(model)
:ok
end
test "GET /worker_definitions" do
{status, _headers, body} =
conn(:get, "/worker_definitions")
|> Router.call(@opts)
|> sent_resp
assert status == 200
assert body |> Jason.decode!() == %{"data" => [], "total" => 0}
{status, _headers, _body} =
conn(:post, "/worker_definitions", %{
queue_name: "my_queue",
label: "My Queue",
version: "1.2.3",
short_description: "short description",
description: "long description",
parameters: %{}
})
|> Router.call(@opts)
|> sent_resp
assert status == 201
{status, _headers, body} =
conn(:get, "/worker_definitions")
|> Router.call(@opts)
|> sent_resp
assert status == 200
assert body |> Jason.decode!() |> Map.get("total") == 1
end
test "POST /worker_definitions invalid" do
{status, _headers, body} =
conn(:post, "/worker_definitions", %{})
|> Router.call(@opts)
|> sent_resp
assert status == 422
assert body |> Jason.decode!() == %{
"errors" => %{
"queue_name" => ["can't be blank"],
"label" => ["can't be blank"],
"version" => ["can't be blank"],
"short_description" => ["can't be blank"],
"description" => ["can't be blank"]
}
}
end
test "POST /worker_definitions valid" do
{status, _headers, _body} =
conn(:post, "/worker_definitions", %{
queue_name: "my_queue",
label: "My Queue",
version: "1.2.3",
short_description: "short description",
description: "long description"
})
|> Router.call(@opts)
|> sent_resp
assert status == 201
end
test "SHOW /worker_definitions/:id" do
{status, _headers, body} =
conn(:post, "/worker_definitions", %{
queue_name: "my_queue",
label: "My Queue",
version: "1.2.3",
short_description: "short description",
description: "long description"
})
|> Router.call(@opts)
|> sent_resp
assert status == 201
workflow_id =
body
|> Jason.decode!()
|> Map.get("data")
|> Map.get("id")
|> Integer.to_string()
{status, _headers, body} =
conn(:get, "/worker_definitions/" <> workflow_id)
|> Router.call(@opts)
|> sent_resp
assert status == 200
data =
body
|> Jason.decode!()
|> Map.get("data")
queue_name =
data
|> Map.get("queue_name")
assert queue_name == "my_queue"
version =
data
|> Map.get("version")
assert version == "1.2.3"
end
end
| 24 | 72 | 0.558584 |
ffd3bf9c3e095503130956fd10da170466beb3aa | 655 | ex | Elixir | lib/day_05.ex | robindaumann/aoc-2020 | 63e2dd4cd3062b15fc5be3ec8b3fe75716701f7a | [
"MIT"
] | 1 | 2020-12-19T18:40:00.000Z | 2020-12-19T18:40:00.000Z | lib/day_05.ex | robindaumann/aoc-2020 | 63e2dd4cd3062b15fc5be3ec8b3fe75716701f7a | [
"MIT"
] | null | null | null | lib/day_05.ex | robindaumann/aoc-2020 | 63e2dd4cd3062b15fc5be3ec8b3fe75716701f7a | [
"MIT"
] | null | null | null | defmodule Day05 do
def part1(path) do
path
|> parse()
|> Enum.max()
end
def part2(path) do
uids = MapSet.new(parse(path))
{min, max} = Enum.min_max(uids)
[seat_id] = MapSet.difference(MapSet.new(min..max), uids) |> Enum.to_list()
# |> Enum.filter(fn e -> MapSet.member?(uids, e+1) and MapSet.member?(uids, e-1) end)
seat_id
end
def parse(path) do
path
|> File.stream!()
|> Enum.map(&uid/1)
end
def uid(boarding_pass) do
{number, "\n"} =
boarding_pass
|> String.replace(["B", "R"], "1")
|> String.replace(["F", "L"], "0")
|> Integer.parse(2)
number
end
end
| 18.714286 | 89 | 0.555725 |
ffd3d17b8c636faac206642eb1e009e0ccc3679f | 59 | ex | Elixir | lib/turret_web/views/page_view.ex | rosswilson/turret-elixir | effbc34a14e95d73db2075c66fe78f8432f83977 | [
"MIT"
] | 1 | 2021-02-03T23:34:04.000Z | 2021-02-03T23:34:04.000Z | lib/turret_web/views/page_view.ex | rosswilson/turret-elixir | effbc34a14e95d73db2075c66fe78f8432f83977 | [
"MIT"
] | 58 | 2021-02-16T10:16:08.000Z | 2022-03-07T10:57:32.000Z | lib/turret_web/views/page_view.ex | rosswilson/turret-elixir | effbc34a14e95d73db2075c66fe78f8432f83977 | [
"MIT"
] | null | null | null | defmodule TurretWeb.PageView do
use TurretWeb, :view
end
| 14.75 | 31 | 0.79661 |
ffd3d3286e8d2eb8b6fa0807c422ff42559b2638 | 1,487 | ex | Elixir | web/controllers/user_controller.ex | nsarno/winter | a65a6aa61d2b1af39277338277f8b3f479643939 | [
"MIT"
] | 3 | 2015-08-24T11:44:19.000Z | 2016-10-01T21:37:05.000Z | web/controllers/user_controller.ex | nsarno/winter | a65a6aa61d2b1af39277338277f8b3f479643939 | [
"MIT"
] | null | null | null | web/controllers/user_controller.ex | nsarno/winter | a65a6aa61d2b1af39277338277f8b3f479643939 | [
"MIT"
] | null | null | null | defmodule Storm.UserController do
use Storm.Web, :controller
alias Storm.User
plug :scrub_params, "user" when action in [:create, :update]
plug :authenticate! when action in [:update]
def index(conn, _params) do
users = Repo.all(User)
render(conn, "index.json", users: users)
end
def create(conn, %{"user" => user_params}) do
changeset = User.changeset(%User{}, user_params)
case Repo.insert(changeset) do
{:ok, user} ->
render(conn, "show.json", user: user)
{:error, changeset} ->
conn
|> put_status(:unprocessable_entity)
|> render(Storm.ChangesetView, "error.json", changeset: changeset)
end
end
def show(conn, %{"id" => id}) do
user = Repo.get!(User, id)
render conn, "show.json", user: user
end
def update(conn, %{"id" => id, "user" => user_params}) do
user = Repo.get!(User, id)
changeset = User.changeset(user, user_params)
case Repo.update(changeset) do
{:ok, user} ->
render(conn, "show.json", user: user)
{:error, changeset} ->
conn
|> put_status(:unprocessable_entity)
|> render(Storm.ChangesetView, "error.json", changeset: changeset)
end
end
def delete(conn, %{"id" => id}) do
user = Repo.get!(User, id)
# Here we use delete! (with a bang) because we expect
# it to always work (and if it does not, it will raise).
_user = Repo.delete!(user)
send_resp(conn, :no_content, "")
end
end
| 26.553571 | 74 | 0.616005 |
ffd3e05a6abd708b38e05aef42239caa30729ee3 | 4,146 | ex | Elixir | lib/mix/tasks/ggity_visual_geom_line.ex | srowley/ggity | e49f40141ea7b56e07cf9e00a20c04ed4d42df2f | [
"MIT"
] | 47 | 2020-06-21T15:23:54.000Z | 2022-03-13T01:24:19.000Z | lib/mix/tasks/ggity_visual_geom_line.ex | srowley/ggity | e49f40141ea7b56e07cf9e00a20c04ed4d42df2f | [
"MIT"
] | 3 | 2020-11-28T11:00:59.000Z | 2020-11-30T18:20:37.000Z | lib/mix/tasks/ggity_visual_geom_line.ex | srowley/ggity | e49f40141ea7b56e07cf9e00a20c04ed4d42df2f | [
"MIT"
] | 2 | 2020-11-28T10:40:10.000Z | 2021-05-28T06:44:47.000Z | defmodule Mix.Tasks.Ggity.Visual.Geom.Line do
@shortdoc "Launch a browser and draw sample line geom plots."
@moduledoc @shortdoc
use Mix.Task
alias GGity.{Examples, Plot}
@default_browser "firefox"
@doc false
@spec run(list(any)) :: any
def run([]), do: run([@default_browser])
def run(argv) do
plots =
Enum.join(
[
basic(),
fixed_line_and_mapped_points(),
fixed_aesthetics(),
date_time(),
group_by_color(),
group_by_linetype()
],
"\n"
)
test_file = "test/visual/visual_test.html"
browser =
case argv do
["--wsl"] ->
"/mnt/c/Program Files/Mozilla Firefox/firefox.exe"
[browser] ->
browser
end
File.write!(test_file, "<html><body #{grid_style()}>\n#{plots}\n</body></html>")
open_html_file(browser, test_file)
Process.sleep(1000)
File.rm(test_file)
end
defp open_html_file(browser, file) do
System.cmd(browser, [file])
end
defp grid_style do
"style='display: grid;grid-template-columns: repeat(3, 1fr)'"
end
defp basic do
Examples.economics()
|> Enum.filter(fn record -> Date.compare(record["date"], ~D[1970-12-31]) == :lt end)
|> Plot.new(%{x: "date", y: "unemploy"})
|> Plot.geom_line(size: 1)
|> Plot.labs(title: "Date data")
|> Plot.scale_x_date(date_labels: "%Y")
|> Plot.plot()
end
defp fixed_line_and_mapped_points do
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.labs(title: "Fixed linetype: :twodash", x: "Weight")
|> Plot.geom_line(linetype: :twodash, size: 1)
|> Plot.plot()
end
defp fixed_aesthetics do
Examples.economics()
|> Plot.new(%{x: "date", y: "unemploy"})
|> Plot.geom_line(color: "red", size: 1)
|> Plot.labs(title: "Fixed color: \"red\"")
|> Plot.scale_x_date(breaks: 6, date_labels: "%m/%d/%Y")
|> Plot.theme(axis_text_x: GGity.Element.Text.element_text(angle: 30))
|> Plot.plot()
end
defp date_time do
[
%{date_time: ~N[2001-01-01 00:00:00], price: 0.13},
%{date_time: ~N[2001-01-01 03:00:00], price: 0.5},
%{date_time: ~N[2001-01-01 06:00:00], price: 0.9},
%{date_time: ~N[2001-01-01 09:00:00], price: 0.63},
%{date_time: ~N[2001-01-01 12:00:00], price: 0.45},
%{date_time: ~N[2001-01-01 15:00:00], price: 0.25},
%{date_time: ~N[2001-01-01 18:00:00], price: 0.12},
%{date_time: ~N[2001-01-01 21:00:00], price: 0.13},
%{date_time: ~N[2001-01-02 00:00:00], price: 0.24},
%{date_time: ~N[2001-01-02 03:00:00], price: 0.74},
%{date_time: ~N[2001-01-02 06:00:00], price: 0.77},
%{date_time: ~N[2001-01-02 09:00:00], price: 0.63},
%{date_time: ~N[2001-01-02 12:00:00], price: 0.23},
%{date_time: ~N[2001-01-02 15:00:00], price: 0.53},
%{date_time: ~N[2001-01-02 21:00:00], price: 0.26},
%{date_time: ~N[2001-01-03 00:00:00], price: 0.27},
%{date_time: ~N[2001-01-03 03:00:00], price: 0.03},
%{date_time: ~N[2001-01-03 06:00:00], price: 0.79},
%{date_time: ~N[2001-01-03 09:00:00], price: 0.78},
%{date_time: ~N[2001-01-03 12:00:00], price: 0.08},
%{date_time: ~N[2001-01-03 18:00:00], price: 0.3},
%{date_time: ~N[2001-01-04 00:00:00], price: 0.7}
]
|> Plot.new(%{x: :date_time, y: :price})
|> Plot.geom_line(size: 1)
|> Plot.scale_x_datetime(date_labels: "%b %d H%H")
|> Plot.labs(title: "DateTime data")
|> Plot.plot()
end
defp group_by_color do
Examples.economics_long()
|> Plot.new(%{x: "date", y: "value01"})
|> Plot.labs(title: "Mapped to color")
|> Plot.geom_line(%{color: "variable"}, linetype: :dotted)
|> Plot.scale_x_date(breaks: 6, date_labels: "%Y")
|> Plot.plot()
end
defp group_by_linetype do
Examples.economics_long()
|> Plot.new(%{x: "date", y: "value01"})
|> Plot.labs(title: "Mapped to linetype, custom glyph")
|> Plot.geom_line(%{linetype: "variable"}, key_glyph: :path, color: "purple")
|> Plot.scale_x_date(breaks: 6, date_labels: "%Y")
|> Plot.plot()
end
end
| 31.409091 | 88 | 0.58273 |
ffd40b3be61cd4075ff855c616c5a4fb8e23fc85 | 152 | ex | Elixir | web/controllers/page_controller.ex | w0rd-driven/scratch_phoenix | 465e01af6e7d649bfb308edf91247e9d6c6a5876 | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | w0rd-driven/scratch_phoenix | 465e01af6e7d649bfb308edf91247e9d6c6a5876 | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | w0rd-driven/scratch_phoenix | 465e01af6e7d649bfb308edf91247e9d6c6a5876 | [
"MIT"
] | null | null | null | defmodule ScratchPhoenix.PageController do
use ScratchPhoenix.Web, :controller
def index(conn, _params) do
render conn, "index.html"
end
end
| 19 | 42 | 0.756579 |
ffd427c58d5cea9d1020695bdd92a0f2941e41f1 | 171 | ex | Elixir | lib/phoenix_react_playground_web/controllers/page_controller.ex | chaiwa-berian/phoenix_react_playground | 6c0394c05439378af46256de2f303d002c2c8b92 | [
"Unlicense"
] | 88 | 2017-09-01T03:13:00.000Z | 2021-06-26T10:56:29.000Z | lib/phoenix_react_playground_web/controllers/page_controller.ex | chaiwa-berian/phoenix_react_playground | 6c0394c05439378af46256de2f303d002c2c8b92 | [
"Unlicense"
] | 4 | 2020-07-17T07:52:09.000Z | 2021-09-01T06:46:54.000Z | lib/phoenix_react_playground_web/controllers/page_controller.ex | chaiwa-berian/phoenix_react_playground | 6c0394c05439378af46256de2f303d002c2c8b92 | [
"Unlicense"
] | 21 | 2017-09-01T03:18:09.000Z | 2021-09-23T09:07:41.000Z | defmodule PhoenixReactPlaygroundWeb.PageController do
use PhoenixReactPlaygroundWeb, :controller
def index(conn, _params) do
render(conn, "index.html")
end
end
| 21.375 | 53 | 0.783626 |
ffd434c0f2169716c123ddc9978f2aa2ede03f0d | 2,984 | exs | Elixir | config/target.exs | zacky1972/simd_sample | 3f50e698ae3c577e751ee7a5513086b7df7fca49 | [
"Apache-2.0"
] | 1 | 2021-09-03T07:56:54.000Z | 2021-09-03T07:56:54.000Z | config/target.exs | zacky1972/simd_sample | 3f50e698ae3c577e751ee7a5513086b7df7fca49 | [
"Apache-2.0"
] | null | null | null | config/target.exs | zacky1972/simd_sample | 3f50e698ae3c577e751ee7a5513086b7df7fca49 | [
"Apache-2.0"
] | null | null | null | import Config
# Use shoehorn to start the main application. See the shoehorn
# docs for separating out critical OTP applications such as those
# involved with firmware updates.
config :shoehorn,
init: [:nerves_runtime, :nerves_pack],
app: Mix.Project.config()[:app]
# Nerves Runtime can enumerate hardware devices and send notifications via
# SystemRegistry. This slows down startup and not many programs make use of
# this feature.
config :nerves_runtime, :kernel, use_system_registry: false
# Erlinit can be configured without a rootfs_overlay. See
# https://github.com/nerves-project/erlinit/ for more information on
# configuring erlinit.
config :nerves,
erlinit: [
hostname_pattern: "nerves-%s"
]
# Configure the device for SSH IEx prompt access and firmware updates
#
# * See https://hexdocs.pm/nerves_ssh/readme.html for general SSH configuration
# * See https://hexdocs.pm/ssh_subsystem_fwup/readme.html for firmware updates
keys =
[
Path.join([System.user_home!(), ".ssh", "id_rsa.pub"]),
Path.join([System.user_home!(), ".ssh", "id_ecdsa.pub"]),
Path.join([System.user_home!(), ".ssh", "id_ed25519.pub"])
]
|> Enum.filter(&File.exists?/1)
if keys == [],
do:
Mix.raise("""
No SSH public keys found in ~/.ssh. An ssh authorized key is needed to
log into the Nerves device and update firmware on it using ssh.
See your project's config.exs for this error message.
""")
config :nerves_ssh,
authorized_keys: Enum.map(keys, &File.read!/1)
# Configure the network using vintage_net
# See https://github.com/nerves-networking/vintage_net for more information
config :vintage_net,
regulatory_domain: "US",
config: [
{"usb0", %{type: VintageNetDirect}},
{"eth0", %{type: VintageNetDirect}},
# {"eth0",
# %{
# type: VintageNetEthernet,
# ipv4: %{method: :dhcp}
# }},
{"wlan0", %{type: VintageNetWiFi}}
]
config :mdns_lite,
# The `host` key specifies what hostnames mdns_lite advertises. `:hostname`
# advertises the device's hostname.local. For the official Nerves systems, this
# is "nerves-<4 digit serial#>.local". mdns_lite also advertises
# "nerves.local" for convenience. If more than one Nerves device is on the
# network, delete "nerves" from the list.
host: [:hostname, "nerves"],
ttl: 120,
# Advertise the following services over mDNS.
services: [
%{
name: "SSH Remote Login Protocol",
protocol: "ssh",
transport: "tcp",
port: 22
},
%{
name: "Secure File Transfer Protocol over SSH",
protocol: "sftp-ssh",
transport: "tcp",
port: 22
},
%{
name: "Erlang Port Mapper Daemon",
protocol: "epmd",
transport: "tcp",
port: 4369
}
]
# Import target specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
# Uncomment to use target specific configurations
# import_config "#{Mix.target()}.exs"
| 29.254902 | 81 | 0.68063 |
ffd470a585a0b26a45e6abf2bda0a33c429299b1 | 1,974 | ex | Elixir | clients/chrome_ux_report/lib/google_api/chrome_ux_report/v1/model/url_normalization.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/chrome_ux_report/lib/google_api/chrome_ux_report/v1/model/url_normalization.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/chrome_ux_report/lib/google_api/chrome_ux_report/v1/model/url_normalization.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ChromeUXReport.V1.Model.UrlNormalization do
@moduledoc """
Object representing the normalization actions taken to normalize a url to achieve a higher chance of successful lookup. These are simple automated changes that are taken when looking up the provided `url_patten` would be known to fail. Complex actions like following redirects are not handled.
## Attributes
* `normalizedUrl` (*type:* `String.t`, *default:* `nil`) - The URL after any normalization actions. This is a valid user experience URL that could reasonably be looked up.
* `originalUrl` (*type:* `String.t`, *default:* `nil`) - The original requested URL prior to any normalization actions.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:normalizedUrl => String.t() | nil,
:originalUrl => String.t() | nil
}
field(:normalizedUrl)
field(:originalUrl)
end
defimpl Poison.Decoder, for: GoogleApi.ChromeUXReport.V1.Model.UrlNormalization do
def decode(value, options) do
GoogleApi.ChromeUXReport.V1.Model.UrlNormalization.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ChromeUXReport.V1.Model.UrlNormalization do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.48 | 295 | 0.747214 |
ffd47496d6d0bc8f9a4c7d7a2143207f29cee184 | 1,341 | ex | Elixir | lib/twitch/auth.ex | aeturnum/twitch_discord_connector | b965ba1164540c92a925d2bd08e3fa299dfd457a | [
"MIT"
] | null | null | null | lib/twitch/auth.ex | aeturnum/twitch_discord_connector | b965ba1164540c92a925d2bd08e3fa299dfd457a | [
"MIT"
] | null | null | null | lib/twitch/auth.ex | aeturnum/twitch_discord_connector | b965ba1164540c92a925d2bd08e3fa299dfd457a | [
"MIT"
] | null | null | null | defmodule TwitchDiscordConnector.Twitch.Auth do
@moduledoc """
Twitch helpers to manage get auth headers based on store secrets.
"""
# http -v POST https://id.twitch.tv/oauth2/token
# client_id=... client_secret=... grant_type=client_credentials scope=''
alias TwitchDiscordConnector.JsonDB.TwitchApiDB
alias TwitchDiscordConnector.Twitch.Common
alias TwitchDiscordConnector.Util.L
@doc """
Get auth header or, if we need to renew our header, get it.
"""
def auth do
case TwitchApiDB.auth() do
nil -> refresh_auth() |> Map.get("header")
creds -> creds
end
|> headers()
end
defp headers(header) do
client_id = Map.get(TwitchApiDB.secrets(), "client_id")
[{"Authorization", header}, {"Client-Id", client_id}]
end
@doc """
Method that actually does the request to get a new bearer token.
"""
def refresh_auth do
L.i("Refreshing Auth")
Common.post(%{
url: "https://id.twitch.tv/oauth2/token",
body:
Map.merge(
TwitchApiDB.secrets(),
%{grant_type: "client_credentials", scope: ""}
)
})
|> case do
{:ok, _, info} ->
TwitchApiDB.set_auth(info)
{atm, code, info} ->
L.e("Got error on refreshing auth: {#{inspect(atm)}, #{code}, #{inspect(info)}}")
%{}
end
end
end
| 24.381818 | 89 | 0.62267 |
ffd48446fad3f77c3b382c7a40b69b719d435ff1 | 3,090 | exs | Elixir | test/custom_functions_test.exs | scohen/instruments | 6162bd3280795b544d052fce871acd9975aee542 | [
"MIT"
] | 167 | 2017-11-09T08:25:21.000Z | 2020-03-05T12:26:20.000Z | test/custom_functions_test.exs | Seanpm2001-Discord/instruments | 4e8a34655d5b469567be69ccf3cf2dcdeba798f6 | [
"MIT"
] | 7 | 2020-05-13T15:16:08.000Z | 2022-03-03T01:39:56.000Z | test/custom_functions_test.exs | Seanpm2001-Discord/instruments | 4e8a34655d5b469567be69ccf3cf2dcdeba798f6 | [
"MIT"
] | 16 | 2017-11-10T10:35:37.000Z | 2020-02-25T09:54:53.000Z | defmodule Instruments.CustomFunctionsTest do
use ExUnit.Case
alias Instruments.CustomFunctions
import MetricsAssertions
use Instruments
setup do
{:ok, _fake_statsd} = FakeStatsd.start_link(self())
:ok
end
defmodule Custom do
use CustomFunctions, prefix: "custom"
end
describe "adding a prefix" do
test "to increment calls" do
Custom.increment("foo.bar.baz")
assert_metric_reported(:increment, "custom.foo.bar.baz", 1)
Custom.increment("foo.bar.baz", 3)
assert_metric_reported(:increment, "custom.foo.bar.baz", 3)
Custom.increment("foo.bar.baz", 4, tags: ["stinky"])
assert_metric_reported(:increment, "custom.foo.bar.baz", 4, tags: ["stinky"])
end
test "to decrement calls" do
Custom.decrement("foo.bar.bax")
assert_metric_reported(:decrement, "custom.foo.bar.bax", 1)
Custom.decrement("foo.bar.bax", 3)
assert_metric_reported(:decrement, "custom.foo.bar.bax", 3)
Custom.decrement("foo.bar.baz", 4, tags: ["stinky"])
assert_metric_reported(:decrement, "custom.foo.bar.baz", 4, tags: ["stinky"])
end
test "to gauge calls" do
Custom.gauge("my.gauge", 384)
assert_metric_reported(:gauge, "custom.my.gauge", 384)
Custom.gauge("my.gauge", 946, tags: ["sweet_gauge"])
assert_metric_reported(:gauge, "custom.my.gauge", 946, tags: ["sweet_gauge"])
end
test "to histogram calls" do
Custom.histogram("my.histogram", 900, sample_rate: 1.0)
assert_metric_reported(:histogram, "custom.my.histogram", 900)
Custom.histogram("my.histogram", 901, tags: ["cool_metric"], sample_rate: 1.0)
assert_metric_reported(:histogram, "custom.my.histogram", 901, tags: ["cool_metric"])
end
test "to timing calls" do
Custom.timing("my.timing", 900, sample_rate: 1.0)
assert_metric_reported(:timing, "custom.my.timing", 900)
Custom.timing("my.timing", 901, tags: ["speed:fast"], sample_rate: 1.0)
assert_metric_reported(:timing, "custom.my.timing", 901, tags: ["speed:fast"])
end
test "to set calls" do
Custom.set("my.set", 900)
assert_metric_reported(:set, "custom.my.set", 900)
Custom.set("my.set", 901, tags: ["speed:fast"])
assert_metric_reported(:set, "custom.my.set", 901, tags: ["speed:fast"])
end
test "to measure_calls" do
func = fn ->
:timer.sleep(10)
:done
end
assert :done == Custom.measure("my.measure", [sample_rate: 1.0], func)
assert_metric_reported(:timing, "custom.my.measure", 10..12)
assert :done ==
Custom.measure("my.measure", [sample_rate: 1.0, tags: ["timing:short"]], func)
assert_metric_reported(:timing, "custom.my.measure", 10..11, tags: ["timing:short"])
end
end
test "setting a runtime prefix" do
defmodule RuntimePrefix do
use CustomFunctions, prefix: Application.get_env(:instruments, :custom_prefix, "foobar")
end
RuntimePrefix.increment("foo.bar", 3)
assert_metric_reported(:increment, "foobar.foo.bar", 3)
end
end
| 31.85567 | 94 | 0.657282 |
ffd4aa66e516ace391b13f8b448fa6bd2028fca9 | 1,367 | ex | Elixir | lib/podium/api.ex | backdoer/podium_ex | eb790c0f67f8c9765b11fc174beee63c0816ea1c | [
"MIT"
] | null | null | null | lib/podium/api.ex | backdoer/podium_ex | eb790c0f67f8c9765b11fc174beee63c0816ea1c | [
"MIT"
] | null | null | null | lib/podium/api.ex | backdoer/podium_ex | eb790c0f67f8c9765b11fc174beee63c0816ea1c | [
"MIT"
] | null | null | null | defmodule Podium.API do
@moduledoc """
Provides a basic HTTP interface to allow easy communication with the Podium
API, by wrapping `HTTPoison`.
"""
@default_base_url "https://api.podium.com/api/v2"
use HTTPoison.Base
@impl HTTPoison.Base
def process_url(path) do
base_url() <> path
end
@impl HTTPoison.Base
def process_request_headers(headers) do
[
{"Accept", "application/json"},
{"Authorization", api_key()},
{"Content-Type", "application/json"},
{"Application-Uid", application_uid()}
] ++ headers
end
@impl HTTPoison.Base
def process_request_body(body) do
Jason.encode!(body)
end
@impl HTTPoison.Base
def process_request_options(opts) do
Keyword.merge([timeout: timeout(), recv_timeout: recv_timeout()], opts)
end
@spec api_key() :: String.t()
defp api_key, do: Application.get_env(:podium_ex, :api_key, "")
@spec base_url() :: String.t()
defp base_url, do: Application.get_env(:podium_ex, :base_url, @default_base_url)
@spec timeout :: integer()
defp timeout, do: Application.get_env(:podium_ex, :timeout, 15_000)
@spec recv_timeout :: integer()
defp recv_timeout, do: Application.get_env(:podium_ex, :recv_timeout, 15_000)
@spec application_uid() :: String.t()
defp application_uid, do: Application.get_env(:podium_ex, :application_uid, "")
end
| 26.803922 | 82 | 0.692758 |
ffd4ab3d16560fe805db1491d55b430a40565d45 | 1,138 | exs | Elixir | test/react_render_test.exs | luisgserrano/elixir_react_render | 4c2e7937b4b1a068b089bec2dfb6c19f09bc1281 | [
"MIT"
] | null | null | null | test/react_render_test.exs | luisgserrano/elixir_react_render | 4c2e7937b4b1a068b089bec2dfb6c19f09bc1281 | [
"MIT"
] | 3 | 2019-11-25T05:00:52.000Z | 2020-01-31T04:54:39.000Z | test/react_render_test.exs | luisgserrano/elixir_react_render | 4c2e7937b4b1a068b089bec2dfb6c19f09bc1281 | [
"MIT"
] | null | null | null | defmodule ReactRender.Test do
use ExUnit.Case
doctest ReactRender
setup_all do
apply(ReactRender, :start_link, [[render_service_path: "./priv/server.js"]])
:ok
end
describe "get_html" do
test "returns html" do
{:ok, html} = ReactRender.get_html("./HelloWorld.js", %{name: "test"})
assert html =~ "<div data-reactroot=\"\">Hello"
assert html =~ "test</div>"
end
test "returns error when no component found" do
{:error, error} = ReactRender.get_html("./NotFound.js")
assert error.message =~ "Cannot find module"
end
end
describe "render" do
test "returns html" do
{:safe, html} = ReactRender.render("./HelloWorld.js", %{name: "test"})
assert html =~ "data-rendered"
assert html =~ "data-component"
assert html =~ "HelloWorld"
assert html =~ "<div data-reactroot=\"\">Hello"
assert html =~ "test</div>"
end
test "raises RenderError when no component found" do
assert_raise ReactRender.RenderError, "Cannot find module './NotFound.js'", fn ->
ReactRender.render("./NotFound.js")
end
end
end
end
| 28.45 | 87 | 0.629174 |
ffd4aea4b3c42fd9117b1f5788febcb33fab8feb | 1,208 | exs | Elixir | config/dev.exs | andersjanmyr/battlesnake-server-2018 | 091b4d1201d144de232be08fd7fb6df8156c5ee2 | [
"MIT"
] | 3 | 2018-07-14T22:55:23.000Z | 2019-02-25T06:11:55.000Z | config/dev.exs | andersjanmyr/battlesnake-server-2018 | 091b4d1201d144de232be08fd7fb6df8156c5ee2 | [
"MIT"
] | 7 | 2020-02-12T03:22:59.000Z | 2022-02-10T20:23:52.000Z | config/dev.exs | andersjanmyr/battlesnake-server-2018 | 091b4d1201d144de232be08fd7fb6df8156c5ee2 | [
"MIT"
] | 12 | 2018-03-27T05:27:20.000Z | 2019-04-02T08:19:04.000Z | use Mix.Config
config :bs, snake_api: Bs.Api
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :bs, BsWeb.Endpoint,
http: [port: {:system, :integer, "PORT", 3000}],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/.bin/webpack",
"--watch",
"--stdin",
"--verbose",
"--colors",
cd: Path.expand("../assets", __DIR__)
]
]
# Watch static and templates for browser reloading.
config :bs, BsWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/bs_web/views/.*(ex)$},
~r{lib/bs_web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
| 27.454545 | 68 | 0.663907 |
ffd4ba1955938286cc0c00edbcddd9480121600c | 567 | exs | Elixir | mix.exs | xerions/exradius | d6166faeb3d18eed4bbb43fa70f3ae2326b3816c | [
"Apache-2.0"
] | 8 | 2016-01-22T11:33:59.000Z | 2018-02-21T07:49:03.000Z | mix.exs | xerions/exradius | d6166faeb3d18eed4bbb43fa70f3ae2326b3816c | [
"Apache-2.0"
] | 2 | 2016-06-24T10:52:28.000Z | 2019-02-25T07:28:09.000Z | mix.exs | xerions/exradius | d6166faeb3d18eed4bbb43fa70f3ae2326b3816c | [
"Apache-2.0"
] | 7 | 2016-01-22T11:35:44.000Z | 2021-05-07T11:11:15.000Z | defmodule Exradius.Mixfile do
use Mix.Project
def project do
[ app: :exradius,
version: "0.0.1",
deps: deps(Mix.env),
xref: [exclude: [:meck]]]
end
# Configuration for the OTP application
def application do
[]
end
# Returns the list of dependencies in the format:
# { :foobar, git: "https://github.com/elixir-lang/foobar.git", tag: "0.1" }
#
# To specify particular versions, regardless of the tag, do:
# { :barbat, "~> 0.1", github: "elixir-lang/barbat" }
defp deps(_) do
[{:eradius, "~> 0.8.2" }]
end
end
| 21.807692 | 77 | 0.610229 |
ffd4c97c87641d4a76449efa68c5a90d8d342cc8 | 1,280 | ex | Elixir | lib/sprint_poker/operations/game_operations.ex | elpassion/sprint-poker | 5c9b34bb264c7a30ff48f0aeac40821b67310ff8 | [
"MIT"
] | 199 | 2015-10-22T16:20:09.000Z | 2021-11-08T11:20:45.000Z | lib/sprint_poker/operations/game_operations.ex | elpassion/sprint-poker | 5c9b34bb264c7a30ff48f0aeac40821b67310ff8 | [
"MIT"
] | 4 | 2015-10-24T20:43:29.000Z | 2016-03-03T21:09:06.000Z | lib/sprint_poker/operations/game_operations.ex | elpassion/sprint-poker | 5c9b34bb264c7a30ff48f0aeac40821b67310ff8 | [
"MIT"
] | 34 | 2015-10-23T06:38:43.000Z | 2019-08-13T23:49:24.000Z | defmodule SprintPoker.GameOperations do
@moduledoc """
Game related operations
"""
import Ecto.Query, only: [from: 2]
alias SprintPoker.Repo
alias SprintPoker.Repo.Game
alias SprintPoker.Repo.Deck
alias SprintPoker.Repo.User
alias SprintPoker.Repo.Ticket
alias SprintPoker.StateOperations
alias Ecto.UUID
def create(params, user) do
changeset = Game.changeset(%Game{}, %{
name: params["name"],
owner_id: user.id,
deck_id: Repo.get!(Deck, params["deck"]["id"]).id
})
case changeset do
{:error, errors} ->
raise errors
_ ->
game = changeset |> Repo.insert!
StateOperations.create(game)
game |> Repo.preload([:owner, :deck])
end
end
def find(nil), do: :no_id
def find(game_id) do
case get(game_id) do
nil -> :error
game -> game |> Repo.preload([:owner, :deck])
end
end
def preload(data) do
Repo.preload(data, [
:owner,
:deck,
:state,
users: from(u in User, order_by: u.name),
tickets: from(t in Ticket, order_by: t.id)
])
end
def get(id, opts \\ []) do
case UUID.cast(id) do
{:ok, _} -> Repo.get(Game, id, opts)
_ -> nil
end
end
def get_decks() do
Repo.all(Deck)
end
end
| 20.645161 | 55 | 0.597656 |
ffd4ccf60da880a76b769728199e06a6196534bb | 1,735 | ex | Elixir | lib/client/analytics.ex | skylerparr/playfab_ex | b6a049511bc3466cea5959797425bffc099239de | [
"MIT"
] | 1 | 2021-12-21T14:22:12.000Z | 2021-12-21T14:22:12.000Z | lib/client/analytics.ex | skylerparr/playfab_ex | b6a049511bc3466cea5959797425bffc099239de | [
"MIT"
] | null | null | null | lib/client/analytics.ex | skylerparr/playfab_ex | b6a049511bc3466cea5959797425bffc099239de | [
"MIT"
] | null | null | null | defmodule PlayfabEx.Client.Analytics do
use Interface
@doc """
Write a PlayStream event to describe the provided player device information. This API method is not designed to be called directly by developers. Each PlayFab client SDK will eventually report this information automatically.
[online docs](https://api.playfab.com/documentation/client/method/ReportDeviceInfo)
"""
@spec report_device_info(map()) :: {:ok, map} | {:error, String.t}
definterface report_device_info(params), to: (Application.get_env(:playfab_ex, PlayfabEx.Client.Analytics) || PlayfabEx.Client.Default.Analytics)
@doc """
Writes a character-based event into PlayStream.
[online docs](https://api.playfab.com/documentation/client/method/WriteCharacterEvent)
"""
@spec write_character_event(map()) :: {:ok, map} | {:error, String.t}
definterface write_character_event(params), to: (Application.get_env(:playfab_ex, PlayfabEx.Client.Analytics) || PlayfabEx.Client.Default.Analytics)
@doc """
Writes a player-based event into PlayStream.
[online docs](https://api.playfab.com/documentation/client/method/WritePlayerEvent)
"""
@spec write_player_event(map()) :: {:ok, map} | {:error, String.t}
definterface write_player_event(params), to: (Application.get_env(:playfab_ex, PlayfabEx.Client.Analytics) || PlayfabEx.Client.Default.Analytics)
@doc """
Writes a title-based event into PlayStream.
[online docs](https://api.playfab.com/documentation/client/method/WriteTitleEvent)
"""
@spec write_title_event(map()) :: {:ok, map} | {:error, String.t}
definterface write_title_event(params), to: (Application.get_env(:playfab_ex, PlayfabEx.Client.Analytics) || PlayfabEx.Client.Default.Analytics)
end | 44.487179 | 226 | 0.748127 |
ffd4d282542420d871ae7620f2474bc89c357fdf | 1,451 | ex | Elixir | lib/absinthe/language/operation_definition.ex | maartenvanvliet/absinthe | ebe820717200f53756e225b3dffbfefe924a94d3 | [
"MIT"
] | null | null | null | lib/absinthe/language/operation_definition.ex | maartenvanvliet/absinthe | ebe820717200f53756e225b3dffbfefe924a94d3 | [
"MIT"
] | 2 | 2018-08-02T13:35:38.000Z | 2018-08-02T13:36:42.000Z | lib/absinthe/language/operation_definition.ex | jlgeering/absinthe | a3dbc29640d613928398626ad75a8f03203a1720 | [
"MIT"
] | null | null | null | defmodule Absinthe.Language.OperationDefinition do
@moduledoc false
alias Absinthe.{Blueprint, Language}
defstruct operation: nil,
name: nil,
variable_definitions: [],
directives: [],
selection_set: nil,
loc: %{start_line: nil}
@type t :: %__MODULE__{
operation: :query | :mutation | :subscription,
name: nil | String.t(),
variable_definitions: [Language.VariableDefinition.t()],
directives: [Language.Directive.t()],
selection_set: Language.SelectionSet.t(),
loc: Language.loc_t()
}
defimpl Blueprint.Draft do
def convert(node, doc) do
%Blueprint.Document.Operation{
name: node.name,
type: node.operation,
directives: Absinthe.Blueprint.Draft.convert(node.directives, doc),
variable_definitions: Blueprint.Draft.convert(node.variable_definitions, doc),
selections: Blueprint.Draft.convert(node.selection_set.selections, doc),
source_location: source_location(node.loc)
}
end
defp source_location(nil) do
nil
end
defp source_location(%{start_line: number}) do
Blueprint.Document.SourceLocation.at(number)
end
end
defimpl Absinthe.Traversal.Node do
def children(node, _schema) do
[node.variable_definitions, node.directives, List.wrap(node.selection_set)]
|> Enum.concat()
end
end
end
| 29.02 | 86 | 0.646451 |
ffd547c9a82f0053638db4d3a9349835d3b48d8a | 1,446 | ex | Elixir | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_verify_draft_token_payload.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_verify_draft_token_payload.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_verify_draft_token_payload.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1VerifyDraftTokenPayload do
@moduledoc """
## Attributes
* `draftToken` (*type:* `String.t`, *default:* `nil`) - Draft token.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:draftToken => String.t()
}
field(:draftToken)
end
defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1VerifyDraftTokenPayload do
def decode(value, options) do
GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1VerifyDraftTokenPayload.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1VerifyDraftTokenPayload do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 30.765957 | 100 | 0.751729 |
ffd5cf94abef3d79a6496104d6575ab0cffd0cdb | 1,125 | ex | Elixir | lib/taut/application.ex | shimmerlabs/taut | 1f38f5ccfc39b94aa392105af6dfbf3a44a9cc19 | [
"MIT"
] | null | null | null | lib/taut/application.ex | shimmerlabs/taut | 1f38f5ccfc39b94aa392105af6dfbf3a44a9cc19 | [
"MIT"
] | null | null | null | lib/taut/application.ex | shimmerlabs/taut | 1f38f5ccfc39b94aa392105af6dfbf3a44a9cc19 | [
"MIT"
] | null | null | null | defmodule Taut.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
children = [
# Start the Ecto repository
Taut.Repo,
# Start the Telemetry supervisor
TautWeb.Telemetry,
# Start the PubSub system
{Phoenix.PubSub, name: Taut.PubSub},
# Start a worker by calling: Taut.Worker.start_link(arg)
# {Taut.Worker, arg}
]
# Start the Endpoint (http/https) unless we've set server: false
run_server = Application.get_env(:taut, :server, true)
children = children ++ if(run_server, do: [TautWeb.Endpoint], else: [])
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Taut.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
TautWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 30.405405 | 75 | 0.695111 |
ffd5e07b84b9a38c393fdcffafd6eb3ffa3a8a0b | 409 | exs | Elixir | test/vix/vips/blob_test.exs | akash-akya/vix | 72c1cc28315aa9913e90565ba2d96f7b0620fb8e | [
"MIT"
] | 40 | 2020-11-28T18:57:41.000Z | 2022-03-30T22:31:10.000Z | test/vix/vips/blob_test.exs | akash-akya/vix | 72c1cc28315aa9913e90565ba2d96f7b0620fb8e | [
"MIT"
] | 17 | 2021-04-24T16:34:46.000Z | 2022-03-31T13:38:04.000Z | test/vix/vips/blob_test.exs | akash-akya/vix | 72c1cc28315aa9913e90565ba2d96f7b0620fb8e | [
"MIT"
] | 3 | 2021-04-26T15:16:27.000Z | 2022-03-08T01:00:30.000Z | defmodule Vix.Vips.BlobTest do
use ExUnit.Case
alias Vix.Vips.Blob
test "to_nif_term" do
blob = Blob.to_nif_term(<<1, 2, 3, 4>>, nil)
{:ok, gtype} = Vix.Nif.nif_g_type_from_instance(blob)
assert Vix.Nif.nif_g_type_name(gtype) == {:ok, "VipsBlob"}
end
test "to_erl_term" do
blob = Blob.to_nif_term(<<1, 2, 3, 4>>, nil)
assert <<1, 2, 3, 4>> == Blob.to_erl_term(blob)
end
end
| 24.058824 | 62 | 0.640587 |
ffd62468de551936e089a36ab86371ae36d256e1 | 47,897 | exs | Elixir | test/nimble_options_test.exs | dashbitco/nimble_options | 8757703a24837135085e40181e3d14a50fe5c721 | [
"Apache-2.0"
] | 293 | 2020-04-03T15:58:27.000Z | 2022-03-29T10:29:32.000Z | test/nimble_options_test.exs | dashbitco/nimble_options | 8757703a24837135085e40181e3d14a50fe5c721 | [
"Apache-2.0"
] | 60 | 2020-04-04T15:46:00.000Z | 2022-02-09T14:09:59.000Z | test/nimble_options_test.exs | dashbitco/nimble_options | 8757703a24837135085e40181e3d14a50fe5c721 | [
"Apache-2.0"
] | 23 | 2020-04-04T15:43:10.000Z | 2022-02-09T13:03:38.000Z | defmodule NimbleOptionsTest do
use ExUnit.Case, async: true
doctest NimbleOptions
import ExUnit.CaptureIO
alias NimbleOptions.ValidationError
  describe "validate keys" do
    # Options without a declared :type pass through unchanged.
    test "known options without types" do
      schema = [name: [], context: []]
      opts = [name: MyProducer, context: :ok]

      assert NimbleOptions.validate(opts, schema) == {:ok, opts}
    end

    # Keys absent from the schema are rejected; the error lists every
    # unknown key together with the valid ones.
    test "unknown options" do
      schema = [an_option: [], other_option: []]
      opts = [an_option: 1, not_an_option1: 1, not_an_option2: 1]

      assert NimbleOptions.validate(opts, schema) ==
               {:error,
                %ValidationError{
                  key: [:not_an_option1, :not_an_option2],
                  value: nil,
                  message:
                    "unknown options [:not_an_option1, :not_an_option2], valid options are: [:an_option, :other_option]"
                }}
    end
  end

  describe "validate the schema itself before validating the options" do
    # A bogus type in the schema is a programmer error, so it raises
    # ArgumentError rather than returning an :error tuple.
    test "raise ArgumentError when invalid" do
      schema = [stages: [type: :foo]]
      opts = [stages: 1]

      message = """
      invalid schema given to NimbleOptions.validate/2. \
      Reason: invalid option type :foo.

      Available types: :any, :keyword_list, :non_empty_keyword_list, :atom, \
      :integer, :non_neg_integer, :pos_integer, :float, :mfa, :mod_arg, :string, :boolean, :timeout, \
      :pid, {:fun, arity}, {:in, choices}, {:or, subtypes}, {:custom, mod, fun, args}, \
      {:list, subtype} \
      (in options [:stages])\
      """

      assert_raise ArgumentError, message, fn ->
        NimbleOptions.validate(opts, schema)
      end
    end

    # Schema validation recurses into nested :keys definitions and reports
    # the full key path of the offending schema entry.
    test "validate the keys recursively, if any" do
      schema = [
        producers: [
          type: :keyword_list,
          keys: [
            *: [
              type: :keyword_list,
              keys: [
                module: [unknown_schema_option: 1],
                arg: []
              ]
            ]
          ]
        ]
      ]

      message = """
      invalid schema given to NimbleOptions.validate/2. \
      Reason: \
      unknown options [:unknown_schema_option], \
      valid options are: [:type, :required, :default, :keys, \
      :deprecated, :rename_to, :doc, :subsection] \
      (in options [:producers, :keys, :*, :keys, :module])\
      """

      assert_raise ArgumentError, message, fn ->
        NimbleOptions.validate([], schema)
      end
    end
  end
  describe "default value" do
    test "is used when none is given" do
      schema = [context: [default: :ok]]
      assert NimbleOptions.validate([], schema) == {:ok, [context: :ok]}
    end

    test "is not used when one is given" do
      schema = [context: [default: :ok]]
      assert NimbleOptions.validate([context: :given], schema) == {:ok, [context: :given]}
    end

    # Defaults are themselves type-checked: a default that violates the
    # declared type surfaces as a validation error with the full key path.
    test "is validated" do
      schema = [
        processors: [
          type: :keyword_list,
          default: [],
          keys: [
            stages: [type: :integer, default: "10"]
          ]
        ]
      ]

      opts = [processors: []]

      assert NimbleOptions.validate(opts, schema) == {
               :error,
               %ValidationError{
                 key: :stages,
                 keys_path: [:processors],
                 message: "expected :stages to be an integer, got: \"10\"",
                 value: "10"
               }
             }
    end
  end

  describe ":required" do
    test "when present" do
      schema = [name: [required: true, type: :atom]]
      opts = [name: MyProducer]

      assert NimbleOptions.validate(opts, schema) == {:ok, opts}
    end

    # A missing required option reports which options were actually received.
    test "when missing" do
      schema = [name: [required: true], an_option: [], other_option: []]
      opts = [an_option: 1, other_option: 2]

      assert NimbleOptions.validate(opts, schema) ==
               {:error,
                %ValidationError{
                  key: :name,
                  message:
                    "required option :name not found, received options: [:an_option, :other_option]"
                }}
    end
  end

  describe ":rename_to" do
    # The value is validated under the *new* key and the old key is dropped
    # from the result.
    test "renames option and removes the old option" do
      schema = [
        port: [rename_to: :new_port],
        new_port: [type: {:custom, __MODULE__, :string_to_integer, []}]
      ]

      assert NimbleOptions.validate([port: "4000"], schema) ==
               {:ok, [new_port: 4000]}
    end

    test "is ignored when option is not present given" do
      schema = [context: [rename_to: :new_context], new_context: []]
      assert NimbleOptions.validate([], schema) == {:ok, []}
    end
  end
  describe ":doc" do
    # :doc accepts either a documentation string or literal false.
    test "valid documentation for key" do
      schema = [context: [doc: "details", default: 1]]
      assert NimbleOptions.validate([], schema) == {:ok, [context: 1]}

      schema = [context: [doc: false, default: 1]]
      assert NimbleOptions.validate([], schema) == {:ok, [context: 1]}
    end

    # Any other :doc value fails schema validation, listing one reason per
    # attempted subtype of the underlying :or type.
    test "invalid documentation for key" do
      message = """
      invalid schema given to NimbleOptions.validate/2. Reason: expected :doc to match at least \
      one given type, but didn't match any. Here are the reasons why it didn't match each of the \
      allowed types:

        * expected :doc to be in [false], got: 1
        * expected :doc to be a string, got: 1 (in options [:context])\
      """

      assert_raise ArgumentError, message, fn ->
        schema = [context: [doc: 1, default: 1]]
        NimbleOptions.validate([], schema)
      end
    end
  end

  describe ":deprecated" do
    import ExUnit.CaptureIO

    # Supplying a deprecated option still validates, but warns on :stderr.
    test "warns when given" do
      schema = [context: [deprecated: "Use something else"]]

      assert capture_io(:stderr, fn ->
               assert NimbleOptions.validate([context: :ok], schema) == {:ok, [context: :ok]}
             end) =~ ":context is deprecated. Use something else"
    end

    test "does not warn when not given" do
      schema = [context: [deprecated: "Use something else"]]
      assert NimbleOptions.validate([], schema) == {:ok, []}
    end

    # Falling back to the default still counts as using the deprecated option.
    test "warns when using default" do
      schema = [context: [deprecated: "Use something else", default: :ok]]

      assert capture_io(:stderr, fn ->
               assert NimbleOptions.validate([], schema) == {:ok, [context: :ok]}
             end) =~ ":context is deprecated. Use something else"
    end
  end
# One valid/invalid test pair per built-in type, plus the compound types
# ({:fun, arity}, {:in, choices}, {:or, subtypes}, {:custom, m, f, a},
# {:list, subtype}). Error messages are asserted verbatim, so the exact
# wording here is load-bearing.
describe "type validation" do
# :pos_integer — integers strictly greater than zero.
test "valid positive integer" do
schema = [stages: [type: :pos_integer]]
opts = [stages: 1]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid positive integer" do
schema = [stages: [type: :pos_integer]]
# Zero is rejected — :pos_integer excludes 0.
assert NimbleOptions.validate([stages: 0], schema) ==
{:error,
%ValidationError{
key: :stages,
value: 0,
message: "expected :stages to be a positive integer, got: 0"
}}
assert NimbleOptions.validate([stages: :an_atom], schema) ==
{:error,
%ValidationError{
key: :stages,
value: :an_atom,
message: "expected :stages to be a positive integer, got: :an_atom"
}}
end
# :integer — any integer; floats are rejected.
test "valid integer" do
schema = [min_demand: [type: :integer]]
opts = [min_demand: 12]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid integer" do
schema = [min_demand: [type: :integer]]
assert NimbleOptions.validate([min_demand: 1.5], schema) ==
{:error,
%ValidationError{
key: :min_demand,
value: 1.5,
message: "expected :min_demand to be an integer, got: 1.5"
}}
assert NimbleOptions.validate([min_demand: :an_atom], schema) ==
{:error,
%ValidationError{
key: :min_demand,
value: :an_atom,
message: "expected :min_demand to be an integer, got: :an_atom"
}}
end
# :non_neg_integer — zero or positive integers.
test "valid non negative integer" do
schema = [min_demand: [type: :non_neg_integer]]
opts = [min_demand: 0]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid non negative integer" do
schema = [min_demand: [type: :non_neg_integer]]
assert NimbleOptions.validate([min_demand: -1], schema) ==
{:error,
%ValidationError{
key: :min_demand,
value: -1,
message: "expected :min_demand to be a non negative integer, got: -1"
}}
assert NimbleOptions.validate([min_demand: :an_atom], schema) ==
{:error,
%ValidationError{
key: :min_demand,
value: :an_atom,
message: "expected :min_demand to be a non negative integer, got: :an_atom"
}}
end
# :float — strictly floats; integers are NOT coerced.
test "valid float" do
schema = [certainty: [type: :float]]
opts = [certainty: 0.5]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid float" do
schema = [certainty: [type: :float]]
assert NimbleOptions.validate([certainty: 1], schema) ==
{:error,
%ValidationError{
key: :certainty,
value: 1,
message: "expected :certainty to be a float, got: 1"
}}
assert NimbleOptions.validate([certainty: :an_atom], schema) ==
{:error,
%ValidationError{
key: :certainty,
value: :an_atom,
message: "expected :certainty to be a float, got: :an_atom"
}}
end
# :atom
test "valid atom" do
schema = [name: [type: :atom]]
opts = [name: :an_atom]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid atom" do
schema = [name: [type: :atom]]
assert NimbleOptions.validate([name: 1], schema) ==
{:error,
%ValidationError{
key: :name,
value: 1,
message: "expected :name to be an atom, got: 1"
}}
end
# :string
test "valid string" do
schema = [doc: [type: :string]]
opts = [doc: "a string"]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid string" do
schema = [doc: [type: :string]]
assert NimbleOptions.validate([doc: :an_atom], schema) ==
{:error,
%ValidationError{
key: :doc,
value: :an_atom,
message: "expected :doc to be a string, got: :an_atom"
}}
end
# :boolean — only the literals true/false.
test "valid boolean" do
schema = [required: [type: :boolean]]
opts = [required: true]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [required: false]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid boolean" do
schema = [required: [type: :boolean]]
assert NimbleOptions.validate([required: :an_atom], schema) ==
{:error,
%ValidationError{
key: :required,
value: :an_atom,
message: "expected :required to be a boolean, got: :an_atom"
}}
end
# :timeout — non-negative integer or the atom :infinity (GenServer-style).
test "valid timeout" do
schema = [timeout: [type: :timeout]]
opts = [timeout: 0]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [timeout: 1000]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [timeout: :infinity]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid timeout" do
schema = [timeout: [type: :timeout]]
opts = [timeout: -1]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :timeout,
value: -1,
message: "expected :timeout to be non-negative integer or :infinity, got: -1"
}}
opts = [timeout: :invalid]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :timeout,
value: :invalid,
message:
"expected :timeout to be non-negative integer or :infinity, got: :invalid"
}}
end
# :pid
test "valid pid" do
schema = [name: [type: :pid]]
opts = [name: self()]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid pid" do
schema = [name: [type: :pid]]
assert NimbleOptions.validate([name: 1], schema) ==
{:error,
%ValidationError{
key: :name,
value: 1,
message: "expected :name to be a pid, got: 1"
}}
end
# :mfa — {module, function, args} where all three parts are shape-checked.
test "valid mfa" do
schema = [transformer: [type: :mfa]]
opts = [transformer: {SomeMod, :func, [1, 2]}]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [transformer: {SomeMod, :func, []}]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid mfa" do
schema = [transformer: [type: :mfa]]
# Each element of the triple is validated: module must be an atom...
opts = [transformer: {"not_a_module", :func, []}]
assert NimbleOptions.validate(opts, schema) == {
:error,
%ValidationError{
key: :transformer,
value: {"not_a_module", :func, []},
message:
~s(expected :transformer to be a tuple {Mod, Fun, Args}, got: {"not_a_module", :func, []})
}
}
# ...function must be an atom...
opts = [transformer: {SomeMod, "not_a_func", []}]
assert NimbleOptions.validate(opts, schema) == {
:error,
%ValidationError{
key: :transformer,
value: {SomeMod, "not_a_func", []},
message:
~s(expected :transformer to be a tuple {Mod, Fun, Args}, got: {SomeMod, "not_a_func", []})
}
}
# ...and args must be a list.
opts = [transformer: {SomeMod, :func, "not_a_list"}]
assert NimbleOptions.validate(opts, schema) == {
:error,
%ValidationError{
key: :transformer,
value: {SomeMod, :func, "not_a_list"},
message:
~s(expected :transformer to be a tuple {Mod, Fun, Args}, got: {SomeMod, :func, "not_a_list"})
}
}
opts = [transformer: NotATuple]
assert NimbleOptions.validate(opts, schema) == {
:error,
%ValidationError{
key: :transformer,
value: NotATuple,
message: ~s(expected :transformer to be a tuple {Mod, Fun, Args}, got: NotATuple)
}
}
end
# :mod_arg — {module, arg} pairs (module atom + arbitrary argument).
test "valid mod_arg" do
schema = [producer: [type: :mod_arg]]
opts = [producer: {SomeMod, [1, 2]}]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [producer: {SomeMod, []}]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid mod_arg" do
schema = [producer: [type: :mod_arg]]
opts = [producer: NotATuple]
assert NimbleOptions.validate(opts, schema) == {
:error,
%ValidationError{
key: :producer,
value: NotATuple,
message: ~s(expected :producer to be a tuple {Mod, Arg}, got: NotATuple)
}
}
opts = [producer: {"not_a_module", []}]
assert NimbleOptions.validate(opts, schema) == {
:error,
%ValidationError{
key: :producer,
value: {"not_a_module", []},
message:
~s(expected :producer to be a tuple {Mod, Arg}, got: {"not_a_module", []})
}
}
end
# {:fun, arity} — anonymous or captured functions of the exact arity.
test "valid {:fun, arity}" do
schema = [partition_by: [type: {:fun, 1}]]
opts = [partition_by: fn x -> x end]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [partition_by: &:erlang.phash2/1]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid {:fun, arity}" do
schema = [partition_by: [type: {:fun, 1}]]
opts = [partition_by: :not_a_fun]
assert NimbleOptions.validate(opts, schema) == {
:error,
%ValidationError{
key: :partition_by,
value: :not_a_fun,
message: ~s(expected :partition_by to be a function of arity 1, got: :not_a_fun)
}
}
# Wrong arity gets a dedicated message (functions have no useful inspect).
opts = [partition_by: fn x, y -> x * y end]
assert NimbleOptions.validate(opts, schema) == {
:error,
%ValidationError{
key: :partition_by,
value: opts[:partition_by],
message:
~s(expected :partition_by to be a function of arity 1, got: function of arity 2)
}
}
end
# {:in, choices} — membership check; choices can be any enumerable
# (list, range, MapSet).
test "valid {:in, choices}" do
schema = [batch_mode: [type: {:in, [:flush, :bulk]}]]
opts = [batch_mode: :flush]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [batch_mode: :bulk]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
# With ranges
schema = [decimals: [type: {:in, 0..255}]]
opts = [decimals: 0]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [decimals: 100]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
# With sets
schema = [mode: [type: {:in, MapSet.new([:active, :passive])}]]
opts = [mode: :active]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid {:in, choices}" do
schema = [batch_mode: [type: {:in, [:flush, :bulk]}]]
opts = [batch_mode: :invalid]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :batch_mode,
value: :invalid,
message: "expected :batch_mode to be in [:flush, :bulk], got: :invalid"
}}
# With ranges
schema = [decimals: [type: {:in, 0..255}]]
opts = [decimals: -1]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :decimals,
value: -1,
message: "expected :decimals to be in 0..255, got: -1"
}}
# With sets
schema = [mode: [type: {:in, MapSet.new([:active, :passive])}]]
opts = [mode: :unknown]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :mode,
value: :unknown,
message: "expected :mode to be in #MapSet<[:active, :passive]>, got: :unknown"
}}
end
# {:one_of, choices} is the deprecated spelling of {:in, choices}: it still
# validates but emits a deprecation warning on stderr.
test "deprecation of {:one_of, choices}" do
schema = [batch_mode: [type: {:one_of, [:flush, :bulk]}]]
assert capture_io(:stderr, fn ->
opts = [batch_mode: :flush]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end) =~ "the {:one_of, choices} type is deprecated"
end
# {:or, subtypes} — first matching subtype wins; custom subtypes may cast.
test "valid {:or, subtypes} with simple subtypes" do
schema = [docs: [type: {:or, [:string, :boolean]}]]
opts = [docs: false]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [docs: true]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [docs: "a string"]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "valid {:or, subtypes} with compound subtypes" do
schema = [docs: [type: {:or, [{:custom, __MODULE__, :string_to_integer, []}, :string]}]]
opts = [docs: "a string"]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
# "123" matches the custom subtype first, so it is cast to an integer.
opts = [docs: "123"]
assert NimbleOptions.validate(opts, schema) == {:ok, [docs: 123]}
end
test "valid {:or, subtypes} with nested :or" do
# Nested :or.
schema = [
docs: [
type:
{:or,
[
{:or, [{:custom, __MODULE__, :string_to_integer, []}, :boolean]},
{:or, [:string]}
]}
]
]
opts = [docs: "123"]
assert NimbleOptions.validate(opts, schema) == {:ok, [docs: 123]}
opts = [docs: "a string"]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [docs: false]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "valid {:or, subtypes} with nested keyword lists" do
# A keyword_list subtype inside :or carries its own nested key schema.
schema = [
docs: [
type: {:or, [:boolean, keyword_list: [enabled: [type: :boolean]]]}
]
]
opts = [docs: false]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [docs: [enabled: true]]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid {:or, subtypes}" do
schema = [docs: [type: {:or, [:string, :boolean]}]]
opts = [docs: :invalid]
# When nothing matches, the error aggregates one bullet per subtype.
expected_message = """
expected :docs to match at least one given type, but didn't match any. Here are the \
reasons why it didn't match each of the allowed types:
* expected :docs to be a boolean, got: :invalid
* expected :docs to be a string, got: :invalid\
"""
assert NimbleOptions.validate(opts, schema) ==
{:error, %ValidationError{key: :docs, value: :invalid, message: expected_message}}
end
test "invalid {:or, subtypes} with nested :or" do
schema = [
docs: [
type:
{:or,
[
{:or, [{:custom, __MODULE__, :string_to_integer, []}, :boolean]},
{:or, [:string]}
]}
]
]
opts = [docs: 1]
# Nested :or failures nest their bullet lists with extra indentation.
expected_message = """
expected :docs to match at least one given type, but didn't match any. \
Here are the reasons why it didn't match each of the allowed types:
* expected :docs to match at least one given type, but didn't match any. \
Here are the reasons why it didn't match each of the allowed types:
* expected :docs to be a string, got: 1
* expected :docs to match at least one given type, but didn't match any. \
Here are the reasons why it didn't match each of the allowed types:
* expected :docs to be a boolean, got: 1
* expected to be a string, got: 1\
"""
assert NimbleOptions.validate(opts, schema) ==
{:error, %ValidationError{key: :docs, value: 1, message: expected_message}}
end
test "invalid {:or, subtypes} with nested keyword lists" do
schema = [
docs: [
type: {:or, [:boolean, keyword_list: [enabled: [type: :boolean]]]}
]
]
opts = [docs: "123"]
expected_message = """
expected :docs to match at least one given type, but didn't match any. \
Here are the reasons why it didn't match each of the allowed types:
* expected :docs to be a keyword list, got: "123"
* expected :docs to be a boolean, got: "123"\
"""
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :docs,
value: "123",
keys_path: [],
message: expected_message
}}
# A failure *inside* the nested keyword list reports the inner key
# with its path ("in options [:docs]").
opts = [docs: [enabled: "not a boolean"]]
expected_message = """
expected :docs to match at least one given type, but didn't match any. \
Here are the reasons why it didn't match each of the allowed types:
* expected :enabled to be a boolean, got: "not a boolean" (in options [:docs])
* expected :docs to be a boolean, got: [enabled: "not a boolean"]\
"""
assert NimbleOptions.validate(opts, schema) == {
:error,
%NimbleOptions.ValidationError{
key: :docs,
value: [enabled: "not a boolean"],
keys_path: [],
message: expected_message
}
}
end
# {:custom, mod, fun, args} — user-supplied validator functions defined at
# the bottom of this module (buffer_keep/1, choice/2, string_to_integer/1).
test "{:custom, mod, fun, args} with empty args" do
schema = [buffer_keep: [type: {:custom, __MODULE__, :buffer_keep, []}]]
opts = [buffer_keep: :first]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [buffer_keep: :last]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [buffer_keep: :unknown]
assert NimbleOptions.validate(opts, schema) == {
:error,
%ValidationError{
key: :buffer_keep,
value: :unknown,
message: ~s(expected :first or :last, got: :unknown)
}
}
end
test "{:custom, mod, fun, args} with args" do
# Extra args are appended after the value when the validator is invoked.
schema = [buffer_keep: [type: {:custom, __MODULE__, :choice, [[:first, :last]]}]]
opts = [buffer_keep: :first]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [buffer_keep: :last]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
opts = [buffer_keep: :unknown]
assert NimbleOptions.validate(opts, schema) == {
:error,
%ValidationError{
key: :buffer_keep,
value: :unknown,
message: ~s(expected one of [:first, :last], got: :unknown)
}
}
end
test "{:custom, mod, fun, args} can also cast the value of an option" do
schema = [connections: [type: {:custom, __MODULE__, :string_to_integer, []}]]
opts = [connections: "5"]
assert {:ok, validated_opts} = NimbleOptions.validate(opts, schema)
assert length(validated_opts) == 1
assert validated_opts[:connections] == 5
end
test "{:custom, mod, fun, args} enforces the returned value of the function" do
# A validator returning a bare :ok (see misbehaving_custom_validator/1)
# violates the {:ok, value} | {:error, message} contract and raises.
schema = [my_option: [type: {:custom, __MODULE__, :misbehaving_custom_validator, []}]]
message =
"custom validation function NimbleOptionsTest.misbehaving_custom_validator/1 " <>
"must return {:ok, value} or {:error, message}, got: :ok"
assert_raise RuntimeError, message, fn ->
assert NimbleOptions.validate([my_option: :whatever], schema)
end
end
# {:list, subtype} — every element must satisfy the subtype; subtypes can
# themselves be lists or custom validators.
test "valid {:list, subtype}" do
schema = [metadata: [type: {:list, :atom}]]
opts = [metadata: [:foo, :bar, :baz]]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
# Nested lists
schema = [metadata: [type: {:list, {:list, :atom}}]]
opts = [metadata: [[:foo, :bar], [:baz]]]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "invalid {:list, subtype}" do
schema = [metadata: [type: {:list, :atom}]]
# Not a list
opts = [metadata: "not a list"]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :metadata,
keys_path: [],
message: "expected :metadata to be a list, got: \"not a list\"",
value: "not a list"
}}
# List with invalid elements
# Only the FIRST failing element (0-based position) is reported.
opts = [metadata: [:foo, :bar, "baz", :bong, "another invalid value"]]
message = """
list element at position 2 in :metadata failed validation: expected "list element" \
to be an atom, got: "baz"\
"""
assert NimbleOptions.validate(opts, schema) == {
:error,
%NimbleOptions.ValidationError{
key: :metadata,
keys_path: [],
message: message,
value: [:foo, :bar, "baz", :bong, "another invalid value"]
}
}
# Nested list with invalid elements
opts = [metadata: [[:foo, :bar], ["baz", :bong, "another invalid value"]]]
message = """
list element at position 1 in :metadata failed validation: \
list element at position 0 in "list element" failed validation: \
expected "list element" to be an atom, got: "baz"\
"""
assert NimbleOptions.validate(opts, schema) == {
:error,
%NimbleOptions.ValidationError{
key: :metadata,
keys_path: [],
message: message,
value: [[:foo, :bar], ["baz", :bong, "another invalid value"]]
}
}
end
test "{:list, subtype} with custom subtype" do
schema = [metadata: [type: {:list, {:custom, __MODULE__, :string_to_integer, []}}]]
# Valid
opts = [metadata: ["123", "456"]]
assert NimbleOptions.validate(opts, schema) == {:ok, [metadata: [123, 456]]}
# Invalid
opts = [metadata: ["123", "not an int"]]
message = """
list element at position 1 in :metadata failed validation: expected string to be \
convertible to integer\
"""
assert NimbleOptions.validate(opts, schema) == {
:error,
%NimbleOptions.ValidationError{
key: :metadata,
keys_path: [],
message: message,
value: ["123", "not an int"]
}
}
end
end
# Nested :keyword_list options with an explicit :keys schema: unknown-key
# detection, defaults, :required, and type errors all work one level down,
# with the parent key recorded in the error's :keys_path.
describe "nested options with predefined keys" do
test "known options" do
schema = [
processors: [
type: :keyword_list,
keys: [
stages: [],
max_demand: []
]
]
]
opts = [processors: [stages: 1, max_demand: 2]]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "unknown options" do
schema = [
processors: [
type: :keyword_list,
keys: [
stages: [],
min_demand: []
]
]
]
opts = [
processors: [
stages: 1,
unknown_option1: 1,
unknown_option2: 1
]
]
# All unknown keys are reported together, and :keys_path points at the
# nested option that contained them.
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: [:unknown_option1, :unknown_option2],
keys_path: [:processors],
message:
"unknown options [:unknown_option1, :unknown_option2], valid options are: [:stages, :min_demand]"
}}
end
test "options with default values" do
schema = [
processors: [
type: :keyword_list,
keys: [
stages: [default: 10]
]
]
]
opts = [processors: []]
assert NimbleOptions.validate(opts, schema) == {:ok, [processors: [stages: 10]]}
end
test "empty default option with default values" do
# A default of [] on the parent still triggers validation of the nested
# keys, so their defaults are filled in even when the parent is absent.
schema = [
processors: [
type: :keyword_list,
default: [],
keys: [
stages: [default: 10]
]
]
]
assert NimbleOptions.validate([], schema) == {:ok, [processors: [stages: 10]]}
end
test "all required options present" do
schema = [
processors: [
type: :keyword_list,
keys: [
stages: [required: true],
max_demand: [required: true]
]
]
]
opts = [processors: [stages: 1, max_demand: 2]]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "required options missing" do
schema = [
processors: [
type: :keyword_list,
keys: [
stages: [required: true],
max_demand: [required: true]
]
]
]
opts = [processors: [max_demand: 1]]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :stages,
keys_path: [:processors],
message: "required option :stages not found, received options: [:max_demand]"
}}
end
test "nested options types" do
schema = [
processors: [
type: :keyword_list,
keys: [
name: [type: :atom],
stages: [type: :pos_integer]
]
]
]
opts = [processors: [name: MyModule, stages: :an_atom]]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :stages,
value: :an_atom,
keys_path: [:processors],
message: "expected :stages to be a positive integer, got: :an_atom"
}}
end
end
# The special `*:` key in a :keys schema means "any key name is accepted and
# each value is validated against this sub-schema" — used here for maps of
# user-named producers. Error :keys_path then includes the user-chosen key.
describe "nested options with custom keys" do
test "known options" do
schema = [
producers: [
type: :keyword_list,
keys: [
*: [
type: :keyword_list,
keys: [
module: [],
arg: [type: :atom]
]
]
]
]
]
opts = [producers: [producer1: [module: MyModule, arg: :atom]]]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "unknown options" do
schema = [
producers: [
type: :keyword_list,
keys: [
*: [
type: :keyword_list,
keys: [
module: [],
arg: []
]
]
]
]
]
opts = [producers: [producer1: [module: MyModule, arg: :ok, unknown_option: 1]]]
# :keys_path includes the dynamic key (:producer1) chosen by the caller.
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: [:unknown_option],
keys_path: [:producers, :producer1],
message: "unknown options [:unknown_option], valid options are: [:module, :arg]"
}}
end
test "options with default values" do
schema = [
producers: [
type: :keyword_list,
keys: [
*: [
type: :keyword_list,
keys: [
arg: [default: :ok]
]
]
]
]
]
opts = [producers: [producer1: []]]
assert NimbleOptions.validate(opts, schema) == {:ok, [producers: [producer1: [arg: :ok]]]}
end
test "all required options present" do
schema = [
producers: [
type: :keyword_list,
keys: [
*: [
type: :keyword_list,
keys: [
module: [required: true],
arg: [required: true]
]
]
]
]
]
opts = [producers: [default: [module: MyModule, arg: :ok]]]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "required options missing" do
schema = [
producers: [
type: :keyword_list,
keys: [
*: [
type: :keyword_list,
keys: [
module: [required: true],
arg: [required: true]
]
]
]
]
]
opts = [producers: [default: [module: MyModule]]]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :arg,
keys_path: [:producers, :default],
message: "required option :arg not found, received options: [:module]"
}}
end
test "nested options types" do
schema = [
producers: [
type: :keyword_list,
keys: [
*: [
type: :keyword_list,
keys: [
module: [required: true, type: :atom],
stages: [type: :pos_integer]
]
]
]
]
]
opts = [
producers: [
producer1: [
module: MyProducer,
stages: :an_atom
]
]
]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :stages,
value: :an_atom,
keys_path: [:producers, :producer1],
message: "expected :stages to be a positive integer, got: :an_atom"
}}
end
# :non_empty_keyword_list rejects [], while :keyword_list allows it.
test "validate empty keys for :non_empty_keyword_list" do
schema = [
producers: [
type: :non_empty_keyword_list,
keys: [
*: [
type: :keyword_list,
keys: [
module: [required: true, type: :atom],
stages: [type: :pos_integer]
]
]
]
]
]
opts = [
producers: []
]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :producers,
value: [],
message: "expected :producers to be a non-empty keyword list, got: []"
}}
end
test "allow empty keys for :keyword_list" do
schema = [
producers: [
type: :keyword_list,
keys: [
*: [
type: :keyword_list,
keys: [
module: [required: true, type: :pos_integer]
]
]
]
]
]
opts = [
producers: []
]
assert NimbleOptions.validate(opts, schema) == {:ok, opts}
end
test "default value for :keyword_list" do
# An empty default on a *:-keyed list produces [] untouched — there are
# no concrete nested keys to fill with defaults.
schema = [
batchers: [
required: false,
default: [],
type: :keyword_list,
keys: [
*: [
type: :keyword_list,
keys: [
stages: [type: :pos_integer, default: 1]
]
]
]
]
]
opts = []
assert NimbleOptions.validate(opts, schema) == {:ok, [batchers: []]}
end
end
# Errors raised deep inside nested keyword lists carry the full chain of
# parent keys in :keys_path so users can locate the bad option.
describe "nested options show up in error messages" do
test "for options that we validate" do
schema = [
socket_options: [
type: :keyword_list,
keys: [
certificates: [
type: :keyword_list,
keys: [
path: [type: :string]
]
]
]
]
]
opts = [socket_options: [certificates: [path: :not_a_string]]]
assert NimbleOptions.validate(opts, schema) ==
{:error,
%ValidationError{
key: :path,
value: :not_a_string,
keys_path: [:socket_options, :certificates],
message: "expected :path to be a string, got: :not_a_string"
}}
end
end
# Markdown documentation generation from a schema: nesting/indentation,
# :subsection, :doc (including doc: false hiding), and nest_level handling.
describe "NimbleOptions.docs/1" do
test "override docs for recursive keys" do
# recursive_schema/0 (defined below) references itself via a function
# capture; docs generation must terminate and use the provided :doc texts.
docs = """
* `:type` - Required. The type of the option item.
* `:required` - Defines if the option item is required. The default value is `false`.
* `:keys` - Defines which set of keys are accepted.
* `:default` - The default.
"""
assert NimbleOptions.docs(recursive_schema()) == docs
end
test "generate inline indented docs for nested options" do
schema = [
producer: [
type: :non_empty_keyword_list,
doc: "The producer. Supported options:",
keys: [
module: [type: :mod_arg, doc: "The module."],
rate_limiting: [
type: :non_empty_keyword_list,
doc: """
A list of options to enable and configure rate limiting. Supported options:
""",
keys: [
allowed_messages: [type: :pos_integer, doc: "Number of messages per interval."],
interval: [required: true, type: :pos_integer, doc: "The interval."]
]
]
]
],
other_key: [type: :string]
]
docs = """
* `:producer` - The producer. Supported options:
* `:module` - The module.
* `:rate_limiting` - A list of options to enable and configure rate limiting. Supported options:
* `:allowed_messages` - Number of messages per interval.
* `:interval` - Required. The interval.
* `:other_key`
"""
# docs/1 accepts both a raw schema and a validated NimbleOptions struct.
assert NimbleOptions.docs(schema) == docs
assert NimbleOptions.docs(NimbleOptions.new!(schema)) == docs
end
test "passing specific indentation" do
nested_schema = [
allowed_messages: [type: :pos_integer, doc: "Allowed messages."],
interval: [type: :pos_integer, doc: "Interval."]
]
# nest_level: 1 indents the generated bullets so they can be embedded
# inside another option's :doc text.
schema = [
producer: [
type: {:or, [:string, keyword_list: nested_schema]},
doc: """
The producer. Either a string or a keyword list with the following keys:
#{NimbleOptions.docs(nested_schema, nest_level: 1)}
"""
],
other_key: [type: :string]
]
docs = """
* `:producer` - The producer. Either a string or a keyword list with the following keys:
* `:allowed_messages` - Allowed messages.
* `:interval` - Interval.
* `:other_key`
"""
assert NimbleOptions.docs(schema) == docs
end
test "generate subsections for nested options" do
# :subsection moves the nested keys' docs into a separate section that is
# appended after the main bullet list.
schema = [
name: [required: true, type: :atom, doc: "The name."],
producer: [
type: :non_empty_keyword_list,
doc: "This is the producer summary. See \"Producers options\" section below.",
subsection: """
### Producers options
The producer options allow users to set up the producer.
The available options are:
""",
keys: [
module: [type: :mod_arg, doc: "The module."],
concurrency: [type: :pos_integer, doc: "The concurrency."]
]
],
other_key: [type: :string]
]
docs = """
* `:name` - Required. The name.
* `:producer` - This is the producer summary. See "Producers options" section below.
* `:other_key`
### Producers options
The producer options allow users to set up the producer.
The available options are:
* `:module` - The module.
* `:concurrency` - The concurrency.
"""
assert NimbleOptions.docs(schema) == docs
end
test "keep indentation of multiline doc" do
schema = [
name: [
type: :string,
doc: """
The name.
This a multiline text.
Another line.
"""
],
module: [
type: :atom,
doc: "The module."
]
]
docs = """
* `:name` - The name.
This a multiline text.
Another line.
* `:module` - The module.
"""
assert NimbleOptions.docs(schema) == docs
end
test "the option doesn't appear in the documentation when the :doc option is false" do
schema = [
name: [type: :atom, doc: "An atom."],
secret: [type: :string, doc: false],
count: [type: :integer]
]
docs = """
* `:name` - An atom.
* `:count`
"""
assert NimbleOptions.docs(schema) == docs
end
test "the option and its children don't appear in the documentation when the :doc option is false" do
# doc: false on a parent suppresses the whole subtree.
schema = [
producer: [
type: :keyword_list,
doc: false,
keys: [
name: [type: :atom],
concurrency: [type: :pos_integer]
]
]
]
docs = """
"""
assert NimbleOptions.docs(schema) == docs
end
test "stop generating docs recursively if type has no :keys" do
# A *:-only :keys schema has no concrete key names to document, so docs
# generation stops at the parent bullet.
schema = [
custom_keys: [
type: :keyword_list,
doc: "Custom keys",
keys: [*: [type: :atom, doc: "Won't be there!"]]
]
]
opts = [custom_keys: [key1: :a, key2: :b]]
assert {:ok, ^opts} = NimbleOptions.validate(opts, schema)
assert NimbleOptions.docs(schema) == """
* `:custom_keys` - Custom keys
"""
end
end
# validate!/2 is the bang variant: returns the options directly on success and
# raises NimbleOptions.ValidationError (instead of an error tuple) on failure.
describe "validate!/2 (raising version)" do
test "returns the direct options if the options are valid" do
schema = [name: [], context: []]
opts = [name: MyProducer, context: :ok]
assert NimbleOptions.validate!(opts, schema) == opts
end
test "raises a NimbleOptions.ValidationError if the options are invalid" do
schema = [an_option: [], other_option: []]
opts = [an_option: 1, not_an_option1: 1, not_an_option2: 1]
message =
"unknown options [:not_an_option1, :not_an_option2], valid options are: [:an_option, :other_option]"
assert_raise NimbleOptions.ValidationError, message, fn ->
NimbleOptions.validate!(opts, schema)
end
end
end
# new!/1 at module scope proves wrapper structs can be built at compile time
# (used by the "can be built at compile time" test below).
@compile_time_wrapper NimbleOptions.new!(an_option: [])
describe "wrapper struct" do
test "can be built from a valid schema" do
valid_schema = [an_option: [], other_option: []]
assert %NimbleOptions{} = NimbleOptions.new!(valid_schema)
# A non-keyword schema doesn't match new!/1's clause at all.
invalid_schema = [:atom]
assert_raise FunctionClauseError, fn ->
NimbleOptions.new!(invalid_schema)
end
end
test "will not be validated once built" do
# new!/1 validates; building the struct literal directly bypasses that,
# so validate/2 crashes on the malformed schema rather than reporting it.
invalid_schema = [{"a_binary_key", []}]
invalid_struct = %NimbleOptions{schema: invalid_schema}
assert catch_error(NimbleOptions.validate([], invalid_struct))
end
test "can be built at compile time" do
assert {:ok, _} = NimbleOptions.validate([an_option: 1], @compile_time_wrapper)
end
end
# Custom validator used by the {:custom, __MODULE__, :buffer_keep, []} tests:
# accepts only :first or :last, rejecting anything else with a descriptive
# error message (the exact wording is asserted by the tests above).
def buffer_keep(value) do
  case value do
    allowed when allowed in [:first, :last] ->
      {:ok, allowed}

    other ->
      {:error, "expected :first or :last, got: #{inspect(other)}"}
  end
end
# Custom validator used by the {:custom, __MODULE__, :choice, [choices]} tests:
# accepts `value` only if it is a member of `choices`. Demonstrates that extra
# args from the schema tuple are appended to the validator call.
def choice(value, choices) do
  if value in choices,
    do: {:ok, value},
    else: {:error, "expected one of #{inspect(choices)}, got: #{inspect(value)}"}
end
# Custom validator used throughout the tests that both validates AND casts:
# a binary that parses fully as an integer is converted, a partially-numeric
# or non-numeric binary is rejected, and any non-binary is rejected with a
# different message (asserted verbatim by the tests above).
def string_to_integer(value) when is_binary(value) do
  # Integer.parse/1 returns {int, rest}; only a fully-consumed string counts.
  with {parsed, ""} <- Integer.parse(value) do
    {:ok, parsed}
  else
    _partial_or_error -> {:error, "expected string to be convertible to integer"}
  end
end

def string_to_integer(other) do
  {:error, "expected to be a string, got: #{inspect(other)}"}
end
# Deliberately broken custom validator: returns a bare :ok instead of the
# required {:ok, value} | {:error, message}. Used by the test asserting that
# NimbleOptions raises on contract-violating validators — do NOT "fix" this.
def misbehaving_custom_validator(_value) do
:ok
end
# A self-referential schema (a schema for describing schemas): the nested
# :keys entry points back at this function via a zero-arity capture, which
# NimbleOptions resolves lazily. Exercised by the "override docs for recursive
# keys" test to prove docs generation terminates on recursive schemas.
defp recursive_schema() do
[
*: [
type: :keyword_list,
keys: [
type: [
type: :atom,
required: true,
doc: "The type of the option item."
],
required: [
type: :boolean,
default: false,
doc: "Defines if the option item is required."
],
keys: [
type: :keyword_list,
doc: "Defines which set of keys are accepted.",
# Lazy self-reference — evaluating this eagerly would loop forever.
keys: &recursive_schema/0
],
default: [
doc: "The default."
]
]
]
]
end
end
| 28.612306 | 120 | 0.512704 |
ffd6b3c8088c9c7b24b50f41f10603c47452eba5 | 139 | ex | Elixir | bench/support/sample_worker.ex | zeam-vm/worker_pool_bench | 88941613c88407dd7c1d78f09953bec600381a62 | [
"Apache-2.0"
] | null | null | null | bench/support/sample_worker.ex | zeam-vm/worker_pool_bench | 88941613c88407dd7c1d78f09953bec600381a62 | [
"Apache-2.0"
] | null | null | null | bench/support/sample_worker.ex | zeam-vm/worker_pool_bench | 88941613c88407dd7c1d78f09953bec600381a62 | [
"Apache-2.0"
] | null | null | null | defmodule SampleWorker do
use WorkerPool.Worker
@impl true
def work({pid, list}), do: send(pid, {:ok, Enum.map(list, & &1 * 2)})
end
| 23.166667 | 71 | 0.654676 |
ffd6c613c2c586af47688bb035c6ebc5e3873c15 | 3,020 | ex | Elixir | lib/wechat/web.ex | zhuoyue95/wechat-elixir-sdk | 64d087967c1e9210c8a2dbb12a6ed9364934b166 | [
"MIT"
] | null | null | null | lib/wechat/web.ex | zhuoyue95/wechat-elixir-sdk | 64d087967c1e9210c8a2dbb12a6ed9364934b166 | [
"MIT"
] | null | null | null | lib/wechat/web.ex | zhuoyue95/wechat-elixir-sdk | 64d087967c1e9210c8a2dbb12a6ed9364934b166 | [
"MIT"
] | null | null | null | defmodule Wechat.Web do
  use Tesla
  # All requests share the WeChat "sns" API base URL and JSON encoding;
  # ForceDecodeJSON is a project middleware (defined elsewhere).
  plug(Tesla.Middleware.BaseUrl, "https://api.weixin.qq.com/sns")
  plug(Tesla.Middleware.JSON)
  plug(Wechat.Tesla.Middleware.ForceDecodeJSON)
  # NOTE(review): reading config into module attributes freezes these values
  # at compile time — changing :wechat config at runtime has no effect.
  # Consider Application.fetch_env!/2 inside the functions instead.
  @app_id Application.get_env(:wechat, :app_id)
  @app_secret Application.get_env(:wechat, :app_secret)
@spec authorise_with(String.t()) ::
{:ok, %{access_token: String.t(), open_id: String.t(), refresh_token: String.t()}}
| {:error, any()}
def authorise_with(code) do
query_params = [
appid: @app_id,
secret: @app_secret,
code: code,
grant_type: "authorization_code"
]
with {:ok, %Tesla.Env{body: body, status: 200}} <-
get("/oauth2/access_token", query: query_params) do
case body do
%{"access_token" => access_token, "openid" => open_id, "refresh_token" => refresh_token} ->
{:ok,
%{
access_token: access_token,
open_id: open_id,
refresh_token: refresh_token
}}
%{"errcode" => 40029} ->
{:error, :invalid_code}
%{"errcode" => 40163} ->
{:error, :code_already_used}
%{"errcode" => 41008} ->
{:error, :code_missing}
_ ->
body
end
end
end
@spec refresh_token(String.t()) :: {:ok, String.t()} | :error
def refresh_token(refresh_token) do
query_params = [appid: @app_id, refresh_token: refresh_token, grant_type: "refresh_token"]
with {:ok, %Tesla.Env{body: %{"access_token" => web_access_token}}} <-
get("/oauth2/refresh_token", query: query_params) do
{:ok, web_access_token}
else
_ ->
:error
end
end
@spec is_valid?(String.t(), String.t()) :: boolean | {:error, :upstream_error}
def is_valid?(web_access_token, open_id) do
with {:ok, %Tesla.Env{body: %{"errcode" => 0}}} <-
get("/auth", query: [access_token: web_access_token, openid: open_id]) do
true
else
{:ok, _} ->
false
{:error, _} ->
{:error, :upstream_error}
end
end
# JSSDK related
@spec get_jsapi_ticket(String.t()) :: {:ok, String.t()} | :error
def get_jsapi_ticket(access_token) do
query_params = [
type: "jsapi",
access_token: access_token
]
with {:ok, %Tesla.Env{body: %{"ticket" => ticket}}} <-
get("https://api.weixin.qq.com/cgi-bin/ticket/getticket", query: query_params) do
{:ok, ticket}
else
_ -> :error
end
end
@spec generate_jssdk_signature(String.t(), String.t(), String.t(), non_neg_integer()) ::
String.t()
def generate_jssdk_signature(url, jsapi_ticket, random_string, timestamp) do
data =
[
jsapi_ticket: jsapi_ticket,
noncestr: random_string,
timestamp: timestamp,
url: url
]
|> Enum.map(fn {k, v} -> Atom.to_string(k) <> "=" <> to_string(v) end)
|> Enum.join("&")
:crypto.hash(:sha, data)
|> Base.encode16()
|> String.downcase()
end
end
| 27.454545 | 99 | 0.579801 |
ffd6c82ada6c53d927fd58061a4c2099f045bf11 | 421 | ex | Elixir | lib/hologram/compiler/js_encoders/list_subtraction_operator.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 40 | 2022-01-19T20:27:36.000Z | 2022-03-31T18:17:41.000Z | lib/hologram/compiler/js_encoders/list_subtraction_operator.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 42 | 2022-02-03T22:52:43.000Z | 2022-03-26T20:57:32.000Z | lib/hologram/compiler/js_encoders/list_subtraction_operator.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 3 | 2022-02-10T04:00:37.000Z | 2022-03-08T22:07:45.000Z | alias Hologram.Compiler.{Context, JSEncoder, Opts}
alias Hologram.Compiler.IR.ListSubtractionOperator
# Encodes an IR list-subtraction node (`left -- right`) into the JavaScript
# runtime call that performs the operation.
defimpl JSEncoder, for: ListSubtractionOperator do
  def encode(%{left: left, right: right}, %Context{} = context, %Opts{} = opts) do
    [encoded_left, encoded_right] =
      Enum.map([left, right], &JSEncoder.encode(&1, context, opts))

    "Hologram.Interpreter.$list_subtraction_operator(#{encoded_left}, #{encoded_right})"
  end
end
| 35.083333 | 82 | 0.738717 |
ffd6cd28ae60b6fa55929902066458dd6f752455 | 414 | exs | Elixir | ch15/spawn3.exs | rafanoronha/prog_elixir_1_6 | 5ae2137da4ffdb6de1c9b164812ed6e43c0922d3 | [
"MIT"
] | null | null | null | ch15/spawn3.exs | rafanoronha/prog_elixir_1_6 | 5ae2137da4ffdb6de1c9b164812ed6e43c0922d3 | [
"MIT"
] | null | null | null | ch15/spawn3.exs | rafanoronha/prog_elixir_1_6 | 5ae2137da4ffdb6de1c9b164812ed6e43c0922d3 | [
"MIT"
] | null | null | null | defmodule Spawn3 do
def greet do
receive do
{sender, msg} ->
send sender, {:ok, "Hello, #{msg}"}
end
end
end
# here's a client
# `greet/0` performs a single `receive`, so the spawned process handles
# exactly one message and then exits.
pid = spawn(Spawn3, :greet, [])
send pid, {self(), "World!"}
# First exchange succeeds: the greeter is still alive and waiting.
receive do
  {:ok, message} ->
    IO.puts message
end
send pid, {self(), "Kermit!"}
# Second send goes to a dead process — no reply ever arrives, so this
# deliberately times out after 5 seconds and prints the fallback line.
receive do
  {:ok, message} ->
    IO.puts message
after 5000 ->
  IO.puts "The greeter has gone away"
end
| 15.333333 | 43 | 0.584541 |
ffd6d61d4f16b737089fea7dde02fd370cf4f1c0 | 505 | exs | Elixir | lib/iex/mix.exs | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | 1 | 2017-09-09T20:59:04.000Z | 2017-09-09T20:59:04.000Z | lib/iex/mix.exs | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | null | null | null | lib/iex/mix.exs | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | null | null | null | defmodule IEx.Mixfile do
use Mix.Project
  # Mix project definition; the version always tracks the running Elixir
  # version rather than a hard-coded number.
  def project do
    [app: :iex, version: System.version]
  end
  # OTP application definition: seeds IEx's default runtime configuration
  # (after-spawn hooks, inspect options, shell colors, history size).
  def application do
    [ env: [
      after_spawn: [],
      inspect_opts: [limit: 50, raw: false, pretty: true],
      colors: [ enabled: true,
                eval_result: "yellow",
                error: "red",
                info: "normal",
                directory: "blue",
                device: "green" ],
      history_size: 20,
      started: true ] ]
  end
end
| 22.954545 | 60 | 0.493069 |
ffd6f3485b73b3047ac7ef81f1e625b30cd6b521 | 249 | ex | Elixir | lib/reanix.ex | RubyFireStudio/phoenix-api-scaffold | 231c11fe6008a47d51e24b8e26fb6f5dc7a24ca6 | [
"MIT"
] | 1 | 2018-05-13T19:28:48.000Z | 2018-05-13T19:28:48.000Z | lib/reanix.ex | RubyFireStudio/phoenix-api-scaffold | 231c11fe6008a47d51e24b8e26fb6f5dc7a24ca6 | [
"MIT"
] | null | null | null | lib/reanix.ex | RubyFireStudio/phoenix-api-scaffold | 231c11fe6008a47d51e24b8e26fb6f5dc7a24ca6 | [
"MIT"
] | null | null | null | defmodule Reanix do
@moduledoc """
Reanix keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 24.9 | 66 | 0.751004 |
ffd6fe084f4cd90820543ef7553d2ff8fed169f8 | 2,288 | exs | Elixir | spec/assertions/refute_receive_spec.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 807 | 2015-03-25T14:00:19.000Z | 2022-03-24T08:08:15.000Z | spec/assertions/refute_receive_spec.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 254 | 2015-03-27T10:12:25.000Z | 2021-07-12T01:40:15.000Z | spec/assertions/refute_receive_spec.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 85 | 2015-04-02T10:25:19.000Z | 2021-01-30T21:30:43.000Z | defmodule RefuteReceiveSpec do
use ESpec, async: true
describe "ESpec.Assertions.RefuteReceive" do
context "Success" do
it "refutes recieve with no message" do
send(self(), :another_hello)
message = refute_receive :hello_refute_1
expect(message) |> to(eq "Have not received `:hello_refute_1`.")
end
it "refutes recieved with no message" do
send(self(), :another_hello)
message = refute_received :hello_refute_2
expect(message) |> to(eq "Have not received `:hello_refute_2`.")
end
it "refutes recieved with unbound variable" do
send(self(), :another_hello)
message = refute_received {_some, _unbound, _variable}
expect(message) |> to(eq "Have not received `{_some, _unbound, _variable}`.")
end
it "refutes recieved with _" do
send(self(), :another_hello)
message = refute_received {_, _, _, _}
expect(message) |> to(eq "Have not received `{_, _, _, _}`.")
end
end
context "Errors" do
it "refute received when message is in mailbox" do
try do
send(self(), :hello_refute)
refute_received :hello_refute
rescue
error in [ESpec.AssertionError] ->
message = "Expected not to receive `:hello_refute`, but have received."
expect(error.message) |> to(eq message)
end
end
it "refute received when message is in mailbox" do
try do
send(self(), :hello_refute)
refute_received _unbound
rescue
error in [ESpec.AssertionError] ->
message = "Expected not to receive `_unbound`, but have received."
expect(error.message) |> to(eq message)
end
end
it "refute received when message is somewhere in mailbox" do
try do
for i <- 1..10, do: send(self(), {:message, i})
send(self(), :hello_refute)
for i <- 1..10, do: send(self(), {:message, i})
refute_received :hello_refute
rescue
error in [ESpec.AssertionError] ->
message = "Expected not to receive `:hello_refute`, but have received."
expect(error.message) |> to(eq message)
end
end
end
end
end
| 33.15942 | 85 | 0.594406 |
ffd7158e576ea5f5142db5ee63a7a367ea97d81d | 397 | exs | Elixir | test/day_three_test.exs | bennyhat/advent-of-code-2018 | 3ea45cb0b1589ed4cd112a312d31835eb90b4da9 | [
"MIT"
] | null | null | null | test/day_three_test.exs | bennyhat/advent-of-code-2018 | 3ea45cb0b1589ed4cd112a312d31835eb90b4da9 | [
"MIT"
] | null | null | null | test/day_three_test.exs | bennyhat/advent-of-code-2018 | 3ea45cb0b1589ed4cd112a312d31835eb90b4da9 | [
"MIT"
] | null | null | null | defmodule Advent2018.DayThree.Test do
use ExUnit.Case
  # Claims #2 and #3 coincide exactly, so their whole 4x4 region is
  # contested; the assertion pins the expected overlap at 16 square inches.
  test "computes square inches of overlap" do
    input = """
    #1 @ 1,3: 4x4
    #2 @ 3,1: 4x4
    #3 @ 3,1: 4x4
    #4 @ 5,5: 2x2
    """
    assert Advent2018.DayThree.part_one(input) == 16
  end
  # NOTE(review): only asserts a non-nil result on empty input —
  # presumably a placeholder until part two has a real fixture.
  test "finds the common letters for the matching ids" do
    input = ""
    assert Advent2018.DayThree.part_two(input) != nil
  end
end
| 19.85 | 57 | 0.622166 |
ffd732afed07b576876e4c004d4f84c82cbb70d1 | 2,145 | ex | Elixir | clients/books/lib/google_api/books/v1/model/volumeseriesinfo_volume_series.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/books/lib/google_api/books/v1/model/volumeseriesinfo_volume_series.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/books/lib/google_api/books/v1/model/volumeseriesinfo_volume_series.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Books.V1.Model.VolumeseriesinfoVolumeSeries do
  @moduledoc """
  ## Attributes
  * `issue` (*type:* `list(GoogleApi.Books.V1.Model.VolumeseriesinfoVolumeSeriesIssue.t)`, *default:* `nil`) - List of issues. Applicable only for Collection Edition and Omnibus.
  * `orderNumber` (*type:* `integer()`, *default:* `nil`) - The book order number in the series.
  * `seriesBookType` (*type:* `String.t`, *default:* `nil`) - The book type in the context of series. Examples - Single Issue, Collection Edition, etc.
  * `seriesId` (*type:* `String.t`, *default:* `nil`) - The series id.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :issue => list(GoogleApi.Books.V1.Model.VolumeseriesinfoVolumeSeriesIssue.t()),
          :orderNumber => integer(),
          :seriesBookType => String.t(),
          :seriesId => String.t()
        }
  # Field names intentionally keep the API's camelCase JSON attribute names;
  # this file is generated — regenerate rather than editing by hand.
  field(:issue, as: GoogleApi.Books.V1.Model.VolumeseriesinfoVolumeSeriesIssue, type: :list)
  field(:orderNumber)
  field(:seriesBookType)
  field(:seriesId)
end
# Delegate Poison (de)serialization to the generated model helpers.
defimpl Poison.Decoder, for: GoogleApi.Books.V1.Model.VolumeseriesinfoVolumeSeries do
  def decode(value, options) do
    GoogleApi.Books.V1.Model.VolumeseriesinfoVolumeSeries.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Books.V1.Model.VolumeseriesinfoVolumeSeries do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.303571 | 180 | 0.723077 |
ffd737c842ebbed559cd8cbe051c374f50a1c8b7 | 5,399 | exs | Elixir | test/glimesh/chat/parser_test.exs | Megami-Studios/glimesh.tv | 57dde3a328fabdcc3305be48ae1b82df27b83c9b | [
"MIT"
] | 328 | 2020-07-23T22:13:49.000Z | 2022-03-31T21:22:28.000Z | test/glimesh/chat/parser_test.exs | Megami-Studios/glimesh.tv | 57dde3a328fabdcc3305be48ae1b82df27b83c9b | [
"MIT"
] | 362 | 2020-07-23T22:38:38.000Z | 2022-03-24T02:11:16.000Z | test/glimesh/chat/parser_test.exs | Megami-Studios/glimesh.tv | 57dde3a328fabdcc3305be48ae1b82df27b83c9b | [
"MIT"
] | 72 | 2020-07-23T22:50:46.000Z | 2022-02-02T11:59:32.000Z | defmodule Glimesh.Chat.ParserTest do
use Glimesh.DataCase
import Glimesh.EmotesFixtures
alias Glimesh.Chat.Parser
alias Glimesh.Chat.Token
# If you are making changes to the parser and you'd like to benchmark it
# {benchmark, :ok} =
# :timer.tc(fn ->
# Parser.parse(
# "https://glimesh.tv :glimwow: https://glimesh.tv :glimwow: https://glimesh.tv :glimwow: https://glimesh.tv :glimwow: https://glimesh.tv :glimwow: https://glimesh.tv :glimwow: https://glimesh.tv :glimwow: https://glimesh.tv :glimwow: https://glimesh.tv :glimwow: "
# )
#
# :ok
# end)
#
# IO.puts("Time to Parser: #{benchmark}μs") ~186μs on a Mac M1 16GB
  # Each test gets one static and one animated global emote fixture.
  # Parser.parse/1,2 returns a flat list of Token structs (text/emote/url).
  describe "chat parser" do
    setup do
      %{static: static_global_emote_fixture(), animated: animated_global_emote_fixture()}
    end
    test "lexes a simple message", %{static: static, animated: animated} do
      assert Parser.parse("") == [%Token{type: "text", text: ""}]
      assert Parser.parse("Hello world") == [%Token{type: "text", text: "Hello world"}]
      assert Parser.parse(":glimchef:") == [
               %Token{
                 type: "emote",
                 text: ":glimchef:",
                 src: Glimesh.Emotes.full_url(static)
               }
             ]
      allow_animated_emotes = %Parser.Config{allow_animated_emotes: true}
      assert Parser.parse(":glimdance:", allow_animated_emotes) == [
               %Token{
                 type: "emote",
                 text: ":glimdance:",
                 src: Glimesh.Emotes.full_url(animated)
               }
             ]
      assert Parser.parse("https://glimesh.tv") == [
               %Token{type: "url", text: "https://glimesh.tv", url: "https://glimesh.tv"}
             ]
      assert Parser.parse("http://glimesh.tv") == [
               %Token{type: "url", text: "http://glimesh.tv", url: "http://glimesh.tv"}
             ]
      # Bare domains without a scheme are treated as plain text.
      assert Parser.parse("glimesh.tv") == [
               %Token{type: "text", text: "glimesh.tv"}
             ]
      # Make sure we're not confusing a dot at the end for a URL
      assert Parser.parse("example.") == [%Token{type: "text", text: "example."}]
    end
    # Each Config flag independently downgrades its token kind to plain text.
    test "respects the config", %{static: static} do
      no_links = %Parser.Config{allow_links: false}
      no_emotes = %Parser.Config{allow_emotes: false}
      no_animated_emotes = %Parser.Config{allow_animated_emotes: false}
      assert Parser.parse("https://example.com/", no_links) == [
               %Token{type: "text", text: "https://example.com/"}
             ]
      assert Parser.parse(":glimchef:", no_emotes) == [
               %Token{type: "text", text: ":glimchef:"}
             ]
      assert Parser.parse(":glimdance: :glimchef:", no_animated_emotes) == [
               %Token{type: "text", text: ":glimdance:"},
               %Token{type: "text", text: " "},
               %Token{
                 type: "emote",
                 text: ":glimchef:",
                 src: Glimesh.Emotes.full_url(static)
               }
             ]
    end
    # With allow_animated_emotes: false, the global animated emote stays
    # text, but the channel's own animated emote still renders.
    test "allows animated emotes for channel, not for global" do
      # Reset the config just in case it's been changed
      Application.put_env(:glimesh, Glimesh.Emotes, allow_channel_animated_emotes: true)
      streamer = Glimesh.AccountsFixtures.streamer_fixture()
      {:ok, channel} =
        Glimesh.Streams.update_emote_settings(streamer, streamer.channel, %{
          emote_prefix: "testg"
        })
      {:ok, %Glimesh.Emotes.Emote{} = emote} =
        Glimesh.Emotes.create_channel_emote(streamer, channel, %{
          emote: "dance",
          animated: true,
          animated_file: "test/assets/glimdance.gif",
          approved_at: NaiveDateTime.utc_now()
        })
      no_animated_emotes = %Parser.Config{
        allow_animated_emotes: false,
        channel_id: channel.id
      }
      assert Parser.parse(":glimdance: :testgdance:", no_animated_emotes) == [
               %Token{type: "text", text: ":glimdance:"},
               %Token{type: "text", text: " "},
               %Token{
                 type: "emote",
                 text: ":testgdance:",
                 src: Glimesh.Emotes.full_url(emote)
               }
             ]
    end
    # Mixed message: text runs, two URLs, and both emote kinds interleaved.
    test "lexes a complex message", %{static: static, animated: animated} do
      allow_animated_emotes = %Parser.Config{allow_animated_emotes: true}
      parsed =
        Parser.parse(
          "Hello https://glimesh.tv :glimchef: world! How:glimdance:are https://google.com you!",
          allow_animated_emotes
        )
      assert parsed == [
               %Token{type: "text", text: "Hello "},
               %Token{type: "url", text: "https://glimesh.tv", url: "https://glimesh.tv"},
               %Token{type: "text", text: " "},
               %Token{type: "emote", text: ":glimchef:", src: Glimesh.Emotes.full_url(static)},
               %Token{type: "text", text: " world! How"},
               %Token{type: "emote", text: ":glimdance:", src: Glimesh.Emotes.full_url(animated)},
               %Token{type: "text", text: "are "},
               %Token{type: "url", text: "https://google.com", url: "https://google.com"},
               %Token{type: "text", text: " you!"}
             ]
    end
  end
def measure(function) do
function
|> :timer.tc()
|> elem(0)
|> Kernel./(1_000_000)
end
end
| 35.519737 | 274 | 0.549917 |
ffd7406314ff3686dc5d5889379b739bc677ef7e | 984 | ex | Elixir | server/test/support/channel_case.ex | MikaAK/trello-burndown | b78d97fa03fcdd60c1c9652b65d272936f648c6f | [
"MIT"
] | null | null | null | server/test/support/channel_case.ex | MikaAK/trello-burndown | b78d97fa03fcdd60c1c9652b65d272936f648c6f | [
"MIT"
] | 3 | 2016-04-18T18:09:21.000Z | 2016-04-25T07:29:59.000Z | server/test/support/channel_case.ex | MikaAK/trello-burndown | b78d97fa03fcdd60c1c9652b65d272936f648c6f | [
"MIT"
] | null | null | null | defmodule TrelloBurndown.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
  # Injected into every test module that does `use TrelloBurndown.ChannelCase`.
  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest
      alias TrelloBurndown.Repo
      import Ecto
      import Ecto.Changeset
      import Ecto.Query, only: [from: 1, from: 2]
      # The default endpoint for testing
      @endpoint TrelloBurndown.Endpoint
    end
  end
  # Synchronous tests check out a fresh SQL sandbox connection so each
  # test runs in its own transaction; async tests skip the checkout here.
  # NOTE(review): `unless` is soft-deprecated in modern Elixir — `if not`
  # would be preferred on an upgrade.
  setup tags do
    unless tags[:async] do
      :ok = Ecto.Adapters.SQL.Sandbox.checkout(TrelloBurndown.Repo)
    end
    :ok
  end
end
| 23.428571 | 67 | 0.706301 |
ffd778a03b86435b0159be93e9b5a8211f3cc5b3 | 940 | exs | Elixir | deps/buffer/mix.exs | rsnestor/iot_emulator | 6e1a1835801eb180bc97838461ea5a2f44a6a3c4 | [
"MIT"
] | null | null | null | deps/buffer/mix.exs | rsnestor/iot_emulator | 6e1a1835801eb180bc97838461ea5a2f44a6a3c4 | [
"MIT"
] | null | null | null | deps/buffer/mix.exs | rsnestor/iot_emulator | 6e1a1835801eb180bc97838461ea5a2f44a6a3c4 | [
"MIT"
] | null | null | null | defmodule Buffer.Mixfile do
use Mix.Project
  # Version is read from the VERSION file at compile time.
  @version File.read!("VERSION") |> String.trim
  def project do
    [
      app: :buffer,
      version: @version,
      elixir: "~> 1.5",
      test_coverage: [tool: ExCoveralls],
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      # Dev/test-only dependencies; the library itself has none.
      deps: [{:ex_doc, ">= 0.0.0", only: :dev}, {:excoveralls, "~> 0.7.3", only: :test}],
      package: package(),
      name: "buffer",
      source_url: "https://github.com/adrienmo/buffer",
      elixirc_options: [warnings_as_errors: true],
      description: """
      Provide read and write buffers for Elixir
      """
    ]
  end
  # OTP application: starts the Buffer module as the application callback.
  def application do
    [applications: [], mod: {Buffer, []}]
  end
  # Hex package metadata.
  defp package do
    [
      files: ~w(lib README.md LICENSE VERSION mix.exs),
      maintainers: ["Adrien Moreau"],
      licenses: ["MIT"],
      links: %{github: "https://github.com/adrienmo/buffer"}
    ]
  end
end
| 24.736842 | 89 | 0.582979 |
ffd7845db2f17d378877ea790f2b6246be652db3 | 6,086 | ex | Elixir | lib/ex_unit/lib/ex_unit.ex | jquadrin/elixir | 98746e08eaa2bf58c202e8500b6cf83ed2368cc0 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit.ex | jquadrin/elixir | 98746e08eaa2bf58c202e8500b6cf83ed2368cc0 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit.ex | jquadrin/elixir | 98746e08eaa2bf58c202e8500b6cf83ed2368cc0 | [
"Apache-2.0"
] | null | null | null | defmodule ExUnit do
@moduledoc """
Basic unit testing framework for Elixir.
## Example
A basic setup for ExUnit is shown below:
# File: assertion_test.exs
# 1) Start ExUnit.
ExUnit.start
# 2) Create a new test module (test case) and use `ExUnit.Case`.
defmodule AssertionTest do
# 3) Notice we pass `async: true`, this runs the test case
# concurrently with other test cases
use ExUnit.Case, async: true
# 4) Use the `test` macro instead of `def` for clarity.
test "the truth" do
assert true
end
end
To run the tests above, run the file
using `elixir` from the command line. Assuming you named the file
`assertion_test.exs`, you can run it as:
bin/elixir assertion_test.exs
## Case, Callbacks and Assertions
See `ExUnit.Case` and `ExUnit.Callbacks`
for more information about defining test cases.
The `ExUnit.Assertions` module contains
a set of macros to easily generate assertions with appropriate
error messages.
## Integration with Mix
Mix is the project management and build tool for Elixir. Invoking `mix test`
from the command line will run the tests in each file matching the pattern
`*_test.exs` found in the `test` directory of your project.
You must create a `test_helper.exs` file inside the
`test` directory and put the code common to all tests there.
The minimum example of a `test_helper.exs` file would be:
# test/test_helper.exs
ExUnit.start
Mix will load the `test_helper.exs` file before executing the tests.
It is not necessary to `require` the `test_helper.exs` file in your test
files. See `Mix.Tasks.Test` for more information.
"""
@typedoc "The state returned by ExUnit.Test and ExUnit.TestCase"
@type state :: nil | {:failed, failed} | {:skip, binary} | {:invalid, module}
@type failed :: {Exception.kind, reason :: term, stacktrace :: [tuple]}
  # Value structs handed to formatters while a suite runs.
  defmodule Test do
    @moduledoc """
    A struct that keeps information about the test.
    It is received by formatters and contains the following fields:
      * `:name` - the test name
      * `:case` - the test case
      * `:state` - the test state (see ExUnit.state)
      * `:time` - the time to run the test
      * `:tags` - the test tags
    """
    defstruct name: nil,
              case: nil,
              state: nil,
              time: 0,
              tags: %{}
    @type t :: %__MODULE__{
                 name: atom,
                 case: module,
                 state: ExUnit.state,
                 time: non_neg_integer,
                 tags: map}
  end
  defmodule TestCase do
    @moduledoc """
    A struct that keeps information about the test case.
    It is received by formatters and contains the following fields:
      * `:name` - the test case name
      * `:state` - the test state (see ExUnit.state)
      * `:tests` - all tests for this case
    """
    defstruct name: nil,
              state: nil,
              tests: []
    @type t :: %__MODULE__{
                 name: module,
                 state: ExUnit.state,
                 tests: [ExUnit.Test.t]}
  end
  # Exception for tests that exceed their timeout; message/1 tells the
  # user how to raise the limit via @tag timeout.
  defmodule TimeoutError do
    defexception [:timeout]
    def message(%{timeout: timeout}) do
      "test timed out after #{timeout}ms (you can change the test timeout " <>
        "by setting \"@tag timeout: x\" where x is an integer in milliseconds)"
    end
  end
use Application
  @doc false
  # OTP application callback: supervises the server that accumulates test
  # cases and the handler that runs registered on-exit callbacks.
  def start(_type, []) do
    import Supervisor.Spec
    children = [
      worker(ExUnit.Server, []),
      worker(ExUnit.OnExitHandler, [])
    ]
    opts = [strategy: :one_for_one, name: ExUnit.Supervisor]
    Supervisor.start_link(children, opts)
  end
  @doc """
  Starts ExUnit and automatically runs tests right before the
  VM terminates. It accepts a set of options to configure `ExUnit`
  (the same ones accepted by `configure/1`).
  If you want to run tests manually, you can set `:autorun` to `false`.
  """
  def start(options \\ []) do
    {:ok, _} = Application.ensure_all_started(:ex_unit)
    configure(options)
    if Application.get_env(:ex_unit, :autorun, true) do
      # Flip :autorun off so a second start/1 call cannot schedule the
      # suite twice.
      Application.put_env(:ex_unit, :autorun, false)
      System.at_exit fn
        0 ->
          %{failures: failures} = ExUnit.run
          # A second at_exit hook is registered from within the first so the
          # failure exit status is applied only after the run has finished.
          System.at_exit fn _ ->
            if failures > 0, do: exit({:shutdown, 1})
          end
        _ ->
          # Non-zero status means the VM is already exiting abnormally;
          # do not run the suite on top of it.
          :ok
      end
    end
  end
@doc """
Configures ExUnit.
## Options
ExUnit supports the following options:
* `:colors` - a keyword list of colors to be used by some formatters.
The only option so far is `[enabled: boolean]` which defaults to `IO.ANSI.enabled?/1`
* `:formatters` - the formatters that will print results;
defaults to `[ExUnit.CLIFormatter]`
* `:max_cases` - maximum number of cases to run in parallel;
defaults to `:erlang.system_info(:schedulers_online)`
* `:trace` - set ExUnit into trace mode, this sets `:max_cases` to `1` and
prints each test case and test while running
* `:autorun` - if ExUnit should run by default on exit; defaults to `true`
* `:include` - specify which tests are run by skipping tests that do not
match the filter
* `:exclude` - specify which tests are run by skipping tests that match the
filter
* `:seed` - an integer seed value to randomize the test suite
* `:timeout` - set the timeout for the tests
"""
def configure(options) do
Enum.each options, fn {k, v} ->
Application.put_env(:ex_unit, k, v)
end
end
  @doc """
  Returns ExUnit configuration.
  """
  def configuration do
    Application.get_all_env(:ex_unit)
  end
  @doc """
  API used to run the tests. It is invoked automatically
  if ExUnit is started via `ExUnit.start/1`.
  Returns a map containing the total number of tests, the number
  of failures and the number of skipped tests.
  """
  def run do
    # load_us is presumably the time spent loading test files in
    # microseconds, forwarded to the runner for reporting — TODO confirm.
    {async, sync, load_us} = ExUnit.Server.start_run
    ExUnit.Runner.run async, sync, configuration, load_us
  end
end
| 27.789954 | 91 | 0.633257 |
ffd793cb6bbb8040a640294ffdf8014e2e06b910 | 3,435 | exs | Elixir | test/changelog_web/controllers/admin/metacast_controller_text.exs | gustavoarmoa/changelog.com | e898a9979a237ae66962714821ed8633a4966f37 | [
"MIT"
] | 2,599 | 2016-10-25T15:02:53.000Z | 2022-03-26T02:34:42.000Z | test/changelog_web/controllers/admin/metacast_controller_text.exs | sdrees/changelog.com | 955cdcf93d74991062f19a03e34c9f083ade1705 | [
"MIT"
] | 253 | 2016-10-25T20:29:24.000Z | 2022-03-29T21:52:36.000Z | test/changelog_web/controllers/admin/metacast_controller_text.exs | sdrees/changelog.com | 955cdcf93d74991062f19a03e34c9f083ade1705 | [
"MIT"
] | 298 | 2016-10-25T15:18:31.000Z | 2022-01-18T21:25:52.000Z | defmodule ChangelogWeb.Admin.MetacastControllerTest do
use ChangelogWeb.ConnCase
alias Changelog.Metacast
alias ChangelogWeb.Router.Helpers
  # Base attribute fixtures; a metacast additionally needs a unique slug,
  # which valid_attrs/1 merges in per test.
  @valid_attrs %{name: "Polyglot", filter_query: "except"}
  @invalid_attrs %{name: ""}
  defp valid_attrs(slug), do: Map.put(@valid_attrs, :slug, slug)
  # NOTE(review): this file's path ends in "_text.exs" (per repo metadata),
  # which does not match mix's default "*_test.exs" pattern — these tests
  # may never run. Confirm and rename the file if so.
  @tag :as_admin
  test "lists all metacasts", %{conn: conn} do
    p1 = insert(:metacast, slug: "list-all-1")
    p2 = insert(:metacast, slug: "list-all-2")
    conn = get(conn, Helpers.admin_metacast_path(conn, :index))
    assert html_response(conn, 200) =~ ~r/Metacasts/
    assert String.contains?(conn.resp_body, p1.name)
    assert String.contains?(conn.resp_body, p2.name)
  end
  @tag :as_admin
  test "renders form to create new metacast", %{conn: conn} do
    conn = get(conn, Helpers.admin_metacast_path(conn, :new))
    assert html_response(conn, 200) =~ ~r/new/
  end
  @tag :as_admin
  test "creates metacast and redirects", %{conn: conn} do
    conn =
      post(conn, Helpers.admin_metacast_path(conn, :create),
        metacast: valid_attrs("creates-metacast-and-redirects"),
        next: Helpers.admin_metacast_path(conn, :index)
      )
    assert redirected_to(conn) == Helpers.admin_metacast_path(conn, :index)
    assert count(Metacast) == 1
  end
  @tag :as_admin
  test "does not create with invalid attributes", %{conn: conn} do
    count_before = count(Metacast)
    conn = post(conn, Helpers.admin_metacast_path(conn, :create), metacast: @invalid_attrs)
    assert html_response(conn, 200) =~ ~r/error/
    assert count(Metacast) == count_before
  end
  @tag :as_admin
  test "renders form to edit metacast", %{conn: conn} do
    metacast = insert(:metacast, slug: "render-form-to-edit-metacast")
    conn = get(conn, Helpers.admin_metacast_path(conn, :edit, metacast))
    assert html_response(conn, 200) =~ ~r/edit/i
  end
  @tag :as_admin
  test "updates metacast and redirects", %{conn: conn} do
    metacast = insert(:metacast, slug: "updates-metacast-and-redirects")
    conn =
      put(conn, Helpers.admin_metacast_path(conn, :update, metacast),
        metacast: valid_attrs("updates-metacast-and-redirects")
      )
    assert redirected_to(conn) == Helpers.admin_metacast_path(conn, :index)
    assert count(Metacast) == 1
  end
  @tag :as_admin
  test "does not update with invalid attributes", %{conn: conn} do
    metacast = insert(:metacast, slug: "does-not-update-with-invalid-attributes")
    count_before = count(Metacast)
    conn =
      put(conn, Helpers.admin_metacast_path(conn, :update, metacast), metacast: @invalid_attrs)
    assert html_response(conn, 200) =~ ~r/error/
    assert count(Metacast) == count_before
  end
  # Unauthenticated requests to every action must redirect (302) and halt.
  test "requires user auth on all actions", %{conn: conn} do
    metacast = insert(:metacast, slug: "requires-user-auth-on-all-actions")
    Enum.each(
      [
        get(conn, Helpers.admin_metacast_path(conn, :index)),
        get(conn, Helpers.admin_metacast_path(conn, :new)),
        post(conn, Helpers.admin_metacast_path(conn, :create),
          metacast: valid_attrs("requires-user-auth-1")
        ),
        get(conn, Helpers.admin_metacast_path(conn, :edit, metacast)),
        put(conn, Helpers.admin_metacast_path(conn, :update, metacast),
          metacast: valid_attrs("requires-user-auth-2")
        )
      ],
      fn conn ->
        assert html_response(conn, 302)
        assert conn.halted
      end
    )
  end
end
| 32.40566 | 95 | 0.677438 |
ffd7c28214034d993abe05298afdff6b4642cb42 | 2,605 | ex | Elixir | lib/mp_api/accounts/message.ex | jsvelasquezv/mp_api | 9a2262188b5b12c0e2ecd9284a8e7f445d2be4a0 | [
"MIT"
] | null | null | null | lib/mp_api/accounts/message.ex | jsvelasquezv/mp_api | 9a2262188b5b12c0e2ecd9284a8e7f445d2be4a0 | [
"MIT"
] | null | null | null | lib/mp_api/accounts/message.ex | jsvelasquezv/mp_api | 9a2262188b5b12c0e2ecd9284a8e7f445d2be4a0 | [
"MIT"
] | null | null | null | defmodule MpApi.Accounts.Message do
@moduledoc """
A module for sending messages, by email or phone, to the user.
This module provides functions to be used with the Phauxth authentication
library when confirming users or handling password resets. It uses
Bamboo, with the Mandrill adapter, to email users. For tests, it uses
a test adapter, which is configured in the config/test.exs file.
If you want to use a different email adapter, or another email / phone
library, read the instructions below.
## Bamboo with a different adapter
Bamboo has adapters for Mailgun, Mailjet, Mandrill, Sendgrid, SMTP,
SparkPost, PostageApp, Postmark and Sendcloud.
See [Bamboo](https://github.com/thoughtbot/bamboo) for more information.
## Other email / phone library
If you do not want to use Bamboo, follow the instructions below:
1. Edit this file, using the email / phone library of your choice
2. Remove the lib/mp_api/mailer.ex file
3. Remove the Bamboo entries in the config/config.exs and config/test.exs files
4. Remove bamboo from the deps section in the mix.exs file
"""
import Bamboo.Email
alias MpApi.Mailer
@doc """
An email with a confirmation link in it.
"""
def confirm_request(address, key) do
prep_mail(address)
|> subject("Confirm your account")
|> text_body("Confirm your email here http://www.example.com/confirm?key=#{key}")
|> Mailer.deliver_now
end
@doc """
An email with a link to reset the password.
"""
def reset_request(address, nil) do
prep_mail(address)
|> subject("Reset your password")
|> text_body("You requested a password reset, but no user is associated with the email you provided.")
|> Mailer.deliver_now
end
def reset_request(address, key) do
prep_mail(address)
|> subject("Reset your password")
|> text_body("Reset your password at http://www.example.com/password_resets/edit?key=#{key}")
|> Mailer.deliver_now
end
@doc """
An email acknowledging that the account has been successfully confirmed.
"""
def confirm_success(address) do
prep_mail(address)
|> subject("Confirmed account")
|> text_body("Your account has been confirmed.")
|> Mailer.deliver_now
end
@doc """
An email acknowledging that the password has been successfully reset.
"""
def reset_success(address) do
prep_mail(address)
|> subject("Password reset")
|> text_body("Your password has been reset.")
|> Mailer.deliver_now
end
defp prep_mail(address) do
new_email()
|> to(address)
|> from("admin@example.com")
end
end
| 30.647059 | 106 | 0.709021 |
ffd7cfc402a190b9a5d7e262a25523574202831b | 3,110 | exs | Elixir | config/runtime.exs | froseph/ambry | 86c1a8528b9f3cc7e4a7debd8005df4116a7d1b1 | [
"MIT"
] | null | null | null | config/runtime.exs | froseph/ambry | 86c1a8528b9f3cc7e4a7debd8005df4116a7d1b1 | [
"MIT"
] | null | null | null | config/runtime.exs | froseph/ambry | 86c1a8528b9f3cc7e4a7debd8005df4116a7d1b1 | [
"MIT"
] | null | null | null | import Config
# config/runtime.exs is executed for all environments, including
# during releases. It is executed after compilation and before the
# system starts, so it is typically used to load production configuration
# and secrets from environment variables or elsewhere. Do not define
# any compile-time configuration in here, as it won't be applied.

# Uploaded files are stored under an "uploads" directory relative to the
# working directory of the running node.
config :ambry, uploads_path: Path.join(File.cwd!(), "uploads")

# The block below contains prod specific runtime configuration.
if config_env() == :prod do
  # Each required environment variable raises at boot with a descriptive
  # message when missing, so misconfiguration fails fast.
  database_url =
    System.get_env("DATABASE_URL") ||
      raise """
      environment variable DATABASE_URL is missing.
      For example: ecto://USER:PASS@HOST/DATABASE
      """

  config :ambry, Ambry.Repo,
    # ssl: true,
    # socket_options: [:inet6],
    url: database_url,
    pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")

  # The secret key base is used to sign/encrypt cookies and other secrets.
  # A default value is used in config/dev.exs and config/test.exs but you
  # want to use a different value for prod and you most likely don't want
  # to check this value into version control, so we use an environment
  # variable instead.
  secret_key_base =
    System.get_env("SECRET_KEY_BASE") ||
      raise """
      environment variable SECRET_KEY_BASE is missing.
      You can generate one by calling: mix phx.gen.secret
      """

  url_string =
    System.get_env("BASE_URL") ||
      raise """
      environment variable BASE_URL is missing.
      """

  # Only the host and port components of BASE_URL are used below.
  %{host: host, port: port} = URI.parse(url_string)

  config :ambry, AmbryWeb.Endpoint,
    url: [host: host, port: port],
    http: [
      # Enable IPv6 and bind on all interfaces.
      # Set it to {0, 0, 0, 0, 0, 0, 0, 1} for local network only access.
      # See the documentation on https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html
      # for details about using IPv6 vs IPv4 and loopback vs public addresses.
      ip: {0, 0, 0, 0, 0, 0, 0, 0},
      port: String.to_integer(System.get_env("PORT") || "80")
    ],
    secret_key_base: secret_key_base

  # ## Using releases
  #
  # If you are doing OTP releases, you need to instruct Phoenix
  # to start each relevant endpoint:
  #
  config :ambry, AmbryWeb.Endpoint, server: true

  #
  # Then you can assemble a release by calling `mix release`.
  # See `mix help release` for more information.

  # ## Configuring the mailer
  #
  # In production you need to configure the mailer to use a different adapter.
  # Also, you may need to configure the Swoosh API client of your choice if you
  # are not using SMTP. Here is an example of the configuration:
  #
  # config :ambry, Ambry.Mailer,
  #   adapter: Swoosh.Adapters.Mailgun,
  #   api_key: System.get_env("MAILGUN_API_KEY"),
  #   domain: System.get_env("MAILGUN_DOMAIN")
  #
  # For this example you need include a HTTP client required by Swoosh API client.
  # Swoosh supports Hackney and Finch out of the box:
  #
  # config :swoosh, :api_client, Swoosh.ApiClient.Hackney
  #
  # See https://hexdocs.pm/swoosh/Swoosh.html#module-installation for details.
end
| 36.162791 | 82 | 0.689389 |
ffd7d5c85bfd9280ca23f7b13a740798c495892b | 242 | exs | Elixir | priv/repo/migrations/20220411084543_add_resources_number_hosts_number_to_cluster_read_model.exs | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 1 | 2022-03-22T16:59:34.000Z | 2022-03-22T16:59:34.000Z | priv/repo/migrations/20220411084543_add_resources_number_hosts_number_to_cluster_read_model.exs | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 24 | 2022-03-22T16:45:25.000Z | 2022-03-31T13:00:02.000Z | priv/repo/migrations/20220411084543_add_resources_number_hosts_number_to_cluster_read_model.exs | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 1 | 2022-03-30T14:16:16.000Z | 2022-03-30T14:16:16.000Z | defmodule Trento.Repo.Migrations.AddResourcesNumberHostsNumberToClusterReadModel do
use Ecto.Migration
def change do
alter table(:clusters) do
add :resources_number, :integer
add :hosts_number, :integer
end
end
end
| 22 | 83 | 0.752066 |
ffd7d69e2792beec42072f35ccb48978d746b0f7 | 64 | ex | Elixir | lib/skillset_web/views/skill_view.ex | LunarLogic/skillset | fe7eda83506ce45e2a60fd77bd17c3981b2ef254 | [
"MIT"
] | null | null | null | lib/skillset_web/views/skill_view.ex | LunarLogic/skillset | fe7eda83506ce45e2a60fd77bd17c3981b2ef254 | [
"MIT"
] | null | null | null | lib/skillset_web/views/skill_view.ex | LunarLogic/skillset | fe7eda83506ce45e2a60fd77bd17c3981b2ef254 | [
"MIT"
] | null | null | null | defmodule SkillsetWeb.SkillView do
use SkillsetWeb, :view
end
| 16 | 34 | 0.8125 |
ffd7fd395ed1577b349772165fe01189099c0974 | 472 | exs | Elixir | test/absinthe/integration/execution/simple_query_returning_list_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 4,101 | 2016-03-02T03:49:20.000Z | 2022-03-31T05:46:01.000Z | test/absinthe/integration/execution/simple_query_returning_list_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 889 | 2016-03-02T16:06:59.000Z | 2022-03-31T20:24:12.000Z | test/absinthe/integration/execution/simple_query_returning_list_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 564 | 2016-03-02T07:49:59.000Z | 2022-03-06T14:40:59.000Z | defmodule Elixir.Absinthe.Integration.Execution.SimpleQueryReturningListTest do
use Absinthe.Case, async: true
@query """
query {
things {
id
name
}
}
"""
test "scenario #1" do
assert {:ok,
%{
data: %{
"things" => [%{"id" => "bar", "name" => "Bar"}, %{"id" => "foo", "name" => "Foo"}]
}
}} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, [])
end
end
| 21.454545 | 98 | 0.489407 |
ffd7ff6b17a7099079ad40f08deb1622db20b92d | 815 | exs | Elixir | test/holidays/definitions/br_test.exs | assj/holidays | 158657e296179d858f173cff3656d3fcaf5c653d | [
"MIT"
] | 16 | 2016-01-11T16:18:38.000Z | 2022-02-24T09:41:05.000Z | test/holidays/definitions/br_test.exs | assj/holidays | 158657e296179d858f173cff3656d3fcaf5c653d | [
"MIT"
] | 9 | 2016-08-14T01:26:16.000Z | 2021-02-08T01:00:35.000Z | test/holidays/definitions/br_test.exs | assj/holidays | 158657e296179d858f173cff3656d3fcaf5c653d | [
"MIT"
] | 8 | 2017-06-08T09:23:46.000Z | 2021-08-17T19:03:05.000Z | defmodule Holidays.BrTest do
use ExUnit.Case
import Holidays.TestHelper
setup_all do
restart_application()
Holidays.Definitions.Br.init()
:ok
end
holiday_test "Confraternização Universal", {2018, 1, 1}, :br
holiday_test "Tiradentes", {2018, 4, 21}, :br
holiday_test "Dia do Trabalhador", {2018, 5, 1}, :br
holiday_test "Proclamação da Independência", {2018, 9, 7}, :br
holiday_test "Nossa Senhora Aparecida", {2018, 10, 12}, :br
holiday_test "Proclamação da República", {2018, 11, 15}, :br
holiday_test "Natal", {2018, 12, 25}, :br
holiday_test "Carnaval", {2018, 2, 13}, :br
holiday_test "Sexta-feira santa", {2018, 3, 30}, :br
holiday_test "Páscoa", {2018, 4, 1}, :br
holiday_test "Corpus Christi", {2018, 5, 31}, :br
holiday_test "Eleições", {2018, 10, 7}, :br
end
| 32.6 | 64 | 0.676074 |
ffd834c29b1afe4c58f537292316fc6cc2ea33d5 | 194 | ex | Elixir | apps/snitch_api/lib/snitch_api_web/views/wishlist_item_view.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 456 | 2018-09-20T02:40:59.000Z | 2022-03-07T08:53:48.000Z | apps/snitch_api/lib/snitch_api_web/views/wishlist_item_view.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 273 | 2018-09-19T06:43:43.000Z | 2021-08-07T12:58:26.000Z | apps/snitch_api/lib/snitch_api_web/views/wishlist_item_view.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 122 | 2018-09-26T16:32:46.000Z | 2022-03-13T11:44:19.000Z | defmodule SnitchApiWeb.WishListItemView do
use SnitchApiWeb, :view
use JaSerializer.PhoenixView
has_one(
:variant,
serializer: SnitchApiWeb.VariantView,
include: true
)
end
| 17.636364 | 42 | 0.747423 |
ffd85124d283ea2984fc42070a1534f4a72b34a0 | 492 | ex | Elixir | api/lib/core/config.ex | lucas-angermann/idai-field-web | 788c9c9505b6fd12d591345b23053e934f1022d1 | [
"Apache-2.0"
] | null | null | null | api/lib/core/config.ex | lucas-angermann/idai-field-web | 788c9c9505b6fd12d591345b23053e934f1022d1 | [
"Apache-2.0"
] | null | null | null | api/lib/core/config.ex | lucas-angermann/idai-field-web | 788c9c9505b6fd12d591345b23053e934f1022d1 | [
"Apache-2.0"
] | null | null | null | defmodule Api.Core.Config do
require Logger
def get key do
with {:ok, val} <- Application.fetch_env(:api, key) do
val
else
_ -> Logger.error "#{key} not set in config!"
nil
end
end
def get module, key do
with {:ok, val} <- Application.fetch_env(:api, module),
val when val != nil <- get_in(val, [key])
do
val
else
_ -> Logger.error "#{key} not set in config for module #{module}!"
nil
end
end
end
| 20.5 | 72 | 0.558943 |
ffd85a65c8865468d2de5905482afb84cadde07d | 7,845 | ex | Elixir | lib/codes/codes_s69.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_s69.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_s69.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_S69 do
alias IcdCode.ICDCode
def _S6980XA do
%ICDCode{full_code: "S6980XA",
category_code: "S69",
short_code: "80XA",
full_name: "Other specified injuries of unspecified wrist, hand and finger(s), initial encounter",
short_name: "Other specified injuries of unspecified wrist, hand and finger(s), initial encounter",
category_name: "Other specified injuries of unspecified wrist, hand and finger(s), initial encounter"
}
end
def _S6980XD do
%ICDCode{full_code: "S6980XD",
category_code: "S69",
short_code: "80XD",
full_name: "Other specified injuries of unspecified wrist, hand and finger(s), subsequent encounter",
short_name: "Other specified injuries of unspecified wrist, hand and finger(s), subsequent encounter",
category_name: "Other specified injuries of unspecified wrist, hand and finger(s), subsequent encounter"
}
end
def _S6980XS do
%ICDCode{full_code: "S6980XS",
category_code: "S69",
short_code: "80XS",
full_name: "Other specified injuries of unspecified wrist, hand and finger(s), sequela",
short_name: "Other specified injuries of unspecified wrist, hand and finger(s), sequela",
category_name: "Other specified injuries of unspecified wrist, hand and finger(s), sequela"
}
end
def _S6981XA do
%ICDCode{full_code: "S6981XA",
category_code: "S69",
short_code: "81XA",
full_name: "Other specified injuries of right wrist, hand and finger(s), initial encounter",
short_name: "Other specified injuries of right wrist, hand and finger(s), initial encounter",
category_name: "Other specified injuries of right wrist, hand and finger(s), initial encounter"
}
end
def _S6981XD do
%ICDCode{full_code: "S6981XD",
category_code: "S69",
short_code: "81XD",
full_name: "Other specified injuries of right wrist, hand and finger(s), subsequent encounter",
short_name: "Other specified injuries of right wrist, hand and finger(s), subsequent encounter",
category_name: "Other specified injuries of right wrist, hand and finger(s), subsequent encounter"
}
end
def _S6981XS do
%ICDCode{full_code: "S6981XS",
category_code: "S69",
short_code: "81XS",
full_name: "Other specified injuries of right wrist, hand and finger(s), sequela",
short_name: "Other specified injuries of right wrist, hand and finger(s), sequela",
category_name: "Other specified injuries of right wrist, hand and finger(s), sequela"
}
end
def _S6982XA do
%ICDCode{full_code: "S6982XA",
category_code: "S69",
short_code: "82XA",
full_name: "Other specified injuries of left wrist, hand and finger(s), initial encounter",
short_name: "Other specified injuries of left wrist, hand and finger(s), initial encounter",
category_name: "Other specified injuries of left wrist, hand and finger(s), initial encounter"
}
end
def _S6982XD do
%ICDCode{full_code: "S6982XD",
category_code: "S69",
short_code: "82XD",
full_name: "Other specified injuries of left wrist, hand and finger(s), subsequent encounter",
short_name: "Other specified injuries of left wrist, hand and finger(s), subsequent encounter",
category_name: "Other specified injuries of left wrist, hand and finger(s), subsequent encounter"
}
end
def _S6982XS do
%ICDCode{full_code: "S6982XS",
category_code: "S69",
short_code: "82XS",
full_name: "Other specified injuries of left wrist, hand and finger(s), sequela",
short_name: "Other specified injuries of left wrist, hand and finger(s), sequela",
category_name: "Other specified injuries of left wrist, hand and finger(s), sequela"
}
end
def _S6990XA do
%ICDCode{full_code: "S6990XA",
category_code: "S69",
short_code: "90XA",
full_name: "Unspecified injury of unspecified wrist, hand and finger(s), initial encounter",
short_name: "Unspecified injury of unspecified wrist, hand and finger(s), initial encounter",
category_name: "Unspecified injury of unspecified wrist, hand and finger(s), initial encounter"
}
end
def _S6990XD do
%ICDCode{full_code: "S6990XD",
category_code: "S69",
short_code: "90XD",
full_name: "Unspecified injury of unspecified wrist, hand and finger(s), subsequent encounter",
short_name: "Unspecified injury of unspecified wrist, hand and finger(s), subsequent encounter",
category_name: "Unspecified injury of unspecified wrist, hand and finger(s), subsequent encounter"
}
end
def _S6990XS do
%ICDCode{full_code: "S6990XS",
category_code: "S69",
short_code: "90XS",
full_name: "Unspecified injury of unspecified wrist, hand and finger(s), sequela",
short_name: "Unspecified injury of unspecified wrist, hand and finger(s), sequela",
category_name: "Unspecified injury of unspecified wrist, hand and finger(s), sequela"
}
end
def _S6991XA do
%ICDCode{full_code: "S6991XA",
category_code: "S69",
short_code: "91XA",
full_name: "Unspecified injury of right wrist, hand and finger(s), initial encounter",
short_name: "Unspecified injury of right wrist, hand and finger(s), initial encounter",
category_name: "Unspecified injury of right wrist, hand and finger(s), initial encounter"
}
end
def _S6991XD do
%ICDCode{full_code: "S6991XD",
category_code: "S69",
short_code: "91XD",
full_name: "Unspecified injury of right wrist, hand and finger(s), subsequent encounter",
short_name: "Unspecified injury of right wrist, hand and finger(s), subsequent encounter",
category_name: "Unspecified injury of right wrist, hand and finger(s), subsequent encounter"
}
end
def _S6991XS do
%ICDCode{full_code: "S6991XS",
category_code: "S69",
short_code: "91XS",
full_name: "Unspecified injury of right wrist, hand and finger(s), sequela",
short_name: "Unspecified injury of right wrist, hand and finger(s), sequela",
category_name: "Unspecified injury of right wrist, hand and finger(s), sequela"
}
end
def _S6992XA do
%ICDCode{full_code: "S6992XA",
category_code: "S69",
short_code: "92XA",
full_name: "Unspecified injury of left wrist, hand and finger(s), initial encounter",
short_name: "Unspecified injury of left wrist, hand and finger(s), initial encounter",
category_name: "Unspecified injury of left wrist, hand and finger(s), initial encounter"
}
end
def _S6992XD do
%ICDCode{full_code: "S6992XD",
category_code: "S69",
short_code: "92XD",
full_name: "Unspecified injury of left wrist, hand and finger(s), subsequent encounter",
short_name: "Unspecified injury of left wrist, hand and finger(s), subsequent encounter",
category_name: "Unspecified injury of left wrist, hand and finger(s), subsequent encounter"
}
end
def _S6992XS do
%ICDCode{full_code: "S6992XS",
category_code: "S69",
short_code: "92XS",
full_name: "Unspecified injury of left wrist, hand and finger(s), sequela",
short_name: "Unspecified injury of left wrist, hand and finger(s), sequela",
category_name: "Unspecified injury of left wrist, hand and finger(s), sequela"
}
end
end
| 46.420118 | 114 | 0.659911 |
ffd85ac3ac99dfed7b132ed722addb6e7b31abd1 | 136 | ex | Elixir | test/support/test_repo.ex | sepow/ecto_identifier | 905c18d1221b525fcde1d5b1760692e76fd0bb1e | [
"Apache-2.0"
] | 15 | 2018-09-23T17:13:07.000Z | 2021-11-09T14:54:05.000Z | test/support/test_repo.ex | sepow/ecto_identifier | 905c18d1221b525fcde1d5b1760692e76fd0bb1e | [
"Apache-2.0"
] | 3 | 2021-05-17T16:01:18.000Z | 2021-06-13T20:29:18.000Z | test/support/test_repo.ex | sepow/ecto_identifier | 905c18d1221b525fcde1d5b1760692e76fd0bb1e | [
"Apache-2.0"
] | 5 | 2019-06-21T11:41:36.000Z | 2020-12-27T02:38:32.000Z | defmodule EctoIdentifier.TestRepo do
@moduledoc false
use Ecto.Repo, otp_app: :ecto_identifier, adapter: Ecto.Adapters.Postgres
end
| 27.2 | 75 | 0.808824 |
ffd86f4d8f037994253f1603ce474bf33521270c | 5,496 | ex | Elixir | lib/mix/ex_oauth2_provider/migration.ex | loopsocial/ex_oauth2_provider | 59d177f1c7581e1d794823279067022b1598f5f2 | [
"MIT"
] | null | null | null | lib/mix/ex_oauth2_provider/migration.ex | loopsocial/ex_oauth2_provider | 59d177f1c7581e1d794823279067022b1598f5f2 | [
"MIT"
] | null | null | null | lib/mix/ex_oauth2_provider/migration.ex | loopsocial/ex_oauth2_provider | 59d177f1c7581e1d794823279067022b1598f5f2 | [
"MIT"
] | null | null | null | defmodule Mix.ExOauth2Provider.Migration do
@moduledoc """
Utilities module for ecto migrations in mix tasks.
"""
alias Mix.Generator
@doc """
Creates a migration file for a repo.
"""
@spec create_migration_file(atom(), binary(), binary()) :: any()
def create_migration_file(repo, name, content) do
base_name = "#{Macro.underscore(name)}.exs"
path =
repo
|> Mix.EctoSQL.source_repo_priv()
|> Path.join("migrations")
|> maybe_create_directory()
timestamp = timestamp(path)
path
|> ensure_unique(base_name, name)
|> Path.join("#{timestamp}_#{base_name}")
|> Generator.create_file(content)
end
defp maybe_create_directory(path) do
Generator.create_directory(path)
path
end
defp ensure_unique(path, base_name, name) do
path
|> Path.join("*_#{base_name}")
|> Path.wildcard()
|> case do
[] ->
path
_ ->
Mix.raise(
"migration can't be created, there is already a migration file with name #{name}."
)
end
end
defp timestamp(path, seconds \\ 0) do
timestamp = gen_timestamp(seconds)
path
|> Path.join("#{timestamp}_*.exs")
|> Path.wildcard()
|> case do
[] -> timestamp
_ -> timestamp(path, seconds + 1)
end
end
defp gen_timestamp(seconds) do
%{year: y, month: m, day: d, hour: hh, minute: mm, second: ss} =
DateTime.utc_now()
|> DateTime.to_unix()
|> Kernel.+(seconds)
|> DateTime.from_unix!()
"#{y}#{pad(m)}#{pad(d)}#{pad(hh)}#{pad(mm)}#{pad(ss)}"
end
defp pad(i) when i < 10, do: <<?0, ?0 + i>>
defp pad(i), do: to_string(i)
@template """
defmodule <%= inspect migration.repo %>.Migrations.<%= migration.name %> do
use Ecto.Migration
def change do
<%= for schema <- migration.schemas do %>
create table(:<%= schema.table %><%= if schema.binary_id do %>, primary_key: false<% end %>) do
<%= if schema.binary_id do %> add :id, :binary_id, primary_key: true
<% end %><%= for {k, v} <- schema.attrs do %> add <%= inspect k %>, <%= inspect v %><%= schema.defaults[k] %>
<% end %><%= for {_, i, _, s} <- schema.assocs do %> add <%= if(String.ends_with?(inspect(i), "_id"), do: inspect(i), else: inspect(i) <> "_id") %>, references(<%= inspect(s) %>, on_delete: :nothing<%= if schema.binary_id do %>, type: :binary_id<% end %>)
<% end %>
timestamps()
end
<%= for index <- schema.indexes do %>
<%= index %><% end %>
<% end %>
end
end
"""
alias ExOauth2Provider.{
AccessGrants.AccessGrant,
AccessTokens.AccessToken,
Applications.Application
}
@schemas [
{"applications", Application},
{"access_grants", AccessGrant},
{"access_tokens", AccessToken}
]
@spec gen(binary(), binary(), map()) :: binary()
def gen(name, namespace, %{repo: repo} = config) do
schemas =
for {table, module} <- @schemas,
do: schema(module, table, namespace, config)
EEx.eval_string(@template, migration: %{repo: repo, name: name, schemas: schemas})
end
defp schema(module, table, namespace, %{binary_id: binary_id}) do
attrs =
module.attrs()
|> Kernel.++(attrs_from_assocs(module.assocs(), namespace))
|> migration_attrs()
defaults = defaults(attrs)
{assocs, attrs} = partition_attrs(attrs)
table = "#{namespace}_#{table}"
indexes = migration_indexes(module.indexes(), table)
%{
table: table,
binary_id: binary_id,
attrs: attrs,
defaults: defaults,
assocs: assocs,
indexes: indexes
}
end
defp attrs_from_assocs(assocs, namespace) do
assocs
|> Enum.map(&attr_from_assoc(&1, namespace))
|> Enum.reject(&is_nil/1)
end
defp attr_from_assoc({:belongs_to, name, :users}, _namespace) do
{String.to_atom("#{name}_id"), {:references, :users}}
end
defp attr_from_assoc({:belongs_to, name, table}, namespace) do
{String.to_atom("#{name}_id"), {:references, String.to_atom("#{namespace}_#{table}")}}
end
defp attr_from_assoc({:belongs_to, name, table, _defaults}, namespace),
do: attr_from_assoc({:belongs_to, name, table}, namespace)
defp attr_from_assoc(_assoc, _opts), do: nil
defp migration_attrs(attrs) do
Enum.map(attrs, &to_migration_attr/1)
end
defp to_migration_attr({name, type}) do
{name, type, ""}
end
defp to_migration_attr({name, type, []}) do
to_migration_attr({name, type})
end
defp to_migration_attr({name, type, defaults}) do
defaults = Enum.map_join(defaults, ", ", fn {k, v} -> "#{k}: #{inspect(v)}" end)
{name, type, ", #{defaults}"}
end
defp defaults(attrs) do
Enum.map(attrs, fn {key, _value, defaults} ->
{key, defaults}
end)
end
defp partition_attrs(attrs) do
{assocs, attrs} =
Enum.split_with(attrs, fn
{_, {:references, _}, _} -> true
_ -> false
end)
attrs = Enum.map(attrs, fn {key_id, type, _defaults} -> {key_id, type} end)
assocs =
Enum.map(assocs, fn {key_id, {:references, source}, _} ->
key = String.replace(Atom.to_string(key_id), "_id", "")
{String.to_atom(key), key_id, nil, source}
end)
{assocs, attrs}
end
defp migration_indexes(indexes, table) do
Enum.map(indexes, &to_migration_index(table, &1))
end
defp to_migration_index(table, {key_or_keys, true}),
do: "create unique_index(:#{table}, #{inspect(List.wrap(key_or_keys))})"
end
| 26.941176 | 262 | 0.609534 |
ffd8704984e7817aa67e1098584f1daa7db2ec12 | 878 | exs | Elixir | test/lib/web/router_test.exs | mfeckie/phoenix_frontend_deploys | 86adf2a7a51ce6757e50dfbca74677a93a6950c2 | [
"MIT"
] | null | null | null | test/lib/web/router_test.exs | mfeckie/phoenix_frontend_deploys | 86adf2a7a51ce6757e50dfbca74677a93a6950c2 | [
"MIT"
] | null | null | null | test/lib/web/router_test.exs | mfeckie/phoenix_frontend_deploys | 86adf2a7a51ce6757e50dfbca74677a93a6950c2 | [
"MIT"
] | null | null | null | defmodule PhoenixFrontendDeploys.RouterTest do
use ExUnit.Case, async: true
use Plug.Test
@endpoint TestApp.Endpoint
test "Get frontend" do
index = conn(:get, "/frontends")
|> TestApp.Router.call([])
status = index.status
assert status == 200
end
test "Get revisions" do
revisions = conn(:get, "revisions")
|> TestApp.Router.call([])
status = revisions.status
assert status == 200
end
test "Post revision" do
# TODO - Test through the Router. I'm not thrilled with this method, but trying to get Phoenix parameter parsing working outside phoenix has been troublesome.
conn = conn(:post, "revisions/activate", %{"revision" => "abc123"})
revision = TestApp.RevisionsController.activate(conn, %{"revision" => "abc123"})
status = revision.resp_body
assert status == "{\"revision\":\"abc123\"}"
end
end
| 26.606061 | 163 | 0.673121 |
ffd872f8e46b98bdb420740f26cd9884cd4f76fd | 4,576 | exs | Elixir | test/meeseeks/selector/xpath/expr/arithmetic_test.exs | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 291 | 2017-03-27T15:53:36.000Z | 2022-03-14T23:01:42.000Z | test/meeseeks/selector/xpath/expr/arithmetic_test.exs | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 70 | 2017-03-30T23:32:34.000Z | 2021-06-27T06:26:28.000Z | test/meeseeks/selector/xpath/expr/arithmetic_test.exs | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 23 | 2017-06-18T10:29:04.000Z | 2021-11-04T13:08:12.000Z | defmodule Meeseeks.Selector.XPath.Expr.ArithmeticTest do
use ExUnit.Case
alias Meeseeks.Document
alias Meeseeks.Selector.Combinator
alias Meeseeks.Selector.XPath.Expr
@document Meeseeks.parse(
{"book", [], [{"chapter", [], [{"page", [], ["1"]}, {"page", [], ["2"]}]}]},
:tuple_tree
)
test "add" do
expr = %Expr.Arithmetic{
op: :+,
e1: %Expr.Number{value: 2},
e2: %Expr.Number{value: 2}
}
node = Document.get_node(@document, 4)
context = %{}
expected = 4
assert Expr.eval(expr, node, @document, context) == expected
end
test "infinity + 1" do
expr = %Expr.Arithmetic{
op: :+,
e1: %Expr.Number{value: :Infinity},
e2: %Expr.Number{value: 1}
}
node = Document.get_node(@document, 4)
context = %{}
expected = :Infinity
assert Expr.eval(expr, node, @document, context) == expected
end
test "subtract" do
expr = %Expr.Arithmetic{
op: :-,
e1: %Expr.Number{value: 2},
e2: %Expr.Number{value: 2}
}
node = Document.get_node(@document, 4)
context = %{}
expected = 0
assert Expr.eval(expr, node, @document, context) == expected
end
test "multiply" do
expr = %Expr.Arithmetic{
op: :*,
e1: %Expr.Number{value: 2},
e2: %Expr.Number{value: 2}
}
node = Document.get_node(@document, 4)
context = %{}
expected = 4
assert Expr.eval(expr, node, @document, context) == expected
end
test "divide" do
expr = %Expr.Arithmetic{
op: :div,
e1: %Expr.Number{value: 2},
e2: %Expr.Number{value: 2}
}
node = Document.get_node(@document, 4)
context = %{}
expected = 1
assert Expr.eval(expr, node, @document, context) == expected
end
test "divide by zero" do
expr = %Expr.Arithmetic{
op: :div,
e1: %Expr.Number{value: 2},
e2: %Expr.Number{value: 0}
}
node = Document.get_node(@document, 4)
context = %{}
expected = :Infinity
assert Expr.eval(expr, node, @document, context) == expected
end
test "mod" do
expr = %Expr.Arithmetic{
op: :mod,
e1: %Expr.Number{value: 2},
e2: %Expr.Number{value: 2}
}
node = Document.get_node(@document, 4)
context = %{}
expected = 0
assert Expr.eval(expr, node, @document, context) == expected
end
test "valid string" do
expr = %Expr.Arithmetic{
op: :+,
e1: %Expr.Literal{value: "2"},
e2: %Expr.Number{value: 2}
}
node = Document.get_node(@document, 4)
context = %{}
expected = 4
assert Expr.eval(expr, node, @document, context) == expected
end
test "invalid string" do
expr = %Expr.Arithmetic{op: :+, e1: %Expr.Literal{value: "two"}, e2: %Expr.Number{value: 2}}
node = Document.get_node(@document, 4)
context = %{}
expected = :NaN
assert Expr.eval(expr, node, @document, context) == expected
end
test "boolean" do
expr = %Expr.Arithmetic{
op: :+,
e1: %Expr.Function{f: true, args: []},
e2: %Expr.Number{value: 2}
}
node = Document.get_node(@document, 4)
context = %{}
expected = 3
assert Expr.eval(expr, node, @document, context) == expected
end
test "nodeset" do
expr = %Expr.Arithmetic{
op: :+,
e1: %Expr.Path{
steps: [
%Expr.Step{
combinator: %Combinator.DescendantsOrSelf{selector: nil},
predicates: [%Expr.NodeType{type: :node}]
},
%Expr.Step{
combinator: %Combinator.Children{selector: nil},
predicates: [%Expr.NameTest{namespace: nil, tag: "page"}]
}
],
type: :abs
},
e2: %Expr.Number{value: 2}
}
node = Document.get_node(@document, 4)
context = %{}
expected = 3
assert Expr.eval(expr, node, @document, context) == expected
end
test "nodeset empty" do
expr = %Expr.Arithmetic{
op: :+,
e1: %Expr.Path{
steps: [
%Expr.Step{
combinator: %Combinator.DescendantsOrSelf{selector: nil},
predicates: [%Expr.NodeType{type: :node}]
},
%Expr.Step{
combinator: %Combinator.Children{selector: nil},
predicates: [%Expr.NameTest{namespace: nil, tag: "missing"}]
}
],
type: :abs
},
e2: %Expr.Number{value: 2}
}
node = Document.get_node(@document, 4)
context = %{}
expected = :NaN
assert Expr.eval(expr, node, @document, context) == expected
end
end
| 24.340426 | 96 | 0.559659 |
ffd87bb4ac841c80c5c331b8e6db938ffddc0446 | 1,063 | ex | Elixir | test/support/conn_case.ex | CandN/eliscore-chat | a626077bf1c46b329c15f63db9a25521b43bd61b | [
"MIT"
] | null | null | null | test/support/conn_case.ex | CandN/eliscore-chat | a626077bf1c46b329c15f63db9a25521b43bd61b | [
"MIT"
] | null | null | null | test/support/conn_case.ex | CandN/eliscore-chat | a626077bf1c46b329c15f63db9a25521b43bd61b | [
"MIT"
] | null | null | null | defmodule EliscoreChatWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
import EliscoreChatWeb.Router.Helpers
# The default endpoint for testing
@endpoint EliscoreChatWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(EliscoreChat.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(EliscoreChat.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 27.25641 | 74 | 0.726246 |
ffd885144758f7a068260c2a80dee0c8e96315ea | 6,169 | exs | Elixir | test/operate/vm/extension/crypto_test.exs | operate-bsv/agent | 4f3d69b7eb33f3afe5c880c10e39485b18b27ec5 | [
"Apache-2.0"
] | 13 | 2019-02-14T05:27:03.000Z | 2019-08-14T03:35:52.000Z | test/operate/vm/extension/crypto_test.exs | operate-bsv/agent | 4f3d69b7eb33f3afe5c880c10e39485b18b27ec5 | [
"Apache-2.0"
] | 3 | 2019-11-28T11:38:39.000Z | 2021-12-15T01:23:09.000Z | test/operate/vm/extension/crypto_test.exs | operate-bsv/agent | 4f3d69b7eb33f3afe5c880c10e39485b18b27ec5 | [
"Apache-2.0"
] | 2 | 2019-12-16T10:46:51.000Z | 2020-10-10T11:23:46.000Z | defmodule Operate.VM.Extension.CryptoTest do
use ExUnit.Case
alias Operate.VM
doctest Operate.VM.Extension.Crypto
  # Builds the key material once for the whole module — an AES key, an
  # ECDSA key pair, an RSA key pair and BSV keys/address — then registers
  # each piece in the VM state under a named variable so the scripts
  # evaluated in the tests below can reference them by name.
  setup_all do
    aes_key = BSV.Test.symetric_key
    ecdsa_key = BSV.Test.ecdsa_key |> BSV.Crypto.ECDSA.PrivateKey.from_sequence
    rsa_priv_key = BSV.Crypto.RSA.PrivateKey.from_sequence(BSV.Test.rsa_key)
    rsa_pub_key = BSV.Crypto.RSA.PrivateKey.get_public_key(rsa_priv_key)
    bsv_keys = BSV.KeyPair.from_ecdsa_key(BSV.Test.bsv_keys)
    bsv_address = BSV.Address.from_public_key(bsv_keys)

    # VM.set! exposes each value to evaluated scripts under the given name.
    vm =
      VM.init
      |> VM.set!("aes_key", aes_key)
      |> VM.set!("ecdsa_priv_key", ecdsa_key.private_key)
      |> VM.set!("ecdsa_pub_key", ecdsa_key.public_key)
      |> VM.set!("rsa_priv_key", BSV.Crypto.RSA.PrivateKey.as_raw(rsa_priv_key))
      |> VM.set!("rsa_pub_key", BSV.Crypto.RSA.PublicKey.as_raw(rsa_pub_key))
      |> VM.set!("bsv_priv_key", bsv_keys.private_key)
      |> VM.set!("bsv_pub_key", bsv_keys.public_key)
      |> VM.set!("bsv_address", BSV.Address.to_string(bsv_address))

    %{
      vm: vm
    }
  end
describe "Operate.VM.Extension.Crypto.aes_encrypt/3 and Operate.VM.Extension.Crypto.aes_decrypt/3" do
test "must encrypt with public key and decrypt with private key", ctx do
script = """
enc_data = crypto.aes.encrypt('hello world', aes_key)
return crypto.aes.decrypt(enc_data, aes_key)
"""
assert VM.eval!(ctx.vm, script) == "hello world"
end
end
describe "Operate.VM.Extension.Crypto.ecies_encrypt/3 and Operate.VM.Extension.Crypto.ecies_decrypt/3" do
test "must encrypt with public key and decrypt with private key", ctx do
script = """
enc_data = crypto.ecies.encrypt('hello world', ecdsa_pub_key)
return crypto.ecies.decrypt(enc_data, ecdsa_priv_key)
"""
assert VM.eval!(ctx.vm, script) == "hello world"
end
end
describe "Operate.VM.Extension.Crypto.ecdsa_sign/3 and Operate.VM.Extension.Crypto.ecdsa_verify/4" do
test "must sign and verify message", ctx do
script = """
sig = crypto.ecdsa.sign('hello world', ecdsa_priv_key)
return crypto.ecdsa.verify(sig, 'hello world', ecdsa_pub_key)
"""
assert VM.eval!(ctx.vm, script) == true
end
test "wont verify when different message", ctx do
script = """
sig = crypto.ecdsa.sign('hello world', ecdsa_priv_key)
return crypto.ecdsa.verify(sig, 'goodbye world', ecdsa_pub_key)
"""
assert VM.eval!(ctx.vm, script) == false
end
end
describe "Operate.VM.Extension.Crypto.rsa_encrypt/3 and Operate.VM.Extension.Crypto.rsa_decrypt/3" do
@tag :rsa
test "must encrypt with public key and decrypt with private key", ctx do
script = """
enc_data = crypto.rsa.encrypt('hello world', rsa_pub_key)
return crypto.rsa.decrypt(enc_data, rsa_priv_key)
"""
assert VM.eval!(ctx.vm, script) == "hello world"
end
@tag :rsa
test "must encrypt with private key and decrypt with public key", ctx do
script = """
enc_data = crypto.rsa.encrypt('hello world', rsa_priv_key)
return crypto.rsa.decrypt(enc_data, rsa_pub_key)
"""
assert VM.eval!(ctx.vm, script) == "hello world"
end
end
describe "Operate.VM.Extension.Crypto.rsa_sign/3 and Operate.VM.Extension.Crypto.rsa_verify/4" do
@tag :rsa
test "must sign and verify message", ctx do
script = """
sig = crypto.rsa.sign('hello world', rsa_priv_key)
return crypto.rsa.verify(sig, 'hello world', rsa_pub_key)
"""
assert VM.eval!(ctx.vm, script) == true
end
@tag :rsa
test "wont verify when different message", ctx do
script = """
sig = crypto.rsa.sign('hello world', rsa_priv_key)
return crypto.rsa.verify(sig, 'goodbye world', rsa_pub_key)
"""
assert VM.eval!(ctx.vm, script) == false
end
end
describe "Operate.VM.Extension.Crypto.hash functions" do
test "must create a ripemd160 hash", ctx do
assert VM.eval!(ctx.vm, "return crypto.hash.ripemd160('hello world')") == <<
152, 198, 21, 120, 76, 203, 95, 229, 147, 111, 188, 12, 190, 157,
253, 180, 8, 217, 47, 15>>
end
test "must create a sha1 hash", ctx do
assert VM.eval!(ctx.vm, "return crypto.hash.sha1('hello world')") == <<
42, 174, 108, 53, 201, 79, 207, 180, 21, 219, 233, 95, 64, 139,
156, 233, 30, 232, 70, 237>>
end
test "must create a sha256 hash", ctx do
assert VM.eval!(ctx.vm, "return crypto.hash.sha256('hello world')") == <<
185, 77, 39, 185, 147, 77, 62, 8, 165, 46, 82, 215, 218, 125, 171,
250, 196, 132, 239, 227, 122, 83, 128, 238, 144, 136, 247, 172,
226, 239, 205, 233>>
end
test "must create a sha512 hash", ctx do
assert VM.eval!(ctx.vm, "return crypto.hash.sha512('hello world')") == <<
48, 158, 204, 72, 156, 18, 214, 235, 76, 196, 15, 80, 201, 2, 242,
180, 208, 237, 119, 238, 81, 26, 124, 122, 155, 205, 60, 168, 109,
76, 216, 111, 152, 157, 211, 91, 197, 255, 73, 150, 112, 218, 52, 37,
91, 69, 176, 207, 216, 48, 232, 31, 96, 93, 207, 125, 197, 84,
46, 147, 174, 156, 215, 111>>
end
end
describe "Operate.VM.Extension.Crypto.bitcoin_message_sign/3 and Operate.VM.Extension.Crypto.bitcoin_message_verify/4" do
test "must sign and verify message", ctx do
script = """
sig = crypto.bitcoin_message.sign('hello world', bsv_priv_key)
return crypto.bitcoin_message.verify(sig, 'hello world', bsv_pub_key)
"""
assert VM.eval!(ctx.vm, script) == true
end
test "must verify message with address", ctx do
script = """
sig = crypto.bitcoin_message.sign('hello world', bsv_priv_key)
return crypto.bitcoin_message.verify(sig, 'hello world', bsv_address)
"""
assert VM.eval!(ctx.vm, script) == true
end
test "wont verify when different message", ctx do
script = """
sig = crypto.bitcoin_message.sign('hello world', bsv_priv_key)
return crypto.bitcoin_message.verify(sig, 'goodbye world', bsv_pub_key)
"""
assert VM.eval!(ctx.vm, script) == false
end
end
end
| 36.502959 | 123 | 0.64581 |
ffd888b1a81c553cfee35b3369469cc124a58c32 | 1,801 | exs | Elixir | config/prod.exs | leifg/byzal | 64b688de4c597808ee82a8222e7c3fd7ce4c295b | [
"MIT"
] | null | null | null | config/prod.exs | leifg/byzal | 64b688de4c597808ee82a8222e7c3fd7ce4c295b | [
"MIT"
] | null | null | null | config/prod.exs | leifg/byzal | 64b688de4c597808ee82a8222e7c3fd7ce4c295b | [
"MIT"
] | null | null | null | use Mix.Config
# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
# Endpoint configuration: per the notes above, the HTTP port is read from
# the PORT environment variable ({:system, "PORT"} resolves at runtime),
# and generated URLs use example.com on port 80.
config :byzal, Byzal.Endpoint,
  http: [port: {:system, "PORT"}],
  url: [host: "example.com", port: 80]

# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :byzal, Byzal.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :byzal, Byzal.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :byzal, Byzal.Endpoint, server: true
#
| 31.596491 | 67 | 0.702943 |
ffd88ed40d00e60991323d9940c7fa6bb401c12c | 3,980 | ex | Elixir | priv/templates/coh.install/models/coherence/schemas.ex | henb/coherence | 725247353bad46df464caffa12b9ea2788fe774f | [
"MIT"
] | 2 | 2018-01-19T06:12:16.000Z | 2018-03-12T07:17:17.000Z | priv/templates/coh.install/models/coherence/schemas.ex | henb/coherence | 725247353bad46df464caffa12b9ea2788fe774f | [
"MIT"
] | null | null | null | priv/templates/coh.install/models/coherence/schemas.ex | henb/coherence | 725247353bad46df464caffa12b9ea2788fe774f | [
"MIT"
] | 1 | 2019-09-11T10:21:21.000Z | 2019-09-11T10:21:21.000Z | defmodule <%= base %>.Coherence.Schemas do
  # NOTE(review): this module body is an EEx template (see the
  # `<%= schema_list %>` and `<%= if trackable? do %>` markers) rendered by
  # the coh.install task into the host application. Code is kept
  # byte-identical; only comments are added.
  use Coherence.Config
  import Ecto.Query

  # Resolved from the Coherence config at compile time of the generated app.
  @user_schema Config.user_schema
  @repo Config.repo

  # Returns all user records.
  def list_user do
    @repo.all @user_schema
  end

  # Returns all users matching the given equality filters (see query_by/2).
  def list_by_user(opts) do
    @repo.all query_by(@user_schema, opts)
  end

  # Fetches one user matching the given fields.
  def get_by_user(opts) do
    @repo.get_by @user_schema, opts
  end

  # Fetches a user by primary key.
  def get_user(id) do
    @repo.get @user_schema, id
  end

  # Fetches a user by primary key; bang variant.
  def get_user!(id) do
    @repo.get! @user_schema, id
  end

  # Fetches a user by email address.
  def get_user_by_email(email) do
    @repo.get_by @user_schema, email: email
  end

  # Builds a changeset for an existing user struct.
  def change_user(struct, params) do
    @user_schema.changeset struct, params
  end

  # Builds a changeset for a fresh user struct from params.
  def change_user(params) do
    @user_schema.changeset @user_schema.__struct__, params
  end

  # Builds an empty changeset for a fresh user struct.
  def change_user do
    @user_schema.changeset @user_schema.__struct__, %{}
  end

  # Inserts a new user built from params.
  def create_user(params) do
    @repo.insert change_user(params)
  end

  # Inserts a new user; bang variant.
  def create_user!(params) do
    @repo.insert! change_user(params)
  end

  # Updates the given user with params.
  def update_user(user, params) do
    @repo.update change_user(user, params)
  end

  # Updates the given user; bang variant.
  def update_user!(user, params) do
    @repo.update! change_user(user, params)
  end

  # Compile-time loop: for every schema module interpolated into the
  # template, generate the standard CRUD helpers (list_x, list_by_x,
  # get_x, get_by_x, change_x, create_x, update_x, delete_x). The
  # `unquote` calls are valid because this fn runs while the module body
  # is still being compiled.
  Enum.each <%= schema_list %>, fn module ->
    # e.g. MyApp.Coherence.Invitation -> "invitation"
    name =
      module
      |> Module.split
      |> List.last
      |> String.downcase

    def unquote(String.to_atom("list_#{name}"))() do
      @repo.all unquote(module)
    end

    def unquote(String.to_atom("list_#{name}"))(%Ecto.Query{} = query) do
      @repo.all query
    end

    def unquote(String.to_atom("list_by_#{name}"))(opts) do
      @repo.all query_by(unquote(module), opts)
    end

    def unquote(String.to_atom("get_#{name}"))(id) do
      @repo.get unquote(module), id
    end

    def unquote(String.to_atom("get_#{name}!"))(id) do
      @repo.get! unquote(module), id
    end

    def unquote(String.to_atom("get_by_#{name}"))(opts) do
      @repo.get_by unquote(module), opts
    end

    def unquote(String.to_atom("change_#{name}"))(struct, params) do
      unquote(module).changeset(struct, params)
    end

    def unquote(String.to_atom("change_#{name}"))(params) do
      unquote(module).new_changeset(params)
    end

    def unquote(String.to_atom("change_#{name}"))() do
      unquote(module).new_changeset(%{})
    end

    def unquote(String.to_atom("create_#{name}"))(params) do
      @repo.insert unquote(module).new_changeset(params)
    end

    def unquote(String.to_atom("create_#{name}!"))(params) do
      @repo.insert! unquote(module).new_changeset(params)
    end

    def unquote(String.to_atom("update_#{name}"))(struct, params) do
      @repo.update unquote(module).changeset(struct, params)
    end

    def unquote(String.to_atom("update_#{name}!"))(struct, params) do
      @repo.update! unquote(module).changeset(struct, params)
    end

    def unquote(String.to_atom("delete_#{name}"))(struct) do
      @repo.delete struct
    end
  end

  <%= if trackable? do %>
  # Returns the latest Trackable row for the user, or an empty Trackable
  # struct when none exists. Only rendered when the trackable option is on.
  #
  # NOTE(review): the unparenthesized call means this parses as
  # @repo.one(Trackable |> where(...) |> order_by(...) |> limit(1)) —
  # operators bind into unparenthesized call arguments. Parenthesizing
  # (or building the query first) would make that explicit; TODO confirm
  # and clean up in the template.
  def last_trackable(user_id) do
    schema =
      @repo.one <%= base %>.Coherence.Trackable
      |> where([t], t.user_id == ^user_id)
      |> order_by(desc: :id)
      |> limit(1)

    case schema do
      nil -> <%= base %>.Coherence.Trackable.__struct__
      trackable -> trackable
    end
  end
  <% end %>

  # Folds a keyword list of {field, value} pairs into chained equality
  # where-clauses on the given schema/query.
  def query_by(schema, opts) do
    Enum.reduce opts, schema, fn {k, v}, query ->
      where(query, [b], field(b, ^k) == ^v)
    end
  end

  # Deletes everything matched by a query...
  def delete_all(%Ecto.Query{} = query) do
    @repo.delete_all query
  end

  # ...or every row of a schema module.
  def delete_all(module) when is_atom(module) do
    @repo.delete_all module
  end

  # Generic changeset-based CRUD wrappers around the configured repo.
  def create(%Ecto.Changeset{} = changeset) do
    @repo.insert changeset
  end

  def create!(%Ecto.Changeset{} = changeset) do
    @repo.insert! changeset
  end

  def update(%Ecto.Changeset{} = changeset) do
    @repo.update changeset
  end

  def update!(%Ecto.Changeset{} = changeset) do
    @repo.update! changeset
  end

  def delete(schema) do
    @repo.delete schema
  end

  def delete!(schema) do
    @repo.delete! schema
  end
end
| 22.359551 | 73 | 0.648995 |
ffd89e4eadea0c8e9735606d77570e32cfa10546 | 2,789 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_delta_presence_estimation_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_delta_presence_estimation_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_delta_presence_estimation_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2DeltaPresenceEstimationConfig do
  @moduledoc """
  δ-presence metric, used to estimate how likely it is for an attacker to figure out that one given individual appears in a de-identified dataset. Similarly to the k-map metric, we cannot compute δ-presence exactly without knowing the attack dataset, so we use a statistical model instead.

  ## Attributes

  *   `auxiliaryTables` (*type:* `list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StatisticalTable.t)`, *default:* `nil`) - Several auxiliary tables can be used in the analysis. Each custom_tag used to tag a quasi-identifiers field must appear in exactly one field of one auxiliary table.
  *   `quasiIds` (*type:* `list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2QuasiId.t)`, *default:* `nil`) - Required. Fields considered to be quasi-identifiers. No two fields can have the same tag.
  *   `regionCode` (*type:* `String.t`, *default:* `nil`) - ISO 3166-1 alpha-2 region code to use in the statistical modeling. Set if no column is tagged with a region-specific InfoType (like US_ZIP_5) or a region code.
  """

  # Auto-generated model (see the file header); change the generator, not
  # this file.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :auxiliaryTables =>
            list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StatisticalTable.t()) | nil,
          :quasiIds => list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2QuasiId.t()) | nil,
          :regionCode => String.t() | nil
        }

  # `as:` + `type: :list` mark these fields as lists of nested generated
  # models.
  field(:auxiliaryTables,
    as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StatisticalTable,
    type: :list
  )

  field(:quasiIds, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2QuasiId, type: :list)
  field(:regionCode)
end
# Wires the model into Poison's decoding protocol by delegating to the
# generated decode/2 on the model module.
defimpl Poison.Decoder,
  for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2DeltaPresenceEstimationConfig do
  def decode(value, options) do
    GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2DeltaPresenceEstimationConfig.decode(value, options)
  end
end
# Wires the model into Poison's encoding protocol via the shared
# GoogleApi.Gax.ModelBase encoder.
defimpl Poison.Encoder,
  for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2DeltaPresenceEstimationConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 46.483333 | 289 | 0.752241 |
ffd8c6a19b3716421eab861e8ddb8940b37dd832 | 177 | exs | Elixir | priv/repo/migrations/20210916131754_disallow_null_shorts.exs | isakal/trimmr.ex | 72b94b399cc0d700f40a1fd2bc7528ae4cd4a81d | [
"MIT"
] | null | null | null | priv/repo/migrations/20210916131754_disallow_null_shorts.exs | isakal/trimmr.ex | 72b94b399cc0d700f40a1fd2bc7528ae4cd4a81d | [
"MIT"
] | null | null | null | priv/repo/migrations/20210916131754_disallow_null_shorts.exs | isakal/trimmr.ex | 72b94b399cc0d700f40a1fd2bc7528ae4cd4a81d | [
"MIT"
] | null | null | null | defmodule Trimmr.Repo.Migrations.DisallowNullShorts do
use Ecto.Migration
  def change do
    # Make urls.short mandatory at the database level.
    #
    # NOTE(review): this migration will fail if existing rows hold NULL in
    # `short` — backfill those rows before deploying; TODO confirm.
    # NOTE(review): `modify` here carries no `:from` option, so automatic
    # rollback of the previous column definition may not be possible —
    # verify against the Ecto.Migration docs for the project's Ecto version.
    alter table(:urls) do
      modify :short, :string, null: false
    end
  end
end
| 17.7 | 54 | 0.717514 |
ffd8f2afcd01f930c4d5ee672b96fecb3542e51f | 2,484 | ex | Elixir | lib/codes/codes_q42.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_q42.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_q42.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_Q42 do
alias IcdCode.ICDCode
def _Q420 do
%ICDCode{full_code: "Q420",
category_code: "Q42",
short_code: "0",
full_name: "Congenital absence, atresia and stenosis of rectum with fistula",
short_name: "Congenital absence, atresia and stenosis of rectum with fistula",
category_name: "Congenital absence, atresia and stenosis of rectum with fistula"
}
end
def _Q421 do
%ICDCode{full_code: "Q421",
category_code: "Q42",
short_code: "1",
full_name: "Congenital absence, atresia and stenosis of rectum without fistula",
short_name: "Congenital absence, atresia and stenosis of rectum without fistula",
category_name: "Congenital absence, atresia and stenosis of rectum without fistula"
}
end
def _Q422 do
%ICDCode{full_code: "Q422",
category_code: "Q42",
short_code: "2",
full_name: "Congenital absence, atresia and stenosis of anus with fistula",
short_name: "Congenital absence, atresia and stenosis of anus with fistula",
category_name: "Congenital absence, atresia and stenosis of anus with fistula"
}
end
def _Q423 do
%ICDCode{full_code: "Q423",
category_code: "Q42",
short_code: "3",
full_name: "Congenital absence, atresia and stenosis of anus without fistula",
short_name: "Congenital absence, atresia and stenosis of anus without fistula",
category_name: "Congenital absence, atresia and stenosis of anus without fistula"
}
end
def _Q428 do
%ICDCode{full_code: "Q428",
category_code: "Q42",
short_code: "8",
full_name: "Congenital absence, atresia and stenosis of other parts of large intestine",
short_name: "Congenital absence, atresia and stenosis of other parts of large intestine",
category_name: "Congenital absence, atresia and stenosis of other parts of large intestine"
}
end
def _Q429 do
%ICDCode{full_code: "Q429",
category_code: "Q42",
short_code: "9",
full_name: "Congenital absence, atresia and stenosis of large intestine, part unspecified",
short_name: "Congenital absence, atresia and stenosis of large intestine, part unspecified",
category_name: "Congenital absence, atresia and stenosis of large intestine, part unspecified"
}
end
end
| 40.721311 | 104 | 0.663849 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.