hexsha
stringlengths
40
40
size
int64
2
991k
ext
stringclasses
2 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
208
max_stars_repo_name
stringlengths
6
106
max_stars_repo_head_hexsha
stringlengths
40
40
max_stars_repo_licenses
list
max_stars_count
int64
1
33.5k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
208
max_issues_repo_name
stringlengths
6
106
max_issues_repo_head_hexsha
stringlengths
40
40
max_issues_repo_licenses
list
max_issues_count
int64
1
16.3k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
208
max_forks_repo_name
stringlengths
6
106
max_forks_repo_head_hexsha
stringlengths
40
40
max_forks_repo_licenses
list
max_forks_count
int64
1
6.91k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
2
991k
avg_line_length
float64
1
36k
max_line_length
int64
1
977k
alphanum_fraction
float64
0
1
083bc8f06d6e6713e4f8389a559fc7c8de03de0c
95
exs
Elixir
test/absinthe/integration/execution/aliases/with_errors.exs
maartenvanvliet/absinthe
ebe820717200f53756e225b3dffbfefe924a94d3
[ "MIT" ]
null
null
null
test/absinthe/integration/execution/aliases/with_errors.exs
maartenvanvliet/absinthe
ebe820717200f53756e225b3dffbfefe924a94d3
[ "MIT" ]
2
2020-07-21T05:23:37.000Z
2020-08-26T04:56:12.000Z
test/absinthe/integration/execution/aliases/with_errors.exs
jlgeering/absinthe
a3dbc29640d613928398626ad75a8f03203a1720
[ "MIT" ]
null
null
null
{:ok, %{data: %{"foo" => nil}, errors: [%{code: 42, message: "Custom Error", path: ["foo"]}]}}
47.5
94
0.515789
083bd90dc56675646d917e6c52422939770cc316
8,385
ex
Elixir
lib/mix/tasks/phx.gen.context.ex
zorn/phoenix
ac88958550fbd861e2f1e1af6e3c6b787b1a202e
[ "MIT" ]
1
2019-04-24T09:28:15.000Z
2019-04-24T09:28:15.000Z
lib/mix/tasks/phx.gen.context.ex
zorn/phoenix
ac88958550fbd861e2f1e1af6e3c6b787b1a202e
[ "MIT" ]
null
null
null
lib/mix/tasks/phx.gen.context.ex
zorn/phoenix
ac88958550fbd861e2f1e1af6e3c6b787b1a202e
[ "MIT" ]
null
null
null
defmodule Mix.Tasks.Phx.Gen.Context do @shortdoc "Generates a context with functions around an Ecto schema" @moduledoc """ Generates a context with functions around an Ecto schema. mix phx.gen.context Accounts User users name:string age:integer The first argument is the context module followed by the schema module and its plural name (used as the schema table name). The context is an Elixir module that serves as an API boundary for the given resource. A context often holds many related resources. Therefore, if the context already exists, it will be augmented with functions for the given resource. > Note: A resource may also be split > over distinct contexts (such as Accounts.User and Payments.User). The schema is responsible for mapping the database fields into an Elixir struct. Overall, this generator will add the following files to `lib/your_app`: * a context module in `accounts/accounts.ex`, serving as the API boundary * a schema in `accounts/user.ex`, with a `users` table A migration file for the repository and test files for the context will also be generated. ## Generating without a schema In some cases, you may wish to bootstrap the context module and tests, but leave internal implementation of the context and schema to yourself. Use the `--no-schema` flags to accomplish this. ## table By default, the table name for the migration and schema will be the plural name provided for the resource. To customize this value, a `--table` option may be provided. For example: mix phx.gen.context Accounts User users --table cms_users ## binary_id Generated migration can use `binary_id` for schema's primary key and its references with option `--binary-id`. ## Default options This generator uses default options provided in the `:generators` configuration of your application. 
These are the defaults: config :your_app, :generators, migration: true, binary_id: false, sample_binary_id: "11111111-1111-1111-1111-111111111111" You can override those options per invocation by providing corresponding switches, e.g. `--no-binary-id` to use normal ids despite the default configuration or `--migration` to force generation of the migration. Read the documentation for `phx.gen.schema` for more information on attributes. """ use Mix.Task alias Mix.Phoenix.{Context, Schema} alias Mix.Tasks.Phx.Gen @switches [binary_id: :boolean, table: :string, web: :string, schema: :boolean, context: :boolean, context_app: :string] @default_opts [schema: true, context: true] @doc false def run(args) do if Mix.Project.umbrella? do Mix.raise "mix phx.gen.context can only be run inside an application directory" end {context, schema} = build(args) binding = [context: context, schema: schema] paths = Mix.Phoenix.generator_paths() prompt_for_conflicts(context) prompt_for_code_injection(context) context |> copy_new_files(paths, binding) |> print_shell_instructions() end defp prompt_for_conflicts(context) do context |> files_to_be_generated() |> Mix.Phoenix.prompt_for_conflicts() end @doc false def build(args) do {opts, parsed, _} = parse_opts(args) [context_name, schema_name, plural | schema_args] = validate_args!(parsed) schema_module = inspect(Module.concat(context_name, schema_name)) schema = Gen.Schema.build([schema_module, plural | schema_args], opts, __MODULE__) context = Context.new(context_name, schema, opts) {context, schema} end defp parse_opts(args) do {opts, parsed, invalid} = OptionParser.parse(args, switches: @switches) merged_opts = @default_opts |> Keyword.merge(opts) |> put_context_app(opts[:context_app]) {merged_opts, parsed, invalid} end defp put_context_app(opts, nil), do: opts defp put_context_app(opts, string) do Keyword.put(opts, :context_app, String.to_atom(string)) end @doc false def files_to_be_generated(%Context{schema: schema}) do if 
schema.generate? do Gen.Schema.files_to_be_generated(schema) else [] end end @doc false def copy_new_files(%Context{schema: schema} = context, paths, binding) do if schema.generate?, do: Gen.Schema.copy_new_files(schema, paths, binding) inject_schema_access(context, paths, binding) inject_tests(context, paths, binding) context end defp inject_schema_access(%Context{file: file} = context, paths, binding) do unless Context.pre_existing?(context) do Mix.Generator.create_file(file, Mix.Phoenix.eval_from(paths, "priv/templates/phx.gen.context/context.ex", binding)) end paths |> Mix.Phoenix.eval_from("priv/templates/phx.gen.context/#{schema_access_template(context)}", binding) |> inject_eex_before_final_end(file, binding) end defp write_file(content, file) do File.write!(file, content) end defp inject_tests(%Context{test_file: test_file} = context, paths, binding) do unless Context.pre_existing_tests?(context) do Mix.Generator.create_file(test_file, Mix.Phoenix.eval_from(paths, "priv/templates/phx.gen.context/context_test.exs", binding)) end paths |> Mix.Phoenix.eval_from("priv/templates/phx.gen.context/test_cases.exs", binding) |> inject_eex_before_final_end(test_file, binding) end defp inject_eex_before_final_end(content_to_inject, file_path, binding) do file = File.read!(file_path) if String.contains?(file, content_to_inject) do :ok else Mix.shell.info([:green, "* injecting ", :reset, Path.relative_to_cwd(file_path)]) file |> String.trim_trailing() |> String.trim_trailing("end") |> EEx.eval_string(binding) |> Kernel.<>(content_to_inject) |> Kernel.<>("end\n") |> write_file(file_path) end end @doc false def print_shell_instructions(%Context{schema: schema}) do if schema.generate? do Gen.Schema.print_shell_instructions(schema) else :ok end end defp schema_access_template(%Context{schema: schema}) do if schema.generate? 
do "schema_access.ex" else "access_no_schema.ex" end end defp validate_args!([context, schema, _plural | _] = args) do cond do not Context.valid?(context) -> raise_with_help "Expected the context, #{inspect context}, to be a valid module name" not Schema.valid?(schema) -> raise_with_help "Expected the schema, #{inspect schema}, to be a valid module name" context == schema -> raise_with_help "The context and schema should have different names" true -> args end end defp validate_args!(_) do raise_with_help "Invalid arguments" end @doc false @spec raise_with_help(String.t) :: no_return() def raise_with_help(msg) do Mix.raise """ #{msg} mix phx.gen.html, phx.gen.json and phx.gen.context expect a context module name, followed by singular and plural names of the generated resource, ending with any number of attributes. For example: mix phx.gen.html Accounts User users name:string mix phx.gen.json Accounts User users name:string mix phx.gen.context Accounts User users name:string The context serves as the API boundary for the given resource. Multiple resources may belong to a context and a resource may be split over distinct contexts (such as Accounts.User and Payments.User). """ end def prompt_for_code_injection(%Context{} = context) do if Context.pre_existing?(context) do function_count = Context.function_count(context) file_count = Context.file_count(context) Mix.shell.info """ You are generating into an existing context. The #{inspect context.module} context currently has #{function_count} functions and \ #{file_count} files in its directory. * It's OK to have multiple resources in the same context as \ long as they are closely related * If they are not closely related, another context probably works better If you are not sure, prefer creating a new context over adding to the existing one. """ unless Mix.shell.yes?("Would you like to proceed?") do System.halt() end end end end
31.882129
132
0.7065
083be77f373d36bbb70e226bc622dd2c92c6fd5a
872
ex
Elixir
clients/fcm/lib/google_api/fcm/v1/metadata.ex
Contractbook/elixir-google-api
342751041aaf8c2e7f76f9922cf24b9c5895802b
[ "Apache-2.0" ]
null
null
null
clients/fcm/lib/google_api/fcm/v1/metadata.ex
Contractbook/elixir-google-api
342751041aaf8c2e7f76f9922cf24b9c5895802b
[ "Apache-2.0" ]
null
null
null
clients/fcm/lib/google_api/fcm/v1/metadata.ex
Contractbook/elixir-google-api
342751041aaf8c2e7f76f9922cf24b9c5895802b
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.FCM.V1 do @moduledoc """ API client metadata for GoogleApi.FCM.V1. """ @discovery_revision "20210607" def discovery_revision(), do: @discovery_revision end
32.296296
74
0.755734
083c0ae3ba62c5a5b672836c86f081eb1fc4a9c2
723
ex
Elixir
lib/remote_retro_web/controllers/page_controller.ex
octosteve/remote_retro
3385b0db3c2daab934ce12a2f7642a5f10ac5147
[ "MIT" ]
523
2017-03-15T15:21:11.000Z
2022-03-14T03:04:18.000Z
lib/remote_retro_web/controllers/page_controller.ex
octosteve/remote_retro
3385b0db3c2daab934ce12a2f7642a5f10ac5147
[ "MIT" ]
524
2017-03-16T18:31:09.000Z
2022-02-26T10:02:06.000Z
lib/remote_retro_web/controllers/page_controller.ex
octosteve/remote_retro
3385b0db3c2daab934ce12a2f7642a5f10ac5147
[ "MIT" ]
60
2017-05-01T18:02:28.000Z
2022-03-04T21:04:56.000Z
defmodule RemoteRetroWeb.PageController do use RemoteRetroWeb, :controller def index(conn, _params) do current_user_id = get_session(conn, :current_user_id) case current_user_id do nil -> render(conn, "index.html", %{ body_class: "landing-page", omit_header: true, }) _user -> redirect(conn, to: "/retros") end end def faq(conn, _params) do render(conn, "faq.html", %{ body_class: "copy-page", title: "Frequently Asked Questions | RemoteRetro.org", }) end def privacy(conn, _params) do render(conn, "privacy.html", %{ body_class: "copy-page", title: "Privacy Policy | RemoteRetro.org", }) end end
22.59375
60
0.611342
083c1e2582b6a423ec909e9e06ceb6dbd2549f98
945
ex
Elixir
test/support/channel_case.ex
AltCampus/altstatus_backend
70bf7a9d337e570f54002c3a7df264e88372adfa
[ "MIT" ]
1
2020-01-20T18:17:59.000Z
2020-01-20T18:17:59.000Z
test/support/channel_case.ex
AltCampus/altstatus_backend
70bf7a9d337e570f54002c3a7df264e88372adfa
[ "MIT" ]
null
null
null
test/support/channel_case.ex
AltCampus/altstatus_backend
70bf7a9d337e570f54002c3a7df264e88372adfa
[ "MIT" ]
2
2018-09-09T08:05:24.000Z
2018-09-09T08:35:18.000Z
defmodule AltstatusWeb.ChannelCase do @moduledoc """ This module defines the test case to be used by channel tests. Such tests rely on `Phoenix.ChannelTest` and also import other functionality to make it easier to build common datastructures and query the data layer. Finally, if the test case interacts with the database, it cannot be async. For this reason, every test runs inside a transaction which is reset at the beginning of the test unless the test case is marked as async. """ use ExUnit.CaseTemplate using do quote do # Import conveniences for testing with channels use Phoenix.ChannelTest # The default endpoint for testing @endpoint AltstatusWeb.Endpoint end end setup tags do :ok = Ecto.Adapters.SQL.Sandbox.checkout(Altstatus.Repo) unless tags[:async] do Ecto.Adapters.SQL.Sandbox.mode(Altstatus.Repo, {:shared, self()}) end :ok end end
24.868421
71
0.718519
083c2926cac2bb2a0a3069a665da8e430dd83961
2,459
ex
Elixir
lib/opus/pipeline/registration.ex
zorbash/opus
843f7b85bb28a6c860e9f48f4cb7001d6d6f1e8c
[ "MIT" ]
303
2018-09-11T10:19:01.000Z
2022-03-11T00:57:44.000Z
lib/opus/pipeline/registration.ex
zorbash/opus
843f7b85bb28a6c860e9f48f4cb7001d6d6f1e8c
[ "MIT" ]
24
2018-09-26T10:38:05.000Z
2021-09-29T12:51:28.000Z
lib/opus/pipeline/registration.ex
zorbash/opus
843f7b85bb28a6c860e9f48f4cb7001d6d6f1e8c
[ "MIT" ]
16
2018-10-05T18:54:18.000Z
2022-02-19T18:59:45.000Z
defmodule Opus.Pipeline.Registration do @moduledoc false def define_callback(_type, _stage_id, _name, nil) do quote do: :ok end def define_callback(type, stage_id, name, quoted_fun) do {name, _} = Code.eval_quoted(name) callback_name = :"opus_#{type}_callback_stage_#{name}_#{stage_id}" quote do if is_function(unquote(quoted_fun)) do if :erlang.fun_info(unquote(quoted_fun))[:arity] in [0, 1] do @opus_callbacks %{ stage_id: unquote(stage_id), type: unquote(type), name: unquote(callback_name) } end case :erlang.fun_info(unquote(quoted_fun))[:arity] do 0 -> @doc false def unquote(callback_name)() do unquote(quoted_fun).() end 1 -> @doc false def unquote(callback_name)(input) do unquote(quoted_fun).(input) end n -> IO.warn( "Expected #{unquote(type)} of #{unquote(name)} to take either 0 or 1 parameters, got #{ n }" ) end end end end def maybe_define_callbacks(stage_id, name, opts) do [ define_callback(:conditional, stage_id, name, ensure_valid_conditional!(opts)), define_callback(:with, stage_id, name, Access.get(opts, :with)), define_callback(:error_message, stage_id, name, Access.get(opts, :error_message)), define_callback(:retry_backoff, stage_id, name, Access.get(opts, :retry_backoff)) ] end def ensure_valid_conditional!(opts) do if_cond = Access.get(opts, :if) unless_cond = Access.get(opts, :unless) if if_cond && unless_cond do raise CompileError, file: __ENV__.file, line: __ENV__.line, description: "Invalid stage conditional. For each stage you may define either an :if or an :unless option. Not both" end if_cond || unless_cond end def normalize_opts(opts, id, callbacks) do callback_types = for %{stage_id: ^id, type: type} <- callbacks, do: type for {k, v} <- opts, into: [] do anonymous? = k in callback_types || (k in [:if, :unless] && :conditional in callback_types) callback = if anonymous?, do: :anonymous, else: v case k do term when term in [:if, :unless] -> {:conditional, {term, callback}} _ -> {k, callback} end end end end
29.27381
113
0.593737
083c2c3030644484b8a9702992d3cef7a6d56b34
692
ex
Elixir
lib/collision/intersection/hull_hull.ex
crertel/elixir_rigid_physics
059f121d5504b7b8fed22aa907503d9e9fd8ac1d
[ "Apache-2.0" ]
6
2019-05-23T19:57:26.000Z
2021-06-14T16:52:53.000Z
lib/collision/intersection/hull_hull.ex
crertel/elixir_rigid_physics
059f121d5504b7b8fed22aa907503d9e9fd8ac1d
[ "Apache-2.0" ]
10
2019-06-21T22:09:39.000Z
2019-07-10T04:16:12.000Z
lib/collision/intersection/hull_hull.ex
crertel/elixir_rigid_physics
059f121d5504b7b8fed22aa907503d9e9fd8ac1d
[ "Apache-2.0" ]
1
2021-01-09T01:58:55.000Z
2021-01-09T01:58:55.000Z
defmodule ElixirRigidPhysics.Collision.Intersection.HullHull do @moduledoc """ Module for sphere-hull intersection tests. """ require ElixirRigidPhysics.Dynamics.Body, as: Body require ElixirRigidPhysics.Geometry.Hull, as: Hull require ElixirRigidPhysics.Collision.Contact, as: Contact alias Graphmath.Vec3 @verysmol 1.0e-12 @doc """ Tests intersections of two bodies. """ @spec check(Body.body(), Body.body()) :: Contact.contact_result() def check( Body.body(shape: Hull.hull(faces: faces_a), position: p_a, orientation: o_b), Body.body(shape: Hull.hull(faces: faces_b), position: p_b, orientation: o_b) ) do :no_intersection end end
28.833333
85
0.715318
083c3f5997319dc34d770d5ed0114d1dd590a301
527
ex
Elixir
lib/video/dir.ex
breunigs/veloroute
ac3b1eeb2ef2369c27186a138f6ffd8284652dab
[ "0BSD" ]
12
2018-06-15T10:18:43.000Z
2022-01-24T12:50:54.000Z
lib/video/dir.ex
breunigs/veloroute
ac3b1eeb2ef2369c27186a138f6ffd8284652dab
[ "0BSD" ]
15
2018-06-21T18:04:12.000Z
2021-10-16T12:54:39.000Z
lib/video/dir.ex
breunigs/veloroute
ac3b1eeb2ef2369c27186a138f6ffd8284652dab
[ "0BSD" ]
2
2020-03-09T19:21:36.000Z
2022-01-16T03:29:51.000Z
defmodule Video.Dir do def must_exist!(fun) when is_function(fun) do case present?() do {:error, msg} -> raise msg :ok -> fun.() end end def present? do Settings.video_source_dir_abs() |> File.stat() |> case do {:ok, %{type: :directory}} -> :ok {:ok, %{type: :symlink}} -> :ok any -> {:error, "#{Settings.video_source_dir_abs()} should point to video data, but it's not accessible: #{inspect(any)}"} end end end
18.821429
115
0.525617
083c43b5c6c15094dc263832858702ff74c58bff
1,274
exs
Elixir
test/phoenix_live_view/controller_test.exs
phaleth/phoenix_live_view
84108603663f546871dcbb9b32c2dfeb2f6afda5
[ "MIT" ]
2
2021-05-15T05:20:19.000Z
2021-05-20T17:55:04.000Z
test/phoenix_live_view/controller_test.exs
phaleth/phoenix_live_view
84108603663f546871dcbb9b32c2dfeb2f6afda5
[ "MIT" ]
1
2021-05-12T12:14:27.000Z
2021-05-12T12:14:27.000Z
test/phoenix_live_view/controller_test.exs
phaleth/phoenix_live_view
84108603663f546871dcbb9b32c2dfeb2f6afda5
[ "MIT" ]
1
2021-11-20T01:11:28.000Z
2021-11-20T01:11:28.000Z
defmodule Phoenix.LiveView.ControllerTest do use ExUnit.Case, async: true import Phoenix.ConnTest alias Phoenix.LiveViewTest.Endpoint @endpoint Endpoint setup do {:ok, conn: Phoenix.ConnTest.build_conn()} end test "live renders from controller without session", %{conn: conn} do conn = get(conn, "/controller/live-render-2") assert html_response(conn, 200) =~ "session: %{}" end test "live renders from controller with session", %{conn: conn} do conn = get(conn, "/controller/live-render-3") assert html_response(conn, 200) =~ "session: %{\"custom\" => :session}" end test "live renders from controller with merged assigns", %{conn: conn} do conn = get(conn, "/controller/live-render-4") assert html_response(conn, 200) =~ "title: Dashboard" end test "renders function components from dead view", %{conn: conn} do conn = get(conn, "/controller/render-with-function-component") assert html_response(conn, 200) =~ "RENDER:COMPONENT:from component" end test "renders function components from dead layout", %{conn: conn} do conn = get(conn, "/controller/render-layout-with-function-component") assert html_response(conn, 200) =~ """ LAYOUT:COMPONENT:from layout Hello """ end end
29.627907
75
0.690738
083c43c3388c5ca900926260436f363f895f1385
537
exs
Elixir
string_compression/test/string_compression/day07_test.exs
alex-dukhno/elixir-tdd-katas
57e25fc275c4274c889f2b3760276cc8a393de9e
[ "MIT" ]
null
null
null
string_compression/test/string_compression/day07_test.exs
alex-dukhno/elixir-tdd-katas
57e25fc275c4274c889f2b3760276cc8a393de9e
[ "MIT" ]
null
null
null
string_compression/test/string_compression/day07_test.exs
alex-dukhno/elixir-tdd-katas
57e25fc275c4274c889f2b3760276cc8a393de9e
[ "MIT" ]
null
null
null
defmodule StringCompression.Day07Test do use ExUnit.Case, async: true alias StringCompression.Day07, as: StringCompression test "compress empty string", do: assert StringCompression.compress("") == "" test "compress single char string", do: assert StringCompression.compress("a") == "1a" test "compress string of unique characters", do: assert StringCompression.compress("abc") == "1a1b1c" test "compress string of doubled characters" do assert StringCompression.compress("aabbcc") == "2a2b2c" end end
28.263158
59
0.728119
083c484be4a5ddf5270212779fa9a7a3e283ffb3
518
exs
Elixir
.credo.exs
Atlas42/nebulex_ecto
76e546376011253f7def394e9aa501e5f35c8478
[ "MIT" ]
25
2017-05-11T14:30:31.000Z
2020-11-15T23:25:50.000Z
.credo.exs
Atlas42/nebulex_ecto
76e546376011253f7def394e9aa501e5f35c8478
[ "MIT" ]
11
2017-08-22T00:07:33.000Z
2021-02-10T14:27:14.000Z
.credo.exs
Atlas42/nebulex_ecto
76e546376011253f7def394e9aa501e5f35c8478
[ "MIT" ]
6
2017-10-16T00:11:02.000Z
2021-03-05T13:22:15.000Z
%{ configs: [ %{ name: "default", files: %{ included: ["lib/", "src/", "test/", "benchmarks/"], excluded: [~r"/_build/", ~r"/deps/"] }, color: true, checks: [ ## Design Checks {Credo.Check.Design.AliasUsage, priority: :low}, ## Readability Checks {Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 100}, ## Refactoring Opportunities {Credo.Check.Refactor.LongQuoteBlocks, false} ] } ] }
22.521739
81
0.525097
083c677c4609620aedbd0807aa8f936c2c94fe3e
979
exs
Elixir
config/config.exs
MisterToolbox/logger_file_backend
ee10e27a5aa792ed927cfcbaff603935c9d6e8bf
[ "MIT" ]
2
2020-02-19T23:21:13.000Z
2021-09-30T21:29:07.000Z
config/config.exs
MisterToolbox/logger_file_backend
ee10e27a5aa792ed927cfcbaff603935c9d6e8bf
[ "MIT" ]
null
null
null
config/config.exs
MisterToolbox/logger_file_backend
ee10e27a5aa792ed927cfcbaff603935c9d6e8bf
[ "MIT" ]
2
2020-02-19T23:21:01.000Z
2021-04-21T00:20:08.000Z
# This file is responsible for configuring your application # and its dependencies with the aid of the Mix.Config module. use Mix.Config # This configuration is loaded before any dependency and is restricted # to this project. If another project depends on this project, this # file won't be loaded nor affect the parent project. For this reason, # if you want to provide default values for your application for third- # party users, it should be done in your mix.exs file. # Sample configuration: # # config :logger, # level: :info, # format: "$time $metadata[$level] $message\n" # It is also possible to import configuration files, relative to this # directory. For example, you can emulate configuration per environment # by uncommenting the line below and defining dev.exs, test.exs and such. # Configuration from the imported file will override the ones defined # here (which is why it is important to import them last). # import_config "#{Mix.env}.exs"
39.16
73
0.754852
083c7f9d4b620b79dbc574088107565ddc7ac658
529
ex
Elixir
phx_channels/lib/phx_channels_web/channels/user_socket.ex
iomonad/elixir-pocs
bd0fc282a81c013769dea28a8063a2bfbaac557d
[ "Unlicense" ]
null
null
null
phx_channels/lib/phx_channels_web/channels/user_socket.ex
iomonad/elixir-pocs
bd0fc282a81c013769dea28a8063a2bfbaac557d
[ "Unlicense" ]
null
null
null
phx_channels/lib/phx_channels_web/channels/user_socket.ex
iomonad/elixir-pocs
bd0fc282a81c013769dea28a8063a2bfbaac557d
[ "Unlicense" ]
null
null
null
defmodule PhxChannelsWeb.UserSocket do use Phoenix.Socket ## Channels channel "room:*", PhxChannelsWeb.RoomChannel @max_age 24 * 60 * 60 def connect(%{"token" => token}, socket) do case Phoenix.Token.verify(socket, "user token", token, max_age: @max_age) do {:ok, user_id} -> {:ok, assign(socket, :current_user_id, user_id)} {:error, _reason} -> :error end end @impl true def connect(_params, socket, _connect_info) do {:ok, socket} end @impl true def id(_socket), do: nil end
21.16
80
0.655955
083ce425ab19a71d0201b62398ed68e52d7dceec
2,411
ex
Elixir
lib/gealts/crossover.ex
0010-IO/gealts
841b4af2c0931579be135cc9f8e0231690d5d0df
[ "Unlicense" ]
8
2015-10-13T08:19:17.000Z
2021-06-04T05:52:29.000Z
lib/gealts/crossover.ex
0010-IO/gealts
841b4af2c0931579be135cc9f8e0231690d5d0df
[ "Unlicense" ]
null
null
null
lib/gealts/crossover.ex
0010-IO/gealts
841b4af2c0931579be135cc9f8e0231690d5d0df
[ "Unlicense" ]
null
null
null
defmodule Gealts.Crossover do @moduledoc """ Randomly selects a position in a chromosome, then exchanges sub-chromosomes. Chromosomes fit for "mating" are randomly selected, the number of parent chromosomes is controlled by the @cr (crossover rate) parameter. """ alias Gealts.MathUtils @cr 0.25 @type ind_chrome :: {non_neg_integer, Gealts.Chromosome.t} @type mates :: {ind_chrome, ind_chrome} @doc """ Select chromosomes fit for mating. Pair chromosomes together and merge their sub-chromosome populations based on a randomly selected cutoff point. Update original chromosome population. """ @spec mate([Gealts.Chromosome.t]) :: [Gealts.Chromosome.t] def mate(chromes) do chromes |> select |> link |> merge |> update(chromes) end @spec select([Gealts.Chromosome.t]) :: [ind_chrome] defp select(chromes) do select(chromes, MathUtils.random_list(length(chromes)), 0, []) end defp select(_chromes, [], _i, acc) do acc |> Enum.reverse end defp select(chromes, [r | rest], i, acc) when r > @cr do select(chromes, rest, i + 1, acc) end defp select(chromes, [_r | rest], i, acc) do select(chromes, rest, i + 1, [{i, Enum.at(chromes, i)} | acc]) end @spec link([ind_chrome]) :: [mates] defp link([]) do [] end defp link(chromes) do link(chromes, Enum.at(chromes, 0), []) end defp link([], _first, acc) do acc |> Enum.reverse end defp link([a, b | chromes], first, acc) do link([b | chromes], first, [{a, b} | acc]) end defp link([a | chromes], first, acc) do link(chromes, first, [{a, first} | acc]) end @spec merge([mates]) :: [ind_chrome] defp merge([]) do [] end defp merge(chromes) do vals = for _ <- 1..length(chromes), do: MathUtils.random_int(1, 4) merge(chromes, vals, []) end defp merge([], _vals, acc) do Enum.reverse(acc) end defp merge([{{pos, chrome_a}, {_pos, chrome_b}} | rest ], [val | vals], acc) do merged = Enum.slice(chrome_a.values, 0, val) ++ Enum.slice(chrome_b.values, val, length(chrome_b.values)) merge(rest, vals, [{pos, %{chrome_a | values: merged}} | acc]) end @spec 
update([ind_chrome], [Gealts.Chromosome.t]) :: [Gealts.Chromosome.t] defp update([], chromes) do chromes end defp update([{n, chrome} | rest], chromes) do update(rest, List.replace_at(chromes, n, chrome)) end end
27.712644
109
0.642472
083cfa382e59afbab2ce4e9ab751ae8590602a46
1,977
ex
Elixir
clients/monitoring/lib/google_api/monitoring/v3/model/point_data.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2021-12-20T03:40:53.000Z
2021-12-20T03:40:53.000Z
clients/monitoring/lib/google_api/monitoring/v3/model/point_data.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2020-08-18T00:11:23.000Z
2020-08-18T00:44:16.000Z
clients/monitoring/lib/google_api/monitoring/v3/model/point_data.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Monitoring.V3.Model.PointData do @moduledoc """ A point's value columns and time interval. Each point has one or more point values corresponding to the entries in point_descriptors field in the TimeSeriesDescriptor associated with this object. ## Attributes * `timeInterval` (*type:* `GoogleApi.Monitoring.V3.Model.TimeInterval.t`, *default:* `nil`) - The time interval associated with the point. * `values` (*type:* `list(GoogleApi.Monitoring.V3.Model.TypedValue.t)`, *default:* `nil`) - The values that make up the point. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :timeInterval => GoogleApi.Monitoring.V3.Model.TimeInterval.t() | nil, :values => list(GoogleApi.Monitoring.V3.Model.TypedValue.t()) | nil } field(:timeInterval, as: GoogleApi.Monitoring.V3.Model.TimeInterval) field(:values, as: GoogleApi.Monitoring.V3.Model.TypedValue, type: :list) end defimpl Poison.Decoder, for: GoogleApi.Monitoring.V3.Model.PointData do def decode(value, options) do GoogleApi.Monitoring.V3.Model.PointData.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Monitoring.V3.Model.PointData do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
39.54
197
0.743551
083d00ab679f17ede6b811d463b0318fb917ddb0
589
ex
Elixir
lib/roger/partition/global/state_persister.ex
jnylen/roger
074338eceae4783221088e8b235a635452708ef1
[ "MIT" ]
null
null
null
lib/roger/partition/global/state_persister.ex
jnylen/roger
074338eceae4783221088e8b235a635452708ef1
[ "MIT" ]
null
null
null
lib/roger/partition/global/state_persister.ex
jnylen/roger
074338eceae4783221088e8b235a635452708ef1
[ "MIT" ]
null
null
null
defmodule Roger.Partition.Global.StatePersister do @moduledoc """ Behaviour for the persistence of the global partition state. See `Roger.Partition.Global` on how to implement a custom persister module. """ @doc """ Called when the global state process starts. """ @callback init(id :: String.t) :: :ok @doc """ Called when the global state needs to be stored. """ @callback store(id :: String.t, data :: binary) :: :ok @doc """ Called when the global state needs to be loaded. """ @callback load(id :: String.t) :: {:ok, binary} | {:error, term} end
23.56
77
0.658744
083d0f89e80119524f6b9dbbc435b551a631320a
76
exs
Elixir
test/myapp_web/views/page_view_test.exs
p-jaya/elixir-docker
08c2b401888e6154abb8a96afc02f8ae25ddb24d
[ "MIT" ]
11
2018-07-20T07:37:06.000Z
2020-08-18T17:48:00.000Z
test/myapp_web/views/page_view_test.exs
p-jaya/elixir-docker
08c2b401888e6154abb8a96afc02f8ae25ddb24d
[ "MIT" ]
null
null
null
test/myapp_web/views/page_view_test.exs
p-jaya/elixir-docker
08c2b401888e6154abb8a96afc02f8ae25ddb24d
[ "MIT" ]
1
2018-07-20T01:48:02.000Z
2018-07-20T01:48:02.000Z
defmodule MyappWeb.PageViewTest do use MyappWeb.ConnCase, async: true end
19
36
0.815789
083d1b5290e47a8ec6d54e8fd09785347e95fb31
4,561
ex
Elixir
rustler_mix/lib/rustler.ex
ericlathrop/rustler
662e36b11703e590fdb21fd392e2c80b26225033
[ "Apache-2.0", "MIT" ]
null
null
null
rustler_mix/lib/rustler.ex
ericlathrop/rustler
662e36b11703e590fdb21fd392e2c80b26225033
[ "Apache-2.0", "MIT" ]
null
null
null
rustler_mix/lib/rustler.ex
ericlathrop/rustler
662e36b11703e590fdb21fd392e2c80b26225033
[ "Apache-2.0", "MIT" ]
null
null
null
defmodule Rustler do @moduledoc """ Provides compile-time configuration for a NIF module. When used, Rustler expects the `:otp_app` as option. The `:otp_app` should point to the OTP application that the dynamic library can be loaded from. For example: defmodule MyNIF do use Rustler, otp_app: :my_nif end This allows the module to be configured like so: config :my_nif, MyNIF, crate: :my_nif, load_data: [1, 2, 3] ## Configuration options * `:cargo` - Specify how to envoke the rust compiler. Options are: - `:system` (default) - use `cargo` from the system (must be in `$PATH`) - `{:system, <channel>}` - use `cargo` from the system with the given channel. Specified as a string, passed directly to `cargo` (e.g. "+nightly"). - `{:rustup, <version>}` - use `rustup` to specify which channel to use. Available options include: `:stable`, `:beta`, `:nightly`, or a string which specifies a specific version (e.g. `"1.39.0"`). - `{:bin, "/path/to/binary"}` - provide a specific path to `cargo`. * `:crate` - the name of the Rust crate, if different from your `otp_app` value. If you have more than one crate in your project, you will need to be explicit about which crate you intend to use. * `:default_features` - a boolean to specify whether the crate's default features should be used. * `:env` - Specify a list of environment variables when envoking the compiler. * `:features` - a list of features to enable when compiling the crate. * `:load_data` - Any valid term. This value is passed into the NIF when it is loaded (default: `0`) * `:load_from` - This option allows control over where the final artifact should be loaded from at runtime. By default the compiled artifact is loaded from the owning `:otp_app`'s `priv/native` directory. This option comes in handy in combination with the `:skip_compilation?` option in order to load pre-compiled artifacts. To override the default behaviour specify a tuple: `{:my_app, "priv/native/<artifact>"}`. 
Due to the way `:erlang.load_nif/2` works, the artifact should not include the file extension (i.e. `.so`, `.dll`). * `:mode` - Specify which mode to compile the crate with. If you do not specify this option, a default will be provide based on the `Mix.env()`: - When `Mix.env()` is `:dev` or `:test`, the crate will be compiled in `:debug` mode. - When `Mix.env()` is `:prod` or `:bench`, the crate will be compiled in `:release` mode. * `:path` - By default, rustler expects the crate to be found in `native/<crate>` in the root of the project. Use this option to override this. * `:skip_compilation?` - This option skips envoking the rust compiler. Specify this option in combination with `:load_from` to load a pre-compiled artifact. * `:target` - Specify a compile [target] triple. * `:target_dir`: Override the compiler output directory. Any of the above options can be passed directly into the `use` macro like so: defmodule MyNIF do use Rustler, otp_app: :my_nif, crate: :some_other_crate, load_data: :something end [target]: https://forge.rust-lang.org/release/platform-support.html """ defmacro __using__(opts) do quote bind_quoted: [opts: opts] do config = Rustler.Compiler.compile_crate(__MODULE__, opts) for resource <- config.external_resources do @external_resource resource end if config.lib do @load_from config.load_from @load_data config.load_data @before_compile Rustler end end end defmacro __before_compile__(_env) do quote do @on_load :rustler_init @doc false def rustler_init do # Remove any old modules that may be loaded so we don't get # {:error, {:upgrade, 'Upgrade not supported by this NIF library.'}} :code.purge(__MODULE__) {otp_app, path} = @load_from load_path = otp_app |> Application.app_dir(path) |> to_charlist() :erlang.load_nif(load_path, @load_data) end end end @doc false def rustler_version, do: "0.22.0" @doc """ Supported NIF API versions. """ def nif_versions, do: [ '2.7', '2.8', '2.9', '2.10', '2.11', '2.12', '2.13', '2.14', '2.15', '2.16' ] end
32.578571
95
0.634948
083d4932d0cb82922df1f22bf496646f2c603db5
2,304
ex
Elixir
lib/cizen/dispatcher/sender.ex
Hihaheho-Studios/Cizen
09ba3c66aa11d0db913ffde804509bc7bef80db9
[ "MIT" ]
null
null
null
lib/cizen/dispatcher/sender.ex
Hihaheho-Studios/Cizen
09ba3c66aa11d0db913ffde804509bc7bef80db9
[ "MIT" ]
null
null
null
lib/cizen/dispatcher/sender.ex
Hihaheho-Studios/Cizen
09ba3c66aa11d0db913ffde804509bc7bef80db9
[ "MIT" ]
null
null
null
defmodule Cizen.Dispatcher.Sender do @moduledoc false use GenServer alias Cizen.Dispatcher.Node def start_link(opts) do allowed_to_send? = Keyword.get(opts, :allowed_to_send?, false) root_node = Keyword.get(opts, :root_node, Node) next_sender = Keyword.fetch!(opts, :next_sender) name = Keyword.fetch!(opts, :name) GenServer.start_link(__MODULE__, {name, root_node, next_sender, allowed_to_send?}, name: name) end def push(sender, event) do GenServer.cast(sender, {:push, event}) end # Passes the token to the next sender. def allow_to_send(sender) do GenServer.cast(sender, :allow_to_send) end defp reset(state) do state |> Map.put(:event, nil) |> Map.put(:allowed_to_send?, false) |> Map.put(:destinations, nil) end def init({name, root_node, next_sender, allowed_to_send?}) do state = %{ name: name, root_node: root_node, next_sender: next_sender, event_queue: :queue.new() } |> reset() |> Map.put(:allowed_to_send?, allowed_to_send?) {:ok, state} end def handle_cast(:allow_to_send, state) do state = %{state | allowed_to_send?: true} send_if_fulfilled(state) end def handle_cast({:push, event}, %{event: nil} = state) do state = %{state | event: event} push_event(state) end def handle_cast({:push, event}, state) do state = %{state | event_queue: :queue.in(event, state.event_queue)} {:noreply, state} end defp send_if_fulfilled(state) do if not is_nil(state.event) and state.allowed_to_send? do Enum.each(state.destinations, fn pid -> send(pid, state.event) end) allow_to_send(state.next_sender) state = state |> reset() try_dequeue_event(state) else {:noreply, state} end end defp push_event(%{event: event, root_node: root_node} = state) do destinations = Node.push(root_node, event) state = Map.put(state, :destinations, destinations) send_if_fulfilled(state) end defp try_dequeue_event(%{event_queue: queue} = state) do case :queue.out(queue) do {{:value, event}, queue} -> state = %{state | event: event, event_queue: queue} push_event(state) _ -> {:noreply, state} end end end
24.510638
98
0.647135
083d4b3fc8d90f6ea465f9b59adbd2cc4f9bb492
719
exs
Elixir
test/prometheus_telemetry/periodic_measurements/erlang_vm_test.exs
theblitzapp/prometheus_telemetry_elixir
a2c81e84f832c622ac5eb98bca89526a52cca3f0
[ "MIT" ]
3
2022-03-20T00:21:06.000Z
2022-03-31T13:40:20.000Z
test/prometheus_telemetry/periodic_measurements/erlang_vm_test.exs
theblitzapp/prometheus_telemetry_elixir
a2c81e84f832c622ac5eb98bca89526a52cca3f0
[ "MIT" ]
null
null
null
test/prometheus_telemetry/periodic_measurements/erlang_vm_test.exs
theblitzapp/prometheus_telemetry_elixir
a2c81e84f832c622ac5eb98bca89526a52cca3f0
[ "MIT" ]
null
null
null
defmodule PrometheusTelemetry.PeriodicMeasurements.ErlangVMTest do use ExUnit.Case alias PrometheusTelemetry.PeriodicMeasurements.ErlangVM @event_name [:erlang_vm_uptime] doctest ErlangVM describe "vm_wall_clock/0" do setup do: %{self: self()} test "writes value to telemetry", %{self: self, test: test} do PrometheusTelemetry.TestHelpers.start_telemetry_listener(self, test, @event_name) ErlangVM.vm_wall_clock() assert_receive {:telemetry_event, @event_name, %{uptime: _}, _metadata} end end describe "periodic_measurements/0" do test "returns expected list" do assert [{ErlangVM, :vm_wall_clock, []}] = ErlangVM.periodic_measurements() end end end
25.678571
87
0.732962
083d57085b367b428aa7b93fe28d411ce489692f
1,127
exs
Elixir
config/config.exs
jvantuyl/cidrex
54f6f05fe45c17859abb437fa8b37e50ce7d87a6
[ "MIT" ]
null
null
null
config/config.exs
jvantuyl/cidrex
54f6f05fe45c17859abb437fa8b37e50ce7d87a6
[ "MIT" ]
null
null
null
config/config.exs
jvantuyl/cidrex
54f6f05fe45c17859abb437fa8b37e50ce7d87a6
[ "MIT" ]
null
null
null
# This file is responsible for configuring your application # and its dependencies with the aid of the Mix.Config module. use Mix.Config # This configuration is loaded before any dependency and is restricted # to this project. If another project depends on this project, this # file won't be loaded nor affect the parent project. For this reason, # if you want to provide default values for your application for # third-party users, it should be done in your "mix.exs" file. # You can configure your application as: # # config :cidrex, key: :value # # and access this configuration in your application as: # # Application.get_env(:cidrex, :key) # # You can also configure a third-party app: # # config :logger, level: :info # # It is also possible to import configuration files, relative to this # directory. For example, you can emulate configuration per environment # by uncommenting the line below and defining dev.exs, test.exs and such. # Configuration from the imported file will override the ones defined # here (which is why it is important to import them last). # # import_config "#{Mix.env()}.exs"
36.354839
73
0.750665
083d5938f038b5c1406cdc6c42ac2bdf0edab694
777
ex
Elixir
apps/ewallet/lib/ewallet/web/email_validator.ex
amadeobrands/ewallet
505b7822721940a7b892a9b35c225e80cc8ac0b4
[ "Apache-2.0" ]
1
2018-12-07T06:21:21.000Z
2018-12-07T06:21:21.000Z
apps/ewallet/lib/ewallet/web/email_validator.ex
amadeobrands/ewallet
505b7822721940a7b892a9b35c225e80cc8ac0b4
[ "Apache-2.0" ]
null
null
null
apps/ewallet/lib/ewallet/web/email_validator.ex
amadeobrands/ewallet
505b7822721940a7b892a9b35c225e80cc8ac0b4
[ "Apache-2.0" ]
null
null
null
defmodule EWallet.EmailValidator do @moduledoc """ This module validates an email string. """ @email_regex ~r/^[^\@]+\@[^\@]+$/ @doc """ Checks whether the email address looks correct. """ @spec valid?(String.t() | nil) :: boolean() def valid?(nil), do: false def valid?(email) do Regex.match?(@email_regex, email) end @doc """ Checks whether the email address looks correct. Returns `{:ok, email}` if valid, returns `{:error, :invalid_email}` if invalid. """ @spec validate(String.t() | nil) :: {:ok, String.t()} | {:error, :invalid_email} def validate(nil), do: {:error, :invalid_email} def validate(email) do if Regex.match?(@email_regex, email) do {:ok, email} else {:error, :invalid_email} end end end
24.28125
82
0.621622
083d7b1f4367dabcc640f7f33f8d421d074ea848
2,743
exs
Elixir
test/soap/request_test.exs
the-AjK/soap
587f8a538084345fad01cb92e7c1ee9e51ca509d
[ "MIT" ]
74
2019-02-15T14:00:24.000Z
2022-03-27T06:02:43.000Z
test/soap/request_test.exs
the-AjK/soap
587f8a538084345fad01cb92e7c1ee9e51ca509d
[ "MIT" ]
23
2019-02-14T15:18:59.000Z
2022-01-13T10:27:44.000Z
test/soap/request_test.exs
the-AjK/soap
587f8a538084345fad01cb92e7c1ee9e51ca509d
[ "MIT" ]
49
2019-04-17T12:29:45.000Z
2022-03-16T11:45:46.000Z
defmodule Soap.RequestTest do use ExUnit.Case import Mock doctest Soap.Request alias Soap.{Request, Wsdl} @request_with_header ~S""" <?xml version="1.0" encoding="UTF-8"?> <env:Envelope xmlns:env="http://schemas.xmlsoap.org/soap/envelope/" xmlns:tns="http://test.com" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <env:Header> <Authentication xmlns="http://test.com"> <token>barbaz</token> </Authentication> </env:Header> <env:Body> <tns:sayHello xmlns="http://test.com"> <body>Hello John</body> </tns:sayHello> </env:Body> </env:Envelope> """ |> String.replace(~r/>\n.*?</, "><") |> String.trim() test "#call returns response body" do {_, wsdl} = Fixtures.get_file_path("wsdl/SendService.wsdl") |> Wsdl.parse_from_file() operation = "SendMessage" params = %{inCommonParms: [{"userID", "WSPB"}]} http_poison_result = {:ok, %HTTPoison.Response{status_code: 200, body: "Anything"}} with_mock HTTPoison, post: fn _, _, _, _ -> http_poison_result end do assert(Request.call(wsdl, operation, params) == http_poison_result) end end test "#call can take request options" do {_, wsdl} = Fixtures.get_file_path("wsdl/SendService.wsdl") |> Wsdl.parse_from_file() operation = "SendMessage" params = %{inCommonParms: [{"userID", "WSPB"}]} http_poison_result = {:ok, %HTTPoison.Response{status_code: 200, body: "Anything"}} hackney = [basic_auth: {"user", "pass"}] with_mock HTTPoison, post: fn _, _, _, [hackney: ^hackney] -> http_poison_result end do assert(Request.call(wsdl, operation, params, [], hackney: hackney) == http_poison_result) end end test "#get_url returns correct soap:address" do endpoint = "http://localhost:8080/soap/SendService" {_, wsdl} = Fixtures.get_file_path("wsdl/SendService.wsdl") |> Wsdl.parse_from_file() result = wsdl[:endpoint] assert result == endpoint end test "#call takes a tuple with soap headers and params" do {_, wsdl} = Fixtures.get_file_path("wsdl/SoapHeader.wsdl") |> Wsdl.parse_from_file() operation = "sayHello" 
params = {%{token: "barbaz"}, %{body: "Hello John"}} with_mock HTTPoison, post: fn _, body, _, _ -> body end do assert(Request.call(wsdl, operation, params) == @request_with_header) end end end
41.560606
218
0.578564
083d996288c357b7149f34e35bf2c02bbd474058
907
ex
Elixir
lib/hello_phoenix/endpoint.ex
chrhsmt/hello_phoenix
18c719cab56827847ddcf676df5141d1cf5e420b
[ "MIT" ]
null
null
null
lib/hello_phoenix/endpoint.ex
chrhsmt/hello_phoenix
18c719cab56827847ddcf676df5141d1cf5e420b
[ "MIT" ]
1
2018-12-14T08:36:11.000Z
2018-12-14T08:36:11.000Z
lib/hello_phoenix/endpoint.ex
chrhsmt/hello_phoenix
18c719cab56827847ddcf676df5141d1cf5e420b
[ "MIT" ]
null
null
null
defmodule HelloPhoenix.Endpoint do use Phoenix.Endpoint, otp_app: :hello_phoenix # Serve at "/" the static files from "priv/static" directory. # # You should set gzip to true if you are running phoenix.digest # when deploying your static files in production. plug Plug.Static, at: "/", from: :hello_phoenix, gzip: false, only: ~w(css images js favicon.ico robots.txt) # Code reloading can be explicitly enabled under the # :code_reloader configuration of your endpoint. if code_reloading? do plug Phoenix.LiveReloader plug Phoenix.CodeReloader end plug Plug.Logger plug Plug.Parsers, parsers: [:urlencoded, :multipart, :json], pass: ["*/*"], json_decoder: Poison plug Plug.MethodOverride plug Plug.Head plug Plug.Session, store: :cookie, key: "_hello_phoenix_key", signing_salt: "KkSx0mVv" plug :router, HelloPhoenix.Router end
25.194444
65
0.708931
083da4e0a1970e17e4fcd97c02c0ce0fcaab9be6
29,184
ex
Elixir
api/deps/phoenix_live_view/lib/phoenix_live_view/engine.ex
makao95/scrumpoker
de235a049a360612cde23ae5992375cb01a7feba
[ "MIT" ]
2
2020-05-06T14:51:56.000Z
2020-05-06T14:52:21.000Z
api/deps/phoenix_live_view/lib/phoenix_live_view/engine.ex
makao95/scrumpoker
de235a049a360612cde23ae5992375cb01a7feba
[ "MIT" ]
3
2021-06-20T14:51:14.000Z
2021-06-25T00:56:11.000Z
deps/phoenix_live_view/lib/phoenix_live_view/engine.ex
carlosviana/blog
1dcf58c3ca40bc3a7105d75de6f51954eb44bca8
[ "MIT" ]
1
2020-05-16T22:44:14.000Z
2020-05-16T22:44:14.000Z
defmodule Phoenix.LiveView.Component do @moduledoc """ The struct returned by components in .leex templates. This component is never meant to be output directly into the template. It should always be handled by the diffing algorithm. """ defstruct [:id, :component, :assigns] @type t :: %__MODULE__{ id: binary(), component: module(), assigns: map() } defimpl Phoenix.HTML.Safe do def to_iodata(%{id: id, component: component}) do raise ArgumentError, """ cannot convert component #{inspect(component)} with id #{inspect(id)} to HTML. A component must always be returned directly as part of a LiveView template. For example, this is not allowed: <%= content_tag :div do %> <%= live_component SomeComponent %> <% end %> That's because the component is inside `content_tag`. However, this works: <div> <%= live_component SomeComponent %> </div> Components are also allowed inside Elixir's special forms, such as `if`, `for`, `case`, and friends. <%= for item <- items do %> <%= live_component SomeComponent, id: item %> <% end %> However, using other module functions such as `Enum`, will not work: <%= Enum.map(items, fn item -> %> <%= live_component SomeComponent, id: item %> <% end %> """ end end end defmodule Phoenix.LiveView.Comprehension do @moduledoc """ The struct returned by for-comprehensions in .leex templates. See a description about its fields and use cases in `Phoenix.LiveView.Engine` docs. 
""" defstruct [:static, :dynamics, :fingerprint] @type t :: %__MODULE__{ static: [String.t()], dynamics: [ [ iodata() | Phoenix.LiveView.Rendered.t() | Phoenix.LiveView.Comprehension.t() | Phoenix.LiveView.Component.t() ] ], fingerprint: integer() } defimpl Phoenix.HTML.Safe do def to_iodata(%Phoenix.LiveView.Comprehension{static: static, dynamics: dynamics}) do for dynamic <- dynamics, do: to_iodata(static, dynamic) end defp to_iodata([static_head | static_tail], [%_{} = struct | dynamic_tail]) do dynamic_head = Phoenix.HTML.Safe.to_iodata(struct) [static_head, dynamic_head | to_iodata(static_tail, dynamic_tail)] end defp to_iodata([static_head | static_tail], [dynamic_head | dynamic_tail]) do [static_head, dynamic_head | to_iodata(static_tail, dynamic_tail)] end defp to_iodata([static_head], []) do [static_head] end end end defmodule Phoenix.LiveView.Rendered do @moduledoc """ The struct returned by .leex templates. See a description about its fields and use cases in `Phoenix.LiveView.Engine` docs. 
""" defstruct [:static, :dynamic, :fingerprint] @type t :: %__MODULE__{ static: [String.t()], dynamic: (boolean() -> [ nil | iodata() | Phoenix.LiveView.Rendered.t() | Phoenix.LiveView.Comprehension.t() | Phoenix.LiveView.Component.t() ]), fingerprint: integer() } defimpl Phoenix.HTML.Safe do def to_iodata(%Phoenix.LiveView.Rendered{static: static, dynamic: dynamic}) do to_iodata(static, dynamic.(false), []) end def to_iodata(%_{} = struct) do Phoenix.HTML.Safe.to_iodata(struct) end def to_iodata(nil) do raise "cannot convert .leex template with change tracking to iodata" end def to_iodata(other) do other end defp to_iodata([static_head | static_tail], [dynamic_head | dynamic_tail], acc) do to_iodata(static_tail, dynamic_tail, [to_iodata(dynamic_head), static_head | acc]) end defp to_iodata([static_head], [], acc) do Enum.reverse([static_head | acc]) end end end defmodule Phoenix.LiveView.Engine do @moduledoc ~S""" The `.leex` (Live EEx) template engine that tracks changes. In the documentation below, we will explain how it works internally. For user-facing documentation, see `Phoenix.LiveView`. ## Phoenix.LiveView.Rendered Whenever you render a `.leex` template, it returns a `Phoenix.LiveView.Rendered` structure. This structure has three fields: `:static`, `:dynamic` and `:fingerprint`. The `:static` field is a list of literal strings. This allows the Elixir compiler to optimize this list and avoid allocating its strings on every render. The `:dynamic` field contains a function that takes a boolean argument (see "Tracking changes" below), and returns a list of dynamic content. Each element in the list is either one of: 1. iodata - which is the dynamic content 2. nil - the dynamic content did not change 3. another `Phoenix.LiveView.Rendered` struct, see "Nesting and fingerprinting" below 4. a `Phoenix.LiveView.Comprehension` struct, see "Comprehensions" below 5. 
a `Phoenix.LiveView.Component` struct, see "Component" below When you render a `.leex` template, you can convert the rendered structure to iodata by alternating the static and dynamic fields, always starting with a static entry followed by a dynamic entry. The last entry will always be static too. So the following structure: %Phoenix.LiveView.Rendered{ static: ["foo", "bar", "baz"], dynamic: fn track_changes? -> ["left", "right"] end } Results in the following content to be sent over the wire as iodata: ["foo", "left", "bar", "right", "baz"] This is also what calling `Phoenix.HTML.Safe.to_iodata/1` with a `Phoenix.LiveView.Rendered` structure returns. Of course, the benefit of `.leex` templates is exactly that you do not need to send both static and dynamic segments every time. So let's talk about tracking changes. ## Tracking changes By default, a `.leex` template does not track changes. Change tracking can be enabled by including a changed map in the assigns with the key `__changed__` and passing `true` to the dynamic parts. The map should contain the name of any changed field as key and the boolean true as value. If a field is not listed in `:changed`, then it is always considered unchanged. If a field is unchanged and `.leex` believes a dynamic expression no longer needs to be computed, its value in the `dynamic` list will be `nil`. This information can be leveraged to avoid sending data to the client. ## Nesting and fingerprinting `Phoenix.LiveView` also tracks changes across `.leex` templates. Therefore, if your view has this: <%= render "form.html", assigns %> Phoenix will be able to track what is static and dynamic across templates, as well as what changed. A rendered nested `.leex` template will appear in the `dynamic` list as another `Phoenix.LiveView.Rendered` structure, which must be handled recursively. However, because the rendering of live templates can be dynamic in itself, it is important to distinguish which `.leex` template was rendered. 
For example, imagine this code: <%= if something?, do: render("one.html", assigns), else: render("other.html", assigns) %> To solve this, all `Phoenix.LiveView.Rendered` structs also contain a fingerprint field that uniquely identifies it. If the fingerprints are equal, you have the same template, and therefore it is possible to only transmit its changes. ## Comprehensions Another optimization done by `.leex` templates is to track comprehensions. If your code has this: <%= for point <- @points do %> x: <%= point.x %> y: <%= point.y %> <% end %> Instead of rendering all points with both static and dynamic parts, it returns a `Phoenix.LiveView.Comprehension` struct with the static parts, that are shared across all points, and a list of dynamics to be interpolated inside the static parts. If `@points` is a list with `%{x: 1, y: 2}` and `%{x: 3, y: 4}`, the above expression would return: %Phoenix.LiveView.Comprehension{ static: ["\n x: ", "\n y: ", "\n"], dynamics: [ ["1", "2"], ["3", "4"] ] } This allows `.leex` templates to drastically optimize the data sent by comprehensions, as the static parts are emitted only once, regardless of the number of items. The list of dynamics is always a list of iodatas or components, as we don't perform change tracking inside the comprehensions themselves. Similarly, comprehensions do not have fingerprints because they are only optimized at the root, so conditional evaluation, as the one seen in rendering, is not possible. The only possible outcome for a dynamic field that returns a comprehension is `nil`. ## Components `.leex` also supports stateful components. Since they are stateful, they are always handled lazily by the diff algorithm. 
""" @behaviour Phoenix.Template.Engine # TODO: Use @impl true instead of @doc false when we require Elixir v1.12 @doc false def compile(path, _name) do trim = Application.get_env(:phoenix, :trim_on_html_eex_engine, true) EEx.compile_file(path, engine: __MODULE__, line: 1, trim: trim) end @behaviour EEx.Engine @assigns_var Macro.var(:assigns, nil) @doc false def init(_opts) do %{ static: [], dynamic: [], vars_count: 0 } end @doc false def handle_begin(state) do %{state | static: [], dynamic: []} end @doc false def handle_end(state) do %{static: static, dynamic: dynamic} = state safe = {:safe, Enum.reverse(static)} {:__block__, [live_rendered: true], Enum.reverse([safe | dynamic])} end @doc false def handle_body(state) do {:ok, rendered} = to_rendered_struct(handle_end(state), {:untainted, %{}}, %{}) quote do require Phoenix.LiveView.Engine unquote(rendered) end end @doc false def handle_text(state, text) do handle_text(state, [], text) end @doc false def handle_text(state, _meta, text) do %{static: static} = state %{state | static: [text | static]} end @doc false def handle_expr(state, "=", ast) do %{static: static, dynamic: dynamic, vars_count: vars_count} = state var = Macro.var(:"arg#{vars_count}", __MODULE__) ast = quote do: unquote(var) = unquote(__MODULE__).to_safe(unquote(ast)) %{state | dynamic: [ast | dynamic], static: [var | static], vars_count: vars_count + 1} end def handle_expr(state, "", ast) do %{dynamic: dynamic} = state %{state | dynamic: [ast | dynamic]} end def handle_expr(state, marker, ast) do EEx.Engine.handle_expr(state, marker, ast) end ## Entry point for rendered structs defp to_rendered_struct(expr, vars, assigns) do with {:__block__, [live_rendered: true], entries} <- expr, {dynamic, [{:safe, static}]} <- Enum.split(entries, -1) do {block, static, dynamic, fingerprint} = analyze_static_and_dynamic(static, dynamic, vars, assigns) {:ok, quote do dynamic = fn track_changes? 
-> changed = case unquote(@assigns_var) do %{__changed__: changed} when track_changes? -> changed _ -> nil end unquote({:__block__, [], block}) unquote(dynamic) end %Phoenix.LiveView.Rendered{ static: unquote(static), dynamic: dynamic, fingerprint: unquote(fingerprint) } end} else _ -> :error end end defmacrop to_safe_match(var, ast) do quote do {:=, [], [ {_, _, __MODULE__} = unquote(var), {{:., _, [__MODULE__, :to_safe]}, _, [unquote(ast)]} ]} end end defp analyze_static_and_dynamic(static, dynamic, initial_vars, assigns) do {block, _} = Enum.map_reduce(dynamic, {0, initial_vars}, fn to_safe_match(var, ast), {counter, vars} -> vars = reset_vars(initial_vars, vars) {ast, keys, vars} = analyze_and_return_tainted_keys(ast, vars, assigns) live_struct = to_live_struct(ast, vars, assigns) {to_conditional_var(keys, var, live_struct), {counter + 1, vars}} ast, {counter, vars} -> vars = reset_vars(initial_vars, vars) {ast, vars, _} = analyze(ast, vars, assigns) {ast, {counter, vars}} end) {static, dynamic} = bins_and_vars(static) {block, static, dynamic, fingerprint(block, static)} end ## Optimize possible expressions into live structs (rendered / comprehensions) defp to_live_struct({:for, _, [_ | _]} = expr, vars, _assigns) do with {:for, meta, [_ | _] = args} <- expr, {filters, [[do: {:__block__, _, block}]]} <- Enum.split(args, -1), {dynamic, [{:safe, static}]} <- Enum.split(block, -1) do {block, static, dynamic, fingerprint} = analyze_static_and_dynamic(static, dynamic, taint_vars(vars), %{}) for = {:for, meta, filters ++ [[do: {:__block__, [], block ++ [dynamic]}]]} quote do %Phoenix.LiveView.Comprehension{ static: unquote(static), dynamics: unquote(for), fingerprint: unquote(fingerprint) } end else _ -> to_safe(expr, true) end end defp to_live_struct({macro, meta, [_ | _] = args} = expr, vars, assigns) when is_atom(macro) do if classify_taint(macro, args) in [:live, :render] do {args, [opts]} = Enum.split(args, -1) # The reason we can safely ignore assigns here is 
because # each branch in the live/render constructs are their own # rendered struct and, if the rendered has a new fingerpint, # then change tracking is fully disabled. # # For example, take this code: # # <%= if @foo do %> # <%= @bar %> # <% else %> # <%= @baz %> # <% end %> # # In theory, @bar and @baz should be recomputed whenever # @foo changes, because changing @foo may require a value # that was not available on the page to show. However, # given the branches have different fingerprints, the # diff mechanism takes care of forcing all assigns to # be rendered without us needing to handle it here. {args, vars, _} = analyze_list(args, vars, assigns, []) opts = for {key, value} <- opts do {key, maybe_block_to_rendered(value, vars)} end to_safe({macro, meta, args ++ [opts]}, true) else to_safe(expr, true) end end defp to_live_struct(expr, _vars, _assigns) do to_safe(expr, true) end defp maybe_block_to_rendered([{:->, _, _} | _] = blocks, vars) do for {:->, meta, [args, block]} <- blocks do # Variables defined in the head should not taint the whole body, # only their usage within the body. {args, match_vars, assigns} = analyze_list(args, vars, %{}, []) # So we collect them as usual but keep the original tainting. 
vars = reset_vars(vars, match_vars) case to_rendered_struct(block, vars, assigns) do {:ok, rendered} -> {:->, meta, [args, rendered]} :error -> {:->, meta, [args, block]} end end end defp maybe_block_to_rendered(block, vars) do case to_rendered_struct(block, vars, %{}) do {:ok, rendered} -> rendered :error -> block end end defp to_conditional_var(:all, var, live_struct) do quote do: unquote(var) = unquote(live_struct) end defp to_conditional_var(keys, var, live_struct) when keys == %{} do quote do unquote(var) = case changed do %{} -> nil _ -> unquote(live_struct) end end end defp to_conditional_var(keys, var, live_struct) do quote do unquote(var) = case unquote(changed_assigns(keys)) do true -> unquote(live_struct) false -> nil end end end defp changed_assigns(assigns) do checks = for {key, _} <- assigns, not nested_and_parent_is_checked?(key, assigns) do case key do [assign] -> quote do unquote(__MODULE__).changed_assign?(changed, unquote(assign)) end nested -> quote do unquote(__MODULE__).nested_changed_assign?( unquote(@assigns_var), changed, unquote(nested) ) end end end Enum.reduce(checks, &{:or, [], [&1, &2]}) end # If we are accessing @foo.bar.baz but in the same place we also pass # @foo.bar or @foo, we don't need to check for @foo.bar.baz. # If there is no nesting, then we are not nesting. defp nested_and_parent_is_checked?([_], _assigns), do: false # Otherwise, we convert @foo.bar.baz into [:baz, :bar, :foo], discard :baz, # and then check if [:foo, :bar] and then [:foo] is in it. 
defp nested_and_parent_is_checked?(keys, assigns), do: parent_is_checked?(tl(Enum.reverse(keys)), assigns) defp parent_is_checked?([], _assigns), do: false defp parent_is_checked?(rest, assigns), do: Map.has_key?(assigns, Enum.reverse(rest)) or parent_is_checked?(tl(rest), assigns) ## Extracts binaries and variable from iodata defp bins_and_vars(acc), do: bins_and_vars(acc, [], []) defp bins_and_vars([bin1, bin2 | acc], bins, vars) when is_binary(bin1) and is_binary(bin2), do: bins_and_vars([bin1 <> bin2 | acc], bins, vars) defp bins_and_vars([bin, var | acc], bins, vars) when is_binary(bin) and is_tuple(var), do: bins_and_vars(acc, [bin | bins], [var | vars]) defp bins_and_vars([var | acc], bins, vars) when is_tuple(var), do: bins_and_vars(acc, ["" | bins], [var | vars]) defp bins_and_vars([bin], bins, vars) when is_binary(bin), do: {Enum.reverse([bin | bins]), Enum.reverse(vars)} defp bins_and_vars([], bins, vars), do: {Enum.reverse(["" | bins]), Enum.reverse(vars)} ## Assigns tracking # Here we compute if an expression should be always computed, # never computed, or some times computed based on assigns. # # If any assign is used, we store it in the assigns and use it to compute # if it should be changed or not. # # However, operations that change the lexical scope, such as imports and # defining variables, taint the analysis. Because variables can be set at # any moment in Elixir, via macros, without appearing on the left side of # `=` or in a clause, whenever we see a variable, we consider it as tainted, # regardless of its position. # # The tainting that happens from lexical scope is called weak-tainting, # because it is disabled under certain special forms. There is also # strong-tainting, which are always computed. Strong-tainting only happens # if the `assigns` variable is used. 
defp analyze_and_return_tainted_keys(ast, vars, assigns) do {ast, vars, assigns} = analyze(ast, vars, assigns) {tainted_assigns?, assigns} = Map.pop(assigns, __MODULE__, false) keys = if match?({:tainted, _}, vars) or tainted_assigns?, do: :all, else: assigns {ast, keys, vars} end # Nested assign defp analyze_assign({{:., dot_meta, [left, right]}, meta, []}, vars, assigns, nest) do {left, vars, assigns} = analyze_assign(left, vars, assigns, [right | nest]) {{{:., dot_meta, [left, right]}, meta, []}, vars, assigns} end # Non-expanded assign defp analyze_assign({:@, meta, [{name, _, context}]}, vars, assigns, nest) when is_atom(name) and is_atom(context) do expr = quote line: meta[:line] || 0 do unquote(__MODULE__).fetch_assign!(unquote(@assigns_var), unquote(name)) end {expr, vars, Map.put(assigns, [name | nest], true)} end # Expanded assign access. The non-expanded form is handled on root, # then all further traversals happen on the expanded form defp analyze_assign( {{:., _, [__MODULE__, :fetch_assign!]}, _, [{:assigns, _, nil}, name]} = expr, vars, assigns, nest ) when is_atom(name) do {expr, vars, Map.put(assigns, [name | nest], true)} end defp analyze_assign(expr, vars, assigns, _nest) do analyze(expr, vars, assigns) end # Delegates to analyze assign defp analyze({{:., _, [_, _]}, _, []} = expr, vars, assigns) do analyze_assign(expr, vars, assigns, []) end defp analyze({:@, _, [{name, _, context}]} = expr, vars, assigns) when is_atom(name) and is_atom(context) do analyze_assign(expr, vars, assigns, []) end defp analyze( {{:., _, [__MODULE__, :fetch_assign!]}, _, [{:assigns, _, nil}, name]} = expr, vars, assigns ) when is_atom(name) do analyze_assign(expr, vars, assigns, []) end # Assigns is a strong-taint defp analyze({:assigns, _, nil} = expr, vars, assigns) do {expr, vars, taint_assigns(assigns)} end # Our own vars are ignored. They appear from nested do/end in EEx templates. 
defp analyze({_, _, __MODULE__} = expr, vars, assigns) do {expr, vars, assigns} end # Also skip special variables defp analyze({name, _, context} = expr, vars, assigns) when name in [:__MODULE__, :__ENV__, :__STACKTRACE__, :__DIR__] and is_atom(context) do {expr, vars, assigns} end # Vars always taint unless we are in restricted mode. defp analyze({name, _, context} = expr, {:restricted, map}, assigns) when is_atom(name) and is_atom(context) do if Map.has_key?(map, {name, context}) do {expr, {:tainted, map}, assigns} else {expr, {:restricted, map}, assigns} end end defp analyze({name, _, context} = expr, {_, map}, assigns) when is_atom(name) and is_atom(context) do {expr, {:tainted, Map.put(map, {name, context}, true)}, assigns} end # Ignore binary modifiers defp analyze({:"::", meta, [left, right]}, vars, assigns) do {left, vars, assigns} = analyze(left, vars, assigns) {{:"::", meta, [left, right]}, vars, assigns} end # Classify calls defp analyze({left, meta, args} = expr, vars, assigns) do case classify_taint(left, args) do :always -> case vars do {:restricted, _} -> {expr, vars, assigns} {_, map} -> {expr, {:tainted, map}, assigns} end :render -> {args, [opts]} = Enum.split(args, -1) {args, vars, assigns} = analyze_list(args, vars, assigns, []) {opts, vars, assigns} = analyze_with_restricted_vars(opts, vars, assigns) {{left, meta, args ++ [opts]}, vars, assigns} :none -> {left, vars, assigns} = analyze(left, vars, assigns) {args, vars, assigns} = analyze_list(args, vars, assigns, []) {{left, meta, args}, vars, assigns} # :never or :live _ -> {args, vars, assigns} = analyze_with_restricted_vars(args, vars, assigns) {{left, meta, args}, vars, assigns} end end defp analyze({left, right}, vars, assigns) do {left, vars, assigns} = analyze(left, vars, assigns) {right, vars, assigns} = analyze(right, vars, assigns) {{left, right}, vars, assigns} end defp analyze([_ | _] = list, vars, assigns) do analyze_list(list, vars, assigns, []) end defp analyze(other, vars, 
assigns) do {other, vars, assigns} end defp analyze_list([head | tail], vars, assigns, acc) do {head, vars, assigns} = analyze(head, vars, assigns) analyze_list(tail, vars, assigns, [head | acc]) end defp analyze_list([], vars, assigns, acc) do {Enum.reverse(acc), vars, assigns} end # vars is one of: # # * {:tainted, map} # * {:restricted, map} # * {:untainted, map} # # Seeing a variable at any moment taints it unless we are inside a # scope. For example, in case/cond/with/fn/try, the variable is only # tainted if it came from outside of the case/cond/with/fn/try. # So for those constructs we set the mode to restricted and stop # collecting vars. defp analyze_with_restricted_vars(ast, {kind, map}, assigns) do {ast, {new_kind, _}, assigns} = analyze(ast, {unless_tainted(kind, :restricted), map}, assigns) {ast, {unless_tainted(new_kind, kind), map}, assigns} end defp reset_vars({kind, _}, {_, map}), do: {kind, map} defp taint_vars({_, map}), do: {:tainted, map} defp taint_assigns(assigns), do: Map.put(assigns, __MODULE__, true) defp unless_tainted(:tainted, _), do: :tainted defp unless_tainted(_, kind), do: kind ## Callbacks defp fingerprint(block, static) do <<fingerprint::8*16>> = [block | static] |> :erlang.term_to_binary() |> :erlang.md5() fingerprint end @doc false defmacro to_safe(ast) do to_safe(ast, false) end defp to_safe(ast, bool) do to_safe(ast, line_from_expr(ast), bool) end defp line_from_expr({_, meta, _}) when is_list(meta), do: Keyword.get(meta, :line, 0) defp line_from_expr(_), do: 0 defp to_safe(literal, _line, _extra_clauses?) when is_binary(literal) or is_atom(literal) or is_number(literal) do literal |> Phoenix.HTML.Safe.to_iodata() |> IO.iodata_to_binary() end defp to_safe(literal, line, _extra_clauses?) 
when is_list(literal) do quote line: line, do: Phoenix.HTML.Safe.List.to_iodata(unquote(literal)) end defp to_safe(expr, line, false) do quote line: line, do: unquote(__MODULE__).safe_to_iodata(unquote(expr)) end defp to_safe(expr, line, true) do quote line: line, do: unquote(__MODULE__).live_to_iodata(unquote(expr)) end @doc false def safe_to_iodata(expr) do case expr do {:safe, data} -> data bin when is_binary(bin) -> Plug.HTML.html_escape_to_iodata(bin) other -> Phoenix.HTML.Safe.to_iodata(other) end end @doc false def live_to_iodata(expr) do case expr do {:safe, data} -> data %{__struct__: Phoenix.LiveView.Rendered} = other -> other %{__struct__: Phoenix.LiveView.Component} = other -> other %{__struct__: Phoenix.LiveView.Comprehension} = other -> other bin when is_binary(bin) -> Plug.HTML.html_escape_to_iodata(bin) other -> Phoenix.HTML.Safe.to_iodata(other) end end @doc false def changed_assign?(nil, _name) do true end def changed_assign?(changed, name) do case changed do %{^name => _} -> true %{} -> false end end @doc false def nested_changed_assign?(assigns, changed, [head | _] = all) do changed_assign?(changed, head) and recur_changed_assign?(assigns, changed, all) end defp recur_changed_assign?(assigns, changed, [head]) do case {assigns, changed} do {%{^head => value}, %{^head => value}} -> false {_, _} -> true end end defp recur_changed_assign?(assigns, changed, [head | tail]) do case {assigns, changed} do {%{^head => assigns_value}, %{^head => changed_value}} -> recur_changed_assign?(assigns_value, changed_value, tail) {_, _} -> true end end @doc false def fetch_assign!(assigns, key) do case assigns do %{^key => val} -> val %{} -> raise ArgumentError, """ assign @#{key} not available in eex template. Please make sure all proper assigns have been set. If this is a child template, ensure assigns are given explicitly by the parent template as they are not automatically forwarded. 
Available assigns: #{inspect(Enum.map(assigns, &elem(&1, 0)))} """ end end # For case/if/unless, we are not leaking the variable given as argument, # such as `if var = ... do`. This does not follow Elixir semantics, but # yields better optimizations. defp classify_taint(:case, [_, _]), do: :live defp classify_taint(:if, [_, _]), do: :live defp classify_taint(:unless, [_, _]), do: :live defp classify_taint(:cond, [_]), do: :live defp classify_taint(:try, [_]), do: :live defp classify_taint(:receive, [_]), do: :live defp classify_taint(:with, _), do: :live defp classify_taint(:live_component, [_, [do: _]]), do: :render defp classify_taint(:live_component, [_, _, [do: _]]), do: :render # TODO: Remove me when live_component/4 is removed defp classify_taint(:live_component, [_, _, _, [do: _]]), do: :render defp classify_taint(:component, [_, [do: _]]), do: :render defp classify_taint(:component, [_, _, [do: _]]), do: :render defp classify_taint(:render_layout, [_, _, _, [do: _]]), do: :render defp classify_taint(:alias, [_]), do: :always defp classify_taint(:import, [_]), do: :always defp classify_taint(:require, [_]), do: :always defp classify_taint(:alias, [_, _]), do: :always defp classify_taint(:import, [_, _]), do: :always defp classify_taint(:require, [_, _]), do: :always defp classify_taint(:&, [_]), do: :never defp classify_taint(:for, _), do: :never defp classify_taint(:fn, _), do: :never defp classify_taint(_, _), do: :none end
31.584416
96
0.636342
083db4e05453aea762a1cad3eb29ea4cae86b51c
1,798
exs
Elixir
server/config/dev.exs
Nymrinae/TimeManager
5048280da7c497909bca7faf7d2256c07438d442
[ "MIT" ]
null
null
null
server/config/dev.exs
Nymrinae/TimeManager
5048280da7c497909bca7faf7d2256c07438d442
[ "MIT" ]
null
null
null
server/config/dev.exs
Nymrinae/TimeManager
5048280da7c497909bca7faf7d2256c07438d442
[ "MIT" ]
null
null
null
use Mix.Config # Configure your database config :server, Server.Repo, # username: System.get_env("PG_USER"), # password: System.get_env("PG_PWD"), # database: System.get_env("PG_DB"), # hostname: System.get_env("PG_HOST"), username: "postgres", password: "postgres", database: "postgres", hostname: "db", show_sensitive_data_on_connection_error: true, pool_size: 10 # For development, we disable any cache and enable # debugging and code reloading. # # The watchers configuration can be used to run external # watchers to your application. For example, we use it # with webpack to recompile .js and .css sources. config :server, ServerWeb.Endpoint, http: [port: 4000], debug_errors: true, code_reloader: true, check_origin: false, watchers: [] # ## SSL Support # # In order to use HTTPS in development, a self-signed # certificate can be generated by running the following # Mix task: # # mix phx.gen.cert # # Note that this task requires Erlang/OTP 20 or later. # Run `mix help phx.gen.cert` for more information. # # The `http:` config above can be replaced with: # # https: [ # port: 4001, # cipher_suite: :strong, # keyfile: "priv/cert/selfsigned_key.pem", # certfile: "priv/cert/selfsigned.pem" # ], # # If desired, both `http:` and `https:` keys can be # configured to run both http and https servers on # different ports. # Do not include metadata nor timestamps in development logs config :logger, :console, format: "[$level] $message\n" # Set a higher stacktrace during development. Avoid configuring such # in production as building large stacktraces may be expensive. config :phoenix, :stacktrace_depth, 20 # Initialize plugs at runtime for faster development compilation config :phoenix, :plug_init_mode, :runtime
29
68
0.719689
083df63de7701b4169b6adab50db123e261c89f3
342
exs
Elixir
api/priv/repo/seeds.exs
panayi/react-phoenix-starter-kit
6615402deeac9b359954cdd8b523f7735b75eec9
[ "MIT" ]
1
2017-06-02T07:17:22.000Z
2017-06-02T07:17:22.000Z
priv/repo/seeds.exs
mcampa/bigcommerce-elixir-app
f1fe40b2a456b49c09970a5317108316f054d5ec
[ "MIT" ]
null
null
null
priv/repo/seeds.exs
mcampa/bigcommerce-elixir-app
f1fe40b2a456b49c09970a5317108316f054d5ec
[ "MIT" ]
null
null
null
# Script for populating the database. You can run it as: # # mix run priv/repo/seeds.exs # # Inside the script, you can read and write to any of your # repositories directly: # # App.Repo.insert!(%App.SomeModel{}) # # We recommend using the bang functions (`insert!`, `update!` # and so on) as they will fail if something goes wrong.
28.5
61
0.69883
083e05763ebba1ee991bb643f6b6ee107c0bcf49
1,547
exs
Elixir
mix.exs
paultannenbaum/mechanize
97fd54c0421689026c01b9bf38206fa74e8f7e1a
[ "MIT" ]
25
2020-06-26T02:21:35.000Z
2022-03-05T18:51:46.000Z
mix.exs
paultannenbaum/mechanize
97fd54c0421689026c01b9bf38206fa74e8f7e1a
[ "MIT" ]
29
2019-07-02T21:50:06.000Z
2020-05-28T18:34:01.000Z
mix.exs
paultannenbaum/mechanize
97fd54c0421689026c01b9bf38206fa74e8f7e1a
[ "MIT" ]
4
2020-06-24T02:11:47.000Z
2022-03-06T00:50:59.000Z
defmodule Mechanize.MixProject do use Mix.Project @version "0.1.0" def project do [ app: :mechanize, version: @version, elixir: "~> 1.7", start_permanent: Mix.env() == :prod, deps: deps(), test_coverage: [tool: ExCoveralls], dialyzer: [plt_add_deps: :transitive], elixirc_paths: elixirc_paths(Mix.env()), description: "Build web scrapers and automate interaction with websites in Elixir with ease!", package: package(), # Docs name: "Mechanize", source_url: "https://github.com/gushonorato/mechanize", docs: [ main: "readme", extras: ["README.md"] ] ] end defp elixirc_paths(:test), do: ["lib", "test/support"] defp elixirc_paths(_), do: ["lib"] # Run "mix help compile.app" to learn about applications. def application do [ extra_applications: [:logger, :httpoison] ] end # Run "mix help deps" to learn about dependencies. defp deps do [ {:httpoison, "~> 1.5"}, {:floki, "~> 0.26.0"}, {:credo, "~> 1.0.0", only: [:dev, :test], runtime: false}, {:excoveralls, github: "parroty/excoveralls", only: [:dev, :test]}, {:mix_test_watch, "~> 0.8", only: :dev, runtime: false}, {:bypass, "~> 1.0", only: :test}, {:ex_doc, "~> 0.22", only: :dev, runtime: false} ] end defp package() do [ maintainers: ["Gustavo Honorato"], licenses: ["MIT"], links: %{"GitHub" => "https://github.com/gushonorato/mechanize"} ] end end
25.783333
100
0.574661
083e1e613e974f91b619e0d421aab38c0d623be4
2,284
ex
Elixir
clients/compute/lib/google_api/compute/v1/model/target_vpn_gateways_scoped_list_warning.ex
kyleVsteger/elixir-google-api
3a0dd498af066a4361b5b0fd66ffc04a57539488
[ "Apache-2.0" ]
null
null
null
clients/compute/lib/google_api/compute/v1/model/target_vpn_gateways_scoped_list_warning.ex
kyleVsteger/elixir-google-api
3a0dd498af066a4361b5b0fd66ffc04a57539488
[ "Apache-2.0" ]
null
null
null
clients/compute/lib/google_api/compute/v1/model/target_vpn_gateways_scoped_list_warning.ex
kyleVsteger/elixir-google-api
3a0dd498af066a4361b5b0fd66ffc04a57539488
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Compute.V1.Model.TargetVpnGatewaysScopedListWarning do @moduledoc """ [Output Only] Informational warning which replaces the list of addresses when the list is empty. ## Attributes * `code` (*type:* `String.t`, *default:* `nil`) - [Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response. * `data` (*type:* `list(GoogleApi.Compute.V1.Model.TargetVpnGatewaysScopedListWarningData.t)`, *default:* `nil`) - [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key": "scope", "value": "zones/us-east1-d" } * `message` (*type:* `String.t`, *default:* `nil`) - [Output Only] A human-readable description of the warning code. 
""" use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :code => String.t() | nil, :data => list(GoogleApi.Compute.V1.Model.TargetVpnGatewaysScopedListWarningData.t()) | nil, :message => String.t() | nil } field(:code) field(:data, as: GoogleApi.Compute.V1.Model.TargetVpnGatewaysScopedListWarningData, type: :list) field(:message) end defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.TargetVpnGatewaysScopedListWarning do def decode(value, options) do GoogleApi.Compute.V1.Model.TargetVpnGatewaysScopedListWarning.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.TargetVpnGatewaysScopedListWarning do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
41.527273
195
0.727671
083e1eb6adbe774765accb191e85c5cd8ab9c3e9
215
ex
Elixir
elixir/plug-cowboy/lib/plug_cowboy_app/models/status.ex
ivanjj22/performance-benchmark-stacks
085dd392fdcf2b75ec3ea62b5272ff4d26d48c8c
[ "MIT" ]
5
2021-05-14T13:59:13.000Z
2022-02-06T19:28:50.000Z
elixir/plug-cowboy/lib/plug_cowboy_app/models/status.ex
ivanjj22/performance-benchmark-stacks
085dd392fdcf2b75ec3ea62b5272ff4d26d48c8c
[ "MIT" ]
null
null
null
elixir/plug-cowboy/lib/plug_cowboy_app/models/status.ex
ivanjj22/performance-benchmark-stacks
085dd392fdcf2b75ec3ea62b5272ff4d26d48c8c
[ "MIT" ]
4
2021-04-28T20:34:37.000Z
2021-10-06T20:29:47.000Z
defmodule Models.Status do @moduledoc false defstruct [ status: "" ] def up() do %__MODULE__{ status: "UP" } end def down() do %__MODULE__{ status: "DOWN" } end end
10.238095
26
0.534884
083e33fbf4654f74ae8baddf7d2e21695b26e0c8
1,954
ex
Elixir
lib/ex_oauth2_provider/plug/error_handler.ex
loopsocial/ex_oauth2_provider
59d177f1c7581e1d794823279067022b1598f5f2
[ "MIT" ]
null
null
null
lib/ex_oauth2_provider/plug/error_handler.ex
loopsocial/ex_oauth2_provider
59d177f1c7581e1d794823279067022b1598f5f2
[ "MIT" ]
null
null
null
lib/ex_oauth2_provider/plug/error_handler.ex
loopsocial/ex_oauth2_provider
59d177f1c7581e1d794823279067022b1598f5f2
[ "MIT" ]
null
null
null
defmodule ExOauth2Provider.Plug.ErrorHandler do @moduledoc """ A default error handler that can be used for failed authentication """ alias Plug.Conn @callback unauthenticated(Conn.t(), map()) :: Conn.t() @callback unauthorized(Conn.t(), map()) :: Conn.t() @callback no_resource(Conn.t(), map()) :: Conn.t() @doc false @spec unauthenticated(Conn.t(), map()) :: Conn.t() def unauthenticated(conn, _params) do respond(conn, response_type(conn), 401, "Unauthenticated") end @doc false @spec unauthorized(Conn.t(), map()) :: Conn.t() def unauthorized(conn, _params) do respond(conn, response_type(conn), 403, "Unauthorized") end @doc false @spec no_resource(Conn.t(), map()) :: Conn.t() def no_resource(conn, _params) do respond(conn, response_type(conn), 403, "Unauthorized") end @doc false @spec already_authenticated(Conn.t(), map()) :: Conn.t() def already_authenticated(conn, _params), do: Conn.halt(conn) defp respond(conn, :json, status, msg) do conn |> Conn.configure_session(drop: true) |> Conn.put_resp_content_type("application/json") |> Conn.send_resp(status, Jason.encode!(%{errors: [msg]})) rescue ArgumentError -> conn |> Conn.put_resp_content_type("application/json") |> Conn.send_resp(status, Jason.encode!(%{errors: [msg]})) end defp respond(conn, :html, status, msg) do conn |> Conn.configure_session(drop: true) |> Conn.put_resp_content_type("text/plain") |> Conn.send_resp(status, msg) rescue ArgumentError -> conn |> Conn.put_resp_content_type("text/plain") |> Conn.send_resp(status, msg) end defp response_type(conn) do accept = accept_header(conn) case Regex.match?(~r/json/, accept) do true -> :json false -> :html end end defp accept_header(conn) do conn |> Conn.get_req_header("accept") |> List.first() |> Kernel.||("") end end
26.405405
68
0.651484
083e34445f293ee3594a1e912cec77df4b42d7f7
15,760
ex
Elixir
lib/ecto/query/api.ex
thenrio/ecto
87ad8d75a23d63b5f8f5a5cecadb2365d6ed6083
[ "Apache-2.0" ]
null
null
null
lib/ecto/query/api.ex
thenrio/ecto
87ad8d75a23d63b5f8f5a5cecadb2365d6ed6083
[ "Apache-2.0" ]
null
null
null
lib/ecto/query/api.ex
thenrio/ecto
87ad8d75a23d63b5f8f5a5cecadb2365d6ed6083
[ "Apache-2.0" ]
null
null
null
defmodule Ecto.Query.API do @moduledoc """ Lists all functions allowed in the query API. * Comparison operators: `==`, `!=`, `<=`, `>=`, `<`, `>` * Arithmetic operators: `+`, `-`, `*`, `/` * Boolean operators: `and`, `or`, `not` * Inclusion operator: `in/2` * Search functions: `like/2` and `ilike/2` * Null check functions: `is_nil/1` * Aggregates: `count/0`, `count/1`, `avg/1`, `sum/1`, `min/1`, `max/1` * Date/time intervals: `datetime_add/3`, `date_add/3`, `from_now/2`, `ago/2` * Inside select: `struct/2`, `map/2`, `merge/2` and literals (map, tuples, lists, etc) * General: `fragment/1`, `field/2` and `type/2` Note the functions in this module exist for documentation purposes and one should never need to invoke them directly. Furthermore, it is possible to define your own macros and use them in Ecto queries (see docs for `fragment/1`). ## Window API Ecto also supports many of the windows functions found in SQL databases. See `Ecto.Query.WindowAPI` for more information. ## About the arithmetic operators The Ecto implementation of these operators provide only a thin layer above the adapters. So if your adapter allows you to use them in a certain way (like adding a date and an interval in PostgreSQL), it should work just fine in Ecto queries. """ @dialyzer :no_return @doc """ Binary `==` operation. """ def left == right, do: doc! [left, right] @doc """ Binary `!=` operation. """ def left != right, do: doc! [left, right] @doc """ Binary `<=` operation. """ def left <= right, do: doc! [left, right] @doc """ Binary `>=` operation. """ def left >= right, do: doc! [left, right] @doc """ Binary `<` operation. """ def left < right, do: doc! [left, right] @doc """ Binary `>` operation. """ def left > right, do: doc! [left, right] @doc """ Binary `+` operation. """ def left + right, do: doc! [left, right] @doc """ Binary `-` operation. """ def left - right, do: doc! [left, right] @doc """ Binary `*` operation. """ def left * right, do: doc! 
[left, right] @doc """ Binary `/` operation. """ def left / right, do: doc! [left, right] @doc """ Binary `and` operation. """ def left and right, do: doc! [left, right] @doc """ Binary `or` operation. """ def left or right, do: doc! [left, right] @doc """ Unary `not` operation. """ def not(value), do: doc! [value] @doc """ Checks if the left-value is included in the right one. from p in Post, where: p.id in [1, 2, 3] The right side may either be a list, a literal list or even a column in the database with array type: from p in Post, where: "elixir" in p.tags """ def left in right, do: doc! [left, right] @doc """ Searches for `search` in `string`. from p in Post, where: like(p.body, "Chapter%") Translates to the underlying SQL LIKE query, therefore its behaviour is dependent on the database. In particular, PostgreSQL will do a case-sensitive operation, while the majority of other databases will be case-insensitive. For performing a case-insensitive `like` in PostgreSQL, see `ilike/2`. You should be very careful when allowing user sent data to be used as part of LIKE query, since they allow to perform [LIKE-injections](https://githubengineering.com/like-injection/). """ def like(string, search), do: doc! [string, search] @doc """ Searches for `search` in `string` in a case insensitive fashion. from p in Post, where: ilike(p.body, "Chapter%") Translates to the underlying SQL ILIKE query. This operation is only available on PostgreSQL. """ def ilike(string, search), do: doc! [string, search] @doc """ Checks if the given value is nil. from p in Post, where: is_nil(p.published_at) To check if a given value is not nil use: from p in Post, where: not is_nil(p.published_at) """ def is_nil(value), do: doc! [value] @doc """ Counts the entries in the table. from p in Post, select: count() """ def count, do: doc! [] @doc """ Counts the given entry. from p in Post, select: count(p.id) """ def count(value), do: doc! [value] @doc """ Counts the distinct values in given entry. 
from p in Post, select: count(p.id, :distinct) """ def count(value, :distinct), do: doc! [value, :distinct] @doc """ Takes whichever value is not null, or null if they both are. In SQL, COALESCE takes any number of arguments, but in ecto it only takes two, so it must be chained to achieve the same effect. from p in Payment, select: p.value |> coalesce(p.backup_value) |> coalesce(0) """ def coalesce(value, expr), do: doc! [value, expr] @doc """ Applies the given expression as a FILTER clause against an aggregate. This is currently only supported by Postgres. from p in Payment, select: filter(avg(p.value), p.value > 0 and p.value < 100) from p in Payment, select: avg(p.value) |> filter(p.value < 0) """ def filter(value, filter), do: doc! [value, filter] @doc """ Calculates the average for the given entry. from p in Payment, select: avg(p.value) """ def avg(value), do: doc! [value] @doc """ Calculates the sum for the given entry. from p in Payment, select: sum(p.value) """ def sum(value), do: doc! [value] @doc """ Calculates the minimum for the given entry. from p in Payment, select: min(p.value) """ def min(value), do: doc! [value] @doc """ Calculates the maximum for the given entry. from p in Payment, select: max(p.value) """ def max(value), do: doc! [value] @doc """ Adds a given interval to a datetime. The first argument is a `datetime`, the second one is the count for the interval, which may be either positive or negative and the interval value: # Get all items published since the last month from p in Post, where: p.published_at > datetime_add(^NaiveDateTime.utc_now(), -1, "month") In the example above, we used `datetime_add/3` to subtract one month from the current datetime and compared it with the `p.published_at`. If you want to perform operations on date, `date_add/3` could be used. The following intervals are supported: year, month, week, day, hour, minute, second, millisecond and microsecond. """ def datetime_add(datetime, count, interval), do: doc! 
[datetime, count, interval] @doc """ Adds a given interval to a date. See `datetime_add/3` for more information. """ def date_add(date, count, interval), do: doc! [date, count, interval] @doc """ Adds the given interval to the current time in UTC. The current time in UTC is retrieved from Elixir and not from the database. ## Examples from a in Account, where: a.expires_at < from_now(3, "month") """ def from_now(count, interval), do: doc! [count, interval] @doc """ Subtracts the given interval from the current time in UTC. The current time in UTC is retrieved from Elixir and not from the database. ## Examples from p in Post, where: p.published_at > ago(3, "month") """ def ago(count, interval), do: doc! [count, interval] @doc """ Send fragments directly to the database. It is not possible to represent all possible database queries using Ecto's query syntax. When such is required, it is possible to use fragments to send any expression to the database: def unpublished_by_title(title) do from p in Post, where: is_nil(p.published_at) and fragment("lower(?)", p.title) == ^title end Every occurence of the `?` character will be interpreted as a place for additional argument. If the literal character `?` is required, it can be escaped with `\\\\?` (one escape for strings, another for fragment). In the example above, we are using the lower procedure in the database to downcase the title column. It is very important to keep in mind that Ecto is unable to do any type casting described above when fragments are used. You can however use the `type/2` function to give Ecto some hints: fragment("lower(?)", p.title) == type(^title, :string) Or even say the right side is of the same type as `p.title`: fragment("lower(?)", p.title) == type(^title, p.title) It is possible to make use of PostgreSQL's JSON/JSONB data type with fragments, as well: fragment("?->>? 
ILIKE ?", p.map, "key_name", ^some_value) ## Keyword fragments In order to support databases that do not have string-based queries, like MongoDB, fragments also allow keywords to be given: from p in Post, where: fragment(title: ["$eq": ^some_value]) ## Defining custom functions using macros and fragment You can add a custom Ecto query function using macros. For example to expose SQL's coalesce function you can define this macro: defmodule CustomFunctions do defmacro coalesce(left, right) do quote do fragment("coalesce(?, ?)", unquote(left), unquote(right)) end end end To have coalesce/2 available, just import the module that defines it. import CustomFunctions The only downside is that it will show up as a fragment when inspecting the Elixir query. Other than that, it should be equivalent to a built-in Ecto query function. """ def fragment(fragments), do: doc! [fragments] @doc """ Allows a field to be dynamically accessed. def at_least_four(doors_or_tires) do from c in Car, where: field(c, ^doors_or_tires) >= 4 end In the example above, both `at_least_four(:doors)` and `at_least_four(:tires)` would be valid calls as the field is dynamically generated. """ def field(source, field), do: doc! [source, field] @doc """ Used in `select` to specify which struct fields should be returned. 
For example, if you don't need all fields to be returned as part of a struct, you can filter it to include only certain fields by using `struct/2`: from p in Post, select: struct(p, [:title, :body]) `struct/2` can also be used to dynamically select fields: fields = [:title, :body] from p in Post, select: struct(p, ^fields) As a convenience, `select` allows developers to take fields without an explicit call to `struct/2`: from p in Post, select: [:title, :body] Or even dynamically: fields = [:title, :body] from p in Post, select: ^fields For preloads, the selected fields may be specified from the parent: from(city in City, preload: :country, select: struct(city, [:country_id, :name, country: [:id, :population]])) If the same source is selected multiple times with a `struct`, the fields are merged in order to avoid fetching multiple copies from the database. In other words, the expression below: from(city in City, preload: :country, select: {struct(city, [:country_id]), struct(city, [:name])} is expanded to: from(city in City, preload: :country, select: {struct(city, [:country_id, :name]), struct(city, [:country_id, :name])} **IMPORTANT**: When filtering fields for associations, you MUST include the foreign keys used in the relationship, otherwise Ecto will be unable to find associated records. """ def struct(source, fields), do: doc! [source, fields] @doc """ Used in `select` to specify which fields should be returned as a map. For example, if you don't need all fields to be returned or neither need a struct, you can use `map/2` to achieve both: from p in Post, select: map(p, [:title, :body]) `map/2` can also be used to dynamically select fields: fields = [:title, :body] from p in Post, select: map(p, ^fields) If the same source is selected multiple times with a `map`, the fields are merged in order to avoid fetching multiple copies from the database. 
In other words, the expression below: from(city in City, preload: :country, select: {map(city, [:country_id]), map(city, [:name])} is expanded to: from(city in City, preload: :country, select: {map(city, [:country_id, :name]), map(city, [:country_id, :name])} For preloads, the selected fields may be specified from the parent: from(city in City, preload: :country, select: map(city, [:country_id, :name, country: [:id, :population]])) **IMPORTANT**: When filtering fields for associations, you MUST include the foreign keys used in the relationship, otherwise Ecto will be unable to find associated records. """ def map(source, fields), do: doc! [source, fields] @doc """ Merges the map on the right over the map on the left. If the map on the left side is a struct, Ecto will check all of the field on the right previously exist on the left before merging. from(city in City, select: merge(city, %{virtual_field: "some_value"})) This function is primarily used by `Ecto.Query.select_merge/3` to merge different select clauses. """ def merge(left_map, right_map), do: doc! [left_map, right_map] @doc """ Returns value from the `json_field` pointed to by `path`. from(post in Post, select: json_extract_path(post.meta, ["author", "name"])) The query can be also rewritten as: from(post in Post, select: post.meta["author"]["name"]) Path elements can be integers to access values in JSON arrays: from(post in Post, select: post.meta["tags"][0]["name"]) Any element of the path can be dynamic: field = "name" from(post in Post, select: post.meta["author"][^field]) **Warning**: the underlying data in the JSON column is returned without any additional decoding, e.g. datetimes (which are encoded as strings) are returned as strings. This also means that queries like: `where: post.meta["published_at"] > from_now(-1, "day")` may return incorrect results or fail as the underlying database may try to compare e.g. `json` with `date` types. Use `type/2` to force the types on the database level. 
""" def json_extract_path(json_field, path), do: doc! [json_field, path] @doc """ Casts the given value to the given type at the database level. Most of the times, Ecto is able to proper cast interpolated values due to its type checking mechanism. In some situations though, you may want to tell Ecto that a parameter has some particular type: type(^title, :string) It is also possible to say the type must match the same of a column: type(^title, p.title) Ecto will ensure `^title` is cast to the given type and enforce such type at the database level. If the value is returned in a `select`, Ecto will also enforce the proper type throughout. When performing arithmetic operations, `type/2` can be used to cast all the parameters in the operation to the same type: from p in Post, select: type(p.visits + ^a_float + ^a_integer, :decimal) Inside `select`, `type/2` can also be used to cast fragments: type(fragment("NOW"), :naive_datetime) Or to type fields from schemaless queries: from p in "posts", select: type(p.cost, :decimal) Or to type aggregation results: from p in Post, select: type(avg(p.cost), :integer) from p in Post, select: type(filter(avg(p.cost), p.cost > 0), :integer) """ def type(interpolated_value, type), do: doc! [interpolated_value, type] defp doc!(_) do raise "the functions in Ecto.Query.API should not be invoked directly, " <> "they serve for documentation purposes only" end end
29.961977
91
0.663388
083e43ba7640126f57768abc73d92a117e93e631
283
ex
Elixir
lib/fb_cryptocurrency_chatbot.ex
mohammedmazharuddin/fb_cryptocurrency_chatbot
0f4f445edf810638f30ca77dc288fca4d5428719
[ "MIT" ]
null
null
null
lib/fb_cryptocurrency_chatbot.ex
mohammedmazharuddin/fb_cryptocurrency_chatbot
0f4f445edf810638f30ca77dc288fca4d5428719
[ "MIT" ]
null
null
null
lib/fb_cryptocurrency_chatbot.ex
mohammedmazharuddin/fb_cryptocurrency_chatbot
0f4f445edf810638f30ca77dc288fca4d5428719
[ "MIT" ]
null
null
null
defmodule FbCryptocurrencyChatbot do @moduledoc """ FbCryptocurrencyChatbot keeps the contexts that define your domain and business logic. Contexts are also responsible for managing your data, regardless if it comes from the database, an external API or others. """ end
28.3
68
0.780919
083e4945de4333afb752974e738f266e763ec62e
253
ex
Elixir
programming_elixir/chapter14/spawn1.ex
hectoregm/elixir
c4dc9cd327c6d935de93337e5c52d58b82c4d339
[ "MIT" ]
null
null
null
programming_elixir/chapter14/spawn1.ex
hectoregm/elixir
c4dc9cd327c6d935de93337e5c52d58b82c4d339
[ "MIT" ]
null
null
null
programming_elixir/chapter14/spawn1.ex
hectoregm/elixir
c4dc9cd327c6d935de93337e5c52d58b82c4d339
[ "MIT" ]
null
null
null
defmodule Spawn1 do def greet do receive do {sender, msg} -> send sender, { :ok, "Hello, #{msg}" } end end end pid = spawn(Spawn1, :greet, []) send pid, {self, "World!"} receive do {:ok, message} -> IO.puts message end
14.882353
45
0.565217
083e49b5dfc68e8d15c615710a6115c23e1540c0
2,775
exs
Elixir
mix.exs
paradox460/earmark
24c516daf264a4dced1424777c60d9f1620f11bb
[ "Apache-1.1" ]
null
null
null
mix.exs
paradox460/earmark
24c516daf264a4dced1424777c60d9f1620f11bb
[ "Apache-1.1" ]
null
null
null
mix.exs
paradox460/earmark
24c516daf264a4dced1424777c60d9f1620f11bb
[ "Apache-1.1" ]
null
null
null
defmodule Earmark.Mixfile do use Mix.Project @version "1.4.14" @url "https://github.com/pragdave/earmark" @deps [ {:earmark_parser, ">= 1.4.12"}, {:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false}, {:benchfella, "~> 0.3.0", only: [:dev]}, {:earmark_ast_dsl, "~> 0.2.5", only: [:test]}, {:excoveralls, "~> 0.11.2", only: [:test]}, {:floki, "~> 0.21", only: [:dev, :test]}, {:traverse, "~> 1.0.1", only: [:dev, :test]} ] @description """ Earmark is a pure-Elixir Markdown converter. It is intended to be used as a library (just call Earmark.as_html), but can also be used as a command-line tool (run mix escript.build first). Output generation is pluggable. """ ############################################################ def project do [ app: :earmark, version: @version, elixir: "~> 1.10", elixirc_paths: elixirc_paths(Mix.env()), escript: escript_config(), deps: @deps, description: @description, package: package(), preferred_cli_env: [ coveralls: :test, "coveralls.detail": :test, "coveralls.post": :test, "coveralls.html": :test ], test_coverage: [tool: ExCoveralls], aliases: [docs: &build_docs/1, readme: &readme/1] ] end defp package do [ files: [ "lib", "mix.exs", "README.md" ], maintainers: [ "Robert Dober <robert.dober@gmail.com>", "Dave Thomas <dave@pragdave.me>" ], licenses: [ "Apache-2.0" ], links: %{ "GitHub" => "https://github.com/pragdave/earmark" } ] end defp escript_config do [main_module: Earmark.CLI] end defp elixirc_paths(:test), do: ["lib", "test/support", "dev"] defp elixirc_paths(:dev), do: ["lib", "bench", "dev"] defp elixirc_paths(_), do: ["lib"] @prerequisites """ run `mix escript.install hex ex_doc` and adjust `PATH` accordingly """ defp build_docs(_) do Mix.Task.run("compile") ex_doc = Path.join(Mix.path_for(:escripts), "ex_doc") Mix.shell().info("Using escript: #{ex_doc} to build the docs") unless File.exists?(ex_doc) do raise "cannot build docs because escript for ex_doc is not installed, make sure to \n#{@prerequisites}" end args = 
["Earmark", @version, Mix.Project.compile_path()] opts = ~w[--main Earmark --source-ref v#{@version} --source-url #{@url}] Mix.shell().info("Running: #{ex_doc} #{inspect(args ++ opts)}") System.cmd(ex_doc, args ++ opts) Mix.shell().info("Docs built successfully") end defp readme(args) do Code.require_file("tasks/readme.exs") Mix.Tasks.Readme.run(args) end end # SPDX-License-Identifier: Apache-2.0
25.934579
109
0.575495
083e4b90c28a15dbcaeba80df6912e35abe636b3
1,556
ex
Elixir
lib/epi_locator_web/views/error_helpers.ex
RatioPBC/epi-locator
58c90500c4e0071ce365d76ec9812f9051d6a9f9
[ "Apache-2.0" ]
null
null
null
lib/epi_locator_web/views/error_helpers.ex
RatioPBC/epi-locator
58c90500c4e0071ce365d76ec9812f9051d6a9f9
[ "Apache-2.0" ]
6
2021-10-19T01:55:57.000Z
2022-02-15T01:04:19.000Z
lib/epi_locator_web/views/error_helpers.ex
RatioPBC/epi-locator
58c90500c4e0071ce365d76ec9812f9051d6a9f9
[ "Apache-2.0" ]
2
2022-01-21T08:38:50.000Z
2022-01-21T08:42:04.000Z
defmodule EpiLocatorWeb.ErrorHelpers do @moduledoc """ Conveniences for translating and building error messages. """ use Phoenix.HTML @doc """ Generates tag for inlined form input errors. """ def error_tag(form, field) do Enum.map(Keyword.get_values(form.errors, field), fn error -> content_tag(:span, translate_error(error), class: "invalid-feedback", phx_feedback_for: input_id(form, field) ) end) end @doc """ Translates an error message using gettext. """ def translate_error({msg, opts}) do # When using gettext, we typically pass the strings we want # to translate as a static argument: # # # Translate "is invalid" in the "errors" domain # dgettext("errors", "is invalid") # # # Translate the number of files with plural rules # dngettext("errors", "1 file", "%{count} files", count) # # Because the error messages we show in our forms and APIs # are defined inside Ecto, we need to translate them dynamically. # This requires us to call the Gettext module passing our gettext # backend as first argument. # # Note we use the "errors" domain, which means translations # should be written to the errors.po file. The :count option is # set by Ecto and indicates we should also apply plural rules. if count = opts[:count] do Gettext.dngettext(EpiLocatorWeb.Gettext, "errors", msg, msg, count, opts) else Gettext.dgettext(EpiLocatorWeb.Gettext, "errors", msg, opts) end end end
32.416667
79
0.667095
083e4d20cefd41773b0b083b7b930f2615acbbae
2,719
ex
Elixir
clients/games_configuration/lib/google_api/games_configuration/v1configuration/model/leaderboard_configuration_detail.ex
mocknen/elixir-google-api
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
[ "Apache-2.0" ]
null
null
null
clients/games_configuration/lib/google_api/games_configuration/v1configuration/model/leaderboard_configuration_detail.ex
mocknen/elixir-google-api
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
[ "Apache-2.0" ]
null
null
null
clients/games_configuration/lib/google_api/games_configuration/v1configuration/model/leaderboard_configuration_detail.ex
mocknen/elixir-google-api
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
[ "Apache-2.0" ]
null
null
null
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfigurationDetail do @moduledoc """ This is a JSON template for a leaderboard configuration detail. ## Attributes - iconUrl (String.t): The icon url of this leaderboard. Writes to this field are ignored. Defaults to: `null`. - kind (String.t): Uniquely identifies the type of this resource. Value is always the fixed string gamesConfiguration#leaderboardConfigurationDetail. Defaults to: `null`. - name (LocalizedStringBundle): Localized strings for the leaderboard name. Defaults to: `null`. - scoreFormat (GamesNumberFormatConfiguration): The score formatting for the leaderboard. Defaults to: `null`. - sortRank (integer()): The sort rank of this leaderboard. Writes to this field are ignored. Defaults to: `null`. 
""" use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :iconUrl => any(), :kind => any(), :name => GoogleApi.GamesConfiguration.V1configuration.Model.LocalizedStringBundle.t(), :scoreFormat => GoogleApi.GamesConfiguration.V1configuration.Model.GamesNumberFormatConfiguration.t(), :sortRank => any() } field(:iconUrl) field(:kind) field(:name, as: GoogleApi.GamesConfiguration.V1configuration.Model.LocalizedStringBundle) field( :scoreFormat, as: GoogleApi.GamesConfiguration.V1configuration.Model.GamesNumberFormatConfiguration ) field(:sortRank) end defimpl Poison.Decoder, for: GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfigurationDetail do def decode(value, options) do GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfigurationDetail.decode( value, options ) end end defimpl Poison.Encoder, for: GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfigurationDetail do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
38.295775
172
0.75285
083e731f2a983d231b50d1201b40426faa7b0e63
745
exs
Elixir
config/test.exs
fossabot/crimson_commerce
c23c9ec8d0deedccd0e98e2e6adf634f96ec27ab
[ "MIT" ]
64
2019-10-18T02:53:22.000Z
2021-11-24T13:10:29.000Z
config/test.exs
soediro/crimson_commerce
a6f260dd68afb4cd497a14dd33b5075830d5f84a
[ "MIT" ]
334
2019-10-15T22:14:20.000Z
2022-03-31T06:07:26.000Z
config/test.exs
soediro/crimson_commerce
a6f260dd68afb4cd497a14dd33b5075830d5f84a
[ "MIT" ]
12
2019-10-18T02:53:29.000Z
2021-11-16T01:08:46.000Z
use Mix.Config # Configure your database # # The MIX_TEST_PARTITION environment variable can be used # to provide built-in test partitioning in CI environment. # Run `mix help test` for more information. config :crimson_commerce, CrimsonCommerce.Repo, username: "postgres", password: "postgres", database: "crimson_commerce_test#{System.get_env("MIX_TEST_PARTITION")}", port: System.get_env("POSTGRES_PORT"), hostname: "localhost", pool: Ecto.Adapters.SQL.Sandbox # We don't run a server during test. If one is required, # you can enable the server option below. config :crimson_commerce, CrimsonCommerceWeb.Endpoint, http: [port: 4002], server: false # Print only warnings and errors during test config :logger, level: :warn
31.041667
75
0.762416
083e981be048a9756c41e6f9e91fac4114682767
12,030
ex
Elixir
lib/absinthe_error_payload/payload.ex
chubarovNick/absinthe_error_payload
7a906bb11370ac65d69293d6eca4546ddc7da9c5
[ "MIT", "BSD-3-Clause" ]
null
null
null
lib/absinthe_error_payload/payload.ex
chubarovNick/absinthe_error_payload
7a906bb11370ac65d69293d6eca4546ddc7da9c5
[ "MIT", "BSD-3-Clause" ]
null
null
null
lib/absinthe_error_payload/payload.ex
chubarovNick/absinthe_error_payload
7a906bb11370ac65d69293d6eca4546ddc7da9c5
[ "MIT", "BSD-3-Clause" ]
null
null
null
defmodule AbsintheErrorPayload.Payload do @moduledoc """ Absinthe Middleware to build a mutation payload response. AbsintheErrorPayload mutation responses (aka "payloads") have three fields - `successful` - Indicates if the mutation completed successfully or not. Boolean. - `messages` - a list of validation errors. Always empty on success - `result` - the data object that was created/updated/deleted on success. Always nil when unsuccesful ## Usage In your schema file 1. `import AbsintheErrorPayload.Payload` 2. `import_types AbsintheErrorPayload.ValidationMessageTypes` 3. create a payload object for each object using `payload_object(payload_name, object_name)` 4. create a mutation that returns the payload object. Add the payload middleware after the resolver. ``` field :create_user, type: :user_payload, description: "add a user" do arg :user, :create_user_params resolve &UserResolver.create/2 middleware &build_payload/2 end ``` ## Example Schema Object Schema: ```elixir defmodule MyApp.Schema.User do @moduledoc false use Absinthe.Schema.Notation import AbsintheErrorPayload.Payload import_types AbsintheErrorPayload.ValidationMessageTypes alias MyApp.Resolvers.User, as: UserResolver object :user, description: "Someone on our planet" do field :id, non_null(:id), description: "unique identifier" field :first_name, non_null(:string), description: "User's first name" field :last_name, :string, description: "Optional Last Name" field :age, :integer, description: "Age in Earth years" field :inserted_at, :time, description: "Created at" field :updated_at, :time, description: "Last updated at" end input_object :create_user_params, description: "create a user" do field :first_name, non_null(:string), description: "Required first name" field :last_name, :string, description: "Optional last name" field :age, :integer, description: "Age in Earth years" end payload_object(:user_payload, :user) object :user_mutations do field :create_user, type: :user_payload, description: "Create 
a new user" do arg :user, :create_user_params resolve &UserResolver.create/2 middleware &build_payload/2 end end ``` In your main schema file ``` import_types MyApp.Schema.User mutation do ... import_fields :user_mutations end ``` ## Alternate Use If you'd prefer not to use the middleware style, you can generate AbsintheErrorPayload payloads in your resolver instead. See `success_payload/1` and `error_payload/1` for examples. """ @enforce_keys [:successful] defstruct successful: nil, messages: [], result: nil use Absinthe.Schema.Notation import AbsintheErrorPayload.ChangesetParser alias __MODULE__ alias AbsintheErrorPayload.ValidationMessage @doc """ Create a payload object definition Each object that can be mutated will need its own graphql response object in order to return typed responses. This is a helper method to generate a custom payload object ## Usage payload_object(:user_payload, :user) is the equivalent of ```elixir object :user_payload do field :successful, non_null(:boolean), description: "Indicates if the mutation completed successfully or not. " field :messages, list_of(:validation_message), description: "A list of failed validations. May be blank or null if mutation succeeded." field :result, :user, description: "The object created/updated/deleted by the mutation" end ``` This method must be called after `import_types AbsintheErrorPayload.MutationTypes` or it will fail due to `:validation_message` not being defined. """ defmacro payload_object(payload_name, result_object_name) do quote location: :keep do object unquote(payload_name) do field(:successful, non_null(:boolean), description: "Indicates if the mutation completed successfully or not. ") field(:messages, list_of(:validation_message), description: "A list of failed validations. May be blank or null if mutation succeeded.") field(:result, unquote(result_object_name), description: "The object created/updated/deleted by the mutation. 
May be null if mutation failed.") end end end @doc """ Convert a resolution value to a mutation payload To be used as middleware by Absinthe.Graphql. It should be placed immediatly after the resolver. The middleware will automatically transform an invalid changeset into validation errors. Your resolver could then look like: ```elixir @doc " Creates a new user Results are wrapped in a result monad as expected by absinthe. " def create(%{user: attrs}, _resolution) do case UserContext.create_user(attrs) do {:ok, user} -> {:ok, user} {:error, %Ecto.Changeset{} = changeset} -> {:ok, changeset} end end ``` The build payload middleware will also accept error tuples with single or lists of `AbsintheErrorPayload.ValidationMessage` or string errors. However, lists and strings will need to be wrapped in an :ok tuple or they will be seen as errors by graphql. An example resolver could look like: ``` @doc " updates an existing user. Results are wrapped in a result monad as expected by absinthe. " def update(%{id: id, user: attrs}, _resolution) do case UserContext.get_user(id) do nil -> {:ok, %ValidationMessage{field: :id, code: "not found", message: "does not exist"}} user -> do_update_user(user, attrs) end end defp do_update_user(user, attrs) do case UserContext.update_user(user, attrs) do {:ok, user} -> {:ok, user} {:error, %Ecto.Changeset{} = changeset} -> {:ok, changeset} end end ``` Valid formats are: ``` %ValidationMessage{} {:error, %ValidationMessage{}} {:error, [%ValidationMessage{},%ValidationMessage{}]} {:error, "This is an error"} {:error, ["This is an error", "This is another error"]} ``` ## Alternate Use If you'd prefer not to use the middleware style, you can generate AbsintheErrorPayload payloads in your resolver instead. See `convert_to_payload/1`, `success_payload/1` and `error_payload/1` for examples. 
""" def build_payload(%{value: value, errors: []} = resolution, _config) do result = convert_to_payload(value) Absinthe.Resolution.put_result(resolution, {:ok, result}) end @doc """ Convert resolution errors to a mutation payload The build payload middleware will accept lists of `AbsintheErrorPayload.ValidationMessage` or string errors. Valid formats are: ``` [%ValidationMessage{},%ValidationMessage{}] "This is an error" ["This is an error", "This is another error"] ``` """ def build_payload(%{errors: errors} = resolution, _config) do result = convert_to_payload({:error, errors}) Absinthe.Resolution.put_result(resolution, {:ok, result}) end @doc """ Direct converter from value to a `Payload` struct. This function will automatically transform an invalid changeset into validation errors. Changesets, error tuples and lists of `AbsintheErrorPayload.ValidationMessage` will be identified as errors and will generate an error payload. Error formats are: ``` %Ecto.Changeset{valid?: false} %ValidationMessage{} {:error, %ValidationMessage{}} {:error, [%ValidationMessage{},%ValidationMessage{}]} {:error, "This is an error"} {:error, ["This is an error", "This is another error"]} ``` All other values will be converted to a success payload. or string errors. However, lists and strings will need to be wrapped in an :ok tuple or they will be seen as errors by graphql. An example use could look like: ``` @doc " Load a user matching an id Results are wrapped in a result monad as expected by absinthe. 
" def get_user(%{id: id}, _resolution) do case UserContext.get_user(id) do nil -> %ValidationMessage{field: :id, code: "not found", message: "does not exist"}} user -> user end |> AbsintheErrorPayload.Payload.convert_to_payload() end """ def convert_to_payload({:error, %ValidationMessage{} = message}) do error_payload(message) end def convert_to_payload(%ValidationMessage{} = message) do error_payload(message) end def convert_to_payload({:error, message}) when is_binary(message) do message |> generic_validation_message() |> error_payload() end def convert_to_payload({:error, list}) when is_list(list), do: error_payload(list) def convert_to_payload(%Ecto.Changeset{valid?: false} = changeset) do changeset |> extract_messages() |> error_payload() end def convert_to_payload(value), do: success_payload(value) @doc """ Generates a mutation error payload. ## Examples iex> error_payload(%ValidationMessage{code: "required", field: "name"}) %Payload{successful: false, messages: [%ValidationMessage{code: "required", field: "name"}]} iex> error_payload([%ValidationMessage{code: "required", field: "name"}]) %Payload{successful: false, messages: [%ValidationMessage{code: "required", field: "name"}]} ## Usage If you prefer not to use the Payload.middleware, you can use this method in your resolvers instead. ```elixir @doc " updates an existing user. Results are wrapped in a result monad as expected by absinthe. 
" def update(%{id: id, user: attrs}, _resolution) do case UserContext.get_user(id) do nil -> {:ok, error_payload([%ValidationMessage{field: :id, code: "not found", message: "does not exist"}])} user -> do_update_user(user, attrs) end end defp do_update_user(user, attrs) do case UserContext.update_user(user, attrs) do {:ok, user} -> {:ok, success_payload(user)} {:error, %Ecto.Changeset{} = changeset} -> {:ok, error_payload(changeset)} end end ``` """ def error_payload(%ValidationMessage{} = message), do: error_payload([message]) def error_payload(messages) when is_list(messages) do messages = Enum.map(messages, &prepare_message/1) %Payload{successful: false, messages: messages} end @doc "convert validation message field to camelCase format used by graphQL" def convert_field_name(%ValidationMessage{} = message) do field = cond do message.field == nil -> camelized_name(message.key) message.key == nil -> camelized_name(message.field) true -> camelized_name(message.field) end %{message | field: field, key: field} end defp camelized_name(nil), do: nil defp camelized_name(field) do field |> to_string() |> Absinthe.Utils.camelize(lower: true) end defp prepare_message(%ValidationMessage{} = message) do convert_field_name(message) end defp prepare_message(message) when is_binary(message) do generic_validation_message(message) end defp prepare_message(message) do raise ArgumentError, "Unexpected validation message: #{inspect(message)}" end @doc """ Generates a success payload. ## Examples iex> success_payload(%User{first_name: "Stich", last_name: "Pelekai", id: 626}) %Payload{successful: true, result: %User{first_name: "Stich", last_name: "Pelekai", id: 626}} ## Usage If you prefer not to use the `build_payload/2` middleware, you can use this method in your resolvers instead. ```elixir @doc " Creates a new user Results are wrapped in a result monad as expected by absinthe. 
" def create(%{user: attrs}, _resolution) do case UserContext.create_user(attrs) do {:ok, user} -> {:ok, success_payload(user)} {:error, %Ecto.Changeset{} = changeset} -> {:ok, error_payload(changeset)} end end ``` """ def success_payload(result) do %Payload{successful: true, result: result} end defp generic_validation_message(message) do %ValidationMessage{ code: :unknown, field: nil, template: message, message: message, options: [] } end end
30.302267
151
0.706484
083eb94cea3e4c810775e1c406fd0c5af1727712
200
exs
Elixir
programming_elixir/default_params2.exs
enilsen16/elixir
b4d1d45858a25e4beb39e07de8685f3d93d6a520
[ "MIT" ]
null
null
null
programming_elixir/default_params2.exs
enilsen16/elixir
b4d1d45858a25e4beb39e07de8685f3d93d6a520
[ "MIT" ]
null
null
null
programming_elixir/default_params2.exs
enilsen16/elixir
b4d1d45858a25e4beb39e07de8685f3d93d6a520
[ "MIT" ]
null
null
null
defmodule DefaultParams2 do def func(p1, p2 \\ 123) def func(p1, p2) when is_list(p1) do "You said #{p2} with a list" end def func(p1, p2) do "You passed in #{p1} and #{p2}" end end
20
38
0.615
083ee524f15214affa3ddda749ce1818afb1c196
4,717
ex
Elixir
lib/oli_web/controllers/registration_controller.ex
DevShashi1993/oli-torus
e6e0b66f0973f9790a5785731b22db6fb1c50a73
[ "MIT" ]
45
2020-04-17T15:40:27.000Z
2022-03-25T00:13:30.000Z
lib/oli_web/controllers/registration_controller.ex
DevShashi1993/oli-torus
e6e0b66f0973f9790a5785731b22db6fb1c50a73
[ "MIT" ]
944
2020-02-13T02:37:01.000Z
2022-03-31T17:50:07.000Z
lib/oli_web/controllers/registration_controller.ex
DevShashi1993/oli-torus
e6e0b66f0973f9790a5785731b22db6fb1c50a73
[ "MIT" ]
23
2020-07-28T03:36:13.000Z
2022-03-17T14:29:02.000Z
defmodule OliWeb.RegistrationController do use OliWeb, :controller alias Oli.Institutions alias Oli.Lti_1p3.Tool.Registration alias Oli.Branding alias OliWeb.Common.{Breadcrumb} def root_breadcrumbs(institution_id, action, name) do OliWeb.InstitutionController.root_breadcrumbs() ++ [ Breadcrumb.new(%{ full_title: "Registrations", link: Routes.institution_registration_path(OliWeb.Endpoint, action, institution_id) }), Breadcrumb.new(%{ full_title: name }) ] end def edit_breadcrumbs(institution_id, id) do OliWeb.InstitutionController.root_breadcrumbs() ++ [ Breadcrumb.new(%{ full_title: "Registrations", link: Routes.institution_registration_path(OliWeb.Endpoint, :edit, institution_id, id) }), Breadcrumb.new(%{ full_title: "Edit" }) ] end def available_brands(institution_id) do institution = Institutions.get_institution!(institution_id) available_brands = Branding.list_available_brands(institution_id) institution_brands = available_brands |> Enum.filter(fn b -> b.institution_id != nil end) |> Enum.map(fn brand -> {brand.name, brand.id} end) other_brands = available_brands |> Enum.filter(fn b -> b.institution_id == nil end) |> Enum.map(fn brand -> {brand.name, brand.id} end) [] |> Enum.concat( if Enum.count(institution_brands) > 0, do: ["#{institution.name} Brands": institution_brands], else: [] ) |> Enum.concat(if Enum.count(other_brands) > 0, do: ["Other Brands": other_brands], else: []) end def new(conn, %{"institution_id" => institution_id}) do changeset = Institutions.change_registration(%Registration{institution_id: institution_id}) render(conn, "new.html", changeset: changeset, breadcrumbs: root_breadcrumbs(institution_id, :create, "New"), institution_id: institution_id, available_brands: available_brands(institution_id), title: "Create Registration" ) end def create(conn, %{"institution_id" => institution_id, "registration" => registration_params}) do {:ok, active_jwk} = Lti_1p3.get_active_jwk() registration_params = registration_params |> 
Map.put("institution_id", institution_id) |> Map.put("tool_jwk_id", active_jwk.id) case Institutions.create_registration(registration_params) do {:ok, _registration} -> conn |> put_flash(:info, "Registration created successfully.") |> redirect(to: Routes.institution_path(conn, :show, institution_id)) {:error, %Ecto.Changeset{} = changeset} -> render(conn, "new.html", changeset: changeset, breadcrumbs: root_breadcrumbs(institution_id, :create, "New"), institution_id: institution_id, available_brands: available_brands(institution_id), title: "Create Registration" ) end end def edit(conn, %{"institution_id" => institution_id, "id" => id}) do registration = Institutions.get_registration_preloaded!(id) changeset = Institutions.change_registration(registration) render(conn, "edit.html", registration: registration, breadcrumbs: edit_breadcrumbs(institution_id, id), changeset: changeset, institution_id: institution_id, available_brands: available_brands(institution_id), title: "Edit Registration" ) end def update(conn, %{ "institution_id" => institution_id, "id" => id, "registration" => registration_params }) do registration = Institutions.get_registration!(id) case Institutions.update_registration(registration, registration_params) do {:ok, _registration} -> conn |> put_flash(:info, "Registration updated successfully.") |> redirect(to: Routes.institution_path(conn, :show, institution_id)) {:error, %Ecto.Changeset{} = changeset} -> render(conn, "edit.html", breadcrumbs: edit_breadcrumbs(institution_id, id), registration: registration, changeset: changeset, institution_id: institution_id, available_brands: available_brands(institution_id), title: "Edit Registration" ) end end def delete(conn, %{"institution_id" => institution_id, "id" => id}) do registration = Institutions.get_registration!(id) {:ok, _registration} = Institutions.delete_registration(registration) conn |> put_flash(:info, "Registration deleted successfully.") |> redirect(to: Routes.institution_path(conn, 
:show, institution_id)) end end
32.986014
99
0.664193
083f180147081b6852f051d0f5a4100e45edaa4f
1,607
ex
Elixir
clients/alert_center/lib/google_api/alert_center/v1beta1/model/resource_info.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/alert_center/lib/google_api/alert_center/v1beta1/model/resource_info.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/alert_center/lib/google_api/alert_center/v1beta1/model/resource_info.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.AlertCenter.V1beta1.Model.ResourceInfo do @moduledoc """ Proto that contains resource information. ## Attributes * `documentId` (*type:* `String.t`, *default:* `nil`) - Drive file ID. * `resourceTitle` (*type:* `String.t`, *default:* `nil`) - Title of the resource, for example email subject, or document title. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :documentId => String.t(), :resourceTitle => String.t() } field(:documentId) field(:resourceTitle) end defimpl Poison.Decoder, for: GoogleApi.AlertCenter.V1beta1.Model.ResourceInfo do def decode(value, options) do GoogleApi.AlertCenter.V1beta1.Model.ResourceInfo.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.AlertCenter.V1beta1.Model.ResourceInfo do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
32.14
131
0.728687
083f18dbe7ffd3b3be03f7333bd7b9026aaeaf19
86
exs
Elixir
test/bitstring_web/views/layout_view_test.exs
the-hills/www.bitstring.xyz
fde44f0812634614bf166ef20505bdaa15a2feb4
[ "MIT" ]
1
2017-09-22T14:53:03.000Z
2017-09-22T14:53:03.000Z
test/bitstring_web/views/layout_view_test.exs
the-hills/www.bitstring.xyz
fde44f0812634614bf166ef20505bdaa15a2feb4
[ "MIT" ]
null
null
null
test/bitstring_web/views/layout_view_test.exs
the-hills/www.bitstring.xyz
fde44f0812634614bf166ef20505bdaa15a2feb4
[ "MIT" ]
null
null
null
defmodule BitstringWeb.LayoutViewTest do use BitstringWeb.ConnCase, async: true end
21.5
40
0.837209
083f19a8c2c6c3fd499b1a3de8e67ba04b51b5b0
3,103
exs
Elixir
exercises/isbn-verifier/test/isbn_verifier_test.exs
DuoPan/elixir
e96388f242c383c1f45935570ed2f42394171fc6
[ "MIT" ]
2
2019-07-09T05:23:38.000Z
2019-07-29T01:39:59.000Z
exercises/isbn-verifier/test/isbn_verifier_test.exs
DuoPan/elixir
e96388f242c383c1f45935570ed2f42394171fc6
[ "MIT" ]
null
null
null
exercises/isbn-verifier/test/isbn_verifier_test.exs
DuoPan/elixir
e96388f242c383c1f45935570ed2f42394171fc6
[ "MIT" ]
null
null
null
defmodule IsbnVerifierTest do use ExUnit.Case # @tag :pending test "valid isbn number" do assert IsbnVerifier.isbn?("3-598-21508-8") end @tag :pending test "invalid isbn check digit" do refute IsbnVerifier.isbn?("3-598-21508-9") end @tag :pending test "valid isbn number with a check digit of 10" do assert IsbnVerifier.isbn?("3-598-21507-X") end @tag :pending test "check digit is a character other than X" do refute IsbnVerifier.isbn?("3-598-21507-A") end @tag :pending test "invalid character in isbn" do refute IsbnVerifier.isbn?("3-598-2K507-0") end @tag :pending test "X is only valid as a check digit" do refute IsbnVerifier.isbn?("3-598-2X507-0") end @tag :pending test "valid isbn without separating dashes" do assert IsbnVerifier.isbn?("3598215088") end @tag :pending test "isbn without separating dashes and X as check digit" do assert IsbnVerifier.isbn?("359821507X") end @tag :pending test "isbn without check digit and dashes" do refute IsbnVerifier.isbn?("359821507") end @tag :pending test "too long isbn and no dashes" do refute IsbnVerifier.isbn?("3598215078X") end @tag :pending test "isbn without check digit" do refute IsbnVerifier.isbn?("3-598-21507") end @tag :pending test "too long isbn" do refute IsbnVerifier.isbn?("3-598-21507-XA") end @tag :pending test "check digit of X should not be used for 0" do refute IsbnVerifier.isbn?("3-598-21515-X") end # Test cases from international ISBN to test variable dash placement # Adapted from https://en.wikipedia.org/wiki/International_Standard_Book_Number#Registrant_element @tag :pending test "Qatar NCCAH, Doha" do assert IsbnVerifier.isbn?("99921-58-10-7") end @tag :pending test "Singapore World Scientific" do assert IsbnVerifier.isbn?("9971-5-0210-0") end @tag :pending test "Greece Sigma Publications" do assert IsbnVerifier.isbn?("960-425-059-0") end @tag :pending test "Czech Republic; Slovakia Taita Publishers" do assert IsbnVerifier.isbn?("80-902734-1-6") end @tag :pending test "Brazil Companhia das Letras" do 
assert IsbnVerifier.isbn?("85-359-0277-5") end @tag :pending test "English-speaking area Simon Wallenberg Press" do assert IsbnVerifier.isbn?("1-84356-028-3") end @tag :pending test "English-speaking area Scribner" do assert IsbnVerifier.isbn?("0-684-84328-5") end @tag :pending test "English-speaking area Frederick Ungar" do assert IsbnVerifier.isbn?("0-8044-2957-X") end @tag :pending test "English-speaking area J. A. Allen & Co." do assert IsbnVerifier.isbn?("0-85131-041-9") end @tag :pending test "English-speaking area Edupedia Publications Pvt Ltd." do assert IsbnVerifier.isbn?("93-86954-21-4") end @tag :pending test "English-speaking area Willmann-Bell" do assert IsbnVerifier.isbn?("0-943396-04-2") end @tag :pending test "English-speaking area KT Publishing" do assert IsbnVerifier.isbn?("0-9752298-0-X") end end
23.507576
100
0.694811
083f1a70cf938179ba673e2b9dfdc5f948452fc1
3,868
exs
Elixir
test/phoenix/live_dashboard/pages/ports_page_test.exs
RomanKotov/phoenix_live_dashboard
439283fa625f5af876e01eb5edcb20aec7f3b2da
[ "MIT" ]
1
2020-11-04T16:18:16.000Z
2020-11-04T16:18:16.000Z
test/phoenix/live_dashboard/pages/ports_page_test.exs
RomanKotov/phoenix_live_dashboard
439283fa625f5af876e01eb5edcb20aec7f3b2da
[ "MIT" ]
null
null
null
test/phoenix/live_dashboard/pages/ports_page_test.exs
RomanKotov/phoenix_live_dashboard
439283fa625f5af876e01eb5edcb20aec7f3b2da
[ "MIT" ]
null
null
null
defmodule Phoenix.LiveDashboard.PortsPageTest do use ExUnit.Case, async: true import Phoenix.ConnTest import Phoenix.LiveViewTest @endpoint Phoenix.LiveDashboardTest.Endpoint test "menu_link/2" do assert {:ok, "Ports"} = Phoenix.LiveDashboard.PortsPage.menu_link(nil, nil) end test "shows ports with limit" do {:ok, live, rendered} = live(build_conn(), "/dashboard/nonode@nohost/ports") assert rendered |> :binary.matches("</tr>") |> length() <= 100 rendered = render_patch(live, "/dashboard/nonode@nohost/ports?limit=2") assert rendered |> :binary.matches("</tr>") |> length() > 1 end test "search" do Port.open({:spawn, "sleep 5"}, [:binary]) {:ok, live, _} = live(build_conn(), ports_path(50, "", :input, :desc)) rendered = render(live) assert rendered =~ "forker" assert rendered =~ "sleep" assert rendered =~ "ports out of" refute rendered =~ "ports out of 1" refute rendered =~ "ports out of 0" assert rendered =~ ports_href(50, "", :input, :asc) {:ok, live, _} = live(build_conn(), ports_path(50, "sleep", :input, :desc)) rendered = render(live) assert rendered =~ "sleep" refute rendered =~ "forker" assert rendered =~ "ports out of 1" assert rendered =~ ports_href(50, "sleep", :input, :asc) refute rendered =~ ports_href(50, "forker", :input, :asc) {:ok, live, _} = live(build_conn(), ports_path(50, "forker", :input, :desc)) rendered = render(live) assert rendered =~ "forker" refute rendered =~ "sleep" assert rendered =~ "ports out of 1" assert rendered =~ ports_href(50, "forker", :input, :asc) refute rendered =~ ports_href(50, "sleep", :input, :asc) end test "order ports by output" do # We got already forker running as #Port<0.0> # And we need something thats on all systems and stays attached to the port sleep = Port.open({:spawn, "sleep 5"}, [:binary]) send(sleep, {self(), {:command, "increase output"}}) {:ok, live, _} = live(build_conn(), ports_path(50, "", :output, :asc)) rendered = render(live) assert rendered =~ ~r/forker.*sleep/s assert rendered =~ ports_href(50, "", 
:output, :desc) refute rendered =~ ports_href(50, "", :output, :asc) rendered = render_patch(live, "/dashboard/nonode@nohost/ports?limit=50&sort_dir=desc&sort_by=output") assert rendered =~ ~r/sleep.*forker/s refute rendered =~ ~r/forker.*sleep/s assert rendered =~ ports_href(50, "", :output, :asc) refute rendered =~ ports_href(50, "", :output, :desc) rendered = render_patch(live, "/dashboard/nonode@nohost/ports?limit=50&sort_dir=asc&sort_by=output") assert rendered =~ ~r/forker.*sleep/s refute rendered =~ ~r/sleep.*forker/s assert rendered =~ ports_href(50, "", :output, :desc) refute rendered =~ ports_href(50, "", :output, :asc) end test "shows port info modal" do {:ok, live, _} = live(build_conn(), port_info_path(hd(Port.list()), 50, :output, :asc)) rendered = render(live) assert rendered =~ ports_href(50, "", :output, :asc) assert rendered =~ "modal-content" assert rendered =~ ~r/Port Name.*forker/ refute live |> element("#modal .close") |> render_click() =~ "modal" return_path = ports_path(50, "", :output, :asc) assert_patch(live, return_path) end defp ports_href(limit, search, sort_by, sort_dir) do ~s|href="#{Plug.HTML.html_escape_to_iodata(ports_path(limit, search, sort_by, sort_dir))}"| end defp port_info_path(port, limit, sort_by, sort_dir) do ports_path(limit, "", sort_by, sort_dir) <> "&info=#{Phoenix.LiveDashboard.Helpers.encode_port(port)}" end defp ports_path(limit, search, sort_by, sort_dir) do "/dashboard/nonode%40nohost/ports?" <> "limit=#{limit}&search=#{search}&sort_by=#{sort_by}&sort_dir=#{sort_dir}" end end
36.490566
96
0.651241
083f31ebaf84a5e3da148c45325374bb811c1857
7,420
ex
Elixir
kousa/lib/data-layer/user_data.ex
joshrezende/dogehouse
2c77fa86fc0c625ded3d4b5d112fbdd22a3cdbfe
[ "MIT" ]
1
2021-02-26T19:55:41.000Z
2021-02-26T19:55:41.000Z
kousa/lib/data-layer/user_data.ex
agt25/dogehouse
6666028fa52b48f11bf64e60e859610dce2a3956
[ "MIT" ]
1
2022-02-27T19:40:57.000Z
2022-02-27T19:40:57.000Z
kousa/lib/data-layer/user_data.ex
agt25/dogehouse
6666028fa52b48f11bf64e60e859610dce2a3956
[ "MIT" ]
null
null
null
defmodule Kousa.Data.User do import Ecto.Query, warn: false alias Beef.{Repo, User} @fetch_limit 16 def edit_profile(user_id, data) do %User{id: user_id} |> User.edit_changeset(data) |> Repo.update() end def search(query, offset) do query_with_percent = "%" <> query <> "%" items = from(u in Beef.User, where: ilike(u.username, ^query_with_percent) or ilike(u.displayName, ^query_with_percent), left_join: cr in Beef.Room, on: u.currentRoomId == cr.id and cr.isPrivate == false, select: %{u | currentRoom: cr}, limit: @fetch_limit, offset: ^offset ) |> Beef.Repo.all() {Enum.slice(items, 0, -1 + @fetch_limit), if(length(items) == @fetch_limit, do: -1 + offset + @fetch_limit, else: nil)} end def bulk_insert(users) do Beef.Repo.insert_all( Beef.User, users, on_conflict: :nothing ) end def find_by_github_ids(ids) do from(u in Beef.User, where: u.githubId in ^ids, select: u.id) |> Beef.Repo.all() end def inc_num_following(user_id, n) do from(u in User, where: u.id == ^user_id, update: [ inc: [ numFollowing: ^n ] ] ) |> Repo.update_all([]) end def get_users_in_current_room(user_id) do case tuple_get_current_room_id(user_id) do {:ok, current_room_id} -> {current_room_id, from(u in Beef.User, where: u.currentRoomId == ^current_room_id, left_join: rp in Beef.RoomPermission, on: rp.userId == u.id and rp.roomId == u.currentRoomId, select: %{u | roomPermissions: rp} ) |> Beef.Repo.all()} _ -> {nil, []} end end def get_by_id(user_id) do Beef.Repo.get(Beef.User, user_id) end def get_by_username(username) do from(u in Beef.User, where: u.username == ^username, limit: 1 ) |> Beef.Repo.one() end def set_reason_for_ban(user_id, reason_for_ban) do from(u in User, where: u.id == ^user_id, update: [ set: [ reasonForBan: ^reason_for_ban ] ] ) |> Repo.update_all([]) end @spec get_by_id_with_current_room(any) :: any def get_by_id_with_current_room(user_id) do from(u in Beef.User, left_join: a0 in assoc(u, :currentRoom), where: u.id == ^user_id, limit: 1, preload: [ currentRoom: a0 ] ) |> 
Beef.Repo.one() end def set_online(user_id) do from(u in User, where: u.id == ^user_id, update: [ set: [ online: true ] ] ) |> Repo.update_all([]) end def set_user_left_current_room(user_id) do Kousa.RegUtils.lookup_and_cast(Kousa.Gen.UserSession, user_id, {:set_current_room_id, nil}) from(u in User, where: u.id == ^user_id, update: [ set: [ currentRoomId: nil ] ] ) |> Repo.update_all([]) end def set_offline(user_id) do from(u in User, where: u.id == ^user_id, update: [ set: [ online: false, lastOnline: fragment("now()") ] ] # select: u ) |> Repo.update_all([]) end def get_current_room(user_id) do room_id = get_current_room_id(user_id) case room_id do nil -> nil id -> Kousa.Data.Room.get_room_by_id(id) end end def tuple_get_current_room_id(user_id) do case Kousa.RegUtils.lookup_and_call( Kousa.Gen.UserSession, user_id, {:get_current_room_id} ) do {:ok, nil} -> {nil, nil} x -> x end end def get_current_room_id(user_id) do case Kousa.RegUtils.lookup_and_call( Kousa.Gen.UserSession, user_id, {:get_current_room_id} ) do {:ok, id} -> id _ -> nil end end def set_current_room(user_id, room_id, can_speak \\ false, returning \\ false) do roomPermissions = case can_speak do true -> case Kousa.Data.RoomPermission.set_is_speaker(user_id, room_id, true, true) do {:ok, x} -> x _ -> nil end _ -> Kousa.Data.RoomPermission.get(user_id, room_id) end Kousa.RegUtils.lookup_and_cast( Kousa.Gen.UserSession, user_id, {:set_current_room_id, room_id} ) q = from(u in Beef.User, where: u.id == ^user_id, update: [ set: [ currentRoomId: ^room_id ] ] ) q = if returning, do: select(q, [u], u), else: q case q |> Beef.Repo.update_all([]) do {_, [user]} -> %{user | roomPermissions: roomPermissions} _ -> nil end end def twitter_find_or_create(user) do db_user = from(u in Beef.User, where: (not is_nil(u.email) and u.email == ^user.email and u.email != "") or u.twitterId == ^user.twitterId, limit: 1 ) |> Repo.one() cond do db_user -> if is_nil(db_user.twitterId) do from(u in Beef.User, where: u.id == 
^db_user.id, update: [ set: [ twitterId: ^user.twitterId ] ] ) |> Repo.update_all([]) end {:find, db_user} true -> {:create, Repo.insert!( %User{ username: Kousa.Random.big_ascii_id(), email: if(user.email == "", do: nil, else: user.email), twitterId: user.twitterId, avatarUrl: user.avatarUrl, displayName: if(is_nil(user.displayName) or String.trim(user.displayName) == "", do: "Novice Doge", else: user.displayName ), bio: user.bio, hasLoggedIn: true }, returning: true )} end end def github_find_or_create(user, github_access_token) do githubId = Integer.to_string(user["id"]) db_user = from(u in Beef.User, where: u.githubId == ^githubId or (not is_nil(u.email) and u.email != "" and u.email == ^user["email"]), limit: 1 ) |> Repo.one() cond do db_user -> if is_nil(db_user.githubId) do from(u in Beef.User, where: u.id == ^db_user.id, update: [ set: [ githubId: ^githubId, githubAccessToken: ^github_access_token ] ] ) |> Repo.update_all([]) end {:find, db_user} true -> {:create, Repo.insert!( %User{ username: Kousa.Random.big_ascii_id(), githubId: githubId, email: if(user["email"] == "", do: nil, else: user["email"]), githubAccessToken: github_access_token, avatarUrl: user["avatar_url"], displayName: if(is_nil(user["name"]) or String.trim(user["name"]) == "", do: "Novice Doge", else: user["name"] ), bio: user["bio"], hasLoggedIn: true }, returning: true )} end end end
22.901235
95
0.515094
083f350168760c775ce10032315fa850bdb526eb
5,563
exs
Elixir
lib/elixir/test/elixir/code_test.exs
TurtleAI/elixir
2fb41ebef4d06315dd6c05ee00899572b27ee50a
[ "Apache-2.0" ]
null
null
null
lib/elixir/test/elixir/code_test.exs
TurtleAI/elixir
2fb41ebef4d06315dd6c05ee00899572b27ee50a
[ "Apache-2.0" ]
null
null
null
lib/elixir/test/elixir/code_test.exs
TurtleAI/elixir
2fb41ebef4d06315dd6c05ee00899572b27ee50a
[ "Apache-2.0" ]
null
null
null
Code.require_file "test_helper.exs", __DIR__ defmodule CodeTest do use ExUnit.Case, async: true doctest Code import PathHelpers def genmodule(name) do defmodule name do Kernel.LexicalTracker.remote_references(__MODULE__) end end contents = quote do defmodule CodeTest.Sample do def eval_quoted_info, do: {__MODULE__, __ENV__.file, __ENV__.line} end end Code.eval_quoted contents, [], file: "sample.ex", line: 13 test "eval string" do assert Code.eval_string("1 + 2") == {3, []} assert {3, _} = Code.eval_string("a + b", [a: 1, b: 2], Macro.Env.location(__ENV__)) end test "eval string with other context" do assert Code.eval_string("var!(a, Sample) = 1") == {1, [{{:a, Sample}, 1}]} end test "eval binary errors" do msg = "nofile:2: a binary field without size is only allowed at the end of a binary pattern" assert_raise CompileError, msg, fn -> Code.eval_string(""" foo = "foo" "\\"" <> bar <> "\\"" = foo """) end end test "eval with unnamed scopes" do assert {%RuntimeError{}, [a: %RuntimeError{}]} = Code.eval_string("a = (try do (raise \"hello\") rescue e -> e end)") end test "eval options" do assert Code.eval_string("is_atom(:foo) and K.is_list([])", [], functions: [{Kernel, [is_atom: 1]}], macros: [{Kernel, [..: 2, and: 2]}], aliases: [{K, Kernel}], requires: [Kernel]) == {true, []} end test "eval stacktrace" do try do Code.eval_string("<<a :: size(b)>>", a: :a, b: :b) rescue _ -> assert System.stacktrace |> Enum.any?(&(elem(&1, 0) == __MODULE__)) end end test "eval with requires" do assert Code.eval_string("Kernel.if true, do: :ok", [], requires: [Z, Kernel]) == {:ok, []} end test "eval quoted" do assert Code.eval_quoted(quote(do: 1 + 2)) == {3, []} assert CodeTest.Sample.eval_quoted_info() == {CodeTest.Sample, "sample.ex", 13} end test "eval quoted with env" do alias :lists, as: MyList assert Code.eval_quoted(quote(do: MyList.flatten [[1, 2, 3]]), [], __ENV__) == {[1, 2, 3], []} end test "eval file" do assert Code.eval_file(fixture_path("code_sample.exs")) == {3, [var: 
3]} assert_raise Code.LoadError, fn -> Code.eval_file("non_existent.exs") end end test "require" do Code.require_file fixture_path("code_sample.exs") assert fixture_path("code_sample.exs") in Code.loaded_files assert Code.require_file(fixture_path("code_sample.exs")) == nil Code.unload_files [fixture_path("code_sample.exs")] refute fixture_path("code_sample.exs") in Code.loaded_files assert Code.require_file(fixture_path("code_sample.exs")) != nil end test "string to quoted" do assert Code.string_to_quoted("1 + 2") == {:ok, {:+, [line: 1], [1, 2]}} assert Code.string_to_quoted!("1 + 2") == {:+, [line: 1], [1, 2]} assert Code.string_to_quoted("a.1") == {:error, {1, "syntax error before: ", "1"}} assert_raise SyntaxError, fn -> Code.string_to_quoted!("a.1") end end test "string to quoted existing atoms only" do assert :badarg = catch_error(Code.string_to_quoted!(":there_is_no_such_atom", existing_atoms_only: true)) end test "string_to_quoted!" do assert Code.string_to_quoted!("1 + 2") == {:+, [line: 1], [1, 2]} assert_raise SyntaxError, fn -> Code.string_to_quoted!("a.1") end assert_raise TokenMissingError, fn -> Code.string_to_quoted!("1 +") end end test "compile source" do assert __MODULE__.__info__(:compile)[:source] == String.to_charlist(__ENV__.file) end test "compile info returned with source accessible through keyword module" do compile = __MODULE__.__info__(:compile) assert Keyword.get(compile, :source) != nil end test "compile string works accross lexical scopes" do assert [{CompileCrossSample, _}] = Code.compile_string("CodeTest.genmodule CompileCrossSample") after :code.purge CompileCrossSample :code.delete CompileCrossSample end test "compile string" do assert [{CompileStringSample, _}] = Code.compile_string("defmodule CompileStringSample, do: :ok") after :code.purge CompileSimpleSample :code.delete CompileSimpleSample end test "compile quoted" do assert [{CompileQuotedSample, _}] = Code.compile_string("defmodule CompileQuotedSample, do: :ok") after 
:code.purge CompileQuotedSample :code.delete CompileQuotedSample end test "ensure_loaded?" do assert Code.ensure_loaded?(__MODULE__) refute Code.ensure_loaded?(Code.NoFile) end test "ensure_compiled?" do assert Code.ensure_compiled?(__MODULE__) refute Code.ensure_compiled?(Code.NoFile) end test "compiler_options/1 validates options" do message = "unknown compiler option: :not_a_valid_option" assert_raise RuntimeError, message, fn -> Code.compiler_options(not_a_valid_option: :foo) end message = "compiler option :debug_info should be a boolean, got: :not_a_boolean" assert_raise RuntimeError, message, fn -> Code.compiler_options(debug_info: :not_a_boolean) end end end defmodule Code.SyncTest do use ExUnit.Case test "path manipulation" do path = Path.join(__DIR__, "fixtures") Code.prepend_path path assert to_charlist(path) in :code.get_path Code.delete_path path refute to_charlist(path) in :code.get_path end end
29.278947
109
0.650908
083f449894eb9ae2f3c9875745f9459efb4324a5
3,330
ex
Elixir
lib/error.ex
EevanW/ex_open_travel
cc52155dd0dbf7e9a305c07890c2de47e87e3585
[ "Apache-2.0" ]
null
null
null
lib/error.ex
EevanW/ex_open_travel
cc52155dd0dbf7e9a305c07890c2de47e87e3585
[ "Apache-2.0" ]
null
null
null
lib/error.ex
EevanW/ex_open_travel
cc52155dd0dbf7e9a305c07890c2de47e87e3585
[ "Apache-2.0" ]
null
null
null
defmodule ExOpenTravel.Error do defexception [:reason] @type t :: %__MODULE__{reason: any} @impl true def exception(code), do: %__MODULE__{reason: reason_for(code)} @impl true def message(%__MODULE__{reason: reason}), do: humanize_error(reason) @doc """ Convert API Error code to reason atom """ @spec reason_for(neg_integer()) :: atom() def reason_for(:invalid_endpoint), do: :invalid_endpoint def reason_for(:empty_payload), do: :empty_payload def reason_for({:http_error, {"15", string}}), do: {:date_in_the_past_or_not_alowed, string} def reason_for({:http_error, {"112", string}}), do: {:too_many_nights, string} def reason_for({:http_error, {"321", string}}), do: {:required_field_missing, string} def reason_for({:http_error, {"402", string}}), do: {:invalid_room_type, string} def reason_for({:http_error, {"404", string}}), do: {:invalid_date_range, string} def reason_for({:http_error, {"497", string}}), do: {:invalid_credentials, string} def reason_for({:http_error, {code, string}}), do: {:http_error, {code, string}} def reason_for({:function_clause, reason}), do: {:function_clause, reason} def reason_for({:argument_error, reason}), do: {:argument_error, reason} def reason_for({:fatal, reason}), do: {:catch_error, reason} def reason_for({:exit, reason}), do: {:catch_error, reason} def reason_for(e), do: {:undefined_error, e} def reason_for("SOAP-ENV:" <> _, reason), do: {:invalid_api_request, reason} def reason_for(arg1, arg2), do: {:undefined_error, {arg1, arg2}} @doc """ Convert reason atom to human readable string """ @spec humanize_error(atom) :: String.t() def humanize_error(:xml_parsing_error), do: "XML parsing error" def humanize_error(:invalid_endpoint), do: "Invalid endpoint" def humanize_error(:empty_payload), do: "Empty payload" def humanize_error(:undefined_error), do: "Undefined error" def humanize_error(reason) when is_binary(reason), do: reason def humanize_error({:invalid_api_request, reason}), do: "Invalid builded structure of API request: 
#{inspect(reason)}" def humanize_error({:function_clause, reason}), do: "Function clause error: #{inspect(reason)}}" def humanize_error({:argument_error, reason}), do: "Argument error: #{inspect(reason)}}" def humanize_error({:catch_error, reason}), do: "Catch error: #{inspect(reason)}}" def humanize_error({:date_in_the_past_or_not_alowed, reason}), do: "Invalid date: updates in the past are not allowed: #{inspect(reason)}}" def humanize_error({:required_field_missing, reason}), do: "Required field missing error: #{inspect(reason)}}" def humanize_error({:invalid_room_type, reason}), do: "Invalid room type error: #{inspect(reason)}}" def humanize_error({:invalid_date_range, reason}), do: "Invalid date range error: #{inspect(reason)}}" def humanize_error({:invalid_credentials, reason}), do: "Invalid credentials error: #{inspect(reason)}}" def humanize_error({:too_many_nights, reason}), do: "Too many nights: Maximum update limit reached (date range max = 31): #{inspect(reason)}}" def humanize_error({:undefined_error, reason}), do: "Undefined error: #{inspect(reason)}}" def humanize_error(reason), do: "Undefined error: #{inspect(reason)}}" end
37.840909
98
0.701502
083f5884270d41b2c4801d0f01d8a64de4f77fc7
129
exs
Elixir
.formatter.exs
feng19/wechat
431c22818c60cd01fc5c676aa060feb303d0c444
[ "Apache-2.0" ]
7
2021-01-22T04:07:29.000Z
2021-12-14T14:01:30.000Z
.formatter.exs
feng19/wechat
431c22818c60cd01fc5c676aa060feb303d0c444
[ "Apache-2.0" ]
1
2021-03-17T15:44:26.000Z
2021-03-17T15:44:26.000Z
.formatter.exs
feng19/wechat
431c22818c60cd01fc5c676aa060feb303d0c444
[ "Apache-2.0" ]
2
2021-03-17T14:35:56.000Z
2021-08-10T07:44:10.000Z
# Used by "mix format" [ import_deps: [:plug, :tesla], inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] ]
21.5
69
0.573643
083f65d57d08a56cd66ce6755927bf9c78a6e33f
965
exs
Elixir
mix.exs
iautom8things/nicene
f3c7bdd7dbc678610c8a1dcb58eb0a89190f34d9
[ "MIT" ]
null
null
null
mix.exs
iautom8things/nicene
f3c7bdd7dbc678610c8a1dcb58eb0a89190f34d9
[ "MIT" ]
null
null
null
mix.exs
iautom8things/nicene
f3c7bdd7dbc678610c8a1dcb58eb0a89190f34d9
[ "MIT" ]
null
null
null
defmodule Nicene.MixProject do use Mix.Project @github_url "https://github.com/sketch-hq/nicene" def project do [ app: :nicene, version: "0.5.0", elixir: "~> 1.7", start_permanent: false, description: "A Credo plugin containing additional checks.", deps: deps(), package: package(), # Docs name: "Nicene", docs: [ main: "Readme", extras: ["README.md"], source_url: @github_url ] ] end def application(), do: [] defp package() do [ files: [ "priv", "lib", "mix.exs", "README.md", "LICENSE" ], maintainers: ["Devon Estes"], licenses: ["MIT"], links: %{ "GitHub" => @github_url } ] end defp deps() do [ {:assertions, "~> 0.15.0", only: [:test]}, {:ex_doc, ">= 0.0.0", only: :dev, runtime: false}, {:credo, "~> 1.2.0"} ] end end
18.207547
66
0.481865
083f803a6793527ceb52ccb7aa6190f07e17bc91
108
ex
Elixir
apps/thousand/lib/thousand/repo.ex
thousandfr/api
bbcdbcf269000bba566ac7d7d8a351b6f6dd667a
[ "MIT" ]
null
null
null
apps/thousand/lib/thousand/repo.ex
thousandfr/api
bbcdbcf269000bba566ac7d7d8a351b6f6dd667a
[ "MIT" ]
3
2018-11-14T19:35:46.000Z
2019-01-04T23:10:40.000Z
apps/thousand/lib/thousand/repo.ex
thousandfr/api
bbcdbcf269000bba566ac7d7d8a351b6f6dd667a
[ "MIT" ]
null
null
null
defmodule Thousand.Repo do use Ecto.Repo, otp_app: :thousand, adapter: Ecto.Adapters.Postgres end
18
35
0.731481
083f84eec7c74c258ccc4f02f0717bb3c60e048d
1,811
ex
Elixir
backend/lib/backend/accounts.ex
silver-panda/budgetr
d8cad5c1401677947444add24c8d41f2450c8cc3
[ "MIT" ]
null
null
null
backend/lib/backend/accounts.ex
silver-panda/budgetr
d8cad5c1401677947444add24c8d41f2450c8cc3
[ "MIT" ]
null
null
null
backend/lib/backend/accounts.ex
silver-panda/budgetr
d8cad5c1401677947444add24c8d41f2450c8cc3
[ "MIT" ]
null
null
null
defmodule Backend.Accounts do @moduledoc """ The Accounts context. """ import Ecto.Query, warn: false alias Backend.Repo alias Backend.Accounts.User @doc """ Returns the list of users. ## Examples iex> list_users() [%User{}, ...] """ def list_users do Repo.all(User) end @doc """ Gets a single user. Raises `Ecto.NoResultsError` if the User does not exist. ## Examples iex> get_user!(123) %User{} iex> get_user!(456) ** (Ecto.NoResultsError) """ def get_user!(id), do: Repo.get!(User, id) @doc """ Gets a single user by email """ def get_user_by_email(email) do from(u in User, where: u.email == ^email) |> Repo.one() end @doc """ Creates a user. ## Examples iex> create_user(%{field: value}) {:ok, %User{}} iex> create_user(%{field: bad_value}) {:error, %Ecto.Changeset{}} """ def create_user(attrs \\ %{}) do %User{} |> User.changeset(attrs) |> Repo.insert() end @doc """ Updates a user. ## Examples iex> update_user(user, %{field: new_value}) {:ok, %User{}} iex> update_user(user, %{field: bad_value}) {:error, %Ecto.Changeset{}} """ def update_user(%User{} = user, attrs) do user |> User.changeset(attrs) |> Repo.update() end @doc """ Deletes a User. ## Examples iex> delete_user(user) {:ok, %User{}} iex> delete_user(user) {:error, %Ecto.Changeset{}} """ def delete_user(%User{} = user) do Repo.delete(user) end @doc """ Returns an `%Ecto.Changeset{}` for tracking user changes. ## Examples iex> change_user(user) %Ecto.Changeset{source: %User{}} """ def change_user(%User{} = user) do User.changeset(user, %{}) end end
16.169643
59
0.568747
083fb9e14fac8fb47926008dbafeb87f10776ba3
103
exs
Elixir
programming_elixir_1.3_snippets/factorial.exs
benjohns1/elixer-app
6e866ec084c5e75442c0b70f66e35f61b5b74d34
[ "MIT" ]
null
null
null
programming_elixir_1.3_snippets/factorial.exs
benjohns1/elixer-app
6e866ec084c5e75442c0b70f66e35f61b5b74d34
[ "MIT" ]
null
null
null
programming_elixir_1.3_snippets/factorial.exs
benjohns1/elixer-app
6e866ec084c5e75442c0b70f66e35f61b5b74d34
[ "MIT" ]
null
null
null
defmodule Factorial do def of(0), do: 1 def of(n) when is_integer(n) and n > 0, do: n * of(n-1) end
25.75
57
0.631068
083fcd7f0a9a2a08962c368416b253d29281cca8
526
ex
Elixir
lib/ash/page/offset.ex
maartenvanvliet/ash
c7fd1927169b45d9e1e5ad4ba2ee81703fcf27db
[ "MIT" ]
null
null
null
lib/ash/page/offset.ex
maartenvanvliet/ash
c7fd1927169b45d9e1e5ad4ba2ee81703fcf27db
[ "MIT" ]
null
null
null
lib/ash/page/offset.ex
maartenvanvliet/ash
c7fd1927169b45d9e1e5ad4ba2ee81703fcf27db
[ "MIT" ]
null
null
null
defmodule Ash.Page.Offset do @moduledoc """ A page of results from `offset` based pagination. If a resource supports `keyset` pagination as well, it will also have the `keyset` metadata. """ defstruct [:results, :limit, :offset, :count, :rerun] @type t :: %__MODULE__{} def new(results, count, original_query, opts) do %__MODULE__{ results: results, limit: opts[:page][:limit], count: count, offset: opts[:page][:offset] || 0, rerun: {original_query, opts} } end end
23.909091
55
0.638783
083ff7cb5c7d86f7e7b8959d028d6faecd664822
2,001
ex
Elixir
clients/admin/lib/google_api/admin/directory_v1/model/mobile_devices.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
null
null
null
clients/admin/lib/google_api/admin/directory_v1/model/mobile_devices.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/admin/lib/google_api/admin/directory_v1/model/mobile_devices.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Admin.Directory_v1.Model.MobileDevices do @moduledoc """ ## Attributes * `etag` (*type:* `String.t`, *default:* `nil`) - ETag of the resource. * `kind` (*type:* `String.t`, *default:* `admin#directory#mobiledevices`) - Kind of resource this is. * `mobiledevices` (*type:* `list(GoogleApi.Admin.Directory_v1.Model.MobileDevice.t)`, *default:* `nil`) - List of Mobile Device objects. * `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Token used to access next page of this result. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :etag => String.t(), :kind => String.t(), :mobiledevices => list(GoogleApi.Admin.Directory_v1.Model.MobileDevice.t()), :nextPageToken => String.t() } field(:etag) field(:kind) field(:mobiledevices, as: GoogleApi.Admin.Directory_v1.Model.MobileDevice, type: :list) field(:nextPageToken) end defimpl Poison.Decoder, for: GoogleApi.Admin.Directory_v1.Model.MobileDevices do def decode(value, options) do GoogleApi.Admin.Directory_v1.Model.MobileDevices.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Admin.Directory_v1.Model.MobileDevices do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
35.732143
140
0.712644
083ff9bba72c589967b3d5023f28d27ece210ec3
3,409
ex
Elixir
lib/tesla/adapter/httpc.ex
mmcc/tesla
75384470030fb9b44df0e8930371ae8ed081d388
[ "MIT" ]
null
null
null
lib/tesla/adapter/httpc.ex
mmcc/tesla
75384470030fb9b44df0e8930371ae8ed081d388
[ "MIT" ]
1
2019-10-25T19:27:33.000Z
2019-10-25T19:27:33.000Z
lib/tesla/adapter/httpc.ex
mmcc/tesla
75384470030fb9b44df0e8930371ae8ed081d388
[ "MIT" ]
1
2020-04-04T03:19:50.000Z
2020-04-04T03:19:50.000Z
defmodule Tesla.Adapter.Httpc do @moduledoc """ Adapter for [httpc](http://erlang.org/doc/man/httpc.html) This is the default adapter. **NOTE** Tesla overrides default autoredirect value with false to ensure consistency between adapters """ @behaviour Tesla.Adapter import Tesla.Adapter.Shared, only: [stream_to_fun: 1, next_chunk: 1] alias Tesla.Multipart @override_defaults autoredirect: false @http_opts ~w(timeout connect_timeout ssl essl autoredirect proxy_auth version relaxed url_encode)a @doc false def call(env, opts) do opts = Tesla.Adapter.opts(@override_defaults, env, opts) with {:ok, {status, headers, body}} <- request(env, opts) do {:ok, format_response(env, status, headers, body)} end end defp format_response(env, {_, status, _}, headers, body) do %{env | status: status, headers: format_headers(headers), body: format_body(body)} end # from http://erlang.org/doc/man/httpc.html # headers() = [header()] # header() = {field(), value()} # field() = string() # value() = string() defp format_headers(headers) do for {key, value} <- headers do {String.downcase(to_string(key)), to_string(value)} end end # from http://erlang.org/doc/man/httpc.html # string() = list of ASCII characters # Body = string() | binary() defp format_body(data) when is_list(data), do: IO.iodata_to_binary(data) defp format_body(data) when is_binary(data), do: data defp request(env, opts) do content_type = to_charlist(Tesla.get_header(env, "content-type") || "") handle( request( env.method, Tesla.build_url(env.url, env.query) |> to_charlist, Enum.map(env.headers, fn {k, v} -> {to_charlist(k), to_charlist(v)} end), content_type, env.body, Keyword.split(opts, @http_opts) ) ) end # fix for # see https://github.com/teamon/tesla/issues/147 defp request(:delete, url, headers, content_type, nil, {http_opts, opts}) do request(:delete, url, headers, content_type, "", {http_opts, opts}) end defp request(method, url, headers, _content_type, nil, {http_opts, opts}) do :httpc.request(method, {url, headers}, 
http_opts, opts) end defp request(method, url, headers, _content_type, %Multipart{} = mp, opts) do headers = headers ++ Multipart.headers(mp) headers = for {key, value} <- headers, do: {to_charlist(key), to_charlist(value)} {content_type, headers} = case List.keytake(headers, 'content-type', 0) do nil -> {'text/plain', headers} {{_, ct}, headers} -> {ct, headers} end body = stream_to_fun(Multipart.body(mp)) request(method, url, headers, to_charlist(content_type), body, opts) end defp request(method, url, headers, content_type, %Stream{} = body, opts) do fun = stream_to_fun(body) request(method, url, headers, content_type, fun, opts) end defp request(method, url, headers, content_type, body, opts) when is_function(body) do body = {:chunkify, &next_chunk/1, body} request(method, url, headers, content_type, body, opts) end defp request(method, url, headers, content_type, body, {http_opts, opts}) do :httpc.request(method, {url, headers, content_type, body}, http_opts, opts) end defp handle({:error, {:failed_connect, _}}), do: {:error, :econnrefused} defp handle(response), do: response end
32.778846
101
0.669111
08401be8d23ad444ae8b79e26803b8655687bf04
238
ex
Elixir
test/fixtures/elixir/get_basic_auth.ex
csperando/curlconverter
733f110e5621375701f4424299ccd72e669876f6
[ "MIT" ]
536
2021-10-06T17:21:25.000Z
2022-03-31T13:05:48.000Z
test/fixtures/elixir/get_basic_auth.ex
csperando/curlconverter
733f110e5621375701f4424299ccd72e669876f6
[ "MIT" ]
74
2021-10-08T13:57:14.000Z
2022-03-31T06:55:39.000Z
test/fixtures/elixir/get_basic_auth.ex
csperando/curlconverter
733f110e5621375701f4424299ccd72e669876f6
[ "MIT" ]
104
2021-10-06T19:36:15.000Z
2022-03-31T07:34:04.000Z
request = %HTTPoison.Request{ method: :get, url: "https://localhost:28139/", options: [hackney: [basic_auth: {~s|some_username|, ~s|some_password|}]], headers: [], params: [], body: "" } response = HTTPoison.request(request)
21.636364
75
0.647059
08403c175270e36be30d5dd4edf4c99563171f35
1,794
exs
Elixir
test/ex_oauth2_provider/applications/application_test.exs
robotarmy/ex_oauth2_provider
b2cf7a80f8b545c12012d1195b1f7616b8f02f63
[ "MIT" ]
null
null
null
test/ex_oauth2_provider/applications/application_test.exs
robotarmy/ex_oauth2_provider
b2cf7a80f8b545c12012d1195b1f7616b8f02f63
[ "MIT" ]
null
null
null
test/ex_oauth2_provider/applications/application_test.exs
robotarmy/ex_oauth2_provider
b2cf7a80f8b545c12012d1195b1f7616b8f02f63
[ "MIT" ]
null
null
null
defmodule ExOauth2Provider.Applications.ApplicationTest do
  use ExOauth2Provider.TestCase

  alias ExOauth2Provider.Applications.Application
  alias Dummy.OauthApplications.OauthApplication

  describe "changeset/2 with existing application" do
    setup do
      # Mark the struct as persisted so the changeset runs update validations.
      loaded_app = Ecto.put_meta(%OauthApplication{}, state: :loaded)

      {:ok, application: loaded_app}
    end

    test "validates", %{application: application} do
      changeset = Application.changeset(application, %{name: ""})

      assert changeset.errors[:name]
    end

    test "validates uid", %{application: application} do
      changeset = Application.changeset(application, %{uid: ""})

      assert changeset.errors[:uid]
    end

    test "validates secret", %{application: application} do
      # A nil secret is rejected outright…
      nil_changeset = Application.changeset(application, %{secret: nil})
      assert nil_changeset.errors[:secret] == {"can't be blank", []}

      # …but an empty string is allowed.
      empty_changeset = Application.changeset(application, %{secret: ""})
      assert is_nil(empty_changeset.errors[:secret])
    end

    test "requires valid redirect uri", %{application: application} do
      changeset = Application.changeset(application, %{redirect_uri: ""})

      assert changeset.errors[:redirect_uri]
    end

    test "require valid redirect uri", %{application: application} do
      invalid_uris = [
        "",
        "invalid",
        "https://example.com invalid",
        "https://example.com http://example.com"
      ]

      for uri <- invalid_uris do
        changeset = Application.changeset(application, %{redirect_uri: uri})

        assert changeset.errors[:redirect_uri]
      end
    end

    test "doesn't require scopes", %{application: application} do
      changeset = Application.changeset(application, %{scopes: ""})

      refute changeset.errors[:scopes]
    end
  end
end
33.222222
85
0.685061
08405bc38723e68926cfe6ab9086fe3697a0000f
1,086
exs
Elixir
apps/publishing/test/publishing/manage/platform_test.exs
pinpox/branchpage
e478ed9085c06cc3c5680b0ca4dc20eff2e74653
[ "MIT" ]
49
2021-06-06T05:40:30.000Z
2021-08-23T04:50:46.000Z
apps/publishing/test/publishing/manage/platform_test.exs
felipelincoln/mvp
6f3ca7dfafe884af40883f84f3eb825bb061c974
[ "MIT" ]
40
2021-01-09T16:50:50.000Z
2021-10-01T16:27:35.000Z
apps/publishing/test/publishing/manage/platform_test.exs
felipelincoln/mvp
6f3ca7dfafe884af40883f84f3eb825bb061c974
[ "MIT" ]
5
2021-02-20T12:58:39.000Z
2022-02-01T02:23:23.000Z
defmodule Publishing.Manage.PlatformTest do
  use Publishing.DataCase

  alias Publishing.Factory
  alias Publishing.Manage.Platform
  alias Publishing.Repo

  import Publishing.ChangesetHelpers

  @valid_empty_attrs %{}
  @valid_attrs %{name: "test-name"}
  @invalid_cast_attrs %{name: 0}

  test "changeset/2 with valid empty params" do
    assert Platform.changeset(%Platform{}, @valid_empty_attrs).valid?
  end

  test "changeset/2 with valid params" do
    assert Platform.changeset(%Platform{}, @valid_attrs).valid?
  end

  test "changeset/2 with invalid cast params" do
    # A non-string name fails the Ecto cast.
    invalid_changeset = Platform.changeset(%Platform{}, @invalid_cast_attrs)

    refute invalid_changeset.valid?
    assert %{name: [:cast]} = errors_on(invalid_changeset)
  end

  test "changeset/2 with existing platform returns error on insert" do
    # Seed a record so the unique constraint on name fires.
    _existing = Factory.insert(:platform, name: "platform")

    result =
      %Platform{}
      |> Platform.changeset(%{name: "platform"})
      |> Repo.insert()

    assert {:error, changeset} = result
    assert %{name: [nil]} = errors_on(changeset)
  end
end
25.857143
70
0.705341
08409df23343df476c71ff2b148f2df9d590de6b
726
exs
Elixir
elixir/udemy/cards/mix.exs
normancapule/lab
2011896287df31926a8d09732c432b9de92bfcd9
[ "MIT" ]
null
null
null
elixir/udemy/cards/mix.exs
normancapule/lab
2011896287df31926a8d09732c432b9de92bfcd9
[ "MIT" ]
5
2020-10-22T08:14:20.000Z
2020-10-22T08:19:09.000Z
elixir/udemy/cards/mix.exs
normancapule/lab
2011896287df31926a8d09732c432b9de92bfcd9
[ "MIT" ]
null
null
null
defmodule Cards.Mixfile do
  @moduledoc false

  use Mix.Project

  # Project configuration. See `mix help compile.app` and `Mix.Project`.
  def project do
    [
      app: :cards,
      version: "0.1.0",
      elixir: "~> 1.9",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # OTP application configuration.
  #
  # `extra_applications` ADDS to the application list Mix infers from
  # `deps/0`; the legacy `applications:` key used before REPLACED the
  # inferred list, which silently drops runtime dependencies. With the
  # declared Elixir requirement (~> 1.9), `extra_applications` is the
  # correct form.
  def application do
    [extra_applications: [:logger]]
  end

  # Project dependencies. Type `mix help deps` for examples and options.
  defp deps do
    [
      {:ex_doc, "~> 0.21", only: :dev, runtime: false}
    ]
  end
end
20.742857
77
0.592287
0840c13f6ad8b1d10b7c3b65b4fe98da43804301
944
ex
Elixir
src/mbs/lib/workflow/job/run_build/context/files.ex
visciang/mbs
3f218b82f3097c03149b6a4a440804056d94c387
[ "MIT" ]
null
null
null
src/mbs/lib/workflow/job/run_build/context/files.ex
visciang/mbs
3f218b82f3097c03149b6a4a440804056d94c387
[ "MIT" ]
1
2021-12-15T07:22:59.000Z
2021-12-15T07:47:56.000Z
src/mbs/lib/workflow/job/run_build/context/files.ex
visciang/mbs
3f218b82f3097c03149b6a4a440804056d94c387
[ "MIT" ]
null
null
null
defmodule MBS.Workflow.Job.RunBuild.Context.Files do
  @moduledoc false

  alias MBS.Docker
  alias MBS.Manifest.BuildDeploy
  alias MBS.Utils

  # Stages the build files declared by a component and its dependencies into
  # a temp directory (as symlinks, preserving directory layout) and copies
  # that tree into the component's container. A no-op when not sandboxed.
  @spec put(BuildDeploy.Component.t(), boolean()) :: :ok | {:error, term()}
  def put(_component, false = _sandboxed), do: :ok

  def put(%BuildDeploy.Component{id: id, dependencies: dependencies} = component, true = _sandboxed) do
    staging_dir = Utils.mktemp()

    file_set =
      MapSet.new(
        Enum.flat_map([component | dependencies], fn
          %BuildDeploy.Component{type: %BuildDeploy.Component.Build{files: fs}} -> fs
        end)
      )

    # Create the directory skeleton first, then symlink each file into place.
    Enum.each(paths_dirname(file_set), fn dir ->
      File.mkdir_p!(Path.join(staging_dir, dir))
    end)

    Enum.each(file_set, fn file ->
      File.ln_s!(file, Path.join(staging_dir, file))
    end)

    Docker.container_dput(id, staging_dir, "/", id)
  end

  # Set of parent directories for the given paths.
  @spec paths_dirname(MapSet.t(Path.t())) :: MapSet.t(Path.t())
  defp paths_dirname(paths), do: MapSet.new(paths, &Path.dirname/1)
end
29.5
112
0.667373
08410791adbb7096ee36e1894a615e6c0d48008f
4,789
exs
Elixir
lib/logger/test/logger/backends/console_test.exs
alexcastano/elixir
0221ce1f79d1cfd0955a9fa46a6d84d0193ad838
[ "Apache-2.0" ]
null
null
null
lib/logger/test/logger/backends/console_test.exs
alexcastano/elixir
0221ce1f79d1cfd0955a9fa46a6d84d0193ad838
[ "Apache-2.0" ]
null
null
null
lib/logger/test/logger/backends/console_test.exs
alexcastano/elixir
0221ce1f79d1cfd0955a9fa46a6d84d0193ad838
[ "Apache-2.0" ]
1
2021-09-30T01:21:02.000Z
2021-09-30T01:21:02.000Z
defmodule Logger.Backends.ConsoleTest do
  use Logger.Case
  require Logger

  import ExUnit.CaptureIO

  # These tests mutate global Logger backend configuration; the on_exit hook
  # restores the defaults after each test.
  setup do
    on_exit(fn ->
      :ok =
        Logger.configure_backend(
          :console,
          format: nil,
          device: :user,
          level: nil,
          metadata: [],
          colors: [enabled: false]
        )
    end)
  end

  test "does not start when there is no user" do
    :ok = Logger.remove_backend(:console)
    user = Process.whereis(:user)

    try do
      # Temporarily unregister :user so the backend's device lookup fails.
      Process.unregister(:user)
      assert :gen_event.add_handler(Logger, Logger.Backends.Console, :console) == {:error, :ignore}
    after
      Process.register(user, :user)
    end
  after
    {:ok, _} = Logger.add_backend(:console)
  end

  test "may use another device" do
    Logger.configure_backend(:console, device: :standard_error)

    assert capture_io(:standard_error, fn ->
             Logger.debug("hello")
             Logger.flush()
           end) =~ "hello"
  end

  test "configures format" do
    Logger.configure_backend(:console, format: "$message [$level]")

    assert capture_log(fn -> Logger.debug("hello") end) =~ "hello [debug]"
  end

  test "configures metadata" do
    Logger.configure_backend(:console, format: "$metadata$message", metadata: [:user_id])

    assert capture_log(fn -> Logger.debug("hello") end) =~ "hello"

    # Later metadata calls override earlier values for the same key.
    Logger.metadata(user_id: 11)
    Logger.metadata(user_id: 13)

    assert capture_log(fn -> Logger.debug("hello") end) =~ "user_id=13 hello"
  end

  test "configures formatter to {module, function} tuple" do
    Logger.configure_backend(:console, format: {__MODULE__, :format})

    assert capture_log(fn -> Logger.debug("hello") end) =~ "my_format: hello"
  end

  # Custom formatter used by the test above.
  def format(_level, message, _ts, _metadata) do
    "my_format: #{message}"
  end

  # WARNING: the next two tests assert exact line offsets relative to the
  # `__ENV__` capture (`line + 4` / `line + 3`); do not insert or remove
  # lines between the `__ENV__` match and the `Logger.debug` call.
  test "configures metadata to :all" do
    Logger.configure_backend(:console, format: "$metadata", metadata: :all)
    Logger.metadata(user_id: 11)
    Logger.metadata(dynamic_metadata: 5)

    %{module: mod, function: {name, arity}, file: file, line: line} = __ENV__

    log =
      capture_log(fn ->
        Logger.debug("hello")
      end)

    assert log =~ "file=#{file}"
    assert log =~ "line=#{line + 4}"
    assert log =~ "module=#{inspect(mod)}"
    assert log =~ "function=#{name}/#{arity}"
    assert log =~ "dynamic_metadata=5 user_id=11"
  end

  test "provides metadata defaults" do
    metadata = [:file, :line, :module, :function]
    Logger.configure_backend(:console, format: "$metadata", metadata: metadata)

    %{module: mod, function: {name, arity}, file: file, line: line} = __ENV__

    assert capture_log(fn ->
             Logger.debug("hello")
           end) =~ "file=#{file} line=#{line + 3} module=#{inspect(mod)} function=#{name}/#{arity}"
  end

  test "configures level" do
    Logger.configure_backend(:console, level: :info)

    assert capture_log(fn -> Logger.debug("hello") end) == ""
  end

  test "configures colors" do
    Logger.configure_backend(:console, format: "$message", colors: [enabled: true])

    assert capture_log(fn -> Logger.debug("hello") end) ==
             IO.ANSI.cyan() <> "hello" <> IO.ANSI.reset()

    Logger.configure_backend(:console, colors: [debug: :magenta])

    assert capture_log(fn -> Logger.debug("hello") end) ==
             IO.ANSI.magenta() <> "hello" <> IO.ANSI.reset()

    assert capture_log(fn -> Logger.info("hello") end) ==
             IO.ANSI.normal() <> "hello" <> IO.ANSI.reset()

    Logger.configure_backend(:console, colors: [info: :cyan])

    assert capture_log(fn -> Logger.info("hello") end) ==
             IO.ANSI.cyan() <> "hello" <> IO.ANSI.reset()

    assert capture_log(fn -> Logger.warn("hello") end) ==
             IO.ANSI.yellow() <> "hello" <> IO.ANSI.reset()

    Logger.configure_backend(:console, colors: [warn: :cyan])

    assert capture_log(fn -> Logger.warn("hello") end) ==
             IO.ANSI.cyan() <> "hello" <> IO.ANSI.reset()

    assert capture_log(fn -> Logger.error("hello") end) ==
             IO.ANSI.red() <> "hello" <> IO.ANSI.reset()

    Logger.configure_backend(:console, colors: [error: :cyan])

    assert capture_log(fn -> Logger.error("hello") end) ==
             IO.ANSI.cyan() <> "hello" <> IO.ANSI.reset()
  end

  test "uses colors from metadata" do
    Logger.configure_backend(:console, format: "$message", colors: [enabled: true])

    # Per-message :ansi_color metadata overrides the level's configured color.
    assert capture_log(fn -> Logger.log(:error, "hello", ansi_color: :yellow) end) ==
             IO.ANSI.yellow() <> "hello" <> IO.ANSI.reset()
  end
end
27.682081
93
0.587388
08410bddaf64b6562bdd2c9eb4d7873962cc86fc
350
ex
Elixir
lib/slow_worker.ex
willferguson/waterpark_test
0eec8a785b2c01d1708603bb6965ceea58e77d3b
[ "Apache-2.0" ]
null
null
null
lib/slow_worker.ex
willferguson/waterpark_test
0eec8a785b2c01d1708603bb6965ceea58e77d3b
[ "Apache-2.0" ]
null
null
null
lib/slow_worker.ex
willferguson/waterpark_test
0eec8a785b2c01d1708603bb6965ceea58e77d3b
[ "Apache-2.0" ]
null
null
null
defmodule SlowWorker do
  @moduledoc """
  A one-shot worker that simulates a slow job.

  On start it schedules the work to itself, sleeps for five seconds,
  sends `{:result, arg}` back to the caller pid it was given, and then
  stops normally.
  """

  use GenServer

  # Duration of the simulated slow job, in milliseconds.
  @work_ms 5000

  @doc "Starts the worker; `args` must be `{reply_to_pid, arg}`."
  def start_link(args) do
    GenServer.start_link(__MODULE__, args)
  end

  @impl true
  def init(args) do
    # Defer the actual work to handle_info so init/1 (and therefore
    # start_link/1) returns immediately instead of blocking the caller.
    send(self(), {:work, args})
    {:ok, args}
  end

  @impl true
  def handle_info({:work, {reply_to, arg}}, state) do
    # Process.sleep/1 is the idiomatic Elixir replacement for the
    # Erlang :timer.sleep/1 call used previously (same behavior).
    Process.sleep(@work_ms)
    send(reply_to, {:result, arg})
    {:stop, :normal, state}
  end
end
15.909091
42
0.617143
08411a8a0ff415c2551ad5e178c10621124f20d1
227
exs
Elixir
apps/elixir_phoenix_chatroom_web/test/elixir_phoenix_chatroom_web/controllers/page_controller_test.exs
unip62/elixir-phoenix-chatroom
4c6af20d0461a572bcf3a70fb80cfe088e12ebbe
[ "MIT" ]
1
2018-02-19T09:02:14.000Z
2018-02-19T09:02:14.000Z
apps/elixir_phoenix_chatroom_web/test/elixir_phoenix_chatroom_web/controllers/page_controller_test.exs
unip62/elixir-phoenix-chatroom
4c6af20d0461a572bcf3a70fb80cfe088e12ebbe
[ "MIT" ]
null
null
null
apps/elixir_phoenix_chatroom_web/test/elixir_phoenix_chatroom_web/controllers/page_controller_test.exs
unip62/elixir-phoenix-chatroom
4c6af20d0461a572bcf3a70fb80cfe088e12ebbe
[ "MIT" ]
null
null
null
defmodule ElixirPhoenixChatroomWeb.PageControllerTest do
  use ElixirPhoenixChatroomWeb.ConnCase

  test "GET /", %{conn: conn} do
    # The landing page should render the default Phoenix greeting.
    response =
      conn
      |> get("/")
      |> html_response(200)

    assert response =~ "Welcome to Phoenix!"
  end
end
25.222222
60
0.722467
084144cfd488476b81ed55b3b2aff5ffb7f092be
2,346
exs
Elixir
test/credo/check/warning/map_get_unsafe_pass_test.exs
hrzndhrn/credo
71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593
[ "MIT" ]
4,590
2015-09-28T06:01:43.000Z
2022-03-29T08:48:57.000Z
test/credo/check/warning/map_get_unsafe_pass_test.exs
hrzndhrn/credo
71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593
[ "MIT" ]
890
2015-11-16T21:07:07.000Z
2022-03-29T08:52:07.000Z
test/credo/check/warning/map_get_unsafe_pass_test.exs
hrzndhrn/credo
71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593
[ "MIT" ]
479
2015-11-17T19:42:40.000Z
2022-03-29T00:09:21.000Z
defmodule Credo.Check.Warning.MapGetUnsafePassTest do
  use Credo.Test.Case

  @described_check Credo.Check.Warning.MapGetUnsafePass

  # The heredocs below are source-code fixtures fed verbatim to the check;
  # their exact content (not this test module's style) is what is analyzed.

  #
  # cases NOT raising issues
  #

  # Map.get/3 with a default is safe to pipe into Enum.
  test "it should NOT report expected code" do
    """
    defmodule CredoSampleModule do
      def some_function(parameter1, parameter2) do
        IO.inspect parameter1 + parameter2
        Map.get(%{}, :foo, [])
        |> Enum.map(&(&1))
      end
    end
    """
    |> to_source_file
    |> run_check(@described_check)
    |> refute_issues()
  end

  test "it should NOT report expected code 2" do
    """
    defmodule CredoSampleModule do
      def some_function(parameter1, parameter2) do
        IO.inspect parameter1 + parameter2
        %{}
        |> Map.get(:foo, [])
        |> Enum.each(&IO.puts/1)
      end
    end
    """
    |> to_source_file
    |> run_check(@described_check)
    |> refute_issues()
  end

  # Map.get/2 piped into a non-Enum function is not flagged.
  test "it should NOT report expected code 3" do
    """
    defmodule CredoSampleModule do
      def some_function(parameter1) do
        %{}
        |> Map.get(:foo)
        |> some_arbitrary_function
      end
    end
    """
    |> to_source_file
    |> run_check(@described_check)
    |> refute_issues()
  end

  #
  # cases raising issues
  #

  # Map.get/2 (no default, may return nil) piped into Enum is flagged.
  test "it should report a violation" do
    """
    defmodule CredoSampleModule do
      def some_function() do
        %{}
        |> Map.get(:foo)
        |> Enum.sum
      end
    end
    """
    |> to_source_file
    |> run_check(@described_check)
    |> assert_issue()
  end

  test "it should report a violation /2" do
    """
    defmodule CredoSampleModule do
      def some_function(parameter1, parameter2) do
        some_map = %{}
        Map.get(some_map, :items)
        |> Enum.map(fn x -> x["id"] end)
      end
    end
    """
    |> to_source_file
    |> run_check(@described_check)
    |> assert_issue()
  end

  # The unsafe pipe is detected even inside a nested anonymous function.
  test "it should report a violation /3" do
    """
    defmodule CredoSampleModule do
      def some_function(a, b, c) do
        a
        |> Enum.map(fn x ->
             x
             |> Map.get(b)
             |> Enum.reduce([], &some_fun/1)
           end)
        |> some_other_function(c)
      end
    end
    """
    |> to_source_file
    |> run_check(@described_check)
    |> assert_issue()
  end
end
19.55
55
0.558397
08417642c10e66b210992dc56e7775e2fa3c0296
11,300
exs
Elixir
lib/elixir/test/elixir/kernel/docs_test.exs
britto/elixir
1f6e7093cff4b68dada60b924399bc8404d39a7e
[ "Apache-2.0" ]
2
2020-06-02T18:00:28.000Z
2021-12-10T03:21:42.000Z
lib/elixir/test/elixir/kernel/docs_test.exs
britto/elixir
1f6e7093cff4b68dada60b924399bc8404d39a7e
[ "Apache-2.0" ]
1
2020-09-14T16:23:33.000Z
2021-03-25T17:38:59.000Z
lib/elixir/test/elixir/kernel/docs_test.exs
britto/elixir
1f6e7093cff4b68dada60b924399bc8404d39a7e
[ "Apache-2.0" ]
null
null
null
Code.require_file("../test_helper.exs", __DIR__)

defmodule Kernel.DocsTest do
  use ExUnit.Case

  import PathHelpers

  # Injects `baz/1` clauses whose @doc attributes are attached to the wrong
  # clause on purpose; used by the SampleDocs fixture below.
  defmacro wrong_doc_baz do
    quote do
      @doc "Wrong doc"
      @doc since: "1.2"
      def baz(_arg)
      def baz(arg), do: arg + 1
    end
  end

  # NOTE: several assertions below depend on exact line offsets
  # (`__ENV__.line - 2`); do not insert lines inside those triples.
  test "attributes format" do
    defmodule DocAttributes do
      @moduledoc "Module doc"
      assert @moduledoc == "Module doc"
      assert Module.get_attribute(__MODULE__, :moduledoc) == {__ENV__.line - 2, "Module doc"}

      @typedoc "Type doc"
      assert @typedoc == "Type doc"
      assert Module.get_attribute(__MODULE__, :typedoc) == {__ENV__.line - 2, "Type doc"}
      @type foobar :: any

      @doc "Function doc"
      assert @doc == "Function doc"
      assert Module.get_attribute(__MODULE__, :doc) == {__ENV__.line - 2, "Function doc"}

      def foobar() do
        :ok
      end
    end
  end

  test "compiled without docs" do
    Code.compiler_options(docs: false)

    write_beam(
      defmodule WithoutDocs do
        @moduledoc "Module doc"

        @doc "Some doc"
        def foobar(arg), do: arg
      end
    )

    assert Code.fetch_docs(WithoutDocs) == {:error, :chunk_not_found}
  after
    Code.compiler_options(docs: true)
  end

  test "compiled in memory does not have accessible docs" do
    defmodule InMemoryDocs do
      @moduledoc "Module doc"

      @doc "Some doc"
      def foobar(arg), do: arg
    end

    assert Code.fetch_docs(InMemoryDocs) == {:error, :module_not_found}
  end

  test "non-existent beam file" do
    assert {:error, :module_not_found} = Code.fetch_docs("bad.beam")
  end

  test "raises on invalid @doc since: ..." do
    assert_raise ArgumentError, ~r"should be a string representing the version", fn ->
      defmodule InvalidSince do
        @doc since: 1.2
        def foo, do: :bar
      end
    end
  end

  test "raises on invalid @doc" do
    assert_raise ArgumentError, ~r/When set dynamically, it should be {line, doc}/, fn ->
      defmodule DocAttributesFormat do
        Module.put_attribute(__MODULE__, :moduledoc, "Other")
      end
    end

    message = ~r/should be either false, nil, a string, or a keyword list/

    assert_raise ArgumentError, message, fn ->
      defmodule AtSyntaxDocAttributesFormat do
        @moduledoc :not_a_binary
      end
    end

    assert_raise ArgumentError, message, fn ->
      defmodule AtSyntaxDocAttributesFormat do
        @moduledoc true
      end
    end
  end

  describe "compiled with docs" do
    test "infers signatures" do
      write_beam(
        defmodule SignatureDocs do
          def arg_names([], [], %{}, [], %{}), do: false

          @year 2015
          def with_defaults(@year, arg \\ 0, year \\ @year, fun \\ &>=/2) do
            {fun, arg + year}
          end

          def with_map_and_default(%{key: value} \\ %{key: :default}), do: value
          def with_struct(%URI{}), do: :ok

          def with_underscore({_, _} = _two_tuple), do: :ok
          def with_underscore(_), do: :error

          def only_underscore(_), do: :ok
          def two_good_names(first, :ok), do: first
          def two_good_names(second, :error), do: second
        end
      )

      assert {:docs_v1, _, :elixir, _, _, _, docs} = Code.fetch_docs(SignatureDocs)
      signatures = for {{:function, n, a}, _, signature, _, %{}} <- docs, do: {{n, a}, signature}

      assert [
               arg_names,
               only_underscore,
               two_good_names,
               with_defaults,
               with_map_and_default,
               with_struct,
               with_underscore
             ] = Enum.sort(signatures)

      # arg_names/5
      assert {{:arg_names, 5}, ["arg_names(list1, list2, map1, list3, map2)"]} = arg_names

      # only_underscore/1
      assert {{:only_underscore, 1}, ["only_underscore(_)"]} = only_underscore

      # two_good_names/2
      assert {{:two_good_names, 2}, ["two_good_names(first, atom)"]} = two_good_names

      # with_defaults/4
      assert {{:with_defaults, 4}, ["with_defaults(int, arg \\\\ 0, year \\\\ 2015, fun \\\\ &>=/2)"]} =
               with_defaults

      # with_map_and_default/1
      assert {{:with_map_and_default, 1}, ["with_map_and_default(map \\\\ %{key: :default})"]} =
               with_map_and_default

      # with_struct/1
      assert {{:with_struct, 1}, ["with_struct(uri)"]} = with_struct

      # with_underscore/1
      assert {{:with_underscore, 1}, ["with_underscore(two_tuple)"]} = with_underscore
    end

    test "includes docs for functions, modules, types and callbacks" do
      write_beam(
        defmodule SampleDocs do
          @moduledoc "Module doc"
          @moduledoc authors: "Elixir Contributors", purpose: :test

          @doc "My struct"
          defstruct [:sample]

          @typedoc "Type doc"
          @typedoc since: "1.2.3", color: :red
          @type foo(any) :: any

          @typedoc "Opaque type doc"
          @opaque bar(any) :: any

          @doc "Callback doc"
          @doc since: "1.2.3", color: :red, deprecated: "use baz/2 instead"
          @doc color: :blue, stable: true
          @callback foo(any) :: any

          @doc false
          @doc since: "1.2.3"
          @callback bar() :: term

          @callback baz(any, term) :: any

          @doc "Callback with multiple clauses"
          @callback callback_multi(integer) :: integer
          @callback callback_multi(atom) :: atom

          @doc "Macrocallback doc"
          @macrocallback qux(any) :: any

          @doc "Macrocallback with multiple clauses"
          @macrocallback macrocallback_multi(integer) :: integer
          @macrocallback macrocallback_multi(atom) :: atom

          @doc "Function doc"
          @doc since: "1.2.3", color: :red
          @doc color: :blue, stable: true
          @deprecated "use baz/2 instead"
          def foo(arg \\ 0), do: arg + 1

          @doc "Multiple function head doc"
          @deprecated "something else"
          def bar(_arg)
          def bar(arg), do: arg + 1

          require Kernel.DocsTest
          Kernel.DocsTest.wrong_doc_baz()

          @doc "Multiple function head and docs"
          @doc since: "1.2.3"
          def baz(_arg)

          @doc false
          def qux(true), do: false

          @doc "A guard"
          defguard is_zero(v) when v == 0

          # We do this to avoid the deprecation warning.
          module = Module
          module.add_doc(__MODULE__, __ENV__.line, :def, {:nullary, 0}, [], "add_doc")
          def nullary, do: 0
        end
      )

      assert {:docs_v1, _, :elixir, "text/markdown", %{"en" => module_doc}, module_doc_meta, docs} =
               Code.fetch_docs(SampleDocs)

      assert module_doc == "Module doc"
      assert %{authors: "Elixir Contributors", purpose: :test} = module_doc_meta

      [
        callback_bar,
        callback_baz,
        callback_multi,
        callback_foo,
        function_struct_0,
        function_struct_1,
        function_bar,
        function_baz,
        function_foo,
        function_nullary,
        function_qux,
        guard_is_zero,
        macrocallback_multi,
        macrocallback_qux,
        type_bar,
        type_foo
      ] = Enum.sort(docs)

      assert {{:callback, :bar, 0}, _, [], :hidden, %{}} = callback_bar
      assert {{:callback, :baz, 2}, _, [], :none, %{}} = callback_baz

      assert {{:callback, :foo, 1}, _, [], %{"en" => "Callback doc"},
              %{since: "1.2.3", deprecated: "use baz/2 instead", color: :blue, stable: true}} =
               callback_foo

      assert {{:callback, :callback_multi, 1}, _, [], %{"en" => "Callback with multiple clauses"},
              %{}} = callback_multi

      assert {{:function, :__struct__, 0}, _, ["%Kernel.DocsTest.SampleDocs{}"],
              %{"en" => "My struct"}, %{}} = function_struct_0

      assert {{:function, :__struct__, 1}, _, ["__struct__(kv)"], :none, %{}} = function_struct_1

      assert {{:function, :bar, 1}, _, ["bar(arg)"], %{"en" => "Multiple function head doc"},
              %{deprecated: "something else"}} = function_bar

      assert {{:function, :baz, 1}, _, ["baz(arg)"], %{"en" => "Multiple function head and docs"},
              %{since: "1.2.3"}} = function_baz

      assert {{:function, :foo, 1}, _, ["foo(arg \\\\ 0)"], %{"en" => "Function doc"},
              %{
                since: "1.2.3",
                deprecated: "use baz/2 instead",
                color: :blue,
                stable: true,
                defaults: 1
              }} = function_foo

      assert {{:function, :nullary, 0}, _, ["nullary()"], %{"en" => "add_doc"}, %{}} =
               function_nullary

      assert {{:function, :qux, 1}, _, ["qux(bool)"], :hidden, %{}} = function_qux

      assert {{:macro, :is_zero, 1}, _, ["is_zero(v)"], %{"en" => "A guard"}, %{guard: true}} =
               guard_is_zero

      assert {{:macrocallback, :macrocallback_multi, 1}, _, [],
              %{"en" => "Macrocallback with multiple clauses"}, %{}} = macrocallback_multi

      assert {{:macrocallback, :qux, 1}, _, [], %{"en" => "Macrocallback doc"}, %{}} =
               macrocallback_qux

      assert {{:type, :bar, 1}, _, [], %{"en" => "Opaque type doc"}, %{opaque: true}} = type_bar
      assert {{:type, :foo, 1}, _, [], %{"en" => "Type doc"}, %{since: "1.2.3", color: :red}} =
               type_foo
    end
  end

  test "fetch docs chunk from doc/chunks" do
    Code.compiler_options(docs: false)

    doc_chunks_path = Path.join([tmp_path(), "doc", "chunks"])
    File.rm_rf!(doc_chunks_path)
    File.mkdir_p!(doc_chunks_path)

    write_beam(
      defmodule ExternalDocs do
      end
    )

    assert Code.fetch_docs(ExternalDocs) == {:error, :chunk_not_found}

    # Write an external docs chunk and verify it is picked up.
    path = Path.join([doc_chunks_path, "#{ExternalDocs}.chunk"])
    chunk = {:docs_v1, 1, :elixir, "text/markdown", %{"en" => "Some docs"}, %{}}
    File.write!(path, :erlang.term_to_binary(chunk))

    assert Code.fetch_docs(ExternalDocs) == chunk
  after
    Code.compiler_options(docs: true)
  end

  test "@impl true doesn't set @doc false if previous implementation has docs" do
    write_beam(
      defmodule Docs do
        defmodule SampleBehaviour do
          @callback foo(any()) :: any()
          @callback bar() :: any()
          @callback baz() :: any()
        end

        @behaviour SampleBehaviour

        @doc "Foo docs"
        def foo(nil), do: nil

        @impl true
        def foo(_), do: false

        @impl true
        def bar(), do: true

        @doc "Baz docs"
        @impl true
        def baz(), do: true

        def fuz(), do: true
      end
    )

    {:docs_v1, _, _, _, _, _, docs} = Code.fetch_docs(Docs)
    function_docs = for {{:function, name, arity}, _, _, doc, _} <- docs, do: {{name, arity}, doc}

    assert [
             {{:bar, 0}, :hidden},
             {{:baz, 0}, %{"en" => "Baz docs"}},
             {{:foo, 1}, %{"en" => "Foo docs"}},
             {{:fuz, 0}, :none}
           ] = Enum.sort(function_docs)
  end

  describe "special signatures" do
    test "fn" do
      {:docs_v1, _, _, _, _, _, docs} = Code.fetch_docs(Kernel.SpecialForms)
      {_, _, fn_docs, _, _} = Enum.find(docs, &match?({_, :fn, 1}, elem(&1, 0)))
      assert fn_docs == ["fn"]
    end
  end
end
29.973475
100
0.558673
08417ac8e067ed11fbd9109f1a6aa50beb0c1848
558
ex
Elixir
lib/console_web/views/changeset_view.ex
maco2035/console
2a9a65678b8c671c7d92cdb62dfcfc71b84957c5
[ "Apache-2.0" ]
83
2018-05-31T14:49:10.000Z
2022-03-27T16:49:49.000Z
lib/console_web/views/changeset_view.ex
maco2035/console
2a9a65678b8c671c7d92cdb62dfcfc71b84957c5
[ "Apache-2.0" ]
267
2018-05-22T23:19:02.000Z
2022-03-31T04:31:06.000Z
lib/console_web/views/changeset_view.ex
maco2035/console
2a9a65678b8c671c7d92cdb62dfcfc71b84957c5
[ "Apache-2.0" ]
18
2018-11-20T05:15:54.000Z
2022-03-28T08:20:13.000Z
defmodule ConsoleWeb.ChangesetView do
  use ConsoleWeb, :view

  @doc """
  Traverses and translates changeset errors.

  See `Ecto.Changeset.traverse_errors/2` and
  `ConsoleWeb.ErrorHelpers.translate_error/1` for more details.
  """
  def translate_errors(changeset) do
    Ecto.Changeset.traverse_errors(changeset, &translate_error/1)
  end

  # Renders validation failures; the traversed errors already encode as a
  # JSON object, so they are passed through unchanged.
  def render("error.json", %{changeset: changeset}) do
    %{errors: translate_errors(changeset)}
  end
end
27.9
65
0.738351
084181ba673f87c47e64f81d801218d0ab617bf7
2,024
exs
Elixir
config/prod.exs
cavneb/elixir_casts
c650a2850825e0305387b95ba6a7eb386e984097
[ "MIT" ]
null
null
null
config/prod.exs
cavneb/elixir_casts
c650a2850825e0305387b95ba6a7eb386e984097
[ "MIT" ]
null
null
null
config/prod.exs
cavneb/elixir_casts
c650a2850825e0305387b95ba6a7eb386e984097
[ "MIT" ]
null
null
null
use Mix.Config

# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :elixir_casts, ElixirCasts.Endpoint,
  http: [port: {:system, "PORT"}],
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/manifest.json"

# Do not print debug messages in production
config :logger, level: :info

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#     config :elixir_casts, ElixirCasts.Endpoint,
#       ...
#       url: [host: "example.com", port: 443],
#       https: [port: 443,
#               keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#               certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
#     config :elixir_casts, ElixirCasts.Endpoint,
#       force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.

# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
#     config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
#     config :elixir_casts, ElixirCasts.Endpoint, server: true

# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
32.645161
67
0.718379
0841910668f660c1b6204635a0a36b8b711bd78a
882
ex
Elixir
clients/sql_admin/lib/google_api/sql_admin/v1/metadata.ex
renovate-bot/elixir-google-api
1da34cd39b670c99f067011e05ab90af93fef1f6
[ "Apache-2.0" ]
1
2021-12-20T03:40:53.000Z
2021-12-20T03:40:53.000Z
clients/sql_admin/lib/google_api/sql_admin/v1/metadata.ex
swansoffiee/elixir-google-api
9ea6d39f273fb430634788c258b3189d3613dde0
[ "Apache-2.0" ]
1
2020-08-18T00:11:23.000Z
2020-08-18T00:44:16.000Z
clients/sql_admin/lib/google_api/sql_admin/v1/metadata.ex
dazuma/elixir-google-api
6a9897168008efe07a6081d2326735fe332e522c
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.

defmodule GoogleApi.SQLAdmin.V1 do
  @moduledoc """
  API client metadata for GoogleApi.SQLAdmin.V1.
  """

  # Revision of the Google discovery document this client was generated from.
  @discovery_revision "20211206"

  @doc false
  def discovery_revision(), do: @discovery_revision
end
32.666667
74
0.758503
08419fe2a52a4d5d25ac984dc1a33485fee3a832
2,175
ex
Elixir
lib/vintage_net_wifi/access_point.ex
axelson/vintage_net_wifi
b989ec5a8122fca212cf62e7e6b489af4314cb5e
[ "Apache-2.0" ]
20
2019-12-03T18:26:25.000Z
2022-02-08T09:27:23.000Z
lib/vintage_net_wifi/access_point.ex
axelson/vintage_net_wifi
b989ec5a8122fca212cf62e7e6b489af4314cb5e
[ "Apache-2.0" ]
29
2020-01-21T02:21:47.000Z
2022-02-16T13:02:24.000Z
lib/vintage_net_wifi/access_point.ex
axelson/vintage_net_wifi
b989ec5a8122fca212cf62e7e6b489af4314cb5e
[ "Apache-2.0" ]
12
2020-02-16T16:48:03.000Z
2022-03-24T12:10:52.000Z
defmodule VintageNetWiFi.AccessPoint do
  alias VintageNetWiFi.Utils

  @moduledoc """
  Information about a WiFi access point

  * `:bssid` - a unique address for the access point
  * `:flags` - a list of flags describing properties on the access point
  * `:frequency` - the access point's frequency in MHz
  * `:signal_dbm` - the signal strength in dBm
  * `:ssid` - the access point's name
  """

  @type flag ::
          :wpa2_psk_ccmp
          | :wpa2_eap_ccmp
          | :wpa2_eap_ccmp_tkip
          | :wpa2_psk_ccmp_tkip
          | :wpa2_psk_sae_ccmp
          | :wpa2_sae_ccmp
          | :wpa2_ccmp
          | :wpa_psk_ccmp
          | :wpa_psk_ccmp_tkip
          | :wpa_eap_ccmp
          | :wpa_eap_ccmp_tkip
          | :wep
          | :ibss
          | :mesh
          | :ess
          | :p2p
          | :wps
          | :rsn_ccmp

  @type band :: :wifi_2_4_ghz | :wifi_5_ghz | :unknown

  defstruct [:bssid, :frequency, :band, :channel, :signal_dbm, :signal_percent, :flags, :ssid]

  @type t :: %__MODULE__{
          bssid: String.t(),
          frequency: non_neg_integer(),
          band: band(),
          channel: non_neg_integer(),
          signal_dbm: integer(),
          signal_percent: 0..100,
          flags: [flag()],
          ssid: String.t()
        }

  @doc """
  Create an AccessPoint when only the BSSID is known

  All other fields are filled with placeholder values (unknown band,
  zeroed frequency/channel and a floor signal reading).
  """
  @spec new(any) :: VintageNetWiFi.AccessPoint.t()
  def new(bssid) do
    %__MODULE__{
      bssid: bssid,
      ssid: "",
      frequency: 0,
      band: :unknown,
      channel: 0,
      signal_dbm: -99,
      signal_percent: 0,
      flags: []
    }
  end

  @doc """
  Create a new AccessPoint with all of the information
  """
  @spec new(String.t(), String.t(), non_neg_integer(), integer(), [flag()]) ::
          VintageNetWiFi.AccessPoint.t()
  def new(bssid, ssid, frequency, signal_dbm, flags) do
    # Band, channel and the dBm-to-percent conversion are all derived
    # from the frequency.
    %{band: band, channel: channel, dbm_to_percent: to_percent} = Utils.frequency_info(frequency)

    %__MODULE__{
      bssid: bssid,
      ssid: ssid,
      frequency: frequency,
      band: band,
      channel: channel,
      signal_dbm: signal_dbm,
      signal_percent: to_percent.(signal_dbm),
      flags: flags
    }
  end
end
25.290698
94
0.573333
0841a1bbc1b94e2b2fbfdfc1f519eab5846b4ff1
3,019
ex
Elixir
lib/checker_mal_web/controllers/unapproved_controller.ex
Hiyori-API/checker-mal
c52f6e8a248ba160ffebc2c9369a933fc8fc4499
[ "MIT" ]
null
null
null
lib/checker_mal_web/controllers/unapproved_controller.ex
Hiyori-API/checker-mal
c52f6e8a248ba160ffebc2c9369a933fc8fc4499
[ "MIT" ]
null
null
null
lib/checker_mal_web/controllers/unapproved_controller.ex
Hiyori-API/checker-mal
c52f6e8a248ba160ffebc2c9369a933fc8fc4499
[ "MIT" ]
null
null
null
defmodule CheckerMalWeb.UnapprovedController do
  use CheckerMalWeb, :controller

  require Logger

  # NOTE(review): Application.get_env/3 in the module body is evaluated at
  # compile time, so this path is frozen into the build; if runtime
  # configuration is expected, Application.compile_env/3 (or a runtime
  # lookup) would make the intent explicit — confirm which is wanted.
  @html_basepath Application.get_env(:checker_mal, :unapproved_html_basepath, "/mal_unapproved")

  # Shown while the unapproved-entry cache is (re)building.
  @error_msg "Page is currently being updated, this page will automatically refresh when its done..."

  # Fetches the cached unapproved-entry id list for :anime or :manga, plus
  # minutes since the last cache refresh. Falls back to an empty id list if
  # the cache GenServer times out or the server is still initializing.
  def get_data(type) when is_atom(type) do
    # last_updated_at returns :error when server is still booting
    case GenServer.call(CheckerMal.Core.Unapproved, :last_updated_at) do
      {:ok, last_updated_naive} ->
        ids =
          try do
            GenServer.call(CheckerMal.UnapprovedHtml.Cache, type)
          catch
            :exit, {:timeout, _err} ->
              []
          end

        %{
          :since_update_mins =>
            div(NaiveDateTime.diff(NaiveDateTime.utc_now(), last_updated_naive), 60),
          :ids => ids
        }

      {:error, :uninitialized} ->
        %{ids: [], refresh_equiv: true}
    end
  end

  # Maps a MAL media-type slug to its display name; unknown slugs pass
  # through unchanged, and nil stays nil.
  def convert_media_type(nil), do: nil

  def convert_media_type(type) when is_bitstring(type) do
    case type do
      "doujinshi" -> "Doujinshi"
      "light_novel" -> "Light Novel"
      "manga" -> "Manga"
      "one_shot" -> "One Shot"
      "manhwa" -> "Manhwa"
      "manhua" -> "Manhua"
      "novel" -> "Novel"
      "tv" -> "TV"
      "ova" -> "OVA"
      "special" -> "Special"
      "movie" -> "Movie"
      "unknown" -> "Unknown"
      "music" -> "Music"
      "ona" -> "ONA"
      _ -> type
    end
  end

  # Looks up name/type/nsfw for each id from the entry cache and normalizes
  # the media type for display.
  def fetch_metadata(stype, ids) when is_bitstring(stype) and is_list(ids) do
    GenServer.call(
      CheckerMal.UnapprovedHtml.EntryCache,
      {:get_info, stype, ids},
      :timer.seconds(10)
    )
    |> Map.to_list()
    |> Enum.map(fn {id, {name, etype, nsfw}} ->
      {id,
       %{
         :name => name,
         :type => convert_media_type(etype),
         :nsfw => nsfw
       }}
    end)
    |> Enum.into(Map.new())
  end

  # Builds the assigns map for the unapproved page and flashes an error
  # while the id cache is empty (still initializing/updating).
  defp data_controller(type, conn) do
    stype = Atom.to_string(type)
    data = get_data(type)

    # flash error if page is initializing/updating
    conn =
      cond do
        Enum.empty?(data[:ids]) ->
          conn |> put_flash(:error, @error_msg)

        true ->
          conn
      end

    # get entry info (name/type/nsfw)
    entryinfo = fetch_metadata(stype, data[:ids])

    # map so that its easier to use in eex
    data =
      Map.put(data, :info, entryinfo)
      |> Map.put(
        :title,
        "Unapproved MAL Entries - #{stype |> String.capitalize()}"
      )
      |> Map.put(:basepath, @html_basepath)
      |> Map.put(:type, stype)

    {conn, data}
  end

  # Shared action body for both routes below.
  def controller(conn, type) when is_atom(type) do
    {conn, data} = data_controller(type, conn)

    render(conn, "unapproved.html", data: data)
  end

  def anime(conn, _params), do: controller(conn, :anime)
  def manga(conn, _params), do: controller(conn, :manga)
end
21.260563
101
0.555813
0841b38466d0f4367cab9e587953a66d7a358d9c
10,761
ex
Elixir
lib/elixir/lib/inspect.ex
stevedomin/elixir
df1a7d36472a92aedc97c0afe9f782678b7aa7e5
[ "Apache-2.0" ]
null
null
null
lib/elixir/lib/inspect.ex
stevedomin/elixir
df1a7d36472a92aedc97c0afe9f782678b7aa7e5
[ "Apache-2.0" ]
null
null
null
lib/elixir/lib/inspect.ex
stevedomin/elixir
df1a7d36472a92aedc97c0afe9f782678b7aa7e5
[ "Apache-2.0" ]
8
2018-02-20T18:30:53.000Z
2019-06-18T14:23:31.000Z
import Kernel, except: [inspect: 1] import Inspect.Algebra alias Code.Identifier defprotocol Inspect do @moduledoc """ The `Inspect` protocol is responsible for converting any Elixir data structure into an algebra document. This document is then formatted, either in pretty printing format or a regular one. The `inspect/2` function receives the entity to be inspected followed by the inspecting options, represented by the struct `Inspect.Opts`. Inspection is done using the functions available in `Inspect.Algebra`. ## Examples Many times, inspecting a structure can be implemented in function of existing entities. For example, here is `MapSet`'s `inspect` implementation: defimpl Inspect, for: MapSet do import Inspect.Algebra def inspect(dict, opts) do concat(["#MapSet<", to_doc(MapSet.to_list(dict), opts), ">"]) end end The `concat/1` function comes from `Inspect.Algebra` and it concatenates algebra documents together. In the example above, it is concatenating the string `"MapSet<"` (all strings are valid algebra documents that keep their formatting when pretty printed), the document returned by `Inspect.Algebra.to_doc/2` and the other string `">"`. Since regular strings are valid entities in an algebra document, an implementation of inspect may simply return a string, although that will devoid it of any pretty-printing. ## Error handling In case there is an error while your structure is being inspected, Elixir will raise an `ArgumentError` error and will automatically fall back to a raw representation for printing the structure. You can however access the underlying error by invoking the Inspect implementation directly. 
For example, to test Inspect.MapSet above, you can invoke it as: Inspect.MapSet.inspect(MapSet.new(), %Inspect.Opts{}) """ # Handle structs in Any @fallback_to_any true def inspect(term, opts) end defimpl Inspect, for: Atom do require Macro def inspect(atom, opts) do color(Identifier.inspect_as_atom(atom), color_key(atom), opts) end defp color_key(atom) when is_boolean(atom), do: :boolean defp color_key(nil), do: nil defp color_key(_), do: :atom end defimpl Inspect, for: BitString do def inspect(term, opts) when is_binary(term) do %Inspect.Opts{binaries: bins, base: base, printable_limit: printable_limit} = opts if base == :decimal and (bins == :as_strings or (bins == :infer and String.printable?(term, printable_limit))) do inspected = case Identifier.escape(term, ?", printable_limit) do {escaped, ""} -> [?", escaped, ?"] {escaped, _} -> [?", escaped, ?", " <> ..."] end color(IO.iodata_to_binary(inspected), :string, opts) else inspect_bitstring(term, opts) end end def inspect(term, opts) do inspect_bitstring(term, opts) end defp inspect_bitstring("", opts) do color("<<>>", :binary, opts) end defp inspect_bitstring(bitstring, opts) do left = color("<<", :binary, opts) right = color(">>", :binary, opts) inner = each_bit(bitstring, opts.limit, opts) group(concat(concat(left, nest(inner, 2)), right)) end defp each_bit(_, 0, _) do "..." 
end defp each_bit(<<>>, _counter, _opts) do :doc_nil end defp each_bit(<<h::8>>, _counter, opts) do Inspect.Integer.inspect(h, opts) end defp each_bit(<<h, t::bitstring>>, counter, opts) do flex_glue( concat(Inspect.Integer.inspect(h, opts), ","), each_bit(t, decrement(counter), opts) ) end defp each_bit(bitstring, _counter, opts) do size = bit_size(bitstring) <<h::size(size)>> = bitstring Inspect.Integer.inspect(h, opts) <> "::size(" <> Integer.to_string(size) <> ")" end @compile {:inline, decrement: 1} defp decrement(:infinity), do: :infinity defp decrement(counter), do: counter - 1 end defimpl Inspect, for: List do def inspect([], opts) do color("[]", :list, opts) end # TODO: Remove :char_list and :as_char_lists handling in 2.0 def inspect(term, opts) do %Inspect.Opts{ charlists: lists, char_lists: lists_deprecated, printable_limit: printable_limit } = opts lists = if lists == :infer and lists_deprecated != :infer do case lists_deprecated do :as_char_lists -> IO.warn( "the :char_lists inspect option and its :as_char_lists " <> "value are deprecated, use the :charlists option and its " <> ":as_charlists value instead" ) :as_charlists _ -> IO.warn("the :char_lists inspect option is deprecated, use :charlists instead") lists_deprecated end else lists end open = color("[", :list, opts) sep = color(",", :list, opts) close = color("]", :list, opts) cond do lists == :as_charlists or (lists == :infer and List.ascii_printable?(term, printable_limit)) -> inspected = case Identifier.escape(IO.chardata_to_string(term), ?', printable_limit) do {escaped, ""} -> [?', escaped, ?'] {escaped, _} -> [?', escaped, ?', " ++ ..."] end IO.iodata_to_binary(inspected) keyword?(term) -> container_doc(open, term, close, opts, &keyword/2, separator: sep, break: :strict) true -> container_doc(open, term, close, opts, &to_doc/2, separator: sep) end end @doc false def keyword({key, value}, opts) do key = color(Identifier.inspect_as_key(key), :atom, opts) concat(key, concat(" ", to_doc(value, 
opts))) end @doc false def keyword?([{key, _value} | rest]) when is_atom(key) do case Atom.to_charlist(key) do 'Elixir.' ++ _ -> false _ -> keyword?(rest) end end def keyword?([]), do: true def keyword?(_other), do: false end defimpl Inspect, for: Tuple do def inspect(tuple, opts) do open = color("{", :tuple, opts) sep = color(",", :tuple, opts) close = color("}", :tuple, opts) container_opts = [separator: sep, break: :flex] container_doc(open, Tuple.to_list(tuple), close, opts, &to_doc/2, container_opts) end end defimpl Inspect, for: Map do def inspect(map, opts) do inspect(map, "", opts) end def inspect(map, name, opts) do map = :maps.to_list(map) open = color("%" <> name <> "{", :map, opts) sep = color(",", :map, opts) close = color("}", :map, opts) container_doc(open, map, close, opts, traverse_fun(map, opts), separator: sep, break: :strict) end defp traverse_fun(list, opts) do if Inspect.List.keyword?(list) do &Inspect.List.keyword/2 else sep = color(" => ", :map, opts) &to_map(&1, &2, sep) end end defp to_map({key, value}, opts, sep) do concat(concat(to_doc(key, opts), sep), to_doc(value, opts)) end end defimpl Inspect, for: Integer do def inspect(term, %Inspect.Opts{base: base} = opts) do inspected = Integer.to_string(term, base_to_value(base)) |> prepend_prefix(base) color(inspected, :number, opts) end defp base_to_value(base) do case base do :binary -> 2 :decimal -> 10 :octal -> 8 :hex -> 16 end end defp prepend_prefix(value, :decimal), do: value defp prepend_prefix(<<?-, value::binary>>, base) do "-" <> prepend_prefix(value, base) end defp prepend_prefix(value, base) do prefix = case base do :binary -> "0b" :octal -> "0o" :hex -> "0x" end prefix <> value end end defimpl Inspect, for: Float do def inspect(term, opts) do inspected = IO.iodata_to_binary(:io_lib_format.fwrite_g(term)) color(inspected, :number, opts) end end defimpl Inspect, for: Regex do def inspect(regex, opts) do {escaped, _} = Identifier.escape(regex.source, ?/, :infinity, &escape_map/1) 
source = IO.iodata_to_binary(['~r/', escaped, ?/, regex.opts]) color(source, :regex, opts) end defp escape_map(?\a), do: '\\a' defp escape_map(?\f), do: '\\f' defp escape_map(?\n), do: '\\n' defp escape_map(?\r), do: '\\r' defp escape_map(?\t), do: '\\t' defp escape_map(?\v), do: '\\v' defp escape_map(_), do: false end defimpl Inspect, for: Function do def inspect(function, _opts) do fun_info = :erlang.fun_info(function) mod = fun_info[:module] name = fun_info[:name] if fun_info[:type] == :external and fun_info[:env] == [] do inspected_as_atom = Identifier.inspect_as_atom(mod) inspected_as_function = Identifier.inspect_as_function(name) "&#{inspected_as_atom}.#{inspected_as_function}/#{fun_info[:arity]}" else case Atom.to_charlist(mod) do 'elixir_compiler_' ++ _ -> if function_exported?(mod, :__RELATIVE__, 0) do "#Function<#{uniq(fun_info)} in file:#{mod.__RELATIVE__}>" else default_inspect(mod, fun_info) end _ -> default_inspect(mod, fun_info) end end end defp default_inspect(mod, fun_info) do inspected_as_atom = Identifier.inspect_as_atom(mod) extracted_name = extract_name(fun_info[:name]) "#Function<#{uniq(fun_info)}/#{fun_info[:arity]} in #{inspected_as_atom}#{extracted_name}>" end defp extract_name([]) do "" end defp extract_name(name) do case Identifier.extract_anonymous_fun_parent(name) do {name, arity} -> "." <> Identifier.inspect_as_function(name) <> "/" <> arity :error -> "." <> Identifier.inspect_as_function(name) end end defp uniq(fun_info) do Integer.to_string(fun_info[:new_index]) <> "." 
<> Integer.to_string(fun_info[:uniq]) end end defimpl Inspect, for: PID do def inspect(pid, _opts) do "#PID" <> IO.iodata_to_binary(:erlang.pid_to_list(pid)) end end defimpl Inspect, for: Port do def inspect(port, _opts) do IO.iodata_to_binary(:erlang.port_to_list(port)) end end defimpl Inspect, for: Reference do def inspect(ref, _opts) do '#Ref' ++ rest = :erlang.ref_to_list(ref) "#Reference" <> IO.iodata_to_binary(rest) end end defimpl Inspect, for: Any do def inspect(%module{} = struct, opts) do try do module.__struct__ rescue _ -> Inspect.Map.inspect(struct, opts) else dunder -> if :maps.keys(dunder) == :maps.keys(struct) do pruned = :maps.remove(:__exception__, :maps.remove(:__struct__, struct)) colorless_opts = %{opts | syntax_colors: []} Inspect.Map.inspect(pruned, Inspect.Atom.inspect(module, colorless_opts), opts) else Inspect.Map.inspect(struct, opts) end end end end
27.521739
101
0.638695
0841be7902caa4ab2fd9f79696f7e97bcbf2c831
11,681
ex
Elixir
lib/radixir/bech32.ex
radixir/radixir
703034330e857bc084b78dd927ec611c3ea54349
[ "Apache-2.0" ]
16
2022-01-05T20:41:55.000Z
2022-03-25T09:06:43.000Z
lib/radixir/bech32.ex
radixir/radixir
703034330e857bc084b78dd927ec611c3ea54349
[ "Apache-2.0" ]
null
null
null
lib/radixir/bech32.ex
radixir/radixir
703034330e857bc084b78dd927ec611c3ea54349
[ "Apache-2.0" ]
1
2022-02-10T21:55:26.000Z
2022-02-10T21:55:26.000Z
defmodule Radixir.Bech32 do @moduledoc false # @moduledoc """ # This is an implementation of BIP-0173 # Bech32 address format for native v0-16 witness outputs. # See https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki for details # """ @gen {0x3B6A57B2, 0x26508E6D, 0x1EA119FA, 0x3D4233DD, 0x2A1462B3} use Bitwise char_table = [ {0, ~c(qpzry9x8)}, {8, ~c(gf2tvdw0)}, {16, ~c(s3jn54kh)}, {24, ~c(ce6mua7l)} ] |> Enum.map(fn {x, chars} -> Enum.zip(chars, 0..(length(chars) - 1)) |> Enum.map(fn {char, val} -> {char, val + x} end) end) |> Enum.reduce([], &++/2) |> Enum.sort() |> MapSet.new() # Generate a lookup function for {char, val} <- char_table do defp char_to_value(unquote(char)), do: unquote(val) # Uppercase too if char >= ?a and char <= ?z do char = char - ?a + ?A defp char_to_value(unquote(char)), do: unquote(val) end end defp char_to_value(_char) do nil end # Generate a lookup function for {char, val} <- char_table do defp value_to_char(unquote(val)), do: unquote(char) end defp value_to_char(_char) do nil end defp polymod(values) when is_list(values) do values |> Enum.reduce(1, fn v, chk -> b = chk >>> 25 chk = Bitwise.bxor((chk &&& 0x1FFFFFF) <<< 5, v) 0..4 |> Enum.reduce(chk, fn i, chk -> Bitwise.bxor(chk, if((b >>> i &&& 1) !== 0, do: @gen |> elem(i), else: 0)) end) end) end defp hrp_expand(s) when is_binary(s) do chars = String.to_charlist(s) for(c <- chars, do: c >>> 5) ++ [0] ++ for c <- chars, do: c &&& 31 end defp verify_checksum(hrp, data_string) when is_binary(hrp) and is_binary(data_string) do data = data_string |> String.to_charlist() |> Enum.map(&char_to_value/1) if data |> Enum.all?(&(&1 !== nil)) do if polymod(hrp_expand(hrp) ++ data) === 1 do :ok else {:error, :checksum_failed} end else {:error, :invalid_char} end end defp split_hrp_and_data_string(addr) do # Reversing is done here in case '1' is in the human readable part (hrp) # so we want to split on the last occurrence case String.split(addr |> String.reverse(), "1", parts: 2) do 
[data_string, hrp] -> {:ok, hrp |> String.reverse(), data_string |> String.reverse()} _ -> {:error, :not_bech32} end end @doc ~S""" Verify the checksum of the address report any errors. Note that this doesn't perform exhaustive validation of the address. If you need to make sure the address is well formed please use `decode/1` or `decode/2` instead. Returns `:ok` or an `{:error, reason}` tuple. ## Example iex> Bech32.verify("ckb1qyqdmeuqrsrnm7e5vnrmruzmsp4m9wacf6vsxasryq") :ok """ @spec verify(String.t()) :: :ok | {:error, :checksum_failed | :invalid_char | :not_bech32} def verify(addr) when is_binary(addr) do case split_hrp_and_data_string(addr) do {:ok, hrp, data_string} -> verify_checksum(hrp, data_string) {:error, :not_bech32} -> {:error, :not_bech32} end end @doc ~S""" Verify the checksum of the address report success or failure. Note that this doesn't perform exhaustive validation of the address. If you need to make sure the address is well formed please use `decode/1` or `decode/2` instead. Returns `true` or `false`. ## Example iex> Bech32.verify_predicate("ckb1qyqdmeuqrsrnm7e5vnrmruzmsp4m9wacf6vsxasryq") true """ @spec verify_predicate(String.t()) :: boolean def verify_predicate(addr) when is_binary(addr) do case verify(addr) do :ok -> true _ -> false end end @doc ~S""" Get the human readable part of the address. Very little validation is done here please use `decode/1` or `decode/2` if you need to validate the address. Returns `{:ok, hrp :: String.t()}` or an `{:error, reason}` tuple. ## Example iex> Bech32.get_hrp("ckb1qyqdmeuqrsrnm7e5vnrmruzmsp4m9wacf6vsxasryq") {:ok, "ckb"} """ @spec get_hrp(addr :: String.t()) :: {:ok, hrp :: String.t()} | {:error, :not_bech32} def get_hrp(addr) when is_binary(addr) do case split_hrp_and_data_string(addr) do {:ok, hrp, _data_string} -> {:ok, hrp} {:error, :not_bech32} -> {:error, :not_bech32} end end @doc ~S""" Create a checksum from the human readable part plus the data part. 
Returns a binary that represents the checksum. ## Example iex> Bech32.create_checksum("ckb", <<1, 0, 221, 231, 128, 28, 7, 61, 251, 52, 100, 199, 177, 240, 91, 128, 107, 178, 187, 184, 78, 153>>) <<4, 5, 2, 7, 25, 10>> """ @spec create_checksum(String.t(), binary) :: binary def create_checksum(hrp, data) when is_binary(hrp) and is_binary(data) do data = :erlang.binary_to_list(data) values = hrp_expand(hrp) ++ data pmod = Bitwise.bxor(polymod(values ++ [0, 0, 0, 0, 0, 0]), 1) for(i <- 0..5, do: pmod >>> (5 * (5 - i)) &&& 31) |> :erlang.list_to_binary() end @doc ~S""" Encode a bech32 address from the hrp and data directly (data is a raw binary with no pre-processing). Returns a bech32 address as a string. ## Example iex> Bech32.encode("ckb", <<1, 0, 221, 231, 128, 28, 7, 61, 251, 52, 100, 199, 177, 240, 91, 128, 107, 178, 187, 184, 78, 153>>) "ckb1qyqdmeuqrsrnm7e5vnrmruzmsp4m9wacf6vsxasryq" """ @spec encode(String.t(), binary) :: String.t() def encode(hrp, data) when is_binary(hrp) and is_binary(data) do encode_from_5bit(hrp, convertbits(data)) end @doc ~S""" Encode address from 5 bit encoded values in each byte. In other words bytes should have a value between `0` and `31`. Returns a bech32 address as a string. ## Example iex> Bech32.encode_from_5bit("ckb", Bech32.convertbits(<<1, 0, 221, 231, 128, 28, 7, 61, 251, 52, 100, 199, 177, 240, 91, 128, 107, 178, 187, 184, 78, 153>>)) "ckb1qyqdmeuqrsrnm7e5vnrmruzmsp4m9wacf6vsxasryq" """ @spec encode_from_5bit(String.t(), binary) :: String.t() def encode_from_5bit(hrp, data) when is_binary(hrp) and is_binary(data) do hrp <> "1" <> :erlang.list_to_binary( for <<d::8 <- data <> create_checksum(hrp, data)>>, do: value_to_char(d) ) end @doc ~S""" Convert raw binary to 5 bit per byte encoded byte string. Returns a binary that uses 5 bits per byte. 
## Example iex> Bech32.convertbits(<<1, 0, 221, 231, 128, 28, 7, 61, 251, 52, 100, 199, 177, 240, 91, 128, 107, 178, 187, 184, 78, 153>>) <<0, 4, 0, 13, 27, 25, 28, 0, 3, 16, 3, 19, 27, 30, 25, 20, 12, 19, 3, 27, 3, 28, 2, 27, 16, 1, 21, 27, 5, 14, 29, 24, 9, 26, 12, 16>> """ @spec convertbits(binary, pos_integer, pos_integer, boolean) :: binary def convertbits(data, frombits \\ 8, tobits \\ 5, pad \\ true) def convertbits(data, frombits, tobits, pad) when is_binary(data) and is_integer(frombits) and is_integer(tobits) and is_boolean(pad) and frombits >= tobits and frombits > 0 and tobits > 0 do num_data_bits = bit_size(data) num_tail_bits = rem(num_data_bits, tobits) data = if pad do missing_bits = 8 - num_tail_bits <<data::bitstring, 0::size(missing_bits)>> else data end :erlang.list_to_binary(for <<x::size(tobits) <- data>>, do: x) end def convertbits(data, frombits, tobits, pad) when is_binary(data) and is_integer(frombits) and is_integer(tobits) and is_boolean(pad) and frombits <= tobits and frombits > 0 and tobits > 0 do data = data |> :erlang.binary_to_list() |> Enum.reverse() |> Enum.reduce("", fn v, acc -> <<v::size(frombits), acc::bitstring>> end) data = if pad do leftover_bits = bit_size(data) |> rem(tobits) padding_bits = tobits - leftover_bits <<data::bitstring, 0::size(padding_bits)>> else data end for(<<c::size(tobits) <- data>>, do: c) |> :erlang.list_to_binary() end @doc ~S""" Decode a bech32 address. You can also pass the `:ignore_length` keyword into the opts if you want to allow more than 90 chars for currencies like Nervos CKB. Returns `{:ok, hrp :: String.t(), data :: binary}` or an `{:error, reason}` tuple. Note that we return 8 bits per byte here not 5 bits per byte. 
## Example iex> Bech32.decode("ckb1qyq036wytncnfv0ekfjqrch7s5hzr4hkjl4qs54f7e") {:ok, "ckb", <<1, 0, 248, 233, 196, 92, 241, 52, 177, 249, 178, 100, 1, 226, 254, 133, 46, 33, 214, 246, 151, 234>>} """ @spec decode(String.t(), keyword) :: {:ok, hrp :: String.t(), data :: binary} | {:error, :no_separator | :no_hrp | :checksum_too_short | :too_long | :not_in_charset | :checksum_failed | :invalid_char | :mixed_case_char} def decode(addr, opts \\ []) when is_binary(addr) do unless Enum.any?(:erlang.binary_to_list(addr), fn c -> c < ?! or c > ?~ end) do unless String.downcase(addr) !== addr and String.upcase(addr) !== addr do addr = String.downcase(addr) data_part = ~r/.+(1[qpzry9x8gf2tvdw0s3jn54khce6mua7l]+)$/ |> Regex.run(addr) case ~r/.+(1.+)$/ |> Regex.run(addr, return: :index) do nil -> {:error, :no_separator} [_, {last_one_pos, _tail_size_including_one}] -> cond do last_one_pos === 0 -> {:error, :no_hrp} last_one_pos + 7 > byte_size(addr) -> {:error, :checksum_too_short} byte_size(addr) > 90 and Keyword.get(opts, :ignore_length, false) {:error, :too_long} data_part === nil -> {:error, :not_in_charset} true -> <<hrp::binary-size(last_one_pos), "1", data_with_checksum::binary>> = addr case verify_checksum(hrp, data_with_checksum) do :ok -> checksum_bits = 6 * 8 data_bits = bit_size(data_with_checksum) - checksum_bits <<data::bitstring-size(data_bits), _checksum::size(checksum_bits)>> = data_with_checksum data = data |> :erlang.binary_to_list() |> Enum.map(&char_to_value/1) |> Enum.reverse() |> Enum.reduce( "", fn v, acc -> <<v::5, acc::bitstring>> end ) data_bitlen = bit_size(data) data_bytes = div(data_bitlen, 8) data = case rem(data_bitlen, 8) do 0 -> data n when n < 5 -> data_bitlen = data_bytes * 8 <<data::bitstring-size(data_bitlen), _::bitstring>> = data data n -> missing_bits = 8 - n <<data::bitstring, 0::size(missing_bits)>> end {:ok, hrp, data} {:error, reason} -> {:error, reason} end end end else {:error, :mixed_case_char} end else {:error, :invalid_char} 
end end end
32.628492
164
0.565448
08420cafe7cee7f1935d0eae8aec21f56a2ae3a6
1,504
ex
Elixir
apps/metrics_reporter/lib/metrics_reporter/packages.ex
rucker/hindsight
876a5d344c5d8eebbea37684ee07e0a91e4430f0
[ "Apache-2.0" ]
12
2020-01-27T19:43:02.000Z
2021-07-28T19:46:29.000Z
apps/metrics_reporter/lib/metrics_reporter/packages.ex
rucker/hindsight
876a5d344c5d8eebbea37684ee07e0a91e4430f0
[ "Apache-2.0" ]
81
2020-01-28T18:07:23.000Z
2021-11-22T02:12:13.000Z
apps/metrics_reporter/lib/metrics_reporter/packages.ex
rucker/hindsight
876a5d344c5d8eebbea37684ee07e0a91e4430f0
[ "Apache-2.0" ]
10
2020-02-13T21:24:09.000Z
2020-05-21T18:39:35.000Z
defmodule MetricsReporter.Packages do @moduledoc """ Pre-define commonly associated bundles of Telemetry Metrics definitions for easy inclusion in the MetricsReporter `metrics/0` function when configuring an instance of MetricsReporter. """ import Telemetry.Metrics def kafka_metrics() do [ counter("destination.kafka.write.count") ] end def phoenix_metrics() do [ last_value("phoenix.endpoint.start.time"), last_value("phoenix.router.dispatch.start"), last_value( "phoenix.endpoint.stop.duration.seconds", event_name: [:phoenix, :endpoint, :stop], measurement: :duration, unit: {:native, :second} ), last_value( "phoenix.router_dispatch.stop.duration.seconds", event_name: [:phoenix, :router_dispatch, :stop], measurement: :duration, unit: {:native, :second} ), last_value( "phoenix.socket_connected.duration.seconds", event_name: [:phoenix, :socket_connected], measurement: :duration, unit: {:native, :second} ), last_value( "phoenix.channel_joined.duration.seconds", event_name: [:phoenix, :channel_joined], measurement: :duration, unit: {:native, :second} ), last_value( "phoenix.channel_handled_in.duration.seconds", event_name: [:phoenix, :channel_handled_in], measurement: :duration, unit: {:native, :second} ) ] end end
28.923077
67
0.631649
08421e3717c7f021e023ba4df4735ccbb46c5888
1,235
ex
Elixir
web/uploaders/photo_file_definition.ex
mciastek/emotions-wheel-backend
072a88b3ad15b2c6d2aad414e6b7cfc8fb4a70bf
[ "MIT" ]
null
null
null
web/uploaders/photo_file_definition.ex
mciastek/emotions-wheel-backend
072a88b3ad15b2c6d2aad414e6b7cfc8fb4a70bf
[ "MIT" ]
null
null
null
web/uploaders/photo_file_definition.ex
mciastek/emotions-wheel-backend
072a88b3ad15b2c6d2aad414e6b7cfc8fb4a70bf
[ "MIT" ]
null
null
null
defmodule EmotionsWheelBackend.PhotoFileDefinition do use Arc.Definition use Arc.Ecto.Definition @acl :public_read @versions [:original, :thumb] @extension_whitelist ~w(.jpg .jpeg .gif .png) def transform(:thumb, _) do {:convert, "-strip -thumbnail 200x200^ -gravity center -extent 200x200"} end # def __storage, do: Arc.Storage.Local def filename(version, {file, _}), do: "#{file.file_name}" # Whitelist file extensions: def validate({file, _}) do file_extension = file.file_name |> Path.extname |> String.downcase Enum.member?(@extension_whitelist, file_extension) end # Override the storage directory: def storage_dir(version, {file, scope}), do: "uploads/photos/#{version}" # Provide a default URL if there hasn't been a file uploaded # def default_url(version, scope) do # "/images/avatars/default_#{version}.png" # end # Specify custom headers for s3 objects # Available options are [:cache_control, :content_disposition, # :content_encoding, :content_length, :content_type, # :expect, :expires, :storage_class, :website_redirect_location] # def s3_object_headers(version, {file, scope}) do [content_type: Plug.MIME.path(file.file_name)] end end
30.875
76
0.71417
084223544972af1b976485bef6c59435beff9dbb
872
exs
Elixir
config/config.exs
sevenc-nanashi/dqs
332eddc4e71e104c7a2d30f9049bf22f1743bd7c
[ "MIT" ]
null
null
null
config/config.exs
sevenc-nanashi/dqs
332eddc4e71e104c7a2d30f9049bf22f1743bd7c
[ "MIT" ]
null
null
null
config/config.exs
sevenc-nanashi/dqs
332eddc4e71e104c7a2d30f9049bf22f1743bd7c
[ "MIT" ]
1
2021-05-28T10:21:21.000Z
2021-05-28T10:21:21.000Z
import Config config :nostrum, token: System.get_env("BOT_TOKEN"), num_shards: :auto, gateway_intents: :all config :dqs, ecto_repos: [Dqs.Repo] config :dqs, Dqs.Repo, database: "dqs", username: System.get_env("POSTGRES_USER"), password: System.get_env("POSTGRES_PASSWORD"), hostname: System.get_env("POSTGRES_HOSTNAME"), pool_size: 10 config :dqs, :board_channel_id, System.get_env("QUESTION_BOARD_CHANNEL_ID") |> String.to_integer config :dqs, :closed_category_id, System.get_env("CLOSED_CATEGORY_ID") |> String.to_integer config :dqs, :open_category_id, System.get_env("OPEN_CATEGORY_ID") |> String.to_integer config :dqs, :prefix, System.get_env("PREFIX") config :dqs, :board_channel_id, System.get_env("QUESTION_BOARD_CHANNEL_ID") |> String.to_integer config :dqs, :guild_id, System.get_env("GUILD_ID")
31.142857
96
0.724771
084232376b5eb64d446ee3a7ba3ed8ea69ea0f06
1,809
exs
Elixir
mix.exs
dragonwasrobot/json-schema-to-elm
9200d1afde86b79bff37216395ae7dc2e7ff45be
[ "MIT" ]
74
2017-03-05T22:26:34.000Z
2022-03-14T13:22:47.000Z
mix.exs
dragonwasrobot/json-schema-to-elm
9200d1afde86b79bff37216395ae7dc2e7ff45be
[ "MIT" ]
43
2017-03-05T22:56:21.000Z
2022-03-26T19:19:26.000Z
mix.exs
dragonwasrobot/json-schema-to-elm
9200d1afde86b79bff37216395ae7dc2e7ff45be
[ "MIT" ]
6
2017-06-19T01:49:32.000Z
2018-06-22T02:04:24.000Z
defmodule JS2E.Mixfile do use Mix.Project @version "2.8.1" @elixir_version "~> 1.9" def project do [ app: :js2e, version: @version, elixir: @elixir_version, aliases: aliases(), deps: deps(), description: description(), dialyzer: dialyzer(), docs: docs(), escript: escript(), preferred_cli_env: preferred_cli_env(), test_coverage: test_coverage(), build_embedded: Mix.env() == :prod, start_permanent: Mix.env() == :prod ] end def application do [applications: [:logger]] end defp aliases do [ build: ["deps.get", "compile", "escript.build"], check: ["credo --strict --ignore=RedundantBlankLines"] ] end defp deps do [ {:credo, "~> 1.6.0", only: [:dev, :test], runtime: false}, {:dialyxir, "~> 1.1.0", only: [:dev], runtime: false}, {:ex_doc, "~> 0.19-rc", only: :dev, runtime: false}, {:excoveralls, "~> 0.10", only: :test, runtime: false}, # {:json_schema, path: "../json_schema/"} {:json_schema, "~> 0.3"} ] end defp description do """ Generates Elm types, JSON decoders, JSON encoders and fuzz tests from JSON schema specifications. """ end defp dialyzer do [plt_add_deps: :project] end defp docs do [ name: "JSON Schema to Elm", formatter_opts: [gfm: true], source_ref: @version, source_url: "https://github.com/dragonwasrobot/json-schema-to-elm", extras: [] ] end defp escript do [main_module: JS2E, name: "js2e"] end defp preferred_cli_env do [ coveralls: :test, "coveralls.detail": :test, "coveralls.post": :test, "coveralls.html": :test ] end defp test_coverage do [tool: ExCoveralls] end end
21.282353
78
0.578773
0842419bf7cf84aa52fbb303925644687e85d2e2
1,139
ex
Elixir
lib/qwirkl_ex_web/router.ex
ksherman/QwirkleEx
be4aa971767de916b9fc3a9a52124f17ce58b196
[ "MIT" ]
1
2021-02-01T12:09:47.000Z
2021-02-01T12:09:47.000Z
lib/qwirkl_ex_web/router.ex
ksherman/QwirkleEx
be4aa971767de916b9fc3a9a52124f17ce58b196
[ "MIT" ]
null
null
null
lib/qwirkl_ex_web/router.ex
ksherman/QwirkleEx
be4aa971767de916b9fc3a9a52124f17ce58b196
[ "MIT" ]
null
null
null
defmodule QwirklExWeb.Router do use QwirklExWeb, :router pipeline :browser do plug :accepts, ["html"] plug :fetch_session plug :fetch_live_flash plug :put_root_layout, {QwirklExWeb.LayoutView, :root} plug :protect_from_forgery plug :put_secure_browser_headers end pipeline :api do plug :accepts, ["json"] end scope "/", QwirklExWeb do pipe_through :browser live "/", PageLive, :index end # Other scopes may use custom stacks. # scope "/api", QwirklExWeb do # pipe_through :api # end # Enables LiveDashboard only for development # # If you want to use the LiveDashboard in production, you should put # it behind authentication and allow only admins to access it. # If your application does not have an admins-only section yet, # you can use Plug.BasicAuth to set up some basic authentication # as long as you are also using SSL (which you should anyway). if Mix.env() in [:dev, :test] do import Phoenix.LiveDashboard.Router scope "/" do pipe_through :browser live_dashboard "/dashboard", metrics: QwirklExWeb.Telemetry end end end
25.886364
70
0.702371
0842a7578f49ef3499c2e19f61313d4a09e45bc3
643
exs
Elixir
test/action_for_children/web/controllers/page_controller_test.exs
cast-fuse/action-for-children
3cded6e04abdaf13d4d1033bf1daa783bc0e5ef2
[ "BSD-3-Clause" ]
null
null
null
test/action_for_children/web/controllers/page_controller_test.exs
cast-fuse/action-for-children
3cded6e04abdaf13d4d1033bf1daa783bc0e5ef2
[ "BSD-3-Clause" ]
57
2017-05-25T10:32:59.000Z
2021-05-06T21:03:39.000Z
test/action_for_children/web/controllers/page_controller_test.exs
cast-fuse/action-for-children
3cded6e04abdaf13d4d1033bf1daa783bc0e5ef2
[ "BSD-3-Clause" ]
null
null
null
defmodule ActionForChildrenWeb.PageControllerTest do use ActionForChildrenWeb.ConnCase test "non logged in user sees options to start conversation and enter code", %{conn: conn} do conn = get(conn, "/") contents = [ "Action for Children", "intercom", "form", "Continue a conversation" ] Enum.map(contents, fn content -> assert html_response(conn, 200) =~ content end) end test "logged in user redirected to conversation", %{conn: conn} do user = insert_user() conn = assign(conn, :user, user) conn = get(conn, "/") assert redirected_to(conn) == "/users" end end
23.814815
95
0.645412
0842f46511b37097badec0658a9dc734e3aa8fe1
187
ex
Elixir
lib/mix/tasks/exq.run.ex
buob/exq
921fe26c7f3db7d77ff67e3a03ea9b83612f5598
[ "Apache-2.0" ]
null
null
null
lib/mix/tasks/exq.run.ex
buob/exq
921fe26c7f3db7d77ff67e3a03ea9b83612f5598
[ "Apache-2.0" ]
null
null
null
lib/mix/tasks/exq.run.ex
buob/exq
921fe26c7f3db7d77ff67e3a03ea9b83612f5598
[ "Apache-2.0" ]
3
2021-04-07T14:50:17.000Z
2021-04-07T14:50:30.000Z
defmodule Mix.Tasks.Exq.Run do use Mix.Task @shortdoc "Starts the Exq worker" def run(_args) do Exq.start_link IO.puts "Started Exq" :timer.sleep(:infinity) end end
15.583333
35
0.679144
0842fba2b398e0990fd3bd6d58f7261160b6fae5
1,693
exs
Elixir
apps/tai/test/tai/events_logger_test.exs
chrism2671/tai-1
847827bd23908adfad4a82c83d5295bdbc022796
[ "MIT" ]
null
null
null
apps/tai/test/tai/events_logger_test.exs
chrism2671/tai-1
847827bd23908adfad4a82c83d5295bdbc022796
[ "MIT" ]
null
null
null
apps/tai/test/tai/events_logger_test.exs
chrism2671/tai-1
847827bd23908adfad4a82c83d5295bdbc022796
[ "MIT" ]
1
2020-05-03T23:32:11.000Z
2020-05-03T23:32:11.000Z
defmodule Tai.EventsLoggerTest do use ExUnit.Case, async: false import ExUnit.CaptureLog @event %Support.CustomEvent{hello: "world"} setup do start_supervised!({Tai.Events, 1}) :ok end test "can start multiple loggers with different ids" do {:ok, a} = Tai.EventsLogger.start_link(id: :a) {:ok, b} = Tai.EventsLogger.start_link(id: :b) :ok = GenServer.stop(a) :ok = GenServer.stop(b) end test "logs error events" do logger = start_supervised!({Tai.EventsLogger, id: __MODULE__}) assert capture_log(fn -> send(logger, {Tai.Event, @event, :error}) :timer.sleep(100) end) =~ "[error] {\"data\":{\"hello\":\"custom\"},\"type\":\"Support.CustomEvent\"}" end test "logs warn events" do logger = start_supervised!({Tai.EventsLogger, id: __MODULE__}) assert capture_log(fn -> send(logger, {Tai.Event, @event, :warn}) :timer.sleep(100) end) =~ "[warn] {\"data\":{\"hello\":\"custom\"},\"type\":\"Support.CustomEvent\"}" end test "logs info events" do logger = start_supervised!({Tai.EventsLogger, id: __MODULE__}) assert capture_log(fn -> send(logger, {Tai.Event, @event, :info}) :timer.sleep(100) end) =~ "[info] {\"data\":{\"hello\":\"custom\"},\"type\":\"Support.CustomEvent\"}" end test "logs debug events" do logger = start_supervised!({Tai.EventsLogger, id: __MODULE__}) assert capture_log(fn -> send(logger, {Tai.Event, @event, :debug}) :timer.sleep(100) end) =~ "[debug] {\"data\":{\"hello\":\"custom\"},\"type\":\"Support.CustomEvent\"}" end end
30.232143
95
0.588895
08435625b6be1b2c80e9bc2f80e7a6e3889c700d
1,701
ex
Elixir
apps/enchat_web/lib/enchat_web/endpoint.ex
Allypost/enchat
f9cff2906116550099f4574bf44e8dc1fea6d476
[ "MIT" ]
null
null
null
apps/enchat_web/lib/enchat_web/endpoint.ex
Allypost/enchat
f9cff2906116550099f4574bf44e8dc1fea6d476
[ "MIT" ]
null
null
null
apps/enchat_web/lib/enchat_web/endpoint.ex
Allypost/enchat
f9cff2906116550099f4574bf44e8dc1fea6d476
[ "MIT" ]
null
null
null
defmodule EnchatWeb.Endpoint do use Phoenix.Endpoint, otp_app: :enchat_web socket("/socket", EnchatWeb.UserSocket) # Serve at "/" the static files from "priv/static" directory. # # You should set gzip to true if you are running phoenix.digest # when deploying your static files in production. plug(Plug.Static, at: "/", from: :enchat_web, gzip: false, only: ~w(css fonts images js favicon.ico robots.txt) ) # Code reloading can be explicitly enabled under the # :code_reloader configuration of your endpoint. if code_reloading? do socket("/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket) plug(Phoenix.LiveReloader) plug(Phoenix.CodeReloader) end plug(Plug.RequestId) plug(Plug.Logger) plug(Plug.Parsers, parsers: [:urlencoded, :multipart, :json], pass: ["*/*"], json_decoder: Poison ) plug(Plug.MethodOverride) plug(Plug.Head) # The session will be stored in the cookie and signed, # this means its contents can be read but not tampered with. # Set :encryption_salt if you would also like to encrypt it. plug(Plug.Session, store: :cookie, key: "_enchat_web_key", signing_salt: "YI39jqRy" ) plug(EnchatWeb.Router) @doc """ Callback invoked for dynamically configuring the endpoint. It receives the endpoint configuration and checks if configuration should be loaded from the system environment. """ def init(_key, config) do if config[:load_from_system_env] do port = System.get_env("PORT") || raise "expected the PORT environment variable to be set" {:ok, Keyword.put(config, :http, [:inet6, port: port])} else {:ok, config} end end end
27
95
0.696649
084360acf9d9fd670e1e22d32407512a0999ca6f
944
exs
Elixir
config/dev.exs
wkozyra95/phoenix-boilerplate
8fd4607639f327928b0f8319a697f8efa951556e
[ "MIT" ]
null
null
null
config/dev.exs
wkozyra95/phoenix-boilerplate
8fd4607639f327928b0f8319a697f8efa951556e
[ "MIT" ]
null
null
null
config/dev.exs
wkozyra95/phoenix-boilerplate
8fd4607639f327928b0f8319a697f8efa951556e
[ "MIT" ]
null
null
null
use Mix.Config # For development, we disable any cache and enable # debugging and code reloading. # # The watchers configuration can be used to run external # watchers to your application. For example, we use it # with brunch.io to recompile .js and .css sources. config :starter_project, StarterProject.Endpoint, http: [port: 4000], debug_errors: true, code_reloader: true, check_origin: false, watchers: [] # Do not include metadata nor timestamps in development logs config :logger, :console, format: "[$level] $message\n" # Set a higher stacktrace during development. Avoid configuring such # in production as building large stacktraces may be expensive. config :phoenix, :stacktrace_depth, 20 # Configure your database config :starter_project, StarterProject.Repo, adapter: Ecto.Adapters.Postgres, username: "postgres", password: "postgres", database: "starter_project_dev", hostname: "localhost", pool_size: 10
29.5
68
0.760593
084369b9ed8fbbba884ef58f4de8fbac917bb8dd
8,207
ex
Elixir
lib/ex_doc/formatter/html/templates.ex
kianmeng/ex_doc
3c33f4c330bc3f002e0f2c9eca023b11c9837a3c
[ "Apache-2.0", "CC-BY-4.0" ]
null
null
null
lib/ex_doc/formatter/html/templates.ex
kianmeng/ex_doc
3c33f4c330bc3f002e0f2c9eca023b11c9837a3c
[ "Apache-2.0", "CC-BY-4.0" ]
null
null
null
lib/ex_doc/formatter/html/templates.ex
kianmeng/ex_doc
3c33f4c330bc3f002e0f2c9eca023b11c9837a3c
[ "Apache-2.0", "CC-BY-4.0" ]
null
null
null
defmodule ExDoc.Formatter.HTML.Templates do @moduledoc false require EEx # TODO: It should not depend on the parent module # TODO: Add tests that assert on the returned structured, not on JSON alias ExDoc.Utils.SimpleJSON alias ExDoc.Formatter.HTML @doc """ Generate content from the module template for a given `node` """ def module_page(module_node, nodes_map, config) do summary = module_summary(module_node) module_template(config, module_node, summary, nodes_map) end @doc """ Get the full specs from a function, already in HTML form. """ def get_specs(%ExDoc.TypeNode{spec: spec}) do [spec] end def get_specs(%ExDoc.FunctionNode{specs: specs}) when is_list(specs) do presence(specs) end def get_specs(_node) do nil end @doc """ Get defaults clauses. """ def get_defaults(%{defaults: defaults}) do defaults end def get_defaults(_) do [] end @doc """ Get the pretty name of a function node """ def pretty_type(%{type: t}) do Atom.to_string(t) end @doc """ Returns the HTML formatted title for the module page. """ def module_title(%{type: :task, title: title}), do: title def module_title(%{type: :module, title: title}), do: title def module_title(%{type: type, title: title}), do: title <> " <small>#{type}</small>" @doc """ Gets the first paragraph of the documentation of a node. It strips surrounding white-spaces and traling `:`. If `doc` is `nil`, it returns `nil`. 
""" @spec synopsis(String.t()) :: String.t() @spec synopsis(nil) :: nil def synopsis(nil), do: nil def synopsis(doc) when is_binary(doc) do case :binary.split(doc, "</p>") do [left, _] -> String.trim_trailing(left, ":") <> "</p>" [all] -> all end end defp presence([]), do: nil defp presence(other), do: other @doc false def h(binary) do escape_map = [ {"&", "&amp;"}, {"<", "&lt;"}, {">", "&gt;"}, {~S("), "&quot;"} ] Enum.reduce(escape_map, binary, fn {pattern, escape}, acc -> String.replace(acc, pattern, escape) end) end @doc false def enc(binary), do: URI.encode(binary) @doc """ Create a JS object which holds all the items displayed in the sidebar area """ def create_sidebar_items(nodes_map, extras) do nodes = nodes_map |> Enum.map(&sidebar_module/1) |> Map.new() |> Map.put(:extras, sidebar_extras(extras)) ["sidebarNodes=" | SimpleJSON.encode(nodes)] end defp sidebar_extras(extras) do for extra <- extras do %{id: id, title: title, group: group, content: content} = extra %{ id: to_string(id), title: to_string(title), group: to_string(group), headers: extract_headers(content) } end end defp sidebar_module({id, modules}) do modules = for module <- modules do extra = module |> module_summary() |> Enum.reject(fn {_type, nodes_map} -> nodes_map == [] end) |> case do [] -> [] entries -> [nodeGroups: Enum.map(entries, &sidebar_entries/1)] end sections = module_sections(module) pairs = for key <- [:id, :title, :nested_title, :nested_context], value = Map.get(module, key), do: {key, value} Map.new([group: to_string(module.group)] ++ extra ++ pairs ++ sections) end {id, modules} end defp sidebar_entries({group, docs}) do nodes = Enum.map(docs, fn doc -> %{id: doc.id, anchor: URI.encode(HTML.link_id(doc))} end) %{key: HTML.text_to_id(group), name: group, nodes: nodes} end defp module_sections(%ExDoc.ModuleNode{rendered_doc: nil}), do: [sections: []] defp module_sections(module) do {sections, _} = module.rendered_doc |> extract_headers() |> Enum.map_reduce(%{}, fn header, acc 
-> # TODO Duplicates some of the logic of link_headings/3 case Map.fetch(acc, header.id) do {:ok, id} -> {%{header | anchor: "module-#{header.anchor}-#{id}"}, Map.put(acc, header.id, id + 1)} :error -> {%{header | anchor: "module-#{header.anchor}"}, Map.put(acc, header.id, 1)} end end) [sections: sections] end @h2_regex ~r/<h2.*?>(.*?)<\/h2>/m defp extract_headers(content) do @h2_regex |> Regex.scan(content, capture: :all_but_first) |> List.flatten() |> Enum.filter(&(&1 != "")) |> Enum.map(&HTML.strip_tags/1) |> Enum.map(&%{id: &1, anchor: URI.encode(HTML.text_to_id(&1))}) end def module_summary(module_node) do [Types: module_node.typespecs] ++ function_groups(module_node.function_groups, module_node.docs) ++ [Callbacks: Enum.filter(module_node.docs, &(&1.type in [:callback, :macrocallback]))] end defp function_groups(groups, docs) do for group <- groups, do: {group, Enum.filter(docs, &(&1.group == group))} end defp logo_path(%{logo: nil}), do: nil defp logo_path(%{logo: logo}), do: "assets/logo#{Path.extname(logo)}" defp sidebar_type(:exception), do: "modules" defp sidebar_type(:extra), do: "extras" defp sidebar_type(:module), do: "modules" defp sidebar_type(:behaviour), do: "modules" defp sidebar_type(:protocol), do: "modules" defp sidebar_type(:task), do: "tasks" defp sidebar_type(:search), do: "search" def asset_rev(output, pattern) do output = Path.expand(output) output |> Path.join(pattern) |> Path.wildcard() |> relative_asset(output, pattern) end defp relative_asset([], output, pattern), do: raise("could not find matching #{output}/#{pattern}") defp relative_asset([h | _], output, _pattern), do: Path.relative_to(h, output) @doc """ Link headings found with `regex` with in the given `content`. IDs are prefixed with `prefix`. 
""" @heading_regex ~r/<(h[23]).*?>(.*?)<\/\1>/m @spec link_headings(String.t(), Regex.t(), String.t()) :: String.t() def link_headings(content, regex \\ @heading_regex, prefix \\ "") def link_headings(nil, _, _), do: nil def link_headings(content, regex, prefix) do regex |> Regex.scan(content) |> Enum.reduce({content, %{}}, fn [match, tag, title], {content, occurrences} -> possible_id = HTML.text_to_id(title) id_occurred = Map.get(occurrences, possible_id, 0) anchor_id = if id_occurred >= 1, do: "#{possible_id}-#{id_occurred}", else: possible_id replacement = link_heading(match, tag, title, anchor_id, prefix) linked_content = String.replace(content, match, replacement, global: false) incremented_occs = Map.put(occurrences, possible_id, id_occurred + 1) {linked_content, incremented_occs} end) |> elem(0) end defp link_heading(match, _tag, _title, "", _prefix), do: match defp link_heading(_match, tag, title, id, prefix) do """ <#{tag} id="#{prefix}#{id}" class="section-heading"> <a href="##{prefix}#{id}" class="hover-link"><span class="icon-link" aria-hidden="true"></span></a> #{title} </#{tag}> """ end defp link_moduledoc_headings(content) do link_headings(content, @heading_regex, "module-") end defp link_detail_headings(content, prefix) do link_headings(content, @heading_regex, prefix <> "-") end templates = [ detail_template: [:node, :_module], footer_template: [:config], head_template: [:config, :page], module_template: [:config, :module, :summary, :nodes_map], not_found_template: [:config, :nodes_map], api_reference_entry_template: [:module_node], api_reference_template: [:config, :nodes_map], extra_template: [:config, :title, :nodes_map, :content, :refs], search_template: [:config, :nodes_map], sidebar_template: [:config, :nodes_map], summary_template: [:name, :nodes], summary_entry_template: [:node], redirect_template: [:config, :redirect_to], bottom_actions_template: [:refs] ] Enum.each(templates, fn {name, args} -> filename = 
Path.expand("templates/#{name}.eex", __DIR__) @doc false EEx.function_from_file(:def, name, filename, args, trim: true) end) end
28.496528
105
0.626051
08437bbf709fed584be03f38c4a9e0bb0283c87e
499
ex
Elixir
ros/ros_ui_station/lib/ros_ui_station_web/views/error_view.ex
kujua/elixir-handbook
4185ad8da7f652fdb59c799dc58bcb33fda10475
[ "Apache-2.0" ]
1
2019-07-01T18:47:28.000Z
2019-07-01T18:47:28.000Z
ros/ros_ui_station/lib/ros_ui_station_web/views/error_view.ex
kujua/elixir-handbook
4185ad8da7f652fdb59c799dc58bcb33fda10475
[ "Apache-2.0" ]
4
2020-07-17T16:57:18.000Z
2021-05-09T23:50:52.000Z
ros/ros_ui_station/lib/ros_ui_station_web/views/error_view.ex
kujua/elixir-handbook
4185ad8da7f652fdb59c799dc58bcb33fda10475
[ "Apache-2.0" ]
null
null
null
defmodule Ros.StationWeb.ErrorView do use Ros.StationWeb, :view # If you want to customize a particular status code # for a certain format, you may uncomment below. # def render("500.html", _assigns) do # "Internal Server Error" # end # By default, Phoenix returns the status message from # the template name. For example, "404.html" becomes # "Not Found". def template_not_found(template, _assigns) do Phoenix.Controller.status_message_from_template(template) end end
29.352941
61
0.735471
08438c15e6b3de62be99f59663ab6275bef6c33f
19,386
ex
Elixir
lib/aws/generated/network_firewall.ex
onno-vos-dev/aws-elixir
00f02c2bce689b932948b6a4d603fd44bb5fc0e9
[ "Apache-2.0" ]
null
null
null
lib/aws/generated/network_firewall.ex
onno-vos-dev/aws-elixir
00f02c2bce689b932948b6a4d603fd44bb5fc0e9
[ "Apache-2.0" ]
null
null
null
lib/aws/generated/network_firewall.ex
onno-vos-dev/aws-elixir
00f02c2bce689b932948b6a4d603fd44bb5fc0e9
[ "Apache-2.0" ]
null
null
null
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE! # See https://github.com/aws-beam/aws-codegen for more details. defmodule AWS.NetworkFirewall do @moduledoc """ This is the API Reference for AWS Network Firewall. This guide is for developers who need detailed information about the Network Firewall API actions, data types, and errors. * The REST API requires you to handle connection details, such as calculating signatures, handling request retries, and error handling. For general information about using the AWS REST APIs, see [AWS APIs](https://docs.aws.amazon.com/general/latest/gr/aws-apis.html). To access Network Firewall using the REST API endpoint: `https://network-firewall.<region>.amazonaws.com ` * Alternatively, you can use one of the AWS SDKs to access an API that's tailored to the programming language or platform that you're using. For more information, see [AWS SDKs](http://aws.amazon.com/tools/#SDKs). * For descriptions of Network Firewall features, including and step-by-step instructions on how to use them through the Network Firewall console, see the [Network Firewall Developer Guide](https://docs.aws.amazon.com/network-firewall/latest/developerguide/). Network Firewall is a stateful, managed, network firewall and intrusion detection and prevention service for Amazon Virtual Private Cloud (Amazon VPC). With Network Firewall, you can filter traffic at the perimeter of your VPC. This includes filtering traffic going to and coming from an internet gateway, NAT gateway, or over VPN or AWS Direct Connect. Network Firewall uses rules that are compatible with Suricata, a free, open source intrusion detection system (IDS) engine. AWS Network Firewall supports Suricata version 5.0.2. For information about Suricata, see the [Suricata website](https://suricata-ids.org/). You can use Network Firewall to monitor and protect your VPC traffic in a number of ways. 
The following are just a few examples: * Allow domains or IP addresses for known AWS service endpoints, such as Amazon S3, and block all other forms of traffic. * Use custom lists of known bad domains to limit the types of domain names that your applications can access. * Perform deep packet inspection on traffic entering or leaving your VPC. * Use stateful protocol detection to filter protocols like HTTPS, regardless of the port used. To enable Network Firewall for your VPCs, you perform steps in both Amazon VPC and in Network Firewall. For information about using Amazon VPC, see [Amazon VPC User Guide](https://docs.aws.amazon.com/vpc/latest/userguide/). To start using Network Firewall, do the following: 1. (Optional) If you don't already have a VPC that you want to protect, create it in Amazon VPC. 2. In Amazon VPC, in each Availability Zone where you want to have a firewall endpoint, create a subnet for the sole use of Network Firewall. 3. In Network Firewall, create stateless and stateful rule groups, to define the components of the network traffic filtering behavior that you want your firewall to have. 4. In Network Firewall, create a firewall policy that uses your rule groups and specifies additional default traffic filtering behavior. 5. In Network Firewall, create a firewall and specify your new firewall policy and VPC subnets. Network Firewall creates a firewall endpoint in each subnet that you specify, with the behavior that's defined in the firewall policy. 6. In Amazon VPC, use ingress routing enhancements to route traffic through the new firewall endpoints. 
""" alias AWS.Client alias AWS.Request def metadata do %AWS.ServiceMetadata{ abbreviation: "Network Firewall", api_version: "2020-11-12", content_type: "application/x-amz-json-1.0", credential_scope: nil, endpoint_prefix: "network-firewall", global?: false, protocol: "json", service_id: "Network Firewall", signature_version: "v4", signing_name: "network-firewall", target_prefix: "NetworkFirewall_20201112" } end @doc """ Associates a `FirewallPolicy` to a `Firewall`. A firewall policy defines how to monitor and manage your VPC network traffic, using a collection of inspection rule groups and other settings. Each firewall requires one firewall policy association, and you can use the same firewall policy for multiple firewalls. """ def associate_firewall_policy(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "AssociateFirewallPolicy", input, options) end @doc """ Associates the specified subnets in the Amazon VPC to the firewall. You can specify one subnet for each of the Availability Zones that the VPC spans. This request creates an AWS Network Firewall firewall endpoint in each of the subnets. To enable the firewall's protections, you must also modify the VPC's route tables for each subnet's Availability Zone, to redirect the traffic that's coming into and going out of the zone through the firewall endpoint. """ def associate_subnets(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "AssociateSubnets", input, options) end @doc """ Creates an AWS Network Firewall `Firewall` and accompanying `FirewallStatus` for a VPC. The firewall defines the configuration settings for an AWS Network Firewall firewall. The settings that you can define at creation include the firewall policy, the subnets in your VPC to use for the firewall endpoints, and any tags that are attached to the firewall AWS resource. After you create a firewall, you can provide additional settings, like the logging configuration. 
To update the settings for a firewall, you use the operations that apply to the settings themselves, for example `UpdateLoggingConfiguration`, `AssociateSubnets`, and `UpdateFirewallDeleteProtection`. To manage a firewall's tags, use the standard AWS resource tagging operations, `ListTagsForResource`, `TagResource`, and `UntagResource`. To retrieve information about firewalls, use `ListFirewalls` and `DescribeFirewall`. """ def create_firewall(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateFirewall", input, options) end @doc """ Creates the firewall policy for the firewall according to the specifications. An AWS Network Firewall firewall policy defines the behavior of a firewall, in a collection of stateless and stateful rule groups and other settings. You can use one firewall policy for multiple firewalls. """ def create_firewall_policy(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateFirewallPolicy", input, options) end @doc """ Creates the specified stateless or stateful rule group, which includes the rules for network traffic inspection, a capacity setting, and tags. You provide your rule group specification in your request using either `RuleGroup` or `Rules`. """ def create_rule_group(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateRuleGroup", input, options) end @doc """ Deletes the specified `Firewall` and its `FirewallStatus`. This operation requires the firewall's `DeleteProtection` flag to be `FALSE`. You can't revert this operation. You can check whether a firewall is in use by reviewing the route tables for the Availability Zones where you have firewall subnet mappings. Retrieve the subnet mappings by calling `DescribeFirewall`. You define and update the route tables through Amazon VPC. As needed, update the route tables for the zones to remove the firewall endpoints. 
When the route tables no longer use the firewall endpoints, you can remove the firewall safely. To delete a firewall, remove the delete protection if you need to using `UpdateFirewallDeleteProtection`, then delete the firewall by calling `DeleteFirewall`. """ def delete_firewall(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteFirewall", input, options) end @doc """ Deletes the specified `FirewallPolicy`. """ def delete_firewall_policy(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteFirewallPolicy", input, options) end @doc """ Deletes a resource policy that you created in a `PutResourcePolicy` request. """ def delete_resource_policy(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteResourcePolicy", input, options) end @doc """ Deletes the specified `RuleGroup`. """ def delete_rule_group(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteRuleGroup", input, options) end @doc """ Returns the data objects for the specified firewall. """ def describe_firewall(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DescribeFirewall", input, options) end @doc """ Returns the data objects for the specified firewall policy. """ def describe_firewall_policy(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DescribeFirewallPolicy", input, options) end @doc """ Returns the logging configuration for the specified firewall. """ def describe_logging_configuration(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DescribeLoggingConfiguration", input, options) end @doc """ Retrieves a resource policy that you created in a `PutResourcePolicy` request. 
""" def describe_resource_policy(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DescribeResourcePolicy", input, options) end @doc """ Returns the data objects for the specified rule group. """ def describe_rule_group(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DescribeRuleGroup", input, options) end @doc """ High-level information about a rule group, returned by operations like create and describe. You can use the information provided in the metadata to retrieve and manage a rule group. You can retrieve all objects for a rule group by calling `DescribeRuleGroup`. """ def describe_rule_group_metadata(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DescribeRuleGroupMetadata", input, options) end @doc """ Removes the specified subnet associations from the firewall. This removes the firewall endpoints from the subnets and removes any network filtering protections that the endpoints were providing. """ def disassociate_subnets(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DisassociateSubnets", input, options) end @doc """ Retrieves the metadata for the firewall policies that you have defined. Depending on your setting for max results and the number of firewall policies, a single call might not return the full list. """ def list_firewall_policies(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "ListFirewallPolicies", input, options) end @doc """ Retrieves the metadata for the firewalls that you have defined. If you provide VPC identifiers in your request, this returns only the firewalls for those VPCs. Depending on your setting for max results and the number of firewalls, a single call might not return the full list. 
""" def list_firewalls(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "ListFirewalls", input, options) end @doc """ Retrieves the metadata for the rule groups that you have defined. Depending on your setting for max results and the number of rule groups, a single call might not return the full list. """ def list_rule_groups(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "ListRuleGroups", input, options) end @doc """ Retrieves the tags associated with the specified resource. Tags are key:value pairs that you can use to categorize and manage your resources, for purposes like billing. For example, you might set the tag key to "customer" and the value to the customer name or ID. You can specify one or more tags to add to each AWS resource, up to 50 tags for a resource. You can tag the AWS resources that you manage through AWS Network Firewall: firewalls, firewall policies, and rule groups. """ def list_tags_for_resource(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "ListTagsForResource", input, options) end @doc """ Creates or updates an AWS Identity and Access Management policy for your rule group or firewall policy. Use this to share rule groups and firewall policies between accounts. This operation works in conjunction with the AWS Resource Access Manager (RAM) service to manage resource sharing for Network Firewall. Use this operation to create or update a resource policy for your rule group or firewall policy. In the policy, you specify the accounts that you want to share the resource with and the operations that you want the accounts to be able to perform. When you add an account in the resource policy, you then run the following Resource Access Manager (RAM) operations to access and accept the shared rule group or firewall policy. 
* [GetResourceShareInvitations](https://docs.aws.amazon.com/ram/latest/APIReference/API_GetResourceShareInvitations.html) - Returns the Amazon Resource Names (ARNs) of the resource share invitations. * [AcceptResourceShareInvitation](https://docs.aws.amazon.com/ram/latest/APIReference/API_AcceptResourceShareInvitation.html) - Accepts the share invitation for a specified resource share. For additional information about resource sharing using RAM, see [AWS Resource Access Manager User Guide](https://docs.aws.amazon.com/ram/latest/userguide/what-is.html). """ def put_resource_policy(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "PutResourcePolicy", input, options) end @doc """ Adds the specified tags to the specified resource. Tags are key:value pairs that you can use to categorize and manage your resources, for purposes like billing. For example, you might set the tag key to "customer" and the value to the customer name or ID. You can specify one or more tags to add to each AWS resource, up to 50 tags for a resource. You can tag the AWS resources that you manage through AWS Network Firewall: firewalls, firewall policies, and rule groups. """ def tag_resource(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "TagResource", input, options) end @doc """ Removes the tags with the specified keys from the specified resource. Tags are key:value pairs that you can use to categorize and manage your resources, for purposes like billing. For example, you might set the tag key to "customer" and the value to the customer name or ID. You can specify one or more tags to add to each AWS resource, up to 50 tags for a resource. You can manage tags for the AWS resources that you manage through AWS Network Firewall: firewalls, firewall policies, and rule groups. 
""" def untag_resource(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UntagResource", input, options) end @doc """ Modifies the flag, `DeleteProtection`, which indicates whether it is possible to delete the firewall. If the flag is set to `TRUE`, the firewall is protected against deletion. This setting helps protect against accidentally deleting a firewall that's in use. """ def update_firewall_delete_protection(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateFirewallDeleteProtection", input, options) end @doc """ Modifies the description for the specified firewall. Use the description to help you identify the firewall when you're working with it. """ def update_firewall_description(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateFirewallDescription", input, options) end @doc """ Updates the properties of the specified firewall policy. """ def update_firewall_policy(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateFirewallPolicy", input, options) end @doc """ Modifies the flag, `ChangeProtection`, which indicates whether it is possible to change the firewall. If the flag is set to `TRUE`, the firewall is protected from changes. This setting helps protect against accidentally changing a firewall that's in use. """ def update_firewall_policy_change_protection(%Client{} = client, input, options \\ []) do Request.request_post( client, metadata(), "UpdateFirewallPolicyChangeProtection", input, options ) end @doc """ Sets the logging configuration for the specified firewall. To change the logging configuration, retrieve the `LoggingConfiguration` by calling `DescribeLoggingConfiguration`, then change it and provide the modified object to this update call. You must change the logging configuration one `LogDestinationConfig` at a time inside the retrieved `LoggingConfiguration` object. 
You can perform only one of the following actions in any call to `UpdateLoggingConfiguration`: * Create a new log destination object by adding a single `LogDestinationConfig` array element to `LogDestinationConfigs`. * Delete a log destination object by removing a single `LogDestinationConfig` array element from `LogDestinationConfigs`. * Change the `LogDestination` setting in a single `LogDestinationConfig` array element. You can't change the `LogDestinationType` or `LogType` in a `LogDestinationConfig`. To change these settings, delete the existing `LogDestinationConfig` object and create a new one, using two separate calls to this update operation. """ def update_logging_configuration(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateLoggingConfiguration", input, options) end @doc """ Updates the rule settings for the specified rule group. You use a rule group by reference in one or more firewall policies. When you modify a rule group, you modify all firewall policies that use the rule group. To update a rule group, first call `DescribeRuleGroup` to retrieve the current `RuleGroup` object, update the object as needed, and then provide the updated object to this call. """ def update_rule_group(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateRuleGroup", input, options) end def update_subnet_change_protection(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateSubnetChangeProtection", input, options) end end
41.246809
199
0.743475
0843ac137a84039e514b69bd954855ba574c1ab4
30,924
exs
Elixir
lib/elixir/test/elixir/kernel/errors_test.exs
elkinsd/elixir
810965e193cb57b82363e7c0c97b719743b7964f
[ "Apache-2.0" ]
null
null
null
lib/elixir/test/elixir/kernel/errors_test.exs
elkinsd/elixir
810965e193cb57b82363e7c0c97b719743b7964f
[ "Apache-2.0" ]
null
null
null
lib/elixir/test/elixir/kernel/errors_test.exs
elkinsd/elixir
810965e193cb57b82363e7c0c97b719743b7964f
[ "Apache-2.0" ]
null
null
null
Code.require_file "../test_helper.exs", __DIR__ defmodule Kernel.ErrorsTest do use ExUnit.Case, async: true import CompileAssertion defmacro hello do quote location: :keep do def hello, do: :world end end test "invalid token" do assert_compile_fail SyntaxError, "nofile:1: unexpected token: \"\u200B\" (column 7, codepoint U+200B)", '[foo: \u200B]\noops' end test "invalid quoted token" do assert_compile_fail SyntaxError, "nofile:1: syntax error before: \"world\"", '"hello" "world"' assert_compile_fail SyntaxError, "nofile:1: syntax error before: Foobar", '1 Foobar' assert_compile_fail SyntaxError, "nofile:1: syntax error before: foo", 'Foo.:foo' assert_compile_fail SyntaxError, "nofile:1: syntax error before: \"foo\"", 'Foo.:"foo\#{:bar}"' assert_compile_fail SyntaxError, "nofile:1: syntax error before: \"", 'Foo.:"\#{:bar}"' end test "invalid identifier" do msg = fn char, name -> "nofile:1: invalid character '#{char}' in identifier: #{name}" end assert_compile_fail SyntaxError, msg.(:@, "foo@"), 'foo@' assert_compile_fail SyntaxError, msg.(:@, "foo@"), 'foo@ ' assert_compile_fail SyntaxError, msg.(:@, "foo@bar"), 'foo@bar' assert_compile_fail SyntaxError, msg.(:!, "Foo!"), 'Foo!' 
end test "invalid fn" do assert_compile_fail SyntaxError, "nofile:1: expected clauses to be defined with -> inside: 'fn'", 'fn 1 end' end test "invalid Access" do msg = fn(val) -> "nofile:1: the Access syntax and calls to Access.get/2" <> " are not available for the value: " <> val end assert_compile_fail CompileError, msg.("1"), "1[:foo]" assert_compile_fail CompileError, msg.("1.1"), "1.1[:foo]" assert_compile_fail CompileError, msg.("{}"), "{}[:foo]" assert_compile_fail CompileError, msg.(":foo"), ":foo[:foo]" assert_compile_fail CompileError, msg.("\"\""), "\"\"[:foo]" assert_compile_fail CompileError, msg.("<<>>"), "<<>>[:foo]" end test "kw missing space" do msg = "nofile:1: keyword argument must be followed by space after: foo:" assert_compile_fail SyntaxError, msg, "foo:bar" assert_compile_fail SyntaxError, msg, "foo:+" assert_compile_fail SyntaxError, msg, "foo:+1" end test "sigil terminator" do assert_compile_fail TokenMissingError, "nofile:3: missing terminator: \" (for sigil ~r\" starting at line 1)", '~r"foo\n\n' assert_compile_fail TokenMissingError, "nofile:3: missing terminator: } (for sigil ~r{ starting at line 1)", '~r{foo\n\n' end test "dot terminator" do assert_compile_fail TokenMissingError, "nofile:1: missing terminator: \" (for function name starting at line 1)", 'foo."bar' end test "string terminator" do assert_compile_fail TokenMissingError, "nofile:1: missing terminator: \" (for string starting at line 1)", '"bar' end test "heredoc start" do assert_compile_fail SyntaxError, "nofile:1: heredoc start must be followed by a new line after \"\"\"", '"""bar\n"""' end test "heredoc terminator" do assert_compile_fail TokenMissingError, "nofile:2: missing terminator: \"\"\" (for heredoc starting at line 1)", '"""\nbar' assert_compile_fail SyntaxError, "nofile:2: invalid location for heredoc terminator, please escape token or move it to its own line: \"\"\"", '"""\nbar"""' end test "unexpected end" do assert_compile_fail SyntaxError, "nofile:1: 
unexpected token: end", '1 end' end test "syntax error" do assert_compile_fail SyntaxError, "nofile:1: syntax error before: '.'", '+.foo' end test "syntax error before sigil" do msg = fn x -> "nofile:1: syntax error before: sigil ~s starting with content '#{x}'" end assert_compile_fail SyntaxError, msg.("bar baz"), '~s(foo) ~s(bar baz)' assert_compile_fail SyntaxError, msg.(""), '~s(foo) ~s()' assert_compile_fail SyntaxError, msg.("bar "), '~s(foo) ~s(bar \#{:baz})' assert_compile_fail SyntaxError, msg.(""), '~s(foo) ~s(\#{:bar} baz)' end test "compile error on op ambiguity" do msg = "nofile:1: \"a -1\" looks like a function call but there is a variable named \"a\", " <> "please use explicit parentheses or even spaces" assert_compile_fail CompileError, msg, 'a = 1; a -1' max = 1 assert max == 1 assert (max 1, 2) == 2 end test "syntax error with do" do assert_compile_fail SyntaxError, ~r/nofile:1: unexpected token "do"./, 'if true, do\n' assert_compile_fail SyntaxError, ~r/nofile:1: unexpected keyword "do:"./, 'if true do:\n' end test "syntax error on parens call" do msg = "nofile:1: unexpected parentheses. If you are making a function call, do not " <> "insert spaces between the function name and the opening parentheses. " <> "Syntax error before: '('" assert_compile_fail SyntaxError, msg, 'foo (hello, world)' end test "syntax error on nested no parens call" do msg = "nofile:1: unexpected comma. 
Parentheses are required to solve ambiguity" assert_compile_fail SyntaxError, msg, '[foo 1, 2]' assert_compile_fail SyntaxError, msg, '[foo bar 1, 2]' assert_compile_fail SyntaxError, msg, '[do: foo 1, 2]' assert_compile_fail SyntaxError, msg, 'foo(do: bar 1, 2)' assert_compile_fail SyntaxError, msg, '{foo 1, 2}' assert_compile_fail SyntaxError, msg, '{foo bar 1, 2}' assert_compile_fail SyntaxError, msg, 'foo 1, foo 2, 3' assert_compile_fail SyntaxError, msg, 'foo 1, @bar 3, 4' assert_compile_fail SyntaxError, msg, 'foo 1, 2 + bar 3, 4' assert_compile_fail SyntaxError, msg, 'foo(1, foo 2, 3)' assert is_list List.flatten [1] assert is_list Enum.reverse [3, 2, 1], [4, 5, 6] assert is_list(Enum.reverse [3, 2, 1], [4, 5, 6]) assert false || is_list Enum.reverse [3, 2, 1], [4, 5, 6] assert [List.flatten List.flatten [1]] == [[1]] interpret = fn x -> Macro.to_string Code.string_to_quoted! x end assert interpret.("f 1 + g h 2, 3") == "f(1 + g(h(2, 3)))" assert interpret.("assert [] = TestRepo.all from p in Post, where: p.title in ^[]") == "assert([] = TestRepo.all(from(p in Post, where: p.title() in ^[])))" end test "syntax error on atom dot alias" do msg = "nofile:1: atom cannot be followed by an alias. If the '.' was meant to be " <> "part of the atom's name, the atom name must be quoted. 
Syntax error before: '.'" assert_compile_fail SyntaxError, msg, ':foo.Bar' assert_compile_fail SyntaxError, msg, ':"foo".Bar' end test "syntax error with no token" do assert_compile_fail TokenMissingError, "nofile:1: missing terminator: ) (for \"(\" starting at line 1)", 'case 1 (' end test "clause with defaults" do assert_compile_fail CompileError, "nofile:3: definitions with multiple clauses and default values require a function head", ~C''' defmodule Kernel.ErrorsTest.ClauseWithDefaults1 do def hello(arg \\ 0), do: nil def hello(arg \\ 1), do: nil end ''' assert_compile_fail CompileError, "nofile:2: undefined function foo/0", ~C''' defmodule Kernel.ErrorsTest.ClauseWithDefaults3 do def hello(foo, bar \\ foo()) def hello(foo, bar), do: foo + bar end ''' end test "invalid match pattern" do assert_compile_fail CompileError, "nofile:2: invalid expression in match", ''' case true do true && true -> true end ''' end test "different defs with defaults" do assert_compile_fail CompileError, "nofile:3: def hello/3 defaults conflicts with def hello/2", ~C''' defmodule Kernel.ErrorsTest.DifferentDefsWithDefaults1 do def hello(a, b \\ nil), do: a + b def hello(a, b \\ nil, c \\ nil), do: a + b + c end ''' assert_compile_fail CompileError, "nofile:3: def hello/2 conflicts with defaults from def hello/3", ~C''' defmodule Kernel.ErrorsTest.DifferentDefsWithDefaults2 do def hello(a, b \\ nil, c \\ nil), do: a + b + c def hello(a, b \\ nil), do: a + b end ''' end test "bad form" do assert_compile_fail CompileError, "nofile:2: undefined function bar/0", ''' defmodule Kernel.ErrorsTest.BadForm do def foo, do: bar() end ''' end test "unbound var" do assert_compile_fail CompileError, "nofile:1: unbound variable ^x", '^x = 1' end test "unbound not match" do assert_compile_fail CompileError, "nofile:1: cannot use ^x outside of match clauses", '^x' end test "unbound expr" do assert_compile_fail CompileError, "nofile:1: invalid argument for unary operator ^, expected an existing 
variable, got: ^is_atom(:foo)", '^is_atom(:foo) = true' end test "literal on map and struct" do assert_compile_fail SyntaxError, "nofile:1: syntax error before: '}'", '%{{:a, :b}}' assert_compile_fail SyntaxError, "nofile:1: syntax error before: '{'", '%{:a, :b}{a: :b}' assert_compile_fail CompileError, "nofile:1: expected key-value pairs in a map, got: put_in(foo.bar().baz(), nil)", 'foo = 1; %{put_in(foo.bar.baz, nil), :bar}' end test "struct fields on defstruct" do assert_compile_fail ArgumentError, "struct field names must be atoms, got: 1", ''' defmodule Kernel.ErrorsTest.StructFieldsOnDefstruct do defstruct [1, 2, 3] end ''' end test "struct access on body" do assert_compile_fail CompileError, "nofile:3: cannot access struct Kernel.ErrorsTest.StructAccessOnBody, " <> "the struct was not yet defined or the struct " <> "is being accessed in the same context that defines it", ''' defmodule Kernel.ErrorsTest.StructAccessOnBody do defstruct %{name: "Brasilia"} %Kernel.ErrorsTest.StructAccessOnBody{} end ''' end test "unbound map key var" do assert_compile_fail CompileError, ~r"nofile:1: illegal use of variable x inside map key match,", '%{x => 1} = %{}' assert_compile_fail CompileError, ~r"nofile:1: illegal use of variable x inside map key match,", '%{x = 1 => 1}' end test "struct errors" do assert_compile_fail CompileError, "nofile:1: BadStruct.__struct__/1 is undefined, cannot expand struct BadStruct", '%BadStruct{}' assert_compile_fail CompileError, "nofile:1: BadStruct.__struct__/0 is undefined, cannot expand struct BadStruct", '%BadStruct{} = %{}' defmodule BadStruct do def __struct__ do [] end end assert_compile_fail CompileError, "nofile:1: expected Kernel.ErrorsTest.BadStruct.__struct__/0 to return a map, got: []", '%#{BadStruct}{} = %{}' defmodule GoodStruct do defstruct name: "john" end assert_compile_fail KeyError, "key :age not found in: %Kernel.ErrorsTest.GoodStruct{name: \"john\"}", '%#{GoodStruct}{age: 27}' assert_compile_fail CompileError, 
"nofile:1: unknown key :age for struct Kernel.ErrorsTest.GoodStruct", '%#{GoodStruct}{age: 27} = %{}' end test "name for defmodule" do assert_compile_fail CompileError, "nofile:1: invalid module name: 3", 'defmodule 1 + 2, do: 3' end test "invalid unquote" do assert_compile_fail CompileError, "nofile:1: unquote called outside quote", 'unquote 1' end test "invalid unquote splicing in oneliners" do assert_compile_fail ArgumentError, "unquote_splicing only works inside arguments and block contexts, " <> "wrap it in parens if you want it to work with one-liners", ''' defmodule Kernel.ErrorsTest.InvalidUnquoteSplicingInOneliners do defmacro oneliner2 do quote do: unquote_splicing 1 end def callme do oneliner2 end end ''' end test "invalid quote args" do assert_compile_fail CompileError, "nofile:1: invalid arguments for quote", 'quote 1' assert_compile_fail CompileError, "nofile:1: invalid options for quote, expected a keyword list", 'quote(:foo, do: foo)' end test "invalid calls" do assert_compile_fail CompileError, "nofile:1: invalid call foo(1)(2)", 'foo(1)(2)' assert_compile_fail CompileError, "nofile:1: invalid call 1.foo()", '1.foo' end test "unhandled stab" do assert_compile_fail CompileError, "nofile:1: unhandled operator ->", '(bar -> baz)' end test "undefined non local function" do assert_compile_fail CompileError, "nofile:1: undefined function call/2", 'call foo, do: :foo' end test "invalid attribute" do msg = ~r"cannot inject attribute @foo into function/macro because cannot escape " assert_raise ArgumentError, msg, fn -> defmodule InvalidAttribute do @foo fn -> nil end def bar, do: @foo end end end test "invalid struct field value" do msg = ~r"invalid value for struct field baz, cannot escape " assert_raise ArgumentError, msg, fn -> defmodule InvaliadStructFieldValue do defstruct baz: fn -> nil end end end end test "match attribute in module" do msg = "invalid write attribute syntax, you probably meant to use: @foo expression" assert_raise ArgumentError, 
msg, fn -> defmodule MatchAttributeInModule do @foo = 42 end end end test "invalid fn args" do assert_compile_fail TokenMissingError, "nofile:1: missing terminator: end (for \"fn\" starting at line 1)", 'fn 1' end test "invalid escape" do assert_compile_fail TokenMissingError, "nofile:1: invalid escape \\ at end of file", '1 \\' end test "function local conflict" do assert_compile_fail CompileError, "nofile:1: imported Kernel.&&/2 conflicts with local function", ''' defmodule Kernel.ErrorsTest.FunctionLocalConflict do def other, do: 1 && 2 def _ && _, do: :error end ''' end test "macro local conflict" do assert_compile_fail CompileError, "nofile:6: call to local macro &&/2 conflicts with imported Kernel.&&/2, " <> "please rename the local macro or remove the conflicting import", ''' defmodule Kernel.ErrorsTest.MacroLocalConflict do def hello, do: 1 || 2 defmacro _ || _, do: :ok defmacro _ && _, do: :error def world, do: 1 && 2 end ''' end test "macro with undefined local" do assert_compile_fail UndefinedFunctionError, "function Kernel.ErrorsTest.MacroWithUndefinedLocal.unknown/1" <> " is undefined (function unknown/1 is not available)", ''' defmodule Kernel.ErrorsTest.MacroWithUndefinedLocal do defmacrop bar, do: unknown(1) def baz, do: bar() end ''' end test "private macro" do assert_compile_fail UndefinedFunctionError, "function Kernel.ErrorsTest.PrivateMacro.foo/0 is undefined (function foo/0 is not available)", ''' defmodule Kernel.ErrorsTest.PrivateMacro do defmacrop foo, do: 1 defmacro bar, do: __MODULE__.foo defmacro baz, do: bar() end ''' end test "function definition with alias" do assert_compile_fail CompileError, "nofile:2: function names should start with lowercase characters or underscore, invalid name Bar", ''' defmodule Kernel.ErrorsTest.FunctionDefinitionWithAlias do def Bar do :baz end end ''' end test "function import conflict" do assert_compile_fail CompileError, "nofile:3: function exit/1 imported from both :erlang and Kernel, call is 
ambiguous", ''' defmodule Kernel.ErrorsTest.FunctionImportConflict do import :erlang, warn: false def foo, do: exit(:test) end ''' end test "import invalid macro" do assert_compile_fail CompileError, "nofile:1: cannot import Kernel.invalid/1 because it is undefined or private", 'import Kernel, only: [invalid: 1]' end test "import with invalid options" do assert_compile_fail CompileError, "nofile:1: invalid :only option for import, expected a keyword list", 'import Kernel, only: [:invalid]' assert_compile_fail CompileError, "nofile:1: invalid :except option for import, expected a keyword list", 'import Kernel, except: [:invalid]' end test "import with conflicting options" do assert_compile_fail CompileError, "nofile:1: :only and :except can only be given together to import" <> " when :only is either :functions or :macros", 'import Kernel, only: [], except: []' end test "unrequired macro" do assert_compile_fail CompileError, "nofile:2: you must require Kernel.ErrorsTest before invoking " <> "the macro Kernel.ErrorsTest.hello/0", ''' defmodule Kernel.ErrorsTest.UnrequiredMacro do Kernel.ErrorsTest.hello() end ''' end test "def defmacro clause change" do assert_compile_fail CompileError, "nofile:3: defmacro foo/1 already defined as def", ''' defmodule Kernel.ErrorsTest.DefDefmacroClauseChange do def foo(1), do: 1 defmacro foo(x), do: x end ''' end test "def defp clause change from another file" do assert_compile_fail CompileError, "nofile:4: def hello/0 already defined as defp", ''' defmodule Kernel.ErrorsTest.DefDefmacroClauseChange do require Kernel.ErrorsTest defp hello, do: :world Kernel.ErrorsTest.hello() end ''' end test "internal function overridden" do assert_compile_fail CompileError, "nofile:1: function __info__/1 is internal and should not be overridden", ''' defmodule Kernel.ErrorsTest.InternalFunctionOverridden do def __info__(_), do: [] end ''' end test "no macros" do assert_compile_fail CompileError, "nofile:2: could not load macros from module :lists", 
''' defmodule Kernel.ErrorsTest.NoMacros do import :lists, only: :macros end ''' end test "invalid macro" do assert_compile_fail CompileError, "nofile: invalid quoted expression: {:foo, :bar, :baz, :bat}", ''' defmodule Kernel.ErrorsTest.InvalidMacro do defmacrop oops do {:foo, :bar, :baz, :bat} end def test, do: oops() end ''' end test "unloaded module" do assert_compile_fail CompileError, "nofile:1: module Certainly.Doesnt.Exist is not loaded and could not be found", 'import Certainly.Doesnt.Exist' end test "module imported from the context it was defined in" do assert_compile_fail CompileError, ~r"nofile:4: module Kernel.ErrorsTest.ScheduledModule.Hygiene is not loaded but was defined.", ''' defmodule Kernel.ErrorsTest.ScheduledModule do defmodule Hygiene do end import Kernel.ErrorsTest.ScheduledModule.Hygiene end ''' end test "module imported from the same module" do assert_compile_fail CompileError, ~r"nofile:3: you are trying to use the module Kernel.ErrorsTest.ScheduledModule.Hygiene which is currently being defined", ''' defmodule Kernel.ErrorsTest.ScheduledModule do defmodule Hygiene do import Kernel.ErrorsTest.ScheduledModule.Hygiene end end ''' end test "already compiled module" do assert_compile_fail ArgumentError, "could not call eval_quoted on module Record " <> "because it was already compiled", 'Module.eval_quoted Record, quote(do: 1), [], file: __ENV__.file' end test "doc attributes format" do message = "expected the moduledoc attribute to be {line, doc} (where \"doc\" is " <> "a binary, a boolean, or nil), got: \"Other\"" assert_raise ArgumentError, message, fn -> defmodule DocAttributesFormat do @moduledoc "ModuleTest" {668, "ModuleTest"} = Module.get_attribute(__MODULE__, :moduledoc) Module.put_attribute(__MODULE__, :moduledoc, "Other") end end message = "expected the moduledoc attribute to contain a binary, a boolean, or nil, got: :not_a_binary" assert_raise ArgumentError, message, fn -> defmodule AtSyntaxDocAttributesFormat do @moduledoc 
:not_a_binary end end end test "interpolation error" do assert_compile_fail SyntaxError, "nofile:1: \"do\" is missing terminator \"end\". unexpected token: \")\" at line 1", '"foo\#{case 1 do )}bar"' end test "in definition module" do assert_compile_fail CompileError, "nofile:2: cannot define module Kernel.ErrorsTest.InDefinitionModule " <> "because it is currently being defined in nofile:1", ''' defmodule Kernel.ErrorsTest.InDefinitionModule do defmodule Elixir.Kernel.ErrorsTest.InDefinitionModule, do: true end ''' end test "invalid definition" do assert_compile_fail CompileError, "nofile:1: invalid syntax in def 1.(hello)", 'defmodule Kernel.ErrorsTest.InvalidDefinition, do: (def 1.(hello), do: true)' end test "duplicated bitstring size" do assert_compile_fail CompileError, "nofile:1: duplicated size definition in bitstring", '<<1::size(12)-size(13)>>' end test "invalid bitstring specified" do assert_compile_fail CompileError, "nofile:1: unknown bitstring specifier :atom", '<<1::(:atom)>>' assert_compile_fail CompileError, "nofile:1: unknown bitstring specifier unknown()", '<<1::unknown>>' assert_compile_fail CompileError, "nofile:1: unknown bitstring specifier another(12)", '<<1::another(12)>>' assert_compile_fail CompileError, "nofile:1: size in bitstring expects an integer or a variable as argument, got: :a", '<<1::size(:a)>>' assert_compile_fail CompileError, "nofile:1: unit in bitstring expects an integer as argument, got: :x", '<<1::unit(:x)>>' end test "invalid alias" do assert_compile_fail CompileError, "nofile:1: invalid value for keyword :as, expected a simple alias, got nested alias: Sample.Lists", 'alias :lists, as: Sample.Lists' assert_compile_fail CompileError, "nofile:1: invalid argument for alias, expected a compile time atom or alias, got: 1 + 2", 'alias 1 + 2' end test "invalid alias expansion" do assert_compile_fail CompileError, ~r"nofile:1: invalid alias: \"foo\.Foo\"", 'foo = :foo; foo.Foo' end test "invalid import option" do 
assert_compile_fail CompileError, "nofile:1: unsupported option :ops given to import", 'import :lists, [ops: 1]' end test "invalid rescue clause" do assert_compile_fail CompileError, "nofile:4: invalid rescue clause. The clause should match on an alias, a variable or be in the \"var in [alias]\" format", 'try do\n1\nrescue\n%UndefinedFunctionError{arity: 1} -> false\nend' end test "invalid for without generators" do assert_compile_fail CompileError, "nofile:1: for comprehensions must start with a generator", 'for is_atom(:foo), do: :foo' end test "invalid for bit generator" do assert_compile_fail CompileError, "nofile:1: bitstring fields without size are not allowed in bitstring generators", 'for <<x::binary <- "123">>, do: x' end test "invalid size in bitstrings" do assert_compile_fail CompileError, "nofile:1: cannot use ^x outside of match clauses", 'x = 8; <<a, b::size(^x)>> = <<?a, ?b>>' end test "unbound cond" do assert_compile_fail CompileError, "nofile:1: unbound variable _ inside cond. If you want the last clause to always match, " <> "you probably meant to use: true ->", 'cond do _ -> true end' end test "fun different arities" do assert_compile_fail CompileError, "nofile:1: cannot mix clauses with different arities in function definition", 'fn x -> x; x, y -> x + y end' end test "end of expression" do # All valid examples Code.eval_quoted ''' 1; 2; 3 (;) (;1) (1;) (1; 2) fn -> 1; 2 end fn -> ; end if true do ; end try do ; catch _, _ -> ; after ; end ''' # All invalid examples assert_compile_fail SyntaxError, "nofile:1: syntax error before: ';'", '1+;\n2' assert_compile_fail SyntaxError, "nofile:1: syntax error before: ';'", 'max(1, ;2)' end test "new line error" do assert_compile_fail SyntaxError, "nofile:3: syntax error before: eol", 'if true do\n foo = [],\n baz\nend' end # As reported and discussed in # https://github.com/elixir-lang/elixir/issues/4419. 
test "characters literal are printed correctly in syntax errors" do assert_compile_fail SyntaxError, "nofile:1: syntax error before: ?a", ':ok ?a' assert_compile_fail SyntaxError, "nofile:1: syntax error before: ?\\s", ':ok ?\\s' assert_compile_fail SyntaxError, "nofile:1: syntax error before: ?す" ':ok ?す' end test "invalid var or function on guard" do assert_compile_fail CompileError, "nofile:4: unknown variable something_that_does_not_exist or " <> "cannot invoke local something_that_does_not_exist/0 inside guard", ''' defmodule Kernel.ErrorsTest.InvalidVarOrFunctionOnGuard do def bar do case [] do [] when something_that_does_not_exist() == [] -> :ok end end end ''' end test "bodyless function with guard" do assert_compile_fail CompileError, "nofile:2: missing do keyword in def", ''' defmodule Kernel.ErrorsTest.BodyessFunctionWithGuard do def foo(n) when is_number(n) end ''' end test "invalid args for bodyless clause" do assert_compile_fail CompileError, "nofile:2: can use only variables and \\\\ as arguments in function heads", ''' defmodule Kernel.ErrorsTest.InvalidArgsForBodylessClause do def foo(arg // nil) def foo(_), do: :ok end ''' end test "invalid function on match" do assert_compile_fail CompileError, "nofile:3: cannot invoke local something_that_does_not_exist/1 inside match," <> " called as: something_that_does_not_exist(:foo)", ''' defmodule Kernel.ErrorsTest.InvalidFunctionOnMatch do def fun do case [] do; something_that_does_not_exist(:foo) -> :ok; end end end ''' end test "invalid remote on match" do assert_compile_fail CompileError, "nofile:1: cannot invoke remote function Hello.something_that_does_not_exist/0 inside match", 'case [] do; Hello.something_that_does_not_exist() -> :ok; end' end test "invalid remote on guard" do assert_compile_fail CompileError, "nofile:1: cannot invoke remote function Hello.something_that_does_not_exist/0 inside guard", 'case [] do; [] when Hello.something_that_does_not_exist == [] -> :ok; end' end test "typespec 
errors" do assert_compile_fail CompileError, "nofile:2: type foo() undefined", ''' defmodule Kernel.ErrorsTest.TypespecErrors1 do @type omg :: foo end ''' message = "nofile:2: spec for undefined function omg/0" assert_compile_fail CompileError, message, ''' defmodule Kernel.ErrorsTest.TypespecErrors2 do @spec omg :: atom end ''' end test "bad unquoting" do assert_compile_fail CompileError, "nofile: invalid quoted expression: {:foo, 0, 1}", ''' defmodule Kernel.ErrorsTest.BadUnquoting do def range(unquote({:foo, 0, 1})), do: :ok end ''' end test "bad multi-call" do assert_compile_fail CompileError, "nofile:1: invalid argument for alias, expected a compile time atom or alias, got: 42", 'alias IO.{ANSI, 42}' assert_compile_fail CompileError, "nofile:1: :as option is not supported by multi-alias call", 'alias Elixir.{Map}, as: Dict' assert_compile_fail UndefinedFunctionError, "function List.{}/1 is undefined or private", '[List.{Chars}, "one"]' end test "macros error stacktrace" do assert [{:erlang, :+, [1, :foo], _}, {Kernel.ErrorsTest.MacrosErrorStacktrace, :sample, 1, _} | _] = rescue_stacktrace(""" defmodule Kernel.ErrorsTest.MacrosErrorStacktrace do defmacro sample(num), do: num + :foo def other, do: sample(1) end """) end test "macros function clause stacktrace" do assert [{__MODULE__, :sample, 1, _} | _] = rescue_stacktrace(""" defmodule Kernel.ErrorsTest.MacrosFunctionClauseStacktrace do import Kernel.ErrorsTest sample(1) end """) end test "macros interpreted function clause stacktrace" do assert [{Kernel.ErrorsTest.MacrosInterpretedFunctionClauseStacktrace, :sample, 1, _} | _] = rescue_stacktrace(""" defmodule Kernel.ErrorsTest.MacrosInterpretedFunctionClauseStacktrace do defmacro sample(0), do: 0 def other, do: sample(1) end """) end test "macros compiled callback" do assert [{Kernel.ErrorsTest, :__before_compile__, [%Macro.Env{module: Kernel.ErrorsTest.MacrosCompiledCallback}], _} | _] = rescue_stacktrace(""" defmodule 
Kernel.ErrorsTest.MacrosCompiledCallback do Module.put_attribute(__MODULE__, :before_compile, Kernel.ErrorsTest) end """) end test "failed remote call stacktrace includes file/line info" do try do bad_remote_call(1) rescue ArgumentError -> stack = System.stacktrace assert [{:erlang, :apply, [1, :foo, []], []}, {__MODULE__, :bad_remote_call, 1, [file: _, line: _]} | _] = stack end end defp bad_remote_call(x), do: x.foo defmacro sample(0), do: 0 defmacro before_compile(_) do quote(do: _) end ## Helpers defp rescue_stacktrace(expr) do result = try do :elixir.eval(to_charlist(expr), []) nil rescue _ -> System.stacktrace end result || raise(ExUnit.AssertionError, message: "Expected function given to rescue_stacktrace to fail") end end
30.140351
128
0.640797
0843c65b373ef1ebc8efbca17bdae94a320ef83b
1,283
ex
Elixir
clients/site_verification/lib/google_api/site_verification/v1/connection.ex
matehat/elixir-google-api
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
[ "Apache-2.0" ]
1
2018-12-03T23:43:10.000Z
2018-12-03T23:43:10.000Z
clients/site_verification/lib/google_api/site_verification/v1/connection.ex
matehat/elixir-google-api
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
[ "Apache-2.0" ]
null
null
null
clients/site_verification/lib/google_api/site_verification/v1/connection.ex
matehat/elixir-google-api
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the elixir code generator program. # Do not edit the class manually. defmodule GoogleApi.SiteVerification.V1.Connection do @moduledoc """ Handle Tesla connections for GoogleApi.SiteVerification.V1. """ @type t :: Tesla.Env.client() use GoogleApi.Gax.Connection, scopes: [ # Manage the list of sites and domains you control "https://www.googleapis.com/auth/siteverification", # Manage your new site verifications with Google "https://www.googleapis.com/auth/siteverification.verify_only" ], otp_app: :google_api_site_verification, base_url: "https://www.googleapis.com/siteVerification/v1/" end
35.638889
77
0.744349
0843de5ce7fb42f326efb43c45ba1e1f4c0213c9
565
ex
Elixir
server_elixir/lib/server_elixir/application.ex
johnosullivan/MiHome
4e4062fd8801144a26bea8811e76688009913f94
[ "MIT" ]
7
2018-05-29T01:41:12.000Z
2021-12-26T04:27:21.000Z
server_elixir/lib/server_elixir/application.ex
johnosullivan/MiHome
4e4062fd8801144a26bea8811e76688009913f94
[ "MIT" ]
4
2017-12-06T14:48:59.000Z
2021-05-04T23:57:30.000Z
server_elixir/lib/server_elixir/application.ex
johnosullivan/MiHome
4e4062fd8801144a26bea8811e76688009913f94
[ "MIT" ]
5
2017-10-23T16:30:57.000Z
2019-07-27T19:40:11.000Z
defmodule ServerElixir.Application do # See https://hexdocs.pm/elixir/Application.html # for more information on OTP Applications @moduledoc false use Application @impl true def start(_type, _args) do children = [ # Start Database ServerElixir.Repo, # Core Endpoints ServerElixirWeb.CoreEndpoint ] # See https://hexdocs.pm/elixir/Supervisor.html # for other strategies and supported options opts = [strategy: :one_for_one, name: ServerElixir.Supervisor] Supervisor.start_link(children, opts) end end
24.565217
66
0.709735
0843ee52b73829b5a6bf31e9cc8eb5483069f2e7
3,329
ex
Elixir
lib/livebook/application.ex
qhwa/livebook
26226a42e0544fe3870c29bc68d752e7d6d3e63c
[ "Apache-2.0" ]
null
null
null
lib/livebook/application.ex
qhwa/livebook
26226a42e0544fe3870c29bc68d752e7d6d3e63c
[ "Apache-2.0" ]
null
null
null
lib/livebook/application.ex
qhwa/livebook
26226a42e0544fe3870c29bc68d752e7d6d3e63c
[ "Apache-2.0" ]
null
null
null
defmodule Livebook.Application do # See https://hexdocs.pm/elixir/Application.html # for more information on OTP Applications @moduledoc false use Application def start(_type, _args) do ensure_distribution!() set_cookie() # We register our own :standard_error below Process.unregister(:standard_error) children = [ # Start the Telemetry supervisor LivebookWeb.Telemetry, # Start the PubSub system {Phoenix.PubSub, name: Livebook.PubSub}, # Start the our own :standard_error handler (standard error -> group leader) # This way we can run multiple embedded runtimes without worrying # about restoring :standard_error to a valid process when terminating {Livebook.Runtime.ErlDist.IOForwardGL, name: :standard_error}, # Start the supervisor dynamically managing sessions Livebook.SessionSupervisor, # Start the server responsible for associating files with sessions Livebook.Session.FileGuard, # Start the Node Pool for managing node names Livebook.Runtime.NodePool, # Start the Endpoint (http/https) LivebookWeb.Endpoint ] # Similarly as with :standard_error, we register our backend # within the Livebook node, specifically for the embedded runtime Logger.add_backend(Livebook.Runtime.ErlDist.LoggerGLBackend) opts = [strategy: :one_for_one, name: Livebook.Supervisor] with {:ok, _} = result <- Supervisor.start_link(children, opts) do display_startup_info() result end end # Tell Phoenix to update the endpoint configuration # whenever the application is updated. def config_change(changed, _new, removed) do LivebookWeb.Endpoint.config_change(changed, removed) :ok end defp ensure_distribution!() do unless Node.alive?() do case System.cmd("epmd", ["-daemon"]) do {_, 0} -> :ok _ -> Livebook.Config.abort!(""" could not start epmd (Erlang Port Mapper Driver). Livebook uses epmd to \ talk to different runtimes. 
You may have to start epmd explicitly by calling: epmd -daemon Or by calling: elixir --sname test -e "IO.puts node()" Then you can try booting Livebook again """) end {type, name} = get_node_type_and_name() case Node.start(name, type) do {:ok, _} -> :ok {:error, reason} -> Livebook.Config.abort!("could not start distributed node: #{inspect(reason)}") end end end defp set_cookie() do cookie = Application.fetch_env!(:livebook, :cookie) Node.set_cookie(cookie) end defp get_node_type_and_name() do Application.get_env(:livebook, :node) || {:shortnames, random_short_name()} end defp random_short_name() do :"livebook_#{Livebook.Utils.random_short_id()}" end defp display_startup_info() do if Phoenix.Endpoint.server?(:livebook, LivebookWeb.Endpoint) do IO.puts("[Livebook] Application running at #{access_url()}") end end defp access_url() do root_url = LivebookWeb.Endpoint.url() if Livebook.Config.auth_mode() == :token do token = Application.fetch_env!(:livebook, :token) root_url <> "/?token=" <> token else root_url end end end
28.698276
88
0.665065
084424b371815f0b82c4a6cd224ce12a7772221a
15,621
ex
Elixir
clients/compute/lib/google_api/compute/v1/api/interconnects.ex
leandrocp/elixir-google-api
a86e46907f396d40aeff8668c3bd81662f44c71e
[ "Apache-2.0" ]
null
null
null
clients/compute/lib/google_api/compute/v1/api/interconnects.ex
leandrocp/elixir-google-api
a86e46907f396d40aeff8668c3bd81662f44c71e
[ "Apache-2.0" ]
null
null
null
clients/compute/lib/google_api/compute/v1/api/interconnects.ex
leandrocp/elixir-google-api
a86e46907f396d40aeff8668c3bd81662f44c71e
[ "Apache-2.0" ]
1
2020-11-10T16:58:27.000Z
2020-11-10T16:58:27.000Z
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.Compute.V1.Api.Interconnects do @moduledoc """ API calls for all endpoints tagged `Interconnects`. """ alias GoogleApi.Compute.V1.Connection alias GoogleApi.Gax.{Request, Response} @doc """ Deletes the specified interconnect. ## Parameters - connection (GoogleApi.Compute.V1.Connection): Connection to server - project (String.t): Project ID for this request. - interconnect (String.t): Name of the interconnect to delete. - opts (KeywordList): [optional] Optional parameters - :alt (String.t): Data format for the response. - :fields (String.t): Selector specifying which fields to include in a partial response. - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. - :oauth_token (String.t): OAuth 2.0 token for the current user. - :prettyPrint (boolean()): Returns response with indentations and line breaks. - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters. - :userIp (String.t): Deprecated. Please use quotaUser instead. - :requestId (String.t): An optional request ID to identify requests. 
Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). ## Returns {:ok, %GoogleApi.Compute.V1.Model.Operation{}} on success {:error, info} on failure """ @spec compute_interconnects_delete(Tesla.Env.client(), String.t(), String.t(), keyword()) :: {:ok, GoogleApi.Compute.V1.Model.Operation.t()} | {:error, Tesla.Env.t()} def compute_interconnects_delete(connection, project, interconnect, opts \\ []) do optional_params = %{ :alt => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :userIp => :query, :requestId => :query } request = Request.new() |> Request.method(:delete) |> Request.url("/{project}/global/interconnects/{interconnect}", %{ "project" => URI.encode_www_form(project), "interconnect" => URI.encode_www_form(interconnect) }) |> Request.add_optional_params(optional_params, opts) connection |> Connection.execute(request) |> Response.decode(struct: %GoogleApi.Compute.V1.Model.Operation{}) end @doc """ Returns the specified interconnect. Get a list of available interconnects by making a list() request. ## Parameters - connection (GoogleApi.Compute.V1.Connection): Connection to server - project (String.t): Project ID for this request. - interconnect (String.t): Name of the interconnect to return. - opts (KeywordList): [optional] Optional parameters - :alt (String.t): Data format for the response. 
- :fields (String.t): Selector specifying which fields to include in a partial response. - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. - :oauth_token (String.t): OAuth 2.0 token for the current user. - :prettyPrint (boolean()): Returns response with indentations and line breaks. - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters. - :userIp (String.t): Deprecated. Please use quotaUser instead. ## Returns {:ok, %GoogleApi.Compute.V1.Model.Interconnect{}} on success {:error, info} on failure """ @spec compute_interconnects_get(Tesla.Env.client(), String.t(), String.t(), keyword()) :: {:ok, GoogleApi.Compute.V1.Model.Interconnect.t()} | {:error, Tesla.Env.t()} def compute_interconnects_get(connection, project, interconnect, opts \\ []) do optional_params = %{ :alt => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :userIp => :query } request = Request.new() |> Request.method(:get) |> Request.url("/{project}/global/interconnects/{interconnect}", %{ "project" => URI.encode_www_form(project), "interconnect" => URI.encode_www_form(interconnect) }) |> Request.add_optional_params(optional_params, opts) connection |> Connection.execute(request) |> Response.decode(struct: %GoogleApi.Compute.V1.Model.Interconnect{}) end @doc """ Creates a Interconnect in the specified project using the data included in the request. ## Parameters - connection (GoogleApi.Compute.V1.Connection): Connection to server - project (String.t): Project ID for this request. - opts (KeywordList): [optional] Optional parameters - :alt (String.t): Data format for the response. - :fields (String.t): Selector specifying which fields to include in a partial response. - :key (String.t): API key. 
Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. - :oauth_token (String.t): OAuth 2.0 token for the current user. - :prettyPrint (boolean()): Returns response with indentations and line breaks. - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters. - :userIp (String.t): Deprecated. Please use quotaUser instead. - :requestId (String.t): An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). 
- :body (Interconnect): ## Returns {:ok, %GoogleApi.Compute.V1.Model.Operation{}} on success {:error, info} on failure """ @spec compute_interconnects_insert(Tesla.Env.client(), String.t(), keyword()) :: {:ok, GoogleApi.Compute.V1.Model.Operation.t()} | {:error, Tesla.Env.t()} def compute_interconnects_insert(connection, project, opts \\ []) do optional_params = %{ :alt => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :userIp => :query, :requestId => :query, :body => :body } request = Request.new() |> Request.method(:post) |> Request.url("/{project}/global/interconnects", %{ "project" => URI.encode_www_form(project) }) |> Request.add_optional_params(optional_params, opts) connection |> Connection.execute(request) |> Response.decode(struct: %GoogleApi.Compute.V1.Model.Operation{}) end @doc """ Retrieves the list of interconnect available to the specified project. ## Parameters - connection (GoogleApi.Compute.V1.Connection): Connection to server - project (String.t): Project ID for this request. - opts (KeywordList): [optional] Optional parameters - :alt (String.t): Data format for the response. - :fields (String.t): Selector specifying which fields to include in a partial response. - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. - :oauth_token (String.t): OAuth 2.0 token for the current user. - :prettyPrint (boolean()): Returns response with indentations and line breaks. - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters. - :userIp (String.t): Deprecated. Please use quotaUser instead. - :filter (String.t): A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. 
The value must be a string, a number, or a boolean. The comparison operator must be either &#x3D;, !&#x3D;, &gt;, or &lt;. For example, if you are filtering Compute Engine instances, you can exclude instances named example-instance by specifying name !&#x3D; example-instance. You can also filter nested fields. For example, you could specify scheduling.automaticRestart &#x3D; false to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels. To filter on multiple expressions, provide each separate expression within parentheses. For example, (scheduling.automaticRestart &#x3D; true) (cpuPlatform &#x3D; \&quot;Intel Skylake\&quot;). By default, each expression is an AND expression. However, you can include AND and OR expressions explicitly. For example, (cpuPlatform &#x3D; \&quot;Intel Skylake\&quot;) OR (cpuPlatform &#x3D; \&quot;Intel Broadwell\&quot;) AND (scheduling.automaticRestart &#x3D; true). - :maxResults (integer()): The maximum number of results per page that should be returned. If the number of available results is larger than maxResults, Compute Engine returns a nextPageToken that can be used to get the next page of results in subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default: 500) - :orderBy (String.t): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name. You can also sort results in descending order based on the creation timestamp using orderBy&#x3D;\&quot;creationTimestamp desc\&quot;. This sorts results based on the creationTimestamp field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first. Currently, only sorting by name or creationTimestamp desc is supported. - :pageToken (String.t): Specifies a page token to use. 
Set pageToken to the nextPageToken returned by a previous list request to get the next page of results. ## Returns {:ok, %GoogleApi.Compute.V1.Model.InterconnectList{}} on success {:error, info} on failure """ @spec compute_interconnects_list(Tesla.Env.client(), String.t(), keyword()) :: {:ok, GoogleApi.Compute.V1.Model.InterconnectList.t()} | {:error, Tesla.Env.t()} def compute_interconnects_list(connection, project, opts \\ []) do optional_params = %{ :alt => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :userIp => :query, :filter => :query, :maxResults => :query, :orderBy => :query, :pageToken => :query } request = Request.new() |> Request.method(:get) |> Request.url("/{project}/global/interconnects", %{ "project" => URI.encode_www_form(project) }) |> Request.add_optional_params(optional_params, opts) connection |> Connection.execute(request) |> Response.decode(struct: %GoogleApi.Compute.V1.Model.InterconnectList{}) end @doc """ Updates the specified interconnect with the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules. ## Parameters - connection (GoogleApi.Compute.V1.Connection): Connection to server - project (String.t): Project ID for this request. - interconnect (String.t): Name of the interconnect to update. - opts (KeywordList): [optional] Optional parameters - :alt (String.t): Data format for the response. - :fields (String.t): Selector specifying which fields to include in a partial response. - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. - :oauth_token (String.t): OAuth 2.0 token for the current user. - :prettyPrint (boolean()): Returns response with indentations and line breaks. - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters. 
- :userIp (String.t): Deprecated. Please use quotaUser instead. - :requestId (String.t): An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). - :body (Interconnect): ## Returns {:ok, %GoogleApi.Compute.V1.Model.Operation{}} on success {:error, info} on failure """ @spec compute_interconnects_patch(Tesla.Env.client(), String.t(), String.t(), keyword()) :: {:ok, GoogleApi.Compute.V1.Model.Operation.t()} | {:error, Tesla.Env.t()} def compute_interconnects_patch(connection, project, interconnect, opts \\ []) do optional_params = %{ :alt => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :userIp => :query, :requestId => :query, :body => :body } request = Request.new() |> Request.method(:patch) |> Request.url("/{project}/global/interconnects/{interconnect}", %{ "project" => URI.encode_www_form(project), "interconnect" => URI.encode_www_form(interconnect) }) |> Request.add_optional_params(optional_params, opts) connection |> Connection.execute(request) |> Response.decode(struct: %GoogleApi.Compute.V1.Model.Operation{}) end end
54.618881
1,213
0.707317
0844264911d65e0f41421d5d89cd609127afea03
2,829
exs
Elixir
test/channels/lobby_channel_test.exs
Pianist038801/SprintPoker
ae14f79b8cd4254a1c5f5fef698db1cf2d20cf9c
[ "MIT" ]
null
null
null
test/channels/lobby_channel_test.exs
Pianist038801/SprintPoker
ae14f79b8cd4254a1c5f5fef698db1cf2d20cf9c
[ "MIT" ]
null
null
null
test/channels/lobby_channel_test.exs
Pianist038801/SprintPoker
ae14f79b8cd4254a1c5f5fef698db1cf2d20cf9c
[ "MIT" ]
null
null
null
defmodule SprintPoker.LobbyChannelTest do use SprintPoker.ChannelCase alias SprintPoker.LobbyChannel alias SprintPoker.User alias SprintPoker.Deck alias SprintPoker.Repo alias SprintPoker.Game alias SprintPoker.Deck alias SprintPoker.State test "joining lobby sends auth_token" do user = %User{} |> User.changeset(%{name: "test user"}) |> Repo.insert! socket("user:#{user.id}", %{user_id: user.id}) |> subscribe_and_join(LobbyChannel, "lobby") auth_token_response = %{"auth_token": user.auth_token} assert_push "auth_token", ^auth_token_response end test "joining lobby sends user" do user = %User{} |> User.changeset(%{name: "test user"}) |> Repo.insert! socket("user:#{user.id}", %{user_id: user.id}) |> subscribe_and_join(LobbyChannel, "lobby") user_response = %{"user": user} assert_push "user", ^user_response end test "joining lobby sends decks" do user = %User{} |> User.changeset(%{name: "test user"}) |> Repo.insert! socket("user:#{user.id}", %{user_id: user.id}) |> subscribe_and_join(LobbyChannel, "lobby") decks = Repo.all(Deck) decks_response = %{decks: decks} assert_push "decks", ^decks_response end test "joining lobby sends game" do user = %User{} |> User.changeset(%{name: "test user"}) |> Repo.insert! deck = %Deck{} |> Deck.changeset(%{name: "test deck"}) |> Repo.insert! game = %Game{} |> Game.changeset(%{name: "test game", owner_id: user.id, deck_id: deck.id}) |> Repo.insert! _state = %State{} |> State.changeset(%{name: "none", game_id: game.id}) |> Repo.insert! socket("user:#{user.id}", %{user_id: user.id}) |> subscribe_and_join(LobbyChannel, "lobby", %{"game_id" => game.id}) game = game |> Repo.preload([:owner, :deck]) game_response = %{"game": game} assert_push "game", ^game_response end test "'user:update' resends updated user" do user = %User{} |> User.changeset(%{name: "test user"}) |> Repo.insert! 
{:ok, _, socket } = socket("user:#{user.id}", %{user_id: user.id}) |> subscribe_and_join(LobbyChannel, "lobby") socket |> push "user:update", %{"user" => %{"name" => "new name"}} change_user_name_response = %{user: %User{user | name: "new name"}} assert_push "user", ^change_user_name_response end test "'game:create' resends new game with owner_id and name" do user = %User{} |> User.changeset(%{name: "test user"}) |> Repo.insert! deck = %Deck{} |> Deck.changeset(%{name: "test deck"}) |> Repo.insert! {:ok, _, socket } = socket("user:#{user.id}", %{user_id: user.id}) |> subscribe_and_join(LobbyChannel, "lobby") socket |> push "game:create", %{"name" => "new game", "deck" => %{"id" => deck.id}} owner_id = user.id assert_push "game", %{game: %{id: _, name: "new game", owner_id: ^owner_id}} end end
37.72
120
0.644751
084458bdd825b74f4ebe5309dda15e9396fd51c5
347
ex
Elixir
apps/admin_api/lib/admin_api/v1/views/transaction_calculation_view.ex
vanmil/ewallet
6c1aca95a83e0a9d93007670a40d8c45764a8122
[ "Apache-2.0" ]
1
2018-12-07T06:21:21.000Z
2018-12-07T06:21:21.000Z
apps/admin_api/lib/admin_api/v1/views/transaction_calculation_view.ex
vanmil/ewallet
6c1aca95a83e0a9d93007670a40d8c45764a8122
[ "Apache-2.0" ]
null
null
null
apps/admin_api/lib/admin_api/v1/views/transaction_calculation_view.ex
vanmil/ewallet
6c1aca95a83e0a9d93007670a40d8c45764a8122
[ "Apache-2.0" ]
null
null
null
defmodule AdminAPI.V1.TransactionCalculationView do use AdminAPI, :view alias EWallet.Web.V1.{ResponseSerializer, TransactionCalculationSerializer} def render("calculation.json", %{calculation: calculation}) do calculation |> TransactionCalculationSerializer.serialize() |> ResponseSerializer.serialize(success: true) end end
31.545455
77
0.786744
084478fbe968959b19ae3ea2aa83cd1d45b875eb
692
ex
Elixir
lib/nautilus/core/validators/client_validator/client_validator.ex
CarloHFR/NautilusGateway
26211948c5f9127e6662a90e41df5b43b2408372
[ "MIT" ]
null
null
null
lib/nautilus/core/validators/client_validator/client_validator.ex
CarloHFR/NautilusGateway
26211948c5f9127e6662a90e41df5b43b2408372
[ "MIT" ]
null
null
null
lib/nautilus/core/validators/client_validator/client_validator.ex
CarloHFR/NautilusGateway
26211948c5f9127e6662a90e41df5b43b2408372
[ "MIT" ]
null
null
null
defmodule Nautilus.Core.Validators.ClientValidator.ClientValidator do @moduledoc """ This module is responsible for validate client """ @key_value_adapter Application.get_env(:nautilus, :KeyValueBucketInterface) @doc """ This function receive a id and pid """ def validate_client(id, pid) do with {:ok, client_info} <- @key_value_adapter.get(id), true <- Process.alive?(client_info[:pid]), true <- client_info[:pid] == pid do {:ok, :validclient} else _ -> _status = @key_value_adapter.delete_by_id(id) {:error, :invalidclient} end end end
27.68
106
0.59104
0844798c1400a6e8d17030e2deb50a7fe04eebd8
23,417
ex
Elixir
lib/ex_unit/lib/ex_unit/doc_test.ex
bmwiedemann/elixir
7a1ae92b42438dcd9115dca3e324980cdc0534ff
[ "Apache-2.0" ]
null
null
null
lib/ex_unit/lib/ex_unit/doc_test.ex
bmwiedemann/elixir
7a1ae92b42438dcd9115dca3e324980cdc0534ff
[ "Apache-2.0" ]
null
null
null
lib/ex_unit/lib/ex_unit/doc_test.ex
bmwiedemann/elixir
7a1ae92b42438dcd9115dca3e324980cdc0534ff
[ "Apache-2.0" ]
null
null
null
defmodule ExUnit.DocTest do @moduledoc """ ExUnit.DocTest implements functionality similar to [Python's doctest](https://docs.python.org/2/library/doctest.html). It allows us to generate tests from the code examples in a module/function/macro's documentation. To do this, invoke the `doctest/1` macro from within your test case and ensure your code examples are written according to the syntax and guidelines below. ## Syntax Every new test starts on a new line, with an `iex>` prefix. Multiline expressions can be used by prefixing subsequent lines with either `...>` (recommended) or `iex>`. The expected result should start at the next line after the `iex>` or `...>` line(s) and is terminated either by a newline, new `iex>` prefix or the end of the string literal. ## Examples To run doctests include them in an ExUnit case with a `doctest` macro: defmodule MyModuleTest do use ExUnit.Case, async: true doctest MyModule end The `doctest` macro loops through all functions and macros defined in `MyModule`, parsing their documentation in search of code examples. A very basic example is: iex> 1 + 1 2 Expressions on multiple lines are also supported: iex> Enum.map [1, 2, 3], fn(x) -> ...> x * 2 ...> end [2, 4, 6] Multiple results can be checked within the same test: iex> a = 1 1 iex> a + 1 2 If you want to keep any two tests separate, add an empty line between them: iex> a = 1 1 iex> a + 1 # will fail with a "undefined function a/0" error 2 If you don't want to assert for every result in a doctest, you can omit the result: iex> pid = spawn(fn -> :ok end) iex> is_pid(pid) true This is useful when the result is something variable (like a PID in the example above) or when the result is a complicated data structure and you don't want to show it all, but just parts of it or some of its properties. 
Similarly to IEx you can use numbers in your "prompts": iex(1)> [1 + 2, ...(1)> 3] [3, 3] This is useful in two cases: * being able to refer to specific numbered scenarios * copy-pasting examples from an actual IEx session You can also select or skip functions when calling `doctest`. See the documentation on the `:except` and `:only` options below for more information. ## Opaque types Some types' internal structures are kept hidden and instead show a user-friendly structure when inspected. The idiom in Elixir is to print those data types in the format `#Name<...>`. Because those values are treated as comments in Elixir code due to the leading `#` sign, they require special care when being used in doctests. Imagine you have a map that contains a MapSet and is printed as: %{users: #MapSet<[:foo, :bar]>} If you try to match on such an expression, `doctest` will fail to compile. There are two ways to resolve this. The first is to rely on the fact that doctest can compare internal structures as long as they are at the root. So one could write: iex> map = %{users: Enum.into([:foo, :bar], MapSet.new())} iex> map.users #MapSet<[:foo, :bar]> Whenever a doctest starts with "#Name<", `doctest` will perform a string comparison. For example, the above test will perform the following match: inspect(map.users) == "#MapSet<[:foo, :bar]>" Alternatively, since doctest results are actually evaluated, you can have the MapSet building expression as the doctest result: iex> %{users: Enum.into([:foo, :bar], MapSet.new())} %{users: Enum.into([:foo, :bar], MapSet.new())} The downside of this approach is that the doctest result is not really what users would see in the terminal. ## Exceptions You can also showcase expressions raising an exception, for example: iex(1)> String.to_atom((fn() -> 1 end).()) ** (ArgumentError) argument error What DocTest will be looking for is a line starting with `** (` and it will parse it accordingly to extract the exception name and message. 
At this moment, the exception parser would make the parser treat the next line as a start of a completely new expression (if it is prefixed with `iex>`) or a no-op line with documentation. Thus, multiline messages are not supported. ## When not to use doctest In general, doctests are not recommended when your code examples contain side effects. For example, if a doctest prints to standard output, doctest will not try to capture the output. Similarly, doctests do not run in any kind of sandbox. So any module defined in a code example is going to linger throughout the whole test suite run. """ @opaque_type_regex ~r/#[\w\.]+</ defmodule Error do defexception [:message] @impl true def exception(opts) do module = Keyword.fetch!(opts, :module) message = Keyword.fetch!(opts, :message) file = module.__info__(:compile)[:source] |> Path.relative_to_cwd() info = Exception.format_file_line(file, opts[:line]) %__MODULE__{message: info <> " " <> message} end end @doc """ This macro is used to generate ExUnit test cases for doctests. Calling `doctest(Module)` will generate tests for all doctests found in the `module`. Options can also be given: * `:except` - generates tests for all functions except those listed (list of `{function, arity}` tuples, and/or `:moduledoc`). * `:only` - generates tests only for functions listed (list of `{function, arity}` tuples, and/or `:moduledoc`). * `:import` - when `true`, one can test a function defined in the module without referring to the module name. However, this is not feasible when there is a clash with a module like Kernel. In these cases, `:import` should be set to `false` and a full `Module.function` construct should be used. ## Examples doctest MyModule, except: [:moduledoc, trick_fun: 1] This macro is auto-imported with every `ExUnit.Case`. 
""" defmacro doctest(module, opts \\ []) do require = if is_atom(Macro.expand(module, __CALLER__)) do quote do require unquote(module) end end tests = quote bind_quoted: [module: module, opts: opts] do env = __ENV__ file = ExUnit.DocTest.__file__(module) for {name, test} <- ExUnit.DocTest.__doctests__(module, opts) do @file file doc = ExUnit.Case.register_test(env, :doctest, name, []) def unquote(doc)(_), do: unquote(test) end end [require, tests] end @doc false def __file__(module) do source = module.__info__(:compile)[:source] || raise "#{inspect(module)} does not have compile-time source information" "(for doctest at) " <> Path.relative_to_cwd(source) end @doc false def __doctests__(module, opts) do do_import = Keyword.get(opts, :import, false) extract(module) |> filter_by_opts(opts) |> Stream.with_index() |> Enum.map(fn {test, acc} -> compile_test(test, module, do_import, acc + 1) end) end defp filter_by_opts(tests, opts) do except = Keyword.get(opts, :except, []) case Keyword.fetch(opts, :only) do {:ok, []} -> [] {:ok, only} -> tests |> Stream.reject(&(&1.fun_arity in except)) |> Stream.filter(&(&1.fun_arity in only)) :error -> Stream.reject(tests, &(&1.fun_arity in except)) end end ## Compilation of extracted tests defp compile_test(test, module, do_import, n) do {test_name(test, module, n), test_content(test, module, do_import)} end defp test_name(%{fun_arity: :moduledoc}, m, n) do "module #{inspect(m)} (#{n})" end defp test_name(%{fun_arity: {f, a}}, m, n) do "#{inspect(m)}.#{f}/#{a} (#{n})" end defp test_content(%{exprs: exprs, line: line}, module, do_import) do file = module.__info__(:compile)[:source] |> Path.relative_to_cwd() location = [line: line, file: file] stack = Macro.escape([{module, :__MODULE__, 0, location}]) if multiple_exceptions?(exprs) do raise Error, line: line, module: module, message: "multiple exceptions in one doctest case are not supported" end tests = Enum.map(exprs, fn {expr, expected} -> test_case_content(expr, expected, location, 
stack) end) {:__block__, [], test_import(module, do_import) ++ tests} end defp multiple_exceptions?(exprs) do Enum.count(exprs, fn {_, {:error, _, _}} -> true _ -> false end) > 1 end defp test_case_content(expr, {:test, expected}, location, stack) do expr_ast = string_to_quoted(location, stack, expr) expected_ast = string_to_quoted(location, stack, expected) quote do expected = unquote(expected_ast) case unquote(expr_ast) do ^expected -> :ok actual -> expr = "#{unquote(String.trim(expr))} === #{unquote(String.trim(expected))}" error = [message: "Doctest failed", expr: expr, left: actual, right: expected] reraise ExUnit.AssertionError, error, unquote(stack) end end end defp test_case_content(expr, {:inspect, expected}, location, stack) do expr_ast = quote do inspect(unquote(string_to_quoted(location, stack, expr))) end expected_ast = string_to_quoted(location, stack, expected) quote do expected = unquote(expected_ast) case unquote(expr_ast) do ^expected -> :ok actual -> expr = "inspect(#{unquote(String.trim(expr))}) === #{unquote(String.trim(expected))}" error = [message: "Doctest failed", expr: expr, left: actual, right: expected] reraise ExUnit.AssertionError, error, unquote(stack) end end end defp test_case_content(expr, {:error, exception, message}, location, stack) do expr_ast = string_to_quoted(location, stack, expr) quote do stack = unquote(stack) expr = unquote(String.trim(expr)) try do unquote(expr_ast) rescue error -> actual_exception = error.__struct__ actual_message = Exception.message(error) message = cond do actual_exception != unquote(exception) -> "Doctest failed: expected exception #{inspect(unquote(exception))} but got #{ inspect(actual_exception) } with message #{inspect(actual_message)}" actual_message != unquote(message) -> "Doctest failed: wrong message for #{inspect(actual_exception)}\n" <> "expected:\n" <> " #{inspect(unquote(message))}\n" <> "actual:\n" <> " #{inspect(actual_message)}" true -> nil end if message do reraise 
ExUnit.AssertionError, [message: message, expr: expr], stack end else _ -> message = "Doctest failed: expected exception #{inspect(unquote(exception))} but nothing was raised" error = [message: message, expr: expr] reraise ExUnit.AssertionError, error, stack end end end defp test_import(_mod, false), do: [] defp test_import(mod, _) do [quote(do: import(unquote(mod)))] end defp string_to_quoted(location, stack, expr) do try do Code.string_to_quoted!(expr, location) rescue e -> ex_message = "(#{inspect(e.__struct__)}) #{Exception.message(e)}" message = "Doctest did not compile, got: #{ex_message}" message = if e.__struct__ == TokenMissingError and expr =~ Regex.recompile!(@opaque_type_regex) do message <> """ . If you are planning to assert on the result of an iex> expression \ which contains a value inspected as #Name<...>, please make sure \ the inspected value is placed at the beginning of the expression; \ otherwise Elixir will treat it as a comment due to the leading sign #.\ """ else message end opts = if String.valid?(expr) do [message: message, expr: String.trim(expr)] else [message: message] end quote do reraise ExUnit.AssertionError, unquote(opts), unquote(stack) end end end ## Extraction of the tests defp extract(module) do case Code.fetch_docs(module) do {:docs_v1, annotation, _, _, moduledoc, _, docs} -> extract_from_moduledoc(annotation, moduledoc, module) ++ extract_from_docs(Enum.sort(docs), module) {:error, reason} -> raise Error, module: module, message: "could not retrieve the documentation for module #{inspect(module)}. 
" <> explain_docs_error(reason) end end defp explain_docs_error(:module_not_found), do: "The BEAM file of the module cannot be accessed" defp explain_docs_error(:chunk_not_found), do: "The module was not compiled with documentation" defp explain_docs_error({:invalid_chunk, _}), do: "The documentation chunk in the module is invalid" defp extract_from_moduledoc(_, doc, _module) when doc in [:none, :hidden], do: [] defp extract_from_moduledoc(annotation, %{"en" => doc}, module) do for test <- extract_tests(:erl_anno.line(annotation), doc, module) do normalize_test(test, :moduledoc) end end defp extract_from_docs(docs, module) do for doc <- docs, doc <- extract_from_doc(doc, module), do: doc end defp extract_from_doc({{kind, _, _}, _, _, doc, _}, _module) when kind not in [:function, :macro] or doc in [:none, :hidden], do: [] defp extract_from_doc({{_, name, arity}, annotation, _, %{"en" => doc}, _}, module) do line = :erl_anno.line(annotation) for test <- extract_tests(line, doc, module) do normalize_test(test, {name, arity}) end end defp extract_tests(line_no, doc, module) do all_lines = String.split(doc, "\n", trim: false) lines = adjust_indent(all_lines, line_no + 1, module) extract_tests(lines, "", "", [], true, module) end @iex_prompt ["iex>", "iex("] @dot_prompt ["...>", "...("] defp adjust_indent(lines, line_no, module) do adjust_indent(:text, lines, line_no, [], 0, module) end defp adjust_indent(:after_prompt, [], line_no, _adjusted_lines, _indent, module) do raise_incomplete_doctest(line_no, module) end defp adjust_indent(_kind, [], _line_no, adjusted_lines, _indent, _module) do Enum.reverse(adjusted_lines) end defp adjust_indent(:text, [line | rest], line_no, adjusted_lines, indent, module) do case String.starts_with?(String.trim_leading(line), @iex_prompt) do true -> line_indent = get_indent(line, indent) adjust_indent(:prompt, [line | rest], line_no, adjusted_lines, line_indent, module) false -> adjust_indent(:text, rest, line_no + 1, adjusted_lines, 
indent, module) end end defp adjust_indent(kind, [line | rest], line_no, adjusted_lines, indent, module) when kind in [:prompt, :after_prompt] do stripped_line = strip_indent(line, indent) case String.trim_leading(line) do "" -> raise_incomplete_doctest(line_no, module) ^stripped_line -> :ok _ -> n_spaces = if indent == 1, do: "#{indent} space", else: "#{indent} spaces" raise Error, line: line_no, module: module, message: """ indentation level mismatch on doctest line: #{inspect(line)} If you are planning to assert on the result of an `iex>` expression, \ make sure the result is indented at the beginning of `iex>`, which \ in this case is exactly #{n_spaces}. If instead you have an `iex>` expression that spans over multiple lines, \ please make sure that each line after the first one begins with `...>`. """ end adjusted_lines = [{stripped_line, line_no} | adjusted_lines] next = cond do kind == :prompt -> :after_prompt String.starts_with?(stripped_line, @iex_prompt ++ @dot_prompt) -> :after_prompt true -> :code end adjust_indent(next, rest, line_no + 1, adjusted_lines, indent, module) end defp adjust_indent(:code, [line | rest], line_no, adjusted_lines, indent, module) do stripped_line = strip_indent(line, indent) cond do stripped_line == "" -> adjusted_lines = [{stripped_line, line_no} | adjusted_lines] adjust_indent(:text, rest, line_no + 1, adjusted_lines, 0, module) String.starts_with?(String.trim_leading(line), @iex_prompt) -> adjust_indent(:prompt, [line | rest], line_no, adjusted_lines, indent, module) true -> adjusted_lines = [{stripped_line, line_no} | adjusted_lines] adjust_indent(:code, rest, line_no + 1, adjusted_lines, indent, module) end end defp get_indent(line, current_indent) do case :binary.match(line, "iex") do {pos, _len} -> pos :nomatch -> current_indent end end defp strip_indent(line, indent) do length = byte_size(line) - indent if length > 0 do binary_part(line, indent, length) else "" end end @fences ["```", "~~~"] defp extract_tests(lines, 
expr_acc, expected_acc, acc, new_test, module) defp extract_tests([], "", "", [], _, _) do [] end defp extract_tests([], "", "", acc, _, _) do Enum.reverse(acc) end # End of input and we've still got a test pending. defp extract_tests([], expr_acc, expected_acc, [test | rest], _, _) do test = add_expr(test, expr_acc, expected_acc) Enum.reverse([test | rest]) end # We've encountered the next test on an adjacent line. Put them into one group. defp extract_tests( [{"iex>" <> _, _} | _] = list, expr_acc, expected_acc, [test | rest], new_test, module ) when expr_acc != "" and expected_acc != "" do test = add_expr(test, expr_acc, expected_acc) extract_tests(list, "", "", [test | rest], new_test, module) end # Store expr_acc and start a new test case. defp extract_tests([{"iex>" <> string, line_no} | lines], "", expected_acc, acc, true, module) do test = %{line: line_no, fun_arity: nil, exprs: []} extract_tests(lines, string, expected_acc, [test | acc], false, module) end # Store expr_acc. defp extract_tests([{"iex>" <> string, _} | lines], "", expected_acc, acc, false, module) do extract_tests(lines, string, expected_acc, acc, false, module) end # Still gathering expr_acc. Synonym for the next clause. defp extract_tests( [{"iex>" <> string, _} | lines], expr_acc, expected_acc, acc, new_test, module ) do extract_tests(lines, expr_acc <> "\n" <> string, expected_acc, acc, new_test, module) end # Still gathering expr_acc. Synonym for the previous clause. defp extract_tests( [{"...>" <> string, _} | lines], expr_acc, expected_acc, acc, new_test, module ) when expr_acc != "" do extract_tests(lines, expr_acc <> "\n" <> string, expected_acc, acc, new_test, module) end # Expression numbers are simply skipped. 
defp extract_tests( [{<<"iex(", _>> <> string = line, line_no} | lines], expr_acc, expected_acc, acc, new_test, module ) do new_line = {"iex" <> skip_iex_number(string, module, line_no, line), line_no} extract_tests([new_line | lines], expr_acc, expected_acc, acc, new_test, module) end # Expression numbers are simply skipped redux. defp extract_tests( [{<<"...(", _>> <> string, line_no} = line | lines], expr_acc, expected_acc, acc, new_test, module ) do new_line = {"..." <> skip_iex_number(string, module, line_no, line), line_no} extract_tests([new_line | lines], expr_acc, expected_acc, acc, new_test, module) end # Skip empty or documentation line. defp extract_tests([_ | lines], "", "", acc, _, module) do extract_tests(lines, "", "", acc, true, module) end # Encountered end of fenced code block, store pending test defp extract_tests( [{<<fence::3-bytes>> <> _, _} | lines], expr_acc, expected_acc, [test | rest], _new_test, module ) when fence in @fences and expr_acc != "" do test = add_expr(test, expr_acc, expected_acc) extract_tests(lines, "", "", [test | rest], true, module) end # Encountered an empty line, store pending test defp extract_tests([{"", _} | lines], expr_acc, expected_acc, [test | rest], _new_test, module) do test = add_expr(test, expr_acc, expected_acc) extract_tests(lines, "", "", [test | rest], true, module) end # Finally, parse expected_acc. 
defp extract_tests([{expected, _} | lines], expr_acc, "", acc, new_test, module) do extract_tests(lines, expr_acc, expected, acc, new_test, module) end defp extract_tests([{expected, _} | lines], expr_acc, expected_acc, acc, new_test, module) do extract_tests(lines, expr_acc, expected_acc <> "\n" <> expected, acc, new_test, module) end defp skip_iex_number(")>" <> string, _module, _line_no, _line) do ">" <> string end defp skip_iex_number("", module, line_no, line) do message = "unknown IEx prompt: #{inspect(line)}.\nAccepted formats are: iex>, iex(1)>, ...>, ...(1)>}" raise Error, line: line_no, module: module, message: message end defp skip_iex_number(<<_>> <> string, module, line_no, line) do skip_iex_number(string, module, line_no, line) end defp normalize_test(%{exprs: exprs} = test, fa) do %{test | fun_arity: fa, exprs: Enum.reverse(exprs)} end defp add_expr(%{exprs: exprs} = test, expr, expected) do %{test | exprs: [{expr, tag_expected(expected)} | exprs]} end defp tag_expected(string) do case string do "** (" <> error -> [mod, message] = :binary.split(error, ")") {:error, Module.concat([mod]), String.trim_leading(message)} _ -> if inspectable?(string) do {:inspect, inspect(string)} else {:test, string} end end end defp inspectable?(<<?#, char, rest::binary>>) when char in ?A..?Z, do: inspectable_end?(rest) defp inspectable?(_), do: false defp inspectable_end?(<<?., char, rest::binary>>) when char in ?A..?Z, do: inspectable_end?(rest) defp inspectable_end?(<<char, rest::binary>>) when char in ?A..?Z when char in ?a..?z when char in ?0..?9 when char == ?_, do: inspectable_end?(rest) defp inspectable_end?(<<?<, _::binary>>), do: true defp inspectable_end?(_), do: false defp raise_incomplete_doctest(line_no, module) do raise Error, line: line_no, module: module, message: "expected non-blank line to follow iex> prompt" end end
30.530639
102
0.627536
0844a1119ba72d40b45daa68ef649887458ffeb9
2,470
ex
Elixir
lib/enmity/user.ex
Cantido/enmity
385d6b54824e167a77b7eeab445a4e42ef19114e
[ "Unlicense", "MIT" ]
1
2019-11-25T11:09:04.000Z
2019-11-25T11:09:04.000Z
lib/enmity/user.ex
Cantido/enmity
385d6b54824e167a77b7eeab445a4e42ef19114e
[ "Unlicense", "MIT" ]
7
2020-10-14T18:28:36.000Z
2022-02-24T11:06:17.000Z
lib/enmity/user.ex
Cantido/enmity
385d6b54824e167a77b7eeab445a4e42ef19114e
[ "Unlicense", "MIT" ]
null
null
null
defmodule Enmity.User do alias Enmity.HTTP @moduledoc """ Operations on Users. """ @doc """ Gets a user. """ def get(user_id) do HTTP.get("/users/#{user_id}") end @doc """ Gets the current user. """ def get_me do get("@me") end @doc """ Modify the current user. ## Examples Change your username: Enmity.User.modify_me(username: "My brand new name") Change your avatar: Enmity.User.modify_me(avatar: <<...>>) You can also change both at the same time. This function always returns the updated user object. """ def modify_me(args) do args = args |> Keyword.take([:username, :avatar]) |> Map.new() args = if Map.has_key?(args, :avatar) do Map.update!(args, :avatar, &convert_to_data_uri!/1) else args end HTTP.patch("/users/@me", Poison.encode!(args)) end defp convert_to_data_uri!(image) when is_binary(image) do case convert_to_data_uri(image) do {:ok, uri} -> uri {:error, [invalid_image_type: type]} -> raise "Invalid image type: #{type}" end end defp convert_to_data_uri(image) when is_binary(image) do case ExImageInfo.info(image) do {"image/jpeg", _, _, _} -> {:ok, "data:image/jpeg;base64,#{Base.encode64(image)}"} {"image/png", _, _, _} -> {:ok, "data:image/png;base64,#{Base.encode64(image)}"} {"image/gif", _, _, _} -> {:ok, "data:image/gif;base64.,#{Base.encode64(image)}"} {type, _, _, _} -> {:error, [invalid_image_type: type]} end end @doc """ Get all the current user's guilds (called "servers" on the frontend) """ def my_guilds do HTTP.get("/users/@me/guilds") end @doc """ Leave a guild. """ def leave_guild(guild_id) do HTTP.delete("/users/@me/guilds/#{guild_id}") end @doc """ Create a direct message with the given recipient. """ def create_dm(recipient_id) do HTTP.post("/users/@me/channels", Poison.encode!(recipient_id)) end @doc """ Create a group DM with the given users. A list of the user's tokens must be provided, along with the nicknames of the users involved. 
""" def create_group_dm(tokens, nicks) when is_list(tokens) and is_map(nicks) do HTTP.post("/users/@me/channels", Poison.encode!(%{tokens: tokens, nicks: nicks})) end @doc """ Get all of the current user's Facebook, Twitch, Twitter, etc. connections. """ def my_connections do HTTP.get("/users/@me/connections") end end
23.52381
95
0.631579
0844fe79ce031396e3a58d312d25b3ed23894b14
370
ex
Elixir
lib/nookal/dispatcher.ex
theo-agilelab/nookal-elixir
09db7cc48c48ed1e714fb74c5c38c9a21e7e189a
[ "MIT" ]
1
2020-06-11T07:57:06.000Z
2020-06-11T07:57:06.000Z
lib/nookal/dispatcher.ex
theo-agilelab/nookal-elixir
09db7cc48c48ed1e714fb74c5c38c9a21e7e189a
[ "MIT" ]
null
null
null
lib/nookal/dispatcher.ex
theo-agilelab/nookal-elixir
09db7cc48c48ed1e714fb74c5c38c9a21e7e189a
[ "MIT" ]
1
2019-09-05T08:30:48.000Z
2019-09-05T08:30:48.000Z
defmodule Nookal.Dispatcher do @moduledoc false @callback dispatch(req_path :: String.t()) :: {:ok, term()} | {:error, term()} @callback dispatch(req_path :: String.t(), req_params :: map()) :: {:ok, term()} | {:error, term()} @callback upload(file_content :: String.t(), req_params :: map()) :: {:ok, term()} | {:error, term()} end
37
80
0.567568
08452f6318e55f530d072d7dcfd2d7a3ee5eed4c
195
exs
Elixir
priv/repo/migrations/20190308170458_add_roles_to_users.exs
BaltimoreCity/IdeaPortal
dc1c775dfaec2aac974b821cd3700d76770c1e76
[ "MIT" ]
5
2019-08-29T20:22:25.000Z
2020-04-01T17:40:48.000Z
priv/repo/migrations/20190308170458_add_roles_to_users.exs
BaltimoreCity/IdeaPortal
dc1c775dfaec2aac974b821cd3700d76770c1e76
[ "MIT" ]
34
2019-03-06T17:53:29.000Z
2021-09-01T01:25:23.000Z
priv/repo/migrations/20190308170458_add_roles_to_users.exs
BaltimoreCity/IdeaPortal
dc1c775dfaec2aac974b821cd3700d76770c1e76
[ "MIT" ]
2
2020-01-10T22:12:36.000Z
2021-01-22T04:37:45.000Z
defmodule IdeaPortal.Repo.Migrations.AddRolesToUsers do use Ecto.Migration def change do alter table(:users) do add(:role, :string, default: "user", null: false) end end end
19.5
55
0.697436
08455c97d5b17394c1ef6ed366ac34325d8f1f94
23,419
ex
Elixir
apps/core/lib/core/services/repositories.ex
michaeljguarino/chartmart
a34c949cc29d6a1ab91c04c5e4f797e6f0daabfc
[ "Apache-2.0" ]
null
null
null
apps/core/lib/core/services/repositories.ex
michaeljguarino/chartmart
a34c949cc29d6a1ab91c04c5e4f797e6f0daabfc
[ "Apache-2.0" ]
2
2019-12-13T23:55:50.000Z
2019-12-17T05:49:58.000Z
apps/core/lib/core/services/repositories.ex
michaeljguarino/chartmart
a34c949cc29d6a1ab91c04c5e4f797e6f0daabfc
[ "Apache-2.0" ]
null
null
null
defmodule Core.Services.Repositories do use Core.Services.Base import Core.Policies.Repository alias Core.PubSub alias Core.Services.Users alias Core.Auth.Jwt alias Core.Clients.Hydra alias Core.Schema.{ Repository, Installation, User, DockerRepository, DockerImage, LicenseToken, License, Integration, Subscription, Plan, Artifact, OIDCProvider, ApplyLock } alias Piazza.Crypto.RSA @type error :: {:error, term} @type repository_resp :: {:ok, Repository.t} | error @spec get_installation!(binary) :: Installation.t def get_installation!(id), do: Core.Repo.get!(Installation, id) @spec get_installation(binary, binary) :: Installation.t | nil def get_installation(user_id, repo_id) do Core.Repo.get_by(Installation, repository_id: repo_id, user_id: user_id) end def get_installation_by_key!(key), do: Core.Repo.get_by!(Installation, license_key: key) def get_installation_by_key(key), do: Core.Repo.get_by(Installation, license_key: key) @spec get_repository!(binary) :: Repository.t def get_repository!(id), do: Core.Repo.get(Repository, id) @spec get_repository_by_name!(binary) :: Repository.t def get_repository_by_name!(name), do: Core.Repo.get_by!(Repository, name: name) @spec get_repository_by_name(binary) :: Repository.t | nil def get_repository_by_name(name), do: Core.Repo.get_by(Repository, name: name) def get_license_token(token), do: Core.Repo.get_by(LicenseToken, token: token) def get_artifact(repo_id, name, platform, arch) do Core.Repo.get_by( Artifact, repository_id: repo_id, name: name, platform: platform, arch: arch ) end def get_dkr_image!(image_id), do: Core.Repo.get!(DockerImage, image_id) def get_dkr_repository(repo_name, dkr_name) do DockerRepository.for_repository_name(repo_name) |> Core.Repo.get_by!(name: dkr_name) end def get_dkr_image(repo_name, dkr_name, tag) do DockerRepository.for_repository_name(repo_name) |> DockerRepository.for_name(dkr_name) |> DockerImage.for_repositories() |> DockerImage.for_tag(tag) |> Core.Repo.one() end def 
get_oidc_provider_by_client!(client_id) do Core.Repo.get_by!(OIDCProvider, client_id: client_id) |> Core.Repo.preload([:bindings, installation: :repository]) end @doc """ Creates a new repository for the user's publisher Will throw if there is no publisher """ @spec create_repository(map, User.t) :: repository_resp def create_repository(attrs, %User{} = user) do publisher = Users.get_publisher_by_owner!(user.id) create_repository(attrs, publisher.id, user) end @doc """ Creates a repository for a publisher id. Will fail if the user does not have publish permissions, or is not the owner of the publisher. """ @spec create_repository(map, binary, User.t) :: repository_resp def create_repository(attrs, publisher_id, %User{} = user) do start_transaction() |> add_operation(:repo, fn _ -> %Repository{publisher_id: publisher_id} |> Repository.changeset(attrs) |> allow(user, :create) |> when_ok(:insert) end) |> add_operation(:licensed, fn %{repo: repo} -> generate_keys(repo) end) |> execute(extract: :licensed) |> notify(:create, user) end @doc """ Updates the given repository. Fails if the user is not the publisher """ @spec update_repository(map, binary, User.t) :: repository_resp def update_repository(attrs, repo_id, %User{} = user) do get_repository!(repo_id) |> Core.Repo.preload([:integration_resource_definition, :tags, :dashboards, :shell, :database]) |> Repository.changeset(attrs) |> allow(user, :edit) |> when_ok(:update) |> notify(:update, user) end @doc """ Deletes the repository. This might be deprecated as it's inherently unsafe. Fails if the user is not the publisher. """ @spec delete_repository(binary, User.t) :: repository_resp def delete_repository(repo_id, %User{} = user) do get_repository!(repo_id) |> allow(user, :edit) |> when_ok(:delete) end @doc """ Creates or updates a repository depending on whether one exists for `name`. 
All access policies for the delegated operations apply """ @spec upsert_repository(map, binary, binary, User.t) :: repository_resp def upsert_repository(attrs, name, publisher_id, %User{} = user) do case get_repository_by_name(name) do %Repository{id: id} -> update_repository(attrs, id, user) nil -> create_repository(Map.put(attrs, :name, name), publisher_id, user) end end @doc """ Returns the list of docker accesses available for `user` against the given repository """ @spec authorize_docker(binary, binary, User.t | nil) :: [:push | :pull] def authorize_docker(repo_name, dkr_name, nil) do DockerRepository.for_repository_name(repo_name) |> Core.Repo.get_by(name: dkr_name) |> case do %DockerRepository{public: true} -> [:pull] _ -> [] end end def authorize_docker(repo_name, dkr_name, %User{} = user) do repo = get_repository_by_name!(repo_name) |> Core.Repo.preload([:publisher]) Parallax.new() |> Parallax.operation(:push, fn -> allow(repo, user, :edit) end) |> Parallax.operation(:pull, fn -> allow(repo, user, :pull) end) |> Parallax.execute() |> Enum.filter(fn {_, {:ok, _}} -> true _ -> false end) |> Enum.map(&elem(&1, 0)) |> Enum.concat(authorize_docker(repo_name, dkr_name, nil)) |> Enum.uniq() end @doc """ Persists a given docker image with the given tag. Called by the docker registry notification webhook. 
""" @spec create_docker_image(binary, binary, binary, User.t) :: {:ok, DockerImage.t} | {:error, term} def create_docker_image(repo, tag, digest, user) do [cm_repo | rest] = String.split(repo, "/") cm_repo = get_repository_by_name!(cm_repo) start_transaction() |> add_operation(:repo, fn _ -> Enum.join(rest, "/") |> upsert_docker_repo(cm_repo) end) |> add_operation(:image, fn %{repo: repo} -> upsert_image(tag, digest, repo) end) |> execute() |> notify(:create, user) end @doc """ Appends vulnerabilities to a docker image """ @spec add_vulnerabilities(list, Image.t) :: {:ok, DockerImage.t} | {:error, term} def add_vulnerabilities(vulns, image) do Core.Repo.preload(image, [:vulnerabilities]) |> DockerImage.vulnerability_changeset(%{ vulnerabilities: vulns, scanned_at: Timex.now(), grade: grade(vulns) }) |> Core.Repo.update() end defp grade(vulns) when is_list(vulns) do vulns |> Enum.reduce(%{}, fn %{severity: severity}, acc -> Map.update(acc, severity, 0, & &1 + 1) end) |> case do %{critical: _} -> :f %{high: _} -> :d %{medium: _} -> :c %{low: _} -> :b _ -> :a end end defp grade(_), do: :a defp upsert_docker_repo(name, %Repository{id: id}) do case Core.Repo.get_by(DockerRepository, repository_id: id, name: name) do nil -> %DockerRepository{repository_id: id, name: name} %DockerRepository{} = repo -> repo end |> DockerRepository.changeset() |> Core.Repo.insert_or_update() end defp upsert_image(nil, _, _), do: {:ok, nil} defp upsert_image(tag, digest, %DockerRepository{id: id}) do case Core.Repo.get_by(DockerImage, docker_repository_id: id, tag: tag) do nil -> %DockerImage{docker_repository_id: id, tag: tag} %DockerImage{} = repo -> repo end |> DockerImage.changeset(%{digest: digest}) |> Core.Repo.insert_or_update() end def update_docker_repository(attrs, id, %User{} = user) do Core.Repo.get!(DockerRepository, id) |> DockerRepository.changeset(attrs) |> allow(user, :edit) |> when_ok(:update) |> notify(:update, user) end @doc """ Constructs a docker-compliant jwt for the 
given repo and scopes. """ @spec docker_token([binary | atom], binary, User.t | nil) :: {:ok, binary} | {:error, term} def docker_token(scopes, repo_name, user) do signer = Jwt.signer() access = [%{"type" => "repository", "name" => repo_name, "actions" => scopes}] with {:ok, claims} <- Jwt.generate_claims(%{"sub" => dkr_sub(user), "access" => access}), {:ok, token, _} <- Jwt.encode_and_sign(claims, signer), do: {:ok, token} end defp dkr_sub(%{email: email}), do: email defp dkr_sub(_), do: "" @doc """ Constructs a dummy jwt for user on docker login """ @spec dkr_login_token(User.t | nil) :: {:ok, binary} | {:error, term} def dkr_login_token(nil), do: {:error, :invalid_password} def dkr_login_token(%User{} = user) do signer = Jwt.signer() with {:ok, claims} <- Jwt.generate_claims(%{"sub" => user.email, "access" => []}), {:ok, token, _} <- Jwt.encode_and_sign(claims, signer), do: {:ok, token} end @doc """ Creates or updates the given integration for the repo. Fails if the user is not the publisher """ @spec upsert_integration(map, binary, User.t) :: {:ok, Integration.t} | {:error, term} def upsert_integration(%{name: name} = attrs, repo_id, %User{} = user) do repo = get_repository!(repo_id) |> Core.Repo.preload([:integration_resource_definition]) pub = Users.get_publisher_by_owner(user.id) case Core.Repo.get_by(Integration, name: name, repository_id: repo_id) do %Integration{} = int -> Core.Repo.preload(int, [:tags]) _ -> %Integration{repository_id: repo_id, name: name, publisher_id: pub && pub.id} end |> Integration.changeset(Map.put(attrs, :publisher_id, pub && pub.id)) |> Integration.validate(repo.integration_resource_definition) |> allow(user, :edit) |> when_ok(&Core.Repo.insert_or_update/1) end @doc """ Creates a new installation for a repository for the given user """ @spec create_installation(map, binary, User.t) :: {:ok, Installation.t} | {:error, term} def create_installation(attrs, repository_id, %User{} = user) do repo = get_repository!(repository_id) attrs 
= add_track_tag(attrs, repo) %Installation{repository_id: repository_id, user_id: user.id, auto_upgrade: true} |> Installation.changeset(Map.put_new(attrs, :context, %{})) |> allow(user, :create) |> when_ok(:insert) |> notify(:create, user) end defp add_track_tag(attrs, %Repository{default_tag: tag}) when is_binary(tag) and byte_size(tag) > 0, do: Map.put(attrs, :track_tag, tag) defp add_track_tag(attrs, _), do: attrs @doc """ Updates the given installation. Fails if the user is not the original installer. """ @spec update_installation(map, binary, User.t) :: {:ok, Installation.t} | {:error, term} def update_installation(attrs, inst_id, %User{} = user) do get_installation!(inst_id) |> Installation.changeset(attrs) |> allow(user, :edit) |> when_ok(:update) |> notify(:update, user) end @doc """ Deletes the given installation. If there is also a subscription, will delete it as well. Fails if the user is not the installer. """ @spec delete_installation(binary | Installation.t, User.t) :: {:ok, Installation.t} | {:error, term} def delete_installation(%Installation{} = installation, %User{} = user) do start_transaction() |> add_operation(:subscription, fn _ -> Core.Repo.preload(installation, [:subscription]) |> case do %{subscription: %Subscription{} = sub} -> Core.Services.Payments.cancel_subscription(sub, user) _ -> {:ok, nil} end end) |> add_operation(:installation, fn _ -> installation |> allow(user, :edit) |> when_ok(:delete) end) |> execute(extract: :installation) |> notify(:delete, user) end def delete_installation(inst_id, user), do: get_installation!(inst_id) |> delete_installation(user) @doc """ Will delete all installations for a user and reset their provider pin """ @spec reset_installations(User.t) :: {:ok, integer} | {:error, term} def reset_installations(%User{} = user) do Installation.for_user(user.id) |> Core.Repo.all() |> Enum.reduce(start_transaction(), fn inst, tx -> add_operation(tx, inst.id, fn _ -> delete_installation(inst, user) end) end) |> 
add_operation(:user, fn _ -> Users.update_provider(nil, user) end) |> execute() |> when_ok(fn results -> Map.keys(results) |> Enum.reject(& &1 == :user) |> Enum.count() end) end @oidc_scopes "profile code openid" @doc """ Creates a new oidc provider for a given installation, enabling a log-in with plural experience """ @spec create_oidc_provider(map, binary, User.t) :: {:ok, OIDCProvider.t} | {:error, term} def create_oidc_provider(attrs, installation_id, %User{} = user) do start_transaction() |> add_operation(:installation, fn _ -> get_installation!(installation_id) |> allow(user, :edit) end) |> add_operation(:client, fn _ -> Map.take(attrs, [:redirect_uris]) |> Map.put(:scope, @oidc_scopes) |> Map.put(:token_endpoint_auth_method, oidc_auth_method(attrs.auth_method)) |> Hydra.create_client() end) |> add_operation(:oidc_provider, fn %{installation: %{id: id}, client: %{client_id: cid, client_secret: secret}} -> %OIDCProvider{installation_id: id} |> OIDCProvider.changeset(Map.merge(attrs, %{client_id: cid, client_secret: secret})) |> Core.Repo.insert() end) |> execute(extract: :oidc_provider) |> notify(:create) end @doc """ Inserts or updates the oidc provider for an installation """ @spec upsert_oidc_provider(map, binary, User.t) :: {:ok, OIDCProvider.t} | {:error, term} def upsert_oidc_provider(attrs, installation_id, %User{} = user) do case Core.Repo.get_by(OIDCProvider, installation_id: installation_id) do %OIDCProvider{} -> update_oidc_provider(attrs, installation_id, user) _ -> create_oidc_provider(attrs, installation_id, user) end end @doc """ Updates the spec of an installation's oidc provider """ @spec update_oidc_provider(map, binary, User.t) :: {:ok, OIDCProvider.t} | {:error, term} def update_oidc_provider(attrs, installation_id, %User{} = user) do start_transaction() |> add_operation(:installation, fn _ -> get_installation!(installation_id) |> Core.Repo.preload([oidc_provider: :bindings]) |> allow(user, :edit) end) |> add_operation(:client, fn 
%{installation: %{oidc_provider: %{client_id: id, auth_method: auth_method}}} -> attrs = Map.take(attrs, [:redirect_uris]) |> Map.put(:scope, @oidc_scopes) |> Map.put(:token_endpoint_auth_method, oidc_auth_method(auth_method)) Hydra.update_client(id, attrs) end) |> add_operation(:oidc_provider, fn %{installation: %{oidc_provider: provider}} -> provider |> OIDCProvider.changeset(attrs) |> Core.Repo.update() end) |> execute(extract: :oidc_provider) |> notify(:update) end @doc """ Gets or creates a new apply lock to use in plural apply commands. The user performing this action will own the lock until manually released """ @spec acquire_apply_lock(binary, User.t) :: {:ok, ApplyLock.t} | {:error, term} def acquire_apply_lock(repository_id, %User{} = user) do case Core.Repo.get_by(ApplyLock, repository_id: repository_id) do %ApplyLock{} = lock -> lock nil -> %ApplyLock{repository_id: repository_id} end |> allow(user, :create) |> when_ok(fn lock -> ApplyLock.changeset(lock, %{owner_id: user.id}) |> Core.Repo.insert_or_update() end) end @doc """ Updates the lock and releases ownership by the given user """ @spec release_apply_lock(map, binary, User.t) :: {:ok, ApplyLock.t} | {:error, term} def release_apply_lock(attrs, repository_id, %User{id: user_id} = user) do case Core.Repo.get_by(ApplyLock, repository_id: repository_id) do %ApplyLock{owner_id: ^user_id} = lock -> flush_lock(lock, attrs, user) nil -> flush_lock(%ApplyLock{repository_id: repository_id}, attrs, user) _ -> {:error, :not_found} end end defp flush_lock(lock, attrs, user) do lock |> allow(user, :create) |> when_ok(&ApplyLock.changeset(&1, Map.put(attrs, :owner_id, nil))) |> when_ok(&Core.Repo.insert_or_update/1) end defp oidc_auth_method(:basic), do: "client_secret_basic" defp oidc_auth_method(:post), do: "client_secret_post" @doc """ Deletes an oidc provider and its hydra counterpart """ @spec delete_oidc_provider(binary, User.t) :: {:ok, OIDCProvider.t} | {:error, term} def 
delete_oidc_provider(installation_id, %User{} = user) do start_transaction() |> add_operation(:installation, fn _ -> get_installation!(installation_id) |> Core.Repo.preload([oidc_provider: :bindings]) |> allow(user, :edit) end) |> add_operation(:client, fn %{installation: %{oidc_provider: %{client_id: id}}} -> with :ok <- Hydra.delete_client(id), do: {:ok, nil} end) |> add_operation(:oidc_provider, fn %{installation: %{oidc_provider: provider}} -> Core.Repo.delete(provider) end) |> execute(extract: :oidc_provider) end @doc """ Creates a new artifact for the repository, representing a downloadable resource like a cli, desktop app, etc. Fails if the user is not the publisher """ @spec create_artifact(map, binary, User.t) :: {:ok, Artifact.t} | {:error, term} def create_artifact(%{name: name, platform: plat} = attrs, repository_id, %User{} = user) do attrs = Map.put_new(attrs, :arch, "amd64") get_artifact(repository_id, name, plat, attrs.arch) |> case do %Artifact{} = art -> art _ -> %Artifact{repository_id: repository_id} end |> Artifact.changeset(attrs) |> allow(user, :edit) |> when_ok(&Core.Repo.insert_or_update/1) end @doc """ Generates a refresh token for the license, constructs the policy given the current subscription (or free if there are no plans configured). Fails if the installation has no suscription but the repository has plans available. 
""" @spec generate_license(Installation.t) :: {:ok, binary | nil} def generate_license(%Installation{} = installation) do %{repository: repo} = installation = Core.Repo.preload(installation, [:repository, [subscription: :plan]]) with {:ok, %{token: token}} <- upsert_license_token(installation), {:ok, policy} <- mk_policy(installation, Core.Services.Payments.has_plans?(repo.id)) do License.new(policy: policy, refresh_token: token, secrets: repo.secrets) |> Jason.encode!() |> RSA.encrypt(ExPublicKey.loads!(repo.private_key)) else _ -> {:ok, nil} end end def license(%Installation{} = installation) do handle_notify(PubSub.LicensePing, installation) %{repository: repo} = installation = Core.Repo.preload(installation, [:repository, [subscription: :plan]]) with {:ok, policy} <- mk_policy(installation, Core.Services.Payments.has_plans?(repo.id)), do: {:ok, License.new(policy: policy, secrets: repo.secrets)} end defp mk_policy(%Installation{subscription: %Subscription{line_items: %{items: items}, plan: plan} = sub}, _) do limits = Enum.into(items, %{}, fn %{dimension: dim} -> {dim, Subscription.dimension(sub, dim)} end) features = Plan.features(plan) {:ok, %{limits: limits, features: features, plan: plan.name, free: false}} end defp mk_policy(_, false), do: {:ok, %{free: true}} defp mk_policy(_, _), do: :error @doc """ Constructs a new license file with the given license token. 
""" @spec refresh_license(LicenseToken.t) :: {:ok, binary} | {:error, :not_found} def refresh_license(%LicenseToken{installation: %Installation{} = installation}), do: generate_license(installation) def refresh_license(token) when is_binary(token) do Core.Repo.get_by!(LicenseToken, token: token) |> Core.Repo.preload([:installation]) |> refresh_license() end def refresh_license(_), do: {:error, :not_found} @doc """ Generates an rsa key pair and persists it to the repository """ @spec generate_keys(Repository.t) :: {:ok, Repository.t} | {:error, term} def generate_keys(%Repository{} = repo) do with {:ok, keypair} <- RSA.generate_keypair(), {:ok, {priv, pub}} <- RSA.pem_encode(keypair) do repo |> Repository.key_changeset(%{private_key: priv, public_key: pub}) |> Core.Repo.update() end end @doc """ Self-explanatory """ @spec upsert_license_token(Installation.t) :: {:ok, LicenseToken.t} | {:error, term} def upsert_license_token(%Installation{id: id}) do case Core.Repo.get_by(LicenseToken, installation_id: id) do %LicenseToken{} = token -> token nil -> %LicenseToken{} end |> LicenseToken.changeset(%{installation_id: id}) |> Core.Repo.insert_or_update() end @doc """ Attempts to grab the contents of a repo's github readme and returns the result """ @spec fetch_readme(Repository.t) :: binary | nil def fetch_readme(%Repository{git_url: "https://github.com" <> _ = url}) when is_binary(url), do: readme_fetch("#{url}/raw/{branch}/README.md") def fetch_readme(_), do: nil defp readme_fetch(url) do Enum.find_value(~w(main master), fn branch -> String.replace(url, "{branch}", branch) |> HTTPoison.get([], follow_redirect: true) |> case do {:ok, %HTTPoison.Response{status_code: 200, body: body}} -> body _ -> nil end end) end @doc """ Returns whether a user can `:access` the repository. 
""" @spec authorize(binary, User.t) :: {:ok, Repository.t} | {:error, term} def authorize(repo_id, %User{} = user) when is_binary(repo_id) do get_repository!(repo_id) |> authorize(user) end def authorize(%Repository{} = repo, user), do: allow(repo, user, :access) defp notify({:ok, %Installation{} = inst}, :create, user), do: handle_notify(PubSub.InstallationCreated, inst, actor: user) defp notify({:ok, %Installation{} = inst}, :update, user), do: handle_notify(PubSub.InstallationUpdated, inst, actor: user) defp notify({:ok, %Installation{} = inst}, :delete, user), do: handle_notify(PubSub.InstallationDeleted, inst, actor: user) defp notify({:ok, %DockerRepository{} = repo}, :update, user), do: handle_notify(PubSub.DockerRepositoryUpdated, repo, actor: user) defp notify({:ok, %Repository{} = repo}, :create, user), do: handle_notify(PubSub.RepositoryCreated, repo, actor: user) defp notify({:ok, %Repository{} = repo}, :update, user), do: handle_notify(PubSub.RepositoryUpdated, repo, actor: user) defp notify({:ok, %{image: %DockerImage{} = img}} = res, :create, user) do img = Core.Repo.preload(img, [docker_repository: :repository]) handle_notify(PubSub.DockerImageCreated, img, actor: user) res end defp notify(pass, _, _), do: pass defp notify({:ok, %OIDCProvider{} = oidc}, :create), do: handle_notify(PubSub.OIDCProviderCreated, oidc) defp notify({:ok, %OIDCProvider{} = oidc}, :update), do: handle_notify(PubSub.OIDCProviderUpdated, oidc) defp notify(pass, _), do: pass end
34.439706
113
0.662795
08455d99da80cfd98338ec27abbce72b2e44d090
8,356
ex
Elixir
lib/amqp/connection.ex
ayanda-d/amqp
09bba8f72cda24f7b02b2a25d6e4d9c97a1af661
[ "MIT" ]
1
2021-07-07T10:04:38.000Z
2021-07-07T10:04:38.000Z
lib/amqp/connection.ex
ayanda-d/amqp
09bba8f72cda24f7b02b2a25d6e4d9c97a1af661
[ "MIT" ]
null
null
null
lib/amqp/connection.ex
ayanda-d/amqp
09bba8f72cda24f7b02b2a25d6e4d9c97a1af661
[ "MIT" ]
null
null
null
defmodule AMQP.Connection do
  @moduledoc """
  Functions to operate on Connections.
  """

  import AMQP.Core

  alias AMQP.Connection

  defstruct [:pid]
  @type t :: %Connection{pid: pid}

  @doc """
  Opens a new Connection to an AMQP broker.

  The connections created by this module are supervised under amqp_client's
  supervision tree. Please note that connections do not get restarted
  automatically by the supervision tree in case of a failure. If you need
  robust connections and channels, use monitors on the returned connection PID.

  The connection parameters can be passed as a keyword list or as an AMQP URI.

  When using a keyword list, the following options can be used:

  # Options

    * `:username` - The name of a user registered with the broker (defaults to "guest");
    * `:password` - The password of user (defaults to "guest");
    * `:virtual_host` - The name of a virtual host in the broker (defaults to "/");
    * `:host` - The hostname of the broker (defaults to "localhost");
    * `:port` - The port the broker is listening on (defaults to `5672`);
    * `:channel_max` - The channel_max handshake parameter (defaults to `0`);
    * `:frame_max` - The frame_max handshake parameter (defaults to `0`);
    * `:heartbeat` - The heartbeat interval in seconds (defaults to `0` - turned off);
    * `:connection_timeout` - The connection timeout in milliseconds (defaults to `infinity`);
    * `:ssl_options` - Enable SSL by setting the location to cert files (defaults to `none`);
    * `:client_properties` - A list of extra client properties to be sent to the server, defaults to `[]`;
    * `:socket_options` - Extra socket options. These are appended to the default options. \
                          See http://www.erlang.org/doc/man/inet.html#setopts-2 and http://www.erlang.org/doc/man/gen_tcp.html#connect-4 \
                          for descriptions of the available options.

  ## Enabling SSL

  To enable SSL, supply the following in the `ssl_options` field:

    * `cacertfile` - Specifies the certificates of the root Certificate Authorities that we wish to implicitly trust;
    * `certfile` - The client's own certificate in PEM format;
    * `keyfile` - The client's private key in PEM format;

  ### Example

  ```
  AMQP.Connection.open port: 5671,
                       ssl_options: [cacertfile: '/path/to/testca/cacert.pem',
                                     certfile: '/path/to/client/cert.pem',
                                     keyfile: '/path/to/client/key.pem',
                                     # only necessary with intermediate CAs
                                     # depth: 2,
                                     verify: :verify_peer,
                                     fail_if_no_peer_cert: true]
  ```

  ## Examples

      iex> AMQP.Connection.open host: "localhost", port: 5672, virtual_host: "/", username: "guest", password: "guest"
      {:ok, %AMQP.Connection{}}

      iex> AMQP.Connection.open "amqp://guest:guest@localhost"
      {:ok, %AMQP.Connection{}}

  """
  @spec open(keyword|String.t) :: {:ok, t} | {:error, atom} | {:error, any}
  def open(options \\ [])

  def open(options) when is_list(options) do
    # SSL cert paths must be charlists for the underlying Erlang ssl app.
    options = normalize_ssl_options(options)

    amqp_params =
      amqp_params_network(username:           Keyword.get(options, :username,           "guest"),
                          password:           Keyword.get(options, :password,           "guest"),
                          virtual_host:       Keyword.get(options, :virtual_host,       "/"),
                          host:               Keyword.get(options, :host,               'localhost') |> to_charlist,
                          # :undefined lets amqp_client choose the default port
                          # (5672 plain / 5671 when ssl_options are set).
                          port:               Keyword.get(options, :port,               :undefined),
                          channel_max:        Keyword.get(options, :channel_max,        0),
                          frame_max:          Keyword.get(options, :frame_max,          0),
                          heartbeat:          Keyword.get(options, :heartbeat,          0),
                          connection_timeout: Keyword.get(options, :connection_timeout, :infinity),
                          ssl_options:        Keyword.get(options, :ssl_options,        :none),
                          client_properties:  Keyword.get(options, :client_properties,  []),
                          socket_options:     Keyword.get(options, :socket_options,     []),
                          auth_mechanisms:    Keyword.get(options, :auth_mechanisms,    [&:amqp_auth_mechanisms.plain/3,
                                                                                         &:amqp_auth_mechanisms.amqplain/3]))

    do_open(amqp_params)
  end

  def open(uri) when is_binary(uri) do
    # :amqp_uri.parse expects a charlist and returns a ready-made params record.
    case uri |> to_charlist |> :amqp_uri.parse do
      {:ok, amqp_params} -> do_open(amqp_params)
      error              -> error
    end
  end

  @doc """
  Opens a new direct Connection to an AMQP broker.

  Direct connection is the special type of connection that is supported by
  RabbitMQ broker, where Erlang distribution protocol is used to communicate
  with broker.

  It's a bit faster than the regular AMQP protocol, as there is no need to
  serialize and deserialize AMQP frames (especially when we are using this
  library at the same BEAM node where the RabbitMQ runs). But it's less
  secure, as giving direct access to a client means it has full control over
  RabbitMQ node.

  The connections created by this function are not restarted automatically,
  see open/1 for more details.

  The connection parameters are passed as a keyword list with the following
  options available:

  # Options

    * `:username` - The name of a user registered with the broker (defaults to `:none`);
    * `:password` - The password of the user (defaults to `:none`);
    * `:virtual_host` - The name of a virtual host in the broker (defaults to "/");
    * `:node` - Erlang node name to connect to (defaults to the current node);
    * `:client_properties` - A list of extra client properties to be sent to the server, defaults to `[]`;

  # Adapter options

  Additional details can be provided when a direct connection is used to
  provide connectivity for some non-AMQP protocol (like it happens in STOMP
  and MQTT plugins for RabbitMQ). We assume that you know what you are doing
  in this case, here is the options that maps to corresponding fields of
  `#amqp_adapter_info{}` record: `:adapter_host`, `:adapter_port`,
  `:adapter_peer_host`, `:adapter_peer_port`, `:adapter_name`,
  `:adapter_protocol`, `:adapter_additional_info`.

  ## Examples

      AMQP.Connection.open_direct node: :rabbit@localhost
      {:ok, %AMQP.Connection{}}

  """
  @spec open_direct(keyword) :: {:ok, t} | {:error, atom}
  def open_direct(options \\ [])

  def open_direct(options) when is_list(options) do
    # Adapter info is only meaningful when fronting a non-AMQP protocol;
    # every field falls back to :unknown, the record's neutral value.
    adapter_info =
      amqp_adapter_info(host:            Keyword.get(options, :adapter_host,            :unknown),
                        port:            Keyword.get(options, :adapter_port,            :unknown),
                        peer_host:       Keyword.get(options, :adapter_peer_host,       :unknown),
                        peer_port:       Keyword.get(options, :adapter_peer_port,       :unknown),
                        name:            Keyword.get(options, :adapter_name,            :unknown),
                        protocol:        Keyword.get(options, :adapter_protocol,        :unknown),
                        additional_info: Keyword.get(options, :adapter_additional_info, []))

    amqp_params =
      amqp_params_direct(username:          Keyword.get(options, :username,          :none),
                         password:          Keyword.get(options, :password,          :none),
                         virtual_host:      Keyword.get(options, :virtual_host,      "/"),
                         node:              Keyword.get(options, :node,              node()),
                         adapter_info:      adapter_info,
                         client_properties: Keyword.get(options, :client_properties, []))

    do_open(amqp_params)
  end

  @doc """
  Closes an open Connection.

  Returns `:ok` on success, or `{:error, reason}` when the underlying
  `:amqp_connection.close/1` call fails.
  """
  @spec close(t) :: :ok | {:error, any}
  def close(conn) do
    case :amqp_connection.close(conn.pid) do
      :ok   -> :ok
      error -> {:error, error}
    end
  end

  # Starts the connection from a prepared params record and wraps the
  # resulting pid in our struct.
  defp do_open(amqp_params) do
    case :amqp_connection.start(amqp_params) do
      {:ok, pid} -> {:ok, %Connection{pid: pid}}
      error      -> error
    end
  end

  # Converts SSL certificate/key path options from Elixir binaries to the
  # charlists required by Erlang's ssl application. All other options pass
  # through unchanged. BUG FIX: the list previously repeated :cacertfile
  # three times, leaving :certfile and :keyfile unconverted.
  defp normalize_ssl_options(options) when is_list(options) do
    for {k, v} <- options do
      if k in [:cacertfile, :certfile, :keyfile] do
        {k, to_charlist(v)}
      else
        {k, v}
      end
    end
  end

  defp normalize_ssl_options(options), do: options
end
41.98995
157
0.629607