hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c9a26787eb3d666a15feef1ea8817b3e7136fce | 5,397 | ex | Elixir | lib/gpixir.ex | benfb/gpixir | 919e15fb517027f0124375cf6ed2b0908884fa80 | [
"MIT"
] | 1 | 2016-04-10T08:14:23.000Z | 2016-04-10T08:14:23.000Z | lib/gpixir.ex | benfb/gpixir | 919e15fb517027f0124375cf6ed2b0908884fa80 | [
"MIT"
] | null | null | null | lib/gpixir.ex | benfb/gpixir | 919e15fb517027f0124375cf6ed2b0908884fa80 | [
"MIT"
] | null | null | null | defmodule Gpixir do
@moduledoc "A genetic programming library for Elixir"
import IO, only: [puts: 1]
import Enum, only: [count: 1, map: 2, into: 2, take: 2, zip: 2, random: 1, to_list: 1]
import Stream, only: [concat: 2]
import Gpixir.Util
@function_table zip(["and", "or", "nand", "nor", "not"],
[&p_and/2, &p_or/2, &nand/2, &nor/2, ¬/1])
@target_data [[false, false, false, true],
[false, false, true, false],
[false, true, false, false],
[false, true, true, true],
[true, false, false, false],
[true, false, true, true],
[true, true, false, true],
[true, true, true, false]]
def random_function do
@function_table |> Dict.values |> random
end
def random_terminal do
random([fn(in1, _, _) -> in1 end,
fn(_, in2, _) -> in2 end,
fn(_, _, in3) -> in3 end])
end
def random_code(depth) do
if (depth == 0) or (:rand.uniform(2) == 1) do
# puts "Depth: #{depth}"
random_terminal
else
f = random_function
# puts "Depth: #{depth}, Arity: #{arity f}"
# puts Macro.to_string(f)
# puts "Arity: #{arity(f)}"
# USE MACROS AND QUOTING/THE AST INSTEAD OF NESTED CALLS
f.(repeatedly(fn() -> random_code(depth - 1) end, arity(f)))
# Stream.map(&(repeatedly(fn() -> random_code(depth - 1) end) |> Stream.take(arity(&1))), f)
# Stream.concat([f], repeatedly(fn() -> random_code(depth - 1) end) |> take(arity(f)))
end
end
def error(individual) do
puts "Individual: #{Macro.to_string individual}"
puts "Getting error..."
value_function = fn(in1, in2, in3) -> individual.([in1, in2, in3]) end
puts "Value function: #{Macro.to_string value_function}"
s = @target_data |> map(fn([in1, in2, in3, correct_output]) ->
if(individual.(in1, in2, in3) == correct_output) do
puts "Inserting 0"
0
else
puts "Inserting 1"
1
end
end) |> sum_list
puts Macro.to_string s
s
end
def codesize(c) do
puts "Getting codesize..."
puts Macro.to_string(c)
String.length(Macro.to_string(c))
end
def random_subtree(i) do
if :rand.uniform(codesize(i) + 1) == 1 do
puts "The thing is: #{Macro.to_string(i)}"
i
else
tl(i) |> map(fn(a) -> repeatedly(a, codesize(a)) end)
|> List.flatten
|> random
|> random_subtree
# random_subtree(Enum.random(List.flatten([Stream.map(tl(i), fn(a) -> repeatedly(a, codesize(a)) end)])))
end
end
def replace_random_subtree(i, replacement) do
if :rand.uniform(codesize(i) + 1) == 1 do
replacement
else
# zipped = Enum.zip(tl(i), one_to_inf)
# puts "The thing: #{Macro.to_string(zipped)}"
position_to_change = zip(tl(i), one_to_inf)
|> map(fn{a, b} -> repeat(codesize(a), b) end)
|> Stream.concat
|> random
map(zip(for(n <- zero_to_inf, do: n == position_to_change), i), fn{a, b} ->
if a do
replace_random_subtree(b, replacement)
else
b
end
end)
end
end
def mutate(i) do
puts "Mutating!!"
replace_random_subtree(i, random_code(2))
end
def crossover(i, j) do
puts "Crossing over!!"
replace_random_subtree(i, random_subtree(j))
end
def sort_by_error(population) do
puts "Sorting #{Macro.to_string population} by error..."
errors = population |> map(&(Enum.concat([error(&1)], [&1])))
puts "Got errors!!"
puts Macro.to_string errors
es = into([], errors
|> Enum.sort(fn([err1, _], [err2, _]) -> err1 < err2 end)
|> map(&second/1))
puts Macro.to_string es
es
end
def select(population, tournament_size) do
puts "Selecting!!"
size = count(population)
Enum.fetch(population, repeat(:rand.uniform(size), tournament_size)
|> to_list
|> Enum.min)
end
def evolve_sub(generation, population, size) do
best = hd(population)
best_error = error(best)
puts "======================"
puts "Generation: #{generation}"
puts "Best error: #{best_error}"
puts "Best program: #{Macro.to_string best}"
puts "Median error: #{error(Enum.fetch!(population, div(size, 2)))}"
puts "Average program size: #{Macro.to_string((population |> map(&codesize/1) |> sum_list) / count(population))}"
if best_error < 100.1 do
puts "Success: #{Macro.to_string best}"
# puts "Success: #{Macro.to_string(Macro.expand(best, __ENV__))}"
else
mutated = repeatedly(mutate(select(population, 5)), round(size * 0.05))
crossed_over = repeatedly(crossover(select(population, 5), select(population, 5)), round(size * 0.8))
mut_and_cross = concat(mutated, crossed_over)
selected = repeatedly(fn() -> select(population, 5) end, round(size * 0.1))
to_be_sorted = mut_and_cross |> concat(selected) |> to_list |> sort_by_error
evolve_sub(generation + 1, to_be_sorted, size)
end
end
def evolve(popsize) do
puts "Starting evolution..."
will_sort = to_list(repeatedly(fn() -> random_code(2) end, popsize))
evolve_sub(0, sort_by_error(will_sort), popsize)
end
end
| 33.73125 | 117 | 0.579952 |
1c9a40b27627c1942677cec53859afd88639df4b | 310 | ex | Elixir | lib/flix_web/graphql/schema.ex | conradwt/flix-elixir | e4d6bf6fd79be12fbed6fb6250f78e929247c1a4 | [
"MIT"
] | 3 | 2021-03-21T23:52:16.000Z | 2021-06-02T03:47:00.000Z | lib/flix_web/graphql/schema.ex | conradwt/flix-elixir | e4d6bf6fd79be12fbed6fb6250f78e929247c1a4 | [
"MIT"
] | 44 | 2021-04-09T04:04:13.000Z | 2022-03-29T06:29:37.000Z | lib/flix_web/graphql/schema.ex | conradwt/flix-elixir | e4d6bf6fd79be12fbed6fb6250f78e929247c1a4 | [
"MIT"
] | null | null | null | defmodule FlixWeb.Graphql.Schema do
use Absinthe.Schema
import_types(FlixWeb.Graphql.Types.{
Genre,
Movie,
Review,
User
})
import_types(FlixWeb.Graphql.Schemas.Queries.{
Movie,
User
})
query do
import_fields(:movie_queries)
import_fields(:user_queries)
end
end
| 14.761905 | 48 | 0.683871 |
1c9a40f78984ffdace822e924b8173ef0854db90 | 439 | exs | Elixir | elixir/samples/kv_umbrella/apps/kv/test/kv/router_test.exs | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | 2 | 2015-12-09T02:16:51.000Z | 2021-07-26T22:53:43.000Z | elixir/samples/kv_umbrella/apps/kv/test/kv/router_test.exs | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | null | null | null | elixir/samples/kv_umbrella/apps/kv/test/kv/router_test.exs | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | 1 | 2016-05-08T18:40:31.000Z | 2016-05-08T18:40:31.000Z | defmodule KV.RouterTest do
use ExUnit.Case, async: true
@tag :distributed
test "route requests accross nodes" do
assert KV.Router.route("hello", Kernel, :node, []) == :"foo@GrayBox"
assert KV.Router.route("world", Kernel, :node, []) == :"bar@GrayBox"
end
test "raises on unknown entries" do
assert_raise RuntimeError, ~r/Could not find entry/, fn ->
KV.Router.route(<<0>>, Kernel, :node, [])
end
end
end | 29.266667 | 72 | 0.651481 |
1c9a75a2d2e185de29b4bb7aa2932e69c7d54a15 | 2,451 | ex | Elixir | clients/firebase_app_check/lib/google_api/firebase_app_check/v1beta/model/google_firebase_appcheck_v1beta_safety_net_config.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/firebase_app_check/lib/google_api/firebase_app_check/v1beta/model/google_firebase_appcheck_v1beta_safety_net_config.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/firebase_app_check/lib/google_api/firebase_app_check/v1beta/model/google_firebase_appcheck_v1beta_safety_net_config.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.FirebaseAppCheck.V1beta.Model.GoogleFirebaseAppcheckV1betaSafetyNetConfig do
@moduledoc """
An app's SafetyNet configuration object. This configuration controls certain properties of the App Check token returned by ExchangeSafetyNetToken, such as its ttl. Note that your registered SHA-256 certificate fingerprints are used to validate tokens issued by SafetyNet; please register them via the Firebase Console or programmatically via the [Firebase Management Service](https://firebase.google.com/docs/projects/api/reference/rest/v1beta1/projects.androidApps.sha/create).
## Attributes
* `name` (*type:* `String.t`, *default:* `nil`) - Required. The relative resource name of the SafetyNet configuration object, in the format: ``` projects/{project_number}/apps/{app_id}/safetyNetConfig ```
* `tokenTtl` (*type:* `String.t`, *default:* `nil`) - Specifies the duration for which App Check tokens exchanged from SafetyNet tokens will be valid. If unset, a default value of 1 hour is assumed. Must be between 30 minutes and 7 days, inclusive.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:name => String.t() | nil,
:tokenTtl => String.t() | nil
}
field(:name)
field(:tokenTtl)
end
defimpl Poison.Decoder,
for: GoogleApi.FirebaseAppCheck.V1beta.Model.GoogleFirebaseAppcheckV1betaSafetyNetConfig do
def decode(value, options) do
GoogleApi.FirebaseAppCheck.V1beta.Model.GoogleFirebaseAppcheckV1betaSafetyNetConfig.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.FirebaseAppCheck.V1beta.Model.GoogleFirebaseAppcheckV1betaSafetyNetConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 44.563636 | 480 | 0.75969 |
1c9a75f3c42d2e5d2474e04a5054af9c2bb07c28 | 2,486 | ex | Elixir | lib/live_chat_web.ex | msimonborg/live_chat | 0a2405b0399e3af569a5c24d8b0c6295132d9561 | [
"MIT"
] | null | null | null | lib/live_chat_web.ex | msimonborg/live_chat | 0a2405b0399e3af569a5c24d8b0c6295132d9561 | [
"MIT"
] | null | null | null | lib/live_chat_web.ex | msimonborg/live_chat | 0a2405b0399e3af569a5c24d8b0c6295132d9561 | [
"MIT"
] | null | null | null | defmodule LiveChatWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use LiveChatWeb, :controller
use LiveChatWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: LiveChatWeb
import Phoenix.LiveView.Controller
import Plug.Conn
import LiveChatWeb.Gettext
alias LiveChatWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/live_chat_web/templates",
namespace: LiveChatWeb
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {LiveChatWeb.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def component do
quote do
use Phoenix.Component
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
import LiveChatWeb.Gettext
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import LiveView and .heex helpers (live_render, live_patch, <.form>, etc)
import Phoenix.LiveView.Helpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import LiveChatWeb.ErrorHelpers
import LiveChatWeb.Gettext
alias LiveChatWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 22 | 81 | 0.679807 |
1c9af0af1b789efa79c9672d6c7215accce4c491 | 259 | ex | Elixir | lib/exsemantica.ex | ExSemantica/exsemantica | c2d72513195f44b6b5f73c8cc07394de0a1fd273 | [
"Apache-2.0"
] | null | null | null | lib/exsemantica.ex | ExSemantica/exsemantica | c2d72513195f44b6b5f73c8cc07394de0a1fd273 | [
"Apache-2.0"
] | 2 | 2020-07-21T20:53:13.000Z | 2020-07-21T20:54:15.000Z | lib/exsemantica.ex | Chlorophytus/eactivitypub | 469346b4d5cd7ad2b575c245ac50fd71b00c4864 | [
"Apache-2.0"
] | null | null | null | defmodule Exsemantica do
@moduledoc """
Exsemantica keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 25.9 | 66 | 0.760618 |
1c9b1519fc188b07405aa5c9e2665886e06a48cb | 2,149 | ex | Elixir | clients/dataflow/lib/google_api/dataflow/v1b3/model/side_input_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/side_input_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/side_input_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dataflow.V1b3.Model.SideInputInfo do
@moduledoc """
Information about a side input of a DoFn or an input of a SeqDoFn.
## Attributes
* `kind` (*type:* `map()`, *default:* `nil`) - How to interpret the source element(s) as a side input value.
* `sources` (*type:* `list(GoogleApi.Dataflow.V1b3.Model.Source.t)`, *default:* `nil`) - The source(s) to read element(s) from to get the value of this side input.
If more than one source, then the elements are taken from the
sources, in the specified order if order matters.
At least one source is required.
* `tag` (*type:* `String.t`, *default:* `nil`) - The id of the tag the user code will access this side input by;
this should correspond to the tag of some MultiOutputInfo.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:kind => map(),
:sources => list(GoogleApi.Dataflow.V1b3.Model.Source.t()),
:tag => String.t()
}
field(:kind, type: :map)
field(:sources, as: GoogleApi.Dataflow.V1b3.Model.Source, type: :list)
field(:tag)
end
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.SideInputInfo do
def decode(value, options) do
GoogleApi.Dataflow.V1b3.Model.SideInputInfo.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.SideInputInfo do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.701754 | 167 | 0.709632 |
1c9b234682b2fc4e8495692b38204f60dbe68296 | 1,433 | ex | Elixir | lib/deuce_web/router.ex | fremantle-industries/deuce | b0b95333bc43d55e25f1c6a3a033bc053223a64c | [
"MIT"
] | 2 | 2022-01-16T23:58:45.000Z | 2022-03-16T09:29:16.000Z | lib/deuce_web/router.ex | fremantle-industries/deuce | b0b95333bc43d55e25f1c6a3a033bc053223a64c | [
"MIT"
] | null | null | null | lib/deuce_web/router.ex | fremantle-industries/deuce | b0b95333bc43d55e25f1c6a3a033bc053223a64c | [
"MIT"
] | null | null | null | defmodule DeuceWeb.Router do
use DeuceWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_live_flash
plug :put_root_layout, {DeuceWeb.LayoutView, :root}
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", DeuceWeb do
pipe_through :browser
get "/", PageController, :index
end
# Other scopes may use custom stacks.
# scope "/api", DeuceWeb do
# pipe_through :api
# end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through :browser
live_dashboard "/dashboard", metrics: DeuceWeb.Telemetry
end
end
# Enables the Swoosh mailbox preview in development.
#
# Note that preview only shows emails that were sent by the same
# node running the Phoenix server.
if Mix.env() == :dev do
scope "/dev" do
pipe_through :browser
forward "/mailbox", Plug.Swoosh.MailboxPreview
end
end
end
| 25.140351 | 70 | 0.69365 |
1c9b29b25f198e05ca5ff0fe040c1dd083dbd60b | 971 | ex | Elixir | test/route.ex | elixir-web/weber | 1c8caa43681cc432813dff33b2c6d08ca1d61f29 | [
"MIT"
] | 124 | 2015-01-03T16:48:21.000Z | 2022-02-02T21:13:11.000Z | test/route.ex | elixir-web/weber | 1c8caa43681cc432813dff33b2c6d08ca1d61f29 | [
"MIT"
] | 2 | 2015-03-08T05:29:36.000Z | 2015-07-19T15:31:19.000Z | test/route.ex | elixir-web/weber | 1c8caa43681cc432813dff33b2c6d08ca1d61f29 | [
"MIT"
] | 12 | 2015-02-23T02:09:27.000Z | 2016-08-07T13:50:38.000Z | defmodule Route do
import Weber.Route
require Weber.Route
route on("GET", "/weber", :TestTestTest.Main, :action)
|> on("GET", "/include", :TestTestTest.Include, :include_action)
|> on("GET", "/json/action", :TestTestTest.JSON, :json_action)
|> on("GET", "/add/:username", :TestTestTest.Main, :add_username_action)
|> on("GET", "/delete/:username/id/:id", :TestTestTest.Main, :delete_username_action)
|> on("GET", "/content_for", :TestTestTest.ContentFor, :content_for_action)
|> on("GET", "/partials", :TestTestTest.Partials, :partials)
|> on("GET", "/unauthorized", :TestTestTest.Exceptions, :unauthorized_action)
|> on("GET", "/unknown", :TestTestTest.Exceptions, :error_500_action)
|> on("GET", "/render_other_action", :TestTestTest.Redirect, :render_other_action)
|> on("GET", "/render_other_controller", :TestTestTest.Redirect, :render_other_controller)
|> redirect("GET", "/redirect", "/weber")
end
| 48.55 | 95 | 0.670443 |
1c9b4c104d2d42bd6a2f3e4d45a8bee554026072 | 2,442 | ex | Elixir | lib/cadet/auth/provider.ex | source-academy/cadet | c447552453f78799755de73f66999e4c9d20383c | [
"Apache-2.0"
] | 27 | 2018-01-20T05:56:24.000Z | 2021-05-24T03:21:55.000Z | lib/cadet/auth/provider.ex | source-academy/cadet | c447552453f78799755de73f66999e4c9d20383c | [
"Apache-2.0"
] | 731 | 2018-04-16T13:25:49.000Z | 2021-06-22T07:16:12.000Z | lib/cadet/auth/provider.ex | source-academy/cadet | c447552453f78799755de73f66999e4c9d20383c | [
"Apache-2.0"
] | 43 | 2018-01-20T06:35:46.000Z | 2021-05-05T03:22:35.000Z | defmodule Cadet.Auth.Provider do
@moduledoc """
An identity provider, which takes the OAuth2 authentication code and exchanges
it for a token with the OAuth2 provider, and then retrieves the user ID, name,
and user role.
"""
alias Cadet.Accounts.Role
@type code :: String.t()
@type token :: String.t()
@type client_id :: String.t()
@type redirect_uri :: String.t()
@type error :: :upstream | :invalid_credentials | :other
@type provider_instance :: String.t()
@type username :: String.t()
@type prefix :: String.t()
@doc "Exchanges the OAuth2 authorisation code for a token and the user ID."
@callback authorise(any(), code, client_id, redirect_uri) ::
{:ok, %{token: token, username: String.t()}} | {:error, error(), String.t()}
@doc "Retrieves the name of the user with the associated token."
@callback get_name(any(), token) :: {:ok, String.t()} | {:error, error(), String.t()}
@doc "Retrieves the role of the user with the associated token."
@callback get_role(any(), token) :: {:ok, Role.t()} | {:error, error(), String.t()}
@spec get_instance_config(provider_instance) :: {module(), any()} | nil
def get_instance_config(instance) do
Application.get_env(:cadet, :identity_providers, %{})[instance]
end
@spec authorise(provider_instance, code, client_id, redirect_uri) ::
{:ok, %{token: token, username: String.t()}} | {:error, error(), String.t()}
def authorise(instance, code, client_id, redirect_uri) do
case get_instance_config(instance) do
{provider, config} -> provider.authorise(config, code, client_id, redirect_uri)
_ -> {:error, :other, "Invalid or nonexistent provider config"}
end
end
@spec get_name(provider_instance, token) :: {:ok, String.t()} | {:error, error(), String.t()}
def get_name(instance, token) do
case get_instance_config(instance) do
{provider, config} -> provider.get_name(config, token)
_ -> {:error, :other, "Invalid or nonexistent provider config"}
end
end
# no longer used anymore currently
# coveralls-ignore-start
@spec get_role(provider_instance, token) :: {:ok, String.t()} | {:error, error(), String.t()}
def get_role(instance, token) do
case get_instance_config(instance) do
{provider, config} -> provider.get_role(config, token)
_ -> {:error, :other, "Invalid or nonexistent provider config"}
end
end
# coveralls-ignore-stop
end
| 38.15625 | 95 | 0.672809 |
1c9b644c36174782b80b7d4bec48bf879cfea326 | 1,901 | exs | Elixir | otapi/config/dev.exs | MauriceReeves-usds/OpenTransplant | c0196ff8bc5b914ce07502a4b7a760c508854804 | [
"Apache-2.0"
] | 32 | 2021-05-15T15:03:39.000Z | 2022-03-30T01:19:43.000Z | otapi/config/dev.exs | MauriceReeves-usds/OpenTransplant | c0196ff8bc5b914ce07502a4b7a760c508854804 | [
"Apache-2.0"
] | 3 | 2021-05-28T18:54:48.000Z | 2021-07-15T01:06:26.000Z | otapi/config/dev.exs | MauriceReeves-usds/OpenTransplant | c0196ff8bc5b914ce07502a4b7a760c508854804 | [
"Apache-2.0"
] | 1 | 2021-06-25T20:40:30.000Z | 2021-06-25T20:40:30.000Z | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :otapi, OtapiWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :otapi, OtapiWeb.Endpoint,
live_reload: [
patterns: [
~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
~r"priv/gettext/.*(po)$",
~r"lib/otapi_web/(live|views)/.*(ex)$",
~r"lib/otapi_web/templates/.*(eex)$"
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 27.955882 | 68 | 0.681746 |
1c9bccd1b5f626b191d8c396fcf045fe2ce93d3b | 658 | exs | Elixir | machine_translation/MorpHIN/Learned/Resources/TrainingInstances/97.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/TrainingInstances/97.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/TrainingInstances/97.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | **EXAMPLE FILE**
SYM * noun * * cm * quantifier;
demonstrative * noun conj * cm * quantifier;
conj * cardinal verb a51 noun * quantifier;
cm * cardinal pnoun * noun * quantifier;
cm * cardinal pnoun * adjective * quantifier;
cm * quantifier noun * adjective * quantifier;
adverb * adjective verb_aux * noun * quantifier;
pn * cm SYM * demonstrative * pn;
pn * noun SYM * cm * quantifier;
noun * noun conj * cm * pn;
cm * noun SYM * cm * quantifier;
conj * noun verb * noun * quantifier;
cm * cm pn * noun * pn;
particle * cm cm * noun * pn;
demonstrative * noun SYM * adjective * quantifier;
verb_aux a75 adjective verb * noun * quantifier;
| 34.631579 | 51 | 0.674772 |
1c9bdc1921379aa21f1c54bd148dbc689666adce | 277 | ex | Elixir | lib/excheck/sample.ex | devstopfix/excheck | c8cbf457e9bf7c13a0a4b70c60c8e02495ff53a1 | [
"MIT"
] | null | null | null | lib/excheck/sample.ex | devstopfix/excheck | c8cbf457e9bf7c13a0a4b70c60c8e02495ff53a1 | [
"MIT"
] | null | null | null | lib/excheck/sample.ex | devstopfix/excheck | c8cbf457e9bf7c13a0a4b70c60c8e02495ff53a1 | [
"MIT"
] | null | null | null | defmodule ExCheck.Sample do
@moduledoc """
Sample logic to be tested by ExCheck (refer to sample_test.exs for tests).
"""
@doc "concatenate the list."
def concat(x, y) do
x ++ y
end
@doc "push element in the list."
def push(x, y) do
[x | y]
end
end
| 17.3125 | 76 | 0.624549 |
1c9c0fc7bb0321da5915881d20ffa3c00c4593fd | 6,241 | exs | Elixir | test/acceptance/plugins/validator_test.exs | gabheadz/annon.api | 9921d98e44c8472c133554dd55ea649c0f55726c | [
"MIT"
] | 328 | 2017-05-05T15:19:46.000Z | 2022-03-11T10:52:45.000Z | test/acceptance/plugins/validator_test.exs | gabheadz/annon.api | 9921d98e44c8472c133554dd55ea649c0f55726c | [
"MIT"
] | 83 | 2017-04-30T10:36:15.000Z | 2019-10-14T13:14:34.000Z | test/acceptance/plugins/validator_test.exs | gabheadz/annon.api | 9921d98e44c8472c133554dd55ea649c0f55726c | [
"MIT"
] | 29 | 2017-05-02T14:36:50.000Z | 2021-09-03T13:36:17.000Z | defmodule Annon.Acceptance.Plugins.ValidatorTest do
@moduledoc false
use Annon.AcceptanceCase, async: true
@schema %{"type" => "object",
"properties" => %{"foo" => %{"type" => "number"}, "bar" => %{ "type" => "string"}},
"required" => ["bar"]}
setup do
api_path = "/my_validated_api-" <> Ecto.UUID.generate() <> "/"
api = :api
|> build_factory_params(%{
request: %{
methods: ["GET", "POST", "PUT", "DELETE"],
scheme: "http",
host: get_endpoint_host(:public),
port: get_endpoint_port(:public),
path: api_path
}
})
|> create_api()
|> get_body()
api_id = get_in(api, ["data", "id"])
%{api_id: api_id, api_path: api_path}
end
describe "Validator Plugin" do
test "create", %{api_id: api_id} do
validator = :validator_plugin
|> build_factory_params(%{settings: %{
rules: [%{methods: ["POST", "PUT", "PATCH"], path: "/.*", schema: %{}}]
}})
"apis/#{api_id}/plugins/validator"
|> put_management_url()
|> put!(%{"plugin" => validator})
|> assert_status(201)
%{
"data" => [%{
"name" => "validator",
"api_id" => ^api_id
}
]} = "apis/#{api_id}/plugins"
|> put_management_url()
|> get!()
|> get_body()
end
test "create with invalid settings", %{api_id: api_id} do
"apis/#{api_id}/plugins/validator"
|> put_management_url()
|> put!(%{"plugin" => %{}})
|> assert_status(422)
"apis/#{api_id}/plugins/validator"
|> put_management_url()
|> put!(%{"plugin" => build_invalid_plugin("validator")})
|> assert_status(422)
%{
"error" => %{
"invalid" => [
%{"entry" => "$.settings.rules.[0].methods.[0]", "rules" => [
%{"params" => ["POST", "PUT", "PATCH"], "rule" => "inclusion"}
]},
%{"entry" => "$.settings.rules.[0].path", "rules" => [
%{"params" => ["string", _], "rule" => "cast"} # TODO: Remove tail from "params"
]},
%{"entry" => "$.settings.rules.[0].schema", "rules" => [
%{"params" => ["object", _], "rule" => "cast"} # TODO: Remove tail from "params"
]}
]
}
} = "apis/#{api_id}/plugins/validator"
|> put_management_url()
|> put!(%{"plugin" => %{
name: "validator",
is_enabled: false,
settings: %{
rules: [%{methods: ["UNKNOWN"], path: 123, schema: nil}]
}
}})
|> assert_status(422)
|> get_body()
end
end
test "validates versus schema", %{api_id: api_id, api_path: api_path} do
validator_plugin = :validator_plugin
|> build_factory_params(%{settings: %{
rules: [%{methods: ["POST", "PUT"], path: "/.*", schema: @schema}]
}})
"apis/#{api_id}/plugins/validator"
|> put_management_url()
|> put!(%{"plugin" => validator_plugin})
|> assert_status(201)
assert %{
"error" => %{"type" => "validation_failed"}
} = api_path
|> put_public_url()
|> post!(%{data: "aaaa"})
|> assert_status(422)
|> get_body()
api_path
|> put_public_url()
|> post!(%{bar: "foo"})
|> assert_status(404)
end
test "works without matching rules", %{api_id: api_id, api_path: api_path} do
validator_plugin = :validator_plugin
|> build_factory_params(%{settings: %{
rules: [%{methods: ["PATCH"], path: "/.*", schema: @schema}]
}})
"apis/#{api_id}/plugins/validator"
|> put_management_url()
|> put!(%{"plugin" => validator_plugin})
|> assert_status(201)
api_path
|> put_public_url()
|> post!(%{"plugin" => %{data: "aaaa"}})
|> assert_status(404)
|> get_body()
end
test "first of many rules is applied", %{api_id: api_id, api_path: api_path} do
validator_plugin = :validator_plugin
|> build_factory_params(%{settings: %{
rules: [
%{methods: ["POST", "PUT", "PATCH"], path: "/.*", schema: %{}}, # Allow request
%{methods: ["POST", "PUT", "PATCH"], path: "/.*", schema: @schema} # And deny it
]
}})
"apis/#{api_id}/plugins/validator"
|> put_management_url()
|> put!(%{"plugin" => validator_plugin})
|> assert_status(201)
api_path
|> put_public_url()
|> post!(%{"plugin" => %{data: "aaaa"}})
|> assert_status(404)
|> get_body()
end
test "following rules can't cancel validation results", %{api_id: api_id, api_path: api_path} do
validator_plugin = :validator_plugin
|> build_factory_params(%{settings: %{
rules: [
%{methods: ["POST", "PUT"], path: "/.*", schema: @schema},
%{methods: ["POST", "PUT"], path: "/.*", schema: %{}}
]
}})
"apis/#{api_id}/plugins/validator"
|> put_management_url()
|> put!(%{"plugin" => validator_plugin})
|> assert_status(201)
api_path
|> put_public_url()
|> post!(%{"plugin" => %{data: "aaaa"}})
|> assert_status(422)
|> get_body()
end
describe "rules is filtered" do
test "by method", %{api_id: api_id, api_path: api_path} do
validator_plugin = :validator_plugin
|> build_factory_params(%{settings: %{
rules: [
%{methods: ["PUT", "PATCH"], path: "/.*", schema: @schema},
%{methods: ["POST"], path: "/.*", schema: %{}}
]
}})
"apis/#{api_id}/plugins/validator"
|> put_management_url()
|> put!(%{"plugin" => validator_plugin})
|> assert_status(201)
api_path
|> put_public_url()
|> post!(%{"plugin" => %{data: "aaaa"}})
|> assert_status(404)
|> get_body()
end
test "by path", %{api_id: api_id, api_path: api_path} do
validator_plugin = :validator_plugin
|> build_factory_params(%{settings: %{
rules: [
%{methods: ["POST"], path: "/foo$", schema: @schema}
]
}})
"apis/#{api_id}/plugins/validator"
|> put_management_url()
|> put!(%{"plugin" => validator_plugin})
|> assert_status(201)
"#{api_path}foo"
|> put_public_url()
|> post!(%{"plugin" => %{data: "aaaa"}})
|> assert_status(422)
|> get_body()
end
end
end
| 28.368182 | 98 | 0.528922 |
1c9c10a7e29a5d1ba5e9d82cb256755573dea0fd | 435 | exs | Elixir | mix.exs | drowzy/massa | 624cb02e0039b0624c534636f96fd157b1e34a95 | [
"Apache-2.0"
] | 20 | 2021-06-28T12:03:49.000Z | 2022-03-28T22:35:56.000Z | mix.exs | drowzy/massa | 624cb02e0039b0624c534636f96fd157b1e34a95 | [
"Apache-2.0"
] | 48 | 2021-03-19T12:01:38.000Z | 2022-03-29T21:19:26.000Z | mix.exs | drowzy/massa | 624cb02e0039b0624c534636f96fd157b1e34a95 | [
"Apache-2.0"
] | 5 | 2021-03-18T21:46:10.000Z | 2021-10-01T17:39:05.000Z | defmodule MongooseProxyProject.MixProject do
use Mix.Project
# Umbrella project definition; the actual applications live under "apps".
def project do
  config = [
    apps_path: "apps",
    version: "0.1.0",
    start_permanent: Mix.env() == :prod,
    deps: deps()
  ]

  config
end
# Dependencies listed here are available only for this
# project and cannot be accessed from applications inside
# the apps folder.
#
# Run "mix help deps" for examples and options.
# No umbrella-wide dependencies; per the note above, each child app under
# "apps" declares its own.
defp deps, do: []
end
| 19.772727 | 59 | 0.648276 |
1c9c19fde03db8d380dd1814ab2ac6d68462e39d | 1,664 | ex | Elixir | web/controllers/coherence/redirects.ex | bagilevi/uptom | 50894abb8f7bd052e12c37155b5c33450abcc9bd | [
"MIT"
] | 6 | 2017-05-12T04:20:09.000Z | 2020-11-07T02:00:56.000Z | web/controllers/coherence/redirects.ex | bagilevi/uptom | 50894abb8f7bd052e12c37155b5c33450abcc9bd | [
"MIT"
] | null | null | null | web/controllers/coherence/redirects.ex | bagilevi/uptom | 50894abb8f7bd052e12c37155b5c33450abcc9bd | [
"MIT"
] | 2 | 2020-05-18T08:06:22.000Z | 2020-12-19T14:24:40.000Z | defmodule Coherence.Redirects do
@moduledoc """
Define controller action redirection functions.

This module contains default redirect functions for each of the controller
actions that perform redirects. By using this module you get the following
functions:

* session_create/2
* session_delete/2
* password_create/2
* password_update/2
* unlock_create_not_locked/2
* unlock_create_invalid/2
* unlock_create/2
* unlock_edit_not_locked/2
* unlock_edit/2
* unlock_edit_invalid/2
* registration_create/2
* invitation_create/2
* confirmation_create/2
* confirmation_edit_invalid/2
* confirmation_edit_expired/2
* confirmation_edit/2
* confirmation_edit_error/2

You can override any of the functions to customize the redirect path. Each
function is passed the `conn` and `params` arguments from the controller.

## Examples

    import MyProject.Router.Helpers

    # override the log out action back to the log in page
    def session_delete(conn, _), do: redirect(conn, session_path(conn, :new))

    # redirect the user to the login page after registering
    def registration_create(conn, _), do: redirect(conn, session_path(conn, :new))

    # disable the user_return_to feature on login
    def session_create(conn, _), do: redirect(conn, landing_path(conn, :index))
"""
use Redirects

# Uncomment the import below if adding overrides
# import Uptom.Router.Helpers

# Add function overrides below. Example usage:
# Uncomment the following line to return the user to the login form after logging out
# def session_delete(conn, _), do: redirect(conn, session_path(conn, :new))
| 30.254545 | 87 | 0.742788 |
1c9c1e4a250122a54c4adcf44e3b84bcf1a83229 | 417 | exs | Elixir | memorex/test/memorex_web/views/error_view_test.exs | at7heb/liveview_elixirconf_2021 | eee64f38ec8a7365e8b728d76cd795a5c23199a9 | [
"MIT"
] | null | null | null | memorex/test/memorex_web/views/error_view_test.exs | at7heb/liveview_elixirconf_2021 | eee64f38ec8a7365e8b728d76cd795a5c23199a9 | [
"MIT"
] | null | null | null | memorex/test/memorex_web/views/error_view_test.exs | at7heb/liveview_elixirconf_2021 | eee64f38ec8a7365e8b728d76cd795a5c23199a9 | [
"MIT"
] | 6 | 2021-10-07T14:50:48.000Z | 2021-10-08T14:50:09.000Z | defmodule MemorexWeb.ErrorViewTest do
use MemorexWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
  # The 404 template renders to a bare "Not Found" string.
  rendered = render_to_string(MemorexWeb.ErrorView, "404.html", [])
  assert rendered == "Not Found"
end
test "renders 500.html" do
  # The 500 template renders to a bare "Internal Server Error" string.
  rendered = render_to_string(MemorexWeb.ErrorView, "500.html", [])
  assert rendered == "Internal Server Error"
end
end
| 27.8 | 92 | 0.733813 |
1c9c29a69e8309d200220924ef9a6e9c8d374e21 | 682 | ex | Elixir | lib/daily_meals.ex | cassiofariasmachado/daily-meals | fa1965fa1190818e8f74382635b5174ab1e19eee | [
"MIT"
] | null | null | null | lib/daily_meals.ex | cassiofariasmachado/daily-meals | fa1965fa1190818e8f74382635b5174ab1e19eee | [
"MIT"
] | null | null | null | lib/daily_meals.ex | cassiofariasmachado/daily-meals | fa1965fa1190818e8f74382635b5174ab1e19eee | [
"MIT"
] | null | null | null | defmodule DailyMeals do
@moduledoc """
DailyMeals keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""

# Each meal operation lives in a single-purpose module; the aliases below
# give them caller-friendly local names.
alias DailyMeals.Meals.Create, as: CreateMeal
alias DailyMeals.Meals.Delete, as: DeleteMeal
alias DailyMeals.Meals.Get, as: GetMeal
alias DailyMeals.Meals.Update, as: UpdateMeal

# Public API: thin delegations to the operation modules above.
defdelegate create_meal(params), to: CreateMeal, as: :call
defdelegate delete_meal(id), to: DeleteMeal, as: :call
defdelegate update_meal(params), to: UpdateMeal, as: :call
defdelegate get_meal_by_id(id), to: GetMeal, as: :by_id
end
| 34.1 | 66 | 0.759531 |
1c9c42c20458db3f959a71a2f1dd642242585f25 | 150 | ex | Elixir | testData/org/elixir_lang/parser_definition/at_bracket_operation_parsing_test_case/List.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/at_bracket_operation_parsing_test_case/List.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/at_bracket_operation_parsing_test_case/List.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | @[][key: value]
@[] [key: value]
@[][()]
@[] [()]
@[][matched_expression]
@[] [matched_expression]
@[][matched_expression,]
@[] [matched_expression,]
| 16.666667 | 25 | 0.586667 |
1c9c54f96e7676bec079e1e80462799a00b07142 | 971 | ex | Elixir | lib/chess_web/controllers/session_controller.ex | jesseshieh/chess | 80e6854d5f0a05420e5eea6deee9e41f03445b5a | [
"MIT"
] | null | null | null | lib/chess_web/controllers/session_controller.ex | jesseshieh/chess | 80e6854d5f0a05420e5eea6deee9e41f03445b5a | [
"MIT"
] | null | null | null | lib/chess_web/controllers/session_controller.ex | jesseshieh/chess | 80e6854d5f0a05420e5eea6deee9e41f03445b5a | [
"MIT"
] | null | null | null | defmodule ChessWeb.SessionController do
use ChessWeb, :controller
alias Chess.Auth
alias Chess.Auth.User
alias Chess.Auth.Guardian
# Renders the login form with a fresh, empty user changeset.
def new(conn, _params) do
  render(conn, "new.html", changeset: User.changeset(%User{}))
end
# Attempts to authenticate with the submitted credentials. On success the
# user is signed in via Guardian and sent to the games index; on failure the
# login form is re-rendered with a flash error.
def create(conn, %{"user" => %{"username" => username, "password" => password}}) do
  case Auth.authenticate_user(username, password) do
    {:ok, user} ->
      conn
      |> Guardian.Plug.sign_in(user)
      |> put_flash(:info, "You are logged in")
      |> redirect(to: game_path(conn, :index))

    {:error, _error} ->
      conn
      |> put_flash(:error, "Bad username or password")
      |> render("new.html", changeset: User.changeset(%User{}))
  end
end
# Signs the current user out and redirects to the landing page.
def delete(conn, _params) do
  signed_out = Guardian.Plug.sign_out(conn)

  signed_out
  |> put_flash(:info, "You are logged out")
  |> redirect(to: page_path(conn, :index))
end
end
| 25.552632 | 66 | 0.61483 |
1c9c722625b3af71c381c488f7e0e8bd5bd12d5e | 1,949 | ex | Elixir | lib/oban/peers/global.ex | stefanchrobot/oban | 961015b92b9cae442d2834ad220a85ccdcc9da2d | [
"Apache-2.0"
] | null | null | null | lib/oban/peers/global.ex | stefanchrobot/oban | 961015b92b9cae442d2834ad220a85ccdcc9da2d | [
"Apache-2.0"
] | null | null | null | lib/oban/peers/global.ex | stefanchrobot/oban | 961015b92b9cae442d2834ad220a85ccdcc9da2d | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Peers.Global do
@moduledoc """
A cluster based peer that coordinates through a distributed registry.
Leadership is coordinated through global locks. It requires a functional distributed Erlang
cluster, without one global plugins (Cron, Lifeline, Stager, etc.) will not function correctly.
## Usage
Specify the `Global` peer in your Oban configuration.
config :my_app, Oban,
peer: Oban.Peers.Global,
...
"""
@behaviour Oban.Peer
use GenServer
alias Oban.Backoff
defmodule State do
  @moduledoc false

  # Internal peer state:
  #   conf      - Oban configuration; its :name scopes the global lock (see key/1)
  #   name      - registered process name passed at start (assumption: set by the supervisor — confirm)
  #   timer     - reference to the pending :election timer, if scheduled
  #   interval  - base delay between election attempts (jittered before use)
  #   leader?   - whether this node currently holds the global lock
  defstruct [
    :conf,
    :name,
    :timer,
    interval: :timer.seconds(30),
    leader?: false
  ]
end
@impl Oban.Peer
def start_link(opts) do
  # Register under the configured name (if any) so callers can locate the peer.
  name = opts[:name]
  GenServer.start_link(__MODULE__, opts, name: name)
end
@impl Oban.Peer
def leader?(pid) do
  # Synchronous query of the peer's current leadership flag.
  GenServer.call(pid, :leader?)
end
@impl GenServer
def init(opts) do
  # Trap exits so terminate/2 runs on shutdown and can release the global lock.
  Process.flag(:trap_exit, true)

  # Defer the first election to handle_continue so init/1 returns immediately.
  {:ok, struct!(State, opts), {:continue, :start}}
end
@impl GenServer
def terminate(_reason, %State{timer: timer} = state) do
  # Cancel any pending election and, when this node is the leader, release
  # the cluster-wide lock so another peer can take over promptly.
  if is_reference(timer), do: Process.cancel_timer(timer)
  if state.leader?, do: :global.del_lock(key(state), nodes())
  :ok
end
@impl GenServer
def handle_continue(:start, %State{} = state) do
  # Run the first election right after init by reusing the :election handler.
  handle_info(:election, state)
end
@impl GenServer
def handle_call(:leader?, _from, %State{leader?: leader?} = state) do
  # Report the leadership flag captured at the last election.
  {:reply, leader?, state}
end
@impl GenServer
def handle_info(:election, %State{} = state) do
  # Try to take the cluster-wide lock with 0 retries (a single non-blocking
  # attempt); holding the lock is what makes this peer the leader.
  locked? = :global.set_lock(key(state), nodes(), 0)

  # Record the outcome and schedule the next attempt.
  {:noreply, schedule_election(%{state | leader?: locked?})}
end
defp schedule_election(%State{interval: interval} = state) do
  # Jitter the interval (downward) so peers don't all attempt the lock at once.
  delay = Backoff.jitter(interval, mode: :dec)
  timer = Process.send_after(self(), :election, delay)

  %{state | timer: timer}
end
# Helpers

# Lock identifier, scoped by the Oban instance name so multiple Oban
# instances can elect leaders independently.
defp key(state), do: {:oban, state.conf.name}

# All known cluster members, including this node.
defp nodes, do: [Node.self() | Node.list()]
end
| 22.402299 | 97 | 0.661365 |
1c9cb2c2981ed874eb4fa47a5678b3936eca74ae | 644 | ex | Elixir | parkapp_server/lib/parkapp_web/controllers/html/authentication_controller.ex | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | 2 | 2018-11-06T12:21:16.000Z | 2018-11-21T10:20:17.000Z | parkapp_server/lib/parkapp_web/controllers/html/authentication_controller.ex | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | null | null | null | parkapp_server/lib/parkapp_web/controllers/html/authentication_controller.ex | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | null | null | null | defmodule ParkappWeb.HTML.AuthenticationController do
@moduledoc """
The HTML AuthenticationController handles the simple web login
"""
use ParkappWeb, :controller
alias ParkappWeb.Auth
# Renders the static login form.
def login(conn, _params), do: render(conn, "login.html")
# Checks the submitted secret; on match, issues a mock auth token and
# redirects home, otherwise re-renders the login form with an error.
#
# NOTE(review): the secret is hard-coded and compared with `==` (not a
# constant-time comparison) — acceptable for a mock login only.
def process_login(conn, %{"login" => %{"secret" => secret}}) do
  if secret == "bitmaker" do
    conn = Auth.Guardian.generate_mock_auth_token(conn)

    conn
    |> put_flash(:info, "Logged in")
    |> redirect(to: home_path(conn, :current_version))
  else
    conn
    |> put_flash(:error, "Wrong secret")
    |> render("login.html")
  end
end
end
| 24.769231 | 65 | 0.64441 |
1c9cbec7f6b84db9ff33e589c405c204b8088170 | 1,586 | ex | Elixir | apps/artemis/lib/artemis/contexts/reports/list_incident_reports.ex | artemis-platform/artemis_dashboard | 5ab3f5ac4c5255478bbebf76f0e43b44992e3cab | [
"MIT"
] | 9 | 2019-08-19T19:56:34.000Z | 2022-03-22T17:56:38.000Z | apps/artemis/lib/artemis/contexts/reports/list_incident_reports.ex | chrislaskey/atlas_dashboard | 9009ef5aac8fefba126fa7d3e3b82d1b610ee6fe | [
"MIT"
] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis/lib/artemis/contexts/reports/list_incident_reports.ex | chrislaskey/atlas_dashboard | 9009ef5aac8fefba126fa7d3e3b82d1b610ee6fe | [
"MIT"
] | 2 | 2019-07-05T22:51:47.000Z | 2019-08-19T19:56:37.000Z | defmodule Artemis.ListIncidentReports do
use Artemis.Context
use Artemis.ContextReport

# Cached report results are reset whenever an Incident record changes —
# whether the change arrives via Cloudant or via application events.
use Artemis.ContextCache,
  cache_reset_on_cloudant_changes: [
    %{schema: Artemis.Incident, action: "create"},
    %{schema: Artemis.Incident, action: "delete"},
    %{schema: Artemis.Incident, action: "update"}
  ],
  cache_reset_on_events: [
    "incident:created",
    "incident:deleted",
    "incident:deleted:all",
    "incident:updated"
  ]

# date_part/2 (date_part-style SQL fragment) comes from Artemis.Ecto.DateMacros.
import Artemis.Ecto.DateMacros
import Ecto.Query

alias Artemis.Incident
alias Artemis.Repo
# Entry point; `reports` and `params` are optional ([] / %{} by default).
# Delegates to get_reports/3, which is not defined in this module —
# presumably injected by one of the `use`d contexts above (confirm).
def call(reports \\ [], params \\ %{}, user) do
  get_reports(reports, params, user)
end
# Callbacks
@impl true
def get_allowed_reports(_user) do
  # Every user is offered the same single report type.
  [:count_by_team_id_and_service_name_and_day_of_week]
end
@impl true
def get_report(:count_by_team_id_and_service_name_and_day_of_week, params, _user) do
  # Counts incidents grouped by team, service name, and ISO day of week
  # (Postgres date_part "isodow": 1 = Monday .. 7 = Sunday), optionally
  # bounded by :start_date (inclusive) / :end_date (exclusive) in params.
  # max/1 on the grouped columns just surfaces the group key in the select.
  Incident
  |> maybe_where_start_date(params)
  |> maybe_where_end_date(params)
  |> group_by([i], [i.team_id, i.service_name, date_part("isodow", i.triggered_at)])
  |> select([i], [max(i.team_id), max(i.service_name), date_part("isodow", i.triggered_at), count(i.id)])
  |> Repo.all()
end
# Helpers

# Applies the :start_date lower bound (inclusive) when present;
# otherwise leaves the query untouched.
defp maybe_where_start_date(query, %{start_date: start_date}) do
  where(query, [i], i.triggered_at >= ^start_date)
end

defp maybe_where_start_date(query, _params), do: query

# Applies the :end_date upper bound (exclusive) when present.
defp maybe_where_end_date(query, %{end_date: end_date}) do
  where(query, [i], i.triggered_at < ^end_date)
end

defp maybe_where_end_date(query, _params), do: query
end
| 26 | 107 | 0.692938 |
1c9cd0328966be0929812afcfb3aadf5df9f897a | 250 | exs | Elixir | bench/vector/take_while.exs | sabiwara/aja | cde91e4263e54a11a1685a777dbffd4912fe3864 | [
"MIT"
] | 95 | 2020-10-18T09:27:46.000Z | 2022-03-29T20:03:16.000Z | bench/vector/take_while.exs | sabiwara/aja | cde91e4263e54a11a1685a777dbffd4912fe3864 | [
"MIT"
] | 1 | 2021-09-22T20:30:08.000Z | 2021-10-13T23:55:34.000Z | bench/vector/take_while.exs | sabiwara/aja | cde91e4263e54a11a1685a777dbffd4912fe3864 | [
"MIT"
] | 1 | 2020-12-15T12:36:16.000Z | 2020-12-15T12:36:16.000Z | list = Enum.to_list(1..100)
# `list` (1..100, bound on the previous line) is the shared input; build the
# equivalent Aja vector once, outside the benchmark closures.
vector = Aja.Vector.new(list)
# The predicate keeps the first 80 of 100 elements.
fun = fn x -> x <= 80 end

# Compare Aja's vector take_while against the stdlib list implementation.
Benchee.run(%{
  "Aja.Vector.take_while/2" => fn -> Aja.Vector.take_while(vector, fun) end,
  "Enum.take_while/2 (list)" => fn -> Enum.take_while(list, fun) end
})
| 25 | 76 | 0.644 |
1c9ce5f03a98c5418ac43de14fb3a8cfc19d4b4e | 1,163 | ex | Elixir | web/channels/user_socket.ex | itaym2/wiksir | a834ad95163e0b807dcba6d8ef17b998e312f396 | [
"MIT"
] | null | null | null | web/channels/user_socket.ex | itaym2/wiksir | a834ad95163e0b807dcba6d8ef17b998e312f396 | [
"MIT"
] | null | null | null | web/channels/user_socket.ex | itaym2/wiksir | a834ad95163e0b807dcba6d8ef17b998e312f396 | [
"MIT"
] | null | null | null | defmodule Wiksir.UserSocket do
use Phoenix.Socket

## Channels
# No channels are currently routed; uncomment to enable the room channel.
# channel "room:*", Wiksir.RoomChannel

## Transports
# Only the websocket transport is enabled; longpoll stays commented out.
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
# Accepts every connection as-is; no token verification is performed here.
def connect(_params, socket), do: {:ok, socket}
# Socket id's are topics that allow you to identify all sockets for a given user:
#
#     def id(socket), do: "users_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
#     Wiksir.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 30.605263 | 83 | 0.700774 |
1c9ce9bb59e53f077173774fa45bb8e6fd89d6c1 | 337 | ex | Elixir | lib/zaryn/p2p/message/get_transaction_chain.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | 1 | 2021-07-06T19:47:14.000Z | 2021-07-06T19:47:14.000Z | lib/zaryn/p2p/message/get_transaction_chain.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | null | null | null | lib/zaryn/p2p/message/get_transaction_chain.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | null | null | null | defmodule Zaryn.P2P.Message.GetTransactionChain do
@moduledoc """
Represents a message to request an entire transaction chain
"""

# :address identifies the chain and is mandatory; :after is optional —
# per the type below it is a DateTime, presumably limiting the reply to
# transactions after that point (confirm with the message handler).
@enforce_keys [:address]
defstruct [:address, :after]

alias Zaryn.Crypto

@type t :: %__MODULE__{
        address: Crypto.versioned_hash(),
        after: nil | DateTime.t()
      }
end
| 22.466667 | 61 | 0.661721 |
1c9d0b2b40626ee76def4c9c77fa84953f719c95 | 370 | ex | Elixir | apps/fz_vpn/lib/fz_vpn/interface/wg_adapter.ex | CloudFire-LLC/cloudfire-ce | 416ea0d9c9528790fdf70c432aa4eb507d7b2074 | [
"Apache-2.0"
] | null | null | null | apps/fz_vpn/lib/fz_vpn/interface/wg_adapter.ex | CloudFire-LLC/cloudfire-ce | 416ea0d9c9528790fdf70c432aa4eb507d7b2074 | [
"Apache-2.0"
] | 1 | 2020-04-24T01:53:41.000Z | 2020-04-24T01:53:41.000Z | apps/fz_vpn/lib/fz_vpn/interface/wg_adapter.ex | CloudFire-LLC/cloudfire-ce | 416ea0d9c9528790fdf70c432aa4eb507d7b2074 | [
"Apache-2.0"
] | null | null | null | defmodule FzVpn.Interface.WGAdapter do
@moduledoc """
This module determines by application environment which WireGuard adapter to
use: `Live` or `Sandbox`.
`Live` is used for environments where WireGuard is available and `Sandbox` is
used for environments where it isn't.
"""
# Resolved at runtime so each environment can configure its own adapter;
# raises if :wg_adapter is not configured for the :fz_vpn application.
def wg_adapter, do: Application.fetch_env!(:fz_vpn, :wg_adapter)
end
| 26.428571 | 79 | 0.743243 |
1c9d1553f83ba7184db75b338af105133914162d | 135 | ex | Elixir | lib/blue_jet/app/file_storage/file_collection/proxy.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 44 | 2018-05-09T01:08:57.000Z | 2021-01-19T07:25:26.000Z | lib/blue_jet/app/file_storage/file_collection/proxy.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 36 | 2018-05-08T23:59:54.000Z | 2018-09-28T13:50:30.000Z | lib/blue_jet/app/file_storage/file_collection/proxy.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 9 | 2018-05-09T14:09:19.000Z | 2021-03-21T21:04:04.000Z | defmodule BlueJet.FileStorage.FileCollection.Proxy do
use BlueJet, :proxy

# Identity implementation: file collections need no external data resolved
# on put, so the collection is returned unchanged.
def put(file_collection, _, _) do
  file_collection
end
end
| 22.5 | 53 | 0.785185 |
1c9d6381db59923a734ae3cb9f696a68a6ebfaa3 | 1,872 | ex | Elixir | clients/games/lib/google_api/games/v1/model/played.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/played.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/played.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Games.V1.Model.Played do
  @moduledoc """
  This is a JSON template for metadata about a player playing a game with the currently authenticated user.

  ## Attributes

  * `autoMatched` (*type:* `boolean()`, *default:* `nil`) - True if the player was auto-matched with the currently authenticated user.
  * `kind` (*type:* `String.t`, *default:* `games#played`) - Uniquely identifies the type of this resource. Value is always the fixed string games#played.
  * `timeMillis` (*type:* `String.t`, *default:* `nil`) - The last time the player played the game in milliseconds since the epoch in UTC.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :autoMatched => boolean(),
          :kind => String.t(),
          :timeMillis => String.t()
        }

  # One field/1 declaration per JSON attribute; field/1 is provided by
  # GoogleApi.Gax.ModelBase.
  field(:autoMatched)
  field(:kind)
  field(:timeMillis)
end
# JSON decoding delegates to the model module's decode/2 (generated by
# GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder, for: GoogleApi.Games.V1.Model.Played do
  def decode(value, options) do
    GoogleApi.Games.V1.Model.Played.decode(value, options)
  end
end
# JSON encoding uses the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Games.V1.Model.Played do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 35.320755 | 156 | 0.715278 |
1c9d78dff93b7985d97d506fe9bc5cdf90e5bcbd | 13,527 | ex | Elixir | lib/guardian/permissions.ex | reshmalalwani3108/guardian | 0a03235d5a177caf94c0c3326b7f8b30bcfd4755 | [
"MIT"
] | null | null | null | lib/guardian/permissions.ex | reshmalalwani3108/guardian | 0a03235d5a177caf94c0c3326b7f8b30bcfd4755 | [
"MIT"
] | null | null | null | lib/guardian/permissions.ex | reshmalalwani3108/guardian | 0a03235d5a177caf94c0c3326b7f8b30bcfd4755 | [
"MIT"
] | null | null | null | defmodule Guardian.Permissions do
@moduledoc """
An optional plugin to Guardian to provide permissions for your tokens
These can be used for any token types since they only work on the `claims`.
Permissions are set on a per implementation module basis.
Each implementation module can have their own sets.
Permissions are similar in concept to OAuth2 scopes. They're encoded into a token
and the permissions granted last as long as the token does.
This makes it unsuitable for highly dynamic permission schemes.
They're best left to an application to implement.
For example. (at the time of writing) some of the Facebook permissions are:
* public_profile
* user_about_me
* user_actions.books
* user_actions.fitness
* user_actions.music
To create permissions for your application similar to these:
```elixir
defmodule MyApp.Auth.Token do
use Guardian, otp_app: :my_app,
permissions: %{
        default: [:public_profile, :user_about_me],
user_actions: %{
books: 0b1,
fitness: 0b100,
music: 0b1000,
}
}
    use Guardian.Permissions, encoding: Guardian.Permissions.BitwiseEncoding
    # By default permissions will be encoded bitwise, but other encoders also exist
# * Guardian.Permissions.TextEncoding
    # * Guardian.Permissions.AtomEncoding
    # It is even possible to supply your own encoding module
# snip
def build_claims(claims, _resource, opts) do
claims =
claims
|> encode_permissions_into_claims!(Keyword.get(opts, :permissions))
{:ok, claims}
end
end
```
This will take the permission set in the `opts` at `:permissions` and
put it into the `"pems"` key of the claims as a map of:
`%{set_name => integer}`
The permissions can be defined as a list (positional value based on index)
or a map where the value for each permission is manually provided.
They can be provided either as options to `use Guardian` or in the config for
your implementation module.
Once you have a token, you can interact with it.
```elixir
# Get the encoded permissions from the claims
found_perms = MyApp.Auth.Token.decode_permissions_from_claims(claims)
# Check if all permissions are present
has_all_these_things? =
claims
|> MyApp.Auth.Token.decode_permissions_from_claims
|> MyApp.Auth.Token.all_permissions?(%{default: [:user_about_me, :public_profile]})
# Checks if any permissions are present
show_any_media_things? =
claims
|> MyApp.Auth.Token.decode_permissions_from_claims
|> MyApp.Auth.Token.any_permissions?(%{user_actions: [:books, :fitness, :music]})
```
### Using with Plug
To use a plug for ensuring permissions you can use the `Guardian.Permissions` module as part of a
Guardian pipeline.
```elixir
# After a pipeline has setup the implementation module and error handler
# Ensure that both the `public_profile` and `user_actions.books` permissions are present in the token
plug Guardian.Permissions, ensure: %{default: [:public_profile], user_actions: [:books]}
# Allow the request to continue when the token contains any of the permission sets specified
plug Guardian.Permissions, one_of: [
%{default: [:public_profile], user_actions: [:books]},
%{default: [:public_profile], user_actions: [:music]},
]
# Look for permissions for a token in a different location
plug Guardian.Permissions, key: :impersonate, ensure: %{default: [:public_profile]}
```
If the token satisfies either the permissions listed in `ensure` or one of the sets in the `one_of` key
the request will continue. If not, then `auth_error` callback will be called on the error handler with
`auth_error(conn, {:unauthorized, reason}, options)`
"""
# A single permission name within a set.
@type label :: atom

# One named set: permission name => its encoded bit value.
@type permission_set :: %{optional(label) => pos_integer}

# All permission sets for an implementation: set name => set.
@type t :: %{optional(label) => permission_set}

# Inputs may use strings or atoms interchangeably as names.
@type input_label :: String.t() | atom

# A set may be given as a list of names or an already-encoded integer.
@type input_set :: [input_label, ...] | pos_integer

@type input_permissions :: %{optional(input_label) => input_set}

# Options accepted by the plug (see "Using with Plug" in the moduledoc).
@type plug_option ::
        {:ensure, permission_set}
        | {:one_of, [permission_set, ...]}
        | {:key, atom}
        | {:module, module}
        | {:error_handler, module}
defmodule PermissionNotFoundError do
  @moduledoc "Raised when a permission or permission set is not present in the configuration."
  defexception [:message]
end
defmacro __using__(opts \\ []) do
  # Credo is incorrectly identifying an unless block with negated condition 2017-06-10
  # credo:disable-for-next-line /\.Refactor\./
  quote do
    alias Guardian.Permissions.PermissionNotFoundError

    # The encoding module (bitwise by default) supplies encode/3 and
    # decode/3, used by the do_encode/do_decode helpers below.
    import unquote(Keyword.get(opts, :encoding, Guardian.Permissions.BitwiseEncoding))

    defdelegate max(), to: Guardian.Permissions

    # @config_with_key is expected to be defined by the enclosing Guardian
    # implementation module (assumption — set up by `use Guardian`; confirm).
    raw_perms = @config_with_key.(:permissions)

    unless raw_perms do
      raise "Permissions are not defined for #{to_string(__MODULE__)}"
    end

    # Normalize once at compile time: set/permission names become strings,
    # positional lists become explicit bit values.
    @normalized_perms Guardian.Permissions.normalize_permissions(raw_perms)
    @available_permissions Guardian.Permissions.available_from_normalized(@normalized_perms)

    @doc """
    Lists all permissions in a normalized way using %{permission_set_name => [permission_name, ...]}
    """
    @spec available_permissions() :: Guardian.Permissions.t()
    def available_permissions, do: @available_permissions
@doc """
Decodes permissions from the permissions found in claims (encoded to integers) or
from a list of permissions.

    iex> MyTokens.decode_permissions(%{default: [:public_profile]})
    %{default: [:public_profile]}

    iex> MyTokens.decode_permissions(%{"default" => 1, "user_actions" => 1})
    %{default: [:public_profile], user_actions: [:books]}

When using integers (after encoding to claims), unknown bit positions are ignored.

    iex> MyTokens.decode_permissions(%{"default" => -1})
    %{default: [:public_profile, :user_about_me]}
"""
@spec decode_permissions(Guardian.Permissions.input_permissions() | nil) :: Guardian.Permissions.t()
def decode_permissions(nil), do: %{}

def decode_permissions(map) when is_map(map) do
  # Unknown set names are silently dropped; known names are atomized and
  # their values decoded via the configured encoding module.
  for {k, v} <- map, Map.get(@normalized_perms, to_string(k)) != nil, into: %{} do
    key = k |> to_string() |> String.to_atom()
    {key, do_decode_permissions(v, k)}
  end
end
@doc """
Decodes permissions directly from a claims map. This does the same as `decode_permissions` but
will fetch the permissions map from the `"pem"` key where `Guardian.Permissions` places them
when it encodes them into claims.
"""
@spec decode_permissions_from_claims(Guardian.Token.claims()) :: Guardian.Permissions.t()
def decode_permissions_from_claims(%{"pem" => perms}), do: decode_permissions(perms)
# Claims without a "pem" key carry no permissions.
def decode_permissions_from_claims(_), do: %{}
@doc """
Encodes the permissions provided into the claims in the `"pem"` key.

Permissions are encoded into an integer inside the token corresponding
with the value provided in the configuration.
"""
@spec encode_permissions_into_claims!(
        Guardian.Token.claims(),
        Guardian.Permissions.input_permissions() | nil
      ) :: Guardian.Token.claims()
def encode_permissions_into_claims!(claims, nil), do: claims

def encode_permissions_into_claims!(claims, perms) do
  # Raises (via encode_permissions!/1) if any permission is unknown.
  encoded_perms = encode_permissions!(perms)
  Map.put(claims, "pem", encoded_perms)
end
@doc """
Checks to see if any of the permissions provided are present
in the permissions (previously extracted from claims)

    iex> claims |> MyTokens.decode_permissions() |> any_permissions?(%{user_actions: [:books, :music]})
    true
"""
@spec any_permissions?(
        Guardian.Permissions.input_permissions(),
        Guardian.Permissions.input_permissions()
      ) :: boolean
def any_permissions?(has_perms, test_perms) when is_map(test_perms) do
  # Normalize both sides to %{set => [label]} before comparing.
  has_perms = decode_permissions(has_perms)
  test_perms = decode_permissions(test_perms)

  Enum.any?(test_perms, fn {k, needs} ->
    has_perms |> Map.get(k) |> do_any_permissions?(MapSet.new(needs))
  end)
end

# nil means the set is absent entirely, so nothing can match.
defp do_any_permissions?(nil, _), do: false

defp do_any_permissions?(list, needs) do
  # A non-empty intersection means at least one required permission is held.
  matches = MapSet.intersection(needs, MapSet.new(list))
  MapSet.size(matches) > 0
end
@doc """
Checks to see if all of the permissions provided are present
in the permissions (previously extracted from claims)

    iex> claims |> MyTokens.decode_permissions() |> all_permissions?(%{user_actions: [:books, :music]})
    true
"""
@spec all_permissions?(
        Guardian.Permissions.input_permissions(),
        Guardian.Permissions.input_permissions()
      ) :: boolean
def all_permissions?(has_perms, test_perms) when is_map(test_perms) do
  has_perms_bits = decode_permissions(has_perms)
  test_perms_bits = decode_permissions(test_perms)

  # Every requested set must be a subset of what is held; a missing set
  # defaults to [] and therefore only satisfies empty requirements.
  Enum.all?(test_perms_bits, fn {k, needs} ->
    has = Map.get(has_perms_bits, k, [])
    MapSet.subset?(MapSet.new(needs), MapSet.new(has))
  end)
end
@doc """
Encodes the permissions provided into numeric form

    iex> MyTokens.encode_permissions!(%{user_actions: [:books, :music]})
    %{user_actions: 9}
"""
@spec encode_permissions!(Guardian.Permissions.input_permissions() | nil) :: Guardian.Permissions.t()
def encode_permissions!(nil), do: %{}

def encode_permissions!(map) when is_map(map) do
  # Set names are atomized; values are encoded via the configured encoding
  # module. Unknown permission names raise PermissionNotFoundError.
  for {k, v} <- map, into: %{} do
    key = String.to_atom(to_string(k))
    {key, do_encode_permissions!(v, k)}
  end
end
@doc """
Validates that all permissions provided exist in the configuration.

    iex> MyTokens.validate_permissions!(%{default: [:user_about_me]})

    iex> MyTokens.validate_permissions!(%{not: [:a, :thing]})
    raise Guardian.Permissions.PermissionNotFoundError
"""
def validate_permissions!(map) when is_map(map) do
  # Bug fix: Enum.all? was previously called without the enumerable argument
  # (`Enum.all?(&do_validate_permissions!/1)`), which cannot succeed at
  # runtime. Each {set, permissions} entry is checked by
  # do_validate_permissions!/1, which raises PermissionNotFoundError on
  # unknown names and returns :ok otherwise.
  Enum.all?(map, &do_validate_permissions!/1)
end
# Without an explicit set name, values belong to the "default" set.
defp do_decode_permissions(other), do: do_decode_permissions(other, "default")

# Atom set names are stringified to match the normalized configuration keys.
defp do_decode_permissions(value, type) when is_atom(type),
  do: do_decode_permissions(value, to_string(type))

defp do_decode_permissions(value, type) when is_list(value) do
  # Lists of names are validated first (unknown names raise); decode/3 comes
  # from the imported encoding module.
  do_validate_permissions!({type, value})
  decode(value, type, @normalized_perms)
end

# Integers are decoded as-is so unknown bit positions can be ignored.
defp do_decode_permissions(value, type) when is_integer(value) do
  decode(value, type, @normalized_perms)
end
# Atom set names are stringified to match the normalized configuration keys.
defp do_encode_permissions!(value, type) when is_atom(type),
  do: do_encode_permissions!(value, to_string(type))

# Integers are treated as already-encoded values; encode/3 comes from the
# imported encoding module.
defp do_encode_permissions!(value, type) when is_integer(value),
  do: encode(value, type, @normalized_perms)

defp do_encode_permissions!(value, type) when is_list(value) do
  # Validate names first so unknown permissions raise a descriptive error.
  do_validate_permissions!({type, value})
  encode(value, type, @normalized_perms)
end
# Atom set names are stringified to match the normalized configuration keys.
defp do_validate_permissions!({type, value}) when is_atom(type),
  do: do_validate_permissions!({to_string(type), value})

defp do_validate_permissions!({type, map}) when is_map(map) do
  # For map inputs, only the keys (permission names) need validating.
  list = map |> Map.keys() |> Enum.map(&to_string/1)
  do_validate_permissions!({type, list})
end

defp do_validate_permissions!({type, list}) do
  perm_set = Map.get(@normalized_perms, type)

  if perm_set do
    # Any provided name not configured for this set is an error.
    provided_set = list |> Enum.map(&to_string/1) |> MapSet.new()
    known_set = perm_set |> Map.keys() |> MapSet.new()
    diff = MapSet.difference(provided_set, known_set)

    if MapSet.size(diff) > 0 do
      message = "#{to_string(__MODULE__)} Type: #{type} Missing Permissions: #{Enum.join(diff, ", ")}"
      raise PermissionNotFoundError, message: message
    end

    :ok
  else
    # The whole set name is unknown.
    raise PermissionNotFoundError, message: "#{to_string(__MODULE__)} - Type: #{type}"
  end
end
end
end
# The Plug behaviour is implemented by Guardian.Permissions.Plug; these
# delegates let this module be used directly in a plug pipeline.
defdelegate init(opts), to: Guardian.Permissions.Plug
defdelegate call(conn, opts), to: Guardian.Permissions.Plug

@doc """
Provides an encoded version of all permissions, and all possible future permissions
for a permission set
"""
# -1 has every bit set in two's-complement form, so it matches all current
# and future bit positions.
def max, do: -1
@doc false
# Normalizes raw permission configuration into %{set_name => %{perm => bit}}
# with string keys throughout. Lists are positional (bit = 2^index); maps
# keep their explicit values.
def normalize_permissions(perms) do
  normalized = Enum.into(perms, %{})

  for {set_name, values} <- normalized, into: %{} do
    stringified =
      case values do
        # Positional list: each name gets the next power-of-two bit value.
        list when is_list(list) ->
          for {perm, idx} <- Enum.with_index(list), into: %{} do
            {to_string(perm), trunc(:math.pow(2, idx))}
          end

        # Explicit map: values are kept as given, keys are stringified.
        map when is_map(map) ->
          for {perm, val} <- map, into: %{}, do: {to_string(perm), val}
      end

    {to_string(set_name), stringified}
  end
end
@doc false
# Converts a normalized map (string keys) back into the atom-keyed shape
# used by `available_permissions/0`: %{set_name => [perm_name, ...]}.
def available_from_normalized(perms) do
  Map.new(perms, fn {set, perm_map} ->
    labels = perm_map |> Map.keys() |> Enum.map(&String.to_atom/1)
    {String.to_atom(set), labels}
  end)
end
end
| 35.880637 | 108 | 0.656021 |
1c9d8be64465c5f8b2d867c219bebc8d22a3eced | 16,426 | ex | Elixir | lib/mix/lib/mix/tasks/format.ex | mattmatters/elixir | e0d1c2e4cae0277e69fec086b92d82f13d2aa033 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/format.ex | mattmatters/elixir | e0d1c2e4cae0277e69fec086b92d82f13d2aa033 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/format.ex | mattmatters/elixir | e0d1c2e4cae0277e69fec086b92d82f13d2aa033 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Format do
use Mix.Task
@shortdoc "Formats the given files/patterns"
@moduledoc """
Formats the given files and patterns.
mix format mix.exs "lib/**/*.{ex,exs}" "test/**/*.{ex,exs}"
If any of the files is `-`, then the output is read from stdin
and written to stdout.
## Formatting options
The formatter will read a `.formatter.exs` in the current directory for
formatter configuration. Evaluating this file should return a keyword list.
Here is an example `.formatter.exs` that works as a starting point:
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]
Besides the options listed in `Code.format_string!/2`, the `.formatter.exs`
supports the following options:
* `:inputs` (a list of paths and patterns) - specifies the default inputs
to be used by this task. For example, `["mix.exs", "{config,lib,test}/**/*.{ex,exs}"]`.
Patterns are expanded with `Path.wildcard/2`.
* `:subdirectories` (a list of paths and patterns) - specifies subdirectories
that have their own formatting rules. Each subdirectory should have a
`.formatter.exs` that configures how entries in that subdirectory should be
formatted as. Configuration between `.formatter.exs` are not shared nor
inherited. If a `.formatter.exs` lists "lib/app" as a subdirectory, the rules
in `.formatter.exs` won't be available in `lib/app/.formatter.exs`.
Note that the parent `.formatter.exs` must not specify files inside the "lib/app"
subdirectory in its `:inputs` configuration. If this happens, the behaviour of
which formatter configuration will be picked is unspecified.
* `:import_deps` (a list of dependencies as atoms) - specifies a list
of dependencies whose formatter configuration will be imported.
When specified, the formatter should run in the same directory as
the `mix.exs` file that defines those dependencies. See the "Importing
dependencies configuration" section below for more information.
* `:export` (a keyword list) - specifies formatter configuration to be exported.
See the "Importing dependencies configuration" section below.
## Task-specific options
* `--check-formatted` - checks that the file is already formatted.
This is useful in pre-commit hooks and CI scripts if you want to
reject contributions with unformatted code. However keep in mind
that the formatted output may differ between Elixir versions as
improvements and fixes are applied to the formatter.
* `--check-equivalent` - checks if the files after formatting have the
same AST as before formatting. If the ASTs are not equivalent,
it is a bug in the code formatter. This option is recommended if you
are automatically formatting files.
* `--dry-run` - does not save files after formatting.
* `--dot-formatter` - path to the file with formatter configuration.
Defaults to `.formatter.exs` if one is available. See the "`.formatter.exs`"
section for more information.
If any of the `--check-*` flags are given and a check fails, the formatted
contents won't be written to disk nor printed to standard output.
## When to format code
We recommend developers to format code directly in their editors, either
automatically when saving a file or via an explicit command or key binding. If
such option is not yet available in your editor of choice, adding the required
integration is usually a matter of invoking:
cd $project && mix format $file
where `$file` refers to the current file and `$project` is the root of your
project.
It is also possible to format code across the whole project by passing a list
of patterns and files to `mix format`, as shown at the top of this task
documentation. This list can also be set in the `.formatter.exs` under the
`:inputs` key.
## Importing dependencies configuration
This task supports importing formatter configuration from dependencies.
A dependency that wants to export formatter configuration needs to have a
`.formatter.exs` file at the root of the project. In this file, the dependency
can export a `:export` option with configuration to export. For now, only one
option is supported under `:export`: `:locals_without_parens` (whose value has
the same shape as the value of the `:locals_without_parens` in `Code.format_string!/2`).
The functions listed under `:locals_without_parens` in the `:export` option of
a dependency can be imported in a project by listing that dependency in the
`:import_deps` option of the formatter configuration file of the project.
For example, consider I have a project `my_app` that depends on `my_dep`.
`my_dep` wants to export some configuration, so `my_dep/.formatter.exs`
would look like this:
# my_dep/.formatter.exs
[
# Regular formatter configuration for my_dep
# ...
export: [
locals_without_parens: [some_dsl_call: 2, some_dsl_call: 3]
]
]
In order to import configuration, `my_app`'s `.formatter.exs` would look like
this:
# my_app/.formatter.exs
[
import_deps: [:my_dep]
]
"""
  # Command-line switches accepted by `mix format`.
  @switches [
    check_equivalent: :boolean,
    check_formatted: :boolean,
    dot_formatter: :string,
    dry_run: :boolean
  ]

  # Name and version of the manifest file used to cache evaluated
  # formatter configuration (deps + subdirectories).
  @manifest "cached_dot_formatter"
  @manifest_vsn 1
  # Task entry point: evaluates formatter configuration (including
  # :import_deps and :subdirectories), expands file arguments, formats every
  # file concurrently and reports any failures via check!/1.
  def run(args) do
    {opts, args} = OptionParser.parse!(args, strict: @switches)
    {dot_formatter, formatter_opts} = eval_dot_formatter(opts)

    {formatter_opts_and_subs, _sources} =
      eval_deps_and_subdirectories(dot_formatter, [], formatter_opts, [dot_formatter])

    args
    |> expand_args(dot_formatter, formatter_opts_and_subs)
    # Each file is formatted in its own task; result order is irrelevant.
    |> Task.async_stream(&format_file(&1, opts), ordered: false, timeout: 30000)
    |> Enum.reduce({[], [], []}, &collect_status/2)
    |> check!()
  end
  @doc """
  Returns formatter options to be used for the given file.
  """
  def formatter_opts_for_file(file, opts \\ []) do
    {dot_formatter, formatter_opts} = eval_dot_formatter(opts)

    {formatter_opts_and_subs, _sources} =
      eval_deps_and_subdirectories(dot_formatter, [], formatter_opts, [dot_formatter])

    # Resolve the file's path segments against any :subdirectories config.
    split = file |> Path.relative_to_cwd() |> Path.split()
    find_formatter_opts_for_file(split, formatter_opts_and_subs)
  end

  # Loads formatter configuration: an explicit --dot-formatter path wins,
  # then ./.formatter.exs if present, otherwise empty options.
  defp eval_dot_formatter(opts) do
    cond do
      dot_formatter = opts[:dot_formatter] ->
        {dot_formatter, eval_file_with_keyword_list(dot_formatter)}

      File.regular?(".formatter.exs") ->
        {".formatter.exs", eval_file_with_keyword_list(".formatter.exs")}

      true ->
        {".formatter.exs", []}
    end
  end
  # This function reads exported configuration from the imported
  # dependencies and subdirectories and deals with caching the result
  # of reading such configuration in a manifest file.
  defp eval_deps_and_subdirectories(dot_formatter, prefix, formatter_opts, sources) do
    deps = Keyword.get(formatter_opts, :import_deps, [])
    subs = Keyword.get(formatter_opts, :subdirectories, [])

    if not is_list(deps) do
      Mix.raise("Expected :import_deps to return a list of dependencies, got: #{inspect(deps)}")
    end

    if not is_list(subs) do
      Mix.raise("Expected :subdirectories to return a list of directories, got: #{inspect(subs)}")
    end

    if deps == [] and subs == [] do
      # Nothing to import, so nothing worth caching.
      {{formatter_opts, []}, sources}
    else
      manifest = Path.join(Mix.Project.manifest_path(), @manifest)

      maybe_cache_in_manifest(dot_formatter, manifest, fn ->
        {subdirectories, sources} = eval_subs_opts(subs, prefix, sources)
        {{eval_deps_opts(formatter_opts, deps), subdirectories}, sources}
      end)
    end
  end
  # Only cache when inside a Mix project that uses the default .formatter.exs;
  # otherwise (or on cache miss) evaluate, persisting the result when possible.
  defp maybe_cache_in_manifest(dot_formatter, manifest, fun) do
    cond do
      is_nil(Mix.Project.get()) or dot_formatter != ".formatter.exs" -> fun.()
      entry = read_manifest(manifest) -> entry
      true -> write_manifest!(manifest, fun.())
    end
  end

  # Reads the cached formatter configuration. Returns nil when the manifest is
  # missing, has a different version, or is stale relative to its sources.
  def read_manifest(manifest) do
    with {:ok, binary} <- File.read(manifest),
         {:ok, {@manifest_vsn, entry, sources}} <- safe_binary_to_term(binary),
         expanded_sources = Enum.flat_map(sources, &Path.wildcard(&1, match_dot: true)),
         false <- Mix.Utils.stale?([Mix.Project.config_mtime() | expanded_sources], [manifest]) do
      {entry, sources}
    else
      _ -> nil
    end
  end

  # Deserializes the locally-written manifest; any decoding failure is treated
  # as a cache miss rather than an error.
  defp safe_binary_to_term(binary) do
    {:ok, :erlang.binary_to_term(binary)}
  rescue
    _ -> :error
  end

  # Persists the evaluated configuration and returns it unchanged.
  defp write_manifest!(manifest, {entry, sources}) do
    File.mkdir_p!(Path.dirname(manifest))
    File.write!(manifest, :erlang.term_to_binary({@manifest_vsn, entry, sources}))
    {entry, sources}
  end
  # No imported deps: return the options untouched.
  defp eval_deps_opts(formatter_opts, []) do
    formatter_opts
  end

  # Collects the :locals_without_parens exported by each imported dependency's
  # .formatter.exs and merges them into the project's formatter options.
  defp eval_deps_opts(formatter_opts, deps) do
    deps_paths = Mix.Project.deps_paths()

    parenless_calls =
      for dep <- deps,
          dep_path = assert_valid_dep_and_fetch_path(dep, deps_paths),
          dep_dot_formatter = Path.join(dep_path, ".formatter.exs"),
          # Deps without a .formatter.exs are silently skipped.
          File.regular?(dep_dot_formatter),
          dep_opts = eval_file_with_keyword_list(dep_dot_formatter),
          parenless_call <- dep_opts[:export][:locals_without_parens] || [],
          uniq: true,
          do: parenless_call

    Keyword.update(
      formatter_opts,
      :locals_without_parens,
      parenless_calls,
      &(&1 ++ parenless_calls)
    )
  end
  # Expands each :subdirectories entry (wildcards allowed) and recursively
  # evaluates the .formatter.exs found in each matching directory, threading
  # the list of source files used for cache invalidation.
  defp eval_subs_opts(subs, prefix, sources) do
    {subs, sources} =
      Enum.flat_map_reduce(subs, sources, fn sub, sources ->
        prefix = Path.join(prefix ++ [sub])
        {Path.wildcard(prefix), [Path.join(prefix, ".formatter.exs") | sources]}
      end)

    Enum.flat_map_reduce(subs, sources, fn sub, sources ->
      sub_formatter = Path.join(sub, ".formatter.exs")

      if File.exists?(sub_formatter) do
        formatter_opts = eval_file_with_keyword_list(sub_formatter)

        # :in_memory marks nested configs so they are never cached themselves.
        {formatter_opts_and_subs, sources} =
          eval_deps_and_subdirectories(:in_memory, [sub], formatter_opts, sources)

        {[{sub, formatter_opts_and_subs}], sources}
      else
        {[], sources}
      end
    end)
  end
  # Resolves a dependency name to its on-disk path, raising a descriptive
  # error when the dep is unknown, not fetched, or not an atom.
  defp assert_valid_dep_and_fetch_path(dep, deps_paths) when is_atom(dep) do
    case Map.fetch(deps_paths, dep) do
      {:ok, path} ->
        if File.dir?(path) do
          path
        else
          # Known dep, but its sources are not on disk yet.
          Mix.raise(
            "Unavailable dependency #{inspect(dep)} given to :import_deps in the formatter configuration. " <>
              "The dependency cannot be found in the file system, please run \"mix deps.get\" and try again"
          )
        end

      :error ->
        Mix.raise(
          "Unknown dependency #{inspect(dep)} given to :import_deps in the formatter configuration. " <>
            "The dependency is not listed in your mix.exs for environment #{inspect(Mix.env())}"
        )
    end
  end

  defp assert_valid_dep_and_fetch_path(dep, _deps_paths) do
    Mix.raise("Dependencies in :import_deps should be atoms, got: #{inspect(dep)}")
  end
  # Evaluates a .formatter.exs file and asserts that it returns a keyword list.
  defp eval_file_with_keyword_list(path) do
    {opts, _} = Code.eval_file(path)

    unless Keyword.keyword?(opts) do
      Mix.raise("Expected #{inspect(path)} to return a keyword list, got: #{inspect(opts)}")
    end

    opts
  end
  # With no CLI arguments, the files come from the configuration's :inputs
  # (including any subdirectory configs); it is an error to have neither.
  defp expand_args([], dot_formatter, formatter_opts_and_subs) do
    if no_entries_in_formatter_opts?(formatter_opts_and_subs) do
      Mix.raise(
        "Expected one or more files/patterns to be given to mix format " <>
          "or for a .formatter.exs to exist with an :inputs or :subdirectories key"
      )
    end

    dot_formatter
    |> expand_dot_inputs([], formatter_opts_and_subs, %{})
    |> Enum.uniq()
  end

  # With CLI arguments, expand each pattern ("-" means stdin) and pair every
  # file with the formatter options that apply to its location.
  defp expand_args(files_and_patterns, _dot_formatter, {formatter_opts, subs}) do
    files =
      for file_or_pattern <- files_and_patterns,
          file <- stdin_or_wildcard(file_or_pattern),
          uniq: true,
          do: file

    if files == [] do
      Mix.raise(
        "Could not find a file to format. The files/patterns given to command line " <>
          "did not point to any existing file. Got: #{inspect(files_and_patterns)}"
      )
    end

    for file <- files do
      if file == :stdin do
        {file, formatter_opts}
      else
        split = file |> Path.relative_to_cwd() |> Path.split()
        {file, find_formatter_opts_for_file(split, {formatter_opts, subs})}
      end
    end
  end

  # Recursively expands :inputs patterns from this config and every
  # subdirectory config into a file => formatter_opts map.
  defp expand_dot_inputs(dot_formatter, prefix, {formatter_opts, subs}, acc) do
    if no_entries_in_formatter_opts?({formatter_opts, subs}) do
      Mix.raise("Expected :inputs or :subdirectories key in #{dot_formatter}")
    end

    map =
      for input <- List.wrap(formatter_opts[:inputs]),
          file <- Path.wildcard(Path.join(prefix ++ [input]), match_dot: true),
          do: {file, formatter_opts},
          into: %{}

    # Subdirectory entries are merged in afterwards, so they win on conflicts.
    Enum.reduce(subs, Map.merge(acc, map), fn {sub, formatter_opts_and_subs}, acc ->
      sub_formatter = Path.join(sub, ".formatter.exs")
      expand_dot_inputs(sub_formatter, [sub], formatter_opts_and_subs, acc)
    end)
  end
defp find_formatter_opts_for_file(split, {formatter_opts, subs}) do
Enum.find_value(subs, formatter_opts, fn {sub, formatter_opts_and_subs} ->
if List.starts_with?(split, Path.split(sub)) do
find_formatter_opts_for_file(split, formatter_opts_and_subs)
end
end)
end
defp no_entries_in_formatter_opts?({formatter_opts, subs}) do
is_nil(formatter_opts[:inputs]) and subs == []
end
defp stdin_or_wildcard("-"), do: [:stdin]
defp stdin_or_wildcard(path), do: path |> Path.expand() |> Path.wildcard(match_dot: true)
  # Reads all of stdin; the :file metadata is used by the formatter in
  # error reports.
  defp read_file(:stdin) do
    {IO.stream(:stdio, :line) |> Enum.to_list() |> IO.iodata_to_binary(), file: "stdin"}
  end

  defp read_file(file) do
    {File.read!(file), file: file}
  end
  # Formats a single file (or stdin) honoring the task flags. Returns :ok,
  # a tagged failure tuple, or {:exit, ...} when formatting raised.
  defp format_file({file, formatter_opts}, task_opts) do
    {input, extra_opts} = read_file(file)
    output = IO.iodata_to_binary([Code.format_string!(input, extra_opts ++ formatter_opts), ?\n])

    check_equivalent? = Keyword.get(task_opts, :check_equivalent, false)
    check_formatted? = Keyword.get(task_opts, :check_formatted, false)
    dry_run? = Keyword.get(task_opts, :dry_run, false)

    cond do
      check_equivalent? and not equivalent?(input, output) ->
        {:not_equivalent, file}

      check_formatted? ->
        # --check-formatted never writes; it only reports differences.
        if input == output, do: :ok, else: {:not_formatted, file}

      dry_run? ->
        :ok

      true ->
        write_or_print(file, input, output)
    end
  rescue
    exception ->
      # Surface the failing file together with the original error so check!/1
      # can report it before re-raising.
      {:exit, file, exception, __STACKTRACE__}
  end

  # Stdin output goes to stdout; files are only rewritten when changed.
  defp write_or_print(file, input, output) do
    cond do
      file == :stdin -> IO.write(output)
      input == output -> :ok
      true -> File.write!(file, output)
    end

    :ok
  end
defp collect_status({:ok, :ok}, acc), do: acc
defp collect_status({:ok, {:exit, _, _, _} = exit}, {exits, not_equivalent, not_formatted}) do
{[exit | exits], not_equivalent, not_formatted}
end
defp collect_status({:ok, {:not_equivalent, file}}, {exits, not_equivalent, not_formatted}) do
{exits, [file | not_equivalent], not_formatted}
end
defp collect_status({:ok, {:not_formatted, file}}, {exits, not_equivalent, not_formatted}) do
{exits, not_equivalent, [file | not_formatted]}
end
  # All clear: no crashes, no equivalence failures, no unformatted files.
  defp check!({[], [], []}) do
    :ok
  end

  # A formatting crash takes precedence: report the file, then re-raise the
  # original exception with its stacktrace.
  defp check!({[{:exit, file, exception, stacktrace} | _], _not_equivalent, _not_formatted}) do
    Mix.shell().error("mix format failed for file: #{Path.relative_to_cwd(file)}")
    reraise exception, stacktrace
  end

  # Equivalence failures indicate a formatter bug, hence the report request.
  defp check!({_exits, [_ | _] = not_equivalent, _not_formatted}) do
    Mix.raise("""
    mix format failed due to --check-equivalent.
    The following files were not equivalent:
    #{to_bullet_list(not_equivalent)}
    Please report this bug with the input files at github.com/elixir-lang/elixir/issues
    """)
  end

  defp check!({_exits, _not_equivalent, [_ | _] = not_formatted}) do
    Mix.raise("""
    mix format failed due to --check-formatted.
    The following files were not formatted:
    #{to_bullet_list(not_formatted)}
    """)
  end
defp to_bullet_list(files) do
Enum.map_join(files, "\n", &" * #{&1}")
end
  # Delegates to the formatter's AST-equivalence check (a private Elixir API):
  # both strings must parse to the same AST modulo formatting.
  defp equivalent?(input, output) do
    Code.Formatter.equivalent(input, output) == :ok
  end
end
| 34.078838 | 110 | 0.676854 |
1c9da7b0285f3f786d56531eb42b566bf2800cc0 | 788 | exs | Elixir | mix.exs | mpope9/lru_cache | 81fe0d5cc12070c84f94e857dca12a5b18d0656b | [
"Apache-2.0"
] | 41 | 2015-12-09T00:45:19.000Z | 2021-12-03T11:15:09.000Z | mix.exs | mpope9/lru_cache | 81fe0d5cc12070c84f94e857dca12a5b18d0656b | [
"Apache-2.0"
] | 2 | 2018-02-05T12:47:19.000Z | 2022-01-16T17:36:59.000Z | mix.exs | mpope9/lru_cache | 81fe0d5cc12070c84f94e857dca12a5b18d0656b | [
"Apache-2.0"
] | 13 | 2016-01-28T12:19:04.000Z | 2021-12-02T16:55:21.000Z | defmodule LruCache.Mixfile do
  use Mix.Project

  # Canonical repository URL, reused for :source_url and package links.
  @github "https://github.com/arago/lru_cache"

  def project do
    [
      app: :lru_cache,
      version: "0.1.3",
      elixir: "~> 1.2-dev",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      name: "LRU Cache",
      source_url: @github,
      description: description(),
      package: package()
    ]
  end

  def application do
    [applications: [:logger]]
  end

  # Documentation tooling only; the library itself has no runtime deps.
  defp deps do
    [{:earmark, "~> 0.1", only: :dev}, {:ex_doc, "~> 0.11", only: :dev}]
  end

  defp description do
    "ETS-based LRU Cache"
  end

  # Hex package metadata.
  defp package do
    [
      maintainers: ["Dmitry Russ(Aleksandrov)"],
      licenses: ["Apache 2.0"],
      links: %{"Github" => @github}
    ]
  end
end
| 19.7 | 72 | 0.563452 |
1c9da82a2630d15b8582f0206947c1c0951f7eb7 | 851 | ex | Elixir | lib/ex_ami/server_config.ex | palexanderm/ex_ami | 3f713e7a18feebe14fd89c8adfec4cc3af1c3f05 | [
"MIT"
] | 18 | 2015-02-18T23:30:48.000Z | 2020-12-07T11:02:27.000Z | lib/ex_ami/server_config.ex | palexanderm/ex_ami | 3f713e7a18feebe14fd89c8adfec4cc3af1c3f05 | [
"MIT"
] | 2 | 2015-09-03T12:31:13.000Z | 2018-01-17T00:49:13.000Z | lib/ex_ami/server_config.ex | palexanderm/ex_ami | 3f713e7a18feebe14fd89c8adfec4cc3af1c3f05 | [
"MIT"
] | 8 | 2016-05-11T01:17:28.000Z | 2021-12-24T20:56:26.000Z | defmodule ExAmi.ServerConfig do
  use ExAmi.Logger

  # Looks up `key` anywhere inside a nested server-config structure made of
  # {key, value} tuples and lists of such tuples. Returns the first value
  # found; the clause order below defines the search precedence. nil on miss.
  def get(server_info, key) do
    search(server_info, key)
  end

  # Exhausted the list without a match.
  defp search([], _key), do: nil

  # The head element's nested list starts with the wanted key.
  defp search([{_, [{k, v} | _]} | _], key) when k == key, do: v

  # Otherwise descend into the head's first nested value, then the remaining
  # nested entries, and finally the rest of the outer list.
  defp search([{_, [{_, v} | tail2]} | tail], key) do
    case search(v, key) do
      nil ->
        case search(tail2, key) do
          nil -> search(tail, key)
          other -> other
        end

      other ->
        other
    end
  end

  # A flat {key, value} at the head of the list.
  defp search([{k, v} | _], key) when k == key, do: v

  # Skip anything else at the head and keep scanning the list.
  defp search([_ | tail], key), do: search(tail, key)

  # Bare two-element tuples: matched directly, by the head of their nested
  # list, or by recursing into the nested list's values.
  defp search({k, v}, key) when k == key, do: v
  defp search({_, [{k, v} | _]}, key) when k == key, do: v

  defp search({_, [{_, v} | tail]}, key) do
    case search(v, key) do
      nil -> search(tail, key)
      other -> other
    end
  end

  # Leaf values (or shapes not handled above) never match.
  defp search(_, _), do: nil
end
| 22.394737 | 64 | 0.52879 |
1c9dcb0a380981d71badd9b17c3869025f67844a | 1,078 | exs | Elixir | test/asset/farmware_installation_test.exs | FarmBot/farmbot_os | 5ebdca3afd672eb6b0af5c71cfca02488b32569a | [
"MIT"
] | 843 | 2016-10-05T23:46:05.000Z | 2022-03-14T04:31:55.000Z | farmbot_core/test/asset/farmware_installation_test.exs | gdwb/farmbot_os | 0ef2697c580c9fbf37a22daa063a64addfcb778d | [
"MIT"
] | 455 | 2016-10-15T08:49:16.000Z | 2022-03-15T12:23:04.000Z | farmbot_core/test/asset/farmware_installation_test.exs | gdwb/farmbot_os | 0ef2697c580c9fbf37a22daa063a64addfcb778d | [
"MIT"
] | 261 | 2016-10-10T04:37:06.000Z | 2022-03-13T21:07:38.000Z | defmodule FarmbotCore.Asset.FarmwareInstallationTest do
  use ExUnit.Case

  alias FarmbotCore.Asset.FarmwareInstallation

  # Builds an in-memory FarmwareInstallation with a fully populated manifest
  # so changeset/render can be exercised without touching the database.
  def fake_install() do
    %FarmwareInstallation{
      id: 23,
      url: "http://www.lycos.com",
      manifest: %{
        package: "xpackage",
        language: "xlanguage",
        author: "xauthor",
        description: "xdescription",
        url: "xurl",
        zip: "xzip",
        executable: "xexecutable",
        args: "xargs",
        config: "xconfig",
        package_version: "xpackage_version",
        farmware_manifest_version: "xfarmware_manifest_version",
        farmware_tools_version_requirement:
          "xfarmware_tools_version_requirement",
        farmbot_os_version_requirement: "xfarmbot_os_version_requirement"
      }
    }
  end

  test "changeset" do
    # The fully-populated fixture should produce a valid changeset.
    cs = FarmwareInstallation.changeset(fake_install())
    assert cs.valid?
  end

  test "view" do
    pg = fake_install()
    # render/1 should expose only the id and url fields.
    expected = %{id: 23, url: "http://www.lycos.com"}
    actual = FarmwareInstallation.render(pg)
    assert expected == actual
  end
end
| 25.666667 | 73 | 0.649351 |
1c9e2ce8aac6ddc12ce8ad8b01581cef034f9030 | 277 | exs | Elixir | priv/repo/migrations/20181122063608_create_user_settings.exs | macypa/storeHall | 9fe810f763527cc87fa165138bdfb3cda92fa553 | [
"MIT"
] | null | null | null | priv/repo/migrations/20181122063608_create_user_settings.exs | macypa/storeHall | 9fe810f763527cc87fa165138bdfb3cda92fa553 | [
"MIT"
] | 4 | 2019-07-16T06:24:42.000Z | 2021-05-07T22:26:14.000Z | priv/repo/migrations/20181122063608_create_user_settings.exs | macypa/storeHall | 9fe810f763527cc87fa165138bdfb3cda92fa553 | [
"MIT"
] | 1 | 2018-11-23T21:13:31.000Z | 2018-11-23T21:13:31.000Z | defmodule StoreHall.Repo.Migrations.CreateUserSettings do
  use Ecto.Migration

  def change do
    # The user id (a string) is the primary key, so no auto-generated id.
    create table(:user_settings, primary_key: false) do
      add :id, :string, primary_key: true
      # Free-form settings stored as a map column.
      add :settings, :map

      # Timezone-aware created/updated timestamps.
      timestamps(type: :timestamptz)
    end
  end
end
| 19.785714 | 57 | 0.703971 |
1c9e50ec29207f6f9d7b03aa9d8db28bdddada0e | 299 | exs | Elixir | test/test_helper.exs | Vincent-Legros/mirego-boilerplate | 20d94568133b6dd67862d864a420296217bd7b81 | [
"BSD-3-Clause"
] | 854 | 2019-03-18T19:13:58.000Z | 2022-03-30T01:47:30.000Z | test/test_helper.exs | Vincent-Legros/mirego-boilerplate | 20d94568133b6dd67862d864a420296217bd7b81 | [
"BSD-3-Clause"
] | 167 | 2019-03-18T21:23:28.000Z | 2022-03-31T19:07:34.000Z | test/test_helper.exs | Vincent-Legros/mirego-boilerplate | 20d94568133b6dd67862d864a420296217bd7b81 | [
"BSD-3-Clause"
] | 63 | 2019-03-28T14:19:11.000Z | 2022-02-15T17:22:14.000Z | # NOTE: When using Elixir 1.12+, we could ditch the next line and use `mix test --warnings-as-errors` instead
Code.put_compiler_option(:warnings_as_errors, true)

# ExMachina provides the test factories; it must be running before tests use them.
{:ok, _} = Application.ensure_all_started(:ex_machina)

ExUnit.start()

# Manual sandbox mode: each test explicitly checks out its own DB connection.
Ecto.Adapters.SQL.Sandbox.mode(ElixirBoilerplate.Repo, :manual)
| 33.222222 | 109 | 0.77592 |
1c9e5383e0ebb1fad0044c16e3d3187fb90ef0c8 | 538 | ex | Elixir | memory_backend/lib/memory_backend/game_store.ex | AdrianPaulCarrieres/lpiot2020-memory-adrianpaulcarrieres | 0a2d66c6ecf501188a949807c8ea2d99c26c531b | [
"MIT"
] | null | null | null | memory_backend/lib/memory_backend/game_store.ex | AdrianPaulCarrieres/lpiot2020-memory-adrianpaulcarrieres | 0a2d66c6ecf501188a949807c8ea2d99c26c531b | [
"MIT"
] | 15 | 2020-12-23T16:09:28.000Z | 2020-12-26T22:32:47.000Z | memory_backend/lib/memory_backend/game_store.ex | AdrianPaulCarrieres/lpiot2020-memory-adrianpaulcarrieres | 0a2d66c6ecf501188a949807c8ea2d99c26c531b | [
"MIT"
] | null | null | null | defmodule MemoryBackend.GameStore do
use Agent
require Logger
@doc """
Starts a new game store.
"""
def start_link(_opts) do
Agent.start_link(fn -> %MemoryBackend.Game{} end)
end
@doc """
Get a game from the game store agent.
"""
def get(pid) do
Agent.get(
pid,
fn x -> x end
)
end
@doc """
Set game in game store agent
"""
def set(pid, game = %MemoryBackend.Game{}) do
# Logger.info("Game updated #{inspect(game)}")
Agent.update(pid, fn _old_game -> game end)
end
end
| 17.933333 | 53 | 0.609665 |
1c9e99633c2774f1fcdd3235c1bb421568ee62ad | 1,514 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/firewall_log_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/firewall_log_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/firewall_log_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.FirewallLogConfig do
  @moduledoc """
  The available logging options for a firewall rule.
  ## Attributes
  - enable (boolean()): This field denotes whether to enable logging for a particular firewall rule. Defaults to: `null`.
  """

  # ModelBase generates the struct, field accessors, and decode/2.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :enable => any()
        }

  field(:enable)
end
def decode(value, options) do
GoogleApi.Compute.V1.Model.FirewallLogConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.FirewallLogConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 31.541667 | 121 | 0.746367 |
1c9ea4f389982c883e23d4f4a59d81505d807037 | 420 | exs | Elixir | template_method/html_report.exs | joshnuss/design-patterns-in-elixir | 7f07cae0701ad460b6b275e382fad03324656989 | [
"MIT"
] | 516 | 2015-09-25T18:43:37.000Z | 2022-03-22T16:33:08.000Z | template_method/html_report.exs | joshnuss/design-patterns-in-elixir | 7f07cae0701ad460b6b275e382fad03324656989 | [
"MIT"
] | 2 | 2017-10-01T22:33:34.000Z | 2019-02-21T18:21:54.000Z | template_method/html_report.exs | joshnuss/design-patterns-in-elixir | 7f07cae0701ad460b6b275e382fad03324656989 | [
"MIT"
] | 52 | 2015-11-15T05:58:45.000Z | 2022-01-21T20:01:17.000Z | defmodule HTMLReport do
  use Report

  # Template-method hooks: the Report behaviour drives the overall report
  # flow and invokes these callbacks in order; each writes one fragment of
  # the HTML document to stdout.
  def output_start,
    do: IO.puts("<html>")

  # NOTE(review): @title appears to be injected by `use Report` — confirm.
  def output_head do
    IO.puts(" <head>")
    IO.puts(" <title>#{@title}</title>")
    IO.puts(" </head>")
  end

  def output_body_start,
    do: IO.puts("<body>")

  # Called once per report line.
  def output_line(line),
    do: IO.puts(" <p>#{line}</p>")

  def output_body_end,
    do: IO.puts("</body>")

  def output_end,
    do: IO.puts("</html>")
end
| 16.8 | 43 | 0.569048 |
1c9eb156ea7eabcdf67e793296adb6b86c440493 | 612 | exs | Elixir | test/utils/xml_test.exs | szTheory/Woolly | cf7ad76cbaa6d9be38121816c5da223f18853653 | [
"MIT"
] | 47 | 2016-05-20T10:16:00.000Z | 2020-06-25T09:38:49.000Z | test/utils/xml_test.exs | pjhampton/woolly | 58f6d4901c21b715ec18e01afdf8608a37e1ca61 | [
"MIT"
] | 8 | 2016-05-24T15:31:40.000Z | 2020-03-03T18:33:54.000Z | test/utils/xml_test.exs | pjhampton/woolly | 58f6d4901c21b715ec18e01afdf8608a37e1ca61 | [
"MIT"
] | 8 | 2016-07-09T21:46:22.000Z | 2020-03-03T16:42:30.000Z | defmodule Woolly.Utils.XMLTest do
  use ExUnit.Case, async: true

  import Woolly.Utils.XML

  # remove_xml/1 should strip every tag (including attributes) while leaving
  # the text content untouched.
  test :removing_xml do
    assert "my name is woolly" === remove_xml("my name is <name>woolly</name>")
    # A document with no text content collapses to an empty string.
    assert "" === remove_xml("<!doctype html><html><head></head><body></body></html>")
    assert "I love you" === remove_xml("<positive>I love you</positive>")
    # Attributes are discarded along with the tags.
    assert "I hate you" === remove_xml("<negative score='3'>I hate you</negative>")
    assert "Peter" === remove_xml("<name>Peter</name>")
    # Whitespace between elements is preserved.
    assert "2 + 2 = 99" === remove_xml("<num>2</num> <op>+</op> <num>2</num> <op>=</op> <num>99</num>")
  end
end
| 40.8 | 103 | 0.627451 |
1c9eb34a1310c543bc58ed403ade5c45c23114d2 | 1,173 | ex | Elixir | apps/customer/lib/customer/web/channels/user_socket.ex | JaiMali/job_search-1 | 5fe1afcd80aa5d55b92befed2780cd6721837c88 | [
"MIT"
] | 102 | 2017-05-21T18:24:04.000Z | 2022-03-10T12:53:20.000Z | apps/customer/lib/customer/web/channels/user_socket.ex | JaiMali/job_search-1 | 5fe1afcd80aa5d55b92befed2780cd6721837c88 | [
"MIT"
] | 2 | 2017-05-21T01:53:30.000Z | 2017-12-01T00:27:06.000Z | apps/customer/lib/customer/web/channels/user_socket.ex | JaiMali/job_search-1 | 5fe1afcd80aa5d55b92befed2780cd6721837c88 | [
"MIT"
] | 18 | 2017-05-22T09:51:36.000Z | 2021-09-24T00:57:01.000Z | defmodule Customer.Web.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", Customer.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "users_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# Customer.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 30.868421 | 83 | 0.702472 |
1c9eb9a19af42ef2c01713a46c1ec06a7c3e9e52 | 517 | exs | Elixir | clients/you_tube/test/video_categories_test.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/you_tube/test/video_categories_test.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/you_tube/test/video_categories_test.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | defmodule GoogleApi.YouTube.VideoCategoriesTest do
  use GoogleApi.YouTube.TestHelper

  # Skip this live-API test unless Google credentials are configured. The
  # conditional runs at compile time; @tag :skip applies to the next test.
  if System.get_env("GOOGLE_APPLICATION_CREDENTIALS") == nil do
    @tag :skip
  end

  test "video categories" do
    conn = GoogleApi.YouTube.V3.Connection.new(&for_scope/1)
    part = "snippet"
    region_code = "US"

    {:ok, resp} = GoogleApi.YouTube.V3.Api.VideoCategories.youtube_video_categories_list(conn, part, regionCode: region_code)

    # Only the response shape is asserted; contents depend on the live API.
    assert %GoogleApi.YouTube.V3.Model.VideoCategoryListResponse{} = resp
  end
end
| 32.3125 | 125 | 0.752418 |
1c9ef6bc4e669b270616194f62c5ef54ddaa3f8c | 3,691 | exs | Elixir | test/cloak/vault_test.exs | robinvdvleuten/cloak | 70cbf4bc59f15b5f60831d455b8dc7b6f457a1ff | [
"MIT"
] | 475 | 2015-09-19T14:09:11.000Z | 2022-03-28T14:53:27.000Z | test/cloak/vault_test.exs | robinvdvleuten/cloak | 70cbf4bc59f15b5f60831d455b8dc7b6f457a1ff | [
"MIT"
] | 98 | 2015-12-16T03:53:48.000Z | 2022-03-24T22:34:56.000Z | test/cloak/vault_test.exs | robinvdvleuten/cloak | 70cbf4bc59f15b5f60831d455b8dc7b6f457a1ff | [
"MIT"
] | 67 | 2015-10-21T11:56:38.000Z | 2022-03-18T13:51:16.000Z | defmodule Cloak.VaultTest do
use ExUnit.Case
alias Cloak.TestVault
  # Minimal vaults defined inline so runtime configuration and supervision
  # can be exercised without touching the shared TestVault configuration.
  defmodule RuntimeVault do
    use Cloak.Vault, otp_app: :cloak
  end

  defmodule SupervisedVault do
    use Cloak.Vault, otp_app: :cloak
  end
  describe ".start_link/1" do
    test "allows configuration" do
      # Fresh random AES key per run; nothing comes from app config.
      key = :crypto.strong_rand_bytes(32)

      {:ok, pid} =
        RuntimeVault.start_link(
          ciphers: [
            default: {Cloak.Ciphers.AES.GCM, tag: "AES.GCM.V1", key: key}
          ],
          json_library: Jason
        )

      assert RuntimeVault.json_library() == Jason

      # Round-trip proves the runtime-configured cipher is actually used.
      assert "plaintext" ==
               "plaintext"
               |> RuntimeVault.encrypt!()
               |> RuntimeVault.decrypt!()

      GenServer.stop(pid)
    end

    test "can be supervised" do
      # Plain child spec (no options)...
      assert {:ok, pid} = Supervisor.start_link([SupervisedVault], strategy: :one_for_one)
      assert SupervisedVault.json_library() == Jason
      GenServer.stop(pid)

      # ...and the {module, opts} tuple form.
      assert {:ok, pid} =
               Supervisor.start_link(
                 [
                   {SupervisedVault, json_library: Jason}
                 ],
                 strategy: :one_for_one
               )

      assert SupervisedVault.json_library() == Jason
      GenServer.stop(pid)
    end
  end
  describe ".init/1" do
    test "returns the given config" do
      # init/1 is a pass-through for the configuration.
      assert {:ok, []} == TestVault.init([])
    end
  end

  describe ".encrypt/1" do
    test "encrypts ciphertext" do
      assert {:ok, ciphertext} = TestVault.encrypt("plaintext")
      # Ciphertext must differ from the plaintext input.
      assert ciphertext != "plaintext"
    end
  end

  describe ".encrypt!/1" do
    test "encrypts ciphertext" do
      # The bang variant returns the ciphertext directly instead of {:ok, _}.
      ciphertext = TestVault.encrypt!("plaintext")
      assert is_binary(ciphertext)
      assert ciphertext != "plaintext"
    end
  end

  describe ".encrypt/2" do
    test "encrypts ciphertext with the cipher associated with label" do
      assert {:ok, ciphertext} = TestVault.encrypt("plaintext", :secondary)
      assert ciphertext != "plaintext"
    end

    test "returns error if no cipher associated with label" do
      # Unknown labels surface as a MissingCipher error tuple.
      assert {:error, %Cloak.MissingCipher{}} = TestVault.encrypt("plaintext", :nonexistent)
    end
  end

  describe ".encrypt!/2" do
    test "encrypts ciphertext with cipher associated with label" do
      ciphertext = TestVault.encrypt!("plaintext", :secondary)
      assert is_binary(ciphertext)
      assert ciphertext != "plaintext"
    end

    test "raises error if no cipher associated with label" do
      # The bang variant raises instead of returning an error tuple.
      assert_raise Cloak.MissingCipher, fn ->
        TestVault.encrypt!("plaintext", :nonexistent)
      end
    end
  end
  describe ".decrypt/1" do
    test "decrypts ciphertext" do
      # Decryption selects the right cipher from the ciphertext itself,
      # regardless of which label produced it.
      {:ok, ciphertext1} = TestVault.encrypt("plaintext")
      {:ok, ciphertext2} = TestVault.encrypt("plaintext", :secondary)
      assert {:ok, "plaintext"} = TestVault.decrypt(ciphertext1)
      assert {:ok, "plaintext"} = TestVault.decrypt(ciphertext2)
    end

    test "returns error if no module found to decrypt" do
      # Bytes that match no known cipher tag yield a MissingCipher error.
      assert {:error, %Cloak.MissingCipher{}} = TestVault.decrypt(<<123, 123>>)
    end
  end

  describe ".decrypt!" do
    test "decrypts ciphertext" do
      ciphertext1 = TestVault.encrypt!("plaintext")
      ciphertext2 = TestVault.encrypt!("plaintext", :secondary)
      assert "plaintext" == TestVault.decrypt!(ciphertext1)
      assert "plaintext" == TestVault.decrypt!(ciphertext2)
    end

    test "raises error if no module found to decrypt" do
      assert_raise Cloak.MissingCipher, fn ->
        TestVault.decrypt!(<<123, 123>>)
      end
    end
  end

  describe ".json_library/1" do
    test "returns Jason by default" do
      assert TestVault.json_library() == Jason
    end
  end
end
| 27.139706 | 92 | 0.630994 |
1c9f1f2043bc9370974d6982cdaac0e38e2ad860 | 3,282 | ex | Elixir | lib/steps/patch/recompile_nifs.ex | tinfoil/burrito | 018dd1933b04933c0cf472938f76175b5650ea12 | [
"MIT"
] | null | null | null | lib/steps/patch/recompile_nifs.ex | tinfoil/burrito | 018dd1933b04933c0cf472938f76175b5650ea12 | [
"MIT"
] | null | null | null | lib/steps/patch/recompile_nifs.ex | tinfoil/burrito | 018dd1933b04933c0cf472938f76175b5650ea12 | [
"MIT"
] | null | null | null | defmodule Burrito.Steps.Patch.RecompileNIFs do
alias Burrito.Builder.Context
alias Burrito.Builder.Log
alias Burrito.Builder.Step
alias Burrito.Builder.Target
@behaviour Step
  @impl Step
  # Build-step entry point: recompiles every NIF-bearing dependency when the
  # target is a cross build; otherwise there is nothing to do here.
  def execute(%Context{} = context) do
    if context.target.cross_build do
      triplet = Target.make_triplet(context.target)
      # Assumes a previous step unpacked ERTS locally -- this match raises if
      # erts_source is anything other than {:local_unpacked, path: _}.
      {:local_unpacked, path: erts_location} = context.target.erts_source
      nif_sniff()
      |> Enum.each(fn dep ->
        maybe_recompile_nif(dep, context.work_dir, erts_location, triplet)
      end)
    end
    # Steps return the context so the builder can chain them.
    context
  end
def nif_sniff() do
# The current procedure for finding out if a dependency has a NIF:
# - List all deps in the project using Mix.Project.deps_paths/0
# - Iterate over those, and use Mix.Project.in_project/4 to execute a function inside their project context
# - Check if they contain :elixir_make in their `:compilers`
#
# We'll probably need to expand how we detect NIFs, but :elixir_make is a popular way to compile NIFs
# so it's a good place to start...
paths = Mix.Project.deps_paths() |> Enum.filter(fn {name, _} -> name != :burrito end)
Enum.map(paths, fn {dep_name, path} ->
Mix.Project.in_project(dep_name, path, fn module ->
if module && Keyword.has_key?(module.project, :compilers) do
{dep_name, path, Enum.member?(module.project[:compilers], :elixir_make)}
else
{dep_name, path, false}
end
end)
end)
end
  # Deps without a NIF need no work.
  defp maybe_recompile_nif({_, _, false}, _, _, _), do: :no_nif
  # Rebuilds a dep's NIF with zig as the C/C++ toolchain so the shared
  # library targets the cross-compilation triplet, then copies the dep's
  # priv/ artifacts into the release working directory.
  defp maybe_recompile_nif(
         {dep, path, true},
         release_working_path,
         erts_path,
         cross_target
       ) do
    dep = Atom.to_string(dep)
    Log.info(:step, "Going to recompile NIF for cross-build: #{dep} -> #{cross_target}")
    # Best-effort clean; the exit status is deliberately ignored.
    _ = System.cmd("make", ["clean"], cd: path, stderr_to_stdout: true, into: IO.stream())
    # Locate the include dir of the unpacked ERTS so the dep's Makefile can
    # find the ERTS headers (passed below via CFLAGS/CXXFLAGS).
    erts_include =
      Path.join(erts_path, ["otp-*/", "erts*/", "/include"]) |> Path.wildcard() |> List.first()
    build_result =
      System.cmd("make", ["--always-make"],
        cd: path,
        stderr_to_stdout: true,
        env: [
          {"RANLIB", "zig ranlib"},
          {"AR", "zig ar"},
          {"CC", "zig cc -target #{cross_target} -v -shared -Wl,-undefined=dynamic_lookup"},
          {"CXX", "zig c++ -target #{cross_target} -v -shared -Wl,-undefined=dynamic_lookup"},
          {"CXXFLAGS", "-I#{erts_include}"},
          {"CFLAGS", "-I#{erts_include}"}
        ],
        into: IO.stream()
      )
    case build_result do
      {_, 0} ->
        Log.info(:step, "Successfully re-built #{dep} for #{cross_target}!")
        src_priv_files = Path.join(path, ["priv/*"]) |> Path.wildcard()
        # First wildcard match of the dep's lib dir inside the release tree.
        output_priv_dir =
          Path.join(release_working_path, ["lib/#{dep}*/priv"]) |> Path.wildcard() |> List.first()
        Enum.each(src_priv_files, fn file ->
          file_name = Path.basename(file)
          dst_fullpath = Path.join(output_priv_dir, file_name)
          Log.info(:step, "#{file} -> #{output_priv_dir}")
          File.copy!(file, dst_fullpath)
        end)
      {output, _} ->
        Log.error(:step, "Failed to rebuild #{dep} for #{cross_target}!")
        # NOTE(review): with `into: IO.stream()` the output was already
        # streamed to stdio, so `output` is the collectable, not captured
        # text -- confirm this log line prints what is intended.
        Log.error(:step, output)
        exit(1)
    end
  end
end
| 31.557692 | 113 | 0.607252 |
1c9f2ab2b8c3b8927608594083b469120be97a42 | 249 | exs | Elixir | apps/alert_processor/priv/repo/migrations/20170605184024_add_subscription_metadata_fields.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | null | null | null | apps/alert_processor/priv/repo/migrations/20170605184024_add_subscription_metadata_fields.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | 21 | 2021-03-12T17:05:30.000Z | 2022-02-16T21:48:35.000Z | apps/alert_processor/priv/repo/migrations/20170605184024_add_subscription_metadata_fields.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | 1 | 2021-12-09T15:09:53.000Z | 2021-12-09T15:09:53.000Z | defmodule AlertProcessor.Repo.Migrations.AddSubscriptionMetadataFields do
use Ecto.Migration
  # Reversible migration: adds three nullable string columns
  # (origin, destination, type) to the subscriptions table.
  def change do
    alter table(:subscriptions) do
      add :origin, :string
      add :destination, :string
      add :type, :string
    end
  end
end
| 20.75 | 73 | 0.710843 |
1c9f6a47305ea39cdac395c7f9a6596bb39c51a3 | 281 | ex | Elixir | tests/dummy/web/controllers/owner_relationship_controller.ex | autoxjs/autox-phoenix | 6446f4487e3af28955f6560973cff6add34be4d4 | [
"MIT"
] | null | null | null | tests/dummy/web/controllers/owner_relationship_controller.ex | autoxjs/autox-phoenix | 6446f4487e3af28955f6560973cff6add34be4d4 | [
"MIT"
] | 20 | 2016-04-05T06:28:58.000Z | 2016-05-12T15:45:37.000Z | tests/dummy/web/controllers/owner_relationship_controller.ex | foxnewsnetwork/autox | 66ea3f0f7ba8b3f9e910984a2ed3cdf0ef5ef29a | [
"MIT"
] | null | null | null | defmodule Dummy.OwnerRelationshipController do
  use Dummy.Web, :controller
  # Require/scrub the JSON:API-style "data" payload on mutating actions.
  plug :scrub_params, "data" when action in [:create, :update, :delete]
  # NOTE(review): Autox plugs -- presumably AutoParentPlug resolves the parent
  # resource within the Dummy app and AutoPaginatePlug paginates :index;
  # confirm against the Autox docs.
  plug Autox.AutoParentPlug, Dummy
  plug Autox.AutoPaginatePlug when action in [:index]
  # Injects the standard relationship controller actions.
  use Autox.RelationshipController
end | 31.222222 | 71 | 0.772242 |
1c9f897c39a09143b97746f14adfd14ef5bbdac7 | 2,029 | exs | Elixir | config/dev.exs | dbernazal/html_playback | 489c4a26ad3aafb9484a143f9352f0ce5dd26a7f | [
"MIT"
] | null | null | null | config/dev.exs | dbernazal/html_playback | 489c4a26ad3aafb9484a143f9352f0ce5dd26a7f | [
"MIT"
] | 2 | 2021-03-09T11:51:15.000Z | 2021-05-10T01:10:16.000Z | config/dev.exs | dbernazal/html_playback | 489c4a26ad3aafb9484a143f9352f0ce5dd26a7f | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
#
# NOTE: an earlier revision also set `live_reload` here with a stale
# `lib/my_app_web/...` pattern; it was always replaced by the dedicated
# live_reload config further down (same app/key, non-keyword list values
# are overridden on merge), so it has been removed.
config :html_playback, HtmlPlaybackWeb.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [
    node: [
      "node_modules/webpack/bin/webpack.js",
      "--mode",
      "development",
      "--watch-stdin",
      cd: Path.expand("../assets", __DIR__)
    ]
  ]

# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
#     mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
#     https: [
#       port: 4001,
#       cipher_suite: :strong,
#       keyfile: "priv/cert/selfsigned_key.pem",
#       certfile: "priv/cert/selfsigned.pem"
#     ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.

# Watch static and templates for browser reloading.
config :html_playback, HtmlPlaybackWeb.Endpoint,
  live_reload: [
    patterns: [
      ~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
      ~r"priv/gettext/.*(po)$",
      ~r"lib/html_playback_web/{live,views}/.*(ex)$",
      ~r"lib/html_playback_web/templates/.*(eex)$"
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 27.794521 | 68 | 0.680631 |
1c9fad691e78bf299859a9fa830bcfd3928fa38f | 671 | exs | Elixir | examples/simple_ecto/mix.exs | stevedomin/dynamo | 3c28f27603a480d2755a89aba3a91e88df3469ce | [
"Apache-2.0"
] | 415 | 2015-01-04T15:59:47.000Z | 2022-02-02T21:12:46.000Z | examples/simple_ecto/mix.exs | stevedomin/dynamo | 3c28f27603a480d2755a89aba3a91e88df3469ce | [
"Apache-2.0"
] | 9 | 2015-02-06T16:09:46.000Z | 2018-08-08T06:33:50.000Z | examples/simple_ecto/mix.exs | stevedomin/dynamo | 3c28f27603a480d2755a89aba3a91e88df3469ce | [
"Apache-2.0"
] | 44 | 2015-02-01T15:49:50.000Z | 2021-11-29T09:08:37.000Z | defmodule SimpleEcto.Mixfile do
use Mix.Project
  # Mix project definition (old Dynamo-era layout: :dynamo compiler pass,
  # custom compile_path per Mix env).
  def project do
    [ app: :simple_ecto,
      version: "0.0.1",
      dynamos: [SimpleEcto.Dynamo],
      compilers: [:elixir, :dynamo, :app],
      env: [prod: [compile_path: "ebin"]],
      compile_path: "tmp/#{Mix.env}/simple_ecto/ebin",
      deps: deps ]
  end
  # Configuration for the OTP application
  def application do
    [ applications: [:cowboy, :dynamo],
      mod: { SimpleEcto, [] } ]
  end
  # Git-sourced dependencies; dynamo is resolved from the repo root.
  defp deps do
    [ { :cowboy, github: "extend/cowboy" },
      { :dynamo, "0.1.0-dev", path: "../.." },
      { :postgrex, github: "ericmj/postgrex" },
      { :ecto, github: "elixir-lang/ecto"} ]
  end
end
| 24.851852 | 54 | 0.582712 |
1c9fb05cec6092275736eba1d9b31bcc787279c1 | 483 | ex | Elixir | lib/betex_web/channels/sport_channel.ex | esl/betex | b887d95c9c6edac4bcadb8da188fae215d04fe6c | [
"Apache-2.0"
] | 1 | 2021-06-15T08:18:50.000Z | 2021-06-15T08:18:50.000Z | lib/betex_web/channels/sport_channel.ex | AdiletAbylov/betex | b887d95c9c6edac4bcadb8da188fae215d04fe6c | [
"Apache-2.0"
] | null | null | null | lib/betex_web/channels/sport_channel.ex | AdiletAbylov/betex | b887d95c9c6edac4bcadb8da188fae215d04fe6c | [
"Apache-2.0"
] | 2 | 2021-06-23T16:35:04.000Z | 2021-06-23T16:35:44.000Z | defmodule BetexWeb.SportChannel do
@moduledoc """
Module handles `sport:*` channel events.
"""
use Phoenix.Channel
alias Betex.Kaffe.MessageProducer
def join("sport:lobby", _payload, socket) do
{:ok, socket}
end
  # Client reports it is viewing: publish an event through the message
  # producer (Kaffe-based, presumably Kafka) and keep the socket as-is.
  def handle_in("viewing", _message, socket) do
    MessageProducer.viewing_message("some_id")
    {:noreply, socket}
  end
  # NOTE(review): this clause handles "left" yet still calls
  # viewing_message/1 with the same hard-coded id -- looks like a
  # copy-paste; confirm whether a dedicated "left" producer call was intended.
  def handle_in("left", _message, socket) do
    MessageProducer.viewing_message("some_id")
    {:noreply, socket}
  end
end
| 21 | 47 | 0.699793 |
1c9ffa21291bd59fbdfd72e9aacc7d5f791969aa | 1,124 | exs | Elixir | clients/you_tube_analytics/mix.exs | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/you_tube_analytics/mix.exs | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/you_tube_analytics/mix.exs | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | defmodule GoogleApi.YouTubeAnalytics.V1.Mixfile do
use Mix.Project
  # Mix project definition for the generated YouTube Analytics API client.
  def project do
    [app: :google_api_you_tube_analytics,
     version: "0.0.1",
     elixir: "~> 1.4",
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     description: description(),
     package: package(),
     deps: deps(),
     source_url: "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/you_tube_analytics"
    ]
  end
def application() do
[extra_applications: [:logger]]
end
  # Runtime deps (HTTP client + JSON) plus ex_doc for dev-time docs.
  defp deps() do
    [
      {:tesla, "~> 0.8"},
      {:poison, ">= 1.0.0"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end
  # One-line package description shown on Hex.
  defp description() do
    """
    Retrieves your YouTube Analytics data.
    """
  end
  # Hex package metadata (files shipped, maintainers, license, links).
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/you_tube_analytics",
        "Homepage" => "http://developers.google.com/youtube/analytics/"
      }
    ]
  end
end
| 23.914894 | 118 | 0.598754 |
1c9ffd14ed869485c4993e6878c02260e186a56c | 2,040 | exs | Elixir | mix.exs | BobbyMcWho/scenic_new | b358e5e15dc768db610d757b78865d4907ec2ccb | [
"Apache-2.0"
] | null | null | null | mix.exs | BobbyMcWho/scenic_new | b358e5e15dc768db610d757b78865d4907ec2ccb | [
"Apache-2.0"
] | null | null | null | mix.exs | BobbyMcWho/scenic_new | b358e5e15dc768db610d757b78865d4907ec2ccb | [
"Apache-2.0"
] | null | null | null | defmodule ScenicNew.MixProject do
use Mix.Project
  # Version/repo constants reused by project/0 and build_releases/1.
  @version "0.10.3"
  @github "https://github.com/boydm/scenic_new"
  # Mix project definition, including Hex package metadata and the list of
  # template files shipped with the archive.
  def project do
    [
      app: :scenic_new,
      version: @version,
      elixir: "~> 1.7",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      aliases: aliases(),
      docs: [
        main: "Mix.Tasks.Scenic.New"
      ],
      description: description(),
      package: [
        contributors: ["Boyd Multerer"],
        maintainers: ["Boyd Multerer"],
        licenses: ["Apache 2"],
        links: %{github: @github},
        # NOTE(review): "templates/**/*.jpg" appears twice in this list.
        files: [
          "templates/**/*.jpg",
          "templates/**/gitignore",
          "templates/**/*.exs",
          "templates/**/*.config",
          "templates/**/*.txt",
          "templates/**/*.jpg",
          "templates/**/*.png",
          "templates/**/*.eex",
          "config",
          # "test",
          "mix.exs",
          ".formatter.exs",
          ".gitignore",
          "LICENSE",
          "README.md",
          "lib/**/*.ex"
        ]
      ],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.html": :test,
        "coveralls.json": :test
      ],
      test_coverage: [tool: ExCoveralls]
    ]
  end
  # Run "mix help compile.app" to learn about applications.
  # No extra OTP applications are required by this archive.
  def application do
    [
      extra_applications: []
    ]
  end
  # Run "mix help deps" to learn about dependencies.
  # Dev/test-only tooling: docs generation and coverage reporting.
  defp deps do
    [
      {:ex_doc, "~> 0.19", only: [:dev, :docs], runtime: false},
      {:excoveralls, "~> 0.5.7", only: :test}
    ]
  end
  # `mix build` -> build_releases/1 below.
  defp aliases do
    [
      build: [&build_releases/1]
    ]
  end
  # Compiles the project and produces two archive copies under ./archives:
  # an unversioned scenic_new.ez and a versioned scenic_new-<version>.ez.
  defp build_releases(_) do
    Mix.Tasks.Compile.run([])
    Mix.Tasks.Archive.Build.run([])
    Mix.Tasks.Archive.Build.run(["--output=scenic_new.ez"])
    # NOTE(review): File.rename/2 results are ignored here, so a failed
    # rename (e.g. missing ./archives dir) passes silently -- confirm.
    File.rename("scenic_new.ez", "./archives/scenic_new.ez")
    File.rename("scenic_new-#{@version}.ez", "./archives/scenic_new-#{@version}.ez")
  end
  # One-line package description shown on Hex.
  defp description() do
    """
    ScenicNew - Mix task to generate a starter application
    """
  end
end
| 23.448276 | 84 | 0.52549 |
1ca019da93ec92a2c14c00ae76a19a22c839d3b9 | 1,183 | ex | Elixir | clients/backend-client-elixir/lib/adf_sender_connector/message.ex | bancolombia/async-dataflow | 7e543f9818e6a25ce1ef28b36f17f0377fdda6cf | [
"MIT"
] | 2 | 2022-01-11T21:03:44.000Z | 2022-03-15T15:13:11.000Z | clients/backend-client-elixir/lib/adf_sender_connector/message.ex | bancolombia/async-dataflow | 7e543f9818e6a25ce1ef28b36f17f0377fdda6cf | [
"MIT"
] | 3 | 2021-12-21T21:04:20.000Z | 2022-03-15T16:16:45.000Z | clients/backend-client-elixir/lib/adf_sender_connector/message.ex | bancolombia/async-dataflow | 7e543f9818e6a25ce1ef28b36f17f0377fdda6cf | [
"MIT"
] | 2 | 2022-02-08T22:33:36.000Z | 2022-03-25T19:55:18.000Z | defmodule AdfSenderConnector.Message do
defstruct ~w[channel_ref message_id correlation_id message_data event_name]a
@moduledoc """
Notification message representation
"""
@type channel_ref() :: String.t()
@type message_id() :: String.t()
@type correlation_id() :: String.t()
@type message_data() :: iodata()
@type event_name() :: String.t()
@type t() :: AdfSenderConnector.Message.t()
@doc """
Creates a message.
"""
@spec new(channel_ref(), message_id(), correlation_id(), message_data(), event_name()) :: t()
def new(channel_ref, message_id, correlation_id, message_data, event_name) do
%__MODULE__{
channel_ref: channel_ref,
message_id: message_id,
correlation_id: correlation_id,
message_data: message_data,
event_name: event_name
}
end
@doc """
Creates a message with minimal data needed.
"""
@spec new(channel_ref(), message_data(), event_name()) :: t()
def new(channel_ref, message_data, event_name) do
%__MODULE__{
channel_ref: channel_ref,
message_id: UUID.uuid1(),
correlation_id: nil,
message_data: message_data,
event_name: event_name
}
end
end
| 26.886364 | 95 | 0.677937 |
1ca04c09df6be20f17283d4e22d2a3b2c07fd026 | 273 | ex | Elixir | lib/boilerplate_web/controllers/auth_error_controller.ex | lorenzosinisi/react-phoenix-users-boilerplate | f39022a41c2b08947c9b4451248febce5005c1a3 | [
"MIT"
] | 152 | 2017-05-29T06:04:01.000Z | 2021-12-11T19:24:02.000Z | lib/boilerplate_web/controllers/auth_error_controller.ex | lorenzosinisi/react-phoenix-users-boilerplate | f39022a41c2b08947c9b4451248febce5005c1a3 | [
"MIT"
] | 13 | 2017-07-29T18:26:37.000Z | 2018-10-26T08:33:16.000Z | lib/boilerplate_web/controllers/auth_error_controller.ex | lorenzosinisi/react-phoenix-users-boilerplate | f39022a41c2b08947c9b4451248febce5005c1a3 | [
"MIT"
] | 12 | 2017-11-18T19:13:44.000Z | 2019-10-10T01:29:28.000Z | defmodule BoilerplateWeb.AuthErrorController do
import Plug.Conn
use BoilerplateWeb, :controller
  @doc """
  Authentication failure callback: responds 401 Unauthorized with the
  canned "wrong_credentials.json" body, regardless of the failure reason.
  """
  def auth_error(conn, {_type, _reason}, _opts) do
    conn
    |> put_status(:unauthorized)
    |> render(BoilerplateWeb.SessionView, "wrong_credentials.json")
  end
end
| 24.818182 | 67 | 0.750916 |
1ca056cc468810fa0026f3c9671792e1c0b68666 | 3,893 | exs | Elixir | test/live_sup/core/widgets_test.exs | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | null | null | null | test/live_sup/core/widgets_test.exs | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | 3 | 2022-02-23T15:51:48.000Z | 2022-03-14T22:52:43.000Z | test/live_sup/core/widgets_test.exs | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | null | null | null | defmodule LiveSup.Test.Core.WidgetsTest do
use ExUnit.Case, async: true
use LiveSup.DataCase
import LiveSup.Test.ProjectsFixtures
import LiveSup.Test.DatasourcesFixtures
alias LiveSup.Core.{Widgets, Datasources}
describe "widgets" do
    @describetag :widgets
    alias LiveSup.Schemas.Widget
    # Shared fixture data for the create/update tests below.
    @name "Last Rollbar Errors"
    @valid_attrs %{
      name: @name,
      slug: "last-rollbar-errors",
      enabled: true,
      worker_handler: "LastRollbarErrors",
      ui_handler: "LastRollbarErrors",
      labels: [],
      global: true,
      settings: %{number_of_errors: 5}
    }
    @update_attrs %{name: "Last Rollbar Errors X", handler: "LastRollbarErrorsX"}
    @invalid_attrs %{name: nil, handler: nil}
    # Inserts a widget (with its own fresh datasource) and reloads it so the
    # test comparisons see exactly what the repo returns.
    def widget_fixture(attrs \\ %{}) do
      datasource = datasource_fixture()
      {:ok, widget} =
        attrs
        |> Enum.into(%{
          name: "Last Rollbar Errors",
          slug: "last-rollbar-errors#{System.unique_integer()}",
          enabled: true,
          worker_handler: "LastRollbarErrors#{System.unique_integer()}",
          ui_handler: "LastRollbarErrors#{System.unique_integer()}",
          labels: [],
          global: true,
          settings: %{number_of_errors: 5},
          datasource_id: datasource.id
        })
        |> Widgets.create()
      # We have to reload the datasource
      # otherwise the settings attribute uses atoms
      # instead of strings
      Widgets.get!(widget.id)
    end
    test "all/0 returns all widgets" do
      widget = widget_fixture()
      assert Widgets.all() == [widget]
    end
    test "all/1 returns all widgets by datasource" do
      widget = widget_fixture()
      # Second widget gets its own datasource, so it must be excluded below.
      widget_fixture()
      assert Widgets.all(%{datasource_id: widget.datasource_id}) == [widget]
    end
    test "get!/1 returns the datasource with given id" do
      widget = widget_fixture()
      assert Widgets.get!(widget.id) == widget
    end
    test "create/1 with valid data creates a widget" do
      datasource = datasource_fixture()
      # @valid_attrs has no datasource_id, so attach a freshly created one.
      attrs = @valid_attrs |> Enum.into(%{datasource_id: datasource.id})
      assert {:ok, %Widget{} = widget} = Widgets.create(attrs)
      assert widget.labels == []
      assert widget.name == @name
      assert widget.settings == %{number_of_errors: 5}
    end
    test "create/1 with invalid data returns error changeset" do
      assert {:error, %Ecto.Changeset{}} = Widgets.create(@invalid_attrs)
    end
    test "update/2 with valid data updates the widget" do
      widget = widget_fixture()
      assert {:ok, %Widget{} = widget} = Widgets.update(widget, @update_attrs)
      assert widget.labels == []
      assert widget.name == "Last Rollbar Errors X"
      # After a reload/update the settings map carries string keys.
      assert widget.settings == %{"number_of_errors" => 5}
    end
    test "update/2 with invalid data returns error changeset" do
      widget = widget_fixture()
      assert {:error, %Ecto.Changeset{}} = Widgets.update(widget, @invalid_attrs)
      # A failed update must leave the persisted record untouched.
      assert widget == Widgets.get!(widget.id)
    end
    test "delete/1 deletes the datasource" do
      widget = widget_fixture()
      assert {:ok, %Widget{}} = Widgets.delete(widget)
      assert_raise Ecto.NoResultsError, fn -> Widgets.get!(widget.id) end
    end
    test "change/1 returns a datasource changeset" do
      widget = widget_fixture()
      assert %Ecto.Changeset{} = Widgets.change(widget)
    end
    test "create_instance/2 create a widget instance" do
      project = project_fixture()
      datasource = datasource_fixture()
      {:ok, datasource_instance} =
        Datasources.create_instance(datasource, project, %{api_key: "xxxxxxxxxx"})
      widget = widget_fixture(%{datasource_id: datasource.id})
      {:ok, widget_instance} = Widgets.create_instance(widget, datasource_instance)
      # The instance inherits name and settings from the widget template.
      assert widget_instance.name == widget.name
      assert widget_instance.settings == widget.settings
    end
end
end
| 31.144 | 83 | 0.650398 |
1ca06684ad8b31c4c3f78dac42b060c2940792ac | 657 | exs | Elixir | test/authoritex/loc_test.exs | nulib/authoritex | a9b277e20873a886e2578f14f58acb277a501f01 | [
"MIT"
] | 2 | 2020-06-11T10:37:21.000Z | 2020-10-13T18:12:42.000Z | test/authoritex/loc_test.exs | nulib/authoritex | a9b277e20873a886e2578f14f58acb277a501f01 | [
"MIT"
] | 21 | 2020-05-12T21:06:32.000Z | 2022-01-14T14:43:45.000Z | test/authoritex/loc_test.exs | nulib/authoritex | a9b277e20873a886e2578f14f58acb277a501f01 | [
"MIT"
] | null | null | null | defmodule Authoritex.LOCTest do
alias Authoritex.LOC
  # Authoritex.TestCase generates the full authority test suite from this
  # declarative configuration (fetch by URI, bad-URI handling, search).
  use Authoritex.TestCase,
    module: LOC,
    code: "loc",
    description: "Library of Congress Linked Data",
    # Both the http form and the info: URI form must resolve to the record.
    test_uris: [
      "http://id.loc.gov/vocabulary/organizations/iehs",
      "info:lc/vocabulary/organizations/iehs"
    ],
    bad_uri: "http://id.loc.gov/vocabulary/organizations/wrong-id",
    expected: [
      id: "http://id.loc.gov/vocabulary/organizations/iehs",
      label: "Evanston Township High School",
      qualified_label: "Evanston Township High School",
      hint: nil
    ],
    search_result_term: "evanston township high",
    search_count_term: "high school"
end
| 29.863636 | 67 | 0.680365 |
1ca066cfc02bed14cae496d809edb20d41020d87 | 8,017 | ex | Elixir | lib/mix_tasks_phoenix_gen_instachat.ex | itsgreggreg/phoenix_generator | ed39acbaf42900706968fff2a28978605af8fc78 | [
"MIT"
] | 32 | 2015-01-18T08:41:38.000Z | 2016-12-07T07:11:36.000Z | lib/mix_tasks_phoenix_gen_instachat.ex | itsgreggreg/phoenix_generator | ed39acbaf42900706968fff2a28978605af8fc78 | [
"MIT"
] | 5 | 2015-01-31T20:18:43.000Z | 2016-05-03T15:16:30.000Z | lib/mix_tasks_phoenix_gen_instachat.ex | itsgreggreg/phoenix_generator | ed39acbaf42900706968fff2a28978605af8fc78 | [
"MIT"
] | 5 | 2015-01-28T06:32:48.000Z | 2016-11-26T18:16:00.000Z | defmodule Mix.Tasks.Phoenix.Gen.Instachat do
# This module is a copy paste of Gen.Channel with a few additions
use Mix.Task
import Mix.Generator
import Phoenix.Gen.Utils
  @shortdoc "Generate an instant chatroom for a Phoenix Application"
  @moduledoc """
  Generates a zero configuration chat room
      mix phoenix.gen.instachat
  ## Command line options
  This task accepts no options.
  ## Examples
      mix phoenix.gen.instachat
  """
  # Writes the channel module, injects a socket route into web/router.ex,
  # generates a controller/template pair, appends ETS setup to the config,
  # and cleans the project so the router change is recompiled.
  def run(_opts) do
    channel_name = "instachat"
    bindings = [
      app_name: app_name_camel,
      module_name: Mix.Utils.camelize(channel_name)<>"Channel",
      channel_name: channel_name
    ]
    # Add the Channel
    channel_file = Path.join channels_path, "#{channel_name}_channel.ex"
    create_file channel_file, channel_template(bindings)
    # Add the Channel Route: splice the socket block between the router's
    # module body and its final `end` via a regex capture.
    route_file = Path.join ~w|web router.ex|
    route_contents = File.read! route_file
    [_ | captures] = Regex.run(~r/(.*defmodule.*)(\nend)/s, route_contents)
    route_contents = Enum.join captures, channel_route_template(bindings)
    File.write! route_file, route_contents
    Mix.Shell.IO.info "A route was added for this channel."
    # Generate the Controller and Action
    Mix.Tasks.Phoenix.Gen.Controller.run ["instachat", "index"]
    # Overwrite the Template
    template_path = Path.join [templates_path, "instachat", "index.html.eex"]
    File.write! template_path, html_template([])
    # Add the Config (appended, not overwritten)
    config_path = Path.join ~w|config config.exs|
    File.write! config_path, config_template([]), [:append]
    Mix.Shell.IO.info "Your config/config.exs was edited."
    Mix.Shell.IO.info "An ets table will be started to manage nicknames.\n"
    # For some reason router changes don't trigger a recompile
    # so we must manually clean the project
    Mix.Tasks.Clean.run []
    Mix.Shell.IO.info """
    A chat room has been generated.
    Run: mix phoenix.server
    Visit: http://localhost:4000/instachat
    It takes a minute for the chat to start working, I'm not sure why. :/
    """
  end
  # Socket/channel route block spliced into the host app's router.
  embed_template :channel_route, """
    socket "/instachatsocket", <%= @app_name %> do
      channel "instachat:*", InstachatChannel
    end
  """
  # Appended to config/config.exs; creates the ETS table that tracks nicks.
  embed_template :config, """
  # Start an ets table to manage chat nick names
  :ets.new(:instachat, [:named_table, :public, :set])
  """
  # Source of the generated channel module. The heredoc below is a runtime
  # template (EEx), so its contents are reproduced verbatim.
  embed_template :channel, """
  defmodule <%= @app_name %>.<%= @module_name %> do
    use Phoenix.Channel
    @doc "Handles when a user first joins the channel."
    def join(topic, nick, socket) do
      case :ets.lookup(:instachat, nick) do
        [{^nick, _}] ->
          reply socket, "error", %{nickTaken: true}
          {:error, :nick_taken, socket}
        _ ->
          socket = Phoenix.Socket.put_topic(socket, topic)
          socket = Phoenix.Socket.assign(socket, :nick, nick)
          :ets.insert(:instachat, {nick, socket[:pid]})
          reply socket, topic, %{start: "Welcome to the chat!"}
      end
    end
    @doc "Handles a user sending a chat message"
    def handle_in(topic, %{"body" => body}, socket) do
      broadcast socket, topic,
        %{body: body, nick: socket.assigns[:nick], room: socket.assigns[:room]}
    end
    @doc "Handles a user joining a room"
    def handle_in(topic, %{"room" => room}, socket) do
      socket = Phoenix.Socket.assign(socket, :room, room)
      broadcast socket, topic, %{join: socket.assigns[:nick], room: room}
      reply socket, topic, %{entered: room}
    end
    @doc "Handles outgoing broadcasts of chat messages"
    def handle_out(topic, %{body: body, nick: nick, room: room}, socket) do
      cond do
        # don't send the message to people not in the room
        socket.assigns[:room] !== room -> {:ok, socket}
        # send it to everyone else
        true -> reply socket,
          topic, %{body: body, nick: nick}
      end
    end
    @doc "Handles outgoing broadcasts of room joins"
    def handle_out(topic, %{join: nick, room: room}, socket) do
      cond do
        # don't send the join message to people not in the room
        socket.assigns[:room] !== room -> {:ok, socket}
        # don't send the join message to the person who joined
        socket.assigns[:nick] == nick -> {:ok, socket}
        # send it to everyone else
        true -> reply socket,
          topic, %{join: nick}
      end
    end
    @doc "Catchall for broadcasts, anything not handlede gets forwarded"
    def handle_out(topic, msg, socket) do
      reply socket, topic, msg
    end
    @doc "Remove nicknames when a user disconnects"
    def leave(_, socket) do
      :ets.delete(:instachat, socket.assigns[:nick])
      {:ok, socket}
    end
  end
  """
  # Markup + jQuery client written into index.html.eex. The heredoc is a
  # runtime template, so its contents (typos included) are kept verbatim.
  embed_template :html, """
  <br>
  <div id="chat">
    <form id="room-form" class="form-inline">
      <div class="row">
        <div class="col-xs-1">
          <label for="room">Room</label>
        </div>
        <div class="col-xs-4">
          <input id="room" type="text" class="form-control" value="general">
        </div>
        <div class="col-xs-4">
          <button id="room-submit" type="submit" class="btn btn-default">Join</button>
        </div>
      </div>
    </form>
    <br>
    <div id="messages" style="height:300px; overflow-y:scroll" class="form-control" rows="15">
    </div>
    <br>
    <form id="message-form" class="form">
      <div class="row">
        <div class="col-xs-10">
          <input id="message" type="text" class="form-control">
        </div>
        <div class="col-xs-1">
          <button id="message-submit" type="submit" class="btn btn-primary">Send</button>
        </div>
      </div>
    </form>
  </div>
  <br>
  <script src="http://code.jquery.com/jquery-2.1.3.min.js"></script>
  <script src="/js/phoenix.js"></script>
  <script type="text/javascript">
    // Helper function to prompt a user for a nick name
    var get_nick = function(message){
      var n = ""
      while(!n){
        n = window.prompt(message).trim();
      }
      return n;
    }
    // Helper function to insert messages
    var append_message = function(msg){
      $("#messages").append("<p>"+msg+"</p>")
      $("#messages").scrollTop($("#messages")[0].scrollHeight)
    }
    // Global channel var used in UI event handlers
    var channel = undefined;
    // Global socket
    var socket = new Phoenix.Socket("/instachatsocket");
    // Channel callbacks
    var callback = function(chan) {
      // Set the global channel
      channel = chan;
      // Prompt for a new nick if we recieve an error
      channel.on("error", function(error){
        var nick = get_nick("Sorry that nick is taken. Please choose another.")
        channel.message = nick;
        socket.rejoin(channel);
      });
      // Event handlers for mesages from the socket
      channel.on("instachat:room", function(payload){
        for( var key in payload ) {
          switch(key){
            case "entered":
              append_message("You entered the room: "+payload.entered+".")
              break;
            case "body":
              append_message("<strong>"+payload.nick+"</strong>: "+payload.body);
              break;
            case "join":
              append_message(payload.join + " joined the chat.");
              break;
            case "start":
              append_message(payload.start);
              channel.send("instachat:room", {room: "general"});
              break;
          }
        }
      });
    };
    // UI events
    $("#message-form").submit(function(e){
      e.preventDefault();
      message = $("#message").val();
      $("#message").val("");
      channel.send("instachat:room", {body: message});
    })
    $("#room-form").submit(function(e){
      e.preventDefault();
      room = $("#room").val();
      channel.send("instachat:room", {room: room});
    })
    // Prompt for a nick
    var nick = get_nick("Please choose a nickname");
    // Join the socket
    socket.join("instachat:room", nick, callback);
  </script>
  """
end
| 31.562992 | 94 | 0.600225 |
1ca080dd98fd1e6dea6013c245067d3fbc36dc63 | 2,494 | ex | Elixir | lib/lotus/props/background.ex | code-shoily/lotus | d14958956103f2376d51974f40bcc7d7c59c2ad9 | [
"MIT"
] | 3 | 2021-09-20T10:34:15.000Z | 2021-09-20T16:23:07.000Z | lib/lotus/props/background.ex | code-shoily/lotus | d14958956103f2376d51974f40bcc7d7c59c2ad9 | [
"MIT"
] | null | null | null | lib/lotus/props/background.ex | code-shoily/lotus | d14958956103f2376d51974f40bcc7d7c59c2ad9 | [
"MIT"
] | 1 | 2021-11-23T13:10:27.000Z | 2021-11-23T13:10:27.000Z | defmodule Lotus.Props.Background do
  @moduledoc """
  Background-related props shared by components.
  See <https://getuikit.com/docs/background> for the UIkit classes produced.
  """
  defmacro __using__(_) do
    quote do
      @doc """
      Background style variant (default, muted, primary or secondary).
      """
      prop background, :string, values: ~w/default muted primary secondary/
      @doc """
      Background size.
      """
      prop background_size, :string, values: ~w/cover contain width-1-1 height-1-1/
      @doc """
      Background position.
      """
      prop background_position, :string, values: ~w/
        top-left
        top-center
        top-right
        center-left
        center-center
        center-right
        bottom-left
        bottom-center
        bottom-right
      /
      @doc """
      Disable background repetition.
      """
      prop background_no_repeat, :boolean
      @doc """
      Fix the background so it does not scroll with the content.
      """
      prop background_fixed, :boolean
      @doc """
      Background blend mode.
      """
      prop background_blend, :string, values: ~w/
        multiply
        screen
        overlay
        darken
        lighten
        color-dodge
        color-burn
        hard-light
        soft-light
        difference
        exclusion
        hue
        saturation
        color
        luminosity
      /
      @doc """
      Background image (small breakpoint).
      """
      prop background_image_small, :string
      @doc """
      Background image (medium breakpoint).
      """
      prop background_image_medium, :string
      @doc """
      Background image (large breakpoint).
      """
      prop background_image_large, :string
      @doc """
      Background image (extra-large breakpoint).
      """
      prop background_image_xlarge, :string
      # Translates the background_* assigns into the matching
      # "uk-background-*" CSS classes; falsy assigns produce no class
      # (Surface.css_class keyword semantics).
      defp background_class(assigns) do
        Surface.css_class(
          "uk-background-#{assigns.background}": assigns.background,
          "uk-background-#{assigns.background_size}": assigns.background_size,
          "uk-background-#{assigns.background_position}": assigns.background_position,
          "uk-background-norepeat": assigns.background_no_repeat,
          "uk-background-fixed": assigns.background_fixed,
          "uk-background-blend-#{assigns.background_blend}": assigns.background_blend,
          "uk-background-image@s": assigns.background_image_small,
          "uk-background-image@m": assigns.background_image_medium,
          "uk-background-image@l": assigns.background_image_large,
          "uk-background-image@xl": assigns.background_image_xlarge
        )
        |> List.wrap()
      end
    end
  end
end
| 24.693069 | 86 | 0.590617 |
1ca097dc0772c730497cc52ab3c75e46ac9de5a6 | 8,936 | ex | Elixir | lib/elixir/lib/kernel/parallel_compiler.ex | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/kernel/parallel_compiler.ex | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/kernel/parallel_compiler.ex | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | 1 | 2020-12-07T08:04:16.000Z | 2020-12-07T08:04:16.000Z | defmodule Kernel.ParallelCompiler do
@moduledoc """
A module responsible for compiling files in parallel.
"""
  @doc """
  Compiles the given files.
  Those files are compiled in parallel and can automatically
  detect dependencies between them. Once a dependency is found,
  the current file stops being compiled until the dependency is
  resolved.
  If there is an error during compilation or if `warnings_as_errors`
  is set to `true` and there is a warning, this function will fail
  with an exception.
  This function receives a set of callbacks as options:
    * `:each_file` - for each file compiled, invokes the callback passing the
      file
    * `:each_module` - for each module compiled, invokes the callback passing
      the file, module and the module bytecode
  The compiler doesn't care about the return values of the callbacks.
  Returns the modules generated by each compiled file.
  """
  def files(files, callbacks \\ [])
  def files(files, callbacks) when is_list(callbacks) do
    # nil output path: compile in memory, no .beam files written to disk.
    spawn_compilers(files, nil, callbacks)
  end
  @doc """
  Compiles the given files to the given path.
  Read `files/2` for more information.
  """
  def files_to_path(files, path, callbacks \\ [])
  def files_to_path(files, path, callbacks) when is_binary(path) and is_list(callbacks) do
    spawn_compilers(files, path, callbacks)
  end
  # Shared entry point for files/2 and files_to_path/3: resets the warning
  # state, kicks off the 8-arity scheduling loop and then checks the final
  # compilation status so --warnings-as-errors can abort the VM.
  defp spawn_compilers(files, path, callbacks) do
    # The custom error handler must be loadable before any child sets it.
    true = Code.ensure_loaded?(Kernel.ErrorHandler)
    compiler_pid = self()
    :elixir_code_server.cast({:reset_warnings, compiler_pid})
    # Always allow at least 2 workers so interdependent files can unblock
    # each other even on a single-scheduler VM.
    schedulers = max(:erlang.system_info(:schedulers_online), 2)
    result = spawn_compilers(files, files, path, callbacks, [], [], schedulers, [])
    # In case --warning-as-errors is enabled and there was a warning,
    # compilation status will be set to error and we fail with CompileError
    case :elixir_code_server.call({:compilation_status, compiler_pid}) do
      :ok -> result
      :error -> exit({:shutdown, 1})
    end
  end
  # We already have 4 currently running, don't spawn new ones
  # (queued - waiting = actively compiling; once that reaches the scheduler
  # count we only consume messages instead of spawning more workers).
  defp spawn_compilers(entries, original, output, callbacks, waiting, queued, schedulers, result) when
      length(queued) - length(waiting) >= schedulers do
    wait_for_messages(entries, original, output, callbacks, waiting, queued, schedulers, result)
  end
  # Release waiting processes
  # A pid at the head of the work list is a blocked worker whose dependency
  # has since become available: send :ready and drop it from `waiting`.
  defp spawn_compilers([h|t], original, output, callbacks, waiting, queued, schedulers, result) when is_pid(h) do
    {_kind, ^h, ref, _module} = List.keyfind(waiting, h, 1)
    send h, {ref, :ready}
    waiting = List.keydelete(waiting, h, 1)
    spawn_compilers(t, original, output, callbacks, waiting, queued, schedulers, result)
  end
  # Spawn a compiler for each file in the list until we reach the limit
  # Each worker is monitored; its outcome travels back via the exit reason
  # of the `exit/1` below, observed as a :DOWN message in wait_for_messages/8.
  defp spawn_compilers([h|t], original, output, callbacks, waiting, queued, schedulers, result) do
    parent = self()
    {pid, ref} =
      :erlang.spawn_monitor fn ->
        # Notify Code.ensure_compiled/2 that we should
        # attempt to compile the module by doing a dispatch.
        :erlang.put(:elixir_ensure_compiled, true)
        # Set the elixir_compiler_pid used by our custom Kernel.ErrorHandler.
        :erlang.put(:elixir_compiler_pid, parent)
        :erlang.process_flag(:error_handler, Kernel.ErrorHandler)
        # Exit reason doubles as the result channel: {:compiled, file} on
        # success, {:failure, ...} with the stacktrace on any raise/throw/exit.
        exit(try do
          _ = if output do
            :elixir_compiler.file_to_path(h, output)
          else
            :elixir_compiler.file(h)
          end
          {:compiled, h}
        catch
          kind, reason ->
            {:failure, kind, reason, System.stacktrace}
        end)
      end
    spawn_compilers(t, original, output, callbacks, waiting,
                    [{pid, ref, h}|queued], schedulers, result)
  end
  # No more files, nothing waiting, queue is empty, we are done
  # Return only the module names accumulated in `result`.
  defp spawn_compilers([], _original, _output, _callbacks, [], [], _schedulers, result) do
    for {:module, mod} <- result, do: mod
  end
  # Queued x, waiting for x: POSSIBLE ERROR! Release processes so we get the failures
  # Every queued worker is blocked on a dependency that will never arrive
  # (a deadlock / missing module); force them to proceed so they crash and report.
  defp spawn_compilers([], original, output, callbacks, waiting, queued, schedulers, result) when length(waiting) == length(queued) do
    Enum.each queued, fn {child, _, _} ->
      {_kind, ^child, ref, _module} = List.keyfind(waiting, child, 1)
      send child, {ref, :release}
    end
    wait_for_messages([], original, output, callbacks, waiting, queued, schedulers, result)
  end
  # No more files, but queue and waiting are not full or do not match
  defp spawn_compilers([], original, output, callbacks, waiting, queued, schedulers, result) do
    wait_for_messages([], original, output, callbacks, waiting, queued, schedulers, result)
  end
  # Wait for messages from child processes
  # Central receive loop: modules/structs becoming available unblock waiters,
  # :DOWN messages report worker completion or failure.
  defp wait_for_messages(entries, original, output, callbacks, waiting, queued, schedulers, result) do
    receive do
      {:struct_available, module} ->
        # Collect pids blocked on this struct (skipping ones already scheduled)
        # and prepend them so they are released first.
        available = for {:struct, pid, _, waiting_module} <- waiting,
                        module == waiting_module,
                        not pid in entries,
                        do: pid
        spawn_compilers(available ++ entries, original, output, callbacks,
                        waiting, queued, schedulers, [{:struct, module}|result])
      {:module_available, child, ref, file, module, binary} ->
        if callback = Keyword.get(callbacks, :each_module) do
          callback.(file, module, binary)
        end
        # Release the module loader which is waiting for an ack
        send child, {ref, :ack}
        available = for {_kind, pid, _, waiting_module} <- waiting,
                        module == waiting_module,
                        not pid in entries,
                        do: pid
        spawn_compilers(available ++ entries, original, output, callbacks,
                        waiting, queued, schedulers, [{:module, module}|result])
      {:waiting, kind, child, ref, on} ->
        defined = fn {k, m} -> on == m and k in [kind, :module] end
        # Oops, we already got it, do not put it on waiting.
        # NOTE(review): the rebinding of `waiting` inside `else` relies on
        # pre-1.3 Elixir clause-leaking scope rules — confirm target version.
        if :lists.any(defined, result) do
          send child, {ref, :ready}
        else
          waiting = [{kind, child, ref, on}|waiting]
        end
        spawn_compilers(entries, original, output, callbacks, waiting, queued, schedulers, result)
      {:DOWN, _down_ref, :process, down_pid, {:compiled, file}} ->
        if callback = Keyword.get(callbacks, :each_file) do
          callback.(file)
        end
        # Sometimes we may have spurious entries in the waiting
        # list because someone invoked try/rescue UndefinedFunctionError
        new_entries = List.delete(entries, down_pid)
        new_queued = List.keydelete(queued, down_pid, 0)
        new_waiting = List.keydelete(waiting, down_pid, 1)
        spawn_compilers(new_entries, original, output, callbacks, new_waiting, new_queued, schedulers, result)
      {:DOWN, down_ref, :process, _down_pid, reason} ->
        # Any other exit reason is a failure; handle_failure/5 may exit the
        # whole compilation, otherwise keep draining messages.
        handle_failure(down_ref, reason, entries, waiting, queued)
        wait_for_messages(entries, original, output, callbacks, waiting, queued, schedulers, result)
    end
  end
  # Reports a failed worker identified by its monitor `ref`. When every
  # remaining worker is deadlocked, drain and print their failures too,
  # then abort the whole compilation with a shutdown exit.
  defp handle_failure(ref, reason, entries, waiting, queued) do
    if file = find_failure(ref, queued) do
      print_failure(file, reason)
      if all_missing?(entries, waiting, queued) do
        # -1 because the failure just printed is not in the mailbox anymore.
        collect_failures(queued, length(queued) - 1)
      end
      exit({:shutdown, 1})
    end
  end
defp find_failure(ref, queued) do
case List.keyfind(queued, ref, 1) do
{_child, ^ref, file} -> file
_ -> nil
end
end
  # A worker that exited with {:compiled, _} did not fail; nothing to print.
  defp print_failure(_file, {:compiled, _}) do
    :ok
  end
  # Failure captured by the worker's try/catch: format the original error
  # with its (pruned) stacktrace.
  defp print_failure(file, {:failure, kind, reason, stacktrace}) do
    IO.puts "\n== Compilation error on file #{Path.relative_to_cwd(file)} =="
    IO.puts Exception.format(kind, reason, prune_stacktrace(stacktrace))
  end
  # Any other exit reason (e.g. the worker was killed) is shown as an exit.
  defp print_failure(file, reason) do
    IO.puts "\n== Compilation error on file #{Path.relative_to_cwd(file)} =="
    IO.puts Exception.format(:exit, reason, [])
  end
@elixir_internals [:elixir_compiler, :elixir_module, :elixir_translator, :elixir_expand]
defp prune_stacktrace([{mod, _, _, _}|t]) when mod in @elixir_internals do
prune_stacktrace(t)
end
defp prune_stacktrace([h|t]) do
[h|prune_stacktrace(t)]
end
defp prune_stacktrace([]) do
[]
end
defp all_missing?(entries, waiting, queued) do
entries == [] and waiting != [] and
length(waiting) == length(queued)
end
  # Drains up to `remaining` pending :DOWN messages and prints each failure
  # that maps back to a queued file, so a deadlock reports every file involved.
  defp collect_failures(_queued, 0), do: :ok
  defp collect_failures(queued, remaining) do
    receive do
      {:DOWN, down_ref, :process, _down_pid, reason} ->
        if file = find_failure(down_ref, queued) do
          print_failure(file, reason)
          collect_failures(queued, remaining - 1)
        else
          # :DOWN from an unrelated monitor: ignore without consuming quota.
          collect_failures(queued, remaining)
        end
    after
      # Give up if no failure appears in 5 seconds
      5000 -> :ok
    end
  end
end
| 35.744 | 134 | 0.659691 |
1ca0b5f310fd328f2cf0e724e1342cfc6da5e448 | 624 | ex | Elixir | lib/movement/operation.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | lib/movement/operation.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | lib/movement/operation.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z | defmodule Movement.Operation do
  # Describes a single translation operation (create/update/rollback, etc.)
  # before it is persisted; fields default to nil unless noted.
  defstruct action: nil,
            key: nil,
            text: nil,
            file_comment: nil,
            # position of the entry inside the source file
            file_index: 0,
            value_type: nil,
            plural: false,
            locked: false,
            # true when the operation belongs to a batch run
            batch: false,
            translation_id: nil,
            rollbacked_operation_id: nil,
            batch_operation_id: nil,
            revision_id: nil,
            version_id: nil,
            document_id: nil,
            project_id: nil,
            previous_translation: nil,
            placeholders: [],
            options: []
  @type t :: %__MODULE__{}
end
| 26 | 41 | 0.49359 |
1ca0bba3e64185034e7368dc4f2371802d5847ba | 1,552 | exs | Elixir | mix.exs | elpassion/sprint-poker | 5c9b34bb264c7a30ff48f0aeac40821b67310ff8 | [
"MIT"
] | 199 | 2015-10-22T16:20:09.000Z | 2021-11-08T11:20:45.000Z | mix.exs | elpassion/sprint-poker | 5c9b34bb264c7a30ff48f0aeac40821b67310ff8 | [
"MIT"
] | 4 | 2015-10-24T20:43:29.000Z | 2016-03-03T21:09:06.000Z | mix.exs | elpassion/sprint-poker | 5c9b34bb264c7a30ff48f0aeac40821b67310ff8 | [
"MIT"
] | 34 | 2015-10-23T06:38:43.000Z | 2019-08-13T23:49:24.000Z | defmodule SprintPoker.Mixfile do
use Mix.Project
  # Mix project definition: app name, version, Elixir requirement,
  # Phoenix compiler pass and per-environment compile paths.
  def project do
    [
      app: :sprint_poker,
      version: "0.0.1",
      elixir: "~> 1.4",
      elixirc_paths: elixirc_paths(Mix.env),
      compilers: [:phoenix] ++ Mix.compilers,
      start_permanent: Mix.env == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end
  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      # Application callback module started on boot.
      mod: {SprintPoker.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end
  # Specifies which paths to compile per environment.
  # Test builds additionally compile test/support helpers.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]
  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.3.0"},
      {:phoenix_pubsub, "~> 1.0"},
      {:phoenix_ecto, "~> 3.2"},
      {:postgrex, ">= 0.0.0"},
      {:cowboy, "~> 1.0"},
      # Error reporting to Airbrake-compatible services.
      {:airbrakex, "~> 0.1"},
      # Static analysis, dev/test only.
      {:credo, "~> 0.7", only: [:dev, :test]},
    ]
  end
  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to create, migrate and run the seeds file at once:
  #
  #     $ mix ecto.setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      # `mix test` always runs against a freshly migrated database.
      "test": ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end
end
| 25.866667 | 79 | 0.585052 |
1ca1027714231f3e239b972b7cb2f77385860866 | 15 | exs | Elixir | test/test_helper.exs | mark-b-kauffman/phoenixDSK3LO | 999d7f66515a3bf1974d25c3d7ff3b439266452c | [
"BSD-3-Clause",
"MIT"
] | 8 | 2017-07-24T20:54:25.000Z | 2021-04-06T19:19:32.000Z | test/test_helper.exs | mark-b-kauffman/phoenixDSK3LO | 999d7f66515a3bf1974d25c3d7ff3b439266452c | [
"BSD-3-Clause",
"MIT"
] | 2 | 2021-03-08T16:21:37.000Z | 2021-05-06T17:53:24.000Z | test/test_helper.exs | mark-b-kauffman/phoenixDSK3LO | 999d7f66515a3bf1974d25c3d7ff3b439266452c | [
"BSD-3-Clause",
"MIT"
] | 2 | 2019-12-12T17:04:32.000Z | 2020-07-22T15:42:32.000Z | ExUnit.start
| 3.75 | 12 | 0.733333 |
1ca15603ce7285c11a2b73ecacb9cb120ffd324b | 1,058 | ex | Elixir | lib/avrora/storage.ex | ni-lti-sdm/avrora | 0e9a1ded482de4e0eb0843c692e844593d0e5341 | [
"MIT"
] | 59 | 2019-07-11T15:29:26.000Z | 2022-03-23T19:35:55.000Z | lib/avrora/storage.ex | ni-lti-sdm/avrora | 0e9a1ded482de4e0eb0843c692e844593d0e5341 | [
"MIT"
] | 63 | 2019-08-09T17:52:26.000Z | 2022-03-16T22:08:04.000Z | lib/avrora/storage.ex | ni-lti-sdm/avrora | 0e9a1ded482de4e0eb0843c692e844593d0e5341 | [
"MIT"
] | 22 | 2019-07-29T10:50:47.000Z | 2021-09-04T13:37:08.000Z | defmodule Avrora.Storage do
@moduledoc """
Behavior for storing and getting schemas by name or integer ID.
"""
  @typedoc "Schema identifier."
  @type schema_id :: String.t() | integer()
  # Fetch a schema by name or registry ID; nil result means "not stored".
  @callback get(key :: schema_id) ::
              {:ok, result :: nil | Avrora.Schema.t()} | {:error, reason :: term()}
  # Store a schema under the given key, returning the stored schema.
  @callback put(key :: schema_id, value :: Avrora.Schema.t()) ::
              {:ok, result :: Avrora.Schema.t()} | {:error, reason :: term()}
  defmodule Transient do
    @moduledoc """
    Storage behavior which allows keys to be removed or expired.
    """
    alias Avrora.Storage
    @typedoc "Naive timestamp with second precision."
    @type timestamp :: timeout()
    # Remove a key; the boolean reports whether anything was deleted.
    @callback delete(key :: Storage.schema_id()) ::
                {:ok, result :: boolean()} | {:error, reason :: term()}
    # Schedule a key to expire after `ttl`; returns the expiry timestamp.
    @callback expire(key :: Storage.schema_id(), ttl :: timeout()) ::
                {:ok, timestamp :: timestamp()} | {:error, reason :: term()}
    # Drop every stored key.
    @callback flush() :: {:ok, result :: boolean()} | {:error, reason :: term()}
  end
end
| 31.117647 | 83 | 0.587902 |
1ca15da88d4fab13c05ad3f3aadbb2dc6a0363dd | 5,768 | exs | Elixir | test/stream_test.exs | nedap/mariaex | 102a23088386eb5038337084be34775e59945924 | [
"Apache-2.0"
] | 264 | 2015-03-05T06:55:21.000Z | 2021-08-02T22:12:35.000Z | test/stream_test.exs | nedap/mariaex | 102a23088386eb5038337084be34775e59945924 | [
"Apache-2.0"
] | 215 | 2015-03-04T23:39:52.000Z | 2022-01-17T05:14:10.000Z | test/stream_test.exs | nedap/mariaex | 102a23088386eb5038337084be34775e59945924 | [
"Apache-2.0"
] | 132 | 2015-03-04T22:50:48.000Z | 2021-12-09T23:28:59.000Z | defmodule StreamTest do
use ExUnit.Case, async: true
import Mariaex.TestHelper
  # One-time fixture: create and seed the `stream` table, then terminate
  # the helper connection (tests open their own via setup below).
  setup_all do
    {:ok, pid} = connect()
    {:ok, _} = Mariaex.query(pid, "CREATE TABLE stream (id int, text text)", [])
    {:ok, _} = Mariaex.query(pid, "INSERT INTO stream VALUES (1, 'foo'), (2, 'bar')", [])
    :sys.terminate(pid, :normal)
    :ok
  end
  # Per-test connection, exposed to tests as `context[:pid]`.
  setup do
    {:ok, pid} = connect()
    {:ok, [pid: pid]}
  end
  # Text-protocol streams emit a single result chunk with all rows.
  test "simple text stream", context do
    assert Mariaex.transaction(context[:pid], fn(conn) ->
      stream = Mariaex.stream(conn, "SELECT * FROM stream", [], [query_type: :text])
      assert [%Mariaex.Result{num_rows: 2, rows: [[1, "foo"], [2, "bar"]]}] =
             Enum.to_list(stream)
      :done
    end) == {:ok, :done}
  end
  # Binary-protocol streams emit an initial empty chunk before the rows;
  # the trailing plain query checks the connection is still usable.
  test "simple unnamed unprepared stream", context do
    assert Mariaex.transaction(context[:pid], fn(conn) ->
      stream = Mariaex.stream(conn, "SELECT * FROM stream", [], [])
      assert [%Mariaex.Result{num_rows: 0, rows: []},
              %Mariaex.Result{num_rows: 2, rows: [[1, "foo"], [2, "bar"]]}] =
             Enum.to_list(stream)
      :done
    end) == {:ok, :done}
    assert [[1, "foo"], [2, "bar"]] = query("SELECT * FROM stream", [])
  end
  # Same behaviour when the query was prepared beforehand (unnamed).
  test "simple unnamed prepared stream", context do
    query = prepare("", "SELECT * FROM stream")
    assert Mariaex.transaction(context[:pid], fn(conn) ->
      stream = Mariaex.stream(conn, query, [], [])
      assert [%Mariaex.Result{num_rows: 0, rows: []},
              %Mariaex.Result{num_rows: 2, rows: [[1, "foo"], [2, "bar"]]}] =
             Enum.to_list(stream)
      :done
    end) == {:ok, :done}
    assert [[1, "foo"], [2, "bar"]] = execute(query, [])
  end
  # Named prepared statements stream through the DBConnection API as well.
  test "simple named prepared stream", context do
    query = prepare("stream", "SELECT * FROM stream")
    assert Mariaex.transaction(context[:pid], fn(conn) ->
      stream = DBConnection.stream(conn, query, [], [])
      assert [%Mariaex.Result{num_rows: 0, rows: []},
              %Mariaex.Result{num_rows: 2, rows: [[1, "foo"], [2, "bar"]]}] =
             Enum.to_list(stream)
      :done
    end) == {:ok, :done}
    assert [[1, "foo"], [2, "bar"]] = execute(query, [])
  end
  # Zipping a stream with itself interleaves two concurrent cursors over
  # the same unnamed prepared query; both must produce identical chunks.
  test "interleaving unnamed prepared stream", context do
    query = prepare("", "SELECT * FROM stream")
    assert Mariaex.transaction(context[:pid], fn(conn) ->
      stream = Mariaex.stream(conn, query, [], [])
      assert [{%Mariaex.Result{num_rows: 0, rows: []},
               %Mariaex.Result{num_rows: 0, rows: []}},
              {%Mariaex.Result{num_rows: 2, rows: [[1, "foo"], [2, "bar"]]},
               %Mariaex.Result{num_rows: 2, rows: [[1, "foo"], [2, "bar"]]}}] =
             Enum.zip(stream, stream)
      :done
    end) == {:ok, :done}
    assert [[1, "foo"], [2, "bar"]] = execute(query, [])
  end
  # Same interleaving guarantee for a named prepared statement.
  test "interleaving named prepared stream", context do
    query = prepare("stream", "SELECT * FROM stream")
    assert Mariaex.transaction(context[:pid], fn(conn) ->
      stream = Mariaex.stream(conn, query, [], [])
      assert [{%Mariaex.Result{num_rows: 0, rows: []},
               %Mariaex.Result{num_rows: 0, rows: []}},
              {%Mariaex.Result{num_rows: 2, rows: [[1, "foo"], [2, "bar"]]},
               %Mariaex.Result{num_rows: 2, rows: [[1, "foo"], [2, "bar"]]}}] =
             Enum.zip(stream, stream)
      :done
    end) == {:ok, :done}
    assert [[1, "foo"], [2, "bar"]] = execute(query, [])
  end
  # max_rows: 1 forces one chunk per row plus leading/trailing empty chunks.
  test "split on max_rows stream", context do
    query = prepare("stream", "SELECT * FROM stream")
    assert Mariaex.transaction(context[:pid], fn(conn) ->
      stream = DBConnection.stream(conn, query, [], [max_rows: 1])
      assert [%Mariaex.Result{num_rows: 0, rows: []},
              %Mariaex.Result{num_rows: 1, rows: [[1, "foo"]]},
              %Mariaex.Result{num_rows: 1, rows: [[2, "bar"]]},
              %Mariaex.Result{num_rows: 0, rows: []}] =
             Enum.to_list(stream)
      :done
    end) == {:ok, :done}
    assert [[1, "foo"], [2, "bar"]] = execute(query, [])
  end
  # Halting after the first chunk must still leave the connection usable.
  test "take first result with stream", context do
    query = prepare("stream", "SELECT * FROM stream")
    assert Mariaex.transaction(context[:pid], fn(conn) ->
      stream = DBConnection.stream(conn, query, [], [])
      assert [%Mariaex.Result{num_rows: 0, rows: []}] = Enum.take(stream, 1)
      :done
    end) == {:ok, :done}
    assert [[1, "foo"], [2, "bar"]] = execute(query, [])
  end
  # DML through a stream: a single result with the affected-row count, no rows.
  test "insert stream", context do
    assert Mariaex.transaction(context[:pid], fn(conn) ->
      stream = Mariaex.stream(conn, "UPDATE stream SET text='foo' WHERE id=?", [1], [])
      assert [%Mariaex.Result{num_rows: 1, rows: nil}] = Enum.to_list(stream)
      :done
    end) == {:ok, :done}
  end
  # A SELECT matching nothing still yields the two (empty) binary chunks.
  test "select empty rows stream", context do
    assert Mariaex.transaction(context[:pid], fn(conn) ->
      stream = Mariaex.stream(conn, "SELECT * FROM stream WHERE id=?", [42], [])
      assert [%Mariaex.Result{num_rows: 0, rows: []},
              %Mariaex.Result{num_rows: 0, rows: []}] = Enum.to_list(stream)
      :done
    end) == {:ok, :done}
  end
  # Streaming a stored procedure returns the procedure's result set and the
  # connection must stay usable for a follow-up query.
  test "call procedure stream", context do
    sql =
      """
      CREATE PROCEDURE streamproc ()
      BEGIN
        SELECT * FROM stream;
      END
      """
    assert :ok = query(sql, [])
    assert Mariaex.transaction(context[:pid], fn(conn) ->
      stream = Mariaex.stream(conn, "CALL streamproc()", [], [])
      assert [%Mariaex.Result{num_rows: 2, rows: [[1, "foo"], [2, "bar"]]}] = Enum.to_list(stream)
      assert %Mariaex.Result{rows: [[42]]} = Mariaex.query!(conn, "SELECT 42", [])
      :done
    end) == {:ok, :done}
  end
  # Opens a test connection; backoff_type: :stop makes failures fatal
  # instead of retrying, so broken credentials surface immediately.
  defp connect() do
    opts = [database: "mariaex_test", username: "mariaex_user", password: "mariaex_pass", backoff_type: :stop]
    Mariaex.Connection.start_link(opts)
  end
end
| 37.454545 | 110 | 0.567441 |
1ca178b8a69794844f1e2ed5370179b923ec0283 | 943 | ex | Elixir | lib/exploring_elixir/e001/jsonfilter.ex | arcseldon/exploring-elixir | 0115aed80e5905384e7277dfe740d09e3a496b7b | [
"Apache-2.0"
] | 30 | 2017-07-03T23:53:37.000Z | 2021-03-14T21:27:14.000Z | lib/exploring_elixir/e001/jsonfilter.ex | arcseldon/exploring-elixir | 0115aed80e5905384e7277dfe740d09e3a496b7b | [
"Apache-2.0"
] | 1 | 2018-05-25T20:00:24.000Z | 2018-05-30T09:40:09.000Z | lib/exploring_elixir/e001/jsonfilter.ex | aseigo/exploring-elixir | 0115aed80e5905384e7277dfe740d09e3a496b7b | [
"Apache-2.0"
] | 4 | 2017-07-27T09:07:17.000Z | 2019-05-22T11:14:25.000Z | defmodule ExploringElixir.JSONFilter do
  # Spawns a monitored worker that decodes `json` and extracts `key`,
  # then relays the worker's progress/data/outcome messages to `pid`.
  def extract(pid, json, key) when is_pid(pid) and is_binary(json) and is_binary(key) do
    # self() here is the caller of extract/3: the worker reports to us,
    # and wait_for_response/1 forwards everything on to `pid`.
    {_worker_pid, _monitor_ref} = spawn_monitor(__MODULE__, :extract_data, [self(), json, key])
    wait_for_response pid
  end
  # Relay loop: forwards worker messages to `pid` until the monitored
  # worker goes down (success or failure) or no message arrives for 1s.
  def wait_for_response(pid) do
    receive do
      # Normal exit = worker finished; any other reason is reported as error.
      {:DOWN, _monitor, _func, _pid, :normal} -> Process.send pid, "Processing successful!", []
      {:DOWN, _monitor, _func, _pid, reason} -> Process.send pid, {:error, "Processing failed: #{inspect reason}"}, []
      # Anything else (progress tuples, extracted data) is passed through
      # verbatim and the loop continues.
      data ->
        Process.send pid, data, []
        wait_for_response pid
    after
      1_000 -> Process.send pid, {:error, "Timeout"}, []
    end
  end
  # Worker body: decodes the JSON and sends progress markers plus the value
  # under `key` back to `pid`. The match on {:ok, term} deliberately crashes
  # on invalid JSON — the monitor in extract/3 reports that as a failure.
  def extract_data(pid, json, key) when is_pid(pid) and is_binary(json) and is_binary(key) do
    {:ok, term} = Poison.decode json
    Process.send pid, {:progress, 50}, []
    # term[key] is nil when the key is absent; that nil is forwarded as-is.
    Process.send pid, term[key], []
    Process.send pid, {:progress, 100}, []
  end
| 36.269231 | 118 | 0.654295 |
1ca17b571a17bf83ea9b265d0082fd96dd8c09f4 | 1,521 | exs | Elixir | elixir/word-count/word_count_test.exs | ArtemGordinsky/exercism | 74e91e3b48ca920803474ec435c13eac66351b9f | [
"Unlicense"
] | null | null | null | elixir/word-count/word_count_test.exs | ArtemGordinsky/exercism | 74e91e3b48ca920803474ec435c13eac66351b9f | [
"Unlicense"
] | null | null | null | elixir/word-count/word_count_test.exs | ArtemGordinsky/exercism | 74e91e3b48ca920803474ec435c13eac66351b9f | [
"Unlicense"
] | null | null | null | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("word_count.exs", __DIR__)
end
ExUnit.start
ExUnit.configure exclude: :pending, trace: true
defmodule WordsTest do
use ExUnit.Case
  # Basic counting: single word and distinct words.
  test "count one word" do
    assert Words.count("word") == %{ "word" => 1 }
  end
  test "count one of each" do
    expected = %{ "one" => 1 , "of" => 1 , "each" => 1 }
    assert Words.count("one of each") == expected
  end
  test "count multiple occurrences" do
    expected = %{ "one" => 1 , "fish" => 4 , "two" => 1 , "red" => 1 , "blue" => 1 }
    assert Words.count("one fish two fish red fish blue fish") == expected
  end
  # Punctuation is stripped, but digits count as words.
  test "ignore punctuation" do
    expected = %{"car" => 1, "carpet" => 1, "as" => 1, "java" => 1, "javascript" => 1}
    assert Words.count("car : carpet as java : javascript!!&@$%^&") == expected
  end
  test "include numbers" do
    expected = %{"testing" => 2, "1" => 1, "2" => 1}
    assert Words.count("testing, 1, 2 testing") == expected
  end
  # Hyphenated words stay joined; underscores act as separators.
  test "hyphens" do
    expected = %{"co-operative" => 1}
    assert Words.count("co-operative") == expected
  end
  test "ignore underscores" do
    expected = %{"two" => 1, "words" => 1}
    assert Words.count("two_words") == expected
  end
  # Counting is case-insensitive, including non-ASCII letters.
  test "normalize case" do
    expected = %{"go" => 3}
    assert Words.count("go Go GO") == expected
  end
  test "German" do
    expected = %{"götterfunken" => 1, "schöner" => 1, "freude" => 1}
    assert Words.count("Freude schöner Götterfunken") == expected
  end
end
| 24.142857 | 88 | 0.595003 |
1ca18df260021650f053e5c2bee4332fc86122cc | 16,075 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/api/routines.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/api/routines.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/api/routines.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQuery.V2.Api.Routines do
@moduledoc """
API calls for all endpoints tagged `Routines`.
"""
alias GoogleApi.BigQuery.V2.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Deletes the routine specified by routineId from the dataset.
## Parameters
* `connection` (*type:* `GoogleApi.BigQuery.V2.Connection.t`) - Connection to server
* `project_id` (*type:* `String.t`) - Required. Project ID of the routine to delete
* `dataset_id` (*type:* `String.t`) - Required. Dataset ID of the routine to delete
* `routine_id` (*type:* `String.t`) - Required. Routine ID of the routine to delete
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %{}}` on success
* `{:error, info}` on failure
"""
  @spec bigquery_routines_delete(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) :: {:ok, nil} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()}
  def bigquery_routines_delete(
        connection,
        project_id,
        dataset_id,
        routine_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each supported optional parameter to where it goes in the request.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }
    # Build the DELETE request; path segments are percent-encoded per RFC 3986.
    request =
      Request.new()
      |> Request.method(:delete)
      |> Request.url(
        "/bigquery/v2/projects/{+projectId}/datasets/{+datasetId}/routines/{+routineId}",
        %{
          "projectId" => URI.encode(project_id, &URI.char_unreserved?/1),
          "datasetId" => URI.encode(dataset_id, &URI.char_unreserved?/1),
          "routineId" => URI.encode(routine_id, &URI.char_unreserved?/1)
        }
      )
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    # DELETE has an empty success body, so decoding is skipped.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [decode: false])
  end
@doc """
Gets the specified routine resource by routine ID.
## Parameters
* `connection` (*type:* `GoogleApi.BigQuery.V2.Connection.t`) - Connection to server
* `project_id` (*type:* `String.t`) - Required. Project ID of the requested routine
* `dataset_id` (*type:* `String.t`) - Required. Dataset ID of the requested routine
* `routine_id` (*type:* `String.t`) - Required. Routine ID of the requested routine
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:readMask` (*type:* `String.t`) - If set, only the Routine fields in the field mask are returned in the
response. If unset, all Routine fields are returned.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQuery.V2.Model.Routine{}}` on success
* `{:error, info}` on failure
"""
  @spec bigquery_routines_get(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.BigQuery.V2.Model.Routine.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def bigquery_routines_get(
        connection,
        project_id,
        dataset_id,
        routine_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Supported optional parameters; :readMask narrows the returned fields.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :readMask => :query
    }
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url(
        "/bigquery/v2/projects/{+projectId}/datasets/{+datasetId}/routines/{+routineId}",
        %{
          "projectId" => URI.encode(project_id, &URI.char_unreserved?/1),
          "datasetId" => URI.encode(dataset_id, &URI.char_unreserved?/1),
          "routineId" => URI.encode(routine_id, &URI.char_unreserved?/1)
        }
      )
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    # Success bodies are decoded into the Routine model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.BigQuery.V2.Model.Routine{}])
  end
@doc """
Creates a new routine in the dataset.
## Parameters
* `connection` (*type:* `GoogleApi.BigQuery.V2.Connection.t`) - Connection to server
* `project_id` (*type:* `String.t`) - Required. Project ID of the new routine
* `dataset_id` (*type:* `String.t`) - Required. Dataset ID of the new routine
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:body` (*type:* `GoogleApi.BigQuery.V2.Model.Routine.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQuery.V2.Model.Routine{}}` on success
* `{:error, info}` on failure
"""
  @spec bigquery_routines_insert(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.BigQuery.V2.Model.Routine.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def bigquery_routines_insert(
        connection,
        project_id,
        dataset_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # :body carries the Routine payload; all other options go on the query string.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :body => :body
    }
    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/bigquery/v2/projects/{+projectId}/datasets/{+datasetId}/routines", %{
        "projectId" => URI.encode(project_id, &URI.char_unreserved?/1),
        "datasetId" => URI.encode(dataset_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    # The created routine is decoded back into the Routine model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.BigQuery.V2.Model.Routine{}])
  end
@doc """
Lists all routines in the specified dataset. Requires the READER dataset
role.
## Parameters
* `connection` (*type:* `GoogleApi.BigQuery.V2.Connection.t`) - Connection to server
* `project_id` (*type:* `String.t`) - Required. Project ID of the routines to list
* `dataset_id` (*type:* `String.t`) - Required. Dataset ID of the routines to list
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:filter` (*type:* `String.t`) - If set, then only the Routines matching this filter are returned.
The current supported form is either "routine_type:<RoutineType>" or
"routineType:<RoutineType>", where <RoutineType> is a RoutineType enum.
Example: "routineType:SCALAR_FUNCTION".
* `:maxResults` (*type:* `integer()`) - The maximum number of results to return in a single response page.
Leverage the page tokens to iterate through the entire collection.
* `:pageToken` (*type:* `String.t`) - Page token, returned by a previous call, to request the next page of
results
* `:readMask` (*type:* `String.t`) - If set, then only the Routine fields in the field mask, as well as
project_id, dataset_id and routine_id, are returned in the response.
If unset, then the following Routine fields are returned:
etag, project_id, dataset_id, routine_id, routine_type, creation_time,
last_modified_time, and language.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQuery.V2.Model.ListRoutinesResponse{}}` on success
* `{:error, info}` on failure
"""
  @spec bigquery_routines_list(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.BigQuery.V2.Model.ListRoutinesResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def bigquery_routines_list(
        connection,
        project_id,
        dataset_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # :filter/:maxResults/:pageToken/:readMask support filtered, paginated listing.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :filter => :query,
      :maxResults => :query,
      :pageToken => :query,
      :readMask => :query
    }
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/bigquery/v2/projects/{+projectId}/datasets/{+datasetId}/routines", %{
        "projectId" => URI.encode(project_id, &URI.char_unreserved?/1),
        "datasetId" => URI.encode(dataset_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    # One page of results is decoded into ListRoutinesResponse.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.BigQuery.V2.Model.ListRoutinesResponse{}])
  end
@doc """
Updates information in an existing routine. The update method replaces the
entire Routine resource.
## Parameters
* `connection` (*type:* `GoogleApi.BigQuery.V2.Connection.t`) - Connection to server
* `project_id` (*type:* `String.t`) - Required. Project ID of the routine to update
* `dataset_id` (*type:* `String.t`) - Required. Dataset ID of the routine to update
* `routine_id` (*type:* `String.t`) - Required. Routine ID of the routine to update
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:body` (*type:* `GoogleApi.BigQuery.V2.Model.Routine.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQuery.V2.Model.Routine{}}` on success
* `{:error, info}` on failure
"""
@spec bigquery_routines_update(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQuery.V2.Model.Routine.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigquery_routines_update(
connection,
project_id,
dataset_id,
routine_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:put)
|> Request.url(
"/bigquery/v2/projects/{+projectId}/datasets/{+datasetId}/routines/{+routineId}",
%{
"projectId" => URI.encode(project_id, &URI.char_unreserved?/1),
"datasetId" => URI.encode(dataset_id, &URI.char_unreserved?/1),
"routineId" => URI.encode(routine_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.BigQuery.V2.Model.Routine{}])
end
end
| 41.753247 | 187 | 0.617729 |
1ca19ace240fdca2146a0d469610aae68058c691 | 21,792 | ex | Elixir | lib/elixir/lib/io.ex | frerich/elixir | c799fcdcf7f9ed8b49f6de9146ede41642a491d3 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/io.ex | frerich/elixir | c799fcdcf7f9ed8b49f6de9146ede41642a491d3 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/io.ex | frerich/elixir | c799fcdcf7f9ed8b49f6de9146ede41642a491d3 | [
"Apache-2.0"
] | null | null | null | defmodule IO do
@moduledoc ~S"""
Functions handling input/output (IO).
Many functions in this module expect an IO device as an argument.
An IO device must be a PID or an atom representing a process.
For convenience, Elixir provides `:stdio` and `:stderr` as
shortcuts to Erlang's `:standard_io` and `:standard_error`.
The majority of the functions expect chardata. In case another type is given,
functions will convert those types to string via the `String.Chars` protocol
(as shown in typespecs). For more information on chardata, see the
"IO data" section below.
## IO devices
An IO device may be an atom or a PID. In case it is an atom,
the atom must be the name of a registered process. In addition,
Elixir provides two shortcuts:
* `:stdio` - a shortcut for `:standard_io`, which maps to
the current `Process.group_leader/0` in Erlang
* `:stderr` - a shortcut for the named process `:standard_error`
provided in Erlang
IO devices maintain their position, which means subsequent calls to any
reading or writing functions will start from the place where the device
was last accessed. The position of files can be changed using the
`:file.position/2` function.
## IO data
IO data is a data type that can be used as a more efficient alternative to binaries
in certain situations.
A term of type **IO data** is a binary or a list containing bytes (integers within the `0..255` range)
or nested IO data. The type is recursive. Let's see an example of one of
the possible IO data representing the binary `"hello"`:
[?h, "el", ["l", [?o]]]
The built-in `t:iodata/0` type is defined in terms of `t:iolist/0`. An IO list is
the same as IO data but it doesn't allow for a binary at the top level (but binaries
are still allowed in the list itself).
### Use cases for IO data
IO data exists because often you need to do many append operations
on smaller chunks of binaries in order to create a bigger binary. However, in
Erlang and Elixir concatenating binaries will copy the concatenated binaries
into a new binary.
def email(username, domain) do
username <> "@" <> domain
end
In this function, creating the email address will copy the `username` and `domain`
binaries. Now imagine you want to use the resulting email inside another binary:
def welcome_message(name, username, domain) do
"Welcome #{name}, your email is: #{email(username, domain)}"
end
IO.puts(welcome_message("Meg", "meg", "example.com"))
#=> "Welcome Meg, your email is: meg@example.com"
Every time you concatenate binaries or use interpolation (`#{}`) you are making
copies of those binaries. However, in many cases you don't need the complete
binary while you create it, but only at the end to print it out or send it
somewhere. In such cases, you can construct the binary by creating IO data:
def email(username, domain) do
[username, ?@, domain]
end
def welcome_message(name, username, domain) do
["Welcome ", name, ", your email is: ", email(username, domain)]
end
IO.puts(welcome_message("Meg", "meg", "example.com"))
#=> "Welcome Meg, your email is: meg@example.com"
Building IO data is cheaper than concatenating binaries. Concatenating multiple
pieces of IO data just means putting them together inside a list since IO data
can be arbitrarily nested, and that's a cheap and efficient operation. Most of
the IO-based APIs, such as `:gen_tcp` and `IO`, receive IO data and write it
to the socket directly without converting it to binary.
One drawback of IO data is that you can't do things like pattern match on the
first part of a piece of IO data like you can with a binary, because you usually
don't know the shape of the IO data. In those cases, you may need to convert it
to a binary by calling `iodata_to_binary/1`, which is reasonably efficient
since it's implemented natively in C. Other functionality, like computing the
length of IO data, can be computed directly on the iodata by calling `iodata_length/1`.
### Chardata
Erlang and Elixir also have the idea of `t:chardata/0`. Chardata is very
similar to IO data: the only difference is that integers in IO data represent
bytes while integers in chardata represent Unicode code points. Bytes
(`t:byte/0`) are integers within the `0..255` range, while Unicode code points
(`t:char/0`) are integers within the `0..0x10FFFF` range. The `IO` module provides
the `chardata_to_string/1` function for chardata as the "counter-part" of the
`iodata_to_binary/1` function for IO data.
If you try to use `iodata_to_binary/1` on chardata, it will result in an
argument error. For example, let's try to put a code point that is not
representable with one byte, like `?π`, inside IO data:
IO.iodata_to_binary(["The symbol for pi is: ", ?π])
#=> ** (ArgumentError) argument error
If we use chardata instead, it will work as expected:
iex> IO.chardata_to_string(["The symbol for pi is: ", ?π])
"The symbol for pi is: π"
"""
@type device :: atom | pid
@type nodata :: {:error, term} | :eof
@type chardata :: String.t() | maybe_improper_list(char | chardata, String.t() | [])
defguardp is_device(term) when is_atom(term) or is_pid(term)
defguardp is_iodata(data) when is_list(data) or is_binary(data)
@doc """
Reads from the IO `device`.
The `device` is iterated by the given number of characters, line by line if
`:line` is given, or until `:eof`.
It returns:
* `data` - the output characters
* `:eof` - end of file was encountered
* `{:error, reason}` - other (rare) error condition;
for instance, `{:error, :estale}` if reading from an
NFS volume
"""
@spec read(device, :eof | :line | non_neg_integer) :: chardata | nodata
def read(device \\ :stdio, line_or_chars)
# TODO: Deprecate me on v1.17
def read(device, :all) do
with :eof <- read(device, :eof) do
with [_ | _] = opts <- :io.getopts(device),
false <- Keyword.get(opts, :binary, true) do
''
else
_ -> ""
end
end
end
def read(device, :eof) do
getn(device, '', :eof)
end
def read(device, :line) do
:io.get_line(map_dev(device), '')
end
def read(device, count) when is_integer(count) and count >= 0 do
:io.get_chars(map_dev(device), '', count)
end
@doc """
Reads from the IO `device`. The operation is Unicode unsafe.
The `device` is iterated by the given number of bytes, line by line if
`:line` is given, or until `:eof`.
It returns:
* `data` - the output bytes
* `:eof` - end of file was encountered
* `{:error, reason}` - other (rare) error condition;
for instance, `{:error, :estale}` if reading from an
NFS volume
Note: do not use this function on IO devices in Unicode mode
as it will return the wrong result.
"""
@spec binread(device, :eof | :line | non_neg_integer) :: iodata | nodata
def binread(device \\ :stdio, line_or_chars)
# TODO: Deprecate me on v1.17
def binread(device, :all) do
with :eof <- binread(device, :eof), do: ""
end
def binread(device, :eof) do
binread_eof(map_dev(device), "")
end
def binread(device, :line) do
case :file.read_line(map_dev(device)) do
{:ok, data} -> data
other -> other
end
end
def binread(device, count) when is_integer(count) and count >= 0 do
case :file.read(map_dev(device), count) do
{:ok, data} -> data
other -> other
end
end
@read_all_size 4096
defp binread_eof(mapped_dev, acc) do
case :file.read(mapped_dev, @read_all_size) do
{:ok, data} -> binread_eof(mapped_dev, acc <> data)
:eof -> if acc == "", do: :eof, else: acc
other -> other
end
end
@doc """
Writes `chardata` to the given `device`.
By default, the `device` is the standard output.
## Examples
IO.write("sample")
#=> sample
IO.write(:stderr, "error")
#=> error
"""
@spec write(device, chardata | String.Chars.t()) :: :ok
def write(device \\ :stdio, chardata) do
:io.put_chars(map_dev(device), to_chardata(chardata))
end
@doc """
Writes `iodata` to the given `device`.
This operation is meant to be used with "raw" devices
that are started without an encoding. The given `iodata`
is written as is to the device, without conversion. For
more information on IO data, see the "IO data" section in
the module documentation.
Use `write/2` for devices with encoding.
Important: do **not** use this function on IO devices in
Unicode mode as it will write the wrong data. In particular,
the standard IO device is set to Unicode by default, so writing
to stdio with this function will likely result in the wrong data
being sent down the wire.
"""
@spec binwrite(device, iodata) :: :ok | {:error, term}
def binwrite(device \\ :stdio, iodata) when is_iodata(iodata) do
:file.write(map_dev(device), iodata)
end
@doc """
Writes `item` to the given `device`, similar to `write/2`,
but adds a newline at the end.
By default, the `device` is the standard output. It returns `:ok`
if it succeeds.
## Examples
IO.puts("Hello World!")
#=> Hello World!
IO.puts(:stderr, "error")
#=> error
"""
@spec puts(device, chardata | String.Chars.t()) :: :ok
def puts(device \\ :stdio, item) when is_device(device) do
:io.put_chars(map_dev(device), [to_chardata(item), ?\n])
end
@doc """
Writes a `message` to stderr, along with the given `stacktrace`.
This function also notifies the compiler a warning was printed
(in case --warnings-as-errors was enabled). It returns `:ok`
if it succeeds.
An empty list can be passed to avoid stacktrace printing.
## Examples
stacktrace = [{MyApp, :main, 1, [file: 'my_app.ex', line: 4]}]
IO.warn("variable bar is unused", stacktrace)
#=> warning: variable bar is unused
#=> my_app.ex:4: MyApp.main/1
"""
@spec warn(chardata | String.Chars.t(), Exception.stacktrace()) :: :ok
def warn(message, []) do
message = [to_chardata(message), ?\n]
:elixir_errors.io_warn(0, nil, message, message)
end
def warn(message, [{_, _, _, opts} | _] = stacktrace) do
message = to_chardata(message)
formatted_trace = Enum.map_join(stacktrace, "\n ", &Exception.format_stacktrace_entry(&1))
line = opts[:line]
file = opts[:file]
:elixir_errors.io_warn(
line || 0,
file && List.to_string(file),
message,
[message, ?\n, " ", formatted_trace, ?\n]
)
end
@doc false
def warn_once(key, message, stacktrace_drop_levels) do
{:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)
stacktrace = Enum.drop(stacktrace, stacktrace_drop_levels)
if :elixir_config.warn(key, stacktrace) do
warn(message, stacktrace)
else
:ok
end
end
@doc """
Writes a `message` to stderr, along with the current stacktrace.
It returns `:ok` if it succeeds.
Do not call this function at the tail of another function. Due to tail
call optimization, a stacktrace entry would not be added and the
stacktrace would be incorrectly trimmed. Therefore make sure at least
one expression (or an atom such as `:ok`) follows the `IO.warn/1` call.
## Examples
IO.warn("variable bar is unused")
#=> warning: variable bar is unused
#=> (iex) evaluator.ex:108: IEx.Evaluator.eval/4
"""
@spec warn(chardata | String.Chars.t()) :: :ok
def warn(message) do
{:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)
warn(message, Enum.drop(stacktrace, 2))
end
@doc """
Inspects and writes the given `item` to the device.
It's important to note that it returns the given `item` unchanged.
This makes it possible to "spy" on values by inserting an
`IO.inspect/2` call almost anywhere in your code, for example,
in the middle of a pipeline.
It enables pretty printing by default with width of
80 characters. The width can be changed by explicitly
passing the `:width` option.
The output can be decorated with a label, by providing the `:label`
option to easily distinguish it from other `IO.inspect/2` calls.
The label will be printed before the inspected `item`.
See `Inspect.Opts` for a full list of remaining formatting options.
## Examples
IO.inspect(<<0, 1, 2>>, width: 40)
Prints:
<<0, 1, 2>>
We can use the `:label` option to decorate the output:
IO.inspect(1..100, label: "a wonderful range")
Prints:
a wonderful range: 1..100
The `:label` option is especially useful with pipelines:
[1, 2, 3]
|> IO.inspect(label: "before")
|> Enum.map(&(&1 * 2))
|> IO.inspect(label: "after")
|> Enum.sum()
Prints:
before: [1, 2, 3]
after: [2, 4, 6]
"""
@spec inspect(item, keyword) :: item when item: var
def inspect(item, opts \\ []) do
inspect(:stdio, item, opts)
end
@doc """
Inspects `item` according to the given options using the IO `device`.
See `inspect/2` for a full list of options.
"""
@spec inspect(device, item, keyword) :: item when item: var
def inspect(device, item, opts) when is_device(device) and is_list(opts) do
label = if label = opts[:label], do: [to_chardata(label), ": "], else: []
opts = Inspect.Opts.new(opts)
doc = Inspect.Algebra.group(Inspect.Algebra.to_doc(item, opts))
chardata = Inspect.Algebra.format(doc, opts.width)
puts(device, [label, chardata])
item
end
@doc """
Gets a number of bytes from IO device `:stdio`.
If `:stdio` is a Unicode device, `count` implies
the number of Unicode code points to be retrieved.
Otherwise, `count` is the number of raw bytes to be retrieved.
See `IO.getn/3` for a description of return values.
"""
@spec getn(
device | chardata | String.Chars.t(),
pos_integer | :eof | chardata | String.Chars.t()
) ::
chardata | nodata
def getn(prompt, count \\ 1)
def getn(prompt, :eof) do
getn(:stdio, prompt, :eof)
end
def getn(prompt, count) when is_integer(count) and count > 0 do
getn(:stdio, prompt, count)
end
def getn(device, prompt) when not is_integer(prompt) do
getn(device, prompt, 1)
end
@doc """
Gets a number of bytes from the IO `device`.
If the IO `device` is a Unicode device, `count` implies
the number of Unicode code points to be retrieved.
Otherwise, `count` is the number of raw bytes to be retrieved.
It returns:
* `data` - the input characters
* `:eof` - end of file was encountered
* `{:error, reason}` - other (rare) error condition;
for instance, `{:error, :estale}` if reading from an
NFS volume
"""
@spec getn(device, chardata | String.Chars.t(), pos_integer | :eof) :: chardata | nodata
def getn(device, prompt, :eof) do
getn_eof(map_dev(device), to_chardata(prompt), [])
end
def getn(device, prompt, count) when is_integer(count) and count > 0 do
:io.get_chars(map_dev(device), to_chardata(prompt), count)
end
defp getn_eof(device, prompt, acc) do
case :io.get_line(device, prompt) do
line when is_binary(line) or is_list(line) -> getn_eof(device, '', [line | acc])
:eof -> wrap_eof(:lists.reverse(acc))
other -> other
end
end
defp wrap_eof([h | _] = acc) when is_binary(h), do: IO.iodata_to_binary(acc)
defp wrap_eof([h | _] = acc) when is_list(h), do: :lists.flatten(acc)
defp wrap_eof([]), do: :eof
@doc ~S"""
Reads a line from the IO `device`.
It returns:
* `data` - the characters in the line terminated
by a line-feed (LF) or end of file (EOF)
* `:eof` - end of file was encountered
* `{:error, reason}` - other (rare) error condition;
for instance, `{:error, :estale}` if reading from an
NFS volume
## Examples
To display "What is your name?" as a prompt and await user input:
IO.gets("What is your name?\n")
"""
@spec gets(device, chardata | String.Chars.t()) :: chardata | nodata
def gets(device \\ :stdio, prompt) do
:io.get_line(map_dev(device), to_chardata(prompt))
end
@doc """
Returns a line-based `IO.Stream` on `:stdio`.
This is equivalent to:
IO.stream(:stdio, :line)
"""
@doc since: "1.12.0"
def stream, do: stream(:stdio, :line)
@doc """
Converts the IO `device` into an `IO.Stream`.
An `IO.Stream` implements both `Enumerable` and
`Collectable`, allowing it to be used for both read
and write.
The `device` is iterated by the given number of characters or line by line if
`:line` is given.
This reads from the IO as UTF-8. Check out
`IO.binstream/2` to handle the IO as a raw binary.
Note that an IO stream has side effects and every time
you go over the stream you may get different results.
`stream/1` has been introduced in Elixir v1.12.0,
while `stream/2` has been available since v1.0.0.
## Examples
Here is an example on how we mimic an echo server
from the command line:
Enum.each(IO.stream(:stdio, :line), &IO.write(&1))
"""
@spec stream(device, :line | pos_integer) :: Enumerable.t()
def stream(device \\ :stdio, line_or_codepoints)
when line_or_codepoints == :line
when is_integer(line_or_codepoints) and line_or_codepoints > 0 do
IO.Stream.__build__(map_dev(device), false, line_or_codepoints)
end
@doc """
Returns a raw, line-based `IO.Stream` on `:stdio`. The operation is Unicode unsafe.
This is equivalent to:
IO.binstream(:stdio, :line)
"""
@doc since: "1.12.0"
def binstream, do: binstream(:stdio, :line)
@doc """
Converts the IO `device` into an `IO.Stream`. The operation is Unicode unsafe.
An `IO.Stream` implements both `Enumerable` and
`Collectable`, allowing it to be used for both read
and write.
The `device` is iterated by the given number of bytes or line by line if
`:line` is given. This reads from the IO device as a raw binary.
Note that an IO stream has side effects and every time
you go over the stream you may get different results.
Finally, do not use this function on IO devices in Unicode
mode as it will return the wrong result.
`binstream/1` has been introduced in Elixir v1.12.0,
while `binstream/2` has been available since v1.0.0.
"""
@spec binstream(device, :line | pos_integer) :: Enumerable.t()
def binstream(device \\ :stdio, line_or_bytes)
when line_or_bytes == :line
when is_integer(line_or_bytes) and line_or_bytes > 0 do
IO.Stream.__build__(map_dev(device), true, line_or_bytes)
end
@doc """
Converts chardata into a string.
For more information about chardata, see the ["Chardata"](#module-chardata)
section in the module documentation.
In case the conversion fails, it raises an `UnicodeConversionError`.
If a string is given, it returns the string itself.
## Examples
iex> IO.chardata_to_string([0x00E6, 0x00DF])
"æß"
iex> IO.chardata_to_string([0x0061, "bc"])
"abc"
iex> IO.chardata_to_string("string")
"string"
"""
@spec chardata_to_string(chardata) :: String.t()
def chardata_to_string(chardata)
def chardata_to_string(string) when is_binary(string) do
string
end
def chardata_to_string(list) when is_list(list) do
List.to_string(list)
end
@doc """
Converts IO data into a binary
The operation is Unicode unsafe.
Note that this function treats integers in the given IO data as
raw bytes and does not perform any kind of encoding conversion.
If you want to convert from a charlist to a UTF-8-encoded string,
use `chardata_to_string/1` instead. For more information about
IO data and chardata, see the ["IO data"](#module-io-data) section in the
module documentation.
If this function receives a binary, the same binary is returned.
Inlined by the compiler.
## Examples
iex> bin1 = <<1, 2, 3>>
iex> bin2 = <<4, 5>>
iex> bin3 = <<6>>
iex> IO.iodata_to_binary([bin1, 1, [2, 3, bin2], 4 | bin3])
<<1, 2, 3, 1, 2, 3, 4, 5, 4, 6>>
iex> bin = <<1, 2, 3>>
iex> IO.iodata_to_binary(bin)
<<1, 2, 3>>
"""
@spec iodata_to_binary(iodata) :: binary
def iodata_to_binary(iodata) do
:erlang.iolist_to_binary(iodata)
end
@doc """
Returns the size of an IO data.
For more information about IO data, see the ["IO data"](#module-io-data)
section in the module documentation.
Inlined by the compiler.
## Examples
iex> IO.iodata_length([1, 2 | <<3, 4>>])
4
"""
@spec iodata_length(iodata) :: non_neg_integer
def iodata_length(iodata) do
:erlang.iolist_size(iodata)
end
@doc false
def each_stream(device, line_or_codepoints) do
case read(device, line_or_codepoints) do
:eof ->
{:halt, device}
{:error, reason} ->
raise IO.StreamError, reason: reason
data ->
{[data], device}
end
end
@doc false
def each_binstream(device, line_or_chars) do
case binread(device, line_or_chars) do
:eof ->
{:halt, device}
{:error, reason} ->
raise IO.StreamError, reason: reason
data ->
{[data], device}
end
end
@compile {:inline, map_dev: 1, to_chardata: 1}
# Map the Elixir names for standard IO and error to Erlang names
defp map_dev(:stdio), do: :standard_io
defp map_dev(:stderr), do: :standard_error
defp map_dev(other) when is_atom(other) or is_pid(other) or is_tuple(other), do: other
defp to_chardata(list) when is_list(list), do: list
defp to_chardata(other), do: to_string(other)
end
| 30.016529 | 104 | 0.667126 |
1ca1a1857a8cfdbd5ebf81e1607eaa4748ec4ee3 | 1,059 | ex | Elixir | lib/credo/check/consistency/line_endings.ex | jlgeering/credo | b952190ed758c262aa0d9bbee01227f9b1f0c63b | [
"MIT"
] | null | null | null | lib/credo/check/consistency/line_endings.ex | jlgeering/credo | b952190ed758c262aa0d9bbee01227f9b1f0c63b | [
"MIT"
] | null | null | null | lib/credo/check/consistency/line_endings.ex | jlgeering/credo | b952190ed758c262aa0d9bbee01227f9b1f0c63b | [
"MIT"
] | null | null | null | defmodule Credo.Check.Consistency.LineEndings do
@moduledoc """
Windows and *nix systems use different line-endings in files.
While this is not necessarily a concern for the correctness of your code,
you should use a consistent style throughout your codebase.
"""
@explanation [check: @moduledoc]
@collector Credo.Check.Consistency.LineEndings.Collector
use Credo.Check, run_on_all: true, base_priority: :high
@doc false
def run(source_files, exec, params \\ []) when is_list(source_files) do
@collector.find_and_append_issues(source_files, exec, params, &issues_for/3)
end
defp issues_for(expected, source_file, params) do
source_file
|> IssueMeta.for(params)
|> format_issue(message: message_for(expected))
|> List.wrap()
end
defp message_for(:unix = _expected) do
"File is using windows line endings while most of the files use unix line endings."
end
defp message_for(:windows = _expected) do
"File is using unix line endings while most of the files use windows line endings."
end
end
| 30.257143 | 87 | 0.738432 |
1ca1b1b99d98522ca3ccc5a779fdfab13b97af20 | 7,875 | ex | Elixir | lib/logger/lib/logger/formatter.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/logger/lib/logger/formatter.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/logger/lib/logger/formatter.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | import Kernel, except: [inspect: 2]
defmodule Logger.Formatter do
@moduledoc ~S"""
Conveniences for formatting data for logs.
This module allows developers to specify a string that
serves as template for log messages, for example:
$time $metadata[$level] $message\n
Will print error messages as:
18:43:12.439 user_id=13 [error] Hello\n
The valid parameters you can use are:
* `$time` - the time the log message was sent
* `$date` - the date the log message was sent
* `$message` - the log message
* `$level` - the log level
* `$node` - the node that prints the message
* `$metadata` - user controlled data presented in `"key=val key2=val2 "` format
* `$levelpad` - sets to a single space if level is 4 characters long,
otherwise set to the empty space. Used to align the message after level.
Backends typically allow developers to supply such control
strings via configuration files. This module provides `compile/1`,
which compiles the string into a format for fast operations at
runtime and `format/5` to format the compiled pattern into an
actual IO data.
## Metadata
Metadata to be sent to the logger can be read and written with
the `Logger.metadata/0` and `Logger.metadata/1` functions. For example,
you can set `Logger.metadata([user_id: 13])` to add user_id metadata
to the current process. The user can configure the backend to choose
which metadata it wants to print and it will replace the `$metadata`
value.
"""
@type time :: {{1970..10000, 1..12, 1..31}, {0..23, 0..59, 0..59, 0..999}}
@type pattern :: :date | :level | :levelpad | :message | :metadata | :node | :time
@valid_patterns [:time, :date, :message, :level, :node, :metadata, :levelpad]
@default_pattern "\n$time $metadata[$level] $levelpad$message\n"
@replacement "�"
@doc """
Prunes invalid Unicode code points from lists and invalid UTF-8 bytes.
Typically called after formatting when the data cannot be printed.
"""
@spec prune(IO.chardata()) :: IO.chardata()
def prune(binary) when is_binary(binary), do: prune_binary(binary, "")
def prune([h | t]) when h in 0..1_114_111, do: [h | prune(t)]
def prune([h | t]), do: [prune(h) | prune(t)]
def prune([]), do: []
def prune(_), do: @replacement
defp prune_binary(<<h::utf8, t::binary>>, acc), do: prune_binary(t, <<acc::binary, h::utf8>>)
defp prune_binary(<<_, t::binary>>, acc), do: prune_binary(t, <<acc::binary, @replacement>>)
defp prune_binary(<<>>, acc), do: acc
@doc """
Compiles a format string into a data structure that `format/5` can handle.
Check the module doc for documentation on the valid parameters that
will be interpolated in the pattern. If you pass `nil` as the pattern,
the pattern defaults to:
#{inspect(@default_pattern)}
If you want to customize formatting through a custom formatter, you can
pass a `{module, function}` tuple as the `pattern`.
iex> Logger.Formatter.compile("$time $metadata [$level] $message\\n")
[:time, " ", :metadata, " [", :level, "] ", :message, "\\n"]
iex> Logger.Formatter.compile({MyLoggerFormatter, :format})
{MyLoggerFormatter, :format}
"""
@spec compile(binary | nil) :: [pattern | binary]
@spec compile(pattern) :: pattern when pattern: {module, function :: atom}
def compile(pattern)
def compile(nil), do: compile(@default_pattern)
def compile({mod, fun}) when is_atom(mod) and is_atom(fun), do: {mod, fun}
def compile(str) when is_binary(str) do
regex = ~r/(?<head>)\$[a-z]+(?<tail>)/
for part <- Regex.split(regex, str, on: [:head, :tail], trim: true) do
case part do
"$" <> code -> compile_code(String.to_atom(code))
_ -> part
end
end
end
defp compile_code(key) when key in @valid_patterns, do: key
defp compile_code(key) when is_atom(key) do
raise ArgumentError, "$#{key} is an invalid format pattern"
end
@doc """
Formats time as chardata.
"""
@spec format_time({0..23, 0..59, 0..59, 0..999}) :: IO.chardata()
def format_time({hh, mi, ss, ms}) do
[pad2(hh), ?:, pad2(mi), ?:, pad2(ss), ?., pad3(ms)]
end
@doc """
Formats date as chardata.
"""
@spec format_date({1970..10000, 1..12, 1..31}) :: IO.chardata()
def format_date({yy, mm, dd}) do
[Integer.to_string(yy), ?-, pad2(mm), ?-, pad2(dd)]
end
defp pad3(int) when int < 10, do: [?0, ?0, Integer.to_string(int)]
defp pad3(int) when int < 100, do: [?0, Integer.to_string(int)]
defp pad3(int), do: Integer.to_string(int)
defp pad2(int) when int < 10, do: [?0, Integer.to_string(int)]
defp pad2(int), do: Integer.to_string(int)
@doc """
Takes a compiled format and injects the level, timestamp, message, and
metadata keyword list and returns a properly formatted string.
## Examples
iex> pattern = Logger.Formatter.compile("[$level] $message")
iex> timestamp = {{1977, 01, 28}, {13, 29, 00, 000}}
iex> formatted = Logger.Formatter.format(pattern, :info, "hello", timestamp, [])
iex> IO.chardata_to_string(formatted)
"[info] hello"
"""
@spec format({atom, atom} | [pattern | binary], Logger.level(), Logger.message(), time, keyword) ::
IO.chardata()
def format({mod, fun}, level, msg, timestamp, metadata) do
apply(mod, fun, [level, msg, timestamp, metadata])
end
def format(config, level, msg, timestamp, metadata) do
for config_option <- config do
output(config_option, level, msg, timestamp, metadata)
end
end
defp output(:message, _, msg, _, _), do: msg
defp output(:date, _, _, {date, _time}, _), do: format_date(date)
defp output(:time, _, _, {_date, time}, _), do: format_time(time)
defp output(:level, level, _, _, _), do: Atom.to_string(level)
defp output(:node, _, _, _, _), do: Atom.to_string(node())
defp output(:metadata, _, _, _, []), do: ""
defp output(:metadata, _, _, _, meta), do: metadata(meta)
defp output(:levelpad, level, _, _, _), do: levelpad(level)
defp output(other, _, _, _, _), do: other
# TODO: Deprecate me on Elixir v1.13+ or later
defp levelpad(:info), do: " "
defp levelpad(:warn), do: " "
defp levelpad(_), do: ""
defp metadata([{key, value} | metadata]) do
if formatted = metadata(key, value) do
[to_string(key), ?=, formatted, ?\s | metadata(metadata)]
else
metadata(metadata)
end
end
defp metadata([]) do
[]
end
# Formats a single metadata value, returning iodata or nil to skip the pair.
#
# NOTE: clause order is load-bearing. The :time/:gl/:report_cb keys are
# suppressed up front, and the :file/:domain/:mfa/:initial_call clauses
# must come before the generic is_list fallback below them.
defp metadata(:time, _), do: nil
defp metadata(:gl, _), do: nil
defp metadata(:report_cb, _), do: nil
defp metadata(_, nil), do: nil
defp metadata(_, string) when is_binary(string), do: string
defp metadata(_, integer) when is_integer(integer), do: Integer.to_string(integer)
defp metadata(_, float) when is_float(float), do: Float.to_string(float)
# PIDs render as charlists, which is valid iodata for the caller.
defp metadata(_, pid) when is_pid(pid), do: :erlang.pid_to_list(pid)

defp metadata(_, atom) when is_atom(atom) do
  case Atom.to_string(atom) do
    # Elixir module names print without the "Elixir." prefix.
    "Elixir." <> rest -> rest
    "nil" -> ""
    binary -> binary
  end
end

# Drop the leading "#Ref" from :erlang.ref_to_list/1 so only "<x.y.z>" remains.
defp metadata(_, ref) when is_reference(ref) do
  '#Ref' ++ rest = :erlang.ref_to_list(ref)
  rest
end

# :file metadata arrives as a charlist; emit it as-is.
defp metadata(:file, file) when is_list(file), do: file

# Logger :domain metadata is a list of atoms, joined with dots.
defp metadata(:domain, [head | tail]) when is_atom(head) do
  Enum.map_intersperse([head | tail], ?., &Atom.to_string/1)
end

defp metadata(:mfa, {mod, fun, arity})
     when is_atom(mod) and is_atom(fun) and is_integer(arity) do
  Exception.format_mfa(mod, fun, arity)
end

defp metadata(:initial_call, {mod, fun, arity})
     when is_atom(mod) and is_atom(fun) and is_integer(arity) do
  Exception.format_mfa(mod, fun, arity)
end

# Any other list is skipped rather than risking a crash while formatting
# a log line.
defp metadata(_, list) when is_list(list), do: nil

# Last resort: use the String.Chars protocol if the value implements it.
defp metadata(_, other) do
  case String.Chars.impl_for(other) do
    nil -> nil
    impl -> impl.to_string(other)
  end
end
end
| 34.090909 | 101 | 0.651683 |
1ca1b1d8a6f7795ed10fb54cc6722a9f738f5f37 | 5,509 | exs | Elixir | lib/elixir/test/elixir/set_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | 1 | 2015-02-23T00:01:48.000Z | 2015-02-23T00:01:48.000Z | lib/elixir/test/elixir/set_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/set_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
# A TestSet implementation used only for testing.
# A minimal Set-like container backed by a plain list. It exists purely so
# the shared Set tests can exercise interop with a "foreign" set type.
defmodule TestSet do
  defstruct list: []

  # Wraps a list in a TestSet struct; only lists are accepted.
  def new(list \\ []) when is_list(list) do
    struct!(__MODULE__, list: list)
  end

  # Delegates enumeration to the Enumerable implementation for lists.
  def reduce(%__MODULE__{list: list}, acc, fun) do
    Enumerable.reduce(list, acc, fun)
  end

  # Membership check via the underlying list.
  def member?(%__MODULE__{list: list}, value) do
    Enum.member?(list, value)
  end

  # Number of stored elements; O(n) on the backing list.
  def size(%__MODULE__{list: list}) do
    length(list)
  end
end
# Shared, implementation-agnostic Set test suite. `use SetTest.Common`
# injects every test below into the calling module; the caller must define
# `set_impl/0` returning the Set implementation module under test.
defmodule SetTest.Common do
  defmacro __using__(_) do
    # location: :keep makes failures report against this file's lines.
    quote location: :keep do
      # Builds a set of the configured implementation from a list.
      defp new_set(list \\ []) do
        Enum.into list, set_impl.new
      end

      # Same, but mapping each element through `fun` while collecting.
      defp new_set(list, fun) do
        Enum.into list, set_impl.new, fun
      end

      # Common fixture: a set holding the integers 1, 2 and 3.
      defp int_set() do
        Enum.into [1, 2, 3], set_impl.new
      end

      test "delete/2" do
        result = Set.delete(new_set([1, 2, 3]), 2)
        assert Set.equal?(result, new_set([1, 3]))
      end

      # The "with match" tests pin strict (===) semantics: the float 1.0
      # must not be confused with the integer 1.
      test "delete/2 with match" do
        refute Set.member?(Set.delete(int_set, 1), 1)
        assert Set.member?(Set.delete(int_set, 1.0), 1)
      end

      test "difference/2" do
        result = Set.difference(new_set([1, 2, 3]), new_set([3]))
        assert Set.equal?(result, new_set([1, 2]))
      end

      test "difference/2 with match" do
        refute Set.member?(Set.difference(int_set, new_set([1])), 1)
        assert Set.member?(Set.difference(int_set, new_set([1.0])), 1)
      end

      # The "with other set" tests exercise cross-implementation code paths
      # using the list-backed TestSet defined in this file.
      test "difference/2 with other set" do
        result = Set.difference(new_set([1, 2, 3]), TestSet.new([3]))
        assert Set.equal?(result, new_set([1, 2]))
      end

      test "disjoint?/2" do
        assert Set.disjoint?(new_set([1, 2, 3]), new_set([4, 5 ,6]))
        refute Set.disjoint?(new_set([1, 2, 3]), new_set([3, 4 ,5]))
      end

      test "disjoint/2 with other set" do
        assert Set.disjoint?(new_set([1, 2, 3]), TestSet.new([4, 5 ,6]))
        refute Set.disjoint?(new_set([1, 2, 3]), TestSet.new([3, 4 ,5]))
      end

      test "equal?/2" do
        assert Set.equal?(new_set([1, 2, 3]), new_set([3, 2, 1]))
        refute Set.equal?(new_set([1, 2, 3]), new_set([3.0, 2.0, 1.0]))
      end

      test "equal?/2 with other set" do
        assert Set.equal?(new_set([1, 2, 3]), TestSet.new([3, 2, 1]))
        refute Set.equal?(new_set([1, 2, 3]), TestSet.new([3.0, 2.0, 1.0]))
      end

      test "intersection/2" do
        result = Set.intersection(new_set([1, 2, 3]), new_set([2, 3, 4]))
        assert Set.equal?(result, new_set([2, 3]))
      end

      test "intersection/2 with match" do
        assert Set.member?(Set.intersection(int_set, new_set([1])), 1)
        refute Set.member?(Set.intersection(int_set, new_set([1.0])), 1)
      end

      test "intersection/2 with other set" do
        result = Set.intersection(new_set([1, 2, 3]), TestSet.new([2, 3, 4]))
        assert Set.equal?(result, new_set([2, 3]))
      end

      test "member?/2" do
        assert Set.member?(new_set([1, 2, 3]), 2)
        refute Set.member?(new_set([1, 2, 3]), 4)
        refute Set.member?(new_set([1, 2, 3]), 1.0)
      end

      test "put/2" do
        result = Set.put(new_set([1, 2]), 3)
        assert Set.equal?(result, new_set([1, 2, 3]))
      end

      test "put/2 with match" do
        assert Set.size(Set.put(int_set, 1)) == 3
        assert Set.size(Set.put(int_set, 1.0)) == 4
      end

      test "size/1" do
        assert Set.size(new_set([1, 2, 3])) == 3
      end

      test "subset?/2" do
        assert Set.subset?(new_set([1, 2]), new_set([1, 2, 3]))
        refute Set.subset?(new_set([1, 2, 3]), new_set([1, 2]))
      end

      test "subset/2 with match?" do
        assert Set.subset?(new_set([1]), int_set)
        refute Set.subset?(new_set([1.0]), int_set)
      end

      test "subset?/2 with other set" do
        assert Set.subset?(new_set([1, 2]), TestSet.new([1, 2, 3]))
        refute Set.subset?(new_set([1, 2, 3]), TestSet.new([1, 2]))
      end

      test "to_list/1" do
        assert Set.to_list(new_set([1, 2, 3])) |> Enum.sort == [1, 2, 3]
      end

      test "union/2" do
        result = Set.union(new_set([1, 2, 3]), new_set([2, 3, 4]))
        assert Set.equal?(result, new_set([1, 2, 3, 4]))
      end

      test "union/2 with match" do
        assert Set.size(Set.union(int_set, new_set([1]))) == 3
        assert Set.size(Set.union(int_set, new_set([1.0]))) == 4
      end

      test "union/2 with other set" do
        result = Set.union(new_set([1, 2, 3]), TestSet.new([2, 3, 4]))
        assert Set.equal?(result, new_set([1, 2, 3, 4]))
      end

      # Protocol integration checks: Enumerable, Collectable, zipping.
      test "is enumerable" do
        assert Enum.member?(int_set, 1)
        refute Enum.member?(int_set, 1.0)
        assert Enum.sort(int_set) == [1,2,3]
      end

      test "is collectable" do
        assert Set.equal?(new_set([1, 1, 2, 3, 3, 3]), new_set([1, 2, 3]))
        assert Set.equal?(new_set([1, 1, 2, 3, 3, 3], &(&1 * 2)), new_set([2, 4, 6]))
      end

      test "is zippable" do
        set = new_set(1..8)
        list = Dict.to_list(set)
        assert Enum.zip(list, list) == Enum.zip(set, set)

        set = new_set(1..100)
        list = Dict.to_list(set)
        assert Enum.zip(list, list) == Enum.zip(set, set)
      end

      test "unsupported set" do
        assert_raise ArgumentError, "unsupported set: :bad_set", fn ->
          Set.to_list :bad_set
        end
      end
    end
  end
end
# Runs the shared Set test suite (plus the Set doctests) against the
# HashSet implementation.
defmodule Set.HashSetTest do
  use ExUnit.Case, async: true
  use SetTest.Common

  doctest Set

  # Tells SetTest.Common which Set implementation to build sets with.
  def set_impl do
    HashSet
  end
end
| 29.148148 | 85 | 0.559267 |
1ca1bb6ebcf8855d1c6379fbab4c7965055e551f | 263 | exs | Elixir | priv/repo/migrations/20180309212149_add_members_to_projects.exs | sobolevn/docdog-engine | 5f7ad60465063fac3d56574f2bf27a4a52d0a91e | [
"MIT"
] | null | null | null | priv/repo/migrations/20180309212149_add_members_to_projects.exs | sobolevn/docdog-engine | 5f7ad60465063fac3d56574f2bf27a4a52d0a91e | [
"MIT"
] | null | null | null | priv/repo/migrations/20180309212149_add_members_to_projects.exs | sobolevn/docdog-engine | 5f7ad60465063fac3d56574f2bf27a4a52d0a91e | [
"MIT"
] | null | null | null | defmodule Docdog.Repo.Migrations.AddMembersToProjects do
use Ecto.Migration
# Adds a non-null integer-array `members` column (defaulting to an empty
# array) to projects, plus a GIN index over it.
def change do
  alter table(:projects) do
    add(:members, {:array, :integer}, default: [], null: false)
  end

  create(index(:projects, [:members], using: "GIN"))
end
end
| 21.916667 | 64 | 0.680608 |
1ca1d0bf9e924e716f7fc653e5556154ef8d4dfb | 1,281 | ex | Elixir | lib/middleware/simple_policy.ex | spunkedy/sucrose | 57a5f969d878f49e82faf666b7d5c5b21c67c771 | [
"MIT"
] | 2 | 2019-09-23T18:19:26.000Z | 2019-09-30T13:18:06.000Z | lib/middleware/simple_policy.ex | spunkedy/sucrose | 57a5f969d878f49e82faf666b7d5c5b21c67c771 | [
"MIT"
] | null | null | null | lib/middleware/simple_policy.ex | spunkedy/sucrose | 57a5f969d878f49e82faf666b7d5c5b21c67c771 | [
"MIT"
] | null | null | null | defmodule Sucrose.Middleware.SimplePolicy do
alias Absinthe.Resolution
alias Sucrose.Common
@behaviour Absinthe.Middleware
@moduledoc """
A simple policy handler that takes a straightforward approach to Absinthe
resolution handling. All handlers share a common response shape: every
response is normalized through the common helper,
`Sucrose.Common.handle_response/2`.
"""
@error_message :no_proper_resolution_or_config
# Absinthe middleware entry point. When the context carries a claim and the
# config provides a handler module, the handler decides whether the current
# query/mutation is allowed. All other inputs fall to the error clause.
def call(%{context: %{claim: _}} = resolution, %{handler: handler}) do
  summary = simple_resolution(resolution)

  summary
  |> authorize(handler)
  |> Common.handle_response(summary)
rescue
  # NOTE(review): rescuing everything hides real bugs behind a generic
  # error tuple; kept as-is because callers rely on this never raising.
  _ -> {:error, @error_message}
end

def call(_resolution, _config) do
  {:error, @error_message}
end

# Mutations go through can_mutate?/1, everything else through can_query?/1.
defp authorize(%{parent: :mutation} = summary, handler), do: handler.can_mutate?(summary)
defp authorize(summary, handler), do: handler.can_query?(summary)
@doc """
Flattens an Absinthe resolution struct into the small map that policy
handlers receive: the field (child) and parent type identifiers, the
caller's claim from the context, and the raw resolution itself.
"""
# BUGFIX: the spec previously used the literal atom `:atom`, which means
# "exactly the atom :atom" in a typespec; `atom()` is the intended type.
@spec simple_resolution(map()) :: %{
        child: atom(),
        parent: atom(),
        claim: any(),
        resolution: map()
      }
def simple_resolution(resolution) do
  %{
    child: resolution.definition.schema_node.identifier,
    parent: resolution.parent_type.identifier,
    claim: resolution.context.claim,
    resolution: resolution
  }
end
end
| 25.62 | 99 | 0.67057 |
1ca1f1681bcd86f940a3a34783dee99af7dc8c9b | 5,821 | exs | Elixir | priv/release.exs | cohawk/petal_components | 2741c4de66fa8dfa2896fe1e6e02f1d1653422cd | [
"MIT"
] | null | null | null | priv/release.exs | cohawk/petal_components | 2741c4de66fa8dfa2896fe1e6e02f1d1653422cd | [
"MIT"
] | null | null | null | priv/release.exs | cohawk/petal_components | 2741c4de66fa8dfa2896fe1e6e02f1d1653422cd | [
"MIT"
] | null | null | null | defmodule Releaser.VersionUtils do
@doc """
Some utilities to get and set version numbers in the `mix.exs` file and to programatically transform version numbers.
1. Create a file RELEASE.md and fill out the following templates.
```
RELEASE_TYPE: patch
- Fixed x
- Added y
```
Release types:
- major: 0.0.1 -> 1.0.0 (when you make incompatible API changes)
- minor: 0.0.1 -> 0.1.0 (when you add functionality in a backwards compatible manner)
- patch: 0.0.1 -> 0.0.2 (when you make backwards compatible bug fixes)
2. Run in the terminal:
mix run priv/release.exs
3. Deploy to Hex.pm
mix hex.publish
4. Push to git
git push origin main
"""
@version_line_regex ~r/(\n\s*@version\s+")([^\n]+)("\n)/
# Version-number arithmetic following semantic versioning: each bump
# resets the less-significant components to zero.
def bump_major(%Version{} = version) do
  %Version{version | major: version.major + 1, minor: 0, patch: 0}
end

def bump_minor(%Version{} = version) do
  %Version{version | minor: version.minor + 1, patch: 0}
end

def bump_patch(%Version{} = version) do
  %Version{version | patch: version.patch + 1}
end

# Renders a Version struct back to "MAJOR.MINOR.PATCH" (any pre-release or
# build metadata on the struct is intentionally dropped).
def version_to_string(%Version{} = version) do
  Enum.join([version.major, version.minor, version.patch], ".")
end
# Reads the current version out of mix.exs; raises when no @version line
# matching @version_line_regex is found or the version fails to parse.
def get_version() do
  mix_exs = File.read!("mix.exs")

  case Regex.run(@version_line_regex, mix_exs) do
    [_full, _prefix, version, _suffix] -> Version.parse!(version)
    _no_match -> raise "Invalid project version in your mix.exs file"
  end
end
# Writes the new version into mix.exs, then rewrites occurrences of the old
# version string in README.md to the new one.
def set_version(old_version, version) do
  version_string = version_to_string(version)
  old_version_string = version_to_string(old_version)
  contents = File.read!("mix.exs")

  replaced =
    Regex.replace(@version_line_regex, contents, fn _, pre, _version, post ->
      "#{pre}#{version_string}#{post}"
    end)

  File.write!("mix.exs", replaced)

  # Update the readme to point to the latest version.
  # NOTE(review): this replaces *every* occurrence of the old version string
  # in README.md, not just install snippets — verify that is intended.
  readme_contents = File.read!("README.md")
  replaced_readme = String.replace(readme_contents, old_version_string, version_string)
  File.write!("README.md", replaced_readme)
end
def update_version(%Version{} = version, "major"), do: bump_major(version)
def update_version(%Version{} = version, "minor"), do: bump_minor(version)
def update_version(%Version{} = version, "patch"), do: bump_patch(version)
def update_version(%Version{} = _version, type), do: raise("Invalid version type: #{type}")
end
defmodule Releaser.Changelog do
  # BUGFIX: was a dangling `@doc` (it would have attached to the next
  # function); module-level documentation belongs in `@moduledoc`.
  @moduledoc """
  Functions to read the RELEASE.md file and append entries to the changelog.
  """
  alias Releaser.VersionUtils

  @release_filename "RELEASE.md"
  # Captures "RELEASE_TYPE: <word>" plus the free-form notes that follow
  # (the /s modifier lets `.` match across newlines).
  @release_type_regex ~r/^(RELEASE_TYPE:\s+)(\w+)(.*)/s
  @changelog_filename "CHANGELOG.md"
  @changelog_entry_header_level 3
  @changelog_entries_marker "# Changelog\n"

  @doc """
  Deletes the RELEASE.md file. Raises if the file does not exist.
  """
  def remove_release_file() do
    File.rm!(@release_filename)
  end

  @doc """
  Reads RELEASE.md and returns `{release_type, notes}`, with the notes
  trimmed of surrounding whitespace. Raises when the file does not start
  with the expected `RELEASE_TYPE: <type>` line.
  """
  def extract_release_type() do
    contents = File.read!(@release_filename)

    {type, text} =
      case Regex.run(@release_type_regex, contents) do
        [_line, _pre, type, text] ->
          {type, String.trim(text)}

        _ ->
          # BUGFIX: the previous message was copy-pasted from the version
          # utilities and wrongly referred to mix.exs.
          raise "Invalid release type in your #{@release_filename} file"
      end

    {type, text}
  end

  @doc """
  Builds a Markdown changelog entry for the given version and timestamp.
  """
  def changelog_entry(%Version{} = version, %DateTime{} = date_time, text) do
    header_prefix = String.duplicate("#", @changelog_entry_header_level)
    version_string = VersionUtils.version_to_string(version)

    date_time_string =
      date_time
      |> DateTime.truncate(:second)
      |> NaiveDateTime.to_string()

    """
    #{header_prefix} #{version_string} - #{date_time_string}
    #{text}
    """
  end

  @doc """
  Inserts `entry` into CHANGELOG.md directly below the "# Changelog" marker.
  Raises (MatchError) when the marker is missing.
  """
  def add_changelog_entry(entry) do
    contents = File.read!(@changelog_filename)
    # Split only on the first marker occurrence so a "# Changelog" heading
    # appearing later in the file cannot break the two-element match.
    [first, last] = String.split(contents, @changelog_entries_marker, parts: 2)

    replaced =
      Enum.join([
        first,
        @changelog_entries_marker,
        entry,
        last
      ])

    File.write!(@changelog_filename, replaced)
  end
end
defmodule Releaser.Git do
  # BUGFIX: was a dangling `@doc` that would attach to add_commit_and_tag/1;
  # module description belongs in `@moduledoc`.
  @moduledoc """
  Git-specific functionality for the release script.
  """
  alias Releaser.VersionUtils

  @doc """
  Stages all changes, commits them, and creates an annotated `v<version>`
  tag for the given `%Version{}`.
  """
  def add_commit_and_tag(version) do
    version_string = VersionUtils.version_to_string(version)
    Mix.Shell.IO.cmd("git add .", [])
    Mix.Shell.IO.cmd(~s'git commit -m "Bumped version number"')
    Mix.Shell.IO.cmd(~s'git tag -a v#{version_string} -m "Version #{version_string}"')
  end
end
defmodule Releaser.Tests do
  # Runs the full suite via `mix test`; raises when any test fails,
  # otherwise returns :ok.
  def run_tests!() do
    case Mix.Shell.IO.cmd("mix test", []) do
      0 -> :ok
      _nonzero -> raise "This version can't be released because tests are failing."
    end
  end
end
defmodule Releaser.Publish do
  # Publishes the package via `mix hex.publish`; raises on non-zero exit,
  # otherwise returns :ok.
  def publish!() do
    case Mix.Shell.IO.cmd("mix hex.publish", []) do
      0 -> :ok
      _nonzero -> raise "Couldn't publish package on hex."
    end
  end
end
defmodule Releaser do
  alias Releaser.VersionUtils
  alias Releaser.Changelog
  alias Releaser.Git
  alias Releaser.Tests
  alias Releaser.Publish

  # Orchestrates a release: test, bump, changelog, commit + tag.
  # NOTE(review): Releaser.Publish is aliased but never invoked here;
  # publishing appears to be the separate manual `mix hex.publish` step.
  def run() do
    # Run the tests before generating the release; stop if any fail.
    Tests.run_tests!()

    # Current version as declared in mix.exs.
    current_version = VersionUtils.get_version()

    # Use RELEASE.md to decide the bump type and collect the release notes,
    # then add the rendered entry to the changelog.
    {release_type, notes} = Changelog.extract_release_type()
    next_version = VersionUtils.update_version(current_version, release_type)
    entry = Changelog.changelog_entry(next_version, DateTime.utc_now(), notes)
    Changelog.add_changelog_entry(entry)

    # Write the new version into mix.exs (and README), drop RELEASE.md,
    # then commit everything and add a new 'v*.*.*' tag.
    VersionUtils.set_version(current_version, next_version)
    Changelog.remove_release_file()
    Git.add_commit_and_tag(next_version)
  end
end
# Script entry point: generate a new release when run via
# `mix run priv/release.exs`.
Releaser.run()
| 26.339367 | 119 | 0.676344 |
1ca210a1cc578add303629aef09f5822cb32e488 | 5,862 | ex | Elixir | lib/mix/tasks/prom_ex.dashboard.export.ex | LostKobrakai/prom_ex | 3557e828cf3905adaf6719003e68b65294b62288 | [
"MIT"
] | 354 | 2020-10-21T06:27:15.000Z | 2022-03-29T13:22:46.000Z | lib/mix/tasks/prom_ex.dashboard.export.ex | LostKobrakai/prom_ex | 3557e828cf3905adaf6719003e68b65294b62288 | [
"MIT"
] | 111 | 2020-11-25T21:27:13.000Z | 2022-03-28T10:42:59.000Z | lib/mix/tasks/prom_ex.dashboard.export.ex | blockfi/prom_ex | bca03035f52d023dfec365f82f01c648b367073b | [
"MIT"
] | 45 | 2020-12-31T20:37:11.000Z | 2022-03-18T13:12:21.000Z | defmodule Mix.Tasks.PromEx.Dashboard.Export do
@moduledoc """
This will render a PromEx dashboard either to STDOUT or to a file depending on
the CLI arguments that are provided.
The following CLI flags are supported:
```md
-d, --dashboard The name of the dashboard that you would like to export from PromEx.
For example, if you would like to export the Ecto dashboard, provide
the value `ecto.json`.
-m, --module The PromEx module which will be used to render the dashboards.
This is needed to fetch any relevant assigns from the
`c:PromEx.dashboard_assigns/0` callback
-s, --stdout A boolean flag denoting that the rendered dashboard should be output
to STDOUT.
-f, --file_path If you would like the write the generated JSON dashboard definition
to a file, you can provide a relative file path in the project's
`priv` directory.
-a, --assign Any additional assigns you would like to pass to the dashboard for
rendering. You are able to pass multiple assigns by passing multiple
--assign arguments. For example: `--assign some=thing --assign another=thing`.
```
"""
@shortdoc "Export a rendered dashboard to STDOUT or a file"
use Mix.Task
alias PromEx.DashboardRenderer
@impl true
def run(args) do
# Compile the project
Mix.Task.run("compile")
# Get CLI args and set up uploader
cli_args = parse_options(args)
prom_ex_module =
"Elixir.#{cli_args.module}"
|> String.to_atom()
|> Code.ensure_compiled()
|> case do
{:module, module} ->
module
{:error, reason} ->
raise "#{cli_args.module} is not a valid PromEx module because #{inspect(reason)}"
end
check_valid_dashboard(cli_args)
render_dashboard(prom_ex_module, cli_args)
end
defp parse_options(args) do
cli_options = [module: :string, stdout: :boolean, file_path: :string, dashboard: :string, assign: [:string, :keep]]
cli_aliases = [m: :module, s: :stdout, f: :file_path, d: :dashboard, a: :assign]
# Parse out the arguments and put defaults where necessary
args
|> OptionParser.parse(aliases: cli_aliases, strict: cli_options)
|> case do
{options, _remaining_args, [] = _errors} ->
options
|> Enum.reduce(%{}, fn
{:assign, assign_value}, acc when is_map_key(acc, :assigns) ->
[key, value] = String.split(assign_value, "=", parts: 2)
new_assign = {String.to_atom(key), value}
Map.put(acc, :assigns, [new_assign | acc.assigns])
{:assign, assign_value}, acc ->
[key, value] = String.split(assign_value, "=", parts: 2)
Map.put(acc, :assigns, [{String.to_atom(key), value}])
{opt, value}, acc ->
Map.put(acc, opt, value)
end)
|> Map.put_new(:assigns, [])
{_options, _remaining_args, errors} ->
raise "Invalid CLI args were provided: #{inspect(errors)}"
end
|> Map.put_new(:stdout, false)
|> Map.put_new(:dashboard, nil)
|> Map.put_new_lazy(:module, fn ->
Mix.Project.config()
|> Keyword.get(:app)
|> Atom.to_string()
|> Macro.camelize()
|> Kernel.<>(".PromEx")
end)
end
defp check_valid_dashboard(%{dashboard: nil}) do
raise "You must provide a --dashboard argument"
end
defp check_valid_dashboard(_args) do
:ok
end
defp render_dashboard(prom_ex_module, cli_args) do
user_provided_assigns = prom_ex_module.dashboard_assigns()
default_title =
prom_ex_module.__otp_app__()
|> Atom.to_string()
|> Macro.camelize()
default_dashboard_name =
cli_args.dashboard
|> Path.basename()
|> normalize_file_name()
|> Macro.camelize()
default_dashboard_assigns = [
otp_app: prom_ex_module.__otp_app__(),
title: "#{default_title} - PromEx #{default_dashboard_name} Dashboard"
]
dashboard_render =
:prom_ex
|> DashboardRenderer.build(cli_args.dashboard, prom_ex_module.__otp_app__())
|> DashboardRenderer.merge_assigns(default_dashboard_assigns)
|> DashboardRenderer.merge_assigns(user_provided_assigns)
|> DashboardRenderer.merge_assigns(cli_args.assigns)
|> DashboardRenderer.render_dashboard(prom_ex_module)
|> DashboardRenderer.decode_dashboard()
|> check_dashboard_render()
handle_export(cli_args, prom_ex_module, dashboard_render)
end
defp handle_export(%{stdout: true}, _prom_ex_module, dashboard_render) do
IO.puts(dashboard_render.rendered_file)
end
defp handle_export(%{file_path: file_path}, prom_ex_module, dashboard_render) do
priv_path =
prom_ex_module.__otp_app__()
|> :code.priv_dir()
|> :erlang.list_to_binary()
full_path = Path.join([priv_path, file_path])
File.write!(full_path, dashboard_render.rendered_file)
end
defp handle_export(_cli_args, _prom_ex_module, _dashboard_render) do
raise "You must specify either a file path to write the dashboard to, or provide the --stdout flag to print to STDOUT"
end
defp check_dashboard_render(%DashboardRenderer{valid_json?: false}) do
raise "The rendered dashboard yielded an invalid JSON data structure. Be sure to check your assigns."
end
defp check_dashboard_render(%DashboardRenderer{valid_file?: false}) do
raise "The dashboard that you selected does not exist in PromEx. Be sure that you typed it correctly."
end
defp check_dashboard_render(dashboard_render) do
dashboard_render
end
defp normalize_file_name(path) do
if Path.extname(path) == "" do
path
else
path
|> Path.rootname()
|> normalize_file_name()
end
end
end
| 32.748603 | 122 | 0.659161 |
1ca226af5fe29c52bfa8d82dc8735f53c437cc6a | 508 | ex | Elixir | lib/wishlist_web/live/assignment_live/show.ex | egutter/wishlist | af7b71c96ef9efded708c5ecfe3bab5a00c0761e | [
"MIT"
] | null | null | null | lib/wishlist_web/live/assignment_live/show.ex | egutter/wishlist | af7b71c96ef9efded708c5ecfe3bab5a00c0761e | [
"MIT"
] | null | null | null | lib/wishlist_web/live/assignment_live/show.ex | egutter/wishlist | af7b71c96ef9efded708c5ecfe3bab5a00c0761e | [
"MIT"
] | null | null | null | defmodule WishlistWeb.AssignmentLive.Show do
use WishlistWeb, :live_view
alias Wishlist.Wishlists
@impl true
def mount(_params, _session, socket) do
{:ok, socket}
end
@impl true
def handle_params(%{"id" => id}, _, socket) do
{:noreply,
socket
|> assign(:page_title, page_title(socket.assigns.live_action))
|> assign(:assignment, Wishlists.get_assignment!(id))}
end
defp page_title(:show), do: "Show Assignment"
defp page_title(:edit), do: "Edit Assignment"
end
| 23.090909 | 67 | 0.688976 |
1ca230f12aa96ec3f3f76ac6f530cdcfda2677a5 | 3,315 | ex | Elixir | lib/rabbit/message.ex | suitepad-gmbh/rabbit | c6ba3cbca6b49362c3a9dd32d3af001a1de750ad | [
"MIT"
] | 36 | 2019-06-05T02:27:11.000Z | 2022-02-27T22:01:27.000Z | lib/rabbit/message.ex | suitepad-gmbh/rabbit | c6ba3cbca6b49362c3a9dd32d3af001a1de750ad | [
"MIT"
] | 41 | 2019-06-06T07:50:20.000Z | 2021-08-02T10:18:08.000Z | lib/rabbit/message.ex | suitepad-gmbh/rabbit | c6ba3cbca6b49362c3a9dd32d3af001a1de750ad | [
"MIT"
] | 2 | 2021-01-13T16:13:05.000Z | 2021-04-09T13:22:19.000Z | defmodule Rabbit.Message do
@moduledoc """
A message consumed by a `Rabbit.Consumer`.
After starting a consumer, any message passed to the `c:Rabbit.Consumer.handle_message/1`
callback will be wrapped in a messsage struct. The struct has the following
fields:
* `:consumer` - The PID of the consumer process.
* `:module` - The module of the consumer process.
* `:channel` - The `AMQP.Channel` being used by the consumer.
* `:payload` - The raw payload of the message.
* `:decoded_payload` - If the message has a content type - this will be the
payload decoded using the applicable serializer.
* `:meta` - The metadata sent when publishing or set by the broker.
* `:custom_meta` - The custom metadata included when starting a consumer.
* `:error_reason` - The reason for any error that occurs during the message
handling callback.
* `:error_stack` - The stacktrace that might accompany the error.
"""
# Struct fields all default to nil; see the moduledoc for the meaning of
# each field.
defstruct [
  :consumer,
  :module,
  :channel,
  :payload,
  :decoded_payload,
  :meta,
  :custom_meta,
  :error_reason,
  :error_stack
]

@type t :: %__MODULE__{
        consumer: pid(),
        module: module(),
        channel: AMQP.Channel.t(),
        payload: binary(),
        decoded_payload: any(),
        meta: map(),
        custom_meta: map(),
        error_reason: any(),
        error_stack: nil | list()
      }
@doc """
Creates a new message struct.
"""
@spec new(Rabbit.Consumer.t(), module(), AMQP.Channel.t(), any(), map(), map()) ::
Rabbit.Message.t()
def new(consumer, module, channel, payload, meta, custom_meta) do
%__MODULE__{
consumer: consumer,
module: module,
channel: channel,
payload: payload,
meta: meta,
custom_meta: custom_meta
}
end
@doc """
Awknowledges a message.
## Options
* `:multiple` - If `true`, all messages up to the one specified by its
`delivery_tag` are considered acknowledged by the server.
"""
@spec ack(Rabbit.Message.t(), keyword()) :: :ok | {:error, :blocked | :closing}
def ack(message, opts \\ []) do
AMQP.Basic.ack(message.channel, message.meta.delivery_tag, opts)
end
@doc """
Negative awknowledges a message.
## Options
* `:multiple` - If `true`, all messages up to the one specified by it
`delivery_tag` are considered acknowledged by the server.
* `:requeue` - If `true`, the message will be returned to the queue and redelivered
to the next available consumer.
"""
@spec nack(Rabbit.Message.t(), keyword()) :: :ok | {:error, :blocked | :closing}
def nack(message, opts \\ []) do
AMQP.Basic.nack(message.channel, message.meta.delivery_tag, opts)
end
@doc """
Rejects a message.
## Options
* `:requeue` - If `true`, the message will be returned to the queue and redelivered
to the next available consumer.
"""
@spec reject(Rabbit.Message.t(), keyword()) :: :ok | {:error, :blocked | :closing}
def reject(message, opts \\ []) do
AMQP.Basic.reject(message.channel, message.meta.delivery_tag, opts)
end
@doc false
# Internal: records a handler failure on the message struct.
@spec put_error(Rabbit.Message.t(), any(), list()) :: Rabbit.Message.t()
def put_error(message, reason, stack) do
  message
  |> Map.replace!(:error_reason, reason)
  |> Map.replace!(:error_stack, stack)
end
end
| 30.136364 | 91 | 0.642232 |
1ca2576e55f22b0fb5f2c25a3dc5ab7adee9823e | 973 | ex | Elixir | lib/freshcom_web/plugs/pagination.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
] | 9 | 2018-12-16T14:02:59.000Z | 2021-01-19T07:25:40.000Z | lib/freshcom_web/plugs/pagination.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
] | null | null | null | lib/freshcom_web/plugs/pagination.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
] | 4 | 2018-12-16T17:50:01.000Z | 2021-01-19T07:25:51.000Z | defmodule FreshcomWeb.PaginationPlug do
import Plug.Conn
# Defaults for page-number pagination.
@defaults %{number: 1, size: 25}

# Plug init: no options are used.
def init(_), do: []

# Reads the "page" query parameter and stores the normalized pagination
# descriptor under conn.assigns.pagination.
def call(%{query_params: query_params} = conn, _) do
  assign(conn, :pagination, pagination(query_params["page"]))
end
defp pagination(%{"number" => number} = raw) do
number = to_int(number) || @defaults[:number]
size = to_int(raw["size"]) || @defaults[:size]
%{number: number, size: size}
end
defp pagination(%{"before_id" => before_id} = raw) do
size = to_int(raw["size"]) || @defaults[:size]
%{before_id: before_id, size: size}
end
defp pagination(%{"after_id" => after_id} = raw) do
size = to_int(raw["size"]) || @defaults[:size]
%{after_id: after_id, size: size}
end
defp pagination(_), do: @defaults
# Best-effort integer coercion: integers pass through, binaries are parsed
# (leading digits win, e.g. "12abc" -> 12), everything else becomes nil.
defp to_int(value) when is_integer(value), do: value

defp to_int(value) when is_binary(value) do
  with {parsed, _rest} <- Integer.parse(value) do
    parsed
  else
    :error -> nil
  end
end

defp to_int(_other), do: nil
end
| 23.166667 | 63 | 0.621788 |
1ca2b554e6ab7294f3e719b88b5b7b441263ef4b | 1,692 | exs | Elixir | apps/neoscan_sync/mix.exs | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 75 | 2017-07-23T02:45:32.000Z | 2021-12-13T11:04:17.000Z | apps/neoscan_sync/mix.exs | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 252 | 2017-07-13T19:36:00.000Z | 2021-07-28T18:40:00.000Z | apps/neoscan_sync/mix.exs | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 87 | 2017-07-23T02:45:34.000Z | 2022-03-02T14:54:27.000Z | defmodule NeoscanSync.Mixfile do
use Mix.Project
# Umbrella child project configuration: build, config, deps and lockfile
# paths all point at the umbrella root.
def project do
  [
    app: :neoscan_sync,
    version: "2.0.0",
    build_path: "../../_build",
    config_path: "../../config/config.exs",
    deps_path: "../../deps",
    lockfile: "../../mix.lock",
    elixir: "~> 1.4",
    elixirc_options: [warnings_as_errors: true],
    elixirc_paths: elixirc_paths(Mix.env()),
    test_coverage: [
      tool: ExCoveralls
    ],
    preferred_cli_env: [
      coveralls: :test,
      "coveralls.detail": :test,
      "coveralls.post": :test,
      "coveralls.html": :test
    ],
    build_embedded: Mix.env() == :prod,
    start_permanent: Mix.env() == :prod,
    deps: deps()
  ]
end
# Configuration for the OTP application.
#
# Type "mix help compile.app" for more information.
def application do
  # Specify extra applications you'll use from Erlang/Elixir.
  [
    extra_applications: [:logger, :httpoison],
    mod: {NeoscanSync.Application, []}
  ]
end

# Specifies which paths to compile per environment; the test env also
# compiles the test/ directory.
defp elixirc_paths(:test), do: ["lib", "test"]
defp elixirc_paths(_), do: ["lib"]

# Dependencies. Umbrella siblings are referenced with `in_umbrella: true`;
# run "mix help deps" for more examples and options.
defp deps do
  [
    {:neoscan, in_umbrella: true},
    {:neoscan_node, in_umbrella: true},
    {:excoveralls, "~> 0.9", only: :test},
    {:buffer, "~> 0.3.12"}
  ]
end
end
| 25.253731 | 79 | 0.584515 |
1ca2bfb677d70ea2027089049db9d1e87199ca8e | 752 | ex | Elixir | apps/instance/lib/instance/application.ex | makerdao/qa_backend_gateway | 38e9a3f3f4b66212f1ee9d38b3b698a2a1f9a809 | [
"Apache-2.0"
] | 1 | 2020-10-23T19:25:27.000Z | 2020-10-23T19:25:27.000Z | apps/instance/lib/instance/application.ex | makerdao/qa_backend_gateway | 38e9a3f3f4b66212f1ee9d38b3b698a2a1f9a809 | [
"Apache-2.0"
] | 5 | 2019-01-11T11:48:08.000Z | 2019-01-16T17:29:23.000Z | apps/instance/lib/instance/application.ex | makerdao/qa_backend_gateway | 38e9a3f3f4b66212f1ee9d38b3b698a2a1f9a809 | [
"Apache-2.0"
] | 7 | 2019-10-09T05:49:52.000Z | 2022-03-23T16:48:45.000Z | defmodule Staxx.Instance.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
# IMPROVEMENT: mark the Application behaviour callback with @impl so the
# compiler verifies it matches the behaviour.
@impl true
def start(_type, _args) do
  # List all child processes to be supervised. The two unique-key Registry
  # instances are presumably used for process lookup elsewhere in the app
  # (names suggest per-instance and per-stack registries — confirm).
  children = [
    {Registry, keys: :unique, name: Staxx.Instance.InstanceRegistry},
    {Registry, keys: :unique, name: Staxx.Instance.StackRegistry},
    Staxx.Instance.Terminator,
    Staxx.Instance.DynamicSupervisor,
    Staxx.Instance.Stack.ConfigLoader
  ]

  # See https://hexdocs.pm/elixir/Supervisor.html
  # for other strategies and supported options.
  opts = [strategy: :one_for_one, name: Staxx.Instance.Supervisor]
  Supervisor.start_link(children, opts)
end
end
| 31.333333 | 71 | 0.722074 |
1ca2d10bc14ff23d2e73713a1bff75ad899ef59c | 1,809 | exs | Elixir | clients/tasks/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/tasks/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/tasks/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE: per the header above, this file is generated by the elixir code
# generator — changes here may be overwritten on regeneration.
defmodule GoogleApi.Tasks.Mixfile do
  use Mix.Project

  # Package version; also used for the Hex release below.
  @version "0.13.0"

  def project() do
    [
      app: :google_api_tasks,
      version: @version,
      elixir: "~> 1.6",
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/tasks"
    ]
  end

  def application() do
    [extra_applications: [:logger]]
  end

  defp deps() do
    [
      {:google_gax, "~> 0.4"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # Hex package description.
  defp description() do
    """
    Tasks API client library. The Google Tasks API lets you manage your tasks and task lists.
    """
  end

  # Hex package metadata: files, maintainers, licenses and links.
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/tasks",
        "Homepage" => "https://developers.google.com/tasks/"
      }
    ]
  end
end
| 27 | 96 | 0.64953 |
1ca2db5d62ac20cb0554b41784c7be5950881776 | 198 | exs | Elixir | priv/repo/migrations/20200124122726_add_joined_to_players_teams.exs | mzavoloka/kamleague | ba29263ed54cac5c67b537c4b7d1dbc522215341 | [
"MIT"
] | null | null | null | priv/repo/migrations/20200124122726_add_joined_to_players_teams.exs | mzavoloka/kamleague | ba29263ed54cac5c67b537c4b7d1dbc522215341 | [
"MIT"
] | 2 | 2021-11-04T21:05:24.000Z | 2021-11-04T21:51:48.000Z | priv/repo/migrations/20200124122726_add_joined_to_players_teams.exs | mzavoloka/kamleague | ba29263ed54cac5c67b537c4b7d1dbc522215341 | [
"MIT"
] | 1 | 2021-11-04T18:40:26.000Z | 2021-11-04T18:40:26.000Z | defmodule Kamleague.Repo.Migrations.AddJoinedToPlayersTeams do
use Ecto.Migration
# Adds a `joined` boolean flag (defaulting to false) to players_teams.
def change do
  alter table(:players_teams) do
    add(:joined, :boolean, default: false)
  end
end
end
| 19.8 | 62 | 0.737374 |
1ca3071f332c446fa334061cb7fb0f8b744c5028 | 12,970 | ex | Elixir | apps/artemis_web/lib/artemis_web/view_helpers/tables.ex | artemis-platform/artemis_dashboard | 5ab3f5ac4c5255478bbebf76f0e43b44992e3cab | [
"MIT"
] | 9 | 2019-08-19T19:56:34.000Z | 2022-03-22T17:56:38.000Z | apps/artemis_web/lib/artemis_web/view_helpers/tables.ex | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis_web/lib/artemis_web/view_helpers/tables.ex | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 2 | 2019-07-05T22:51:47.000Z | 2019-08-19T19:56:37.000Z | defmodule ArtemisWeb.ViewHelper.Tables do
use Phoenix.HTML
import Phoenix.HTML.Tag
@default_delimiter ","
@doc """
Renders the "no records found" table row when the given collection is empty.

Accepts a plain list or any struct/map with an `:entries` field (e.g. a
pagination page), and returns `nil` when at least one record is present.

## Options

  * `:message` - text shown in the empty row (default: "No records found")
"""
def render_table_row_if_empty(records, options \\ [])

# Unwrap paginated results and re-dispatch on the entry list.
def render_table_row_if_empty(%{entries: entries}, options),
  do: render_table_row_if_empty(entries, options)

# Match the empty list directly instead of `when length(records) == 0`:
# idiomatic and assertive; non-lists still fall through to the nil clause.
def render_table_row_if_empty([], options) do
  message = Keyword.get(options, :message, "No records found")
  Phoenix.View.render(ArtemisWeb.LayoutView, "table_row_if_empty.html", message: message)
end

def render_table_row_if_empty(_records, _options), do: nil
@doc """
Render sortable table header
"""
def sortable_table_header(conn, value, label, delimiter \\ @default_delimiter) do
# Link target toggles this field's direction inside the "order" query param.
path = order_path(conn, value, delimiter)
text = content_tag(:span, label)
# Icon class reflects the field's current sort direction, if any.
icon = content_tag(:i, "", class: icon_class(conn, value, delimiter))
content_tag(:a, [text, icon], href: path)
end
# Builds the href for a sortable header: same request path, with the "order"
# query param toggled for the given field.
defp order_path(conn, field, delimiter) do
  encoded =
    conn
    |> update_query_param(field, delimiter)
    |> Plug.Conn.Query.encode()

  "#{Map.get(conn, :request_path)}?#{encoded}"
end
# Toggles the sort direction for `field` inside the "order" query param:
# a plain entry flips to its "-" prefixed (descending) form and vice versa;
# a field not yet ordered replaces the whole ordering. Other params are kept.
defp update_query_param(conn, field, delimiter) do
  descending = inverse_value(field)
  params = Map.get(conn, :query_params) || %{}

  fields =
    params
    |> Map.get("order", "")
    |> String.split(delimiter)

  toggled =
    cond do
      field in fields -> replace_item(fields, field, descending)
      descending in fields -> replace_item(fields, descending, field)
      true -> [field]
    end

  Map.put(params, "order", Enum.join(toggled, delimiter))
end
defp inverse_value(value), do: "-#{value}"
# Replaces the first occurrence of `current` with `next`; returns the list
# unchanged when `current` is absent.
defp replace_item(list, current, next) do
  index = Enum.find_index(list, &(&1 == current))

  if is_nil(index) do
    list
  else
    List.update_at(list, index, fn _ -> next end)
  end
end
# CSS class for the sort icon: appends "ascending" when the field appears in
# the "order" query param, "descending" when its "-" prefixed inverse does.
defp icon_class(conn, value, delimiter) do
base = "sort icon"
query_params = Map.get(conn, :query_params) || %{}
current_value = Map.get(query_params, "order", "")
current_fields = String.split(current_value, delimiter)
cond do
Enum.member?(current_fields, value) -> "#{base} ascending"
Enum.member?(current_fields, inverse_value(value)) -> "#{base} descending"
true -> base
end
end
@doc """
Render Data Table
Example:
<%=
render_data_table(
@conn,
@customers,
allowed_columns: allowed_columns(),
default_columns: ["name", "slug", "actions"],
selectable: true
)
%>
Options:
allowed_columns: map of allowed columns
default_columns: list of strings
selectable: include checkbox for bulk actions
query_params: map of connection query params
request_path: string of connection request path
user: struct of current user
## Features
### Column Ordering
The `columns` query param can be used to define a custom order to table
columns. For example, the default columns might be:
Name | Slug | Actions
By passing in the query param `?columns=status,name,address` the table
will transform to show:
Status | Name | Address
This enables custom reporting in a standard and repeatable way across the
application. Since query params are used to define the columns, any reports a
user creates can be revisited using the same URL. Which in turn, also makes
it easy to share with others.
### Table Export
Custom exporters can be defined for any format, like `html`, `json`, `csv`,
`xls`, or `pdf`. There's no conventions to learn or magic. As documented below,
standard Elixir and Phoenix code can be used to define and write custom
exporters in any format.
## Options
The goal of the data table is to be extensible without introducing new
data table specific conventions. Instead, enable extension using standard
Elixir and Phoenix calls.
### Allowed Columns
The value for `allowed_columns` should be a map. A complete example may look like:
%{
"name" => [
label: fn (_conn) -> "Name" end,
value: fn (_conn, row) -> row.name end,
],
"slug" => [
label: fn (_conn) -> "Slug" end,
value: fn (_conn, row) -> row.slug end,
]
}
The key for each entry should be a URI friendly slug. It is used to match
against the `columns` query param.
The value for each entry is a keyword list. It must define a `label` and
`value` function.
The `label` function is used in column headings. It takes one argument, the
`conn` struct. The most common return will be a simple bitstring, but
the `conn` is included for more advanced usage, for instance creating an
anchor link.
The `value` function is used for the column value. It takes two arguments,
the `conn` struct and the `row` value. The most common return will be calling
an attribute on the row value, for instance `data.name`. The `conn` value is
included for more advanced usage.
#### Support for Different Content Types / Formats
The required `label` and `value` functions should return simple values, like
bitstrings, integers, and floats.
Format specific values, such as HTML tags, should be defined in format
specific keys. For instance:
"name" => [
label: fn (_conn) -> "Name" end,
value: fn (_conn, row) -> row.name end,
value_html: fn (conn, row) ->
link(row.name, to: Routes.permission_path(conn, :show, row))
end
]
The data table function will first search for `label_<format>` and
`value_<format>` keys. E.g. a standard `html` request would search for
`label_html` and `value_html`. And in turn, a request for `csv` content type
would search for `label_csv` and `value_csv`. If format specific keys are not
found, the require `label` and `value` keys will be used as a fallback.
### Default Columns
The default columns option should be a list of bitstrings, each corresponding
to a key defined in the `allowed_columns` map.
default_columns: ["name", "slug"]
"""
# Bodiless head declaring the default for `options`; dispatch is below.
def render_data_table(conn_or_socket_or_assigns, data, options \\ [])

# LiveView entry point: lift query params, request path and current user out
# of the assigns into options so the socket can stand in for a conn.
def render_data_table(%{socket: socket} = assigns, data, options) do
options =
options
|> Keyword.put_new(:query_params, assigns[:query_params])
|> Keyword.put_new(:request_path, assigns[:request_path])
|> Keyword.put_new(:user, assigns[:user])
render_data_table(socket, data, options)
end

# Controller entry point: unwrap the Plug conn from the assigns.
def render_data_table(%{conn: %Plug.Conn{} = conn} = _assigns, data, options) do
render_data_table(conn, data, options)
end

def render_data_table(conn_or_socket, data, options) do
format = get_request_format(conn_or_socket)
conn_or_socket = update_conn_or_socket_fields(conn_or_socket, options)
columns = get_data_table_columns(conn_or_socket, options)
headers? = Keyword.get(options, :headers, true)
compact? = Keyword.get(options, :compact, false)
# Build the container CSS class from the display options.
class = "data-table-container"
class =
case compact? do
true -> class <> " compact"
false -> class
end
class =
case headers? do
true -> class <> " with-headers"
false -> class <> " without-headers"
end
assigns = [
class: class,
columns: columns,
conn_or_socket: conn_or_socket,
data: data,
headers?: headers?,
# Caller may pass a stable :id; otherwise every render gets a fresh UUID.
id: Keyword.get(options, :id, Artemis.Helpers.UUID.call()),
selectable: Keyword.get(options, :selectable),
show_only: Keyword.get(options, :show_only)
]
# Template is chosen per request format, e.g. data_table.html / data_table.csv.
Phoenix.View.render(ArtemisWeb.LayoutView, "data_table.#{format}", assigns)
end
# LiveView sockets lack the conn fields the templates read; copy the
# query params / request path onto the socket and put the user in assigns.
# The :user option is mandatory for LiveView callers (fetch! raises otherwise).
defp update_conn_or_socket_fields(%Phoenix.LiveView.Socket{} = socket, options) do
socket
|> Map.put(:query_params, Keyword.get(options, :query_params))
|> Map.put(:request_path, Keyword.get(options, :request_path))
|> Map.put(:assigns, %{
user: Keyword.fetch!(options, :user)
})
end

# Plug conns already carry these fields; pass through unchanged.
defp update_conn_or_socket_fields(conn, _options), do: conn
# Best-effort format detection. The broad rescue is deliberate: get_format/1
# presumably raises for values without format info (e.g. LiveView sockets) —
# any failure falls back to :html.
defp get_request_format(conn) do
Phoenix.Controller.get_format(conn)
rescue
_ -> :html
end
@doc """
Compares the `?columns=` query param value against the `allowed_columns`. If
the query param is not set, compares the `default_columns` value instead.
Returns the matching column definitions from `allowed_columns` in the
requested order, prepending the bulk-select checkbox column when the
`:selectable` option is truthy.
"""
def get_data_table_columns(%Plug.Conn{} = conn, options) do
  # Normalize a Plug conn into the minimal assigns map the generic clause needs.
  assigns = %{
    query_params: conn.query_params
  }

  get_data_table_columns(assigns, options)
end

def get_data_table_columns(assigns, options) do
  selectable? = Keyword.get(options, :selectable, false)
  # `allowed_columns` is documented as a map keyed by column slug. Default to
  # an empty map — the previous `[]` default raised BadMapError on Map.get/2
  # whenever the option was omitted but columns were requested.
  allowed_columns = Keyword.get(options, :allowed_columns, %{})
  requested_columns = parse_data_table_requested_columns(assigns, options)

  # Keep only requested columns that are actually allowed, preserving the
  # requested order (equivalent to the old reduce + reverse).
  columns =
    requested_columns
    |> Enum.map(&Map.get(allowed_columns, &1))
    |> Enum.reject(&is_nil/1)

  if selectable? do
    [get_checkbox_column() | columns]
  else
    columns
  end
end
# Synthetic column for bulk actions: a select-all checkbox in the header and
# a per-row checkbox carrying the row id (:_id is preferred over :id).
# Non-HTML formats render nothing for this column.
defp get_checkbox_column() do
[
label: fn _conn -> nil end,
label_html: fn _conn ->
tag(:input, class: "ui checkbox select-all-rows", type: "checkbox", name: "id-toggle")
end,
value: fn _conn, _row -> nil end,
value_html: fn _conn, row ->
value = Map.get(row, :_id, Map.get(row, :id))
tag(:input, class: "ui checkbox select-row", type: "checkbox", name: "id[]", value: value)
end
]
end
@doc """
Parse query params and return requested data table columns
"""
def parse_data_table_requested_columns(conn_or_assigns, options \\ [])

# LiveView sockets carry no query params here; use the configured defaults.
def parse_data_table_requested_columns(%Phoenix.LiveView.Socket{} = _socket, options) do
get_data_table_requested_columns(options)
end

def parse_data_table_requested_columns(%Plug.Conn{} = conn, options) do
conn
|> Map.get(:query_params)
|> parse_data_table_requested_columns(options)
end

# Assigns map: unwrap and re-dispatch on the raw query params.
def parse_data_table_requested_columns(%{query_params: query_params}, options) do
parse_data_table_requested_columns(query_params, options)
end

# Base case: read the "columns" query param from a params map.
def parse_data_table_requested_columns(query_params, options) when is_map(query_params) do
query_params
|> Map.get("columns")
|> get_data_table_requested_columns(options)
end
# Resolve the requested columns: missing/nil param -> configured defaults;
# comma-separated string -> split into a list; list -> passed through.
defp get_data_table_requested_columns(options), do: Keyword.get(options, :default_columns, [])
defp get_data_table_requested_columns(nil, options), do: Keyword.get(options, :default_columns, [])
defp get_data_table_requested_columns(value, _) when is_bitstring(value), do: String.split(value, ",")
defp get_data_table_requested_columns(value, _) when is_list(value), do: value
@doc """
Renders the label for a data table column, preferring the format-specific
renderer (e.g. `:label_html` for the "html" format) over the generic
`:label` function.
"""
def render_data_table_label(conn, column, format) do
  default = Keyword.fetch!(column, :label)

  render =
    try do
      # Use to_existing_atom/1: `format` is request-derived and creating
      # atoms from request data can exhaust the atom table.
      key = String.to_existing_atom("label_#{format}")
      Keyword.get(column, key, default)
    rescue
      # If the atom was never defined anywhere, no column keyword list can
      # contain that key either, so the generic renderer is the right fallback.
      ArgumentError -> default
    end

  render.(conn)
end
@doc """
Renders the row value for a data table column, preferring the
format-specific renderer (e.g. `:value_html`) over the generic `:value`
function.
"""
def render_data_table_value(conn, column, row, format) do
  default = Keyword.fetch!(column, :value)

  render =
    try do
      # Use to_existing_atom/1: `format` is request-derived and creating
      # atoms from request data can exhaust the atom table.
      key = String.to_existing_atom("value_#{format}")
      Keyword.get(column, key, default)
    rescue
      # Unknown atom means no column defines that key; fall back safely.
      ArgumentError -> default
    end

  render.(conn, row)
end
@doc """
Render a select box to allow users to choose custom columns
"""
def render_data_table_column_selector(%Plug.Conn{} = conn, available_columns) do
  # Normalize the conn into the assigns shape the generic clause expects.
  assigns = %{
    conn: conn,
    query_params: conn.query_params,
    request_path: conn.request_path
  }

  render_data_table_column_selector(assigns, available_columns)
end

def render_data_table_column_selector(assigns, available_columns) do
  conn_or_socket = Map.get(assigns, :conn) || Map.get(assigns, :socket)
  selected = parse_data_table_requested_columns(assigns)
  # Highlight the selector when any columns are selected. Comparing against
  # `[]` is the idiomatic emptiness check (no `length/1` traversal).
  class = if selected != [], do: "active"

  # Selected columns first, in their selected order; unselected ones sort
  # last because :infinity compares greater than any integer index.
  sorted_by_selected =
    Enum.sort_by(available_columns, fn column ->
      key = elem(column, 1)
      Enum.find_index(selected, &(&1 == key)) || :infinity
    end)

  assigns = [
    available: sorted_by_selected,
    class: class,
    conn_or_socket: conn_or_socket,
    selected: selected
  ]

  Phoenix.View.render(ArtemisWeb.LayoutView, "data_table_columns.html", assigns)
end
@doc """
Prints a primary and secondary value
"""
def render_table_entry(primary, secondary \\ nil)

# Without a secondary value, return the primary unwrapped.
def render_table_entry(primary, secondary) when is_nil(secondary), do: primary

# With a secondary value, wrap both in divs; the second is styled down.
def render_table_entry(primary, secondary) do
[
content_tag(:div, primary),
content_tag(:div, secondary, class: "secondary-value")
]
end
end
| 31.103118 | 110 | 0.685582 |
1ca3216b1595b21c01af4323298f6de331f2a487 | 613 | ex | Elixir | lib/bolt/consumer/channel_delete.ex | CyberFlameGO/bolt | 225e6276983bec646e7f13519df066e8e1e770ed | [
"ISC"
] | 31 | 2018-12-06T23:12:33.000Z | 2022-03-29T18:34:25.000Z | lib/bolt/consumer/channel_delete.ex | CyberFlameGO/bolt | 225e6276983bec646e7f13519df066e8e1e770ed | [
"ISC"
] | 18 | 2021-06-14T19:03:26.000Z | 2022-03-15T17:46:22.000Z | lib/bolt/consumer/channel_delete.ex | CyberFlameGO/bolt | 225e6276983bec646e7f13519df066e8e1e770ed | [
"ISC"
] | 4 | 2018-11-07T18:52:28.000Z | 2022-03-16T00:14:38.000Z | defmodule Bolt.Consumer.ChannelDelete do
@moduledoc "Handles the `CHANNEL_DELETE` event."
alias Bolt.ModLog
alias Nostrum.Struct.Channel
@spec handle(Channel.t()) :: nil | ModLog.on_emit()
def handle(channel) do
  # Channels outside a guild carry no guild_id; only guild channel
  # deletions are reported to the mod log.
  if channel.guild_id != nil do
    ModLog.emit(
      channel.guild_id,
      "CHANNEL_DELETE",
      "#{type_name(channel.type)} #{channel.name} (`#{channel.id}`) was deleted"
    )
  end
end

# Maps the numeric channel type to a human-readable name for the log line.
defp type_name(0), do: "text channel"
defp type_name(2), do: "voice channel"
defp type_name(4), do: "category"
defp type_name(_), do: "unknown channel type"
end
| 23.576923 | 68 | 0.588907 |
1ca33648ee1672c09e92b5c6ea4895380361ae52 | 212 | ex | Elixir | config.ex | jjg/jsfs | 73613eaf44edd476d368d62a079b1c1347bd3ac4 | [
"MIT"
] | 3 | 2015-01-04T22:00:22.000Z | 2016-06-02T19:49:33.000Z | config.ex | jjg/jsfs | 73613eaf44edd476d368d62a079b1c1347bd3ac4 | [
"MIT"
] | 109 | 2015-01-01T17:48:42.000Z | 2016-12-29T00:19:40.000Z | config.ex | jjg/jsfs | 73613eaf44edd476d368d62a079b1c1347bd3ac4 | [
"MIT"
] | 5 | 2015-01-19T20:45:47.000Z | 2021-02-23T17:15:10.000Z | module.exports = {
// Directories that hold stored blocks; each entry is an object with a "path" key.
STORAGE_LOCATIONS:[
{"path":"./blocks1/"},
{"path":"./blocks2/"}
],
// Block size in bytes (1048576 = 1 MiB).
BLOCK_SIZE: 1048576,
// Logging verbosity; 0 presumably is the quietest level — confirm in the logger.
LOG_LEVEL: 0,
// TCP port the server listens on.
SERVER_PORT: 7302,
REQUEST_TIMEOUT: 30, // minutes
// Storage backend selector; "fs" presumably selects the local filesystem — verify.
CONFIGURED_STORAGE: "fs"
};
| 17.666667 | 32 | 0.650943 |
1ca349cafcaf19320e7f3c6dd0c2c5241aac1212 | 8,209 | exs | Elixir | lib/mix/test/test_helper.exs | michaelkschmidt/elixir | 805efd071d220b7431269b9716c5f7ce44ca36e8 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/test_helper.exs | michaelkschmidt/elixir | 805efd071d220b7431269b9716c5f7ce44ca36e8 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/test_helper.exs | michaelkschmidt/elixir | 805efd071d220b7431269b9716c5f7ce44ca36e8 | [
"Apache-2.0"
] | null | null | null | Mix.start()
Mix.shell(Mix.Shell.Process)
Application.put_env(:mix, :colors, [enabled: false])
ExUnit.start [trace: "--trace" in System.argv]
# Shared ExUnit case template for Mix's test suite: fixture/tmp path helpers,
# sandboxed working directories, and wrappers for shelling out to bin/mix.
defmodule MixTest.Case do
use ExUnit.CaseTemplate
# Minimal Mix project used by tests that need a project definition.
defmodule Sample do
def project do
[app: :sample,
version: "0.1.0",
aliases: [sample: "compile"]]
end
end
using do
quote do
import MixTest.Case
end
end
# Per-test setup: tests tagged with :apps get the console logger backend
# removed while they run; on exit all global Mix state is reset and any
# tagged apps are stopped and unloaded.
setup config do
if apps = config[:apps] do
Logger.remove_backend(:console)
end
on_exit fn ->
Application.start(:logger)
Mix.env(:dev)
Mix.Task.clear
Mix.Shell.Process.flush
Mix.ProjectStack.clear_cache
Mix.ProjectStack.clear_stack
delete_tmp_paths()
if apps do
for app <- apps do
Application.stop(app)
Application.unload(app)
end
Logger.add_backend(:console, flush: true)
end
end
:ok
end
# Absolute path to the shared fixtures directory (optionally a sub-path).
def fixture_path do
Path.expand("fixtures", __DIR__)
end
def fixture_path(extension) do
Path.join fixture_path(), extension
end
# Absolute path to the scratch tmp directory (optionally a sub-path).
def tmp_path do
Path.expand("../tmp", __DIR__)
end
def tmp_path(extension) do
Path.join tmp_path(), to_string(extension)
end
# Purges and deletes the given modules from the code server.
def purge(modules) do
Enum.each modules, fn(m) ->
:code.purge(m)
:code.delete(m)
end
end
# Runs `function` inside a freshly recreated tmp sub-directory.
def in_tmp(which, function) do
path = tmp_path(which)
File.rm_rf! path
File.mkdir_p! path
File.cd! path, function
end
# Macro variant: derives a per-test tmp dir from the calling module and test
# name, then delegates to in_fixture/3.
defmacro in_fixture(which, block) do
module = inspect __CALLER__.module
function = Atom.to_string elem(__CALLER__.function, 0)
tmp = Path.join(module, function)
quote do
unquote(__MODULE__).in_fixture(unquote(which), unquote(tmp), unquote(block))
end
end
# Copies fixture `which` into a tmp dir, runs `function` inside it, then
# restores the code path and purges modules loaded from tmp (or in-memory)
# during the run.
def in_fixture(which, tmp, function) do
src = fixture_path(which)
dest = tmp_path(tmp)
flag = String.to_charlist(tmp_path())
File.rm_rf!(dest)
File.mkdir_p!(dest)
File.cp_r!(src, dest)
get_path = :code.get_path
previous = :code.all_loaded
try do
File.cd! dest, function
after
:code.set_path(get_path)
for {mod, file} <- :code.all_loaded -- previous,
file == :in_memory or
(is_list(file) and :lists.prefix(flag, file)) do
purge [mod]
end
end
end
# Touches `file` until its mtime is strictly newer than `current`, so
# timestamp-based recompilation checks are guaranteed to see a change.
def ensure_touched(file) do
ensure_touched(file, File.stat!(file).mtime)
end
def ensure_touched(file, current) do
File.touch!(file)
unless File.stat!(file).mtime > current do
ensure_touched(file, current)
end
end
# Platform-appropriate line separator.
def os_newline do
case :os.type do
{:win32, _} -> "\r\n"
_ -> "\n"
end
end
# Shells out to bin/mix with the given args and returns the combined
# stdout/stderr output.
def mix(args, envs \\ []) when is_list(args) do
System.cmd(elixir_executable(),
["-r", mix_executable(), "--" | args],
stderr_to_stdout: true,
env: envs) |> elem(0)
end
# Like mix/2 but returns an open Port for tests that need to interact with
# the running command.
def mix_port(args, envs \\ []) when is_list(args) do
Port.open({:spawn_executable, elixir_executable()}, [
{:args, ["-r", mix_executable(), "--" | args]},
{:env, envs},
:binary,
:use_stdio,
:stderr_to_stdout
])
end
defp mix_executable do
Path.expand("../../../bin/mix", __DIR__)
end
defp elixir_executable do
Path.expand("../../../bin/elixir", __DIR__)
end
# Removes code-path entries that contain the tmp directory path.
defp delete_tmp_paths do
tmp = tmp_path() |> String.to_charlist
for path <- :code.get_path,
:string.str(path, tmp) != 0,
do: :code.del_path(path)
end
end
## Set up Mix home with Rebar
# Point MIX_HOME at a throwaway tmp dir so tests never touch the real one.
home = MixTest.Case.tmp_path(".mix")
File.mkdir_p!(home)
System.put_env("MIX_HOME", home)
# rebar/rebar3 binaries come from the environment when set, otherwise from
# the repository checkout.
rebar = System.get_env("REBAR") || Path.expand("../../../rebar", __DIR__)
File.cp!(rebar, Path.join(home, "rebar"))
rebar = System.get_env("REBAR3") || Path.expand("../../../rebar3", __DIR__)
File.cp!(rebar, Path.join(home, "rebar3"))
## Copy fixtures to tmp
fixtures = ~w(rebar_dep rebar_override)
Enum.each(fixtures, fn fixture ->
source = MixTest.Case.fixture_path(fixture)
dest = MixTest.Case.tmp_path(fixture)
File.mkdir_p!(dest)
File.cp_r!(source, dest)
end)
## Generate Git repo fixtures
# Git repo
# Generation is skipped when the fixture directory already exists.
target = Path.expand("fixtures/git_repo", __DIR__)
unless File.dir?(target) do
File.mkdir_p!(Path.join(target, "lib"))
# First commit ("bad") deliberately contains a mix.exs that raises when
# loaded, so tests can detect checkouts of the wrong revision.
File.write! Path.join(target, "mix.exs"), """
## Auto-generated fixture
raise "I was not supposed to be loaded"
"""
File.cd! target, fn ->
System.cmd("git", ~w[init])
System.cmd("git", ~w[config user.email "mix@example.com"])
System.cmd("git", ~w[config user.name "mix-repo"])
System.cmd("git", ~w[add .])
System.cmd("git", ~w[commit -m "bad"])
end
# Second commit replaces it with a valid project, tagged "without_module"
# because lib/git_repo.ex does not exist yet at this point in history.
File.write! Path.join(target, "mix.exs"), """
## Auto-generated fixture
defmodule GitRepo.Mixfile do
use Mix.Project
def project do
[app: :git_repo, version: "0.1.0"]
end
end
"""
File.cd! target, fn ->
System.cmd("git", ~w[add .])
System.cmd("git", ~w[commit -m "ok"])
System.cmd("git", ~w[tag without_module])
end
File.write! Path.join(target, "lib/git_repo.ex"), """
## Auto-generated fixture
defmodule GitRepo do
def hello do
"World"
end
end
"""
## Sparse
# Nested project under sparse_dir — presumably exercises sparse git
# checkouts; verify against the dep tests that consume it.
subdir = Path.join(target, "sparse_dir")
File.mkdir_p!(Path.join(subdir, "lib"))
File.write! Path.join(subdir, "mix.exs"), """
## Auto-generated fixture
defmodule GitSparseRepo.Mixfile do
use Mix.Project
def project do
[app: :git_sparse_repo, version: "0.1.0"]
end
end
"""
File.write! Path.join(subdir, "lib/git_sparse_repo.ex"), """
## Auto-generated fixture
defmodule GitSparseRepo do
def hello do
"World"
end
end
"""
# Final commit includes lib/ and is tagged "with_module".
File.cd! target, fn ->
System.cmd("git", ~w[add .])
System.cmd("git", ~w[commit -m "lib"])
System.cmd("git", ~w[tag with_module])
end
end
# Deps on Git repo
# Git fixture whose project declares the git_repo fixture above as a git
# dependency. Generation is skipped when the directory already exists.
target = Path.expand("fixtures/deps_on_git_repo", __DIR__)
unless File.dir?(target) do
File.mkdir_p!(Path.join(target, "lib"))
File.write! Path.join(target, "mix.exs"), """
## Auto-generated fixture
defmodule DepsOnGitRepo.Mixfile do
use Mix.Project
def project do
[app: :deps_on_git_repo,
version: "0.2.0",
deps: [{:git_repo, git: MixTest.Case.fixture_path("git_repo")}]]
end
end
"""
# The lib file calls into GitRepo, so compiling it requires the dep.
File.write! Path.join(target, "lib/deps_on_git_repo.ex"), """
## Auto-generated fixture
GitRepo.hello
"""
File.cd! target, fn ->
System.cmd("git", ~w[init])
System.cmd("git", ~w[config user.email "mix@example.com"])
System.cmd("git", ~w[config user.name "mix-repo"])
System.cmd("git", ~w[add .])
System.cmd("git", ~w[commit -m "ok"])
end
end
# Git Rebar
# Git fixture containing a plain Erlang/rebar application (src/*.app.src and
# a module), for testing rebar dependencies fetched over git.
target = Path.expand("fixtures/git_rebar", __DIR__)
unless File.dir?(target) do
File.mkdir_p!(Path.join(target, "src"))
File.write! Path.join([target, "src", "git_rebar.app.src"]), """
{application, git_rebar,
[
{vsn, "0.1.0"}
]}.
"""
File.write! Path.join([target, "src", "git_rebar.erl"]), """
-module(git_rebar).
-export ([any_function/0]).
any_function() ->
ok.
"""
File.cd! target, fn ->
System.cmd("git", ~w[init])
System.cmd("git", ~w[config user.email "mix@example.com"])
System.cmd("git", ~w[config user.name "mix-repo"])
System.cmd("git", ~w[add .])
System.cmd("git", ~w[commit -m "ok"])
end
end
# Create bare .git markers so these deps_status fixtures look like fetched
# git dependencies.
Enum.each [:invalidapp, :invalidvsn, :noappfile, :nosemver, :ok], fn(dep) ->
File.mkdir_p! Path.expand("fixtures/deps_status/deps/#{dep}/.git", __DIR__)
end
## Generate helper modules
path = MixTest.Case.tmp_path("beams")
File.rm_rf!(path)
File.mkdir_p!(path)
# defmodule/2 returns {:module, name, bytecode, result}; persist the
# bytecode as a .beam file under tmp/beams so tasks can be loaded from disk.
write_beam = fn {:module, name, bin, _} ->
path
|> Path.join(Atom.to_string(name) <> ".beam")
|> File.write!(bin)
end
# Sample task with docs and an option-parsing clause.
defmodule Mix.Tasks.Hello do
use Mix.Task
@shortdoc "This is short documentation, see"
@moduledoc """
A test task.
"""
def run([]) do
"Hello, World!"
end
def run(["--parser" | args]) do
OptionParser.parse!(args, strict: [int: :integer])
end
def run(args) do
"Hello, #{Enum.join(args, " ")}!"
end
end |> write_beam.()
# Module that is not a valid task (no `use Mix.Task`, no run/1).
defmodule Mix.Tasks.Invalid do
end |> write_beam.()
defmodule Mix.Tasks.Acronym.HTTP do
use Mix.Task
def run(_), do: "An HTTP Task"
end |> write_beam.()
| 22.008043 | 82 | 0.618589 |
1ca396b867deb99ca94438cfe825de9824df7729 | 934 | ex | Elixir | test/support/channel_case.ex | donkeybanana/s2i-elixir | c13796ec3e66bd372d08f08a36f5704d6da98015 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | donkeybanana/s2i-elixir | c13796ec3e66bd372d08f08a36f5704d6da98015 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | donkeybanana/s2i-elixir | c13796ec3e66bd372d08f08a36f5704d6da98015 | [
"MIT"
] | null | null | null | defmodule LiveWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use LiveWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
# Injected into every test module that `use`s this case template.
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import LiveWeb.ChannelCase
# The default endpoint for testing
@endpoint LiveWeb.Endpoint
end
end
# No per-test setup is performed for channel tests in this app.
setup _tags do
:ok
end
end
| 26.685714 | 61 | 0.735546 |
1ca3aa3f0941880d5fa0efc669757d40c1b531d2 | 322 | ex | Elixir | deps/plug_wait1/lib/plug/adapters/wait1/conn/query_string.ex | conorfoley/dota_hero_combos | d75a3f0673fc1f1d0845c9d5c692c0605d3b445d | [
"MIT"
] | null | null | null | deps/plug_wait1/lib/plug/adapters/wait1/conn/query_string.ex | conorfoley/dota_hero_combos | d75a3f0673fc1f1d0845c9d5c692c0605d3b445d | [
"MIT"
] | null | null | null | deps/plug_wait1/lib/plug/adapters/wait1/conn/query_string.ex | conorfoley/dota_hero_combos | d75a3f0673fc1f1d0845c9d5c692c0605d3b445d | [
"MIT"
] | null | null | null | defmodule Plug.Adapters.Wait1.Conn.QS do
defstruct kvs: nil
end
defimpl String.Chars, for: Plug.Adapters.Wait1.Conn.QS do
def to_string(%{:kvs => kvs}) when is_map(kvs) do
Plug.Conn.Query.encode(kvs)
end
def to_string(%{:kvs => kvs}) when is_binary(kvs) do
kvs
end
def to_string(_) do
""
end
end | 21.466667 | 57 | 0.680124 |
1ca412c2043ca502ac04ea27d5fbd797eceb2938 | 1,590 | ex | Elixir | lib/ssl_certs/cli/create.ex | aforward/sslcerts | cd0d1310102e509b4f00d24040af939e4cefd395 | [
"MIT"
] | 9 | 2017-08-11T14:43:09.000Z | 2021-03-27T21:07:41.000Z | lib/ssl_certs/cli/create.ex | aforward/sslcerts | cd0d1310102e509b4f00d24040af939e4cefd395 | [
"MIT"
] | null | null | null | lib/ssl_certs/cli/create.ex | aforward/sslcerts | cd0d1310102e509b4f00d24040af939e4cefd395 | [
"MIT"
] | 1 | 2017-09-14T20:08:53.000Z | 2017-09-14T20:08:53.000Z | defmodule Sslcerts.Cli.Create do
use Mix.Task
use FnExpr
alias Sslcerts.Io.Shell
alias Sslcerts.Cli.{Parser, Install}
@moduledoc """
Create a new certificate
sslcerts create
This assumes that `bits` has been installed, if that's not the case, then ensure that you first run
sslcerts install bits
## Available configurations
* `--email` The email associated with the certificate
* `--domains` The domains you are certifying
* `--webroot` The root of your static assets to allow certbot to confirm it's your domain
* `--ini` The path of the certbot configs (defaults to /etc/letsencrypt/letsencrypt.ini)
* `--keysize` The size of the certificate key (defaults to 4096)
* `--post-hook` The script to run after a successful renewal (See `--post-hook` in certbot)
"""
# CLI option schema consumed by Parser.parse/2 in run/1 below.
@options %{
email: :string,
domains: :list,
webroot: :string,
ini: :string,
keysize: :integer,
post_hook: :string
}
# Ensures certbot is available (delegates to the install task first), then
# runs `certbot certonly` non-interactively using the parsed CLI options.
def run(raw_args) do
Sslcerts.start()
Install.run(["certbot" | raw_args])
raw_args
|> Parser.parse(@options)
|> invoke(fn {%{ini: ini, post_hook: post_hook, domains: domains}, []} ->
System.cmd(
"certbot",
[
"certonly",
"--non-interactive",
"--agree-tos",
"--post-hook",
# Default post-hook drops a marker file named after the first domain.
post_hook || "touch /tmp/certbot.#{domains |> List.first()}.created",
"--config",
ini
]
)
end)
|> shell_info(raw_args)
end
def shell_info({output, _}, opts), do: Shell.info(output, opts)
end
| 26.065574 | 101 | 0.608805 |
1ca423d30fdc205faa4acecdbe794cb603479a1e | 979 | ex | Elixir | uptime_gui/test/support/channel_case.ex | mattiaslundberg/uptime | cef2657e5b5a4a851c088947ee9050c3b31dabdf | [
"MIT"
] | null | null | null | uptime_gui/test/support/channel_case.ex | mattiaslundberg/uptime | cef2657e5b5a4a851c088947ee9050c3b31dabdf | [
"MIT"
] | null | null | null | uptime_gui/test/support/channel_case.ex | mattiaslundberg/uptime | cef2657e5b5a4a851c088947ee9050c3b31dabdf | [
"MIT"
] | null | null | null | defmodule UptimeGuiWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
import UptimeGui.Factories
# The default endpoint for testing
@endpoint UptimeGuiWeb.Endpoint
end
end
# Checks out a SQL sandbox connection for each test; non-async tests switch
# to shared mode so processes spawned by the test reuse the same connection.
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(UptimeGui.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(UptimeGui.Repo, {:shared, self()})
end
:ok
end
end
| 24.475 | 71 | 0.71808 |
1ca425f82906a9d8ba4d285e780ab5596fe22b50 | 3,372 | ex | Elixir | apps/rig_api/lib/rig_api/controllers/message_controller.ex | vanillebear/reactive-interaction-gateway | 5eb2afa00b8e74bddfd712ba282a3768e8ede239 | [
"Apache-2.0"
] | null | null | null | apps/rig_api/lib/rig_api/controllers/message_controller.ex | vanillebear/reactive-interaction-gateway | 5eb2afa00b8e74bddfd712ba282a3768e8ede239 | [
"Apache-2.0"
] | 132 | 2018-11-26T14:00:54.000Z | 2022-03-11T04:17:54.000Z | apps/rig_api/lib/rig_api/controllers/message_controller.ex | vanillebear/reactive-interaction-gateway | 5eb2afa00b8e74bddfd712ba282a3768e8ede239 | [
"Apache-2.0"
] | null | null | null | defmodule RigApi.MessageController do
require Logger
use RigApi, :controller
use PhoenixSwagger
alias RigCloudEvents.CloudEvent
action_fallback(RigApi.FallbackController)
@event_filter Application.get_env(:rig, :event_filter)
# Swagger/OpenAPI documentation for POST /v1/messages; purely declarative —
# the request itself is handled by create/2 below.
swagger_path :create do
post("/v1/messages")
summary("Submit an event, to be forwarded to subscribed frontends.")
description("Allows you to submit a single event to RIG using a simple, \
synchronous call. While for production setups we recommend ingesting events \
asynchronously (e.g., via a Kafka topic), using this endpoint can be simple \
alternative during development or for low-traffic production setups.")
parameters do
messageBody(
:body,
Schema.ref(:CloudEvent),
"CloudEvent",
required: true
)
end
response(202, "Accepted - message queued for transport")
response(400, "Bad Request: Failed to parse request body :parse-error")
end
@doc """
Accepts message to be sent to front-ends.
Note that `message` is _always_ a map. For example:
- Given '"foo"', the `:json` parser will pass '{"_json": "foo"}'.
- Given 'foo', the `:urlencoded` parser will pass '{"foo": nil}'.
"""
def create(conn, message) do
# Forward only payloads that parse as valid CloudEvents; reply 202
# immediately since delivery is asynchronous.
with {:ok, cloud_event} <- CloudEvent.parse(message) do
@event_filter.forward_event(cloud_event)
send_resp(conn, :accepted, "message queued for transport")
else
# Malformed payloads are reported back as 400 with the parser's reason.
{:error, reason} ->
conn
|> put_status(:bad_request)
|> text("Failed to parse request body: #{inspect(reason)}")
end
end
# Swagger schema definitions referenced by swagger_path/2 above: describes
# the CloudEvents v0.2 envelope accepted by POST /v1/messages.
def swagger_definitions do
%{
CloudEvent:
swagger_schema do
title("CloudEvent")
description("The broadcasted CloudEvent according to the CloudEvents spec.")
properties do
id(
:string,
"ID of the event. The semantics of this string are explicitly undefined to ease \
the implementation of producers. Enables deduplication.",
required: true,
example: "A database commit ID"
)
specversion(
:string,
"The version of the CloudEvents specification which the event uses. This \
enables the interpretation of the context. Compliant event producers \
MUST use a value of 0.2 when referring to this version of the \
specification.",
required: true,
example: "0.2"
)
source(
:string,
"This describes the event producer. Often this will include information such \
as the type of the event source, the organization publishing the event, the \
process that produced the event, and some unique identifiers. The exact syntax \
and semantics behind the data encoded in the URI is event producer defined.",
required: true,
example: "/cloudevents/spec/pull/123"
)
type(
:string,
"Type of occurrence which has happened. Often this attribute is used for \
routing, observability, policy enforcement, etc.",
required: true,
example: "com.example.object.delete.v2"
)
end
end
}
end
end
| 32.737864 | 95 | 0.612396 |
1ca4275284f0a18cca20e2b94d9d9f22fbc9fe0b | 1,656 | ex | Elixir | clients/analytics/lib/google_api/analytics/v3/model/hash_client_id_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/analytics/lib/google_api/analytics/v3/model/hash_client_id_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/analytics/lib/google_api/analytics/v3/model/hash_client_id_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# Generated model (see the swagger code generator notice in the file header);
# regenerate rather than editing by hand.
defmodule GoogleApi.Analytics.V3.Model.HashClientIdRequest do
@moduledoc """
JSON template for a hash Client Id request resource.
## Attributes
- clientId (String.t): Defaults to: `null`.
- kind (String.t): Defaults to: `null`.
- webPropertyId (String.t): Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:clientId => any(),
:kind => any(),
:webPropertyId => any()
}
field(:clientId)
field(:kind)
field(:webPropertyId)
end
# Poison decode hook: delegates to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Analytics.V3.Model.HashClientIdRequest do
def decode(value, options) do
GoogleApi.Analytics.V3.Model.HashClientIdRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Analytics.V3.Model.HashClientIdRequest do
  # JSON encoding is shared across all generated models via
  # GoogleApi.Gax.ModelBase.encode/2.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30.666667 | 80 | 0.72343 |
1ca4300b6cbf49a2e8bd17ebe4a6f2689a1abba6 | 703 | exs | Elixir | test/api/number_test.exs | emeric-martineau/cloud_stack_lang | 50c9164c06b2a683d3de84c493aaddd3e55de8b8 | [
"Apache-2.0"
] | null | null | null | test/api/number_test.exs | emeric-martineau/cloud_stack_lang | 50c9164c06b2a683d3de84c493aaddd3e55de8b8 | [
"Apache-2.0"
] | null | null | null | test/api/number_test.exs | emeric-martineau/cloud_stack_lang | 50c9164c06b2a683d3de84c493aaddd3e55de8b8 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2020 Cloud Stack Lang Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule CloudStackLang.NumberTest do
  # Doctest-only suite: runs the `iex>` examples embedded in
  # CloudStackLang.Number's documentation as tests.
  use ExUnit.Case, async: true
  doctest CloudStackLang.Number
end
| 35.15 | 74 | 0.770982 |
1ca432c40530d53999a82e602531a029c7ee2325 | 1,085 | ex | Elixir | lib/sutur_web/channels/user_socket.ex | ab-zu/sutur | f314ed29b344fbe0139bd87ac01caf577b1d592e | [
"MIT"
] | 1 | 2021-11-16T02:18:31.000Z | 2021-11-16T02:18:31.000Z | lib/sutur_web/channels/user_socket.ex | ab-zu/sutur | f314ed29b344fbe0139bd87ac01caf577b1d592e | [
"MIT"
] | null | null | null | lib/sutur_web/channels/user_socket.ex | ab-zu/sutur | f314ed29b344fbe0139bd87ac01caf577b1d592e | [
"MIT"
defmodule SuturWeb.UserSocket do
  @moduledoc """
  Transport socket for browser clients.

  No channels are declared yet, every connection is accepted without
  authentication, and sockets are anonymous (`id/1` returns `nil`),
  so per-user broadcasts/disconnects are not possible in this state.
  """
  use Phoenix.Socket

  ## Channels
  # channel "room:*", SuturWeb.RoomChannel

  # Socket params are passed from the client and can
  # be used to verify and authenticate a user. After
  # verification, you can put default assigns into
  # the socket that will be set for all channels, ie
  #
  #     {:ok, assign(socket, :user_id, verified_user_id)}
  #
  # To deny connection, return `:error`.
  #
  # See `Phoenix.Token` documentation for examples in
  # performing token verification on connect.
  @impl true
  def connect(_params, socket, _connect_info) do
    # NOTE(review): accepts every connection and ignores `_params`
    # (e.g. a token) — confirm this is intended before production use.
    {:ok, socket}
  end

  # Socket id's are topics that allow you to identify all sockets for a given user:
  #
  #     def id(socket), do: "user_socket:#{socket.assigns.user_id}"
  #
  # Would allow you to broadcast a "disconnect" event and terminate
  # all active sockets and channels for a given user:
  #
  #     SuturWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
  #
  # Returning `nil` makes this socket anonymous.
  @impl true
  def id(_socket), do: nil
end
| 30.138889 | 83 | 0.693088 |
1ca44adca8d5aac30d6bd0e23e6aedce8a748bfa | 1,675 | exs | Elixir | test/plausible_web/views/stats_view_test.exs | plausible-insights/plausible | 88173342b9e969894879bfb2e8d203426f6a1b1c | [
"MIT"
] | 984 | 2019-09-02T11:36:41.000Z | 2020-06-08T06:25:48.000Z | test/plausible_web/views/stats_view_test.exs | plausible-insights/plausible | 88173342b9e969894879bfb2e8d203426f6a1b1c | [
"MIT"
] | 24 | 2019-09-10T09:53:17.000Z | 2020-06-08T07:35:26.000Z | test/plausible_web/views/stats_view_test.exs | plausible-insights/plausible | 88173342b9e969894879bfb2e8d203426f6a1b1c | [
"MIT"
defmodule PlausibleWeb.StatsView.Test do
  use PlausibleWeb.ConnCase, async: true
  alias PlausibleWeb.StatsView

  # Covers large_number_format/1 across the magnitude boundaries:
  # < 1k verbatim, then k (thousands), M (millions) and B (billions),
  # including truncation (not rounding) of the fractional digit.
  #
  # Fixes: several test names contradicted their assertions (e.g. the
  # test titled "15_993 becomes 15.9k" asserted on 15_923, and the
  # "1m"/"2.5bn"-style names did not match the asserted "1M"/"2.5B"
  # output). Names now describe exactly what is asserted; no assertion
  # was changed.
  describe "large_number_format" do
    test "numbers under 1000 stay the same" do
      assert StatsView.large_number_format(100) == "100"
    end

    test "1000 becomes 1k" do
      assert StatsView.large_number_format(1000) == "1k"
    end

    test "1111 becomes 1.1k" do
      assert StatsView.large_number_format(1111) == "1.1k"
    end

    test "10_000 becomes 10k" do
      assert StatsView.large_number_format(10_000) == "10k"
    end

    test "15_923 becomes 15.9k" do
      assert StatsView.large_number_format(15_923) == "15.9k"
    end

    # Renamed from "wat": documents that the fractional digit is
    # dropped entirely when zero (49_012 -> "49k", not "49.0k").
    test "49_012 becomes 49k" do
      assert StatsView.large_number_format(49012) == "49k"
    end

    test "999_999 becomes 999k" do
      assert StatsView.large_number_format(999_999) == "999k"
    end

    test "1_000_000 becomes 1M" do
      assert StatsView.large_number_format(1_000_000) == "1M"
    end

    test "2_590_000 becomes 2.5M" do
      assert StatsView.large_number_format(2_590_000) == "2.5M"
    end

    test "99_999_999 becomes 99.9M" do
      assert StatsView.large_number_format(99_999_999) == "99.9M"
    end

    test "101_000_000 becomes 101M" do
      assert StatsView.large_number_format(101_000_000) == "101M"
    end

    test "2_500_000_000 becomes 2.5B" do
      assert StatsView.large_number_format(2_500_000_000) == "2.5B"
    end

    test "25_500_000_000 becomes 25.5B" do
      assert StatsView.large_number_format(25_500_000_000) == "25.5B"
    end

    test "250_500_000_000 becomes 250B" do
      assert StatsView.large_number_format(250_500_000_000) == "250B"
    end
  end
end
| 26.587302 | 69 | 0.690746 |
1ca4501b92f645699509480c0d7cd78dbf3ae314 | 6,139 | ex | Elixir | lib/mix/lib/mix/compilers/erlang.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/compilers/erlang.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/compilers/erlang.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
defmodule Mix.Compilers.Erlang do
  @moduledoc false

  @doc """
  Compiles the files in `mappings` with given extensions into
  the destination, automatically invoking the callback for each
  stale input and output pair (or for all if `force` is `true`) and
  removing files that no longer have a source, while keeping the
  `manifest` up to date.

  `mappings` should be a list of tuples in the form of `{src, dest}` paths.

  ## Examples

  For example, a simple compiler for Lisp Flavored Erlang
  would be implemented like:

      manifest = Path.join Mix.Project.manifest_path, ".compile.lfe"
      dest = Mix.Project.compile_path

      compile manifest, [{"src", dest}], :lfe, :beam, opts, fn
        input, output ->
          :lfe_comp.file(to_erl_file(input),
                         [output_dir: Path.dirname(output)])
      end

  The command above will:

    1. look for files ending with the `lfe` extension in `src` path
       and their `beam` counterpart in `ebin` path

    2. for each stale file (or for all if `force` is `true`),
       invoke the callback passing the calculated input
       and output

    3. update the manifest with the newly compiled outputs

    4. remove any output in the manifest that does not
       have an equivalent source

  The callback must return `{:ok, mod}` or `:error` in case
  of error. An error is raised at the end if any of the
  files failed to compile.
  """
  def compile(manifest, mappings, src_ext, dest_ext, force, callback) when is_boolean(force) do
    # Legacy arity: a bare boolean `force` is normalized into an opts list.
    compile(manifest, mappings, src_ext, dest_ext, [force: force], callback)
  end

  def compile(manifest, mappings, src_ext, dest_ext, opts, callback) do
    force = opts[:force]

    # Expand each {src_dir, dest_dir} pair into per-file targets tagged
    # either {:stale, src, dest} (needs compiling) or {:ok, src, dest}.
    files =
      for {src, dest} <- mappings do
        extract_targets(src, src_ext, dest, dest_ext, force)
      end |> Enum.concat

    compile(manifest, files, src_ext, opts, callback)
  end

  @doc """
  Compiles the given `mappings`.

  `mappings` should be a list of tuples in the form of `{src, dest}`.

  A `manifest` file and a `callback` to be invoked for each src/dest pair
  must be given. A src/dest pair where destination is `nil` is considered
  to be up to date and won't be (re-)compiled.
  """
  def compile(manifest, mappings, opts \\ [], callback) do
    compile(manifest, mappings, :erl, opts, callback)
  end

  # Core implementation shared by all public compile/* heads. `mappings`
  # here is the per-file, tagged form produced by extract_targets/5.
  defp compile(manifest, mappings, ext, opts, callback) do
    # Only the pairs tagged :stale actually get recompiled.
    stale = for {:stale, src, dest} <- mappings, do: {src, dest}

    # Get the previous entries from the manifest
    timestamp = :calendar.universal_time()
    entries = read_manifest(manifest)

    # Files to remove are the ones in the manifest
    # but they no longer have a source
    removed = Enum.filter(entries, fn entry ->
      not Enum.any?(mappings, fn {_status, _src, dest} -> dest == entry end)
    end)

    if stale == [] && removed == [] do
      :noop
    else
      Mix.Utils.compiling_n(length(stale), ext)
      Mix.Project.ensure_structure()

      # Let's prepend the newly created path so compiled files
      # can be accessed still during compilation (for behaviours
      # and what not).
      Code.prepend_path(Mix.Project.compile_path)

      # Remove manifest entries with no source
      Enum.each(removed, &File.rm/1)
      verbose = opts[:verbose]

      # Compile stale files and print the results
      results =
        for {input, output} <- stale do
          result = callback.(input, output)

          with {:ok, _} <- result do
            # Stamp the output with the run's timestamp so the next
            # staleness check sees it as fresh.
            File.touch!(output, timestamp)
            verbose && Mix.shell.info "Compiled #{input}"
          end

          result
        end

      # Write final entries to manifest
      entries = (entries -- removed) ++ Enum.map(stale, &elem(&1, 1))
      write_manifest(manifest, :lists.usort(entries), timestamp)

      # Raise if any error, return :ok otherwise
      if :error in results do
        Mix.raise "Encountered compilation errors"
      end
      :ok
    end
  end

  @doc """
  Ensures the native Erlang application is available.
  """
  def ensure_application!(app, input) do
    case Application.ensure_all_started(app) do
      {:ok, _} ->
        :ok
      {:error, _} ->
        # Typically hit on distros that split OTP into sub-packages.
        Mix.raise "Could not compile #{inspect Path.relative_to_cwd(input)} because " <>
                  "the application \"#{app}\" could not be found. This may happen if " <>
                  "your package manager broke Erlang into multiple packages and may " <>
                  "be fixed by installing the missing \"erlang-dev\" and \"erlang-#{app}\" packages"
    end
  end

  @doc """
  Removes compiled files for the given `manifest`.
  """
  def clean(manifest) do
    Enum.each read_manifest(manifest), &File.rm/1
    File.rm manifest
  end

  @doc """
  Converts the given `file` to a format accepted by
  the Erlang compilation tools.
  """
  def to_erl_file(file) do
    # Erlang APIs take charlists, not Elixir binaries.
    to_charlist(file)
  end

  @doc """
  Asserts that the `:erlc_paths` configuration option that many Mix tasks
  rely on is valid.

  Raises a `Mix.Error` exception if the option is not valid, returns `:ok`
  otherwise.
  """
  def assert_valid_erlc_paths(erlc_paths) do
    if is_list(erlc_paths) do
      :ok
    else
      Mix.raise ":erlc_paths should be a list of paths, got: #{inspect(erlc_paths)}"
    end
  end

  # Walks `src_dir` for files with `src_ext` and pairs each with its
  # `dest_dir` target, tagging it :stale when forced or out of date.
  defp extract_targets(src_dir, src_ext, dest_dir, dest_ext, force) do
    files = Mix.Utils.extract_files(List.wrap(src_dir), List.wrap(src_ext))

    for file <- files do
      module = module_from_artifact(file)
      target = Path.join(dest_dir, module <> "." <> to_string(dest_ext))

      if force || Mix.Utils.stale?([file], [target]) do
        {:stale, file, target}
      else
        {:ok, file, target}
      end
    end
  end

  # "path/to/foo.erl" -> "foo"
  defp module_from_artifact(artifact) do
    artifact |> Path.basename |> Path.rootname
  end

  # Returns the list of output paths recorded in the manifest, or []
  # when the manifest does not exist yet.
  # NOTE(review): an existing but empty manifest yields [""] here, which
  # later results in a harmless File.rm("") — confirm this is acceptable.
  defp read_manifest(file) do
    case File.read(file) do
      {:ok, contents} -> String.split(contents, "\n")
      {:error, _} -> []
    end
  end

  # Persists the manifest (one output path per line) with the run's
  # timestamp, creating parent directories as needed.
  defp write_manifest(file, entries, timestamp) do
    Path.dirname(file) |> File.mkdir_p!
    File.write!(file, Enum.join(entries, "\n"))
    File.touch!(file, timestamp)
  end
end
| 30.391089 | 100 | 0.646522 |
1ca45a5945a3c1eace2112344102d8f67b09597c | 947 | ex | Elixir | lib/portal/application.ex | auth0-samples/auth0-elixir-single-page-app | 05c344e828ecf5ac716c5537b0e5760571f0574f | [
"MIT"
] | 6 | 2018-07-12T20:50:21.000Z | 2021-04-10T19:53:10.000Z | lib/portal/application.ex | auth0-samples/auth0-elixir-single-page-app | 05c344e828ecf5ac716c5537b0e5760571f0574f | [
"MIT"
] | null | null | null | lib/portal/application.ex | auth0-samples/auth0-elixir-single-page-app | 05c344e828ecf5ac716c5537b0e5760571f0574f | [
"MIT"
] | 2 | 2018-01-30T22:52:01.000Z | 2018-02-05T12:55:28.000Z | defmodule Portal.Application do
use Application
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the endpoint when the application starts
supervisor(PortalWeb.Endpoint, []),
# Start your own worker by calling: Portal.Worker.start_link(arg1, arg2, arg3)
# worker(Portal.Worker, [arg1, arg2, arg3]),
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Portal.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
PortalWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 31.566667 | 84 | 0.719113 |
1ca45bc30f84a59c2c4560b4890ba3c2900e9c3f | 835 | exs | Elixir | exercism/elixir/rna-transcription/test/rna_transcription_test.exs | TGITS/programming-workouts | 799e805ccf3fd0936ec8ac2417f7193b8e9bcb55 | [
"MIT"
] | null | null | null | exercism/elixir/rna-transcription/test/rna_transcription_test.exs | TGITS/programming-workouts | 799e805ccf3fd0936ec8ac2417f7193b8e9bcb55 | [
"MIT"
] | 16 | 2020-05-30T12:38:13.000Z | 2022-02-19T09:23:31.000Z | exercism/elixir/rna-transcription/test/rna_transcription_test.exs | TGITS/programming-workouts | 799e805ccf3fd0936ec8ac2417f7193b8e9bcb55 | [
"MIT"
defmodule RnaTranscriptionTest do
  use ExUnit.Case

  # The commented-out `@tag :pending` markers below are leftovers from
  # the exercism exercise template, where tests are unskipped one at a
  # time; all tests run unconditionally here.

  # @tag :pending
  test "transcribes guanine to cytosine" do
    assert RnaTranscription.to_rna('G') == 'C'
  end

  # @tag :pending
  test "transcribes cytosine to guanine" do
    assert RnaTranscription.to_rna('C') == 'G'
  end

  # @tag :pending
  test "transcribes thymidine to adenine" do
    assert RnaTranscription.to_rna('T') == 'A'
  end

  # @tag :pending
  test "transcribes adenine to uracil" do
    assert RnaTranscription.to_rna('A') == 'U'
  end

  # @tag :pending
  test "it transcribes all dna nucleotides to rna equivalents" do
    assert RnaTranscription.to_rna('ACGTGGTCTTAA') == 'UGCACCAGAAUU'
  end

  test "An invalid nucleotides value should raise a RuntimeException" do
    assert_raise RuntimeError, fn ->
      RnaTranscription.to_rna('H')
    end
  end
end
| 23.857143 | 72 | 0.700599 |
1ca4949eb1370db9d206b0f4868b998371739583 | 383 | ex | Elixir | 2015/day6/lib/main.ex | SuddenGunter/adventofcode | 702dd927b1d23c4c5c4b2e67898f4b3c914abfcf | [
"MIT"
] | null | null | null | 2015/day6/lib/main.ex | SuddenGunter/adventofcode | 702dd927b1d23c4c5c4b2e67898f4b3c914abfcf | [
"MIT"
] | null | null | null | 2015/day6/lib/main.ex | SuddenGunter/adventofcode | 702dd927b1d23c4c5c4b2e67898f4b3c914abfcf | [
"MIT"
defmodule Day6.CLI do
  @moduledoc false

  # Escript entry point: parses "data.txt" with Parser.data/1 and either
  # prints both puzzle answers or the read/parse error.
  def main(_args) do
    "data.txt"
    |> Input.read(&Parser.data/1)
    |> handle_result()
  end

  defp handle_result({:ok, contents}), do: solve(contents)
  defp handle_result({:error, reason}), do: IO.puts(reason)

  defp solve(contents) do
    print_answer("task #1 solution: ", Task1.solution(contents))
    print_answer("task #2 solution: ", Task2.solution(contents))
  end

  # Writes the label (no newline), then the answer followed by a newline,
  # matching the original IO.write/IO.puts output exactly.
  defp print_answer(label, answer) do
    IO.write(label)
    IO.puts(answer)
  end
end
| 22.529412 | 50 | 0.616188 |
1ca49a4aba32a1447da8af0dc7ee34405a030b7f | 737 | ex | Elixir | test/support/fixtures.ex | Strech/avrora | e8f0904950d25ea86dd629a1e834938a65836791 | [
"MIT"
] | 59 | 2019-07-11T15:29:26.000Z | 2022-03-23T19:35:55.000Z | test/support/fixtures.ex | Strech/avrora | e8f0904950d25ea86dd629a1e834938a65836791 | [
"MIT"
] | 63 | 2019-08-09T17:52:26.000Z | 2022-03-16T22:08:04.000Z | test/support/fixtures.ex | Strech/avrora | e8f0904950d25ea86dd629a1e834938a65836791 | [
"MIT"
defmodule Fixtures do
  @moduledoc """
  End-to-end fixtures validating `Avrora.Client`'s multi-client support.

  These modules are compiled as part of the test run, so if `mix test`
  fails with an error such as

      could not compile dependency :avrora, "mix compile" failed

  it means that something is wrong with the `Avrora.Client` module.
  """

  defmodule Alpha do
    @moduledoc false
    # NOTE(review): "schms" below looks like a typo for "schemas", but it
    # is a runtime path — confirm the directory name before changing it.
    use Avrora.Client, schemas_path: Path.expand("./test/fixtures/schms")
  end

  defmodule Beta do
    @moduledoc false
    use Avrora.Client, schemas_path: Path.expand("./test/fixtures/avro")
  end

  defmodule Gamma do
    @moduledoc false
    # Exercises the remaining client options: otp_app, registry URL and
    # names cache TTL.
    use Avrora.Client, otp_app: :area, registry_url: "http://gamma.io", names_cache_ttl: 1_000
  end
end
| 25.413793 | 94 | 0.710991 |
1ca4c8dcad94d42f8d9f3a16b085f9096ce0ce6d | 1,786 | ex | Elixir | plugins/one_chat/lib/one_chat/robot/adapters/ucx_chat/connection.ex | smpallen99/ucx_ucc | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 11 | 2017-05-15T18:35:05.000Z | 2018-02-05T18:27:40.000Z | plugins/one_chat/lib/one_chat/robot/adapters/ucx_chat/connection.ex | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 15 | 2017-11-27T10:38:05.000Z | 2018-02-09T20:42:08.000Z | plugins/one_chat/lib/one_chat/robot/adapters/ucx_chat/connection.ex | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
defmodule OneChat.Robot.Adapters.OneChat.Connection do
  @moduledoc false
  # Singleton GenServer (registered as :robot) bridging the chat robot
  # and OneChat rooms: forwards incoming room messages to the owner
  # process and posts the robot's replies back into the room.
  use GenServer
  alias OneChat.Robot.Adapters.OneChat.{Connection}
  @name :robot
  require Logger

  # name: adapter name, owner: pid that receives {:message, ...} tuples,
  # user: currently always nil (see note in start/1).
  defstruct name: nil, owner: nil, user: nil

  # Starts the (unlinked) connection process; the caller becomes owner.
  def start(opts) do
    name = Keyword.get(opts, :name)
    # user = Keyword.get(opts, :user, get_system_user())
    # NOTE(review): `user` is hard-coded to nil, overriding the lookup
    # left commented out above — looks like leftover WIP; confirm.
    user = nil
    GenServer.start(__MODULE__, {self(), name, user}, name: @name)
  end

  # Returns the full %Connection{} state (used for debugging/inspection).
  def status(), do: GenServer.call(@name, :status)

  def init({owner, name, user}) do
    # Defer any post-start work out of init (handler is currently a no-op).
    GenServer.cast(self(), :after_init)
    {:ok, %Connection{name: name, owner: owner, user: user}}
  end

  def handle_cast(:after_init, state) do
    {:noreply, state}
  end

  def handle_call(:status, _from, state) do
    {:reply, state, state}
  end

  # Robot reply headed for a room: image URLs are wrapped in an <img>
  # tag, everything else is posted verbatim.
  def handle_info({:reply, _payload = %{text: text, room: room, user: %{id: user_id, name: _name}}}, %{} = state) do
    body = if Regex.match? ~r/^http.+?(jpg|jpeg|png|gif)$/, text do
      # body = String.replace(text, ~r/^https?:\/\//, "")
      ~s(<img src="#{text}" class="bot-img">)
    else
      text
    end
    # this is where we send a message to the users.
    # need to figure out if this is a private message, or a channel message
    # Logger.error "reply text: #{inspect text} "
    # IO.inspect {room, payload}, label: "bot {room, payload}"
    OneChatWeb.RoomChannel.Message.bot_response_message room, user_id, body
    {:noreply, state}
  end

  # Incoming room message: relayed to the owner process after a short
  # delay. NOTE(review): the 200ms sleep happens in a throwaway spawned
  # process so this server is not blocked; the reason for the delay is
  # not evident from this module — confirm with the robot side.
  @doc false
  def handle_info({:message, text, channel, user}, %{owner: owner} = state) do
    Logger.debug "message text: #{inspect text}, channel.id: #{inspect channel.id}"
    spawn fn ->
      :timer.sleep 200
      Kernel.send(owner, {:message, %{"text" => text, "user" => user, "channel" => channel}})
    end
    {:noreply, state}
  end
end
| 28.349206 | 117 | 0.637178 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.