code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
% Typechecker core: computes the domain (set of possible values) of a term
% and checks it against expected domains under a strictness mode, which is
% one of `strict`, `normal` or `lenient`.
-module(typecheck).
-import(lists, [zip/2, unzip/1, unzip3/1, seq/2, nth/2]).
-import(domain, [intersection/1, intersection/2, intersect_envs/2, union/1, union/2]).
-export([domain/6, qualified_apply/7]).

-include_lib("eunit/include/eunit.hrl").

% Compute the domain of `Term`. `Arities` maps locally-known tags to their
% arity; every element of `Args` is resolved against it:
%   - unknown args pass through unchanged,
%   - nullary args are evaluated in `DomainModule` immediately,
%   - args with parameters become curried env functions (see env_fun/4).
% The env (tag -> env fun) and the resolved argument domains go to def/5.
domain(Mode, Arities, Stack, Args, DomainModule, Term) ->
    ArgDomains = [case maps:get(A, Arities, undefined) of
                      undefined -> A;
                      0 -> erlang:apply(DomainModule, A, []);
                      Arity -> EnvF = env_fun(Mode, DomainModule, A, Arity),
                               % element(2, Term) is the term's context (Ctx)
                               EnvF(Stack, element(2, Term))
                  end || A <- Args],
    Env = maps:map(fun(Tag, Arity) -> env_fun(Mode, DomainModule, Tag, Arity) end, Arities),
    def(Mode, Env, Stack, ArgDomains, Term).

% The environment consists of curried functions, taking first a stack and then
% (if the arity is more than 1) whatever parameters are needed
env_fun(Mode, DomainModule, Tag, Arity) ->
    fun(Stack, Ctx) ->
            case Arity of
                0 -> apply_domain(Mode, Stack, Ctx, DomainModule, Tag, []);
                _N -> F = fun(Params) -> apply_domain(Mode, Stack, Ctx, DomainModule, Tag, Params) end,
                      utils:function(Arity, F)
            end
    end.

% Typecheck a definition. For a `fun` the clauses are checked against the
% argument domains; a nullary definition is checked as a plain expression.
% Returns the resulting domain or {error, Errs}.
def(Mode, Env, Stack, ArgDomains, {'fun', Ctx, Clauses}) ->
    % Each argument domain is bound to a fresh tag so that the domains the
    % clauses actually inferred for the arguments can be read back from LEnv.
    Tags = [symbol:id('') || _ <- ArgDomains],
    case clauses(Mode, Env, Stack, zip(Tags, ArgDomains), Clauses) of
        {error, Errs} -> {error, Errs};
        {ok, {LEnv, LDomain}} ->
            ActualDomains = [maps:get(Tag, LEnv) || Tag <- Tags],
            case check(Mode, Stack, ArgDomains, ActualDomains, Ctx, LDomain) of
                {error, Errs} -> {error, Errs};
                {ok, Domain} -> Domain
            end
    end;
def(Mode, Env, Stack, [], Term) ->
    case expr(Mode, Env, Stack, Term) of
        {error, Errs} -> {error, Errs};
        {ok, {_, Domain}} -> Domain
    end.

% Check the clauses of a function against the argument domains. Produces the
% union of the clause envs, the union of the clause result domains, and (via
% pivot/1) the per-argument union of the domains each reachable clause accepts.
clauses(Mode, Env, Stack, Args, [{clause, Ctx, _, _} | _] = Clauses) ->
    ArgDomains = [ArgD || {_, ArgD} <- Args],
    Scan = fun S([]) -> [];
               S([Clause | Cs]) ->
                   {E, D, PsDomains} = clause(Mode, Env, Stack, Args, Clause),
                   % Only scan patterns until (and including) the first pattern which domain
                   % is a subset of the pattern domain. There's no point in keeping on
                   % scanning patterns if we can infer from the argument domains that they
                   % will not be reached
                   case domain:subset(ArgDomains, PsDomains) of
                       true -> [{E, D, PsDomains}];
                       false -> [{E, D, PsDomains} | S(Cs)]
                   end
           end,
    {EnvCs, DomainCs, DomainPs} = unzip3(Scan(Clauses)),
    ActualDomains = [union(Ds) || Ds <- pivot(DomainPs)],
    check(Mode, Stack, ArgDomains, ActualDomains, Ctx, {env_union(EnvCs), union(DomainCs)}).

% Check a single clause: each argument domain is matched against its pattern,
% then the clause body is checked in the env extended with pattern bindings.
% Returns {ClauseEnv, ResultDomainOrError, PatternDomains}.
clause(Mode, Env, Stack, Args, {clause, _, Patterns, Expr}) ->
    Scan = fun(ArgTag, Domain, Pattern) ->
                   % The strictness of a pattern is either `normal` or
                   % `lenient`. A pattern is evaluated at compile time in to a
                   % literal domain.
                   %
                   % When we typecheck using a strictness mode of `strict`, we
                   % check that the set of values that an expression can take
                   % is a subset of possible values which it is allowed to take
                   % given the constraints in place (e.g. a pattern). When
                   % using a strictness mode of `normal`, we check instead that
                   % the value domain intersects with the constraint domain,
                   % i.e. that some values satisfy the constraints.
                   %
                   % When evaluating code at runtime, having made sure that a
                   % value domain is a subset of the constraints, means that
                   % we're sure that any function called with the value is
                   % defined for that value.
                   %
                   % Patterns on the other hand are evaluated to domain
                   % literals (e.g. a list of possible values) at compile time
                   % and aren't 'executed' at run-time. When we evaluate a
                   % pattern at run-time, we're interested in the fact that a
                   % pattern may match. Even though we might type-check
                   % with the strictness mode set to 'strict', we aren't
                   % interested in whether an expression is strictly a subset
                   % of a pattern.
                   %
                   % There's one complication in that application arguments in
                   % a pattern are treated as expressions. This means that an
                   % argument to an application could itself be an expression
                   % calling a function. Since this evaluation would take time
                   % at compile time, we should only be evaluating the domain
                   % level of any function, which removes the risk of calling a
                   % function for values it isn't defined for. It might be a
                   % faff to make sure that we don't end up passing the
                   % application a bunch of terms that haven't been evaluated
                   % to their domains though.
                   PatternMode = case Mode of
                                     lenient -> lenient;
                                     _ -> normal
                                 end,
                   case pattern(PatternMode, Env, Stack, Domain, Pattern) of
                       {error, _} = Errs -> {#{}, Errs};
                       {ok, {E, D}} -> {merge(E, #{ArgTag => D}), D}
                   end
           end,
    {PsEnvs, PsDomains} = unzip([Scan(ArgTag, ArgDomain, Pattern) || {{ArgTag, ArgDomain}, Pattern} <- zip(Args, Patterns)]),
    ClauseEnv = merge([Env | PsEnvs]),
    % If any pattern returns an error, there's no point in scanning the
    % expression since it can't be satisfied under the current constraints
    case lists:member(none, PsDomains) orelse lists:any(fun error:match/1, PsDomains) of
        true -> {ClauseEnv, none, PsDomains};
        false ->
            case expr(Mode, ClauseEnv, Stack, Expr) of
                {error, _} = Errs -> {ClauseEnv, Errs, PsDomains};
                {ok, {ExprEnv, ExprDomain}} -> {merge(ClauseEnv, ExprEnv), ExprDomain, PsDomains}
            end
    end.
% pattern/5 matches a pattern term against the domain of the value being
% matched. Returns {ok, {Env, Domain}} where Env binds variables introduced by
% the pattern (as env funs) and Domain is the narrowed domain, or {error, _}.

% The constraining domain is a sum: match against each member, union results.
pattern(Mode, Env, Stack, {sum, Ds}, Term) ->
    {SumEnvs, SumDomains} = comb([pattern(Mode, Env, Stack, D, Term) || D <- Ds]),
    case union(SumDomains) of
        {error, Errs} -> {error, Errs};
        D -> {ok, {env_union(SumEnvs), D}}
    end;
% Recursive domain: unroll one level, then match.
pattern(Mode, Env, Stack, {recur, _F} = D, Term) ->
    pattern(Mode, Env, Stack, domain:unroll(D, element(2, Term)), Term);
% `any` against structured terms: expand to a structural domain first.
pattern(Mode, Env, Stack, any, {list, _, Elems} = Term) ->
    pattern(Mode, Env, Stack, [any || _ <- Elems], Term);
pattern(Mode, Env, Stack, any, {tagged, _, _, _} = Term) ->
    pattern(Mode, Env, Stack, {tagged, symbol:tag(Term), any}, Term);
pattern(Mode, Env, Stack, any, {dict, Ctx, Elems}) ->
    case map_pattern(Mode, Env, Stack, any, Elems) of
        {error, Errs} -> {error, Errs};
        {ok, {KeyEnvs, KeyDomains}} ->
            case check(Mode, Stack, any, intersection(KeyDomains), Ctx) of
                {error, Errs} -> {error, Errs};
                {ok, ResDomain} -> {ok, {merge(KeyEnvs), ResDomain}}
            end
    end;
% Pattern of `[T, S, R]`
pattern(Mode, Env, Stack, Domain, {list, _, Elems}) when is_list(Domain) andalso (length(Domain) == length(Elems)) ->
    % Element count matches: check element-wise.
    case map_pattern(Mode, Env, Stack, Domain, Elems) of
        {error, Errs} -> {error, Errs};
        {ok, {TEnvs, TDomains}} -> {ok, {env_union(TEnvs), TDomains}}
    end;
pattern(Mode, Env, Stack, Domain, {list, Ctx, Elems}) ->
    % Domain isn't an equal-length list: infer the list's own domain
    % leniently, then check it against the constraining domain.
    case map_pattern(lenient, Env, Stack, any, Elems) of
        {error, Errs} -> {error, Errs};
        {ok, {TEnvs, TDomains}} ->
            case check(Mode, Stack, Domain, TDomains, Ctx) of
                {error, Errs} -> {error, Errs};
                {ok, Res} -> {ok, {env_union(TEnvs), Res}}
            end
    end;
% Pattern of `(T | S | R)`
pattern(Mode, Env, Stack, Domain, {sum, _, Elems}) ->
    {SumEnvs, SumDomains} = comb([pattern(Mode, Env, Stack, Domain, E) || E <- Elems]),
    case union(SumDomains) of
        {error, Errs} -> {error, Errs};
        D -> {ok, {env_union(SumEnvs), D}}
    end;
% Pattern of `{k: v, ...}`
pattern(Mode, Env, Stack, Domain, {dict, Ctx, Elems}) when is_map(Domain) ->
    % NOTE(review): `Mode` doubles as the maps:get/3 default, so a key absent
    % from `Domain` falls into the lenient/normal/strict branches below. This
    % would misbehave if a stored domain could itself be one of those atoms —
    % confirm domains can never be `lenient`/`normal`/`strict`.
    Scan = fun(Elem) ->
                   case maps:get(symbol:name(Elem), Domain, Mode) of
                       lenient -> pattern(Mode, Env, Stack, none, Elem);
                       normal -> error:format({nonexistent_key, symbol:name(Elem), Domain}, {typecheck, Ctx, Stack});
                       strict -> error:format({nonexistent_key, symbol:name(Elem), Domain}, {typecheck, Ctx, Stack});
                       ElemDomain -> pattern(Mode, Env, Stack, ElemDomain, Elem)
                   end
           end,
    case error:collect([Scan(E) || E <- Elems]) of
        {error, Errs} -> {error, Errs};
        {ok, Res} ->
            {KeyEnvs, KeyDomains} = unzip(Res),
            {ok, {merge(KeyEnvs), intersection([Domain | KeyDomains])}}
    end;
pattern(Mode, Env, Stack, Domain, {dict, Ctx, Elems}) ->
    % Non-map constraining domain: infer the dict's domain leniently, then
    % check the intersection of its pair domains against `Domain`.
    case map_pattern(lenient, Env, Stack, any, Elems) of
        {error, Errs} -> {error, Errs};
        {ok, {TEnvs, TDomains}} ->
            case check(Mode, Stack, Domain, intersection(TDomains), Ctx) of
                {error, Errs} -> {error, Errs};
                {ok, Res} -> {ok, {env_union(TEnvs), Res}}
            end
    end;
% Pattern of: `T: S` (tagged type)
pattern(Mode, Env, Stack, Domain, {tagged, Ctx, _, Val} = T) ->
    Tag = symbol:tag(T),
    case Domain of
        {tagged, Tag, D} ->
            % Same tag: check the payload pattern against the payload domain.
            case pattern(Mode, Env, Stack, D, Val) of
                {error, Errs} -> {error, Errs};
                {ok, {TEnv, TDomain}} -> {ok, {TEnv, {tagged, Tag, TDomain}}}
            end;
        _ ->
            % Otherwise infer the payload leniently and check the full tagged
            % domain against the constraining domain.
            case pattern(lenient, Env, Stack, any, Val) of
                {error, Errs} -> {error, Errs};
                {ok, {TEnv, TDomain}} ->
                    case check(Mode, Stack, Domain, {tagged, Tag, TDomain}, Ctx) of
                        {error, Errs} -> {error, Errs};
                        {ok, Res} -> {ok, {TEnv, Res}}
                    end
            end
    end;
% Pattern of `v: T`, e.g. an alias for a variable `v` to value of pattern `T`
pattern(Mode, Env, Stack, Domain, {pair, _, Key, Val}) ->
    case pattern(Mode, Env, Stack, Domain, Val) of
        {error, Errs} -> {error, Errs};
        {ok, {ValEnv, ValDomain}} ->
            case pattern(Mode, Env, Stack, ValDomain, Key) of
                {error, Errs} -> {error, Errs};
                {ok, {KeyEnv, KeyDomain}} -> {ok, {merge(ValEnv, KeyEnv), intersection(KeyDomain, ValDomain)}}
            end
    end;
% Pattern of `{ k: T, ... }`
pattern(Mode, Env, Stack, Domain, {dict_pair, _, Key, Val}) ->
    case pattern(Mode, Env, Stack, Domain, Val) of
        {error, Errs} -> {error, Errs};
        {ok, {ValEnv, ValDomain}} -> {ok, {ValEnv, #{symbol:name(Key) => ValDomain}}}
    end;
% Pattern: `T(a)` or `f(b)`
pattern(Mode, Env, Stack, Domain, {application, Ctx, _, _} = Term) ->
    % The arguments of a function application are interpreted as an expression
    % and not as a pattern. This allows us to pass other variables in scope as
    % arguments.
    case expr(Mode, Env, Stack, Term) of
        {error, Errs} -> {error, Errs};
        {ok, {_, AppDomain}} ->
            case check(Mode, Stack, AppDomain, Domain, Ctx) of
                {error, Errs} -> {error, Errs};
                {ok, ResDomain} -> {ok, {#{}, ResDomain}}
            end
    end;
% Pattern: `kind/prelude/Boolean`
pattern(Mode, Env, Stack, Domain, {qualified_symbol, Ctx, _, _} = Term) ->
    case expr(Mode, Env, Stack, Term) of
        {error, Errs} -> {error, Errs};
        {ok, {_, QDomain}} ->
            case check(Mode, Stack, QDomain, Domain, Ctx) of
                {error, Errs} -> {error, Errs};
                {ok, Res} -> {ok, {#{}, Res}}
            end
    end;
% Pattern: `kind/prelude/Option(T)`
pattern(Mode, Env, Stack, Domain, {qualified_application, Ctx, _, _, _} = Term) ->
    case expr(Mode, Env, Stack, Term) of
        {error, Errs} -> {error, Errs};
        {ok, {_, QDomain}} ->
            case check(Mode, Stack, QDomain, Domain, Ctx) of
                {error, Errs} -> {error, Errs};
                {ok, Res} -> {ok, {#{}, Res}}
            end
    end;
% Pattern: `1` or `'atom'` or `"string"`
pattern(Mode, _, Stack, Domain, {value, Ctx, _, Val}) ->
    case check(Mode, Stack, Domain, Val, Ctx) of
        {error, Errs} -> {error, Errs};
        {ok, Res} -> {ok, {#{}, Res}}
    end;
% Pattern: `T`
pattern(Mode, Env, Stack, Domain, {keyword, Ctx, _, _} = T) ->
    % NOTE(review): `Env` is unused in this clause (compiler warning).
    EnvDomain = symbol:tag(T),
    case check(Mode, Stack, EnvDomain, Domain, Ctx) of
        {error, Errs} -> {error, Errs};
        {ok, Res} -> {ok, {#{}, Res}}
    end;
% Pattern: `a`
pattern(Mode, Env, Stack, Domain, {variable, Ctx, Name, Tag}) ->
    % The tagger tags a top-level variable with its fname (e.g. {f, 1})
    % For consistency, the env stores both types and defs without arity
    case maps:get(Tag, Env, maps:get(Name, Env, undefined)) of
        % Fresh variable: bind it to the constraining domain.
        undefined -> {ok, {#{Tag => fun(_Stack, _Ctx) -> Domain end}, Domain}};
        EnvF ->
            case EnvF(Stack, Ctx) of
                {error, Errs} -> {error, Errs};
                EnvDomain ->
                    case check(Mode, Stack, EnvDomain, Domain, Ctx) of
                        {error, Errs} -> {error, Errs};
                        {ok, ResDomain} -> {ok, {#{Tag => fun(_Stack, _Ctx) -> ResDomain end}, ResDomain}}
                    end
            end
    end.

% expr/4 computes the domain of an expression. Returns {ok, {Env, Domain}}
% (Env carries refined variable bindings) or {error, _}.

% Expr: `m/T(a)` or `m/f(b)`
expr(Mode, Env, Stack, {qualified_application, Ctx, ModulePath, Name, Args}) ->
    case map_expr(Mode, Env, Stack, Args) of
        {error, Errs} -> {error, Errs};
        {ok, {_, ArgDomains}} -> qualified_apply(Mode, Env, Stack, Ctx, ModulePath, Name, ArgDomains)
    end;
% Expr: `m/T(a)` or `m/f(b)` (BEAM-native function)
expr(Mode, Env, Stack, {beam_application, Ctx, ModulePath, Name, Args}) ->
    % NOTE(review): `Ctx` is unused in this clause (compiler warning).
    case map_expr(Mode, Env, Stack, Args) of
        {error, Errs} -> {error, Errs};
        {ok, {_, ArgDomains}} -> beam_apply(Env, ModulePath, Name, ArgDomains)
    end;
% Expr: `kind/prelude/Boolean`
expr(Mode, Env, Stack, {qualified_symbol, Ctx, ModulePath, Name}) ->
    ModuleName = module:beam_name(ModulePath),
    Arity = utils:get_min_arity(ModuleName, Name),
    case Arity =:= 0 of
        true -> qualified_apply(Mode, Env, Stack, Ctx, ModulePath, Name, []);
        % NOTE(review): error context is {Stack, Ctx, typecheck} here but
        % {typecheck, Ctx, Stack} everywhere else — confirm intended order.
        false -> error:format({wrong_arity, module:kind_name(ModulePath), Name, 0, Arity}, {Stack, Ctx, typecheck})
    end;
% Sequencing: typecheck both expressions; the result is Expr2's.
expr(Mode, Env, Stack, {seq, _Ctx, Expr1, Expr2}) ->
    case expr(Mode, Env, Stack, Expr1) of
        {error, Errs} -> {error, Errs};
        {ok, _} -> expr(Mode, Env, Stack, Expr2)
    end;
% Expr: `val x = { ... }
%        x + 4`
expr(Mode, Env, Stack, {'let', Ctx, Pattern, Expr, Term}) ->
    case expr(Mode, Env, Stack, Expr) of
        {error, Errs} -> {error, Errs};
        {ok, {_, ExprDomain}} ->
            case pattern(Mode, Env, Stack, ExprDomain, Pattern) of
                {error, Errs} -> {error, Errs};
                {ok, {PatternEnv, PatternDomain}} ->
                    case check(Mode, Stack, ExprDomain, PatternDomain, Ctx) of
                        {error, Errs} -> {error, Errs};
                        {ok, _} -> NewEnv = merge(Env, PatternEnv),
                                   expr(Mode, NewEnv, Stack, Term)
                    end
            end
    end;
% Expr: `| T -> { ... }
%        | X -> { ....}`
expr(Mode, Env, Stack, {'fun', Ctx, [{clause, _, Ps, _} | _]} = Fun) ->
    Arity = length(Ps),
    F = utils:function(Arity,
                       fun(ArgDomains) ->
                               % I like this pattern and find it
                               % delightfully cheeky.
                               % As part of the stack I need a name for
                               % the anonymous function. To do so, I
                               % generate a function that takes a stack
                               % as an argument and use the erlang name
                               % of this function to name the name
                               % function before calling it with the
                               % newly constructed stack that I've made
                               F = fun(_LocalStack) -> def(Mode, Env, Stack, ArgDomains, Fun) end,
                               NewStack = [{utils:gen_tag(F), Ctx, ArgDomains} | Stack],
                               F(NewStack)
                       end),
    {ok, {#{}, F}};
% Expr: `T(a)` or `f(b)`
expr(Mode, Env, Stack, {application, Ctx, Expr, Args}) ->
    case map_expr(Mode, Env, Stack, Args) of
        {error, Errs} -> {error, Errs};
        {ok, {_, ArgDomains}} ->
            case expr(Mode, Env, Stack, Expr) of
                {error, Errs} -> {error, Errs};
                {ok, {_, ExprDomain}} ->
                    % Function-position checks are never stricter than `normal`.
                    FunctionMode = case Mode of
                                       lenient -> lenient;
                                       _ -> normal
                                   end,
                    case check(FunctionMode, Stack, ExprDomain, domain:function(length(Args)), Ctx) of
                        {error, _} ->
                            case is_function(ExprDomain) of
                                false -> error:format({expected_function_domain, ExprDomain}, {typecheck, Ctx, Stack});
                                true -> Arity = utils:get_arity(ExprDomain),
                                        error:format({wrong_arity, symbol:tag(Expr), length(Args), Arity}, {typecheck, Ctx, Stack})
                            end;
                        {ok, none} -> {ok, {#{}, none}}; % In case Mode is lenient
                        {ok, _ResDomain} ->
                            case ExprDomain of
                                any -> {ok, {#{}, any}};
                                _ ->
                                    % Apply the domain function to the argument
                                    % domains to obtain the result domain.
                                    case erlang:apply(ExprDomain, ArgDomains) of
                                        {error, Errs} -> {error, Errs};
                                        Domain -> {ok, {#{}, Domain}}
                                    end
                            end
                    end
            end
    end;
% Expr of form: `T(a)` where `T` is a recursive type (and so we want to not
% call the type constructor in an infinite loop
expr(Mode, Env, Stack, {recursive_type_application, Ctx, Tag, Args}) ->
    % Defer evaluation behind a `recur` thunk to avoid infinite expansion.
    F = fun() ->
                T = {type, Ctx, Tag, [Tag]},
                case expr(Mode, Env, Stack, {application, Ctx, T, Args}) of
                    {error, Errs} -> {error, Errs};
                    {ok, {_, Domain}} -> Domain
                end
        end,
    {ok, {#{}, {recur, F}}};
% expr of form `T` where T is recursive like `type T -> {a: Boolean, b: T}`
expr(Mode, Env, Stack, {recursive_type, Ctx, Name, Path}) ->
    F = fun() ->
                case expr(Mode, Env, Stack, {type, Ctx, Name, Path}) of
                    {error, Errs} -> {error, Errs};
                    {ok, {_, Domain}} -> Domain
                end
        end,
    {ok, {#{}, {recur, F}}};
% Expr: `T : S` (tagged type)
expr(Mode, Env, Stack, {tagged, _, _, Value} = T) ->
    case expr(Mode, Env, Stack, Value) of
        {error, Errs} -> {error, Errs};
        {ok, {VEnv, VDomain}} -> {ok, {VEnv, {tagged, symbol:tag(T), VDomain}}}
    end;
% Expr: `s : T`
expr(Mode, Env, Stack, {pair, Ctx, Key, Value}) ->
    error:flatmap2(expr(Mode, Env, Stack, Value),
                   expr(Mode, Env, Stack, Key),
                   fun({_, ValueDomain}, {_, KeyDomain}) ->
                           case check(Mode, Stack, KeyDomain, ValueDomain, Ctx) of
                               {error, _} -> error:format({pair_not_subset, KeyDomain, ValueDomain}, {typecheck, Ctx, Stack});
                               {ok, Domain} -> {ok, {#{}, Domain}}
                           end
                   end);
% Pair in Expr: `{s : T, ...}`
expr(Mode, Env, Stack, {dict_pair, _, Key, Value}) ->
    case expr(Mode, Env, Stack, Value) of
        {error, Errs} -> {error, Errs};
        {ok, {_, ValDomain}} -> {ok, {#{}, #{symbol:name(Key) => ValDomain}}}
    end;
% Expr: `{k: R}`
expr(Mode, Env, Stack, {dict, Ctx, Elems}) ->
    case utils:duplicates(Elems, fun symbol:name/1) of
        [] ->
            case map_expr(Mode, Env, Stack, Elems) of
                {error, Errs} -> {error, Errs};
                {ok, {_, ElemsDomain}} -> {ok, {#{}, intersection(ElemsDomain)}}
            end;
        Duplicates ->
            Keys = lists:map(fun({T, _}) -> symbol:name(T) end, Duplicates),
            error:format({duplicate_keys, Keys}, {typecheck, Ctx, Stack})
    end;
% Expr: `[T, S, ...]`
expr(Mode, Env, Stack, {list, _, Elems}) ->
    case map_expr(Mode, Env, Stack, Elems) of
        {error, Errs} -> {error, Errs};
        {ok, {_, ElemsDomain}} -> {ok, {#{}, ElemsDomain}}
    end;
% Expr: `T | S`
expr(Mode, Env, Stack, {sum, _, Elems}) ->
    case map_expr(Mode, Env, Stack, Elems) of
        {error, Errs} -> {error, Errs};
        {ok, {_, ElemsDomain}} -> {ok, {#{}, union(ElemsDomain)}}
    end;
% Expr: `1` or `'atom'` or `"string"`
expr(_, _, _, {value, _, _, Val}) -> {ok, {#{}, Val}};
% Expr: `T`
expr(_, Env, Stack, {type, Ctx, _, _} = T) ->
    Tag = symbol:tag(T),
    % An unknown type evaluates to its own tag as domain.
    EnvRes = case maps:get(Tag, Env, undefined) of
                 undefined -> Tag;
                 EnvF -> EnvF(Stack, Ctx)
             end,
    case EnvRes of
        {error, Errs} -> {error, Errs};
        EnvDomain -> {ok, {#{}, EnvDomain}}
    end;
% Expr: `a`
expr(_, Env, Stack, {variable, Ctx, Name, Tag}) ->
    % The tagger tags a top-level variable with its fname (e.g. {f, 1})
    % For consistency, the env stores both types and defs without arity
    case maps:get(Tag, Env, maps:get(Name, Env, undefined)) of
        undefined -> error:format({undefined_variable, Name}, {typecheck, Ctx, Stack});
        EnvF ->
            case EnvF(Stack, Ctx) of
                {error, Errs} -> {error, Errs};
                EnvDomain -> {ok, {#{Tag => fun(_Stack, _Ctx) -> EnvDomain end}, EnvDomain}}
            end
    end.

% Typecheck a list of expressions, collecting their envs and domains.
map_expr(Mode, Env, Stack, Terms) ->
    case error:collect([expr(Mode, Env, Stack, T) || T <- Terms]) of
        {error, Errs} -> {error, Errs};
        {ok, Res} -> {ok, unzip(Res)}
    end.

% Typecheck a list of patterns; a non-list Domain is broadcast to all terms.
map_pattern(Mode, Env, Stack, Domain, Terms) when not(is_list(Domain)) ->
    map_pattern(Mode, Env, Stack, [Domain || _ <- Terms], Terms);
map_pattern(Mode, Env, Stack, Domains, Terms) when length(Domains) == length(Terms) ->
    case error:collect([pattern(Mode, Env, Stack, D, T) || {D, T} <- zip(Domains, Terms)]) of
        {error, Errs} -> {error, Errs};
        {ok, Res} -> {TEnvs, TDomains} = unzip(Res),
                     {ok, {TEnvs, TDomains}}
    end.

% check/5: check/6 with the intersection of the two domains as the result.
check(Strictness, Stack, D1, D2, Ctx) -> check(Strictness, Stack, D1, D2, Ctx, intersection(D1, D2)).
% check/6 verifies that domain D1 is compatible with D2 under the given
% strictness and returns {ok, Ret} on success:
%   strict  - D1 must be a subset of D2
%   normal  - D1 and D2 must intersect (some values satisfy the constraint)
%   lenient - always succeeds (errors inside D1/D2 still propagate)
% Lists of domains are checked element-wise; Ret is returned whole.
check(Strictness, Stack, L1, L2, Ctx, Ret) when is_list(L1) andalso is_list(L2) andalso length(L1) == length(L2) ->
    case error:collect([check(Strictness, Stack, D1, D2, Ctx, {}) || {D1, D2} <- lists:zip(L1, L2)]) of
        {error, Errs} -> {error, Errs};
        {ok, _} -> {ok, Ret}
    end;
check(strict, Stack, D1, D2, Ctx, Ret) ->
    F = fun(T1, T2) ->
                case domain:subset(T1, T2) of
                    true -> {ok, Ret};
                    false -> error:format({domain_not_subset, T1, T2}, {typecheck, Ctx, Stack})
                end
        end,
    % flatmap2 unwraps possible {ok, _}/{error, _} wrappers around D1/D2.
    error:flatmap2(D1, D2, F);
check(normal, Stack, D1, D2, Ctx, Ret) ->
    F = fun(T1, T2) ->
                case domain:intersection(T1, T2) of
                    % BUG FIX: report the unwrapped T2 (was the raw D2, which
                    % may still be {ok, _}-wrapped), consistent with the
                    % strict clause above.
                    none -> error:format({domains_do_not_intersect, T1, T2}, {typecheck, Ctx, Stack});
                    _ -> {ok, Ret}
                end
        end,
    error:flatmap2(D1, D2, F);
check(lenient, _, D1, D2, _, Ret) ->
    error:map2(D1, D2, fun(_, _) -> Ret end).

% Transpose a list of equal-length lists, e.g. [[1,2],[3,4]] -> [[1,3],[2,4]].
% Used to turn per-clause argument domains into per-argument domain lists.
pivot([]) -> [];
pivot([H | _] = ListOfLists) ->
    Columns = lists:foldl(fun(Row, Accs) -> [[E | Acc] || {E, Acc} <- lists:zip(Row, Accs)] end,
                          [[] || _ <- H],
                          ListOfLists),
    [lists:reverse(Col) || Col <- Columns].

% Call the domain function `Module:Tag` with a stack frame pushed. If the new
% frame would repeat a (name, domains) pair already on the stack, defer the
% call behind a `recur` thunk to break the infinite loop.
apply_domain(Mode, Stack, Ctx, Module, Tag, Params) ->
    NewStack = [{module:beam_name([Module, Tag]), Ctx, Params} | Stack],
    case check_stack_recursion(NewStack) of
        error -> {recur, fun () -> erlang:apply(Module, Tag, [NewStack, Mode] ++ Params) end};
        ok -> erlang:apply(Module, Tag, [NewStack, Mode] ++ Params)
    end.
% Qualified apply is tricky because the appropriate domain function can take
% a few different shapes:
%
% - In the normal case a domain function for a kind function or type will live
%   in the corresponding domain module
% - For a sub-type defined in a type module, the domain function is the same as
%   the type function
% - When all arguments are literal domains and the function called is an erlang
%   function, we can call the erlang function, but only if it doesn't have
%   side-effects
%
% Returns {ok, {#{}, Domain}} or {error, _}.
% (Fix: removed the unused `ModuleName = module:beam_name(ModulePath)` binding
% and underscored the unused `Env` parameter — both were compiler warnings.)
qualified_apply(Mode, _Env, Stack, Ctx, ModulePath, Name, ArgDomains) ->
    NewStack = [{module:beam_name(ModulePath ++ [Name]), Ctx, ArgDomains} | Stack],
    DomainModuleName = module:beam_name(ModulePath ++ [domain]),
    % Check if domain function exists
    case erlang:function_exported(DomainModuleName, Name, length(ArgDomains)) of
        false -> error:format({undefined_qualified_symbol, module:kind_name(ModulePath ++ [domain]), {Name, length(ArgDomains)}}, {typecheck, Ctx, Stack});
        true ->
            case erlang:apply(DomainModuleName, Name, [NewStack, Mode] ++ ArgDomains) of
                {error, Errs} -> {error, Errs};
                Res -> {ok, {#{}, Res}}
            end
    end.

% If the domain is literal and the function is whitelisted, call it directly
% otherwise, return the `any` domain
beam_apply(Env, ModulePath, Name, ArgDomains) ->
    ModuleName = module:beam_name(ModulePath),
    case domain:is_literal(Env, ArgDomains) of
        true ->
            case import:is_whitelisted(ModuleName, Name) of
                true -> {ok, {#{}, erlang:apply(ModuleName, Name, ArgDomains)}};
                false -> {ok, {#{}, any}}
            end;
        false -> {ok, {#{}, any}}
    end.

% Detect non-terminating recursion: `error` when a frame with the same
% function name and the same argument domains appears twice on the stack
% (the context is deliberately ignored), `ok` otherwise.
check_stack_recursion(Stack) -> check_stack_recursion(Stack, #{}).
check_stack_recursion([], _) -> ok;
check_stack_recursion([{Name, _, Domains} | Tail], Seen) ->
    case maps:is_key({Name, Domains}, Seen) of
        true -> error;
        false -> check_stack_recursion(Tail, maps:put({Name, Domains}, true, Seen))
    end.
% Merge two environments pointwise; a key present in both maps is combined
% with domain:intersection/2, a key present in one is taken as-is.
% (Fix: dropped reliance on -import aliases by qualifying calls, and replaced
% maps:map over a pre-merged map — which re-looked-up every key — with a
% single comprehension over the union of keys.)
merge(E1, E2) ->
    Pick = fun(K) ->
                   case {maps:find(K, E1), maps:find(K, E2)} of
                       {{ok, V1}, {ok, V2}} -> domain:intersection(V1, V2);
                       {error, {ok, V2}} -> V2;
                       {{ok, V1}, error} -> V1
                   end
           end,
    maps:from_list([{K, Pick(K)} || K <- maps:keys(maps:merge(E1, E2))]).

% Left fold of merge/2 over a list of environments, starting from #{}.
merge(Envs) when is_list(Envs) -> lists:foldl(fun merge/2, #{}, Envs).

% Left fold of env_union/2 over a list of environments, starting from #{}.
env_union(Envs) when is_list(Envs) -> lists:foldl(fun env_union/2, #{}, Envs).

% Like merge/2, but keys present in both environments are combined with
% domain:union/2 instead of intersection.
env_union(E1, E2) ->
    Pick = fun(K) ->
                   case {maps:find(K, E1), maps:find(K, E2)} of
                       {{ok, V1}, {ok, V2}} -> domain:union(V1, V2);
                       {error, {ok, V2}} -> V2;
                       {{ok, V1}, error} -> V1
                   end
           end,
    maps:from_list([{K, Pick(K)} || K <- maps:keys(maps:merge(E1, E2))]).

% Split a list of pattern results into {Envs, Domains}. An {error, _} entry
% contributes an empty env and the error itself as the domain, so errors
% flow through domain unions rather than aborting the scan.
comb(Results) when is_list(Results) ->
    lists:unzip([case R of
                     {error, _} = Err -> {#{}, Err};
                     {ok, {E, D}} -> {E, D}
                 end || R <- Results]).
src/type/typecheck.erl
0.562177
0.458652
typecheck.erl
starcoder
%% Licensed under the Apache License, Version 2.0 (the “License”);
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an “AS IS” BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc General balanced binary Merkle trees. Similar to {@link //stdlib/gb_trees}, but with Merkle proofs.
%%
%% Keys and values need to be binaries. Values are stored only in leaf nodes to shorten Merkle proofs.
%%
%% Hashes of leaf nodes are based on concatenation of hashes of key and value. Hashes of inner nodes are based on concatenation of hashes of left and right node.
%%
%% Similarly as in {@link //stdlib/gb_trees}, deletions do not cause trees to rebalance.
%%
%% SHA-256 is used as the default hashing algorithm. You can define the `GB_MERKLE_TREES_HASH_ALGORITHM' macro to use another algorithm. See documentation of {@link //crypto/crypto:hash/2} for available choices.
%%
%% @author <NAME> <<EMAIL>> [http://jurewicz.org.pl]
%%
%% @reference See <a href="http://cglab.ca/~morin/teaching/5408/refs/a99.pdf">Arne Andersson’s “General Balanced Trees” article</a> for insights about the balancing algorithm. The original balance condition has been changed to 2^h(T) ≤ |T|^2.
%% @reference See <a href="https://github.com/tendermint/go-merkle">go-merkle</a> for a similar in purpose library written in Go which uses AVL trees instead of general balanced trees.
%% @see //stdlib/gb_trees
%% @see //crypto/crypto:hash/2
-module(gb_merkle_trees).

-export([balance/1, delete/2, empty/0, enter/3, foldr/3, from_list/1, from_orddict/1, from_orddict/2, keys/1, lookup/2, merkle_proof/2, root_hash/1, size/1, to_orddict/1, verify_merkle_proof/4]).

-ifdef(TEST).
-include_lib("triq/include/triq.hrl").
-include_lib("eunit/include/eunit.hrl").
-endif.

%% Hash algorithm is overridable at compile time via this macro.
-ifndef(GB_MERKLE_TREES_HASH_ALGORITHM).
-define(GB_MERKLE_TREES_HASH_ALGORITHM, sha256).
-endif.
-define(HASH(X), crypto:hash(?GB_MERKLE_TREES_HASH_ALGORITHM, X)).

%% Trees are balanced using the condition 2^h(T) ≤ |T|^C
-define(C, 2).

-type key() :: binary().
-type value() :: binary().
-type hash() :: binary().
%% We distinguish inner nodes and tree nodes by tuple length instead of using records to save some space.
-type leaf_node() :: {key(), value(), hash()}.
%% Inner-node hash may be the placeholder `to_be_computed' while a subtree is
%% awaiting rebalancing (see balance_node/2).
-type inner_node() :: {key(), hash() | to_be_computed, Left :: inner_node() | leaf_node(), Right :: inner_node() | leaf_node()}.
-type tree_node() :: leaf_node() | inner_node() | empty.
%% A tree is its size paired with the root node.
-opaque tree() :: {Size :: non_neg_integer(), RootNode :: tree_node()}.
%% A proof is a nested pair structure mirroring the path from root to leaf;
%% the bottom pair is {KeyHash, ValueHash}.
-type merkle_proof() :: {hash() | merkle_proof(), hash() | merkle_proof()}.

-export_type([key/0, value/0, hash/0, tree/0, merkle_proof/0]).

-spec delete(key(), tree()) -> tree().
%% @doc Remove key from tree. The key must be present in the tree.
%% (Deleting an absent key crashes with function_clause in delete_1/2.)
delete(Key, {Size, RootNode}) -> {Size - 1, delete_1(Key, RootNode)}.

-spec delete_1(key(), tree_node()) -> tree_node().
delete_1(Key, {Key, _, _}) -> empty;
delete_1(Key, {InnerKey, _, LeftNode, RightNode}) ->
    case Key < InnerKey of
        true ->
            case delete_1(Key, LeftNode) of
                %% Left child vanished: this inner node collapses to the right.
                empty -> RightNode;
                %% Otherwise recompute the inner hash from the new children.
                NewLeftNode -> {InnerKey, inner_hash(node_hash(NewLeftNode), node_hash(RightNode)), NewLeftNode, RightNode}
            end;
        _ ->
            case delete_1(Key, RightNode) of
                empty -> LeftNode;
                NewRightNode -> {InnerKey, inner_hash(node_hash(LeftNode), node_hash(NewRightNode)), LeftNode, NewRightNode}
            end
    end.

-spec empty() -> tree().
%% @doc Return an empty tree.
empty() -> {0, empty}.

-spec size(tree()) -> non_neg_integer().
%% @doc Return number of elements stored in the tree.
size({Size, _}) -> Size.

-spec leaf_hash(key(), value()) -> hash().
%% Leaf hash = H(H(Key) ++ H(Value)).
leaf_hash(Key, Value) ->
    KeyHash = ?HASH(Key),
    ValueHash = ?HASH(Value),
    ?HASH(<<KeyHash/binary, ValueHash/binary>>).

-spec inner_hash(hash(), hash()) -> hash().
%% Inner hash = H(LeftHash ++ RightHash).
inner_hash(LeftHash, RightHash) -> ?HASH(<<LeftHash/binary, RightHash/binary>>).

-spec root_hash(tree()) -> hash() | undefined.
%% @doc Return the hash of root node (`undefined' for an empty tree).
root_hash({_, RootNode}) -> node_hash(RootNode).

-spec merkle_proof(key(), tree()) -> merkle_proof().
%% @doc For a given key return a proof that, along with its value, it is contained in tree.
%% Hash for root node is not included in the proof.
merkle_proof(Key, {_Size, RootNode}) -> merkle_proof_node(Key, RootNode).

-spec merkle_proof_node(key(), tree_node()) -> merkle_proof().
merkle_proof_node(Key, {Key, Value, _}) -> {?HASH(Key), ?HASH(Value)};
merkle_proof_node(Key, {InnerKey, _, Left, Right}) ->
    %% Descend towards the key, recording the sibling hash at each step.
    case Key < InnerKey of
        true -> {merkle_proof_node(Key, Left), node_hash(Right)};
        _ -> {node_hash(Left), merkle_proof_node(Key, Right)}
    end.

-spec verify_merkle_proof(key(), value(), Root::hash(), merkle_proof()) -> ok | {error, Reason} when Reason :: {key_hash_mismatch, hash()} | {value_hash_mismatch, hash()} | {root_hash_mismatch, hash()}.
%% @doc Verify a proof against a leaf and a root node hash.
verify_merkle_proof(Key, Value, RootHash, Proof) ->
    {KH, VH} = {?HASH(Key), ?HASH(Value)},
    {PKH, PVH} = bottom_merkle_proof_pair(Proof),
    if PKH =/= KH -> {error, {key_hash_mismatch, PKH}};
       %% BUG FIX: report the mismatching *value* hash PVH (previously
       %% reported PKH, the key hash, in the value_hash_mismatch error).
       PVH =/= VH -> {error, {value_hash_mismatch, PVH}};
       true ->
           PRH = merkle_fold(Proof),
           if PRH =/= RootHash -> {error, {root_hash_mismatch, PRH}};
              true -> ok
           end
    end.

-spec from_list(list({key(), value()})) -> tree().
%% @doc Create a tree from a list.
%% This creates a tree by iteratively inserting elements and not necessarily results in a perfect balance, like the one obtained when running {@link from_orddict/1}.
from_list(List) -> from_list(List, empty()).

-spec from_list(list({key(), value()}), Acc :: tree()) -> tree().
%% Iteratively enter/3 each pair; later duplicates overwrite earlier ones.
from_list([], Acc) -> Acc;
from_list([{Key, Value}|Rest], Acc) -> from_list(Rest, enter(Key, Value, Acc)).

-spec from_orddict(OrdDict :: list({key(), value()})) -> tree().
%% @equiv from_orddict(OrdDict, length(OrdDict))
from_orddict(OrdDict) -> from_orddict(OrdDict, length(OrdDict)).

-spec from_orddict(list({key(), value()}), Size :: non_neg_integer()) -> tree().
%% @doc Create a perfectly balanced tree from an ordered dictionary.
from_orddict(OrdDict, Size) -> {Size, balance_orddict(OrdDict, Size)}.

-spec to_orddict(tree()) -> list({key(), value()}).
%% @doc Convert tree to an orddict.
to_orddict(Tree) -> foldr(fun (KV, Acc) -> [KV|Acc] end, [], Tree).

-spec keys(tree()) -> list(key()).
%% @doc Return the keys as an ordered list.
keys(Tree) -> foldr(fun ({Key, _}, Acc) -> [Key|Acc] end, [], Tree).

-spec foldr(fun(({key(), value()}, Acc :: any()) -> any()), Acc :: any(), tree()) -> Acc :: any().
%% @doc Iterate through keys and values, from those with highest keys to lowest.
foldr(Fun, Acc, {_, RootNode}) -> foldr_1(Fun, Acc, RootNode).

-spec foldr_1(fun(({key(), value()}, Acc :: any()) -> any()), Acc :: any(), tree_node()) -> Acc :: any().
foldr_1(_, Acc, empty) -> Acc;
foldr_1(F, Acc, _LeafNode={Key, Value, _}) -> F({Key, Value}, Acc);
%% Fold the right subtree first so the leftmost (lowest) key is applied last.
foldr_1(F, Acc, {_, _, Left, Right}) -> foldr_1(F, foldr_1(F, Acc, Right), Left).

-spec node_hash(tree_node()) -> hash() | undefined.
node_hash(empty) -> undefined;
node_hash({_, _, Hash}) -> Hash;
node_hash({_, Hash, _, _}) -> Hash.

-spec enter(key(), value(), tree()) -> tree().
%% @doc Insert or update key and value into tree.
%% Delegates to enter_1/5, which also performs partial rebalancing; a fully
%% settled result carries `undefined' for the rebalancing bookkeeping fields.
enter(Key, Value, {Size, RootNode}) ->
    {NewRootNode, undefined, undefined, KeyExists} = enter_1(Key, Value, RootNode, 0, Size),
    NewSize = case KeyExists of
                  true -> Size;
                  _ -> Size + 1
              end,
    {NewSize, NewRootNode}.
-spec enter_1(key(), value(), tree_node(), Depth :: non_neg_integer(), TreeSize :: non_neg_integer()) -> {tree_node(), RebalancingCount :: pos_integer() | undefined, Height :: non_neg_integer() | undefined, KeyExists :: boolean()}.
%% Insert into a subtree. When insertion makes the tree violate the balance
%% condition, hashes are deferred (`to_be_computed') and the count/height pair
%% is threaded upwards until an ancestor is found whose subtree may be
%% rebalanced; balance_node/2 then rebuilds that subtree and its hashes.
enter_1(Key, Value, empty, _, _) ->
    {{Key, Value, leaf_hash(Key, Value)}, undefined, undefined, false};
enter_1(Key, Value, ExistingLeafNode={ExistingKey, _, _}, Depth, TreeSize) ->
    NewLeafNode = {Key, Value, leaf_hash(Key, Value)},
    case Key =:= ExistingKey of
        %% Same key: replace the leaf in place, size unchanged.
        true -> {NewLeafNode, undefined, undefined, true};
        _ ->
            NewTreeSize = TreeSize + 1,
            NewDepth = Depth + 1,
            %% Split the leaf into an inner node ordered by key.
            {InnerKey, LeftNode, RightNode} =
                case Key > ExistingKey of
                    true -> {Key, ExistingLeafNode, NewLeafNode};
                    _ -> {ExistingKey, NewLeafNode, ExistingLeafNode}
                end,
            case rebalancing_needed(NewTreeSize, NewDepth) of
                %% Defer hashing: start the upward search with count 2, height 1.
                true -> {{InnerKey, to_be_computed, LeftNode, RightNode}, 2, 1, false};
                _ -> {{InnerKey, inner_hash(node_hash(LeftNode), node_hash(RightNode)), LeftNode, RightNode}, undefined, undefined, false}
            end
    end;
enter_1(Key, Value, InnerNode={InnerKey, _, LeftNode, RightNode}, Depth, TreeSize) ->
    NodeToFollowSymb = case Key < InnerKey of
                           true -> left;
                           _ -> right
                       end,
    {NodeToFollow, NodeNotChanged} = case NodeToFollowSymb of
                                         right -> {RightNode, LeftNode};
                                         left -> {LeftNode, RightNode}
                                     end,
    {NewNode, RebalancingCount, Height, KeyExists} = enter_1(Key, Value, NodeToFollow, Depth + 1, TreeSize),
    {NewLeftNode, NewRightNode} = case NodeToFollowSymb of
                                      right -> {LeftNode, NewNode};
                                      _ -> {NewNode, RightNode}
                                  end,
    case RebalancingCount of
        %% No pending rebalance below: just refresh this node's hash if needed.
        undefined -> {update_inner_node(InnerNode, NewLeftNode, NewRightNode), undefined, undefined, KeyExists};
        _ ->
            %% A rebalance is pending: grow the candidate subtree by the size
            %% of the untouched sibling and try to rebalance at this level.
            Count = RebalancingCount + node_size(NodeNotChanged),
            NewHeight = Height + 1,
            NewInnerNodeUnbalanced = {InnerKey, to_be_computed, NewLeftNode, NewRightNode},
            case may_be_rebalanced(Count, NewHeight) of
                true -> {balance_node(NewInnerNodeUnbalanced, Count), undefined, undefined, KeyExists};
                _ -> {NewInnerNodeUnbalanced, Count, NewHeight, KeyExists}
            end
    end.

-spec rebalancing_needed(TreeSize :: non_neg_integer(), Depth :: non_neg_integer()) -> boolean().
%% Balance condition violated: 2^Depth > TreeSize^C.
rebalancing_needed(TreeSize, Depth) -> math:pow(2, Depth) > math:pow(TreeSize, ?C).

-spec may_be_rebalanced(Count :: non_neg_integer(), Height :: non_neg_integer()) -> boolean().
%% A subtree of Count nodes and height Height is itself out of balance and may
%% be rebuilt here.
may_be_rebalanced(Count, Height) -> math:pow(2, Height) > math:pow(Count, ?C).

-spec node_size(tree_node()) -> non_neg_integer().
%% Number of leaves in a subtree (O(n) walk).
node_size(empty) -> 0;
node_size({_, _, _}) -> 1;
node_size({_, _, Left, Right}) -> node_size(Left) + node_size(Right).

-spec balance_orddict(list({key(), value()}), Size :: non_neg_integer()) -> tree_node().
balance_orddict(KVOrdDict, Size) ->
    %% The full input must be consumed exactly.
    {Node, []} = balance_orddict_1(KVOrdDict, Size),
    Node.

-spec balance_orddict_1(list({key(), value()}), Size :: non_neg_integer()) -> {tree_node(), list({key(), value()})}.
%% Build a perfectly balanced subtree from the first Size pairs, returning the
%% leftover pairs. The inner key is the smallest key of the right subtree.
balance_orddict_1(OrdDict, Size) when Size > 1 ->
    Size2 = Size div 2,
    Size1 = Size - Size2,
    {LeftNode, OrdDict1=[{Key, _} | _]} = balance_orddict_1(OrdDict, Size1),
    {RightNode, OrdDict2} = balance_orddict_1(OrdDict1, Size2),
    InnerNode = {Key, inner_hash(node_hash(LeftNode), node_hash(RightNode)), LeftNode, RightNode},
    {InnerNode, OrdDict2};
balance_orddict_1([{Key, Value} | OrdDict], 1) -> {{Key, Value, leaf_hash(Key, Value)}, OrdDict};
balance_orddict_1(OrdDict, 0) -> {empty, OrdDict}.

-spec node_to_orddict(tree_node()) -> list({key(), value()}).
node_to_orddict(Node) -> foldr_1(fun (KV, Acc) -> [KV|Acc] end, [], Node).

-spec balance_node(tree_node(), Size :: non_neg_integer()) -> tree_node().
%% Rebuild a subtree (flatten, then perfectly re-balance), recomputing hashes.
balance_node(Node, Size) ->
    KVOrdDict = node_to_orddict(Node),
    balance_orddict(KVOrdDict, Size).

-spec balance(tree()) -> tree().
%% @doc Perfectly balance a tree.
balance({Size, RootNode}) -> {Size, balance_orddict(node_to_orddict(RootNode), Size)}.

-spec lookup(key(), tree()) -> value() | none.
%% @doc Fetch value for key from tree.
lookup(Key, {_, RootNode}) -> lookup_1(Key, RootNode).
-spec lookup_1(key(), inner_node() | leaf_node()) -> value() | none.
%% Leaf with matching key: found.
lookup_1(Key, {Key, Value, _}) -> Value;
%% Inner node: keys < InnerKey live on the left, the rest on the right.
lookup_1(Key, {InnerKey, _, Left, Right}) ->
    case Key < InnerKey of
        true -> lookup_1(Key, Left);
        _ -> lookup_1(Key, Right)
    end;
%% empty node or non-matching leaf.
lookup_1(_, _) -> none.

-spec update_inner_node(inner_node(), Left :: tree_node(), Right :: tree_node()) -> inner_node().
%% Replace a node's children, recomputing the inner hash only when at least
%% one child hash actually changed.
update_inner_node(Node={Key, _, Left, Right}, NewLeft, NewRight) ->
    case lists:map(fun node_hash/1, [Left, Right, NewLeft, NewRight]) of
        [LeftHash, RightHash, LeftHash, RightHash] ->
            %% Nothing changed, no need to rehash.
            Node;
        [_, _, NewLeftHash, NewRightHash] ->
            {Key, inner_hash(NewLeftHash, NewRightHash), NewLeft, NewRight}
    end.

-spec merkle_fold(merkle_proof()) -> hash().
%% Fold a proof tree bottom-up into the root hash: pairs are hashed
%% together, bare hashes pass through unchanged.
merkle_fold({Left, Right}) ->
    LeftHash = merkle_fold(Left),
    RightHash = merkle_fold(Right),
    ?HASH(<<LeftHash/binary, RightHash/binary>>);
merkle_fold(Hash) -> Hash.

-spec bottom_merkle_proof_pair(merkle_proof()) -> {hash(), hash()}.
%% Walk down a proof, following whichever side still holds a nested pair,
%% until the bottom {KeyHash, ValueHash} pair is reached.
bottom_merkle_proof_pair({Pair, Hash}) when is_tuple(Pair), is_binary(Hash) ->
    bottom_merkle_proof_pair(Pair);
bottom_merkle_proof_pair({_Hash, Pair}) when is_tuple(Pair) ->
    bottom_merkle_proof_pair(Pair);
bottom_merkle_proof_pair(Pair) -> Pair.

-ifdef(TEST).

empty_test_() ->
    [?_assertEqual(0, ?MODULE:size(empty()))].

%% Types for Triq.
%% Generator: random binary keys.
key() -> binary().
%% Generator: random binary values.
value() -> binary().
%% Generator: ordered dict of random pairs.
kv_orddict() -> ?LET(L, list({key(), value()}), orddict:from_list(L)).
tree() ->
    %% The validity of data generated by this generator depends on the validity of the `from_list' function.
    %% This should not be a problem as long as the `from_list' function itself is tested.
    ?LET(KVO, list({key(), value()}), from_list(KVO)).
%% Generator: trees with size (element 1) > 0.
non_empty_tree() -> ?SUCHTHAT(Tree, tree(), element(1, Tree) > 0).

%% Helper functions for Triq.
-spec height(tree()) -> non_neg_integer().
height({_, RootNode}) -> node_height(RootNode).

-spec node_height(tree_node()) -> non_neg_integer().
node_height(empty) ->
    %% Strictly speaking, there is no height for empty tree.
    0;
node_height({_, _, _}) -> 0;
node_height({_, _, Left, Right}) ->
    1 + max(node_height(Left), node_height(Right)).

-spec shallow_height(tree()) -> non_neg_integer().
%% Length of the shortest root-to-leaf path (min counterpart of height/1).
shallow_height({_, RootNode}) -> node_shallow_height(RootNode).

-spec node_shallow_height(tree_node()) -> non_neg_integer().
node_shallow_height(empty) ->
    %% Strictly speaking, there is no height for empty tree.
    0;
node_shallow_height({_, _, _}) -> 0;
node_shallow_height({_, _, Left, Right}) ->
    1 + min(node_shallow_height(Left), node_shallow_height(Right)).

-spec is_perfectly_balanced(tree()) -> boolean().
%% Perfectly balanced: deepest and shallowest leaves differ by at most 1.
is_perfectly_balanced(Tree) ->
    height(Tree) - shallow_height(Tree) =< 1.

-spec fun_idempotent(F :: fun((X) -> X), X) -> boolean().
%% @doc Return true if F(X) =:= X.
fun_idempotent(F, X) -> F(X) =:= X.

prop_lookup_does_not_fetch_deleted_key() ->
    ?FORALL({Tree, Key, Value}, {tree(), key(), value()},
            none =:= lookup(Key, delete(Key, enter(Key, Value, Tree)))).

prop_deletion_decreases_size_by_1() ->
    ?FORALL({Tree, Key, Value}, {tree(), key(), value()},
            ?MODULE:size(enter(Key, Value, Tree)) - 1 =:=
                ?MODULE:size(delete(Key, enter(Key, Value, Tree)))).

prop_merkle_proofs_fold_to_root_hash() ->
    ?FORALL({Tree, Key, Value}, {tree(), key(), value()},
            root_hash(enter(Key, Value, Tree)) =:=
                merkle_fold(merkle_proof(Key, enter(Key, Value, Tree)))).

prop_merkle_proofs_contain_kv_hashes_at_the_bottom() ->
    ?FORALL({Tree, Key, Value}, {tree(), key(), value()},
            bottom_merkle_proof_pair(merkle_proof(Key, enter(Key, Value, Tree))) =:=
                {?HASH(Key), ?HASH(Value)}).

prop_merkle_proofs_can_be_verified() ->
    ?FORALL({Tree, Key, Value}, {tree(), key(), value()},
            ok =:= verify_merkle_proof(Key,
                                       Value,
                                       root_hash(enter(Key, Value, Tree)),
                                       merkle_proof(Key, enter(Key, Value, Tree)))).

%% Corrupting the key must surface as a key_hash_mismatch error.
prop_merkle_proofs_verification_reports_mismatch_for_wrong_key() ->
    ?FORALL({Tree, Key, Value}, {tree(), key(), value()},
            case verify_merkle_proof(<<"X", Key/binary>>,
                                     Value,
                                     root_hash(enter(Key, Value, Tree)),
                                     merkle_proof(Key, enter(Key, Value, Tree))) of
                {error, {key_hash_mismatch, H}} when is_binary(H) -> true;
                _ -> false
            end).

%% Corrupting the value must surface as a value_hash_mismatch error.
prop_merkle_proofs_verification_reports_mismatch_for_wrong_value() ->
    ?FORALL({Tree, Key, Value}, {tree(), key(), value()},
            case verify_merkle_proof(Key,
                                     <<"X", Value/binary>>,
                                     root_hash(enter(Key, Value, Tree)),
                                     merkle_proof(Key, enter(Key, Value, Tree))) of
                {error, {value_hash_mismatch, H}} when is_binary(H) -> true;
                _ -> false
            end).

%% Corrupting the root hash must surface as a root_hash_mismatch error.
prop_merkle_proofs_verification_reports_mismatch_for_wrong_root_hash() ->
    ?FORALL({Tree, Key, Value}, {tree(), key(), value()},
            case verify_merkle_proof(Key,
                                     Value,
                                     begin
                                         RH = root_hash(enter(Key, Value, Tree)),
                                         <<"X", RH/binary>>
                                     end,
                                     merkle_proof(Key, enter(Key, Value, Tree))) of
                {error, {root_hash_mismatch, H}} when is_binary(H) -> true;
                _ -> false
            end).

prop_from_list_size() ->
    ?FORALL(KVList, list({key(), value()}),
            length(proplists:get_keys(KVList)) =:= ?MODULE:size(from_list(KVList))).

%% NOTE(review): despite the name, this exercises from_list/1, not
%% from_orddict/1 — confirm whether that is intentional.
prop_from_orddict_size() ->
    ?FORALL(KVO, kv_orddict(),
            length(KVO) =:= ?MODULE:size(from_list(KVO))).

prop_orddict_conversion_idempotence() ->
    ?FORALL(KVO, kv_orddict(),
            KVO =:= to_orddict(from_orddict(KVO))).

prop_from_orddict_returns_a_perfectly_balanced_tree() ->
    ?FORALL(KVO, kv_orddict(),
            is_perfectly_balanced(from_orddict(KVO))).

prop_keys() ->
    ?FORALL(Tree, tree(),
            keys(Tree) =:= [Key || {Key, _} <- to_orddict(Tree)]).

%% Documents (as a negative test) that incremental insertion does not
%% guarantee perfect balance.
from_list_sometimes_doesnt_return_a_perfectly_balanced_tree_test() ->
    ?assertNotEqual(
       true,
       triq:counterexample(
         ?FORALL(
            KVList,
            list({key(), value()}),
            is_perfectly_balanced(from_list(KVList))))).

prop_foldr_iterates_on_proper_ordering_and_contains_no_duplicates() ->
    ?FORALL(Tree, tree(),
            fun_idempotent(
              fun lists:usort/1,
              foldr(
                fun({Key, _}, Acc) -> [Key|Acc] end,
                [],
                Tree))).

prop_enter_is_idempotent() ->
    ?FORALL({Tree, Key, Value}, {tree(), key(), value()},
            fun_idempotent(
              fun (Tree_) -> enter(Key, Value, Tree_) end,
              enter(Key, Value, Tree))).

prop_entered_value_can_be_retrieved() ->
    ?FORALL({Tree, Key, Value}, {tree(), key(), value()},
            Value =:= lookup(Key, enter(Key, Value, Tree))).

prop_entered_value_can_be_retrieved_after_balancing() ->
    ?FORALL({Tree, Key, Value}, {tree(), key(), value()},
            Value =:= lookup(Key, balance(enter(Key, Value, Tree)))).

%% Height stays within the weight-balance bound size^?C.
prop_height_constrained() ->
    ?FORALL(Tree, non_empty_tree(),
            math:pow(2, height(Tree)) =< math:pow(?MODULE:size(Tree), ?C)).

prop_balancing_yields_same_orddict() ->
    ?FORALL(Tree, tree(),
            to_orddict(Tree) =:= to_orddict(balance(Tree))).

prop_entering_key_second_time_does_not_increase_size() ->
    ?FORALL({Tree, Key, Value1, Value2}, {tree(), key(), value(), value()},
            ?MODULE:size(enter(Key, Value1, Tree)) =:=
                ?MODULE:size(enter(Key, Value2, enter(Key, Value1, Tree)))).

prop_tree_after_explicit_balancing_is_perfectly_balanced() ->
    ?FORALL(Tree, tree(),
            is_perfectly_balanced(balance(Tree))).

-endif.
src/gb_merkle_trees.erl
0.835685
0.537223
gb_merkle_trees.erl
starcoder
-module(deliv_dependency_failures_impl).

%% Provides the business logic for dependency failures data

-include("deliv_types.hrl").

-export([
         merge_overlapping_sets/2,
         are_dependencies_blocked/1,
         is_pipeline_blocked/2,
         get_all_blocked_pipeline_ids/1
        ]).

%% Blocked sets must be pairwise disjoint (no two sets have an intersection).
%% A new blocked set may have an intersection with one or more sets in the
%% set of blocked sets. By unioning intersecting sets we ensure pairwise
%% disjointness.
%%
%% Returns {MergedSet, RemainingSets}: every existing set whose keys overlap
%% the accumulator is folded into it (on a key clash the accumulator's value
%% wins, per maps:merge/2 argument order); disjoint sets pass through.
-spec merge_overlapping_sets(pipeline_statuses(), [pipeline_statuses()]) ->
    {pipeline_statuses(), [pipeline_statuses()]}.
merge_overlapping_sets(PipelineStatuses, BlockedSets) ->
    lists:foldl(
      fun(BlockedMap, {SetToUnion, NewBlockedSets}) ->
              case sets:is_disjoint(key_set(BlockedMap), key_set(SetToUnion)) of
                  true ->
                      {SetToUnion, [BlockedMap | NewBlockedSets]};
                  false ->
                      %% maps:merge/2 keeps the second map's value on clash,
                      %% so the accumulated set's statuses take precedence.
                      {maps:merge(BlockedMap, SetToUnion), NewBlockedSets}
              end
      end,
      {PipelineStatuses, []},
      BlockedSets
     ).

%% PipelineStatuses is a list of maps, the maps are called blocked sets,
%% whose keys are Pipeline IDs. Each Pipeline ID represents a blocked pipeline.
%%
%% This function returns all Pipeline IDs in a list given a list of blocked sets
%% by iterating over every blocked set (a map) and grabbing every Pipeline ID
%% (the keys of the maps).
%%
%% Blocked sets are disjoint so we are not worried about duplicate keys.
%% No promises on ordering of returned list.
-spec get_all_blocked_pipeline_ids([pipeline_statuses()]) -> [pipeline_id()].
get_all_blocked_pipeline_ids(PipelineStatuses) ->
    lists:foldl(
      fun(BlockedMap, PipelineIds) ->
              lists:append(maps:keys(BlockedMap), PipelineIds)
      end,
      [],
      PipelineStatuses
     ).

%% @doc Determine whether a pipeline-set is blocked.
%% @return true if any pipeline has status that is not 'passed', else false
-spec are_dependencies_blocked(pipeline_statuses()) -> boolean().
are_dependencies_blocked(DependencyFailures) ->
    %% =/= (exact inequality) instead of /=: statuses are atoms, so no
    %% numeric coercion is wanted here.
    lists:any(fun(Val) -> Val =/= passed end, maps:values(DependencyFailures)).

%% @doc True when PipelineId appears as a key in any of the blocked sets.
-spec is_pipeline_blocked(pipeline_id(), [pipeline_statuses()]) -> boolean().
is_pipeline_blocked(PipelineId, BlockedMaps) ->
    lists:any(fun(Map) -> maps:is_key(PipelineId, Map) end, BlockedMaps).

%% @private
%% @doc Retrieve the keys of a map as a set
-spec key_set(#{T => any()}) -> sets:set(T).
key_set(Map) ->
    sets:from_list(maps:keys(Map)).
components/automate-workflow-server/apps/delivery/src/deliv_dependency_failures_impl.erl
0.545528
0.514705
deliv_dependency_failures_impl.erl
starcoder
%% Licensed to the Apache Software Foundation (ASF) under one %% or more contributor license agreements. See the NOTICE file %% distributed with this work for additional information %% regarding copyright ownership. The ASF licenses this file %% to you under the Apache License, Version 2.0 (the %% "License"); you may not use this file except in compliance %% with the License. You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% @doc %% Logger handler for sending messages to systemd's `journal'. %% %% == Usage == %% %% Run this after the `systemd' application is started: %% %% ``` %% logger:add_handler(journal, systemd_journal_h, #{}). %% ''' %% %% == Options == %% %% <dl> %% <dt>`fields :: [field_definition()]'</dt> %% <dd>Contains list of all fields that will be passed to the `journald'. %% %% Defaults to: %% %% ``` %% [syslog_timestamp, %% syslog_pid, %% syslog_priority, %% {"ERL_PID", pid}, %% {"CODE_FILE", file}, %% {"CODE_LINE", line}, %% {"CODE_MFA", mfa}] %% ''' %% %% See {@section Fields} below.</dd> %% %% <dt>`report_cb :: fun ((logger:report()) -> [field()]'</dt> %% <dd>Function that takes `Prefix' and Logger's report and returns list %% of 2-ary tuples where first one MUST contain only uppercase ASCII %% letters, digits and underscore characters, and must not start with %% underscore. Field name and second one is field value in form of %% `iolist()'. It is important to note that value can contain any data, %% and does not need to be in any encoding; it can even be binary. 
%% %% === Example === %% %% ``` %% my_formatter(Prefix, #{field := Field}) when is_integer(Field) -> %% [ %% {[Prefix,"_FIELD"], io_lib:format("~.16B", [Field]} %% ]. %% ''' %% %% Remember that all field names <b>MUST NOT</b> start with the underscore, %% otherwise `journald' can ignore them. Such behaviour is not enforced on %% data returned by `report_cb' and it is left up to the implementor to %% remember it.</dd> %% </dl> %% %% == Fields == %% %% Fields list contain definition of fields that will be presented in the log %% message fed into `journald'. Few of them have special meaning and you can %% see list of them in the <a href="https://www.freedesktop.org/software/systemd/man/systemd.journal-fields.html"> %% `systemd.journal-fields(7)' manpage</a>. %% %% Metakeys (i.e. atoms) in `fields' list will be sent to %% the `journald' as a uppercased atom names. %% %% Entries in form of `{Name :: field_name(), metakey()}' will use `Name' %% as the field name. `Name' will be checked if it is correct `journald' field %% name (i.e. contains only ASCII letters, digits, and underscores, %% additionally do not start with underscore). %% %% Entries in form of `{Name :: field_name(), Data :: iolist()}' will use %% `Name' as field name and will contain `Data' as a literal. %% %% If entry data is empty or not set then it will be ommited in the output. %% %% === Special fields === %% %% Special fields availables: %% %% <dl> %% <dt>`level'</dt> %% <dd>Log level presented as string.</dd> %% <dt>`priority'</dt> %% <dd>Log level presented as decimal representation of syslog level.</dd> %% <dt>`os_pid'</dt> %% <dd>OS PID for current Erlang process. This is <b>NOT Erlang PID</b>. 
%% </dd> %% <dt>`mfa'</dt> %% <dd>Calling function presented in form `Module:Function/Arity'.</dd> %% <dt>`time'</dt> %% <dd>Timestamp of log message presented in RFC3339 format in UTC.</dd> %% </dl> %% %% Otherwise field is treated as a entry key where `key' is equivalent of %% `[key]' and is used as a list of atoms to extract data from the metadata map. %% %% === Syslog compatibility === %% %% To provide better compatibility and user convinience: %% %% <dl> %% <dt>`syslog_priority'</dt> %% <dd>Will work exactly the same as `{"SYSLOG_PRIORITY", priority}'.</dd> %% <dt>`syslog_pid'</dt> %% <dd>Will work exactly the same as `{"SYSLOG_PID", os_pid}'.</dd> %% <dt>`syslog_timestamp'</dt> %% <dd>Will work exactly the same as `{"SYSLOG_TIMESTAMP", time}'.</dd> %% </dl> %% %% @since 0.3.0 %% @end -module(systemd_journal_h). -behaviour(gen_server). -include("systemd_internal.hrl"). -define(JOURNAL_SOCKET, {local, <<"/run/systemd/journal/socket">>}). % logger handler callbacks -export([adding_handler/1, changing_config/3, filter_config/1, removing_handler/1, log/2]). % gen_server callbacks -export([start_link/0, init/1, handle_call/3, handle_cast/2]). -define(FORMATTER, {logger_formatter, #{}}). -define(CHILD_SPEC(Id, Args), #{id => Id, start => {?MODULE, start_link, Args}, restart => temporary}). -define(DEFAULT_FIELDS, [{"SYSLOG_TIMESTAMP", time}, {"SYSLOG_PID", os_pid}, {"SYSLOG_PRIORITY", priority}, {"ERL_PID", pid}, {"CODE_FILE", file}, {"CODE_LINE", line}, {"CODE_MFA", mfa}]). % ----------------------------------------------------------------------------- % Logger Handler %% @hidden -spec adding_handler(logger:handler_config()) -> {ok, logger:handler_config()} | {error, term()}. 
%% Logger callback invoked when the handler is installed. Extracts the
%% socket path (defaulting to the journald socket), validates the user
%% config, translates field shorthands, spawns the socket-holder process and
%% caches its pid/socket inside the handler config.
adding_handler(HConfig) ->
    Config0 = maps:get(config, HConfig, #{}),
    {Path, Config} = case maps:is_key(path, Config0) of
                         true -> maps:take(path, Config0);
                         false -> {?JOURNAL_SOCKET, Config0}
                     end,
    case validate_config(Config) of
        ok ->
            Fields = [translate_field(Field)
                      || Field <- maps:get(fields, Config, ?DEFAULT_FIELDS)],
            case start_connection(HConfig) of
                {ok, Pid} ->
                    %% Fetch the UDP socket owned by the holder process so
                    %% log/2 can send without a round-trip per message.
                    {ok, Socket} = gen_server:call(Pid, get),
                    {ok, HConfig#{config => Config#{pid => Pid,
                                                    fields => Fields,
                                                    socket => Socket,
                                                    path => Path}}};
                Err -> Err
            end;
        Error -> Error
    end.

%% @hidden
%% `update': merge — keep old fields unless new ones were supplied.
changing_config(update, #{config := OldHConfig}, NewConfig) ->
    NewHConfig = maps:get(config, NewConfig, #{}),
    case validate_config(NewHConfig) of
        ok ->
            Fields = case maps:is_key(fields, NewHConfig) of
                         true ->
                             NewFields = maps:get(fields, NewHConfig),
                             [translate_field(Field) || Field <- NewFields];
                         false ->
                             %% Already translated when first installed.
                             maps:get(fields, OldHConfig)
                     end,
            {ok, NewConfig#{config => OldHConfig#{fields := Fields}}};
        Error -> Error
    end;
%% `set': replace — missing fields fall back to the defaults.
changing_config(set, #{config := OldHConfig}, NewConfig) ->
    NewHConfig = maps:get(config, NewConfig, #{}),
    case validate_config(NewHConfig) of
        ok ->
            Fields = maps:get(fields, NewHConfig, ?DEFAULT_FIELDS),
            Formatted = [translate_field(Field) || Field <- Fields],
            {ok, NewConfig#{config => OldHConfig#{fields := Formatted}}};
        Error -> Error
    end.

%% Expand the documented shorthand atoms into {JournalFieldName, Metakey}
%% pairs; anything already in pair form passes through unchanged.
translate_field(syslog_timestamp) -> {"SYSLOG_TIMESTAMP", time};
translate_field(syslog_pid) -> {"SYSLOG_PID", os_pid};
translate_field(syslog_priority) -> {"SYSLOG_PRIORITY", priority};
translate_field(Atom) when is_atom(Atom) -> {Atom, Atom};
translate_field({_Name, _Data} = Field) -> Field.

%% Validate user options, ignoring the keys this module manages internally.
validate_config(Config0) when is_map(Config0) ->
    Config = maps:without([pid, socket, path], Config0),
    do_validate(maps:to_list(Config)).

%% Only the `fields' option is recognised; anything else is rejected.
do_validate([{fields, Fields} | Rest]) ->
    case check_fields(Fields) of
        ok -> do_validate(Rest);
        Error -> Error
    end;
do_validate([]) -> ok;
do_validate([Option | _]) -> {error, {invalid_option, Option}}.
-define(IS_STRING(Name), (is_binary(Name) orelse is_list(Name))).

%% Validate each field definition's journal field name.
%% NOTE(review): the module docs say names must contain only UPPERCASE
%% letters, digits and underscores, yet check_name/1 also accepts lowercase
%% — confirm whether lowercase should really pass validation.
check_fields([Atom | Rest]) when is_atom(Atom) ->
    Name = atom_to_list(Atom),
    case check_name(Name) of
        true -> check_fields(Rest);
        false -> {error, {name_invalid, Name}}
    end;
check_fields([{Atom, _} | Rest]) when is_atom(Atom) ->
    %% {AtomName, Data}: validate the atom name the same way.
    check_fields([Atom | Rest]);
check_fields([{Name, _} | Rest]) when ?IS_STRING(Name) ->
    case check_name(unicode:characters_to_list(Name)) of
        true -> check_fields(Rest);
        false -> {error, {name_invalid, Name}}
    end;
check_fields([]) -> ok;
check_fields([Unknown | _]) -> {error, {invalid_field, Unknown}}.

%% First character: letter or digit (notably, no leading underscore —
%% journald reserves underscore-prefixed fields).
check_name([C|Rest]) when $A =< C, C =< $Z;
                          $a =< C, C =< $z;
                          $0 =< C, C =< $9 ->
    check_name_rest(Rest);
check_name(_) -> false.

%% Remaining characters: letters, digits, or underscore.
check_name_rest([C|Rest]) when $A =< C, C =< $Z;
                               $a =< C, C =< $z;
                               $0 =< C, C =< $9;
                               C == $_ ->
    check_name_rest(Rest);
check_name_rest([]) -> true;
check_name_rest(_) -> false.

%% @hidden
%% Strip runtime-only keys before the config is shown to the user.
-spec filter_config(logger:handler_config()) -> logger:handler_config().
filter_config(#{config := Config0} = HConfig) ->
    Config = maps:without([pid, socket, path], Config0),
    HConfig#{config => Config}.

%% Start the socket-holder gen_server under the application supervisor.
start_connection(#{id := Id}) ->
    case supervisor:start_child(?SUPERVISOR, ?CHILD_SPEC(Id, [])) of
        {ok, Pid} -> {ok, Pid};
        {error, Error} -> {error, {spawn_error, Error}}
    end.

%% @hidden
-spec removing_handler(logger:handler_config()) -> ok.
removing_handler(#{config := #{pid := Pid}}) ->
    ok = gen_server:call(Pid, stop),
    ok.

%% @hidden
%% Format the event, attach the configured journal fields and ship the
%% datagram to journald. Empty messages are dropped silently.
-spec log(logger:log_event(), logger:handler_config()) -> ok.
log(LogEvent, #{config := Config} = HConfig) ->
    #{socket := Socket, path := Path, fields := Fields} = Config,
    {FMod, FConf} = maps:get(formatter, HConfig, ?FORMATTER),
    Msg = FMod:format(LogEvent, FConf),
    case string:is_empty(Msg) of
        false ->
            FieldsData = [{Name, get_field(Field, LogEvent)}
                          || {Name, Field} <- Fields],
            Data = systemd_protocol:encode([{"MESSAGE", Msg} | FieldsData]),
            ok = gen_udp:send(Socket, Path, 0, Data);
        true -> ok
    end.
%% Resolve one field definition against the log event. The first clauses
%% handle the special metakeys; the final clause treats the key as a path
%% into the event's metadata map.
get_field(os_pid, _LogEvent) -> os:getpid();
get_field(time, #{meta := #{time := Time}}) ->
    calendar:system_time_to_rfc3339(Time, [{unit, microsecond},
                                           {offset, "Z"}]);
get_field(mfa, #{meta := #{mfa := {M, F, A}}}) ->
    io_lib:format("~tp:~tp/~B", [M, F, A]);
get_field(priority, #{level := Level}) -> level_to_char(Level);
get_field(level, #{level := Level}) -> atom_to_binary(Level, utf8);
get_field(Metakey, #{meta := Meta}) ->
    case get_meta(Metakey, Meta) of
        %% Empty string makes systemd_protocol:encode/1 omit the field.
        undefined -> "";
        Data -> to_string(Data)
    end.

%% Look up a (possibly nested) metadata key: a list of atoms walks nested
%% maps; a bare atom is a single-level lookup.
get_meta([], Data) -> Data;
get_meta([Atom | Rest], Meta) when is_map(Meta) ->
    case maps:get(Atom, Meta, undefined) of
        undefined -> undefined;
        Next -> get_meta(Rest, Next)
    end;
get_meta(Atom, Meta) when is_atom(Atom) ->
    maps:get(Atom, Meta, undefined);
get_meta(_, _) -> undefined.

%% Render an arbitrary term as journal field data. Printable lists and
%% binaries pass through untouched; everything else goes via ~tp.
to_string(Atom) when is_atom(Atom) -> atom_to_list(Atom);
to_string(Pid) when is_pid(Pid) -> pid_to_list(Pid);
to_string(Ref) when is_reference(Ref) -> ref_to_list(Ref);
to_string(Int) when is_integer(Int) -> integer_to_list(Int);
to_string(List) when is_list(List) ->
    case printable_list(List) of
        true -> List;
        false -> io_lib:format("~tp", [List])
    end;
to_string(Bin) when is_binary(Bin) ->
    case printable_list(binary_to_list(Bin)) of
        true -> Bin;
        false -> io_lib:format("~tp", [Bin])
    end;
to_string(X) -> io_lib:format("~tp", [X]).

%% Like io_lib:printable_list/1 but the empty list counts as not printable
%% (so "" is formatted via ~tp instead of sent verbatim).
printable_list([]) -> false;
printable_list(X) -> io_lib:printable_list(X).

%% Map logger level to the syslog priority digit journald expects.
level_to_char(debug) -> "7";
level_to_char(info) -> "6";
level_to_char(notice) -> "5";
level_to_char(warning) -> "4";
level_to_char(error) -> "3";
level_to_char(critical) -> "2";
level_to_char(alert) -> "1";
level_to_char(emergency) -> "0".

% -----------------------------------------------------------------------------
% Socket handler

%% @hidden
%% Start the process that owns the UDP socket used to reach journald.
start_link() ->
    gen_server:start_link(?MODULE, [], []).

%% @hidden
init(_Arg) ->
    % We never receive on this socket, so we set is as {active, false}
    {ok, Socket} = gen_udp:open(0, [binary, local, {active, false}]),
    {ok, Socket}.
%% @hidden
%% Synchronous requests to the socket-holder process. The gen_server state
%% is simply the UDP socket it owns: `get' hands that socket back to the
%% caller, `stop' shuts the holder down normally.
handle_call(get, _From, Socket) ->
    {reply, {ok, Socket}, Socket};
handle_call(stop, _From, Socket) ->
    {stop, normal, ok, Socket}.

%% @hidden
%% No asynchronous messages are expected; ignore anything that arrives.
handle_cast(_Request, Socket) ->
    {noreply, Socket}.
src/systemd_journal_h.erl
0.659624
0.540742
systemd_journal_h.erl
starcoder
%%============================================================================== %% Copyright 2014 <NAME> %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %%============================================================================== %% @doc Application module for JOBS. %% Normally, JOBS is configured at startup, using a static configuration. %% There is a reconfiguration API {@link jobs}, which is mainly for evolution %% of the system. %% %% == Configuring JOBS == %% A static configuration can be provided via application environment %% variables for the `jobs' application. The following is a list of %% recognised configuration parameters. %% %% === {config, Filename} === %% Evaluate a file using {@link //kernel/file:script/1}, treating the data %% returned from the script as a list of configuration options. %% %% === {queues, QueueOptions} === %% Configure a list of queues according to the provided QueueOptions. %% If no queues are specified, a queue named `default' will be created %% with default characteristics. %% %% Below are the different queue configuration options: %% %% ==== {Name, Options} ==== %% This is the generic queue configuration pattern. %% `Name :: any()' is used to identify the queue. %% %% Options: %% %% `{mod, Module::atom()}' provides the name of the queueing module. %% The default module is `jobs_queue'. %% %% `{type, fifo | lifo | approve | reject | {producer, F}}' %% specifies the semantics of the queue. 
Note that the specified queue module %% may be limited to only one type (e.g. the `jobs_queue_list' module only %% supports `lifo' semantics). %% %% If the type is `{producer, F}', it doesn't matter which queue module is %% used, as it is not possible to submit job requests to a producer queue. %% The producer queue will initiate jobs using `spawn_monitor(F)' at the %% rate given by the regulators for the queue. %% %% If the type is `approve' or `reject', respectively, all other options will %% be irrelevant. Any request to the queue will either be immediately approved %% or immediately rejected. %% %% `{max_time, integer() | undefined}' specifies the longest time that a job %% request may spend in the queue. If `undefined', no limit is imposed. %% %% `{max_size, integer() | undefined}' specifies the maximum length (number %% of job requests) of the queue. If the queue has reached the maximum length, %% subsequent job requests will be rejected unless it is possible to remove %% enough requests that have exceeded the maximum allowed time in the queue. %% %% `{regulators, [{regulator_type(), Opts]}' specifies the regulation %% characteristics of the queue. %% %% The following types of regulator are supported: %% %% `regulator_type() :: rate | counter | group_rate' %% %% It is possible to combine different types of regulator on the same queue, %% e.g. a queue may have both rate- and counter regulation. It is not possible %% to have two different rate regulators for the same queue. %% %% Common regulator options: %% %% `{name, term()}' names the regulator; by default, a name will be generated. %% %% `{limit, integer()}' defines the limit for the regulator. If it is a rate %% regulator, the value represents the maximum number of jobs/second; if it %% is a counter regulator, it represents the total number of "credits" %% available. 
%% %% `{modifiers, [modifier()]}' %% %% <pre> %% modifier() :: {IndicatorName :: any(), unit()} %% | {Indicator, local_unit(), remote_unit()} %% | {Indicator, Fun} %% %% local_unit() :: unit() :: integer() %% remote_unit() :: {avg, unit()} | {max, unit()} %% </pre> %% %% Feedback indicators are sent from the sampler framework. Each indicator %% has the format `{IndicatorName, LocalLoadFactor, Remote}'. %% %% `Remote :: [{Node, LoadFactor}]' %% %% `IndicatorName' defines the type of indicator. It could be e.g. `cpu', %% `memory', `mnesia', or any other name defined by one of the sampler plugins. %% %% The effect of a modifier is calculated as the sum of the effects from local %% and remote load. As the remote load is represented as a list of %% `{Node,Factor}' it is possible to multiply either the average or the max %% load on the remote nodes with the given factor: `{avg,Unit} | {max, Unit}'. %% %% For custom interpretation of the feedback indicator, it is possible to %% specify a function `F(LocalFactor, Remote) -> Effect', where Effect is a %% positive integer. %% %% The resulting effect value is used to reduce the predefined regulator limit %% with the given number of percentage points, e.g. if a rate regulator has %% a predefined limit of 100 jobs/sec, and `Effect = 20', the current rate %% limit will become 80 jobs/sec. %% %% `{rate, Opts}' - rate regulation %% %% Currently, no special options exist for rate regulators. %% %% `{counter, Opts}' - counter regulation %% %% The option `{increment, I}' can be used to specify how much of the credit %% pool should be assigned to each job. The default increment is 1. %% %% `{named_counter, Name, Increment}' reuses an existing counter regulator. %% This can be used to link multiple queues to a shared credit pool. Note that %% this does not use the existing counter regulator as a template, but actually %% shares the credits with any other queues using the same named counter. 
%% __NOTE__ Currently, if there is no counter corresponding to the alias,
%% the entry will simply be ignored during regulation. It is likely that this
%% behaviour will change in the future.
%%
%% ==== {Name, standard_rate, R} ====
%% A simple rate-regulated queue with throughput rate `R', and basic cpu- and
%% memory-related feedback compensation.
%%
%% ==== {Name, standard_counter, N} ====
%% A simple counter-regulated queue, giving each job a weight of 1, and thus
%% allowing at most `N' jobs to execute concurrently. Basic cpu- and memory-
%% related feedback compensation.
%%
%% ==== {Name, producer, F, Options} ====
%% A producer queue is not open for incoming jobs, but will rather initiate
%% jobs at the given rate.
%% @end
%%
-module(jobs_app).
-export([start/2, stop/1, init/1]).

%% Application start callback: boot the top-level supervisor, registered
%% under this module's name.
start(_Type, _Args) ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).

%% Application stop callback; nothing to clean up.
stop(_State) ->
    ok.

%% Supervisor init callback. rest_for_one: if jobs_server dies, the sampler
%% (started after it) is restarted too. Max 3 restarts per 10 seconds.
init([]) ->
    JobsServer = {jobs_server,
                  {jobs_server, start_link, []},
                  permanent, 3000, worker, [jobs_server]},
    {ok, {{rest_for_one, 3, 10}, [JobsServer | sampler_spec()]}}.

%% Child spec for the sampler. The implementation module may be overridden
%% via the `sampler' application environment variable; it defaults to
%% jobs_sampler.
sampler_spec() ->
    SamplerMod = case application:get_env(sampler) of
                     {ok, M} when M =/= undefined -> M;
                     _ -> jobs_sampler
                 end,
    [{jobs_sampler,
      {SamplerMod, start_link, []},
      permanent, 3000, worker, [SamplerMod]}].
src/jobs_app.erl
0.8059
0.615637
jobs_app.erl
starcoder
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2000-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(xref_reader).

-export([module/5]).

-import(lists, [keysearch/3, member/2, reverse/1]).

%% Accumulator threaded through the traversal of a module's abstract code.
-record(xrefr,
        {module=[],
         function=[],
         def_at=[],
         l_call_at=[],
         x_call_at=[],
         el=[],
         ex=[],
         x=[],
         df,
         builtins_too=false,
         is_abstr,        % abstract module?
         funvars=[],      % records variables bound to funs
                          % (for coping with list comprehension)
         matches=[],      % records other bound variables
         unresolved=[],   % unresolved calls, {{mfa(),mfa()},Line}
         %% experimental; -xref(FunEdge) is recognized.
         lattrs=[],       % local calls, {{mfa(),mfa()},Line}
         xattrs=[],       % external calls, -"-
         battrs=[],       % badly formed xref attributes, term().
         on_load          % function name
        }).

-include("xref.hrl").

%% The versions of the abstract code are as follows:
%% R7: abstract_v1
%% R8: abstract_v2
%% R9C: raw_abstract_v1

%% -> {ok, Module, {DefAt, CallAt, LC, XC, X, Attrs}, Unresolved}} | EXIT
%% Attrs = {ALC, AXC, Bad}
%% ALC, AXC and Bad are extracted from the attribute 'xref'. An experiment.
%%
%% Entry point: walk all forms of Module's abstract code, collecting
%% definitions and call edges into an #xrefr{} accumulator.
module(Module, Forms, CollectBuiltins, X, DF) ->
    Attrs = [{Attr,V} || {attribute,_Line,Attr,V} <- Forms],
    IsAbstract = xref_utils:is_abstract_module(Attrs),
    S = #xrefr{module = Module, builtins_too = CollectBuiltins,
               is_abstr = IsAbstract, x = X, df = DF},
    forms(Forms, S).
%% Fold the forms through form/2; on the empty list, unpack the accumulated
%% state into the result tuple handed back to the caller of module/5.
forms([F | Fs], S) ->
    S1 = form(F, S),
    forms(Fs, S1);
forms([], S) ->
    %% Fix: the original destructured S into the identical #xrefr pattern
    %% twice in a row; the second match re-asserted already-bound variables
    %% and was a no-op, so it has been removed.
    #xrefr{module = M, def_at = DefAt,
           l_call_at = LCallAt, x_call_at = XCallAt,
           el = LC, ex = XC, x = X, df = Depr, on_load = OnLoad,
           lattrs = AL, xattrs = AX, battrs = B, unresolved = U} = S,
    OL = case OnLoad of
             undefined -> [];
             F -> [{M, F, 0}]
         end,
    Attrs = {lists:reverse(AL), lists:reverse(AX), lists:reverse(B)},
    {ok, M, {DefAt, LCallAt, XCallAt, LC, XC, X, Attrs, Depr, OL}, U}.

%% Dispatch on one abstract form.
form({attribute, Line, xref, Calls}, S) -> % experimental
    #xrefr{module = M, function = Fun,
           lattrs = L, xattrs = X, battrs = B} = S,
    attr(Calls, erl_anno:line(Line), M, Fun, L, X, B, S);
form({attribute, _, on_load, {F, 0}}, S) ->
    S#xrefr{on_load = F};
form({attribute, _Line, _Attr, _Val}, S) ->
    S;
%% Auto-generated functions are not traversed.
form({function, _, module_info, 0, _Clauses}, S) ->
    S;
form({function, _, module_info, 1, _Clauses}, S) ->
    S;
%% NOTE(review): this clause only skips behaviour_info/1 when its line is
%% literally 0 (a generated function); confirm the `0 =' is intentional and
%% not a transcription of a plain wildcard.
form({function, 0 = _Line, behaviour_info, 1, _Clauses}, S) ->
    S;
form({function, Anno, Name, Arity, Clauses}, S) ->
    MFA0 = {S#xrefr.module, Name, Arity},
    MFA = adjust_arity(S, MFA0),
    S1 = S#xrefr{function = MFA},
    Line = erl_anno:line(Anno),
    S2 = S1#xrefr{def_at = [{MFA,Line} | S#xrefr.def_at]},
    S3 = clauses(Clauses, S2),
    S3#xrefr{function = []};
form(_, S) ->
    %% OTP 20. Other uninteresting forms such as {eof, _} and {warning, _}.
    %% Exposed because sys_pre_expand is no longer run.
    S.

%% Traverse a function's clauses, restoring funvars/matches afterwards so
%% bindings do not leak between clauses.
clauses(Cls, S) ->
    #xrefr{funvars = FunVars, matches = Matches} = S,
    clauses(Cls, FunVars, Matches, S).

clauses([{clause, _Line, _H, G, B} | Cs], FunVars, Matches, S) ->
    %% Guards contain only builtins; scan them only when builtins count.
    S1 = case S#xrefr.builtins_too of
             true -> expr(G, S);
             false -> S
         end,
    S2 = expr(B, S1),
    S3 = S2#xrefr{funvars = FunVars, matches = Matches},
    clauses(Cs, S3);
clauses([], _FunVars, _Matches, S) ->
    S.
%% Parse the body of an -xref(...) attribute (experimental).
%% AL/AX accumulate local/external call edges, B badly formed entries.
%% Fun is the MFA of the enclosing function, or [] at module level.
attr(NotList, Ln, M, Fun, AL, AX, B, S) when not is_list(NotList) ->
    %% A single entry is treated as a one-element list.
    attr([NotList], Ln, M, Fun, AL, AX, B, S);
attr([E={From, To} | As], Ln, M, Fun, AL, AX, B, S) ->
    case mfa(From, M) of
        {_, _, MFA} when MFA =:= Fun; [] =:= Fun ->
            %% Explicit {From, To} edge, From matches the context.
            attr(From, To, Ln, M, Fun, AL, AX, B, S, As, E);
        {_, _, _} ->
            %% From names some other function: badly formed here.
            attr(As, Ln, M, Fun, AL, AX, [E | B], S);
        _ ->
            %% From is not an MFA; treat the whole pair as a target
            %% called from the enclosing function.
            attr(Fun, E, Ln, M, Fun, AL, AX, B, S, As, E)
    end;
attr([To | As], Ln, M, Fun, AL, AX, B, S) ->
    %% A bare target: an edge from the enclosing function.
    attr(Fun, To, Ln, M, Fun, AL, AX, B, S, As, To);
attr([], _Ln, _M, _Fun, AL, AX, B, S) ->
    S#xrefr{lattrs = AL, xattrs = AX, battrs = B}.

%% Classify one From->To edge as local or external and accumulate it;
%% anything else is recorded as badly formed.
attr(From, To, Ln, M, Fun, AL, AX, B, S, As, E) ->
    case {mfa(From, M), mfa(To, M)} of
        {{true,_,F}, {_,external,T}} ->
            attr(As, Ln, M, Fun, AL, [{{F,T},Ln} | AX], B, S);
        {{true,_,F}, {_,local,T}} ->
            attr(As, Ln, M, Fun, [{{F,T},Ln} | AL], AX, B, S);
        _ ->
            attr(As, Ln, M, Fun, AL, AX, [E | B], S)
    end.

%% Normalize an attribute entry to {InThisModule, Locality, MFA},
%% or false when it is not an (M)FA at all.
mfa({F,A}, M) when is_atom(F), is_integer(A) ->
    {true, local, {M,F,A}};
mfa(MFA={M,F,A}, M1) when is_atom(M), is_atom(F), is_integer(A) ->
    {M=:=M1, external, MFA};
mfa(_, _M) -> false.

%% Walk an expression (or a list/tuple of expressions), recording calls.
expr({'if', _Line, Cs}, S) ->
    clauses(Cs, S);
expr({'case', _Line, E, Cs}, S) ->
    S1 = expr(E, S),
    clauses(Cs, S1);
expr({'receive', _Line, Cs}, S) ->
    clauses(Cs, S);
expr({'receive', _Line, Cs, To, ToEs}, S) ->
    S1 = expr(To, S),
    S2 = expr(ToEs, S1),
    clauses(Cs, S2);
expr({'try',_Line,Es,Scs,Ccs,As}, S) ->
    S1 = expr(Es, S),
    S2 = clauses(Scs, S1),
    S3 = clauses(Ccs, S2),
    expr(As, S3);
expr({'fun', Line, {function, {atom,_,Mod}, {atom,_,Name}, {integer,_,Arity}}}, S) ->
    %% New format in R15. M:F/A (literals).
    %% Dummy arguments of the right arity stand in for the real ones.
    As = lists:duplicate(Arity, {atom, Line, foo}),
    external_call(Mod, Name, As, Line, false, S);
expr({'fun', Line, {function, Mod, Name, {integer,_,Arity}}}, S) ->
    %% New format in R15. M:F/A (one or more variables).
    As = lists:duplicate(Arity, {atom, Line, foo}),
    external_call(erlang, apply, [Mod, Name, list2term(As)], Line, true, S);
expr({'fun', Line, {function, Mod, Name, _Arity}}, S) ->
    %% New format in R15. M:F/A (one or more variables).
    As = {var, Line, '_'},
    external_call(erlang, apply, [Mod, Name, As], Line, true, S);
%% Only abstract_v1 and abstract_v2.
expr({'fun', Line, {function, Name, Arity}, _Extra}, S) ->
    %% Added in R8.
    handle_call(local, S#xrefr.module, Name, Arity, Line, S);
expr({'fun', _Line, {clauses, Cs}, _Extra}, S) ->
    clauses(Cs, S);
%% End abstract_v1 and abstract_v2.
expr({'fun', Line, {function, Name, Arity}}, S) ->
    %% Added in OTP 20.
    handle_call(local, S#xrefr.module, Name, Arity, Line, S);
expr({'fun', _Line, {clauses, Cs}}, S) ->
    clauses(Cs, S);
expr({named_fun, _Line, '_', Cs}, S) ->
    clauses(Cs, S);
expr({named_fun, _Line, Name, Cs}, S) ->
    %% The fun's own name behaves like a variable bound to a fun.
    S1 = S#xrefr{funvars = [Name | S#xrefr.funvars]},
    clauses(Cs, S1);
expr({call, Line, {atom, _, Name}, As}, S) ->
    S1 = handle_call(local, S#xrefr.module, Name, length(As), Line, S),
    expr(As, S1);
expr({call, Line, {remote, _Line, {atom,_,Mod}, {atom,_,Name}}, As}, S) ->
    external_call(Mod, Name, As, Line, false, S);
expr({call, Line, {remote, _Line, Mod, Name}, As}, S) ->
    %% Added in R8. Variable module and/or function: model as apply/3.
    external_call(erlang, apply, [Mod, Name, list2term(As)], Line, true, S);
expr({call, Line, F, As}, S) ->
    external_call(erlang, apply, [F, list2term(As)], Line, true, S);
expr({match, _Line, {var,_,Var}, {'fun', _, {clauses, Cs}, _Extra}}, S) ->
    %% This is what is needed in R7 to avoid warnings for the functions
    %% that are passed around by the "expansion" of list comprehension.
    S1 = S#xrefr{funvars = [Var | S#xrefr.funvars]},
    clauses(Cs, S1);
expr({match, _Line, {var,_,Var}, {'fun', _, {clauses, Cs}}}, S) ->
    %% OTP 20. Exposed because sys_pre_expand is no longer run.
    S1 = S#xrefr{funvars = [Var | S#xrefr.funvars]},
    clauses(Cs, S1);
expr({match, _Line, {var,_,Var}, {named_fun, _, _, _} = Fun}, S) ->
    %% OTP 20. Exposed because sys_pre_expand is no longer run.
    S1 = S#xrefr{funvars = [Var | S#xrefr.funvars]},
    expr(Fun, S1);
expr({match, _Line, {var,_,Var}, E}, S) ->
    %% Used for resolving code like
    %%   Args = [A,B], apply(m, f, Args)
    S1 = S#xrefr{matches = [{Var, E} | S#xrefr.matches]},
    expr(E, S1);
expr({op, _Line, 'orelse', Op1, Op2}, S) ->
    expr([Op1, Op2], S);
expr({op, _Line, 'andalso', Op1, Op2}, S) ->
    expr([Op1, Op2], S);
expr({op, Line, Op, Operand1, Operand2}, S) ->
    external_call(erlang, Op, [Operand1, Operand2], Line, false, S);
expr({op, Line, Op, Operand}, S) ->
    external_call(erlang, Op, [Operand], Line, false, S);
expr(T, S) when is_tuple(T) ->
    %% Generic descent into any other abstract-format node.
    expr(tuple_to_list(T), S);
expr([E | Es], S) ->
    expr(Es, expr(E, S));
expr(_E, S) ->
    S.

%% Mod and Fun may not correspond to something in the abstract code,
%% which is signalled by X =:= true.
%% W classifies fun-carrying BIFs: 'apply2' for apply/2, 'debug4' for
%% erts_debug:apply/4, 'type' for type tests, an integer for the
%% position-based spawn/apply family, or false.
external_call(Mod, Fun, ArgsList, Line, X, S) ->
    Arity = length(ArgsList),
    W = case xref_utils:is_funfun(Mod, Fun, Arity) of
            true when erlang =:= Mod, apply =:= Fun, 2 =:= Arity -> apply2;
            true when erts_debug =:= Mod, apply =:= Fun, 4 =:= Arity -> debug4;
            true when erlang =:= Mod, spawn_opt =:= Fun -> Arity - 1;
            true -> Arity;
            false when Mod =:= erlang ->
                case erl_internal:type_test(Fun, Arity) of
                    true -> type;
                    false -> false
                end;
            false -> false
        end,
    S1 = if
             W =:= type; X -> S;
             true -> handle_call(external, Mod, Fun, Arity, Line, S)
         end,
    case {W, ArgsList} of
        {false, _} -> expr(ArgsList, S1);
        {type, _} -> expr(ArgsList, S1);
        {apply2, [{tuple, _, [M,F]}, ArgsTerm]} ->
            %% apply({M,F}, Args)
            eval_args(M, F, ArgsTerm, Line, S1, ArgsList, []);
        {1, [{tuple, _, [M,F]} | R]} -> % R = [] unless spawn_opt
            eval_args(M, F, list2term([]), Line, S1, ArgsList, R);
        {2, [Node, {tuple, _, [M,F]} | R]} -> % R = [] unless spawn_opt
            eval_args(M, F, list2term([]), Line, S1, ArgsList, [Node | R]);
        {3, [M, F, ArgsTerm | R]} -> % R = [] unless spawn_opt
            eval_args(M, F, ArgsTerm, Line, S1, ArgsList, R);
        {4, [Node, M, F, ArgsTerm | R]} -> % R = [] unless spawn_opt
            eval_args(M, F, ArgsTerm, Line, S1, ArgsList, [Node | R]);
        {debug4, [M, F, ArgsTerm, _]} ->
            eval_args(M, F, ArgsTerm, Line, S1, ArgsList, []);
        _Else -> % apply2, 1 or 2
            check_funarg(W, ArgsList, Line, S1)
    end.

%% Try to turn an applied {M, F, ArgsTerm} into a direct call; fall back
%% to recording an unresolved call when M, F or the arity is unknown.
eval_args(Mod, Fun, ArgsTerm, Line, S, ArgsList, Extra) ->
    {IsSimpleCall, M, F} = mod_fun(Mod, Fun),
    case term2list(ArgsTerm, [], S) of
        undefined ->
            %% Arity unknown: -1 marks that below.
            S1 = unresolved(M, F, -1, Line, S),
            expr(ArgsList, S1);
        ArgsList2 when not IsSimpleCall ->
            S1 = unresolved(M, F, length(ArgsList2), Line, S),
            expr(ArgsList, S1);
        ArgsList2 when IsSimpleCall ->
            S1 = expr(Extra, S),
            external_call(M, F, ArgsList2, Line, false, S1)
    end.

%% {BothLiterals, Module, Function}; unknown parts become the ?MOD_EXPR /
%% ?VAR_EXPR placeholders from xref.hrl.
mod_fun({atom,_,M1}, {atom,_,F1}) -> {true, M1, F1};
mod_fun({atom,_,M1}, _) -> {false, M1, ?VAR_EXPR};
mod_fun(_, {atom,_,F1}) -> {false, ?MOD_EXPR, F1};
mod_fun(_, _) -> {false, ?MOD_EXPR, ?VAR_EXPR}.

%% The fun-position argument of a fun-carrying BIF was not an {M,F}
%% tuple; record an unresolved call unless it is a known fun.
check_funarg(W, ArgsList, Line, S) ->
    {FunArg, Args} = fun_args(W, ArgsList),
    S1 = case funarg(FunArg, S) of
             true ->
                 S;
             false when is_integer(W) -> % 1 or 2
                 unresolved(?MOD_EXPR, ?VAR_EXPR, 0, Line, S);
             false -> % apply2
                 N = case term2list(Args, [], S) of
                         undefined -> -1;
                         As -> length(As)
                     end,
                 unresolved(?MOD_EXPR, ?VAR_EXPR, N, Line, S)
         end,
    expr(ArgsList, S1).

%% Is this argument known to be a fun (literal fun, or a variable
%% previously recorded in funvars)?
funarg({'fun', _, _Clauses, _Extra}, _S) -> true;
funarg({'fun', _, {clauses, _}}, _S) ->
    %% OTP 20. sys_pre_expand not run.
    true;
funarg({'fun', _, {function, _, _}}, _S) ->
    %% OTP 20. sys_pre_expand not run.
    true;
funarg({'fun', _, {function,_,_,_}}, _S) ->
    %% New abstract format for fun M:F/A in R15.
    true;
funarg({named_fun, _, _, _}, _S) ->
    %% OTP 20. sys_pre_expand not run.
    true;
funarg({var, _, Var}, S) -> member(Var, S#xrefr.funvars);
funarg(_, _S) -> false.

%% Split the argument list of a fun-carrying BIF into the fun argument
%% and the rest, depending on the fun's position W.
fun_args(apply2, [FunArg, Args]) -> {FunArg, Args};
fun_args(1, [FunArg | Args]) -> {FunArg, Args};
fun_args(2, [_Node, FunArg | Args]) -> {FunArg, Args}.

%% Build an abstract-format list term (cons/nil chain) from a plain
%% list of abstract terms, annotated with line 0.
list2term(L) ->
    A = erl_anno:new(0),
    list2term(L, A).

list2term([A | As], Anno) ->
    {cons, Anno, A, list2term(As)};
list2term([], Anno) ->
    {nil, Anno}.
%% Resolve an abstract cons/nil chain into a plain list of abstract
%% terms; variables are looked up among the recorded matches. Returns
%% 'undefined' when the term cannot be resolved to a proper list.
term2list({cons, _Line, H, T}, L, S) ->
    term2list(T, [H | L], S);
term2list({nil, _Line}, L, _S) ->
    reverse(L);
term2list({var, _, Var}, L, S) ->
    case keysearch(Var, 1, S#xrefr.matches) of
        {value, {Var, E}} ->
            term2list(E, L, S);
        false ->
            undefined
    end;
term2list(_Else, _L, _S) ->
    undefined.

%% Record a call whose target could not be fully resolved (arity -1
%% when even the number of arguments is unknown).
unresolved(M, F, A, Line, S) ->
    handle_call(external, {M,F,A}, Line, S, true).

%% Record a call unless it targets a BIF and BIF calls are not being
%% collected.
handle_call(Locality, Module, Name, Arity, Line, S) ->
    case xref_utils:is_builtin(Module, Name, Arity) of
        true when not S#xrefr.builtins_too -> S;
        _Else ->
            To = {Module, Name, Arity},
            handle_call(Locality, To, Line, S, false)
    end.

%% Record the call From -> To (From is the function currently being
%% scanned), optionally also in the unresolved list.
handle_call(Locality, To0, Anno, S, IsUnres) ->
    From = S#xrefr.function,
    To = adjust_arity(S, To0),
    Call = {From, To},
    Line = erl_anno:line(Anno),
    CallAt = {Call, Line},
    S1 = if
             IsUnres ->
                 S#xrefr{unresolved = [CallAt | S#xrefr.unresolved]};
             true ->
                 S
         end,
    case Locality of
        local ->
            S1#xrefr{el = [Call | S1#xrefr.el],
                     l_call_at = [CallAt | S1#xrefr.l_call_at]};
        external ->
            S1#xrefr{ex = [Call | S1#xrefr.ex],
                     x_call_at = [CallAt | S1#xrefr.x_call_at]}
    end.

%% In an abstract module every non-static function carries an implicit
%% extra parameter; compensate by decreasing the arity by one.
adjust_arity(#xrefr{is_abstr = true, module = M}, {M, F, A} = MFA) ->
    case xref_utils:is_static_function(F, A) of
        true -> MFA;
        false -> {M,F,A-1}
    end;
adjust_arity(_S, MFA) ->
    MFA.
lib/tools/src/xref_reader.erl
0.532425
0.454412
xref_reader.erl
starcoder
%%%-------------------------------------------------------------------
%%% File: gen_stream_odd_nums.erl
%%% Author: <NAME>
%%% Description:
%%% Generate odd numbers as a binary stream. This is an example of
%%% a gen_stream behaviour which keeps no internal state: every
%%% stream element is computed purely from its position relative to
%%% the beginning of the stream. Used for testing by
%%% gen_stream_SUITE.
%%%-------------------------------------------------------------------
-module(gen_stream_odd_nums).
-behaviour(gen_stream).

-export([init/1, terminate/2, code_change/3, stream_size/1, extract_block/5, extract_final_block/5, inc_progress/2]).
-export([gen_block/2]).

%% -include_lib("stdlib/include/gen_stream.hrl").
%% -include("../include/gen_stream.hrl").

%%%-------------------------------------------------------------------
%%% Argument parsing
%%%-------------------------------------------------------------------

%% Fetch the configured stream size, defaulting to 'infinite'.
get_size_option(Options) ->
    proplists:get_value(stream_size, Options, infinite).

%%%-------------------------------------------------------------------
%%% External gen_stream behaviour functions
%%%-------------------------------------------------------------------

%% Called when gen_stream gets {behaviour, odd_nums, InitArgs}.
%% This stream needs no internal state.
init(_Args) ->
    ok.

%% Report the stream length: an integer option value is taken
%% verbatim, anything else means the stream is unbounded.
stream_size(Options) ->
    Size = get_size_option(Options),
    case is_integer(Size) of
        true -> Size;
        false -> infinite
    end.

%% Track pct_complete: chunks are always binaries, so blindly add the
%% chunk's byte size to the running total.
inc_progress(Seen, Chunk) ->
    size(Chunk) + Seen.

%% This behaviour has no state to clean up.
terminate(_Reason, _State) ->
    ok.

%% Action to take on a code change.
%% Code upgrade: this behaviour keeps no state, so pass it through.
code_change(_OldVsn, ModState, _Extra) ->
    ModState.

%% Normal block retrieval. Each element occupies 4 bytes, so byte
%% position and byte count are converted into element indices before
%% generating the block.
extract_block(_State, Pos, NumBytes, _ChunkSize, _BlockFactor) ->
    gen_block(Pos div 4, NumBytes div 4).

%% Final block retrieval: identical to extract_block/5, because the
%% caller has already reduced ChunkSize/NumBytes to what remains.
extract_final_block(State, Pos, NumBytes, ChunkSize, BlockFactor) ->
    extract_block(State, Pos, NumBytes, ChunkSize, BlockFactor).

%%%-------------------------------------------------------------------
%%% Internal functions
%%%-------------------------------------------------------------------

%% The generator itself: Count odd numbers starting from the Pos'th
%% one (zero-based), each encoded as a 32-bit big-endian integer.
gen_block(Pos, Count) ->
    << <<(2 * Index + 1):32>> || Index <- lists:seq(Pos, Pos + Count - 1) >>.
src/gen_stream_odd_nums.erl
0.570331
0.465934
gen_stream_odd_nums.erl
starcoder
%% Copyright (c) 2013 <NAME>
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.

%% An eunit_listener that reports test results through rebar_vim_io,
%% mapping each failure/skip back to its source file and line.
-module(rebar_vim_listener).
-behaviour(eunit_listener).

-export([start/1]).
-export([init/1]).
-export([handle_begin/3]).
-export([handle_end/3]).
-export([handle_cancel/3]).
-export([terminate/2]).

%% NOTE(review): 'dict' is the legacy key-value store; maps would be the
%% modern choice, but the representation is internal to this module.
-record(state, {rebar_vim_device :: rebar_vim_io:device(),
                sources = dict:new() :: dict:dict(),  % module -> source file
                groups = dict:new() :: dict:dict()}). % group id -> description

-type state() :: #state{}.
-type data() :: [{atom(), term()}].

%% Start the listener via the generic eunit_listener behaviour.
-spec start(list()) -> pid() | {pid(), reference()}.
start(Opts) ->
    eunit_listener:start(?MODULE, Opts).

%% Open the output device once, up front.
-spec init(list()) -> state().
init(_Opts) ->
    {ok, RVDevice} = rebar_vim_io:open(user),
    #state{rebar_vim_device=RVDevice}.

%% Remember group descriptions (by group id) and the source file of
%% each module under test, for later error reporting.
-spec handle_begin(test | group, data(), state()) -> state().
handle_begin(group, Data, State = #state{groups=Groups}) ->
    {id, Id} = lists:keyfind(id, 1, Data),
    case lists:keyfind(desc, 1, Data) of
        {desc, ""} ->
            %% An empty description is normalized to 'undefined'.
            Groups2 = dict:store(Id, undefined, Groups),
            State#state{groups=Groups2};
        {desc, Desc} ->
            Groups2 = dict:store(Id, Desc, Groups),
            State#state{groups=Groups2};
        false ->
            State
    end;
handle_begin(test, Data, State = #state{sources=Sources}) ->
    Module = get_module(Data),
    case dict:is_key(Module, Sources) of
        true ->
            State;
        false ->
            File = rebar_vim_util:module_to_source(Module),
            Sources2 = dict:store(Module, File, Sources),
            State#state{sources=Sources2}
    end.

%% Report skipped and failed tests; successful tests are silent.
-spec handle_end(group | test, data(), state()) -> state().
handle_end(group, _Data, State) ->
    State;
handle_end(test, Data, State) ->
    case lists:keyfind(status, 1, Data) of
        {status, ok} ->
            State;
        {status, {skipped, Reason}} ->
            handle_error(skipped, Reason, Data, State);
        {status, {error, Reason}} ->
            handle_error(failed, Reason, Data, State)
    end.

%% A cancelled test is reported as skipped.
-spec handle_cancel(group | test, data(), state()) -> state().
handle_cancel(group, _Data, State) ->
    State;
handle_cancel(test, Data, State) ->
    {reason, Reason} = lists:keyfind(reason, 1, Data),
    handle_error(skipped, Reason, Data, State).

%% Flush the summary Data and close the output device.
-spec terminate(data(), state()) -> ok.
terminate(Data, #state{rebar_vim_device=RVDevice}) ->
    rebar_vim_io:write(RVDevice, Data),
    rebar_vim_io:close(RVDevice).

%% internal

%% Emit one formatted error line (file, line, type, reason) for a
%% failed or skipped test.
handle_error(ErrorType, Reason, Data,
             State = #state{rebar_vim_device=RVDevice,
                            sources=Sources,
                            groups=Groups}) ->
    Module = get_module(Data),
    File = dict:fetch(Module, Sources),
    Line = proplists:get_value(line, Data, none),
    Reason2 = format_reason(ErrorType, Reason, Data, Groups),
    rebar_vim_io:format(RVDevice, File, Line, ErrorType, Reason2),
    State.

%% Extract the module name from the test's {source, {M,F,A}} entry.
get_module(Data) ->
    {source, {Module, _Function, _Arity}} = lists:keyfind(source, 1, Data),
    Module.

format_reason(ErrorType, Reason, Data, Groups) ->
    {source, {_Module, Function, Arity}} = lists:keyfind(source, 1, Data),
    {id, Id} = lists:keyfind(id, 1, Data),
    Group = get_group_desc(Id, Groups),
    rebar_vim_io:format_reason(Group, Function, Arity, {ErrorType, Reason}).

%% A test's group id is its own id with the last element dropped;
%% look up the recorded description for it, if any.
get_group_desc(Id, Groups) ->
    [_|GroupIdRev] = lists:reverse(Id),
    GroupId = lists:reverse(GroupIdRev),
    case dict:find(GroupId, Groups) of
        {ok, undefined} -> undefined;
        {ok, Desc} -> {group, Desc};
        error -> undefined
    end.
src/rebar_vim_listener.erl
0.523177
0.412826
rebar_vim_listener.erl
starcoder
%% Copyright (c) Facebook, Inc. and its affiliates.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License

%% See assert_diagnostic.hrl.
-module(assert_diagnostic).

-include_lib("stdlib/include/assert.hrl").
-include_lib("test/assert_diagnostic.hrl").

-export([
    format/2,
    check_length/2,
    check_elements/3,
    assert_snapshot_match/2,
    assert_binary_match/2
]).

%% Render a format string and arguments to a flat string.
format(Template, Args) ->
    lists:flatten(io_lib:format(Template, Args)).

%% Compare list lengths; return [] when equal, otherwise a one-element
%% list describing which items are extra or missing.
check_length(A, B) ->
    LenA = length(A),
    LenB = length(B),
    if
        LenB =:= LenA ->
            [];
        LenB > LenA ->
            Extra = LenB - LenA,
            [
                format("Actual list has ~p unexpected items: ~p", [
                    Extra, lists:sublist(B, LenA + 1, Extra)
                ])
            ];
        true ->
            Missing = LenA - LenB,
            [
                format("Actual list lacks ~p expected items: ~p", [
                    Missing, lists:sublist(A, LenB + 1, Missing)
                ])
            ]
    end.

%% Collect a message for every index where the two lists differ.
%% Very naive for now. Don't try to 'diff':
%% No detection of inserted nor deleted items.
check_elements([], _, _) ->
    [];
check_elements(_, [], _) ->
    [];
check_elements([Same | RestA], [Same | RestB], Index) ->
    check_elements(RestA, RestB, Index + 1);
check_elements([ExpectedItem | RestA], [ActualItem | RestB], Index) ->
    Message = format(
        "Item ~p differs:~n"
        "Expected: ~p~n"
        "Value: ~p",
        [Index, ExpectedItem, ActualItem]
    ),
    [Message | check_elements(RestA, RestB, Index + 1)].

%% Check the formatted result matches the reference.
assert_snapshot_match(Expected, {ok, Formatted, _}) ->
    assert_binary_match(Expected, Formatted);
assert_snapshot_match(_Expected, {skip, _}) ->
    ok;
assert_snapshot_match(_Expected, Other) ->
    ct:fail("unexpected: ~p~n", [Other]).

%% Exact match succeeds immediately; otherwise compare line by line
%% for a more readable diagnostic.
assert_binary_match(Expected, Expected) ->
    ok;
assert_binary_match(Expected, Other) ->
    % Split by lines (preserving empty lines).
    ExpectedLines = string:split(Expected, "\n", all),
    ActualLines = string:split(Other, "\n", all),
    % We already know they are not equal,
    % this macro gives a better diagnostic.
    ?assertListEqual(ExpectedLines, ActualLines).
test/assert_diagnostic.erl
0.747892
0.507446
assert_diagnostic.erl
starcoder
%%%------------------------------------------------------------------------
%% Copyright 2019, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc Behaviour and dispatching API for meter implementations. A meter
%% is a tuple {CallbackModule, _}; every API function below dispatches to
%% CallbackModule.
%% @end
%%%-------------------------------------------------------------------
-module(ot_meter).

-include("meter.hrl").

-callback new_instrument(opentelemetry:meter(), name(), instrument_kind(), instrument_opts()) -> boolean().
%% Fixed: new_instruments/2 passes a list of instrument definitions (see
%% the exported -spec and instrument_definition/4), not bare opts.
-callback new_instruments(opentelemetry:meter(), [instrument_definition()]) -> boolean().
-callback record(opentelemetry:meter(), term(), number()) -> ok.
-callback record(opentelemetry:meter(), name(), labels(), number()) -> ok.
%% Fixed: argument order aligned with the record_batch/3 wrapper below,
%% which calls Module:record_batch(Meter, Labels, Measurements).
-callback record_batch(opentelemetry:meter(), labels(), [measurement()]) -> ok.
-callback bind(opentelemetry:meter(), instrument(), labels()) -> term().
-callback release(opentelemetry:meter(), term()) -> ok.
-callback register_observer(opentelemetry:meter(), ot_meter:name(), ot_observer:callback()) -> ok | unknown_instrument.
-callback set_observer_callback(opentelemetry:meter(), ot_meter:name(), ot_observer:callback()) -> ok | unknown_instrument.
-callback observe(ot_observer:observer_instrument(), number(), labels()) -> ok.

-export([new_instrument/4,
         new_instruments/2,
         instrument_definition/4,
         bind/3,
         release/1,
         record/2,
         record/4,
         record_batch/3,
         register_observer/3,
         set_observer_callback/3,
         observe/3]).

-type label_key() :: unicode:unicode_binary().
-type label_value() :: unicode:unicode_binary().
-type label() :: {label_key(), label_value()}.
-type labels() :: [label()].

-type name() :: unicode:unicode_binary().
-type description() :: unicode:unicode_binary().
-type instrument_kind() :: module().
-type unit() :: atom().
-type number_kind() :: integer | float.
-type instrument_config() :: #{description => description(),
                               number_kind => number_kind(),
                               unit => unit(),
                               monotonic := boolean(),
                               synchronous := boolean()}.
-type instrument_properties() :: #{monotonic := boolean(),
                                   synchronous := boolean()}.
-type instrument_opts() :: #{description => description(),
                             number_kind => number_kind(),
                             unit => unit()}.
-type instrument_definition() :: {name(), instrument_kind(), instrument_opts()}.
-type instrument() :: term().
-type bound_instrument() :: {opentelemetry:meter(), term()}.
-type measurement() :: {bound_instrument() | name(), number()}.

-export_type([bound_instrument/0,
              name/0,
              description/0,
              instrument_kind/0,
              instrument_config/0,
              instrument_opts/0,
              instrument_definition/0,
              number_kind/0,
              unit/0,
              measurement/0,
              labels/0]).

%% Create a single instrument on the meter's callback module.
-spec new_instrument(opentelemetry:meter(), name(), instrument_kind(), instrument_opts()) -> boolean().
new_instrument(Meter={Module, _}, Name, InstrumentKind, InstrumentOpts) ->
    Module:new_instrument(Meter, Name, InstrumentKind, InstrumentOpts).

%% Create several instruments at once from their definitions.
-spec new_instruments(opentelemetry:meter(), [instrument_definition()]) -> boolean().
new_instruments(Meter={Module, _}, List) ->
    Module:new_instruments(Meter, List).

-spec instrument_definition(module(), name(), instrument_properties(), instrument_opts()) -> instrument_definition().
instrument_definition(InstrumentModule, Name, Properties, Opts) ->
    %% instrument config values are not allowed to be overridden so in case
    %% the user attempts to pass one as an option this merge will use the
    %% config value
    {Name, InstrumentModule, maps:merge(Opts, Properties)}.

%% Pre-bind an instrument to a label set for repeated recording.
-spec bind(opentelemetry:meter(), name(), labels()) -> bound_instrument().
bind(Meter={Module, _}, Name, Labels) ->
    {Meter, Module:bind(Meter, Name, Labels)}.

-spec release(bound_instrument()) -> ok.
release({Meter={Module, _}, BoundInstrument}) ->
    Module:release(Meter, BoundInstrument).

%% Record one measurement by instrument name. Note the callback takes
%% (Meter, Name, Labels, Number) — argument order differs from this API.
-spec record(opentelemetry:meter(), name(), number(), labels()) -> ok.
record(Meter={Module, _}, Name, Number, Labels) ->
    Module:record(Meter, Name, Labels, Number).

%% Record one measurement on a previously bound instrument.
-spec record(bound_instrument(), number()) -> ok.
record({Meter={Module, _}, BoundInstrument}, Number) ->
    Module:record(Meter, BoundInstrument, Number).

%% Record several measurements that share one label set.
-spec record_batch(opentelemetry:meter(), labels(), [measurement()]) -> ok.
record_batch(Meter={Module, _}, Labels, Measurements) ->
    Module:record_batch(Meter, Labels, Measurements).

-spec register_observer(opentelemetry:meter(), ot_meter:name(), ot_observer:callback()) -> ok | unknown_instrument.
register_observer(Meter={Module, _}, Name, Callback) ->
    Module:register_observer(Meter, Name, Callback).

-spec set_observer_callback(opentelemetry:meter(), ot_meter:name(), ot_observer:callback()) -> ok | unknown_instrument.
set_observer_callback(Meter={Module, _}, Name, Callback) ->
    Module:set_observer_callback(Meter, Name, Callback).

%% NOTE(review): the spec says observer_result() while the callback says
%% observer_instrument(); both are opaque here — confirm in ot_observer.
-spec observe(ot_observer:observer_result(), number(), labels()) -> ok.
observe({Module, Instrument}, Number, Labels) ->
    Module:observe(Instrument, Number, Labels).
apps/opentelemetry_api/src/ot_meter.erl
0.819496
0.480905
ot_meter.erl
starcoder
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.

-module(mvptree).

-export([from_list/3, search/5]).

%% Partition a list into consecutive chunks of at most ChunkSize
%% elements; the empty list yields no chunks.
split([], _ChunkSize) ->
    [];
split(List, ChunkSize) when length(List) =< ChunkSize ->
    [List];
split(List, ChunkSize) ->
    {Chunk, Remaining} = lists:split(ChunkSize, List),
    [Chunk | split(Remaining, ChunkSize)].

%% Build a multi-vantage-point tree with branching factor M from a list
%% of {Point, Value} pairs, using Distance/2 as the metric. Each node
%% stores its point and up to M children annotated with the min/max
%% distance of the points they cover.
from_list(_M, _Distance, []) ->
    none;
from_list(M, Distance, [{Point, Value} | Others]) ->
    ByDistance = lists:keysort(1, [{Distance(Point, P), {P, V}} || {P, V} <- Others]),
    Remaining = length(Others),
    %% Ceiling division, so the points split into at most M chunks.
    ChunkSize = case Remaining rem M of
                    0 -> Remaining div M;
                    _ -> Remaining div M + 1
                end,
    Children = [{element(1, hd(Chunk)),
                 element(1, lists:last(Chunk)),
                 from_list(M, Distance, [Entry || {_, Entry} <- Chunk])}
                || Chunk <- split(ByDistance, ChunkSize)],
    {tree, {Point, Value}, Children}.

%% Nearest-neighbour lookup: return the value of the closest point
%% within Limit of Loc, or Default when none is that close.
search(Distance, Loc, Limit, Default, Tree) ->
    {_BestDistance, Value} = search(Distance, Loc, {Limit, Default}, Tree),
    Value.
%% Core search: State is {Tau, Nearest} where Tau is the current search
%% radius and Nearest the best value so far. Children are pruned with
%% the triangle inequality against their [Min, Max] distance bands.
search(_, _, State, none) -> State;
search(Distance, Loc, {Tau, Nearest}, {tree, {Point, Value}, []}) ->
    %% Leaf: keep this point only if it improves on the current radius.
    D = Distance(Point, Loc),
    if D =< Tau -> {D, Value};
       true -> {Tau, Nearest}
    end;
search(Distance, Loc, {Tau, Nearest}, {tree, {Point, Value}, [{Min, Max, Tree}]}) ->
    %% Single child: descend only when [D-Tau1, D+Tau1] overlaps [Min, Max].
    D = Distance(Point, Loc),
    {Tau1, _} = State1 = if D =< Tau -> {D, Value};
                            true -> {Tau, Nearest}
                         end,
    if D + Tau1 >= Min, D - Tau1 =< Max ->
            search(Distance, Loc, State1, Tree);
       true ->
            State1
    end;
search(Distance, Loc, {Tau, Nearest}, {tree, {Point, Value}, [{Min1, Max1, Tree1}, {Min2, Max2, Tree2}]}) ->
    %% Two children: visit the nearer band first so its (possibly
    %% shrunken) radius can prune the other.
    D = Distance(Point, Loc),
    {Tau1, _} = State1 = if D =< Tau -> {D, Value};
                            true -> {Tau, Nearest}
                         end,
    Mid = (Max1 + Min2) / 2.0,
    if D < Mid ->
            if D + Tau1 >= Min1 ->
                    {Tau2, _} = State2 = search(Distance, Loc, State1, Tree1),
                    if D + Tau2 >= Min2 ->
                            search(Distance, Loc, State2, Tree2);
                       true ->
                            State2
                    end;
               true ->
                    State1
            end;
       true ->
            if D - Tau1 =< Max2 ->
                    {Tau2, _} = State2 = search(Distance, Loc, State1, Tree2),
                    if D - Tau2 =< Max1 ->
                            search(Distance, Loc, State2, Tree1);
                       true ->
                            State2
                    end;
               true ->
                    State1
            end
    end;
search(Distance, Loc, {Tau, Nearest}, {tree, {Point, Value}, SubTrees}) ->
    %% General case (3+ children): order the children by a lower bound
    %% on their distance to Loc, then visit in that order.
    D = Distance(Point, Loc),
    search_subtrees(
      Distance, Loc,
      if D =< Tau -> {D, Value};
         true -> {Tau, Nearest}
      end,
      sort_subtree(SubTrees, [], D)).

%% Visit subtrees in ascending lower-bound order; stop as soon as a
%% lower bound exceeds the current radius (the rest can only be worse).
search_subtrees(_, _, State, []) -> State;
search_subtrees(Distance, Loc, {Tau, Nearest}, [{MinD, Tree}|Rest]) ->
    if MinD =< Tau ->
            search_subtrees(
              Distance, Loc,
              search(Distance, Loc, {Tau,Nearest}, Tree),
              Rest);
       true ->
            {Tau, Nearest}
    end.

%% Annotate each subtree with a lower bound on the distance from Loc
%% (at distance D from this node's point) to any point the subtree
%% covers, producing a list sorted ascending by that bound.
%% Bands entirely below D get bound D-Max, bands containing D get 0.0,
%% bands entirely above D get Min-D (handled by sort_subtree/2).
sort_subtree([], Acc, _) -> Acc;
sort_subtree([{_, Max, Tree}|Rest], Acc, D) when Max =< D ->
    sort_subtree(Rest, [{D-Max, Tree}|Acc], D);
sort_subtree([{Min, _, Tree}|Rest], Acc, D) when Min =< D ->
    sort_subtree(Rest, [{0.0, Tree}|Acc], D);
sort_subtree(List, Acc, D) ->
    merge(Acc, sort_subtree(List, D)).

sort_subtree([], _) -> [];
sort_subtree([{Min, _, Tree}|Rest], D) when Min > D ->
    [{Min-D, Tree}|sort_subtree(Rest, D)].
%% Merge two key-sorted lists of {Key, Value} pairs into one key-sorted
%% list; on ties the element from the first list comes first.
merge([], Right) ->
    Right;
merge(Left, []) ->
    Left;
merge([{LeftKey, _} = LeftHead | LeftTail], [{RightKey, _} | _] = Right)
  when LeftKey =< RightKey ->
    [LeftHead | merge(LeftTail, Right)];
merge(Left, [RightHead | RightTail]) ->
    [RightHead | merge(Left, RightTail)].
src/mvptree.erl
0.563138
0.63202
mvptree.erl
starcoder
%%%------------------------------------------------------------------- %%% @doc %%% A library for creating "optics", a composable traversal over %%% arbitrary containers with the possibility of error. %%% %%% The traversal is modeled as an opaque type containing both a fold %%% and mapfold type. As mapfold is a superset of fold, fold is %%% implemented for efficiency only. The usual fold and mapfold return %%% types are wrapped in ok/error tuples to represent the possibility %%% of failure, with the provided compositions being responsible for %%% propagating errors back out and skipping further execution. %%% %%% These optics can then be composed to read and update nested data %%% structures. Three types of composition are possible. A wrap, which %%% modifies an existing optic into a new form. A chain, which %%% combines two optics so that one focuses on the value the previous %%% focuses on. Finally, a merge, which combines two optics to allow %%% both to focus over the same data. %%% @end %%%------------------------------------------------------------------- -module(optic). -record(optic, {fold :: optic_fold(), mapfold :: optic_mapfold()}). -type error() :: {error, term()}. %% The expected error format. -type option(Success) :: {ok, Success} | error(). %% A result type for operations that might fail. -type callback_map() :: fun((Elem :: term()) -> NewElem :: term()). %% Callback function invoked by map for each element of a container. -type callback_fold() :: fun((Elem :: term(), NewAcc :: term()) -> Acc :: term()). %% Callback function invoked by fold for each element of a container. -type callback_mapfold() :: fun((Elem :: term(), Acc :: term()) -> {NewElem :: term(), NewAcc :: term()}). %% Callback function invoked by mapfold for each element of a %% container. -type callback_filter() :: fun((Elem :: term()) -> boolean()). %% Callback function invoked by filter optics for each element of a %% container. 
-type callback_new() :: fun((Data :: term(), Template :: term()) -> NewData :: term()). %% Callback function invoked to create new containers. -type optic_fold() :: fun((Fold :: callback_fold(), Acc :: term(), Data :: term()) -> option(NewAcc :: term())). %% A fold function to be used as part of an optic. -type optic_mapfold() :: fun((MapFold :: callback_mapfold(), Acc :: term(), Data :: term()) -> option({NewData :: term(), NewAcc :: term()})). %% A mapfold function to be used as part of an optic. -type optic_wrap(Over) :: fun((Over) -> Over). %% A mapping function to wrap optics. -type optic_wrap_fold() :: optic_wrap(optic_fold()). %% A mapping function over optic folds. -type optic_wrap_mapfold() :: optic_wrap(optic_mapfold()). %% A mapping function over optic mapfolds. -type variations() :: #{create=>term(), strict=>boolean(), filter=>callback_filter(), require=>callback_filter()} | proplists:proplist(). %% Shared options to control optic construction. -opaque optic() :: #optic{}. %% A composable traversal over an arbitrary container. -type optics() :: [optic()]. %% A list of traversals to be composed. %% API -export([% Optic creation. new/1, new/2, wrap/2, wrap/3, chain/1, merge/1, is_optic/1, variations/3, create/3, lax/1, % Optic application. fold/4, get/2, mapfold/4, map/3, put/3, % Optics. id/0, error/1, filter/1, require/1]). -export_type([option/1, callback_map/0, callback_fold/0, callback_mapfold/0, callback_filter/0, optic_fold/0, optic_mapfold/0, optic_wrap_fold/0, optic_wrap_mapfold/0, variations/0, optic/0, optics/0]). %%%=================================================================== %%% API - Optic Creation & Composition %%%=================================================================== %% @doc %% Create a new optic for traversing a data structure. %% %% This is the less efficient form of optic construction and will %% infer a fold function from the given mapfold function. %% @end %% @returns An opaque optic record. 
%% @see new/2
-spec new(MapFold) -> optic() when
      MapFold :: optic_mapfold().
new(MapFold) ->
    %% The inferred fold runs the mapfold with an identity rewrite and
    %% discards the rebuilt container, keeping only the accumulator.
    Fold =
        fun (Fun, Acc, Data) ->
                case MapFold(
                       fun (Elem, InnerAcc) ->
                               {Elem, Fun(Elem, InnerAcc)}
                       end,
                       Acc,
                       Data) of
                    {ok, {_, NewAcc}} ->
                        {ok, NewAcc};
                    {error, _} = Error ->
                        Error
                end
        end,
    new(MapFold, Fold).

%% @doc
%% Create a new optic for traversing a data structure.
%%
%% Well behaved optics should implement the following properties:
%%
%% <ul>
%% <li>Get -> Put: Writing the same value as was read should result in
%% the original structure.</li>
%% <li>Put -> Get: Reading a value that was written should result in
%% the same value as was written.</li>
%% <li>Put -> Put: Writing a value twice should result in only the
%% last written value.</li>
%% </ul>
%%
%% Optics which are not well behaved will be more difficult to use and
%% compose with other optics. Their behaviour will change depending on
%% the order in which they are applied and the number of times they
%% are applied.
%%
%% @end
%% @param MapFold
%% At a minimum, an optic requires a mapfold function to be provided
%% for both collecting and modifying values. This function must take
%% three arguments; a callback function, an initial accumulator value
%% and an arbitrary structure to traverse. The callback function will
%% expect two values; an element and the current accumulator. It will
%% return a two item tuple with the modified element and the modified
%% accumulator. The mapfold function will return a two item tuple with
%% the modified structure and the final accumulator value, wrapped in
%% an ok or error tagged tuple.
%% @end
%% @param Fold
%% A fold function can also be provided for more efficient traversal
%% without modification. If one is not provided, it will be
%% inefficiently inferred from the MapFold function. This function
%% must take three arguments; a callback function, an initial
%% accumulator value and an arbitrary structure to traverse. The
%% callback function will expect two values; an element and the
%% current accumulator. It will return the modified accumulator. The
%% fold function will return a final accumulator value, wrapped in an
%% ok or error tagged tuple.
%% @end
%% @returns An opaque optic record.
-spec new(MapFold, Fold) -> optic() when
      MapFold :: optic_mapfold(),
      Fold :: optic_fold().
new(MapFold, Fold) ->
    #optic{fold=Fold, mapfold=MapFold}.

%% @doc
%% Wrap an existing optic.
%%
%% This is the less efficient form of optic construction and will
%% infer a fold wrapper from the given mapfold wrapper.
%% @end
%% @returns An opaque optic record.
%% @see wrap/3
-spec wrap(Optic, WrapMapFold) -> optic() when
      Optic :: optic(),
      WrapMapFold :: optic_wrap_mapfold().
wrap(#optic{} = Optic, WrapMapFold) ->
    %% Derive the fold wrapper by round-tripping through the mapfold
    %% wrapper: present the fold as a mapfold that leaves Data intact,
    %% wrap it, then strip the (unchanged) data from the result.
    WrapFold =
        fun (Fold) ->
                MapFold =
                    fun (Fun, Acc, Data) ->
                            case Fold(Fun, Acc, Data) of
                                {ok, NewAcc} ->
                                    {ok, {Data, NewAcc}};
                                {error, _} = Error ->
                                    Error
                            end
                    end,
                WrappedMapFold = WrapMapFold(MapFold),
                fun (Fun, Acc, Data) ->
                        case WrappedMapFold(Fun, Acc, Data) of
                            {ok, {_NewData, NewAcc}} ->
                                {ok, NewAcc};
                            {error, _} = Error ->
                                Error
                        end
                end
        end,
    wrap(Optic, WrapMapFold, WrapFold).

%% @doc
%% Wrap an existing optic.
%%
%% This allows for modifying or replacing the methods of an existing
%% optic by applying a mapping function to each of the mapfold and
%% fold methods.
%% @end
%% @param Optic An existing optic to modify.
%% @param WrapMapFold
%% A mapping function to apply to the optic's mapfold function.
%% @end
%% @param WrapFold
%% A mapping function to apply to the optic's fold function.
%% @end
%% @returns An opaque optic record.
-spec wrap(Optic, WrapMapFold, WrapFold) -> optic() when
      Optic :: optic(),
      WrapMapFold :: optic_wrap_mapfold(),
      WrapFold :: optic_wrap_fold().
wrap(#optic{fold=Fold, mapfold=MapFold}, WrapMapFold, WrapFold) ->
    NewMapFold = WrapMapFold(MapFold),
    NewFold = WrapFold(Fold),
    new(NewMapFold, NewFold).

%% @doc
%% Combine existing optics into a chain. In left to right order, each
%% optic then focuses on the result of the previous optic. The result
%% of this composition is itself an optic.
%%
%% This is the default composition method used for functions which
%% accept optics.
%% @end
%% @param Optics The list of optics to compose.
%% @returns An opaque optic record.
-spec chain(Optics) -> optic() when
      Optics :: optics().
chain(#optic{} = Optic) ->
    Optic;
chain([]) ->
    id();
chain([Head | Tail]) ->
    lists:foldl(fun compose/2, Head, Tail).

%% @doc
%% Merge existing optics into a single optic. In left to right order,
%% each optic focuses on the same data. The result of this composition
%% is itself an optic.
%%
%% It is the optic product type.
%% @end
%% @param Optics The list of optics to compose.
%% @returns An opaque optic record.
-spec merge(Optics) -> optic() when
      Optics :: optics().
merge(#optic{} = Optic) ->
    Optic;
merge([]) ->
    id();
merge([Head | Tail]) ->
    lists:foldl(fun product/2, Head, Tail).

%% @doc
%% Check if a term is an optic.
%% @end
%% @param Candidate The term to test.
%% @returns A boolean flag.
-spec is_optic(Candidate) -> boolean() when
      Candidate :: term().
is_optic(#optic{}) ->
    true;
is_optic(_) ->
    false.

%% @private
%% @doc
%% Internal interface for generating optics which support a range of
%% optional behaviours. The optic must return {error, undefined} when
%% it encounters an unknown type for these options to work. Due to the
%% restrictions this places on optic behaviour it is intended only for
%% internal use.
%%
%% Optics with the "create" option enabled are not well behaved, and
%% may exhibit unexpected behaviour when composed. It is also possible
%% for "filter" and "require" to no longer be well behaved, depending
%% on the filter function used.
%% @end
%% @param Optic The base optic to modify.
%% @param Options
%% The selected options. Expected options are a boolean "strict" for
%% if type errors should be reported or ignored, an arbitrarily valued
%% "create" for if type errors should force container creation, a
%% "filter" function to restrict the elements selected and a "require"
%% function to error when requirements are not met.
%% @end
%% @param New
%% When the "create" option is selected, the function to invoke to
%% perform the creation.
%% @end
%% @returns An opaque optic record.
%% @see create/3
%% @see filter/1
%% @see lax/1
%% @see require/1
-spec variations(Optic, Options, New) -> optic() when
      Optic :: optic(),
      Options :: variations(),
      New :: callback_new().
variations(#optic{} = Optic, Options, New) when is_list(Options) ->
    % Normalize proplist option form to map form.
    Strict = proplists:get_bool(strict, Options),
    RequiredOptions = #{strict=>Strict},
    OptionalOptions =
        lists:foldl(
          fun (Option, Acc) ->
                  case proplists:lookup(Option, Options) of
                      {Option, Value} ->
                          Acc#{Option=>Value};
                      none ->
                          Acc
                  end
          end,
          RequiredOptions,
          [create, filter, require]),
    variations(Optic, OptionalOptions, New);
variations(#optic{} = Optic, #{} = Options, New) ->
    %% Option application order matters: creation is wrapped first, then
    %% laxness, then "require", then "filter" are chained outside it.
    CreateOptic =
        case maps:is_key(create, Options) of
            true ->
                #{create:=Template} = Options,
                create(Optic, New, Template);
            false ->
                Optic
        end,
    LaxOptic =
        case maps:get(strict, Options, false) of
            true ->
                CreateOptic;
            false ->
                lax(CreateOptic);
            InvalidStrict ->
                erlang:error({invalid_strict_value, InvalidStrict})
        end,
    RequireOptic =
        case maps:get(require, Options, undefined) of
            undefined ->
                LaxOptic;
            Require when is_function(Require) ->
                chain([LaxOptic, require(Require)]);
            InvalidRequire ->
                erlang:error({invalid_require_value, InvalidRequire})
        end,
    FilterOptic =
        case maps:get(filter, Options, undefined) of
            undefined ->
                RequireOptic;
            Filter when is_function(Filter) ->
                chain([RequireOptic, filter(Filter)]);
            InvalidFilter ->
                erlang:error({invalid_filter_value, InvalidFilter})
        end,
    FilterOptic.
%% @doc
%% Wrap an existing optic to cause it to create a new container when
%% the optic would otherwise return `{error, undefined}' or
%% `{error, required}' during a mapfold operation.
%% @end
%% @param Optic The existing optic to wrap.
%% @param New
%% The callback function to apply when the mapfold fails.
%% Must take two arguments, the existing data and a template argument
%% to use to populate the new data. Should return the new container,
%% which will immediately have the wrapped mapfold function re-applied
%% after creation.
%% @end
%% @param Template
%% The template value to be given to the callback function.
%% @end
%% @returns An opaque optic record.
-spec create(Optic, New, Template) -> optic() when
      Optic :: optic(),
      New :: callback_new(),
      Template :: term().
create(Optic, New, Template) ->
    %% Folds are left untouched: creation only makes sense when writing.
    WrapFold = fun (Fold) -> Fold end,
    WrapMapFold =
        fun (MapFold) ->
                fun (Fun, Acc, Data) ->
                        case MapFold(Fun, Acc, Data) of
                            {error, Reason} when Reason =:= undefined;
                                                 Reason =:= required ->
                                %% Build a fresh container from the template
                                %% and retry the mapfold exactly once.
                                MapFold(Fun, Acc, New(Data, Template));
                            Result ->
                                Result
                        end
                end
        end,
    %% Consistency fix: call the local wrap/3 (as lax/1 below does)
    %% rather than the fully-qualified optic:wrap/3.
    wrap(Optic, WrapMapFold, WrapFold).

%% @doc
%% Wrap an existing optic to cause it to skip an element when the
%% optic would otherwise return `{error, undefined}' or
%% `{error, required}' during a fold or mapfold operation.
%% @end
%% @param Optic The existing optic to wrap.
%% @returns An opaque optic record.
-spec lax(Optic) -> optic() when
      Optic :: optic().
lax(Optic) ->
    WrapFold =
        fun (Fold) ->
                fun (Fun, Acc, Data) ->
                        case Fold(Fun, Acc, Data) of
                            {error, Reason} when Reason =:= undefined;
                                                 Reason =:= required ->
                                %% Skip: keep the accumulator unchanged.
                                {ok, Acc};
                            Result ->
                                Result
                        end
                end
        end,
    WrapMapFold =
        fun (MapFold) ->
                fun (Fun, Acc, Data) ->
                        case MapFold(Fun, Acc, Data) of
                            {error, Reason} when Reason =:= undefined;
                                                 Reason =:= required ->
                                %% Skip: leave data and accumulator as-is.
                                {ok, {Data, Acc}};
                            Result ->
                                Result
                        end
                end
        end,
    wrap(Optic, WrapMapFold, WrapFold).
%%%===================================================================
%%% API - Optic Application
%%%===================================================================

%% @doc
%% Given a list of optics, performs a recursive fold over the result
%% of focusing on the given data structure. The order of traversal is
%% determined by the optics used.
%% @end
%% @param Optics A list of optics to apply. Leftmost is applied first.
%% @param Fold
%% The callback function to invoke on the focused elements and
%% accumulator. Expected to return the modified accumulator.
%% @end
%% @param Acc The initial accumulator value.
%% @param Data The container to apply the optics to.
%% @returns
%% On success, returns a tuple of ok and the final accumulator value.
%% On failure, returns an error tuple.
%% @end
-spec fold(Optics, Fold, Acc, Data) -> option(NewAcc) when
      Optics :: optics(),
      Fold :: callback_fold(),
      Acc :: term(),
      Data :: term(),
      NewAcc :: term().
fold(Optics, Fold, Acc, Data) ->
    Composed = optic:chain(Optics),
    #optic{fold=ComposedFold} = Composed,
    ComposedFold(Fold, Acc, Data).

%% @doc
%% Given a list of optics, returns a list of the values focused on by
%% the final optic.
%% @end
%% @param Optics A list of optics to apply. Leftmost is applied first.
%% @param Data The container to apply the optics to.
%% @returns A list of the focused values.
-spec get(Optics, Data) -> option(Values) when
      Optics :: optics(),
      Data :: term(),
      Values :: [term()].
get(Optics, Data) ->
    %% Accumulate by prepending, then restore traversal order at the end.
    Collect = fun (Elem, Seen) -> [Elem | Seen] end,
    case fold(Optics, Collect, [], Data) of
        {ok, Collected} ->
            {ok, lists:reverse(Collected)};
        {error, _} = Error ->
            Error
    end.

%% @doc
%% Given a list of optics, performs a recursive map and fold over the
%% result of focusing on the given data structure.
%% @end
%% @param Optics A list of optics to apply. Leftmost is applied first.
%% @param MapFold
%% The callback function to invoke on the focused elements and
%% accumulator. Expected to return a tuple of the modified element and
%% accumulator.
%% @end
%% @param Acc The initial accumulator value.
%% @param Data The container to apply the optics to.
%% @returns
%% On success, returns a tuple of ok and a tuple of the modified
%% container and the final accumulator value. On failure, returns an
%% error tuple.
%% @end
-spec mapfold(Optics, MapFold, Acc, Data) -> option({NewData, NewAcc}) when
      Optics :: optics(),
      MapFold :: callback_mapfold(),
      Acc :: term(),
      Data :: term(),
      NewData :: term(),
      NewAcc :: term().
mapfold(Optics, MapFold, Acc, Data) ->
    Composed = optic:chain(Optics),
    #optic{mapfold=ComposedMapFold} = Composed,
    ComposedMapFold(MapFold, Acc, Data).

%% @doc
%% Given a list of optics, performs a recursive map over the result of
%% focusing on the given data structure.
%% @end
%% @param Optics A list of optics to apply. Leftmost is applied first.
%% @param Map
%% The callback function to invoke on the focused elements. Expected
%% to return a modified element.
%% @end
%% @param Data The container to apply the optics to.
%% @returns
%% On success, returns a tuple of ok and the modified container.
%% On failure, returns an error tuple.
%% @end
-spec map(Optics, Map, Data) -> option(NewData) when
      Optics :: optics(),
      Map :: callback_map(),
      Data :: term(),
      NewData :: term().
map(Optics, Map, Data) ->
    %% Reuse mapfold with a dummy 'undefined' accumulator threaded through.
    Step = fun (Elem, undefined) -> {Map(Elem), undefined} end,
    case mapfold(Optics, Step, undefined, Data) of
        {ok, {NewData, undefined}} ->
            {ok, NewData};
        {error, _} = Error ->
            Error
    end.

%% @doc
%% Given a list of optics, modifies the values focused on by
%% the final optic.
%% @end
%% @param Optics A list of optics to apply. Leftmost is applied first.
%% @param Value The value to write into every focused position.
%% @param Data The container to apply the optics to.
%% @returns
%% On success, returns a tuple of ok and the modified container.
%% On failure, returns an error tuple.
%% @end
-spec put(Optics, Value, Data) -> option(NewData) when
      Optics :: optics(),
      Value :: term(),
      Data :: term(),
      NewData :: term().
put(Optics, Value, Data) ->
    Constantly = fun (_Elem) -> Value end,
    map(Optics, Constantly, Data).
%%%===================================================================
%%% API - Optics
%%%===================================================================

%% @doc
%% Focus on what was given.
%%
%% This is the identity optic, it can be chained with any other optic
%% and will return the same optic.
%%
%% Example:
%%
%% ```
%% > optic:get([optic:id()], anything).
%% {ok,[anything]}
%% '''
%% @end
%% @returns An opaque optic record.
-spec id() -> optic().
id() ->
    %% One callback serves as both fold and mapfold: the callback's own
    %% return value already has the right shape in either mode.
    Identity = fun (Fun, Acc, Data) -> {ok, Fun(Data, Acc)} end,
    new(Identity, Identity).

%% @doc
%% Always errors with the given reason.
%%
%% Example:
%%
%% ```
%% > optic:get([optic:error(reason)], anything).
%% {error, reason}
%% '''
%% @end
%% @param Reason The error description to return.
%% @returns An opaque optic record.
-spec error(Reason) -> optic:optic() when
      Reason :: term().
error(Reason) ->
    Always = fun (_Fun, _Acc, _Data) -> {error, Reason} end,
    new(Always, Always).

%% @doc
%% Only focuses on the current data if the given filter function
%% returns true. Otherwise the data is skipped.
%%
%% Can fail to be well behaved depending on if the filter criteria is
%% part of the focus.
%%
%% Example:
%%
%% ```
%% > IsOdd = fun (Elem) -> Elem rem 2 == 1 end,
%% > optic:get([optic:filter(IsOdd)], [1,2,3]).
%% {ok, [1,3]}
%% '''
%% @end
%% @param Filter
%% The filter function to invoke to determine if the element should be
%% focused. Takes the current data as an argument, returns a boolean
%% true or false.
%% @end
%% @returns An opaque optic record.
-spec filter(Filter) -> optic:optic() when
      Filter :: callback_filter().
filter(Filter) ->
    SelectiveFold =
        fun (Fun, Acc, Data) ->
                case Filter(Data) of
                    true ->
                        {ok, Fun(Data, Acc)};
                    false ->
                        %% Skipped: accumulator passes through untouched.
                        {ok, Acc}
                end
        end,
    SelectiveMapFold =
        fun (Fun, Acc, Data) ->
                case Filter(Data) of
                    true ->
                        {ok, Fun(Data, Acc)};
                    false ->
                        %% Skipped: data and accumulator pass through.
                        {ok, {Data, Acc}}
                end
        end,
    new(SelectiveMapFold, SelectiveFold).

%% @doc
%% Only focuses on the current data if the given filter function
%% returns true. Otherwise an `{error, required}` is returned.
%%
%% Can fail to be well behaved depending on if the filter criteria is
%% part of the focus.
%%
%% Example:
%%
%% ```
%% > IsOdd = fun (Elem) -> Elem rem 2 == 1 end,
%% > optic:get([optic:require(IsOdd)], [1,2,3]).
%% {error, required}
%% '''
%% @end
%% @param Filter
%% The filter function to invoke to determine if the element should be
%% focused. Takes the current data as an argument, returns a boolean
%% true or false.
%% @end
%% @returns An opaque optic record.
-spec require(Filter) -> optic:optic() when
      Filter :: callback_filter().
require(Filter) ->
    %% Unlike filter/1, a rejected element aborts the whole traversal
    %% with {error, required} instead of being silently skipped.
    Fold =
        fun (Fun, Acc, Data) ->
                case Filter(Data) of
                    true ->
                        {ok, Fun(Data, Acc)};
                    false ->
                        {error, required}
                end
        end,
    MapFold =
        fun (Fun, Acc, Data) ->
                case Filter(Data) of
                    true ->
                        {ok, Fun(Data, Acc)};
                    false ->
                        {error, required}
                end
        end,
    new(MapFold, Fold).

%%%===================================================================
%%% Internal Functions
%%%===================================================================

%% Compose two optics: the second argument traverses the outer
%% structure, and the first argument is applied within each element the
%% outer optic focuses (this matches chain/1's lists:foldl ordering).
%% The inner accumulator is threaded through the outer traversal inside
%% an ok/error tagged tuple so that an inner failure short-circuits the
%% rest of the outer traversal.
compose(#optic{fold=Fold1, mapfold=MapFold1},
        #optic{fold=Fold2, mapfold=MapFold2}) ->
    Fold =
        fun (Fun, Acc, Data) ->
                case Fold2(
                       fun (Elem, {ok, InnerAcc}) ->
                               Fold1(Fun, InnerAcc, Elem);
                           (_Elem, {error, _} = Error) ->
                               %% Already failed: propagate unchanged.
                               Error
                       end,
                       {ok, Acc},
                       Data) of
                    {ok, {ok, _} = Result} ->
                        Result;
                    {ok, {error, _} = Error} ->
                        Error;
                    {error, _} = Error ->
                        Error
                end
        end,
    MapFold =
        fun (Fun, Acc, Data) ->
                case MapFold2(
                       fun (Elem, {ok, InnerAcc}) ->
                               case MapFold1(Fun, InnerAcc, Elem) of
                                   {ok, {NewElem, NewInnerAcc}} ->
                                       {NewElem, {ok, NewInnerAcc}};
                                   {error, _} = Error ->
                                       %% Keep the element unchanged and
                                       %% carry the error in the accumulator.
                                       {Elem, Error}
                               end;
                           (Elem, {error, _} = Error) ->
                               {Elem, Error}
                       end,
                       {ok, Acc},
                       Data) of
                    {ok, {NewData, {ok, NewAcc}}} ->
                        {ok, {NewData, NewAcc}};
                    {ok, {_, {error, _} = Error}} ->
                        Error;
                    {error, _} = Error ->
                        Error
                end
        end,
    new(MapFold, Fold).
%% Pair two optics so that both traverse the same data in sequence. The
%% optic given as the second argument runs first (when built via
%% merge/1's lists:foldl, that is the earlier optic in the list), then
%% the first argument runs, receiving the accumulator — and, for the
%% mapfold, the rewritten data — produced by the other.
product(#optic{fold=LaterFold, mapfold=LaterMapFold},
        #optic{fold=FirstFold, mapfold=FirstMapFold}) ->
    PairedFold =
        fun (Fun, Acc0, Data) ->
                case FirstFold(Fun, Acc0, Data) of
                    {ok, Acc1} ->
                        LaterFold(Fun, Acc1, Data);
                    {error, _} = Error ->
                        Error
                end
        end,
    PairedMapFold =
        fun (Fun, Acc0, Data0) ->
                case FirstMapFold(Fun, Acc0, Data0) of
                    {ok, {Data1, Acc1}} ->
                        LaterMapFold(Fun, Acc1, Data1);
                    {error, _} = Error ->
                        Error
                end
        end,
    new(PairedMapFold, PairedFold).
src/optic.erl
0.675978
0.565899
optic.erl
starcoder
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

%% @doc Helpers for working with deeply nested maps addressed by key
%% paths (lists of atom or binary keys).
-module(emqx_map_lib).

-export([ deep_get/2
        , deep_get/3
        , deep_find/2
        , deep_put/3
        , deep_remove/2
        , deep_merge/2
        , safe_atom_key_map/1
        , unsafe_atom_key_map/1
        , jsonable_map/1
        , jsonable_value/1
        , deep_convert/2
        ]).

-export_type([config_key/0, config_key_path/0]).

-type config_key() :: atom() | binary().
-type config_key_path() :: [config_key()].

%%-----------------------------------------------------------------

%% @doc Fetch the value at a (possibly nested) key path, raising
%% `{config_not_found, KeyPath}' when any segment is missing.
-spec deep_get(config_key_path(), map()) -> term().
deep_get(ConfKeyPath, Map) ->
    %% A fresh reference can never collide with a stored value, so it is
    %% a safe "not found" sentinel for the 3-arity variant.
    Ref = make_ref(),
    Res = deep_get(ConfKeyPath, Map, Ref),
    case Res =:= Ref of
        true -> error({config_not_found, ConfKeyPath});
        false -> Res
    end.

%% @doc Like deep_get/2 but returns Default instead of raising when the
%% path does not exist.
-spec deep_get(config_key_path(), map(), term()) -> term().
deep_get(ConfKeyPath, Map, Default) ->
    case deep_find(ConfKeyPath, Map) of
        {not_found, _KeyPath, _Data} -> Default;
        {ok, Data} -> Data
    end.

%% @doc Walk the key path; on a miss, report the remaining path and the
%% value at which the descent stopped.
-spec deep_find(config_key_path(), map()) ->
    {ok, term()} | {not_found, config_key_path(), term()}.
deep_find([], Map) ->
    {ok, Map};
deep_find([Key | KeyPath] = Path, Map) when is_map(Map) ->
    case maps:find(Key, Map) of
        {ok, SubMap} -> deep_find(KeyPath, SubMap);
        error -> {not_found, Path, Map}
    end;
%% Path segments remain but the current value is not a map.
%% Fix: do not read an underscore-prefixed variable (was `_KeyPath').
deep_find(KeyPath, Data) ->
    {not_found, KeyPath, Data}.

%% @doc Store Config at the key path, creating intermediate maps as
%% needed. A non-map value along the path (or at the end) is replaced.
-spec deep_put(config_key_path(), map(), term()) -> map().
%% The original had two identical-bodied clauses for the empty path
%% (is_map and not); they are merged — both simply replace the value.
deep_put([], _Map, Config) ->
    Config;
deep_put([Key | KeyPath], Map, Config) ->
    SubMap = deep_put(KeyPath, maps:get(Key, Map, #{}), Config),
    Map#{Key => SubMap}.

%% @doc Remove the value at the key path; missing paths and non-map
%% intermediates leave the map unchanged.
-spec deep_remove(config_key_path(), map()) -> map().
deep_remove([], Map) ->
    Map;
deep_remove([Key], Map) ->
    maps:remove(Key, Map);
deep_remove([Key | KeyPath], Map) ->
    case maps:find(Key, Map) of
        {ok, SubMap} when is_map(SubMap) ->
            Map#{Key => deep_remove(KeyPath, SubMap)};
        {ok, _Val} ->
            %% Intermediate value is not a map: nothing to descend into.
            Map;
        error ->
            Map
    end.

%% @doc Recursively merge NewMap into BaseMap. Keys present only in one
%% map are kept; where both values are maps they are merged recursively,
%% otherwise the value from NewMap wins. E.g.
%% #{a => #{b => 3, c => 2}, d => 4}
%%   = deep_merge(#{a => #{b => 1, c => 2}, d => 4}, #{a => #{b => 3}}).
-spec deep_merge(map(), map()) -> map().
deep_merge(BaseMap, NewMap) ->
    NewKeys = maps:keys(NewMap) -- maps:keys(BaseMap),
    MergedBase = maps:fold(fun(K, V, Acc) ->
            case maps:find(K, NewMap) of
                error ->
                    Acc#{K => V};
                {ok, NewV} when is_map(V), is_map(NewV) ->
                    Acc#{K => deep_merge(V, NewV)};
                {ok, NewV} ->
                    Acc#{K => NewV}
            end
        end, #{}, BaseMap),
    maps:merge(MergedBase, maps:with(NewKeys, NewMap)).

%% @doc Recursively rewrite every map entry with ConvFun (values are
%% converted depth-first before the pair callback runs); list elements
%% are converted element-wise, other terms are returned unchanged.
-spec deep_convert(map(), fun((K::any(), V::any()) ->
    {K1::any(), V1::any()})) -> map().
deep_convert(Map, ConvFun) when is_map(Map) ->
    maps:fold(fun(K, V, Acc) ->
            {K1, V1} = ConvFun(K, deep_convert(V, ConvFun)),
            Acc#{K1 => V1}
        end, #{}, Map);
deep_convert(ListV, ConvFun) when is_list(ListV) ->
    [deep_convert(V, ConvFun) || V <- ListV];
deep_convert(Val, _) ->
    Val.

%% @doc Convert binary keys to atoms, creating atoms as needed.
%% WARNING: unsafe on untrusted input — atoms are never garbage
%% collected, so attacker-controlled keys can exhaust the atom table.
-spec unsafe_atom_key_map(#{binary() | atom() => any()}) -> #{atom() => any()}.
unsafe_atom_key_map(Map) ->
    convert_keys_to_atom(Map, fun(K) -> binary_to_atom(K, utf8) end).

%% @doc Convert binary keys to atoms, raising badarg for any key whose
%% atom does not already exist (safe for untrusted input).
-spec safe_atom_key_map(#{binary() | atom() => any()}) -> #{atom() => any()}.
safe_atom_key_map(Map) ->
    convert_keys_to_atom(Map, fun(K) -> binary_to_existing_atom(K, utf8) end).

%% @doc Recursively make a map JSON-friendly (see jsonable_value/1).
-spec jsonable_map(map() | list()) -> map() | list().
jsonable_map(Map) ->
    deep_convert(Map, fun(K, V) ->
            {jsonable_value(K), jsonable_value(V)}
        end).

%% @doc Turn a printable character list into a UTF-8 binary so JSON
%% encoders emit a string instead of an integer array; anything else
%% (including the empty list) is returned unchanged.
%% Fix: exported function previously had no -spec.
-spec jsonable_value(term()) -> term().
jsonable_value([]) -> [];
jsonable_value(Val) when is_list(Val) ->
    case io_lib:printable_unicode_list(Val) of
        true -> unicode:characters_to_binary(Val);
        false -> Val
    end;
jsonable_value(Val) ->
    Val.

%%---------------------------------------------------------------------------

%% Apply Conv to every binary key, deeply; atom keys pass through.
%% (Renamed from the misspelled `covert_keys_to_atom'; internal only.)
convert_keys_to_atom(BinKeyMap, Conv) ->
    deep_convert(BinKeyMap, fun
            (K, V) when is_atom(K) -> {K, V};
            (K, V) when is_binary(K) -> {Conv(K), V}
        end).
apps/emqx/src/emqx_map_lib.erl
0.617974
0.451327
emqx_map_lib.erl
starcoder
%% @author <NAME> <<EMAIL>> [http://yarivsblog.com] %% @copyright <NAME> 2006-2007 %% %% @doc Smerl: Simple Metaprogramming for Erlang %% %% Smerl is an Erlang library %% that simplifies the creation and manipulation of Erlang modules in %% runtime. %% %% You don't need to know Smerl in order to use ErlyWeb; Smerl %% is included in ErlyWeb because ErlyWeb uses it internally. %% %% Smerl uses Erlang's capabilities for hot code swapping and %% abstract syntax tree transformations to do its magic. Smerl is inspired by %% the rdbms_codegen.erl module in the RDBMS application written by %% <NAME>. RDBMS is part of Jungerl ([http://jungerl.sf.net]). %% %% Here's a quick example illustrating how to use Smerl: %% ``` %% test_smerl() -> %% M1 = smerl:new(foo), %% {ok, M2} = smerl:add_func(M1, "bar() -> 1 + 1."), %% smerl:compile(M2), %% foo:bar(), % returns 2`` %% smerl:has_func(M2, bar, 0). % returns true %% ''' %% %% New functions can be expressed either as strings of Erlang code %% or as abstract forms. For more information, read the Abstract Format %% section in the ERTS User's guide %% ([http://erlang.org/doc/doc-5.5/erts-5.5/doc/html/absform.html#4]). %% %% Using the abstract format, the 3rd line of the above example %% would be written as %% ``` %% {ok,M2} = smerl:add_func(M1, {function,1,bar,0, %% [{clause,1,[],[], %% [{op,1,'+',{integer,1,1},{integer,1,1}}]}]). 
%% '''
%%
%% <p>The abstract format may look more verbose in this example, but
%% it's also easier to manipulate in code.</p>
%%
%% Copyright (c) <NAME> 2006-2007
%%
%% Permission is hereby granted, free of charge, to any person
%% obtaining a copy of this software and associated documentation
%% files (the "Software"), to deal in the Software without restriction,
%% including without limitation the rights to use, copy, modify, merge,
%% publish, distribute, sublicense, and/or sell copies of the Software,
%% and to permit persons to whom the Software is furnished to do
%% so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included
%% in all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
%% IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
%% CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
%% TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
%% SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

-module(smerl).
-author("<NAME> (<EMAIL>, http://yarivsblog.com").
-export([new/1,
         for_file/1, for_file/2, for_file/3,
         get_module/1, set_module/2,
         get_forms/1, set_forms/2,
         get_exports/1, set_exports/2,
         get_export_all/1, set_export_all/2,
         remove_export/3,
         add_func/2, add_func/3,
         remove_func/3,
         has_func/3,
         replace_func/2,
         % replace_func/3,
         compile/1, compile/2,
         rename/2,
         embed_all/2,
         to_src/1, to_src/2
        ]).

%% Debug-logging macros: print a term (~p) or a string (~s) with the
%% file and line where the macro is expanded.
-define(L(Obj), io:format("LOG ~s ~w ~p\n", [?FILE, ?LINE, Obj])).
-define(S(Obj), io:format("LOG ~s ~w ~s\n", [?FILE, ?LINE, Obj])).

-include_lib("kernel/include/file.hrl").

%% @type meta_mod(). A data structure holding the abstract representation
%% for a module.
%% @type func_form(). The abstract form for the function, as described
%% in the ERTS Users' manual.

%% The record type holding the abstract representation for a module.
%% Note: 'forms' is kept in reverse source order (forms are prepended);
%% compile/2 reverses it before calling compile:forms/2.
-record(meta_mod, {module,
                   file,
                   exports = [],
                   forms = [],
                   export_all = false}).

%% @doc Create a new meta_mod for a module with the given name.
%%
%% @spec new(Module::atom()) -> meta_mod()
new(ModuleName) when is_atom(ModuleName) ->
    #meta_mod{module = ModuleName}.

%% @equiv for_file(SrcFilePath, [])
for_file(SrcFilePath) ->
    for_file(SrcFilePath, []).

%% @equiv for_file(SrcFilePath, IncludePaths, [])
for_file(SrcFilePath, IncludePaths) ->
    for_file(SrcFilePath, IncludePaths, []).

%% @doc Create a meta_mod for a module from its source file.
%%
%% @spec for_file(SrcFilePath::string(), IncludePaths::[string()],
%%   Macros::[{atom(), term()}]) ->
%%   {ok, meta_mod()} | {error, invalid_module}
for_file(SrcFilePath, IncludePaths, Macros) ->
    %% The source file's own directory is always searched for includes.
    case epp:parse_file(SrcFilePath, [filename:dirname(SrcFilePath) |
                                      IncludePaths], Macros) of
        {ok, Forms} ->
            mod_for_forms(Forms);
        _err ->
            {error, {invalid_module, SrcFilePath}}
    end.

%% Build a meta_mod from a parsed form list. The leading file and module
%% attributes are required; export attributes and a compile export_all
%% are folded into the record's fields, the eof marker is dropped, and
%% all remaining forms are accumulated (prepended, hence reversed).
mod_for_forms([{attribute,_,file,{FileName,_FileNum}},
               {attribute, _, module, ModuleName}|Forms]) ->
    {Exports, OtherForms, ExportAll} =
        lists:foldl(
          fun({attribute, _, export, ExportList},
              {ExportsAcc, FormsAcc, ExportAll}) ->
                  {ExportList ++ ExportsAcc, FormsAcc, ExportAll};
             ({attribute, _, compile, export_all},
              {ExportsAcc, FormsAcc, _ExportAll}) ->
                  {ExportsAcc, FormsAcc, true};
             ({eof, _}, Acc) ->
                  Acc;
             (Form, {ExportsAcc, FormsAcc, ExportAll}) ->
                  {ExportsAcc, [Form | FormsAcc], ExportAll}
          end, {[], [], false}, Forms),
    {ok, #meta_mod{module = ModuleName,
                   file = FileName,
                   exports = Exports,
                   forms = OtherForms,
                   export_all = ExportAll
                  }};
mod_for_forms(Mod) ->
    {error, {invalid_module, Mod}}.

%% @doc Return the module name for the meta_mod.
%%
%% @spec get_module(MetaMod::meta_mod()) -> atom()
get_module(MetaMod) ->
    MetaMod#meta_mod.module.

%% @doc Set the meta_mod's module name.
%%
%% @spec set_module(MetaMod::meta_mod(), NewName::atom()) ->
%%   NewMod::meta_mod()
set_module(Mod, NewName) ->
    Mod#meta_mod{module = NewName}.

%% @doc Return the list of function forms in the meta_mod.
%%
%% @spec get_forms(MetaMod::meta_mod()) -> [Form]
get_forms(Mod) ->
    Mod#meta_mod.forms.

%% Replace the meta_mod's form list wholesale.
set_forms(Mod, Forms) ->
    Mod#meta_mod{forms = Forms}.

%% @doc Return the list of exports in the meta_mod. When export_all is
%% set, the export list is derived from the function forms instead.
%%
%% @spec get_exports(MetaMod::meta_mod()) ->
%%   [{FuncName::atom(), Arity::integer()}]
get_exports(Mod) ->
    case Mod#meta_mod.export_all of
        true ->
            Collect =
                fun ({function, _Line, Name, Arity, _Clauses}, Acc) ->
                        [{Name, Arity} | Acc];
                    (_OtherForm, Acc) ->
                        Acc
                end,
            lists:foldl(Collect, [], Mod#meta_mod.forms);
        false ->
            Mod#meta_mod.exports
    end.

%% @doc Set the meta_mod's export list to the new list.
%%
%% @spec set_exports(MetaMod::meta_mod(),
%%   Exports::[{FuncName::atom(), Arity::integer()}]) -> NewMod::meta_mod()
set_exports(Mod, Exports) ->
    Mod#meta_mod{exports = Exports}.

%% @doc Get the export_all value for the module.
%%
%% @spec get_export_all(MetaMod::meta_mod) -> true | false
get_export_all(Mod) ->
    Mod#meta_mod.export_all.

%% @doc Set the export_all value for the module.
%%
%% @spec set_export_all(MetaMod::meta_mod(), Val::true | false) ->
%%   NewMetaMod::meta_mod()
set_export_all(Mod, Val) ->
    Mod#meta_mod{export_all = Val}.

%% @doc Remove the export from the list of exports in the meta_mod.
%%
%% @spec remove_export(MetaMod::meta_mod(), FuncName::atom(),
%%   Arity::integer()) -> NewMod::meta_mod()
remove_export(Mod, FuncName, Arity) ->
    Remaining = lists:delete({FuncName, Arity}, Mod#meta_mod.exports),
    Mod#meta_mod{exports = Remaining}.

%% @doc Add a new function to the meta_mod and return the resulting meta_mod.
%% This function calls add_func(MetaMod, Form, true).
%%
%% @spec add_func(MetaMod::meta_mod(), Form::func_form() | string()) ->
%%   {ok, NewMod::meta_mod()} | {error, parse_error}
add_func(MetaMod, Form) ->
    add_func(MetaMod, Form, true).

%% @doc Add a new function to the meta_mod and return the new MetaMod
%% record. Export is a boolean variable indicating if the function should
%% be added to the module's exports.
%%
%% @spec add_func(MetaMod::meta_mod(), Func::func_form() | string(),
%%   Export::boolean()) ->
%%   {ok, NewMod::meta_mod()} | {error, parse_error}
add_func(MetaMod, Func, Export) when is_list(Func) ->
    %% Source-string form: parse it first, then add the resulting form.
    case parse_func_string(Func) of
        {ok, Form} ->
            add_func(MetaMod, Form, Export);
        Err ->
            Err
    end;
add_func(MetaMod, {function, _Line, FuncName, Arity, _Clauses} = Form, true) ->
    %% Fix: return the tagged tuple directly instead of binding it to a
    %% pointless intermediate variable ('Foo') first.
    {ok, MetaMod#meta_mod{
           exports = [{FuncName, Arity} | MetaMod#meta_mod.exports],
           forms = [Form | MetaMod#meta_mod.forms]}};
add_func(MetaMod, {function, _Line, _FuncName, _Arity, _Clauses} = Form, false) ->
    {ok, MetaMod#meta_mod{forms = [Form | MetaMod#meta_mod.forms]}};
%%add_func(MetaMod, Name, Fun) when is_function(Fun) ->
%%    add_func(MetaMod, Name, Fun, true);
add_func(_, _, _) ->
    {error, parse_error}.

%% add_func(MetaMod, Name, Fun, Export) when is_function(Fun) ->
%%     case form_for_fun(Name, Fun) of
%%         {ok, Form} ->
%%             add_func(MetaMod, Form, Export);
%%         Err ->
%%             Err
%%     end.
%% form_for_fun(Name, Fun) ->
%%     Line = 999,
%%     Info = erlang:fun_info(Fun),
%%     case Info of
%%         [{module, _ModName}, _FuncName, _Arity, _Env, {type, external}] ->
%%             {error, cant_add_external_funcs};
%%         [_Pid, _Module, _NewIdx, _NewUniq, _Index, _Uniq, _Name,
%%          {arity, Arity},
%%          {env, [Vars, _Unknown1, _Unknown2, Clauses]},
%%          {type, local}] ->
%%             EnvVars = lists:map(
%%                         fun({VarName, Val}) ->
%%                                 {match,Line,{var,Line,VarName},
%%                                  erl_parse:abstract(Val)}
%%                         end, Vars),
%%             NewClauses = lists:map(
%%                            fun({clause, Line1, Params, Guards, Exprs}) ->
%%                                    {clause, Line1, Params, Guards,
%%                                     EnvVars ++ Exprs}
%%                            end, Clauses),
%%             {ok, {function, Line, Name, Arity, NewClauses}};
%%         _Other ->
%%             {error, bad_fun}
%%     end.

%% Scan and parse a function given as a source string into its abstract
%% form; any scan or parse failure collapses to {error, parse_error}.
parse_func_string(Func) ->
    case erl_scan:string(Func) of
        {ok, Toks, _} ->
            case erl_parse:parse_form(Toks) of
                {ok, _Form} = Res ->
                    Res;
                _Err ->
                    {error, parse_error}
            end;
        _Err ->
            {error, parse_error}
    end.

%% @doc Try to remove the function from the meta_mod.
%% If the function exists, the new meta_mod is returned. Otherwise,
%% original meta_mod is returned.
%%
%% @spec remove_func(MetaMod::meta_mod(), FuncName::atom(), Arity::integer())
%%   -> NewMod::meta_mod()
%%
remove_func(MetaMod, FuncName, Arity) ->
    %% Drop the matching form and its export entry in one record update.
    MetaMod#meta_mod{forms =
                     lists:filter(
                       fun({function, _Line, FuncName1, Arity1, _Clauses})
                          when FuncName1 =:= FuncName, Arity =:= Arity1 ->
                               false;
                          (_) ->
                               true
                       end, MetaMod#meta_mod.forms),
                     exports =
                     lists:filter(
                       fun({FuncName1, Arity1})
                          when FuncName1 =:= FuncName, Arity1 =:= Arity ->
                               false;
                          (_) ->
                               true
                       end, MetaMod#meta_mod.exports)
                    }.

%% @doc Check whether the meta_mod has a function with the given name
%% and arity.
%% @spec has_func(MetaMod::meta_mod(), FuncName::atom(), Arity::integer()) ->
%%   bool()
has_func(MetaMod, FuncName, Arity) ->
    lists:any(fun({function, _Line, FuncName1, Arity1, _Clauses})
                 when FuncName1 == FuncName, Arity1 == Arity ->
                      true;
                 (_) ->
                      false
              end, MetaMod#meta_mod.forms).

%% @doc
%% Replace an existing function with the new one. If the function doesn't exist
%% the new function is added to the meta_mod.
%% This is tantamount to calling smerl:remove_func followed by smerl:add_func.
%%
%% @spec replace_func(MetaMod::meta_mod(), Function::string() | func_form()) ->
%%   {ok, NewMod::meta_mod()} | {error, Error}
replace_func(MetaMod, Function) when is_list(Function) ->
    case parse_func_string(Function) of
        {ok, Form} ->
            replace_func(MetaMod, Form);
        Err ->
            Err
    end;
replace_func(MetaMod, {function, _Line, FuncName, Arity, _Clauses} = Form) ->
    Mod1 = remove_func(MetaMod, FuncName, Arity),
    add_func(Mod1, Form);
replace_func(_MetaMod, _) ->
    {error, parse_error}.

%% %% @doc Similar to replace_func/2, but accepts a function
%% %% name + fun expression.
%% %%
%% %% @spec replace_func(MetaMod::meta_mod(), Name::atom(), Fun::function()) ->
%% %%   {ok, NewMod::meta_mod()} | {error, Error}
%% replace_func(MetaMod, Name, Fun) when is_function(Fun) ->
%%     case form_for_fun(Name, Fun) of
%%         {ok, Form} ->
%%             replace_func(MetaMod, Form);
%%         Err ->
%%             Err
%%     end.

%% @doc Compile the module represented by the meta_mod and load the
%% resulting BEAM into the emulator. This function calls
%% compile(MetaMod, [report_errors, report_warnings, return_errors])
%% (see compile/2's 'undefined' clause for the actual defaults).
%%
%% @spec compile(MetaMod::meta_mod()) -> ok | {error, Error}
compile(MetaMod) ->
    compile(MetaMod, undefined).

%% @doc Compile the module represented by the meta_mod and load the
%% resulting BEAM into the emulator. 'Options' is a list of options as
%% described in the 'compile' module in the Erlang documentation.
%%
%% If the 'outdir' option is provided,
%% the .beam file is written to the destination directory.
%%
%% @spec compile(MetaMod::meta_mod(), Options::[term()]) -> ok | {error, Error}
compile(MetaMod, undefined) ->
    %% Default option list used by compile/1.
    compile(MetaMod, [report_errors, report_warnings, return_errors]);
compile(MetaMod, Options) ->
    %% Synthesize the module/export attributes in front of the stored forms.
    Forms = [{attribute, 2, module, MetaMod#meta_mod.module},
             {attribute, 3, export, get_exports(MetaMod)}],
    FileName =
        case MetaMod#meta_mod.file of
            undefined -> atom_to_list(get_module(MetaMod));
            Val -> Val
        end,
    Forms1 = [{attribute, 1, file, {FileName, 1}} | Forms],
    %% Forms are stored most-recently-added first, so reverse them.
    Forms2 = Forms1 ++ lists:reverse(MetaMod#meta_mod.forms),
    case compile:forms(Forms2, Options) of
        {ok, Module, Bin} ->
            Res =
                case lists:keysearch(outdir, 1, Options) of
                    {value, {outdir, OutDir}} ->
                        %% NOTE(review): the file name is a deep list that
                        %% contains the atom '/'; file:write_file/2 appears
                        %% to accept it, but filename:join/2 would be
                        %% clearer -- confirm before changing.
                        file:write_file(
                          OutDir ++
                              ['/' | atom_to_list(MetaMod#meta_mod.module)] ++
                              ".beam",
                          Bin);
                    false -> ok
                end,
            case Res of
                ok ->
                    %% Replace any old version of the module in the VM.
                    code:purge(Module),
                    case code:load_binary(
                           Module, atom_to_list(Module) ++ ".erl", Bin) of
                        {module, _Module} -> ok;
                        Err -> Err
                    end;
                Err -> Err
            end;
        Err -> Err
    end.

%% @doc Change the name of the function represented by the form.
%%
%% NOTE(review): despite the @spec below, this returns the bare form,
%% not {ok, Form}.
%%
%% @spec rename(Form::func_form(), NewName::atom()) ->
%%   {ok, NewForm::func_form()} | {error, Err}
rename({function, Line, _Name, Arity, Clauses}, NewName) ->
    {function, Line, NewName, Arity, Clauses}.

%% Substitute variables in a clause with concrete (abstract-format) values.
%% Vals is a [{VarName, AbstractVal}] list; every {var, _, Name} node whose
%% Name appears in Vals is replaced by the corresponding abstract value.
%% Returns only the rewritten expression list of the clause.
replace_vars(Clause, Vals) ->
    Tree = erl_syntax_lib:map(
             fun({var,_L2,Name} = Expr) ->
                     case proplists:lookup(Name, Vals) of
                         none -> Expr;
                         {_, Val} -> Val
                     end;
                (Expr) -> Expr
             end, Clause),
    {clause, _, _, _, NewExprs} = erl_syntax:revert(Tree),
    NewExprs.

%% @doc This function takes a function form and a list of name/value pairs,
%% and replaces all the function's parameters whose names match an
%% element from the list with the predefined value.
%%
%% @spec embed_params(Func::func_form(),
%%   Vals::[{Name::atom(), Value::term()}]) -> NewForm::func_form()
embed_params({function, L, Name, Arity, Clauses}, Vals) ->
    NewClauses =
        lists:map(
          fun({clause, L1, Params, Guards, _Exprs} = Clause) ->
                  %% Partition the clause head: parameters named in Vals are
                  %% removed from the head and remembered (with their values
                  %% lifted to abstract format); all other parameters are
                  %% kept in their original order (hence foldr).
                  {EmbeddedVals, OtherParams} =
                      lists:foldr(
                        fun({var,_, VarName} = Param, {Embedded, Rest}) ->
                                case proplists:lookup(VarName, Vals) of
                                    none ->
                                        {Embedded, [Param | Rest]};
                                    {_, Val} ->
                                        {[{VarName, erl_parse:abstract(Val)} |
                                          Embedded],
                                         Rest}
                                end;
                           (Param, {Embedded, Rest}) ->
                                %% Non-variable patterns are never embedded.
                                {Embedded, [Param | Rest]}
                        end, {[], []}, Params),
                  %% Substitute the embedded variables inside the body.
                  NewExprs = replace_vars(Clause, EmbeddedVals),
                  {clause, L1, OtherParams, Guards, NewExprs}
          end, Clauses),
    %% The arity may shrink when parameters are embedded; recompute it from
    %% the first rewritten clause, falling back to the original arity.
    NewArity =
        case NewClauses of
            [{clause, _L2, Params, _Guards, _Exprs}|_] ->
                length(Params);
            _ -> Arity
        end,
    {function, L, Name, NewArity, NewClauses}.

%% @doc Apply the embed_params function with the list of {Name, Value}
%% pairs to all forms in the meta_mod. Exports
%% for functions whose arities change due to the embedding are preserved.
%%
%% @spec embed_all(MetaMod::meta_mod(), Vals::[{Name::atom(),
%%   Value::term()}]) -> NewMetaMod::meta_mod()
embed_all(MetaMod, Vals) ->
    Forms = get_forms(MetaMod),
    Exports = get_exports(MetaMod),
    {NewForms, Exports3, NewExports} =
        lists:foldl(
          fun({function, _L, Name, Arity, _Clauses} = Form,
              {Forms1, Exports1, NewExports1}) ->
                  {function, _, _, NewArity, _} = NewForm =
                      embed_params(Form, Vals),
                  Exports2 = lists:delete({Name, Arity}, Exports1),
                  %% If Name/Arity was exported, re-export it under its
                  %% (possibly reduced) new arity. lists:member/2 replaces
                  %% the original length-comparison-after-delete trick with
                  %% a direct, equivalent membership test.
                  NewExports2 =
                      case lists:member({Name, Arity}, Exports1) of
                          true -> [{Name, NewArity} | NewExports1];
                          false -> NewExports1
                      end,
                  {[NewForm | Forms1], Exports2, NewExports2};
             (Form, {Forms1, Exports1, NewExports1}) ->
                  %% Non-function forms are passed through untouched.
                  {[Form | Forms1], Exports1, NewExports1}
          end, {[], Exports, []}, Forms),
    #meta_mod{module = get_module(MetaMod),
              exports = Exports3 ++ NewExports,
              forms = lists:reverse(NewForms),
              export_all = get_export_all(MetaMod)}.

%% @doc Return the pretty-printed source code for the module.
%%
%% @spec to_src(MetaMod::meta_mod()) -> string()
to_src(MetaMod) ->
    ExportsForm = {attribute,1,export,get_exports(MetaMod)},
    AllForms = [{attribute,1,module,get_module(MetaMod)}, ExportsForm |
                get_forms(MetaMod)],
    erl_prettypr:format(erl_syntax:form_list(AllForms)).

%% @doc Write the pretty-printed source code for the module
%% to the file with the given file name.
%%
%% @spec to_src(MetaMod::meta_mod(), FileName::string()) ->
%%   ok | {error, Error}
to_src(MetaMod, FileName) ->
    Src = to_src(MetaMod),
    file:write_file(FileName, list_to_binary(Src)).
apps/vmq_plugin/src/smerl.erl
0.519034
0.585042
smerl.erl
starcoder
%% @author <NAME> <<EMAIL>> [http://yarivsblog.com] %% @copyright <NAME> 2006-2007 %% %% @doc %% ErlSQL is a domain specific embedded language for %% expressing SQL statements in Erlang as well as a library %% for generating the literal equivalents of ErlSQL expressions. %% %% ErlSQL lets you describe SQL queries using a combination of Erlang %% lists, tuples, atoms and values in a way that resembles the %% structure of SQL statements. You can pass this structure to %% the sql/1 or sql/2 functions, which parse it and return an %% iolist (a tree of strings and/or binaries) or a single binary, %% either of which can be sent to database engine through a socket %% (usually via a database-specific driver). %% %% ErlSQL supports a large subset of the SQL language implemented by %% some popular RDBMS's, including most common INSERT, UPDATE, DELETE %% and SELECT statements. ErlSQL can generate complex queries including %% those with unions, nested statements and aggregate functions, but %% it does not currently attempt to cover every feature and extension %% of the SQL language. %% %% ErlSQL's benefits are:<br/> %% - Easy dynamic generation of SQL queries from Erlang by combining %% native Erlang types rather than string fragments.<br/> %% - Prevention of most, if not all, SQL injection attacks by %% assuring that all string values are properly escaped.<br/> %% - Efficient generation of iolists as nested lists of binaries.<br/> %% %% Warning: ErlSQL allows you to write verbatim WHERE clauses as well as %% verbatim LIMIT and other trailing clauses, but using this feature %% is highly discouraged because it exposes you to SQL injection attacks. %% %% For usage examples, look at the file test_erlsql.erl under the test/ %% directory. %% For license information see LICENSE.TXT -module(erlsql). -author("<NAME> (<EMAIL>) (http://yarivsblog.com)"). -export([sql/1, sql/2, unsafe_sql/1, unsafe_sql/2, encode/1]). -define(L(Obj), io:format("LOG ~w ~p\n", [?LINE, Obj])). 
%% @doc Generate an iolist (a tree of strings and/or binaries)
%% for a literal SQL statement that corresponds to the ESQL
%% structure. Crashes if the structure is invalid.
%% Literal WHERE, LIMIT and other trailing clauses are rejected here;
%% use unsafe_sql/1 or unsafe_sql/2 for those.
%%
%% @spec sql(ErlSQL::term()) -> iolist()
sql(Esql) ->
    sql2(Esql, true).

%% @doc Same as sql/1, with a flag selecting the result type:
%% true returns a single binary, false an iolist.
%%
%% @spec sql(ErlSQL::term(), boolean()) -> binary() | iolist()
sql(Esql, true) ->
    iolist_to_binary(sql2(Esql, true));
sql(Esql, false) ->
    sql2(Esql, true).

%% @doc Like sql/1, but permits literal WHERE, LIMIT and other trailing
%% clauses, e.g. {where, "a=" ++ Val} or "WHERE a=" ++ Str ++ " LIMIT 5".
%% Such clauses are unsafe (SQL injection); when using this function,
%% quote all strings with encode/1. Throws on invalid input.
%%
%% @spec unsafe_sql(ErlSQL::term()) -> iolist()
%% @throws {error, {unsafe_expression, Expr}}
unsafe_sql(Esql) ->
    sql2(Esql, false).

%% @doc Same as unsafe_sql/1, with a flag selecting the result type:
%% true returns a single binary, false an iolist.
%%
%% @spec unsafe_sql(ErlSQL::term(), AsBinary::bool()) -> binary() | iolist()
%% @throws {error, {unsafe_expression, Expr}}
unsafe_sql(Esql, true) ->
    iolist_to_binary(sql2(Esql, false));
unsafe_sql(Esql, false) ->
    sql2(Esql, false).

%% @doc Shorthand for encode(Val, true).
%%
%% @spec encode(Val::term()) -> binary()
encode(Val) ->
    encode(Val, true).

%% @doc Encode a value as a string or a binary to be embedded in
%% a SQL statement.
%% This function can encode numbers, atoms,
%% date/time/datetime values, strings and binaries
%% (which it escapes automatically).
%%
%% @spec encode(Val::term(), AsBinary::bool()) -> string() | binary()
encode(Val, false) when Val == undefined; Val == null ->
    "null";
encode(Val, true) when Val == undefined; Val == null ->
    <<"null">>;
encode(Val, false) when is_binary(Val) ->
    binary_to_list(quote(Val));
encode(Val, true) when is_binary(Val) ->
    quote(Val);
%% Generic binary case: encode as a string first, then convert.
%% NOTE(review): this clause also catches tuples such as {datetime, _} with
%% AsBinary == true; they are re-dispatched below with AsBinary == false,
%% so the date/time clauses only need to handle the 'false' case.
encode(Val, true) ->
    list_to_binary(encode(Val,false));
encode(Val, false) when is_atom(Val) ->
    quote(atom_to_list(Val));
encode(Val, false) when is_list(Val) ->
    quote(Val);
encode(Val, false) when is_integer(Val) ->
    integer_to_list(Val);
encode(Val, false) when is_float(Val) ->
    mochinum:digits(Val);
encode({datetime, Val}, AsBinary) ->
    encode(Val, AsBinary);
%% {{Y,M,D},{H,Mi,S}} -> 'YYYY-MM-DD HH:MM:SS' (each part zero-padded).
encode({{Year,Month,Day}, {Hour,Minute,Second}}, false) ->
    [Year1,Month1,Day1,Hour1,Minute1,Second1] =
        lists:map(fun two_digits/1,
                  [Year, Month, Day, Hour, Minute,Second]),
    lists:flatten(io_lib:format("'~s-~s-~s ~s:~s:~s'",
                                [Year1,Month1,Day1,
                                 Hour1,Minute1,Second1]));
encode({date, {Year, Month, Day}}, false) ->
    [Year1,Month1,Day1] =
        lists:map(fun two_digits/1,[Year, Month, Day]),
    lists:flatten(io_lib:format("'~s-~s-~s'",[Year1,Month1,Day1]));
encode({time, {Hour, Minute, Second}}, false) ->
    [Hour1,Minute1,Second1] =
        lists:map(fun two_digits/1,[Hour, Minute, Second]),
    lists:flatten(io_lib:format("'~s:~s:~s'",[Hour1,Minute1,Second1]));
encode(Val, _AsBinary) ->
    %% Fallback: unrecognized terms yield an error tuple instead of
    %% crashing.
    {error, {unrecognized_value, {Val}}}.

%% Zero-pad a number (or a list of numbers) to at least two digits,
%% returning the digits as string(s).
two_digits(Nums) when is_list(Nums) ->
    [two_digits(Num) || Num <- Nums];
two_digits(Num) ->
    [Str] = io_lib:format("~b", [Num]),
    case length(Str) of
        1 -> [$0 | Str];
        _ -> Str
    end.
%% sql2/2: dispatch on the shape of the ErlSQL term.
%% Safe == true forbids verbatim string/binary WHERE and trailing clauses.
%% Clause order matters: more specific tuple shapes must come first.
sql2({select, Tables}, Safe)->
    select(Tables, Safe);
sql2({select, Fields, {from, Tables}}, Safe) ->
    select(Fields, Tables, Safe);
sql2({select, Fields, {from, Tables}, {where, WhereExpr}}, Safe) ->
    select(undefined, Fields, Tables, WhereExpr, undefined, Safe);
sql2({select, Fields, {from, Tables}, {where, WhereExpr}, Extras}, Safe) ->
    select(undefined, Fields, Tables, WhereExpr, Extras, Safe);
sql2({select, Fields, {from, Tables}, WhereExpr, Extras}, Safe) ->
    select(undefined, Fields, Tables, WhereExpr, Extras, Safe);
sql2({select, Fields, {from, Tables}, Extras}, Safe) ->
    select(undefined, Fields, Tables, undefined, Extras, Safe);
sql2({select, Tables, {where, WhereExpr}}, Safe) ->
    select(undefined, undefined, Tables, WhereExpr, Safe);
sql2({select, Tables, WhereExpr}, Safe) ->
    select(undefined, undefined, Tables, WhereExpr, Safe);
sql2({select, Modifier, Fields, {from, Tables}}, Safe) ->
    select(Modifier, Fields, Tables, Safe);
sql2({select, Modifier, Fields, {from, Tables}, {where, WhereExpr}}, Safe) ->
    select(Modifier, Fields, Tables, WhereExpr, Safe);
sql2({select, Modifier, Fields, {from, Tables}, Extras}, Safe) ->
    select(Modifier, Fields, Tables, undefined, Extras, Safe);
sql2({select, Modifier, Fields, {from, Tables}, {where, WhereExpr}, Extras},
     Safe) ->
    select(Modifier, Fields, Tables, WhereExpr, Extras, Safe);
sql2({select, Modifier, Fields, {from, Tables}, WhereExpr, Extras}, Safe) ->
    select(Modifier, Fields, Tables, WhereExpr, Extras, Safe);
%% UNION of two selects, each parenthesized.
sql2({Select1, union, Select2}, Safe) ->
    [$(, sql2(Select1, Safe), <<") UNION (">>, sql2(Select2, Safe), $)];
sql2({Select1, union, Select2, {where, WhereExpr}}, Safe) ->
    [sql2({Select1, union, Select2}, Safe), where(WhereExpr, Safe)];
sql2({Select1, union, Select2, Extras}, Safe) ->
    [sql2({Select1, union, Select2}, Safe), extra_clause(Extras, Safe)];
sql2({Select1, union, Select2, {where, _} = Where, Extras}, Safe) ->
    [sql2({Select1, union, Select2, Where}, Safe),
     extra_clause(Extras, Safe)];
%% INSERT/MERGE/REPLACE take already-concrete values, so Safe is ignored.
sql2({insert, Table, Params}, _Safe) ->
    insert(Table, Params);
sql2({insert, Table, Fields, Values}, _Safe) ->
    insert(Table, Fields, Values);
sql2({merge, Table, Fields, Values, Props}, _Safe) ->
    merge(Table, Fields, Values, Props);
sql2({replace, Table, Fields, Values}, _Safe) ->
    replace(Table, Fields, Values);
sql2({update, Table, Props}, Safe) ->
    update(Table, Props, Safe);
sql2({update, Table, Props, {where, Where}}, Safe) ->
    update(Table, Props, Where, Safe);
sql2({update, Table, Props, Where}, Safe) ->
    update(Table, Props, Where, Safe);
sql2({delete, {from, Table}}, Safe) ->
    delete(Table, Safe);
sql2({delete, Table}, Safe) ->
    delete(Table, Safe);
sql2({delete, {from, Table}, {where, Where}}, Safe) ->
    delete(Table, undefined, Where, Safe);
sql2({delete, Table, {where, Where}}, Safe) ->
    delete(Table, undefined, Where, Safe);
sql2({delete, Table, Where}, Safe) ->
    delete(Table, undefined, Where, Safe);
sql2({delete, Table, Using, Where}, Safe) ->
    delete(Table, Using, Where, Safe);
sql2({delete, Table, Using, Where, Extras}, Safe) ->
    delete(Table, Using, Where, Extras, Safe).

%% Internal functions

%% select/2..5 funnel into select/6, filling missing parts with
%% 'undefined'.
select(Fields, Safe) ->
    select(undefined, Fields, undefined, undefined, undefined, Safe).

select(Fields, Tables, Safe) ->
    select(undefined, Fields, Tables, undefined, undefined, Safe).

select(Modifier, Fields, Tables, Safe) ->
    select(Modifier, Fields, Tables, undefined, undefined, Safe).

select(Modifier, Fields, Tables, WhereExpr, Safe) ->
    select(Modifier, Fields, Tables, WhereExpr, undefined, Safe).
%% Assemble a full SELECT statement as an iolist. Modifier, Tables,
%% WhereExpr and Extras may each be 'undefined', in which case the
%% corresponding part is omitted from the output.
select(Modifier, Fields, Tables, WhereExpr, Extras, Safe) ->
    S1 = <<"SELECT ">>,
    S2 = case Modifier of
             undefined ->
                 S1;
             _ ->
                 %% Normalize the common modifiers to upper case.
                 %% (The original matched the already-bound variable
                 %% 'Modifier' here, a confusing self-match; '_' is
                 %% equivalent since 'undefined' is handled above.)
                 Modifier1 = case Modifier of
                                 distinct -> 'DISTINCT';
                                 'all' -> 'ALL';
                                 Other -> Other
                             end,
                 [S1, convert(Modifier1), 32]
         end,
    ListFun = fun(Val) -> expr2(Val, Safe) end,
    FieldListFun = fun(Val) -> mcs_expr2(Val, Safe) end,
    S3 = [S2, make_list(Fields, FieldListFun)],
    S4 = case Tables of
             undefined ->
                 S3;
             [{table,_DB,_TabName}=Table] ->
                 %% Single {table, DB, Name} tuple: render it directly.
                 [S3, <<" FROM ">>, convert(Table)];
             _Other ->
                 [S3, <<" FROM ">>, make_list(Tables, ListFun)]
         end,
    %% NOTE(review): where/2 never returns 'undefined' (it returns [] for
    %% a missing clause), so the first branch below is effectively dead;
    %% the empty iolist is harmless either way.
    S5 = case where(WhereExpr, Safe) of
             undefined -> S4;
             WhereClause -> [S4, WhereClause]
         end,
    case extra_clause(Extras, Safe) of
        undefined -> S5;
        Expr -> [S5, Expr]
    end.

%% Render a WHERE clause as an iolist. 'undefined' yields the empty
%% iolist. Verbatim string/binary clauses are only allowed when
%% Safe == false; a leading "WHERE "/"where " prefix is normalized,
%% otherwise "WHERE " is prepended.
where(undefined, _) ->
    [];
where(Expr, true) when is_list(Expr); is_binary(Expr) ->
    throw({error, {unsafe_expression, Expr}});
where(Expr, false) when is_binary(Expr) ->
    Res = case Expr of
              <<"WHERE ", _Rest/binary>> = Expr1 ->
                  Expr1;
              <<"where ", Rest/binary>> ->
                  <<"WHERE ", Rest/binary>>;
              Expr1 ->
                  <<"WHERE ", Expr1/binary>>
          end,
    [32, Res];
where(Exprs, false) when is_list(Exprs)->
    where(list_to_binary(Exprs), false);
where(Expr, Safe) when is_tuple(Expr) ->
    case expr(Expr, Safe) of
        undefined -> [];
        Other -> [<<" WHERE ">>, Other]
    end.
%% Render trailing clauses (LIMIT, GROUP BY, ORDER BY, ...) as an iolist,
%% or 'undefined' when there are none. Verbatim binaries/strings are only
%% permitted when Safe == false.
extra_clause(undefined, _Safe) -> undefined;
extra_clause(Expr, true) when is_binary(Expr) ->
    throw({error, {unsafe_expression, Expr}});
extra_clause(Expr, false) when is_binary(Expr) ->
    [32, Expr];
extra_clause([Expr], false) when is_binary(Expr) ->
    [32, Expr];
extra_clause(Exprs, Safe) when is_list(Exprs) ->
    case is_tuple(hd(Exprs)) of
        true ->
            %% BUGFIX: previously this passed 'false' unconditionally,
            %% which disabled the safety check for nested clauses even in
            %% safe mode; the dedicated "Safe == true" list clause that
            %% followed was unreachable (this clause matches any Safe value
            %% first) and has been removed.
            extra_clause2(Exprs, Safe);
        false ->
            if not Safe ->
                    [32, list_to_binary(Exprs)];
               true ->
                    throw({error, {unsafe_expression, Exprs}})
            end
    end;
extra_clause({limit, Num}, _Safe) ->
    [<<" LIMIT ">>, encode(Num)];
extra_clause({limit, Offset, Num}, _Safe) ->
    [<<" LIMIT ">>, encode(Offset), $, , encode(Num)];
extra_clause({group_by, ColNames}, _Safe) ->
    [<<" GROUP BY ">>, make_list(ColNames, fun convert/1)];
extra_clause({group_by, ColNames, having, Expr}, Safe) ->
    [extra_clause({group_by, ColNames}, Safe), <<" HAVING ">>,
     expr(Expr, Safe)];
extra_clause({order_by, ColNames}, Safe) ->
    [<<" ORDER BY ">>,
     make_list(ColNames,
               fun({Name, Modifier}) when Modifier == 'asc' ->
                       [expr(Name, Safe), 32, convert('ASC')];
                  ({Name, Modifier}) when Modifier == 'desc' ->
                       [expr(Name, Safe), 32, convert('DESC')];
                  (Name) ->
                       expr(Name, Safe)
               end)].

%% Fold a list of extra clauses into one iolist, skipping 'undefined'
%% entries and preserving order.
extra_clause2(Exprs, Safe) ->
    Res = lists:foldl(
            fun(undefined, Acc) -> Acc;
               (Expr, Acc) -> [extra_clause(Expr, Safe) | Acc]
            end, [], Exprs),
    [lists:reverse(Res)].

%% INSERT built from a [{Field, Value}] property list.
insert(Table, Params) ->
    Names = make_list(Params, fun({Name, _Value}) -> convert(Name) end),
    Values = [$(, make_list(
                    Params, fun({_Name, Value}) -> encode(Value) end), $)],
    make_insert_query(Table, Names, Values).

%% INSERT of one or more records (tuples or lists) for the given fields.
insert(Table, Fields, Records) ->
    Names = make_list(Fields, fun convert/1),
    Values =
        make_list(
          Records,
          fun(Record) ->
                  Record1 = if is_tuple(Record) -> tuple_to_list(Record);
                               true -> Record
                            end,
                  [$(, make_list(Record1, fun encode/1), $)]
          end),
    make_insert_query(Table, Names, Values).
%% REPLACE INTO of one or more records (tuples or lists) for the given
%% fields.
replace(Table, Fields, Records) ->
    Names = make_list(Fields, fun convert/1),
    Values =
        make_list(
          Records,
          fun(Record) ->
                  Record1 = if is_tuple(Record) -> tuple_to_list(Record);
                               true -> Record
                            end,
                  [$(, make_list(Record1, fun encode/1), $)]
          end),
    make_replace_query(Table, Names, Values).

%% INSERT ... ON DUPLICATE KEY UPDATE ("upsert") of one or more records;
%% Props lists the {Field, Value} pairs to set on key collision.
merge(Table, Fields, Records, Props) ->
    Names = make_list(Fields, fun convert/1),
    Values =
        make_list(
          Records,
          fun(Record) ->
                  Record1 = if is_tuple(Record) -> tuple_to_list(Record);
                               true -> Record
                            end,
                  [$(, make_list(Record1, fun encode/1), $)]
          end),
    make_merge_query(Table, Names, Values, Props).

make_replace_query(Table, Names, Values) ->
    [<<"REPLACE INTO ">>, convert(Table), $(, Names, <<") VALUES ">>,
     Values].

make_insert_query(Table, Names, Values) ->
    [<<"INSERT INTO ">>, convert(Table), $(, Names, <<") VALUES ">>,
     Values].

make_merge_query(Table, Names, Values, Props) ->
    %% Props values are rendered unsafely (Safe == false) by design here.
    S2 = make_list(Props,
                   fun({Field, Val}) ->
                           [convert(Field), <<" = ">>, expr(Val, false)]
                   end),
    [<<"INSERT INTO ">>, convert(Table), $(, Names, <<") VALUES ">>,
     Values, <<" ON DUPLICATE KEY UPDATE ">>, S2].

%% UPDATE without a WHERE clause.
update(Table, Props, Safe) ->
    update(Table, Props, undefined, Safe).

%% UPDATE with a WHERE clause; a single property may be passed bare.
update(Table, Props, Where, Safe) when not is_list(Props) ->
    update(Table, [Props], Where, Safe);
update(Table, Props, Where, Safe) ->
    S1 = [<<"UPDATE ">>, convert(Table), <<" SET ">>],
    S2 = make_list(Props,
                   fun({Field, Val}) ->
                           [convert(Field), <<" = ">>, expr(Val, Safe)]
                   end),
    [S1, S2, where(Where, Safe)].

%% DELETE variants; Using and Extras are optional.
delete(Table, Safe) ->
    delete(Table, undefined, undefined, undefined, Safe).

delete(Table, Using, WhereExpr, Safe) ->
    delete(Table, Using, WhereExpr, undefined, Safe).

delete(Table, Using, WhereExpr, Extras, Safe) ->
    S1 = [<<"DELETE FROM ">>, convert(Table)],
    S2 = if Using == undefined -> S1;
            true -> [S1, <<" USING ">>, make_list(Using, fun convert/1)]
         end,
    %% NOTE(review): where/2 returns [] (not 'undefined') for a missing
    %% clause, so the first branch below is effectively dead; the empty
    %% iolist is harmless either way.
    S3 = case where(WhereExpr, Safe) of
             undefined -> S2;
             WhereClause -> [S2, WhereClause]
         end,
    if Extras == undefined -> S3;
       true -> [S3, extra_clause(Extras, Safe)]
    end.
%% Quote an identifier with backticks; {table, DB, Name} renders as
%% DB.`Name`, a bare atom as `Atom`.
convert({table,DB,TableName})->
    L = lists:concat([ DB,".`",TableName,"`" ]),
    list_to_binary(L);
convert(Val) when is_atom(Val)->
    L = lists:concat([ "`",Val,"`" ]),
    list_to_binary(L).

%% (A commented-out term_to_binary-based convert/1 variant was here.)

%% Map ConvertFun over Vals, comma-separating the results; a non-list
%% value is converted directly. The boolean accumulator flag tracks
%% whether a separator is needed.
make_list(Vals, ConvertFun) when is_list(Vals) ->
    {Res, _} =
        lists:foldl(
          fun(Val, {Acc, false}) ->
                  {[ConvertFun(Val) | Acc], true};
             (Val, {Acc, true}) ->
                  {[ConvertFun(Val) , $, | Acc], true}
          end, {[], false}, Vals),
    lists:reverse(Res);
make_list(Val, ConvertFun) ->
    ConvertFun(Val).

%% Render one expression as an iolist. Clause order is significant:
%% subquery shapes must be recognized before the generic binary-operator
%% clause.
expr(undefined, _Safe) -> <<"NULL">>;
expr({Not, Expr}, Safe) when (Not == 'not' orelse Not == '!') ->
    [<<"NOT ">>, check_expr(Expr, Safe)];
expr({Table, Field}, _Safe) when is_atom(Table), is_atom(Field) ->
    [convert(Table), $., convert(Field)];
expr({Expr1, as, Alias}, Safe) when is_atom(Alias) ->
    [expr2(Expr1, Safe), <<" AS ">>, convert(Alias)];
expr({call, FuncName, []}, _Safe) ->
    [convert(FuncName), <<"()">>];
expr({call, FuncName, Param}, Safe) ->
    [convert(FuncName), $(, expr2(Param, Safe), $)];
%% Subqueries: {Val, Op, SelectOrUnion} for each recognized arity.
expr({Val, Op, {select, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {select, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {select, _, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {select, _, _, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {select, _, _, _, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {select, _, _, _, _, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {_, union, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {_, union, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {_, union, _, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
%% IN with an empty list can never match; emit a false literal.
expr({_, in, []}, _Safe) -> <<"0">>;
expr({Val, Op, Values}, Safe)
  when (Op == in orelse Op == any orelse Op == some) andalso
       is_list(Values) ->
    [expr2(Val, Safe), subquery_op(Op), make_list(Values, fun encode/1),
     $)];
%% 'and'/'not' with an undefined side degrades to the other side.
expr({undefined, Op, Expr2}, Safe) when Op == 'and'; Op == 'not' ->
    expr(Expr2, Safe);
expr({Expr1, Op, undefined}, Safe) when Op == 'and'; Op == 'not' ->
    expr(Expr1, Safe);
expr({Expr1, Op, Expr2}, Safe) ->
    %% Logical operands go through check_expr (verbatim-string guard);
    %% everything else through expr2.
    {B1, B2} = if (Op == 'and' orelse Op == 'or') ->
                       {check_expr(Expr1, Safe), check_expr(Expr2, Safe)};
                  true ->
                       {expr2(Expr1, Safe), expr2(Expr2, Safe)}
               end,
    [$(, B1, 32, op(Op), 32, B2, $)];
expr({list, Vals}, _Safe) when is_list(Vals) ->
    [$(, make_list(Vals, fun encode/1), $)];
%% N-ary operator applied over a list of operands, left-folded.
expr({Op, Exprs}, Safe) when is_list(Exprs) ->
    [$(, lists:foldl(
           fun(Expr, []) ->
                   expr(Expr, Safe);
              (Expr, Acc) ->
                   [expr(Expr, Safe), 32, op(Op), 32, Acc]
           end, [], lists:reverse(Exprs)), $)];
expr('?', _Safe) -> $?;
expr(null, _Safe) -> <<"NULL">>;
expr(Val, _Safe) when is_atom(Val) -> convert(Val);
expr(Val, _Safe) -> encode(Val).

%% Verbatim strings/binaries are only allowed as sub-expressions when
%% Safe == false; they are emitted parenthesized, verbatim.
check_expr(Expr, Safe) when is_list(Expr); is_binary(Expr) ->
    if Safe -> throw({error, {unsafe_expression, Expr}});
       true -> iolist_to_binary([$(, Expr, $)])
    end;
check_expr(Expr, Safe) ->
    expr(Expr, Safe).

op(Op) -> convert(op1(Op)).

%% Map ErlSQL operator atoms to their SQL keyword spellings.
op1('and') -> 'AND';
op1('or') -> 'OR';
op1(like) -> 'LIKE';
op1(Op) -> Op.

%% Render "Val OP (subquery)"; subquery_op/1 supplies the opening paren.
subquery(Val, Op, Subquery, Safe) ->
    [expr2(Val, Safe), subquery_op(Op), sql2(Subquery, Safe), $)].

subquery_op(in) -> <<" IN (">>;
subquery_op(any) -> <<" ANY (">>;
subquery_op(some) -> <<" SOME (">>.

mcs_expr2(Expr, Safe) ->
    expr2(Expr, Safe).

%% modified by bisonwu: atoms are backtick-quoted directly here instead of
%% going through convert/1.
expr2(undefined, _Safe) -> <<"NULL">>;
expr2(Expr, _Safe) when is_atom(Expr) ->
    %%convert(Expr);
    list_to_binary( lists:concat(["`",atom_to_list(Expr),"`"]) );
expr2(Expr, Safe) ->
    expr(Expr, Safe).

%% Wrap a string/binary in single quotes, escaping special characters.
quote(String) when is_list(String) ->
    [39 | lists:reverse([39 | quote(String, [])])];  %% 39 is $'
quote(Bin) when is_binary(Bin) ->
    list_to_binary(quote(binary_to_list(Bin))).
%% Escape the characters of a string, pushing them onto Acc in reverse
%% order (the caller reverses the result and adds the surrounding quotes).
quote([], Acc) ->
    Acc;
quote([C | Rest], Acc) ->
    quote(Rest, escaped(C, Acc)).

%% Push the (possibly backslash-escaped) representation of one character,
%% reversed, onto the accumulator.
escaped(0, Acc) -> [$0, $\\ | Acc];
escaped(10, Acc) -> [$n, $\\ | Acc];
escaped(13, Acc) -> [$r, $\\ | Acc];
escaped($\\, Acc) -> [$\\, $\\ | Acc];
escaped(39, Acc) -> [39, $\\ | Acc];  %% 39 is $'
escaped(34, Acc) -> [34, $\\ | Acc];  %% 34 is $"
escaped(26, Acc) -> [$Z, $\\ | Acc];
escaped(C, Acc) -> [C | Acc].
src/erlsql.erl
0.558086
0.634784
erlsql.erl
starcoder
%% Copyright (c) Facebook, Inc. and its affiliates. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %% @doc AST conversion between erlfmt and syntax_tools. -module(els_erlfmt_ast). -export([erlfmt_to_st/1]). %% syntax_tree -> erlfmt conversion is not used in erlang_ls, %% removed to not have to update it with erlang_ls custom changes and %% fix dialyzer warnings about erl_syntax:get_pos/set_pos %%-export([st_to_erlfmt/1]). % dialyzer hates erlfmt_parse:abstract_node() -type erlfmt() :: term(). -type syntax_tools() :: erl_syntax:syntaxTree(). -spec erlfmt_to_st(Node :: erlfmt()) -> syntax_tools(). %% @doc Convert from erlfmt ASTs to Syntax Tools ASTs. %% Note: the erl_syntax library still refers to the 2nd element as "pos" %% even though it has morphed into a generic annotation in erl_parse trees %% (represented as property lists, or as maps in the erlfmt %% representation). Actual erl_syntax nodes have an additional annotation %% field, separate from the position info, but this is not being used here. %% Hence, the erl_syntax:set_pos() function is used for all annotations. 
erlfmt_to_st(Node) -> Context = get('$erlfmt_ast_context$'), case Node of %% --------------------------------------------------------------------- %% The following cases can be easily rewritten without losing information %% The special `match` node is encoded as a regular binary operator {op, Pos, '=', Left, Right} -> erlfmt_to_st_1({match, Pos, Left, Right}); %% The special `catch` node is encoded as a regular unary operator {op, Pos, 'catch', Expr} -> erlfmt_to_st_1({'catch', Pos, Expr}); %% Type annotations are represented as :: operators {op, Pos, '::', Left, Right} -> erlfmt_to_st_1({ann_type, Pos, [Left, Right]}); %% --------------------------------------------------------------------- %% Whenever simply rewriting the node to the corresponding standard %% erl_parse form would discard information (such as annotations on %% atoms which are stored naked in the erl_parse format), we must %% construct the node using the erl_syntax API, which supports %% preserving annotations on such sub-fields. %% raw strings only occur as forms, for when parsing the form failed {raw_string, Pos, Text} -> update_tree_with_meta(erl_syntax:text("\n>>>>\n" ++ Text ++ "\n<<<<\n"), Pos); %% A new node `{macro_call, Anno, Name, Args}` is introduced, where %% `Name` is either an `atom` or a `var` node and `Args` is a list of %% expressions, types, or special `op` nodes with `'when'` operator. {macro_call, Pos, Name, Args} -> Args1 = case Args of none -> none; _ -> [erlfmt_to_st(A) || A <- Args] end, update_tree_with_meta(erl_syntax:macro(erlfmt_to_st(Name), Args1), Pos); %% The value of an attribute node is always a list of abstract term %% formats instead of concrete terms. The name is always represented %% as a full `atom` node. 
{attribute, Pos, {atom, _, record} = Tag, [Name, Tuple]} -> %% The record name is represented as node instead of a raw atom %% and typed record fields are represented as '::' ops {tuple, TPos, Fields} = Tuple, Fields1 = [ case F of {op, FPos, '::', B, T} -> B1 = erlfmt_to_st(B), %% Convert field types in a type context put('$erlfmt_ast_context$', type), T1 = erlfmt_to_st(T), erase('$erlfmt_ast_context$'), update_tree_with_meta( erl_syntax:typed_record_field(B1, T1), FPos ); _ -> erlfmt_to_st(F) end || F <- Fields ], Tuple1 = update_tree_with_meta(erl_syntax:tuple(Fields1), TPos), update_tree_with_meta( erl_syntax:attribute( erlfmt_to_st(Tag), [ erlfmt_to_st(Name), Tuple1 ] ), Pos ); %% Representation for types is in general the same as for %% corresponding values. The `type` node is not used at all. This %% means new binary operators `|`, `::`, and `..` inside types. {attribute, Pos, {atom, _, Tag} = Name, [Def]} when Tag =:= type; Tag =:= opaque -> put('$erlfmt_ast_context$', type), {op, OPos, '::', Type, Definition} = Def, {TypeName, Args} = case Type of {call, _CPos, TypeName0, Args0} -> {TypeName0, Args0}; {macro_call, CPos, {_, MPos, _} = MacroName, Args0} -> EndLoc = maps:get(end_location, MPos), TypeName0 = {macro_call, CPos#{end_location => EndLoc}, MacroName, none}, {TypeName0, Args0} end, Tree = update_tree_with_meta( erl_syntax:attribute(erlfmt_to_st(Name), [update_tree_with_meta( erl_syntax:tuple([erlfmt_to_st(TypeName), erlfmt_to_st(Definition), erl_syntax:list([erlfmt_to_st(A) || A <- Args])]), OPos)]), Pos), erase('$erlfmt_ast_context$'), Tree; {attribute, Pos, {atom, _, RawName} = Name, Args} when RawName =:= callback; RawName =:= spec -> put('$erlfmt_ast_context$', type), [{spec, SPos, FName, Clauses}] = Args, {spec_clause, _, {args, _, ClauseArgs}, _, _} = hd(Clauses), Arity = length(ClauseArgs), Tree = update_tree_with_meta( erl_syntax:attribute(erlfmt_to_st(Name), [update_tree_with_meta( erl_syntax:tuple([erl_syntax:tuple([erlfmt_to_st(FName), 
erl_syntax:integer(Arity)]), erl_syntax:list([erlfmt_to_st(C) || C <- Clauses])]), SPos)]), Pos), erase('$erlfmt_ast_context$'), Tree; {spec_clause, Pos, {args, _HeadMeta, Args}, ReturnType, empty} -> update_tree_with_meta( erl_syntax_function_type([erlfmt_to_st(A) || A <- Args], erlfmt_to_st(ReturnType)), Pos); {spec_clause, Pos, {args, _HeadMeta, Args}, ReturnType, GuardOr} -> FunctionType = update_tree_with_meta( erl_syntax_function_type([erlfmt_to_st(A) || A <- Args], erlfmt_to_st(ReturnType)), Pos), FunctionConstraint = erlfmt_guard_to_st(GuardOr), update_tree_with_meta( erl_syntax:constrained_function_type(FunctionType, [FunctionConstraint]), Pos); {op, Pos, '|', A, B} when Context =:= type -> update_tree_with_meta( erl_syntax:type_union([erlfmt_to_st(A), erlfmt_to_st(B)]), Pos); {op, Pos, '..', A, B} when Context =:= type -> %% erlfmt_to_st_1({type, Pos, range, [A, B]}), update_tree_with_meta( erl_syntax:integer_range_type(erlfmt_to_st(A), erlfmt_to_st(B)), Pos); %%{op, Pos, '::', A, B} when Context =:= type -> %% update_tree_with_meta( %% erl_syntax:annotated_type(erlfmt_to_st(A), %% erlfmt_to_st(B)), %% Pos); {record, Pos, Name, Fields} when Context =:= type -> %% The record name is represented as node instead of a raw atom %% and typed record fields are represented as '::' ops Fields1 = [ case F of {op, FPos, '::', B, T} -> B1 = erlfmt_to_st(B), T1 = erlfmt_to_st(T), update_tree_with_meta( erl_syntax:record_type_field(B1, T1), FPos ); _ -> erlfmt_to_st(F) end || F <- Fields ], update_tree_with_meta( erl_syntax:record_type( erlfmt_to_st(Name), Fields1 ), Pos ); {call, Pos, {remote, _, _, _} = Name, Args} when Context =:= type -> update_tree_with_meta( erl_syntax:type_application(erlfmt_to_st(Name), [erlfmt_to_st(A) || A <- Args]), Pos); {call, Pos, Name, Args} when Context =:= type -> TypeTag = case Name of {atom, _, NameAtom} -> Arity = length(Args), case erl_internal:is_type(NameAtom, Arity) of true -> type_application; false -> user_type_application 
end; _ -> user_type_application end, update_tree_with_meta( erl_syntax:TypeTag(erlfmt_to_st(Name), [erlfmt_to_st(A) || A <- Args]), Pos); {attribute, Pos, {atom, _, define} = Tag, [Name, empty]} -> %% the erlfmt parser allows defines with empty bodies (with the %% closing parens following after the comma); we must turn the %% atom 'empty' into a proper node here Body = erl_syntax:set_pos(erl_syntax:text(""), dummy_anno()), update_tree_with_meta( erl_syntax:attribute( erlfmt_to_st(Tag), [ erlfmt_to_st(Name), Body ] ), Pos ); {attribute, Pos, Name, no_parens} -> %% a directive without parentheses, like -endif. update_tree_with_meta(erl_syntax:attribute(erlfmt_to_st(Name)), Pos); %% Attributes are not processed to convert the `fun/arity` syntax into %% tuples, they are left as the `op` nodes with the `/` operator. %% Additionally, the `import` and `export` attributes are not %% processed to convert the `cons` node chains into lists and contain %% `list` nodes. {attribute, Pos, Name, Args} -> %% general attributes -Name(Arg1, ... ArgN) %% (Name is not a naked atom, so Node is not erl_parse compatible) Args1 = [fold_arity_qualifiers(erlfmt_to_st(A)) || A <- Args], update_tree_with_meta(erl_syntax:attribute(erlfmt_to_st(Name), Args1), Pos); %% The `function` node has a different AST representation: `{function, %% Anno, Clauses}`, where `Clauses` is a list of `clause` nodes or %% `macro_call` nodes. Additionally it is less strict - it does not %% enforce all clauses have the same name and arity. 
{function, Pos, Clauses} -> case get_function_name(Clauses) of none -> %% treat clauses as a list of regular nodes %% (presumably macro calls) and use an empty text node %% as the function name Clauses1 = [erlfmt_to_st(C) || C <- Clauses], Name = erl_syntax:set_pos(erl_syntax:text(""), dummy_anno()), update_tree_with_meta( erl_syntax:function( Name, Clauses1 ), Pos ); Name -> Clauses1 = [erlfmt_clause_to_st(C) || C <- Clauses], update_tree_with_meta( erl_syntax:function( erlfmt_to_st(Name), Clauses1 ), Pos ) end; {'try', Pos, {body, _, _} = Body, Clauses, Handlers, After} -> %% TODO: preserving annotations on bodies and clause groups Body1 = [erlfmt_to_st(Body)], Clauses1 = case Clauses of {clauses, _, CList} -> [erlfmt_clause_to_st(C) || C <- CList]; none -> [] end, Handlers1 = case Handlers of {clauses, _, HList} -> [erlfmt_clause_to_st(C) || C <- HList]; none -> [] end, After1 = [erlfmt_to_st(E) || E <- After], update_tree_with_meta( erl_syntax:try_expr( Body1, Clauses1, Handlers1, After1 ), Pos ); {clause, Pos, {call, CPos, Name, Args}, Guard, Body} -> %% free standing named clause - make a magic tuple to %% hold both the name and the clause with the args AAnno = dummy_anno(), Clause = {clause, Pos, {args, CPos, Args}, Guard, Body}, erlfmt_to_st_1({tuple, CPos, [{atom, AAnno, '*named_clause*'}, Name, Clause]}); {clause, _, _, _, _} = Clause -> %% clauses of case/if/receive/try erlfmt_clause_to_st(Clause); %% Lists are represented as a `list` node instead of a chain of `cons` %% and `nil` nodes, similar to the `tuple` node. The last element of %% the list can be a `cons` node representing explicit consing syntax. 
{list, Pos, Elements} -> %% a "cons" node here means 'H | T' in isolation %% and can only exist at the end of a list body {Es, Tail} = case lists:reverse(Elements) of [{cons, _CPos, H, T} | Rest] -> {lists:reverse([H | Rest]), erlfmt_to_st(T)}; _ -> {Elements, none} end, Es1 = [erlfmt_to_st(E) || E <- Es], update_tree_with_meta(erl_syntax:list(Es1, Tail), Pos); %% The record name is always represented as node instead of a raw atom {record, Pos, Name, Fields} -> % a new record instance Fields1 = [erlfmt_to_st(F) || F <- Fields], update_tree_with_meta( erl_syntax:record_expr( erlfmt_to_st(Name), Fields1 ), Pos ); {record, Pos, Expr, Name, Fields} -> % updating a record Fields1 = [erlfmt_to_st(F) || F <- Fields], update_tree_with_meta( erl_syntax:record_expr( erlfmt_to_st(Expr), erlfmt_to_st(Name), Fields1 ), Pos ); {record_field, Pos, Name} -> %% a record field without value, just the field name update_tree_with_meta(erl_syntax:record_field(erlfmt_to_st(Name)), Pos); {record_field, Pos, Name, Value} -> %% a record field "name = val" update_tree_with_meta( erl_syntax:record_field( erlfmt_to_st(Name), erlfmt_to_st(Value) ), Pos ); {record_field, Pos, Expr, Record, Field} -> %% a record field access expression "expr#record.field" update_tree_with_meta( erl_syntax:record_access( erlfmt_to_st(Expr), erlfmt_to_st(Record), erlfmt_to_st(Field) ), Pos ); {record_index, Pos, Record, Field} -> %% a record field index "#record.field" update_tree_with_meta( erl_syntax:record_index_expr( erlfmt_to_st(Record), erlfmt_to_st(Field) ), Pos ); %% The `fun` node has a different AST representation: %% `{'fun', Anno, Value}`, where `Value` is one of: %% * `{function, Anno, Name, Arity}`, where `Name` and `Arity` are an %% `atom` and `integer` node respectively or `var` or `macro_call` %% nodes. %% * `{function, Anno, Module, Name, Arity}`, where `Module`, `Name`, %% and `Arity` are `atom`, `atom`, and `integer` nodes respectively %% or a `var` or `macro_call` node. 
%% * `{clauses, Anno, Clauses}`, where `Clauses` is a list of `clause` %% nodes. Additionally it is less strict - the clauses aren't %% checked for the same name or arity. %% * `type` for the anonymous function type `fun()`. %% * `{type, Anno, Args, Res}` for the anonymous function type %% `fun((...Args) -> Res)` where `Args` is a `args` node. %% * The `named_fun` node is not used - instead, clauses have a call %% head, just as for plain functions. {'fun', Pos, {clauses, _CPos, Clauses}} -> %% TODO: can we preserve CPos in any useful way? [{clause, _, Head, _, _} | _] = Clauses, Clauses1 = [erlfmt_clause_to_st(C) || C <- Clauses], case Head of {call, _, Name, _} -> %% if the head has function call shape, it's a named fun update_tree_with_meta( erl_syntax:named_fun_expr( erlfmt_to_st(Name), Clauses1 ), Pos ); _ -> update_tree_with_meta(erl_syntax:fun_expr(Clauses1), Pos) end; {'fun', Pos, {function, FPos, Name, Arity}} -> FName = update_tree_with_meta( erl_syntax:arity_qualifier( erlfmt_to_st(Name), erlfmt_to_st(Arity) ), FPos ), update_tree_with_meta(erl_syntax:implicit_fun(FName), Pos); {'fun', Pos, {function, FPos, Module, Name, Arity}} -> %% note that the inner arity qualifier gets no annotation FName = update_tree_with_meta( erl_syntax:module_qualifier( erlfmt_to_st(Module), erl_syntax:arity_qualifier( erlfmt_to_st(Name), erlfmt_to_st(Arity) ) ), FPos ), update_tree_with_meta(erl_syntax:implicit_fun(FName), Pos); {'fun', Pos, type} -> update_tree_with_meta(erl_syntax:fun_type(), Pos); {'fun', Pos, {type, _, {args, _, Args}, Res}} -> update_tree_with_meta( erl_syntax_function_type( [erlfmt_to_st(A) || A <- Args], erlfmt_to_st(Res)), Pos); {'bin', Pos, Elements} when Context =:= type -> %% Note: we loose a lot of Annotation info here %% Note2: erl_parse assigns the line number (with no column) to the dummy zeros {M, N} = case Elements of [{bin_element, _, {var, _, '_'}, {bin_size, _, {var, _, '_'}, NNode}, default}] -> {{integer, dummy_anno(), 0}, NNode}; 
[{bin_element, _, {var, _, '_'}, MNode, default}] -> {MNode, {integer, dummy_anno(), 0}}; [{bin_element, _, {var, _, '_'}, MNode, default}, {bin_element, _, {var, _, '_'}, {bin_size, _, {var, _, '_'}, NNode}, default}] -> {MNode, NNode}; [] -> {{integer, dummy_anno(), 0}, {integer, dummy_anno(), 0}}; _ -> %% No idea what this is - what ST should we create? %% maybe just a binary(), or an empty text node {{integer, dummy_anno(), 0}, {integer, dummy_anno(), 1}} end, update_tree_with_meta( erl_syntax:bitstring_type( erlfmt_to_st(M), erlfmt_to_st(N)), Pos); %% Bit type definitions inside binaries are represented as full nodes %% instead of raw atoms and integers. The unit notation `unit:Int` is %% represented with a `{remote, Anno, {atom, Anno, unit}, Int}` node. {bin_element, Pos, Expr, Size, Types} when Types =/= default -> Types1 = lists:map( fun ({remote, QPos, {atom, _, _} = A, {integer, _, _} = I}) -> update_tree_with_meta( erl_syntax:size_qualifier( erlfmt_to_st(A), erlfmt_to_st(I) ), QPos ); (T) -> erlfmt_to_st(T) end, Types ), Size1 = case Size of default -> none; _ -> erlfmt_to_st(Size) end, update_tree_with_meta( erl_syntax:binary_field( erlfmt_to_st(Expr), Size1, Types1 ), Pos ); {'receive', Pos, {clauses, _PosClauses, ClauseList}} -> update_tree_with_meta( erl_syntax:receive_expr([erlfmt_to_st(C) || C <- ClauseList]), Pos); {'receive', Pos, Clauses, {after_clause, _PosAfter, Timeout, Action}} -> Clauses1 = case Clauses of empty -> []; {clauses, _PosClauses, ClauseList} -> [erlfmt_to_st(C) || C <- ClauseList] end, update_tree_with_meta( erl_syntax:receive_expr( Clauses1, erlfmt_to_st(Timeout), [erlfmt_to_st(A) || A <- Action]), Pos); %% --------------------------------------------------------------------- %% The remaining cases have been added by erlfmt and need special handling %% (many are represented as magically-tagged tuples for now) %% A new operator node `{op, Anno, 'when', Expr, Guard}` is %% introduced, which can occur as a body of a macro. 
It represents %% "free-standing" `Expr when Guard` expressions as used, for %% example, in the `assertMatch` macro. {op, Pos, 'when', Expr, Guard} -> AAnno = dummy_anno(), erlfmt_to_st_1({tuple, Pos, [{atom, AAnno, '*when*'}, Expr, Guard]}); %% A new node `{exprs, Anno, Exprs}` represents a %% "free-standing" comma separated sequence of expressions {exprs, Pos, Exprs} -> AAnno = dummy_anno(), erlfmt_to_st_1({tuple, Pos, [{atom, AAnno, '*exprs*'} | Exprs]}); %% A new node `{body, Anno, Exprs}` represents a comma separated %% sequence of expressions as in 'try ... of/catch' {body, Pos, Exprs} -> AAnno = dummy_anno(), erlfmt_to_st_1({tuple, Pos, [{atom, AAnno, '*body*'} | Exprs]}); %% The erlfmt parser also accepts general guards (comma and %% semicolon separated sequences of guard expressions) as the body %% of a macro {guard_or, _Pos, _Exprs} -> erlfmt_guard_to_st(Node); {guard_and, _Pos, _Exprs} -> erlfmt_guard_to_st(Node); %% Record name fragments "#name" may also occur as the body of a macro {record_name, Pos, Name} -> AAnno = dummy_anno(), erlfmt_to_st_1({tuple, Pos, [{atom, AAnno, '*record_name*'}, Name]}); %% A new node `{concat, Anno, Concatables}`, where `Concatables` is a %% list of `string`, `var`, and `macro_call` nodes. This is used to %% represent implicit string concatenation, for example `"foo" "bar"`. {concat, Pos, Subtrees} -> AAnno = dummy_anno(), erlfmt_to_st_1({tuple, Pos, [{atom, AAnno, '*concat*'} | Subtrees]}); %% A new node `{macro_string, Anno, Name}` is introduced, where `Name` %% is either an `atom` or a `var` node. It represents `??Name`. {macro_string, Pos, Name} -> AAnno = dummy_anno(), erlfmt_to_st_1({tuple, Pos, [{atom, AAnno, '*stringify*'}, Name]}); %% erlfmt preserves '...' 
tokens as nodes (which erl_parse doesn't) {'...', Pos} -> AAnno = dummy_anno(), erlfmt_to_st_1({tuple, Pos, [{atom, AAnno, '*...*'}]}); %% sometimes erlfmt leaves comments as separate nodes %% instead of attaching them to another node {comment, Pos, Lines} -> update_tree_with_meta(erl_syntax:comment(Lines), Pos); %% erlfmt has a separate entry for shebang nodes; we use raw strings {shebang, Pos, Text} -> erlfmt_to_st({raw_string, Pos, Text}); %% args nodes may (in macros) occur free floating {args, Pos, Args} -> AAnno = dummy_anno(), erlfmt_to_st_1({tuple, Pos, [{atom, AAnno, '*args*'} | Args]}); %% TODO: %% New `{spec_clause, Anno, Head, Body, Guards}` node for clauses %% inside `spec` and `callback` attributes, similar to the `clause` %% node above. It reflects the fact that in specs guards come after %% body. The `Head` element is always an `args` node. _ -> %% all remaining cases can be handled by the default erl_syntax %% subtree traversal erlfmt_to_st_1(Node) end. %% erl_parse format is compatible with erl_syntax %% But since OTP 24 erl_syntax expects a proper erl_anno:anno() in pos. %% So first replace the Meta from Node with proper erl_syntax pos+annotation to %% make dialyzer happy. -spec erlfmt_to_st_1(erlfmt() | syntax_tools()) -> syntax_tools(). erlfmt_to_st_1(Node) when is_map(element(2, Node))-> Node2 = convert_meta_to_anno(Node), erlfmt_to_st_2(Node2); erlfmt_to_st_1(Node) -> erlfmt_to_st_2(Node). -spec erlfmt_to_st_2(syntax_tools()) -> syntax_tools(). erlfmt_to_st_2(Node) -> case erl_syntax:subtrees(Node) of [] -> % leaf node Node; Groups0 -> %% recurse and replace the subtrees Groups1 = erlfmt_subtrees_to_st(Groups0), erl_syntax:update_tree(Node, Groups1) end. -spec erlfmt_subtrees_to_st([[any()]]) -> [[any()]]. erlfmt_subtrees_to_st(Groups) -> [ [ erlfmt_to_st(Subtree) || Subtree <- Group ] || Group <- Groups ]. -spec get_function_name(maybe_improper_list()) -> any(). 
%% Scan a clause list for the function's name: the name node of the
%% first clause whose head has call shape, or the atom 'none' when no
%% clause does.
%% TODO: this loses info if not all clauses have the same name
get_function_name([Clause | Rest]) ->
    case Clause of
        {clause, _, {call, _, Name, _}, _, _} ->
            %% first call-shaped clause head supplies the name
            Name;
        _ ->
            get_function_name(Rest)
    end;
get_function_name([]) ->
    none.

%% The `clause` node has a different AST representation:
%% `{clause, Anno, Head, Guards, Body}`, where the `Guards` element is either
%% an atom `empty` or a `guard_or` node, and `Head` element is one of:
%% * regular `call` node for functions and named funs;
%% * atom `empty` for `if` expressions;
%% * `{args, Anno, Args}` node for an list of expressions wrapped in
%%   parentheses;
%% * `{catch, Anno, Args}` node for clauses in `catch` clauses, where
%%   2 to 3 arguments represent the various `:` separated syntaxes;
%% * other expression for `case`, `receive`, "of" part of `try` expression
%%   and simple `catch` clauses without `:`.
%% TODO: can we preserve CPos/APos annotations here somehow?
-spec erlfmt_clause_to_st(_) -> any().
%% Convert an erlfmt clause node (or a macro call standing in for one)
%% into an erl_syntax clause. Each head shape listed in the comment
%% block above gets its own function clause; all funnel into
%% erlfmt_clause_to_st/4 with the extracted pattern list.
erlfmt_clause_to_st({clause, Pos, empty, Guard, Body}) ->
    %% 'empty' head: an 'if' clause — no patterns
    erlfmt_clause_to_st(Pos, [], Guard, Body);
erlfmt_clause_to_st({clause, Pos, {call, _CPos, _, Args}, Guard, Body}) ->
    %% call-shaped head (function / named fun): patterns are the call args
    Patterns = [erlfmt_to_st(A) || A <- Args],
    erlfmt_clause_to_st(Pos, Patterns, Guard, Body);
erlfmt_clause_to_st({clause, Pos, {args, _APos, Args}, Guard, Body}) ->
    %% parenthesized argument list head
    Patterns = [erlfmt_to_st(A) || A <- Args],
    erlfmt_clause_to_st(Pos, Patterns, Guard, Body);
erlfmt_clause_to_st({clause, Pos, {'catch', APos, Args}, Guard, Body}) ->
    %% catch clause head: 2 elements = Class:Term, 3 = Class:Term:Trace;
    %% wrapped into a single class_qualifier pattern
    Pattern =
        case [erlfmt_to_st(A) || A <- Args] of
            [Class, Term] ->
                update_tree_with_meta(
                    erl_syntax:class_qualifier(Class, Term), APos);
            [Class, Term, Trace] ->
                update_tree_with_meta(
                    erl_syntax:class_qualifier(Class, Term, Trace), APos)
        end,
    erlfmt_clause_to_st(Pos, [Pattern], Guard, Body);
erlfmt_clause_to_st({clause, Pos, Expr, Guard, Body}) ->
    %% any other expression head: case/receive/'try ... of' clause
    erlfmt_clause_to_st(Pos, [erlfmt_to_st(Expr)], Guard, Body);
erlfmt_clause_to_st(Other) ->
    %% might be a macro call
    erlfmt_to_st(Other).

%% Assemble the erl_syntax 'clause' tree from already-converted
%% patterns plus the (possibly 'empty') guard and the body expressions.
-spec erlfmt_clause_to_st(_,[any()],_,[any()]) -> any().
erlfmt_clause_to_st(Pos, Patterns, Guard, Body) ->
    Groups = [
        Patterns,
        [erlfmt_guard_to_st(Guard)],
        [erlfmt_to_st(B) || B <- Body]
    ],
    update_tree_with_meta(erl_syntax:make_tree(clause, Groups), Pos).

%% New `{guard_or, Anno, GuardAndList}` and `{guard_and, Anno, Exprs}` nodes
%% are introduced to support annotating guard sequences, instead of a plain
%% nested list of lists structure.
-spec erlfmt_guard_to_st(_) -> any().
erlfmt_guard_to_st(empty) ->
    %% no guard at all
    none;
erlfmt_guard_to_st({guard_or, Pos, List}) ->
    %% ';'-separated alternatives -> disjunction
    update_tree_with_meta(
        erl_syntax:disjunction([
            erlfmt_guard_to_st(E)
         || E <- List
        ]),
        Pos
    );
erlfmt_guard_to_st({guard_and, Pos, List}) ->
    %% ','-separated tests -> conjunction
    update_tree_with_meta(
        erl_syntax:conjunction([
            erlfmt_guard_to_st(E)
         || E <- List
        ]),
        Pos
    );
erlfmt_guard_to_st(Other) ->
    %% plain guard expression (or macro call)
    erlfmt_to_st(Other).

%% Rewrite every `Atom/Integer` infix expression in Tree into an
%% erl_syntax arity_qualifier node (used for attribute arguments,
%% where erlfmt leaves fun/arity as '/' ops).
-spec fold_arity_qualifiers(_) -> any().
fold_arity_qualifiers(Tree) ->
    erl_syntax_lib:map(fun fold_arity_qualifier/1, Tree).
%% Map-callback for fold_arity_qualifiers/1: turn a single
%% `atom/integer` infix '/' node into an arity_qualifier, copying the
%% operator's attributes onto the new node. Anything else is returned
%% unchanged.
-spec fold_arity_qualifier(_) -> any().
fold_arity_qualifier(Node) ->
    case erl_syntax:type(Node) of
        infix_expr ->
            Op = erl_syntax:infix_expr_operator(Node),
            case erl_syntax:type(Op) of
                operator ->
                    case erl_syntax:atom_value(Op) of
                        '/' ->
                            N = erl_syntax:infix_expr_left(Node),
                            A = erl_syntax:infix_expr_right(Node),
                            %% only fold when the operands really are
                            %% a name atom and an integer arity
                            case erl_syntax:type(N) =:= atom andalso
                                 erl_syntax:type(A) =:= integer of
                                true ->
                                    Q = erl_syntax:arity_qualifier(N, A),
                                    erl_syntax:copy_attrs(Op, Q);
                                false ->
                                    Node
                            end;
                        _ ->
                            Node
                    end;
                _ ->
                    Node
            end;
        _ ->
            Node
    end.

%% A generated annotation at position {0,1}, used for nodes we
%% synthesize that have no source location.
-spec dummy_anno() -> erl_anno:anno().
dummy_anno() ->
    erl_anno:set_generated(true, erl_anno:new({0, 1})).

%% erlfmt ast utilities

%% Read the annotation slot (element 2) of an erlfmt tuple node.
-spec get_anno(tuple()) -> term().
get_anno(Node) ->
    element(2, Node).

%% Replace the annotation slot (element 2) of an erlfmt tuple node.
-spec set_anno(tuple(), term()) -> tuple().
set_anno(Node, Loc) ->
    setelement(2, Node, Loc).

%% @doc Silence warning about breaking the contract
%% erl_syntax:function_type/2 has wrong spec before OTP 24
%% (the dynamic apply hides the call from Dialyzer on purpose).
-spec erl_syntax_function_type('any_arity' | [syntax_tools()], syntax_tools()) -> syntax_tools().
erl_syntax_function_type(Arguments, Return) ->
    apply(erl_syntax, function_type, [Arguments, Return]).

%% Convert erlfmt_scan:anno to erl_syntax pos+annotation
%%
%% Note: nothing from meta is stored in annotation
%% as erlang_ls only needs start and end locations.
-spec update_tree_with_meta(syntax_tools(), erlfmt_scan:anno()) -> syntax_tools().
update_tree_with_meta(Tree, Meta) ->
    Anno = meta_to_anno(Meta),
    Tree2 = erl_syntax:set_pos(Tree, Anno),
    %% erl_syntax:set_ann(Tree2, [{meta, Meta}]).
    Tree2.

%% Same conversion, but in place on an erlfmt tuple node's anno slot.
-spec convert_meta_to_anno(erlfmt()) -> syntax_tools().
convert_meta_to_anno(Node) ->
    Meta = get_anno(Node),
    Node2 = set_anno(Node, meta_to_anno(Meta)),
    %% erl_syntax:set_ann(Node2, [{meta, Meta}]).
    Node2.

-spec meta_to_anno(erlfmt_scan:anno()) -> erl_anno:anno().
%% Translate an erlfmt scanner annotation map into an erl_anno term
%% that carries only a start and an end location.
%% Recommenting can modify the start and end locations of certain
%% trees (see erlfmt_recomment:put_(pre|post)_comments/1), so when
%% pre/post comments are attached the inner locations are used.
meta_to_anno(Meta) ->
    From =
        case Meta of
            #{pre_comments := _} -> maps:get(inner_location, Meta);
            #{} -> maps:get(location, Meta)
        end,
    To =
        case Meta of
            #{post_comments := _} -> maps:get(inner_end_location, Meta);
            #{} -> maps:get(end_location, Meta)
        end,
    erl_anno:from_term([{location, From}, {end_location, To}]).
apps/els_lsp/src/els_erlfmt_ast.erl
0.644784
0.496399
els_erlfmt_ast.erl
starcoder
%%% ========================================================================== %%% Copyright 2015 Silent Circle %%% %%% Licensed under the Apache License, Version 2.0 (the "License"); %%% you may not use this file except in compliance with the License. %%% You may obtain a copy of the License at %%% %%% http://www.apache.org/licenses/LICENSE-2.0 %%% %%% Unless required by applicable law or agreed to in writing, software %%% distributed under the License is distributed on an "AS IS" BASIS, %%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %%% See the License for the specific language governing permissions and %%% limitations under the License. %%% ========================================================================== %% @author <NAME> <<EMAIL>> %% @copyright 2015 Silent Circle %% @doc %% %% == SRV records == %% %% SRV records essentially allow transparent DNS-level redirects of services to another domain or port. A simple example is when you have an XMPP server and you want users to have addresses like <EMAIL>, but your XMPP server is really installed on xmpp.example.com. In principle they work the same way as MX records do for email. %% %% For a server `example.com' wanting to delegate its XMPP services to the server at 'xmpp.example.com', here are some example records: %% ``` %% _xmpp-client._tcp.example.com. 18000 IN SRV 0 5 5222 xmpp.example.com. %% _xmpp-server._tcp.example.com. 18000 IN SRV 0 5 5269 xmpp.example.com. %% ''' %% %% The target domain **MUST** be an existing A record of the target server; it cannot be an IP address, and cannot be a CNAME record. %% %% The 18000 in this example is the TTL (time-to-live), it tells other servers how long they should cache your record for - a higher number will reduce DNS traffic, and result in slightly faster connections (since DNS info will be more likely to be cached, and won't need to be re-fetched). 
A lower TTL is more useful if you are going to be changing your record, since you have to wait for the TTL until all caches have expired your old record. %% %% The 0 and 5 are the record's priority and weight. These values are specific to SRV records, and allow you to have multiple targets with different priorities (e.g. for load balancing or fallback in case of a down server) - lower priority targets are tried first. The weight is used to bias resolvers towards certain targets in case of a priority tie. Most services will not need to change these values, and 0 and 5 are sensible defaults. %% %% Next is the port the service is running on. Clients will typically connect to 5222. %% %% == How to use SRV records == %% %% Clients resolve the SRV records for _xmpp-client._tcp.example.com. One or more SRV records will be returned. Clients then select a record based on priority and weight as described in the table below. %% %% ``` %% Priority The priority of the server. Clients attempt to contact the server with the lowest priority. %% Weight A load-balancing mechanism that is used when selecting a target host from those that have the same priority. Clients randomly choose SRV records that specify target hosts to be contacted, with probability proportional to the weight %% Port Number The port where the server is listening for this service. %% Target The fully qualified domain name of the host computer. %% ''' %% %% === Example 1: Equal Priorities === %% %% Let's say an SRV resolution request returns the following: %% %% ``` %% Priority Weight Port Target %% 0 30 5222 JHGAJSGHD.example.net %% 0 40 5222 KJGOIUTRG.example.net %% 0 15 5222 NBGDPRLGH.example.net %% 0 15 5222 WMFPSNMGJ.example.net %% ''' %% <ul> %% <li>The lowest priority records are chosen. In this case, all of the records are at priority 0.</li> %% <li>A record is randomly selected from the group such that its probability of selection is proportional to its relative weight. 
In this example, the weights add up nicely to 100, so they could be thought of as percentages. In this arrangement, KJGOIUTRG.example.net would be chosen 40% of the time, and NBGDPRLGH.example.net 15% of the time.</li> %% </ul> %% %% === Example 2: Different Priorities === %% %% ``` %% Priority Weight Port Target %% 0 30 5222 JHGAJSGHD.example.net %% 1 40 5222 KJGOIUTRG.example.net %% 2 15 5222 NBGDPRLGH.example.net %% 3 15 5222 WMFPSNMGJ.example.net %% ''' %% %% Here, the weights are irrelevant. JHGAJSGHD.example.net will be chosen every time. If connection attempts to it fail, then the next highest priority record, KJGOIUTRG.example.net, is chosen, and so on. %% %% == What this module provides == %% %% The idea is to call fetch_srv_rrs/1, then pick_server/1 using the `Rest' list, until a host is reachable. %% %% === Example === %% ``` %% RRs = sc_util_srv:fetch_srv_rrs("_some-client._tcp.some.domain"), %% case try_connect(RRs) of %% {ok, Conn, SrvInfo} -> %% use_connection(Conn, SrvInfo); %% {error, Reason} -> %% handle_this(Reason) %% end. %% %% try_connect([]) -> %% {error, no_more_hosts_to_try}; %% try_connect(RRs) -> %% case sc_util_srv:pick_server(RRs) of %% {Host, Port, _TTL, Rest} = SrvInfo -> %% case my_connect(Host, Port) of %% {ok, Connection} -> %% {ok, Connection, SrvInfo}; %% {error, connection_failure} -> %% try_connect(Rest); %% {error, Reason} -> %% handle_error(Reason) %% end; %% undefined -> %% {error, no_hosts_available} %% end. %% %% ''' %% @end -module(sc_util_srv). -export([ fetch_srv_rrs/1, pick_server/1 ]). -include("sc_util_srv.hrl"). -export_type([ srv_prio/0, srv_weight/0, srv_port/0, srv_host/0, dns_ttl/0, dns_srv_tuple/0, dns_rr_prop/0, dns_rr_props/0, rr/0, rrs/0, srv_info/0, service_name/0, weighted_rr/0, weighted_rrs/0, weighted_sums/0 ]). -type dns_srv_tuple() :: {srv_prio(), srv_weight(), srv_port(), srv_host()}. 
%% Proplist form of a single DNS resource record as produced by
%% inet_dns record-to-proplist conversion.
-type dns_rr_prop() :: {domain, string()}
                     | {type, atom()}
                     | {class, atom()}
                     | {ttl, dns_ttl()}
                     | {data, dns_srv_tuple()}.
-type dns_rr_props() :: [dns_rr_prop()].
-type rr() :: #rr{}.
-type rrs() :: [rr()].
-type srv_info() :: {srv_host(), srv_port(), dns_ttl(), rrs()}.
-type service_name() :: binary() | string().
%% An rr paired with the running total of weights up to and including it.
-type weighted_rr() :: {rr(), RunningTotal :: non_neg_integer()}.
-type weighted_rrs() :: [weighted_rr()].
-type weighted_sums() :: {weighted_rrs(), Sum :: non_neg_integer()}.

%%%--------------------------------------------------------------------
%%% API
%%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% @doc Pick the server with the highest weighting and priority from
%% the list of RRs. Return `undefined' if RRs is an empty list.
%% @end
%%--------------------------------------------------------------------
%% NOTE(review): the spec below says srv_info() only, but select_host/1
%% (and its spec) can also return 'undefined' for an empty list —
%% consider widening this spec to `srv_info() | undefined'.
-spec pick_server(RRs) -> SrvInfo when
      RRs :: rrs(), SrvInfo :: srv_info().
pick_server(RRs) ->
    SortedRRs = sort_rrs(RRs),
    select_host(SortedRRs).

%% @doc Resolve the SRV records for ServiceName via DNS.
%% Accepts a binary or string name; binaries are converted to strings.
%% Returns {ok, [#rr{}]} or a tagged error (no records / DNS failure).
-spec fetch_srv_rrs(ServiceName) -> Result when
      ServiceName :: service_name(), Result :: {ok, rrs()} | {error, any()}.
fetch_srv_rrs(<<ServiceName/binary>>) ->
    fetch_srv_rrs(binary_to_list(ServiceName));
fetch_srv_rrs(ServiceName) when is_list(ServiceName) ->
    case inet_res:resolve(ServiceName, in, srv, [], ?DEFAULT_DNS_TIMEOUT) of
        {ok, DnsMsg} ->
            % Convert opaque DNS record structures to proplists.
            % inet_dns:record_type/1 names the record, and calling the
            % function of that same name converts it to a proplist
            % (dynamic dispatch — invisible to xref/Dialyzer).
            DnsProps = inet_dns:(inet_dns:record_type(DnsMsg))(DnsMsg),
            case sc_util:req_val(anlist, DnsProps) of
                [] ->
                    {error, {no_srv_records_for, ServiceName}};
                DnsRRs ->
                    %% same record-type trick for the answer records;
                    %% assumes all answers share the type of the first
                    RRRecType = inet_dns:record_type(hd(DnsRRs)),
                    Res = dns_props_to_rrs([inet_dns:RRRecType(RR) || RR <- DnsRRs]),
                    {ok, Res}
            end;
        {error, Reason} ->
            {error, {dns_srv_resolution_error, {Reason, ServiceName}}}
    end.

-spec select_host(SortedRRs) -> SrvInfo when
      SortedRRs :: rrs(), SrvInfo :: srv_info() | undefined.
%% Choose one RR from the priority-sorted list: take the lowest-prio
%% group, pick randomly by weight if the group has more than one
%% member, and return it as srv_info() with the remaining RRs as the
%% retry list. Empty input yields 'undefined'.
select_host([_|_] = SortedRRs) ->
    RR = case select_highest_prio(SortedRRs) of
             [RR0] -> RR0;
             [_|_] = L -> select_randomly_by_weight(L)
         end,
    make_srv_info(RR, lists:delete(RR, SortedRRs));
select_host([]) ->
    undefined.

%%%--------------------------------------------------------------------
%%% Internal functions
%%%--------------------------------------------------------------------
-compile({inline, [make_srv_info/2, make_srv_info/4]}).
-spec make_srv_info(RR, Rest) -> SrvInfo when
      RR :: rr(), Rest :: rrs(), SrvInfo :: srv_info().
make_srv_info(RR, Rest) ->
    make_srv_info(RR#rr.host, RR#rr.port, RR#rr.ttl, Rest).

-spec make_srv_info(H, P, TTL, Rest) -> SrvInfo when
      H :: srv_host(), P :: srv_port(), TTL :: dns_ttl(),
      Rest :: rrs(), SrvInfo :: srv_info().
make_srv_info(H, P, TTL, Rest) ->
    {H, P, TTL, Rest}.

-compile({inline, [select_highest_prio/1]}).
%% Leading run of RRs sharing the first (lowest, i.e. most preferred)
%% priority value. Assumes the input is already sorted.
-spec select_highest_prio(SortedRRs) -> HighestRRs when
      SortedRRs :: rrs(), HighestRRs :: rrs().
select_highest_prio(SortedRRs) ->
    Highest = (hd(SortedRRs))#rr.prio,
    lists:takewhile(fun(#rr{prio = P}) -> P =:= Highest end, SortedRRs).

%% Weighted random selection within one priority group: draw a number
%% in 0..Sum and take the first RR whose running weight total reaches it.
-spec select_randomly_by_weight(RRs) -> RR when
      RRs :: rrs(), RR :: rr().
select_randomly_by_weight(RRs) ->
    {WeightedRRs, Sum} = weighted_sums(RRs),
    Rnd = rand:uniform(Sum + 1) - 1, % range 0..Sum inclusive
    find_first_greater_or_equal(Rnd, WeightedRRs).

%% NOTE(review): the spec says Num :: pos_integer(), but the only
%% caller can pass 0 (rand:uniform(Sum + 1) - 1) — this should be
%% non_neg_integer().
-spec find_first_greater_or_equal(Num, Weighted) -> RR when
      Weighted :: weighted_rrs(), Num :: pos_integer(), RR :: rr().
find_first_greater_or_equal(Num, Weighted) ->
    {RR, _} = hd(lists:dropwhile(fun({_RR, S}) -> S < Num end, Weighted)),
    RR.

%% Pair each RR with the running total of weights so far, and return
%% the final sum as well.
-spec weighted_sums(RRs) -> WeightedSums when
      RRs :: rrs(), WeightedSums :: weighted_sums().
weighted_sums(RRs) ->
    lists:mapfoldl(
        fun(#rr{} = RR, Sum0) ->
            Sum = Sum0 + RR#rr.weight,
            {{RR, Sum}, Sum}
        end, 0, RRs).

%% Default term order sorts #rr{} records by prio first (it is the
%% first record field), which is what select_highest_prio relies on.
sort_rrs(RRs) ->
    lists:sort(RRs).

dns_props_to_rrs(ListOfDnsProps) ->
    [dns_prop_to_rr(DnsProps) || DnsProps <- ListOfDnsProps].
%% Build an #rr{} record from the proplist form of one DNS SRV answer.
%% The `data' tuple carries {Priority, Weight, Port, Host} and `ttl'
%% the record's time-to-live (sc_util:req_val/2 presumably crashes if
%% a key is absent — name suggests "required value"; confirm in sc_util).
-spec dns_prop_to_rr(DnsProps) -> RR when
      DnsProps :: dns_rr_props(), RR :: rr().
dns_prop_to_rr(DnsProps) ->
    {Priority, Wght, SrvPort, SrvHost} = sc_util:req_val(data, DnsProps),
    TimeToLive = sc_util:req_val(ttl, DnsProps),
    #rr{host = SrvHost,
        port = SrvPort,
        prio = Priority,
        weight = Wght,
        ttl = TimeToLive}.
src/sc_util_srv.erl
0.713232
0.452657
sc_util_srv.erl
starcoder
%% @doc horrible-hash is an implementation of a (Perl-style) hash as an
%% Erlang process.
%% @version 0.1
%% @reference <a href="https://github.com/eiri/horrible-hash">https://github.com/eiri/horrible-hash</a>
%% @author <NAME> <<EMAIL>>
%% @copyright 2016 <NAME>
-module(horrible_hash).

%% API exports
-export([new/1, delete/1, get/2, set/3, exists/2, delete/2]).
-export([keys/1, values/1, each/1]).

%%====================================================================
%% API functions
%%====================================================================

%% @doc Creates a new hash with a given name.
%% Internally creates a registered process with the hash name
%% that uses its process dictionary to hold the provided values.
-spec new(Name::atom()) -> true.
new(Name) when is_atom(Name) ->
    Pid = erlang:spawn(fun loop/0),
    erlang:register(Name, Pid).

%% @doc Delete existing hash.
%% Unregisters the name and waits for the process to terminate.
-spec delete(Name::atom()) -> boolean().
delete(Name) ->
    case whereis(Name) of
        undefined -> false;
        Pid ->
            unregister(Name),
            Ref = monitor(process, Pid),
            erlang:send(Pid, quit),
            %% NOTE(review): matches only a 'normal' DOWN reason; an
            %% abnormal exit would leave this receive waiting forever.
            receive
                {'DOWN', Ref, process, _, normal} -> true
            end
    end.

%% @doc Gets a value for a given key.
%% If no value exists returns the atom 'undefined'.
-spec get(Name::atom(), Key::any()) -> Value::any() | undefined.
get(Name, Key) ->
    case whereis(Name) of
        undefined -> undefined;
        Pid ->
            Ref = make_ref(),
            erlang:send(Pid, {get, {self(), Ref}, Key}),
            %% NOTE(review): no 'after' timeout — hangs if the hash
            %% process dies between whereis/1 and the send.
            receive
                {Ref, Value} -> Value
            end
    end.

%% @doc Sets a value for a given key.
%% If the key already has a value, overrides it.
-spec set(Name::atom(), Key::any(), Value::any()) -> boolean().
set(Name, Key, Value) ->
    case whereis(Name) of
        undefined -> false;
        Pid ->
            %% erlang:send/2 returns the message, so this comparison
            %% is always true — it only exists to yield a boolean
            {set, Key, Value} == erlang:send(Pid, {set, Key, Value})
    end.

%% @doc Returns true if a given key is associated with a value in a hash.
%% Otherwise returns false.
-spec exists(Name::atom(), Key::any()) -> boolean().
%% Implementation of exists/2 (doc and spec precede this clause):
%% round-trips an {exists, ...} request to the hash process.
exists(Name, Key) ->
    case whereis(Name) of
        undefined -> false;
        Pid ->
            Ref = make_ref(),
            erlang:send(Pid, {exists, {self(), Ref}, Key}),
            receive
                {Ref, Boolean} -> Boolean
            end
    end.

%% @doc Removes a given key from a hash.
%% Returns true even if the hash doesn't have the key.
-spec delete(Name::atom(), Key::any()) -> boolean().
delete(Name, Key) ->
    case whereis(Name) of
        undefined -> false;
        Pid ->
            %% send/2 returns the message, so this is always true;
            %% it only serves to produce a boolean result
            {delete, Key} == erlang:send(Pid, {delete, Key})
    end.

%% @doc Returns a list of all the keys allocated in a hash.
%% Order not guaranteed.
-spec keys(Name::atom()) -> [Key::any()].
keys(Name) ->
    case whereis(Name) of
        undefined -> [];
        Pid ->
            Ref = make_ref(),
            erlang:send(Pid, {{self(), Ref}, keys}),
            receive
                {Ref, Keys} -> Keys
            end
    end.

%% @doc Returns a list of all the values stored in a hash.
%% Order not guaranteed.
-spec values(Name::atom()) -> [Value::any()].
values(Name) ->
    case whereis(Name) of
        undefined -> [];
        Pid ->
            Ref = make_ref(),
            erlang:send(Pid, {{self(), Ref}, values}),
            receive
                {Ref, Values} -> Values
            end
    end.

%% @doc Returns iterator for a hash.
%% On each call returns a tuple of {Key, Value} until the end is
%% reached, at which moment it returns an empty list. A repeated call
%% will initiate a new iterator.
-spec each(Name::atom()) -> [{Key::any(), Value::any()}].
each(Name) ->
    case whereis(Name) of
        undefined -> [];
        Pid ->
            Ref = make_ref(),
            erlang:send(Pid, {{self(), Ref}, each}),
            receive
                {Ref, Iteration} -> Iteration
            end
    end.

%%====================================================================
%% Internal functions
%%====================================================================

%% Entry point for the hash process: start with no active iterator.
loop() ->
    loop(undefined).
%% Main server loop of a hash process. The key/value data lives in
%% this process's dictionary; Iterator is 'undefined' or the pid of a
%% helper process currently walking a snapshot for each/1.
loop(Iterator) ->
    receive
        {get, {From, Ref}, Key} ->
            %% reply with the stored value, or 'undefined' on a miss
            Value = erlang:get(Key),
            erlang:send(From, {Ref, Value}),
            loop(Iterator);
        {set, Key, Value} ->
            erlang:put(Key, Value),
            loop(Iterator);
        {exists, {From, Ref}, Key} ->
            %% NOTE: a key explicitly set to 'undefined' is
            %% indistinguishable from an absent key here
            erlang:send(From, {Ref, undefined /= erlang:get(Key)}),
            loop(Iterator);
        {delete, Key} ->
            erlang:erase(Key),
            loop(Iterator);
        {{From, Ref}, keys} ->
            Keys = [Key || {Key, _} <- erlang:get()],
            erlang:send(From, {Ref, Keys}),
            loop(Iterator);
        {{From, Ref}, values} ->
            Values = [Value || {_, Value} <- erlang:get()],
            erlang:send(From, {Ref, Values}),
            loop(Iterator);
        {{From, Ref}, each} ->
            KVs = erlang:get(),
            NewIterator = maybe_start_iterator(Iterator, KVs),
            %% NOTE(review): a race remains when the iterator has just
            %% delivered its last element — it may still look alive
            %% here while exiting, losing this request.
            case is_pid(NewIterator) andalso is_process_alive(NewIterator) of
                true ->
                    erlang:send(NewIterator, {{From, Ref}, each}),
                    loop(NewIterator);
                false ->
                    %% iteration exhausted (or hash empty): signal the
                    %% end and reset so the next each/1 starts over
                    erlang:send(From, {Ref, []}),
                    loop(undefined)
            end;
        quit ->
            ok
    end.

%% Start a fresh iterator over the snapshot KVs, unless one is
%% already running.
maybe_start_iterator(undefined, []) ->
    %% BUGFIX: previously an iterator was spawned even for an empty
    %% snapshot; it exited immediately, and when is_process_alive/1
    %% still reported it alive, the {each} request was sent to a dead
    %% process and the caller blocked forever. With no data there is
    %% nothing to iterate — report "not running" instead.
    undefined;
maybe_start_iterator(undefined, KVs) ->
    spawn(fun() -> iterator(KVs) end);
maybe_start_iterator(Iterator, _) ->
    Iterator.

%% Iterator process: hand out one {Key, Value} per {each} request,
%% then terminate normally when the snapshot is exhausted.
iterator([]) ->
    ok;
iterator([Iteration | Rest]) ->
    receive
        {{From, Ref}, each} ->
            erlang:send(From, {Ref, [Iteration]}),
            iterator(Rest)
    end.
src/horrible_hash.erl
0.502197
0.52208
horrible_hash.erl
starcoder
-module(solution).
-export([main/0]).

% The a_i and val are from S={1,..,100}
% S contains 25 prime numbers
% We use the fundamental theorem of arithmetic,
%   x = prod_1^25 P_i^e_i,
% representing each number x as vector of exponents (e_i) in N^25
% Then:
%   mult(x, y) -> (e_i + f_i)
%   lcm(x, y) -> (max(e_i, f_i))
%   lcm(a[L..R]) -> Range Maximum Query problem

% Read the input (array size, array, op count, ops), build the prime
% table / exponent vectors / segment tree, and evaluate the queries.
% read_list/1, read_ops/1 and array/1 are defined elsewhere in this
% file (not visible here) — presumably stdin readers and a list-to-map
% converter; confirm against the full source.
main() ->
    {ok, [N]} = io:fread("", "~d"),
    L = read_list(N),
    {ok, [K]} = io:fread("", "~d"),
    Ops = read_ops(K),
    A = array(L),
    Primes = primes(100),
    Vectors = vectors(Primes),
    RMQ = rmq(A, Vectors),
    %io:format("RMQ=~p~n", [RMQ]),
    eval(Ops, RMQ, Primes, Vectors).

% evaluate queries
% "Q L R" prints lcm(a[L..R]) mod p(); "U Idx Val" multiplies a[Idx]
% by Val and rebuilds the affected tree path.
eval([], _RMQ, _Primes, _Vectors) -> true;
eval([[$Q], L, R|T], RMQ, Primes, Vectors) ->
    V = q(RMQ, L, R),
    M = vector_to_int(V, Primes),
    M2 = M rem p(),
    io:format("~p~n", [M2]),
    eval(T, RMQ, Primes, Vectors);
eval([[$U], Idx, Val|T], RMQ, Primes, Vectors) ->
    RMQ2 = u(RMQ, Idx, Val, Vectors),
    %io:format("RMQ=~p~n", [RMQ2]),
    eval(T, RMQ2, Primes, Vectors).

% big prime (for a finite field from 0 to P-1)
p() -> 1000000007.

% query operation
% find the least common multiple of the numbers in A[L, R] (mod P)
q(RMQ, L, R) ->
    rmq_lcm(L, R, RMQ).

% update operation
% multiply the leaf at Idx by Val (as exponent-vector addition) and
% propagate the new lcm values up towards the root
u(RMQ, _Idx, 1, _Vectors) -> RMQ;  % multiplying by 1 changes nothing
u(RMQ, Idx, Val, Vectors) ->
    VF = maps:get(Val, Vectors),
    % leaves always have 'none' children, hence the {_, none, none, _} match
    {Parent, none, none, VI} = maps:get(Idx, RMQ),
    VI2 = mult(VF, VI),
    RMQ2 = maps:update(Idx, {Parent, none, none, VI2}, RMQ),
    update(Parent, RMQ2).

% Recompute a node's value from its children and keep climbing while
% the value actually changed (early stop when it is unchanged).
update(none, RMQ) -> RMQ;
update(Node, RMQ) ->
    {Parent, ChildLeft, ChildRight, V} = maps:get(Node, RMQ),
    {_, _, _, VLeft} = maps:get(ChildLeft, RMQ),
    {_, _, _, VRight} = maps:get(ChildRight, RMQ),
    V2 = lcm(VLeft, VRight),
    case V2 =:= V of
        true -> RMQ;
        false ->
            RMQ2 = maps:update(Node, {Parent, ChildLeft, ChildRight, V2}, RMQ),
            update(Parent, RMQ2)
    end.

% determine lcm for range query
rmq_lcm(L, R, RMQ) ->
    Root = maps:get(root, RMQ),
    rmq_lcm(Root, L, R, RMQ).
rmq_lcm(Node, L, R, RMQ) -> case is_inside(Node, L, R) of true -> {_, _, _, V} = maps:get(Node, RMQ), V; false -> case is_outside(Node, L, R) of true -> % neutral element regarding LCM is 1 []; false -> {ChildLeft, ChildRight} = split(Node), VLeft = rmq_lcm(ChildLeft, L, R, RMQ), VRight = rmq_lcm(ChildRight, L, R, RMQ), lcm(VLeft, VRight) end end. is_inside(Element, L, R) when is_integer(Element) -> (Element >= L) andalso (Element =< R); is_inside(Interval, L, R) -> {Left, Right} = Interval, (Left >= L) andalso (Right =< R). is_outside(Element, L, R) when is_integer(Element) -> (Element < L) orelse (Element > R); is_outside(Interval, L, R) -> {Left, Right} = Interval, (Right < L) orelse (Left > R). % calculate Range Maximum Query data structure % this solution builds a segment tree according to % https://www.geeksforgeeks.org/segment-tree-set-1-range-minimum-query/ rmq(A, Vectors) -> N = maps:size(A), Left = 0, Right = N-1, Node = key(Left, Right), Parent = none, RMQ0 = maps:new(), RMQ = maps:put(root, Node, RMQ0), rmq(Node, Parent, A, Vectors, RMQ). rmq(Element, Parent, A, Vectors, RMQ) when is_integer(Element) -> % solve Value = maps:get(Element, A), V = maps:get(Value, Vectors), maps:put(Element, {Parent, none, none, V}, RMQ); rmq(Interval, Parent, A, Vectors, RMQ) -> % divide {ChildLeft, ChildRight} = split(Interval), RMQ1 = rmq(ChildLeft, Interval, A, Vectors, RMQ), RMQ2 = rmq(ChildRight, Interval, A, Vectors, RMQ1), % conquer {Interval, _, _, VLeft} = maps:get(ChildLeft, RMQ2), {Interval, _, _, VRight} = maps:get(ChildRight, RMQ2), V = lcm(VLeft, VRight), maps:put(Interval, {Parent, ChildLeft, ChildRight, V}, RMQ2). split(Interval) -> {Left, Right} = Interval, Mid = Left + (Right-Left) div 2, ChildLeft = key(Left, Mid), ChildRight = key(Mid+1, Right), {ChildLeft, ChildRight}. key(Left, Right) when Left =:= Right -> Left; key(Left, Right) -> {Left, Right}. % lcm: max per component lcm(V1, V2) -> V = [], lcm(V1, V2, V). 
lcm([], [], V) -> lists:reverse(V); lcm([], [H|T], V) -> lcm([], T, [H|V]); lcm([H|T], [], V) -> lcm(T, [], [H|V]); lcm([H1|T1], [H2|T2], V) -> M = max(H1, H2), V2 = [M|V], lcm(T1, T2, V2). % mult: add per component mult(V1, V2) -> V = [], mult(V1, V2, V). mult([], [], V) -> lists:reverse(V); mult([], [H|T], V) -> mult([], T, [H|V]); mult([H|T], [], V) -> mult(T, [], [H|V]); mult([H1|T1], [H2|T2], V) -> M = H1+H2, V2 = [M|V], mult(T1, T2, V2). % map of 25-vectors for 1..100 vectors(Primes) -> X = 1, Map = maps:new(), vectors(X, Primes, Map). vectors(X, _Primes, Map) when X > 100 -> Map; vectors(X, Primes, Map) -> V = vector(X, Primes), Map2 = maps:put(X, V, Map), vectors(X+1, Primes, Map2). % calculate 25-vector for X vector(X, Primes) -> I = 25, L = [], vector(I, X, Primes, L). vector(I, _X, _Primes, L) when I =:= 0 -> L; vector(I, X, Primes, L) -> P = maps:get(I, Primes), {X2, E} = shave(X, P), case (L =:= []) andalso (E =:= 0) of true -> % we drop leading zero digits, so [] represents 1 L2 = L; false -> L2 = [E|L] end, vector(I-1, X2, Primes, L2). % try to shave of the factor P from x shave(X, P) -> E = 0, shave(E, X, P). shave(E, X, P) when (X rem P) =/= 0 -> {X, E}; shave(E, X, P) -> shave(E+1, X div P, P). % convert 25-vector to int vector_to_int(V, Primes) -> I = 1, X = 1, vector_to_int(V, I, X, Primes). vector_to_int([], _I, X, _Primes) -> X; vector_to_int([H|T], I, X, Primes) -> P = maps:get(I, Primes), F = pow(P, H), X2 = F*X, vector_to_int(T, I+1, X2, Primes). % simple exponentiation pow(_X, 0) -> 1; pow(X, E) -> X * pow(X, E-1). % code from Project Euler #10 Summation of primes: % map of prime numbers <= n primes(1) -> maps:new(); primes(2) -> maps:put(1, 2, primes(1)); primes(N) when N < 5 -> maps:put(2, 3, primes(2)); primes(N) -> primes(5, N, primes(3)). 
primes(X, N, Primes) when X > N -> Primes; primes(X, N, Primes) -> case is_prime(X, Primes) of true -> I_max = maps:size(Primes)+1, Primes2 = maps:put(I_max, X, Primes); false -> Primes2 = Primes end, primes(X+2, N, Primes2). is_prime(X, Primes) -> I = 2, is_prime(I, X, Primes). is_prime(I, X, Primes) -> P = maps:get(I, Primes), case P*P > X of true -> true; false -> case X rem P =:= 0 of true -> false; false -> is_prime(I+1, X, Primes) end end. % array map starting at index 0 array(L) -> I=0, Map = maps:new(), array(L, I, Map). array([], _I, Map) -> Map; array([H|T], I, Map) -> Map2 = maps:put(I, H, Map), array(T, I+1, Map2). read_ops(N) -> Fmt = unicode:characters_to_list(lists:duplicate(N, "~s~d~d")), {ok, L} = io:fread("", Fmt), L. read_list(N) -> Fmt = unicode:characters_to_list(lists:duplicate(N, "~d")), {ok, L} = io:fread("", Fmt), L.
Functional Programming/Persistent Structures/Minimum Multiple/version 4/solution.erl
0.529263
0.47792
solution.erl
starcoder
%%%------------------------------------------------------------------- %% @author <NAME> <<EMAIL>> %% @copyright (C) 2017, <NAME> %% @doc erl_knearest_neighbor.erl %% We have a set of movies with textual desciption (assoicated "tags"), %% We have a set of users and their preference of movies in the form of ratings %% We want to recommend movies for a user based on its ratings %% We need to find movies that are similar to what the user have rated %% %% To be able to calculate similarities of movies we need to represent each movie %% mathematically in a way such that metrics like euclidean distance or cosine similarity %% gives indiciation of actual similary of movies. %% %% The approach I've taken here is to only focus on the textual description (the tags), %% not the titles of the movies. If two movies have similar tags they are considered similar. %% Feature extraction to represent text can be done simply with Onehot-vector where each entry %% represent the frequency of a tag for the movie. This would however loose the information that %% some tags are more important than others for example "indians" is a much less frequent tag than %% "based on a book" to represent this importance-factor of each tag we multiply each tag-frequency(TF) %% with a weight called "Inverse-document frequency (IDF)" which leaves us with our final represenation of each tag for each movie which is the TF-IDF value. %% %% Now given the numerical representation of each movie as a row in a large matrix where the row %% is a vector with the TF-IDF for each of its tags, we can calculate the similarities of movies. %% %% To recommend the user movies we need to represent the user's ratings as a movie so that we can %% find similar movies and recommend those. To represent user-profile as a movie we merge all %% tags of the movies rated by the user and their associated TFIDFs and then we multiply %% each TFIDF with a weight that is based on how high the user-rating was. 
%% %% Finally we calculate the top K recommended movies by calculating the cosine-similarity between %% each unseen movie and the user-profile (both represented as vectors) and return the top K %% movies similar to the user-profile. %% %% Example: %% > c(erl_content_based_recommendation). %% > erl_content_based_recommendation:recommend(4045, 5). %% > erl_content_based_recommendation:recommend(144, 5). %% @end %%%------------------------------------------------------------------- -module(erl_content_based_recommendation). -author('<NAME> <<EMAIL>>'). %% API -export([recommend/2]). %% records -record(movie_score, {id::movie_id(), score::score()}). -record(user_profile, {id::user_id(), preference::tf_vector()}). -record(tf_vector_entry, {tag_id :: tag_id(), tf :: tf()}). -record(model, {tag_ids :: tag_ids(), tf_idfs :: movie_tfidfs()}). -record(movie_tfidf, {id :: movie_id(), tfidf :: tf_vector()}). -record(tag_id, {tag :: movie_tag(), id :: tag_id()}). -record(dataset, {movie_tags :: movie_id_tags(), movie_titles :: movie_id_titles(), user_ratings :: user_id_movie_ratings(), user_names :: user_id_names()}). -record(movie_id_tag, {id :: movie_id(), tag :: movie_tag()}). -record(movie_id_title, {id :: movie_id(), title :: movie_title()}). -record(user_id_movie_rating, {id :: user_id(), movie :: movie_id(), rating :: movie_rating()}). -record(user_id_name, {id :: user_id(), name :: user_name()}). %% types -type movie_id() :: integer(). -type movie_tag() :: string(). -type movie_id_tags() :: list(#movie_id_tag{}). -type movie_title() :: string(). -type movie_id_titles() :: list(#movie_id_title{}). -type user_id() :: integer(). -type movie_rating() :: float(). -type user_name() :: string(). -type user_id_names() :: list(#user_id_name{}). -type user_id_movie_ratings() :: list(#user_id_movie_rating{}). -type tag_id() :: integer(). -type tag_ids() :: list(#tag_id{}). -type tf() :: float(). -type tf_vector() :: list(#tf_vector_entry{}). 
-type movie_tfidfs() :: list(#movie_tfidf{}). -type score() :: float(). -type movie_scores() :: list(#movie_score{}). -type parsed_csv() :: list(csv_line()). -type csv_line() :: list(string()). %%==================================================================== %% API functions %%==================================================================== %% Return top K recommended unseen movies for user. -spec recommend(user_id(), integer()) -> movie_scores(). recommend(User, K)-> Dataset = read_data(), io:format("Parse Dataset complete ~n", []), Model = tfidf_model(Dataset), io:format("Create Model complete ~n", []), UserProfile = user_profile(User, Model, Dataset), io:format("Create UserProfile complete ~n", []), io:format("Top ~p recommended movies for user: ~p~n", [K, User]), top_K_movies(Model, UserProfile, Dataset#dataset.user_ratings, K). %%==================================================================== %% Internal functions %%==================================================================== %% Return top K movies with highest cosine similarity -spec top_K_movies(#model{}, #user_profile{}, user_id_movie_ratings(), integer()) -> movie_scores(). top_K_movies(Model, UserProfile, Ratings, K)-> UserRatings = lists:filter(fun(R) -> R#user_id_movie_rating.id =:= UserProfile#user_profile.id end, Ratings), ScoredMovies = score_movies(Model, UserProfile), UnSeen = lists:filter(fun(M) -> case lists:keyfind(M#movie_score.id, #user_id_movie_rating.movie, UserRatings) of false -> true; _ -> false end end, ScoredMovies), {_, TopK} = lists:foldl(fun(_, {MoviesLeft, TopKAcc}) -> TopScore = top_score(MoviesLeft, nil), MoviesLeft1 = lists:keydelete(TopScore#movie_score.id, #movie_score.id, MoviesLeft), {MoviesLeft1, [TopScore|TopKAcc]} end, {UnSeen, []}, lists:seq(1, K)), TopK. %% Return highest movie_score -spec top_score(movie_scores(), nil | float()) -> #movie_score{}. 
top_score([], Max)-> Max; top_score([H|T], nil) -> top_score(T, H); top_score([H|T], Max) -> case H#movie_score.score > Max#movie_score.score of true -> top_score(T, H); false -> top_score(T, Max) end. %% Calculate score (cosine similarity) for each movie based on the user -spec score_movies(#model{}, #user_profile{}) -> movie_scores(). score_movies(Model, UserProfile)-> lists:foldl(fun(Movie, Acc) -> Numerator = dot_product(UserProfile#user_profile.preference, Movie#movie_tfidf.tfidf), Denominator = norm(UserProfile#user_profile.preference) * norm(Movie#movie_tfidf.tfidf), CosineSim = Numerator/Denominator, [#movie_score{id=Movie#movie_tfidf.id, score=CosineSim}|Acc] end, [], Model#model.tf_idfs). %% Return dot-product (scalar) of vector A and B -spec dot_product(tf_vector(), tf_vector()) -> float(). dot_product(A,B)-> ATags = lists:map(fun(Entry) -> Entry#tf_vector_entry.tag_id end, A), BTags = lists:map(fun(Entry) -> Entry#tf_vector_entry.tag_id end, B), AllTags = ATags ++ BTags, lists:sum(lists:map(fun(Tag) -> case lists:keyfind(Tag, #tf_vector_entry.tag_id, A) of false -> 0; TFEntryA -> case lists:keyfind(Tag, #tf_vector_entry.tag_id, B) of false -> 0; TFEntryB -> TFEntryA#tf_vector_entry.tf * TFEntryB#tf_vector_entry.tf end end end, AllTags)). %% Return norm of vector -spec norm(tf_vector()) -> float(). norm(A)-> math:sqrt(lists:sum(lists:map(fun(TFE) -> math:pow(TFE#tf_vector_entry.tf, 2) end, A))). %% Return user profile given user-id and model and dataset -spec user_profile(user_id(), #model{}, #dataset{}) -> #user_profile{}. 
user_profile(User, Model, Dataset)-> Ratings = lists:filter(fun(Rating) -> Rating#user_id_movie_rating.id =:= User end, Dataset#dataset.user_ratings), AvgRating = average_rating(Ratings), lists:foldl(fun(Rating, Acc)-> RatingVal = Rating#user_id_movie_rating.rating, Multiplier = RatingVal - AvgRating, MovieId = Rating#user_id_movie_rating.movie, MovieModel = lists:keyfind(MovieId, #movie_tfidf.id, Model#model.tf_idfs), update_user_profile_vector(MovieModel#movie_tfidf.tfidf, Multiplier, Acc) end, #user_profile{id=User, preference=[]},Ratings). %% Update user-profile with a moviemodel based on his/hers rating -spec update_user_profile_vector(tf_vector(), float(), #user_profile{}) -> #user_profile{}. update_user_profile_vector(MovieModel, Multiplier, Profile)-> lists:foldl(fun(TFIDFVector, Acc) -> TagId = TFIDFVector#tf_vector_entry.tag_id, TF = TFIDFVector#tf_vector_entry.tf, Increment = TF*Multiplier, case lists:keyfind(TagId, #tf_vector_entry.tag_id, Acc#user_profile.preference) of false -> #user_profile{ id=Acc#user_profile.id, preference=[#tf_vector_entry{tag_id=TagId, tf=Increment}|Acc#user_profile.preference]}; Old -> Pref = lists:keyreplace(TagId, #tf_vector_entry.tag_id, Acc#user_profile.preference, #tf_vector_entry{ tag_id=TagId, tf=Increment+Old#tf_vector_entry.tf}), #user_profile{ id=Acc#user_profile.id, preference=Pref } end end, Profile, MovieModel). %% Calculate average rating -spec average_rating(user_id_movie_ratings()) -> float(). average_rating(Ratings)-> Sum = lists:sum(lists:map(fun(Rating) -> Rating#user_id_movie_rating.rating end, Ratings)), Len = length(Ratings), case Len > 0 of true -> Sum/Len; false -> Sum end. %% Calculate the TFIDF model given a dataset. %% Essentially it will calculate the TF-vector for each movie weighted with the IDF for each tag. -spec tfidf_model(#dataset{}) -> #model{}. 
tfidf_model(Dataset)-> TagIds = tag_ids(Dataset#dataset.movie_tags), Movies = sets:to_list(sets:from_list(lists:map(fun(M) -> M#movie_id_title.id end, Dataset#dataset.movie_titles))), MoviesTF = lists:map(fun(M) -> #movie_tfidf{id = M, tfidf = tf(TagIds, Dataset#dataset.movie_tags, M)} end, Movies), DocTF = doc_tf(TagIds, MoviesTF), N = length(Dataset#dataset.movie_titles), GlobalIDF = idf(DocTF, N), WeightedMoviesTags = tfidf(GlobalIDF, MoviesTF), NormalizedWeightedMoviesTags = normalize(WeightedMoviesTags), #model{tag_ids = TagIds, tf_idfs = NormalizedWeightedMoviesTags}. %% Normalizes list of movie vectors -spec normalize(movie_tfidfs()) -> tf_vector(). normalize(MoviesIDF)-> lists:map(fun(M) -> Magnitude = norm(M#movie_tfidf.tfidf), NormalizedTFIDFVector = lists:map(fun(TFE) -> #tf_vector_entry{ tag_id=TFE#tf_vector_entry.tag_id, tf=TFE#tf_vector_entry.tf/Magnitude} end, M#movie_tfidf.tfidf), #movie_tfidf{id=M#movie_tfidf.id, tfidf=NormalizedTFIDFVector} end, MoviesIDF). %% Calculates idf given a Global TF-vector and the total number of movies -spec idf(tf_vector(), integer())-> tf_vector(). idf(TFVector, Size)-> lists:map(fun(TFEntry) -> N = Size, DocTF = TFEntry#tf_vector_entry.tf, IDF = math:log10(N/(1+DocTF)), #tf_vector_entry{tag_id=TFEntry#tf_vector_entry.tag_id, tf=IDF} end, TFVector). %% Weights the TF for each movie and tag wwith the corresponding tag-idf -spec tfidf(tf_vector(), movie_tfidfs())-> tf_vector(). tfidf(GlobalIDF, MoviesTF)-> lists:map(fun(M) -> TFVector = M#movie_tfidf.tfidf, TFIDFVector = lists:map(fun(TFE) -> TF = TFE#tf_vector_entry.tf, TagID = TFE#tf_vector_entry.tag_id, IDF = lists:keyfind(TFE#tf_vector_entry.tag_id, #tf_vector_entry.tag_id, GlobalIDF), TFIDF = TF*IDF#tf_vector_entry.tf, #tf_vector_entry{tag_id=TagID, tf=TFIDF} end, TFVector), #movie_tfidf{id=M#movie_tfidf.id, tfidf=TFIDFVector} end, MoviesTF). 
%% Calculates the vector with number of documents with tf(t,d) != 0 for each tag -spec doc_tf(tag_ids(), movie_tfidfs()) -> tf_vector(). doc_tf(TagIds, MoviesTF)-> lists:map(fun(TagId) -> DocsMatch = lists:foldl(fun(MTF, Acc) -> case lists:keyfind(TagId#tag_id.id, #tf_vector_entry.tag_id, MTF#movie_tfidf.tfidf) of false -> Acc; _ -> Acc + 1 end end, 0, MoviesTF), #tf_vector_entry{tag_id=TagId#tag_id.id, tf=DocsMatch} end, TagIds). %% Calculates the TF for each tag -spec tf(tag_ids(), movie_id_tags()) -> tf_vector(). tf(TagIds, MovieTags)-> lists:foldl(fun(MT, Acc) -> TagId = lists:keyfind(MT#movie_id_tag.tag, #tag_id.tag, TagIds), case lists:keyfind(TagId#tag_id.id, #tf_vector_entry.tag_id, Acc) of false -> [#tf_vector_entry{tag_id=TagId#tag_id.id, tf=1}|Acc]; MT0 -> lists:keyreplace(TagId#tag_id.id, #tf_vector_entry.tag_id, Acc, #tf_vector_entry{ tag_id=TagId#tag_id.id, tf=MT0#tf_vector_entry.tf+1 }) end end, [], MovieTags). %% Calculates the TF-vector for a given movie -spec tf(tag_ids(), movie_id_tags(), movie_id()) -> tf_vector(). tf(TagIds, MovieTags, Movie)-> FilteredTags = lists:filter(fun(MT) -> MT#movie_id_tag.id =:= Movie end, MovieTags), tf(TagIds, FilteredTags). %% Maps string-tags to numeric identifiers -spec tag_ids(movie_id_tags()) -> tag_ids(). tag_ids(MovieTags)-> Tags = sets:to_list(sets:from_list(lists:map(fun(MovieTag) -> MovieTag#movie_id_tag.tag end, MovieTags))), {TagIds, _} = lists:foldl(fun(Tag, {Acc, IdAcc}) -> {[#tag_id{tag=Tag, id=IdAcc+1}|Acc], IdAcc+1} end, {[], length(Tags)}, Tags), TagIds. %% Reads and parses the data -spec read_data() -> #dataset{}. 
read_data()-> io:format("Reading data...~n"), MovieTags = movie_tags(), io:format("MovieTags read and parsed ~n"), MovieTitles = movie_titles(), io:format("MovieTitles read and parsed ~n"), UserRatings = user_ratings(), io:format("UserRatings read and parsed ~n"), UserNames = user_names(), io:format("UserNames read and parsed ~n"), #dataset{ movie_tags=MovieTags, movie_titles=MovieTitles, user_ratings=UserRatings, user_names=UserNames }. %% Reads and parses data/users.csv -spec user_names() -> user_id_names(). user_names()-> ParsedCsv = parse_csv(read_lines("data/users.csv")), lists:map(fun([Id,Name]) -> {IdInt, _} = string:to_integer(Id), #user_id_name{ id=IdInt, name = Name } end, ParsedCsv). %% Reads and parses data/ratings.csv -spec user_ratings() -> user_id_movie_ratings(). user_ratings()-> ParsedCsv = parse_csv(read_lines("data/ratings.csv")), lists:map(fun([Id,MovieId,Rating]) -> {IdInt, _} = string:to_integer(Id), {MovieIdInt, _} = string:to_integer(MovieId), {RatingFloat, _} = string:to_float(Rating), #user_id_movie_rating{ id=IdInt, movie=MovieIdInt, rating=RatingFloat } end, ParsedCsv). %% Reads and parses data/movie-titles.csv -spec movie_titles() -> movie_id_titles(). movie_titles()-> ParsedCsv = parse_csv(read_lines("data/movie-titles.csv")), lists:map(fun([Id,Title]) -> {IdInt, _} = string:to_integer(Id), #movie_id_title{ id=IdInt, title=Title } end, ParsedCsv). %% Reads and parses data/movie-tags.csv -spec movie_tags() -> movie_id_tags(). movie_tags()-> ParsedCsv = parse_csv(read_lines("data/movie-tags.csv")), lists:map(fun([Id,Tag]) -> {IdInt, _} = string:to_integer(Id), #movie_id_tag{ id=IdInt, tag=Tag } end, ParsedCsv). %% Reads input file into list of lines -spec read_lines(FileName::string())-> ListOfLines :: list(). read_lines(FileName)-> {ok, Binary} = file:read_file(FileName), string:tokens(erlang:binary_to_list(Binary), "\n"). %% Parses list of lines into feature-vectors -spec parse_csv(FileName::list())-> ParsedCsv :: parsed_csv(). 
parse_csv(Lines)-> lists:map(fun(Line) -> string:tokens(Line, ",") end, Lines).
erl_content_based_recommendation/erl_content_based_recommendation.erl
0.654453
0.671498
erl_content_based_recommendation.erl
starcoder
-module(zset). -record(zset, {map :: #{}, tree :: gb_trees:tree()}). -export_type([zset/2, result/2, iterator/2, next/2]). -type result(K, V) :: {K, integer(), V}. -opaque zset(K, V) :: #zset{map::#{K => integer()}, tree::gb_trees:tree({integer(), K}, V)}. -opaque iterator(K, V) :: gb_trees:iter({integer(), K}, V). -opaque next(K, V) :: {{integer(), K}, V, iterator(K, V)}. -export([new/0, enter/4, find/2, delete/2]). -export([to_list/1, from_list/1]). -export([size/1, fold/3, range/3, page/3, top/2, top/3]). %% @doc Create a new empty zset. -spec new() -> #zset{}. new() -> #zset{map = #{}, tree = gb_trees:empty()}. %% @doc Enter the `Member' with the `Value', rated by `Score' into the `ZSet'. -spec enter(Member :: K, integer(), Value :: V, zset(K, V)) -> zset(K, V). enter(Member, Score, Value, ZSet = #zset{map=Map, tree=Tree}) -> case maps:find(Member, Map) of error -> Map1 = maps:put(Member, Score, Map), Tree1 = gb_trees:enter({Score, {v, Member}}, Value, Tree), ZSet#zset{map=Map1, tree =Tree1}; {ok, PrevScore} -> Map1 = maps:put(Member, Score, Map), Tree1 = gb_trees:delete({PrevScore, {v, Member}}, Tree), Tree2 = gb_trees:enter({Score, {v, Member}}, Value, Tree1), ZSet#zset{map=Map1, tree=Tree2} end. %% @doc Delete the `Member' from the `ZSet'. -spec delete(Member :: K, zset(K, term())) -> zset(K, term()). delete(Member, ZSet=#zset{map=Map, tree=Tree}) -> case maps:find(Member, Map) of error -> ZSet; {ok, Score} -> Map1 = maps:remove(Member, Map), Tree1 = gb_trees:delete_any({Score, {v, Member}}, Tree), ZSet#zset{map=Map1, tree=Tree1} end. %% @doc Get the size of the `ZSet'. -spec size(zset(term(), term())) -> non_neg_integer(). size(ZSet) -> maps:size(ZSet#zset.map). %% @doc Find a `Member' in the `ZSet'. -spec find(Member :: K, zset(K, V)) -> error | result(K, V). find(Member, ZSet = #zset{map=Map}) -> case maps:find(Member, Map) of error -> error; {ok, Score} -> Value = gb_trees:get({Score, {v, Member}}, ZSet#zset.tree), {Member, Score, Value} end. 
%% @doc Fold the `ZSet' using the `Fun' starts with the `Initial'. -spec fold(fun((K, V, term(), term()) -> term()), term(), zset(K, V)) -> term(). fold(Fun, Initial, ZSet) -> I = iterator(ZSet), do_fold(Fun, next(I), Initial). %% @doc Get a list representation of the `ZSet'. -spec to_list(zset(K, V)) -> [result(K, V)]. to_list(ZSet) -> Fun = fun({{Score, {v, Member}}, Value}) -> {Member, Score, Value} end, L = gb_trees:to_list(ZSet#zset.tree), lists:map(Fun, L). %% @doc Convert the `List' into a zset. -spec from_list([result(K, V)]) -> zset(K, V). from_list(List) -> Fun = fun({K, S, V}, Acc) -> enter(K, S, V, Acc) end, lists:foldl(Fun, new(), List). %% @doc Get the range from the `ZSet' between `Start' and `Stop' scores. -spec range(integer(), integer(), zset(K, V)) -> [result(K, V)]. range(Start, Stop, ZSet) -> I = iterator({Start, undefined}, ZSet), lists:reverse(do_range(Stop, next(I), [])). %% @doc Get top `Count' values from the `ZSet' starting from the lowest score. -spec top(non_neg_integer(), zset(K, V)) -> [result(K, V)]. top(Count, ZSet) -> I = iterator(ZSet), lists:reverse(do_top(Count, next(I), [])). %% @doc Get top `Count' values from the `ZSet' starting from the `Score'. -spec top(integer(), non_neg_integer(), zset(K, term())) -> [result(K, term())]. top(Score, Count, ZSet) -> I = iterator({Score, undefined}, ZSet), lists:reverse(do_top(Count, next(I), [])). %% @doc Get `Count' values from the `ZSet' starting from the `Member'. -spec page(K, non_neg_integer(), zset(K, V)) -> [result(K, V)]. page(Member, Count, ZSet=#zset{map=Map}) -> case maps:find(Member, Map) of error -> []; {ok, Score} -> I = iterator({Score, {v, Member}}, ZSet), lists:reverse(do_top(Count, next(I), [])) end. do_fold(_, none, Acc) -> Acc; do_fold(Fun, {{Score, {v, Member}}, Value, Iterator}, Acc) -> Acc1 = Fun(Member, Score, Value, Acc), do_fold(Fun, next(Iterator), Acc1). 
do_top(0, _, Acc) -> Acc; do_top(_Count, none, Acc) -> Acc; do_top(Count, {{Score, {v, Member}}, Value, Iterator}, Acc) -> do_top(Count - 1, next(Iterator), [{Member, Score, Value}| Acc]). do_range(_Stop, none, Acc) -> Acc; do_range(Stop, {{Score, {v, Member}}, Value, Iter}, Acc) when Score =< Stop -> do_range(Stop, next(Iter), [{Member, Score, Value} | Acc]); do_range(_, _, Acc) -> Acc. -spec iterator(zset(K, V)) -> iterator(K, V). iterator(ZSet) -> gb_trees:iterator(ZSet#zset.tree). -spec iterator({integer(), K}, zset(K, V)) -> iterator(K, V). iterator(Start, #zset{tree=T}) -> gb_trees:iterator_from(Start, T). -spec next(gb_trees:iter()) -> none | {{integer(), any()}, any(), gb_trees:iter()}. next(Iter1) -> gb_trees:next(Iter1).
src/zset.erl
0.594198
0.471467
zset.erl
starcoder
-module(influxdb_uri). -export([ encode/1 ]). -export([ encode_query/1, encode_component/1, encode_component_plus/1, encode_component/2, is_char_reserved/1, is_char_unreserved/1 ]). -export_type([ uri/0, query/0 ]). %% == Encoding URI == -spec encode(uri()) -> binary(). -type uri() :: #{ scheme := iodata(), authority => iodata(), userinfo => iodata(), host => iodata(), port => non_neg_integer(), path := iodata(), query => query(), fragment => iodata() }. %% @doc Encode a URI. encode(#{scheme := Scheme, path := Path} = Uri) -> iolist_to_binary([Scheme, $:, encode_authority(Uri), Path, case encode_query(maps:get(query, Uri, [])) of <<>> -> []; EncodedQuery -> [$?, EncodedQuery] end, case maps:get(fragment, Uri, "") of "" -> []; Fragment -> [$#, Fragment] end]). -spec encode_authority(uri()) -> iodata(). encode_authority(#{authority := Authority}) -> ["//", Authority]; encode_authority(#{host := Host} = Uri) -> UserinfoPart = case maps:find(userinfo, Uri) of {ok, Userinfo} -> [Userinfo, $@]; error -> [] end, PortPart = case maps:find(port, Uri) of {ok, Port} -> [$:, integer_to_list(Port)]; error -> [] end, HostPart = case lists:member($:, Host) of true -> [$[, Host, $]]; false -> Host end, ["//", UserinfoPart, HostPart, PortPart]; encode_authority(_) -> <<>>. %% == Encoding data == -spec encode_query(query()) -> binary(). -type query() :: #{query_key() => query_value()} | [{query_key(), query_value()} | query_key()]. -type query_key() :: iodata() | atom(). -type query_value() :: iodata() | boolean(). %% @doc Encode a map or a list of key value pairs into `key1=value1&key2=value2&key3&key4=value4...' where the keys and %% values are encoded using {@link encode_component_plus/1}. 
encode_query(Values) when is_map(Values) -> encode_query(maps:to_list(Values)); encode_query(Values) when is_list(Values) -> Encoded = lists:map(fun ({Key, true}) -> encode_component_plus(Key); ({_Key, false}) -> <<>>; ({Key, Value}) -> [encode_component_plus(Key), $=, encode_component_plus(Value)]; (Key) -> encode_component_plus(Key) end, Values), iolist_to_binary(lists:join($&, Encoded)). -spec encode_component(iodata()) -> binary(). %% @doc Encode a string percent-escaping all but the "unreserved" characters. encode_component(String) -> encode_component(String, fun is_char_unreserved/1). -spec encode_component(String, Pred) -> binary() when String :: atom() | iodata(), Pred :: fun((byte()) -> boolean() | binary()). %% @doc Encode a string percent-escaping based on the given predicate function. %% %% The predicate can return either a boolean, or a binary replacement. Returning a boolean determines whether the %% given byte should be percent escaped or not, returning `true' means that the byte must be left as is, whereas %% returning false will encode the byte as `%XX' where `XX' is the hex representation of the byte. If a binary is %% returned, this binary will be used as replacement for the byte. encode_component(String, Pred) when is_list(String) -> encode_component(unicode:characters_to_binary(String), Pred); encode_component(String, Pred) when is_binary(String) -> << <<(case Pred(C) of true -> <<C>>; false -> H = encode_hex(C bsr 4), L = encode_hex(C band 16#0f), <<$%, H, L>>; Bin when is_binary(Bin) -> Bin end)/binary>> || <<C>> <= String >>; encode_component(String, Pred) when is_atom(String) -> encode_component(atom_to_binary(String, utf8), Pred). -spec encode_component_plus(atom() | iodata()) -> binary(). %% @doc Encode a string percent-escaping all but the "unreserved" characters and converts space characters to `+'. encode_component_plus(String) -> encode_component(String, fun ($\s) -> <<$+>>; (C) -> is_char_unreserved(C) end). 
-spec is_char_reserved(byte()) -> boolean(). %% @doc Checks if the character is a URI "reserved" character according to RFC 3986. %% %% Reserved characters are specified in <a href="http://tools.ietf.org/html/rfc3986#section-2.2">RFC 3986, section 2.2</a>. %% The list of characters in the RFC are defined as `:/?#[]@!$&\'()*+,;='. is_char_reserved(Char) -> lists:member(Char, ":/?#[]@!$&\'()*+,;="). -spec is_char_unreserved(byte()) -> boolean(). %% @doc Checks if the character is a URI "unreserved" character according to RFC 3986. %% %% Unreserved characters are specified in <a href="http://tools.ietf.org/html/rfc3986#section-2.3">RFC 3986, section 2.3</a>. %% The list of characters in the RFC are defined as `0-9' `A-Z' `a-z' as well as `~', `_', `-' and `.'. is_char_unreserved(Char) -> (Char >= $0 andalso Char =< $9) orelse (Char >= $A andalso Char =< $Z) orelse (Char >= $a andalso Char =< $z) orelse lists:member(Char, "~_-."). encode_hex(0) -> $0; encode_hex(1) -> $1; encode_hex(2) -> $2; encode_hex(3) -> $3; encode_hex(4) -> $4; encode_hex(5) -> $5; encode_hex(6) -> $6; encode_hex(7) -> $7; encode_hex(8) -> $8; encode_hex(9) -> $9; encode_hex(10) -> $A; encode_hex(11) -> $B; encode_hex(12) -> $C; encode_hex(13) -> $D; encode_hex(14) -> $E; encode_hex(15) -> $F. -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -define(label(S), unicode:characters_to_list(io_lib:format("~p", [S]))). encode_component_test_() -> Tests = #{ "" => <<>>, "test" => <<"test">>, "hello world" => <<"hello%20world">>, "hello world+" => <<"hello%20world%2B">>, "hello world♥" => <<"hello%20world%E2%99%A5">>, <<0, 1, 255>> => <<"%00%01%FF">>, [<<0>>, $\s] => <<"%00%20">> }, [{?label(Input), ?_assertEqual(Output, encode_component(Input))} || {Input, Output} <- maps:to_list(Tests)]. 
encode_component_plus_test_() -> Tests = #{ "" => <<>>, "test" => <<"test">>, "hello world" => <<"hello+world">>, "hello world+" => <<"hello+world%2B">>, "hello world♥" => <<"hello+world%E2%99%A5">>, <<0, 1, 255>> => <<"%00%01%FF">>, [<<0>>, $\s] => <<"%00+">> }, [{?label(Input), ?_assertEqual(Output, encode_component_plus(Input))} || {Input, Output} <- maps:to_list(Tests)]. -endif.
src/influxdb_uri.erl
0.539469
0.472562
influxdb_uri.erl
starcoder
%%% %%% Asynchronous Game of Life %%% http://en.wikipedia.org/wiki/Conway's_Game_of_Life %%% %%% This implementation preserves the standard game behaviour by %%% introducing a master process which represents a global clock. %%% %%% The game runs for finite number of steps and then stops. %%% %%% Another feature of this implementation is that new processes %%% are spawned for live cells only, therefore it requires less %%% processes than fully async implementation. %%% %%% See also: %%% life.erl - standard game implementation, %%% life_async_grid - fully async implementation. %%% -module(life_async). -author("<NAME> <<EMAIL>>"). %% Published API -export([start/2]). %% Internal functions for process spawning -export([new_cell/3]). %% ------------------------------------------------------------------ %% Game Logic %% ------------------------------------------------------------------ %% @doc Starts the new game and runs Iter iterations. %% Seed is a list of unique cell coordinates. %% %% Blinker example: life_async:start([{3,2},{3,3},{3,4}], 2). %% %% Returns final generation. start(Seed, Iter) -> god(Seed, new_world(), Iter). %% ------------------------------------------------------------------ %% Master process %% ------------------------------------------------------------------ %% @doc Main loop (usually runs in shell process). %% Map is a list of coordinates where new cells to be born. %% World is a hashmap of live cells. Keys are coordinates, %% values are processes. One process per live cell. god(Map, World, 0) -> ok = apocalypse(cells(World)), Map ++ coordinates(World); god(Map, World, N) -> io:format("~p: ~p~n", [N, Map ++ coordinates(World)]), NewWorld = create(Map, World), ok = leave_them_alone(Map), NewMap = schedule_new_cells(NewWorld), god(NewMap, step(NewWorld), N-1). %% @doc When game is over, all cells die. apocalypse([]) -> ok; apocalypse([Cell|Cells]) -> exit(Cell, kill), apocalypse(Cells). %% @doc Make every new cell alive and add it to the World. 
create([], World) -> World;
create([Coord|Cs], World) ->
    %% Note: the spawned cell receives the World as it was BEFORE this
    %% cell was added, so it never sees (or pings) itself.
    Cell = spawn(?MODULE, new_cell, [Coord, World, self()]),
    create(Cs, add(Coord, Cell, World)).

%% @doc Wait for every cell to confirm it is doing ok.
%% Selective receive on the specific coordinate keeps the
%% confirmations ordered per cell.
leave_them_alone([]) -> ok;
leave_them_alone([C|Cs]) ->
    receive
        {created, C} ->
            leave_them_alone(Cs)
    end.

%% @doc It is God's responsibility to decide which cell becomes
%% alive on the next step.
schedule_new_cells(World) ->
    Cells = cells(World),
    ok = ask_for_dead_neighbours(Cells),
    Counters = collect_dead_neighbours_info(Cells),
    new_map(Counters).

%% @doc Ask every cell to check which of its neighbours is dead.
ask_for_dead_neighbours([]) -> ok;
ask_for_dead_neighbours([Cell|Cells]) ->
    Cell ! {dead_neighbours, self()},
    ask_for_dead_neighbours(Cells).

%% @doc Wait for all cells to report about their dead neighbours.
%% Return a dictionary of frequencies, i.e. keys are coordinates
%% of dead cells, values are numbers of reports.
collect_dead_neighbours_info(Cells) ->
    lists:foldl(
      fun(Cell, Acc) ->
              %% Match on the pid in the message tag so each fold step
              %% consumes exactly that cell's report.
              receive
                  {dead_cells, Cell, DeadCells} ->
                      update_counters(DeadCells, Acc)
              end
      end, dict:new(), Cells).

%% Increment the report counter for each dead-cell coordinate.
update_counters([], Counters) -> Counters;
update_counters([C|Cs], Counters) ->
    update_counters(Cs, dict:update_counter(C, 1, Counters)).

%% @doc Select which cells will be alive on the next step.
%% If exactly 3 live cells reported about the same dead cell,
%% it will become alive (standard birth rule).
new_map(Counters) ->
    new_map(dict:to_list(Counters), []).

new_map([], Acc) -> Acc;
new_map([{C,3}|Cs], Acc) -> new_map(Cs, [C|Acc]);
new_map([_|Cs], Acc) -> new_map(Cs, Acc).

%% @doc Runs one step of the game.
%% Let the live cells choose their future, then collect survivals.
step(World) ->
    Cells = cells(World),
    ok = time_to_choose(Cells),
    filter_survivals(Cells, World).

%% Broadcast the "decide now" signal to every live cell.
time_to_choose([]) -> ok;
time_to_choose([Cell|Cells]) ->
    Cell ! {live_or_die, self()},
    time_to_choose(Cells).
%% Remove every cell that reported {dying, ...} from the World.
%% Both receive patterns match on the specific pid, so one message
%% per cell is consumed regardless of arrival order.
filter_survivals(Cells, World) ->
    lists:foldl(
      fun(Cell, Acc) ->
              receive
                  {dying, Cell, Coord} -> remove(Coord, Acc);
                  {survived, Cell, _} -> Acc
              end
      end, World, Cells).

%% ------------------------------------------------------------------
%% Cell process
%% ------------------------------------------------------------------

%% @doc Initial steps of newly born cell: announce itself to all
%% existing cells, learn which of them are neighbours, unblock the
%% master, then enter the main loop.
new_cell(Coord, World, God) ->
    Cells = cells(World),
    ok = hello_world(Cells, Coord),
    Neighbours = know_your_neighbours(Cells, Coord),
    ok = mature(God, Coord),
    cell(Coord, Neighbours).

%% @doc Tell everyone about your existence.
hello_world([], _) -> ok;
hello_world([Cell|Cells], Coord) ->
    Cell ! {ping, self(), Coord},
    hello_world(Cells, Coord).

%% @doc Wait for everyone to respond to see who is your neighbour.
%% Builds a mini-world containing only adjacent live cells.
know_your_neighbours(Cells, Coord) ->
    lists:foldl(
      fun(Cell, Acc) ->
              receive
                  {pong, Cell, NCoord} ->
                      case is_neighbour(Coord, NCoord) of
                          true -> add(NCoord, Cell, Acc);
                          _ -> Acc
                      end
              end
      end, new_world(), Cells).

%% True when the two coordinates are within Chebyshev distance 1.
%% NOTE(review): a coordinate is "neighbour" of itself by this test
%% (Dx = Dy = 0); callers never compare a cell with itself, so this
%% does not surface - confirm if reusing elsewhere.
is_neighbour({X1,Y1}, {X2,Y2}) ->
    Dx = abs(X1 - X2),
    Dy = abs(Y1 - Y2),
    Dx < 2 andalso Dy < 2.

%% @doc Master process is waiting for your maturity.
%% Send a message to unblock it.
mature(God, Coord) ->
    God ! {created, Coord},
    ok.

%% @doc Main loop of live cell is to respond to messages
%% from other cells and from master process.
cell(Coord, Neighbours) ->
    Cells = cells(Neighbours),
    receive
        {ping, Cell, NCoord} ->
            %% Tell about yourself
            Cell ! {pong, self(), Coord},
            %% and check if newcomer is your new neighbour
            case is_neighbour(Coord, NCoord) of
                true -> cell(Coord, add(NCoord, Cell, Neighbours));
                _ -> cell(Coord, Neighbours)
            end;
        {live_or_die, God} ->
            %% To be or not to be - it is actually your choice
            NewState = case population(Neighbours) of
                           2 -> survived;
                           3 -> survived;
                           _ -> dying
                       end,
            %% Tell your neighbours about your choice
            lists:foreach(fun(N) -> N ! {NewState, self(), Coord} end, Cells),
            %% Check which of your neighbours is going to die.
            %% NOTE(review): the {dying, _, _} clause matches a dying
            %% report from ANY neighbour, while the survived clause is
            %% pinned to the current fold element. If messages arrive
            %% out of fold order, an iteration can consume another
            %% cell's dying report and later block on a survived
            %% message that no pattern matches - TODO confirm whether
            %% message ordering guarantees rule this out here.
            NewNeighbours = lists:foldl(
                              fun(Cell, Acc) ->
                                      receive
                                          {dying, _, _} -> Acc;
                                          {survived, Cell, NCoord} ->
                                              add(NCoord, Cell, Acc)
                                      end
                              end, new_world(), Cells),
            %% Notify master process about your choice
            God ! {NewState, self(), Coord},
            case NewState of
                survived -> cell(Coord, NewNeighbours);
                dying -> ok
            end;
        {dead_neighbours, God} ->
            %% help master process to conduct a census
            God ! {dead_cells, self(), dead_cells(Coord, Neighbours)},
            cell(Coord, Neighbours)
    end.

%% Coordinates adjacent to Coord that are not occupied by a known
%% live neighbour.
dead_cells(Coord, Neighbours) ->
    [N || N <- neighbours(Coord),
          not(lists:member(N, coordinates(Neighbours)))].

%% The eight coordinates surrounding {X, Y}.
neighbours({X, Y}) ->
    [{X + DX, Y + DY} || DX <- [-1, 0, 1],
                         DY <- [-1, 0, 1],
                         {DX, DY} =/= {0, 0}].

%% ------------------------------------------------------------------
%% Data Abstractions
%%
%% Currently gb_trees are used. With Erlang 17 we can use frames.
%% ------------------------------------------------------------------

new_world() -> gb_trees:empty().

population(World) -> gb_trees:size(World).

%% gb_trees:insert/3 crashes on a duplicate key - coordinates are
%% assumed unique (see start/2 contract).
add(Coord, Cell, World) -> gb_trees:insert(Coord, Cell, World).

remove(Coord, World) -> gb_trees:delete(Coord, World).

coordinates(World) -> gb_trees:keys(World).

cells(World) -> gb_trees:values(World).
src/life_async.erl
0.590425
0.676994
life_async.erl
starcoder
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2002-2013. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(beam_dead).

-export([module/2]).

%%% Dead code is code that is executed but has no effect. This
%%% optimization pass either removes dead code or jumps around it,
%%% potentially making it unreachable and a target for the
%%% the beam_jump pass.

-import(lists, [mapfoldl/3,reverse/1]).

%% Entry point of the pass: clean labels, then optimize each function.
%% Lc (label counter) threads through so new labels stay unique.
module({Mod,Exp,Attr,Fs0,_}, _Opts) ->
    {Fs1,Lc1} = beam_clean:clean_labels(Fs0),
    {Fs,Lc} = mapfoldl(fun function/2, Lc1, Fs1),
    %%{Fs,Lc} = {Fs1,Lc1},
    {ok,{Mod,Exp,Attr,Fs,Lc}}.

%% Optimize one function: a forward pass, a backward pass, and a
%% final cleanup moving stray 'move' instructions back into blocks.
function({function,Name,Arity,CLabel,Is0}, Lc0) ->
    try
        Is1 = beam_jump:remove_unused_labels(Is0),

        %% Initialize label information with the code
        %% for the func_info label. Without it, a register
        %% may seem to be live when it is not.
        [{label,L}|FiIs] = Is1,
        D0 = beam_utils:empty_label_index(),
        D = beam_utils:index_label(L, FiIs, D0),

        %% Optimize away dead code.
        {Is2,Lc} = forward(Is1, Lc0),
        Is3 = backward(Is2, D),
        Is = move_move_into_block(Is3, []),
        {{function,Name,Arity,CLabel,Is},Lc}
    catch
        Class:Error ->
            %% NOTE(review): erlang:get_stacktrace/0 was deprecated in
            %% OTP 21 and removed in OTP 24; this code targets an older
            %% OTP, where it is the only way to fetch the stacktrace.
            Stack = erlang:get_stacktrace(),
            %% Report which function failed before re-raising, to aid
            %% debugging of compiler crashes.
            io:fwrite("Function: ~w/~w\n", [Name,Arity]),
            erlang:raise(Class, Error, Stack)
    end.

%% 'move' instructions outside of blocks may thwart the jump optimizer.
%% Move them back into the block.
%% Append a stray 'move' to a preceding block (or start a new block)
%% so that beam_jump sees uninterrupted blocks.
move_move_into_block([{block,Bl0},{move,S,D}|Is], Acc) ->
    Bl = Bl0 ++ [{set,[D],[S],move}],
    move_move_into_block([{block,Bl}|Is], Acc);
move_move_into_block([{move,S,D}|Is], Acc) ->
    Bl = [{set,[D],[S],move}],
    move_move_into_block([{block,Bl}|Is], Acc);
move_move_into_block([I|Is], Acc) ->
    move_move_into_block(Is, [I|Acc]);
move_move_into_block([], Acc) -> reverse(Acc).

%%%
%%% Scan instructions in execution order and remove redundant 'move'
%%% instructions. 'move' instructions are redundant if we know that
%%% the register already contains the value being assigned, as in the
%%% following code:
%%%
%%%           test is_eq_exact SomeLabel Src Dst
%%%           move Src Dst
%%%
%%% or in:
%%%
%%%           test is_nil SomeLabel Dst
%%%           move nil Dst
%%%
%%% or in:
%%%
%%%           select_val Register FailLabel [... Literal => L1...]
%%%                      .
%%%                      .
%%%                      .
%%%   L1:     move Literal Register
%%%
%%% Also add extra labels to help the second backward pass.
%%%

forward(Is, Lc) ->
    forward(Is, #{}, Lc, []).

forward([{move,_,_}=Move|[{label,L}|_]=Is], D, Lc, Acc) ->
    %% move/2 followed by jump/1 is optimized by backward/3.
    forward([Move,{jump,{f,L}}|Is], D, Lc, Acc);
forward([{bif,_,_,_,_}=Bif|[{label,L}|_]=Is], D, Lc, Acc) ->
    %% bif/4 followed by jump/1 is optimized by backward/3.
    forward([Bif,{jump,{f,L}}|Is], D, Lc, Acc);
forward([{block,[]}|Is], D, Lc, Acc) ->
    %% Empty blocks can prevent optimizations.
    forward(Is, D, Lc, Acc);
forward([{select,select_val,Reg,_,List}=I|Is], D0, Lc, Acc) ->
    %% Remember which literal each target label implies for Reg.
    D = update_value_dict(List, Reg, D0),
    forward(Is, D, Lc, [I|Acc]);
forward([{label,Lbl}=LblI,{block,[{set,[Dst],[Lit],move}|BlkIs]}=Blk|Is], D, Lc, Acc) ->
    %% Assumption: The target labels in a select_val/3 instruction
    %% cannot be reached in any other way than through the select_val/3
    %% instruction (i.e. there can be no fallthrough to such label and
    %% it cannot be referenced by, for example, a jump/1 instruction).
    Key = {Lbl,Dst},
    Block = case D of
                #{Key := Lit} -> {block,BlkIs}; %Safe to remove move instruction.
                _ -> Blk                        %Must keep move instruction.
            end,
    forward([Block|Is], D, Lc, [LblI|Acc]);
forward([{label,Lbl}=LblI|[{move,Lit,Dst}|Is1]=Is0], D, Lc, Acc) ->
    %% Assumption: The target labels in a select_val/3 instruction
    %% cannot be reached in any other way than through the select_val/3
    %% instruction (i.e. there can be no fallthrough to such label and
    %% it cannot be referenced by, for example, a jump/1 instruction).
    Is = case maps:find({Lbl,Dst}, D) of
             {ok,Lit} -> Is1; %Safe to remove move instruction.
             _ -> Is0         %Keep move instruction.
         end,
    forward(Is, D, Lc, [LblI|Acc]);
forward([{test,is_eq_exact,_,[Same,Same]}|Is], D, Lc, Acc) ->
    %% Comparing a term with itself always succeeds - drop the test.
    forward(Is, D, Lc, Acc);
forward([{test,is_eq_exact,_,[Dst,Src]}=I,
         {block,[{set,[Dst],[Src],move}|Bl]}|Is], D, Lc, Acc) ->
    %% After a successful equality test, Dst already holds Src.
    forward([I,{block,Bl}|Is], D, Lc, Acc);
forward([{test,is_nil,_,[Dst]}=I,
         {block,[{set,[Dst],[nil],move}|Bl]}|Is], D, Lc, Acc) ->
    forward([I,{block,Bl}|Is], D, Lc, Acc);
forward([{test,is_eq_exact,_,[Dst,Src]}=I,{move,Src,Dst}|Is], D, Lc, Acc) ->
    forward([I|Is], D, Lc, Acc);
forward([{test,is_nil,_,[Dst]}=I,{move,nil,Dst}|Is], D, Lc, Acc) ->
    forward([I|Is], D, Lc, Acc);
forward([{test,_,_,_}=I|Is]=Is0, D, Lc, Acc) ->
    %% Help the second, backward pass by inserting labels after
    %% relational operators so that they can be skipped if they are
    %% known to be true.
    case useful_to_insert_label(Is0) of
        false -> forward(Is, D, Lc, [I|Acc]);
        true -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
    end;
forward([I|Is], D, Lc, Acc) ->
    forward(Is, D, Lc, [I|Acc]);
forward([], _, Lc, Acc) -> {Acc,Lc}.

%% Record Label/Register => Literal mappings implied by a select_val
%% list; a label reached with two different literals is marked
%% 'inconsistent' and never used for optimization.
update_value_dict([Lit,{f,Lbl}|T], Reg, D0) ->
    Key = {Lbl,Reg},
    D = case D0 of
            #{Key := inconsistent} -> D0;
            #{Key := _} -> D0#{Key := inconsistent};
            _ -> D0#{Key => Lit}
        end,
    update_value_dict(T, Reg, D);
update_value_dict([], _, D) -> D.

%% A label is worth inserting only after a relational test that is
%% not already followed by a label.
useful_to_insert_label([_,{label,_}|_]) ->
    false;
useful_to_insert_label([{test,Op,_,_}|_]) ->
    case Op of
        is_lt -> true;
        is_ge -> true;
        is_eq_exact -> true;
        is_ne_exact -> true;
        _ -> false
    end.
%%%
%%% Scan instructions in reverse execution order and try to
%%% shortcut branch instructions.
%%%
%%% For example, in this code:
%%%
%%%             move Literal Register
%%%             jump L1
%%%                  .
%%%                  .
%%%                  .
%%%   L1:       test is_{integer,atom} FailLabel Register
%%%             select_val {x,0} FailLabel [... Literal => L2...]
%%%                  .
%%%                  .
%%%                  .
%%%   L2:       ...
%%%
%%% the 'select_val' instruction will always transfer control to L2,
%%% so we can just as well jump to L2 directly by rewriting the
%%% first part of the sequence like this:
%%%
%%%            move Literal Register
%%%            jump L2
%%%
%%% If register Register is killed at label L2, we can remove the
%%% 'move' instruction, leaving just the 'jump' instruction:
%%%
%%%            jump L2
%%%
%%% These transformations may leave parts of the code unreachable.
%%% The beam_jump pass will remove the unreachable code.

backward(Is, D) ->
    backward(Is, D, []).

backward([{test,is_eq_exact,Fail,[Dst,{integer,Arity}]}=I|
          [{bif,tuple_size,Fail,[Reg],Dst}|Is]=Is0], D, Acc) ->
    %% Provided that Dst is killed following this sequence,
    %% we can rewrite the instructions like this:
    %%
    %% bif tuple_size Fail Reg Dst  ==>  is_tuple Fail Reg
    %% is_eq_exact Fail Dst Integer      test_arity Fail Reg Integer
    %%
    %% (still two instructions, but they they will be combined
    %% to one by the loader).
    case beam_utils:is_killed(Dst, Acc, D) andalso (Arity bsr 32) =:= 0 of
        false ->
            %% Not safe because the register Dst is not killed
            %% (probably cannot not happen in practice) or the arity
            %% does not fit in 32 bits (the loader will fail to load
            %% the module). We must move the first instruction to the
            %% accumulator to avoid an infinite loop.
            backward(Is0, D, [I|Acc]);
        true ->
            %% Safe.
            backward([{test,test_arity,Fail,[Reg,Arity]},
                      {test,is_tuple,Fail,[Reg]}|Is], D, Acc)
    end;
backward([{label,Lbl}=L|Is], D, Acc) ->
    %% Index the code following this label for later code_at lookups.
    backward(Is, beam_utils:index_label(Lbl, Acc, D), [L|Acc]);
backward([{select,select_val,Reg,{f,Fail0},List0}|Is], D, Acc) ->
    %% Shortcut every target in the list as well as the fail label.
    List = shortcut_select_list(List0, Reg, D, []),
    Fail1 = shortcut_label(Fail0, D),
    Fail = shortcut_bs_test(Fail1, Is, D),
    Sel = {select,select_val,Reg,{f,Fail},List},
    backward(Is, D, [Sel|Acc]);
backward([{jump,{f,To0}},{move,Src,Reg}=Move|Is], D, Acc) ->
    %% Try to jump past a select_val on Reg; drop the move entirely
    %% if Reg is dead at the final destination.
    To = shortcut_select_label(To0, Reg, Src, D),
    Jump = {jump,{f,To}},
    case beam_utils:is_killed_at(Reg, To, D) of
        false -> backward([Move|Is], D, [Jump|Acc]);
        true -> backward([Jump|Is], D, Acc)
    end;
backward([{jump,{f,To}}=J|[{bif,Op,_,Ops,Reg}|Is]=Is0], D, Acc) ->
    %% Try to replace a comparison BIF + jump with test + jump
    %% (see replace_comp_op/5); fall back unchanged on failure.
    try replace_comp_op(To, Reg, Op, Ops, D) of
        I -> backward(Is, D, I++Acc)
    catch
        throw:not_possible -> backward(Is0, D, [J|Acc])
    end;
backward([{test,bs_start_match2,F,_,[R,_],Ctxt}=I|Is], D,
         [{test,bs_match_string,F,[Ctxt,Bs]},
          {test,bs_test_tail2,F,[Ctxt,0]}|Acc0]=Acc) ->
    %% A full-binary string match can become a plain equality test
    %% when the match context is not used afterwards.
    case beam_utils:is_killed(Ctxt, Acc0, D) of
        true ->
            Eq = {test,is_eq_exact,F,[R,{literal,Bs}]},
            backward(Is, D, [Eq|Acc0]);
        false ->
            backward(Is, D, [I|Acc])
    end;
backward([{test,bs_start_match2,{f,To0},Live,[Src|_]=Info,Dst}|Is], D, Acc) ->
    To = shortcut_bs_start_match(To0, Src, D),
    I = {test,bs_start_match2,{f,To},Live,Info,Dst},
    backward(Is, D, [I|Acc]);
backward([{test,Op,{f,To0},Ops0}|Is], D, Acc) ->
    To1 = shortcut_bs_test(To0, Is, D),
    To2 = shortcut_label(To1, D),
    To3 = shortcut_rel_op(To2, Op, Ops0, D),

    %% Try to shortcut a repeated test:
    %%
    %%        test Op {f,Fail1} Operands	test Op {f,Fail2} Operands
    %%        . . .		          ==>	...
    %% Fail1: test Op {f,Fail2} Operands	Fail1: test Op {f,Fail2} Operands
    %%
    To = case beam_utils:code_at(To3, D) of
             [{test,Op,{f,To4},Ops}|_] ->
                 case equal_ops(Ops0, Ops) of
                     true -> To4;
                     false -> To3
                 end;
             _Code -> To3
         end,
    I = case Op of
            is_eq_exact -> combine_eqs(To, Ops0, D, Acc);
            _ -> {test,Op,{f,To},Ops0}
        end,
    backward(Is, D, [I|Acc]);
backward([{test,Op,{f,To0},Live,Ops0,Dst}|Is], D, Acc) ->
    To1 = shortcut_bs_test(To0, Is, D),
    To2 = shortcut_label(To1, D),
    %% Try to shortcut a repeated test:
    %%
    %%        test Op {f,Fail1} _ Ops _	test Op {f,Fail2} _ Ops _
    %%        . . .		          ==>	...
    %% Fail1: test Op {f,Fail2} _ Ops _	Fail1: test Op {f,Fail2} _ Ops _
    %%
    To = case beam_utils:code_at(To2, D) of
             [{test,Op,{f,To3},_,Ops,_}|_] ->
                 case equal_ops(Ops0, Ops) of
                     true -> To3;
                     false -> To2
                 end;
             _Code -> To2
         end,
    I = {test,Op,{f,To},Live,Ops0,Dst},
    backward(Is, D, [I|Acc]);
backward([{kill,_}=I|Is], D, [{line,_},Exit|_]=Acc) ->
    %% A kill just before an exit instruction is pointless.
    case beam_jump:is_exit_instruction(Exit) of
        false -> backward(Is, D, [I|Acc]);
        true -> backward(Is, D, Acc)
    end;
backward([I|Is], D, Acc) ->
    backward(Is, D, [I|Acc]);
backward([], _D, Acc) -> Acc.

%% Structural equality of operand lists, ignoring 'anno' entries
%% inside field_flags.
equal_ops([{field_flags,FlA0}|T0], [{field_flags,FlB0}|T1]) ->
    FlA = lists:keydelete(anno, 1, FlA0),
    FlB = lists:keydelete(anno, 1, FlB0),
    FlA =:= FlB andalso equal_ops(T0, T1);
equal_ops([Op|T0], [Op|T1]) ->
    equal_ops(T0, T1);
equal_ops([], []) -> true;
equal_ops(_, _) -> false.

%% Shortcut each {Literal, Label} pair in a select_val list.
shortcut_select_list([Lit,{f,To0}|T], Reg, D, Acc) ->
    To = shortcut_select_label(To0, Reg, Lit, D),
    shortcut_select_list(T, Reg, D, [{f,To},Lit|Acc]);
shortcut_select_list([], _, _, Acc) -> reverse(Acc).

%% Follow chains of unconditional jumps to their final target.
shortcut_label(To0, D) ->
    case beam_utils:code_at(To0, D) of
        [{jump,{f,To}}|_] -> shortcut_label(To, D);
        _ -> To0
    end.

%% Arriving at To via a select_val arm means Reg =:= Lit; expressed
%% as "is_ne_exact would fail", which shortcut_rel_op understands.
shortcut_select_label(To, Reg, Lit, D) ->
    shortcut_rel_op(To, is_ne_exact, [Reg,Lit], D).

%% Replace a comparison operator with a test instruction and a jump.
%% For example, if we have this code:
%%
%%  	  bif '=:=' Fail Src1 Src2 {x,0}
%%    	  jump L1
%%            .
%%            .
%%            .
%%   L1:     select_val {x,0} FailLabel [... true => L2..., ...false => L3...]
%%
%% the first two instructions can be replaced with
%%
%%	  test is_eq_exact L3 Src1 Src2
%%	  jump L2
%%
%% provided that {x,0} is killed at both L2 and L3.

replace_comp_op(To, Reg, Op, Ops, D) ->
    %% Find where the select_val sends 'false' and 'true'; throws
    %% not_possible (via comp_op_find_shortcut) when either fails.
    False = comp_op_find_shortcut(To, Reg, {atom,false}, D),
    True = comp_op_find_shortcut(To, Reg, {atom,true}, D),
    [bif_to_test(Op, Ops, False),{jump,{f,True}}].

%% Resolve the label the select_val would take for Val; only usable
%% when the label actually changed and Reg is dead there.
comp_op_find_shortcut(To0, Reg, Val, D) ->
    case shortcut_select_label(To0, Reg, Val, D) of
        To0 -> not_possible();
        To ->
            case beam_utils:is_killed_at(Reg, To, D) of
                false -> not_possible();
                true -> To
            end
    end.

%% Convert a comparison BIF to the equivalent test instruction,
%% translating any failure into not_possible.
bif_to_test(Name, Args, Fail) ->
    try
        beam_utils:bif_to_test(Name, Args, {f,Fail})
    catch
        error:_ -> not_possible()
    end.

not_possible() -> throw(not_possible).

%% combine_eqs(To, Operands, Acc) -> Instruction.
%%  Combine two is_eq_exact instructions or (an is_eq_exact
%%  instruction and a select_val instruction) to a select_val
%%  instruction if possible.
%%
%%  Example:
%%
%%      is_eq_exact F1 Reg Lit1		    select_val Reg F2 [ Lit1 L1
%%  L1:               .			                       Lit2 L2 ]
%%                    .
%%                    .	 	       ==>
%%                    .
%%  F1:   is_eq_exact F2 Reg Lit2	F1: is_eq_exact F2 Reg Lit2
%%  L2:   ....				L2:
%%
combine_eqs(To, [Reg,{Type,_}=Lit1]=Ops, D, [{label,L1}|_])
  when Type =:= atom; Type =:= integer ->
    case beam_utils:code_at(To, D) of
        [{test,is_eq_exact,{f,F2},[Reg,{Type,_}=Lit2]},
         {label,L2}|_] when Lit1 =/= Lit2 ->
            {select,select_val,Reg,{f,F2},[Lit1,{f,L1},Lit2,{f,L2}]};
        [{select,select_val,Reg,{f,F2},[{Type,_}|_]=List0}|_] ->
            %% Prepend our literal after removing any duplicate entry.
            List = remove_from_list(Lit1, List0),
            {select,select_val,Reg,{f,F2},[Lit1,{f,L1}|List]};
        _Is ->
            {test,is_eq_exact,{f,To},Ops}
    end;
combine_eqs(To, Ops, _D, _Acc) ->
    {test,is_eq_exact,{f,To},Ops}.

%% Remove a {Literal, Label} pair from a select_val list.
remove_from_list(Lit, [Lit,{f,_}|T]) ->
    T;
remove_from_list(Lit, [Val,{f,_}=Fail|T]) ->
    [Val,Fail|remove_from_list(Lit, T)];
remove_from_list(_, []) -> [].
%% shortcut_bs_test(TargetLabel, ReversedInstructions, D) -> TargetLabel'
%%  Try to shortcut the failure label for bit syntax matching.

shortcut_bs_test(To, Is, D) ->
    shortcut_bs_test_1(beam_utils:code_at(To, D), Is, To, D).

shortcut_bs_test_1([{bs_restore2,Reg,SavePoint},
                    {label,_},
                    {test,bs_test_tail2,{f,To},[_,TailBits]}|_],
                   PrevIs, To0, D) ->
    case count_bits_matched(PrevIs, {Reg,SavePoint}, 0) of
        Bits when Bits > TailBits ->
            %% This instruction will fail. We know because a restore has been
            %% done from the previous point SavePoint in the binary, and we
            %% also know that the binary contains at least Bits bits from
            %% SavePoint.
            %%
            %% Since we will skip a bs_restore2 if we shortcut to label To,
            %% we must now make sure that code at To does not depend on
            %% the position in the context in any way.
            case shortcut_bs_pos_used(To, Reg, D) of
                false -> To;
                true -> To0
            end;
        _Bits ->
            To0
    end;
shortcut_bs_test_1([_|_], _, To, _) -> To.

%% count_bits_matched(ReversedInstructions, SavePoint, Bits) -> Bits'
%%  Given a reversed instruction stream, determine the minimum number
%%  of bits that will be matched by bit syntax instructions up to the
%%  given save point.

count_bits_matched([{test,bs_get_utf8,{f,_},_,_,_}|Is], SavePoint, Bits) ->
    %% UTF-8 consumes at least 8 bits (one byte).
    count_bits_matched(Is, SavePoint, Bits+8);
count_bits_matched([{test,bs_get_utf16,{f,_},_,_,_}|Is], SavePoint, Bits) ->
    count_bits_matched(Is, SavePoint, Bits+16);
count_bits_matched([{test,bs_get_utf32,{f,_},_,_,_}|Is], SavePoint, Bits) ->
    count_bits_matched(Is, SavePoint, Bits+32);
count_bits_matched([{test,_,_,_,[_,Sz,U,{field_flags,_}],_}|Is], SavePoint, Bits) ->
    case Sz of
        {integer,N} -> count_bits_matched(Is, SavePoint, Bits+N*U);
        _ ->
            %% Variable size - cannot add to the minimum.
            count_bits_matched(Is, SavePoint, Bits)
    end;
count_bits_matched([{test,bs_match_string,_,[_,Bs]}|Is], SavePoint, Bits) ->
    count_bits_matched(Is, SavePoint, Bits+bit_size(Bs));
count_bits_matched([{test,_,_,_}|Is], SavePoint, Bits) ->
    count_bits_matched(Is, SavePoint, Bits);
count_bits_matched([{bs_save2,Reg,SavePoint}|_], {Reg,SavePoint}, Bits) ->
    %% The save point we are looking for - we are done.
    Bits;
count_bits_matched([_|_], _, Bits) -> Bits.

%% True when the code at To may depend on the match position in Reg.
shortcut_bs_pos_used(To, Reg, D) ->
    shortcut_bs_pos_used_1(beam_utils:code_at(To, D), Reg, D).

shortcut_bs_pos_used_1([{bs_context_to_binary,Reg}|_], Reg, _) ->
    false;
shortcut_bs_pos_used_1(Is, Reg, D) ->
    not beam_utils:is_killed(Reg, Is, D).

%% shortcut_bs_start_match(TargetLabel, Reg) -> TargetLabel
%%  A failing bs_start_match2 instruction means that the source (Reg)
%%  cannot be a binary. That means that it is safe to skip
%%  bs_context_to_binary instructions operating on Reg, and
%%  bs_start_match2 instructions operating on Reg.

shortcut_bs_start_match(To, Reg, D) ->
    shortcut_bs_start_match_1(beam_utils:code_at(To, D), Reg, To, D).
%% Walk the failure path, skipping instructions that are no-ops once
%% we know Reg is not a binary (see shortcut_bs_start_match/3).
shortcut_bs_start_match_1([{bs_context_to_binary,Reg}|Is], Reg, To, D) ->
    shortcut_bs_start_match_1(Is, Reg, To, D);
shortcut_bs_start_match_1([{jump,{f,To}}|_], Reg, _, D) ->
    Code = beam_utils:code_at(To, D),
    shortcut_bs_start_match_1(Code, Reg, To, D);
shortcut_bs_start_match_1([{test,bs_start_match2,{f,To},_,[Reg|_],_}|_], Reg, _, D) ->
    Code = beam_utils:code_at(To, D),
    shortcut_bs_start_match_1(Code, Reg, To, D);
shortcut_bs_start_match_1(_, _, To, _) ->
    To.

%% shortcut_rel_op(FailLabel, Operator, [Operand], D) -> FailLabel'
%%  Try to shortcut the given test instruction. Example:
%%
%%      is_ge L1 {x,0} 48
%%      .
%%      .
%%      .
%% L1:  is_ge L2 {x,0} 65
%%
%%  The first test instruction can be rewritten to "is_ge L2 {x,0} 48"
%%  since the instruction at L1 will also fail.
%%
%%  If there are instructions between L1 and the other test instruction
%%  it may still be possible to do the shortcut. For example:
%%
%% L1:  is_eq_exact L3 {x,0} 92
%%      is_ge L2 {x,0} 65
%%
%%  Since the first test instruction failed, we know that {x,0} must
%%  be less than 48; therefore, we know that {x,0} cannot be equal to
%%  92 and the jump to L3 cannot happen.

shortcut_rel_op(To, Op, Ops, D) ->
    %% Taking the failure branch means the NEGATION of the test holds;
    %% propagate that fact along the failure path.
    case normalize_op({test,Op,{f,To},Ops}) of
        {{NormOp,A,B},_} ->
            Normalized = {negate_op(NormOp),A,B},
            shortcut_rel_op_fp(To, Normalized, D);
        {_,_} ->
            To;
        error ->
            To
    end.

%% Repeatedly shortcut until no further label improvement is found.
shortcut_rel_op_fp(To0, Normalized, D) ->
    Code = beam_utils:code_at(To0, D),
    case shortcut_any_label(Code, Normalized) of
        error -> To0;
        To -> shortcut_rel_op_fp(To, Normalized, D)
    end.

%% shortcut_any_label([Instruction], PrevCondition) -> FailLabel | error
%%  Using PrevCondition (a previous condition known to be true),
%%  try to shortcut to another failure label.
shortcut_any_label([{jump,{f,Lbl}}|_], _Prev) ->
    Lbl;
shortcut_any_label([{label,Lbl}|_], _Prev) ->
    Lbl;
shortcut_any_label([{select,select_val,R,{f,Fail},L}|_], Prev) ->
    shortcut_selectval(L, R, Fail, Prev);
shortcut_any_label([I|Is], Prev) ->
    case normalize_op(I) of
        error ->
            error;
        {Normalized,Fail} ->
            %% We have a relational operator.
            case will_succeed(Prev, Normalized) of
                no ->
                    %% This test instruction will always branch
                    %% to Fail.
                    Fail;
                yes ->
                    %% This test instruction will never branch,
                    %% so we will look at the next instruction.
                    shortcut_any_label(Is, Prev);
                maybe ->
                    %% May or may not branch. From now on, we can only
                    %% shortcut to the this specific failure label
                    %% Fail.
                    shortcut_specific_label(Is, Fail, Prev)
            end
    end.

%% shortcut_specific_label([Instruction], FailLabel, PrevCondition) ->
%%       FailLabel | error
%%  We have previously encountered a test instruction that may or
%%  may not branch to FailLabel. Therefore we are only allowed
%%  to do the shortcut to the same fail label (FailLabel).

shortcut_specific_label([{label,_}|Is], Fail, Prev) ->
    shortcut_specific_label(Is, Fail, Prev);
shortcut_specific_label([{select,select_val,R,{f,F},L}|_], Fail, Prev) ->
    case shortcut_selectval(L, R, F, Prev) of
        Fail -> Fail;
        _ -> error
    end;
shortcut_specific_label([I|Is], Fail, Prev) ->
    case normalize_op(I) of
        error ->
            error;
        {Normalized,Fail} ->
            %% Note: Fail here is matched against the outer Fail -
            %% this clause only fires for tests with the SAME label.
            case will_succeed(Prev, Normalized) of
                no ->
                    %% Will branch to FailLabel.
                    Fail;
                yes ->
                    %% Will definitely never branch.
                    shortcut_specific_label(Is, Fail, Prev);
                maybe ->
                    %% May branch, but still OK since it will branch
                    %% to FailLabel.
                    shortcut_specific_label(Is, Fail, Prev)
            end;
        {Normalized,_} ->
            %% This test instruction will branch to a different
            %% fail label, if it branches at all.
            case will_succeed(Prev, Normalized) of
                yes ->
                    %% Still OK, since the branch will never be
                    %% taken.
                    shortcut_specific_label(Is, Fail, Prev);
                no ->
                    %% Give up. The branch will definitely be taken
                    %% to a different fail label.
                    error;
                maybe ->
                    %% Give up. If the branch is taken, it will be
                    %% to a different fail label.
                    error
            end
    end.

%% shortcut_selectval(List, Reg, Fail, PrevCond) -> FailLabel | error
%%  Try to shortcut a selectval instruction. A selectval instruction
%%  is equivalent to the following instruction sequence:
%%
%%      is_ne_exact L1 Reg Value1
%%            .
%%            .
%%            .
%%      is_ne_exact LN Reg ValueN
%%      jump DefaultFailLabel
%%
shortcut_selectval([Val,{f,Lbl}|T], R, Fail, Prev) ->
    case will_succeed(Prev, {'=/=',R,get_literal(Val)}) of
        yes -> shortcut_selectval(T, R, Fail, Prev);
        no -> Lbl;
        maybe -> error
    end;
shortcut_selectval([], _, Fail, _) -> Fail.

%% will_succeed(PrevCondition, Condition) -> yes | no | maybe
%%  PrevCondition is a condition known to be true. This function
%%  will tell whether Condition will succeed.

will_succeed({Op1,Reg,A}, {Op2,Reg,B}) ->
    will_succeed_1(Op1, A, Op2, B);
will_succeed({'=:=',Reg,{literal,A}}, {TypeTest,Reg}) ->
    %% Evaluate the type test (e.g. is_atom) directly on the literal.
    case erlang:TypeTest(A) of
        false -> no;
        true -> yes
    end;
will_succeed({_,_,_}, maybe) ->
    maybe;
will_succeed({_,_,_}, Test) when is_tuple(Test) ->
    maybe.
%% Decide whether a second relational test on the same register must
%% succeed (yes), must fail (no), or cannot be determined (maybe),
%% given a condition already known to hold. Known is the literal from
%% the established condition; New is the literal from the test under
%% consideration. Clauses without a matching guard fall through to
%% the catch-all, which answers 'maybe' (no conclusion possible).
will_succeed_1('=:=', Known, '<', New) ->
    case New =< Known of true -> no; false -> yes end;
will_succeed_1('=:=', Known, '=<', New) ->
    case New < Known of true -> no; false -> yes end;
will_succeed_1('=:=', Known, '=:=', New) ->
    case Known =:= New of true -> yes; false -> no end;
will_succeed_1('=:=', Known, '=/=', New) ->
    case Known =:= New of true -> no; false -> yes end;
will_succeed_1('=:=', Known, '>=', New) ->
    case New > Known of true -> no; false -> yes end;
will_succeed_1('=:=', Known, '>', New) ->
    case New >= Known of true -> no; false -> yes end;
will_succeed_1('=/=', Known, '=/=', New) when Known =:= New -> yes;
will_succeed_1('=/=', Known, '=:=', New) when Known =:= New -> no;
will_succeed_1('<', Known, '=:=', New) when New >= Known -> no;
will_succeed_1('<', Known, '=/=', New) when New >= Known -> yes;
will_succeed_1('<', Known, '<', New) when New >= Known -> yes;
will_succeed_1('<', Known, '=<', New) when New > Known -> yes;
will_succeed_1('<', Known, '>=', New) when New > Known -> no;
will_succeed_1('<', Known, '>', New) when New >= Known -> no;
will_succeed_1('=<', Known, '=:=', New) when New > Known -> no;
will_succeed_1('=<', Known, '=/=', New) when New > Known -> yes;
will_succeed_1('=<', Known, '<', New) when New > Known -> yes;
will_succeed_1('=<', Known, '=<', New) when New >= Known -> yes;
will_succeed_1('=<', Known, '>=', New) when New > Known -> no;
will_succeed_1('=<', Known, '>', New) when New >= Known -> no;
will_succeed_1('>=', Known, '=:=', New) when New < Known -> no;
will_succeed_1('>=', Known, '=/=', New) when New < Known -> yes;
will_succeed_1('>=', Known, '<', New) when New =< Known -> no;
will_succeed_1('>=', Known, '=<', New) when New < Known -> no;
will_succeed_1('>=', Known, '>=', New) when New =< Known -> yes;
will_succeed_1('>=', Known, '>', New) when New < Known -> yes;
will_succeed_1('>', Known, '=:=', New) when New =< Known -> no;
will_succeed_1('>', Known, '=/=', New) when New =< Known -> yes;
will_succeed_1('>', Known, '<', New) when New =< Known -> no;
will_succeed_1('>', Known, '=<', New) when New < Known -> no;
will_succeed_1('>', Known, '>=', New) when New =< Known -> yes;
will_succeed_1('>', Known, '>', New) when New < Known -> yes;
will_succeed_1(_, _, _, _) -> maybe.
%% normalize_op(Instruction) -> {Normalized,FailLabel} | error
%%      Normalized = {Operator,Register,Literal} |
%%                   {TypeTest,Register} |
%%                   maybe
%%      Operation = '<' | '=<' | '=:=' | '=/=' | '>=' | '>'
%%      TypeTest = is_atom | is_integer ...
%%      Literal = {literal,Term}
%%
%%  Normalize a relational operator to facilitate further
%%  comparisons between operators. Always make the register
%%  operand the first operand. Thus the following instruction:
%%
%%    {test,is_ge,{f,99},{integer,13},{x,0}}
%%
%%  will be normalized to:
%%
%%    {'=<',{x,0},{literal,13}}
%%
%%  NOTE: Bit syntax test instructions are scary. They may change the
%%  state of match contexts and update registers, so we don't dare
%%  mess with them.

normalize_op({test,is_ge,{f,Fail},Ops}) ->
    normalize_op_1('>=', Ops, Fail);
normalize_op({test,is_lt,{f,Fail},Ops}) ->
    normalize_op_1('<', Ops, Fail);
normalize_op({test,is_eq_exact,{f,Fail},Ops}) ->
    normalize_op_1('=:=', Ops, Fail);
normalize_op({test,is_ne_exact,{f,Fail},Ops}) ->
    normalize_op_1('=/=', Ops, Fail);
normalize_op({test,is_nil,{f,Fail},[R]}) ->
    %% is_nil R is the same as R =:= [].
    normalize_op_1('=:=', [R,nil], Fail);
normalize_op({test,Op,{f,Fail},[R]}) ->
    case erl_internal:new_type_test(Op, 1) of
        true -> {{Op,R},Fail};
        false -> {maybe,Fail}
    end;
normalize_op({test,_,{f,Fail},_}=I) ->
    %% Pure tests may branch but have no side effects; impure
    %% tests cannot be reasoned about at all.
    case beam_utils:is_pure_test(I) of
        true -> {maybe,Fail};
        false -> error
    end;
normalize_op(_) ->
    error.

%% Put the register operand first, flipping the operator when the
%% literal was on the left.
normalize_op_1(Op, [Op1,Op2], Fail) ->
    case {get_literal(Op1),get_literal(Op2)} of
        {error,error} ->
            %% Both operands are registers.
            {maybe,Fail};
        {error,Lit} -> {{Op,Op1,Lit},Fail};
        {Lit,error} -> {{turn_op(Op),Op2,Lit},Fail};
        {_,_} ->
            %% Both operands are literals. Can probably only
            %% happen if the Core Erlang optimizations passes were
            %% turned off, so don't bother trying to do something
            %% smart here.
            {maybe,Fail}
    end.

%% Mirror an operator (swap its operand order).
turn_op('<') -> '>';
turn_op('>=') -> '=<';
turn_op('=:='=Op) -> Op;
turn_op('=/='=Op) -> Op.
%% Logical negation of a relational operator: the condition that
%% holds when the original test fails.
negate_op('<') -> '>=';
negate_op('>=') -> '<';
negate_op('>') -> '=<';
negate_op('=<') -> '>';
negate_op('=/=') -> '=:=';
negate_op('=:=') -> '=/='.

%% Extract a literal from a BEAM operand as {literal,Term}; 'error'
%% for registers and other non-literal two-tuples. Unknown operand
%% shapes (not a two-tuple, not nil) crash with function_clause,
%% exactly like the original clause set.
get_literal(nil) -> {literal,[]};
get_literal({literal,_}=Lit) -> Lit;
get_literal({Tag,Val}) when Tag =:= atom; Tag =:= integer; Tag =:= float ->
    {literal,Val};
get_literal({_,_}) -> error.
lib/compiler/src/beam_dead.erl
0.548915
0.423637
beam_dead.erl
starcoder
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 SyncFree Consortium.  All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License.  You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied.  See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(vectorclock).

-include("antidote.hrl").

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.

-export([get_clock_of_dc/2,
         set_clock_of_dc/3,
         from_list/1,
         new/0,
         eq/2,
         all_dots_smaller/2,
         all_dots_greater/2,
         le/2,
         ge/2,
         gt/2,
         lt/2,
         max/1,
         min/1]).

-export_type([vectorclock/0]).

%% @doc Returns an empty vector clock (no DC has a known entry).
-spec new() -> vectorclock().
new() ->
    dict:new().

%% @doc Returns the entry for DC Key; a missing entry counts as 0,
%% so every clock is conceptually defined for all DCs.
-spec get_clock_of_dc(any(), vectorclock()) -> non_neg_integer().
get_clock_of_dc(Key, VectorClock) ->
    case dict:find(Key, VectorClock) of
        {ok, Value} -> Value;
        error -> 0
    end.

%% @doc Sets (or overwrites) the entry for DC Key.
-spec set_clock_of_dc(any(), non_neg_integer(), vectorclock()) -> vectorclock().
set_clock_of_dc(Key, Value, VectorClock) ->
    dict:store(Key, Value, VectorClock).

%% @doc Builds a vector clock from a list of {DC, Counter} pairs.
-spec from_list([{any(), non_neg_integer()}]) -> vectorclock().
from_list(List) ->
    dict:from_list(List).

%% @doc Component-wise maximum of a list of clocks (the least upper
%% bound); the max of an empty list is the empty clock.
-spec max([vectorclock()]) -> vectorclock().
max([]) -> new();
max([V]) -> V;
max([V1, V2|T]) -> max([merge(fun erlang:max/2, V1, V2)|T]).

%% @doc Component-wise minimum of a list of clocks (the greatest
%% lower bound); the min of an empty list is the empty clock.
-spec min([vectorclock()]) -> vectorclock().
min([]) -> new();
min([V]) -> V;
min([V1, V2|T]) -> min([merge(fun erlang:min/2, V1, V2)|T]).

%% Combine two clocks entry by entry with F; DCs missing from one
%% side contribute 0. Duplicate DC keys in the concatenated key list
%% are harmless: each occurrence computes the same merged value and
%% dict:from_list/1 keeps one of them.
-spec merge(fun((non_neg_integer(), non_neg_integer()) -> non_neg_integer()),
            vectorclock(), vectorclock()) -> vectorclock().
merge(F, V1, V2) ->
    AllDCs = dict:fetch_keys(V1) ++ dict:fetch_keys(V2),
    Func = fun(DC) ->
        A = get_clock_of_dc(DC, V1),
        B = get_clock_of_dc(DC, V2),
        {DC, F(A, B)}
    end,
    from_list(lists:map(Func, AllDCs)).

%% True when predicate F holds for every pair of corresponding
%% entries (missing entries count as 0).
-spec for_all_keys(fun((non_neg_integer(), non_neg_integer()) -> boolean()),
                   vectorclock(), vectorclock()) -> boolean().
for_all_keys(F, V1, V2) ->
    %% We could but do not care about duplicate DC keys - finding duplicates is not worth the effort
    AllDCs = dict:fetch_keys(V1) ++ dict:fetch_keys(V2),
    Func = fun(DC) ->
        A = get_clock_of_dc(DC, V1),
        B = get_clock_of_dc(DC, V2),
        F(A, B)
    end,
    lists:all(Func, AllDCs).

%% @doc Clocks are equal when every entry matches.
-spec eq(vectorclock(), vectorclock()) -> boolean().
eq(V1, V2) ->
    for_all_keys(fun(A, B) -> A == B end, V1, V2).

%% @doc V1 happened-before-or-equal V2 (every entry =<).
-spec le(vectorclock(), vectorclock()) -> boolean().
le(V1, V2) ->
    for_all_keys(fun(A, B) -> A =< B end, V1, V2).

%% @doc Every entry of V1 is >= the corresponding entry of V2.
-spec ge(vectorclock(), vectorclock()) -> boolean().
ge(V1, V2) ->
    for_all_keys(fun(A, B) -> A >= B end, V1, V2).

%% @doc Every entry of V1 is strictly smaller (strong dominance).
-spec all_dots_smaller(vectorclock(), vectorclock()) -> boolean().
all_dots_smaller(V1, V2) ->
    for_all_keys(fun(A, B) -> A < B end, V1, V2).

%% @doc Every entry of V1 is strictly greater (strong dominance).
-spec all_dots_greater(vectorclock(), vectorclock()) -> boolean().
all_dots_greater(V1, V2) ->
    for_all_keys(fun(A, B) -> A > B end, V1, V2).

%% @doc Strict dominance: V1 >= V2 in every entry and not equal.
%% Uses andalso so eq/2 is skipped when ge/2 is already false.
-spec gt(vectorclock(), vectorclock()) -> boolean().
gt(V1, V2) ->
    ge(V1, V2) andalso (not eq(V1, V2)).

%% @doc Strict causal precedence: V1 =< V2 in every entry and not equal.
-spec lt(vectorclock(), vectorclock()) -> boolean().
lt(V1, V2) ->
    le(V1, V2) andalso (not eq(V1, V2)).

-ifdef(TEST).
vectorclock_test() -> V1 = vectorclock:from_list([{1, 5}, {2, 4}, {3, 5}, {4, 6}]), V2 = vectorclock:from_list([{1, 4}, {2, 3}, {3, 4}, {4, 5}]), V3 = vectorclock:from_list([{1, 5}, {2, 4}, {3, 4}, {4, 5}]), V4 = vectorclock:from_list([{1, 6}, {2, 3}, {3, 1}, {4, 7}]), V5 = vectorclock:from_list([{1, 6}, {2, 7}]), ?assertEqual(all_dots_greater(V1, V2), true), ?assertEqual(all_dots_smaller(V2, V1), true), ?assertEqual(all_dots_greater(V1, V3), false), ?assertEqual(gt(V1, V3), true), ?assertEqual(gt(V1, V1), false), ?assertEqual(ge(V1, V4), false), ?assertEqual(le(V1, V4), false), ?assertEqual(eq(V1, V4), false), ?assertEqual(ge(V1, V5), false). vectorclock_max_test() -> V1 = vectorclock:from_list([{1, 5}, {2, 4}]), V2 = vectorclock:from_list([{1, 6}, {2, 3}]), V3 = vectorclock:from_list([{1, 3}, {3, 2}]), Expected12 = vectorclock:from_list([{1, 6}, {2, 4}]), Expected23 = vectorclock:from_list([{1, 6}, {2, 3}, {3, 2}]), Expected13 = vectorclock:from_list([{1, 5}, {2, 4}, {3, 2}]), Expected123 = vectorclock:from_list([{1, 6}, {2, 4}, {3, 2}]), Unexpected123 = vectorclock:from_list([{1, 5}, {2, 5}, {3, 5}]), ?assertEqual(eq(max([V1, V2]), Expected12), true), ?assertEqual(eq(max([V2, V3]), Expected23), true), ?assertEqual(eq(max([V1, V3]), Expected13), true), ?assertEqual(eq(max([V1, V2, V3]), Expected123), true), ?assertEqual(eq(max([V1, V2, V3]), Unexpected123), false). 
vectorclock_min_test() -> V1 = vectorclock:from_list([{1, 5}, {2, 4}]), V2 = vectorclock:from_list([{1, 6}, {2, 3}]), V3 = vectorclock:from_list([{1, 3}, {3, 2}]), Expected12 = vectorclock:from_list([{1, 5}, {2, 3}]), Expected23 = vectorclock:from_list([{1, 3}]), Expected13 = vectorclock:from_list([{1, 3}]), Expected123 = vectorclock:from_list([{1, 3}]), Unexpected123 = vectorclock:from_list([{1, 3}, {2, 3}, {3, 2}]), ?assertEqual(eq(min([V1, V2]), Expected12), true), ?assertEqual(eq(min([V2, V3]), Expected23), true), ?assertEqual(eq(min([V1, V3]), Expected13), true), ?assertEqual(eq(min([V1, V2, V3]), Expected123), true), ?assertEqual(eq(min([V1, V2, V3]), Unexpected123), false). -endif.
src/vectorclock.erl
0.636579
0.452657
vectorclock.erl
starcoder
%%%------------------------------------------------------------------- %%% @author <NAME> %%% @copyright (C) 2021, <COMPANY> %%% @doc %%% Providing a game grid functionality for a 2048 game with a desired size %%% @end %%%------------------------------------------------------------------- -module(tofe_grid). -define(DEFAULT_POPULATE_TIMES, 4). %% API -export([new/1]). -export([populate_randomly/1, populate_randomly/2]). -export([has_remaining_moves/1]). -export([slide_rows/2]). -export([slide_columns/2]). -export([has_empty_spaces/1, has_possible_moves/1]). -opaque grid() :: [list()]. -opaque row() :: list(). -export_type [grid/0, row/0]. -spec new(Size :: non_neg_integer()) -> grid(). %% -------------------------------------------------------------------- %% @doc %% Performing creating a new grid (matrix) by desired size %% @end %% -------------------------------------------------------------------- new(Size) -> [ [null || _ColI <- lists:seq(1, Size)] || _RowI <-lists:seq(1, Size)]. -spec has_remaining_moves(grid()) -> boolean(). %% -------------------------------------------------------------------- %% @doc %% Performing check if there are any available moves left before end the game. %% @end %% -------------------------------------------------------------------- has_remaining_moves(Grid) -> has_empty_spaces(Grid) andalso has_possible_moves(Grid). %% -------------------------------------------------------------------- %% @doc %% Randomly populating grid with values (2 or 4). By default - 4 times %% @end %% -------------------------------------------------------------------- -spec populate_randomly(grid()) -> grid(). populate_randomly(Grid) -> populate_randomly(Grid, ?DEFAULT_POPULATE_TIMES). -spec populate_randomly(grid(), Repeat :: non_neg_integer()) -> grid(). %% -------------------------------------------------------------------- %% @doc %% Randomly populating grid with values (2 or 4). 
Can specify a number of items added to the grid %% @end %% -------------------------------------------------------------------- populate_randomly(Grid, 0) -> Grid; populate_randomly(Grid, Repeat) -> lists:foldl( fun(_RepeatI, GridI) -> random_put(GridI, get_a_number()) end, Grid, lists:seq(1, Repeat)). %% -------------------------------------------------------------------- %% @doc %% Randomly getting a number 2 or 4 in a 75% proportion of 2 instead of 2 %% @end %% -------------------------------------------------------------------- get_a_number() -> tofe_utils:pick_random([2,2,2,4]). %% -------------------------------------------------------------------- %% @doc %% Randomly put a value into a cell which has no value %% @end %% -------------------------------------------------------------------- random_put(Grid, Value) -> [pipe]( empty_cells(Grid), tofe_utils:pick_random(_), put_at(Grid, _, Value)). %% -------------------------------------------------------------------- %% @doc %% Getting a list of empty cells :: Returning [{X, Y} | Tail}] when X - row number, Y - Col number %% @end %% -------------------------------------------------------------------- empty_cells(Grid) -> lists:flatten([ [ {RowI, ColI} || {ColI, null} <- tofe_utils:with_index(Row)] || {RowI, Row} <- tofe_utils:with_index(Grid) ]). %% -------------------------------------------------------------------- %% @doc %% Put a value into the X row, Y col %% @end %% -------------------------------------------------------------------- put_at(Grid, {X, Y}, Value) -> GridTuple = list_to_tuple(Grid), [pipe](GridTuple, erlang:element(X, _), list_to_tuple(_), erlang:setelement(Y, _, Value), tuple_to_list(_), erlang:setelement(X, GridTuple, _), tuple_to_list(_) ). %% -------------------------------------------------------------------- %% @doc %% Sliding rows %% @end %% -------------------------------------------------------------------- slide_rows(Grid, Direction) -> slide_vectors(Grid, Direction). 
%% -------------------------------------------------------------------- %% @doc %% Sliding a column with a transpose approach %% @end %% -------------------------------------------------------------------- slide_columns(Grid, Direction) -> [pipe]( Grid, tofe_utils:transpose(_), slide_vectors(_, Direction), tofe_utils:transpose(_) ). slide_vectors(VectorLists, Direction) -> [ slide_vector(VectorList, Direction) || VectorList <- VectorLists]. slide_vector(VectorList, Direction) -> tofe_vector:slide(VectorList, Direction). %% -------------------------------------------------------------------- %% @doc %% Lazy checking for empty spaces in a grid, in case of first side is executed, second expr will not be evaluated %% @end %% -------------------------------------------------------------------- has_empty_spaces([]) -> false; has_empty_spaces([null | _Row]) -> true; has_empty_spaces([I | Row]) when is_integer(I) -> has_empty_spaces(Row); has_empty_spaces([Row | Rows]) when is_list(Row) -> case has_empty_spaces(Row) of false -> has_empty_spaces(Rows); true -> true end. %% -------------------------------------------------------------------- %% @doc %% Lazy checking for possible moves. First evaluated expr which returns true stops the pipeline of evaluating others. %% @end %% -------------------------------------------------------------------- has_possible_moves(Grid) -> slide_rows(Grid, forward) /= Grid orelse slide_rows(Grid, backward) /= Grid orelse slide_columns(Grid, forward) /= Grid orelse slide_columns(Grid, backward) /= Grid.
src/tofe_grid.erl
0.558207
0.545891
tofe_grid.erl
starcoder
%%%------------------------------------------------------------------- %%% @doc %%% Provides a set of utilities to work with enumerables. %%% @end %%%------------------------------------------------------------------- -module(shards_enum). %% API -export([ map/2, reduce/3, reduce_while/3, pmap/2, pmap/3 ]). %%%=================================================================== %%% API %%%=================================================================== %% @doc %% Returns a list where each element is the result of invoking `Fun' on each %% corresponding element of `Enumerable'. %% %% For maps, the function expects a key-value tuple. %% @end -spec map(Fun, Enumerable) -> [Result] when Fun :: fun((Elem) -> Result), Elem :: term(), Result :: term(), Enumerable :: [term()] | map() | non_neg_integer(). map(Fun, Enumerable) when is_function(Fun, 1) -> do_map(Fun, Enumerable). %% @private do_map(Fun, Enum) when is_list(Enum) -> [Fun(Elem) || Elem <- Enum]; do_map(Fun, Enum) when is_map(Enum) -> maps:fold(fun(Key, Value, Acc) -> [Fun({Key, Value}) | Acc] end, [], Enum); do_map(Fun, Enum) when is_integer(Enum) -> do_map(Fun, [], Enum, 0). %% @private do_map(Fun, Acc, N, Count) when Count < N -> do_map(Fun, [Fun(Count) | Acc], N, Count + 1); do_map(_Fun, Acc, _N, _Count) -> Acc. %% @doc %% Invokes `Fun' for each element in the `Enumerable' with the %% accumulator. %% %% The initial value of the accumulator is `Acc0'. The function is %% invoked for each element in the enumerable with the accumulator. %% The result returned by the function is used as the accumulator %% for the next iteration. The function returns the last accumulator. %% @end -spec reduce(Fun, Acc0, Enumerable) -> Acc1 when Fun :: fun((Elem, AccIn) -> AccOut), Elem :: term(), AccIn :: term(), AccOut :: term(), Acc0 :: term(), Acc1 :: term(), Enumerable :: [term()] | map() | non_neg_integer(). reduce(Fun, Acc0, Enumerable) when is_function(Fun, 2) -> do_reduce(Fun, Acc0, Enumerable). 
%% @private do_reduce(Fun, AccIn, Enum) when is_list(Enum) -> lists:foldl(Fun, AccIn, Enum); do_reduce(Fun, AccIn, Enum) when is_map(Enum) -> maps:fold(fun(Key, Value, Acc) -> Fun({Key, Value}, Acc) end, AccIn, Enum); do_reduce(Fun, AccIn, Enum) when is_integer(Enum) -> do_reduce(Fun, AccIn, Enum, 0). %% @private do_reduce(Fun, Acc, N, Count) when Count < N -> do_reduce(Fun, Fun(Count, Acc), N, Count + 1); do_reduce(_Fun, Acc, _N, _Count) -> Acc. %% @doc %% Reduces enumerable until `Fun' returns `{halt, AccOut}'. %% %% The return value for `Fun' is expected to be %% %% <ul> %% <li> %% `{cont, AccOut}' to continue the reduction with `AccOut' as the new accumulator or %% </li> %% <li> %% `{halt, AccOut}' to halt the reduction %% </li> %% </ul> %% %% If fun returns `{halt, AccOut}' the reduction is halted and the function %% returns `AccOut'. Otherwise, if the enumerable is exhausted, the function %% returns the accumulator of the last `{cont, AccOut}'. %% @end -spec reduce_while(Fun, Acc0, Enumerable) -> Acc1 when Fun :: fun((Elem, AccIn) -> FunRes), FunRes :: {cont | halt, AccOut}, Elem :: term(), AccIn :: term(), AccOut :: term(), Acc0 :: term(), Acc1 :: term(), Enumerable :: [term()] | map() | non_neg_integer(). reduce_while(Fun, AccIn, Enumerable) when is_function(Fun, 2) -> try do_reduce_while(Fun, AccIn, Enumerable) catch throw:{halt, AccOut} -> AccOut end. %% @private do_reduce_while(Fun, AccIn, Enum) when is_list(Enum) -> lists:foldl(fun(Elem, Acc) -> case Fun(Elem, Acc) of {cont, AccOut} -> AccOut; {halt, AccOut} -> throw({halt, AccOut}) end end, AccIn, Enum); do_reduce_while(Fun, AccIn, Enum) when is_map(Enum) -> maps:fold(fun(Key, Value, Acc) -> case Fun({Key, Value}, Acc) of {cont, AccOut} -> AccOut; {halt, AccOut} -> throw({halt, AccOut}) end end, AccIn, Enum); do_reduce_while(Fun, AccIn, Enum) when is_integer(Enum) -> do_reduce_while(Fun, AccIn, Enum, 0). 
%% @private do_reduce_while(Fun, Acc, N, Count) when Count < N -> case Fun(Count, Acc) of {cont, AccOut} -> do_reduce_while(Fun, AccOut, N, Count + 1); {halt, AccOut} -> throw({halt, AccOut}) end; do_reduce_while(_Fun, Acc, _N, _Count) -> Acc. %% @equiv pmap(Fun, infinity, Enumerable) pmap(Fun, Enumerable) -> pmap(Fun, infinity, Enumerable). %% @doc %% Similar to `shards_enum:map/2' but it runs in parallel. %% @end -spec pmap(Fun, Timeout, Enumerable) -> [Result] when Fun :: fun((Elem) -> Result), Elem :: term(), Timeout :: timeout(), Result :: term(), Enumerable :: [term()] | map() | non_neg_integer(). pmap(Fun, Timeout, Enumerable) when is_function(Fun, 1), is_list(Enumerable) -> Parent = self(), Running = [spawn_monitor(fun() -> Parent ! {self(), Fun(E)} end) || E <- Enumerable], pmap_collect(Running, Timeout, {[], undefined}). %% @private pmap_collect([], _Timeout, {Acc, undefined}) -> Acc; pmap_collect([], _Timeout, {_Acc, {error, {Reason, Stacktrace}}}) -> erlang:raise(error, Reason, Stacktrace); pmap_collect([], _Timeout, {_Acc, {error, Reason}}) -> error(Reason); pmap_collect([{Pid, MRef} | Next], Timeout, {Acc, Err}) -> receive {Pid, Res} -> erlang:demonitor(MRef, [flush]), pmap_collect(Next, Timeout, {[Res | Acc], Err}); {'DOWN', MRef, process, _Pid, Reason} -> pmap_collect(Next, Timeout, {Acc, {error, Reason}}) after Timeout -> exit(pmap_timeout) end.
src/shards_enum.erl
0.573201
0.450239
shards_enum.erl
starcoder
%% @author <NAME> <<EMAIL>> %% @copyright 2010 <NAME> %% @doc Operators for expression evaluation in templates %% Copyright 2010 <NAME> %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. -module(erlydtl_operators). -author("<NAME> <<EMAIL>>"). -export([ 'and'/3, 'not'/2, 'or'/3, 'xor'/3, concat/3, add/3, sub/3, divide/3, multiply/3, modulo/3, negate/2, ge/3, le/3, gt/3, lt/3, eq/3, ne/3 ]). 'and'(A, B, Context) -> erlydtl_runtime:is_true(A, Context) and erlydtl_runtime:is_true(B, Context). 'not'(A, Context) -> erlydtl_runtime:is_false(A, Context). 'or'(A, B, Context) -> erlydtl_runtime:is_true(A, Context) or erlydtl_runtime:is_true(B, Context). 'xor'(A, B, Context) -> erlydtl_runtime:is_true(A, Context) xor erlydtl_runtime:is_true(B, Context). concat(A, B, _Context) when is_list(A), is_list(B) -> A++B; concat(A, B, Context) -> ABin = z_convert:to_binary(A, Context), BBin = z_convert:to_binary(B, Context), <<ABin/binary, BBin/binary>>. add(A, B, _Context) -> case to_numbers(A, B) of {undefined, _} -> undefined; {_, undefined} -> undefined; {A1, B1} -> A1 + B1 end. sub(A, B, _Context) -> case to_numbers(A, B) of {undefined, _} -> undefined; {_, undefined} -> undefined; {A1, B1} -> A1 - B1 end. divide(A, B, _Context) -> case to_numbers(A, B) of {undefined, _} -> undefined; {_, undefined} -> undefined; {_, 0} -> undefined; {_, 0.0} -> undefined; {A1, B1} -> A1 / B1 end. 
multiply(A, B, _Context) -> case to_numbers(A, B) of {undefined, _} -> undefined; {_, undefined} -> undefined; {A1, B1} -> A1 * B1 end. modulo(A, B, _Context) -> case to_numbers(A, B) of {undefined, _} -> undefined; {_, undefined} -> undefined; {_, 0} -> undefined; {_, 0.0} -> undefined; {A1, B1} -> A1 rem B1 end. negate(undefined, _Context) -> undefined; negate(A, _Context) when is_number(A) -> 0 - A; negate(A, _Context) -> 0 - z_convert:to_integer(A). ge(Input, Value, _Context) -> case to_values(Input, Value) of {undefined, _} -> undefined; {_, undefined} -> undefined; {A, B} -> A >= B end. le(Input, Value, _Context) -> case to_values(Input, Value) of {undefined, _} -> undefined; {_, undefined} -> undefined; {A, B} -> A =< B end. gt(Input, Value, _Context) -> case to_values(Input, Value) of {undefined, _} -> undefined; {_, undefined} -> undefined; {A, B} -> A > B end. lt(Input, Value, _Context) -> case to_values(Input, Value) of {undefined, _} -> undefined; {_, undefined} -> undefined; {A, B} -> A < B end. eq(Input, Value, _Context) -> {A, B} = to_values(Input, Value), A == B. ne(Input, Value, _Context) -> {A, B} = to_values(Input, Value), A /= B. %% @doc Convert the two parameters to compatible values to_values(undefined, B) -> {undefined, B}; to_values(A, undefined) -> {A, undefined}; to_values(A, B) when is_number(A), is_number(B) -> {A,B}; to_values(A, B) when is_boolean(A); is_boolean(B) -> {z_convert:to_bool(A), z_convert:to_bool(B)}; to_values(A, B) when is_integer(A); is_integer(B) -> {z_convert:to_integer(A), z_convert:to_integer(B)}; to_values(A, B) when is_float(A); is_float(B) -> {z_convert:to_float(A), z_convert:to_float(B)}; to_values(A, B) when is_binary(A), is_binary(B) -> {A,B}; to_values(A, B) when is_tuple(A), is_tuple(B) -> {A, B}; to_values(A, B) -> {z_convert:to_list(A), z_convert:to_list(B)}. 
%% @doc Convert the two parameters to compatible numerical values to_numbers(undefined, B) -> {undefined, B}; to_numbers(A, undefined) -> {A, undefined}; to_numbers(A, B) when is_number(A), is_number(B) -> {A,B}; to_numbers(A, B) when is_integer(A); is_integer(B) -> {z_convert:to_integer(A), z_convert:to_integer(B)}; to_numbers(A, B) when is_float(A); is_float(B) -> {z_convert:to_float(A), z_convert:to_float(B)}; to_numbers(A, B) -> {z_convert:to_integer(A), z_convert:to_integer(B)}.
src/erlydtl/erlydtl_operators.erl
0.504639
0.465205
erlydtl_operators.erl
starcoder
%% %% %CopyrightBegin% %% %% Copyright Ericsson AB 2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %% %% %CopyrightEnd% %% -module(prim_buffer). -export([on_load/0]). %% This is a mutable binary buffer that helps break out buffering logic from %% NIFs/drivers, which is often the only thing that prevents the C code from %% being reduced to bare system call wrappers. %% %% All operations in this file are thread-unsafe and risk crashing the emulator %% if you're not careful. -export([new/0, size/1, wipe/1, read/2, read_iovec/2, write/2, skip/2]). -export([find_byte_index/2]). -export([try_lock/1, unlock/1]). -type prim_buffer() :: term(). %% Controls when to copy rather than extract sub-binaries from the buffer, %% reducing the risk of small reads keeping a large binary alive. -define(COPYING_READ_LIMIT, 512). %% Reads that fit into heap binaries are always copied since the cost of %% peeking binaries that short is largely equivalent to copying. -define(ERL_ONHEAP_BIN_LIMIT, 64). on_load() -> case erlang:load_nif(atom_to_list(?MODULE), 0) of ok -> ok end. -spec new() -> prim_buffer(). new() -> erlang:nif_error(undef). -spec size(Buffer :: prim_buffer()) -> non_neg_integer(). size(_Buffer) -> erlang:nif_error(undef). %% Reads data as a binary from the front of the buffer. This will almost always %% result in copying so it should be avoided unless you absolutely must have a %% binary. 
-spec read(Buffer :: prim_buffer(), Size :: non_neg_integer()) -> binary(). read(Buffer, Size) when Size =< ?ERL_ONHEAP_BIN_LIMIT -> copying_read(Buffer, Size); read(Buffer, Size) when Size > ?ERL_ONHEAP_BIN_LIMIT -> iolist_to_binary(read_iovec(Buffer, Size)). %% Reads data as an erlang:iovec() binary from the front of the buffer, %% avoiding copying if reasonable. -spec read_iovec(Buffer, Size) -> IOVec when Buffer :: prim_buffer(), Size :: non_neg_integer(), IOVec :: erlang:iovec(). read_iovec(Buffer, Size) when Size =< ?ERL_ONHEAP_BIN_LIMIT -> [copying_read(Buffer, Size)]; read_iovec(Buffer, Size) when Size > ?ERL_ONHEAP_BIN_LIMIT -> Head = peek_head(Buffer), HeadSize = byte_size(Head), if (HeadSize - Size) > ?COPYING_READ_LIMIT, Size =< ?COPYING_READ_LIMIT -> [copying_read(Buffer, Size)]; HeadSize > Size -> skip(Buffer, Size), {First, _Rest} = split_binary(Head, Size), [First]; HeadSize < Size -> skip(Buffer, HeadSize), [Head | read_iovec(Buffer, Size - HeadSize)]; HeadSize =:= Size -> skip(Buffer, Size), [Head] end. %% Writes an erlang:iovec() to the back of the buffer. -spec write(Buffer :: prim_buffer(), IOVec :: erlang:iovec()) -> ok. write(_Buffer, _IOVec) -> erlang:nif_error(undef). %% Removes data from the front of the buffer without reading it. -spec skip(Buffer :: prim_buffer(), Size :: non_neg_integer()) -> ok. skip(_Buffer, _Size) -> erlang:nif_error(undef). -spec wipe(Buffer :: prim_buffer()) -> ok. wipe(Buffer) -> skip(Buffer, prim_buffer:size(Buffer)). %% Finds the start-index of the first occurence of Needle, for implementing %% read_line and similar. -spec find_byte_index(Buffer, Needle) -> Result when Buffer :: prim_buffer(), Needle :: non_neg_integer(), Result :: {ok, non_neg_integer()} | not_found. find_byte_index(_Buffer, _Needle) -> erlang:nif_error(undef). %% Attempts to take a unique lock on the buffer. Failure handling is left to %% the user. -spec try_lock(Buffer :: prim_buffer()) -> acquired | busy. 
try_lock(_Buffer) -> erlang:nif_error(undef). -spec unlock(Buffer :: prim_buffer()) -> ok. unlock(_Buffer) -> erlang:nif_error(undef). %% Unexported helper functions: %% Reads data from the front of the buffer, returning a copy of the data to %% avoid holding references. -spec copying_read(Buffer :: prim_buffer(), Size :: non_neg_integer()) -> binary(). copying_read(_Buffer, _Size) -> erlang:nif_error(undef). %% Returns the binary at the front of the buffer without modifying the buffer. -spec peek_head(Buffer :: prim_buffer()) -> binary(). peek_head(_Buffer) -> erlang:nif_error(undef).
erts/preloaded/src/prim_buffer.erl
0.523908
0.407068
prim_buffer.erl
starcoder
%% @doc %% Encoder/decoder for PostGIS binary data representation. %% %% <ul> %% <li>[https://en.wikipedia.org/wiki/Well-known_text]</li> %% <li>[http://postgis.net/docs/manual-2.4/using_postgis_dbmanagement.html#EWKB_EWKT]</li> %% </ul> -module(ewkb). -export([decode_geometry/1, encode_geometry/1]). -export_type([point_type/0, point/1, multi_point/1, line_string/1, multi_line_string/1, basic_string/1, curve/1, multi_curve/1, polygon/1, multi_polygon/1, triangle/1, curve_polygon/1, polyhedral_surface/1, surface/1, multi_surface/1, tin/1, geometry/1, geometry/0, geometry_collection/1, geom_type/0]). -include("epgsql_geometry.hrl"). -type point_type() :: '2d' | '3d' | '2dm' | '3dm'. -type point(PointType) :: #point{ point_type :: PointType }. -type multi_point(PointType) :: #multi_point{ point_type :: PointType }. -type line_string(PointType) :: #line_string{ point_type :: PointType }. -type multi_line_string(PointType) :: #multi_line_string{ point_type :: PointType }. -type basic_string(PointType) :: #circular_string{ point_type :: PointType } | #line_string{ point_type :: PointType }. -type curve(PointType) :: #circular_string{ point_type :: PointType } | #line_string{ point_type :: PointType } | #compound_curve{ point_type :: PointType }. -type multi_curve(PointType) :: #multi_curve{ point_type :: PointType }. -type polygon(PointType) :: #polygon{ point_type :: PointType }. -type multi_polygon(PointType) :: #multi_polygon{ point_type :: PointType }. -type triangle(PointType) :: #triangle{ point_type :: PointType }. -type curve_polygon(PointType) :: #curve_polygon{ point_type :: PointType }. -type polyhedral_surface(PointType) :: #polyhedral_surface{ point_type :: PointType }. -type surface(PointType) :: polygon(PointType) | curve_polygon(PointType) | polyhedral_surface(PointType). -type multi_surface(PointType) :: #multi_surface{ point_type :: PointType }. -type tin(PointType) :: #tin{ point_type :: PointType }. 
-type geometry(PointType) :: point(PointType) | line_string(PointType) | triangle(PointType) | tin(PointType) | curve(PointType) | surface(PointType) | multi_point(PointType) | multi_line_string(PointType) | multi_polygon(PointType) | multi_curve(PointType) | multi_surface(PointType) | geometry_collection(PointType). -type geometry() :: geometry(point_type()). -type geometry_collection(PointType) :: [geometry(PointType)]. -type geom_type() :: geometry | point % | line_string% | polygon% | multi_point% | multi_line_string% | multi_polygon% | geometry_collection% | circular_string% | compound_curve% | curve_polygon% | multi_curve% | multi_surface% | curve% | surface% | polyhedral_surface% | tin% | triangle.% -spec decode_geometry(binary()) -> geometry(). decode_geometry(Binary) -> {Geometry, <<>>} = decode_geometry_data(Binary), Geometry. -spec encode_geometry(geometry()) -> binary(). encode_geometry(Geometry) -> Type = encode_type(Geometry), PointType = encode_point_type(Geometry), Data = encode_geometry_data(Geometry), <<1, Type/binary, PointType/binary, Data/binary>>. 
encode_geometry_data(#point{ point_type = '2d', x = X, y = Y }) -> Xbin = encode_float64(X), Ybin = encode_float64(Y), <<Xbin/binary, Ybin/binary>>; encode_geometry_data(#point{ point_type = '2dm', x = X, y = Y, m = M }) -> Xbin = encode_float64(X), Ybin = encode_float64(Y), Mbin = encode_float64(M), <<Xbin/binary, Ybin/binary, Mbin/binary>>; encode_geometry_data(#point{ point_type = '3d', x = X, y = Y, z = Z }) -> Xbin = encode_float64(X), Ybin = encode_float64(Y), Zbin = encode_float64(Z), <<Xbin/binary, Ybin/binary, Zbin/binary>>; encode_geometry_data(#point{ point_type = '3dm', x = X, y = Y, z = Z, m = M }) -> Xbin = encode_float64(X), Ybin = encode_float64(Y), Zbin = encode_float64(Z), Mbin = encode_float64(M), <<Xbin/binary, Ybin/binary, Zbin/binary, Mbin/binary>>; encode_geometry_data({SimpleCollection, _, Data}) when SimpleCollection == line_string; SimpleCollection == circular_string; SimpleCollection == polygon; SimpleCollection == triangle -> encode_collection(Data); encode_geometry_data({TypedCollection, _, Data}) when TypedCollection == multi_point; TypedCollection == multi_line_string; TypedCollection == multi_curve; TypedCollection == multi_polygon; TypedCollection == multi_surface; TypedCollection == compound_curve; TypedCollection == curve_polygon; TypedCollection == geometry_collection; TypedCollection == polyhedral_surface; TypedCollection == tin -> encode_typed_collection(Data). encode_collection(Collection) when is_list(Collection) -> Length = length(Collection), LengthBin = encode_int32(Length), CollectionBin = lists:foldl( fun(Element, Acc) -> ElementBin = encode_geometry_data(Element), <<Acc/binary, ElementBin/binary>> end, <<>>, Collection), <<LengthBin/binary, CollectionBin/binary>>. 
encode_typed_collection(Collection) when is_list(Collection) -> Length = length(Collection), LengthBin = encode_int32(Length), CollectionBin = lists:foldl( fun(Element, Acc) -> ElementBin = encode_geometry(Element), <<Acc/binary, ElementBin/binary>> end, <<>>, Collection), <<LengthBin/binary, CollectionBin/binary>>. encode_int32(Int) when is_integer(Int) -> <<Int:1/little-integer-unit:32>>. encode_float64(Int) when is_number(Int) -> <<Int:1/little-float-unit:64>>. -spec decode_geometry_data(binary()) -> {geometry(), binary()}. decode_geometry_data(Binary) -> <<1, TypeCode:2/binary, SubtypeCode:2/binary, Data/binary>> = Binary, Type = decode_type(TypeCode), Subtype = decode_point_type(SubtypeCode), decode_geometry_data(Type, Subtype, Data). -spec decode_geometry_data(geom_type(), point_type(), binary()) -> {geometry(), binary()}. decode_geometry_data(curve, _, _) -> error({curve, not_supported}); decode_geometry_data(surface, _, _) -> error({surface, not_supported}); decode_geometry_data(geometry, _, _) -> error({geometry, not_supported}); decode_geometry_data(point, PointType, Data) -> decode_point(PointType, Data); decode_geometry_data(LineType, PointType, Data) when LineType == line_string; LineType == circular_string -> {Points, Rest} = decode_collection(point, PointType, Data), {{LineType, PointType, Points}, Rest}; decode_geometry_data(polygon, PointType, Data) -> {Lines, Rest} = decode_collection(line_string, PointType, Data), {#polygon{ point_type = PointType, rings = Lines }, Rest}; decode_geometry_data(triangle, PointType, Data) -> {#polygon{ rings = Rings }, Rest} = decode_geometry_data(polygon, PointType, Data), {#triangle{ point_type = PointType, rings = Rings }, Rest}; decode_geometry_data(Collection, PointType, Data) when Collection == multi_point; Collection == multi_line_string; Collection == multi_curve; Collection == multi_polygon; Collection == multi_surface; Collection == compound_curve; Collection == curve_polygon; Collection == 
geometry_collection; Collection == polyhedral_surface; Collection == tin -> {Lines, Rest} = decode_typed_collection(Data), {{Collection, PointType, Lines}, Rest}. -spec decode_collection(geom_type(), point_type(), binary()) -> {[geometry()], binary()}. decode_collection(Type, PointType, Data) -> {Length, CountRest} = decode_int32(Data), lists:foldl( fun(_, {Geoms, Rest}) -> {Geom, R} = decode_geometry_data(Type, PointType, Rest), {Geoms ++ [Geom], R} end, {[], CountRest}, lists:seq(1, Length)). -spec decode_typed_collection(binary()) -> {[geometry()], binary()}. decode_typed_collection(Data) -> {Length, CountRest} = decode_int32(Data), lists:foldl( fun(_, {Geoms, Rest}) -> {Geom, R} = decode_geometry_data(Rest), {Geoms ++ [Geom], R} end, {[], CountRest}, lists:seq(1, Length)). -spec decode_int32(binary()) -> {integer(), binary()}. decode_int32(<<Hex:4/binary, Rest/binary>>) -> <<Int:1/little-integer-unit:32>> = Hex, {Int, Rest}. -spec decode_float64(binary()) -> {float(), binary()}. decode_float64(<<Hex:8/binary, Rest/binary>>) -> <<Float:1/little-float-unit:64>> = Hex, {Float, Rest}. decode_point(PointType, Data) -> {Values, Rest} = lists:foldl( fun(_, {Values, Rest}) -> {Value, R} = decode_float64(Rest), {Values ++ [Value], R} end, {[], Data}, lists:seq(1, point_size(PointType))), Point = case {PointType, Values} of {'2d', [X, Y]} -> #point{ point_type = PointType, x = X, y = Y }; {'2dm', [X, Y, M]} -> #point{ point_type = PointType, x = X, y = Y, m = M }; {'3d', [X, Y, Z]} -> #point{ point_type = PointType, x = X, y = Y, z = Z }; {'3dm', [X, Y, Z, M]} -> #point{ point_type = PointType, x = X, y = Y, z = Z, m = M } end, {Point, Rest}. -spec point_size(point_type()) -> 2..4. point_size('2d') -> 2; point_size('2dm') -> 3; point_size('3d') -> 3; point_size('3dm') -> 4. -spec decode_type(binary()) -> geom_type(). 
%% Map the 2-byte geometry-type field of an (E)WKB header to its
%% geom_type() atom. Only the first byte varies here; the second byte
%% is 0 (the dimension flags live in the point-type field below).
decode_type(<<0, 0>>) -> geometry;
decode_type(<<1, 0>>) -> point;
decode_type(<<2, 0>>) -> line_string;
decode_type(<<3, 0>>) -> polygon;
decode_type(<<4, 0>>) -> multi_point;
decode_type(<<5, 0>>) -> multi_line_string;
decode_type(<<6, 0>>) -> multi_polygon;
decode_type(<<7, 0>>) -> geometry_collection;
decode_type(<<8, 0>>) -> circular_string;
decode_type(<<9, 0>>) -> compound_curve;
decode_type(<<10, 0>>) -> curve_polygon;
decode_type(<<11, 0>>) -> multi_curve;
decode_type(<<12, 0>>) -> multi_surface;
decode_type(<<13, 0>>) -> curve;
decode_type(<<14, 0>>) -> surface;
decode_type(<<15, 0>>) -> polyhedral_surface;
decode_type(<<16, 0>>) -> tin;
decode_type(<<17, 0>>) -> triangle.

%% Inverse of decode_type/1. Accepts either a geometry tuple (the type
%% atom is its first element) or a bare geom_type() atom.
-spec encode_type(geometry() | geom_type()) -> binary().
encode_type(Geometry) when is_tuple(Geometry) ->
    encode_type(element(1, Geometry));
encode_type(geometry) -> <<00, 0>>;
encode_type(point) -> <<01, 0>>;
encode_type(line_string) -> <<02, 0>>;
encode_type(polygon) -> <<03, 0>>;
encode_type(multi_point) -> <<04, 0>>;
encode_type(multi_line_string) -> <<05, 0>>;
encode_type(multi_polygon) -> <<06, 0>>;
encode_type(geometry_collection) -> <<07, 0>>;
encode_type(circular_string) -> <<08, 0>>;
encode_type(compound_curve) -> <<09, 0>>;
encode_type(curve_polygon) -> <<10, 0>>;
encode_type(multi_curve) -> <<11, 0>>;
encode_type(multi_surface) -> <<12, 0>>;
encode_type(curve) -> <<13, 0>>;
encode_type(surface) -> <<14, 0>>;
encode_type(polyhedral_surface) -> <<15, 0>>;
encode_type(tin) -> <<16, 0>>;
encode_type(triangle) -> <<17, 0>>.

%% Decode the dimension flags byte: 64 marks an M coordinate, 128 a Z
%% coordinate, 192 both (Z and M).
-spec decode_point_type(binary()) -> point_type().
decode_point_type(<<0, 0>>) -> '2d';
decode_point_type(<<0, 64>>) -> '2dm';
decode_point_type(<<0, 128>>) -> '3d';
decode_point_type(<<0, 192>>) -> '3dm'.

%% Inverse of decode_point_type/1. Accepts either a geometry tuple (the
%% point type is its second element) or a bare point_type() atom.
-spec encode_point_type(geometry() | point_type()) -> binary().
encode_point_type(Geometry) when is_tuple(Geometry) ->
    encode_point_type(element(2, Geometry));
encode_point_type('2d') -> <<0, 0>>;
encode_point_type('2dm') -> <<0, 64>>;
encode_point_type('3d') -> <<0, 128>>;
encode_point_type('3dm') -> <<0, 192>>.
src/ewkb.erl
0.660501
0.655915
ewkb.erl
starcoder
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

%% @doc This module implements a condition variable
%%
%% Warning: don't wait for condition variables that are never going to
%% be set with a timeout: it will leave a proxy process in the
%% system. Only use this module for a strictly known set of condition
%% variables that are expected to be set eventually.
-module(mria_condition_var).

%% API:
-export([init/0, stop/0, set/2, unset/1, is_set/1, read/1, read/2, peek/1, wait_vars/2]).

%% Internal exports:
-export([waker_entrypoint/2]).

%%================================================================================
%% Types
%%================================================================================

-type key() :: term().

-type value() :: term().

%% Name of the ETS table holding, per key, either {set, Value} or
%% {unset, WakerPid}:
-define(status_tab, mria_rlog_status_tab).

%%================================================================================
%% API functions
%%================================================================================

%% @doc Create the (public, named) status table. Must be called once,
%% by a long-lived process, before any other function in this module.
%% Reads dominate, hence {read_concurrency, true}.
-spec init() -> ok.
init() ->
    ets:new(?status_tab, [ set
                         , named_table
                         , public
                         , {read_concurrency, true}
                         , {write_concurrency, false}
                         ]),
    ok.

-spec stop() -> ok.
%% Shut down every pending waker process, then drop the table.
stop() ->
    %% Match out the pids of all {unset, Pid} entries:
    Wakers = lists:flatten(ets:match(?status_tab, {'_', {unset, '$1'}})),
    [exit(I, shutdown) || I <- Wakers],
    ets:delete(?status_tab),
    ok.

%% @doc Set the value of a condition variable.
%%
%% Warning: only one process can set a condition variable at a
%% time. Race conditions between different processes setting the
%% condition variable are not handled.
-spec set(key(), value()) -> ok.
set(Key, Value) ->
    case ets:lookup(?status_tab, Key) of
        [{_, {set, _OldValue}}] ->
            %% Already set: simply overwrite the value.
            ets:insert(?status_tab, {Key, {set, Value}});
        [{_, {unset, Pid}}] ->
            %% Notify the waker and wait for it to update the value.
            %% The waker exits after writing {set, Value}, so its DOWN
            %% message doubles as the acknowledgement:
            MRef = monitor(process, Pid),
            Pid ! {set, Value},
            receive
                {'DOWN', MRef, _, _, _} ->
                    ok
            end;
        [] ->
            %% The value is not set, and nobody waits for it:
            case ets:insert_new(?status_tab, {Key, {set, Value}}) of
                true -> ok;
                false ->
                    %% Race condition: someone just installed a waker
                    %% process. Retry:
                    set(Key, Value)
            end
    end,
    ok.

%% @doc Delete the value of the condition variable:
-spec unset(key()) -> ok.
unset(Key) ->
    case ets:lookup(?status_tab, Key) of
        [{_, {set, _OldValue}}] ->
            ets:delete(?status_tab, Key);
        %% If there is a waker process, we just leave it alone:
        _ ->
            ok
    end,
    ok.

%% @doc Check if the variable is set
-spec is_set(key()) -> boolean().
is_set(Key) ->
    peek(Key) =/= undefined.

%% @doc Read the value of the variable if it's set, but don't wait for
%% it
-spec peek(key()) -> {ok, value()} | undefined.
peek(Key) ->
    case ets:lookup(?status_tab, Key) of
        [{_, {set, Val}}] ->
            {ok, Val};
        _ ->
            undefined
    end.

%% @doc Wait for the variable to be set and return the value
-spec read(key()) -> value().
read(Key) ->
    case read_or_wait(Key) of
        {set, Value} ->
            Value;
        {wait, MRef} ->
            receive
                %% Rather unconventionally, the actual information is
                %% transmitted in a DOWN message from a temporary
                %% "waker" process. See `waker_entrypoint':
                {'DOWN', MRef, _, _, {cvar_set, Value}} ->
                    Value;
                {'DOWN', MRef, _, _, noproc} ->
                    %% Race condition: the variable was set between
                    %% `read_or_wait' and `monitor' calls.
                    read(Key)
            end
    end.

%% @doc Wait for the variable to be set and return the value if it was
%% set within the timeout
-spec read(key(), timeout()) -> {ok, value()} | timeout.
read(Key, infinity) ->
    {ok, read(Key)};
read(Key, Timeout) ->
    case read_or_wait(Key) of
        {set, Value} ->
            {ok, Value};
        {wait, MRef} ->
            receive
                %% Rather unconventionally, the actual information is
                %% transmitted in a DOWN message from a temporary
                %% "waker" process. See `waker_loop':
                {'DOWN', MRef, _, _, {cvar_set, Value}} ->
                    {ok, Value};
                {'DOWN', MRef, _, _, noproc} ->
                    %% Race condition: the variable was set between
                    %% `read_or_wait' and `monitor' calls:
                    read(Key, 0)
            after Timeout ->
                    demonitor(MRef, [flush]),
                    timeout
            end
    end.

%% @doc Wait for multiple variables
-spec wait_vars([key()], timeout()) -> ok | {timeout, [key()]}.
wait_vars(Keys, infinity) ->
    _ = [read(I) || I <- Keys],
    ok;
wait_vars(Keys, Timeout) ->
    %% Keep only the keys that are not set yet, together with the
    %% monitor on their waker process:
    L = [{I, MRef} || I <- Keys, {wait, MRef} <- [read_or_wait(I)]],
    {TimedOutKeys, MRefs} = lists:unzip(do_wait_vars(L, Timeout)),
    _ = [demonitor(I, [flush]) || I <- MRefs],
    case TimedOutKeys of
        [] -> ok;
        _ -> {timeout, TimedOutKeys}
    end.

%%================================================================================
%% Internal functions
%%================================================================================

-spec read_or_wait(key()) -> {set, value()} | {wait, reference()}.
%% Look the key up; if it is unset, arrange a monitor whose DOWN
%% message will deliver the value (see waker_entrypoint/2).
read_or_wait(Key) ->
    case ets:lookup(?status_tab, Key) of
        [] ->
            {Pid, MRef} = spawn_monitor(?MODULE, waker_entrypoint, [Key, self()]),
            %% Wait until the newly created process either establishes itself
            %% as a waker for the key, or exits:
            receive
                {Pid, proceed} ->
                    {wait, MRef};
                {'DOWN', MRef, _, _, Reason} ->
                    cvar_retry = Reason, %% assert
                    read_or_wait(Key)
            end;
        [{_, {set, Val}}] ->
            {set, Val};
        [{_, {unset, Pid}}] ->
            {wait, monitor(process, Pid)}
    end.

%% Wait for the DOWN message of each waker in turn, sharing a single
%% time budget. Returns the (key, monitor) pairs that timed out.
%% NOTE: the spec previously declared `[key()]', but the function
%% returns `[{key(), reference()}]' (see the lists:unzip/1 call in
%% wait_vars/2); the spec is corrected here.
-spec do_wait_vars([{key(), reference()}], integer()) -> [{key(), reference()}].
do_wait_vars([], _) ->
    [];
do_wait_vars([{Key, MRef}|Rest], TimeLeft) ->
    T0 = erlang:monotonic_time(millisecond),
    receive
        {'DOWN', MRef, _, _, Reason} ->
            %% assert:
            case Reason of
                {cvar_set, _} -> ok;
                noproc -> ok
            end,
            T1 = erlang:monotonic_time(millisecond),
            %% Clamp at 0: a negative timeout in `after' raises a
            %% timeout_value error, so the budget must never go below 0.
            do_wait_vars(Rest, max(0, TimeLeft - (T1 - T0)))
    after TimeLeft ->
            [{Key, MRef}|do_wait_vars(Rest, 0)]
    end.

%%================================================================================
%% Waker process implementation
%%================================================================================

%% Entry point of the temporary waker process: register in the status
%% table as {unset, self()}, then wait for a {set, Value} message and
%% broadcast the value via our exit reason (monitors deliver it).
-spec waker_entrypoint(key(), pid()) -> no_return().
waker_entrypoint(Key, Parent) ->
    case ets_insert_new({Key, {unset, self()}}) of
        false ->
            %% Race condition: someone installed the waker before us,
            %% or the variable has been set, so exit and signal the
            %% parent to retry:
            exit(cvar_retry);
        true ->
            %% We are the official waker for the variable now. Wait
            %% for it to be set:
            Parent ! {self(), proceed},
            receive
                {set, Value} ->
                    ets_insert({Key, {set, Value}}),
                    %% This will broadcast the variable value in the
                    %% DOWN message to the processes that monitor us:
                    exit({cvar_set, Value})
            end
    end.

%% ets:insert that converts "table deleted" (badarg) into a distinct
%% exit reason:
ets_insert(Value) ->
    try ets:insert(?status_tab, Value)
    catch error:badarg -> exit(cvar_stopped)
    end.

%% Same, for ets:insert_new:
ets_insert_new(Value) ->
    try ets:insert_new(?status_tab, Value)
    catch error:badarg -> exit(cvar_stopped)
    end.
src/mria_condition_var.erl
0.560854
0.406891
mria_condition_var.erl
starcoder
% Copyright (c) 2014-2015, <NAME> <<EMAIL>>
%
% Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted,
% provided that the above copyright notice and this permission notice appear in all copies.
%
% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
% WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
% DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
% NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
%
% @author <NAME> <<EMAIL>>
% @copyright {@years} <NAME>
% @version {@version}
% @doc The `noesis_proplists' module includes functions that work on property lists.

-module(noesis_proplists).

% Our get_keys/1 shadows the auto-imported BIF erlang:get_keys/1:
-compile({no_auto_import, [get_keys/1]}).

% Types

-type proplist() :: [{term(), term()}].
-type proplist(Key) :: [{Key, term()}].
-type proplist(Key, Value) :: [{Key, Value}].

-export_type([
  proplist/0,
  proplist/1,
  proplist/2
]).

% API
-export([
  get_keys/1,
  get_value/2,
  get_value/3,
  delete_keys/2,
  keypos/2,
  extract/2,
  extract/3,
  extract/4,
  partial_extract/2,
  partial_extract/3,
  merge/2,
  merge/3
]).

% API

% @doc Returns a list of all keys in the property list, not including duplicates.
% The result is sorted (usort also deduplicates).
-spec get_keys(proplist()) -> [term()].
get_keys(List) ->
  Keys = [Key || {Key, _Value} <- List],
  lists:usort(Keys).

% @doc Delegates to {@link get_value/3} and sets the default return value to `undefined'.
-spec get_value(term(), proplist()) -> term().
get_value(Key, List) ->
  get_value(Key, List, undefined).

% @doc Returns the value of a key/value property in `List'. Returns the default value if `Key' is not found.
-spec get_value(term(), proplist(), term()) -> term().
get_value(Key, List, Default) ->
  case lists:keyfind(Key, 1, List) of
    {Key, Found} -> Found;
    false -> Default
  end.

% @doc Removes multiple keys from a property list and returns a new one. Duplicate keys are also removed.
-spec delete_keys(Keys :: [term()], proplist()) -> proplist().
delete_keys([], List) ->
  List;
delete_keys([Key|Rest]=Keys, List) ->
  Remaining = lists:keydelete(Key, 1, List),
  % Keep deleting the same key while duplicates remain, otherwise move on:
  case lists:keymember(Key, 1, Remaining) of
    false -> delete_keys(Rest, Remaining);
    true -> delete_keys(Keys, Remaining)
  end.

% @doc Returns the position of a key in a property list. If `Key' does not exist `undefined' will be returned.
-spec keypos(term(), proplist()) -> pos_integer() | undefined.
keypos(Key, List) ->
  keypos_acc(Key, List, 1).

% @doc Delegates to {@link extract/4}. `NullValue' is set to `undefined' and `Defaults' is set to an empty list.
-spec extract([term()], proplist()) -> proplist().
extract(Keys, List) ->
  extract(Keys, List, undefined, []).

% @doc Delegates to {@link extract/4}. Allows you to set `NullValue' and sets `Defaults' to an empty list.
-spec extract([term()], proplist(), term()) -> proplist().
extract(Keys, List, NullValue) ->
  extract(Keys, List, NullValue, []).

% @doc Extracts key/value pairs from a property list and returns a new property list containing only extracted pairs.<br />
% You can provide a set of default values that will be used as fallback if the key is not found in `List'. If the key is not
% found in `Defaults' either, the `NullValue' will be used for the value part.<br />
% Duplicate entries in `Keys' will be ignored.
-spec extract([term()], proplist(), term(), proplist()) -> proplist().
extract(Keys, List, NullValue, Defaults) ->
  [case get_value(Key, List) of
     undefined -> {Key, get_value(Key, Defaults, NullValue)};
     Value -> {Key, Value}
   end || Key <- lists:usort(Keys)].

% @doc Delegates to {@link partial_extract/3}. `NullValue' is set to `undefined'.
-spec partial_extract([term()], proplist()) -> proplist().
partial_extract(Keys, List) ->
  partial_extract(Keys, List, undefined).

% @doc Extracts key/value pairs from a property list and returns a new property list containing only extracted pairs.<br />
% Pairs not found in `List' (and all those matching `NullValue') will not be included in the property list that is returned.
-spec partial_extract([term()], proplist(), term()) -> proplist().
partial_extract(Keys, List, NullValue) ->
  Extracted = extract(Keys, List, NullValue),
  lists:filter(fun({_Key, Value}) -> Value =/= NullValue end, Extracted).

% @doc Delegates to {@link merge/3}. `Fun' is set to function that always uses the value from `ListB'.
-spec merge(proplist(), proplist()) -> proplist().
merge(ListA, ListB) ->
  TakeRight = fun(_Key, _ValueA, ValueB) -> ValueB end,
  merge(TakeRight, ListA, ListB).

% @doc Merges two property lists, `ListA' and `ListB', to create a new list. All the Key/Value pairs from both lists are
% included in the new property list. If a key occurs in both lists then `Fun' is called with the key and both values
% to return a new value.
-spec merge(fun((Key, ValueA, ValueB) -> Value), proplist(Key, ValueA), proplist(Key, ValueB)) -> proplist(Key, Value).
merge(Fun, ListA, ListB) ->
  Merged = orddict:merge(Fun, orddict:from_list(ListA), orddict:from_list(ListB)),
  orddict:to_list(Merged).

% Private

% Walk the list counting positions (1-based); stop at the first pair
% whose key matches.
-spec keypos_acc(term(), proplist(), pos_integer()) -> pos_integer() | undefined.
keypos_acc(_Key, [], _Pos) ->
  undefined;
keypos_acc(Key, [{Key, _Value}|_Rest], Pos) ->
  Pos;
keypos_acc(Key, [_|Rest], Pos) ->
  keypos_acc(Key, Rest, Pos + 1).
src/noesis_proplists.erl
0.735642
0.429908
noesis_proplists.erl
starcoder
%% Common Test suite for the directed_weighted_graph module.
-module(directed_weighted_graph_SUITE).

-compile(export_all).
-compile(nowarn_export_all).

-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("data_structures.hrl").

%% List of test cases run by Common Test.
all() ->
    [
     add_edge_test,
     delete_edge_test,
     delete_vertex_test,
     delete_not_existing_vertex,
     multiple_add_edge_test,
     multiple_delete_edge_test,
     multiple_graphs_test,
     from_list_test,
     delete_test,
     incident_edges_test
    ].

%% A single added edge shows up in to_list/1 as a #weighted_edge{}.
add_edge_test(_Config) ->
    Graph = directed_weighted_graph:add_edge({{1, 2}, 1}, directed_weighted_graph:new()),
    ?assertEqual([#weighted_edge{route = {1, 2}, weight = 1}], directed_weighted_graph:to_list(Graph)).

%% Deleting the only edge empties the graph. NOTE(review): delete_edge/2's
%% return value is ignored and the old handle is re-read, which implies the
%% graph is mutated in place (ETS-backed?) — confirm against the module.
delete_edge_test(_Config) ->
    Graph = directed_weighted_graph:add_edge({{1, 2}, 1}, directed_weighted_graph:new()),
    directed_weighted_graph:delete_edge({1, 2}, Graph),
    ?assertEqual([], directed_weighted_graph:to_list(Graph)).

%% Deleting a vertex removes every edge that starts or ends at it;
%% the expected result is computed by filtering the input list.
delete_vertex_test(_Config) ->
    ToDeleteVertex = 1,
    List = [{{1, 2}, 1}, {{2, 1}, 1}, {{2, 3}, 1}, {{3, 2}, 1}, {{1, 3}, 1}],
    FilteredList = lists:filter(
        fun({{From, To}, _Weight}) ->
            From /= ToDeleteVertex andalso To /= ToDeleteVertex
        end, List),
    %% Wrap the remaining raw pairs in the record form to_list/1 returns:
    WrappedList = lists:map(
        fun({Route = {_From, _To}, Weight}) ->
            #weighted_edge{route = Route, weight = Weight}
        end, FilteredList),
    Graph = lists:foldl(
        fun(Elem, Acc) ->
            directed_weighted_graph:add_edge(Elem, Acc)
        end, directed_weighted_graph:new(), List),
    %% Matching Graph on the left asserts delete_vertex/2 returns the
    %% same handle it was given:
    Graph = directed_weighted_graph:delete_vertex(ToDeleteVertex, Graph),
    ?assertEqual(lists:sort(WrappedList), directed_weighted_graph:to_list(Graph)).
%% Deleting a vertex that is not in the graph must leave it unchanged.
%% (Vertex 4 appears in no edge, so the filter below removes nothing.)
delete_not_existing_vertex(_Config) ->
    ToDeleteVertex = 4,
    List = [{{1, 2}, 1}, {{2, 1}, 1}, {{2, 3}, 1}, {{3, 2}, 1}, {{1, 3}, 1}],
    FilteredList = lists:filter(
        fun({{From, To}, _Weight}) ->
            From /= ToDeleteVertex andalso To /= ToDeleteVertex
        end, List),
    WrappedList = lists:map(
        fun({Route = {_From, _To}, Weight}) ->
            #weighted_edge{route = Route, weight = Weight}
        end, FilteredList),
    Graph = lists:foldl(
        fun(Elem, Acc) ->
            directed_weighted_graph:add_edge(Elem, Acc)
        end, directed_weighted_graph:new(), List),
    %% Asserts delete_vertex/2 returns the same handle it was given:
    Graph = directed_weighted_graph:delete_vertex(ToDeleteVertex, Graph),
    ?assertEqual(lists:sort(WrappedList), directed_weighted_graph:to_list(Graph)).

%% Adding several edges (including both directions of the same pair)
%% keeps all of them.
multiple_add_edge_test(_Config) ->
    List = [{{1, 2}, 1}, {{2, 1}, 1}, {{2, 3}, 1}, {{3, 2}, 1}, {{1, 3}, 1}],
    Graph = lists:foldl(
        fun(Elem, Acc) ->
            directed_weighted_graph:add_edge(Elem, Acc)
        end, directed_weighted_graph:new(), List),
    WrappedList = lists:map(
        fun({Route = {_From, _To}, Weight}) ->
            #weighted_edge{route = Route, weight = Weight}
        end, List),
    ?assertEqual(lists:sort(WrappedList), directed_weighted_graph:to_list(Graph)).

%% Adding all edges and then deleting them one by one leaves an empty
%% graph. delete_edge/2 results are ignored — see the mutation note on
%% delete_edge_test.
multiple_delete_edge_test(_Config) ->
    List = [{{1, 2}, 1}, {{2, 1}, 1}, {{2, 3}, 1}, {{3, 2}, 1}, {{1, 3}, 1}],
    Graph = lists:foldl(
        fun(Elem, Acc) ->
            directed_weighted_graph:add_edge(Elem, Acc)
        end, directed_weighted_graph:new(), List),
    WrappedList = lists:map(
        fun({Route = {_From, _To}, Weight}) ->
            #weighted_edge{route = Route, weight = Weight}
        end, List),
    ?assertEqual(lists:sort(WrappedList), directed_weighted_graph:to_list(Graph)),
    lists:foreach(
        fun({Route = {_From, _To}, _Weight}) ->
            directed_weighted_graph:delete_edge(Route, Graph)
        end, List),
    ?assertEqual([], directed_weighted_graph:to_list(Graph)).
%% Two graphs created independently do not share state.
multiple_graphs_test(_Config) ->
    Graph0 = directed_weighted_graph:add_edge({{1, 2}, 1}, directed_weighted_graph:new()),
    Graph1 = directed_weighted_graph:add_edge({{3, 4}, 1}, directed_weighted_graph:new()),
    ?assertEqual([#weighted_edge{route = {1, 2}, weight = 1}], directed_weighted_graph:to_list(Graph0)),
    ?assertEqual([#weighted_edge{route = {3, 4}, weight = 1}], directed_weighted_graph:to_list(Graph1)).

%% from_list/1 is equivalent to folding add_edge/2 over the list.
from_list_test(_Config) ->
    List = [{{1, 2}, 1}, {{2, 1}, 1}, {{2, 3}, 1}, {{3, 2}, 1}, {{1, 3}, 1}],
    WrappedList = lists:map(
        fun({Route = {_From, _To}, Weight}) ->
            #weighted_edge{route = Route, weight = Weight}
        end, List),
    Graph = directed_weighted_graph:from_list(List),
    ?assertEqual(lists:sort(WrappedList), directed_weighted_graph:to_list(Graph)).

%% After delete/1 the handle is invalid: to_list/1 raises badarg
%% (consistent with an ETS-backed implementation — see note above).
delete_test(_Config) ->
    List = [{{1, 2}, 1}, {{2, 1}, 1}, {{2, 3}, 1}, {{3, 2}, 1}, {{1, 3}, 1}],
    WrappedList = lists:map(
        fun({Route = {_From, _To}, Weight}) ->
            #weighted_edge{route = Route, weight = Weight}
        end, List),
    Graph = directed_weighted_graph:from_list(List),
    ?assertEqual(lists:sort(WrappedList), directed_weighted_graph:to_list(Graph)),
    directed_weighted_graph:delete(Graph),
    ?assertError(badarg, directed_weighted_graph:to_list(Graph)).

%% incident_edges/2 returns every edge touching the vertex, whether it
%% is the source or the destination, in sorted order.
incident_edges_test(_Config) ->
    Vertex = 1,
    List = [{{1, 2}, 1}, {{2, 1}, 1}, {{2, 3}, 1}, {{3, 2}, 1}, {{1, 3}, 1}],
    Graph = directed_weighted_graph:from_list(List),
    Incident = lists:filter(
        fun({{From, To}, _Weight}) ->
            From =:= Vertex orelse To =:= Vertex
        end, List),
    IncidentWrapped = lists:map(
        fun({Route = {_From, _To}, Weight}) ->
            #weighted_edge{route = Route, weight = Weight}
        end, Incident),
    IncidentEdges = directed_weighted_graph:incident_edges(Vertex, Graph),
    ?assertEqual(lists:sort(IncidentWrapped), IncidentEdges).
test/directed_weighted_graph_SUITE.erl
0.529507
0.544135
directed_weighted_graph_SUITE.erl
starcoder
%% General-purpose list helpers.
-module(ktn_lists).

-export([
         delete_first/2,
         split_when/2,
         map/3,
         filter/3
        ]).

%% @doc Returns a copy of List deleting the first Element where Fun(Element)
%%      returns true, if there is such an element.
%% @end
-spec delete_first(fun((term()) -> boolean()), list()) -> list().
delete_first(Fun, List) ->
    delete_first(Fun, List, []).

delete_first(Fun, [], Acc) when is_function(Fun, 1) ->
    lists:reverse(Acc);
delete_first(Fun, [Head | Tail], Acc) ->
    case Fun(Head) of
        false ->
            delete_first(Fun, Tail, [Head | Acc]);
        true ->
            %% Fix: the original used lists:concat([lists:reverse(Acc), Tail]),
            %% which only works by accident (lists:concat/1 stringifies atom,
            %% integer and float elements). lists:reverse/2 reverses Acc onto
            %% Tail directly, in O(length(Acc)).
            lists:reverse(Acc, Tail)
    end.

%% @doc Splits a list whenever an element satisfies the When predicate.
%%      Returns a list of lists where each list includes the matched element
%%      as its last one.
%%      E.g.
%%      split_when(fun (X) -> $. == X end, "a.b.c") = ["a.", "b.", "c"]
%% @end
-spec split_when(fun(), list()) -> list().
split_when(When, List) ->
    split_when(When, List, [[]]).

%% Internal: groups are accumulated reversed (both the elements within a
%% group and the list of groups), then everything is reversed at the end.
split_when(When, [], [[] | Results]) ->
    %% Drop the trailing empty group created when the last element matched:
    split_when(When, [], Results);
split_when(_When, [], Results) ->
    Reversed = lists:map(fun lists:reverse/1, Results),
    lists:reverse(Reversed);
split_when(When, [Head | Tail], [Current0 | Rest]) ->
    Current = [Head | Current0],
    Result = case When(Head) of
                 true -> [[], Current | Rest];
                 false -> [Current | Rest]
             end,
    split_when(When, Tail, Result).

%% @doc Like lists:map/2 but allows specifying additional arguments.
%%      Each element is passed as the FIRST argument, followed by Args.
%%      E.g.
%%      ktn_lists:map(fun (X, Y) -> X * Y end, [2], [1, 2, 3]) = [2, 4, 6]
%% @end
-spec map(fun(), list(), list()) -> list().
map(Fun, Args, [Head | Tail]) ->
    [apply(Fun, [Head | Args]) | map(Fun, Args, Tail)];
map(Fun, _, []) when is_function(Fun) ->
    [].

%% @doc Like lists:filter/2 but allows specifying additional arguments.
%%      Each element is passed as the FIRST argument, followed by Args.
%%      E.g.
%%      `ktn_lists:filter(fun (X, Y) -> X * Y < 3 end, [2], [1, 2, 3]) = [1]'
%%      (Fix: the original doc example claimed `[2]', but with Args = [2]
%%      the predicate is Pred(Elem, 2), so only 1 satisfies 1 * 2 < 3.)
%% @end
-spec filter(fun(), list(), list()) -> list().
filter(Pred, Args, List) when is_function(Pred) ->
    [Elem || Elem <- List, apply(Pred, [Elem | Args])].
src/ktn_lists.erl
0.519765
0.661294
ktn_lists.erl
starcoder
-module(pratt_parser).

-export([eval/1, expression/2, expect/2]).

%% Parser state: the remaining token stream.
-record(pratt_parser, {tokens}).

-define(is_pratt_parser(Term), is_record(Term, pratt_parser)).

-include("token.hrl").

%% A Pratt parser. Similar to a recursive descent parser but instead
%% of coding a function for each production, the syntax is coded in a
%% set of token objects. New operators and statements can be slipped
%% in to the language with the proper precedence by adding new token
%% objects to the lexer without altering the code for existing tokens.
%% Pretty cool.
%%
%% Tokens are records with three fields:
%% lbp: operator precedence. Higher numbers bind more tightly.
%% nud: a fun, called as nud(Parser) when the token is the first
%% token in an expression, including a recursive call to expresssion
%% (i.e., subexpression). For example, this would be called for a
%% unary operator, a literal, or for the "if" in the construct "if
%% <cond> then <expr>". It is the token's responsibility to call
%% pratt_parser:expression and/or pratt_parser:expect to handle the
%% remainder of the expression, if any.
%% Returns {NewParser, Value}.
%% led: fun, called as (Parser, Left) when the token is preceeded by a
%% subexpression, Left. The token may be postfix or infix. It is the
%% token's responsibility to call pratt_parser:expression and/or
%% pratt_parser:expect to handle the remainder of the expression, if
%% any, and combine it with Left.
%% Returns {NewParser, Value}.
%%
%% Only lbp is mandatory. nud and led will be called only when necessary,
%% if ever.
%% nud and led can call pratt_parser:expression(Parser, Rbp) to
%% recursively parse the right expression. Rbp should be the token's
%% lbp for left-associativity, lbp-1 for right-associativity.
%%
%% pratt_parser:eval(Tokens) will return the result of the parse.
%%
%% Syntax errors aren't handled at the moment and will cause ridiculous
%% exceptions to be thrown.
%% http://javascript.crockford.com/tdop/tdop.html
%% http://effbot.org/zone/simple-top-down-parsing.htm
%% http://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/

%% Runs the Tokens through the parser and returns Value.
%%
eval(Tokens) when is_list(Tokens) ->
    PrattParser = new(Tokens),
    {_New, Value} = expression(PrattParser, 0),
    Value.

%% Wrap a token list in a fresh parser state.
new(Tokens) when is_list(Tokens) ->
    #pratt_parser{tokens = Tokens}.

%% Peek at the next token without consuming it. On an empty stream a
%% synthetic end_token is returned; its other fields (including lbp)
%% take the record defaults from token.hrl — presumably an lbp that
%% never binds, so more_expression/3 stops. TODO(review): confirm the
%% default lbp in token.hrl; an 'undefined' lbp would compare greater
%% than any number and loop into the empty-stream token/1 crash below.
lookahead_token(_This = #pratt_parser{tokens = [Token | _Rest]}) ->
    Token;
lookahead_token(_This = #pratt_parser{tokens = []}) ->
    #token{type = end_token}.

%% Consume and return the next token. NOTE(review): there is no clause
%% for an empty stream, so running out of tokens mid-expression raises
%% function_clause (consistent with the "syntax errors aren't handled"
%% note in the module header).
token(This = #pratt_parser{tokens = [Token | Rest]}) ->
    {This#pratt_parser{tokens = Rest}, Token}.

%% Parse one (sub)expression with right binding power Rbp.
%% Returns {NewThis, Value}.
%%
expression(This, Rbp) ->
    {This2, Token} = token(This),
    %% The first token of an expression is handled by its nud:
    {This3, Left} = (Token#token.nud)(This2),
    more_expression(This3, Left, Rbp).

%% Keep extending Left with led tokens while the lookahead binds more
%% tightly than Rbp. Returns {NewThis, Value}.
%%
more_expression(This, Left, Rbp) ->
    LookaheadToken = lookahead_token(This),
    case Rbp < LookaheadToken#token.lbp of
        true ->
            {This2, Token} = token(This),
            {This3, Left2} = (Token#token.led)(This2, Left),
            more_expression(This3, Left2, Rbp);
        false ->
            {This, Left}
    end.

%% If the next token is the ExpectedType the consume it otherwise
%% throw an exception.
%%
expect(This, ExpectedType) ->
    Type = (lookahead_token(This))#token.type,
    case Type == ExpectedType of
        true ->
            {NewThis, _Token} = token(This),
            NewThis;
        false ->
            throw(spud:format("Expected ~s, got ~s", [ExpectedType, Type]))
    end.
src/pratt_parser.erl
0.546496
0.569075
pratt_parser.erl
starcoder
%% Minimalistic module for performing time-ordered actions in load tests without an event loop
-module(timetable).

-export([new/3, do/3, do/4, merge/1]).

-type time_diff() :: non_neg_integer(). %% milliseconds
-type timetable(Event) :: [{time_diff(), Event}].
-type executor(Event) :: fun((escalus:client(), Event) -> any()).
-type executor(Event, State) :: fun((escalus:client(), Event, State) -> State).

-export_type([timetable/1]).

%% @doc Creates a new timetable with Event scheduled every Interval (ms)
%% Start time is a random Offset where 0 =< Offset =< Interval
-spec new(Event, non_neg_integer(), time_diff()) -> timetable(Event).
new(Event, Count, Interval) ->
    Start = rand:uniform(Interval + 1) - 1,
    [{Start + Interval * N, Event} || N <- lists:seq(0, Count - 1)].

%% @doc Executes F for each {Time, Event} from TimeTable as soon as Time (ms) passed
%% AND previous executions finished.
-spec do(escalus:client(), executor(Event), timetable(Event)) -> ok.
do(Client, F, TimeTable) ->
    %% Adapt the stateless executor to the stateful driver below:
    Wrapped = fun(_C, Event, no_state) ->
                      F(Client, Event),
                      no_state
              end,
    do(Client, Wrapped, TimeTable, no_state),
    ok.

%% @doc Like do/3, but passes State through the executions of F.
-spec do(escalus:client(), executor(Event, State), timetable(Event), State) -> State.
do(Client, F, TimeTable, State) ->
    Epoch = now_ms(),
    lists:foldl(
      fun({DueTime, Event}, AccState) ->
              %% Sleep only for whatever is left of this entry's delay;
              %% never a negative amount when we are already late:
              Remaining = max(0, DueTime - (now_ms() - Epoch)),
              escalus_connection:wait(Client, Remaining),
              F(Client, Event, AccState)
      end, State, TimeTable).

%% @doc Merges Tables preserving the time order
-spec merge([timetable(Event)]) -> timetable(Event).
merge(Tables) ->
    lists:merge(Tables).

%% Current monotonic-enough wall time in milliseconds.
now_ms() ->
    erlang:system_time(millisecond).
src/helpers/timetable.erl
0.542136
0.565869
timetable.erl
starcoder
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
%   http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.

%% @doc Saves a Key/Value pair to a ini file. The Key consists of a Section
%%      and Option combination. If that combination is found in the ini file
%%      the new value replaces the old value. If only the Section is found the
%%      Option and value combination is appended to the Section. If the Section
%%      does not yet exist in the ini file, it is added and the Option/Value
%%      pair is appended.
%% @see config

-module(config_writer).

-export([save_to_file/2]).

%% @spec save_to_file(
%%           Config::{{Section::string(), Option::string()}, Value::string()},
%%           File::filename()) -> ok
%% @doc Saves a Section/Key/Value triple to the ini file File::filename()
save_to_file({{Section, Key}, Value}, File) ->
    {ok, OldFileContents} = file:read_file(File),
    %% Split on any line-ending convention (and ^Z, the old DOS EOF marker):
    Lines = re:split(OldFileContents, "\r\n|\n|\r|\032", [{return, list}]),
    SectionLine = "[" ++ Section ++ "]",
    %% The pattern matches EITHER "Key =" at line start (with Key quoted
    %% literally via \Q...\E, captured as group 1) OR any "[section]"
    %% header (no capture group). process_section_lines/5 relies on this:
    %% {match, []} means a new section header, {match, _} means the key.
    {ok, Pattern} = re:compile(["^(\\Q", Key, "\\E\\s*=)|\\[[a-zA-Z0-9\.\_-]*\\]"]),
    NewLines = process_file_lines(Lines, [], SectionLine, Pattern, Key, Value),
    NewFileContents = reverse_and_add_newline(strip_empty_lines(NewLines), []),
    file:write_file(File, NewFileContents).
%% Scan for the target section header. SeenLines accumulates processed
%% lines in REVERSE order; the caller un-reverses with
%% reverse_and_add_newline/2.
process_file_lines([Section|Rest], SeenLines, Section, Pattern, Key, Value) ->
    %% Found the section header: switch to within-section scanning.
    process_section_lines(Rest, [Section|SeenLines], Pattern, Key, Value);
process_file_lines([Line|Rest], SeenLines, Section, Pattern, Key, Value) ->
    process_file_lines(Rest, [Line|SeenLines], Section, Pattern, Key, Value);
process_file_lines([], SeenLines, Section, _Pattern, Key, Value) ->
    % Section wasn't found. Append it with the option here.
    %% (List is reversed, so the new option is first, then the header.)
    [Key ++ " = " ++ Value, Section, "" | strip_empty_lines(SeenLines)].

%% Inside the target section, look for either the key line (replace it)
%% or the next section header (insert before it).
process_section_lines([Line|Rest], SeenLines, Pattern, Key, Value) ->
    case re:run(Line, Pattern, [{capture, all_but_first}]) of
        nomatch ->
            % Found nothing interesting. Move on.
            process_section_lines(Rest, [Line|SeenLines], Pattern, Key, Value);
        {match, []} ->
            % Found another section. Append the option here.
            %% (No capture group fired, so the "[section]" alternative matched.)
            lists:reverse(Rest) ++
            [Line, "", Key ++ " = " ++ Value | strip_empty_lines(SeenLines)];
        {match, _} ->
            % Found the option itself. Replace it.
            lists:reverse(Rest) ++ [Key ++ " = " ++ Value | SeenLines]
    end;
process_section_lines([], SeenLines, _Pattern, Key, Value) ->
    % Found end of file within the section. Append the option here.
    [Key ++ " = " ++ Value | strip_empty_lines(SeenLines)].

%% Un-reverse the accumulated (reversed) line list, joining lines with
%% "\n" into a nested iolist suitable for file:write_file/2.
reverse_and_add_newline([Line|Rest], Content) ->
    reverse_and_add_newline(Rest, [Line, "\n", Content]);
reverse_and_add_newline([], Content) ->
    Content.

%% Drop leading empty strings (trailing blank lines of the original
%% file, since the list is reversed at the call sites).
strip_empty_lines(["" | Rest]) ->
    strip_empty_lines(Rest);
strip_empty_lines(All) ->
    All.
src/config_writer.erl
0.712332
0.469034
config_writer.erl
starcoder
%% Copyright (c) 2013 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.

%% File    : luerl_comp_peep.erl
%% Author  : <NAME>
%% Purpose : A basic LUA 5.2 compiler for Luerl.

%% Does peep-hole optimisation in the compiler.

-module(luerl_comp_peep).

-include("luerl.hrl").
-include("luerl_comp.hrl").
-include("luerl_instrs.hrl").

-export([chunk/2]).

%% chunk(St0, Opts) -> {ok,St0}.
%% Entry point: run the peephole pass over the chunk's instruction
%% list and optionally dump the result.
chunk(#code{code=Is0}=Code, Opts) ->
    Is1 = instrs(Is0, nil),                     %No local state
    luerl_comp:debug_print(Opts, "cp: ~p\n", [Is1]),
    {ok,Code#code{code=Is1}}.

%% instrs(Instrs, State) -> Instrs.
%% One pass over the instruction stream; each rewrite re-enters instrs
%% with the fused instruction at the head, so chains of rewrites
%% compose. The instruction patterns are macros from luerl_instrs.hrl.

%% Combining instructions, table in Acc.
instrs([?PUSH,?LOAD_LIT(L),?GET_KEY|Is], St) ->
    instrs([?GET_LIT_KEY(L)|Is], St);
instrs([?PUSH,?LOAD_LIT(L),?SET_KEY|Is], St) ->
    instrs([?SET_LIT_KEY(L)|Is], St);
%% Fuse load-then-push pairs into single push instructions.
instrs([?LOAD_LIT(L),?PUSH|Is], St) ->
    instrs([?PUSH_LIT(L)|Is], St);
instrs([?LOAD_LVAR(D,I),?PUSH|Is], St) ->
    instrs([?PUSH_LVAR(D,I)|Is], St);
instrs([?LOAD_EVAR(D,I),?PUSH|Is], St) ->
    instrs([?PUSH_EVAR(D,I)|Is], St);
instrs([?LOAD_GVAR(K),?PUSH|Is], St) ->
    instrs([?PUSH_GVAR(K)|Is], St);
%% Are these safe? Value should be left in Acc.
%% (Store followed by reload of the same slot: drop the redundant load.)
instrs([?STORE_LVAR(D,I),?LOAD_LVAR(D,I)|Is], St) ->
    instrs([?STORE_LVAR(D,I)|Is], St);
instrs([?STORE_EVAR(D,I),?LOAD_EVAR(D,I)|Is], St) ->
    instrs([?STORE_EVAR(D,I)|Is], St);
instrs([?STORE_GVAR(K),?LOAD_GVAR(K)|Is], St) ->
    instrs([?STORE_GVAR(K)|Is], St);
%% Doing sub instructions.
%% Recurse into every instruction that carries a nested instruction list.
instrs([?FDEF(Lsz,Esz,Pars,Fis0)|Is], St) ->
    Fis1 = instrs(Fis0, St),
    [?FDEF(Lsz,Esz,Pars,Fis1)|instrs(Is, St)];
instrs([?BLOCK(Lsz,Esz,Bis0)|Is], St) ->
    Bis1 = instrs(Bis0, St),
    [?BLOCK(Lsz,Esz,Bis1)|instrs(Is, St)];
instrs([?REPEAT(Ris0)|Is], St) ->
    Ris1 = instrs(Ris0, St),
    [?REPEAT(Ris1)|instrs(Is, St)];
instrs([?WHILE(Eis0, Wis0)|Is], St) ->
    Eis1 = instrs(Eis0, St),
    Wis1 = instrs(Wis0, St),
    [?WHILE(Eis1, Wis1)|instrs(Is, St)];
instrs([?IF_TRUE(Tis0)|Is], St) ->
    Tis1 = instrs(Tis0, St),
    [?IF_TRUE(Tis1)|instrs(Is, St)];
instrs([?IF_FALSE(Fis0)|Is], St) ->
    Fis1 = instrs(Fis0, St),
    [?IF_FALSE(Fis1)|instrs(Is, St)];
instrs([?IF(Tis0, Fis0)|Is], St) ->
    Tis1 = instrs(Tis0, St),
    Fis1 = instrs(Fis0, St),
    [?IF(Tis1, Fis1)|instrs(Is, St)];
instrs([?NFOR(V, Fis0)|Is], St) ->
    Fis1 = instrs(Fis0, St),
    [?NFOR(V, Fis1)|instrs(Is, St)];
instrs([?GFOR(Vs, Fis0)|Is], St) ->
    Fis1 = instrs(Fis0, St),
    [?GFOR(Vs, Fis1)|instrs(Is, St)];
%% Nothing to do.
instrs([I|Is], St) -> [I|instrs(Is, St)];
instrs([], _) -> [].
src/luerl_comp_peep.erl
0.586996
0.520618
luerl_comp_peep.erl
starcoder
%% @author <NAME> <<EMAIL>>
%% @doc Measure the rate of a connection
%% <p>The rate module can measure the current rate of a connection by
%% using a sliding window protocol on top of updates.</p>
%% <p>You can initialize a new rate object with {@link init/0} after
%% which you can subsequently update it by calling {@link
%% update/2}. Each update will track the amount of bytes that has been
%% downloaded and use a 20 second sliding window to interpolate the
%% rate. The advantage of this solution is that small fluctuations
%% will not affect the rate and since it is effectively a running
%% average over the 20 seconds.</p>
%% @end
-module(etorrent_rate).

%% API
-export([init/0, init/1, update/2, format_eta/2]).

-include("etorrent_rate.hrl").
-include("log.hrl").

%% Width of the sliding window, in seconds.
-define(MAX_RATE_PERIOD, 20).

%% ====================================================================

%% @doc Convenience initializer for {@link init/1}
%% @end
init() -> init(?RATE_FUDGE).

%% @doc Initialize the rate tuple.
%% <p>Takes a single integer, fudge, which is the fudge factor used to start up.
%% It fakes the startup of the rate calculation.</p>
%% @end
%% NOTE(review): update/2 divides by (T - rate_since); a Fudge of 0
%% would make that 0 on an immediate update — presumably ?RATE_FUDGE
%% is positive; confirm in etorrent_rate.hrl.
-spec init(integer()) -> #peer_rate{}.
init(Fudge) ->
    T = now_secs(),
    #peer_rate { next_expected = T + Fudge,
                 last = T - Fudge,
                 rate_since = T - Fudge }.

%% @doc Update the rate record with Amount new downloaded bytes
%% @end
-spec update(#peer_rate{}, integer()) -> #peer_rate{}.
update(#peer_rate {rate = Rate,
                   total = Total,
                   next_expected = NextExpected,
                   last = Last,
                   rate_since = RateSince} = RT, Amount)
  when is_integer(Amount) ->
    T = now_secs(),
    case T < NextExpected andalso Amount =:= 0 of
        true ->
            %% We got 0 bytes, but we did not expect them yet, so just
            %% return the current tuple (simplification candidate)
            RT;
        false ->
            %% New rate: Timeslot between Last and RateSince contributes
            %% with the old rate. Then we add the new Amount and calc.
            %% the rate for the interval [T, RateSince].
            R = (Rate * (Last - RateSince) + Amount) / (T - RateSince),
            %% Idiom fix: use the min/2 and max/2 BIFs instead of
            %% lists:min/lists:max on freshly built two-element lists
            %% (same term-order semantics, no per-call allocation).
            #peer_rate { rate = R, %% New Rate
                         total = Total + Amount,
                         %% We expect the next data-block at the minimum of 5 secs or
                         %% when Amount bytes has been fetched at the current rate.
                         next_expected =
                             T + min(5, Amount / max(R, 0.0001)),
                         last = T,
                         %% RateSince is manipulated so it does not go beyond
                         %% ?MAX_RATE_PERIOD
                         rate_since = max(RateSince, T - ?MAX_RATE_PERIOD)}
    end.

%% @doc Calculate estimated time of arrival.
%% @end
-type eta() :: {integer(), {integer(), integer(), integer()}}.
-spec eta(integer(), float()) -> eta() | unknown.
eta(_Left, DR) when DR == 0 -> unknown;
eta(Left, DownloadRate) when is_integer(Left) ->
    calendar:seconds_to_daystime(round(Left / DownloadRate));
eta(unknown, _DownloadRate) -> unknown.

%% @doc Format an ETA given bytes Left and a Rate
%% <p>This function will return an iolist() format of the ETA given
%% how many bytes there are `Left' to download and the current `DownloadRate'.
%% </p>
%% @end
-spec format_eta(integer(), float()) -> iolist().
format_eta(Left, DownloadRate) ->
    case eta(Left, DownloadRate) of
        {DaysLeft, {HoursLeft, MinutesLeft, SecondsLeft}} ->
            io_lib:format("ETA: ~Bd ~Bh ~Bm ~Bs",
                          [DaysLeft, HoursLeft, MinutesLeft, SecondsLeft]);
        unknown ->
            "ETA: Unknown"
    end.

% @doc Returns the number of seconds elapsed as gregorian calendar seconds
% @end
%% NOTE(review): based on wall-clock local time, so it can jump on DST
%% or clock changes — kept as-is to preserve behaviour.
-spec now_secs() -> integer().
now_secs() ->
    calendar:datetime_to_gregorian_seconds(
      calendar:local_time()).
apps/etorrent/src/etorrent_rate.erl
0.706089
0.699691
etorrent_rate.erl
starcoder
% Licensed under the Apache License, Version 2.0 (the "License"); you may not % use this file except in compliance with the License. You may obtain a copy of % the License at % % http://www.apache.org/licenses/LICENSE-2.0 % % Unless required by applicable law or agreed to in writing, software % distributed under the License is distributed on an "AS IS" BASIS, WITHOUT % WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the % License for the specific language governing permissions and limitations under % the License. -module(couch_task_status). -behaviour(gen_server). % This module allows is used to track the status of long running tasks. % Long running tasks register (add_task/3) then update their status (update/1) % and the task and status is added to tasks list. When the tracked task dies % it will be automatically removed the tracking. To get the tasks list, use the % all/0 function -export([start_link/0, stop/0]). -export([all/0, add_task/3, update/1, update/2, set_update_frequency/1]). -export([init/1, terminate/2, code_change/3]). -export([handle_call/3, handle_cast/2, handle_info/2]). -import(couch_util, [to_binary/1]). -include("couch_db.hrl"). start_link() -> gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). stop() -> gen_server:cast(?MODULE, stop). all() -> gen_server:call(?MODULE, all). add_task(Type, TaskName, StatusText) -> put(task_status_update, {{0, 0, 0}, 0}), Msg = { add_task, to_binary(Type), to_binary(TaskName), to_binary(StatusText) }, gen_server:call(?MODULE, Msg). set_update_frequency(Msecs) -> put(task_status_update, {{0, 0, 0}, Msecs * 1000}). update(StatusText) -> update("~s", [StatusText]). update(Format, Data) -> {LastUpdateTime, Frequency} = get(task_status_update), case timer:now_diff(Now = now(), LastUpdateTime) >= Frequency of true -> put(task_status_update, {Now, Frequency}), Msg = ?l2b(io_lib:format(Format, Data)), gen_server:cast(?MODULE, {update_status, self(), Msg}); false -> ok end. 
init([]) -> % read configuration settings and register for configuration changes ets:new(?MODULE, [ordered_set, protected, named_table]), {ok, nil}. terminate(_Reason,_State) -> ok. handle_call({add_task, Type, TaskName, StatusText}, {From, _}, Server) -> case ets:lookup(?MODULE, From) of [] -> true = ets:insert(?MODULE, {From, {Type, TaskName, StatusText}}), erlang:monitor(process, From), {reply, ok, Server}; [_] -> {reply, {add_task_error, already_registered}, Server} end; handle_call(all, _, Server) -> All = [ [ {type, Type}, {task, Task}, {status, Status}, {pid, ?l2b(pid_to_list(Pid))} ] || {Pid, {Type, Task, Status}} <- ets:tab2list(?MODULE) ], {reply, All, Server}. handle_cast({update_status, Pid, StatusText}, Server) -> [{Pid, {Type, TaskName, _StatusText}}] = ets:lookup(?MODULE, Pid), ?LOG_DEBUG("New task status for ~s: ~s",[TaskName, StatusText]), true = ets:insert(?MODULE, {Pid, {Type, TaskName, StatusText}}), {noreply, Server}; handle_cast(stop, State) -> {stop, normal, State}. handle_info({'DOWN', _MonitorRef, _Type, Pid, _Info}, Server) -> %% should we also erlang:demonitor(_MonitorRef), ? ets:delete(?MODULE, Pid), {noreply, Server}. code_change(_OldVsn, State, _Extra) -> {ok, State}.
src/couchdb/couch_task_status.erl
0.653238
0.428652
couch_task_status.erl
starcoder
%% @author <NAME> <<EMAIL>> %% @copyright 2009-2010 <NAME> %% Date: 2009-04-26 %% @doc String related functions %% @todo Make this UTF-8 safe %% @todo Check valid chars for filenames, allow chinese, japanese, etc? %% CJK Unified Ideographs Extension A: Range: 3400-4DBF %% CJK Unified Ideographs: Range: 4E00-9FAF %% Kangxi Radicals: Range 2F00-2FDF %% See also: http://www.utf8-chartable.de/ %% Copyright 2009-2010 <NAME> %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. -module(z_string). -author("<NAME> <<EMAIL>"). %% interface functions -export([ trim/1, trim_left/1, trim_right/1, trim/2, trim_left/2, trim_right/2, trim_left_func/2, is_string/1, first_char/1, last_char/1, unquote/1, unquote/2, nospaces/1, line/1, to_rootname/1, to_name/1, to_slug/1, to_lower/1, to_upper/1, replace/3, truncate/2, truncate/3, truncatewords/2, truncatewords/3, split_lines/1, escape_ical/1, starts_with/2, ends_with/2, contains/2, split/2, test/0 ]). -include_lib("include/zotonic.hrl"). %% @doc Remove whitespace at the start and end of the string trim(B) when is_binary(B) -> trim_right(trim_left(B)); trim(L) when is_list(L) -> binary_to_list(trim(iolist_to_binary(L))). %% @doc Remove all occurences of a character at the start and end of a string. trim(B, Char) when is_binary(B) -> trim_right(trim_left(B, Char), Char); trim(L, Char) when is_list(L) -> binary_to_list(trim(iolist_to_binary(L), Char)). 
%% @doc Remove whitespace at the start the string trim_left(S) -> trim_left_func(S, fun(C) -> C =< 32 end). %% @doc Remove all occurences of a char at the start of a string trim_left(S, Char) -> trim_left_func(S, fun(C) -> C == Char end). trim_left_func(<<Char, Rest/binary>> = Bin, F) -> case F(Char) of true -> trim_left_func(Rest, F); false -> Bin end; trim_left_func([Char|Rest] = L, F) when is_integer(Char) -> case F(Char) of true -> trim_left(Rest, F); false -> L end; trim_left_func([L|Rest], F) when is_list(L); is_binary(L) -> case trim_left_func(L, F) of [] -> trim_left_func(Rest, F); <<>> -> trim_left_func(Rest, F); Other -> [Other|Rest] end; trim_left_func(Other, _F) -> Other. %% @doc Remove whitespace at the end of the string trim_right(B) when is_binary(B) -> trim_right(B, <<>>, <<>>); trim_right(L) -> binary_to_list(trim_right(iolist_to_binary(L))). trim_right(<<C, Rest/binary>>, WS, Acc) -> case C of W when W =< 32 -> trim_right(Rest, <<WS/binary, C>>, Acc); _ -> trim_right(Rest, <<>>, <<Acc/binary, WS/binary, C>>) end; trim_right(<<>>, _WS, Acc) -> Acc. %% @doc Remove all occurences of a char at the end of the string trim_right(B, Char) when is_binary(B) -> trim_right(B, Char, <<>>, <<>>); trim_right(L, Char) -> binary_to_list(trim_right(iolist_to_binary(L), Char)). trim_right(<<C, Rest/binary>>, Char, WS, Acc) -> case C of Char -> trim_right(Rest, Char, <<WS/binary, C>>, Acc); _ -> trim_right(Rest, Char, <<>>, <<Acc/binary, WS/binary, C>>) end; trim_right(<<>>, _Char, _WS, Acc) -> Acc. %% @doc Check if the variable is a one dimensional list, probably a string is_string([]) -> true; is_string([C|Rest]) when is_integer(C) andalso C =< 255 andalso (C >= 32 orelse C == 9 orelse C == 10 orelse C == 12 orelse C == 13) -> is_string(Rest); is_string(_) -> false. %% @doc Return the first character of a string. 
%% @todo Make this UTF-8 safe first_char([]) -> undefined; first_char([H|T]) when is_integer(H) -> truncate([H|T], 1, ""); first_char(<<>>) -> undefined; first_char(<<C, _/binary>>) -> C. %% @doc Return the last character of a string last_char([]) -> undefined; last_char([C]) -> C; last_char([_|R]) -> last_char(R); last_char(<<>>) -> undefined; last_char(<<C>>) -> C; last_char(<<_, R/binary>>) -> last_char(R). %% @doc Remove the first and last char if they are double quotes. unquote(S) -> unquote(S, $"). unquote(S, Q) -> case S of <<Q, R/binary>> -> unquote1(R, <<>>, Q, S); [Q|R] -> unquote1(R, [], Q, S); _ -> S end. unquote1([], _Acc, _Q, S) -> S; unquote1([Q], Acc, Q, _S) -> lists:reverse(Acc); unquote1([H|T], Acc, Q, S) -> unquote1(T, [H|Acc], Q, S); unquote1(<<>>, _Acc, _Q, S) -> S; unquote1(<<Q>>, Acc, Q, _S) -> Acc; unquote1(<<C,R/binary>>, Acc, Q, S) -> unquote1(R, <<Acc/binary, C>>, Q, S). %% @doc Remove all spaces and control characters from a string. nospaces(B) when is_binary(B) -> nospaces(binary_to_list(B)); nospaces(L) -> nospaces(L, []). nospaces([], Acc) -> lists:reverse(Acc); nospaces([C|Rest], Acc) when C =< 32 -> nospaces(Rest, Acc); nospaces([C|Rest], Acc) -> nospaces(Rest, [C|Acc]). %% @doc Make sure that the string is on one line only, replace control characters with spaces line(B) when is_binary(B) -> line(binary_to_list(B)); line(L) -> line1(L, []). line1([], Acc) -> lists:reverse(Acc); line1([H|T], Acc) when H < 32 -> line1(T, [32|Acc]); line1([H|T], Acc) -> line1(T, [H|Acc]). %% @doc Return a lowercase string for the input %% @spec to_lower(Value) -> String to_lower(B) when is_binary(B) -> to_lower(binary_to_list(B)); to_lower(A) when is_atom(A) -> to_lower(atom_to_list(A)); to_lower(L) when is_list(L) -> to_lower(lists:flatten(L), []). 
to_lower([], Acc) -> lists:reverse(Acc); to_lower([B|T], Acc) when is_binary(B) -> to_lower(binary_to_list(B)++T, Acc); to_lower([H|T], Acc) when H >= $A andalso H =< $Z -> to_lower(T, [H+32|Acc]); to_lower("Å"++T, Acc) -> to_lower(T, [165,195|Acc]); to_lower("Ä"++T, Acc) -> to_lower(T, [164,195|Acc]); to_lower("Á"++T, Acc) -> to_lower(T, [161,195|Acc]); to_lower("À"++T, Acc) -> to_lower(T, [160,195|Acc]); to_lower("Ë"++T, Acc) -> to_lower(T, [171,195|Acc]); to_lower("Ê"++T, Acc) -> to_lower(T, [170,195|Acc]); to_lower("É"++T, Acc) -> to_lower(T, [169,195|Acc]); to_lower("È"++T, Acc) -> to_lower(T, [168,195|Acc]); to_lower("Ï"++T, Acc) -> to_lower(T, [175,195|Acc]); to_lower("Î"++T, Acc) -> to_lower(T, [174,195|Acc]); to_lower("Í"++T, Acc) -> to_lower(T, [173,195|Acc]); to_lower("Ì"++T, Acc) -> to_lower(T, [172,195|Acc]); to_lower("Ü"++T, Acc) -> to_lower(T, [188,195|Acc]); to_lower("Û"++T, Acc) -> to_lower(T, [187,195|Acc]); to_lower("Ú"++T, Acc) -> to_lower(T, [186,195|Acc]); to_lower("Ù"++T, Acc) -> to_lower(T, [185,195|Acc]); to_lower("Ö"++T, Acc) -> to_lower(T, [182,195|Acc]); to_lower("Ô"++T, Acc) -> to_lower(T, [180,195|Acc]); to_lower("Ó"++T, Acc) -> to_lower(T, [179,195|Acc]); to_lower("Ò"++T, Acc) -> to_lower(T, [178,195|Acc]); to_lower("Ø"++T, Acc) -> to_lower(T, [184,195|Acc]); to_lower("Ç"++T, Acc) -> to_lower(T, [167,195|Acc]); to_lower("Æ"++T, Acc) -> to_lower(T, [166,195|Acc]); to_lower("Œ"++T, Acc) -> to_lower(T, [147,197|Acc]); % Cyrillic support to_lower("А"++T, Acc) -> to_lower(T, [176,208|Acc]); to_lower("Б"++T, Acc) -> to_lower(T, [177,208|Acc]); to_lower("В"++T, Acc) -> to_lower(T, [178,208|Acc]); to_lower("Г"++T, Acc) -> to_lower(T, [179,208|Acc]); to_lower("Д"++T, Acc) -> to_lower(T, [180,208|Acc]); to_lower("Е"++T, Acc) -> to_lower(T, [181,208|Acc]); to_lower("Ё"++T, Acc) -> to_lower(T, [145,209|Acc]); to_lower("Ж"++T, Acc) -> to_lower(T, [182,208|Acc]); to_lower("З"++T, Acc) -> to_lower(T, [183,208|Acc]); to_lower("И"++T, Acc) -> 
to_lower(T, [184,208|Acc]); to_lower("Й"++T, Acc) -> to_lower(T, [185,208|Acc]); to_lower("К"++T, Acc) -> to_lower(T, [186,208|Acc]); to_lower("Л"++T, Acc) -> to_lower(T, [187,208|Acc]); to_lower("М"++T, Acc) -> to_lower(T, [188,208|Acc]); to_lower("Н"++T, Acc) -> to_lower(T, [189,208|Acc]); to_lower("О"++T, Acc) -> to_lower(T, [190,208|Acc]); to_lower("П"++T, Acc) -> to_lower(T, [191,208|Acc]); to_lower("Р"++T, Acc) -> to_lower(T, [128,209|Acc]); to_lower("С"++T, Acc) -> to_lower(T, [129,209|Acc]); to_lower("Т"++T, Acc) -> to_lower(T, [130,209|Acc]); to_lower("У"++T, Acc) -> to_lower(T, [131,209|Acc]); to_lower("Ф"++T, Acc) -> to_lower(T, [132,209|Acc]); to_lower("Х"++T, Acc) -> to_lower(T, [133,209|Acc]); to_lower("Ц"++T, Acc) -> to_lower(T, [134,209|Acc]); to_lower("Ч"++T, Acc) -> to_lower(T, [135,209|Acc]); to_lower("Ш"++T, Acc) -> to_lower(T, [136,209|Acc]); to_lower("Щ"++T, Acc) -> to_lower(T, [137,209|Acc]); to_lower("Ъ"++T, Acc) -> to_lower(T, [138,209|Acc]); to_lower("Ы"++T, Acc) -> to_lower(T, [139,209|Acc]); to_lower("Ь"++T, Acc) -> to_lower(T, [140,209|Acc]); to_lower("Э"++T, Acc) -> to_lower(T, [141,209|Acc]); to_lower("Ю"++T, Acc) -> to_lower(T, [142,209|Acc]); to_lower("Я"++T, Acc) -> to_lower(T, [143,209|Acc]); % Extra Ukrainian characters to_lower("Ґ"++T, Acc) -> to_lower(T, [145,210|Acc]); to_lower("Ї"++T, Acc) -> to_lower(T, [151,209|Acc]); to_lower("І"++T, Acc) -> to_lower(T, [150,209|Acc]); to_lower("Є"++T, Acc) -> to_lower(T, [148,209|Acc]); % Polish support to_lower("Ą"++T, Acc) -> to_lower(T, [133,196|Acc]); to_lower("Ę"++T, Acc) -> to_lower(T, [153,196|Acc]); to_lower("Ć"++T, Acc) -> to_lower(T, [135,196|Acc]); to_lower("Ł"++T, Acc) -> to_lower(T, [130,197|Acc]); to_lower("Ń"++T, Acc) -> to_lower(T, [132,197|Acc]); to_lower("Ś"++T, Acc) -> to_lower(T, [155,197|Acc]); to_lower("Ź"++T, Acc) -> to_lower(T, [186,197|Acc]); to_lower("Ż"++T, Acc) -> to_lower(T, [188,197|Acc]); % Turkish support to_lower("Ş"++T, Acc) -> to_lower(T, [159,197|Acc]); 
to_lower("Ğ"++T, Acc) -> to_lower(T, [159,196|Acc]); to_lower("İ"++T, Acc) -> to_lower(T, [177,196|Acc]); % Other characters are taken as-is to_lower([H|T], Acc) -> to_lower(T, [H|Acc]). %% @doc Return a uppercase string for the input %% @spec to_upper(Value) -> String to_upper(B) when is_binary(B) -> to_upper(binary_to_list(B)); to_upper(A) when is_atom(A) -> to_upper(atom_to_list(A)); to_upper(L) when is_list(L) -> to_upper(lists:flatten(L), []). to_upper([], Acc) -> lists:reverse(Acc); to_upper([B|T], Acc) when is_binary(B) -> to_upper(binary_to_list(B)++T, Acc); to_upper([H|T], Acc) when H >= $a andalso H =< $z -> to_upper(T, [H-32|Acc]); to_upper("å"++T, Acc) -> to_upper(T, [133,195|Acc]); to_upper("ä"++T, Acc) -> to_upper(T, [132,195|Acc]); to_upper("á"++T, Acc) -> to_upper(T, [129,195|Acc]); to_upper("à"++T, Acc) -> to_upper(T, [128,195|Acc]); to_upper("ë"++T, Acc) -> to_upper(T, [139,195|Acc]); to_upper("ê"++T, Acc) -> to_upper(T, [138,195|Acc]); to_upper("é"++T, Acc) -> to_upper(T, [137,195|Acc]); to_upper("è"++T, Acc) -> to_upper(T, [136,195|Acc]); to_upper("ï"++T, Acc) -> to_upper(T, [143,195|Acc]); to_upper("Î"++T, Acc) -> to_upper(T, [142,195|Acc]); to_upper("í"++T, Acc) -> to_upper(T, [141,195|Acc]); to_upper("ì"++T, Acc) -> to_upper(T, [140,195|Acc]); to_upper("ü"++T, Acc) -> to_upper(T, [156,195|Acc]); to_upper("û"++T, Acc) -> to_upper(T, [155,195|Acc]); to_upper("ú"++T, Acc) -> to_upper(T, [154,195|Acc]); to_upper("ù"++T, Acc) -> to_upper(T, [153,195|Acc]); to_upper("ö"++T, Acc) -> to_upper(T, [150,195|Acc]); to_upper("ô"++T, Acc) -> to_upper(T, [148,195|Acc]); to_upper("ó"++T, Acc) -> to_upper(T, [147,195|Acc]); to_upper("ò"++T, Acc) -> to_upper(T, [146,195|Acc]); to_upper("ø"++T, Acc) -> to_upper(T, [152,195|Acc]); to_upper("ç"++T, Acc) -> to_upper(T, [135,195|Acc]); to_upper("æ"++T, Acc) -> to_upper(T, [134,195|Acc]); to_upper("œ"++T, Acc) -> to_upper(T, [146,197|Acc]); % Cyrillic support to_upper("а"++T, Acc) -> to_upper(T, [144,208|Acc]); 
to_upper("б"++T, Acc) -> to_upper(T, [145,208|Acc]); to_upper("в"++T, Acc) -> to_upper(T, [146,208|Acc]); to_upper("г"++T, Acc) -> to_upper(T, [147,208|Acc]); to_upper("д"++T, Acc) -> to_upper(T, [148,208|Acc]); to_upper("е"++T, Acc) -> to_upper(T, [149,208|Acc]); to_upper("ё"++T, Acc) -> to_upper(T, [129,208|Acc]); to_upper("ж"++T, Acc) -> to_upper(T, [150,208|Acc]); to_upper("з"++T, Acc) -> to_upper(T, [151,208|Acc]); to_upper("и"++T, Acc) -> to_upper(T, [152,208|Acc]); to_upper("й"++T, Acc) -> to_upper(T, [153,208|Acc]); to_upper("к"++T, Acc) -> to_upper(T, [154,208|Acc]); to_upper("л"++T, Acc) -> to_upper(T, [155,208|Acc]); to_upper("м"++T, Acc) -> to_upper(T, [156,208|Acc]); to_upper("н"++T, Acc) -> to_upper(T, [157,208|Acc]); to_upper("о"++T, Acc) -> to_upper(T, [158,208|Acc]); to_upper("п"++T, Acc) -> to_upper(T, [159,208|Acc]); to_upper("р"++T, Acc) -> to_upper(T, [160,208|Acc]); to_upper("с"++T, Acc) -> to_upper(T, [161,208|Acc]); to_upper("т"++T, Acc) -> to_upper(T, [162,208|Acc]); to_upper("у"++T, Acc) -> to_upper(T, [163,208|Acc]); to_upper("ф"++T, Acc) -> to_upper(T, [164,208|Acc]); to_upper("х"++T, Acc) -> to_upper(T, [165,208|Acc]); to_upper("ц"++T, Acc) -> to_upper(T, [166,208|Acc]); to_upper("ч"++T, Acc) -> to_upper(T, [167,208|Acc]); to_upper("ш"++T, Acc) -> to_upper(T, [168,208|Acc]); to_upper("щ"++T, Acc) -> to_upper(T, [169,208|Acc]); to_upper("ъ"++T, Acc) -> to_upper(T, [170,208|Acc]); to_upper("ы"++T, Acc) -> to_upper(T, [171,208|Acc]); to_upper("ь"++T, Acc) -> to_upper(T, [172,208|Acc]); to_upper("э"++T, Acc) -> to_upper(T, [173,208|Acc]); to_upper("ю"++T, Acc) -> to_upper(T, [174,208|Acc]); to_upper("я"++T, Acc) -> to_upper(T, [175,208|Acc]); % Extra Ukrainian characters to_upper("ґ"++T, Acc) -> to_upper(T, [144,210|Acc]); to_upper("ї"++T, Acc) -> to_upper(T, [135,208|Acc]); to_upper("і"++T, Acc) -> to_upper(T, [143,208|Acc]); to_upper("є"++T, Acc) -> to_upper(T, [132,208|Acc]); % Polish support to_upper("ą"++T, Acc) -> to_upper(T, 
[132,196|Acc]); to_upper("ę"++T, Acc) -> to_upper(T, [152,196|Acc]); to_upper("ć"++T, Acc) -> to_upper(T, [134,196|Acc]); to_upper("ł"++T, Acc) -> to_upper(T, [129,197|Acc]); to_upper("ń"++T, Acc) -> to_upper(T, [131,197|Acc]); to_upper("ś"++T, Acc) -> to_upper(T, [154,197|Acc]); to_upper("ź"++T, Acc) -> to_upper(T, [185,197|Acc]); to_upper("ż"++T, Acc) -> to_upper(T, [187,197|Acc]); % Turkish support to_upper("ş"++T, Acc) -> to_upper(T, [158,197|Acc]); to_upper("ğ"++T, Acc) -> to_upper(T, [158,196|Acc]); to_upper("ı"++T, Acc) -> to_upper(T, [176,196|Acc]); % Other chars are taken as-is to_upper([H|T], Acc) -> to_upper(T, [H|Acc]). %% @doc Filter a filename so that we obtain a basename that is safe to use. %% @spec to_rootname(string()) -> string() to_rootname(Filename) -> to_slug(filename:rootname(filename:basename(Filename))). %% @doc Map a string to a slug that can be used in the uri of a page. Same as a name, but then with dashes instead of underscores. %% @spec to_slug(String) -> String to_slug(Title) -> Slug = to_name(Title), [ case C of $_ -> $-; _ -> C end || C <- Slug ]. %% @doc Map a string to a value that can be used as a name or slug. Maps all characters to lowercase and remove non digalpha chars %% @spec to_name(String) -> String to_name({trans, Tr}) -> case proplists:get_value(en, Tr) of undefined -> case Tr of [{_,V}|_] -> to_name(V); _ -> to_name([]) end; V -> to_name(V) end; to_name(Name) when is_binary(Name) -> to_name(binary_to_list(Name)); to_name(Name) when is_atom(Name) -> to_name(atom_to_list(Name)); to_name(Name) -> to_name(Name, [], 0). 
to_name([], Acc, _I) -> case string:strip(lists:reverse(Acc), both, $_) of [] -> "_"; Name -> Name end; to_name(_, Acc, N) when N >= 80 -> to_name([], Acc, 80); to_name([C|T], Acc, I) when C >= $A andalso C =< $Z -> to_name(T, [C+32|Acc], I+1); to_name([C|T], Acc, I) when (C >= $a andalso C =< $z) orelse (C >= $0 andalso C =< $9) orelse C =:= $_ -> to_name(T, [C|Acc], I+1); to_name("ä"++T, Acc, I) -> to_name(T, [$a|Acc], I+1); to_name("ë"++T, Acc, I) -> to_name(T, [$e|Acc], I+1); to_name("ï"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("ü"++T, Acc, I) -> to_name(T, [$u|Acc], I+1); to_name("ö"++T, Acc, I) -> to_name(T, [$o|Acc], I+1); to_name("Ä"++T, Acc, I) -> to_name(T, [$a|Acc], I+1); to_name("Ë"++T, Acc, I) -> to_name(T, [$e|Acc], I+1); to_name("Ï"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("Ü"++T, Acc, I) -> to_name(T, [$u|Acc], I+1); to_name("Ö"++T, Acc, I) -> to_name(T, [$o|Acc], I+1); to_name("é"++T, Acc, I) -> to_name(T, [$e|Acc], I+1); to_name("è"++T, Acc, I) -> to_name(T, [$e|Acc], I+1); to_name("É"++T, Acc, I) -> to_name(T, [$e|Acc], I+1); to_name("È"++T, Acc, I) -> to_name(T, [$e|Acc], I+1); to_name("í"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("ì"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("Í"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("Ì"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("ú"++T, Acc, I) -> to_name(T, [$u|Acc], I+1); to_name("ù"++T, Acc, I) -> to_name(T, [$u|Acc], I+1); to_name("Ú"++T, Acc, I) -> to_name(T, [$u|Acc], I+1); to_name("Ù"++T, Acc, I) -> to_name(T, [$u|Acc], I+1); to_name("ó"++T, Acc, I) -> to_name(T, [$o|Acc], I+1); to_name("ò"++T, Acc, I) -> to_name(T, [$o|Acc], I+1); to_name("Ó"++T, Acc, I) -> to_name(T, [$o|Acc], I+1); to_name("Ò"++T, Acc, I) -> to_name(T, [$o|Acc], I+1); to_name("ß"++T, Acc, I) -> to_name(T, [$s,$s|Acc], I+2); to_name("ç"++T, Acc, I) -> to_name(T, [$c|Acc], I+1); to_name("Ç"++T, Acc, I) -> to_name(T, [$c|Acc], I+1); to_name("ø"++T, Acc, I) -> to_name(T, [$o|Acc], I+1); 
to_name("Ø"++T, Acc, I) -> to_name(T, [$o|Acc], I+1); to_name("å"++T, Acc, I) -> to_name(T, [$a|Acc], I+1); to_name("Å"++T, Acc, I) -> to_name(T, [$a|Acc], I+1); to_name("€"++T, Acc, I) -> to_name(T, [$e|Acc], I+1); to_name("ÿ"++T, Acc, I) -> to_name(T, [$i,$j|Acc], I+2); to_name("@"++T, Acc, I) -> to_name(T, [$_,$t,$a,$_|Acc], I+4); % Cyrillic support (from http://en.wikipedia.org/wiki/Romanization_of_Russian) to_name("А"++T, Acc, I) -> to_name(T, [$a|Acc], I+1); to_name("а"++T, Acc, I) -> to_name(T, [$a|Acc], I+1); to_name("Б"++T, Acc, I) -> to_name(T, [$b|Acc], I+1); to_name("б"++T, Acc, I) -> to_name(T, [$b|Acc], I+1); to_name("В"++T, Acc, I) -> to_name(T, [$v|Acc], I+1); to_name("в"++T, Acc, I) -> to_name(T, [$v|Acc], I+1); to_name("Г"++T, Acc, I) -> to_name(T, [$g|Acc], I+1); to_name("г"++T, Acc, I) -> to_name(T, [$g|Acc], I+1); to_name("Д"++T, Acc, I) -> to_name(T, [$d|Acc], I+1); to_name("д"++T, Acc, I) -> to_name(T, [$d|Acc], I+1); to_name("Е"++T, Acc, I) -> to_name(T, [$e|Acc], I+1); to_name("е"++T, Acc, I) -> to_name(T, [$e|Acc], I+1); to_name("Ё"++T, Acc, I) -> to_name(T, [$o,$y|Acc], I+2); to_name("ё"++T, Acc, I) -> to_name(T, [$o,$y|Acc], I+2); to_name("Ж"++T, Acc, I) -> to_name(T, [$h,$z|Acc], I+2); to_name("ж"++T, Acc, I) -> to_name(T, [$h,$z|Acc], I+2); to_name("З"++T, Acc, I) -> to_name(T, [$z|Acc], I+1); to_name("з"++T, Acc, I) -> to_name(T, [$z|Acc], I+1); to_name("И"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("и"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("Й"++T, Acc, I) -> to_name(T, [$j|Acc], I+1); to_name("й"++T, Acc, I) -> to_name(T, [$j|Acc], I+1); to_name("К"++T, Acc, I) -> to_name(T, [$k|Acc], I+1); to_name("к"++T, Acc, I) -> to_name(T, [$k|Acc], I+1); to_name("Л"++T, Acc, I) -> to_name(T, [$l|Acc], I+1); to_name("л"++T, Acc, I) -> to_name(T, [$l|Acc], I+1); to_name("М"++T, Acc, I) -> to_name(T, [$m|Acc], I+1); to_name("м"++T, Acc, I) -> to_name(T, [$m|Acc], I+1); to_name("Н"++T, Acc, I) -> to_name(T, [$n|Acc], I+1); 
to_name("н"++T, Acc, I) -> to_name(T, [$n|Acc], I+1); to_name("О"++T, Acc, I) -> to_name(T, [$o|Acc], I+1); to_name("о"++T, Acc, I) -> to_name(T, [$o|Acc], I+1); to_name("П"++T, Acc, I) -> to_name(T, [$p|Acc], I+1); to_name("п"++T, Acc, I) -> to_name(T, [$p|Acc], I+1); to_name("Р"++T, Acc, I) -> to_name(T, [$r|Acc], I+1); to_name("р"++T, Acc, I) -> to_name(T, [$r|Acc], I+1); to_name("С"++T, Acc, I) -> to_name(T, [$s|Acc], I+1); to_name("с"++T, Acc, I) -> to_name(T, [$s|Acc], I+1); to_name("Т"++T, Acc, I) -> to_name(T, [$t|Acc], I+1); to_name("т"++T, Acc, I) -> to_name(T, [$t|Acc], I+1); to_name("У"++T, Acc, I) -> to_name(T, [$u|Acc], I+1); to_name("у"++T, Acc, I) -> to_name(T, [$u|Acc], I+1); to_name("Ф"++T, Acc, I) -> to_name(T, [$f|Acc], I+1); to_name("ф"++T, Acc, I) -> to_name(T, [$f|Acc], I+1); to_name("Х"++T, Acc, I) -> to_name(T, [$h|Acc], I+1); to_name("х"++T, Acc, I) -> to_name(T, [$h|Acc], I+1); to_name("Ц"++T, Acc, I) -> to_name(T, [$c|Acc], I+1); to_name("ц"++T, Acc, I) -> to_name(T, [$c|Acc], I+1); to_name("Ч"++T, Acc, I) -> to_name(T, [$h,$c|Acc], I+2); to_name("ч"++T, Acc, I) -> to_name(T, [$h,$c|Acc], I+2); to_name("Ш"++T, Acc, I) -> to_name(T, [$h,$s|Acc], I+2); to_name("ш"++T, Acc, I) -> to_name(T, [$h,$s|Acc], I+2); to_name("Щ"++T, Acc, I) -> to_name(T, [$h,$h,$s|Acc], I+3); to_name("щ"++T, Acc, I) -> to_name(T, [$h,$h,$s|Acc], I+3); to_name("Ъ"++T, Acc, I) -> to_name(T, [$_|Acc], I+1); to_name("ъ"++T, Acc, I) -> to_name(T, [$_|Acc], I+1); to_name("Ы"++T, Acc, I) -> to_name(T, [$y|Acc], I+1); to_name("ы"++T, Acc, I) -> to_name(T, [$y|Acc], I+1); to_name("Ь"++T, Acc, I) -> to_name(T, [$_|Acc], I+1); to_name("ь"++T, Acc, I) -> to_name(T, [$_|Acc], I+1); to_name("Э"++T, Acc, I) -> to_name(T, [$h,$e|Acc], I+2); to_name("э"++T, Acc, I) -> to_name(T, [$h,$e|Acc], I+2); to_name("Ю"++T, Acc, I) -> to_name(T, [$u,$y|Acc], I+2); to_name("ю"++T, Acc, I) -> to_name(T, [$u,$y|Acc], I+2); to_name("Я"++T, Acc, I) -> to_name(T, [$a,$y|Acc], I+2); to_name("я"++T, 
Acc, I) -> to_name(T, [$a,$y|Acc], I+2); % Ukrainian support to_name("Ґ"++T, Acc, I) -> to_name(T, [$g|Acc], I+1); to_name("ґ"++T, Acc, I) -> to_name(T, [$g|Acc], I+1); to_name("Ї"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("ї"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("І"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("і"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("Є"++T, Acc, I) -> to_name(T, [$e,$y|Acc], I+2); to_name("є"++T, Acc, I) -> to_name(T, [$e,$y|Acc], I+2); % Polish support to_name("Ą"++T, Acc, I) -> to_name(T, [$a|Acc], I+1); to_name("ą"++T, Acc, I) -> to_name(T, [$a|Acc], I+1); to_name("Ę"++T, Acc, I) -> to_name(T, [$e|Acc], I+1); to_name("ę"++T, Acc, I) -> to_name(T, [$e|Acc], I+1); to_name("Ć"++T, Acc, I) -> to_name(T, [$c|Acc], I+1); to_name("ć"++T, Acc, I) -> to_name(T, [$c|Acc], I+1); to_name("Ł"++T, Acc, I) -> to_name(T, [$l|Acc], I+1); to_name("ł"++T, Acc, I) -> to_name(T, [$l|Acc], I+1); to_name("Ń"++T, Acc, I) -> to_name(T, [$n|Acc], I+1); to_name("ń"++T, Acc, I) -> to_name(T, [$n|Acc], I+1); to_name("Ś"++T, Acc, I) -> to_name(T, [$s|Acc], I+1); to_name("ś"++T, Acc, I) -> to_name(T, [$s|Acc], I+1); to_name("Ź"++T, Acc, I) -> to_name(T, [$z|Acc], I+1); to_name("ź"++T, Acc, I) -> to_name(T, [$z|Acc], I+1); to_name("Ż"++T, Acc, I) -> to_name(T, [$z|Acc], I+1); to_name("ż"++T, Acc, I) -> to_name(T, [$z|Acc], I+1); % Turkish support to_name("Ş"++T, Acc, I) -> to_name(T, [$s|Acc], I+1); to_name("ş"++T, Acc, I) -> to_name(T, [$s|Acc], I+1); to_name("Ğ"++T, Acc, I) -> to_name(T, [$g|Acc], I+1); to_name("ğ"++T, Acc, I) -> to_name(T, [$g|Acc], I+1); to_name("İ"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); to_name("ı"++T, Acc, I) -> to_name(T, [$i|Acc], I+1); % Some entities - we might want to add generic code here, depends % on where to_name/1 is used (can we assume that the input is always html?) 
to_name("&amp;"++T, Acc, I) -> to_name(T, [$_|Acc], I+1); to_name("&lt;"++T, Acc, I) -> to_name(T, [$_|Acc], I+1); to_name("&gt;"++T, Acc, I) -> to_name(T, [$_|Acc], I+1); to_name("&#39;"++T, Acc, I) -> to_name(T, [$_|Acc], I+1); % Other sequences of characters are mapped to $_ to_name([_C|T], [$_|_] = Acc, I) -> to_name(T, Acc, I+1); to_name([_C|T], Acc, I) -> to_name(T, [$_|Acc], I+1). %% @doc Replace a string inside another string %% Copyright 2008 <NAME> (Nitrogen, MIT License) replace([], _, _) -> []; replace(String, S1, S2) when is_list(String), is_list(S1), is_list(S2) -> Length = length(S1), case string:substr(String, 1, Length) of S1 -> S2 ++ replace(string:substr(String, Length + 1), S1, S2); _ -> [hd(String)|replace(tl(String), S1, S2)] end. %% @doc Truncate a string. Append the '...' character at the place of break off. %% @spec truncate(String, int()) -> String truncate(L, N) -> truncate(L, N, "…"). truncate(B, N, Append) when is_binary(B) -> truncate(z_convert:to_list(B), N, Append); truncate(_L, N, _Append) when N =< 0 -> []; truncate(L, N, Append) -> truncate(L, N, Append, in_word, [], in_word, []). truncate([], _, _Append, _LastState, _Last, _AccState, Acc) -> lists:reverse(Acc); truncate(_, 0, _Append, sentence, Last, _AccState, _Acc) -> lists:reverse(Last); truncate(_, 0, Append, _, [], _AccState, Acc) -> lists:reverse(insert_acc(Append, Acc)); truncate(_, 0, Append, _LastState, Last, _AccState, _Acc) -> lists:reverse(insert_acc(Append, Last)); truncate([C|Rest], N, Append, LastState, Last, AccState, Acc) when C == $.; C == $!; C == $? 
-> case AccState of in_word -> truncate(Rest, N-1, Append, sentence, [C|Acc], sentence, [C|Acc]); word -> truncate(Rest, N-1, Append, sentence, [C|Acc], sentence, [C|Acc]); _ -> truncate(Rest, N-1, Append, LastState, Last, sentence, [C|Acc]) end; truncate([C|Rest], N, Append, LastState, Last, AccState, Acc) when C == $;; C == $-; C == $, -> case AccState of in_word -> truncate(Rest, N-1, Append, sentence, Acc, word, [C|Acc]); _ -> truncate(Rest, N-1, Append, LastState, Last, word, [C|Acc]) end; truncate([C|Rest], N, Append, LastState, Last, AccState, Acc) when C == 32; C == 9; C == 10; C == 13; C == $/; C == $|; C == $(; C == $); C == $" -> case AccState of in_word -> truncate(Rest, N-1, Append, word, Acc, word, [C|Acc]); _ -> truncate(Rest, N-1, Append, LastState, Last, word, [C|Acc]) end; truncate([$&|_]=Input, N, Append, LastState, Last, AccState, Acc) -> {Rest1,Acc1} = get_entity(Input,Acc), case AccState of in_word -> truncate(Rest1, N-1, Append, word, Acc1, word, Acc1); _ -> truncate(Rest1, N-1, Append, LastState, Last, word, Acc1) end; %% Overlong encoding: start of a 2-byte sequence, but code point <= 127 truncate([X,A|Rest], N, Append, LastState, Last, _AccState, Acc) when X >= 192, X =< 193 -> truncate(Rest, N-1, Append, LastState, Last, in_word, [A,X|Acc]); %% Start of 2-byte sequence truncate([X,A|Rest], N, Append, LastState, Last, _AccState, Acc) when X >= 194, X =< 223 -> truncate(Rest, N-1, Append, LastState, Last, in_word, [A,X|Acc]); %% Start of 3-byte sequence truncate([X,A,B|Rest], N, Append, LastState, Last, _AccState, Acc) when X >= 224, X =< 239 -> truncate(Rest, N-1, Append, LastState, Last, in_word, [B,A,X|Acc]); %% Start of 4-byte sequence truncate([X,A,B,C|Rest], N, Append, LastState, Last, _AccState, Acc) when X >= 240, X =< 244 -> truncate(Rest, N-1, Append, LastState, Last, in_word, [C,B,A,X|Acc]); %% Restricted by RFC 3629: start of 4-byte sequence for codepoint above 10FFFF truncate([X,A,B,C|Rest], N, Append, LastState, Last, 
_AccState, Acc) when X >= 245, X =< 247 -> truncate(Rest, N-1, Append, LastState, Last, in_word, [C,B,A,X|Acc]); %% Restricted by RFC 3629: start of 5-byte sequence truncate([X,A,B,C,D|Rest], N, Append, LastState, Last, _AccState, Acc) when X >= 248, X =< 251 -> truncate(Rest, N-1, Append, LastState, Last, in_word, [D,C,B,A,X|Acc]); %% Restricted by RFC 3629: start of 6-byte sequence truncate([X,A,B,C,D,E|Rest], N, Append, LastState, Last, _AccState, Acc) when X >= 252, X =< 253 -> truncate(Rest, N-1, Append, LastState, Last, in_word, [E,D,C,B,A,X|Acc]); %% Any other character truncate([C|Rest], N, Append, LastState, Last, _AccState, Acc) -> truncate(Rest, N-1, Append, LastState, Last, in_word, [C|Acc]). insert_acc([], Acc) -> Acc; insert_acc([H|T], Acc) -> insert_acc(T, [H|Acc]). get_entity([], Acc) -> {[],Acc}; get_entity([$;|Rest], Acc) -> {Rest,[$;|Acc]}; get_entity([C|Rest], Acc) -> get_entity(Rest, [C|Acc]). truncatewords(S, Words) -> truncatewords(S, Words, "…"). truncatewords(S, Words, Append) when is_binary(S) -> truncatewords(z_convert:to_list(S), in_space, Words, Append, []); truncatewords(S, Words, Append) when is_list(S) -> truncatewords(S, in_space, Words, Append, []). truncatewords(_S, _State, 0, Append, Acc) -> lists:reverse(trim_left_func(Acc, fun iswordsep/1), Append); truncatewords([], _State, _Words, _Append, Acc) -> lists:reverse(Acc); truncatewords([C|Rest], in_space, Words, Append, Acc) -> case iswordsep(C) of true -> truncatewords(Rest, in_space, Words, Append, [C|Acc]); false -> truncatewords(Rest, in_word, Words, Append, [C|Acc]) end; truncatewords([C|Rest], in_word, Words, Append, Acc) -> case iswordsep(C) of true -> truncatewords(Rest, in_space, Words-1, Append, [C|Acc]); false -> truncatewords(Rest, in_word, Words, Append, [C|Acc]) end. iswordsep($\s) -> true; iswordsep($\n) -> true; iswordsep($\r) -> true; iswordsep($\t) -> true; iswordsep($,) -> true; iswordsep($:) -> true; iswordsep($;) -> true; iswordsep(_) -> false. 
%% @doc Split the binary into lines. Line separators can be \r, \n or \r\n.
split_lines(B) when is_binary(B) ->
    split_lines(B, <<>>, []).

%% Accumulate the current line in `Line'; finished lines are pushed onto Acc.
%% The CRLF clause must come before the bare CR clause so that \r\n counts
%% as a single separator.
split_lines(<<>>, Line, Acc) ->
    lists:reverse([Line|Acc]);
split_lines(<<13,10,Rest/binary>>, Line, Acc) ->
    split_lines(Rest, <<>>, [Line|Acc]);
split_lines(<<13,Rest/binary>>, Line, Acc) ->
    split_lines(Rest, <<>>, [Line|Acc]);
split_lines(<<10,Rest/binary>>, Line, Acc) ->
    split_lines(Rest, <<>>, [Line|Acc]);
split_lines(<<C, Rest/binary>>, Line, Acc) ->
    split_lines(Rest, <<Line/binary, C>>, Acc).

%% @doc Escape special characters for ical RFC2445 elements
escape_ical(L) when is_list(L) -> escape_ical(iolist_to_binary(L));
escape_ical(B) when is_binary(B) -> escape_ical(B, <<>>, 0);
escape_ical(A) when is_atom(A) -> escape_ical(atom_to_list(A)).

%% N counts the octets emitted on the current content line; when it reaches
%% 70 the line is folded with CRLF + space and the count restarts.
%% (RFC 5545 permits up to 75 octets; 70 is a conservative bound.)
escape_ical(<<>>, Acc, _N) -> Acc;
escape_ical(B, Acc, N) when N >= 70 -> escape_ical(B, <<Acc/binary, 13, 10, 32>>, 0);
%% Newlines (CRLF or bare LF) become the literal two-character escape \n.
escape_ical(<<13, 10, Rest/binary>>, Acc, N) -> escape_ical(Rest, <<Acc/binary, $\\, $n>>, N+2);
escape_ical(<<10, Rest/binary>>, Acc, N) -> escape_ical(Rest, <<Acc/binary, $\\, $n>>, N+2);
%% Tabs are replaced by a single space.
escape_ical(<<9, Rest/binary>>, Acc, N) -> escape_ical(Rest, <<Acc/binary, 32>>, N+1);
escape_ical(<<$", Rest/binary>>, Acc, N) -> escape_ical(Rest, <<Acc/binary, $\\, $">>, N+2);
escape_ical(<<$,, Rest/binary>>, Acc, N) -> escape_ical(Rest, <<Acc/binary, $\\, $,>>, N+2);
%% Colons are wrapped in double quotes rather than backslash-escaped.
escape_ical(<<$:, Rest/binary>>, Acc, N) -> escape_ical(Rest, <<Acc/binary, $", $:, $">>, N+3);
escape_ical(<<$;, Rest/binary>>, Acc, N) -> escape_ical(Rest, <<Acc/binary, $\\, $;>>, N+2);
escape_ical(<<$\\, Rest/binary>>, Acc, N) -> escape_ical(Rest, <<Acc/binary, $\\, $\\>>, N+2);
escape_ical(<<C, Rest/binary>>, Acc, N) -> escape_ical(Rest, <<Acc/binary, C>>, N+1).
%% @doc Return true if Start is a prefix of Word
%% @spec starts_with(String, String) -> bool()
starts_with(Start, B) when is_binary(Start), is_binary(B) ->
    StartSize = size(Start),
    case B of
        <<Start:StartSize/binary, _/binary>> -> true;
        _ -> false
    end;
starts_with(Start, String) ->
    %% Normalize both arguments to binaries, then retry the binary clause.
    starts_with(iolist_to_binary(Start), iolist_to_binary(String)).

%% @doc Return true iff Word ends with End
%% @spec ends_with(String, String) -> bool()
ends_with(End, B) when is_binary(End), is_binary(B) ->
    %% Skip size(B) - size(End) leading bytes, then the suffix must match End.
    StartSize = size(B) - size(End),
    case B of
        <<_:StartSize/binary, End/binary>> -> true;
        _ -> false
    end;
ends_with(End, String) ->
    ends_with(iolist_to_binary(End), iolist_to_binary(String)).

%% @doc Return true iff What is found in the string
%% @spec contains(String, String) -> bool()
contains(What, B) when is_binary(What), is_binary(B) ->
    contains(What, size(What), B, 0);
contains(What, String) ->
    contains(iolist_to_binary(What), iolist_to_binary(String)).

%% Sliding scan: attempt to match What at every byte offset C of B.
contains(_What, _SizeWhat, B, C) when C > size(B) ->
    false;
contains(What, SizeWhat, B, C) ->
    case B of
        <<_:C/binary, What:SizeWhat/binary, _/binary>> -> true;
        _ -> contains(What, SizeWhat, B, C + 1)
    end.

%% @doc Split a string, see http://www.erlang.org/pipermail/erlang-questions/2008-October/038896.html
%% @spec split(String, String) -> list()
split(String, []) ->
    split0(String);
split(String, [Sep]) when is_integer(Sep) ->
    split1(String, Sep);
split(String, [C1,C2|L]) when is_integer(C1), is_integer(C2) ->
    split2(String, C1, C2, L).

%% Split a string at "", which is deemed to occur _between_
%% adjacent characters, but queerly, not at the beginning
%% or the end.
split0([C|Cs]) -> [[C] | split0(Cs)];
split0([]) -> [].

%% Split a string at a single character separator.
split1(String, Sep) -> split1_loop(String, Sep, "").

%% Accumulate the current field in reverse until the separator is seen.
split1_loop([Sep|String], Sep, Rev) ->
    [lists:reverse(Rev) | split1(String, Sep)];
split1_loop([Chr|String], Sep, Rev) ->
    split1_loop(String, Sep, [Chr|Rev]);
split1_loop([], _, Rev) ->
    [lists:reverse(Rev)].
%% Split a string at a multi-character separator
%% [C1,C2|L]. These components are split out for
%% a fast match.
split2(String, C1, C2, L) -> split2_loop(String, C1, C2, L, "").

%% Match the first two separator characters C1,C2 in the clause head, then
%% verify the remainder L of the separator with split_prefix/2.
split2_loop([C1|S = [C2|String]], C1, C2, L, Rev) ->
    case split_prefix(L, String) of
        no   -> split2_loop(S, C1, C2, L, [C1|Rev]);
        Rest -> [lists:reverse(Rev) | split2(Rest, C1, C2, L)]
    end;
split2_loop([Chr|String], C1, C2, L, Rev) ->
    split2_loop(String, C1, C2, L, [Chr|Rev]);
split2_loop([], _, _, _, Rev) ->
    [lists:reverse(Rev)].

%% Strip the prefix given by the first list from the second; return the
%% remainder, or the atom 'no' when the second list does not start with it.
split_prefix([C|L], [C|S]) -> split_prefix(L, S);
split_prefix([], S) -> S;
split_prefix(_, _) -> no.

%% Smoke test exercising case conversion, name normalization and
%% first_char/1 on multi-byte (UTF-8) input.
test() ->
    A = "üçgen",
    A = to_lower(to_upper(A)),
    "ucgen" = to_name(A),
    "a" = first_char("aap"),
    "Ж" = first_char("ЖЖЖxx"),
    "ć" = first_char("ćaap"),
    ok.
src/support/z_string.erl
0.564219
0.432063
z_string.erl
starcoder
%% Copyright (c) 2018 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.

%% @doc Common Test suite for the emqx_pqueue priority queue.
-module(emqx_pqueue_SUITE).

-include("emqx_mqtt.hrl").

-include_lib("eunit/include/eunit.hrl").

-compile(export_all).
-compile(nowarn_export_all).

-define(PQ, emqx_pqueue).

all() -> [t_priority_queue_plen, t_priority_queue_out2].

%% Checks per-priority lengths (plen/2) and the total length (len/1) as
%% elements are inserted at and removed from various priorities.
%% Assertions are expressed as direct pattern matches: a mismatch crashes
%% the test case, which Common Test reports as a failure.
t_priority_queue_plen(_) ->
    Q = ?PQ:new(),
    0 = ?PQ:plen(0, Q),
    Q0 = ?PQ:in(z, Q),                 % in/2 inserts at priority 0
    1 = ?PQ:plen(0, Q0),
    Q1 = ?PQ:in(x, 1, Q0),
    1 = ?PQ:plen(1, Q1),
    Q2 = ?PQ:in(y, 2, Q1),
    1 = ?PQ:plen(2, Q2),
    Q3 = ?PQ:in(z, 2, Q2),
    2 = ?PQ:plen(2, Q3),
    {_, Q4} = ?PQ:out(1, Q3),          % out/2 removes from a given priority
    0 = ?PQ:plen(1, Q4),
    {_, Q5} = ?PQ:out(Q4),
    1 = ?PQ:plen(2, Q5),
    {_, Q6} = ?PQ:out(Q5),
    0 = ?PQ:plen(2, Q6),
    1 = ?PQ:len(Q6),
    {_, Q7} = ?PQ:out(Q6),
    0 = ?PQ:len(Q7).

%% Checks the dequeue order of out/1 (highest priority first) and out/2
%% (from an explicit priority), mixing priority-0 and prioritized inserts.
t_priority_queue_out2(_) ->
    Els = [a, {b, 1}, {c, 1}, {d, 2}, {e, 2}, {f, 2}],
    Q = ?PQ:new(),
    Q0 = lists:foldl(
           fun({El, P}, Acc) ->
                   ?PQ:in(El, P, Acc);
              (El, Acc) ->
                   ?PQ:in(El, Acc)
           end, Q, Els),

    {Val, Q1} = ?PQ:out(Q0),
    {value, d} = Val,

    {Val1, Q2} = ?PQ:out(2, Q1),
    {value, e} = Val1,

    {Val2, Q3} = ?PQ:out(1, Q2),
    {value, b} = Val2,

    {Val3, Q4} = ?PQ:out(Q3),
    {value, f} = Val3,

    {Val4, Q5} = ?PQ:out(Q4),
    {value, c} = Val4,

    {Val5, Q6} = ?PQ:out(Q5),
    {value, a} = Val5,

    {empty, _Q7} = ?PQ:out(Q6).
test/emqx_pqueue_SUITE.erl
0.607081
0.48249
emqx_pqueue_SUITE.erl
starcoder
%% -*- erlang -*- %% %% A generic Petri net OTP behavior. %% %% Copyright 2016-2017 <NAME> %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %% %% ------------------------------------------------------------------- %% @author <NAME> <<EMAIL>> %% @version 0.1.7 %% @copyright 2016-2017 <NAME> %% %% @doc Callback function definitions and API for the `gen_pnet' behavior. %% %% <h3>Net Structure Callback Functions</h3> %% %% There are six callbacks that define the Petri net structure and its initial %% marking: %% %% <ul> %% <li>`place_lst/0' returns the names of the places in the net</li> %% <li>`trsn_lst/0' returns the names of the transitions in the net</li> %% <li>`init_marking/2' returns the initial marking for a given place</li> %% <li>`preset/1' returns the preset places of a given transition</li> %% <li>`is_enabled/3' determines whether a given transition is enabled in a %% given mode</li> %% <li>`fire/3' returns which tokens are produced on what places if a given %% transition is fired in a given mode that enables this transition</li> %% </ul> %% %% We have a look at each of them in turn. %% %% <h4>place_lst/0</h4> %% %% The `place_lst/0' function lets us define the names of all places in the net. %% %% Example: %% ``` %% place_lst() -> %% [coin_slot, cash_box, signal, storage, compartment]. %% ''' %% Here, we define the net to have the five places in the cookie vending %% machine. 
%%
%% <h4>trsn_lst/0</h4>
%%
%% The `trsn_lst/0' function lets us define the names of all transitions in the
%% net.
%%
%% Example:
%% ```
%% trsn_lst() ->
%%   [a, b].
%% '''
%% Here, we define the net to have the two transitions `a' and `b' in the cookie
%% vending machine.
%%
%% <h4>preset/1</h4>
%%
%% The `preset/1' function lets us define the preset places of a given
%% transition.
%%
%% Example:
%% ```
%% preset( a ) -> [coin_slot];
%% preset( b ) -> [signal, storage].
%% '''
%% Here, we define the preset of the transition `a' to be just the place
%% `coin_slot' while the transition `b' has the places `signal' and `storage'
%% in its preset.
%%
%% <h4>init_marking/2</h4>
%%
%% The `init_marking/2' function lets us define the initial marking for a given
%% place in the form of a token list. The argument `UsrInfo' is the user info
%% field that has been generated in the actor interface callback `init/1'.
%%
%% Example:
%% ```
%% init_marking( storage, _UsrInfo ) -> [cookie_box, cookie_box, cookie_box];
%% init_marking( _Place, _UsrInfo ) -> [].
%% '''
%% Here, we initialize the storage place with three `cookie_box' tokens. All
%% other places are left empty.
%%
%% <h4>is_enabled/3</h4>
%%
%% The `is_enabled/3' function is a predicate determining whether a given
%% transition is enabled in a given mode. The `UsrInfo' argument is the user
%% info field that has been created with `init/1'.
%%
%% Example:
%% ```
%% is_enabled( a, #{ coin_slot := [coin] }, _UsrInfo ) -> true;
%% is_enabled( b, #{ signal := [sig], storage := [cookie_box] }, _UsrInfo ) -> true;
%% is_enabled( _Trsn, _Mode, _UsrInfo ) -> false.
%% '''
%% Here, we state that the transition `a' is enabled if it can consume a single
%% `coin' from the `coin_slot' place. Similarly, the transition `b' is enabled
%% if it can consume a `sig' token from the `signal' place and a `cookie_box'
%% token from the `storage' place. No other configuration can enable a
%% transition.
E.g., managing to get a `button' token on the `coin_slot' place %% will not enable any transition. %% %% <h4>fire/3</h4> %% %% The `fire/3' function defines what tokens are produced when a given %% transition fires in a given mode. As arguments it takes the name of the %% transition, and a firing mode in the form of a hash map mapping place names %% to token lists. The `fire/3' function is called only on modes for which %% `is_enabled/3' returns `true'. The `fire/3' function is expected to return %% either a `{produce, ProduceMap}' tuple or the term `abort'. If `abort' is %% returned, the firing is aborted. Nothing is produced or consumed. %% %% Example: %% ``` %% fire( a, _Mode, _UsrInfo ) -> %% {produce, #{ cash_box => [coin], signal => [sig] }}; %% fire( b, _Mode, _UsrInfo ) -> %% {produce, #{ compartment => [cookie_box] }}. %% ''' %% Here, the firing of the transition `a' produces a `coin' token on the %% `cash_box' place and a `sig' token on the `signal' place. Similarly, the %% firing of the transition `b' produces a `cookie_box' token on the %% `compartment' place. We do not need to state the tokens to be consumed %% because the firing mode already uniquely identifies the tokens to be %% consumed. 
%% %% %% <h3>Interface Callback Functions</h3> %% %% In addition to the structure callback functions there are another seven %% callback functions that determine how the net instance appears as an Erlang %% actor to the outside world: %% %% <ul> %% <li>`code_change/3' determines what happens when a hot code reload %% appears</li> %% <li>`handle_call/3' synchronous message exchange</li> %% <li>`handle_cast/2' asynchronous message reception</li> %% <li>`handle_info/2' asynchronous reception of an unformatted message</li> %% <li>`init/1' initializes the gen_pnet instance</li> %% <li>`terminate/2' determines what happens when the net instance is %% stopped</li> %% <li>`trigger/3' allows to add a side effects to the generation of a %% token</li> %% </ul> %% %% <h4>code_change/3</h4> %% %% The `code_change/3' function determines what happens when a hot code reload %% appears. This callback is identical to the `code_change/3' function in the %% `gen_server' behavior. %% %% Example: %% ``` %% code_change( _OldVsn, NetState, _Extra ) -> {ok, NetState}. %% ''' %% %% <h4>handle_call/3</h4> %% %% The `handle_call/3' function performs a synchronous exchange of messages %% between the caller and the net instance. The first argument is the request %% message, the second argument is a tuple identifying the caller, and the third %% argument is a `#net_state{}' record instance describing the current state of %% the net. The `handle_call/3' function can generate a reply without changing %% the net marking by returning a `{reply, Reply}' tuple, it can generate a %% reply, consuming or producing tokens by returning a %% `{reply, Reply, ConsumeMap, ProduceMap}' tuple, it can defer replying without %% changing the net marking by returning `noreply', it can defer replying, %% consuming or producing tokens by returning a %% `{noreply, ConsumeMap, ProduceMap}' tuple, or it can stop the net instance by %% returning `{stop, Reason, Reply}'. 
%% %% Example: %% ``` %% handle_call( insert_coin, _From, _NetState ) -> %% {reply, ok, #{}, #{ coin_slot => [coin] }}; %% %% handle_call( remove_cookie_box, _From, NetState ) -> %% %% case gen_pnet:get_ls( compartment, NetState ) of %% [] -> {reply, {error, empty_compartment}}; %% [_|_] -> {reply, ok, #{ compartment => [cookie_box] }, #{}} %% end; %% %% handle_call( _Request, _From, _NetState ) -> {reply, {error, bad_msg}}. %% ''' %% Here, we react to two kinds of messages: Inserting a coin in the coin slot %% and removing a cookie box from the compartment. Thus, we react to an %% `insert_coin' message by replying with `ok', consuming nothing and producing %% a `coin' token on the `coin_slot' place. When receiving a `remove_cookie_box' %% message, we check whether the `compartment' place is empty, replying with an %% error message if it is, otherwise replying with `ok', consuming one %% `cookie_box' token from the `compartment' place, and producing nothing. Calls %% that are neither `insert_coin' nor `remove_cookie_box' are responded to with %% an error message. %% %% <h4>handle_cast/2</h4> %% %% The `handle_cast/2' function reacts to an asynchronous message received by %% the net instance. The first argument is the request while the second argument %% is a `#net_state{}' record instance. The `handle_cast/2' function can either %% leave the net unchanged by returning `noreply' or it can consume or produce %% tokens by returning a `{noreply, ConsumeMap, ProduceMap}' tuple. %% %% Example: %% ``` %% handle_cast( _Request, _NetState ) -> noreply. %% ''' %% Here, we just ignore any cast. %% %% <h4>handle_info/2</h4> %% %% The `handle_info/2' function reacts to an asynchronous, unformatted message %% received by the net instance. The first argument is the message term while %% the second argument is a `#net_state{}' record instance. 
The `handle_info/2' %% function can either leave the net unchanged by returning `noreply' or it can %% consume or produce tokens by returning a `{noreply, ConsumeMap, ProduceMap}' %% tuple. %% %% Example: %% ``` %% handle_info( _Request, _NetState ) -> noreply. %% ''' %% Here, we just ignore any message. %% %% <h4>init/1</h4> %% %% The `init/1' function initializes the net instance. It is given an initial %% argument which is provided with `gen_pnet:start_link/n'. The `init/1' %% function is expected to return a user info field which is later handed to %% other callback functions. %% %% Example: %% ``` %% init( _NetArg ) -> []. %% ''' %% Here, we return the empty list as a dummy user info field. %% %% <h4>terminate/2</h4> %% %% The `terminate/2' function determines what happens when the net instance is %% stopped. The first argument is the reason for termination while the second %% argument is a `#net_state{}' record instance. This callback is identical to %% the `terminate/2' function in the `gen_server' behavior. %% %% Example: %% ``` %% terminate( _Reason, _NetState ) -> ok. %% ''' %% %% <h4>trigger/3</h4> %% %% The `trigger/3' function determines what happens when a token is produced on %% a given place. Its first argument `Place' is the place name, its second %% argument `Token' is the token about to be produced, and its third argument %% `NetState' is the current state of the net. The `trigger/3' function is %% expected to return either `pass' in which case the token is produced %% normally, or `drop' in which case the token is forgotten. %% %% Example: %% ``` %% trigger( _Place, _Token, _NetState ) -> pass. %% ''' %% Here, we simply let any token pass. %% %% @end %% ------------------------------------------------------------------- -module( gen_pnet ). -behaviour( gen_server ). 
%%==================================================================== %% Exports %%==================================================================== % API functions -export( [start_link/3, start_link/4, ls/2, marking/1, call/2, call/3, cast/2, stats/1, reply/2, reset_stats/1, stop/1, usr_info/1, state_property/3] ). % Net state constructor and accessor functions -export( [get_ls/2, get_usr_info/1, get_stats/1] ). % gen_server callbacks -export( [code_change/3, handle_call/3, handle_cast/2, handle_info/2, init/1, terminate/2] ). %%==================================================================== %% Includes %%==================================================================== -include( "gen_pnet.hrl" ). %%==================================================================== %% Type definitions %%==================================================================== -type name() :: atom() | {atom(), atom()} | {global, _} | {via, atom(), _} | pid(). -type server_name() :: {local, atom()} | {global, atom()} | {via, atom(), _}. -type start_link_result() :: {ok, pid()} | ignore | {error, _}. -type handle_call_request() :: {ls, atom()} | marking | usr_info | {call, _} | stats | reset_stats. -type handle_call_result() :: {reply, _, #net_state{}} | {noreply, #net_state{}} | {stop, _, _, #net_state{}}. -type handle_cast_request() :: continue | {cast, _}. -type handle_cast_result() :: {noreply, #net_state{}} | {stop, _, #net_state{}}. -type handle_info_result() :: {noreply, #net_state{}} | {stop, _, #net_state{}}. -type prop() :: atom() | {atom(), _}. %%==================================================================== %% Callback definitions %%==================================================================== %% Structure callbacks -callback place_lst() -> [atom()]. -callback trsn_lst() -> [atom()]. -callback init_marking( Place :: atom(), UsrInfo :: _ ) -> [_]. -callback preset( Trsn :: atom() ) -> [atom()]. 
%% Decides whether transition `Trsn' is enabled in firing mode `Mode'.
-callback is_enabled( Trsn :: atom(), Mode :: #{ atom() => [_]}, UsrInfo :: _ ) ->
            boolean().

%% Computes the tokens produced when `Trsn' fires in `Mode', or aborts.
-callback fire( Trsn :: atom(), Mode :: #{ atom() => [_] }, UsrInfo :: _ ) ->
            abort | {produce, #{ atom() => [_] }}.


%% Interface callbacks

-callback code_change( OldVsn :: _, NetState :: #net_state{}, Extra :: _ ) ->
            {ok, #net_state{}} | {error, _}.

-callback handle_call( Request :: _, From :: {pid(), _},
                       NetState :: #net_state{} ) ->
            {reply, _}
          | {reply, _, #{ atom() => [_] }, #{ atom() => [_] }}
          | noreply
          | {noreply, #{ atom() => [_] }, #{ atom() => [_] }}
          | {stop, _, _}.

-callback handle_cast( Request :: _, NetState :: #net_state{} ) ->
            noreply
          | {noreply, #{ atom() => [_] }, #{ atom() => [_] }}
          | {stop, _}.

-callback handle_info( Info :: _, NetState :: #net_state{} ) ->
            noreply
          | {noreply, #{ atom() => [_] }, #{ atom() => [_] }}
          | {stop, _}.

-callback init( NetArg :: _ ) -> _.

-callback terminate( Reason :: _, NetState :: #net_state{} ) -> ok.

-callback trigger( Place :: atom(), Token :: _, NetState :: #net_state{} ) ->
            pass | drop.


%%====================================================================
%% API functions
%%====================================================================

%% @doc Starts an unregistered net instance.
%% @see start_link/4
-spec start_link( NetMod, NetArg, Options ) -> start_link_result()
when NetMod  :: atom(),
     NetArg  :: _,
     Options :: [prop()].

start_link( NetMod, NetArg, Options )
when is_atom( NetMod ), is_list( Options ) ->
  gen_server:start_link( ?MODULE, {NetMod, NetArg}, Options ).

%% @doc Starts a net instance registered as `ServerName' using the callback
%%      module `NetMod' as the callback module for this net instance.
%%
%%      The `InitArg' argument is later handed to the `init/1' callback. The
%%      `ServerName' argument can be
%%      `{local, Name} | {global, Name} | {via, Module, ViaName}'. Internally,
%%      the server name `ServerName' and option list `Options' are handed down
%%      to `gen_server:start_link/4' as is.
%%
%% @see init/1
-spec start_link( ServerName, NetMod, InitArg, Options ) -> start_link_result()
when ServerName :: server_name(),
     NetMod     :: atom(),
     InitArg    :: _,
     Options    :: [prop()].

start_link( ServerName, NetMod, InitArg, Options )
when is_tuple( ServerName ), is_atom( NetMod ), is_list( Options ) ->
  gen_server:start_link( ServerName, ?MODULE, {NetMod, InitArg}, Options ).

%% @doc Query the list of tokens on the place named `Place' in the net instance
%%      identified as `Name'.
%%
%%      Herein, `Name' can be a process id or a registered process name. The
%%      return value is either `{ok, [_]}' if the place exists or a
%%      `{error, #bad_place{}}' tuple.
-spec ls( Name, Place ) -> {ok, [_]} | {error, #bad_place{}}
when Name  :: name(),
     Place :: atom().

ls( Name, Place ) when is_atom( Place ) ->
  gen_server:call( Name, {ls, Place} ).

%% @doc Query the marking map of the net instance identified as `Name'
%%      associating to each place name the list of tokens that this place holds.
%%
%%      Herein, `Name' can be a process id or a registered process name. The
%%      return value is the Petri net's marking map.
-spec marking( Name :: name() ) -> #{ atom() => [_] }.

marking( Name ) ->
  gen_server:call( Name, marking ).

%% @doc Query the user info term from the net instance identified as `Name'.
-spec usr_info( Name :: name() ) -> _.

usr_info( Name ) ->
  gen_server:call( Name, usr_info ).

%% @doc Query the statistics gathered by the net instance identified as `Name'.
%%
%%      The throughput is given as a `#stats{}' record consisting of three
%%      `#stat{}' record instances characterizing the current, maximum, and
%%      minimum throughput of this net in transition firings per second.
-spec stats( Name :: name() ) -> #stats{}.

stats( Name ) ->
  gen_server:call( Name, stats ).

%% @doc Requests the net instance identified as `Name' to clear its stats.
-spec reset_stats( Name :: name() ) -> ok.

reset_stats( Name ) ->
  gen_server:call( Name, reset_stats ).
%% @doc Requests the net instance identified as `Name' to stop.
-spec stop( Name :: name() ) -> ok.

stop( Name ) ->
  gen_server:stop( Name ).

%% @doc Synchronously send the term `Request' to the net instance identified as
%%      `Name' and return the reply.
%%      The timeout is implicitly set to five seconds.
%%
%% @see call/3
-spec call( Name :: name(), Request :: _ ) -> _.

call( Name, Request ) ->
  gen_server:call( Name, {call, Request} ).

%% @doc Synchronously send the term `Request' to the net instance identified as
%%      `Name' and return the reply.
%%
%%      The timeout is explicitly set to `Timeout'. The request is handled by
%%      the `handle_call/3' callback function of the interface module. Herein
%%      `Timeout' must be a non-negative integer or the atom `infinity'.
-spec call( Name, Request, Timeout ) -> _
when Name    :: name(),
     Request :: _,
     Timeout :: non_neg_integer() | infinity.

call( Name, Request, Timeout ) when is_integer( Timeout ), Timeout >= 0 ->
  gen_server:call( Name, {call, Request}, Timeout );

call( Name, Request, infinity ) ->
  gen_server:call( Name, {call, Request}, infinity ).

%% @doc Asynchronously send the term `Request' to the net instance identified as
%%      `Name'.
%%
%%      The request is handled by the `handle_cast/2' callback function of the
%%      interface module. Note that the cast succeeds even if a non-existing
%%      process is addressed or the net instance is down.
-spec cast( Name :: name(), Request :: _ ) -> ok.

cast( Name, Request ) ->
  gen_server:cast( Name, {cast, Request} ).

%% @doc Sends a reply to a calling client process.
%%
%%      This function is to be used when the reply to a caller has been
%%      deferred by returning `{noreply, _, _}' in `handle_call/3'.
%%
%% @see handle_call/3
-spec reply( Client :: {pid(), _}, Reply :: _ ) -> _.

reply( Client, Reply ) when is_tuple( Client ) ->
  gen_server:reply( Client, Reply ).

%% @doc Checks if a predicate about the state of the net holds.
%%
%%      The function takes a Petri net instance identified as `Name' and asks it
%%      to verify the predicate `Pred' over its marking. Herein, `Pred' is a
%%      function that takes n token lists, where each of the token lists subsume
%%      the tokens present on the places identified by the `PlaceLst' argument.
%%      The predicate is expected to return either `ok' or `{error, Reason}'
%%      where Reason can be any Erlang term.
-spec state_property( Name, Pred, PlaceLst ) -> ok | {error, Reason}
when Name     :: name(),
     Pred     :: fun( ( ... ) -> ok | {error, Reason} ),
     PlaceLst :: [atom()].

state_property( Name, Pred, PlaceLst )
when is_list( PlaceLst ),
     is_function( Pred, length( PlaceLst ) ) ->
  % Pred's arity must equal the number of places queried; each token list
  % becomes one positional argument of Pred.
  Marking = gen_pnet:marking( Name ),
  ArgLst = [maps:get( Place, Marking ) || Place <- PlaceLst],
  apply( Pred, ArgLst ).


%%====================================================================
%% Net state constructor and accessor functions
%%====================================================================

%% @doc Extracts the list of tokens on a given place from a given net state.
%%
%%      Throws an error if the list does not exist.
-spec get_ls( Place :: atom(), NetState :: #net_state{} ) -> [_].

get_ls( Place, #net_state{ marking = Marking } ) ->
  maps:get( Place, Marking ).

%% @doc Extracts the user info field from a given net state.
-spec get_usr_info( NetState :: #net_state{} ) -> _.

get_usr_info( #net_state{ usr_info = UsrInfo } ) ->
  UsrInfo.

%% @doc Extracts the stats field from a given net instance.
-spec get_stats( NetState :: #net_state{} ) -> #stats{}.

get_stats( #net_state{ stats = Stats } ) ->
  Stats.


%%====================================================================
%% Generic server callback functions
%%====================================================================

%% @private
-spec code_change( OldVsn, NetState, Extra ) -> {ok, #net_state{}} | {error, _}
when OldVsn   :: _,
     NetState :: #net_state{},
     Extra    :: _.
%% Delegates hot code upgrade to the net callback module.
code_change( OldVsn, NetState = #net_state{ net_mod = NetMod }, Extra ) ->
  NetMod:code_change( OldVsn, NetState, Extra ).

%% @private
-spec handle_call( Request, From, NetState ) -> handle_call_result()
when Request  :: handle_call_request(),
     From     :: {pid(), _},
     NetState :: #net_state{}.

%% Administrative query: list the tokens on one place.
handle_call( {ls, Place}, _From, NetState = #net_state{ marking = Marking } ) ->

  Reply = case maps:is_key( Place, Marking ) of
            true  -> {ok, maps:get( Place, Marking )};
            false -> {error, #bad_place{ name = Place }}
          end,

  {reply, Reply, NetState};

%% Administrative query: return the whole marking map.
handle_call( marking, _From, NetState = #net_state{ marking = Marking } ) ->
  {reply, Marking, NetState};

%% Administrative query: return the user info term.
handle_call( usr_info, _From, NetState = #net_state{ usr_info = UsrInfo } ) ->
  {reply, UsrInfo, NetState};

%% User-level call: delegated to the net module's handle_call/3. If the
%% callback consumes/produces tokens we apply the delta and schedule another
%% progress step via continue/1.
handle_call( {call, Request}, From,
             NetState = #net_state{ net_mod = NetMod } ) ->

  case NetMod:handle_call( Request, From, NetState ) of

    {reply, Reply} ->
      {reply, Reply, NetState};

    {reply, Reply, CnsMap, ProdMap} ->
      NetState1 = cns( CnsMap, NetState ),
      NetState2 = handle_trigger( ProdMap, NetState1 ),
      continue( self() ),
      {reply, Reply, NetState2};

    noreply ->
      {noreply, NetState};

    {noreply, CnsMap, ProdMap} ->
      NetState1 = cns( CnsMap, NetState ),
      NetState2 = handle_trigger( ProdMap, NetState1 ),
      continue( self() ),
      {noreply, NetState2};

    {stop, Reason, Reply} ->
      {stop, Reason, Reply, NetState}

  end;

%% Administrative query: return gathered throughput statistics.
handle_call( stats, _From, NetState = #net_state{ stats = Stats } ) ->
  {reply, Stats, NetState};

%% Administrative request: clear the statistics record.
handle_call( reset_stats, _From, NetState ) ->
  {reply, ok, NetState#net_state{ stats = undefined }}.

%% @private
-spec handle_cast( Request, NetState ) -> handle_cast_result()
when Request  :: handle_cast_request(),
     NetState :: #net_state{}.

%% Self-addressed 'continue' message: attempt one transition firing. On
%% success the delta is applied, another 'continue' is scheduled, and every
%% 1000 firings the throughput statistics are updated.
handle_cast( continue, NetState = #net_state{ stats  = Stats,
                                              tstart = T1,
                                              cnt    = Cnt } ) ->

  case progress( NetState ) of

    abort -> {noreply, NetState};

    {delta, Mode, Pm} ->

      NetState1 = cns( Mode, NetState ),
      NetState2 = handle_trigger( Pm, NetState1 ),
      continue( self() ),

      NetState3 =
        if
          Cnt < 1000 -> NetState2#net_state{ cnt = Cnt+1 };
          true       ->

            T2 = os:system_time(),
            Tmean = round( ( T1+T2 )/2 ),
            Tdelta = T2-T1,
            % 1000 firings over Tdelta units of native time
            % (assumes native time unit is nanoseconds -- TODO confirm)
            CurrentFps = 1000000000000/Tdelta,

            Current = #stat{ t = Tmean, fps = CurrentFps },

            {Hi1, Lo1} =
              case Stats of
                undefined                 -> {Current, Current};
                #stats{ hi = H, lo = L }  -> {H, L}
              end,

            #stat{ fps = HiFps } = Hi1,
            #stat{ fps = LoFps } = Lo1,

            Hi2 = if CurrentFps > HiFps -> Current; true -> Hi1 end,
            Lo2 = if CurrentFps < LoFps -> Current; true -> Lo1 end,

            NetState2#net_state{ stats  = #stats{ current = Current,
                                                  hi      = Hi2,
                                                  lo      = Lo2 },
                                 tstart = T2,
                                 cnt    = 0 }
        end,

      {noreply, NetState3}

  end;

%% User-level cast: delegated to the net module's handle_cast/2.
handle_cast( {cast, Request}, NetState = #net_state{ net_mod = NetMod } ) ->

  case NetMod:handle_cast( Request, NetState ) of

    noreply -> {noreply, NetState};

    {noreply, CnsMap, ProdMap} ->
      NetState1 = cns( CnsMap, NetState ),
      NetState2 = handle_trigger( ProdMap, NetState1 ),
      continue( self() ),
      {noreply, NetState2};

    {stop, Reason} -> {stop, Reason, NetState}

  end.

%% @private
-spec handle_info( Info, NetState ) -> handle_info_result()
when Info     :: _,
     NetState :: #net_state{}.

%% Raw message: delegated to the net module's handle_info/2.
handle_info( Info, NetState = #net_state{ net_mod = NetMod } ) ->

  case NetMod:handle_info( Info, NetState ) of

    noreply -> {noreply, NetState};

    {noreply, CnsMap, ProdMap} ->
      NetState1 = cns( CnsMap, NetState ),
      NetState2 = handle_trigger( ProdMap, NetState1 ),
      continue( self() ),
      {noreply, NetState2};

    {stop, Reason} -> {stop, Reason, NetState}

  end.

%% @private
-spec init( ArgPair :: {atom(), _} ) -> {ok, #net_state{}}.
%% Builds the initial marking from the callback module and kicks off the
%% first progress step.
init( {NetMod, NetArg} ) ->

  UsrInfo = NetMod:init( NetArg ),
  PlaceLst = NetMod:place_lst(),

  F = fun( P, Acc ) ->
        Acc#{ P => NetMod:init_marking( P, UsrInfo ) }
      end,

  InitMarking = lists:foldl( F, #{}, PlaceLst ),

  continue( self() ),

  {ok, #net_state{ net_mod  = NetMod,
                   usr_info = UsrInfo,
                   marking  = InitMarking,
                   stats    = undefined,
                   tstart   = os:system_time(),
                   cnt      = 0 }}.

%% @private
-spec terminate( Reason :: _, NetState :: #net_state{} ) -> ok.

terminate( Reason, NetState = #net_state{ net_mod = NetMod } ) ->
  NetMod:terminate( Reason, NetState ).


%%====================================================================
%% Internal functions
%%====================================================================

%% @doc Continue making progress in net instance under process id `Name'.
%%
%%      Note that continuing succeeds even if a non-existing process is
%%      addressed or the net instance is down.
-spec continue( Name :: name() ) -> ok.

continue( Name ) ->
  gen_server:cast( Name, continue ).

%% Runs the trigger/3 callback on every produced token; tokens for which the
%% callback returns 'drop' are discarded, the rest are added to the marking
%% via prd/2. (The fold builds each token list in reverse of ProdMap order.)
-spec handle_trigger( ProdMap, NetState ) -> #net_state{}
when ProdMap  :: #{ atom() => [_] },
     NetState :: #net_state{}.

handle_trigger( ProdMap, NetState = #net_state{ net_mod = NetMod } ) ->

  G = fun( P, TkLst, Acc ) ->

        F = fun( Tk, A ) ->
              case NetMod:trigger( P, Tk, NetState ) of
                pass -> [Tk|A];
                drop -> A
              end
            end,

        TkLst1 = lists:foldl( F, [], TkLst ),
        Acc#{ P => TkLst1 }

      end,

  ProdMap1 = maps:fold( G, #{}, ProdMap ),
  prd( ProdMap1, NetState ).

%% Consumes tokens: removes the tokens listed in Mode from the marking.
-spec cns( Mode, NetState ) -> #net_state{}
when Mode     :: #{ atom() => [_] },
     NetState :: #net_state{}.

cns( Mode, NetState = #net_state{ marking = Marking } ) ->

  F = fun( T, TkLst, Acc ) ->
        Acc#{ T => TkLst--maps:get( T, Mode, [] ) }
      end,

  NetState#net_state{ marking = maps:fold( F, #{}, Marking ) }.

%% Produces tokens: appends the tokens listed in ProdMap to the marking.
-spec prd( ProdMap, NetState ) -> #net_state{}
when ProdMap  :: #{ atom() => [_] },
     NetState :: #net_state{}.

prd( ProdMap, NetState = #net_state{ marking = Marking } ) ->

  F = fun( T, TkLst, Acc ) ->
        Acc#{ T => TkLst++maps:get( T, ProdMap, [] ) }
      end,

  NetState#net_state{ marking = maps:fold( F, #{}, Marking ) }.

%% Enumerates all enabled (transition, mode) pairs and tries to fire one.
-spec progress( NetState :: #net_state{} ) ->
        abort | {delta, #{ atom() => [_]}, #{ atom() => [_] }}.

progress( #net_state{ marking  = Marking,
                      net_mod  = NetMod,
                      usr_info = UsrInfo } ) ->

  % get all transitions in the net
  TrsnLst = NetMod:trsn_lst(),

  F = fun( T, Acc ) ->

        Preset = NetMod:preset( T ),
        MLst = enum_mode( Preset, Marking ),
        IsEnabled = fun( M ) -> NetMod:is_enabled( T, M, UsrInfo ) end,
        EnabledMLst = lists:filter( IsEnabled, MLst ),

        case EnabledMLst of
          []    -> Acc;
          [_|_] -> Acc#{ T => EnabledMLst }
        end

      end,

  % derive a map listing all enabled modes for each transition
  ModeMap = lists:foldl( F, #{}, TrsnLst ),

  % delegate enabled mode map to attempt_progress function
  attempt_progress( ModeMap, NetMod, UsrInfo ).

%% Picks a random enabled transition and mode and asks the callback module
%% to fire it. If the callback aborts, the mode (and eventually the
%% transition) is removed and another candidate is tried, until either some
%% firing succeeds or no enabled mode remains.
-spec attempt_progress( ModeMap, NetMod, UsrInfo ) -> abort | {delta, _, _}
when ModeMap :: #{ atom() => [_] },
     NetMod  :: atom(),
     UsrInfo :: _.

attempt_progress( ModeMap, NetMod, UsrInfo ) ->

  case maps:size( ModeMap ) of

    0 -> abort;
    _ ->

      TrsnLst = maps:keys( ModeMap ),
      Trsn = lib_combin:pick_from( TrsnLst ),
      #{ Trsn := ModeLst } = ModeMap,
      Mode = lib_combin:pick_from( ModeLst ),

      case NetMod:fire( Trsn, Mode, UsrInfo ) of

        {produce, ProdMap} ->
          {delta, Mode, ProdMap};

        abort ->
          ModeLst1 = ModeLst--[Mode],
          case ModeLst1 of
            []    -> attempt_progress( maps:remove( Trsn, ModeMap ),
                                       NetMod, UsrInfo );
            [_|_] -> attempt_progress( ModeMap#{ Trsn := ModeLst1 },
                                       NetMod, UsrInfo )
          end

      end

  end.

%% Enumerates every possible firing mode for a transition with the given
%% preset under the given marking.
-spec enum_mode( Preset, Marking ) -> [#{ atom() => [_] }]
when Preset  :: [atom()],
     Marking :: #{ atom() => [_] }.
enum_mode( Preset, Marking ) -> F = fun( P, Acc ) -> N = maps:get( P, Acc, 0 ), Acc#{ P => N+1 } end, % gather count map CountMap = lists:foldl( F, #{}, Preset ), G = fun( P, N, Acc ) -> #{ P := TkLst } = Marking, Acc#{ P => lib_combin:cnr( N, TkLst ) } end, % enumerate drawing combinations for each preset place individually CmbMap = maps:fold( G, #{}, CountMap ), % enumerate permutations of map containing drawing combinations lib_combin:permut_map( CmbMap ).
src/gen_pnet.erl
0.717507
0.513607
gen_pnet.erl
starcoder
%% This is an extreme simplification of a typical transient fault. %% The underlaying error is a possible race condition that kicks in now and then. %% The race is observable by a non-functioning API call, resulting in 'error' %% instead of expected value 'ok' %% %% The original bug is a piece of software with many more layers, in which the API %% is a http request. A lot more happens in a real system, many more layers, %% but it is interesting to see whether sessions types can contribute in this %% extreme simplification of the issue. %% %% For the purpose of the example, I removed all OTP behaviour. %% If this error can successfully be detected with session types, then creating %% a version with appplication, supervisor and gen_servers is a logical next step. %% %% Compare results to the use of PULSE %% http://www.cse.chalmers.se/~nicsma/papers/finding-race-conditions.pdf %% (https://dl.acm.org/doi/10.1145/1631687.1596574) %% -module(transient). %% The API -export([start/0, stop/1]). -export([create_room/2, create_user/3]). -spec start() -> pid(). start() -> spawn(fun() -> server(0) end). -spec stop(pid()) -> ok. stop(Pid) -> Pid ! {stop, self()}, receive stopped -> ok after 200 -> exit(Pid, kill), ok end. -spec create_room(pid(), string()) -> integer(). create_room(Pid, Name) -> Pid ! {room, self(), Name}, receive {room_id, Room} -> Room end. %% Interesting other error is when we match in the receive %% on {user_id, _}, because then client could get in a deadlock state. -spec create_user(pid(), integer(), string()) -> {user_id, integer()}. create_user(Pid, RoomId, Name) -> Pid ! {user, self(), RoomId, Name}, receive User -> User end. %% This code should be in separate modules %% but here for case of simplicity in working with it server(Rooms) -> receive {room, From, Name} -> RoomId = list_to_atom(lists:concat(["room",Rooms])), From ! 
{room_id, RoomId}, _ = spawn_link(fun() -> room(RoomId, Name, [{From, "created"}]) end), server(Rooms + 1); {user, From, RoomId, Name} -> try RoomId ! {user, From, Name} catch _:_ -> From ! error end, server(Rooms); {stop, From} -> exit(stopped) end. room(RoomId, Name, Msgs) -> register(RoomId, self()), room_loop(RoomId, Name, Msgs, []). room_loop(RoomId, Name, Msgs, Users) -> receive {user, From, User} -> UserId = length(Users), From ! {user_id, UserId}, room_loop(RoomId, Name, [{From, User, "welcome"} | Msgs], [{UserId, User} | Users]) end.
transient/src/transient.erl
0.513181
0.549761
transient.erl
starcoder
%%%------------------------------------------------------------------------ %% Copyright 2019, OpenTelemetry Authors %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %% %% @doc This module has the behaviour that each exporter must implement %% and creates the buffer of trace spans to be exported. %% %% The exporter process can be configured to export the current finished %% spans based on timeouts and the size of the finished spans table. %% %% Timeouts: %% exporting_timeout_ms: How long to let the exports run before killing. %% check_table_size_ms: Timeout to check the size of the export table. %% scheduled_delay_ms: How often to trigger running the exporters. %% %% The size limit of the current table where finished spans are stored can %% be configured with the `max_queue_size' option. %% @end %%%----------------------------------------------------------------------- -module(otel_batch_processor). -behaviour(gen_statem). -behaviour(otel_span_processor). -export([start_link/1, on_start/3, on_end/2, set_exporter/1, set_exporter/2]). -export([init/1, callback_mode/0, idle/3, exporting/3, terminate/3]). -include_lib("opentelemetry_api/include/opentelemetry.hrl"). -include_lib("kernel/include/logger.hrl"). -include("otel_span.hrl"). 
-record(data, {exporter :: {module(), term()} | undefined, resource :: otel_resource:t(), handed_off_table :: atom() | undefined, runner_pid :: pid() | undefined, max_queue_size :: integer() | infinity, exporting_timeout_ms :: integer(), check_table_size_ms :: integer() | infinity, scheduled_delay_ms :: integer()}). -define(CURRENT_TABLES_KEY, {?MODULE, current_table}). -define(TABLE_1, otel_export_table1). -define(TABLE_2, otel_export_table2). -define(CURRENT_TABLE, persistent_term:get(?CURRENT_TABLES_KEY)). -define(DEFAULT_MAX_QUEUE_SIZE, 2048). -define(DEFAULT_SCHEDULED_DELAY_MS, timer:seconds(5)). -define(DEFAULT_EXPORTER_TIMEOUT_MS, timer:minutes(5)). -define(DEFAULT_CHECK_TABLE_SIZE_MS, timer:seconds(1)). -define(ENABLED_KEY, {?MODULE, enabled_key}). start_link(Opts) -> gen_statem:start_link({local, ?MODULE}, ?MODULE, [Opts], []). %% @equiv set_exporter(Exporter, []) set_exporter(Exporter) -> set_exporter(Exporter, []). %% @doc Sets the batch exporter `Exporter'. -spec set_exporter(module(), term()) -> ok. set_exporter(Exporter, Options) -> gen_statem:call(?MODULE, {set_exporter, {Exporter, Options}}). -spec on_start(otel_ctx:t(), opentelemetry:span(), otel_span_processor:processor_config()) -> opentelemetry:span(). on_start(_Ctx, Span, _) -> Span. -spec on_end(opentelemetry:span(), otel_span_processor:processor_config()) -> true | dropped | {error, invalid_span} | {error, no_export_buffer}. on_end(Span=#span{}, _) -> do_insert(Span); on_end(_Span, _) -> {error, invalid_span}. 
init([Args]) -> process_flag(trap_exit, true), SizeLimit = maps:get(max_queue_size, Args, ?DEFAULT_MAX_QUEUE_SIZE), ExportingTimeout = maps:get(exporting_timeout_ms, Args, ?DEFAULT_EXPORTER_TIMEOUT_MS), ScheduledDelay = maps:get(scheduled_delay_ms, Args, ?DEFAULT_SCHEDULED_DELAY_MS), CheckTableSize = maps:get(check_table_size_ms, Args, ?DEFAULT_CHECK_TABLE_SIZE_MS), Exporter = init_exporter(maps:get(exporter, Args, undefined)), Resource = otel_tracer_provider:resource(), _Tid1 = new_export_table(?TABLE_1), _Tid2 = new_export_table(?TABLE_2), persistent_term:put(?CURRENT_TABLES_KEY, ?TABLE_1), enable(), {ok, idle, #data{exporter=Exporter, resource = Resource, handed_off_table=undefined, max_queue_size=case SizeLimit of infinity -> infinity; _ -> SizeLimit div erlang:system_info(wordsize) end, exporting_timeout_ms=ExportingTimeout, check_table_size_ms=CheckTableSize, scheduled_delay_ms=ScheduledDelay}}. callback_mode() -> [state_functions, state_enter]. idle(enter, _OldState, #data{scheduled_delay_ms=SendInterval}) -> {keep_state_and_data, [{{timeout, export_spans}, SendInterval, export_spans}]}; idle(_, export_spans, Data) -> {next_state, exporting, Data}; idle(EventType, Event, Data) -> handle_event_(idle, EventType, Event, Data). 
exporting({timeout, export_spans}, export_spans, _) -> {keep_state_and_data, [postpone]}; exporting(enter, _OldState, Data=#data{exporting_timeout_ms=ExportingTimeout, scheduled_delay_ms=SendInterval}) -> {OldTableName, RunnerPid} = export_spans(Data), {keep_state, Data#data{runner_pid=RunnerPid, handed_off_table=OldTableName}, [{state_timeout, ExportingTimeout, exporting_timeout}, {{timeout, export_spans}, SendInterval, export_spans}]}; exporting(state_timeout, exporting_timeout, Data=#data{handed_off_table=ExportingTable}) -> %% kill current exporting process because it is taking too long %% which deletes the exporting table, so create a new one and %% repeat the state to force another span exporting immediately Data1 = kill_runner(Data), new_export_table(ExportingTable), {repeat_state, Data1}; %% important to verify runner_pid and FromPid are the same in case it was sent %% after kill_runner was called but before it had done the unlink exporting(info, {'EXIT', FromPid, _}, Data=#data{runner_pid=FromPid}) -> complete_exporting(Data); %% important to verify runner_pid and FromPid are the same in case it was sent %% after kill_runner was called but before it had done the unlink exporting(info, {completed, FromPid}, Data=#data{runner_pid=FromPid}) -> complete_exporting(Data); exporting(EventType, Event, Data) -> handle_event_(exporting, EventType, Event, Data). 
handle_event_(_State, {timeout, check_table_size}, check_table_size, #data{max_queue_size=infinity}) -> keep_state_and_data; handle_event_(_State, {timeout, check_table_size}, check_table_size, #data{max_queue_size=MaxQueueSize}) -> case ets:info(?CURRENT_TABLE, size) of M when M >= MaxQueueSize -> disable(), keep_state_and_data; _ -> enable(), keep_state_and_data end; handle_event_(_, {call, From}, {set_exporter, Exporter}, Data=#data{exporter=OldExporter}) -> shutdown_exporter(OldExporter), {keep_state, Data#data{exporter=init_exporter(Exporter)}, [{reply, From, ok}]}; handle_event_(_, _, _, _) -> keep_state_and_data. terminate(_, _, _Data) -> %% TODO: flush buffers to exporter ok. %% enable()-> persistent_term:put(?ENABLED_KEY, true). disable() -> persistent_term:put(?ENABLED_KEY, false). is_enabled() -> persistent_term:get(?ENABLED_KEY, true). do_insert(Span) -> try case is_enabled() of true -> ets:insert(?CURRENT_TABLE, Span); _ -> dropped end catch error:badarg -> {error, no_batch_span_processor}; _:_ -> {error, other} end. complete_exporting(Data=#data{handed_off_table=ExportingTable}) when ExportingTable =/= undefined -> new_export_table(ExportingTable), {next_state, idle, Data#data{runner_pid=undefined, handed_off_table=undefined}}. kill_runner(Data=#data{runner_pid=RunnerPid}) -> erlang:unlink(RunnerPid), erlang:exit(RunnerPid, kill), Data#data{runner_pid=undefined, handed_off_table=undefined}. new_export_table(Name) -> ets:new(Name, [public, named_table, {write_concurrency, true}, duplicate_bag, %% OpenTelemetry exporter protos group by the %% instrumentation_library. So using instrumentation_library %% as the key means we can easily lookup all spans for %% for each instrumentation_library and export together. {keypos, #span.instrumentation_library}]). 
init_exporter(undefined) -> undefined; init_exporter({ExporterModule, Config}) when is_atom(ExporterModule) -> case ExporterModule:init(Config) of {ok, ExporterConfig} -> {ExporterModule, ExporterConfig}; ignore -> undefined end; init_exporter(ExporterModule) when is_atom(ExporterModule) -> init_exporter({ExporterModule, []}). shutdown_exporter(undefined) -> ok; shutdown_exporter({ExporterModule, Config}) -> ExporterModule:shutdown(Config). export_spans(#data{exporter=Exporter, resource=Resource}) -> CurrentTable = ?CURRENT_TABLE, NewCurrentTable = case CurrentTable of ?TABLE_1 -> ?TABLE_2; ?TABLE_2 -> ?TABLE_1 end, %% an atom is a single word so this does not trigger a global GC persistent_term:put(?CURRENT_TABLES_KEY, NewCurrentTable), %% set the table to accept inserts enable(), Self = self(), RunnerPid = erlang:spawn_link(fun() -> send_spans(Self, Resource, Exporter) end), ets:give_away(CurrentTable, RunnerPid, export), {CurrentTable, RunnerPid}. %% Additional benefit of using a separate process is calls to `register` won't %% timeout if the actual exporting takes longer than the call timeout send_spans(FromPid, Resource, Exporter) -> receive {'ETS-TRANSFER', Table, FromPid, export} -> TableName = ets:rename(Table, current_send_table), export(Exporter, Resource, TableName), ets:delete(TableName), completed(FromPid) end. completed(FromPid) -> FromPid ! {completed, self()}. export(undefined, _, _) -> true; export({Exporter, Config}, Resource, SpansTid) -> %% don't let a exporter exception crash us %% and return true if exporter failed try Exporter:export(SpansTid, Resource, Config) =:= failed_not_retryable catch Class:Exception:StackTrace -> ?LOG_INFO("exporter threw exception: exporter=~p ~p:~p stacktrace=~p", [Exporter, Class, Exception, StackTrace]), true end.
apps/opentelemetry/src/otel_batch_processor.erl
0.586168
0.435241
otel_batch_processor.erl
starcoder
% Licensed under the Apache License, Version 2.0 (the "License"); you may not % use this file except in compliance with the License. You may obtain a copy of % the License at % % http://www.apache.org/licenses/LICENSE-2.0 % % Unless required by applicable law or agreed to in writing, software % distributed under the License is distributed on an "AS IS" BASIS, WITHOUT % WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the % License for the specific language governing permissions and limitations under % the License. % This module implements the modification of the vtree for insertion and % deletion. It follows the normal R-tree rules, but takes the "original MBB" % into account, which is RR*-tree specific. It calls out to modules for the % choosing the correct subtree and splitting the nodes. % % This insertion/deletion supports bulks, i.e. adding/removing multiple items % at the same time. This is faster than subsequent single inserts/deletes. % The reason for being faster is, that multiple nodes can be written % at once, before their parent node is written. % % This module is the central piece for the algorithm. The modules vtree_insert % and vtree_delete implement the specific actions that are different between % insertion and deletion. But as you can see they are very similar, only the % function that handle the KP-nodes during traversal and the KV-nodes during % modification are different. % % Here's a quick outline of the way the bulk operations work. First you have a % list of nodes that all want to be added/deleted to the tree. You start at % the root node: % 1. Get all child nodes % 2. Loop through all nodes that should be inserted/delete and group/partition % them into chunks. All nodes that would be added to/removed from a certain % node according to the partition function end up in one partition. % 3. Now keep traversing the tree in depth-first manner and start with 1., % until you hit a leaf node. 
There you do the actual insertion/deletion. -module(vtree_modify). -include("vtree.hrl"). -include("couch_db.hrl"). -export([write_new_root/2, write_nodes/3, modify_multiple/5, get_overflowing_subset/2]). -ifdef(makecheck). -compile(nowarn_export_all). -compile(export_all). -endif. % Write a new root node for the given nodes. In case there are more than % than it can hold, write a new root recursively. Stop when the root is a % single node. -spec write_new_root(Vt :: #vtree{}, Nodes :: [#kp_node{} | #kv_node{}]) -> #kp_node{}. write_new_root(_Vt, [Root]) -> Root; % The `write_nodes/3` call will handle the splitting if needed. It could % happen that the byte size of nodes returned by `write_nodes/3` is bigger % than the chunk threshold, hence the recursive call. write_new_root(Vt, Nodes) -> MbbO = vtree_util:nodes_mbb(Nodes, Vt#vtree.less), WrittenNodes = write_nodes(Vt, Nodes, MbbO), write_new_root(Vt, WrittenNodes). % Add a list of nodes one by one into a list of nodes (think of the latter % list as list containing child nodes). The node the new nodes get inserted % to, will automatically be split. % The result will again be a list of multiple nodes, with a maximum number of % nodes that still satisfy the chunk threshold. The total number of elements % in the resulting list can be bigger than the node can actually hold, hence % you might need to call it recursively. -spec insert_into_nodes(Vt :: #vtree{}, NodePartitions :: [[#kv_node{} | #kp_node{}]], MbbO :: mbb(), ToInsert :: [#kv_node{} | #kp_node{}]) -> [[#kv_node{} | #kp_node{}]]. 
insert_into_nodes(_Vt, NodePartitions, _MbbO, []) -> NodePartitions; insert_into_nodes(Vt, NodePartitions, MbbO, [ToInsert|Rest]) -> Less = Vt#vtree.less, Mbb = get_key(ToInsert), FillMax = get_chunk_threshold(Vt, ToInsert), % Every node partition contains a list of nodes, the maximum number is % given by the chunk threshold % Start with calculating the MBBs of the partitions PartitionMbbs = [vtree_util:nodes_mbb(Nodes, Less) || Nodes <- NodePartitions], % Choose the partition the new node should be inserted to. % vtree_choode:choose_subtree/3 expects a list of 2-tuples with the MBB % and any value you like. We use the index in the list as second element % in the tuple, so we can insert the new nodes there easily. NodesNumbered = lists:zip(PartitionMbbs, lists:seq(0, length(PartitionMbbs)-1)), {_, NodeIndex} = vtree_choose:choose_subtree(NodesNumbered, Mbb, Less), {A, [Nth|B]} = lists:split(NodeIndex, NodePartitions), TmpInserted = [ToInsert | Nth], NewNodes = case ?ext_size(TmpInserted) > FillMax of % Maximum number of nodes reached, hence split it true -> {C, D} = split_node(Vt, TmpInserted, MbbO), A ++ [C, D] ++ B; % No need to split the node, just insert the new one false -> A ++ [TmpInserted] ++ B end, insert_into_nodes(Vt, NewNodes, MbbO, Rest). -spec get_key(Node :: #kv_node{} | #kp_node{}) -> mbb(). get_key(#kv_node{}=Node) -> Node#kv_node.key; get_key(#kp_node{}=Node) -> Node#kp_node.key. -spec get_chunk_threshold(Vt :: #vtree{}, Node :: #kv_node{} | #kp_node{}) -> number(). get_chunk_threshold(Vt, #kv_node{}) -> Vt#vtree.kv_chunk_threshold; get_chunk_threshold(Vt, #kp_node{}) -> Vt#vtree.kp_chunk_threshold. % Return a minimal subset of nodes that are just about bigger than `FillMax` -spec get_overflowing_subset(FillMax :: number(), Nodes :: [#kv_node{} | #kp_node{} | split_node()]) -> {[#kv_node{}], [#kv_node{}]} | {[#kp_node{}], [#kp_node{}]} | {[split_node()], [split_node()]}. 
get_overflowing_subset(FillMax, Nodes) -> get_overflowing_subset(FillMax, Nodes, []). -spec get_overflowing_subset(FillMax :: number(), Nodes :: [#kv_node{} | #kp_node{} | split_node()], Acc :: [#kv_node{} | #kp_node{} | split_node()]) -> {[#kv_node{}], [#kv_node{}]} | {[#kp_node{}], [#kp_node{}]} | {[split_node()], [split_node()]}. get_overflowing_subset(_FillMax, [], Acc) -> {lists:reverse(Acc), []}; get_overflowing_subset(FillMax, [H|T]=Nodes, Acc) -> case ?ext_size(Acc) < FillMax of true -> get_overflowing_subset(FillMax, T, [H|Acc]); false -> {lists:reverse(Acc), Nodes} end. % `MbbO` is the original MBB when it was create the first time % It will return a list of KP-nodes. It might return more than the chunk % threshold (but the size of the nodes per node will be limited by the chunk % threshold). -spec write_nodes(Vt :: #vtree{}, Nodes :: [#kv_node{} | #kp_node{}], MbbO :: mbb()) -> [#kp_node{}]. % All nodes were deleted, the current node is empty now write_nodes(_Vt, [], _MbbO) -> []; write_nodes(Vt, [#kv_node{}|_]=Nodes, MbbO) -> FillMax = Vt#vtree.kv_chunk_threshold, Size = ?ext_size(Nodes), write_nodes(Vt, Nodes, MbbO, FillMax, Size); write_nodes(Vt, [#kp_node{}|_]=Nodes, MbbO) -> FillMax = Vt#vtree.kp_chunk_threshold, Size = ?ext_size(Nodes), write_nodes(Vt, Nodes, MbbO, FillMax, Size). % Too many nodes for a single split, hence do something smart to get % all nodes stored in a good way. First take the first FillMax nodes % and split then into two nodes. Then insert all other nodes one by % one into one of the two newly created nodes. The decision which node % to choose is done by vtree_choose:choose_subtree/3. -spec write_nodes(Vt :: #vtree{}, Nodes :: [#kv_node{} | #kp_node{}], MbbO :: mbb(), FillMax :: number(), Size :: pos_integer()) -> [#kp_node{}]. 
write_nodes(Vt, Nodes, MbbO, FillMax, Size) when Size > FillMax -> {FirstNodes, Rest} = get_overflowing_subset(FillMax, Nodes), NewNodes = insert_into_nodes(Vt, [FirstNodes], MbbO, Rest), write_multiple_nodes(Vt, NewNodes); % No split needed write_nodes(Vt, Nodes, MbbO, _FillMax, _Size) -> % `write_multiple_nodes/2` isn't used here, as it's the only case, % where we use the supplied MBBO and don't calculate a new one. %write_multiple_nodes(Vt, [Nodes], [MbbO]). #vtree{ fd = Fd, less = Less } = Vt, {ok, WrittenNode} = vtree_io:write_node(Fd, Nodes, Less), [WrittenNode#kp_node{mbb_orig = MbbO}]. % Write multiple nodes at once. It is only called when a split happened, hence % the original MBB (`MbbO`) is reset. -spec write_multiple_nodes(Vt :: #vtree{}, NodeList :: [[#kp_node{} | #kv_node{}]]) -> [#kp_node{}]. write_multiple_nodes(Vt, NodeList) -> #vtree{ fd = Fd, less = Less } = Vt, lists:map( fun(Nodes) -> {ok, WrittenNode} = vtree_io:write_node(Fd, Nodes, Less), WrittenNode#kp_node{mbb_orig = WrittenNode#kp_node.key} end, NodeList). % Splits a KV- or KP-Node. Needs to be called with a total size of the nodes % that can be accommodated by two nodes. It % operates with #kv_node{} and % #kp_node{} records and also returns those. -spec split_node(Vt :: #vtree{}, Nodes :: [#kv_node{} | #kp_node{}], MbbO :: mbb()) -> {[#kv_node{}], [#kv_node{}]} | {[#kp_node{}], [#kp_node{}]}. 
split_node(Vt, [#kv_node{}|_]=Nodes, MbbO) -> #vtree{ kv_chunk_threshold = FillMax, min_fill_rate = MinRate, less = Less } = Vt, SplitNodes = [{Node#kv_node.key, Node} || Node <- Nodes], {SplitNodesA, SplitNodesB} = vtree_split:split_leaf( SplitNodes, MbbO, FillMax*MinRate, FillMax, Less), {_, NodesA} = lists:unzip(SplitNodesA), {_, NodesB} = lists:unzip(SplitNodesB), {NodesA, NodesB}; split_node(Vt, [#kp_node{}|_]=Nodes, MbbO) -> #vtree{ kp_chunk_threshold = FillMax, min_fill_rate = MinRate, less = Less } = Vt, SplitNodes = [{Node#kp_node.key, Node} || Node <- Nodes], {SplitNodesA, SplitNodesB} = vtree_split:split_inner( SplitNodes, MbbO, FillMax*MinRate, FillMax, Less), {_, NodesA} = lists:unzip(SplitNodesA), {_, NodesB} = lists:unzip(SplitNodesB), {NodesA, NodesB}. % The following comment explains the insertion only to make it easier to % understand, though it works the same for deletions. % % This function inserts partitioned nodes into the corresponding position in % the tree. This means that the number of partitions equals the number of % nodes of the current level. % The whole algorithm inserts multiple nodes into the tree and tries to % minimize the rebuilding of the tree (caused by his append-only nature). % The goal is to write parent nodes only when all the inserts in their % children already happened. It's done by depth first traversal, where the % nodes that get inserted are partitioned according to the currently level. % Those partitions are then further divided while moving deeper. 
% % Here's an example: % % PartitionedNodes = [[kv_node1, kv_node2], [kv_node3], [], [kv_node4]] % % The `PartitionedNodes` has 4 elements, hence the level it get inserted to % needs to have a exactly 4 nodes % % CurrentLevel = [kp_node1, kp_node2, kp_node2, kp_node4] % % The Partitioned nodes get inserted into the `CurrentLevel`, hence % % kp_node1 gets nodes kv_node1 and kv_node2 added % kp_node2 gets node kv_node3 added % kp_node3 keeps it's original children % kp_node4 gets node kv_node4 added % % The KP-nodes are traversed recursively and the KV-Nodes that get added are % also partitioned recursively, so that in the end the KP-nodes get added % as leaf nodes to the existing KP-nodes. -spec modify_multiple(Vt :: #vtree{}, ModifyFuns :: {fun(), fun()}, Modify :: [#kv_node{}], Existing :: [#kp_node{}], Acc :: [#kp_node{}]) -> [#kp_node{}]. modify_multiple(_Vt, _ModifyFuns, [], [], Acc) -> Acc; % This partition doesn't contain any nodes to delete, hence use the existing % ones and move on modify_multiple(Vt, ModifyFuns, [[]|SiblingPartitions], [Existing|ExistingSiblings], Acc) -> modify_multiple(Vt, ModifyFuns, SiblingPartitions, ExistingSiblings, [Existing|Acc]); modify_multiple(Vt, ModifyFuns, [ModifyNodes|SiblingPartitions], [Existing|ExistingSiblings], Acc) -> #vtree{ fd = Fd, less = Less } = Vt, #kp_node{ childpointer = ChildPointer, mbb_orig = MbbO } = Existing, {KvModifyFun, KpModifyFun} = ModifyFuns, ExistingNodes = vtree_io:read_node(Fd, ChildPointer), NewChildren = case ExistingNodes of [#kv_node{}|_] -> KvModifyFun(ModifyNodes, ExistingNodes); [#kp_node{}|_] -> Partitions = KpModifyFun(ModifyNodes, ExistingNodes, Less), % Moving deeper modify_multiple(Vt, ModifyFuns, Partitions, ExistingNodes, []) end, WrittenNodes = write_nodes(Vt, NewChildren, MbbO), % Moving sideways modify_multiple(Vt, ModifyFuns, SiblingPartitions, ExistingSiblings, WrittenNodes ++ Acc).
vtree/src/vtree_modify.erl
0.7413
0.649926
vtree_modify.erl
starcoder
%% @doc Read configuration from eterm files %% %% This module accepts files that looks like this: %% ``` %% {key1, Value1}. %% {key2, Value2}. %% ...''' %% or this: %% ``` %% #{key1 => Value1 %% ,key2 => Value2 %% ... %% }''' %% %% Note that keys don't match directly with the model keys. Instead %% `file_key' metaparameter is used to specify which key in the %% configuration file maps to a given mnode. %% %% == Example == %% ``` %% #{foo => {[value, consult], %% #{ file_key => key1 %% }} %% }''' -module(lee_consult). -export([ metamodel/0 , read/2 , read/3 , read_to/3 , read_to/4 , doc_chapter_title/2 , doc_gen/2 , meta_validate/4 ]). -export_type([filter/0, doc_config/0]). -include("lee.hrl"). -define(consult, consult). -type filter() :: [lee:metatype()] | all. -type doc_config() :: #{ filter := filter() , config_name := string() }. %% @doc Metamodel module containing metatypes for reading %% configuration from `eterm' files %% %% It defines the following metatype: %% == consult == %% %% === Metaparameters === %% <ul><li>`file_key' of type `atom()': %% Configuration file key %% </li> %% </ul> %% %% === Depends on === %% {@link lee:base_metamodel/0 . value} -spec metamodel() -> lee:module(). metamodel() -> #{ metatype => #{ ?consult => {[metatype, documented] , #{ doc_chapter_title => fun ?MODULE:doc_chapter_title/2 , doc_gen => fun ?MODULE:doc_gen/2 , meta_validate => fun ?MODULE:meta_validate/4 } } }}. -spec meta_validate(lee:model(), _, lee:key(), #mnode{}) -> lee_lib:check_result(). meta_validate(_, _, Key, MNode) -> lee_lib:inject_error_location( Key, lee_lib:validate_meta_attr(file_key, typerefl:atom(), MNode)). %% @doc Parse file into a `lee_storage' %% @throws {error, string()} -spec read_to(lee:model(), file:filename(), lee_storage:data()) -> lee_storage:data(). read_to(Model, Filename, Data) -> read_to(Model, Filename, all, Data). 
%% @doc Parse file into a `lee_storage' %% @throws {error, string()} -spec read_to(lee:model(), file:filename(), filter(), lee_storage:data()) -> lee_storage:data(). read_to(Model, Filename, Filter, Data) -> Patch = read(Model, Filename, Filter), lee_storage:patch(Data, Patch). %% @doc Parse file into a patch %% @throws {error, string()} -spec read(lee:model(), file:filename()) -> lee:patch(). read(Model, Filename) -> read(Model, Filename, all). %% @doc Parse file into a patch %% @throws {error, string()} -spec read(lee:model(), file:filename(), filter()) -> lee:patch(). read(Model, Filename, Filter) -> Predicate = predicate(Filter), Keys = lee_model:get_metatype_index(?consult, Model), Terms0 = case file:consult(Filename) of {ok, T0} -> T0; {error, Reason0} -> Reason = lee_lib:format( "Reading ~s failed: ~p" , [Filename, Reason0] ), throw({error, Reason}) end, case Terms0 of [Terms] when is_map(Terms) -> ok; _ -> Terms = try maps:from_list(Terms0) catch _:_ -> throw({error, Filename ++ " should be a proplist or a map"}) end end, lists:foldl( fun(Key, Acc) -> read_val(Model, Predicate, Terms, Key, Acc) end , [] , Keys). read_val(Model, Predicate, Terms, Key, Acc) -> #mnode{metatypes = MT, metaparams = Attrs} = lee_model:get(Key, Model), Valid = Predicate(MT), FileKey = ?m_attr(?consult, file_key, Attrs), case Terms of #{FileKey := Val} when Valid -> [{set, Key, Val} | Acc]; #{} -> Acc end. -spec doc_gen(lee:model(), doc_config() | undefined) -> lee:doc(). doc_gen(Model, undefined) -> doc_gen(Model, default_doc_config()); doc_gen(Model, Config) -> #{filter := Filter} = Config, Predicate = predicate(Filter), Intro = "<para> Configuration file is an Erlang term that can be in either proplist: </para> <programlisting language=\"erlang\"> <![CDATA[ {key1, Val1}. {key2, Val2}. ... ]]> </programlisting> <para> or map form: </para> <programlisting language=\"erlang\"> <![CDATA[ #{ key1 => Val1 , key2 => Val2 }. 
]]> </programlisting> <para><emphasis>Valid keys:</emphasis></para>", Keys = lee_model:get_metatype_index(?consult, Model), Fun = fun(Key) -> MNode = lee_model:get(Key, Model), case Predicate(MNode#mnode.metatypes) of true -> {true, format_doc(Config, Key, MNode)}; false -> false end end, lee_doc:docbook(Intro) ++ lists:filtermap(Fun, Keys). -spec doc_chapter_title(lee:model(), doc_config() | undefined) -> string(). doc_chapter_title(Model, undefined) -> doc_chapter_title(Model, default_doc_config()); doc_chapter_title(_Model, Config) -> #{config_name := ConfigName } = Config, "Configuration file: " ++ ConfigName. default_doc_config() -> #{ filter => all , config_name => "configuration.eterm" }. predicate(Filter) -> case Filter of all -> fun(_) -> true end; _ -> OrdSet = ordsets:from_list(Filter), fun(MetaTypes) -> ordsets:intersection(OrdSet, MetaTypes) =/= [] end end. format_doc(_Config, Key, MNode = #mnode{metaparams = Attrs}) -> FileKey = ?m_attr(?consult, file_key, Attrs), lee_doc:refer_value(Key, ?consult, atom_to_list(FileKey), MNode).
src/application/lee_consult.erl
0.546617
0.458773
lee_consult.erl
starcoder
%% %% e3d_transf.erl -- %% %% More transformation matrix utilities %% %% Copyright (c) 2010-2011 <NAME> %% %% See the file "license.terms" for information on usage and redistribution %% of this file, and for a DISCLAIMER OF ALL WARRANTIES. %% %% $Id$ %%% %%% @doc More transformation matrix utilities %%% %%% All of the matrix operations operates in the following order %%% I = identity(), %%% T = translate(I, Vec), %%% R = rotate(T, Vec), %%% Matrix = Rotate(Translate()) %%% %%% Also using opengl right handed coordinate system %%% @end -module(e3d_transform). -export([%% Initilizes matrices identity/0, init/1, lookat/3, ortho/2, ortho/6, perspective/3, perspective/4, pick/5, %% Get the actual matrices matrix/1, inv_matrix/1, %% Transform the matrices inverse/1, translate/2, rotate/2, rotate/3, scale/2, mul/1, mul/2 ]). -include("e3d.hrl"). %%%------------------------------------------------------------------- %%-------------------------------------------------------------------- %% @doc Returns the identity transform %% @end %%-------------------------------------------------------------------- -spec identity() -> e3d_transform(). identity() -> #e3d_transf{}. %%-------------------------------------------------------------------- %% @doc Initilizes transform from matrix mat %% @end %%-------------------------------------------------------------------- -spec init(e3d_matrix()) -> e3d_transform(). init(Mat) when tuple_size(Mat) =:= 12 -> init(e3d_mat:expand(Mat)); init(Mat) -> #e3d_transf{mat=Mat, inv=e3d_mat:invert(Mat)}. %%-------------------------------------------------------------------- %% @doc Returns the matrix %% @end %%-------------------------------------------------------------------- -spec matrix(e3d_transform()) -> e3d_matrix(). matrix(#e3d_transf{mat=M}) -> e3d_mat:expand(M). 
%%-------------------------------------------------------------------- %% @doc Returns the inverse matrix %% @end %%-------------------------------------------------------------------- -spec inv_matrix(e3d_transform()) -> e3d_matrix(). inv_matrix(#e3d_transf{inv=I}) -> e3d_mat:expand(I). %%%------------------------------------------------------------------- %%-------------------------------------------------------------------- %% @doc Inverses the transform %% @end %%-------------------------------------------------------------------- -spec inverse(e3d_transform()) -> e3d_transform(). inverse(#e3d_transf{mat=M, inv=I}) -> #e3d_transf{mat=I, inv=M}. %%-------------------------------------------------------------------- %% @doc Translates the matrix with vector %% @end %%-------------------------------------------------------------------- -spec translate(e3d_transform(), e3d_vector()) -> e3d_transform(). translate(#e3d_transf{mat=M,inv=I}, {Dx,Dy,Dz}) -> #e3d_transf{mat = e3d_mat:mul(M, e3d_mat:translate(Dx,Dy,Dz)), inv = e3d_mat:mul(e3d_mat:translate(-Dx,-Dy,-Dz), I)}. %%-------------------------------------------------------------------- %% @doc Rotates the matrix with rotation matrix %% @end %%-------------------------------------------------------------------- -spec rotate(e3d_transform(), e3d_matrix()) -> e3d_transform(). rotate(#e3d_transf{mat=M,inv=I}, Rot) when tuple_size(Rot) =:= 12; tuple_size(Rot) =:= 16 -> #e3d_transf{mat = e3d_mat:mul(M, Rot), inv = e3d_mat:mul(e3d_mat:transpose(Rot), I)}. %%-------------------------------------------------------------------- %% @doc Rotates the matrix with angle (in degress) and direction %% @end %%-------------------------------------------------------------------- -spec rotate(e3d_transform(), number(), e3d_vector()) -> e3d_transform(). rotate(Mat = #e3d_transf{}, A, Vec) -> rotate(Mat, e3d_mat:rotate(A,Vec)). 
%%-------------------------------------------------------------------- %% @doc Scales the matrix with {ScaleX, ScaleY, ScaleZ} %% @end %%-------------------------------------------------------------------- -spec scale(e3d_transform(), e3d_vector()) -> e3d_transform(). scale(#e3d_transf{mat=M,inv=I}, {X,Y,Z}) -> #e3d_transf{mat = e3d_mat:mul(M, e3d_mat:scale(X,Y,Z)), inv = e3d_mat:mul(e3d_mat:scale(1/X,1/Y,1/Z), I)}. %%---------------------------------------------------------------------- %% @doc Multiplies the current matrix (at right) with new Mat (at left) %% Trans(Vec) = Mat(Current(Vec)) %% @end %%---------------------------------------------------------------------- -spec mul(e3d_transform(), e3d_transform()) -> e3d_transform(). mul(#e3d_transf{mat=M1,inv=I1}, #e3d_transf{mat=M2,inv=I2}) -> #e3d_transf{mat = e3d_mat:mul(M1, M2), inv = e3d_mat:mul(I2, I1)}. %%-------------------------------------------------------------- %% mul([Rx,Ry,Rz]) = mul([mul(Ry,Rx),Rz]) %%-------------------------------------------------------------- -spec mul([e3d_transform()]) -> e3d_transform(). mul([#e3d_transf{}=A,#e3d_transf{}=B | T ]) -> mul([mul(B,A) | T]); mul([#e3d_transf{}=A]) -> A. %%%------------------------------------------------------------------- %%-------------------------------------------------------------------- %% @doc Generates a world to camera transformation %% @end %%-------------------------------------------------------------------- -spec lookat(e3d_point(), e3d_vector(), e3d_vector()) -> e3d_transform(). lookat(Pos, Look, Up) -> Dir = e3d_vec:norm_sub(Look, Pos), Right = e3d_vec:norm(e3d_vec:cross(Dir, e3d_vec:norm(Up))), NewUp = e3d_vec:norm(e3d_vec:cross(Right, Dir)), AsList = [tuple_to_list(Right), 0.0, tuple_to_list(NewUp), 0.0, tuple_to_list(e3d_vec:neg(Dir)), 0.0, 0.0, 0.0, 0.0, 1.0], CamToWorld = list_to_tuple(lists:flatten(AsList)), WorldToCam = e3d_mat:invert(CamToWorld), translate(#e3d_transf{mat=WorldToCam,inv=CamToWorld}, e3d_vec:neg(Pos)). 
%%-------------------------------------------------------------------- %% @doc Generates a ortho transformation %% @end %%-------------------------------------------------------------------- -spec ortho(float(), float()) -> e3d_transform(). ortho(Near, Far) -> ortho(-1.0, 1.0, -1.0, 1.0, Far, Near). ortho(Left, Right, Bottom, Top, Near, Far) -> O = 0.0, IDx = 1/(Right-Left), IDy = 1/(Top-Bottom), IDz = 1/(Far-Near), Mat0 = {2.0, O, O, O, 2.0, O, O, O, -2.0, O, O, O}, %% Do this in 3 steps to avoid inverse calculation problems Mat1 = scale(init(Mat0), {IDx,IDy,IDz}), Trans = translate(identity(), {-(Right+Left)*IDx, -(Top+Bottom)*IDy, -(Far+Near)*IDz}), mul(Trans, Mat1). %%-------------------------------------------------------------------- %% @doc Generates a perspective transformation %% Fov = Field Of View (in degrees) %% Projects from camera space: Z = {-near, -far} %% to screen space: Z' = {0.0, 1.0} %% @end %%-------------------------------------------------------------------- -spec perspective(Fov::float(), Near::float(), Far::float()) -> e3d_transform(). perspective(Fov, Near, Far) -> perspective(Fov, 1.0, Near, Far). -spec perspective(Fov::float(), Aspect::float(), Near::float(), Far::float()) -> e3d_transform(). perspective(Fov, Aspect, Near, Far) -> T = 1.0 / math:tan((Fov*math:pi()/180)/2.0), %% Perform projective divide D = 1.0 / (Far-Near), %% Inverted Denom I = 1.0, O = 0.0, Persp = {I/Aspect, O, O, O, O, I, O, O, O, O, -(Near+Far)*D, -I, O, O, -2.0*Far*Near*D, O}, InvPersp = e3d_mat:invert(Persp), scale(#e3d_transf{mat=Persp, inv=InvPersp}, {T,T,1.0}). %%-------------------------------------------------------------------- %% @doc Generates a pick matrix transformation %% Equiv to glu:pickMatrix/5 %% @end %%-------------------------------------------------------------------- -spec pick(X::float(), Y::float(), Width::float(), Height::float(), Viewport::{integer(),integer(),integer(),integer()} ) -> e3d_transform(). 
pick(X, Y, W, H, {X0,Y0,X1,Y1}) -> Sx = X1 / W, Sy = Y1 / H, Tx = (X1+2.0*(X0-X)) / W, Ty = (Y1+2.0*(Y0-Y)) / H, I = 1.0, O = 0.0, Pick = {Sx, O, O, O, O, Sy, O, O, O, O, I, O, Tx,Ty, O, I}, init(Pick).
e3d/e3d_transform.erl
0.54359
0.547283
e3d_transform.erl
starcoder
%%% %%% Copyright (c) 2016 The Talla Authors. All rights reserved. %%% Use of this source code is governed by a BSD-style %%% license that can be found in the LICENSE file. %%% %%% ----------------------------------------------------------- %%% @author <NAME> <<EMAIL>> %%% @doc Binary Utility API %%% %%% This module contains various utility functions that are %%% found useful when working with binaries. %%% %%% @end %%% ----------------------------------------------------------- -module(onion_binary). %% API. -export([trim/2, bit/2, bits/1, fingerprint/1, fingerprint/2 ]). -include("onion_test.hrl"). -spec trim(Subject, Pattern) -> Result when Subject :: binary(), Pattern :: binary() | [binary()], Result :: binary(). trim(Subject, Pattern) when is_binary(Subject) -> iolist_to_binary(binary:split(Subject, Pattern, [global, trim_all])). %% @doc Get the bit value at a given position of a binary. -spec bit(Subject, Position) -> 0 | 1 when Subject :: binary(), Position :: non_neg_integer(). bit(Subject, Position) -> Byte = binary:at(Subject, Position div 8), (Byte bsr (onion_math:mod(Position, 8))) band 1. %% @doc Show the binary representation of a given binary. -spec bits(Subject) -> [Bit] when Subject :: binary(), Bit :: 0 | 1. bits(Subject) -> [Bit || <<Bit:1/integer>> <= Subject]. %% @doc Get the fingerprint of a given binary. -spec fingerprint(Data) -> binary() when Data :: binary(). fingerprint(Data) -> Elements = [[integer_to_list(A, 16), integer_to_list(B, 16), integer_to_list(C, 16), integer_to_list(D, 16) ] || <<A:4/integer, B:4/integer, C:4/integer, D:4/integer>> <= Data], iolist_to_binary(onion_lists:intersperse(<<" ">>, Elements)). %% @doc Apply the hash algorithm to the input and get the fingerprint. -spec fingerprint(Hash, Data) -> binary() when Hash :: crypto:hash_algorithm(), Data :: binary(). fingerprint(Hash, Data) when is_atom(Hash) -> fingerprint(crypto:hash(Hash, Data)). -ifdef(TEST). 
trim_basic_test() -> [ ?assertEqual(trim(<<>>, <<>>), <<>>), ?assertEqual(trim(<<"foo bar baz">>, <<" ">>), <<"foobarbaz">>), ?assertEqual(trim(<<"foo bar baz">>, [<<" ">>]), <<"foobarbaz">>), ?assertEqual(trim(<<"foo bar\nbaz">>, [<<" ">>, <<"\n">>]), <<"foobarbaz">>), ?assertEqual(trim(<<" foo bar\n \n \nbaz ">>, [<<" ">>, <<"\n">>]), <<"foobarbaz">>) ]. bit_test() -> [ ?assertEqual(bit(<<255>>, 0), 1), ?assertEqual(bit(<<255>>, 1), 1), ?assertEqual(bit(<<255>>, 2), 1), ?assertEqual(bit(<<255>>, 7), 1), ?assertEqual(bit(<<0>>, 0), 0), ?assertEqual(bit(<<0>>, 1), 0), ?assertEqual(bit(<<0>>, 2), 0), ?assertEqual(bit(<<0>>, 7), 0) ]. bits_test() -> [ ?assertEqual(bits(<<0>>), [0, 0, 0, 0, 0, 0, 0, 0]), ?assertEqual(bits(<<0, 0>>), [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), ?assertEqual(bits(<<255, 3>>), [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1]) ]. fingerprint_basic_test() -> [ ?assertEqual(fingerprint(<<>>), <<>>), ?assertEqual(fingerprint(<<"foobar">>), <<"666F 6F62 6172">>), ?assertEqual(fingerprint(<<255, 255>>), <<"FFFF">>), ?assertEqual(fingerprint(<<0, 0>>), <<"0000">>), ?assertEqual(fingerprint(<<0, 255, 0, 255>>), <<"00FF 00FF">>) ]. fingerprint_hash_basic_test() -> [ ?assertEqual(fingerprint(sha, <<>>), <<"DA39 A3EE 5E6B 4B0D 3255 BFEF 9560 1890 AFD8 0709">>), ?assertEqual(fingerprint(sha, <<"foobar">>), <<"8843 D7F9 2416 211D E9EB B963 FF4C E281 2593 2878">>), ?assertEqual(fingerprint(sha256, <<>>), <<"E3B0 C442 98FC 1C14 9AFB F4C8 996F B924 27AE 41E4 649B 934C A495 991B 7852 B855">>), ?assertEqual(fingerprint(sha256, <<"foobar">>), <<"C3AB 8FF1 3720 E8AD 9047 DD39 466B 3C89 74E5 92C2 FA38 3D4A 3960 714C AEF0 C4F2">>) ]. -endif.
src/onion_binary.erl
0.613584
0.471223
onion_binary.erl
starcoder
?_assert(from_roman("I") == 1), ?_assert(from_roman("II") == 2), ?_assert(from_roman("III") == 3), ?_assert(from_roman("IV") == 4), ?_assert(from_roman("V") == 5), ?_assert(from_roman("VI") == 6), ?_assert(from_roman("VII") == 7), ?_assert(from_roman("VIII") == 8), ?_assert(from_roman("IX") == 9), ?_assert(from_roman("X") == 10), ?_assert(from_roman("XI") == 11), ?_assert(from_roman("XII") == 12), ?_assert(from_roman("XIII") == 13), ?_assert(from_roman("XIV") == 14), ?_assert(from_roman("XV") == 15), ?_assert(from_roman("XVI") == 16), ?_assert(from_roman("XVII") == 17), ?_assert(from_roman("XVIII") == 18), ?_assert(from_roman("XIX") == 19), ?_assert(from_roman("XX") == 20), ?_assert(from_roman("XXI") == 21), ?_assert(from_roman("XXII") == 22), ?_assert(from_roman("XXIII") == 23), ?_assert(from_roman("XXIV") == 24), ?_assert(from_roman("XXV") == 25), ?_assert(from_roman("XXVI") == 26), ?_assert(from_roman("XXVII") == 27), ?_assert(from_roman("XXVIII") == 28), ?_assert(from_roman("XXIX") == 29), ?_assert(from_roman("XXX") == 30), ?_assert(from_roman("XXXI") == 31), ?_assert(from_roman("XXXII") == 32), ?_assert(from_roman("XXXIII") == 33), ?_assert(from_roman("XXXIV") == 34), ?_assert(from_roman("XXXV") == 35), ?_assert(from_roman("XXXVI") == 36), ?_assert(from_roman("XXXVII") == 37), ?_assert(from_roman("XXXVIII") == 38), ?_assert(from_roman("XXXIX") == 39), ?_assert(from_roman("XL") == 40), ?_assert(from_roman("XLI") == 41), ?_assert(from_roman("XLII") == 42), ?_assert(from_roman("XLIII") == 43), ?_assert(from_roman("XLIV") == 44), ?_assert(from_roman("XLV") == 45), ?_assert(from_roman("XLVI") == 46), ?_assert(from_roman("XLVII") == 47), ?_assert(from_roman("XLVIII") == 48), ?_assert(from_roman("XLIX") == 49), ?_assert(from_roman("L") == 50), ?_assert(from_roman("LI") == 51), ?_assert(from_roman("LII") == 52), ?_assert(from_roman("LIII") == 53), ?_assert(from_roman("LIV") == 54), ?_assert(from_roman("LV") == 55), ?_assert(from_roman("LVI") == 56), 
?_assert(from_roman("LVII") == 57), ?_assert(from_roman("LVIII") == 58), ?_assert(from_roman("LIX") == 59), ?_assert(from_roman("LX") == 60), ?_assert(from_roman("LXI") == 61), ?_assert(from_roman("LXII") == 62), ?_assert(from_roman("LXIII") == 63), ?_assert(from_roman("LXIV") == 64), ?_assert(from_roman("LXV") == 65), ?_assert(from_roman("LXVI") == 66), ?_assert(from_roman("LXVII") == 67), ?_assert(from_roman("LXVIII") == 68), ?_assert(from_roman("LXIX") == 69), ?_assert(from_roman("LXX") == 70), ?_assert(from_roman("LXXI") == 71), ?_assert(from_roman("LXXII") == 72), ?_assert(from_roman("LXXIII") == 73), ?_assert(from_roman("LXXIV") == 74), ?_assert(from_roman("LXXV") == 75), ?_assert(from_roman("LXXVI") == 76), ?_assert(from_roman("LXXVII") == 77), ?_assert(from_roman("LXXVIII") == 78), ?_assert(from_roman("LXXIX") == 79), ?_assert(from_roman("LXXX") == 80), ?_assert(from_roman("LXXXI") == 81), ?_assert(from_roman("LXXXII") == 82), ?_assert(from_roman("LXXXIII") == 83), ?_assert(from_roman("LXXXIV") == 84), ?_assert(from_roman("LXXXV") == 85), ?_assert(from_roman("LXXXVI") == 86), ?_assert(from_roman("LXXXVII") == 87), ?_assert(from_roman("LXXXVIII") == 88), ?_assert(from_roman("LXXXIX") == 89), ?_assert(from_roman("XC") == 90), ?_assert(from_roman("XCI") == 91), ?_assert(from_roman("XCII") == 92), ?_assert(from_roman("XCIII") == 93), ?_assert(from_roman("XCIV") == 94), ?_assert(from_roman("XCV") == 95), ?_assert(from_roman("XCVI") == 96), ?_assert(from_roman("XCVII") == 97), ?_assert(from_roman("XCVIII") == 98), ?_assert(from_roman("XCIX") == 99), ?_assert(from_roman("C") == 100), ?_assert(from_roman("CI") == 101), ?_assert(from_roman("CII") == 102), ?_assert(from_roman("CIII") == 103), ?_assert(from_roman("CIV") == 104), ?_assert(from_roman("CV") == 105), ?_assert(from_roman("CVI") == 106), ?_assert(from_roman("CVII") == 107), ?_assert(from_roman("CVIII") == 108), ?_assert(from_roman("CIX") == 109), ?_assert(from_roman("CX") == 110), ?_assert(from_roman("CXI") 
== 111), ?_assert(from_roman("CXII") == 112), ?_assert(from_roman("CXIII") == 113), ?_assert(from_roman("CXIV") == 114), ?_assert(from_roman("CXV") == 115), ?_assert(from_roman("CXVI") == 116), ?_assert(from_roman("CXVII") == 117), ?_assert(from_roman("CXVIII") == 118), ?_assert(from_roman("CXIX") == 119), ?_assert(from_roman("CXX") == 120), ?_assert(from_roman("CXXI") == 121), ?_assert(from_roman("CXXII") == 122), ?_assert(from_roman("CXXIII") == 123), ?_assert(from_roman("CXXIV") == 124), ?_assert(from_roman("CXXV") == 125), ?_assert(from_roman("CXXVI") == 126), ?_assert(from_roman("CXXVII") == 127), ?_assert(from_roman("CXXVIII") == 128), ?_assert(from_roman("CXXIX") == 129), ?_assert(from_roman("CXXX") == 130), ?_assert(from_roman("CXXXI") == 131), ?_assert(from_roman("CXXXII") == 132), ?_assert(from_roman("CXXXIII") == 133), ?_assert(from_roman("CXXXIV") == 134), ?_assert(from_roman("CXXXV") == 135), ?_assert(from_roman("CXXXVI") == 136), ?_assert(from_roman("CXXXVII") == 137), ?_assert(from_roman("CXXXVIII") == 138), ?_assert(from_roman("CXXXIX") == 139), ?_assert(from_roman("CXL") == 140), ?_assert(from_roman("CXLI") == 141), ?_assert(from_roman("CXLII") == 142), ?_assert(from_roman("CXLIII") == 143), ?_assert(from_roman("CXLIV") == 144), ?_assert(from_roman("CXLV") == 145), ?_assert(from_roman("CXLVI") == 146), ?_assert(from_roman("CXLVII") == 147), ?_assert(from_roman("CXLVIII") == 148), ?_assert(from_roman("CXLIX") == 149), ?_assert(from_roman("CL") == 150), ?_assert(from_roman("CLI") == 151), ?_assert(from_roman("CLII") == 152), ?_assert(from_roman("CLIII") == 153), ?_assert(from_roman("CLIV") == 154), ?_assert(from_roman("CLV") == 155), ?_assert(from_roman("CLVI") == 156), ?_assert(from_roman("CLVII") == 157), ?_assert(from_roman("CLVIII") == 158), ?_assert(from_roman("CLIX") == 159), ?_assert(from_roman("CLX") == 160), ?_assert(from_roman("CLXI") == 161), ?_assert(from_roman("CLXII") == 162), ?_assert(from_roman("CLXIII") == 163), 
?_assert(from_roman("CLXIV") == 164), ?_assert(from_roman("CLXV") == 165), ?_assert(from_roman("CLXVI") == 166), ?_assert(from_roman("CLXVII") == 167), ?_assert(from_roman("CLXVIII") == 168), ?_assert(from_roman("CLXIX") == 169), ?_assert(from_roman("CLXX") == 170), ?_assert(from_roman("CLXXI") == 171), ?_assert(from_roman("CLXXII") == 172), ?_assert(from_roman("CLXXIII") == 173), ?_assert(from_roman("CLXXIV") == 174), ?_assert(from_roman("CLXXV") == 175), ?_assert(from_roman("CLXXVI") == 176), ?_assert(from_roman("CLXXVII") == 177), ?_assert(from_roman("CLXXVIII") == 178), ?_assert(from_roman("CLXXIX") == 179), ?_assert(from_roman("CLXXX") == 180), ?_assert(from_roman("CLXXXI") == 181), ?_assert(from_roman("CLXXXII") == 182), ?_assert(from_roman("CLXXXIII") == 183), ?_assert(from_roman("CLXXXIV") == 184), ?_assert(from_roman("CLXXXV") == 185), ?_assert(from_roman("CLXXXVI") == 186), ?_assert(from_roman("CLXXXVII") == 187), ?_assert(from_roman("CLXXXVIII") == 188), ?_assert(from_roman("CLXXXIX") == 189), ?_assert(from_roman("CXC") == 190), ?_assert(from_roman("CXCI") == 191), ?_assert(from_roman("CXCII") == 192), ?_assert(from_roman("CXCIII") == 193), ?_assert(from_roman("CXCIV") == 194), ?_assert(from_roman("CXCV") == 195), ?_assert(from_roman("CXCVI") == 196), ?_assert(from_roman("CXCVII") == 197), ?_assert(from_roman("CXCVIII") == 198), ?_assert(from_roman("CXCIX") == 199), ?_assert(from_roman("CC") == 200), ?_assert(from_roman("MCM") == 1900), ?_assert(from_roman("MCMI") == 1901), ?_assert(from_roman("MCMII") == 1902), ?_assert(from_roman("MCMIII") == 1903), ?_assert(from_roman("MCMIV") == 1904), ?_assert(from_roman("MCMV") == 1905), ?_assert(from_roman("MCMVI") == 1906), ?_assert(from_roman("MCMVII") == 1907), ?_assert(from_roman("MCMVIII") == 1908), ?_assert(from_roman("MCMIX") == 1909), ?_assert(from_roman("MCMX") == 1910), ?_assert(from_roman("MCMXI") == 1911), ?_assert(from_roman("MCMXII") == 1912), ?_assert(from_roman("MCMXIII") == 1913), 
?_assert(from_roman("MCMXIV") == 1914), ?_assert(from_roman("MCMXV") == 1915), ?_assert(from_roman("MCMXVI") == 1916), ?_assert(from_roman("MCMXVII") == 1917), ?_assert(from_roman("MCMXVIII") == 1918), ?_assert(from_roman("MCMXIX") == 1919), ?_assert(from_roman("MCMXX") == 1920), ?_assert(from_roman("MCMXXI") == 1921), ?_assert(from_roman("MCMXXII") == 1922), ?_assert(from_roman("MCMXXIII") == 1923), ?_assert(from_roman("MCMXXIV") == 1924), ?_assert(from_roman("MCMXXV") == 1925), ?_assert(from_roman("MCMXXVI") == 1926), ?_assert(from_roman("MCMXXVII") == 1927), ?_assert(from_roman("MCMXXVIII") == 1928), ?_assert(from_roman("MCMXXIX") == 1929), ?_assert(from_roman("MCMXXX") == 1930), ?_assert(from_roman("MCMXXXI") == 1931), ?_assert(from_roman("MCMXXXII") == 1932), ?_assert(from_roman("MCMXXXIII") == 1933), ?_assert(from_roman("MCMXXXIV") == 1934), ?_assert(from_roman("MCMXXXV") == 1935), ?_assert(from_roman("MCMXXXVI") == 1936), ?_assert(from_roman("MCMXXXVII") == 1937), ?_assert(from_roman("MCMXXXVIII") == 1938), ?_assert(from_roman("MCMXXXIX") == 1939), ?_assert(from_roman("MCMXL") == 1940), ?_assert(from_roman("MCMXLI") == 1941), ?_assert(from_roman("MCMXLII") == 1942), ?_assert(from_roman("MCMXLIII") == 1943), ?_assert(from_roman("MCMXLIV") == 1944), ?_assert(from_roman("MCMXLV") == 1945), ?_assert(from_roman("MCMXLVI") == 1946), ?_assert(from_roman("MCMXLVII") == 1947), ?_assert(from_roman("MCMXLVIII") == 1948), ?_assert(from_roman("MCMXLIX") == 1949), ?_assert(from_roman("MCML") == 1950), ?_assert(from_roman("MCMLI") == 1951), ?_assert(from_roman("MCMLII") == 1952), ?_assert(from_roman("MCMLIII") == 1953), ?_assert(from_roman("MCMLIV") == 1954), ?_assert(from_roman("MCMLV") == 1955), ?_assert(from_roman("MCMLVI") == 1956), ?_assert(from_roman("MCMLVII") == 1957), ?_assert(from_roman("MCMLVIII") == 1958), ?_assert(from_roman("MCMLIX") == 1959), ?_assert(from_roman("MCMLX") == 1960), ?_assert(from_roman("MCMLXI") == 1961), ?_assert(from_roman("MCMLXII") == 
1962), ?_assert(from_roman("MCMLXIII") == 1963), ?_assert(from_roman("MCMLXIV") == 1964), ?_assert(from_roman("MCMLXV") == 1965), ?_assert(from_roman("MCMLXVI") == 1966), ?_assert(from_roman("MCMLXVII") == 1967), ?_assert(from_roman("MCMLXVIII") == 1968), ?_assert(from_roman("MCMLXIX") == 1969), ?_assert(from_roman("MCMLXX") == 1970), ?_assert(from_roman("MCMLXXI") == 1971), ?_assert(from_roman("MCMLXXII") == 1972), ?_assert(from_roman("MCMLXXIII") == 1973), ?_assert(from_roman("MCMLXXIV") == 1974), ?_assert(from_roman("MCMLXXV") == 1975), ?_assert(from_roman("MCMLXXVI") == 1976), ?_assert(from_roman("MCMLXXVII") == 1977), ?_assert(from_roman("MCMLXXVIII") == 1978), ?_assert(from_roman("MCMLXXIX") == 1979), ?_assert(from_roman("MCMLXXX") == 1980), ?_assert(from_roman("MCMLXXXI") == 1981), ?_assert(from_roman("MCMLXXXII") == 1982), ?_assert(from_roman("MCMLXXXIII") == 1983), ?_assert(from_roman("MCMLXXXIV") == 1984), ?_assert(from_roman("MCMLXXXV") == 1985), ?_assert(from_roman("MCMLXXXVI") == 1986), ?_assert(from_roman("MCMLXXXVII") == 1987), ?_assert(from_roman("MCMLXXXVIII") == 1988), ?_assert(from_roman("MCMLXXXIX") == 1989), ?_assert(from_roman("MCMXC") == 1990), ?_assert(from_roman("MCMXCI") == 1991), ?_assert(from_roman("MCMXCII") == 1992), ?_assert(from_roman("MCMXCIII") == 1993), ?_assert(from_roman("MCMXCIV") == 1994), ?_assert(from_roman("MCMXCV") == 1995), ?_assert(from_roman("MCMXCVI") == 1996), ?_assert(from_roman("MCMXCVII") == 1997), ?_assert(from_roman("MCMXCVIII") == 1998), ?_assert(from_roman("MCMXCIX") == 1999), ?_assert(from_roman("MM") == 2000), ?_assert(from_roman("MMI") == 2001), ?_assert(from_roman("MMII") == 2002), ?_assert(from_roman("MMIII") == 2003), ?_assert(from_roman("MMIV") == 2004), ?_assert(from_roman("MMV") == 2005), ?_assert(from_roman("MMVI") == 2006), ?_assert(from_roman("MMVII") == 2007), ?_assert(from_roman("MMVIII") == 2008), ?_assert(from_roman("MMIX") == 2009), ?_assert(from_roman("MMX") == 2010), 
?_assert(from_roman("MMXI") == 2011), ?_assert(from_roman("MMXII") == 2012), ?_assert(from_roman("MMXIII") == 2013), ?_assert(from_roman("MMXIV") == 2014), ?_assert(from_roman("MMXV") == 2015), ?_assert(from_roman("MMXVI") == 2016), ?_assert(from_roman("MMXVII") == 2017), ?_assert(from_roman("MMXVIII") == 2018), ?_assert(from_roman("MMXIX") == 2019), ?_assert(from_roman("MMXX") == 2020), ?_assert(from_roman("MMXXI") == 2021), ?_assert(from_roman("MMXXII") == 2022), ?_assert(from_roman("MMXXIII") == 2023), ?_assert(from_roman("MMXXIV") == 2024), ?_assert(from_roman("MMXXV") == 2025), ?_assert(from_roman("MMXXVI") == 2026), ?_assert(from_roman("MMXXVII") == 2027), ?_assert(from_roman("MMXXVIII") == 2028), ?_assert(from_roman("MMXXIX") == 2029), ?_assert(from_roman("MMXXX") == 2030), ?_assert(from_roman("MMXXXI") == 2031), ?_assert(from_roman("MMXXXII") == 2032), ?_assert(from_roman("MMXXXIII") == 2033), ?_assert(from_roman("MMXXXIV") == 2034), ?_assert(from_roman("MMXXXV") == 2035), ?_assert(from_roman("MMXXXVI") == 2036), ?_assert(from_roman("MMXXXVII") == 2037), ?_assert(from_roman("MMXXXVIII") == 2038), ?_assert(from_roman("MMXXXIX") == 2039), ?_assert(from_roman("MMXL") == 2040), ?_assert(from_roman("MMXLI") == 2041), ?_assert(from_roman("MMXLII") == 2042), ?_assert(from_roman("MMXLIII") == 2043), ?_assert(from_roman("MMXLIV") == 2044), ?_assert(from_roman("MMXLV") == 2045), ?_assert(from_roman("MMXLVI") == 2046), ?_assert(from_roman("MMXLVII") == 2047), ?_assert(from_roman("MMXLVIII") == 2048), ?_assert(from_roman("MMXLIX") == 2049), ?_assert(from_roman("MML") == 2050),
fromBash.erl
0.721154
0.553385
fromBash.erl
starcoder
%%%---------------------------------------------------------------------------- %%% Copyright <NAME> 2019. All Rights Reserved. %%% %%% Licensed under the Apache License, Version 2.0 (the "License"); %%% you may not use this file except in compliance with the License. %%% You may obtain a copy of the License at %%% %%% http://www.apache.org/licenses/LICENSE-2.0 %%% %%% Unless required by applicable law or agreed to in writing, software %%% distributed under the License is distributed on an "AS IS" BASIS, %%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %%% See the License for the specific language governing permissions and %%% limitations under the License. %%%---------------------------------------------------------------------------- %%% --------------------------------------------------------------------------- %%% @doc Timers are simple periodic or aperiodic timers that will %%% invoke a callback when they fire. While they do not have direct %%% access to grain state or identity, their fun can contain anything, %%% so they can easily send a message to a grain. %%% %%% `cancel_timer/0` is supplied as a helper for simple situations %%% where a grain only has one timer going. In the case that there %%% many timers going at once, `cancel_timer/1` must be used %%% explicitly on each. %%% %%% A note about timers and shutdown. Following Orleans, calls made %%% from timers do not extend the lease time on a grain and a long %%% call made in a timer callback can extend the life of the grain. %%% For that reason, timers can miss ticks and tick too quickly around %%% the lease time of a grain. %%% %%% @end %%% --------------------------------------------------------------------------- -module(erleans_timer). -include("erleans_timer.hrl"). -export([start/3, start/4, cancel/0, cancel/1, recoverable_cancel/0, check/0, recover/0]). 
-record(timer, { grain :: pid(), grain_ref :: map(), callback :: function(), args :: term(), period :: pos_integer() | never }). -opaque timer() :: #timer{}. -export_type([timer/0]). start(Callback, Args, StartTime) -> start(Callback, Args, StartTime, never). start(Callback, Args, StartTime, Period) -> case get(grain_ref) of undefined -> {error, called_outside_of_grain_context}; GrainRef -> GrainPid = self(), Timer = #timer{grain = GrainPid, grain_ref = GrainRef, callback = Callback, args = Args, period = Period}, Pid = start_timer(StartTime, Timer), case get(?key) of undefined -> put(?key, #{Pid => Timer}); Map -> put(?key, Map#{Pid => Timer}) end, {ok, Pid} end. cancel() -> case get(?key) of undefined -> {error, no_timer_to_cancel}; Map when is_map(Map) -> maps:fold(fun(Pid, _V, Acc) -> Pid ! ?cancel, Acc end, beep, Map), erlang:erase(?key), ok end. cancel(Pid) when is_pid(Pid) -> case get(?key) of #{Pid := _} = Map -> Pid ! ?cancel, put(?key, maps:remove(Pid, Map)), ok; _ -> {error, no_timer_to_cancel} end. %%% in order to be able to recover the list of timers when the grain %%% starts to go down and then is reactivated before all of the %%% in-process timer ticks can complete, we need to have a slightly %%% different form of cancel, which leaves the timer information %%% structure intact so we can restart them if we need to. recoverable_cancel() -> case get(?key) of undefined -> {error, no_timer_to_cancel}; Map when is_map(Map) -> maps:fold(fun(Pid, _V, Acc) -> Pid ! ?cancel, Acc end, beep, Map), erase(?key), put(?recovery, Map) end. check() -> case get(?recovery) of undefined -> finished; Map when is_map(Map) -> AnythingAlive = maps:fold(fun(_Pid, _V, true) -> true; (Pid, _V, _) -> is_process_alive(Pid) end, false, Map), case AnythingAlive of false -> finished; _ -> pending end end. 
recover() -> case get(?recovery) of undefined -> ok; Map when is_map(Map) -> NewMap = maps:fold(fun(_Pid, Timer, Acc) -> NewPid = start_timer(Timer#timer.period, Timer), Acc#{NewPid => Timer} end, #{}, Map), erase(?recovery), put(?key, NewMap) end. %%%%%%%%%%%%%%%%%%%%%%%%%% %%% internal functions %%% %%%%%%%%%%%%%%%%%%%%%%%%%% loop(FireTime, #timer{grain = Pid, grain_ref = GrainRef, callback = Callback, args = Args, period = Period} = Timer) -> receive ?cancel -> unlink(Pid), ok; ?tick -> try Callback(GrainRef, Args) of _ -> case Period of never -> unlink(Pid), ok; _ -> NextFire = FireTime + Period, erlang:send_after(NextFire, self(), ?tick, [{abs, true}]), loop(NextFire, Timer) end catch Class:Error -> Pid ! {erleans_timer_error, Class, Error}, unlink(Pid) end; Msg -> Pid ! {erleans_timer_unexpected_msg, Msg} end. start_timer(StartTime, Timer) -> spawn_link(fun() -> %% requests to a grain from a timer callback %% do not reset the activation expiry timer put(req_type, leave_timer), Now = erlang:monotonic_time(milli_seconds), FirstFire = Now + StartTime, erlang:send_after(FirstFire, self(), ?tick, [{abs, true}]), loop(FirstFire, Timer) end).
src/erleans_timer.erl
0.530723
0.456652
erleans_timer.erl
starcoder
%%%------------------------------------------------------------------- %%% @author <NAME> %%% @copyright (C) 2021 ACK CYFRONET AGH %%% This software is released under the MIT license %%% cited in 'LICENSE.txt'. %%% @end %%%------------------------------------------------------------------- %%% @doc %%% Internal datastore API used for operating on time series collections. %%% Time series collection consists of several time series. Each time series consists of %%% several metrics. Metric is set of windows that aggregate multiple %%% measurements from particular period of time. E.g., %%% MyTimeSeriesCollection = #{ %%% TimeSeries1 = #{ %%% Metric1 = [Window1, Window2, ...], %%% Metric2 = ... %%% }, %%% TimeSeries2 = ... %%% } %%% Window = {WindowTimestamp, aggregator(PrevAggregatedValue, MeasurementValue)} where %%% PrevAggregatedValue is result of previous aggregator function executions %%% (window can be created using several measurements). %%% See ts_windows:aggregate/3 to see possible aggregation functions. %%% %%% The module delegates operations on single metric to ts_metric module %%% that is able to handle infinite number of windows inside single metric splitting %%% windows set to subsets stored in multiple records that are saved to multiple %%% datastore documents by ts_persistence helper module. %%% %%% All metrics from all time series are kept together. If any metric windows count %%% is so high that it cannot be stored in single datastore document, the metric's %%% windows set is divided into several records by ts_metric module and then %%% persisted as several datastore documents by ts_persistence module. Thus, %%% windows of each metric form linked list of records storing parts of windows set, %%% with head of the list treated exceptionally (heads are kept together for all %%% metrics while rest of records with windows are kept separately for each metric). 
%%% @end %%%------------------------------------------------------------------- -module(time_series_collection). -author("<NAME>"). -include("modules/datastore/datastore_time_series.hrl"). -include("modules/datastore/ts_metric_config.hrl"). -include_lib("ctool/include/logging.hrl"). %% API -export([create/4, update/5, update/6, update_many/4, list_windows/4, list_windows/5, delete/3]). -type time_series_id() :: binary(). -type collection_id() :: binary(). -type collection_config() :: #{time_series_id() => #{ts_metric:id() => ts_metric:config()}}. % Metrics are stored in hierarchical map and time_series_id is used to get map % #{ts_metric:id() => ts_metric:metric()} for particular time series. % Other way of presentation is flattened map that uses keys {time_series_id(), ts_metric:id()}. It is used % mainly to return requested data (get can return only chosen metrics so it does not return hierarchical map). -type full_metric_id() :: {time_series_id(), ts_metric:id()}. -type windows_map() :: #{full_metric_id() => [ts_windows:window()]}. -type time_series_range() :: time_series_id() | [time_series_id()]. -type metrics_range() :: ts_metric:id() | [ts_metric:id()]. -type range() :: time_series_id() | {time_series_range(), metrics_range()}. -type request_range() :: range() | [range()]. -type update_range() :: {request_range(), ts_windows:value()} | [{request_range(), ts_windows:value()}]. -export_type([collection_id/0, collection_config/0, time_series_id/0, full_metric_id/0, request_range/0, update_range/0, windows_map/0]). -type ctx() :: datastore:ctx(). -type batch() :: datastore_doc:batch(). -define(CATCH_UNEXPECTED_ERRORS(Expr, ErrorLog, ErrorLogArgs, ErrorReturnValue), try Expr catch Error:Reason:Stacktrace when Reason =/= {fetch_error, not_found} -> ?error_stacktrace(ErrorLog ++ "~nerror type: ~p, error reason: ~p", ErrorLogArgs ++ [Error, Reason], Stacktrace), ErrorReturnValue end ). 
%%%===================================================================
%%% API
%%%===================================================================

%%--------------------------------------------------------------------
%% @doc
%% Creates a new time series collection described by ConfigMap and persists
%% its initial (empty) heads. Known configuration errors (too_many_metrics,
%% empty_metric, wrong_resolution) are mapped to dedicated error tuples;
%% anything else is logged and reported as create_failed.
%% @end
%%--------------------------------------------------------------------
-spec create(ctx(), collection_id(), collection_config(), batch()) -> {ok | {error, term()}, batch()}.
create(Ctx, Id, ConfigMap, Batch) ->
    try
        % Determine how each metric's windows are split across datastore documents.
        DocSplittingStrategies = ts_doc_splitting_strategies:calculate(ConfigMap),

        % Build the hierarchical map of empty metric heads (one #metric{} per config entry).
        TimeSeriesCollectionHeads = maps:map(fun(TimeSeriesId, MetricsConfigs) ->
            maps:map(fun(MetricsId, Config) ->
                #metric{
                    config = Config,
                    splitting_strategy = maps:get({TimeSeriesId, MetricsId}, DocSplittingStrategies)
                }
            end, MetricsConfigs)
        end, ConfigMap),

        PersistenceCtx = ts_persistence:init_for_new_collection(Ctx, Id, TimeSeriesCollectionHeads, Batch),
        {ok, ts_persistence:finalize(PersistenceCtx)}
    catch
        % Configuration validation errors thrown by ts_doc_splitting_strategies:calculate/1.
        _:{error, too_many_metrics} -> {{error, too_many_metrics}, Batch};
        _:{error, empty_metric} -> {{error, empty_metric}, Batch};
        _:{error, wrong_resolution} -> {{error, wrong_resolution}, Batch};
        Error:Reason:Stacktrace ->
            ?error_stacktrace("Time series collection ~p init error: ~p:~p~nConfig map: ~p",
                [Id, Error, Reason, ConfigMap], Stacktrace),
            {{error, create_failed}, Batch}
    end.

%%--------------------------------------------------------------------
%% @doc
%% Puts metrics value for particular timestamp. It updates all metrics from all time series or only chosen subset
%% of metrics depending on value of 4th function argument. In second case different measurement values can be specified
%% (see update_range() type).
%% @end
%%--------------------------------------------------------------------
-spec update(ctx(), collection_id(), ts_windows:timestamp(), ts_windows:value() | update_range(), batch()) ->
    {ok | {error, term()}, batch()}.
% Clause 1: a plain numeric value - apply the measurement to every metric of
% every time series in the collection.
update(Ctx, Id, NewTimestamp, NewValue, Batch) when is_number(NewValue) ->
    try
        {TimeSeriesCollectionHeads, PersistenceCtx} = ts_persistence:init_for_existing_collection(Ctx, Id, Batch),
        FinalPersistenceCtx = update_time_series(
            maps:to_list(TimeSeriesCollectionHeads), NewTimestamp, NewValue, PersistenceCtx),
        {ok, ts_persistence:finalize(FinalPersistenceCtx)}
    catch Error:Reason:Stacktrace ->
        ?error_stacktrace("Time series collection ~p update error: ~p:~p~nFailed to update measurement {~p, ~p}",
            [Id, Error, Reason, NewTimestamp, NewValue], Stacktrace),
        {{error, update_failed}, Batch}
    end;

% Clause 2: a list of {MetricsSelection, Value} pairs - each selection gets its
% own value; the persistence ctx is threaded through all updates in order.
update(Ctx, Id, NewTimestamp, MetricsToUpdateWithValues, Batch) when is_list(MetricsToUpdateWithValues) ->
    try
        {TimeSeriesCollectionHeads, PersistenceCtx} = ts_persistence:init_for_existing_collection(Ctx, Id, Batch),
        FinalPersistenceCtx = lists:foldl(fun({MetricsToUpdate, NewValue}, Acc) ->
            SelectedHeads = select_heads(TimeSeriesCollectionHeads, MetricsToUpdate),
            update_time_series(maps:to_list(SelectedHeads), NewTimestamp, NewValue, Acc)
        end, PersistenceCtx, MetricsToUpdateWithValues),
        {ok, ts_persistence:finalize(FinalPersistenceCtx)}
    catch Error:Reason:Stacktrace ->
        ?error_stacktrace("Time series collection ~p update error: ~p:~p~nFailed to update values ~p for timestamp ~p",
            [Id, Error, Reason, MetricsToUpdateWithValues, NewTimestamp], Stacktrace),
        {{error, update_failed}, Batch}
    end;

% Clause 3: a single {MetricsSelection, Value} tuple - delegate to update/6.
update(Ctx, Id, NewTimestamp, {MetricsToUpdate, NewValue}, Batch) ->
    update(Ctx, Id, NewTimestamp, MetricsToUpdate, NewValue, Batch).

%%--------------------------------------------------------------------
%% @doc
%% Puts metrics value for particular timestamp. Updated value is the same for all metrics provided in 4th argument.
%% @end
%%--------------------------------------------------------------------
-spec update(ctx(), collection_id(), ts_windows:timestamp(), request_range() , ts_windows:value(), batch()) ->
    {ok | {error, term()}, batch()}.
% Applies a single value to the metrics selected by MetricsToUpdate
% (see request_range()); metrics absent from the collection are silently skipped
% by select_heads/2.
update(Ctx, Id, NewTimestamp, MetricsToUpdate, NewValue, Batch) ->
    try
        {TimeSeriesCollectionHeads, PersistenceCtx} = ts_persistence:init_for_existing_collection(Ctx, Id, Batch),
        SelectedHeads = select_heads(TimeSeriesCollectionHeads, MetricsToUpdate),
        FinalPersistenceCtx = update_time_series(maps:to_list(SelectedHeads), NewTimestamp, NewValue, PersistenceCtx),
        {ok, ts_persistence:finalize(FinalPersistenceCtx)}
    catch Error:Reason:Stacktrace ->
        ?error_stacktrace("Time series collection ~p update error: ~p:~p~nFailed to update measurement {~p, ~p} for metrics ~p",
            [Id, Error, Reason, NewTimestamp, NewValue, MetricsToUpdate], Stacktrace),
        {{error, update_failed}, Batch}
    end.

%%--------------------------------------------------------------------
%% @doc
%% Puts multiple measurements to all metrics from all time series.
%% Usage of this function allows reduction of datastore overhead.
%% @end
%%--------------------------------------------------------------------
-spec update_many(ctx(), collection_id(), [{ts_windows:timestamp(), ts_windows:value()}], batch()) ->
    {ok | {error, term()}, batch()}.
update_many(_Ctx, _Id, [], Batch) ->
    {ok, Batch};
update_many(Ctx, Id, [{NewTimestamp, NewValue} | Measurements], Batch) ->
    % Stop at the first failed measurement and propagate its error tuple.
    case update(Ctx, Id, NewTimestamp, NewValue, Batch) of
        {ok, UpdatedBatch} -> update_many(Ctx, Id, Measurements, UpdatedBatch);
        Other -> Other
    end.

%%--------------------------------------------------------------------
%% @doc
%% Returns windows according options provided in 4th argument.
%% Windows for all metrics from all time series are included in answer.
%% @end
%%--------------------------------------------------------------------
-spec list_windows(ctx(), collection_id(), ts_windows:list_options(), batch() | undefined) ->
    {{ok, windows_map()} | {error, term()}, batch() | undefined}.
list_windows(Ctx, Id, Options, Batch) ->
    ?CATCH_UNEXPECTED_ERRORS(
        begin
            {TimeSeriesCollectionHeads, PersistenceCtx} = ts_persistence:init_for_existing_collection(Ctx, Id, Batch),
            % Build the flattened list of every {TimeSeriesId, MetricsId} pair in
            % the collection so the whole collection is listed.
            % NOTE(review): "FillMetricsIds" looks like a typo for "FullMetricsIds";
            % it is only a local name, behavior is unaffected.
            FillMetricsIds = maps:fold(fun(TimeSeriesId, MetricsConfigs, Acc) ->
                maps:fold(fun(MetricsId, _Config, InternalAcc) ->
                    [{TimeSeriesId, MetricsId} | InternalAcc]
                end, Acc, MetricsConfigs)
            end, [], TimeSeriesCollectionHeads),
            {Ans, FinalPersistenceCtx} = list_time_series_windows(
                TimeSeriesCollectionHeads, FillMetricsIds, Options, PersistenceCtx),
            {{ok, Ans}, ts_persistence:finalize(FinalPersistenceCtx)}
        end,
        "Time series collection ~p list error~nOptions: ~p", [Id, Options], {{error, list_failed}, Batch}
    ).

%%--------------------------------------------------------------------
%% @doc
%% Returns windows according options provided in 4th argument.
%% If windows from single metric are requested, the function returns list of windows.
%% Otherwise, map containing list of windows for each requested metric is returned.
%% @end
%%--------------------------------------------------------------------
-spec list_windows(ctx(), collection_id(), request_range(), ts_windows:list_options(), batch() | undefined) ->
    {{ok, [ts_windows:window()] | windows_map()} | {error, term()}, batch() | undefined}.
list_windows(Ctx, Id, RequestedMetrics, Options, Batch) ->
    ?CATCH_UNEXPECTED_ERRORS(
        begin
            {TimeSeriesCollectionHeads, PersistenceCtx} = ts_persistence:init_for_existing_collection(Ctx, Id, Batch),
            {Ans, FinalPersistenceCtx} = list_time_series_windows(
                TimeSeriesCollectionHeads, RequestedMetrics, Options, PersistenceCtx),
            {{ok, Ans}, ts_persistence:finalize(FinalPersistenceCtx)}
        end,
        "Time series collection ~p list error~nRequested metrics: ~p~nOptions: ~p",
        [Id, RequestedMetrics, Options], {{error, list_failed}, Batch}
    ).

% Deletes the whole collection: every metric's window documents and finally the hub.
-spec delete(ctx(), collection_id(), batch() ) -> {ok | {error, term()}, batch()}.
delete(Ctx, Id, Batch) ->
    ?CATCH_UNEXPECTED_ERRORS(
        begin
            {TimeSeriesCollectionHeads, PersistenceCtx} = ts_persistence:init_for_existing_collection(Ctx, Id, Batch),
            FinalPersistenceCtx = delete_time_series(maps:to_list(TimeSeriesCollectionHeads), PersistenceCtx),
            {ok, ts_persistence:finalize(FinalPersistenceCtx)}
        end,
        "Time series collection ~p delete error", [Id], {{error, delete_failed}, Batch}
    ).

%%=====================================================================
%% Internal functions
%%=====================================================================

% Filters the hierarchical heads map down to the time series / metrics selected
% by a request_range(). Unknown time series or metric ids are silently dropped;
% a time series whose metric filter matches nothing is omitted entirely.
-spec select_heads(ts_hub:time_series_collection_heads(), request_range()) -> ts_hub:time_series_collection_heads().
% Empty selection - nothing to include.
select_heads(_TimeSeriesCollectionHeads, []) ->
    #{};
% {TimeSeriesId, MetricIds} pair - keep only the listed metrics of that series.
select_heads(TimeSeriesCollectionHeads, [{TimeSeriesId, MetricIds} | Tail]) ->
    Ans = select_heads(TimeSeriesCollectionHeads, Tail),
    case maps:get(TimeSeriesId, TimeSeriesCollectionHeads, undefined) of
        undefined ->
            Ans;
        TimeSeries ->
            FilteredTimeSeries = lists:foldl(fun(MetricId, Acc) ->
                case maps:get(MetricId, TimeSeries, undefined) of
                    undefined -> Acc;
                    Metric -> maps:put(MetricId, Metric, Acc)
                end
            end, #{}, utils:ensure_list(MetricIds)),
            % Drop the series entirely when no requested metric exists in it.
            case maps:size(FilteredTimeSeries) of
                0 -> Ans;
                _ -> maps:put(TimeSeriesId, FilteredTimeSeries, Ans)
            end
    end;
% Bare TimeSeriesId - include the whole series with all its metrics.
select_heads(TimeSeriesCollectionHeads, [TimeSeriesId | Tail]) ->
    Ans = select_heads(TimeSeriesCollectionHeads, Tail),
    case maps:get(TimeSeriesId, TimeSeriesCollectionHeads, undefined) of
        undefined -> Ans;
        TimeSeries -> maps:put(TimeSeriesId, TimeSeries, Ans)
    end;
% Non-list selection - normalize to a single-element list and recurse.
select_heads(TimeSeriesCollectionHeads, ToBeIncluded) ->
    select_heads(TimeSeriesCollectionHeads, [ToBeIncluded]).

% Applies a single measurement to every listed time series, threading the
% persistence ctx through all of them.
-spec update_time_series([{time_series_id(), ts_hub:time_series_heads()}], ts_windows:timestamp(),
    ts_windows:value(), ts_persistence:ctx()) -> ts_persistence:ctx().
% Applies the measurement to each time series in turn. The persistence ctx is an
% accumulator: every series first records itself as "currently processed" and
% then updates all of its metrics.
update_time_series(TimeSeriesList, NewTimestamp, NewValue, PersistenceCtx) ->
    lists:foldl(fun({TimeSeriesId, TimeSeries}, CtxAcc) ->
        CtxWithIdSet = ts_persistence:set_currently_processed_time_series(TimeSeriesId, CtxAcc),
        update_metrics(maps:to_list(TimeSeries), NewTimestamp, NewValue, CtxWithIdSet)
    end, PersistenceCtx, TimeSeriesList).

% Applies the measurement to each metric of a single time series, again
% threading the persistence ctx as an accumulator.
-spec update_metrics([{ts_metric:id(), ts_metric:metric()}], ts_windows:timestamp(),
    ts_windows:value(), ts_persistence:ctx()) -> ts_persistence:ctx().
update_metrics(MetricsList, NewTimestamp, NewValue, PersistenceCtx) ->
    lists:foldl(fun({MetricId, Metric}, CtxAcc) ->
        CtxWithIdSet = ts_persistence:set_currently_processed_metric(MetricId, CtxAcc),
        ts_metric:update(Metric, NewTimestamp, NewValue, CtxWithIdSet)
    end, PersistenceCtx, MetricsList).

-spec list_time_series_windows(ts_hub:time_series_collection_heads(), request_range(),
    ts_windows:list_options(), ts_persistence:ctx()) -> {[ts_windows:window()] | windows_map(), ts_persistence:ctx()}.
% Resolves a request_range() against the collection heads and lists the windows
% of every matched metric. Missing metrics/time series produce `undefined'
% values in the answer map instead of being dropped.
list_time_series_windows(_TimeSeriesCollectionHeads, [], _Options, PersistenceCtx) ->
    {#{}, PersistenceCtx};
% {TimeSeriesIds, MetricIds} selection - cartesian product of both (possibly
% singleton) ranges; each missing metric maps to `undefined'.
list_time_series_windows(TimeSeriesCollectionHeads, [{TimeSeriesIds, MetricIds} | RequestedMetrics], Options, PersistenceCtx) ->
    {Ans, UpdatedPersistenceCtx} = lists:foldl(fun(TimeSeriesId, Acc) ->
        MetricsMap = maps:get(TimeSeriesId, TimeSeriesCollectionHeads, #{}),
        lists:foldl(fun(MetricId, {TmpAns, TmpPersistenceCtx}) ->
            {Values, UpdatedTmpPersistenceCtx} = case maps:get(MetricId, MetricsMap, undefined) of
                undefined -> {undefined, TmpPersistenceCtx};
                Metric -> ts_metric:list_windows(Metric, Options, TmpPersistenceCtx)
            end,
            {TmpAns#{{TimeSeriesId, MetricId} => Values}, UpdatedTmpPersistenceCtx}
        end, Acc, utils:ensure_list(MetricIds))
    end, {#{}, PersistenceCtx}, utils:ensure_list(TimeSeriesIds)),

    % Process the remaining requests and merge the partial answers.
    {Ans2, FinalPersistenceCtx} = list_time_series_windows(
        TimeSeriesCollectionHeads, RequestedMetrics, Options, UpdatedPersistenceCtx),
    {maps:merge(Ans, Ans2), FinalPersistenceCtx};
% Bare TimeSeriesId - list all metrics of the series; an unknown series maps the
% bare id (not a {Series, Metric} pair) to `undefined'.
list_time_series_windows(TimeSeriesCollectionHeads, [TimeSeriesId | RequestedMetrics], Options, PersistenceCtx) ->
    {Ans, UpdatedPersistenceCtx} = case maps:get(TimeSeriesId, TimeSeriesCollectionHeads, undefined) of
        undefined ->
            {#{TimeSeriesId => undefined}, PersistenceCtx};
        MetricsMap ->
            lists:foldl(fun({MetricId, Metric}, {TmpAns, TmpPersistenceCtx}) ->
                {Values, UpdatedTmpPersistenceCtx} = ts_metric:list_windows(Metric, Options, TmpPersistenceCtx),
                {TmpAns#{{TimeSeriesId, MetricId} => Values}, UpdatedTmpPersistenceCtx}
            end, {#{}, PersistenceCtx}, maps:to_list(MetricsMap))
    end,
    {Ans2, FinalPersistenceCtx} = list_time_series_windows(TimeSeriesCollectionHeads, RequestedMetrics, Options, UpdatedPersistenceCtx),
    {maps:merge(Ans, Ans2), FinalPersistenceCtx};
% Non-list request - normalize to a list; when it resolves to exactly the single
% requested key, unwrap the map and return the bare windows list.
list_time_series_windows(TimeSeriesCollectionHeads, Request, Options, PersistenceCtx) ->
    {Ans, FinalPersistenceCtx} = list_time_series_windows(TimeSeriesCollectionHeads, [Request], Options, PersistenceCtx),
    case maps:is_key(Request, Ans) of
        true ->
            % Single key is requested - return value for the key instead of map
            {maps:get(Request, Ans), FinalPersistenceCtx};
        false ->
            {Ans, FinalPersistenceCtx}
    end.

% Deletes all metric documents of every time series; once the list is exhausted
% the hub document itself is deleted.
-spec delete_time_series([{time_series_id(), ts_hub:time_series_heads()}], ts_persistence:ctx()) ->
    ts_persistence:ctx().
delete_time_series([], PersistenceCtx) ->
    ts_persistence:delete_hub(PersistenceCtx);
delete_time_series([{_TimeSeriesId, TimeSeries} | Tail], PersistenceCtx) ->
    UpdatedPersistenceCtx = delete_metric(maps:to_list(TimeSeries), PersistenceCtx),
    delete_time_series(Tail, UpdatedPersistenceCtx).

% Deletes the window documents of each metric in a single time series.
-spec delete_metric([{ts_metric:id(), ts_metric:metric()}], ts_persistence:ctx()) -> ts_persistence:ctx().
delete_metric([], PersistenceCtx) ->
    PersistenceCtx;
delete_metric([{_MetricId, Metric} | Tail], PersistenceCtx) ->
    UpdatedPersistenceCtx = ts_metric:delete(Metric, PersistenceCtx),
    delete_metric(Tail, UpdatedPersistenceCtx).
src/modules/datastore/time_series/time_series_collection.erl
0.58059
0.470919
time_series_collection.erl
starcoder
%% ============================================================================= %% bondy_dealer.erl - %% %% Copyright (c) 2016-2021 Leapsight. All rights reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %% ============================================================================= %% ============================================================================= %% @doc %% This module implements the capabilities of a Dealer. It is used by %% {@link bondy_router}. %% %% A Dealer is one of the two roles a Router plays. In particular a Dealer is %% the middleman between an Caller and a Callee in an RPC interaction, %% i.e. it works as a generic router for remote procedure calls %% decoupling Callers and Callees. %% %% Callees register the procedures they provide with Dealers. Callers %% initiate procedure calls first to Dealers. Dealers route calls %% incoming from Callers to Callees implementing the procedure called, %% and route call results back from Callees to Callers. %% %% A Caller issues calls to remote procedures by providing the procedure %% URI and any arguments for the call. The Callee will execute the %% procedure using the supplied arguments to the call and return the %% result of the call to the Caller. %% %% The Caller and Callee will usually implement all business logic, while the %% Dealer works as a generic router for remote procedure calls %% decoupling Callers and Callees. 
%% %% Bondy does not provide message transformations to ensure stability and %% safety. %% As such, any required transformations should be handled by Callers and %% Callees directly (notice that a Callee can act as a middleman implementing %% the required transformations). %% %% The message flow between _Callees_ and a _Dealer_ for registering and %% unregistering endpoints to be called over RPC involves the following %% messages: %% %% 1. "REGISTER" %% 2. "REGISTERED" %% 3. "UNREGISTER" %% 4. "UNREGISTERED" %% 5. "ERROR" %% %% ``` %% ,------. ,------. ,------. %% |Caller| |Dealer| |Callee| %% `--+---' `--+---' `--+---' %% | | | %% | | | %% | | REGISTER | %% | | &lt;--------------------- %% | | | %% | | REGISTERED or ERROR | %% | | ---------------------&gt; %% | | | %% | | | %% | | | %% | | | %% | | | %% | | UNREGISTER | %% | | &lt;--------------------- %% | | | %% | | UNREGISTERED or ERROR| %% | | ---------------------&gt; %% ,--+---. ,--+---. ,--+---. %% |Caller| |Dealer| |Callee| %% `------' `------' `------' %% %% ''' %% %% # Calling and Invocations %% %% The message flow between _Callers_, a _Dealer_ and _Callees_ for %% calling procedures and invoking endpoints involves the following %% messages: %% %% 1. "CALL" %% %% 2. "RESULT" %% %% 3. "INVOCATION" %% %% 4. "YIELD" %% %% 5. "ERROR" %% %% ``` %% ,------. ,------. ,------. %% |Caller| |Dealer| |Callee| %% `--+---' `--+---' `--+---' %% | CALL | | %% | ----------------&gt; | %% | | | %% | | INVOCATION | %% | | ----------------&gt; %% | | | %% | | YIELD or ERROR | %% | | %lt;---------------- %% | | | %% | RESULT or ERROR | | %% | %lt;---------------- | %% ,--+---. ,--+---. ,--+---. %% |Caller| |Dealer| |Callee| %% `------' `------' `------' %% %% ''' %% %% The execution of remote procedure calls is asynchronous, and there %% may be more than one call outstanding. A call is called outstanding %% (from the point of view of the _Caller_), when a (final) result or %% error has not yet been received by the _Caller_. 
%%
%% # Remote Procedure Call Ordering
%%
%% Regarding *Remote Procedure Calls*, the ordering guarantees are as
%% follows:
%%
%% If _Callee A_ has registered endpoints for both *Procedure 1* and
%% *Procedure 2*, and _Caller B_ first issues a *Call 1* to *Procedure
%% 1* and then a *Call 2* to *Procedure 2*, and both calls are routed to
%% _Callee A_, then _Callee A_ will first receive an invocation
%% corresponding to *Call 1* and then *Call 2*. This also holds if
%% *Procedure 1* and *Procedure 2* are identical.
%%
%% In other words, WAMP guarantees ordering of invocations between any
%% given _pair_ of _Caller_ and _Callee_. The current implementation
%% relies on Distributed Erlang which guarantees message ordering between
%% processes in different nodes.
%%
%% There are no guarantees on the order of call results and errors in
%% relation to _different_ calls, since the execution of calls upon
%% different invocations of endpoints in _Callees_ are running
%% independently. A first call might require an expensive, long-running
%% computation, whereas a second, subsequent call might finish
%% immediately.
%%
%% Further, if _Callee A_ registers for *Procedure 1*, the "REGISTERED"
%% message will be sent by _Dealer_ to _Callee A_ before any
%% "INVOCATION" message for *Procedure 1*.
%%
%% There is no guarantee regarding the order of return for multiple
%% subsequent register requests. A register request might require the
%% _Dealer_ to do a time-consuming lookup in some database, whereas
%% another register request second might be permissible immediately.
%% @end
%% =============================================================================
-module(bondy_dealer).

-include_lib("wamp/include/wamp.hrl").
-include_lib("kernel/include/logger.hrl").
-include("bondy.hrl").

% Default page size for paginated registry queries.
-define(DEFAULT_LIMIT, 1000).
% Error message used when a client tries to use a reserved URI namespace.
-define(RESERVED_NS(NS),
    <<"Use of reserved namespace '", NS/binary, "'.">>
).

%% API
-export([close_context/1]).
-export([features/0]).
-export([handle_message/2]).
-export([handle_peer_message/4]).
-export([is_feature_enabled/1]).
-export([registrations/1]).
-export([registrations/3]).
-export([registrations/4]).
-export([match_registrations/2]).
-export([callees/1]).
-export([callees/2]).
-export([callees/3]).

-compile({no_auto_import, [register/2]}).



%% =============================================================================
%% API
%% =============================================================================



%% -----------------------------------------------------------------------------
%% @doc Returns the node and session id of every callee that has at least one
%% registration in realm `RealmUri'. Duplicates are removed.
%% @end
%% -----------------------------------------------------------------------------
-spec callees(RealmUri :: uri()) -> [map()] | no_return().

callees(RealmUri) ->
    case bondy_registry:entries(registration, RealmUri, '_', '_') of
        [] ->
            [];
        Entries ->
            unique_callees(Entries)
    end.


%% -----------------------------------------------------------------------------
%% @doc Same as `callees(RealmUri, ProcedureUri, #{})'.
%% @end
%% -----------------------------------------------------------------------------
-spec callees(RealmUri :: uri(), ProcedureUri :: uri()) ->
    [map()] | no_return().

callees(RealmUri, ProcedureUri) ->
    callees(RealmUri, ProcedureUri, #{}).


%% -----------------------------------------------------------------------------
%% @doc Returns the node and session id of every callee whose registration
%% matches `ProcedureUri' in realm `RealmUri'. Duplicates are removed.
%% Only a single (non-continued) match result is supported.
%% @end
%% -----------------------------------------------------------------------------
-spec callees(RealmUri :: uri(), ProcedureUri :: uri(), Opts :: map()) ->
    [map()] | no_return().

callees(RealmUri, ProcedureUri, Opts) ->
    case bondy_registry:match(registration, ProcedureUri, RealmUri, Opts) of
        {[], '$end_of_table'} ->
            [];
        {Entries, '$end_of_table'} ->
            unique_callees(Entries)
    end.


%% @private
%% Maps registry entries to deduplicated #{node, session_id} maps. Extracted so
%% callees/1 and callees/3 share one implementation instead of two verbatim
%% copies of the same list comprehension + sets-based dedup.
unique_callees(Entries) ->
    All = [
        {
            bondy_registry_entry:node(E),
            bondy_registry_entry:session_id(E)
        } || E <- Entries
    ],
    [
        #{node => N, session_id => S}
        || {N, S} <- sets:to_list(sets:from_list(All))
    ].
%% -----------------------------------------------------------------------------
%% @doc Cleans up all dealer-side state associated with a session context:
%% removes the session's registrations and flushes its pending RPC promises.
%% Always returns the context, even when cleanup fails (errors are logged).
%% @end
%% -----------------------------------------------------------------------------
-spec close_context(bondy_context:t()) -> bondy_context:t().

close_context(Ctxt) ->
    try
        %% Cleanup registrations
        ok = unregister_all(Ctxt),
        %% Cleanup invocations queue
        ok = bondy_rpc_promise:flush(bondy_context:peer_id(Ctxt)),
        Ctxt
    catch
        Class:Reason:Stacktrace ->
            %% Best-effort cleanup: log and return the context unchanged.
            ?LOG_ERROR(#{
                description => "Error while closing context",
                class => Class,
                reason => Reason,
                trace => Stacktrace
            }),
            Ctxt
    end.


%% -----------------------------------------------------------------------------
%% @doc Returns the WAMP dealer features advertised by this router.
%% @end
%% -----------------------------------------------------------------------------
-spec features() -> map().

features() -> ?DEALER_FEATURES.


%% -----------------------------------------------------------------------------
%% @doc Returns true if the dealer feature `F' is enabled.
%% @end
%% -----------------------------------------------------------------------------
-spec is_feature_enabled(binary()) -> boolean().

is_feature_enabled(F) when is_binary(F) ->
    maps:get(F, ?DEALER_FEATURES, false).


%% -----------------------------------------------------------------------------
%% @doc Entry point for messages coming from a local peer. Translates the
%% not_authorized and not_found conditions thrown by do_handle_message/2 into
%% WAMP ERROR replies sent back to the peer.
%% @end
%% -----------------------------------------------------------------------------
-spec handle_message(M :: wamp_message(), Ctxt :: map()) ->
    ok | no_return().

handle_message(M, Ctxt) ->
    try
        do_handle_message(M, Ctxt)
    catch
        _:{not_authorized, Reason} ->
            Reply = wamp_message:error_from(
                M,
                #{},
                ?WAMP_NOT_AUTHORIZED,
                [Reason],
                #{message => Reason}
            ),
            bondy:send(bondy_context:peer_id(Ctxt), Reply);
        throw:not_found ->
            Reply = not_found_error(M, Ctxt),
            bondy:send(bondy_context:peer_id(Ctxt), Reply)
    end.


%% -----------------------------------------------------------------------------
%% @doc Handles inbound messages received from a peer (node).
%% @end
%% -----------------------------------------------------------------------------
-spec handle_peer_message(
    wamp_message(), To :: remote_peer_id(), From :: remote_peer_id(),
    Opts :: map()) -> ok | no_return().

handle_peer_message(#yield{} = M, _Caller, Callee, _Opts) ->
    %% A remote callee is returning a yield to a local caller.
    %% Dequeue the matching invocation promise and turn the YIELD into a RESULT
    %% addressed to the local caller.
    Fun = fun
        (empty) ->
            no_matching_promise(M);
        ({ok, Promise}) ->
            LocalCaller = bondy_rpc_promise:caller(Promise),
            CallId = bondy_rpc_promise:call_id(Promise),
            Result = yield_to_result(CallId, M),
            bondy:send(Callee, LocalCaller, Result, #{})
    end,
    InvocationId = M#yield.request_id,
    _ = bondy_rpc_promise:dequeue_invocation(InvocationId, Callee, Fun),
    ok;

handle_peer_message(
    #error{request_type = ?INVOCATION} = M, _Caller, Callee, _Opts) ->
    %% A remote callee is returning an error to a local caller.
    %% Rewrite the INVOCATION error into a CALL error using the promise's
    %% original call id.
    Fun = fun
        (empty) ->
            no_matching_promise(M);
        ({ok, Promise}) ->
            LocalCaller = bondy_rpc_promise:caller(Promise),
            CallId = bondy_rpc_promise:call_id(Promise),
            CallError = M#error{request_id = CallId, request_type = ?CALL},
            bondy:send(Callee, LocalCaller, CallError, #{})
    end,
    InvocationId = M#error.request_id,
    _ = bondy_rpc_promise:dequeue_invocation(InvocationId, Callee, Fun),
    ok;

handle_peer_message(
    #error{request_type = ?CANCEL} = M, Caller, Callee, _Opts) ->
    %% A CANCEL we made to a remote callee has failed.
    %% We forward the error back to the local caller, keeping the promise to be
    %% able to match the future yield message,
    CallId = M#error.request_id,
    case bondy_rpc_promise:peek_call(CallId, Caller) of
        empty ->
            no_matching_promise(M);
        {ok, Promise} ->
            LocalCaller = bondy_rpc_promise:caller(Promise),
            bondy:send(Callee, LocalCaller, M, #{})
    end,
    ok;

handle_peer_message(#interrupt{} = M, _Callee, Caller, _Opts) ->
    %% A remote caller is cancelling a previous call-invocation
    %% made to our local callee. Dequeue the promise and forward the
    %% INTERRUPT to the local callee.
    Fun = fun
        (empty) ->
            %% TODO We should reply with an error
            no_matching_promise(M);
        ({ok, Promise}) ->
            LocalCallee = bondy_rpc_promise:callee(Promise),
            bondy:send(Caller, LocalCallee, M, #{})
    end,
    InvocationId = M#interrupt.request_id,
    _ = bondy_rpc_promise:dequeue_invocation(InvocationId, Caller, Fun),
    ok;

handle_peer_message(#invocation{} = M, Callee, Caller, Opts) ->
    %% A remote caller is making a call to a local callee.
    %% We first need to find the registry entry to get the local callee
    %% At the moment we might not get the Pid in the Callee tuple, so we fetch it
    {RealmUri, Node, SessionId, _Pid} = Callee,
    Key = bondy_registry_entry:key_pattern(
        registration, RealmUri, Node, SessionId, M#invocation.registration_id),

    %% We use lookup because the key is ground
    case bondy_registry:lookup(Key) of
        {error, not_found} ->
            %% Registration is gone; report no_eligible_callee back to the
            %% remote caller.
            bondy:send(
                Callee,
                Caller,
                no_eligible_callee(invocation, M#invocation.registration_id),
                #{}
            );
        Entry ->
            LocalCallee = bondy_registry_entry:peer_id(Entry),
            %% We enqueue the invocation so that we can match it with the
            %% YIELD or ERROR
            Promise = bondy_rpc_promise:new(
                M#invocation.request_id, LocalCallee, Caller),
            Timeout = bondy_utils:timeout(Opts),
            ok = bondy_rpc_promise:enqueue(RealmUri, Promise, Timeout),
            bondy:send(Caller, LocalCallee, M, Opts)
    end.



%% =============================================================================
%% PRIVATE
%% =============================================================================



%% -----------------------------------------------------------------------------
%% @doc Dispatches a local peer's message per WAMP message type. May throw
%% {not_authorized, Reason} or not_found, handled by handle_message/2.
%% @end
%% -----------------------------------------------------------------------------
-spec do_handle_message(M :: wamp_message(), Ctxt :: map()) ->
    ok | no_return().
do_handle_message(#register{} = M, Ctxt) ->
    handle_register(M, Ctxt);

do_handle_message(#unregister{} = M, Ctxt) ->
    handle_unregister(M, Ctxt);

do_handle_message(#cancel{} = M, Ctxt0) ->
    %% TODO check if authorized and if not throw wamp.error.not_authorized
    CallId = M#cancel.request_id,
    Caller = bondy_context:peer_id(Ctxt0),
    Opts = M#cancel.options,

    %% We first use peek to find the Promise based on CallId so we can retrieve
    %% the Procedure URI required for authorization
    Authorize = fun(Promise, Ctxt) ->
        Uri = bondy_rpc_promise:procedure_uri(Promise),
        ok = bondy_rbac:authorize(<<"wamp.cancel">>, Uri, Ctxt),
        {ok, Ctxt}
    end,
    _ = peek_invocations(CallId, Authorize, Ctxt0),

    %% A response will be send asynchronously by another router process instance

    %% If the callee does not support call canceling, then behavior is skip.
    %% We should check callee but that means we need to broadcast sessions.
    %% Another option is to pay the price and ask bondy to fail on the
    %% remote node after checking the callee does not support it.
    %% The caller is not affected, only in the kill case will receive an
    %% error later in the case of a remote callee.
    case maps:get(mode, Opts, skip) of
        kill ->
            %% INTERRUPT is sent to the callee, but ERROR is not returned
            %% to the caller until the callee has responded to INTERRUPT with
            %% ERROR. In this case, the caller may receive RESULT or
            %% another ERROR if the callee finishes processing the
            %% INVOCATION first.
            %% We thus peek (read) instead of dequeueing.
            Fun = fun(Promise, Ctxt1) ->
                InvocationId = bondy_rpc_promise:invocation_id(Promise),
                Callee = bondy_rpc_promise:callee(Promise),
                R = wamp_message:interrupt(InvocationId, Opts),
                ok = bondy:send(Caller, Callee, R, #{}),
                {ok, Ctxt1}
            end,
            _ = peek_invocations(CallId, Fun, Ctxt0),
            ok;
        killnowait ->
            %% The pending call is canceled and ERROR is sent immediately
            %% back to the caller. INTERRUPT is sent to the callee and any
            %% response to the invocation or interrupt from the callee is
            %% discarded when received.
            %% We dequeue the invocation, that way the response will be
            %% discarded.
            Fun = fun(Promise, Ctxt1) ->
                InvocationId = bondy_rpc_promise:invocation_id(Promise),
                Callee = bondy_rpc_promise:callee(Promise),
                %% NOTE(review): Caller is already bound in the enclosing
                %% clause; this is a re-match assertion, not a fresh binding.
                %% It will badmatch if Ctxt1's peer id ever differs - confirm
                %% this is intended.
                Caller = bondy_context:peer_id(Ctxt1),
                Mssg = <<"call_cancelled">>,
                Args = [Mssg],
                ArgsKw = #{
                    message => Mssg,
                    description => <<"The call was cancelled by the user.">>
                },
                Error = wamp_message:error(
                    ?CANCEL, CallId, #{}, ?WAMP_CANCELLED, Args, ArgsKw),
                ok = bondy:send(Callee, Caller, Error, #{}),
                Interrupt = wamp_message:interrupt(InvocationId, Opts),
                ok = bondy:send(Caller, Callee, Interrupt, #{}),
                {ok, Ctxt1}
            end,
            _ = dequeue_invocations(CallId, M, Fun, Ctxt0),
            ok;
        skip ->
            %% The pending call is canceled and ERROR is sent immediately
            %% back to the caller. No INTERRUPT is sent to the callee and
            %% the result is discarded when received.
            %% We dequeue the invocation, that way the response will be
            %% discarded.
            %% TODO instead of dequeing, update the entry to reflect it was
            %% cancelled
            Fun = fun(Promise, Ctxt1) ->
                Callee = bondy_rpc_promise:callee(Promise),
                %% NOTE(review): re-match of the already-bound Caller, see the
                %% killnowait branch above.
                Caller = bondy_context:peer_id(Ctxt1),
                Mssg = <<"call_cancelled">>,
                Args = [Mssg],
                ArgsKw = #{
                    message => Mssg,
                    description => <<"The call was cancelled by the user.">>
                },
                Error = wamp_message:error(
                    ?CANCEL, CallId, #{}, ?WAMP_CANCELLED, Args, ArgsKw),
                ok = bondy:send(Callee, Caller, Error, #{}),
                {ok, Ctxt1}
            end,
            _ = dequeue_invocations(CallId, M, Fun, Ctxt0),
            ok
    end;

do_handle_message(#yield{} = M, Ctxt0) ->
    %% A Callee is replying to a previous wamp_invocation()
    %% which we generated based on a Caller wamp_call()
    %% We match the wamp_yield() with the originating wamp_invocation()
    %% using the request_id, and with that match the wamp_call() request_id
    %% to find the caller pid.
    Callee = bondy_context:peer_id(Ctxt0),
    Fun = fun
        (empty) ->
            no_matching_promise(M);
        ({ok, Promise}) ->
            Caller = bondy_rpc_promise:caller(Promise),
            case bondy_rpc_promise:call_id(Promise) of
                undefined ->
                    %% The caller is remote, we fwd the yield to the peer node
                    %% TODO make this explicit, at the moment a promise with
                    %% undefined callId is a promise for a remote callee
                    bondy:send(Callee, Caller, M, #{});
                CallId ->
                    Result = yield_to_result(CallId, M),
                    bondy:send(Callee, Caller, Result, #{})
            end
    end,
    InvocationId = M#yield.request_id,
    %% NOTE(review): Callee is re-matched here to the same value it was bound
    %% to above; the second binding is redundant but harmless.
    Callee = bondy_context:peer_id(Ctxt0),
    _ = bondy_rpc_promise:dequeue_invocation(InvocationId, Callee, Fun),
    ok;

do_handle_message(#error{request_type = ?INVOCATION} = M, Ctxt0) ->
    %% A local callee reports an invocation error; convert it to a CALL error
    %% for a local caller, or forward it as-is for a remote caller.
    Callee = bondy_context:peer_id(Ctxt0),
    Fun = fun
        (empty) ->
            no_matching_promise(M);
        ({ok, Promise}) ->
            Caller = bondy_rpc_promise:caller(Promise),
            CallId = bondy_rpc_promise:call_id(Promise),
            CallError = case bondy:is_remote_peer(Caller) of
                true ->
                    %% We reply the invocation message as the remote node has
                    %% send us an invocation and not a call
                    M;
                false ->
                    M#error{request_id = CallId, request_type = ?CALL}
            end,
            bondy:send(Callee, Caller, CallError, #{})
    end,
    InvocationId = M#error.request_id,
    %% NOTE(review): redundant re-match of Callee, same as in the yield clause.
    Callee = bondy_context:peer_id(Ctxt0),
    _ = bondy_rpc_promise:dequeue_invocation(InvocationId, Callee, Fun),
    ok;

do_handle_message(#error{request_type = ?INTERRUPT} = M, Ctxt0) ->
    %% A callee is responding with an error to an INTERRUPT message
    %% We need to turn this into a CANCEL error
    Callee = bondy_context:peer_id(Ctxt0),
    InvocationId = M#error.request_id,
    %% NOTE(review): Caller is first bound to the callee's own peer id and then
    %% re-matched against bondy_rpc_promise:caller(Promise) below; that match
    %% will badmatch whenever the promise's caller differs from this peer id.
    %% Looks suspicious - confirm against the promise API.
    Caller = bondy_context:peer_id(Ctxt0),
    case bondy_rpc_promise:peek_invocation(InvocationId, Callee) of
        empty ->
            %% Call was evicted or performed already by Callee
            no_matching_promise(M);
        {ok, Promise} ->
            Caller = bondy_rpc_promise:caller(Promise),
            CallId = bondy_rpc_promise:call_id(Promise),
            CancelError = M#error{request_id = CallId, request_type = ?CALL},
            bondy:send(Callee, Caller, CancelError, #{})
    end,
    ok;

do_handle_message(#call{procedure_uri = Uri} = M, Ctxt) ->
    %% TODO Maybe
    %% ReqId = bondy_utils:get_id(global),
    %% spawn with pool -> bondy_wamp_meta_api:handle_call(M, Ctxt);
    %% {ok, ReqId, Ctxt}.
    ok = bondy_rbac:authorize(<<"wamp.call">>, Uri, Ctxt),
    handle_call(M, Ctxt).


%% @private
%% Routes calls on reserved URI prefixes to the corresponding callback API
%% module; everything else goes to the registry-based dispatch.
handle_call(
    #call{procedure_uri = <<"com.leapsight.bondy.", _/binary>>} = M, Ctxt) ->
    %% Deprecated API prefix. Now "bondy."
    maybe_callback(M, Ctxt, bondy_wamp_api);

handle_call(
    #call{procedure_uri = <<"bondy.", _/binary>>} = M, Ctxt) ->
    maybe_callback(M, Ctxt, bondy_wamp_api);

handle_call(
    #call{procedure_uri = <<"wamp.", _/binary>>} = M, Ctxt) ->
    maybe_callback(M, Ctxt, bondy_wamp_meta_api);

handle_call(#call{procedure_uri = Uri} = M, Ctxt) ->
    do_handle_call(M, Ctxt, Uri).


%% -----------------------------------------------------------------------------
%% @private
%% @doc If the callback module returns ignore we need to find the callee in the
%% registry
%% @end
%% -----------------------------------------------------------------------------
maybe_callback(#call{procedure_uri = Uri} = M, Ctxt, Mod) ->
    case Mod:handle_call(M, Ctxt) of
        ok ->
            ok;
        ignore ->
            do_handle_call(M, Ctxt, Uri);
        {redirect, OtherUri} ->
            %% The callback rewrote the procedure URI; dispatch under the new one.
            do_handle_call(M, Ctxt, OtherUri)
    end.


%% @private
%% Registry-based dispatch: builds the INVOCATION for each eligible callee
%% (selection is delegated to invoke/5) and sends it.
do_handle_call(#call{} = M, Ctxt0, Uri) ->
    %% invoke/5 takes a fun which takes the registration_id of the
    %% procedure and the callee
    %% Based on procedure registration and passed options, we will
    %% determine how many invocations and to whom we should do.
    Caller = bondy_context:peer_id(Ctxt0),

    Fun = fun
        (Entry, {_RealmUri, _Node, SessionId, Pid} = Callee, Ctxt1)
        when is_integer(SessionId), is_pid(Pid) ->
            %% TODO Revert to session-scoped Ids
            %% ReqId = bondy_utils:get_id({session, SessionId}),
            ReqId = bondy_utils:get_id(global),
            Args = M#call.arguments,
            Payload = M#call.arguments_kw,
            RegId = bondy_registry_entry:id(Entry),
            RegOpts = bondy_registry_entry:options(Entry),
            CallOpts = M#call.options,
            Details = prepare_invocation_details(Uri, CallOpts, RegOpts, Ctxt1),
            R = wamp_message:invocation(ReqId, RegId, Details, Args, Payload),
            ok = bondy:send(Caller, Callee, R, #{}),
            {ok, ReqId, Ctxt1}
    end,

    %% A response will be send asynchronously by another router process instance
    invoke(M#call.request_id, Uri, Fun, M#call.options, Ctxt0).


%% @private
%% Builds the INVOCATION details map: caller disclosure is driven by the call
%% and registration options, and optional session info by the registration's
%% x_disclose_session_info flag.
prepare_invocation_details(Uri, CallOpts, RegOpts, Ctxt) ->
    DiscloseMe = maps:get(disclose_me, CallOpts, true),
    DiscloseCaller = maps:get(disclose_caller, RegOpts, true),
    Details0 = #{procedure => Uri, trust_level => 0},
    Details1 = case DiscloseCaller orelse DiscloseMe of
        true ->
            Details0#{caller => bondy_context:session_id(Ctxt)};
        false ->
            Details0
    end,

    case maps:get('x_disclose_session_info', RegOpts, false) of
        true ->
            Session = bondy_context:session(Ctxt),
            Info = bondy_session:info(Session),
            Details1#{'x_session_info' => Info};
        false ->
            Details1
    end.


%% -----------------------------------------------------------------------------
%% @doc
%% Registers an RPC endpoint.
%% If the registration already exists, it fails with a
%% `{not_authorized | procedure_already_exists, binary()}' reason.
%% @end %% ----------------------------------------------------------------------------- handle_register(#register{procedure_uri = Uri} = M, Ctxt) -> ok = maybe_reserved_ns(Uri), ok = bondy_rbac:authorize(<<"wamp.register">>, Uri, Ctxt), #register{options = Opts, request_id = ReqId} = M, PeerId = bondy_context:peer_id(Ctxt), case bondy_registry:add(registration, Uri, Opts, Ctxt) of {ok, Entry, IsFirst} -> ok = on_register(IsFirst, Entry, Ctxt), Id = bondy_registry_entry:id(Entry), Reply = wamp_message:registered(ReqId, Id), bondy:send(PeerId, Reply); {error, {already_exists, Entry}} -> Policy = bondy_registry_entry:match_policy(Entry), Mssg = << "The procedure is already registered by another peer ", "with policy ", $', Policy/binary, $', $. >>, Reply = wamp_message:error( ?REGISTER, ReqId, #{}, ?WAMP_PROCEDURE_ALREADY_EXISTS, [Mssg] ), bondy:send(PeerId, Reply) end. %% ----------------------------------------------------------------------------- %% @private %% @doc %% Unregisters an RPC endpoint. %% If the registration does not exist, it fails with a 'no_such_registration' or %% '{not_authorized, binary()}' error. %% @end %% ----------------------------------------------------------------------------- -spec handle_unregister(wamp_unregister(), bondy_context:t()) -> ok | no_return(). handle_unregister(#unregister{} = M, Ctxt) -> RegId = M#unregister.registration_id, RealmUri = bondy_context:realm_uri(Ctxt), %% TODO Shouldn't we restrict this operation to the peer who registered it? %% and/or a Bondy Admin for revoke registration? case bondy_registry:lookup(registration, RegId, RealmUri) of {error, not_found} -> throw(not_found); Entry -> Uri = bondy_registry_entry:uri(Entry), %% We authorize first ok = bondy_rbac:authorize( <<"wamp.unregister">>, Uri, Ctxt), unregister(Uri, M, Ctxt) end. 
%% @private unregister(Uri, M, Ctxt) -> ok = maybe_reserved_ns(Uri), RegId = M#unregister.request_id, ok = bondy_rbac:authorize(<<"wamp.unregister">>, Uri, Ctxt), ok = bondy_registry:remove(registration, RegId, Ctxt, fun on_unregister/2), Reply = wamp_message:unregistered(RegId), bondy:send(bondy_context:peer_id(Ctxt), Reply). %% ----------------------------------------------------------------------------- %% @private %% @doc %% @end %% ----------------------------------------------------------------------------- -spec unregister_all(bondy_context:t()) -> ok. unregister_all(Ctxt) -> bondy_registry:remove_all(registration, Ctxt, fun on_unregister/2). %% ----------------------------------------------------------------------------- %% @private %% @doc %% Returns the list of registrations for the active session. %% %% When called with a bondy:context() it is equivalent to calling %% registrations/2 with the RealmUri and SessionId extracted from the Context. %% @end %% ----------------------------------------------------------------------------- -spec registrations(bondy_registry:continuation()) -> { [bondy_registry_entry:t()], bondy_registry:continuation() | bondy_registry:eot() }. registrations({registration, _} = Cont) -> bondy_registry:entries(Cont). %% ----------------------------------------------------------------------------- %% @private %% @doc %% Returns the complete list of registrations matching the RealmUri %% and SessionId. %% %% Use {@link registrations/3} and {@link registrations/1} to limit the %% number of registrations returned. %% @end %% ----------------------------------------------------------------------------- -spec registrations(RealmUri :: uri(), Node :: atom(), SessionId :: id()) -> [bondy_registry_entry:t()]. registrations(RealmUri, Node, SessionId) -> bondy_registry:entries(registration, RealmUri, Node, SessionId). 
%% ----------------------------------------------------------------------------- %% @private %% @doc %% Returns the list of registrations matching the RealmUri and SessionId. %% %% Use {@link registrations/3} to limit the number of registrations returned. %% @end %% ----------------------------------------------------------------------------- -spec registrations( RealmUri :: uri(), Node :: atom(), SessionId :: id(), non_neg_integer()) -> { [bondy_registry_entry:t()], bondy_registry:continuation() | bondy_registry:eot() }. registrations(RealmUri, Node, SessionId, Limit) -> bondy_registry:entries(registration, RealmUri, Node, SessionId, Limit). %% ----------------------------------------------------------------------------- %% @private %% @doc %% @end %% ----------------------------------------------------------------------------- -spec match_registrations(uri(), bondy_context:t()) -> { [bondy_registry_entry:t()], bondy_registry:continuation() | bondy_registry:eot() }. match_registrations(ProcUri, Ctxt) -> RealmUri = bondy_context:realm_uri(Ctxt), bondy_registry:match(registration, ProcUri, RealmUri). %% ----------------------------------------------------------------------------- %% @private %% @doc %% @end %% ----------------------------------------------------------------------------- -spec match_registrations(uri(), bondy_context:t(), map()) -> { [bondy_registry_entry:t()], bondy_registry:continuation() | bondy_registry:eot() }. match_registrations(ProcUri, Ctxt, Opts) -> RealmUri = bondy_context:realm_uri(Ctxt), bondy_registry:match(registration, ProcUri, RealmUri, Opts). %% ----------------------------------------------------------------------------- %% @private %% @doc %% @end %% ----------------------------------------------------------------------------- -spec match_registrations(bondy_registry:continuation()) -> { [bondy_registry_entry:t()], bondy_registry:continuation() | bondy_registry:eot() }. 
match_registrations({registration, _} = Cont) ->
    bondy_registry:match(Cont).


%% -----------------------------------------------------------------------------
%% @private
%% @doc Matches the registrations for `ProcUri' and applies `UserFun' to each
%% selected entry (via the configured invocation strategy) to perform the
%% actual INVOCATION. For every successful invocation a promise is enqueued so
%% that the callee's response (YIELD or ERROR) can later be correlated back to
%% the originating CALL and Caller, and so the call_timeout feature works.
%% If no registration matches, a wamp.error.no_such_procedure error is sent
%% back to the caller.
%% `UserFun' is applied as `UserFun(Entry, Callee, Ctxt)' and must return
%% `{ok, InvocationId, Ctxt1}'.
%% Throws {not_authorized, binary()}
%% @end
%% -----------------------------------------------------------------------------
-spec invoke(id(), uri(), function(), map(), bondy_context:t()) -> ok.

invoke(CallId, ProcUri, UserFun, Opts, Ctxt0) when is_function(UserFun, 3) ->
    %% Contrary to pubsub, the _Caller_ can receive the
    %% invocation even if the _Caller_ is also a _Callee_ registered
    %% for that procedure.
    case match_registrations(ProcUri, Ctxt0, #{}) of
        {[], ?EOT} ->
            %% No registration matched: reply with no_such_procedure.
            reply_error(no_such_procedure(ProcUri, CallId), Ctxt0);
        Regs ->
            %% We invoke Fun for each entry
            Fun = fun
                ({error, ErrorMap}, Ctxt1) when is_map(ErrorMap) ->
                    %% The load balancer (or strategy resolution) reported a
                    %% validation error; turn it into a CALL error.
                    ok = reply_error(error_from_map(ErrorMap, CallId), Ctxt1),
                    {ok, Ctxt1};
                ({error, noproc}, Ctxt1) ->
                    %% The local process associated with the entry
                    %% is no longer alive.
                    ok = reply_error(
                        no_such_procedure(ProcUri, CallId), Ctxt1),
                    {ok, Ctxt1};
                (Entry, Ctxt1) ->
                    Callee = bondy_registry_entry:peer_id(Entry),
                    %% We invoke the provided fun which actually makes the
                    %% invocation
                    {ok, InvocationId, Ctxt2} = UserFun(Entry, Callee, Ctxt1),
                    %% A promise is used to implement a capability and a
                    %% feature:
                    %% - the capability to match the callee response
                    %% (wamp_yield() or wamp_error()) back to the originating
                    %% wamp_call() and Caller
                    %% - the call_timeout feature at the dealer level
                    %% NOTE(review): the promise and realm are derived from
                    %% Ctxt1, not the Ctxt2 returned by UserFun — confirm this
                    %% is intentional (i.e. UserFun does not change realm or
                    %% peer identity).
                    Promise = bondy_rpc_promise:new(
                        InvocationId, CallId, ProcUri, Callee, Ctxt1),
                    RealmUri = bondy_context:realm_uri(Ctxt1),
                    %% We enqueue the promise with a timeout
                    ok = bondy_rpc_promise:enqueue(
                        RealmUri, Promise, bondy_utils:timeout(Opts)),
                    {ok, Ctxt2}
            end,
            invoke_aux(Regs, Fun, Opts, Ctxt0)
    end.


%% @private
%% Sends the error message back to the peer (caller) owning the context.
-spec reply_error(wamp_error(), bondy_context:t()) -> ok.

reply_error(Error, Ctxt) ->
    bondy:send(
        bondy_context:peer_id(Ctxt),
        Error
    ).
%% -----------------------------------------------------------------------------
%% @private
%% @doc Dequeues every pending invocation promise for call `CallId' and applies
%% `Fun' to each, recursing until the promise queue for the call is empty.
%% `Fun' is applied as `Fun(InvocationId, Callee, Ctxt)' and must return
%% `{ok, Ctxt1}'. `M' is the triggering WAMP message and is only used for
%% debug logging when no matching promise exists.
%% NOTE(review): the guard requires an arity-3 fun, yet the CANCEL handling in
%% this module constructs arity-2 funs `fun(Promise, Ctxt)' and passes them
%% here — verify which arity is actually expected by callers.
%% @end
%% -----------------------------------------------------------------------------
-spec dequeue_invocations(
    id(), wamp_message(), function(), bondy_context:t()) ->
    {ok, bondy_context:t()}.

dequeue_invocations(CallId, M, Fun, Ctxt) when is_function(Fun, 3) ->
    Caller = bondy_context:peer_id(Ctxt),

    case bondy_rpc_promise:dequeue_call(CallId, Caller) of
        empty ->
            %% Promises for this call were either interrupted by us,
            %% fulfilled or timed out and/or garbage collected.
            ok = no_matching_promise(M),
            {ok, Ctxt};
        {ok, P} ->
            ReqId = bondy_rpc_promise:invocation_id(P),
            Callee = bondy_rpc_promise:callee(P),
            {ok, Ctxt1} = Fun(ReqId, Callee, Ctxt),
            %% We iterate until there are no more pending invocation for the
            %% call_request_id == CallId
            dequeue_invocations(CallId, M, Fun, Ctxt1)
    end.


%% -----------------------------------------------------------------------------
%% @private
%% @doc Applies `Fun(Promise, Ctxt)' to each pending invocation promise for
%% call `CallId' without removing it from the queue, recursing until the
%% queue reports empty.
%% NOTE(review): peek_call does not remove the promise, so unless `Fun' (or a
%% concurrent process) removes it, this recursion may revisit the same promise
%% indefinitely — confirm the intended termination condition.
%% @end
%% -----------------------------------------------------------------------------
-spec peek_invocations(
    id(),
    fun((bondy_rpc_promise:t(), bondy_context:t()) ->
        {ok, bondy_context:t()}),
    bondy_context:t()) ->
    {ok, bondy_context:t()}.

peek_invocations(CallId, Fun, Ctxt) when is_function(Fun, 2) ->
    Caller = bondy_context:peer_id(Ctxt),

    case bondy_rpc_promise:peek_call(CallId, Caller) of
        empty ->
            {ok, Ctxt};
        {ok, Promise} ->
            {ok, Ctxt1} = Fun(Promise, Ctxt),
            %% We iterate until there are no more pending invocation for the
            %% call_request_id == CallId
            peek_invocations(CallId, Fun, Ctxt1)
    end.


%% @private
%% Logs (debug level) that a YIELD/ERROR/CANCEL arrived for which no promise
%% exists any longer, and returns ok.
no_matching_promise(M) ->
    %% Promise was fulfilled or timed out and/or garbage collected,
    %% we do nothing.
    ?LOG_DEBUG(#{
        description => "Message ignored",
        reason => no_matching_promise,
        message => M
    }),
    ok.
%% ============================================================================= %% PRIVATE - INVOCATION STRATEGIES (LOAD BALANCING, FAIL OVER, ETC) %% ============================================================================= %% @private invoke_aux({[], ?EOT}, _, _, _) -> ok; invoke_aux({L, ?EOT}, Fun, Opts, Ctxt) -> invoke_aux(L, Fun, Opts, Ctxt); invoke_aux({L, Cont}, Fun, Opts, Ctxt) -> ok = invoke_aux(L, Fun, Opts, Ctxt), invoke_aux(match_registrations(Cont), Fun, Opts, Ctxt); invoke_aux(L, Fun, Opts, Ctxt) when is_list(L) -> %% Registrations have different invocation strategies provided by the %% 'invoke' key. Triples = [{ bondy_registry_entry:uri(E), maps:get(invoke, bondy_registry_entry:options(E), ?INVOKE_SINGLE), E } || E <- L], invoke_aux(Triples, undefined, Fun, Opts, Ctxt). %% @private -spec invoke_aux( [{uri(), Strategy :: binary(), Entry :: tuple()}], Acc :: tuple() | undefined, Fun :: function(), Opts :: map(), Ctxt :: bondy_context:t()) -> ok. invoke_aux([], undefined, _, _, _) -> ok; invoke_aux([], {_, ?INVOKE_SINGLE, []}, _, _, _) -> ok; invoke_aux([], {_, Invoke, L}, Fun, Opts, Ctxt0) -> {ok, _Ctxt1} = do_invoke({Invoke, L}, Fun, Opts, Ctxt0), ok; invoke_aux([{Uri, ?INVOKE_SINGLE, E}|T], undefined, Fun, Opts, Ctxt0) -> {ok, Ctxt1} = do_invoke({?INVOKE_SINGLE, [E]}, Fun, Opts, Ctxt0), %% We add an accummulator to drop any subsequent matching Uris. invoke_aux(T, {Uri, ?INVOKE_SINGLE, []}, Fun, Opts, Ctxt1); invoke_aux( [{Uri, ?INVOKE_SINGLE, _}|T], {Uri, ?INVOKE_SINGLE, _} = Last, Fun, Opts, Ctxt) -> %% A single invocation strategy and we have multiple registrations so we %% ignore them %% Invoke should match too, otherwise there is an inconsistency %% in the registry invoke_aux(T, Last, Fun, Opts, Ctxt); invoke_aux([{Uri, Invoke, E}|T], undefined, Fun, Opts, Ctxt) -> %% We do not apply the invocation yet as it is not single, so we need %% to accummulate and apply at the end. %% We build a list for subsequent entries for same Uri. 
invoke_aux(T, {Uri, Invoke, [E]}, Fun, Opts, Ctxt); invoke_aux([{Uri, Invoke, E}|T], {Uri, Invoke, L}, Fun, Opts, Ctxt) -> %% We do not apply the invocation yet as it is not single, so we need %% to accummulate and apply at the end. %% We build a list for subsequent entries for same Uri. %% Invoke should match too, otherwise there is an inconsistency %% in the registry invoke_aux(T, {Uri, Invoke, [E|L]}, Fun, Opts, Ctxt); invoke_aux([{Uri, ?INVOKE_SINGLE, E}|T], {_, Invoke, L}, Fun, Opts, Ctxt0) -> %% We found a different Uri so we invoke the previous one {ok, Ctxt1} = do_invoke({Invoke, L}, Fun, Opts, Ctxt0), %% The new one is a single so we also invoke and continue {ok, Ctxt2} = do_invoke({?INVOKE_SINGLE, [E]}, Fun, Opts, Ctxt1), invoke_aux(T, {Uri, ?INVOKE_SINGLE, []}, Fun, Opts, Ctxt2); invoke_aux([{Uri, Invoke, E}|T], {_, Invoke, L}, Fun, Opts, Ctxt0) -> %% We found another Uri so we invoke the previous one {ok, Ctxt1} = do_invoke({Invoke, L}, Fun, Opts, Ctxt0), %% We do not apply the invocation yet as it is not single, so we need %% to accummulate and apply at the end. %% We build a list for subsequent entries for same Uri. invoke_aux(T, {Uri, Invoke, [E]}, Fun, Opts, Ctxt1). %% ----------------------------------------------------------------------------- %% @private %% @doc %% Implements load balancing and fail over invocation strategies. %% This works over a list of registration entries for the SAME %% procedure. %% @end %% ----------------------------------------------------------------------------- -spec do_invoke(term(), function(), map(), bondy_context:t()) -> {ok, bondy_context:t()}. do_invoke({Strategy, L}, Fun, CallOpts0, Ctxt) -> try Opts = load_balancer_options(Strategy, CallOpts0), case bondy_rpc_load_balancer:get(L, Opts) of {error, noproc} = Error -> %% We trid all callees in the list `L' but none was alive throw(Error); Entry -> Fun(Entry, Ctxt) end catch throw:{error, _} = ErrorMap -> %% Unexpected error ocurred Fun(ErrorMap, Ctxt) end. 
%% -----------------------------------------------------------------------------
%% @private
%% @doc Adds support for (Sharded Registration)
%% [https://wamp-proto.org/_static/gen/wamp_latest.html#sharded-registration]
%% by transforming the call runmode and rkey properties into the ones
%% expected by the extensions to REGISTER.Options in order to reuse Bondy's
%% jump_consistent_hash load balancing strategy.
%% @end
%% -----------------------------------------------------------------------------
load_balancer_options(Strategy, CallOpts0) ->
    coerce_routing_key(coerce_strategy(Strategy, CallOpts0)).


%% @private
%% A partitioned runmode forces the jump_consistent_hash strategy; any other
%% runmode keeps the registration's declared invocation strategy.
coerce_strategy(_, #{runmode := <<"partition">>} = Opts) ->
    Opts#{strategy => jump_consistent_hash};
coerce_strategy(Strategy, Opts) ->
    %% An invalid runmode value would have been caught by
    %% wamp_message's validation.
    Opts#{strategy => Strategy}.


%% @private
%% Copies the WAMP `rkey' property (when present) into the `x_routing_key'
%% option consumed by the load balancer.
coerce_routing_key(#{rkey := RKey} = Opts) ->
    Opts#{'x_routing_key' => RKey};
coerce_routing_key(Opts) ->
    Opts.


%% =============================================================================
%% PRIVATE: META EVENTS
%% =============================================================================


%% @private
%% Emits registration_created for the first registration of a procedure and
%% registration_added for every subsequent one.
on_register(true, NewEntry, EvtCtxt) ->
    bondy_event_manager:notify({registration_created, NewEntry, EvtCtxt});
on_register(false, NewEntry, EvtCtxt) ->
    bondy_event_manager:notify({registration_added, NewEntry, EvtCtxt}).


%% @private
%% Emits registration_removed when a registration is removed.
on_unregister(RemovedEntry, EvtCtxt) ->
    bondy_event_manager:notify({registration_removed, RemovedEntry, EvtCtxt}).


%% @private
%% on_delete(Map, Ctxt) ->
%%     bondy_event_manager:notify({registration_deleted, Map, Ctxt}).
%% @private
%% Builds a ?WAMP_INVALID_ARGUMENT CALL error carrying the validation error
%% map in the details.
error_from_map(Error, CallId) ->
    Mssg = <<"The request failed due to invalid option parameters.">>,
    wamp_message:error(
        ?CALL,
        CallId,
        #{},
        ?WAMP_INVALID_ARGUMENT,
        [Mssg],
        #{
            message => Mssg,
            details => Error,
            description => <<"A required options parameter was missing in the request or while present they were malformed.">>
        }
    ).


%% @private
%% Builds a ?WAMP_NO_SUCH_PROCEDURE CALL error for `ProcUri'.
no_such_procedure(ProcUri, CallId) ->
    Mssg = <<
        "There are no registered procedures matching the uri",
        $\s, $', ProcUri/binary, $', $.
    >>,
    wamp_message:error(
        ?CALL,
        CallId,
        #{},
        ?WAMP_NO_SUCH_PROCEDURE,
        [Mssg],
        #{
            message => Mssg,
            description => <<"Either no registration exists for the requested procedure or the match policy used did not match any registered procedures.">>
        }
    ).


%% @private
%% Builds a ?WAMP_NO_ELIGIBLE_CALLE error for a forwarded CALL or INVOCATION
%% whose callee is no longer available.
%% Fixed typos in the user-facing messages ("throught" -> "through",
%% "elibible" -> "eligible").
no_eligible_callee(call, CallId) ->
    Desc = <<"A call was forwarded through the router cluster for a callee that is no longer available.">>,
    no_eligible_callee(?CALL, CallId, Desc);
no_eligible_callee(invocation, CallId) ->
    Desc = <<"An invocation was forwarded through the router cluster to a callee that is no longer available.">>,
    no_eligible_callee(?INVOCATION, CallId, Desc).


%% @private
no_eligible_callee(Type, Id, Desc) ->
    Mssg = <<"There are no eligible callees for the procedure.">>,
    wamp_message:error(
        Type,
        Id,
        #{},
        ?WAMP_NO_ELIGIBLE_CALLE,
        [Mssg],
        #{message => Mssg, description => Desc}
    ).


%% @private
%% Converts a YIELD from the callee into the RESULT for the originating call.
yield_to_result(CallId, M) ->
    wamp_message:result(
        CallId,
        %% TODO check if yield.options should be assigned to result.details
        M#yield.options,
        M#yield.arguments,
        M#yield.arguments_kw
    ).


%% @private
%% Builds a ?WAMP_NO_SUCH_REGISTRATION error for an UNREGISTER whose id is
%% unknown.
not_found_error(M, _Ctxt) ->
    %% BUG FIX: the registration id was previously embedded with the
    %% `/integer` binary segment type, which truncates the id to its low 8
    %% bits; render the full id with integer_to_binary/1 instead.
    Mssg = iolist_to_binary(
        <<"There are no registered procedures matching the id ",
          $', (integer_to_binary(M#unregister.registration_id))/binary, $'>>
    ),
    wamp_message:error(
        ?UNREGISTER,
        M#unregister.request_id,
        #{},
        ?WAMP_NO_SUCH_REGISTRATION,
        [Mssg],
        #{
            message => Mssg,
            description => <<"The unregister request failed.">>
        }
    ).
%% @private
%% Throws {not_authorized, Reason} when `Uri' falls under one of Bondy's
%% reserved namespaces ("com.leapsight.bondy", "bondy", "wamp"); returns ok
%% otherwise.
maybe_reserved_ns(Uri) ->
    case Uri of
        <<"com.leapsight.bondy", _/binary>> ->
            throw({not_authorized, ?RESERVED_NS(<<"com.leapsight.bondy">>)});
        <<"bondy", _/binary>> ->
            throw({not_authorized, ?RESERVED_NS(<<"bondy">>)});
        <<"wamp", _/binary>> ->
            throw({not_authorized, ?RESERVED_NS(<<"wamp">>)});
        _ ->
            ok
    end.
apps/bondy/src/bondy_dealer.erl
0.603114
0.445349
bondy_dealer.erl
starcoder
%%%------------------------------------------------------------------------ %% Copyright 2019, OpenTelemetry Authors %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %% %% @doc %% @end %%%----------------------------------------------------------------------- -module(otel_exporter). -export([init/1, export/4, shutdown/1, report_cb/1]). %% Do any initialization of the exporter here and return configuration %% that will be passed along with a list of spans to the `export' function. -callback init(term()) -> {ok, term()} | ignore. %% This function is called when the configured interval expires with any %% spans that have been collected so far and the configuration returned in `init'. %% Do whatever needs to be done to export each span here, the caller will block %% until it returns. -callback export(ets:tab(), otel_resource:t(), term()) -> ok | success | failed_not_retryable | failed_retryable. -callback shutdown(term()) -> ok. -include_lib("kernel/include/logger.hrl"). 
%% @doc Initializes the exporter given as `undefined', a module name, or a
%% `{Module, Config}' tuple. Returns `{Module, State}' when `Module:init/1'
%% succeeds, or `undefined' when there is no exporter, the exporter asked to
%% be ignored, or its initialization failed (the failure is logged).
init(undefined) ->
    undefined;
init({ExporterModule, Config}) when is_atom(ExporterModule) ->
    try ExporterModule:init(Config) of
        {ok, ExporterState} when ExporterModule =:= opentelemetry_exporter ->
            %% since we log when the initialization failed so the user knows it
            %% later succeeded
            ?LOG_INFO("OTLP exporter successfully initialized"),
            {ExporterModule, ExporterState};
        {ok, ExporterState} ->
            ?LOG_INFO("Exporter ~tp successfully initialized", [ExporterModule]),
            {ExporterModule, ExporterState};
        ignore ->
            undefined
    catch
        Kind:Reason:StackTrace ->
            %% logging in debug level since config argument in stacktrace could
            %% have secrets
            ?LOG_DEBUG(#{source => exporter,
                         during => init,
                         kind => Kind,
                         reason => Reason,
                         exporter => ExporterModule,
                         stacktrace => StackTrace},
                       #{report_cb => fun ?MODULE:report_cb/1}),

            %% print a more useful message about the failure if we can discern
            %% one from the failure reason and exporter used
            case {Kind, Reason} of
                {error, badarg} when ExporterModule =:= opentelemetry_exporter ->
                    case maps:get(protocol, Config, undefined) of
                        grpc ->
                            %% grpc protocol uses grpcbox which is not included
                            %% by default; this will check if it is available so
                            %% we can warn the user if the dependency needs to
                            %% be added
                            try grpcbox:module_info() of
                                _ ->
                                    undefined
                            catch
                                _:_ ->
                                    ?LOG_WARNING(
                                      "OTLP exporter failed to initialize when using the GRPC "
                                      "protocol and `grpcbox` module is not available in the "
                                      "code path. Verify that you have the `grpcbox` dependency "
                                      "included and rerun."),
                                    undefined
                            end;
                        _ ->
                            %% same as the debug log above
                            %% without the stacktrace and at a higher level
                            ?LOG_WARNING(#{source => exporter,
                                           during => init,
                                           kind => Kind,
                                           reason => Reason,
                                           exporter => ExporterModule},
                                         #{report_cb => fun ?MODULE:report_cb/1}),
                            undefined
                    end;
                {error, undef} when ExporterModule =:= opentelemetry_exporter ->
                    %% BUG FIX: this log call previously passed the argument
                    %% list [ExporterModule] with a format string containing no
                    %% control sequences, which makes logger report a format
                    %% error instead of the message. The module name is already
                    %% spelled out in the text, so no arguments are needed.
                    ?LOG_WARNING(
                      "OTLP exporter module `opentelemetry_exporter` not found. "
                      "Verify you have included the `opentelemetry_exporter` dependency."),
                    undefined;
                {error, undef} ->
                    ?LOG_WARNING("Exporter module ~tp not found. Verify you have included "
                                 "the dependency that contains the exporter module.",
                                 [ExporterModule]),
                    undefined;
                _ ->
                    %% same as the debug log above
                    %% without the stacktrace and at a higher level
                    ?LOG_WARNING(#{source => exporter,
                                   during => init,
                                   kind => Kind,
                                   reason => Reason,
                                   exporter => ExporterModule},
                                 #{report_cb => fun ?MODULE:report_cb/1}),
                    undefined
            end
    end;
init(ExporterModule) when is_atom(ExporterModule) ->
    %% Bare module name: initialize with an empty config list.
    init({ExporterModule, []}).

%% @doc Delegates the export of the spans in `SpansTid' to the exporter module.
export(ExporterModule, SpansTid, Resource, Config) ->
    ExporterModule:export(SpansTid, Resource, Config).

%% @doc Shuts down the exporter; a no-op when no exporter was configured.
shutdown(undefined) ->
    ok;
shutdown({ExporterModule, Config}) ->
    ExporterModule:shutdown(Config).

%% @doc logger report callback: formats the init-failure reports produced
%% above (with or without a stacktrace) into `{Format, Args}'.
report_cb(#{source := exporter,
            during := init,
            kind := Kind,
            reason := Reason,
            exporter := opentelemetry_exporter,
            stacktrace := StackTrace}) ->
    {"OTLP exporter failed to initialize: ~ts",
     [otel_utils:format_exception(Kind, Reason, StackTrace)]};
report_cb(#{source := exporter,
            during := init,
            kind := Kind,
            reason := Reason,
            exporter := ExporterModule,
            stacktrace := StackTrace}) ->
    {"Exporter ~tp failed to initialize: ~ts",
     [ExporterModule, otel_utils:format_exception(Kind, Reason, StackTrace)]};
report_cb(#{source := exporter,
            during := init,
            kind := Kind,
            reason := Reason,
            exporter := opentelemetry_exporter}) ->
    {"OTLP exporter failed to initialize with exception ~tp:~tp", [Kind, Reason]};
report_cb(#{source := exporter,
            during := init,
            kind := Kind,
            reason := Reason,
            exporter := ExporterModule}) ->
    {"Exporter ~p failed to initialize with exception ~tp:~tp",
     [ExporterModule, Kind, Reason]}.
apps/opentelemetry/src/otel_exporter.erl
0.601477
0.408277
otel_exporter.erl
starcoder
%%-------------------------------------------------------------------- %% Copyright (c) 2019-2021 EMQ Technologies Co., Ltd. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %%-------------------------------------------------------------------- -module(ekka_ct). -compile(export_all). -compile(nowarn_export_all). -include_lib("snabbkaffe/include/snabbkaffe.hrl"). %% @doc Get all the test cases in a CT suite. all(Suite) -> lists:usort([F || {F, 1} <- Suite:module_info(exports), string:substr(atom_to_list(F), 1, 2) == "t_" ]). cleanup(Testcase) -> ct:pal("Cleaning up after ~p...", [Testcase]), mria:stop(), mnesia:stop(), ok = mnesia:delete_schema([node()]). start_slave(NodeOrEkka, #{name := Name, env := Env}) -> start_slave(NodeOrEkka, Name, Env); start_slave(NodeOrEkka, Name) when is_atom(Name) -> start_slave(NodeOrEkka, Name, []). start_ekka(#{node := Node, join_to := JoinTo}) -> ok = rpc:call(Node, ekka, start, []), case rpc:call(Node, ekka, join, [JoinTo]) of ok -> ok; ignore -> ok end, Node. 
%% Starts a slave node (optionally with ekka started on it) for cluster tests.
%% `Env' is a list of {App, Key, Val} application environment tuples applied
%% after the apps are loaded, so they are not overridden during ekka start.
start_slave(node, Name, Env) ->
    CommonBeamOpts =
        "+S 1:1 " % We want VMs to only occupy a single core
        "-kernel inet_dist_listen_min 3000 "
        "-kernel inet_dist_listen_max 3050 ",
    {ok, Node} = slave:start_link(host(), Name, CommonBeamOpts ++ ebin_path()),
    %% Load apps before setting the enviroment variables to avoid
    %% overriding the environment during ekka start:
    {ok, _} = cover:start([Node]),
    CommonEnv = [{gen_rpc, port_discovery, stateless}],
    setenv(Node, Env ++ CommonEnv),
    ok = snabbkaffe:forward_trace(Node),
    Node;
start_slave(ekka, Name, Env) ->
    Node = start_slave(node, Name, Env),
    ok = rpc:call(Node, ekka, start, []),
    Node.

%% Waits (default 30s) until ekka reports itself running on Node;
%% throws {wait_timeout, Node} when the deadline passes.
wait_running(Node) ->
    wait_running(Node, 30000).

wait_running(Node, Timeout) when Timeout < 0 ->
    throw({wait_timeout, Node});
wait_running(Node, Timeout) ->
    %% Poll every 100ms.
    case rpc:call(Node, ekka, is_running, [Node, ekka]) of
        true  -> ok;
        false -> timer:sleep(100),
                 wait_running(Node, Timeout - 100)
    end.

%% Stops cover and mnesia on the slave, deletes its schema and stops the node.
stop_slave(Node) ->
    ok = cover:stop([Node]),
    rpc:call(Node, mnesia, stop, []),
    mnesia:delete_schema([Node]),
    slave:stop(Node).

%% Returns the host part of the local node name ("name@host").
host() ->
    [_, Host] = string:tokens(atom_to_list(node()), "@"),
    Host.

%% Builds a "-pa dir1 -pa ..." style string of the non-OTP code path entries,
%% used as extra BEAM arguments when starting slaves.
ebin_path() ->
    string:join(["-pa" | lists:filter(fun is_lib/1, code:get_path())], " ").

%% True when Path is NOT under the OTP lib dir (string:prefix/2 returns
%% `nomatch' when the prefix is absent) — i.e. it keeps only project/deps
%% ebin directories. NOTE(review): the name reads inverted relative to what
%% it returns.
is_lib(Path) ->
    string:prefix(Path, code:lib_dir()) =:= nomatch.

%% Renders a list of terms as one CSV line terminated by a newline.
vals_to_csv(L) ->
    string:join([lists:flatten(io_lib:format("~p", [N])) || N <- L], ",") ++ "\n".

%% Applies {App, Key, Val} environment tuples on the remote node.
setenv(Node, Env) ->
    [rpc:call(Node, application, set_env, [App, Key, Val])
     || {App, Key, Val} <- Env].
test/ekka_ct.erl
0.560734
0.473231
ekka_ct.erl
starcoder
%% --------------------------------------------------------------------- %% %% Copyright (c) 2007-2013 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% --------------------------------------------------------------------- %% @doc Log access to Riak CS. This is where I/O stats are %% computed and recorded. %% %% I/O stats are expected as notes in the webmachine log data, with %% keys of the form `{access, KEY}'. I/O stats are only logged if a %% note is included with the key `user' and a value that is a %% `#rcs_user_v2{}' record. %% %% That is, to log I/O stats for a request, call %% %% ``` %% wrq:add_note({access, user}, User=#rcs_user_v2{}, RD) %% ''' %% %% somewhere in your resource. To add another stat, for instance %% `gets', add another note: %% %% ``` %% wrq:add_note({access, gets}, 1, RD) %% ''' %% %% Notes other than `user' are expected to be simple numbers, and all %% notes for each key for a user will be summed for archival %% periodically. %% %% The stat `bytes_out' is logged automatically from the log data %% field `bytes'. %% %% The log is flushed to Riak at an interval specified by the %% `riak_cs' application environment variable %% `access_log_flush_factor'. How often to flush the access log; %% integer factor of `access_archive_period' %% (1 == once per period; 2 == twice per period, etc.) 
%% This number should evenly divide the `access_archive_period' %% setting, or results of later queries may miss information. -module(riak_cs_access_log_handler). -behaviour(gen_event). %% Public API -export([expect_bytes_out/2, flush/1, set_bytes_in/2, set_user/2]). %% gen_event callbacks -export([init/1, handle_call/2, handle_event/2, handle_info/2, terminate/2, code_change/3]). -include_lib("webmachine/include/webmachine_logger.hrl"). -include("riak_cs.hrl"). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -record(state, { period :: integer(), %% time between aggregation archivals max_size :: integer(), %% max accesses between archivals size :: integer(), %% num. accesses since last archival current :: {calendar:datetime(), calendar:datetime()}, %% current agg. slice archive :: reference(), %% reference for archive msg table :: ets:tid() %% the table aggregating stats }). -type state() :: #state{}. %% =================================================================== %% Public API %% =================================================================== flush(Timeout) -> Now = calendar:universal_time(), case catch webmachine_log:call(?MODULE, {flush, Now}, Timeout) of ok -> ok; {'EXIT',{Reason,_}} -> Reason end. %%%=================================================================== %%% Non-server API (Webmachine Notes) %%%=================================================================== -define(STAT(Name), {access, Name}). -define(EXPECT_BYTES_OUT, expect_bytes_out). -define(BYTES_IN, bytes_in). %% @doc Set the Riak CS user for this request. Stats are not recorded if %% the user is not set. set_user(KeyID, RD) when is_list(KeyID) -> wrq:add_note(?STAT(user), KeyID, RD); set_user(?RCS_USER{key_id=KeyID}, RD) -> wrq:add_note(?STAT(user), KeyID, RD); set_user(undefined, RD) -> RD; set_user(unknown, RD) -> RD. 
%% @doc Tell the logger that this resource expected to send `Count'
%% bytes, such that it can classify the count it actually receives as
%% complete or incomplete.
expect_bytes_out(Count, RD) when is_integer(Count) ->
    wrq:add_note(?EXPECT_BYTES_OUT, Count, RD).

%% @doc Note that this resource received `Count' bytes from the
%% request body.
set_bytes_in(Count, RD) when is_integer(Count) ->
    wrq:add_note(?BYTES_IN, Count, RD).

%% ===================================================================
%% gen_event callbacks
%% ===================================================================

%% @private
%% Reads the flush interval and maximum log size from configuration,
%% creates the aggregation table, and schedules the first archival.
%% If either config lookup fails, the whole node is stopped (see the
%% comment below for why we cannot simply return an error).
init(_) ->
    case {riak_cs_access:log_flush_interval(),
          riak_cs_access:max_flush_size()} of
        {{ok, LogPeriod}, {ok, FlushSize}} ->
            T = fresh_table(),
            %% accuracy in recording: say the first slice starts *now*, not
            %% at the just-passed boundary
            Start = calendar:universal_time(),
            {_,End} = rts:slice_containing(Start, LogPeriod),
            C = {Start, End},
            InitState = #state{period=LogPeriod,
                               table=T,
                               current=C,
                               max_size=FlushSize,
                               size=0},
            %% SchedState is bound in both branches, so it is safely
            %% exported from the case expression below.
            case schedule_archival(InitState) of
                {ok, SchedState} ->
                    ok;
                {error, _Behind} ->
                    %% startup was right on a boundary, just try again,
                    %% and fail if this one also fails
                    {ok, SchedState} = schedule_archival(InitState)
            end,
            {ok, SchedState};
        {{error, Reason}, _} ->
            _ = lager:error("Error starting access logger: ~s", [Reason]),
            %% can't simply {error, Reason} out here, because
            %% webmachine/mochiweb will just ignore the failed
            %% startup; using init:stop/0 here so that the user isn't
            %% surprised later when there are no logs
            init:stop();
        {_, {error, Reason}} ->
            _ = lager:error("Error starting access logger: ~s", [Reason]),
            init:stop()
    end.

%% @private
%% Synchronous flush request (see flush/1): archive immediately and
%% reply `ok'.  All other calls are acknowledged without effect.
handle_call({flush, FlushEnd}, State) ->
    NewState = force_archive(State, FlushEnd),
    {ok, ok, NewState};
handle_call(_Request, State) ->
    {ok, ok, State}.
%% @private
%% A log entry with `notes=undefined' means webmachine never routed
%% the request to a resource; there is nothing to account, so only
%% emit a debug line.
handle_event({log_access, #wm_log_data{notes=undefined,
                                       method=Method,
                                       path=Path,
                                       headers=Headers}}, State) ->
    lager:debug("No WM route: ~p ~s ~p\n", [Method, Path, Headers]),
    {ok, State};
handle_event({log_access, LogData},
             #state{table=T, size=S, max_size=MaxS}=State) ->
    %% Updates for quotas
    case lists:keyfind(?STAT(user), 1, LogData#wm_log_data.notes) of
        {?STAT(user), Key} ->
            _ = riak_cs_quota:update_all_states(Key, LogData);
        false ->
            ignore
    end,
    %% Aggregate the access into the ETS table; flush early if the
    %% table has grown past max_size before the scheduled archival.
    case access_record(LogData) of
        {ok, Access} ->
            ets:insert(T, Access),
            case S+1 < MaxS of
                true ->
                    %% still a "small" log; keep going
                    {ok, State#state{size=S+1}};
                false ->
                    %% log is now "big"; flush it
                    {ok, force_archive(State, calendar:universal_time())}
            end;
        _ ->
            %% no user note on the request: nothing to record
            {ok, State}
    end;
handle_event(_Event, State) ->
    {ok, State}.

%% @private
%% Scheduled archival tick.  The reference in the message must match
%% the one stored in state, so stale timers from earlier slices are
%% ignored by the catch-all clause below.
handle_info({archive, Ref}, #state{archive=Ref}=State) ->
    NewState = do_archive(State),
    case schedule_archival(NewState) of
        {ok, _}=OkRes ->
            OkRes;
        {error, Behind} ->
            %% if the logger is so far behind that it has already
            %% missed the time that the next archival should happen,
            %% just bounce the server to clear up the backlog -- this
            %% decision could be changed to some heuristic based on
            %% number of seconds and number of messages behind, if the
            %% simple "missed window" is too lossy
            [{message_queue_len, MessageCount}] =
                process_info(self(), [message_queue_len]),
            _ = lager:error("Access logger is running ~b seconds behind,"
                            " skipping ~p log messages to catch up",
                            [Behind, MessageCount]),
            remove_handler
    end;
handle_info(_Info, State) ->
    {ok, State}.

%% @private
terminate(_Reason, _State) ->
    ok.

%% @private
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.

%% ===================================================================
%% Internal functions
%% ===================================================================

%% @doc Create a new ets table to accumulate accesses in.
-spec fresh_table() -> ets:tid().
%% duplicate_bag: a user may have many accesses in one slice, all
%% keyed by the same user key (keypos 1).
fresh_table() ->
    ets:new(?MODULE, [private, duplicate_bag, {keypos, 1}]).

%% @doc Schedule a message to be sent when it's time to archive this
%% slice's accumulated accesses.  Returns `{error, SecondsBehind}' if
%% the end of the current slice is already in the past.
-spec schedule_archival(state()) -> {ok, state()} | {error, integer()}.
schedule_archival(#state{current={_,E}}=State) ->
    Ref = make_ref(),
    Now = calendar:datetime_to_gregorian_seconds(
            calendar:universal_time()),
    TL = calendar:datetime_to_gregorian_seconds(E)-Now,
    case TL < 0 of
        false ->
            _ = lager:debug("Next access archival in ~b seconds", [TL]),
            %% time left is in seconds, we need milliseconds
            erlang:send_after(TL*1000, self(), {archive, Ref}),
            {ok, State#state{archive=Ref}};
        true ->
            {error, -TL}
    end.

%% @doc Archive immediately, ending the archived slice at `FlushEnd'
%% instead of at the scheduled slice boundary.
force_archive(#state{current=C}=State, FlushEnd) ->
    %% record this archive as not filling the whole slice
    {SliceStart, SliceEnd} = C,
    NewState = do_archive(State#state{current={SliceStart, FlushEnd}}),
    %% Now continue waiting for the archive message for this slice,
    %% but mark the next archive as not filling the whole slice as well
    NewState#state{current={FlushEnd, SliceEnd}}.

%% @doc Send the current slice's accumulated accesses to the archiver
%% for storage.  Create a clean table to store the next slice's accesses.
-spec do_archive(state()) -> state().
do_archive(#state{period=P, table=T, current=C}=State) ->
    _ = lager:debug("Rolling access for ~p", [C]),
    %% archiver takes ownership of the table, and deletes it when done
    riak_cs_access_archiver_manager:archive(T, C),
    %% create a fresh table for use here
    NewT = fresh_table(),
    NewC = rts:next_slice(C, P),
    State#state{table=NewT, current=NewC, size=0}.

%% @doc Digest a Webmachine log data record, and produce a record for
%% the access table.  Returns `ignore' when no user note is attached
%% to the request (accesses are only accounted per user).
-spec access_record(#wm_log_data{}) ->
         {ok, {iodata(), {binary(), list()}}} | ignore.
access_record(#wm_log_data{notes=Notes}=Log) ->
    case lists:keyfind(?STAT(user), 1, Notes) of
        {?STAT(user), Key} ->
            {ok, {Key, {operation(Log), stats(Log)}}};
        false ->
            ignore
    end.
%% @doc Map a Webmachine log record to the name of the S3-style
%% operation it represents, keyed on the resource module that served
%% the request and (where relevant) the HTTP method.
operation(#wm_log_data{resource_module=riak_cs_wm_usage}) ->
    <<"UsageRead">>;
operation(#wm_log_data{resource_module=riak_cs_wm_buckets}) ->
    <<"ListBuckets">>;
operation(#wm_log_data{resource_module=riak_cs_wm_user}) ->
    <<"AccountRead">>;
operation(#wm_log_data{resource_module=riak_cs_wm_bucket_acl, method='GET'}) ->
    <<"BucketReadACL">>;
operation(#wm_log_data{resource_module=riak_cs_wm_bucket_acl, method='HEAD'}) ->
    <<"BucketStatACL">>;
operation(#wm_log_data{resource_module=riak_cs_wm_bucket_acl, method='PUT'}) ->
    <<"BucketWriteACL">>;
operation(#wm_log_data{resource_module=riak_cs_wm_bucket_acl}) ->
    <<"BucketUnknownACL">>;
operation(#wm_log_data{resource_module=riak_cs_wm_bucket, method='HEAD'}) ->
    <<"BucketStat">>;
operation(#wm_log_data{resource_module=riak_cs_wm_bucket, method='PUT'}) ->
    <<"BucketCreate">>;
operation(#wm_log_data{resource_module=riak_cs_wm_bucket, method='DELETE'}) ->
    <<"BucketDelete">>;
operation(#wm_log_data{resource_module=riak_cs_wm_bucket}) ->
    <<"BucketUnknown">>;
operation(#wm_log_data{resource_module=riak_cs_wm_objects}) ->
    <<"BucketRead">>;
operation(#wm_log_data{resource_module=riak_cs_wm_object_acl, method='GET'}) ->
    <<"KeyReadACL">>;
operation(#wm_log_data{resource_module=riak_cs_wm_object_acl, method='HEAD'}) ->
    <<"KeyStatACL">>;
operation(#wm_log_data{resource_module=riak_cs_wm_object_acl, method='PUT'}) ->
    <<"KeyWriteACL">>;
operation(#wm_log_data{resource_module=riak_cs_wm_object_acl}) ->
    <<"KeyUnknownACL">>;
operation(#wm_log_data{resource_module=riak_cs_wm_object, method='GET'}) ->
    <<"KeyRead">>;
operation(#wm_log_data{resource_module=riak_cs_wm_object, method='HEAD'}) ->
    <<"KeyStat">>;
operation(#wm_log_data{resource_module=riak_cs_wm_object, method='PUT'}) ->
    <<"KeyWrite">>;
operation(#wm_log_data{resource_module=riak_cs_wm_object, method='DELETE'}) ->
    <<"KeyDelete">>;
operation(#wm_log_data{resource_module=riak_cs_wm_object}) ->
    <<"KeyUnknown">>;
operation(#wm_log_data{method=Method}) ->
    %% catch-all: label by method so requests served by unexpected
    %% resources are still counted rather than crashing the logger
    iolist_to_binary([<<"Unknown">>,
                      atom_to_binary(Method, latin1)]).

%% @doc Produce the list of stat counters for one request, as an
%% orddict (key-ordered proplist).  Keys are prefixed with
%% "SystemError"/"UserError" for 5xx/4xx responses, and the BytesOut
%% key carries an "Incomplete" suffix when fewer bytes than promised
%% via expect_bytes_out/2 were sent.
stats(#wm_log_data{response_code=CodeOrCodeAndReason,
                   notes=Notes,
                   headers=Headers,
                   method=Method,
                   response_length=Length}) ->
    Code = extract_code(CodeOrCodeAndReason),
    Prefix = if Code >= 500 -> <<"SystemError">>;
                Code >= 400 -> <<"UserError">>;
                true        -> <<"">>
             end,
    BytesIn = case lists:keyfind(?BYTES_IN, 1, Notes) of
                  {?BYTES_IN, BI} ->
                      BI;
                  false ->
                      %% no explicit note: fall back to the request's
                      %% Content-Length header; a missing or malformed
                      %% header counts as zero bytes in
                      CLS = mochiweb_headers:get_value(
                              "content-length", Headers),
                      try list_to_integer(CLS)
                      catch error:badarg -> 0
                      end
              end,
    %% HEAD responses send no body regardless of response_length
    BytesOut = case Method of
                   'HEAD' -> 0;
                   _ -> Length
               end,
    BytesOutType =
        case lists:keyfind(?EXPECT_BYTES_OUT, 1, Notes) of
            {?EXPECT_BYTES_OUT, EL} when EL /= Length ->
                <<"Incomplete">>;
            %% BUGFIX: this clause must also match the note tuple when
            %% the expected length equals the actual length; the
            %% previous `false ->' clause crashed with case_clause
            %% whenever a response completed exactly as promised.
            _ ->
                <<"">>
        end,
    %% KEEP THIS IN ORDER, so that it's an orddict
    %% (the [X || Bool] comprehensions emit their element only when
    %% the already-bound counter is positive)
    lists:flatten(
      [[{iolist_to_binary([Prefix,<<"BytesIn">>]), BytesIn}
        || BytesIn > 0],
       [{iolist_to_binary([Prefix,<<"BytesOut">>,BytesOutType]), BytesOut}
        || BytesOut > 0],
       {iolist_to_binary([Prefix,<<"Count">>]), 1}]).

%% @doc Extract the HTTP response code from either
%% a two-tuple of code and reason, or just the raw code.
-spec extract_code({integer(), term()} | integer()) -> integer().
extract_code({Code, _Reason}) when is_integer(Code) ->
    Code;
extract_code(Code) when is_integer(Code) ->
    Code.
src/riak_cs_access_log_handler.erl
0.695235
0.504639
riak_cs_access_log_handler.erl
starcoder
%% @doc %% Collects information about the sockets and processes involved %% in the Erlang distribution mechanism. %% %% All metrics include a label 'peer' that indicates which %% distributed connection the metric is about. %% %% ==Exported metrics== %% %% Metrics pertaining to processes may apply to three different types %% of proccesses depending on the distribution transport: %% `type="dist"', `type="tls_connection"' or `type="tls_sender"'. %% %% <ul> %% <li> %% `erlang_vm_dist_recv_bytes'<br/> %% Type: gauge.<br/> %% Number of bytes received by the socket. %% </li> %% <li> %% `erlang_vm_dist_recv_cnt'<br/> %% Type: gauge.<br/> %% Number of packets received by the socket. %% </li> %% <li> %% `erlang_vm_dist_recv_max_bytes'<br/> %% Type: gauge.<br/> %% Size of the largest packet, in bytes, received by the socket. %% </li> %% <li> %% `erlang_vm_dist_recv_avg_bytes'<br/> %% Type: gauge.<br/> %% Average size of packets, in bytes, received by the socket. %% </li> %% <li> %% `erlang_vm_dist_recv_dvi_bytes'<br/> %% Type: gauge.<br/> %% Average packet size deviation, in bytes, received by the socket. %% </li> %% <li> %% `erlang_vm_dist_send_bytes'<br/> %% Type: gauge.<br/> %% Number of bytes sent from the socket. %% </li> %% <li> %% `erlang_vm_dist_send_cnt'<br/> %% Type: gauge.<br/> %% Number of packets sent from the socket. %% </li> %% <li> %% `erlang_vm_dist_send_max_bytes'<br/> %% Type: gauge.<br/> %% Size of the largest packet, in bytes, sent from the socket. %% </li> %% <li> %% `erlang_vm_dist_send_avg_bytes'<br/> %% Type: gauge.<br/> %% Average size of packets, in bytes, sent from the socket. %% </li> %% <li> %% `erlang_vm_dist_send_pend_bytes'<br/> %% Type: gauge.<br/> %% Number of bytes waiting to be sent by the socket. %% </li> %% <li> %% `erlang_vm_dist_port_input_bytes'<br/> %% Type: gauge.<br/> %% The total number of bytes read from the port. 
%% </li> %% <li> %% `erlang_vm_dist_port_output_bytes'<br/> %% Type: gauge.<br/> %% The total number of bytes written to the port. %% </li> %% <li> %% `erlang_vm_dist_port_memory_bytes'<br/> %% Type: gauge.<br/> %% The total number of bytes allocated for this port by the runtime system. %% The port itself can have allocated memory that is not included. %% </li> %% <li> %% `erlang_vm_dist_port_queue_size_bytes'<br/> %% Type: gauge.<br/> %% The total number of bytes queued by the port using the ERTS driver queue implementation. %% </li> %% <li> %% `erlang_vm_dist_proc_memory_bytes'<br/> %% Type: gauge.<br/> %% The size in bytes of the process. This includes call stack, heap, and internal structures. %% </li> %% <li> %% `erlang_vm_dist_proc_heap_size_words'<br/> %% Type: gauge.<br/> %% The size in words of the youngest heap generation of the process. %% This generation includes the process stack. This information is %% highly implementation-dependent, and can change if the implementation changes. %% </li> %% <li> %% `erlang_vm_dist_proc_min_heap_size_words'<br/> %% Type: gauge.<br/> %% The minimum heap size for the process. %% </li> %% <li> %% `erlang_vm_dist_proc_min_bin_vheap_size_words'<br/> %% Type: gauge.<br/> %% The minimum binary virtual heap size for the process. %% </li> %% <li> %% `erlang_vm_dist_proc_stack_size_words'<br/> %% Type: gauge.<br/> %% The stack size, in words, of the process. %% </li> %% <li> %% `erlang_vm_dist_proc_total_heap_size_words'<br/> %% Type: gauge.<br/> %% The total size, in words, of all heap fragments of the process. %% This includes the process stack and any unreceived messages that %% are considered to be part of the heap. %% </li> %% <li> %% `erlang_vm_dist_proc_message_queue_len'<br/> %% Type: gauge.<br/> %% The number of messages currently in the message queue of the process. %% </li> %% <li> %% `erlang_vm_dist_proc_reductions'<br/> %% Type: gauge.<br/> %% The number of reductions executed by the process. 
%% </li> %% <li> %% `erlang_vm_dist_proc_status'<br/> %% Type: gauge.<br/> %% The current status of the distribution process.<br/> %% The status is represented as a numerical value where `exiting=1', %% `suspended=2', `runnable=3', `garbage_collecting=4', `running=5' %% and `waiting=6'. %% </li> %% <li> %% `erlang_vm_dist_node_state'<br/> %% Type: gauge.<br/> %% The current state of the distribution link.<br/> %% The state is represented as a numerical value where `pending=1', %% `up_pending=2' and `up=3'. %% </li> %% </ul> %% %% ==Configuration== %% %% Metrics exported by this collector can be configured via %% `vm_dist_collector_metrics' key of `prometheus' app environment. %% %% Available options: %% <ul> %% <li> %% `recv_bytes' for `erlang_vm_dist_recv_bytes'. %% </li> %% <li> %% `recv_cnt' for `erlang_vm_dist_recv_cnt'. %% </li> %% <li> %% `recv_max_bytes' for `erlang_vm_dist_recv_max_bytes'. %% </li> %% <li> %% `recv_avg_bytes' for `erlang_vm_dist_recv_avg_bytes'. %% </li> %% <li> %% `recv_dvi_bytes' for `erlang_vm_dist_recv_dvi_bytes'. %% </li> %% <li> %% `send_bytes' for `erlang_vm_dist_send_bytes'. %% </li> %% <li> %% `send_cnt' for `erlang_vm_dist_send_cnt'. %% </li> %% <li> %% `send_max_bytes' for `erlang_vm_dist_send_max_bytes'. %% </li> %% <li> %% `send_avg_bytes' for `erlang_vm_dist_send_avg_bytes'. %% </li> %% <li> %% `send_pend_bytes' for `erlang_vm_dist_send_pend_bytes'. %% </li> %% <li> %% `port_input_bytes' for `erlang_vm_dist_port_input_bytes'. %% </li> %% <li> %% `port_output_bytes' for `erlang_vm_dist_port_output_bytes'. %% </li> %% <li> %% `port_memory_bytes' for `erlang_vm_dist_port_memory_bytes'. %% </li> %% <li> %% `port_queue_size_bytes' for `erlang_vm_dist_port_queue_size_bytes'. %% </li> %% <li> %% `proc_memory_bytes' for `erlang_vm_dist_proc_memory_bytes'. %% </li> %% <li> %% `proc_heap_size_words' for `erlang_vm_dist_proc_heap_size_words'. %% </li> %% <li> %% `proc_min_heap_size_words' for `erlang_vm_dist_proc_min_heap_size_words'. 
%% </li>
%% <li>
%% `proc_min_bin_vheap_size_words' for `erlang_vm_dist_proc_min_bin_vheap_size_words'.
%% </li>
%% <li>
%% `proc_stack_size_words' for `erlang_vm_dist_proc_stack_size_words'.
%% </li>
%% <li>
%% `proc_total_heap_size_words' for `erlang_vm_dist_proc_total_heap_size_words'.
%% </li>
%% <li>
%% `proc_message_queue_len' for `erlang_vm_dist_proc_message_queue_len'.
%% </li>
%% <li>
%% `proc_reductions' for `erlang_vm_dist_proc_reductions'.
%% </li>
%% <li>
%% `proc_status' for `erlang_vm_dist_proc_status'.
%% </li>
%% <li>
%% `node_state' for `erlang_vm_dist_node_state'.
%% </li>
%% </ul>
%%
%% By default all metrics are enabled.
%% @end
-module(prometheus_vm_dist_collector).

-export([deregister_cleanup/1,
         collect_mf/2]).

-import(prometheus_model_helpers, [create_mf/4]).

-include("prometheus.hrl").

-behaviour(prometheus_collector).

%%====================================================================
%% Macros
%%====================================================================

%% Prepended to every metric name by ?METRIC_NAME (see prometheus.hrl).
-define(METRIC_NAME_PREFIX, "erlang_vm_dist_").

%%====================================================================
%% Collector API
%%====================================================================

%% @private
deregister_cleanup(_) -> ok.

-spec collect_mf(_Registry, Callback) -> ok when
    _Registry :: prometheus_registry:registry(),
    Callback :: prometheus_collector:callback().
%% @private
%% Emits one metric family per enabled metric via the registry
%% callback; metric families not listed in the configuration are
%% filtered out before emission.
collect_mf(_Registry, Callback) ->
    Metrics = metrics(),
    EnabledMetrics = enabled_metrics(),
    [add_metric_family(Metric, Callback)
     || {Name, _, _, _}=Metric <- Metrics,
        metric_enabled(Name, EnabledMetrics)],
    ok.

add_metric_family({Name, Type, Help, Metrics}, Callback) ->
    Callback(create_mf(?METRIC_NAME(Name), Help, Type, Metrics)).

%%====================================================================
%% Private Parts
%%====================================================================

%% Collecting distribution info can fail (e.g. the node is not alive,
%% or a connection disappears mid-scan); in that case report no
%% metrics at all rather than crashing the scrape.
metrics() ->
    try
        metrics1()
    catch _:_ ->
        []
    end.
%% @doc Build the full list of `{Name, Type, Help, Samples}' tuples
%% for every metric this collector can export.  One pass over
%% dist_info/0 gathers the per-peer data; each entry below projects a
%% single statistic out of it.
metrics1() ->
    Data = dist_info(),
    [{recv_bytes, gauge,
      "Number of bytes received by the socket.",
      metric(inet, recv_oct, Data)},
     {recv_cnt, gauge,
      "Number of packets received by the socket.",
      metric(inet, recv_cnt, Data)},
     {recv_max_bytes, gauge,
      "Size of the largest packet, in bytes, received by the socket.",
      metric(inet, recv_max, Data)},
     {recv_avg_bytes, gauge,
      "Average size of packets, in bytes, received by the socket.",
      metric(inet, recv_avg, Data)},
     {recv_dvi_bytes, gauge,
      "Average packet size deviation, in bytes, received by the socket.",
      metric(inet, recv_dvi, Data)},
     {send_bytes, gauge,
      "Number of bytes sent from the socket.",
      metric(inet, send_oct, Data)},
     {send_cnt, gauge,
      "Number of packets sent from the socket.",
      metric(inet, send_cnt, Data)},
     {send_max_bytes, gauge,
      "Size of the largest packet, in bytes, sent from the socket.",
      metric(inet, send_max, Data)},
     {send_avg_bytes, gauge,
      "Average size of packets, in bytes, sent from the socket.",
      metric(inet, send_avg, Data)},
     {send_pend_bytes, gauge,
      "Number of bytes waiting to be sent by the socket.",
      metric(inet, send_pend, Data)},
     {port_input_bytes, gauge,
      "The total number of bytes read from the port.",
      metric(port, input, Data)},
     {port_output_bytes, gauge,
      "The total number of bytes written to the port.",
      metric(port, output, Data)},
     {port_memory_bytes, gauge,
      "The total number of bytes allocated for this port by the runtime system. "
      "The port itself can have allocated memory that is not included.",
      metric(port, memory, Data)},
     {port_queue_size_bytes, gauge,
      "The total number of bytes queued by the port using the ERTS driver queue implementation.",
      metric(port, queue_size, Data)},
     {proc_memory_bytes, gauge,
      "The size in bytes of the process. This includes call stack, heap, and internal structures.",
      metric(proc, memory, Data)},
     {proc_heap_size_words, gauge,
      %% BUGFIX: this help string had been garbled by line-wrapping (a
      %% literal newline had been embedded mid-sentence); restored as
      %% adjacent concatenated literals.
      "The size in words of the youngest heap generation of the process. "
      "This generation includes the process stack. This information is "
      "highly implementation-dependent, and can change if the implementation changes.",
      metric(proc, heap_size, Data)},
     {proc_min_heap_size_words, gauge,
      "The minimum heap size for the process.",
      metric(proc, min_heap_size, Data)},
     {proc_min_bin_vheap_size_words, gauge,
      "The minimum binary virtual heap size for the process.",
      metric(proc, min_bin_vheap_size, Data)},
     {proc_stack_size_words, gauge,
      "The stack size, in words, of the process.",
      metric(proc, stack_size, Data)},
     {proc_total_heap_size_words, gauge,
      "The total size, in words, of all heap fragments of the process. "
      "This includes the process stack and any unreceived messages that "
      "are considered to be part of the heap.",
      metric(proc, total_heap_size, Data)},
     {proc_message_queue_len, gauge,
      "The number of messages currently in the message queue of the process.",
      metric(proc, message_queue_len, Data)},
     {proc_reductions, gauge,
      "The number of reductions executed by the process.",
      metric(proc, reductions, Data)},
     {proc_status, gauge,
      "The current status of the distribution process. "
      "The status is represented as a numerical value where `exiting=1', "
      "`suspended=2', `runnable=3', `garbage_collecting=4', `running=5' "
      "and `waiting=6'.",
      metric_proc_status(Data)},
     {node_state, gauge,
      "The current state of the distribution link. "
      "The state is represented as a numerical value where `pending=1', "
      "`up_pending=2' and `up=3'.",
      metric_node_state(Data)}].

%% @doc The configured metric whitelist, or `all' (the default).
enabled_metrics() ->
    application:get_env(prometheus, vm_dist_collector_metrics, all).

metric_enabled(Name, Metrics) ->
    Metrics =:= all orelse lists:member(Name, Metrics).

-include_lib("kernel/include/net_address.hrl").

%% @doc Gather per-peer distribution data: one `{Node, InfoMap}' per
%% connected node, correlating each connection with its port and
%% owning process(es).
dist_info() ->
    {ok, NodesInfo} = net_kernel:nodes_info(),
    AllPorts = [{P, erlang:port_info(P)} || P <- erlang:ports()],
    [dist_info(NodeInfo, AllPorts) || NodeInfo <- NodesInfo].
%% @doc Build the info map for one peer.  A connection that is still
%% pending has no socket address yet, so only the owner pid and link
%% state can be reported for it.
dist_info({Node, Info}, AllPorts) ->
    DistPid = proplists:get_value(owner, Info),
    NodeState = proplists:get_value(state, Info),
    case proplists:get_value(address, Info, #net_address{}) of
        #net_address{address=undefined} ->
            %% no socket yet (pending connection, or address unknown)
            {Node, #{
                dist_pid   => DistPid,
                node_state => NodeState
            }};
        #net_address{address=SockName} ->
            dist_info(Node, AllPorts, DistPid, NodeState, SockName)
    end.

%% Locate the TCP port whose remote endpoint matches the peer's
%% socket address, then collect its inet statistics.
%% NOTE(review): the single-element match `[DistPort] = ...' asserts
%% exactly one matching port; zero or multiple matches crash the
%% collection (caught by metrics/0 upstream).
dist_info(Node, AllPorts, DistPid, NodeState, SockName) ->
    [DistPort] = [ P
                   || {P, I} <- AllPorts,
                      I =/= undefined,
                      proplists:get_value(name, I) =:= "tcp_inet",
                      inet:peername(P) =:= {ok, SockName} ],
    {ok, InetStats} = inet:getstat(DistPort),
    %% If the process connected to the port is not the distribution
    %% owner itself, the link presumably goes through TLS; try to
    %% discover the tls_connection/tls_sender processes.
    Map = case erlang:port_info(DistPort, connected) of
              {_, DistPid} ->
                  #{};
              {_, ConnectedPid} ->
                  dist_tls_info(ConnectedPid)
          end,
    {Node, Map#{
        inet_stats => InetStats,
        dist_port  => DistPort,
        dist_pid   => DistPid,
        node_state => NodeState
    }}.

%% Identify a candidate pid as a tls_connection process by its
%% '$initial_call' process-dictionary entry; anything else is ignored.
dist_tls_info(MaybeTlsConnPid) ->
    {_, CDict} = process_info(MaybeTlsConnPid, dictionary),
    case lists:keyfind('$initial_call', 1, CDict) of
        %% This is the right process: add it to the map and try to find
        %% the tls_sender process as well.
        {_, {tls_connection, init, 1}} ->
            dist_tls_sender_info(MaybeTlsConnPid,
                                 #{tls_connection_pid => MaybeTlsConnPid});
        _ ->
            #{}
    end.

%% The tls_sender process is assumed to be the first link of the
%% tls_connection process; verified via its '$initial_call'.
dist_tls_sender_info(TlsConnPid, Map) ->
    case process_info(TlsConnPid, links) of
        {_, [MaybeTlsSenderPid|_]} when is_pid(MaybeTlsSenderPid) ->
            {_, SDict} = process_info(MaybeTlsSenderPid, dictionary),
            case lists:keyfind('$initial_call', 1, SDict) of
                {_, {tls_sender, init, 1}} ->
                    Map#{tls_sender_pid => MaybeTlsSenderPid};
                _ ->
                    Map
            end;
        _ ->
            Map
    end.

%% @doc Project one statistic out of the per-peer data, producing
%% `{Labels, Value}' samples.  `inet' reads the cached socket stats,
%% `port' queries the port live, and `proc' fans out over all known
%% process kinds (dist owner, tls_connection, tls_sender).
metric(inet, Key, Data) ->
    [ {[{peer, Node}], element(2, lists:keyfind(Key, 1, Stats))}
      || {Node, #{inet_stats := Stats}} <- Data];
metric(port, Key, Data) ->
    [ {[{peer, Node}], element(2, erlang:port_info(DistPort, Key))}
      || {Node, #{dist_port := DistPort}} <- Data];
metric(proc, Key, Data) ->
    metric_proc(dist_pid, Key, Data)
    ++ metric_proc(tls_connection_pid, Key, Data)
    ++ metric_proc(tls_sender_pid, Key, Data).
%% @doc One sample per process of kind `PidKey' present in the
%% per-peer data.  Each sample is labelled with the peer node and the
%% process type; the value is the requested process_info/2 item.
metric_proc(PidKey, InfoKey, PerPeer) ->
    TypeLabel = {type, type(PidKey)},
    [ {[{peer, Peer}, TypeLabel],
       element(2, process_info(ProcPid, InfoKey))}
      || {Peer, #{PidKey := ProcPid}} <- PerPeer].

%% Map an info-map key to the value carried by the `type' label.
type(tls_sender_pid)     -> tls_sender;
type(tls_connection_pid) -> tls_connection;
type(dist_pid)           -> dist.

%% @doc Status samples for every distribution-related process kind.
metric_proc_status(PerPeer) ->
    lists:append(
      [metric_proc_status(PerPeer, Kind)
       || Kind <- [dist_pid, tls_connection_pid, tls_sender_pid]]).

metric_proc_status(PerPeer, PidKey) ->
    TypeLabel = {type, type(PidKey)},
    [ {[{peer, Peer}, TypeLabel],
       proc_status(element(2, process_info(ProcPid, status)))}
      || {Peer, #{PidKey := ProcPid}} <- PerPeer].

%% Encode a process status atom as the numeric value exported by the
%% `proc_status' gauge (see the module doc for the mapping).
proc_status(waiting)            -> 6;
proc_status(running)            -> 5;
proc_status(garbage_collecting) -> 4;
proc_status(runnable)           -> 3;
proc_status(suspended)          -> 2;
proc_status(exiting)            -> 1.

%% @doc One `node_state' sample per peer.
metric_node_state(PerPeer) ->
    [ {[{peer, Peer}], node_state(LinkState)}
      || {Peer, #{node_state := LinkState}} <- PerPeer].

%% Encode a distribution link state atom as its exported numeric value.
node_state(up)         -> 3;
node_state(up_pending) -> 2;
node_state(pending)    -> 1.
src/collectors/vm/prometheus_vm_dist_collector.erl
0.566858
0.417568
prometheus_vm_dist_collector.erl
starcoder
%% %% %CopyrightBegin% %% %% Copyright Ericsson AB 1996-2019. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %% %% %CopyrightEnd% %% %% A string library that works on grapheme clusters, with the exception %% of codepoints of class 'prepend' and non modern (or decomposed) Hangul. %% If these codepoints appear, functions like 'find/2' may return a string %% which starts inside a grapheme cluster. %% These exceptions are made because the codepoints classes are %% seldom used and require that we are able look at previous codepoints in %% the stream and is thus hard to implement effectively. %% %% GC (grapheme cluster) implies that the length of string 'ß↑e̊' is 3 though %% it is represented by the codepoints [223,8593,101,778] or the %% utf8 binary <<195,159,226,134,145,101,204,138>> %% %% And that searching for strings or graphemes finds the correct positions: %% %% find("eeeee̊eee", "e̊") -> "e̊ee".: %% find("1£4e̊abcdef", "e") -> "ef" %% %% Most functions expect all input to be normalized to one form, %% see unicode:characters_to_nfc and unicode:characters_to_nfd functions. %% When appending strings no checking is done to verify that the %% result is valid unicode strings. %% %% The functions may crash for invalid utf-8 input. %% %% Return value should be kept consistent when return type is %% unicode:chardata() i.e. binary input => binary output, %% list input => list output mixed input => mixed output %% -module(string). 
%% Modern grapheme-cluster-aware API.
-export([is_empty/1, length/1, to_graphemes/1,
         reverse/1,
         equal/2, equal/3, equal/4,
         slice/2, slice/3,
         pad/2, pad/3, pad/4, trim/1, trim/2, trim/3, chomp/1,
         take/2, take/3, take/4,
         lexemes/2, nth_lexeme/3,
         uppercase/1, lowercase/1, titlecase/1,casefold/1,
         prefix/2,
         split/2,split/3,replace/3,replace/4,
         find/2,find/3,
         next_codepoint/1, next_grapheme/1
        ]).

-export([to_float/1, to_integer/1]).

%% Old (will be deprecated) lists/string API kept for backwards compability
-export([len/1, concat/2, % equal/2, (extended in the new api)
         chr/2,rchr/2,str/2,rstr/2,
         span/2,cspan/2,substr/2,substr/3,
         tokens/2,
         chars/2,chars/3]).
-export([copies/2,words/1,words/2,strip/1,strip/2,strip/3,
         sub_word/2,sub_word/3,left/2,left/3,right/2,right/3,
         sub_string/2,sub_string/3,centre/2,centre/3, join/2]).
-export([to_upper/1, to_lower/1]).
%%
-import(lists,[member/2]).

%% length/1 is redefined in this module, so the auto-imported BIF must
%% be suppressed; the inlined helpers are internal hot-path functions.
-compile({no_auto_import,[length/1]}).
-compile({inline, [btoken/2, rev/1, append/2, stack/2, search_compile/1]}).
%% Guard macro: both codepoints are Latin-1 and CP1 is not a CR (which
%% could combine with a following LF into one grapheme cluster).
-define(ASCII_LIST(CP1,CP2), CP1 < 256, CP2 < 256, CP1 =/= $\r).

-export_type([grapheme_cluster/0]).

%% A grapheme cluster is either a single codepoint or a list of
%% codepoints that render as one user-perceived character.
-type grapheme_cluster() :: char() | [char()].
-type direction() :: 'leading' | 'trailing'.

-dialyzer({no_improper_lists, [stack/2, length_b/3]}).
%%% BIFs internal (not documented) should not to be used outside of this module
%%% May be removed
-export([list_to_float/1, list_to_integer/1]).

%% Uses bifs: string:list_to_float/1 and string:list_to_integer/1
%% (the stubs below are replaced by the native implementation at load
%% time; nif_error is only reached if the BIF is missing)
-spec list_to_float(String) -> {Float, Rest} | {'error', Reason} when
      String :: string(),
      Float :: float(),
      Rest :: string(),
      Reason :: 'no_float' | 'not_a_list'.

list_to_float(_) ->
    erlang:nif_error(undef).

-spec list_to_integer(String) -> {Int, Rest} | {'error', Reason} when
      String :: string(),
      Int :: integer(),
      Rest :: string(),
      Reason :: 'no_integer' | 'not_a_list'.

list_to_integer(_) ->
    erlang:nif_error(undef).

%%% End of BIFs

%% Check if string is the empty string
-spec is_empty(String::unicode:chardata()) -> boolean().
%% Deep lists are empty if every element is empty.
is_empty([]) -> true;
is_empty(<<>>) -> true;
is_empty([L|R]) -> is_empty(L) andalso is_empty(R);
is_empty(_) -> false.

%% Count the number of grapheme clusters in chardata
-spec length(String::unicode:chardata()) -> non_neg_integer().
length(<<CP1/utf8, Bin/binary>>) ->
    length_b(Bin, CP1, 0);
length(CD) ->
    length_1(CD, 0).

%% Convert a string to a list of grapheme clusters
-spec to_graphemes(String::unicode:chardata()) -> [grapheme_cluster()].
to_graphemes(CD0) ->
    case unicode_util:gc(CD0) of
        [GC|CD] -> [GC|to_graphemes(CD)];
        [] -> []
    end.

%% Compare two strings return boolean, assumes that the input are
%% normalized to same form, see unicode:characters_to_nfX_xxx(..)
-spec equal(A, B) -> boolean() when
      A::unicode:chardata(),
      B::unicode:chardata().
%% two binaries can be compared directly; mixed chardata needs a walk
equal(A,B) when is_binary(A), is_binary(B) ->
    A =:= B;
equal(A,B) ->
    equal_1(A,B).

%% Compare two strings return boolean, assumes that the input are
%% normalized to same form, see unicode:characters_to_nfX_xxx(..)
%% does casefold on the fly
-spec equal(A, B, IgnoreCase) -> boolean() when
      A::unicode:chardata(),
      B::unicode:chardata(),
      IgnoreCase :: boolean().
equal(A, B, false) ->
    equal(A,B);
equal(A, B, true) ->
    equal_nocase(A,B).

%% Compare two strings return boolean
%% if specified does casefold and normalization on the fly
-spec equal(A, B, IgnoreCase, Norm) -> boolean() when
      A :: unicode:chardata(),
      B :: unicode:chardata(),
      IgnoreCase :: boolean(),
      Norm :: 'none' | 'nfc' | 'nfd' | 'nfkc' | 'nfkd'.
equal(A, B, Case, none) ->
    equal(A,B,Case);
equal(A, B, false, Norm) ->
    equal_norm(A, B, Norm);
equal(A, B, true, Norm) ->
    equal_norm_nocase(A, B, Norm).

%% Reverse grapheme clusters
-spec reverse(String::unicode:chardata()) -> [grapheme_cluster()].
reverse(<<CP1/utf8, Rest/binary>>) ->
    reverse_b(Rest, CP1, []);
reverse(CD) ->
    reverse_1(CD, []).
%% Slice a string and return rest of string
%% Note: counts grapheme_clusters
-spec slice(String, Start) -> Slice when
      String::unicode:chardata(),
      Start :: non_neg_integer(),
      Slice :: unicode:chardata().
%% keep the return type consistent with the input: binary in =>
%% binary out, even when the slice is empty
slice(CD, N) when is_integer(N), N >= 0 ->
    case slice_l0(CD, N) of
        [] when is_binary(CD) -> <<>>;
        Res -> Res
    end.

-spec slice(String, Start, Length) -> Slice when
      String::unicode:chardata(),
      Start :: non_neg_integer(),
      Length :: 'infinity' | non_neg_integer(),
      Slice :: unicode:chardata().
slice(CD, N, Length)
  when is_integer(N), N >= 0, is_integer(Length), Length > 0 ->
    case slice_l0(CD, N) of
        [] when is_binary(CD) -> <<>>;
        L -> slice_trail(L, Length)
    end;
slice(CD, N, infinity) ->
    case slice_l0(CD, N) of
        [] when is_binary(CD) -> <<>>;
        Res -> Res
    end;
slice(CD, _, 0) ->
    case is_binary(CD) of
        true  -> <<>>;
        false -> []
    end.

%% Pad a string to desired length
-spec pad(String, Length) -> unicode:charlist() when
      String ::unicode:chardata(),
      Length :: integer().
pad(CD, Length) ->
    pad(CD, Length, trailing, $\s).

-spec pad(String, Length, Dir) -> unicode:charlist() when
      String ::unicode:chardata(),
      Length :: integer(),
      Dir :: direction() | 'both'.
pad(CD, Length, Dir) ->
    pad(CD, Length, Dir, $\s).

-spec pad(String, Length, Dir, Char) -> unicode:charlist() when
      String ::unicode:chardata(),
      Length :: integer(),
      Dir :: direction() | 'both',
      Char :: grapheme_cluster().
%% a string already at least Length long is returned unchanged
%% (max(0, ...) makes the pad empty)
pad(CD, Length, leading, Char) when is_integer(Length) ->
    Len = length(CD),
    [lists:duplicate(max(0, Length-Len), Char), CD];
pad(CD, Length, trailing, Char) when is_integer(Length) ->
    Len = length(CD),
    [CD|lists:duplicate(max(0, Length-Len), Char)];
pad(CD, Length, both, Char) when is_integer(Length) ->
    Len = length(CD),
    Size = max(0, Length-Len),
    Pre = lists:duplicate(Size div 2, Char),
    %% an odd-sized pad puts the extra Char at the trailing end
    Post = case Size rem 2 of
               1 -> [Char];
               _ -> []
           end,
    [Pre, CD, Pre|Post].

%% Strip characters from whitespace or Separator in Direction
-spec trim(String) -> unicode:chardata() when
      String :: unicode:chardata().
trim(Str) ->
    trim(Str, both, unicode_util:whitespace()).

-spec trim(String, Dir) -> unicode:chardata() when
      String :: unicode:chardata(),
      Dir :: direction() | 'both'.
trim(Str, Dir) ->
    trim(Str, Dir, unicode_util:whitespace()).

-spec trim(String, Dir, Characters) -> unicode:chardata() when
      String :: unicode:chardata(),
      Dir :: direction() | 'both',
      Characters :: [grapheme_cluster()].
%% the single-Latin-1-separator clauses are fast paths that avoid
%% building a search pattern
trim(Str, _, []) -> Str;
trim(Str, leading, [Sep]) when is_list(Str), Sep < 256 ->
    trim_ls(Str, Sep);
trim(Str, leading, Sep) when is_list(Sep) ->
    trim_l(Str, Sep);
trim(Str, trailing, [Sep]) when is_list(Str), Sep < 256 ->
    trim_ts(Str, Sep);
trim(Str, trailing, Seps0) when is_list(Seps0) ->
    Seps = search_pattern(Seps0),
    trim_t(Str, 0, Seps);
trim(Str, both, Sep) when is_list(Sep) ->
    trim(trim(Str,leading,Sep), trailing, Sep).

%% Delete trailing newlines or \r\n
-spec chomp(String::unicode:chardata()) -> unicode:chardata().
chomp(Str) ->
    %% the [$\r,$\n] entry is a single CRLF grapheme cluster
    trim(Str, trailing, [[$\r,$\n],$\n]).

%% Split String into two parts where the leading part consists of Characters
-spec take(String, Characters) -> {Leading, Trailing} when
      String::unicode:chardata(),
      Characters::[grapheme_cluster()],
      Leading::unicode:chardata(),
      Trailing::unicode:chardata().
take(Str, Sep) ->
    take(Str, Sep, false, leading).

-spec take(String, Characters, Complement) -> {Leading, Trailing} when
      String::unicode:chardata(),
      Characters::[grapheme_cluster()],
      Complement::boolean(),
      Leading::unicode:chardata(),
      Trailing::unicode:chardata().
take(Str, Sep, Complement) ->
    take(Str, Sep, Complement, leading).

-spec take(String, Characters, Complement, Dir) -> {Leading, Trailing} when
      String::unicode:chardata(),
      Characters::[grapheme_cluster()],
      Complement::boolean(),
      Dir::direction(),
      Leading::unicode:chardata(),
      Trailing::unicode:chardata().
%% take/4: empty separator set — the whole string goes to one side,
%% depending on Complement/Dir; the empty part keeps the input's type.
take(Str, [], Complement, Dir) ->
    Empty = case is_binary(Str) of true -> <<>>; false -> [] end,
    case {Complement,Dir} of
        {false, leading} -> {Empty, Str};
        {false, trailing} -> {Str, Empty};
        {true, leading} -> {Str, Empty};
        {true, trailing} -> {Empty, Str}
    end;
%% Dispatch to the four worker loops; note leading/false needs no
%% precompiled search pattern (it only does membership tests).
take(Str, Sep, false, leading) ->
    take_l(Str, Sep, []);
take(Str, Sep0, true, leading) ->
    Sep = search_pattern(Sep0),
    take_lc(Str, Sep, []);
take(Str, Sep0, false, trailing) ->
    Sep = search_pattern(Sep0),
    take_t(Str, 0, Sep);
take(Str, Sep0, true, trailing) ->
    Sep = search_pattern(Sep0),
    take_tc(Str, 0, Sep).

%% Uppercase all chars in Str
-spec uppercase(String::unicode:chardata()) -> unicode:chardata().
uppercase(CD) when is_list(CD) ->
    %% The worker throws 'unchanged' so the original term can be returned
    %% without rebuilding it when no character actually changes.
    try uppercase_list(CD, false)
    catch unchanged -> CD
    end;
uppercase(<<CP1/utf8, Rest/binary>>=Orig) ->
    try uppercase_bin(CP1, Rest, false) of
        List -> unicode:characters_to_binary(List)
    catch unchanged -> Orig
    end;
uppercase(<<>>) -> <<>>.

%% Lowercase all chars in Str
-spec lowercase(String::unicode:chardata()) -> unicode:chardata().
lowercase(CD) when is_list(CD) ->
    try lowercase_list(CD, false)
    catch unchanged -> CD
    end;
lowercase(<<CP1/utf8, Rest/binary>>=Orig) ->
    try lowercase_bin(CP1, Rest, false) of
        List -> unicode:characters_to_binary(List)
    catch unchanged -> Orig
    end;
lowercase(<<>>) -> <<>>.

%% Make a titlecase of the first char in Str
-spec titlecase(String::unicode:chardata()) -> unicode:chardata().
titlecase(CD) when is_list(CD) ->
    case unicode_util:titlecase(CD) of
        [GC|Tail] -> append(GC,Tail);
        Empty -> Empty
    end;
titlecase(CD) when is_binary(CD) ->
    case unicode_util:titlecase(CD) of
        %% Single codepoint result vs. a multi-codepoint grapheme cluster.
        [CP|Chars] when is_integer(CP) -> <<CP/utf8,Chars/binary>>;
        [CPs|Chars] -> << << <<CP/utf8>> || CP <- CPs>>/binary, Chars/binary>>;
        [] -> <<>>
    end.

%% Make a comparable string of the Str should be used for equality tests only
-spec casefold(String::unicode:chardata()) -> unicode:chardata().
%% casefold/1: Unicode case folding; 'unchanged' throw avoids rebuilding
%% an already-folded input (same pattern as uppercase/lowercase).
casefold(CD) when is_list(CD) ->
    try casefold_list(CD, false)
    catch unchanged -> CD
    end;
casefold(<<CP1/utf8, Rest/binary>>=Orig) ->
    try casefold_bin(CP1, Rest, false) of
        List -> unicode:characters_to_binary(List)
    catch unchanged -> Orig
    end;
casefold(<<>>) -> <<>>.

-spec to_integer(String) -> {Int, Rest} | {'error', Reason} when
      String :: unicode:chardata(),
      Int :: integer(),
      Rest :: unicode:chardata(),
      Reason :: 'no_integer' | badarg.
%% Take the leading run of sign/digit characters and let the old
%% string:list_to_integer/1 do the actual parsing.
to_integer(String) ->
    try take(String, "+-0123456789") of
        {Head, Tail} ->
            case is_empty(Head) of
                true -> {error, no_integer};
                false ->
                    List = unicode:characters_to_list(Head),
                    case string:list_to_integer(List) of
                        {error, _} = Err -> Err;
                        {Int, Rest} ->
                            to_number(String, Int, Rest, List, Tail)
                    end
            end
    catch _:_ -> {error, badarg}
    end.

-spec to_float(String) -> {Float, Rest} | {'error', Reason} when
      String :: unicode:chardata(),
      Float :: float(),
      Rest :: unicode:chardata(),
      Reason :: 'no_float' | 'badarg'.
to_float(String) ->
    try take(String, "+-0123456789eE.,") of
        {Head, Tail} ->
            case is_empty(Head) of
                true -> {error, no_float};
                false ->
                    List = unicode:characters_to_list(Head),
                    case string:list_to_float(List) of
                        {error, _} = Err -> Err;
                        {Float, Rest} ->
                            to_number(String, Float, Rest, List, Tail)
                    end
            end
    catch _:_ -> {error, badarg}
    end.

%% Rebuild the unconsumed remainder in the input's own representation.
%% For binaries the consumed byte count equals the consumed character
%% count, since all number characters are single-byte in UTF-8.
to_number(String, Number, Rest, List, _Tail) when is_binary(String) ->
    BSz = erlang:length(List)-erlang:length(Rest),
    <<_:BSz/binary, Cont/binary>> = String,
    {Number, Cont};
to_number(_, Number, Rest, _, Tail) ->
    {Number, concat(Rest,Tail)}.

%% Return the remaining string with prefix removed or else nomatch
-spec prefix(String::unicode:chardata(), Prefix::unicode:chardata()) ->
          'nomatch' | unicode:chardata().
prefix(Str, Prefix0) ->
    Result = case unicode:characters_to_list(Prefix0) of
                 [] -> Str;
                 Prefix -> prefix_1(Str, Prefix)
             end,
    %% An exhausted binary input yields <<>> rather than [].
    case Result of
        [] when is_binary(Str) -> <<>>;
        Res -> Res
    end.
%% Split String at the first occurrence of SearchPattern; returns the parts.
-spec split(String, SearchPattern) -> [unicode:chardata()] when
      String :: unicode:chardata(),
      SearchPattern :: unicode:chardata().
split(String, SearchPattern) ->
    split(String, SearchPattern, leading).

%% Split String on SearchPattern at the leading/trailing occurrence, or at
%% every occurrence ('all'); returns the list of pieces.
-spec split(String, SearchPattern, Where) -> [unicode:chardata()] when
      String :: unicode:chardata(),
      SearchPattern :: unicode:chardata(),
      Where :: direction() | 'all'.
split(String, SearchPattern, Where) ->
    case is_empty(SearchPattern) of
        true ->
            %% Empty pattern never matches: the input is the single part.
            [String];
        false ->
            NeedleCPs = unicode:characters_to_list(SearchPattern),
            %% split_1 returns either a ready result list or a
            %% {LastPart, AccumulatedParts} pair to finish here.
            case split_1(String, NeedleCPs, 0, Where, [], []) of
                {_Last, []} ->
                    [String];
                {_Last, Parts} when Where =:= trailing ->
                    Parts;
                {Last, Parts} when Where =:= all ->
                    lists:reverse([Last|Parts]);
                Parts when is_list(Parts) ->
                    Parts
            end
    end.

%% Replace the first occurrence of SearchPattern in String with Replacement.
-spec replace(String, SearchPattern, Replacement) ->
                     [unicode:chardata()] when
      String :: unicode:chardata(),
      SearchPattern :: unicode:chardata(),
      Replacement :: unicode:chardata().
replace(String, SearchPattern, Replacement) ->
    Splits = split(String, SearchPattern),
    lists:join(Replacement, Splits).

%% Replace the leading/trailing/all occurrence(s) of SearchPattern in
%% String with Replacement.
-spec replace(String, SearchPattern, Replacement, Where) ->
                     [unicode:chardata()] when
      String :: unicode:chardata(),
      SearchPattern :: unicode:chardata(),
      Replacement :: unicode:chardata(),
      Where :: direction() | 'all'.
replace(String, SearchPattern, Replacement, Where) ->
    Splits = split(String, SearchPattern, Where),
    lists:join(Replacement, Splits).

%% Split Str into a list of chardata separated by one of the grapheme
%% clusters in Seps.
-spec lexemes(String::unicode:chardata(),
              SeparatorList::[grapheme_cluster()]) ->
                     [unicode:chardata()].
lexemes([], _) -> [];
lexemes(Str, []) -> [Str];
lexemes(Str, SepList) when is_list(SepList) ->
    lexemes_m(Str, search_pattern(SepList), []).
-spec nth_lexeme(String, N, SeparatorList) -> unicode:chardata() when
      String::unicode:chardata(),
      N::non_neg_integer(),
      SeparatorList::[grapheme_cluster()].
%% Return the N:th lexeme (1-based) of Str when split on SeparatorList.
nth_lexeme(Str, 1, []) -> Str;
nth_lexeme(Str, N, Seps0) when is_list(Seps0), is_integer(N), N > 0 ->
    Seps = search_pattern(Seps0),
    nth_lexeme_m(Str, Seps, N).

%% find first SearchPattern in String return rest of string
-spec find(String, SearchPattern) -> unicode:chardata() | 'nomatch' when
      String::unicode:chardata(),
      SearchPattern::unicode:chardata().
find(String, SearchPattern) -> find(String, SearchPattern, leading).

%% find SearchPattern in String (search in Dir direction) return rest of string
-spec find(String, SearchPattern, Dir) -> unicode:chardata() | 'nomatch' when
      String::unicode:chardata(),
      SearchPattern::unicode:chardata(),
      Dir::direction().
%% An empty pattern (either representation) matches immediately.
find(String, "", _) -> String;
find(String, <<>>, _) -> String;
find(String, SearchPattern, leading) ->
    find_l(String, unicode:characters_to_list(SearchPattern));
find(String, SearchPattern, trailing) ->
    find_r(String, unicode:characters_to_list(SearchPattern), nomatch).

%% Fetch first grapheme cluster and return rest in tail
-spec next_grapheme(String::unicode:chardata()) ->
          maybe_improper_list(grapheme_cluster(),unicode:chardata()) |
          {error,unicode:chardata()}.
next_grapheme(CD) -> unicode_util:gc(CD).

%% Fetch first codepoint and return rest in tail
-spec next_codepoint(String::unicode:chardata()) ->
          maybe_improper_list(char(),unicode:chardata()) |
          {error,unicode:chardata()}.
next_codepoint(CD) -> unicode_util:cp(CD).

%% Internals

%% Grapheme-cluster length of a list string.
%% First clause is the fast path: two adjacent "simple" chars
%% (per ?ASCII_LIST) cannot combine, so no gc/1 call is needed.
length_1([CP1|[CP2|_]=Cont], N) when ?ASCII_LIST(CP1,CP2) ->
    length_1(Cont, N+1);
length_1(Str, N) ->
    case unicode_util:gc(Str) of
        [] -> N;
        [_|Rest] -> length_1(Rest, N+1)
    end.

%% Grapheme-cluster length of a binary; CP1 is the already-decoded
%% head codepoint, so the final cluster adds one to N.
length_b(<<CP2/utf8, Rest/binary>>, CP1, N)
  when ?ASCII_LIST(CP1,CP2) ->
    length_b(Rest, CP2, N+1);
length_b(Bin0, CP1, N) ->
    [_|Bin1] = unicode_util:gc([CP1|Bin0]),
    case unicode_util:cp(Bin1) of
        [] -> N+1;
        [CP3|Bin] -> length_b(Bin, CP3, N+1)
    end.
%% Exact codepoint-for-codepoint equality.
%% Fast path: both heads are plain integers, compare directly.
equal_1([A|AR], [B|BR]) when is_integer(A), is_integer(B) ->
    A =:= B andalso equal_1(AR, BR);
equal_1([], BR) -> is_empty(BR);
equal_1(A0,B0) ->
    case {unicode_util:cp(A0), unicode_util:cp(B0)} of
        {[CP|A],[CP|B]} -> equal_1(A,B);
        {[], []} -> true;
        {L1,L2} when is_list(L1), is_list(L2) -> false
    end.

%% Case-insensitive equality: casefold both sides lazily, one cp at a time.
equal_nocase(A, A) -> true;
equal_nocase(A0, B0) ->
    case {unicode_util:cp(unicode_util:casefold(A0)),
          unicode_util:cp(unicode_util:casefold(B0))} of
        {[CP|A],[CP|B]} -> equal_nocase(A,B);
        {[], []} -> true;
        {L1,L2} when is_list(L1), is_list(L2) -> false
    end.

%% Equality under a normalization form (Norm is a unicode_util fun name,
%% e.g. nfc/nfd — dynamic call unicode_util:Norm/1).
equal_norm(A, A, _Norm) -> true;
equal_norm(A0, B0, Norm) ->
    case {unicode_util:cp(unicode_util:Norm(A0)),
          unicode_util:cp(unicode_util:Norm(B0))} of
        {[CP|A],[CP|B]} -> equal_norm(A,B, Norm);
        {[], []} -> true;
        {L1,L2} when is_list(L1), is_list(L2) -> false
    end.

%% Normalized and case-insensitive equality combined.
equal_norm_nocase(A, A, _Norm) -> true;
equal_norm_nocase(A0, B0, Norm) ->
    case {unicode_util:cp(unicode_util:casefold(unicode_util:Norm(A0))),
          unicode_util:cp(unicode_util:casefold(unicode_util:Norm(B0)))} of
        {[CP|A],[CP|B]} -> equal_norm_nocase(A,B, Norm);
        {[], []} -> true;
        {L1,L2} when is_list(L1), is_list(L2) -> false
    end.

%% Reverse a list string, keeping grapheme clusters intact
%% (a multi-codepoint cluster is kept as a sublist element).
reverse_1([CP1|[CP2|_]=Cont], Acc) when ?ASCII_LIST(CP1,CP2) ->
    reverse_1(Cont, [CP1|Acc]);
reverse_1(CD, Acc) ->
    case unicode_util:gc(CD) of
        [GC|Rest] -> reverse_1(Rest, [GC|Acc]);
        [] -> Acc
    end.

%% Reverse a binary string; CP1 is the already-decoded head codepoint.
reverse_b(<<CP2/utf8, Rest/binary>>, CP1, Acc)
  when ?ASCII_LIST(CP1,CP2) ->
    reverse_b(Rest, CP2, [CP1|Acc]);
reverse_b(Bin0, CP1, Acc) ->
    [GC|Bin1] = unicode_util:gc([CP1|Bin0]),
    case unicode_util:cp(Bin1) of
        [] -> [GC|Acc];
        [CP3|Bin] -> reverse_b(Bin, CP3, [GC|Acc])
    end.

%% slice helper: dispatch binaries to the binary dropper, everything
%% else to the list dropper.
slice_l0(<<CP1/utf8, Bin/binary>>, N) when N > 0 ->
    slice_lb(Bin, CP1, N);
slice_l0(L, N) ->
    slice_l(L, N).

%% Drop the first N grapheme clusters of a list string.
slice_l([CP1|[CP2|_]=Cont], N) when ?ASCII_LIST(CP1,CP2),N > 0 ->
    slice_l(Cont, N-1);
slice_l(CD, N) when N > 0 ->
    case unicode_util:gc(CD) of
        [_|Cont] -> slice_l(Cont, N-1);
        [] -> []
    end;
slice_l(Cont, 0) ->
    Cont.
%% Drop the first N grapheme clusters of a binary; CP1 is the decoded
%% head codepoint (so N counts the cluster starting at CP1 as well).
slice_lb(<<CP2/utf8, Bin/binary>>, CP1, N) when ?ASCII_LIST(CP1,CP2), N > 1 ->
    slice_lb(Bin, CP2, N-1);
slice_lb(Bin, CP1, N) ->
    [_|Rest] = unicode_util:gc([CP1|Bin]),
    if N > 1 ->
            case unicode_util:cp(Rest) of
                [CP2|Cont] -> slice_lb(Cont, CP2, N-1);
                [] -> <<>>
            end;
       N =:= 1 ->
            Rest
    end.

%% Keep at most N grapheme clusters of the input.
%% For a binary: measure the byte length of the part to cut off and
%% slice the original binary once (avoids rebuilding it).
slice_trail(Orig, N) when is_binary(Orig) ->
    case Orig of
        <<CP1/utf8, Bin/binary>> when N > 0 ->
            Length = slice_bin(Bin, CP1, N),
            Sz = byte_size(Orig) - Length,
            <<Keep:Sz/binary, _/binary>> = Orig,
            Keep;
        _ -> <<>>
    end;
slice_trail(CD, N) when is_list(CD) ->
    slice_list(CD, N).

%% Keep at most N grapheme clusters of a list string.
slice_list([CP1|[CP2|_]=Cont], N) when ?ASCII_LIST(CP1,CP2),N > 0 ->
    [CP1|slice_list(Cont, N-1)];
slice_list(CD, N) when N > 0 ->
    case unicode_util:gc(CD) of
        [GC|Cont] -> append(GC, slice_list(Cont, N-1));
        [] -> []
    end;
slice_list(_, 0) ->
    [].

%% Return the byte length of what remains AFTER the first N grapheme
%% clusters (0 when the string is shorter than N clusters).
slice_bin(<<CP2/utf8, Bin/binary>>, CP1, N) when ?ASCII_LIST(CP1,CP2), N > 0 ->
    slice_bin(Bin, CP2, N-1);
slice_bin(CD, CP1, N) when N > 0 ->
    [_|Bin] = unicode_util:gc([CP1|CD]),
    case unicode_util:cp(Bin) of
        [CP2|Cont] -> slice_bin(Cont, CP2, N-1);
        [] -> 0
    end;
slice_bin(CD, CP1, 0) ->
    %% CP1 was already consumed from CD; add its encoded size back.
    byte_size(CD)+byte_size(<<CP1/utf8>>).

%% Uppercase a list string; throws 'unchanged' if nothing changed so the
%% caller can return the original. ASCII fast paths are only taken when
%% the NEXT char is latin-1 (cannot combine with the current one).
uppercase_list([CP1|[CP2|_]=Cont], _Changed)
  when $a =< CP1, CP1 =< $z, CP2 < 256 ->
    [CP1-32|uppercase_list(Cont, true)];
uppercase_list([CP1|[CP2|_]=Cont], Changed)
  when CP1 < 128, CP2 < 256 ->
    [CP1|uppercase_list(Cont, Changed)];
uppercase_list([], true) ->
    [];
uppercase_list([], false) ->
    throw(unchanged);
uppercase_list(CPs0, Changed) ->
    case unicode_util:uppercase(CPs0) of
        [Char|CPs] when Char =:= hd(CPs0) -> [Char|uppercase_list(CPs, Changed)];
        [Char|CPs] -> append(Char,uppercase_list(CPs, true));
        [] -> uppercase_list([], Changed)
    end.
%% Uppercase a binary string, returning a codepoint list (the caller
%% re-encodes). CP1 is the already-decoded head codepoint. Throws
%% 'unchanged' when no character needed conversion.
uppercase_bin(CP1, <<CP2/utf8, Bin/binary>>, _Changed)
  when $a =< CP1, CP1 =< $z, CP2 < 256 ->
    [CP1-32|uppercase_bin(CP2, Bin, true)];
uppercase_bin(CP1, <<CP2/utf8, Bin/binary>>, Changed)
  when CP1 < 128, CP2 < 256 ->
    [CP1|uppercase_bin(CP2, Bin, Changed)];
uppercase_bin(CP1, Bin, Changed) ->
    case unicode_util:uppercase([CP1|Bin]) of
        [CP1|CPs] ->
            %% Head unchanged; continue with the unchanged-flag as is.
            case unicode_util:cp(CPs) of
                [Next|Rest] ->
                    [CP1|uppercase_bin(Next, Rest, Changed)];
                [] when Changed ->
                    [CP1];
                [] ->
                    throw(unchanged)
            end;
        [Char|CPs] ->
            %% Head changed: from here on a new string must be built.
            case unicode_util:cp(CPs) of
                [Next|Rest] ->
                    [Char|uppercase_bin(Next, Rest, true)];
                [] ->
                    [Char]
            end
    end.

%% Lowercase a list string; same 'unchanged' protocol as uppercase_list.
lowercase_list([CP1|[CP2|_]=Cont], _Changed)
  when $A =< CP1, CP1 =< $Z, CP2 < 256 ->
    [CP1+32|lowercase_list(Cont, true)];
lowercase_list([CP1|[CP2|_]=Cont], Changed)
  when CP1 < 128, CP2 < 256 ->
    [CP1|lowercase_list(Cont, Changed)];
lowercase_list([], true) ->
    [];
lowercase_list([], false) ->
    throw(unchanged);
lowercase_list(CPs0, Changed) ->
    case unicode_util:lowercase(CPs0) of
        [Char|CPs] when Char =:= hd(CPs0) -> [Char|lowercase_list(CPs, Changed)];
        [Char|CPs] -> append(Char,lowercase_list(CPs, true));
        [] -> lowercase_list([], Changed)
    end.

%% Lowercase a binary string; same protocol as uppercase_bin.
lowercase_bin(CP1, <<CP2/utf8, Bin/binary>>, _Changed)
  when $A =< CP1, CP1 =< $Z, CP2 < 256 ->
    [CP1+32|lowercase_bin(CP2, Bin, true)];
lowercase_bin(CP1, <<CP2/utf8, Bin/binary>>, Changed)
  when CP1 < 128, CP2 < 256 ->
    [CP1|lowercase_bin(CP2, Bin, Changed)];
lowercase_bin(CP1, Bin, Changed) ->
    case unicode_util:lowercase([CP1|Bin]) of
        [CP1|CPs] ->
            case unicode_util:cp(CPs) of
                [Next|Rest] ->
                    [CP1|lowercase_bin(Next, Rest, Changed)];
                [] when Changed ->
                    [CP1];
                [] ->
                    throw(unchanged)
            end;
        [Char|CPs] ->
            case unicode_util:cp(CPs) of
                [Next|Rest] ->
                    [Char|lowercase_bin(Next, Rest, true)];
                [] ->
                    [Char]
            end
    end.
%% Casefold a list string; same 'unchanged' protocol as the other
%% case-conversion list walkers (ASCII folds like lowercase).
casefold_list([CP1|[CP2|_]=Cont], _Changed)
  when $A =< CP1, CP1 =< $Z, CP2 < 256 ->
    [CP1+32|casefold_list(Cont, true)];
casefold_list([CP1|[CP2|_]=Cont], Changed)
  when CP1 < 128, CP2 < 256 ->
    [CP1|casefold_list(Cont, Changed)];
casefold_list([], true) ->
    [];
casefold_list([], false) ->
    throw(unchanged);
casefold_list(CPs0, Changed) ->
    case unicode_util:casefold(CPs0) of
        [Char|CPs] when Char =:= hd(CPs0) -> [Char|casefold_list(CPs, Changed)];
        [Char|CPs] -> append(Char,casefold_list(CPs, true));
        [] -> casefold_list([], Changed)
    end.

%% Casefold a binary string; same protocol as uppercase_bin/lowercase_bin.
casefold_bin(CP1, <<CP2/utf8, Bin/binary>>, _Changed)
  when $A =< CP1, CP1 =< $Z, CP2 < 256 ->
    [CP1+32|casefold_bin(CP2, Bin, true)];
casefold_bin(CP1, <<CP2/utf8, Bin/binary>>, Changed)
  when CP1 < 128, CP2 < 256 ->
    [CP1|casefold_bin(CP2, Bin, Changed)];
casefold_bin(CP1, Bin, Changed) ->
    case unicode_util:casefold([CP1|Bin]) of
        [CP1|CPs] ->
            case unicode_util:cp(CPs) of
                [Next|Rest] ->
                    [CP1|casefold_bin(Next, Rest, Changed)];
                [] when Changed ->
                    [CP1];
                [] ->
                    throw(unchanged)
            end;
        [Char|CPs] ->
            case unicode_util:cp(CPs) of
                [Next|Rest] ->
                    [Char|casefold_bin(Next, Rest, true)];
                [] ->
                    [Char]
            end
    end.

%% Fast path for ascii searching for one character in lists
%% trim leading: single latin-1 separator, plain list input.
trim_ls([CP1|[CP2|_]=Cont]=Str, Sep) when ?ASCII_LIST(CP1,CP2) ->
    case Sep of
        CP1 -> trim_ls(Cont, Sep);
        _ -> Str
    end;
trim_ls(Str, Sep) ->
    %% Fall back to the general matcher for non-trivial heads.
    trim_l(Str, [Sep]).

%% trim leading: general separator list; handles deep lists with
%% embedded binaries via bin_search_inv.
trim_l([CP1|[CP2|_]=Cont]=Str, Sep) when ?ASCII_LIST(CP1,CP2) ->
    case lists:member(CP1, Sep) of
        true -> trim_l(Cont, Sep);
        false -> Str
    end;
trim_l([Bin|Cont0], Sep) when is_binary(Bin) ->
    case bin_search_inv(Bin, Cont0, Sep) of
        {nomatch, Cont} -> trim_l(Cont, Sep);
        Keep -> Keep
    end;
trim_l(Str, Sep) when is_list(Str) ->
    case unicode_util:gc(Str) of
        [C|Cs] ->
            case lists:member(C, Sep) of
                true -> trim_l(Cs, Sep);
                false -> Str
            end;
        [] -> []
    end;
trim_l(Bin, Sep) when is_binary(Bin) ->
    case bin_search_inv(Bin, [], Sep) of
        {nomatch,_} -> <<>>;
        [Keep] -> Keep
    end.
%% Fast path for ascii searching for one character in lists
%% trim trailing: single latin-1 separator, plain list input.
%% A trailing run of Sep is detected by checking whether the trimmed
%% tail became empty.
trim_ts([Sep|Cs1]=Str, Sep) ->
    case Cs1 of
        [] -> [];
        [CP2|_] when ?ASCII_LIST(Sep,CP2) ->
            Tail = trim_ts(Cs1, Sep),
            case is_empty(Tail) of
                true -> [];
                false -> [Sep|Tail]
            end;
        _ ->
            %% Possible combining char next: use the general matcher.
            trim_t(Str, 0, search_pattern([Sep]))
    end;
trim_ts([CP|Cont],Sep) when is_integer(CP) ->
    [CP|trim_ts(Cont, Sep)];
trim_ts(Str, Sep) ->
    trim_t(Str, 0, search_pattern([Sep])).

%% General trailing trim over chardata. The integer N is a byte offset
%% into the current head binary from which searching resumes (avoids
%% rescanning already-checked bytes).
trim_t([CP1|Cont]=Cs0, _, {GCs,CPs,_}=Seps) when is_integer(CP1) ->
    case lists:member(CP1, CPs) of
        true ->
            %% Codepoint matches; verify on grapheme-cluster level.
            [GC|Cs1] = unicode_util:gc(Cs0),
            case lists:member(GC, GCs) of
                true ->
                    Tail = trim_t(Cs1, 0, Seps),
                    case is_empty(Tail) of
                        true -> [];
                        false -> append(GC,Tail)
                    end;
                false ->
                    append(GC,trim_t(Cs1, 0, Seps))
            end;
        false ->
            [CP1|trim_t(Cont, 0, Seps)]
    end;
trim_t([Bin|Cont0], N, {GCs,_,_}=Seps0) when is_binary(Bin) ->
    <<_:N/binary, Rest/binary>> = Bin,
    Seps = search_compile(Seps0),
    case bin_search(Rest, Cont0, Seps) of
        {nomatch,_} ->
            stack(Bin, trim_t(Cont0, 0, Seps));
        [SepStart|Cont1] ->
            %% Found a separator; check if only separators follow.
            case bin_search_inv(SepStart, Cont1, GCs) of
                {nomatch, Cont} ->
                    Tail = trim_t(Cont, 0, Seps),
                    case is_empty(Tail) of
                        true ->
                            %% Everything after SepStart was trimmed away:
                            %% keep only the prefix of this binary.
                            KeepSz = byte_size(Bin) - byte_size(SepStart),
                            <<Keep:KeepSz/binary, _/binary>> = Bin,
                            Keep;
                        false ->
                            Used = cp_prefix(Cont0, Cont),
                            stack(Bin, stack(Used, Tail))
                    end;
                [NonSep|Cont] when is_binary(NonSep) ->
                    %% Non-separator after the match: resume the scan
                    %% right there (byte offset KeepSz).
                    KeepSz = byte_size(Bin) - byte_size(NonSep),
                    trim_t([Bin|Cont], KeepSz, Seps)
            end
    end;
trim_t(Str, 0, {GCs,_,_}=Seps) when is_list(Str) ->
    case unicode_util:gc(Str) of
        [GC|Cs1] ->
            case lists:member(GC, GCs) of
                true ->
                    Tail = trim_t(Cs1, 0, Seps),
                    case is_empty(Tail) of
                        true -> [];
                        false -> append(GC,Tail)
                    end;
                false ->
                    append(GC,trim_t(Cs1, 0, Seps))
            end;
        [] -> []
    end;
trim_t(Bin, N, {GCs,_,_}=Seps0) when is_binary(Bin) ->
    <<_:N/binary, Rest/binary>> = Bin,
    Seps = search_compile(Seps0),
    case bin_search(Rest, [], Seps) of
        {nomatch,_} -> Bin;
        [SepStart] ->
            case bin_search_inv(SepStart, [], GCs) of
                {nomatch,_} ->
                    KeepSz = byte_size(Bin) - byte_size(SepStart),
                    <<Keep:KeepSz/binary, _/binary>> = Bin,
                    Keep;
                [NonSep] ->
                    KeepSz = byte_size(Bin) - byte_size(NonSep),
                    trim_t(Bin, KeepSz, Seps)
            end
    end.

%% take leading (Complement=false): accumulate separator chars in Acc
%% (reversed) until the first non-separator; return {Taken, Rest}.
take_l([CP1|[CP2|_]=Cont]=Str, Seps, Acc) when ?ASCII_LIST(CP1,CP2) ->
    case lists:member(CP1, Seps) of
        true -> take_l(Cont, Seps, [CP1|Acc]);
        false -> {rev(Acc), Str}
    end;
take_l([Bin|Cont0], Seps, Acc) when is_binary(Bin) ->
    case bin_search_inv(Bin, Cont0, Seps) of
        {nomatch, Cont} ->
            %% Whole binary (plus the part of Cont0 that gc consumed)
            %% consisted of separators.
            Used = cp_prefix(Cont0, Cont),
            take_l(Cont, Seps, [unicode:characters_to_binary([Bin|Used])|Acc]);
        [Bin1|_]=After when is_binary(Bin1) ->
            First = byte_size(Bin) - byte_size(Bin1),
            <<Keep:First/binary, _/binary>> = Bin,
            {btoken(Keep,Acc), After}
    end;
take_l(Str, Seps, Acc) when is_list(Str) ->
    case unicode_util:gc(Str) of
        [C|Cs] ->
            case lists:member(C, Seps) of
                true -> take_l(Cs, Seps, append(rev(C),Acc));
                false -> {rev(Acc), Str}
            end;
        [] -> {rev(Acc), []}
    end;
take_l(Bin, Seps, Acc) when is_binary(Bin) ->
    case bin_search_inv(Bin, [], Seps) of
        {nomatch,_} ->
            {btoken(Bin, Acc), <<>>};
        [After] ->
            First = byte_size(Bin) - byte_size(After),
            <<Keep:First/binary, _/binary>> = Bin,
            {btoken(Keep, Acc), After}
    end.
%% take leading, complemented: accumulate NON-separator chars until the
%% first separator grapheme cluster; return {Taken, Rest}.
take_lc([CP1|Cont]=Str0, {GCs,CPs,_}=Seps, Acc) when is_integer(CP1) ->
    case lists:member(CP1, CPs) of
        true ->
            %% Codepoint may start a separator cluster; check on gc level.
            [GC|Str] = unicode_util:gc(Str0),
            case lists:member(GC, GCs) of
                false -> take_lc(Str, Seps, append(rev(GC),Acc));
                true -> {rev(Acc), Str0}
            end;
        false ->
            take_lc(Cont, Seps, append(CP1,Acc))
    end;
take_lc([Bin|Cont0], Seps0, Acc) when is_binary(Bin) ->
    Seps = search_compile(Seps0),
    case bin_search(Bin, Cont0, Seps) of
        {nomatch, Cont} ->
            %% No separator in this binary: take it whole.
            Used = cp_prefix(Cont0, Cont),
            take_lc(Cont, Seps, [unicode:characters_to_binary([Bin|Used])|Acc]);
        [Bin1|_]=After when is_binary(Bin1) ->
            First = byte_size(Bin) - byte_size(Bin1),
            <<Keep:First/binary, _/binary>> = Bin,
            {btoken(Keep,Acc), After}
    end;
take_lc(Str, {GCs,_,_}=Seps, Acc) when is_list(Str) ->
    case unicode_util:gc(Str) of
        [C|Cs] ->
            case lists:member(C, GCs) of
                false -> take_lc(Cs, Seps, append(rev(C),Acc));
                true -> {rev(Acc), Str}
            end;
        [] -> {rev(Acc), []}
    end;
take_lc(Bin, Seps0, Acc) when is_binary(Bin) ->
    Seps = search_compile(Seps0),
    case bin_search(Bin, [], Seps) of
        {nomatch,_} ->
            {btoken(Bin, Acc), <<>>};
        [After] ->
            First = byte_size(Bin) - byte_size(After),
            <<Keep:First/binary, _/binary>> = Bin,
            {btoken(Keep, Acc), After}
    end.
%% take trailing (Complement=false): split so that Trailing is the
%% longest suffix consisting only of separator clusters. The split
%% point is found by recursing to the end and checking is_empty(Head)
%% on the way back. N is a byte offset to resume scanning in the head
%% binary.
take_t([CP1|Cont]=Str0, _, {GCs,CPs,_}=Seps) when is_integer(CP1) ->
    case lists:member(CP1, CPs) of
        true ->
            [GC|Str] = unicode_util:gc(Str0),
            case lists:member(GC, GCs) of
                true ->
                    {Head, Tail} = take_t(Str, 0, Seps),
                    %% Empty Head: this separator is part of the suffix.
                    case is_empty(Head) of
                        true -> {Head, append(GC,Tail)};
                        false -> {append(GC,Head), Tail}
                    end;
                false ->
                    {Head, Tail} = take_t(Str, 0, Seps),
                    {append(GC,Head), Tail}
            end;
        false ->
            {Head, Tail} = take_t(Cont, 0, Seps),
            {[CP1|Head], Tail}
    end;
take_t([Bin|Cont0], N, {GCs,_,_}=Seps0) when is_binary(Bin) ->
    <<_:N/binary, Rest/binary>> = Bin,
    Seps = search_compile(Seps0),
    case bin_search(Rest, Cont0, Seps) of
        {nomatch,Cont} ->
            Used = cp_prefix(Cont0, Cont),
            {Head, Tail} = take_t(Cont, 0, Seps),
            {stack(unicode:characters_to_binary([Bin|Used]), Head), Tail};
        [SepStart|Cont1] ->
            case bin_search_inv(SepStart, Cont1, GCs) of
                {nomatch, Cont} ->
                    {Head, Tail} = take_t(Cont, 0, Seps),
                    Used = cp_prefix(Cont0, Cont),
                    case is_empty(Head) of
                        true ->
                            %% Suffix starts inside this binary.
                            KeepSz = byte_size(Bin) - byte_size(SepStart),
                            <<Keep:KeepSz/binary, End/binary>> = Bin,
                            {Keep, stack(stack(End,Used),Tail)};
                        false ->
                            {stack(unicode:characters_to_binary([Bin|Used]),Head), Tail}
                    end;
                [NonSep|Cont] when is_binary(NonSep) ->
                    %% Not a suffix; resume the scan past this match.
                    KeepSz = byte_size(Bin) - byte_size(NonSep),
                    take_t([Bin|Cont], KeepSz, Seps)
            end
    end;
take_t(Str, 0, {GCs,_,_}=Seps) when is_list(Str) ->
    case unicode_util:gc(Str) of
        [GC|Cs1] ->
            case lists:member(GC, GCs) of
                true ->
                    {Head, Tail} = take_t(Cs1, 0, Seps),
                    case is_empty(Head) of
                        true -> {Head, append(GC,Tail)};
                        false -> {append(GC,Head), Tail}
                    end;
                false ->
                    {Head, Tail} = take_t(Cs1, 0, Seps),
                    {append(GC,Head), Tail}
            end;
        [] -> {[],[]}
    end;
take_t(Bin, N, {GCs,_,_}=Seps0) when is_binary(Bin) ->
    <<_:N/binary, Rest/binary>> = Bin,
    Seps = search_compile(Seps0),
    case bin_search(Rest, [], Seps) of
        {nomatch,_} -> {Bin, <<>>};
        [SepStart] ->
            case bin_search_inv(SepStart, [], GCs) of
                {nomatch,_} ->
                    KeepSz = byte_size(Bin) - byte_size(SepStart),
                    <<Before:KeepSz/binary, End/binary>> = Bin,
                    {Before, End};
                [NonSep] ->
                    KeepSz = byte_size(Bin) - byte_size(NonSep),
                    take_t(Bin, KeepSz, Seps)
            end
    end.

%% take trailing, complemented: Trailing is the longest suffix of
%% NON-separator clusters; the separator/non-separator roles of
%% bin_search and bin_search_inv are swapped relative to take_t.
take_tc([CP1|[CP2|_]=Cont], _, {GCs,_,_}=Seps) when ?ASCII_LIST(CP1,CP2) ->
    case lists:member(CP1, GCs) of
        false ->
            {Head, Tail} = take_tc(Cont, 0, Seps),
            case is_empty(Head) of
                true -> {Head, append(CP1,Tail)};
                false -> {append(CP1,Head), Tail}
            end;
        true ->
            {Head, Tail} = take_tc(Cont, 0, Seps),
            {append(CP1,Head), Tail}
    end;
take_tc([Bin|Cont0], N, {GCs,_,_}=Seps0) when is_binary(Bin) ->
    <<_:N/binary, Rest/binary>> = Bin,
    case bin_search_inv(Rest, Cont0, GCs) of
        {nomatch,Cont} ->
            Used = cp_prefix(Cont0, Cont),
            {Head, Tail} = take_tc(Cont, 0, Seps0),
            {stack(unicode:characters_to_binary([Bin|Used]), Head), Tail};
        [SepStart|Cont1] ->
            Seps = search_compile(Seps0),
            case bin_search(SepStart, Cont1, Seps) of
                {nomatch, Cont} ->
                    {Head, Tail} = take_tc(Cont, 0, Seps),
                    Used = cp_prefix(Cont0, Cont),
                    case is_empty(Head) of
                        true ->
                            KeepSz = byte_size(Bin) - byte_size(SepStart),
                            <<Keep:KeepSz/binary, End/binary>> = Bin,
                            {Keep, stack(stack(End,Used),Tail)};
                        false ->
                            {stack(unicode:characters_to_binary([Bin|Used]),Head), Tail}
                    end;
                [NonSep|Cont] when is_binary(NonSep) ->
                    KeepSz = byte_size(Bin) - byte_size(NonSep),
                    take_tc([Bin|Cont], KeepSz, Seps)
            end
    end;
take_tc(Str, 0, {GCs,_,_}=Seps) when is_list(Str) ->
    case unicode_util:gc(Str) of
        [GC|Cs1] ->
            case lists:member(GC, GCs) of
                false ->
                    {Head, Tail} = take_tc(Cs1, 0, Seps),
                    case is_empty(Head) of
                        true -> {Head, append(GC,Tail)};
                        false -> {append(GC,Head), Tail}
                    end;
                true ->
                    {Head, Tail} = take_tc(Cs1, 0, Seps),
                    {append(GC,Head), Tail}
            end;
        [] -> {[],[]}
    end;
take_tc(Bin, N, {GCs,_,_}=Seps0) when is_binary(Bin) ->
    <<_:N/binary, Rest/binary>> = Bin,
    case bin_search_inv(Rest, [], GCs) of
        {nomatch,_} -> {Bin, <<>>};
        [SepStart] ->
            Seps = search_compile(Seps0),
            case bin_search(SepStart, [], Seps) of
                {nomatch,_} ->
                    KeepSz = byte_size(Bin) - byte_size(SepStart),
                    <<Before:KeepSz/binary, End/binary>> = Bin,
                    {Before, End};
                [NonSep] ->
                    KeepSz = byte_size(Bin) - byte_size(NonSep),
                    take_tc(Bin, KeepSz, Seps)
            end
    end.

%% Match the codepoint list Pre against the head of Cs0.
%% Returns the rest after the prefix, or nomatch. The last prefix
%% element is matched as a full grapheme cluster (first clause) so a
%% combining mark after it makes the match fail.
prefix_1(Cs0, [GC]) ->
    case unicode_util:gc(Cs0) of
        [GC|Cs] -> Cs;
        _ -> nomatch
    end;
prefix_1([CP|Cs], [Pre|PreR]) when is_integer(CP) ->
    case CP =:= Pre of
        true -> prefix_1(Cs,PreR);
        false -> nomatch
    end;
prefix_1(<<CP/utf8, Cs/binary>>, [Pre|PreR]) ->
    case CP =:= Pre of
        true -> prefix_1(Cs,PreR);
        false -> nomatch
    end;
prefix_1(Cs0, [Pre|PreR]) ->
    case unicode_util:cp(Cs0) of
        [Pre|Cs] -> prefix_1(Cs,PreR);
        _ -> nomatch
    end.

%% Core worker for split/3. Curr accumulates the current part
%% (reversed), Acc the finished parts; Start is a resume byte offset
%% for binary inputs. Return shapes match what split/3 expects per
%% Where: a result list for leading, {LastPart, Parts} otherwise.
split_1([CP1|Cs]=Cs0, [C|_]=Needle, _, Where, Curr, Acc) when is_integer(CP1) ->
    case CP1=:=C of
        true ->
            case prefix_1(Cs0, Needle) of
                nomatch -> split_1(Cs, Needle, 0, Where, append(C,Curr), Acc);
                Rest when Where =:= leading ->
                    [rev(Curr), Rest];
                Rest when Where =:= trailing ->
                    %% Remember this match but keep scanning for a later one.
                    split_1(Cs, Needle, 0, Where, [C|Curr],
                            [rev(Curr), Rest]);
                Rest when Where =:= all ->
                    split_1(Rest, Needle, 0, Where, [], [rev(Curr)|Acc])
            end;
        false ->
            split_1(Cs, Needle, 0, Where, append(CP1,Curr), Acc)
    end;
split_1([Bin|Cont0], Needle, Start, Where, Curr0, Acc) when is_binary(Bin) ->
    case bin_search_str(Bin, Start, Cont0, Needle) of
        {nomatch,Sz,Cont} ->
            <<Keep:Sz/binary, _/binary>> = Bin,
            split_1(Cont, Needle, 0, Where, [Keep|Curr0], Acc);
        {Before, [Cs0|Cont], After} ->
            Curr = add_non_empty(Before,Curr0),
            case Where of
                leading ->
                    [rev(Curr),After];
                trailing ->
                    %% Step one codepoint past the match start and rescan.
                    <<_/utf8, Cs/binary>> = Cs0,
                    Next = byte_size(Bin) - byte_size(Cs),
                    split_1([Bin|Cont], Needle, Next, Where,
                            Curr0, [rev(Curr),After]);
                all ->
                    split_1(After, Needle, 0, Where, [], [rev(Curr)|Acc])
            end
    end;
split_1(Cs0, [C|_]=Needle, _, Where, Curr, Acc) when is_list(Cs0) ->
    case unicode_util:cp(Cs0) of
        [C|Cs] ->
            case prefix_1(Cs0, Needle) of
                nomatch -> split_1(Cs, Needle, 0, Where, append(C,Curr), Acc);
                Rest when Where =:= leading ->
                    [rev(Curr), Rest];
                Rest when Where =:= trailing ->
                    split_1(Cs, Needle, 0, Where, [C|Curr],
                            [rev(Curr), Rest]);
                Rest when Where =:= all ->
                    split_1(Rest, Needle, 0, Where, [], [rev(Curr)|Acc])
            end;
        [Other|Cs] ->
            split_1(Cs, Needle, 0, Where, append(Other,Curr), Acc);
        [] ->
            {rev(Curr), Acc}
    end;
split_1(Bin, [_C|_]=Needle, Start, Where, Curr0, Acc) ->
    case bin_search_str(Bin, Start, [], Needle) of
        {nomatch,_,_} ->
            <<_:Start/binary, Keep/binary>> = Bin,
            {rev([Keep|Curr0]), Acc};
        {Before, [Cs0], After} ->
            case Where of
                leading ->
                    [rev([Before|Curr0]),After];
                trailing ->
                    <<_/utf8, Cs/binary>> = Cs0,
                    Next = byte_size(Bin) - byte_size(Cs),
                    split_1(Bin, Needle, Next, Where, Curr0,
                            [btoken(Before,Curr0),After]);
                all ->
                    Next = byte_size(Bin) - byte_size(After),
                    <<_:Start/binary, Keep/binary>> = Before,
                    Curr = [Keep|Curr0],
                    split_1(Bin, Needle, Next, Where, [], [rev(Curr)|Acc])
            end
    end.

%% Main lexemes loop: skip leading separators, then let lexeme_pick cut
%% out one lexeme, and repeat. Compilation of the separator pattern is
%% deferred until a lexeme is actually picked.
lexemes_m([CP|_]=Cs0, {GCs,CPs,_}=Seps0, Ts) when is_integer(CP) ->
    case lists:member(CP, CPs) of
        true ->
            [GC|Cs2] = unicode_util:gc(Cs0),
            case lists:member(GC, GCs) of
                true ->
                    lexemes_m(Cs2, Seps0, Ts);
                false ->
                    Seps = search_compile(Seps0),
                    {Lexeme,Rest} = lexeme_pick(Cs0, Seps, []),
                    lexemes_m(Rest, Seps, [Lexeme|Ts])
            end;
        false ->
            Seps = search_compile(Seps0),
            {Lexeme,Rest} = lexeme_pick(Cs0, Seps, []),
            lexemes_m(Rest, Seps, [Lexeme|Ts])
    end;
lexemes_m([Bin|Cont0], {GCs,_,_}=Seps0, Ts) when is_binary(Bin) ->
    case bin_search_inv(Bin, Cont0, GCs) of
        {nomatch,Cont} ->
            lexemes_m(Cont, Seps0, Ts);
        Cs ->
            Seps = search_compile(Seps0),
            {Lexeme,Rest} = lexeme_pick(Cs, Seps, []),
            lexemes_m(Rest, Seps, [Lexeme|Ts])
    end;
lexemes_m(Cs0, {GCs, _, _}=Seps0, Ts) when is_list(Cs0) ->
    case unicode_util:gc(Cs0) of
        [C|Cs] ->
            case lists:member(C, GCs) of
                true ->
                    lexemes_m(Cs, Seps0, Ts);
                false ->
                    Seps = search_compile(Seps0),
                    {Lexeme,Rest} = lexeme_pick(Cs0, Seps, []),
                    lexemes_m(Rest, Seps, [Lexeme|Ts])
            end;
        [] ->
            lists:reverse(Ts)
    end;
lexemes_m(Bin, {GCs,_,_}=Seps0, Ts) when is_binary(Bin) ->
    case bin_search_inv(Bin, [], GCs) of
        {nomatch,_} ->
            lists:reverse(Ts);
        [Cs] ->
            Seps = search_compile(Seps0),
            {Lexeme,Rest} = lexeme_pick(Cs, Seps, []),
            lexemes_m(Rest, Seps, add_non_empty(Lexeme,Ts))
    end.
%% Cut one lexeme from the head of the input: accumulate (reversed in
%% Tkn) until the first separator grapheme cluster; return
%% {Lexeme, Rest}. Expects a compiled separator pattern.
lexeme_pick([CP|Cs1]=Cs0, {GCs,CPs,_}=Seps, Tkn) when is_integer(CP) ->
    case lists:member(CP, CPs) of
        true -> %% Needs to be tested on code point level
            [GC|Cs2] = unicode_util:gc(Cs0),
            case lists:member(GC, GCs) of
                true -> {rev(Tkn), Cs2};
                false -> lexeme_pick(Cs2, Seps, append(rev(GC),Tkn))
            end;
        false ->
            lexeme_pick(Cs1, Seps, [CP|Tkn])
    end;
lexeme_pick([Bin|Cont0], Seps, Tkn) when is_binary(Bin) ->
    case bin_search(Bin, Cont0, Seps) of
        {nomatch,_} ->
            %% No separator in this binary; take it whole and continue.
            lexeme_pick(Cont0, Seps, [Bin|Tkn]);
        [Left|_Cont] = Cs ->
            Bytes = byte_size(Bin) - byte_size(Left),
            <<Lexeme:Bytes/binary, _/binary>> = Bin,
            {btoken(Lexeme, Tkn), Cs}
    end;
lexeme_pick(Cs0, {GCs, CPs, _} = Seps, Tkn) when is_list(Cs0) ->
    case unicode_util:cp(Cs0) of
        [CP|Cs] ->
            case lists:member(CP, CPs) of
                true ->
                    [GC|Cs2] = unicode_util:gc(Cs0),
                    case lists:member(GC, GCs) of
                        true -> {rev(Tkn), Cs2};
                        false -> lexeme_pick(Cs2, Seps, append(rev(GC),Tkn))
                    end;
                false ->
                    lexeme_pick(Cs, Seps, append(CP,Tkn))
            end;
        [] ->
            {rev(Tkn), []}
    end;
lexeme_pick(Bin, Seps, Tkn) when is_binary(Bin) ->
    case bin_search(Bin, [], Seps) of
        {nomatch,_} ->
            {btoken(Bin,Tkn), []};
        [Left] ->
            Bytes = byte_size(Bin) - byte_size(Left),
            <<Lexeme:Bytes/binary, _/binary>> = Bin,
            {btoken(Lexeme, Tkn), Left}
    end.
%% Like lexemes_m but stops at the N:th lexeme; earlier lexemes are
%% skipped with lexeme_skip (no token building), only the N:th is
%% materialized with lexeme_pick.
nth_lexeme_m([Bin|Cont0], {GCs,_,_}=Seps0, N) when is_binary(Bin) ->
    case bin_search_inv(Bin, Cont0, GCs) of
        {nomatch,Cont} ->
            nth_lexeme_m(Cont, Seps0, N);
        Cs when N > 1 ->
            Rest = lexeme_skip(Cs, Seps0),
            nth_lexeme_m(Rest, Seps0, N-1);
        Cs ->
            Seps = search_compile(Seps0),
            {Lexeme,_} = lexeme_pick(Cs, Seps, []),
            Lexeme
    end;
nth_lexeme_m(Cs0, {GCs, _, _}=Seps0, N) when is_list(Cs0) ->
    case unicode_util:gc(Cs0) of
        [C|Cs] ->
            case lists:member(C, GCs) of
                true ->
                    nth_lexeme_m(Cs, Seps0, N);
                false when N > 1 ->
                    Cs1 = lexeme_skip(Cs, Seps0),
                    nth_lexeme_m(Cs1, Seps0, N-1);
                false ->
                    Seps = search_compile(Seps0),
                    {Lexeme,_} = lexeme_pick(Cs0, Seps, []),
                    Lexeme
            end;
        [] -> []
    end;
nth_lexeme_m(Bin, {GCs,_,_}=Seps0, N) when is_binary(Bin) ->
    Seps = search_compile(Seps0),
    case bin_search_inv(Bin, [], GCs) of
        [Cs] when N > 1 ->
            Cs1 = lexeme_skip(Cs, Seps),
            nth_lexeme_m(Cs1, Seps, N-1);
        [Cs] ->
            {Lexeme,_} = lexeme_pick(Cs, Seps, []),
            Lexeme;
        {nomatch,_} ->
            <<>>
    end.

%% Advance past one lexeme (up to and including its terminating
%% separator cluster) without building the lexeme itself.
lexeme_skip([CP|Cs1]=Cs0, {GCs,CPs,_}=Seps) when is_integer(CP) ->
    case lists:member(CP, CPs) of
        true ->
            [GC|Cs2] = unicode_util:gc(Cs0),
            case lists:member(GC, GCs) of
                true -> Cs2;
                false -> lexeme_skip(Cs2, Seps)
            end;
        false ->
            lexeme_skip(Cs1, Seps)
    end;
lexeme_skip([Bin|Cont0], Seps0) when is_binary(Bin) ->
    Seps = search_compile(Seps0),
    case bin_search(Bin, Cont0, Seps) of
        {nomatch,_} -> lexeme_skip(Cont0, Seps);
        Cs -> tl(unicode_util:gc(Cs))
    end;
lexeme_skip(Cs0, {GCs, CPs, _} = Seps) when is_list(Cs0) ->
    case unicode_util:cp(Cs0) of
        [CP|Cs] ->
            case lists:member(CP, CPs) of
                true ->
                    [GC|Cs2] = unicode_util:gc(Cs0),
                    case lists:member(GC, GCs) of
                        true -> Cs2;
                        false -> lexeme_skip(Cs2, Seps)
                    end;
                false ->
                    lexeme_skip(Cs, Seps)
            end;
        [] ->
            []
    end;
lexeme_skip(Bin, Seps0) when is_binary(Bin) ->
    Seps = search_compile(Seps0),
    case bin_search(Bin, [], Seps) of
        {nomatch,_} -> <<>>;
        [Left] -> tl(unicode_util:gc(Left))
    end.
%% Leading find: return the input starting at the FIRST occurrence of
%% Needle (a codepoint list), or nomatch.
find_l([C1|Cs]=Cs0, [C|_]=Needle) when is_integer(C1) ->
    case C1 of
        C ->
            case prefix_1(Cs0, Needle) of
                nomatch -> find_l(Cs, Needle);
                _ -> Cs0
            end;
        _ ->
            find_l(Cs, Needle)
    end;
find_l([Bin|Cont0], Needle) when is_binary(Bin) ->
    case bin_search_str(Bin, 0, Cont0, Needle) of
        {nomatch, _, Cont} ->
            find_l(Cont, Needle);
        {_Before, Cs, _After} ->
            Cs
    end;
find_l(Cs0, [C|_]=Needle) when is_list(Cs0) ->
    case unicode_util:cp(Cs0) of
        [C|Cs] ->
            case prefix_1(Cs0, Needle) of
                nomatch -> find_l(Cs, Needle);
                _ -> Cs0
            end;
        [_C|Cs] ->
            find_l(Cs, Needle);
        [] -> nomatch
    end;
find_l(Bin, Needle) ->
    case bin_search_str(Bin, 0, [], Needle) of
        {nomatch,_,_} -> nomatch;
        {_Before, [Cs], _After} -> Cs
    end.

%% Trailing find: scan the whole string, remembering in Res the start
%% of the LAST occurrence seen so far; return Res at the end.
find_r([Cp|Cs]=Cs0, [C|_]=Needle, Res) when is_integer(Cp) ->
    case Cp of
        C ->
            case prefix_1(Cs0, Needle) of
                nomatch -> find_r(Cs, Needle, Res);
                _ -> find_r(Cs, Needle, Cs0)
            end;
        _ ->
            find_r(Cs, Needle, Res)
    end;
find_r([Bin|Cont0], Needle, Res) when is_binary(Bin) ->
    case bin_search_str(Bin, 0, Cont0, Needle) of
        {nomatch,_,Cont} ->
            find_r(Cont, Needle, Res);
        {_, Cs0, _} ->
            %% Step one cluster past the match start and keep looking.
            [_|Cs] = unicode_util:gc(Cs0),
            find_r(Cs, Needle, Cs0)
    end;
find_r(Cs0, [C|_]=Needle, Res) when is_list(Cs0) ->
    case unicode_util:cp(Cs0) of
        [C|Cs] ->
            case prefix_1(Cs0, Needle) of
                nomatch -> find_r(Cs, Needle, Res);
                _ -> find_r(Cs, Needle, Cs0)
            end;
        [_C|Cs] ->
            find_r(Cs, Needle, Res);
        [] -> Res
    end;
find_r(Bin, Needle, Res) ->
    case bin_search_str(Bin, 0, [], Needle) of
        {nomatch,_,_} -> Res;
        {_Before, [Cs0], _After} ->
            <<_/utf8, Cs/binary>> = Cs0,
            find_r(Cs, Needle, Cs0)
    end.

%% These are used to avoid creating lists around binaries
%% might be unnecessary, is there a better solution?
%% Combine a binary piece with a reversed char accumulator into a token.
btoken(Token, []) -> Token;
btoken(BinPart, [C]) when is_integer(C) -> <<C/utf8, BinPart/binary>>;
btoken(<<>>, Tkn) -> lists:reverse(Tkn);
btoken(BinPart, Cs) -> [lists:reverse(Cs),BinPart].

%% Reverse an accumulator; single binaries and bare chars pass through.
rev([B]) when is_binary(B) -> B;
rev(L) when is_list(L) -> lists:reverse(L);
rev(C) when is_integer(C) -> C.
%% Prepend a code point or grapheme cluster (list) to chardata, producing
%% an iolist-like result without flattening binaries.
append(Char, <<>>) when is_integer(Char) -> [Char];
append(Char, <<>>) when is_list(Char) -> Char;
append(Char, Bin) when is_binary(Bin) -> [Char,Bin];
append(Char, Str) when is_integer(Char) -> [Char|Str];
append(GC, Str) when is_list(GC) -> GC ++ Str.

%% Push a (possibly empty) chardata chunk onto a continuation stack,
%% dropping empty chunks.
stack(Bin, []) -> Bin;
stack(<<>>, St) -> St;
stack([], St) -> St;
stack(Bin, St) -> [Bin|St].

%% Cons Token onto L unless it is the empty binary.
add_non_empty(<<>>, L) -> L;
add_non_empty(Token, L) -> [Token|L].

%% Return the prefix of Orig that precedes the point where it becomes
%% equal to Cont (used to recover the part consumed before a match).
cp_prefix(Orig, Cont) ->
    case unicode_util:cp(Cont) of
        [] -> Orig;
        [Cp|Rest] -> cp_prefix_1(Orig, Cp, Rest)
    end.

%% Until is the first code point of Cont; only when it matches do we pay
%% for the full equality check.
cp_prefix_1(Orig, Until, Cont) ->
    case unicode_util:cp(Orig) of
        [Until|Rest] ->
            case equal(Rest, Cont) of
                true -> [];
                false -> [Until|cp_prefix_1(Rest, Until, Cont)]
            end;
        [CP|Rest] -> [CP|cp_prefix_1(Rest, Until, Cont)]
    end.

%% Binary special
%%
%% Search Bin (with continuation Cont) for any separator; the third field
%% of the compiled pattern tuple is a binary:compile_pattern/1 result.
bin_search(Bin, Cont, {Seps,_,BP}) ->
    bin_search_loop(Bin, 0, BP, Cont, Seps).

%% Need to work with [<<$a>>, <<778/utf8>>],
%% i.e. å in nfd form $a "COMBINING RING ABOVE"
%% and PREPEND characters like "ARABIC NUMBER SIGN" 1536 <<216,128>>
%% combined with other characters are currently ignored.
%%
%% Normalize separators into {Seps, LeadingCodePoints, MaybeCompiled};
%% the binary pattern is compiled lazily by search_compile/1.
search_pattern({_,_,_}=P) -> P;
search_pattern(Seps) ->
    CPs = search_cp(Seps),
    {Seps, CPs, undefined}.

%% Compile the binary pattern on first use; idempotent thereafter.
search_compile({Sep, CPs, undefined}) ->
    {Sep, CPs, binary:compile_pattern(bin_pattern(CPs))};
search_compile({_,_,_}=Compiled) -> Compiled.

%% First code point of each separator (a separator may be a whole
%% grapheme cluster).
search_cp([CP|Seps]) when is_integer(CP) ->
    [CP|search_cp(Seps)];
search_cp([Pattern|Seps]) ->
    [CP|_] = unicode_util:cp(Pattern),
    [CP|search_cp(Seps)];
search_cp([]) -> [].

%% UTF-8 encode each leading code point for binary:compile_pattern/1.
bin_pattern([CP|Seps]) ->
    [<<CP/utf8>>|bin_pattern(Seps)];
bin_pattern([]) -> [].
%% Scan Bin0 from byte offset Start for a separator grapheme cluster.
%% BinSeps is the compiled binary pattern of the separators' leading code
%% points; a binary:match/2 hit must still be confirmed on grapheme level
%% (the matched code point may start a larger cluster).
%% Returns [Rest] / [Rest|Cont] positioned at the separator, or
%% {nomatch, Continuation}.
bin_search_loop(Bin0, Start, _, Cont, _Seps)
  when byte_size(Bin0) =< Start; Start < 0 ->
    {nomatch, Cont};
bin_search_loop(Bin0, Start, BinSeps, Cont, Seps) ->
    <<_:Start/binary, Bin/binary>> = Bin0,
    case binary:match(Bin, BinSeps) of
        nomatch ->
            {nomatch,Cont};
        {Where, _CL} when Cont =:= [] ->
            <<_:Where/binary, Cont1/binary>> = Bin,
            [GC|Cont2] = unicode_util:gc(Cont1),
            case lists:member(GC, Seps) of
                false when Cont2 =:= [] ->
                    %% False positive at the very end of input.
                    {nomatch, []};
                false ->
                    %% False positive: resume just after this cluster.
                    Next = byte_size(Bin0) - byte_size(Cont2),
                    bin_search_loop(Bin0, Next, BinSeps, Cont, Seps);
                true ->
                    [Cont1]
            end;
        {Where, _CL} ->
            <<_:Where/binary, Cont0/binary>> = Bin,
            Cont1 = [Cont0|Cont],
            [GC|Cont2] = unicode_util:gc(Cont1),
            case lists:member(GC, Seps) of
                false ->
                    %% NB: Cont is already bound, so this inner pattern
                    %% only matches when the cluster was consumed entirely
                    %% within this binary (tail is a binary over the same
                    %% continuation); otherwise the cluster straddled into
                    %% Cont and we give up on this binary chunk.
                    case Cont2 of
                        [BinR|Cont] when is_binary(BinR) ->
                            Next = byte_size(Bin0) - byte_size(BinR),
                            bin_search_loop(Bin0, Next, BinSeps, Cont, Seps);
                        _ ->
                            {nomatch, Cont2}
                    end;
                true ->
                    Cont1
            end
    end.

%% Inverse search: find the first grapheme cluster that is NOT one of the
%% separators.  Dispatches on separator count (single-separator fast path).
bin_search_inv(<<>>, Cont, _) ->
    {nomatch, Cont};
bin_search_inv(Bin, Cont, [Sep]) ->
    bin_search_inv_1(Bin, Cont, Sep);
bin_search_inv(Bin, Cont, Seps) ->
    bin_search_inv_n(Bin, Cont, Seps).

%% Single-separator inverse search.
bin_search_inv_1(<<CP1/utf8, BinRest/binary>>=Bin0, Cont, Sep) ->
    case BinRest of
        %% ?ASCII_LIST (macro defined earlier in the file) appears to
        %% guarantee CP1 cannot combine with what follows, allowing a
        %% plain code-point comparison.
        <<CP2/utf8, _/binary>> when ?ASCII_LIST(CP1, CP2) ->
            case CP1 of
                Sep -> bin_search_inv_1(BinRest, Cont, Sep);
                _ -> [Bin0|Cont]
            end;
        _ when Cont =:= [] ->
            case unicode_util:gc(Bin0) of
                [Sep|Bin] -> bin_search_inv_1(Bin, Cont, Sep);
                _ -> [Bin0|Cont]
            end;
        _ ->
            %% Cluster may straddle into the continuation; Cont is bound,
            %% so the first pattern only matches when it did not.
            case unicode_util:gc([Bin0|Cont]) of
                [Sep|[Bin|Cont]] when is_binary(Bin) ->
                    bin_search_inv_1(Bin, Cont, Sep);
                [Sep|Cs] ->
                    {nomatch, Cs};
                _ -> [Bin0|Cont]
            end
    end;
bin_search_inv_1(<<>>, Cont, _Sep) ->
    {nomatch, Cont};
bin_search_inv_1([], Cont, _Sep) ->
    {nomatch, Cont}.
%% Multi-separator inverse search; same structure as bin_search_inv_1/3
%% but with lists:member/2 over the separator set.
bin_search_inv_n(<<CP1/utf8, BinRest/binary>>=Bin0, Cont, Seps) ->
    case BinRest of
        <<CP2/utf8, _/binary>> when ?ASCII_LIST(CP1, CP2) ->
            case lists:member(CP1,Seps) of
                true -> bin_search_inv_n(BinRest, Cont, Seps);
                false -> [Bin0|Cont]
            end;
        _ when Cont =:= [] ->
            [GC|Bin] = unicode_util:gc(Bin0),
            case lists:member(GC, Seps) of
                true -> bin_search_inv_n(Bin, Cont, Seps);
                false -> [Bin0|Cont]
            end;
        _ ->
            [GC|Cs0] = unicode_util:gc([Bin0|Cont]),
            case lists:member(GC, Seps) of
                false -> [Bin0|Cont];
                true ->
                    %% Cont is bound: the pattern only matches when the
                    %% separator cluster stayed within this binary.
                    case Cs0 of
                        [Bin|Cont] when is_binary(Bin) ->
                            bin_search_inv_n(Bin, Cont, Seps);
                        _ -> {nomatch, Cs0}
                    end
            end
    end;
bin_search_inv_n(<<>>, Cont, _Sep) ->
    {nomatch, Cont};
bin_search_inv_n([], Cont, _Sep) ->
    {nomatch, Cont}.

%% Substring search in a binary.  With no continuation the whole needle
%% can be compiled; with a continuation only the first code point is used
%% as an anchor and the rest is verified with prefix_1/2.
bin_search_str(Bin0, Start, [], SearchCPs) ->
    Compiled = binary:compile_pattern(unicode:characters_to_binary(SearchCPs)),
    bin_search_str_1(Bin0, Start, Compiled, SearchCPs);
bin_search_str(Bin0, Start, Cont, [CP|_]=SearchCPs) ->
    First = binary:compile_pattern(<<CP/utf8>>),
    bin_search_str_2(Bin0, Start, Cont, First, SearchCPs).

%% Whole-needle variant.  Returns {Before, [MatchSuffix], After} or
%% {nomatch, ScannedBytes, Continuation}.
bin_search_str_1(Bin0, Start, First, SearchCPs) ->
    <<_:Start/binary, Bin/binary>> = Bin0,
    case binary:match(Bin, First) of
        nomatch -> {nomatch, byte_size(Bin0), []};
        {Where0, _} ->
            Where = Start+Where0,
            <<Keep:Where/binary, Cs0/binary>> = Bin0,
            %% Verify on code-point level (binary match can hit a byte
            %% sequence that is not a real prefix match).
            case prefix_1(Cs0, SearchCPs) of
                nomatch ->
                    <<_/utf8, Cs/binary>> = Cs0,
                    KeepSz = byte_size(Bin0) - byte_size(Cs),
                    bin_search_str_1(Bin0, KeepSz, First, SearchCPs);
                [] ->
                    {Keep, [Cs0], <<>>};
                Rest ->
                    {Keep, [Cs0], Rest}
            end
    end.
%% Anchored variant of bin_search_str_1/4 for input with a continuation:
%% First matches only the needle's first code point; the full needle is
%% then verified across the binary + continuation with prefix_1/2.
bin_search_str_2(Bin0, Start, Cont, First, SearchCPs) ->
    <<_:Start/binary, Bin/binary>> = Bin0,
    case binary:match(Bin, First) of
        nomatch -> {nomatch, byte_size(Bin0), Cont};
        {Where0, _} ->
            Where = Start+Where0,
            <<Keep:Where/binary, Cs0/binary>> = Bin0,
            [GC|Cs] = unicode_util:gc(Cs0),
            case prefix_1(stack(Cs0,Cont), SearchCPs) of
                nomatch when is_binary(Cs) ->
                    %% False anchor inside this binary: continue after it.
                    KeepSz = byte_size(Bin0) - byte_size(Cs),
                    bin_search_str_2(Bin0, KeepSz, Cont, First, SearchCPs);
                nomatch ->
                    %% Anchor's cluster straddled into the continuation;
                    %% caller must resume with the rebuilt chardata.
                    {nomatch, Where, stack([GC|Cs],Cont)};
                [] ->
                    {Keep, [Cs0|Cont], <<>>};
                Rest ->
                    {Keep, [Cs0|Cont], Rest}
            end
    end.

%%---------------------------------------------------------------------------
%% OLD lists API kept for backwards compatibility
%%---------------------------------------------------------------------------

%% Robert's bit

%% len(String)
%%  Return the length of a string.

-spec len(String) -> Length when
      String :: string(),
      Length :: non_neg_integer().

len(S) -> erlang:length(S).

%% equal(String1, String2)
%%  Test if 2 strings are equal.

%% -spec equal(String1, String2) -> boolean() when
%%       String1 :: string(),
%%       String2 :: string().

%% equal(S, S) -> true;
%% equal(_, _) -> false.

%% concat(String1, String2)
%%  Concatenate 2 strings.

-spec concat(String1, String2) -> String3 when
      String1 :: string(),
      String2 :: string(),
      String3 :: string().

concat(S1, S2) -> S1 ++ S2.

%% chr(String, Char)
%% rchr(String, Char)
%%  Return the first/last index of the character in a string.
%%  0 means "not found".

-spec chr(String, Character) -> Index when
      String :: string(),
      Character :: char(),
      Index :: non_neg_integer().

chr(S, C) when is_integer(C) -> chr(S, C, 1).

chr([C|_Cs], C, I) -> I;
chr([_|Cs], C, I) -> chr(Cs, C, I+1);
chr([], _C, _I) -> 0.

-spec rchr(String, Character) -> Index when
      String :: string(),
      Character :: char(),
      Index :: non_neg_integer().

rchr(S, C) when is_integer(C) -> rchr(S, C, 1, 0).

%% L carries the index of the last occurrence seen so far.
rchr([C|Cs], C, I, _L) ->                       %Found one, now find next!
    rchr(Cs, C, I+1, I);
rchr([_|Cs], C, I, L) ->
    rchr(Cs, C, I+1, L);
rchr([], _C, _I, L) -> L.

%% str(String, SubString)
%% rstr(String, SubString)
%% index(String, SubString)
%%  Return the first/last index of the sub-string in a string.
%%  index/2 is kept for backwards compatibility.

-spec str(String, SubString) -> Index when
      String :: string(),
      SubString :: string(),
      Index :: non_neg_integer().

str(S, Sub) when is_list(Sub) -> str(S, Sub, 1).

str([C|S], [C|Sub], I) ->
    case l_prefix(Sub, S) of
        true -> I;
        false -> str(S, [C|Sub], I+1)
    end;
str([_|S], Sub, I) -> str(S, Sub, I+1);
str([], _Sub, _I) -> 0.

-spec rstr(String, SubString) -> Index when
      String :: string(),
      SubString :: string(),
      Index :: non_neg_integer().

rstr(S, Sub) when is_list(Sub) -> rstr(S, Sub, 1, 0).

%% Like str/3 but remembers the index of the last match in L.
rstr([C|S], [C|Sub], I, L) ->
    case l_prefix(Sub, S) of
        true -> rstr(S, [C|Sub], I+1, I);
        false -> rstr(S, [C|Sub], I+1, L)
    end;
rstr([_|S], Sub, I, L) -> rstr(S, Sub, I+1, L);
rstr([], _Sub, _I, L) -> L.

%% True when the first list is a prefix of the second.
l_prefix([C|Pre], [C|String]) -> l_prefix(Pre, String);
l_prefix([], String) when is_list(String) -> true;
l_prefix(Pre, String) when is_list(Pre), is_list(String) -> false.

%% span(String, Chars) -> Length.
%% cspan(String, Chars) -> Length.
%%  Length of the longest prefix consisting only of (span) / of no (cspan)
%%  characters from Chars.

-spec span(String, Chars) -> Length when
      String :: string(),
      Chars :: string(),
      Length :: non_neg_integer().

span(S, Cs) when is_list(Cs) -> span(S, Cs, 0).

span([C|S], Cs, I) ->
    case member(C, Cs) of
        true -> span(S, Cs, I+1);
        false -> I
    end;
span([], _Cs, I) -> I.

-spec cspan(String, Chars) -> Length when
      String :: string(),
      Chars :: string(),
      Length :: non_neg_integer().

cspan(S, Cs) when is_list(Cs) -> cspan(S, Cs, 0).

cspan([C|S], Cs, I) ->
    case member(C, Cs) of
        true -> I;
        false -> cspan(S, Cs, I+1)
    end;
cspan([], _Cs, I) -> I.

%% substr(String, Start)
%% substr(String, Start, Length)
%%  Extract a sub-string from String.

-spec substr(String, Start) -> SubString when
      String :: string(),
      SubString :: string(),
      Start :: pos_integer().
%% Suffix of String starting at the 1-based position Start.
substr(String, 1) when is_list(String) -> String;
substr(String, S) when is_integer(S), S > 1 ->
    drop_to(String, S).

-spec substr(String, Start, Length) -> SubString when
      String :: string(),
      SubString :: string(),
      Start :: pos_integer(),
      Length :: non_neg_integer().

%% At most Length characters of String, starting at position Start.
substr(String, S, L) when is_integer(S), S >= 1, is_integer(L), L >= 0 ->
    take(drop_to(String, S), L).

%% First L characters (fewer if the string is shorter — be nice).
take([C | Rest], L) when L > 0 -> [C | take(Rest, L - 1)];
take(String, _L) when is_list(String) -> [].

%% Drop characters until 1-based position Pos is reached.
drop_to(String, 1) when is_list(String) -> String;
drop_to([_ | Rest], Pos) -> drop_to(Rest, Pos - 1).

%% tokens(String, Separators)
%%  Split String on any character from Separators; empty tokens are
%%  never produced (runs of separators collapse).

-spec tokens(String, SeparatorList) -> Tokens when
      String :: string(),
      SeparatorList :: string(),
      Tokens :: [Token :: nonempty_string()].

tokens(S, Seps) ->
    case Seps of
        [] ->
            %% No separators: the whole (non-empty) string is one token.
            case S of
                [] -> [];
                [_|_] -> [S]
            end;
        [C] ->
            %% Fast path for a single separator character.
            tokens_one(S, C, []);
        [_|_] ->
            tokens_many(S, Seps, [])
    end.

%% Single separator: skip a separator run, then collect a token.
tokens_one([Sep | Rest], Sep, Toks) ->
    tokens_one(Rest, Sep, Toks);
tokens_one([C | Rest], Sep, Toks) ->
    tokens_one_tok(Rest, Sep, Toks, [C]);
tokens_one([], _Sep, Toks) ->
    lists:reverse(Toks).

tokens_one_tok([Sep | Rest], Sep, Toks, Tok) ->
    tokens_one(Rest, Sep, [lists:reverse(Tok) | Toks]);
tokens_one_tok([C | Rest], Sep, Toks, Tok) ->
    tokens_one_tok(Rest, Sep, Toks, [C | Tok]);
tokens_one_tok([], _Sep, Toks, Tok) ->
    lists:reverse([lists:reverse(Tok) | Toks]).

%% Several separators: membership test per character.
tokens_many([C | Rest], Seps, Toks) ->
    case lists:member(C, Seps) of
        true -> tokens_many(Rest, Seps, Toks);
        false -> tokens_many_tok(Rest, Seps, Toks, [C])
    end;
tokens_many([], _Seps, Toks) ->
    lists:reverse(Toks).

tokens_many_tok([C | Rest], Seps, Toks, Tok) ->
    case lists:member(C, Seps) of
        true -> tokens_many(Rest, Seps, [lists:reverse(Tok) | Toks]);
        false -> tokens_many_tok(Rest, Seps, Toks, [C | Tok])
    end;
tokens_many_tok([], _Seps, Toks, Tok) ->
    lists:reverse([lists:reverse(Tok) | Toks]).

-spec chars(Character, Number) -> String when
      Character :: char(),
      Number :: non_neg_integer(),
      String :: string().

%% A string of Number copies of the character C.
chars(C, N) -> chars(C, N, []).
-spec chars(Character, Number, Tail) -> String when
      Character :: char(),
      Number :: non_neg_integer(),
      Tail :: string(),
      String :: string().

%% N copies of C prepended to Tail.
chars(C, N, Tail) when N > 0 ->
    chars(C, N-1, [C|Tail]);
chars(C, 0, Tail) when is_integer(C) ->
    Tail.

%% Torbjörn's bit.

%%% COPIES %%%

-spec copies(String, Number) -> Copies when
      String :: string(),
      Copies :: string(),
      Number :: non_neg_integer().

%% Num concatenated copies of CharList.
copies(CharList, Num) when is_list(CharList), is_integer(Num), Num >= 0 ->
    copies(CharList, Num, []).

copies(_CharList, 0, R) -> R;
copies(CharList, Num, R) ->
    copies(CharList, Num-1, CharList++R).

%%% WORDS %%%

-spec words(String) -> Count when
      String :: string(),
      Count :: pos_integer().

%% Count blank-separated words (always at least 1, even for "").
words(String) -> words(String, $\s).

-spec words(String, Character) -> Count when
      String :: string(),
      Character :: char(),
      Count :: pos_integer().

words(String, Char) when is_integer(Char) ->
    w_count(strip(String, both, Char), Char, 0).

%% Separator runs are collapsed via strip(.., left, ..) after each hit.
w_count([], _, Num) -> Num+1;
w_count([H|T], H, Num) -> w_count(strip(T, left, H), H, Num+1);
w_count([_H|T], Char, Num) -> w_count(T, Char, Num).

%%% SUB_WORDS %%%

-spec sub_word(String, Number) -> Word when
      String :: string(),
      Word :: string(),
      Number :: integer().

%% The Index:th blank-separated word, or [] when out of range.
sub_word(String, Index) -> sub_word(String, Index, $\s).

-spec sub_word(String, Number, Character) -> Word when
      String :: string(),
      Word :: string(),
      Number :: integer(),
      Character :: char().

sub_word(String, Index, Char) when is_integer(Index), is_integer(Char) ->
    case words(String, Char) of
        Num when Num < Index ->
            [];
        _Num ->
            s_word(strip(String, left, Char), Index, Char, 1, [])
    end.

%% Walk word by word; while Index < Stop skip words, once at word Stop
%% accumulate characters until the next separator (or end of string).
s_word([], _, _, _,Res) -> lists:reverse(Res);
s_word([Char|_],Index,Char,Index,Res) -> lists:reverse(Res);
s_word([H|T],Index,Char,Index,Res) -> s_word(T,Index,Char,Index,[H|Res]);
s_word([Char|T],Stop,Char,Index,Res) when Index < Stop ->
    s_word(strip(T,left,Char),Stop,Char,Index+1,Res);
s_word([_|T],Stop,Char,Index,Res) when Index < Stop ->
    s_word(T,Stop,Char,Index,Res).

%%% STRIP %%%

-spec strip(string()) -> string().
%% Strip blanks from both ends.
strip(String) -> strip(String, both).

-spec strip(String, Direction) -> Stripped when
      String :: string(),
      Stripped :: string(),
      Direction :: 'left' | 'right' | 'both'.

%% Strip blanks from the given end(s); delegates to strip/3.
strip(String, Direction) -> strip(String, Direction, $\s).

-spec strip(String, Direction, Character) -> Stripped when
      String :: string(),
      Stripped :: string(),
      Direction :: 'left' | 'right' | 'both',
      Character :: char().

strip(String, right, Char) -> strip_right(String, Char);
strip(String, left, Char) -> strip_left(String, Char);
strip(String, both, Char) ->
    strip(strip(String, left, Char), right, Char).

%% Drop every leading occurrence of the pad character.
strip_left([Pad | Rest], Pad) -> strip_left(Rest, Pad);
strip_left([_ | _] = Str, Pad) when is_integer(Pad) -> Str;
strip_left([], Pad) when is_integer(Pad) -> [].

%% Drop every trailing occurrence of the pad character: a run of pads is
%% kept only when something non-pad follows it.
strip_right([Pad | Rest], Pad) ->
    case strip_right(Rest, Pad) of
        [] -> [];
        Kept -> [Pad | Kept]
    end;
strip_right([Other | Rest], Pad) ->
    [Other | strip_right(Rest, Pad)];
strip_right([], Pad) when is_integer(Pad) ->
    [].

%%% LEFT %%%

-spec left(String, Number) -> Left when
      String :: string(),
      Left :: string(),
      Number :: non_neg_integer().

%% Left-adjust String in a field of width Len, padding with blanks.
left(String, Len) when is_integer(Len) -> left(String, Len, $\s).

-spec left(String, Number, Character) -> Left when
      String :: string(),
      Left :: string(),
      Number :: non_neg_integer(),
      Character :: char().

%% Truncate or right-pad String to exactly Len characters.
left(String, Len, Char) when is_integer(Char) ->
    case erlang:length(String) of
        Len -> String;                            % already the right width
        Width when Width > Len -> substr(String, 1, Len);
        Width -> l_pad(String, Len - Width, Char)
    end.

%% Append Num pad characters on the right.
l_pad(Str, Num, Char) -> lists:append(Str, chars(Char, Num)).

%%% RIGHT %%%

-spec right(String, Number) -> Right when
      String :: string(),
      Right :: string(),
      Number :: non_neg_integer().

%% Right-adjust String in a field of width Len, padding with blanks.
right(String, Len) when is_integer(Len) -> right(String, Len, $\s).

-spec right(String, Number, Character) -> Right when
      String :: string(),
      Right :: string(),
      Number :: non_neg_integer(),
      Character :: char().
%% Truncate (keeping the tail) or left-pad String to exactly Len characters.
right(String, Len, Char) when is_integer(Char) ->
    case erlang:length(String) of
        Len -> String;                            % already the right width
        Width when Width > Len -> substr(String, Width - Len + 1);
        Width -> r_pad(String, Len - Width, Char)
    end.

%% Prepend Num pad characters on the left.
r_pad(Str, Num, Char) -> chars(Char, Num, Str).

%%% CENTRE %%%

-spec centre(String, Number) -> Centered when
      String :: string(),
      Centered :: string(),
      Number :: non_neg_integer().

%% Centre String in a field of width Len, padding with blanks.
centre(String, Len) when is_integer(Len) -> centre(String, Len, $\s).

-spec centre(String, Number, Character) -> Centered when
      String :: string(),
      Centered :: string(),
      Number :: non_neg_integer(),
      Character :: char().

%% Strange case to centre a string in a zero-width field.
centre(String, 0, Char) when is_list(String), is_integer(Char) ->
    [];
centre(String, Len, Char) when is_integer(Char) ->
    case erlang:length(String) of
        Len ->
            String;
        Width when Width > Len ->
            %% Too long: keep the middle Len characters.
            substr(String, (Width - Len) div 2 + 1, Len);
        Width ->
            %% Too short: pad both sides; any odd extra pad goes left.
            RightPad = (Len - Width) div 2,
            LeftPad = Len - Width - RightPad,
            r_pad(l_pad(String, LeftPad, Char), RightPad, Char)
    end.

%%% SUB_STRING %%%

-spec sub_string(String, Start) -> SubString when
      String :: string(),
      SubString :: string(),
      Start :: pos_integer().

%% Suffix starting at Start (old API; wraps substr/2).
sub_string(String, Start) -> substr(String, Start).

-spec sub_string(String, Start, Stop) -> SubString when
      String :: string(),
      SubString :: string(),
      Start :: pos_integer(),
      Stop :: pos_integer().

%% Inclusive slice from Start to Stop (old API; wraps substr/3).
sub_string(String, Start, Stop) -> substr(String, Start, Stop - Start + 1).

%% ISO/IEC 8859-1 (latin1) letters are converted, others are ignored.
%% The three guard alternatives cover A-Z, À-Ö and Ø-Þ respectively.

to_lower_char(C) when is_integer(C), $A =< C, C =< $Z;
                      is_integer(C), 16#C0 =< C, C =< 16#D6;
                      is_integer(C), 16#D8 =< C, C =< 16#DE ->
    C + 32;
to_lower_char(C) ->
    C.

to_upper_char(C) when is_integer(C), $a =< C, C =< $z;
                      is_integer(C), 16#E0 =< C, C =< 16#F6;
                      is_integer(C), 16#F8 =< C, C =< 16#FE ->
    C - 32;
to_upper_char(C) ->
    C.
-spec to_lower(String) -> Result when
                  String :: io_lib:latin1_string(),
                  Result :: io_lib:latin1_string()
	    ; (Char) -> CharResult when
                  Char :: char(),
                  CharResult :: char().

%% Lowercase a latin1 string or a single character.
to_lower(Str) when is_list(Str) ->
    lists:map(fun to_lower_char/1, Str);
to_lower(Char) when is_integer(Char) ->
    to_lower_char(Char).

-spec to_upper(String) -> Result when
                  String :: io_lib:latin1_string(),
                  Result :: io_lib:latin1_string()
	    ; (Char) -> CharResult when
                  Char :: char(),
                  CharResult :: char().

%% Uppercase a latin1 string or a single character.
to_upper(Str) when is_list(Str) ->
    lists:map(fun to_upper_char/1, Str);
to_upper(Char) when is_integer(Char) ->
    to_upper_char(Char).

-spec join(StringList, Separator) -> String when
      StringList :: [string()],
      Separator :: string(),
      String :: string().

%% Concatenate the strings with Separator between adjacent elements.
join([], Sep) when is_list(Sep) ->
    [];
join([First | Rest], Sep) ->
    First ++ lists:foldr(fun(Part, Acc) -> Sep ++ Part ++ Acc end, [], Rest).
lib/stdlib/src/string.erl
0.600305
0.470737
string.erl
starcoder
%%%--------------------------------------------------------------------------- %%% @doc %%% Erlang term (jsx-style, %%% [http://www.erlang.org/eeps/eep-0018.html EEP-0018]) to JSON serializer %%% and deserializer. %%% %%% For deserialized JSON hashes, they are compatible with functions from %%% {@link orddict} module. %%% %%% Note that Erlang strings are generally recognized as arrays of integers. %%% Note also that tuples alone are not allowed. They can only compose %%% proplists that will be serialized to a JSON object. %%% %%% This module can be used before Indira application starts. %%% @end %%%--------------------------------------------------------------------------- -module(indira_json). -export([encode/1, decode/1]). -export([format_error/1]). -export_type([json_string/0, struct/0]). -export_type([jhash/0, jarray/0, jscalar/0, jstring/0]). %%%--------------------------------------------------------------------------- -type json_string() :: string() | binary(). %% Input formatted as JSON. -type struct() :: jhash() | jarray() | jscalar(). -type jhash() :: [{ jstring(), struct() }, ...] | [{}]. %% Hash node. Literal `[{}]' represents an empty hash. If the hash is %% non-empty, it's a non-empty, ordered list of 2-tuples, compatible with %% {@link orddict}. -type jarray() :: [struct()]. -type jscalar() :: jstring() | number() | null | true | false. -type jstring() :: binary() | atom(). %% String to be serialized. May be an atom, except for `true', `false', and %% `null'. %%%--------------------------------------------------------------------------- %%% public API %%%--------------------------------------------------------------------------- %% @doc Encode Erlang term (jsx-style) to JSON. %% Function returns `{error,badarg}' if data couldn't be encoded. %% %% Note that strings in Erlang are lists of integers. This module doesn't %% guess what's in the list, so you need to pass binary to get JSON string. -spec encode(struct()) -> {ok, iolist()} | {error, badarg}. 
%% @doc Encode Erlang term (jsx-style) to JSON.
%%   Returns an iolist; {error,badarg} when the term does not fit the
%%   struct() shape.  The catch clauses translate the specific error
%%   classes the serializer functions below can raise into badarg.

encode(Struct) ->
  try
    {ok, encode_value(Struct)}
  catch
    error:{badmatch,_Any} -> {error, badarg};
    error:{case_clause,_Any} -> {error, badarg};
    %error:if_clause -> % unused in this module
    %  {error, badarg};
    error:function_clause -> {error, badarg}
  end.

%% @doc Decode a string containing JSON to a (jsx-style) term.
%%   Binaries are converted to lists first; lexing and parsing are done
%%   by the generated indira_json_lexer / indira_json_parser modules, and
%%   any of their error tuples collapses to {error, badarg}.

-spec decode(json_string()) ->
  {ok, struct()} | {error, badarg}.

decode(Line) when is_binary(Line) ->
  decode(binary_to_list(Line));
decode(Line) when is_list(Line) ->
  case indira_json_lexer:string(Line) of
    {ok, Tokens, _EndLine} ->
      case indira_json_parser:parse(Tokens) of
        {ok, Result} -> {ok, Result};
        {error, {_LineNumber, _ParserModule, _Message}} -> {error, badarg}
      end;
    {error, {_LineNumber, _LexerModule, _Message}, _} -> {error, badarg}
  end.

%% @doc Convert a `Reason' from error tuple into usable error message.

-spec format_error(term()) ->
  string().

format_error(badarg = _Reason) -> "invalid argument";
format_error(_Reason) -> "unknown JSON error".

%%%---------------------------------------------------------------------------
%%% serializer {{{

%% @doc Encode any value.
%%   NOTE: clause order is significant — [{}] (empty hash) must be tried
%%   before the non-empty-hash pattern, and [] (empty array) before the
%%   general non-empty list.

-spec encode_value(struct()) ->
  iolist().

encode_value([{}] = _Struct) ->
  "{}"; % short circuit for empty objects
encode_value([{_,_} | _] = Struct) ->
  [${, encode_sequence(Struct, fun encode_pair/1), $}];
encode_value([] = _Struct) ->
  "[]"; % short circuit for empty arrays
encode_value([_ | _] = Struct) ->
  [$[, encode_sequence(Struct, fun encode_value/1), $]];
encode_value(null = _Struct) ->
  "null";
encode_value(true = _Struct) ->
  "true";
encode_value(false = _Struct) ->
  "false";
encode_value(Struct) when is_binary(Struct); is_atom(Struct) ->
  encode_string(Struct);
encode_value(Struct) when is_number(Struct) ->
  encode_number(Struct).

%%----------------------------------------------------------
%% serializing sequences (lists and hashes) {{{

%% @doc Encode sequence of pairs|values.
%%   Function expects a non-empty list of elements.
%%
%%   Combined with {@link encode_pair/1} serializes hashes, combined with
%%   {@link encode_value/1} serializes lists.

-spec encode_sequence([term()], fun((term()) -> iolist())) ->
  iolist().

%% Interleave "," between encoded elements; the list must be non-empty.
encode_sequence([Last] = _Sequence, EncodeElement) ->
  EncodeElement(Last);
encode_sequence([Elem | Rest] = _Sequence, EncodeElement) ->
  [EncodeElement(Elem), "," | encode_sequence(Rest, EncodeElement)].

%% @doc Encode single key/value pair in hash.
%%
%% @see encode_sequence/2

-spec encode_pair({Key :: string() | binary() | atom(), Value :: struct()}) ->
  iolist().

encode_pair({Key, Value} = _Pair) ->
  [encode_string(Key), ":" | encode_value(Value)].

%% }}}
%%----------------------------------------------------------
%% serializing scalars {{{

%% @doc Encode string.
%%   Atoms and binaries are converted to (latin1) character lists before
%%   quoting.

-spec encode_string(string() | binary() | atom()) ->
  iolist().

encode_string(Str) when is_atom(Str) ->
  encode_string(atom_to_list(Str));
encode_string(Str) when is_binary(Str) ->
  encode_string(binary_to_list(Str));
encode_string(Str) when is_list(Str) ->
  [$", string_quote(Str), $"].

%% @doc Quote all characters that can't be expressed literally in JSON string.
%%   The named escapes come first; any other control character (< 32)
%%   becomes a \uXXXX sequence; everything else passes through.

-spec string_quote(string()) ->
  iolist().

string_quote([] = _String) -> "";
string_quote([$"  | Rest]) -> [$\\, $"  | string_quote(Rest)];
string_quote([$\\ | Rest]) -> [$\\, $\\ | string_quote(Rest)];
string_quote([$\b | Rest]) -> [$\\, $b  | string_quote(Rest)];
string_quote([$\f | Rest]) -> [$\\, $f  | string_quote(Rest)];
string_quote([$\n | Rest]) -> [$\\, $n  | string_quote(Rest)];
string_quote([$\r | Rest]) -> [$\\, $r  | string_quote(Rest)];
string_quote([$\t | Rest]) -> [$\\, $t  | string_quote(Rest)];
string_quote([C | Rest]) when C < 32 ->
  encode_unicode(C) ++ string_quote(Rest);
string_quote([C | Rest]) ->
  [C | string_quote(Rest)].

%% @doc Encode single unicode character as a `\uXXXX' sequence.

-spec encode_unicode(char()) ->
  string().
%% Zero-pad the (uppercase) hex form of Char to four digits and prefix
%% it with "\u".  Only called for code points below 32, so the hex form
%% is at most two digits in practice.
encode_unicode(Char) ->
  Hex = erlang:integer_to_list(Char, 16),
  case length(Hex) of
    1 -> "\\u000" ++ Hex;
    2 -> "\\u00" ++ Hex;
    3 -> "\\u0" ++ Hex;
    4 -> "\\u" ++ Hex
  end.

%% @doc Encode number (integer or float) as a string.

-spec encode_number(number()) ->
  iolist().

encode_number(N) when is_integer(N) ->
  erlang:integer_to_list(N);
encode_number(N) when is_float(N) ->
  % output of float_to_list() looks too complex and ugly
  io_lib:write(N).

%% }}}
%%----------------------------------------------------------

%%% }}}
%%%---------------------------------------------------------------------------
%%% vim:ft=erlang:foldmethod=marker
src/indira_json.erl
0.562177
0.416559
indira_json.erl
starcoder
-module(aec_db_gc). %% Implementation of Garbage Collection of Account state. %% To run periodically, the GC needs to be `enabled` (config param), %% and node should be synced (although it's not strict requirement - if not synced, %% GC will run a lot more during syncing without much benefit (and slow down syncing). %% It makes sense to wait until the DB is not updated so rapidly). %% %% If key block `interval` (config parameter) is hit, GC starts collecting %% of the reachable nodes from most recent generations (`history` config parameter). %% Note that for production use, the `interval` from config is slightly deviated to %% avoid the situation where most of the nodes run GC at the same time. %% (Since we assume users will probably keep the `interval` parameter the same.) %% If #data.enabled and #data.synced are true, #data.hashes indicates the GC status: %% - undefined - not the right time to run GC %% - is pid - we are scanning reachable nodes %% - is reference - waiting for signal to execute the `swap_nodes` phase %% %% GC scan is performed on the background. %% The synchronous part of GC is: %% - writing of the reachable nodes from cache to disk %% - subsequent restart of the node %% - removing all old nodes and copying only reachable ones %% %% If uses wishes to keep GC off by default (as is in default config) but invoke it manually, %% that is possible with calling of aec_db_gc:run() or aec_db_gc:run(HistoryLength). -behaviour(gen_statem). %% API -export([start_link/0, start_link/3, run/0, run/1, stop/0]). %% for internal use only -export([maybe_swap_nodes/0]). %% gen_statem callbacks -export([callback_mode/0, init/1, terminate/3, code_change/4]). -export([handle_event/4]). -export([config/0]). -record(data, {enabled :: boolean(), % do we garbage collect? 
interval :: non_neg_integer(), % how often (every `interval` blocks) GC runs history :: non_neg_integer(), % how many block state back from top to keep min_height :: undefined | non_neg_integer(), % if hash_not_found error, try GC from this height synced :: boolean(), % we only run GC (repeatedly) if chain is synced height :: undefined | non_neg_integer(), % latest height of MPT hashes stored in tab hashes :: undefined | pid() | reference()}). % hashes tab (or process filling the tab 1st time) -include_lib("aecore/include/aec_db.hrl"). -define(DEFAULT_HISTORY, 500). -define(DEFAULT_INTERVAL, 50000). %% interval from config +- random offset to avoid situation where large %% majority of nodes become unresponsive due to node restart invoked by GC -define(INTERVAL_VARIANCE_PERCENT, 10). -define(INTERVAL_VARIANCE, true). % comment this out for development only! -define(GCED_TABLE_NAME, aec_account_state_gced). -define(TABLE_NAME, aec_account_state). -define(TIMED(Expr), timer:tc(fun () -> Expr end)). -define(LOG(Fmt), lager:info(Fmt, [])). % io:format(Fmt"~n") -define(LOG(Fmt, Args), lager:info(Fmt, Args)). % io:format(Fmt"~n", Args) -define(LOGERR(Fmt, Args), lager:error(Fmt, Args)). % io:format("ERROR:"Fmt"~n", Args) -define(PROTECT(Expr, OkFun), ?PROTECT(Expr, OkFun, fun signal_scanning_failed/1)). -define(PROTECT(Expr, OkFun, ErrHeightFun), (try Expr of Res -> (OkFun)(Res) catch error:{hash_not_present_in_db_at_height, ErrHeight, MissingHash, Stack} -> ?LOGERR("Scanning at height ~p failed due to a missing hash ~p at: ~p", [ErrHeight, MissingHash, Stack]), (ErrHeightFun)(ErrHeight) end)). %%%=================================================================== %%% API %%%=================================================================== %% We don't support reconfiguration of config parameters when the GC is already up, %% doesn't seem to have practical utility. 
%% Start the GC state machine with parameters taken from the user config.
start_link() ->
    #{enabled := Enabled, interval := Interval, history := History} = config(),
    start_link(Enabled, Interval, History).

start_link(Enabled, Interval, History) ->
    gen_statem:start_link({local, ?MODULE}, ?MODULE, [Enabled, Interval, History], []).

%% Manually trigger a GC run with the configured history length.
run() ->
    #{history := History} = config(),
    run(History).

%% Manually trigger a GC run keeping `History' most recent generations.
run(History) when is_integer(History), History > 0 ->
    gen_statem:call(?MODULE, {run, History}).

stop() ->
    gen_statem:stop(?MODULE).

%% called from aec_db on startup
maybe_swap_nodes() ->
    maybe_swap_nodes(?GCED_TABLE_NAME, ?TABLE_NAME).

%%%===================================================================
%%% gen_statem callbacks
%%%===================================================================

%% Change of configuration parameters requires restart of the node.
%%
%% When GC is enabled we subscribe to chain events and randomize the
%% interval (interval/1, defined elsewhere in this module) so that nodes
%% sharing the same config do not all restart at once.
init([Enabled, Interval, History])
  when is_integer(Interval), Interval > 0, is_integer(History), History > 0 ->
    Interval1 = if Enabled ->
                        aec_events:subscribe(top_changed),
                        aec_events:subscribe(chain_sync),
                        interval(Interval);
                   true ->
                        Interval
                end,
    Data = #data{enabled = Enabled,
                 interval = Interval1,
                 history = History,
                 min_height = undefined,
                 synced = false,
                 height = undefined,
                 hashes = undefined},
    %% The machine starts in `idle' and moves to `ready' once a scanner
    %% process has handed over its ETS table of reachable hashes.
    {ok, idle, Data}.
%% Event handling for the two states:
%%   idle  - no usable scan; #data.hashes is undefined or a scanner pid
%%   ready - #data.hashes is an ETS table (reference) of reachable hashes
%% Clause order is significant throughout.

%% Manual run/1 on a node where GC was disabled: enable it on the fly and
%% force the next key block to start the scan (interval = 1).
handle_event({call, From}, {run, History}, idle, #data{enabled = false} = Data) ->
    aec_events:subscribe(top_changed),
    Data1 = Data#data{synced = true, enabled = true, interval = 1, history = History},
    {keep_state, Data1, {reply, From, {ok, run_next_scan_height(Data1)}}};
handle_event({call, From}, {run, History}, idle,
             #data{history = History, hashes = Hashes} = Data) when is_pid(Hashes) ->
    % initial scanner pid already runs with the same history length, keep it
    {keep_state, Data, {reply, From, {ok, run_next_scan_height(Data)}}};
%% Different history length requested: kill any running scanner and start
%% over at the next key block.
handle_event({call, From}, {run, History}, idle, #data{hashes = Hashes} = Data) ->
    is_pid(Hashes) andalso exit(Hashes, kill),
    Data1 = Data#data{synced = true, interval = 1, history = History,
                      height = undefined, hashes = undefined},
    {keep_state, Data1, {reply, From, {ok, run_next_scan_height(Data1)}}};
handle_event({call, From}, {run, History}, ready, #data{history = History} = Data) ->
    % same history length, keep it
    {keep_state, Data, {reply, From, {ok, run_next_scan_height(Data)}}};
%% Different history length while ready: drop the finished scan and redo it.
handle_event({call, From}, {run, History}, ready, #data{} = Data) ->
    Data1 = Data#data{interval = 1, history = History, height = undefined, hashes = undefined},
    {next_state, idle, Data1, {reply, From, {ok, run_next_scan_height(Data1)}}};

%% once the chain is synced, there's no way to "unsync"
handle_event(info, {_, chain_sync, #{info := {chain_sync_done, _}}}, idle,
             #data{enabled = true} = Data) ->
    catch aec_events:unsubscribe(chain_sync),
    {keep_state, Data#data{synced = true}};

%% starting collection when the *interval* matches, and don't have a GC state
%% (hashes = undefined) OR some MPT was missing previously so we try again
%% later (min_height is set by the scanning_failed clauses below).
handle_event(info, {_, top_changed, #{info := #{height := Height}}}, idle,
             #data{interval = Interval, history = History,
                   enabled = true, synced = true,
                   min_height = MinHeight,
                   height = undefined, hashes = undefined} = Data)
  when ((Height rem Interval) == 0 andalso MinHeight == undefined) orelse
       (is_integer(MinHeight) andalso Height - History > MinHeight) ->
    Parent = self(),
    %% Scan in a linked worker; on success the ETS table is handed over
    %% via ets:give_away/3 (received as 'ETS-TRANSFER' below).
    Pid = spawn_link(
            fun () ->
                    FromHeight = max(Height - History, 0),
                    ?PROTECT(?TIMED(collect_reachable_hashes(FromHeight, Height)),
                             fun ({Time, {ok, Hashes}}) ->
                                     ets:give_away(Hashes, Parent, {{FromHeight, Height}, Time})
                             end)
            end),
    {keep_state, Data#data{height = Height, hashes = Pid}};

%% the initial scan failed due to hash_not_present_in_db, reschedule it for later
handle_event(info, {scanning_failed, ErrHeight}, idle,
             #data{enabled = true, hashes = Pid} = Data) when is_pid(Pid) ->
    {keep_state, Data#data{height = undefined, hashes = undefined, min_height = ErrHeight}};

%% later incremental scan failed due to hash_not_present_in_db, reschedule it for later
handle_event(info, {scanning_failed, ErrHeight}, ready, #data{enabled = true} = Data) ->
    {next_state, idle, Data#data{height = undefined, hashes = undefined, min_height = ErrHeight}};

%% happens when scanning process is killed after it transfers hashes table, we ignore it
%% (when we manually invoke GC via 'run')
handle_event(info, {'ETS-TRANSFER', _, _, _}, idle,
             #data{enabled = true, hashes = undefined} = Data) ->
    {keep_state, Data};

%% received GC state from the phase above
handle_event(info, {'ETS-TRANSFER', Hashes, _, {{FromHeight, ToHeight}, Time}}, idle,
             #data{enabled = true, hashes = Pid} = Data) when is_pid(Pid) ->
    ?LOG("Scanning of ~p reachable hashes in range <~p, ~p> took ~p seconds",
         [ets:info(Hashes, size), FromHeight, ToHeight, Time / 1000000]),
    {next_state, ready, Data#data{hashes = Hashes}};

%% with valid GC state (reachable hashes in ETS cache), follow up on keeping that cache
%% synchronized with Merkle-Patricia Trie on disk keeping the latest changes in accounts
handle_event(info, {_, top_changed, #{info := #{block_type := key, height := Height}}}, ready,
             #data{enabled = true, synced = true,
                   height = LastHeight, hashes = Hashes} = Data)
  when is_reference(Hashes) ->
    if Height > LastHeight ->
            ?PROTECT(range_collect_reachable_hashes(Height, Data),
                     fun ({ok, _}) -> {keep_state, Data#data{height = Height}} end,
                     signal_scanning_failed_keep_state(Data));
       true ->
            %% in case previous key block was a fork, we can receive top_changed event
            %% with the same or lower height as seen last time
            ?PROTECT(collect_reachable_hashes_delta(Height, Hashes),
                     fun ({ok, _}) -> {keep_state, Data} end,
                     (signal_scanning_failed_keep_state(Data)))
    end;

%% with valid GC state, if we are on key block boundary, we can
%% clear the table and insert reachable hashes back
handle_event({call, _From}, maybe_garbage_collect, ready,
             #data{enabled = true, synced = true, hashes = Hashes} = Data)
  when Hashes /= undefined, not is_pid(Hashes) ->
    Header = aec_chain:top_header(),
    case aec_headers:type(Header) of
        key ->
            Height = aec_headers:height(Header),
            ?PROTECT(range_collect_reachable_hashes(Height, Data),
                     %% we exit here as GCEd table is swapped at startup
                     fun ({ok, _}) -> store_cache_and_restart(Hashes, ?GCED_TABLE_NAME) end,
                     signal_scanning_failed_keep_state(Data));
        micro ->
            {keep_state, Data}
    end;
%% maybe_garbage_collect in any other situation is a no-op (reply 'nop').
handle_event({call, From}, maybe_garbage_collect, _, Data) ->
    {keep_state, Data, {reply, From, nop}};
%% Catch-all: drain any other event without state change.
handle_event(_, _, _, Data) ->
    {keep_state, Data}.

terminate(_Reason, _State, _Data) -> void.

code_change(_, State, Data, _) -> {ok, State, Data}.

callback_mode() -> handle_event_function.

%%%===================================================================
%%% Internal functions
%%%===================================================================

%% From - To (inclusive)
%% Full scan at FromHeight creates the ETS table; the per-height deltas
%% then add whatever newer generations reach beyond it.
collect_reachable_hashes(FromHeight, ToHeight) when FromHeight < ToHeight ->
    {ok, Hashes} = collect_reachable_hashes_fullscan(FromHeight),   % table is created here
    {ok, Hashes} = range_collect_reachable_hashes(FromHeight, ToHeight, Hashes), % and reused
    {ok, Hashes}.
%% Full scan of the accounts MPT at the given height: creates a fresh public
%% ETS table and fills it with every node hash reachable from the state root
%% (via store_hash/3, which always continues the walk).
collect_reachable_hashes_fullscan(Height) ->
    Tab = ets:new(gc_reachable_hashes, [public]),
    MPT = get_mpt(Height),
    ?LOG("GC fullscan at height ~p of accounts with hash ~w",
         [Height, aeu_mp_trees:root_hash(MPT)]),
    {ok, aeu_mp_trees:visit_reachable_hashes(MPT, Tab, fun store_hash/3)}.

%% assumes Height - 1, Height - 2, ... down to Height - History
%% are in Hashes from previous runs
%% Incremental scan: walks the MPT at Height but prunes a subtree as soon as
%% an already-known hash is found (store_unseen_hash/3 returns 'stop').
collect_reachable_hashes_delta(Height, Hashes) ->
    MPT = get_mpt(Height),
    ?LOG("GC diffscan at height ~p of accounts with hash ~w",
         [Height, aeu_mp_trees:root_hash(MPT)]),
    {ok, aeu_mp_trees:visit_reachable_hashes(MPT, Hashes, fun store_unseen_hash/3)}.

%% Catch the Hashes table up from the last scanned height (kept in #data{})
%% to ToHeight.
range_collect_reachable_hashes(ToHeight, #data{height = LastHeight, hashes = Hashes}) ->
    range_collect_reachable_hashes(LastHeight, ToHeight, Hashes).

%% Delta-scan every height in (LastHeight, ToHeight]; the comprehension is
%% evaluated for its side effect on the ETS table only, results discarded.
range_collect_reachable_hashes(LastHeight, ToHeight, Hashes) ->
    [collect_reachable_hashes_delta(H, Hashes) || H <- lists:seq(LastHeight + 1, ToHeight)],
    {ok, Hashes}.

%% Persists the collected reachable nodes into a freshly created mnesia table
%% and restarts the node so the GCed table can be swapped in at startup.
%% Does not return: after triggering init:restart/0 the caller suspends
%% itself forever.
store_cache_and_restart(Hashes, GCedTab) ->
    {atomic, ok} = create_accounts_table(GCedTab),
    {ok, _Count} = store_cache(Hashes, GCedTab),
    supervisor:terminate_child(aec_conductor_sup, aec_conductor),
    init:restart(),
    sys:suspend(self(), infinity).

%% Creates a mnesia table with the same record layout as ?TABLE_NAME so its
%% contents can later replace the live table (see maybe_swap_nodes/2).
create_accounts_table(Name) ->
    Rec = ?TABLE_NAME,
    Fields = record_info(fields, ?TABLE_NAME),
    mnesia:create_table(Name, aec_db:tab(aec_db:backend_mode(), Rec, Fields, [{record_name, Rec}])).

%% Uniform iteration over {Hash, Node} pairs from either an ETS table
%% (stored as 2-tuples) or a mnesia table (hash/node in record elements 2/3).
iter(Fun, ets, Tab) ->
    ets:foldl(fun ({Hash, Node}, ok) -> Fun(Hash, Node), ok end, ok, Tab);
iter(Fun, mnesia, Tab) ->
    mnesia:foldl(fun (X, ok) -> Fun(element(2, X), element(3, X)), ok end, ok, Tab).

%% Copies all account nodes from SrcTab (ETS or mnesia, per SrcMod) into the
%% mnesia table TgtTab inside one sync transaction. Wrapped in ?TIMED, so
%% callers receive {Time, Result} (Time treated as microseconds by
%% store_cache/2 and swap_nodes/2, which divide by 1000000 for seconds).
write_nodes(SrcMod, SrcTab, TgtTab) ->
    ?TIMED(aec_db:ensure_transaction(
             fun () ->
                     iter(fun (Hash, Node) -> aec_db:write_accounts_node(TgtTab, Hash, Node) end,
                          SrcMod, SrcTab)
             end, [], sync_transaction)).
%% Writes the reachable-node ETS cache SrcHashes into the mnesia table
%% TgtTab, logging progress and the resulting DB record count.
%% Returns {ok, NumberOfNodesInTheCache}.
store_cache(SrcHashes, TgtTab) ->
    NodesCount = ets:info(SrcHashes, size),
    ?LOG("Writing ~p reachable account nodes to table ~p ...", [NodesCount, TgtTab]),
    {WriteTime, ok} = write_nodes(ets, SrcHashes, TgtTab),
    ?LOG("Writing reachable account nodes took ~p seconds", [WriteTime / 1000000]),
    DBCount = length(mnesia:dirty_select(TgtTab, [{'_', [], [1]}])),
    ?LOG("GC cache has ~p aec_account_state records", [DBCount]),
    {ok, NodesCount}.

%% If the GCed table exists and is non-empty, swap its contents into TgtTab;
%% if it exists but is empty, just drop it; if it does not exist, do nothing.
maybe_swap_nodes(SrcTab, TgtTab) ->
    try mnesia:dirty_first(SrcTab) of          % table exists
        H when is_binary(H) ->                 % and is non empty
            swap_nodes(SrcTab, TgtTab);
        '$end_of_table' ->
            mnesia:delete_table(SrcTab)
    catch
        exit:{aborted, {no_exists, [_]}} ->
            ok
    end.

%% Replaces the contents of TgtTab with the garbage-collected nodes from
%% SrcTab, then removes SrcTab.
swap_nodes(SrcTab, TgtTab) ->
    ?LOG("Clearing table ~p ...", [TgtTab]),
    {atomic, ok} = mnesia:clear_table(TgtTab),
    ?LOG("Writing garbage collected accounts ..."),
    {WriteTime, ok} = write_nodes(mnesia, SrcTab, TgtTab),
    ?LOG("Writing garbage collected accounts took ~p seconds", [WriteTime / 1000000]),
    DBCount = length(mnesia:dirty_select(TgtTab, [{'_', [], [1]}])),
    ?LOG("DB has ~p aec_account_state records", [DBCount]),
    ?LOG("Removing garbage collected table ~p ...", [SrcTab]),
    mnesia:delete_table(SrcTab),
    ok.

%% Returns the accounts Merkle-Patricia trie as of the key block at Height.
%% A missing-hash error is re-raised with the height attached so the
%% scanning code can reschedule from that height.
-spec get_mpt(non_neg_integer()) -> aeu_mp_trees:tree().
get_mpt(Height) ->
    {ok, Hash} = aec_chain_state:get_key_block_hash_at_height(Height),
    try
        get_mpt_from_hash(Hash)
    catch
        %% BUGFIX: erlang:get_stacktrace/0 was deprecated in OTP 21 and
        %% removed in OTP 24 — capture the stacktrace in the clause head
        %% (Class:Reason:Stacktrace) instead.
        error:{hash_not_present_in_db, MissingHash}:Stacktrace ->
            error({hash_not_present_in_db_at_height, Height, MissingHash, Stacktrace})
    end.

%% Builds an MPT handle from the account state tree of the block at Hash.
get_mpt_from_hash(Hash) ->
    {ok, Trees} = aec_chain:get_block_state(Hash),
    AccountTree = aec_trees:accounts(Trees),
    {ok, RootHash} = aec_accounts_trees:root_hash(AccountTree),
    {ok, DB} = aec_accounts_trees:db(AccountTree),
    aeu_mp_trees:new(RootHash, DB).

%% Visitor for full scans: unconditionally record the node, keep walking.
store_hash(Hash, Node, Tab) ->
    ets:insert_new(Tab, {Hash, Node}),
    {continue, Tab}.
%% Visitor for delta scans: if the hash is already in the table, stop
%% descending into this subtree (everything below is already known);
%% otherwise store it and continue.
store_unseen_hash(Hash, Node, Tab) ->
    case ets:lookup(Tab, Hash) of
        [_] -> stop;
        []  -> store_hash(Hash, Node, Tab)
    end.

%% Notify the (registered) gen_statem that a scan failed at ErrHeight so it
%% can fall back to idle and reschedule.
signal_scanning_failed(ErrHeight) ->
    ?MODULE ! {scanning_failed, ErrHeight}.

%% Error continuation used with ?PROTECT: signals the failure height and
%% keeps the current gen_statem state.
signal_scanning_failed_keep_state(Data) ->
    fun (ErrHeight) ->
            signal_scanning_failed(ErrHeight),
            {keep_state, Data}
    end.

%% Height at which the next scheduled scan should start: past the current
%% top, or past the previously failed height plus the configured history.
run_next_scan_height(#data{min_height = undefined}) ->
    aec_headers:height(aec_chain:top_header()) + 1;
run_next_scan_height(#data{min_height = MinHeight, history = History}) when is_integer(MinHeight) ->
    MinHeight + History + 1.

%% Scheduling interval: fixed in test builds; optionally jittered by
%% ?INTERVAL_VARIANCE_PERCENT when built with INTERVAL_VARIANCE, never
%% dropping below 3.
-ifdef(TEST).
interval(ConfigInterval) ->
    ConfigInterval. % for common test
-else.
-ifdef(INTERVAL_VARIANCE).
interval(ConfigInterval) ->
    case trunc(ConfigInterval * ?INTERVAL_VARIANCE_PERCENT / 100.0) of
        0 ->
            ConfigInterval;
        Variance when Variance > 0 ->
            Delta = rand:uniform(Variance) - (Variance div 2),
            max(3, ConfigInterval + Delta)
    end.
-else.
interval(ConfigInterval) ->
    ConfigInterval.
-endif.
-endif. %% ifdef TEST

%% Reads the chain.garbage_collection user configuration into a map with
%% atom keys (enabled / interval / history), applying defaults.
config() ->
    maps:from_list(
      [{binary_to_atom(Key, utf8),
        aeu_env:user_config([<<"chain">>, <<"garbage_collection">>, Key], Default)}
       || {Key, Default} <- [{<<"enabled">>, false},
                             {<<"interval">>, ?DEFAULT_INTERVAL},
                             {<<"history">>, ?DEFAULT_HISTORY}]]).
apps/aecore/src/aec_db_gc.erl
0.568775
0.525673
aec_db_gc.erl
starcoder
% ============LICENSE_START======================================================= % org.onap.dcae % ================================================================================ % Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. % ================================================================================ % Licensed under the Apache License, Version 2.0 (the "License"); % you may not use this file except in compliance with the License. % You may obtain a copy of the License at % % http://www.apache.org/licenses/LICENSE-2.0 % % Unless required by applicable law or agreed to in writing, software % distributed under the License is distributed on an "AS IS" BASIS, % WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. % See the License for the specific language governing permissions and % limitations under the License. % ============LICENSE_END========================================================= % % ECOMP is a trademark and service mark of AT&T Intellectual Property. -module(httpabs). -export([get/2, post/4, %I miss python's default arguments.. post/5, put/4, delete/2 ]). -include("application.hrl"). -define(SC(L), util:concat(L)). %NOTE %Consider the Erlang statement: % %{ok, {{"HTTP/1.1",ReturnCode, State}, Head, Body}} = httpc:get(URL). %CDAP returns error messages in the “Body” field above. % %However, Consul: %1) Always (in all HTTP failures I’ve tested) returns Body == “500\n” %2) Returns the error message in the State field % %Example: % %{{"HTTP/1.0",404,"Client Error: Not Found for url: http://consul.[...].com:8500/v1/kv/hwtestYOUHAVEFAILEDME:rel"},[{"date","Mon, 14 Nov 2016 14:41:03 GMT"},{"server","Werkzeug/0.11.11 Python/3.5.1"},{"content-length","4"},{"content-type","application/json"}],"500\n"} % %This means that error handling in HTTP is not consistent across CDAP and Consul. 
%
%Thus below, on a failure, I return the concatenation of State and Body

%%%
%%%HELPER
%%%

%% Normalizes an httpc result into {StatusCode, Body}:
%% - httpc-level failures (malformed URL, bad body, unreachable host) are
%%   mapped to synthetic 400/504 responses;
%% - HTTP 200 returns the body as-is;
%% - other HTTP codes return the concatenation of the status-line State and
%%   the response Body (see the NOTE at the top of this file).
-spec parse_response({error|ok, any()}, string()) -> httpstat().
parse_response({Status, Response}, URL) ->
    case Status of
        error ->
            lager:error("httpc error: cannot hit: ~s", [URL]),
            case Response of
                no_scheme ->
                    %% flattened for consistency with the other string bodies
                    {400, lists:flatten(io_lib:format("ERROR: The following URL is malformed: ~s", [URL]))};
                {bad_body, _} ->
                    {400, "ERROR: The request Body is malformed"};
                {bad_body_generator, _} ->
                    {400, "ERROR: The request Body is malformed"};
                _ ->
                    %% BUGFIX: was lager:error(io_lib:format(...)) — passing the
                    %% rendered text as a *format string*; a URL containing "~"
                    %% would crash or garble the log entry. Pass format + args.
                    lager:error("Unexpected ERROR hitting ~s", [URL]),
                    %% NOTE(review): this branch returns a binary body while the
                    %% others return strings; kept as-is for caller compatibility.
                    {504, list_to_binary(io_lib:format("ERROR: The following URL is unreachable or the request was unable to be parsed due to an unknown error: ~s", [URL]))}
                    %Are there other reasons other than bad body and unreachable that crash request? (Sneak peak: the answer is probably)
            end;
        ok ->
            {{_, ReturnCode, State}, _Head, Body} = Response,
            case ReturnCode of
                200 ->
                    {ReturnCode, Body};
                _ ->
                    lager:error("Error While hitting ~s, Non-200 status code returned. HTTP Code ~p, State ~s, ResponseBody ~s:", [URL, ReturnCode, State, Body]),
                    %see Note at the top of this file
                    RetBody = ?SC(["State: ", State, ". Return Body: ", Body]),
                    {ReturnCode, RetBody}
            end
    end.

%% Accepts a URL as a list or binary ("www.foo.com" or <<"www.foo.com">>)
%% and returns a whitespace-stripped string.
%% NOTE(review): string:strip/1 is deprecated since OTP 20 in favor of
%% string:trim/1; kept to avoid any behavior change on old OTP releases.
sanitize(URL) ->
    case is_binary(URL) of
        true  -> string:strip(binary_to_list(URL));
        false -> string:strip(URL)
    end.

%anywhere you see any() is essentially lazy typing.. fix these someday when time is abundant

%% @doc HTTP POST that sends the XER request-id header; no extra headers.
-spec post(string(), string()|binary(), string(), any()) -> httpstat().
post(XER, URL, ContentType, Body) ->
    U = sanitize(URL),
    parse_response(httpc:request(post, {U, [{"x-ecomp-requestid", XER}], ContentType, Body}, [], []), U).

%% @doc HTTP POST that prepends the XER header to the caller's header list.
-spec post(string(), string()|binary(), list(), string(), any()) -> httpstat().
post(XER, URL, Headers, ContentType, Body) ->
    U = sanitize(URL),
    parse_response(httpc:request(post, {U, [{"x-ecomp-requestid", XER} | Headers], ContentType, Body}, [], []), U).

%% @doc HTTP GET that always sends the XER request-id header
%% (even if the server doesn't want it; maybe this will blow up one day).
-spec get(string(), string()|binary()) -> httpstat().
get(XER, URL) ->
    U = sanitize(URL),
    parse_response(httpc:request(get, {U, [{"x-ecomp-requestid", XER}]}, [], []), U).

%% @doc HTTP PUT that always sends the XER request-id header.
-spec put(string(), string()|binary(), string(), any()) -> httpstat().
put(XER, URL, ContentType, Body) ->
    U = sanitize(URL),
    parse_response(httpc:request(put, {U, [{"x-ecomp-requestid", XER}], ContentType, Body}, [], []), U).

%% @doc HTTP DELETE that always sends the XER request-id header.
-spec delete(string(), string()|binary()) -> httpstat().
delete(XER, URL) ->
    U = sanitize(URL),
    parse_response(httpc:request(delete, {U, [{"x-ecomp-requestid", XER}]}, [], []), U).
src/httpabs.erl
0.712632
0.409162
httpabs.erl
starcoder
-module(mysql_pool).

%% Thin convenience wrapper around poolboy-managed mysql (mysql-otp)
%% connections: check connections in/out of a named pool and run queries,
%% prepared statements and transactions on a pooled connection.

-export([start/0, stop/0]).
-export([
    checkin/2,
    checkout/1,
    execute/3,
    execute/4,
    query/2,
    query/3,
    query/4,
    transaction/2,
    transaction/3,
    transaction/4,
    with/2
]).

%% @doc Start the mysql_pool application; already-running counts as success.
start() ->
    case application:start(mysql_pool) of
        ok ->
            ok;
        {error, {already_started, mysql_pool}} ->
            ok
    end.

%% @doc Stop the application and halt the VM.
stop() ->
    application:stop(mysql_pool),
    erlang:halt().

%% @doc Returns a mysql connection to the given pool.
checkin(PoolName, Connection) ->
    poolboy:checkin(PoolName, Connection).

%% @doc Checks out a mysql connection from a given pool.
checkout(PoolName) ->
    poolboy:checkout(PoolName).

%% @doc Execute a mysql prepared statement with the given params.
execute(PoolName, StatementRef, Params) ->
    on_connection(PoolName,
                  fun(Conn) -> mysql:execute(Conn, StatementRef, Params) end).

%% @doc Execute a mysql prepared statement with params and a timeout.
execute(PoolName, StatementRef, Params, Timeout) ->
    on_connection(PoolName,
                  fun(Conn) -> mysql:execute(Conn, StatementRef, Params, Timeout) end).

%% @doc Executes a query on a connection from the given pool.
query(PoolName, Query) ->
    on_connection(PoolName, fun(Conn) -> mysql:query(Conn, Query) end).

%% @doc Executes a query with either a list of query parameters or a
%% timeout value.
query(PoolName, Query, ParamsOrTimeout) ->
    on_connection(PoolName,
                  fun(Conn) -> mysql:query(Conn, Query, ParamsOrTimeout) end).

%% @doc Executes a query with both query parameters and a timeout value.
query(PoolName, Query, Params, Timeout) ->
    on_connection(PoolName,
                  fun(Conn) -> mysql:query(Conn, Query, Params, Timeout) end).

%% @doc Runs Fun with a checked-out connection. This is a plain poolboy
%% transaction, NOT a mysql transaction. Example — instead of:
%%   Conn = mysql_poolboy:checkout(mypool),
%%   try mysql:query(Conn, "SELECT...")
%%   after mysql_poolboy:checkin(mypool, Conn) end.
%% write:
%%   mysql_poolboy:with(mypool, fun (Conn) -> mysql:query(Conn, "SELECT...") end).
with(PoolName, Fun) when is_function(Fun, 1) ->
    poolboy:transaction(PoolName, Fun).

%% @doc Executes a mysql transaction fun; the fun takes one argument,
%% the mysql connection.
transaction(PoolName, TransactionFun) when is_function(TransactionFun, 1) ->
    on_connection(PoolName,
                  fun(Conn) ->
                          mysql:transaction(Conn, TransactionFun, [Conn], infinity)
                  end).

%% @doc As transaction/2 with extra Args; the fun's arity must equal
%% length(Args) + 1 (connection is prepended).
transaction(PoolName, TransactionFun, Args)
  when is_function(TransactionFun, length(Args) + 1) ->
    on_connection(PoolName,
                  fun(Conn) ->
                          mysql:transaction(Conn, TransactionFun, [Conn | Args], infinity)
                  end).

%% @doc As transaction/3 with the number of retries the mysql transaction
%% should attempt.
transaction(PoolName, TransactionFun, Args, Retries)
  when is_function(TransactionFun, length(Args) + 1) ->
    on_connection(PoolName,
                  fun(Conn) ->
                          mysql:transaction(Conn, TransactionFun, [Conn | Args], Retries)
                  end).

%% Internal: run F with a connection checked out from PoolName.
on_connection(PoolName, F) ->
    poolboy:transaction(PoolName, F).
src/mysql_pool.erl
0.529263
0.577168
mysql_pool.erl
starcoder
%% EUnit tests for the calculator module: end-to-end calculation, tokenizer,
%% parser, AST formatter, evaluator and input validation.
-module(calculator_eunit).
-include_lib("eunit/include/eunit.hrl").
%% export_all keeps every *_test function visible to the EUnit runner.
-compile(export_all).

%% End-to-end: string expression in, numeric result (or tagged error) out.
calculate_test() ->
    ?assertEqual(2, calculator:calculate("(1 + 1)")),
    ?assertEqual(1, calculator:calculate("((2 + 3) - 4)")),
    ?assertEqual(6, calculator:calculate("((1 + 1) + (2 + 2))")),
    ?assertEqual({error, braces_error}, calculator:calculate("((1 + 1 + (2 + 2))")).

%% Tokenizer: whitespace is ignored; multi-digit numbers form one token.
tokenize_expression_test() ->
    ?assertEqual([open_brace, {num, 1}, plus, {num, 1}, close_brace],
                 calculator:tokenize_expression("(1+1)")),
    ?assertEqual([open_brace, {num, 1}, plus, {num, 1}, close_brace],
                 calculator:tokenize_expression("(1 + 1)")),
    ?assertEqual([open_brace, {num, 12}, plus, {num, 123}, close_brace],
                 calculator:tokenize_expression("(12 + 123)")).

%% Parser: fully-parenthesized expressions become binary-operator ASTs;
%% nesting may appear on either side of an operator.
parse_expression_test() ->
    ?assertEqual({plus, {num, 1}, {num, 1}}, calculator:parse_expression("(1+1)")),
    ?assertEqual({plus, {num, 1}, {num, 2}}, calculator:parse_expression("(1 + 2)")),
    ?assertEqual(
        {minus, {plus, {num, 2}, {num, 3}}, {num, 4}},
        calculator:parse_expression("((2 + 3) - 4)")
    ),
    ?assertEqual(
        {minus, {num, 4}, {plus, {num, 2}, {num, 3}}},
        calculator:parse_expression("(4 - (2 + 3))")
    ),
    ?assertEqual(
        {plus, {plus, {num, 1}, {num, 1}}, {plus, {num, 2}, {num, 2}}},
        calculator:parse_expression("((1+1)+(2+2))")
    ),
    ?assertEqual({minus, {num, 1}, {num, 1}}, calculator:parse_expression("(1-1)")),
    ?assertEqual({minus, {num, 1}, {num, 1}}, calculator:parse_expression("(1 - 1)")),
    ?assertEqual({multiply, {num, 1}, {num, 1}}, calculator:parse_expression("(1*1)")),
    ?assertEqual({divide, {num, 1}, {num, 1}}, calculator:parse_expression("(1/1)")).

%% Formatter: AST back to a canonical (whitespace-free) string.
format_from_ast_test() ->
    ?assertEqual("((2+3)-4)",
                 calculator:format_from_ast({minus, {plus, {num, 2}, {num, 3}}, {num, 4}})).

%% Evaluator: reduce an AST to a number.
%% NOTE(review): function name carries a typo ("expresstion"); kept as-is
%% since EUnit only requires the _test suffix.
eval_expresstion_test() ->
    ?assertEqual(2, calculator:eval_expression({plus, {num, 1}, {num, 1}})),
    ?assertEqual(0, calculator:eval_expression({minus, {num, 1}, {num, 1}})),
    ?assertEqual(1, calculator:eval_expression({minus, {plus, {num, 2}, {num, 3}}, {num, 4}})).

%% Validation: balanced parentheses pass; unbalanced ones are rejected.
validate_input_test() ->
    ?assertEqual({ok, valid}, calculator:validate_input("(1 + 2 - (0 + 1))")),
    ?assertEqual({error, braces_error}, calculator:validate_input("(1 + 1))")),
    ?assertEqual({error, braces_error}, calculator:validate_input("((1 + 1)")).
test/calculator_eunit.erl
0.5
0.911888
calculator_eunit.erl
starcoder
%%% vim:ts=2:sw=2:et %%%----------------------------------------------------------------------------- %%% @doc Parse transform that implements `str/2' %%% %%% Use `{parse_transform,str}' compiler's option to use this transform. %%% ``` %%% str(Fmt, Args) -> lists:flatten(io_lib:format(Fmt, Args)) %%% throw(Fmt, Args) -> throw(lists:flatten(io_lib:format(Fmt, Args)) %%% i2l(Int) -> integer_to_list(Int) % Enabled with compiled with %%% % the `{d,str_i2l}' option %%% b2l(Bin) -> binary_to_list(Bin) % Enabled with compiled with %%% % the `{d,str_b2l}' option %%% str(Term) -> str:str(Term) %%% ''' %%% @author <NAME> <saleyn(at)gmail(dot)com> %%% @end %%%----------------------------------------------------------------------------- %%% Copyright (c) 2021 <NAME> %%% %%% Permission is hereby granted, free of charge, to any person %%% obtaining a copy of this software and associated documentation %%% files (the "Software"), to deal in the Software without restriction, %%% including without limitation the rights to use, copy, modify, merge, %%% publish, distribute, sublicense, and/or sell copies of the Software, %%% and to permit persons to whom the Software is furnished to do %%% so, subject to the following conditions: %%% %%% The above copyright notice and this permission notice shall be included %%% in all copies or substantial portions of the Software. %%% %%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, %%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF %%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. %%% IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY %%% CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, %%% TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE %%% SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. %%%----------------------------------------------------------------------------- -module(str). 
% If using this module as a parse transform, we need to export the following:
-export([parse_transform/2]).

-export([str/1, str/2, reset_float_fmt/0, set_float_fmt/1, get_float_fmt/0]).

%%%-----------------------------------------------------------------------------
%%% External API
%%%-----------------------------------------------------------------------------

%% @doc Stringify an argument
-spec str(term()) -> string().
str(I) when is_list(I) ->
    %% Try to render the list as printable chardata ("~s"); fall back to
    %% term formatting ("~p") when it is not valid chardata.
    lists:flatten(
        try io_lib:format("~s", [I])
        catch _:_ -> io_lib:format("~p", [I])
        end);
str(I) when is_integer(I) -> integer_to_list(I);
str(I) when is_binary(I)  -> binary_to_list(I);
str(I) when is_float(I)   -> str(I, get_float_fmt()); % honor custom float format
str(I) when is_atom(I)    -> atom_to_list(I);
str(I)                    -> lists:flatten(io_lib:format("~p", [I])).

%% Float stringification with optional float_to_list/2 options; any other
%% term falls through to str/1.
str(I, undefined) when is_float(I) -> float_to_list(I);
str(I, Opts)      when is_float(I) -> float_to_list(I, Opts);
str(I, _Opts)                      -> str(I).

%% @doc Erase custom float format from the process dictionary
reset_float_fmt() -> erase(float_fmt).

%% @doc Store custom float format in the process dictionary
%% Return previously stored format.
%% Also see float_to_list/2 [http://erlang.org/doc/man/erlang.html#float_to_list-2]
set_float_fmt(Opts) -> V=get(float_fmt), put(float_fmt, Opts), V.

%% @doc Get custom float format from the process dictionary
get_float_fmt() -> get(float_fmt).

%%%-----------------------------------------------------------------------------
%%% Internal functions
%%%-----------------------------------------------------------------------------

%% Compile-time options controlling which helper calls get rewritten.
-record(opts, {
    i2l = false,  % rewrite i2l/1 -> integer_to_list/1 (enabled via {d,str_i2l})
    b2l = false   % rewrite b2l/1 -> binary_to_list/1  (enabled via {d,str_b2l})
}).

%% @doc Parse transform to be used by providing `{parse_transform, str}' option.
parse_transform(AST, Opts) ->
    I2L = lists:member({d,str_i2l}, Opts),
    B2L = lists:member({d,str_b2l}, Opts),
    Tree = erl_syntax:form_list(AST),
    ModifiedTree = recurse(Tree, #opts{i2l=I2L, b2l=B2L}),
    erase(line), % drop the line-tracking key left behind by update4/4
    erl_syntax:revert_forms(ModifiedTree).
%% Parse transform support

%% Depth-first traversal: rewrite all subtrees first, then the node itself.
recurse(Tree, Opt) ->
    update(case erl_syntax:subtrees(Tree) of
               []   -> Tree;
               List -> erl_syntax:update_tree(Tree,
                           [[recurse(Subtree, Opt) || Subtree <- Group] || Group <- List])
           end, Opt).

%% Atom node positioned at the given line.
syn_atom(A, Line) -> erl_syntax:set_pos(erl_syntax:atom(A), Line).

%% Local call F(A...) positioned at the line saved in the process
%% dictionary by update4/4.
syn_call(F,A)   -> L=get(line),
                   erl_syntax:set_pos(
                     erl_syntax:application(syn_atom(F, L), A), L).
%% Remote call M:F(A...) at the saved line.
syn_call(M,F,A) -> L=get(line),
                   erl_syntax:set_pos(
                     erl_syntax:application(syn_atom(M, L), syn_atom(F, L), A), L).

update(Node, Opt) ->
    update2(Node, erl_syntax:type(Node), Opt).

%% Only function applications are rewrite candidates.
update2(Node, application, Opt) ->
    update3(Node, erl_syntax:application_operator(Node), Opt);
update2(Node, _, _) ->
    Node.

%% Only calls whose operator is a plain atom (local call) are rewritten.
update3(Node, {atom, L, F}, Opt) -> update4(F, Node, L, Opt);
update3(Node, _, _)             -> Node.

update4(str, Node, Line, _Opt) ->
    %% Replace str(A, B) -> lists:flatten(io_lib:format(A, B)).
    %%         str(A)    -> str:str(A).
    put(line, Line),
    case erl_syntax:application_arguments(Node) of
        [A,B] ->
            %% This is a call to str(Fmt, Args).
            %% Replace it with:
            %%   lists:flatten(io_libs:format(Fmt, Args)
            syn_call(lists, flatten, [syn_call(io_lib, format, [A,B])]);
        [A] ->
            %% This is a call to str(Arg).
            %% Replace it with:
            %%   sprintf:str(Args)
            syn_call(str, str, [A]);
        _ ->
            Node
    end;
update4(throw, Node, Line, _Opt) ->
    %% Replace throw(A, B) -> throw(lists:flatten(io_lib:format(A, B))).
    put(line, Line),
    case erl_syntax:application_arguments(Node) of
        [A,B] -> syn_call(throw, [syn_call(lists, flatten,
                                           [syn_call(io_lib, format, [A,B])])]);
        _     -> Node
    end;
update4(i2l, Node, Line, #opts{i2l=true}) ->
    %% Replace i2l(A) -> integer_to_list(A).
    put(line, Line),
    case erl_syntax:application_arguments(Node) of
        [A] -> syn_call(integer_to_list, [A]);
        _   -> Node
    end;
update4(b2l, Node, Line, #opts{b2l=true}) ->
    %% Replace b2l(A) -> binary_to_list(A).
    put(line, Line),
    case erl_syntax:application_arguments(Node) of
        [A] -> syn_call(binary_to_list, [A]);
        _   -> Node
    end;
update4(_, Node, _, _) ->
    Node.
src/str.erl
0.531209
0.411702
str.erl
starcoder
%% Common Test suite for features_count_router (?MUT). All collaborators
%% (counter processes, supervisor, persistence, timers, metrics) are mocked
%% with meck in init_meck/1.
-module(features_count_router_SUITE).

%% export_all lets Common Test discover the group testcases by name.
-compile(export_all).

-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").

%% Module under test
-define(MUT, features_count_router).
%% Mocked counter-process module
-define(COUNTER_MOD, features_counter).
%% Store backend handed to the router / counters
-define(STORE_LIB, test_store_lib).

all() -> [{group, test_count}].

groups() ->
    [
        {test_count, [
            aa_test_new_counter,
            ab_test_existing_counter,
            ac_test_new_counter_as_goal,
            ad_test_existing_counter_as_goal,
            ae_test_new_counter_as_explictely_not_ensuring_goal,
            af_test_existing_counter_as_existing_goal,
            ag_test_counter_registration_race,
            ah_test_multiple_counts_added_at_once,
            ai_test_existing_counter_in_different_namespace,
            ba_test_counter_counts,
            bb_test_counter_pids,
            bc_test_counter_count_map,
            bd_test_namespaced_counter_count_map,
            be_test_namespaced_counter_counts,
            ca_test_start_with_existing_counters,
            cb_test_counter_registration_persists,
            cc_test_weekly_cohort_counter_created,
            cd_test_weekly_cohort_counter_created_and_added_again,
            ce_test_weekly_cohort_counter_with_injected_ymd,
            da_test_new_goal,
            db_test_existing_goal,
            dc_test_goals_are_namespaced,
            ea_test_triggering_a_goal,
            eb_test_triggering_a_goal_registered_after_goal_added,
            ec_test_triggering_a_goal_from_another_namespace,
            fa_test_event_no_persistence,
            ga_test_with_value,
            ha_test_namespaces,
            ia_stop_counter,
            ib_stop_unregistered_counter,
            ic_stop_counter_not_found,
            ja_test_events_for_key
        ]}
    ].
%% Installs the mocks shared by every testcase and returns Config with the
%% (fake) store_lib state added so testcases can reference it.
%% counts/1 is a suite-local helper (defined further down, outside this
%% excerpt) — presumably builds a count record/map; confirm against the
%% rest of the file.
init_meck(Config) ->
    test_utils:meck_load_prometheus(),
    meck:new(?COUNTER_MOD),
    meck:expect(?COUNTER_MOD, add, ['_', '_'], ok),
    meck:expect(?COUNTER_MOD, add, ['_', '_', '_'], ok),
    meck:expect(?COUNTER_MOD, add, ['_', '_', '_', '_'], ok),
    meck:expect(?COUNTER_MOD, count, ['_'], counts(#{count => -1})),
    meck:expect(?COUNTER_MOD, includes_key, ['_', '_'], false),

    %% timer is mocked (passthrough) so scheduled callbacks never fire on
    %% their own; tests invoke them manually where needed.
    meck:new(timer, [unstick, passthrough]),
    meck:expect(timer, apply_after, ['_', '_', '_', '_'], {ok, make_ref()}),

    meck:new(features_counter_config),
    meck:expect(features_counter_config, config_for_counter, ['_', init], undefined),

    meck:new(features_grpc_gen_event_forwarder),
    meck:expect(features_grpc_gen_event_forwarder, notify, ['_'], undefined),

    StoreLibState = {store_lib_state, make_ref()},
    meck:new(features_store_lib),
    meck:expect(features_store_lib, init, ['_', <<"count_router">>], StoreLibState),
    meck:expect(features_store_lib, get, ['_'], {#{}, StoreLibState}),
    meck:expect(features_store_lib, store, ['_', '_'], {ok, StoreLibState}),

    %% supervisor is mocked so no real counter processes are spawned.
    meck:new(supervisor, [unstick]),
    meck:expect(supervisor, start_child, [features_counter_sup, '_'], {ok, self()}),
    meck:expect(supervisor, terminate_child, [features_counter_sup, '_'], ok),
    meck:expect(supervisor, delete_child, [features_counter_sup, '_'], ok),

    [{store_lib_state, StoreLibState} | Config].

%% The listed testcases only install mocks and start the router themselves
%% (e.g. to control what the persisted store returns before startup); all
%% other testcases also get the router started here.
init_per_testcase(ca_test_start_with_existing_counters, Config) ->
    init_meck(Config);
init_per_testcase(cc_test_weekly_cohort_counter_created, Config) ->
    init_meck(Config);
init_per_testcase(cd_test_weekly_cohort_counter_created_and_added_again, Config) ->
    init_meck(Config);
init_per_testcase(ce_test_weekly_cohort_counter_with_injected_ymd, Config) ->
    init_meck(Config);
init_per_testcase(db_test_existing_goal, Config) ->
    init_meck(Config);
init_per_testcase(fa_test_event_no_persistence, Config) ->
    init_meck(Config);
init_per_testcase(_, Config) ->
    Config1 = init_meck(Config),
    {ok, Pid} = ?MUT:start_link(?STORE_LIB),
    [{pid, Pid} | Config1].
%% Common teardown: validates (no unexpected calls) and unloads every mock
%% installed by init_meck/1.
end_per_testcase(_, Config) ->
    test_utils:meck_unload_prometheus(),
    ?assert(meck:validate(?COUNTER_MOD)),
    meck:unload(?COUNTER_MOD),
    ?assert(meck:validate(features_counter_config)),
    meck:unload(features_counter_config),
    ?assert(meck:validate(features_grpc_gen_event_forwarder)),
    meck:unload(features_grpc_gen_event_forwarder),
    ?assert(meck:validate(features_store_lib)),
    meck:unload(features_store_lib),
    ?assert(meck:validate(timer)),
    meck:unload(timer),
    ?assert(meck:validate(supervisor)),
    meck:unload(supervisor),
    Config.

%% Adding an event for an unknown feature starts a counter child under
%% features_counter_sup and forwards the add to it.
aa_test_new_counter(Config) ->
    Feature = <<"feature_name">>,
    Pid = self(),
    meck:expect(supervisor, start_child, [features_counter_sup, '_'], {ok, Pid}),

    User = <<"user_id">>,
    ?MUT:add(Feature, User),

    Spec = spec_for_feature(Feature),

    ?assertEqual(Spec, meck:capture(first, supervisor, start_child, ['_', Spec], 2)),
    ?assertEqual(2, meck:num_calls(?COUNTER_MOD, add, [User, '_', Pid])),
    Config.

%% Once a counter is registered, further adds reuse it — only one
%% start_child for the feature.
ab_test_existing_counter(Config) ->
    Feature = <<"feature_name">>,
    CounterID = features_counter_id:create(Feature),
    Pid = self(),
    meck:expect(supervisor, start_child, [features_counter_sup, '_'], {ok, Pid}),

    User = <<"user_id">>,
    ?MUT:add(Feature, User),
    ?MUT:register_counter(CounterID, Pid),
    % Used for syncronization / processing messages
    ?MUT:goals(<<"default">>),
    ?MUT:add(Feature, User),

    Spec = #{
        id => {features_counter, CounterID},
        start => {features_counter, start_link, [?STORE_LIB, CounterID]}
    },

    ?assertEqual(1, meck:num_calls(supervisor, start_child, [features_counter_sup, Spec])),
    ?assertEqual(4, meck:num_calls(?COUNTER_MOD, add, [User, '_', Pid])),
    Config.

%% ensure_goal => true on a new feature: counter is started and the add
%% carries the (empty) list of other counters.
ac_test_new_counter_as_goal(Config) ->
    Feature = <<"feature_name">>,
    Pid = self(),
    meck:expect(supervisor, start_child, [features_counter_sup, '_'], {ok, Pid}),

    User = <<"user_id">>,
    ?MUT:add(<<"default">>, Feature, User, #{ensure_goal => true}),

    Spec = spec_for_feature(Feature),
    ExpectedOtherCounters = [],

    ?assertEqual(Spec, meck:capture(first, supervisor, start_child, ['_', Spec], 2)),
    ?assertEqual(1, meck:num_calls(?COUNTER_MOD, add, [User, ExpectedOtherCounters, '_', Pid])),
    Config.

%% ensure_goal => true followed by a plain add on the now-registered
%% counter: still a single start_child.
ad_test_existing_counter_as_goal(Config) ->
    Feature = <<"feature_name">>,
    CounterID = features_counter_id:create(Feature),
    Pid = self(),
    meck:expect(supervisor, start_child, [features_counter_sup, '_'], {ok, Pid}),

    User = <<"user_id">>,
    ?MUT:add(<<"default">>, Feature, User, #{ensure_goal => true}),
    ?MUT:register_counter(CounterID, Pid),
    % Used for syncronization / processing messages
    ?MUT:goals(<<"default">>),
    io:format("Calls ~p~n", [meck:history(?COUNTER_MOD)]),
    ?MUT:add(Feature, User),

    Spec = #{
        id => {features_counter, CounterID},
        start => {features_counter, start_link, [?STORE_LIB, CounterID]}
    },
    ExpectedOtherCounters = [],
    io:format("Calls ~p~n", [meck:history(?COUNTER_MOD)]),

    ?assertEqual(1, meck:num_calls(supervisor, start_child, [features_counter_sup, Spec])),
    ?assertEqual(2, meck:num_calls(?COUNTER_MOD, add, [User, ExpectedOtherCounters, '_', Pid])),
    Config.

%% ensure_goal => false behaves like a plain add (goal machinery untouched).
ae_test_new_counter_as_explictely_not_ensuring_goal(Config) ->
    Feature = <<"feature_name">>,
    Pid = self(),
    meck:expect(supervisor, start_child, [features_counter_sup, '_'], {ok, Pid}),

    User = <<"user_id">>,
    ?MUT:add(<<"default">>, Feature, User, #{ensure_goal => false}),

    Spec = spec_for_feature(Feature),

    ?assertEqual(Spec, meck:capture(first, supervisor, start_child, ['_', Spec], 2)),
    ?assertEqual(2, meck:num_calls(?COUNTER_MOD, add, [User, '_', Pid])),
    Config.
%% Feature already registered as a goal: an ensure_goal add routes once to
%% the existing counter with the (empty) other-counters list.
af_test_existing_counter_as_existing_goal(Config) ->
    Feature = <<"feature_name">>,
    CounterID = features_counter_id:create(Feature),
    Pid = self(),
    meck:expect(supervisor, start_child, [features_counter_sup, '_'], {ok, Pid}),

    User = <<"user_id">>,
    ?MUT:add_goal(<<"default">>, Feature),
    ?MUT:register_counter(CounterID, Pid),
    % Used for syncronization / processing messages
    ?MUT:goals(<<"default">>),
    ?MUT:add(<<"default">>, Feature, User, #{ensure_goal => true}),
    % Used for syncronization / processing messages
    ?MUT:goals(<<"default">>),

    ExpectedOtherCounters = [],

    ?assertEqual(1, meck:num_calls(?COUNTER_MOD, add, [User, ExpectedOtherCounters, '_', Pid])),
    Config.

%% If start_child races and returns {error, {already_started, Pid}}, the
%% add is still delivered to that Pid.
ag_test_counter_registration_race(Config) ->
    Feature = <<"feature_name">>,
    Pid = self(),
    SupResp = {error, {already_started, Pid}},
    meck:expect(supervisor, start_child, [features_counter_sup, '_'], SupResp),

    User = <<"user_id">>,
    ?MUT:add(Feature, User),

    Spec = spec_for_feature(Feature),

    ?assertEqual(Spec, meck:capture(first, supervisor, start_child, ['_', Spec], 2)),
    ?assertEqual(2, meck:num_calls(?COUNTER_MOD, add, [User, '_', Pid])),
    Config.

%% Batch add: a list of {Namespace, Event, User, Opts} tuples; goal adds
%% carry the other-counters list ([]) while plain adds do not.
ah_test_multiple_counts_added_at_once(Config) ->
    Pid = self(),
    meck:expect(supervisor, start_child, [features_counter_sup, '_'], {ok, Pid}),

    Adds = [
        {<<"default">>, <<"event_1">>, <<"user_1">>, #{}},
        {<<"default">>, <<"event_2">>, <<"user_2">>, #{ensure_goal => true}}
    ],
    ?MUT:add(Adds),

    io:format("Calls ~p~n", [meck:history(?COUNTER_MOD)]),

    ?assertEqual(2, meck:num_calls(?COUNTER_MOD, add, [<<"user_1">>, '_', Pid])),
    ?assertEqual(1, meck:num_calls(?COUNTER_MOD, add, [<<"user_2">>, '_', Pid])),
    ?assertEqual(1, meck:num_calls(?COUNTER_MOD, add, [<<"user_2">>, [], '_', Pid])),
    Config.

%% The same feature name in two namespaces yields two distinct counters
%% (two start_child calls with distinct counter ids).
ai_test_existing_counter_in_different_namespace(Config) ->
    Feature = <<"feature_name">>,
    CounterID1 = features_counter_id:create(Feature),
    CounterID2 = features_counter_id:create(<<"not default">>, Feature, named),
    Pid = self(),
    meck:expect(supervisor, start_child, [features_counter_sup, '_'], {ok, Pid}),

    User = <<"user_id">>,
    ?MUT:add(<<"default">>, Feature, User, #{}),
    ?MUT:register_counter(CounterID1, Pid),
    % Used for syncronization / processing messages
    ?MUT:goals(<<"default">>),
    ?MUT:add(<<"not default">>, Feature, User, #{}),

    Spec1 = #{
        id => {features_counter, CounterID1},
        start => {features_counter, start_link, [?STORE_LIB, CounterID1]}
    },
    Spec2 = #{
        id => {features_counter, CounterID2},
        start => {features_counter, start_link, [?STORE_LIB, CounterID2]}
    },

    ?assertEqual(1, meck:num_calls(supervisor, start_child, [features_counter_sup, Spec1])),
    ?assertEqual(1, meck:num_calls(supervisor, start_child, [features_counter_sup, Spec2])),
    ?assertEqual(4, meck:num_calls(?COUNTER_MOD, add, [User, '_', Pid])),
    Config.

%% counts/1 returns the per-counter counts of registered counters.
ba_test_counter_counts(Config) ->
    Feature = <<"feature_name">>,
    CounterID = features_counter_id:create(Feature),
    Pid = self(),
    Num = 1,
    Count = counts(#{count => Num}),
    meck:expect(features_counter, count, [Pid], Count),

    ?MUT:register_counter(CounterID, Pid),
    % Run to synchronize/handle all messages
    ?MUT:goals(<<"default">>),
    Counts = ?MUT:counts(<<"default">>),

    ?assertEqual([counts(#{id => CounterID, count => Num})], Counts),
    Config.

%% counter_pids/0 lists the pids of all registered counters.
bb_test_counter_pids(Config) ->
    Feature = <<"feature_name">>,
    CounterID = features_counter_id:create(Feature),
    Pid = self(),

    ?MUT:register_counter(CounterID, Pid),
    % Run to synchronize/handle all messages
    ?MUT:goals(<<"default">>),
    Pids = ?MUT:counter_pids(),

    ?assertEqual([Pid], Pids),
    Config.
%% count_map/1 returns counts keyed by counter id instead of a list.
bc_test_counter_count_map(Config) ->
    Feature = <<"feature_name">>,
    CounterID = features_counter_id:create(Feature),
    Pid = self(),
    Num = 1,
    Count = counts(#{count => Num}),
    meck:expect(features_counter, count, [Pid], Count),
    ?MUT:register_counter(CounterID, Pid),
    % Run to synchronize/handle all messages
    ?MUT:goals(<<"default">>),
    Counts = ?MUT:count_map(<<"default">>),
    ?assertEqual(#{CounterID => Count}, Counts),
    Config.

%% count_map/1 is namespace-scoped: each namespace only sees its own counter.
bd_test_namespaced_counter_count_map(Config) ->
    Feature = <<"feature_name">>,
    CounterID1 = features_counter_id:create(Feature),
    CounterID2 = features_counter_id:create(<<"not default">>, Feature, named),
    Pid1 = erlang:list_to_pid("<0.0.0>"),
    Pid2 = erlang:list_to_pid("<0.0.1>"),
    Count1 = counts(#{count => 1}),
    Count2 = counts(#{count => 2}),
    meck:expect(features_counter, count, [
        {[Pid1], Count1},
        {[Pid2], Count2}
    ]),
    ?MUT:register_counter(CounterID1, Pid1),
    ?MUT:register_counter(CounterID2, Pid2),
    % Run to synchronize/handle all messages
    ?MUT:goals(<<"default">>),
    Counts1 = ?MUT:count_map(<<"default">>),
    Counts2 = ?MUT:count_map(<<"not default">>),
    ?assertEqual(#{CounterID1 => Count1}, Counts1),
    ?assertEqual(#{CounterID2 => Count2}, Counts2),
    Config.

%% counts/1 is namespace-scoped, mirroring bd_ but for the list-shaped API.
be_test_namespaced_counter_counts(Config) ->
    Feature = <<"feature_name">>,
    CounterID1 = features_counter_id:create(Feature),
    CounterID2 = features_counter_id:create(<<"not default">>, Feature, named),
    Pid1 = erlang:list_to_pid("<0.0.0>"),
    Pid2 = erlang:list_to_pid("<0.0.1>"),
    Count1 = counts(#{count => 1}),
    Count2 = counts(#{count => 2}),
    meck:expect(features_counter, count, [
        {[Pid1], Count1},
        {[Pid2], Count2}
    ]),
    ?MUT:register_counter(CounterID1, Pid1),
    ?MUT:register_counter(CounterID2, Pid2),
    % Run to synchronize/handle all messages
    ?MUT:goals(<<"default">>),
    Counts1 = ?MUT:counts(<<"default">>),
    Counts2 = ?MUT:counts(<<"not default">>),
    ?assertEqual([counts(#{id => CounterID1, count => 1})], Counts1),
    ?assertEqual([counts(#{id => CounterID2, count => 2})], Counts2),
    Config.
%% On startup the router loads persisted counter ids and starts a child for
%% each. Repeated start_enqueued_counters/0 calls must be idempotent: only
%% one start_child per stored counter.
ca_test_start_with_existing_counters(Config) ->
    StoreLibState = ?config(store_lib_state, Config),
    Feature = <<"feature_name">>,
    CounterID = features_counter_id:create(Feature),
    Num = 1,
    Count = #{count => Num},
    Spec = spec_for_feature(CounterID),
    StoredData = #{counters => [CounterID]},
    meck:expect(features_store_lib, get, [StoreLibState], {StoredData, StoreLibState}),
    meck:expect(features_counter, count, ['_'], Count),
    {ok, Pid} = ?MUT:start_link(?STORE_LIB),
    Config1 = [{pid, Pid} | Config],
    meck:wait(features_store_lib, get, '_', 100),
    % test_utils:assertNCalls(1, timer, apply_after, ['_', features_count_router, start_enqueued_counters, []]),
    % apply_after would normally handle calling this but it's mocked, so do it manually
    ?MUT:start_enqueued_counters(),
    ?MUT:start_enqueued_counters(),
    ?MUT:start_enqueued_counters(),
    meck:wait(supervisor, start_child, ['_', Spec], 100),
    test_utils:assertNCalls(1, supervisor, start_child, ['_', Spec]),
    Config1.

%% Registering a counter persists it via features_store_lib:store/2 and the
%% counter then shows up in counts/1.
cb_test_counter_registration_persists(Config) ->
    Feature = <<"feature_name">>,
    CounterID = features_counter_id:create(Feature),
    Pid = self(),
    Num = 1,
    Count = counts(#{count => Num}),
    meck:expect(features_counter, count, [Pid], Count),
    ?MUT:register_counter(CounterID, Pid),
    meck:wait(features_store_lib, store, '_', 1000),
    ExpectedData = expected_stored_data(#{counters => [CounterID]}),
    ?assertEqual(ExpectedData, meck:capture(first, features_store_lib, store, '_', 1)),
    Counts = ?MUT:counts(<<"default">>),
    ?assertEqual([counts(#{id => CounterID, count => Num})], Counts),
    Config.
%% A feature configured with date_cohort => weekly must spawn both the plain
%% feature counter and a weekly {Year, Week} cohort counter on add/2.
cc_test_weekly_cohort_counter_created(Config) ->
    CounterConfig = #{date_cohort => weekly},
    meck:expect(features_counter_config, config_for_counter, ['_', init], CounterConfig),
    StoreLibState = ?config(store_lib_state, Config),
    {Year, Week} = calendar:iso_week_number(),
    Name = <<"cc_feature">>,
    WeeklyCounterID = features_counter_id:create(<<"default">>, Name, weekly, {Year, Week}),
    Num = 1,
    Count = #{count => Num},
    Spec = spec_for_feature(Name),
    WeeklySpec = spec_for_feature(WeeklyCounterID),
    StoredData = #{},
    meck:expect(features_store_lib, get, [StoreLibState], {StoredData, StoreLibState}),
    meck:expect(features_counter, count, ['_'], Count),
    {ok, Pid} = ?MUT:start_link(?STORE_LIB),
    Config1 = [{pid, Pid} | Config],
    ?MUT:add(Name, <<"user_id">>),
    meck:wait(supervisor, start_child, [features_counter_sup, WeeklySpec], 1000),
    ?assertEqual(1, meck:num_calls(supervisor, start_child, [features_counter_sup, Spec])),
    ?assertEqual(1, meck:num_calls(supervisor, start_child, [features_counter_sup, WeeklySpec])),
    Config1.
%% Same as cc_ but adds twice. Since the mocked counters never register,
%% each add goes through the start_child path again — hence 2 calls per spec.
cd_test_weekly_cohort_counter_created_and_added_again(Config) ->
    CounterConfig = #{date_cohort => weekly},
    meck:expect(features_counter_config, config_for_counter, ['_', init], CounterConfig),
    StoreLibState = ?config(store_lib_state, Config),
    {Year, Week} = calendar:iso_week_number(),
    Name = <<"cc_feature">>,
    WeeklyCounterID = features_counter_id:create(<<"default">>, Name, weekly, {Year, Week}),
    Num = 1,
    Count = #{count => Num},
    Spec = spec_for_feature(Name),
    WeeklySpec = spec_for_feature(WeeklyCounterID),
    StoredData = #{},
    meck:expect(features_store_lib, get, [StoreLibState], {StoredData, StoreLibState}),
    meck:expect(features_counter, count, ['_'], Count),
    {ok, Pid} = ?MUT:start_link(?STORE_LIB),
    Config1 = [{pid, Pid} | Config],
    ?MUT:add(Name, <<"user_id">>),
    ?MUT:add(Name, <<"user_id">>),
    meck:wait(supervisor, start_child, [features_counter_sup, WeeklySpec], 1000),
    ?assertEqual(2, meck:num_calls(supervisor, start_child, [features_counter_sup, Spec])),
    ?assertEqual(2, meck:num_calls(supervisor, start_child, [features_counter_sup, WeeklySpec])),
    Config1.
%% The weekly cohort is derived from the date passed in the add options
%% (#{date => YMD}) rather than the wall clock.
ce_test_weekly_cohort_counter_with_injected_ymd(Config) ->
    CounterConfig = #{date_cohort => weekly},
    meck:expect(features_counter_config, config_for_counter, ['_', init], CounterConfig),
    StoreLibState = ?config(store_lib_state, Config),
    InjectedDate = {2000, 1, 3},
    {Year, Week} = calendar:iso_week_number(InjectedDate),
    Name = <<"cc_feature">>,
    WeeklyCounterID = features_counter_id:create(<<"default">>, Name, weekly, {Year, Week}),
    Num = 1,
    Count = #{count => Num},
    Spec = spec_for_feature(Name),
    WeeklySpec = spec_for_feature(WeeklyCounterID),
    StoredData = #{},
    meck:expect(features_store_lib, get, [StoreLibState], {StoredData, StoreLibState}),
    meck:expect(features_counter, count, ['_'], Count),
    {ok, Pid} = ?MUT:start_link(?STORE_LIB),
    Config1 = [{pid, Pid} | Config],
    ?MUT:add(<<"default">>, Name, <<"user_id">>, #{date => InjectedDate}),
    meck:wait(supervisor, start_child, [features_counter_sup, WeeklySpec], 1000),
    ?assertEqual(1, meck:num_calls(supervisor, start_child, [features_counter_sup, Spec])),
    ?assertEqual(1, meck:num_calls(supervisor, start_child, [features_counter_sup, WeeklySpec])),
    Config1.

%% Adding a new goal persists the namespaced goal map and makes the goal
%% visible via goals/1.
da_test_new_goal(Config) ->
    Goal = <<"goal_name">>,
    ?MUT:add_goal(<<"default">>, Goal),
    Goals = ?MUT:goals(<<"default">>),
    ExpectedStoredGoals = #{<<"default">> => #{Goal => undefined}},
    ExpectedGoals = [Goal],
    ExpectedData = expected_stored_data(#{goals => ExpectedStoredGoals}),
    ?assertEqual(ExpectedData, meck:capture(first, features_store_lib, store, '_', 1)),
    ?assertEqual(ExpectedGoals, Goals),
    Config.
%% A goal loaded from the store at startup is available via goals/1, and
%% re-adding it must not trigger another store write (meck:capture raises
%% not_found when store/2 was never called).
db_test_existing_goal(Config) ->
    StoreLibState = ?config(store_lib_state, Config),
    Goal = <<"feature_name">>,
    StoredData = #{goals => #{<<"default">> => #{Goal => undefined}}},
    meck:expect(features_store_lib, get, [StoreLibState], {StoredData, StoreLibState}),
    {ok, Pid} = ?MUT:start_link(?STORE_LIB),
    Config1 = [{pid, Pid} | Config],
    meck:wait(features_store_lib, get, '_', 1000),
    Goals = ?MUT:goals(<<"default">>),
    ExpectedGoals = [Goal],
    ?assertEqual(ExpectedGoals, Goals),
    % Ensure that this doesn't write to the store again when adding the goal again
    ?MUT:add_goal(<<"default">>, Goal),
    ?assertError(not_found, meck:capture(first, features_store_lib, store, '_', 1)),
    Config1.

%% Goals live per namespace; each namespace only reports its own goals while
%% the persisted map contains all of them.
dc_test_goals_are_namespaced(Config) ->
    Goal1 = <<"goal1">>,
    Goal2 = <<"goal2">>,
    ?MUT:add_goal(<<"default">>, Goal1),
    ?MUT:add_goal(Goal2, Goal2),
    Goals1 = ?MUT:goals(<<"default">>),
    Goals2 = ?MUT:goals(Goal2),
    ExpectedStoredGoals = #{
        <<"default">> => #{Goal1 => undefined},
        Goal2 => #{Goal2 => undefined}
    },
    ExpectedData = expected_stored_data(#{goals => ExpectedStoredGoals}),
    ?assertEqual(ExpectedData, meck:capture(last, features_store_lib, store, '_', 1)),
    ?assertEqual([Goal1], Goals1),
    ?assertEqual([Goal2], Goals2),
    Config.
%% End-to-end goal triggering: a non-goal event followed by a goal event for
%% the same user must record the user on the goal counter AND its weekly
%% cohort counter, tagged with the previously seen events ([NonGoalFeature]).
%% includes_key drives which counters the user already belongs to.
ea_test_triggering_a_goal(Config) ->
    CounterConfig = #{date_cohort => weekly},
    User = <<"user_id">>,
    NonGoalFeature = <<"non_goal">>,
    GoalFeature = <<"goal">>,
    {Year, Week} = calendar:iso_week_number(),
    meck:expect(features_counter_config, config_for_counter, [
        {[GoalFeature, init], CounterConfig},
        {['_', init], undefined}
    ]),
    GlobalCounterPid = erlang:list_to_pid("<0.0.0>"),
    NonGoalCounterPid = erlang:list_to_pid("<0.0.1>"),
    GoalCounterPid = erlang:list_to_pid("<0.0.2>"),
    WeeklyGoalCounterPid = erlang:list_to_pid("<0.0.3>"),
    GlobalCounterID = features_counter_id:global_counter_id(<<"default">>),
    NonGoalCounterID = features_counter_id:create(NonGoalFeature),
    GoalCounterID = features_counter_id:create(GoalFeature),
    WeeklyGoalCounterID = features_counter_id:create(
        <<"default">>, GoalFeature, weekly, {Year, Week}
    ),
    % The global counter will start up before we do anything, wait for that to
    % happen before we break supervisor as a mock just below
    meck:wait(
        supervisor,
        start_child,
        [features_counter_sup, spec_for_feature(GlobalCounterID)],
        100
    ),
    % Any further start_child call is a test failure: every counter used in
    % this test is registered explicitly below.
    meck:expect(
        supervisor,
        start_child,
        [features_counter_sup, '_'],
        meck:raise(error, should_not_hit_this)
    ),
    ?MUT:register_counter(GlobalCounterID, GlobalCounterPid),
    ?MUT:register_counter(NonGoalCounterID, NonGoalCounterPid),
    ?MUT:register_counter(GoalCounterID, GoalCounterPid),
    ?MUT:register_counter(WeeklyGoalCounterID, WeeklyGoalCounterPid),
    ?MUT:add_goal(<<"default">>, GoalFeature),
    meck:expect(?COUNTER_MOD, add, ['_', '_', '_'], ok),
    meck:expect(?COUNTER_MOD, add, ['_', '_', '_', '_'], ok),
    % User is already in the non-goal and global counters, not yet in either
    % goal counter — that is what makes the goal "trigger".
    meck:expect(?COUNTER_MOD, includes_key, [
        {['_', NonGoalCounterPid], true},
        {['_', GlobalCounterPid], true},
        {['_', GoalCounterPid], false},
        {['_', WeeklyGoalCounterPid], false}
    ]),
    meck:expect(?COUNTER_MOD, count, [
        {[NonGoalCounterPid], #{count => 1, tag_counts => #{[] => 1}}},
        {[GlobalCounterPid], #{count => 1, tag_counts => #{[] => 1}}},
        {[GoalCounterPid], #{count => 1, tag_counts => #{[NonGoalFeature] => 1}}},
        {[WeeklyGoalCounterPid], #{count => 1, tag_counts => #{[NonGoalFeature] => 1}}}
    ]),
    % synchronize call
    _Goals = ?MUT:goals(<<"default">>),
    ?MUT:add(NonGoalFeature, User),
    ?MUT:add(GoalFeature, User),
    Counts = ?MUT:counts(<<"default">>),
    io:format("Adds ~p~n", [meck:history(?COUNTER_MOD)]),
    ExpectedEvents = lists:sort([NonGoalFeature]),
    ?assertEqual(User, meck:capture(first, ?COUNTER_MOD, add, ['_', '_', '_', GoalCounterPid], 1)),
    ?assertEqual(
        ExpectedEvents,
        lists:sort(meck:capture(first, ?COUNTER_MOD, add, ['_', '_', '_', GoalCounterPid], 2))
    ),
    ?assertEqual(
        User,
        meck:capture(first, ?COUNTER_MOD, add, ['_', '_', '_', WeeklyGoalCounterPid], 1)
    ),
    ?assertEqual(
        ExpectedEvents,
        lists:sort(meck:capture(first, ?COUNTER_MOD, add, ['_', '_', '_', WeeklyGoalCounterPid], 2))
    ),
    ExpectedCounts = [
        #{count => 1, id => NonGoalCounterID, tag_counts => #{[] => 1}},
        #{count => 1, id => GlobalCounterID, tag_counts => #{[] => 1}},
        #{count => 1, id => GoalCounterID, tag_counts => #{[NonGoalFeature] => 1}},
        #{count => 1, id => WeeklyGoalCounterID, tag_counts => #{[NonGoalFeature] => 1}}
    ],
    ?assertEqual(
        lists:sort(ExpectedCounts),
        lists:sort(Counts)
    ),
    Config.
%% Like ea_ but the goal counter is registered only AFTER add_goal/2 — the
%% router must still route the goal-tagged add to the late-registered pid.
eb_test_triggering_a_goal_registered_after_goal_added(Config) ->
    User = <<"user_id">>,
    NonGoalFeature = <<"non_goal">>,
    GoalFeature = <<"goal">>,
    GlobalCounterPid = erlang:list_to_pid("<0.0.0>"),
    NonGoalCounterPid = erlang:list_to_pid("<0.0.1>"),
    GoalCounterPid = erlang:list_to_pid("<0.0.2>"),
    GlobalCounterID = features_counter_id:global_counter_id(<<"default">>),
    NonGoalCounterID = features_counter_id:create(NonGoalFeature),
    GoalCounterID = features_counter_id:create(GoalFeature),
    % The global counter will start up before we do anything, wait for that to
    % happen before we break supervisor as a mock just below
    meck:wait(
        supervisor,
        start_child,
        [features_counter_sup, spec_for_feature(GlobalCounterID)],
        100
    ),
    meck:expect(
        supervisor,
        start_child,
        [features_counter_sup, '_'],
        meck:raise(error, should_not_hit_this)
    ),
    ?MUT:register_counter(GlobalCounterID, GlobalCounterPid),
    ?MUT:register_counter(NonGoalCounterID, NonGoalCounterPid),
    ?MUT:add_goal(<<"default">>, GoalFeature),
    % synchronize call
    _Goals0 = ?MUT:goals(<<"default">>),
    % Goal counter registered after the goal exists
    ?MUT:register_counter(GoalCounterID, GoalCounterPid),
    meck:expect(?COUNTER_MOD, add, ['_', '_', '_', '_'], ok),
    meck:expect(?COUNTER_MOD, add, ['_', '_', '_', GoalCounterPid], ok),
    meck:expect(?COUNTER_MOD, includes_key, [
        {['_', NonGoalCounterPid], true},
        {['_', GlobalCounterPid], true},
        {['_', GoalCounterPid], false}
    ]),
    meck:expect(?COUNTER_MOD, count, [
        {[NonGoalCounterPid], #{count => 1, tag_counts => #{[] => 1}}},
        {[GlobalCounterPid], #{count => 1, tag_counts => #{[] => 1}}},
        {[GoalCounterPid], #{count => 1, tag_counts => #{[NonGoalFeature] => 1}}}
    ]),
    % synchronize call
    _Goals1 = ?MUT:goals(<<"default">>),
    ?MUT:add(NonGoalFeature, User),
    ?MUT:add(GoalFeature, User),
    % synchronize call
    _Goals2 = ?MUT:goals(<<"default">>),
    Counts = ?MUT:counts(<<"default">>),
    ExpectedEvents = lists:sort([NonGoalFeature]),
    ?assertEqual(User, meck:capture(first, ?COUNTER_MOD, add, ['_', '_', '_'], 1)),
    ?assertEqual(
        ExpectedEvents,
        lists:sort(meck:capture(first, ?COUNTER_MOD, add, ['_', '_', '_', '_'], 2))
    ),
    ?assertEqual(GoalCounterPid, meck:capture(first, ?COUNTER_MOD, add, ['_', '_', '_', '_'], 4)),
    ExpectedCounts = [
        #{count => 1, id => NonGoalCounterID, tag_counts => #{[] => 1}},
        #{count => 1, id => GlobalCounterID, tag_counts => #{[] => 1}},
        #{count => 1, id => GoalCounterID, tag_counts => #{[NonGoalFeature] => 1}}
    ],
    ?assertEqual(
        lists:sort(ExpectedCounts),
        lists:sort(Counts)
    ),
    Config.

%% Goal defined in a different namespace than the triggering events: the
%% goal counter fires in its own namespace with an empty event tag list, and
%% counts stay separated per namespace.
ec_test_triggering_a_goal_from_another_namespace(Config) ->
    User = <<"user_id">>,
    NonGoalFeature = <<"non_goal">>,
    GoalFeature = <<"goal">>,
    GlobalCounterPid = erlang:list_to_pid("<0.0.0>"),
    NSGlobalCounterPid = erlang:list_to_pid("<0.1.0>"),
    NonGoalCounterPid = erlang:list_to_pid("<0.0.1>"),
    GoalCounterPid = erlang:list_to_pid("<0.0.2>"),
    GlobalCounterID = features_counter_id:global_counter_id(<<"default">>),
    NSGlobalCounterID = features_counter_id:global_counter_id(<<"not default">>),
    NonGoalCounterID = features_counter_id:create(NonGoalFeature),
    GoalCounterID = features_counter_id:create(<<"not default">>, GoalFeature, named),
    % The global counter will start up before we do anything, wait for that to
    % happen before we break supervisor as a mock just below
    meck:wait(
        supervisor,
        start_child,
        [features_counter_sup, spec_for_feature(GlobalCounterID)],
        100
    ),
    meck:expect(
        supervisor,
        start_child,
        [features_counter_sup, '_'],
        meck:raise(error, should_not_hit_this)
    ),
    ?MUT:register_counter(GlobalCounterID, GlobalCounterPid),
    ?MUT:register_counter(NSGlobalCounterID, NSGlobalCounterPid),
    ?MUT:register_counter(NonGoalCounterID, NonGoalCounterPid),
    ?MUT:register_counter(GoalCounterID, GoalCounterPid),
    ?MUT:add_goal(<<"not default">>, GoalFeature),
    meck:expect(?COUNTER_MOD, add, ['_', '_', '_'], ok),
    meck:expect(?COUNTER_MOD, add, ['_', '_', '_', '_'], ok),
    meck:expect(?COUNTER_MOD, includes_key, [
        {['_', NonGoalCounterPid], true},
        {['_', GlobalCounterPid], true},
        {['_', GoalCounterPid], false}
    ]),
    meck:expect(?COUNTER_MOD, count, [
        {[NonGoalCounterPid], #{count => 1, tag_counts => #{[] => 1}}},
        {[GlobalCounterPid], #{count => 1, tag_counts => #{[] => 1}}},
        {[NSGlobalCounterPid], #{count => 1, tag_counts => #{[] => 1}}},
        {[GoalCounterPid], #{count => 1, tag_counts => #{[] => 1}}}
    ]),
    % synchronize call
    _Goals = ?MUT:goals(<<"default">>),
    ?MUT:add(NonGoalFeature, User),
    ?MUT:add(<<"not default">>, GoalFeature, User, #{}),
    Counts = ?MUT:counts(<<"default">>),
    NamespaceCounts = ?MUT:counts(<<"not default">>),
    io:format("Adds ~p~n", [meck:history(?COUNTER_MOD)]),
    % Cross-namespace events do not tag the goal: expected event list is empty
    ExpectedEvents = lists:sort([]),
    ?assertEqual(User, meck:capture(first, ?COUNTER_MOD, add, ['_', '_', '_', GoalCounterPid], 1)),
    ?assertEqual(
        ExpectedEvents,
        lists:sort(meck:capture(first, ?COUNTER_MOD, add, ['_', '_', '_', GoalCounterPid], 2))
    ),
    ExpectedCounts = [
        #{count => 1, id => NonGoalCounterID, tag_counts => #{[] => 1}},
        #{count => 1, id => GlobalCounterID, tag_counts => #{[] => 1}}
    ],
    ExpectedNamespaceCounts = [
        #{count => 1, id => NSGlobalCounterID, tag_counts => #{[] => 1}},
        #{count => 1, id => GoalCounterID, tag_counts => #{[] => 1}}
    ],
    ?assertEqual(
        lists:sort(ExpectedCounts),
        lists:sort(Counts)
    ),
    ?assertEqual(
        lists:sort(ExpectedNamespaceCounts),
        lists:sort(NamespaceCounts)
    ),
    Config.
%% When started with an undefined store lib the router still counts events;
%% counters are started with `undefined' as their store module.
fa_test_event_no_persistence(Config) ->
    Feature = <<"feature_name">>,
    CounterPid = self(),
    CounterID = features_counter_id:create(Feature),
    meck:expect(supervisor, start_child, [features_counter_sup, '_'], {ok, CounterPid}),
    User = <<"user_id">>,
    StoreLibState = ?config(store_lib_state, Config),
    meck:expect(features_store_lib, get, [StoreLibState], {not_supported, StoreLibState}),
    {ok, Pid} = ?MUT:start_link(undefined),
    Config1 = [{pid, Pid} | Config],
    meck:wait(features_store_lib, get, '_', 1000),
    ?MUT:add(Feature, User),
    Spec = spec_for_feature(CounterID, undefined),
    ?assertEqual(Spec, meck:capture(first, supervisor, start_child, ['_', Spec], 2)),
    ?assertEqual(2, meck:num_calls(?COUNTER_MOD, add, [User, '_', CounterPid])),
    Config1.

%% An add with a `value' option forwards that value to the counter's
%% 4-arity add call.
ga_test_with_value(Config) ->
    Feature = <<"feature_name">>,
    Value = 1,
    User = <<"user_id">>,
    ?MUT:add(<<"default">>, Feature, User, #{ensure_goal => true, value => Value}),
    Spec = spec_for_feature(Feature),
    ExpectedOtherCounters = [],
    ?assertEqual(Spec, meck:capture(first, supervisor, start_child, ['_', Spec], 2)),
    ?assertEqual(1, meck:num_calls(?COUNTER_MOD, add, [User, ExpectedOtherCounters, Value, '_'])),
    Config.

%% namespaces/0 lists every namespace that has at least one counter.
ha_test_namespaces(Config) ->
    NS1 = <<"default">>,
    NS2 = <<"not default">>,
    CID1 = features_counter_id:create(NS1, <<"feature">>, named),
    CID2 = features_counter_id:create(NS2, <<"feature">>, named),
    ?MUT:register_counter(CID1, self()),
    ?MUT:register_counter(CID2, self()),
    % Synchronous call used only to flush the router's mailbox
    ?MUT:goals(<<"default">>),
    RoutedNamespaces = lists:sort(?MUT:namespaces()),
    ExpectedNamespaces = lists:sort([NS1, NS2]),
    ?assertEqual(ExpectedNamespaces, RoutedNamespaces),
    Config.
%% stop_counter/1 terminates and deletes the supervised child, persists the
%% counter list, and removes the counter from counts/1.
ia_stop_counter(Config) ->
    Namespace = <<"default">>,
    Feature = <<"feature_name">>,
    CounterID = features_counter_id:create(Namespace, Feature, named),
    Pid = self(),
    ?MUT:register_counter(CounterID, Pid),
    % Synchronous call used only to flush the router's mailbox
    ?MUT:goals(Namespace),
    InitialCounts = ?MUT:counts(Namespace),
    ?assertNotEqual([], InitialCounts),
    ?MUT:stop_counter(CounterID),
    ID = {features_counter, CounterID},
    meck:wait(supervisor, delete_child, '_', 1000),
    test_utils:assertNCalls(1, supervisor, terminate_child, [features_counter_sup, ID]),
    test_utils:assertNCalls(1, supervisor, delete_child, [features_counter_sup, ID]),
    ExpectedData = expected_stored_data(#{counters => [CounterID]}),
    ?assertEqual(ExpectedData, meck:capture(first, features_store_lib, store, '_', 1)),
    CountsAfterStopping = ?MUT:counts(Namespace),
    ?assertEqual([], CountsAfterStopping),
    Config.

%% Stopping a counter that was never registered still issues the supervisor
%% terminate/delete calls and leaves counts empty.
ib_stop_unregistered_counter(Config) ->
    Namespace = <<"default">>,
    Feature = <<"feature_name">>,
    CounterID = features_counter_id:create(Namespace, Feature, named),
    ?MUT:goals(Namespace),
    InitialCounts = ?MUT:counts(Namespace),
    ?assertEqual([], InitialCounts),
    ?MUT:stop_counter(CounterID),
    ID = {features_counter, CounterID},
    meck:wait(supervisor, delete_child, '_', 1000),
    test_utils:assertNCalls(1, supervisor, terminate_child, [features_counter_sup, ID]),
    test_utils:assertNCalls(1, supervisor, delete_child, [features_counter_sup, ID]),
    CountsAfterStopping = ?MUT:counts(Namespace),
    ?assertEqual([], CountsAfterStopping),
    Config.
%% stop_counter/1 tolerates the supervisor answering {error, not_found} for
%% both terminate_child and delete_child; the counter is still removed from
%% the router state and the persisted data written.
ic_stop_counter_not_found(Config) ->
    Namespace = <<"default">>,
    Feature = <<"feature_name">>,
    CounterID = features_counter_id:create(Namespace, Feature, named),
    Pid = self(),
    meck:expect(supervisor, terminate_child, [features_counter_sup, '_'], {error, not_found}),
    meck:expect(supervisor, delete_child, [features_counter_sup, '_'], {error, not_found}),
    ?MUT:register_counter(CounterID, Pid),
    % Synchronous call used only to flush the router's mailbox
    ?MUT:goals(Namespace),
    InitialCounts = ?MUT:counts(Namespace),
    ?assertNotEqual([], InitialCounts),
    ?MUT:stop_counter(CounterID),
    ID = {features_counter, CounterID},
    meck:wait(supervisor, delete_child, '_', 1000),
    test_utils:assertNCalls(1, supervisor, terminate_child, [features_counter_sup, ID]),
    test_utils:assertNCalls(1, supervisor, delete_child, [features_counter_sup, ID]),
    ExpectedData = expected_stored_data(#{counters => [CounterID]}),
    ?assertEqual(ExpectedData, meck:capture(first, features_store_lib, store, '_', 1)),
    CountsAfterStopping = ?MUT:counts(Namespace),
    ?assertEqual([], CountsAfterStopping),
    Config.
%% events_for_key/2 returns the features whose counters include the user,
%% restricted to the requested namespace and excluding the global counter.
ja_test_events_for_key(Config) ->
    Namespace = <<"default">>,
    Feature = <<"feature_name">>,
    User = <<"user_id">>,
    CounterGlobal = features_counter_id:global_counter_id(Namespace),
    CounterID1 = features_counter_id:create(Feature),
    CounterID2 = features_counter_id:create(<<"unused feature">>),
    CounterID3 = features_counter_id:create(<<"other namespace">>, <<"unused feature">>, named),
    PidGlobal = erlang:list_to_pid("<0.0.0>"),
    Pid1 = erlang:list_to_pid("<0.0.1>"),
    Pid2 = erlang:list_to_pid("<0.0.2>"),
    Pid3 = erlang:list_to_pid("<0.0.3>"),
    meck:expect(features_counter, includes_key, [
        {[User, PidGlobal], true},
        {[User, Pid1], true},
        {[User, Pid2], false}
    ]),
    ?MUT:register_counter(CounterGlobal, PidGlobal),
    ?MUT:register_counter(CounterID1, Pid1),
    ?MUT:register_counter(CounterID2, Pid2),
    ?MUT:register_counter(CounterID3, Pid3),
    % Run to synchronize/handle all messages
    ?MUT:goals(Namespace),
    Events = ?MUT:events_for_key(Namespace, User),
    ?assertEqual([Feature], Events),
    Config.

%% Builds the term the suite expects features_store_lib:store/2 to receive,
%% filling in empty defaults for counters/goals.
expected_stored_data(Data) ->
    #{
        counters => maps:get(counters, Data, []),
        goals => maps:get(goals, Data, #{})
    }.

%% Child spec helper; a bare binary feature name is first turned into a
%% default-namespace counter id.
spec_for_feature(Feature) when is_binary(Feature) ->
    ID = features_counter_id:create(Feature),
    spec_for_feature(ID, ?STORE_LIB);
spec_for_feature(ID) ->
    spec_for_feature(ID, ?STORE_LIB).

spec_for_feature(Feature, StoreLibMod) ->
    #{
        id => {features_counter, Feature},
        start => {features_counter, start_link, [StoreLibMod, Feature]}
    }.

%% Merges the given fields over the zeroed count record shape used by
%% features_counter:count/1.
counts(C) ->
    Default = #{
        count => 0,
        single_tag_counts => #{},
        tag_counts => #{}
    },
    maps:merge(Default, C).
tests/features_count_router_SUITE.erl
0.533884
0.444444
features_count_router_SUITE.erl
starcoder
% License: Apache License, Version 2.0 % % Licensed under the Apache License, Version 2.0 (the "License"); % you may not use this file except in compliance with the License. % You may obtain a copy of the License at % % http://www.apache.org/licenses/LICENSE-2.0 % % Unless required by applicable law or agreed to in writing, software % distributed under the License is distributed on an "AS IS" BASIS, % WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. % See the License for the specific language governing permissions and % limitations under the License. % %% @author <NAME> <<EMAIL>> %% @copyright Copyright 2011-2012 <NAME> %% %% @doc Tools for working with Erlang terms representing JSON. %% %% The ej module is intended to make it easy to work with the Erlang %% structure used by `mochijson2' to represent JSON. You can use %% `ej:get' to walk an object and return a particular value, or %% `ej:set' to update a value. %% %% @end -module(ej). -author('<NAME> <<EMAIL>'). -export([ dedup/1, get/2, get/3, set/3, set_p/3, delete/2, valid/2 ]). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -include_lib("ej.hrl"). -export_type([json_object/0, json_plist/0, json_term/0]). %% @doc Extract a value from `Obj' %% %% `Keys' is a tuple or list specifying a path into the JSON %% structure. Each string or binary element of `Keys' will act like a %% Javascript property lookup. Elements of JSON arrays can be %% accessed by including an integer as an element of `Keys'. In %% addition, the atoms `` 'first' '' and `` 'last' '' can be used to %% access the first and last elements of a list, respectively. %% %% Additionally, a subset of JSON objects in an array can be selected %% by matching on a key/value pair. This is best explained with an %% example (for compactness, the input and output is shown using JSON %% notation instead of EJSON). 
%%
%% Given:
%% ```
%% Cakes = {[
%%          {<<"cakes">>, [
%%              {[{<<"frosting">>, <<"white">>}, {<<"tastes">>, <<"good">>}]},
%%              {[{<<"frosting">>, <<"red">>}, {<<"tastes">>, <<"good">>}]},
%%              {[{<<"frosting">>, <<"blue">>}, {<<"tastes">>, <<"bad">>}]}
%%            ]
%%          }
%%         ]}.
%% '''
%%
%% Then you can select the good tasting cakes like this:
%%
%% ```
%% ej:get({"cakes", {select, {"tastes", "good"}}}, Cakes).
%%
%% [
%%  {[{<<"frosting">>, <<"white">>}, {<<"tastes">>, <<"good">>}]},
%%  {[{<<"frosting">>, <<"red">>}, {<<"tastes">>, <<"good">>}]}
%% ]
%% '''
%%
-spec get(ej_key_path(), json_object() | json_plist()) -> json_term() | undefined.
%% An empty key path never matches anything.
get({}, _Obj) ->
    undefined;
get(Keys, Obj) when is_tuple(Keys) ->
    get0(tuple_to_list(Keys), Obj);
get(Keys, Obj) when is_list(Keys) ->
    get0(Keys, Obj).

%% @doc same as get/2, but returns `Default' if the specified value was not found.
-spec get(ej_key_path(), json_object() | json_plist(), json_term()) -> json_term().
get({}, _Obj, Default) ->
    Default;
get(Keys, Obj, Default) when is_tuple(Keys) orelse is_list(Keys) ->
    case get(Keys, Obj) of
        undefined -> Default;
        Value -> Value
    end.

%% Walk the key path one element at a time; a {from_select, _} wrapper
%% (produced by a select step in get_value/2) is unwrapped at the end.
get0([Key | Rest], Obj) ->
    case get_value(Key, Obj) of
        undefined -> undefined;
        AValue -> get0(Rest, AValue)
    end;
get0([], {from_select, Value}) ->
    Value;
get0([], Value) ->
    Value.

%% -define(IS_OBJECT(Obj), (is_tuple(Obj) andalso (1 =:= tuple_size(Obj)
%%                                                 orelse
%%                                                 struct =:= element(1, Obj)))).
%% Resolve one key-path element against one EJSON node. Handles both the
%% mochijson2 {struct, Proplist} form and the EEP18 {Proplist} form, plus
%% list access (first/last/integer index), prefix and select filters.
get_value(Key, Obj) when is_list(Key) ->
    % String keys are normalized to binaries
    get_value(iolist_to_binary(Key), Obj);
get_value(Key, {struct, L}) when is_binary(Key) orelse filter =:= element(1, Key) ->
    get_value(Key, L);
get_value(Key, {L}) when is_binary(Key); is_tuple(Key) -> % alt form
    get_value(Key, L);
get_value(Key, {from_select, []}) when is_binary(Key) ->
    undefined;
get_value(Key, {from_select, List}) when is_binary(Key) ->
    % A key lookup applied to a select result maps over every matched object
    lists:flatten([get_value(Key, L) || L <- List]);
get_value(Key, PL=[{_, _}|_T]) when is_binary(Key) ->
    case lists:keyfind(Key, 1, PL) of
        false -> undefined;
        {_, Return} -> Return
    end;
get_value(Key, [_H|_T]) when is_binary(Key) ->
    undefined;
get_value(Key, []) when is_binary(Key) ->
    undefined;
get_value(Key, null) when is_binary(Key) ->
    undefined;
get_value(first, [H|_T]) ->
    H;
get_value(last, List=[_H|_T]) ->
    lists:last(List);
get_value(Index, List=[_H|_T]) when is_integer(Index) ->
    % 1-based, crashes (function_clause in lists:nth) when out of range
    lists:nth(Index, List);
get_value({startswith, KeyPrefix}, List) when is_binary(KeyPrefix) ->
    Res = lists:filter(fun({K, _}) -> matching_prefix(KeyPrefix, K) end, List),
    case Res of
        [] -> undefined;
        _ -> Res
    end;
get_value({select, KeyValue}, List=[_H|_T]) when is_tuple(KeyValue) orelse KeyValue =:= all ->
    % Tag the result so downstream steps know it is a multi-match
    {from_select, matching_array_elements(KeyValue, List)};
get_value(Index, Obj) when is_integer(Index) ->
    erlang:error({index_for_non_list, {Index, Obj}}).

%% Normalize a key-path element: strings become binaries, everything else
%% (tuples, integers, atoms) passes through unchanged.
as_binary(Key) when is_binary(Key) ->
    Key;
as_binary(Key) when is_list(Key) ->
    iolist_to_binary(Key);
as_binary(Key) when is_tuple(Key) ->
    Key;
as_binary(Key) when is_integer(Key) orelse is_atom(Key) ->
    Key.

%% Filter array elements by a {Key, Value} comparison; `all' keeps everything.
matching_array_elements(all, List) ->
    List;
matching_array_elements(CompKey, List) ->
    lists:filter(fun(E) -> matching_element(CompKey, E) end, List).

%% true when binary V starts with Prefix.
matching_prefix(Prefix, V) ->
    PrefixSize = byte_size(Prefix),
    case V of
        <<Prefix:PrefixSize/binary, _/binary>> -> true;
        _ -> false
    end.
%% Does object E contain the pair {K, V}? Key and value are normalized with
%% as_binary/1; a matching key with a different value is a non-match. Both
%% object encodings are supported; anything else is a hard error.
matching_element({K, V}, {struct, E}) ->
    Value = as_binary(V),
    case lists:keyfind(as_binary(K), 1, E) of
        false -> false;
        {_, Value} -> true;
        _ -> false
    end;
matching_element({K, V}, {E}) ->
    Value = as_binary(V),
    case lists:keyfind(as_binary(K), 1, E) of
        false -> false;
        {_, Value} -> true;
        _ -> false
    end;
matching_element(Key, E) ->
    erlang:error({error_matching_element, {Key, E}}).

%% @doc Set a value in `Obj'
%%
%% Replaces the value at the path specified by `Keys' with `Value' and
%% returns the new structure. If `Value' is the atom `EJ_DELETE',
%% then the path specified by `Keys' is removed (but see `delete/2').
%%
-spec set(ej_key_path(), json_object(), json_term()) -> json_term().
set(Keys, Obj, Value) when is_tuple(Keys) ->
    set0([ as_binary(X) || X <- tuple_to_list(Keys) ], Obj, Value, []);
set(Keys, Obj, Value) when is_list(Keys) ->
    set0([ as_binary(X) || X <- Keys ], Obj, Value, []).

%% @doc Set a value in `Obj' and create missing intermediate
%% nodes if need be.
%%
%% This resembles the behavior of `mkdir -p'. If the intermediate
%% elements in the structure are missing, then they are created. This
%% is useful when creating complex EJSON structures from scratch.
%%
%% The arguments are the same as for `set'.
%%
%% Example:
%% ```
%% ej:set_p({"users", {select, {"name", "sebastian"}}, "location"},
%%          {[]},
%%          <<"Germany">>).
%%
%% {[{<<"users">>,
%%    [{[{<<"location">>, <<"Germany">>},
%%       {<<"name">>, <<"sebastian">>}]}]}]}
%% '''
%%
-spec set_p(ej_key_path(), json_object(), json_term()) -> json_term().
set_p(Keys, Obj, Value) when is_tuple(Keys) ->
    set0([ as_binary(X) || X <- tuple_to_list(Keys) ], Obj, Value, [create_missing]);
set_p(Keys, Obj, Value) when is_list(Keys) ->
    set0([ as_binary(X) || X <- Keys ], Obj, Value, [create_missing]).
%% Core of set/set_p/delete. Walks the key path, rebuilding each level on the
%% way back up. Options carry `create_missing' (set_p) and a `make_object'
%% fun so newly built objects match the input encoding ({struct, L} or {L}).
%% The sentinel value 'EJ_DELETE' removes the addressed element.
set0([], _, Value, _) ->
    Value;
set0([Key | Rest], {struct, P}, Value, Options) when is_binary(Key) orelse Key == 'EJ_DELETE' ->
    case {get_value(Key, P), length(Rest), Value, proplists:get_value(create_missing, Options)} of
        %% Matched when creating new nested structures
        {undefined, _, _, true} ->
            {struct, lists:keystore(Key, 1, P, {Key, set0(Rest, {struct, []}, Value, Options)})};
        {undefined, Len, _, _} when Len > 0 ->
            erlang:error({no_path, Key});
        {_, Len, 'EJ_DELETE', _} when Len == 0 ->
            {struct, lists:keydelete(Key, 1, P)};
        {Downstream, _, _, _} ->
            {struct, lists:keystore(Key, 1, P,
                                    {Key, set0(Rest, Downstream, Value,
                                               [{make_object, fun make_struct_object/1} | Options])})}
    end;
set0([Key | Rest], {P}, Value, Options) % clean this up? alt form
  when is_binary(Key) orelse Key == 'EJ_DELETE' ->
    case {get_value(Key, P), length(Rest), Value, proplists:get_value(create_missing, Options)} of
        {undefined, _, _, true} ->
            {lists:keystore(Key, 1, P, {Key, set0(Rest, {[]}, Value, Options)})};
        {undefined, Len, _, _} when Len > 0 ->
            erlang:error({no_path, Key});
        {_, Len, 'EJ_DELETE', _} when Len == 0 ->
            {lists:keydelete(Key, 1, P)};
        {Downstream, _, _, _} ->
            {lists:keystore(Key, 1, P,
                            {Key, set0(Rest, Downstream, Value,
                                       [{make_object, fun make_object/1} | Options])})}
    end;
%% `new' as the final path element prepends Value to the array
set0([new | []], P, Value, _Options) when is_list(P) ->
    [Value|P];
%% Deleting by select filter drops every matching element
set0([{select, Key = {_,_}}], P, 'EJ_DELETE', _Options) when is_list(P) ->
    lists:filter(fun(E) -> not matching_element(Key, E) end, P);
set0([{_,_}], P, Object, _Options) when not is_tuple(Object) ->
    erlang:error({replacing_object_with_value, {P, Object}});
%% Unwrap the object encoding before handling a select step, remembering
%% which constructor rebuilds objects of that encoding
set0(Key = [{select, {_,_}} | _], {struct, P}, Value, Options) ->
    set0(Key, P, Value, [{make_object, fun make_struct_object/1} | Options]);
set0(Key = [{select, {_,_}} | _], {P}, Value, Options) ->
    set0(Key, P, Value, [{make_object, fun make_object/1} | Options]);
set0([ {select, Filter = {K,_}} | Rest], P, Value, Options) when is_list(P) ->
    MakeObject = case lists:keyfind(make_object, 1, Options) of
                     false -> undefined;
                     {_, MakeObject_Tmp} -> MakeObject_Tmp
                 end,
    % Rewrite every element matching Filter; remember whether any matched
    {Existed, Res} =
        lists:foldl(
          fun(E, {WhetherFound, Acc}) ->
                  case matching_element(Filter, E) of
                      true ->
                          ChildElems = object_list(set0(Rest, E, Value, Options)),
                          Child = MakeObject(lists:keystore(as_binary(K), 1, ChildElems,
                                                            composite_key_as_binary(Filter))),
                          {true, [Child | Acc]};
                      false ->
                          {WhetherFound, [E | Acc]}
                  end
          end, {false, []}, P),
    case {Existed, proplists:get_value(create_missing, Options)} of
        {true, _} ->
            lists:reverse(Res);
        {false, true} ->
            % set_p semantics: no match, so synthesize an element carrying
            % the filter's key/value pair
            ChildElems = object_list(set0(Rest, MakeObject([]), Value, Options)),
            Child = lists:keystore(K, 1, ChildElems, composite_key_as_binary(Filter)),
            [MakeObject(Child) | lists:reverse(Res)];
        {false, _} ->
            erlang:error({no_path, Filter})
    end;
%% NOTE(review): the `;' between the guard sequences makes this clause match
%% whenever P is a list, even for a Key that is neither integer nor atom —
%% confirm whether `,' (AND) was intended.
set0([Idx | Rest], P, Value, Options) when is_integer(Idx) orelse is_atom(Idx); is_list(P) ->
    case {get_value(Idx, P), length(Rest), Value} of
        {undefined, Len, _} when Len > 0 ->
            erlang:error({no_path, Idx});
        {_, Len, 'EJ_DELETE'} when Len == 0 ->
            set_nth(Idx, P, 'EJ_DELETE');
        {Downstream, _, _} ->
            set_nth(Idx, P, set0(Rest, Downstream, Value, Options))
    end.

%% Strip the object wrapper, yielding the raw key/value proplist.
object_list({struct, L}) -> L;
object_list({L}) -> L.

make_object(L) -> {L}.
make_struct_object(L) -> {struct, L}.

composite_key_as_binary({K, V}) -> {as_binary(K), as_binary(V)}.

%% Replace (or, with 'EJ_DELETE', remove) the element at a list position;
%% position is `first', `last', or a 1-based integer index.
set_nth(first, [_H|T], 'EJ_DELETE') -> T;
set_nth(first, [_H|T], V) -> [V|T];
set_nth(last, L, 'EJ_DELETE') -> [_H|T] = lists:reverse(L), lists:reverse(T);
set_nth(last, L, V) -> [_H|T] = lists:reverse(L), lists:reverse([V|T]);
set_nth(N, L, 'EJ_DELETE') -> {L1, [_H|L2]} = lists:split(N - 1, L), lists:concat([L1, L2]);
set_nth(N, L, V) -> {L1, [_H|L2]} = lists:split(N - 1, L), lists:concat([L1, [V|L2]]).

% TODO: support setting list elements as well as a means to add new
% elements to a list.

%% @doc Remove the item specified by `Keys'.
-spec delete(ej_key_path(), json_object()) -> json_object().
delete(Keys, Obj) when is_tuple(Keys) -> set0([ as_binary(X) || X <- tuple_to_list(Keys) ], Obj, 'EJ_DELETE', []); delete(Keys, Obj) when is_list(Keys) -> set0([ as_binary(X) || X <- Keys ], Obj, 'EJ_DELETE', []). %% valid - JSON term validation via spec %% context threaded through validity checking -record(spec_ctx, { %% List of keys keeping track of where we are in a nested %% JSON object. path = [] :: [binary()], %% Future use: use this to collect errors so that validation %% can report a list of errors rather than just the first %% one. errors = [] :: [term()] }). %% An re module regex (compiled) and a message that will be %% returned when nomatch is triggered. -type ej_string_match() :: {'string_match', {re:mp(), _}}. %% User supplied validation function. This must be an arity 1 fun that %% will be given the value and should return 'ok' if the value is %% good. Any other return is treated as an invalid result. The type %% name describes the expected type of the value. We might want to %% change this or remove it if we want to support a notion of 'any_of' %% matching. The advantage for now is that we can auto-generate a %% better missing message. -type ej_fun_match() :: {fun_match, {fun((json_term()) -> ok | error), any_type | ej_json_type_name(), _}}. %% Map a value spec over each element of an array value. -type ej_array_map() :: {array_map, ej_json_val_spec()}. %% Walk the key/value pairs of a JSON object and execute the %% corresponding key and value specs for each pair. -type ej_object_map() :: {object_map, {{keys, ej_json_val_spec()}, {values, ej_json_val_spec()}}}. %% Validate a value against one of multiple spec's. -type ej_any_of() :: {any_of, {ej_json_val_spec(), ErrorMessage :: any()}}. -type ej_json_spec() :: {[ej_json_spec_rule()]} | ej_object_map() | ej_array_map() | empty_object | empty_array. -type ej_json_spec_rule() :: {ej_json_key_spec(), ej_json_val_spec()}. -type ej_json_key_spec() :: binary() | {opt, binary()}. 
%% Everything that may appear on the right-hand side of a spec rule:
%% a literal binary, a JSON type name, a tagged matcher, or a nested
%% object spec.
-type ej_json_val_spec() :: binary()
                          | ej_json_type_name()
                          | ej_string_match()
                          | ej_fun_match()
                          | ej_array_map()
                          | ej_object_map()
                          | ej_any_of()
                          | {[ej_json_spec_rule()]}.

-spec valid(Spec :: ej_json_spec(), Obj :: json_object() | json_array()) -> ok | #ej_invalid{}.
%% @doc Validate JSON terms. Validity is determined by the
%% `ej_json_spec()' provided which has the shape of EJSON terms but
%% with keys and values describing what is expected. `Obj' is the
%% EJSON term to be validated. This function will return `ok' if all
%% validation rules succeed and a `#ej_invalid{}' record when the
%% first failure is encountered (validation specs are processed in
%% order, depth first). NOTE: this function is experimental and the
%% API and definition of specs is subject to change.
valid({object_map, _}=Spec, Obj={OL}) when is_list(OL) ->
    check_value_spec(<<"no_key">>, Spec, Obj, #spec_ctx{});
valid({object_map, _}=Spec, Obj={struct, OL}) when is_list(OL) ->
    check_value_spec(<<"no_key">>, Spec, Obj, #spec_ctx{});
valid(empty_object, {[]}) ->
    ok;
valid(empty_object, {struct, []}) ->
    ok;
%% A non-empty object fails the empty_object spec. (Both clauses pass
%% the term through json_type/1 for a consistent found_type.)
valid(empty_object, Obj={OL}) when is_list(OL) ->
    #ej_invalid{type = empty_object, key = undefined, expected_type = object,
                found_type = json_type(Obj), found = Obj};
valid(empty_object, Obj={struct, OL}) when is_list(OL) ->
    #ej_invalid{type = empty_object, key = undefined, expected_type = object,
                found_type = json_type(Obj), found = Obj};
valid({array_map, _}=Spec, List) when is_list(List) ->
    check_value_spec(<<"no_key">>, Spec, List, #spec_ctx{});
valid(empty_array, []) ->
    ok;
valid(empty_array, Obj) when is_list(Obj) ->
    #ej_invalid{type = empty_array, key = undefined, expected_type = array,
                found_type = array, found = Obj};
%% A rule-list spec applies to either object flavor.
valid({L}, Obj={OL}) when is_list(L) andalso is_list(OL) ->
    valid(L, Obj, #spec_ctx{});
valid({L}, Obj={struct, OL}) when is_list(L) andalso is_list(OL) ->
    valid(L, Obj, #spec_ctx{});
valid({L}, Obj) when is_list(L) ->
    #ej_invalid{type = json_type, key = undefined, expected_type = object,
                found_type = json_type(Obj), found = Obj}.

%% Process one spec rule at a time against Obj.
%% BUGFIX: the head previously read `Ctx = #spec_ctx{path = Path} = Ctx'
%% -- a redundant double binding of Ctx; the leading `Ctx =' is dropped.
valid([{{Opt, Key}, ValSpec}|Rest], Obj, #spec_ctx{path = Path} = Ctx)
  when is_binary(Key) andalso (Opt =:= opt orelse Opt =:= req) ->
    case {Opt, ej:get({Key}, Obj)} of
        %% Optional key absent: nothing to check.
        {opt, undefined} ->
            valid(Rest, Obj, Ctx);
        %% Required key absent: report it with the type the spec implies.
        {req, undefined} ->
            #ej_invalid{type = missing, key = make_key(Key, Path),
                        expected_type = type_from_spec(ValSpec)};
        {_, Val} ->
            case check_value_spec(Key, ValSpec, Val, Ctx) of
                ok -> valid(Rest, Obj, Ctx);
                Error -> Error
            end
    end;
valid([{Key, ValSpec}|Rest], Obj, #spec_ctx{} = Ctx) when is_binary(Key) ->
    %% required key literal
    valid([{{req, Key}, ValSpec}|Rest], Obj, Ctx);
valid([], _Obj, _Ctx) ->
    ok.

-spec make_key(Key :: binary(), Path :: [binary()]) -> binary().
%% Given a key and a list of keys in `Path' indicating the traversal
%% path, build a JSON-style key separated by dots.
make_key(Key, Path) ->
    join_path(make_path(Key, Path)).

%% Path is innermost-first; reverse it so the tuple reads root-first.
make_path(Key, Path) ->
    list_to_tuple(lists:reverse([Key | Path])).

join_path(Path) ->
    join_bins(tuple_to_list(Path), <<".">>).

%% Return a JSON type name to be used as the expected_type based on a
%% value spec. If no type can be determined or any type is accepted,
%% 'any_value' should be returned.
type_from_spec({string_match, _}) ->
    string;
type_from_spec({array_map, _}) ->
    array;
type_from_spec(empty_array) ->
    array;
type_from_spec({object_map, _}) ->
    object;
type_from_spec(empty_object) ->
    object;
type_from_spec({fun_match, {_, Type, _}}) ->
    Type;
type_from_spec(Literal) when is_binary(Literal) ->
    string;
type_from_spec(Literal) when is_integer(Literal) orelse is_float(Literal) ->
    number;
type_from_spec({L}) when is_list(L) ->
    object;
type_from_spec({any_of, {Specs, _ErrorMsg}}) ->
    type_from_any_of(Specs);
type_from_spec(Type) when Type =:= string;
                          Type =:= number;
                          Type =:= boolean;
                          Type =:= array;
                          Type =:= object;
                          Type =:= null;
                          Type =:= any_value ->
    Type;
type_from_spec(Type) ->
    error({unknown_spec, type_from_spec, Type}).
%% Attempt to find a type for an any_of spec. If all the containing
%% specs have the same type, return that; otherwise, return
%% 'any_value' as the type placeholder.
type_from_any_of([]) ->
    any_value;
type_from_any_of(Specs) ->
    type_from_any_of(Specs, unset).

%% Fold over the specs carrying the last seen type; bail out with
%% 'any_value' as soon as two specs disagree.
type_from_any_of([Spec | Rest], Ans) ->
    CurType = type_from_spec(Spec),
    case Ans of
        PrevType when PrevType =:= unset;
                      PrevType =:= CurType ->
            type_from_any_of(Rest, CurType);
        _DiffType ->
            any_value
    end;
type_from_any_of([], Ans) ->
    Ans.

%% Map an EJSON term to JSON type name.
json_type(Val) when is_binary(Val) ->
    string;
json_type({L}) when is_list(L) ->
    object;
json_type({struct, L}) when is_list(L) ->
    object;
json_type(L) when is_list(L) ->
    array;
json_type(null) ->
    null;
json_type(Bool) when Bool =:= true; Bool =:= false ->
    boolean;
json_type(N) when is_integer(N) orelse is_float(N) ->
    number.

%% check_value_spec(Key, Spec, Val, Ctx): validate one value against
%% one spec. Returns ok or an #ej_invalid{} record. Key and Ctx only
%% feed error reporting (make_key/2). Clause order is significant:
%% specific spec shapes must precede the literal exact-match fallback
%% at the end.
%%
%% traverse nested spec here
check_value_spec(Key, {L}, Val={V}, #spec_ctx{path = Path} = Ctx)
  when is_list(L) andalso is_list(V) ->
    valid(L, Val, Ctx#spec_ctx{path = [Key|Path]});
check_value_spec(Key, {L}, Val={struct, V}, #spec_ctx{path = Path} = Ctx)
  when is_list(L) andalso is_list(V) ->
    valid(L, Val, Ctx#spec_ctx{path = [Key|Path]});
%% was expecting nested spec, found non-object
check_value_spec(Key, {L}, Val, #spec_ctx{path = Path}) when is_list(L) ->
    #ej_invalid{type = json_type, key = make_key(Key, Path),
                expected_type = object, found = Val, found_type = json_type(Val)};
check_value_spec(Key, {string_match, {Regex, Msg}}, Val, #spec_ctx{path = Path})
  when is_binary(Val) ->
    %% string_match
    case re:run(Val, Regex) of
        nomatch ->
            #ej_invalid{type = string_match, key = make_key(Key, Path),
                        expected_type = string, found = Val,
                        found_type = string, msg = Msg};
        {match, _} ->
            ok
    end;
check_value_spec(Key, {string_match, _}, Val, #spec_ctx{path = Path}) ->
    %% expected string for string_match, got wrong type
    #ej_invalid{type = json_type, key = make_key(Key, Path),
                expected_type = string, found_type = json_type(Val),
                found = Val};
%% fun_match with any_type: skip the type check, just run the fun.
check_value_spec(Key, {fun_match, {Fun, any_type, Msg}}, Val, #spec_ctx{path = Path}) ->
    case Fun(Val) of
        ok ->
            ok;
        _ ->
            #ej_invalid{type = fun_match, key = make_key(Key, Path),
                        expected_type = any_value, found = Val,
                        found_type = json_type(Val), msg = Msg}
    end;
check_value_spec(Key, {fun_match, {Fun, Type, Msg}}, Val, #spec_ctx{path = Path}) ->
    %% user supplied fun; type mismatch is reported before the fun runs
    FoundType = json_type(Val),
    case FoundType =:= Type of
        false ->
            #ej_invalid{type = json_type, key = make_key(Key, Path),
                        expected_type = Type, found_type = FoundType, found = Val};
        true ->
            case Fun(Val) of
                ok ->
                    ok;
                _ ->
                    #ej_invalid{type = fun_match, key = make_key(Key, Path),
                                expected_type = Type, found = Val,
                                found_type = json_type(Val), msg = Msg}
            end
    end;
%% array_map: check each element; first bad element is retagged as an
%% array_elt failure at this key.
check_value_spec(Key, {array_map, ItemSpec}, Val, #spec_ctx{path = Path})
  when is_list(Val) ->
    case do_array_map(ItemSpec, Val) of
        ok ->
            ok;
        {bad_item, InvalidItem} ->
            InvalidItem#ej_invalid{type = array_elt, key = make_key(Key, Path)}
    end;
check_value_spec(Key, {array_map, _ItemSpec}, Val, #spec_ctx{path = Path}) ->
    %% expected an array for array_map, found wrong type
    #ej_invalid{type = json_type, key = make_key(Key, Path),
                expected_type = array, found_type = json_type(Val), found = Val};
check_value_spec(_Key, empty_array, [], #spec_ctx{}) ->
    ok;
check_value_spec(Key, empty_array, Val, #spec_ctx{path = Path}) ->
    #ej_invalid{type = empty_array, key = make_key(Key, Path),
                expected_type = array, found_type = json_type(Val), found = Val};
%% object_map on a {struct, L} object: normalize to the {L} form.
check_value_spec(Key, OM={object_map, _}, {struct, L}, Ctx) when is_list(L) ->
    check_value_spec(Key, OM, {L}, Ctx);
check_value_spec(Key, {object_map, {{keys, KeySpec}, {values, ValSpec}}},
                 Val={L}, #spec_ctx{path = Path}) when is_list(L) ->
    case do_object_map(KeySpec, ValSpec, Val) of
        ok ->
            ok;
        {bad_item, Type, InvalidItem} ->
            InvalidItem#ej_invalid{type = Type, key = make_key(Key, Path)}
    end;
check_value_spec(Key, {object_map, _ItemSpec}, Val, #spec_ctx{path = Path}) ->
    %% expected an object for object_map, found wrong type
    #ej_invalid{type = json_type, key = make_key(Key, Path),
                expected_type = object, found_type = json_type(Val), found = Val};
check_value_spec(_Key, empty_object, {[]}, #spec_ctx{}) ->
    ok;
check_value_spec(_Key, empty_object, {struct, []}, #spec_ctx{}) ->
    ok;
check_value_spec(Key, empty_object, Val, #spec_ctx{path = Path}) ->
    #ej_invalid{type = empty_object, key = make_key(Key, Path),
                expected_type = object, found_type = json_type(Val), found = Val};
check_value_spec(Key, {any_of, {Specs, ErrorMsg}}, Val, Ctx) ->
    check_any_of_value_specs(Key, Val, Ctx, Specs, ErrorMsg);
check_value_spec(_Key, any_value, _Val, _Ctx) ->
    ok;
%% Plain JSON type-name specs: ok on a type match, otherwise report.
check_value_spec(_Key, string, Val, _Ctx) when is_binary(Val) ->
    ok;
check_value_spec(Key, string, Val, #spec_ctx{path = Path}) ->
    invalid_for_type(string, Val, Key, Path);
check_value_spec(_Key, object, {VL}, _Ctx) when is_list(VL) ->
    ok;
check_value_spec(_Key, object, {struct, VL}, _Ctx) when is_list(VL) ->
    ok;
check_value_spec(Key, object, Val, #spec_ctx{path = Path}) ->
    invalid_for_type(object, Val, Key, Path);
check_value_spec(_Key, number, Val, _Ctx) when is_number(Val) ->
    ok;
check_value_spec(Key, number, Val, #spec_ctx{path = Path}) ->
    invalid_for_type(number, Val, Key, Path);
check_value_spec(_Key, array, Val, _Ctx) when is_list(Val) ->
    ok;
check_value_spec(Key, array, Val, #spec_ctx{path = Path}) ->
    invalid_for_type(array, Val, Key, Path);
check_value_spec(_Key, null, null, _Ctx) ->
    ok;
check_value_spec(Key, null, Val, #spec_ctx{path = Path}) ->
    invalid_for_type(null, Val, Key, Path);
check_value_spec(_Key, boolean, Val, _Ctx) when Val =:= true; Val =:= false ->
    ok;
check_value_spec(Key, boolean, Val, #spec_ctx{path = Path}) ->
    invalid_for_type(boolean, Val, Key, Path);
check_value_spec(_Key, Val, Val, _Ctx) ->
    %% exact match desired
    ok;
check_value_spec(Key, SpecVal, Val, #spec_ctx{path = Path}) ->
    %% exact match failed
    #ej_invalid{type = exact, key = make_key(Key, Path), found = Val,
                expected_type = json_type(SpecVal),
                found_type = json_type(Val), msg = SpecVal}.

%% Build the standard type-mismatch error record.
invalid_for_type(ExpectType, Val, Key, Path) ->
    #ej_invalid{type = json_type,
                expected_type = ExpectType,
                found_type = json_type(Val),
                found = Val,
                key = make_key(Key, Path)}.

%% Check every array element against ItemSpec; stop at the first failure.
do_array_map(_ItemSpec, []) ->
    ok;
do_array_map(ItemSpec, [Item|Rest]) ->
    %% FIXME: do we want to record element index?
    case check_value_spec(<<"item_fake_key">>, ItemSpec, Item, #spec_ctx{}) of
        ok ->
            do_array_map(ItemSpec, Rest);
        Error ->
            {bad_item, Error}
    end.

%% Check every {Key, Val} pair of an object: key first, then value.
do_object_map(KeySpec, ValSpec, {L}) when is_list(L) ->
    do_object_map(KeySpec, ValSpec, L);
do_object_map(_KeySpec, _ValSpec, []) ->
    ok;
do_object_map(KeySpec, ValSpec, [{Key, Val}|Rest]) ->
    case check_value_spec(<<"item_fake_key_key">>, KeySpec, Key, #spec_ctx{}) of
        ok ->
            case check_value_spec(<<"item_fake_key_value">>, ValSpec, Val, #spec_ctx{}) of
                ok ->
                    do_object_map(KeySpec, ValSpec, Rest);
                ValueError ->
                    {bad_item, object_value, ValueError}
            end;
        KeyError ->
            {bad_item, object_key, KeyError}
    end.

%% Try each spec in turn; the first ok wins. An exhausted list yields
%% an any_of error carrying the user's ErrorMsg.
check_any_of_value_specs(Key, Val, #spec_ctx{path = Path}, [], ErrorMsg) ->
    #ej_invalid{type = any_of, key = make_key(Key, Path), found = Val,
                expected_type = any_value, found_type = json_type(Val),
                msg = ErrorMsg};
check_any_of_value_specs(Key, Val, Ctx, [Spec1|OtherSpecs], ErrorMsg) ->
    case check_value_spec(Key, Spec1, Val, Ctx) of
        ok ->
            ok;
        _Error ->
            check_any_of_value_specs(Key, Val, Ctx, OtherSpecs, ErrorMsg)
    end.

%% Join binaries with a separator into a single binary.
join_bins([], _Sep) ->
    <<>>;
join_bins(Bins, Sep) when is_binary(Sep) ->
    join_bins(Bins, Sep, []).

join_bins([B], _Sep, Acc) ->
    iolist_to_binary(lists:reverse([B|Acc]));
join_bins([B|Rest], Sep, Acc) ->
    join_bins(Rest, Sep, [Sep, B | Acc]).

%% end valid

%% @doc Recursively remove all but the first occurrence of duplicated
%% keys in EJSON objects. According to
%% [http://www.ietf.org/rfc/rfc4627.txt] JSON objects SHOULD have
%% unique keys, but it is not a hard requirement. Some parsers fail
%% when encountering duplicate keys.
%%
%% The spec for `dedup' should be `-spec dedup(json_term()) ->
%% json_term().', but dialyzer with `-Wunderspecs' warns on this I
%% think because it exceeds the depth that dialyzer can look.
%%
%% Scalars pass through unchanged; objects are deduplicated with
%% lists:ukeysort/2 (which keeps the first pair for each key -- note
%% this also leaves the keys sorted); arrays recurse element-wise.
dedup(Val) when is_binary(Val);
                is_integer(Val);
                is_float(Val);
                Val == null;
                Val == true;
                Val == false ->
    Val;
dedup({L}) when is_list(L) ->
    {[ {Key, dedup(Val)} || {Key, Val} <- lists:ukeysort(1, L) ]};
dedup({struct, L}) when is_list(L) ->
    {struct, [ {Key, dedup(Val)} || {Key, Val} <- lists:ukeysort(1, L) ]};
dedup(L) when is_list(L) ->
    [ dedup(Elt) || Elt <- L ].

-ifdef(TEST).

%% EUnit setup fixture: loads the sample EJSON documents from
%% ../test/*.terms once, then runs every ej API test group against
%% them.
ej_test_() ->
    {setup,
     fun() ->
             {ok, [Widget]} = file:consult("../test/widget.terms"),
             {ok, [Glossary]} = file:consult("../test/glossary.terms"),
             {ok, [Menu]} = file:consult("../test/menu.terms"),
             ObjList = {struct, [{<<"objects">>,
                                  [ {struct, [{<<"id">>, I}]} || I <- lists:seq(1, 5) ]}]},
             {Widget, Glossary, Menu, ObjList}
     end,
     fun({Widget, Glossary, Menu, ObjList}) ->
             [{"ej:get", [
                ?_assertMatch({struct, [{_, _}|_]}, ej:get({"widget"}, Widget)),
                ?_assertMatch({struct, [{_, _}|_]}, ej:get(["widget"], Widget)),
                ?_assertEqual(<<"1">>, ej:get({"widget", "version"}, Widget)),
                ?_assertEqual(<<"1">>, ej:get(["widget", "version"], Widget)),
                ?_assertEqual(250, ej:get({"widget", "image", "hOffset"}, Widget)),
                ?_assertEqual(250, ej:get(["widget", "image", "hOffset"], Widget)),
                ?_assertEqual([1,2,3,4,5], ej:get({"widget", "values"}, Widget)),
                ?_assertEqual([1,2,3,4,5], ej:get(["widget", "values"], Widget)),
                ?_assertEqual(2, ej:get({"widget", "values", 2}, Widget)),
                ?_assertEqual(4, ej:get({"widget", "values", 4}, Widget)),
                ?_assertEqual(1, ej:get({"widget", "values", first}, Widget)),
                ?_assertEqual(5, ej:get({"widget", "values", last}, Widget)),
                ?_assertEqual(2, ej:get(["widget", "values", 2], Widget)),
                ?_assertEqual(4, ej:get(["widget", "values", 4], Widget)),
                ?_assertEqual(1, ej:get(["widget", "values", first], Widget)),
                ?_assertEqual(5, ej:get(["widget", "values", last], Widget)),
                ?_assertEqual({struct, [{<<"id">>, 5}]},
                              ej:get({<<"objects">>, last}, ObjList)),
                ?_assertEqual({struct, [{<<"id">>, 1}]},
                              ej:get({<<"objects">>, first}, ObjList)),
                ?_assertEqual(undefined, ej:get({"fizzle"}, Widget)),
                ?_assertEqual(undefined, ej:get({"widget", "fizzle"}, Widget)),
                ?_assertEqual(undefined, ej:get({"widget", "values", "fizzle"}, Widget)),
                ?_assertEqual(<<"SGML">>,
                              ej:get({"glossary", "GlossDiv", "GlossList",
                                      "GlossEntry", "Acronym"}, Glossary)),
                ?_assertEqual(undefined,
                              ej:get({"glossary", "GlossDiv", "GlossList",
                                      "GlossEntry", "fizzle"}, Glossary)),
                ?_assertEqual(undefined, ej:get({"not_present"}, {[]})),
                ?_assertEqual(undefined, ej:get({"not_present"}, {struct, []})),
                ?_assertEqual(undefined, ej:get({"root", "not_present"},
                                                {struct, [{<<"root">>, null}]})),
                ?_assertEqual(undefined, ej:get({[]}, Widget)),
                ?_assertEqual(undefined, ej:get({}, Widget)),
                ?_assertException(error, {index_for_non_list, _},
                                  ej:get({"glossary", "GlossDiv", "GlossList",
                                          "GlossEntry", 1}, Glossary)),
                ?_assertException(error, {index_for_non_list, _},
                                  ej:get({"glossary", "title", 1}, Glossary))]},

              {"ej:get with default", [
                ?_assertEqual(<<"1">>, ej:get({"widget", "version"}, Widget,
                                              "you'll never see this default")),
                ?_assertEqual(<<"defaults rock">>,
                              ej:get({"widget", "NOT_PRESENT"}, Widget, <<"defaults rock">>)),
                ?_assertEqual(<<"a default">>, ej:get({}, Widget, <<"a default">>)),
                ?_assertEqual(<<"a default">>, ej:get({[]}, Widget, <<"a default">>))
               ]},

              {"ej:get with json_plist", [
                ?_assertEqual(<<"1">>, ej:get({"a"}, [{<<"a">>, <<"1">>}])),
                ?_assertEqual(undefined, ej:get({"x"}, [{<<"a">>, <<"1">>}])),
                ?_assertEqual(undefined, ej:get({"x"}, []))
               ]},

              {"ej:get from array by matching key",
               fun() ->
                       Path1 = {"menu", "popup", "menuitem", {select, {"value", "New"}}},
                       ?assertMatch([{struct, [{<<"value">>,<<"New">>}|_]}],
                                    ej:get(Path1, Menu)),
                       Path2 = {"menu", "popup", "menuitem",
                                {select, {"value", "New"}}, "onclick"},
                       ?assertEqual([<<"CreateNewDoc()">>], ej:get(Path2, Menu)),
                       PathNoneMatched = {"menu", "popup", "menuitem",
                                          {select, {"value", "NotThere"}}},
                       ?assertEqual([], ej:get(PathNoneMatched, Menu)),
                       PathDoesntExist = {"menu", "popup", "menuitem",
                                          {select, {"value", "NotThere"}}, "bar"},
                       ?assertEqual(undefined, ej:get(PathDoesntExist, Menu)),
                       Data = [
                               {struct, [{<<"match">>, <<"me">>}]},
                               {struct, [{<<"match">>, <<"me">>}]}
                              ],
                       ComplexBeginning = {{select, {"match", "me"}}},
                       ?assertMatch([{struct, _}, {struct, _}],
                                    ej:get(ComplexBeginning, Data)),
                       ComplexBeginningDeeper = {{select, {"match", "me"}}, "match"},
                       ?assertMatch([<<"me">>, <<"me">>],
                                    ej:get(ComplexBeginningDeeper, Data))
               end},

              {"ej:get with multi-level array matching",
               fun() ->
                       %% When doing multilevel deep array matching, we want the
                       %% array returned to be a single top level list, and not
                       %% a nested list of lists ...
                       Data = {struct,[
                                {<<"users">>, [
                                  {struct,[{<<"id">>,<<"sebastian">>},
                                           {<<"books">>, [
                                             {struct, [{<<"title">>, <<"faust">>},
                                                       {<<"rating">>, 5}]}
                                            ]}
                                          ]}
                                 ]}
                                ]},
                       Path = {"users", {select, {"id", "sebastian"}}, "books",
                               {select, {"title", "faust"}}, "rating"},
                       Result = ej:get(Path, Data),
                       ?assertEqual([5], Result)
               end},

              {"ej:get filter at top-level",
               fun() ->
                       Data = {struct,[{<<"users">>,
                                        [{struct,[{<<"company">>,<<"opscode">>},
                                                  {<<"name">>,<<"seth">>}]},
                                         {struct,[{<<"location">>,<<"Germany">>},
                                                  {<<"name">>,<<"sebastian">>},
                                                  {<<"company">>,<<"aircloak">>}]}]}]},
                       ?assertEqual(undefined, ej:get({"users", "company"}, Data)),
                       ?assertEqual([<<"opscode">>, <<"aircloak">>],
                                    ej:get({"users", {select, all}, "company"}, Data))
               end},

              {"ej:get startswith ",
               fun() ->
                       Data = {[ {<<"admin:1:name">>, <<"Admin1">>},
                                 {<<"admin:2:name">>, <<"Admin2">>},
                                 {<<"user:1:name">>, <<"User1">>},
                                 {<<"user:2:name">>, <<"User2">>}]},
                       ?assertEqual(undefined, ej:get({{startswith, <<"guest">>}}, Data)),
                       ?assertMatch([], ej:get({{startswith, <<"guest">>}}, Data, [])),
                       ?assertMatch([ {<<"user:1:name">>, <<"User1">>},
                                      {<<"user:2:name">>, <<"User2">>}],
                                    ej:get({{startswith, <<"user">>}}, Data))
               end},

              {"ej:set, replacing existing value, keys is tuple",
               fun() ->
                       Path = {"widget", "window", "name"},
                       CurrentValue = ej:get(Path, Widget),
                       NewValue = <<"bob">>,
                       ?assert(NewValue /= CurrentValue),
                       Widget1 = ej:set(Path, Widget, NewValue),
                       ?assertEqual(NewValue, ej:get(Path, Widget1)),
                       % make sure the structure hasn't been disturbed
                       Widget2 = ej:set(Path, Widget1, <<"main_window">>),
                       ?assertEqual(Widget, Widget2)
               end},

              {"ej:set replacing existing value, keys is lists",
               fun() ->
                       Path = ["widget", "window", "name"],
                       CurrentValue = ej:get(Path, Widget),
                       NewValue = <<"bob">>,
                       ?assert(NewValue /= CurrentValue),
                       Widget1 = ej:set(Path, Widget, NewValue),
                       ?assertEqual(NewValue, ej:get(Path, Widget1)),
                       % make sure the structure hasn't been disturbed
                       Widget2 = ej:set(Path, Widget1, <<"main_window">>),
                       ?assertEqual(Widget, Widget2)
               end},

              {"ej:set, creating new value",
               fun() ->
                       Path = {"widget", "image", "newOffset"},
                       Value = <<"YYY">>,
                       ?assertEqual(undefined, ej:get(Path, Widget)),
                       Widget1 = ej:set(Path, Widget, Value),
                       ?assertEqual(Value, ej:get(Path, Widget1))
               end},

              {"ej:set, missing intermediate path",
               fun() ->
                       Path = {"widget", "middle", "nOffset"},
                       Value = <<"YYY">>,
                       ?assertEqual(undefined, ej:get(Path, Widget)),
                       ?assertException(error, {no_path, _},
                                        ej:set(Path, Widget, Value))
               end},

              {"ej:set top-level",
               fun() ->
                       OrigVal = ej:get({"widget", "version"}, Widget),
                       NewVal = <<"2">>,
                       NewWidget = ej:set({"widget", "version"}, Widget, NewVal),
                       ?assertEqual(NewVal, ej:get({"widget", "version"}, NewWidget)),
                       Reset = ej:set({"widget", "version"}, NewWidget, OrigVal),
                       ?assertEqual(Widget, Reset)
               end},

              {"ej:set nested",
               fun() ->
                       NewVal = <<"JSON">>,
                       Path = {"glossary", "GlossDiv", "GlossList", "GlossEntry", "ID"},
                       Unchanged = ej:get({"glossary", "GlossDiv", "GlossList",
                                           "GlossEntry", "SortAs"}, Glossary),
                       Glossary1 = ej:set(Path, Glossary, NewVal),
                       ?assertEqual(NewVal, ej:get(Path, Glossary1)),
                       ?assertEqual(Unchanged, ej:get({"glossary", "GlossDiv", "GlossList",
                                                       "GlossEntry", "SortAs"}, Glossary1)),
                       Reset = ej:set(Path, Glossary1, <<"SGML">>),
                       ?assertEqual(Glossary, Reset)
               end},

              {"ej:set list element",
               fun() ->
                       Orig = ej:get({"menu", "popup", "menuitem", 2}, Menu),
                       New = ej:set({"onclick"}, Orig, <<"OpenFile()">>),
                       Menu1 = ej:set({"menu", "popup", "menuitem", 2}, Menu, New),
                       ?assertEqual(New, ej:get({"menu", "popup", "menuitem", 2}, Menu1)),
                       Reset = ej:set({"menu", "popup", "menuitem", 2}, Menu1, Orig),
                       ?assertEqual(Menu, Reset)
               end},

              {"ej:set list element path",
               fun() ->
                       Path = {"menu", "popup", "menuitem", 2, "onclick"},
                       Orig = ej:get(Path, Menu),
                       New = <<"OpenFile()">>,
                       Menu1 = ej:set(Path, Menu, New),
                       ?assertEqual(New, ej:get(Path, Menu1)),
                       Reset = ej:set(Path, Menu1, Orig),
                       ?assertEqual(Menu, Reset)
               end},

              {"ej:set list element path first, last",
               fun() ->
                       FPath = {"menu", "popup", "menuitem", first, "value"},
                       LPath = {"menu", "popup", "menuitem", last, "value"},
                       FMenu = ej:set(FPath, Menu, <<"create">>),
                       LMenu = ej:set(LPath, FMenu, <<"kill">>),
                       ?assertEqual(<<"create">>, ej:get(FPath, FMenu)),
                       ?assertEqual(<<"create">>, ej:get(FPath, LMenu)),
                       ?assertEqual(<<"kill">>, ej:get(LPath, LMenu))
               end},

              {"ej:set new list element",
               fun() ->
                       Path = {"menu", "popup", "menuitem", new},
                       Path1 = {"menu", "popup", "menuitem", first},
                       Menu1 = ej:set(Path, Menu, <<"first-item">>),
                       ?assertEqual(<<"first-item">>, ej:get(Path1, Menu1)),
                       List = ej:get({"menu", "popup", "menuitem"}, Menu1),
                       ?assertEqual(4, length(List))
               end},

              {"ej:set_p creates intermediate missing nodes, keys is tuple",
               fun() ->
                       StartData = {struct,[]},
                       EndData = {struct,[{<<"a">>,
                                           {struct,[{<<"b">>,
                                                     {struct, [{<<"c">>, <<"value">>}]}
                                                    }]}
                                          }]},
                       Path = {"a", "b", "c"},
                       Result = ej:set_p(Path, StartData, <<"value">>),
                       ?assertEqual(EndData, Result),
                       ?assertEqual(<<"value">>, ej:get(Path, Result)),
                       Path2 = {"1", "2"},
                       Result2 = ej:set_p(Path2, Result, <<"other-value">>),
                       ?assertEqual(<<"other-value">>, ej:get(Path2, Result2)),
                       %% Does not affect existing values
                       ?assertEqual(<<"value">>, ej:get(Path, Result2))
               end},

              {"ej:set_p creates intermediate missing nodes, keys is lists",
               fun() ->
                       StartData = {struct,[]},
                       EndData = {struct,[{<<"a">>,
                                           {struct,[{<<"b">>,
                                                     {struct, [{<<"c">>, <<"value">>}]}
                                                    }]}
                                          }]},
                       Path = ["a", "b", "c"],
                       Result = ej:set_p(Path, StartData, <<"value">>),
                       ?assertEqual(EndData, Result),
                       ?assertEqual(<<"value">>, ej:get(Path, Result)),
                       Path2 = {"1", "2"},
                       Result2 = ej:set_p(Path2, Result, <<"other-value">>),
                       ?assertEqual(<<"other-value">>, ej:get(Path2, Result2)),
                       %% Does not affect existing values
                       ?assertEqual(<<"value">>, ej:get(Path, Result2))
               end},

              {"ej:set new value in an object at a complex path",
               fun() ->
                       Path = {"menu", "popup", "menuitem",
                               {select, {"value", "New"}}, "alt"},
                       Val = <<"helptext">>,
                       Menu1 = ej:set(Path, Menu, Val),
                       ?assertMatch([<<"helptext">>], ej:get(Path, Menu1))
               end},

              {"ej:set_p value in a non-existent object at a complex path",
               fun() ->
                       Path = {"menu", "popup", "menuitem",
                               {select, {"value", "Edit"}}},
                       Path2 = {"menu", "popup", "menuitem",
                                {select, {"value", "Edit"}}, "text"},
                       Path3 = {"menu", "popup", "menuitem",
                                {select, {"value", "Edit"}}, "value"},
                       Val = {struct, [{<<"text">>, <<"helptext">>}]},
                       Menu1 = ej:set_p(Path, Menu, Val),
                       ?assertMatch([<<"helptext">>], ej:get(Path2, Menu1)),
                       ?assertEqual([<<"Edit">>], ej:get(Path3, Menu1))
               end},

              {"ej:set new value in a object at a complex path",
               fun() ->
                       Path = {"menu", "popup", "menuitem",
                               {select, {"value", "New"}}},
                       Path2 = {"menu", "popup", "menuitem",
                                {select, {"value", "New"}}, "onclick"},
                       Val = {struct, [{<<"onclick">>, <<"CreateDifferentNewDoct()">>}]},
                       Menu1 = ej:set(Path, Menu, Val),
                       ?assertEqual([<<"CreateDifferentNewDoct()">>], ej:get(Path2, Menu1)),
                       Path3 = {"menu", "popup", "menuitem",
                                {select,{"value", "New"}}, "speed"},
                       ValHigh = <<"high">>,
                       Menu2 = ej:set(Path3, Menu1, ValHigh),
                       ?assertEqual([ValHigh], ej:get(Path3, Menu2))
               end},

              {"ej:set replace multiple children of a complex path",
               fun() ->
                       %% We want the ability to affect multiple array elements
                       %% when a complex selector returns more than one match.
                       %% In this case all the selected array elements should be
                       %% replaced.
                       StartData = {struct, [
                                     {struct, [{<<"match">>, <<"me">>}, {<<"param">>, 1}]},
                                     {struct, [{<<"match">>, <<"me">>}, {<<"param">>, 2}]}
                                    ]},
                       Path = {{select, {"match", "me"}}},
                       Path2 = {{select, {"match", "me"}}, "more"},
                       Val = {struct, [{<<"more">>, <<"content">>}]},
                       Result = ej:set(Path, StartData, Val),
                       ?assertMatch([<<"content">>, <<"content">>], ej:get(Path2, Result))
               end},

              {"ej:set replace multiple children deep in a complex path",
               fun() ->
                       %% We want the ability to affect multiple array elements
                       %% when a complex selector returns more than one match.
                       %% In this case we show that the array does not have to
                       %% be at the top level.
                       StartData = {struct, [{<<"parent">>, [
                                     {struct, [{<<"match">>, <<"me">>}, {<<"param">>, 1}]},
                                     {struct, [{<<"match">>, <<"me">>}, {<<"param">>, 2}]}
                                    ]}
                                   ]},
                       Path = {"parent", {select, {"match", "me"}}},
                       Path2 = {"parent", {select, {"match", "me"}}, "more"},
                       Val = {struct, [{<<"more">>, <<"content">>}]},
                       EndData = ej:set(Path, StartData, Val),
                       ?assertMatch([<<"content">>, <<"content">>], ej:get(Path2, EndData))
               end},

              {"ej:set doesn't alter order when setting a complex path",
               fun() ->
                       StartData = {struct, [{<<"parent">>, [
                                     {struct, [{<<"name">>, <<"alice">>}, {<<"param">>, 1}]},
                                     {struct, [{<<"name">>, <<"bob">>}, {<<"param">>, 2}]},
                                     {struct, [{<<"name">>, <<"clara">>}, {<<"param">>, 3}]}
                                    ]}
                                   ]},
                       Path = {"parent", {select, {"name", "bob"}}, "param"},
                       EndData = ej:set(Path, StartData, 4),
                       Names = [ ej:get({"name"}, Elt) || Elt <- ej:get({"parent"}, EndData) ],
                       ExpectNames = [<<"alice">>, <<"bob">>, <<"clara">>],
                       ?assertEqual(ExpectNames, Names)
               end},

              {"ej:set should not allow replacing an array element at a complex path with a pure value",
               fun() ->
                       %% If the user has made a filtered selection on an array,
                       %% then all the elements in the array are objects.
                       %% Replacing the matched selection with a non-object value
                       %% will break this constraint.
                       Data = {struct, [{struct, [{<<"match">>, <<"me">>}]}]},
                       Path = {{select, {"match", "me"}}},
                       Val = <<"pure-value-and-not-a-struct">>,
                       ?assertException(error, {replacing_object_with_value, _},
                                        ej:set(Path, Data, Val))
               end},

              {"ej:set a value within array",
               fun() ->
                       %% We should be able to set values on elements we
                       %% have filtered out of an array, rather than just
                       %% replacing them.
                       StartData = {struct,[
                                     {<<"users">>, [
                                       {struct,[{<<"id">>,<<"sebastian">>}]}
                                      ]}
                                    ]},
                       EndData = {struct,[
                                   {<<"users">>, [
                                     {struct,[{<<"id">>,<<"sebastian">>},
                                              {<<"books">>, []}
                                             ]}
                                    ]}
                                  ]},
                       Path = {"users", {select, {"id", "sebastian"}}, "books"},
                       Val = [],
                       Result = ej:set(Path, StartData, Val),
                       ?assertEqual(EndData, Result)
               end},

              {"ej:set should throw error for trying to missing intermediate nodes",
               fun() ->
                       %% If we request a composite path that doesn't exist,
                       %% and we are using set, rather than set_p, then we
                       %% should get an error thrown at us.
                       Path = {{select, {"id", "seb"}}},
                       Val = {struct, [{<<"continent">>, <<"europe">>}]},
                       ?assertException(error, {no_path, _},
                                        ej:set(Path, {struct, []}, Val))
               end},

              {"ej:set_p should construct intermediate nodes if missing",
               fun() ->
                       %% If we request a composite path that doesn't exist,
                       %% the missing nodes should be created for us dynamically
                       %% to match the filtering criteria we are searching for.
                       StartData = {struct,[]},
                       Path = {"users", {select, {"id", "seb"}}, "room",
                               {select, {"room_id", "living-room"}}, "books",
                               {select, {"title", "faust"}}, "rating"},
                       Val = 5,
                       Result = ej:set_p(Path, StartData, Val),
                       ?assertEqual([5], ej:get(Path, Result))
               end},

              {"ej:set_p should create intermediate nodes if missing in existing structures",
               fun() ->
                       %% If we request a composite path that doesn't exist,
                       %% the missing nodes should be created for us dynamically
                       %% to match the filtering criteria we are searching for.
                       %% Furthermore, this should not affect old values already existing in the
                       %% same structure.
                       StartData = {struct,[{<<"users">>,[
                                     {struct,[{<<"rooms">>,[
                                       {struct,[{<<"books">>,[
                                         {struct,[{<<"rating">>,5},{<<"title">>,<<"faust">>}]}
                                        ]},{<<"room_id">>,<<"livingroom">>}
                                       ]}
                                      ]},{<<"id">>,<<"seb">>}]
                                     }]
                                    }]},
                       ValidPath = {"users", {select, {"id", "seb"}},
                                    "rooms", {select, {"room_id", "livingroom"}},
                                    "books", {select, {"title", "faust"}},
                                    "rating"},
                       ?assertEqual([5], ej:get(ValidPath, StartData)),
                       NewPath = {"users", {select, {"id", "seb"}},
                                  "rooms", {select, {"room_id", "bathroom"}}, "sink"},
                       NewValue = true,
                       Result = ej:set_p(NewPath, StartData, NewValue),
                       ?assertEqual([true], ej:get(NewPath, Result)),
                       OtherPath = {"users", {select, {"id", "seb"}},
                                    "computers", {select, {"laptop", true}}, "name"},
                       OtherValue = <<"paris">>,
                       Result1 = ej:set_p(OtherPath, Result, OtherValue),
                       io:format("~p", [Result1]),
                       ?assertEqual([<<"paris">>], ej:get(OtherPath, Result1)),
                       %% Old values still valid
                       ?assertEqual([5], ej:get(ValidPath, Result1)),
                       ?assertEqual([true], ej:get(NewPath, Result1))
               end},

              {"ej:remove",
               fun() ->
                       Path = {"glossary", "GlossDiv", "GlossList", "GlossEntry", "Abbrev"},
                       Orig = ej:get(Path, Glossary),
                       ?assert(undefined /= Orig),
                       Glossary1 = ej:delete(Path, Glossary),
                       ?assertEqual(undefined, ej:get(Path, Glossary1)),
                       % verify some structure
                       ?assertEqual(<<"SGML">>, ej:get({"glossary", "GlossDiv", "GlossList",
                                                        "GlossEntry", "Acronym"}, Glossary1)),
                       ?assertEqual(<<"S">>, ej:get({"glossary", "GlossDiv", "title"}, Glossary1))
               end},

              {"ej:remove parameter at complex path",
               fun() ->
                       Path = {"menu", "popup", "menuitem",
                               {select, {"value", "New"}}, "onclick"},
                       Orig = ej:get(Path, Menu),
                       ?assert(undefined /= Orig),
                       Menu1 = ej:delete(Path, Menu),
                       ?assertEqual([undefined], ej:get(Path, Menu1)),
                       % verify some structure
                       VerifyPath = {"menu", "popup", "menuitem",
                                     {select, {"value", "New"}}, "value"},
                       ?assertEqual([<<"New">>], ej:get(VerifyPath, Menu1)),
                       % verify that we didn't delete siblings
                       VerifyOpen = {"menu", "popup", "menuitem",
                                     {select, {"value", "Open"}}, "onclick"},
                       ?assertEqual([<<"OpenDoc()">>], ej:get(VerifyOpen, Menu1)),
                       VerifyClose = {"menu", "popup", "menuitem",
                                      {select, {"value", "Close"}}, "onclick"},
                       ?assertEqual([<<"CloseDoc()">>], ej:get(VerifyClose, Menu1))
               end},

              {"ej:remove object at complex path, keys is tuple",
               fun() ->
                       Path = {"menu", "popup", "menuitem", {select, {"value", "New"}}},
                       Orig = ej:get(Path, Menu),
                       ?assert([] /= Orig),
                       Menu1 = ej:delete(Path, Menu),
                       ?assertEqual([], ej:get(Path, Menu1)),
                       % verify some structure
                       VerifyPath = {"menu", "popup", "menuitem",
                                     {select, {"value", "New"}}, "value"},
                       ?assertEqual(undefined, ej:get(VerifyPath, Menu1)),
                       % verify that we didn't delete siblings
                       VerifyOpen = {"menu", "popup", "menuitem",
                                     {select, {"value", "Open"}}, "onclick"},
                       ?assertEqual([<<"OpenDoc()">>], ej:get(VerifyOpen, Menu1)),
                       VerifyClose = {"menu", "popup", "menuitem",
                                      {select, {"value", "Close"}}, "onclick"},
                       ?assertEqual([<<"CloseDoc()">>], ej:get(VerifyClose, Menu1))
               end},

              {"ej:remove object at complex path, keys is list",
               fun() ->
                       Path = ["menu", "popup", "menuitem", {select, {"value", "New"}}],
                       Orig = ej:get(Path, Menu),
                       ?assert([] /= Orig),
                       Menu1 = ej:delete(Path, Menu),
                       ?assertEqual([], ej:get(Path, Menu1)),
                       % verify some structure
                       VerifyPath = ["menu", "popup", "menuitem",
                                     {select, {"value", "New"}}, "value"],
                       ?assertEqual(undefined, ej:get(VerifyPath, Menu1)),
                       % verify that we didn't delete siblings
                       VerifyOpen = ["menu", "popup", "menuitem",
                                     {select, {"value", "Open"}}, "onclick"],
                       ?assertEqual([<<"OpenDoc()">>], ej:get(VerifyOpen, Menu1)),
                       VerifyClose = ["menu", "popup", "menuitem",
                                      {select, {"value", "Close"}}, "onclick"],
                       ?assertEqual([<<"CloseDoc()">>], ej:get(VerifyClose, Menu1))
               end}
             ]
     end
    }.

-endif.
src/ej.erl
0.723114
0.402862
ej.erl
starcoder
%% Licensed under the Apache License, Version 2.0 (the "License"); you may %% not use this file except in compliance with the License. You may obtain %% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0> %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %% %% Alternatively, you may use this file under the terms of the GNU Lesser %% General Public License (the "LGPL") as published by the Free Software %% Foundation; either version 2.1, or (at your option) any later version. %% If you wish to allow use of your version of this file only under the %% terms of the LGPL, you should delete the provisions above and replace %% them with the notice and other provisions required by the LGPL; see %% <http://www.gnu.org/licenses/>. If you do not delete the provisions %% above, a recipient may use your version of this file under the terms of %% either the Apache License or the LGPL. %% %% @author <NAME> <<EMAIL>> %% @copyright 2006-2009 <NAME> %% @doc Erlang file monitoring service %% %% The behaviour of this service is inspired by the open source FAM %% daemon ([http://oss.sgi.com/projects/fam/]). It allows file system %% paths to be monitored, so that a message will be sent to the client %% process whenever a status change is detected. Currently, the only %% supported method of detection is by regular polling by the server. %% While it is not optimal, polling has less overhead than might be %% expected, and is portable across platforms. The polling interval can %% be adjusted; by default the server polls all monitored paths every 5 %% seconds. Recursive (automatic) monitoring is supported. 
The server %% keeps track of its client processes, and removes all their monitors %% if they should die. %% %% == Event messages == %% %% When a new monitor is set up, or a change is detected, an event %% message is sent to the client. These have the following general form: %% <pre>{@type @{file_monitor, Ref::monitor(), Event@}}</pre> %% where `Ref' is the monitor reference returned when the monitor was %% set up, and `Event' is one of the following: %% <ul> %% <li>{@type @{found, Path::binary(), Type, Info::#file_info@{@}, %% Entries::[{added|deleted, Name::binary()@}]@}}</li> %% <li>{@type @{changed, Path::binary(), Type, Info::#file_info@{@}, %% Entries::[{added|deleted, Name::binary()@}]@}}</li> %% <li>{@type @{error, Path::binary(), Type, PosixError::atom()@}}</li> %% </ul> %% where `Path' is the watched path (as a binary), `Type' is the type of %% monitoring being performed (either `file' or `directory'), `Info' is %% a `file_info' record as defined in `kernel/include/file.hrl', and %% `Entries' is a list of tuples `{added, binary()}' and `{deleted, %% binary()}' describing changes to the directory entries if `Type' is %% `directory', otherwise this is always the empty list. For a `found' %% event, all entries are `{added, Name}'. %% %% A `found' event is sent when a monitor is initially set up, if the %% path can be read. After that, whenever a change in status is %% detected, a `changed' event is sent. If the file does not exist or %% could for some other reason not be accessed, an `error' event is sent %% (both initially and for subsequent changes). In other words, the %% first event for a path is always either `found' or `error', and later %% events are either `changed' or `error'. 
%% %% === Detection of file type changes === %% %% If the object found at a path changes type in the interval between %% two polls, for example if a directory is replaced by a file with the %% same name, or vice versa, the file monitor server will detect this %% and dispatch an `enoent' error event before the new status event. A %% client can thus rely on always seeing the old file disappear before %% any change that reports a different file type. %% %% == Monitoring types == %% %% There are two ways in which a path can be monitored: as a `file', %% meaning that we are interested only in the object found at that path, %% or as a `directory', meaning that we expect the path to point to a %% directory, and we are also interested in the list of entries of that %% directory. %% %% If a path is monitored as a directory, and the object at the path %% exists but is not a directory, an `enotdir' error event will be %% generated. An existing directory can however both be monitored as a %% directory and as a file - the difference is that in the latter case, %% the reported list of entries will always be empty. %% %% == Automatic (recursive) monitoring == %% %% Automatic monitoring (automonitoring for short) can be used to watch %% a single file of any type, or a whole directory tree. The monitoring %% type (`file' or `directory') used for any path is based on the actual %% type of object found at the path (`directory' if the object is a %% readable directory, and `file' otherwise). If the object is replaced %% by another of different type, the monitoring type will change %% automatically. %% %% When a directory becomes automonitored, all of its entries will also %% be automatically monitored, recursively. As entries are created or %% deleted in an automonitored directory, they will be dynamically added %% or removed, respectively, from being monitored. 
The root path used to %% create the automonitor will however always remain monitored (even if %% the object temporarily or permanently disappears) until the server is %% told to delete the monitor. %% %% The event messages sent to the client are the same as if manual %% monitoring was done. A newly discovered path will be reported by a %% `found' (or possibly, by an `error' event), and subsequent changes on %% that path are reported by `changed' and `error' events. If the %% monitoring type is changed, a new `found' event is sent, and so on. -module(zotonic_filewatcher_monitor). -behaviour(gen_server). -export([monitor_file/1, monitor_file/2, monitor_file/3, monitor_dir/1, monitor_dir/2, monitor_dir/3, automonitor/1, automonitor/2, automonitor/3, demonitor/1, demonitor/2, demonitor_file/2, demonitor_file/3, demonitor_dir/2, demonitor_dir/3, get_interval/0, get_interval/1, set_interval/1, set_interval/2, normalize_path/1]). -export([start/0, start/1, start/2, start_link/0, start_link/1, start_link/2, stop/0, stop/1]). -export([init/1, handle_call/3, handle_cast/2, handle_info/2, code_change/3, terminate/2]). -compile({no_auto_import,[monitor/2]}). -compile({no_auto_import,[demonitor/2]}). -include_lib("kernel/include/file.hrl"). %% NOTE: Monitored paths should be absolute, but this is not checked. %% %% We never rewrite the paths, e.g. from relative to absolute, but we %% convert every path into a binary internally, for the sake of %% comparisons, and return it to the caller for reference. %% %% @type filename() = binary() | atom() | [char() | filename()]. This is %% an "extended IO-list", that allows atoms as well as binaries to occur %% either on their own or embedded in a list or deep list. The intent of %% this is to accept any file name that can be used by the standard %% library module `file', as well as any normal IO-list, and any list %% that is formed by combining such fragments. %% %% @type options() = [term()]. A list of options. 
%%
%% @type server_ref() = pid() | atom() | {Node::atom(), atom()} |
%% {global, atom()}. A reference to a running server. See {@link
%% //stdlib/gen_server:call/3} for more information.

-define(DEFAULT_INTERVAL, 5000).  % change with option interval
-define(MIN_INTERVAL, 100).
-define(TIME_TO_STABLE, 1100).    % (timestamps have second resolution)
-define(SERVER, ?MODULE).
%% tag on the messages the server sends to itself for automonitoring
-define(MSGTAG, ?SERVER).

%% % @type object() = {file|directory, filename()}
%% @type monitor() = reference(). A monitor reference.

%% server state; all tables are dict()s despite the "map:" shorthand below
-record(state, {poll=true,  % boolean(), false if polling is disabled
                interval,   % polling interval (milliseconds)
                files,      % map: file path -> #entry{}
                dirs,       % map: directory path -> #entry{}
                autodirs,   % map: directory path -> monitor() -> entries
                refs,       % map: monitor() -> #monitor_info{}
                clients     % map: client Pid -> #client_info{}
               }).

%% per-path status, shared by all monitors watching that path
-record(entry, {info = undefined,      % #file_info{} or posix atom
                dir = [],              % directory entries (if any)
                stable = 0,            % integer(), millis until dir stable
                monitors = sets:new()  % set(monitor())
               }).

-record(client_info, {monitor,  % erlang:monitor/2 reference
                      refs      % set(monitor()); monitors owned by client
                     }).

-record(monitor_info, {pid,     % client Pid
                       auto,    % boolean(), true for an automonitor
                       objects  % set(object())
                      }).

%%
%% User interface
%%

%% @spec (filename()) ->
%%   {ok, monitor(), binary()} | {error, not_owner | automonitor}
%% @equiv monitor_file(Path, [])
monitor_file(Path) ->
    monitor_file(Path, []).

%% @spec (filename(), options()) ->
%%   {ok, monitor(), binary()} | {error, not_owner | automonitor}
%% @equiv monitor_file(file_monitor, Path, Opts)
monitor_file(Path, Opts) ->
    monitor_file(?SERVER, Path, Opts).

%% @spec (server_ref(), filename(), options()) ->
%%   {ok, monitor(), binary()} | {error, not_owner | automonitor}
%% @doc Monitors the specified file path. Returns the monitor reference
%% as well as the monitored path as a binary.
%%
%% Options:
%% <ul>
%% <li>{@type @{monitor, monitor()@}}: specifies a reference for
%% identifying the monitor to which the path should be added. The
%% monitor need not already exist, but if it does, only the same
%% process is allowed to add paths to it, and paths may not be added
%% manually to an automonitor.</li>
%% </ul>
monitor_file(Server, Path, Opts) ->
    monitor(Server, Path, Opts, file).

%% @spec (filename()) ->
%%   {ok, monitor(), binary()} | {error, not_owner | automonitor}
%% @equiv monitor_dir(Path, [])
monitor_dir(Path) ->
    monitor_dir(Path, []).

%% @spec (filename(), options()) ->
%%   {ok, monitor(), binary()} | {error, not_owner | automonitor}
%% @equiv monitor_dir(file_monitor, Path, Opts)
monitor_dir(Path, Opts) ->
    monitor_dir(?SERVER, Path, Opts).

%% @spec (server_ref(), filename(), options()) ->
%%   {ok, monitor(), binary()} | {error, not_owner | automonitor}
%% @doc Monitors the specified directory path. Returns the monitor
%% reference as well as the monitored path as a binary.
%%
%% Options: see {@link monitor_file/3}.
monitor_dir(Server, Path, Opts) ->
    monitor(Server, Path, Opts, directory).

%% not exported
%% Common implementation of monitor_file/3 and monitor_dir/3. A bad
%% 'monitor' option raises badarg in the caller, before the server is
%% involved; an 'undefined' reference asks the server to create one.
monitor(Server, Path, Opts, Type) ->
    FlatPath = normalize_path(Path),
    Ref = case proplists:get_value(monitor, Opts) of
              R when is_reference(R) ; R =:= undefined -> R;
              _ -> erlang:error(badarg)
          end,
    Cmd = {monitor, self(), {Type, FlatPath}, Ref},
    case gen_server:call(Server, Cmd) of
        {ok, Ref1} -> {ok, Ref1, FlatPath};
        {error, Reason} -> {error, Reason}
    end.

%% @spec (filename()) -> {ok, monitor(), binary()}
%% @equiv automonitor(Path, [])
automonitor(Path) ->
    automonitor(Path, []).

%% @spec (filename(), options()) -> {ok, monitor(), binary()}
%% @equiv automonitor(file_monitor, Path, Opts)
automonitor(Path, Opts) ->
    automonitor(?SERVER, Path, Opts).

%% @spec (server_ref(), filename(), options()) -> {ok, monitor(), binary()}
%% @doc Automonitors the specified path. Returns the monitor reference as
%% well as the monitored path as a binary.
%%
%% Options: none at present.
automonitor(Server, Path, _Opts) ->
    Normalized = normalize_path(Path),
    Request = {automonitor, self(), Normalized},
    {ok, Ref} = gen_server:call(Server, Request),
    {ok, Ref, Normalized}.

%% @spec (monitor()) -> ok | {error, not_owner}
%% @equiv demonitor(file_monitor, Ref)
demonitor(Ref) ->
    demonitor(?SERVER, Ref).

%% @spec (server_ref(), monitor()) -> ok | {error, not_owner}
%% @doc Deletes the given monitor. Only the process that created the
%% monitor may do this.
demonitor(Server, Ref) when is_reference(Ref) ->
    ok = gen_server:call(Server, {demonitor, self(), Ref}).

%% @spec (filename(), monitor()) -> ok | {error, not_owner}
%% @equiv demonitor_file(file_monitor, Path, Ref)
demonitor_file(Path, Ref) ->
    demonitor_file(?SERVER, Path, Ref).

%% @spec (server_ref(), filename(), monitor()) -> ok | {error, not_owner}
%% @doc Removes the file path from the given monitor. Only the process
%% that created the monitor may do this.
demonitor_file(Server, Path, Ref) ->
    demonitor(Server, Path, Ref, file).

%% @spec (filename(), monitor()) -> ok | {error, not_owner}
%% @equiv demonitor_dir(file_monitor, Path, Ref)
demonitor_dir(Path, Ref) ->
    demonitor_dir(?SERVER, Path, Ref).

%% @spec (server_ref(), filename(), monitor()) -> ok | {error, not_owner}
%% @doc Removes the directory path from the given monitor. Only the
%% process that created the monitor may do this.
demonitor_dir(Server, Path, Ref) ->
    demonitor(Server, Path, Ref, directory).

%% not exported; shared implementation of demonitor_file/3 and
%% demonitor_dir/3 - the path is normalized to a binary before being
%% handed to the server.
demonitor(Server, Path, Ref, Type) when is_reference(Ref) ->
    ok = gen_server:call(Server,
                         {demonitor, self(), {Type, normalize_path(Path)}, Ref}).

%% @spec () -> integer()
%% @equiv get_interval(file_monitor)
get_interval() ->
    get_interval(?SERVER).

%% @spec (server_ref()) -> integer()
%% @doc Returns the polling interval currently in effect.
get_interval(Server) ->
    gen_server:call(Server, get_interval).

%% @spec (integer()) -> ok
%% @equiv set_interval(file_monitor, Time)
set_interval(Time) ->
    set_interval(?SERVER, Time).

%% @spec (server_ref(), integer()) -> ok
%% @doc Sets the polling interval, in milliseconds.
set_interval(Server, Time) when is_integer(Time) ->
    gen_server:call(Server, {set_interval, Time}).

%% @spec () -> {ok, ServerPid::pid()} | ignore | {error, any()}
%% @equiv start([])
start() ->
    start([]).

%% @spec (options()) -> {ok, ServerPid::pid()} | ignore | {error, any()}
%% @equiv start({local, file_monitor}, Options)
start(Options) ->
    start({local, ?SERVER}, Options).

%% @spec ({local, atom()} | {global, atom()} | undefined, options()) ->
%%   {ok, ServerPid::pid()} | ignore | {error, any()}
%% @doc Starts the server registered under the given name; if the name is
%% `undefined', the server is left unregistered. See {@link
%% //stdlib/gen_server:start_link/4} for details about the return value.
%%
%% Options:
%% <ul>
%% <li>{@type {interval, Milliseconds::integer()@}}</li>
%% </ul>
start(undefined, Options) ->
    gen_server:start(?MODULE, Options, []);
start(Name, Options) ->
    gen_server:start(Name, ?MODULE, Options, []).

%% @spec () -> {ok, ServerPid::pid()} | ignore | {error, any()}
%% @equiv start_link([])
start_link() ->
    start_link([]).

%% @spec (options()) -> {ok, ServerPid::pid()} | ignore | {error, any()}
%% @equiv start_link({local, file_monitor}, Options)
start_link(Options) ->
    start_link({local, ?SERVER}, Options).

%% @spec ({local, atom()} | {global, atom()} | undefined, options()) ->
%%   {ok, ServerPid::pid()} | ignore | {error, any()}
%% @doc Starts the server, links it to the current process, and
%% registers it using the specified name. If the name is `undefined',
%% the server will not be registered. See {@link
%% //stdlib/gen_server:start_link/4} for details about the return value.
%%
%% Options: see {@link start/2}.
start_link(undefined, Options) ->
    gen_server:start_link(?MODULE, Options, []);
start_link(Name, Options) ->
    gen_server:start_link(Name, ?MODULE, Options, []).

%% @spec () -> ok
%% @equiv stop(file_monitor)
stop() ->
    stop(?SERVER).

%% @spec (server_ref()) -> ok
%% @doc Stops the specified server.
stop(Server) ->
    gen_server:call(Server, stop),
    ok.

%%
%% gen_server callbacks
%%

%% @private
%% All path tables start out empty; set_timer/1 is called only for its
%% send_after side effect, scheduling the first poll.
init(Options) ->
    Time = safe_interval(proplists:get_value(interval, Options)),
    St = #state{interval = Time,
                files = dict:new(),
                dirs = dict:new(),
                autodirs = dict:new(),
                clients = dict:new(),
                refs = dict:new()},
    set_timer(St),
    {ok, St}.

%% Note that we create all references on the server side, to be
%% consistent (and in case it will matter, to ensure that the server
%% mostly uses references local to its own node).

%% @private
handle_call({monitor, Pid, Object, undefined}, From, St) ->
    %% no monitor reference supplied - create one and retry
    handle_call({monitor, Pid, Object, make_ref()}, From, St);
handle_call({monitor, Pid, Object, Ref}, _From, St)
  when is_pid(Pid), is_reference(Ref) ->
    try add_monitor(Object, Pid, Ref, St) of
        St1 ->
            {reply, {ok, Ref}, register_client(Pid, Ref, St1)}
    catch
        not_owner ->
            {reply, {error, not_owner}, St};
        automonitor ->
            {reply, {error, automonitor}, St}
    end;
handle_call({demonitor, Pid, Ref}, _From, St) when is_reference(Ref) ->
    try delete_monitor(Pid, Ref, St) of
        St1 ->
            {reply, ok, St1}
    catch
        not_owner ->
            {reply, {error, not_owner}, St}
    end;
handle_call({demonitor, Pid, Object, Ref}, _From, St)
  when is_reference(Ref) ->
    try demonitor_path(Pid, Ref, Object, St) of
        St1 ->
            {reply, ok, St1}
    catch
        not_owner ->
            {reply, {error, not_owner}, St}
    end;
handle_call({automonitor, Pid, Path}, _From, St) when is_pid(Pid) ->
    %% it shouldn't be possible to get exceptions due to wrong owner or
    %% non-automonitor type here, since we always create a new reference
    Ref = make_ref(),
    St1 = unsafe_automonitor_path(Path, Pid, Ref, St),
    {reply, {ok, Ref}, register_client(Pid, Ref, St1)};
handle_call(get_interval, _From, St) ->
    {reply, St#state.interval, St};
handle_call({set_interval, Time}, _From, St) ->
    {reply, ok, St#state{interval=safe_interval(Time)}};
handle_call(stop, _From, St) ->
    {stop, normal, ok, St}.

%% @private
handle_cast(_, St) ->
    {noreply, St}.

%% @private
handle_info({?MSGTAG, Ref, Event}, St) ->
    %% auto-monitoring event to self
    case dict:find(Ref, St#state.refs) of
        {ok, #monitor_info{pid=Pid}} ->
            {noreply, autoevent(Event, Pid, Ref, St)};
        error ->
            %% could happen if this event was already in the queue when
            %% we processed the deletion of the same reference, so just
            %% ignore the message
            {noreply, St}
    end;
handle_info(poll, St) ->
    %% periodic poll tick; the timer is always rescheduled
    {noreply, set_timer(poll(St))};
handle_info(enable_poll, St) ->
    %% sent by poll/1 to itself to re-enable polling once all
    %% automonitoring events have been processed
    {noreply, St#state{poll=true}};
handle_info({'DOWN', _Ref, process, Pid, _Info}, St) ->
    %% a client process died - drop all monitors it owned
    {noreply, remove_client(Pid, St)};
handle_info(_, St) ->
    {noreply, St}.

%% @private
code_change(_OldVsn, St, _Extra) ->
    {ok, St}.

%% @private
terminate(_Reason, _St) ->
    ok.

%%
%% Internal functions
%%

%% We allow paths as binaries, atoms, or "extended io-lists" that may
%% contain atoms as well as binaries. This is flattened into a single
%% binary (currently assuming that the input uses an 8-bit encoding).
%% A single character is not a valid path; it must be within a list.

%% @spec (filename()) -> binary()
%% @doc Flattens the given path to a single binary.
normalize_path(Path) when is_binary(Path) -> Path;
normalize_path(Path) -> list_to_binary(flatten_onto(Path, [])).

%% Flattens a deep list of bytes, atoms and binaries onto the
%% accumulator list As; any other element raises badarg.
flatten_onto([X | Xs], As) when is_integer(X), X >= 0, X =< 255 ->
    [X | flatten_onto(Xs, As)];
flatten_onto([X | Xs], As) ->
    flatten_onto(X, flatten_onto(Xs, As));
flatten_onto([], As) ->
    As;
flatten_onto(X, As) when is_atom(X) ->
    atom_to_list(X) ++ As;
flatten_onto(X, As) when is_binary(X) ->
    binary_to_list(X) ++ As;
flatten_onto(_, _) ->
    erlang:error(badarg).

%% Joins a directory path and an entry name into a normalized binary path.
join_to_path(Path, File) when is_binary(Path), is_binary(File) ->
    normalize_path(filename:join(binary_to_list(Path),
                                 binary_to_list(File))).
%% Clamps a polling interval to [?MIN_INTERVAL, 16#FFFFffff] milliseconds;
%% anything that is not an integer falls back to the default interval.
safe_interval(N) when is_integer(N) ->
    min(16#FFFFffff, max(N, ?MIN_INTERVAL));
safe_interval(_) ->
    ?DEFAULT_INTERVAL.

%% Schedules the next 'poll' message to self; returns the state unchanged.
set_timer(St) ->
    erlang:send_after(St#state.interval, self(), poll),
    St.

%% Handling of auto-monitoring events
%%
%% - If a new entry of an automonitored directory is discovered, make it
%%   too automonitored (this does recursive monitoring by definition).
%%
%% - If an entry is deleted from an automonitored directory, remove any
%%   automonitor from it and its subdirectories recursively. Note that
%%   by definition, this will never auto-remove the top automonitored
%%   directory.
%%
%% - Because of the special status of the top directory (the path given
%%   to automonitor), we do not allow the user to add/remove paths to an
%%   existing automonitor or pass a user-specified reference.
%%
%% - Note that to be able to demonitor directories recursively, we must
%%   track the automonitored directory entries for each directory and
%%   monitor pair (since the directory itself can no longer be read,
%%   there is no other way we can know which subentries to demonitor).
%%
%% - An automonitored non-directory that changes type to directory, or
%%   vice versa, should cause recreation of the monitor to match the
%%   new type.
%%
%% - Errors on automonitored files are assumed to be intermittent, i.e.,
%%   not even enoent should in itself cause demonitoring - that is done
%%   only if the containing directory reports that the file is removed.
%%
%% - Errors on automonitored directories cause immediate demonitoring of
%%   the entries of the directory, but not the directory itself.
%% Reacts to a status event on an automonitored path; Pid is the owning
%% client, Ref the automonitor reference. Returns the updated state.
autoevent({_Tag, Path, Type, #file_info{}=Info, _Files}, Pid, Ref, St)
  when (((Type =:= file) and (Info#file_info.type =:= directory))
        orelse
        ((Type =:= directory) and (Info#file_info.type =/= directory))) ->
    %% monitor type mismatch detected
    autoremonitor_path(Path, Pid, Ref, St);
autoevent({_Tag, Path, directory, #file_info{}=Info, Files}, Pid, Ref, St0)
  when Info#file_info.type =:= directory ->
    %% add/remove automonitoring to/from all added/deleted entries
    lists:foldl(fun ({added, File}, St) ->
                        St1 = add_autodir_entry(Path, File, Ref, St),
                        automonitor_path(join_to_path(Path, File),
                                         Pid, Ref, St1);
                    ({deleted, File}, St) ->
                        St1 = remove_autodir_entry(Path, File, Ref, St),
                        autodemonitor_path(join_to_path(Path, File),
                                           Pid, Ref, St1)
                end,
                St0, Files);
autoevent({error, Path, directory, enotdir}, Pid, Ref, St) ->
    %% monitor type mismatch detected
    autoremonitor_path(Path, Pid, Ref, St);
autoevent({error, Path, directory, _}, Pid, Ref, St) ->
    %% only demonitor subdirectories/files
    autodemonitor_dir_entries(Path, Pid, Ref, St);
autoevent(_Event, _Pid, _Ref, St) ->
    St.

%% monitor type mismatch detected - recreate it to get correct type
autoremonitor_path(Path, Pid, Ref, St) ->
    automonitor_path(Path, Pid, Ref, autodemonitor_path(Path, Pid, Ref, St)).

%% Automonitors Path on behalf of an already-known client; unknown
%% clients and ownership exceptions are silently ignored.
automonitor_path(Path, Pid, Ref, St) ->
    %% Pid should be a known client, otherwise do nothing
    case dict:is_key(Pid, St#state.clients) of
        true ->
            try unsafe_automonitor_path(Path, Pid, Ref, St)
            catch
                throw:_ -> St
            end;
        false ->
            St
    end.

%% see add_monitor for possible thrown exceptions
%% Picks the monitoring type from the actual object on disk: a readable
%% directory is monitored as a directory, everything else as a file.
unsafe_automonitor_path(Path, Pid, Ref, St) ->
    Object = case file:read_file_info(binary_to_list(Path)) of
                 {ok, #file_info{type=directory}} ->
                     {directory, Path};
                 _ ->
                     {file, Path}  % also for errors
             end,
    add_automonitor(Object, Pid, Ref, St).
%% Removes any file and directory monitors held by Ref on Path, then
%% recursively demonitors all tracked entries below it.
autodemonitor_path(Path, Pid, Ref, St0) ->
    St1 = try demonitor_path(Pid, Ref, {file, Path}, St0)
          catch
              not_owner -> St0
          end,
    St2 = try demonitor_path(Pid, Ref, {directory, Path}, St1)
          catch
              not_owner -> St1
          end,
    autodemonitor_dir_entries(Path, Pid, Ref, St2).

%% Recursively demonitors the tracked entries of directory Path for
%% monitor Ref, without touching the monitor on Path itself.
autodemonitor_dir_entries(Path, Pid, Ref, St0) ->
    Dirs0 = St0#state.autodirs,
    case dict:find(Path, Dirs0) of
        {ok, Map0} ->
            case dict:find(Ref, Map0) of
                {ok, Set} ->
                    Map = dict:erase(Ref, Map0),
                    %% purge empty entries to save space
                    Dirs = case dict:size(Map) > 0 of
                               true -> dict:store(Path, Map, Dirs0);
                               false -> dict:erase(Path, Dirs0)
                           end,
                    St1 = St0#state{autodirs = Dirs},
                    sets:fold(fun (File, St) ->
                                      P = join_to_path(Path, File),
                                      autodemonitor_path(P, Pid, Ref, St)
                              end,
                              St1, Set);
                error ->
                    St0
            end;
        error ->
            St0
    end.

%% tracking subentries of automonitored directories, in order to enable
%% efficient recursive demonitoring

%% Records File as an automonitored entry of directory Path under Ref.
add_autodir_entry(Path, File, Ref, St) ->
    Map = case dict:find(Path, St#state.autodirs) of
              {ok, Map0} ->
                  Set = case dict:find(Ref, Map0) of
                            {ok, Entries} -> Entries;
                            error -> sets:new()
                        end,
                  dict:store(Ref, sets:add_element(File, Set), Map0);
              error ->
                  dict:store(Ref, sets:add_element(File, sets:new()),
                             dict:new())
          end,
    St#state{autodirs = dict:store(Path, Map, St#state.autodirs)}.

%% Forgets File as an automonitored entry of directory Path under Ref.
remove_autodir_entry(Path, File, Ref, St) ->
    Dirs0 = St#state.autodirs,
    case dict:find(Path, Dirs0) of
        {ok, Map0} ->
            case dict:find(Ref, Map0) of
                {ok, Set0} ->
                    %% purge empty entries to save space
                    Set = sets:del_element(File, Set0),
                    Map = case sets:size(Set) > 0 of
                              true -> dict:store(Ref, Set, Map0);
                              false -> dict:erase(Ref, Map0)
                          end,
                    Dirs = case dict:size(Map) > 0 of
                               true -> dict:store(Path, Map, Dirs0);
                               false -> dict:erase(Path, Dirs0)
                           end,
                    St#state{autodirs = Dirs};
                error ->
                    St
            end;
        error ->
            St
    end.
%% client monitoring (once a client, always a client - until death)

%% Ensures Pid is tracked as a client (installing a process monitor the
%% first time it is seen) and records Ref as owned by it.
register_client(Pid, Ref, St) ->
    Info = case dict:find(Pid, St#state.clients) of
               {ok, OldInfo} ->
                   OldInfo;
               error ->
                   Monitor = erlang:monitor(process, Pid),
                   #client_info{monitor = Monitor, refs = sets:new()}
           end,
    Refs = sets:add_element(Ref, Info#client_info.refs),
    St#state{clients = dict:store(Pid, Info#client_info{refs = Refs},
                                  St#state.clients)}.

%% Drops a client (used on 'DOWN'), deleting all monitors it owned.
remove_client(Pid, St) ->
    case dict:find(Pid, St#state.clients) of
        {ok, #client_info{monitor = Monitor, refs = Refs}} ->
            erlang:demonitor(Monitor, [flush]),
            purge_client(Pid, Refs, St);
        error ->
            St
    end.

purge_client(Pid, Refs, St0) ->
    sets:fold(fun (Ref, St) ->
                      %% the Pid *should* be the owner here, so
                      %% a not_owner exception should not happen
                      delete_monitor(Pid, Ref, St)
              end,
              St0#state{clients = dict:erase(Pid, St0#state.clients)},
              Refs).

%% Adding a new monitor; throws 'not_owner' if the monitor reference is
%% already registered for another Pid; throws 'automonitor' if the
%% reference is already registered with a different type.

add_monitor(Object, Pid, Ref, St) ->
    add_monitor(Object, Pid, Ref, St, false).

add_automonitor(Object, Pid, Ref, St) ->
    add_monitor(Object, Pid, Ref, St, true).

%% Note: the first dict:find clause matches only when both Pid and Auto
%% equal the registered values, so ownership and monitor kind are both
%% checked by the clause ordering below.
add_monitor(Object, Pid, Ref, St, Auto) ->
    Info = case dict:find(Ref, St#state.refs) of
               {ok, #monitor_info{pid = Pid, auto = Auto}=OldInfo} ->
                   OldInfo;
               {ok, #monitor_info{pid = Pid}} ->
                   throw(automonitor);
               {ok, #monitor_info{}} ->
                   throw(not_owner);
               error ->
                   #monitor_info{pid = Pid, auto = Auto,
                                 objects = sets:new()}
           end,
    NewObjects = sets:add_element(Object, Info#monitor_info.objects),
    Refs = dict:store(Ref, Info#monitor_info{objects = NewObjects},
                      St#state.refs),
    monitor_path(Object, Ref, St#state{refs = Refs}).

%% We must separate the namespaces for files and dirs; there may be
%% simultaneous file and directory monitors for the same path, and a
%% file may be deleted and replaced by a directory of the same name, or
%% vice versa. The client should know (more or less) if a path is
%% expected to refer to a file or a directory.

monitor_path({file, Path}, Ref, St) ->
    St#state{files = monitor_path(Path, Ref, file, St#state.files, St)};
monitor_path({directory, Path}, Ref, St) ->
    St#state{dirs = monitor_path(Path, Ref, directory, St#state.dirs, St)}.

%% Adding a new monitor forces an immediate poll of the path, such that
%% previous monitors only see any real change, while the new monitor
%% either gets {found, ...} or {error, ...}.
monitor_path(Path, Ref, Type, Dict, St) ->
    Entry = case dict:find(Path, Dict) of
                {ok, OldEntry} ->
                    poll_file(Path, OldEntry, Type, St);
                error ->
                    new_entry(Path, Type, St)
            end,
    event(#entry{}, dummy_entry(Entry, Ref), Type, Path, St),
    NewEntry = Entry#entry{monitors =
                               sets:add_element(Ref, Entry#entry.monitors)},
    dict:store(Path, NewEntry, Dict).

%% A copy of Entry whose only monitor is Ref; used to direct the initial
%% found/error event at the new monitor alone.
dummy_entry(Entry, Ref) ->
    Entry#entry{monitors = sets:add_element(Ref, sets:new())}.

new_entry(Path, Type, St) ->
    refresh_entry(Path, #entry{monitors = sets:new()}, Type,
                  St#state.interval).

%% Deleting a monitor by reference; throws not_owner if the monitor
%% reference is owned by another Pid. The client_info entry may already
%% have been deleted if we come from purge_client().
delete_monitor(Pid, Ref, St0) ->
    St1 = case dict:find(Pid, St0#state.clients) of
              {ok, #client_info{refs = Refs}=I} ->
                  NewRefs = sets:del_element(Ref, Refs),
                  St0#state{clients =
                                dict:store(Pid,
                                           I#client_info{refs = NewRefs},
                                           St0#state.clients)};
              error ->
                  St0
          end,
    case dict:find(Ref, St1#state.refs) of
        {ok, #monitor_info{pid = Pid, objects = Objects}} ->
            sets:fold(fun (Object, St) ->
                              purge_monitor_path(Ref, Object, St)
                      end,
                      St1#state{refs = dict:erase(Ref, St1#state.refs)},
                      Objects);
        {ok, #monitor_info{}} ->
            throw(not_owner);
        error ->
            St1
    end.

%% Deleting a particular path from a monitor. Throws not_owner if the
%% monitor reference is owned by another Pid.
%% Removes Object from monitor Ref (verifying that Pid is the owner) and
%% purges Ref from the per-path entry tables.
demonitor_path(Pid, Ref, Object, St) ->
    case dict:find(Ref, St#state.refs) of
        {ok, #monitor_info{pid = Pid, objects = Objects}=I} ->
            St1 = purge_monitor_path(Ref, Object, St),
            I1 = I#monitor_info{objects=sets:del_element(Object, Objects)},
            St1#state{refs = dict:store(Ref, I1, St1#state.refs)};
        {ok, #monitor_info{}} ->
            throw(not_owner);
        error ->
            St
    end.

%% Deleting a particular monitor from a path.
purge_monitor_path(Ref, {file, Path}, St) ->
    St#state{files = purge_monitor_path_1(Path, Ref, St#state.files)};
purge_monitor_path(Ref, {directory, Path}, St) ->
    St#state{dirs = purge_monitor_path_1(Path, Ref, St#state.dirs)}.

%% Drops Ref from the entry of Path; when no monitors remain, the whole
%% entry is discarded, so unwatched paths are no longer polled.
purge_monitor_path_1(Path, Ref, Dict) ->
    case dict:find(Path, Dict) of
        {ok, Entry} ->
            Monitors = sets:del_element(Ref, Entry#entry.monitors),
            case sets:size(Monitors) > 0 of
                true ->
                    dict:store(Path, Entry#entry{monitors = Monitors},
                               Dict);
                false ->
                    dict:erase(Path, Dict)
            end;
        error ->
            Dict
    end.

%% Generating events upon state changes by comparing old and new states
%%
%% Event formats:
%%    {found, Path, Type, #file_info{}, Files}
%%    {changed, Path, Type, #file_info{}, Files}
%%    {error, Path, Type, PosixAtom}
%%
%% Type is file or directory, as specified by the monitor type, not by
%% the actual type on disk. If Type is file, Files is always []. If Type
%% is directory, Files is a list of {added, FileName} and {deleted,
%% FileName}, where FileName is on basename form, i.e., without any
%% directory component.
%%
%% When a new monitor is installed for a path, an initial {found,...}
%% or {error,...} event will be sent to the monitor owner.
%%
%% Subsequent events will be either {changed,...} or {error,...}.
%%
%% The monitor reference is not included in the event descriptor itself,
%% but is part of the main message format; see cast/2.
%%
%% Note that we never compare directory entry lists here; if there might
%% have been changes, the timestamps should also be different - see
%% refresh_entry() below for more details.
%% Compares the old and new entry for Path and broadcasts the
%% appropriate event (found/changed/error) to the entry's monitors.
event(#entry{info = Info, stable = Stable},
      #entry{info = Info, stable = Stable}, _Type, _Path, _St) ->
    ok;  % no change in state
event(#entry{info = undefined}, #entry{info = NewInfo}=Entry,
      Type, Path, St)
  when not is_atom(NewInfo) ->
    %% file or directory exists, for a fresh monitor
    Diff = diff_lists(Entry#entry.dir, []),
    cast({found, Path, Type, NewInfo, Diff}, Entry#entry.monitors, St);
event(OldEntry, #entry{info = NewInfo}=Entry, Type, Path, St)
  when is_atom(NewInfo) ->
    %% file or directory is not available
    type_change_event(OldEntry#entry.info, NewInfo, Type, Path, Entry, St),
    cast({error, Path, Type, NewInfo}, Entry#entry.monitors, St);
event(OldEntry, #entry{info = NewInfo}=Entry, Type, Path, St) ->
    %% a file or directory has changed or become readable again after an
    %% error
    type_change_event(OldEntry#entry.info, NewInfo, Type, Path, Entry, St),
    Diff = diff_lists(Entry#entry.dir, OldEntry#entry.dir),
    if Diff =:= [], Entry#entry.stable =/= OldEntry#entry.stable,
       NewInfo =:= OldEntry#entry.info ->
            %% if only the time-to-stable has changed, we must not
            %% broadcast an event unless there really was a change in
            %% the directory list
            ok;
       true ->
            cast({changed, Path, Type, NewInfo, Diff},
                 Entry#entry.monitors, St)
    end.

%% sudden changes in file type cause an 'enoent' error report before the
%% new status, so that clients do not need to detect this themselves
type_change_event(#file_info{type = T}, #file_info{type = T},
                  _, _, _, _) ->
    ok;
type_change_event(#file_info{}, #file_info{}, Type, Path, Entry, St) ->
    cast_enoent(Type, Path, Entry, St);
type_change_event(#file_info{type = directory}, enotdir,
                  Type, Path, Entry, St) ->
    cast_enoent(Type, Path, Entry, St);
type_change_event(enotdir, #file_info{type = directory},
                  Type, Path, Entry, St) ->
    cast_enoent(Type, Path, Entry, St);
type_change_event(_, _, _, _, _, _) ->
    ok.

cast_enoent(Type, Path, Entry, St) ->
    cast({error, Path, Type, enoent}, Entry#entry.monitors, St).
poll(#state{poll=false}=St) -> St; poll(#state{poll=true}=St) -> Files = dict:map(fun (Path, Entry) -> poll_file(Path, Entry, file, St) end, St#state.files), Dirs = dict:map(fun (Path, Entry) -> poll_file(Path, Entry, directory, St) end, St#state.dirs), %% polling will now be disabled until all automonitoring events have %% been processed: self() ! enable_poll, St#state{poll=false, files = Files, dirs = Dirs}. poll_file(Path, Entry, Type, St) -> NewEntry = refresh_entry(Path, Entry, Type, St#state.interval), event(Entry, NewEntry, Type, Path, St), NewEntry. %% We want to minimize the polling cost, so we only list directories %% when they have new timestamps, or are still considered "unstable". %% This requires some explanation: Recall that timestamps have %% whole-second resolution. If we listed a directory during the same %% second that it was (or is still being) modified, it is possible that %% we did not see all the additions/deletions that were made that second %% - and in that case, we might not detect those changes until the %% timestamp changes again (maybe never). (This really happens - in %% particular in test suites...) But we cannot use the system clock for %% comparisons, because the file system might be on another clock (or %% another time zone setting), and read-access timestamps are also not %% reliable (not consistently updated on all platforms and not at all on %% some file systems). Therefore we do our own internal approximation of %% passed time (#state.time), based on the refresh cycle. %% Furthermore, we should make sure that the timestamps we use are %% always from after the directory was last listed, so that we never %% report an added entry that was actually created later than the %% directory timestamp. refresh_entry(Path, Entry, Type, Delta) when is_binary(Path) -> refresh_entry_0(binary_to_list(Path), Entry, Type, Delta, Entry#entry.info). 
refresh_entry_0(Path, Entry, Type, Delta, OldInfo) -> refresh_entry_1(Path, Entry, Type, Delta, OldInfo, false). refresh_entry_1(Path, Entry, Type, Delta, OldInfo, Break) -> NewInfo = get_file_info(Path), case Type of directory when not is_atom(NewInfo) -> case NewInfo#file_info.type of directory when is_atom(OldInfo) ; NewInfo#file_info.mtime =/= OldInfo#file_info.mtime -> %% The info has changed, so we are forced to refresh %% the directory listing. We set the time-to-stable, %% and loop to ensure that the info is always later %% than the listing. See below for more details. refresh_entry_2(Path, Entry, Type, Delta, NewInfo, ?TIME_TO_STABLE); directory when Entry#entry.stable > 0, not Break -> %% If both the old and new timestamps exist and the %% modification time has not changed, but the entry %% is not yet stable (and we didn't loop just now), %% we need to refresh the directory list again and %% decrement the time-to-stable. Note that we test %% *before* we decrement, which ensures that this %% happens at least once after the initial listing %% regardless of the value of Delta. (See the %% discussion about timestamp resolution above.) refresh_entry_2(Path, Entry, Type, Delta, NewInfo, max(0, Entry#entry.stable - Delta)); directory -> Entry#entry{info = NewInfo}; _ -> %% attempting to monitor a non-directory as a %% directory is reported as an 'enotdir' error Entry#entry{info = enotdir, dir = [], stable = 0} end; _ -> %% If we're not monitoring this path as a directory, or we %% got an error, we don't care what kind of object it is, %% but just track its status. To handle the case of an error %% on a directory type monitor, we make sure to reset the %% list of directory entries. Entry#entry{info = NewInfo, dir = [], stable = 0} end. 
refresh_entry_2(Path, Entry, Type, Delta, Info, Stable) -> %% refresh directory list, update time-to-stable, and loop to %% re-read the info, but also ensure that we do not trigger the %% stability rule again until the next poll refresh_entry_1(Path, Entry#entry{info = Info, dir = list_dir(Path), stable = Stable}, Type, Delta, Info, true). %% We clear some fields of the file_info so that we only trigger on real %% changes; see the //kernel/file.erl manual and file.hrl for details. get_file_info(Path) when is_list(Path) -> case file:read_file_info(Path) of {ok, Info} -> Info#file_info{access = undefined, atime = undefined}; {error, Error} -> Error % posix error code as atom end. %% Listing the members of a directory; note that it yields the empty %% list if it fails - this is not the place for error detection. %% mw 20151023: patch to ignore Zotonic "files" directories containing all uploaded %% and preview files. This speeds up the scanning considerably. list_dir("." ++ _) -> []; list_dir(Path) when is_list(Path) -> Dirname = filename:dirname(Path), Files = case file:list_dir(Path) of {ok, Fs} -> Fs1 = lists:filter( fun(F) -> not zotonic_filewatcher_handler:is_file_blacklisted(Dirname, F) end, Fs), lists:map( fun normalize_path/1, Fs1 ); {error, _} -> [] end, lists:sort(Files). %% both lists must be sorted for this diff to work diff_lists([F1 | Fs1], [F2 | _]=Fs2) when F1 < F2 -> [{added, F1} | diff_lists(Fs1, Fs2)]; diff_lists([F1 | _]=Fs1, [F2 | Fs2]) when F1 > F2 -> [{deleted, F2} | diff_lists(Fs1, Fs2)]; diff_lists([_ | Fs1], [_ | Fs2]) -> diff_lists(Fs1, Fs2); diff_lists([F | Fs1], Fs2) -> [{added, F} | diff_lists(Fs1, Fs2)]; diff_lists(Fs1, [F | Fs2]) -> [{deleted, F} | diff_lists(Fs1, Fs2)]; diff_lists([], []) -> []. %% Multicasting events to clients. The message has the form %% {file_monitor, MonitorReference, Event}, where Event is described in %% more detail above, and 'file_monitor' is the name of this module. 
cast(Message, Monitors, St) -> sets:fold(fun (Ref, Msg) -> case dict:find(Ref, St#state.refs) of {ok, #monitor_info{pid = Pid, auto = Auto}} -> Pid ! {?MSGTAG, Ref, Msg}, case Auto of true -> self() ! {?MSGTAG, Ref, Msg}; false -> ok end end, Msg % note that this is a fold, not a map end, Message, Monitors).
apps/zotonic_filewatcher/src/zotonic_filewatcher_monitor.erl
0.696681
0.400339
zotonic_filewatcher_monitor.erl
starcoder
% Licensed under the Apache License, Version 2.0 (the "License"); you may not % use this file except in compliance with the License. You may obtain a copy of % the License at % % http://www.apache.org/licenses/LICENSE-2.0 % % Unless required by applicable law or agreed to in writing, software % distributed under the License is distributed on an "AS IS" BASIS, WITHOUT % WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the % License for the specific language governing permissions and limitations under % the License. -module(hastings_fabric_search). -include_lib("mem3/include/mem3.hrl"). -include_lib("couch/include/couch_db.hrl"). -include("hastings.hrl"). -export([ go/4 ]). go(DbName, GroupId, IndexName, HQArgs) when is_binary(GroupId) -> {ok, DDoc} = fabric:open_doc(DbName, <<"_design/", GroupId/binary>>, []), go(DbName, DDoc, IndexName, HQArgs); go(DbName, DDoc, IndexName, HQArgs) -> StartFun = search, StartArgs = [fabric_util:doc_id_and_rev(DDoc), IndexName, HQArgs], case run(DbName, StartFun, StartArgs, HQArgs) of {ok, Resps} -> Hits0 = merge_resps(Resps, HQArgs), Hits1 = limit_resps(Hits0, HQArgs), {ok, maybe_add_docs(DbName, Hits1, HQArgs)}; Else -> Else end. run(DbName, StartFun, StartArgs, #h_args{}=HQArgs) -> Primary = [ {Node, Range} || {{Node, Range}, _} <- HQArgs#h_args.bookmark ], Stale = lists:member(HQArgs#h_args.stale, [true, update_after]), Stable = HQArgs#h_args.stable, Secondary = case Stale orelse Stable of true -> mem3:ushards(DbName); false -> [] end, hastings_fabric:run(DbName, StartFun, StartArgs, Primary, Secondary). merge_resps(Hits, #h_args{}=HQArgs) -> Limit = HQArgs#h_args.limit + HQArgs#h_args.skip, merge_resps(Hits, Limit); merge_resps([{ok, Hits}], _Limit) -> Hits; merge_resps([{ok, Hits} | Rest], Limit) -> RestHits = merge_resps(Rest, Limit), merge_hits(Hits, RestHits, Limit). 
merge_hits(Hits, RestHits, Limit) -> SortFun = fun(H1, H2) -> {H1#h_hit.dist, H1#h_hit.id} =< {H2#h_hit.dist, H2#h_hit.id} end, SortedHits = lists:sort(SortFun, Hits ++ RestHits), lists:sublist(SortedHits, Limit). limit_resps(Hits, HQArgs) -> limit_resps(Hits, HQArgs#h_args.skip, HQArgs#h_args.limit). limit_resps(Hits, Skip, _Limit) when Skip >= length(Hits) -> []; limit_resps(Hits, Skip, Limit) -> lists:sublist(Hits, Skip+1, Limit). maybe_add_docs(DbName, Hits, #h_args{include_docs=true}) -> add_docs(DbName, Hits); maybe_add_docs(_DbName, Hits, _) -> Hits. add_docs(DbName, Hits) -> DocIds = [Id || #h_hit{id=Id} <- Hits], {ok, Docs} = hastings_util:get_json_docs(DbName, DocIds), lists:map(fun(H) -> {_, Doc} = lists:keyfind(H#h_hit.id, 1, Docs), H#h_hit{doc = Doc} end, Hits).
src/hastings_fabric_search.erl
0.601828
0.427337
hastings_fabric_search.erl
starcoder
%% Copyright (c) 2012, Treetop Software LLC %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above %% copyright notice and this permission notice appear in all copies. %% %% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES %% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF %% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR %% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES %% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. %% @author <NAME> <<EMAIL>> %% @copyright 2012 Treetop Software LLC %% @doc GeoHash functions for Erlang with C implementations for core %% functionality %% @end -module(geohash). -export([ decode/1, decode_bbox/1, encode/3, neighbor/2, neighbors/1, expand/1, nearby/3 ]). -on_load(init/0). %% @doc Decode a geohash in to latitude and longitude -spec decode(binary()) -> {float(), float()}. decode(_GeoHash) -> exit(geohash_nif_not_loaded). %% @doc Decode a geohash in to latitude and longitude bounds -spec decode_bbox(binary()) -> {{float(), float()}, {float(), float()}}. decode_bbox(_GeoHash) -> exit(geohash_nif_not_loaded). %% @doc Encode latitude and longitude into a geohash -spec encode(float(), float(), pos_integer()) -> binary(). encode(_Latitude, _Longitude, _Precision) -> exit(geohash_nif_not_loaded). %% @doc Calculate a neighoring geohash -spec neighbor(binary(), n | s | w | e) -> binary(). neighbor(_GeoHash, _Direction) -> exit(geohash_nif_not_loaded). %% @doc Calculate 8 neighboring geohashes -spec neighbors(binary()) -> [binary()]. 
neighbors(C) -> {ok, N} = neighbor(C, n), {ok, W} = neighbor(C, w), {ok, S} = neighbor(C, s), {ok, E} = neighbor(C, e), {ok, NW} = neighbor(N, w), {ok, NE} = neighbor(N, e), {ok, SW} = neighbor(S, w), {ok, SE} = neighbor(S, e), [N, W, S, E, NW, NE, SW, SE]. %% @doc Expand a geohash to give a list of itself and 8 neighboring geohashes -spec expand(binary()) -> [binary()]. expand(C) -> [C | neighbors(C)]. %% @doc Nearby geohashes useful for searching a region of a map -spec nearby(float(), float(), float()) -> [binary()]. nearby(Lat, Lon, Rad) -> Precision = nearby_precision(Lat, Lon, Rad), {ok, GeoHash} = encode(Lat, Lon, Precision), expand(GeoHash). %% @private init() -> SoName = case code:priv_dir(?MODULE) of {error, bad_name} -> case filelib:is_dir(filename:join(["..", "priv"])) of true -> filename:join(["..", "priv", "geohash_nif"]); false -> filename:join(["priv", "geohash_nif"]) end; Dir -> filename:join(Dir, "geohash_nif") end, (catch erlang:load_nif(SoName, 0)), case erlang:system_info(otp_release) of "R13B03" -> true; _ -> ok end. %% @doc Best fit geohash precision -spec nearby_precision(float(), float(), float()) -> integer(). nearby_precision(Lat, Lon, Rad) -> {MinLat, MinLon, MaxLat, MaxLon} = earth_bounding_box(Lat, Lon, Rad), DeltaLat = MaxLat - MinLat, DeltaLon = MaxLon - MinLon, Bits = geohash_bits(DeltaLat, DeltaLon, 63), max(trunc(Bits/5), 1). %% @doc Determine the bounding box of coordinates for a point and radius %% distance on the earth. %% @end -spec earth_bounding_box(float(), float(), float()) -> {float(), float(), float(), float()}. 
earth_bounding_box(Lat, Lon, Dist) -> MIN_LAT = radians(-90), MAX_LAT = radians(90), MIN_LON = radians(-180), MAX_LON = radians(180), AngularDist = Dist / earth_radius(), Lat0 = radians(Lat), Lon0 = radians(Lon), MinLat = Lat0 - AngularDist, MaxLat = Lat0 + AngularDist, case (MinLat > MIN_LAT) and (MaxLat < MAX_LAT) of true -> DeltaLon = math:asin(math:sin(AngularDist) / math:cos(Lat0)), MinLon = Lon0 - DeltaLon, MinLon0 = case MinLon < MIN_LON of true -> MinLon + 2.0*math:pi(); false -> MinLon end, MaxLon = Lon0 + DeltaLon, MaxLon0 = case MaxLon > MAX_LON of true -> MaxLon - 2.0*math:pi(); false -> MaxLon end, {degrees(MinLat), degrees(MinLon0), degrees(MaxLat), degrees(MaxLon0)}; false -> MinLat0 = max(MinLat, MIN_LAT), MaxLat0 = min(MaxLat, MAX_LAT), {degrees(MinLat0), degrees(MIN_LON), degrees(MaxLat0), degrees(MAX_LON)} end. %% @doc Earth Radius -spec earth_radius() -> float(). earth_radius() -> 6371.0. %% @doc Convert degrees to radians -spec radians(float()) -> float(). radians(Degrees) -> (math:pi()/180.0)*Degrees. %% @doc Convert radians to degrees -spec degrees(float()) -> float(). degrees(Radians) -> (180.0/math:pi())*Radians. %% @doc Determine the number of bits required (slices) to encompass a %% latitude and longitude range. geohash_bits(_, _, 0) -> 0; geohash_bits(DeltaLat, DeltaLon, Bits) -> case (delta_lat(Bits) < DeltaLat) or (delta_lon(Bits) < DeltaLon) of true -> geohash_bits(DeltaLat, DeltaLon, Bits-1); false -> Bits end. %% @doc Give the delta for a number of bits used in geohashing latitude delta_lat(Bits) -> 180.0 / math:pow(2, Bits / 2). %% @doc Give the delta for a number of bits used in geohashing longitude delta_lon(Bits) -> 360.0 / math:pow(2, (Bits + 1) / 2).
src/geohash.erl
0.678007
0.427815
geohash.erl
starcoder
%% %% %CopyrightBegin% %% %% Copyright Ericsson AB 2018-2022. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %% %% %CopyrightEnd% %% %% @doc Utility functions for easier measurement of scheduler utilization %% using erlang:statistics(scheduler_wall_time). -module(scheduler). -export([sample/0, get_sample/0, sample_all/0, get_sample_all/0, utilization/1, utilization/2]). -export_type([sched_sample/0]). -opaque sched_sample() :: {scheduler_wall_time | scheduler_wall_time_all, [{sched_type(), sched_id(), ActiveTime::integer(), TotalTime::integer()}]}. -type sched_type() :: normal | cpu | io. -type sched_id() :: integer(). -spec sample() -> sched_sample(). sample() -> sample(scheduler_wall_time). -spec sample_all() -> sched_sample(). sample_all() -> sample(scheduler_wall_time_all). sample(Stats) -> case erlang:statistics(Stats) of undefined -> erlang:system_flag(scheduler_wall_time, true), sample(Stats); List -> create_sample(Stats, List) end. -spec get_sample() -> sched_sample() | undefined. get_sample() -> get_sample(scheduler_wall_time). -spec get_sample_all() -> sched_sample() | undefined. get_sample_all() -> get_sample(scheduler_wall_time_all). get_sample(Stats) -> case erlang:statistics(Stats) of undefined -> undefined; List -> create_sample(Stats, List) end. create_sample(Stats, List) -> Sorted = lists:sort(List), Tagged = lists:map(fun({I, A, T}) -> {sched_tag(I), I, A, T} end, Sorted), {Stats, Tagged}. 
-type sched_util_result() :: [{sched_type(), sched_id(), float(), string()} | {total, float(), string()} | {weighted, float(), string()}]. -spec utilization(Seconds) -> sched_util_result() when Seconds :: pos_integer(); (Sample) -> sched_util_result() when Sample :: sched_sample(). utilization(Seconds) when is_integer(Seconds), Seconds > 0 -> _ = erlang:system_flag(scheduler_wall_time, true), T0 = sample(), receive after Seconds*1000 -> ok end, T1 = sample(), _ = erlang:system_flag(scheduler_wall_time, false), utilization(T0,T1); utilization({Stats, _}=T0) when Stats =:= scheduler_wall_time; Stats =:= scheduler_wall_time_all -> utilization(T0, sample(Stats)). -spec utilization(Sample1, Sample2) -> sched_util_result() when Sample1 :: sched_sample(), Sample2 :: sched_sample(). utilization({Stats, Ts0}, {Stats, Ts1}) -> Diffs = lists:map(fun({{Tag, I, A0, T0}, {Tag, I, A1, T1}}) -> {Tag, I, (A1 - A0), (T1 - T0)} end, lists:zip(Ts0,Ts1)), {Lst0, {A, T, N}} = lists:foldl(fun({Tag, I, Adiff, Tdiff}, {Lst, Acc}) -> R = safe_div(Adiff, Tdiff), {[{Tag, I, R, percent(R)} | Lst], acc(Tag, Adiff, Tdiff, Acc)} end, {[], {0, 0, 0}}, Diffs), Total = safe_div(A, T), Lst1 = lists:reverse(Lst0), Lst2 = case erlang:system_info(logical_processors_available) of unknown -> Lst1; LPA -> Weighted = Total * (N / LPA), [{weighted, Weighted, percent(Weighted)} | Lst1] end, [{total, Total, percent(Total)} | Lst2]; utilization({scheduler_wall_time, _}=T0, {scheduler_wall_time_all, Ts1}) -> utilization(T0, {scheduler_wall_time, remove_io(Ts1)}); utilization({scheduler_wall_time_all, Ts0}, {scheduler_wall_time, _}=T1) -> utilization({scheduler_wall_time, remove_io(Ts0)}, T1). %% Do not include dirty-io in totals acc(io, _, _, Acc) -> Acc; acc(Tag, Adiff, Tdiff, {Asum, Tsum, N}) when Tag =:= normal; Tag =:= cpu -> {Adiff+Asum, Tdiff+Tsum, N+1}. remove_io(Ts) -> lists:filter(fun({io,_,_,_}) -> false; (_) -> true end, Ts). safe_div(A, B) -> if B == 0.0 -> 0.0; true -> A / B end. 
sched_tag(Nr) -> Normal = erlang:system_info(schedulers), Cpu = Normal + erlang:system_info(dirty_cpu_schedulers), case Nr of _ when Nr =< Normal -> normal; _ when Nr =< Cpu -> cpu; _ -> io end. percent(F) -> float_to_list(F*100, [{decimals,1}]) ++ [$%].
lib/runtime_tools/src/scheduler.erl
0.674908
0.476397
scheduler.erl
starcoder
%% @author <NAME> <<EMAIL>> %% @copyright 2007 Mochi Media, Inc. %% @doc Case preserving (but case insensitive) HTTP Header dictionary. -module(mochiweb_headers). -author('<EMAIL>'). -export([from_list/1, insert/3, enter/3, get_value/2, lookup/2]). -export([delete_any/2, get_primary_value/2]). -export([default/3, enter_from_list/2, default_from_list/2]). -export([to_list/1, make/1, to_normalized_list/1]). -export([from_binary/1]). %% @type headers(). %% @type key() = atom() | binary() | string(). %% @type value() = atom() | binary() | string() | integer(). %% @spec make(headers() | [{key(), value()}]) -> headers() %% @doc Construct a headers() from the given list. make(L) when is_list(L) -> from_list(L); %% assume a non-list is already mochiweb_headers. make(T) -> T. %% @spec from_binary(iolist()) -> headers() %% @doc Transforms a raw HTTP header into a mochiweb headers structure. %% %% The given raw HTTP header can be one of the following: %% %% 1) A string or a binary representing a full HTTP header ending with %% double CRLF. %% Examples: %% ``` %% "Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n" %% <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>''' %% %% 2) A list of binaries or strings where each element represents a raw %% HTTP header line ending with a single CRLF. %% Examples: %% ``` %% [<<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">>] %% ["Content-Length: 47\r\n", "Content-Type: text/plain\r\n"] %% ["Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">>]''' %% from_binary(RawHttpHeader) when is_binary(RawHttpHeader) -> from_binary(RawHttpHeader, []); from_binary(RawHttpHeaderList) -> from_binary(list_to_binary([RawHttpHeaderList, "\r\n"])). from_binary(RawHttpHeader, Acc) -> case erlang:decode_packet(httph, RawHttpHeader, []) of {ok, {http_header, _, H, _, V}, Rest} -> from_binary(Rest, [{H, V} | Acc]); _ -> make(Acc) end. 
%% @spec from_list([{key(), value()}]) -> headers() %% @doc Construct a headers() from the given list. from_list(List) -> lists:foldl( fun({Key, Value}, Headers) -> insert(Key, Value, Headers) end, {0, dict:new()}, List ). %% @spec enter_from_list([{key(), value()}], headers()) -> headers() %% @doc Insert pairs into the headers, replace any values for existing keys. enter_from_list(List, T) -> lists:foldl(fun ({K, V}, T1) -> enter(K, V, T1) end, T, List). %% @spec default_from_list([{key(), value()}], headers()) -> headers() %% @doc Insert pairs into the headers for keys that do not already exist. default_from_list(List, T) -> lists:foldl(fun ({K, V}, T1) -> default(K, V, T1) end, T, List). %% @spec to_list(headers()) -> [{key(), string()}] %% @doc Return the contents of the headers. The keys will be the exact key %% that was first inserted (e.g. may be an atom or binary, case is %% preserved). to_list(Headers) -> Result = fold( fun(Key, Value, Acc) -> [{Key, Value} | Acc] end, [], Headers ), lists:reverse(Result). %% @spec to_normalized_list(headers()) -> [{key(), string()}] %% @doc Return the contents of the headers. The keys will be normalized to_normalized_list(Headers) -> Result = fold( fun(Key, Value, Acc) -> [{normalize(Key), Value} | Acc] end, [], Headers ), lists:reverse(Result). %% @spec get_value(key(), headers()) -> string() | undefined %% @doc Return the value of the given header using a case insensitive search. %% undefined will be returned for keys that are not present. get_value(K, T) -> case lookup(K, T) of {value, {_, V}} -> expand(V); none -> undefined end. %% @spec get_primary_value(key(), headers()) -> string() | undefined %% @doc Return the value of the given header up to the first semicolon using %% a case insensitive search. undefined will be returned for keys %% that are not present. get_primary_value(K, T) -> case get_value(K, T) of undefined -> undefined; V -> lists:takewhile(fun (C) -> C =/= $; end, V) end. 
%% @spec lookup(key(), headers()) -> {value, {key(), string()}} | none %% @doc Return the case preserved key and value for the given header using %% a case insensitive search. none will be returned for keys that are %% not present. lookup(K, {_, Headers}) -> case dict:find(normalize(K), Headers) of {ok, KVs} -> [{_, K0, _} | _] = KVs, {value, {K0, join_values(KVs)}}; error -> none end. %% @spec default(key(), value(), headers()) -> headers() %% @doc Insert the pair into the headers if it does not already exist. default(K, V, {N, Headers}) -> K1 = normalize(K), V1 = any_to_list(V), case dict:is_key(K1, Headers) of true -> {N, Headers}; false -> {N + 1, dict:store(K1, [{N, K, V1}], Headers)} end. %% @spec enter(key(), value(), headers()) -> headers() %% @doc Insert the pair into the headers, replacing any pre-existing key. enter(K, V, {N, Headers}) -> K1 = normalize(K), V1 = any_to_list(V), {N + 1, dict:store(K1, [{N, K, V1}], Headers)}. %% @spec insert(key(), value(), headers()) -> headers() %% @doc Insert the pair into the headers, merging with any pre-existing key. %% A merge is done with Value = V0 ++ ", " ++ V1. insert(K, V, {N, Headers}) -> K1 = normalize(K), V1 = any_to_list(V), Headers2 = dict:update( K1, fun (Old) -> merge(K1, {N, K, V1}, Old) end, [{N, K, V1}], Headers ), {N + 1, Headers2}. %% @spec delete_any(key(), headers()) -> headers() %% @doc Delete the header corresponding to key if it is present. delete_any(K, {N, Headers}=H) -> K1 = normalize(K), case dict:find(K1, Headers) of {ok, Values} -> Headers2 = dict:erase(K1, Headers), {N - length(Values), Headers2}; error -> H end. %% Internal API expand({array, L}) -> mochiweb_util:join(lists:reverse(L), ", "); expand(V) -> V. merge("set-cookie", V1, V0) -> V0 ++ [V1]; merge(_, V1, V0) -> [{N, K0, _}| _] = V0, [{N, K0, join_values(V0 ++ [V1])}]. join_values(L) -> mochiweb_util:join([V || {_, _, V} <- L], ", "). 
normalize(K) when is_list(K) -> string:to_lower(K); normalize(K) when is_atom(K) -> normalize(atom_to_list(K)); normalize(K) when is_binary(K) -> normalize(binary_to_list(K)). any_to_list(V) when is_list(V) -> V; any_to_list(V) when is_atom(V) -> atom_to_list(V); any_to_list(V) when is_binary(V) -> binary_to_list(V); any_to_list(V) when is_integer(V) -> integer_to_list(V). fold(Fun, Acc, {_, Headers}) -> Lines = dict:fold( fun(_, Value, Acc1) -> Value ++ Acc1 end, [], Headers ), do_fold(lists:sort(Lines), Fun, Acc). do_fold([{_, Key, Value} | Rest], Fun, Acc) -> do_fold(Rest, Fun, Fun(Key, Value, Acc)); do_fold([], _Fun, Acc) -> Acc. %% %% Tests. %% -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). make_test() -> Identity = make([{hdr, foo}]), ?assertEqual( Identity, make(Identity)). enter_from_list_test() -> H = make([{hdr, foo}]), ?assertEqual( [{hdr, "foo"}, {baz, "wibble"}], to_list(enter_from_list([{baz, wibble}], H))), ?assertEqual( [{hdr, "bar"}], to_list(enter_from_list([{hdr, bar}], H))), ok. default_from_list_test() -> H = make([{hdr, foo}]), ?assertEqual( [{hdr, "foo"}, {baz, "wibble"}], to_list(default_from_list([{baz, wibble}], H))), ?assertEqual( [{hdr, "foo"}], to_list(default_from_list([{hdr, bar}], H))), ok. get_primary_value_test() -> H = make([{hdr, foo}, {baz, <<"wibble;taco">>}]), ?assertEqual( "foo", get_primary_value(hdr, H)), ?assertEqual( undefined, get_primary_value(bar, H)), ?assertEqual( "wibble", get_primary_value(<<"baz">>, H)), ok. set_cookie_test() -> H = make([{"set-cookie", foo}, {"set-cookie", bar}, {"set-cookie", baz}]), ?assertEqual( [{"set-cookie", "foo"}, {"set-cookie", "bar"}, {"set-cookie", "baz"}], to_list(H)), ok. 
headers_test() -> H = ?MODULE:make([{hdr, foo}, {"Hdr", "bar"}, {'Hdr', 2}]), [{hdr, "foo, bar, 2"}] = ?MODULE:to_list(H), H1 = ?MODULE:insert(taco, grande, H), [{hdr, "foo, bar, 2"}, {taco, "grande"}] = ?MODULE:to_list(H1), H2 = ?MODULE:make([{"Set-Cookie", "foo"}]), [{"Set-Cookie", "foo"}] = ?MODULE:to_list(H2), H3 = ?MODULE:insert("Set-Cookie", "bar", H2), [{"Set-Cookie", "foo"}, {"Set-Cookie", "bar"}] = ?MODULE:to_list(H3), "foo, bar" = ?MODULE:get_value("set-cookie", H3), {value, {"Set-Cookie", "foo, bar"}} = ?MODULE:lookup("set-cookie", H3), undefined = ?MODULE:get_value("shibby", H3), none = ?MODULE:lookup("shibby", H3), H4 = ?MODULE:insert("content-type", "application/x-www-form-urlencoded; charset=utf8", H3), "application/x-www-form-urlencoded" = ?MODULE:get_primary_value( "content-type", H4), H4 = ?MODULE:delete_any("nonexistent-header", H4), H3 = ?MODULE:delete_any("content-type", H4), H5 = ?MODULE:make([{"ccc","444"}, {"aaa","123"}, {"bbb","321"}]), H5_Case = ?MODULE:make([{"CCC","444"}, {"Aaa","123"}, {"Bbb","321"}]), H5_List = [{"ccc","444"}, {"aaa","123"}, {"bbb","321"}], H5_List = ?MODULE:to_list(H5), H5_List = ?MODULE:to_normalized_list(H5_Case), HB = <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>, H_HB = ?MODULE:from_binary(HB), H_HB = ?MODULE:from_binary(binary_to_list(HB)), "47" = ?MODULE:get_value("Content-Length", H_HB), "text/plain" = ?MODULE:get_value("Content-Type", H_HB), L_H_HB = ?MODULE:to_list(H_HB), 2 = length(L_H_HB), true = lists:member({'Content-Length', "47"}, L_H_HB), true = lists:member({'Content-Type', "text/plain"}, L_H_HB), HL = [ <<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">> ], HL2 = [ "Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">> ], HL3 = [ <<"Content-Length: 47\r\n">>, "Content-Type: text/plain\r\n" ], H_HL = ?MODULE:from_binary(HL), H_HL = ?MODULE:from_binary(HL2), H_HL = ?MODULE:from_binary(HL3), "47" = ?MODULE:get_value("Content-Length", H_HL), "text/plain" = 
?MODULE:get_value("Content-Type", H_HL), L_H_HL = ?MODULE:to_list(H_HL), 2 = length(L_H_HL), true = lists:member({'Content-Length', "47"}, L_H_HL), true = lists:member({'Content-Type', "text/plain"}, L_H_HL), [] = ?MODULE:to_list(?MODULE:from_binary(<<>>)), [] = ?MODULE:to_list(?MODULE:from_binary(<<"">>)), [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n">>)), [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n\r\n">>)), [] = ?MODULE:to_list(?MODULE:from_binary("")), [] = ?MODULE:to_list(?MODULE:from_binary([<<>>])), [] = ?MODULE:to_list(?MODULE:from_binary([<<"">>])), [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n">>])), [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n\r\n">>])), ok. -endif.
src/mochiweb_headers.erl
0.62395
0.534552
mochiweb_headers.erl
starcoder
%% @doc This implements an http client using the Erlang built in http client. %% If desired, this could be abstracted to support arbitrary http clients that %% return `{ok, Status :: non_neg_integer(), Body :: binary(), Headers :: map()}' %% or `{error, Reasons :: [term()]}'. %% %% On errors, there will be a number of attempts with a delay equal to 100 %% milliseconds times the number of tries minus tries left. (So the first %% delay would be 100 milliseconds. The second delay would be 200 milliseconds, %% and so on...) %% @end -module(aws_credentials_httpc). -export([start/0, get/1, get/2]). -define(PROFILE, aws_credentials). -define(TIMEOUT, 10000). % 10 sec -define(CONNECT_TIMEOUT, 3000). % 3 sec -define(DEFAULT_TRIES, 3). -define(DELAY, 100). % 100 milliseconds start() -> inets:start(httpc, [{profile, ?PROFILE}]). %% @doc Attempt to get a URL with the 3 retries. 3 is the default. get(URL) -> get(URL, ?DEFAULT_TRIES). %% @doc Attempt to get a URL with a specified positive number of retries. %% (Minimum of 1.) %% %% Note this function may return `{ok, Results}' even if it was unable to %% successfully get the desired data. That is, it will return an %% ok tuple with a status code of 500 or 404 or some other HTTP error %% code and no data. -spec get( URL :: string(), Tries :: pos_integer() ) -> {ok, Status :: non_neg_integer(), Body :: binary(), Headers :: map() } | {error, Reasons :: [term()]}. get(URL, Tries) when is_list(URL) andalso is_integer(Tries) andalso Tries > 0 -> get(URL, Tries, Tries, []). get(_URL, _Tries, 0, Errs) -> {error, lists:reverse(Errs)}; get(URL, Tries, Remaining, Errs) -> case make_request(URL) of {ok, {{_HttpVer, Status, _Reason}, Headers, Body}} -> {ok, Status, Body, Headers}; Error -> NewRemaining = Remaining - 1, error_logger:error_msg("Error fetching URL (attempts left: " "~p of ~p) ~p: ~p.", [NewRemaining, Tries, URL, Error]), timer:sleep((Tries - NewRemaining)*?DELAY), get(URL, Tries, NewRemaining, [ Error | Errs ]) end. 
make_request(URL) -> httpc:request(get, {URL, []}, [{timeout, ?TIMEOUT}, {connect_timeout, ?CONNECT_TIMEOUT}], % HTTP options [{body_format, binary}], % options ?PROFILE).
src/aws_credentials_httpc.erl
0.605449
0.415966
aws_credentials_httpc.erl
starcoder
% @doc Bitstring editing and printing. % @end -module(grisp_bitmap). % API -export([set_bits/3]). -export([get_bits/3]). -export([set_bytes/3]). -export([get_bytes/3]). -export([pp/1]). -export([pp/2]). %--- Types --------------------------------------------------------------------- -type coding() :: bin | binary | dec | decimal | hex | hexadecimal | nib | nibble. %--- API ----------------------------------------------------------------------- % @doc Replace a part of a bitsting. % % === Example === % ``` % 5> grisp_bitmap:pp(grisp_bitmap:set_bits(<<2#00000000>>, 6, <<2#1:1>>), bin). % 00000010 % ok % ''' -spec set_bits(bitstring(),non_neg_integer(),bitstring()) -> bitstring(). set_bits(Bin, Start, Value) when bit_size(Bin) >= Start + bit_size(Value) -> Len = bit_size(Value), <<Prefix:Start/bitstring, _:Len/bitstring, Postfix/bitstring>> = Bin, <<Prefix/bitstring, Value/bitstring, Postfix/bitstring>>. % @doc Get a part of a bitstring. % % === Example === % ``` % 1> grisp_bitmap:get_bits(<<1,2,3,4,5>>, 6, 2). % <<1:2>> % ''' -spec get_bits(bitstring(),non_neg_integer(),non_neg_integer()) -> bitstring(). get_bits(Bin, Start, Len) when bit_size(Bin) >= Start + Len -> <<_:Start/bitstring, Bytes:Len/bitstring, _/bitstring>> = Bin, Bytes. % @equiv set_bits(Bin, Start * 8, Value) -spec set_bytes(binary(),non_neg_integer(),bitstring()) -> binary(). set_bytes(Bin, Start, Value) when byte_size(Bin) >= Start + byte_size(Value) -> set_bits(Bin, Start * 8, Value). % @equiv get_bits(Bin, Start * 8, Len * 8) -spec get_bytes(binary(),non_neg_integer(),non_neg_integer()) -> binary(). get_bytes(Bin, Start, Len) when byte_size(Bin) >= Start + Len -> get_bits(Bin, Start * 8, Len * 8). % @doc Print binary as hexadecimal numbers. % % === Example === % ``` % 2> grisp_bitmap:pp(<<16#f2, 17>>). % F2 11 % ok % ''' -spec pp(bitstring()) -> ok. pp(Bin) -> pp(Bin, #{}). % @doc Print binary as numbers. % % === Example === % ``` % 3> grisp_bitmap:pp(<<16#f2, 17>>, nib). 
% 1111 0010 0001 0001 % ok % 4> grisp_bitmap:pp(<<16#f2, 17>>, #{display => bin}). % 1111 0010 0001 0001 % ok % ''' -spec pp(bitstring(), coding() | #{display => coding()}) -> ok. pp(Bin, Display) when is_atom(Display) -> pp(Bin, #{display => Display}); pp(Bin, Opts) -> print_bitstring(Bin, normalize_opts(maps:merge(#{display => hex}, Opts))). %--- Internal ------------------------------------------------------------------ normalize_opts(Opts) -> maps:map(fun normalize_opt/2, Opts). normalize_opt(display, hexadecimal) -> hex; normalize_opt(display, binary) -> bin; normalize_opt(display, nibble) -> nib; normalize_opt(display, decimal) -> dec; normalize_opt(display, hex) -> hex; normalize_opt(display, bin) -> bin; normalize_opt(display, nib) -> nib; normalize_opt(display, dec) -> dec; normalize_opt(display, Other) -> error({invalid_option, display, Other}); normalize_opt(_Opt, Value) -> Value. print_bitstring(<<B:8/bitstring, Rest/bitstring>>, Opts) -> print_bits(B, Opts), io:format(" "), print_bitstring(Rest, Opts); print_bitstring(<<Rest/bitstring>>, Opts) -> print_bits(Rest, Opts), io:format("~n"). print_bits(Bits, Opts) -> Size = bit_size(Bits), print_bits(Size, Bits, Opts). print_bits(Size, Bits, #{display := hex}) -> <<Int:Size>> = Bits, io:format("~*.16.0B", [Size div 4, Int]); print_bits(Size, Bits, #{display := bin}) -> <<Int:Size>> = Bits, io:format("~*.2.0B", [Size, Int]); print_bits(_Size, <<N1:4, N2:4>>, #{display := nib}) -> io:format("~4.2.0B ~4.2.0B ", [N1, N2]); print_bits(_Size, <<Byte>>, #{display := dec}) -> io:format("~3.10.0B", [Byte]); print_bits(0, <<>>, _Opts) -> ok; print_bits(_Size, _Byte, _Opts) -> io:format("foo").
src/grisp_bitmap.erl
0.546012
0.414366
grisp_bitmap.erl
starcoder
% Licensed under the Apache License, Version 2.0 (the "License"); you may not % use this file except in compliance with the License. You may obtain a copy of % the License at % % http://www.apache.org/licenses/LICENSE-2.0 % % Unless required by applicable law or agreed to in writing, software % distributed under the License is distributed on an "AS IS" BASIS, WITHOUT % WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the % License for the specific language governing permissions and limitations under % the License. -module(couch_jobs_type_monitor). -export([ start/4 ]). -include("couch_jobs.hrl"). -record(st, { jtx, type, vs, parent, timestamp, holdoff, timeout }). start(Type, VS, HoldOff, Timeout) -> Parent = self(), spawn_link(fun() -> loop(#st{ jtx = couch_jobs_fdb:get_jtx(), type = Type, vs = VS, parent = Parent, timestamp = 0, holdoff = HoldOff, timeout = Timeout }) end). loop(#st{vs = VS, timeout = Timeout} = St) -> {St1, Watch} = case get_vs_and_watch(St) of {VS1, W} when VS1 =/= VS -> {notify(St#st{vs = VS1}), W}; {VS, W} -> {St, W} end, try erlfdb:wait(Watch, [{timeout, Timeout}]) catch error:{erlfdb_error, ?FUTURE_VERSION} -> erlfdb:cancel(Watch, [flush]), ok; error:{timeout, _} -> erlfdb:cancel(Watch, [flush]), ok end, loop(St1). notify(#st{} = St) -> #st{holdoff = HoldOff, parent = Pid, timestamp = Ts, vs = VS} = St, Now = erlang:system_time(millisecond), case Now - Ts of Dt when Dt < HoldOff -> timer:sleep(max(HoldOff - Dt, 0)); _ -> ok end, Pid ! {type_updated, VS}, St#st{timestamp = Now}. get_vs_and_watch(#st{jtx = JTx, type = Type}) -> couch_jobs_fdb:tx(JTx, fun(JTx1) -> couch_jobs_fdb:get_activity_vs_and_watch(JTx1, Type) end).
src/couch_jobs/src/couch_jobs_type_monitor.erl
0.589953
0.460713
couch_jobs_type_monitor.erl
starcoder
-module(autocluster_periodic). %% @doc %% Manages periodic activities in a centralized fashion. TRefs from %% {@link timer:apply_interval/4} are stored to ETS along with the %% provided identifier, so you can later stop the activity using only %% that identifier (withouth knowing timer ref). %% %% Initial plan was to go even further, and make this into supervisor %% with a bunch of dynamic gen_server childs - to improve visibility %% of this periodic processes through tools like {@link //observer}. %% But as everything happens during broker startup, we don't have any %% stably running application at that time. %% %% @end %% API -export([start_immediate/3 ,start_delayed/3 ,stop/1 ,stop_all/0 ]). -define(TABLE, ?MODULE). %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% API %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% @doc %% Adds periodic activity with a given id. MFA is invoked immediately %% after addition. %% @end -spec start_immediate(term(), pos_integer(), {module(), atom(), term()}) -> ok. start_immediate(Id, Interval, {M, F, A} = MFA) -> _ = (catch apply(M, F, A)), start_delayed(Id, Interval, MFA). %% @doc %% Adds periodic activity with a given id. MFA is invoked only after %% given interval of time has been passed. %% @end -spec start_delayed(term(), pos_integer(), {module(), atom(), term()}) -> ok. start_delayed(Id, Interval, {M, F, A}) -> ensure_ets_table(), {ok, TRef} = timer:apply_interval(Interval, M, F, A), true = ets:insert_new(?TABLE, {Id, TRef}), ok. %% @doc %% Stops periodic activity previously added with {@link %% start_immediate/3} or {@link start_delayed/3}. %% @end -spec stop(term()) -> ok | {error, atom()}. stop(Id) -> case ets:lookup(?TABLE, Id) of [{Id, TRef}] -> _ = timer:cancel(TRef), ets:delete(?TABLE, Id); [] -> {error, not_running} end. %% @doc %% Stops all periodic activities registered using this module. Useful %% only during testing. 
%% @end -spec stop_all() -> ok. stop_all() -> ensure_ets_table(), Timers = ets:match(?TABLE, '$1'), lists:foreach(fun([{_Id, TRef}]) -> timer:cancel(TRef) end, Timers), ets:delete_all_objects(?TABLE), ok. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% Helpers %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% ensure_ets_table() -> case ets:info(?TABLE) of undefined -> _ = ets:new(?TABLE, [public, named_table]), ok; _ -> ok end, ok.
src/autocluster_periodic.erl
0.634883
0.49048
autocluster_periodic.erl
starcoder
% Licensed under the Apache License, Version 2.0 (the "License"); you may not % use this file except in compliance with the License. You may obtain a copy of % the License at % % http://www.apache.org/licenses/LICENSE-2.0 % % Unless required by applicable law or agreed to in writing, software % distributed under the License is distributed on an "AS IS" BASIS, WITHOUT % WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the % License for the specific language governing permissions and limitations under % the License. -module(couch_jobs_activity_monitor). -behaviour(gen_server). -export([ start_link/1 ]). -export([ init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, code_change/3 ]). -record(st, { jtx, type, tref, timeout = 0, vs = not_found }). -define(MAX_JITTER_DEFAULT, 10000). -define(MISSING_TIMEOUT_CHECK, 5000). start_link(Type) -> gen_server:start_link(?MODULE, [Type], []). %% gen_server callbacks init([Type]) -> St = #st{jtx = couch_jobs_fdb:get_jtx(), type = Type}, {ok, schedule_check(St)}. terminate(_, _St) -> ok. handle_call(Msg, _From, St) -> {stop, {bad_call, Msg}, {bad_call, Msg}, St}. handle_cast(Msg, St) -> {stop, {bad_cast, Msg}, St}. handle_info(check_activity, St) -> St1 = try check_activity(St) catch error:{erlfdb_error, Err} when Err =:= 1020 orelse Err =:= 1031 -> LogMsg = "~p : type:~p got ~p error, possibly from overload", couch_log:error(LogMsg, [?MODULE, St#st.type, Err]), St end, St2 = schedule_check(St1), {noreply, St2}; handle_info({Ref, ready}, St) when is_reference(Ref) -> % Don't crash out couch_jobs_server and the whole application would need to % eventually do proper cleanup in erlfdb:wait timeout code. LogMsg = "~p : spurious erlfdb future ready message ~p", couch_log:error(LogMsg, [?MODULE, Ref]), {noreply, St}; handle_info(Msg, St) -> {stop, {bad_info, Msg}, St}. code_change(_OldVsn, St, _Extra) -> {ok, St}. 
% Private helper functions check_activity(#st{jtx = JTx, type = Type, vs = not_found} = St) -> NewVS = couch_jobs_fdb:tx(JTx, fun(JTx1) -> couch_jobs_fdb:get_activity_vs(JTx1, Type) end), St#st{vs = NewVS}; check_activity(#st{jtx = JTx, type = Type, vs = VS} = St) -> NewVS = couch_jobs_fdb:tx(JTx, fun(JTx1) -> NewVS = couch_jobs_fdb:get_activity_vs(JTx1, Type), JobIds = couch_jobs_fdb:get_inactive_since(JTx1, Type, VS), couch_jobs_fdb:re_enqueue_inactive(JTx1, Type, JobIds), NewVS end), St#st{vs = NewVS}. get_timeout_msec(JTx, Type) -> TimeoutVal = couch_jobs_fdb:tx(JTx, fun(JTx1) -> couch_jobs_fdb:get_type_timeout(JTx1, Type) end), case TimeoutVal of not_found -> not_found; ValSeconds -> timer:seconds(ValSeconds) end. schedule_check(#st{jtx = JTx, type = Type, timeout = OldTimeout} = St) -> % Reset versionstamp if timeout changed. St1 = case get_timeout_msec(JTx, Type) of not_found -> St#st{vs = not_found, timeout = ?MISSING_TIMEOUT_CHECK}; OldTimeout -> St; NewTimeout -> St#st{vs = not_found, timeout = NewTimeout} end, #st{timeout = Timeout} = St1, MaxJitter = min(Timeout div 2, get_max_jitter_msec()), Wait = Timeout + rand:uniform(max(1, MaxJitter)), St1#st{tref = erlang:send_after(Wait, self(), check_activity)}. get_max_jitter_msec()-> config:get_integer("couch_jobs", "activity_monitor_max_jitter_msec", ?MAX_JITTER_DEFAULT).
src/couch_jobs/src/couch_jobs_activity_monitor.erl
0.604399
0.421909
couch_jobs_activity_monitor.erl
starcoder
%%%------------------------------------------------------------------- %% @doc %% This module models the LWT SubDAO contract %% In its state it maintains: %% %% - Nonce of synced state with LWT chain %% %% - Holders of LWT tokens %% %% - Staked validator information %% %% - How much HNT needs to be burned next time the HNT contract is updated %% %% - Pending operations for the LWT chain %% %% @end %%%------------------------------------------------------------------- -module(lwt_contract). -behaviour(gen_server). -include("util.hrl"). -record(state, { nonce = 0, holders = #{}, %% val_address => owner validators = #{}, %% some stack of pending operations we need to do to the l2 pending_operations = [], chain_ht = 0 }). -export([init/1, handle_info/2, handle_cast/2, handle_call/3]). -export([start_link/1]). -export([transfer/3, convert_to_hnt/2]). -export([oracle/0, update_from_chain/6]). -export([stake_validator/2, unstake_validator/2]). -export([state/0]). %% @private start_link(LWTHolders) -> gen_server:start_link({local, ?MODULE}, ?MODULE, [LWTHolders], []). %% @doc Get LWT contract state state() -> gen_server:call(?MODULE, state, infinity). %% @doc Transfer `Amount' LWTs from `Payer' to `Payee'. transfer(Payer, Payee, Amount) -> gen_server:call(?MODULE, {transfer, Payer, Payee, Amount}, infinity). %% @doc Convert `Amount' of `Payer''s LWT into HNT at the HNT-LWT exchange rate. convert_to_hnt(Payer, Amount) -> %% essentially we destroy some LWT and then send some of the HNT this contract %% controls to the Payer's address via the hnt contract api gen_server:call(?MODULE, {convert, Payer, Amount}, infinity). %% @doc Function to get the state of the LWT contract. oracle() -> %% get the nonce and the l2 pending operations stack %% multiple attempts at oracling may give longer lists of pending operations %% but we can simply select the longest common prefix gen_server:call(?MODULE, oracle, infinity). 
stake_validator(Owner, ValidatorAddress) -> gen_server:call(?MODULE, {stake_validator, Owner, ValidatorAddress}, infinity). unstake_validator(Owner, ValidatorAddress) -> gen_server:call(?MODULE, {unstake_validator, Owner, ValidatorAddress}, infinity). update_from_chain(Nonce, OpCount, RewardShares, Power, ChainHt, ChainValidators) -> gen_server:call( ?MODULE, {update, Nonce, OpCount, RewardShares, Power, ChainHt, ChainValidators}, infinity ). %% @private init([LWTHolders]) -> {ok, #state{holders = LWTHolders}}. %% @private handle_info(_Any, State) -> {noreply, State}. %% @private handle_cast(_Any, State) -> {noreply, State}. %% @private handle_call( state, _From, State = #state{holders = Holders, validators = Validators, chain_ht = ChainHt, nonce = Nonce} ) -> Reply = #{holders => Holders, validators => Validators, chain_ht => ChainHt, nonce => Nonce}, {reply, {ok, Reply}, State}; handle_call( {stake_validator, Owner, ValidatorAddress}, _From, State = #state{holders = Holders} ) -> case lists:member(ValidatorAddress, maps:keys(State#state.validators)) of true -> throw({reply, {error, already_staked}, State}); false -> %% NOTE: %% - Immediate debit of LWT stake to prevent potential spending resulting %% in the stake being considered invalid %% - Also add the stake_validator instruction to pending_operations list case maps:get(Owner, Holders, 0) of OwnerLWT when OwnerLWT > ?ValidatorCost -> NewHolders = debit(Owner, ?ValidatorCost, Holders), %NewValidators = add_validator(ValidatorAddress, Owner, Validators), NewPendingOps = State#state.pending_operations ++ [{stake_validator, Owner, ValidatorAddress}], {reply, ok, State#state{ pending_operations = NewPendingOps, holders = NewHolders % validators = NewValidators }}; _ -> throw({reply, {error, insufficient_staking_balance}, State}) end end; handle_call({unstake_validator, Owner, ValidatorAddress}, _From, State) -> case lists:member(ValidatorAddress, maps:keys(State#state.validators)) of false -> throw({reply, {error, 
unknown_validator}, State}); true -> case maps:get(ValidatorAddress, State#state.validators) of Owner -> %% NOTE: At this point %% - This owner is allowed to unstake %% - Add the unstake_validator instruction to pending_operations list %% - Remove the validator from our state %% - Do NOT immediately return the stake, wait for the unstake_validator %% operation to succeed before crediting stake back to the owner NewPendingOps = State#state.pending_operations ++ [{unstake_validator, Owner, ValidatorAddress}], {reply, ok, State#state{ pending_operations = NewPendingOps }}; _ -> throw({reply, {error, incorrect_owner}, State}) end end; handle_call({transfer, Payer, Payee, Amt}, _From, State) when Amt > 0 -> PayerHolding = maps:get(Payer, State#state.holders, 0), case PayerHolding =< Amt of %% cannot zero out payer on transfer true -> throw({reply, {error, insufficient_transfer_balance}, State}); false -> NewHolders = credit(Payee, Amt, debit(Payer, Amt, State#state.holders)), {reply, ok, State#state{holders = NewHolders}} end; handle_call({convert, Payer, Amount}, _From, State) -> case maps:get(Payer, State#state.holders, 0) >= Amount of false -> throw({reply, {error, insufficient_balance}, State}); true -> ok end, NewHolders = debit(Payer, Amount, State#state.holders), %% module is a lazy identifier for this contract, would be a pubkey normally ok = hnt_contract:transfer_hnt(?MODULE, Payer, Amount div ?HNT_TO_LWT_RATE), {reply, ok, State#state{holders = NewHolders}}; handle_call(oracle, _From, State) -> {reply, {ok, {State#state.nonce, State#state.pending_operations}}, State}; handle_call( {update, Nonce, OpCount, RewardShares, Power, ChainHt, ChainValidators}, _From, State = #state{nonce = Nonce, pending_operations = Ops} ) -> lager:debug("LWT got an update msg, Current Holders: ~p", [State#state.holders]), {ok, HNT, BurnHNT} = hnt_contract:update_from_l2(?MODULE, Nonce, Power), %% ok, we got some HNT, now we need to convert that to LWT and disburse it according to 
the reward shares LWT = HNT * ?HNT_TO_LWT_RATE, TotalShares = maps:fold( fun(_K, V, Acc) -> Acc + V end, 0, RewardShares ), RewardShare = LWT / TotalShares, NewHolders0 = maps:fold( fun(K, V, Acc) -> credit(K, trunc(V * RewardShare), Acc) end, State#state.holders, RewardShares ), lager:debug("New Holders: ~p", [NewHolders0]), %% ok, we may have some HNT for burning to LWT-DC, add this to pending_operations DCBurns = maps:fold( fun(Key, Amount, Acc) -> %% XXX: This conversion may be incorrect LWTDC = util:hnt_to_dc(Amount) * ?HNT_TO_LWT_RATE, [{lwt_dc, Key, LWTDC} | Acc] end, [], BurnHNT ), lager:debug("New dc_burns: ~p", [DCBurns]), UnstakedValidators = maps:keys(State#state.validators) -- maps:keys(ChainValidators), lager:debug("UnstakedValidators: ~p", [UnstakedValidators]), NewHolders = lists:foldl( fun(ValidatorAddress, HAcc) -> Owner = maps:get(ValidatorAddress, State#state.validators), lager:debug("Crediting owner: ~p for unstaking: ~p", [Owner, ValidatorAddress]), credit(Owner, ?ValidatorCost, HAcc) end, NewHolders0, UnstakedValidators ), lager:debug("Ops ~p, OpCount ~p", [Ops, OpCount]), NewPendingOps = lists:sublist(Ops, OpCount + 1, length(Ops)), lager:debug("new pending ops ~p", [NewPendingOps]), %% Now we need to remove the first `OpCount' operations from our pending operations stack, zero out our burns %% and increment our nonce {reply, ok, State#state{ nonce = Nonce + 1, pending_operations = NewPendingOps ++ DCBurns, holders = NewHolders, chain_ht = ChainHt, validators = ChainValidators }}. debit(Key, Amount, Map) -> maps:update_with(Key, fun(V) -> V - Amount end, Map). credit(Key, Amount, Map) -> maps:update_with(Key, fun(V) -> V + Amount end, Amount, Map).
src/lwt_contract.erl
0.545044
0.411584
lwt_contract.erl
starcoder
%%%------------------------------------------------------------------- %%% @author <NAME> <<EMAIL>> %%% @copyright (C) 2014, <NAME> %%% @doc %%% Generic utility functions that are independent from the %%% problem context %%% @end %%% Created : 10 Feb 2014 by <NAME> %%%------------------------------------------------------------------- -module(utils). -export([to_one_list/1, remount_list/2, bt_mapfold_list_of_lists/3, bt_mapfold/3]). %%% @doc %%% Transforms a list of lists into a flattened list, it also returns %%% a list with the sizes of the original sublists so that the list %%% of lists can be restored. This function is designed to let you %%% use functions that work with lists on lists of lists. %%% The original list of lists can be restored by using {@link remount_list/2} %%% on the result. %%% @param ListOfLists the original list of lists. All the elements of the %%% outer list must be lists. %%% @return a flattened list and a list of sizes. %%% @see remount_list/2 -spec to_one_list(ListOfLists :: [[Elem]]) -> {Sizes :: [integer()], List :: [Elem]} when Elem :: term(). to_one_list(ListOfLists) -> {lists:append(ListOfLists), lists:map(fun erlang:length/1, ListOfLists)}. %%% @doc %%% Transforms a list into a lists of lists by splitting %%% it as specified by the list of sizes provided. Designed to be %%% used in conjunction with {@link to_one_list/1}. %%% @param Sizes a list of the sizes that the sublists of the resulting %%% list of lists must have. The sum of the sizes of the sizes must be %%% equal to the length of the list. %%% @param List the list with the elements to use. %%% @return a list of lists with the elements of List. %%% @see to_one_list/1 -spec remount_list(Sizes :: [integer()], List :: [Elem]) -> ListOfLists :: [[Elem]]. remount_list([], []) -> []; remount_list([Num|RestN], List) -> {FirstPiece, SecondPiece} = lists:split(Num, List), [FirstPiece|remount_list(RestN, SecondPiece)]. 
%%% @doc %%% Works like {@link bt_mapfold/3}, but expects a list of lists. %%% @param Fun the function to fold %%% @param Acc0 the initial value of the accumulator %%% @param ListOfLists1 the target list of lists %%% @return a tuple with the updated list of lists and the last accumulator %%% @see bt_mapfold/3 -spec bt_mapfold_list_of_lists(Fun, Acc0 :: Acc, ListOfLists1 :: [[A]]) -> {ListOfLists2 :: [[B]], Acc1 :: Acc} when Fun :: fun((A, AccIn :: Acc) -> {OpRes, B, AccOut :: Acc}), A :: term(), B :: term(), Acc :: term(), OpRes :: 'success' | 'failure' | 'finished'. bt_mapfold_list_of_lists(Fun, Acc0, ListOfLists) -> {OneList, Sizes} = to_one_list(ListOfLists), {ResOneList, AccF} = bt_mapfold(Fun, Acc0, OneList), {remount_list(Sizes, ResOneList), AccF}. %%% @doc %%% Based on standard mapfold but implements backtracking %%% and shortcutting. It is intended to simplify search/matching %%% functions. The function provided must return %%% an extra value, (OpRes), in addition to the standard %%% values in mapfold (accumulator Acc and transformed element B) %%% OpRes may be one of the following: %%% <ul> %%% <li><code>success</code> - There was a match</li> %%% <li><code>failure</code> - There was not a match</li> %%% <li><code>finished</code> - There was a match and there will not be more</li> %%% </ul> %%% In addition to the original mapfold behaviour, bt_mapfold %%% will "backtrack" if the end of the list is found and %%% the atom <code>finished</code> was not returned yet. %%% The whole state is saved in a stack whenever the atom <code>success</code> %%% is returned, and these states are retrieved from the stack when %%% backtracking. After backtracking, one of the originally successful function %%% application will be skipped under the assumption that maybe that success %%% was producing some of the later failures. 
%%% In addition, if at any moment the atom <code>finished</code> is returned, %%% the execution will be terminated, and both the list and the accumulator %%% will be returned as they are. %%% @param Fun the function to fold %%% @param Acc0 the initial value of the accumulator %%% @param List1 the target list %%% @return a tuple with the updated list and the last accumulator -spec bt_mapfold(Fun, Acc0 :: Acc, List1 :: [A]) -> {List2 :: [B], Acc1 :: Acc} when Fun :: fun((A, AccIn :: Acc) -> {OpRes, B, AccOut :: Acc}), A :: term(), B :: term(), Acc :: term(), OpRes :: 'success' | 'failure' | 'finished'. bt_mapfold(Fun, Acc0, List1) -> bt_mapfold(Fun, Acc0, List1, [], []). bt_mapfold(Fun, Acc0, [H0|Tail], List, BackTrackingList) -> case Fun(H0, Acc0) of {success, H1, Acc1} -> bt_mapfold(Fun, Acc1, Tail, [H1|List], [{Fun, Acc0, Tail, [H0|List]} |BackTrackingList]); {failure, H1, Acc1} -> bt_mapfold(Fun, Acc1, Tail, [H1|List], BackTrackingList); {finished, H1, Acc1} -> {lists:reverse(List) ++ [H1|Tail], Acc1} end; bt_mapfold(_, AccF, [], List, []) -> {lists:reverse(List), AccF}; bt_mapfold(_, _, [], _, [{Fun, Acc0, ListOri, ListDest}|BackTrackingList]) -> bt_mapfold(Fun, Acc0, ListOri, ListDest, BackTrackingList).
src/symbolic-execution/utils.erl
0.63273
0.678307
utils.erl
starcoder
%%%------------------------------------------------------------------- %%% @doc %%% This module encapsulates the partitioned table metadata. %%% %%% Different properties must be stored somewhere so `shards' %%% can work properly. Shards perform logic on top of ETS tables, %%% for example, compute the partition based on the `Key' where %%% the action will be applied. To do so, it needs the number of %%% partitions, the function to select the partition, and also the %%% partition identifier to perform the ETS action. %%% @end %%%------------------------------------------------------------------- -module(shards_meta). %% API -export([ new/0, from_map/1, to_map/1, is_metadata/1, init/2, rename/2, lookup/2, put/3, get/1, get/2, get/3, get_partition_tids/1, get_partition_pids/1 ]). %% API – Getters -export([ tab_pid/1, keypos/1, partitions/1, keyslot_fun/1, parallel/1, parallel_timeout/1, ets_opts/1 ]). %% Inline-compiled functions -compile({inline, [ lookup/2, put/3, get/1, get_partition_tids/1, get_partition_pids/1 ]}). %%%=================================================================== %%% Types & Macros %%%=================================================================== %% Default number of partitions -define(PARTITIONS, erlang:system_info(schedulers_online)). %% @type partition_tids() = [{non_neg_integer(), ets:tid()}]. %% %% Defines a tuple-list with the partition number and the ETS TID. -type partition_tids() :: [{non_neg_integer(), ets:tid()}]. %% @type partition_pids() = [{non_neg_integer(), pid()}]. %% %% Defines a tuple-list with the partition number and the partition owner PID. -type partition_pids() :: [{non_neg_integer(), pid()}]. %% @type keyslot_fun() = fun((Key :: term(), Range :: pos_integer()) -> non_neg_integer()). %% %% Defines spec function to pick or compute the partition and/or node. %% The function returns a value for `Key' within the range `0..Range-1'. 
-type keyslot_fun() :: fun((Key :: term(), Range :: pos_integer()) -> non_neg_integer()). %% Metadata definition -record(meta, { tab_pid = undefined :: pid() | undefined, keypos = 1 :: pos_integer(), partitions = ?PARTITIONS :: pos_integer(), keyslot_fun = fun erlang:phash2/2 :: keyslot_fun(), parallel = false :: boolean(), parallel_timeout = infinity :: timeout(), ets_opts = [] :: [term()] }). %% @type t() = #meta{}. %% %% Defines `shards' metadata. -type t() :: #meta{}. %% @type meta_map() = #{ %% tab_pid => pid(), %% keypos => pos_integer(), %% partitions => pos_integer(), %% keyslot_fun => keyslot_fun(), %% parallel => boolean(), %% parallel_timeout => timeout(), %% ets_opts => [term()] %% }. %% %% Defines the map representation for the metadata data type. -type meta_map() :: #{ tab_pid => pid(), keypos => pos_integer(), partitions => pos_integer(), keyslot_fun => keyslot_fun(), parallel => boolean(), parallel_timeout => timeout(), ets_opts => [term()] }. %% Exported types -export_type([ t/0, keyslot_fun/0, meta_map/0 ]). %%%=================================================================== %%% API %%%=================================================================== %% @doc %% Returns a metadata data type with the default values. %% @end -spec new() -> t(). new() -> #meta{}. %% @doc %% Builds a new `meta' from the given `Map'. %% @end -spec from_map(Map :: #{atom() => term()}) -> t(). from_map(Map) -> #meta{ tab_pid = maps:get(tab_pid, Map, self()), keypos = maps:get(keypos, Map, 1), partitions = maps:get(partitions, Map, ?PARTITIONS), keyslot_fun = maps:get(keyslot_fun, Map, fun erlang:phash2/2), parallel = maps:get(parallel, Map, false), parallel_timeout = maps:get(parallel_timeout, Map, infinity), ets_opts = maps:get(ets_opts, Map, []) }. %% @doc %% Converts the given `Meta' into a `map'. %% @end -spec to_map(t()) -> meta_map(). 
to_map(Meta) -> #{ tab_pid => Meta#meta.tab_pid, keypos => Meta#meta.keypos, partitions => Meta#meta.partitions, keyslot_fun => Meta#meta.keyslot_fun, parallel => Meta#meta.parallel, parallel_timeout => Meta#meta.parallel_timeout, ets_opts => Meta#meta.ets_opts }. %% @doc %% Returns `true' if `Meta' is a metadata data type, otherwise, %% `false' is returned. %% @end -spec is_metadata(Meta :: term()) -> boolean(). is_metadata(#meta{}) -> true; is_metadata(_) -> false. %% @doc %% Initializes the metadata ETS table. %% @end -spec init(Name, Opts) -> Tab when Name :: atom(), Opts :: [shards:option()], Tab :: shards:tab(). init(Tab, Opts) -> ExtraOpts = case lists:member(named_table, Opts) of true -> [named_table]; false -> [] end, ets:new(Tab, [set, public, {read_concurrency, true}] ++ ExtraOpts). %% @doc %% Renames the metadata ETS table. %% @end -spec rename(Tab, Name) -> Name when Tab :: shards:tab(), Name :: atom(). rename(Tab, Name) -> ets:rename(Tab, Name). %% @doc %% Returns the value associated to the key `Key' in the metadata table `Tab'. %% If `Key' is not found, the error `{unknown_table, Tab}' is raised. %% @end -spec lookup(Tab, Key) -> term() when Tab :: shards:tab(), Key :: term(). lookup(Tab, Key) -> try ets:lookup_element(Tab, Key, 2) catch error:badarg -> error({unknown_table, Tab}) end. %% @doc %% Stores the value `Val' under the given key `Key' into the metadata table %% `Tab'. %% @end -spec put(Tab, Key, Val) -> ok when Tab :: shards:tab(), Key :: term(), Val :: term(). put(Tab, Key, Val) -> true = ets:insert(Tab, {Key, Val}), ok. %% @doc %% Returns the `tab_info' within the metadata. %% @end -spec get(Tab :: shards:tab()) -> t() | no_return(). get(Tab) -> lookup(Tab, '$tab_info'). %% @equiv get(Tab, Key, undefined) get(Tab, Key) -> get(Tab, Key, undefined). %% @doc %% Returns the value for the given `Key' in the metadata, %% or `Def' if `Key' is not set. 
%% @end -spec get(Tab, Key, Def) -> Val when Tab :: shards:tab(), Key :: term(), Def :: term(), Val :: term(). get(Tab, Key, Def) -> try case ets:lookup(Tab, Key) of [{Key, Val}] -> Val; [] -> Def end catch error:badarg -> error({unknown_table, Tab}) end. %% @doc %% Returns a list with the partition TIDs. %% @end -spec get_partition_tids(Tab :: shards:tab()) -> partition_tids(). get_partition_tids(Tab) -> partitions_info(Tab, tid). %% @doc %% Returns a list with the partition PIDs. %% @end -spec get_partition_pids(Tab :: shards:tab()) -> partition_pids(). get_partition_pids(Tab) -> partitions_info(Tab, pid). %% @private partitions_info(Tab, KeyPrefix) -> try ets:select(Tab, [{{{KeyPrefix, '$1'}, '$2'}, [], [{{'$1', '$2'}}]}]) catch error:badarg -> error({unknown_table, Tab}) end. %%%=================================================================== %%% Getters %%%=================================================================== -spec tab_pid(t() | shards:tab()) -> pid(). tab_pid(#meta{tab_pid = Value}) -> Value; tab_pid(Tab) when is_atom(Tab); is_reference(Tab) -> tab_pid(?MODULE:get(Tab)). -spec keypos(t() | shards:tab()) -> pos_integer(). keypos(#meta{keypos = Value}) -> Value; keypos(Tab) when is_atom(Tab); is_reference(Tab) -> keypos(?MODULE:get(Tab)). -spec partitions(t() | shards:tab()) -> pos_integer(). partitions(#meta{partitions = Value}) -> Value; partitions(Tab) when is_atom(Tab); is_reference(Tab) -> partitions(?MODULE:get(Tab)). -spec keyslot_fun(t() | shards:tab()) -> keyslot_fun(). keyslot_fun(#meta{keyslot_fun = Value}) -> Value; keyslot_fun(Tab) when is_atom(Tab); is_reference(Tab) -> keyslot_fun(?MODULE:get(Tab)). -spec parallel(t() | shards:tab()) -> boolean(). parallel(#meta{parallel = Value}) -> Value; parallel(Tab) when is_atom(Tab); is_reference(Tab) -> parallel(?MODULE:get(Tab)). -spec parallel_timeout(t() | shards:tab()) -> timeout(). 
parallel_timeout(#meta{parallel_timeout = Value}) -> Value; parallel_timeout(Tab) when is_atom(Tab); is_reference(Tab) -> parallel_timeout(?MODULE:get(Tab)). -spec ets_opts(t() | shards:tab()) -> [term()]. ets_opts(#meta{ets_opts = Value}) -> Value; ets_opts(Tab) when is_atom(Tab); is_reference(Tab) -> ets_opts(?MODULE:get(Tab)).
src/shards_meta.erl
0.569613
0.414395
shards_meta.erl
starcoder
%%-------------------------------------------------------------------- %% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_misc_SUITE). -compile(export_all). -compile(nowarn_export_all). -include_lib("eunit/include/eunit.hrl"). -define(SOCKOPTS, [binary, {packet, raw}, {reuseaddr, true}, {backlog, 512}, {nodelay, true} ]). all() -> emqx_ct:all(?MODULE). t_merge_opts(_) -> Opts = emqx_misc:merge_opts(?SOCKOPTS, [raw, binary, {backlog, 1024}, {nodelay, false}, {max_clients, 1024}, {acceptors, 16} ]), ?assertEqual(1024, proplists:get_value(backlog, Opts)), ?assertEqual(1024, proplists:get_value(max_clients, Opts)), ?assertEqual([binary, raw, {acceptors, 16}, {backlog, 1024}, {max_clients, 1024}, {nodelay, false}, {packet, raw}, {reuseaddr, true}], lists:sort(Opts)). t_maybe_apply(_) -> ?assertEqual(undefined, emqx_misc:maybe_apply(fun(A) -> A end, undefined)), ?assertEqual(a, emqx_misc:maybe_apply(fun(A) -> A end, a)). t_run_fold(_) -> ?assertEqual(1, emqx_misc:run_fold([], 1, state)), Add = fun(I, St) -> I+St end, Mul = fun(I, St) -> I*St end, ?assertEqual(6, emqx_misc:run_fold([Add, Mul], 1, 2)). 
t_pipeline(_) -> ?assertEqual({ok, input, state}, emqx_misc:pipeline([], input, state)), Funs = [fun(_I, _St) -> ok end, fun(_I, St) -> {ok, St+1} end, fun(I, St) -> {ok, I+1, St+1} end, fun(I, St) -> {ok, I*2, St*2} end], ?assertEqual({ok, 4, 6}, emqx_misc:pipeline(Funs, 1, 1)), ?assertEqual({error, undefined, 1}, emqx_misc:pipeline([fun(_I) -> {error, undefined} end], 1, 1)), ?assertEqual({error, undefined, 2}, emqx_misc:pipeline([fun(_I, _St) -> {error, undefined, 2} end], 1, 1)). t_start_timer(_) -> TRef = emqx_misc:start_timer(1, tmsg), timer:sleep(2), ?assertEqual([{timeout, TRef, tmsg}], drain()), ok = emqx_misc:cancel_timer(TRef). t_cancel_timer(_) -> Timer = emqx_misc:start_timer(0, foo), ok = emqx_misc:cancel_timer(Timer), ?assertEqual([], drain()), ok = emqx_misc:cancel_timer(undefined). t_proc_name(_) -> ?assertEqual(emqx_pool_1, emqx_misc:proc_name(emqx_pool, 1)). t_proc_stats(_) -> Pid1 = spawn(fun() -> exit(normal) end), timer:sleep(10), ?assertEqual([], emqx_misc:proc_stats(Pid1)), Pid2 = spawn(fun() -> ?assertMatch([{mailbox_len, 0}|_], emqx_misc:proc_stats()), timer:sleep(200) end), timer:sleep(10), Pid2 ! msg, timer:sleep(10), ?assertMatch([{mailbox_len, 1}|_], emqx_misc:proc_stats(Pid2)). t_drain_deliver(_) -> self() ! {deliver, t1, m1}, self() ! {deliver, t2, m2}, ?assertEqual([{deliver, t1, m1}, {deliver, t2, m2} ], emqx_misc:drain_deliver(2)). t_drain_down(_) -> {Pid1, _Ref1} = erlang:spawn_monitor(fun() -> ok end), {Pid2, _Ref2} = erlang:spawn_monitor(fun() -> ok end), timer:sleep(100), ?assertEqual([Pid1, Pid2], emqx_misc:drain_down(2)), ?assertEqual([], emqx_misc:drain_down(1)). t_index_of(_) -> try emqx_misc:index_of(a, []) of _ -> ct:fail(should_throw_error) catch error:Reason -> ?assertEqual(badarg, Reason) end, ?assertEqual(3, emqx_misc:index_of(a, [b, c, a, e, f])). t_check(_) -> Policy = #{message_queue_len => 10, max_heap_size => 1024 * 1024 * 8}, [self() ! 
{msg, I} || I <- lists:seq(1, 5)], ?assertEqual(ok, emqx_misc:check_oom(Policy)), [self() ! {msg, I} || I <- lists:seq(1, 6)], ?assertEqual({shutdown, message_queue_too_long}, emqx_misc:check_oom(Policy)). drain() -> drain([]). drain(Acc) -> receive Msg -> drain([Msg|Acc]) after 0 -> lists:reverse(Acc) end. t_rand_seed(_) -> ?assert(is_tuple(emqx_misc:rand_seed())). t_now_to_secs(_) -> ?assert(is_integer(emqx_misc:now_to_secs(os:timestamp()))). t_now_to_ms(_) -> ?assert(is_integer(emqx_misc:now_to_ms(os:timestamp()))).
test/emqx_misc_SUITE.erl
0.550607
0.508849
emqx_misc_SUITE.erl
starcoder
-module(rational).

-export([new/1, new/2, add/2, multiply/2, subtract/2, reciprocal/1,
         compare/2, minimum/2, maximum/2]).

-export_type([rational/0]).

%% A rational number kept in canonical form: reduced to lowest terms,
%% with the sign carried by the numerator (the denominator is always
%% positive after construction through new/1,2).
-record(rational, {numerator :: integer(), denominator :: integer()}).

-opaque rational() :: #rational{}.

%% @doc Create a new rational number with denominator 1.
-spec new(integer()) -> rational().
new(N) ->
    new(N, 1).

%% @doc Create a new rational number.
%%
%% The denominator must be non-zero; a zero denominator raises
%% `badarg'. The result is reduced to lowest terms and sign-normalised
%% so that the denominator is always positive.
-spec new(integer(), integer()) -> rational().
new(_Numerator, 0) ->
    %% Bug fix: previously a zero denominator slipped through reduce/1
    %% (gcd(N, 0) =:= N) and silently produced the invalid value 1/0.
    %% Enforce the documented "must be non-zero" contract instead.
    error(badarg);
new(Numerator, Denominator) when Denominator < 0 ->
    %% Move the sign onto the numerator.
    new(-1 * Numerator, abs(Denominator));
new(Numerator, Denominator) ->
    reduce(#rational{numerator = Numerator, denominator = Denominator}).

%% @doc Add two rational numbers.
-spec add(rational(), rational()) -> rational().
add(#rational{numerator = 0}, B) ->
    B;
add(A, #rational{numerator = 0}) ->
    A;
add(#rational{numerator = NumA, denominator = Denom},
    #rational{numerator = NumB, denominator = Denom}) ->
    %% Same denominator (matched via the shared pattern variable):
    %% add numerators directly.
    new(NumA + NumB, Denom);
add(#rational{numerator = NumA, denominator = DenomA},
    #rational{numerator = NumB, denominator = DenomB}) ->
    new(NumA * DenomB + NumB * DenomA, DenomA * DenomB).

%% @doc Multiply two rational numbers.
-spec multiply(rational(), rational()) -> rational().
multiply(#rational{numerator = NumA, denominator = DenomA},
         #rational{numerator = NumB, denominator = DenomB}) ->
    new(NumA * NumB, DenomA * DenomB).

%% @doc Subtract rational number `B' from rational number `A'.
-spec subtract(rational(), rational()) -> rational().
subtract(A, B) ->
    add(A, multiply(B, new(-1, 1))).

%% @doc Reduce a rational number to lowest terms.
-spec reduce(rational()) -> rational().
reduce(#rational{numerator = 0}) ->
    %% Canonical representation of zero is 0/1.
    #rational{numerator = 0, denominator = 1};
reduce(#rational{numerator = Numerator, denominator = Denominator}) ->
    G = gcd(abs(Numerator), abs(Denominator)),
    #rational{numerator = Numerator div G, denominator = Denominator div G}.

%% @doc Return the reciprocal of `X'.
%%
%% NOTE: the reciprocal of zero is mathematically undefined; this
%% implementation deliberately keeps the historical convention of
%% returning zero (0/1) in that case so existing callers are unaffected.
-spec reciprocal(rational()) -> rational().
reciprocal(#rational{numerator = 0}) ->
    #rational{numerator = 0, denominator = 1};
reciprocal(X) ->
    new(X#rational.denominator, X#rational.numerator).

%% @doc Compare two rational numbers.
%%
%% @returns ``lt'', ``eq'', or ``gt'' if `A' is less than, equal to,
%% or greater than `B' respectively.
-spec compare(A :: rational(), B :: rational()) -> eq | lt | gt.
compare(#rational{numerator = NumA, denominator = DenomA},
        #rational{numerator = NumB, denominator = DenomB}) ->
    %% Cross-multiply; both denominators are positive by construction,
    %% so the comparison direction is preserved.
    NumeratorA = NumA * DenomB,
    NumeratorB = NumB * DenomA,
    if
        NumeratorA < NumeratorB -> lt;
        NumeratorA =:= NumeratorB -> eq;
        NumeratorA > NumeratorB -> gt
    end.

%% @doc Return the maximum of `A' and `B' (`A' on ties, as before).
-spec maximum(rational(), rational()) -> rational().
maximum(A, B) ->
    case compare(A, B) of
        lt -> B;
        _ -> A
    end.

%% @doc Return the minimum of `A' and `B' (`A' on ties, as before).
-spec minimum(rational(), rational()) -> rational().
minimum(A, B) ->
    case compare(A, B) of
        gt -> B;
        _ -> A
    end.

%% Euclid's algorithm; only called with non-negative arguments.
-spec gcd(non_neg_integer(), non_neg_integer()) -> non_neg_integer().
gcd(X, 0) -> X;
gcd(X, Y) -> gcd(Y, X rem Y).
src/rational.erl
0.747339
0.632474
rational.erl
starcoder
-module(cqerl_hash). -behaviour(gen_server). %% This module performs the heavy lifting for the 'hash' application mode. %% Below is some background behind the creation of this mode as well as an %% overview of its operation. %% %% A note on terminology: When I say "client" I mean a cqerl_client process. %% When I say "user" I mean any process in an application that calls %% cqerl:get_client or cqerl:new_client to get a client to use to make a query. %% %% So - we've been having a few intermittent problems with the existing %% pooler-based implementation. Fundamentally, it doesn't seem to be an ideal %% choice for managing a pool of clients where each client can have multiple %% users, as is the case with cqerl clients. Additionally, as has been noted in %% #49, it has scalability issues with bottlenecks (that may not be endemic to %% pooler per se, but are certainly present in our use of it). I looked over %% dispcount as an alternative, but as I explained in my comment on #49, that %% doesn't seem ideal either. %% %% What I've ended up doing, then, is to invent my own solution (which steals a %% couple of dispcount's neat ideas that fit our problem). Basically, for each %% "key" (see below), we have a set of client processes started on the first %% attempt to get a client using that key. Those clients, once started, are %% available to all users, and are allocated from a pool using a %% dispcount-style hash of the user's PID. Because they're multi-user clients, %% however, we don't need dispcount's exclusivity guarantees and so can %% allocate multiple users to each client. %% %% The "key" is essentially the NodeKey used by the pooler implementation - a %% tuple of the Node and Opts used to originally set up the clients. I think %% this could probably stand to be simplified a bit, but for now it's what I've %% got. Every unique key currently starts num_clients (default 20) client %% processes - I'd also like to make that configurable on a per-keyspace basis. 
%% %% Each set of clients is monitored by a one_for_one supervisor (call it the %% "set supervisor"). And these set supervisors are, in turn, monitored under a %% simple_one_for_one supervisor called cqerl_client_sup. What this means is %% that individual failures/crashes amongst a set of clients will be restarted %% by their set supervisor and only cause transient error conditions for any %% user trying to access them. If, on the other hand, the whole Cassandra %% server goes down (or something else equally catastrophic happens) then the %% clients retrying will quickly hit the set supervisor's restart intensity and %% cause it to shutdown too. Since cqerl_client_sup is a simple_one_for_one, %% however, the failure won't cascade any further upwards. Further, the next %% attempt by a user to access a client for that key will create a new set of %% clients. If they connect successfully, everything is good and we keep going. %% If they don't, the user (and any other uses waiting on their creation) will %% again receive error responses. %% %% In addition to the set supervisor, each key has its own ets table which is %% used to map the hash values to a client PID. These tables are maintained by %% the cqerl_hash module/process (registered as cqerl) which monitors all the %% client processes and their immediate supervisors. They are further indexed %% by a "root" ets table named cqerl_client_tables which maps keys to their %% respective tables. Thus in the normal course of events, any request for a %% client requires two ets lookups to read-optimised tables (in much the same %% manner as dispcount): First to get the client table for a key, then to get %% the client allocated to the hash of the user process pid. %% %% I've tried, as far as possible, to make this change backwards compatible. If %% you change nothing, the behaviour still uses the existing pooler system. %% Adding {mode, hash} to the cqerl app config enables the new system. 
%% %% The other change required is to call cqerl:get_client/2 rather than %% cqerl:new_client. I made that distinction mostly so that users would not %% accidentally end up using the wrong mode, but also because it would have %% required fiddly and otherwise unnecessary checks in new_client to have it %% work for both. %% %% Calling cqerl:close_client/1 in hash mode is not necessary, but nor is it %% harmful. %% %% I've changed the integration and load tests to run in both modes, and %% everything seems to work okay. Load tests for hash mode are marginally %% (maybe ~20%) faster on my system, but it's a VM with only 2 cores - I hope %% on beefier machines that the improvement will be more pronounced, but I've %% yet to have a chance to test that. -export([ start_link/0, client_started/1, get_client/2 ]). -export([ init/1, terminate/2, code_change/3, handle_call/3, handle_cast/2, handle_info/2 ]). -type key() :: {Node :: term(), Opts :: list()}. -record(pending, { key :: key(), reply_to :: [term()], remaining :: non_neg_integer(), sup_pid :: pid(), table :: ets:tid() }). -record(client_table, { key :: key(), sup_pid :: pid(), table :: ets:tid() }). -record(state, { pending = [] :: [#pending{}] }). start_link() -> gen_server:start_link({local, cqerl}, ?MODULE, [], []). init(_) -> ets:new(cqerl_client_tables, [named_table, {read_concurrency, true}, protected, {keypos, #client_table.key}]), {ok, #state{}}. client_started(Key) -> gen_server:cast(cqerl, {add_client, Key, self()}). 
%% gen_server callback: serialises "start the client set for this key"
%% requests. Three cases:
%%   1. the key's table already exists -> clients are up, reply ok;
%%   2. a startup for this key is already pending -> queue the caller on
%%      that pending record (it is replied to when the last client
%%      reports in, see add_new_client/3);
%%   3. no table and nothing pending -> kick off a new client set.
handle_call({start_clients, Node, Opts}, From, State = #state{pending = Pending}) ->
    Key = cqerl_client:make_key(Node, Opts),
    case ets:lookup(cqerl_client_tables, Key) of
        [#client_table{}] ->
            {reply, ok, State};
        [] ->
            case lists:keytake(Key, #pending.key, Pending) of
                false ->
                    start_clients_impl(Node, Opts, From, State);
                {value, PendingItem, OtherPending} ->
                    %% Piggy-back this caller onto the in-flight startup.
                    NewPending = PendingItem#pending{reply_to = [From | PendingItem#pending.reply_to]},
                    {noreply, State#state{pending = [NewPending | OtherPending]}}
            end
    end;
%% NOTE(review): unknown calls get {noreply, State}, so the caller's
%% gen_server:call blocks until timeout (and start_clients/2 below uses
%% 'infinity'). Consider replying {error, unknown_call} — confirm intent.
handle_call(_, _, State) ->
    {noreply, State}.

%% gen_server callback: a client process announced itself (via
%% client_started/1). If its key is pending, fill in the next slot of the
%% pending set; otherwise treat it as a restarted replacement for a
%% previously-crashed client of an established set.
handle_cast({add_client, Key, Pid}, State = #state{pending = Pending}) ->
    NewState = case lists:keytake(Key, #pending.key, Pending) of
        {value, PendingItem, OtherPending} ->
            add_new_client(Pid, PendingItem, State#state{pending = OtherPending});
        false ->
            add_replacement_client(Key, Pid, State)
    end,
    {noreply, NewState};
handle_cast(_, State) ->
    {noreply, State}.

%% gen_server callback: a monitored process (set supervisor or client)
%% died. Fail any startup that was waiting on it and scrub it from the
%% established tables.
handle_info({'DOWN', _Ref, process, Pid, Reason}, State) ->
    NewState = clear_pending(Pid, Reason, State),
    clear_existing(Pid),
    {noreply, NewState};
handle_info(_, State) ->
    {noreply, State}.

terminate(_Reason, _State) ->
    ok.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.

%% Fast path used by callers: two ets lookups (key -> table, then
%% hash-slot -> client pid). Slots are 1-based, phash2/2 returns
%% 0..Size-1, hence the N+1.
%% NOTE(review): if the table exists but is empty (all clients scrubbed
%% by clear_existing/1), ets:info(T, size) is 0 and phash2/2 raises
%% badarg — confirm whether that window is acceptable.
get_client(Node, Opts) ->
    Key = cqerl_client:make_key(Node, Opts),
    case get_table(Key) of
        {ok, T} ->
            N = erlang:phash2(self(), ets:info(T, size)),
            [{_, Pid}] = ets:lookup(T, N+1),
            {ok, {Pid, make_ref()}};
        _ ->
            %% Slow path: ask the cqerl server to start the set, then retry.
            start_clients(Node, Opts)
    end.

%% Ask the registered cqerl server to start a client set for this
%% node/options pair, then re-enter get_client/2 on success.
start_clients(Node, Opts) ->
    case gen_server:call(cqerl, {start_clients, Node, Opts}, infinity) of
        ok -> get_client(Node, Opts);
        {error, E} -> {error, E}
    end.

%% Map a key to its per-key client table via the root cqerl_client_tables
%% index.
get_table(Key) ->
    case ets:lookup(cqerl_client_tables, Key) of
        [#client_table{table = T}] -> {ok, T};
        [] -> {error, clients_not_started}
    end.
%% Begin starting a client set: create the per-key slot table, ask
%% cqerl_client_sup for Num clients under a fresh set supervisor, and
%% record a #pending{} that collects the clients as they report in.
%% NOTE(review): ClientTable is created before add_clients/2 is checked,
%% so on the {error, E} branch the ets table is never deleted (leaks,
%% owned by this server) — consider creating it only on success.
start_clients_impl(Node, Opts, From, State) ->
    ClientTable = ets:new(cqerl_clients, [{read_concurrency, true}, protected]),
    case cqerl_client_sup:add_clients(Node, Opts) of
        {error, E} ->
            {reply, {error, E}, State};
        {ok, {Num, SupPid}} ->
            %% Monitor the set supervisor so clear_pending/clear_existing
            %% can react if the whole set collapses.
            monitor(process, SupPid),
            Key = cqerl_client:make_key(Node, Opts),
            NewPending = #pending{key = Key, reply_to = [From], remaining = Num,
                                  table = ClientTable, sup_pid = SupPid},
            {noreply, State#state{pending = [NewPending | State#state.pending]}}
    end.

%% A client of a pending set reported in. Record it in the slot table;
%% when it is the last one expected (remaining =:= 1), publish the table
%% in the root index and reply ok to every queued caller, otherwise just
%% decrement the countdown.
add_new_client(Pid, PendingItem = #pending{reply_to = ReplyTo, key = Key, table = Table,
                                           remaining = Remaining, sup_pid = SupPid},
               State = #state{pending = OtherPending}) ->
    add_client(Pid, Table),
    NewPending = case Remaining of
        1 ->
            ets:insert(cqerl_client_tables, #client_table{
                key = Key, table = Table, sup_pid = SupPid}),
            lists:foreach(fun(R) -> gen_server:reply(R, ok) end, ReplyTo),
            OtherPending;
        N ->
            [PendingItem#pending{remaining = N-1} | OtherPending]
    end,
    State#state{pending = NewPending}.

%% A client reported in for a key that is no longer pending: normally a
%% restart of a crashed member of an established set, which takes the
%% first free slot. If the key's table is gone entirely, the sender is
%% expected to be dead already (assertive match on process_info/1).
add_replacement_client(Key, Pid, State) ->
    case get_table(Key) of
        {ok, T} ->
            add_client(Pid, T);
        {error, clients_not_started} ->
            undefined = process_info(Pid), % make sure the sender is dead
            error_logger:warning_msg("Stale 'add_client' from a dead child: ~p", [Pid])
    end,
    State.

%% Monitor a client and store it in the lowest free 1-based slot of the
%% key's table.
add_client(Pid, Table) ->
    monitor(process, Pid),
    Index = find_empty_index(Table),
    ets:insert(Table, {Index, Pid}).

%% Smallest positive integer not currently used as a slot index.
%% Linear in table size, which is fine for the small per-key sets.
find_empty_index(Table) ->
    {UsedIndices, _} = lists:unzip(ets:tab2list(Table)),
    PossibleIndices = lists:seq(1, length(UsedIndices)+1),
    find_empty(lists:sort(UsedIndices), PossibleIndices).

%% Walk the sorted used indices alongside 1..N+1; the first mismatch is
%% the gap (or N+1 when the used indices are dense).
find_empty([A|Tail], [A|Tail2]) -> find_empty(Tail, Tail2);
find_empty(_, [A|_]) -> A.

%% A monitored process died while a set keyed on its supervisor was
%% still pending: drop the pending record and fail all queued callers
%% with the exit reason.
clear_pending(Pid, Reason, State = #state{pending = Pending}) ->
    NewPending = case lists:keytake(Pid, #pending.sup_pid, Pending) of
        false -> Pending;
        {value, #pending{reply_to = ReplyTo}, OtherPending} ->
            lists:foreach(fun(R) -> gen_server:reply(R, {error, Reason}) end, ReplyTo),
            OtherPending
    end,
    State#state{pending = NewPending}.
%% Scrub a dead process from the established client tables. Two cases,
%% distinguished per index entry:
%%   * the dead pid is a set supervisor: the whole set is gone, so drop
%%     the key from the root index and destroy its slot table;
%%   * otherwise it may be an individual client: remove any slot entries
%%     pointing at it from every per-key table (a no-op where absent).
clear_existing(Pid) ->
    Purge =
        fun(#client_table{key = Key, sup_pid = Sup, table = Slots}) when Sup =:= Pid ->
                ets:delete(cqerl_client_tables, Key),
                ets:delete(Slots);
           (#client_table{table = Slots}) ->
                ets:match_delete(Slots, {'_', Pid})
        end,
    lists:foreach(Purge, ets:tab2list(cqerl_client_tables)).
src/cqerl_hash.erl
0.590071
0.461077
cqerl_hash.erl
starcoder
%% `perforator_ci` is a continious performance integration tool which %% uses `perforator` output statistics. It can nicely track and show %% performance tests degradations. %% %% `perforator_ci` consists of 3 "major" objects: %% * projects (`perforator_ci_project`) %% * builders (`perforator_ci_builder`) %% * web backend %% %% Each project tracks changes on a repository branch and asks builders to %% build changes. Currently, only Git is supported. %% %% Builders (only one per host, because more builders can screw up `perforator` %% results) try to execute list of commands, including command used for %% generating perf reports, and send results to projects. %% %% Project keeps track of build results and publishes them to web backend (on %% demand). %% %% That's it. Want to sleep. %% %% @author Martynas <<EMAIL>> -module(perforator_ci). -include("perforator_ci.hrl"). %% API -export([ create_and_start_project/1, update_project/1, get_builders/0 ]). -export([start/0, stop/0, init/0]). %% ============================================================================ %% API %% ============================================================================ %% @doc Stores project in DB and starts project handler process. -spec create_and_start_project({ perforator_ci_types:project_name(), perforator_ci_types:repo_url(), perforator_ci_types:branch(), perforator_ci_types:repo_backend(), perforator_ci_types:polling_strategy(), perforator_ci_types:build_instructions(), list()}) -> perforator_ci_types:project_id(). 
%% Persist the project definition, clone its repository locally, and
%% make sure exactly one project handler process is running for it.
%% Returns the project id allocated by the DB layer.
create_and_start_project({Name, RepoUrl, Branch, RepoBackend, Polling,
        BuildInstr, Info}) ->
    % Store and fetch an ID:
    ID = perforator_ci_db:create_project({Name, RepoUrl, Branch, RepoBackend,
        Polling, BuildInstr, Info}),
    % Clone project repo (RepoBackend is a callback module, e.g. a git
    % backend, chosen by the caller):
    RepoBackend:clone(RepoUrl, perforator_ci_utils:repo_path(ID)),
    % Check, maybe project is already running, so there is no need to start a
    % new instance:
    case perforator_ci_project:is_project_running(ID) of
        true -> ok; % do nothing, already started
        false ->
            {ok, _} = perforator_ci_project_sup:start_project(ID)
    end,
    ID.

%% @doc Updates project info. Updates take place after project handler process
%% is restarted (dirty hack).
-spec update_project({
        perforator_ci_types:project_id(),
        perforator_ci_types:project_name(),
        perforator_ci_types:repo_url(),
        perforator_ci_types:branch(),
        perforator_ci_types:repo_backend(),
        perforator_ci_types:polling_strategy(),
        perforator_ci_types:build_instructions(),
        list()}) -> ok.
update_project({ID, Name, RepoUrl, Branch, RepoBackend, Polling, BuildInstr,
        Info}) ->
    % Update DB:
    ok = perforator_ci_db:update_project({ID, Name, RepoUrl, Branch,
        RepoBackend, Polling, BuildInstr, Info}),
    % Restart project handler: the handler re-reads its config from the DB
    % on (supervised) restart, so killing it applies the update.
    exit(perforator_ci_project:get_pid(ID), '$restart'),
    ok.

%% @doc Returns builders list (nodes where they reside) with their queue size.
-spec get_builders() -> [{node(), integer()}].
get_builders() ->
    [{B, perforator_ci_builder:get_queue_size(B)} ||
        B <- perforator_ci_builder:get_builders()].

%% ============================================================================
%% Start/Init
%% ============================================================================

%% @doc Starts the app with its deps.
%% Tries to reduce noise (?mute / ?silent suppress startup logging).
%% NOTE(review): pg2 was removed from OTP (24+); confirm the targeted
%% OTP release still ships it.
start() ->
    ?mute(begin
        application:start(compiler),
        application:start(syntax_tools),
        application:start(lager)
    end),
    ?silent(error, begin
        pg2:start(),
        application:start(mnesia),
        application:start(gproc),
        application:start(cowboy),
        application:start(perforator_ci)
    end).
%% @doc Stops the app and its deps.
%% Applications are stopped in reverse of the start/0 order; logging
%% apps last so shutdown messages are still suppressed/visible as
%% intended by the ?silent / ?mute wrappers.
stop() ->
    ?silent(error, begin
        application:stop(perforator_ci),
        application:stop(cowboy),
        application:stop(gproc),
        application:stop(mnesia)
    end),
    ?mute(begin
        application:stop(lager),
        application:stop(syntax_tools),
        application:stop(compiler)
    end).

%% @doc Creates mnesia schema and tables.
%% See perforator_ci_db:init/0 for more info.
init() ->
    perforator_ci_db:init().
src/perforator_ci.erl
0.564098
0.51751
perforator_ci.erl
starcoder
%%% Concurrent Programming In Erlang -- The University of Kent / FutureLearn
%%% Exercise 2 : URL https://www.futurelearn.com/courses/concurrent-programming-erlang/3/steps/488337
%% Author: <NAME>.

-module(reader).
-export([receiver/0]).

%% Using indirect recursion to avoid repeating the call back to receiver in
%% each receiver receive clause that needs to continue receiving.
%% The stop message has the highest priority (via its precedence in the clauses).
%% To make the reader sleep, send a 'short_wait', 'wait' or 'long_wait'
%% message to it. They are also the next on the priority order in the order
%% given, so we can schedule a short wait in between longer waits.
%% A short wait is special, it also reads *one* message, so we can schedule a
%% long wait, then a short wait, then several messages, and only 1 of those
%% will be read and echoed before the reader sleeps again as long as there
%% were no stop message(s).
%%
%% NOTE(review): the "short wait also reads one message" behaviour
%% described above is not visible in the code below — short_wait simply
%% sleeps for 5 seconds like the other wait messages. Confirm whether the
%% comment or the implementation is out of date.

%% Main receive loop. Matches control messages first (stop, priority,
%% the three wait variants); anything else is echoed by process/1, which
%% re-enters this loop (indirect recursion).
receiver() ->
    receive
        stop ->
            io:format("Stopping, bye!~n"),
            ok;
        priority ->
            %% Switch to receiver2/0, which for 30 seconds services only
            %% floats, integers and the 'normal' message, then resume the
            %% ordinary loop after a 2-second pause.
            io:format("You have 30 seconds to send your prioritized messages!~n"),
            timer:sleep(30000),
            receiver2(),
            wait(2);
        short_wait ->
            wait(5);
        wait ->
            wait(10);
        long_wait ->
            wait(30);
        Msg ->
            process(Msg)
    end.

%% Prioritised receive loop: selectively receives numeric messages
%% (floats before integers, per clause order) and echoes them; the atom
%% 'normal' ends the prioritised phase and returns to the caller.
receiver2() ->
    receive
        Value when is_float(Value) ->
            io:format("Got (prioritized): ~w~n", [Value]),
            receiver2();
        Value when is_integer(Value) ->
            io:format("Got (prioritized): ~w~n", [Value]),
            receiver2();
        normal ->
            io:format("Back to normal reception~n")
    end.

%% Sleep for the requested number of seconds (messages accumulate in the
%% mailbox meanwhile), then re-enter the main loop.
wait(Seconds) ->
    io:format("Received sleep request! Sleeping for ~w seconds...~n", [Seconds]),
    timer:sleep(Seconds * 1000),
    io:format("Back listening!~n"),
    receiver().

%% Echo an ordinary message and continue receiving.
process(Msg) ->
    io:format("Got: ~w~n", [Msg]),
    receiver().

%% -----------------------------------------------------------------------------
exercises/e2/reader.erl
0.503174
0.450359
reader.erl
starcoder
%% Copyright (c) 2018 EMQ Technologies Co., Ltd. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. -module(ekka_SUITE). -compile(export_all). -include("ekka.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). -define(CONTENT_TYPE, "application/x-www-form-urlencoded"). all() -> [{group, cluster}]. groups() -> [{cluster, [sequence], [cluster_test, cluster_join, cluster_leave, cluster_remove, cluster_remove2 ]}]. init_per_suite(Config) -> NewConfig = generate_config(), Vals = proplists:get_value(ekka, NewConfig), [application:set_env(ekka, Par, Value) || {Par, Value} <- Vals], application:ensure_all_started(ekka), Config. end_per_suite(_Config) -> application:stop(ekka), ekka_mnesia:ensure_stopped(). %%-------------------------------------------------------------------- %% cluster group %%-------------------------------------------------------------------- cluster_test(_Config) -> Z = slave(ekka, cluster_test_z), wait_running(Z), true = ekka:is_running(Z, ekka), Node = node(), ok = rpc:call(Z, ekka_cluster, join, [Node]), [Z, Node] = lists:sort(mnesia:system_info(running_db_nodes)), ok = rpc:call(Z, ekka_cluster, leave, []), [Node] = lists:sort(mnesia:system_info(running_db_nodes)), ok = slave:stop(Z). 
cluster_join(_Config) -> Z = slave(ekka, cluster_join_z), Z1 = slave(ekka, cluster_join_z1), N = slave(node, cluster_join_n), wait_running(Z), true = ekka:is_running(Z, ekka), true = ekka:is_running(Z1, ekka), Node = node(), %% case1 ignore = ekka:join(Node), %% case2 {error, {node_down, _}} = ekka:join(N), %% case3 ok = ekka:join(Z), Cluster = ekka_cluster:status(), [Z, Node] = proplists:get_value(running_nodes, Cluster), %% case4 ok = ekka:join(Z1), Cluster1 = ekka_cluster:status(), [Z1, Node] = proplists:get_value(running_nodes, Cluster1), true = rpc:call(Z, ekka, is_running, [Z, ekka]), %% case4 ok = rpc:call(Z, ekka, join, [Z1]), ?assertEqual(3, length(proplists:get_value(running_nodes, ekka_cluster:status()))), %% case5 slave:stop(Z), timer:sleep(100), ?assertEqual(2, length(proplists:get_value(running_nodes, ekka_cluster:status()))), ?assertEqual(1, length(proplists:get_value(stopped_nodes, ekka_cluster:status()))), slave:stop(N), slave:stop(Z1). cluster_leave(_Config) -> Z = slave(ekka, cluster_leave_z), wait_running(Z), {error, node_not_in_cluster} = ekka_cluster:leave(), ok = ekka_cluster:join(Z), Node = node(), [Z, Node] = ekka_mnesia:running_nodes(), ok = ekka_cluster:leave(), [Node] = ekka_mnesia:running_nodes(), slave:stop(Z). cluster_remove(_Config) -> Z = slave(ekka, cluster_remove_z), wait_running(Z), Node = node(), ignore = ekka_cluster:force_leave(Node), ok = ekka_cluster:join(Z), [Z, Node] = ekka_mnesia:running_nodes(), ok = ekka_cluster:force_leave(Z), [Node] = ekka_mnesia:running_nodes(), slave:stop(Z). cluster_remove2(_Config) -> Z = slave(ekka, cluster_remove2_z), wait_running(Z), ok = ekka_cluster:join(Z), Node = node(), [Z, Node] = ekka_mnesia:running_nodes(), ok = ekka_cluster:force_leave(Z), ok = rpc:call(Z, ekka_mnesia, ensure_stopped, []), [Node] = ekka_mnesia:running_nodes(), slave:stop(Z). host() -> [_, Host] = string:tokens(atom_to_list(node()), "@"), Host. wait_running(Node) -> wait_running(Node, 30000). 
%% Poll `Node' until ekka reports itself running there, sleeping 100 ms
%% between attempts. Throws {wait_timeout, Node} once the remaining
%% `Timeout' budget (milliseconds) is exhausted.
wait_running(Node, Timeout) when Timeout < 0 ->
    throw({wait_timeout, Node});
wait_running(Node, Timeout) ->
    case rpc:call(Node, ekka, is_running, [Node, ekka]) of
        true  -> ok;
        false -> timer:sleep(100),
                 wait_running(Node, Timeout - 100)
    end.

%% Start a slave node on this host. The 'ekka' variant also boots the
%% ekka application (and its deps) on the new node; the 'node' variant
%% starts a bare node.
slave(ekka, Node) ->
    {ok, Ekka} = slave:start(host(), Node, ensure_slave()),
    rpc:call(Ekka, application, ensure_all_started, [ekka]),
    Ekka;
slave(node, Node) ->
    {ok, N} = slave:start(host(), Node, ensure_slave()),
    N.

%% Build the ekka app environment by running the cuttlefish schema over
%% the example config file shipped with the project.
generate_config() ->
    Schema = cuttlefish_schema:files([local_path(["priv", "ekka.schema"])]),
    Conf = conf_parse:file([local_path(["etc", "ekka.conf.example"])]),
    cuttlefish_generator:map(Schema, Conf).

%% Project root for `Module': two directory levels above its loaded
%% beam file (…/ebin/mod.beam -> project root).
get_base_dir(Module) ->
    {file, Here} = code:is_loaded(Module),
    filename:dirname(filename:dirname(Here)).

get_base_dir() ->
    get_base_dir(?MODULE).

%% Join path `Components' under the project root of `Module'.
local_path(Components, Module) ->
    filename:join([get_base_dir(Module) | Components]).

local_path(Components) ->
    local_path(Components, ?MODULE).

%% Code-path arguments handed to slave:start/3 so the slave node can
%% load this project's beams and its deps.
ensure_slave() ->
    EbinDir = local_path(["ebin"]),
    DepsDir = local_path(["deps", "*", "ebin"]),
    "-pa " ++ EbinDir ++ " -pa " ++ DepsDir.
test/ekka_SUITE.erl
0.521959
0.415551
ekka_SUITE.erl
starcoder
%% This Source Code Form is subject to the terms of the Mozilla Public %% License, v. 2.0. If a copy of the MPL was not distributed with this %% file, You can obtain one at https://mozilla.org/MPL/2.0/. %% %% Copyright (c) 2021-2022 VMware, Inc. or its affiliates. All rights reserved. %% -module(tx_funs). -include_lib("eunit/include/eunit.hrl"). -include("include/khepri.hrl"). -include("src/khepri_fun.hrl"). -include("src/internal.hrl"). -include("src/khepri_machine.hrl"). -dialyzer([{no_return, [allowed_khepri_tx_api_test/0, allowed_erlang_module_api_test/0, allowed_bs_match_accepts_match_context_test/0]}, {no_missing_calls, [extracting_unexported_external_function_test/0]}, {no_match, [matches_type/2, allowed_case_block_with_different_tuple_arities_test/0, trim_leading_dash3/2, allowed_bs_match_accepts_match_context_test/0]}]). -define(make_standalone_fun(Expression), begin __Fun = fun() -> Expression end, khepri_tx:to_standalone_fun(__Fun, rw) end). -define(assertStandaloneFun(Expression), ?assertMatch(#standalone_fun{}, ?make_standalone_fun(Expression))). -define(assertToFunThrow(Expected, Expression), ?assertThrow(Expected, ?make_standalone_fun(Expression))). %% The compiler is smart enough to optimize away many instructions by %% inspecting types and values. `mask/1' confuses the compiler by sending %% and receiving the value. mask(Value) -> self() ! Value, receive Msg -> Msg end. noop_ok_test() -> ?assertStandaloneFun(ok). allowed_khepri_tx_api_test() -> ?assertStandaloneFun( begin _ = khepri_tx:put([foo], khepri_payload:data(value)), _ = khepri_tx:put([foo], khepri_payload:data(value), #{}), _ = khepri_tx:get([foo]), _ = khepri_tx:get([foo], #{}), _ = khepri_tx:exists([foo]), _ = khepri_tx:has_data([foo]), _ = khepri_tx:list([foo]), _ = khepri_tx:find([foo], ?STAR), _ = khepri_tx:delete([foo]), _ = khepri_tx:abort(error), _ = khepri_tx:is_transaction() end). 
denied_khepri_tx_run_3_test() -> ?assertToFunThrow( {invalid_tx_fun, {call_denied, {khepri_tx, run, 3}}}, _ = khepri_tx:run(#khepri_machine{}, fun() -> ok end, true)). allowed_erlang_expressions_add_test() -> One = mask(1), ?assertStandaloneFun(One + 2). allowed_erlang_expressions_subtract_test() -> One = mask(1), ?assertStandaloneFun(One - 2). allowed_erlang_expressions_multiply_test() -> Three = mask(3), ?assertStandaloneFun(Three * 2). allowed_erlang_expressions_divide_test() -> Six = mask(6), ?assertStandaloneFun(Six / 2). allowed_erlang_expressions_integer_division_test() -> Six = mask(6), ?assertStandaloneFun(Six div 2). allowed_erlang_expressions_remainder_test() -> Seven = mask(7), ?assertStandaloneFun(Seven rem 2). allowed_erlang_expressions_bnot_test() -> One = mask(1), ?assertStandaloneFun(bnot One). allowed_erlang_expressions_band_test() -> One = mask(1), ?assertStandaloneFun(One band One). allowed_erlang_expressions_bor_test() -> One = mask(1), ?assertStandaloneFun(One bor One). allowed_erlang_expressions_bxor_test() -> One = mask(1), ?assertStandaloneFun(One bxor One). allowed_erlang_expressions_bsl_test() -> One = mask(1), ?assertStandaloneFun(One bsl One). allowed_erlang_expressions_bsr_test() -> One = mask(1), ?assertStandaloneFun(One bsr One). allowed_erlang_expressions_equals_test() -> One = mask(1), ?assertStandaloneFun(One == One). allowed_erlang_expressions_not_equals_test() -> One = mask(1), ?assertStandaloneFun(One /= One). allowed_erlang_expressions_strict_equals_test() -> One = mask(1), OneFloat = mask(1.0), ?assertStandaloneFun(One =:= OneFloat). allowed_erlang_expressions_strict_not_equals_test() -> One = mask(1), OneFloat = mask(1.0), ?assertStandaloneFun(One =/= OneFloat). allowed_erlang_expressions_greater_than_test() -> One = mask(1), ?assertStandaloneFun(One > One). allowed_erlang_expressions_greater_than_or_equal_to_test() -> One = mask(1), ?assertStandaloneFun(One >= One). 
allowed_erlang_expressions_less_than_test() -> One = mask(1), ?assertStandaloneFun(One < One). allowed_erlang_expressions_less_than_or_equal_to_test() -> One = mask(1), ?assertStandaloneFun(One =< One). allowed_erlang_expressions_not_test() -> True = mask(true), ?assertStandaloneFun(not True). allowed_erlang_expressions_and_test() -> True = mask(true), ?assertStandaloneFun(True and True). allowed_erlang_expressions_or_test() -> True = mask(true), ?assertStandaloneFun(True or True). allowed_erlang_expressions_xor_test() -> True = mask(true), ?assertStandaloneFun(True xor True). allowed_erlang_expressions_andalso_test() -> True = mask(true), ?assertStandaloneFun(True andalso True). allowed_erlang_expressions_orelse_test() -> True = mask(true), ?assertStandaloneFun(True orelse True). allowed_erlang_expressions_list_concat_test() -> List = mask([a]), ?assertStandaloneFun(List ++ List). allowed_erlang_expressions_list_difference_test() -> List = mask([a]), ?assertStandaloneFun(List -- List). allowed_erlang_expressions_map_literal_test() -> ?assertStandaloneFun(#{a => b}). allowed_erlang_expressions_map_destructure_test() -> ?assertStandaloneFun( begin M = #{a => b}, #{a := _} = M end). allowed_erlang_expressions_map_update_test() -> ?assertStandaloneFun( begin M = #{a => b}, M#{a => b} end). allowed_erlang_expressions_float_arithmetic_test() -> One = mask(1.0), ?assertStandaloneFun( begin One / 2.0 + One * 6.0 - 3.0 + -(One + 1.0) end). allowed_erlang_types_test() -> ?assertStandaloneFun( begin _ = 1, _ = 1.0, _ = atom, _ = <<"binary">>, _ = [l, i, s, t], _ = #{a => b}, _ = $c, _ = "string" end). allowed_case_block_test() -> ?assertStandaloneFun( begin case khepri_tx:get([foo]) of {ok, #{[foo] := _}} -> {ok, found}; {ok, #{}} -> {ok, not_found}; _ -> error end end). allowed_case_block_with_different_tuple_arities_test() -> ?assertStandaloneFun( begin case {a, b, c} of {_, _, _} -> three; {_, _} -> two; {_} -> one end end). 
allowed_binary_handling_test() -> ?assertStandaloneFun( begin _ = name_concat(<<"prefix">>, <<"name2">>), _ = name_concat(<<"name1">>, 0) end). name_concat(<<"prefix">>, Name2) -> <<"prefix_", Name2/binary>>; name_concat(Name1, Name2) -> <<Name1/binary, "_", Name2/signed>>. allowed_list_comprehension_test() -> ?assertStandaloneFun( begin [erlang:abs(I) || I <- [1, 2, 3]] end). allowed_list_comprehension_with_funs_test() -> ?assertStandaloneFun( begin [begin F = fun(I1) -> I1 end, F(I) end || I <- (fun(L) -> L end)([1, 2, 3])] end). allowed_list_comprehension_with_multiple_qualifiers_test() -> ?assertStandaloneFun( begin {ok, Nodes} = khepri_tx:list([?ROOT_NODE]), [Data || Path <- lists:sort(maps:keys(Nodes)), #{data := Data} <- [maps:get(Path, Nodes)]] end). allowed_begin_block_test() -> ?assertStandaloneFun( begin F1 = fun() -> ok end, F2 = fun() -> ok end, _F3 = fun() -> {F1, F2} end end). allowed_if_block_test() -> ?assertStandaloneFun( begin L = lists:max([1]), if L >= 0 -> fun() -> ok end; true -> ok end end). allowed_try_catch_block_test() -> ?assertStandaloneFun( begin try 1 + 1 catch C:R:S -> erlang:raise(C, R, S) end end). call_that_will_raise(A) -> try 1 + A catch error:_:Stacktrace -> erlang:raise(error, "Oh no!", Stacktrace) end. allowed_call_to_try_catch_function_test() -> self() ! a, A = receive Msg -> Msg end, ?assertStandaloneFun( begin try call_that_will_raise(A) after ok end end). allowed_catch_test() -> ?assertStandaloneFun( begin case catch (exit(a)) of {'EXIT', _Exit} -> true; _ -> false end end). matches_type(exchange, <<"exchanges">>) -> true; matches_type(queue, <<"queues">>) -> true; matches_type(exchange, <<"all">>) -> true; matches_type(queue, <<"all">>) -> true; matches_type(_, _) -> false. allowed_bs_match_test() -> List = [{'apply-to', <<"queues">>}], ?assertStandaloneFun( begin matches_type(queue, proplists:get_value('apply-to', List)) end). encode_integer(Length) -> <<Length:7/integer>>. 
%% Bitstring (non-byte-aligned) construction must be extractable.
allowed_bitstring_init_test() ->
    ?assertStandaloneFun(
       begin
           <<25:7/integer>> = encode_integer(25)
       end).

%% Fixed-width `bytes' segments with literal separators ($-).
parse_date(
  <<Year:4/bytes, $-, Month:2/bytes, $-, Day:2/bytes, _Rest/binary>>) ->
    {Year, Month, Day}.

allowed_bs_match_date_parser_test() ->
    ?assertStandaloneFun(
       begin
           {<<"2022">>, <<"02">>, <<"02">>} = parse_date(<<"2022-02-02">>)
       end).

%% Recursive binary parser: the match context is threaded through the
%% `parse_digits' loop.
parse_float(<<".", Rest/binary>>) ->
    parse_digits(Rest);
parse_float(Bin) ->
    {[], Bin}.

parse_digits(Bin) ->
    parse_digits(Bin, []).

parse_digits(
  <<Digit/integer, Rest/binary>>,
  Acc) when is_integer(Digit) andalso Digit >= 48 andalso Digit =< 57 ->
    parse_digits(Rest, [Digit | Acc]);
parse_digits(Rest, Acc) ->
    {lists:reverse(Acc), Rest}.

allowed_bs_match_digit_parser_test() ->
    ?assertStandaloneFun(
       begin
           {[1, 2, 3, 4, 5], <<>>} = parse_float(<<".", 1, 2, 3, 4, 5>>)
       end).

%% This set of parse_float, parse_digits, etc. is the same as the above
%% functions and test case, except that the intermediary function
%% `parse_digits/2' introduces new bindings that change the arity, to
%% ensure we are not hard-coding an arity.
parse_float2(<<".", Rest/binary>>) ->
    parse_digits2([], Rest);
parse_float2(Bin) ->
    {[], Bin}.

parse_digits2(Foo, Bin) ->
    parse_digits2(Foo, [], Bin).

parse_digits2(Foo, Bar, Bin) ->
    parse_digits2(Foo, Bar, Bin, []).

parse_digits2(
  Foo, Bar, <<Digit/integer, Rest/binary>>,
  Acc) when is_integer(Digit) andalso Digit >= 48 andalso Digit =< 57 ->
    parse_digits2(Foo, Bar, Rest, [Digit | Acc]);
parse_digits2(_Foo, _Bar, Rest, Acc) ->
    {lists:reverse(Acc), Rest}.

allowed_bs_match_digit_parser2_test() ->
    ?assertStandaloneFun(
       begin
           {[1, 2, 3, 4, 5], <<>>} = parse_float2(<<".", 1, 2, 3, 4, 5>>)
       end).

%% The compiler determines that this clause will always match because this
%% function is not exported and is only called with a compile-time binary
%% matching the pattern. As a result, the instruction for this match is
%% `bs_start_match4'
trim_leading_dash1(<<$-, Rest/binary>>) ->
    trim_leading_dash1(Rest);
trim_leading_dash1(Binary) ->
    Binary.

%% This is the same function but we'll give it a non-binary argument in
%% the test case to avoid the `bs_start_match4' optimization. Instead
%% the compiler uses a `{test,bs_start_match3,..}` instruction.
trim_leading_dash2(<<$-, Rest/binary>>) ->
    trim_leading_dash2(Rest);
trim_leading_dash2(Binary) ->
    Binary.

%% Again, effectively the same function but to fix compilation for this
%% case we need to determine the correct arity to mark as accepting
%% a match context, so we should test a case where the binary match
%% is done in another argument.
trim_leading_dash3(Arg, <<$-, Rest/binary>>) ->
    trim_leading_dash3(Arg, Rest);
trim_leading_dash3(_Arg, Binary) ->
    Binary.

allowed_bs_match_accepts_match_context_test() ->
    ?assertStandaloneFun(
       begin
           <<"5">> = trim_leading_dash1(<<"-5">>),
           <<"5">> = trim_leading_dash2(<<"-5">>),
           <<"5">> = trim_leading_dash2("-5"),
           <<"5">> = trim_leading_dash3([], "-5")
       end).

%% Produces tuples of several arities so the extracted code contains a
%% `select_tuple_arity' dispatch.
make_tuple([A]) -> {a, A};
make_tuple([A, B]) -> {b, A, B};
make_tuple([_, B, C]) -> {c, B, C}.

handle_tuple(Tuple) when is_tuple(Tuple) ->
    case Tuple of
        {a, _} -> true;
        {b, _, _} -> false;
        {c, _, _} -> false
    end.

%% `nodes()' is evaluated outside the fun on purpose: the fun itself must
%% not call denied BIFs.
select_tuple_arity_var_info_test() ->
    Nodes = nodes(),
    ?assertStandaloneFun(
       begin
           Tuple = make_tuple(Nodes),
           handle_tuple(Tuple)
       end).

%% Guard mixing `element/2' tests, feeding a `test_arity' instruction.
encode_frame(Frame)
  when is_tuple(Frame) andalso
       (element(1, Frame) =:= text orelse element(1, Frame) =:= binary) ->
    <<(encode_fin(Frame))/bitstring>>.

encode_fin({text, false}) -> <<0:1/integer>>;
encode_fin({binary, false}) -> <<0:1/integer>>;
encode_fin(_) -> <<1:1/integer>>.

%% Frames come from the mailbox to keep their types opaque to the
%% compiler until runtime.
type_inference_for_test_arity_instruction_test() ->
    self() ! {text, false},
    TextFrame = receive TextMsg -> TextMsg end,
    self() ! {binary, true},
    BinaryFrame = receive BinaryMsg -> BinaryMsg end,
    ?assertStandaloneFun(
       begin
           <<0:0>> = encode_frame(TextFrame),
           <<0:1>> = encode_frame(BinaryFrame)
       end).

%% Bitstring comprehension with a bitstring generator (`<=').
bit_string_comprehension_expression_test() ->
    Data = crypto:strong_rand_bytes(128),
    <<Mask:32/integer>> = crypto:strong_rand_bytes(4),
    ?assertStandaloneFun(
       begin
           <<<<(Part bxor Mask):32/integer>>
             || <<Part:32/integer>> <= Data>>
       end).

apply_fun_to_args(Fun, Arg1, Arg2) ->
    Fun(Arg1, Arg2).

%% An external fun reference (`fun M:F/A') passed as a higher-order
%% argument must survive extraction and execution.
allowed_higher_order_external_call_test() ->
    StandaloneFun = ?make_standalone_fun(
                       begin
                           Fun = fun mod_used_for_transactions:min/2,
                           apply_fun_to_args(Fun, 1, 2)
                       end),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertEqual(1, khepri_fun:exec(StandaloneFun, [])).

%% Receiving messages is side-effecting and therefore denied in tx funs.
denied_receive_block_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, receiving_message_denied},
       begin
           receive Msg -> Msg end
       end).

denied_receive_after_block_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, receiving_message_denied},
       begin
           receive Msg -> Msg after 0 -> ok end
       end).

%% `module_info/0,1' leak node-local data and are denied.
denied_module_info_0_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {lists, module_info, 0}}},
       begin
           _ = lists:module_info()
       end).

denied_module_info_1_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {lists, module_info, 1}}},
       begin
           _ = lists:module_info(compile)
       end).

-record(record, {}).

allowed_erlang_module_api_test() ->
    %% The compiler optimization will replace many of the following calls
    %% directly by their result, so the testing is a bit limited... I tried to
    %% use some counter-measures but it's not every effective.
    self() ! erlang:phash2(dict:new()),
    Term = receive Msg -> Msg end,
    ?assertStandaloneFun(
       begin
           Atom = list_to_atom(binary_to_list(term_to_binary(Term))),
           Binary = term_to_binary(Term),
           String = binary_to_list(Binary),
           Int = size(term_to_binary(Term)),
           Float = float(Int),
           List = [Term, Term],
           Map = maps:from_list(List),
           Pid = list_to_pid(String),
           _ = erlang:abs(Int),
           _ = erlang:adler32(Binary),
           _ = erlang:adler32(Term, Binary),
           _ = erlang:adler32_combine(Term, Term, Term),
           _ = erlang:append_element({Term, Term}, Term),
           _ = erlang:atom_to_binary(Atom),
           _ = erlang:atom_to_list(Atom),
           _ = erlang:binary_to_atom(Binary),
           _ = erlang:binary_to_float(Binary),
           _ = erlang:binary_to_integer(Binary),
           _ = erlang:binary_to_list(Binary),
           _ = erlang:binary_to_term(Binary),
           _ = erlang:bitstring_to_list(Binary),
           _ = erlang:ceil(Int),
           _ = erlang:crc32(Binary),
           _ = erlang:crc32(Term, Binary),
           _ = erlang:crc32_combine(Term, Term, Term),
           _ = erlang:delete_element(Term, {Term, Term}),
           _ = erlang:element(Term, {Term, Term}),
           _ = erlang:external_size(Term),
           _ = erlang:float(Int),
           _ = erlang:float_to_binary(Float),
           _ = erlang:float_to_list(Float),
           _ = erlang:hd([Term, Term]),
           _ = erlang:insert_element(Term, {Term, Term}, Term),
           _ = erlang:integer_to_binary(Term),
           _ = erlang:integer_to_list(Term),
           _ = erlang:iolist_size(Binary),
           _ = erlang:iolist_to_binary(Binary),
           _ = erlang:iolist_to_iovec(Binary),
           _ = erlang:is_atom(Term),
           _ = erlang:is_binary(Term),
           _ = erlang:is_bitstring(Term),
           _ = erlang:is_boolean(Term),
           _ = erlang:is_float(Term),
           _ = erlang:is_integer(Term),
           _ = erlang:is_list(Term),
           _ = erlang:is_map(Term),
           _ = erlang:is_map_key(Term, #{a => b}),
           _ = erlang:is_number(Term),
           _ = erlang:is_pid(Term),
           _ = erlang:is_record(Term, record),
           _ = erlang:is_reference(Term),
           _ = erlang:is_tuple({Term, Term}),
           _ = erlang:list_to_atom(String),
           _ = erlang:list_to_binary(String),
           _ = erlang:list_to_bitstring(String),
           _ = erlang:list_to_float(String),
           _ = erlang:list_to_integer(String),
           _ = erlang:list_to_pid(String),
           _ = erlang:list_to_tuple(String),
           _ = erlang:make_tuple(Term, Term),
           _ = erlang:max(Int, Int),
           _ = erlang:md5(Binary),
           _ = erlang:md5_final(Binary),
           _ = erlang:md5_init(),
           _ = erlang:md5_update(Binary, Binary),
           _ = erlang:min(Term, Term),
           _ = erlang:phash2(Term),
           _ = erlang:phash2(Term, Term),
           _ = erlang:pid_to_list(Pid),
           _ = erlang:raise(error, Term, []),
           _ = erlang:round(Term),
           _ = erlang:setelement(Term, {Term, Term}, Term),
           _ = erlang:split_binary(Binary, Int),
           _ = erlang:term_to_binary(Term),
           _ = erlang:term_to_iovec(Term),
           _ = erlang:tl([Term, Term]),
           _ = erlang:tuple_size({Term, Term}),
           _ = erlang:tuple_to_list({Term, Term}),
           _ = erlang:binary_part(Binary, 0, 10),
           _ = erlang:bit_size(Binary),
           _ = erlang:byte_size(Binary),
           _ = erlang:error(Term),
           _ = erlang:exit(Term),
           _ = erlang:length(List),
           _ = erlang:map_get(key, Map),
           _ = erlang:map_size(Map),
           _ = erlang:size(Binary),
           _ = erlang:throw(Term)
       end).

%% Non-deterministic BIFs are denied, whether called via the
%% auto-imported name or explicitly through the `erlang' module.
denied_builtin_make_ref_0_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {erlang, make_ref, 0}}},
       _ = make_ref()).

denied_erlang_make_ref_0_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {erlang, make_ref, 0}}},
       _ = erlang:make_ref()).

denied_builtin_node_0_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {node, 0}}},
       _ = node()).

denied_erlang_node_0_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {node, 0}}},
       _ = erlang:node()).

denied_builtin_node_1_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {node, 1}}},
       _ = node(list_to_pid("<0.0.0>"))).

denied_erlang_node_1_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {node, 1}}},
       _ = erlang:node(list_to_pid("<0.0.0>"))).

denied_builtin_nodes_0_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {erlang, nodes, 0}}},
       _ = nodes()).

denied_erlang_nodes_0_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {erlang, nodes, 0}}},
       _ = erlang:nodes()).
%% Cluster-introspection and messaging BIFs are denied in tx funs.
denied_builtin_nodes_1_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {erlang, nodes, 1}}},
       _ = nodes(visible)).

denied_erlang_nodes_1_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {erlang, nodes, 1}}},
       _ = erlang:nodes(visible)).

denied_builtin_self_0_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {self, 0}}},
       _ = self()).

denied_erlang_self_0_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {self, 0}}},
       _ = erlang:self()).

%% The `!' operator is reported with its own reason,
%% `sending_message_denied', unlike `erlang:send/2,3'.
denied_builtin_send_2_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, sending_message_denied},
       list_to_pid("<0.0.0>") ! msg).

denied_erlang_send_2_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {erlang, send, 2}}},
       _ = erlang:send(list_to_pid("<0.0.0>"), msg)).

denied_erlang_send_3_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {erlang, send, 3}}},
       _ = erlang:send(list_to_pid("<0.0.0>"), msg, [nosuspend])).

%% `apply_last' instruction is used when the apply is the last call
%% in the function.
denied_apply_last_test() ->
    self() ! erlang,
    Module = receive Msg -> Msg end,
    ?assertToFunThrow(
       {invalid_tx_fun, dynamic_apply_denied},
       _ = Module:now()).

%% Same, but the apply is not in tail position (`apply' instruction).
denied_apply_test() ->
    self() ! erlang,
    Module = receive Msg -> Msg end,
    ?assertToFunThrow(
       {invalid_tx_fun, dynamic_apply_denied},
       c = hd(Module:tl([[a, b], c]))).

%% Whitelisted stdlib modules must be callable from tx funs.
allowed_dict_api_test() ->
    ?assertStandaloneFun(
       begin
           _ = dict:new()
       end).

allowed_io_lib_format_test() ->
    ?assertStandaloneFun(
       begin
           _ = io_lib:format("", [])
       end).

%% `io' performs real I/O and is denied (unlike the pure `io_lib').
denied_io_api_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {io, format, 1}}},
       begin
           _ = io:format("")
       end).

allowed_lists_api_test() ->
    ?assertStandaloneFun(
       begin
           _ = lists:reverse([])
       end).

%% All log-level functions of `logger' are allowed.
allowed_logger_api_test() ->
    ?assertStandaloneFun(
       begin
           _ = logger:debug(""),
           _ = logger:info(""),
           _ = logger:notice(""),
           _ = logger:warning(""),
           _ = logger:error(""),
           _ = logger:critical(""),
           _ = logger:alert(""),
           _ = logger:emergency("")
       end).
%% Reading the logger configuration is node-local state and is denied,
%% even though the log-level functions themselves are allowed.
denied_logger_get_config_0_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {logger, get_config, 0}}},
       begin
           _ = logger:get_config()
       end).

%% More whitelisted, pure stdlib modules.
allowed_maps_api_test() ->
    ?assertStandaloneFun(
       begin
           _ = maps:keys(#{})
       end).

allowed_orddict_api_test() ->
    ?assertStandaloneFun(
       begin
           _ = orddict:new()
       end).

allowed_ordsets_api_test() ->
    ?assertStandaloneFun(
       begin
           _ = ordsets:new()
       end).

allowed_proplists_api_test() ->
    ?assertStandaloneFun(
       begin
           _ = proplists:get_keys([])
       end).

allowed_sets_api_test() ->
    ?assertStandaloneFun(
       begin
           _ = sets:new()
       end).

allowed_string_api_test() ->
    ?assertStandaloneFun(
       begin
           _ = string:length("")
       end).

allowed_unicode_api_test() ->
    ?assertStandaloneFun(
       begin
           _ = unicode:characters_to_binary("")
       end).

%% Deterministic `re' functions are allowed...
allowed_re_test() ->
    ?assertStandaloneFun(
       begin
           {ok, MP} = re:compile("abcd"),
           _ = re:inspect(MP, namelist),
           _ = re:run("abcd", "ab.*"),
           _ = re:replace("abcd", "ef"),
           _ = re:split("abcab", "a")
       end).

%% ... but `re:version/0' depends on the local runtime and is denied.
denied_re_version_test() ->
    ?assertToFunThrow(
       {invalid_tx_fun, {call_denied, {re, version, 0}}},
       begin
           re:version()
       end).

%% In `rw' mode every fun is extracted to a #standalone_fun{}; denied
%% calls are rejected at extraction time.
when_readwrite_mode_is_true_test() ->
    ?assert(
       is_record(khepri_tx:to_standalone_fun(
                   fun() ->
                           khepri_tx:get([foo])
                   end,
                   rw),
                 standalone_fun)),
    ?assert(
       is_record(khepri_tx:to_standalone_fun(
                   fun() ->
                           khepri_tx:put([foo], khepri_payload:data(value))
                   end,
                   rw),
                 standalone_fun)),
    ?assertThrow(
       {invalid_tx_fun, {call_denied, {self, 0}}},
       khepri_tx:to_standalone_fun(
         fun() ->
                 _ = khepri_tx:get([foo]),
                 self() ! message
         end,
         rw)),
    ?assertThrow(
       {invalid_tx_fun, {call_denied, {self, 0}}},
       khepri_tx:to_standalone_fun(
         fun() ->
                 _ = khepri_tx:put([foo], khepri_payload:data(value)),
                 self() ! message
         end,
         rw)),
    ?assert(
       is_record(khepri_tx:to_standalone_fun(
                   fun mod_used_for_transactions:exported/0,
                   rw),
                 standalone_fun)),
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun dict:new/0,
                     rw),
                   0)),
    Fun = fun() -> khepri_tx:delete([foo]) end,
    ?assert(
       is_record(khepri_tx:to_standalone_fun(
                   fun() -> Fun() end,
                   rw),
                 standalone_fun)).

%% In `ro' mode funs are kept as regular funs; nothing is denied at
%% extraction time (writes will abort at execution time instead).
when_readwrite_mode_is_false_test() ->
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun() ->
                             khepri_tx:get([foo])
                     end,
                     ro),
                   0)),
    %% In the following case, `to_standalone()' works, but the transaction
    %% will abort once executed.
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun() ->
                             khepri_tx:put(
                               [foo], khepri_payload:data(value))
                     end,
                     ro),
                   0)),
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun() ->
                             _ = khepri_tx:get([foo]),
                             self() ! message
                     end,
                     ro),
                   0)),
    %% In the following case, `to_standalone()' works, but the transaction
    %% will abort once executed.
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun() ->
                             _ = khepri_tx:put(
                                   [foo], khepri_payload:data(value)),
                             self() ! message
                     end,
                     ro),
                   0)),
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun mod_used_for_transactions:exported/0,
                     ro),
                   0)),
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun dict:new/0,
                     ro),
                   0)),
    Fun = fun() -> khepri_tx:delete([foo]) end,
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun() -> Fun() end,
                     ro),
                   0)).

%% In `auto' mode, extraction only happens when the fun writes to the
%% store; read-only funs stay regular funs.
when_readwrite_mode_is_auto_test() ->
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun() ->
                             khepri_tx:get([foo])
                     end,
                     auto),
                   0)),
    ?assert(
       is_record(khepri_tx:to_standalone_fun(
                   fun() ->
                           khepri_tx:put([foo], khepri_payload:data(value))
                   end,
                   auto),
                 standalone_fun)),
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun() ->
                             _ = khepri_tx:get([foo]),
                             self() ! message
                     end,
                     auto),
                   0)),
    ?assertThrow(
       {invalid_tx_fun, {call_denied, {self, 0}}},
       khepri_tx:to_standalone_fun(
         fun() ->
                 _ = khepri_tx:put([foo], khepri_payload:data(value)),
                 self() ! message
         end,
         auto)),
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun mod_used_for_transactions:exported/0,
                     auto),
                   0)),
    ?assert(
       is_function(khepri_tx:to_standalone_fun(
                     fun dict:new/0,
                     auto),
                   0)),
    Fun = fun() -> khepri_tx:delete([foo]) end,
    ?assert(
       is_record(khepri_tx:to_standalone_fun(
                   fun() -> Fun() end,
                   auto),
                 standalone_fun)).

%% Helpers producing values captured in a fun's environment; defined as
%% functions so the compiler cannot inline the terms into the fun.
make_list() -> [a, b].
make_map() -> #{a => b}.
make_tuple() -> {a, b}.
make_binary() -> <<"ab">>.

%% One fun per arity from 0 to 10, to exercise environment capture and
%% `call_fun' for every supported arity.
make_fun(0) -> fun() -> result end;
make_fun(1) -> fun(_) -> result end;
make_fun(2) -> fun(_, _) -> result end;
make_fun(3) -> fun(_, _, _) -> result end;
make_fun(4) -> fun(_, _, _, _) -> result end;
make_fun(5) -> fun(_, _, _, _, _) -> result end;
make_fun(6) -> fun(_, _, _, _, _, _) -> result end;
make_fun(7) -> fun(_, _, _, _, _, _, _) -> result end;
make_fun(8) -> fun(_, _, _, _, _, _, _, _) -> result end;
make_fun(9) -> fun(_, _, _, _, _, _, _, _, _) -> result end;
make_fun(10) -> fun(_, _, _, _, _, _, _, _, _, _) -> result end.

%% Terms captured from the enclosing scope must end up in the
%% #standalone_fun{} environment and be restored on exec.
list_in_fun_env_test() ->
    List = make_list(),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> List end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(List, khepri_fun:exec(StandaloneFun, [])).

map_in_fun_env_test() ->
    Map = make_map(),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Map end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(Map, khepri_fun:exec(StandaloneFun, [])).

tuple_in_fun_env_test() ->
    Tuple = make_tuple(),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Tuple end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(Tuple, khepri_fun:exec(StandaloneFun, [])).
%% A binary captured in the fun environment must survive extraction.
binary_in_fun_env_test() ->
    Binary = make_binary(),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Binary end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(Binary, khepri_fun:exec(StandaloneFun, [])).

%% Funs of arity 0..4 captured in the environment; each test calls the
%% captured fun inside the extracted fun.
fun0_in_fun_env_test() ->
    Fun = make_fun(0),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Fun() end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

fun1_in_fun_env_test() ->
    Fun = make_fun(1),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Fun(1) end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

%% The round-trip through the mailbox keeps the fun opaque to the
%% compiler.
fun2_in_fun_env_test() ->
    Fun = make_fun(2),
    self() ! Fun,
    receive Fun -> ok end,
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Fun(1, 2) end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

fun3_in_fun_env_test() ->
    Fun = make_fun(3),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Fun(1, 2, 3) end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

fun4_in_fun_env_test() ->
    Fun = make_fun(4),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Fun(1, 2, 3, 4) end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).
%% Funs of arity 5..10 captured in the environment (continuation of the
%% per-arity coverage above).
fun5_in_fun_env_test() ->
    Fun = make_fun(5),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Fun(1, 2, 3, 4, 5) end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

fun6_in_fun_env_test() ->
    Fun = make_fun(6),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Fun(1, 2, 3, 4, 5, 6) end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

fun7_in_fun_env_test() ->
    Fun = make_fun(7),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Fun(1, 2, 3, 4, 5, 6, 7) end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

fun8_in_fun_env_test() ->
    Fun = make_fun(8),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Fun(1, 2, 3, 4, 5, 6, 7, 8) end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

fun9_in_fun_env_test() ->
    Fun = make_fun(9),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Fun(1, 2, 3, 4, 5, 6, 7, 8, 9) end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

fun10_in_fun_env_test() ->
    Fun = make_fun(10),
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> Fun(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertNotEqual([], StandaloneFun#standalone_fun.env),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

%% In `ro' mode a regular fun is returned and `exec' must call it
%% directly.
exec_with_regular_fun_test() ->
    Fun = khepri_tx:to_standalone_fun(
            fun() -> result end,
            ro),
    ?assert(is_function(Fun)),
    ?assertEqual(result, khepri_fun:exec(Fun, [])).
exec_standalone_fun_multiple_times_test() ->
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> result end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

exec_with_standalone_fun_test() ->
    StandaloneFun = khepri_tx:to_standalone_fun(
                      fun() -> result end,
                      rw),
    ?assertMatch(#standalone_fun{}, StandaloneFun),
    %% This is to make sure it still works after the generated module was
    %% loaded once.
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])),
    ?assertEqual(result, khepri_fun:exec(StandaloneFun, [])).

%% Extraction of an exported function whose clauses match on a record;
%% the extracted code must behave the same for matching records,
%% non-matching records and non-record terms.
record_matching_fun_clause_test() ->
    StandaloneFun = khepri_fun:to_standalone_fun(
                      fun mod_used_for_transactions:outer_function/2,
                      #{}),
    %% Dialyzer doesn't like that we ?assertMatch(#standalone_fun{},
    %% StandaloneFun), I don't know why... Let's verify we don't have a
    %% function object instead.
    ?assertNot(is_function(StandaloneFun)),
    MyRecord1 = mod_used_for_transactions:make_record(hash_term),
    ?assertEqual(true, khepri_fun:exec(StandaloneFun, [MyRecord1, a])),
    MyRecord2 = mod_used_for_transactions:make_record(non_existing),
    ?assertEqual(false, khepri_fun:exec(StandaloneFun, [MyRecord2, a])),
    ?assertEqual(false, khepri_fun:exec(StandaloneFun, [not_my_record, a])),
    ok.

%% Calling an unexported function of another module must be rejected
%% during extraction.
extracting_unexported_external_function_test() ->
    ?assertThrow(
       {call_to_unexported_function,
        {mod_used_for_transactions, inner_function, 2}},
       khepri_fun:to_standalone_fun(
         fun mod_used_for_transactions:inner_function/2,
         #{})).
test/tx_funs.erl
0.649579
0.441974
tx_funs.erl
starcoder
%% -------------------------------------------------------------------
%%
%% riaknostic - automated diagnostic tools for Riak
%%
%% Copyright (c) 2011 Basho Technologies, Inc.  All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License.  You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied.  See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------

%% @doc Functions that help diagnostics interact with the local Riak
%% node or other members of the cluster.
-module(riaknostic_node).

-export([can_connect/0,
         can_connect_all/0,
         stats/0,
         pid/0,
         local_command/2,
         local_command/3,
         local_command/4,
         cluster_command/2,
         cluster_command/3,
         cluster_command/4
        ]).

%% @doc Calls the given 0-arity module and function on the local Riak
%% node and returns the result of that call.
%% @equiv local_command(Module, Function, [])
%% @see can_connect/0.
-spec local_command(Module::atom(), Function::atom()) -> term().
local_command(Module, Function) ->
    local_command(Module, Function, []).

%% @doc Calls the given module and function with the given arguments
%% on the local Riak node and returns the result of that call.
%% Uses the default 5000 ms RPC timeout.
%% @equiv local_command(Module, Function, Args, 5000)
%% @see can_connect/0
-spec local_command(Module::atom(), Function::atom(), Args::[term()]) -> term().
local_command(Module, Function, Args) ->
    local_command(Module, Function, Args, 5000).
%% @doc Calls the given module and function with the given arguments
%% on the local Riak node and returns the result of that call,
%% returning an error if the call doesn't complete within the given
%% timeout. The target node name comes from `nodename/0' (derived from
%% the riaknostic configuration), so the caller should ensure a
%% connection exists first.
%% @equiv rpc:call(RiakNodeName, Module, Function, Args, Timeout)
%% @see can_connect/0
-spec local_command(Module::atom(), Function::atom(), Args::[term()], Timeout::integer()) -> term().
local_command(Module, Function, Args, Timeout) ->
    riaknostic_util:log(debug, "Local RPC: ~p:~p(~p) [~p]", [Module, Function, Args, Timeout]),
    rpc:call(nodename(), Module, Function, Args, Timeout).

%% @doc Calls the given 0-arity module and function on all members of
%% the Riak cluster.
%% @equiv cluster_command(Module, Function, [])
%% @see can_connect/0
-spec cluster_command(Module::atom(), Function::atom()) -> term().
cluster_command(Module, Function) ->
    cluster_command(Module, Function, []).

%% @doc Calls the given module and function with the given arguments
%% on all members of the Riak cluster.
%% @equiv cluster_command(Module, Function, Args, 5000)
%% @see can_connect/0
-spec cluster_command(Module::atom(), Function::atom(), Args::[term()]) -> term().
cluster_command(Module, Function, Args) ->
    cluster_command(Module, Function, Args, 5000).

%% @doc Calls the given module and function with the given arguments
%% on all members for the Riak cluster, returning an error if the call
%% doesn't complete within the given timeout. The member list is taken
%% from the `ring_members' entry of the cached cluster statistics.
%% @equiv rpc:multicall(RiakClusterMembers, Module, Function, Args, Timeout)
%% @see can_connect/0
-spec cluster_command(Module::atom(), Function::atom(), Args::[term()], Timeout::integer()) -> term().
cluster_command(Module, Function, Args, Timeout) ->
    riaknostic_util:log(debug, "Cluster RPC: ~p:~p(~p) [~p]", [Module, Function, Args, Timeout]),
    Stats = stats(),
    {ring_members, RingMembers} = lists:keyfind(ring_members, 1, Stats),
    rpc:multicall(RingMembers, Module, Function, Args, Timeout).
%% @doc Retrieves the operating system's process ID of the local Riak
%% node.
%% @equiv local_command(os, getpid)
%% @see can_connect/0
-spec pid() -> string().
pid() ->
    local_command(os, getpid).

%% @doc Attempts to connect to the local Riak node if it is not
%% already, and returns whether connection was successful. A previous
%% failed attempt (recorded in the `connect_failed' application env)
%% short-circuits to `false'.
-spec can_connect() -> true | false.
can_connect() ->
    case is_connected() of
        true -> true;
        false ->
            riaknostic_util:log(debug, "Not connected to the local Riak node, trying to connect. alive:~p connect_failed:~p", [is_alive(), connect_failed()]),
            maybe_connect()
    end.

%% @doc Returns `true' only if we are connected to the local node AND
%% the nodes-connected check reports no disconnected cluster members.
-spec can_connect_all() -> true | false.
can_connect_all() ->
    case is_connected() of
        true ->
            case riaknostic_check_nodes_connected:check() of
                [] -> true;
                _ -> false
            end;
        false -> false
    end.

%% @doc Fetches or returns previously fetched Riak statistics.
%% The result is cached in the `local_stats' application env.
%% @see can_connect/0
-spec stats() -> [proplists:property()].
stats() ->
    case has_stats() of
        {ok, Stats} ->
            Stats;
        _ ->
            fetch_stats()
    end.

%% Private functions

%% We are "connected" when distribution is up and no previous connect
%% attempt has been recorded as failed.
is_connected() ->
    is_alive() andalso connect_failed() =/= true.

%% Only attempt a connection if we have not already failed once.
maybe_connect() ->
    case connect_failed() of
        true -> false;
        _ -> try_connect()
    end.

%% Starts distribution if needed, then hidden-connects and pings the
%% target node. The outcome is memoized in the `connect_failed' env so
%% later calls do not retry.
try_connect() ->
    TargetNode = nodename(),
    case is_alive() of
        true -> ok;
        _ -> start_net()
    end,
    case {net_kernel:hidden_connect_node(TargetNode), net_adm:ping(TargetNode)} of
        {true, pong} ->
            application:set_env(riaknostic, connect_failed, false),
            riaknostic_util:log(debug, "Connected to local Riak node ~p.", [TargetNode]),
            true;
        _ ->
            application:set_env(riaknostic, connect_failed, true),
            lager:warning("Could not connect to the local Riak node ~p, some checks will not run.", [TargetNode]),
            false
    end.

%% Three-valued: true (previous attempt failed), false (previous
%% attempt succeeded), undefined (no attempt recorded yet).
connect_failed() ->
    case application:get_env(riaknostic, connect_failed) of
        {ok, true} -> true;
        undefined -> undefined;
        _ -> false
    end.
%% Starts distributed Erlang with a node name derived from the Riak
%% node's name plus a "_diag<ospid>" suffix, and sets the configured
%% cookie so we can talk to the Riak node.
start_net() ->
    riaknostic_util:log(debug, "Starting distributed Erlang."),
    {Type, RiakName} = riaknostic_config:node_name(),
    ThisNode = append_node_suffix(RiakName, "_diag"),
    {ok, _} = net_kernel:start([ThisNode, Type]),
    erlang:set_cookie(node(), riaknostic_config:cookie()).

%% Resolves the configured Riak node name to a full `name@host' atom;
%% if no host part is configured, our own host is appended.
nodename() ->
    {_, Name} = riaknostic_config:node_name(),
    case string:tokens(Name, "@") of
        [_Node, _Host] ->
            list_to_atom(Name);
        [Node] ->
            [_, Host] = string:tokens(atom_to_list(node()), "@"),
            list_to_atom(lists:concat([Node, "@", Host]))
    end.

%% Inserts Suffix plus our OS pid between the node part and the
%% (optional) host part, keeping the result unique per riaknostic run.
append_node_suffix(Name, Suffix) ->
    case string:tokens(Name, "@") of
        [Node, Host] ->
            list_to_atom(lists:concat([Node, Suffix, os:getpid(), "@", Host]));
        [Node] ->
            list_to_atom(lists:concat([Node, Suffix, os:getpid()]))
    end.

%% Returns {ok, Stats} when statistics were previously cached in the
%% application env, `false' otherwise.
has_stats() ->
    case application:get_env(riaknostic, local_stats) of
        {ok, Stats} ->
            {ok, Stats};
        undefined ->
            false
    end.

%% Fetches riak_kv_status statistics over RPC and caches a non-empty
%% result in the application env for later `stats/0' calls.
fetch_stats() ->
    riaknostic_util:log(debug, "Fetching local riak_kv_status."),
    case local_command(riak_kv_status, statistics) of
        [] -> [];
        PList ->
            application:set_env(riaknostic, local_stats, PList),
            PList
    end.
deps/riaknostic/src/riaknostic_node.erl
0.612889
0.475484
riaknostic_node.erl
starcoder
-module(knuth).

-export([knuth/1]).

-export_type([
    group/0,
    constraint/0,
    exact_cover_request/0,
    treatment_fun/0,
    evaluation/0]).

-type group() :: list().
-type constraint() :: fun((list()) -> boolean()).
-type exact_cover_request() :: {list(group()), list(constraint())}.
-type treatment_fun() :: fun((list(group())) -> term()).
%% An evaluation pairs a group with the constraints it satisfies.
-type evaluation() :: {group(), list(constraint())}.

%% @doc Solve an exact-cover problem (Knuth's Algorithm X).
%%
%% Takes {Groups, Constraints} and returns one `{result, Groups}' tuple
%% per exact cover found, i.e. per subset of Groups whose satisfied
%% constraints cover every constraint exactly once. Returns `[]' when
%% no cover exists.
%%
%% FIX: the previous `-spec' claimed this returns `ok', which
%% contradicted the actual return value (a list of results) and broke
%% Dialyzer analysis of callers.
-spec knuth(exact_cover_request()) -> [{result, [group()]}].
knuth({Groups, Cons}) ->
    knuth([], evaluate(Groups, Cons), Cons).

%% Recursive search. `Acc' holds the evaluations chosen so far,
%% `Evaluations' the remaining candidate groups, `Cons' the constraints
%% still to be covered.
%%
%% FIX: `Acc' was previously typed `list(group())' although it holds
%% evaluation tuples, and the dead-end branch returned the atom `ok';
%% had that branch ever been reached, `ResultAcc ++ ok' in the fold
%% below would have produced an improper list. It now returns `[]',
%% the identity element for `++'.
-spec knuth(list(evaluation()), list(evaluation()), list(constraint())) ->
          [{result, [group()]}].
knuth(Acc, Evaluations, Cons) ->
    case is_cover(Acc, Cons) of
        true ->
            [{result, [G || {G, _} <- Acc]}];
        false ->
            % PC = PassedConstraint
            case least_passed_constraints(Evaluations, Cons) of
                [] ->
                    %% Dead end: no constraint can be covered from here.
                    [];
                [LeastPC | _] ->
                    %% Branch on every group able to cover the least
                    %% covered constraint (Knuth's column heuristic).
                    FilteredGroups =
                        lists:filter(fun({_, Cs}) ->
                                             lists:member(LeastPC, Cs)
                                     end, Evaluations),
                    lists:foldl(
                      fun({Group, PCs}, ResultAcc) ->
                              %% Drop every group sharing a constraint
                              %% with the chosen one (exactness).
                              NextEvaluations =
                                  lists:filter(
                                    fun({_, Cs}) ->
                                            not lists:any(
                                                  fun(PC) ->
                                                          lists:member(PC, Cs)
                                                  end, PCs)
                                    end, Evaluations),
                              NextCons = remove(PCs, Cons),
                              ResultAcc ++ knuth([{Group, PCs} | Acc],
                                                 NextEvaluations,
                                                 NextCons)
                      end, [], FilteredGroups)
            end
    end.

%% Constraints covered by the fewest remaining groups; ties all
%% returned. Requires `Constraints' to be non-empty (guaranteed by the
%% caller: `is_cover/2' returns true when no constraint remains).
-spec least_passed_constraints(list(evaluation()), list(constraint())) ->
          list(constraint()).
least_passed_constraints(Evaluations, Constraints) ->
    PassedConstraints = lists:flatten([Cs || {_, Cs} <- Evaluations]),
    Zip = lists:map(fun(C) ->
                            {C, frequency(PassedConstraints, C)}
                    end, Constraints),
    Min = lists:min([X || {_, X} <- Zip]),
    [C || {C, Freq} <- Zip, Freq == Min].

%% Number of occurrences of Target in L.
-spec frequency(list(), term()) -> integer().
frequency(L, Target) ->
    length([E || E <- L, E == Target]).

%% L minus every element of X (multiset difference by membership).
-spec remove(list(), list()) -> list().
remove(X, L) ->
    [Y || Y <- L, not lists:member(Y, X)].

%% True when the constraints satisfied by the accumulated choices
%% include every constraint in Cons.
-spec is_cover(list(evaluation()), list(constraint())) -> boolean().
is_cover(Acc, Cons) ->
    L = lists:flatten([C || {_, C} <- Acc]),
    lists:all(fun(C) -> lists:member(C, L) end, Cons).

%% Pair each group with the constraints it satisfies.
-spec evaluate(list(group()), list(constraint())) -> list(evaluation()).
evaluate(Gs, Cons) ->
    lists:map(fun(G) ->
                      {G, lists:filter(fun(C) -> C(G) end, Cons)}
              end, Gs).
src/knuth.erl
0.552298
0.429071
knuth.erl
starcoder
%%% @doc
%%% BLOCKTYPE
%%% Floating point value to Seven Segment Variable Digits Decoder
%%% DESCRIPTION
%%% Convert an input floating point number to a set of bytes.
%%% one per digit, indicating which segments of a
%%% seven segment display digit should be turned on.
%%% LINKS
%%% @end

-module(lblx_float_to_7seg).

-author("<NAME>").

-include("../block_state.hrl").

%% ====================================================================
%% API functions
%% ====================================================================
-export([groups/0, version/0]).
-export([create/2, create/4, create/5, upgrade/1, initialize/1, execute/2, delete/1]).

%% Block type category used for grouping in UIs / catalogs.
groups() -> [conversion].

%% Version of this block type implementation; compared against the
%% version stored in a block's config by upgrade/1.
version() -> "0.2.0".

%% Merge the block type specific, Config, Input, and Output attributes
%% with the common Config, Input, and Output attributes, that all block types have

-spec default_configs(BlockName :: block_name(),
                      Description :: string()) -> config_attribs().

default_configs(BlockName, Description) ->
  attrib_utils:merge_attribute_lists(
    block_common:configs(BlockName, ?MODULE, version(), Description),
    [
      {num_of_digits, {4}}, %| int | 4 | 1..99 |
      {pos_precision, {2}}, %| int | 2 | 0..num of digits |
      {neg_precision, {1}} %| int | 1 | 1..num of digits |
    ]).

-spec default_inputs() -> input_attribs().

default_inputs() ->
  attrib_utils:merge_attribute_lists(
    block_common:inputs(),
    [
      {input, {empty, {empty}}} %| float | empty | +/- max float |
    ]).

-spec default_outputs() -> output_attribs().

default_outputs() ->
  attrib_utils:merge_attribute_lists(
    block_common:outputs(),
    [
      {digits, [{null, []}]}, %| byte arrary | null | 0..FFh |
      {pos_overflow, {null, []}}, %| bool | null | true, false |
      {neg_overflow, {null, []}} %| bool | null | true, false |
    ]).

%%
%% Create a set of block attributes for this block type.
%% Init attributes are used to override the default attribute values
%% and to add attributes to the lists of default attributes
%%
-spec create(BlockName :: block_name(),
             Description :: string()) -> block_defn().

create(BlockName, Description) ->
  create(BlockName, Description, [], [], []).

-spec create(BlockName :: block_name(),
             Description :: string(),
             InitConfig :: config_attribs(),
             InitInputs :: input_attribs()) -> block_defn().

create(BlockName, Description, InitConfig, InitInputs) ->
  create(BlockName, Description, InitConfig, InitInputs, []).

-spec create(BlockName :: block_name(),
             Description :: string(),
             InitConfig :: config_attribs(),
             InitInputs :: input_attribs(),
             InitOutputs :: list()) -> block_defn().

create(BlockName, Description, InitConfig, InitInputs, InitOutputs) ->
  % Update Default Config, Input, Output, and Private attribute values
  % with the initial values passed into this function.
  %
  % If any of the initial attributes do not already exist in the
  % default attribute lists, merge_attribute_lists() will create them.

  Config = attrib_utils:merge_attribute_lists(default_configs(BlockName, Description), InitConfig),
  Inputs = attrib_utils:merge_attribute_lists(default_inputs(), InitInputs),
  Outputs = attrib_utils:merge_attribute_lists(default_outputs(), InitOutputs),

  % This is the block definition,
  {Config, Inputs, Outputs}.

%%
%% Upgrade block attribute values, when block code and block data versions are different
%%
-spec upgrade(BlockDefn :: block_defn()) -> {ok, block_defn()} | {error, atom()}.
% Rewrite the stored config 'version' attribute to this module's version,
% logging the transition (or the failure reason) either way.
upgrade({Config, Inputs, Outputs}) ->
  ModuleVer = version(),
  {BlockName, BlockModule, ConfigVer} = config_utils:name_module_version(Config),
  BlockType = type_utils:type_name(BlockModule),

  case attrib_utils:set_value(Config, version, version()) of
    {ok, UpdConfig} ->
      m_logger:info(block_type_upgraded_from_ver_to,
            [BlockName, BlockType, ConfigVer, ModuleVer]),
      {ok, {UpdConfig, Inputs, Outputs}};

    {error, Reason} ->
      m_logger:error(err_upgrading_block_type_from_ver_to,
            [Reason, BlockName, BlockType, ConfigVer, ModuleVer]),
      {error, Reason}
  end.

%%
%% Initialize block values
%% Perform any setup here as needed before starting execution
%%
-spec initialize(BlockState :: block_state()) -> block_state().

% Validate the three integer config values and size the digits output
% array.  On any config error, Value/Status come from log_error and the
% block status is set accordingly; otherwise Status is the atom
% 'initialed' (sic — this atom spelling is part of the runtime protocol,
% do not "fix" it without auditing consumers).
initialize({Config, Inputs, Outputs, Private}) ->

  % Check the config values
  case config_utils:get_integer_range(Config, num_of_digits, 1, 99) of
    {ok, NumOfDigits} ->
      % Create a digit output for each digit
      Outputs1 = output_utils:resize_attribute_array_value(Outputs,
                                       digits, NumOfDigits, {null, []}),

      case config_utils:get_integer_range(Config, pos_precision, 0, NumOfDigits) of
        {ok, _PosPrecision} ->

          case config_utils:get_integer_range(Config, neg_precision, 0, NumOfDigits) of
            {ok, _NegPrecision} ->
              Value = null, Status = initialed;

            {error, Reason} ->
              {Value, Status} = config_utils:log_error(Config, neg_precision, Reason)
          end;

        {error, Reason} ->
          {Value, Status} = config_utils:log_error(Config, pos_precision, Reason)
      end;

    {error, Reason} ->
      % Keep the un-resized outputs when num_of_digits itself is invalid.
      Outputs1 = Outputs,
      {Value, Status} = config_utils:log_error(Config, num_of_digits, Reason)
  end,

  Outputs2 = output_utils:set_value_status(Outputs1, Value, Status),

  % This is the block state
  {Config, Inputs, Outputs2, Private}.

%%
%% Execute the block specific functionality
%%
-spec execute(BlockState :: block_state(), ExecMethod :: exec_method()) -> block_state().
% When disabled, null out every output and mark the block disabled.
execute({Config, Inputs, Outputs, Private}, disable) ->
  Outputs1 = output_utils:update_all_outputs(Outputs, null, disabled),
  {Config, Inputs, Outputs1, Private};

% Normal execution: format the float input to a string at the configured
% precision, convert each character to a 7-segment byte, OR-in the decimal
% point bit (0x80), and raise pos/neg overflow when the value won't fit.
execute({Config, Inputs, Outputs, Private}, _ExecMethod) ->

  % Config values are validated in initialize function, just read them here
  {ok, NumOfDigits} = attrib_utils:get_value(Config, num_of_digits),

  case input_utils:get_float(Inputs, input) of
    {ok, null} ->
      % No input: blank every digit, no overflow indication.
      Value = null, Status = normal,
      Digits7Seg = lists:duplicate(NumOfDigits, null),
      PosOverflow = null,
      NegOverflow = null;

    {ok, InValue} ->
      % Positive and negative values use separate precision configs
      % (a negative number spends one digit on the minus sign).
      case InValue >= 0.0 of
        true ->
          {ok, Precision} = attrib_utils:get_value(Config, pos_precision),
          IsPositive = true;
        false ->
          {ok, Precision} = attrib_utils:get_value(Config, neg_precision),
          IsPositive = false
      end,

      % Just set the output value equal to the string of the input value
      % Value is not used to drive 7-Segment display
      Value = float_to_list(InValue, [{decimals, Precision}]),
      Status = normal,

      case check_length(Value, NumOfDigits) of
        true ->
          % Remove the decimal point, and Pad out the string to the size of the display
          InValueStr = unicode:characters_to_nfc_list(string:pad(lists:delete($., Value), NumOfDigits, leading)),

          Digits7SegNoDecPnt = lists:map(fun(Digit) -> block_utils:char_to_segments(Digit, false) end, InValueStr),

          if (Precision > 0) ->
            % Turn on the decimal-point segment (bit 0x80) of the digit
            % just left of the implied decimal point.
            DecPntDigitIndex = NumOfDigits - Precision,
            DigitWithDecPnt = lists:nth(DecPntDigitIndex, Digits7SegNoDecPnt) bor 16#80,
            Digits7Seg = list_replace(Digits7SegNoDecPnt, DigitWithDecPnt, DecPntDigitIndex);

          true ->
            Digits7Seg = Digits7SegNoDecPnt
          end,
          PosOverflow = false,
          NegOverflow = false;

        false ->
          % number too big: show a dash on every digit and flag
          % the overflow direction.
          Overflow = block_utils:char_to_segments($-, false),
          Digits7Seg = lists:duplicate(NumOfDigits, Overflow),
          if IsPositive ->
            PosOverflow = true,
            NegOverflow = false;
          true ->
            PosOverflow = false,
            NegOverflow = true
          end
      end;

    {error, Reason} ->
      {Value, Status} = input_utils:log_error(Config, input, Reason),
      Digits7Seg = lists:duplicate(NumOfDigits, null),
      PosOverflow = null,
      NegOverflow = null
  end,

  {ok, Outputs1} = attrib_utils:set_value(Outputs, pos_overflow, PosOverflow),
  {ok, Outputs2} = attrib_utils:set_value(Outputs1, neg_overflow, NegOverflow),
  Outputs3 = output_utils:set_array_values(Outputs2, digits, Digits7Seg),
  Outputs4 = output_utils:set_value_status(Outputs3, Value, Status),

  % Return updated block state
  {Config, Inputs, Outputs4, Private}.

%%
%% Delete the block
%%
-spec delete(BlockState :: block_state()) -> block_defn().

% Drop the Private element of the state, returning only the definition.
delete({Config, Inputs, Outputs, _Private}) ->
  {Config, Inputs, Outputs}.

%% ====================================================================
%% Internal functions
%% ====================================================================

%
% Check if if value fits into the display
%
-spec check_length(InValueStr :: string(), NumOfDigits :: integer()) -> boolean().

check_length(InValueStr, NumOfDigits) ->
  case lists:member($., InValueStr) of
    true ->
      % don't count decimal point when determining if digits fit into display
      length(InValueStr) =< (NumOfDigits + 1);
    false ->
      % No decimal point in string
      length(InValueStr) =< NumOfDigits
  end.

%
% Replace the element at Position in the List, with Element
% Lists are indexed from 1
%
-spec list_replace(List :: list(), Element :: term(), Position :: pos_integer()) -> list().

list_replace(List, Element, Position) ->
  lists:sublist(List, Position - 1) ++ [Element] ++ lists:nthtail(Position, List).

%% ====================================================================
%% Tests
%% ====================================================================

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").

-include("block_io_test_gen.hrl").
test_sets() -> [ % Test bad config inputs {[{num_of_digits, -1}], [], [{status, config_err}, {value, null}, {pos_overflow, null}, {neg_overflow, null}]}, {[{num_of_digits, 4}, {pos_precision, -1}], [], [{status, config_err}, {value, null}, {pos_overflow, null}, {neg_overflow, null}]}, {[{pos_precision, 1}, {neg_precision, 5}], [], [{status, config_err}, {value, null}, {pos_overflow, null}, {neg_overflow, null}]}, % Test bad inputs {[{neg_precision, 1}], [{input, "bad"}], [{status, input_err}, {value, null}, {pos_overflow, null}, {neg_overflow, null}]}, {[{pos_precision, 0}], [{input, 8.0}], [{status, normal}, {value, "8"}, {{digits, 1}, 16#00}, {{digits, 2}, 16#00}, {{digits, 3}, 16#00}, {{digits, 4}, 16#7F}, {pos_overflow, false}, {neg_overflow, false}]}, {[{input, 8888.0}], [{status, normal}, {value, "8888"}, {{digits, 1}, 16#7F}, {{digits, 2}, 16#7F}, {{digits, 3}, 16#7F}, {{digits, 4}, 16#7F}, {pos_overflow, false}, {neg_overflow, false}]}, {[{pos_precision, 1}], [], [{status, normal}, {value, "8888.0"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#40}, {{digits, 3}, 16#40}, {{digits, 4}, 16#40}, {pos_overflow, true}, {neg_overflow, false}]}, {[{input, 888.8}], [{status, normal}, {value, "888.8"}, {{digits, 1}, 16#7F}, {{digits, 2}, 16#7F}, {{digits, 3}, 16#FF}, {{digits, 4}, 16#7F}, {pos_overflow, false}, {neg_overflow, false}]}, {[{pos_precision, 2}], [], [{status, normal}, {value, "888.80"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#40}, {{digits, 3}, 16#40}, {{digits, 4}, 16#40}, {pos_overflow, true}, {neg_overflow, false}]}, {[{input, 88.88}], [{status, normal}, {value, "88.88"}, {{digits, 1}, 16#7F}, {{digits, 2}, 16#FF}, {{digits, 3}, 16#7F}, {{digits, 4}, 16#7F}, {pos_overflow, false}, {neg_overflow, false}]}, {[{pos_precision, 3}], [], [{status, normal}, {value, "88.880"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#40}, {{digits, 3}, 16#40}, {{digits, 4}, 16#40}, {pos_overflow, true}, {neg_overflow, false}]}, {[{neg_precision, 0}], [{input, -8.0}], [{status, 
normal}, {value, "-8"}, {{digits, 1}, 16#00}, {{digits, 2}, 16#00}, {{digits, 3}, 16#40}, {{digits, 4}, 16#7F}, {pos_overflow, false}, {neg_overflow, false}]}, {[{input, -888.0}], [{status, normal}, {value, "-888"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#7F}, {{digits, 3}, 16#7F}, {{digits, 4}, 16#7F}, {pos_overflow, false}, {neg_overflow, false}]}, {[{neg_precision, 1}], [], [{status, normal}, {value, "-888.0"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#40}, {{digits, 3}, 16#40}, {{digits, 4}, 16#40}, {pos_overflow, false}, {neg_overflow, true}]}, {[{input, -88.8}], [{status, normal}, {value, "-88.8"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#7F}, {{digits, 3}, 16#FF}, {{digits, 4}, 16#7F}, {pos_overflow, false}, {neg_overflow, false}]}, {[{neg_precision, 2}], [], [{status, normal}, {value, "-88.80"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#40}, {{digits, 3}, 16#40}, {{digits, 4}, 16#40}, {pos_overflow, false}, {neg_overflow, true}]}, {[{input, -8.88}], [{status, normal}, {value, "-8.88"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#FF}, {{digits, 3}, 16#7F}, {{digits, 4}, 16#7F}, {pos_overflow, false}, {neg_overflow, false}]}, {[{neg_precision, 3}], [], [{status, normal}, {value, "-8.880"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#40}, {{digits, 3}, 16#40}, {{digits, 4}, 16#40}, {pos_overflow, false}, {neg_overflow, true}]}, {[{pos_precision, 4}], [{input, 0.88888}], [{status, normal}, {value, "0.8889"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#40}, {{digits, 3}, 16#40}, {{digits, 4}, 16#40}, {pos_overflow, true}, {neg_overflow, false}]}, {[{pos_precision, 3}], [], [{status, normal}, {value, "0.889"}, {{digits, 1}, 16#BF}, {{digits, 2}, 16#7F}, {{digits, 3}, 16#7f}, {{digits, 4}, 16#6F}, {pos_overflow, false}, {neg_overflow, false}]}, {[{neg_precision, 3}], [{input, -0.88888}], [{status, normal}, {value, "-0.889"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#40}, {{digits, 3}, 16#40}, {{digits, 4}, 16#40}, {pos_overflow, false}, {neg_overflow, true}]}, {[{neg_precision, 2}], [], 
[{status, normal}, {value, "-0.89"}, {{digits, 1}, 16#40}, {{digits, 2}, 16#BF}, {{digits, 3}, 16#7F}, {{digits, 4}, 16#6F}, {pos_overflow, false}, {neg_overflow, false}]} ]. -endif.
src/block_types/lblx_float_to_7seg.erl
0.502686
0.485844
lblx_float_to_7seg.erl
starcoder
-module(primality).

%% API exports
-export([ is_prime/1, is_prime/2 ]).

%% Type exports
-export_type([ is_prime_option/0, is_prime_options/0 ]).

%%====================================================================
%% Internal macros
%%====================================================================

% Key under which the caller supplies the desired certainty in Options.
-define(CERTAINTY, certainty).

%%====================================================================
%% Types
%%====================================================================

-type is_prime_option() :: {'mode', 'probabilistic' | 'certified'}
                         | {'certainty', non_neg_integer()}.
-type is_prime_options() :: [is_prime_option()].

%%====================================================================
%% API functions
%%====================================================================

%% @equiv is_prime(N, [])
%% @doc Checks whether the given integer N is a prime with the default options.
-spec is_prime(N :: non_neg_integer()) -> boolean().
is_prime(N) ->
  is_prime(N, []).

%% @doc Checks whether the given integer N is a prime with the given options.
%%
%% Available options are:
%% <dl>
%% <dt>`mode'</dt>
%% <dd>Whether to allow probabilistic errors (`probabilistic') or not (`certified'). The default is `probabilistic'.</dd>
%% <dt>`certainty'</dt>
%% <dd>How certain about the result we can be, as in Java's <a href="https://docs.oracle.com/javase/9/docs/api/java/math/BigInteger.html#isProbablePrime-int-">isProbablePrime</a>
%% If `mode' is `probabilistic', the probability of incorrect return value is at most 2<sup>-`certainty'</sup>.
%% If `mode' is `certified', this option is ignored.
%% The default value is `40'.
%% </dd>
%% </dl>
%%
%% NOTE(review): the `mode' option is declared and documented here, but the
%% implementation in this module never reads it — only the probabilistic
%% Miller-Rabin path exists. Confirm whether `certified' is still planned.
-spec is_prime(N :: non_neg_integer(), Options :: is_prime_options()) -> boolean().
%% 0, 1, and negatives are never prime; reject them before any work.
is_prime(N, _Options) when N =< 1 ->
  false;
is_prime(N, Options) ->
  %% Run the Miller-Rabin test repeatedly. Each round has an error rate
  %% of at most 1/4, so ceil(Certainty / 2) rounds bound the overall
  %% error probability by 2^-Certainty.
  Certainty = proplists:get_value(certainty, Options, 40),
  Rounds = (Certainty + 1) div 2,
  run_rounds(N, Rounds).

%%====================================================================
%% Internal functions
%%====================================================================

%% Perform the given number of Miller-Rabin rounds, short-circuiting to
%% false as soon as any round reports "composite".
-spec run_rounds(N :: non_neg_integer(), Remaining :: non_neg_integer()) -> boolean().
run_rounds(_N, 0) ->
  %% All requested rounds passed: probably prime.
  true;
run_rounds(N, Remaining) ->
  case primality_miller_rabin:is_prime(N) of
    true  -> run_rounds(N, Remaining - 1);
    false -> false
  end.
src/primality.erl
0.743354
0.586404
primality.erl
starcoder
%% @copyright 2007-2008 Basho Technologies
%% @reference <NAME> (1978). "Time, clocks, and the ordering of events in a distributed system". Communications of the ACM 21 (7): 558-565.
%% @reference <NAME> (1988). "Virtual Time and Global States of Distributed Systems". Workshop on Parallel and Distributed Algorithms: pp. 215-226
%% @author <NAME> <<EMAIL>>
%% @author <NAME> <<EMAIL>>
%% @doc A simple Erlang implementation of vector clocks as inspired by Lamport logical clocks.

%% Copyright 2007-2008 Basho Technologies

%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at

%% http://www.apache.org/licenses/LICENSE-2.0

%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.

-module(vclock).

-author('<NAME> <<EMAIL>>').
-author('<NAME> <<EMAIL>>').

-export([fresh/0,descends/2,merge/1,get_counter/2,get_timestamp/2,
         increment/2,all_nodes/1]).

%% @type vclock() = [vc_entry].
%% @type vc_entry() = {node(), {counter(), timestamp()}}.
%% The timestamp is present but not used, in case a client wishes to inspect it.
%% @type node() = term().
%% Nodes can have any term() as a name, but they must differ from each other.
%% @type counter() = integer().
%% @type timestamp() = integer().

%% @doc Create a brand new vclock (a vclock is just a proplist of
%% {Node, {Counter, Timestamp}} entries, so "new" is the empty list).
%% @spec fresh() -> vclock()
fresh() ->
    [].

%% @doc Remove trivial ancestors: collapse duplicate node entries,
%% keeping the highest counter per node (see simplify/2).
%% @spec simplify(vclock()) -> vclock()
simplify(VClock) ->
    simplify(VClock, []).
%% @doc Worker for simplify/1: fold entries into Done, keeping for each
%% node the pair with the larger counter (a tie keeps the pair already
%% accumulated, i.e. the one seen later in the original list order).
%% @spec simplify(vclock(), vclock()) -> vclock()
simplify([], Done) ->
    Done;
simplify([{Node, {Ctr, TS}} | Rest], Done) ->
    Best = case proplists:get_value(Node, Done, {Ctr, TS}) of
               {SeenCtr, _SeenTS} when Ctr > SeenCtr -> {Ctr, TS};
               Seen -> Seen
           end,
    simplify(Rest, [{Node, Best} | proplists:delete(Node, Done)]).

%% @doc Return true if Va is a direct descendant of Vb, else false -- remember, a vclock is its own descendant!
%% Va descends Vb iff every node counter in Vb is present in Va with a
%% counter at least as large.
%% @spec descends(Va :: vclock(), Vb :: vclock()) -> bool()
descends(_Va, []) ->
    %% all vclocks descend from the empty vclock
    true;
descends(Va, [{NodeB, {CtrB, _TS}} | RestB]) ->
    case proplists:get_value(NodeB, Va) of
        undefined ->
            %% Vb saw an update Va never saw: not a descendant.
            false;
        {CtrA, _TSA} when CtrA < CtrB ->
            false;
        {_CtrA, _TSA} ->
            descends(Va, RestB)
    end.

%% @doc Combine all VClocks in the input list into their least possible
%% common descendant.
%% @spec merge(VClocks :: [vclock()]) -> vclock()
merge(VClocks) ->
    lists:foldl(
      fun(Clock, Acc) -> lists:foldl(fun extend/2, Acc, Clock) end,
      [],
      VClocks).

%% @doc Get the counter value in VClock set from Node.
%% @spec get_counter(Node :: node(), VClock :: vclock()) -> counter()
get_counter(Node, VClock) ->
    case proplists:get_value(Node, VClock) of
        undefined -> undefined;
        {Counter, _TS} -> Counter
    end.

%% @doc Get the timestamp value in a VClock set from Node.
%% @spec get_timestamp(Node :: node(), VClock :: vclock()) -> timestamp()
get_timestamp(Node, VClock) ->
    case proplists:get_value(Node, VClock) of
        undefined -> undefined;
        {_Counter, TS} -> TS
    end.

%% @doc Increment VClock at Node, stamping the entry with the current time.
%% @spec increment(Node :: node(), VClock :: vclock()) -> vclock()
increment(Node, VClock) ->
    NewEntry = case proplists:get_value(Node, VClock) of
                   undefined -> {1, timestamp()};
                   {Counter, _OldTS} -> {Counter + 1, timestamp()}
               end,
    extend({Node, NewEntry}, VClock).

%% @doc Reflect an update performed by Node.
%% See increment/2 for usage.
%% @spec extend(VC_Entry :: VC_Entry, VClock :: vclock()) -> vclock()
%% Prepend the entry and let simplify/1 discard whichever duplicate
%% for that node has the lower counter.
extend({Node, {Counter, Timestamp}}, VClock) ->
    simplify([{Node, {Counter, Timestamp}} | VClock]).

%% @doc Return the list of all nodes that have ever incremented VClock.
%% Entries that are not {Node, {Counter, Timestamp}} pairs are skipped,
%% matching the original comprehension's filtering behaviour.
%% @spec all_nodes(VClock :: vclock()) -> [node()]
all_nodes(VClock) ->
    lists:filtermap(fun({Node, {_Ctr, _TS}}) -> {true, Node};
                       (_Other) -> false
                    end,
                    VClock).

%% Seconds since year 0 in the Gregorian calendar, from the UTC wall clock.
timestamp() ->
    Now = erlang:universaltime(),
    calendar:datetime_to_gregorian_seconds(Now).
src/vclock.erl
0.683525
0.423071
vclock.erl
starcoder
%% @doc %% Infinite lists realization. %% %% @author <NAME> % Module name. -module(inflists). % We don't need hd and tl functions for lists because we use the same names for infinite lists. -compile({no_auto_import, [hd/1, tl/1]}). % Export. -export([% Constructors. iterate/3, iterate/2, % Get infinite lists parts. hd/1, tl/1, ht/1, take/2, is_begin/2, nth/2, drop/2, drop_less/2, nthtail/2, sublist/2, sublist/3, split/2, find/2, % Basic simple infinite lists. repeat/1, cycle/1, % Arithmetic series. seq/2, odds/0, evens/0, seq/1, naturals/0, naturals/1, % Geometric series. geometric_series/2, power_series/1, % Zip/unzip. zip/2, zip_3/3, zipwith/3, unzip/1, unzip_3/1, % High order functions. map/2, filter/2, adj_pairs_map/2, fold/3, is_all/3, is_any/3, % Mathematical functions. add/2, inc/1, sub/2, dec/1, neg/1, mul/2, twice/1, dvs/2, half/1, inv/1, ndvs/2, nhalf/1, nrem/2, square/1, sqrt/1, cube/1, pow/2, npow/2, % More complex mathematical functions. partial_sums/1, partial_products/1, partial_avgs/1, dirichlet_series/1, dirichlet_series/2, sign_alternate/1, % Some usefull infinite lists. fib/0, trib/0, harmonic_series/0, anharmonic_series/0, grundy_series/0, facts/0, inv_facts/0, squares/0, sqrts/0, cubes/0, triangulars/0, % Prime numbers. primes/0, % Concatenation. concat/2, % Sparse infinite lists. sparse/2, odds/1, evens/1, % Merge/umerge. merge/2, unmerge/1, % Teylor's series. taylor_exp/1, taylor_lnxp1/1, taylor_sin/1, taylor_cos/1, taylor_arctg/1, % Monotonic infinite lists actions. mono_merge/2, mono_unique/1, mono_union/2, mono_intersection/2, mono_complement/2]). %--------------------------------------------------------------------------------------------------- % Types. %--------------------------------------------------------------------------------------------------- % Types export. -export_type([inflist/0]). % Define infinite list as record. -record(inflist, { h :: term(), acc :: term(), f :: fun((term(), term()) -> {term(), term()}) }). 
% Infinite list - a lazy stream represented by the #inflist{} record:
% current head, an accumulator, and a step function producing the next
% {Head, Acc} pair on demand.
-type inflist() :: #inflist{}.

%---------------------------------------------------------------------------------------------------
% Infinite lists constructors.
%---------------------------------------------------------------------------------------------------

-spec iterate(H, Acc, F) -> inflist()
      when F :: fun((H, Acc) -> {H, Acc}),
           H :: term(),
           Acc :: term().
%% @doc
%% Create infinite list with head, accumulator and iterate function.
%% Iterate function produces the second infinite list element from its head and accumulator.
%% Throws {badarg, {F, wrong_arity}} unless F is a fun of arity 2.
iterate(H, Acc, F) when is_function(F, 2) ->
    #inflist
    {
        h = H,
        acc = Acc,
        f = F
    };
iterate(_, _, F) ->
    throw({badarg, {F, wrong_arity}}).

%---------------------------------------------------------------------------------------------------

-spec iterate(H, F :: fun((H) -> H)) -> inflist()
      when H :: term().
%% @doc
%% Create infinite list with head and iterate function.
%% Iterate function produces the second infinite list element from its head.
%% Implemented on top of iterate/3 with a dummy accumulator of 0.
iterate(H, F) when is_function(F, 1) ->
    iterate
    (
        H,
        0,
        fun(Cur_H, _) -> {F(Cur_H), 0} end
    );
iterate(_, F) ->
    throw({badarg, {F, wrong_arity}}).

%---------------------------------------------------------------------------------------------------
% Get infinite lists parts (head, tail, sublists).
%---------------------------------------------------------------------------------------------------

-spec hd(IL :: inflist()) -> term().
%% @doc
%% Head of infinite list (shadows erlang:hd/1 via no_auto_import).
hd(IL) when is_record(IL, inflist) ->
    IL#inflist.h.

%---------------------------------------------------------------------------------------------------

-spec tl(IL :: inflist()) -> inflist().
%% @doc
%% Tail of infinite list: advance one step by applying the stored
%% step function to the current head and accumulator.
tl(#inflist{h = H, acc = Acc, f = F} = IL) ->
    {New_H, New_Acc} = F(H, Acc),
    IL#inflist{h = New_H, acc = New_Acc}.
%---------------------------------------------------------------------------------------------------

-spec ht(IL :: inflist()) -> {term(), inflist()}.
%% @doc
%% Take head and tail simultaneously.
ht(IL) ->
    Head = hd(IL),
    {Head, tl(IL)}.

%---------------------------------------------------------------------------------------------------

-spec take(IL :: inflist(), N :: integer()) -> list().
%% @doc
%% Take first N elements of infinite list (synonym for sublist).
%% Throws {badarg, N} for negative N.
%% @see sublist/2
take(_IL, N) when N < 0 ->
    throw({badarg, N});
take(IL, N) ->
    take_loop(IL, N, []).

-spec take_loop(IL :: inflist(), N :: integer(), [E]) -> [E]
      when E :: term().
%% @private
%% @doc
%% Accumulate the first N elements in reverse, then flip once at the end.
take_loop(_IL, 0, Acc) ->
    lists:reverse(Acc);
take_loop(IL, Remaining, Acc) ->
    {Head, Rest} = ht(IL),
    take_loop(Rest, Remaining - 1, [Head | Acc]).

%---------------------------------------------------------------------------------------------------

-spec is_begin(IL :: inflist(), B :: term() | list()) -> boolean().
%% @doc
%% Check if infinite list begins with given term or list.
is_begin(IL, Prefix) when not is_list(Prefix) ->
    hd(IL) =:= Prefix;
is_begin(IL, []) when is_record(IL, inflist) ->
    true;
is_begin(IL, [H | T]) ->
    case ht(IL) of
        {H, Rest} -> is_begin(Rest, T);
        {_Other, _Rest} -> false
    end.

%---------------------------------------------------------------------------------------------------

-spec nth(IL :: inflist(), N :: integer()) -> term().
%% @doc
%% Take N-th element for N > 0; throws {badarg, N} otherwise.
nth(_IL, N) when N < 1 ->
    throw({badarg, N});
nth(IL, N) ->
    hd(drop(IL, N - 1)).

%---------------------------------------------------------------------------------------------------

-spec drop(IL :: inflist(), N :: integer()) -> inflist().
%% @doc
%% Drop first N elements of infinite list (synonym for nthtail).
%% Throws {badarg, N} for negative N.
%% @see nthtail/2
drop(_IL, N) when N < 0 ->
    throw({badarg, N});
drop(IL, 0) ->
    IL;
drop(IL, Remaining) ->
    drop(tl(IL), Remaining - 1).
%--------------------------------------------------------------------------------------------------- -spec drop_less(IL :: inflist(), N :: number()) -> inflist(). %% @doc %% Drop first elements less than N. drop_less(IL, N) -> {H, T} = ht(IL), if H < N -> drop_less(T, N); true -> IL end. %--------------------------------------------------------------------------------------------------- -spec nthtail(IL :: inflist(), N :: integer()) -> inflist(). %% @doc %% Tail of infinite list without N elements (synonym for drop). %% @see drop/2 nthtail(IL, N) -> drop(IL, N). %--------------------------------------------------------------------------------------------------- -spec sublist(IL :: inflist(), N :: integer()) -> list(). %% @doc %% Sublist from first position (synonym for take). %% @see take/2 sublist(IL, N) -> take(IL, N). %--------------------------------------------------------------------------------------------------- -spec sublist(IL :: inflist(), Start :: integer(), N :: integer()) -> list(). %% @doc %% Sublist from given position. sublist(IL, Start, N) -> take(drop(IL, Start - 1), N). %--------------------------------------------------------------------------------------------------- -spec split(IL :: inflist(), N :: integer()) -> {list(), inflist()}. %% @doc %% Split infinite list by position. split(_, N) when (N < 0) -> throw({badarg, N}); split(IL, N) -> split(IL, N, []). -spec split(IL :: inflist(), N :: integer(), [E]) -> {[E], inflist()} when E :: term(). %% @doc %% Split infinite list by position. %% @private split(IL, 0, R) -> {lists:reverse(R), IL}; split(IL, N, R) -> split(tl(IL), N - 1, [hd(IL) | R]). %--------------------------------------------------------------------------------------------------- -spec find(IL :: inflist(), FF :: fun((term()) -> boolean())) -> integer(). %% @doc %% Find number of element wich satisfies the given condition. %% Warning. %% Dangerous function, because list can contain no elements with given condition. 
find(IL, FF) -> find(IL, FF, 1). -spec find(IL :: inflist(), FF :: fun((term()) -> boolean()), N :: integer()) -> integer(). %% @doc %% Find number of element with satisfies the given condition. %% @private find(IL, FF, N) -> {H, T} = ht(IL), Is_FF = FF(H), if Is_FF -> N; true -> find(T, FF, N + 1) end. %--------------------------------------------------------------------------------------------------- % Basic simple infinite lists. %--------------------------------------------------------------------------------------------------- -spec repeat(T :: term()) -> inflist(). %% @doc %% Construct infinite list, containing one repeating element (Haskell analogue). %% %% Example: %% <pre> %% repeat(T) -> [T, T, T, ..] %% </pre> repeat(T) -> iterate ( T, fun(_) -> T end ). %--------------------------------------------------------------------------------------------------- -spec cycle(L :: list()) -> inflist(). %% @doc %% Construct infinite list, containing infinite number of list L copies (Haskell analogue). %% %% Example: %% <pre> %% repeat([A, B, C]) -> [A, B, C, A, B, C, ..] %% </pre> cycle([]) -> throw({badarg, []}); cycle([H | T]) -> iterate ( H, T, fun (_, []) -> {H, T}; (_, [Cur_H | Cur_T]) -> {Cur_H, Cur_T} end ). %--------------------------------------------------------------------------------------------------- % Arithmetic series. %--------------------------------------------------------------------------------------------------- -spec seq(From :: number(), Step :: number()) -> inflist(). %% @doc %% Construct arithmetic series. %% %% Example: %% <pre> %% seq(From, Step) -> [From, From + Step, From + 2 * Step, ..] %% </pre> seq(From, Step) -> iterate ( From, fun(H) -> H + Step end ). %--------------------------------------------------------------------------------------------------- -spec odds() -> inflist(). %% @doc %% Odd natural numbers. %% %% Example: %% <pre> %% odds() -> [1, 3, 5, 7, 9, ..] %% </pre> odds() -> seq(1, 2). 
%--------------------------------------------------------------------------------------------------- -spec evens() -> inflist(). %% @doc %% Even natural numbers. %% %% Example: %% <pre> %% evens() -> [2, 4, 6, 8, 10, ..] %% </pre> evens() -> seq(2, 2). %--------------------------------------------------------------------------------------------------- -spec seq(From :: number()) -> inflist(). %% @doc %% Construct infinite list of naturals from given number. %% %% Example: %% <pre> %% seq(From) -> [From, From + 1, From + 2, ..] %% </pre> %% %% @see naturals/1 seq(From) -> seq(From, 1). %--------------------------------------------------------------------------------------------------- -spec naturals() -> inflist(). %% @doc %% Infinite list of natural numbers. %% %% Example: %% <pre> %% naturals() -> [1, 2, 3, 4, 5, ..] %% </pre> naturals() -> seq(1). %--------------------------------------------------------------------------------------------------- -spec naturals(From :: integer()) -> inflist(). %% @doc %% Naturals from given number (synonym for seq). %% %% Example: %% <pre> %% seq(From) -> [From, From + 1, From + 2, ..] %% </pre> %% %% @see seq/1 naturals(From) -> seq(From). %--------------------------------------------------------------------------------------------------- % Geometric series. %--------------------------------------------------------------------------------------------------- -spec geometric_series(Base :: number(), K :: number()) -> inflist(). %% @doc %% Construct geometric series. %% %% Example: %% <pre> %% geometric_series(Base, K) -> [Base, Base * K, Base * K^2, Base * K^3, ..] %% </pre> geometric_series(Base, K) -> iterate ( Base, fun(H) -> H * K end ). %--------------------------------------------------------------------------------------------------- -spec power_series(X :: number()) -> inflist(). %% @doc %% Series of number powers. %% %% Example: %% <pre> %% power_series(X) -> [1, X, X^2, X^3, ..] 
%% </pre> power_series(X) -> geometric_series(1, X). %--------------------------------------------------------------------------------------------------- % Zip/unzip functions and functors. %--------------------------------------------------------------------------------------------------- -spec zip(IL1 :: inflist(), IL2 :: inflist()) -> inflist(). %% @doc %% Zip two infinite lists. %% %% Example: %% <pre> %% A = [a1, a2, a3, a4, a5, ..] %% B = [b1, b2, b3, b4, b5, ..] %% %% zip(A, B) -> [{a1, b1}, {a2, b2}, {a3, b3}, {a4, b4}, {a5, b5}, ..] %% </pre> zip(#inflist{h = H1, acc = Acc1, f = F1}, #inflist{h = H2, acc = Acc2, f = F2}) -> iterate ( {H1, H2}, {Acc1, Acc2}, fun({Cur_H1, Cur_H2}, {Cur_Acc1, Cur_Acc2}) -> {New_H1, New_Acc1} = F1(Cur_H1, Cur_Acc1), {New_H2, New_Acc2} = F2(Cur_H2, Cur_Acc2), {{New_H1, New_H2}, {New_Acc1, New_Acc2}} end ); zip(IL1, IL2) -> throw({badarg, {IL1, IL2}}). %--------------------------------------------------------------------------------------------------- -spec zip_3(IL1 :: inflist(), IL2 :: inflist(), IL3 :: inflist()) -> inflist(). %% @doc %% Zip three infinite lists. %% %% Example: %% <pre> %% A = [a1, a2, a3, a4, a5, ..] %% B = [b1, b2, b3, b4, b5, ..] %% C = [c1, c2, c3, c4, c5, ..] %% %% zip_3(A, B, C) -> [{a1, b1, c1}, {a2, b2, c2}, {a3, b3, c3}, ..] %% </pre> zip_3(#inflist{h = H1, acc = Acc1, f = F1}, #inflist{h = H2, acc = Acc2, f = F2}, #inflist{h = H3, acc = Acc3, f = F3}) -> iterate ( {H1, H2, H3}, {Acc1, Acc2, Acc3}, fun({Cur_H1, Cur_H2, Cur_H3}, {Cur_Acc1, Cur_Acc2, Cur_Acc3}) -> {New_H1, New_Acc1} = F1(Cur_H1, Cur_Acc1), {New_H2, New_Acc2} = F2(Cur_H2, Cur_Acc2), {New_H3, New_Acc3} = F3(Cur_H3, Cur_Acc3), {{New_H1, New_H2, New_H3}, {New_Acc1, New_Acc2, New_Acc3}} end ); zip_3(IL1, IL2, IL3) -> throw({badarg, {IL1, IL2, IL3}}). 
%--------------------------------------------------------------------------------------------------- -spec zipwith(IL1 :: inflist(), IL2 :: inflist(), Zip_F) -> inflist() when Zip_F :: fun((T1, T2) -> {T1, T2}), T1 :: term(), T2 :: term(). %% @doc %% Zip two infinite lists with given function. %% %% Example: %% <pre> %% A = [a1, a2, a3, a4, a5, ..] %% B = [b1, b2, b3, b4, b5, ..] %% %% zipwith(A, B, Zip_F) -> [Zip_F(a1, b1), Zip_F(a2, b2), Zip_F(a3, b3), ..] %% </pre> zipwith(#inflist{h = H1, acc = Acc1, f = F1}, #inflist{h = H2, acc = Acc2, f = F2}, Zip_F) when is_function(Zip_F, 2) -> iterate ( Zip_F(H1, H2), {{H1, Acc1}, {H2, Acc2}}, fun(_, {{Cur_H1, Cur_Acc1}, {Cur_H2, Cur_Acc2}}) -> {New_H1, New_Acc1} = F1(Cur_H1, Cur_Acc1), {New_H2, New_Acc2} = F2(Cur_H2, Cur_Acc2), {Zip_F(New_H1, New_H2), {{New_H1, New_Acc1}, {New_H2, New_Acc2}}} end ); zipwith(IL1, IL2, Zip_F) -> throw({badarg, {IL1, IL2, Zip_F}}). %--------------------------------------------------------------------------------------------------- -spec unzip(IL :: inflist()) -> {inflist(), inflist()}. %% @doc %% Unzip infinite list into two lists. %% %% Example: %% <pre> %% IL = [{a1, b1}, {a2, b2}, {a3, b3}, {a4, b4}, {a5, b5}, ..] %% %% unzip(IL) -> {[a1, a2, a3, a4, a5, ..], %% [b1, b2, b3, b4, b5, ..]} %% </pre> unzip(#inflist{h = {H1, H2}, acc = {Acc1, Acc2}, f = F}) -> { iterate ( H1, Acc1, fun(Cur_H1, Cur_Acc1) -> {{New_H1, _}, {New_Acc1, _}} = F({Cur_H1, H2}, {Cur_Acc1, Acc2}), {New_H1, New_Acc1} end ), iterate ( H2, Acc2, fun(Cur_H2, Cur_Acc2) -> {{_, New_H2}, {_, New_Acc2}} = F({H1, Cur_H2}, {Acc1, Cur_Acc2}), {New_H2, New_Acc2} end ) }; unzip(IL) -> throw({badarg, IL}). %--------------------------------------------------------------------------------------------------- -spec unzip_3(IL :: inflist()) -> {inflist(), inflist(), inflist()}. %% @doc %% Unzip infinite list into three lists. %% %% Example: %% <pre> %% IL = [{a1, b1, c1}, {a2, b2, c2}, {a3, b3, c3}, {a4, b4, c4}, ..] 
%% %% unzip_3(IL) -> {[a1, a2, a3, a4, ..], %% [b1, b2, b3, b4, ..], %% [c1, c2, c3, c4, ..]} %% </pre> unzip_3(#inflist{h = {H1, H2, H3}, acc = {Acc1, Acc2, Acc3}, f = F}) -> { iterate ( H1, Acc1, fun(Cur_H1, Cur_Acc1) -> {{New_H1, _, _}, {New_Acc1, _, _}} = F({Cur_H1, H2, H3}, {Cur_Acc1, Acc2, Acc3}), {New_H1, New_Acc1} end ), iterate ( H2, Acc2, fun(Cur_H2, Cur_Acc2) -> {{_, New_H2, _}, {_, New_Acc2, _}} = F({H1, Cur_H2, H3}, {Acc1, Cur_Acc2, Acc3}), {New_H2, New_Acc2} end ), iterate ( H3, Acc3, fun(Cur_H3, Cur_Acc3) -> {{_, _, New_H3}, {_, _, New_Acc3}} = F({H1, H2, Cur_H3}, {Acc1, Acc2, Cur_Acc3}), {New_H3, New_Acc3} end ) }; unzip_3(IL) -> throw({badarg, IL}). %--------------------------------------------------------------------------------------------------- % High order functions. %--------------------------------------------------------------------------------------------------- -spec map(IL :: inflist(), Map_F :: fun((term()) -> term())) -> inflist(). %% @doc %% Apply function to every element of infinite list. %% %% Example: %% <pre> %% IL = [a1, a2, a4, a4, a5, ..] %% %% map(IL, Map_F) -> [Map_F(a1), Map_F(a2), Map_F(a3), Map_F(a4), Map_F(a5), ..] %% </pre> map(#inflist{h = H, acc = Acc, f = F}, Map_F) when is_function(Map_F, 1) -> iterate ( Map_F(H), {H, Acc}, fun(_, {Cur_H, Cur_Acc}) -> {New_H, New_Acc} = F(Cur_H, Cur_Acc), {Map_F(New_H), {New_H, New_Acc}} end ); map(IL, Map_F) -> throw({badarg, {IL, Map_F}}). %--------------------------------------------------------------------------------------------------- -spec filter(IL :: inflist(), Filter_F :: fun((term()) -> boolean())) -> inflist(). %% @doc %% Filter infinite list. %% Warning! %% Dangerous function. %% It can cause infinite recursion if result list is not finite. %% %% Example: %% <pre> %% IL = [1, 2, 3, 4, 5, 6, ..] %% %% filter(IL, fun(X) -> X rem 2 =:= 1 end) -> [1, 3, 5, 7, ..] 
%% filter(IL, fun(X) -> X =:= 0 end) -> infinite loop %% </pre> filter(IL, Filter_F) -> New_IL = iterate ( 0, IL, fun F_(_, L) -> {H, T} = ht(L), F_Res = Filter_F(H), if F_Res -> {H, T}; true -> F_(none, T) end end ), tl(New_IL). %--------------------------------------------------------------------------------------------------- -spec adj_pairs_map(IL :: inflist(), Map_F :: fun((term(), term()) -> term())) -> inflist(). %% @doc %% Apply map function to every pair of adjacent elements. %% %% Example: %% <pre> %% IL = [a1, a2, a3, a4, a5, ..] %% %% adj_pairs_map(IL, Map_F) -> [Map_F(a1, a2), Map_F(a2, a3), Map_F(a3, a4), ..] %% </pre> adj_pairs_map(IL, Map_F) -> zipwith(IL, tl(IL), Map_F). %--------------------------------------------------------------------------------------------------- -spec fold(IL :: inflist(), Fold_F :: fun((term(), Fold_Acc) -> Fold_Acc), Fold_Acc) -> inflist() when Fold_Acc :: term(). %% @doc %% Partial folds of sublists. %% Note. We will never reach fold result because list is infinite. %% %% Example: %% <pre> %% IL = [a1, a2, a3, a4, a5, ..] %% %% fold(IL, Fold_F) -> [lists:foldl(Fold_F, Fold_Acc, [a1]), %% lists:foldl(Fold_F, Fold_Acc, [a1, a2]), %% lists:foldl(Fold_F, Fold_Acc, [a1, a2, a3]), %% lists:foldl(Fold_F, Fold_Acc, [a1, a2, a3, a4]), %% lists:foldl(Fold_F, Fold_Acc, [a1, a2, a3, a4, a5]), %% ..] %% </pre> fold(#inflist{h = H, acc = Acc, f = F}, Fold_F, Fold_Acc) when is_function(Fold_F, 2) -> iterate ( Fold_F(H, Fold_Acc), {H, Acc}, fun(Cur_Fold_Acc, {Cur_H, Cur_Acc}) -> {New_H, New_Acc} = F(Cur_H, Cur_Acc), {Fold_F(New_H, Cur_Fold_Acc), {New_H, New_Acc}} end ); fold(IL, Fold_F, Fold_Acc) -> throw({badarg, {IL, Fold_F, Fold_Acc}}). %--------------------------------------------------------------------------------------------------- -spec is_all(IL :: inflist(), Pred :: fun((term()) -> boolean()), N :: integer()) -> boolean(). %% @doc %% Check predicate for all of N first infinite list members. 
is_all(_, _, 0) -> true; is_all(IL, Pred, N) when (N > 0) -> Is = Pred(hd(IL)), if not Is -> false; true -> is_all(tl(IL), Pred, N - 1) end. %--------------------------------------------------------------------------------------------------- -spec is_any(IL :: inflist(), Pred :: fun((term()) -> boolean()), N :: integer()) -> boolean(). %% @doc %% Check predicate for any of N first infinite list members. is_any(_, _, 0) -> false; is_any(IL, Pred, N) when (N > 0) -> Is = Pred(hd(IL)), if Is -> true; true -> is_any(tl(IL), Pred, N - 1) end. %--------------------------------------------------------------------------------------------------- % Mathematical functions. %--------------------------------------------------------------------------------------------------- -spec add(Arg, Arg) -> inflist() when Arg :: inflist() | term(). %% @doc %% Add function. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% B = [B1, B2, B3, B4, B5, ..] %% V - not inflist %% %% add(A, B) -> [A1 + B1, A2 + B2, A3 + B3, A4 + B4, A5 + B5, ..] %% add(A, V) -> [A1 + V, A2 + V, A3 + V, A4 + V, A5 + V, ..] %% add(V, A) -> [V + A1, V + A2, V + A3, V + A4, V + A5, ..] %% </pre> add(A, B) -> Is_A = is_record(A, inflist), Is_B = is_record(B, inflist), if Is_A andalso Is_B -> zipwith(A, B, fun(X, Y) -> X + Y end); Is_A -> map(A, fun(X) -> X + B end); Is_B -> map(B, fun(X) -> A + X end); true -> throw({badarg, {A, B}}) end. %--------------------------------------------------------------------------------------------------- -spec inc(IL :: inflist()) -> inflist(). %% @doc %% Increment. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% inc(A) -> [A1 + 1, A2 + 1, A3 + 1, A4 + 1, A5 + 1, ..] %% </pre> inc(IL) -> add(IL, 1). %--------------------------------------------------------------------------------------------------- -spec sub(Arg, Arg) -> inflist() when Arg :: inflist() | term(). %% @doc %% Sub function. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] 
%% B = [B1, B2, B3, B4, B5, ..] %% V - not inflist %% %% sub(A, B) -> [A1 - B1, A2 - B2, A3 - B3, A4 - B4, A5 - B5, ..] %% sub(A, V) -> [A1 - V, A2 - V, A3 - V, A4 - V, A5 - V, ..] %% sub(V, A) -> [V - A1, V - A2, V - A3, V - A4, V - A5, ..] %% </pre> sub(A, B) -> Is_A = is_record(A, inflist), Is_B = is_record(B, inflist), if Is_A andalso Is_B -> zipwith(A, B, fun(X, Y) -> X - Y end); Is_A -> map(A, fun(X) -> X - B end); Is_B -> map(B, fun(X) -> A - X end); true -> throw({badarg, {A, B}}) end. %--------------------------------------------------------------------------------------------------- -spec dec(IL :: inflist()) -> inflist(). %% @doc %% Decrement. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% dec(A) -> [A1 - 1, A2 - 1, A3 - 1, A4 - 1, A5 - 1, ..] %% </pre> dec(IL) -> sub(IL, 1). %--------------------------------------------------------------------------------------------------- -spec neg(IL :: inflist()) -> inflist(). %% @doc %% Negate infinite list. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% neg(A) -> [-A1, -A2, -A3, -A4, -A5, ..] %% </pre> neg(IL) -> map(IL, fun(X) -> -X end). %--------------------------------------------------------------------------------------------------- -spec mul(Arg, Arg) -> inflist() when Arg :: inflist() | term(). %% @doc %% Multiplication. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% B = [B1, B2, B3, B4, B5, ..] %% V - not inflist %% %% mul(A, B) -> [A1 * B1, A2 * B2, A3 * B3, A4 * B4, A5 * B5, ..] %% mul(A, V) -> [A1 * V, A2 * V, A3 * V, A4 * V, A5 * V, ..] %% mul(V, A) -> [V * A1, V * A2, V * A3, V * A4, V * A5, ..] %% </pre> mul(A, B) -> Is_A = is_record(A, inflist), Is_B = is_record(B, inflist), if Is_A andalso Is_B -> zipwith(A, B, fun(X, Y) -> X * Y end); Is_A -> map(A, fun(X) -> X * B end); Is_B -> map(B, fun(X) -> A * X end); true -> throw({badarg, {A, B}}) end. 
%--------------------------------------------------------------------------------------------------- -spec twice(IL :: inflist()) -> inflist(). %% @doc %% Twice infinite list. %% %% Example: %% <pre> %% twice([A1, A2, A3, A4, ..]) -> [2 * A1, 2 * A2, 2 * A3, 2 * A4, ..] %% </pre> twice(IL) -> mul(IL, 2). %--------------------------------------------------------------------------------------------------- -spec dvs(Arg, Arg) -> inflist() when Arg :: inflist() | term(). %% @doc %% Division. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% B = [B1, B2, B3, B4, B5, ..] %% V - not inflist %% %% dvs(A, B) -> [A1 / B1, A2 / B2, A3 / B3, A4 / B4, A5 / B5, ..] %% dvs(A, V) -> [A1 / V, A2 / V, A3 / V, A4 / V, A5 / V, ..] %% dvs(V, A) -> [V / A1, V / A2, V / A3, V / A4, V / A5, ..] %% </pre> dvs(A, B) -> Is_A = is_record(A, inflist), Is_B = is_record(B, inflist), if Is_A andalso Is_B -> zipwith(A, B, fun(X, Y) -> X / Y end); Is_A -> map(A, fun(X) -> X / B end); Is_B -> map(B, fun(X) -> A / X end); true -> throw({badarg, {A, B}}) end. %--------------------------------------------------------------------------------------------------- -spec half(IL :: inflist()) -> inflist(). %% @doc %% Half all elements of infinite list. %% %% Example: %% <pre> %% half([A1, A2, A3, A4, ..]) -> [A1 / 2, A2 / 2, A3 / 2, A4 / 2, ..] %% </pre> half(IL) -> dvs(IL, 2). %--------------------------------------------------------------------------------------------------- -spec inv(IL :: inflist()) -> inflist(). %% @doc %% Infinite list of inverted values. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% inv(A) -> [1 / A1, 1 / A2, 1 / A3, 1 / A4, 1 / A5, ..] %% </pre> inv(IL) -> map(IL, fun(X) -> 1 / X end). %--------------------------------------------------------------------------------------------------- -spec ndvs(Arg, Arg) -> inflist() when Arg :: inflist() | term(). %% @doc %% Division without remainder (for natural numbers). 
%% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% B = [B1, B2, B3, B4, B5, ..] %% V - not inflist %% %% ndvs(A, B) -> [A1 div B1, A2 div B2, A3 div B3, A4 div B4, A5 div B5, ..] %% ndvs(A, V) -> [A1 div V, A2 div V, A3 div V, A4 div V, A5 div V, ..] %% ndvs(V, A) -> [V div A1, V div A2, V div A3, V div A4, V div A5, ..] %% </pre> ndvs(A, B) -> Is_A = is_record(A, inflist), Is_B = is_record(B, inflist), if Is_A andalso Is_B -> zipwith(A, B, fun(X, Y) -> X div Y end); Is_A -> map(A, fun(X) -> X div B end); Is_B -> map(B, fun(X) -> A div X end); true -> throw({badarg, {A, B}}) end. %--------------------------------------------------------------------------------------------------- -spec nhalf(IL :: inflist()) -> inflist(). %% @doc %% Divide each element of infinite list on 2 (integer division). %% %% Example: %% <pre> %% nhalf([A1, A2, A3, A4, ..]) -> [A1 div 2, A2 div 2, A3 div 2, A4 div 2, ..] %% </pre> nhalf(IL) -> ndvs(IL, 2). %--------------------------------------------------------------------------------------------------- -spec nrem(Arg, Arg) -> inflist() when Arg :: inflist() | term(). %% @doc %% Get remainder (infinite list of remainders). %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% B = [B1, B2, B3, B4, B5, ..] %% V - not inflist %% %% nrem(A, B) -> [A1 rem B1, A2 rem B2, A3 rem B3, A4 rem B4, A5 rem B5, ..] %% nrem(A, V) -> [A1 rem V, A2 rem V, A3 rem V, A4 rem V, A5 rem V, ..] %% nrem(V, A) -> [V rem A1, V rem A2, V rem A3, V rem A4, V rem A5, ..] %% </pre> nrem(A, B) -> Is_A = is_record(A, inflist), Is_B = is_record(B, inflist), if Is_A andalso Is_B -> zipwith(A, B, fun(X, Y) -> X rem Y end); Is_A -> map(A, fun(X) -> X rem B end); Is_B -> map(B, fun(X) -> A rem X end); true -> throw({badarg, {A, B}}) end. %--------------------------------------------------------------------------------------------------- -spec square(IL :: inflist()) -> inflist(). %% @doc %% Infinite list of squares. 
%% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% square(A) -> [A1^2, A2^2, A3^2, A4^2, A5^2, ..] %% </pre> square(IL) -> mul(IL, IL). %--------------------------------------------------------------------------------------------------- -spec sqrt(IL :: inflist()) -> inflist(). %% @doc %% Square root of infinite list. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% sqrt(A) -> [sqrt(A1), sqrt(A2), sqrt(A3), sqrt(A4), sqrt(A5), ..] %% </pre> sqrt(IL) -> map(IL, fun(X) -> math:sqrt(X) end). %--------------------------------------------------------------------------------------------------- -spec cube(IL :: inflist()) -> inflist(). %% @doc %% Cube of infinite list. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% cube(A) -> [A1^3, A2^3, A3^3, A4^3, A5^3, ..] %% </pre> cube(IL) -> mul(square(IL), IL). %--------------------------------------------------------------------------------------------------- -spec pow(Arg, Arg) -> inflist() when Arg :: inflist() | term(). %% @doc %% Power calculation. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% B = [B1, B2, B3, B4, B5, ..] %% V - not inflist %% %% pow(A, B) -> [A1^B1, A2^B2, A3^B3, A4^B4, A5^B5, ..] %% pow(A, V) -> [A1^V, A2^V, A3^V, A4^V, A5^V, ..] %% pow(V, A) -> [V^A1, V^A2, V^A3, V^A4, V^A5, ..] %% </pre> pow(A, B) -> Is_A = is_record(A, inflist), Is_B = is_record(B, inflist), if Is_A andalso Is_B -> zipwith(A, B, fun(X, Y) -> math:pow(X, Y) end); Is_A -> map(A, fun(X) -> math:pow(X, B) end); Is_B -> map(B, fun(X) -> math:pow(A, X) end); true -> throw({badarg, {A, B}}) end. %--------------------------------------------------------------------------------------------------- -spec npow(Arg, Arg) -> inflist() when Arg :: inflist() | term(). %% @doc %% Power calculation (in integers). %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% B = [B1, B2, B3, B4, B5, ..] %% V - not inflist %% %% pow(A, B) -> [A1^B1, A2^B2, A3^B3, A4^B4, A5^B5, ..] 
%% pow(A, V) -> [A1^V, A2^V, A3^V, A4^V, A5^V, ..] %% pow(V, A) -> [V^A1, V^A2, V^A3, V^A4, V^A5, ..] %% </pre> npow(A, B) -> Is_A = is_record(A, inflist), Is_B = is_record(B, inflist), % Function for power calculation in integers. Pow = fun _Pow(_, 0) -> 1; _Pow(N, M) -> N * _Pow(N, M - 1) end, if Is_A andalso Is_B -> zipwith(A, B, Pow); Is_A -> map(A, fun(X) -> Pow(X, B) end); Is_B -> map(B, fun(X) -> Pow(A, X) end); true -> throw({badarg, {A, B}}) end. %--------------------------------------------------------------------------------------------------- % Partial sums and products of infinite list. %--------------------------------------------------------------------------------------------------- -spec partial_sums(IL :: inflist()) -> inflist(). %% @doc %% Partial sums. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% partial_sums(A) -> [A1, %% A1 + A2, %% A1 + A2 + A3, %% A1 + A2 + A3 + A4, %% A1 + A2 + A3 + A4 + A5, %% ..] %% </pre> partial_sums(IL) -> fold(IL, fun(X, Y) -> X + Y end, 0). %--------------------------------------------------------------------------------------------------- -spec partial_products(IL :: inflist()) -> inflist(). %% @doc %% Partial products. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% partial_products(A) -> [A1, %% A1 * A2, %% A1 * A2 * A3, %% A1 * A2 * A3 * A4, %% A1 * A2 * A3 * A4 * A5, %% ..] %% </pre> partial_products(IL) -> fold(IL, fun(X, Y) -> X * Y end, 1). %--------------------------------------------------------------------------------------------------- -spec partial_avgs(IL :: inflist()) -> inflist(). %% @doc %% Average values of infinite list. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% partial_avgs(A) -> [A1, %% (A1 + A2) / 2, %% (A1 + A2 + A3) / 3, %% (A1 + A2 + A3 + A4) / 4, %% (A1 + A2 + A3 + A4 + A5) / 5, %% ..] %% </pre> partial_avgs(IL) -> dvs(partial_sums(IL), naturals()). 
%--------------------------------------------------------------------------------------------------- -spec dirichlet_series(S :: number()) -> inflist(). %% @doc %% Dirichlet series. %% %% Example: %% <pre> %% 1 1 1 1 %% dirichlet_series(S) -> [1, -----, -----, -----, -----, ..] %% 2^S 3^S 4^S 5^S %% </pre> dirichlet_series(S) -> pow(harmonic_series(), S). %--------------------------------------------------------------------------------------------------- -spec dirichlet_series(IL :: inflist(), S :: number()) -> inflist(). %% @doc %% Dirichlet series of base inflist IL and pow degree S. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% A2 A3 A4 A5 %% dirichlet_series(S) -> [A1, -----, -----, -----, -----, ..] %% 2^S 3^S 4^S 5^S %% </pre> dirichlet_series(IL, S) -> mul(IL, dirichlet_series(S)). %--------------------------------------------------------------------------------------------------- -spec sign_alternate(IL :: inflist()) -> inflist(). %% @doc %% Alternate sign of infinite list. %% Odd position elements are unchanged, even position elements are negated. %% %% Example: %% <pre> %% A = [A1, A2, A3, A4, A5, ..] %% %% sign_alternate(A) -> [A1, -A2, A3, -A4, A5, ..] %% </pre> sign_alternate(IL) -> mul(IL, grundy_series()). %--------------------------------------------------------------------------------------------------- % Some usefull sequences. %--------------------------------------------------------------------------------------------------- -spec fib() -> inflist(). %% @doc %% Fibonacci numbers. %% %% Example: %% <pre> %% fib() -> [1, 1, 2, 3, 5, 8, 13, 21, ..] %% </pre> fib() -> iterate ( 1, 0, fun(H, Acc) -> {H + Acc, H} end ). %--------------------------------------------------------------------------------------------------- -spec trib() -> inflist(). %% @doc %% Tribonacci numbers. %% %% Example: %% <pre> %% trib() -> [0, 1, 1, 2, 4, 7, 13, 24, 44, ..] 
%% </pre> trib() -> iterate ( 0, {1, 1}, fun(H, {A1, A2}) -> {A1, {A2, H + A1 + A2}} end ). %--------------------------------------------------------------------------------------------------- -spec harmonic_series() -> inflist(). %% @doc %% Harmonic series. %% %% Example: %% <pre> %% 1 1 1 1 1 1 %% harmonic_series() -> [1, ---, ---, ---, ---, ---, ---, ..] %% 2 3 4 5 6 7 %% </pre> harmonic_series() -> inv(naturals()). %--------------------------------------------------------------------------------------------------- -spec anharmonic_series() -> inflist(). %% @doc %% Anharmonic series (Leibniz series). %% %% Example: %% <pre> %% 1 1 1 1 %% anharmonic_series() -> [1, - ---, ---, - ---, ---, ..] %% 3 5 7 9 %% </pre> anharmonic_series() -> sign_alternate(inv(odds())). %--------------------------------------------------------------------------------------------------- -spec grundy_series() -> inflist(). %% @doc %% Grundy series (see <NAME>). %% %% Example: %% <pre> %% grundy_series() -> [1, -1, 1, -1, 1, -1, ..] %% </pre> grundy_series() -> cycle([1, -1]). %--------------------------------------------------------------------------------------------------- -spec facts() -> inflist(). %% @doc %% Factorials series (starts with 0! = 1). %% %% Example: %% <pre> %% facts() -> [1, 1, 2!, 3!, 4!, 5!, ..] %% </pre> facts() -> iterate ( 1, 1, fun(H, Acc) -> M = H * Acc, {M, Acc + 1} end ). %--------------------------------------------------------------------------------------------------- -spec inv_facts() -> inflist(). %% @doc %% Series of inverted factorials. %% %% Example: %% <pre> %% 1 1 1 %% inv_facts() -> [1, 1, ---, ---, ---, ..] %% 2! 3! 4! %% </pre> inv_facts() -> inv(facts()). %--------------------------------------------------------------------------------------------------- -spec squares() -> inflist(). %% @doc %% Natural squares. %% %% Example: %% <pre> %% squares() -> [1, 4, 9, 16, 25, ..] %% </pre> squares() -> square(naturals()). 
%--------------------------------------------------------------------------------------------------- -spec sqrts() -> inflist(). %% @doc %% Square roots of naturals. %% %% Example: %% <pre> %% sqrts() -> [1, sqrt(2), sqrt(3), 2, sqrt(5), ..] %% </pre> sqrts() -> sqrt(naturals()). %--------------------------------------------------------------------------------------------------- -spec cubes() -> inflist(). %% @doc %% Natural cubes. %% %% Example: %% <pre> %% cubes() -> [1, 8, 27, 64, 125, ..] %% </pre> cubes() -> cube(naturals()). %--------------------------------------------------------------------------------------------------- -spec triangulars() -> inflist(). %% @doc %% Triangulars numbers. %% %% Example: %% <pre> %% triangulars() -> [1, 3, 6, 10, 15, 21, ..] %% </pre> triangulars() -> iterate ( 1, 1, fun(H, Acc) -> {H + Acc + 1, Acc + 1} end ). %--------------------------------------------------------------------------------------------------- % Prime numbers. %--------------------------------------------------------------------------------------------------- -spec primes() -> inflist(). %% @doc %% Prime numbers. %% %% Example: %% <pre> %% primes() -> [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, ..] %% </pre> primes() -> iterate ( 2, % first prime number [2], % list of factors fun(H, Acc) -> % Get next prime number function. Next_P_Fun = fun NPF_(N, [], _) -> N; NPF_(N, [P | _], _) when (P * P > N) -> N; NPF_(N, [P | T], Ps) -> if (N rem P) =:= 0 -> NPF_(N + 1, Ps, Ps); true -> NPF_(N, T, Ps) end end, P = Next_P_Fun(H + 1, Acc, Acc), {P, Acc ++ [P]} end ). %--------------------------------------------------------------------------------------------------- % Concatenate. %--------------------------------------------------------------------------------------------------- -spec concat(L :: list(), IL :: inflist()) -> inflist(). %% @doc %% Attach list to infinite list from the beginning. %% %% Example: %% <pre> %% IL = [a1, a2, a3, a4, a5, ..] 
%% L = [b1, b2, b3] %% T - not inflist %% %% concat(L, IL) -> [b1, b2, b3, a1, a2, a3, a4, a5, ..] %% concat(T, IL) -> [T, a1, a2, a3, a4, a5, ..] %% </pre> concat([], IL) when is_record(IL, inflist) -> IL; concat(T, IL) when not is_list(T) -> concat([T], IL); concat([LH | LT], #inflist{h = H, acc = Acc, f = F}) -> iterate ( LH, {false, LT}, fun (_, {false, Cur_L}) -> case Cur_L of [Cur_LH | Cur_LT] -> {Cur_LH, {false, Cur_LT}}; [] -> {H, {true, Acc}} end; (Cur_H, {true, Cur_Acc}) -> {New_H, New_Acc} = F(Cur_H, Cur_Acc), {New_H, {true, New_Acc}} end ); concat(L, IL) -> throw({badarg, {L, IL}}). %--------------------------------------------------------------------------------------------------- % Sparse. %--------------------------------------------------------------------------------------------------- -spec sparse(IL :: inflist(), N :: integer()) -> inflist(). %% @doc %% Take sparse infinite list (first element, and then every (N + 1)-th). %% %% Example: %% <pre> %% A = [a1, a2, a3, a4, a5, ..] %% %% sparse(A, 0) -> A %% sparse(A, 1) -> [a1, a3, a5, ...] %% sparse(A, 2) -> [a1, a4, a7, ...] %% </pre> sparse(IL, 0) when is_record(IL, inflist) -> IL; sparse(#inflist{h = H, acc = Acc, f = F}, N) when (is_integer(N) andalso (N > 0)) -> iterate ( H, Acc, fun(Cur_H, Cur_Acc) -> FN = fun Loc_FN(Loc_H, Loc_Acc, 0) -> {Loc_H, Loc_Acc}; Loc_FN(Loc_H, Loc_Acc, Loc_N) -> {New_H, New_Acc} = F(Loc_H, Loc_Acc), Loc_FN(New_H, New_Acc, Loc_N - 1) end, FN(Cur_H, Cur_Acc, N + 1) end ); sparse(IL, N) -> throw({badarg, {IL, N}}). %--------------------------------------------------------------------------------------------------- -spec odds(IL :: inflist()) -> inflist(). %% @doc %% Odd elements of list. %% %% Example: %% <pre> %% A = [a1, a2, a4, a4, a5, ..] %% %% odds(A) -> [a1, a3, a5, ..] %% </pre> odds(IL) -> sparse(IL, 1). %--------------------------------------------------------------------------------------------------- -spec evens(IL :: inflist()) -> inflist(). 
%% @doc %% Even elements of list. %% %% Example: %% <pre> %% A = [a1, a2, a3, a4, a5, ..] %% %% evens(A) -> [a2, a4, a6, ..] %% </pre> evens(IL) -> sparse(tl(IL), 1). %--------------------------------------------------------------------------------------------------- % Merge/unmerge. %--------------------------------------------------------------------------------------------------- -spec merge(IL1 :: inflist(), IL2 :: inflist()) -> inflist(). %% @doc %% Merge two infinite lists. %% %% Example: %% <pre> %% A = [a1, a2, a3, a4, a5, ..] %% B = [b1, b2, b3, b4, b5, ..] %% %% merge(A, B) -> [a1, b1, a2, b2, a3, b3, a4, b4, ..] %% </pre> merge(#inflist{h = H1, acc = Acc1, f = F1}, #inflist{h = H2, acc = Acc2, f = F2}) -> iterate ( H1, {{H2, Acc2}, F1(H1, Acc1), false}, fun(_, {{Cur_H, Cur_Acc}, Next, Is_F1}) -> { Cur_H, { Next, (if Is_F1 -> F1; true -> F2 end)(Cur_H, Cur_Acc), not Is_F1 } } end ); merge(IL1, IL2) -> throw({badarg, {IL1, IL2}}). %--------------------------------------------------------------------------------------------------- -spec unmerge(IL :: inflist()) -> {inflist(), inflist()}. %% @doc %% Split infinite list to odd and even elements infinite lists. %% %% Example: %% <pre> %% A = [a1, a2, a3, a4, a4, ..] %% %% unmerge(A) -> {[a1, a3, a5, ..], [a2, a4, a6, ..]} %% </pre> unmerge(IL) -> {odds(IL), evens(IL)}. %--------------------------------------------------------------------------------------------------- % Taylor series. %--------------------------------------------------------------------------------------------------- -spec taylor_exp(X :: number()) -> inflist(). %% @doc %% Taylor series of e^x for (-inf, inf). %% <pre> %% X X^2 X^3 %% taylor_exp(X) -> [1, ---, -----, -----, ..] %% 1! 2! 3! %% </pre> taylor_exp(X) -> dvs(power_series(X), facts()). %--------------------------------------------------------------------------------------------------- -spec taylor_lnxp1(X :: number()) -> inflist(). %% @doc %% Taylor series of ln(1 + x) for (-1, 1]. 
%% <pre> %% X^2 X^3 X^4 %% taylor_lnxp1(X) -> ln(x + 1) = [X, - -----, -----, - -----, ..] %% 2 3 4 %% </pre> taylor_lnxp1(X) when ((X =< -1) orelse (X > 1)) -> throw({badarg, X}); taylor_lnxp1(X) -> sign_alternate(dvs(tl(power_series(X)), naturals())). %--------------------------------------------------------------------------------------------------- -spec taylor_sin(X :: number()) -> inflist(). %% @doc %% Taylor series of sin(x) for (-inf, inf). %% <pre> %% X^3 X^5 X^7 %% taylor_sin(X) = [X, - -----, -----, - -----, ..] %% 3! 5! 7! %% </pre> taylor_sin(X) -> sign_alternate(evens(taylor_exp(X))). %--------------------------------------------------------------------------------------------------- -spec taylor_cos(X :: number()) -> inflist(). %% @doc %% Taylor series of cos(x) for (-inf, inf). %% <pre> %% X^2 X^4 X^6 %% taylor_cos(X) = [1, - -----, -----, - -----, ..] %% 2! 4! 6! %% </pre> taylor_cos(X) -> sign_alternate(odds(taylor_exp(X))). %--------------------------------------------------------------------------------------------------- -spec taylor_arctg(X :: number()) -> inflist(). %% @doc %% Taylor series of arctg(x) for (-1, 1). %% <pre> %% X^3 X^5 X^7 %% taylor_arctg(X) = [X, - -----, -----, - -----, ..] %% 3 5 7 %% </pre> taylor_arctg(X) when (abs(X) >= 1) -> throw({badarg, X}); taylor_arctg(X) -> sign_alternate(dvs(evens(power_series(X)), naturals())). %--------------------------------------------------------------------------------------------------- % Sets operations. %--------------------------------------------------------------------------------------------------- -spec mono_merge(IL1 :: inflist(), IL2 :: inflist()) -> inflist(). %% @doc %% Merge of two monotonous lists. %% %% Example: %% <pre> %% A = [1, 3, 5, 7, 9, ..] %% B = [1, 4, 9, 16, 25, ..] %% %% mono_merge(A, B) -> [1, 1, 3, 4, 5, 7, 9, 9, 11, 13, ..] 
%% </pre> mono_merge(IL1, IL2) -> IL = iterate ( 0, {IL1, IL2}, fun(_, {L1, L2}) -> {H1, T1} = ht(L1), {H2, T2} = ht(L2), if H1 < H2 -> {H1, {T1, L2}}; true -> {H2, {L1, T2}} end end ), tl(IL). %--------------------------------------------------------------------------------------------------- -spec mono_unique(IL :: inflist()) -> inflist(). %% @doc %% Take only unique elements of infinite list. %% Warning. %% This is dangerous function, because it can lead to infinite loop %% when list has tail consisting of the same element. %% %% Example: %% <pre> %% mono_unique([1, 1, 2, 3, 3, 3, 4, 5, ..]) -> [1, 2, 3, 4, 5, ..] %% mono_unique([a, a, a, a, a, ..]) -> infinite loop %% </pre> mono_unique(#inflist{h = H, acc = Acc, f = F}) -> iterate ( H, Acc, fun _F(Cur_H, Cur_Acc) -> {Next_H, Next_Acc} = F(Cur_H, Cur_Acc), if Next_H =:= Cur_H -> _F(Next_H, Next_Acc); true -> {Next_H, Next_Acc} end end ). %--------------------------------------------------------------------------------------------------- -spec mono_union(IL1 :: inflist(), IL2 :: inflist()) -> inflist(). %% @doc %% Union of two monotolnous lists. %% %% Example: %% <pre> %% A = [1, 3, 5, 7, 9, ..] %% B = [1, 4, 9, 16, 25, ..] %% %% mono_union(A, B) -> [1, 3, 4, 5, 7, 9, 11, 13, ..] %% </pre> mono_union(IL1, IL2) -> mono_unique(mono_merge(IL1, IL2)). %--------------------------------------------------------------------------------------------------- -spec mono_intersection(IL1 :: inflist(), IL2 :: inflist()) -> inflist(). %% @doc %% Intersection of two monotonous lists. %% Warning. %% Dangerous function, because it can lead to infinite loop. %% %% Example: %% <pre> %% A = [1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, ..] %% B = [1, 4, 9, 16, 25, ..] %% %% mono_intersection(A, B) -> [1, 9, 25, ..] 
%% mono_intersection(odds(), evens()) -> infinite loop %% </pre> mono_intersection(IL1, IL2) -> IL = iterate ( 0, {IL1, IL2}, fun F_(_, {L1, L2}) -> {H1, T1} = ht(L1), {H2, T2} = ht(L2), if H1 < H2 -> F_(none, {drop_less(L1, H2), L2}); H1 > H2 -> F_(none, {L1, drop_less(L2, H1)}); true -> {H1, {T1, T2}} end end ), tl(IL). %--------------------------------------------------------------------------------------------------- -spec mono_complement(IL1 :: inflist(), IL2 :: inflist()) -> inflist(). %% @doc %% Complement of monotonous infinite lists. %% All element of the first list which are not elements of the second list. %% %% Example: %% <pre> %% A = [1, 3, 5, 7, 9, 11, 13, ..] %% B = [1, 4, 9, 16, 25, ..] %% %% mono_complement(A, B) -> [3, 5, 7, 11, 13, 15, 17, 19, 21, 23, 27, 29, ..] %% </pre> mono_complement(IL1, IL2) -> IL = iterate ( 0, {IL1, IL2}, fun F_(_, {L1, L2}) -> {H1, T1} = ht(L1), {H2, T2} = ht(L2), if H1 < H2 -> {H1, {T1, L2}}; H1 > H2 -> F_(none, {L1, drop_less(L2, H1)}); true -> F_(none, {T1, T2}) end end ), tl(IL). %---------------------------------------------------------------------------------------------------
src/inflists.erl
0.507812
0.455259
inflists.erl
starcoder
-module(coello_queue_spec). -include_lib("espec/include/espec.hrl"). -include_lib("hamcrest/include/hamcrest.hrl"). -include_lib("amqp_client/include/amqp_client.hrl"). spec() -> before_all(fun() -> meck:new([amqp_connection, amqp_channel]), meck:expect(amqp_connection, start, 1, connection), meck:expect(amqp_connection, open_channel, 1, {ok, channel}) end), after_all(fun() -> meck:unload([amqp_connection, amqp_channel]) end), describe("declare/1", fun() -> it("should create a exclusive, server named queue", fun()-> meck:expect(amqp_channel, call, 2, #'queue.declare_ok'{ queue = <<"abc">>}), Params = #'queue.declare'{exclusive = false}, {ok, QueueName} = coello_queue:declare(channel), assert_that(meck:called(amqp_channel, call, [channel, Params]), is(true)), assert_that(QueueName, is(<<"abc">>)) end) end), describe("declare/2", fun() -> it("should create a exclusive queue named with the given name", fun() -> QueueName = <<"queuequeue">>, meck:expect(amqp_channel, call, 2, #'queue.declare_ok'{ queue = QueueName}), Params = #'queue.declare'{exclusive = false, queue = QueueName }, {ok, CreatedQueue} = coello_queue:declare(channel, QueueName), assert_that(meck:called(amqp_channel, call, [channel, Params]), is(true)), assert_that(CreatedQueue, is(QueueName)) end) end), describe("bind/4", fun() -> it("should bind the queue with the passed in routing key", fun()-> meck:sequence(amqp_channel, call, 2, [#'queue.declare_ok'{}, #'queue.bind_ok'{}]), Exchange = <<"exchange-A">>, RoutingKey = <<"pandecea">>, QueueName = <<"cocacola">>, Params = #'queue.bind'{queue = QueueName, exchange = Exchange, routing_key = RoutingKey }, coello_queue:bind(channel, QueueName, RoutingKey, Exchange), assert_that(meck:called(amqp_channel, call, [channel, Params]), is(true)) end) end), describe("delete/2", fun() -> it("should delete the queue unconditionally", fun()-> meck:expect(amqp_channel, call, 2, #'queue.delete_ok'{}), QueueName = <<"acola">>, Method = #'queue.delete'{ queue = QueueName}, 
coello_queue:delete(channel, QueueName), assert_that(meck:called(amqp_channel, call, [channel, Method]), is(true)) end) end).
test/spec/coello_queue_spec.erl
0.655557
0.428054
coello_queue_spec.erl
starcoder
%% Copyright (c) 2016 <NAME> <<EMAIL>> %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. -module(tansu_api). -export([info/0]). -export([kv_delete/1]). -export([kv_get/1]). -export([kv_get_children_of/1]). -export([kv_set/2]). -export([kv_set/3]). -export([kv_subscribe/1]). -export([kv_test_and_delete/2]). -export([kv_test_and_set/3]). -export([kv_test_and_set/4]). -export([kv_unsubscribe/1]). -define(CATEGORY, user). info() -> tansu_consensus:info(). kv_delete(Key) -> tansu_consensus:ckv_delete(?CATEGORY, Key). kv_get(Key) -> tansu_consensus:ckv_get(?CATEGORY, Key). kv_get_children_of(Parent) -> maps:fold( fun ({?CATEGORY, Child}, {Data, Metadata}, A) -> A#{Child => {Data, Metadata}}; (_, _, A) -> A end, #{}, tansu_consensus:ckv_get_children_of(?CATEGORY, Parent)). kv_set(Key, Value) -> kv_set(Key, Value, #{}). kv_set(Key, Value, Options) -> tansu_consensus:ckv_set(?CATEGORY, Key, Value, Options). kv_test_and_delete(Key, ExistingValue) -> tansu_consensus:ckv_test_and_delete(?CATEGORY, Key, ExistingValue). kv_test_and_set(Key, ExistingValue, NewValue) -> kv_test_and_set(Key, ExistingValue, NewValue, #{}). kv_test_and_set(Key, ExistingValue, NewValue, Options) -> tansu_consensus:ckv_test_and_set(?CATEGORY, Key, ExistingValue, NewValue, Options). kv_subscribe(Key) -> tansu_sm:subscribe(?CATEGORY, Key). kv_unsubscribe(Key) -> tansu_sm:unsubscribe(?CATEGORY, Key).
src/tansu_api.erl
0.509032
0.412353
tansu_api.erl
starcoder
%% Copyright (c) 2012, <NAME> <<EMAIL>> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above %% copyright notice and this permission notice appear in all copies. %% %% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES %% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF %% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR %% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES %% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. %% @doc Event filter implementation. %% %% An event query is constructed using the built in operators exported from %% this module. The filtering operators are used to specify which events %% should be included in the output of the query. The default output action %% is to copy all events matching the input filters associated with a query %% to the output. This makes it possible to construct and compose multiple %% queries at runtime. %% %% === Examples of built in filters === %% ``` %% %% Select all events where 'a' exists and is greater than 0. %% glc:gt(a, 0). %% %% Select all events where 'a' exists and is equal to 0. %% glc:eq(a, 0). %% %% Select all events where 'a' exists and is less than 0. %% glc:lt(a, 0). %% %% %% Select no input events. Used as black hole query. %% glc:null(false). %% %% Select all input events. Used as passthrough query. %% glc:null(true). %% ''' %% %% === Examples of combining filters === %% ``` %% %% Select all events where both 'a' and 'b' exists and are greater than 0. %% glc:all([glc:gt(a, 0), glc:gt(b, 0)]). %% %% Select all events where 'a' or 'b' exists and are greater than 0. %% glc:any([glc:get(a, 0), glc:gt(b, 0)]). 
%% ''' %% %% === Handling output events === %% %% Once a query has been composed it is possible to override the output action %% with an erlang function. The function will be applied to each output event %% from the query. The return value from the function will be ignored. %% %% ``` %% %% Write all input events as info reports to the error logger. %% glc:with(glc:null(true), fun(E) -> %% error_logger:info_report(gre:pairs(E)) end). %% ''' %% -module(glc). -export([ compile/2, handle/2, delete/1 ]). -export([ lt/2, eq/2, gt/2 ]). -export([ all/1, any/1, null/1, with/2 ]). -export([ union/1 ]). -record(module, { 'query' :: term(), tables :: [{atom(), ets:tid()}], qtree :: term() }). -spec lt(atom(), term()) -> glc_ops:op(). lt(Key, Term) -> glc_ops:lt(Key, Term). -spec eq(atom(), term()) -> glc_ops:op(). eq(Key, Term) -> glc_ops:eq(Key, Term). -spec gt(atom(), term()) -> glc_ops:op(). gt(Key, Term) -> glc_ops:gt(Key, Term). %% @doc Filter the input using multiple filters. %% %% For an input to be considered valid output the all filters specified %% in the list must hold for the input event. The list is expected to %% be a non-empty list. If the list of filters is an empty list a `badarg' %% error will be thrown. -spec all([glc_ops:op()]) -> glc_ops:op(). all(Filters) -> glc_ops:all(Filters). %% @doc Filter the input using one of multiple filters. %% %% For an input to be considered valid output on of the filters specified %% in the list must hold for the input event. The list is expected to be %% a non-empty list. If the list of filters is an empty list a `badarg' %% error will be thrown. -spec any([glc_ops:op()]) -> glc_ops:op(). any(Filters) -> glc_ops:any(Filters). %% @doc Always return `true' or `false'. -spec null(boolean()) -> glc_ops:op(). null(Result) -> glc_ops:null(Result). %% @doc Apply a function to each output of a query. %% %% Updating the output action of a query finalizes it. 
Attempting %% to use a finalized query to construct a new query will result %% in a `badarg' error. -spec with(glc_ops:op(), fun((gre:event()) -> term())) -> glc_ops:op(). with(Query, Action) -> glc_ops:with(Query, Action). %% @doc Return a union of multiple queries. %% %% The union of multiple queries is the equivalent of executing multiple %% queries separately on the same input event. The advantage is that filter %% conditions that are common to all or some of the queries only need to %% be tested once. %% %% All queries are expected to be valid and have an output action other %% than the default which is `output'. If these expectations don't hold %% a `badarg' error will be thrown. -spec union([glc_ops:op()]) -> glc_ops:op(). union(Queries) -> glc_ops:union(Queries). %% @doc Compile a query to a module. %% %% On success the module representing the query is returned. The module and %% data associated with the query must be released using the {@link delete/1} %% function. The name of the query module is expected to be unique. -spec compile(atom(), list()) -> {ok, atom()}. compile(Module, Query) -> {ok, ModuleData} = module_data(Query), case glc_code:compile(Module, ModuleData) of {ok, Module} -> {ok, Module} end. %% @doc Handle an event using a compiled query. %% %% The input event is expected to have been returned from {@link gre:make/2}. -spec handle(atom(), gre:event()) -> ok. handle(Module, Event) -> Module:handle(Event). %% @doc Release a compiled query. %% %% This releases all resources allocated by a compiled query. The query name %% is expected to be associated with an existing query module. Calling this %% function will result in a runtime error. -spec delete(atom()) -> ok. delete(_Module) -> ok. %% @private Map a query to a module data term. -spec module_data(term()) -> {ok, #module{}}. module_data(Query) -> %% terms in the query which are not valid arguments to the %% erl_syntax:abstract/1 functions are stored in ETS. 
%% the terms are only looked up once they are necessary to %% continue evaluation of the query. Params = ets:new(params, [set,protected]), %% query counters are stored in a shared ETS table. this should %% be an optional feature. enable by defaults to simplify tests. Counters = ets:new(counters, [set,public]), ets:insert(Counters, [{input,0}, {filter,0}, {output,0}]), %% the abstract_tables/1 function expects a list of name-tid pairs. %% tables are referred to by name in the generated code. the table/1 %% function maps names to tids. Tables = [{params,Params}, {counters,Counters}], Query2 = glc_lib:reduce(Query), {ok, #module{'query'=Query, tables=Tables, qtree=Query2}}. %% @todo Move comment. %% @private Map a query to a simplified query tree term. %% %% The simplified query tree is used to combine multiple queries into one %% query module. The goal of this is to reduce the filtering and dispatch %% overhead when multiple concurrent queries are executed. %% %% A fixed selection condition may be used to specify a property that an event %% must have in order to be considered part of the input stream for a query. %% %% For the sake of simplicity it is only possible to define selection %% conditions using the fields present in the context and identifiers %% of an event. The fields in the context are bound to the reserved %% names: %% %% - '$n': node name %% - '$a': application name %% - '$p': process identifier %% - '$t': timestamp %% %% %% If an event must be selected based on the runtime state of an event handler %% this must be done in the body of the handler. -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). setup_query(Module, Query) -> ?assertNot(erlang:module_loaded(Module)), ?assertEqual({ok, Module}, case (catch compile(Module, Query)) of {'EXIT',_}=Error -> ?debugFmt("~p", [Error]), Error; Else -> Else end), ?assert(erlang:function_exported(Module, table, 1)), ?assert(erlang:function_exported(Module, handle, 1)), {compiled, Module}. 
nullquery_compiles_test() -> {compiled, Mod} = setup_query(testmod1, glc:null(false)), ?assertError(badarg, Mod:table(noexists)). params_table_exists_test() -> {compiled, Mod} = setup_query(testmod2, glc:null(false)), ?assert(is_integer(Mod:table(params))), ?assertMatch([_|_], ets:info(Mod:table(params))). nullquery_exists_test() -> {compiled, Mod} = setup_query(testmod3, glc:null(false)), ?assert(erlang:function_exported(Mod, info, 1)), ?assertError(badarg, Mod:info(invalid)), ?assertEqual({null, false}, Mod:info('query')). init_counters_test() -> {compiled, Mod} = setup_query(testmod4, glc:null(false)), ?assertEqual(0, Mod:info(input)), ?assertEqual(0, Mod:info(filter)), ?assertEqual(0, Mod:info(output)). filtered_event_test() -> %% If no selection condition is specified no inputs can match. {compiled, Mod} = setup_query(testmod5, glc:null(false)), glc:handle(Mod, gre:make([], [list])), ?assertEqual(1, Mod:info(input)), ?assertEqual(1, Mod:info(filter)), ?assertEqual(0, Mod:info(output)). nomatch_event_test() -> %% If a selection condition but no body is specified the event %% is expected to count as filtered out if the condition does %% not hold. {compiled, Mod} = setup_query(testmod6, glc:eq('$n', 'noexists@nohost')), glc:handle(Mod, gre:make([{'$n', 'noexists2@nohost'}], [list])), ?assertEqual(1, Mod:info(input)), ?assertEqual(1, Mod:info(filter)), ?assertEqual(0, Mod:info(output)). opfilter_eq_test() -> %% If a selection condition but no body is specified the event %% counts as input to the query, but not as filtered out. {compiled, Mod} = setup_query(testmod7, glc:eq('$n', 'noexists@nohost')), glc:handle(Mod, gre:make([{'$n', 'noexists@nohost'}], [list])), ?assertEqual(1, Mod:info(input)), ?assertEqual(0, Mod:info(filter)), ?assertEqual(1, Mod:info(output)), done. 
opfilter_gt_test() -> {compiled, Mod} = setup_query(testmod8, glc:gt(a, 1)), glc:handle(Mod, gre:make([{'a', 2}], [list])), ?assertEqual(1, Mod:info(input)), ?assertEqual(0, Mod:info(filter)), glc:handle(Mod, gre:make([{'a', 0}], [list])), ?assertEqual(2, Mod:info(input)), ?assertEqual(1, Mod:info(filter)), ?assertEqual(1, Mod:info(output)), done. opfilter_lt_test() -> {compiled, Mod} = setup_query(testmod9, glc:lt(a, 1)), glc:handle(Mod, gre:make([{'a', 0}], [list])), ?assertEqual(1, Mod:info(input)), ?assertEqual(0, Mod:info(filter)), ?assertEqual(1, Mod:info(output)), glc:handle(Mod, gre:make([{'a', 2}], [list])), ?assertEqual(2, Mod:info(input)), ?assertEqual(1, Mod:info(filter)), ?assertEqual(1, Mod:info(output)), done. allholds_op_test() -> {compiled, Mod} = setup_query(testmod10, glc:all([glc:eq(a, 1), glc:eq(b, 2)])), glc:handle(Mod, gre:make([{'a', 1}], [list])), glc:handle(Mod, gre:make([{'a', 2}], [list])), ?assertEqual(2, Mod:info(input)), ?assertEqual(2, Mod:info(filter)), glc:handle(Mod, gre:make([{'b', 1}], [list])), glc:handle(Mod, gre:make([{'b', 2}], [list])), ?assertEqual(4, Mod:info(input)), ?assertEqual(4, Mod:info(filter)), glc:handle(Mod, gre:make([{'a', 1},{'b', 2}], [list])), ?assertEqual(5, Mod:info(input)), ?assertEqual(4, Mod:info(filter)), ?assertEqual(1, Mod:info(output)), done. anyholds_op_test() -> {compiled, Mod} = setup_query(testmod11, glc:any([glc:eq(a, 1), glc:eq(b, 2)])), glc:handle(Mod, gre:make([{'a', 2}], [list])), glc:handle(Mod, gre:make([{'b', 1}], [list])), ?assertEqual(2, Mod:info(input)), ?assertEqual(2, Mod:info(filter)), glc:handle(Mod, gre:make([{'a', 1}], [list])), glc:handle(Mod, gre:make([{'b', 2}], [list])), ?assertEqual(4, Mod:info(input)), ?assertEqual(2, Mod:info(filter)), done. with_function_test() -> Self = self(), {compiled, Mod} = setup_query(testmod12, glc:with(glc:eq(a, 1), fun(Event) -> Self ! 
gre:fetch(a, Event) end)), glc:handle(Mod, gre:make([{a,1}], [list])), ?assertEqual(1, Mod:info(output)), ?assertEqual(1, receive Msg -> Msg after 0 -> notcalled end), done. union_error_test() -> ?assertError(badarg, glc:union([glc:eq(a, 1)])), done. -endif.
src/glc.erl
0.729038
0.46132
glc.erl
starcoder
%%%------------------------------------------------------------------- %%% @doc %%% Streams is the Strava term for the raw data associated with an %%% activity. %%% @reference http://strava.github.io/api/v3/streams/ %%% @end %%% For copyright notice see LICENSE. %%%------------------------------------------------------------------- -module(strava_stream). %% Types -export_type([stream/0, t/0, type/0]). %% Streams functions -export([activity/3, activity/4, effort/3, effort/4, route/2, segment/3, segment/4]). %%%=================================================================== %%% Types %%%=================================================================== -type stream() :: map(). -type type() :: altitude | cadence | distance | grade_smooth | heartrate | latlng | moving | temp | time | velocity_smooth | watts. -type t() :: stream(). %%%=================================================================== %%% Streams functions %%%=================================================================== %%-------------------------------------------------------------------- %% @doc %% Retrieve activity streams. Streams represent the raw data of the %% uploaded file. External applications may only access this %% information for activities owned by the authenticated athlete. %% %% @see activity/4 %% @end %%-------------------------------------------------------------------- -spec activity(strava_auth:token(), integer(), [type()]) -> {ok, [t()]} | strava:error(). activity(Token, Id, Types) -> activity(Token, Id, Types, _Options = #{}). %%-------------------------------------------------------------------- %% @doc %% Retrieve activity streams. For options see Strava documentation. %% @end %%-------------------------------------------------------------------- -spec activity(strava_auth:token(), integer(), [type()], map()) -> {ok, [t()]} | strava:error(). activity(Token, Id, Types, Options) -> streams(Token, [<<"activities">>, Id, <<"streams">>], Types, Options). 
%%-------------------------------------------------------------------- %% @doc %% Retrieve effort streams. Returns a subset of the activity streams %% that correspond to the effort. %% %% @see effort/4 %% @end %%-------------------------------------------------------------------- -spec effort(strava_auth:token(), integer(), [type()]) -> {ok, [t()]} | strava:error(). effort(Token, Id, Types) -> effort(Token, Id, Types, _Options = #{}). %%-------------------------------------------------------------------- %% @doc %% Retrieve effort streams. For options see Strava documentation. %% @end %%-------------------------------------------------------------------- -spec effort(strava_auth:token(), integer(), [type()], map()) -> {ok, [t()]} | strava:error(). effort(Token, Id, Types, Options) -> streams(Token, [<<"segment_efforts">>, Id, <<"streams">>], Types, Options). %%-------------------------------------------------------------------- %% @doc %% Retrieve route streams. Only `distance`, `altitude` and `latlng` %% stream types are available and always returned. No options could be %% specified. %% @end %%-------------------------------------------------------------------- -spec route(strava_auth:token(), integer()) -> {ok, [t()]} | strava:error(). route(Token, Id) -> streams(Token, [<<"routes">>, Id, <<"streams">>], _Types = [], _Options = #{}). %%-------------------------------------------------------------------- %% @doc %% Retrieve segment streams. Only `distance', `altitude' and `latlng' %% stream types are available. %% %% @see segment/4 %% @end %%-------------------------------------------------------------------- -spec segment(strava_auth:token(), integer(), [type()]) -> {ok, [t()]} | strava:error(). segment(Token, Id, Types) -> segment(Token, Id, Types, _Options = #{}). %%-------------------------------------------------------------------- %% @doc %% Retrieve segment streams. For options see Strava documentation. 
%% @end %%-------------------------------------------------------------------- -spec segment(strava_auth:token(), integer(), [type()], map()) -> {ok, [t()]} | strava:error(). segment(Token, Id, Types, Options) -> streams(Token, [<<"segments">>, Id, <<"streams">>], Types, Options). %%%=================================================================== %%% Internal functions %%%=================================================================== %%-------------------------------------------------------------------- %% @doc %% Retrieve streams. %% @end %%-------------------------------------------------------------------- -spec streams(strava_auth:token(), strava_api:path(), [type()], map()) -> {ok, [t()]} | strava:error(). streams(Token, Path, Types, Options) -> Path1 = Path ++ [string:join(lists:map(fun strava_util:to_string/1, Types), ",")], Options1 = maps:fold( fun(K, V, Ans) when K =:= resolution, V =:= low orelse V =:= medium orelse V =:= high -> Ans#{K => V}; (K, V, Ans) when K =:= series_type, V =:= time orelse V =:= distance -> Ans#{K => V}; (_K, _V, Ans) -> Ans end, _Ans = #{}, Options), strava_api:convert( strava_api:read(Token, Path1, Options1), {list, fun strava_repr:to_stream/1} ).
src/strava_stream.erl
0.51879
0.46217
strava_stream.erl
starcoder
-module(iso8601). -export([add_time/4, add_days/2, add_months/2, add_years/2, subtract_time/4, format/1, parse/1, parse_exact/1, parse_duration/1, apply_duration/2]). -export_types([timestamp/0]). -define(MIDNIGHT, {0,0,0}). -define(NOON, {12,0,0}). -define(TEATIME, {16,0,0}). -define(V, proplists:get_value). -type datetime() :: {calendar:date(), {calendar:hour(), calendar:minute(), calendar:second() | float()}}. -type datetime_plist() :: list({atom(), integer()}). -type maybe(A) :: undefined | A. -type timestamp() :: {MegaSecs::integer(), Secs::integer(), MicroSecs::integer() | float()}. %% API -spec add_time (calendar:datetime(), integer(), integer(), integer()) -> calendar:datetime(). %% @doc Add some time to the supplied `calendar:datetime()'. add_time({_, _, _}=Timestamp, H, M, S) -> add_time(calendar:now_to_datetime(Timestamp), H, M, S); add_time(Datetime, H, M, S) -> apply_offset(Datetime, H, M, S). -spec add_days (datetime() | timestamp(), integer()) -> datetime(). %% @doc Add some days to the supplied `datetime()'. add_days({_, _, _}=Timestamp, D) -> add_days(calendar:now_to_datetime(Timestamp), D); add_days(Datetime, D) -> apply_days_offset(Datetime, D). -spec add_months (datetime() | timestamp(), integer()) -> datetime(). %% @doc Add some months to the supplied `datetime()'. add_months({_, _, _}=Timestamp, M) -> add_months(calendar:now_to_datetime(Timestamp), M); add_months(Datetime, M) -> apply_months_offset(Datetime, M). -spec add_years (datetime() | timestamp(), integer()) -> datetime(). %% @doc Add some years to the supplied `datetime()'. add_years({_, _, _}=Timestamp, Y) -> add_years(calendar:now_to_datetime(Timestamp), Y); add_years(Datetime, Y) -> apply_years_offset(Datetime, Y). -spec subtract_time (calendar:datetime(), integer(), integer(), integer()) -> calendar:datetime(). %% @doc Subtract some time from the supplied `calendar:datetime()'. subtract_time(Datetime, H, M, S) -> apply_offset(Datetime, -H, -M, -S). 
-spec format (datetime() | timestamp()) -> binary(). %% @doc Convert a `util:timestamp()' or a calendar-style `{date(), time()}' %% tuple to an ISO 8601 formatted string. Note that this function always %% returns a string with no offset (i.e., ending in "Z"). format({_,_,_}=Timestamp) -> format(calendar:now_to_datetime(Timestamp)); format({{Y,Mo,D}, {H,Mn,S}}) when is_float(S) -> FmtStr = "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~9.6.0fZ", IsoStr = io_lib:format(FmtStr, [Y, Mo, D, H, Mn, S]), list_to_binary(IsoStr); format({{Y,Mo,D}, {H,Mn,S}}) -> FmtStr = "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0BZ", IsoStr = io_lib:format(FmtStr, [Y, Mo, D, H, Mn, S]), list_to_binary(IsoStr). -spec parse (iodata()) -> calendar:datetime(). %% @doc Convert an ISO 8601 formatted string to a `{date(), time()}' parse(Bin) when is_binary(Bin) -> parse(binary_to_list(Bin)); parse(Str) -> {{Date, {H, M, S}}, Subsecond} = year(Str, []), {Date, {H, M, S + round(Subsecond)}}. -spec parse_exact (iodata()) -> calendar:datetime(). %% @doc Convert an ISO 8601 formatted string to a `{date(), time()}' %% tuple with seconds precision to 3 decimal places parse_exact(Bin) when is_binary(Bin) -> parse_exact(binary_to_list(Bin)); parse_exact(Str) -> {{Date, {H, M, S}}, SecondsDecimal} = year(Str, []), {Date, {H, M, S + SecondsDecimal}}. -spec gi(string()) ->integer(). %doc get string and return integer part or 0 on error gi(DS)-> {Int, _Rest} = string:to_integer(DS), case Int of error->0; _->Int end. -spec parse_duration(string()) ->datetime_plist(). %% @doc Convert an ISO 8601 Durations string to a parse_duration(Bin) when is_binary(Bin)-> %TODO extended format parse_duration(binary_to_list(Bin)); parse_duration(Str) -> case re:run(Str, "^(?<sign>-|\\+)?P" "(?:(?<years>[0-9]+)Y)?" "(?:(?<months>[0-9]+)M)?" "(?:(?<days>[0-9]+)D)?" "(T(?:(?<hours>[0-9]+)H)?" "(?:(?<minutes>[0-9]+)M)?" 
"(?:(?<seconds>[0-9]+(?:\\.[0-9]+)?)S)?)?$", [{capture, [sign, years, months, days, hours, minutes, seconds], list}]) of {match, [Sign, Years, Months, Days, Hours, Minutes, Seconds]} -> [{sign, Sign}, {years, gi(Years)}, {months, gi(Months)}, {days, gi(Days)}, {hours, gi(Hours)}, {minutes, gi(Minutes)}, {seconds, gi(Seconds)}]; nomatch -> error(badarg) end. -spec apply_duration(datetime(), string()) -> datetime(). %% @doc Return new datetime after apply duration. apply_duration(Datetime, Duration) -> [{sign, _S}, {years, Y}, {months, M}, {days, D}, {hours, H}, {minutes, MM}, {seconds, SS}] = parse_duration(Duration), D1 = apply_years_offset(Datetime, Y), D2 = apply_months_offset(D1, M), D3 = apply_days_offset(D2, D), apply_offset(D3, H, MM, SS). %% Private functions year([Y1,Y2,Y3,Y4|Rest], Acc) -> acc([Y1,Y2,Y3,Y4], Rest, year, Acc, fun month_or_week/2); year(_, _) -> error(badarg). month_or_week([], Acc) -> datetime(Acc); month_or_week([$-,$W,W1,W2|Rest], Acc) -> acc([W1,W2], Rest, week, Acc, fun week_day/2); month_or_week([$-,D1,D2,D3], Acc) -> %% ordinal date, no time io:format("Ordinal date, no time!~n"), acc_ordinal_date(D1, D2, D3, [], Acc, fun hour/2); month_or_week([$-,D1,D2,D3,$T|Rest], Acc) -> %% ordinal date with time io:format("Ordinal date, time is ~p!~n", [Rest]), acc_ordinal_date(D1, D2, D3, [$T|Rest], Acc, fun hour/2); month_or_week([$-,M1,M2|Rest], Acc) -> acc([M1,M2], Rest, month, Acc, fun month_day/2); month_or_week([$W,W1,W2|Rest], Acc) -> acc([W1,W2], Rest, week, Acc, fun week_day_no_hyphen/2); month_or_week([M1,M2|Rest], Acc) -> acc([M1,M2], Rest, month, Acc, fun month_day_no_hyphen/2); month_or_week(_, _) -> error(badarg). week_day([], Acc) -> datetime(Acc); week_day([$-,D|Rest], Acc) -> acc([D], Rest, week_day, Acc, fun hour/2); week_day(_, _) -> error(badarg). week_day_no_hyphen([], Acc) -> datetime(Acc); week_day_no_hyphen([D|Rest], Acc) -> acc([D], Rest, week_day, Acc, fun hour/2); week_day_no_hyphen(_, _) -> error(badarg). 
month_day([], Acc) -> datetime(Acc); month_day([$-,D1,D2|Rest], Acc) -> acc([D1,D2], Rest, month_day, Acc, fun hour/2); month_day(_, _) -> error(badarg). month_day_no_hyphen([], _) -> error(badarg); % omission of day disallowed by spec in this case month_day_no_hyphen([D1,D2|Rest], Acc) -> acc([D1,D2], Rest, month_day, Acc, fun hour/2); month_day_no_hyphen(_, _) -> error(badarg). hour([], Acc) -> datetime(Acc); hour([$T,H1,H2,$.|Rest], Acc) -> acc([H1,H2], Rest, hour, Acc, fun hour_decimal/2); hour([$T,H1,H2,$,|Rest], Acc) -> acc([H1,H2], Rest, hour, Acc, fun hour_decimal/2); hour([$T,H1,H2|Rest], Acc) -> acc([H1,H2], Rest, hour, Acc, fun minute/2); hour(_, _) -> error(badarg). hour_decimal(Str, Acc) -> decimal(Str, Acc, hour_decimal). minute([], Acc) -> datetime(Acc); minute([$:,M1,M2,$.|Rest], Acc) -> acc([M1,M2], Rest, minute, Acc, fun minute_decimal/2); minute([$:,M1,M2,$,|Rest], Acc) -> acc([M1,M2], Rest, minute, Acc, fun minute_decimal/2); minute([$:,M1,M2|Rest], Acc) -> acc([M1,M2], Rest, minute, Acc, fun second/2); minute([M1,M2,$.|Rest], Acc) -> acc([M1,M2], Rest, minute, Acc, fun minute_decimal/2); minute([M1,M2,$,|Rest], Acc) -> acc([M1,M2], Rest, minute, Acc, fun minute_decimal/2); minute([M1,M2|Rest], Acc) -> acc([M1,M2], Rest, minute, Acc, fun second_no_colon/2); minute(_, _) -> error(badarg). minute_decimal(Str, Acc) -> decimal(Str, Acc, minute_decimal). second([], Acc) -> datetime(Acc); second([$:,S1,S2,$.|Rest], Acc) -> acc([S1,S2], Rest, second, Acc, fun second_decimal/2); second([$:,S1,S2,$,|Rest], Acc) -> acc([S1,S2], Rest, second, Acc, fun second_decimal/2); second([$:,S1,S2|Rest], Acc) -> acc([S1,S2], Rest, second, Acc, fun offset_hour/2); second(_, _) -> error(badarg). 
second_no_colon([], Acc) -> datetime(Acc); second_no_colon([S1,S2,$.|Rest], Acc) -> acc([S1,S2], Rest, second, Acc, fun second_decimal/2); second_no_colon([S1,S2,$,|Rest], Acc) -> acc([S1,S2], Rest, second, Acc, fun second_decimal/2); second_no_colon([S1,S2|Rest], Acc) -> acc([S1,S2], Rest, second, Acc, fun offset_hour/2); second_no_colon(_, _) -> error(badarg). second_decimal(Str, Acc) -> decimal(Str, Acc, second_decimal). decimal([], _, _) -> error(badarg); decimal(Str, Acc, Key) -> F = fun(X) when is_integer(X), X >= $0, X =< $9 -> true; (_) -> false end, {Parts, Rest} = lists:splitwith(F, Str), acc_float([$0,$.|Parts], Rest, Key, Acc, fun offset_hour/2). offset_hour([], Acc) -> datetime(Acc); offset_hour([$Z], Acc) -> datetime(Acc); offset_hour([$+,H1,H2|Rest], Acc) -> acc([H1,H2], Rest, offset_hour, Acc, fun offset_minute/2); offset_hour([$-,H1,H2|Rest], Acc) -> acc([H1,H2], Rest, offset_hour, [{offset_sign, -1}|Acc], fun offset_minute/2); offset_hour(_, _) -> error(badarg). offset_minute([], Acc) -> datetime(Acc); offset_minute([$:,M1,M2], Acc) -> acc([M1,M2], [], offset_minute, Acc, fun datetime/2); offset_minute([M1,M2], Acc) -> acc([M1,M2], [], offset_minute, Acc, fun datetime/2); offset_minute(_, _) -> error(badarg). acc(IntStr, Rest, Key, Acc, NextF) -> Acc1 = [{Key, list_to_integer(IntStr)}|Acc], NextF(Rest, Acc1). acc_float(FloatStr, Rest, Key, Acc, NextF) -> Acc1 = [{Key, list_to_float(FloatStr)}|Acc], NextF(Rest, Acc1). acc_ordinal_date(D1, D2, D3, Rest, Acc, NextF) -> Days = list_to_integer([D1, D2, D3]), Days > 0 orelse error(badarg), Year = ?V(year, Acc), Year =/= undefined orelse error(badarg), DaysInMonths = days_in_months_for_year(Year), { Month, Day } = unpack_ordinal_date(Days, DaysInMonths), Acc1 = [{ month, Month }, { month_day, Day }|Acc], NextF(Rest, Acc1). unpack_ordinal_date(Days, DaysInMonths) -> unpack_ordinal_date(1, Days, DaysInMonths). 
unpack_ordinal_date(_Month, _Days, []) -> error(badarg), { 0, 0 }; unpack_ordinal_date(_Month, Days, _DaysInMonths) when Days < 0 -> error(badarg), { 0, 0 }; unpack_ordinal_date(Month, Days, [DaysThisMonth|DaysInMonths]) -> case Days > DaysThisMonth of true -> unpack_ordinal_date(Month + 1, Days - DaysThisMonth, DaysInMonths); _ -> { Month, Days } end. days_in_months_for_year(Year) -> case is_leap_year(Year) of true -> [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; false -> [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] end. is_leap_year(Year) -> case Year rem 100 of 0 -> Year rem 400 =:= 0; _ -> Year rem 4 =:= 0 end. add_decimal(Datetime, Plist) -> HDecimal = ?V(hour_decimal, Plist, 0.0), MDecimal = ?V(minute_decimal, Plist, 0.0), apply_offset(Datetime, HDecimal, MDecimal, 0.0). datetime(Plist) -> {Date, WeekOffsetH} = make_date(Plist), Time = {?V(hour, Plist, 0), ?V(minute, Plist, 0), ?V(second, Plist, 0)}, Datetime = add_decimal({Date, Time}, Plist), OffsetSign = ?V(offset_sign, Plist, 1), OffsetH = -1 * OffsetSign * ?V(offset_hour, Plist, 0), OffsetM = -1 * OffsetSign * ?V(offset_minute, Plist, 0), { apply_offset(Datetime, WeekOffsetH+OffsetH, OffsetM, 0), ?V(second_decimal, Plist, 0.0) }. datetime(_, Plist) -> datetime(Plist). -spec make_date (datetime_plist()) -> {Date::calendar:date(), WeekOffsetH::non_neg_integer()}. %% @doc Return a `tuple' containing a date and, if the date is in week format, %% an offset in hours that can be applied to the date to adjust it to midnight %% of the day specified. If month format is used, the offset will be zero. make_date(Plist) -> Year = ?V(year, Plist), Year =/= undefined orelse error(badarg), make_date(Year, ?V(month, Plist, 1), ?V(week, Plist), Plist). -spec make_date (non_neg_integer(), maybe(pos_integer()), maybe(pos_integer()), datetime_plist()) -> {calendar:date(), non_neg_integer()}. 
%% @doc Return a `tuple' containing a date and - if the date is in week format %% (i.e., `Month' is undefined, `Week' is not) - an offset in hours that can be %% applied to the date to adjust it to midnight of the day specified. If month %% format is used (i.e., `Week' is undefined, `Month' is not), the offset will %% be zero. make_date(Year, Month, undefined, Plist) -> Date = {Year, Month, ?V(month_day, Plist, 1)}, {Date, 0}; make_date(Year, _, Week, Plist) -> Weekday = ?V(week_day, Plist, 1), OffsetH = ((Week-1)*7 + (Weekday-1))*24, % week/weekday offset in hours {date_at_w01_1(Year), OffsetH}. -spec date_at_w01_1(pos_integer()) -> calendar:date(). %% @doc Calculate the `calendar:date()' at ISO week 1, day 1 in the supplied %% year. date_at_w01_1(Year) -> case calendar:day_of_the_week({Year,1,1}) of 1 -> {Year, 1, 1}; 2 -> {Year-1, 12, 31}; 3 -> {Year-1, 12, 30}; 4 -> {Year-1, 12, 29}; 5 -> {Year, 1, 4}; 6 -> {Year, 1, 3}; 7 -> {Year, 1, 2} end. -spec apply_offset (calendar:datetime(), number(), number(), number()) -> calendar:datetime(). %% @doc Add the specified number of hours, minutes and seconds to `Datetime'. apply_offset(Datetime, H, M, S) -> OffsetS = S + (60 * (M + (60 * H))), Gs = round(OffsetS) + calendar:datetime_to_gregorian_seconds(Datetime), calendar:gregorian_seconds_to_datetime(Gs). -spec apply_months_offset (datetime(), number()) -> datetime(). %% @doc Add the specified number of months to `Datetime'. apply_months_offset(Datetime, 0) -> Datetime; apply_months_offset(Datetime, AM) -> {{Y, M, D}, {H, MM, S}} = Datetime, AY = (Y*12)+M+AM, Year = (AY div 12), Month = case (AY rem 12) of 0 -> 12; _ -> AY rem 12 end, find_last_valid_date({{Year, Month, D}, {H, MM, S}}). -spec apply_days_offset (datetime(), number()) -> datetime(). %% @doc Add the specified days to `Datetime'. 
apply_days_offset(Datetime, AD) -> {{Y, M, D}, {H, MM, S}} = Datetime, DaysTotal=calendar:date_to_gregorian_days({Y, M, D})+AD, {calendar:gregorian_days_to_date(DaysTotal), {H, MM, S}}. -spec apply_years_offset (datetime(), number()) -> datetime(). %% @doc Add the specified years to `Datetime'. apply_years_offset(Datetime, AY) -> {{Y, M, D}, {H, MM, S}} = Datetime, {{Y+AY, M, D}, {H, MM, S}}. -spec find_last_valid_date(datetime()) -> datetime(). %% @doc Decrease days until found valid date'. find_last_valid_date(Datetime)-> {{Y, M, D}, {H, MM, S}} = Datetime, case calendar:valid_date({Y, M, D}) of true -> Datetime; false -> find_last_valid_date({{Y, M, D-1}, {H, MM, S}}) end.
src/iso8601.erl
0.54359
0.423995
iso8601.erl
starcoder
%%% @doc Merkle Trees are a data structures devised especially to find %%% conflicts or diverging pieces of data between two data sets. %%% %%% They're more or less a hybrid between a sparse K-ary tree and a %%% trie of hash values. %%% %%% Each `{Key, Value}' pair gets two hashes: a hash of the key (Hkey), and %%% a hash of the hashed key and the hashed value (Hash). %%% %%% The Hkey is used as the main index and to build a tree. If we have three %%% hashes with the values `<<213,21,54,...>>', `<<213,33,98,...>>', and %%% `<<11,45,101,...>>', the resulting tree/trie is: %%% %%% (Root) %%% Inner %%% / \ %%% / \ %%% (11) (213) %%% <<11,45,101,...>> Inner %%% / \ %%% / \ %%% (21) (33) %%% <<213,21,54,...>> <<213,33,98,...>> %%% %%% Each of the leaf nodes will contain both hashes, along with a non-hashed %%% version of the key. Each Inner node contains a hash of all its children's %%% hash values, and indexes them by the hash byte at the given depth. %%% %%% This structure allows to quickly compare for changes in values, missing %%% nodes, and so on, but more importantly allows to quickly know if the data %%% sets (or subsets of them) are identical. %%% %%% It also allows to do a level-order traversal node-per-node over the network %%% allowing somewhat efficient diffing. %%% @end -module(merklet). -record(leaf, {hash :: binary(), % hash(hash(key), hash(value)) userkey :: binary(), % user submitted key hashkey :: binary()}). % hash of the user submitted key -record(inner, {hashchildren :: binary(), % hash of children's hashes children :: [{offset(), #inner{} | #leaf{}}, ...], offset :: non_neg_integer()}). % byte offset -define(HASHPOS, 2). % #leaf.hash =:= #inner.hashchildren -type offset() :: byte(). -type leaf() :: #leaf{}. -type inner() :: #inner{}. -type tree() :: leaf() | inner() | 'undefined'. -type key() :: binary(). -type value() :: binary(). -type path() :: binary(). 
-type access_fun() :: fun((at | child_at | keys | {keys, Hash::binary()}, path()) -> tree()).
-type serial_fun() :: fun((at | child_at | keys | {keys, Hash::binary()}, path()) -> binary()).
-export_type([tree/0, key/0, value/0, path/0, access_fun/0, serial_fun/0]).

-export([insert/2, insert_many/2, delete/2, keys/1, diff/2]).
-export([dist_diff/2, access_serialize/1, access_unserialize/1]).

%% Hash algorithm and its digest size in bytes.
-define(HASH, sha).
-define(HASHBYTES, 20).
%% Serialization format tags (see serialize/1 and unserialize/1).
-define(VSN, 1).
-define(UNDEFINED, 0).
-define(INNER, 1).
-define(LEAF, 2).
-define(OFFSETBYTE, 3).
-define(KEYS, 4).
-define(KEYS_SKIP, 5).
-define(KEYS_SKIP_UNSEEN, 0).
-define(KEYS_SKIP_SAME, 1).
-define(KEYS_SKIP_DIFF, 2).

%%%%%%%%%%%
%%% API %%%
%%%%%%%%%%%

%% @doc Adds a key to the tree, or overwrites an existing one.
-spec insert({key(), value()}, tree()) -> tree().
insert({Key, Value}, Tree) ->
    insert(0, to_leaf(Key, Value), Tree).

%% @doc Adds multiple keys to the tree, or overwrites existing ones.
-spec insert_many(list({key(), value()}), tree()) -> tree().
insert_many([], Tree) -> Tree;
insert_many([H|T], Tree) -> insert_many(T, insert(H, Tree)).

%% @doc Removes a key from a tree, if present.
%% The empty value passed to to_leaf/2 is irrelevant here: deletion
%% matches on the hashed key only (see delete_leaf/2).
-spec delete(key(), tree()) -> tree().
delete(Key, Tree) ->
    delete_leaf(to_leaf(Key, <<>>), Tree).

%% @doc Returns a sorted list of all the keys in the tree.
-spec keys(tree()) -> [key()].
keys(Tree) ->
    lists:usort(raw_keys(Tree)).

%% @doc Takes two trees and returns the different keys between them.
-spec diff(tree(), tree()) -> [key()].
diff(Tree1, Tree2) ->
    %% We use the remote access for this local comparison. This is
    %% slower than a dedicated traversal algorithm, but less code
    %% means fewer chances of breaking stuff.
    Fun = access_local(Tree2),
    diff(Tree1, Fun, <<>>).

%% @doc Takes a local tree, and an access function to another tree,
%% and returns the keys associated with diverging parts of both trees.
%% The access fun takes an atom and a path and must return a flat tree
%% node or a subtree (`fun(Verb, Path) -> Node | undefined').
%%
%% The Path is a sequence of bytes (in a `binary()') telling how to get to
%% a specific node:
%%
%% - `<<>>' means returning the current node, at whatever point we are in the
%%   tree's traversal.
%% - `<<Offset,...>>' means to return the node at the given offset for the
%%   current tree level. For example, a value of `<<0>>' means to return the
%%   leftmost child of the current node, whereas `<<3>>' should return the
%%   4th leftmost child. Any time the path is larger than the number of
%%   children, we return `undefined'.
%%   This is the case where we can recurse.
%% - Any invalid path returns `undefined'.
%%
%% The three terms required are:
%% - `at': Uses the path as above to traverse the tree and return a node.
%% - `keys': Returns all the keys held (recursively) by the node at a given
%%   path. A special variant exists of the form `{keys, Key, Hash}', where the
%%   function must return the key set minus the one that would contain either
%%   `Key' or `Hash', but by specifying if the key and hash were encountered,
%%   and if so, if they matched or not.
%% - `child_at': Special case of `at' used when comparing child nodes of two
%%   inner nodes. Basically the same as `at', but with one new rule:
%%
%%   Whenever we hit a path that is `<<N>>' and we are on an inner node,
%%   it means we only have a child to look at. Return that child along
%%   with its byte at the offset in the dictionary structure
%%   (`{ByteAtOffset, Node}').
%%
%% Examples of navigation through a tree of the form:
%%
%%  0 |      ___.-A-._____
%%    |     /      |      \
%%  1 |  .-B-.     C     .-D-.
%%    | /     \         /     \
%%  2 | E      F      .G.      H
%%    |              /   \
%%  3 |             I     J
%%
%% Which is four levels deep. The following paths lead to following nodes:
%%
%%  +==============+===========+   +==============+===========+
%%  | Path         | Node      |   | Path         | Node      |
%%  +==============+===========+   +==============+===========+
%%  | <<>>         | A         |   | <<0,1>>      | F         |
%%  | <<0>>        | B         |   | <<2,0>>      | G         |
%%  | <<1>>        | C         |   | <<2,1>>      | H         |
%%  | <<2>>        | D         |   | <<2,0,0>>    | I         |
%%  | <<3>>        | undefined |   | <<2,0,1>>    | J         |
%%  | <<0,0>>      | E         |   | <<2,0,1,3>>  | undefined |
%%  +--------------+-----------+   +--------------+-----------+
%%
%% The values returned are all the keys that differ across both trees.
-spec dist_diff(tree(), access_fun()) -> [key()].
dist_diff(Tree, Fun) when is_function(Fun,2) ->
    diff(Tree, Fun, <<>>).

%% @doc Returns an `access_fun()' for the current tree. This function
%% can be put at the end of a connection to a remote node, and it
%% will return serialized tree nodes.
-spec access_serialize(tree()) -> serial_fun().
access_serialize(Tree) ->
    fun(at, Path) -> serialize(at(Path, Tree));
       (child_at, Path) -> serialize(child_at(Path, Tree));
       (keys, Path) -> serialize(raw_keys(at(Path, Tree)));
       ({keys,Key,Skip}, Path) -> serialize(raw_keys(at(Path, Tree), Key, Skip))
    end.

%% @doc Takes an {@link access_fun()} that fetches nodes serialized according
%% to the format used by {@link access_serialize/2}, and returns a new {@link
%% access_fun()} that will unserialized and can be used directly in
%% {@link dist_diff/2}
-spec access_unserialize(serial_fun()) -> access_fun().
access_unserialize(Fun) ->
    fun(Arg, Path) -> unserialize(Fun(Arg,Path)) end.

%%%%%%%%%%%%%%%
%%% PRIVATE %%%
%%%%%%%%%%%%%%%

%% if the tree is empty, just use the leaf
insert(_Offset, Leaf, undefined) ->
    Leaf;
%% If the offset is at the max value for the hash, return the leaf --
%% We can't go deeper anyway.
insert(?HASHBYTES, Leaf, _) ->
    Leaf;
%% if the current node of the tree is a leaf and both keys are the same,
%% replace it.
insert(_Offset, Leaf=#leaf{hashkey=Key}, #leaf{hashkey=Key}) ->
    Leaf;
%% if the current node of the tree is a leaf, and keys are different, turn the
%% current leaf into an inner node, and insert the new one in it.
insert(Offset, NewLeaf, OldLeaf=#leaf{}) ->
    insert(Offset, NewLeaf, to_inner(Offset, OldLeaf));
%% Insert to an inner node!
insert(Offset, Leaf=#leaf{hashkey=Key}, Inner=#inner{children=Children, hashchildren=Hash}) ->
    Byte = binary:at(Key, Offset),
    case orddict:find(Byte, Children) of
        error ->
            %% New child byte: fold the new leaf's hash into the running
            %% node hash (children_hash/2 is xor-based, see below).
            Inner#inner{hashchildren=children_hash([{Byte, Leaf}], Hash),
                        children=orddict:store(Byte, Leaf, Children)};
        {ok, Subtree} ->
            InsertLeaf = insert(Offset+1, Leaf, Subtree),
            %% xor is its own inverse: folding in both the updated and the
            %% old subtree hash removes the old contribution and adds the
            %% new one in a single pass.
            NewHash = children_hash([{Byte, InsertLeaf}, {Byte, Subtree}], Hash),
            Inner#inner{hashchildren=NewHash,
                        children=orddict:store(Byte, InsertLeaf, Children)}
    end.

%% Not found or empty tree. Leave as is.
delete_leaf(_, undefined) ->
    undefined;
%% If we have the same leaf node we were looking for, kill it.
delete_leaf(#leaf{hashkey=K}, #leaf{hashkey=K}) ->
    undefined;
%% If it's a different leaf, the item to delete is already gone. Leave as is.
delete_leaf(#leaf{}, Leaf=#leaf{}) ->
    Leaf;
%% if it's an inner node, look inside
delete_leaf(Leaf=#leaf{hashkey=K}, Inner=#inner{offset=Offset, children=Children}) ->
    Byte = binary:at(K, Offset),
    case orddict:find(Byte, Children) of
        error -> % not found, leave as is
            Inner;
        {ok, Subtree} ->
            NewChildren = case maybe_shrink(delete_leaf(Leaf, Subtree)) of
                undefined -> % leaf gone
                    orddict:erase(Byte, Children);
                Node -> % replacement node
                    orddict:store(Byte, Node, Children)
            end,
            %% Unlike insert/3, the node hash is recomputed from scratch
            %% here, and the node itself may collapse into a single leaf.
            maybe_shrink(Inner#inner{hashchildren=children_hash(NewChildren),
                                     children=NewChildren})
    end.

%% Flat (unsorted) list of every user key in a subtree.
raw_keys(undefined) ->
    [];
raw_keys(#leaf{userkey=Key}) ->
    [Key];
raw_keys(#inner{children=Children}) ->
    lists:append(orddict:fold(
        fun(_Byte, Node, Acc) -> [raw_keys(Node)|Acc] end,
        [],
        Children
    )).
%% Same as raw_keys/1, but reports on a given hash and key: the key list
%% is tagged with whether the watched key/hash was `unseen', seen with the
%% `same' hash (value identical on both sides), or seen with a `diff'erent
%% hash. A matching leaf is excluded from the returned key list either way.
raw_keys(I=#inner{}, KeyToWatch, ToSkip) ->
    raw_keys(I, KeyToWatch, ToSkip, unseen).

%% Empty subtree: status passes through unchanged.
raw_keys(undefined, _, _, Status) ->
    {Status, []};
%% Exact hash match: key and value identical on both sides; skip the key.
raw_keys(#leaf{hash=Hash}, _, Hash, _Status) ->
    {same, []};
%% Same user key but a different hash: the value diverged; skip the key.
raw_keys(#leaf{userkey=Key}, Key, _, _Status) ->
    {diff, []};
%% Unrelated leaf: accumulate its key.
raw_keys(#leaf{userkey=Key}, _, _, Status) ->
    {Status, [Key]};
raw_keys(#inner{children=Children}, Key, ToSkip, InitStatus) ->
    %% Thread the status through every child while collecting keys.
    {Status, DeepList} = lists:foldl(
        fun({_, Node}, {Status, Acc}) ->
            {NewStatus, ToAdd} = raw_keys(Node, Key, ToSkip, Status),
            {NewStatus, [ToAdd|Acc]}
        end,
        {InitStatus, []},
        Children
    ),
    {Status, lists:append(DeepList)}.

%% Compare the local tree with the remote one (behind `Fun') starting at
%% `Path', and return the sorted set of diverging keys.
-spec diff(tree(), access_fun(), path()) -> [key()].
diff(Tree, Fun, Path) ->
    lists:usort(raw_diff(Tree, Fun(at, Path), Fun, Path)).

%% Empty trees yield all keys of remaining trees
raw_diff(undefined, undefined, _, _) -> [];
raw_diff(undefined, _Tree2, Fun, Path) -> Fun(keys, Path);
raw_diff(Tree1, undefined, _, _) -> raw_keys(Tree1);
%% If hashes are the same, we're done.
raw_diff(#leaf{hash=Hash}, #leaf{hash=Hash}, _, _) -> [];
raw_diff(#leaf{hash=Hash}, #inner{hashchildren=Hash}, _, _) -> [];
raw_diff(#inner{hashchildren=Hash}, #leaf{hash=Hash}, _, _) -> [];
raw_diff(#inner{hashchildren=Hash}, #inner{hashchildren=Hash}, _, _) -> [];
%% if they differ and both nodes are leaf nodes, return both values
raw_diff(#leaf{userkey=Key1}, #leaf{userkey=Key2}, _, _) -> [Key1,Key2];
%% if both differ but one is an inner node, return everything
raw_diff(#leaf{userkey=Key, hash=ToSkip}, #inner{}, Fun, Path) ->
    %% We can only get rid of the current Key if the hashes are the same
    case Fun({keys, Key, ToSkip}, Path) of
        {same, Keys} -> Keys;
        {diff, Keys} -> [Key|Keys];
        {unseen, Keys} -> [Key|Keys]
    end;
raw_diff(Inner=#inner{}, #leaf{userkey=Key, hash=ToSkip}, _, _) ->
    %% We can only get rid of the current Key if the hashes are the same
    case raw_keys(Inner, Key, ToSkip) of
        {same, Keys} -> Keys;
        {diff, Keys} -> [Key|Keys];
        {unseen, Keys} -> [Key|Keys]
    end;
%% if both nodes are inner and populated, compare them offset by offset.
raw_diff(#inner{children=Children}, #inner{}, Fun, Path) ->
    ChildPath = <<Path/binary, 0>>,
    diff_offsets(children_offsets(Children), Fun(child_at, ChildPath), Fun, ChildPath).

%% Walk the local children (sorted by offset byte) against the remote
%% children fetched one at a time through `Fun(child_at, Path)'.
%% Whatever is left alone on either side is returned wholesale.
diff_offsets([], undefined, _, _) ->
    [];
%% Remote side exhausted: everything remaining locally differs.
diff_offsets(List, undefined, _, _) ->
    lists:append([raw_keys(Child) || {_, Child} <- List]);
%% Local side exhausted: drain the remaining remote children's keys.
diff_offsets([], _, Fun, Path) ->
    Keys = Fun(keys, Path),
    case next_child_path(Path) of
        undefined -> Keys;
        Next -> Keys ++ diff_offsets([], Fun(child_at, Next), Fun, Next)
    end;
%% If both offsets are the same, compare recursively.
diff_offsets(L=[{OffL, Child}|Rest], R={OffR,Node}, Fun, Path) ->
    if OffL =:= OffR ->
           Diff = raw_diff(Child, Node, Fun, Path),
           case next_child_path(Path) of
               undefined -> Diff;
               Next -> Diff ++ diff_offsets(Rest, Fun(child_at, Next), Fun, Next)
           end;
       OffL < OffR ->
           %% Local child has no remote counterpart at this offset.
           raw_keys(Child) ++ diff_offsets(Rest, R, Fun, Path);
       OffL > OffR ->
           %% Remote child has no local counterpart at this offset.
           Keys = Fun(keys, Path),
           case next_child_path(Path) of
               undefined -> Keys;
               Next -> Keys ++ diff_offsets(L, Fun(child_at, Next), Fun, Next)
           end
    end.

%% Advance the last byte of a path to address the next sibling, or return
%% `undefined' once the byte range (0..255) is exhausted.
next_child_path(Path) ->
    ParentSize = byte_size(Path) - 1,
    <<ParentPath:ParentSize/binary, ChildByte>> = Path,
    case ChildByte+1 of
        256 -> undefined;
        Next -> <<ParentPath/binary, Next>>
    end.

%%% Basic Tree Management Functions

%% @doc Takes a Key and a Value and turns them to a leaf node.
-spec to_leaf(key(), value()) -> leaf().
to_leaf(Key, Value) when is_binary(Key) ->
    %% We use the hash of the value as part of the 'hash' entry,
    %% but not the 'hashkey'. This allows a tree where the structure
    %% is based on the keys, but we can still compare and use both
    %% the key and its value to do comparison when diffing.
    HashKey = crypto:hash(?HASH, Key),
    #leaf{userkey=Key,
          hashkey=HashKey,
          hash=crypto:hash(?HASH, <<HashKey/binary, Value/binary>>)}.

%% @doc We build a Key-Value list of the child nodes and their offset
%% to be used as a sparse K-ary tree.
-spec to_inner(offset(), leaf()) -> inner().
to_inner(Offset, Child=#leaf{hashkey=Hash}) ->
    Children = orddict:store(binary:at(Hash, Offset), Child, orddict:new()),
    #inner{hashchildren=children_hash(Children),
           children=Children,
           offset=Offset}.

%% @doc The hash for an inner node is the bitwise xor of all its
%% children's hashes (interpreted as ?HASHBYTES-wide big integers),
%% folded into the initial accumulator. Because xor is commutative and
%% its own inverse, the result is order-independent and a child's old
%% hash can be removed by xor-ing it back in (see insert/3).
%%
%% We use the 'hash' value for leaf nodes so that comparison can be done
%% while caring about both keys and values. This has no impact on position
%% of inner nodes, because it is dictated by the children's keyhashes, and
%% not the inner node's own hashes.
%% @todo consider endianness for absolute portability
-spec children_hash([{offset(), leaf()}, ...]) -> binary().
children_hash(Children) ->
    children_hash(Children, 0).

%% The accumulator may be given either as an integer or as a binary
%% digest of exactly ?HASHBYTES bytes (converted on entry).
-spec children_hash([{offset(), leaf()}, ...], integer() | binary()) -> binary().
children_hash(Children, <<InitHash:(?HASHBYTES*8)/integer>>) ->
    children_hash(Children, InitHash);
children_hash(Children, InitHash) ->
    HashLength = ?HASHBYTES*8,
    Result = lists:foldl(fun({_, Child}, Acc) ->
        %% ?HASHPOS reads #leaf.hash or #inner.hashchildren alike.
        <<HasInt:HashLength/integer>> = element(?HASHPOS, Child),
        HasInt bxor Acc
    end, InitHash, Children),
    <<Result:HashLength/integer>>.

%% @doc Checks if the node can be shrunken down to a single leaf it contains
%% or should just be returned as is.
%% This avoids a problem where a deleted subtree results in an inner node
%% with a single element, which wastes space and can slow down diffing.
maybe_shrink(Leaf = #leaf{}) ->
    Leaf;
maybe_shrink(undefined) ->
    undefined;
maybe_shrink(Inner = #inner{children=Children}) ->
    %% The trick for this one is that if we have *any* child set that
    %% is anything else than a single leaf node, we can't shrink. We use
    %% a fold with a quick try ... catch to quickly figure this out, in
    %% two iterations at most.
    %% NOTE(review): with an empty children orddict the fold would return
    %% the bare accumulator 0 -- presumably unreachable given how inner
    %% nodes are built/deleted, but worth confirming.
    try
        orddict:fold(fun(_Offset, Leaf=#leaf{}, 0) -> Leaf;
                        (_, _, _) -> throw(false)
                     end, 0, Children)
    catch
        throw:false -> Inner
    end.

%% @doc Returns the sorted offsets of a given child. Because we're using
%% orddicts, we can just straight up return the children as is, but another
%% data structure would need to transform them into a key/value list sorted
%% by the offset: [{Offset, ChildNode}].
children_offsets(Children) -> Children.
%% Wrapper for the diff function: builds an access_fun() over a local
%% tree so diff/2 can reuse the remote (dist_diff) traversal code.
access_local(Node) ->
    fun(at, Path) -> at(Path, Node);
       (child_at, Path) -> child_at(Path, Node);
       (keys, Path) -> raw_keys(at(Path, Node));
       ({keys, Key, Skip}, Path) -> raw_keys(at(Path, Node), Key, Skip)
    end.

%% Return the node at a given position in a tree.
at(Path, Tree) ->
    case child_at(Path, Tree) of
        {_Off, Node} -> Node;
        Node -> Node
    end.

%% Special case of at/2 that returns the node at a given position in a tree,
%% but if the resulting node is the child of an inner node, return it with
%% its indexed offset.
%% This allows to diff inner nodes without contextual info while in the
%% offset traversal.
child_at(<<>>, Node) ->
    %% End of path, return whatever
    Node;
child_at(<<N,Rest/binary>>, #inner{children=Children}) ->
    %% Depending on the path depth, the behavior changes. If the path depth
    %% left is of one (i.e. `<<N>> = <<N,Rest/binary>>') and that we are in
    %% an inner node, then we're looking for the child definition as
    %% `{ByteAtOffset, ChildNode}'.
    %% Otherwise, this is how we keep recursing by looking deeper in a
    %% inner node.
    %% If the path goes past what the node contains, we return `undefined'.
    %% The fold counts down from N; the throw short-circuits as soon as the
    %% N-th child is reached, and a completed fold means N was out of range.
    try
        orddict:fold(fun(Off, Node, 0) when Rest =:= <<>> -> throw({Off,Node});
                        (_, Node, 0) -> throw(Node);
                        (_, _, X) -> X-1
                     end, N, Children),
        undefined
    catch
        throw:{Off,Node} -> {Off,Node};
        throw:Node -> child_at(Rest, Node)
    end;
%% Invalid path
child_at(_, _) -> undefined.

%% Serialize nodes flatly. All terms are self-contained and their
%% trailing value can be used as one blob. A protocol using this format
%% of serialization should therefore frame each binary before concatenating
%% them.
%%
%% Note that this format is sufficient for diffing, but not to rebuild entire
%% trees from scratch.
serialize(undefined) ->
    <<?VSN, ?UNDEFINED>>;
serialize(#leaf{userkey=Key, hashkey=HKey, hash=Hash}) ->
    <<?VSN, ?LEAF, ?HASHBYTES:32, HKey/binary, Hash/binary, Key/binary>>;
serialize(#inner{hashchildren=Hash}) ->
    %% Children are intentionally dropped; the remote side re-fetches them
    %% through child_at as the diff descends.
    <<?VSN, ?INNER, ?HASHBYTES:32, Hash/binary>>;
serialize({Offset, Node}) when is_record(Node, leaf); is_record(Node, inner) ->
    <<?VSN, ?OFFSETBYTE, Offset, (serialize(Node))/binary>>;
serialize(Keys) when is_list(Keys) ->
    Serialized = << <<(byte_size(Key)):16, Key/binary>> || Key <- Keys >>,
    <<?VSN, ?KEYS, (length(Keys)):16, Serialized/binary>>;
serialize({Word, Keys}) when is_list(Keys), is_atom(Word) ->
    Seen = case Word of
        unseen -> ?KEYS_SKIP_UNSEEN;
        same -> ?KEYS_SKIP_SAME;
        diff -> ?KEYS_SKIP_DIFF
    end,
    Serialized = << <<(byte_size(Key)):16, Key/binary>> || Key <- Keys >>,
    %% NOTE(review): `Seen:2' is a 2-bit segment, so this clause builds a
    %% bitstring whose total size is not a multiple of 8. unserialize/1
    %% matches it back symmetrically, but the serial_fun() spec advertises
    %% binary() -- confirm whether `Seen:8' was intended.
    <<?VSN, ?KEYS_SKIP, Seen:2, (length(Keys)):16, Serialized/binary>>.

%% Deserialize nodes flatly. Assume self-contained binaries.
%%
%% Note that this format is sufficient for diffing, but not to rebuild entire
%% trees from scratch.
unserialize(<<?VSN, ?UNDEFINED>>) ->
    undefined;
unserialize(<<?VSN, ?LEAF, ?HASHBYTES:32, HKey:?HASHBYTES/binary, Hash:?HASHBYTES/binary, Key/binary>>) ->
    #leaf{userkey=Key, hashkey=HKey, hash=Hash};
unserialize(<<?VSN, ?INNER, ?HASHBYTES:32, Hash:?HASHBYTES/binary>>) ->
    #inner{hashchildren=Hash};
unserialize(<<?VSN, ?OFFSETBYTE, Byte, Node/binary>>) ->
    {Byte, unserialize(Node)};
unserialize(<<?VSN, ?KEYS, NumKeys:16, Serialized/binary>>) ->
    Keys = [Key || <<Size:16, Key:Size/binary>> <= Serialized],
    %% Assert the framed count matches what was actually decoded.
    NumKeys = length(Keys),
    Keys;
unserialize(<<?VSN, ?KEYS_SKIP, Seen:2, NumKeys:16, Serialized/binary>>) ->
    Word = case Seen of
        ?KEYS_SKIP_UNSEEN -> unseen;
        ?KEYS_SKIP_SAME -> same;
        ?KEYS_SKIP_DIFF -> diff
    end,
    Keys = [Key || <<Size:16, Key:Size/binary>> <= Serialized],
    NumKeys = length(Keys),
    {Word, Keys}.
src/merklet.erl
0.524638
0.7773
merklet.erl
starcoder
%% @doc API for various coding schemes with Rust NIF as the backend.
%%
%% Exposes:
%%
%% `golay_extended_encode'
%%
%% `golay_extended_decode'
%%
%% `golay_standard_encode'
%%
%% `golay_standard_decode'
%%
%% `golay_shortened_encode'
%%
%% `golay_shortened_decode'
%%
%% `bch_encode'
%%
%% `bch_decode'
%%
%% `hamming_standard_encode'
%%
%% `hamming_standard_decode'
%%
%% `hamming_shortened_encode'
%%
%% `hamming_shortened_decode'
-module(erlcode).

%% API
-export(
    [
        golay_extended_encode/1,
        golay_extended_decode/1,
        golay_standard_encode/1,
        golay_standard_decode/1,
        golay_shortened_encode/1,
        golay_shortened_decode/1,
        bch_encode/1,
        bch_decode/1,
        hamming_standard_encode/1,
        hamming_standard_decode/1,
        hamming_shortened_encode/1,
        hamming_shortened_decode/1
    ]).

%% Native lib support
-export([load/0]).
-on_load(load/0).

%% NOTE(review): defines the ?MAX_*_BIT_INT macros used in the guards
%% below -- presumably `1 bsl Bits' (the guards use `<', an exclusive
%% bound); confirm in the header.
-include_lib("erlcode.hrl").

%% ==================================================================
%% API
%% ==================================================================
%% Every function below is a NIF stub: once load/0 has run, the native
%% implementation replaces the Erlang body. If the library failed to
%% load, calling a stub raises the not_loaded error (see not_loaded/1).

%% @doc Encode the given 12 data bits into a 24-bit codeword
-spec golay_extended_encode(Data :: pos_integer()) -> {ok, Encoded :: pos_integer()}.
golay_extended_encode(Data) when Data < ?MAX_12_BIT_INT ->
    not_loaded(?LINE).

%% @doc Try to decode the given 24-bit word to the nearest codeword,
%% correcting up to 3 errors and detecting 4 errors
%%
%% If decoding was successful, return `{ok, (data, err)}', where `data' is the 12
%% data bits and `err' is the number of corrected bits.
%% Otherwise, return `{error, {unrecoverable, data}}' to
%% indicate an unrecoverable error.
-spec golay_extended_decode(Data :: pos_integer()) ->
    {ok, {Decoded :: pos_integer(), Corrupted :: non_neg_integer()}} |
    {error, {unrecoverable, non_neg_integer()}}.
golay_extended_decode(Data) when Data < ?MAX_24_BIT_INT ->
    not_loaded(?LINE).

%% @doc Encode the given 12 data bits into a 23-bit codeword
-spec golay_standard_encode(Data :: pos_integer()) -> {ok, Encoded :: pos_integer()}.
golay_standard_encode(Data) when Data < ?MAX_12_BIT_INT ->
    not_loaded(?LINE).

%% @doc Try to decode the given 23-bit word to the nearest codeword,
%% correcting up to 3 errors and detecting 4 errors
%%
%% If decoding was successful, return `{ok, (data, err)}', where `data' is the 12
%% data bits and `err' is the number of corrected bits.
%% Otherwise, return `{error, {unrecoverable, data}}' to
%% indicate an unrecoverable error.
-spec golay_standard_decode(Data :: pos_integer()) ->
    {ok, {Decoded :: pos_integer(), Corrupted :: non_neg_integer()}} |
    {error, {unrecoverable, non_neg_integer()}}.
golay_standard_decode(Data) when Data < ?MAX_23_BIT_INT ->
    not_loaded(?LINE).

%% @doc Encode the given 6 data bits into a 18-bit codeword
-spec golay_shortened_encode(Data :: pos_integer()) -> {ok, Encoded :: pos_integer()}.
golay_shortened_encode(Data) when Data < ?MAX_6_BIT_INT ->
    not_loaded(?LINE).

%% @doc Try to decode the given 18-bit word to the nearest codeword,
%% correcting up to 3 errors
%%
%% If decoding was successful, return `{ok, (data, err)}', where `data' is the 6
%% data bits and `err' is the number of corrected bits.
%% Otherwise, return `{error, {unrecoverable, data}}' to
%% indicate an unrecoverable error.
-spec golay_shortened_decode(Data :: pos_integer()) ->
    {ok, {Decoded :: pos_integer(), Corrupted :: non_neg_integer()}} |
    {error, {unrecoverable, non_neg_integer()}}.
golay_shortened_decode(Data) when Data < ?MAX_18_BIT_INT ->
    not_loaded(?LINE).

%% @doc Encode the given 16 data bits into a 64-bit codeword
-spec bch_encode(Data :: pos_integer()) -> {ok, Encoded :: pos_integer()}.
bch_encode(Data) when Data < ?MAX_16_BIT_INT ->
    not_loaded(?LINE).

%% @doc Try to decode the given 64-bit word to the nearest codeword,
%% correcting up to 11 bit errors
%%
%% If decoding was successful, return `{ok, (data, err)}', where `data' is the 16
%% data bits and `err' is the number of corrected bits.
%% Otherwise, return `{error, {unrecoverable, data}}' to
%% indicate an unrecoverable error.
-spec bch_decode(Data :: pos_integer()) ->
    {ok, {Decoded :: pos_integer(), Corrupted :: non_neg_integer()}} |
    {error, {unrecoverable, non_neg_integer()}}.
bch_decode(Data) when Data < ?MAX_64_BIT_INT ->
    not_loaded(?LINE).

%% @doc Encode the given 11 data bits into a 15-bit codeword
-spec hamming_standard_encode(Data :: pos_integer()) -> {ok, Encoded :: pos_integer()}.
hamming_standard_encode(Data) when Data < ?MAX_11_BIT_INT ->
    not_loaded(?LINE).

%% @doc Try to decode the given 15-bit word to the nearest codeword,
%% correcting up to 1 bit error
%%
%% If decoding was successful, return `{ok, (data, err)}', where `data' is the 11
%% data bits and `err' is the number of corrected bits.
%% Otherwise, return `{error, {unrecoverable, data}}' to
%% indicate an unrecoverable error.
-spec hamming_standard_decode(Data :: pos_integer()) ->
    {ok, {Decoded :: pos_integer(), Corrupted :: non_neg_integer()}} |
    {error, {unrecoverable, non_neg_integer()}}.
hamming_standard_decode(Data) when Data < ?MAX_15_BIT_INT ->
    not_loaded(?LINE).

%% @doc Encode the given 6 data bits into a 10-bit codeword
-spec hamming_shortened_encode(Data :: pos_integer()) -> {ok, Encoded :: pos_integer()}.
hamming_shortened_encode(Data) when Data < ?MAX_6_BIT_INT ->
    not_loaded(?LINE).

%% @doc Try to decode the given 10-bit word to the nearest codeword,
%% correcting up to 1 bit error
%%
%% If decoding was successful, return `{ok, (data, err)}', where `data' is the 6
%% data bits and `err' is the number of corrected bits.
%% Otherwise, return `{error, {unrecoverable, data}}' to
%% indicate an unrecoverable error.
-spec hamming_shortened_decode(Data :: pos_integer()) ->
    {ok, {Decoded :: pos_integer(), Corrupted :: non_neg_integer()}} |
    {error, {unrecoverable, non_neg_integer()}}.
hamming_shortened_decode(Data) when Data < ?MAX_10_BIT_INT ->
    not_loaded(?LINE).
%% ==================================================================
%% NIF
%% ==================================================================

%% @private Load the native library from the application's priv dir.
%% Registered via -on_load, so it runs when the module is first loaded.
load() ->
    SoPath = filename:join(priv(), "libnative"),
    erlang:load_nif(SoPath, none).

%% @private Raised by every API stub when the NIF library is absent;
%% `Line' pinpoints the stub that was called.
not_loaded(Line) ->
    Info = [{module, ?MODULE}, {line, Line}],
    erlang:nif_error({error, {not_loaded, Info}}).

%% @private Resolve the priv directory. When code:priv_dir/1 fails
%% (e.g. running from an unpacked build), derive it from the location
%% of the module's beam file instead.
priv() ->
    case code:priv_dir(?MODULE) of
        {error, _} ->
            EbinDir = filename:dirname(code:which(?MODULE)),
            filename:join(filename:dirname(EbinDir), "priv");
        Dir ->
            Dir
    end.
src/erlcode.erl
0.600657
0.48377
erlcode.erl
starcoder
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
%   http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(knit_kmod).

-export([
    render/1
]).

-include("knit.hrl").

% Module attributes:
%  knit_priority - Set a module priority - int() | default (which is 0)
%  knit_extra    - Set the value of Extra for code_change
%  knit_depends  - Set module dependencies - [atom()]
%  knit_timeout  - Set an upgrade timeout - int()>0 | default | infinity
%  knit_purge    - Set the purge style - Purge | {PrePurge, PostPerge}
%  knit_apply    - Run a function during upgrade [MFA | {Phase, MFA}]
%                  where phase is first | immediate | last
%                  or {phase, priority}
%
%
% High-level
% ==========
%
% {update, Mod}
% {update, Mod, supervisor}
% {update, Mod, Change}
% {update, Mod, DepMods}
% {update, Mod, Change, DepMods}
% {update, Mod, Change, PrePurge, PostPurge, DepMods}
% {update, Mod, Timeout, Change, PrePurge, PostPurge, DepMods}
% {update, Mod, ModType, Timeout, Change, PrePurge, PostPurge, DepMods}
%       Mod = atom()
%       ModType = static | dynamic
%       Timeout = int()>0 | default | infinity
%       Change = soft | {advanced,Extra}
%       Extra = term()
%       PrePurge = PostPurge = soft_purge | brutal_purge
%       DepMods = [Mod]
%
% {load_module, Mod}
% {load_module, Mod, DepMods}
% {load_module, Mod, PrePurge, PostPurge, [Mod]}
%       Mod = atom()
%       PrePurge = PostPurge = soft_purge | brutal_purge
%       DepMods = [Mod]
%
% {add_module, Mod}
%       Mod = atom()
%
% {delete_module, Mod}
%       Mod = atom()
%
% {restart_application, Application}
%       Application = atom()

%% One entry per beam file taking part in the upgrade; populated from the
%% module's knit_* attributes (see new_kmod/2).
-record(kmod, {
    name,            % module name (atom)
    action,          % add | update | delete
    behaviors,       % behaviours declared by the module
    has_code_change, % true if the module exports code_change
    priority,        % knit_priority (higher runs first)
    extra,           % knit_extra -> {advanced, Extra} | soft
    depends,         % knit_depends module list
    timeout,         % knit_timeout
    purge,           % {PrePurge, PostPurge}
    apply_funs       % [{{Phase, Priority}, MFA}]
}).

%% @doc Turn a list of beam-file actions ({added, File} | {changed,
%% {Old, New}} | {removed, File}) into an ordered list of appup
%% instructions, with `first'/`last' knit_apply hooks wrapped around the
%% module instructions.
render(BeamActions) ->
    KMods0 = lists:map(fun new_kmod/1, BeamActions),
    KMods1 = lists:sort(fun cmp_kmods/2, KMods0),
    Instructions = lists:flatmap(fun render_kmod/1, KMods1),
    FirstFuns = render_apply_funs(first, KMods1),
    LastFuns = render_apply_funs(last, KMods1),
    FirstFuns ++ Instructions ++ LastFuns.

%% Map each beam-file action to a #kmod{} record.
new_kmod({added, BeamFile}) ->
    new_kmod(add, BeamFile);
new_kmod({changed, {_OldBeamFile, NewBeamFile}}) ->
    new_kmod(update, NewBeamFile);
new_kmod({removed, BeamFile}) ->
    %% A removed module only needs its name; attributes are gone anyway.
    Name = filename:rootname(filename:basename(BeamFile)),
    #kmod{name=list_to_atom(Name), action=delete}.

%% Read attributes and exports from the beam file and build the record.
new_kmod(Action, BeamFile) ->
    {ok, {Name, Chunks}} = beam_lib:chunks(BeamFile, [attributes, exports]),
    [{attributes, Attrs}, {exports, Exports}] = lists:sort(Chunks),
    #kmod{
        name = Name,
        action = Action,
        behaviors = get_behaviors(Name, Attrs),
        has_code_change = lists:keymember(code_change, 1, Exports),
        priority = get_priority(Name, Attrs),
        extra = get_extra(Name, Attrs),
        depends = get_depends(Name, Attrs),
        timeout = get_timeout(Name, Attrs),
        purge = get_purge(Name, Attrs),
        apply_funs = get_apply_funs(Name, Attrs)
    }.

cmp_kmods(A0, B0) ->
    % Order by action (add, then update, then delete)
    % then secondary sort on priority. The negation on
    % priority is so that higher priorities run first.
    ActOrder = fun
        (#kmod{action=add}) -> -1;
        (#kmod{action=update}) -> 0;
        (#kmod{action=delete}) -> 1
    end,
    A = {ActOrder(A0), -1 * A0#kmod.priority},
    B = {ActOrder(B0), -1 * B0#kmod.priority},
    A =< B.
%% Emit the appup instruction(s) for one module, followed by its
%% `immediate'-phase apply hooks.
render_kmod(#kmod{action=add}=K) ->
    [{add_module, K#kmod.name}];
render_kmod(#kmod{action=delete}=K) ->
    [{delete_module, K#kmod.name}];
render_kmod(#kmod{action=update}=K) ->
    #kmod{
        name = Name,
        behaviors = Behaviors,
        has_code_change = HasCC,
        extra = Extra0,
        depends = Deps,
        timeout = Timeout,
        purge = {Pre, Post}
    } = K,
    IsSup = lists:member(supervisor, Behaviors),
    Instructions0 = case {IsSup, HasCC} of
        {true, _} ->
            %% Supervisors always get a static advanced update.
            [{update, Name, static, Timeout, {advanced, []}, Pre, Post, Deps}];
        {false, true} ->
            %% Module exports code_change: promote the default `soft'
            %% to {advanced, []} so the callback actually runs.
            Extra = case Extra0 of
                soft -> {advanced, []};
                _ -> Extra0
            end,
            [{update, Name, dynamic, Timeout, Extra, Pre, Post, Deps}];
        {false, false} ->
            %% No state to migrate: a plain code swap suffices.
            [{load_module, Name, Pre, Post, Deps}]
    end,
    Instructions0 ++ render_apply_funs(immediate, [K]).

%% Collect all knit_apply hooks for `Phase' across `KMods', ordered by
%% their per-hook priority, and render them as {apply, MFA} instructions.
render_apply_funs(Phase, KMods) ->
    Funs0 = lists:flatmap(fun(K) ->
        lists:flatmap(fun({{Phase0, Pri}, MFA}) ->
            if
                Phase0 /= Phase -> [];
                true -> [{Pri, MFA}]
            end
        end, K#kmod.apply_funs)
    end, KMods),
    Funs1 = lists:sort(Funs0),
    [{apply, MFA} || {_, MFA} <- Funs1].

%% Accept both the American and British attribute spellings.
get_behaviors(_Name, Attrs) ->
    case get_attr(behavior, Attrs) of
        Behaviors0 when is_list(Behaviors0) ->
            Behaviors0;
        undefined ->
            case get_attr(behaviour, Attrs) of
                Behaviours0 when is_list(Behaviours0) ->
                    Behaviours0;
                undefined ->
                    []
            end
    end.

%% knit_priority attribute; defaults to 0.
get_priority(Name, Attrs) ->
    case get_attr(knit_priority, Attrs) of
        [P] when is_integer(P) ->
            P;
        undefined ->
            0;
        Else ->
            ?BAD_CONFIG("Invalid knit_priority in ~s: ~p", [Name, Else])
    end.

%% knit_extra attribute; wrapped as {advanced, E}, defaults to `soft'.
get_extra(Name, Attrs) ->
    case get_attr(knit_extra, Attrs) of
        [E] ->
            {advanced, E};
        undefined ->
            soft;
        Else ->
            ?BAD_CONFIG("Invalid knit_extra in ~s: ~p", [Name, Else])
    end.

%% knit_depends attribute; flattened and checked to be a list of atoms.
get_depends(Name, Attrs) ->
    case get_attr(knit_depends, Attrs) of
        Modules0 when is_list(Modules0) ->
            Modules = lists:flatten(Modules0),
            lists:foreach(fun(M) ->
                case is_atom(M) of
                    true ->
                        ok;
                    false ->
                        ?BAD_CONFIG("Invalid dependency in ~s: ~p", [Name, M])
                end
            end, Modules),
            Modules;
        undefined ->
            []
    end.
%% Read the knit_timeout attribute; defaults to `default'.
get_timeout(Name, Attrs) ->
    case get_attr(knit_timeout, Attrs) of
        undefined ->
            default;
        [default] ->
            default;
        [infinity] ->
            infinity;
        [TO] when is_integer(TO), TO >= 0 ->
            TO;
        Else ->
            ?BAD_CONFIG("Invalid timeout in ~s: ~p", [Name, Else])
    end.

%% Read the knit_purge attribute as a {PrePurge, PostPurge} pair;
%% a single style applies to both sides. Defaults to brutal purging.
get_purge(Name, Attrs) ->
    case get_attr(knit_purge, Attrs) of
        undefined ->
            {brutal_purge, brutal_purge};
        [Purge] when Purge == soft_purge; Purge == brutal_purge ->
            {Purge, Purge};
        [{Pre, Post}] when (Pre == soft_purge orelse Pre == brutal_purge),
                           (Post == soft_purge orelse Post == brutal_purge) ->
            {Pre, Post};
        Else ->
            ?BAD_CONFIG("Invalid purge in ~s: ~p", [Name, Else])
    end.

%% Read and validate every knit_apply hook declared by the module.
get_apply_funs(Name, Attrs) ->
    case get_attr(knit_apply, Attrs) of
        undefined -> [];
        Funs -> [validate_apply_fun(Name, F) || F <- Funs]
    end.

%% A bare MFA defaults to the `last' phase at priority 0; an explicit
%% {Phase, MFA} pair has both parts validated.
validate_apply_fun(Name, {M, F, A}) ->
    {{last, 0}, validate_mfa(Name, {M, F, A})};
validate_apply_fun(Name, {Phase, MFA}) ->
    {validate_phase(Name, Phase), validate_mfa(Name, MFA)}.

%% An MFA must be {atom(), atom(), list()}.
validate_mfa(_Name, {Mod, Fun, Args} = MFA)
        when is_atom(Mod), is_atom(Fun), is_list(Args) ->
    MFA;
validate_mfa(Name, BadMFA) ->
    ?BAD_CONFIG("Invalid apply MFA in ~s: ~p", [Name, BadMFA]).

%% Normalize a phase spec to {Phase, Priority}; a bare phase atom gets
%% priority 0.
validate_phase(_Name, Phase)
        when Phase == first; Phase == immediate; Phase == last ->
    {Phase, 0};
validate_phase(_Name, {Phase, P})
        when is_integer(P),
             (Phase == first orelse Phase == immediate orelse Phase == last) ->
    {Phase, P};
validate_phase(Name, BadPhase) ->
    ?BAD_CONFIG("Invalid apply phase in ~s: ~p", [Name, BadPhase]).

%% Fetch a module attribute's value list, or `undefined' when absent.
get_attr(Name, Attrs) ->
    case lists:keyfind(Name, 1, Attrs) of
        false -> undefined;
        {Name, Value} -> Value
    end.
src/knit_kmod.erl
0.620277
0.433262
knit_kmod.erl
starcoder