code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package com.sdc.play.module.plausc.controllers;
import play.api.mvc._
import com.sdc.play.module.plausc.PlayAuthenticate
object Authenticate extends Controller {

  import ControllerHelpers._
  import com.sdc.play.module.plausc.user.AuthUser

  /** Name of the query-string parameter carrying the authentication payload. */
  private val PAYLOAD_KEY = "p"

  /**
   * Decorates a result with headers that disable caching in all browsers and proxies.
   * See http://stackoverflow.com/questions/49547/making-sure-a-web-page-is-not-cached-across-all-browsers
   */
  def noCache(result: PlainResult) = result.withHeaders(
    CACHE_CONTROL -> "no-cache, no-store, must-revalidate", // HTTP 1.1
    PRAGMA -> "no-cache", // HTTP 1.0
    EXPIRES -> "0") // proxies

  /** Runs the authentication flow for the given provider, forwarding the optional payload. */
  def authenticate(provider: String) = Action { implicit request =>
    // noCache(response());
    PlayAuthenticate.handleAuthentication(provider, getQueryString(PAYLOAD_KEY))
  }

  /** Terminates the current session. */
  def logout() = Action { implicit request =>
    // noCache(response());
    PlayAuthenticate.logout
  }

  // TODO remove on Play 2.1 (request.getQueryString is available there)
  def getQueryString(key: String)(implicit request: Request[_]) = {
    request.queryString.get(key).map(vs => if (vs.nonEmpty) vs(0) else null).orNull
  }
}
| eschreiner/play2-scala-auth | code/app/com/sdc/play/module/plausc/controllers/Authenticate.scala | Scala | apache-2.0 | 1,084 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc
package backend.jvm
package opt
import java.util.regex.Pattern
import scala.annotation.tailrec
import scala.collection.mutable
import scala.jdk.CollectionConverters._
import scala.tools.asm.Type
import scala.tools.asm.tree.MethodNode
import scala.tools.nsc.backend.jvm.BTypes.InternalName
import scala.tools.nsc.backend.jvm.BackendReporting.{CalleeNotFinal, OptimizerWarning}
import scala.tools.nsc.backend.jvm.analysis.BackendUtils
import scala.tools.nsc.backend.jvm.opt.InlinerHeuristics._
abstract class InlinerHeuristics extends PerRunInit {
val postProcessor: PostProcessor
import postProcessor._
import bTypes._
import callGraph._
import frontendAccess.{backendReporting, compilerSettings}
// Matcher for the `-opt-inline-from` patterns; rebuilt once per compiler run (the setting may change between runs).
lazy val inlineSourceMatcher: LazyVar[InlineSourceMatcher] = perRunLazy(this)(new InlineSourceMatcher(compilerSettings.optInlineFrom))
/**
 * A callsite selected for inlining, together with the heuristic (`reason`) that selected it.
 */
final case class InlineRequest(callsite: Callsite, reason: InlineReason) {
  // non-null if `-Yopt-log-inline` is active, it explains why the callsite was selected for inlining
  def logText: String =
    if (compilerSettings.optLogInline.isEmpty) null
    else if (compilerSettings.optInlineHeuristics == "everything") "-Yopt-inline-heuristics:everything is enabled"
    else {
      val callee = callsite.callee.get
      reason match {
        case AnnotatedInline =>
          val what = if (callee.annotatedInline) "callee" else "callsite"
          s"the $what is annotated `@inline`"
        case HigherOrderWithLiteral | HigherOrderWithForwardedParam =>
          // parameter names are only present if the classfile retains them; fall back to a synthetic name
          val paramNames = Option(callee.callee.parameters).map(_.asScala.map(_.name).toVector)
          def param(i: Int) = {
            def syn = s"<param $i>"
            paramNames.fold(syn)(v => v.applyOrElse(i, (_: Int) => syn))
          }
          def samInfo(i: Int, sam: String, arg: String) = s"the argument for parameter (${param(i)}: $sam) is a $arg"
          val argInfos = for ((i, sam) <- callee.samParamTypes; info <- callsite.argInfos.get(i).iterator) yield {
            val argKind = info match {
              case FunctionLiteral => "function literal"
              case ForwardedParam(_) => "parameter of the callsite method"
              case StaticallyKnownArray => "" // should not happen, just included to avoid potential crash
            }
            samInfo(i, sam.internalName.split('/').last, argKind)
          }
          s"the callee is a higher-order method, ${argInfos.mkString(", ")}"
        case SyntheticForwarder =>
          "the callee is a synthetic forwarder method"
        case TrivialMethod =>
          "the callee is a small trivial method"
        case FactoryMethod =>
          "the callee is a factory method"
        case BoxingForwarder =>
          "the callee is a forwarder method with boxing adaptation"
        case GenericForwarder =>
          "the callee is a forwarder or alias method"
        case RefParam =>
          "the callee has a Ref type parameter"
        case KnownArrayOp =>
          // fixed typo in log message: "knonw" -> "known"
          "ScalaRuntime.array_apply and array_update are inlined if the array has a statically known type"
      }
    }
}
/** Whether the `-opt-inline-from` setting permits inlining from the given class / source file. */
def canInlineFromSource(sourceFilePath: Option[String], calleeDeclarationClass: InternalName): Boolean = {
  val allowedBySources = inlineSourceMatcher.get.allowFromSources && sourceFilePath.isDefined
  allowedBySources || inlineSourceMatcher.get.allow(calleeDeclarationClass)
}
/**
 * Select callsites from the call graph that should be inlined, grouped by the containing method.
 * Cyclic inlining requests are allowed, the inliner will eliminate requests to break cycles.
 */
def selectCallsitesForInlining: Map[MethodNode, Set[InlineRequest]] = {
// We should only create inlining requests for callsites being compiled (not for callsites in
// classes on the classpath). The call graph may contain callsites of classes parsed from the
// classpath. In order to get only the callsites being compiled, we start at the map of
// compilingClasses in the byteCodeRepository.
val compilingMethods = for {
(classNode, _) <- byteCodeRepository.compilingClasses.valuesIterator
methodNode <- classNode.methods.iterator.asScala
} yield methodNode
compilingMethods.map(methodNode => {
var requests = Set.empty[InlineRequest]
// Resolved callee (Right): ask the heuristic; report warnings when requested by the settings.
callGraph.callsites(methodNode).valuesIterator foreach {
case callsite @ Callsite(_, _, _, Right(Callee(callee, _, _, _, _, _, _, callsiteWarning)), _, _, _, pos, _, _) =>
inlineRequest(callsite) match {
case Some(Right(req)) => requests += req
case Some(Left(w)) =>
// the heuristic selected the callsite, but an early inline check failed
if (w.emitWarning(compilerSettings)) {
backendReporting.inlinerWarning(callsite.callsitePosition, w.toString)
}
case None =>
if (callsiteWarning.isDefined && callsiteWarning.get.emitWarning(compilerSettings))
backendReporting.inlinerWarning(pos, s"there was a problem determining if method ${callee.name} can be inlined: \\n"+ callsiteWarning.get)
}
// Unresolved callee (Left): no request possible; optionally warn.
case Callsite(ins, _, _, Left(warning), _, _, _, pos, _, _) =>
if (warning.emitWarning(compilerSettings))
backendReporting.inlinerWarning(pos, s"failed to determine if ${ins.name} should be inlined:\\n$warning")
}
(methodNode, requests)
// keep only methods that actually received inline requests
}).filterNot(_._2.isEmpty).toMap
}
// Size budgets (in ASM instruction counts) used by selectRequestsForMethodSize below.
val maxSize = 3000
val mediumSize = 2000
val smallSize = 1000
/**
 * From the inline requests targeting `method`, accept as many as fit within the per-reason size
 * budgets. High-priority reasons (@inline, synthetic forwarders, higher-order with a literal) may
 * grow the method up to `maxSize`; lower-priority ones only up to `mediumSize` / `smallSize`.
 * The method's resulting size estimate is recorded in `methodSizes`.
 */
def selectRequestsForMethodSize(method: MethodNode, requests: List[InlineRequest], methodSizes: mutable.Map[MethodNode, Int]): List[InlineRequest] = {
val byReason = requests.groupBy(_.reason)
// running size estimate of `method` as requests are accepted
var size = method.instructions.size
val res = mutable.ListBuffer.empty[InlineRequest]
// Accept requests of `kind` while the estimated method size stays below `limit`.
def include(kind: InlineReason, limit: Int): Unit = {
var rs = byReason.getOrElse(kind, Nil)
while (rs.nonEmpty && size < limit) {
val r = rs.head
rs = rs.tail
val callee = r.callsite.callee.get.callee
// prefer the already-updated size of the callee, if recorded in this pass
val cSize = methodSizes.getOrElse(callee, callee.instructions.size)
if (size + cSize < limit) {
res += r
size += cSize
}
}
}
// order matters: earlier calls get the first claim on the size budget
include(AnnotatedInline, maxSize)
include(SyntheticForwarder, maxSize)
include(KnownArrayOp, maxSize)
include(HigherOrderWithLiteral, maxSize)
include(HigherOrderWithForwardedParam, mediumSize)
include(RefParam, mediumSize)
include(BoxingForwarder, mediumSize)
include(FactoryMethod, mediumSize)
include(GenericForwarder, smallSize)
include(TrivialMethod, smallSize)
methodSizes(method) = size
res.toList
}
/**
 * Returns the inline request for a callsite if the callsite should be inlined according to the
 * current heuristics (`-Yopt-inline-heuristics`).
 *
 * @return `None` if this callsite should not be inlined according to the active heuristic
 * `Some(Left)` if the callsite should be inlined according to the heuristic, but cannot
 * be inlined according to an early, incomplete check (see earlyCanInlineCheck)
 * `Some(Right)` if the callsite should be inlined (it's still possible that the callsite
 * cannot be inlined in the end, for example if it contains instructions that would
 * cause an IllegalAccessError in the new class; this is checked in the inliner)
 */
def inlineRequest(callsite: Callsite): Option[Either[OptimizerWarning, InlineRequest]] = {
// Wraps the callsite in an InlineRequest if it passes the early safety checks.
def requestIfCanInline(callsite: Callsite, reason: InlineReason): Option[Either[OptimizerWarning, InlineRequest]] = {
val callee = callsite.callee.get
if (!callee.safeToInline) {
if (callsite.isInlineAnnotated && callee.canInlineFromSource) {
// By default, we only emit inliner warnings for methods annotated @inline. However, we don't
// want to be unnecessarily noisy with `-opt-warnings:_`: for example, the inliner heuristic
// would attempt to inline `Function1.apply$sp$II`, as it's higher-order (the receiver is
// a function), and it's concrete (forwards to `apply`). But because it's non-final, it cannot
// be inlined. So we only create warnings here for methods annotated @inline.
Some(Left(CalleeNotFinal(
callee.calleeDeclarationClass.internalName,
callee.callee.name,
callee.callee.desc,
callsite.isInlineAnnotated)))
} else None
} else inliner.earlyCanInlineCheck(callsite) match {
case Some(w) =>
Some(Left(w))
case None =>
Some(Right(InlineRequest(callsite, reason)))
}
}
// don't inline into synthetic forwarders (anonfun-adapted methods, bridges, etc). the heuristics
// will instead inline such methods at callsite. however, *do* inline into user-written forwarders
// or aliases, because otherwise it's too confusing for users looking at generated code, they will
// write a small test method and think the inliner doesn't work correctly.
val isGeneratedForwarder =
BytecodeUtils.isSyntheticMethod(callsite.callsiteMethod) && backendUtils.looksLikeForwarderOrFactoryOrTrivial(callsite.callsiteMethod, callsite.callsiteClass.internalName, allowPrivateCalls = true) > 0 ||
backendUtils.isMixinForwarder(callsite.callsiteMethod, callsite.callsiteClass) // seems mixin forwarders are not synthetic...
if (isGeneratedForwarder) None
else {
val callee = callsite.callee.get
compilerSettings.optInlineHeuristics match {
case "everything" =>
requestIfCanInline(callsite, AnnotatedInline)
case "at-inline-annotated" =>
if (callsite.isInlineAnnotated && !callsite.isNoInlineAnnotated) requestIfCanInline(callsite, AnnotatedInline)
else None
case "default" =>
// Each `shouldInlineX` below returns the matching InlineReason, or None if the heuristic doesn't apply.
def shouldInlineAnnotated = if (callsite.isInlineAnnotated) Some(AnnotatedInline) else None
// higher-order callee: inline when a SAM-typed argument is a function literal or a forwarded parameter
def shouldInlineHO = Option {
if (callee.samParamTypes.isEmpty) null
else {
val samArgs = callee.samParamTypes flatMap {
case (index, _) => Option.option2Iterable(callsite.argInfos.get(index))
}
if (samArgs.isEmpty) null
else if (samArgs.exists(_ == FunctionLiteral)) HigherOrderWithLiteral
else HigherOrderWithForwardedParam
}
}
// callee takes a scala.runtime Ref box: inlining may allow box elimination
def shouldInlineRefParam =
if (Type.getArgumentTypes(callee.callee.desc).exists(tp => coreBTypes.srRefCreateMethods.contains(tp.getInternalName))) Some(RefParam)
else None
// ScalaRunTime.array_apply / array_update where the receiver array type is statically known
def shouldInlineArrayOp =
if (BackendUtils.isRuntimeArrayLoadOrUpdate(callsite.callsiteInstruction) && callsite.argInfos.get(1).contains(StaticallyKnownArray)) Some(KnownArrayOp)
else None
def shouldInlineForwarder = Option {
// trait super accessors are excluded here because they contain an `invokespecial` of the default method in the trait.
// this instruction would have different semantics if inlined into some other class.
// we *do* inline trait super accessors if selected by a different heuristic. in this case, the `invokespcial` is then
// inlined in turn (chosen by the same heuristic), or the code is rolled back. but we don't inline them just because
// they are forwarders.
val isTraitSuperAccessor = backendUtils.isTraitSuperAccessor(callee.callee, callee.calleeDeclarationClass)
if (isTraitSuperAccessor) {
// inline static trait super accessors if the corresponding trait method is a forwarder or trivial (scala-dev#618)
{
val css = callGraph.callsites(callee.callee)
if (css.sizeIs == 1) css.head._2 else null
} match {
case null => null
case traitMethodCallsite =>
val tmCallee = traitMethodCallsite.callee.get
val traitMethodForwarderKind = backendUtils.looksLikeForwarderOrFactoryOrTrivial(
tmCallee.callee, tmCallee.calleeDeclarationClass.internalName, allowPrivateCalls = false)
if (traitMethodForwarderKind > 0) GenericForwarder
else null
}
}
else {
val forwarderKind = backendUtils.looksLikeForwarderOrFactoryOrTrivial(callee.callee, callee.calleeDeclarationClass.internalName, allowPrivateCalls = false)
if (forwarderKind < 0)
null
else if (BytecodeUtils.isSyntheticMethod(callee.callee) || backendUtils.isMixinForwarder(callee.callee, callee.calleeDeclarationClass))
SyntheticForwarder
else forwarderKind match {
case 1 => TrivialMethod
case 2 => FactoryMethod
case 3 => BoxingForwarder
case 4 => GenericForwarder
}
}
}
if (callsite.isNoInlineAnnotated) None
else {
// heuristics are tried in priority order; the first that applies wins
val reason = shouldInlineAnnotated orElse shouldInlineHO orElse shouldInlineRefParam orElse shouldInlineArrayOp orElse shouldInlineForwarder
reason.flatMap(r => requestIfCanInline(callsite, r))
}
}
}
}
/*
// using http://lihaoyi.github.io/Ammonite/
load.ivy("com.google.guava" % "guava" % "18.0")
val javaUtilFunctionClasses = {
val rt = System.getProperty("sun.boot.class.path").split(":").find(_.endsWith("lib/rt.jar")).get
val u = new java.io.File(rt).toURL
val l = new java.net.URLClassLoader(Array(u))
val cp = com.google.common.reflect.ClassPath.from(l)
cp.getTopLevelClasses("java.util.function").toArray.map(_.toString).toList
}
// found using IntelliJ's "Find Usages" on the @FunctionalInterface annotation
val otherClasses = List(
"com.sun.javafx.css.parser.Recognizer",
"java.awt.KeyEventDispatcher",
"java.awt.KeyEventPostProcessor",
"java.io.FileFilter",
"java.io.FilenameFilter",
"java.lang.Runnable",
"java.lang.Thread$UncaughtExceptionHandler",
"java.nio.file.DirectoryStream$Filter",
"java.nio.file.PathMatcher",
"java.time.temporal.TemporalAdjuster",
"java.time.temporal.TemporalQuery",
"java.util.Comparator",
"java.util.concurrent.Callable",
"java.util.logging.Filter",
"java.util.prefs.PreferenceChangeListener",
"javafx.animation.Interpolatable",
"javafx.beans.InvalidationListener",
"javafx.beans.value.ChangeListener",
"javafx.collections.ListChangeListener",
"javafx.collections.MapChangeListener",
"javafx.collections.SetChangeListener",
"javafx.event.EventHandler",
"javafx.util.Builder",
"javafx.util.BuilderFactory",
"javafx.util.Callback"
)
val allClasses = javaUtilFunctionClasses ::: otherClasses
load.ivy("org.ow2.asm" % "asm" % "5.0.4")
val classesAndSamNameDesc = allClasses.map(c => {
val cls = Class.forName(c)
val internalName = org.objectweb.asm.Type.getDescriptor(cls).drop(1).dropRight(1) // drop L and ;
val sams = cls.getMethods.filter(m => {
(m.getModifiers & java.lang.reflect.Modifier.ABSTRACT) != 0 &&
m.getName != "equals" // Comparator has an abstract override of "equals" for adding Javadoc
})
assert(sams.size == 1, internalName + sams.map(_.getName))
val sam = sams.head
val samDesc = org.objectweb.asm.Type.getMethodDescriptor(sam)
(internalName, sam.getName, samDesc)
})
println(classesAndSamNameDesc map {
case (cls, nme, desc) => s"""("$cls", "$nme$desc")"""
} mkString ("", ",\\n", "\\n"))
*/
// Map from the JVM internal name of a JDK / JavaFX functional interface to its single abstract
// method, encoded as "name(descriptor)". Generated by the Ammonite script in the comment above.
private val javaSams: Map[String, String] = Map(
("java/util/function/BiConsumer", "accept(Ljava/lang/Object;Ljava/lang/Object;)V"),
("java/util/function/BiFunction", "apply(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;"),
("java/util/function/BiPredicate", "test(Ljava/lang/Object;Ljava/lang/Object;)Z"),
("java/util/function/BinaryOperator", "apply(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;"),
("java/util/function/BooleanSupplier", "getAsBoolean()Z"),
("java/util/function/Consumer", "accept(Ljava/lang/Object;)V"),
("java/util/function/DoubleBinaryOperator", "applyAsDouble(DD)D"),
("java/util/function/DoubleConsumer", "accept(D)V"),
("java/util/function/DoubleFunction", "apply(D)Ljava/lang/Object;"),
("java/util/function/DoublePredicate", "test(D)Z"),
("java/util/function/DoubleSupplier", "getAsDouble()D"),
("java/util/function/DoubleToIntFunction", "applyAsInt(D)I"),
("java/util/function/DoubleToLongFunction", "applyAsLong(D)J"),
("java/util/function/DoubleUnaryOperator", "applyAsDouble(D)D"),
("java/util/function/Function", "apply(Ljava/lang/Object;)Ljava/lang/Object;"),
("java/util/function/IntBinaryOperator", "applyAsInt(II)I"),
("java/util/function/IntConsumer", "accept(I)V"),
("java/util/function/IntFunction", "apply(I)Ljava/lang/Object;"),
("java/util/function/IntPredicate", "test(I)Z"),
("java/util/function/IntSupplier", "getAsInt()I"),
("java/util/function/IntToDoubleFunction", "applyAsDouble(I)D"),
("java/util/function/IntToLongFunction", "applyAsLong(I)J"),
("java/util/function/IntUnaryOperator", "applyAsInt(I)I"),
("java/util/function/LongBinaryOperator", "applyAsLong(JJ)J"),
("java/util/function/LongConsumer", "accept(J)V"),
("java/util/function/LongFunction", "apply(J)Ljava/lang/Object;"),
("java/util/function/LongPredicate", "test(J)Z"),
("java/util/function/LongSupplier", "getAsLong()J"),
("java/util/function/LongToDoubleFunction", "applyAsDouble(J)D"),
("java/util/function/LongToIntFunction", "applyAsInt(J)I"),
("java/util/function/LongUnaryOperator", "applyAsLong(J)J"),
("java/util/function/ObjDoubleConsumer", "accept(Ljava/lang/Object;D)V"),
("java/util/function/ObjIntConsumer", "accept(Ljava/lang/Object;I)V"),
("java/util/function/ObjLongConsumer", "accept(Ljava/lang/Object;J)V"),
("java/util/function/Predicate", "test(Ljava/lang/Object;)Z"),
("java/util/function/Supplier", "get()Ljava/lang/Object;"),
("java/util/function/ToDoubleBiFunction", "applyAsDouble(Ljava/lang/Object;Ljava/lang/Object;)D"),
("java/util/function/ToDoubleFunction", "applyAsDouble(Ljava/lang/Object;)D"),
("java/util/function/ToIntBiFunction", "applyAsInt(Ljava/lang/Object;Ljava/lang/Object;)I"),
("java/util/function/ToIntFunction", "applyAsInt(Ljava/lang/Object;)I"),
("java/util/function/ToLongBiFunction", "applyAsLong(Ljava/lang/Object;Ljava/lang/Object;)J"),
("java/util/function/ToLongFunction", "applyAsLong(Ljava/lang/Object;)J"),
("java/util/function/UnaryOperator", "apply(Ljava/lang/Object;)Ljava/lang/Object;"),
("com/sun/javafx/css/parser/Recognizer", "recognize(I)Z"),
("java/awt/KeyEventDispatcher", "dispatchKeyEvent(Ljava/awt/event/KeyEvent;)Z"),
("java/awt/KeyEventPostProcessor", "postProcessKeyEvent(Ljava/awt/event/KeyEvent;)Z"),
("java/io/FileFilter", "accept(Ljava/io/File;)Z"),
("java/io/FilenameFilter", "accept(Ljava/io/File;Ljava/lang/String;)Z"),
("java/lang/Runnable", "run()V"),
("java/lang/Thread$UncaughtExceptionHandler", "uncaughtException(Ljava/lang/Thread;Ljava/lang/Throwable;)V"),
("java/nio/file/DirectoryStream$Filter", "accept(Ljava/lang/Object;)Z"),
("java/nio/file/PathMatcher", "matches(Ljava/nio/file/Path;)Z"),
("java/time/temporal/TemporalAdjuster", "adjustInto(Ljava/time/temporal/Temporal;)Ljava/time/temporal/Temporal;"),
("java/time/temporal/TemporalQuery", "queryFrom(Ljava/time/temporal/TemporalAccessor;)Ljava/lang/Object;"),
("java/util/Comparator", "compare(Ljava/lang/Object;Ljava/lang/Object;)I"),
("java/util/concurrent/Callable", "call()Ljava/lang/Object;"),
("java/util/logging/Filter", "isLoggable(Ljava/util/logging/LogRecord;)Z"),
("java/util/prefs/PreferenceChangeListener", "preferenceChange(Ljava/util/prefs/PreferenceChangeEvent;)V"),
("javafx/animation/Interpolatable", "interpolate(Ljava/lang/Object;D)Ljava/lang/Object;"),
("javafx/beans/InvalidationListener", "invalidated(Ljavafx/beans/Observable;)V"),
("javafx/beans/value/ChangeListener", "changed(Ljavafx/beans/value/ObservableValue;Ljava/lang/Object;Ljava/lang/Object;)V"),
("javafx/collections/ListChangeListener", "onChanged(Ljavafx/collections/ListChangeListener$Change;)V"),
("javafx/collections/MapChangeListener", "onChanged(Ljavafx/collections/MapChangeListener$Change;)V"),
("javafx/collections/SetChangeListener", "onChanged(Ljavafx/collections/SetChangeListener$Change;)V"),
("javafx/event/EventHandler", "handle(Ljavafx/event/Event;)V"),
("javafx/util/Builder", "build()Ljava/lang/Object;"),
("javafx/util/BuilderFactory", "getBuilder(Ljava/lang/Class;)Ljavafx/util/Builder;"),
("javafx/util/Callback", "call(Ljava/lang/Object;)Ljava/lang/Object;")
)
/** The single abstract method ("name(desc)") of the given functional interface, if known. */
def javaSam(internalName: InternalName): Option[String] = javaSams.get(internalName)
}
object InlinerHeuristics {
/** Why a callsite was selected for inlining; drives size budgeting and `-Yopt-log-inline` output. */
sealed trait InlineReason
case object AnnotatedInline extends InlineReason
case object SyntheticForwarder extends InlineReason
case object TrivialMethod extends InlineReason
case object FactoryMethod extends InlineReason
case object BoxingForwarder extends InlineReason
case object GenericForwarder extends InlineReason
case object RefParam extends InlineReason
case object KnownArrayOp extends InlineReason
case object HigherOrderWithLiteral extends InlineReason
case object HigherOrderWithForwardedParam extends InlineReason
/**
 * Decides, for a class internal name, whether inlining from it is allowed according to the
 * `-opt-inline-from` patterns. Patterns are matched in order; a leading `!` negates a pattern.
 */
class InlineSourceMatcher(inlineFromSetting: List[String]) {
// `terminal` is true if all remaining entries are of the same negation as this one
case class Entry(pattern: Pattern, negated: Boolean, terminal: Boolean) {
def matches(internalName: InternalName): Boolean = pattern.matcher(internalName).matches()
}
private val patternStrings = inlineFromSetting.filterNot(_.isEmpty)
// a leading "**" means everything is allowed unless excluded by a later negated pattern
val startAllow: Boolean = patternStrings.headOption.contains("**")
private[this] var _allowFromSources: Boolean = false
// entries are stored in reverse order of the setting (see parse)
val entries: List[Entry] = parse()
def allowFromSources = _allowFromSources
def allow(internalName: InternalName): Boolean = {
var answer = startAllow
// walk the entries; later patterns override earlier ones, `terminal` permits an early exit
@tailrec def check(es: List[Entry]): Boolean = es match {
case e :: rest =>
if (answer && e.negated && e.matches(internalName))
answer = false
else if (!answer && !e.negated && e.matches(internalName))
answer = true
if (e.terminal && answer != e.negated) answer
else check(rest)
case _ =>
answer
}
check(entries)
}
// Translates the `-opt-inline-from` pattern strings into regex-backed entries.
private def parse(): List[Entry] = {
var result = List.empty[Entry]
val patternsRevIterator = {
val it = patternStrings.reverseIterator
if (startAllow) it.take(patternStrings.length - 1) else it
}
for (p <- patternsRevIterator) {
if (p == "<sources>") _allowFromSources = true
else {
// hand-rolled scanner over the pattern string, producing a Java regex
val len = p.length
var index = 0
def current = if (index < len) p.charAt(index) else 0.toChar
def next() = index += 1
val negated = current == '!'
if (negated) next()
val regex = new java.lang.StringBuilder
while (index < len) {
if (current == '*') {
next()
if (current == '*') {
next()
val starStarDot = current == '.'
if (starStarDot) {
next()
// special case: "a.**.C" matches "a.C", and "**.C" matches "C"
val i = index - 4
val allowEmpty = i < 0 || (i == 0 && p.charAt(i) == '!') || p.charAt(i) == '.'
if (allowEmpty) regex.append("(?:.*/|)")
else regex.append(".*/")
} else
regex.append(".*")
} else {
// a single `*` only matches within one package segment
regex.append("[^/]*")
}
} else if (current == '.') {
next()
regex.append('/')
} else {
// literal run: copy verbatim, quoting it if it contains regex metacharacters
val start = index
var needEscape = false
while (index < len && current != '.' && current != '*') {
needEscape = needEscape || "\\\\.[]{}()*+-?^$|".indexOf(current) != -1
next()
}
if (needEscape) regex.append("\\\\Q")
regex.append(p, start, index)
if (needEscape) regex.append("\\\\E")
}
}
val isTerminal = result.isEmpty || result.head.terminal && result.head.negated == negated
result ::= Entry(Pattern.compile(regex.toString), negated, isTerminal)
}
}
result
}
}
}
| martijnhoekstra/scala | src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala | Scala | apache-2.0 | 25,263 |
/*
* Copyright 2012 OneCalendar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package service
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.scalatest.matchers.ShouldMatchers
import org.joda.time.DateTime
import models.Event
import dao.EventDao._
import com.github.simplyscala.MongoEmbedDatabase
import com.mongodb.casbah.{MongoConnection, MongoDB}
import com.mongodb.{ServerAddress, MongoOptions}
import dao.framework.MongoConnectionProperties
import MongoConnectionProperties._
import com.github.simplyscala.MongodProps
/** Integration test: loads Eventbrite events into an embedded MongoDB and checks they can be found by tag. */
class LoadEventbriteTest extends FunSuite with ShouldMatchers with MongoEmbedDatabase with BeforeAndAfterAll {

  // handle to the embedded mongod process, created in beforeAll
  var mongoProps: MongodProps = null

  override def beforeAll() { mongoProps = mongoStart(27018) }
  override def afterAll() { mongoStop(mongoProps) }

  implicit val dbName: MongoDbName = "test"

  implicit val pool: MongoDB = {
    val mongoOptions = new MongoOptions()
    mongoOptions.setConnectionsPerHost(2)
    val connection = MongoConnection(new ServerAddress("127.0.0.1", 27018), mongoOptions)
    connection(dbName)
  }

  test("should parse scala stream") {
    // pin "now" so any date-dependent filtering is deterministic
    implicit val now = () => new DateTime().withDate(2012, 4, 1).getMillis
    LoadEventbrite.parseLoad("scala")
    val events: List[Event] = findByTag(List("scala"))
    events.size should be > 0
  }
}
} | OneCalendar/OneCalendar | test/service/LoadEventbriteTest.scala | Scala | apache-2.0 | 1,934 |
package lila.insight
import lila.game.{ Game, GameRepo, Pov }
import lila.user.User
import org.joda.time.DateTime
final class InsightApi(
storage: Storage,
pipeline: AggregationPipeline,
insightUserApi: InsightUserApi,
gameRepo: GameRepo,
indexer: InsightIndexer
)(implicit ec: scala.concurrent.ExecutionContext) {
import InsightApi._
// Fetches the cached InsightUser, refreshing its lastSeen timestamp at most once a day;
// on first access, builds it from storage aggregates and saves it.
def insightUser(user: User): Fu[InsightUser] =
insightUserApi find user.id flatMap {
case Some(u) =>
u.lastSeen.isBefore(DateTime.now minusDays 1) ?? {
insightUserApi setSeenNow user
} inject u
case None =>
for {
count <- storage count user.id
ecos <- storage ecos user.id
c = InsightUser.make(user.id, count, ecos)
_ <- insightUserApi save c
} yield c
}
// Runs the aggregation for `question` and decorates the clusters with up to 4 randomly
// sampled games of the user for display.
def ask[X](question: Question[X], user: User): Fu[Answer[X]] =
pipeline
.aggregate(question, user)
.flatMap { aggDocs =>
val clusters = AggregationClusters(question, aggDocs)
val gameIds = lila.common.ThreadLocalRandom.shuffle(clusters.flatMap(_.gameIds)) take 4
gameRepo.userPovsByGameIds(gameIds, user) map { povs =>
Answer(question, clusters, povs)
}
}
.monSuccess(_.insight.request)
// Determines index freshness by comparing the user's latest rated game with the latest indexed entry.
def userStatus(user: User): Fu[UserStatus] =
gameRepo lastFinishedRatedNotFromPosition user flatMap {
case None => fuccess(UserStatus.NoGame)
case Some(game) =>
storage fetchLast user.id map {
case None => UserStatus.Empty
case Some(entry) if entry.date isBefore game.createdAt => UserStatus.Stale
case _ => UserStatus.Fresh
}
}
// Rebuilds the whole index for a user, then drops the cached InsightUser so it is recomputed.
def indexAll(userId: User.ID) =
indexer
.all(userId)
.monSuccess(_.insight.index) >>
insightUserApi.remove(userId)
// Updates the index entries for both players of `g`, but only where an entry already exists.
def updateGame(g: Game) =
Pov(g)
.map { pov =>
pov.player.userId ?? { userId =>
storage find InsightEntry.povToId(pov) flatMap {
_ ?? { old =>
indexer.update(g, userId, old)
}
}
}
}
.sequenceFu
.void
}
object InsightApi {
/** Freshness of a user's insight data relative to their played games. */
sealed trait UserStatus
object UserStatus {
case object NoGame extends UserStatus // the user has no rated games
case object Empty extends UserStatus // insights not yet generated
case object Stale extends UserStatus // new games not yet generated
case object Fresh extends UserStatus // up to date
}
}
| luanlv/lila | modules/insight/src/main/InsightApi.scala | Scala | mit | 2,570 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.persistence.jdbc
import akka.Done
import akka.actor.CoordinatedShutdown
import play.api.db.Databases
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.util.Random
object SlickDbTestProvider {

  private val JNDIName = "DefaultDS"
  private val JNDIDBName = "DefaultDB"

  // executor / pool sizing applied to every test database
  private val AsyncExecConfig: AsyncExecutorConfig = new AsyncExecutorConfig {
    override val numThreads: Int = 20
    override val minConnections: Int = 20
    override val maxConnections: Int = 20
    override val queueSize: Int = 100
    override def registerMbeans: Boolean = false
  }

  /** Builds Slick Database (with AsyncExecutor) and binds it as a JNDI resource for test purposes. */
  def buildAndBindSlickDb(baseName: String, coordinatedShutdown: CoordinatedShutdown)(
      implicit executionContext: ExecutionContext
  ): Unit = {
    // random suffix keeps concurrently running test suites from sharing an in-memory db
    val randomSuffix = Random.alphanumeric.take(8).mkString
    val database = Databases.inMemory(s"${baseName}_$randomSuffix", config = Map("jndiName" -> JNDIName))
    SlickDbProvider.buildAndBindSlickDatabase(database, AsyncExecConfig, JNDIDBName, coordinatedShutdown)
  }
}
| ignasi35/lagom | persistence-jdbc/core/src/test/scala/com/lightbend/lagom/internal/persistence/jdbc/SlickDbTestProvider.scala | Scala | apache-2.0 | 1,254 |
package bifrost.api.http.swagger
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.github.swagger.akka.model.{Contact, Info, License}
import com.github.swagger.akka.{HasActorSystem, SwaggerHttpService}
import io.swagger.models.Swagger
import bifrost.settings.Settings
import scala.reflect.runtime.universe.Type
/** Serves the Swagger description of the node's JSON-RPC/HTTP API under the `/swagger` path. */
class SwaggerDocService(system: ActorSystem, val apiTypes: Seq[Type], settings: Settings)
extends SwaggerHttpService with HasActorSystem {
override implicit val actorSystem: ActorSystem = system
override implicit val materializer: ActorMaterializer = ActorMaterializer()
// advertised host for the generated docs, taken from the node settings
override val host = settings.bindAddress + ":" + settings.rpcPort
override val apiDocsPath: String = "swagger"
// NOTE(review): the description/title/contact below still reference "Scorex" — presumably
// inherited from the upstream project this module was forked from; confirm whether they
// should be updated for this codebase.
override val info: Info = Info("The Web Interface to the Scorex API",
"1.3.0-SNAPSHOT",
"Scorex API",
"License: Creative Commons CC0",
Some(Contact("Alex", "https://scorex-dev.groups.io/g/main", "alex.chepurnoy@iohk.io")),
Some(License("License: Creative Commons CC0", "https://github.com/ScorexProject/Scorex/blob/master/COPYING"))
)
//Let swagger-ui determine the host and port
override def swaggerConfig: Swagger = new Swagger().basePath(prependSlashIfNecessary(basePath)).info(info).scheme(scheme)
}
| Topl/Project-Bifrost | src/main/scala/bifrost/api/http/swagger/SwaggerDocService.scala | Scala | mpl-2.0 | 1,303 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.database.test.behavior
import java.time.Instant
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.database.{DocumentConflictException, DocumentProvider, NoDocumentException}
import org.apache.openwhisk.core.entity._
trait ArtifactStoreCRUDBehaviors extends ArtifactStoreBehaviorBase {
behavior of s"${storeType}ArtifactStore put"
it should "put document and get a revision 1" in {
implicit val tid: TransactionId = transid()
val doc = put(authStore, newAuth())
// a freshly stored document must carry a non-empty revision
doc.rev.empty shouldBe false
}
it should "put and update document" in {
implicit val tid: TransactionId = transid()
val auth = newAuth()
val doc = put(authStore, auth)
// update with the current revision: must succeed and yield a new, non-empty revision
val auth2 =
getWhiskAuth(doc)
.copy(namespaces = Set(wskNS("foo1")))
.revision[WhiskAuth](doc.rev)
val doc2 = put(authStore, auth2)
doc2.rev should not be doc.rev
doc2.rev.empty shouldBe false
}
it should "put delete and then recreate document with same id with different rev" in {
implicit val tid: TransactionId = transid()
val auth = newAuth()
val doc = put(authStore, auth)
delete(authStore, doc) shouldBe true
// recreation under the same id is allowed, but must produce a different revision
val auth2 = auth.copy(namespaces = Set(wskNS("foo1")))
val doc2 = put(authStore, auth2)
doc2.rev should not be doc.rev
doc2.rev.empty shouldBe false
}
it should "throw DocumentConflictException when updated with old revision" in {
implicit val tid: TransactionId = transid()
val auth = newAuth()
val doc = put(authStore, auth)
val auth2 = getWhiskAuth(doc).copy(namespaces = Set(wskNS("foo1"))).revision[WhiskAuth](doc.rev)
val doc2 = put(authStore, auth2)
// update with _rev set to the older revision must be rejected
val auth3 = getWhiskAuth(doc2).copy(namespaces = Set(wskNS("foo2"))).revision[WhiskAuth](doc.rev)
intercept[DocumentConflictException] {
put(authStore, auth3)
}
}
it should "throw DocumentConflictException if document with same id is inserted twice" in {
implicit val tid: TransactionId = transid()
val auth = newAuth()
// first insert succeeds; only the side effect matters, so the unused binding was removed
put(authStore, auth)
// second insert of the same id (without a revision) must conflict
intercept[DocumentConflictException] {
put(authStore, auth)
}
}
it should "work if same document was deleted earlier" in {
implicit val tid: TransactionId = transid()
val auth = newAuth()
//1. Create a document
val doc = put(authStore, auth)
//2. Now delete the document
delete(authStore, doc) shouldBe true
//3. Now recreate the same document.
val doc2 = put(authStore, auth)
//Recreating a deleted document should work
doc2.rev.empty shouldBe false
}
behavior of s"${storeType}ArtifactStore delete"
it should "deletes existing document" in {
implicit val tid: TransactionId = transid()
val doc = put(authStore, newAuth())
delete(authStore, doc) shouldBe true
}
it should "throws IllegalArgumentException when deleting without revision" in {
intercept[IllegalArgumentException] {
implicit val tid: TransactionId = transid()
delete(authStore, DocInfo("doc-with-empty-revision"))
}
}
it should "throws NoDocumentException when document does not exist" in {
intercept[NoDocumentException] {
implicit val tid: TransactionId = transid()
delete(authStore, DocInfo ! ("non-existing-doc", "42"))
}
}
it should "throws DocumentConflictException when revision does not match" in {
implicit val tid: TransactionId = transid()
val auth = newAuth()
val doc = put(authStore, auth)
val auth2 = getWhiskAuth(doc).copy(namespaces = Set(wskNS("foo1"))).revision[WhiskAuth](doc.rev)
val doc2 = put(authStore, auth2)
intercept[DocumentConflictException] {
delete(authStore, doc)
}
}
behavior of s"${storeType}ArtifactStore get"
it should "get existing entity matching id and rev" in {
implicit val tid: TransactionId = transid()
val auth = newAuth()
val doc = put(authStore, auth)
val authFromGet = getWhiskAuth(doc)
authFromGet shouldBe auth
authFromGet.docinfo.rev shouldBe doc.rev
}
it should "get existing entity matching id only" in {
implicit val tid: TransactionId = transid()
val auth = newAuth()
val doc = put(authStore, auth)
val authFromGet = getWhiskAuth(doc)
authFromGet shouldBe auth
}
it should "get entity with timestamp" in {
implicit val tid: TransactionId = transid()
val activation = WhiskActivation(
EntityPath("testnamespace"),
EntityName("activation1"),
Subject(),
ActivationId.generate(),
start = Instant.now.inMills,
end = Instant.now.inMills)
val activationDoc = put(activationStore, activation)
val activationFromDb = activationStore.get[WhiskActivation](activationDoc).futureValue
activationFromDb shouldBe activation
}
it should "throws NoDocumentException when document revision does not match" in {
implicit val tid: TransactionId = transid()
val auth = newAuth()
val doc = put(authStore, auth)
val auth2 = getWhiskAuth(doc).copy(namespaces = Set(wskNS("foo1"))).revision[WhiskAuth](doc.rev)
val doc2 = put(authStore, auth2)
authStore.get[WhiskAuth](doc).failed.futureValue.getCause shouldBe a[AssertionError]
val authFromGet = getWhiskAuth(doc2)
authFromGet shouldBe auth2
}
it should "throws NoDocumentException when document does not exist" in {
implicit val tid: TransactionId = transid()
authStore.get[WhiskAuth](DocInfo("non-existing-doc")).failed.futureValue shouldBe a[NoDocumentException]
}
it should "not get a deleted document" in {
implicit val tid: TransactionId = transid()
val auth = newAuth()
//1. Create a document
val docInfo = put(authStore, auth)
//2. Now delete the document
delete(authStore, docInfo) shouldBe true
//3. Now getting a deleted document should fail
authStore.get[WhiskAuth](docInfo).failed.futureValue shouldBe a[NoDocumentException]
//Check get by id flow also which return none for such "soft" deleted document
authStore match {
case provider: DocumentProvider =>
provider.get(docInfo.id).futureValue shouldBe None
case _ =>
}
}
}
| RSulzmann/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/database/test/behavior/ArtifactStoreCRUDBehaviors.scala | Scala | apache-2.0 | 7,042 |
package daos.doobie
import doobie.specs2.imports.AnalysisSpec
import org.specs2.mutable.Specification
import testutil.TestUtil
/** Tests for SubjectDaoDoobie: doobie's AnalysisSpec type-checks the DAO's
  * SQL and row mappings against the live test database schema. */
object SubjectDaoDoobieSpec extends Specification with AnalysisSpec {
  // Transactor pointing at the test database, provided by TestUtil.
  val transactor = TestUtil.transactor()
  // Analyse subjectQuery: column count/types and parameter types must match the schema.
  check(SubjectDaoDoobie.subjectQuery(""))
}
| kdoomsday/kaminalapp | test/daos/doobie/SubjectDaoDoobieSpec.scala | Scala | mit | 321 |
package unfiltered.response
import java.io.OutputStream
import java.util.zip.{GZIPOutputStream => GZOS}
/** Enclose the response's output stream in another stream,
* typically a subclass of java.io.FilterOutputStream */
object ResponseFilter {
  /** A response function that swaps in a wrapped version of the response's
    * output stream (produced by `filter`), delegating everything else. */
  trait Filtering[S <: OutputStream] extends ResponseFunction[Any] {
    def apply[T](delegate: HttpResponse[T]) =
      new DelegatingResponse(delegate) {
        override val outputStream = filter(delegate.outputStream)
      }
    /** Wrap the underlying output stream in the filtering stream. */
    def filter(os: OutputStream): S
  }
  /** Compresses the response body with GZIP. */
  object GZip extends Filtering[GZOS] {
    def filter(underlying: OutputStream) = new GZOS(underlying)
  }
}
| omarkilani/unfiltered | library/src/main/scala/response/filtering.scala | Scala | mit | 620 |
package kuaixue.scala.book.chapter_13.mutable
/**
 * Arrays are mutable, indexed collections of values.
 * Demonstrates several equivalent ways to traverse an Array, plus the
 * relationship between syntactic sugar and the underlying apply/update calls.
 */
object Ch04_1_Arr extends App {
  // Traversal with a for-expression.
  def printArgs(args: Array[String]): Unit = {
    for (arg <- args)
      print(arg)
  }
  // Same traversal via foreach, passing `print` as a function value.
  def printArgs_1(args: Array[String]): Unit = {
    args.foreach(print)
  }
  // Index-based traversal; args.apply(i) is what args(i) desugars to.
  def printArgs_2(args: Array[String]): Unit = {
    for (i <- 0.to(args.length - 1))
      print(args.apply(i))
  }
  // Element assignment: arr(i) = x is sugar for arr.update(i, x).
  def updateArr(){
    val greetStrings = new Array[String](3)
    greetStrings(0) = "Hello"
    greetStrings(1) = ", "
    greetStrings(2) = "world!\\n"
    // for (i <- 0 to 2)
    //   print(greetStrings(i))
    printArgs_2(greetStrings)
    greetStrings.update(0, "Hi")
    greetStrings.update(1, ", ")
    greetStrings.update(2, "the world!\\n")
    printArgs(greetStrings)
  }
  // Array("zero", ...) is sugar for Array.apply("zero", ...).
  val numNames2 = Array.apply("zero", "one", "two")
  val numName = Array("zero", "one", "two")
}
import Name.{T => Name}
import java.lang.StringBuilder
import scala.collection.mutable
// ResultGraph: small, immutable graph indexed by IDs only.
// Used as a return type from computations and searches.
// No optimizations for fast Queries, since it's intended for small graphs.
class ResultGraph[NodeT <: Node, EdgeT <: Edge] (nodeColl:Iterable[NodeT], edgeColl:Iterable[EdgeT]) extends Graph[NodeT, EdgeT, ResultGraph[NodeT, EdgeT]] {
  // Nodes and edges indexed by their ids. Duplicate ids collapse (last wins).
  val nodes = nodeColl.map(node => (node.id, node)).toMap
  val edges = edgeColl.map(edge => (edge.id, edge)).toMap
  // Tri-state validation status: None = not validated yet, Some(b) = outcome
  // of the last validate() call. Mutable, but excluded from equals/hashCode
  // (see tuple() below), so it does not affect value semantics.
  private var validation = None : Option[Boolean]
  // Empty graph.
  def this() = {
    this(Set.empty, Set.empty)
  }
  // Returns a *new* ResultGraph.
  def loadFromFile(filename:String) = {
    ResultGraph.loadFromFile(filename)
  }
  /** Checks that every edge's endpoints exist in this graph.
    * Records the outcome in `validation`; throws ValidationException on failure. */
  def validate():Unit = {
    val result =
      edges.values.forall(edge => nodes.contains(edge.source)
                          && nodes.contains(edge.dest))
    if (result) {
      validation = Some(true)
    }
    else {
      validation = Some(false)
      throw new ValidationException("ResultGraph")
    }
  }
  /** Multi-line dump: node count, each node, edge count, each edge. */
  def toStringLongform():String = {
    val builder = new StringBuilder()
    builder.append("%d NODES\\n".format(nodes.size))
    for (node <- nodes.values) {
      builder.append(node.toString + "\\n")
    }
    builder.append("%d EDGES\\n".format(edges.size))
    for (edge <- edges.values) {
      builder.append(edge.toString + "\\n")
    }
    builder.toString
  }
  /** Node lookup by id; None when absent. */
  def getNode(nodeId: Name):Option[NodeT] = {
    nodes.get(nodeId)
  }
  /** Edge lookup by id; None when absent. */
  def getEdge(edgeId: Name):Option[EdgeT] = {
    edges.get(edgeId)
  }
  /** All edges whose source is the given node. */
  def outEdges(nodeId:Name):Set[EdgeT] = {
    // For ResultGraph, O(n) in number of edges.
    edges.values.filter(edge => edge.source == nodeId).toSet
  }
  /** All edges whose destination is the given node. */
  def inEdges(nodeId:Name):Set[EdgeT] = {
    // For ResultGraph, O(n) in number of edges.
    edges.values.filter(edge => edge.dest == nodeId).toSet
  }
  /** Prints the long-form dump to stdout. */
  def print():Unit = {
    println(this.toStringLongform)
  }
  // Graph of just the matching nodes; no edges are carried over.
  private def findNodes(nodeFilter:NodeFilter[NodeT]) = {
    new ResultGraph(nodes.values.filter(nodeFilter), Set.empty[EdgeT])
  }
  // Breadth-first expansion of graph `g` along edges/nodes passing the filters,
  // up to `depth` hops (None = unlimited; encoded internally as -1, which the
  // countdown in `loop` never reaches 0 from).
  private def followEdgesFrom(g:ResultGraph[NodeT,EdgeT],
                              edgeFilter:EdgeFilter[EdgeT],
                              nodeFilter:NodeFilter[NodeT],
                              depth:Option[Int]) = {
    // An edge pointing at a node missing from this graph means the graph is corrupt.
    def fail() = {
      throw new Exception("inconsistent graph")
    }
    // TODO(michaelochurch): refactor the huge function.
    // BFS frontier loop: one iteration per hop, accumulating nodes and edges.
    def loop(unexploredNodeIds:Set[Name],
             exploredNodeIds:Set[Name],
             allNodes:Set[NodeT],
             allEdges:Set[EdgeT],
             depth:Int):ResultGraph[NodeT,EdgeT] = {
      if (depth == 0 || unexploredNodeIds.isEmpty) {
        new ResultGraph(allNodes, allEdges)
      }
      else {
        // Keep an out-edge (and its destination node) only if both pass their filters.
        def nodeAndEdgeFilter(edge:EdgeT):Option[(EdgeT, NodeT)] = {
          if (edgeFilter(edge)) {
            val node = getNode(edge.dest).getOrElse(fail())
            if (nodeFilter(node)) {
              Some((edge, node))
            } else None
          } else None
        }
        val outEdgeIds = unexploredNodeIds.flatMap(outEdges(_))
        val matches = outEdgeIds.flatMap(nodeAndEdgeFilter)
        val newNodes = matches.map(_._2)
        val nowExplored = exploredNodeIds ++ unexploredNodeIds
        // Next frontier excludes anything already visited, so cycles terminate.
        loop(newNodes.map(_.id) -- nowExplored,
             nowExplored,
             allNodes ++ newNodes,
             allEdges ++ matches.map(_._1),
             depth - 1)
      }
    }
    loop(unexploredNodeIds = g.nodes.keySet,
         exploredNodeIds = Set.empty,
         allNodes = g.nodes.values.toSet,
         allEdges = g.edges.values.toSet,
         depth = depth.getOrElse(-1))
  }
  // Evaluates the inner query, then expands from its result set.
  private def followEdges(q:Query[NodeT, EdgeT],
                          edgeFilter:EdgeFilter[EdgeT],
                          nodeFilter:NodeFilter[NodeT],
                          depth:Option[Int]) = {
    followEdgesFrom(search(q),
                    edgeFilter, nodeFilter, depth)
  }
  // Already a ResultGraph; identity conversion.
  def toResultGraph() = this
  /** Evaluates a query against this graph, returning the matching subgraph. */
  def search(q:Query[NodeT, EdgeT]):ResultGraph[NodeT, EdgeT] = {
    q match {
      case FindNodes(nodeFilter) => findNodes(nodeFilter)
      case FollowEdges(q, edgeFilter, nodeFilter, depth) =>
        followEdges(q, edgeFilter, nodeFilter, depth)
    }
  }
  // Value identity of the graph: the two id-indexed maps (validation excluded).
  private def tuple() = (nodes, edges)
  override def toString() = {
    val validationString = validation match {
      case None => "(not validated)"
      case Some(true) => "(validated)"
      case Some(false) => "(FAILED validation)"
    }
    "ResultGraph: %d nodes, %d edges %s".format(nodes.size, edges.size,
                                                validationString)
  }
  override def hashCode() = {
    this.tuple.hashCode
  }
  // NOTE: the type parameters in this match are erased, so any ResultGraph
  // matches; equality then falls through to comparing the node/edge maps.
  override def equals(that:Any) = {
    that match {
      case (graph:ResultGraph[_, _]) => graph.tuple == this.tuple
      case _ => false
    }
  }
  /** Serializes this graph (size header, then nodes, then edges); always closes the writer. */
  def saveToFile(filename:String) = {
    val writer = Serialization.objectWriter(filename)
    try {
      Serialization.writeGraphSize(writer, nodes.size, edges.size)
      for (node <- nodes.values) {
        Serialization.writeNode(writer, node)
      }
      for (edge <- edges.values) {
        Serialization.writeEdge(writer, edge)
      }
    } finally {
      writer.close()
    }
  }
}
object ResultGraph {
  /** Reads a graph written by saveToFile: size header first, then the nodes,
    * then the edges, in order. The reader is closed no matter what. */
  def loadFromFile[NodeT <: Node, EdgeT <: Edge](filename:String) = {
    val reader = Serialization.objectReader(filename)
    try {
      val (nodeCount, edgeCount) = Serialization.readGraphSize(reader)
      val loadedNodes = Seq.fill(nodeCount)(Serialization.readNode[NodeT](reader))
      val loadedEdges = Seq.fill(edgeCount)(Serialization.readEdge[EdgeT](reader))
      new ResultGraph[NodeT, EdgeT](loadedNodes, loadedEdges)
    } finally {
      reader.close()
    }
  }
  /** A graph with no nodes and no edges. */
  def empty[NodeT <: Node, EdgeT <: Edge]() = new ResultGraph[NodeT, EdgeT]()
  /** Empty graph over the base node/edge types. */
  def basic = empty[BaseNode, BaseEdge]
}
| michaelochurch/ScalaGraph | src/main/scala/ResultGraph.scala | Scala | mit | 5,937 |
/*
Copyright 2014 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.summingbird
import com.twitter.summingbird.option.JobId
import scala.collection.parallel.mutable.ParHashSet
import scala.ref.WeakReference
import scala.util.Try
import java.util.concurrent.ConcurrentHashMap
/** Callback handle used to bump a single, already-resolved counter by an arbitrary amount. */
trait CounterIncrementor {
  def incrBy(by: Long): Unit
}
/** Implemented per execution platform to hand out counter incrementors for a running job. */
trait PlatformStatProvider {
  // Incrementor for a Counter identified by group/name for the specific jobID
  // Returns an incrementor function for the Counter wrapped in an Option
  // to ensure we catch when the incrementor cannot be obtained for the specified jobID
  def counterIncrementor(jobId: JobId, group: Group, name: Name): Option[CounterIncrementor]
}
/** Global registry of [[PlatformStatProvider]]s, consulted at runtime to
  * resolve counter incrementors for a given job. */
object SummingbirdRuntimeStats {
  // Minimal thread-safe mutable set: every operation synchronizes on the
  // underlying container.
  private class MutableSetSynchronizedWrapper[T] {
    private[this] val innerContainer = scala.collection.mutable.Set[T]()
    def nonEmpty: Boolean = innerContainer.synchronized { innerContainer.nonEmpty }
    def toSeq: Seq[T] = innerContainer.synchronized { innerContainer.toSeq }
    def add(e: T): Unit = innerContainer.synchronized { innerContainer += e }
  }
  // A global set of PlatformStatProviders, ParHashSet in scala seemed to trigger a deadlock
  // So a simple wrapper on a mutable set is used.
  // Providers are held via WeakReference so registration does not pin them in memory.
  private[this] val platformStatProviders = new MutableSetSynchronizedWrapper[WeakReference[PlatformStatProvider]]
  val SCALDING_STATS_MODULE = "com.twitter.summingbird.scalding.ScaldingRuntimeStatsProvider$"
  // Need to explicitly invoke the object initializer on remote node
  // since Scala object initialization is lazy, hence need the absolute object classpath
  private[this] final val platformObjects = List(SCALDING_STATS_MODULE)
  // invoke the ScaldingRuntimeStatsProvider object initializer on remote node
  // (lazy so it runs at most once, on first counter lookup; Try swallows
  // ClassNotFoundException when the scalding module is not on the classpath)
  private[this] lazy val platformsInit =
    platformObjects.foreach { s: String => Try[Unit] { Class.forName(s) } }
  def hasStatProviders: Boolean = platformStatProviders.nonEmpty
  def addPlatformStatProvider(pp: PlatformStatProvider): Unit =
    platformStatProviders.add(new WeakReference(pp))
  /** Resolves the incrementor for (group, name) of the given job.
    * Fails hard (sys.error) when no registered provider recognizes the jobID. */
  def getPlatformCounterIncrementor(jobID: JobId, group: Group, name: Name): CounterIncrementor = {
    platformsInit
    // Find the PlatformMetricProvider (PMP) that matches the jobID
    // return the incrementor for the Counter specified by group/name
    // We return the first PMP that matches the jobID, in reality there should be only one
    (for {
      provRef <- platformStatProviders.toSeq
      prov <- provRef.get
      incr <- prov.counterIncrementor(jobID, group, name)
    } yield incr)
      .toList
      .headOption
      .getOrElse(sys.error("Could not find the platform stat provider for jobID " + jobID))
  }
}
/** Tracks which (group, name) counters have been declared for each job. */
object JobCounters {
  // Atomic get-or-insert on a ConcurrentHashMap: if putIfAbsent loses a race,
  // the recursive call re-reads whichever value actually won.
  @annotation.tailrec
  private[this] final def getOrElseUpdate[K, V](map: ConcurrentHashMap[K, V], k: K, default: => V): V = {
    val v = map.get(k)
    if (v == null) {
      map.putIfAbsent(k, default)
      getOrElseUpdate(map, k, default)
    } else {
      v
    }
  }
  private val registeredCountersForJob: ConcurrentHashMap[JobId, ParHashSet[(Group, Name)]] =
    new ConcurrentHashMap[JobId, ParHashSet[(Group, Name)]]()
  /** All counters registered for the job, or None if the job is unknown. */
  def getCountersForJob(jobID: JobId): Option[Seq[(Group, Name)]] =
    Option(registeredCountersForJob.get(jobID)).map(_.toList)
  /** Records that the job declares counter (group, name). */
  // NOTE(review): the map access is race-safe, but the subsequent `set += ...`
  // mutates a ParHashSet without synchronization — confirm this is only called
  // from a single thread per job, or guard the mutation.
  def registerCounter(jobID: JobId, group: Group, name: Name): Unit = {
    val set = getOrElseUpdate(registeredCountersForJob, jobID, ParHashSet[(Group, Name)]())
    set += ((group, name))
  }
}
| twitter/summingbird | summingbird-core/src/main/scala/com/twitter/summingbird/Stats.scala | Scala | apache-2.0 | 4,010 |
package cmwell.analytics.util
import java.net.URI
import com.fasterxml.jackson.databind.JsonNode
import scala.collection.JavaConverters._
import scala.util.Try
// Contact points for the cluster's data stores, each a "host:port" string.
// TODO: These s/b `Uri`s?
case class ContactPoints(cassandra: String,
                         es: String)
/** Discovers data-store contact points ("host:port") for a CM-Well cluster
  * by querying the cluster's health endpoints. */
object FindContactPoints {
  /** Given a CM-Well URL, find Elasticsearch contact point.
   * This will be in the form: host:port.
   * Uses the first accessible master, which should expose ES on port 9200 (clustered) or 9201 (single).
   *
   * Using jackson included with Spark for parsing JSON (to avoid version conflicts).
   */
  def es(url: String): String = {
    val uri = new URI(url)
    val json: JsonNode = HttpUtil.getJson(s"http://${uri.getHost}:${uri.getPort}/proc/health?format=json")
    val masterIpAddresses: Seq[String] = json.get("fields").findValue("masters").elements.asScala.map(_.textValue).toSeq
    if (masterIpAddresses.isEmpty)
      throw new RuntimeException("No master node addresses found.")
    // For Elasticsearch, the port is 9201 for a single node, and 9200 for clustered.
    val esPort = if (masterIpAddresses.lengthCompare(1) > 0) "9200" else "9201"
    // All the masters should be accessible, but verify that.
    // A better implementation would keep all the endpoints in the list, and we could fall back to the others
    // if the one we are using disappears.
    val firstAccessibleESEndpoint = masterIpAddresses.find { ipAddress =>
      Try(HttpUtil.get(s"http://$ipAddress:$esPort")).isSuccess
    }
    if (firstAccessibleESEndpoint.isEmpty)
      throw new RuntimeException("No accessible ES endpoint was found.")
    s"${firstAccessibleESEndpoint.get}:$esPort"
  }
  /**
   * Get the internal contact point for one of Cassandra nodes.
   * This is not exposed using a nice JSON API, but we can pick it out of the HTML that it is embedded in.
   */
  def cas(url: String): String = {
    val uri = new URI(url)
    // Build the URL from host + port explicitly, matching `es` above.
    // Fix: the previous code interpolated `${uri.getAuthority}:${uri.getPort}`,
    // but URI.getAuthority already contains the port when one is present,
    // which produced a malformed "host:port:port" URL.
    val json: JsonNode = HttpUtil.getJson(s"http://${uri.getHost}:${uri.getPort}/proc/health-detailed.md?format=json")
    val data = json.get("content").findValue("data").asText
    // The first 6 lines will looks like this (ignore them).
    //
    // ##### Current time: 2018-02-27T18:48:35.138Z
    // ### Cluster name: cm-well-???
    // ### Data was generated on:
    // | **Node** | **WS** | **BG** | **CAS** | **ES** | **ZK** | **KF** |
    // |----------|--------|--------|---------|--------|--------|--------|
    // The remaining lines will look like this:
    //|10.204.192.148|<span style='color:green'>Green</span><br>Ok<br>Response time: 3 ms|<span style='color:green'>Green</span><br>Par4: i.p:0:G i:27:G p.p:0:G p:41:G|<span style='color:green'>Green</span><br>192.168.100.58 -> UN<br>192.168.100.59 -> UN<br>192.168.100.57 -> UN<br>192.168.100.60 -> UN|<span style='color:green'>Green</span><br>||<span style='color:green'>Green</span><br>|
    // We want what is in the fourth column.
    // <span style='color:green'>Green</span><br>10.204.146.186 -> UN<br>10.204.146.187 -> UN<br>10.204.146.188 -> UN<br>10.204.146.185 -> UN
    val allInternalAddresses = for {
      line <- data.split("\\\\n").drop(6)
      casColumn = line.split("\\\\|")(4)
      internalAddresses = casColumn.drop(casColumn.indexOf("</span>") + "</span>".length)
      eachAddress <- internalAddresses.split("<br>")
      if !eachAddress.isEmpty
    } yield eachAddress.take(eachAddress.indexOf(" "))
    if (allInternalAddresses.isEmpty)
      throw new RuntimeException("No Cassandra endpoints found.")
    allInternalAddresses.head
  }
}
| bryaakov/CM-Well | tools/dataConsistencyTool/cmwell-spark-analysis/src/main/scala/cmwell/analytics/util/FindContactPoints.scala | Scala | apache-2.0 | 3,598 |
package subscrobbler
import com.typesafe.config._
import java.awt.Desktop
import java.net.URI
/** Entry point: builds a recommendation page for the configured last.fm user
  * and opens it in the default browser. */
object Application extends App {
  LastFmConnection.init()
  // NOTE(review): `var` here could likely be `val` — nothing in this object reassigns them.
  var u: UserData = new UserData(Conf.user)
  var r: Recommendation = u.getRecommendation
  //r.toStdOut()
  // Output file is named after the user, e.g. "alice.html".
  val htmlFileName = Conf.user + ".html"
  r.toHtmlFile(htmlFileName)
  //u.downloadHistory("eae.csv")
  // Open the generated report in the system browser.
  Desktop.getDesktop.browse(new URI(htmlFileName))
}
/** Application configuration, read from Typesafe Config; the last.fm API key
  * and user may be overridden via JVM system properties. */
object Conf {
  private val config = ConfigFactory.load()
  val userAgent: String = config.getString("userAgent")
  // -Dlastfm.api.key=... takes precedence over the config file.
  val key: String = Option(System.getProperty("lastfm.api.key"))
    .getOrElse(config.getString("lastfm.api.key"))
  // -Dlastfm.api.user=... takes precedence over the config file.
  val user: String = Option(System.getProperty("lastfm.api.user"))
    .getOrElse(config.getString("lastfm.api.user"))
  val dbgFlag: Boolean = config.getBoolean("dbgFlag")
}
| sentenzo/sub-scrobbler | src/main/scala/subscrobbler/Application.scala | Scala | mit | 855 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.commons.script
import org.beangle.commons.script.{EvaluationException, ExpressionEvaluator}
import javax.script.{ScriptEngine, ScriptEngineManager, SimpleBindings}
/** Evaluates expressions through a JSR-223 scripting engine (e.g. "javascript", "groovy"). */
class JSR223ExpressionEvaluator(engineName: String) extends ExpressionEvaluator {
  // NOTE(review): `var` exposes a public setter; `val` would likely suffice —
  // confirm no caller reassigns the engine.
  var scriptEngine = new ScriptEngineManager().getEngineByName(engineName)
  // No pre-parsing step for JSR-223 engines; evaluation happens in eval().
  def parse(exp: String): Unit = {}
  /** Evaluates `exp` with `root` exposed as bindings: a Map's entries become
    * individual bindings, anything else is bound under the name "root". */
  def eval(exp: String, root: AnyRef): AnyRef = {
    val ctx = new SimpleBindings
    root match {
      // NOTE: the type arguments are erased, so ANY scala/java Map matches these
      // cases; keys are assumed (unchecked) to be Strings.
      case sm: collection.Map[String, Any] =>
        sm foreach (x => ctx.put(x._1, x._2))
      case jm: java.util.Map[String, Any] => ctx.putAll(jm)
      case _ => ctx.put("root", root)
    }
    scriptEngine.eval(exp, ctx)
  }
  /** Typed variant: evaluates and casts (unchecked) to the requested type. */
  def eval[T](exp: String, root: AnyRef, resultType: Class[T]): T = {
    eval(exp, root).asInstanceOf[T]
  }
}
| beangle/commons | core/src/main/scala/org/beangle/commons/script/JSR223ExpressionEvaluator.scala | Scala | lgpl-3.0 | 1,549 |
package $package$.first
import com.typesafe.scalalogging.Logger
import pureconfig._
import pureconfig.generic.auto._
/** Entry point of the `first` module: loads its config section and logs the version.
  * (This is a giter8 template file; `\\$` escapes template substitution.) */
object First {
  // Reads the "first" section; falls back to a sentinel config when loading fails.
  val config = ConfigSource.default.at("first").load[FirstConfig].getOrElse(FirstConfig("undefined"))
  val logger = Logger(getClass)
  def main(args: Array[String]): Unit = logger.info(s"Run first at version: \\${config.version}")
}
| MateuszKubuszok/SBTScalaMultiproject2.g8 | src/main/g8/modules/first/src/main/scala/$package$/first/First.scala | Scala | mit | 370 |
import scala.reflect.{OptManifest, ClassTag}
/** Compiler regression test helper: renders the element type implied by an
  * OptManifest as a short string, or "<?>" when no ClassTag is available. */
object Ref {
  object Sentinel
  // Determines the runtime element type by allocating an empty array via the
  // ClassTag and matching on the array's runtime class.
  def makeWithArr[A: OptManifest]: String = optManifest[A] match {
    case m: ClassTag[_] => m.newArray(0).asInstanceOf[AnyRef] match {
      // these can be reordered, so long as Unit comes before AnyRef
      case _: Array[Boolean] => "bool"
      case _: Array[Byte] => "byte"
      case _: Array[Short] => "short"
      case _: Array[Char] => "char"
      case _: Array[Int] => "int"
      case _: Array[Float] => "float"
      case _: Array[Long] => "long"
      case _: Array[Double] => "double"
      case _: Array[Unit] => "unit"
      // Fallback for reference element types: report the component class name.
      case a: Array[AnyRef] => a.getClass.getComponentType.getName
    }
    // No ClassTag available (e.g. NoManifest): type is unknown.
    case _ => "<?>"
  }
  // Same classification, but by matching the ClassTag itself against the
  // predefined primitive/special tags instead of allocating an array.
  def make[A: OptManifest]: String = optManifest[A] match {
    case m: ClassTag[a] => m match {
      case ClassTag.Boolean => "bool"
      case ClassTag.Byte => "byte"
      case ClassTag.Short => "short"
      case ClassTag.Char => "char"
      case ClassTag.Int => "int"
      case ClassTag.Float => "float"
      case ClassTag.Long => "long"
      case ClassTag.Double => "double"
      case ClassTag.Unit => "unit"
      case ClassTag.Any => "any"
      case ClassTag.AnyVal => "anyval"
      case ClassTag.Object => "anyref"
      // Other reference types: report the runtime class name.
      case _ => m.runtimeClass.getName
    }
    case NoManifest => "<?>"
  }
}
import Ref.*
// For an abstract type parameter A no ClassTag can be materialized, so the
// OptManifest defaults to NoManifest and both of these yield "<?>"
// (asserted in Test below).
def baz[A] = Ref.makeWithArr[A]
def qux[A] = Ref.make[A]
// Exercises both classification strategies across primitives, special types,
// literal types, and bottom types.
@main def Test = {
  // Array-allocation based classification (makeWithArr).
  assert(Ref.makeWithArr[Boolean] == "bool")
  assert(Ref.makeWithArr[Byte] == "byte")
  assert(Ref.makeWithArr[Short] == "short")
  assert(Ref.makeWithArr[Char] == "char")
  assert(Ref.makeWithArr[Int] == "int")
  assert(Ref.makeWithArr[Float] == "float")
  assert(Ref.makeWithArr[Long] == "long")
  assert(Ref.makeWithArr[Double] == "double")
  assert(Ref.makeWithArr[Unit] == "unit")
  // A string literal type resolves to its runtime class.
  assert(Ref.makeWithArr["abc"] == "java.lang.String")
  // Bottom types have no ClassTag.
  assert(Ref.makeWithArr[Null] == "<?>")
  assert(Ref.makeWithArr[Nothing] == "<?>")
  // Abstract type parameter: no ClassTag at the definition site of baz.
  assert(baz[Int] == "<?>")
  // ClassTag-matching based classification (make).
  assert(Ref.make[Boolean] == "bool")
  assert(Ref.make[Byte] == "byte")
  assert(Ref.make[Short] == "short")
  assert(Ref.make[Char] == "char")
  assert(Ref.make[Int] == "int")
  assert(Ref.make[Float] == "float")
  assert(Ref.make[Long] == "long")
  assert(Ref.make[Double] == "double")
  assert(Ref.make[Unit] == "unit")
  // Special non-primitive tags only distinguishable via make.
  assert(Ref.make[Any] == "any")
  assert(Ref.make[AnyVal] == "anyval")
  assert(Ref.make[AnyRef] == "anyref")
  assert(Ref.make["abc"] == "java.lang.String")
  assert(Ref.make[Null] == "<?>")
  assert(Ref.make[Nothing] == "<?>")
  assert(qux[Int] == "<?>")
}
| dotty-staging/dotty | tests/run/i9482.scala | Scala | apache-2.0 | 2,710 |
package com.twitter.finagle.http.filter
import com.twitter.finagle.Service
import com.twitter.finagle.http.{Request, Response}
import com.twitter.finagle.stats.InMemoryStatsReceiver
import com.twitter.util.{Await, Future, Time}
import org.specs.SpecificationWithJUnit
/** Verifies that StatsFilter records status-code counters and response-size
  * stats for a completed HTTP request. */
class StatsFilterSpec extends SpecificationWithJUnit {
  "StatsFilter" should {
    "increment stats" in {
      val receiver = new InMemoryStatsReceiver
      // Stub service: always responds 404 with a 5-byte body ("hello").
      val filter = new StatsFilter(receiver) andThen new Service[Request, Response] {
        def apply(request: Request): Future[Response] = {
          val response = request.response
          response.statusCode = 404
          response.write("hello")
          Future.value(response)
        }
      }
      // Freeze time so latency measurement is deterministic.
      Time.withCurrentTimeFrozen { _ =>
        Await.result(filter(Request()))
      }
      // Both the exact status and its class bucket are counted once.
      receiver.counters(Seq("status", "404")) must_== 1
      receiver.counters(Seq("status", "4XX")) must_== 1
      // TODO: until we can mock stopwatches
      // receiver.stats(Seq("time", "404")) must_== Seq(0.0)
      // receiver.stats(Seq("time", "4XX")) must_== Seq(0.0)
      // Response size equals the body length written above.
      receiver.stats(Seq("response_size")) must_== Seq(5.0)
    }
  }
}
| firebase/finagle | finagle-http/src/test/scala/com/twitter/finagle/http/filter/StatsFilterSpec.scala | Scala | apache-2.0 | 1,179 |
import org.coursera.naptime.NaptimeModule
import org.coursera.naptime.ari.EngineApi
import org.coursera.naptime.ari.FetcherApi
import org.coursera.naptime.ari.LocalSchemaProvider
import org.coursera.naptime.ari.SchemaProvider
import org.coursera.naptime.ari.engine.EngineImpl
import org.coursera.naptime.ari.engine.EngineMetricsCollector
import org.coursera.naptime.ari.engine.LoggingEngineMetricsCollector
import org.coursera.naptime.ari.fetcher.LocalFetcher
import org.coursera.naptime.ari.graphql.DefaultGraphqlSchemaProvider
import org.coursera.naptime.ari.graphql.GraphqlSchemaProvider
import org.coursera.naptime.ari.graphql.controllers.filters.DefaultFilters
import org.coursera.naptime.ari.graphql.controllers.filters.FilterList
import resources.UserStore
import resources.UserStoreImpl
import resources.UsersResource
import resources.CoursesResource
import resources.InstructorsResource
import resources.PartnersResource
/** Dependency-injection wiring for the example app: registers the Naptime
  * resources and binds the ARI/GraphQL machinery to its default implementations. */
class ResourceModule extends NaptimeModule {
  override def configure(): Unit = {
    // Naptime resources exposed by this application.
    bindResource[UsersResource]
    bindResource[CoursesResource]
    bindResource[InstructorsResource]
    bindResource[PartnersResource]
    // In-memory user storage backing UsersResource.
    bind[UserStore].to[UserStoreImpl]
    // ARI engine + local (in-process) fetcher and schema provider.
    bind[EngineApi].to[EngineImpl]
    bind[FetcherApi].to[LocalFetcher]
    bind[EngineMetricsCollector].to[LoggingEngineMetricsCollector]
    bind[SchemaProvider].to[LocalSchemaProvider]
    // GraphQL schema derivation and default request filter chain.
    bind[GraphqlSchemaProvider].to[DefaultGraphqlSchemaProvider]
    bind[FilterList].to[DefaultFilters]
  }
}
| saeta/naptime | examples/src/main/scala/ResourceModule.scala | Scala | apache-2.0 | 1,491 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import java.util.concurrent.LinkedBlockingQueue
import scala.collection.mutable.{ArrayBuffer, HashSet, Queue}
import scala.util.{Failure, Try}
import org.apache.spark.{Logging, TaskContext}
import org.apache.spark.network.shuffle.{BlockFetchingListener, ShuffleClient}
import org.apache.spark.network.buffer.ManagedBuffer
import org.apache.spark.serializer.{SerializerInstance, Serializer}
import org.apache.spark.util.{CompletionIterator, Utils}
/**
* An iterator that fetches multiple blocks. For local blocks, it fetches from the local block
* manager. For remote blocks, it fetches them using the provided BlockTransferService.
*
* This creates an iterator of (BlockID, values) tuples so the caller can handle blocks in a
* pipelined fashion as they are received.
*
 * The implementation throttles the remote fetches so they don't exceed maxBytesInFlight to avoid
* using too much memory.
*
* @param context [[TaskContext]], used for metrics update
* @param shuffleClient [[ShuffleClient]] for fetching remote blocks
* @param blockManager [[BlockManager]] for reading local blocks
* @param blocksByAddress list of blocks to fetch grouped by the [[BlockManagerId]].
* For each block we also require the size (in bytes as a long field) in
* order to throttle the memory usage.
* @param serializer serializer used to deserialize the data.
* @param maxBytesInFlight max size (in bytes) of remote blocks to fetch at any given point.
*/
private[spark]
final class ShuffleBlockFetcherIterator(
context: TaskContext,
shuffleClient: ShuffleClient,
blockManager: BlockManager,
blocksByAddress: Seq[(BlockManagerId, Seq[(BlockId, Long)])],
serializer: Serializer,
maxBytesInFlight: Long)
extends Iterator[(BlockId, Try[Iterator[Any]])] with Logging {
import ShuffleBlockFetcherIterator._
/**
* Total number of blocks to fetch. This can be smaller than the total number of blocks
* in [[blocksByAddress]] because we filter out zero-sized blocks in [[initialize]].
*
* This should equal localBlocks.size + remoteBlocks.size.
*/
private[this] var numBlocksToFetch = 0
/**
   * The number of blocks processed by the caller. The iterator is exhausted when
* [[numBlocksProcessed]] == [[numBlocksToFetch]].
*/
private[this] var numBlocksProcessed = 0
private[this] val startTime = System.currentTimeMillis
/** Local blocks to fetch, excluding zero-sized blocks. */
private[this] val localBlocks = new ArrayBuffer[BlockId]()
/** Remote blocks to fetch, excluding zero-sized blocks. */
private[this] val remoteBlocks = new HashSet[BlockId]()
/**
* A queue to hold our results. This turns the asynchronous model provided by
* [[BlockTransferService]] into a synchronous model (iterator).
*/
private[this] val results = new LinkedBlockingQueue[FetchResult]
/**
* Current [[FetchResult]] being processed. We track this so we can release the current buffer
* in case of a runtime exception when processing the current buffer.
*/
@volatile private[this] var currentResult: FetchResult = null
/**
* Queue of fetch requests to issue; we'll pull requests off this gradually to make sure that
* the number of bytes in flight is limited to maxBytesInFlight.
*/
private[this] val fetchRequests = new Queue[FetchRequest]
/** Current bytes in flight from our requests */
private[this] var bytesInFlight = 0L
private[this] val shuffleMetrics = context.taskMetrics.createShuffleReadMetricsForDependency()
private[this] val serializerInstance: SerializerInstance = serializer.newInstance()
/**
* Whether the iterator is still active. If isZombie is true, the callback interface will no
* longer place fetched blocks into [[results]].
*/
@volatile private[this] var isZombie = false
initialize()
/**
* Mark the iterator as zombie, and release all buffers that haven't been deserialized yet.
*/
  // Marks the iterator as a zombie (so in-flight fetch callbacks stop enqueuing
  // results) and releases every buffer still held: the one currently being
  // processed plus everything queued in `results`.
  private[this] def cleanup() {
    isZombie = true
    // Release the current buffer if necessary
    currentResult match {
      case SuccessFetchResult(_, _, buf) => buf.release()
      case _ =>
    }
    // Release buffers in the results queue
    val iter = results.iterator()
    while (iter.hasNext) {
      val result = iter.next()
      result match {
        case SuccessFetchResult(_, _, buf) => buf.release()
        case _ =>
      }
    }
  }
  // Issues one asynchronous multi-block fetch to a remote executor. Successful
  // blocks are enqueued into `results` (unless this iterator became a zombie);
  // failures are enqueued as FailureFetchResult. Adds req.size to bytesInFlight.
  private[this] def sendRequest(req: FetchRequest) {
    logDebug("Sending request for %d blocks (%s) from %s".format(
      req.blocks.size, Utils.bytesToString(req.size), req.address.hostPort))
    bytesInFlight += req.size
    // so we can look up the size of each blockID
    val sizeMap = req.blocks.map { case (blockId, size) => (blockId.toString, size) }.toMap
    val blockIds = req.blocks.map(_._1.toString)
    val address = req.address
    shuffleClient.fetchBlocks(address.host, address.port, address.executorId, blockIds.toArray,
      new BlockFetchingListener {
        // Invoked by the network layer (different thread) once per fetched block.
        override def onBlockFetchSuccess(blockId: String, buf: ManagedBuffer): Unit = {
          // Only add the buffer to results queue if the iterator is not zombie,
          // i.e. cleanup() has not been called yet.
          if (!isZombie) {
            // Increment the ref count because we need to pass this to a different thread.
            // This needs to be released after use.
            buf.retain()
            results.put(new SuccessFetchResult(BlockId(blockId), sizeMap(blockId), buf))
            shuffleMetrics.incRemoteBytesRead(buf.size)
            shuffleMetrics.incRemoteBlocksFetched(1)
          }
          logTrace("Got remote block " + blockId + " after " + Utils.getUsedTimeMs(startTime))
        }
        // A failure is surfaced to the consumer through the results queue.
        override def onBlockFetchFailure(blockId: String, e: Throwable): Unit = {
          logError(s"Failed to get block(s) from ${req.address.host}:${req.address.port}", e)
          results.put(new FailureFetchResult(BlockId(blockId), e))
        }
      }
    )
  }
/**
 * Partitions the blocks to fetch into local blocks (served by this executor's own
 * BlockManager) and remote [[FetchRequest]]s, skipping zero-sized blocks. Remote
 * blocks are grouped into requests of roughly `maxBytesInFlight / 5` bytes so that
 * several fetches can run in parallel without exceeding the in-flight budget.
 *
 * Side effects: appends to `localBlocks` / `remoteBlocks` and increments
 * `numBlocksToFetch`.
 *
 * @return the remote fetch requests to enqueue
 */
private[this] def splitLocalRemoteBlocks(): ArrayBuffer[FetchRequest] = {
  // Make remote requests at most maxBytesInFlight / 5 in length; the reason to keep them
  // smaller than maxBytesInFlight is to allow multiple, parallel fetches from up to 5
  // nodes, rather than blocking on reading output from one node.
  val targetRequestSize = math.max(maxBytesInFlight / 5, 1L)
  logDebug("maxBytesInFlight: " + maxBytesInFlight + ", targetRequestSize: " + targetRequestSize)
  // Split local and remote blocks. Remote blocks are further split into FetchRequests of size
  // at most maxBytesInFlight in order to limit the amount of data in flight.
  val remoteRequests = new ArrayBuffer[FetchRequest]
  // Tracks total number of blocks (including zero sized blocks)
  var totalBlocks = 0
  for ((address, blockInfos) <- blocksByAddress) {
    totalBlocks += blockInfos.size
    if (address.executorId == blockManager.blockManagerId.executorId) {
      // Filter out zero-sized blocks. Count only the blocks added for *this* address:
      // the previous code added localBlocks.size, which re-counts blocks from earlier
      // iterations if more than one entry of blocksByAddress maps to this executor.
      val newLocalBlocks = blockInfos.filter(_._2 != 0).map(_._1)
      localBlocks ++= newLocalBlocks
      numBlocksToFetch += newLocalBlocks.size
    } else {
      val iterator = blockInfos.iterator
      var curRequestSize = 0L
      var curBlocks = new ArrayBuffer[(BlockId, Long)]
      while (iterator.hasNext) {
        val (blockId, size) = iterator.next()
        // Skip empty blocks
        if (size > 0) {
          curBlocks += ((blockId, size))
          remoteBlocks += blockId
          numBlocksToFetch += 1
          curRequestSize += size
        } else if (size < 0) {
          throw new BlockException(blockId, "Negative block size " + size)
        }
        if (curRequestSize >= targetRequestSize) {
          // Add this FetchRequest
          remoteRequests += new FetchRequest(address, curBlocks)
          curBlocks = new ArrayBuffer[(BlockId, Long)]
          logDebug(s"Creating fetch request of $curRequestSize at $address")
          curRequestSize = 0
        }
      }
      // Add in the final request
      if (curBlocks.nonEmpty) {
        remoteRequests += new FetchRequest(address, curBlocks)
      }
    }
  }
  logInfo(s"Getting $numBlocksToFetch non-empty blocks out of $totalBlocks blocks")
  remoteRequests
}
/**
 * Fetch the local blocks while we are fetching remote blocks. This is ok because
 * [[ManagedBuffer]]'s memory is allocated lazily when we create the input stream, so all we
 * track in-memory are the ManagedBuffer references themselves.
 */
private[this] def fetchLocalBlocks() {
  var aborted = false
  val pending = localBlocks.iterator
  while (!aborted && pending.hasNext) {
    val blockId = pending.next()
    try {
      val buf = blockManager.getBlockData(blockId)
      shuffleMetrics.incLocalBlocksFetched(1)
      shuffleMetrics.incLocalBytesRead(buf.size)
      buf.retain()
      results.put(new SuccessFetchResult(blockId, 0, buf))
    } catch {
      case e: Exception =>
        // If we see an exception, stop immediately.
        logError(s"Error occurred while fetching local blocks", e)
        results.put(new FailureFetchResult(blockId, e))
        aborted = true
    }
  }
}
/**
 * Kicks off the fetch: registers cleanup with the task context, partitions blocks into
 * local and remote, issues as many remote requests as fit under `maxBytesInFlight`
 * (in random order, to spread load across serving nodes), then fetches local blocks.
 */
private[this] def initialize(): Unit = {
  // Add a task completion callback (called in both success case and failure case) to cleanup.
  context.addTaskCompletionListener(_ => cleanup())
  // Split local and remote blocks.
  val remoteRequests = splitLocalRemoteBlocks()
  // Add the remote requests into our queue in a random order
  fetchRequests ++= Utils.randomize(remoteRequests)
  // Send out initial requests for blocks, up to our maxBytesInFlight
  while (fetchRequests.nonEmpty &&
    (bytesInFlight == 0 || bytesInFlight + fetchRequests.front.size <= maxBytesInFlight)) {
    sendRequest(fetchRequests.dequeue())
  }
  val numFetches = remoteRequests.size - fetchRequests.size
  // NOTE(review): no space between "in" and the time string — presumably
  // Utils.getUsedTimeMs prepends one; confirm, otherwise the log reads "in123 ms".
  logInfo("Started " + numFetches + " remote fetches in" + Utils.getUsedTimeMs(startTime))
  // Get Local Blocks
  fetchLocalBlocks()
  logDebug("Got local blocks in " + Utils.getUsedTimeMs(startTime))
}
/** True while some fetched or pending blocks have not yet been returned by next(). */
override def hasNext: Boolean = numBlocksProcessed < numBlocksToFetch
/**
 * Returns the next fetched block as (blockId, Try[iterator over deserialized values]).
 * Blocks until a result is available, updates fetch-wait metrics, and tops up the
 * outstanding remote requests so `bytesInFlight` stays under `maxBytesInFlight`.
 */
override def next(): (BlockId, Try[Iterator[Any]]) = {
  numBlocksProcessed += 1
  val startFetchWait = System.currentTimeMillis()
  // Track the result we are handing out so cleanup() can release its buffer if the
  // task dies before the iterator below is exhausted.
  currentResult = results.take()
  val result = currentResult
  val stopFetchWait = System.currentTimeMillis()
  shuffleMetrics.incFetchWaitTime(stopFetchWait - startFetchWait)
  // A completed fetch frees its share of the in-flight byte budget.
  result match {
    case SuccessFetchResult(_, size, _) => bytesInFlight -= size
    case _ =>
  }
  // Send fetch requests up to maxBytesInFlight
  while (fetchRequests.nonEmpty &&
    (bytesInFlight == 0 || bytesInFlight + fetchRequests.front.size <= maxBytesInFlight)) {
    sendRequest(fetchRequests.dequeue())
  }
  val iteratorTry: Try[Iterator[Any]] = result match {
    case FailureFetchResult(_, e) =>
      Failure(e)
    case SuccessFetchResult(blockId, _, buf) =>
      // There is a chance that createInputStream can fail (e.g. fetching a local file that does
      // not exist, SPARK-4085). In that case, we should propagate the right exception so
      // the scheduler gets a FetchFailedException.
      Try(buf.createInputStream()).map { is0 =>
        val is = blockManager.wrapForCompression(blockId, is0)
        val iter = serializerInstance.deserializeStream(is).asKeyValueIterator
        CompletionIterator[Any, Iterator[Any]](iter, {
          // Once the iterator is exhausted, release the buffer and set currentResult to null
          // so we don't release it again in cleanup.
          currentResult = null
          buf.release()
        })
      }
  }
  (result.blockId, iteratorTry)
}
}
private[storage]
object ShuffleBlockFetcherIterator {

  /**
   * A request to fetch blocks from a remote BlockManager.
   * @param address remote BlockManager to fetch from.
   * @param blocks Sequence of tuple, where the first element is the block id,
   *               and the second element is the estimated size, used to calculate bytesInFlight.
   */
  case class FetchRequest(address: BlockManagerId, blocks: Seq[(BlockId, Long)]) {
    // Total estimated size of the request; charged against bytesInFlight when sent.
    val size = blocks.map(_._2).sum
  }

  /**
   * Result of a fetch from a remote block.
   */
  private[storage] sealed trait FetchResult {
    val blockId: BlockId
  }

  /**
   * Result of a successful fetch of a remote block.
   * @param blockId block id
   * @param size estimated size of the block, used to calculate bytesInFlight.
   *             Note that this is NOT the exact bytes.
   * @param buf [[ManagedBuffer]] for the content.
   */
  private[storage] case class SuccessFetchResult(blockId: BlockId, size: Long, buf: ManagedBuffer)
    extends FetchResult {
    require(buf != null)
    require(size >= 0)
  }

  /**
   * Result of a failed fetch of a remote block.
   * @param blockId block id
   * @param e the failure exception
   */
  private[storage] case class FailureFetchResult(blockId: BlockId, e: Throwable)
    extends FetchResult
}
| andrewor14/iolap | core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala | Scala | apache-2.0 | 13,993 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.backend.utils
import ai.h2o.sparkling.SharedH2OTestContext
import ai.h2o.sparkling.backend.exceptions.RestApiCommunicationException
import ai.h2o.sparkling.extensions.rest.api.Paths
import org.apache.spark.sql.SparkSession
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FunSuite, Matchers}
import water.parser.ParseTime
@RunWith(classOf[JUnitRunner])
class RestApiUtilsTestSuite extends FunSuite with Matchers with SharedH2OTestContext {

  override def createSparkSession(): SparkSession = sparkSession("local[*]")

  test("Error message from unsuccessful call contains information from the server") {
    val conf = hc.getConf
    val endpoint = RestApiUtils.getClusterEndpoint(conf)
    // update() without the mandatory 'frame_name' parameter must fail server-side.
    val caught = intercept[RestApiCommunicationException](RestApiUtils.update(endpoint, Paths.CHUNK, conf))
    assert(caught.getMessage.contains("Cannot find value for the parameter 'frame_name'"))
  }

  test("Set America/Los_Angeles timezone to H2O cluster") {
    testSettingTimezoneToH2OCluster("America/Los_Angeles")
  }

  test("Set Europe/Prague timezone to H2O cluster") {
    testSettingTimezoneToH2OCluster("Europe/Prague")
  }

  test("Set UTC timezone to H2O cluster") {
    testSettingTimezoneToH2OCluster("UTC")
  }

  /**
   * Sets the given timezone on the H2O cluster via the REST API and verifies that
   * reading it back returns the same value.
   *
   * Removed leftover debug output (println(ParseTime.listTimezones)) that polluted
   * the test log on every run.
   */
  private def testSettingTimezoneToH2OCluster(timezone: String): Unit = {
    val conf = hc.getConf
    RestApiUtils.setTimeZone(conf, timezone)
    val result = RestApiUtils.getTimeZone(conf)
    result shouldEqual timezone
  }
}
| h2oai/sparkling-water | core/src/test/scala/ai/h2o/sparkling/backend/utils/RestApiUtilsTestSuite.scala | Scala | apache-2.0 | 2,363 |
package chapter17
/**
 * 17.4 Initializing collections
 *
 * The common way to create and initialize a collection is to pass the initial
 * elements to a factory method on the collection's companion object.
 */
object c17_i04 extends App {

  import scala.collection.mutable

  val stuff = mutable.Set(42) // type inference picks Set[Int]
  //stuff += "abracadabra" //type mismatch; found : String("abracadabra") required: Int

  // Giving an explicit element type of Any allows mixed content.
  val stuff2 = mutable.Set[Any](42)
  stuff2 += "abracadabra"

  import scala.collection.immutable.TreeSet

  val colors = List("b", "y", "r", "g")
  //val treeSet = TreeSet(colors) // No implicit Ordering defined for List[String].
  // Passing the list directly fails (no Ordering for List[String]); add elements instead.
  val treeSet = TreeSet[String]() ++ colors
  println(treeSet)

  /*
   * 1. Converting to an array or list
   *
   * One thing to keep in mind: converting to a list or an array copies every element
   * of the collection, so it can be slow when the collection is very large.
   */
  treeSet.toList
  treeSet.toArray

  /*
   * 2. Converting between mutable and immutable sets (and maps)
   *
   * Just create an empty collection of the target kind and add the elements
   * with the ++ / ++= operators.
   */
  val mutaSet = mutable.Set.empty ++ treeSet // ++ and ++= make no difference here
  println(mutaSet)
  val immutaSet = Set.empty ++ mutaSet
  println(immutaSet)

  val muta = mutable.Map("i" -> 1, "ii"->2)
  println(muta)
  val immu = Map.empty ++ muta
  println(immu)
}
} | seraekim/srkim-lang-scala | src/main/java/chapter17/c17_i04.scala | Scala | bsd-3-clause | 1,491 |
package monocle.state
import monocle.Getter
import cats.data.State
/** Provides the implicit conversion that adds State-related syntax to a [[Getter]]. */
trait StateGetterSyntax {
  /** Wraps a [[Getter]] in [[StateGetterOps]], exposing `toState` and friends. */
  implicit def toStateGetterOps[S, A](getter: Getter[S, A]): StateGetterOps[S, A] =
    new StateGetterOps[S, A](getter)
}
/** State-related extension methods for a [[Getter]]. */
final class StateGetterOps[S, A](private val getter: Getter[S, A]) extends AnyVal {

  /** transforms a Getter into a State */
  def toState: State[S, A] =
    State.inspect(getter.get)

  /** alias for toState */
  def st: State[S, A] =
    toState

  /** extracts the value viewed through the getter */
  def extract: State[S, A] =
    toState

  /** extracts the value viewed through the getter and applies `f` over it */
  def extracts[B](f: A => B): State[S, B] =
    toState.map(f)
}
| aoiroaoino/Monocle | state/src/main/scala/monocle/state/StateGetterSyntax.scala | Scala | mit | 715 |
package org.openurp.edu.eams.teach.grade.lesson.web.action
import java.util.Date
import org.beangle.commons.lang.Strings
import org.beangle.commons.bean.comparators.PropertyComparator
import org.beangle.commons.collection.Collections
import org.beangle.commons.collection.Order
import org.beangle.commons.collection.page.PagedList
import org.beangle.data.jpa.dao.OqlBuilder
import org.openurp.base.Semester
import org.openurp.edu.eams.base.util.WeekStates
import org.openurp.edu.eams.core.CommonAuditState
import org.openurp.edu.base.Project
import org.openurp.edu.base.StudentJournal
import org.openurp.edu.base.Course
import org.openurp.edu.eams.teach.Grade
import org.openurp.edu.teach.grade.CourseGrade
import org.openurp.edu.eams.teach.lesson.CourseSchedule
import org.openurp.edu.teach.lesson.Lesson
import org.openurp.edu.eams.teach.lesson.dao.LessonPlanRelationDao
import org.openurp.edu.eams.teach.lesson.dao.LessonSeqNoGenerator
import org.openurp.edu.eams.teach.lesson.model.LessonBean
import org.openurp.edu.teach.lesson.model.TeachClassBean
import org.openurp.edu.eams.web.action.common.SemesterSupportAction
/**
 * Web action for retake-course administration: statistics on students who failed a
 * course (and never later passed it), listing those students, showing free seats in
 * existing lessons, and creating new retake lessons for the selected courses.
 */
class RetakeAction extends SemesterSupportAction {

  // Collaborators — presumably injected by the container; confirm wiring.
  var lessonSeqNoGenerator: LessonSeqNoGenerator = _

  var lessonPlanRelationDao: LessonPlanRelationDao = _

  /** Lists retake statistics, sorted by the requested order (default: newspace desc), paged. */
  def search(): String = {
    val datas = stats(null)
    var orderBy = get(Order.ORDER_STR)
    if (Strings.isEmpty(orderBy)) {
      orderBy = "stat.newspace desc"
    }
    // Strip the "stat." prefix so PropertyComparator sees a bare property path.
    if (orderBy.startsWith("stat.")) orderBy = Strings.substringAfter(orderBy, "stat.")
    Collections.sort(datas, new PropertyComparator(orderBy))
    put("stats", new PagedList(datas, getPageLimit))
    forward()
  }

  /**
   * Builds per-course retake statistics: the number of distinct, currently enrolled
   * students whose published grade for the course is a fail with no later pass, plus
   * the number of free seats in this semester's lessons for the course.
   *
   * @param courseIds restricts the statistics to these courses when non-empty;
   *                  otherwise request filters (code/name/department) apply
   */
  private def stats(courseIds: Array[Long]): List[RetakeCourseStat] = {
    val courseCode = get("course.code")
    val courseName = get("course.name")
    val departId = getLong("course.department.id")
    // Query 1: failed-and-never-passed counts per course, for active students only.
    var builder = OqlBuilder.from(classOf[CourseGrade], "cg")
    builder.select("new org.openurp.edu.eams.teach.grade.lesson.web.action.RetakeCourseStat(cg.course,count(distinct cg.std.id))")
      .groupBy("cg.course")
      .where("cg.passed=false")
      .where("cg.status=" + Grade.Status.PUBLISHED)
      .where("not exists(from " + classOf[CourseGrade].getName +
        " cg2 where cg2.std=cg.std and cg2.course=cg.course and cg2.id!=cg.id and cg2.passed=true)")
    // Only count students with an open enrollment record as of now.
    builder.where("exists( from " + classOf[StudentJournal].getName +
      " sj where sj.std=cg.std and sj.beginOn<=:now and (sj.endOn=null or sj.endOn>=:now) )", new Date())
    if (null != courseIds && courseIds.length > 0) {
      builder.where("cg.course.id in (:courseIds)", courseIds)
    } else {
      if (Strings.isNotBlank(courseCode)) {
        builder.where("cg.course.code like :code", "%" + courseCode + "%")
      }
      if (Strings.isNotBlank(courseName)) {
        builder.where("cg.course.name like :name", "%" + courseName + "%")
      }
      if (null != departId) {
        builder.where("cg.course.department.id =:departmentId", departId)
      } else {
        builder.where("cg.course.department in(:departments)", getDeparts)
      }
      // Excludes course types whose name ends with "类" ("category") —
      // presumably category-level placeholders; confirm the intent.
      builder.where("cg.courseType.name not like :typeName", "%类")
    }
    val statList = entityDao.search(builder).asInstanceOf[List[RetakeCourseStat]]
    // Index the stats by course id for the free-seat merge below.
    val stats = Collections.newMap[Any]
    for (stat <- statList) {
      stats.put(stat.getCourse.id, stat)
    }
    // Query 2: remaining capacity per course in this semester's lessons.
    val semesterId = getIntId("semester")
    builder = OqlBuilder.from(classOf[Lesson], "l")
    builder.select("l.course.id,sum(l.teachClass.limitCount-l.teachClass.stdCount)")
      .groupBy("l.course.id")
    builder.where("l.semester.id=:semesterId and l.project=:project", semesterId, getProject)
      .where("l.teachClass.limitCount>l.teachClass.stdCount")
    if (null != courseIds && courseIds.length > 0) {
      builder.where("l.course.id in (:courseIds)", courseIds)
    } else {
      if (Strings.isNotBlank(courseCode)) {
        builder.where("l.course.code like :code", "%" + courseCode + "%")
      }
      if (Strings.isNotBlank(courseName)) {
        builder.where("l.course.name like :name", "%" + courseName + "%")
      }
      if (null != departId) {
        builder.where("l.course.department.id =:departmentId", departId)
      } else {
        builder.where("l.course.department in(:departments)", getDeparts)
      }
    }
    val lessonCourses = entityDao.search(builder)
    // Merge free-seat sums into the stats; each row is [courseId, sum(free)].
    for (data <- lessonCourses) {
      val datas = data.asInstanceOf[Array[Any]]
      val course = datas(0).asInstanceOf[java.lang.Long]
      val free = datas(1).asInstanceOf[Number]
      val stat = stats.get(course)
      if (null != stat && null != free) {
        stat.setFreespace(free.intValue())
      }
    }
    // NOTE(review): returns a Buffer[Any] where the signature declares
    // List[RetakeCourseStat] — relies on beangle Collections semantics; confirm.
    Collections.newBuffer[Any](stats.values)
  }

  /** Lists the students who failed the given course and never later passed it, paged. */
  def unpassed(): String = {
    val courseId = getLong("course.id")
    val course = entityDao.get(classOf[Course], courseId)
    val builder = OqlBuilder.from(classOf[CourseGrade], "grade")
    builder.where("grade.passed=false and grade.course.id=:courseId", courseId)
      .where("grade.status=" + Grade.Status.PUBLISHED)
      .where("not exists(from " + classOf[CourseGrade].getName +
        " cg2 where cg2.std=grade.std and cg2.course=grade.course and cg2.id!=grade.id and cg2.passed=true)")
    builder.where("exists( from " + classOf[StudentJournal].getName +
      " sj where sj.std=grade.std and sj.beginOn<=:now and (sj.endOn=null or sj.endOn>=:now) )", new Date())
    // First run the count projection, then reuse the builder for the paged entity list.
    builder.select("count(distinct grade.std.id)")
    val rs = entityDao.search(builder)
    put("count", rs.get(0))
    builder.select(null)
    builder.orderBy(get(Order.ORDER_STR))
    builder.limit(getPageLimit)
    put("course", course)
    put("grades", entityDao.search(builder))
    forward()
  }

  /** Lists this semester's lessons of the given course that still have free seats. */
  def freespace(): String = {
    val courseId = getLongId("course")
    val semesterId = getIntId("semester")
    val builder = OqlBuilder.from(classOf[Lesson], "lesson")
    builder.where("lesson.semester.id=:semesterId and lesson.project=:project", semesterId, getProject)
      .where("lesson.teachClass.limitCount>lesson.teachClass.stdCount")
      .where("lesson.course.id=:courseId", courseId)
    var orderBy = get(Order.ORDER_STR)
    if (Strings.isEmpty(orderBy)) {
      orderBy = "lesson.teachClass.limitCount-lesson.teachClass.stdCount desc,lesson.no"
    }
    builder.limit(getPageLimit).orderBy(orderBy)
    put("lessons", entityDao.search(builder))
    forward()
  }

  /**
   * Creates one new retake lesson ("重修班") per selected course, sized to the number
   * of students needing the retake, then persists the lessons and their plan relations.
   */
  def newLesson(): String = {
    val courseIds = getLongIds("stat")
    val semesterId = getIntId("semester")
    val semester = entityDao.get(classOf[Semester], semesterId)
    val stats = stats(courseIds)
    val lessons = Collections.newBuffer[Any]
    val project = getProject
    for (stat <- stats) {
      val course = stat.getCourse
      val lesson = LessonBean.getDefault.asInstanceOf[LessonBean]
      // Class capacity = number of students who still need to retake the course.
      lesson.getTeachClass.setLimitCount(stat.getNewspace)
      lesson.setProject(project)
      lesson.setCourse(course)
      lesson.setTeachDepart(course.department)
      lesson.setCourseType(course.getCourseType)
      lesson.setSemester(semester)
      lesson.setExamMode(course.getExamMode)
      val courseSchedule = lesson.getCourseSchedule
      // Teaching span: prefer the course's declared weeks, else derive it from
      // period / weekHour, else fall back to the whole semester.
      val startWeek = 1
      var endWeek = startWeek
      endWeek = if (course.getWeeks != null && course.getWeeks > 0) startWeek + course.getWeeks - 1 else if (course.getWeekHour != 0) startWeek + (course.getPeriod / course.getWeekHour).toInt -
        1 else startWeek + semester.getWeeks - 1
      courseSchedule.setWeekState(WeekStates.build(startWeek + "-" + endWeek))
      val teachClass = lesson.getTeachClass.asInstanceOf[TeachClassBean]
      teachClass.setName("重修班")
      lesson.setCreatedAt(new Date(System.currentTimeMillis()))
      lesson.setUpdatedAt(new Date(System.currentTimeMillis()))
      lesson.setAuditStatus(CommonAuditState.UNSUBMITTED)
      lessons.add(lesson)
    }
    // NOTE(review): ArrayList is not visibly imported in this file; confirm it
    // resolves (e.g. via a wildcard import elsewhere) before relying on this line.
    lessonSeqNoGenerator.genLessonSeqNos(new ArrayList[Lesson](lessons))
    for (lesson <- lessons) {
      entityDao.saveOrUpdate(lesson)
      lessonPlanRelationDao.saveRelation(null, lesson)
    }
    redirect("search", "info.action.success")
  }
}
| openurp/edu-eams-webapp | grade/src/main/scala/org/openurp/edu/eams/teach/grade/lesson/web/action/RetakeAction.scala | Scala | gpl-3.0 | 8,154 |
package de.frosner.ddq.constraints
import org.scalatest.{FlatSpec, Matchers}
import scala.util.{Failure, Success}
/** Unit tests for ConstraintUtil.tryToStatus covering all three outcomes. */
class ConstraintUtilTest extends FlatSpec with Matchers {

  "tryToStatus" should "return success if the condition is met and the try was a success" in {
    val successfulTry = Success(5)
    val status = ConstraintUtil.tryToStatus(successfulTry, (value: Int) => value == 5)
    status shouldBe ConstraintSuccess
  }

  it should "return failure if the condition is not met and the try was a success" in {
    val successfulTry = Success(5)
    val status = ConstraintUtil.tryToStatus(successfulTry, (value: Int) => value == 4)
    status shouldBe ConstraintFailure
  }

  it should "return error if the try was a failure" in {
    case object DummyException extends Throwable
    val failedTry = Failure(DummyException)
    ConstraintUtil.tryToStatus(failedTry, (value: Int) => value == 5) shouldBe ConstraintError(DummyException)
  }
}
| FRosner/drunken-data-quality | src/test/scala/de/frosner/ddq/constraints/ConstraintUtilTest.scala | Scala | apache-2.0 | 869 |
/*
* Copyright 2012-2016 Steve Chaloner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package be.objectify.deadbolt.scala.composite
import java.util.regex.Pattern
import be.objectify.deadbolt.scala.cache.PatternCache
import be.objectify.deadbolt.scala._
import be.objectify.deadbolt.scala.models.{Subject, PatternType}
import be.objectify.deadbolt.scala.testhelpers.{SecurityPermission, SecurityRole, User}
import org.mockito.ArgumentMatchers
import org.specs2.mock.Mockito
import play.api.mvc.Request
import play.api.test.PlaySpecification
import scala.concurrent.{ExecutionContext, Future}
/**
 * Specs for [[CompositeConstraints]]: Restrict, Pattern (REGEX / EQUALITY / CUSTOM),
 * Dynamic, SubjectPresent and SubjectNotPresent, exercised against mocked
 * [[DeadboltHandler]]s and [[DynamicResourceHandler]]s.
 *
 * @author Steve Chaloner (steve@objectify.be)
 */
object CompositeConstraintsTest extends PlaySpecification with Mockito {

  // Analyzer backed by a trivial pattern cache that compiles every pattern on demand.
  val analyzer: StaticConstraintAnalyzer = new StaticConstraintAnalyzer(new PatternCache {
    override def apply(value: String): Option[Pattern] = Some(Pattern.compile(value))
  })

  val ec = scala.concurrent.ExecutionContext.Implicits.global

  val ecProvider: ExecutionContextProvider = new ExecutionContextProvider {
    override def get(): ExecutionContext = ec
  }

  // The constraints under test, wired with the analyzer and execution context above.
  val logic: ConstraintLogic = new ConstraintLogic(analyzer, ecProvider)

  val constraints: CompositeConstraints = new CompositeConstraints(logic, ecProvider)

  "Restrict" should {
    "return false when" >> {
      "there is no subject" >> {
        val result = constraints.Restrict(List(Array("foo")))(request(None), handler(None))
        await(result) should beFalse
      }
      "the subject has no roles" >> {
        val subject = Some(User())
        val result = constraints.Restrict(List(Array("foo")))(request(subject), handler(subject))
        await(result) should beFalse
      }
      "the subject has all required roles but one required role is negated" >> {
        val subject = Some(User(roles = List(SecurityRole("admin"), SecurityRole("editor"))))
        val result = constraints.Restrict(List(Array("admin", "!editor")))(request(subject), handler(subject))
        await(result) should beFalse
      }
      "the subject has all but one of the required roles" >> {
        val subject = Some(User(roles = List(SecurityRole("admin"), SecurityRole("editor"))))
        val result = constraints.Restrict(List(Array("admin", "editor", "foo")))(request(subject), handler(subject))
        await(result) should beFalse
      }
      "there are no required roles" >> {
        val subject = Some(User(roles = List(SecurityRole("admin"), SecurityRole("editor"))))
        val result = constraints.Restrict(List(Array()))(request(subject), handler(subject))
        await(result) should beFalse
      }
      "there are no role matches" >> {
        val subject = Some(User(roles = List(SecurityRole("admin"), SecurityRole("editor"))))
        val result = constraints.Restrict(List(Array("foo", "bar")))(request(subject), handler(subject))
        await(result) should beFalse
      }
    }
    "return true when" should {
      "the subject has all required roles" >> {
        val subject = Some(User(roles = List(SecurityRole("admin"), SecurityRole("editor"))))
        val result = constraints.Restrict(List(Array("admin", "editor")))(request(subject), handler(subject))
        await(result) should beTrue
      }
      "the subject has all required roles plus others" >> {
        val subject = Some(User(roles = List(SecurityRole("admin"), SecurityRole("editor"), SecurityRole("foo"))))
        val result = constraints.Restrict(List(Array("admin", "editor")))(request(subject), handler(subject))
        await(result) should beTrue
      }
    }
  }

  "Pattern of type" >> {
    "REGEX" >> {
      "return false when " >> {
        "maybeSubject is None" >> {
          val result = constraints.Pattern("[ABC]",
            PatternType.REGEX,
            meta = None,
            invert = false)(request(None), handler(None))
          await(result) should beFalse
        }
        "none of the permissions match the regular expression" >> {
          val subject = Some(User(permissions = List(SecurityPermission("D"))))
          val result = constraints.Pattern("[ABC]",
            PatternType.REGEX,
            meta = None,
            invert = false)(request(subject), handler(subject))
          await(result) should beFalse
        }
        "the subject has no permissions" >> {
          val subject = Some(User())
          val result = constraints.Pattern("[ABC]",
            PatternType.REGEX,
            meta = None,
            invert = false)(request(subject), handler(subject))
          await(result) should beFalse
        }
      }
      "return true when" >> {
        "the subject has one permission that matches the regular expression" >> {
          val subject = Some(User(permissions = List(SecurityPermission("B"))))
          val result = constraints.Pattern("[ABC]",
            PatternType.REGEX,
            meta = None,
            invert = false)(request(subject), handler(subject))
          await(result) should beTrue
        }
        "the subject has one permission that matches the regular expression, plus others" >> {
          val subject = Some(User(permissions = List(SecurityPermission("B"), SecurityPermission("D"))))
          val result = constraints.Pattern("[ABC]",
            PatternType.REGEX,
            meta = None,
            invert = false)(request(subject), handler(subject))
          await(result) should beTrue
        }
      }
    }
    "EQUALITY" should {
      "return false when" should {
        "maybeSubject is None" >> {
          val result: Future[Boolean] = constraints.Pattern("foo",
            PatternType.EQUALITY,
            meta = None,
            invert = false)(request(None), handler(None))
          await(result) should beFalse
        }
        "none of the permissions equal the pattern value" >> {
          val subject = Some(User(permissions = List(SecurityPermission("bar"))))
          val result: Future[Boolean] = constraints.Pattern("foo",
            PatternType.EQUALITY,
            meta = None,
            invert = false)(request(subject), handler(subject))
          await(result) should beFalse
        }
        "the subject has no permissions" >> {
          val subject = Some(User())
          val result: Future[Boolean] = constraints.Pattern("foo",
            PatternType.EQUALITY,
            meta = None,
            invert = false)(request(subject), handler(subject))
          await(result) should beFalse
        }
      }
      "return true when" >> {
        "the subject has one permission that equals the pattern value" >> {
          val subject = Some(User(permissions = List(SecurityPermission("foo"))))
          val result: Future[Boolean] = constraints.Pattern("foo",
            PatternType.EQUALITY,
            meta = None,
            invert = false)(request(subject), handler(subject))
          await(result) should beTrue
        }
        "the subject has one permission that equals the pattern value, plus others" >> {
          val subject = Some(User(permissions = List(SecurityPermission("foo"), SecurityPermission("bar"))))
          val result: Future[Boolean] = constraints.Pattern("foo",
            PatternType.EQUALITY,
            meta = None,
            invert = false)(request(subject), handler(subject))
          await(result) should beTrue
        }
      }
    }
    "CUSTOM" should {
      "return false when" >> {
        "dynamicResourceHandler is None" >> {
          val subject = Some(User())
          val result: Future[Boolean] = constraints.Pattern("foo",
            PatternType.CUSTOM,
            meta = None,
            invert = false)(request(subject), handler(subject, None))
          await(result) must throwA(new RuntimeException("A custom pattern is specified but no dynamic resource handler is provided"))
        }
        "checkPermission returns false" >> {
          val subject = Some(User())
          val drh = mock[DynamicResourceHandler]
          val dh = handler(subject, Some(drh))
          drh.checkPermission(ArgumentMatchers.eq("foo"), ArgumentMatchers.eq(None), ArgumentMatchers.eq(dh), any[AuthenticatedRequest[_]]) returns Future.successful(false)
          val result: Future[Boolean] = constraints.Pattern("foo",
            PatternType.CUSTOM,
            meta = None,
            invert = false)(request(subject), dh)
          await(result) should beFalse
        }
      }
      "return true when" >> {
        "checkPermission returns true" >> {
          val subject = Some(User())
          val drh = mock[DynamicResourceHandler]
          val dh = handler(subject, Some(drh))
          drh.checkPermission(ArgumentMatchers.eq("foo"), ArgumentMatchers.eq(None), ArgumentMatchers.eq(dh), any[AuthenticatedRequest[_]]) returns Future.successful(true)
          val result: Future[Boolean] = constraints.Pattern("foo",
            PatternType.CUSTOM,
            meta = None,
            invert = false)(request(subject), dh)
          await(result) should beTrue
        }
      }
    }
  }

  "Dynamic" should {
    "return false when" >> {
      "dynamicResourceHandler is None" >> {
        val subject = Some(User())
        val result: Future[Boolean] = constraints.Dynamic("foo",
          Some("bar"))(request(subject), handler(subject, None))
        await(result) must throwA(new RuntimeException("A dynamic resource is specified but no dynamic resource handler is provided"))
      }
      "isAllowed returns false" >> {
        val subject = Some(User())
        val drh = mock[DynamicResourceHandler]
        val dh = handler(subject, Some(drh))
        drh.isAllowed(ArgumentMatchers.eq("foo"), ArgumentMatchers.eq(Some("bar")), ArgumentMatchers.eq(dh), any[AuthenticatedRequest[_]]) returns Future.successful(false)
        val result: Future[Boolean] = constraints.Dynamic("foo",
          Some("bar"))(request(subject), dh)
        await(result) should beFalse
      }
    }
    "return true when" >> {
      "isAllowed returns true" >> {
        val subject = Some(User())
        val drh = mock[DynamicResourceHandler]
        val dh = handler(subject, Some(drh))
        drh.isAllowed(ArgumentMatchers.eq("foo"), ArgumentMatchers.eq(Some("bar")), ArgumentMatchers.eq(dh), any[AuthenticatedRequest[_]]) returns Future.successful(true)
        val result: Future[Boolean] = constraints.Dynamic("foo",
          Some("bar"))(request(subject), dh)
        await(result) should beTrue
      }
    }
  }

  "SubjectPresent " should {
    "return true when a subject is present" >> {
      val subject = Some(User())
      val result: Future[Boolean] = constraints.SubjectPresent()(request(subject), handler(subject))
      await(result) should beTrue
    }
    "return false when a subject is not present" >> {
      val result: Future[Boolean] = constraints.SubjectPresent()(request(None), handler(None))
      await(result) should beFalse
    }
  }

  "SubjectNotPresent " should {
    "return false when a subject is present" >> {
      val subject = Some(User())
      val result: Future[Boolean] = constraints.SubjectNotPresent()(request(subject), handler(subject))
      await(result) should beFalse
    }
    "return true when a subject is not present" >> {
      val result: Future[Boolean] = constraints.SubjectNotPresent()(request(None), handler(None))
      await(result) should beTrue
    }
  }

  /** Builds an [[AuthenticatedRequest]] wrapping a mocked Play request with the given subject. */
  private def request[A](maybeSubject: Option[Subject]): AuthenticatedRequest[A] = new AuthenticatedRequest(mock[Request[A]], maybeSubject)

  /** A mocked handler with the given subject and no dynamic resource handler. */
  private def handler(maybeSubject: Option[Subject]): DeadboltHandler = handler(maybeSubject, None)

  /**
   * A mocked handler stubbed to return the given subject and dynamic resource handler.
   *
   * NOTE(review): the method's last expression is the second stubbing call, not the
   * `handler` val — this relies on specs2's `returns` yielding the mock; confirm.
   */
  private def handler(maybeSubject: Option[Subject],
                      maybeDrh: Option[DynamicResourceHandler]): DeadboltHandler = {
    val handler = mock[DeadboltHandler]
    handler.getSubject(any[AuthenticatedRequest[_]]) returns Future {maybeSubject}(ec)
    handler.getDynamicResourceHandler(any[AuthenticatedRequest[_]]) returns Future {maybeDrh}(ec)
  }
}
| schaloner/deadbolt-2-scala | code/test/be/objectify/deadbolt/scala/composite/CompositeConstraintsTest.scala | Scala | apache-2.0 | 14,929 |
package slamdata.engine.physical.mongodb
import slamdata.Predef._
import org.specs2.execute.{Result}
import org.specs2.scalaz.DisjunctionMatchers
import scalaz.concurrent._
import scalaz.stream._
import slamdata.engine._
import slamdata.engine.fp._
import slamdata.engine.fs._; import Path._
class FileSystemSpecs extends BackendTest with DisjunctionMatchers {
import Backend._
import Errors._
import slamdata.engine.fs._
/** A single-element stream holding the document { "a": 1 }. */
def oneDoc: Process[Task, Data] =
  Process.emit(Data.Obj(ListMap("a" -> Data.Int(1))))

/** A single-element stream holding a different document, { "b": 2 }. */
def anotherDoc: Process[Task, Data] =
  Process.emit(Data.Obj(ListMap("b" -> Data.Int(2))))
tests { case (backendName, fs) =>
val TestDir = testRootDir(fs) ++ genTempDir.run
backendName should {
"FileSystem" should {
// Run the task to create a single FileSystem instance for each run (I guess)
"list root" in {
fs.ls(Path(".")).map(_ must contain(FilesystemNode(fs.defaultPath, Plain))).run.run must beRightDisjunction
}
"have zips" in {
// This is the collection we use for all of our examples, so might as well make sure it's there.
fs.ls(fs.defaultPath).map(_ must contain(FilesystemNode(Path("./zips"), Plain))).run.run must beRightDisjunction
fs.count(fs.defaultPath ++ Path("zips")).run.run must beRightDisjunction(29353L)
}
"read zips with skip and limit" in {
(for {
cursor <- fs.scan(fs.defaultPath ++ Path("zips"), 100, Some(5)).runLog
process <- fs.scan(fs.defaultPath ++ Path("zips"), 0, None).drop(100).take(5).runLog
} yield {
cursor must_== process
}).fold(_ must beNull, ɩ).run
}
"fail when reading zips with negative skip and zero limit" in {
fs.scan(fs.defaultPath ++ Path("zips"), -1, None).run.fold(_ must beNull, ɩ).attemptRun must beLeftDisjunction
fs.scan(fs.defaultPath ++ Path("zips"), 0, Some(0)).run.fold(_ must beNull, ɩ).attemptRun must beLeftDisjunction
}
"save one" in {
(for {
tmp <- liftE[ProcessingError](genTempFile)
before <- fs.ls(TestDir).leftMap(PPathError(_))
_ <- fs.save(TestDir ++ tmp, oneDoc)
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
before must not(contain(FilesystemNode(tmp, Plain)))
after must contain(FilesystemNode(tmp, Plain))
}).fold(_ must beNull, ɩ).run
}
"allow duplicate saves" in {
(for {
tmp <- liftE[ProcessingError](genTempFile)
_ <- fs.save(TestDir ++ tmp, oneDoc)
before <- fs.ls(TestDir).leftMap(PPathError(_))
_ <- fs.save(TestDir ++ tmp, oneDoc)
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
before must contain(FilesystemNode(tmp, Plain))
after must contain(FilesystemNode(tmp, Plain))
}).fold(_ must beNull, ɩ).run
}
"fail duplicate creates" in {
(for {
tmp <- liftE[ProcessingError](genTempFile)
_ <- fs.create(TestDir ++ tmp, oneDoc)
before <- fs.ls(TestDir).leftMap(PPathError(_))
rez <- liftE[ProcessingError](fs.create(TestDir ++ tmp, anotherDoc).run)
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
after must_== before
rez must beLeftDisjunction(PPathError(ExistingPathError(TestDir ++ tmp, Some("can’t be created, because it already exists"))))
}).fold(_ must beNull, ɩ).run
}
"fail initial replace" in {
(for {
tmp <- liftP(genTempFile)
before <- fs.ls(TestDir)
rez <- liftP(fs.replace(TestDir ++ tmp, anotherDoc).run)
after <- fs.ls(TestDir)
} yield {
after must_== before
rez must beLeftDisjunction(PPathError(NonexistentPathError(TestDir ++ tmp, Some("can’t be replaced, because it doesn’t exist"))))
}).fold(_ must beNull, ɩ).run
}
"replace one" in {
(for {
tmp <- liftE[ProcessingError](genTempFile)
_ <- fs.create(TestDir ++ tmp, oneDoc)
before <- fs.ls(TestDir).leftMap(PPathError(_))
_ <- fs.replace(TestDir ++ tmp, anotherDoc)
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
before must contain(FilesystemNode(tmp, Plain))
after must contain(FilesystemNode(tmp, Plain))
}).fold(_ must beNull, ɩ).run
}
"save one (subdir)" in {
(for {
tmpDir <- liftE[ProcessingError](genTempDir)
tmp = Path("file1")
before <- fs.ls(TestDir ++ tmpDir).leftMap(PPathError(_))
_ <- fs.save(TestDir ++ tmpDir ++ tmp, oneDoc)
after <- fs.ls(TestDir ++ tmpDir).leftMap(PPathError(_))
} yield {
before must not(contain(FilesystemNode(tmp, Plain)))
after must contain(FilesystemNode(tmp, Plain))
}).fold(_ must beNull, ɩ).run
}
"save one with error" in {
val badJson = Data.Int(1)
val data: Process[Task, Data] = Process.emit(badJson)
(for {
tmpDir <- liftP(genTempDir)
file = tmpDir ++ Path("file1")
before <- fs.ls(TestDir ++ tmpDir)
rez <- liftP(fs.save(TestDir ++ file, data).run)
after <- fs.ls(TestDir ++ tmpDir)
} yield {
rez must beLeftDisjunction
after must_== before
}).fold(_ must beNull, ɩ).run
}
"save many (approx. 10 MB in 1K docs)" in {
val sizeInMB = 10.0
// About 0.5K each of data, and 0.25K of index, etc.:
def jsonTree(depth: Int): Data =
if (depth == 0) Data.Arr(Data.Str("abc") :: Data.Int(123) :: Data.Str("do, re, mi") :: Nil)
else Data.Obj(ListMap("left" -> jsonTree(depth-1), "right" -> jsonTree(depth-1)))
def json(i: Int) = Data.Obj(ListMap("seq" -> Data.Int(i), "filler" -> jsonTree(3)))
// This is _very_ approximate:
val bytesPerDoc = 750
val count = (sizeInMB*1024*1024/bytesPerDoc).toInt
val data: Process[Task, Data] = Process.emitAll(0 until count).map(json(_))
(for {
tmp <- liftE[ProcessingError](genTempFile)
_ <- fs.save(TestDir ++ tmp, data)
after <- fs.ls(TestDir).leftMap(PPathError(_))
_ <- fs.delete(TestDir ++ tmp).leftMap(PPathError(_)) // clean up this one eagerly, since it's a large file
} yield {
after must contain(FilesystemNode(tmp, Plain))
}).fold(_ must beNull, ɩ).run
}
"append one" in {
val json = Data.Obj(ListMap("a" ->Data.Int(1)))
val data: Process[Task, Data] = Process.emit(json)
(for {
tmp <- liftE[ProcessingError](genTempFile)
rez <- fs.append(TestDir ++ tmp, data).runLog.leftMap(PPathError(_))
saved <- fs.scan(TestDir ++ tmp, 0, None).runLog.leftMap(PResultError(_))
} yield {
rez.size must_== 0
saved.size must_== 1
}).fold(_ must beNull, ɩ).run
}
"append with one ok and one error" in {
val json1 = Data.Obj(ListMap("a" ->Data.Int(1)))
val json2 = Data.Int(1)
val data: Process[Task, Data] = Process.emitAll(json1 :: json2 :: Nil)
(for {
tmp <- liftE[ProcessingError](genTempFile)
rez <- fs.append(TestDir ++ tmp, data).runLog.leftMap(PPathError(_))
saved <- fs.scan(TestDir ++ tmp, 0, None).runLog.leftMap(PResultError(_))
} yield {
rez.size must_== 1
saved.size must_== 1
}).fold(_ must beNull, ɩ).run
}
"move file" in {
(for {
tmp1 <- liftE[ProcessingError](genTempFile)
tmp2 <- liftE(genTempFile)
_ <- fs.save(TestDir ++ tmp1, oneDoc)
_ <- fs.move(TestDir ++ tmp1, TestDir ++ tmp2, FailIfExists).leftMap(PPathError(_))
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
after must not(contain(FilesystemNode(tmp1, Plain)))
after must contain(FilesystemNode(tmp2, Plain))
}).fold(_ must beNull, ɩ).run
}
"error: move file to existing path" in {
(for {
tmp1 <- liftE[ProcessingError](genTempFile)
tmp2 <- liftE(genTempFile)
_ <- fs.save(TestDir ++ tmp1, oneDoc)
_ <- fs.save(TestDir ++ tmp2, oneDoc)
rez <- liftE(fs.move(TestDir ++ tmp1, TestDir ++ tmp2, FailIfExists).leftMap(PPathError(_)).run.attempt)
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
rez must beLeftDisjunction
after must contain(FilesystemNode(tmp1, Plain))
after must contain(FilesystemNode(tmp2, Plain))
}).fold(_ must beNull, ɩ).run
}
"move file to existing path with Overwrite semantics" in {
(for {
tmp1 <- liftE[ProcessingError](genTempFile)
tmp2 <- liftE(genTempFile)
_ <- fs.save(TestDir ++ tmp1, oneDoc)
_ <- fs.save(TestDir ++ tmp2, oneDoc)
_ <- fs.move(TestDir ++ tmp1, TestDir ++ tmp2, Overwrite).leftMap(PPathError(_))
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
after must not(contain(FilesystemNode(tmp1, Plain)))
after must contain(FilesystemNode(tmp2, Plain))
}).fold(_ must beNull, ɩ).run
}
"move file to itself (NOP)" in {
(for {
tmp1 <- liftE[ProcessingError](genTempFile)
_ <- fs.save(TestDir ++ tmp1, oneDoc)
_ <- fs.move(TestDir ++ tmp1, TestDir ++ tmp1, FailIfExists).leftMap(PPathError(_))
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
after must contain(FilesystemNode(tmp1, Plain))
}).fold(_ must beNull, ɩ).run
}
"move dir" in {
(for {
tmpDir1 <- liftE[ProcessingError](genTempDir)
tmp1 = tmpDir1 ++ Path("file1")
tmp2 = tmpDir1 ++ Path("file2")
_ <- fs.save(TestDir ++ tmp1, oneDoc)
_ <- fs.save(TestDir ++ tmp2, oneDoc)
tmpDir2 <- liftE(genTempDir)
_ <- fs.move(TestDir ++ tmpDir1, TestDir ++ tmpDir2, FailIfExists).leftMap(PPathError(_))
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
after must not(contain(FilesystemNode(tmpDir1, Plain)))
after must contain(FilesystemNode(tmpDir2, Plain))
}).fold(_ must beNull, ɩ).run
}
"move dir with destination given as file path" in {
(for {
tmpDir1 <- liftE[ProcessingError](genTempDir)
tmp1 = tmpDir1 ++ Path("file1")
tmp2 = tmpDir1 ++ Path("file2")
_ <- fs.save(TestDir ++ tmp1, oneDoc)
_ <- fs.save(TestDir ++ tmp2, oneDoc)
tmpDir2 <- liftE(genTempFile)
_ <- fs.move(TestDir ++ tmpDir1, TestDir ++ tmpDir2, FailIfExists).leftMap(PPathError(_))
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
after must not(contain(FilesystemNode(tmpDir1, Plain)))
after must contain(FilesystemNode(tmpDir2.asDir, Plain))
}).fold(_ must beNull, ɩ).run
}
"move missing dir to new (also missing) location (NOP)" in {
(for {
tmpDir1 <- liftE[ProcessingError](genTempDir)
tmpDir2 <- liftE(genTempDir)
_ <- fs.move(TestDir ++ tmpDir1, TestDir ++ tmpDir2, FailIfExists).leftMap(PPathError(_))
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
after must not(contain(FilesystemNode(tmpDir1, Plain)))
after must not(contain(FilesystemNode(tmpDir2, Plain)))
}).fold(_ must beNull, ɩ).run
}
"delete file" in {
(for {
tmp <- liftE[ProcessingError](genTempFile)
_ <- fs.save(TestDir ++ tmp, oneDoc)
_ <- fs.delete(TestDir ++ tmp).leftMap(PPathError(_))
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
after must not(contain(FilesystemNode(tmp, Plain)))
}).fold(_ must beNull, ɩ).run
}
"delete file but not sibling" in {
val tmp1 = Path("file1")
val tmp2 = Path("file2")
(for {
tmpDir <- liftE[ProcessingError](genTempDir)
_ <- fs.save(TestDir ++ tmpDir ++ tmp1, oneDoc)
_ <- fs.save(TestDir ++ tmpDir ++ tmp2, oneDoc)
before <- fs.ls(TestDir ++ tmpDir).leftMap(PPathError(_))
_ <- fs.delete(TestDir ++ tmpDir ++ tmp1).leftMap(PPathError(_))
after <- fs.ls(TestDir ++ tmpDir).leftMap(PPathError(_))
} yield {
before must contain(FilesystemNode(tmp1, Plain))
after must not(contain(FilesystemNode(tmp1, Plain)))
after must contain(FilesystemNode(tmp2, Plain))
}).fold(_ must beNull, ɩ).run
}
"delete dir" in {
(for {
tmpDir <- liftE[ProcessingError](genTempDir)
tmp1 = tmpDir ++ Path("file1")
tmp2 = tmpDir ++ Path("file2")
_ <- fs.save(TestDir ++ tmp1, oneDoc)
_ <- fs.save(TestDir ++ tmp2, oneDoc)
_ <- fs.delete(TestDir ++ tmpDir).leftMap(PPathError(_))
after <- fs.ls(TestDir).leftMap(PPathError(_))
} yield {
after must not(contain(FilesystemNode(tmpDir, Plain)))
}).fold(_ must beNull, ɩ).run
}
"delete missing file (not an error)" in {
(for {
tmp <- genTempFile
rez <- fs.delete(TestDir ++ tmp).run.attempt
} yield {
rez must beRightDisjunction
}).run
}
}
"query evaluation" should {
import slamdata.engine.sql.{Expr, Query, SQLParser}
import slamdata.engine.{QueryRequest, Variables}
def parse(query: String) =
liftE[ProcessingError](SQLParser.parseInContext(Query(query), TestDir).fold(e => Task.fail(new RuntimeException(e.message)), Task.now))
def eval(fs: Backend, expr: Expr, path: Option[Path]):
ProcessingTask[IndexedSeq[Data]] =
fs.eval(QueryRequest(expr, path, Variables(Map()))).run._2.fold(
e => liftE(Task.fail(new RuntimeException(e.message))),
_.runLog)
"leave no temps behind" in {
(for {
tmp <- liftE[ProcessingError](genTempFile)
_ <- fs.save(TestDir ++ tmp, oneDoc)
before <- fs.lsAll(Path.Root).leftMap(PPathError(_))
// NB: this query *does* produce a temporary result (not a simple read)
expr <- parse("select a from " + tmp.simplePathname)
rez <- eval(fs, expr, None)
after <- fs.lsAll(Path.Root).leftMap(PPathError(_))
} yield {
rez must_== Vector(Data.Obj(ListMap("a" -> Data.Int(1))))
after must contain(exactly(before.toList: _*))
}).fold(_ must beNull, ɩ).run
}
"leave only the output behind" in {
(for {
tmp <- liftE[ProcessingError](genTempFile)
_ <- fs.save(TestDir ++ tmp, oneDoc)
before <- fs.lsAll(Path.Root).leftMap(PPathError(_))
out <- liftE(genTempFile)
// NB: this query *does* produce a temporary result (not a simple read)
expr <- parse("select a from " + tmp.simplePathname)
rez <- eval(fs, expr, Some(TestDir ++ out))
after <- fs.lsAll(Path.Root).leftMap(PPathError(_))
} yield {
rez must_== Vector(Data.Obj(ListMap("a" -> Data.Int(1))))
after must contain(exactly(FilesystemNode(TestDir ++ out, Plain) :: before.toList: _*))
}).fold(_ must beNull, ɩ).run
}
}
}
val cleanup = step {
deleteTempFiles(fs, TestDir).run
}
}
}
| wemrysi/quasar | it/src/test/scala/slamdata/engine/fs/filesystem.scala | Scala | apache-2.0 | 16,635 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.scheduler
import scala.util.Random
import org.apache.spark.{ExecutorAllocationClient, SparkConf}
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.DECOMMISSION_ENABLED
import org.apache.spark.internal.config.Streaming._
import org.apache.spark.resource.ResourceProfile
import org.apache.spark.scheduler.ExecutorDecommissionInfo
import org.apache.spark.streaming.util.RecurringTimer
import org.apache.spark.util.{Clock, Utils}
/**
* Class that manages executors allocated to a StreamingContext, and dynamically requests or kills
* executors based on the statistics of the streaming computation. This is different from the core
* dynamic allocation policy; the core policy relies on executors being idle for a while, but the
* micro-batch model of streaming prevents any particular executors from being idle for a long
* time. Instead, the measure of "idle-ness" needs to be based on the time taken to process
* each batch.
*
* At a high level, the policy implemented by this class is as follows:
* - Use StreamingListener interface get batch processing times of completed batches
* - Periodically take the average batch completion times and compare with the batch interval
* - If (avg. proc. time / batch interval) >= scaling up ratio, then request more executors.
* The number of executors requested is based on the ratio = (avg. proc. time / batch interval).
* - If (avg. proc. time / batch interval) <= scaling down ratio, then try to kill an executor that
* is not running a receiver.
*
* This features should ideally be used in conjunction with backpressure, as backpressure ensures
* system stability, while executors are being readjusted.
*
* Note that an initial set of executors (spark.executor.instances) was allocated when the
* SparkContext was created. This class scales executors up/down after the StreamingContext
* has started.
*/
private[streaming] class ExecutorAllocationManager(
    client: ExecutorAllocationClient,
    receiverTracker: ReceiverTracker,
    conf: SparkConf,
    batchDurationMs: Long,
    clock: Clock) extends StreamingListener with Logging {
  // How often (in seconds) a scaling decision is made, and the up/down thresholds
  // on (average batch processing time / batch interval) used by manageAllocation().
  private val scalingIntervalSecs = conf.get(STREAMING_DYN_ALLOCATION_SCALING_INTERVAL)
  private val scalingUpRatio = conf.get(STREAMING_DYN_ALLOCATION_SCALING_UP_RATIO)
  private val scalingDownRatio = conf.get(STREAMING_DYN_ALLOCATION_SCALING_DOWN_RATIO)
  // Lower bound defaults to the number of receivers (at least 1) so that receivers
  // always have somewhere to run.
  private val minNumExecutors = conf.get(STREAMING_DYN_ALLOCATION_MIN_EXECUTORS)
    .getOrElse(math.max(1, receiverTracker.numReceivers()))
  private val maxNumExecutors = conf.get(STREAMING_DYN_ALLOCATION_MAX_EXECUTORS)
  // Fires manageAllocation() every scalingIntervalSecs seconds on its own thread.
  private val timer = new RecurringTimer(clock, scalingIntervalSecs * 1000,
    _ => manageAllocation(), "streaming-executor-allocation-manager")
  // Sum and count of batch processing times recorded since the last scaling
  // decision; written from the listener thread and reset by manageAllocation().
  @volatile private var batchProcTimeSum = 0L
  @volatile private var batchProcTimeCount = 0
  // Fail fast on inconsistent configuration before the manager can be started.
  validateSettings()
  /** Starts the periodic allocation-management timer. */
  def start(): Unit = {
    timer.start()
    logInfo(s"ExecutorAllocationManager started with " +
      s"ratios = [$scalingUpRatio, $scalingDownRatio] and interval = $scalingIntervalSecs sec")
  }
  /** Stops the timer, interrupting any scaling decision in progress. */
  def stop(): Unit = {
    timer.stop(interruptTimer = true)
    logInfo("ExecutorAllocationManager stopped")
  }
  /**
   * Manage executor allocation by requesting or killing executors based on the collected
   * batch statistics.
   */
  private def manageAllocation(): Unit = synchronized {
    logInfo(s"Managing executor allocation with ratios = [$scalingUpRatio, $scalingDownRatio]")
    if (batchProcTimeCount > 0) {
      val averageBatchProcTime = batchProcTimeSum / batchProcTimeCount
      val ratio = averageBatchProcTime.toDouble / batchDurationMs
      logInfo(s"Average: $averageBatchProcTime, ratio = $ratio" )
      if (ratio >= scalingUpRatio) {
        logDebug("Requesting executors")
        // Scale the request with how far behind we are, but ask for at least one.
        val numNewExecutors = math.max(math.round(ratio).toInt, 1)
        requestExecutors(numNewExecutors)
      } else if (ratio <= scalingDownRatio) {
        logDebug("Killing executors")
        killExecutor()
      }
    }
    // Start a fresh measurement window for the next interval.
    batchProcTimeSum = 0
    batchProcTimeCount = 0
  }
  /** Request the specified number of executors over the currently active one */
  private def requestExecutors(numNewExecutors: Int): Unit = {
    require(numNewExecutors >= 1)
    val allExecIds = client.getExecutorIds()
    logDebug(s"Executors (${allExecIds.size}) = ${allExecIds}")
    // Clamp the new total into [minNumExecutors, maxNumExecutors].
    val targetTotalExecutors =
      math.max(math.min(maxNumExecutors, allExecIds.size + numNewExecutors), minNumExecutors)
    // Just map the targetTotalExecutors to the default ResourceProfile
    client.requestTotalExecutors(
      Map(ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID -> targetTotalExecutors),
      Map(ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID -> 0),
      Map.empty)
    logInfo(s"Requested total $targetTotalExecutors executors")
  }
  /** Kill an executor that is not running any receiver, if possible */
  private def killExecutor(): Unit = {
    val allExecIds = client.getExecutorIds()
    logDebug(s"Executors (${allExecIds.size}) = ${allExecIds}")
    if (allExecIds.nonEmpty && allExecIds.size > minNumExecutors) {
      // Never kill an executor hosting a receiver; pick a random one of the rest.
      val execIdsWithReceivers = receiverTracker.allocatedExecutors.values.flatten.toSeq
      logInfo(s"Executors with receivers (${execIdsWithReceivers.size}): ${execIdsWithReceivers}")
      val removableExecIds = allExecIds.diff(execIdsWithReceivers)
      logDebug(s"Removable executors (${removableExecIds.size}): ${removableExecIds}")
      if (removableExecIds.nonEmpty) {
        val execIdToRemove = removableExecIds(Random.nextInt(removableExecIds.size))
        // Prefer graceful decommission over a hard kill when it is enabled.
        if (conf.get(DECOMMISSION_ENABLED)) {
          client.decommissionExecutor(execIdToRemove,
            ExecutorDecommissionInfo("spark scale down", None),
            adjustTargetNumExecutors = true)
        } else {
          client.killExecutor(execIdToRemove)
        }
        logInfo(s"Requested to kill executor $execIdToRemove")
      } else {
        logInfo(s"No non-receiver executors to kill")
      }
    } else {
      logInfo("No available executor to kill")
    }
  }
  /** Record the processing time of a completed batch (called from the listener bus thread). */
  private def addBatchProcTime(timeMs: Long): Unit = synchronized {
    batchProcTimeSum += timeMs
    batchProcTimeCount += 1
    logDebug(
      s"Added batch processing time $timeMs, sum = $batchProcTimeSum, count = $batchProcTimeCount")
  }
  /** Sanity-check the scaling ratios and executor bounds, failing fast on misconfiguration. */
  private def validateSettings(): Unit = {
    require(
      scalingUpRatio > scalingDownRatio,
      s"Config ${STREAMING_DYN_ALLOCATION_SCALING_UP_RATIO.key} must be more than config " +
        s"${STREAMING_DYN_ALLOCATION_SCALING_DOWN_RATIO.key}")
    if (conf.contains(STREAMING_DYN_ALLOCATION_MIN_EXECUTORS.key) &&
      conf.contains(STREAMING_DYN_ALLOCATION_MAX_EXECUTORS.key)) {
      require(
        maxNumExecutors >= minNumExecutors,
        s"Config ${STREAMING_DYN_ALLOCATION_MAX_EXECUTORS.key} must be more than config " +
          s"${STREAMING_DYN_ALLOCATION_MIN_EXECUTORS.key}")
    }
  }
  // Only batches whose output operations all succeeded contribute to the statistics.
  override def onBatchCompleted(batchCompleted: StreamingListenerBatchCompleted): Unit = {
    logDebug("onBatchCompleted called: " + batchCompleted)
    if (!batchCompleted.batchInfo.outputOperationInfos.values.exists(_.failureReason.nonEmpty)) {
      batchCompleted.batchInfo.processingDelay.foreach(addBatchProcTime)
    }
  }
}
private[streaming] object ExecutorAllocationManager extends Logging {
  /**
   * Whether streaming dynamic allocation is turned on. Fails fast with an
   * IllegalArgumentException if both the core and the streaming dynamic-allocation
   * mechanisms are enabled at the same time.
   */
  def isDynamicAllocationEnabled(conf: SparkConf): Boolean = {
    val streamingEnabled = Utils.isStreamingDynamicAllocationEnabled(conf)
    val coreEnabled = Utils.isDynamicAllocationEnabled(conf)
    if (coreEnabled && streamingEnabled) {
      throw new IllegalArgumentException(
        """
          |Dynamic Allocation cannot be enabled for both streaming and core at the same time.
          |Please disable core Dynamic Allocation by setting spark.dynamicAllocation.enabled to
          |false to use Dynamic Allocation in streaming.
        """.stripMargin)
    }
    streamingEnabled
  }
  /**
   * Builds an [[ExecutorAllocationManager]] when streaming dynamic allocation is
   * enabled and an allocation client is available; returns None otherwise.
   */
  def createIfEnabled(
      client: ExecutorAllocationClient,
      receiverTracker: ReceiverTracker,
      conf: SparkConf,
      batchDurationMs: Long,
      clock: Clock): Option[ExecutorAllocationManager] = {
    // NB: check the config first so that the conflicting-configuration error above
    // is still raised even when no client is available.
    val enabled = isDynamicAllocationEnabled(conf)
    if (!enabled || client == null) {
      None
    } else {
      Some(new ExecutorAllocationManager(client, receiverTracker, conf, batchDurationMs, clock))
    }
  }
}
| maropu/spark | streaming/src/main/scala/org/apache/spark/streaming/scheduler/ExecutorAllocationManager.scala | Scala | apache-2.0 | 9,186 |
/**
* Intel Intrinsics for Lightweight Modular Staging Framework
* https://github.com/ivtoskov/lms-intrinsics
* Department of Computer Science, ETH Zurich, Switzerland
* __ _ __ _ _
* / /____ ___ _____ (_)____ / /_ _____ (_)____ _____ (_)_____ _____
* / // __ `__ \\ / ___/______ / // __ \\ / __// ___// // __ \\ / ___// // ___// ___/
* / // / / / / /(__ )/_____// // / / // /_ / / / // / / /(__ )/ // /__ (__ )
* /_//_/ /_/ /_//____/ /_//_/ /_/ \\__//_/ /_//_/ /_//____//_/ \\___//____/
*
* Copyright (C) 2017 Ivaylo Toskov (itoskov@ethz.ch)
* Alen Stojanov (astojanov@inf.ethz.ch)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.ethz.acl.intrinsics
import ch.ethz.acl.intrinsics.MicroArchType._
import ch.ethz.acl.passera.unsigned.{UByte, UInt, ULong, UShort}
import scala.reflect.SourceContext
import scala.language.higherKinds
trait AVX512020 extends IntrinsicsBase {
  // --- IR node definitions --------------------------------------------------
  // Each case class below is a staged IR node standing for one AVX-512 intrinsic;
  // the doc comment preceding each one mirrors the Intel Intrinsics Guide
  // description of that intrinsic. `performance` is always Map.empty here: no
  // per-microarchitecture timing data is recorded for these intrinsics.
  /**
   * Shuffle 8-bit integers in "a" and "b" using the corresponding selector and
   * index in "idx", and store the results in "dst" using writemask "k" (elements
   * are copied from "a" when the corresponding mask bit is not set).
   * a: __m128i, idx: __m128i, k: __mmask16, b: __m128i
   */
  case class MM_MASK2_PERMUTEX2VAR_EPI8(a: Exp[__m128i], idx: Exp[__m128i], k: Exp[Int], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] {
    val category = List(IntrinsicsCategory.Swizzle)
    val intrinsicType = List()
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Convert packed single-precision (32-bit) floating-point elements in "a" to
   * packed unsigned 64-bit integers with truncation, and store the results in
   * "dst".
   * a: __m128
   */
  case class MM256_CVTTPS_EPU64(a: Exp[__m128]) extends IntrinsicsDef[__m256i] {
    val category = List(IntrinsicsCategory.Convert)
    val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Sign extend packed 32-bit integers in "a" to packed 64-bit integers, and store
   * the results in "dst" using writemask "k" (elements are copied from "src" when
   * the corresponding mask bit is not set).
   * src: __m512i, k: __mmask8, a: __m256i
   */
  case class MM512_MASK_CVTEPI32_EPI64(src: Exp[__m512i], k: Exp[Int], a: Exp[__m256i]) extends IntrinsicsDef[__m512i] {
    val category = List(IntrinsicsCategory.Convert)
    val intrinsicType = List(IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Sign extend packed 8-bit integers in the low 2 bytes of "a" to packed 64-bit
   * integers, and store the results in "dst" using writemask "k" (elements are
   * copied from "src" when the corresponding mask bit is not set).
   * src: __m128i, k: __mmask8, a: __m128i
   */
  case class MM_MASK_CVTEPI8_EPI64(src: Exp[__m128i], k: Exp[Int], a: Exp[__m128i]) extends IntrinsicsDef[__m128i] {
    val category = List(IntrinsicsCategory.Convert)
    val intrinsicType = List(IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Load 512-bits of integer data from memory into "dst" using a non-temporal
   * memory hint.
   * 	"mem_addr" must be aligned on a 64-byte boundary or a
   * general-protection exception may be generated.
   * mem_addr: void const*, mem_addrOffset: int
   */
  case class MM512_STREAM_LOAD_SI512[A[_], T:Typ, U:Integral](mem_addr: Exp[A[T]], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends VoidPointerIntrinsicsDef[T, U, __m512i] {
    val category = List(IntrinsicsCategory.Load)
    val intrinsicType = List(IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Convert packed 32-bit integers in "a" to packed 16-bit integers with signed
   * saturation, and store the active results (those with their respective bit set
   * in writemask "k") to unaligned memory at "base_addr".
   * base_addr: void*, k: __mmask8, a: __m128i, base_addrOffset: int
   */
  case class MM_MASK_CVTSEPI32_STOREU_EPI16[A[_], T:Typ, U:Integral](base_addr: Exp[A[T]], k: Exp[Int], a: Exp[__m128i], base_addrOffset: Exp[U])(implicit val cont: Container[A]) extends VoidPointerIntrinsicsDef[T, U, Unit] {
    val category = List(IntrinsicsCategory.Convert, IntrinsicsCategory.Store)
    val intrinsicType = List(IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Rotate the bits in each packed 64-bit integer in "a" to the right by the
   * number of bits specified in "imm8", and store the results in "dst".
   * a: __m512i, imm8: int
   */
  case class MM512_ROR_EPI64(a: Exp[__m512i], imm8: Exp[Int]) extends IntrinsicsDef[__m512i] {
    val category = List(IntrinsicsCategory.Shift)
    val intrinsicType = List(IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Unpack and interleave double-precision (64-bit) floating-point elements from
   * the low half of each 128-bit lane in "a" and "b", and store the results in
   * "dst".
   * a: __m512d, b: __m512d
   */
  case class MM512_UNPACKLO_PD(a: Exp[__m512d], b: Exp[__m512d]) extends IntrinsicsDef[__m512d] {
    val category = List(IntrinsicsCategory.Swizzle)
    val intrinsicType = List(IntrinsicsType.FloatingPoint)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Compute the bitwise NOT of packed 64-bit integers in "a" and then AND with
   * "b", and store the results in "dst" using zeromask "k" (elements are zeroed
   * out when the corresponding mask bit is not set).
   * k: __mmask8, a: __m256i, b: __m256i
   */
  case class MM256_MASKZ_ANDNOT_EPI64(k: Exp[Int], a: Exp[__m256i], b: Exp[__m256i]) extends IntrinsicsDef[__m256i] {
    val category = List(IntrinsicsCategory.Logical)
    val intrinsicType = List(IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Convert packed 32-bit integers in "a" to packed 8-bit integers with signed
   * saturation, and store the active results (those with their respective bit set
   * in writemask "k") to unaligned memory at "base_addr".
   * base_addr: void*, k: __mmask16, a: __m512i, base_addrOffset: int
   */
  case class MM512_MASK_CVTSEPI32_STOREU_EPI8[A[_], T:Typ, U:Integral](base_addr: Exp[A[T]], k: Exp[Int], a: Exp[__m512i], base_addrOffset: Exp[U])(implicit val cont: Container[A]) extends VoidPointerIntrinsicsDef[T, U, Unit] {
    val category = List(IntrinsicsCategory.Convert, IntrinsicsCategory.Store)
    val intrinsicType = List(IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Convert packed double-precision (32-bit) floating-point elements in "a" to
   * packed unsigned 32-bit integers with truncation, and store the results in
   * "dst" using zeromask "k" (elements are zeroed out when the corresponding mask
   * bit is not set).
   * k: __mmask8, a: __m256
   */
  case class MM256_MASKZ_CVTTPS_EPU32(k: Exp[Int], a: Exp[__m256]) extends IntrinsicsDef[__m256i] {
    val category = List(IntrinsicsCategory.Convert)
    val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Multiply packed double-precision (64-bit) floating-point elements in "a" and
   * "b", add the intermediate result to packed elements in "c", and store the
   * results in "dst" using zeromask "k" (elements are zeroed out when the
   * corresponding mask bit is not set). 
   * [round_note]
   * k: __mmask8, a: __m512d, b: __m512d, c: __m512d, rounding: const int
   */
  case class MM512_MASKZ_FMADD_ROUND_PD(k: Exp[Int], a: Exp[__m512d], b: Exp[__m512d], c: Exp[__m512d], rounding: Exp[Int]) extends IntrinsicsDef[__m512d] {
    val category = List(IntrinsicsCategory.Arithmetic)
    val intrinsicType = List(IntrinsicsType.FloatingPoint)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Compare packed 16-bit integers in "a" and "b" for greater-than-or-equal, and
   * store the results in mask vector "k".
   * a: __m512i, b: __m512i
   */
  case class MM512_CMPGE_EPI16_MASK(a: Exp[__m512i], b: Exp[__m512i]) extends IntrinsicsDef[Int] {
    val category = List(IntrinsicsCategory.Compare)
    val intrinsicType = List(IntrinsicsType.Integer, IntrinsicsType.Mask)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Multiply the packed unsigned 16-bit integers in "a" and "b", producing
   * intermediate 32-bit integers, and store the high 16 bits of the intermediate
   * integers in "dst" using writemask "k" (elements are copied from "src" when the
   * corresponding mask bit is not set).
   * src: __m256i, k: __mmask16, a: __m256i, b: __m256i
   */
  case class MM256_MASK_MULHI_EPU16(src: Exp[__m256i], k: Exp[Int], a: Exp[__m256i], b: Exp[__m256i]) extends IntrinsicsDef[__m256i] {
    val category = List(IntrinsicsCategory.Arithmetic)
    val intrinsicType = List(IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Compare packed unsigned 64-bit integers in "a" and "b", and store packed
   * minimum values in "dst" using writemask "k" (elements are copied from "src"
   * when the corresponding mask bit is not set).
   * src: __m512i, k: __mmask8, a: __m512i, b: __m512i
   */
  case class MM512_MASK_MIN_EPU64(src: Exp[__m512i], k: Exp[Int], a: Exp[__m512i], b: Exp[__m512i]) extends IntrinsicsDef[__m512i] {
    val category = List(IntrinsicsCategory.SpecialMathFunctions)
    val intrinsicType = List(IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Compute the approximate reciprocal square root of the lower single-precision
   * (32-bit) floating-point element in "b", store the result in the lower element
   * of "dst" using writemask "k" (the element is copied from "src" when mask bit 0
   * is not set), and copy the upper 3 packed elements from "a" to the upper
   * elements of "dst". The maximum relative error for this approximation is less
   * than 2^-28. [round_note]
   * src: __m128, k: __mmask8, a: __m128, b: __m128, rounding: int
   */
  case class MM_MASK_RSQRT28_ROUND_SS(src: Exp[__m128], k: Exp[Int], a: Exp[__m128], b: Exp[__m128], rounding: Exp[Int]) extends IntrinsicsDef[__m128] {
    val category = List(IntrinsicsCategory.ElementaryMathFunctions)
    val intrinsicType = List(IntrinsicsType.FloatingPoint)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Multiply packed single-precision (32-bit) floating-point elements in "a" and
   * "b", add the negated intermediate result to packed elements in "c", and store
   * the results in "dst" using zeromask "k" (elements are zeroed out when the
   * corresponding mask bit is not set).
   * k: __mmask8, a: __m128, b: __m128, c: __m128
   */
  case class MM_MASKZ_FNMADD_PS(k: Exp[Int], a: Exp[__m128], b: Exp[__m128], c: Exp[__m128]) extends IntrinsicsDef[__m128] {
    val category = List(IntrinsicsCategory.Arithmetic)
    val intrinsicType = List(IntrinsicsType.FloatingPoint)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Convert packed 64-bit integers in "a" to packed 8-bit integers with signed
   * saturation, and store the results in "dst" using writemask "k" (elements are
   * copied from "src" when the corresponding mask bit is not set).
   * src: __m128i, k: __mmask8, a: __m128i
   */
  case class MM_MASK_CVTSEPI64_EPI8(src: Exp[__m128i], k: Exp[Int], a: Exp[__m128i]) extends IntrinsicsDef[__m128i] {
    val category = List(IntrinsicsCategory.Convert)
    val intrinsicType = List(IntrinsicsType.Integer)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
  /**
   * Compare packed 16-bit integers in "a" and "b" for less-than, and store the
   * results in mask vector "k1" using zeromask "k" (elements are zeroed out when
   * the corresponding mask bit is not set).
   * k1: __mmask8, a: __m128i, b: __m128i
   */
  case class MM_MASK_CMPLT_EPI16_MASK(k1: Exp[Int], a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[Int] {
    val category = List(IntrinsicsCategory.Compare)
    val intrinsicType = List(IntrinsicsType.Integer, IntrinsicsType.Mask)
    val performance = Map.empty[MicroArchType, Performance]
    val header = "immintrin.h"
  }
// Smart constructors: public entry points named after the corresponding C
// intrinsics. Each one simply stages the IR node defined above.
def _mm_mask2_permutex2var_epi8(a: Exp[__m128i], idx: Exp[__m128i], k: Exp[Int], b: Exp[__m128i]): Exp[__m128i] = {
MM_MASK2_PERMUTEX2VAR_EPI8(a, idx, k, b)
}
def _mm256_cvttps_epu64(a: Exp[__m128]): Exp[__m256i] = {
MM256_CVTTPS_EPU64(a)
}
def _mm512_mask_cvtepi32_epi64(src: Exp[__m512i], k: Exp[Int], a: Exp[__m256i]): Exp[__m512i] = {
MM512_MASK_CVTEPI32_EPI64(src, k, a)
}
def _mm_mask_cvtepi8_epi64(src: Exp[__m128i], k: Exp[Int], a: Exp[__m128i]): Exp[__m128i] = {
MM_MASK_CVTEPI8_EPI64(src, k, a)
}
// Memory-touching variants are threaded through the container's effect
// tracking (cont.read / cont.write) so loads and stores keep their order.
def _mm512_stream_load_si512[A[_], T:Typ, U:Integral](mem_addr: Exp[A[T]], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[__m512i] = {
cont.read(mem_addr)(MM512_STREAM_LOAD_SI512(mem_addr, mem_addrOffset)(typ[T], implicitly[Integral[U]], cont))
}
def _mm_mask_cvtsepi32_storeu_epi16[A[_], T:Typ, U:Integral](base_addr: Exp[A[T]], k: Exp[Int], a: Exp[__m128i], base_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = {
cont.write(base_addr)(MM_MASK_CVTSEPI32_STOREU_EPI16(base_addr, k, a, base_addrOffset)(typ[T], implicitly[Integral[U]], cont))
}
def _mm512_ror_epi64(a: Exp[__m512i], imm8: Exp[Int]): Exp[__m512i] = {
MM512_ROR_EPI64(a, imm8)
}
def _mm512_unpacklo_pd(a: Exp[__m512d], b: Exp[__m512d]): Exp[__m512d] = {
MM512_UNPACKLO_PD(a, b)
}
def _mm256_maskz_andnot_epi64(k: Exp[Int], a: Exp[__m256i], b: Exp[__m256i]): Exp[__m256i] = {
MM256_MASKZ_ANDNOT_EPI64(k, a, b)
}
def _mm512_mask_cvtsepi32_storeu_epi8[A[_], T:Typ, U:Integral](base_addr: Exp[A[T]], k: Exp[Int], a: Exp[__m512i], base_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = {
cont.write(base_addr)(MM512_MASK_CVTSEPI32_STOREU_EPI8(base_addr, k, a, base_addrOffset)(typ[T], implicitly[Integral[U]], cont))
}
def _mm256_maskz_cvttps_epu32(k: Exp[Int], a: Exp[__m256]): Exp[__m256i] = {
MM256_MASKZ_CVTTPS_EPU32(k, a)
}
def _mm512_maskz_fmadd_round_pd(k: Exp[Int], a: Exp[__m512d], b: Exp[__m512d], c: Exp[__m512d], rounding: Exp[Int]): Exp[__m512d] = {
MM512_MASKZ_FMADD_ROUND_PD(k, a, b, c, rounding)
}
def _mm512_cmpge_epi16_mask(a: Exp[__m512i], b: Exp[__m512i]): Exp[Int] = {
MM512_CMPGE_EPI16_MASK(a, b)
}
def _mm256_mask_mulhi_epu16(src: Exp[__m256i], k: Exp[Int], a: Exp[__m256i], b: Exp[__m256i]): Exp[__m256i] = {
MM256_MASK_MULHI_EPU16(src, k, a, b)
}
def _mm512_mask_min_epu64(src: Exp[__m512i], k: Exp[Int], a: Exp[__m512i], b: Exp[__m512i]): Exp[__m512i] = {
MM512_MASK_MIN_EPU64(src, k, a, b)
}
def _mm_mask_rsqrt28_round_ss(src: Exp[__m128], k: Exp[Int], a: Exp[__m128], b: Exp[__m128], rounding: Exp[Int]): Exp[__m128] = {
MM_MASK_RSQRT28_ROUND_SS(src, k, a, b, rounding)
}
def _mm_maskz_fnmadd_ps(k: Exp[Int], a: Exp[__m128], b: Exp[__m128], c: Exp[__m128]): Exp[__m128] = {
MM_MASKZ_FNMADD_PS(k, a, b, c)
}
def _mm_mask_cvtsepi64_epi8(src: Exp[__m128i], k: Exp[Int], a: Exp[__m128i]): Exp[__m128i] = {
MM_MASK_CVTSEPI64_EPI8(src, k, a)
}
def _mm_mask_cmplt_epi16_mask(k1: Exp[Int], a: Exp[__m128i], b: Exp[__m128i]): Exp[Int] = {
MM_MASK_CMPLT_EPI16_MASK(k1, a, b)
}
// Rebuilds each staged intrinsic node under the transformer `f` (LMS node
// mirroring). Plain nodes are re-staged via their smart constructor;
// `Reflect`-wrapped (effectful) nodes are re-wrapped so their effect summary
// `u` and dependency list `es` are transformed alongside the arguments.
override def mirror[A:Typ](e: Def[A], f: Transformer)(implicit pos: SourceContext): Exp[A] = (e match {
case MM_MASK2_PERMUTEX2VAR_EPI8 (a, idx, k, b) =>
_mm_mask2_permutex2var_epi8(f(a), f(idx), f(k), f(b))
case MM256_CVTTPS_EPU64 (a) =>
_mm256_cvttps_epu64(f(a))
case MM512_MASK_CVTEPI32_EPI64 (src, k, a) =>
_mm512_mask_cvtepi32_epi64(f(src), f(k), f(a))
case MM_MASK_CVTEPI8_EPI64 (src, k, a) =>
_mm_mask_cvtepi8_epi64(f(src), f(k), f(a))
case iDef@MM512_STREAM_LOAD_SI512 (mem_addr, mem_addrOffset) =>
_mm512_stream_load_si512(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.voidType, iDef.integralType, iDef.cont)
case iDef@MM_MASK_CVTSEPI32_STOREU_EPI16 (base_addr, k, a, base_addrOffset) =>
_mm_mask_cvtsepi32_storeu_epi16(iDef.cont.applyTransformer(base_addr, f), iDef.cont.applyTransformer(k, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(base_addrOffset, f))(iDef.voidType, iDef.integralType, iDef.cont)
case MM512_ROR_EPI64 (a, imm8) =>
_mm512_ror_epi64(f(a), f(imm8))
case MM512_UNPACKLO_PD (a, b) =>
_mm512_unpacklo_pd(f(a), f(b))
case MM256_MASKZ_ANDNOT_EPI64 (k, a, b) =>
_mm256_maskz_andnot_epi64(f(k), f(a), f(b))
case iDef@MM512_MASK_CVTSEPI32_STOREU_EPI8 (base_addr, k, a, base_addrOffset) =>
_mm512_mask_cvtsepi32_storeu_epi8(iDef.cont.applyTransformer(base_addr, f), iDef.cont.applyTransformer(k, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(base_addrOffset, f))(iDef.voidType, iDef.integralType, iDef.cont)
case MM256_MASKZ_CVTTPS_EPU32 (k, a) =>
_mm256_maskz_cvttps_epu32(f(k), f(a))
case MM512_MASKZ_FMADD_ROUND_PD (k, a, b, c, rounding) =>
_mm512_maskz_fmadd_round_pd(f(k), f(a), f(b), f(c), f(rounding))
case MM512_CMPGE_EPI16_MASK (a, b) =>
_mm512_cmpge_epi16_mask(f(a), f(b))
case MM256_MASK_MULHI_EPU16 (src, k, a, b) =>
_mm256_mask_mulhi_epu16(f(src), f(k), f(a), f(b))
case MM512_MASK_MIN_EPU64 (src, k, a, b) =>
_mm512_mask_min_epu64(f(src), f(k), f(a), f(b))
case MM_MASK_RSQRT28_ROUND_SS (src, k, a, b, rounding) =>
_mm_mask_rsqrt28_round_ss(f(src), f(k), f(a), f(b), f(rounding))
case MM_MASKZ_FNMADD_PS (k, a, b, c) =>
_mm_maskz_fnmadd_ps(f(k), f(a), f(b), f(c))
case MM_MASK_CVTSEPI64_EPI8 (src, k, a) =>
_mm_mask_cvtsepi64_epi8(f(src), f(k), f(a))
case MM_MASK_CMPLT_EPI16_MASK (k1, a, b) =>
_mm_mask_cmplt_epi16_mask(f(k1), f(a), f(b))
case Reflect(MM_MASK2_PERMUTEX2VAR_EPI8 (a, idx, k, b), u, es) =>
reflectMirrored(Reflect(MM_MASK2_PERMUTEX2VAR_EPI8 (f(a), f(idx), f(k), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM256_CVTTPS_EPU64 (a), u, es) =>
reflectMirrored(Reflect(MM256_CVTTPS_EPU64 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM512_MASK_CVTEPI32_EPI64 (src, k, a), u, es) =>
reflectMirrored(Reflect(MM512_MASK_CVTEPI32_EPI64 (f(src), f(k), f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM_MASK_CVTEPI8_EPI64 (src, k, a), u, es) =>
reflectMirrored(Reflect(MM_MASK_CVTEPI8_EPI64 (f(src), f(k), f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(iDef@MM512_STREAM_LOAD_SI512 (mem_addr, mem_addrOffset), u, es) =>
reflectMirrored(Reflect(MM512_STREAM_LOAD_SI512 (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.voidType, iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(iDef@MM_MASK_CVTSEPI32_STOREU_EPI16 (base_addr, k, a, base_addrOffset), u, es) =>
reflectMirrored(Reflect(MM_MASK_CVTSEPI32_STOREU_EPI16 (iDef.cont.applyTransformer(base_addr, f), iDef.cont.applyTransformer(k, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(base_addrOffset, f))(iDef.voidType, iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM512_ROR_EPI64 (a, imm8), u, es) =>
reflectMirrored(Reflect(MM512_ROR_EPI64 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM512_UNPACKLO_PD (a, b), u, es) =>
reflectMirrored(Reflect(MM512_UNPACKLO_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM256_MASKZ_ANDNOT_EPI64 (k, a, b), u, es) =>
reflectMirrored(Reflect(MM256_MASKZ_ANDNOT_EPI64 (f(k), f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(iDef@MM512_MASK_CVTSEPI32_STOREU_EPI8 (base_addr, k, a, base_addrOffset), u, es) =>
reflectMirrored(Reflect(MM512_MASK_CVTSEPI32_STOREU_EPI8 (iDef.cont.applyTransformer(base_addr, f), iDef.cont.applyTransformer(k, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(base_addrOffset, f))(iDef.voidType, iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM256_MASKZ_CVTTPS_EPU32 (k, a), u, es) =>
reflectMirrored(Reflect(MM256_MASKZ_CVTTPS_EPU32 (f(k), f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM512_MASKZ_FMADD_ROUND_PD (k, a, b, c, rounding), u, es) =>
reflectMirrored(Reflect(MM512_MASKZ_FMADD_ROUND_PD (f(k), f(a), f(b), f(c), f(rounding)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM512_CMPGE_EPI16_MASK (a, b), u, es) =>
reflectMirrored(Reflect(MM512_CMPGE_EPI16_MASK (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM256_MASK_MULHI_EPU16 (src, k, a, b), u, es) =>
reflectMirrored(Reflect(MM256_MASK_MULHI_EPU16 (f(src), f(k), f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM512_MASK_MIN_EPU64 (src, k, a, b), u, es) =>
reflectMirrored(Reflect(MM512_MASK_MIN_EPU64 (f(src), f(k), f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM_MASK_RSQRT28_ROUND_SS (src, k, a, b, rounding), u, es) =>
reflectMirrored(Reflect(MM_MASK_RSQRT28_ROUND_SS (f(src), f(k), f(a), f(b), f(rounding)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM_MASKZ_FNMADD_PS (k, a, b, c), u, es) =>
reflectMirrored(Reflect(MM_MASKZ_FNMADD_PS (f(k), f(a), f(b), f(c)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM_MASK_CVTSEPI64_EPI8 (src, k, a), u, es) =>
reflectMirrored(Reflect(MM_MASK_CVTSEPI64_EPI8 (f(src), f(k), f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case Reflect(MM_MASK_CMPLT_EPI16_MASK (k1, a, b), u, es) =>
reflectMirrored(Reflect(MM_MASK_CMPLT_EPI16_MASK (f(k1), f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos)
case _ => super.mirror(e, f)
}).asInstanceOf[Exp[A]] // cast: each branch yields the concrete node's Exp type, not Exp[A]
}
/**
 * C code generator for the intrinsics staged above. Each IR node is matched
 * and emitted as the literal C intrinsic call; the required header is
 * registered in `headers`. Value-producing intrinsics go through
 * `emitValDef`; void store intrinsics are printed as bare statements.
 */
trait CGenAVX512020 extends CGenIntrinsics {
val IR: AVX512
import IR._
override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
case iDef@MM_MASK2_PERMUTEX2VAR_EPI8(a, idx, k, b) =>
headers += iDef.header
emitValDef(sym, s"_mm_mask2_permutex2var_epi8(${quote(a)}, ${quote(idx)}, ${quote(k)}, ${quote(b)})")
case iDef@MM256_CVTTPS_EPU64(a) =>
headers += iDef.header
emitValDef(sym, s"_mm256_cvttps_epu64(${quote(a)})")
case iDef@MM512_MASK_CVTEPI32_EPI64(src, k, a) =>
headers += iDef.header
emitValDef(sym, s"_mm512_mask_cvtepi32_epi64(${quote(src)}, ${quote(k)}, ${quote(a)})")
case iDef@MM_MASK_CVTEPI8_EPI64(src, k, a) =>
headers += iDef.header
emitValDef(sym, s"_mm_mask_cvtepi8_epi64(${quote(src)}, ${quote(k)}, ${quote(a)})")
// Pointer operands are emitted as base (+ optional offset); a Const(0)
// offset is elided from the generated C.
case iDef@MM512_STREAM_LOAD_SI512(mem_addr, mem_addrOffset) =>
headers += iDef.header
emitValDef(sym, s"_mm512_stream_load_si512((void const*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}))")
// Store intrinsics return void, so no value definition is emitted.
case iDef@MM_MASK_CVTSEPI32_STOREU_EPI16(base_addr, k, a, base_addrOffset) =>
headers += iDef.header
stream.println(s"_mm_mask_cvtsepi32_storeu_epi16((void*) (${quote(base_addr) + (if(base_addrOffset == Const(0)) "" else " + " + quote(base_addrOffset))}), ${quote(k)}, ${quote(a)});")
case iDef@MM512_ROR_EPI64(a, imm8) =>
headers += iDef.header
emitValDef(sym, s"_mm512_ror_epi64(${quote(a)}, ${quote(imm8)})")
case iDef@MM512_UNPACKLO_PD(a, b) =>
headers += iDef.header
emitValDef(sym, s"_mm512_unpacklo_pd(${quote(a)}, ${quote(b)})")
case iDef@MM256_MASKZ_ANDNOT_EPI64(k, a, b) =>
headers += iDef.header
emitValDef(sym, s"_mm256_maskz_andnot_epi64(${quote(k)}, ${quote(a)}, ${quote(b)})")
case iDef@MM512_MASK_CVTSEPI32_STOREU_EPI8(base_addr, k, a, base_addrOffset) =>
headers += iDef.header
stream.println(s"_mm512_mask_cvtsepi32_storeu_epi8((void*) (${quote(base_addr) + (if(base_addrOffset == Const(0)) "" else " + " + quote(base_addrOffset))}), ${quote(k)}, ${quote(a)});")
case iDef@MM256_MASKZ_CVTTPS_EPU32(k, a) =>
headers += iDef.header
emitValDef(sym, s"_mm256_maskz_cvttps_epu32(${quote(k)}, ${quote(a)})")
case iDef@MM512_MASKZ_FMADD_ROUND_PD(k, a, b, c, rounding) =>
headers += iDef.header
emitValDef(sym, s"_mm512_maskz_fmadd_round_pd(${quote(k)}, ${quote(a)}, ${quote(b)}, ${quote(c)}, ${quote(rounding)})")
case iDef@MM512_CMPGE_EPI16_MASK(a, b) =>
headers += iDef.header
emitValDef(sym, s"_mm512_cmpge_epi16_mask(${quote(a)}, ${quote(b)})")
case iDef@MM256_MASK_MULHI_EPU16(src, k, a, b) =>
headers += iDef.header
emitValDef(sym, s"_mm256_mask_mulhi_epu16(${quote(src)}, ${quote(k)}, ${quote(a)}, ${quote(b)})")
case iDef@MM512_MASK_MIN_EPU64(src, k, a, b) =>
headers += iDef.header
emitValDef(sym, s"_mm512_mask_min_epu64(${quote(src)}, ${quote(k)}, ${quote(a)}, ${quote(b)})")
case iDef@MM_MASK_RSQRT28_ROUND_SS(src, k, a, b, rounding) =>
headers += iDef.header
emitValDef(sym, s"_mm_mask_rsqrt28_round_ss(${quote(src)}, ${quote(k)}, ${quote(a)}, ${quote(b)}, ${quote(rounding)})")
case iDef@MM_MASKZ_FNMADD_PS(k, a, b, c) =>
headers += iDef.header
emitValDef(sym, s"_mm_maskz_fnmadd_ps(${quote(k)}, ${quote(a)}, ${quote(b)}, ${quote(c)})")
case iDef@MM_MASK_CVTSEPI64_EPI8(src, k, a) =>
headers += iDef.header
emitValDef(sym, s"_mm_mask_cvtsepi64_epi8(${quote(src)}, ${quote(k)}, ${quote(a)})")
case iDef@MM_MASK_CMPLT_EPI16_MASK(k1, a, b) =>
headers += iDef.header
emitValDef(sym, s"_mm_mask_cmplt_epi16_mask(${quote(k1)}, ${quote(a)}, ${quote(b)})")
case _ => super.emitNode(sym, rhs)
}
}
| ivtoskov/lms-intrinsics | src/main/scala/ch/ethz/acl/intrinsics/AVX512020.scala | Scala | apache-2.0 | 27,610 |
package assigner.search
import assigner._
import assigner.model._
import scala.collection.SortedSet
/** Random starting point generator. */
object StartingPoint {
def apply(course: Course): Assignment = {
val studentMap =
course.studentMap.sorted mapValues { _ => default.queueId }
val groupMap =
course.groupMap.sorted mapValues { _ => SortedSet.empty[Long] }
val queue = default.queueId -> studentMap.keySet
val manager = new Manager(course)
val assignment = Assignment(studentMap, groupMap + queue)
for (_ <- 1 to course.settings.initialMoves) {
manager.getAllMoves(assignment).toList.shuffle match {
case move :: _ => move.operateOn(assignment)
case Nil =>
}
}
// TODO: Find a better way to build the starting point.
// val students = course.studentMap partition { _._2.mandatory } match {
// case (mandatory, elective) =>
// mandatory.toList.shuffle ::: elective.toList.shuffle
// }
//
// val groups = course.groupMap partition { _._2.mandatory } match {
// case (mandatory, elective) =>
// mandatory.toList.shuffle ::: elective.toList.shuffle
// }
assignment
}
}
| joroKr21/IoS-Algorithm | src/main/scala/assigner/search/StartingPoint.scala | Scala | mit | 1,201 |
/*
* Copyright (c) 2016. Fengguo (Hugo) Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.cit.intellij.jawa.compiler
import java.io.{File, IOException}
import javax.swing.event.HyperlinkEvent
import com.intellij.notification.{Notification, NotificationListener, NotificationType, Notifications}
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.components.ApplicationComponent
import com.intellij.openapi.project.Project
import com.intellij.openapi.projectRoots.{JavaSdk, ProjectJdkTable}
import com.intellij.openapi.roots.ProjectRootManager
import com.intellij.openapi.util.io.FileUtil
import com.intellij.util.PathUtil
import com.intellij.util.net.NetUtils
import gnu.trove.TByteArrayList
import org.argus.cit.intellij.jawa.compiler.JcCompileServerLauncher.ConfigureLinkListener
import org.jetbrains.jps.incremental.BuilderService
import org.argus.cit.intellij.jawa.compiler.JcCompileServerLauncher._
import org.argus.cit.intellij.jawa.extensions._
import collection.JavaConverters._
import scala.util.control.Exception._
/**
* @author <a href="mailto:fgwei521@gmail.com">Fengguo Wei</a>
*/
/**
 * IntelliJ application component that manages the lifecycle of the external
 * Jawa compile-server JVM: choosing a JDK, building the server's classpath
 * and command line, launching the process, and stopping it again.
 */
class JcCompileServerLauncher extends ApplicationComponent {
// Handle on the currently running server process, if any.
private var serverInstance: Option[ServerInstance] = None
override def initComponent() {}
override def disposeComponent() {
if (running) stop()
}
/** Starts the server if it is not already running; returns whether it is up. */
def tryToStart(project: Project): Boolean = {
if (!running) {
val started = start(project)
if (started) {
// Best-effort handshake with the freshly started server; failures are
// deliberately swallowed (the server may still be coming up).
try new RemoteServerRunner(project).send("addDisconnectListener", Seq.empty, null)
catch {
case _: Exception =>
}
}
started
}
else true
}
// Resolves a JDK (auto-selecting one if the setting is empty), then launches
// the server; on failure, surfaces an error notification with a settings link.
private def start(project: Project): Boolean = {
val applicationSettings = JawaCompileServerSettings.getInstance
if (applicationSettings.COMPILE_SERVER_SDK == null) {
// Try to find a suitable JDK
val choice = Option(ProjectRootManager.getInstance(project).getProjectSdk).orElse {
val all = ProjectJdkTable.getInstance.getSdksOfType(JavaSdk.getInstance()).asScala
all.headOption
}
choice.foreach(sdk => applicationSettings.COMPILE_SERVER_SDK = sdk.getName)
// val message = "JVM SDK is automatically selected: " + name +
// "\\n(can be changed in Application Settings / Scala)"
// Notifications.Bus.notify(new Notification("scala", "Scala compile server",
// message, NotificationType.INFORMATION))
}
findJdkByName(applicationSettings.COMPILE_SERVER_SDK)
.left.map(_ + "\\nPlease either disable Jawa compile server or configure a valid JVM SDK for it.")
.right.flatMap(start(project, _)) match {
case Left(error) =>
val title = "Cannot start Jawa compile server"
val content = s"<html><body>${error.replace("\\n", "<br>")} <a href=''>Configure</a></body></html>"
Notifications.Bus.notify(new Notification("jawa", title, content, NotificationType.ERROR, ConfigureLinkListener))
false
case Right(_) =>
// Refresh the status-bar widget on the EDT once the server is up.
ApplicationManager.getApplication invokeLater (() => {
JcCompileServerManager.instance(project).configureWidget()
})
true
}
}
// Builds the server command line and spawns the process. Returns the process
// on success, or a human-readable error message on failure.
private def start(project: Project, jdk: JDK): Either[String, Process] = {
import org.argus.cit.intellij.jawa.compiler.JcCompileServerLauncher.{compilerJars, jvmParameters}
// All required jars must exist; otherwise report the missing ones.
compilerJars.partition(_.exists) match {
case (presentFiles, Seq()) =>
val bootclasspathArg = Nil
val classpath = (jdk.tools +: presentFiles).map(_.canonicalPath).mkString(File.pathSeparator)
val settings = JawaCompileServerSettings.getInstance
val freePort = JcCompileServerLauncher.findFreePort
// Persist the (possibly new) port so clients connect to the right one,
// and ask any stale server on the old port to shut down.
if (settings.COMPILE_SERVER_PORT != freePort) {
new RemoteServerStopper(settings.COMPILE_SERVER_PORT).sendStop()
settings.COMPILE_SERVER_PORT = freePort
ApplicationManager.getApplication.saveSettings()
}
val ngRunnerFqn = "org.argus.cit.intellij.jawa.nailgun.NailgunRunner"
val id = settings.COMPILE_SERVER_ID
val shutdownDelay = settings.COMPILE_SERVER_SHUTDOWN_DELAY
// Optional idle-shutdown: passed to the server as a system property.
val shutdownDelayArg = if (settings.COMPILE_SERVER_SHUTDOWN_IDLE && shutdownDelay >= 0) {
Seq(s"-Dshutdown.delay=$shutdownDelay")
} else Nil
val commands = jdk.executable.canonicalPath +: bootclasspathArg ++: "-cp" +: classpath +: jvmParameters ++: shutdownDelayArg ++:
ngRunnerFqn +: freePort.toString +: id.toString +: Nil
val builder = new ProcessBuilder(commands.asJava)
if (settings.USE_PROJECT_HOME_AS_WORKING_DIR) {
projectHome(project).foreach(dir => builder.directory(dir))
}
// Launch; IOExceptions become Left(message). On success, attach a
// watcher and remember the instance before returning the process.
catching(classOf[IOException]).either(builder.start())
.left.map(_.getMessage)
.right.map { process =>
val watcher = new ProcessWatcher(process, "jawaCompileServer")
serverInstance = Some(ServerInstance(watcher, freePort, builder.directory()))
watcher.startNotify()
process
}
case (_, absentFiles) =>
val paths = absentFiles.map(_.getPath).mkString(", ")
Left("Required file(s) not found: " + paths)
}
}
// TODO stop server more gracefully
def stop() {
serverInstance.foreach { it =>
it.destroyProcess()
}
}
/** Stops the server and refreshes the project's status-bar widget. */
def stop(project: Project) {
stop()
ApplicationManager.getApplication invokeLater (() => {
JcCompileServerManager.instance(project).configureWidget()
})
}
def running: Boolean = serverInstance.exists(_.running)
def errors(): Seq[String] = serverInstance.map(_.errors()).getOrElse(Seq.empty)
def port: Option[Int] = serverInstance.map(_.port)
override def getComponentName: String = getClass.getSimpleName
}
object JcCompileServerLauncher {
/** The singleton component instance registered with the IntelliJ application. */
def instance: JcCompileServerLauncher = ApplicationManager.getApplication.getComponent(classOf[JcCompileServerLauncher])
// Jars required on the compile server's classpath: platform jars located via
// marker classes, plus plugin-bundled jars under the plugin root and jc/.
def compilerJars: Seq[File] = {
val jcBuildersJar = new File(PathUtil.getJarPathForClass(classOf[BuilderService]))
val utilJar = new File(PathUtil.getJarPathForClass(classOf[FileUtil]))
val trove4jJar = new File(PathUtil.getJarPathForClass(classOf[TByteArrayList]))
val pluginRoot = pluginPath
val jcRoot = new File(pluginRoot, "jc")
Seq(
jcBuildersJar,
utilJar,
trove4jJar,
new File(pluginRoot, "scala-library.jar"),
new File(pluginRoot, "saf-library.jar"),
new File(pluginRoot, "jawa.jar"),
new File(pluginRoot, "amandroid.jar"),
new File(pluginRoot, "jawa-nailgun-runner.jar"),
new File(pluginRoot, "compiler-settings.jar"),
new File(jcRoot, "nailgun.jar"),
new File(jcRoot, "incremental-compiler.jar"),
new File(jcRoot, "asm-all.jar"),
new File(jcRoot, "jawa-jc-plugin.jar")
)
}
// In unit tests the plugin lives under the path given by -Dplugin.path;
// otherwise it is the directory containing this class's jar.
def pluginPath: String = {
if (ApplicationManager.getApplication.isUnitTestMode) new File(System.getProperty("plugin.path"), "lib").getCanonicalPath
else new File(PathUtil.getJarPathForClass(getClass)).getParent
}
// JVM flags for the server process: optional -Xmx from settings, the user's
// extra parameters, and a MaxPermSize default for pre-1.8 JVMs.
def jvmParameters: Seq[String] = {
val settings = JawaCompileServerSettings.getInstance
val xmx = settings.COMPILE_SERVER_MAXIMUM_HEAP_SIZE |> { size =>
if (size.isEmpty) Nil else List("-Xmx%sm".format(size))
}
val (userMaxPermSize, otherParams) = settings.COMPILE_SERVER_JVM_PARAMETERS.split(" ").partition(_.contains("-XX:MaxPermSize"))
val defaultMaxPermSize = Some("-XX:MaxPermSize=256m")
// NOTE(review): lexicographic compare of the SDK *name* against "1.8" —
// this only works when the configured name is a plain version string;
// a name like "JDK 1.7" would compare incorrectly. Verify.
val needMaxPermSize = settings.COMPILE_SERVER_SDK < "1.8"
val maxPermSize = if (needMaxPermSize) userMaxPermSize.headOption.orElse(defaultMaxPermSize) else None
xmx ++ otherParams ++ maxPermSize
}
/** Restarts the server if its configuration became stale, then ensures it runs. */
def ensureServerRunning(project: Project) {
val launcher = JcCompileServerLauncher.instance
if (needRestart(project)) launcher.stop()
if (!launcher.running) launcher.tryToStart(project)
}
// A restart is needed when no instance exists, or when the configured working
// directory (project home) no longer matches the running instance's.
def needRestart(project: Project): Boolean = {
val serverInstance = JcCompileServerLauncher.instance.serverInstance
serverInstance match {
case None => true
case Some(_) =>
val useProjectHome = JawaCompileServerSettings.getInstance().USE_PROJECT_HOME_AS_WORKING_DIR
val workingDirChanged = useProjectHome && projectHome(project) != serverInstance.map(_.workingDir)
workingDirChanged
}
}
def ensureNotRunning(project: Project) {
val launcher = JcCompileServerLauncher.instance
if (launcher.running) launcher.stop(project)
}
// Reuses the configured port when nothing is listening on it, otherwise asks
// the OS for a fresh one. NOTE(review): check-then-use — the port could be
// taken between this probe and the server binding it; presumably acceptable.
def findFreePort: Int = {
val port = JawaCompileServerSettings.getInstance().COMPILE_SERVER_PORT
if (NetUtils.canConnectToSocket("localhost", port))
NetUtils.findAvailableSocketPort()
else port
}
// The project's base directory, if it resolves to an existing local path.
private def projectHome(project: Project): Option[File] = {
for {
dir <- Option(project.getBaseDir)
path <- Option(dir.getCanonicalPath)
file = new File(path)
if file.exists()
} yield file
}
// Opens the compile-server settings dialog when the notification's
// "Configure" link is clicked, then dismisses the notification.
object ConfigureLinkListener extends NotificationListener.Adapter {
def hyperlinkActivated(notification: Notification, event: HyperlinkEvent) {
JcCompileServerManager.showCompileServerSettingsDialog()
notification.expire()
}
}
}
/**
 * Handle on a running compile-server process: the watcher wrapping the OS
 * process, the port the server listens on, and its working directory.
 */
private case class ServerInstance(watcher: ProcessWatcher, port: Int, workingDir: File) {

  // Flipped as soon as a shutdown has been requested, so `running` reports
  // false even before the watcher notices the process is gone.
  private var terminated = false

  def destroyProcess(): Unit = {
    terminated = true
    watcher.destroyProcess()
  }

  def running: Boolean = if (terminated) false else watcher.running

  def errors(): Seq[String] = watcher.errors()
}
package game.commanders.unite
import com.badlogic.gdx.backends.lwjgl._
/** Desktop (LWJGL) entry point: configures the window and boots the game. */
object DesktopMain extends App {
  val config = new LwjglApplicationConfiguration
  config.title = "Commanders Unite"
  config.width = 1280
  config.height = 720
  config.forceExit = true
  new LwjglApplication(new CommandersUnite, config)
}
| tommyettinger/CommandersUnite | commanders-unite/desktop/src/main/scala/DesktopMain.scala | Scala | mit | 309 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.gihyo.spark.ch06
import scala.collection.mutable
import jp.gihyo.spark.{SparkFunSuite, TestStreamingContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContextWrapper
// Smoke test for gihyo_6_3_Reduce: feeds one hand-made RDD into a queue-backed
// DStream and advances a manual clock to trigger a single micro-batch.
class gihyo_6_3_ReduceSuite extends SparkFunSuite with TestStreamingContext {
test("run") {
// Each RDD pushed onto this queue becomes one input micro-batch.
val lines = mutable.Queue[RDD[String]]()
val ds = ssc.queueStream(lines)
// Manual clock lets the test drive batch boundaries deterministically.
val clock = new StreamingContextWrapper(ssc).manualClock
gihyo_6_3_Reduce.run(ds)
ssc.start()
lines += sc.makeRDD(Seq("gi", "jutsu", "hyoron", "sha")) // test data
// Advance past the batch interval, then give the job time to finish.
// NOTE(review): sleep-based waiting can be flaky on slow machines.
clock.advance(1000)
Thread.sleep(1000)
}
}
| yu-iskw/gihyo-spark-book-example | src/test/scala/jp/gihyo/spark/ch06/gihyo_6_3_ReduceSuite.scala | Scala | apache-2.0 | 1,424 |
package kata.scala
import scala.collection.mutable
/** Simple mutable undirected multigraph over Int vertices (adjacency lists). */
class Graph {

  // vertex -> neighbours; a vertex gets a key as soon as it appears in an edge
  private val edges = mutable.Map[Int, mutable.ArrayBuffer[Int]]()

  /** Iterator over the neighbours of `vertex`; fails for an unknown vertex. */
  def adjacentTo(vertex: Int): Iterator[Int] = {
    edges(vertex).iterator
  }

  /** True once `vertex` has appeared in some `addEdge` call. */
  def contains(vertex: Int): Boolean = edges.contains(vertex)

  /**
   * Adds an undirected edge by registering each endpoint in the other's
   * adjacency list. Parallel edges are allowed; a self-loop (from == to)
   * records the vertex twice in its own list, matching the original logic.
   */
  def addEdge(from: Int, to: Int): Unit = {
    // getOrElseUpdate replaces the duplicated "check, insert empty, append"
    // sequences of the original with a single idiomatic lookup-or-create.
    edges.getOrElseUpdate(from, mutable.ArrayBuffer.empty[Int]) += to
    edges.getOrElseUpdate(to, mutable.ArrayBuffer.empty[Int]) += from
  }
}
| Alex-Diez/Scala-TDD-Katas | old-katas/graph-search-kata/day-9/src/main/scala/kata/scala/Graph.scala | Scala | mit | 606 |
/*
* This file is part of Kiama.
*
* Copyright (C) 2009-2015 Anthony M Sloane, Macquarie University.
*
* Kiama is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* Kiama is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see
* <http://www.gnu.org/licenses/>.
*/
package org.kiama
package example.lambda2
/**
* Evaluation of lambda calculus using global beta reduction with meta-level
* substitution and arithmetic operations.
*/
/**
 * Evaluation of lambda calculus using global beta reduction with meta-level
 * substitution and arithmetic operations.
 */
trait Reduce extends RewritingEvaluator {
import LambdaTree._
import org.kiama.rewriting.Rewriter._
/**
 * Evaluate by repeatedly trying to apply beta reduction and arithmetic
 * operators anywhere.
 */
// `beta + arithop` tries beta first, then arithop; `reduce` keeps applying
// the combined strategy somewhere in the term until neither rule matches.
lazy val s =
reduce (beta + arithop)
/**
 * Beta reduction via meta-level substitution.
 */
// (\x:T. e1) e2  ~~>  e1[x := e2], with substitution done at the meta level.
lazy val beta =
rule[Exp] {
case App (Lam (x, _, e1), e2) => substitute (x, e2, e1)
}
/*
 * Evaluation of arithmetic operators.
 */
// Folds an operation on two numeric literals into a single literal.
lazy val arithop =
rule[Exp] {
case Opn (Num (l), op, Num (r)) => Num (op.eval (l, r))
}
}
// Since the strategy rewrites anywhere in the term, reductions can happen
// under lambda binders too — hence the flag.
class ReduceEvaluator extends Reduce {
override val reducesinlambdas = true
}
| adeze/kiama | library/src/org/kiama/example/lambda2/Reduce.scala | Scala | gpl-3.0 | 1,703 |
package spark
import org.apache.hadoop.fs.Path
import org.apache.hadoop.conf.Configuration
import rdd.{CheckpointRDD, CoalescedRDD}
import scheduler.{ResultTask, ShuffleMapTask}
/**
* Enumeration to manage state transitions of an RDD through checkpointing
* [ Initialized --> marked for checkpointing --> checkpointing in progress --> checkpointed ]
*/
private[spark] object CheckpointState extends Enumeration {
// Type alias so the enumeration can be used as a type after importing it.
type CheckpointState = Value
// Legal transition order: Initialized -> MarkedForCheckpoint ->
// CheckpointingInProgress -> Checkpointed (see RDDCheckpointData).
val Initialized, MarkedForCheckpoint, CheckpointingInProgress, Checkpointed = Value
}
/**
* This class contains all the information related to RDD checkpointing. Each instance of this class
* is associated with a RDD. It manages process of checkpointing of the associated RDD, as well as,
* manages the post-checkpoint state by providing the updated partitions, iterator and preferred locations
* of the checkpointed RDD.
*/
// NOTE: every state transition below synchronizes on the RDDCheckpointData
// companion object — a single global lock shared by all RDDs' checkpoint data.
private[spark] class RDDCheckpointData[T: ClassManifest](rdd: RDD[T])
extends Logging with Serializable {
import CheckpointState._
// The checkpoint state of the associated RDD.
var cpState = Initialized
// The file to which the associated RDD has been checkpointed to
@transient var cpFile: Option[String] = None
// The CheckpointRDD created from the checkpoint file, i.e. the new parent of the associated RDD.
var cpRDD: Option[RDD[T]] = None
// Mark the RDD for checkpointing; a no-op unless still in the initial state.
def markForCheckpoint() {
RDDCheckpointData.synchronized {
if (cpState == Initialized) cpState = MarkedForCheckpoint
}
}
// Is the RDD already checkpointed
def isCheckpointed: Boolean = {
RDDCheckpointData.synchronized { cpState == Checkpointed }
}
// Get the file to which this RDD was checkpointed to as an Option
def getCheckpointFile: Option[String] = {
RDDCheckpointData.synchronized { cpFile }
}
// Do the checkpointing of the RDD. Called after the first job using that RDD is over.
def doCheckpoint() {
// If it is marked for checkpointing AND checkpointing is not already in progress,
// then set it to be in progress, else return
RDDCheckpointData.synchronized {
if (cpState == MarkedForCheckpoint) {
cpState = CheckpointingInProgress
} else {
return
}
}
// Create the output path for the checkpoint
val path = new Path(rdd.context.checkpointDir.get, "rdd-" + rdd.id)
// NOTE(review): a fresh, default Configuration is used here — presumably
// sufficient, but it ignores any custom Hadoop settings; verify.
val fs = path.getFileSystem(new Configuration())
if (!fs.mkdirs(path)) {
throw new SparkException("Failed to create checkpoint path " + path)
}
// Save to file, and reload it as an RDD
rdd.context.runJob(rdd, CheckpointRDD.writeToFile(path.toString) _)
val newRDD = new CheckpointRDD[T](rdd.context, path.toString)
// Change the dependencies and partitions of the RDD
RDDCheckpointData.synchronized {
cpFile = Some(path.toString)
cpRDD = Some(newRDD)
rdd.markCheckpointed(newRDD) // Update the RDD's dependencies and partitions
cpState = Checkpointed
RDDCheckpointData.clearTaskCaches()
logInfo("Done checkpointing RDD " + rdd.id + ", new parent is RDD " + newRDD.id)
}
}
// Get preferred location of a split after checkpointing.
// Uses cpRDD.get — only valid once checkpointing has completed.
def getPreferredLocations(split: Partition): Seq[String] = {
RDDCheckpointData.synchronized {
cpRDD.get.preferredLocations(split)
}
}
// Partitions of the checkpointed RDD; only valid after checkpointing.
def getPartitions: Array[Partition] = {
RDDCheckpointData.synchronized {
cpRDD.get.partitions
}
}
def checkpointRDD: Option[RDD[T]] = {
RDDCheckpointData.synchronized {
cpRDD
}
}
}
private[spark] object RDDCheckpointData {
// Drops the serialized-task caches so subsequently scheduled tasks see the
// RDD's new (checkpointed) dependencies. NOTE(review): inferred from the
// call site in doCheckpoint — confirm against the task cache implementations.
def clearTaskCaches() {
ShuffleMapTask.clearCache()
ResultTask.clearCache()
}
}
| prabeesh/Spark-Kestrel | core/src/main/scala/spark/RDDCheckpointData.scala | Scala | bsd-3-clause | 3,688 |
// Benchmark for reference equality (`eq`) over boxed values.
object eq extends testing.Benchmark {
// Builds an array of n boxed values (creator is called with 0 or 1 only)
// and counts, over all n*n ordered pairs, how many are reference-equal.
// The explicit while loops are deliberate: this is benchmark code.
def eqtest[T](creator: Int => T, n: Int): Int = {
val elems = Array.tabulate[AnyRef](n)(i => creator(i % 2).asInstanceOf[AnyRef])
var sum = 0
var i = 0
while (i < n) {
var j = 0
while (j < n) {
if (elems(i) eq elems(j)) sum += 1
j += 1
}
i += 1
}
sum
}
val obj1 = new Object
val obj2 = new Object
def run() {
var sum = 0
sum += eqtest(x => if (x == 0) obj1 else obj2, 2000)
sum += eqtest(x => x, 1000)
sum += eqtest(x => x.toChar, 550)
sum += eqtest(x => x.toByte, 550)
sum += eqtest(x => x.toLong, 550)
sum += eqtest(x => x.toShort, 100)
sum += eqtest(x => x.toFloat, 100)
sum += eqtest(x => x.toDouble, 100)
// NOTE(review): the expected total depends on how the runtime boxes and
// caches each primitive type — presumably stable for the tested JVMs,
// but worth confirming when the toolchain changes.
assert(sum == 2958950)
}
}
| felixmulder/scala | test/files/bench/equality/eq.scala | Scala | bsd-3-clause | 795 |
package se.gigurra.leavu3.datamodel
import com.github.gigurra.heisenberg.MapData._
import com.github.gigurra.heisenberg.{Schema, Parsed}
/**
 * DLZ range data parsed from a raw source map.
 *
 * NOTE(review): the semantics of the four ranges (aero, min, pi, tr) are not
 * visible here -- presumably weapon launch-zone ranges from the simulator
 * export; confirm against the data producer.
 */
case class Dlz(source: SourceData = Map.empty) extends SafeParsed[Dlz.type] {
  // Each field is extracted from `source` via the Heisenberg schema below.
  val rAero = parse(schema.rAero)
  val rMin = parse(schema.rMin)
  val rPi = parse(schema.rPi)
  val rTr = parse(schema.rTr)
}
/** Heisenberg schema: maps the upper-case wire keys to Float fields, each defaulting to 0. */
object Dlz extends Schema[Dlz] {
  val rAero = required[Float]("RAERO", default = 0)
  val rMin = required[Float]("RMIN", default = 0)
  val rPi = required[Float]("RPI", default = 0)
  val rTr = required[Float]("RTR", default = 0)
}
| GiGurra/leavu3 | src/main/scala/se/gigurra/leavu3/datamodel/Dlz.scala | Scala | mit | 589 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.sdk.pipeline.autoCalculations
/** Pairs a target [[Field]] with a fixed string value. */
case class FromFixedValue(field: Field, value: String)
| fjsc/sparta | sdk/src/main/scala/com/stratio/sparta/sdk/pipeline/autoCalculations/FromFixedValue.scala | Scala | apache-2.0 | 735 |
package com.gx.decorator
/**
* Copyright 2017 josephguan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/** Demo entry point: stacks the milk and sugar decorations on a plain coffee. */
object App extends App {
  val coffee = new SimpleCoffee with MilkAdded with SugarAdded
  // Print cost first, then description -- same output order as before.
  Seq(coffee.getCost, coffee.getDescription).foreach(println)
}
| josephguan/scala-design-patterns | structural/decorator/src/main/scala/com/gx/decorator/App.scala | Scala | apache-2.0 | 785 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package limits
import java.time.Instant
import akka.http.scaladsl.model.StatusCodes.TooManyRequests
import scala.collection.parallel.immutable.ParSeq
import scala.concurrent.Future
import scala.concurrent.Promise
import scala.concurrent.duration._
import org.junit.runner.RunWith
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.junit.JUnitRunner
import common._
import common.TestUtils._
import common.rest.WskRestOperations
import common.WskAdmin.wskadmin
import spray.json._
import spray.json.DefaultJsonProtocol._
import org.apache.openwhisk.http.Messages._
import org.apache.openwhisk.utils.ExecutionContextFactory
import org.apache.openwhisk.utils.retry
import scala.util.{Success, Try}
/** Shared helper for extracting the static prefix of a parameterized error message. */
protected[limits] trait LocalHelper {
  /**
   * Returns `msg` truncated just before its first '(' -- i.e. the static part
   * of messages such as "Too many requests (count: ...)".
   *
   * Previously a message without any parenthesis caused a
   * StringIndexOutOfBoundsException (`indexOf` returns -1, and
   * `substring(0, -1)` throws); such messages are now returned unchanged.
   */
  def prefix(msg: String): String = {
    val cut = msg.indexOf('(')
    if (cut < 0) msg else msg.substring(0, cut)
  }
}
/**
 * System tests verifying that per-minute rate throttles and concurrency
 * throttles are enforced for both actions and triggers.
 */
@RunWith(classOf[JUnitRunner])
class ThrottleTests
    extends FlatSpec
    with TestHelpers
    with WskTestHelpers
    with WskActorSystem
    with ScalaFutures
    with Matchers
    with LocalHelper {
  // use an infinite thread pool so that activations do not wait to send the activation requests
  override implicit val executionContext = ExecutionContextFactory.makeCachedThreadPoolExecutionContext()
  implicit val testConfig = PatienceConfig(5.minutes)
  implicit val wskprops = WskProps()
  val wsk = new WskRestOperations
  val defaultAction = Some(TestUtils.getTestActionFilename("hello.js"))
  // Rate throttles are evaluated per wall-clock minute.
  val throttleWindow = 1.minute
  // Due to the overhead of the per minute limit in the controller, we add this overhead here as well.
  val overhead = if (WhiskProperties.getControllerHosts.split(",").length > 1) 1.2 else 1.0
  val maximumInvokesPerMinute = math.ceil(getLimit("limits.actions.invokes.perMinute") * overhead).toInt
  val maximumFiringsPerMinute = math.ceil(getLimit("limits.triggers.fires.perMinute") * overhead).toInt
  val maximumConcurrentInvokes = getLimit("limits.actions.invokes.concurrent")
  println(s"maximumInvokesPerMinute = $maximumInvokesPerMinute")
  println(s"maximumFiringsPerMinute = $maximumFiringsPerMinute")
  println(s"maximumConcurrentInvokes = $maximumConcurrentInvokes")
  /*
   * Retrieve a numeric limit for the key from the property set.
   */
  def getLimit(key: String) = WhiskProperties.getProperty(key).toInt
  /**
   * Extracts the number of throttled results from a sequence of <code>RunResult</code>
   *
   * @param results the sequence of results
   * @param message the message to determine the type of throttling
   */
  def throttledActivations(results: List[RunResult], message: String) = {
    val count = results.count { result =>
      result.exitCode == TestUtils.THROTTLED && result.stderr.contains(prefix(message))
    }
    println(s"number of throttled activations: $count out of ${results.length}")
    count
  }
  /**
   * Waits until all successful activations are finished. Used to prevent the testcases from
   * leaking activations.
   *
   * @param results the sequence of results from invocations or firings
   */
  def waitForActivations(results: ParSeq[RunResult]) = results.foreach { result =>
    if (result.exitCode == SUCCESS_EXIT) {
      withActivation(wsk.activation, result, totalWait = 5.minutes)(identity)
    }
  }
  /**
   * Settles throttles of 1 minute. Waits up to 1 minute depending on the time already waited.
   *
   * @param waitedAlready the time already gone after the last invoke or fire
   */
  def settleThrottles(waitedAlready: FiniteDuration) = {
    val timeToWait = (throttleWindow - waitedAlready).max(Duration.Zero)
    println(s"Waiting for ${timeToWait.toSeconds} seconds, already waited for ${waitedAlready.toSeconds} seconds")
    Thread.sleep(timeToWait.toMillis)
  }
  /**
   * Calculates the <code>Duration</code> between two <code>Instant</code>
   *
   * @param start the Instant something started
   * @param end the Instant something ended
   */
  def durationBetween(start: Instant, end: Instant) = Duration.fromNanos(java.time.Duration.between(start, end).toNanos)
  /**
   * Invokes the given action up to 'count' times until one of the invokes is throttled.
   *
   * @param count maximum invocations to make
   */
  def untilThrottled(count: Int, retries: Int = 3)(run: () => RunResult): List[RunResult] = {
    // The promise is completed by the first throttled result; later futures
    // observe it and skip their run() call instead of piling on more load.
    val p = Promise[Unit]
    val results = List.fill(count)(Future {
      if (!p.isCompleted) {
        val rr = run()
        if (rr.exitCode != SUCCESS_EXIT) {
          println(s"exitCode = ${rr.exitCode} stderr = ${rr.stderr.trim}")
        }
        if (rr.exitCode == THROTTLED) {
          p.trySuccess(())
        }
        Some(rr)
      } else {
        println("already throttled, skipping additional runs")
        None
      }
    })
    val finished = Future.sequence(results).futureValue.flatten
    // some activations may need to be retried
    val failed = finished filter { rr =>
      rr.exitCode != SUCCESS_EXIT && rr.exitCode != THROTTLED
    }
    println(
      s"Executed ${finished.length} requests, maximum was $count, need to retry ${failed.length} (retries left: $retries)")
    if (failed.isEmpty || retries <= 0) {
      finished
    } else {
      finished ++ untilThrottled(failed.length, retries - 1)(run)
    }
  }
  behavior of "Throttles"
  it should "throttle multiple activations of one action" in withAssetCleaner(wskprops) { (wp, assetHelper) =>
    val name = "checkPerMinuteActionThrottle"
    assetHelper.withCleaner(wsk.action, name) { (action, _) =>
      action.create(name, defaultAction)
    }
    // Three things to be careful of:
    //   1) We do not know the minute boundary so we perform twice max so that it will trigger no matter where they fall
    //   2) We cannot issue too quickly or else the concurrency throttle will be triggered
    //   3) In the worst case, we do about almost the limit in the first min and just exceed the limit in the second min.
    val totalInvokes = 2 * maximumInvokesPerMinute
    val numGroups = (totalInvokes / maximumConcurrentInvokes) + 1
    val invokesPerGroup = (totalInvokes / numGroups) + 1
    val interGroupSleep = 5.seconds
    val results = (1 to numGroups).flatMap { i =>
      if (i != 1) { Thread.sleep(interGroupSleep.toMillis) }
      untilThrottled(invokesPerGroup) { () =>
        wsk.action.invoke(name, Map("payload" -> "testWord".toJson), expectedExitCode = DONTCARE_EXIT)
      }
    }.toList
    val afterInvokes = Instant.now
    try {
      val throttledCount = throttledActivations(results, tooManyRequests(0, 0))
      throttledCount should be > 0
    } finally {
      val alreadyWaited = durationBetween(afterInvokes, Instant.now)
      settleThrottles(alreadyWaited)
      println("clearing activations")
    }
    // wait for the activations last, if these fail, the throttle should be settled
    // and this gives the activations time to complete and may avoid unnecessarily polling
    println("waiting for activations to complete")
    waitForActivations(results.par)
  }
  it should "throttle multiple activations of one trigger" in withAssetCleaner(wskprops) { (wp, assetHelper) =>
    val name = "checkPerMinuteTriggerThrottle"
    assetHelper.withCleaner(wsk.trigger, name) { (trigger, _) =>
      trigger.create(name)
    }
    // invokes per minute * 2 because the current minute could advance which resets the throttle
    val results = untilThrottled(maximumFiringsPerMinute * 2 + 1) { () =>
      wsk.trigger.fire(name, Map("payload" -> "testWord".toJson), expectedExitCode = DONTCARE_EXIT)
    }
    val afterFirings = Instant.now
    try {
      val throttledCount = throttledActivations(results, tooManyRequests(0, 0))
      throttledCount should be > 0
    } finally {
      // no need to wait for activations of triggers since they consume no resources
      // (because there is no rule attached in this test)
      val alreadyWaited = durationBetween(afterFirings, Instant.now)
      settleThrottles(alreadyWaited)
    }
  }
  it should "throttle 'concurrent' activations of one action" in withAssetCleaner(wskprops) { (wp, assetHelper) =>
    val name = "checkConcurrentActionThrottle"
    assetHelper.withCleaner(wsk.action, name) {
      val timeoutAction = Some(TestUtils.getTestActionFilename("sleep.js"))
      (action, _) =>
        action.create(name, timeoutAction)
    }
    // The sleep is necessary as the load balancer currently has a latency before recognizing concurrency.
    val sleep = 15.seconds
    // Adding a bit of overcommit since some loadbalancers rely on some overcommit. This won't hurt those who don't
    // since all activations are taken into account to check for throttled invokes below.
    val slowInvokes = (maximumConcurrentInvokes * 1.2).toInt
    val fastInvokes = 4
    val fastInvokeDuration = 4.seconds
    val slowInvokeDuration = sleep + fastInvokeDuration
    // These invokes will stay active long enough that all are issued and load balancer has recognized concurrency.
    val startSlowInvokes = Instant.now
    val slowResults = untilThrottled(slowInvokes) { () =>
      wsk.action.invoke(
        name,
        Map("sleepTimeInMs" -> slowInvokeDuration.toMillis.toJson),
        expectedExitCode = DONTCARE_EXIT)
    }
    val afterSlowInvokes = Instant.now
    val slowIssueDuration = durationBetween(startSlowInvokes, afterSlowInvokes)
    println(
      s"$slowInvokes slow invokes (dur = ${slowInvokeDuration.toSeconds} sec) took ${slowIssueDuration.toSeconds} seconds to issue")
    // Sleep to let the background thread get the newest values (refreshes every 2 seconds)
    println(s"Sleeping for ${sleep.toSeconds} sec")
    Thread.sleep(sleep.toMillis)
    // These fast invokes will trigger the concurrency-based throttling.
    val startFastInvokes = Instant.now
    val fastResults = untilThrottled(fastInvokes) { () =>
      wsk.action.invoke(
        name,
        Map("sleepTimeInMs" -> fastInvokeDuration.toMillis.toJson),
        expectedExitCode = DONTCARE_EXIT)
    }
    val afterFastInvokes = Instant.now
    val fastIssueDuration = durationBetween(afterFastInvokes, startFastInvokes)
    println(
      s"$fastInvokes fast invokes (dur = ${fastInvokeDuration.toSeconds} sec) took ${fastIssueDuration.toSeconds} seconds to issue")
    val combinedResults = slowResults ++ fastResults
    try {
      val throttledCount = throttledActivations(combinedResults, tooManyConcurrentRequests(0, 0))
      throttledCount should be > 0
    } finally {
      val alreadyWaited = durationBetween(afterSlowInvokes, Instant.now)
      settleThrottles(alreadyWaited)
      println("clearing activations")
    }
    // wait for the activations last, giving the activations time to complete and
    // may avoid unnecessarily polling; if these fail, the throttle may not be settled
    println("waiting for activations to complete")
    waitForActivations(combinedResults.par)
  }
}
/**
 * System tests verifying that per-namespace limit overrides (set via
 * `wskadmin limits set`) take precedence over the system defaults for
 * rate throttles, concurrency throttles and activation persistence.
 */
@RunWith(classOf[JUnitRunner])
class NamespaceSpecificThrottleTests
    extends FlatSpec
    with TestHelpers
    with WskTestHelpers
    with WskActorSystem
    with Matchers
    with BeforeAndAfterAll
    with LocalHelper {
  val defaultAction = Some(TestUtils.getTestActionFilename("hello.js"))
  val wsk = new WskRestOperations
  // Deletes the given test subjects and their limit overrides; failures are
  // collected per-namespace and only enforced when a success exit is expected.
  def sanitizeNamespaces(namespaces: Seq[String], expectedExitCode: Int = SUCCESS_EXIT): Unit = {
    val deletions = namespaces.map { ns =>
      Try {
        disposeAdditionalTestSubject(ns, expectedExitCode)
        withClue(s"failed to delete temporary limits for $ns") {
          wskadmin.cli(Seq("limits", "delete", ns), expectedExitCode)
        }
      }
    }
    if (expectedExitCode == SUCCESS_EXIT) every(deletions) shouldBe a[Success[_]]
  }
  // Best-effort cleanup of leftovers from previous runs before creating subjects.
  sanitizeNamespaces(
    Seq("zeroSubject", "zeroConcSubject", "oneSubject", "oneSequenceSubject", "activationDisabled"),
    expectedExitCode = DONTCARE_EXIT)
  // Create a subject with rate limits == 0
  val zeroProps = getAdditionalTestSubject("zeroSubject")
  wskadmin.cli(
    Seq(
      "limits",
      "set",
      zeroProps.namespace,
      "--invocationsPerMinute",
      "0",
      "--firesPerMinute",
      "0",
      "--concurrentInvocations",
      "0"))
  // Create a subject where only the concurrency limit is set to 0
  val zeroConcProps = getAdditionalTestSubject("zeroConcSubject")
  wskadmin.cli(Seq("limits", "set", zeroConcProps.namespace, "--concurrentInvocations", "0"))
  // Create a subject where the rate limits are set to 1
  val oneProps = getAdditionalTestSubject("oneSubject")
  wskadmin.cli(Seq("limits", "set", oneProps.namespace, "--invocationsPerMinute", "1", "--firesPerMinute", "1"))
  // Create a subject where the rate limits are set to 1 for testing sequences
  val oneSequenceProps = getAdditionalTestSubject("oneSequenceSubject")
  wskadmin.cli(Seq("limits", "set", oneSequenceProps.namespace, "--invocationsPerMinute", "1", "--firesPerMinute", "1"))
  // Create a subject where storing of activations in activationstore is disabled.
  val activationDisabled = getAdditionalTestSubject("activationDisabled")
  wskadmin.cli(Seq("limits", "set", activationDisabled.namespace, "--storeActivations", "false"))
  override def afterAll() = {
    sanitizeNamespaces(Seq(zeroProps, zeroConcProps, oneProps, oneSequenceProps, activationDisabled).map(_.namespace))
  }
  behavior of "Namespace-specific throttles"
  it should "respect overridden rate-throttles of 0" in withAssetCleaner(zeroProps) { (wp, assetHelper) =>
    implicit val props = wp
    val triggerName = "zeroTrigger"
    val actionName = "zeroAction"
    assetHelper.withCleaner(wsk.action, actionName) { (action, _) =>
      action.create(actionName, defaultAction)
    }
    assetHelper.withCleaner(wsk.trigger, triggerName) { (trigger, _) =>
      trigger.create(triggerName)
    }
    wsk.action.invoke(actionName, expectedExitCode = TooManyRequests.intValue).stderr should {
      include(prefix(tooManyRequests(0, 0))) and include("allowed: 0")
    }
    wsk.trigger.fire(triggerName, expectedExitCode = TooManyRequests.intValue).stderr should {
      include(prefix(tooManyRequests(0, 0))) and include("allowed: 0")
    }
  }
  it should "respect overridden rate-throttles of 1" in withAssetCleaner(oneProps) { (wp, assetHelper) =>
    implicit val props = wp
    val triggerName = "oneTrigger"
    val actionName = "oneAction"
    assetHelper.withCleaner(wsk.action, actionName) { (action, _) =>
      action.create(actionName, defaultAction)
    }
    assetHelper.withCleaner(wsk.trigger, triggerName) { (trigger, _) =>
      trigger.create(triggerName)
    }
    val deployedControllers = WhiskProperties.getControllerHosts.split(",").length
    // One invoke should be allowed, the second one throttled.
    // Due to the current implementation of the rate throttling,
    // it is possible that the counter gets deleted, because the minute switches.
    retry({
      val results = (1 to deployedControllers + 1).map { _ =>
        wsk.action.invoke(actionName, expectedExitCode = TestUtils.DONTCARE_EXIT)
      }
      results.map(_.exitCode) should contain(TestUtils.THROTTLED)
      results.map(_.stderr).mkString should {
        include(prefix(tooManyRequests(0, 0))) and include("allowed: 1")
      }
    }, 2, Some(1.second))
    // One fire should be allowed, the second one throttled.
    // Due to the current implementation of the rate throttling,
    // it is possible, that the counter gets deleted, because the minute switches.
    retry({
      val results = (1 to deployedControllers + 1).map { _ =>
        wsk.trigger.fire(triggerName, expectedExitCode = TestUtils.DONTCARE_EXIT)
      }
      results.map(_.exitCode) should contain(TestUtils.THROTTLED)
      results.map(_.stderr).mkString should {
        include(prefix(tooManyRequests(0, 0))) and include("allowed: 1")
      }
    }, 2, Some(1.second))
  }
  // One sequence invocation should count as one invocation for rate throttling purposes.
  // This is independent of the number of actions in the sequences.
  it should "respect overridden rate-throttles of 1 for sequences" in withAssetCleaner(oneSequenceProps) {
    (wp, assetHelper) =>
      implicit val props = wp
      val actionName = "oneAction"
      val sequenceName = "oneSequence"
      assetHelper.withCleaner(wsk.action, actionName) { (action, _) =>
        action.create(actionName, defaultAction)
      }
      assetHelper.withCleaner(wsk.action, sequenceName) { (action, _) =>
        action.create(sequenceName, Some(s"$actionName,$actionName"), kind = Some("sequence"))
      }
      val deployedControllers = WhiskProperties.getControllerHosts.split(",").length
      // One invoke should be allowed.
      wsk.action
        .invoke(sequenceName, expectedExitCode = TestUtils.DONTCARE_EXIT)
        .exitCode shouldBe TestUtils.SUCCESS_EXIT
      // One invoke should be allowed, the second one throttled.
      // Due to the current implementation of the rate throttling,
      // it is possible that the counter gets deleted, because the minute switches.
      retry({
        val results = (1 to deployedControllers + 1).map { _ =>
          wsk.action.invoke(sequenceName, expectedExitCode = TestUtils.DONTCARE_EXIT)
        }
        results.map(_.exitCode) should contain(TestUtils.THROTTLED)
        results.map(_.stderr).mkString should {
          include(prefix(tooManyRequests(0, 0))) and include("allowed: 1")
        }
      }, 2, Some(1.second))
  }
  it should "respect overridden concurrent throttle of 0" in withAssetCleaner(zeroConcProps) { (wp, assetHelper) =>
    implicit val props = wp
    val actionName = "zeroConcurrentAction"
    assetHelper.withCleaner(wsk.action, actionName) { (action, _) =>
      action.create(actionName, defaultAction)
    }
    wsk.action.invoke(actionName, expectedExitCode = TooManyRequests.intValue).stderr should {
      include(prefix(tooManyConcurrentRequests(0, 0))) and include("allowed: 0")
    }
  }
  it should "not store an activation if disabled for this namespace" in withAssetCleaner(activationDisabled) {
    (wp, assetHelper) =>
      implicit val props = wp
      val actionName = "activationDisabled"
      assetHelper.withCleaner(wsk.action, actionName) { (action, _) =>
        action.create(actionName, defaultAction)
      }
      val runResult = wsk.action.invoke(actionName)
      val activationId = wsk.activation.extractActivationId(runResult)
      withClue(s"did not find an activation id in '$runResult'") {
        activationId shouldBe a[Some[_]]
      }
      // The invoke succeeded but the activation record must not be retrievable.
      val activation = wsk.activation.waitForActivation(activationId.get)
      activation shouldBe 'Left
  }
}
| openwhisk/openwhisk | tests/src/test/scala/limits/ThrottleTests.scala | Scala | apache-2.0 | 19,503 |
package com.ubirch.user.core.actor
import com.ubirch.user.core.manager.UserManager
import com.ubirch.user.model.db.{Activate, Deactivate, User}
import com.ubirch.user.model.rest.ActivationUpdate
import com.ubirch.user.model.rest.ActivationUpdate._
import com.ubirch.util.date.DateUtil
import com.ubirch.util.mongo.connection.MongoUtil
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
/**
 * Business logic for bulk (de)activation of users, addressed by external id.
 *
 * The flow is: load all referenced users, validate every requested update,
 * and only if ALL of them are valid persist the whole batch. The result is
 * a CSV report: Left(csv) lists validation errors, Right(csv) lists the
 * applied updates.
 */
trait UserActorLogic {
  protected[actor] def updateActivation(updates: ActivationUpdate)(implicit mongo: MongoUtil): Future[Either[String, String]] = {
    // externalId -> (activate?, optional scheduled execution date);
    // only the keys are used below to load the affected users.
    val activationMap = updates.updates.map(u => u.externalId -> (u.activate, u.executionDate)).toMap
    for {
      users <- UserManager.findByExternalIds(activationMap.keys.toSeq)
      result <- validate(updates, users).flatMap {
        case Left(errorCsv) => Future.successful(Left(errorCsv))
        case _ => updateUsers(updates, users).map(successMsg => Right(header + lineBreak + successMsg.mkString(lineBreak)))
      }
    } yield result
  }
  // Validates the whole batch: an update is rejected when its execution date
  // lies in the past, when the user already is in the requested state, or
  // when no user with the given external id exists. All-or-nothing: any
  // error fails the complete batch with a CSV error report.
  private[actor] def validate(update: ActivationUpdate, users: Seq[User]): Future[Either[String, Unit]] = {
    val now = DateUtil.nowUTC
    val errors = update.updates.map { u =>
      if (u.executionDate.isDefined && u.executionDate.get.isBefore(now))
        Some(u.toErrorCsv + dateInPast)
      else
        users.find(_.externalId == u.externalId) match {
          case Some(user) if user.activeUser.contains(u.activate) =>
            Some(u.toErrorCsv + targetStateWrong(user.activeUser.get))
          case None => Some(u.toErrorCsv + extIdNotExisting)
          case _ => None
        }
    }.collect { case Some(el) => el }
    if (errors.nonEmpty) Future.successful(Left(errorHeader + lineBreak + errors.mkString(lineBreak)))
    else Future.successful(Right(())))
  }
  // Applies the validated batch: updates with an execution date are only
  // scheduled (action + executionDate set), immediate updates switch
  // activeUser directly and clear any pending schedule. Returns one CSV
  // success line per updated user; failures surface as failed Futures.
  private def updateUsers(updates: ActivationUpdate, users: Seq[User])(implicit mongo: MongoUtil): Future[Seq[String]] = {
    val usersAndMessages = users.map { user =>
      updates.updates.find(_.externalId == user.externalId) match {
        case Some(update) if update.executionDate.isDefined =>
          val action = if (update.activate) Activate else Deactivate
          val updated = user.copy(action = Some(action), executionDate = update.executionDate)
          val successMsg = update.toCSV(user.executionDate)
          (updated, successMsg)
        case Some(update) =>
          val updated = user.copy(activeUser = Some(update.activate), executionDate = None, action = None)
          val successMsg = update.toCSV(user.executionDate)
          (updated, successMsg)
        case None => throw new Exception("update not found; should be excluded by previous validation")
      }
    }
    UserManager
      .updateMany(usersAndMessages.map(_._1))
      .map {
        case Right(_) => usersAndMessages.map(_._2)
        case Left(errorMsg) => throw new Exception(errorMsg)
      }
  }
}
| ubirch/ubirch-user-service | core/src/main/scala/com/ubirch/user/core/actor/UserActorLogic.scala | Scala | apache-2.0 | 2,935 |
package ch.epfl.performanceNetwork
import ch.epfl.performanceNetwork.printers.DataPrinter
import ch.epfl.performanceNetwork.printers.SingleFileWritter
import org.eclipse.jgit.api.Git
import scala.collection.JavaConverters._
import org.eclipse.jgit.api.ListBranchCommand.ListMode
import ch.epfl.performanceNetwork.gitInterface.NetworkDownloader
import ch.epfl.performanceNetwork.benchmarkInterface.BenchDataDownloader
import scala.io.Source
import java.util.regex.Pattern
import java.awt.Desktop
import java.io.File
import java.lang.ProcessBuilder.Redirect
/**
* @author Thibault Urien
*
*/
/**
 * Entry point: reads `setting.js`, extracts the attributes of its
 * `SharedSetting` object literal, then fetches the benchmark data and the
 * git commit network on two parallel threads and writes both out as
 * JavaScript data files for the visualization page.
 */
object Main {
  def main(args: Array[String]): Unit = {
    val workingDir = ""
    // Keep only the lines belonging to the SharedSetting object literal.
    val params =
      Source.fromFile(workingDir + "setting.js")
        .getLines()
        .toSeq
        .dropWhile { s => !s.contains("SharedSetting") }
        .takeWhile { s => !s.trim().startsWith("}") }
    // Converts a '/'-separated path to the platform-specific separator.
    def parsPath(path: String) = path.split("/").mkString(File.separator)
    /*
     * Attempt to find an attribute named paramName
     * in what is assumed to be the JavaScript structure SharedSetting.
     * Remove the "" or '' if it's a string.
     * */
    def find(paramName: String) = {
      val p = Pattern.compile("\\"" + paramName + "\\"\\\\s*:\\\\s*([^,]*[^\\\\s^,])\\\\s*(,|$)")
      params.map {
        s =>
          val matches = p.matcher(s)
          if (matches.find())
            Some(matches.group(1))
          else
            None
      }.find { x => x != None } match {
        case Some(Some(result)) =>
          if (result.startsWith("\\"") && result.endsWith("\\"") && result.size > 1)
            result.drop(1).dropRight(1)
          else if (result.startsWith("'") && result.endsWith("'") && result.size > 1)
            result.drop(1).dropRight(1)
          else
            result
        case _ => throw new MalformedSettingException("Unable to find " + paramName + " in the setting file")
      }
    }
    val repoDir = parsPath(find("repoDir"))
    val repoUrl = find("repoUrl")
    val dataUrlDomain = find("dataUrlDomain")
    val mainFileUrl = find("mainFileUrl")
    val indexFileLocalName = find("indexFileLocalName")
    val fileNameRegex = find("fileNameRegex")
    val mainFileIsIndex = find("mainFileIsIndex").toBoolean
    val vertexesFile = parsPath(find("vertexesFile"))
    val edgesFile = parsPath(find("edgesFile"))
    val testsFile = parsPath(find("testsFile"))
    val prameters = find("prameters")
    val testSeparator = find("testSeparator")
    val paramSeparator = find("paramSeparator")
    val showResultWhenDone = find("showResultWhenDone").toBoolean
    // Only the first character of the group delimiters is significant.
    val groupBegin = find("groupBegin") match {
      case "" => ""
      case s => "" + s(0)
    }
    val completeResultSeparator = find("completeResultSeparator")
    val groupEnd = find("groupEnd") match {
      case "" => ""
      case s => "" + s(0)
    }
    // Writes the printer's data to <workingDir>/<file>.js.
    def printToFile(printer: DataPrinter, file: String) =
      {
        val writer = new SingleFileWritter(printer.writtenFields, file, workingDir, ".js")
        printer.printData(writer)
        writer.close
        println("Succesfully wrote " + workingDir + file + ".js")
      }
    // t1 fetches the benchmark results, t2 the git network; they run in parallel.
    val t1 = new Thread(new Runnable() {
      def run {
        val tests = BenchDataDownloader.fetch(
          dataUrlDomain,
          mainFileUrl,
          mainFileIsIndex,
          indexFileLocalName,
          workingDir,
          prameters,
          testSeparator,
          fileNameRegex,
          paramSeparator,
          groupBegin,
          completeResultSeparator,
          groupEnd)
        printToFile(tests, testsFile)
      }
    })
    val t2 = new Thread(new Runnable() {
      def run {
        val (vertexes, edges) = NetworkDownloader(repoUrl, workingDir, repoDir)
        printToFile(vertexes, vertexesFile)
        printToFile(edges, edgesFile)
      }
    })
    t1.start()
    t2.start()
    t1.join()
    t2.join()
    // Optionally open the generated result page in the default browser.
    if (showResultWhenDone && Desktop.isDesktopSupported()) {
      val page = new File(workingDir + "index.htm").getCanonicalFile.toURI()
      Desktop.getDesktop().browse(page);
    }
  }
  class MalformedSettingException(message: String) extends Exception(message)
} | ThibaultUrien/SemesterProject | jvm/src/main/scala/ch/epfl/performanceNetwork/Main.scala | Scala | bsd-3-clause | 4,202 |
/*
* Copyright (c) 2015 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cats.derived
import TestDefns._
/**
 * Tests for the derived `Iterable` instances: each case converts a container
 * shape `F[T]` into an `Iterable[T]` and checks the traversal it produces.
 */
class IterableTests extends CatsSuite {
  import iterable._
  test("IList[T] => Iterable[T]") {
    // test trampolining
    val llarge = 1 to 10000
    val llargeTotal = llarge.sum
    val large = IList.fromSeq(llarge)
    val I: Iterable[Int] = large
    val i = I.iterator
    val total = i.sum
    assert(total == llargeTotal)
    // A fresh iterator must replay the same elements in the same order.
    val i2 = I.iterator
    assert(i2 sameElements llarge.iterator)
  }
  test("(T, T, T) => Iterable[T]") {
    type F[T] = (T, T, T)
    val I: Iterable[Int] = MkIterable[F].iterable((1, 2, 3))
    val i = I.iterator
    val total = i.sum
    assert(total == 6)
  }
  test("List[(T, Option[T])] => Iterable[T]") {
    type F[T] = List[(T, Option[T])]
    val I: Iterable[Int] = MkIterable[F].iterable(List((1, Some(2)), (3, None)))
    val i = I.iterator
    val total = i.sum
    assert(total == 6)
  }
  test("(List[T], List[T], List[T]) => Iterable[T]") {
    type F[T] = (List[T], List[T], List[T])
    val I: Iterable[Int] = MkIterable[F].iterable((List(1, 2, 3), List(4), List(5, 6)))
    val i = I.iterator
    val total = i.sum
    assert(total == 21)
  }
  test("List[List[T]] => Iterable[T]") {
    type F[T] = List[List[T]]
    val I: Iterable[Int] = MkIterable[F].iterable(List(List(1, 2, 3), List(4), List(5, 6)))
    val i = I.iterator
    val total = i.sum
    assert(total == 21)
  }
  test("Tree[T] => Iterable[T]") {
    val tree: Tree[String] =
      Node(
        Leaf("quux"),
        Node(
          Leaf("foo"),
          Leaf("wibble")
        )
      )
    val I: Iterable[String] = MkIterable[Tree].iterable(tree)
    // "quux" + "foo" + "wibble" = 4 + 3 + 6 = 13 characters in total.
    // (Removed an unused `val i = I.iterator` that created an iterator
    // which was never consumed.)
    assert(I.map(_.length).sum == 13)
  }
}
| ceedubs/kittens | src/test/scala/cats/derived/iterable.scala | Scala | apache-2.0 | 2,298 |
package com.lateralthoughts.points
import com.lateralthoughts.points.repositories.{RewardingActionCategoryRepository, RewardingActionRepository}
import com.lateralthoughts.points.services.{RewardingActionCategoryService, RewardingActionService}
/**
 * Contains the injected beans of the application.
 *
 * Acts as a small manual dependency-injection registry: repositories and
 * services are created lazily on first access, and the services receive
 * this config object itself so they can look up their dependencies.
 */
object PointsConfig {
  lazy val rewardingActionRepository = RewardingActionRepository
  lazy val rewardingActionCategoryRepository = RewardingActionCategoryRepository
  // Services are handed the whole config (`this`) rather than individual beans.
  lazy val rewardingActionService = new RewardingActionService(this)
  lazy val rewardingActionCategoryService = new RewardingActionCategoryService(this)
}
| vincentdoba/points | points-server/src/main/scala/com/lateralthoughts/points/PointsConfig.scala | Scala | mit | 630 |
package com.alexitc.coinalerts.errors
import com.alexitc.playsonify.models.{ConflictError, FieldValidationError, NotFoundError, PublicError}
import play.api.i18n.{Lang, MessagesApi}
/** Errors that can occur while managing new-currency alerts. */
sealed trait NewCurrencyAlertError
/** Raised when an alert for the given exchange already exists (HTTP conflict). */
case object RepeatedExchangeError extends NewCurrencyAlertError with ConflictError {
  override def toPublicErrorList(messagesApi: MessagesApi)(implicit lang: Lang): List[PublicError] =
    List(
      FieldValidationError(
        "exchange",
        messagesApi("error.newCurrencyAlert.repeatedExchange")))
}
/** Raised when no alert exists for the given exchange (HTTP not found). */
case object NewCurrencyAlertNotFoundError extends NewCurrencyAlertError with NotFoundError {
  override def toPublicErrorList(messagesApi: MessagesApi)(implicit lang: Lang): List[PublicError] =
    List(
      FieldValidationError(
        "exchange",
        messagesApi("error.newCurrencyAlert.notFound")))
}
| AlexITC/crypto-coin-alerts | alerts-server/app/com/alexitc/coinalerts/errors/newCurrencyAlertErrors.scala | Scala | gpl-3.0 | 905 |
package examples
import java.security.{KeyPairGenerator, SecureRandom}
import java.util.Base64
/** Generates a fresh 2048-bit RSA key pair and prints both halves, Base64-encoded. */
object KeyGenApp extends App {
  val (publicKey, privateKey) = {
    val pair = {
      val generator = KeyPairGenerator.getInstance("RSA")
      generator.initialize(2048, new SecureRandom())
      generator.generateKeyPair()
    }
    (pair.getPublic, pair.getPrivate)
  }
  private val base64 = Base64.getEncoder
  // Same output order as before: private key first, then public key.
  println(base64.encodeToString(privateKey.getEncoded))
  println(base64.encodeToString(publicKey.getEncoded))
}
| adilakhter/scalaznoob | src/main/scala/examples/KeyGenApp.scala | Scala | apache-2.0 | 509 |
package org.jetbrains.plugins.scala
package project
import com.intellij.openapi.roots.libraries.PersistentLibraryKind
/**
* @author Pavel Fatin
*/
// Singleton registration of the "Scala" persistent library kind.
object ScalaLibraryKind extends PersistentLibraryKind[ScalaLibraryProperties]("Scala") with ScalaLibraryKind
/** Mix-in supplying [[ScalaLibraryProperties]] for Scala library kinds. */
trait ScalaLibraryKind extends PersistentLibraryKind[ScalaLibraryProperties] {
  // Each call returns a fresh properties instance.
  override def createDefaultProperties() = new ScalaLibraryProperties()
} | whorbowicz/intellij-scala | src/org/jetbrains/plugins/scala/project/ScalaLibraryKind.scala | Scala | apache-2.0 | 413 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import java.util.Map.Entry
import com.typesafe.scalalogging.LazyLogging
import org.apache.accumulo.core.client.IteratorSetting
import org.apache.accumulo.core.data.{Key, Value}
import org.apache.commons.codec.binary.Base64
import org.geotools.factory.Hints
import org.locationtech.geomesa.accumulo.AccumuloFeatureIndexType
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.utils.collection.CloseableIterator
import org.locationtech.geomesa.utils.geotools.{GeometryUtils, SimpleFeatureTypes}
import org.locationtech.geomesa.utils.stats._
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
/**
* Reads simple features and observe them with a Stat server-side
*
* Only works with z3IdxStrategy for now (queries that date filters)
*/
class KryoLazyStatsIterator extends KryoLazyAggregatingIterator[Stat] {
  import org.locationtech.geomesa.accumulo.iterators.KryoLazyStatsIterator._

  // Serializer for the computed stat; created from the feature type in init().
  var serializer: StatSerializer = null

  /** Creates the stat to aggregate into, as defined by the STATS_STRING_KEY option. */
  override def init(options: Map[String, String]): Stat = {
    sft = IteratorCache.sft(options(KryoLazyAggregatingIterator.SFT_OPT))
    serializer = StatSerializer(sft)
    Stat(sft, options(STATS_STRING_KEY))
  }

  // Each matching feature is observed by the stat.
  override def aggregateResult(sf: SimpleFeature, result: Stat): Unit = result.observe(sf)

  // The aggregated stat is shipped back to the client in serialized form.
  override def encodeResult(result: Stat): Array[Byte] = serializer.serialize(result)
}
object KryoLazyStatsIterator extends LazyLogging {
  import org.locationtech.geomesa.index.conf.QueryHints.{ENCODE_STATS, STATS_STRING}

  val DEFAULT_PRIORITY = 30
  // Iterator option holding the stat definition string.
  val STATS_STRING_KEY = "geomesa.stats.string"
  val STATS_FEATURE_TYPE_KEY = "geomesa.stats.featuretype"

  // Need a filler namespace, else geoserver throws NPE for xml output
  val StatsSft = SimpleFeatureTypes.createType("stats:stats", "stats:String,geom:Geometry")

  /**
   * Creates the server-side iterator setting for stats aggregation.
   * The stat to compute is taken from the STATS_STRING query hint.
   */
  def configure(sft: SimpleFeatureType,
                index: AccumuloFeatureIndexType,
                filter: Option[Filter],
                hints: Hints,
                deduplicate: Boolean,
                priority: Int = DEFAULT_PRIORITY): IteratorSetting = {
    val is = new IteratorSetting(priority, "stats-iter", classOf[KryoLazyStatsIterator])
    KryoLazyAggregatingIterator.configure(is, sft, index, filter, deduplicate, None)
    is.addOption(STATS_STRING_KEY, hints.get(STATS_STRING).asInstanceOf[String])
    is
  }

  /**
   * Adapts accumulo key/value entries into simple features of type [[StatsSft]].
   * NOTE(review): a single mutable feature instance is reused for every entry.
   */
  def kvsToFeatures(sft: SimpleFeatureType): (Entry[Key, Value]) => SimpleFeature = {
    val sf = new ScalaSimpleFeature("", StatsSft)
    sf.setAttribute(1, GeometryUtils.zeroPoint)
    (e: Entry[Key, Value]) => {
      // value is the already serialized stat
      sf.setAttribute(0, Base64.encodeBase64URLSafeString(e.getValue.get()))
      sf
    }
  }

  /**
   * Encodes a stat as a base64 string.
   *
   * Creates a new serializer each time, so don't call repeatedly.
   *
   * @param stat stat to encode
   * @param sft simple feature type of underlying schema
   * @return base64 string
   */
  def encodeStat(stat: Stat, sft: SimpleFeatureType): String =
    Base64.encodeBase64URLSafeString(StatSerializer(sft).serialize(stat))

  /**
   * Decodes a stat string from a result simple feature.
   *
   * Creates a new serializer each time, so not used internally.
   *
   * @param encoded encoded string
   * @param sft simple feature type of the underlying schema
   * @return stat
   */
  def decodeStat(encoded: String, sft: SimpleFeatureType): Stat =
    StatSerializer(sft).deserialize(Base64.decodeBase64(encoded))

  /**
   * Reduces computed simple features which contain stat information into one on the client
   *
   * @param features iterator of features received per tablet server from query
   * @param hints query hints that the stats are being run against
   * @return aggregated iterator of features
   */
  def reduceFeatures(sft: SimpleFeatureType, hints: Hints)
                    (features: CloseableIterator[SimpleFeature]): CloseableIterator[SimpleFeature] = {
    val serializer = StatSerializer(sft)
    val decodedStats = features.map { f =>
      serializer.deserialize(Base64.decodeBase64(f.getAttribute(0).toString))
    }
    val sum = if (decodedStats.isEmpty) {
      // create empty stat based on the original input so that we always return something
      Stat(sft, hints.get(STATS_STRING).asInstanceOf[String])
    } else {
      // fold all per-tablet stats into the first one
      val sum = decodedStats.next()
      decodedStats.foreach(sum += _)
      sum
    }
    decodedStats.close()
    // result attribute is either the base64-encoded stat or its json form, per the ENCODE_STATS hint
    val stats = if (hints.containsKey(ENCODE_STATS) && hints.get(ENCODE_STATS).asInstanceOf[Boolean]) encodeStat(sum, sft) else sum.toJson
    Iterator(new ScalaSimpleFeature("stat", StatsSft, Array(stats, GeometryUtils.zeroPoint)))
  }
}
/*
* Artificial Intelligence for Humans
* Volume 2: Nature Inspired Algorithms
* Java Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
*
* Copyright 2014 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh.error
/**
* An abstract error calculation class that provides some basic functionality.
*/
/**
 * Base class for error calculators. Accumulates the sum of squared deltas
 * between ideal and actual values, along with the number of values observed.
 */
abstract class AbstractErrorCalculation extends ErrorCalculation {
  /** Running sum of squared errors since the last clear. */
  protected var globalError: Double = 0.0

  /** Number of ideal values accumulated since the last clear. */
  protected var setSize: Int = 0

  override def updateError(actual: Array[Double], ideal: Array[Double], significance: Double) {
    // NOTE(review): like the single-value overload, this accumulates into the
    // running totals rather than resetting them first — confirm that is intended.
    for (i <- actual.indices) {
      val weightedDelta = (ideal(i) - actual(i)) * significance
      globalError += weightedDelta * weightedDelta
    }
    setSize += ideal.length
  }

  override def updateError(actual: Double, ideal: Double) {
    val delta = ideal - actual
    globalError += delta * delta
    setSize += 1
  }

  /** Resets the accumulated error and count. */
  override def clear() {
    globalError = 0.0
    setSize = 0
  }

  /** @return the number of values accumulated so far. */
  override def getSetSize: Int = setSize
}
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.model
import com.netflix.atlas.core.util.SmallHashMap
import org.openjdk.jmh.annotations.Benchmark
import org.openjdk.jmh.annotations.Scope
import org.openjdk.jmh.annotations.State
import org.openjdk.jmh.annotations.Threads
import org.openjdk.jmh.infra.Blackhole
/**
* Check performance of special case in KeyValueQuery to use SmallHashMap.getOrNull when
* possible instead of Map.get. This avoids an allocation for the Option and a little bit
* of overhead for the lambda invocation. For tight loops such as checking the query for a
* high volume streaming path it provides a noticeable benefit.
*
* ```
* > jmh:run -prof gc -wi 10 -i 10 -f1 -t1 .*KeyValueQuery.*
* ```
*/
@State(Scope.Thread)
class KeyValueQuery {

  // Representative tag map (typical Atlas infrastructure dimensions) used as
  // the matching target for both benchmark variants.
  private val tagMap = Map(
    "nf.app" -> "atlas_backend",
    "nf.cluster" -> "atlas_backend-dev",
    "nf.asg" -> "atlas_backend-dev-v001",
    "nf.stack" -> "dev",
    "nf.region" -> "us-east-1",
    "nf.zone" -> "us-east-1e",
    "nf.node" -> "i-123456789",
    "nf.ami" -> "ami-987654321",
    "nf.vmtype" -> "r3.2xlarge",
    "name" -> "jvm.gc.pause",
    "cause" -> "Allocation_Failure",
    "action" -> "end_of_major_GC",
    "statistic" -> "totalTime"
  )

  // Same data as tagMap, but backed by SmallHashMap to exercise the getOrNull path.
  private val smallTagMap = SmallHashMap(tagMap)

  // Query matching all entries of the tag map above.
  private val query = Query.And(
    Query.Equal("nf.app", "atlas_backend"),
    Query.And(
      Query.Equal("nf.stack", "dev"),
      Query.And(
        Query.Equal("name", "jvm.gc.pause"),
        Query.Equal("cause", "Allocation_Failure")
      )
    )
  )

  // Baseline: match against a regular immutable Map.
  @Threads(1)
  @Benchmark
  def checkMap(bh: Blackhole): Unit = {
    bh.consume(query.matches(tagMap))
  }

  // Optimized: match against a SmallHashMap (avoids Option allocation per lookup).
  @Threads(1)
  @Benchmark
  def checkSmallMap(bh: Blackhole): Unit = {
    bh.consume(query.matches(smallTagMap))
  }
}
| Netflix/atlas | atlas-jmh/src/main/scala/com/netflix/atlas/core/model/KeyValueQuery.scala | Scala | apache-2.0 | 2,420 |
package suiryc.scala.javafx.scene.control
import com.typesafe.scalalogging.LazyLogging
import javafx.scene.control.SplitPane
/** Pane helpers. */
/** Helpers to persist and restore SplitPane divider positions. */
object Panes extends LazyLogging {

  /** Serializes the pane's divider positions as a ';'-separated string. */
  def encodeDividerPositions(pane: SplitPane): String =
    pane.getDividerPositions.mkString(";")

  /** Applies divider positions previously produced by [[encodeDividerPositions]].
    * On any parsing/application failure, logs a warning and leaves the pane untouched. */
  def restoreDividerPositions(pane: SplitPane, dividerPositions: String): Unit =
    try {
      pane.setDividerPositions(dividerPositions.split(';').map(_.toDouble): _*)
    } catch {
      case ex: Exception => logger.warn(s"Could not restore SplitPane divider positions=<$dividerPositions>: ${ex.getMessage}")
    }

}
| suiryc/suiryc-scala | javafx/src/main/scala/suiryc/scala/javafx/scene/control/Panes.scala | Scala | gpl-3.0 | 768 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic
import org.scalatest._
// Verifies that the Explicitly DSL ("decided by" / "determined by" / "after being")
// overrides the implicit Equality/Equivalence used by === and !== under
// conversion-checked triple equals.
class ConversionCheckedTripleEqualsExplicitlySpec extends Spec with Matchers with ConversionCheckedTripleEquals with ExplicitlySpecHelpers {

  object `The Explicitly DSL` {

    object `when used with === on identical types` {
      def `should allow an Equality to be specified explicitly` {
        // intInequality inverts the default: equal ints compare unequal and vice versa.
        assert(1 !== 2)
        assert((1 === 2)(decided by intInequality))
        assert(1 === 1)
        assert((1 !== 1)(decided by intInequality))
        implicit val strIneq = stringInequality
        assert(" Hi" === "hI ")
        assert { (" Hi" !== "hI ") (decided by defaultEquality[String]) }
      }
    }

    object `when used with toType === fromType` {
      def `should allow an Equality to be specified explicitly` {
        // Pomme is implicitly convertible to Fruit("apple"); fruitInequality inverts equality.
        assert(new Fruit("orange") !== new Pomme)
        assert((new Fruit("orange") === new Pomme)(decided by fruitInequality))
        assert(new Fruit("apple") === new Pomme)
        assert((new Fruit("apple") !== new Pomme)(decided by fruitInequality))
      }
    }

    object `when used with fromType === toType` {
      def `should allow an Equality to be specified explicitly` {
        assert(new Pomme !== new Fruit("orange"))
        assert((new Pomme === new Fruit("orange"))(decided by fruitInequality))
        assert(new Pomme === new Fruit("apple"))
        assert((new Pomme !== new Fruit("apple"))(decided by fruitInequality))
      }
    }
  }

  object `The determined by syntax` {

    def `should produce an Equivalence if used with an Equivalence (that is not an Equality)` {
      assert(1 !== 2)
      1 should !== (2)
      assert((1 === 2)(determined by intInequivalence))
      (1 should === (2)) (determined by intInequivalence)
      assert(1 === 1)
      1 should === (1)
      assert((1 !== 1)(determined by intInequivalence))
      (1 should !== (1)) (determined by intInequivalence)
    }

    def `should produce an Equivalence from "after being" syntax` {
      // "after being downCased" normalizes both sides before comparing.
      assert(("Hi" !== "hI"))
      assert(("Hi" === "hI")(after being downCased))
    }

    def `should produce an Equivalence from "after being X and Y" syntax` {
      assert((" Hi" !== "hI "))
      assert((" Hi" === "hI ")(after being downCased and chopped))
    }

    def `should produce an Equivalence from "determined by <equivalence> afterBeing" syntax` {
      implicit val stringIneq = stringInequivalence
      assert(("Hi" === "hI"))
      assert { ("Hi" !== "hI") (after being downCased) }
      assert { ("Hi" === "hI") (determined by defaultEquality[String] afterBeing downCased) }
    }
  }
}
| travisbrown/scalatest | src/test/scala/org/scalactic/ConversionCheckedTripleEqualsExplicitlySpec.scala | Scala | apache-2.0 | 3,182 |
package com.twitter.finagle.http2
import com.twitter.finagle.Stack
import com.twitter.finagle.http2.transport.H2ServerFilter
import com.twitter.finagle.netty4.http.handler.UriValidatorHandler
import com.twitter.finagle.param.Timer
import io.netty.channel.ChannelHandlerContext
import io.netty.handler.codec.http2.Http2MultiplexHandler
private[http2] object Http2PipelineInitializer {

  /**
   * Install Finagle specific filters and handlers common across all HTTP/2 only pipelines.
   *
   * Adds the [[H2ServerFilter]] right after the Http2MultiplexHandler — the last
   * point where frames are still HTTP/2, before conversion to HTTP/1.1 — and
   * removes the HTTP/1.x URI validator, which does not apply here.
   *
   * @param ctx    channel handler context whose pipeline is modified
   * @param params stack params; the [[Timer]] param is handed to the H2 filter
   */
  def setup(ctx: ChannelHandlerContext, params: Stack.Params): Unit = {
    val pipeline = ctx.pipeline
    val multiplexHandlerName = pipeline.context(classOf[Http2MultiplexHandler]).name
    pipeline.addAfter(multiplexHandlerName, H2ServerFilter.HandlerName, new H2ServerFilter(params[Timer].timer))
    pipeline.remove(UriValidatorHandler.HandlerName)
  }
}
| luciferous/finagle | finagle-http2/src/main/scala/com/twitter/finagle/http2/Http2PipelineInitializer.scala | Scala | apache-2.0 | 1,032 |
/*
* Pubsub envelope subscriber
*
* @author Giovanni Ruggiero
* @email giovanni.ruggiero@gmail.com
*/
import org.zeromq.ZMQ
// ZeroMQ pub/sub example: subscribes to envelope address "B" on tcp://localhost:5563
// and prints every (address, contents) message pair it receives.
object psenvsub {
  def main(args : Array[String]) {
    // Prepare our context and subscriber
    val context = ZMQ.context(1)
    val subscriber = context.socket(ZMQ.SUB)

    subscriber.connect("tcp://localhost:5563")
    // Only receive messages whose envelope (first frame) starts with "B".
    subscriber.subscribe("B".getBytes())
    // Demo code: loops forever; socket and context are never closed.
    while (true) {
      // Read envelope with address
      val address = new String(subscriber.recv(0))
      // Read message contents
      val contents = new String(subscriber.recv(0))
      println(address + " : " + contents)
    }
  }
}
| soscpd/bee | root/tests/zguide/examples/Scala/psenvsub.scala | Scala | mit | 607 |
package be.angelcorp.glsl.glsl330
import be.angelcorp.glsl._
// GLSL 3.30 fragment-shader core for the DSL: declares the built-in fragment
// inputs and outputs as symbol-only stubs (bodies are never executed on the JVM;
// only their names are emitted into generated GLSL).
@Glsl
trait FS330Core extends Glsl330 {
  "#version 330"

  // in — built-in fragment shader inputs
  @GlslSymbolOnly def gl_FragCoord: vec4 = ???
  @GlslSymbolOnly def gl_FrontFacing: bool = ???
  @GlslSymbolOnly def gl_ClipDistance: Array[float] = ???
  @GlslSymbolOnly def gl_PointCoord: vec2 = ???
  @GlslSymbolOnly def gl_PrimitiveID: int = ???

  // out — built-in fragment shader outputs
  @GlslSymbolOnly def gl_FragColor_=(v: vec4): Unit = ??? // deprecated
  @GlslSymbolOnly def gl_FragData: Array[vec4] = ??? // deprecated
  @GlslSymbolOnly def gl_FragDepth_=(v: float): Unit = ???
}
| AODtorusan/scala-glsl | core/src/main/scala/be/angelcorp/glsl/glsl330/FS330Core.scala | Scala | mit | 590 |
package io.scalajs.nodejs.util
import org.scalatest.FunSpec
import scala.scalajs.js
/**
* Util Tests
* @author lawrence.daniels@gmail.com
*/
// Exercises Util.isPrimitive across the JavaScript primitive/non-primitive divide:
// numbers, strings, booleans, null and undefined are primitives; objects,
// functions, regexps and dates are not. Expected results noted inline.
class UtilTest extends FunSpec {

  describe("Util") {

    it("supports isPrimitive(5)") {
      assert(Util.isPrimitive(5)) // Returns: true
    }

    it("supports isPrimitive(\"foo\")") {
      assert(Util.isPrimitive("foo")) // Returns: true
    }

    it("supports isPrimitive(false)") {
      assert(Util.isPrimitive(false)) // Returns: true
    }

    it("supports isPrimitive(null)") {
      assert(Util.isPrimitive(null)) // Returns: true
    }

    it("supports isPrimitive(undefined)") {
      assert(Util.isPrimitive(js.undefined)) // Returns: true
    }

    it("supports isPrimitive({})") {
      assert(!Util.isPrimitive(new js.Object())) // Returns: false
    }

    it("supports isPrimitive(function() {})") {
      assert(!Util.isPrimitive(() => {})) // Returns: false
    }

    it("supports isPrimitive(/^$/)") {
      assert(!Util.isPrimitive(js.RegExp("/^$/"))) // Returns: false
    }

    it("supports isPrimitive(new Date())") {
      assert(!Util.isPrimitive(new js.Date())) // Returns: false
    }
  }
}
| scalajs-io/nodejs | app/common/src/test/scala/io/scalajs/nodejs/util/UtilTest.scala | Scala | apache-2.0 | 1,184 |
package slick.jdbc
import java.sql.ResultSet
import slick.dbio.{Effect, NoStream, SynchronousDatabaseAction}
import slick.basic.BasicStreamingAction
import slick.util.CloseableIterator
/** An invoker which calls a function to retrieve a ResultSet. This can be used
* for reading information from a java.sql.DatabaseMetaData object which has
* many methods that return ResultSets.
*
* For convenience, if the function returns null, this is treated like an
* empty ResultSet. */
abstract class ResultSetInvoker[+R] extends Invoker[R] { self =>

  /** Produces the ResultSet to iterate over; a null return is treated as "no rows". */
  protected def createResultSet(session: JdbcBackend#Session): ResultSet

  def iteratorTo(maxRows: Int)(implicit session: JdbcBackend#Session): CloseableIterator[R] = {
    val rs = createResultSet(session)
    if(rs eq null) CloseableIterator.empty
    else {
      // Wrap the ResultSet so that closing the iterator closes the ResultSet.
      val pr = new PositionedResult(rs) {
        def close() = rs.close()
      }
      new PositionedResultIterator[R](pr, maxRows, true) {
        def extractValue(pr: PositionedResult) = self.extractValue(pr)
      }
    }
  }

  /** Reads one row at the result's current position. */
  protected def extractValue(pr: PositionedResult): R
}
object ResultSetInvoker {
  /** Creates an invoker backed by `f`, converting each row with the implicit `conv`. */
  def apply[R](f: JdbcBackend#Session => ResultSet)(implicit conv: PositionedResult => R): Invoker[R] = new ResultSetInvoker[R] {
    def createResultSet(session: JdbcBackend#Session) = f(session)
    def extractValue(pr: PositionedResult) = conv(pr)
  }
}
object ResultSetAction {
  /** Wraps a ResultSet-producing function into a streaming DBIO action (read effect).
    * The materialized (non-streaming) result type is a Vector of the converted rows. */
  def apply[R](f: JdbcBackend#Session => ResultSet)(implicit conv: PositionedResult => R): BasicStreamingAction[Vector[R], R, Effect.Read] = new StreamingInvokerAction[Vector[R], R, Effect.Read] {
    protected[this] def createInvoker(sql: Iterable[String]) = ResultSetInvoker(f)(conv)
    protected[this] def createBuilder = Vector.newBuilder[R]
    def statements = Nil // no SQL statements — driven by the supplied function
  }
}
| AtkinsChang/slick | slick/src/main/scala/slick/jdbc/ResultSetInvoker.scala | Scala | bsd-2-clause | 1,793 |
/*
* Copyright (C) 2010 Lalit Pant <pant.lalit@gmail.com>
*
* The contents of this file are subject to the GNU General Public License
* Version 3 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.gnu.org/copyleft/gpl.html
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
*/
package net.kogics.kojo
package mathworld
import geogebra.plugin.GgbAPI
import net.kogics.kojo.core._
import net.kogics.kojo.util._
object MathWorld extends InitedSingleton[MathWorld] {
  /** (Re)initializes the singleton and wires in the Kojo context and GeoGebra API. */
  def initedInstance(kojoCtx: KojoCtx, ggbApi: GgbAPI) = synchronized {
    instanceInit()
    val ret = instance()
    ret._kojoCtx = kojoCtx
    ret._ggbApi = ggbApi
    ret._Algo = new Algo(ggbApi)
    ret
  }

  protected def newInstance = new MathWorld
}
/** Scala facade over the GeoGebra API: view management plus factory methods for
  * points, lines, circles, angles and other geometric objects. GeoGebra calls
  * are marshalled onto the Swing thread via Utils. */
class MathWorld {
  // Wired in by MathWorld.initedInstance after construction.
  @volatile var _kojoCtx: KojoCtx = _
  @volatile var _ggbApi: GgbAPI = _
  @volatile var _Algo: Algo = _

  private def ensureVisible() {
    _kojoCtx.makeMathWorldVisible()
  }

  /** Shows the MathWorld pane and starts a new, empty GeoGebra file. */
  def clear() {
    Utils.runInSwingThreadAndWait {
      ensureVisible()
      // setSaved() prevents a "save changes?" prompt from fileNew().
      _ggbApi.getApplication.setSaved()
      _ggbApi.getApplication.fileNew()
    }
  }

  // for unit tests (does not force the pane visible, and does not block)
  private[mathworld] def _clear2() {
    Utils.runInSwingThread {
      _ggbApi.getApplication.setSaved()
      _ggbApi.getApplication.fileNew()
    }
  }

  // --- view toggles -------------------------------------------------------

  def showAxes() {
    Utils.runInSwingThread {
      _ggbApi.setAxesVisible(true, true)
      _ggbApi.getKernel.notifyRepaint()
    }
  }

  def hideAxes() {
    Utils.runInSwingThread {
      _ggbApi.setAxesVisible(false, false)
      _ggbApi.getKernel.notifyRepaint()
    }
  }

  def showGrid() {
    Utils.runInSwingThread {
      _ggbApi.setGridVisible(true)
      _ggbApi.getKernel.notifyRepaint()
    }
  }

  def hideGrid() {
    Utils.runInSwingThread {
      _ggbApi.setGridVisible(false)
      _ggbApi.getKernel.notifyRepaint()
    }
  }

  def showAlgebraView() {
    Utils.runInSwingThread {
      _ggbApi.getApplication.setShowAlgebraView(true)
      _ggbApi.getApplication.updateCenterPanel(true)
      _ggbApi.getApplication.setDefaultCursor()
    }
  }

  def hideAlgebraView() {
    Utils.runInSwingThread {
      _ggbApi.getApplication.setShowAlgebraView(false)
      _ggbApi.getApplication.updateCenterPanel(true)
      _ggbApi.getApplication.setDefaultCursor()
    }
  }

  /** Zooms the Euclidian view by `factor`, centering on world coords (cx, cy). */
  def zoom(factor: Double, cx: Double, cy: Double) {
    Utils.runInSwingThread {
      val view = _ggbApi.getApplication.getEuclidianView
      val newZoom = factor * geogebra.euclidian.EuclidianView.SCALE_STANDARD
      view.setCoordSystem(view.getWidth/2 - cx * newZoom, view.getHeight/2 + cy * newZoom, newZoom, newZoom)
    }
  }

  def switchTo() = ensureVisible()

  // --- factories for geometric objects ------------------------------------

  def point(x: Double, y: Double, label: String=null): MwPoint = MwPoint(_ggbApi, x, y, Option(label))

  def pointOn(on: MwLine, x: Double, y: Double): MwPoint = MwPoint(_ggbApi, on, x, y)

  def line(p1: MwPoint, p2: MwPoint): MwLine = MwLine(_ggbApi, p1, p2)

  def lineSegment(p1: MwPoint, p2: MwPoint): MwLineSegment = MwLineSegment(_ggbApi, p1, p2)

  def lineSegment(p: MwPoint, len: Double): MwLineSegment = MwLineSegment(_ggbApi, p, len)

  def ray(p1: MwPoint, p2: MwPoint): MwRay = MwRay(_ggbApi, p1, p2)

  def angle(p1: MwPoint, p2: MwPoint, p3: MwPoint): MwAngle = MwAngle(_ggbApi, p1, p2, p3)

  // size is in degrees; converted to radians for GeoGebra.
  def angle(p1: MwPoint, p2: MwPoint, size: Double): MwAngle = MwAngle(_ggbApi, p1, p2, size * math.Pi / 180)

  def text(content: String, x: Double, y: Double): MwText = {
    MwText(_ggbApi, content, x, y)
  }

  def circle(center: MwPoint, radius: Double): MwCircle = {
    MwCircle(_ggbApi, center, radius)
  }

  def figure(name: String) = new MwFigure(name)

  // --- constructions delegated to Algo -------------------------------------

  def intersect(l1: MwLine, l2: MwLine): MwPoint = _Algo.intersect(_ggbApi, l1, l2)

  def intersect(l: MwLine, c: MwCircle): Seq[MwPoint] = _Algo.intersect(_ggbApi, l, c)

  def intersect(c: MwCircle, l: MwLine): Seq[MwPoint] = intersect(l, c)

  def intersect(c1: MwCircle, c2: MwCircle): Seq[MwPoint] = _Algo.intersect(_ggbApi, c1, c2)

  def midpoint(ls: MwLineSegment): MwPoint = _Algo.midpoint(ls)

  def perpendicular(l: MwLine, p: MwPoint): MwLine = _Algo.perpendicular(l, p)

  def parallel(l: MwLine, p: MwPoint): MwLine = _Algo.parallel(l, p)

  /** Makes all given elements visible, in one Swing-thread hop. */
  def show(shapes: VisualElement*) {
    Utils.runInSwingThread {
      shapes.foreach {s => s.show}
    }
  }

  // quick and dirty stuff for now
  import geogebra.kernel._

  /** Creates a GeoGebra slider variable at screen position (x, y). */
  def variable(name: String, value: Double, min: Double, max: Double, increment: Double, x: Int, y: Int) {
    Throttler.throttle()
    Utils. runInSwingThread {
      val number = new GeoNumeric(_ggbApi.getConstruction)
      number.setEuclidianVisible(true)
      number.setSliderLocation(x, y)
      number.setAbsoluteScreenLocActive(true)
      number.setIntervalMin(min)
      number.setIntervalMax(max)
      number.setAnimationStep(increment)
      number.setValue(value)
      number.setLabel(name)
      number.setLabelMode(GeoElement.LABEL_NAME_VALUE)
      number.setLabelVisible(true)
      number.update()
    }
  }

  /** Evaluates a raw GeoGebra command string. */
  def evaluate(cmd: String) {
    Throttler.throttle()
    Utils. runInSwingThread {
      _ggbApi.evalCommand(cmd)
    }
  }

  /** Creates a MathWorld turtle at (x, y); blocks until constructed on the Swing thread. */
  def turtle(x: Double, y: Double) = {
    Utils.runInSwingThreadAndWait {
      new MwTurtle(x, y)
    }
  }
}
| vnkmr7620/kojo | KojoEnv/src/net/kogics/kojo/mathworld/MathWorld.scala | Scala | gpl-3.0 | 5,487 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.SparkException
import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.param.ParamsSuite
import org.apache.spark.ml.util.{DefaultReadWriteTest, MLTest}
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
class ImputerSuite extends MLTest with DefaultReadWriteTest {
test("Imputer for Double with default missing Value NaN") {
val df = spark.createDataFrame(Seq(
(0, 1.0, 4.0, 1.0, 1.0, 1.0, 4.0, 4.0, 4.0),
(1, 11.0, 12.0, 11.0, 11.0, 11.0, 12.0, 12.0, 12.0),
(2, 3.0, Double.NaN, 3.0, 3.0, 3.0, 10.0, 12.0, 4.0),
(3, Double.NaN, 14.0, 5.0, 3.0, 1.0, 14.0, 14.0, 14.0)
)).toDF("id", "value1", "value2",
"expected_mean_value1", "expected_median_value1", "expected_mode_value1",
"expected_mean_value2", "expected_median_value2", "expected_mode_value2")
val imputer = new Imputer()
.setInputCols(Array("value1", "value2"))
.setOutputCols(Array("out1", "out2"))
ImputerSuite.iterateStrategyTest(true, imputer, df)
}
test("Single Column: Imputer for Double with default missing Value NaN") {
val df1 = spark.createDataFrame(Seq(
(0, 1.0, 1.0, 1.0, 1.0),
(1, 11.0, 11.0, 11.0, 11.0),
(2, 3.0, 3.0, 3.0, 3.0),
(3, Double.NaN, 5.0, 3.0, 1.0)
)).toDF("id", "value",
"expected_mean_value", "expected_median_value", "expected_mode_value")
val imputer1 = new Imputer()
.setInputCol("value")
.setOutputCol("out")
ImputerSuite.iterateStrategyTest(false, imputer1, df1)
val df2 = spark.createDataFrame(Seq(
(0, 4.0, 4.0, 4.0, 4.0),
(1, 12.0, 12.0, 12.0, 12.0),
(2, Double.NaN, 10.0, 12.0, 4.0),
(3, 14.0, 14.0, 14.0, 14.0)
)).toDF("id", "value",
"expected_mean_value", "expected_median_value", "expected_mode_value")
val imputer2 = new Imputer()
.setInputCol("value")
.setOutputCol("out")
ImputerSuite.iterateStrategyTest(false, imputer2, df2)
}
test("Imputer should handle NaNs when computing surrogate value, if missingValue is not NaN") {
val df = spark.createDataFrame(Seq(
(0, 1.0, 1.0, 1.0, 1.0),
(1, 3.0, 3.0, 3.0, 3.0),
(2, Double.NaN, Double.NaN, Double.NaN, Double.NaN),
(3, -1.0, 2.0, 1.0, 1.0)
)).toDF("id", "value",
"expected_mean_value", "expected_median_value", "expected_mode_value")
val imputer = new Imputer().setInputCols(Array("value")).setOutputCols(Array("out"))
.setMissingValue(-1.0)
ImputerSuite.iterateStrategyTest(true, imputer, df)
}
test("Single Column: Imputer should handle NaNs when computing surrogate value," +
" if missingValue is not NaN") {
val df = spark.createDataFrame(Seq(
(0, 1.0, 1.0, 1.0, 1.0),
(1, 3.0, 3.0, 3.0, 3.0),
(2, Double.NaN, Double.NaN, Double.NaN, Double.NaN),
(3, -1.0, 2.0, 1.0, 1.0)
)).toDF("id", "value",
"expected_mean_value", "expected_median_value", "expected_mode_value")
val imputer = new Imputer().setInputCol("value").setOutputCol("out")
.setMissingValue(-1.0)
ImputerSuite.iterateStrategyTest(false, imputer, df)
}
test("Imputer for Float with missing Value -1.0") {
val df = spark.createDataFrame(Seq(
(0, 1.0F, 1.0F, 1.0F, 1.0F),
(1, 3.0F, 3.0F, 3.0F, 3.0F),
(2, 10.0F, 10.0F, 10.0F, 10.0F),
(3, 10.0F, 10.0F, 10.0F, 10.0F),
(4, -1.0F, 6.0F, 3.0F, 10.0F)
)).toDF("id", "value",
"expected_mean_value", "expected_median_value", "expected_mode_value")
val imputer = new Imputer().setInputCols(Array("value")).setOutputCols(Array("out"))
.setMissingValue(-1)
ImputerSuite.iterateStrategyTest(true, imputer, df)
}
test("Single Column: Imputer for Float with missing Value -1.0") {
val df = spark.createDataFrame(Seq(
(0, 1.0F, 1.0F, 1.0F, 1.0F),
(1, 3.0F, 3.0F, 3.0F, 3.0F),
(2, 10.0F, 10.0F, 10.0F, 10.0F),
(3, 10.0F, 10.0F, 10.0F, 10.0F),
(4, -1.0F, 6.0F, 3.0F, 10.0F)
)).toDF("id", "value",
"expected_mean_value", "expected_median_value", "expected_mode_value")
val imputer = new Imputer().setInputCol("value").setOutputCol("out")
.setMissingValue(-1)
ImputerSuite.iterateStrategyTest(false, imputer, df)
}
test("Imputer should impute null as well as 'missingValue'") {
val rawDf = spark.createDataFrame(Seq(
(0, 4.0, 4.0, 4.0, 4.0),
(1, 10.0, 10.0, 10.0, 10.0),
(2, 10.0, 10.0, 10.0, 10.0),
(3, Double.NaN, 8.0, 10.0, 10.0),
(4, -1.0, 8.0, 10.0, 10.0)
)).toDF("id", "rawValue",
"expected_mean_value", "expected_median_value", "expected_mode_value")
val df = rawDf.selectExpr("*", "IF(rawValue=-1.0, null, rawValue) as value")
val imputer = new Imputer().setInputCols(Array("value")).setOutputCols(Array("out"))
ImputerSuite.iterateStrategyTest(true, imputer, df)
}
test("Single Column: Imputer should impute null as well as 'missingValue'") {
val rawDf = spark.createDataFrame(Seq(
(0, 4.0, 4.0, 4.0, 4.0),
(1, 10.0, 10.0, 10.0, 10.0),
(2, 10.0, 10.0, 10.0, 10.0),
(3, Double.NaN, 8.0, 10.0, 10.0),
(4, -1.0, 8.0, 10.0, 10.0)
)).toDF("id", "rawValue",
"expected_mean_value", "expected_median_value", "expected_mode_value")
val df = rawDf.selectExpr("*", "IF(rawValue=-1.0, null, rawValue) as value")
val imputer = new Imputer().setInputCol("value").setOutputCol("out")
ImputerSuite.iterateStrategyTest(false, imputer, df)
}
test("Imputer should work with Structured Streaming") {
val localSpark = spark
import localSpark.implicits._
val df = Seq[(java.lang.Double, Double)](
(4.0, 4.0),
(10.0, 10.0),
(10.0, 10.0),
(Double.NaN, 8.0),
(null, 8.0)
).toDF("value", "expected_mean_value")
val imputer = new Imputer()
.setInputCols(Array("value"))
.setOutputCols(Array("out"))
.setStrategy("mean")
val model = imputer.fit(df)
testTransformer[(java.lang.Double, Double)](df, model, "expected_mean_value", "out") {
case Row(exp: java.lang.Double, out: Double) =>
assert((exp.isNaN && out.isNaN) || (exp == out),
s"Imputed values differ. Expected: $exp, actual: $out")
}
}
test("Single Column: Imputer should work with Structured Streaming") {
val localSpark = spark
import localSpark.implicits._
val df = Seq[(java.lang.Double, Double)](
(4.0, 4.0),
(10.0, 10.0),
(10.0, 10.0),
(Double.NaN, 8.0),
(null, 8.0)
).toDF("value", "expected_mean_value")
val imputer = new Imputer()
.setInputCol("value")
.setOutputCol("out")
.setStrategy("mean")
val model = imputer.fit(df)
testTransformer[(java.lang.Double, Double)](df, model, "expected_mean_value", "out") {
case Row(exp: java.lang.Double, out: Double) =>
assert((exp.isNaN && out.isNaN) || (exp == out),
s"Imputed values differ. Expected: $exp, actual: $out")
}
}
test("Imputer throws exception when surrogate cannot be computed") {
val df = spark.createDataFrame(Seq(
(0, Double.NaN, 1.0, 1.0),
(1, Double.NaN, 3.0, 3.0),
(2, Double.NaN, Double.NaN, Double.NaN)
)).toDF("id", "value", "expected_mean_value", "expected_median_value")
Seq("mean", "median").foreach { strategy =>
val imputer = new Imputer().setInputCols(Array("value")).setOutputCols(Array("out"))
.setStrategy(strategy)
withClue("Imputer should fail all the values are invalid") {
val e: SparkException = intercept[SparkException] {
val model = imputer.fit(df)
}
assert(e.getMessage.contains("surrogate cannot be computed"))
}
}
}
test("Single Column: Imputer throws exception when surrogate cannot be computed") {
val df = spark.createDataFrame(Seq(
(0, Double.NaN, 1.0, 1.0, 1.0),
(1, Double.NaN, 3.0, 3.0, 3.0),
(2, Double.NaN, Double.NaN, Double.NaN, Double.NaN)
)).toDF("id", "value",
"expected_mean_value", "expected_median_value", "expected_mode_value")
Seq("mean", "median", "mode").foreach { strategy =>
val imputer = new Imputer().setInputCol("value").setOutputCol("out")
.setStrategy(strategy)
withClue("Imputer should fail all the values are invalid") {
val e: SparkException = intercept[SparkException] {
val model = imputer.fit(df)
}
assert(e.getMessage.contains("surrogate cannot be computed"))
}
}
}
test("Imputer input & output column validation") {
val df = spark.createDataFrame(Seq(
(0, 1.0, 1.0, 1.0),
(1, Double.NaN, 3.0, 3.0),
(2, Double.NaN, Double.NaN, Double.NaN)
)).toDF("id", "value1", "value2", "value3")
Seq("mean", "median", "mode").foreach { strategy =>
withClue("Imputer should fail if inputCols and outputCols are different length") {
val e: IllegalArgumentException = intercept[IllegalArgumentException] {
val imputer = new Imputer().setStrategy(strategy)
.setInputCols(Array("value1", "value2"))
.setOutputCols(Array("out1"))
val model = imputer.fit(df)
}
assert(e.getMessage.contains("should have the same length"))
}
withClue("Imputer should fail if inputCols contains duplicates") {
val e: IllegalArgumentException = intercept[IllegalArgumentException] {
val imputer = new Imputer().setStrategy(strategy)
.setInputCols(Array("value1", "value1"))
.setOutputCols(Array("out1", "out2"))
val model = imputer.fit(df)
}
assert(e.getMessage.contains("inputCols contains duplicates"))
}
withClue("Imputer should fail if outputCols contains duplicates") {
val e: IllegalArgumentException = intercept[IllegalArgumentException] {
val imputer = new Imputer().setStrategy(strategy)
.setInputCols(Array("value1", "value2"))
.setOutputCols(Array("out1", "out1"))
val model = imputer.fit(df)
}
assert(e.getMessage.contains("outputCols contains duplicates"))
}
}
}
  test("Imputer read/write") {
    // Round-trips an unfitted multi-column Imputer through ML persistence.
    val t = new Imputer()
      .setInputCols(Array("myInputCol"))
      .setOutputCols(Array("myOutputCol"))
      .setMissingValue(-1.0)
    testDefaultReadWrite(t)
  }
  test("Single Column: Imputer read/write") {
    // Round-trips an unfitted single-column Imputer through ML persistence.
    val t = new Imputer()
      .setInputCol("myInputCol")
      .setOutputCol("myOutputCol")
      .setMissingValue(-1.0)
    testDefaultReadWrite(t)
  }
  test("ImputerModel read/write") {
    val spark = this.spark
    import spark.implicits._
    // Fitted model persistence: the surrogate DataFrame must survive the round-trip.
    val surrogateDF = Seq(1.234).toDF("myInputCol")
    val instance = new ImputerModel(
      "myImputer", surrogateDF)
      .setInputCols(Array("myInputCol"))
      .setOutputCols(Array("myOutputCol"))
    val newInstance = testDefaultReadWrite(instance)
    assert(newInstance.surrogateDF.columns === instance.surrogateDF.columns)
    assert(newInstance.surrogateDF.collect() === instance.surrogateDF.collect())
  }
test("Single Column: ImputerModel read/write") {
val spark = this.spark
import spark.implicits._
val surrogateDF = Seq(1.234).toDF("myInputCol")
val instance = new ImputerModel(
"myImputer", surrogateDF)
.setInputCol("myInputCol")
.setOutputCol("myOutputCol")
val newInstance = testDefaultReadWrite(instance)
assert(newInstance.surrogateDF.columns === instance.surrogateDF.columns)
assert(newInstance.surrogateDF.collect() === instance.surrogateDF.collect())
}
// Integral columns with the default missing value: nulls must be imputed,
// verified for both IntegerType and LongType (multi-column params).
test("Imputer for IntegerType with default missing value null") {
  val df = spark.createDataFrame(Seq[(Integer, Integer, Integer, Integer)](
    (1, 1, 1, 1),
    (11, 11, 11, 11),
    (3, 3, 3, 3),
    (null, 5, 3, 1)
  )).toDF("value1",
    "expected_mean_value1", "expected_median_value1", "expected_mode_value1")
  val imputer = new Imputer()
    .setInputCols(Array("value1"))
    .setOutputCols(Array("out1"))
  val types = Seq(IntegerType, LongType)
  for (mType <- types) {
    // cast all columns to desired data type for testing
    val df2 = df.select(df.columns.map(c => col(c).cast(mType)): _*)
    ImputerSuite.iterateStrategyTest(true, imputer, df2)
  }
}

// Same scenario with single-column params.
test("Single Column Imputer for IntegerType with default missing value null") {
  val df = spark.createDataFrame(Seq[(Integer, Integer, Integer, Integer)](
    (1, 1, 1, 1),
    (11, 11, 11, 11),
    (3, 3, 3, 3),
    (null, 5, 3, 1)
  )).toDF("value",
    "expected_mean_value", "expected_median_value", "expected_mode_value")
  val imputer = new Imputer()
    .setInputCol("value")
    .setOutputCol("out")
  val types = Seq(IntegerType, LongType)
  for (mType <- types) {
    // cast all columns to desired data type for testing
    val df2 = df.select(df.columns.map(c => col(c).cast(mType)): _*)
    ImputerSuite.iterateStrategyTest(false, imputer, df2)
  }
}

// Integral columns using an explicit sentinel (-1) as the missing value.
test("Imputer for IntegerType with missing value -1") {
  val df = spark.createDataFrame(Seq[(Integer, Integer, Integer, Integer)](
    (1, 1, 1, 1),
    (11, 11, 11, 11),
    (3, 3, 3, 3),
    (-1, 5, 3, 1)
  )).toDF("value1",
    "expected_mean_value1", "expected_median_value1", "expected_mode_value1")
  val imputer = new Imputer()
    .setInputCols(Array("value1"))
    .setOutputCols(Array("out1"))
    .setMissingValue(-1.0)
  val types = Seq(IntegerType, LongType)
  for (mType <- types) {
    // cast all columns to desired data type for testing
    val df2 = df.select(df.columns.map(c => col(c).cast(mType)): _*)
    ImputerSuite.iterateStrategyTest(true, imputer, df2)
  }
}

// Sentinel missing value with single-column params.
test("Single Column: Imputer for IntegerType with missing value -1") {
  val df = spark.createDataFrame(Seq[(Integer, Integer, Integer, Integer)](
    (1, 1, 1, 1),
    (11, 11, 11, 11),
    (3, 3, 3, 3),
    (-1, 5, 3, 1)
  )).toDF("value",
    "expected_mean_value", "expected_median_value", "expected_mode_value")
  val imputer = new Imputer()
    .setInputCol("value")
    .setOutputCol("out")
    .setMissingValue(-1.0)
  val types = Seq(IntegerType, LongType)
  for (mType <- types) {
    // cast all columns to desired data type for testing
    val df2 = df.select(df.columns.map(c => col(c).cast(mType)): _*)
    ImputerSuite.iterateStrategyTest(false, imputer, df2)
  }
}
// Single-column and multi-column params are mutually exclusive; setting both
// (or neither input param) must raise.
test("assert exception is thrown if both multi-column and single-column params are set") {
  import testImplicits._
  val df = Seq((0.5, 0.3), (0.5, -0.4)).toDF("feature1", "feature2")
  ParamsSuite.testExclusiveParams(new Imputer, df, ("inputCol", "feature1"),
    ("inputCols", Array("feature1", "feature2")))
  ParamsSuite.testExclusiveParams(new Imputer, df, ("inputCol", "feature1"),
    ("outputCol", "result1"), ("outputCols", Array("result1", "result2")))
  // this should fail because at least one of inputCol and inputCols must be set
  ParamsSuite.testExclusiveParams(new Imputer, df, ("outputCol", "feature1"))
}

// One multi-column Imputer must produce the same results as two chained
// single-column Imputers in a Pipeline, for every strategy.
test("Compare single/multiple column(s) Imputer in pipeline") {
  val df = spark.createDataFrame(Seq(
    (0, 1.0, 4.0),
    (1, 11.0, 12.0),
    (2, 3.0, Double.NaN),
    (3, Double.NaN, 14.0)
  )).toDF("id", "value1", "value2")
  Seq("mean", "median", "mode").foreach { strategy =>
    val multiColsImputer = new Imputer()
      .setInputCols(Array("value1", "value2"))
      .setOutputCols(Array("result1", "result2"))
      .setStrategy(strategy)
    val plForMultiCols = new Pipeline()
      .setStages(Array(multiColsImputer))
      .fit(df)
    val imputerForCol1 = new Imputer()
      .setInputCol("value1")
      .setOutputCol("result1")
      .setStrategy(strategy)
    val imputerForCol2 = new Imputer()
      .setInputCol("value2")
      .setOutputCol("result2")
      .setStrategy(strategy)
    val plForSingleCol = new Pipeline()
      .setStages(Array(imputerForCol1, imputerForCol2))
      .fit(df)
    val resultForSingleCol = plForSingleCol.transform(df)
      .select("result1", "result2")
      .collect()
    val resultForMultiCols = plForMultiCols.transform(df)
      .select("result1", "result2")
      .collect()
    // Row-by-row comparison of the two imputation paths.
    resultForSingleCol.zip(resultForMultiCols).foreach {
      case (rowForSingle, rowForMultiCols) =>
        assert(rowForSingle.getDouble(0) == rowForMultiCols.getDouble(0) &&
          rowForSingle.getDouble(1) == rowForMultiCols.getDouble(1))
    }
  }
}
}
object ImputerSuite {

  /**
   * Fits and transforms `df` with each supported strategy ("mean", "median",
   * "mode") and verifies every imputed output column.
   *
   * @param isMultiCol whether `imputer` uses the multi-column params
   *                   (inputCols/outputCols) rather than inputCol/outputCol
   * @param imputer the estimator under test; its strategy param is overwritten in place
   * @param df input data; for each input column `c` it must also contain the
   *           columns "expected_mean_c", "expected_median_c" and "expected_mode_c"
   */
  def iterateStrategyTest(isMultiCol: Boolean, imputer: Imputer, df: DataFrame): Unit = {
    Seq("mean", "median", "mode").foreach { strategy =>
      imputer.setStrategy(strategy)
      val model = imputer.fit(df)
      val resultDF = model.transform(df)
      if (isMultiCol) {
        imputer.getInputCols.zip(imputer.getOutputCols).foreach { case (inputCol, outputCol) =>
          verifyTransformResult(strategy, inputCol, outputCol, resultDF)
        }
      } else {
        verifyTransformResult(strategy, imputer.getInputCol, imputer.getOutputCol, resultDF)
      }
    }
  }

  /**
   * Checks that `outputCol` keeps the input column's data type and that each
   * imputed value matches the "expected_<strategy>_<inputCol>" column
   * (NaN-aware for Float, tolerance-based for Double, exact for integral types).
   */
  def verifyTransformResult(
      strategy: String,
      inputCol: String,
      outputCol: String,
      resultDF: DataFrame): Unit = {
    // check dataType is consistent between input and output
    val inputType = resultDF.schema(inputCol).dataType
    val outputType = resultDF.schema(outputCol).dataType
    assert(inputType == outputType, "Output type is not the same as input type.")
    // check value
    resultDF.select(s"expected_${strategy}_$inputCol", outputCol).collect().foreach {
      case Row(exp: Float, out: Float) =>
        assert((exp.isNaN && out.isNaN) || (exp == out),
          s"Imputed values differ. Expected: $exp, actual: $out")
      case Row(exp: Double, out: Double) =>
        assert((exp.isNaN && out.isNaN) || (exp ~== out absTol 1e-5),
          s"Imputed values differ. Expected: $exp, actual: $out")
      case Row(exp: Integer, out: Integer) =>
        assert(exp == out,
          s"Imputed values differ. Expected: $exp, actual: $out")
      case Row(exp: Long, out: Long) =>
        assert(exp == out,
          s"Imputed values differ. Expected: $exp, actual: $out")
    }
  }
}
| maropu/spark | mllib/src/test/scala/org/apache/spark/ml/feature/ImputerSuite.scala | Scala | apache-2.0 | 19,475 |
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import ru.makkarpov.scalingua.{Language, LanguageId, Messages, TaggedLanguage}
import ru.makkarpov.scalingua.I18n._
import ru.makkarpov.scalingua.{CompiledLanguage, PluralFunction, TaggedLanguage}
import some.test.pkg._
/**
 * End-to-end behaviour of Scalingua runtime message loading and the t/p
 * interpolators: locale resolution (exact, other-country, generic language,
 * fallback to English), plural forms, percent-sign escaping and unicode.
 */
class Test extends AnyFlatSpec with Matchers {
  it should "skip index generation" in {
    val compiledMessages: scala.util.Try[Messages] = scala.util.Try(Messages.compiled("some.test.pkg"))
    // Fixed: `shouldBe 'Failure` relied on the reflection-based symbol matcher,
    // deprecated in recent ScalaTest releases; assert on isFailure directly.
    compiledMessages.isFailure shouldBe true
  }

  // All translations below come from the manually loaded ru_RU catalog.
  implicit val messages = ManuallyLoadedLanguages

  it should "provide correct messages for en_US" in {
    implicit val langId = LanguageId("en-US")
    t("Hello, world!") shouldBe "Hello, world!"
    p("There is %(n) dog!", "There is %(n) dogs!", 7) shouldBe "There is 7 dogs!"
  }

  it should "provide correct messages for ru_RU" in {
    implicit val langId = LanguageId("ru-RU")
    t("Hello, world!") shouldBe "Привет, мир!"
    p("There is %(n) dog!", "There is %(n) dogs!", 7) shouldBe "Здесь 7 собак!"
  }

  it should "provide english messages for absent languages" in {
    implicit val langId = LanguageId("xx-QQ")
    t("Hello, world!") shouldBe "Hello, world!"
    p("There is %(n) dog!", "There is %(n) dogs!", 7) shouldBe "There is 7 dogs!"
  }

  it should "provide correct messages for other countries (ru_XX)" in {
    implicit val langId = LanguageId("ru-XX")
    t("Hello, world!") shouldBe "Привет, мир!"
    p("There is %(n) dog!", "There is %(n) dogs!", 7) shouldBe "Здесь 7 собак!"
  }

  it should "provide correct messages for generic languages (ru)" in {
    implicit val langId = LanguageId("ru")
    t("Hello, world!") shouldBe "Привет, мир!"
    p("There is %(n) dog!", "There is %(n) dogs!", 7) shouldBe "Здесь 7 собак!"
  }

  it should "handle percent signs" in {
    implicit val langId = LanguageId("ru-RU")
    t"Percents! %" shouldBe "Проценты! %"
    t("Percents!! %%") shouldBe "Проценты!! %"
    val x = 1
    t"Percents with variables%: $x, percents%" shouldBe s"Проценты с перменными%: $x, проценты%"
    t"Percents after variable: $x%%" shouldBe s"Проценты после переменной: $x%"
    // Plural:
    p"Look, I have $x percent${S.s}: %!" shouldBe s"Смотри, у меня $x процент: %!"
  }

  it should "reject invalid percent signs" in {
    // A bare `%` after an interpolated variable is ambiguous and must not compile.
    """
      val x = 123
      t"Test: $x% qweqwe"
    """ shouldNot typeCheck
  }

  it should "escape unicode literals" in {
    implicit val langId = LanguageId("en-US")
    t"Привет, мир!" shouldBe "Привет, мир!"
    t"Weird’quotes" shouldBe "Weird’quotes"
  }
}
// Messages container built by hand: English is the identity (untranslated)
// language, Russian is loaded from the compiled binary catalog on the classpath.
object ManuallyLoadedLanguages extends Messages(
  TaggedLanguage.Identity,
  new Language_ru_RU(getClass.getResourceAsStream("/some/test/pkg/data_ru_RU.bin"))
)
| makkarpov/scalingua | sbt-plugin/src/sbt-test/main/load-in-runtime/src/test/scala/Test.scala | Scala | apache-2.0 | 2,925 |
package io.github.datamoth.dm.imp.oozie
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File
import java.nio.file.Files
import org.apache.commons.io.FileUtils
import org.apache.commons.io.FilenameUtils
import scala.collection.JavaConverters._
/** Companion for [[Client]]: configuration holder and factory method. */
object Client {

  /**
   * Immutable settings for a [[Client]] instance.
   *
   * @param user    user name used for deployments
   * @param hdfsDir HDFS directory holding deployed coordinator definitions
   * @param sysopts Oozie system options; must contain the "uri" key
   */
  case class Config(user: String, hdfsDir: String, sysopts: com.typesafe.config.Config)

  /** Builds a [[Client]] from the individual configuration values. */
  def create(user: String, hdfsDir: String, sysopts: com.typesafe.config.Config): Client =
    new Client(Config(user, hdfsDir, sysopts))
}
class Client(cfg: Client.Config) {
import io.github.datamoth.dm.api
import scala.collection.JavaConverters._
import org.apache.oozie.client.OozieClient
import org.apache.oozie.client.WorkflowJob
import org.apache.oozie.client.CoordinatorJob
import org.apache.oozie.client.OozieClientException
  private val L = LoggerFactory.getLogger(classOf[Client])
  // One Oozie client per Client instance, pointed at the configured server URI.
  private val client = new OozieClient(cfg.sysopts.getString("uri"))
  // Oozie filter strings: semicolon-separated STATUS=... clauses select jobs
  // that are in any of the listed lifecycle states.
  private val ALL_JOBS = "STATUS=PREP;STATUS=RUNNING;STATUS=SUCCEEDED;STATUS=KILLED;STATUS=FAILED;STATUS=SUSPENDED"
  private val ALL_ALIVE_COORDS = "STATUS=IGNORED;STATUS=PAUSED;STATUS=PAUSEDWITHERROR;STATUS=PREMATER;STATUS=PREP;STATUS=PREPPAUSED;STATUS=PREPSUSPENDED;STATUS=RUNNING;STATUS=RUNNINGWITHERROR;STATUS=SUSPENDED;STATUS=SUSPENDEDWITHERROR"
  private val ALL_ACTIONS = "STATUS=FAILED;STATUS=IGNORED;STATUS=KILLED;STATUS=READY;STATUS=RUNNING;STATUS=SKIPPED;STATUS=SUBMITTED;STATUS=SUCCEEDED;STATUS=SUSPENDED;STATUS=TIMEDOUT;STATUS=WAITING"
def getJobs(maxCount: Int): List[WorkflowJob] = {
L.debug("Try to get jobs list")
val jobs = client.getJobsInfo(ALL_JOBS, 0, maxCount).asScala
jobs.toList
}
def getCoordinatorInfo(name: String): List[AnyRef] = {
client.getCoordJobsInfo(ALL_ALIVE_COORDS + s";NAME=${name}", 0, 100).asScala.map{ c =>
client.getCoordJobInfo(c.getId, ALL_ACTIONS, 0, 30, "desc")
}.toList
}
  /**
   * Executes each command's action list ("start", "kill", "resume", "suspend")
   * in order, threading the updated command through foldLeft so every action
   * sees the result of the previous one. Commands that already carry errors
   * are passed through untouched.
   */
  def deploy(deploy: api.oozie.DeployInfo): api.oozie.DeployInfo = {
    val result = deploy.commands.map { cmd =>
      if (cmd.errors.isEmpty) {
        cmd.actions.foldLeft(cmd) { case (c, action) =>
          action match {
            case "start" => start(c)
            case "kill" => kill(c)
            case "resume" => resume(c)
            case "suspend" => suspend(c)
            case _ => c // unknown actions are silently ignored
          }
        }
      } else {
        cmd
      }
    }
    deploy.setCommands(commands = result).withCommandErrors
  }
  /**
   * Submits and starts the coordinator referenced by the command. All system
   * options are copied into the job configuration, then
   * "oozie.coord.application.path" is pointed at the coordinator's HDFS path.
   *
   * @return the command with the new job id and status "running", or with an
   *         error attached if submission fails
   */
  private def start(cmd: api.oozie.DeployCommand): api.oozie.DeployCommand = {
    val conf = client.createConfiguration()
    try {
      // NOTE(review): assumes a "start" command always carries a coordinator;
      // .get throws otherwise (caught below as a generic Exception) — confirm upstream.
      val c = cmd.coordinator.get
      for (opt <- cfg.sysopts.entrySet.asScala) {
        L.debug("Set oozie sysopt {} <- {}", opt.getKey:Any, cfg.sysopts.getString(opt.getKey))
        conf.setProperty(opt.getKey, cfg.sysopts.getString(opt.getKey))
      }
      conf.setProperty("oozie.coord.application.path", new File(new File(cfg.hdfsDir), c.location.file).toString)
      L.info("Starting: {}", c.name)
      val id = client.run(conf)
      L.info("Started: {}, id {}", c.name:Any, id)
      cmd.withId(id).withStatus("running")
    } catch {
      case e: OozieClientException =>
        L.error("Oozie client error [while starting]: {}", e.getMessage)
        cmd.withoutId.withError(s"${e.getErrorCode}:${e.getMessage}")
      case e: Exception =>
        L.error("Oozie client error [while starting]: {}", e.getMessage)
        cmd.withoutId.withError(e.getMessage)
    }
  }
  /**
   * Kills every live coordinator matching the command's coordinator name and
   * clears the command's id/status on success; attaches an error on failure
   * (including "no matching coordinator", thrown by getCoordsByName).
   */
  private def kill(cmd: api.oozie.DeployCommand): api.oozie.DeployCommand = {
    try {
      val list = getCoordsByName(cmd.coordinatorName)
      list.foreach { c =>
        L.info("Killing: {}:{}", c.getAppName:Any, c.getId)
        client.kill(c.getId)
        L.info("Killed: {}:{}", c.getAppName:Any, c.getId)
      }
      cmd.withoutId.withoutStatus
    } catch {
      case e: OozieClientException =>
        L.error("Oozie client error [while killing], {}", e.getMessage)
        cmd.withError(s"${e.getErrorCode}:${e.getMessage}")
      case e: Exception =>
        L.error("Oozie client error [while killing], {}", e.getMessage)
        cmd.withError(e.getMessage)
    }
  }
  /**
   * Suspends every live coordinator matching the command's coordinator name.
   *
   * @return the command marked "suspended", or with an error attached on failure
   */
  private def suspend(cmd: api.oozie.DeployCommand): api.oozie.DeployCommand = {
    try {
      val list = getCoordsByName(cmd.coordinatorName)
      list.foreach { c =>
        L.info("Suspending: {}:{}", c.getAppName:Any, c.getId)
        client.suspend(c.getId)
        L.info("Suspended: {}:{}", c.getAppName:Any, c.getId)
      }
      cmd.withStatus("suspended")
    } catch {
      case e: OozieClientException =>
        L.error("Oozie client error [while suspending]: {}", e.getMessage)
        cmd.withError(s"${e.getErrorCode}:${e.getMessage}")
      case e: Exception =>
        L.error("Oozie client error [while suspending]: {}", e.getMessage)
        cmd.withError(e.getMessage)
    }
  }
private def resume(cmd: api.oozie.DeployCommand): api.oozie.DeployCommand = {
try {
val list = getCoordsByName(cmd.coordinatorName)
list.foreach { c =>
L.info("Resuming: {}:{}", c.getAppName:Any, c.getId)
client.suspend(c.getId)
L.info("Resumed: {}:{}", c.getAppName:Any, c.getId)
cmd.withoutId.withoutStatus
}
cmd.withStatus("running")
} catch {
case e: OozieClientException =>
L.error("Oozie client error [while resuming]: {}", e.getMessage)
cmd.withError(s"${e.getErrorCode}:${e.getMessage}")
case e: Exception =>
L.error("Oozie client error [while resuming]: {}", e.getMessage)
cmd.withError(e.getMessage)
}
}
private def getCoordsByName(name: String): List[CoordinatorJob] = {
val rawList = client.getCoordJobsInfo(ALL_ALIVE_COORDS + s";NAME=${name}", 0, 1000)
val map = rawList.asScala.groupBy(_.getAppName)
val list = map.get(name).getOrElse(List[CoordinatorJob]())
if (list.length == 0) {
throw new java.util.NoSuchElementException(s"Coordinator ${name} not found")
}
list.toList
}
}
| datamoth/datamoth | datamot/src/main/scala/io/github/datamoth/dm/imp/oozie/Client.scala | Scala | apache-2.0 | 5,722 |
package com.avsystem.commons
package jiop
/**
 * Aggregates all Java-interop utility traits into a single mix-in, so one
 * import/extension brings collection converters, function/stream/Optional
 * helpers and java.time utilities into scope together.
 */
trait JavaInterop extends AnyRef
  with JBasicUtils
  with JCollectionUtils
  with CompatAsJavaScalaExtensions
  with Java8CollectionUtils
  with JFunctionUtils
  with JStreamUtils
  with JOptionalUtils
  with JavaTimeInterop

/** Importable singleton: `import com.avsystem.commons.jiop.JavaInterop._`. */
object JavaInterop extends JavaInterop
| AVSystem/scala-commons | commons-core/jvm/src/main/scala/com/avsystem/commons/jiop/JavaInterop.scala | Scala | mit | 309 |
/*
*************************************************************************************
* Copyright 2013 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.domain.eventlog
import com.normation.eventlog._
import scala.xml._
import org.joda.time.DateTime
import net.liftweb.common._
import com.normation.utils.HashcodeCaching
/** Marker for all global-parameter event logs; they share a single log category. */
sealed trait ParameterEventLog extends EventLog { override final val eventLogCategory = ParameterLogCategory }

/** Event recorded when a global parameter is created. */
final case class AddGlobalParameter(
    override val eventDetails : EventLogDetails
) extends ParameterEventLog with HashcodeCaching {
  override val cause = None
  override val eventType = AddGlobalParameter.eventType
}

/** Filter companion: builds [[AddGlobalParameter]] events from raw log entries. */
object AddGlobalParameter extends EventLogFilter {
  override val eventType = AddGlobalParameterEventType
  override def apply(x : (EventLogType, EventLogDetails)) : AddGlobalParameter = AddGlobalParameter(x._2)
}

/** Event recorded when a global parameter is modified. */
final case class ModifyGlobalParameter(
    override val eventDetails : EventLogDetails
) extends ParameterEventLog with HashcodeCaching {
  override val cause = None
  override val eventType = ModifyGlobalParameter.eventType
}

/** Filter companion: builds [[ModifyGlobalParameter]] events from raw log entries. */
object ModifyGlobalParameter extends EventLogFilter {
  override val eventType = ModifyGlobalParameterEventType
  override def apply(x : (EventLogType, EventLogDetails)) : ModifyGlobalParameter = ModifyGlobalParameter(x._2)
}

/** Event recorded when a global parameter is deleted. */
final case class DeleteGlobalParameter(
    override val eventDetails : EventLogDetails
) extends ParameterEventLog with HashcodeCaching {
  override val cause = None
  override val eventType = DeleteGlobalParameter.eventType
}

/** Filter companion: builds [[DeleteGlobalParameter]] events from raw log entries. */
object DeleteGlobalParameter extends EventLogFilter {
  override val eventType = DeleteGlobalParameterEventType
  override def apply(x : (EventLogType, EventLogDetails)) : DeleteGlobalParameter = DeleteGlobalParameter(x._2)
}

/** All parameter event filters, used to dispatch raw event-log rows to their type. */
object ParameterEventsLogsFilter {
  final val eventList : List[EventLogFilter] = List(
      AddGlobalParameter
    , ModifyGlobalParameter
    , DeleteGlobalParameter
  )
}
| bmwjanos/rudder | rudder-core/src/main/scala/com/normation/rudder/domain/eventlog/ParameterEventLog.scala | Scala | gpl-3.0 | 3,535 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.sparkfy
import org.apache.log4j.{LogManager, PropertyConfigurator}
import org.slf4j.impl.StaticLoggerBinder
import org.slf4j.{Logger, LoggerFactory}
import com.github.sparkfy.util.Utils
/**
* :: DeveloperApi ::
* Utility trait for classes that want to log data. Creates a SLF4J logger for the class and allows
* logging messages at different levels using methods that only evaluate parameters lazily if the
* log level is enabled.
*
* NOTE: DO NOT USE this class outside of Spark. It is intended as an internal utility.
* This will likely be changed or removed in future releases.
*/
/**
 * Mix-in that lazily creates an SLF4J logger named after the concrete class
 * and exposes level-guarded logging methods. Messages are by-name parameters
 * (`msg: => String`), so they are only built when the level is enabled.
 */
trait Logging {
  // Make the log field transient so that objects with Logging can
  // be serialized and used on another machine
  @transient private var log_ : Logger = null

  // Method to get the logger name for this object
  protected def logName = {
    // Ignore trailing $'s in the class names for Scala objects
    this.getClass.getName.stripSuffix("$")
  }

  // Method to get or create the logger for this object
  protected def log: Logger = {
    if (log_ == null) {
      initializeIfNecessary()
      log_ = LoggerFactory.getLogger(logName)
    }
    log_
  }

  // Log methods that take only a String
  protected def logInfo(msg: => String) {
    if (log.isInfoEnabled) log.info(msg)
  }

  protected def logDebug(msg: => String) {
    if (log.isDebugEnabled) log.debug(msg)
  }

  protected def logTrace(msg: => String) {
    if (log.isTraceEnabled) log.trace(msg)
  }

  protected def logWarning(msg: => String) {
    if (log.isWarnEnabled) log.warn(msg)
  }

  protected def logError(msg: => String) {
    if (log.isErrorEnabled) log.error(msg)
  }

  // Log methods that take Throwables (Exceptions/Errors) too
  protected def logInfo(msg: => String, throwable: Throwable) {
    if (log.isInfoEnabled) log.info(msg, throwable)
  }

  protected def logDebug(msg: => String, throwable: Throwable) {
    if (log.isDebugEnabled) log.debug(msg, throwable)
  }

  protected def logTrace(msg: => String, throwable: Throwable) {
    if (log.isTraceEnabled) log.trace(msg, throwable)
  }

  protected def logWarning(msg: => String, throwable: Throwable) {
    if (log.isWarnEnabled) log.warn(msg, throwable)
  }

  protected def logError(msg: => String, throwable: Throwable) {
    if (log.isErrorEnabled) log.error(msg, throwable)
  }

  protected def isTraceEnabled(): Boolean = {
    log.isTraceEnabled
  }

  // Double-checked locking: cheap unsynchronized read first, then re-check
  // under the lock so global logging setup runs exactly once across threads.
  private def initializeIfNecessary() {
    if (!Logging.initialized) {
      Logging.initLock.synchronized {
        if (!Logging.initialized) {
          initializeLogging()
        }
      }
    }
  }

  private def initializeLogging() {
    // Don't use a logger in here, as this is itself occurring during initialization of a logger
    // If Log4j 1.2 is being used, but is not initialized, load a default properties file
    val binderClass = StaticLoggerBinder.getSingleton.getLoggerFactoryClassStr
    // This distinguishes the log4j 1.2 binding, currently
    // org.slf4j.impl.Log4jLoggerFactory, from the log4j 2.0 binding, currently
    // org.apache.logging.slf4j.Log4jLoggerFactory
    val usingLog4j12 = "org.slf4j.impl.Log4jLoggerFactory".equals(binderClass)
    if (usingLog4j12) {
      val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
      if (!log4j12Initialized) {
        // scalastyle:off println
        val defaultLogProps = "com/github/sparkfy/log4j-defaults.properties"
        Option(Utils.getClassLoader.getResource(defaultLogProps)) match {
          case Some(url) =>
            PropertyConfigurator.configure(url)
            System.err.println(s"Using Spark's default log4j profile: $defaultLogProps")
          case None =>
            System.err.println(s"Spark was unable to load $defaultLogProps")
        }
        // scalastyle:on println
      }
    }
    Logging.initialized = true
    // Force a call into slf4j to initialize it. Avoids this happening from multiple threads
    // and triggering this: http://mailman.qos.ch/pipermail/slf4j-dev/2010-April/002956.html
    log
  }
}
private object Logging {
  // Guarded by initLock; volatile so the unsynchronized fast path in
  // initializeIfNecessary observes a fully published write.
  @volatile private var initialized = false
  val initLock = new Object()
  // One-time, best-effort install of the slf4j-to-JUL bridge at class-load time.
  try {
    // We use reflection here to handle the case where users remove the
    // slf4j-to-jul bridge order to route their logs to JUL.
    val bridgeClass = Utils.classForName("org.slf4j.bridge.SLF4JBridgeHandler")
    bridgeClass.getMethod("removeHandlersForRootLogger").invoke(null)
    val installed = bridgeClass.getMethod("isInstalled").invoke(null).asInstanceOf[Boolean]
    if (!installed) {
      bridgeClass.getMethod("install").invoke(null)
    }
  } catch {
    case e: ClassNotFoundException => // can't log anything yet so just fail silently
  }
}
| sparkfy/sparkfy | sparkfy-common/src/main/scala/com/github/sparkfy/Logging.scala | Scala | apache-2.0 | 5,566 |
/*
* Copyright 2014 Andrey Kutyrev
*
* Licensed under the the GNU Public License v3.0;
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.gnu.org/licenses/gpl.html
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================================
*/
package squ1b3r.thingummies.items
import cpw.mods.fml.common.registry.GameRegistry
import squ1b3r.thingummies.items.tool.ItemMagnetRF
import squ1b3r.thingummies.reference.Reference
// Registry of all items added by this mod; resolved by Forge via @ObjectHolder
// using the mod ID.
@GameRegistry.ObjectHolder(Reference.ModID)
object ModItems {
  // Item registration must happen during the mod's preInit phase.
  def preInit(): Unit = {
    // Tools
    GameRegistry.registerItem(ItemMagnetRF, ItemMagnetRF.getUnlocalizedName)
  }
}
| squ1b3r/Thingummies | src/main/scala/squ1b3r/thingummies/items/ModItems.scala | Scala | gpl-3.0 | 1,023 |
package com.datastax.spark.connector.util
import scala.collection.concurrent.TrieMap
import scala.reflect.runtime.universe._
import scala.util.{Try, Success, Failure}
object ReflectionUtil {
  private val rm = runtimeMirror(getClass.getClassLoader)
  // Caches singleton instances of Java classes created by findSingletonClassInstance.
  private val singletonCache = TrieMap[String, Any]()

  // NOTE: all bodies run inside TypeTag.synchronized because Scala 2 runtime
  // reflection is not thread-safe.

  /** Resolves a Scala `object` by fully qualified name and checks it conforms to T. */
  private def findScalaObject[T : TypeTag](objectName: String): Try[T] = TypeTag.synchronized {
    Try {
      val targetType = implicitly[TypeTag[T]].tpe
      val module = rm.staticModule(objectName)
      if (!(module.typeSignature <:< targetType))
        throw new IllegalArgumentException(s"Object $objectName is not instance of $targetType")
      val moduleMirror = rm.reflectModule(module)
      moduleMirror.instance.asInstanceOf[T]
    }
  }

  /** Instantiates (once, via no-arg constructor) and caches a Java class as a singleton. */
  private def findSingletonClassInstance[T : TypeTag](className: String): Try[T] = TypeTag.synchronized {
    Try {
      val targetType = implicitly[TypeTag[T]].tpe
      val targetClass = rm.runtimeClass(targetType.typeSymbol.asClass)
      val instance =
        singletonCache.get(className) match {
          case Some(obj) => obj
          case None =>
            // Racy construction is tolerated: putIfAbsent guarantees all callers
            // observe the same winning instance.
            val newInstance = Class.forName(className).getConstructor(Array.empty[Class[_]]: _*).newInstance()
            singletonCache.putIfAbsent(className, newInstance) match {
              case None => newInstance
              case Some(previousInstance) => previousInstance
            }
        }
      if (!targetClass.isInstance(instance))
        throw new IllegalArgumentException(s"Class $className is not $targetType")
      instance.asInstanceOf[T]
    }
  }

  /** Returns either a global Scala object by its fully qualified name or a singleton
    * instance of a Java class identified by its fully qualified class name.
    * Java class instances are cached. The Java class must provide a default constructor. */
  def findGlobalObject[T : TypeTag](objectName: String): T = {
    val scalaObject: Try[T] = findScalaObject[T](objectName)
    val classInstance: Try[T] = findSingletonClassInstance[T](objectName)
    scalaObject orElse classInstance match {
      case Success(obj) => obj
      case Failure(e) => throw new IllegalArgumentException(s"Singleton object not available: $objectName", e)
    }
  }

  /** Returns a list of parameter names and types of the main constructor.
    * The main constructor is assumed to be the one that has the highest number of parameters.
    * In case on ambiguity, this method throws IllegalArgumentException.*/
  def constructorParams(tpe: Type): Seq[(String, Type)] = TypeTag.synchronized {
    val ctorSymbol = Reflect.methodSymbol(tpe)
    // the reason we're using typeSignatureIn is because the constructor might be a generic type
    // and we don't really want to get generic type parameters here, but concrete ones:
    val ctorMethod = ctorSymbol.typeSignatureIn(tpe).asInstanceOf[MethodType]
    for (param <- ctorMethod.params) yield
      (param.name.toString, param.typeSignature)
  }

  def constructorParams[T : TypeTag]: Seq[(String, Type)] = TypeTag.synchronized {
    constructorParams(implicitly[TypeTag[T]].tpe)
  }

  /** Returns a list of names and return types of 0-argument public methods of a Scala type */
  def getters(tpe: Type): Seq[(String, Type)] = TypeTag.synchronized {
    val methods = for (d <- tpe.members.toSeq if d.isMethod && d.isPublic) yield d.asMethod
    for (g <- methods if g.isGetter) yield {
      // the reason we're using typeSignatureIn is because the getter might be a generic type
      // and we don't really want to get generic type here, but a concrete one:
      val returnType = g.typeSignatureIn(tpe).asInstanceOf[NullaryMethodType].resultType
      (g.name.toString, returnType)
    }
  }

  def getters[T : TypeTag]: Seq[(String, Type)] = TypeTag.synchronized {
    getters(implicitly[TypeTag[T]].tpe)
  }

  /** Returns a list of names and parameter types of 1-argument public methods of a Scala type,
    * returning no result (Unit) */
  def setters(tpe: Type): Seq[(String, Type)] = TypeTag.synchronized {
    val methods = for (d <- tpe.members.toSeq if d.isMethod && d.isPublic) yield d.asMethod
    for (s <- methods if s.isSetter) yield {
      // need a concrete type, not a generic one:
      val paramType = s.typeSignatureIn(tpe).asInstanceOf[MethodType].params(0).typeSignature
      (s.name.toString, paramType)
    }
  }

  def setters[T : TypeTag]: Seq[(String, Type)] = TypeTag.synchronized {
    setters(implicitly[TypeTag[T]].tpe)
  }
}
| nvoron23/spark-cassandra-connector | spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/util/ReflectionUtil.scala | Scala | apache-2.0 | 4,504 |
/*
* Copyright 2015 ligaDATA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ligadata.MetadataAPI.Utility
import com.ligadata.MetadataAPI.MetadataAPIImpl
import org.apache.logging.log4j._
import com.ligadata.kamanja.metadata.MdMgr
/**
* Created by dhaval on 8/13/15.
*/
/**
 * Service helpers that dump metadata information (full metadata, nodes,
 * clusters, cluster configs, adapters) as strings through the Metadata API.
 */
object DumpService {
  private val userid: Option[String] = Some("metadataapi")
  val loggerName = this.getClass.getName
  lazy val logger = LogManager.getLogger(loggerName)

  // Renders an exception as message + full stack trace and logs it. The
  // previous code returned e.getStackTrace.toString, which is just the
  // array's default toString (e.g. "[Ljava.lang.StackTraceElement;@1f2a...")
  // and discards all diagnostic detail; it also swallowed the error silently.
  private def errorResponse(e: Exception): String = {
    logger.error("Metadata dump request failed", e)
    val sw = new java.io.StringWriter()
    e.printStackTrace(new java.io.PrintWriter(sw))
    sw.toString
  }

  /** Dumps the whole in-memory metadata manager (output goes to DEBUG logs). */
  def dumpMetadata: String = {
    try {
      MdMgr.GetMdMgr.dump
      "Metadata dumped in DEBUG mode"
    } catch {
      case e: Exception => errorResponse(e)
    }
  }

  /** Returns all known nodes as JSON. */
  def dumpAllNodes: String = {
    try {
      MetadataAPIImpl.GetAllNodes("JSON", userid)
    } catch {
      case e: Exception => errorResponse(e)
    }
  }

  /** Returns all known clusters as JSON. */
  def dumpAllClusters: String = {
    try {
      MetadataAPIImpl.GetAllClusters("JSON", userid)
    } catch {
      case e: Exception => errorResponse(e)
    }
  }

  /** Returns all cluster configurations as JSON. */
  def dumpAllClusterCfgs: String = {
    try {
      MetadataAPIImpl.GetAllClusterCfgs("JSON", userid)
    } catch {
      case e: Exception => errorResponse(e)
    }
  }

  /** Returns all adapters as JSON. */
  def dumpAllAdapters: String = {
    try {
      MetadataAPIImpl.GetAllAdapters("JSON", userid)
    } catch {
      case e: Exception => errorResponse(e)
    }
  }
}
| traytonwhite/Kamanja | trunk/MetadataAPI/src/main/scala/com/ligadata/MetadataAPI/Utility/DumpService.scala | Scala | apache-2.0 | 2,200 |
package com.sopranoworks.bolt.values
import com.sopranoworks.bolt._
import com.google.cloud.spanner.{ResultSet, ResultSets, Struct, Type, Value=>SValue, Database => SDatabase}
import org.specs2.mutable.Specification
import scala.collection.JavaConversions._
/** Specs2 suite for [[ResultIndexValue]]: indexing into subquery results,
  * struct values and array values, including out-of-range behaviour. */
class ResultIndexValueTest extends Specification {
  // Minimal in-memory Database stub; no tables are registered by default.
  class DummyDatabase extends Database {
    var tables = Map.empty[String,Table]
    override def table(name: String): Option[Table] = tables.get(name)
  }
  // Nut stub whose native queries always return one fixed row with three
  // INT64 columns, regardless of the SQL text passed in.
  class DummyNut extends Bolt.Nut(null) {
    private val _database = new DummyDatabase
    override def database: Database = _database
    override def executeNativeQuery(sql: String): ResultSet = {
      val sb = Struct.newBuilder()
      sb.set("ONE").to(SValue.int64(1))
      sb.set("TWO").to(SValue.int64(2))
      // NOTE(review): "THREE" is populated with int64(2) — possibly a typo for 3.
      // None of the tests below assert on index 2, so it is currently harmless;
      // confirm intent before relying on this fixture elsewhere.
      sb.set("THREE").to(SValue.int64(2))
      ResultSets.forRows(Type.struct(List(Type.StructField.of("ONE",Type.int64()),Type.StructField.of("TWO",Type.int64()),Type.StructField.of("THREE",Type.int64()) )),List(sb.build()))
    }
  }
  "resolveReference" should {
    // Index 0 of the single-row subquery result is the "ONE" column (1).
    "from SubqueryValue at index 0" in {
      val nat = new DummyNut
      val qc = QueryContext(nat,null)
      val v = ResultIndexValue(SubqueryValue(nat,"SELECT *",qc),0)
      v.eval.asValue.isInstanceOf[IntValue] must_== true
      v.asValue.asInstanceOf[IntValue].value must_== 1
    }
    // Index 1 is the "TWO" column (2).
    "from SubqueryValue at index 1" in {
      val nat = new DummyNut
      val qc = QueryContext(nat,null)
      val v = ResultIndexValue(SubqueryValue(nat,"SELECT *",qc),1)
      v.eval.asValue.isInstanceOf[IntValue] must_== true
      v.asValue.asInstanceOf[IntValue].value must_== 2
    }
    // Only indices 0..2 exist; index 3 must fail on evaluation.
    "out of range from SubqueryValue" in {
      val nat = new DummyNut
      val qc = QueryContext(nat,null)
      val v = ResultIndexValue(SubqueryValue(nat,"SELECT *",qc),3)
      v.eval.asValue must throwA[RuntimeException]
    }
    // Same indexing behaviour for an in-memory StructValue.
    "from StructValue at 0" in {
      val st = StructValue()
      st.addValue(IntValue("1",1,true))
      st.addValue(IntValue("2",2,true))
      st.addValue(IntValue("3",3,true))
      val v = ResultIndexValue(st,0)
      v.eval.asValue.isInstanceOf[IntValue] must_== true
      v.asValue.asInstanceOf[IntValue].value must_== 1
    }
    "from StructValue at 1" in {
      val st = StructValue()
      st.addValue(IntValue("1",1,true))
      st.addValue(IntValue("2",2,true))
      st.addValue(IntValue("3",3,true))
      val v = ResultIndexValue(st,1)
      v.eval.asValue.isInstanceOf[IntValue] must_== true
      v.asValue.asInstanceOf[IntValue].value must_== 2
    }
    "out of range from StructValue" in {
      val st = StructValue()
      st.addValue(IntValue("1",1,true))
      st.addValue(IntValue("2",2,true))
      st.addValue(IntValue("3",3,true))
      val v = ResultIndexValue(st,3)
      v.eval.asValue must throwA[RuntimeException]
    }
    // And for an ArrayValue of INT64.
    "from Array 0" in {
      val arr = ArrayValue(List(IntValue("1",1,true),IntValue("2",2,true),IntValue("3",3,true)),true,Type.int64())
      val v = ResultIndexValue(arr,0)
      v.eval.asValue.isInstanceOf[IntValue] must_== true
      v.asValue.asInstanceOf[IntValue].value must_== 1
    }
    "from Array 1" in {
      val arr = ArrayValue(List(IntValue("1",1,true),IntValue("2",2,true),IntValue("3",3,true)),true,Type.int64())
      val v = ResultIndexValue(arr,1)
      v.eval.asValue.isInstanceOf[IntValue] must_== true
      v.asValue.asInstanceOf[IntValue].value must_== 2
    }
    "out of range from Array" in {
      val arr = ArrayValue(List(IntValue("1",1,true),IntValue("2",2,true),IntValue("3",3,true)),true,Type.int64())
      val v = ResultIndexValue(arr,3)
      v.eval.asValue must throwA[RuntimeException]
    }
  }
}
| OsamuTakahashi/bolt | src/test/scala/com/sopranoworks/bolt/values/ResultIndexValueTest.scala | Scala | mit | 3,675 |
package almond.input
import almond.api.JupyterApi
/** A stdin request to the Jupyter front-end: a prompt text plus a flag telling
  * the front-end to treat the input as a password. Instances are immutable;
  * the `withX` methods return modified copies. */
final class Input private (
  val prompt: String,
  val password: Boolean
) {

  /** Copy of this request displaying `prompt` before reading input. */
  def withPrompt(prompt: String): Input =
    new Input(prompt, this.password)

  /** Copy of this request with the password flag set to `password`. */
  def withPassword(password: Boolean): Input =
    new Input(this.prompt, password)

  /** Copy of this request with the password flag enabled. */
  def withPassword(): Input =
    new Input(this.prompt, true)

  // TODO Also allow to return result via a future?
  /** Sends the request through the kernel API and waits for the answer. */
  def request()(implicit api: JupyterApi): String =
    api.stdin(prompt, password)
}

object Input {

  /** Plain input request with no prompt. */
  def apply(): Input =
    new Input("", password = false)

  /** Plain input request showing `prompt`. */
  def apply(prompt: String): Input =
    new Input(prompt, password = false)

  /** Password input request with no prompt. */
  def password(): Input =
    new Input("", password = true)

  /** Password input request showing `prompt`. */
  def password(prompt: String): Input =
    new Input(prompt, password = true)
}
| alexarchambault/jupyter-scala | modules/scala/jupyter-api/src/main/scala/almond/input/Input.scala | Scala | apache-2.0 | 894 |
package ohnosequences.cosas.types
import ohnosequences.cosas._, fns._, klists._
/* This is a parser for a particular _single_ denotation */
// TODO: update to DepFns
trait AnyDenotationParser {
  // The type whose denotations this parser produces
  type Type <: AnyType
  val tpe: Type
  /* This is normally `tpe.label`, but it's left free so that you can parse type label from a different representation */
  val labelRep: String
  /* The type used to denote Type */
  type D <: Type#Raw
  /* The type from which we try to parse `D` */
  type Value
  // Partial parsing function: `None` means the value could not be read as a `D`
  val parser: Value => Option[D]
  /* Parses a (key, value) pair into a denotation `tpe := d`.
     Succeeds only when `k` equals `labelRep` and `parser` accepts `v`;
     otherwise yields a `WrongKey` or `ErrorParsingValue` error. */
  def apply(k: String, v: Value): Either[DenotationParserError, Type := D] = k match {
    case `labelRep` => parser(v)
      .fold[Either[DenotationParserError, Type := D]](
        Left(ErrorParsingValue(tpe)(v))
      )(
        d => Right(tpe := d)
      )
    case _ => Left(WrongKey(tpe, k, labelRep))
  }
}
sealed trait DenotationParserError
/* This type of error occurs when the `parser` function returns `None` */
case class ErrorParsingValue[Tpe <: AnyType, Value](val tpe: Tpe)(val from: Value) extends DenotationParserError
/* This error may occur when the parsed value pair has a wrong key label */
case class WrongKey[Tpe <: AnyType](val tpe: Tpe, val got: String, val expected: String) extends DenotationParserError
// TODO: I think this constructor is enough and AnyDenotationParser is not needed
/* Concrete parser constructor: fixes the abstract members of `AnyDenotationParser`
   to the given type/value parameters. */
class DenotationParser[T <: AnyType, D0 <: T#Raw, V](
  val tpe: T,
  val labelRep: String
)(val parser: V => Option[D0]
) extends AnyDenotationParser {
  type Type = T
  type D = D0
  type Value = V
}
case object AnyDenotationParser {
  // NOTE: this won't work for a parametrized type T, because there is no implicit `tpe`
  /* Default instance: an identity parse for values that already have the denoted raw type */
  implicit def genericParser[T <: AnyType { type Raw >: D }, D](implicit tpe: T): DenotationParser[T,D,D] =
    new DenotationParser(tpe, tpe.label)(d => Some(d))
}
/* This is a DepFn which parses a _KList of denotations_ from a Map of pairs (type label -> value: V) */
class ParseDenotations[V, Ts <: AnyProductType] extends DepFn1[
  Map[String, V],
  Either[ParseDenotationsError, Ts#Raw]
]
/* Errors that can arise while parsing a whole product of denotations */
trait ParseDenotationsError
/* A label required by the product type is missing from the input map */
case class KeyNotFound[V](val key: String, val map: Map[String,V]) extends ParseDenotationsError
/* A single-denotation parser failed; wraps its underlying error */
case class ErrorParsing[PE <: DenotationParserError](val err: PE) extends ParseDenotationsError
case object ParseDenotations {
  /* Base case: the empty product parses from any map into the empty klist */
  implicit def emptyParam[V, T <: AnyType]
  : AnyApp1At[ParseDenotations[V, |[T]], Map[String,V]] { type Y = Either[ParseDenotationsError,*[AnyDenotation]] } =
    App1 { map: Map[String,V] => Right(*[AnyDenotation]) }
  // TODO: improve parameters names
  /* Inductive case: look up the head type's label, parse that value with the
     head parser, then recurse on the remaining product; fails fast on the
     first missing key or parse error. */
  implicit def nonEmpty[
    V,
    H <: Ts#Types#Bound { type Raw >: HR }, HR, Ts <: AnyProductType { type Raw >: Ds },
    Ds <: AnyKList.withBound[AnyDenotation]
  ](implicit
    parseH: DenotationParser[H,HR,V],
    parseRest: AnyApp1At[ParseDenotations[V,Ts], Map[String,V]] { type Y = Either[ParseDenotationsError,Ds] }
  )
  : AnyApp1At[ParseDenotations[V, H :×: Ts], Map[String,V]] { type Y = Either[ParseDenotationsError, (H := HR) :: Ds] } =
    App1 { map: Map[String,V] => {
      // the head label must be present in the map
      map.get(parseH.labelRep).fold[Either[ParseDenotationsError, (H := HR) :: Ds]](
        Left(KeyNotFound(parseH.labelRep, map))
      )(
        v => parseH(parseH.labelRep, v) fold (
          l => Left(ErrorParsing(l)),
          r => parseRest(map).fold[Either[ParseDenotationsError, (H := HR) :: Ds]] (
            err => Left(err),
            td => Right(r :: (td: Ds))
          )
        )
      )
    }
    }
}
| ohnosequences/cosas | src/main/scala/cosas/types/parsing.scala | Scala | agpl-3.0 | 3,513 |
package org.workcraft.pluginmanager
import org.scalatest.Spec
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import java.util.UUID
@RunWith(classOf[JUnitRunner])
/** Tests for reading and writing plugin manifest files: error reporting for
  * missing/empty/mismatched manifests and a write/read round-trip. */
class PluginManifestTest extends Spec {
  // True iff `list` is exactly the two well-known test plugin class names, in order.
  def correct(list: Traversable[String]) = {
    val l = list.toIndexedSeq
    (l.length == 2) && (l(0).equals("org.workcraft.GoodPluginA")) && (l(1).equals("org.workcraft.GoodPluginB"))
  }
  // UUID written into / expected from manifests — presumably the manifest
  // format/version id checked by PluginManifest.read (TODO confirm).
  val guid1 = UUID.fromString("b9a4c2f9-d937-4abd-9e50-c9fdb156a28e")
  describe("PluginManifest") {
    it("should report a missing or unreadable file as exception") {
      PluginManifest.read(guid1, "no such file") match {
        case Right(_) => fail("expected error value")
        case Left(error) => error match {
          case ManifestReadError.Exception(_) => {}
          // NOTE(review): the failure message says "expected Empty" but the
          // expected case here is Exception — misleading when this test fails.
          case x => fail("expected Empty, got " + x.getClass().getSimpleName())
        }
      }
    }
    it("should report an empty manifest as error") {
      PluginManifest.read(guid1, ClassLoader.getSystemResource("emptyManifest").getPath()) match {
        case Right(_) => fail("expected error value")
        case Left(error) => error match {
          case ManifestReadError.Empty() => {}
          case x => fail("expected Empty, got " + x.getClass().getSimpleName())
        }
      }
    }
    it("should report a version mismatch") {
      PluginManifest.read(guid1, ClassLoader.getSystemResource("wrongVersion").getPath()) match {
        case Right(_) => fail("expected error value")
        case Left(error) => error match {
          case ManifestReadError.VersionMismatch() => {}
          case x => fail("expected VersionMismatch, got " + x.getClass().getSimpleName())
        }
      }
    }
    it("should correctly read a well-formed manifest") {
      PluginManifest.read(guid1, ClassLoader.getSystemResource("goodManifest").getPath()) match {
        case Left(x) => fail("did not expect an error value, got " + x.getClass().getSimpleName())
        case Right(list) => assert(correct(list))
      }
    }
    // Round-trip: write a manifest to the test output directory and read it back.
    it("should correctly read back the manifest that it has written") {
      PluginManifest.write(guid1, "target/test-classes/testManifest", List("org.workcraft.GoodPluginA", "org.workcraft.GoodPluginB")) match {
        case Some(error) => fail("did not expect an error value while writing, got " + error.getClass().getSimpleName())
        case None => PluginManifest.read(guid1, "target/test-classes/testManifest") match {
          case Left(x) => fail("did not expect an error value while reading, got " + x.getClass().getSimpleName())
          case Right(list) => assert(correct(list))
        }
      }
    }
  }
}
| tuura/workcraft-2.2 | PluginManager/src/test/scala/org/workcraft/pluginmanager/PluginManifestTest.scala | Scala | gpl-3.0 | 2,721 |
/*
* Sonar Scoverage Plugin
* Copyright (C) 2013 Rado Buransky
* dev@sonar.codehaus.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02
*/
package com.buransky.plugins.scoverage.resource
import org.sonar.api.resources.Directory
import com.buransky.plugins.scoverage.language.Scala
/**
* Single directory in file system. Unlike org.sonar.api.resources.Directory that can represent
* a chain of directories.
*
* @author Rado Buransky
*/
class SingleDirectory(key: String, scala: Scala) extends Directory(key) {
  // Position of the last path separator in the key; shared by name/parent below.
  private val sepIdx: Int = key.lastIndexOf(Directory.SEPARATOR)

  // The final path segment, or the whole key when it contains no separator.
  private val name: String =
    if (sepIdx >= 0) key.substring(sepIdx + 1) else key

  // The enclosing directory, when the key has a non-empty prefix before the
  // last separator.
  private val parent: Option[SingleDirectory] =
    if (sepIdx > 0) Some(new SingleDirectory(key.substring(0, sepIdx), scala)) else None

  override lazy val getName = name
  override lazy val getLanguage = scala
  override lazy val getParent = parent.orNull
} | scoverage/sonar-scoverage-plugin | plugin/src/main/scala/com/buransky/plugins/scoverage/resource/SingleDirectory.scala | Scala | lgpl-3.0 | 1,648 |
package spire
package math.extras.interval
import spire.algebra.{Bool, Eq, Order}
import spire.math._
import spire.math.interval._
import scala.annotation.tailrec
import scala.language.implicitConversions
/** An `IntervalSet` implementation backed by a binary trie over the 64-bit
  * keys produced by an [[IntervalTrie.Element]] instance for `T`. */
sealed abstract class IntervalTrie[T] extends IntervalSet[T, IntervalTrie[T]]
/** Companion: factory methods for interval sets, the `Element` typeclass that
  * maps element types to the 64-bit keys the trie is built on, and the
  * private trie-backed implementation. */
object IntervalTrie {
  /** Boolean algebra and equality instance: the lattice operations are the
    * set operations defined on `IntervalTrie` below. */
  implicit def algebra[T:Element]: Bool[IntervalTrie[T]] with Eq[IntervalTrie[T]] = new Bool[IntervalTrie[T]] with Eq[IntervalTrie[T]] {
    def eqv(x: IntervalTrie[T], y: IntervalTrie[T]): Boolean = x == y
    def zero: IntervalTrie[T] = IntervalTrie.empty[T]
    def one: IntervalTrie[T] = IntervalTrie.all[T]
    def complement(a: IntervalTrie[T]): IntervalTrie[T] = ~a
    def or(a: IntervalTrie[T], b: IntervalTrie[T]): IntervalTrie[T] = a | b
    def and(a: IntervalTrie[T], b: IntervalTrie[T]): IntervalTrie[T] = a & b
    override def xor(a: IntervalTrie[T], b: IntervalTrie[T]): IntervalTrie[T] = a ^ b
  }
  /** Typeclass linking an element type `T` to the trie: an `Order[T]` plus a
    * conversion to/from `Long` keys. The instances below are arranged so that
    * signed `Long` ordering of keys matches `order` (see the float/unsigned
    * instances for the non-trivial encodings). */
  trait Element[@sp(Float, Int, Long, Double) T] {
    implicit def order:Order[T]
    def toLong(value:T): Long
    def fromLong(key:Long) : T
  }
  implicit object ByteElement extends Element[Byte] {
    def order: Order[Byte] = spire.std.byte.ByteAlgebra
    def toLong(value:Byte): Long = value
    def fromLong(key:Long): Byte = key.toByte
  }
  implicit object ShortElement extends Element[Short] {
    def order: Order[Short] = spire.std.short.ShortAlgebra
    def toLong(value:Short): Long = value
    def fromLong(key:Long): Short = key.toShort
  }
  implicit object IntElement extends Element[Int] {
    def order: Order[Int] = spire.std.int.IntAlgebra
    def toLong(value:Int): Long = value
    def fromLong(key:Long) : Int = key.toInt
  }
  implicit object LongElement extends Element[Long] {
    def order: Order[Long] = spire.std.long.LongAlgebra
    def toLong(value:Long): Long = value
    def fromLong(key:Long) : Long = key
  }
  // IEEE 754 floats: the sign-and-magnitude bit pattern is remapped so signed
  // integer ordering of keys matches numeric ordering; NaN is rejected.
  implicit object FloatElement extends Element[Float] {
    def order: Order[Float] = spire.std.float.FloatAlgebra
    def toLong(value:Float): Long = {
      if(java.lang.Float.isNaN(value))
        throw new IllegalArgumentException("NaN")
      // sign and magnitude signed integer
      val signAndMagnitude = java.lang.Float.floatToIntBits(value)
      // two's complement signed integer: if the sign bit is set, negate everything except the sign bit
      val twosComplement = if(signAndMagnitude>=0) signAndMagnitude else (-signAndMagnitude | (1L<<63))
      twosComplement
    }
    def fromLong(twosComplement:Long): Float = {
      // sign and magnitude signed integer: if the sign bit is set, negate everything except the sign bit
      val signAndMagnitude = if(twosComplement>=0) twosComplement else (-twosComplement | (1L<<63))
      // double from sign and magnitude signed integer
      // (the .toInt truncation recovers the 32 float bits from the low word)
      java.lang.Float.intBitsToFloat(signAndMagnitude.toInt)
    }
  }
  implicit object CharElement extends Element[Char] {
    def order: Order[Char] = spire.std.char.CharAlgebra
    def toLong(value:Char): Long = value.toLong
    def fromLong(key:Long): Char = key.toChar
  }
  // Same sign-and-magnitude → order-preserving remapping as for Float, on 64 bits.
  implicit object DoubleElement extends Element[Double] {
    def order: Order[Double] = spire.std.double.DoubleAlgebra
    def toLong(value:Double): Long = {
      if(java.lang.Double.isNaN(value))
        throw new IllegalArgumentException("NaN")
      // sign and magnitude signed integer
      val signAndMagnitude = java.lang.Double.doubleToLongBits(value)
      // two's complement signed integer: if the sign bit is set, negate everything except the sign bit
      val twosComplement = if(signAndMagnitude>=0) signAndMagnitude else (-signAndMagnitude | (1L<<63))
      twosComplement
    }
    def fromLong(twosComplement:Long): Double = {
      // sign and magnitude signed integer: if the sign bit is set, negate everything except the sign bit
      val signAndMagnitude = if(twosComplement>=0) twosComplement else (-twosComplement | (1L<<63))
      // double from sign and magnitude signed integer
      java.lang.Double.longBitsToDouble(signAndMagnitude)
    }
  }
  implicit object UByteElement extends Element[UByte] {
    def order: Order[UByte] = spire.math.UByte.UByteAlgebra
    def toLong(value:UByte): Long = value.toLong
    def fromLong(key:Long) : UByte = UByte(key.toByte)
  }
  implicit object UShortElement extends Element[UShort] {
    def order: Order[UShort] = spire.math.UShort.UShortAlgebra
    def toLong(value:UShort): Long = value.toLong
    def fromLong(key:Long) : UShort = UShort(key.toShort)
  }
  implicit object UIntElement extends Element[UInt] {
    def order: Order[UInt] = spire.math.UInt.UIntAlgebra
    def toLong(value:UInt): Long = value.toLong
    def fromLong(key:Long) : UInt = UInt(key.toInt)
  }
  // Unsigned longs: shifting by Long.MinValue turns unsigned order into signed Long order.
  implicit object ULongElement extends Element[ULong] {
    def order: Order[ULong] = spire.math.ULong.ULongAlgebra
    def toLong(value:ULong): Long = value.toLong + Long.MinValue
    def fromLong(key:Long) : ULong = ULong(key - Long.MinValue)
  }
  import Tree._
  // Implicit view so a T can be passed where a Long key is expected (used by Below/Above/Both).
  private implicit def tIsLong[T](value:T)(implicit tl:Element[T]) = tl.toLong(value)
  /* Builds a one-leaf set; kind: 0 = Below, 1 = Above, 2 = Both.
     Any other kind throws a MatchError (no default case). */
  private[interval] def fromKind[T:Element](value:T, kind:Int) = {
    val bound = kind match {
      case 0 => Below(value)
      case 1 => Above(value)
      case 2 => Both(value)
    }
    IntervalTrie[T](false, bound)
  }
  // Basic constructors. The boolean passed to the private apply is `belowAll`:
  // whether the set contains everything below its left-most edge.
  def constant[T:Element](value:Boolean): IntervalTrie[T] = IntervalTrie[T](value, null)
  def empty[T:Element]: IntervalTrie[T] = constant[T](false)
  def point[T:Element](value:T): IntervalTrie[T] = IntervalTrie[T](false, Tree.Leaf(toPrefix(value), true, false))
  def atOrAbove[T:Element](value:T): IntervalTrie[T] = IntervalTrie[T](false, Tree.Leaf(toPrefix(value), true, true))
  def above[T:Element](value:T): IntervalTrie[T] = IntervalTrie[T](false, Tree.Leaf(toPrefix(value), false, true))
  def all[T:Element]: IntervalTrie[T] = constant[T](true)
  def hole[T:Element](value:T): IntervalTrie[T] = IntervalTrie[T](true, Tree.Leaf(toPrefix(value), true, false))
  def below[T:Element](value:T): IntervalTrie[T] = IntervalTrie[T](true, Tree.Leaf(toPrefix(value), true, true))
  def atOrBelow[T:Element](value:T): IntervalTrie[T] = IntervalTrie[T](true, Tree.Leaf(toPrefix(value), false, true))
  /** Converts a spire `Interval[T]` into its trie representation by cases on
    * the lower/upper bound combination. */
  def apply[T:Element](interval:Interval[T]): IntervalTrie[T] = interval.fold {
    case (Closed(a), Closed(b)) if a == b => point(a)
    case (Unbound(), Open(x)) => below(x)
    case (Unbound(), Closed(x)) => atOrBelow(x)
    case (Open(x), Unbound()) => above(x)
    case (Closed(x), Unbound()) => atOrAbove(x)
    case (Closed(a), Closed(b)) => fromTo(Below(a), Above(b))
    case (Closed(a), Open(b)) => fromTo(Below(a), Below(b))
    case (Open(a), Closed(b)) => fromTo(Above(a), Above(b))
    case (Open(a), Open(b)) => fromTo(Above(a), Below(b))
    case (Unbound(), Unbound()) => all[T]
    case (EmptyBound(), EmptyBound()) => empty[T]
  }
  /* Leaf encodings used throughout:
     Below(at=true, sign=true), Above(at=false, sign=true), Both(at=true, sign=false). */
  private object Below {
    def apply[T: Element](value:T) = Leaf(toPrefix(value), true, true)
    def unapply(l:Leaf) = if(l.at && l.sign) Some(l.key) else None
  }
  private object Above {
    def apply[T: Element](value:T) = Leaf(toPrefix(value), false, true)
    def unapply(l:Leaf) = if(!l.at && l.sign) Some(l.key) else None
  }
  private object Both {
    def apply[T: Element](value:T) = Leaf(toPrefix(value), true, false)
    def unapply(l:Leaf) = if(l.at && !l.sign) Some(l.key) else None
  }
  // A two-edge set (one contiguous interval between leaves a and b).
  private def fromTo[T:Element](a:Leaf, b:Leaf) : IntervalTrie[T] = {
    IntervalTrie[T](false, concat(a, b))
  }
  /** Parses a `;`-separated string of intervals into a set of Longs; rational
    * bounds outside the Long range are rejected with NumberFormatException. */
  def apply(text:String): IntervalTrie[Long] = {
    val la = spire.std.long.LongAlgebra
    def rationalToLong(r:Rational) : Long = {
      if(r>Long.MaxValue || r<Long.MinValue)
        throw new NumberFormatException("Integer number too large")
      else
        r.toLong
    }
    def intervalToIntervalSet(i:Interval[Long]) : IntervalTrie[Long] = apply(i)
    val intervals = text.split(';').map(Interval.apply).map(_.mapBounds(rationalToLong)(la))
    val simpleSets = intervals.map(intervalToIntervalSet)
    simpleSets.foldLeft(empty[Long])(_ | _)
  }
  /* In-order traversal of the tree that reconstructs the intervals of the set
     and feeds each one to `f`. `a0` is the membership state entering from the
     left (belowAll); `op` returns the pending bound for the next edge. */
  private final def foreachInterval[T:Element, U](a0:Boolean, a:Tree)(f:Interval[T] => U): Unit = {
    val x = implicitly[Element[T]]
    import x._
    def op(b0:Bound[T], a0:Boolean, a:Tree): Bound[T] = a match {
      case Below(a) =>
        if(a0)
          f(Interval.fromBounds(b0, Open(fromLong(a))))
        Closed(fromLong(a))
      case Above(a) =>
        if(a0)
          f(Interval.fromBounds(b0, Closed(fromLong(a))))
        Open(fromLong(a))
      case Both(a) =>
        if(a0)
          f(Interval.fromBounds(b0, Open(fromLong(a))))
        else
          f(Interval.point(fromLong(a)))
        Open(fromLong(a))
      case a:Branch =>
        val am = a0 ^ a.left.sign
        val bm = op(b0, a0, a.left)
        val b1 = op(bm, am, a.right)
        b1
      case _ =>
        Unbound()
    }
    val last = op(Unbound(), a0, a)
    if(a0 ^ ((a ne null) && a.sign))
      f(Interval.fromBounds(last, Unbound()))
  }
  /* Iterative left-to-right leaf traversal with an explicit stack. */
  private abstract class TreeIterator[T](a:Tree) extends Iterator[T] {
    var index = 0
    // explicit DFS stack; 65 slots — presumably one more than the maximal
    // trie depth over 64-bit keys (NOTE(review): confirm)
    var buffer = new Array[Tree](65)
    def pop() = {
      index -= 1
      buffer(index)
    }
    def push(x: Tree): Unit = {
      buffer(index) = x
      index += 1
    }
    if(a ne null)
      push(a)
    def hasNextLeaf = index != 0
    final def nextLeaf(): Leaf = pop() match {
      case b:Branch =>
        push(b.right)
        push(b.left)
        nextLeaf()
      case l:Leaf => l
      // $COVERAGE-OFF$
      case _ => unreachable
      // $COVERAGE-ON$
    }
  }
  /* Iterates over the boundary values (leaf keys) of a tree, as Ts. */
  private final class EdgeIterator[T:Element](tree:Tree) extends TreeIterator[T](tree) {
    private val element = implicitly[Element[T]]
    def hasNext = hasNextLeaf
    def next = element.fromLong(nextLeaf.key)
  }
  /* Turns the leaf sequence into spire Intervals, carrying the pending lower
     bound (`lower`, null when outside an interval) across leaves. */
  private final class IntervalIterator[T:Element](e:IntervalTrieImpl[T]) extends TreeIterator[Interval[T]](e.tree) {
    private[this] val element = implicitly[Element[T]]
    private[this] var lower: Bound[T] = if(e.belowAll) Unbound() else null
    // Returns the next completed interval, or null when the current leaf only
    // opened a new interval (the public next() skips the nulls).
    private[this] def nextInterval(): Interval[T] = {
      import element.{fromLong, order}
      var result : Interval[T] = null
      if(hasNextLeaf) {
        val leaf = nextLeaf()
        if(lower eq null) leaf match {
          case Both(x) =>
            result = Interval.point(fromLong(x))
            lower = null
          case Below(x) =>
            result = null
            lower = Closed(fromLong(x))
          case Above(x) =>
            result = null
            lower = Open(fromLong(x))
          // $COVERAGE-OFF$
          case _ => unreachable
          // $COVERAGE-ON$
        } else leaf match {
          case Both(x) =>
            val upper = Open(fromLong(x))
            result = Interval.fromBounds[T](lower, upper)
            lower = upper
          case Below(x) =>
            val upper = Open(fromLong(x))
            result = Interval.fromBounds[T](lower, upper)
            lower = null
          case Above(x) =>
            val upper = Closed(fromLong(x))
            result = Interval.fromBounds[T](lower, upper)
            lower = null
          // $COVERAGE-OFF$
          case _ => unreachable
          // $COVERAGE-ON$
        }
      } else if(lower ne null) {
        result = Interval.fromBounds(lower, Unbound())
        lower = null
      } else {
        Iterator.empty.next()
      }
      result
    }
    def hasNext: Boolean = hasNextLeaf || (lower ne null)
    @tailrec
    override def next(): Interval[T] = {
      val result = nextInterval()
      if(result ne null)
        result
      else
        next()
    }
  }
  // Canonical constructor for the private implementation.
  private def apply[T:Element](below:Boolean, tree:Tree): IntervalTrie[T] =
    IntervalTrieImpl(below, tree)
  /** Trie-backed implementation. `belowAll` says whether values below the
    * left-most edge belong to the set; `tree` holds the boundary structure
    * (null for the constant sets `empty`/`all`). */
  private final case class IntervalTrieImpl[T](belowAll:Boolean, tree:Tree)(implicit ise:Element[T]) extends IntervalTrie[T] { lhs =>
    import Tree._
    import ise.order
    // Membership above all edges: belowAll flipped by the tree's total sign.
    def aboveAll: Boolean = if(tree eq null) belowAll else belowAll ^ tree.sign
    def isEmpty = !belowAll && (tree eq null)
    // True when the set is a single contiguous interval (including empty/all).
    def isContiguous = if(belowAll) {
      tree match {
        case a:Leaf => a.sign
        case null => true
        case _ => false
      }
    } else {
      tree match {
        case _:Leaf => true
        case Branch(_,_,a:Leaf, b:Leaf) => a.sign & b.sign
        case null => true
        case _ => false
      }
    }
    /** Smallest interval containing the whole set (the empty interval for the
      * empty set); unbounded on a side where belowAll/aboveAll holds. */
    def hull: Interval[T] = {
      @tailrec
      def lowerBound(a:Tree) : Bound[T] = a match {
        case a:Branch => lowerBound(a.left)
        case Above(x) => Open(ise.fromLong(x))
        case Below(x) => Closed(ise.fromLong(x))
        case Both(x) => Closed(ise.fromLong(x))
      }
      @tailrec
      def upperBound(a:Tree) : Bound[T] = a match {
        case a:Branch => upperBound(a.right)
        case Both(x) => Closed(ise.fromLong(x))
        case Above(x) => Closed(ise.fromLong(x))
        case Below(x) => Open(ise.fromLong(x))
      }
      if(isEmpty) {
        Interval.empty[T]
      } else {
        val lower = if(belowAll) Unbound[T]() else lowerBound(tree)
        val upper = if(aboveAll) Unbound[T]() else upperBound(tree)
        Interval.fromBounds(lower, upper)
      }
    }
    // Point queries delegated to the Sample* tree operations.
    def below(value:T) : Boolean = SampleBelow(belowAll, tree, toPrefix(ise.toLong(value)))
    def at(value:T) : Boolean = SampleAt(belowAll, tree, toPrefix(ise.toLong(value)))
    def above(value:T) : Boolean = SampleAbove(belowAll, tree, toPrefix(ise.toLong(value)))
    def apply(value:T) : Boolean = at(value)
    // Set operations: the belowAll bits combine with the same boolean op as the trees.
    def & (rhs:IntervalTrie[T]) = rhs match {
      case rhs:IntervalTrieImpl[T] =>
        IntervalTrie[T](lhs.belowAll & rhs.belowAll, AndCalculator(lhs.belowAll, lhs.tree, rhs.belowAll, rhs.tree))
    }
    def | (rhs:IntervalTrie[T]) = rhs match {
      case rhs: IntervalTrieImpl[T] =>
        IntervalTrie[T](lhs.belowAll | rhs.belowAll, OrCalculator(lhs.belowAll, lhs.tree, rhs.belowAll, rhs.tree))
    }
    def ^ (rhs:IntervalTrie[T]) = rhs match {
      case rhs: IntervalTrieImpl[T] => IntervalTrie[T](lhs.belowAll ^ rhs.belowAll, XorCalculator(lhs.belowAll, lhs.tree, rhs.belowAll, rhs.tree))
    }
    // Complement: same edges, inverted membership.
    def unary_~ = IntervalTrie[T](!belowAll, tree)
    def isSupersetOf(rhs:IntervalTrie[T]) = rhs match {
      case rhs:IntervalTrieImpl[T] =>
        SupersetOfCalculator(lhs.belowAll, lhs.tree, rhs.belowAll, rhs.tree)
    }
    def intersects(rhs:IntervalTrie[T]) = rhs match {
      case rhs:IntervalTrieImpl[T] =>
        !DisjointCalculator(lhs.belowAll, lhs.tree, rhs.belowAll, rhs.tree)
    }
    def isProperSupersetOf(rhs:IntervalTrie[T]) = isSupersetOf(rhs) && (rhs != lhs)
    // Lazy view of the set as spire Intervals.
    def intervals = new Traversable[Interval[T]] {
      override def foreach[U](f: Interval[T] => U): Unit = foreachInterval(belowAll, tree)(f)
    }
    def intervalIterator = new IntervalIterator[T](lhs)
    // The boundary values of the set.
    def edges : Iterable[T] = new Iterable[T] {
      override def iterator: Iterator[T] = new EdgeIterator[T](lhs.tree)
    }
    override def toString = {
      if (isEmpty)
        Interval.empty[T].toString
      else
        intervals.map(_.toString).mkString(";")
    }
  }
}
| adampingel/spire | extras/src/main/scala/spire/math/extras/interval/IntervalTrie.scala | Scala | mit | 15,198 |
package spider
package database
import slick.jdbc.JdbcBackend._
import slick.jdbc.MySQLProfile.api.{ Database ⇒ _, DBIOAction ⇒ _, _ }
import slick.dbio.DBIO
import slick.dbio._
import scala.concurrent.{ Await, ExecutionContext }
import scala.concurrent.duration.Duration.Inf
import com.typesafe.scalalogging.Logger
object FarmDB {
  lazy val logger = Logger("spider.database")

  /** Ensures that the configured database and the product/category tables
    * exist, creating them when missing.
    *
    * Opens a temporary server-level connection (no database selected) to run
    * the DDL and always closes it afterwards — the original implementation
    * leaked this connection. DDL failures such as "already exists" are only
    * logged, so repeated start-ups are harmless. */
  def prepareDbAndTable(config: DBConfig): Unit = {
    import scala.concurrent.ExecutionContext.Implicits.global
    if (config.db.nonEmpty) {
      // Server-level connection; its config has db = None, so the recursive
      // call through getConnection short-circuits on the guard above.
      val bootstrap = getConnection(config.copy(db = None))
      try {
        val actions = Seq(
          createDatabaseSQL(config.db.get),
          createProductTable,
          createCategoryTable)
        actions foreach { action ⇒
          // Block until each DDL statement completes; log and continue on error.
          Await.result(bootstrap.run(action) recover {
            case e ⇒ logger.info(e.getMessage)
          }, Inf)
        }
      } finally bootstrap.close() // fix: this connection was previously never closed
    }
  }

  /** Opens a connection to the configured database, first making sure the
    * database and tables exist. */
  def getConnection(config: DBConfig): Database = {
    prepareDbAndTable(config)
    Database.forURL(mysqlURL(config), driver = "com.mysql.jdbc.Driver")
  }

  // NO INJECTION PREVENTION — `db` is interpolated verbatim into the SQL, so
  // it must only ever come from trusted configuration.
  private def createDatabaseSQL(db: String) = {
    sqlu"CREATE DATABASE IF NOT EXISTS #$db"
  }

  /** Builds the JDBC URL from host/port, optional database name, credentials
    * and any extra connection properties. */
  private def mysqlURL(config: DBConfig): String = {
    val builder = new StringBuilder(s"jdbc:mysql://${config.host}:${config.port}")
    for (db ← config.db) builder ++= s"/$db"
    builder ++= s"?user=${config.username}"
    for (pass ← config.password) builder ++= s"&password=$pass"
    for ((key, value) ← config.properties) builder ++= s"&$key=$value"
    builder.toString
  }

  def createProductTable = FarmTable.schema.create
  def createCategoryTable = CategoryTable.schema.create

  /** Adds a blocking `runSync` helper to slick's `Database`. */
  implicit class SyncDB(db: Database) {
    def runSync[R](a: DBIOAction[R, NoStream, Nothing])(
      implicit ec: ExecutionContext): R = {
      Await.result(db.run(a), Inf)
    }
  }
} | VinaLx/farm-spider | src/main/scala/spider/database/database.scala | Scala | mit | 1,821 |
package cvx
import breeze.linalg.{DenseMatrix, DenseVector, _}
/**
* Created by oar on 12/1/16.
*
* Class computes the affine space of all solutions x to a linear equation Ax=b, where A
* is an mxn matrix with m<n of full rank (hence the system Ax=b is underdetermined).
*
* The rank condition will not be checked.
*
* The solutions x will be represented in the form x=z0+Fu, where z0 is the minimum norm
* solution of Ax=b and the matrix F maps onto ker(A), more precisely, the columns of F are an
* orthonormal basis of ker(A).
*
* In particular thus one has AF=0 (equivalently: Im(F) is contained in ker(A)).
*/
class SolutionSpace(val A:DenseMatrix[Double], val b:DenseVector[Double]){
  // A must be a wide (m < n) matrix with as many rows as b has entries.
  assert(A.rows==b.length)
  assert(A.rows < A.cols)
  // (z0, F): minimum-norm solution of Ax=b and an orthonormal basis of ker(A),
  // computed once by MatrixUtils.solveUnderdetermined.
  val sol:(DenseVector[Double],DenseMatrix[Double]) = MatrixUtils.solveUnderdetermined(A,b)
  val z0:DenseVector[Double] = sol._1
  val F:DenseMatrix[Double] = sol._2
  /** If Ax0=b then x0 = z0 + Fu0 and so, since F'F=I (orthonormal columns) we have
    * F'(x0-z0)=F'Fu0=u0.
    * @return if Ax0=b returns u0 such that x0 = z0 + Fu0.
    */
  def parameter(x0:DenseVector[Double]):DenseVector[Double] = F.t*(x0-z0)
}
object SolutionSpace {
  /** Convenience constructor for the affine solution space of Ax = b. */
  def apply(A:DenseMatrix[Double], b:DenseVector[Double]) = new SolutionSpace(A,b)
} | spyqqqdia/cvx | src/main/scala/cvx/SolutionSpace.scala | Scala | mit | 1,302 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dnvriend.scalaz
import com.github.dnvriend.TestSpec
import scala.language.{ implicitConversions, postfixOps }
import scalaz.Kleisli._
import scalaz.Scalaz._
import scalaz._
class KleisliTest extends TestSpec {
  /**
   * see: http://www.casualmiracles.com/2012/07/02/a-small-example-of-kleisli-arrows/
   *
   * TL;DR
   * A Kleisli is function composition for Monads. If you have a __function__ that return higher-kinded-types
   * like List, Options etc then you can use a Kleisli to compose those __functions__, so it's all about
   * function composition of this shape 'A => M[A]' where M is the same higher-kinded-type like eg. Option.
   *
   * The problem
   * You have `functions` that take a simple type and return higher kinded types like Options or Lists,
   * and you need to `compose those functions`. You might think you need to get the first result,
   * pattern match on the it, apply it to the second function, repeat. Sounds rather cumbersome.
   *
   */
  // methods that take simple types and return higher-kinded types (HKT)
  // they will be ETA'd to Int => Option[String], String => Option[Int] and Int => Option[Double]
  def toStr(x: Int): Option[String] = Option(x.toString)
  def toInt(x: String): Option[Int] = Option(x.toInt)
  def toDouble(x: Int): Option[Double] = Option(x * 2.0)
  "compose functions that return higher-kinded-types" should "be composed the old fashioned way" in {
    // the process is quite simple,
    // 1. Int => Option[String]
    // 2. String => Option[Int]
    // 3. Int => Option[Double]
    // 4. Return the Option[Double]
    (for {
      x <- toStr(10)
      y <- toInt(x)
      z <- toDouble(y)
    } yield z) shouldBe Option(20.0)
  }
  // all the functions that will be composed must return the same type
  // in this case they all return Options.
  it should "be composed using a kleisli" in {
    // Kleisli of Option, Int to Double
    val kleisliComposition: Kleisli[Option, Int, Double] = kleisli(toStr _) andThenK toInt _ andThenK toDouble _
    val _: Kleisli[Option, Int, Double] = kleisli(toStr _) >==> toInt _ >==> toDouble _
    kleisliComposition(10) shouldBe Option(20.0)
  }
  // define an abstract computation using effects: both arrows receive the same
  // input (Kleisli's flatMap threads the original argument to each), and the
  // results are summed inside the effect F.
  def calc[F[_]: Monad](left: Kleisli[F, Int, Int], right: Kleisli[F, Int, Int]) = for {
    x <- left
    y <- right
  } yield x + y
  def maybeX(x: Int) = Option(x + 1)
  def maybeY(x: Int) = Option(x + 100)
  it should "compute using an abstract composition and two effects" in {
    val comp: ReaderT[Option, Int, Int] = calc[Option](Kleisli(maybeX), Kleisli(maybeY))
    // maybeX(1) = Some(2), maybeY(1) = Some(101), so the sum is Some(103).
    // Fix: the original assertion compared this Option[Int] against the empty
    // String "" and could never succeed.
    comp.apply(1) shouldBe Option(103)
  }
  //  def f2[F[_]: Monad](x: Int) = Kleisli[F, Int, Int](x * 100)
  //  val k1: ReaderT[F, Int, Int] = Kleisli(f1)
  //  val k2: ReaderT[Option, Int, Int] = Kleisli(f2)
  //  val k3: ReaderT[Option, Int, Int] = k1 compose k2
  //  val k4: ReaderT[Option, Int, Int] = k1 <=< k2 // alias for compose
  //  val k5: ReaderT[Option, Int, Int] = k1 andThen k2
  //  val k6: ReaderT[Option, Int, Int] = k1 >=> k2 // alias for andThen
  //  it should "" in {
  //    val result: Option[Int] = for {
  //      nr <- Option(4)
  //      result <- k1(nr)
  //    } yield result
  //
  //    println(result)
  //  }
}
| dnvriend/study-category-theory | scalaz-test/src/test/com/github/dnvriend/scalaz/KleisliTest.scala | Scala | apache-2.0 | 3,843 |
/*
* Copyright (C) 2017. RandomCoder <randomcoder@randomcoding.co.uk>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package uk.co.randomcoding.cucumber.generator.gherkin
/**
* Identifiers for the different Gherkin Components
*
* @author Tim Sheppard
*/
object GherkinComponentIdentifier {
  // Feature header and the narrative lines that may follow it.
  val FEATURE: String = "Feature:"
  val AS_A: String = "As a"
  val IN_ORDER_TO: String = "In order to"
  val I_WANT: String = "I want"

  // Scenario headers.
  val SCENARIO: String = "Scenario:"
  val SCENARIO_OUTLINE: String = "Scenario Outline:"

  // Step keywords.
  val GIVEN: String = "Given"
  val WHEN: String = "When"
  val THEN: String = "Then"
  val AND: String = "And"
  val BUT: String = "But"

  // Table of examples for a Scenario Outline.
  val EXAMPLES: String = "Examples:"
}
| randomcoder/gherkin-converter | src/main/scala/uk/co/randomcoding/cucumber/generator/gherkin/GherkinComponentIdentifier.scala | Scala | agpl-3.0 | 1,244 |
package reactivemongo
import reactivemongo.bson.{ BSONBinary, BSONDocument }
import reactivemongo.api.{
BSONSerializationPack,
NodeSetSession,
SessionTransaction,
WriteConcern
}
import reactivemongo.api.commands.{
InsertCommand,
ResolvedCollectionCommand,
WriteConcern => WC
}
// Verifies the BSON serialization of Insert commands, with and without a
// session, and across the lifecycle of a transaction.
final class InsertCommandSpec extends org.specs2.mutable.Specification {
  "Insert command" title
  // BSON writer under test, bound to the Command instance defined below.
  private val writer = InsertCommand.writer(BSONSerializationPack)(Command)
  section("unit")
  "Insert command" should {
    "be written" >> {
      // Document every serialized insert must contain, regardless of session.
      val base = BSONDocument(
        "insert" -> "foo",
        "ordered" -> false,
        "documents" -> (firstDoc +: otherDocs))
      // Lazy so the session (and its UUID) is only created by the test that uses it.
      lazy val session = new NodeSetSession(java.util.UUID.randomUUID())
      // Logical session id document expected when a session is attached.
      val lsid = BSONDocument(
        "lsid" -> BSONDocument(
          "id" -> BSONBinary(session.lsid)))
      val writeConcern = BSONDocument(
        "writeConcern" -> BSONDocument("w" -> 1, "j" -> false))
      // ---
      "without session" in {
        writer(None)(insert1) must_=== (base ++ writeConcern)
      }
      "with session" in {
        val write = writer(Some(session))
        // w/o transaction started
        write(insert1) must_=== (base ++ lsid ++ writeConcern) and {
          session.startTransaction(WriteConcern.Default, None).
            aka("transaction") must beSuccessfulTry[(SessionTransaction, Boolean)].which { _ =>
              // w/ transaction started
              write(insert1) must_=== (base ++ lsid ++ BSONDocument(
                "txnNumber" -> 1L,
                "startTransaction" -> true, // as first command in tx
                "autocommit" -> false))
            }
        } and {
          // w/o 'startTransaction' flag after first command in tx
          write(insert1) must_=== (base ++ lsid ++ BSONDocument(
            "txnNumber" -> 1L, "autocommit" -> false))
        }
      }
    }
  }
  section("unit")
  // --- Fixtures shared by the examples above.
  private lazy val firstDoc = BSONDocument("_id" -> 1, "value" -> "foo")
  private lazy val otherDocs = Seq(
    BSONDocument("_id" -> 2, "value" -> "bar"),
    BSONDocument("_id" -> 3, "value" -> "lorem"))
  private lazy val insert1 = ResolvedCollectionCommand(
    collection = "foo",
    command = Command.Insert(
      head = firstDoc,
      tail = otherDocs,
      ordered = false,
      writeConcern = WC.Default))
  private object Command extends InsertCommand[BSONSerializationPack.type] {
    val pack = BSONSerializationPack
  }
}
| ornicar/ReactiveMongo | driver/src/test/scala/InsertCommandSpec.scala | Scala | apache-2.0 | 2,482 |
package frmr.scyig.db
import slick.jdbc.MySQLProfile.api._
import net.liftweb.util.{BCrypt => _, _}
import org.mindrot.jbcrypt.BCrypt
// A registered user row. `id` is None until the row has been persisted.
case class User(
  id: Option[Int],
  email: String,
  passwordHash: String,
  name: String,
  superuser: Boolean
) {
  // Database id, or 0 for a not-yet-persisted user.
  val userId = id.getOrElse(0)

  // True when the candidate password matches the stored bcrypt hash.
  def checkpw(candidatePassword: String): Boolean =
    BCrypt.checkpw(candidatePassword, passwordHash)

  // Ids of the sponsors linked to this user. Cached after the first
  // (blocking) lookup because it is a lazy val.
  lazy val sponsorIds: Seq[Int] = {
    DB.runAwait(Users.filter(_.id === id).join(UsersSponsors).on(_.id === _.userId)
      .map(_._2.sponsorId).result).openOr(Seq())
  }

  // Replaces this user's sponsor links with the given ids.
  // FIXME: Make atomic — the delete and the inserts run as separate statements.
  def setSponsors(sponsorIds: Seq[Int]): Unit = {
    DB.runAwait(UsersSponsors.filter(_.userId === id).delete)
    // The insert results were previously bound to an unused val
    // (`sponsorInserts`); foreach makes the side-effect-only intent explicit.
    sponsorIds.foreach { sponsorId =>
      DB.runAwait(UsersSponsors += ("", userId, sponsorId))
    }
  }
}
// Slick schema mapping for the `users` table.
class Users(tag: Tag) extends Table[User](tag, "users") {
  def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
  def email = column[String]("email", O.Unique, O.Length(255))
  def passwordHash = column[String]("password_hash")
  def name = column[String]("name")
  def superuser = column[Boolean]("superuser")
  // Default projection: converts rows to/from the User case class.
  def * = (id.?, email, passwordHash, name, superuser) <> (User.tupled, User.unapply)
}
object Users extends TableQuery[Users](new Users(_)) {
  // Hashes a plaintext password with a freshly generated bcrypt salt.
  def hashpw(password: String): String = {
    val salt = BCrypt.gensalt()
    BCrypt.hashpw(password, salt)
  }
}
| farmdawgnation/scyig-judicial | src/main/scala/frmr/scyig/db/Users.scala | Scala | apache-2.0 | 1,369 |
package com.sksamuel.elastic4s.requests.searches.aggs.pipeline
import com.sksamuel.elastic4s.requests.searches.aggs.AggMetaDataFn
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
object AvgBucketPipelineAggBuilder {
  // Renders an avg_bucket pipeline aggregation as JSON. Field order matters:
  // buckets_path first, then the optional gap_policy/format, then metadata.
  def apply(agg: AvgBucketPipelineAgg): XContentBuilder = {
    val json = XContentFactory.jsonBuilder()
    json.startObject("avg_bucket")
    json.field("buckets_path", agg.bucketsPath)
    agg.gapPolicy.foreach { policy =>
      json.field("gap_policy", policy.toString.toLowerCase)
    }
    agg.format.foreach { fmt =>
      json.field("format", fmt)
    }
    json.endObject()
    AggMetaDataFn(agg, json)
    json.endObject()
  }
}
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/aggs/pipeline/AvgBucketPipelineAggBuilder.scala | Scala | apache-2.0 | 672 |
//
// Codex - a multi-language code indexer and grokker
// http://github.com/samskivert/codex
package codex.extract
import java.io.{File, StringReader}
import org.junit.Assert._
import org.junit._
import codex._
class ExtractorTest {
  import ExtractorTest._

  // Extracts a sources jar from the local Maven repo and checks that the
  // dumper produced some output. NOTE(review): silently does nothing when the
  // jar is absent on the machine running the tests.
  @Test def testJarReading {
    val buf = new StringBuilder
    val path = ".m2/repository/javax/servlet/servlet-api/2.5/servlet-api-2.5-sources.jar"
    val jar = file(home, path)
    if (jar.exists) {
      Extractor.extract(dumper(buf))(false)(jar)
      assertTrue(buf.length > 0) // TODO: test something?
    }
  }
}
object ExtractorTest {
  // Visitor that writes an indented textual trace of the extraction events
  // (one line per event) into the supplied buffer. Indentation tracks the
  // ENTER/EXIT nesting depth via the mutable `indent` field.
  def dumper (buf :StringBuilder) = new Visitor {
    def onCompUnit (path :String, isTest :Boolean) = {
      indent = 0
      dump(s"CU $path")
    }
    def onEnter (name :String, kind :String, offset :Int) {
      dump(s"ENTER $kind $name $offset")
      indent += 1
    }
    def onExit (name :String) {
      indent -= 1
      dump(s"EXIT $name")
    }
    private def dump (msg :String) = buf.append(" " * indent).append(msg).append("\\n")
    private var indent = 0
  }
  // Runs the extractor over the given code string (as compilation unit "test")
  // and returns the dumper's trace.
  def test (ex :Extractor, code :String) = {
    val buf = new StringBuilder
    ex.process(dumper(buf), false, "test", new StringReader(code))
    buf.toString
  }
}
| samskivert/codex | src/test/scala/codex/extract/ExtractorTest.scala | Scala | bsd-3-clause | 1,257 |
package ru.pavkin.todoist.api.dispatch.core
import dispatch.{Req, url}
import ru.pavkin.todoist.api
import ru.pavkin.todoist.api.{RawRequest, Token}
import ru.pavkin.todoist.api.core.AuthorizedRequestFactory
// Builds authorized Todoist sync-API POST requests: the resource payload is
// serialized into JSON-array strings and combined with the auth token and a
// fixed seq_no of "0".
case class DispatchAuthorizedRequestFactory(token: Token) extends AuthorizedRequestFactory[RawRequest, Req] {
  def produce(resources: RawRequest): Req = {
    val serialized = resources.mapValues(list => s"[${list.reverse.mkString(",")}]")
    val params = Map(
      "token" -> token,
      "seq_no" -> "0"
    ) ++ serialized
    url(api.syncURL).POST <<? params
  }
}
| vpavkin/todoist-api-scala | dispatch/src/main/scala/ru/pavkin/todoist/api/dispatch/core/DispatchAuthorizedRequestFactory.scala | Scala | mit | 550 |
package filodb.memory.format.vectors
import java.nio.ByteBuffer
import debox.Buffer
import spire.syntax.cfor._
import filodb.memory.{BinaryRegion, MemFactory}
import filodb.memory.format._
import filodb.memory.format.BinaryVector.BinaryVectorPtr
import filodb.memory.format.Encodings._
import filodb.memory.format.MemoryReader._
import filodb.memory.format.UnsafeUtils.ZeroPointer
// Factory and optimizer for off-heap Int vectors with 2/4/8/16/32-bit packing.
object IntBinaryVector {
  /**
   * Creates a new MaskedIntAppendingVector, allocating a byte array of the right size for the max #
   * of elements. The returned vector carries an NA bitmap mask.
   * @param maxElements initial maximum number of elements this vector will hold.  Will automatically grow.
   */
  def appendingVector(memFactory: MemFactory,
                      maxElements: Int,
                      nbits: Short = 32,
                      signed: Boolean = true): BinaryAppendableVector[Int] = {
    val bytesRequired = 12 + BitmapMask.numBytesRequired(maxElements) + noNAsize(maxElements, nbits)
    val addr = memFactory.allocateOffheap(bytesRequired)
    val dispose = () => memFactory.freeMemory(addr)
    GrowableVector(memFactory, new MaskedIntAppendingVector(addr, bytesRequired, maxElements, nbits, signed, dispose))
  }

  /**
   * Returns the number of bytes required for a NoNA appending vector of given max length and nbits.
   * This accounts for when nbits < 8 and we need an extra byte for the partially-filled last byte.
   */
  def noNAsize(maxElements: Int, nbits: Short): Int =
    8 + ((maxElements * nbits + Math.max(8 - nbits, 0)) / 8)

  /**
   * Same as appendingVector but uses a SimpleAppendingVector with no ability to hold NA mask
   */
  def appendingVectorNoNA(memFactory: MemFactory,
                          maxElements: Int,
                          nbits: Short = 32,
                          signed: Boolean = true): IntAppendingVector = {
    val bytesRequired = noNAsize(maxElements, nbits)
    val addr = memFactory.allocateOffheap(bytesRequired)
    val dispose = () => memFactory.freeMemory(addr)
    appendingVectorNoNA(addr, bytesRequired, nbits, signed, dispose)
  }

  // Builds a no-NA appender over pre-allocated memory. Each case writes values
  // with the element width implied by nbits; widths below 8 pack several values
  // per byte via the inherited bitShift machinery.
  // NOTE(review): nbits values other than 32/16/8/4/2 throw a MatchError — confirm intended.
  // scalastyle:off method.length
  def appendingVectorNoNA(addr: BinaryRegion.NativePointer,
                          maxBytes: Int,
                          nbits: Short,
                          signed: Boolean,
                          dispose: () => Unit): IntAppendingVector = nbits match {
    case 32 => new IntAppendingVector(addr, maxBytes, nbits, signed, dispose) {
      final def addData(v: Int): AddResponse = checkOffset() match {
        case Ack =>
          UnsafeUtils.setInt(addr + numBytes, v)
          incWriteOffset(4)
          Ack
        case other: AddResponse => other
      }
    }
    case 16 => new IntAppendingVector(addr, maxBytes, nbits, signed, dispose) {
      final def addData(v: Int): AddResponse = checkOffset() match {
        case Ack =>
          UnsafeUtils.setShort(addr + numBytes, v.toShort)
          incWriteOffset(2)
          Ack
        case other: AddResponse => other
      }
    }
    case 8 => new IntAppendingVector(addr, maxBytes, nbits, signed, dispose) {
      final def addData(v: Int): AddResponse = checkOffset() match {
        case Ack =>
          UnsafeUtils.setByte(addr + numBytes, v.toByte)
          incWriteOffset(1)
          Ack
        case other: AddResponse => other
      }
    }
    case 4 => new IntAppendingVector(addr, maxBytes, nbits, signed, dispose) {
      final def addData(v: Int): AddResponse = checkOffset() match {
        case Ack =>
          // OR the new value into the current byte at the running bit offset.
          val origByte = if (bitShift == 0) 0 else nativePtrReader.getByte(writeOffset)
          val newByte = (origByte | (v << bitShift)).toByte
          UnsafeUtils.setByte(writeOffset, newByte)
          bumpBitShift()
          Ack
        case other: AddResponse => other
      }
    }
    case 2 => new IntAppendingVector(addr, maxBytes, nbits, signed, dispose) {
      final def addData(v: Int): AddResponse = checkOffset() match {
        case Ack =>
          val origByte = if (bitShift == 0) 0 else nativePtrReader.getByte(writeOffset)
          val newByte = (origByte | (v << bitShift)).toByte
          UnsafeUtils.setByte(writeOffset, newByte)
          bumpBitShift()
          Ack
        case other: AddResponse => other
      }
    }
  }

  /**
   * Quickly create an IntBinaryVector from a sequence of Ints which can be optimized.
   */
  def apply(memFactory: MemFactory, data: Seq[Int]): BinaryAppendableVector[Int] = {
    val vect = appendingVectorNoNA(memFactory, data.length)
    data.foreach(vect.addData)
    vect
  }

  /**
   * Returns an IntVectorDataReader object for a simple (no mask) Int BinaryVector
   */
  def simple(acc: MemoryReader, vector: BinaryVectorPtr): IntVectorDataReader = {
    // get nbits, etc and decide
    if (PrimitiveVectorReader.signed(acc, vector)) {
      PrimitiveVectorReader.nbits(acc, vector) match {
        case 32 => OffheapSignedIntVector32
        case 16 => OffheapSignedIntVector16
        case 8 => OffheapSignedIntVector8
      }
    } else {
      PrimitiveVectorReader.nbits(acc, vector) match {
        // NOTE(review): unsigned 32-bit uses the *signed* reader (no unsigned
        // 32-bit representation fits in Int) — confirm intended.
        case 32 => OffheapSignedIntVector32
        case 16 => OffheapUnsignedIntVector16
        case 8 => OffheapUnsignedIntVector8
        case 4 => OffheapUnsignedIntVector4
        case 2 => OffheapUnsignedIntVector2
      }
    }
  }

  def apply(buffer: ByteBuffer): IntVectorDataReader = apply(MemoryReader.fromByteBuffer(buffer), 0)

  import WireFormat._

  /**
   * Parses the type of vector from the WireFormat word at address+4 and returns the appropriate
   * IntVectorDataReader object for parsing it
   */
  def apply(acc: MemoryReader, vector: BinaryVectorPtr): IntVectorDataReader = {
    BinaryVector.vectorType(acc, vector) match {
      case x if x == WireFormat(VECTORTYPE_BINSIMPLE, SUBTYPE_INT) => MaskedIntBinaryVector
      case x if x == WireFormat(VECTORTYPE_BINSIMPLE, SUBTYPE_INT_NOMASK) => simple(acc, vector)
      case x if x == WireFormat(VECTORTYPE_BINSIMPLE, SUBTYPE_REPEATED) => IntConstVector
    }
  }

  /**
   * Given the min and max values in an IntVector, determines the most optimal (smallest)
   * nbits and the signed flag to use. Typically used in a workflow where you use
   * `IntBinaryVector.appendingVector` first, then further optimize to the smallest IntVector
   * available.
   */
  def minMaxToNbitsSigned(min: Int, max: Int): (Short, Boolean) = {
    if (min >= 0 && max < 4) {
      (2, false)
    } else if (min >= 0 && max < 16) {
      (4, false)
    } else if (min >= Byte.MinValue && max <= Byte.MaxValue) {
      (8, true)
    } else if (min >= 0 && max < 256) {
      (8, false)
    } else if (min >= Short.MinValue && max <= Short.MaxValue) {
      (16, true)
    } else if (min >= 0 && max < 65536) {
      (16, false)
    } else {
      (32, true)
    }
  }

  /**
   * Produces a smaller BinaryVector if possible given combination of minimal nbits as well as
   * if all values are not NA.
   * The output is a frozen BinaryVector with optimized nbits and without mask if appropriate.
   */
  def optimize(memFactory: MemFactory, vector: OptimizingPrimitiveAppender[Int]): BinaryVectorPtr = {
    // Get nbits and signed
    val (min, max) = vector.minMax
    val (nbits, signed) = minMaxToNbitsSigned(min, max)
    val dispose = () => vector.dispose()
    if (vector.noNAs) {
      // All values identical? A const vector stores the value just once.
      if (min == max) {
        ConstVector.make(memFactory, vector.length, 4) { addr => UnsafeUtils.setInt(ZeroPointer, addr, vector(0)) }
      // No NAs?  Use just the PrimitiveAppendableVector
      } else if (nbits == vector.nbits) { vector.dataVect(memFactory) }
      else {
        val newVect = IntBinaryVector.appendingVectorNoNA(memFactory, vector.length, nbits, signed)
        newVect.addVector(vector)
        newVect.freeze(None) // we're already creating a new copy
      }
    } else {
      // Some NAs and same number of bits? Just keep NA mask
      if (nbits == vector.nbits) { vector.getVect(memFactory) }
      // Some NAs and different number of bits? Create new vector and copy data over
      else {
        val newVect = IntBinaryVector.appendingVector(memFactory, vector.length, nbits, signed)
        newVect.addVector(vector)
        newVect.freeze(None)
      }
    }
  }
}
/**
* An iterator optimized for speed and type-specific to avoid boxing.
* It has no hasNext() method - because it is guaranteed to visit every element, and this way
* you can avoid another method call for performance.
*/
trait IntIterator extends TypedIterator {
  // Returns the next Int. There is deliberately no hasNext: the caller must
  // already know how many elements remain (see trait comment above).
  def next: Int
}
/**
* +0000 4-byte length word
* +0004 2-byte WireFormat
* +0006 2-byte Bitshift / signed / NBits (for format see PrimitiveAppendableVector)
* +0008 start of packed integer data
*/
trait IntVectorDataReader extends VectorDataReader {
  import PrimitiveVector.HeaderLen
  import PrimitiveVectorReader._

  // Iterator to go through bytes. Put var in constructor for much faster access.
  class GenericIntIterator(acc: MemoryReader, vector: BinaryVectorPtr, var n: Int) extends IntIterator {
    final def next: Int = {
      val data = apply(acc, vector, n)
      n += 1
      data
    }
  }

  /**
   * Retrieves the element at position/row n, where n=0 is the first element of the vector.
   */
  def apply(acc: MemoryReader, vector: BinaryVectorPtr, n: Int): Int

  /**
   * Returns the number of elements in this BinaryVector.
   * Computed from the payload byte count; when nbits < 8 the trailing
   * partially-filled byte is accounted for via the stored bitShift.
   */
  def length(acc: MemoryReader, vector: BinaryVectorPtr): Int =
    ((numBytes(acc, vector) - HeaderLen) * 8 +
      (if (bitShift(acc, vector) != 0) bitShift(acc, vector) - 8 else 0)) / nbits(acc, vector)

  /**
   * Sums up the Int values in the vector from position start to position end.
   * @param vector the BinaryVectorPtr native address of the BinaryVector
   * @param start the starting element # in the vector to sum, 0 == first element
   * @param end the ending element # in the vector to sum, inclusive
   * @return the Long sum, since Ints might possibly overflow
   */
  def sum(acc: MemoryReader, vector: BinaryVectorPtr, start: Int, end: Int): Long

  // Straightforward element-by-element sum; used by subclasses for the
  // unaligned head/tail of their word-at-a-time fast paths.
  private[memory] def defaultSum(acc: MemoryReader, vector: BinaryVectorPtr, start: Int, end: Int): Long = {
    var rowNo = start
    var sum = 0L
    while (rowNo <= end) {
      sum += apply(acc, vector, rowNo)
      rowNo += 1
    }
    sum
  }

  /**
   * Returns an IntIterator to efficiently go through the elements of the vector. The user is responsible for
   * knowing how many elements to process. There is no hasNext.
   * All elements are iterated through, even those designated as "not available".
   * Costs an allocation for the iterator but allows potential performance gains too.
   * NOTE: the default one is not very efficient, it just calls apply() again and again.
   * @param vector the BinaryVectorPtr native address of the BinaryVector
   * @param startElement the starting element # in the vector, by default 0 (the first one)
   */
  def iterate(acc: MemoryReader, vector: BinaryVectorPtr, startElement: Int = 0): IntIterator =
    new GenericIntIterator(acc, vector, startElement)

  // Joins all elements into a string; intended for debugging/logging only.
  def debugString(acc: MemoryReader, vector: BinaryVectorPtr, sep: String = ","): String = {
    val it = iterate(acc, vector)
    val size = length(acc, vector)
    (0 until size).map(_ => it.next).mkString(sep)
  }

  /**
   * Converts the BinaryVector to an unboxed Buffer.
   * Only returns elements that are "available".
   */
  def toBuffer(acc: MemoryReader, vector: BinaryVectorPtr, startElement: Int = 0): Buffer[Int] = {
    val newBuf = Buffer.empty[Int]
    val dataIt = iterate(acc, vector, startElement)
    val availIt = iterateAvailable(acc, vector, startElement)
    val len = length(acc, vector)
    cforRange { startElement until len } { n =>
      val item = dataIt.next
      if (availIt.next) newBuf += item
    }
    newBuf
  }
}
// Reader for plain 32-bit signed integer vectors; data starts at vector + 8.
object OffheapSignedIntVector32 extends IntVectorDataReader {
  final def apply(acc: MemoryReader, vector: BinaryVectorPtr, n: Int): Int =
    acc.getInt(vector + 8 + n * 4)

  final def sum(acc: MemoryReader, vector: BinaryVectorPtr, start: Int, end: Int): Long = {
    // Fixed error message: the two fragments previously concatenated to
    // "...isout of bounds..." (missing space between "is" and "out").
    require(start >= 0 && end < length(acc, vector), s"($start, $end) is " +
      s"out of bounds, length=${length(acc, vector)}")
    var addr = vector + 8 + start * 4
    val untilAddr = vector + 8 + end * 4 + 4 // one past the end
    var sum: Long = 0L
    while (addr < untilAddr) {
      sum += acc.getInt(addr)
      addr += 4
    }
    sum
  }
}
// Reader for 16-bit signed integer vectors; element n lives at byte offset 8 + 2*n.
object OffheapSignedIntVector16 extends IntVectorDataReader {
  final def apply(acc: MemoryReader, vector: BinaryVectorPtr, n: Int): Int =
    acc.getShort(vector + 8 + n * 2).toInt

  final def sum(acc: MemoryReader, vector: BinaryVectorPtr, start: Int, end: Int): Long = {
    require(start >= 0 && end < length(acc, vector),
      s"($start, $end) is out of bounds, length=${length(acc, vector)}")
    // Walk row indices rather than raw addresses; the traversal is identical.
    var rowNo = start
    var total = 0L
    while (rowNo <= end) {
      total += acc.getShort(vector + 8 + rowNo * 2)
      rowNo += 1
    }
    total
  }
}
// Reader for 8-bit signed integer vectors; element n lives at byte offset 8 + n.
object OffheapSignedIntVector8 extends IntVectorDataReader {
  final def apply(acc: MemoryReader, vector: BinaryVectorPtr, n: Int): Int =
    acc.getByte(vector + 8 + n).toInt

  final def sum(acc: MemoryReader, vector: BinaryVectorPtr, start: Int, end: Int): Long = {
    require(start >= 0 && end < length(acc, vector),
      s"($start, $end) is out of bounds, length=${length(acc, vector)}")
    // Walk row indices rather than raw addresses; the traversal is identical.
    var rowNo = start
    var total = 0L
    while (rowNo <= end) {
      total += acc.getByte(vector + 8 + rowNo)
      rowNo += 1
    }
    total
  }
}
// Reader for 16-bit *unsigned* values (masked to 0..65535 on read).
object OffheapUnsignedIntVector16 extends IntVectorDataReader {
  final def apply(acc: MemoryReader, vector: BinaryVectorPtr, n: Int): Int =
    (acc.getShort(vector + 8 + n * 2) & 0x0ffff).toInt
  final def sum(acc: MemoryReader, vector: BinaryVectorPtr, start: Int, end: Int): Long = {
    require(start >= 0 && end < length(acc, vector), s"($start, $end) is out " +
      s"of bounds, length=${length(acc, vector)}")
    // Round start up to a multiple of 4 rows so the fast path can read
    // 4 shorts per getLong; the unaligned head is summed element-wise.
    val startRoundedUp = (start + 3) & ~3
    var sum = defaultSum(acc, vector, start, Math.min(end, startRoundedUp - 1))
    if (startRoundedUp <= end) {
      var addr = vector + 8 + startRoundedUp * 2
      var rowNo = startRoundedUp
      while ((rowNo + 3) <= end) {
        val bytes = acc.getLong(addr)
        sum += ((bytes >> 0) & 0x0ffff) + ((bytes >> 16) & 0x0ffff) +
        ((bytes >> 32) & 0x0ffff) + ((bytes >> 48) & 0x0ffff)
        rowNo += 4 // 4 rows at a time
        addr += 8 // 8 bytes at a time
      }
      // Remaining (< 4) rows at the tail.
      sum += defaultSum(acc, vector, rowNo, end)
    }
    sum
  }
}
// Reader for 8-bit *unsigned* values (masked to 0..255 on read).
object OffheapUnsignedIntVector8 extends IntVectorDataReader {
  final def apply(acc: MemoryReader, vector: BinaryVectorPtr, n: Int): Int =
    (acc.getByte(vector + 8 + n) & 0x00ff).toInt
  final def sum(acc: MemoryReader, vector: BinaryVectorPtr, start: Int, end: Int): Long = {
    require(start >= 0 && end < length(acc, vector), s"($start, $end) is out of " +
      s"bounds, length=${length(acc, vector)}")
    // Round start up to a multiple of 8 rows so the fast path can read
    // 8 bytes per getLong; the unaligned head is summed element-wise.
    val startRoundedUp = (start + 7) & ~7
    var sum = defaultSum(acc, vector, start, Math.min(end, startRoundedUp - 1))
    if (startRoundedUp <= end) {
      var addr = vector + 8 + startRoundedUp
      var rowNo = startRoundedUp
      while ((rowNo + 7) <= end) {
        val bytes = acc.getLong(addr)
        sum += ((bytes >> 0) & 0x0ff) + ((bytes >> 8) & 0x0ff) +
        ((bytes >> 16) & 0x0ff) + ((bytes >> 24) & 0x0ff) +
        ((bytes >> 32) & 0x0ff) + ((bytes >> 40) & 0x0ff) +
        ((bytes >> 48) & 0x0ff) + ((bytes >> 56) & 0x0ff)
        rowNo += 8 // 8 rows at a time
        addr += 8 // 8 bytes at a time
      }
      // Remaining (< 8) rows at the tail.
      sum += defaultSum(acc, vector, rowNo, end)
    }
    sum
  }
}
// Reader for 4-bit unsigned values: two values per byte, low nibble first.
object OffheapUnsignedIntVector4 extends IntVectorDataReader {
  final def apply(acc: MemoryReader, vector: BinaryVectorPtr, n: Int): Int =
    (acc.getByte(vector + 8 + n/2) >> ((n & 0x01) * 4)).toInt & 0x0f
  final def sum(acc: MemoryReader, vector: BinaryVectorPtr, start: Int, end: Int): Long = {
    require(start >= 0 && end < length(acc, vector), s"($start, $end) " +
      s"is out of bounds, length=${length(acc, vector)}")
    // Round start up to a multiple of 8 rows (= 4 bytes = one getInt);
    // the unaligned head is summed element-wise.
    val startRoundedUp = (start + 7) & ~7
    var sum = defaultSum(acc, vector, start, Math.min(end, startRoundedUp - 1))
    if (startRoundedUp <= end) {
      var addr = vector + 8 + startRoundedUp/2
      var rowNo = startRoundedUp
      while ((rowNo + 7) <= end) {
        val bytes = acc.getInt(addr)
        sum += ((bytes >> 0) & 0x0f) + ((bytes >> 4) & 0x0f) +
        ((bytes >> 8) & 0x0f) + ((bytes >> 12) & 0x0f) +
        ((bytes >> 16) & 0x0f) + ((bytes >> 20) & 0x0f) +
        ((bytes >> 24) & 0x0f) + ((bytes >> 28) & 0x0f)
        rowNo += 8 // 8 rows at a time
        addr += 4 // 4 bytes at a time
      }
      // Remaining (< 8) rows at the tail.
      sum += defaultSum(acc, vector, rowNo, end)
    }
    sum
  }
}
// Reader for 2-bit unsigned values: four values per byte, lowest bits first.
object OffheapUnsignedIntVector2 extends IntVectorDataReader {
  final def apply(acc: MemoryReader, vector: BinaryVectorPtr, n: Int): Int =
    (acc.getByte(vector + 8 + n/4) >> ((n & 0x03) * 2)).toInt & 0x03
  final def sum(acc: MemoryReader, vector: BinaryVectorPtr, start: Int, end: Int): Long = {
    require(start >= 0 && end < length(acc, vector), s"($start, $end) is " +
      s"out of bounds, length=${length(acc, vector)}")
    // Round start up to a multiple of 8 rows (= 2 bytes = one getShort);
    // the unaligned head is summed element-wise.
    val startRoundedUp = (start + 7) & ~7
    var sum = defaultSum(acc, vector, start, Math.min(end, startRoundedUp - 1))
    if (startRoundedUp <= end) {
      var addr = vector + 8 + startRoundedUp/4
      var rowNo = startRoundedUp
      while ((rowNo + 7) <= end) {
        val bytes = acc.getShort(addr).toInt
        sum += ((bytes >> 0) & 0x03) + ((bytes >> 2) & 0x03) +
        ((bytes >> 4) & 0x03) + ((bytes >> 6) & 0x03) +
        ((bytes >> 8) & 0x03) + ((bytes >> 10) & 0x03) +
        ((bytes >> 12) & 0x03) + ((bytes >> 14) & 0x03)
        rowNo += 8 // 8 rows at a time
        addr += 2 // 2 bytes at a time
      }
      // Remaining (< 8) rows at the tail.
      sum += defaultSum(acc, vector, rowNo, end)
    }
    sum
  }
}
// Reader for Int vectors carrying an NA bitmap mask: all data access is
// delegated to the inner no-mask subvector located at subvectAddr.
object MaskedIntBinaryVector extends IntVectorDataReader with BitmapMaskVector {
  final def apply(acc: MemoryReader, vector: BinaryVectorPtr, n: Int): Int = {
    val subvect = subvectAddr(acc, vector)
    IntBinaryVector.simple(acc, subvect).apply(acc, subvect, n)
  }

  override def length(acc: MemoryReader, vector: BinaryVectorPtr): Int = super.length(acc, subvectAddr(acc, vector))

  final def sum(acc: MemoryReader, vector: BinaryVectorPtr, start: Int, end: Int): Long =
    IntBinaryVector.simple(acc, subvectAddr(acc, vector)).sum(acc, subvectAddr(acc, vector), start, end)

  override def iterate(acc: MemoryReader, vector: BinaryVectorPtr, startElement: Int = 0): IntIterator =
    IntBinaryVector.simple(acc, subvectAddr(acc, vector)).iterate(acc, subvectAddr(acc, vector), startElement)
}
// Base class for no-NA-mask Int appenders; subclasses (created in
// IntBinaryVector.appendingVectorNoNA) implement addData for each nbits width.
abstract class IntAppendingVector(addr: BinaryRegion.NativePointer,
                                  maxBytes: Int,
                                  nbits: Short,
                                  signed: Boolean,
                                  val dispose: () => Unit)
extends PrimitiveAppendableVector[Int](addr, maxBytes, nbits, signed) {
  override def vectSubType: Int = WireFormat.SUBTYPE_INT_NOMASK

  // NA is stored as 0 since there is no mask to record availability.
  final def addNA(): AddResponse = addData(0)
  final def apply(index: Int): Int = reader.apply(nativePtrReader, addr, index)
  val reader = IntBinaryVector.simple(nativePtrReader, addr)
  def copyToBuffer: Buffer[Int] = reader.asIntReader.toBuffer(nativePtrReader, addr)

  final def addFromReaderNoNA(reader: RowReader, col: Int): AddResponse = addData(reader.getInt(col))

  // Single pass over all appended values to find the min and max.
  final def minMax: (Int, Int) = {
    var min = Int.MaxValue
    var max = Int.MinValue
    cforRange { 0 until length } { index =>
      val data = reader.apply(nativePtrReader, addr, index)
      if (data < min) min = data
      if (data > max) max = data
    }
    (min, max)
  }

  override def optimize(memFactory: MemFactory, hint: EncodingHint = AutoDetect): BinaryVectorPtr =
    IntBinaryVector.optimize(memFactory, this)
}
// Appender for Int vectors with an NA bitmap mask: the mask lives at the front
// of the region and the actual values in a no-NA subvector after it.
class MaskedIntAppendingVector(addr: BinaryRegion.NativePointer,
                               val maxBytes: Int,
                               maxElements: Int,
                               val nbits: Short,
                               signed: Boolean,
                               val dispose: () => Unit) extends
// First four bytes: offset to SimpleIntBinaryVector
BitmapMaskAppendableVector[Int](addr, maxElements) with OptimizingPrimitiveAppender[Int] {
  def vectMajorType: Int = WireFormat.VECTORTYPE_BINSIMPLE
  def vectSubType: Int = WireFormat.SUBTYPE_INT

  val subVect = IntBinaryVector.appendingVectorNoNA(addr + subVectOffset,
                                                    maxBytes - subVectOffset,
                                                    nbits, signed, dispose)

  def dataVect(memFactory: MemFactory): BinaryVectorPtr = subVect.freeze(memFactory)
  def copyToBuffer: Buffer[Int] = MaskedIntBinaryVector.toBuffer(nativePtrReader, addr)

  // Min/max over *available* values only (NA slots are skipped).
  final def minMax: (Int, Int) = {
    var min = Int.MaxValue
    var max = Int.MinValue
    cforRange { 0 until length } { index =>
      if (isAvailable(index)) {
        val data = subVect.apply(index)
        if (data < min) min = data
        if (data > max) max = data
      }
    }
    (min, max)
  }

  override def optimize(memFactory: MemFactory, hint: EncodingHint = AutoDetect): BinaryVectorPtr =
    IntBinaryVector.optimize(memFactory, this)

  // Allocates a larger copy of this vector (used when the vector grows).
  override def newInstance(memFactory: MemFactory, growFactor: Int = 2): BinaryAppendableVector[Int] = {
    val addr = memFactory.allocateOffheap(maxBytes * growFactor)
    val dispose = () => memFactory.freeMemory(addr)
    new MaskedIntAppendingVector(addr, maxBytes * growFactor, maxElements * growFactor,
                                 nbits, signed, dispose)
  }
}
// Reader for const vectors: every row holds the same Int, stored once at DataOffset.
object IntConstVector extends ConstVector with IntVectorDataReader {
  override def length(acc: MemoryReader, vector: BinaryVectorPtr): Int = numElements(acc, vector)

  // The index is ignored: all rows share the single stored value.
  def apply(acc: MemoryReader, vector: BinaryVectorPtr, i: Int): Int =
    acc.getInt(vector + ConstVector.DataOffset)

  // Sum is rowCount * value. Widen to Long *before* multiplying: the previous
  // Int * Int multiplication could overflow before the result was widened.
  final def sum(acc: MemoryReader, vector: BinaryVectorPtr, start: Int, end: Int): Long =
    (end - start + 1).toLong * apply(acc, vector, 0)
}
| filodb/FiloDB | memory/src/main/scala/filodb.memory/format/vectors/IntBinaryVector.scala | Scala | apache-2.0 | 22,391 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sumologic.sumobot.http_frontend
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpMethods.{GET, OPTIONS}
import akka.http.scaladsl.model.headers._
import akka.http.scaladsl.model.ws.{Message, UpgradeToWebSocket}
import akka.http.scaladsl.model.{HttpEntity, HttpRequest, HttpResponse, Uri}
import akka.stream.scaladsl.{Keep, Sink, Source}
import akka.stream.{ActorMaterializer, Materializer, OverflowStrategy}
import com.sumologic.sumobot.http_frontend.SumoBotHttpServer._
import com.sumologic.sumobot.http_frontend.authentication.{AuthenticationForbidden, AuthenticationInfo, AuthenticationSucceeded}
import org.reactivestreams.Publisher
import scala.concurrent.Await
import scala.concurrent.duration._
object SumoBotHttpServer {
  private[http_frontend] val UrlSeparator = "/"
  // Template rendered for GET /.
  private[http_frontend] val RootPageName = "index.ssp"
  private[http_frontend] val WebSocketEndpoint = UrlSeparator + "websocket"
  // Static assets served alongside the root page.
  private[http_frontend] val Resources = Set(UrlSeparator + "script.js", UrlSeparator + "style.css")
  // Outgoing WebSocket message buffer; overflowing it fails the stream.
  private[http_frontend] val BufferSize = 128
  private[http_frontend] val SocketOverflowStrategy = OverflowStrategy.fail
}
class SumoBotHttpServer(options: SumoBotHttpServerOptions)(implicit system: ActorSystem) {
private implicit val materializer: Materializer = ActorMaterializer()
private val routingHelper = RoutingHelper(options.origin)
private val rootPage = DynamicResource(RootPageName)
private val serverSource = Http().bind(options.httpHost, options.httpPort)
private val binding = serverSource.to(Sink.foreach(_.handleWithSyncHandler {
routingHelper.withAllowOriginHeader {
routingHelper.withForbiddenFallback {
options.authentication.routes.orElse {
routingHelper.withHeadRequests {
requestHandler
}
}
}
}
})).run()
def terminate(): Unit = {
Await.result(binding, 10.seconds).terminate(5.seconds)
}
private val requestHandler: PartialFunction[HttpRequest, HttpResponse] = {
case req@HttpRequest(GET, Uri.Path("/"), _, _, _) =>
authenticate(req) {
authInfo => renderRootPage(authInfo)
}
case HttpRequest(OPTIONS, Uri.Path("/"), _, _, _) =>
rootPageOptions
case req@HttpRequest(GET, Uri.Path(path), _, _, _)
if Resources.contains(path) =>
val filename = path.replaceFirst(UrlSeparator, "")
authenticate(req) {
_ => staticResource(filename)
}
case HttpRequest(OPTIONS, Uri.Path(path), _, _, _)
if Resources.contains(path) =>
val filename = path.replaceFirst(UrlSeparator, "")
staticResourceOptions(filename)
case req@HttpRequest(GET, Uri.Path(WebSocketEndpoint), _, _, _) =>
authenticate(req) {
_ => webSocketRequestHandler(req)
}
case req@HttpRequest(OPTIONS, Uri.Path(WebSocketEndpoint), _, _, _) =>
webSocketOptions(req)
}
private def authenticate(request: HttpRequest)(succeededHandler: AuthenticationInfo => HttpResponse): HttpResponse = {
options.authentication.authentication(request) match {
case AuthenticationSucceeded(info) =>
succeededHandler(info)
case AuthenticationForbidden(response) =>
response
}
}
private def staticResource(filename: String): HttpResponse = {
val resource = StaticResource(filename)
HttpResponse(entity = HttpEntity(resource.contentType, resource.contents))
}
private def staticResourceOptions(filename: String): HttpResponse = {
HttpResponse()
.withHeaders(List(`Access-Control-Allow-Methods`(List(GET))))
}
private def renderRootPage(authInfo: AuthenticationInfo): HttpResponse = {
val contents = rootPage.contents(Map(
"authInfo" -> authInfo,
"serverOptions" -> options
))
HttpResponse(entity = HttpEntity(rootPage.contentType, contents))
}
private val rootPageOptions: HttpResponse = {
HttpResponse()
.withHeaders(List(`Access-Control-Allow-Methods`(List(GET))))
}
private def webSocketRequestHandler(req: HttpRequest): HttpResponse = {
req.header[UpgradeToWebSocket] match {
case Some(upgrade) =>
webSocketUpgradeHandler(upgrade)
case None => HttpResponse(400, entity = "Invalid WebSocket request")
}
}
private def webSocketOptions(req: HttpRequest): HttpResponse = {
HttpResponse()
.withHeaders(List(`Access-Control-Allow-Methods`(List(GET))))
}
  /** Wires up one WebSocket connection.
    *
    * An actor-backed Source is materialized so that other actors can push
    * outgoing `Message`s into the socket; incoming messages are routed to a
    * receiver actor which forwards replies through a sender actor.
    * NOTE(review): `BufferSize`/`SocketOverflowStrategy` are declared
    * elsewhere in this class — overflow behavior depends on those values.
    */
  def webSocketUpgradeHandler(upgrade: UpgradeToWebSocket): HttpResponse = {
    // Materialize (actor ref, reactive-streams publisher) for the outgoing side.
    val (publisherRef: ActorRef, publisher: Publisher[Message]) =
      Source.actorRef[Message](BufferSize, SocketOverflowStrategy)
        .toMat(Sink.asPublisher(true))(Keep.both).run()
    val publisherSource = Source.fromPublisher(publisher)
    // Sender pushes outbound messages into publisherRef; receiver consumes inbound frames.
    val senderRef = system.actorOf(Props(classOf[HttpOutcomingSender], publisherRef))
    val receiverRef = system.actorOf(Props(classOf[HttpIncomingReceiver], senderRef))
    // StreamEnded signals the receiver when the client disconnects.
    val sink = Sink.actorRef(receiverRef, HttpIncomingReceiver.StreamEnded)
    upgrade.handleMessagesWithSinkSource(sink, publisherSource)
  }
}
| SumoLogic/sumobot | src/main/scala/com/sumologic/sumobot/http_frontend/SumoBotHttpServer.scala | Scala | apache-2.0 | 5,935 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package common
import java.time.Instant
import org.scalatest.Matchers
import scala.collection.mutable.ListBuffer
import scala.util.Failure
import scala.util.Try
import scala.concurrent.duration.Duration
import scala.concurrent.duration.DurationInt
import spray.json._
import TestUtils.RunResult
import TestUtils.CONFLICT
/**
 * An arbitrary response of a whisk action. Includes the result as a JsObject as the
 * structure of "result" is not defined.
 *
 * @param result a JSON object used to save the result of the execution of the action
 * @param status a string used to indicate the status of the action
 * @param success a boolean value used to indicate whether the action is executed successfully or not
 */
case class ActivationResponse(result: Option[JsObject], status: String, success: Boolean)

/** JSON (de)serialization support for [[ActivationResponse]]. */
object ActivationResponse extends DefaultJsonProtocol {
  // Automatic three-field format derived from the case-class constructor.
  implicit val serdes = jsonFormat3(ActivationResponse.apply)
}
/**
 * Activation record as it is returned from the OpenWhisk service.
 *
 * @param activationId a String to save the ID of the activation
 * @param logs a list of String to save the logs of the activation
 * @param response an Object of ActivationResponse to save the response of the activation
 * @param start an Instant to save the start time of activation
 * @param end an Instant to save the end time of activation
 * @param duration a Long to save the duration of the activation
 * @param cause a String to save the cause of failure if the activation fails
 * @param annotations a list of JSON objects to save the annotations of the activation
 */
case class ActivationResult(activationId: String,
                            logs: Option[List[String]],
                            response: ActivationResponse,
                            start: Instant,
                            end: Instant,
                            duration: Long,
                            cause: Option[String],
                            annotations: Option[List[JsObject]]) {

  /** Returns the value of the annotation named `key`, or None when the
    * annotation is absent, not unique, or has no single "value" field
    * (any assertion failure inside the Try collapses to None).
    */
  def getAnnotationValue(key: String): Option[JsValue] = {
    Try {
      val annotation = annotations.get.filter(x => x.getFields("key")(0) == JsString(key))
      assert(annotation.size == 1) // only one annotation with this value
      val value = annotation(0).getFields("value")
      assert(value.size == 1)
      value(0)
    }.toOption
  }
}
/** JSON (de)serialization support for [[ActivationResult]]. */
object ActivationResult extends DefaultJsonProtocol {

  /** Instants travel on the wire as epoch milliseconds; non-numeric JSON is rejected. */
  private implicit val instantSerdes = new RootJsonFormat[Instant] {
    def write(t: Instant) = t.toEpochMilli.toJson

    def read(value: JsValue) =
      Try {
        value match {
          case JsNumber(i) => Instant.ofEpochMilli(i.bigDecimal.longValue)
          // fix: error message previously misspelled as "timetsamp malformed"
          case _ => deserializationError("timestamp malformed")
        }
      } getOrElse deserializationError("timestamp malformed 2")
  }

  implicit val serdes = new RootJsonFormat[ActivationResult] {
    private val format = jsonFormat8(ActivationResult.apply)

    /** Writing uses the generated eight-field format directly. */
    def write(result: ActivationResult) = format.write(result)

    /** Reading is lenient: only activationId, response and start are required;
      * the remaining fields fall back to defaults (no logs, EPOCH end, zero
      * duration, no cause, no annotations) when absent.
      */
    def read(value: JsValue) = {
      val obj = value.asJsObject
      obj.getFields("activationId", "response", "start") match {
        case Seq(JsString(activationId), response, start) =>
          Try {
            val logs = obj.fields.get("logs").map(_.convertTo[List[String]])
            val end = obj.fields.get("end").map(_.convertTo[Instant]).getOrElse(Instant.EPOCH)
            val duration = obj.fields.get("duration").map(_.convertTo[Long]).getOrElse(0L)
            val cause = obj.fields.get("cause").map(_.convertTo[String])
            val annotations = obj.fields.get("annotations").map(_.convertTo[List[JsObject]])
            new ActivationResult(
              activationId,
              logs,
              response.convertTo[ActivationResponse],
              start.convertTo[Instant],
              end,
              duration,
              cause,
              annotations)
          } getOrElse deserializationError("Failed to deserialize the activation result.")
        case _ => deserializationError("Failed to deserialize the activation ID, response or start.")
      }
    }
  }
}
/**
* Test fixture to ease cleaning of whisk entities created during testing.
*
* The fixture records the entities created during a test and when the test
* completed, will delete them all.
*/
trait WskTestHelpers extends Matchers {
  // Each entry: (CLI handle, entity name, whether deletion must be confirmed).
  type Assets = ListBuffer[(BaseDeleteFromCollection, String, Boolean)]

  /**
   * Helper to register an entity to delete once a test completes.
   * The helper sanitizes (deletes) a previous instance of the entity if it exists
   * in given collection.
   *
   */
  class AssetCleaner(assetsToDeleteAfterTest: Assets, wskprops: WskProps) {
    def withCleaner[T <: BaseDeleteFromCollection](cli: T, name: String, confirmDelete: Boolean = true)(
      cmd: (T, String) => RunResult): RunResult = {
      // sanitize (delete) if asset exists
      cli.sanitize(name)(wskprops)

      // register for deletion before running the command, so a failing
      // creation still gets cleaned up
      assetsToDeleteAfterTest += ((cli, name, confirmDelete))
      cmd(cli, name)
    }
  }

  /**
   * Creates a test closure which records all entities created inside the test into a
   * list that is iterated at the end of the test so that these entities are deleted
   * (from most recently created to oldest).
   */
  def withAssetCleaner(wskprops: WskProps)(test: (WskProps, AssetCleaner) => Any) = {
    // create new asset list to track what must be deleted after test completes
    val assetsToDeleteAfterTest = new Assets()

    try {
      test(wskprops, new AssetCleaner(assetsToDeleteAfterTest, wskprops))
    } catch {
      case t: Throwable =>
        // log the exception that occurred in the test and rethrow it
        println(s"Exception occurred during test execution: $t")
        throw t
    } finally {
      // delete assets in reverse order so that was created last is deleted first
      val deletedAll = assetsToDeleteAfterTest.reverse map {
        case ((cli, n, delete)) =>
          n -> Try {
            cli match {
              case _: BasePackage if delete =>
                val rr = cli.delete(n)(wskprops)
                rr.exitCode match {
                  // package deletion can conflict with in-flight operations; retry
                  case CONFLICT => whisk.utils.retry(cli.delete(n)(wskprops), 5, Some(1.second))
                  case _        => rr
                }
              case _ => if (delete) cli.delete(n)(wskprops) else cli.sanitize(n)(wskprops)
            }
          }
      } forall {
        case (n, Failure(t)) =>
          println(s"ERROR: deleting asset failed for $n: $t")
          false
        case _ =>
          true
      }

      assert(deletedAll, "some assets were not deleted")
    }
  }

  /**
   * Extracts an activation id from a wsk command producing a RunResult with such an id.
   * If id is found, polls activations until one matching id is found. If found, pass
   * the activation to the post processor which then check for expected values.
   */
  def withActivation(
    wsk: BaseActivation,
    run: RunResult,
    initialWait: Duration = 1.second,
    pollPeriod: Duration = 1.second,
    totalWait: Duration = 60.seconds)(check: ActivationResult => Unit)(implicit wskprops: WskProps): Unit = {
    val activationId = wsk.extractActivationId(run)

    withClue(s"did not find an activation id in '$run'") {
      activationId shouldBe a[Some[_]]
    }

    withActivation(wsk, activationId.get, initialWait, pollPeriod, totalWait)(check)
  }

  /**
   * Polls activations until one matching id is found. If found, pass
   * the activation to the post processor which then check for expected values.
   */
  def withActivation(wsk: BaseActivation,
                     activationId: String,
                     initialWait: Duration,
                     pollPeriod: Duration,
                     totalWait: Duration)(check: ActivationResult => Unit)(implicit wskprops: WskProps): Unit = {
    val id = activationId
    val activation = wsk.waitForActivation(id, initialWait, pollPeriod, totalWait)
    if (activation.isLeft) {
      assert(false, s"error waiting for activation $id: ${activation.left.get}")
    } else
      try {
        check(activation.right.get.convertTo[ActivationResult])
      } catch {
        case error: Throwable =>
          // surface the raw activation JSON to ease debugging before rethrowing
          println(s"check failed for activation $id: ${activation.right.get}")
          throw error
      }
  }

  /**
   * Polls until it finds {@code N} activationIds from an entity. Asserts the count
   * of the activationIds actually equal {@code N}. Takes a {@code since} parameter
   * defining the oldest activationId to consider valid.
   */
  def withActivationsFromEntity(
    wsk: BaseActivation,
    entity: String,
    N: Int = 1,
    since: Option[Instant] = None,
    pollPeriod: Duration = 1.second,
    totalWait: Duration = 60.seconds)(check: Seq[ActivationResult] => Unit)(implicit wskprops: WskProps): Unit = {

    val activationIds =
      wsk.pollFor(N, Some(entity), since = since, retries = (totalWait / pollPeriod).toInt, pollPeriod = pollPeriod)
    withClue(
      s"expecting $N activations matching '$entity' name since $since but found ${activationIds.mkString(",")} instead") {
      activationIds.length shouldBe N
    }

    val parsed = activationIds.map { id =>
      wsk.parseJsonString(wsk.get(Some(id)).stdout).convertTo[ActivationResult]
    }
    try {
      check(parsed)
    } catch {
      case error: Throwable =>
        println(s"check failed for activations $activationIds: ${parsed}")
        throw error
    }
  }

  /**
   * In the case that test throws an exception, print stderr and stdout
   * from the provided RunResult.
   */
  def withPrintOnFailure(runResult: RunResult)(test: () => Unit) {
    try {
      test()
    } catch {
      case error: Throwable =>
        println(s"[stderr] ${runResult.stderr}")
        println(s"[stdout] ${runResult.stdout}")
        throw error
    }
  }

  // Drops the CLI banner (everything before the first newline) from a response.
  def removeCLIHeader(response: String): String = response.substring(response.indexOf("\\n"))

  // Parses the CLI response body (after the banner) as a JSON object.
  def getJSONFromCLIResponse(response: String): JsObject = removeCLIHeader(response).parseJson.asJsObject

  // Creates a throw-away subject via wskadmin and returns props for it.
  def getAdditionalTestSubject(newUser: String): WskProps = {
    val wskadmin = new RunWskAdminCmd {}
    WskProps(namespace = newUser, authKey = wskadmin.cli(Seq("user", "create", newUser)).stdout.trim)
  }

  // Deletes a subject created by getAdditionalTestSubject, asserting success.
  def disposeAdditionalTestSubject(subject: String): Unit = {
    val wskadmin = new RunWskAdminCmd {}
    withClue(s"failed to delete temporary subject $subject") {
      wskadmin.cli(Seq("user", "delete", subject)).stdout should include("Subject deleted")
    }
  }
}
| tysonnorris/openwhisk | tests/src/test/scala/common/WskTestHelpers.scala | Scala | apache-2.0 | 11,330 |
package circumflex
package security
import java.io.Serializable
/*! # Principal
The `Principal` trait should be implemented in your application
by the `User` class (or whatever abstraction you use for authentication).
Typically you would use an ORM library to provide methods for storing and
retrieving users, but your application may choose different technology
for managing authentication data. The `Principal` trait in this sense is
agnostic to storage technology.
*/
trait Principal extends Serializable {

/*! A unique ID is used to lookup the user from the storage and to associate
it with session on current domain. The `uniqueId` method should return string
representation (for use in cookies). The same string is to be used in the
implementation of method `lookup` in `Auth`.
*/
  // Stable, cookie-safe identifier of this principal.
  def uniqueId: String

/*! The `secret` method should return a string containing the secret information
(e.g. SHA256 of user password) which could be used to check the authenticity of
the principal. */
  // Secret material (e.g. a password hash) used to verify authenticity; never log it.
  def secret: String

}
/*! ## Stubs
Circumflex Auth includes default implementations of `Principal` which
are used by `NoAuth` stub.
*/
/** Principal stub used by the `NoAuth` strategy: a fixed identifier
  * and an empty secret.
  */
class DummyPrincipal extends Principal {
  def uniqueId: String = "dummy"
  def secret: String = ""
}
object DummyPrincipal extends DummyPrincipal
| inca/circumflex | security/src/main/scala/principal.scala | Scala | bsd-2-clause | 1,288 |
/*
*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.locationtech.geomesa.utils.stats
import java.util.concurrent.atomic.AtomicLong
import com.typesafe.scalalogging.slf4j.Logging
trait MethodProfiling {

  import java.lang.System.{currentTimeMillis => ctm}

  /** Evaluates `code`, records its wall-clock duration in `timing`,
    * and returns the result unchanged.
    */
  def profile[R](code: => R)(implicit timing: Timing): R = {
    val begin = ctm
    val result = code
    timing.occurrence(ctm - begin)
    result
  }

  /** Evaluates `code`, records its wall-clock duration under `identifier`,
    * and returns the result unchanged.
    */
  def profile[R](code: => R, identifier: String)(implicit timings: Timings) = {
    val begin = ctm
    val result = code
    timings.occurrence(identifier, ctm - begin)
    result
  }
}
/**
 * Accumulates timing results: the total elapsed time and the number of
 * recorded events. Not synchronized internally; callers (e.g. TimingsImpl)
 * provide external synchronization.
 */
class Timing extends Serializable {

  private var total = 0L
  private var count = 0L

  /**
   * Updates this instance with a new timing.
   *
   * @param time elapsed time of the event being recorded
   */
  def occurrence(time: Long): Unit = {
    total += time
    count += 1
    // fix: removed trailing `this` — the method returns Unit, so the
    // expression's value was silently discarded (dead code)
  }

  /**
   * Gets the total accumulated time.
   *
   * @return sum of all recorded times
   */
  def time: Long = total

  /**
   * Gets the number of event occurrences.
   *
   * @return count of recorded events
   */
  def occurrences: Long = count

  /**
   * Computes the average time per event.
   * NOTE: yields NaN when no events have been recorded (0L / 0.0).
   *
   * @return mean recorded time
   */
  def average(): Double = total / count.toDouble
}
trait Timings extends Serializable {

  /**
   * Updates the given identifier with a new timing
   *
   * @param identifier label of the code section being timed
   * @param time elapsed time to record for that section
   */
  def occurrence(identifier: String, time: Long): Unit

  /**
   * Gets the total time for the given identifier
   *
   * @param identifier label of the code section
   * @return total accumulated time for the section
   */
  def time(identifier: String): Long

  /**
   * Gets the total occurrences for the given identifier
   *
   * @param identifier label of the code section
   * @return number of recorded events for the section
   */
  def occurrences(identifier: String): Long

  /**
   * Creates a printed string with the computed average occurrences per identifier
   *
   * @return human-readable summary of occurrence percentages
   */
  def averageOccurrences(): String

  /**
   * Creates a printed string with the computed average times per identifier
   *
   * @return human-readable summary of time percentages and averages
   */
  def averageTimes(): String
}
/**
 * Class to hold timing results. Thread-safe: map access is guarded by
 * synchronizing on `map`, and each Timing update by synchronizing on the
 * Timing instance itself.
 */
class TimingsImpl extends Timings {

  private val map = scala.collection.mutable.Map.empty[String, Timing]

  override def occurrence(identifier: String, time: Long): Unit = {
    // lock the map only to fetch/create the entry, then lock the entry to update it
    val timing = map.synchronized(map.getOrElseUpdate(identifier, new Timing))
    timing.synchronized(timing.occurrence(time))
  }

  override def time(identifier: String): Long =
    map.synchronized(map.getOrElseUpdate(identifier, new Timing)).time

  override def occurrences(identifier: String): Long =
    map.synchronized(map.getOrElseUpdate(identifier, new Timing)).occurrences

  override def averageOccurrences(): String = if (map.isEmpty) {
    "No occurrences"
  } else {
    // snapshot entries under the lock, then format outside it
    val entries = map.synchronized(map.toList).sortBy(_._1)
    val total = entries.map(_._2.occurrences).sum
    val percentOccurrences = entries.map { case (id, timing) =>
      s"$id: ${(timing.occurrences * 100 / total.toDouble).formatted("%.1f%%")}"
    }
    percentOccurrences.mkString(s"Total occurrences: $total. Percent of occurrences - ", ", ", "")
  }

  override def averageTimes(): String = if (map.isEmpty) {
    "No occurrences"
  } else {
    val entries = map.synchronized(map.toList).sortBy(_._1)
    val total = entries.map(_._2.time).sum
    val percentTimes = entries.map { case (id, timing) =>
      // lock each Timing so time/occurrences/average come from a consistent state
      timing.synchronized(s"$id: ${(timing.time * 100 / total.toDouble).formatted("%.1f%%")}" +
        s" ${timing.occurrences} times at ${timing.average.formatted("%.4f")} ms avg")
    }
    percentTimes.mkString(s"Total time: $total ms. Percent of time - ", ", ", "")
  }
}
/**
 * Useful for sharing timings between instances of a certain class.
 * Automatically logs the averaged timings at debug level.
 *
 * @param moduloToLog emit a log line every this many recorded occurrences
 */
class AutoLoggingTimings(moduloToLog: Int = 1000) extends TimingsImpl with Logging {

  // Counts every recorded occurrence across all identifiers.
  val count = new AtomicLong()

  override def occurrence(identifier: String, time: Long) = {
    super.occurrence(identifier, time)
    if (count.incrementAndGet() % moduloToLog == 0) {
      logger.debug(averageTimes())
    }
  }
}
// Timings implementation that discards all measurements; use to disable profiling.
object NoOpTimings extends Timings {

  override def occurrence(identifier: String, time: Long) = {}

  override def occurrences(identifier: String) = 0L

  override def time(identifier: String) = 0L

  override def averageTimes() = ""

  override def averageOccurrences() = ""
} | mmatz-ccri/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/stats/MethodProfiling.scala | Scala | apache-2.0 | 4,850 |
package BIDMach
import BIDMat.{Mat,SBMat,CMat,DMat,FMat,IMat,HMat,GDMat,GLMat,GMat,GIMat,GSDMat,GSMat,LMat,SMat,SDMat}
import BIDMat.MatFunctions._
import BIDMat.SciFunctions._
import BIDMat.Plotting._
import BIDMat.about
import BIDMat.MatIOtrait
import BIDMach.models._
import BIDMach.updaters._
import BIDMach.datasources._
import BIDMach.datasinks._
import BIDMach.mixins._
import scala.collection.immutable.List
import scala.collection.mutable.ListBuffer
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
/**
 * Basic sequential Learner class with a single datasource.
 *
 * Drives the train/predict loop: pulls minibatches from `datasource`, feeds
 * them to `model` (with optional `mixins` regularizers and an `updater`),
 * periodically evaluates scores, and optionally writes outputs to `datasink`.
 */
@SerialVersionUID(100L)
case class Learner(
  val datasource:DataSource,
  val model:Model,
  val mixins:Array[Mixin],
  val updater:Updater,
  val datasink:DataSink,
  val opts:Learner.Options = new Learner.Options) extends Serializable {

  var results:FMat = null                      // scores row-stacked over sample counts, filled at wrapUp
  val dopts:DataSource.Opts = datasource.opts
  val mopts:Model.Opts = model.opts
  val ropts:Mixin.Opts = if (mixins != null) mixins(0).opts else null
  val uopts:Updater.Opts = if (updater != null) updater.opts else null
  var useGPU = false
  var reslist:ListBuffer[FMat] = null;         // per-evaluation score matrices
  var samplist:ListBuffer[Float] = null;       // sample count at each evaluation
  var lastCheckPoint = 0;
  var done = false;
  var paused = false;                          // external pause switch polled in nextPass
  var ipass = 0;                               // current pass (epoch) number
  var here = 0L;                               // total samples consumed so far
  var lasti = 0;
  var bytes = 0L;
  var cacheState = false;
  var debugMemState = false;

  // Configures the datasource's putBack behavior for this model's dimension.
  def setup = {
    Learner.setupPB(datasource, dopts.putBack, mopts.dim)
  }

  // Initializes datasource, model, optional sink, mixins and updater.
  def init = {
    var cacheState = Mat.useCache;
    Mat.useCache = opts.useCache;
    datasource.init;
    model.bind(datasource);
    if (datasink.asInstanceOf[AnyRef] != null) {
      datasink.init;
      model.bind(datasink);
    }
    model.init;
    if (mixins != null) mixins map (_ init(model))
    if (updater != null) updater.init(model)
    Mat.useCache = cacheState;
    useGPU = model.useGPU
  }

  def train = {
    retrain
  }

  // Full training run: first pass (setup + init) then remaining passes.
  def retrain() = {
    flip
    var cacheState = Mat.useCache;
    Mat.useCache = opts.useCache;
    debugMemState = Mat.debugMem;
    if (updater != null) updater.clear;
    reslist = new ListBuffer[FMat];
    samplist = new ListBuffer[Float];
    firstPass(null);
    while (ipass < opts.npasses && ! done) {
      nextPass(null);
    }
    wrapUp;
  }

  // Runs pass 0, resetting all counters; `iter` optionally replaces the iterator source.
  def firstPass(iter:Iterator[(AnyRef, MatIOtrait)]):Unit = {
    if (iter != null) {
      datasource.asInstanceOf[IteratorSource].opts.iter = iter;
    }
    setup
    init
    done = false;
    ipass = 0;
    here = 0L;
    lasti = 0;
    bytes = 0L;
    if (updater != null) updater.clear;
    cacheState = Mat.useCache;
    Mat.useCache = opts.useCache;
    reslist = new ListBuffer[FMat];
    samplist = new ListBuffer[Float];
    flip;
    nextPass(iter);
  }

  // Runs one full pass over the datasource: update on most batches,
  // evaluate every opts.evalStep batches, report progress, checkpoint.
  def nextPass(iter:Iterator[(AnyRef, MatIOtrait)]): Unit = {
    if (opts.debugMem && ipass > 0) Mat.debugMem = true;
    var lastp = 0f
    if (iter != null) {
      datasource.asInstanceOf[IteratorSource].opts.iter = iter;
    }
    datasource.reset
    var istep = 0
    println("pass=%2d" format ipass)
    while (datasource.hasNext) {
      while (paused) Thread.sleep(10)
      val mats = datasource.next;
      here += datasource.opts.batchSize
      bytes += mats.map(Learner.numBytes _).reduce(_+_);
      val dsp = datasource.progress;
      val gprogress = (ipass + dsp)/opts.npasses;
      if ((istep - 1) % opts.evalStep == 0 || (istep > 0 && (! datasource.hasNext))) {
        // evaluation batch: optionally also update (opts.updateAll), then score
        if (opts.updateAll) {
          model.dobatchg(mats, ipass, here);
          if (mixins != null) mixins map (_ compute(mats, here));
          if (updater != null) updater.update(ipass, here, gprogress);
        }
        val scores = model.evalbatchg(mats, ipass, here);
        if (datasink != null) datasink.put;
        reslist.append(scores.newcopy)
        samplist.append(here)
      } else {
        // ordinary training batch
        model.dobatchg(mats, ipass, here)
        if (mixins != null) mixins map (_ compute(mats, here))
        if (updater != null) updater.update(ipass, here, gprogress)
      }
      if (datasource.opts.putBack >= 0) datasource.putBack(mats, datasource.opts.putBack)
      istep += 1
      if (dsp > lastp + opts.pstep && reslist.length > lasti) {
        // periodic progress line: percent, scores, gflops, time, data volume
        val gf = gflop
        lastp = dsp - (dsp % opts.pstep)
        print("%5.2f%%, %s, gf=%5.3f, secs=%3.1f, GB=%4.2f, MB/s=%5.2f" format (
          100f*lastp,
          Learner.scoreSummary(reslist, lasti, reslist.length, opts.cumScore),
          gf._1,
          gf._2,
          bytes*1e-9,
          bytes/gf._2*1e-6))
        if (useGPU) {
          print(", GPUmem=%3.6f" format GPUmem._1)
        }
        println;
        lasti = reslist.length;
      }
      if (opts.checkPointFile != null && toc > 3600 * opts.checkPointInterval * (1 + lastCheckPoint)) {
        // time-based checkpointing (interval is in hours)
        model.save(opts.checkPointFile format lastCheckPoint);
        lastCheckPoint += 1;
      }
    }
    if (updater != null) updater.updateM(ipass)
    ipass += 1
  }

  // Finalizes a run: restores flags, frees GPU state, collects results.
  def wrapUp {
    val gf = gflop;
    Mat.useCache = cacheState;
    Mat.debugMem = debugMemState;
    println("Time=%5.4f secs, gflops=%4.2f" format (gf._2, gf._1))
    if (opts.autoReset && useGPU) {
      Learner.toCPU(modelmats)
      resetGPUs
      Mat.clearCaches
    }
    datasource.close;
    if (datasink != null) datasink.close;
    results = Learner.scores2FMat(reslist) on row(samplist.toList);
    done = true;
  }

  // Prediction entry point: preserves the model's refresh flag around repredict.
  def predict() = {
    setup;
    datasource.init;
    model.bind(datasource);
    if (datasink.asInstanceOf[AnyRef] != null) {
      datasink.init;
      model.bind(datasink);
    }
    val rstate = model.refresh;
    model.refresh = false
    model.init
    val results = repredict
    model.refresh = rstate
    results
  }

  // Evaluation-only pass over the datasource (no model updates).
  def repredict() = {
    flip
    useGPU = model.useGPU
    var cacheState = Mat.useCache
    Mat.useCache = opts.useCache
    var here = 0L
    var lasti = 0
    var bytes = 0L
    var lastp = 0f
    val reslist = new ListBuffer[FMat]
    val samplist = new ListBuffer[Float]
    println("Predicting")
    datasource.reset
    while (datasource.hasNext) {
      val mats = datasource.next
      here += datasource.opts.batchSize
      bytes += mats.map(Learner.numBytes _).reduce(_+_);
      val scores = model.evalbatchg(mats, 0, here);
      if (datasink != null) datasink.put
      reslist.append(scores.newcopy);
      samplist.append(here);
      val dsp = datasource.progress;
      if (dsp > lastp + opts.pstep && reslist.length > lasti) {
        val gf = gflop
        lastp = dsp - (dsp % opts.pstep)
        print("%5.2f%%, %s, gf=%5.3f, secs=%3.1f, GB=%4.2f, MB/s=%5.2f" format (
          100f*lastp,
          Learner.scoreSummary(reslist, lasti, reslist.length, opts.cumScore),
          gf._1,
          gf._2,
          bytes*1e-9,
          bytes/gf._2*1e-6))
        if (useGPU) {
          print(", GPUmem=%3.2f" format GPUmem._1)
        }
        println
        lasti = reslist.length
      }
    }
    val gf = gflop
    Mat.useCache = cacheState
    println("Time=%5.4f secs, gflops=%4.2f" format (gf._2, gf._1));
    if (opts.autoReset && useGPU) {
      Learner.toCPU(modelmats)
      resetGPUs
      Mat.clearCaches
    }
    datasource.close;
    if (datasink != null) datasink.close;
    results = Learner.scores2FMat(reslist) on row(samplist.toList)
  }

  // Convenience accessors; the casts assume Mat-backed source/sink —
  // they throw ClassCastException for other datasource/datasink types.
  def datamats = datasource.asInstanceOf[MatSource].mats;
  def modelmats = model.modelmats;
  def datamat = datasource.asInstanceOf[MatSource].mats(0);
  def modelmat = model.modelmats(0);
  def preds = datasink.asInstanceOf[MatSink].mats
}
/**
 * Parallel Learner with a single datasource.
 *
 * Runs `opts.nthreads` model replicas (one per GPU when available), each in
 * its own Future. The main thread deals minibatches into per-thread buffers
 * (`cmats`); worker threads train/evaluate, and models are periodically
 * averaged through the shared accumulators `mm`/`um`.
 */
case class ParLearner(
  val datasource:DataSource,
  val models:Array[Model],
  val mixins:Array[Array[Mixin]],
  val updaters:Array[Updater],
  val datasink:DataSink,
  val opts:ParLearner.Options = new ParLearner.Options) extends Serializable {

  var um:Array[Mat] = null              // scratch buffers for model averaging
  var mm:Array[Mat] = null              // CPU master copies of the model matrices
  var results:FMat = null
  var cmats:Array[Array[Mat]] = null    // per-thread copies of the current minibatch
  var useGPU = false

  def setup = {
    val dopts = datasource.opts
    Learner.setupPB(datasource, datasource.opts.putBack, models(0).opts.dim)
  }

  // Initializes each replica on its own GPU, then allocates the averaging buffers.
  def init = {
    datasource.init
    useGPU = models(0).opts.useGPU
    val thisGPU = if (useGPU) getGPU else 0
    for (i <- 0 until opts.nthreads) {
      if (useGPU && i < Mat.hasCUDA) setGPU(i)
      models(i).bind(datasource)
      models(i).init
      if (mixins != null) mixins(i) map (_ init(models(i)))
      if (updaters != null && updaters(i) != null) updaters(i).init(models(i))
    }
    if (useGPU) setGPU(thisGPU)
    val mml = models(0).modelmats.length
    um = new Array[Mat](mml)
    mm = new Array[Mat](mml)
    for (i <- 0 until mml) {
      val mm0 = models(0).modelmats(i)
      mm(i) = zeros(mm0.nrows, mm0.ncols)
      um(i) = zeros(mm0.nrows, mm0.ncols)
    }
    ParLearner.syncmodels(models, mm, um, 0, useGPU)
  }

  def train = {
    setup
    init
    retrain
  }

  // Main training loop. The `done` vector is the handshake between the
  // dispatcher (sets done(i)=0 after filling cmats(i)) and the workers
  // (set done(i)=1 when finished with the batch).
  def retrain = {
    flip
    val mm0 = models(0).modelmats(0)
    var cacheState = Mat.useCache
    Mat.useCache = opts.useCache
    cmats = new Array[Array[Mat]](opts.nthreads)
    for (i <- 0 until opts.nthreads) cmats(i) = new Array[Mat](datasource.omats.length)
    val thisGPU = if (useGPU) getGPU else 0
    if (useGPU) {
      for (i <- 0 until opts.nthreads) {
        // if (i != thisGPU) connect(i)
      }
    }
    @volatile var done = iones(opts.nthreads, 1)
    var ipass = 0
    var here = 0L
    var lasti = 0
    var bytes = 0L
    val reslist = new ListBuffer[FMat]
    val samplist = new ListBuffer[Float]
    for (i <- 0 until opts.nthreads) {
      if (useGPU && i < Mat.hasCUDA) setGPU(i)
      if (updaters != null && updaters(i) != null) updaters(i).clear
    }
    setGPU(thisGPU)
    var istep = 0
    var lastp = 0f
    var running = true
    var progress = 0f;
    var gprogress = 0f;
    for (ithread <- 0 until opts.nthreads) {
      // one long-lived worker per thread/GPU
      Future {
        if (useGPU && ithread < Mat.hasCUDA) setGPU(ithread)
        while (running) {
          while (done(ithread) == 1) Thread.sleep(1)
          try {
            if ((istep + ithread + 1) % opts.evalStep == 0 || !datasource.hasNext ) {
              val scores = models(ithread).evalbatchg(cmats(ithread), ipass, here)
              reslist.synchronized { reslist.append(scores(0)) }
              samplist.synchronized { samplist.append(here) }
            } else {
              models(ithread).dobatchg(cmats(ithread), ipass, here)
              if (mixins != null && mixins(ithread) != null) mixins(ithread) map (_ compute(cmats(ithread), here))
              if (updaters != null && updaters(ithread) != null) updaters(ithread).update(ipass, here, gprogress)
            }
          } catch {
            case e:Exception => {
              // recover the replica in place rather than killing the run
              print("Caught exception in thread %d %s\\n" format (ithread, e.toString));
              val se = e.getStackTrace();
              for (i <- 0 until 8) {
                println("thread %d, %s" format (ithread, se(i).toString));
              }
              restart(ithread)
              println("Restarted: Keep on truckin...")
            }
          }
          done(ithread) = 1
        }
      }
    }
    while (ipass < opts.npasses) {
      datasource.reset
      istep = 0
      lastp = 0f
      println("pass=%2d" format ipass)
      while (datasource.hasNext) {
        // deal one minibatch to each worker
        for (ithread <- 0 until opts.nthreads) {
          if (datasource.hasNext) {
            val mats = datasource.next
            progress = datasource.progress
            gprogress = (ipass + progress)/opts.npasses
            for (j <- 0 until mats.length) {
              cmats(ithread)(j) = safeCopy(mats(j), ithread)
            }
            if (ithread == 0) here += datasource.opts.batchSize
            done(ithread) = 0;
            bytes += mats.map(Learner.numBytes _).reduce(_+_);
          }
        }
        // wait for all workers to finish their batch
        while (mini(done).v == 0) Thread.sleep(1)
        Thread.sleep(opts.coolit)
        istep += opts.nthreads
        if (istep % opts.syncStep == 0) ParLearner.syncmodels(models, mm, um, istep/opts.syncStep, useGPU)
        if (datasource.progress > lastp + opts.pstep) {
          while (datasource.progress > lastp + opts.pstep) lastp += opts.pstep
          val gf = gflop
          if (reslist.length > lasti) {
            print("%5.2f%%, %s, gf=%5.3f, secs=%3.1f, GB=%4.2f, MB/s=%5.2f" format (
              100f*lastp,
              Learner.scoreSummary(reslist, lasti, reslist.length, opts.cumScore),
              gf._1,
              gf._2,
              bytes*1e-9,
              bytes/gf._2*1e-6))
            if (useGPU) {
              for (i <- 0 until math.min(opts.nthreads, Mat.hasCUDA)) {
                setGPU(i)
                if (i==0) print(", GPUmem=%3.2f" format GPUmem._1) else print(", %3.2f" format GPUmem._1)
              }
              setGPU(thisGPU)
            }
            println
          }
          lasti = reslist.length
        }
      }
      for (i <- 0 until opts.nthreads) {
        if (useGPU && i < Mat.hasCUDA) setGPU(i);
        if (updaters != null && updaters(i) != null) updaters(i).updateM(ipass)
      }
      setGPU(thisGPU)
      ParLearner.syncmodelsPass(models, mm, um, ipass)
      ipass += 1
      if (opts.resFile != null) {
        saveAs(opts.resFile, Learner.scores2FMat(reslist) on row(samplist.toList), "results")
      }
    }
    running = false;
    datasource.close
    val gf = gflop
    Mat.useCache = cacheState
    if (useGPU) {
      for (i <- 0 until opts.nthreads) {
        // if (i != thisGPU) disconnect(i);
      }
    }
    if (opts.autoReset && useGPU) {
      Learner.toCPU(models(0).modelmats)
      resetGPUs
    }
    println("Time=%5.4f secs, gflops=%4.2f, samples=%4.2g, MB/sec=%4.2g" format (gf._2, gf._1, 1.0*opts.nthreads*here, bytes/gf._2/1e6))
    results = Learner.scores2FMat(reslist) on row(samplist.toList)
  }

  // Copies a minibatch matrix into a per-thread cached buffer keyed by
  // (GUID, ithread). NOTE(review): only SMat/FMat/IMat are handled —
  // any other Mat subtype raises a MatchError.
  def safeCopy(m:Mat, ithread:Int):Mat = {
    m match {
      case ss:SMat => {
        val out = SMat.newOrCheckSMat(ss.nrows, ss.ncols, ss.nnz, null, m.GUID, ithread, "safeCopy".##)
        ss.copyTo(out)
      }
      case ss:FMat => {
        val out = FMat.newOrCheckFMat(ss.nrows, ss.ncols, null, m.GUID, ithread, "safeCopy".##)
        ss.copyTo(out)
      }
      case ss:IMat => {
        val out = IMat.newOrCheckIMat(ss.nrows, ss.ncols, null, m.GUID, ithread, "safeCopy".##)
        ss.copyTo(out)
      }
    }
  }

  // Rebuilds a replica after a worker exception: reset its GPU, re-init the
  // model and updater, and reload the master model matrix.
  def restart(ithread:Int) = {
    if (useGPU) {
      resetGPU
      Mat.trimCaches(ithread)
    }
    models(ithread).bind(datasource)
    models(ithread).init
    models(ithread).modelmats(0) <-- mm(0)
    updaters(ithread).init(models(ithread))
  }

  // Convenience accessors; the MatSource cast throws for other datasource types.
  def datamats = datasource.asInstanceOf[MatSource].mats
  def modelmats = models(0).modelmats
  def datamat = datasource.asInstanceOf[MatSource].mats(0)
  def modelmat = models(0).modelmats(0)
}
/**
* Parallel Learner class with multiple datasources, models, mixins, and updaters.
* i.e. several independent Learners whose models are synchronized periodically.
*/
case class ParLearnerx(
val datasources:Array[DataSource],
val models:Array[Model],
val mixins:Array[Array[Mixin]],
val updaters:Array[Updater],
val datasinks:Array[DataSink],
val opts:ParLearner.Options = new ParLearner.Options) extends Serializable {
var um:Array[Mat] = null
var mm:Array[Mat] = null
var results:FMat = null
var useGPU = false
def setup = {
for (i <- 0 until opts.nthreads) {
Learner.setupPB(datasources(i), datasources(i).opts.putBack, models(i).opts.dim)
}
}
def init = {
val thisGPU = if (Mat.hasCUDA > 0) getGPU else 0
for (i <- 0 until opts.nthreads) {
if (i < Mat.hasCUDA) setGPU(i)
datasources(i).init
models(i).bind(datasources(i))
models(i).init
if (mixins != null) mixins(i) map(_ init(models(i)))
updaters(i).init(models(i))
}
useGPU = models(0).useGPU
if (Mat.hasCUDA > 0) setGPU(thisGPU)
val mml = models(0).modelmats.length
um = new Array[Mat](mml)
mm = new Array[Mat](mml)
for (i <- 0 until mml) {
val mm0 = models(0).modelmats(i)
mm(i) = zeros(mm0.nrows, mm0.ncols)
um(i) = zeros(mm0.nrows, mm0.ncols)
}
}
def train = {
setup
init
retrain
}
  def retrain() = {
    // Reset the gflop/time counters; this run is measured from here.
    flip
    var cacheState = Mat.useCache
    Mat.useCache = opts.useCache
    val thisGPU = if (useGPU) getGPU else 0
    if (useGPU) {
      // Enable peer access between this GPU and every worker GPU.
      for (i <- 0 until opts.nthreads) {
        if (i != thisGPU) connect(i)
      }
    }
    // Per-thread pass counter, also used as a barrier between workers and the
    // driver loop below.
    // NOTE(review): @volatile on a local var has no effect in Scala, and the
    // cross-thread updates go into the IMat's backing array, so visibility is
    // not actually guaranteed by this annotation - TODO confirm.
    @volatile var done = izeros(opts.nthreads, 1)
    var ipass = 0
    var istep0 = 0L
    var ilast0 = 0L
    var bytes = 0L
    val reslist = new ListBuffer[FMat]
    val samplist = new ListBuffer[Float]
    var lastp = 0f
    var lasti = 0
    var gprogress = 0f
    done.clear
    // Launch one asynchronous worker per thread; each consumes its own datasource.
    for (ithread <- 0 until opts.nthreads) {
      Future {
        if (useGPU && ithread < Mat.hasCUDA) setGPU(ithread)
        var here = 0L
        updaters(ithread).clear
        while (done(ithread) < opts.npasses) {
          var istep = 0
          while (datasources(ithread).hasNext) {
            val mats = datasources(ithread).next
            here += datasources(ithread).opts.batchSize
            bytes += mats.map(Learner.numBytes _).reduce(_+_);
            gprogress = (dsProgress + ipass)/opts.npasses
            // Shared step counters; guarded by a common lock (models(0)).
            models(0).synchronized {
              istep += 1
              istep0 += 1
            }
            try {
              // Every evalStep-th minibatch is scored instead of trained on.
              if (istep % opts.evalStep == 0) {
                val scores = models(ithread).synchronized {models(ithread).evalbatchg(mats, ipass, here)}
                reslist.synchronized { reslist.append(scores) }
                samplist.synchronized { samplist.append(here) }
              } else {
                models(ithread).synchronized {
                  models(ithread).dobatchg(mats, ipass, here)
                  if (mixins != null && mixins(ithread) != null) mixins(ithread) map (_ compute(mats, here))
                  updaters(ithread).update(ipass, here, gprogress)
                }
              }
            } catch {
              // Best-effort recovery: rebuild this thread's model from the
              // shared matrices and keep going.
              case e:Exception => {
                print("Caught exception in thread %d %s\\nTrying restart..." format (ithread, e.toString))
                restart(ithread)
                println("Keep on truckin...")
              }
            }
            // coolit throttles GPU work (milliseconds per minibatch).
            if (useGPU) Thread.sleep(opts.coolit)
            if (datasources(ithread).opts.putBack >= 0) datasources(ithread).putBack(mats, datasources(ithread).opts.putBack)
            // if (istep % (opts.syncStep/opts.nthreads) == 0) syncmodel(models, ithread)
          }
          models(ithread).synchronized { updaters(ithread).updateM(ipass) }
          done(ithread) += 1
          // Spin-wait barrier: hold this worker until the driver advances ipass.
          while (done(ithread) > ipass) Thread.sleep(1)
        }
      }
    }
    println("pass=%2d" format ipass)
    // Driver loop: while workers run, periodically merge models and report progress.
    while (ipass < opts.npasses) {
      while (mini(done).v == ipass) {
        // Merge per-thread models every syncStep global steps.
        if (istep0 >= ilast0 + opts.syncStep) {
          ParLearner.syncmodels(models, mm, um, istep0/opts.syncStep, useGPU)
          ilast0 += opts.syncStep
        }
        if (dsProgress > lastp + opts.pstep) {
          while (dsProgress > lastp + opts.pstep) lastp += opts.pstep
          val gf = gflop
          if (reslist.length > lasti) {
            print("%5.2f%%, %s, gf=%5.3f, secs=%3.1f, GB=%4.2f, MB/s=%5.2f" format (
              100f*lastp,
              reslist.synchronized {
                Learner.scoreSummary(reslist, lasti, reslist.length)
              },
              gf._1,
              gf._2,
              bytes*1e-9,
              bytes/gf._2*1e-6))
            if (useGPU) {
              for (i <- 0 until math.min(opts.nthreads, Mat.hasCUDA)) {
                setGPU(i)
                if (i==0) print(", GPUmem=%3.2f" format GPUmem._1) else print(", %3.2f" format GPUmem._1)
              }
              setGPU(thisGPU)
            }
            println
          }
          lasti = reslist.length
        } else {
          Thread.sleep(1)
        }
      }
      lastp = 0f
      if (ipass < opts.npasses) {
        for (i <- 0 until opts.nthreads) datasources(i).reset
        println("pass=%2d" format ipass+1)
      }
      if (opts.resFile != null) {
        saveAs(opts.resFile, Learner.scores2FMat(reslist) on row(samplist.toList), "results")
      }
      ipass += 1
    }
    val gf = gflop
    Mat.useCache = cacheState
    println("Time=%5.4f secs, gflops=%4.2f, MB/s=%5.2f, GB=%5.2f" format (gf._2, gf._1, bytes/gf._2*1e-6, bytes*1e-9))
    if (opts.autoReset && useGPU) {
      Learner.toCPU(modelmats)
      resetGPUs
    }
    for (ithread <- 0 until opts.nthreads) datasources(ithread).close
    results = Learner.scores2FMat(reslist) on row(samplist.toList)
  }
def syncmodel(models:Array[Model], ithread:Int) = {
mm.synchronized {
for (i <- 0 until models(ithread).modelmats.length) {
um(i) <-- models(ithread).modelmats(i)
um(i) ~ um(i) *@ (1f/opts.nthreads)
mm(i) ~ mm(i) *@ (1 - 1f/opts.nthreads)
mm(i) ~ mm(i) + um(i)
models(ithread).modelmats(i) <-- mm(i)
}
}
}
def restart(ithread:Int) = {
if (useGPU) {
resetGPU
Mat.trimCache2(ithread)
}
models(ithread).bind(datasources(ithread))
models(ithread).init
for (i <- 0 until models(ithread).modelmats.length) {
models(ithread).modelmats(i) <-- mm(i)
}
updaters(ithread).init(models(ithread))
}
def dsProgress:Float = {
var sum = 0f
for (i <- 0 until datasources.length) {
sum += datasources(i).progress
}
sum / datasources.length
}
  // The first thread's model holds the canonical (merged) model state.
  def modelmats = models(0).modelmats
  def modelmat = models(0).modelmats(0)
}
/**
* Parallel multi-datasource Learner that takes function arguments.
* This allows classes to be initialized later, when the learner is setup.
*/
// NOTE(review): `ssfun` is accepted and `sss` is allocated in setup, but ssfun
// is never invoked, so the DataSink array handed to ParLearnerx contains only
// nulls - TODO confirm whether sinks are intentionally unsupported here.
class ParLearnerxF(
    dopts:DataSource.Opts,
    ddfun:(DataSource.Opts, Int)=>DataSource,
    mopts:Model.Opts,
    mkmodel:(Model.Opts)=>Model,
    ropts:Mixin.Opts,
    mkreg:(Mixin.Opts)=>Array[Mixin],
    uopts:Updater.Opts,
    mkupdater:(Updater.Opts)=>Updater,
    sopts:DataSink.Opts,
    ssfun:(DataSink.Opts, Int)=>DataSink,
    val lopts:ParLearner.Options = new ParLearner.Options) extends Serializable {
  var dds:Array[DataSource] = null;
  var sss:Array[DataSink] = null
  var models:Array[Model] = null
  var mixins:Array[Array[Mixin]] = null
  var updaters:Array[Updater] = null
  var learner:ParLearnerx = null
  // Builds one datasource/model/mixin/updater per thread (each on its own GPU
  // when available), then wraps them in a ParLearnerx.
  def setup = {
    dds = new Array[DataSource](lopts.nthreads);
    sss = new Array[DataSink](lopts.nthreads);
    models = new Array[Model](lopts.nthreads);
    if (mkreg != null) mixins = new Array[Array[Mixin]](lopts.nthreads)
    updaters = new Array[Updater](lopts.nthreads)
    val thisGPU = if (Mat.hasCUDA > 0) getGPU else 0
    for (i <- 0 until lopts.nthreads) {
      // Pin thread i's allocations to GPU i so matrices live on the right device.
      if (mopts.useGPU && i < Mat.hasCUDA) setGPU(i)
      dds(i) = ddfun(dopts, i)
      models(i) = mkmodel(mopts)
      if (mkreg != null) mixins(i) = mkreg(ropts)
      updaters(i) = mkupdater(uopts)
    }
    // Restore the GPU that was active before setup.
    if (0 < Mat.hasCUDA) setGPU(thisGPU)
    learner = new ParLearnerx(dds, models, mixins, updaters, sss, lopts)
    learner.setup
  }
  // Delegation: the wrapped ParLearnerx owns the actual training loop.
  def init = learner.init
  def train = {
    setup
    init
    learner.retrain
  }
}
/**
* Single-datasource parallel Learner which takes function arguments.
*/
class ParLearnerF(
    val ds:DataSource,
    val mopts:Model.Opts,
    mkmodel:(Model.Opts)=>Model,
    ropts:Mixin.Opts,
    mkreg:(Mixin.Opts)=>Array[Mixin],
    val uopts:Updater.Opts,
    mkupdater:(Updater.Opts)=>Updater,
    val sopts:DataSink.Opts,
    val ss:DataSink,
    val lopts:ParLearner.Options = new ParLearner.Options) extends Serializable {
  var models:Array[Model] = null
  var mixins:Array[Array[Mixin]] = null
  var updaters:Array[Updater] = null
  var learner:ParLearner = null
  // Builds one model/mixin/updater per thread (each on its own GPU when
  // available); unlike ParLearnerxF, all threads share the single datasource `ds`.
  def setup = {
    models = new Array[Model](lopts.nthreads)
    if (mkreg != null) mixins = new Array[Array[Mixin]](lopts.nthreads)
    if (mkupdater != null) updaters = new Array[Updater](lopts.nthreads)
    val thisGPU = if (Mat.hasCUDA > 0) getGPU else 0
    for (i <- 0 until lopts.nthreads) {
      // Pin thread i's allocations to GPU i.
      if (mopts.useGPU && i < Mat.hasCUDA) setGPU(i)
      models(i) = mkmodel(mopts)
      if (mkreg != null) mixins(i) = mkreg(ropts)
      if (mkupdater != null) updaters(i) = mkupdater(uopts)
    }
    // Restore the previously active GPU.
    if (0 < Mat.hasCUDA) setGPU(thisGPU)
    learner = new ParLearner(ds, models, mixins, updaters, ss, lopts)
    learner.setup
  }
  // Delegation: the wrapped ParLearner owns the actual training loop.
  def init = learner.init
  def train = {
    setup
    init
    retrain
  }
  def retrain = learner.retrain
}
object Learner {
  /** Options shared by all learners. */
  class Options extends BIDMat.Opts {
    var npasses = 2;                      // number of complete passes over the data
    var evalStep = 11;                    // every evalStep-th minibatch is scored instead of trained on
    var pstep = 0.01f;                    // progress-report interval, as a fraction of a pass
    var resFile:String = null;            // if non-null, intermediate results are saved here
    var autoReset = true;                 // copy model matrices to CPU and reset GPUs after training
    var useCache = true;                  // enable BIDMat's matrix cache during training
    var updateAll = false;
    var debugMem = false;
    var cumScore = 0;                     // score-window selector, see scoreSummary
    var checkPointFile:String = null;
    var checkPointInterval = 0f;
  }
  /**
   * Approximate in-memory size of a matrix, in bytes.
   *
   * NOTE(review): this match is not exhaustive - GPU-resident matrix types
   * (GMat etc.) throw a MatchError here. TODO confirm callers only pass host
   * matrices (retrain calls it on freshly-read minibatches).
   */
  def numBytes(mat:Mat):Long = {
    mat match {
      case a:FMat => 4L * mat.length;
      case a:IMat => 4L * mat.length;
      case a:DMat => 8L * mat.length;
      case a:LMat => 8L * mat.length;
      case a:SMat => 8L * mat.nnz;
      case a:SDMat => 12L * mat.nnz;
    }
  }
  /** Replaces every GPU matrix in `mats` with its CPU equivalent, in place. */
  def toCPU(mats:Array[Mat]): Unit = {   // explicit `: Unit =` replaces deprecated procedure syntax
    for (i <- 0 until mats.length) {
      mats(i) match {
        case g:GMat => mats(i) = FMat(g)
        case g:GSMat => mats(i) = SMat(g)
        case g:GIMat => mats(i) = IMat(g)
        case g:GDMat => mats(i) = DMat(g)
        case g:GLMat => mats(i) = LMat(g)
        case g:GSDMat => mats(i) = SDMat(g)
        case _ => {}                     // already a host matrix
      }
    }
  }
  /** Configures put-back on matrix datasources; a no-op for other source types. */
  def setupPB(ds:DataSource, npb:Int, dim:Int) = {
    ds match {
      case ddm:MatSource => {
        if (npb >= 0) {
          ddm.setupPutBack(npb, dim)
        }
      }
      case _ => {}
    }
  }
  /**
   * Formats the mean first-column score over a trailing window of `reslist`.
   * `cumScore` selects the window start: 0 = since `lasti`, 1 = everything,
   * 2 = last half, anything else = last quarter.
   *
   * NOTE(review): when `len == istart` the division yields NaN/Infinity in the
   * formatted string - TODO confirm callers guarantee a non-empty window.
   */
  def scoreSummary(reslist:ListBuffer[FMat], lasti:Int, len:Int, cumScore:Int = 0):String = {
    val istart = if (cumScore == 0) lasti else {if (cumScore == 1) 0 else if (cumScore == 2) len/2 else 3*len/4};
    var i = 0
    var sum = 0.0;
    for (scoremat <- reslist) {
      if (i >= istart) sum += mean(scoremat(?,0)).v
      i += 1
    }
    ("ll=%6.5f" format sum/(len - istart))
  }
  /** Packs the first column of each score matrix into one FMat, one column per minibatch. */
  def scores2FMat(reslist:ListBuffer[FMat]):FMat = {
    val out = FMat(reslist.head.nrows, reslist.length)
    var i = 0;
    // Iterate the buffer rather than indexing it: ListBuffer.apply is O(n),
    // which made the original while-loop quadratic in the number of minibatches.
    for (scoremat <- reslist) {
      out(?, i) = scoremat(?,0)
      i += 1
    }
    out
  }
}
object ParLearner {
  class Options extends
  	  Learner.Options {
  	var nthreads = math.max(0, Mat.hasCUDA)   // default: one worker per visible GPU
  	var syncStep = 32                         // merge models every syncStep global minibatches
  	var coolit = 60                           // per-minibatch GPU throttle, in milliseconds (see retrain)
  }
  // Delegate per-pass model merging to the model's own merge function.
  def syncmodelsPass(models:Array[Model], mm:Array[Mat], um:Array[Mat], ipass:Int) = {
    models(0).mergeModelPassFn(models, mm, um, ipass);
  }
  // Delegate step-wise model merging to the model's own merge function.
  // NOTE(review): the `useGPU` parameter is accepted but never used here.
  def syncmodels(models:Array[Model], mm:Array[Mat], um:Array[Mat], istep:Long, useGPU:Boolean) = {
    models(0).mergeModelFn(models, mm, um, istep);
  }
}
| jamesjia94/BIDMach | src/main/scala/BIDMach/Learner.scala | Scala | bsd-3-clause | 27,291 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript.analysis
import slamdata.Predef.{Map => _, _}
import quasar.common.{JoinType, JoinTypeArbitrary, SortDir}
import quasar.contrib.matryoshka._
import quasar.contrib.matryoshka.arbitrary._
import quasar.fp._
import quasar.fp.ski.κ
import quasar.ejson
import quasar.ejson.{CommonEJson, EJson, EJsonArbitrary, ExtEJson}
import quasar.ejson.implicits._
import quasar.qscript._
import matryoshka.{Hole => _, _}
import matryoshka.data.Fix
import matryoshka.data.free._
import matryoshka.implicits._
import org.specs2.scalacheck._
import scalaz._, Scalaz._
/**
 * Property tests for the `Outline` analysis, which statically approximates the
 * "shape" of QScript expression results: known maps/arrays/constants where the
 * structure is statically visible, and `unknownF`/`undefinedF` otherwise.
 */
final class OutlineSpec extends quasar.Qspec with QScriptHelpers {
  import EJsonArbitrary._, FigureArbitrary._, JoinTypeArbitrary._
  import MapFuncCore.StaticMap
  import Outline.{Figure, Shape, freeEJsonEqual, undefinedF, unknownF, arrF, mapF}
  // Keep generated EJson values small: shapes grow combinatorially with size.
  implicit val params = Parameters(maxSize = 10)
  val toFree = convertToFree[EJson, Figure](_: Fix[EJson])
  val rollS = Free.roll[EJson, Figure] _
  // Combines join sides into {"left": <l>, "right": <r>}; modSides builds the
  // shape that combination is expected to outline to.
  val joinFunc =
    ConcatMapsR(
      MakeMapR(ConstantR(Fix(CommonEJson(ejson.Str("left")))), LeftSideF),
      MakeMapR(ConstantR(Fix(CommonEJson(ejson.Str("right")))), RightSideF))
  val modSides: (Shape, Shape) => Shape =
    (l, r) => rollS(ExtEJson(ejson.Map(List(
      rollS(CommonEJson(ejson.Str("left"))) -> l,
      rollS(CommonEJson(ejson.Str("right"))) -> r))))
  "Outline FreeMap" >> {
    val outlineFM = Outline.outlineF(_: FreeMap)(κ(unknownF))
    // Representative non-constant values of array, map, and unknown shape.
    val av = ConcatArraysR(HoleF, HoleF)
    val mv = ConcatMapsR(HoleF, HoleF)
    val uv = ProjectIndexR(HoleF, ConstantR(ejsonInt(42)))
    "unknown when no static structure" >> {
      outlineFM(uv) must_= unknownF
    }
    "constants are lifted into Free" >> prop { ejs: Fix[EJson] =>
      outlineFM(ConstantR(ejs)).transCataM[Option, Fix[EJson], EJson](_.run.toOption) must_= Some(ejs)
    }
    "map construction with constant keys result in maps" >> prop {
      (k1: Fix[EJson], k2: Fix[EJson]) => (k1 =/= k2) ==> {
      val fm = ConcatMapsR(MakeMapR(ConstantR(k1), av), MakeMapR(ConstantR(k2), mv))
      val ss =
        rollS(ExtEJson(ejson.Map(List(
          toFree(k1) -> arrF,
          toFree(k2) -> mapF))))
      outlineFM(fm) must_= ss
    }}
    "map construction with at least one non-constant key is not static" >> prop { k1: Fix[EJson] =>
      val k2 = ProjectFieldR(HoleF, ConstantR(ejsonStr("fieldName")))
      val fm = ConcatMapsR(MakeMapR(ConstantR(k1), uv), MakeMapR(k2, uv))
      outlineFM(fm) must_= mapF
    }
    "concatenation of maps is right-biased" >> prop { k: Fix[EJson] =>
      val fm = ConcatMapsR(MakeMapR(ConstantR(k), mv), MakeMapR(ConstantR(k), av))
      val ss = rollS(ExtEJson(ejson.Map(List(toFree(k) -> arrF))))
      outlineFM(fm) must_= ss
    }
    "concatenation of maps preserves field order" >> prop {
      (k1: Fix[EJson], k2: Fix[EJson], k3: Fix[EJson]) => (k1 =/= k2 && k1 =/= k3 && k2 =/= k3) ==> {
      val l =
        StaticMap[Fix, Hole](List(k1 -> HoleF, k2 -> HoleF, k3 -> HoleF))
      val r =
        StaticMap[Fix, Hole](List(k2 -> av))
      val ss = rollS(ExtEJson(ejson.Map(List(
        toFree(k1) -> unknownF,
        toFree(k2) -> arrF,
        toFree(k3) -> unknownF))))
      outlineFM(ConcatMapsR(l, r)) must_= ss
    }}
    "field projection on static map results in field value" >> prop {
      (k1: Fix[EJson], k2: Fix[EJson]) => (k1 =/= k2) ==> {
      val fm =
        ProjectFieldR(
          ConcatMapsR(MakeMapR(ConstantR(k1), mv), MakeMapR(ConstantR(k2), av)),
          ConstantR(k1))
      outlineFM(fm) must_= mapF
    }}
    "field projection on static map without field results in undefined" >> prop {
      (k1: Fix[EJson], k2: Fix[EJson]) => (k1 =/= k2) ==> {
      val fm = ProjectFieldR(MakeMapR(ConstantR(k1), av), ConstantR(k2))
      outlineFM(fm) must_= undefinedF
    }}
    "field deletion on static map results in static field deleted" >> prop {
      (k1: Fix[EJson], k2: Fix[EJson]) => (k1 =/= k2) ==> {
      val fm =
        DeleteFieldR(
          ConcatMapsR(MakeMapR(ConstantR(k2), av), MakeMapR(ConstantR(k1), mv)),
          ConstantR(k1))
      val ss =
        rollS(ExtEJson(ejson.Map(List(toFree(k2) -> arrF))))
      outlineFM(fm) must_= ss
    }}
    "field deletion of nonexistent on static map is identity" >> prop {
      (k1: Fix[EJson], k2: Fix[EJson]) => (k1 =/= k2) ==> {
      val fm = DeleteFieldR(MakeMapR(ConstantR(k1), av), ConstantR(k2))
      val ss = rollS(ExtEJson(ejson.Map(List(toFree(k1) -> arrF))))
      outlineFM(fm) must_= ss
    }}
    "array construction results in arrays" >> {
      val fm =
        ConcatArraysR(
          MakeArrayR(uv),
          ConcatArraysR(
            ConcatArraysR(
              MakeArrayR(mv),
              MakeArrayR(av)),
            MakeArrayR(uv)))
      val ss =
        rollS(CommonEJson(ejson.Arr(List(unknownF, mapF, arrF, unknownF))))
      outlineFM(fm) must_= ss
    }
    "index projection on static array results in index value" >> {
      val fm =
        ProjectIndexR(
          ConcatArraysR(ConcatArraysR(MakeArrayR(av), MakeArrayR(mv)), MakeArrayR(uv)),
          ConstantR(ejsonInt(1)))
      outlineFM(fm) must_= mapF
    }
    "index projection on static array with invalid index results in undefined" >> {
      outlineFM(ProjectIndexR(MakeArrayR(mv), ConstantR(ejsonInt(-5)))) must_= undefinedF
    }
    "index projection on static array with nonexistent index results in undefined" >> {
      outlineFM(ProjectIndexR(MakeArrayR(mv), ConstantR(ejsonInt(7)))) must_= undefinedF
    }
    "complex shape" >> prop {
      (k1: Fix[EJson], k2: Fix[EJson]) => (k1 =/= k2) ==> {
      val fm =
        ConcatArraysR(
          MakeArrayR(uv),
          MakeArrayR(ConcatMapsR(
            MakeMapR(ConstantR(k1), MakeArrayR(mv)),
            MakeMapR(ConstantR(k2), MakeMapR(ConstantR(k1), av)))))
      val ss =
        rollS(CommonEJson(ejson.Arr(List(
          unknownF,
          rollS(ExtEJson(ejson.Map(List(
            toFree(k1) -> rollS(CommonEJson(ejson.Arr(List(mapF)))),
            toFree(k2) -> rollS(ExtEJson(ejson.Map(List(toFree(k1) -> arrF))))))))))))
      outlineFM(fm) must_= ss
    }}
  }
  "Outline QScriptCore" >> {
    val outlineQC: QScriptCore[Shape] => Shape =
      Outline[QScriptCore].outlineƒ
    // func wraps its input in [input, 75]; modSrc is the matching shape transform.
    val func =
      ConcatArraysR(
        MakeArrayR(HoleF),
        MakeArrayR(ConstantR(Fix(ExtEJson(ejson.Int(75))))))
    val modSrc: Shape => Shape =
      s => rollS(CommonEJson(ejson.Arr(List(s, rollS(ExtEJson(ejson.Int(75)))))))
    "Map is shape of function applied to source" >> prop { srcShape: Shape =>
      outlineQC(Map(srcShape, func)) must_= modSrc(srcShape)
    }
    "LeftShift(IncludeId) results in shape of repair applied to static array" >> prop { srcShape: Shape =>
      val r = rollS(CommonEJson(ejson.Arr(List(unknownF, unknownF))))
      outlineQC(LeftShift(srcShape, HoleF, IncludeId, joinFunc)) must_= modSides(srcShape, r)
    }
    "RightSide of repair is unknown when not IncludeId" >> prop { srcShape: Shape =>
      outlineQC(LeftShift(srcShape, HoleF, ExcludeId, joinFunc)) must_= modSides(srcShape, unknownF)
    }
    "Reduce tracks static input shape through buckets" >> prop { srcShape: Shape =>
      val rfunc = joinFunc map {
        case LeftSide => ReduceIndex(0.left)
        case RightSide => ReduceIndex(0.right)
      }
      val outShape = modSides(modSrc(srcShape), unknownF)
      outlineQC(Reduce(srcShape, List(func), List(ReduceFuncs.Count(HoleF)), rfunc)) must_= outShape
    }
    "Reduce tracks static input shape through parametric reducers" >> prop { srcShape: Shape =>
      val rfunc = joinFunc map {
        case LeftSide => ReduceIndex(0.right)
        case RightSide => ReduceIndex(1.right)
      }
      val reducers = List[ReduceFunc[FreeMap]](
        ReduceFuncs.Count(HoleF),
        ReduceFuncs.Last(func))
      val outShape = modSides(unknownF, modSrc(srcShape))
      outlineQC(Reduce(srcShape, Nil, reducers, rfunc)) must_= outShape
    }
    "Shape of composite-typed reducers is composite type" >> prop { srcShape: Shape =>
      val rfunc = joinFunc map {
        case LeftSide => ReduceIndex(0.right)
        case RightSide => ReduceIndex(1.right)
      }
      val reducers = List[ReduceFunc[FreeMap]](
        ReduceFuncs.UnshiftArray(HoleF),
        ReduceFuncs.UnshiftMap(HoleF, HoleF))
      val outShape = modSides(arrF, mapF)
      outlineQC(Reduce(srcShape, Nil, reducers, rfunc)) must_= outShape
    }
    "Subset is the shape of `from` applied to the source shape" >> prop { srcShape: Shape =>
      val from: FreeQS =
        Free.roll(QCT(Sort(
          Free.roll(QCT(Map(HoleQS, func))),
          Nil,
          NonEmptyList((HoleF, SortDir.Ascending)))))
      val count: FreeQS =
        Free.roll(QCT(Map(HoleQS, ConstantR(Fix(ExtEJson(ejson.Int(2)))))))
      outlineQC(Subset(srcShape, from, Take, count)) must_= modSrc(srcShape)
    }
    "Sort does not affect shape" >> prop { srcShape: Shape =>
      outlineQC(Sort(srcShape, Nil, NonEmptyList((func, SortDir.Descending)))) must_= srcShape
    }
    "Filter does not affect shape" >> prop { srcShape: Shape =>
      outlineQC(Filter(srcShape, func)) must_= srcShape
    }
    "Unreferenced has undefined shape" >> {
      outlineQC(Unreferenced()) must_= undefinedF
    }
  }
  // Join branches: left produces [src, 53], right produces [78, src].
  val lfn: FreeMap = MapFuncCore.StaticArray(List(HoleF, ConstantR(ejsonInt(53))))
  val l: FreeQS = Free.roll(QCT(Map(HoleQS, lfn)))
  def lShape(srcShape: Shape): Shape =
    rollS(CommonEJson(ejson.Arr(List(srcShape, rollS(ExtEJson(ejson.Int(53)))))))
  val rfn: FreeMap = MapFuncCore.StaticArray(List(ConstantR(ejsonInt(78)), HoleF))
  val r: FreeQS = Free.roll(QCT(Map(HoleQS, rfn)))
  def rShape(srcShape: Shape): Shape =
    rollS(CommonEJson(ejson.Arr(List(rollS(ExtEJson(ejson.Int(78))), srcShape))))
  def joinShape(srcShape: Shape) =
    modSides(lShape(srcShape), rShape(srcShape))
  "Outline ThetaJoin" >> {
    "results from applying combine to branch shapes" >> prop { (srcShape: Shape, jtype: JoinType) =>
      val on = ConstantR[JoinSide](ejsonNull)
      Outline[ThetaJoin].outlineƒ(ThetaJoin(srcShape, l, r, on, jtype, joinFunc)) must_= joinShape(srcShape)
    }
  }
  "Outline EquiJoin" >> {
    "results from applying combine to branch shapes" >> prop { (srcShape: Shape, jtype: JoinType) =>
      Outline[EquiJoin].outlineƒ(EquiJoin(srcShape, l, r, Nil, jtype, joinFunc)) must_= joinShape(srcShape)
    }
  }
}
| drostron/quasar | connector/src/test/scala/quasar/qscript/analysis/OutlineSpec.scala | Scala | apache-2.0 | 11,109 |
package gpio4s
import akka.actor.{ActorContext, ActorRef, ActorSystem}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import gpio4s.GpioInfo.PiBrev2
import gpio4s.gpiocfg.CfgDSL._
import gpio4s.gpiocfg.CfgIO.RichPins
import org.scalamock.scalatest.MockFactory
import org.scalatest.{Matchers, WordSpecLike}
class GpioSetupSpec extends TestKit(ActorSystem(getClass.getSimpleName.dropRight(1)))
  with ImplicitSender with WordSpecLike with Matchers with MockFactory {
  "controller" must {
    // NOTE(review): this test makes no assertion - it stubs the producer,
    // creates the service, and sleeps. With scalamock stubs, `.repeat(...)` on
    // a `when` records behaviour rather than verifying call counts, so a
    // missing pin creation would not fail here - TODO confirm intent.
    "create all pins for model" in {
      val producer = stub[PinProducer]
      (producer.get(_: Int)(_: ActorContext)).when(*, *).returning(testActor).repeat(PiBrev2.pins.indices)
      GpioService(PiBrev2, producer)
      // Fixed sleep gives the service time to create pin actors asynchronously.
      Thread.sleep(1000)
    }
    "configure pins" in {
      val probe = TestProbe()
      val pi = GpioService(PiBrev2, probedProducer(probe))
      val conf = gpio {_ number 0 digital input}
      pi ! Configure(conf)
      // Pin actor (the probe) should receive a Setup for the configured pin.
      probe.expectMsg(Setup(conf.pins().head))
    }
    "relay events" in {
      val pi = GpioService(PiBrev2, producer)
      val subs = TestProbe()
      subs.send(pi, Subscribe(0))
      val e = DigitalEvent(0, true)
      pi ! e
      // Events sent to the service are forwarded to pin subscribers.
      subs.expectMsg(e)
    }
  }
  // Producer whose pin actors are the test's own actor.
  def producer: PinProducer = new PinProducer {
    def get(num: Int)(implicit context: ActorContext): ActorRef = testActor
  }
  // Producer whose pin actors are a dedicated probe, for message inspection.
  // NOTE(review): the suite never shuts the ActorSystem down (no afterAll) -
  // TODO confirm whether the test runner cleans it up.
  def probedProducer(probe: TestProbe): PinProducer = new PinProducer {
    def get(num: Int)(implicit context: ActorContext): ActorRef = probe.ref
  }
}
| jw3/gpio4s | src/test/scala/gpio4s/GpioSetupSpec.scala | Scala | apache-2.0 | 1,663 |
package controllers
import javax.inject.Inject
import com.mohiva.play.silhouette.api.{ Environment, LogoutEvent, Silhouette }
import com.mohiva.play.silhouette.impl.authenticators.SessionAuthenticator
import controllers.headers.ProvidesHeader
import models.audit.{NewTask, AuditTaskTable}
import models.user.User
import play.api.libs.json.Json
import play.api.mvc.{BodyParsers, Result, RequestHeader}
import scala.concurrent.Future
class MapController @Inject() (implicit val env: Environment[User, SessionAuthenticator])
  extends Silhouette[User, SessionAuthenticator] with ProvidesHeader {

  /**
   * Renders the map-edit page, passing the signed-in user to the template
   * when one is present on the request.
   */
  def edit = UserAwareAction.async { implicit request =>
    val page = request.identity match {
      case Some(user) => views.html.mapEdit("Project Sidewalk", Some(user))
      case None       => views.html.mapEdit("Project Sidewalk")
    }
    Future.successful(Ok(page))
  }
}
| danZzyy/SidewalkWebpage | sidewalk-webpage/app/controllers/MapController.scala | Scala | mit | 932 |
package com.cleawing.docker
import akka.actor.ActorSystem
import akka.testkit.TestKit
import com.cleawing.docker.api.{RemoteClient, Data}
import org.scalatest._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{Millis, Seconds, Span}
class RemoteClientSpec(_system: ActorSystem) extends TestKit(_system)
  with FunSpecLike with ShouldMatchers with ScalaFutures
  with EitherValues with BeforeAndAfterAll {

  // Auxiliary no-arg constructor required by ScalaTest; supplies the actor system.
  def this() = this(ActorSystem("ApiSpec"))

  // All whenReady blocks wait up to 2s, polling every 100ms.
  implicit val defaultPatience = PatienceConfig(timeout = Span(2, Seconds), interval = Span(100, Millis))

  val api = RemoteClient()

  override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }

  // NOTE(review): these tests hit a live Docker daemon via RemoteClient, so
  // they are integration tests and fail without a reachable daemon.
  describe("Misc") {
    it("should return Data.Version") {
      whenReady(api.version()) { _.right.value shouldBe a [Data.Version]}
    }
    it("should return Data.Info") {
      whenReady(api.info()) {_.right.value shouldBe a [Data.Info]}
    }
  }
  describe("Images") {
    it("should return Data.Images") {
      whenReady(api.images()) {_.right.value shouldBe a [Data.Images]}
    }
    // NOTE(review): ignored, and inconsistent as written - it calls
    // api.images() but expects Data.ImageHistory; presumably a placeholder
    // awaiting an imageHistory endpoint. TODO confirm.
    ignore("should return Data.ImageHistory") {
      whenReady(api.images()) {_.right.value shouldBe a [Data.ImageHistory]}
    }
  }
  describe("Containers") {
    it("should return Data.Containers") {
      whenReady(api.containers()) {_.right.value shouldBe a [Data.Containers]}
    }
  }
}
| Cleawing/akka-docker | src/test/scala/com/cleawing/docker/RemoteClientSpec.scala | Scala | apache-2.0 | 1,383 |
package breeze.linalg
import breeze.linalg.support._
/**
 * A wrapper marking that numeric operations on `underlying` should be applied
 * per row or per column (along a broadcast axis) rather than to the whole
 * object; see `BroadcastedRows`/`BroadcastedColumns` produced by `Broadcaster`.
 *
 * @author dlwh
 **/
trait Broadcasted[+T, B] extends NumericOps[Broadcasted[T, B]] {
  def underlying: T
}

trait BroadcastedLike[T, B, Self <: Broadcasted[T, B]] extends Broadcasted[T, B] with NumericOps[Self] {
  /** Applies `f` to each slice along the broadcast axis, collecting results via `cmv`. */
  def map[U, Res](f: B=>U)(implicit cmv: CanMapValues[Self, B, U, Res]):Res = {
    cmv(repr, f)
  }
  /** Runs `f` on each slice along the broadcast axis for its side effects. */
  def foreach[U](f: B=>U)(implicit cmv: CanForeachValues[Self, B]):Unit = {
    cmv.foreach(repr, f)
  }
}
// Marker type for the broadcasting wildcard; `*` is the singleton used in
// slicing expressions such as m(*, ::) and m(::, *).
class Broadcaster
object * extends Broadcaster
object Broadcaster {
  // m(slice, *): apply the slice over all columns, then broadcast over the
  // columns of the result.
  implicit def canBroadcastSliceColumns[From, Slice1, To, Col]
  (implicit cs2_:: : CanSlice2[From, Slice1, ::.type, To],
   handhold: CanCollapseAxis.HandHold[From, Axis._0.type, Col]): CanSlice2[From, Slice1, *.type, BroadcastedColumns[To, Col]] = {
    new CanSlice2[From, Slice1, *.type, BroadcastedColumns[To, Col]] {
      def apply(from: From, slice: Slice1, slice2: *.type): BroadcastedColumns[To, Col] = {
        BroadcastedColumns(cs2_::(from, slice, ::))
      }
    }
  }
  // m(::, *): broadcast over all columns of `from`.
  // NOTE(review): the `Slice1` type parameter is unused here - TODO confirm
  // it is not load-bearing for implicit resolution.
  implicit def canBroadcastColumns[From, Slice1, Col]
  (implicit handhold: CanCollapseAxis.HandHold[From, Axis._0.type, Col])
  : CanSlice2[From, ::.type, *.type, BroadcastedColumns[From, Col]] = {
    new CanSlice2[From, ::.type, *.type, BroadcastedColumns[From, Col]] {
      def apply(from: From, slice: ::.type, slice2: *.type): BroadcastedColumns[From, Col] = {
        BroadcastedColumns(from)
      }
    }
  }
  // m(*, slice): apply the slice over all rows, then broadcast over the rows
  // of the result.
  implicit def canBroadcastSliceRows[From, Slice1, To, Row]
  (implicit cs2_:: : CanSlice2[From, ::.type, Slice1, To],
   handhold: CanCollapseAxis.HandHold[From, Axis._1.type, Row]): CanSlice2[From, *.type, Slice1, BroadcastedRows[To, Row]] = {
    new CanSlice2[From, *.type, Slice1, BroadcastedRows[To, Row]] {
      def apply(from: From, slice2: *.type, slice: Slice1): BroadcastedRows[To, Row] = {
        BroadcastedRows(cs2_::(from, ::, slice))
      }
    }
  }
  // m(*, ::): broadcast over all rows of `from`.
  implicit def canBroadcastRows[From, Slice1, Row](implicit handhold: CanCollapseAxis.HandHold[From, Axis._1.type, Row]): CanSlice2[From, *.type, ::.type, BroadcastedRows[From, Row]] = {
    new CanSlice2[From, *.type, ::.type, BroadcastedRows[From, Row]] {
      def apply(from: From, slice2: *.type, slice: ::.type): BroadcastedRows[From, Row] = {
        BroadcastedRows(from)
      }
    }
  }
}
| chen0031/breeze | math/src/main/scala/breeze/linalg/Broadcasted.scala | Scala | apache-2.0 | 2,416 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.cdi
/**
* Bean Container.
*
* @author chaostone
* @since 3.1.0
*/
trait Container {
  /** Whether a bean is registered under `key`. */
  def contains(key: Any): Boolean
  /** The declared class of the bean registered under `key`, if any. */
  def getType(key: Any): Option[Class[_]]
  /** The raw bean definition registered under `key`. */
  def getDefinition(key: Any): Any
  /** The bean instance registered under `key`, if any. */
  def getBean[T](key: Any): Option[T]
  /** A single bean instance assignable to `clazz`, if any. */
  def getBean[T](clazz: Class[T]): Option[T]
  /** All beans assignable to `clazz`, keyed by bean name. */
  def getBeans[T](clazz: Class[T]): Map[String, T]
  /** All registered bean keys. */
  def keys: Set[_]
  /** The parent container this one delegates to. */
  def parent: Container
}
object Container {
  // Global root container; null until assigned during bootstrap.
  var ROOT: Container = _
  var listeners: List[ContainerListener] = Nil
  // NOTE(review): prepending to a plain var is not thread-safe; concurrent
  // registrations can be lost - TODO confirm listeners are only added during
  // single-threaded startup.
  def addListener(listener: ContainerListener): Unit = {
    listeners = listener :: listeners
  }
  // Registry of live containers (mutable, shared).
  val containers = new collection.mutable.HashSet[Container]
}
/** Mixed into beans that need a reference back to the container that built them. */
trait ContainerAware {
  def container: Container
  def container_=(container: Container): Unit
}
/** Container lifecycle callbacks; both hooks default to no-ops. */
trait ContainerListener {
  def onStarted(container: Container): Unit = {}
  def onStopped(container: Container): Unit = {}
}
/** Supplies configuration properties to the container. */
trait PropertySource {
  def properties: collection.Map[String, String]
}
| beangle/cdi | api/src/main/scala/org/beangle/cdi/container.scala | Scala | lgpl-3.0 | 1,695 |
package sigmastate.serialization.transformers
import org.ergoplatform.ErgoBox
import org.ergoplatform.ErgoBox.RegisterId
import sigmastate.{ArgInfo, SType}
import sigmastate.Values.{Value, SValue}
import sigmastate.serialization.ValueSerializer
import ValueSerializer._
import sigmastate.Operations.DeserializeRegisterInfo._
import sigmastate.utils.SigmaByteWriter.DataInfo
import sigmastate.utils.{SigmaByteReader, SigmaByteWriter}
import sigmastate.utxo.DeserializeRegister
/**
 * Serializer for `DeserializeRegister`.
 * Wire format: register-id byte, expected result type, optional default value.
 * `parse` must read fields in exactly the order `serialize` writes them.
 */
case class DeserializeRegisterSerializer(cons: (RegisterId, SType, Option[Value[SType]]) => Value[SType])
  extends ValueSerializer[DeserializeRegister[SType]] {
  override def opDesc = DeserializeRegister
  val idInfo: DataInfo[Byte] = idArg
  val typeInfo: DataInfo[SType] = ArgInfo("type", "expected type of the deserialized script")
  val defaultInfo: DataInfo[SValue] = defaultArg
  override def serialize(obj: DeserializeRegister[SType], w: SigmaByteWriter): Unit = {
    w.put(obj.reg.number, idInfo)
    w.putType(obj.tpe, typeInfo)
    opt(w, "default", obj.default)(_.putValue(_, defaultInfo))
  }
  override def parse(r: SigmaByteReader): Value[SType] = {
    // NOTE(review): .get throws NoSuchElementException when the id byte is not
    // a valid register index, so malformed input fails with that exception
    // rather than a dedicated validation error - TODO confirm this is intended.
    val registerId = ErgoBox.findRegisterByIndex(r.getByte()).get
    val tpe = r.getType()
    val dv = r.getOption(r.getValue())
    r.wasDeserialize ||= true // record that the script tree contains a deserialize op
    cons(registerId, tpe, dv)
  }
}
| ScorexFoundation/sigmastate-interpreter | sigmastate/src/main/scala/sigmastate/serialization/transformers/DeserializeRegisterSerializer.scala | Scala | mit | 1,362 |
/**
* Copyright (C) 2009-2017 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.actor
import language.implicitConversions
import scala.concurrent.duration.Duration
import scala.collection.mutable
import akka.routing.{ Deafen, Listen, Listeners }
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.duration._
import akka.annotation.InternalApi
object FSM {
/**
* A partial function value which does not match anything and can be used to
* “reset” `whenUnhandled` and `onTermination` handlers.
*
* {{{
* onTermination(FSM.NullFunction)
* }}}
*/
object NullFunction extends PartialFunction[Any, Nothing] {
def isDefinedAt(o: Any) = false
def apply(o: Any) = sys.error("undefined")
}
/**
* Message type which is sent directly to the subscribed actor in
* [[akka.actor.FSM.SubscribeTransitionCallBack]] before sending any
* [[akka.actor.FSM.Transition]] messages.
*/
final case class CurrentState[S](fsmRef: ActorRef, state: S)
/**
* Message type which is used to communicate transitions between states to
* all subscribed listeners (use [[akka.actor.FSM.SubscribeTransitionCallBack]]).
*/
final case class Transition[S](fsmRef: ActorRef, from: S, to: S)
/**
* Send this to an [[akka.actor.FSM]] to request first the [[FSM.CurrentState]]
* and then a series of [[FSM.Transition]] updates. Cancel the subscription
* using [[FSM.UnsubscribeTransitionCallBack]].
*/
final case class SubscribeTransitionCallBack(actorRef: ActorRef)
/**
* Unsubscribe from [[akka.actor.FSM.Transition]] notifications which was
* effected by sending the corresponding [[akka.actor.FSM.SubscribeTransitionCallBack]].
*/
final case class UnsubscribeTransitionCallBack(actorRef: ActorRef)
/**
* Reason why this [[akka.actor.FSM]] is shutting down.
*/
sealed trait Reason
/**
* Default reason if calling `stop()`.
*/
case object Normal extends Reason
/**
* Reason given when someone was calling `system.stop(fsm)` from outside;
* also applies to `Stop` supervision directive.
*/
case object Shutdown extends Reason
/**
* Signifies that the [[akka.actor.FSM]] is shutting itself down because of
* an error, e.g. if the state to transition into does not exist. You can use
* this to communicate a more precise cause to the `onTermination` block.
*/
final case class Failure(cause: Any) extends Reason
/**
* This case object is received in case of a state timeout.
*/
case object StateTimeout
/**
* INTERNAL API
*/
private final case class TimeoutMarker(generation: Long)
/**
* INTERNAL API
*/
@InternalApi
private[akka] final case class Timer(name: String, msg: Any, repeat: Boolean, generation: Int,
owner: AnyRef)(context: ActorContext)
extends NoSerializationVerificationNeeded {
private var ref: Option[Cancellable] = _
private val scheduler = context.system.scheduler
private implicit val executionContext = context.dispatcher
def schedule(actor: ActorRef, timeout: FiniteDuration): Unit =
ref = Some(
if (repeat) scheduler.schedule(timeout, timeout, actor, this)
else scheduler.scheduleOnce(timeout, actor, this))
def cancel(): Unit =
if (ref.isDefined) {
ref.get.cancel()
ref = None
}
}
/**
* This extractor is just convenience for matching a (S, S) pair, including a
* reminder what the new state is.
*/
object `->` {
def unapply[S](in: (S, S)) = Some(in)
}
val `→` = `->`
/**
* Log Entry of the [[akka.actor.LoggingFSM]], can be obtained by calling `getLog`.
*/
final case class LogEntry[S, D](stateName: S, stateData: D, event: Any)
/** Used by `forMax` to signal "cancel stateTimeout" */
private final val SomeMaxFiniteDuration = Some(Long.MaxValue.nanos)
/**
* INTERNAL API
* Using a subclass for binary compatibility reasons
*/
private[akka] class SilentState[S, D](_stateName: S, _stateData: D, _timeout: Option[FiniteDuration], _stopReason: Option[Reason], _replies: List[Any])
extends State[S, D](_stateName, _stateData, _timeout, _stopReason, _replies) {
/**
* INTERNAL API
*/
private[akka] override def notifies: Boolean = false
override def copy(stateName: S = stateName, stateData: D = stateData, timeout: Option[FiniteDuration] = timeout, stopReason: Option[Reason] = stopReason, replies: List[Any] = replies): State[S, D] = {
new SilentState(stateName, stateData, timeout, stopReason, replies)
}
}
/**
* This captures all of the managed state of the [[akka.actor.FSM]]: the state
* name, the state data, possibly custom timeout, stop reason and replies
* accumulated while processing the last message.
*/
case class State[S, D](stateName: S, stateData: D, timeout: Option[FiniteDuration] = None, stopReason: Option[Reason] = None, replies: List[Any] = Nil) {
  /**
   * INTERNAL API
   *
   * Whether entering this state emits transition notifications. `true` here;
   * overridden to `false` in [[SilentState]] (used by `stay`).
   */
  private[akka] def notifies: Boolean = true
  // defined here to be able to override it in SilentState
  def copy(stateName: S = stateName, stateData: D = stateData, timeout: Option[FiniteDuration] = timeout, stopReason: Option[Reason] = stopReason, replies: List[Any] = replies): State[S, D] = {
    new State(stateName, stateData, timeout, stopReason, replies)
  }
  /**
   * Modify state transition descriptor to include a state timeout for the
   * next state. This timeout overrides any default timeout set for the next
   * state.
   *
   * Use Duration.Inf to deactivate an existing timeout.
   */
  def forMax(timeout: Duration): State[S, D] = timeout match {
    case f: FiniteDuration ⇒ copy(timeout = Some(f))
    case Duration.Inf ⇒ copy(timeout = SomeMaxFiniteDuration) // we map the Infinite duration to a special marker,
    case _ ⇒ copy(timeout = None) // that means "cancel stateTimeout". This marker is needed
  } // so we do not have to break source/binary compat.
  // TODO: Can be removed once we can break State#timeout signature to `Option[Duration]`
  /**
   * Send reply to sender of the current message, if available.
   *
   * Replies accumulate (prepended) and are sent in order when the transition
   * is applied.
   *
   * @return this state transition descriptor
   */
  def replying(replyValue: Any): State[S, D] = {
    copy(replies = replyValue :: replies)
  }
  /**
   * Modify state transition descriptor with new state data. The data will be
   * set when transitioning to the new state.
   */
  def using(@deprecatedName('nextStateDate) nextStateData: D): State[S, D] = {
    copy(stateData = nextStateData)
  }
  /**
   * INTERNAL API.
   *
   * Mark this descriptor as a stop request with the given reason.
   */
  private[akka] def withStopReason(reason: Reason): State[S, D] = {
    copy(stopReason = Some(reason))
  }
  /**
   * INTERNAL API.
   *
   * Switch between the notifying (plain State) and silent (SilentState)
   * representation while keeping all other fields.
   */
  private[akka] def withNotification(notifies: Boolean): State[S, D] = {
    if (notifies)
      State(stateName, stateData, timeout, stopReason, replies)
    else
      new SilentState(stateName, stateData, timeout, stopReason, replies)
  }
}
/**
 * All messages sent to the [[akka.actor.FSM]] will be wrapped inside an
 * `Event`, which allows pattern matching to extract both state and data.
 *
 * `stateData` is a snapshot of the current state data at the time the
 * message is processed.
 */
final case class Event[D](event: Any, stateData: D) extends NoSerializationVerificationNeeded
/**
 * Case class representing the state of the [[akka.actor.FSM]] within the
 * `onTermination` block.
 */
final case class StopEvent[S, D](reason: Reason, currentState: S, stateData: D) extends NoSerializationVerificationNeeded
}
/**
* Finite State Machine actor trait. Use as follows:
*
* <pre>
* object A {
* trait State
* case class One extends State
* case class Two extends State
*
* case class Data(i : Int)
* }
*
* class A extends Actor with FSM[A.State, A.Data] {
* import A._
*
* startWith(One, Data(42))
* when(One) {
* case Event(SomeMsg, Data(x)) => ...
* case Event(SomeOtherMsg, _) => ... // when data not needed
* }
* when(Two, stateTimeout = 5 seconds) { ... }
* initialize()
* }
* </pre>
*
* Within the partial function the following values are returned for effecting
* state transitions:
*
* - <code>stay</code> for staying in the same state
* - <code>stay using Data(...)</code> for staying in the same state, but with
* different data
* - <code>stay forMax 5.millis</code> for staying with a state timeout; can be
* combined with <code>using</code>
* - <code>goto(...)</code> for changing into a different state; also supports
* <code>using</code> and <code>forMax</code>
* - <code>stop</code> for terminating this FSM actor
*
* Each of the above also supports the method <code>replying(AnyRef)</code> for
* sending a reply before changing state.
*
* While changing state, custom handlers may be invoked which are registered
* using <code>onTransition</code>. This is meant to enable concentrating
* different concerns in different places; you may choose to use
* <code>when</code> for describing the properties of a state, including of
* course initiating transitions, but you can describe the transitions using
* <code>onTransition</code> to avoid having to duplicate that code among
* multiple paths which lead to a transition:
*
* <pre>
* onTransition {
* case Active -> _ => cancelTimer("activeTimer")
* }
* </pre>
*
* Multiple such blocks are supported and all of them will be called, not only
* the first matching one.
*
* Another feature is that other actors may subscribe for transition events by
* sending a <code>SubscribeTransitionCallback</code> message to this actor.
* Stopping a listener without unregistering will not remove the listener from the
* subscription list; use <code>UnsubscribeTransitionCallback</code> before stopping
* the listener.
*
* State timeouts set an upper bound to the time which may pass before another
* message is received in the current state. If no external message is
* available, then upon expiry of the timeout a StateTimeout message is sent.
* Note that this message will only be received in the state for which the
* timeout was set and that any message received will cancel the timeout
* (possibly to be started again by the next transition).
*
* Another feature is the ability to install and cancel single-shot as well as
* repeated timers which arrange for the sending of a user-specified message:
*
* <pre>
* setTimer("tock", TockMsg, 1 second, true) // repeating
* setTimer("lifetime", TerminateMsg, 1 hour, false) // single-shot
* cancelTimer("tock")
* isTimerActive("tock")
* </pre>
*/
trait FSM[S, D] extends Actor with Listeners with ActorLogging {
  import FSM._
  type State = FSM.State[S, D]
  type Event = FSM.Event[D]
  type StopEvent = FSM.StopEvent[S, D]
  // A state handler: partial function from incoming Event to the next State.
  type StateFunction = scala.PartialFunction[Event, State]
  type Timeout = Option[FiniteDuration]
  type TransitionHandler = PartialFunction[(S, S), Unit]
  /*
   * “import” so that these are visible without an import
   */
  val Event: FSM.Event.type = FSM.Event
  val StopEvent: FSM.StopEvent.type = FSM.StopEvent
  /**
   * This extractor is just convenience for matching a (S, S) pair, including a
   * reminder what the new state is.
   */
  val `->` = FSM.`->`
  /**
   * This case object is received in case of a state timeout.
   */
  val StateTimeout = FSM.StateTimeout
  /**
   * ****************************************
   * DSL
   * ****************************************
   */
  /**
   * Insert a new StateFunction at the end of the processing chain for the
   * given state. If the stateTimeout parameter is set, entering this state
   * without a differing explicit timeout setting will trigger a StateTimeout
   * event; the same is true when using #stay.
   *
   * @param stateName designator for the state
   * @param stateTimeout default state timeout for this state
   * @param stateFunction partial function describing response to input
   */
  final def when(stateName: S, stateTimeout: FiniteDuration = null)(stateFunction: StateFunction): Unit =
    register(stateName, stateFunction, Option(stateTimeout))
  /**
   * Set initial state. Call this method from the constructor before the [[#initialize]] method.
   * If different state is needed after a restart this method, followed by [[#initialize]], can
   * be used in the actor life cycle hooks [[akka.actor.Actor#preStart]] and [[akka.actor.Actor#postRestart]].
   *
   * @param stateName initial state designator
   * @param stateData initial state data
   * @param timeout state timeout for the initial state, overriding the default timeout for that state
   */
  final def startWith(stateName: S, stateData: D, timeout: Timeout = None): Unit =
    currentState = FSM.State(stateName, stateData, timeout)
  /**
   * Produce transition to other state.
   * Return this from a state function in order to effect the transition.
   *
   * This method always triggers transition events, even for `A -> A` transitions.
   * If you want to stay in the same state without triggering an state transition event use [[#stay]] instead.
   *
   * @param nextStateName state designator for the next state
   * @return state transition descriptor
   */
  final def goto(nextStateName: S): State = FSM.State(nextStateName, currentState.stateData)
  /**
   * Produce "empty" transition descriptor.
   * Return this from a state function when no state change is to be effected.
   *
   * No transition event will be triggered by [[#stay]].
   * If you want to trigger an event like `S -> S` for `onTransition` to handle use `goto` instead.
   *
   * @return descriptor for staying in current state
   */
  final def stay(): State = goto(currentState.stateName).withNotification(false) // cannot directly use currentState because of the timeout field
  /**
   * Produce change descriptor to stop this FSM actor with reason "Normal".
   */
  final def stop(): State = stop(Normal)
  /**
   * Produce change descriptor to stop this FSM actor including specified reason.
   */
  final def stop(reason: Reason): State = stop(reason, currentState.stateData)
  /**
   * Produce change descriptor to stop this FSM actor including specified reason.
   */
  final def stop(reason: Reason, stateData: D): State = stay using stateData withStopReason (reason)
  final class TransformHelper(func: StateFunction) {
    // Post-process the result of `func`; states not covered by `andThen`
    // pass through unchanged thanks to the identity fallback.
    def using(andThen: PartialFunction[State, State]): StateFunction =
      func andThen (andThen orElse { case x ⇒ x })
  }
  final def transform(func: StateFunction): TransformHelper = new TransformHelper(func)
  /**
   * Schedule named timer to deliver message after given delay, possibly repeating.
   * Any existing timer with the same name will automatically be canceled before
   * adding the new timer.
   * @param name identifier to be used with cancelTimer()
   * @param msg message to be delivered
   * @param timeout delay of first message delivery and between subsequent messages
   * @param repeat send once if false, scheduleAtFixedRate if true
   */
  final def setTimer(name: String, msg: Any, timeout: FiniteDuration, repeat: Boolean = false): Unit = {
    if (debugEvent)
      log.debug("setting " + (if (repeat) "repeating " else "") + "timer '" + name + "'/" + timeout + ": " + msg)
    if (timers contains name) {
      timers(name).cancel
    }
    // timerGen gives each timer a fresh generation; receive() uses it to
    // drop messages from timers that were canceled and re-created.
    val timer = Timer(name, msg, repeat, timerGen.next, this)(context)
    timer.schedule(self, timeout)
    timers(name) = timer
  }
  /**
   * Cancel named timer, ensuring that the message is not subsequently delivered (no race).
   * @param name of the timer to cancel
   */
  final def cancelTimer(name: String): Unit = {
    if (debugEvent)
      log.debug("canceling timer '" + name + "'")
    if (timers contains name) {
      timers(name).cancel
      timers -= name
    }
  }
  /**
   * Inquire whether the named timer is still active. Returns true unless the
   * timer does not exist, has previously been canceled or if it was a
   * single-shot timer whose message was already received.
   */
  final def isTimerActive(name: String): Boolean = timers contains name
  /**
   * Set state timeout explicitly. This method can safely be used from within a
   * state handler.
   */
  final def setStateTimeout(state: S, timeout: Timeout): Unit = stateTimeouts(state) = timeout
  /**
   * INTERNAL API, used for testing.
   */
  private[akka] final def isStateTimerActive = timeoutFuture.isDefined
  /**
   * Set handler which is called upon each state transition, i.e. not when
   * staying in the same state. This may use the pair extractor defined in the
   * FSM companion object like so:
   *
   * <pre>
   * onTransition {
   *   case Old -> New => doSomething
   * }
   * </pre>
   *
   * It is also possible to supply a 2-ary function object:
   *
   * <pre>
   * onTransition(handler _)
   *
   * private def handler(from: S, to: S) { ... }
   * </pre>
   *
   * The underscore is unfortunately necessary to enable the nicer syntax shown
   * above (it uses the implicit conversion total2pf under the hood).
   *
   * <b>Multiple handlers may be installed, and every one of them will be
   * called, not only the first one matching.</b>
   */
  final def onTransition(transitionHandler: TransitionHandler): Unit = transitionEvent :+= transitionHandler
  /**
   * Convenience wrapper for using a total function instead of a partial
   * function literal. To be used with onTransition.
   */
  implicit final def total2pf(transitionHandler: (S, S) ⇒ Unit): TransitionHandler =
    new TransitionHandler {
      def isDefinedAt(in: (S, S)) = true
      def apply(in: (S, S)) { transitionHandler(in._1, in._2) }
    }
  /**
   * Set handler which is called upon termination of this FSM actor. Calling
   * this method again will overwrite the previous contents.
   */
  final def onTermination(terminationHandler: PartialFunction[StopEvent, Unit]): Unit =
    terminateEvent = terminationHandler
  /**
   * Set handler which is called upon reception of unhandled messages. Calling
   * this method again will overwrite the previous contents.
   *
   * The current state may be queried using ``stateName``.
   */
  final def whenUnhandled(stateFunction: StateFunction): Unit =
    handleEvent = stateFunction orElse handleEventDefault
  /**
   * Verify existence of initial state and setup timers. This should be the
   * last call within the constructor, or [[akka.actor.Actor#preStart]] and
   * [[akka.actor.Actor#postRestart]]
   *
   * An initial `currentState -> currentState` notification will be triggered by calling this method.
   *
   * @see [[#startWith]]
   */
  final def initialize(): Unit =
    if (currentState != null) makeTransition(currentState)
    else throw new IllegalStateException("You must call `startWith` before calling `initialize`")
  /**
   * Return current state name (i.e. object of type S)
   */
  final def stateName: S = {
    if (currentState != null) currentState.stateName
    else throw new IllegalStateException("You must call `startWith` before using `stateName`")
  }
  /**
   * Return current state data (i.e. object of type D)
   */
  final def stateData: D =
    if (currentState != null) currentState.stateData
    else throw new IllegalStateException("You must call `startWith` before using `stateData`")
  /**
   * Return next state data (available in onTransition handlers)
   */
  final def nextStateData = nextState match {
    case null ⇒ throw new IllegalStateException("nextStateData is only available during onTransition")
    case x ⇒ x.stateData
  }
  /*
   * ****************************************************************
   * PRIVATE IMPLEMENTATION DETAILS
   * ****************************************************************
   */
  private[akka] def debugEvent: Boolean = false
  /*
   * FSM State data and current timeout handling
   */
  // Current state descriptor; remains null until startWith() is called.
  private var currentState: State = _
  // Pending state-timeout task, if one is scheduled.
  private var timeoutFuture: Option[Cancellable] = None
  // Transient: non-null only while transition handlers run (see makeTransition).
  private var nextState: State = _
  // Bumped on every processed message; stale TimeoutMarker(gen) messages
  // whose generation no longer matches are ignored in receive().
  private var generation: Long = 0L
  /*
   * Timer handling
   */
  private val timers = mutable.Map[String, Timer]()
  // Monotonically increasing generation source for named timers.
  private val timerGen = Iterator from 0
  /*
   * State definitions
   */
  private val stateFunctions = mutable.Map[S, StateFunction]()
  private val stateTimeouts = mutable.Map[S, Timeout]()
  private def register(name: S, function: StateFunction, timeout: Timeout): Unit = {
    if (stateFunctions contains name) {
      // Chain via orElse: earlier registrations for the same state take precedence.
      stateFunctions(name) = stateFunctions(name) orElse function
      stateTimeouts(name) = timeout orElse stateTimeouts(name)
    } else {
      stateFunctions(name) = function
      stateTimeouts(name) = timeout
    }
  }
  /*
   * unhandled event handler
   */
  private val handleEventDefault: StateFunction = {
    case Event(value, stateData) ⇒
      log.warning("unhandled event " + value + " in state " + stateName)
      stay
  }
  private var handleEvent: StateFunction = handleEventDefault
  /*
   * termination handling
   */
  private var terminateEvent: PartialFunction[StopEvent, Unit] = NullFunction
  /*
   * transition handling
   */
  private var transitionEvent: List[TransitionHandler] = Nil
  private def handleTransition(prev: S, next: S) {
    val tuple = (prev, next)
    // All registered handlers are invoked, not just the first matching one.
    for (te ← transitionEvent) { if (te.isDefinedAt(tuple)) te(tuple) }
  }
  /*
   * *******************************************
   * Main actor receive() method
   * *******************************************
   */
  override def receive: Receive = {
    case TimeoutMarker(gen) ⇒
      // Only honor the timeout if no other message has been processed since
      // it was scheduled (generation still matches).
      if (generation == gen) {
        processMsg(StateTimeout, "state timeout")
      }
    case t @ Timer(name, msg, repeat, gen, owner) ⇒
      // Ignore timer messages from other FSM instances, canceled timers, or
      // timers that were re-created under the same name (generation mismatch).
      if ((owner eq this) && (timers contains name) && (timers(name).generation == gen)) {
        if (timeoutFuture.isDefined) {
          timeoutFuture.get.cancel()
          timeoutFuture = None
        }
        generation += 1
        if (!repeat) {
          timers -= name
        }
        processMsg(msg, t)
      }
    case SubscribeTransitionCallBack(actorRef) ⇒
      // TODO Use context.watch(actor) and receive Terminated(actor) to clean up list
      listeners.add(actorRef)
      // send current state back as reference point
      actorRef ! CurrentState(self, currentState.stateName)
    case Listen(actorRef) ⇒
      // TODO Use context.watch(actor) and receive Terminated(actor) to clean up list
      listeners.add(actorRef)
      // send current state back as reference point
      actorRef ! CurrentState(self, currentState.stateName)
    case UnsubscribeTransitionCallBack(actorRef) ⇒
      listeners.remove(actorRef)
    case Deafen(actorRef) ⇒
      listeners.remove(actorRef)
    case value ⇒ {
      // Any ordinary message cancels the pending state timeout and
      // invalidates any in-flight TimeoutMarker by bumping the generation.
      if (timeoutFuture.isDefined) {
        timeoutFuture.get.cancel()
        timeoutFuture = None
      }
      generation += 1
      processMsg(value, sender())
    }
  }
  private def processMsg(value: Any, source: AnyRef): Unit = {
    val event = Event(value, currentState.stateData)
    processEvent(event, source)
  }
  private[akka] def processEvent(event: Event, source: AnyRef): Unit = {
    val stateFunc = stateFunctions(currentState.stateName)
    val nextState = if (stateFunc isDefinedAt event) {
      stateFunc(event)
    } else {
      // handleEventDefault ensures that this is always defined
      handleEvent(event)
    }
    applyState(nextState)
  }
  private[akka] def applyState(nextState: State): Unit = {
    nextState.stopReason match {
      case None ⇒ makeTransition(nextState)
      case _ ⇒
        // A defined stopReason means the state function requested stop(...):
        // flush replies, run termination handling and stop the actor.
        nextState.replies.reverse foreach { r ⇒ sender() ! r }
        terminate(nextState)
        context.stop(self)
    }
  }
  private[akka] def makeTransition(nextState: State): Unit = {
    if (!stateFunctions.contains(nextState.stateName)) {
      terminate(stay withStopReason Failure("Next state %s does not exist".format(nextState.stateName)))
    } else {
      // replies were prepended in State.replying, so reverse restores order
      nextState.replies.reverse foreach { r ⇒ sender() ! r }
      // Notify on an actual state change or when the descriptor asks for it;
      // stay() returns a SilentState whose `notifies` is false.
      if (currentState.stateName != nextState.stateName || nextState.notifies) {
        this.nextState = nextState
        handleTransition(currentState.stateName, nextState.stateName)
        gossip(Transition(self, currentState.stateName, nextState.stateName))
        this.nextState = null
      }
      currentState = nextState
      def scheduleTimeout(d: FiniteDuration): Some[Cancellable] = {
        import context.dispatcher
        Some(context.system.scheduler.scheduleOnce(d, self, TimeoutMarker(generation)))
      }
      currentState.timeout match {
        case SomeMaxFiniteDuration ⇒ // effectively disable stateTimeout
        case Some(d: FiniteDuration) if d.length >= 0 ⇒ timeoutFuture = scheduleTimeout(d)
        case _ ⇒
          // fall back to the default timeout registered for this state, if any
          val timeout = stateTimeouts(currentState.stateName)
          if (timeout.isDefined) timeoutFuture = scheduleTimeout(timeout.get)
      }
    }
  }
  /**
   * Call `onTermination` hook; if you want to retain this behavior when
   * overriding make sure to call `super.postStop()`.
   *
   * Please note that this method is called by default from `preRestart()`,
   * so override that one if `onTermination` shall not be called during
   * restart.
   */
  override def postStop(): Unit = {
    /*
     * setting this instance’s state to terminated does no harm during restart
     * since the new instance will initialize fresh using startWith()
     */
    terminate(stay withStopReason Shutdown)
    super.postStop()
  }
  private def terminate(nextState: State): Unit = {
    // The stopReason guard makes termination idempotent: once terminate() has
    // run, currentState carries a stopReason and a second call is a no-op
    // (e.g. postStop after a stop() already triggered termination).
    if (currentState.stopReason.isEmpty) {
      val reason = nextState.stopReason.get
      logTermination(reason)
      for (timer ← timers.values) timer.cancel()
      timers.clear()
      timeoutFuture.foreach { _.cancel() }
      currentState = nextState
      val stopEvent = StopEvent(reason, currentState.stateName, currentState.stateData)
      if (terminateEvent.isDefinedAt(stopEvent))
        terminateEvent(stopEvent)
    }
  }
  /**
   * By default [[FSM.Failure]] is logged at error level and other reason
   * types are not logged. It is possible to override this behavior.
   */
  protected def logTermination(reason: Reason): Unit = reason match {
    case Failure(ex: Throwable) ⇒ log.error(ex, "terminating due to Failure")
    case Failure(msg: AnyRef) ⇒ log.error(msg.toString)
    case _ ⇒
  }
}
/**
 * Stackable trait for [[akka.actor.FSM]] which adds a rolling event log and
 * debug logging capabilities (analogous to [[akka.event.LoggingReceive]]).
 *
 * @since 1.2
 */
trait LoggingFSM[S, D] extends FSM[S, D] { this: Actor ⇒
  import FSM._
  // Size of the rolling event log; 0 (default) disables logging of events.
  def logDepth: Int = 0
  private[akka] override val debugEvent = context.system.settings.FsmDebugEvent
  // Parallel ring buffers of size logDepth: events(i) pairs with states(i).
  private val events = new Array[Event](logDepth)
  private val states = new Array[AnyRef](logDepth)
  // Next write position within the ring buffers.
  private var pos = 0
  // Set once the buffers have wrapped around at least once.
  private var full = false
  private def advance() {
    val n = pos + 1
    if (n == logDepth) {
      full = true
      pos = 0
    } else {
      pos = n
    }
  }
  private[akka] abstract override def processEvent(event: Event, source: AnyRef): Unit = {
    if (debugEvent) {
      val srcstr = source match {
        case s: String ⇒ s
        case Timer(name, _, _, _, _) ⇒ "timer " + name
        case a: ActorRef ⇒ a.toString
        case _ ⇒ "unknown"
      }
      log.debug("processing {} from {} in state {}", event, srcstr, stateName)
    }
    if (logDepth > 0) {
      // Record state/event before handing off to the user state function.
      states(pos) = stateName.asInstanceOf[AnyRef]
      events(pos) = event
      advance()
    }
    val oldState = stateName
    super.processEvent(event, source)
    val newState = stateName
    if (debugEvent && oldState != newState)
      log.debug("transition " + oldState + " -> " + newState)
  }
  /**
   * Retrieve current rolling log in oldest-first order. The log is filled with
   * each incoming event before processing by the user supplied state handler.
   * The log entries are lost when this actor is restarted.
   */
  protected def getLog: IndexedSeq[LogEntry[S, D]] = {
    val log = events zip states filter (_._1 ne null) map (x ⇒ LogEntry(x._2.asInstanceOf[S], x._1.stateData, x._1.event))
    if (full) {
      // buffer has wrapped: entries from pos..end are the oldest
      IndexedSeq() ++ log.drop(pos) ++ log.take(pos)
    } else {
      IndexedSeq() ++ log
    }
  }
}
| rorygraves/perf_tester | corpus/akka/akka-actor/src/main/scala/akka/actor/FSM.scala | Scala | apache-2.0 | 28,391 |
package com.scalaAsm.x86
package Instructions
package General
// Description: Clear Carry Flag
// Category: general/flgctrl

/** Definition of the x86 CLC (Clear Carry Flag) instruction. */
trait CLC extends InstructionDefinition {
  val mnemonic = "CLC"
}

/** CLC takes no operands, hence the ZeroOperands mixin. */
object CLC extends ZeroOperands[CLC] with CLCImpl

trait CLCImpl extends CLC {
  // Single encoding: one-byte opcode 0xF8, no operands.
  implicit object _0 extends NoOp{
    val opcode: OneOpcode = 0xF8
  }
}
| bdwashbu/scala-x86-inst | src/main/scala/com/scalaAsm/x86/Instructions/General/CLC.scala | Scala | apache-2.0 | 351 |
package mismatch.geohash.console
import scala.util.control.Exception.allCatch
import unfiltered.request._
import unfiltered.response._
import unfiltered.directives._, Directives._
import unfiltered.filter.Plan
import org.json4s.JValue
import org.json4s.JsonDSL._
import ch.hsr.geohash.GeoHash
/**
 * Unfiltered Plan serving a small geohash debugging API.
 *
 * GET /hashes/point?lat=..&lng=..&bits=..[&withBBox=true] returns the binary
 * geohash of the given point, optionally with its bounding box.
 */
class GeohashDebugConsole extends Plan {
  /**
   * Render a geohash as JSON.
   *
   * @param hash     geohash to render
   * @param withBBox when Some(true), include the hash's bounding box;
   *                 otherwise only the binary representation is emitted
   */
  def geohashAsJson(hash: GeoHash, withBBox: Option[Boolean]): JValue = withBBox match {
    case Some(true) => {
      val bbox = hash.getBoundingBox
      ("binary" -> hash.toBinaryString) ~ ("bbox" ->
        ("minLat" -> bbox.getMinLat) ~ ("minLng" -> bbox.getMinLon) ~
        ("maxLat" -> bbox.getMaxLat) ~ ("maxLng" -> bbox.getMaxLon))
    }
    case _ => ("binary" -> hash.toBinaryString)
  }
  // Lenient boolean extractor: any string that fails Boolean parsing is
  // treated as absent rather than an error (via allCatch.opt).
  object asBool extends data.Fallible[String, Boolean](s => allCatch.opt {s.toBoolean})
  def intent = Directive.Intent {
    case GET(Path(Seg("hashes" :: geom :: Nil))) => {
      // Collects all parameter-validation messages into one 400 response.
      case class BadParam(msg: String) extends ResponseJoiner(msg)(msgs =>
        BadRequest ~> ResponseString(msgs.mkString("", "\\n", "\\n")))
      def incorrectCoordinate(coordName: String)(paramName: String, paramValue: Double) =
        BadParam(s"$paramValue is not correct value of $coordName")
      def isInRange(left: Double, right: Double)(value: Double) = (left <= value && value <= right)
      // Range-checked interpreters for the coordinate and precision params.
      val latInterpreter = data.Conditional[Double](isInRange(-90.0, 90.0)).fail(incorrectCoordinate("latitude"))
      val lngInterpreter = data.Conditional[Double](isInRange(-180.0, 180.0)).fail(incorrectCoordinate("longitude"))
      val bitsInterpreter = data.Conditional[Int](b => (1 to 64) contains b).fail(
        (_, v) => BadParam(s"$v is out of bits range [1, 64]"))
      implicit val doubleValue = data.as.String ~> data.as.Double
      implicit val intValue = data.as.String ~> data.as.Int
      implicit def required[T] = data.Requiring[T].fail(name =>
        BadParam(s"'$name' is required")
      )
      geom match {
        case "point" =>
          // lat/lng/bits are mandatory; withBBox is optional
          for {
            (lat & lng & bits & withBBox) <-
              (latInterpreter ~> required named "lat") &
              (lngInterpreter ~> required named "lng") &
              (bitsInterpreter ~> required named "bits") &
              (asBool named "withBBox")
          } yield Json(geohashAsJson(GeoHash.withBitPrecision(lat, lng, bits), withBBox))
        case _ => error(ResponseString(s"'$geom' is not supported"))
      }
    }
  }
}
/**
 * Entry point: serves the static console UI under /console and mounts the
 * [[GeohashDebugConsole]] plan on an embedded Jetty server.
 */
object Server {
  import java.net.URL

  def main(args: Array[String]): Unit = {
    // First CLI argument overrides the default port of 8080.
    val port = args.headOption.fold(8080)(_.toInt)
    unfiltered.jetty.Server
      .http(port)
      .context("/console") { _.resources(new URL(getClass().getResource("/www/index.html"), ".")) }
      .plan(new GeohashDebugConsole)
      .run()
  }
}
| mismatch/geohash-debug-console | src/main/scala/GeohashDebugConsole.scala | Scala | mit | 2,782 |
package com.scalaAsm.x86
package Instructions
package x87
// Description: Reverse Divide
// Category: general/arith

/** Definition of the x87 FIDIVR (integer reverse divide) instruction. */
trait FIDIVR extends InstructionDefinition {
  val mnemonic = "FIDIVR"
}

/** FIDIVR takes a single memory operand (the integer divisor source). */
object FIDIVR extends OneOperand[FIDIVR] with FIDIVRImpl

trait FIDIVRImpl extends FIDIVR {
  // m32 form: opcode 0xDA with ModRM reg-field extension /7.
  implicit object _0 extends OneOp[m32] {
    val opcode: OneOpcode = 0xDA /+ 7
    val format = RmFormat
    override def hasImplicitOperand = true
  }
  // m16 form: opcode 0xDE with ModRM reg-field extension /7.
  implicit object _1 extends OneOp[m16] {
    val opcode: OneOpcode = 0xDE /+ 7
    val format = RmFormat
    override def hasImplicitOperand = true
  }
}
| bdwashbu/scala-x86-inst | src/main/scala/com/scalaAsm/x86/Instructions/x87/FIDIVR.scala | Scala | apache-2.0 | 593 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset for code snippets that match specific criteria and returns a sample of them, giving a quick overview of the dataset's contents.