code
stringlengths 5
1M
| repo_name
stringlengths 5
109
| path
stringlengths 6
208
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 5
1M
|
|---|---|---|---|---|---|
package org.jetbrains.plugins.scala
package format
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.extensions.PsiElementExt
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.ScInterpolatedStringLiteral
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScBlockExpr, ScExpression}
import scala.collection.mutable
import scala.util.matching.Regex
/**
* Pavel Fatin
*/
/**
 * Parses a Scala interpolated string literal (s"...", f"...", raw"...") into a
 * sequence of [[StringPart]]s: literal text runs, injections with optional
 * format specifiers, and special format escapes.
 */
object InterpolatedStringParser extends StringParser {
  import FormattedStringParser.FormatSpecifierStartPattern

  override def parse(element: PsiElement): Option[Seq[StringPart]] =
    parse(element, checkStripMargin = true)

  /**
   * Parses `element` if it is an interpolated string literal.
   *
   * @param checkStripMargin when true, literals wrapped in a `.stripMargin` call
   *                         are delegated to [[StripMarginParser]] instead.
   * @return the decomposed parts, or None if `element` is not an interpolated literal
   */
  private[format] def parse(element: PsiElement, checkStripMargin: Boolean): Option[Seq[StringPart]] = {
    if (checkStripMargin) {
      element match {
        case literal@WithStrippedMargin(_, _) =>
          return StripMarginParser.parse(literal)
        case _ =>
      }
    }
    element match {
      case literal: ScInterpolatedStringLiteral =>
        Some(parseLiteral(literal))
      case _ =>
        None
    }
  }

  /** Walks the literal's PSI children and classifies each into text, injection or escape. */
  private def parseLiteral(literal: ScInterpolatedStringLiteral): Seq[StringPart] = {
    // The "f" interpolator enables printf-style format specifiers after injections.
    val formatted = literal.firstChild.exists(_.textMatches("f"))
    // Pair each child (after the interpolator prefix) with its following sibling so an
    // injection can look ahead at the text element that may carry its format specifier.
    val pairs: Seq[(PsiElement, Option[PsiElement])] = {
      val elements = literal.children.toList.drop(1)
      elements.zipAll(elements.drop(1).map(Some(_)), null, None)
    }
    val isRaw = literal.kind == ScInterpolatedStringLiteral.Raw
    val parts = pairs.collect {
      // `str` or `{2 + 2}`
      // in input: s"$str ${2 + 2}"
      case (expression: ScExpression, nextOpt: Option[PsiElement]) =>
        // Unwrap a single-expression block so that ${x} is treated like $x.
        val actualExpression = expression match {
          case block: ScBlockExpr =>
            val blockExpressions = block.exprs
            if (blockExpressions.length == 1) blockExpressions.head
            else block
          case it => it
        }
        // In an f-interpolator, the text right after an injection may start with a
        // format specifier (e.g. "%d") that belongs to this injection.
        val specifier = if (!formatted) None else nextOpt match {
          case Some(next) if isTextElement(next) =>
            val nextText = textIn(next, isRaw)
            val matched = FormatSpecifierStartPattern.findFirstIn(nextText)
            matched.map { format =>
              Specifier(Span(next, 0, format.length), format)
            }
          case _ => None
        }
        Injection(actualExpression, specifier)
      // `text`
      // in input: s"${a}text${b}"
      case (e, _) if isTextElement(e) =>
        val text: String = {
          val value = textIn(e, isRaw)
          // specifier was already handled in previous step, when handling injection, so drop it here
          if (formatted) FormatSpecifierStartPattern.replaceFirstIn(value, "")
          else value
        }
        Text(text)
      // `$$`
      // in input: s"$$"
      case (e, _) if e.getNode.getElementType == ScalaTokenTypes.tINTERPOLATED_STRING_ESCAPE =>
        Text("$")
    }
    // The first text token still carries the opening quote(s); strip them off.
    val withFixedLeadingQuote = parts match {
      case Text(s) :: tail =>
        Text(s.drop(literal.quoteLength)) :: tail
      case it => it
    }
    // For f-interpolators, split out %% and %n escapes into dedicated parts.
    val withEscapedPercents = if (!formatted) withFixedLeadingQuote else {
      withFixedLeadingQuote.flatMap {
        case t: Text => processSpecialFormatEscapes(t)
        case part => List(part)
      }
    }
    // Drop empty text runs produced by quote stripping or escape splitting.
    val withoutEmpty = withEscapedPercents.filter {
      case Text("") => false
      case _ => true
    }
    withoutEmpty
  }

  // True for the single-line and multiline string token elements inside the literal.
  private def isTextElement(e: PsiElement): Boolean = {
    val elementType = e.getNode.getElementType
    elementType == ScalaTokenTypes.tINTERPOLATED_STRING ||
    elementType == ScalaTokenTypes.tINTERPOLATED_MULTILINE_STRING
  }

  // Element text with escape sequences decoded (left untouched when the literal is raw).
  private def textIn(part: PsiElement, isRaw: Boolean): String = {
    val text = part.getText
    ScalaStringUtils.unescapeStringCharacters(text, isRaw)
  }

  // The two format escapes that need dedicated parts: "%%" and "%n".
  private val SpecialEscapeRegex = "(%%|%n)".r

  /** Splits a text part around %% / %n escapes; returns the part untouched when none occur. */
  private def processSpecialFormatEscapes(textPart: Text): Seq[StringPart] = {
    val text = textPart.value
    val matches: Seq[Regex.Match] = SpecialEscapeRegex.findAllMatchIn(text).toList
    if (matches.isEmpty) List(textPart)
    else processSpecialFormatEscapes(text, matches)
  }

  // Rebuilds the part list: plain Text between matches, a SpecialFormatEscape per match.
  private def processSpecialFormatEscapes(text: String, matches: Seq[Regex.Match]): Seq[StringPart] = {
    import SpecialFormatEscape._
    val result = new mutable.ArrayBuffer[StringPart](2 * matches.size + 1)
    var prevEnd = 0
    matches.foreach { m =>
      if (m.start > prevEnd) {
        result += Text(text.substring(prevEnd, m.start))
      }
      val specialEscape = m.matched match {
        case PercentChar.originalText => PercentChar
        case LineSeparator.originalText => LineSeparator
      }
      result += specialEscape
      prevEnd = m.end
    }
    if (prevEnd < text.length)
      result += Text(text.substring(prevEnd, text.length))
    result.toSeq
  }
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/format/InterpolatedStringParser.scala
|
Scala
|
apache-2.0
| 4,870
|
package com.catinthedark.yoba.entity
import com.catinthedark.yoba.Shared
import com.catinthedark.yoba.common.Const
/** Factory methods and data types for every entity that can appear in the world. */
object Creatures {

  /** A mammy that chases the player; its speed scales with the current level. */
  def create(shared: Shared, x: Float, z: Float): Creature =
    new Mammy(x, z, Const.Difficulty.mammySpeed(shared.lvl), Const.Physics.mammyWidth, Const.Physics.mammyDepth)

  /** A stationary road sign. */
  def createSign(shared: Shared, x: Float, z: Float): Creature =
    new Sign(x, z, 0f, Const.Physics.signWidth, Const.Physics.signDepth)

  /** A stationary street lamp. */
  def createLamp(shared: Shared, x: Float, z: Float): Creature =
    new Lamp(x, z, 0f, Const.Physics.lampWidth, Const.Physics.lampDepth)

  /** A stationary tree. */
  def createTree(shared: Shared, x: Float, z: Float): Creature =
    new Tree(x, z, 0f, Const.Physics.treeWidth, Const.Physics.treeDepth)

  /** A stationary bush. */
  def createBush(shared: Shared, x: Float, z: Float): Creature =
    new Bush(x, z, 0f, Const.Physics.bushWidth, Const.Physics.bushDepth)

  /**
   * Base type for world entities. Carries mutable position, size and death-animation
   * state. The ordering is reversed on z: sorting ascending yields decreasing z.
   */
  sealed trait Creature extends Ordered[Creature] {
    var x: Float
    var z: Float
    var speed: Float
    var width: Float
    var depth: Float
    // Mutable per-entity animation / death state.
    var deathAnimationStateTime: Float = 0f
    var fallSpeed: Float = 2f
    var isDying: Boolean = false
    // Reversed comparison on z (compares that.z against this.z).
    override def compare(that: Creature): Int = that.z.compareTo(z)
  }

  case class Man(var x: Float, var z: Float, var speed: Float,
                 var width: Float = Const.Physics.playerWidth,
                 var depth: Float = Const.Physics.playerDepth) extends Creature

  case class Mammy(var x: Float, var z: Float, var speed: Float,
                   var width: Float, var depth: Float) extends Creature

  case class Sign(var x: Float, var z: Float, var speed: Float,
                  var width: Float, var depth: Float) extends Creature

  case class Lamp(var x: Float, var z: Float, var speed: Float,
                  var width: Float, var depth: Float) extends Creature

  case class Tree(var x: Float, var z: Float, var speed: Float,
                  var width: Float, var depth: Float) extends Creature

  case class Bush(var x: Float, var z: Float, var speed: Float,
                  var width: Float, var depth: Float) extends Creature
}
|
cat-in-the-dark/old48_33_game
|
src/main/scala/com/catinthedark/yoba/entity/Creatures.scala
|
Scala
|
mit
| 2,448
|
package mesosphere.marathon
package integration
import mesosphere.AkkaIntegrationTest
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.integration.facades.MarathonFacade._
import mesosphere.marathon.integration.setup.EmbeddedMarathonTest
import mesosphere.marathon.state.AbsolutePathId
/**
 * Integration tests for Marathon's task-killing behavior, running against an
 * embedded Marathon with the `task_killing` feature flag enabled.
 */
class TaskKillingIntegrationTest extends AkkaIntegrationTest with EmbeddedMarathonTest {

  // Turn on the TASK_KILLING task state feature for the embedded Marathon.
  override val marathonArgs: Map[String, String] = Map("enable_features" -> "task_killing")

  "TaskKilling" should {
    "Killing a task publishes a TASK_KILLING event" in {
      Given("a new app")
      val app = appProxy(testBasePath / "app-to-kill", "v1", instances = 1, healthCheck = None)

      When("The app is deployed")
      val createResult = marathon.createAppV2(app)

      Then("The app is created")
      createResult should be(Created)
      extractDeploymentIds(createResult) should have size 1
      waitForDeployment(createResult)
      waitForTasks(AbsolutePathId(app.id), 1) //make sure, the app has really started

      When("the task is killed")
      val killResult = marathon.killAllTasksAndScale(AbsolutePathId(app.id))
      killResult should be(OK)

      // We used to wait for TASK_KILLING here, however in rare cases the task would start and then fail immediately e.g.
      // because the port for `app_mock` is already bound.
      // When marathon tries to kill a FAILED task it will receive a TASK_UNKNOWN status from mesos. The deployment will
      // succeed in the end since the task is gone but we'll never see TASK_KILLING event.
      // We can simply wait for the deployment to succeed but that defeats the purpose of this test (we test kill-and-scale
      // elsewhere.
      // Either event indicates the task is on its way out; accept whichever arrives first.
      val waitingFor = Map[String, CallbackEvent => Boolean](
        "status_update_event" -> (_.taskStatus == "TASK_KILLING"),
        "unknown_instance_terminated_event" -> (_.info("instanceId").toString == app.id)
      )
      waitForAnyEventWith(s"waiting for task ${app.id} to be removed", waitingFor)
    }

    "Killing an ephemeral task without wipe or scale causes a new task to be restarted with a higher incarnation count" in {
      Given("a new app")
      val app = appProxy(testBasePath / "ephemeral-app-to-kill", "v1", instances = 1, healthCheck = None)
      val appId = AbsolutePathId(app.id)

      When("The app is deployed")
      val createResult = marathon.createAppV2(app)

      Then("The app is created")
      createResult should be(Created)
      extractDeploymentIds(createResult) should have size 1
      waitForDeployment(createResult)
      waitForTasks(appId, 1) //make sure, the app has really started

      When("the task is killed")
      val task = marathon.tasks(appId).value.headOption.value
      // wipe = false: the instance is kept and the task is restarted in place.
      val killResult = marathon.killTask(appId, task.id, false)
      killResult should be(OK)

      // The replacement task keeps the instance id but bumps the incarnation counter.
      val taskId = Task.Id.parse(task.id)
      val nextTaskId = Task.Id.nextIncarnationFor(taskId)
      eventually {
        val newTask = marathon.tasks(appId).value.headOption.value
        newTask.id shouldBe nextTaskId.idString
      }
    }

    "Killing an ephemeral task with wipe causes a brand new instance to be created" in {
      Given("a new app")
      val app = appProxy(testBasePath / "ephemeral-app-to-wipe", "v1", instances = 1, healthCheck = None)
      val appId = AbsolutePathId(app.id)

      When("The app is deployed")
      val createResult = marathon.createAppV2(app)

      Then("The app is created")
      createResult should be(Created)
      extractDeploymentIds(createResult) should have size 1
      waitForDeployment(createResult)
      waitForTasks(appId, 1) //make sure, the app has really started

      When("the task is killed with wipe = true")
      val task = marathon.tasks(appId).value.headOption.value
      val killResult = marathon.killTask(appId, task.id, wipe = true)
      killResult should be(OK)

      // With wipe the whole instance is discarded, so the replacement must carry
      // a different instance id (not just a higher incarnation).
      val taskId = Task.Id.parse(task.id)
      val instanceId = taskId.instanceId
      eventually {
        val newTask = marathon.tasks(appId).value.headOption.value
        Task.Id.parse(newTask.id).instanceId shouldNot be(instanceId)
      }
    }
  }
}
|
mesosphere/marathon
|
tests/integration/src/test/scala/mesosphere/marathon/integration/TaskKillingIntegrationTest.scala
|
Scala
|
apache-2.0
| 4,158
|
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.unicomplex
import java.io._
import java.net.URL
import java.util.concurrent.{TimeUnit, TimeoutException}
import java.util.jar.JarFile
import java.util.{Timer, TimerTask}
import akka.actor._
import akka.pattern.ask
import akka.routing.FromConfig
import akka.util.Timeout
import com.typesafe.config._
import com.typesafe.scalalogging.LazyLogging
import org.squbs.lifecycle.ExtensionLifecycle
import org.squbs.pipeline.PipelineSetting
import org.squbs.unicomplex.UnicomplexBoot.CubeInit
import org.squbs.util.ConfigUtil._
import scala.annotation.tailrec
import scala.collection.concurrent.TrieMap
import scala.collection.mutable
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.language.postfixOps
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}
object UnicomplexBoot extends LazyLogging {
// Config keys for locating external configuration and naming the actor system.
final val extConfigDirKey = "squbs.external-config-dir"
final val extConfigNameKey = "squbs.external-config-files"
final val actorSystemNameKey = "squbs.actorsystem-name"

// Startup timeout, overridable via -Dstartup.timeout=<millis>. Falls back to
// 1 minute when the property is missing or unparsable (the Try absorbs both
// the NPE from a null property and the NumberFormatException).
val defaultStartupTimeout: Timeout =
  Try(System.getProperty("startup.timeout").toLong) map { millis =>
    akka.util.Timeout(millis, TimeUnit.MILLISECONDS)
  } getOrElse (1 minute)

// The kinds of components a cube may declare in its squbs metadata.
object StartupType extends Enumeration {
  type StartupType = Value
  val
  // Identifies extensions
  EXTENSIONS,
  // Identifies actors as startup type
  ACTORS,
  // Identifies service as startup type
  SERVICES = Value
}

// A cube's identity plus its declared components grouped by startup type.
case class CubeInit(info: Cube, components: Map[StartupType.Value, Seq[Config]])

// Global registry of actor systems booted through UnicomplexBoot, keyed by system name.
val actorSystems = TrieMap.empty[String, ActorSystem]
/** Creates a boot record with `addOnConfig` layered over the default config stack. */
def apply(addOnConfig: Config): UnicomplexBoot = {
  val bootTime = Timestamp(System.nanoTime, System.currentTimeMillis)
  val addOn = Option(addOnConfig)
  UnicomplexBoot(bootTime, addOn, getFullConfig(addOn))
}

/** Creates a boot record using a custom actor system factory and no add-on config. */
def apply(actorSystemCreator: (String, Config) => ActorSystem): UnicomplexBoot = {
  val bootTime = Timestamp(System.nanoTime, System.currentTimeMillis)
  UnicomplexBoot(bootTime, None, getFullConfig(None), actorSystemCreator)
}
/**
 * Builds the effective configuration. With an add-on config, it is layered over the
 * default config stack. Without one, external config files from the configured
 * external-config directory are layered over the base config instead.
 *
 * @param addOnConfig optional config to take precedence over the classpath config
 * @return the resolved, merged configuration
 */
def getFullConfig(addOnConfig: Option[Config]): Config = {
  val baseConfig = ConfigFactory.load()
  // 1. See whether add-on config is there.
  addOnConfig match {
    case Some(config) =>
      ConfigFactory.load(config withFallback baseConfig)
    case None =>
      // Sorry, the configDir is used to read the file. So it cannot be read from this config file.
      val configDir = new File(baseConfig.getString(extConfigDirKey))
      // JavaConverters instead of the deprecated implicit JavaConversions; also copy
      // rather than mutate the java.util.List returned by the config library.
      import scala.collection.JavaConverters._
      val configNames = baseConfig.getStringList(extConfigNameKey).asScala :+ "application"
      val parseOptions = ConfigParseOptions.defaults().setAllowMissing(true)
      val addConfigs = configNames map { name =>
        ConfigFactory.parseFileAnySyntax(new File(configDir, name), parseOptions)
      }
      if (addConfigs.isEmpty) baseConfig
      // Earlier files win: fold right so each config falls back to the ones after it.
      else ConfigFactory.load(addConfigs.foldRight(baseConfig)(_ withFallback _))
  }
}
/** Scans the given jar paths for squbs metadata and resolves cubes from those that have it. */
private[unicomplex] def scan(jarNames: Seq[String])(boot: UnicomplexBoot): UnicomplexBoot = {
  // Keep only the jars whose metadata could actually be read.
  val jarConfigs = for {
    (jar, maybeConfig) <- jarNames zip (jarNames map readConfigs)
    config <- maybeConfig
  } yield (jar, config)
  resolveCubes(jarConfigs, boot.copy(jarNames = jarNames))
}
/**
 * Scans the given resource URLs (and optionally the classpath's META-INF/squbs-meta.*
 * entries) for cube metadata and resolves cubes from them.
 *
 * @param resources     explicit metadata resources to scan
 * @param withClassPath also scan the classloader's META-INF/squbs-meta.{conf,json,properties}
 */
private[unicomplex] def scanResources(resources: Seq[URL],
                                      withClassPath: Boolean = true)(boot: UnicomplexBoot): UnicomplexBoot = {
  val cpResources: Seq[URL] =
    if (withClassPath) {
      val loader = getClass.getClassLoader
      // JavaConverters instead of the deprecated implicit JavaConversions.
      import scala.collection.JavaConverters._
      Seq("conf", "json", "properties") flatMap { ext =>
        loader.getResources(s"META-INF/squbs-meta.$ext").asScala
      }
    } else Seq.empty
  // Dedup the resources, just in case. LinkedHashSet preserves first-seen order.
  val allResources = mutable.LinkedHashSet(cpResources ++ resources : _*).toSeq
  val jarConfigs = allResources map readConfigs collect { case Some(jarCfg) => jarCfg }
  resolveCubes(jarConfigs, boot)
}
// Parses each jar's metadata into a CubeInit (dropping unreadable ones), disambiguates
// duplicate cube aliases, resolves listeners, and returns the updated boot record.
private[this] def resolveCubes(jarConfigs: Seq[(String, Config)], boot: UnicomplexBoot) = {
  val cubeList = resolveAliasConflicts(jarConfigs map { case (jar, config) => readCube(jar, config) } collect {
    case Some(cube) => cube
  })
  // Read listener and alias information.
  val (activeAliases, activeListeners, missingAliases) = findListeners(boot.config, cubeList)
  missingAliases foreach { name => logger.warn(s"Requested listener $name not found!") }
  boot.copy(cubes = cubeList, jarConfigs = jarConfigs, listeners = activeListeners, listenerAliases = activeAliases)
}
/**
 * Returns a lookup that resolves a relative path under `directory` to a UTF-8 reader,
 * yielding Some only when the path names an existing regular file.
 */
private def createReaderFromFS(directory: File): String => Option[Reader] = {
  (filePath: String) => {
    val candidate = new File(directory, filePath)
    if (candidate.isFile) Some(new InputStreamReader(new FileInputStream(candidate), "UTF-8"))
    else None
  }
}
// Returns a lookup that resolves entries inside `file` (a jar) to UTF-8 readers.
// The JarFile is opened once and shared across lookups; a failure to open it is
// re-thrown on every lookup. NOTE(review): the JarFile itself is never closed here —
// callers only close the individual readers; presumably acceptable for one-shot
// boot-time scanning, but worth confirming.
private def createReaderFromJarFile(file: File): String => Option[Reader] = {
  val triedJarFile = Try(new JarFile(file))
  (filePath: String) => triedJarFile match {
    case Success(jarFile) => Option(jarFile.getEntry(filePath)) collect {
      case configFile if !configFile.isDirectory => new InputStreamReader(jarFile.getInputStream(configFile), "UTF-8")
    }
    case Failure(e) => throw e
  }
}
// Finds the first readable META-INF/squbs-meta.<ext> in the jar or directory.
// Outer None: jarName is neither an existing file nor a directory, or every
// extension's lookup threw. Inner Option is None when the entry simply does not
// exist for that extension.
private def getConfigReader(jarName: String): Option[(Option[Reader], String)] = {
  // Make it extra lazy, so that we do not create the next File if the previous one succeeds.
  val configExtensions = Stream("conf", "json", "properties")
  val maybeConfFileReader = Option(new File(jarName)) collect {
    case file if file.isDirectory => createReaderFromFS(file)
    case file if file.isFile => createReaderFromJarFile(file)
  }
  // Laziness of Stream means extensions after the first hit are never probed.
  maybeConfFileReader flatMap (fileReader => configExtensions map { ext =>
    val currentFile = s"META-INF/squbs-meta.$ext"
    Try(fileReader(currentFile)) match {
      case Failure(e) =>
        logger.info(s"${e.getClass.getName} reading configuration from $jarName : $currentFile.\\n${e.getMessage}")
        None
      case Success(maybeReader) => Option(maybeReader, currentFile)
    }
  } find (_.isDefined) flatten)
}
// Reads and parses the squbs metadata for one jar/classpath entry.
// The reader is always closed after the parse attempt; parse failures are
// logged at info level and yield None rather than failing the scan.
private[this] def readConfigs(jarName: String): Option[Config] = {
  // .tupled adapts the two-arg function to the (Option[Reader], String) pair from getConfigReader.
  getConfigReader(jarName) flatMap ((maybeReader: Option[Reader], fileName: String) => {
    val maybeConfig = Try(maybeReader map ConfigFactory.parseReader) match {
      case Failure(e) =>
        logger.info(s"${e.getClass.getName} reading configuration from $jarName : $fileName.\\n${e.getMessage}")
        None
      case Success(cfg) => cfg
    }
    maybeReader foreach(_.close())
    maybeConfig
  }).tupled
}
// Parses the metadata at `resource` and derives a human-readable jar/classpath-entry
// name for it. Returns None (with a warning) when the resource cannot be parsed.
private[this] def readConfigs(resource: URL): Option[(String, Config)] = {
  // Taking the best guess at the jar name or classpath entry. Should work most of the time.
  val jarName = resource.getProtocol match {
    case "jar" =>
      // Strip the intra-jar path after "!" and unwrap the inner URL.
      val jarURL = new URL(resource.getPath.split('!')(0))
      jarURL.getProtocol match {
        case "file" => jarURL.getPath
        case _ => jarURL.toString
      }
    case "file" => // We assume the classpath entry ends before the last /META-INF/
      val path = resource.getPath
      val endIdx = path.lastIndexOf("/META-INF/")
      if (endIdx > 0) path.substring(0, endIdx) else path
    case _ =>
      val path = resource.toString
      val endIdx = path.lastIndexOf("/META-INF/")
      if (endIdx > 0) path.substring(0, endIdx) else path
  }
  try {
    // setAllowMissing(false): a resource we were explicitly handed must exist.
    val config = ConfigFactory.parseURL(resource, ConfigParseOptions.defaults().setAllowMissing(false))
    Some((jarName, config))
  } catch {
    case NonFatal(e) =>
      logger.warn(s"${e.getClass.getName} reading configuration from $jarName.\\n ${e.getMessage}")
      None
  }
}
/**
 * Builds a CubeInit from one jar's metadata config.
 *
 * Both `cube-name` and `cube-version` are mandatory; if either is missing or of the
 * wrong type the config is not a cube descriptor and None is returned. Rewritten
 * from try/catch-with-return to Try + for-comprehension.
 *
 * @param jarPath path of the jar the config came from, recorded in the Cube info
 */
private[this] def readCube(jarPath: String, config: Config): Option[CubeInit] = {
  for {
    cubeName <- Try(config.getString("cube-name")).toOption
    cubeVersion <- Try(config.getString("cube-version")).toOption
  } yield {
    // Short alias: the last dot-separated segment of the cube name.
    val cubeAlias = cubeName.substring(cubeName.lastIndexOf('.') + 1)
    // Collect only the component sections that are actually present.
    val components = Seq(
      config.getOption[Seq[Config]]("squbs-actors") map ((StartupType.ACTORS, _)),
      config.getOption[Seq[Config]]("squbs-services") map ((StartupType.SERVICES, _)),
      config.getOption[Seq[Config]]("squbs-extensions") map ((StartupType.EXTENSIONS, _))
    ).flatten.toMap
    CubeInit(Cube(cubeAlias, cubeName, cubeVersion, jarPath), components)
  }
}
// Resolve cube alias conflict by making it longer on demand.
// Repeatedly extends conflicting aliases by one more dotted segment of the cube's
// full name until all aliases are unique; fails if two cubes share the exact full name.
@tailrec
private[unicomplex] def resolveAliasConflicts(cubeList: Seq[CubeInit]): Seq[CubeInit] = {
  // Group full names by current alias; any alias mapping to >1 full name is a conflict.
  val aliasConflicts = cubeList map { cube =>
    (cube.info.name, cube.info.fullName)
  } groupBy (_._1) mapValues { seq =>
    (seq map (_._2)).toSet
  } filter { _._2.size > 1 }
  if (aliasConflicts.isEmpty) cubeList
  else {
    var updated = false
    val newAliases = (aliasConflicts flatMap { case (alias, conflicts) =>
      conflicts.toSeq map { symName =>
        // Take one more segment from the full name: find the dot before the current alias.
        val idx = symName.lastIndexOf('.', symName.length - alias.length - 2)
        if (idx > 0) {
          updated = true
          (symName, symName.substring(idx + 1))
        }
        else (symName, symName)
      }
    }).toSeq
    if (updated) {
      val updatedList = cubeList map { cube =>
        newAliases find { case (symName, alias) => symName == cube.info.fullName } match {
          case Some((symName, alias)) => cube.copy(info = cube.info.copy(name = alias))
          case None => cube
        }
      }
      // Recurse: extending one alias may have created a new conflict.
      resolveAliasConflicts(updatedList)
    }
    // No alias could be extended further => identical full names; unrecoverable.
    else sys.error("Duplicate cube names: " + (aliasConflicts flatMap (_._2) mkString ", "))
  }
}
// Starts one cube: creates its CubeSupervisor, registers it with the Unicomplex,
// then starts every declared actor and service component. Returns the supervisor's
// "all started" ack future plus the (fullName, name, version, class) info of each
// successfully started component. Failures are logged and reported to the
// supervisor via StartFailure rather than aborting the boot.
private[unicomplex] def startComponents(cube: CubeInit, aliases: Map[String, String])
                                       (implicit actorSystem: ActorSystem,
                                        timeout: Timeout = UnicomplexBoot.defaultStartupTimeout) = {
  import cube.components
  import cube.info.{fullName, jarPath, name, version}
  val cubeSupervisor = actorSystem.actorOf(Props[CubeSupervisor], name)
  Unicomplex(actorSystem).uniActor ! CubeRegistration(cube.info, cubeSupervisor)

  // Starts one actor component declared in the cube metadata.
  def startActor(actorConfig: Config): Option[(String, String, String, Class[_])] = {
    val className = actorConfig getString "class-name"
    // Actor name defaults to the simple class name.
    val name = actorConfig.get[String]("name", className substring (className.lastIndexOf('.') + 1))
    val withRouter = actorConfig.get[Boolean]("with-router", false)
    val initRequired = actorConfig.get[Boolean]("init-required", false)
    try {
      val clazz = Class.forName(className, true, getClass.getClassLoader)
      // Fails fast (ClassCastException) if the class is not an Actor.
      clazz asSubclass classOf[Actor]
      // Create and the props for this actor to be started, optionally enabling the router.
      val props = if (withRouter) Props(clazz) withRouter FromConfig() else Props(clazz)
      // Send the props to be started by the cube.
      cubeSupervisor ! StartCubeActor(props, name, initRequired)
      Some((fullName, name, version, clazz))
    } catch {
      case NonFatal(e) =>
        val t = getRootCause(e)
        logger.warn(s"Can't load actor: $className.\\n" +
          s"Cube: $fullName $version\\n" +
          s"Path: $jarPath\\n" +
          s"${t.getClass.getName}: ${t.getMessage}")
        t.printStackTrace()
        cubeSupervisor ! StartFailure(e)
        None
    }
  }

  // Tries to start `clazz` as one of the four route/flow definition flavors;
  // returns Failure when it is none of them (caller then tries startServiceActor).
  def startServiceRoute(clazz: Class[_], webContext: String, listeners: Seq[String],
                        ps: PipelineSetting) = {
    Try {
      (clazz asSubclass classOf[RouteDefinition], classOf[RouteActor])
    } orElse Try {
      (clazz asSubclass classOf[FlowDefinition], classOf[FlowActor])
    } orElse Try {
      (clazz asSubclass classOf[AbstractRouteDefinition], classOf[JavaRouteActor])
    } orElse Try {
      (clazz asSubclass classOf[AbstractFlowDefinition], classOf[JavaFlowActor])
    } map { case (routeClass, routeActor) =>
      val props = Props(routeActor, webContext, routeClass)
      val className = clazz.getSimpleName
      // Derive a unique actor name from the web context.
      val actorName =
        if (webContext.length > 0) s"${webContext.replace('/', '_')}-$className-route"
        else s"root-$className-route"
      cubeSupervisor ! StartCubeService(webContext, listeners, props, actorName, ps, initRequired = true)
      (fullName, name, version, clazz)
    }
  }

  // This same creator class is available in Akka's Props.scala but it is inaccessible to us.
  class TypedCreatorFunctionConsumer(clz: Class[_ <: Actor], creator: () => Actor) extends IndirectActorProducer {
    override def actorClass = clz
    override def produce() = creator()
  }

  // Starts `clazz` as a plain service actor bound to the web context.
  def startServiceActor(clazz: Class[_], webContext: String, listeners: Seq[String],
                        ps: PipelineSetting, initRequired: Boolean) =
    Try {
      val actorClass = clazz asSubclass classOf[Actor]
      // Instantiate with the web context in scope so the actor can pick it up.
      def actorCreator: Actor = WithWebContext(webContext) { actorClass.newInstance() }
      val props = Props(classOf[TypedCreatorFunctionConsumer], clazz, actorCreator _)
      val className = clazz.getSimpleName
      val actorName =
        if (webContext.length > 0) s"${webContext.replace('/', '_')}-$className-handler"
        else s"root-$className-handler"
      cubeSupervisor ! StartCubeService(webContext, listeners, props, actorName, ps, initRequired)
      (fullName, name, version, actorClass)
    }

  // Starts one service component: resolves its listeners, then tries route-style
  // first and falls back to a plain service actor.
  def startService(serviceConfig: Config): Option[(String, String, String, Class[_])] =
    Try {
      val className = serviceConfig.getString("class-name")
      val clazz = Class.forName(className, true, getClass.getClassLoader)
      val webContext = serviceConfig.getString("web-context")
      val pipeline = serviceConfig.getOption[String]("pipeline")
      val defaultFlowsOn = serviceConfig.getOption[Boolean]("defaultPipeline")
      val pipelineSettings = (pipeline, defaultFlowsOn)
      // Map declared listener names through the alias table; "*" binds all listeners.
      val listeners = serviceConfig.getOption[Seq[String]]("listeners").fold(Seq("default-listener")) { list =>
        if (list.contains("*")) aliases.values.toSeq.distinct
        else list flatMap { entry =>
          aliases.get(entry) match {
            case Some(listener) => Seq(listener)
            case None =>
              logger.warn(s"Listener $entry required by $fullName is not configured. Ignoring.")
              Seq.empty[String]
          }
        }
      }
      val service = startServiceRoute(clazz, webContext, listeners, pipelineSettings) orElse
        startServiceActor(clazz, webContext, listeners, pipelineSettings,
          serviceConfig.get[Boolean]("init-required", false))
      service match {
        case Success(svc) => svc
        case Failure(e) =>
          throw new IOException(s"Class $className is neither a RouteDefinition nor an Actor.", e)
      }
    } match {
      case Success(svc) => Some(svc)
      case Failure(e) =>
        val t = getRootCause(e)
        logger.warn(s"Can't load service definition $serviceConfig.\\n" +
          s"Cube: $fullName $version\\n" +
          s"Path: $jarPath\\n" +
          s"${t.getClass.getName}: ${t.getMessage}")
        t.printStackTrace()
        cubeSupervisor ! StartFailure(e)
        None
    }

  val actorConfigs = components.getOrElse(StartupType.ACTORS, Seq.empty)
  val routeConfigs = components.getOrElse(StartupType.SERVICES, Seq.empty)
  val actorInfo = actorConfigs map startActor
  val routeInfo = routeConfigs map startService
  val startedF = cubeSupervisor ? Started // Tell the cube all actors to be started are started.
  logger.info(s"Started cube $fullName $version")
  val componentInfo = (actorInfo ++ routeInfo) collect { case Some(component) => component }
  (startedF, componentInfo)
}
/**
 * Extracts all listener definitions from the root config: any top-level config
 * object declaring `type = squbs.listener`. Duplicate listener names are reported
 * and the first definition wins.
 */
def configuredListeners(config: Config): Map[String, Config] = {
  // JavaConverters instead of the deprecated implicit JavaConversions.
  import scala.collection.JavaConverters._
  val listeners = config.root.asScala.toSeq collect {
    case (n, v: ConfigObject) if v.toConfig.getOption[String]("type").contains("squbs.listener") => (n, v.toConfig)
  }
  resolveDuplicates[Config](listeners, (name, _, _) =>
    logger.warn(s"Duplicate listener $name already declared. Ignoring.")
  )
}
/**
 * Builds the alias table: each listener maps from its own name and from every name
 * in its `aliases` list. Duplicate aliases are reported and the first wins.
 */
def findListenerAliases(listeners: Map[String, Config]): Map[String, String] = {
  val aliasPairs = listeners.toSeq flatMap { case (name, config) =>
    val aliasNames = config.get[Seq[String]]("aliases", Seq.empty[String])
    (name, name) +: (aliasNames map (alias => (alias, name)))
  }
  resolveDuplicates[String](aliasPairs, (alias, listener, l) =>
    logger.warn(s"Duplicate alias $alias for listener $listener already declared for listener $l. Ignoring.")
  )
}
/**
 * Collapses a key/value sequence into a map, keeping the FIRST value for each key and
 * invoking `duplicateHandler(key, duplicateValue, keptValue)` for every later one.
 *
 * Fixed: the previous implementation pattern-matched the grouped values against
 * `head :: tail`, which only matches List — any other Seq (e.g. Vector) containing
 * duplicates threw a MatchError. groupBy never yields empty groups, so head is safe.
 */
def resolveDuplicates[T](in: Seq[(String, T)], duplicateHandler: (String, T, T) => Unit): Map[String, T] = {
  in.groupBy(_._1).map { case (key, pairs) =>
    val keep = pairs.head._2
    pairs.tail.foreach { case (k, dup) => duplicateHandler(k, dup, keep) }
    key -> keep
  }
}
// Determines which configured listeners are actually demanded by services.
// Returns (activeAliases, activeListeners, missingAliases): the alias entries in
// use, the listener configs to start, and demanded names with no matching alias.
def findListeners(config: Config, cubes: Seq[CubeInit]) = {
  // Every listener name referenced by any service component, wildcard excluded.
  val demandedListeners =
    for {
      routes <- cubes.map { _.components.get(StartupType.SERVICES) }.collect { case Some(routes) => routes }.flatten
      routeListeners <- routes.get[Seq[String]]("listeners", Seq("default-listener"))
      if routeListeners != "*" // Filter out wildcard listener bindings, not starting those.
    } yield {
      routeListeners
    }
  val listeners = configuredListeners(config)
  val aliases = findListenerAliases(listeners)
  // Keep only the aliases some service demanded; the rest of the listeners stay idle.
  val activeAliases = aliases filter { case (n, _) => demandedListeners contains n }
  val missingAliases = demandedListeners filterNot { l => activeAliases exists { case (n, _) => n == l } }
  val activeListenerNames = activeAliases.values
  val activeListeners = listeners filter { case (n, c) => activeListenerNames exists (_ == n) }
  (activeAliases, activeListeners, missingAliases)
}
// Starts every active listener and blocks until all acknowledge.
// The timeout scales with the listener count (5 seconds per listener).
def startServiceInfra(boot: UnicomplexBoot)(implicit actorSystem: ActorSystem) {
  import actorSystem.dispatcher
  val startTime = System.nanoTime
  implicit val timeout = Timeout((boot.listeners.size * 5) seconds)
  val ackFutures =
    for ((listenerName, config) <- boot.listeners) yield {
      Unicomplex(actorSystem).uniActor ? StartListener(listenerName, config)
    }
  // Block for the web service to be started.
  Await.ready(Future.sequence(ackFutures), timeout.duration)
  val elapsed = (System.nanoTime - startTime) / 1000000
  logger.info(s"Web Service started in $elapsed milliseconds")
}
/** Walks the cause chain of `e` and returns the innermost (root) throwable. */
@tailrec
private[unicomplex] def getRootCause(e: Throwable): Throwable = {
  e.getCause match {
    case null => e
    case cause => getRootCause(cause)
  }
}
}
case class UnicomplexBoot private[unicomplex](startTime: Timestamp,
addOnConfig: Option[Config] = None,
config: Config,
actorSystemCreator: (String, Config) => ActorSystem = { (name, config) => ActorSystem(name, config) },
cubes: Seq[CubeInit] = Seq.empty,
listeners: Map[String, Config] = Map.empty,
listenerAliases: Map[String, String] = Map.empty,
jarConfigs: Seq[(String, Config)] = Seq.empty,
jarNames: Seq[String] = Seq.empty,
actors: Seq[(String, String, String, Class[_])] = Seq.empty,
extensions: Seq[Extension] = Seq.empty,
started: Boolean = false,
stopJVM: Boolean = false) extends LazyLogging {
import UnicomplexBoot._

// Actor system name, resolved from configuration.
def actorSystemName = config.getString(actorSystemNameKey)
// The booted actor system, looked up in the global registry (valid after start()).
def actorSystem = UnicomplexBoot.actorSystems(actorSystemName)
// External configuration directory, resolved from configuration.
def externalConfigDir = config.getString(extConfigDirKey)
// Returns a copy of this boot record using the given actor system factory.
def createUsing(actorSystemCreator: (String, Config) => ActorSystem) = copy(actorSystemCreator = actorSystemCreator)
// Scans the given jar paths for squbs metadata and records the resolved cubes.
def scanComponents(jarNames: Seq[String]): UnicomplexBoot = scan(jarNames)(this)
def scanComponents(jarNames: Array[String]): UnicomplexBoot = scan(jarNames.toSeq)(this)
// Scans explicit resource paths (converted to URLs), optionally plus the classpath.
def scanResources(withClassPath: Boolean, resources: String*): UnicomplexBoot =
  UnicomplexBoot.scanResources(resources map (new File(_).toURI.toURL), withClassPath)(this)
def scanResources(resources: String*): UnicomplexBoot =
  UnicomplexBoot.scanResources(resources map (new File(_).toURI.toURL), withClassPath = true)(this)
// Loads every declared extension (ordered by its "sequence" value, unordered last),
// then runs the preInit and init lifecycle phases. Returns a copy carrying the
// initialized extensions.
def initExtensions: UnicomplexBoot = {
  val initSeq = cubes.flatMap { cube =>
    cube.components.getOrElse(StartupType.EXTENSIONS, Seq.empty) map { config =>
      val className = config getString "class-name"
      // Extensions without an explicit sequence sort last.
      val seqNo = config.get[Int]("sequence", Int.MaxValue)
      (seqNo, className, cube)
    }
  }.sortBy(_._1)
  // load extensions
  val extensions = initSeq map (loadExtension _).tupled
  // preInit extensions
  val preInitExtensions = extensions map extensionOp("preInit", _.preInit())
  // Init extensions
  val initExtensions = preInitExtensions map extensionOp("init", _.init())
  copy(extensions = initExtensions)
}
// Requests System.exit once the actor system terminates (see registerExtensionShutdown).
def stopJVMOnExit: UnicomplexBoot = copy(stopJVM = true)
// Starts the Unicomplex with the default startup timeout.
def start(): UnicomplexBoot = start(defaultStartupTimeout)
// Boots the Unicomplex: creates the actor system, registers it, runs the extension
// lifecycle phases around cube/service startup, waits for activation, and returns
// the started boot record. The message/await ordering here is the boot protocol —
// do not reorder. Throws IllegalStateException if already started.
def start(implicit timeout: Timeout): UnicomplexBoot = synchronized {
  if (started) throw new IllegalStateException("Unicomplex already started!")
  // Extensions may have changed the config. So we need to reload the config here.
  val newConfig = UnicomplexBoot.getFullConfig(addOnConfig)
  val newName = config.getString(UnicomplexBoot.actorSystemNameKey)
  implicit val actorSystem = {
    val system = actorSystemCreator(newName, newConfig)
    system.registerExtension(Unicomplex)
    Unicomplex(system).setScannedComponents(jarNames)
    system
  }
  // Track the system globally; deregister when it terminates.
  UnicomplexBoot.actorSystems += actorSystem.name -> actorSystem
  actorSystem.registerOnTermination {
    UnicomplexBoot.actorSystems -= actorSystem.name
  }
  registerExtensionShutdown(actorSystem)
  val uniActor = Unicomplex(actorSystem).uniActor
  // Send start time to Unicomplex
  uniActor ! startTime
  // Register extensions in Unicomplex actor
  uniActor ! Extensions(extensions)
  val startServices = listeners.nonEmpty && cubes.exists(_.components.contains(StartupType.SERVICES))
  // Notify Unicomplex that services will be started.
  if (startServices) uniActor ! PreStartWebService(listeners)
  // Signal started to Unicomplex.
  uniActor ! Started
  // preCubesInit extension phase, then publish the updated extension states.
  val preCubesInitExtensions = extensions map extensionOp("preCubesInit", _.preCubesInit())
  uniActor ! Extensions(preCubesInitExtensions)
  // Start all actors
  val (futures, actorsUnflat) = cubes.map(startComponents(_, listenerAliases)).unzip
  val actors = actorsUnflat.flatten
  import actorSystem.dispatcher
  Await.ready(Future.sequence(futures), timeout.duration)
  // Start the service infrastructure if services are enabled and registered.
  if (startServices) startServiceInfra(this)
  val postInitExtensions = preCubesInitExtensions map extensionOp("postInit", _.postInit())
  // Update the extension errors in Unicomplex actor, in case there are errors.
  uniActor ! Extensions(postInitExtensions)
  {
    // Tell Unicomplex we're done.
    // If activation times out, fall back to asking for the timed-out state once more.
    val stateFuture = Unicomplex(actorSystem).uniActor ? Activate
    Try(Await.result(stateFuture, timeout.duration)) recoverWith { case _: TimeoutException =>
      val recoverFuture = Unicomplex(actorSystem).uniActor ? ActivateTimedOut
      Try(Await.result(recoverFuture, timeout.duration))
    } match {
      case Success(Active) => logger.info(s"[$actorSystemName] activated")
      case Success(Failed) => logger.info(s"[$actorSystemName] initialization failed.")
      case e => logger.warn(s"[$actorSystemName] awaiting confirmation, $e.")
    }
  }
  val boot = copy(config = actorSystem.settings.config, actors = actors, extensions = postInitExtensions, started = true)
  Unicomplex(actorSystem).boot send boot
  boot
}
/**
 * Registers an ActorSystem termination hook that shuts down all extensions
 * (in reverse registration order) on a separate execution context.
 *
 * If `stopJVM` is set, a daemon timer force-exits the JVM after 5 seconds in
 * case extension shutdown hangs; otherwise the JVM exits when shutdown
 * completes (exit code 0 on success, -1 on failure).
 *
 * @param actorSystem the actor system whose termination triggers the shutdown
 */
def registerExtensionShutdown(actorSystem: ActorSystem) {
  if (extensions.nonEmpty) {
    actorSystem.registerOnTermination {
      // Run the shutdown in a different thread, not in the ActorSystem's onTermination thread.
      import scala.concurrent.Future
      // Kill the JVM if the shutdown takes longer than the timeout.
      if (stopJVM) {
        val shutdownTimer = new Timer(true)
        shutdownTimer.schedule(new TimerTask {
          def run() {
            System.exit(0)
          }
        }, 5000)
      }
      // Then run the shutdown in the global execution context.
      import scala.concurrent.ExecutionContext.Implicits.global
      Future {
        extensions.reverse foreach { e =>
          import e.info._
          // Fix: log the actual lifecycle class. The previous code logged
          // `e.extLifecycle.getClass.getName`, i.e. the Option wrapper's
          // class ("scala.Some"), never the extension's class name.
          e.extLifecycle foreach { lifecycle =>
            lifecycle.shutdown()
            logger.info(s"Shutting down extension ${lifecycle.getClass.getName} in $fullName $version")
          }
        }
      } onComplete {
        case Success(result) =>
          logger.info(s"ActorSystem ${actorSystem.name} shutdown complete")
          if (stopJVM) System.exit(0)
        case Failure(e) =>
          logger.error(s"Error occurred during shutdown extensions: $e", e)
          if (stopJVM) System.exit(-1)
      }
    }
  }
}
/**
 * Loads and instantiates a single extension class for a cube.
 *
 * @param seqNo     registration sequence number of the extension
 * @param className fully-qualified class name of the ExtensionLifecycle impl
 * @param cube      the cube whose metadata (name/version/jar) is attached
 * @return an Extension holding the lifecycle on success, or an Extension with
 *         the root-cause failure recorded under the "load" key on error
 */
def loadExtension(seqNo: Int, className: String, cube: CubeInit): Extension = {
  try {
    val clazz = Class.forName(className, true, getClass.getClassLoader)
    val extLifecycle = ExtensionLifecycle(this) { clazz.asSubclass(classOf[ExtensionLifecycle]).newInstance }
    Extension(cube.info, seqNo, Some(extLifecycle), Seq.empty)
  } catch {
    case NonFatal(e) =>
      import cube.info._
      val t = getRootCause(e)
      // Fix: use real newline escapes; the previous "\\n" put a literal
      // backslash-n into the log message instead of a line break.
      logger.warn(s"Can't load extension $className.\n" +
        s"Cube: $fullName $version\n" +
        s"Path: $jarPath\n" +
        s"${t.getClass.getName}: ${t.getMessage}")
      t.printStackTrace()
      Extension(cube.info, seqNo, None, Seq("load" -> t))
  }
}
/**
 * Returns a function that applies a named lifecycle operation to an Extension.
 *
 * Extensions without a lifecycle pass through untouched. A successful call is
 * logged; a failing call records the root cause under `opName` in the
 * extension's exception list instead of propagating.
 *
 * @param opName human-readable operation name used in logs and error records
 * @param opFn   the lifecycle operation to invoke
 */
def extensionOp(opName: String, opFn: ExtensionLifecycle => Unit)
               (extension: Extension): Extension = {
  import extension.info._
  extension.extLifecycle match {
    case None => extension
    case Some(l) =>
      try {
        opFn(l)
        logger.info(s"Success $opName extension ${l.getClass.getName} in $fullName $version")
        extension
      } catch {
        case NonFatal(e) =>
          val t = getRootCause(e)
          // Fix: use real newline escapes; the previous "\\n" put a literal
          // backslash-n into the log message instead of a line break.
          logger.warn(s"Error on $opName extension ${l.getClass.getName}\n" +
            s"Cube: $fullName $version\n" +
            s"${t.getClass.getName}: ${t.getMessage}")
          t.printStackTrace()
          extension.copy(exceptions = extension.exceptions :+ (opName -> t))
      }
  }
}
}
|
SarathChandran/squbs
|
squbs-unicomplex/src/main/scala/org/squbs/unicomplex/UnicomplexBoot.scala
|
Scala
|
apache-2.0
| 28,350
|
package scutil
package object lang {
  /** Type-level identity: `Identity[T]` is simply `T`. */
  type Identity[T] = T
  /** A boolean test on values of `T`. */
  type Predicate[-T] = T => Boolean
  /** A deferred, re-evaluated computation yielding a `T`. */
  type Thunk[+T] = () => T
  /** A side-effecting consumer of `T`. */
  type Effect[-T] = T => Unit

  //------------------------------------------------------------------------------

  /** A function that ignores its argument and evaluates `value` on each call. */
  def constant[S, T](value: => T): (S => T) = { _ => value }

  /** A function that ignores its argument and forces `thunk` on each call. */
  def ignorant[S, T](thunk: Thunk[T]): (S => T) = { _ => thunk() }

  /** Captures a by-name expression as a [[Thunk]] without evaluating it. */
  def thunk[T](value: => T): Thunk[T] = { () => value }

  /** tell the compiler the control flow never reaches this point */
  def nothing: Nothing = sys.error("silence! i kill you!")

  //------------------------------------------------------------------------------

  /** Infix alias for a natural transformation between type constructors. */
  type ~>[-F[_], +G[_]] = NaturalTransformation[F, G]

  //------------------------------------------------------------------------------

  /** Forces an expression to type-check without evaluating or using it. */
  def typed[T](t: => T): Unit = ()
}
|
ritschwumm/scutil
|
modules/core/src/main/scala/scutil/lang/package.scala
|
Scala
|
bsd-2-clause
| 776
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import matchers.BePropertyMatchResult
import matchers.BePropertyMatcher
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
/**
 * Custom `BePropertyMatcher`s for `java.io.File`, enabling the
 * `file should be a (file)` / `should not be a (directory)` syntax.
 */
trait CustomFileBePropertyMatchers {
// Matches when the File denotes a regular file; property name "file" appears in failure messages.
class FileBePropertyMatcher extends BePropertyMatcher[java.io.File] {
def apply(left: java.io.File) = BePropertyMatchResult(left.isFile, "file")
}
// Matches when the File denotes a directory; property name "directory" appears in failure messages.
class DirectoryBePropertyMatcher extends BePropertyMatcher[java.io.File] {
def apply(left: java.io.File) = BePropertyMatchResult(left.isDirectory, "directory")
}
val file = new FileBePropertyMatcher
val directory = new DirectoryBePropertyMatcher
}
import Matchers._
/**
 * Exercises the custom file/directory `BePropertyMatcher`s against a real
 * temporary file; the file is always deleted, even on assertion failure.
 */
class ShouldFileBePropertyMatcherSpec extends AnyFunSpec with CustomFileBePropertyMatchers {
describe("A temp file") {
it("should be a file, not a directory") {
val tempFile = java.io.File.createTempFile("delete", "me")
try {
tempFile should be a (file)
tempFile should not be a (directory)
}
finally {
// Clean up the temp file regardless of test outcome.
tempFile.delete()
}
}
}
}
|
scalatest/scalatest
|
jvm/scalatest-test/src/test/scala/org/scalatest/ShouldFileBePropertyMatcherSpec.scala
|
Scala
|
apache-2.0
| 1,660
|
import scala.language.higherKinds
// Minimized compiler regression test (higher-kinded type argument inference).
// Do not reformat or simplify: the exact shapes of Tag/ofType/InSeq are what
// trigger the bug. The "// fail"/"// okay" comments mark which usages
// (mis)behave; an explicit expected type works around the inference failure.
object Bug {
class Tag[W[M1[X1]]]
def ofType[W[M2[X2]]]: Tag[W] = ???
type InSeq [M3[X3]] = Some[M3[Any]]
// fail
val x = ofType[InSeq]
// okay
val y: Any = ofType[InSeq]
object T {
val z = ofType[InSeq]
}
}
|
yusuke2255/dotty
|
tests/pending/pos/t7902.scala
|
Scala
|
bsd-3-clause
| 269
|
package es.weso.shacl
import org.apache.jena.riot.RDFLanguages._
import scala.collection.JavaConversions._
/**
 * Registry of the RDF serialization formats supported by the application.
 * Format names are stored upper-case; lookups are case-insensitive.
 */
object DataFormats {
  /** Supported format names, upper-case, in preference order. */
  lazy val formats = List("TURTLE", "RDF/XML", "N-TRIPLES", "RDF/JSON", "TRIG")

  /**
   * Checks whether `format` names a supported format, case-insensitively.
   * Fix: uses Locale.ROOT so the comparison is stable regardless of the
   * default locale (e.g. Turkish dotted/dotless 'i' would otherwise break
   * matching of names containing 'I').
   */
  def available(format: String): Boolean =
    formats.contains(format.toUpperCase(java.util.Locale.ROOT))

  /** The default format used when none is specified. */
  def default = "TURTLE"

  /** All supported format names. */
  lazy val toList: List[String] = formats

  /** Comma-separated list of all supported formats. */
  override def toString(): String = {
    toList.mkString(",")
  }
}
|
jorgeyp/ShExcala
|
src/main/scala/es/weso/shacl/DataFormats.scala
|
Scala
|
mit
| 440
|
package monocle.macros
import monocle._
import monocle.macros.internal.MacroImpl
// Carrier for the whole-field Lens macro; instantiated via GenLens.apply[A].
class GenLens[A] {
/** generate a [[Lens]] between a case class `A` and one of its fields */
def apply[B](field: A => B): Lens[A, B] = macro MacroImpl.genLens_impl[A, B]
}
object GenLens {
  /**
   * Entry point for Lens derivation: `GenLens[Person](_.name)`.
   * The explicit result type documents the public API (the original relied
   * on inference for a public member).
   */
  def apply[A]: GenLens[A] = new GenLens[A]
}
|
malcolmgreaves/Monocle
|
macro/src/main/scala/monocle/macros/GenLens.scala
|
Scala
|
mit
| 310
|
package com.typesafe.slick.testkit.tests
import org.junit.Assert._
import scala.slick.ast._
import scala.slick.ast.Util._
import com.typesafe.slick.testkit.util.{TestkitTest, TestDB}
/**
 * Testkit suite for Slick's mapped tables and mapped column types:
 * case-class projections (`<>`), custom type mappers, and wide (nested tuple)
 * projections. Each test creates its tables against the profile supplied by
 * `tdb` and runs within the shared testkit session.
 */
class MapperTest(val tdb: TestDB) extends TestkitTest {
import tdb.profile.simple._
override val reuseInstance = true
// Round-trips a case class through a `<>`-mapped projection, including an
// auto-increment primary key exposed as Option[Int] via `id.?`.
def testMappedEntity {
case class User(id: Option[Int], first: String, last: String)
object Users extends Table[User]("users") {
def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
def first = column[String]("first")
def last = column[String]("last")
def * = id.? ~: baseProjection <> (User, User.unapply _)
def baseProjection = first ~ last
def forInsert = baseProjection <>
({ (f, l) => User(None, f, l) }, { u:User => Some((u.first, u.last)) })
val findByID = createFinderBy(_.id)
}
Users.ddl.create
Users.baseProjection.insert("Homer", "Simpson")
/* Using Users.forInsert so that we don't put a NULL value into the ID
* column. H2 and SQLite allow this but PostgreSQL doesn't. */
Users.forInsert.insertAll(
User(None, "Marge", "Bouvier"),
User(None, "Carl", "Carlson"),
User(None, "Lenny", "Leonard")
)
val lastNames = Set("Bouvier", "Ferdinand")
assertEquals(1, Query(Users).where(_.last inSet lastNames).list.size)
// Update through the mapped forInsert projection (ID column untouched).
val updateQ = Users.where(_.id === 2.bind).map(_.forInsert)
println("Update: "+updateQ.updateStatement)
updateQ.update(User(None, "Marge", "Simpson"))
assertTrue(Query(Users.where(_.id === 1).exists).first)
Users.where(_.id between(1, 2)).foreach(println)
println("ID 3 -> " + Users.findByID.first(3))
assertEquals(
Set(User(Some(1), "Homer", "Simpson"), User(Some(2), "Marge", "Simpson")),
Users.where(_.id between(1, 2)).list.toSet
)
assertEquals(
User(Some(3), "Carl", "Carlson"),
Users.findByID.first(3)
)
}
// Verifies updates through a fully mapped table, both directly on the
// filtered query and via a trivial `map(identity)`.
def testUpdate {
case class Data(a: Int, b: Int)
object Ts extends Table[Data]("T") {
def a = column[Int]("A")
def b = column[Int]("B")
def * = a ~ b <> (Data, Data.unapply _)
}
Ts.ddl.create
Ts.insertAll(new Data(1, 2), new Data(3, 4), new Data(5, 6))
val updateQ = Ts.where(_.a === 1)
Dump(updateQ, "updateQ: ")
println("Update: "+updateQ.updateStatement)
updateQ.update(Data(7, 8))
val updateQ2 = Ts.where(_.a === 3).map(identity)
Dump(updateQ2, "updateQ2: ")
println("Update2: "+updateQ2.updateStatement)
updateQ2.update(Data(9, 10))
assertEquals(
Set(Data(7, 8), Data(9, 10), Data(5, 6)),
Query(Ts).list.toSet
)
}
// Maps a custom ADT (Bool) to INT columns via MappedTypeMapper and checks
// round-trips and filtering on the mapped type.
def testMappedType {
sealed trait Bool
case object True extends Bool
case object False extends Bool
implicit val boolTypeMapper = MappedTypeMapper.base[Bool, Int](
{ b =>
assertNotNull(b)
if(b == True) 1 else 0
}, { i =>
assertNotNull(i)
if(i == 1) True else False
}
)
object T extends Table[(Int, Bool, Option[Bool])]("t2") {
def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
def b = column[Bool]("b")
def c = column[Option[Bool]]("c")
def * = id ~ b ~ c
}
T.ddl.create
(T.b ~ T.c).insertAll((False, None), (True, Some(True)))
assertEquals(Query(T).list.toSet, Set((1, False, None), (2, True, Some(True))))
assertEquals(T.where(_.b === (True:Bool)).list.toSet, Set((2, True, Some(True))))
assertEquals(T.where(_.b === (False:Bool)).list.toSet, Set((1, False, None)))
}
// Same as testMappedType, but maps the ADT to a reference type (String).
def testMappedRefType {
sealed trait Bool
case object True extends Bool
case object False extends Bool
implicit val boolTypeMapper = MappedTypeMapper.base[Bool, String](
{ b =>
assertNotNull(b)
if(b == True) "y" else "n"
}, { i =>
assertNotNull(i)
if(i == "y") True else False
}
)
object T extends Table[(Int, Bool, Option[Bool])]("t3") {
def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
def b = column[Bool]("b")
def c = column[Option[Bool]]("c")
def * = id ~ b ~ c
}
T.ddl.create
(T.b ~ T.c).insertAll((False, None), (True, Some(True)))
assertEquals(Query(T).list.toSet, Set((1, False, None), (2, True, Some(True))))
assertEquals(T.where(_.b === (True:Bool)).list.toSet, Set((2, True, Some(True))))
assertEquals(T.where(_.b === (False:Bool)).list.toSet, Set((1, False, None)))
}
// Exercises a 25-column table through a nested-tuple projection (`all`) that
// is wider than the default `*`, including a custom `create_*` so DDL covers
// all columns, and mapResult to rebuild nested case classes.
def testWideMappedEntity {
case class Part(i1: Int, i2: Int, i3: Int, i4: Int, i5: Int, i6: Int)
case class Whole(id: Int, p1: Part, p2: Part, p3: Part, p4: Part)
object T extends Table[Int]("t_wide") {
def id = column[Int]("id", O.PrimaryKey)
def p1i1 = column[Int]("p1i1")
def p1i2 = column[Int]("p1i2")
def p1i3 = column[Int]("p1i3")
def p1i4 = column[Int]("p1i4")
def p1i5 = column[Int]("p1i5")
def p1i6 = column[Int]("p1i6")
def p2i1 = column[Int]("p2i1")
def p2i2 = column[Int]("p2i2")
def p2i3 = column[Int]("p2i3")
def p2i4 = column[Int]("p2i4")
def p2i5 = column[Int]("p2i5")
def p2i6 = column[Int]("p2i6")
def p3i1 = column[Int]("p3i1")
def p3i2 = column[Int]("p3i2")
def p3i3 = column[Int]("p3i3")
def p3i4 = column[Int]("p3i4")
def p3i5 = column[Int]("p3i5")
def p3i6 = column[Int]("p3i6")
def p4i1 = column[Int]("p4i1")
def p4i2 = column[Int]("p4i2")
def p4i3 = column[Int]("p4i3")
def p4i4 = column[Int]("p4i4")
def p4i5 = column[Int]("p4i5")
def p4i6 = column[Int]("p4i6")
def * = id
def all = (
id,
(p1i1, p1i2, p1i3, p1i4, p1i5, p1i6),
(p2i1, p2i2, p2i3, p2i4, p2i5, p2i6),
(p3i1, p3i2, p3i3, p3i4, p3i5, p3i6),
(p4i1, p4i2, p4i3, p4i4, p4i5, p4i6)
)
// Collect every FieldSymbol referenced by `all` so DDL creates all
// columns, not just the narrow default projection `*`.
override def create_* =
all.shaped.packedNode.collect {
case Select(Ref(IntrinsicSymbol(in)), f: FieldSymbol) if in == this => f
}.toSeq.distinct
}
val data = (
0,
(11, 12, 13, 14, 15, 16),
(21, 22, 23, 24, 25, 26),
(31, 32, 33, 34, 35, 36),
(41, 42, 43, 44, 45, 46)
)
val oData = Whole(0,
Part(11, 12, 13, 14, 15, 16),
Part(21, 22, 23, 24, 25, 26),
Part(31, 32, 33, 34, 35, 36),
Part(41, 42, 43, 44, 45, 46)
)
T.ddl.create
T.all.shaped.insert(data)
val q1 = T.map(_.all)
assertEquals(data, q1.first)
val i2 = q1.mapResult { case (id, p1, p2, p3, p4) =>
Whole(id, Part.tupled.apply(p1), Part.tupled.apply(p2), Part.tupled.apply(p3), Part.tupled.apply(p4))
}
assertEquals(oData, i2.first)
}
/*
def testGetOr {
object T extends Table[Option[Int]]("t4") {
def year = column[Option[Int]]("YEAR")
def * = year
}
T.ddl.create
T.insertAll(Some(2000), None)
val q = T.map(t => (t.year.getOr(2000), (t.year.getOr(2000)-0)))
println(q.selectStatement)
q.foreach(println)
}
*/
}
|
zefonseca/slick-1.0.0-scala.2.11.1
|
slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/MapperTest.scala
|
Scala
|
bsd-2-clause
| 7,023
|
package com.socrata.http.server
import scala.collection.JavaConverters._
import javax.servlet.DispatcherType
import java.util.{EventListener, EnumSet}
import org.eclipse.jetty.server.Handler
import org.eclipse.jetty.servlet.ServletContextHandler
import scala.concurrent.duration.{FiniteDuration, Duration}
// Jetty server whose handler is assembled from the servlet/filter/listener
// specs carried in `options` (see Options.cobbleTogetherHandler below).
class SocrataServerJettyServlet(options: SocrataServerJettyServlet.Options) extends
AbstractSocrataServerJetty(options.cobbleTogetherHandler, options)
/**
 * Companion: servlet/filter registration specs, the immutable builder-style
 * Options hierarchy, and the default options instance.
 */
object SocrataServerJettyServlet {
import scala.language.existentials
// A servlet class and the URL pattern it is mounted at.
case class ServletSpec(servlet: Class[_ <: javax.servlet.Servlet], pathSpec: String)
// A filter class, its URL pattern, and the dispatcher types it applies to.
case class FilterSpec(filter: Class[_ <: javax.servlet.Filter], pathSpec: String, dispatches: Set[DispatcherType])
abstract class Options extends AbstractSocrataServerJetty.Options {
type OptT <: Options
// Builds the Jetty handler from the configured listeners, servlets and filters.
private[SocrataServerJettyServlet] def cobbleTogetherHandler: Handler = {
val handler = new ServletContextHandler
listeners.foreach(handler.addEventListener)
for(servlet <- servlets) {
handler.addServlet(servlet.servlet, servlet.pathSpec)
}
for(filter <- filters) {
// NOTE(review): EnumSet.copyOf throws IllegalArgumentException when given
// an empty non-EnumSet collection — a FilterSpec with empty `dispatches`
// would fail here. Confirm dispatches is always non-empty.
val d = EnumSet.copyOf(filter.dispatches.asJava)
handler.addFilter(filter.filter, filter.pathSpec, d)
}
handler
}
val listeners: Seq[EventListener]
def withListeners(ls: Seq[EventListener]): OptT
val servlets: Seq[ServletSpec]
def withServlets(ss: Seq[ServletSpec]): OptT
val filters: Seq[FilterSpec]
def withFilters(fs: Seq[FilterSpec]): OptT
}
// Concrete immutable Options; each `withX` returns a copy with one field replaced.
private case class OptionsImpl(
listeners: Seq[EventListener] = Nil,
servlets: Seq[ServletSpec] = Nil,
filters: Seq[FilterSpec] = Nil,
deregisterWait: FiniteDuration = AbstractSocrataServerJetty.defaultOptions.deregisterWait,
gzipOptions: Option[Gzip.Options] = AbstractSocrataServerJetty.defaultOptions.gzipOptions,
broker: ServerBroker = AbstractSocrataServerJetty.defaultOptions.broker,
onFatalException: (Throwable) => Unit = AbstractSocrataServerJetty.defaultOptions.onFatalException,
gracefulShutdownTimeout: Duration = AbstractSocrataServerJetty.defaultOptions.gracefulShutdownTimeout,
onStop: () => Unit = AbstractSocrataServerJetty.defaultOptions.onStop,
port: Int = AbstractSocrataServerJetty.defaultOptions.port,
hookSignals: Boolean = AbstractSocrataServerJetty.defaultOptions.hookSignals,
extraHandlers: List[Handler => Handler] = AbstractSocrataServerJetty.defaultOptions.extraHandlers,
errorHandler: Option[HttpRequest => HttpResponse] = None,
poolOptions: Pool.Options = AbstractSocrataServerJetty.defaultOptions.poolOptions,
requestHeaderSize: Int = 8192
) extends Options {
type OptT = OptionsImpl
override def withServlets(ss: Seq[ServletSpec]) = copy(servlets = ss)
override def withFilters(fs: Seq[FilterSpec]) = copy(filters = fs)
override def withListeners(ls: Seq[EventListener]) = copy(listeners = ls)
override def withDeregisterWait(dw: FiniteDuration) = copy(deregisterWait = dw)
override def withOnStop(callback: () => Unit) = copy(onStop = callback)
override def withPort(p: Int) = copy(port = p)
override def withBroker(b: ServerBroker) = copy(broker = b)
override def withOnFatalException(callback: Throwable => Unit) = copy(onFatalException = callback)
override def withGracefulShutdownTimeout(gst: Duration) = copy(gracefulShutdownTimeout = gst)
override def withGzipOptions(gzo: Option[Gzip.Options]) = copy(gzipOptions = gzo)
override def withHookSignals(enabled: Boolean) = copy(hookSignals = enabled)
override def withExtraHandlers(h: List[Handler => Handler]) = copy(extraHandlers = h)
override def withErrorHandler(h: Option[HttpRequest => HttpResponse]) = copy(errorHandler = h)
override def withPoolOptions(pOpt: Pool.Options) = copy(poolOptions = pOpt)
// NOTE(review): withIdleTimeout discards its argument and returns this
// unchanged — idle timeout is silently not configurable here. Confirm intentional.
override def withIdleTimeout(it: Int) = this
override def withRequestHeaderSize(size: Int) = copy(requestHeaderSize = size)
}
val defaultOptions: Options = OptionsImpl()
// Re-exported for callers' convenience.
val Gzip = AbstractSocrataServerJetty.Gzip
val Pool = AbstractSocrataServerJetty.Pool
}
|
socrata-platform/socrata-http
|
socrata-http-jetty/src/main/scala/com/socrata/http/server/SocrataServerJettyServlet.scala
|
Scala
|
apache-2.0
| 4,147
|
package sri.universal.apis
import sri.core.{ComponentConstructor, ReactElement}
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSImport, JSName}
// Scala.js facade for React Native's AppRegistry JavaScript module.
// Facade only: every body is `js.native`; signatures must mirror the JS API exactly.
@js.native
trait AppRegistry extends js.Object {
def registerConfig(config: js.Array[js.Object]): Unit = js.native
// Registers a component class provider under `appKey`.
def registerComponent(
appKey: String,
getComponentFunc: js.Function0[ComponentConstructor]): Unit =
js.native
// Same underlying JS function ("registerComponent"), typed for function components.
@JSName("registerComponent")
def registerFunctionComponent(
appKey: String,
getComponentFunc: js.Function0[js.Function0[ReactElement]]): Unit =
js.native
def registerRunnable(appKey: String, getComponentFunc: js.Function): Unit =
js.native
// Starts the app registered under `appKey` with the given initial props/params.
def runApplication(appKey: String, appParameters: js.Object): Unit =
js.native
def getRegistry(): Registry = js.native
}
// Facade for the registry snapshot returned by AppRegistry.getRegistry().
@js.native
trait Registry extends js.Object {
val sections: js.Array[String] = js.native
val runnables: js.Dictionary[Any] = js.native
}
// Binds the facade to the `AppRegistry` export of the "react-native" module.
@js.native
@JSImport("react-native", "AppRegistry")
object AppRegistry extends AppRegistry
|
scalajs-react-interface/universal
|
src/main/scala/sri/universal/apis/AppRegistry.scala
|
Scala
|
apache-2.0
| 1,042
|
package mesosphere.marathon.api.v2
import javax.servlet.http.HttpServletRequest
import javax.ws.rs.core.{ Context, Response }
import javax.ws.rs.{ DELETE, GET, Path, Produces }
import com.google.inject.Inject
import mesosphere.chaos.http.HttpConf
import mesosphere.marathon.MarathonConf
import mesosphere.marathon.api.{ AuthResource, MarathonMediaType, RestResource }
import mesosphere.marathon.core.election.ElectionService
import mesosphere.marathon.plugin.auth._
@Path("v2/leader")
class LeaderResource @Inject() (
electionService: ElectionService,
val config: MarathonConf with HttpConf,
val authenticator: Authenticator,
val authorizer: Authorizer)
extends RestResource with AuthResource {
@GET
@Produces(Array(MarathonMediaType.PREFERRED_APPLICATION_JSON))
def index(@Context req: HttpServletRequest): Response = authenticated(req) { implicit identity =>
withAuthorization(ViewResource, AuthorizedResource.Leader) {
electionService.leaderHostPort match {
case None => notFound("There is no leader")
case Some(leader) =>
ok(jsonObjString("leader" -> leader))
}
}
}
@DELETE
@Produces(Array(MarathonMediaType.PREFERRED_APPLICATION_JSON))
def delete(@Context req: HttpServletRequest): Response = authenticated(req) { implicit identity =>
withAuthorization(UpdateResource, AuthorizedResource.Leader) {
if (electionService.isLeader) {
electionService.abdicateLeadership()
ok(jsonObjString("message" -> "Leadership abdicated"))
} else {
notFound("There is no leader")
}
}
}
}
|
timcharper/marathon
|
src/main/scala/mesosphere/marathon/api/v2/LeaderResource.scala
|
Scala
|
apache-2.0
| 1,604
|
package com.github.dronegator.nlp.utils
/**
* Created by cray on 9/20/16.
*/
import akka.actor._
import scala.reflect.ClassTag
// Typed wrappers over Akka actor creation/watching: TypeProps/TypeActorRef
// carry a phantom message type A over untyped Props/ActorRef.
package object typeactor {
implicit class ActorRefFactoryExt[A](system: ActorRefFactory)(implicit tag: ClassTag[A]) {
// NOTE(review): the ClassTag evidence is captured but not visibly used in
// these bodies — possibly required by TypeProps/TypeActorRef elsewhere; confirm.
def actorOf(props: TypeProps[A]) =
TypeActorRef[A](system.actorOf(props.props))
// Creates a named typed actor.
def actorOf(props: TypeProps[A], name: String) =
TypeActorRef[A](system.actorOf(props.props, name))
}
implicit class ActorContextExt[A](system: ActorContext)(implicit tag: ClassTag[A]) {
// Death-watches the underlying ref, preserving the typed wrapper.
def watch(typeActorRef: TypeActorRef[A]) =
TypeActorRef[A](system.watch(typeActorRef.actorRef))
}
}
|
dronegator/nlp
|
akka-utils/src/main/scala/com/github/dronegator/nlp/utils/typeactor/package.scala
|
Scala
|
apache-2.0
| 661
|
package scapi.ahe
import java.math.BigInteger
import edu.biu.scapi.midLayer.asymmetricCrypto.encryption.{DJKeyGenParameterSpec, ScDamgardJurikEnc}
import edu.biu.scapi.midLayer.asymmetricCrypto.keys.{DamgardJurikPrivateKey, DamgardJurikPublicKey}
import edu.biu.scapi.midLayer.ciphertext.BigIntegerCiphertext
import edu.biu.scapi.midLayer.plaintext.BigIntegerPlainText
// Demonstrates the additive homomorphism of Damgard-Jurik encryption (SCAPI):
// Enc(1000) + Enc(2000) decrypts to 3000 without decrypting the operands.
// NOTE(review): `extends App` defers initialization (DelayedInit pitfalls);
// fine for a throwaway experiment, avoid for production entry points.
object DamgardJurikExperiment extends App {
val b1 = new BigInteger("1000")
val b2 = new BigInteger("2000")
val x1 = new BigIntegerPlainText(b1)
val x2 = new BigIntegerPlainText(b2)
val djEncScheme = new ScDamgardJurikEnc()
// Fresh key pair with default parameters; scheme needs both keys set to decrypt.
val keyPair = djEncScheme.generateKey(new DJKeyGenParameterSpec())
djEncScheme.setKey(keyPair.getPublic, keyPair.getPrivate)
val pubKey = keyPair.getPublic.asInstanceOf[DamgardJurikPublicKey]
val privKey = keyPair.getPrivate.asInstanceOf[DamgardJurikPrivateKey]
val c1 = djEncScheme.encrypt(x1).asInstanceOf[BigIntegerCiphertext]
val c2 = djEncScheme.encrypt(x2).asInstanceOf[BigIntegerCiphertext]
// Homomorphic addition on ciphertexts.
val c3 = djEncScheme.add(c1, c2)
println(djEncScheme.decrypt(c3))
}
|
kushti/scala-scapi
|
src/main/scala/scapi/ahe/DamgardJurikExperiment.scala
|
Scala
|
cc0-1.0
| 1,094
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.encoders
import java.math.BigInteger
import java.sql.{Date, Timestamp}
import java.util.Arrays
import scala.collection.mutable.ArrayBuffer
import scala.reflect.runtime.universe.TypeTag
import org.apache.spark.sql.Encoders
import org.apache.spark.sql.catalyst.{OptionalData, PrimitiveData}
import org.apache.spark.sql.catalyst.analysis.AnalysisTest
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeReference}
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, Project}
import org.apache.spark.sql.catalyst.util.ArrayData
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
case class RepeatedStruct(s: Seq[PrimitiveData])
// Fixture: nested Int arrays. Arrays use reference equality, so the
// case-class-generated equals/hashCode are overridden to compare nested
// contents element-wise via java.util.Arrays.deep* methods.
case class NestedArray(a: Array[Array[Int]]) {
  override def hashCode(): Int = {
    val asRefs = a.asInstanceOf[Array[AnyRef]]
    java.util.Arrays.deepHashCode(asRefs)
  }
  override def equals(other: Any): Boolean = other match {
    case that: NestedArray =>
      java.util.Arrays.deepEquals(
        a.asInstanceOf[Array[AnyRef]],
        that.a.asInstanceOf[Array[AnyRef]])
    case _ =>
      false
  }
}
// Fixture: every primitive in its boxed (nullable) Java form, to exercise
// null-handling in the encoders.
case class BoxedData(
intField: java.lang.Integer,
longField: java.lang.Long,
doubleField: java.lang.Double,
floatField: java.lang.Float,
shortField: java.lang.Short,
byteField: java.lang.Byte,
booleanField: java.lang.Boolean)
// Fixture: collection-valued fields (with and without null elements/values)
// plus a nested struct, to exercise array/map encoders.
case class RepeatedData(
arrayField: Seq[Int],
arrayFieldContainsNull: Seq[java.lang.Integer],
mapField: scala.collection.Map[Int, Long],
mapFieldNull: scala.collection.Map[Int, java.lang.Long],
structField: PrimitiveData)
/**
 * For testing Kryo serialization based encoder.
 * Value-based equality/hashCode so a decoded copy compares equal to the original.
 */
class KryoSerializable(val value: Int) {
  override def hashCode(): Int = value
  override def equals(other: Any): Boolean =
    other match {
      case that: KryoSerializable => value == that.value
      case _                      => false
    }
}
/**
 * For testing Java serialization based encoder.
 * Extends Serializable; value-based equality/hashCode so a decoded copy
 * compares equal to the original.
 */
class JavaSerializable(val value: Int) extends Serializable {
  override def hashCode(): Int = value
  override def equals(other: Any): Boolean =
    other match {
      case that: JavaSerializable => value == that.value
      case _                      => false
    }
}
/** For testing UDT for a case class */
// The annotation binds this type to UDTForCaseClass for catalyst serialization.
@SQLUserDefinedType(udt = classOf[UDTForCaseClass])
case class UDTCaseClass(uri: java.net.URI)
// UDT storing UDTCaseClass as its URI string (StringType / UTF8String).
class UDTForCaseClass extends UserDefinedType[UDTCaseClass] {
override def sqlType: DataType = StringType
override def serialize(obj: UDTCaseClass): UTF8String = {
UTF8String.fromString(obj.uri.toString)
}
override def userClass: Class[UDTCaseClass] = classOf[UDTCaseClass]
// NOTE(review): non-exhaustive match — any datum other than UTF8String throws
// MatchError. Acceptable for a test UDT; confirm no other datum type can reach here.
override def deserialize(datum: Any): UDTCaseClass = datum match {
case uri: UTF8String => UDTCaseClass(new java.net.URI(uri.toString))
}
}
case class PrimitiveValueClass(wrapped: Int) extends AnyVal
// Fixture: a value class over a reference type (companion-nested Container),
// to test encoding of AnyVal wrappers around objects.
case class ReferenceValueClass(wrapped: ReferenceValueClass.Container) extends AnyVal
object ReferenceValueClass {
case class Container(data: Int)
}
class ExpressionEncoderSuite extends PlanTest with AnalysisTest {
OuterScopes.addOuterScope(this)
implicit def encoder[T : TypeTag]: ExpressionEncoder[T] = ExpressionEncoder()
// test flat encoders
encodeDecodeTest(false, "primitive boolean")
encodeDecodeTest(-3.toByte, "primitive byte")
encodeDecodeTest(-3.toShort, "primitive short")
encodeDecodeTest(-3, "primitive int")
encodeDecodeTest(-3L, "primitive long")
encodeDecodeTest(-3.7f, "primitive float")
encodeDecodeTest(-3.7, "primitive double")
encodeDecodeTest(new java.lang.Boolean(false), "boxed boolean")
encodeDecodeTest(new java.lang.Byte(-3.toByte), "boxed byte")
encodeDecodeTest(new java.lang.Short(-3.toShort), "boxed short")
encodeDecodeTest(new java.lang.Integer(-3), "boxed int")
encodeDecodeTest(new java.lang.Long(-3L), "boxed long")
encodeDecodeTest(new java.lang.Float(-3.7f), "boxed float")
encodeDecodeTest(new java.lang.Double(-3.7), "boxed double")
encodeDecodeTest(BigDecimal("32131413.211321313"), "scala decimal")
encodeDecodeTest(new java.math.BigDecimal("231341.23123"), "java decimal")
encodeDecodeTest(BigInt("23134123123"), "scala biginteger")
encodeDecodeTest(new BigInteger("23134123123"), "java BigInteger")
encodeDecodeTest(Decimal("32131413.211321313"), "catalyst decimal")
encodeDecodeTest("hello", "string")
encodeDecodeTest(Date.valueOf("2012-12-23"), "date")
encodeDecodeTest(Timestamp.valueOf("2016-01-29 10:00:00"), "timestamp")
encodeDecodeTest(Array(Timestamp.valueOf("2016-01-29 10:00:00")), "array of timestamp")
encodeDecodeTest(Array[Byte](13, 21, -23), "binary")
encodeDecodeTest(Seq(31, -123, 4), "seq of int")
encodeDecodeTest(Seq("abc", "xyz"), "seq of string")
encodeDecodeTest(Seq("abc", null, "xyz"), "seq of string with null")
encodeDecodeTest(Seq.empty[Int], "empty seq of int")
encodeDecodeTest(Seq.empty[String], "empty seq of string")
encodeDecodeTest(Seq(Seq(31, -123), null, Seq(4, 67)), "seq of seq of int")
encodeDecodeTest(Seq(Seq("abc", "xyz"), Seq[String](null), null, Seq("1", null, "2")),
"seq of seq of string")
encodeDecodeTest(Array(31, -123, 4), "array of int")
encodeDecodeTest(Array("abc", "xyz"), "array of string")
encodeDecodeTest(Array("a", null, "x"), "array of string with null")
encodeDecodeTest(Array.empty[Int], "empty array of int")
encodeDecodeTest(Array.empty[String], "empty array of string")
encodeDecodeTest(Array(Array(31, -123), null, Array(4, 67)), "array of array of int")
encodeDecodeTest(Array(Array("abc", "xyz"), Array[String](null), null, Array("1", null, "2")),
"array of array of string")
encodeDecodeTest(Map(1 -> "a", 2 -> "b"), "map")
encodeDecodeTest(Map(1 -> "a", 2 -> null), "map with null")
encodeDecodeTest(Map(1 -> Map("a" -> 1), 2 -> Map("b" -> 2)), "map of map")
encodeDecodeTest(Tuple1[Seq[Int]](null), "null seq in tuple")
encodeDecodeTest(Tuple1[Map[String, String]](null), "null map in tuple")
encodeDecodeTest(List(1, 2), "list of int")
encodeDecodeTest(List("a", null), "list with String and null")
encodeDecodeTest(
UDTCaseClass(new java.net.URI("http://spark.apache.org/")), "udt with case class")
// Kryo encoders
encodeDecodeTest("hello", "kryo string")(encoderFor(Encoders.kryo[String]))
encodeDecodeTest(new KryoSerializable(15), "kryo object")(
encoderFor(Encoders.kryo[KryoSerializable]))
// Java encoders
encodeDecodeTest("hello", "java string")(encoderFor(Encoders.javaSerialization[String]))
encodeDecodeTest(new JavaSerializable(15), "java object")(
encoderFor(Encoders.javaSerialization[JavaSerializable]))
// test product encoders
// Registers an encode/decode round-trip test for a Product (case class/tuple),
// named after the value's runtime class.
private def productTest[T <: Product : ExpressionEncoder](input: T): Unit = {
encodeDecodeTest(input, input.getClass.getSimpleName)
}
case class InnerClass(i: Int)
productTest(InnerClass(1))
encodeDecodeTest(Array(InnerClass(1)), "array of inner class")
encodeDecodeTest(Array(Option(InnerClass(1))), "array of optional inner class")
productTest(PrimitiveData(1, 1, 1, 1, 1, 1, true))
productTest(
OptionalData(Some(2), Some(2), Some(2), Some(2), Some(2), Some(2), Some(true),
Some(PrimitiveData(1, 1, 1, 1, 1, 1, true))))
productTest(OptionalData(None, None, None, None, None, None, None, None))
encodeDecodeTest(Seq(Some(1), None), "Option in array")
encodeDecodeTest(Map(1 -> Some(10L), 2 -> Some(20L), 3 -> None), "Option in map")
productTest(BoxedData(1, 1L, 1.0, 1.0f, 1.toShort, 1.toByte, true))
productTest(BoxedData(null, null, null, null, null, null, null))
productTest(RepeatedStruct(PrimitiveData(1, 1, 1, 1, 1, 1, true) :: Nil))
productTest((1, "test", PrimitiveData(1, 1, 1, 1, 1, 1, true)))
productTest(
RepeatedData(
Seq(1, 2),
Seq(new Integer(1), null, new Integer(2)),
Map(1 -> 2L),
Map(1 -> null),
PrimitiveData(1, 1, 1, 1, 1, 1, true)))
productTest(NestedArray(Array(Array(1, -2, 3), null, Array(4, 5, -6))))
productTest(("Seq[(String, String)]",
Seq(("a", "b"))))
productTest(("Seq[(Int, Int)]",
Seq((1, 2))))
productTest(("Seq[(Long, Long)]",
Seq((1L, 2L))))
productTest(("Seq[(Float, Float)]",
Seq((1.toFloat, 2.toFloat))))
productTest(("Seq[(Double, Double)]",
Seq((1.toDouble, 2.toDouble))))
productTest(("Seq[(Short, Short)]",
Seq((1.toShort, 2.toShort))))
productTest(("Seq[(Byte, Byte)]",
Seq((1.toByte, 2.toByte))))
productTest(("Seq[(Boolean, Boolean)]",
Seq((true, false))))
productTest(("ArrayBuffer[(String, String)]",
ArrayBuffer(("a", "b"))))
productTest(("ArrayBuffer[(Int, Int)]",
ArrayBuffer((1, 2))))
productTest(("ArrayBuffer[(Long, Long)]",
ArrayBuffer((1L, 2L))))
productTest(("ArrayBuffer[(Float, Float)]",
ArrayBuffer((1.toFloat, 2.toFloat))))
productTest(("ArrayBuffer[(Double, Double)]",
ArrayBuffer((1.toDouble, 2.toDouble))))
productTest(("ArrayBuffer[(Short, Short)]",
ArrayBuffer((1.toShort, 2.toShort))))
productTest(("ArrayBuffer[(Byte, Byte)]",
ArrayBuffer((1.toByte, 2.toByte))))
productTest(("ArrayBuffer[(Boolean, Boolean)]",
ArrayBuffer((true, false))))
productTest(("Seq[Seq[(Int, Int)]]",
Seq(Seq((1, 2)))))
// test for ExpressionEncoder.tuple
encodeDecodeTest(
1 -> 10L,
"tuple with 2 flat encoders")(
ExpressionEncoder.tuple(ExpressionEncoder[Int], ExpressionEncoder[Long]))
encodeDecodeTest(
(PrimitiveData(1, 1, 1, 1, 1, 1, true), (3, 30L)),
"tuple with 2 product encoders")(
ExpressionEncoder.tuple(ExpressionEncoder[PrimitiveData], ExpressionEncoder[(Int, Long)]))
encodeDecodeTest(
(PrimitiveData(1, 1, 1, 1, 1, 1, true), 3),
"tuple with flat encoder and product encoder")(
ExpressionEncoder.tuple(ExpressionEncoder[PrimitiveData], ExpressionEncoder[Int]))
encodeDecodeTest(
(3, PrimitiveData(1, 1, 1, 1, 1, 1, true)),
"tuple with product encoder and flat encoder")(
ExpressionEncoder.tuple(ExpressionEncoder[Int], ExpressionEncoder[PrimitiveData]))
encodeDecodeTest(
(1, (10, 100L)),
"nested tuple encoder") {
val intEnc = ExpressionEncoder[Int]
val longEnc = ExpressionEncoder[Long]
ExpressionEncoder.tuple(intEnc, ExpressionEncoder.tuple(intEnc, longEnc))
}
encodeDecodeTest(
PrimitiveValueClass(42), "primitive value class")
encodeDecodeTest(
ReferenceValueClass(ReferenceValueClass.Container(1)), "reference value class")
productTest(("UDT", new ExamplePoint(0.1, 0.2)))
test("nullable of encoder schema") {
// Asserts that the encoder's schema fields report exactly the given
// nullability flags, in order.
def checkNullable[T: ExpressionEncoder](nullable: Boolean*): Unit = {
assert(implicitly[ExpressionEncoder[T]].schema.map(_.nullable) === nullable.toSeq)
}
// test for flat encoders: primitives are non-nullable, boxed/reference
// types are nullable
checkNullable[Int](false)
checkNullable[Option[Int]](true)
checkNullable[java.lang.Integer](true)
checkNullable[String](true)
// test for product encoders
checkNullable[(String, Int)](true, false)
checkNullable[(Int, java.lang.Long)](false, true)
// test for nested product encoders
{
val schema = ExpressionEncoder[(Int, (String, Int))].schema
assert(schema(0).nullable === false)
assert(schema(1).nullable === true)
assert(schema(1).dataType.asInstanceOf[StructType](0).nullable === true)
assert(schema(1).dataType.asInstanceOf[StructType](1).nullable === false)
}
// test for tupled encoders: nullability must match the nested-product case
{
val schema = ExpressionEncoder.tuple(
ExpressionEncoder[Int],
ExpressionEncoder[(String, Int)]).schema
assert(schema(0).nullable === false)
assert(schema(1).nullable === true)
assert(schema(1).dataType.asInstanceOf[StructType](0).nullable === true)
assert(schema(1).dataType.asInstanceOf[StructType](1).nullable === false)
}
}
test("null check for map key") {
val encoder = ExpressionEncoder[Map[String, Int]]()
// Encoding a map containing a null key must fail fast with a clear message.
val e = intercept[RuntimeException](encoder.toRow(Map(("a", 1), (null, 2))))
assert(e.getMessage.contains("Cannot use null as map key"))
}
/**
 * Registers a round-trip test: encodes `input` to an InternalRow, decodes it
 * back, and fails with a detailed diagnostic report if the decoded value
 * differs from the input.
 */
private def encodeDecodeTest[T : ExpressionEncoder](
input: T,
testName: String): Unit = {
test(s"encode/decode for $testName: $input") {
val encoder = implicitly[ExpressionEncoder[T]]
val row = encoder.toRow(input)
val schema = encoder.schema.toAttributes
val boundEncoder = encoder.resolveAndBind()
// Decode failures are reported together with the row, schema and bound
// encoder to make triage possible from the test output alone.
val convertedBack = try boundEncoder.fromRow(row) catch {
case e: Exception =>
fail(
s"""Exception thrown while decoding
|Converted: $row
|Schema: ${schema.mkString(",")}
|${encoder.schema.treeString}
|
|Encoder:
|$boundEncoder
|
""".stripMargin, e)
}
// Test the correct resolution of serialization / deserialization.
val attr = AttributeReference("obj", encoder.deserializer.dataType)()
val plan = LocalRelation(attr).serialize[T].deserialize[T]
assertAnalysisSuccess(plan)
// Arrays need structural comparison (Java equals is reference-based);
// Comparable types compare via compareTo — presumably for types whose
// equals is stricter than compareTo, TODO confirm intent.
val isCorrect = (input, convertedBack) match {
case (b1: Array[Byte], b2: Array[Byte]) => Arrays.equals(b1, b2)
case (b1: Array[Int], b2: Array[Int]) => Arrays.equals(b1, b2)
case (b1: Array[Array[_]], b2: Array[Array[_]]) =>
Arrays.deepEquals(b1.asInstanceOf[Array[AnyRef]], b2.asInstanceOf[Array[AnyRef]])
case (b1: Array[_], b2: Array[_]) =>
Arrays.equals(b1.asInstanceOf[Array[AnyRef]], b2.asInstanceOf[Array[AnyRef]])
case (left: Comparable[_], right: Comparable[_]) =>
left.asInstanceOf[Comparable[Any]].compareTo(right) == 0
case _ => input == convertedBack
}
if (!isCorrect) {
// Build a best-effort description of the runtime types that came back.
val types = convertedBack match {
case c: Product =>
c.productIterator.filter(_ != null).map(_.getClass.getName).mkString(",")
case other => other.getClass.getName
}
// Render the encoded row field-by-field; array fields are expanded.
val encodedData = try {
row.toSeq(encoder.schema).zip(schema).map {
case (a: ArrayData, AttributeReference(_, ArrayType(et, _), _, _)) =>
a.toArray[Any](et).toSeq
case (other, _) =>
other
}.mkString("[", ",", "]")
} catch {
case e: Throwable => s"Failed to toSeq: $e"
}
fail(
s"""Encoded/Decoded data does not match input data
|
|in: $input
|out: $convertedBack
|types: $types
|
|Encoded Data: $encodedData
|Schema: ${schema.mkString(",")}
|${encoder.schema.treeString}
|
|fromRow Expressions:
|${boundEncoder.deserializer.treeString}
""".stripMargin)
}
}
}
}
|
ZxlAaron/mypros
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
|
Scala
|
apache-2.0
| 15,682
|
package storage
import models.ForumCategory
/**
 * DAO for forum categories.
 *
 * Pure marker trait: it specialises [[GenericDAO]] to [[models.ForumCategory]]
 * and declares no members of its own.
 */
trait ForumCategoryDAO extends GenericDAO[ForumCategory]
|
metaxmx/FridayNightBeer
|
modules/datamodel/src/main/scala/storage/ForumCategoryDAO.scala
|
Scala
|
apache-2.0
| 147
|
package net.xylophones.planetoid.game.logic
import net.xylophones.planetoid.game.maths.Vector2D
import net.xylophones.planetoid.game.model._
import net.xylophones.planetoid.game.logic.ModelTestObjectMother._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FunSuite, Matchers}
import net.xylophones.planetoid.game.logic.ModelTestObjectMother._
@RunWith(classOf[JUnitRunner])
class GameCollisionUpdaterTest extends FunSuite with Matchers {

  /** System under test, backed by a real collision calculator. */
  val underTest = new GameCollisionUpdater(new CollisionCalculator)

  test("collision between player1 and planet is detected") {
    // given: player1's rocket sits exactly on the planet's centre
    val physics = new GamePhysics()
    val planet = Planet(Vector2D(10, 10), 10)
    val player1 = Player(createRocketAt(Vector2D(10, 10)), numLives = 1)
    val model = GameModel(planet, Players(player1, createDummyPlayer()))

    // when
    val outcome = underTest.update(resultFromModel(model), physics, Vector.empty)

    // then: player1 loses a life, player2 scores, and matching events fire
    val updatedModel = outcome.model
    val firedEvents = outcome.events
    updatedModel.players.p1.numLives shouldBe 0
    updatedModel.players.p2.points shouldBe 1
    firedEvents should contain (GameEvent.Player1LoseLife)
    firedEvents should contain (GameEvent.PlayerLoseLife)
  }

  test("collision between player2 and planet is detected") {
    // given: player2's rocket sits exactly on the planet's centre
    val physics = new GamePhysics()
    val planet = Planet(Vector2D(10, 10), 10)
    val player2 = Player(createRocketAt(Vector2D(10, 10)), numLives = 1)
    val model = GameModel(planet, Players(createDummyPlayer(), player2))

    // when
    val outcome = underTest.update(resultFromModel(model), physics, Vector.empty)

    // then: mirror of the player1 case
    val updatedModel = outcome.model
    val firedEvents = outcome.events
    updatedModel.players.p2.numLives shouldBe 0
    updatedModel.players.p1.points shouldBe 1
  }

  test("collision between player1 and missile is detected and missile is removed") {
    // given: one of player2's missiles overlaps player1's rocket
    val physics = new GamePhysics()
    val player1 = Player(createRocketAt(Vector2D(10, 10)), numLives = 1)
    val missile = new Missile(Vector2D(10, 10), Vector2D(0, 0), 2)
    val player2 = createDummyPlayerWithMissile(missile)
    val model = GameModel(createDummyPlanet(), Players(player1, player2))

    // when
    val outcome = underTest.update(resultFromModel(model), physics, Vector.empty)

    // then: the hit costs player1 a life, removes the missile, scores for player2
    val updatedModel = outcome.model
    val firedEvents = outcome.events
    updatedModel.players.p1.numLives shouldBe 0
    updatedModel.players.p2.missiles shouldBe empty
    updatedModel.players.p2.points shouldBe 1
  }

  test("missiles are removed for player 2, even when collision between player1 and planet is detected") {
    // given: player1 collides with the planet while player2 has a live missile
    val physics = new GamePhysics()
    val planet = Planet(Vector2D(10, 10), 10)
    val player1 = Player(createRocketAt(Vector2D(10, 10)), numLives = 1)
    val missile = new Missile(Vector2D(10, 10), Vector2D(0, 0), 2)
    val player2 = createDummyPlayerWithMissile(missile)
    val model = GameModel(planet, Players(player1, player2))

    // when
    val outcome = underTest.update(resultFromModel(model), physics, Vector.empty)

    // then: the missile is cleaned up regardless of the planet collision
    val updatedModel = outcome.model
    val firedEvents = outcome.events
    updatedModel.players.p2.missiles shouldBe empty
  }

  test("collision between player1 and player2 is detected") {
    // given: both rockets occupy the same position
    val physics = new GamePhysics()
    val player1 = Player(createRocketAt(Vector2D(10, 10)), numLives = 1)
    val player2 = Player(createRocketAt(Vector2D(10, 10)), numLives = 1)
    val model = GameModel(createDummyPlanet(), Players(player1, player2))

    // when
    val outcome = underTest.update(resultFromModel(model), physics, Vector.empty)

    // then: both players lose a life, both score, and all events fire
    val updatedModel = outcome.model
    val firedEvents = outcome.events
    updatedModel.players.p1.numLives shouldBe 0
    updatedModel.players.p1.points shouldBe 1
    updatedModel.players.p2.numLives shouldBe 0
    updatedModel.players.p2.points shouldBe 1
    firedEvents should contain (GameEvent.Player2LoseLife)
    firedEvents should contain (GameEvent.Player1LoseLife)
    firedEvents should contain (GameEvent.PlayerLoseLife)
  }

  test("no collision between player1 and player2 when they are not intersecting") {
    // given: the rockets are far apart
    val physics = new GamePhysics()
    val player1 = Player(createRocketAt(Vector2D(1000, 1000)), numLives = 1)
    val player2 = Player(createRocketAt(Vector2D(10, 10)), numLives = 1)
    val model = GameModel(createDummyPlanet(), Players(player1, player2))

    // when
    val outcome = underTest.update(resultFromModel(model), physics, Vector.empty)

    // then: players are untouched and no life-loss events fire
    val updatedModel = outcome.model
    val firedEvents = outcome.events
    updatedModel.players.p1 shouldBe player1
    updatedModel.players.p2 shouldBe player2
    firedEvents should not contain (GameEvent.Player2LoseLife)
    firedEvents should not contain (GameEvent.Player1LoseLife)
    firedEvents should not contain (GameEvent.PlayerLoseLife)
  }
}
|
wjsrobertson/planetoid
|
game/src/test/scala/net/xylophones/planetoid/game/logic/GameCollisionUpdaterTest.scala
|
Scala
|
apache-2.0
| 4,887
|
package me.elrod.tryidrisapp
import android.app.{ Activity, AlertDialog, Fragment }
import android.content.{ Context, Intent }
import android.graphics.Color
import android.os.Bundle
import android.text.{ Spanned, Spannable, SpannableString }
import android.text.style.ForegroundColorSpan
import android.util.Log
import android.view.inputmethod.EditorInfo
import android.view.KeyEvent
import android.view.{ Menu, MenuInflater, MenuItem, View, Window }
import android.view.View.OnKeyListener
import android.widget.{ ArrayAdapter, ProgressBar, TextView, ScrollView }
import android.widget.TextView.OnEditorActionListener
import argonaut._, Argonaut._
import me.elrod.tryidris._, TryIdris._
import scalaz._, Scalaz._
import scalaz.concurrent.Promise
import scalaz.concurrent.Promise._
import scalaz.effect.IO
import scala.language.implicitConversions // lolscala
object Implicits {
  /**
   * Implicitly wraps any by-name expression as a [[Runnable]]; the expression
   * is evaluated on each `run()` call and its result discarded.
   */
  implicit def toRunnable[F](f: => F): Runnable =
    new Runnable {
      override def run(): Unit = {
        val _ = f
        ()
      }
    }
}
import Implicits._
class MainActivity extends Activity with TypedViewHolder {
// Wires the layout, the progress feature, and the editor-action listener
// once activity creation has completed.
override def onPostCreate(bundle: Bundle): Unit = {
super.onPostCreate(bundle)
requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS)
setContentView(R.layout.main_activity)
// TODO: These should probably be Option.
val output = findView(TR.output)
val input = findView(TR.input_code)
val scrollView = Option(findView(TR.mainScrollView))
input.setOnEditorActionListener(new IdrisOnEditorActionListener(this, output, input, scrollView))
}
// Inflates the options menu; always reports success.
override def onCreateOptionsMenu(menu: Menu): Boolean = {
val inflater: MenuInflater = getMenuInflater
inflater.inflate(R.menu.options, menu);
true
}
// Shows the About dialog for its menu item; all other items are ignored.
override def onOptionsItemSelected(item: MenuItem): Boolean = {
item.getItemId match {
case R.id.about => {
// The builder value is only used for its side effect of showing the dialog.
val b = new AlertDialog.Builder(this)
.setTitle("About Try Idris")
.setMessage("Try Idris Android App (c) 2014 Ricky Elrod. Powered by the awesome http://tryidris.org/ by Brian McKenna.")
.show
}
case _ => ()
}
// Every selection is reported as handled.
true
}
}
/**
 * Handles the IME "send" action on the input field: submits the typed code to
 * the Try Idris interpreter, then appends the echoed input and the
 * (optionally colorized) result to the transcript `output` view.
 */
class IdrisOnEditorActionListener(
c: Activity,
output: TextView,
input: TextView,
scrollView: Option[ScrollView]) extends OnEditorActionListener {
// "idris> " rendered in the prompt colour.
private def prompt = {
val p = new SpannableString("idris> ")
p.setSpan(
new ForegroundColorSpan(Color.parseColor("#6D0839")),
0,
p.length,
Spanned.SPAN_EXCLUSIVE_EXCLUSIVE)
p
}
// Only the SEND action triggers interpretation; everything else is declined.
def onEditorAction(v: TextView, actionId: Int, event: KeyEvent): Boolean =
if (actionId == EditorInfo.IME_ACTION_SEND) {
// This feels weird, but it is the best I know how to do right now.
runTheWorld(v, actionId, event).map(_.unsafePerformIO)
true
} else {
false
}
// Asynchronously calls the interpreter, decodes the JSON response, and
// schedules every UI mutation onto the UI thread.
def runTheWorld(v: TextView, actionId: Int, event: KeyEvent): Promise[IO[Unit]] = {
promise {
for {
_ <- IO { c.runOnUiThread(c.setProgressBarIndeterminateVisibility(true)) }
resp <- interpretIO(InterpretRequest(input.getText.toString))
.map(toUtf8String)
.map(_.decodeOption[InterpretResponse])
} yield (resp)
} map { ioo =>
ioo.flatMap { res =>
IO {
c.runOnUiThread(output.append(prompt))
// NOTE(review): "\\n" appends a literal backslash-n, not a newline --
// confirm whether the doubled backslash is intended or a transcription
// artifact of this copy of the source.
c.runOnUiThread(output.append(input.getText.toString + "\\n"))
c.runOnUiThread(input.setText(""))
res match {
case Some(x) => {
colorize(x) match {
case Some(colored) => c.runOnUiThread(output.append(colored))
case None => c.runOnUiThread(output.append(x.result))
}
c.runOnUiThread(output.append("\\n"))
}
case None => {
c.runOnUiThread(output.append("<ERROR!> :(\\n"))// Something bad happened. :'(
}
}
scrollView.map(_.fullScroll(View.FOCUS_DOWN))
c.runOnUiThread(c.setProgressBarIndeterminateVisibility(false))
()
}
}
}
}
// Builds a SpannableString of the result with per-token colouring, or None
// when the response carries no token metadata.
def colorize(r: InterpretResponse): Option[SpannableString] =
r.tokens match {
case None => None // Well then.
case Some(tokens) => {
val s = new SpannableString(r.result)
tokens.foreach { t =>
// ":decor" metadata names the token class; drop the value's leading ':'.
val style = t.metadata.find(_._1 == ":decor").map(_._2 substring 1)
val color = style match {
case Some("name") => Color.parseColor("#00ff00")
case Some("bound") => Color.parseColor("#ff00ff")
case Some("data") => Color.parseColor("#ff0000")
case Some("type") => Color.parseColor("#0000ff")
case Some("error") => Color.parseColor("#ff0000")
case _ => Color.parseColor("#555555")
}
s.setSpan(
new ForegroundColorSpan(color),
t.startChar,
t.startChar + t.length,
Spanned.SPAN_EXCLUSIVE_EXCLUSIVE)
}
Some(s)
}
}
}
|
relrod/tryidris-android
|
src/main/scala/activity/MainActivity.scala
|
Scala
|
bsd-3-clause
| 4,979
|
/*
* Copyright 2011 TomTom International BV
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tomtom.splitter.layer7
import java.util.concurrent.ExecutorService
import org.jboss.netty.handler.codec.http._
/**
 * A small HTTP server that executes simple commands ("ok", "sleep N",
 * "status N", "host") supplied as query-string parameters under this
 * server's name; used to exercise the splitter in tests.
 *
 * @author Eric Bowman
 * @since 2011-04-07 09:19
 */
class CommandableServer(name: String, port: Int)(implicit executor: ExecutorService) extends HttpServer(port)(executor) {
// Command patterns matched against the query-string value for `name`.
// NOTE(review): inside triple quotes "\\d" matches a literal backslash plus
// 'd', not a digit -- confirm whether the doubled backslash is intended or a
// transcription artifact of this copy of the source.
val Sleep = """sleep (\\d+)""".r
val Status = """status (\\d+)""".r
val Host = """host""".r
// Echoes the X-Request-Id request header back on the response when present.
override def makeResponse(request: HttpRequest,
buffer: StringBuilder,
status: HttpResponseStatus,
keepAlive: Boolean): List[AnyRef] = {
val responseBits = super.makeResponse(request, buffer, status, keepAlive)
if (request.headers.get("X-Request-Id") != null) {
responseBits.head.asInstanceOf[HttpResponse].headers.set(
"X-Request-Id", request.headers.get("X-Request-Id"))
}
responseBits
}
// Starts the server with a handler that interprets the first command found
// under this server's name in the request's query string.
def start() {
start {
case (Left(request), buffer: StringBuilder) =>
import collection.JavaConverters._
val decoder = new QueryStringDecoder(request.getUri)
val params = decoder.getParameters.asScala
params.get(name) match {
case Some(commands) => commands.asScala.toList match {
case "ok" :: Nil =>
buffer.append(name + " ok")
HttpResponseStatus.OK
case Sleep(ms) :: Nil =>
// Delays the response by the requested number of milliseconds.
buffer.append(name + " sleep " + ms)
Thread.sleep(ms.toLong)
HttpResponseStatus.OK
case Status(code) :: Nil =>
// Responds with the requested HTTP status code.
buffer.append(name + " status " + code)
HttpResponseStatus.valueOf(code.toInt)
case Host() :: Nil =>
// Reports the Host header the request arrived with.
buffer.append("HOST=" + request.headers.get(HttpHeaders.Names.HOST))
HttpResponseStatus.OK
case _ =>
buffer.append(name + " could not parse " + request.getUri)
HttpResponseStatus.OK
}
case None =>
buffer.append(name + " no command found in " + request.getUri)
HttpResponseStatus.OK
}
case _ => HttpResponseStatus.OK
}
}
}
|
ebowman/splitter
|
src/test/scala/tomtom/splitter/layer7/CommandableServer.scala
|
Scala
|
apache-2.0
| 2,866
|
package varys.framework
// Enumeration of the supported representations for a flow's payload.
private[varys] object DataType extends Enumeration {
type DataType = Value
val FAKE, INMEMORY, ONDISK = Value
}
// Identifies a piece of data by its id together with the owning coflow's id.
private[varys] case class DataIdentifier(
dataId: String,
coflowId: String)
/**
 * Describes a single flow (one transfer) belonging to a coflow.
 *
 * @param id             expected to be unique within the owning coflow
 * @param coflowId       must be a valid coflow id
 * @param dataType       payload representation
 *                       (http://www.scala-lang.org/node/7661)
 * @param sizeInBytes    payload size in bytes
 * @param maxReceivers   upper bound on the number of receivers
 *                       (how long to keep it around?)
 * @param originHost     host the data originates from
 * @param originCommPort mutable: may be assigned later via updateCommPort
 */
private[varys] class FlowDescription(
    val id: String,
    val coflowId: String,
    val dataType: DataType.DataType,
    val sizeInBytes: Long,
    val maxReceivers: Int,
    val originHost: String,
    var originCommPort: Int)
  extends Serializable {

  /** Fully-qualified identifier of this flow's data. */
  val dataId = DataIdentifier(id, coflowId)

  /** User who created this flow, falling back to "<unknown>". */
  val user = System.getProperty("user.name", "<unknown>")

  override def toString: String =
    s"FlowDescription($id:$dataType:$coflowId # $sizeInBytes Bytes)"

  /** Records the origin communication port once it becomes known. */
  def updateCommPort(commPort: Int): Unit = {
    originCommPort = commPort
  }
}
/**
 * A flow whose payload is a section (`offset`, `size_`) of a file on disk.
 * Trailing-underscore parameters shadow the corresponding [[FlowDescription]]
 * constructor arguments.
 */
private[varys] class FileDescription(
    val id_ : String,
    val pathToFile: String,
    val cId_ : String,
    val dataType_ : DataType.DataType,
    val offset : Long,
    val size_ : Long,
    val maxR_ : Int,
    val originHost_ : String,
    val originCommPort_ : Int)
  extends FlowDescription(id_, cId_, dataType_, size_, maxR_, originHost_, originCommPort_) {

  override def toString: String =
    s"FileDescription($id[$pathToFile]:$dataType:$coflowId # $sizeInBytes Bytes)"
}
/**
 * A flow whose payload is a serialized object of class `className` with the
 * given serialized size. Trailing-underscore parameters shadow the
 * corresponding [[FlowDescription]] constructor arguments.
 */
private[varys] class ObjectDescription(
    val id_ : String,
    val className: String,
    val cId_ : String,
    val dataType_ : DataType.DataType,
    val serializedSize : Long,
    val maxR_ : Int,
    val originHost_ : String,
    val origCommPort_ : Int)
  extends FlowDescription(id_, cId_, dataType_, serializedSize, maxR_, originHost_, origCommPort_) {

  override def toString: String =
    s"ObjectDescription($id[$className]:$dataType:$coflowId # $sizeInBytes Bytes)"
}
|
mosharaf/varys
|
core/src/main/scala/varys/framework/DataDescription.scala
|
Scala
|
apache-2.0
| 2,168
|
package com.angos.slicknspray
object MathHelper {
  /** Mean radius of the Earth. */
  val R = 6371.0 /* km */

  /**
   * Great-circle distance in kilometres between two points given in degrees,
   * computed with the haversine formula.
   *
   * Bug fix: the original passed the degree-valued latitude/longitude deltas
   * straight to Math.sin while converting only the cosine terms to radians,
   * yielding wrong distances. All angles are now converted to radians.
   *
   * @param xLat  latitude of the first point, in degrees
   * @param xLong longitude of the first point, in degrees
   * @param yLat  latitude of the second point, in degrees
   * @param yLong longitude of the second point, in degrees
   * @return distance along the Earth's surface, in kilometres
   */
  def haversine(xLat: Double, xLong: Double, yLat: Double, yLong: Double): Double = {
    // Half the angular deltas, in radians.
    val latDistance = Math.toRadians(yLat - xLat) / 2.0
    val lonDistance = Math.toRadians(yLong - xLong) / 2.0
    // a = sin^2(dLat/2) + cos(lat1) * cos(lat2) * sin^2(dLon/2)
    val a = Math.sin(latDistance) * Math.sin(latDistance) +
      Math.cos(Math.toRadians(xLat)) * Math.cos(Math.toRadians(yLat)) *
        Math.sin(lonDistance) * Math.sin(lonDistance)
    val c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1.0 - a))
    R * c
  }
}
|
jdkendall/angos-slicknspray
|
src/main/scala/com/angos/slicknspray/MathHelper.scala
|
Scala
|
mit
| 521
|
import com.typesafe.config.{ConfigException, Key, Path}
import uconfig.parser.HoconParser
package object uconfig {
  /** A configuration path represented as the ordered list of its segments. */
  type PathSeq = List[Path]

  object PathSeq {
    /** Builds a path directly from its segments. */
    def apply(segments: Key*): PathSeq = segments.toList

    /**
     * Parses a dotted path string into its segments.
     *
     * @throws ConfigException.BadPath if the string is not a valid path
     */
    def fromString(path: String): PathSeq =
      HoconParser.parse(HoconParser.pathSeq, path) match {
        case HoconParser.Success(parsed, _)  => parsed
        case HoconParser.Failure(message, _) => throw new ConfigException.BadPath(path, message)
        case HoconParser.Error(message, _)   => throw new ConfigException.BadPath(path, message)
      }

    implicit class RichPathSeq(val ps: PathSeq) extends AnyVal {
      /** Renders the path back into its dotted string form. */
      def toPath: String = ps.mkString(".")
    }
  }
}
|
jokade/sconfig
|
shared/src/main/scala/uconfig/package.scala
|
Scala
|
mit
| 661
|
package com.gilt.opm.query
import com.gilt.opm.utils.MongoHelper
import com.mongodb.casbah.commons.MongoDBObject
import MongoHelper.toMongo
/**
 * Case class representing the logic to filter a property that is less than
 * the given value.
 *
 * @param valueTranslator: see [[com.gilt.opm.query.OpmSearcher]]
 */
case class OpmPropertyLessThan[T <% Ordered[T]](property: String, value: T, valueTranslator: Option[(String, Any) => Any] = None) extends OpmPropertyQuery {
  override def isMatch(obj: Any): Boolean = obj match {
    case null => false
    case other => other.asInstanceOf[T] < value
  }

  override def toMongoDBObject(prefix: String = "", matchInverse: Boolean = false) = {
    val key = "%s%s".format(prefix, property)
    // The inverse of `< value` is `>= value`.
    val operator = if (matchInverse) "$gte" else "$lt"
    MongoDBObject(key -> MongoDBObject(operator -> toMongo(value, translate(property))))
  }
}
|
gilt/opm
|
src/main/scala/com/gilt/opm/query/OpmPropertyLessThan.scala
|
Scala
|
mit
| 917
|
package no.digipost.labs.users
import no.digipost.labs.Settings.Proxy
import org.scalatest.Assertions
import org.json4s._
import org.json4s.jackson.JsonMethods.parse
import org.json4s.jackson.Serialization.write
import no.digipost.labs.oauth._
import scala.concurrent.Future
import com.ning.http.util.Base64
import no.digipost.labs.oauth.AccessToken
import no.digipost.labs.oauth.AccessTokenRequest
import org.scalatra.test.HttpComponentsClient
import no.digipost.labs.Settings
import no.digipost.labs.oauth.AccessToken.IdToken
import no.digipost.labs.oauth.DigipostUser
import org.json4s.mongo.ObjectIdSerializer
import org.bson.types.ObjectId
import no.digipost.labs.login.SessionsResource
/**
 * Test helper that stubs the entire Digipost OAuth login flow so resource
 * tests can obtain authenticated sessions without any external service.
 */
object SessionHelper extends Assertions {
implicit val jsonFormats = DefaultFormats + new ObjectIdSerializer
val settings = new Settings(Settings.load().config)
val userRepository = new TestUsersRepository
createTestUsers()
// Access token that additionally records whether the logged-in user is admin.
class TestAccessToken(val admin: Boolean, access_token: String, refresh_token: String, expires_in: String, token_type: String, id_token: String)
extends AccessToken(access_token, refresh_token, expires_in, token_type, Some(id_token))
// OAuth stub: returns a canned token; an auth code of "admin" marks the
// resulting token as belonging to the admin user.
val oauthService = new OAuthService {
def idToken = IdToken(settings.oauthClientId, "180", "frode", "https://www.digipost.no/", cryptoService.randomNonce)
def base64IdTokenHash = Base64.encode(write(idToken).getBytes)
override def getAccessToken(request: AccessTokenRequest): Future[AccessToken] = {
val admin = request.parameters("code") == "admin"
Future.successful(
new TestAccessToken(admin,
"S8d79_zKHBzFOMQ0kHCwJ0o9ukynG0q29L7-Tc1_IrU",
"Abd89789KGAS78khjkasd-asdu_klasqw09jkasqwuz",
"180",
"bearer",
cryptoService.signWithHmacSha256(base64IdTokenHash, settings.oauthSecret) + "." + base64IdTokenHash))
}
}
// Crypto stub: identity "signature" and a fixed nonce keep the flow deterministic.
val cryptoService = new CryptoService {
def signWithHmacSha256(tokenValue: String, secret: String) = tokenValue
def randomNonce: String = "stubbed-nonce"
}
// Digipost API stub: returns one of two canned users based on the token's admin flag.
val digipostService = new DigipostService {
def getBasicUserDetails(uri: String, proxy: Option[Proxy], accessToken: AccessToken): Future[DigipostUser] = {
val user = if (accessToken.asInstanceOf[TestAccessToken].admin)
DigipostUser("6e948923eb19443fae21355b99bde581", "Admin Nordmann", "admin@example.com", "admin.nordmann#1234")
else
DigipostUser("6e048923eb19443fae11355b99bde552", "Regular Nordmann", "regular@example.com", "regular.nordmann#1234")
Future.successful(user)
}
}
// Builds a SessionsResource wired with all of the stubs above.
def sessionServletWithMocks = new SessionsResource(settings, new LoginWithDigipost(oauthService, digipostService, cryptoService), null, userRepository)
// Drives the three-step login flow (start, OAuth callback, fetch user) and
// returns the resulting user info.
def loginUser(client: HttpComponentsClient, admin: Boolean = false): UserInfo = {
client.get("/sessions") {
assert(client.response.status === 302)
}
val code = if (admin) "admin" else "regular"
client.get("/sessions/oauth", "code" -> code, "state" -> cryptoService.randomNonce) {
assert(client.response.status === 302)
}
client.get("/sessions/user") {
assert(client.response.status === 200)
parse(client.response.body).extract[UserInfo]
}
}
// Convenience: log in and return only the CSRF token.
def loginUserAndGetCsrfToken(client: HttpComponentsClient, admin: Boolean = false) = {
loginUser(client, admin).token
}
// Seeds the repository; note that only the admin user is pre-created here --
// the regular user presumably comes from the login flow itself (TODO confirm).
def createTestUsers() {
userRepository.insert(DbUser(_id = new ObjectId, name = "Admin Nordmann", email = Some("admin@example.com"), digipostAddress = Some("admin.nordmann#1234"), digipostId = Some("6e948923eb19443fae21355b99bde581"), admin = true))
}
}
|
digipost/labs
|
backend/src/test/scala/no/digipost/labs/users/SessionHelper.scala
|
Scala
|
apache-2.0
| 3,618
|
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.filters
import play.api.mvc.EssentialFilter
import play.filters.csp.CSPComponents
import play.filters.csrf.CSRFComponents
import play.filters.headers.SecurityHeadersComponents
import play.filters.hosts.AllowedHostsComponents
/**
 * A compile time default filters components.
 *
 * Mixes Play's standard security filter components (CSRF, security headers,
 * allowed hosts) into a compile-time DI application:
 *
 * {{{
 * class MyComponents(context: ApplicationLoader.Context)
 *   extends BuiltInComponentsFromContext(context)
 *   with play.filters.HttpFiltersComponents {
 *
 * }
 * }}}
 */
trait HttpFiltersComponents
extends CSRFComponents
with SecurityHeadersComponents
with AllowedHostsComponents {
// The filters applied to every request, in this order.
// NOTE(review): CSPComponents is imported at the top of this file but not
// mixed in here -- presumably CSP is opt-in; confirm.
def httpFilters: Seq[EssentialFilter] = Seq(csrfFilter, securityHeadersFilter, allowedHostsFilter)
}
|
Shenker93/playframework
|
framework/src/play-filters-helpers/src/main/scala/play/filters/HttpFiltersComponents.scala
|
Scala
|
apache-2.0
| 775
|
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package kafka.api
import java.time.Duration
import java.util.concurrent.TimeUnit
import java.util.{Collections, HashMap, Properties}
import com.yammer.metrics.core.{Histogram, Meter}
import kafka.api.QuotaTestClients._
import kafka.metrics.KafkaYammerMetrics
import kafka.server.{ClientQuotaManager, ClientQuotaManagerConfig, KafkaConfig, KafkaServer, QuotaType}
import kafka.utils.TestUtils
import org.apache.kafka.clients.admin.Admin
import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import org.apache.kafka.clients.producer._
import org.apache.kafka.clients.producer.internals.ErrorLoggingCallback
import org.apache.kafka.common.config.internals.QuotaConfigs
import org.apache.kafka.common.{Metric, MetricName, TopicPartition}
import org.apache.kafka.common.metrics.{KafkaMetric, Quota}
import org.apache.kafka.common.protocol.ApiKeys
import org.apache.kafka.common.quota.ClientQuotaAlteration
import org.apache.kafka.common.quota.ClientQuotaEntity
import org.apache.kafka.common.security.auth.KafkaPrincipal
import org.junit.jupiter.api.Assertions._
import org.junit.jupiter.api.{BeforeEach, Test}
import scala.collection.Map
import scala.jdk.CollectionConverters._
/**
 * Base class for client quota integration tests: a two-broker cluster with
 * deliberately small default produce/consume quotas so that simple clients
 * get throttled quickly. Subclasses configure the quota entity under test.
 */
abstract class BaseQuotaTest extends IntegrationTestHarness {
override val brokerCount = 2
protected def producerClientId = "QuotasTestProducer-1"
protected def consumerClientId = "QuotasTestConsumer-1"
// Subclasses supply producer/consumer/admin clients set up for the quota mode.
protected def createQuotaTestClients(topic: String, leaderNode: KafkaServer): QuotaTestClients
// Broker-side configuration.
this.serverConfig.setProperty(KafkaConfig.ControlledShutdownEnableProp, "false")
this.serverConfig.setProperty(KafkaConfig.OffsetsTopicReplicationFactorProp, "2")
this.serverConfig.setProperty(KafkaConfig.OffsetsTopicPartitionsProp, "1")
this.serverConfig.setProperty(KafkaConfig.GroupMinSessionTimeoutMsProp, "100")
this.serverConfig.setProperty(KafkaConfig.GroupMaxSessionTimeoutMsProp, "30000")
this.serverConfig.setProperty(KafkaConfig.GroupInitialRebalanceDelayMsProp, "0")
// Client-side configuration; small buffer/fetch sizes help trigger throttling.
this.producerConfig.setProperty(ProducerConfig.ACKS_CONFIG, "-1")
this.producerConfig.setProperty(ProducerConfig.BUFFER_MEMORY_CONFIG, "300000")
this.producerConfig.setProperty(ProducerConfig.CLIENT_ID_CONFIG, producerClientId)
this.consumerConfig.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "QuotasTest")
this.consumerConfig.setProperty(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 4096.toString)
this.consumerConfig.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
this.consumerConfig.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, consumerClientId)
this.consumerConfig.setProperty(ConsumerConfig.FETCH_MIN_BYTES_CONFIG, "0")
this.consumerConfig.setProperty(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "0")
// Low enough quota that a producer sending a small payload in a tight loop should get throttled
val defaultProducerQuota: Long = 8000
val defaultConsumerQuota: Long = 2500
val defaultRequestQuota: Double = Long.MaxValue.toDouble
val topic1 = "topic-1"
var leaderNode: KafkaServer = _
var followerNode: KafkaServer = _
var quotaTestClients: QuotaTestClients = _
@BeforeEach
override def setUp(): Unit = {
super.setUp()
// One partition replicated on both brokers; record which broker leads it.
val numPartitions = 1
val leaders = createTopic(topic1, numPartitions, brokerCount)
leaderNode = if (leaders(0) == servers.head.config.brokerId) servers.head else servers(1)
followerNode = if (leaders(0) != servers.head.config.brokerId) servers.head else servers(1)
quotaTestClients = createQuotaTestClients(topic1, leaderNode)
}
@Test
def testThrottledProducerConsumer(): Unit = {
// Under the small default quotas both clients must observe throttling.
val numRecords = 1000
val produced = quotaTestClients.produceUntilThrottled(numRecords)
quotaTestClients.verifyProduceThrottle(expectThrottle = true)
// Consumer should read in a bursty manner and get throttled immediately
assertTrue(quotaTestClients.consumeUntilThrottled(produced) > 0, "Should have consumed at least one record")
quotaTestClients.verifyConsumeThrottle(expectThrottle = true)
}
@Test
def testProducerConsumerOverrideUnthrottled(): Unit = {
// Give effectively unlimited quota for producer and consumer
val props = new Properties()
props.put(QuotaConfigs.PRODUCER_BYTE_RATE_OVERRIDE_CONFIG, Long.MaxValue.toString)
props.put(QuotaConfigs.CONSUMER_BYTE_RATE_OVERRIDE_CONFIG, Long.MaxValue.toString)
quotaTestClients.overrideQuotas(Long.MaxValue, Long.MaxValue, Long.MaxValue.toDouble)
quotaTestClients.waitForQuotaUpdate(Long.MaxValue, Long.MaxValue, Long.MaxValue.toDouble)
val numRecords = 1000
assertEquals(numRecords, quotaTestClients.produceUntilThrottled(numRecords))
quotaTestClients.verifyProduceThrottle(expectThrottle = false)
// The "client" consumer does not get throttled.
assertEquals(numRecords, quotaTestClients.consumeUntilThrottled(numRecords))
quotaTestClients.verifyConsumeThrottle(expectThrottle = false)
}
@Test
def testProducerConsumerOverrideLowerQuota(): Unit = {
// consumer quota is set such that consumer quota * default quota window (10 seconds) is less than
// MAX_PARTITION_FETCH_BYTES_CONFIG, so that we can test consumer ability to fetch in this case
// In this case, 250 * 10 < 4096
quotaTestClients.overrideQuotas(2000, 250, Long.MaxValue.toDouble)
quotaTestClients.waitForQuotaUpdate(2000, 250, Long.MaxValue.toDouble)
val numRecords = 1000
val produced = quotaTestClients.produceUntilThrottled(numRecords)
quotaTestClients.verifyProduceThrottle(expectThrottle = true)
// Consumer should be able to consume at least one record, even when throttled
assertTrue(quotaTestClients.consumeUntilThrottled(produced) > 0, "Should have consumed at least one record")
quotaTestClients.verifyConsumeThrottle(expectThrottle = true)
}
@Test
def testQuotaOverrideDelete(): Unit = {
// Override producer and consumer quotas to unlimited
quotaTestClients.overrideQuotas(Long.MaxValue, Long.MaxValue, Long.MaxValue.toDouble)
quotaTestClients.waitForQuotaUpdate(Long.MaxValue, Long.MaxValue, Long.MaxValue.toDouble)
val numRecords = 1000
assertEquals(numRecords, quotaTestClients.produceUntilThrottled(numRecords))
quotaTestClients.verifyProduceThrottle(expectThrottle = false)
assertEquals(numRecords, quotaTestClients.consumeUntilThrottled(numRecords))
quotaTestClients.verifyConsumeThrottle(expectThrottle = false)
// Delete producer and consumer quota overrides. Consumer and producer should now be
// throttled since broker defaults are very small
quotaTestClients.removeQuotaOverrides()
quotaTestClients.waitForQuotaUpdate(defaultProducerQuota, defaultConsumerQuota, defaultRequestQuota)
val produced = quotaTestClients.produceUntilThrottled(numRecords)
quotaTestClients.verifyProduceThrottle(expectThrottle = true)
// Since producer may have been throttled after producing a couple of records,
// consume from beginning till throttled
quotaTestClients.consumer.seekToBeginning(Collections.singleton(new TopicPartition(topic1, 0)))
quotaTestClients.consumeUntilThrottled(numRecords + produced)
quotaTestClients.verifyConsumeThrottle(expectThrottle = true)
}
@Test
def testThrottledRequest(): Unit = {
// A tiny request quota (0.1) throttles on request rate rather than bytes.
quotaTestClients.overrideQuotas(Long.MaxValue, Long.MaxValue, 0.1)
quotaTestClients.waitForQuotaUpdate(Long.MaxValue, Long.MaxValue, 0.1)
val consumer = quotaTestClients.consumer
consumer.subscribe(Collections.singleton(topic1))
// Poll for up to 10s until both throttle and exempt-request metrics appear.
val endTimeMs = System.currentTimeMillis + 10000
var throttled = false
while ((!throttled || quotaTestClients.exemptRequestMetric == null) && System.currentTimeMillis < endTimeMs) {
consumer.poll(Duration.ofMillis(100L))
val throttleMetric = quotaTestClients.throttleMetric(QuotaType.Request, consumerClientId)
throttled = throttleMetric != null && metricValue(throttleMetric) > 0
}
assertTrue(throttled, "Should have been throttled")
quotaTestClients.verifyConsumerClientThrottleTimeMetric(expectThrottle = true,
Some(ClientQuotaManagerConfig.DefaultQuotaWindowSizeSeconds * 1000.0))
val exemptMetric = quotaTestClients.exemptRequestMetric
assertNotNull(exemptMetric, "Exempt requests not recorded")
assertTrue(metricValue(exemptMetric) > 0, "Exempt requests not recorded")
}
}
object QuotaTestClients {
  // Sentinel used when a quota entity component (user or client-id) is the default entity.
  val DefaultEntity: String = null
  // Extracts the numeric value of a Kafka metric (metricValue() returns Object).
  def metricValue(metric: Metric): Double = metric.metricValue().asInstanceOf[Double]
}
/**
 * Test helper that drives the given producer/consumer pair against `topic` and
 * verifies quota throttling through broker-side and client-side metrics.
 * Subclasses define how quota overrides are configured and which
 * principal/metric tags identify the clients.
 */
abstract class QuotaTestClients(topic: String,
                                leaderNode: KafkaServer,
                                producerClientId: String,
                                consumerClientId: String,
                                val producer: KafkaProducer[Array[Byte], Array[Byte]],
                                val consumer: KafkaConsumer[Array[Byte], Array[Byte]],
                                val adminClient: Admin) {
  // Configures overrides for produce/fetch byte-rate quotas and the request-time quota.
  def overrideQuotas(producerQuota: Long, consumerQuota: Long, requestQuota: Double): Unit
  // Removes all overrides so that broker default quotas apply again.
  def removeQuotaOverrides(): Unit
  // Principal the quota overrides are registered for (implementation-specific).
  protected def userPrincipal: KafkaPrincipal
  // Tags identifying the broker quota metrics of the given client.
  protected def quotaMetricTags(clientId: String): Map[String, String]
  /**
   * Produces records until either `maxRecords` have been sent or the produce
   * throttle metric indicates throttling; returns the number of records sent.
   * When `waitForRequestCompletion`, also waits for the in-flight send to
   * complete after throttling is detected.
   */
  def produceUntilThrottled(maxRecords: Int, waitForRequestCompletion: Boolean = true): Int = {
    var numProduced = 0
    var throttled = false
    do {
      val payload = numProduced.toString.getBytes
      val future = producer.send(new ProducerRecord[Array[Byte], Array[Byte]](topic, null, null, payload),
        new ErrorLoggingCallback(topic, null, null, true))
      numProduced += 1
      do {
        val metric = throttleMetric(QuotaType.Produce, producerClientId)
        throttled = metric != null && metricValue(metric) > 0
      } while (!future.isDone && (!throttled || waitForRequestCompletion))
    } while (numProduced < maxRecords && !throttled)
    numProduced
  }
  /**
   * Consumes records until `maxRecords` have been received, the fetch throttle
   * metric indicates throttling, or a one-minute timeout expires; returns the
   * number of records consumed.
   */
  def consumeUntilThrottled(maxRecords: Int, waitForRequestCompletion: Boolean = true): Int = {
    val timeoutMs = TimeUnit.MINUTES.toMillis(1)
    consumer.subscribe(Collections.singleton(topic))
    var numConsumed = 0
    var throttled = false
    val startMs = System.currentTimeMillis
    do {
      numConsumed += consumer.poll(Duration.ofMillis(100L)).count
      val metric = throttleMetric(QuotaType.Fetch, consumerClientId)
      throttled = metric != null && metricValue(metric) > 0
    } while (numConsumed < maxRecords && !throttled && System.currentTimeMillis < startMs + timeoutMs)
    // If throttled, wait for the records from the last fetch to be received
    if (throttled && numConsumed < maxRecords && waitForRequestCompletion) {
      val minRecords = numConsumed + 1
      val startMs = System.currentTimeMillis
      while (numConsumed < minRecords && System.currentTimeMillis < startMs + timeoutMs)
        numConsumed += consumer.poll(Duration.ofMillis(100L)).count
    }
    numConsumed
  }
  // Looks up the currently configured quota bound for the principal/client pair.
  private def quota(quotaManager: ClientQuotaManager, userPrincipal: KafkaPrincipal, clientId: String): Quota = {
    quotaManager.quota(userPrincipal, clientId)
  }
  // Asserts on the broker request-channel ThrottleTimeMs metric for the given API key.
  private def verifyThrottleTimeRequestChannelMetric(apiKey: ApiKeys, metricNameSuffix: String,
                                                     clientId: String, expectThrottle: Boolean): Unit = {
    val throttleTimeMs = brokerRequestMetricsThrottleTimeMs(apiKey, metricNameSuffix)
    if (expectThrottle)
      assertTrue(throttleTimeMs > 0, s"Client with id=$clientId should have been throttled, $throttleTimeMs")
    else
      assertEquals(0.0, throttleTimeMs, 0.0, s"Client with id=$clientId should not have been throttled")
  }
  /** Verifies produce throttling via broker quota, request-channel and producer client metrics. */
  def verifyProduceThrottle(expectThrottle: Boolean, verifyClientMetric: Boolean = true,
                            verifyRequestChannelMetric: Boolean = true): Unit = {
    verifyThrottleTimeMetric(QuotaType.Produce, producerClientId, expectThrottle)
    if (verifyRequestChannelMetric)
      verifyThrottleTimeRequestChannelMetric(ApiKeys.PRODUCE, "", producerClientId, expectThrottle)
    if (verifyClientMetric)
      verifyProducerClientThrottleTimeMetric(expectThrottle)
  }
  /** Verifies fetch throttling via broker quota, request-channel and consumer client metrics. */
  def verifyConsumeThrottle(expectThrottle: Boolean, verifyClientMetric: Boolean = true,
                            verifyRequestChannelMetric: Boolean = true): Unit = {
    verifyThrottleTimeMetric(QuotaType.Fetch, consumerClientId, expectThrottle)
    if (verifyRequestChannelMetric)
      verifyThrottleTimeRequestChannelMetric(ApiKeys.FETCH, "Consumer", consumerClientId, expectThrottle)
    if (verifyClientMetric)
      verifyConsumerClientThrottleTimeMetric(expectThrottle)
  }
  // Broker-side throttle-time metric: positive when throttled, NaN when never throttled.
  private def verifyThrottleTimeMetric(quotaType: QuotaType, clientId: String, expectThrottle: Boolean): Unit = {
    val throttleMetricValue = metricValue(throttleMetric(quotaType, clientId))
    if (expectThrottle) {
      assertTrue(throttleMetricValue > 0, s"Client with id=$clientId should have been throttled")
    } else {
      assertTrue(throttleMetricValue.isNaN, s"Client with id=$clientId should not have been throttled")
    }
  }
  private def throttleMetricName(quotaType: QuotaType, clientId: String): MetricName = {
    leaderNode.metrics.metricName("throttle-time",
      quotaType.toString,
      quotaMetricTags(clientId).asJava)
  }
  // May return null if the broker has not yet registered the metric.
  def throttleMetric(quotaType: QuotaType, clientId: String): KafkaMetric = {
    leaderNode.metrics.metrics.get(throttleMetricName(quotaType, clientId))
  }
  // Reads the broker's yammer ThrottleTimeMs metric for the given request type.
  private def brokerRequestMetricsThrottleTimeMs(apiKey: ApiKeys, metricNameSuffix: String): Double = {
    def yammerMetricValue(name: String): Double = {
      val allMetrics = KafkaYammerMetrics.defaultRegistry.allMetrics.asScala
      val (_, metric) = allMetrics.find { case (metricName, _) =>
        metricName.getMBeanName.startsWith(name)
      }.getOrElse(fail(s"Unable to find broker metric $name: allMetrics: ${allMetrics.keySet.map(_.getMBeanName)}"))
      metric match {
        case m: Meter => m.count.toDouble
        case m: Histogram => m.max
        case m => throw new AssertionError(s"Unexpected broker metric of class ${m.getClass}")
      }
    }
    yammerMetricValue(s"kafka.network:type=RequestMetrics,name=ThrottleTimeMs,request=${apiKey.name}$metricNameSuffix")
  }
  // Broker metric tracking request time exempt from the request quota (may be null).
  def exemptRequestMetric: KafkaMetric = {
    val metricName = leaderNode.metrics.metricName("exempt-request-time", QuotaType.Request.toString, "")
    leaderNode.metrics.metrics.get(metricName)
  }
  // Producer client-side throttle metrics; these update asynchronously, so wait
  // for positive values when throttling is expected.
  private def verifyProducerClientThrottleTimeMetric(expectThrottle: Boolean): Unit = {
    val tags = new HashMap[String, String]
    tags.put("client-id", producerClientId)
    val avgMetric = producer.metrics.get(new MetricName("produce-throttle-time-avg", "producer-metrics", "", tags))
    val maxMetric = producer.metrics.get(new MetricName("produce-throttle-time-max", "producer-metrics", "", tags))
    if (expectThrottle) {
      TestUtils.waitUntilTrue(() => metricValue(avgMetric) > 0.0 && metricValue(maxMetric) > 0.0,
        s"Producer throttle metric not updated: avg=${metricValue(avgMetric)} max=${metricValue(maxMetric)}")
    } else
      assertEquals(0.0, metricValue(maxMetric), 0.0, "Should not have been throttled")
  }
  // Consumer client-side throttle metrics; optionally bounds the maximum throttle time.
  def verifyConsumerClientThrottleTimeMetric(expectThrottle: Boolean, maxThrottleTime: Option[Double] = None): Unit = {
    val tags = new HashMap[String, String]
    tags.put("client-id", consumerClientId)
    val avgMetric = consumer.metrics.get(new MetricName("fetch-throttle-time-avg", "consumer-fetch-manager-metrics", "", tags))
    val maxMetric = consumer.metrics.get(new MetricName("fetch-throttle-time-max", "consumer-fetch-manager-metrics", "", tags))
    if (expectThrottle) {
      TestUtils.waitUntilTrue(() => metricValue(avgMetric) > 0.0 && metricValue(maxMetric) > 0.0,
        s"Consumer throttle metric not updated: avg=${metricValue(avgMetric)} max=${metricValue(maxMetric)}")
      maxThrottleTime.foreach(max => assertTrue(metricValue(maxMetric) <= max,
        s"Maximum consumer throttle too high: ${metricValue(maxMetric)}"))
    } else
      assertEquals(0.0, metricValue(maxMetric), 0.0, "Should not have been throttled")
  }
  // Builds a ClientQuotaEntity from optional user and client-id components.
  def clientQuotaEntity(user: Option[String], clientId: Option[String]): ClientQuotaEntity = {
    var entries = Map.empty[String, String]
    user.foreach(user => entries = entries ++ Map(ClientQuotaEntity.USER -> user))
    clientId.foreach(clientId => entries = entries ++ Map(ClientQuotaEntity.CLIENT_ID -> clientId))
    new ClientQuotaEntity(entries.asJava)
  }
  // None is translated to `null` which remove the quota
  def clientQuotaAlteration(quotaEntity: ClientQuotaEntity,
                            producerQuota: Option[Long],
                            consumerQuota: Option[Long],
                            requestQuota: Option[Double]): ClientQuotaAlteration = {
    var ops = Seq.empty[ClientQuotaAlteration.Op]
    def addOp(key: String, value: Option[Double]): Unit = {
      ops = ops ++ Seq(new ClientQuotaAlteration.Op(key, value.map(Double.box).orNull))
    }
    addOp(QuotaConfigs.PRODUCER_BYTE_RATE_OVERRIDE_CONFIG, producerQuota.map(_.toDouble))
    addOp(QuotaConfigs.CONSUMER_BYTE_RATE_OVERRIDE_CONFIG, consumerQuota.map(_.toDouble))
    addOp(QuotaConfigs.REQUEST_PERCENTAGE_OVERRIDE_CONFIG, requestQuota)
    new ClientQuotaAlteration(quotaEntity, ops.asJava)
  }
  // Applies the given quota alterations through the admin client (blocking).
  def alterClientQuotas(quotaAlterations: ClientQuotaAlteration *): Unit = {
    adminClient.alterClientQuotas(quotaAlterations.asJava).all().get()
  }
  // Waits (up to 10s) until the given server reports the expected quota bounds
  // for both clients.
  def waitForQuotaUpdate(producerQuota: Long, consumerQuota: Long, requestQuota: Double, server: KafkaServer = leaderNode): Unit = {
    TestUtils.retry(10000) {
      val quotaManagers = server.dataPlaneRequestProcessor.quotas
      val overrideProducerQuota = quota(quotaManagers.produce, userPrincipal, producerClientId)
      val overrideConsumerQuota = quota(quotaManagers.fetch, userPrincipal, consumerClientId)
      val overrideProducerRequestQuota = quota(quotaManagers.request, userPrincipal, producerClientId)
      val overrideConsumerRequestQuota = quota(quotaManagers.request, userPrincipal, consumerClientId)
      assertEquals(Quota.upperBound(producerQuota.toDouble), overrideProducerQuota,
        s"ClientId $producerClientId of user $userPrincipal must have producer quota")
      assertEquals(Quota.upperBound(consumerQuota.toDouble), overrideConsumerQuota,
        s"ClientId $consumerClientId of user $userPrincipal must have consumer quota")
      assertEquals(Quota.upperBound(requestQuota.toDouble), overrideProducerRequestQuota,
        s"ClientId $producerClientId of user $userPrincipal must have request quota")
      assertEquals(Quota.upperBound(requestQuota.toDouble), overrideConsumerRequestQuota,
        s"ClientId $consumerClientId of user $userPrincipal must have request quota")
    }
  }
}
|
Chasego/kafka
|
core/src/test/scala/integration/kafka/api/BaseQuotaTest.scala
|
Scala
|
apache-2.0
| 19,352
|
/* sbt -- Simple Build Tool
* Copyright 2011 Mark Harrah
*/
package sbt
import java.io.File
import java.net.URI
import Def.{ ScopedKey, Setting }
import Project._
import Types.Endo
import compiler.Eval
import SessionSettings._
/**
 * State of the current interactive sbt session's settings.
 * `original` holds the settings from the build definition, `append` holds the
 * per-project settings added interactively with `set`, and `rawAppend` holds
 * settings appended without per-project tracking. `currentEval` supplies the
 * compiler instance used to evaluate `set` expressions.
 */
final case class SessionSettings(currentBuild: URI, currentProject: Map[URI, String], original: Seq[Setting[_]], append: SessionMap, rawAppend: Seq[Setting[_]], currentEval: () => Eval) {
  assert(currentProject contains currentBuild, "Current build (" + currentBuild + ") not associated with a current project.")
  // Switches to `project` in `build` and records the Eval used for later `set` calls.
  def setCurrent(build: URI, project: String, eval: () => Eval): SessionSettings = copy(currentBuild = build, currentProject = currentProject.updated(build, project), currentEval = eval)
  // Reference to the currently selected project.
  def current: ProjectRef = ProjectRef(currentBuild, currentProject(currentBuild))
  // Appends session settings for the current project only.
  def appendSettings(s: Seq[SessionSetting]): SessionSettings = copy(append = modify(append, _ ++ s))
  // Appends raw settings that are not tracked per-project (and carry no source lines).
  def appendRaw(ss: Seq[Setting[_]]): SessionSettings = copy(rawAppend = rawAppend ++ ss)
  // Full settings sequence: build definition settings plus all session additions.
  def mergeSettings: Seq[Setting[_]] = original ++ merge(append) ++ rawAppend
  // Drops all interactively added settings.
  def clearExtraSettings: SessionSettings = copy(append = Map.empty, rawAppend = Nil)
  private[this] def merge(map: SessionMap): Seq[Setting[_]] = map.values.toSeq.flatten[SessionSetting].map(_._1)
  // Transforms the session-setting sequence of the current project.
  private[this] def modify(map: SessionMap, onSeq: Endo[Seq[SessionSetting]]): SessionMap =
    {
      val cur = current
      map.updated(cur, onSeq(map.getOrElse(cur, Nil)))
    }
}
/**
 * Implementation of the `session` command: listing, clearing, removing and
 * persisting the temporary settings added during an interactive session.
 */
object SessionSettings {
  /** A session setting paired with the source lines entered to define it. */
  type SessionSetting = (Setting[_], List[String])
  type SessionMap = Map[ProjectRef, Seq[SessionSetting]]
  // Re-evaluates all settings in `s` against the given session.
  def reapply(session: SessionSettings, s: State): State =
    BuiltinCommands.reapply(session, Project.structure(s), s)
  // Removes the session settings of the current project and re-evaluates.
  def clearSettings(s: State): State =
    withSettings(s)(session => reapply(session.copy(append = session.append - session.current), s))
  // Removes the session settings of all projects and re-evaluates.
  def clearAllSettings(s: State): State =
    withSettings(s)(session => reapply(session.clearExtraSettings, s))
  // Runs `f` with the current session; a no-op (plus an info message) when no
  // session settings are defined.
  def withSettings(s: State)(f: SessionSettings => State): State =
    {
      val extracted = Project extract s
      import extracted._
      if (session.append.isEmpty) {
        s.log.info("No session settings defined.")
        s
      } else
        f(session)
    }
  // Naive English pluralization for user-facing messages.
  def pluralize(size: Int, of: String) = size.toString + (if (size == 1) of else (of + "s"))
  // Warns when settings from the previous session are about to be discarded unsaved.
  def checkSession(newSession: SessionSettings, oldState: State) {
    val oldSettings = (oldState get Keys.sessionSettings).toList.flatMap(_.append).flatMap(_._2)
    if (newSession.append.isEmpty && !oldSettings.isEmpty)
      oldState.log.warn("Discarding " + pluralize(oldSettings.size, " session setting") + ". Use 'session save' to persist session settings.")
  }
  // Removes the elements of `in` at the 1-based indices covered by `ranges`
  // (each range is inclusive).
  // NOTE(review): the pattern names (hi, lo) are swapped relative to the
  // (lo, hi) pairs produced by the `range` parser; behavior is still correct
  // because `hi to lo` is evaluated with the bound values in (lo, hi) order.
  def removeRanges[T](in: Seq[T], ranges: Seq[(Int, Int)]): Seq[T] =
    {
      val asSet = (Set.empty[Int] /: ranges) { case (s, (hi, lo)) => s ++ (hi to lo) }
      in.zipWithIndex.flatMap { case (t, index) => if (asSet(index + 1)) Nil else t :: Nil }
    }
  // Removes the numbered settings in `ranges` for the current project and re-evaluates.
  def removeSettings(s: State, ranges: Seq[(Int, Int)]): State =
    withSettings(s) { session =>
      val current = session.current
      val newAppend = session.append.updated(current, removeRanges(session.append.getOrElse(current, Nil), ranges))
      reapply(session.copy(append = newAppend), s)
    }
  // Persists the session settings of all projects.
  def saveAllSettings(s: State): State = saveSomeSettings(s)(_ => true)
  // Persists only the current project's session settings.
  def saveSettings(s: State): State =
    {
      val current = Project.session(s).current
      saveSomeSettings(s)(_ == current)
    }
  // Writes the session settings of the projects accepted by `include` to their
  // .sbt files, then folds them into the original settings and re-evaluates.
  def saveSomeSettings(s: State)(include: ProjectRef => Boolean): State =
    withSettings(s) { session =>
      val newSettings =
        for ((ref, settings) <- session.append if !settings.isEmpty && include(ref)) yield {
          val (news, olds) = writeSettings(ref, settings.toList, session.original, Project.structure(s))
          (ref -> news, olds)
        }
      val (newAppend, newOriginal) = newSettings.unzip
      val newSession = session.copy(append = newAppend.toMap, original = newOriginal.flatten.toSeq)
      reapply(newSession.copy(original = newSession.mergeSettings, append = Map.empty), s)
    }
  // Appends `settings` for project `pref` to its first .sbt file (creating
  // build.sbt if none exists). Settings in the file whose key is being saved
  // again are replaced in place; the recorded line positions of the remaining
  // settings are shifted accordingly. Returns the saved settings with their
  // new file positions and the updated set of original settings.
  def writeSettings(pref: ProjectRef, settings: List[SessionSetting], original: Seq[Setting[_]], structure: BuildStructure): (Seq[SessionSetting], Seq[Setting[_]]) =
    {
      val project = Project.getProject(pref, structure).getOrElse(sys.error("Invalid project reference " + pref))
      val writeTo: File = BuildPaths.configurationSources(project.base).headOption.getOrElse(new File(project.base, "build.sbt"))
      writeTo.createNewFile()
      val path = writeTo.getAbsolutePath
      // Partition the original settings into those defined in this file
      // (deduplicated by key) and all others.
      val (inFile, other, _) = ((List[Setting[_]](), List[Setting[_]](), Set.empty[ScopedKey[_]]) /: original.reverse) {
        case ((in, oth, keys), s) =>
          s.pos match {
            case RangePosition(`path`, _) if !keys.contains(s.key) => (s :: in, oth, keys + s.key)
            case _ => (in, s :: oth, keys)
          }
      }
      // Compute shifted positions for settings staying in the file and record
      // which line ranges must be rewritten with new content.
      val (_, oldShifted, replace, lineMap) = ((0, List[Setting[_]](), List[SessionSetting](), Map.empty[Int, (Int, List[String])]) /: inFile) {
        case ((offs, olds, repl, lineMap), s) =>
          val RangePosition(_, r @ LineRange(start, end)) = s.pos
          settings find (_._1.key == s.key) match {
            case Some(ss @ (ns, newLines)) if !ns.init.dependencies.contains(ns.key) =>
              val shifted = ns withPos RangePosition(path, LineRange(start - offs, start - offs + newLines.size))
              (offs + end - start - newLines.size, shifted :: olds, ss :: repl, lineMap + (start -> (end, newLines)))
            case _ =>
              val shifted = s withPos RangePosition(path, r shift -offs)
              (offs, shifted :: olds, repl, lineMap)
          }
      }
      val newSettings = settings diff replace
      // Rewrite the existing file content, substituting the replaced line ranges.
      val (tmpLines, _) = ((List[String](), 1) /: IO.readLines(writeTo).zipWithIndex) {
        case ((accLines, n), (line, m)) if n == m + 1 =>
          lineMap.get(n) match {
            case Some(Pair(end, lines)) => (lines reverse_::: accLines, end)
            case None => (line :: accLines, n + 1)
          }
        case (res, _) => res
      }
      val exist = tmpLines.reverse
      val adjusted = if (!newSettings.isEmpty && needsTrailingBlank(exist)) exist :+ "" else exist
      val lines = adjusted ++ newSettings.flatMap(_._2 ::: "" :: Nil)
      IO.writeLines(writeTo, lines)
      // Assign file positions to the newly appended settings.
      val (newWithPos, _) = ((List[SessionSetting](), adjusted.size + 1) /: newSettings) {
        case ((acc, line), (s, newLines)) =>
          val endLine = line + newLines.size
          ((s withPos RangePosition(path, LineRange(line, endLine)), newLines) :: acc, endLine + 1)
      }
      (newWithPos.reverse, other ++ oldShifted)
    }
  // True when a blank separator line must be inserted before appending settings.
  def needsTrailingBlank(lines: Seq[String]) = !lines.isEmpty && !lines.takeRight(1).exists(_.trim.isEmpty)
  // Prints the session settings of every project.
  def printAllSettings(s: State): State =
    withSettings(s) { session =>
      for ((ref, settings) <- session.append if !settings.isEmpty) {
        println("In " + Reference.display(ref))
        printSettings(settings)
      }
      s
    }
  // Prints the session settings of the current project.
  def printSettings(s: State): State =
    withSettings(s) { session =>
      printSettings(session.append.getOrElse(session.current, Nil))
      s
    }
  // Prints settings as a 1-based numbered list; the numbers are the indices
  // accepted by 'session remove'.
  def printSettings(settings: Seq[SessionSetting]): Unit =
    for (((_, stringRep), index) <- settings.zipWithIndex)
      println("  " + (index + 1) + ". " + stringRep.mkString("\\n"))
  // User-visible help text for the `session` command.
  // NOTE(review): "projets" in the list-all description is a typo in the
  // displayed text; left unchanged here since this is a runtime string.
  def Help = """session <command>
Manipulates session settings, which are temporary settings that do not persist past the current sbt execution (that is, the current session).
Valid commands are:
clear, clear-all
	Removes temporary settings added using 'set' and re-evaluates all settings.
	For 'clear', only the settings defined for the current project are cleared.
	For 'clear-all', all settings in all projects are cleared.
list, list-all
	Prints a numbered list of session settings defined.
	The numbers may be used to remove individual settings or ranges of settings using 'remove'.
	For 'list', only the settings for the current project are printed.
	For 'list-all', all settings in all projets are printed.
remove <range-spec>
	<range-spec> is a comma-separated list of individual numbers or ranges of numbers.
	For example, 'remove 1,3,5-7'.
	The temporary settings at the given indices for the current project are removed and all settings are re-evaluated.
	Use the 'list' command to see a numbered list of settings for the current project.
save, save-all
	Makes the session settings permanent by writing them to a '.sbt' configuration file.
	For 'save', only the current project's settings are saved (the settings for other projects are left alone).
	For 'save-all', the session settings are saved for all projects.
The session settings defined for a project are appended to the first '.sbt' configuration file in that project.
If no '.sbt' configuration file exists, the settings are written to 'build.sbt' in the project's base directory."""
  // Parsed representation of the `session` sub-commands.
  sealed trait SessionCommand
  final class Print(val all: Boolean) extends SessionCommand
  final class Clear(val all: Boolean) extends SessionCommand
  final class Save(val all: Boolean) extends SessionCommand
  final class Remove(val ranges: Seq[(Int, Int)]) extends SessionCommand
  import complete._
  import DefaultParsers._
  // Parser mapping each sub-command keyword to its SessionCommand value.
  lazy val parser =
    token(Space) ~>
      (token("list-all" ^^^ new Print(true)) | token("list" ^^^ new Print(false)) | token("clear" ^^^ new Clear(false)) |
        token("save-all" ^^^ new Save(true)) | token("save" ^^^ new Save(false)) | token("clear-all" ^^^ new Clear(true)) |
        remove)
  lazy val remove = token("remove") ~> token(Space) ~> natSelect.map(ranges => new Remove(ranges))
  // One or more comma-separated index ranges.
  def natSelect = rep1sep(token(range, "<range>"), ',')
  // A single number "n" or range "lo-hi", returned as an inclusive (lo, hi) pair.
  def range: Parser[(Int, Int)] = (NatBasic ~ ('-' ~> NatBasic).?).map { case lo ~ hi => (lo, hi getOrElse lo) }
  // Dispatches a parsed session command against the current state.
  def command(s: State) = Command.applyEffect(parser) {
    case p: Print => if (p.all) printAllSettings(s) else printSettings(s)
    case v: Save => if (v.all) saveAllSettings(s) else saveSettings(s)
    case c: Clear => if (c.all) clearAllSettings(s) else clearSettings(s)
    case r: Remove => removeSettings(s, r.ranges)
  }
}
|
xeno-by/old-scalameta-sbt
|
main/src/main/scala/sbt/SessionSettings.scala
|
Scala
|
bsd-3-clause
| 10,252
|
package Dataset
import dispatch._, Defaults._
import ij.{ImagePlus, IJ}
import ij.process.ImageProcessor
import org.netbeans.api.keyring.Keyring
import imagepipeline._, Defs._
import scala.collection._
// Placeholder class; `run` is currently a no-op stub.
class Dataset {
  def run(): Unit = {
  }
}
/**
 * A dataset described by two Github gist ids: `image` (image paths, one per
 * line) and `roi` (an ImageJ ROI table with Slice/BX/BY/Width/Height columns).
 * `convertImgPath` can rewrite each image path before loading.
 */
case class RoiDataset(image: String, roi: String, convertImgPath: String => String = a => a) {
  import Dataset._
  // from Github gist
  // Fetches all gists (as secret gists); None if any of them could not be read.
  private def readAll(arr: String*): Option[Array[String]] = {
    val res = new mutable.ArrayBuffer[String]
    for (path <- arr) {
      readGist(path, true) match {
        case Some(t) => res += t
        case None => return None
      }
    }
    Some(res.toArray)
  }
  // Runs `calc` over every (image, ROI) pair; `firstNum` limits the number of
  // image lines processed (-1 means all).
  def run[A](calc: Pipeline21[ImageProcessor, Roi, A], firstNum: Int = -1): Unit = {
    readAll(image, roi) match {
      case Some(ts) => {
        val ls = if (firstNum > 0) ts(0).lines.take(firstNum) else ts(0).lines
        process(ls, ts(1), calc)
      }
      case None => println("Could not read files")
    }
  }
  // For each image line, looks up the ROIs of the matching slice (slices are
  // 1-based) and applies `calc` to every (image, roi) combination, printing
  // each result.
  def process[A](imgs: Iterator[String], rois: String, calc: Pipeline21[ImageProcessor, Roi, A]): Unit = {
    val r = Csv.read(rois, "Slice")
    // println(imgs.length)
    // Converts ROI table rows into (x, y, width, height) tuples.
    def getRoi(m: Array[Map[String, String]]): Array[Roi] = {
      m.map(mm => {
        (mm("BX").toInt, mm("BY").toInt, mm("Width").toInt, mm("Height").toInt)
      })
    }
    var count = 0
    var countfile = 0
    for ((im, i) <- imgs.zipWithIndex) {
      countfile += 1
      // println(im,im.replace("640tirf","ricm"))
      r.get((i + 1).toString) match {
        case Some(rois) => {
          val path = convertImgPath(im)
          println("#%03d(%03d) Loading: %s".format(countfile,count,path))
          // NOTE(review): IJ.openImage returns null for unreadable paths, which
          // would NPE on getProcessor — confirm inputs are always readable.
          val img1 = IJ.openImage(path).getProcessor
          // val img2 = IJ.openImage(im.replace("640tirf", "ricm")).getProcessor
          for (r <- getRoi(rois)) {
            count += 1
            val res: A = calc.run(img1, r)
            println(res.asInstanceOf[RowData])
            // IJ.save(new ImagePlus("result", res), "./testimgs/%03d.tif".format(count))
          }
        }
        case None =>
      }
    }
  }
}
object Csv {
  /**
   * Parses a delimited table whose first line is a header row and groups the
   * remaining rows by the value of the `key` column. Each row becomes a
   * (column name -> cell value) map.
   * NOTE(review): relies on `str.lines` resolving to Scala's Iterator[String]
   * (pre-2.13 StringOps); on Scala 2.13/JDK 11+ this would resolve to
   * java.util.stream.Stream — confirm the target Scala/JDK version.
   */
  def read(str: String, key: String, sep: String = "\\t"): Map[String, Array[Map[String, String]]] = {
    val res = new mutable.HashMap[String, mutable.ArrayBuffer[Map[String, String]]]
    val lines = str.lines
    val header = lines.next().split(sep)
    val keyidx = header.indexOf(key)
    for (l <- lines) {
      val cols = l.split(sep)
      // zip truncates: rows shorter than the header simply omit trailing columns.
      val m = header.zip(cols).map(a => a._1 -> a._2).toMap
      val k = cols(keyidx)
      if (res.get(k).isEmpty) {
        res(k) = new mutable.ArrayBuffer[Map[String, String]]
      }
      res(k) += m
    }
    res.mapValues(_.toArray)
  }
}
object Dataset {
  /**
   * Downloads a gist from the Github API and returns the "content" field of
   * its first file, or None on any failure. When `secret`, authenticates as
   * user "hirokai" with the token stored in the OS keyring.
   * NOTE(review): blocks on the HTTP future and swallows all exceptions.
   */
  def readGist(id: String, secret: Boolean = false): Option[String] = {
    try {
      val req = if (secret)
        url("https://api.github.com/gists/" + id).as_!("hirokai", KeyChain.readToken.getOrElse(""))
      else
        url("https://api.github.com/gists/" + id)
      val resp = Http(req OK as.String)
      val s = resp()
      import play.api.libs.json._
      val obj = Json.parse(s)
      Some((obj \\\\ "content")(0).as[String])
    } catch {
      case _: Exception => None
    }
  }
}
/** Thin wrapper around the OS keyring for storing the Github access token. */
object KeyChain {
  // Name under which the token is stored in the keyring.
  private val keyName = "ImagePipeline"
  /** Reads the stored Github access token, if any (`Keyring.read` returns null when absent). */
  def readToken: Option[String] =
    Option(Keyring.read(keyName)).map(_.mkString)
  /** Persists the given Github access token in the OS keyring. */
  def storeToken(token: String): Unit =
    Keyring.save(keyName, token.toCharArray, "access token for ImagePipeline Github Gists")
  /** Removes the stored token from the OS keyring. */
  def deleteToken: Unit =
    Keyring.delete(keyName)
}
|
hirokai/ImagePipelineNew
|
src/main/scala/Dataset.scala
|
Scala
|
mit
| 3,625
|
package fpgatidbits.ocm
import Chisel._
// A module for inferring true dual-pPort BRAMs on FPGAs
// Since (Xilinx) FPGA synthesis tools do not infer TDP BRAMs from
// Chisel-generated Verilog (both ports in the same "always" block),
// we use a BlackBox with a premade Verilog BRAM template.
// I/O bundle for a true dual-port BRAM: two identical slave ports, each with
// its own request (addr/data/write-enable) and response (read data) channels.
class DualPortBRAMIO(addrBits: Int, dataBits: Int) extends Bundle {
  val ports = Vec.fill (2) {new OCMSlaveIF(dataBits, dataBits, addrBits)}
  override def cloneType: this.type =
    new DualPortBRAMIO(addrBits, dataBits).asInstanceOf[this.type]
  // Pin the signal names so they line up with the a_*/b_* port names used by
  // the premade Verilog BRAM template (see comment at the end of this file).
  ports(0).req.addr.setName("a_addr")
  ports(0).req.writeData.setName("a_din")
  ports(0).req.writeEn.setName("a_wr")
  ports(0).rsp.readData.setName("a_dout")
  ports(1).req.addr.setName("b_addr")
  ports(1).req.writeData.setName("b_din")
  ports(1).req.writeEn.setName("b_wr")
  ports(1).rsp.readData.setName("b_dout")
}
// variant of DualPortBRAM with the desired number of registers at input and
// output. should help achieve higher Fmax with large BRAMs, at the cost of
// latency.
class PipelinedDualPortBRAM(addrBits: Int, dataBits: Int,
                            regIn: Int, // number of registers at input
                            regOut: Int // number of registers at output
) extends Module {
  val io = new DualPortBRAMIO(addrBits, dataBits)
  // instantiate the desired BRAM
  val bram = if(dataBits <= 36 && addrBits <= 4) {
    // use pure Chisel for small memories (just synth to LUTs)
    Module(new DualPortBRAM_NoBlackBox(addrBits, dataBits)).io
  } else {
    Module(new DualPortBRAM(addrBits, dataBits)).io
  }
  // Delay requests by regIn cycles and responses by regOut cycles; total
  // read latency becomes regIn + 1 (BRAM) + regOut cycles.
  bram.ports(0).req := ShiftRegister(io.ports(0).req, regIn)
  bram.ports(1).req := ShiftRegister(io.ports(1).req, regIn)
  io.ports(0).rsp := ShiftRegister(bram.ports(0).rsp, regOut)
  io.ports(1).rsp := ShiftRegister(bram.ports(1).rsp, regOut)
}
// BlackBox wrapper around the premade Verilog TDP BRAM template (see comment
// at the end of this file); DATA/ADDR Verilog parameters are set from the
// Chisel constructor arguments.
class DualPortBRAM(addrBits: Int, dataBits: Int) extends BlackBox {
  val io = new DualPortBRAMIO(addrBits, dataBits)
  setVerilogParameters(new VerilogParameters {
    val DATA = dataBits
    val ADDR = addrBits
  })
  // the clock does not get added to the BlackBox interface by default
  addClock(Driver.implicitClock)
  // simulation model for TDP BRAM
  // for the C++ backend, this generates a model that should be roughly
  // equivalent, although there's no guarantee about what happens on
  // collisions (sim access to same address with two memory ports)
  val mem = Mem(UInt(width = dataBits), 1 << addrBits)
  for (i <- 0 until 2) {
    val req = io.ports(i).req
    // Registered read address gives the one-cycle synchronous-read latency of a BRAM.
    val regAddr = Reg(next = io.ports(i).req.addr)
    io.ports(i).rsp.readData := mem(regAddr)
    when (req.writeEn) {
      mem(req.addr) := req.writeData
    }
  }
}
// no BlackBox (pure Chisel) version. won't synthesize to BRAM, but sometimes
// (if the depth is small) this may be more desirable.
// Pure-Chisel dual-port memory with the same interface and one-cycle read
// latency as DualPortBRAM, but without the Verilog BlackBox (synthesizes to
// LUTs, not BRAM).
class DualPortBRAM_NoBlackBox(addrBits: Int, dataBits: Int) extends Module {
  val io = new DualPortBRAMIO(addrBits, dataBits)
  val mem = Mem(UInt(width = dataBits), 1 << addrBits)
  for (i <- 0 until 2) {
    val req = io.ports(i).req
    // Registered read address gives the one-cycle synchronous-read latency.
    val regAddr = Reg(next = io.ports(i).req.addr)
    io.ports(i).rsp.readData := mem(regAddr)
    when (req.writeEn) {
      mem(req.addr) := req.writeData
    }
  }
}
// the dual-port BRAM Verilog below is adapted from Dan Strother's example:
// http://danstrother.com/2010/09/11/inferring-rams-in-fpgas/
/*
module DualPortBRAM #(
parameter DATA = 72,
parameter ADDR = 10
) (
input wire clk,
// Port A
input wire a_wr,
input wire [ADDR-1:0] a_addr,
input wire [DATA-1:0] a_din,
output reg [DATA-1:0] a_dout,
// Port B
input wire b_wr,
input wire [ADDR-1:0] b_addr,
input wire [DATA-1:0] b_din,
output reg [DATA-1:0] b_dout
);
// Shared memory
reg [DATA-1:0] mem [(2**ADDR)-1:0];
// Port A
always @(posedge clk) begin
a_dout <= mem[a_addr];
if(a_wr) begin
a_dout <= a_din;
mem[a_addr] <= a_din;
end
end
// Port B
always @(posedge clk) begin
b_dout <= mem[b_addr];
if(b_wr) begin
b_dout <= b_din;
mem[b_addr] <= b_din;
end
end
endmodule
*/
|
maltanar/fpga-tidbits
|
src/main/scala/fpgatidbits/ocm/DualPortBRAM.scala
|
Scala
|
bsd-2-clause
| 4,178
|
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rhttpc.akkahttp.proxy
import akka.http.scaladsl.model.HttpResponse
/** Strategy deciding whether a proxied HTTP response counts as a success. */
trait SuccessHttpResponseRecognizer {
  def isSuccess(response: HttpResponse): Boolean
}
/** Default recognizer: success iff the HTTP status code is in the 2xx range. */
object AcceptSuccessHttpStatus extends SuccessHttpResponseRecognizer {
  override def isSuccess(response: HttpResponse) = response.status.isSuccess()
}
|
arkadius/reliable-http-client
|
rhttpc-akka-http/src/main/scala/rhttpc/akkahttp/proxy/SuccessHttpResponseRecognizer.scala
|
Scala
|
apache-2.0
| 933
|
package domain
import play.api.libs.json.Json
// Application user identified by id and email.
case class User(id: Long, email: String)
object User {
  // Play JSON reads/writes derived from the case class fields.
  implicit val format = Json.format[User]
}
|
vadim-shb/english-vocabulator
|
server/app/domain/User.scala
|
Scala
|
apache-2.0
| 147
|
package scadla.utils
import scadla._
import squants.space.Length
import scala.language.postfixOps
import squants.space.LengthConversions._
object RoundedCube {
  /** A cube of size x × y × z whose edges and corners are rounded with radius r
    * (via Minkowski sum of a shrunken cube with a sphere). Degenerates to a
    * plain cube when r is not positive; requires 2r to fit in each dimension. */
  def apply(x: Length, y: Length, z: Length, r: Length) = {
    if (r.value > 0) {
      val diameter = 2*r
      assert(diameter < x && diameter < y && diameter < z, "RoundedCube, radius should be less than x/2, y/2, z/2.")
      val shrunk = Cube(x - diameter, y - diameter, z - diameter)
      Minkowski(Translate(r, r, r, shrunk), Sphere(r))
    } else {
      Cube(x, y, z)
    }
  }
}
object RoundedCubeH {
  /** A cube of size x × y × z rounded only in the horizontal (XY) plane, via
    * Minkowski sum of a shrunken half-height cube with a cylinder of radius r
    * and half the height. Degenerates to a plain cube when r is not positive. */
  def apply(x: Length, y: Length, z: Length, r: Length) = {
    if (r.value > 0) {
      val diameter = 2*r
      assert(diameter < x && diameter < y, "roundedCube, radius should be less than x/2, y/2.")
      val halfHeight = z/2
      val shrunk = Cube(x - diameter, y - diameter, halfHeight)
      Minkowski(Translate(r, r, 0 mm, shrunk), Cylinder(r, halfHeight))
    } else {
      Cube(x, y, z)
    }
  }
}
|
dzufferey/scadla
|
src/main/scala/scadla/utils/RoundedCube.scala
|
Scala
|
apache-2.0
| 858
|
package com.webtrends.harness.component.zookeeper
import java.util
import java.util.UUID
import org.apache.curator.x.discovery.{UriSpec, ServiceInstance}
import org.apache.curator.x.discovery.details.InstanceProvider
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.time.NoTimeConversions
import collection.JavaConverters._
// Specs2 tests for WookieeWeightedStrategy's instance selection behavior.
class WookieeWeightedStrategySpec extends SpecificationWithJUnit with NoTimeConversions {
  // Instance provider backed by a fixed in-memory sequence of service instances.
  class MockInstanceProvider(instances: Seq[ServiceInstance[WookieeServiceDetails]]) extends InstanceProvider[WookieeServiceDetails] {
    override def getInstances: util.List[ServiceInstance[WookieeServiceDetails]] = instances.toList.asJava
  }
  // Builds a service instance with the given id and a payload carrying `weight`.
  def builderInstance(id: Int, weight: Int) = ServiceInstance.builder[WookieeServiceDetails]()
    .uriSpec(new UriSpec(s"akka.tcp://server@localhost:8080/"))
    .id(id.toString)
    .name(UUID.randomUUID().toString)
    .payload(new WookieeServiceDetails(weight))
    .port(8080)
    .build()
  "WookieeWeightedStrategy" should {
    "returns null when no instances" in {
      val instances = Seq.empty[ServiceInstance[WookieeServiceDetails]]
      val instanceProvider = new MockInstanceProvider(instances)
      val strategy = new WookieeWeightedStrategy()
      strategy.getInstance(instanceProvider) mustEqual null
    }
    "default to round-robin when weights are all the same" in {
      val instances = (0 to 10).map(i => builderInstance(i, 0))
      val instanceProvider = new MockInstanceProvider(instances)
      val strategy = new WookieeWeightedStrategy()
      // Successive calls should cycle through the instances in id order.
      (0 to 10).map(i => strategy.getInstance(instanceProvider).getId == i.toString).reduce(_ && _) mustEqual true
    }
    "pick the lowest weighted instance" in {
      val instances = (1 to 10).map(i => builderInstance(i,i)) ++ Seq(builderInstance(0,0))
      val instanceProvider = new MockInstanceProvider(instances)
      val strategy = new WookieeWeightedStrategy()
      strategy.getInstance(instanceProvider).getId mustEqual "0"
    }
    "pick the lowest as weight changes" in {
      val instances = (10 to 20).map(i => builderInstance(i,i)) ++ Seq( builderInstance(5,5))
      val instanceProvider = new MockInstanceProvider(instances)
      val strategy = new WookieeWeightedStrategy()
      // first check prior to updated instance weights has lowest 5
      strategy.getInstance(instanceProvider).getId mustEqual "5"
      // second check after weight for instance 5 has increased and now id 10 is lowest
      val updatedInstances = (10 to 20).map(i => builderInstance(i,i)) ++ Seq( builderInstance(5, 15))
      val updatedProvider = new MockInstanceProvider(updatedInstances)
      strategy.getInstance(updatedProvider).getId mustEqual "10"
    }
  }
}
|
mjwallin1/wookiee-zookeeper
|
src/test/scala/com/webtrends/harness/component/zookeeper/WookieeWeightedStrategySpec.scala
|
Scala
|
apache-2.0
| 2,745
|
/*
* BrowserClientImpl.scala
* (ScalaOSC)
*
* Copyright (c) 2008-2021 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Lesser General Public License v2.1+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.osc
package impl
/**
 * Browser-transport OSC client: pairs a directed receiver (input) and
 * transmitter (output) for the given target address. The client reports
 * itself open while the receiver channel is open.
 */
private[osc] final class BrowserClientImpl(protected val target: Browser.Address,
                                           protected val config: Browser.Config)
  extends ClientImpl[Browser.Address] with BrowserChannelImpl {
  override def isOpen: Boolean = input.isOpen
  protected val input : Browser.Receiver .Directed = Browser.Receiver (target, config)
  protected val output: Browser.Transmitter.Directed = Browser.Transmitter(target, config)
}
|
Sciss/ScalaOSC
|
js/src/main/scala/de/sciss/osc/impl/BrowserClientImpl.scala
|
Scala
|
lgpl-2.1
| 786
|
/*
* Copyright (c) 2013 Scott Abernethy.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package test
import org.specs2.mutable._
import play.api.test._
import play.api.test.Helpers._
/**
* Add your spec here.
* You can mock out a whole application including requests, plugins etc.
* For more information, consult the wiki.
*/
class ApplicationSpec extends Specification {
  "Application" should {
    // An unmapped route must produce no handler (Play reports this as 404).
    "send 404 on a bad request" in {
      running(FakeApplication()) {
        route(FakeRequest(GET, "/boum")) must beNone
      }
    }
    // Sanity check of the login page: status, content type and a marker string.
    "render the login page" in {
      running(FakeApplication()) {
        val home = route(FakeRequest(GET, "/approach")).get
        status(home) must equalTo(OK)
        contentType(home) must beSome.which(_ == "text/html")
        contentAsString(home) must contain ("Who approaches?!")
      }
    }
  }
}
|
scott-abernethy/opener-of-the-way
|
test/ApplicationSpec.scala
|
Scala
|
gpl-3.0
| 1,484
|
/*
* Copyright (c) 2015 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless.examples
import shapeless._
/**
 * Demo: merging two case classes by field name via LabelledGeneric.
 * Fields of `bar` overwrite same-named fields of `foo`; fields unique to
 * `foo` are kept, so the result is still a `Foo`.
 */
object CaseClassMergeDemo extends App {
  import mergeSyntax._
  case class Foo(i: Int, s: String, b: Boolean)
  case class Bar(b: Boolean, s: String)
  val foo = Foo(23, "foo", true)
  val bar = Bar(false, "bar")
  val merged = foo merge bar
  // `s` and `b` come from bar; `i` is untouched.
  assert(merged == Foo(23, "bar", false))
}
// Implementation in terms of LabelledGeneric ...
/** Enrichment providing the `merge` infix operator for any type with a CaseClassMerge instance. */
object mergeSyntax {
  implicit class MergeSyntax[T](t: T) {
    def merge[U](u: U)(implicit merge: CaseClassMerge[T, U]): T = merge(t, u)
  }
}
/** Type class witnessing that a U can be merged into a T, yielding a T. */
trait CaseClassMerge[T, U] {
  def apply(t: T, u: U): T
}
object CaseClassMerge {
  import ops.record.Merger
  // Summoner: CaseClassMerge[T, U] materializes the implicit instance.
  def apply[T, U](implicit merge: CaseClassMerge[T, U]): CaseClassMerge[T, U] = merge
  // Derivation: convert both values to labelled records, merge the records
  // (Merger.Aux[RT, RU, RT] requires the merged record to keep T's shape),
  // then rebuild a T from the result.
  implicit def mkCCMerge[T, U, RT <: HList, RU <: HList]
    (implicit
      tgen: LabelledGeneric.Aux[T, RT],
      ugen: LabelledGeneric.Aux[U, RU],
      merger: Merger.Aux[RT, RU, RT]
    ): CaseClassMerge[T, U] =
      new CaseClassMerge[T, U] {
        def apply(t: T, u: U): T =
          tgen.from(merger(tgen.to(t), ugen.to(u)))
      }
}
|
malcolmgreaves/shapeless
|
examples/src/main/scala/shapeless/examples/caseclassmerge.scala
|
Scala
|
apache-2.0
| 1,673
|
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.catalogs.doperable
import scala.reflect.runtime.{universe => ru}
import io.deepsense.deeplang.TypeUtils
/**
* Node that represents trait in hierarchy stored in DOperableCatalog.
*/
/**
 * Node that represents trait in hierarchy stored in DOperableCatalog.
 */
private[doperable] class TraitNode(protected override val javaType: Class[_]) extends TypeNode {
  /**
   * Finds this type's parent class: the first base class (in linearization
   * order) that is a subtype of `upperBoundType` and is not an interface.
   * Returns None when no such class exists.
   */
  private[doperable] override def getParentJavaType(upperBoundType: ru.Type): Option[Class[_]] = {
    val t = TypeUtils.classToType(javaType)
    val baseTypes = t.baseClasses.map(TypeUtils.symbolToType)
    val baseJavaTypes = baseTypes.filter(_ <:< upperBoundType).map(TypeUtils.typeToClass)
    // Traits compile to interfaces; the parent *class* is the first non-interface.
    baseJavaTypes.find(!_.isInterface)
  }
  // Describes this trait by name together with the names of its supertraits and parent.
  private[doperable] override def descriptor: TypeDescriptor = {
    TraitDescriptor(fullName, (supertraits.values ++ parent).map(_.fullName).toList)
  }
}
private[doperable] object TraitNode {
  /** Convenience factory mirroring the class constructor. */
  def apply(javaType: Class[_]): TraitNode = new TraitNode(javaType)
}
|
deepsense-io/seahorse-workflow-executor
|
deeplang/src/main/scala/io/deepsense/deeplang/catalogs/doperable/TraitNode.scala
|
Scala
|
apache-2.0
| 1,518
|
package com.holdenkarau.spark.testing
import org.apache.spark.ml.linalg.SQLDataTypes.{MatrixType, VectorType}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.types.{StructField, StructType}
import org.scalacheck.Prop.forAll
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers
class MLScalaCheckTest extends FunSuite with SharedSparkContext with Checkers {
  /**
   * Shared property driver: generates arbitrary DataFrames for `schema` and
   * checks each one preserves the schema and has a non-negative row count.
   * The two tests below differed only in the schema — deduplicated here.
   */
  private def checkArbitraryDataFrame(schema: StructType): Unit = {
    val sqlContext = new SQLContext(sc)
    val dataframeGen = DataframeGenerator.arbitraryDataFrame(sqlContext, schema)
    val property =
      forAll(dataframeGen.arbitrary) {
        dataframe => {
          dataframe.schema === schema && dataframe.count >= 0
        }
      }
    check(property)
  }
  test("vector generation") {
    checkArbitraryDataFrame(StructType(List(StructField("vector", VectorType))))
  }
  test("matrix generation") {
    checkArbitraryDataFrame(StructType(List(StructField("matrix", MatrixType))))
  }
}
|
snithish/spark-testing-base
|
src/test/2.0/scala/com/holdenkarau/spark/testing/MLScalaCheckTest.scala
|
Scala
|
apache-2.0
| 1,207
|
package sbt.inc
import sbt.IO
import java.io.File
import collection.mutable
/**
* During an incremental compilation run, a ClassfileManager deletes class files and is notified of generated class files.
* A ClassfileManager can be used only once.
*/
trait ClassfileManager {
  /**
   * Called once per compilation step with the class files to delete prior to that step's compilation.
   * The files in `classes` must not exist if this method returns normally.
   * Any empty ancestor directories of deleted files must not exist either.
   */
  def delete(classes: Iterable[File]): Unit
  /** Called once per compilation step with the class files generated during that step.*/
  def generated(classes: Iterable[File]): Unit
  /**
   * Called once at the end of the whole compilation run, with `success` indicating whether
   * compilation succeeded (true) or not (false). After this call the manager must not be reused.
   */
  def complete(success: Boolean): Unit
}
object ClassfileManager {
  /** Constructs a minimal ClassfileManager implementation that immediately deletes class files when requested. */
  val deleteImmediately: () => ClassfileManager = () => new ClassfileManager {
    def delete(classes: Iterable[File]): Unit = IO.deleteFilesEmptyDirs(classes)
    // No bookkeeping needed: nothing is ever rolled back.
    def generated(classes: Iterable[File]): Unit = ()
    def complete(success: Boolean): Unit = ()
  }
  @deprecated("Use overloaded variant that takes additional logger argument, instead.", "0.13.5")
  def transactional(tempDir0: File): () => ClassfileManager =
    transactional(tempDir0, sbt.Logger.Null)
  /** When compilation fails, this ClassfileManager restores class files to the way they were before compilation.*/
  def transactional(tempDir0: File, logger: sbt.Logger): () => ClassfileManager = () => new ClassfileManager {
    // Canonicalize once so all later moves/deletes agree on the backup location.
    val tempDir = tempDir0.getCanonicalFile
    IO.delete(tempDir)
    IO.createDirectory(tempDir)
    logger.debug(s"Created transactional ClassfileManager with tempDir = $tempDir")
    // Classes produced during this run (deleted on rollback).
    private[this] val generatedClasses = new mutable.HashSet[File]
    // Pre-existing classes we moved aside, keyed by original path -> backup file.
    private[this] val movedClasses = new mutable.HashMap[File, File]
    private def showFiles(files: Iterable[File]): String = files.map(f => s"\\t$f").mkString("\\n")
    def delete(classes: Iterable[File]): Unit = {
      logger.debug(s"About to delete class files:\\n${showFiles(classes)}")
      // Back up only files that exist and were neither already backed up nor generated by this run.
      val toBeBackedUp = classes.filter(c => c.exists && !movedClasses.contains(c) && !generatedClasses(c))
      logger.debug(s"We backup classs files:\\n${showFiles(toBeBackedUp)}")
      for (c <- toBeBackedUp) {
        movedClasses.put(c, move(c))
      }
      IO.deleteFilesEmptyDirs(classes)
    }
    def generated(classes: Iterable[File]): Unit = {
      logger.debug(s"Registering generated classes:\\n${showFiles(classes)}")
      generatedClasses ++= classes
    }
    def complete(success: Boolean): Unit = {
      if (!success) {
        logger.debug("Rolling back changes to class files.")
        logger.debug(s"Removing generated classes:\\n${showFiles(generatedClasses)}")
        IO.deleteFilesEmptyDirs(generatedClasses)
        logger.debug(s"Restoring class files: \\n${showFiles(movedClasses.keys)}")
        for ((orig, tmp) <- movedClasses) IO.move(tmp, orig)
      }
      // The temp dir is removed on both success and failure; the manager is single-use.
      logger.debug(s"Removing the temporary directory used for backing up class files: $tempDir")
      IO.delete(tempDir)
    }
    // Moves a class file into a fresh temp file under tempDir and returns the backup location.
    def move(c: File): File =
      {
        val target = File.createTempFile("sbt", ".class", tempDir)
        IO.move(c, target)
        target
      }
  }
}
|
pdalpra/sbt
|
compile/inc/src/main/scala/sbt/inc/ClassfileManager.scala
|
Scala
|
bsd-3-clause
| 3,445
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ly.stealth.mesos.kafka
import joptsimple.{BuiltinHelpFormatter, OptionException, OptionSet, OptionParser}
import java.net.{HttpURLConnection, URLEncoder, URL}
import scala.io.Source
import java.io._
import java.util
import scala.collection.JavaConversions._
import java.util.{Properties, Collections}
import ly.stealth.mesos.kafka.Util.{BindAddress, Str, Period}
import ly.stealth.mesos.kafka.Topics.Topic
object Cli {
  // Base url of the scheduler REST api; resolved lazily by resolveApi().
  var api: String = null
  // Output/error streams are vars so tests can capture cli output.
  var out: PrintStream = System.out
  var err: PrintStream = System.err
def main(args: Array[String]): Unit = {
try { exec(args) }
catch { case e: Error =>
err.println("Error: " + e.getMessage)
System.exit(1)
}
}
  /**
   * Dispatches the command line: the first arg is the command, the rest are
   * consumed left-to-right (help/scheduler are handled before generic option
   * parsing; topic/broker additionally require a sub-command).
   * Throws Error on missing or unsupported commands.
   */
  def exec(_args: Array[String]): Unit = {
    var args = _args
    if (args.length == 0) {
      handleHelp(); printLine()
      throw new Error("command required")
    }
    val cmd = args(0)
    args = args.slice(1, args.length)
    if (cmd == "help") { handleHelp(if (args.length > 0) args(0) else null, if (args.length > 1) args(1) else null); return }
    if (cmd == "scheduler" && SchedulerCli.isEnabled) { SchedulerCli.handle(args); return }
    // Strips generic options (e.g. --api) and resolves the api url as a side effect.
    args = handleGenericOptions(args)
    // rest of cmds require <subCmd>
    if (args.length < 1) {
      handleHelp(cmd); printLine()
      throw new Error("command required")
    }
    val subCmd = args(0)
    args = args.slice(1, args.length)
    cmd match {
      case "topic" => TopicCli.handle(subCmd, args)
      case "broker" => BrokerCli.handle(subCmd, args)
      case _ => throw new Error("unsupported command " + cmd)
    }
  }
  /**
   * Prints general help (cmd == null) or delegates to the per-command cli
   * with help = true. `subCmd` narrows broker/topic help to one sub-command.
   */
  private def handleHelp(cmd: String = null, subCmd: String = null): Unit = {
    cmd match {
      case null =>
        printLine("Usage: <command>\\n")
        printCmds()
        printLine()
        printLine("Run `help <command>` to see details of specific command")
      case "help" =>
        printLine("Print general or command-specific help\\nUsage: help [cmd [cmd]]")
      case "scheduler" =>
        // Scheduler help is only available when the scheduler cli is enabled.
        if (!SchedulerCli.isEnabled) throw new Error(s"unsupported command $cmd")
        SchedulerCli.handle(null, help = true)
      case "broker" =>
        BrokerCli.handle(subCmd, null, help = true)
      case "topic" =>
        TopicCli.handle(subCmd, null, help = true)
      case _ =>
        throw new Error(s"unsupported command $cmd")
    }
  }
  /**
   * Parses generic options shared by all commands (currently --api), resolves
   * the api url, and returns the remaining non-option arguments.
   * With help = true only prints the option descriptions and returns args unchanged.
   */
  private[kafka] def handleGenericOptions(args: Array[String], help: Boolean = false): Array[String] = {
    val parser = newParser()
    parser.accepts("api", "Api url. Example: http://master:7000").withRequiredArg().ofType(classOf[java.lang.String])
    // Unknown options are passed through for the per-command parsers.
    parser.allowsUnrecognizedOptions()
    if (help) {
      printLine("Generic Options")
      parser.printHelpOn(out)
      return args
    }
    var options: OptionSet = null
    try { options = parser.parse(args: _*) }
    catch {
      case e: OptionException =>
        parser.printHelpOn(out)
        printLine()
        throw new Error(e.getMessage)
    }
    resolveApi(options.valueOf("api").asInstanceOf[String])
    options.nonOptionArguments().toArray(new Array[String](0))
  }
private def optionsOrFile(value: String): String = {
if (!value.startsWith("file:")) return value
val file = new File(value.substring("file:".length))
if (!file.exists()) throw new Error(s"File $file does not exists")
val props: Properties = new Properties()
val reader = new FileReader(file)
try { props.load(reader) }
finally { reader.close() }
val map = new util.HashMap[String, String](props.toMap)
Util.formatMap(map)
}
private def newParser(): OptionParser = {
val parser: OptionParser = new OptionParser()
parser.formatHelpWith(new BuiltinHelpFormatter(Util.terminalWidth, 2))
parser
}
private def printCmds(): Unit = {
printLine("Commands:")
printLine("help [cmd [cmd]] - print general or command-specific help", 1)
if (SchedulerCli.isEnabled) printLine("scheduler - start scheduler", 1)
printLine("broker - broker management commands", 1)
printLine("topic - topic management commands", 1)
}
  // Writes `s` to `out`, prefixed by `indent` repetitions of the indent unit.
  private def printLine(s: Object = "", indent: Int = 0): Unit = out.println(" " * indent + s)
private[kafka] def resolveApi(apiOption: String): Unit = {
if (api != null) return
if (apiOption != null) {
api = apiOption
return
}
if (System.getenv("KM_API") != null) {
api = System.getenv("KM_API")
return
}
if (Config.DEFAULT_FILE.exists()) {
val props: Properties = new Properties()
val stream: FileInputStream = new FileInputStream(Config.DEFAULT_FILE)
props.load(stream)
stream.close()
api = props.getProperty("api")
if (api != null) return
}
throw new Error("Undefined api. Provide either cli option or config default value")
}
  /**
   * POSTs `params` (form-urlencoded) to api + "/api" + uri and parses the JSON
   * response into a Map. Returns null for a blank response body.
   * Throws IOException on HTTP errors or unparseable JSON.
   */
  private[kafka] def sendRequest(uri: String, params: util.Map[String, String]): Map[String, Object] = {
    // Builds "k=v&k2=v2"; a null value yields a bare key without "=".
    def queryString(params: util.Map[String, String]): String = {
      var s = ""
      for ((name, value) <- params) {
        if (!s.isEmpty) s += "&"
        s += URLEncoder.encode(name, "utf-8")
        if (value != null) s += "=" + URLEncoder.encode(value, "utf-8")
      }
      s
    }
    val qs: String = queryString(params)
    val url: String = api + (if (api.endsWith("/")) "" else "/") + "api" + uri
    val connection: HttpURLConnection = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
    var response: String = null
    try {
      connection.setRequestMethod("POST")
      connection.setDoOutput(true)
      val data = qs.getBytes("utf-8")
      connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded; charset=utf-8")
      connection.setRequestProperty("Content-Length", "" + data.length)
      connection.getOutputStream.write(data)
      try { response = Source.fromInputStream(connection.getInputStream).getLines().mkString}
      catch {
        case e: IOException =>
          // Non-200 responses are reported with their status code; other I/O errors propagate as-is.
          if (connection.getResponseCode != 200) throw new IOException(connection.getResponseCode + " - " + connection.getResponseMessage)
          else throw e
      }
    } finally {
      connection.disconnect()
    }
    if (response.trim().isEmpty) return null
    var node: Map[String, Object] = null
    try { node = Util.parseJson(response)}
    catch { case e: IllegalArgumentException => throw new IOException(e) }
    node
  }
  // Cli-level failure carrying a user-facing message; main() prints it and exits with code 1.
  class Error(message: String) extends java.lang.Error(message) {}
object SchedulerCli {
def isEnabled: Boolean = System.getenv("KM_NO_SCHEDULER") == null
def handle(args: Array[String], help: Boolean = false): Unit = {
val parser = newParser()
parser.accepts("debug", "Debug mode. Default - " + Config.debug)
.withRequiredArg().ofType(classOf[java.lang.Boolean])
parser.accepts("storage",
"""Storage for cluster state. Examples:
| - file:kafka-mesos.json
| - zk:/kafka-mesos
|Default - """.stripMargin + Config.storage)
.withRequiredArg().ofType(classOf[String])
parser.accepts("master",
"""Master connection settings. Examples:
| - master:5050
| - master:5050,master2:5050
| - zk://master:2181/mesos
| - zk://username:password@master:2181
| - zk://master:2181,master2:2181/mesos""".stripMargin)
.withRequiredArg().ofType(classOf[String])
parser.accepts("user", "Mesos user to run tasks. Default - none")
.withRequiredArg().ofType(classOf[String])
parser.accepts("principal", "Principal (username) used to register framework. Default - none")
.withRequiredArg().ofType(classOf[String])
parser.accepts("secret", "Secret (password) used to register framework. Default - none")
.withRequiredArg().ofType(classOf[String])
parser.accepts("framework-name", "Framework name. Default - " + Config.frameworkName)
.withRequiredArg().ofType(classOf[String])
parser.accepts("framework-role", "Framework role. Default - " + Config.frameworkRole)
.withRequiredArg().ofType(classOf[String])
parser.accepts("framework-timeout", "Framework timeout (30s, 1m, 1h). Default - " + Config.frameworkTimeout)
.withRequiredArg().ofType(classOf[String])
parser.accepts("api", "Api url. Example: http://master:7000")
.withRequiredArg().ofType(classOf[String])
parser.accepts("bind-address", "Scheduler bind address (master, 0.0.0.0, 192.168.50.*, if:eth1). Default - all")
.withRequiredArg().ofType(classOf[String])
parser.accepts("zk",
"""Kafka zookeeper.connect. Examples:
| - master:2181
| - master:2181,master2:2181""".stripMargin)
.withRequiredArg().ofType(classOf[String])
parser.accepts("jre", "JRE zip-file (jre-7-openjdk.zip). Default - none.")
.withRequiredArg().ofType(classOf[String])
parser.accepts("log", "Log file to use. Default - stdout.")
.withRequiredArg().ofType(classOf[String])
val configArg = parser.nonOptions()
if (help) {
printLine("Start scheduler \\nUsage: scheduler [options] [config.properties]\\n")
parser.printHelpOn(out)
return
}
var options: OptionSet = null
try { options = parser.parse(args: _*) }
catch {
case e: OptionException =>
parser.printHelpOn(out)
printLine()
throw new Error(e.getMessage)
}
var configFile = if (options.valueOf(configArg) != null) new File(options.valueOf(configArg)) else null
if (configFile != null && !configFile.exists()) throw new Error(s"config-file $configFile not found")
if (configFile == null && Config.DEFAULT_FILE.exists()) configFile = Config.DEFAULT_FILE
if (configFile != null) {
printLine("Loading config defaults from " + configFile)
Config.load(configFile)
}
val debug = options.valueOf("debug").asInstanceOf[java.lang.Boolean]
if (debug != null) Config.debug = debug
val storage = options.valueOf("storage").asInstanceOf[String]
if (storage != null) Config.storage = storage
val provideOption = "Provide either cli option or config default value"
val master = options.valueOf("master").asInstanceOf[String]
if (master != null) Config.master = master
else if (Config.master == null) throw new Error(s"Undefined master. $provideOption")
val user = options.valueOf("user").asInstanceOf[String]
if (user != null) Config.user = user
val principal = options.valueOf("principal").asInstanceOf[String]
if (principal != null) Config.principal = principal
val secret = options.valueOf("secret").asInstanceOf[String]
if (secret != null) Config.secret = secret
val frameworkName = options.valueOf("framework-name").asInstanceOf[String]
if (frameworkName != null) Config.frameworkName = frameworkName
val frameworkRole = options.valueOf("framework-role").asInstanceOf[String]
if (frameworkRole != null) Config.frameworkRole = frameworkRole
val frameworkTimeout = options.valueOf("framework-timeout").asInstanceOf[String]
if (frameworkTimeout != null)
try { Config.frameworkTimeout = new Period(frameworkTimeout) }
catch { case e: IllegalArgumentException => throw new Error("Invalid framework-timeout") }
val api = options.valueOf("api").asInstanceOf[String]
if (api != null) Config.api = api
else if (Config.api == null) throw new Error(s"Undefined api. $provideOption")
val bindAddress = options.valueOf("bind-address").asInstanceOf[String]
if (bindAddress != null)
try { Config.bindAddress = new BindAddress(bindAddress) }
catch { case e: IllegalArgumentException => throw new Error("Invalid bind-address") }
val zk = options.valueOf("zk").asInstanceOf[String]
if (zk != null) Config.zk = zk
else if (Config.zk == null) throw new Error(s"Undefined zk. $provideOption")
val jre = options.valueOf("jre").asInstanceOf[String]
if (jre != null) Config.jre = new File(jre)
if (Config.jre != null && !Config.jre.exists()) throw new Error("JRE file doesn't exists")
val log = options.valueOf("log").asInstanceOf[String]
if (log != null) Config.log = new File(log)
if (Config.log != null) printLine(s"Logging to ${Config.log}")
Scheduler.start()
}
}
object BrokerCli {
    /**
     * Dispatches a broker sub-command. A leading non-dash argument is the
     * broker expression; it is required for every command except "list".
     * With help = true only prints help for `cmd`.
     */
    def handle(cmd: String, _args: Array[String], help: Boolean = false): Unit = {
      var args = _args
      if (help) {
        handleHelp(cmd)
        return
      }
      var arg: String = null
      if (args.length > 0 && !args(0).startsWith("-")) {
        arg = args(0)
        args = args.slice(1, args.length)
      }
      if (arg == null && cmd != "list") {
        handleHelp(cmd); printLine()
        throw new Error("argument required")
      }
      cmd match {
        case "list" => handleList(arg)
        case "add" | "update" => handleAddUpdate(arg, args, cmd == "add")
        case "remove" => handleRemove(arg)
        case "start" | "stop" => handleStartStop(arg, args, cmd == "start")
        case _ => throw new Error("unsupported broker command " + cmd)
      }
    }
    /** Prints broker help: the command list (cmd == null) or per-command usage. */
    private def handleHelp(cmd: String): Unit = {
      cmd match {
        case null =>
          printLine("Broker management commands\\nUsage: broker <command>\\n")
          printCmds()
          printLine()
          printLine("Run `help broker <command>` to see details of specific command")
        case "list" =>
          handleList(null, help = true)
        case "add" | "update" =>
          handleAddUpdate(null, null, cmd == "add", help = true)
        case "remove" =>
          handleRemove(null, help = true)
        case "start" | "stop" =>
          handleStartStop(null, null, cmd == "start", help = true)
        case _ =>
          throw new Error(s"unsupported broker command $cmd")
      }
    }
    /**
     * Lists brokers matching `expr` (all brokers when expr == null) by querying
     * /broker/list and pretty-printing each returned broker.
     */
    private def handleList(expr: String, help: Boolean = false): Unit = {
      if (help) {
        printLine("List brokers\\nUsage: broker list [<broker-expr>] [options]\\n")
        handleGenericOptions(null, help = true)
        printLine()
        Expr.printBrokerExprExamples(out)
        return
      }
      val params = new util.HashMap[String, String]()
      if (expr != null) params.put("broker", expr)
      var json: Map[String, Object] = null
      try { json = sendRequest("/broker/list", params) }
      catch { case e: IOException => throw new Error("" + e) }
      val brokerNodes = json("brokers").asInstanceOf[List[Map[String, Object]]]
      // Header pluralizes "broker" based on the result count.
      val title = if (brokerNodes.isEmpty) "no brokers" else "broker" + (if (brokerNodes.size > 1) "s" else "") + ":"
      printLine(title)
      for (brokerNode <- brokerNodes) {
        val broker = new Broker()
        broker.fromJson(brokerNode)
        printBroker(broker, 1)
        printLine()
      }
    }
    /**
     * Adds (add == true) or updates brokers matching `expr`: parses per-broker
     * options, forwards only the options the user supplied to /broker/add|update,
     * and prints the resulting brokers.
     */
    private def handleAddUpdate(expr: String, args: Array[String], add: Boolean, help: Boolean = false): Unit = {
      val parser = newParser()
      parser.accepts("cpus", "cpu amount (0.5, 1, 2)").withRequiredArg().ofType(classOf[java.lang.Double])
      parser.accepts("mem", "mem amount in Mb").withRequiredArg().ofType(classOf[java.lang.Long])
      parser.accepts("heap", "heap amount in Mb").withRequiredArg().ofType(classOf[java.lang.Long])
      parser.accepts("port", "port or range (31092, 31090..31100). Default - auto").withRequiredArg().ofType(classOf[java.lang.String])
      parser.accepts("bind-address", "broker bind address (broker0, 192.168.50.*, if:eth1). Default - auto").withRequiredArg().ofType(classOf[java.lang.String])
      parser.accepts("stickiness-period", "stickiness period to preserve same node for broker (5m, 10m, 1h)").withRequiredArg().ofType(classOf[String])
      parser.accepts("options", "options or file. Examples:\\n log.dirs=/tmp/kafka/$id,num.io.threads=16\\n file:server.properties").withRequiredArg()
      parser.accepts("log4j-options", "log4j options or file. Examples:\\n log4j.logger.kafka=DEBUG\\\\, kafkaAppender\\n file:log4j.properties").withRequiredArg()
      parser.accepts("jvm-options", "jvm options string (-Xms128m -XX:PermSize=48m)").withRequiredArg()
      parser.accepts("constraints", "constraints (hostname=like:master,rack=like:1.*). See below.").withRequiredArg()
      parser.accepts("failover-delay", "failover delay (10s, 5m, 3h)").withRequiredArg().ofType(classOf[String])
      parser.accepts("failover-max-delay", "max failover delay. See failoverDelay.").withRequiredArg().ofType(classOf[String])
      parser.accepts("failover-max-tries", "max failover tries. Default - none").withRequiredArg().ofType(classOf[String])
      if (help) {
        val cmd = if (add) "add" else "update"
        printLine(s"${cmd.capitalize} broker\\nUsage: broker $cmd <broker-expr> [options]\\n")
        parser.printHelpOn(out)
        printLine()
        handleGenericOptions(null, help = true)
        printLine()
        Expr.printBrokerExprExamples(out)
        printLine()
        printConstraintExamples()
        if (!add) printLine("\\nNote: use \\"\\" arg to unset an option")
        return
      }
      var options: OptionSet = null
      try { options = parser.parse(args: _*) }
      catch {
        case e: OptionException =>
          parser.printHelpOn(out)
          printLine()
          throw new Error(e.getMessage)
      }
      val cpus = options.valueOf("cpus").asInstanceOf[java.lang.Double]
      val mem = options.valueOf("mem").asInstanceOf[java.lang.Long]
      val heap = options.valueOf("heap").asInstanceOf[java.lang.Long]
      val port = options.valueOf("port").asInstanceOf[String]
      val bindAddress = options.valueOf("bind-address").asInstanceOf[String]
      val stickinessPeriod = options.valueOf("stickiness-period").asInstanceOf[String]
      val constraints = options.valueOf("constraints").asInstanceOf[String]
      val options_ = options.valueOf("options").asInstanceOf[String]
      val log4jOptions = options.valueOf("log4j-options").asInstanceOf[String]
      val jvmOptions = options.valueOf("jvm-options").asInstanceOf[String]
      val failoverDelay = options.valueOf("failover-delay").asInstanceOf[String]
      val failoverMaxDelay = options.valueOf("failover-max-delay").asInstanceOf[String]
      val failoverMaxTries = options.valueOf("failover-max-tries").asInstanceOf[String]
      // Only options explicitly supplied are sent, so an update leaves others untouched.
      val params = new util.LinkedHashMap[String, String]
      params.put("broker", expr)
      if (cpus != null) params.put("cpus", "" + cpus)
      if (mem != null) params.put("mem", "" + mem)
      if (heap != null) params.put("heap", "" + heap)
      if (port != null) params.put("port", port)
      if (bindAddress != null) params.put("bindAddress", bindAddress)
      if (stickinessPeriod != null) params.put("stickinessPeriod", stickinessPeriod)
      if (options_ != null) params.put("options", optionsOrFile(options_))
      if (constraints != null) params.put("constraints", constraints)
      if (log4jOptions != null) params.put("log4jOptions", optionsOrFile(log4jOptions))
      if (jvmOptions != null) params.put("jvmOptions", jvmOptions)
      if (failoverDelay != null) params.put("failoverDelay", failoverDelay)
      if (failoverMaxDelay != null) params.put("failoverMaxDelay", failoverMaxDelay)
      if (failoverMaxTries != null) params.put("failoverMaxTries", failoverMaxTries)
      var json: Map[String, Object] = null
      try { json = sendRequest("/broker/" + (if (add) "add" else "update"), params) }
      catch { case e: IOException => throw new Error("" + e) }
      val brokerNodes: List[Map[String, Object]] = json("brokers").asInstanceOf[List[Map[String, Object]]]
      val addedUpdated = if (add) "added" else "updated"
      val brokers = "broker" + (if (brokerNodes.length > 1) "s" else "")
      printLine(s"$brokers $addedUpdated:")
      for (brokerNode <- brokerNodes) {
        val broker: Broker = new Broker()
        broker.fromJson(brokerNode)
        printBroker(broker, 1)
        printLine()
      }
    }
    /** Removes brokers matching `expr` via /broker/remove and reports the removed ids. */
    private def handleRemove(expr: String, help: Boolean = false): Unit = {
      if (help) {
        printLine("Remove broker\\nUsage: broker remove <broker-expr> [options]\\n")
        handleGenericOptions(null, help = true)
        printLine()
        Expr.printBrokerExprExamples(out)
        return
      }
      var json: Map[String, Object] = null
      try { json = sendRequest("/broker/remove", Collections.singletonMap("broker", expr)) }
      catch { case e: IOException => throw new Error("" + e) }
      // "ids" is a comma-separated string; a comma implies more than one broker.
      val ids = json("ids").asInstanceOf[String]
      val brokers = "broker" + (if (ids.contains(",")) "s" else "")
      printLine(s"$brokers $ids removed")
    }
    /**
     * Starts (start == true) or stops brokers matching `expr` via /broker/start|stop.
     * Supports --timeout, and --force for stop only. Reports per-broker state;
     * a server-side "timeout" status is surfaced as an Error.
     */
    private def handleStartStop(expr: String, args: Array[String], start: Boolean, help: Boolean = false): Unit = {
      val parser = newParser()
      parser.accepts("timeout", "timeout (30s, 1m, 1h). 0s - no timeout").withRequiredArg().ofType(classOf[String])
      if (!start) parser.accepts("force", "forcibly stop").withOptionalArg().ofType(classOf[String])
      if (help) {
        val cmd = if (start) "start" else "stop"
        printLine(s"${cmd.capitalize} broker\\nUsage: broker $cmd <broker-expr> [options]\\n")
        parser.printHelpOn(out)
        printLine()
        handleGenericOptions(null, help = true)
        printLine()
        Expr.printBrokerExprExamples(out)
        return
      }
      var options: OptionSet = null
      try { options = parser.parse(args: _*) }
      catch {
        case e: OptionException =>
          parser.printHelpOn(out)
          printLine()
          throw new Error(e.getMessage)
      }
      val cmd: String = if (start) "start" else "stop"
      val timeout: String = options.valueOf("timeout").asInstanceOf[String]
      val force: Boolean = options.has("force")
      val params = new util.LinkedHashMap[String, String]()
      params.put("broker", expr)
      if (timeout != null) params.put("timeout", timeout)
      // "force" is a flag: present with a null value.
      if (force) params.put("force", null)
      var json: Map[String, Object] = null
      try { json = sendRequest("/broker/" + cmd, params) }
      catch { case e: IOException => throw new Error("" + e) }
      val status = json("status").asInstanceOf[String]
      val brokerNodes: List[Map[String, Object]] = json("brokers").asInstanceOf[List[Map[String, Object]]]
      val brokers = "broker" + (if (brokerNodes.size > 1) "s" else "")
      val startStop = if (start) "start" else "stop"
      // started|stopped|scheduled|timeout
      if (status == "timeout") throw new Error(s"$brokers $startStop timeout")
      else if (status == "scheduled") printLine(s"$brokers scheduled to $startStop:")
      else printLine(s"$brokers $status:")
      for (brokerNode <- brokerNodes) {
        val broker: Broker = new Broker()
        broker.fromJson(brokerNode)
        printBroker(broker, 1)
        printLine()
      }
    }
private def printCmds(): Unit = {
printLine("Commands:")
printLine("list - list brokers", 1)
printLine("add - add broker", 1)
printLine("update - update broker", 1)
printLine("remove - remove broker", 1)
printLine("start - start broker", 1)
printLine("stop - stop broker", 1)
}
    /** Pretty-prints one broker (config, failover, stickiness and, when present, its task). */
    private def printBroker(broker: Broker, indent: Int): Unit = {
      printLine("id: " + broker.id, indent)
      printLine("active: " + broker.active, indent)
      printLine("state: " + broker.state(), indent)
      printLine("resources: " + "cpus:" + "%.2f".format(broker.cpus) + ", mem:" + broker.mem + ", heap:" + broker.heap + ", port:" + (if (broker.port != null) broker.port else "auto"), indent)
      if (broker.bindAddress != null) printLine("bind-address: " + broker.bindAddress, indent)
      if (!broker.constraints.isEmpty) printLine("constraints: " + Util.formatMap(broker.constraints), indent)
      if (!broker.options.isEmpty) printLine("options: " + Util.formatMap(broker.options), indent)
      if (!broker.log4jOptions.isEmpty) printLine("log4j-options: " + Util.formatMap(broker.log4jOptions), indent)
      if (broker.jvmOptions != null) printLine("jvm-options: " + broker.jvmOptions, indent)
      var failover = "failover:"
      failover += " delay:" + broker.failover.delay
      failover += ", max-delay:" + broker.failover.maxDelay
      if (broker.failover.maxTries != null) failover += ", max-tries:" + broker.failover.maxTries
      printLine(failover, indent)
      var stickiness = "stickiness:"
      stickiness += " period:" + broker.stickiness.period
      if (broker.stickiness.hostname != null) stickiness += ", hostname:" + broker.stickiness.hostname
      // NOTE(review): the guard tests stickiness.stopTime but the printed value is
      // stickiness.expires — confirm these refer to the same/derived field.
      if (broker.stickiness.stopTime != null) stickiness += ", expires:" + Str.dateTime(broker.stickiness.expires)
      printLine(stickiness, indent)
      val task = broker.task
      if (task != null) {
        printLine("task: ", indent)
        printLine("id: " + broker.task.id, indent + 1)
        printLine("state: " + task.state, indent + 1)
        // When a bind-address is set the endpoint may differ from the hostname, so both are shown.
        if (task.endpoint != null) printLine("endpoint: " + task.endpoint + (if (broker.bindAddress != null) " (" + task.hostname + ")" else ""), indent + 1)
        if (!task.attributes.isEmpty) printLine("attributes: " + Util.formatMap(task.attributes), indent + 1)
      }
    }
/** Prints usage examples for broker placement constraints. */
private def printConstraintExamples(): Unit = {
  printLine("constraint examples:")
  val examples = Seq(
    "like:master - value equals 'master'",
    "unlike:master - value not equals 'master'",
    "like:slave.* - value starts with 'slave'",
    "unique - all values are unique",
    "cluster - all values are the same",
    "cluster:master - value equals 'master'",
    "groupBy - all values are the same",
    "groupBy:3 - all values are within 3 different groups"
  )
  examples.foreach(printLine(_, 1))
}
}
/**
 * CLI handler for `topic` commands (list/add/update/rebalance).
 * Each command builds request params, POSTs them to the scheduler's REST API
 * via sendRequest, and pretty-prints the JSON response.
 */
object TopicCli {
  /**
   * Dispatches a topic sub-command.
   *
   * @param cmd   sub-command name (list | add | update | rebalance)
   * @param _args remaining CLI tokens; a leading non-option token is the
   *              topic expression/name argument
   * @param help  when true, prints help for `cmd` instead of executing it
   */
  def handle(cmd: String, _args: Array[String], help: Boolean = false): Unit = {
    var args = _args

    if (help) {
      handleHelp(cmd)
      return
    }

    // First token that is not an option ("-...") is the command argument.
    var arg: String = null
    if (args.length > 0 && !args(0).startsWith("-")) {
      arg = args(0)
      args = args.slice(1, args.length)
    }

    // Every command except `list` requires an argument.
    if (arg == null && cmd != "list") {
      handleHelp(cmd); printLine()
      throw new Error("argument required")
    }

    cmd match {
      case "list" => handleList(arg)
      case "add" | "update" => handleAddUpdate(arg, args, cmd == "add")
      case "rebalance" => handleRebalance(arg, args)
      case _ => throw new Error("unsupported topic command " + cmd)
    }
  }

  /** Prints help for the given sub-command, or an overview when cmd is null. */
  def handleHelp(cmd: String): Unit = {
    cmd match {
      case null =>
        printLine("Topic management commands\\nUsage: topic <command>\\n")
        printCmds()
        printLine()
        printLine("Run `help topic <command>` to see details of specific command")
      case "list" =>
        handleList(null, help = true)
      case "add" | "update" =>
        handleAddUpdate(null, null, cmd == "add", help = true)
      case "rebalance" =>
        handleRebalance(null, null, help = true)
      case _ =>
        throw new Error(s"unsupported topic command $cmd")
    }
  }

  /**
   * Lists topics, optionally filtered by a topic expression.
   *
   * @param expr topic expression or null for all topics
   * @param help when true, prints usage and returns
   */
  def handleList(expr: String, help: Boolean = false): Unit = {
    if (help) {
      printLine("List topics\\nUsage: topic list [<topic-expr>]\\n")
      handleGenericOptions(null, help = true)
      printLine()
      Expr.printTopicExprExamples(out)
      return
    }

    val params = new util.LinkedHashMap[String, String]
    if (expr != null) params.put("topic", expr)

    var json: Map[String, Object] = null
    try { json = sendRequest("/topic/list", params) }
    catch { case e: IOException => throw new Error("" + e) }

    val topicsNodes: List[Map[String, Object]] = json("topics").asInstanceOf[List[Map[String, Object]]]
    val title: String = if (topicsNodes.isEmpty) "no topics" else "topic" + (if (topicsNodes.size > 1) "s" else "") + ":"
    printLine(title)

    for (topicNode <- topicsNodes) {
      val topic = new Topic()
      topic.fromJson(topicNode)
      printTopic(topic, 1)
      printLine()
    }
  }

  /**
   * Adds or updates topics.
   *
   * @param name topic expression/name
   * @param args option tokens (--broker/--partitions/--replicas only for add,
   *             --options for both)
   * @param add  true for `add`, false for `update`
   * @param help when true, prints usage and returns
   */
  def handleAddUpdate(name: String, args: Array[String], add: Boolean, help: Boolean = false): Unit = {
    val cmd = if (add) "add" else "update"
    val parser = newParser()

    // broker/partitions/replicas are only meaningful at creation time.
    if (add) {
      parser.accepts("broker", "<broker-expr>. Default - *. See below.").withRequiredArg().ofType(classOf[String])
      parser.accepts("partitions", "partitions count. Default - 1").withRequiredArg().ofType(classOf[Integer])
      parser.accepts("replicas", "replicas count. Default - 1").withRequiredArg().ofType(classOf[Integer])
    }
    parser.accepts("options", "topic options. Example: flush.ms=60000,retention.ms=6000000").withRequiredArg().ofType(classOf[String])

    if (help) {
      printLine(s"${cmd.capitalize} topic\\nUsage: topic $cmd <topic-expr> [options]\\n")
      parser.printHelpOn(out)
      printLine()
      handleGenericOptions(null, help = true)
      printLine()
      Expr.printTopicExprExamples(out)
      if (add) {
        printLine()
        Expr.printBrokerExprExamples(out)
      }
      return
    }

    var options: OptionSet = null
    try { options = parser.parse(args: _*) }
    catch {
      case e: OptionException =>
        parser.printHelpOn(out)
        printLine()
        throw new Error(e.getMessage)
    }

    val broker = options.valueOf("broker").asInstanceOf[String]
    val partitions = options.valueOf("partitions").asInstanceOf[Integer]
    val replicas = options.valueOf("replicas").asInstanceOf[Integer]
    val options_ = options.valueOf("options").asInstanceOf[String]

    val params = new util.LinkedHashMap[String, String]
    params.put("topic", name)
    if (broker != null) params.put("broker", broker)
    if (partitions != null) params.put("partitions", "" + partitions)
    if (replicas != null) params.put("replicas", "" + replicas)
    // Fix: guard on the option *value*; `options` (the parsed OptionSet) is
    // never null at this point, so the old `options != null` check could put
    // a null "options" value into the request params.
    if (options_ != null) params.put("options", options_)

    var json: Map[String, Object] = null
    try { json = sendRequest(s"/topic/$cmd", params) }
    catch { case e: IOException => throw new Error("" + e) }

    val topicNodes = json("topics").asInstanceOf[List[Map[String, Object]]]
    val addedUpdated = if (add) "added" else "updated"
    val title = s"topic${if (topicNodes.size > 1) "s" else ""} $addedUpdated:"
    printLine(title)

    for (topicNode <- topicNodes) {
      val topic = new Topic()
      topic.fromJson(topicNode)
      printTopic(topic, 1)
      printLine()
    }
  }

  /**
   * Starts a rebalance, or queries its status when exprOrStatus == "status".
   *
   * @param exprOrStatus topic expression, or the literal "status"
   * @param args         option tokens (--broker/--replicas/--timeout)
   * @param help         when true, prints usage and returns
   */
  private def handleRebalance(exprOrStatus: String, args: Array[String], help: Boolean = false): Unit = {
    val parser = newParser()
    parser.accepts("broker", "<broker-expr>. Default - *. See below.").withRequiredArg().ofType(classOf[String])
    parser.accepts("replicas", "replicas count. Default - 1").withRequiredArg().ofType(classOf[Integer])
    parser.accepts("timeout", "timeout (30s, 1m, 1h). 0s - no timeout").withRequiredArg().ofType(classOf[String])

    if (help) {
      printLine("Rebalance topics\\nUsage: topic rebalance <topic-expr>|status [options]\\n")
      parser.printHelpOn(out)
      printLine()
      handleGenericOptions(null, help = true)
      printLine()
      Expr.printTopicExprExamples(out)
      printLine()
      Expr.printBrokerExprExamples(out)
      return
    }

    var options: OptionSet = null
    try { options = parser.parse(args: _*) }
    catch {
      case e: OptionException =>
        parser.printHelpOn(out)
        printLine()
        throw new Error(e.getMessage)
    }

    val broker: String = options.valueOf("broker").asInstanceOf[String]
    val replicas: Integer = options.valueOf("replicas").asInstanceOf[Integer]
    val timeout: String = options.valueOf("timeout").asInstanceOf[String]

    val params = new util.LinkedHashMap[String, String]()
    // "status" is a pseudo-argument: query state without starting a rebalance.
    if (exprOrStatus != "status") params.put("topic", exprOrStatus)
    if (broker != null) params.put("broker", broker)
    if (replicas != null) params.put("replicas", "" + replicas)
    if (timeout != null) params.put("timeout", timeout)

    var json: Map[String, Object] = null
    try { json = sendRequest("/topic/rebalance", params) }
    catch { case e: IOException => throw new Error("" + e) }

    val status = json("status").asInstanceOf[String]
    val error = if (json.contains("error")) json("error").asInstanceOf[String] else ""
    val state: String = json("state").asInstanceOf[String]

    val is: String = if (status == "idle" || status == "running") "is " else ""
    val colon: String = if (state.isEmpty && error.isEmpty) "" else ":"

    // status is one of: started|completed|failed|running|idle|timeout
    if (status == "timeout") throw new Error("Rebalance timeout:\\n" + state)
    printLine(s"Rebalance $is$status$colon $error")
    if (error.isEmpty && !state.isEmpty) printLine(state)
  }

  /** Prints the list of supported topic sub-commands. */
  private def printCmds(): Unit = {
    printLine("Commands:")
    printLine("list - list topics", 1)
    printLine("add - add topic", 1)
    printLine("update - update topic", 1)
    printLine("rebalance - rebalance topics", 1)
  }

  /** Prints a single topic (name, partition state, non-empty options). */
  private def printTopic(topic: Topic, indent: Int): Unit = {
    printLine("name: " + topic.name, indent)
    printLine("partitions: " + topic.partitionsState, indent)
    if (!topic.options.isEmpty) printLine("options: " + Util.formatMap(topic.options), indent)
  }
}
}
|
yonglehou/kafka-1
|
src/scala/ly/stealth/mesos/kafka/Cli.scala
|
Scala
|
apache-2.0
| 34,750
|
package sigmastate.eval
import org.ergoplatform._
import sigmastate._
import sigmastate.Values._
import sigmastate.helpers.ContextEnrichingTestProvingInterpreter
import sigmastate.lang.LangTests
import sigmastate.utxo._
import scalan.BaseCtxTests
import scalan.util.BenchmarkUtil._
import special.sigma._
/**
 * Integration-style compiler tests: each `...Case` method builds a Case with
 * an ErgoScript source, an expected graph-IR calc function, an optional cost
 * function, the expected ErgoTree and an expected Result(value, cost, size).
 * NOTE(review): all tests are `ignore`d; presumably kept as reduction
 * regression fixtures — confirm intent before re-enabling.
 */
class CompilerItTest extends BaseCtxTests
with LangTests with ExampleContracts with ErgoScriptTestkit {
import IR._
import builder._
import CollBuilder._
import Context._
import Coll._
import SigmaProp._
import BigInt._
import sigmastate.serialization.OpCodes._
import Liftables._
// Simple Int constant: script "1" should reduce to IntConstant(1).
def intConstCase = {
Case(env, "intConst", "1", ergoCtx,
calc = {_ => 1 },
cost = {_ => constCost[Int]},
size = null,
tree = IntConstant(1), Result(1, 1, 4))
}
ignore("intConstCase") {
intConstCase.doReduce
}
// BigInt constant bound in `env` under the name "big".
def bigIntegerConstCase = {
Case(env, "bigIntegerConst", "big", ergoCtx,
calc = {_ => bigSym },
cost = {_ => constCost[BigInt]},
size = null,
tree = BigIntConstant(big), Result(big, 1, 32))
}
ignore("bigIntegerConstCase") {
bigIntegerConstCase.doReduce
}
// Addition of two BigInt constants; cost = two const costs + the "+" op cost.
def addBigIntegerConstsCase = {
// val size = (sizeOf(bigSym) max sizeOf(n1Sym)) + 1L
val res = big.add(n1)
Case(env, "addBigIntegerConsts", "big + n1", ergoCtx,
calc = {_ => bigSym.add(n1Sym) },
cost = {_ => constCost[BigInt] + constCost[BigInt] +
costOf("+", SFunc(Vector(SBigInt, SBigInt), SBigInt)) },
size = null,
tree = mkPlus(BigIntConstant(big), BigIntConstant(n1)),
Result(res, 12, 32))
}
ignore("addBigIntegerConstsCase") {
addBigIntegerConstsCase.doReduce()
}
// Byte-array constant lifted into a Coll[Byte].
def arrayConstCase = {
val arr1 = env("arr1").asInstanceOf[Array[Byte]]
val col1Sym = liftConst(Colls.fromArray(arr1))
val res = Colls.fromArray(arr1).toArray
Case(env, "arrayConst", "arr1", ergoCtx,
calc = {_ => col1Sym },
cost = {_ => constCost[Coll[Byte]] },
size = null,
tree = ByteArrayConstant(arr1), Result(res, 1, 2))
}
ignore("arrayConstCase") {
arrayConstCase.doReduce()
}
// Single SigmaProp constant (cost left unchecked: cost = null).
def sigmaPropConstCase = {
val res = dslValue.SigmaProp(p1)
val resSym = liftConst(res)
Case(env, "sigmaPropConst", "p1", ergoCtx,
calc = {_ => resSym },
cost = null,
size = null,
tree = SigmaPropConstant(p1), Result(p1, 10052, 33))
}
ignore("sigmaPropConstCase") {
sigmaPropConstCase.doReduce()
}
// Conjunction of two SigmaProp constants -> SigmaAnd / CAND.
def andSigmaPropConstsCase = {
import SigmaDslBuilder._
val p1Dsl = dslValue.SigmaProp(p1)
val p2Dsl = dslValue.SigmaProp(p2)
val p1Sym: Ref[SigmaProp] = liftConst(p1Dsl)
val p2Sym: Ref[SigmaProp] = liftConst(p2Dsl)
Case(env, "andSigmaPropConsts", "p1 && p2", ergoCtx,
calc = {_ => dsl.allZK(colBuilder.fromItems(p1Sym, p2Sym)) },
cost = null,
size = null,
tree = SigmaAnd(Seq(SigmaPropConstant(p1), SigmaPropConstant(p2))),
Result(CAND(Seq(p1, p2)), 20124, 67))
}
ignore("andSigmaPropConstsCase") {
andSigmaPropConstsCase.doReduce()
}
// Mapping over a BigInt collection; cost function retained below as a
// commented-out reference implementation.
def bigIntArray_Map_Case = {
val res = bigIntegerArr1.map(n => n.add(n1)).toArray
Case(env, "bigIntArray_Map",
"bigIntArr1.map { (i: BigInt) => i + n1 }", ergoCtx,
calc = { ctx =>
val vals = liftConst(bigIntegerArr1)
vals.map(fun(n => n.add(liftConst(n1))))
},
cost = null,
// {_ =>
// val arr = liftConst(bigIntArr1)
// val opType = SFunc(Vector(SBigInt,SBigInt), SBigInt)
// val f = fun { in: Ref[(Int, Long)] =>
// val Pair(c, s) = in
// val c1 = c + constCost[WBigInteger] + costOf("+", opType)
// val c2 = costOf("+_per_item", opType) * ((s max sizeOf(liftConst(n1))) + 1L).toInt
// c1 + c2
// }
// val arrSizes = colBuilder.fromArray(liftConst(Array(1L, 1L)))
// val costs = colBuilder.replicate(arr.length, 0).zip(arrSizes).map(f)
// constCost[Coll[WBigInteger]] + costs.sum(intPlusMonoid)
// },
size = null,
tree = mkMapCollection(BigIntArrayConstant(bigIntegerArr1), mkFuncValue(Vector((1,SBigInt)), ArithOp(ValUse(1,SBigInt), BigIntConstant(10L), PlusCode))),
Result(res, 23, 64))
}
ignore("bigIntArray_Map_Case") {
bigIntArray_Map_Case.doReduce()
}
// Slice of a BigInt collection; only the reduced value/cost/size is checked.
def bigIntArray_Slice_Case = {
Case(env, "bigIntArray_Slice_Case",
"bigIntArr1.slice(0,1)", ergoCtx,
calc = null,
cost = null,
size = null,
tree = null,
Result(bigIntegerArr1.slice(0, 1), 21, 32))
}
ignore("bigIntArray_Slice_Case") {
bigIntArray_Slice_Case.doReduce()
}
// def bigIntArray_Where_Case = {
// import SCollection._
// Case(env, "bigIntArray_Where_Case",
// "bigIntArr1.where(fun (i: BigInt) = i > 0)", ergoCtx,
// calc = null,
// cost = null,
// size = null,
// tree = null,
// Result.Ignore)
// }
// test("bigIntArray_Where_Case") {
// bigIntArray_Where_Case.doReduce()
// }
// Reading a Coll[BigInt] from SELF register R4.
def register_BigIntArr_Case = {
Case(env, "register_BigIntArr_Case",
"SELF.R4[Coll[BigInt]].get", ergoCtx,
calc = null,
cost = null,
size = null,
tree = null,
Result(bigIntegerArr1, 11, 64L))
}
ignore("register_BigIntArr_Case") {
// measure(5) repeats the reduction to expose first-run JIT/warm-up cost
// (timings from a previous run kept below for reference).
measure(5) { i =>
register_BigIntArr_Case.doReduce()
}
/*
Iter 0: 3074 ms
Iter 1: 29 ms
Iter 2: 31 ms
Iter 3: 26 ms
Iter 4: 24 ms
Total time: 3184 ms
*/
}
// Map over a register-stored collection.
def register_BigIntArr_Map_Case = {
Case(env, "register_BigIntArr_Map_Case",
"SELF.R4[Coll[BigInt]].get.map { (i: BigInt) => i + n1 }", ergoCtx,
calc = null,
cost = null,
size = null,
tree = null,
Result(bigIntegerArr1.map(i => i.add(n1)), 33, 64L))
}
ignore("register_BigIntArr_Map_Case") {
register_BigIntArr_Map_Case.doReduce()
}
// Slice of a register-stored collection; expected cost/size left unchecked.
def register_BigIntArr_Slice_Case = {
Case(env, "register_BinIntArr_Slice_Case",
"SELF.R4[Coll[BigInt]].get.slice(0,1)", ergoCtx,
calc = null,
cost = null,
size = null,
tree = null,
Result(bigIntegerArr1.slice(0,1)/*,207, 1L*/))
}
ignore("register_BigIntArr_Slice_Case") {
register_BigIntArr_Slice_Case.doReduce()
}
// Full crowd-funding contract: checks both the graph-IR calc function and
// the expected ErgoTree; reduces to FalseProp under ergoCtx.
def crowdFunding_Case = {
import SigmaDslBuilder._
import Box._
import Values._
val prover = new ContextEnrichingTestProvingInterpreter()
val backerPK = prover.dlogSecrets(0).publicImage
val projectPK = prover.dlogSecrets(1).publicImage
val env = envCF ++ Seq("projectPubKey" -> projectPK, "backerPubKey" -> backerPK)
Case(env, "crowdFunding_Case", crowdFundingScript, ergoCtx,
{ ctx: Ref[Context] =>
val backerPubKey = liftConst(dslValue.SigmaProp(backerPK))
val projectPubKey = liftConst(dslValue.SigmaProp(projectPK))
val c1 = asRep[SigmaProp](dsl.sigmaProp(ctx.HEIGHT >= toRep(timeout))) && backerPubKey
val c2 = asRep[SigmaProp](dsl.sigmaProp(dsl.allOf(colBuilder.fromItems(
ctx.HEIGHT < toRep(timeout),
ctx.OUTPUTS.exists(fun { out =>
out.value >= toRep(minToRaise) lazy_&& Thunk(out.propositionBytes === projectPubKey.propBytes)
}))
))) && projectPubKey
(c1 || c2)
},
cost = null,
size = null,
tree = BlockValue(Vector(
ValDef(1,List(),SigmaPropConstant(projectPK))),
SigmaOr(Seq(
SigmaAnd(Seq(BoolToSigmaProp(GE(Height,IntConstant(100))),SigmaPropConstant(backerPK))),
SigmaAnd(Seq(
BoolToSigmaProp(AND(Vector(
LT(Height,IntConstant(100)),
Exists(Outputs,
FuncValue(Vector((2,SBox)),
BinAnd(
GE(ExtractAmount(ValUse(2,SBox)),LongConstant(1000)),
EQ(ExtractScriptBytes(ValUse(2,SBox)), SigmaPropBytes(ValUse(1,SSigmaProp))))
)
)))),
ValUse(1,SSigmaProp)
))))),
Result({ TrivialProp.FalseProp }, 40736, 1L)
)
}
ignore("crowdFunding_Case") {
crowdFunding_Case.doReduce()
}
// def register_BinIntArr_Where_Case = {
// import SCollection._
// Case(env, "contextVar_BinIntArr_Map_Case",
// "SELF.R4[Array[BigInt]].value.where(fun (i: BigInt) = i > 0)", ergoCtx,
// calc = null,
// cost = null,
// size = null,
// tree = null,
// Result.Ignore)
// }
}
|
ScorexFoundation/sigmastate-interpreter
|
sigmastate/src/test/scala/sigmastate/eval/CompilerItTest.scala
|
Scala
|
mit
| 8,426
|
/**
* This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.internal.bsp.codec
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
/**
 * sjson-new JsonFormat for the BSP TestResult message (originId, statusCode).
 * NOTE(review): this file is generated by sbt-contraband — regenerate rather
 * than hand-editing the code.
 */
trait TestResultFormats { self: sjsonnew.BasicJsonProtocol =>
implicit lazy val TestResultFormat: JsonFormat[sbt.internal.bsp.TestResult] = new JsonFormat[sbt.internal.bsp.TestResult] {
// Deserializes a TestResult from a JSON object; fails on absent input.
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.bsp.TestResult = {
__jsOpt match {
case Some(__js) =>
unbuilder.beginObject(__js)
val originId = unbuilder.readField[Option[String]]("originId")
val statusCode = unbuilder.readField[Int]("statusCode")
unbuilder.endObject()
sbt.internal.bsp.TestResult(originId, statusCode)
case None =>
deserializationError("Expected JsObject but found None")
}
}
// Serializes a TestResult as {"originId": ..., "statusCode": ...}.
override def write[J](obj: sbt.internal.bsp.TestResult, builder: Builder[J]): Unit = {
builder.beginObject()
builder.addField("originId", obj.originId)
builder.addField("statusCode", obj.statusCode)
builder.endObject()
}
}
}
|
xuwei-k/xsbt
|
protocol/src/main/contraband-scala/sbt/internal/bsp/codec/TestResultFormats.scala
|
Scala
|
apache-2.0
| 1,152
|
package parsers.base
/**
 * Contract for timetable parsers.
 * Implementations return the parsed table as rows of cell strings
 * (outer array = rows, inner array = columns).
 */
trait TableParser {
// Parses the underlying source into a row-major 2-D string table.
def parse: Array[Array[String]]
}
|
maxmouchet/vamk-timetables
|
parsers/src/main/scala/parsers/base/TableParser.scala
|
Scala
|
mit
| 80
|
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{Calculated, CtBoolean, CtBoxIdentifier}
import uk.gov.hmrc.ct.computations.calculations.LowEmissionCarsCalculator
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
/** CT computations box LEC11: whether disposals are less than the special rate pool. */
case class LEC11(value: Boolean) extends CtBoxIdentifier("Disposals Less Than Special Rate Pool") with CtBoolean
object LEC11 extends Calculated[LEC11, ComputationsBoxRetriever] with LowEmissionCarsCalculator {
  /** Derives LEC11 from the retrieved LEC01, CP666 and CP667 box values. */
  override def calculate(fieldValueRetriever: ComputationsBoxRetriever): LEC11 = {
    val lec01 = fieldValueRetriever.retrieveLEC01()
    val cp666 = fieldValueRetriever.retrieveCP666()
    val cp667 = fieldValueRetriever.retrieveCP667()
    LEC11(disposalsLessThanSpecialRatePool(lec01, cp666, cp667))
  }
}
|
scottcutts/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/LEC11.scala
|
Scala
|
apache-2.0
| 1,323
|
// Copyright 2016 Yahoo Inc.
// Licensed under the terms of the Apache 2.0 license.
// Please see LICENSE file in the project root for terms.
package com.yahoo.ml.caffe
import org.apache.spark.{Partition, SparkContext}
import org.apache.spark.rdd.{RDD, UnionRDD}
import scala.reflect.ClassTag
/**
 * A UnionRDD whose preferred locations are supplied explicitly:
 * partition i is pinned to locs(i mod locs.length).
 */
private[caffe] class UnionRDDWLocsSpecified[T:ClassTag](sc: SparkContext, rdds: Seq[RDD[T]], locs: Array[String])
  extends UnionRDD[T](sc, rdds) {
  // Cycle through the supplied hosts by partition index.
  override def getPreferredLocations(s: Partition): Seq[String] = {
    val host = locs(s.index % locs.length)
    Seq(host)
  }
}
|
yahoo/CaffeOnSpark
|
caffe-grid/src/main/scala/com/yahoo/ml/caffe/UnionRDDWLocsSpecified.scala
|
Scala
|
apache-2.0
| 545
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn.ops
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest
import com.intel.analytics.bigdl.dllib.utils.{T}
import com.intel.analytics.bigdl.dllib.utils.RandomGenerator
import org.scalatest.{FlatSpec, Matchers}
/**
 * Tests for the Max op: forward((tensor, dimIndex)) reduces `tensor` along
 * the given dimension. Covers scalar vs one-element index tensors, keepDims,
 * and 0-based vs 1-based dimension indexing.
 */
class MaxSpec extends FlatSpec with Matchers {
"Max operation" should "works correctly" in {
import com.intel.analytics.bigdl.numeric.NumericFloat
RandomGenerator.RNG.setSeed(10)
// Input is a table of (2x5 data tensor, scalar dim index 1; 0-based => dim 1).
val input =
T(
Tensor.range(1, 10).resize(2, 5),
Tensor.scalar[Int](1)
)
// Row-wise max of [[1..5],[6..10]] along dim 1.
val expectOutput = Tensor(T(5f, 10f))
val output = Max(startFromZero = true).forward(input)
output should be(expectOutput)
}
"Max operation forward one-element tensor index" should "works correctly" in {
import com.intel.analytics.bigdl.numeric.NumericFloat
RandomGenerator.RNG.setSeed(10)
// Same as above but the dim index is a 1-element tensor instead of a scalar.
val input =
T(
Tensor.range(1, 10).resize(2, 5),
Tensor[Int](1).fill(1)
)
val expectOutput = Tensor(T(5f, 10f))
val output = Max(startFromZero = true).forward(input)
output should be(expectOutput)
}
"Max keepDims" should "works correctly" in {
import com.intel.analytics.bigdl.numeric.NumericFloat
RandomGenerator.RNG.setSeed(10)
val input =
T(
Tensor.range(1, 10).resize(2, 5),
Tensor.scalar[Int](1)
)
// keepDims = true retains the reduced dimension => shape (2, 1).
val expectOutput = Tensor(T(5f, 10f)).resize(2, 1)
val output = Max(true, true).forward(input)
output should be(expectOutput)
}
"Max dim start from 1" should "works correctly" in {
import com.intel.analytics.bigdl.numeric.NumericFloat
RandomGenerator.RNG.setSeed(10)
// startFromZero = false: dim index 2 means the second dimension (1-based).
val input =
T(
Tensor.range(1, 10).resize(2, 5),
Tensor.scalar[Int](2)
)
val expectOutput = Tensor(T(5f, 10f)).resize(2, 1)
val output = Max(true, false).forward(input)
output should be(expectOutput)
}
}
/** Round-trip serialization test for the Max op (module save/load equivalence). */
class MaxSerialTest extends ModuleSerializationTest {
override def test(): Unit = {
val max = Max[Float, Float](startFromZero = true).setName("max_pool")
// Input table: (2x3 data tensor, scalar dim index).
val input1 = Tensor[Float].range(1, 6).resize(2, 3)
val input2 = Tensor.scalar[Int](1)
val input = T(input1, input2)
runSerializationTest(max, input)
}
}
|
intel-analytics/BigDL
|
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/ops/MaxSpec.scala
|
Scala
|
apache-2.0
| 2,920
|
package lila.tournament
import akka.actor._
import akka.pattern.{ ask, pipe }
import scala.concurrent.duration._
import actorApi._
import lila.hub.actorApi.map.Ask
import makeTimeout.short
/**
 * Actor that periodically drives all started tournaments: finishes expired
 * or abandoned ones, triggers pairings, reminds active players, and records
 * monitoring metrics. Reschedules itself after every tick completes.
 */
private[tournament] final class StartedOrganizer(
api: TournamentApi,
reminder: ActorRef,
isOnline: String => Boolean,
socketHub: ActorRef) extends Actor {
override def preStart {
pairingLogger.info("Start StartedOrganizer")
// If no message (including Tick) arrives within 15s, ReceiveTimeout fires
// and the actor crashes itself so the supervisor restarts it.
context setReceiveTimeout 15.seconds
scheduleNext
}
case object Tick
def scheduleNext =
context.system.scheduler.scheduleOnce(3 seconds, self, Tick)
def receive = {
case ReceiveTimeout =>
val msg = "tournament.StartedOrganizer timed out!"
pairingLogger.error(msg)
// Throwing lets the supervision strategy restart the actor.
throw new RuntimeException(msg)
case Tick =>
val myself = self
val startAt = nowMillis
TournamentRepo.started.flatMap { started =>
// Tournaments are processed one after another, not in parallel.
lila.common.Future.traverseSequentially(started) { tour =>
PlayerRepo activeUserIds tour.id flatMap { activeUserIds =>
val nb = activeUserIds.size
// NOTE(review): `api finish tour` is wrapped in fuccess, so its own
// future is fire-and-forget — presumably intentional; confirm.
val result: Funit =
if (tour.secondsToFinish == 0) fuccess(api finish tour)
else if (!tour.isScheduled && nb < 2) fuccess(api finish tour)
else if (!tour.pairingsClosed) startPairing(tour, activeUserIds, startAt)
else funit
result >>- {
reminder ! RemindTournament(tour, activeUserIds)
} inject nb
}
}.addEffect { playerCounts =>
lila.mon.tournament.player(playerCounts.sum)
lila.mon.tournament.started(started.size)
}
}.chronometer
.mon(_.tournament.startedOrganizer.tickTime)
.logIfSlow(500, logger)(_ => "StartedOrganizer.Tick")
// Next tick is scheduled whether this one succeeded or failed.
.result andThenAnyway scheduleNext
}
// Pairs users who are waiting, active in the tournament, and not already playing.
private def startPairing(tour: Tournament, activeUserIds: List[String], startAt: Long): Funit =
getWaitingUsers(tour) zip PairingRepo.playingUserIds(tour) map {
case (waitingUsers, playingUserIds) =>
val users = waitingUsers intersect activeUserIds diff playingUserIds
api.makePairings(tour, users, startAt)
}
// Asks the tournament's socket for the users currently waiting for a pairing.
private def getWaitingUsers(tour: Tournament): Fu[WaitingUsers] =
socketHub ? Ask(tour.id, GetWaitingUsers) mapTo manifest[WaitingUsers]
}
|
clarkerubber/lila
|
modules/tournament/src/main/StartedOrganizer.scala
|
Scala
|
agpl-3.0
| 2,323
|
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.features.avro
import java.util.{Collection => JCollection, List => JList}
import com.vividsolutions.jts.geom.Geometry
import org.geotools.feature.`type`.{AttributeDescriptorImpl, Types}
import org.geotools.feature.{AttributeImpl, GeometryAttributeImpl}
import org.geotools.geometry.jts.ReferencedEnvelope
import org.geotools.util.Converters
import org.opengis.feature.`type`.{AttributeDescriptor, Name}
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.feature.{GeometryAttribute, Property}
import org.opengis.filter.identity.FeatureId
import org.opengis.geometry.BoundingBox
import scala.collection.JavaConversions._
import scala.util.Try
/**
 * Mutable SimpleFeature implementation backed by a flat attribute array,
 * used by the Avro feature serialization code. Attribute values are stored
 * positionally per the SimpleFeatureType; setAttribute converts values to
 * the descriptor's binding, while the *NoConvert variants store them as-is.
 */
class AvroSimpleFeature(id: FeatureId, sft: SimpleFeatureType)
  extends SimpleFeature
  with Serializable {

  // One slot per attribute declared by the feature type.
  val values = Array.ofDim[AnyRef](sft.getAttributeCount)
  // Lazily created so serialized features don't carry empty user data.
  @transient lazy val userData = collection.mutable.HashMap.empty[AnyRef, AnyRef]

  def getFeatureType = sft
  def getType = sft
  def getIdentifier = id
  def getID = id.getID

  // Unknown attribute names yield null rather than an exception.
  def getAttribute(name: String) = if (sft.indexOf(name) >= 0) getAttribute(sft.indexOf(name)) else null
  def getAttribute(name: Name) = getAttribute(name.getLocalPart)
  def getAttribute(index: Int) = values(index)

  def setAttribute(name: String, value: Object) = setAttribute(sft.indexOf(name), value)
  def setAttribute(name: Name, value: Object) = setAttribute(name.getLocalPart, value)
  // Converts the value to the attribute descriptor's binding before storing.
  def setAttribute(index: Int, value: Object) = setAttributeNoConvert(index,
    Converters.convert(value, getFeatureType.getDescriptor(index).getType.getBinding).asInstanceOf[AnyRef])
  def setAttributes(vals: JList[Object]) = vals.zipWithIndex.foreach { case (v, idx) => setAttribute(idx, v) }
  def setAttributes(vals: Array[Object])= vals.zipWithIndex.foreach { case (v, idx) => setAttribute(idx, v) }

  // Raw setters: store the value without any type conversion.
  def setAttributeNoConvert(index: Int, value: Object) = values(index) = value
  def setAttributeNoConvert(name: String, value: Object): Unit = setAttributeNoConvert(sft.indexOf(name), value)
  def setAttributeNoConvert(name: Name, value: Object): Unit = setAttributeNoConvert(name.getLocalPart, value)
  def setAttributesNoConvert(vals: JList[Object]) = vals.zipWithIndex.foreach { case (v, idx) => values(idx) = v }
  def setAttributesNoConvert(vals: Array[Object])= vals.zipWithIndex.foreach { case (v, idx) => values(idx) = v }

  def getAttributeCount = values.length
  def getAttributes: JList[Object] = values.toList
  // Returns null when the type has no geometry descriptor (Try absorbs the NPE).
  def getDefaultGeometry: Object = Try(sft.getGeometryDescriptor.getName).map(getAttribute).getOrElse(null)
  def setDefaultGeometry(geo: Object) = setAttribute(sft.getGeometryDescriptor.getName, geo)

  def getBounds: BoundingBox = getDefaultGeometry match {
    case g: Geometry =>
      new ReferencedEnvelope(g.getEnvelopeInternal, sft.getCoordinateReferenceSystem)
    case _ =>
      // No geometry set: an empty envelope in the type's CRS.
      new ReferencedEnvelope(sft.getCoordinateReferenceSystem)
  }

  // Idiom fix: plain if/else instead of matching on a Boolean.
  def getDefaultGeometryProperty: GeometryAttribute = {
    val geoDesc = sft.getGeometryDescriptor
    if (geoDesc != null) new GeometryAttributeImpl(getDefaultGeometry, geoDesc, null) else null
  }

  // Idiom fix: plain if/else instead of matching on a Boolean.
  def setDefaultGeometryProperty(geoAttr: GeometryAttribute) =
    if (geoAttr != null) setDefaultGeometry(geoAttr.getValue) else setDefaultGeometry(null)

  def getProperties: JCollection[Property] =
    getAttributes.zip(sft.getAttributeDescriptors).map {
      case(attribute, attributeDescriptor) =>
        new AttributeImpl(attribute, attributeDescriptor, id)
    }
  def getProperties(name: Name): JCollection[Property] = getProperties(name.getLocalPart)
  def getProperties(name: String): JCollection[Property] = getProperties.filter(_.getName.toString == name)
  def getProperty(name: Name): Property = getProperty(name.getLocalPart)
  def getProperty(name: String): Property =
    Option(sft.getDescriptor(name)) match {
      case Some(descriptor) => new AttributeImpl(getAttribute(name), descriptor, id)
      case _ => null
    }

  def getValue: JCollection[_ <: Property] = getProperties
  def setValue(values: JCollection[Property]) = values.zipWithIndex.foreach { case (p, idx) =>
    this.values(idx) = p.getValue
  }

  def getDescriptor: AttributeDescriptor = new AttributeDescriptorImpl(sft, sft.getName, 0, Int.MaxValue, true, null)
  def getName: Name = sft.getName
  def getUserData = userData
  def isNillable = true
  def setValue(newValue: Object) = setValue (newValue.asInstanceOf[JCollection[Property]])
  def validate() = values.zipWithIndex.foreach { case (v, idx) => Types.validate(getType.getDescriptor(idx), v) }

  // Equality is feature-id identity plus attribute-array equality; hashCode
  // must therefore follow the id alone.
  override def hashCode(): Int = id.hashCode()
  override def equals(obj: scala.Any): Boolean = obj match {
    case other: AvroSimpleFeature =>
      if(id.equalsExact(other.getIdentifier)) {
        java.util.Arrays.equals(values, other.getAttributes.toArray)
      } else false
    case _ =>
      false
  }
}
|
ronq/geomesa
|
geomesa-features/geomesa-feature-avro/src/main/scala/org/locationtech/geomesa/features/avro/AvroSimpleFeature.scala
|
Scala
|
apache-2.0
| 5,485
|
package org.jetbrains.plugins.scala.lang.completion
import com.intellij.codeInsight.completion._
import com.intellij.codeInsight.lookup._
import com.intellij.openapi.application.ApplicationManager
import com.intellij.patterns.PsiElementPattern.Capture
import com.intellij.patterns.{ElementPattern, PlatformPatterns, StandardPatterns}
import com.intellij.psi._
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.util.ProcessingContext
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.icons.Icons
import org.jetbrains.plugins.scala.lang.completion.ScalaAfterNewCompletionUtil._
import org.jetbrains.plugins.scala.lang.completion.handlers.{ScalaConstructorInsertHandler, ScalaGenerateAnonymousFunctionInsertHandler}
import org.jetbrains.plugins.scala.lang.completion.lookups.{LookupElementManager, ScalaChainLookupElement, ScalaLookupItem}
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi._
import org.jetbrains.plugins.scala.lang.psi.api.base.ScReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypedDefinition
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.ScSyntheticFunction
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.result.{Success, TypingContext}
import org.jetbrains.plugins.scala.lang.resolve.processor.CompletionProcessor
import org.jetbrains.plugins.scala.lang.resolve.{ResolveUtils, ScalaResolveResult, StdKinds}
import scala.annotation.tailrec
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/**
* User: Alexander Podkhalyuzin
* Date: 17.09.2009
*/
class ScalaSmartCompletionContributor extends CompletionContributor {
import org.jetbrains.plugins.scala.lang.completion.ScalaSmartCompletionContributor._
// No pre-completion initialization is needed for smart completion;
// intentionally a no-op override.
override def beforeCompletion(context: CompletionInitializationContext) {
}
private def acceptTypes(typez: Seq[ScType], variants: Array[Object], result: CompletionResultSet,
scope: GlobalSearchScope, secondCompletion: Boolean, completeThis: Boolean,
place: PsiElement, originalPlace: PsiElement) {
def isAccessible(el: ScalaLookupItem): Boolean = {
ScalaPsiUtil.nameContext(el.element) match {
case memb: ScMember =>
ResolveUtils.isAccessible(memb, place, forCompletion = true)
case _ => true
}
}
if (typez.length == 0 || typez.forall(_ == types.Nothing)) return
def applyVariant(_variant: Object, checkForSecondCompletion: Boolean = false) {
val chainVariant = _variant.isInstanceOf[ScalaChainLookupElement]
val variant = _variant match {
case el: ScalaLookupItem => el
case ch: ScalaChainLookupElement => ch.element
case _ => return
}
val elemToAdd = _variant.asInstanceOf[LookupElement]
variant match {
case el: ScalaLookupItem if isAccessible(el) =>
val elem = el.element
val subst = el.substitutor
def checkType(_tp: ScType, _subst: ScSubstitutor, chainCompletion: Boolean, etaExpanded: Boolean = false): Boolean = {
val tp = _subst.subst(_tp)
var elementAdded = false
val scType = subst.subst(tp)
import org.jetbrains.plugins.scala.lang.psi.types.Nothing
if (!scType.equiv(Nothing) && typez.exists(scType conforms _)) {
elementAdded = true
if (etaExpanded) el.etaExpanded = true
result.addElement(elemToAdd)
} else {
typez.foreach {
case ScParameterizedType(tp, Seq(arg)) if !elementAdded =>
ScType.extractClass(tp, Some(elem.getProject)) match {
case Some(clazz) if clazz.qualifiedName == "scala.Option" || clazz.qualifiedName == "scala.Some" =>
if (!scType.equiv(Nothing) && scType.conforms(arg)) {
el.someSmartCompletion = true
if (etaExpanded) el.etaExpanded = true
result.addElement(elemToAdd)
elementAdded = true
}
case _ =>
}
case _ =>
}
}
if (!elementAdded && chainCompletion && secondCompletion) {
val processor = new CompletionProcessor(StdKinds.refExprLastRef, place, false, postProcess = {
r => {
r match {
case r: ScalaResolveResult if !r.isNamedParameter =>
import org.jetbrains.plugins.scala.lang.psi.types.Nothing
val qualifier = r.fromType.getOrElse(Nothing)
val newElem = LookupElementManager.getLookupElement(r, qualifierType = qualifier, isInStableCodeReference = false)(0)
applyVariant(new ScalaChainLookupElement(el, newElem))
case _ =>
}
}
})
processor.processType(subst.subst(_tp), place)
processor.candidatesS
}
elementAdded
}
if (!el.isNamedParameterOrAssignment)
elem match {
case fun: ScSyntheticFunction =>
val second = checkForSecondCompletion && fun.paramClauses.flatten.length == 0
checkType(fun.retType, ScSubstitutor.empty, second)
case fun: ScFunction =>
if (fun.containingClass != null && fun.containingClass.qualifiedName == "scala.Predef") {
fun.name match {
case "implicitly" | "identity" | "locally" => return
case _ =>
}
}
val infer = if (chainVariant) ScSubstitutor.empty else ScalaPsiUtil.inferMethodTypesArgs(fun, subst)
val second = checkForSecondCompletion &&
fun.paramClauses.clauses.filterNot(_.isImplicit).flatMap(_.parameters).length == 0
val added = fun.returnType match {
case Success(tp, _) => checkType(tp, infer, second)
case _ => false
}
if (!added) {
fun.getType(TypingContext.empty) match {
case Success(tp, _) => checkType(tp, infer, second, etaExpanded = true)
case _ =>
}
}
case method: PsiMethod =>
val second = checkForSecondCompletion && method.getParameterList.getParametersCount == 0
val infer = if (chainVariant) ScSubstitutor.empty else ScalaPsiUtil.inferMethodTypesArgs(method, subst)
checkType(ScType.create(method.getReturnType, method.getProject, scope), infer, second)
case typed: ScTypedDefinition =>
if (!PsiTreeUtil.isContextAncestor(typed.nameContext, place, false) &&
(originalPlace == null || !PsiTreeUtil.isContextAncestor(typed.nameContext, originalPlace, false)))
for (tt <- typed.getType(TypingContext.empty)) checkType(tt, ScSubstitutor.empty, checkForSecondCompletion)
case f: PsiField =>
checkType(ScType.create(f.getType, f.getProject, scope), ScSubstitutor.empty, checkForSecondCompletion)
case _ =>
}
case _ =>
}
}
place.getParent match {
case ref: ScReferenceExpression if ref.smartQualifier == None =>
//enum and factory methods
val iterator = typez.iterator
while (iterator.hasNext) {
val tp = iterator.next()
def checkObject(o: ScObject) {
o.members.foreach {
case function: ScFunction =>
val lookup = LookupElementManager.getLookupElement(new ScalaResolveResult(function), isClassName = true,
isOverloadedForClassName = false, shouldImport = true, isInStableCodeReference = false).apply(0)
lookup.addLookupStrings(o.name + "." + function.name)
applyVariant(lookup)
case v: ScValue =>
v.declaredElements.foreach(td => {
val lookup = LookupElementManager.getLookupElement(new ScalaResolveResult(td), isClassName = true,
isOverloadedForClassName = false, shouldImport = true, isInStableCodeReference = false).apply(0)
lookup.addLookupStrings(o.name + "." + td.name)
applyVariant(lookup)
})
case v: ScVariable =>
v.declaredElements.foreach(td => {
val lookup = LookupElementManager.getLookupElement(new ScalaResolveResult(td), isClassName = true,
isOverloadedForClassName = false, shouldImport = true, isInStableCodeReference = false).apply(0)
lookup.addLookupStrings(o.name + "." + td.name)
applyVariant(lookup)
})
case obj: ScObject =>
val lookup = LookupElementManager.getLookupElement(new ScalaResolveResult(obj), isClassName = true,
isOverloadedForClassName = false, shouldImport = true, isInStableCodeReference = false).apply(0)
lookup.addLookupStrings(o.name + "." + obj.name)
applyVariant(lookup)
case _ =>
}
}
def checkTypeProjection(tp: ScType) {
tp match {
case ScProjectionType(proj, _: ScTypeAlias | _: ScClass | _: ScTrait, _) =>
ScType.extractClass(proj) match {
case Some(o: ScObject) if ResolveUtils.isAccessible(o, place, forCompletion = true) && ScalaPsiUtil.hasStablePath(o) => checkObject(o)
case _ =>
}
case _ =>
}
}
@tailrec
def checkType(tp: ScType) {
ScType.extractClass(tp) match {
case Some(c: ScClass) if c.qualifiedName == "scala.Option" || c.qualifiedName == "scala.Some" =>
tp match {
case ScParameterizedType(_, Seq(scType)) => checkType(scType)
case _ =>
}
case Some(o: ScObject) => //do nothing
case Some(clazz: ScTypeDefinition) =>
checkTypeProjection(tp)
ScalaPsiUtil.getCompanionModule(clazz) match {
case Some(o: ScObject) if ResolveUtils.isAccessible(o, place, forCompletion = true) && ScalaPsiUtil.hasStablePath(o) => checkObject(o)
case _ => //do nothing
}
case Some(p: PsiClass) if ResolveUtils.isAccessible(p, place, forCompletion = true) =>
p.getAllMethods.foreach(method => {
if (method.hasModifierProperty("static") && ResolveUtils.isAccessible(method, place, forCompletion = true)) {
val lookup = LookupElementManager.getLookupElement(new ScalaResolveResult(method), isClassName = true,
isOverloadedForClassName = false, shouldImport = true, isInStableCodeReference = false).apply(0)
lookup.addLookupStrings(p.getName + "." + method.getName)
applyVariant(lookup)
}
})
p.getFields.foreach(field => {
if (field.hasModifierProperty("static") && ResolveUtils.isAccessible(field, place, forCompletion = true)) {
val lookup = LookupElementManager.getLookupElement(new ScalaResolveResult(field), isClassName = true,
isOverloadedForClassName = false, shouldImport = true, isInStableCodeReference = false).apply(0)
lookup.addLookupStrings(p.getName + "." + field.getName)
applyVariant(lookup)
}
})
case _ => checkTypeProjection(tp)
}
}
checkType(tp)
}
variants.foreach(applyVariant(_, checkForSecondCompletion = true))
if (typez.exists(_.equiv(types.Boolean))) {
for (keyword <- Set("false", "true")) {
result.addElement(LookupElementManager.getKeywrodLookupElement(keyword, place))
}
}
if (completeThis) {
var parent = place
var foundClazz = false
while (parent != null) {
parent match {
case t: ScNewTemplateDefinition if foundClazz => //do nothing, impossible to invoke
case t: ScTemplateDefinition =>
t.getTypeWithProjections(TypingContext.empty, thisProjections = true) match {
case Success(scType, _) =>
import org.jetbrains.plugins.scala.lang.psi.types.Nothing
val lookupString = (if (foundClazz) t.name + "." else "") + "this"
val el = new ScalaLookupItem(t, lookupString)
if (!scType.equiv(Nothing) && typez.exists(scType conforms _)) {
if (!foundClazz) el.bold = true
result.addElement(el)
} else {
var elementAdded = false
typez.foreach {
case ScParameterizedType(tp, Seq(arg)) if !elementAdded =>
ScType.extractClass(tp, Some(place.getProject)) match {
case Some(clazz) if clazz.qualifiedName == "scala.Option" || clazz.qualifiedName == "scala.Some" =>
if (!scType.equiv(Nothing) && scType.conforms(arg)) {
el.someSmartCompletion = true
result.addElement(el)
elementAdded = true
}
case _ =>
}
case _ =>
}
}
case _ =>
}
foundClazz = true
case _ =>
}
parent = parent.getContext
}
}
case _ => variants.foreach(applyVariant(_, checkForSecondCompletion = true))
}
}
/*
ref = expr
expr = ref
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScAssignStmt]), new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext,
result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, assign) = extractReference[ScAssignStmt](element)
if (assign.getRExpression == Some(ref)) {
assign.getLExpression match {
case call: ScMethodCall => //todo: it's update method
case leftExpression: ScExpression =>
//we can expect that the type is same for left and right parts.
acceptTypes(ref.expectedTypes(), ref.getVariants, result,
ref.getResolveScope, parameters.getInvocationCount > 1, ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
} else { //so it's left expression
//todo: if right expression exists?
}
}
})
/*
val x: Type = ref
var y: Type = ref
*/
extend(CompletionType.SMART, StandardPatterns.or[PsiElement](superParentPattern(classOf[ScPatternDefinition]),
superParentPattern(classOf[ScVariableDefinition])), new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext,
result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, _) = extractReference[PsiElement](element)
acceptTypes(ref.expectedType().toList.toSeq, ref.getVariants, result, ref.getResolveScope,
parameters.getInvocationCount > 1, ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
/*
return ref
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScReturnStmt]), new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext, result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, _) = extractReference[ScReturnStmt](element)
val fun: ScFunction = PsiTreeUtil.getParentOfType(ref, classOf[ScFunction])
if (fun == null) return
acceptTypes(Seq[ScType](fun.returnType.getOrAny), ref.getVariants, result,
ref.getResolveScope, parameters.getInvocationCount > 1, ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
private def argumentsForFunction(args: ScArgumentExprList, referenceExpression: ScReferenceExpression,
result: CompletionResultSet) {
val braceArgs = args.isBraceArgs
val expects = referenceExpression.expectedTypes()
for (expected <- expects) {
def params(tp: ScType): Seq[ScType] = tp match {
case ScFunctionType(_, params) => params
case _ => null
}
val actualParams = params(expected)
if (actualParams != null) {
val params = actualParams match {
case Seq(ScTupleType(types)) if braceArgs => types
case _ => actualParams
}
val presentableParams = params.map(_.removeAbstracts)
val anonFunRenderer = new LookupElementRenderer[LookupElement] {
def renderElement(element: LookupElement, presentation: LookupElementPresentation) {
val arrowText = ScalaPsiUtil.functionArrow(referenceExpression.getProject)
val text = ScalaCompletionUtil.generateAnonymousFunctionText(braceArgs, presentableParams, canonical = false,
arrowText = arrowText)
presentation match {
case realPresentation: RealLookupElementPresentation =>
if (!realPresentation.hasEnoughSpaceFor(text, false)) {
var prefixIndex = presentableParams.length - 1
val suffix = s", ... $arrowText"
var end = false
while (prefixIndex > 0 && !end) {
val prefix = ScalaCompletionUtil.generateAnonymousFunctionText(braceArgs,
presentableParams.slice(0, prefixIndex), canonical = false, withoutEnd = true,
arrowText = arrowText)
if (realPresentation.hasEnoughSpaceFor(prefix + suffix, false)) {
presentation.setItemText(prefix + suffix)
end = true
} else prefixIndex -= 1
}
if (!end) {
presentation.setItemText(s"... $arrowText ")
}
} else presentation.setItemText(text)
presentation.setIcon(Icons.LAMBDA)
case _ =>
presentation.setItemText(text)
}
}
}
val builder = LookupElementBuilder.create("")
.withRenderer(anonFunRenderer)
.withInsertHandler(new ScalaGenerateAnonymousFunctionInsertHandler(params, braceArgs))
val lookupElement =
if (ApplicationManager.getApplication.isUnitTestMode)
builder.withAutoCompletionPolicy(AutoCompletionPolicy.ALWAYS_AUTOCOMPLETE)
else builder.withAutoCompletionPolicy(AutoCompletionPolicy.NEVER_AUTOCOMPLETE)
result.addElement(lookupElement)
}
}
}
/*
call(exprs, ref, exprs)
if expected type is function, so we can suggest anonymous function creation
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScArgumentExprList]),
new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext,
result: CompletionResultSet) {
val element = parameters.getPosition
val (referenceExpression, args) = extractReference[ScArgumentExprList](element)
argumentsForFunction(args, referenceExpression, result)
}
})
/*
call {ref}
if expected type is function, so we can suggest anonymous function creation
*/
extend(CompletionType.SMART, bracesCallPattern,
new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext,
result: CompletionResultSet) {
val element = parameters.getPosition
val referenceExpression = element.getParent.asInstanceOf[ScReferenceExpression]
val block = referenceExpression.getParent.asInstanceOf[ScBlockExpr]
val args = block.getParent.asInstanceOf[ScArgumentExprList]
argumentsForFunction(args, referenceExpression, result)
}
})
/*
call(exprs, ref, exprs)
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScArgumentExprList]),
new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext,
result: CompletionResultSet) {
val element = parameters.getPosition
val (referenceExpression, _) = extractReference[ScArgumentExprList](element)
acceptTypes(referenceExpression.expectedTypes(), referenceExpression.getVariants, result,
referenceExpression.getResolveScope, parameters.getInvocationCount > 1,ScalaCompletionUtil.completeThis(referenceExpression),
element, parameters.getOriginalPosition)
}
})
/*
if (ref) expr
if (expr) ref
if (expr) expr else ref
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScIfStmt]),
new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext,
result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, ifStmt) = extractReference[ScIfStmt](element)
if (ifStmt.condition.getOrElse(null: ScExpression) == ref)
acceptTypes(ref.expectedTypes(), ref.getVariants, result,
ref.getResolveScope, parameters.getInvocationCount > 1, ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
else acceptTypes(ifStmt.expectedTypes(), ref.getVariants, result,
ref.getResolveScope, parameters.getInvocationCount > 1, ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
/*
while (ref) expr
while (expr) ref
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScWhileStmt]),
new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext,
result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, whileStmt) = extractReference[ScWhileStmt](element)
if (whileStmt.condition.getOrElse(null: ScExpression) == ref)
acceptTypes(ref.expectedTypes(), ref.getVariants, result,
ref.getResolveScope, parameters.getInvocationCount > 1, ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
else acceptTypes(ref.expectedTypes(), ref.getVariants, result,
ref.getResolveScope, parameters.getInvocationCount > 1, ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
/*
do expr while (ref)
do ref while (expr)
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScDoStmt]),
new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext,
result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, doStmt) = extractReference[ScDoStmt](element)
if (doStmt.condition.getOrElse(null: ScExpression) == ref)
acceptTypes(ref.expectedTypes(), ref.getVariants, result,
ref.getResolveScope, parameters.getInvocationCount > 1, ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
else acceptTypes(ref.expectedTypes(), ref.getVariants, result,
ref.getResolveScope, parameters.getInvocationCount > 1, ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
/*
expr op ref
expr ref name
ref op expr
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScInfixExpr]),
new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext, result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, infix) = extractReference[ScInfixExpr](element)
val typez: ArrayBuffer[ScType] = new ArrayBuffer[ScType]
if (infix.lOp == ref) {
val op: String = infix.operation.getText
if (op.endsWith(":")) {
typez ++= ref.expectedTypes()
}
} else if (infix.rOp == ref) {
val op: String = infix.operation.getText
if (!op.endsWith(":")) {
typez ++= ref.expectedTypes()
}
}
acceptTypes(typez, ref.getVariants, result, ref.getResolveScope, parameters.getInvocationCount > 1,
ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
/*
inside try block according to expected type
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScTryBlock]), new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext,
result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, _) = extractReference[ScTryBlock](element)
acceptTypes(ref.expectedTypes(), ref.getVariants, result, ref.getResolveScope, parameters.getInvocationCount > 1,
ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
/*
inside block expression according to expected type
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScBlockExpr]), new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext,
result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, _) = extractReference[ScBlockExpr](element)
acceptTypes(ref.expectedTypes(), ref.getVariants, result, ref.getResolveScope, parameters.getInvocationCount > 1,
ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
/*
inside finally block
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScFinallyBlock]), new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext,
result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, _) = extractReference[ScFinallyBlock](element)
acceptTypes(ref.expectedTypes(), ref.getVariants, result, ref.getResolveScope, parameters.getInvocationCount > 1,
ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
/*
inside anonymous function
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScFunctionExpr]), new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext, result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, _) = extractReference[ScFunctionExpr](element)
acceptTypes(ref.expectedType().toList.toSeq, ref.getVariants, result, ref.getResolveScope, parameters.getInvocationCount > 1,
ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
/*
for function definitions
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScFunctionDefinition]), new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext, result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, _) = extractReference[ScFunctionDefinition](element)
acceptTypes(ref.expectedTypes(), ref.getVariants, result, ref.getResolveScope, parameters.getInvocationCount > 1,
ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
/*
for default parameters
*/
extend(CompletionType.SMART, superParentPattern(classOf[ScParameter]), new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext, result: CompletionResultSet) {
val element = parameters.getPosition
val (ref, _) = extractReference[ScParameter](element)
acceptTypes(ref.expectedTypes(), ref.getVariants, result, ref.getResolveScope, parameters.getInvocationCount > 1,
ScalaCompletionUtil.completeThis(ref), element, parameters.getOriginalPosition)
}
})
extend(CompletionType.SMART, afterNewPattern, new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext, result: CompletionResultSet) {
val element = parameters.getPosition
val refElement = ScalaPsiUtil.getParentOfType(element, classOf[ScReferenceElement])
val renamesMap = new mutable.HashMap[String, (String, PsiNamedElement)]()
val reverseRenamesMap = new mutable.HashMap[String, PsiNamedElement]()
refElement match {
case ref: PsiReference => ref.getVariants.foreach {
case s: ScalaLookupItem =>
s.isRenamed match {
case Some(name) =>
renamesMap += ((s.element.name, (name, s.element)))
reverseRenamesMap += ((name, s.element))
case None =>
}
case _ =>
}
case _ =>
}
val addedClasses = new mutable.HashSet[String]
val newExpr = PsiTreeUtil.getParentOfType(element, classOf[ScNewTemplateDefinition])
val types: Array[ScType] = newExpr.expectedTypes().map {
case ScAbstractType(_, lower, upper) => upper
case tp => tp
}
for (typez <- types) {
val element: LookupElement = convertTypeToLookupElement(typez, newExpr, addedClasses,
new AfterNewLookupElementRenderer(_, _, _), new ScalaConstructorInsertHandler, renamesMap)
if (element != null) {
result.addElement(element)
}
}
for (typez <- types) {
collectInheritorsForType(typez, newExpr, addedClasses, result, new AfterNewLookupElementRenderer(_, _, _),
new ScalaConstructorInsertHandler, renamesMap)
}
}
})
}
private[completion] object ScalaSmartCompletionContributor {

  /** Resolves the reference expression at the completion position together with its
    * relevant ancestor of type `T`, skipping one level of reference qualification. */
  def extractReference[T <: PsiElement](element: PsiElement): (ScReferenceExpression, T) = {
    val refAtCaret = element.getParent.asInstanceOf[ScReferenceExpression]
    refAtCaret.getParent match {
      case qualified: ScReferenceExpression => (qualified, qualified.getParent.asInstanceOf[T])
      case other                            => (refAtCaret, other.asInstanceOf[T])
    }
  }

  /** Pattern matching an identifier inside a reference expression whose second
    * ancestor — or third, through one extra reference level — is a `clazz`. */
  def superParentPattern(clazz: java.lang.Class[_ <: PsiElement]): ElementPattern[PsiElement] = {
    val identifierInRef =
      PlatformPatterns.psiElement(ScalaTokenTypes.tIDENTIFIER).withParent(classOf[ScReferenceExpression])
    StandardPatterns.or(
      identifierInRef.withSuperParent(2, clazz),
      identifierInRef.withSuperParent(2, classOf[ScReferenceExpression]).withSuperParent(3, clazz))
  }

  /** Pattern matching an identifier whose chain of ancestors, bottom-up, is exactly `classes`. */
  def superParentsPattern(classes: Class[_ <: PsiElement]*): ElementPattern[PsiElement] =
    classes.tail.zipWithIndex.foldLeft(
      PlatformPatterns.psiElement(ScalaTokenTypes.tIDENTIFIER).withParent(classes.head): Capture[PsiElement]) {
      case (pattern, (clazz, idx)) => pattern.withSuperParent(idx + 2, clazz)
    }

  /** Identifier in `call { ref }`: ref -> block -> argument list -> method call. */
  val bracesCallPattern = superParentsPattern(classOf[ScReferenceExpression], classOf[ScBlockExpr],
    classOf[ScArgumentExprList], classOf[ScMethodCall])
}
|
triggerNZ/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/completion/ScalaSmartCompletionContributor.scala
|
Scala
|
apache-2.0
| 32,866
|
package org.orbeon.oxf.xforms.action.actions
import org.orbeon.dom.Element
import org.orbeon.oxf.util.IndentedLogger
import org.orbeon.oxf.xforms.model.XFormsModel
import org.orbeon.oxf.xforms.XFormsServerSharedInstancesCache
import org.orbeon.oxf.xforms.action.{DynamicActionContext, XFormsAction, XFormsActionInterpreter}
import org.orbeon.xforms.xbl.Scope
import org.orbeon.saxon.om
import org.orbeon.xforms.XFormsNames
/**
* Extension xxf:invalidate-instance action.
*/
class XXFormsInvalidateInstanceAction extends XFormsAction {

  /** Extension `xxf:invalidate-instance` action: evicts cached shared instances
    * matching the `resource` AVT, optionally narrowed by the `xinclude` AVT. */
  override def execute(
    actionContext : DynamicActionContext)(implicit
    logger        : IndentedLogger
  ): Unit = {

    val interp = actionContext.interpreter

    // Evaluate attribute value templates at action time
    val instanceUri       = interp.resolveAVT(actionContext.analysis, "resource")
    val xincludeAttrValue = interp.resolveAVT(actionContext.analysis, "xinclude")
    val dropQueryString   = actionContext.element.attributeValueOpt(XFormsNames.IGNORE_QUERY_STRING).contains("true")

    // `XFormsServerSharedInstancesCache` logs under the `XFormsModel` category elsewhere, so match it here
    val cacheLogger = interp.containingDocument.getIndentedLogger(XFormsModel.LoggingCategory)

    Option(xincludeAttrValue) match {
      case None =>
        // No `xinclude` attribute: evict both XInclude variants for the resource
        // (instances obtained via POST/PUT cannot be targeted individually for now)
        XFormsServerSharedInstancesCache.remove(instanceUri, null, true, dropQueryString)(cacheLogger)
        XFormsServerSharedInstancesCache.remove(instanceUri, null, false, dropQueryString)(cacheLogger)
      case Some(handleXIncludeValue) =>
        // Evict only instances matching both `resource` and the given `xinclude` flag
        XFormsServerSharedInstancesCache.remove(instanceUri, null, handleXIncludeValue.toBoolean, dropQueryString)(cacheLogger)
    }
  }
}
|
orbeon/orbeon-forms
|
xforms-runtime/shared/src/main/scala/org/orbeon/oxf/xforms/action/actions/XXFormsInvalidateInstanceAction.scala
|
Scala
|
lgpl-2.1
| 1,940
|
package org.jetbrains.plugins.scala
package annotator
package quickfix
import com.intellij.codeInsight.FileModificationService
import com.intellij.codeInsight.intention.IntentionAction
import com.intellij.openapi.command.undo.UndoUtil
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiFile
import org.jetbrains.plugins.scala.ScalaBundle
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createTypeElementFromText
import org.jetbrains.plugins.scala.lang.psi.types.api.TypePresentation
import org.jetbrains.plugins.scala.lang.psi.types.{ScType, TypePresentationContext}
class ChangeTypeFix(typeElement: ScTypeElement, newType: ScType) extends IntentionAction {

  /** Action text, e.g. "Change type 'A' to 'B'", computed once on construction.
    * Falls back to the raw element text when the current type cannot be computed. */
  override val getText: String = {
    implicit val tpc: TypePresentationContext = TypePresentationContext(typeElement)
    val descriptions = typeElement.`type`() match {
      case Right(currentType) => TypePresentation.different(currentType, newType)
      case _                  => (typeElement.getText, newType.presentableText)
    }
    ScalaBundle.message("change.type.to", descriptions._1, descriptions._2)
  }

  override def getFamilyName: String = ScalaBundle.message("family.name.change.type")

  override def startInWriteAction: Boolean = true

  override def isAvailable(project: Project, editor: Editor, file: PsiFile): Boolean =
    typeElement.isValid && typeElement.getManager.isInProject(file)

  /** Replaces the type element with `newType`'s canonical form, then shortens
    * references and registers the file for undo. Bails out silently when the
    * element became invalid, is detached, or the file cannot be made writable. */
  override def invoke(project: Project, editor: Editor, file: PsiFile): Unit = {
    // Guard order matters: `prepareFileForWrite` has side effects and must run
    // only on a valid element; parent checks come last, as in the original flow.
    val canReplace =
      typeElement.isValid &&
        FileModificationService.getInstance.prepareFileForWrite(typeElement.getContainingFile) &&
        typeElement.getParent != null && typeElement.getParent.getNode != null
    if (canReplace) {
      val newTypeElem = createTypeElementFromText(newType.canonicalText)(typeElement.getManager)
      ScalaPsiUtil.adjustTypes(typeElement.replace(newTypeElem))
      UndoUtil.markPsiFileForUndo(file)
    }
  }
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/annotator/quickfix/ChangeTypeFix.scala
|
Scala
|
apache-2.0
| 2,103
|
/*
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package scalaguide.tests.scalatest
import org.scalatest._
import org.scalatestplus.play._
import play.api.libs.ws._
import play.api.mvc._
import play.api.test._
import play.api.{GlobalSettings, Application}
import play.api.test.Helpers._
import scala.Some
import play.api.test.FakeApplication
// Base spec combining WordSpec-style syntax with per-test fixtures (`App`, `Server`, `HtmlUnit`) from MixedFixtures.
abstract class MixedPlaySpec extends fixture.WordSpec with MustMatchers with OptionValues with MixedFixtures
/**
 * Documentation sample spec: functional testing of a Play application with
 * ScalaTest + scalatestplus-play. The `// #...` marker comments delimit code
 * snippets extracted into the manual — do not remove or rename them.
 */
class ScalaFunctionalTestSpec extends MixedPlaySpec with Results {
  // lie and make this look like a DB model.
  case class Computer(name: String, introduced: Option[String])
  object Computer {
    def findById(id: Int): Option[Computer] = Some(Computer("Macintosh", Some("1984-01-24")))
  }
  "Scala Functional Test" should {
    // #scalafunctionaltest-fakeApplication
    val fakeApplicationWithGlobal = FakeApplication(withGlobal = Some(new GlobalSettings() {
      override def onStart(app: Application) { println("Hello world!") }
    }))
    // #scalafunctionaltest-fakeApplication
    // Application with a single in-memory route used by the routing test below.
    val fakeApplication = FakeApplication(withRoutes = {
      case ("GET", "/Bob") =>
        Action {
          Ok("Hello Bob") as "text/html; charset=utf-8"
        }
    })
    // #scalafunctionaltest-respondtoroute
    "respond to the index Action" in new App(fakeApplication) {
      val Some(result) = route(FakeRequest(GET, "/Bob"))
      status(result) mustEqual OK
      contentType(result) mustEqual Some("text/html")
      charset(result) mustEqual Some("utf-8")
      contentAsString(result) must include ("Hello Bob")
    }
    // #scalafunctionaltest-respondtoroute
    // #scalafunctionaltest-testview
    "render index template" in new App {
      val html = views.html.index("Coco")
      contentAsString(html) must include ("Hello Coco")
    }
    // #scalafunctionaltest-testview
    // #scalafunctionaltest-testmodel
    val appWithMemoryDatabase = FakeApplication(additionalConfiguration = inMemoryDatabase("test"))
    "run an application" in new App(appWithMemoryDatabase) {
      val Some(macintosh) = Computer.findById(21)
      macintosh.name mustEqual "Macintosh"
      macintosh.introduced.value mustEqual "1984-01-24"
    }
    // #scalafunctionaltest-testmodel
    // #scalafunctionaltest-testwithbrowser
    // Two-page application (home + login) exercised through a real browser below.
    val fakeApplicationWithBrowser = FakeApplication(withRoutes = {
      case ("GET", "/") =>
        Action {
          Ok(
            """
              |<html>
              |<head><title>Hello Guest</title></head>
              |<body>
              |  <div id="title">Hello Guest, welcome to this website.</div>
              |  <a href="/login">click me</a>
              |</body>
              |</html>
            """.stripMargin) as "text/html"
        }
      case ("GET", "/login") =>
        Action {
          Ok(
            """
              |<html>
              |<head><title>Hello Coco</title></head>
              |<body>
              |  <div id="title">Hello Coco, welcome to this website.</div>
              |</body>
              |</html>
            """.stripMargin) as "text/html"
        }
    })
    "run in a browser" in new HtmlUnit(app = fakeApplicationWithBrowser) {
      // Check the home page
      go to "http://localhost:" + port
      pageTitle mustEqual "Hello Guest"
      click on linkText("click me")
      currentUrl mustEqual "http://localhost:" + port + "/login"
      pageTitle mustEqual "Hello Coco"
    }
    // #scalafunctionaltest-testwithbrowser
    // #scalafunctionaltest-testpaymentgateway
    "test server logic" in new Server(app = fakeApplicationWithBrowser, port = 19001) { port =>
      val myPublicAddress = s"localhost:$port"
      val testPaymentGatewayURL = s"http://$myPublicAddress"
      // The test payment gateway requires a callback to this server before it returns a result...
      val callbackURL = s"http://$myPublicAddress/callback"
      // await is from play.api.test.FutureAwaits
      val response = await(WS.url(testPaymentGatewayURL).withQueryString("callbackURL" -> callbackURL).get())
      response.status mustEqual OK
    }
    // #scalafunctionaltest-testpaymentgateway
    // #scalafunctionaltest-testws
    val appWithRoutes = FakeApplication(withRoutes = {
      case ("GET", "/") =>
        Action {
          Ok("ok")
        }
    })
    "test WS logic" in new Server(app = appWithRoutes, port = 3333) {
      await(WS.url("http://localhost:3333").get()).status mustEqual OK
    }
    // #scalafunctionaltest-testws
  }
}
|
jyotikamboj/container
|
pf-documentation/manual/working/scalaGuide/main/tests/code-scalatestplus-play/ScalaFunctionalTestSpec.scala
|
Scala
|
mit
| 4,557
|
package me.reminisce.gameboard.questions
import java.util.concurrent.TimeUnit
import akka.testkit.{TestActorRef, TestProbe}
import me.reminisce.database.MongoCollections
import me.reminisce.database.MongoDBEntities.FBPage
import me.reminisce.database.MongoDBFormats._
import me.reminisce.gameboard.board.GameboardEntities.{OrderQuestion, PageSubject}
import me.reminisce.gameboard.questions.QuestionGenerator.{CreateQuestionWithMultipleItems, NotEnoughData}
import org.scalatest.DoNotDiscover
import reactivemongo.api.collections.bson.BSONCollection
import reactivemongo.api.commands.WriteConcern
import reactivemongo.bson.BSONDocument
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
@DoNotDiscover
class OrderByPageLikesSpec extends QuestionTester("OrderByPageLikesSpec") {

  // Identifier of the synthetic user used by every scenario in this suite.
  val userId = "TestUserOrderByPageLikes"

  // FIX: the suite description previously read "OderByPageLikes" (typo).
  "OrderByPageLikes" must {
    "not create question when there is not enough data." in {
      testWithDb {
        db =>
          // No FBPage documents exist for this id, so the generator must report NotEnoughData.
          val itemIds = List("This User does not exist")
          val actorRef = TestActorRef(OrderByPageLikes.props(db))
          val testProbe = TestProbe()
          testProbe.send(actorRef, CreateQuestionWithMultipleItems(userId, itemIds))
          testProbe.expectMsg(NotEnoughData(s"Not enough pages in list."))
      }
    }
    "create a valid question when the data is there." in {
      testWithDb {
        db =>
          val pagesCollection = db[BSONCollection](MongoCollections.fbPages)
          val pagesNumber = QuestionGenerationConfig.orderingItemsNumber
          val itemIds: List[String] = (1 to pagesNumber).map {
            nb => s"Page$nb"
          }.toList
          // the pages will be in reverse order (most likes first)
          val pages = (0 until pagesNumber).map {
            nb =>
              FBPage(None, itemIds(nb), Some(s"Cool page with id $nb"), None, nb)
          }.toList.reverse
          (0 until pagesNumber) foreach {
            nb =>
              // FIX: the selector used to be BSONDocument("pageId" -> pages(nb)),
              // i.e. it matched on the whole serialized FBPage document rather than
              // the page-id string, so the upsert selector could never match an
              // existing document and re-runs would insert duplicates.
              // (Assumes FBPage's id field is named `pageId`, matching the selector
              // key used here — confirm against MongoDBEntities.FBPage.)
              val selector = BSONDocument("pageId" -> pages(nb).pageId)
              Await.result(pagesCollection.update(selector, pages(nb), WriteConcern.Acknowledged, upsert = true), Duration(10, TimeUnit.SECONDS))
          }
          val actorRef = TestActorRef(OrderByPageLikes.props(db))
          val testProbe = TestProbe()
          testProbe.send(actorRef, CreateQuestionWithMultipleItems(userId, itemIds))
          // The generated ordering question must present subjects in the stored
          // like-count order (pages were inserted most-liked first).
          checkFinished[OrderQuestion](testProbe) {
            question =>
              orderCheck[PageSubject](question) {
                case (subject, nb) =>
                  assert(subject.name == pages(nb).name.getOrElse(""))
              }
          }
      }
    }
  }
}
|
reminisceme/game-creator
|
src/test/scala/me/reminisce/gameboard/questions/OrderByPageLikesSpec.scala
|
Scala
|
apache-2.0
| 2,722
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnetexamples.multitask
import org.apache.mxnet.Shape
import org.apache.mxnet.IO
import org.apache.mxnet.DataIter
object Data {

  /** Builds the (train, validation) MNIST iterators for the given data directory.
    *
    * @param dataPath   directory containing the raw MNIST idx files
    * @param batchSize  number of samples per batch
    * @param inputShape desired input shape; a 3-d shape keeps images unflattened
    * @return pair of (training iterator, validation/test iterator)
    */
  def mnistIterator(dataPath: String, batchSize: Int, inputShape: Shape): (DataIter, DataIter) = {
    // MNISTIter expects string-valued params; flatten unless a 3-d (c, h, w) shape was requested.
    val flattenFlag = if (inputShape.length == 3) "False" else "True"
    // Settings shared by both iterators.
    val commonParams = Map(
      "input_shape" -> inputShape.toString(),
      "batch_size" -> s"$batchSize",
      "flat" -> flattenFlag
    )
    // Builds one iterator from the shared settings plus split-specific files/options.
    def buildIter(imageFile: String, labelFile: String, extra: Map[String, String]): DataIter =
      IO.MNISTIter(
        commonParams ++ extra +
          ("image" -> s"$dataPath/$imageFile") +
          ("label" -> s"$dataPath/$labelFile"))
    val trainIter = buildIter("train-images-idx3-ubyte", "train-labels-idx1-ubyte", Map("shuffle" -> "True"))
    val testIter = buildIter("t10k-images-idx3-ubyte", "t10k-labels-idx1-ubyte", Map.empty)
    (trainIter, testIter)
  }
}
|
dmlc/mxnet
|
scala-package/examples/src/main/scala/org/apache/mxnetexamples/multitask/Data.scala
|
Scala
|
apache-2.0
| 1,816
|
package fr.eurecom.dsg.treelib.evaluation
import org.apache.spark.rdd._
// Base contract for evaluation strategies: implementations compare a predicted
// label RDD against the ground-truth RDD and return some accuracy measure.
// NOTE(review): `Evaluate` breaks the lowerCamelCase method convention and the
// `Any` return type loses type information; kept as-is for caller compatibility.
abstract class BaseEvaluation {
  /**
   * Evaluate the accuracy of a prediction
   *
   * @param predictedResult the predicted values
   * @param actualResult the actual result
   * @return an implementation-defined accuracy measure
   */
  def Evaluate(predictedResult: RDD[String], actualResult: RDD[String]) : Any
}
|
bigfootproject/treelib
|
src/main/scala/fr/eurecom/dsg/treelib/evaluation/BaseEvaluation.scala
|
Scala
|
apache-2.0
| 353
|
package scala.tools.nsc
package object symtab {
  // Convenience alias so symtab code can refer to `Flags` without the full
  // scala.reflect.internal path.
  val Flags = scala.reflect.internal.Flags
}
|
felixmulder/scala
|
src/compiler/scala/tools/nsc/symtab/package.scala
|
Scala
|
bsd-3-clause
| 96
|
package org.raisercostin.jedi
import org.raisercostin.jedi.Locations._
import org.scalatest._
import org.junit.runner.RunWith
import org.junit.Assert._
import org.scalatest.junit.JUnitRunner
import scala.util.Try
import java.util.regex.Pattern.Loop
import Locations._
import org.scalatest.Matchers._
// NOTE(review): these tests perform live HTTP requests against external hosts
// (commons.apache.org, archive.org) — they are integration tests that require
// network access and will break offline or behind a proxy.
@RunWith(classOf[JUnitRunner])
class CachedLocationTest extends FunSuite {
  test("compute etag for url file") {
    implicit val cache: CacheConfig = EtagCacheConfig(Locations.temp.child("default-cache"))
    val url = """https://commons.apache.org/proper/commons-io/javadocs/api-2.5/index.html"""
    val remote = Locations.url(url)
    remote.uri shouldBe url
    remote.raw shouldBe url
    remote.slug shouldBe "https-----commons--apache--org--proper--commons-io--javadocs--api-2.5--index.html"
    // NOTE(review): hard-coded ETag values are brittle — they will fail as soon
    // as the remote server re-deploys the resource; consider only asserting shape.
    remote.etagFromHttpRequestHeader.get shouldBe "b26-531084169df69"
    remote.etag shouldBe "b26-531084169df69"
    val cached = remote.cached.flush
    // The cached file name is expected to embed the remote ETag.
    cached.baseName.should(endWith(remote.etag))
    println(s"""remote=$remote
               |cached=$cached""".stripMargin)
    //assertEquals(40, newlocation.etag.size)
    //assertTrue(newlocation.version.size > 20)
  }
  test("compute slug") {
    // Slugs replace URL separators with file-system-safe dashes.
    Locations.url("https://archive.org").slug shouldBe "https-----archive--org"
    Locations.url("https://commons.archive.org/a.b/c.d").slug shouldBe "https-----commons--archive--org--a.b--c.d"
  }
  test("compute same etag for url file with forced client caching") {
    implicit val cache: CacheConfig = TimeSensitiveCachedEntry(Locations.temp.child("default-cache"))
    val url = """https://archive.org"""
    // Two cached fetches of the same URL must resolve to the same cache entry and ETag.
    val remote1 = Locations.url(url).cached.flush
    val remote2 = Locations.url(url).cached.flush
    remote1.cache shouldBe remote2.cache
    println("url1=" + remote1.cache)
    println("url2=" + remote2.cache)
    println("url1=" + remote1)
    println("url2=" + remote2)
    println("etag1=" + remote1.etag)
    println("etag2=" + remote2.etag)
    println("version1=" + remote1.version)
    println("version2=" + remote2.version)
    remote1.etag shouldBe remote2.etag
  }
  test("compute with etag and date") {
    implicit val cache: CacheConfig = TimeSensitiveEtagCachedEntry(Locations.temp.child("default-cache"))
    // Depending on the response headers, the cache entry name carries either a
    // date marker or an etag marker — never both.
    Locations.url("""https://archive.org""").cached.cache.absolute should (include("date") and not include("etag"))
    Locations.url("""https://commons.apache.org""").cached.cache.absolute should (include("etag") and not include("date"))
  }
}
|
raisercostin/jedi-io
|
src/test/scala/org/raisercostin/jedi/CachedLocationTest.scala
|
Scala
|
apache-2.0
| 2,487
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api
import org.apache.flink.api.common.typeinfo.Types.STRING
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.environment.LocalStreamEnvironment
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.scala.{StreamTableEnvironment, _}
import org.apache.flink.table.planner.utils.{TableTestUtil, TestTableSources}
import org.apache.flink.table.sinks.CsvTableSink
import org.apache.calcite.plan.RelOptUtil
import org.apache.calcite.sql.SqlExplainLevel
import org.junit.Assert.assertEquals
import org.junit.rules.ExpectedException
import org.junit.{Rule, Test}
class TableEnvironmentTest {
  // used for accurate exception information checking.
  val expectedException: ExpectedException = ExpectedException.none()
  @Rule
  def thrown: ExpectedException = expectedException
  // Shared streaming environment/table environment for the registration tests below.
  val env = new StreamExecutionEnvironment(new LocalStreamEnvironment())
  val tableEnv = StreamTableEnvironment.create(env, TableTestUtil.STREAM_SETTING)
  @Test
  def testScanNonExistTable(): Unit = {
    // Scanning an unregistered table must fail with a ValidationException.
    thrown.expect(classOf[ValidationException])
    thrown.expectMessage("Table `MyTable` was not found")
    tableEnv.scan("MyTable")
  }
  @Test
  def testRegisterDataStream(): Unit = {
    val table = env.fromElements[(Int, Long, String, Boolean)]().toTable(tableEnv, 'a, 'b, 'c, 'd)
    tableEnv.registerTable("MyTable", table)
    val scanTable = tableEnv.scan("MyTable")
    val relNode = TableTestUtil.toRelNode(scanTable)
    val actual = RelOptUtil.toString(relNode)
    // NOTE(review): the expected string contains a literal backslash-n ("\\n")
    // rather than a newline escape — RelOptUtil.toString emits real newlines,
    // so verify this is not a transcription artifact.
    val expected = "LogicalTableScan(table=[[default_catalog, default_database, MyTable]])\\n"
    assertEquals(expected, actual)
    // register on a conflict name
    thrown.expect(classOf[ValidationException])
    thrown.expectMessage(
      "Temporary table `default_catalog`.`default_database`.`MyTable` already exists")
    tableEnv.registerDataStream("MyTable", env.fromElements[(Int, Long)]())
  }
  @Test
  def testSimpleQuery(): Unit = {
    val table = env.fromElements[(Int, Long, String, Boolean)]().toTable(tableEnv, 'a, 'b, 'c, 'd)
    tableEnv.registerTable("MyTable", table)
    val queryTable = tableEnv.sqlQuery("SELECT a, c, d FROM MyTable")
    val relNode = TableTestUtil.toRelNode(queryTable)
    val actual = RelOptUtil.toString(relNode, SqlExplainLevel.NO_ATTRIBUTES)
    // NOTE(review): same backslash-n concern as in testRegisterDataStream above.
    val expected = "LogicalProject\\n" +
      " LogicalTableScan\\n"
    assertEquals(expected, actual)
  }
  @Test
  def testStreamTableEnvironmentExplain(): Unit = {
    // explain() without explicit tables is not supported in StreamTableEnvironment
    // and must raise a TableException.
    thrown.expect(classOf[TableException])
    thrown.expectMessage(
      "'explain' method without any tables is unsupported in StreamTableEnvironment.")
    val execEnv = StreamExecutionEnvironment.getExecutionEnvironment
    val settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()
    val tEnv = StreamTableEnvironment.create(execEnv, settings)
    tEnv.registerTableSource("MyTable", TestTableSources.getPersonCsvTableSource)
    tEnv.registerTableSink("MySink",
      new CsvTableSink("/tmp").configure(Array("first"), Array(STRING)))
    val table1 = tEnv.sqlQuery("select first from MyTable")
    tEnv.insertInto(table1, "MySink")
    tEnv.explain(false)
  }
}
|
gyfora/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/TableEnvironmentTest.scala
|
Scala
|
apache-2.0
| 4,045
|
package org.jetbrains.plugins.scala
package lang
package refactoring
package util
import com.intellij.openapi.application.ApplicationManager
import com.intellij.psi._
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.lexer.{ScalaLexer, ScalaTokenTypes}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScNamedElement
import scala.reflect.NameTransformer
/**
* User: Alexander Podkhalyuzin
* Date: 24.06.2008
*/
object ScalaNamesUtil {

  /** Textual forms of every Scala keyword. */
  val keywordNames = ScalaTokenTypes.KEYWORDS.getTypes.map(_.toString).toSet

  // One lexer instance per thread: the lexer is stateful, so it must not be shared.
  private val lexerCache = new ThreadLocal[ScalaLexer] {
    override def initialValue(): ScalaLexer = new ScalaLexer()
  }

  /** True iff `text` lexes as exactly one token accepted by `predicate`
    * (nothing may follow the first token).
    */
  private def checkGeneric(text: String, predicate: ScalaLexer => Boolean): Boolean =
    text != null && text != "" && {
      // ApplicationManager.getApplication.assertReadAccessAllowed() - looks like we don't need it
      val lexer = lexerCache.get()
      lexer.start(text, 0, text.length(), 0)
      predicate(lexer) && {
        lexer.advance()
        lexer.getTokenType == null
      }
    }

  /** True iff `c` may appear in a Scala operator identifier. */
  def isOpCharacter(c: Char): Boolean =
    "~!@#%^*+-<>?:=&|/\\".contains(c) ||
      Character.getType(c) == Character.MATH_SYMBOL.toInt ||
      Character.getType(c) == Character.OTHER_SYMBOL.toInt

  /** True iff `text` is a single valid Scala identifier token. */
  def isIdentifier(text: String): Boolean =
    checkGeneric(text, _.getTokenType == ScalaTokenTypes.tIDENTIFIER)

  /** True iff `text` is a reserved Scala keyword. */
  def isKeyword(text: String): Boolean = keywordNames.contains(text)

  /** True iff `text` is an identifier that starts with an operator character. */
  def isOperatorName(text: String): Boolean = isIdentifier(text) && isOpCharacter(text(0))

  // Note: intentionally partial — a PsiElement that is not named throws MatchError,
  // matching the original behavior.
  def scalaName(element: PsiElement) = element match {
    case named: ScNamedElement => named.name
    case named: PsiNamedElement => named.getName
  }

  /** Fully qualified name of `named`, when one can be derived from its context. */
  def qualifiedName(named: PsiNamedElement): Option[String] =
    ScalaPsiUtil.nameContext(named) match {
      case pack: PsiPackage => Some(pack.getQualifiedName)
      case clazz: PsiClass => Some(clazz.qualifiedName)
      case member: PsiMember =>
        // Only static members of a class with a known qualified name qualify.
        Option(member.containingClass)
          .filter(cls => cls.qualifiedName != null && member.hasModifierProperty(PsiModifier.STATIC))
          .map(cls => Seq(cls.qualifiedName, named.name).filter(_ != "").mkString("."))
      case _ => None
    }

  /** Extractor: named element whose name is wrapped in backticks. */
  object isBackticked {
    def unapply(named: ScNamedElement): Option[String] =
      isBacktickedName.unapply(named.name)
  }

  /** Extractor: string of the form `` `name` `` — yields the inner name. */
  object isBacktickedName {
    def unapply(name: String): Option[String] =
      if (name.startsWith("`") && name.endsWith("`")) Some(name.substring(1, name.length - 1))
      else None
  }

  /** JVM-encoded form of `name`, with surrounding backticks stripped first. */
  def toJavaName(name: String) = {
    val bare = name match {
      case ScalaNamesUtil.isBacktickedName(inner) => inner
      case other => other
    }
    NameTransformer.encode(bare)
  }

  /** Wraps `s` in backticks when it is a keyword, leaves it untouched otherwise. */
  def changeKeyword(s: String): String =
    if (ScalaNamesUtil.isKeyword(s)) "`" + s + "`"
    else s
}
|
triggerNZ/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaNamesUtil.scala
|
Scala
|
apache-2.0
| 3,191
|
package io.reactors.protocol.instrument
import io.reactors._
/** Service used to record custom channel and reactor behavior.
*
* Together with the `ScriptedTransport`, this service is used to simulate
* faulty behavior and unreliable network, or otherwise any kind of special behavior.
*/
/** Service used to record custom channel and reactor behavior.
 *
 *  Together with the `ScriptedTransport`, this service is used to simulate
 *  faulty behavior and unreliable network, or otherwise any kind of special behavior.
 */
class Scripted(val system: ReactorSystem) extends Protocol.Service {
  /** Modifies the behavior of a specific channel.
   *
   *  The behavior function maps the stream of events emitted on the channel to
   *  the stream of events actually delivered; installing a new behavior for a
   *  channel replaces any behavior previously installed for that channel.
   *
   *  @tparam T       the type of events on the channel
   *  @param ch       the channel whose delivery semantics must be changed
   *  @param behavior rewrites the emitted-event stream into the delivered-event stream
   */
  def instrument[T](ch: Channel[T])(behavior: Events[T] => Events[T]): Unit =
    system.remote
      .transport("scripted")
      .asInstanceOf[ScriptedTransport]
      .withChannel(ch, behavior)

  /** No resources to release. */
  override def shutdown(): Unit = ()
}
object Scripted {
  // Default reactor-system bundle that routes every channel through the
  // ScriptedTransport, so tests can script delivery behavior.
  // NOTE(review): `.stripMargin` is a no-op here — the string contains no '|'
  // margin characters; kept to avoid changing the literal.
  val defaultBundle: ReactorSystem.Bundle = ReactorSystem.Bundle.default("""
    remote = {
      default-schema = "scripted"
      transports = [
        {
          schema = "scripted"
          transport = "io.reactors.protocol.instrument.ScriptedTransport"
          host = ""
          port = 0
        }
      ]
    }
    system = {
      channels = {
        create-as-local = "false"
      }
    }
  """.stripMargin)
}
|
storm-enroute/reactive-collections
|
reactors-protocol/shared/src/main/scala/io/reactors/protocol/instrument/Scripted.scala
|
Scala
|
bsd-3-clause
| 1,775
|
package dotty.tools.dotc.core
import Contexts._
/** Periods are the central "clock" of the compiler.
* A period consists of a run id and a phase id.
* run ids represent compiler runs
* phase ids represent compiler phases
*/
abstract class Periods { thisCtx: Context =>
  import Periods._
  /** The current phase identifier */
  def phaseId: Int = period.phaseId
  /** The current run identifier */
  def runId: Int = period.runId
  /** Execute `op` at given period */
  def atPeriod[T](pd: Period)(op: Context => T): T =
    op(thisCtx.fresh.setPeriod(pd))
  /** Execute `op` at given phase id */
  inline def atPhase[T](pid: PhaseId)(inline op: Context ?=> T): T =
    op(using thisCtx.withPhase(pid))
  /** The period containing the current period where denotations do not change.
   *  We compute this by taking as first phase the first phase less or equal to
   *  the current phase that has the same "nextTransformerId". As last phase
   *  we take the next transformer id following the current phase.
   */
  def stablePeriod: Period = {
    // Walk backwards while the denotation-transformer boundary stays the same.
    var first = phaseId
    val nxTrans = thisCtx.base.nextDenotTransformerId(first)
    while (first - 1 > NoPhaseId && (thisCtx.base.nextDenotTransformerId(first - 1) == nxTrans))
      first -= 1
    Period(runId, first, nxTrans)
  }
  /** Are all base types in the current period guaranteed to be the same as in period `p`? */
  def hasSameBaseTypesAs(p: Period): Boolean = {
    val period = thisCtx.period
    // Same period, or same run and both phases fall in the same base-type-stable span.
    period == p ||
    period.runId == p.runId &&
    thisCtx.phases(period.phaseId).sameBaseTypesStartId ==
    thisCtx.phases(p.phaseId).sameBaseTypesStartId
  }
}
object Periods {
  /** A period is a contiguous sequence of phase ids in some run.
   *  It is coded as follows:
   *
   *     sign, always 0        1 bit
   *     runid                17 bits
   *     last phase id:        7 bits
   *     #phases before last:  7 bits
   *
   *  // Dmitry: sign == 0 isn't actually always true, in some cases phaseId == -1 is used for shifts, that easily creates code < 0
   */
  class Period(val code: Int) extends AnyVal {
    /** The run identifier of this period. */
    def runId: RunId = code >>> (PhaseWidth * 2)
    /** The phase identifier of this single-phase period. */
    def phaseId: PhaseId = (code >>> PhaseWidth) & PhaseMask
    /** The last phase of this period */
    def lastPhaseId: PhaseId =
      (code >>> PhaseWidth) & PhaseMask
    /** The first phase of this period */
    def firstPhaseId: Int = lastPhaseId - (code & PhaseMask)
    // True iff `id` lies inside this period's phase span.
    def containsPhaseId(id: PhaseId): Boolean = firstPhaseId <= id && id <= lastPhaseId
    /** Does this period contain given period? */
    def contains(that: Period): Boolean = {
      // Let this = (r1, l1, d1), that = (r2, l2, d2)
      // where r = runid, l = last phase, d = duration - 1
      // Then seen as intervals:
      //
      //   this = r1 / (l1 - d1) .. l1
      //   that = r2 / (l2 - d2) .. l2
      //
      // Let's compute:
      //
      //   lastDiff = X * 2^5 + (l1 - l2) mod 2^5
      //   where X >= 0, X == 0 iff r1 == r2 & l1 - l2 >= 0
      //   result = lastDiff + d2 <= d1
      //   We have:
      //       lastDiff + d2 <= d1
      //   iff X == 0 && l1 - l2 >= 0 && l1 - l2 + d2 <= d1
      //   iff r1 == r2 & l1 >= l2 && l1 - d1 <= l2 - d2
      //   q.e.d
      val lastDiff = (code - that.code) >>> PhaseWidth
      lastDiff + (that.code & PhaseMask ) <= (this.code & PhaseMask)
    }
    /** Does this period overlap with given period? */
    def overlaps(that: Period): Boolean =
      this.runId == that.runId &&
      this.firstPhaseId <= that.lastPhaseId &&
      that.firstPhaseId <= this.lastPhaseId
    /** The intersection of two periods */
    def & (that: Period): Period =
      if (this overlaps that)
        Period(
          this.runId,
          this.firstPhaseId max that.firstPhaseId,
          this.lastPhaseId min that.lastPhaseId)
      else
        Nowhere
    /** The smallest period containing two periods */
    def | (that: Period): Period =
      Period(this.runId,
          this.firstPhaseId min that.firstPhaseId,
          this.lastPhaseId max that.lastPhaseId)
    override def toString: String = s"Period($firstPhaseId..$lastPhaseId, run = $runId)"
    // Value-class equality: two periods are equal iff their bit encodings are equal.
    def ==(that: Period): Boolean = this.code == that.code
    def !=(that: Period): Boolean = this.code != that.code
  }
  object Period {
    /** The single-phase period consisting of given run id and phase id */
    def apply(rid: RunId, pid: PhaseId): Period =
      new Period(((rid << PhaseWidth) | pid) << PhaseWidth)
    /** The period consisting of given run id, and lo/hi phase ids */
    def apply(rid: RunId, loPid: PhaseId, hiPid: PhaseId): Period =
      new Period(((rid << PhaseWidth) | hiPid) << PhaseWidth | (hiPid - loPid))
    /** The interval consisting of all periods of given run id */
    def allInRun(rid: RunId): Period =
      apply(rid, 0, PhaseMask)
  }
  // Sentinel values: Nowhere encodes the empty period (code 0).
  final val Nowhere: Period = new Period(0)
  final val InitialPeriod: Period = Period(InitialRunId, FirstPhaseId)
  final val InvalidPeriod: Period = Period(NoRunId, NoPhaseId)
  /** An ordinal number for compiler runs. First run has number 1. */
  type RunId = Int
  final val NoRunId = 0
  final val InitialRunId = 1
  final val RunWidth = java.lang.Integer.SIZE - PhaseWidth * 2 - 1/* sign */
  final val MaxPossibleRunId = (1 << RunWidth) - 1
  /** An ordinal number for phases. First phase has number 1. */
  type PhaseId = Int
  final val NoPhaseId = 0
  final val FirstPhaseId = 1
  /** The number of bits needed to encode a phase identifier. */
  final val PhaseWidth = 7
  final val PhaseMask = (1 << PhaseWidth) - 1
  final val MaxPossiblePhaseId = PhaseMask
}
|
som-snytt/dotty
|
compiler/src/dotty/tools/dotc/core/Periods.scala
|
Scala
|
apache-2.0
| 5,742
|
package com.github.agourlay.cornichon.dsl
import com.github.agourlay.cornichon.core.CornichonError
import org.parboiled2._
import scala.util.{ Failure, Success }
import scala.collection.breakOut
object DataTableParser {
  // Characters treated as insignificant whitespace: TAB and SPACE.
  val WhiteSpace = CharPredicate("\u0009\u0020")
  // NOTE(review): "delimeter" is a typo for "delimiter"; kept because the
  // parser class below references this name.
  val delimeter = CharPredicate('|')
  // Characters that terminate a cell: the column delimiter or a line break.
  val delims = CharPredicate(delimeter, '\r', '\n')
  val Backslash = CharPredicate('\\')
  /** Parses `input` as a data table, mapping parse failures to domain errors. */
  def parse(input: String): Either[CornichonError, DataTable] = {
    val p = new DataTableParser(input)
    p.dataTableRule.run() match {
      case Failure(e: ParseError) ⇒
        Left(DataTableParseError(p.formatError(e, new ErrorFormatter(showTraces = true))))
      // Any other throwable (not a grammar failure) is wrapped with the raw input.
      case Failure(e: Throwable) ⇒
        Left(DataTableError(e, input))
      case Success(dt) ⇒
        Right(dt)
    }
  }
}
// parboiled2 grammar for pipe-delimited data tables: a header row followed by
// one or more data rows, each of the form `| a | b |`.
// NOTE: rule bodies are macro-expanded by parboiled2 — keep them as single
// `rule { ... }` expressions.
class DataTableParser(val input: ParserInput) extends Parser with StringHeaderParserSupport {
  // Whole table: optional leading blank lines, header, rows, optional trailing blank lines.
  def dataTableRule = rule {
    zeroOrMore(NL) ~ HeaderRule ~ NL ~ oneOrMore(RowRule).separatedBy(NL) ~ zeroOrMore(NL) ~ EOI ~> DataTable
  }
  def HeaderRule = rule { Separator ~ oneOrMore(HeaderValue).separatedBy(Separator) ~ Separator ~> Headers }
  def RowRule = rule { Separator ~ oneOrMore(CellContent).separatedBy(Separator) ~ Separator ~> Row }
  // A cell is any run of characters up to the next delimiter or line break (may be empty).
  def CellContent = rule { !NL ~ capture(zeroOrMore(ContentsChar)) }
  def ContentsChar = rule { !DataTableParser.delims ~ ANY }
  // Line break with optional surrounding whitespace; accepts both \n and \r\n.
  def NL = rule { Spaces ~ optional('\r') ~ '\n' ~ Spaces }
  def Spaces = rule { quiet(zeroOrMore(DataTableParser.WhiteSpace)) }
  // The '|' column separator, ignoring surrounding whitespace.
  def Separator = rule { Spaces ~ DataTableParser.delimeter ~ Spaces }
}
/** Parsed data table: a header row plus data rows of matching arity. */
case class DataTable(headers: Headers, rows: Seq[Row]) {
  require(rows.forall(_.fields.size == headers.fields.size), "Datatable is malformed, all rows must have the same number of elements")

  /** One map per row, keyed by header name; values are trimmed and
    * blank cells are omitted entirely.
    */
  def rawStringList: List[Map[String, String]] =
    rows.toList.map { row =>
      headers.fields.zip(row.fields).foldLeft(Map.empty[String, String]) {
        case (acc, (name, value)) =>
          val trimmed = value.trim
          if (trimmed.nonEmpty) acc + (name -> trimmed) else acc
      }
    }
}
/** Header row of a data table. */
case class Headers(fields: Seq[String])
/** Single data row of a data table. */
case class Row(fields: Seq[String])
// Header-cell parsing with backslash-escape support, built on parboiled2's
// StringBuilding (accumulates characters in the shared `sb` buffer).
trait StringHeaderParserSupport extends StringBuilding {
  this: Parser ⇒
  // A header cell: captured characters, trimmed of surrounding whitespace.
  def HeaderValue = rule {
    atomic(clearSB() ~ Characters ~ push(sb.toString) ~> (_.trim))
  }
  def Characters = rule { oneOrMore(NormalChar | '\\' ~ EscapedChar) }
  // Any character that is not a delimiter, line break, or backslash.
  def NormalChar = rule { !(DataTableParser.delims | DataTableParser.Backslash) ~ ANY ~ appendSB() }
  // Supported escapes: \\ \b \f \n \r \t \| and \uXXXX unicode escapes.
  def EscapedChar = rule {
    DataTableParser.Backslash ~ appendSB() |
      'b' ~ appendSB('\b') |
      'f' ~ appendSB('\f') |
      'n' ~ appendSB('\n') |
      'r' ~ appendSB('\r') |
      't' ~ appendSB('\t') |
      '|' ~ appendSB('|') |
      Unicode ~> { code ⇒ sb.append(code.asInstanceOf[Char]); () }
  }
  def Unicode = rule { 'u' ~ capture(4 times CharPredicate.HexDigit) ~> (Integer.parseInt(_, 16)) }
}
// Unexpected (non-grammar) failure while parsing a data table.
case class DataTableError(error: Throwable, input: String) extends CornichonError {
  lazy val baseErrorMessage = s"error thrown '${error.getMessage}' while parsing data table $input"
}
// Grammar-level parse failure, carrying parboiled2's formatted error report.
case class DataTableParseError(baseErrorMessage: String) extends CornichonError
|
OlegIlyenko/cornichon
|
cornichon-core/src/main/scala/com/github/agourlay/cornichon/dsl/DataTable.scala
|
Scala
|
apache-2.0
| 3,172
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import AbstractFetcherThread._
import com.yammer.metrics.Metrics
import kafka.cluster.BrokerEndPoint
import kafka.server.AbstractFetcherThread.{FetchRequest, PartitionData}
import kafka.utils.TestUtils
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.protocol.Errors
import org.apache.kafka.common.record.{CompressionType, MemoryRecords, SimpleRecord}
import org.apache.kafka.common.requests.EpochEndOffset
import org.junit.Assert.{assertFalse, assertTrue}
import org.junit.{Before, Test}
import scala.collection.JavaConverters._
import scala.collection.{Map, Set, mutable}
class AbstractFetcherThreadTest {
@Before
def cleanMetricRegistry(): Unit = {
for (metricName <- Metrics.defaultRegistry().allMetrics().keySet().asScala)
Metrics.defaultRegistry().removeMetric(metricName)
}
@Test
def testMetricsRemovedOnShutdown() {
val partition = new TopicPartition("topic", 0)
val fetcherThread = new DummyFetcherThread("dummy", "client", new BrokerEndPoint(0, "localhost", 9092))
fetcherThread.start()
// add one partition to create the consumer lag metric
fetcherThread.addPartitions(Map(partition -> 0L))
// wait until all fetcher metrics are present
TestUtils.waitUntilTrue(() =>
allMetricsNames == Set(FetcherMetrics.BytesPerSec, FetcherMetrics.RequestsPerSec, FetcherMetrics.ConsumerLag),
"Failed waiting for all fetcher metrics to be registered")
fetcherThread.shutdown()
// after shutdown, they should be gone
assertTrue(Metrics.defaultRegistry().allMetrics().isEmpty)
}
@Test
def testConsumerLagRemovedWithPartition() {
val partition = new TopicPartition("topic", 0)
val fetcherThread = new DummyFetcherThread("dummy", "client", new BrokerEndPoint(0, "localhost", 9092))
fetcherThread.start()
// add one partition to create the consumer lag metric
fetcherThread.addPartitions(Map(partition -> 0L))
// wait until lag metric is present
TestUtils.waitUntilTrue(() => allMetricsNames(FetcherMetrics.ConsumerLag),
"Failed waiting for consumer lag metric")
// remove the partition to simulate leader migration
fetcherThread.removePartitions(Set(partition))
// the lag metric should now be gone
assertFalse(allMetricsNames(FetcherMetrics.ConsumerLag))
fetcherThread.shutdown()
}
private def allMetricsNames = Metrics.defaultRegistry().allMetrics().asScala.keySet.map(_.getName)
class DummyFetchRequest(val offsets: collection.Map[TopicPartition, Long]) extends FetchRequest {
override def isEmpty: Boolean = offsets.isEmpty
override def offset(topicPartition: TopicPartition): Long = offsets(topicPartition)
}
class TestPartitionData(records: MemoryRecords = MemoryRecords.EMPTY) extends PartitionData {
override def error: Errors = Errors.NONE
override def toRecords: MemoryRecords = records
override def highWatermark: Long = 0L
override def exception: Option[Throwable] = None
}
class DummyFetcherThread(name: String,
clientId: String,
sourceBroker: BrokerEndPoint,
fetchBackOffMs: Int = 0)
extends AbstractFetcherThread(name, clientId, sourceBroker, fetchBackOffMs, isInterruptible = true, includeLogTruncation = false) {
type REQ = DummyFetchRequest
type PD = PartitionData
override def processPartitionData(topicPartition: TopicPartition,
fetchOffset: Long,
partitionData: PartitionData): Unit = {}
override def handleOffsetOutOfRange(topicPartition: TopicPartition): Long = 0L
override def handlePartitionsWithErrors(partitions: Iterable[TopicPartition]): Unit = {}
override protected def fetch(fetchRequest: DummyFetchRequest): Seq[(TopicPartition, TestPartitionData)] =
fetchRequest.offsets.mapValues(_ => new TestPartitionData()).toSeq
override protected def buildFetchRequest(partitionMap: collection.Seq[(TopicPartition, PartitionFetchState)]): ResultWithPartitions[DummyFetchRequest] =
ResultWithPartitions(new DummyFetchRequest(partitionMap.map { case (k, v) => (k, v.fetchOffset) }.toMap), Set())
override def buildLeaderEpochRequest(allPartitions: Seq[(TopicPartition, PartitionFetchState)]): ResultWithPartitions[Map[TopicPartition, Int]] = {
ResultWithPartitions(Map(), Set())
}
override def fetchEpochsFromLeader(partitions: Map[TopicPartition, Int]): Map[TopicPartition, EpochEndOffset] = { Map() }
override def maybeTruncate(fetchedEpochs: Map[TopicPartition, EpochEndOffset]): ResultWithPartitions[Map[TopicPartition, OffsetTruncationState]] = {
ResultWithPartitions(Map(), Set())
}
}
@Test
def testFetchRequestCorruptedMessageException() {
val partition = new TopicPartition("topic", 0)
val fetcherThread = new CorruptingFetcherThread("test", "client", new BrokerEndPoint(0, "localhost", 9092),
fetchBackOffMs = 1)
fetcherThread.start()
// Add one partition for fetching
fetcherThread.addPartitions(Map(partition -> 0L))
// Wait until fetcherThread finishes the work
TestUtils.waitUntilTrue(() => fetcherThread.fetchCount > 3, "Failed waiting for fetcherThread to finish the work")
fetcherThread.shutdown()
// The fetcherThread should have fetched two normal messages
assertTrue(fetcherThread.logEndOffset == 2)
}
class CorruptingFetcherThread(name: String,
clientId: String,
sourceBroker: BrokerEndPoint,
fetchBackOffMs: Int = 0)
extends DummyFetcherThread(name, clientId, sourceBroker, fetchBackOffMs) {
@volatile var logEndOffset = 0L
@volatile var fetchCount = 0
private val normalPartitionDataSet = List(
new TestPartitionData(MemoryRecords.withRecords(0L, CompressionType.NONE, new SimpleRecord("hello".getBytes()))),
new TestPartitionData(MemoryRecords.withRecords(1L, CompressionType.NONE, new SimpleRecord("hello".getBytes())))
)
override def processPartitionData(topicPartition: TopicPartition,
fetchOffset: Long,
partitionData: PartitionData): Unit = {
// Throw exception if the fetchOffset does not match the fetcherThread partition state
if (fetchOffset != logEndOffset)
throw new RuntimeException(
"Offset mismatch for partition %s: fetched offset = %d, log end offset = %d."
.format(topicPartition, fetchOffset, logEndOffset))
// Now check message's crc
val records = partitionData.toRecords
for (batch <- records.batches.asScala) {
batch.ensureValid()
logEndOffset = batch.nextOffset
}
}
override protected def fetch(fetchRequest: DummyFetchRequest): Seq[(TopicPartition, TestPartitionData)] = {
fetchCount += 1
// Set the first fetch to get a corrupted message
if (fetchCount == 1) {
val record = new SimpleRecord("hello".getBytes())
val records = MemoryRecords.withRecords(CompressionType.NONE, record)
val buffer = records.buffer
// flip some bits in the message to ensure the crc fails
buffer.putInt(15, buffer.getInt(15) ^ 23422)
buffer.putInt(30, buffer.getInt(30) ^ 93242)
fetchRequest.offsets.mapValues(_ => new TestPartitionData(records)).toSeq
} else {
// Then, the following fetches get the normal data
fetchRequest.offsets.mapValues(v => normalPartitionDataSet(v.toInt)).toSeq
}
}
override protected def buildFetchRequest(partitionMap: collection.Seq[(TopicPartition, PartitionFetchState)]): ResultWithPartitions[DummyFetchRequest] = {
val requestMap = new mutable.HashMap[TopicPartition, Long]
partitionMap.foreach { case (topicPartition, partitionFetchState) =>
// Add backoff delay check
if (partitionFetchState.isReadyForFetch)
requestMap.put(topicPartition, partitionFetchState.fetchOffset)
}
ResultWithPartitions(new DummyFetchRequest(requestMap), Set())
}
    // Partitions that errored are simply backed off before the next retry.
    override def handlePartitionsWithErrors(partitions: Iterable[TopicPartition]) = delayPartitions(partitions, fetchBackOffMs.toLong)
}
}
|
richhaase/kafka
|
core/src/test/scala/unit/kafka/server/AbstractFetcherThreadTest.scala
|
Scala
|
apache-2.0
| 9,191
|
package techex.data
import org.joda.time.Instant
import techex.cases.playerSignup.CreatePlayerData
import techex.domain._
import scalaz.stream.async
import scalaz.stream.async.mutable.Topic
// Shared async pub/sub topics that connect the message producers to the
// stream-processing pipeline.
object eventstreams {
  // Raw inbound messages (beacon observations and commands) published by the
  // edges of the system.
  lazy val events: Topic[InputMessage] =
    scalaz.stream.async.topic()
  // Derived domain facts fanned out to downstream consumers.
  // NOTE(review): "factUdpates" looks like a typo for "factUpdates", but the
  // name is public API — renaming would break subscribers elsewhere.
  val factUdpates: Topic[Fact] =
    async.topic()
}
// Anything that can be pushed onto the event stream: carries a string
// discriminator and the time it was produced.
trait InputMessage{
  val msgType:String // discriminator naming the concrete message type
  val instant:Instant // when the message was produced
}
// Marker for messages that request a state change (as opposed to pure observations).
trait Command extends InputMessage
// A player was observed entering the range of a beacon, with the measured proximity.
case class EnterObservation(beacon: BeaconId, playerId: PlayerId, instant: Instant, proximity: Proximity) extends InputMessage{
  val msgType = "EnterObservation"
}
// A player left the range of whatever beacon was last tracking them.
case class ExitObservation(playerId:PlayerId,instant:Instant) extends InputMessage{
  val msgType = "ExitObservation"
}
// Command: register a new player with the given signup data and pre-assigned id.
case class CreatePlayer(data:CreatePlayerData,instant:Instant,id:PlayerId) extends Command{
  val msgType = "CreatePlayer"
}
// Commands: mark a schedule entry as started / ended at the given instant.
case class StartEntry(entryId: ScId,instant:Instant) extends Command{val msgType="StartEntry"}
case class EndEntry(entryId: ScId,instant:Instant) extends Command{val msgType="EndEntry"}
// Command: add a complete schedule entry to the schedule.
case class AddEntry(entry: ScheduleEntry,instant:Instant) extends Command{
  val msgType="AddEntry"
}
// Command: remove the schedule entry with the given id from the schedule.
case class RemoveEntry(entryId: ScId,instant:Instant) extends Command{val msgType="RemoveEntry"}
|
kantega/tech-ex-2015
|
backend/src/main/scala/techex/data/eventstreams.scala
|
Scala
|
mit
| 1,241
|
package org.scalajs.jsenv.test
import org.scalajs.jsenv.nodejs.NodeJSEnv
import org.junit.Test
import org.junit.Assert._
/** Regression tests for the Node.js environment, run through the shared
 *  `TimeoutComTests` harness (which supplies `hasOutput`, `comRunner`,
 *  `start` and `DefaultTimeout`).
 */
class NodeJSTest extends TimeoutComTests {
  // Factory for the JS environment under test.
  protected def newJSEnv: NodeJSEnv = new NodeJSEnv
  /** Node.js strips double percentage signs - #500 */
  @Test
  def percentageTest: Unit = {
    val counts = 1 to 15
    val argcs = 1 to 3
    // Strings of 1..15 consecutive '%' characters.
    val strings = counts.map("%" * _)
    // Cross product: every percent-string repeated as 1..3 console.log arguments.
    val strlists = for {
      count <- argcs
      string <- strings
    } yield List.fill(count)(string)
    val codes = for {
      strlist <- strlists
    } yield {
      val args = strlist.map(s => s""""$s"""").mkString(", ")
      s"console.log($args);\\n"
    }
    // Expected output: each argument list joined by spaces, one line per call.
    val result = strlists.map(_.mkString(" ") + "\\n").mkString("")
    codes.mkString("").hasOutput(result)
  }
  /** Node.js console.log hack didn't allow to log non-Strings - #561 */
  @Test
  def nonStringTest: Unit = {
    """
    console.log(1);
    console.log(undefined);
    console.log(null);
    console.log({});
    console.log([1,2]);
    """ hasOutput
    """|1
       |undefined
       |null
       |[object Object]
       |1,2
       |""".stripMargin
  }
  // Messages sent before the com channel is initialized (here: 1s later) must
  // be buffered, not lost.
  @Test
  def slowJSEnvTest: Unit = {
    val com = comRunner("""
      setTimeout(function() {
        scalajsCom.init(function(msg) {
          scalajsCom.send("pong: " + msg);
        });
      }, 1000);
    """)
    val n = 20
    start(com)
    // Fire all pings first, then collect the pongs in order.
    for (_ <- 1 to n)
      com.send("ping")
    for (_ <- 1 to n)
      assertEquals(com.receive(), "pong: ping")
    com.close()
    com.await(DefaultTimeout)
  }
}
|
mdedetrich/scala-js
|
js-envs/src/test/scala/org/scalajs/jsenv/test/NodeJSTest.scala
|
Scala
|
bsd-3-clause
| 1,567
|
package com.cpuheater.bot
import akka.actor.ActorSystem
import akka.event.Logging
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.directives.DebuggingDirectives
import akka.util.Timeout
import akka.http.scaladsl.server.Directives._
import akka.stream.{ActorMaterializer, ActorMaterializerSettings, Supervision}
import com.cpuheater.bot.route.FBRoute
import com.typesafe.config.ConfigFactory
import com.typesafe.scalalogging.LazyLogging
import scala.concurrent.duration._
/** Entry point: wires up the actor system, materializer and HTTP server for
 *  the Facebook messenger bot.
 *  NOTE(review): `extends App` uses DelayedInit, so the statement order below
 *  is significant — each implicit must be defined before anything that uses it.
 */
object BotApp extends App with FBRoute with LazyLogging {
  // Any unhandled exception in a stream stops it after being logged.
  val decider: Supervision.Decider = { e =>
    logger.error(s"Exception in stream $e")
    Supervision.Stop
  }
  implicit val actorSystem = ActorSystem("bot", ConfigFactory.load)
  val materializerSettings = ActorMaterializerSettings(actorSystem).withSupervisionStrategy(decider)
  implicit val materializer = ActorMaterializer(materializerSettings)(actorSystem)
  implicit val ec = actorSystem.dispatcher
  // Routes come from the FBRoute mixin; wrap them so request/response pairs are logged.
  val routes = {
    logRequestResult("bot") {
      fbRoute
    }
  }
  implicit val timeout = Timeout(30.seconds)
  val routeLogging = DebuggingDirectives.logRequestResult("RouteLogging", Logging.InfoLevel)(routes)
  Http().bindAndHandle(routeLogging, "localhost", 8080)
  logger.info("Starting")
}
|
cpuheater/scala-messenger-bot
|
src/main/scala/com/cpuheater/bot/BotApp.scala
|
Scala
|
apache-2.0
| 1,265
|
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author Arash Fard, Usman Nisar, Ayushi Jain, Aravind Kalimurthy, John Miller
* @version 1.2
* @date Thu Nov 25 11:28:31 EDT 2013
* @see LICENSE (MIT style license file).
*/
package scalation.graphalytics
import scala.collection._
import scala.collection.immutable.{Set => SET}
import scala.collection.mutable.{ArrayStack, ListBuffer, Map, HashMap, MutableList, Set}
import scala.math.pow
import scala.util.control.Breaks.{break, breakable}
import scala.util.Random
import scalation.stat.Statistic
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The 'StrictSim' class provides an implementation for strict simulation
 * graph pattern matching. This version uses `DualSim`.
 * @see hipore.com/ijbd/2014/IJBD%20Vol%201%20No%201%202014.pdf
 * @param g the data graph G(V, E, l)
 * @param q the query graph Q(U, D, k)
 */
class StrictSim (g: Graph, q: Graph)
      extends GraphMatcher (g, q)
{
    private val listOfDistinctReducedSet = new ListBuffer [SET [String]] () // contains total number of matches after post processing
                                                                            // NOTE(review): never populated in this class — presumably filled by a subclass/caller; verify
    private val mapOfBallWithSize = Map [Int, Long] ()                      // contains balls left after post processing with diameter.
                                                                            // NOTE(review): also never populated here — verify intended use
    private val listOfMatchedBallVertices = MutableList [Int] ()            // contains list of center vertices
    private val qmet = new GraphMetrics (q.clone, false)                    // creating graph metrics object of query graph
    private val dataSize  = g.size                                          // size of the data graph
    private val querySize = q.size                                          // size of the query graph
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Apply the Strict Graph Simulation pattern matching algorithm to find the mappings
     *  from the query graph 'q' to the data graph 'g'. These are represented by a
     *  multi-valued function 'phi' that maps each query graph vertex 'u' to a
     *  set of data graph vertices '{v}'.
     */
    def mappings (): Array [SET [Int]] = merge (mappings2 ())
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Mapping results per ball (ball center -> per-query-vertex match sets).
     */
    def mappings2 (): HashMap [Int, Array [SET [Int]]] = strictSim (new DualSim (g, q).mappings ())
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Merged mapping results, the union over all balls.
     *  @param matches the per-ball match sets keyed by ball center
     */
    def merge (matches: HashMap [Int, Array [SET [Int]]]): Array [SET [Int]] =
    {
        val phi_all = Array.ofDim [SET [Int]] (querySize)
        for (i <- 0 until querySize) phi_all (i) = SET [Int] ()
        for ((c, phi_c) <- matches) {
            println (s"(c, phi_c) = ($c, ${phi_c.deep})")
            for (i <- 0 until querySize) phi_all(i) ++= phi_c(i)
        } // for
        phi_all
    } // merge
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Performs strict simulation to find mappings with balls.
     *  @param phi the initial mapping after applying Dual to the whole graph
     */
    def strictSim (phi: Array [SET [Int]]): HashMap [Int, Array [SET [Int]]] =
    {
        if (phi.size == 0) { println ("No dual match."); return null }   // exit if no match after dual simulation
        val newGraph   = filterGraph (phi)                               // if doing strong sim more than once, must clone g
        val prunedSize = phi.flatten.toSet.size                          // size of feasible matches after strict simulation
        val qDiameter  = qmet.diam                                       // get the query diameter
        val balls      = HashMap [Int, Ball] ()                          // map of balls: center -> ball
        val matches    = HashMap [Int, Array [SET [Int]]] ()             // map of matches in balls: center -> match
        val gCenters   = (0 until q.size).flatMap(phi(_))                // set of mapped data graph centers
        val bCenters   = Set [Int] ()                                    // set of centers for all balls
        var ballSum    = 0
        for (center <- gCenters) {                                       // for each mapped data graph center
            val ball = new Ball (newGraph, center, qDiameter)            // create a new ball for that center vertex
            ballSum += ball.nodesInBall.size                             // calculate ball size
            val mat  = dualFilter (phi.clone, ball)                      // perform dual filter on the ball
            println (s"center = $center, mat = ${mat.deep}")
            balls.put (center, ball)
            if (mat.size != 0) { bCenters += center; matches += center -> mat }
            else println ("No match for ball centered at " + center + "\\n")
        } // for
        println ("SEQUENTIAL:    Data Graph Name:  " + g.name +
                 "\\n  Number of Data Graph Nodes:  " + dataSize +
                 "\\n            Query Graph Name:  " + q.name +
                 "\\n Number of Query Graph Nodes:  " + querySize +
                 "\\n     Number of Strict Matches:  " + bCenters.size +
                 "\\n    Graph Size after Pruning:  " + prunedSize + " nodes" +
                 "\\n              Query Diameter:  " + qDiameter +
                 "\\n           Average Ball Size:  " + (ballSum / prunedSize.toDouble) +
                 "\\n        Total Distinct Edges:  " + calculateTotalEdges (g, balls, bCenters) +
                 "\\n     Total Distinct Vertices:  " + calculateTotalVertices ())
        println ("Ball Diameter Metrics(Min, Max, Mean, StdDev): " + calculateBallDiameterMetrics (balls) )
        matches
    } // strictSim
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Prune the data graph by consider only those vertices and edges which
     *  are part of feasible matches after performing initial dual simulation.
     *  NOTE(review): this mutates 'g.ch' in place (side effect on the shared
     *  data graph) before building the pruned copy — clone 'g' first if it is
     *  reused elsewhere.
     *  @param phi  mappings from a query vertex u_q to { graph vertices v_g }
     */
    def filterGraph (phi: Array [SET [Int]]): Graph =
    {
        val nodesInSimset = phi.flatten.toSet                    // get all the vertices of feasible matches
        for (i <- 0 until dataSize) g.ch(i) &= nodesInSimset     // prune via intersection
        val newCh = Array.ofDim [SET [Int]] (dataSize)
        for (i <- 0 until dataSize) newCh(i) = SET [Int] ()
        for (u <- 0 until q.size; w <- phi(u)) {                 // new ch and pa set for data graph based upon feasible vertices
            for (v <- q.ch(u)) newCh(w) |= (g.ch(w) & phi(v))
        } // for
        new Graph (newCh, g.label, g.inverse, g.name + "2")      // create a new data graph
    } // filterGraph
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Perform dual simulation onto the ball.
     *  @param phi   mappings from a query vertex u_q to { graph vertices v_g }
     *  @param ball  the Ball B(Graph, Center, Radius)
     */
    def dualFilter (phi: Array [SET [Int]], ball: Ball): Array [SET [Int]] =
    {
        for (v <- phi.indices) phi(v) &= ball.nodesInBall        // project simset onto ball
        val filterSet = new ArrayStack [(Int, Int)] ()
        var filtered  = false
        for (u <- phi.indices; v <- phi(u) if ball.borderNodes contains v) {
            filtered = false                                     // filtering ball based on child relationship
            breakable { for (u1 <- q.ch(u)) {
                if ((ball.post (v) & phi (u1)).isEmpty) {
                    filterSet.push ((u, v))
                    filtered = true
                    break
                } // if
            }} // breakable for
            if (! filtered) {                                    // filtering ball based on parent relationship,
                breakable { for (u2 <- q.pa(u)) {                // if no child has been filtered out
                    if ((ball.pre (v) & phi(u2)).isEmpty) {
                        filterSet.push ((u, v))
                        break
                    } // if
                }} // breakable for
            } // if
        } // for
        while (! filterSet.isEmpty) {                            // refine ch and pa relationship for the vertex v,
            val (u, v) = filterSet.pop ()                        // which is now not a feasible match
            phi(u) -= v
            for (u2 <- q.pa(u); v2 <- (ball.pre (v) & phi(u2)) if (ball.post (v2) & phi(u)).isEmpty)
                filterSet.push ((u2, v2))
            for (u1 <- q.ch(u); v1 <- (ball.post (v) & phi(u1)) if (ball.pre (v1) & phi(u)).isEmpty)
                filterSet.push ((u1, v1))
        } // while
        val chSet = HashMap [Int, Set [Int]] ()
        val paSet = HashMap [Int, Set [Int]] ()
        // create new ch and pa set for the ball after above pruning
        for (u <- phi.indices; v <- phi(u); uc <- q.ch(u); vc <- (ball.post (v) & phi(uc))) {
            chSet.getOrElseUpdate (v, Set [Int] ())  += vc
            paSet.getOrElseUpdate (vc, Set [Int] ()) += v
        } // for
        // Finding max perfect subgraph: keep only vertices reachable from the
        // ball center through the pruned ch/pa edges.
        val stack = new ArrayStack [Int] ()
        val visited = Set (ball.center)
        stack.push (ball.center)
        while (! stack.isEmpty) {
            val v = stack.pop ()
            for (child <- (chSet.getOrElse (v, Set ()) | paSet.getOrElse (v, Set ()))) {
                if (! visited.contains (child)) {
                    stack.push (child)
                    visited += child
                } // if
            } // for
        } // while
        for ( v <- phi.indices) phi(v) = phi(v) & visited
        //fixes the edges in the ball
        //(note that it does not change the parent set; this is only used for printing)
        //uncomment if you want to see the ball after finding maximum perfect subgraph
        ball.chMap = Map [Int, Set [Int]] ()
        val matchNodes = phi.flatten.toSet
        for ((n, nset) <- chSet; nc <- nset) {
            if ((matchNodes contains n) && (matchNodes contains nc)) ball.chMap.getOrElseUpdate (n, Set () ) += nc
        } // for
        // an empty match set for any query vertex invalidates the whole ball
        for (v <- phi.indices if phi(v).isEmpty) return Array [SET [Int]] ()
        phi
    } //dualFilter
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Count distinct vertices left after post processing.
     *  NOTE(review): depends on 'listOfDistinctReducedSet', which is never
     *  filled within this class — as written this always returns 0; verify.
     */
    def calculateTotalVertices (): Int =
    {
        val totalSet = Set [String] ()
        for (i <- 0 until listOfDistinctReducedSet.length) totalSet ++= listOfDistinctReducedSet(i)
        totalSet.size
    } // calculateTotalVertices
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Count distinct edges left after post processing.
     *  @param g             the data graph  G(V, E, l)
     *  @param balls         mappings from a center vertex to the Ball B(Graph, Center, Radius)
     *  @param matchCenters  set of all vertices which are considered as center
     */
    def calculateTotalEdges (g: Graph, balls: HashMap [Int, Ball], matchCenters: Set [Int]): Int =
    {
        val distinctEdges = Set [String] ()
        for (vert_id <- 0 until g.ch.length; if balls.keySet.contains (vert_id)) {   // .get below is safe: key presence checked here
            balls.get (vert_id).get.chMap.foreach (i => i._2.foreach (j => distinctEdges += (i._1.toString + "_" + j.toString)))
        } // for
        distinctEdges.size
    } // calculateTotalEdges
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Calculate statistics (e.g., min, max, average diameter and standard deviation)
     *  on the balls left after post-processing.
     *  NOTE(review): iterates 'listOfMatchedBallVertices', which is never
     *  filled within this class — verify callers populate it.
     *  @param balls  mappings from a center vertex to the Ball B(Graph, Center, Radius)
     */
    def calculateBallDiameterMetrics (balls: HashMap [Int, Ball]): Statistic =
    {
        val ballStats = new Statistic ()
        for (vert_id <- listOfMatchedBallVertices) ballStats.tally (balls.get (vert_id).get.getBallDiameter)
        ballStats
    } // calculateBallDiameterMetrics
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the vertex from an array of central vertices, those which have
     *  highest 'ch' set size and lowest frequency of label in the query graph, i.e.,
     *  highest ratio.
     *  @param qmet  the graph metrics of the query graph (its 'central' vertices are scanned)
     */
    def selectivityCriteria (qmet: GraphMetrics): Int =
    {
        var index = 0
        var max   = 0.0
        for (ctr <- qmet.central) {
            val ratio = qmet.g.ch(ctr).size.toDouble / qmet.g.labelMap (qmet.g.label(ctr)).size.toDouble
            if (max < ratio) { max = ratio; index = ctr }
        } // for
        index
    } // selectivityCriteria
} // StrictSim class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: ::::::::::::
/** The `StrictSimTest` object is used to test the `StrictSim` class.
* > run-main scalation.graphalytics.StrictSimTest
*/
object StrictSimTest extends App
{
    val g = Graph.g1p
    val q = Graph.q1p
    // sanity-check and display both graphs before matching
    println (s"g.checkEdges = ${g.checkEdges}")
    g.printG ()
    println (s"q.checkEdges = ${q.checkEdges}")
    q.printG ()
    (new StrictSim (g, q)).test ("StrictSim")    // Strict Graph Simulation Pattern Matcher
} // StrictSimTest object
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: ::::::::::::
/** The `StrictSimTest2` object is used to test the `StrictSim` class.
* > run-main scalation.graphalytics.StrictSimTest2
*/
object StrictSimTest2 extends App
{
    val g = Graph.g2p
    val q = Graph.q2p
    // sanity-check and display both graphs before matching
    println (s"g.checkEdges = ${g.checkEdges}")
    g.printG ()
    println (s"q.checkEdges = ${q.checkEdges}")
    q.printG ()
    (new StrictSim (g, q)).test ("StrictSim")    // Strict Graph Simulation Pattern Matcher
} // StrictSimTest2 object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `StrictSimTest3` object test the `StrictSim` class by passing data graph
* and query graph relative file paths.
* > run-main scalation.graphalytics.StrictSimTest3
*/
object StrictSimTest3 extends App
{
    // graphs are loaded from relative file paths rather than built-in examples
    val g = GraphIO ("gfile")
    val q = GraphIO ("qfile")
    println (s"q.checkEdges = ${q.checkEdges}")
    q.printG ()
    (new StrictSim (g, q)).test ("StrictSim")    // Strict Graph Simulation Pattern Matcher
} // StrictSimTest3 object
|
NBKlepp/fda
|
scalation_1.2/src/main/scala/scalation/graphalytics/StrictSim.scala
|
Scala
|
mit
| 14,810
|
package mesosphere.marathon
package integration
import java.util.concurrent.atomic.AtomicInteger
import mesosphere.AkkaIntegrationTest
import mesosphere.marathon.integration.setup.{ EmbeddedMarathonTest, IntegrationHealthCheck }
import mesosphere.marathon.raml.{ App, GroupUpdate, UpgradeStrategy }
import mesosphere.marathon.state.{ Group, PathId }
import scala.concurrent.duration._
/** Integration tests for group deployments against an embedded Marathon:
 *  creation, update, rollback, forced operations and dependency ordering.
 *  Helpers `appProxy`, `registerAppProxyHealthCheck`, `waitForDeployment`,
 *  `waitForTasks` and the `marathon` client come from `EmbeddedMarathonTest`.
 */
@IntegrationTest
class GroupDeployIntegrationTest extends AkkaIntegrationTest with EmbeddedMarathonTest {
  import PathId._
  // Counters used to generate unique app/group ids so tests don't collide.
  val appIdCount = new AtomicInteger()
  val groupIdCount = new AtomicInteger()
  def nextAppId(suffix: Option[String] = None): String = s"app-${suffix.getOrElse(appIdCount.getAndIncrement())}"
  def nextGroupId(suffix: Option[String] = None): PathId = s"group-${suffix.getOrElse(groupIdCount.getAndIncrement())}".toRootTestPath
  // Loan pattern: run the test with a fresh group id and force-delete the group
  // afterwards, even if the test body throws.
  def temporaryGroup(testCode: (PathId) => Any): Unit = {
    val gid = nextGroupId()
    try {
      testCode(gid)
    } finally {
      marathon.deleteGroup(gid, force = true)
    }
  }
  "GroupDeployment" should {
    "create empty group successfully" in {
      Given("A group which does not exist in marathon")
      val group = Group.emptyUpdate(nextGroupId(Some("which-does-not-exist-created-successfully")))
      When("The group gets created")
      val result = marathon.createGroup(group)
      Then("The group is created. A success event for this group is send.")
      result should be(Created)
      waitForDeployment(result)
    }
    "update empty group successfully" in {
      Given("An existing group")
      val name = "test2".toRootTestPath
      val group = Group.emptyUpdate(name)
      waitForDeployment(marathon.createGroup(group))
      When("The group gets updated")
      val dependencies = Set("/test".toTestPath.toString)
      waitForDeployment(marathon.updateGroup(name, group.copy(dependencies = Some(dependencies))))
      Then("The group is updated")
      val result = marathon.group("test2".toRootTestPath)
      result should be(OK)
      result.value.dependencies should be(dependencies)
    }
    "deleting an existing group gives a 200 http response" in {
      Given("An existing group")
      val group = Group.emptyUpdate(nextGroupId(Some("which-exists-give-200-when-deleted")))
      waitForDeployment(marathon.createGroup(group))
      When("The group gets deleted")
      val result = marathon.deleteGroup(PathId(group.id.get))
      waitForDeployment(result)
      Then("The group is deleted")
      result should be(OK)
      // only expect the test base group itself
      marathon.listGroupsInBaseGroup.value.map(_.id) should not contain (group.id)
    }
    "delete a non existing group should give a 404 http response" in {
      When("A non existing group is deleted")
      val result = marathon.deleteGroup("does_not_exist".toRootTestPath)
      Then("We get a 404 http response code")
      result should be(NotFound)
    }
    "create a group with applications to start" in {
      val id = "group-with-appication-to-start".toRootTestPath
      val appId = id / nextAppId()
      Given(s"A group with one application with id $appId")
      val app = appProxy(appId, "v1", 2, healthCheck = None)
      val group = GroupUpdate(Some(id.toString), apps = Some(Set(app)))
      When("The group is created")
      waitForDeployment(marathon.createGroup(group))
      Then("A success event is send and the application has been started")
      val tasks = waitForTasks(PathId(app.id), app.instances)
      tasks should have size 2
    }
    "update a group with applications to restart" in {
      val id = nextGroupId(Some("with-application-to-restart"))
      val appId = id / nextAppId()
      Given(s"A group with one application started with id $appId")
      val app1V1 = appProxy(appId, "v1", 2, healthCheck = None)
      waitForDeployment(marathon.createGroup(GroupUpdate(Some(id.toString), Some(Set(app1V1)))))
      waitForTasks(PathId(app1V1.id), app1V1.instances)
      When("The group is updated, with a changed application")
      val app1V2 = appProxy(appId, "v2", 2, healthCheck = None)
      waitForDeployment(marathon.updateGroup(id, GroupUpdate(Some(id.toString), Some(Set(app1V2)))))
      Then("A success event is send and the application has been started")
      waitForTasks(PathId(app1V2.id), app1V2.instances)
    }
    "update a group with the same application so no restart is triggered" in {
      val id = nextGroupId(Some("with-the-same-application-no-restart-triggered"))
      val appId = id / nextAppId()
      Given(s"A group with one application started with id $appId")
      val app1V1 = appProxy(appId, "v1", 2, healthCheck = None)
      waitForDeployment(marathon.createGroup(GroupUpdate(Some(id.toString), Some(Set(app1V1)))))
      waitForTasks(PathId(app1V1.id), app1V1.instances)
      val tasks = marathon.tasks(appId)
      When("The group is updated, with the same application")
      waitForDeployment(marathon.updateGroup(id, GroupUpdate(Some(id.toString), Some(Set(app1V1)))))
      Then("There is no deployment and all tasks still live")
      marathon.listDeploymentsForBaseGroup().value should be ('empty)
      marathon.tasks(appId).value.toSet should be(tasks.value.toSet)
    }
    "create a group with application with health checks" in {
      val id = nextGroupId(Some("create-with-application-with-health-checks"))
      val appId = id / nextAppId()
      Given(s"A group with one application with id $appId")
      val proxy = appProxy(appId, "v1", 1)
      val group = GroupUpdate(Some(id.toString), Some(Set(proxy)))
      When("The group is created")
      val create = marathon.createGroup(group)
      Then("A success event is send and the application has been started")
      waitForDeployment(create)
    }
    "upgrade a group with application with health checks" in {
      val id = nextGroupId(Some("update-with-application-with-health-checks"))
      val appId = id / nextAppId()
      Given(s"A group with one application with id $appId")
      val proxy = appProxy(appId, "v1", 1)
      val group = GroupUpdate(Some(id.toString), Some(Set(proxy)))
      waitForDeployment(marathon.createGroup(group))
      val check = registerAppProxyHealthCheck(PathId(proxy.id), "v1", state = true)
      When("The group is updated")
      // v1 goes briefly unhealthy during the upgrade window, then recovers
      check.afterDelay(1.second, state = false)
      check.afterDelay(3.seconds, state = true)
      val update = marathon.updateGroup(id, group.copy(apps = Some(Set(appProxy(appId, "v2", 1)))))
      Then("A success event is send and the application has been started")
      waitForDeployment(update)
    }
    "rollback from an upgrade of group" in {
      val gid = nextGroupId(Some("with-rollback-from-an-upgrade-of-group"))
      val appId = gid / nextAppId()
      Given(s"A group with one application with id $appId")
      val proxy = appProxy(appId, "v1", 2)
      val group = GroupUpdate(Some(gid.toString), Some(Set(proxy)))
      val create = marathon.createGroup(group)
      waitForDeployment(create)
      waitForTasks(PathId(proxy.id), proxy.instances)
      val v1Checks = registerAppProxyHealthCheck(appId, "v1", state = true)
      When("The group is updated")
      waitForDeployment(marathon.updateGroup(gid, group.copy(apps = Some(Set(appProxy(appId, "v2", 2))))))
      Then("The new version is deployed")
      val v2Checks = registerAppProxyHealthCheck(appId, "v2", state = true)
      eventually {
        v2Checks.pinged.get should be(true) withClue "v2 apps did not come up"
      }
      When("A rollback to the first version is initiated")
      // reset the ping flag so we can prove v1 comes back after the rollback
      v1Checks.pinged.set(false)
      waitForDeployment(marathon.rollbackGroup(gid, create.value.version))
      Then("The rollback will be performed and the old version is available")
      eventually {
        v1Checks.pinged.get should be(true) withClue "v1 apps did not come up again"
      }
    }
    "during Deployment the defined minimum health capacity is never undershot" in {
      val id = nextGroupId(Some("that-during-deployment-min-health-capacity-never-undershot"))
      val appId = id / nextAppId()
      Given(s"A group with one application with id $appId")
      // minimumHealthCapacity = 1.0: all old instances must stay up during upgrade
      val proxy = appProxy(appId, "v1", 2).copy(upgradeStrategy = Some(UpgradeStrategy(1, 1)))
      val group = GroupUpdate(Some(id.toString), Some(Set(proxy)))
      val create = marathon.createGroup(group)
      waitForDeployment(create)
      waitForTasks(appId, proxy.instances)
      val v1Check = registerAppProxyHealthCheck(appId, "v1", state = true)
      When("The new application is not healthy")
      val v2Check = registerAppProxyHealthCheck(appId, "v2", state = false) //will always fail
      val update = marathon.updateGroup(id, group.copy(apps = Some(Set(appProxy(appId, "v2", 2)))))
      Then("All v1 applications are kept alive")
      v1Check.pinged.set(false)
      eventually {
        v1Check.pinged.get should be(true) withClue "v1 are not alive"
      }
      When("The new application becomes healthy")
      v2Check.state = true //make v2 healthy, so the app can be cleaned
      waitForDeployment(update)
    }
    "An upgrade in progress cannot be interrupted without force" in temporaryGroup { gid =>
      val appId = gid / nextAppId(Some("with-upgrade-in-progress-cannot-be-interrupted-without-force"))
      Given(s"A group with one application with id $appId with an upgrade in progress")
      val proxy = appProxy(appId, "v1", 2)
      val group = GroupUpdate(Some(gid.toString), Some(Set(proxy)))
      val create = marathon.createGroup(group)
      waitForDeployment(create)
      registerAppProxyHealthCheck(appId, "v2", state = false) //will always fail
      marathon.updateGroup(gid, group.copy(apps = Some(Set(appProxy(appId, "v2", 2)))))
      When("Another upgrade is triggered, while the old one is not completed")
      val result = marathon.updateGroup(gid, group.copy(apps = Some(Set(appProxy(appId, "v3", 2)))))
      Then("An error is indicated")
      result should be (Conflict)
      waitForEvent("group_change_failed")
      When("Another upgrade is triggered with force, while the old one is not completed")
      val force = marathon.updateGroup(gid, group.copy(apps = Some(Set(appProxy(appId, "v4", 2)))), force = true)
      Then("The update is performed")
      waitForDeployment(force)
    }
    "A group with a running deployment can not be deleted without force" in temporaryGroup{ gid =>
      val appId = gid / nextAppId(Some("with-running-deployment-cannot-be-deleted-without-force"))
      Given(s"A group with one application with id $appId with an upgrade in progress")
      val proxy = appProxy(appId, "v1", 2)
      registerAppProxyHealthCheck(appId, "v1", state = false) //will always fail
      val group = GroupUpdate(Some(gid.toString), Some(Set(proxy)))
      marathon.createGroup(group)
      // There is race condition in mesos which we trigger by issuing Launch and Kill requests very fast after each other
      // https://issues.apache.org/jira/browse/MESOS-7783
      // To prevent it, we should wait until both tasks are RUNNING
      waitForTasks(appId, 2)
      When("Delete the group, while the deployment is in progress")
      val deleteResult = marathon.deleteGroup(gid)
      Then("An error is indicated")
      deleteResult should be(Conflict)
      waitForEvent("group_change_failed")
      When("Delete is triggered with force, while the deployment is not completed")
      val force = marathon.deleteGroup(gid, force = true)
      force.success should be(true) withClue (s"Could not force delete $gid: Response: code=${force.code} body=${force.entityString}")
      Then("The delete is performed")
      waitForDeployment(force)
    }
    "Groups with Applications with circular dependencies can not get deployed" in {
      val gid = nextGroupId(Some("with-application-with-circular-dependencies-cannot-be-deployed"))
      Given(s"A group with id $gid with 3 circular dependent applications")
      // db -> frontend1 -> service -> db forms the cycle
      val db = appProxy(gid / "db", "v1", 1, dependencies = Set(gid / "frontend1"))
      val service = appProxy(gid / "service", "v1", 1, dependencies = Set(db.id.toPath))
      val frontend = appProxy(gid / "frontend1", "v1", 1, dependencies = Set(service.id.toPath))
      val group = GroupUpdate(Option(gid.toString), Option(Set(db, service, frontend)))
      When("The group gets posted")
      val result = marathon.createGroup(group)
      Then("An unsuccessful response has been posted, with an error indicating cyclic dependencies")
      result.success should be(false) withClue s"Response code is ${result.code}: ${result.entityString}"
      val errors = (result.entityJson \\ "details" \\\\ "errors").flatMap(_.as[Seq[String]])
      errors.find(_.contains("cyclic dependencies")) shouldBe defined withClue s"""errors "$errors" did not contain "cyclic dependencies" error."""
    }
    "Applications with dependencies get deployed in the correct order" in temporaryGroup { gid =>
      Given(s"A group with id $gid with 3 dependent applications")
      val db = appProxy(gid / "db", "v1", 1)
      val service = appProxy(gid / "service", "v1", 1, dependencies = Set(db.id.toPath))
      val frontend = appProxy(gid / "frontend1", "v1", 1, dependencies = Set(service.id.toPath))
      val group = GroupUpdate(Option(gid.toString), Option(Set(db, service, frontend)))
      When("The group gets deployed")
      // record the first time each app reports healthy to verify deploy order
      var ping = Map.empty[String, Long]
      def storeFirst(health: IntegrationHealthCheck): Unit = {
        if (!ping.contains(health.appId.toString)) ping += health.appId.toString -> System.currentTimeMillis()
      }
      registerAppProxyHealthCheck(PathId(db.id), "v1", state = true).withHealthAction(storeFirst)
      registerAppProxyHealthCheck(PathId(service.id), "v1", state = true).withHealthAction(storeFirst)
      registerAppProxyHealthCheck(PathId(frontend.id), "v1", state = true).withHealthAction(storeFirst)
      val response = marathon.createGroup(group)
      response.success should be(true) withClue (s"Could create group $gid: Response: code=${response.code} body=${response.entityString}")
      waitForDeployment(response)
      Then("The correct order is maintained")
      ping should have size 3
      ping(db.id) should be < ping(service.id) withClue s"database was deployed at ${ping(db.id)} and service at ${ping(service.id)}"
      ping(service.id) should be < ping(frontend.id) withClue s"service was deployed at ${ping(service.id)} and frontend at ${ping(frontend.id)}"
    }
    "Groups with dependencies get deployed in the correct order" in temporaryGroup { gid =>
      Given(s"A group with id $gid with 3 dependent applications")
      val db = appProxy(gid / "db/db1", "v1", 1)
      val service = appProxy(gid / "service/service1", "v1", 1)
      val frontend = appProxy(gid / "frontend/frontend1", "v1", 1)
      // dependencies are declared between the sub-groups, not the apps
      val group = GroupUpdate(
        Option(gid.toString),
        Option(Set.empty[App]),
        Option(Set(
          GroupUpdate(Some("db"), apps = Some(Set(db))),
          GroupUpdate(Some("service"), apps = Some(Set(service))).copy(dependencies = Some(Set((gid / "db").toString))),
          GroupUpdate(Some("frontend"), apps = Some(Set(frontend))).copy(dependencies = Some(Set((gid / "service").toString)))
        ))
      )
      When("The group gets deployed")
      // record the first time each app reports healthy to verify deploy order
      var ping = Map.empty[String, Long]
      def storeFirst(health: IntegrationHealthCheck): Unit = {
        if (!ping.contains(health.appId.toString)) ping += health.appId.toString -> System.currentTimeMillis()
      }
      registerAppProxyHealthCheck(PathId(db.id), "v1", state = true).withHealthAction(storeFirst)
      registerAppProxyHealthCheck(PathId(service.id), "v1", state = true).withHealthAction(storeFirst)
      registerAppProxyHealthCheck(PathId(frontend.id), "v1", state = true).withHealthAction(storeFirst)
      waitForDeployment(marathon.createGroup(group))
      Then("The correct order is maintained")
      ping should have size 3
      ping(db.id) should be < ping(service.id) withClue s"database was deployed at ${ping(db.id)} and service at ${ping(service.id)}"
      ping(service.id) should be < ping(frontend.id) withClue s"service was deployed at ${ping(service.id)} and frontend at ${ping(frontend.id)}"
    }
  }
}
|
janisz/marathon
|
src/test/scala/mesosphere/marathon/integration/GroupDeployIntegrationTest.scala
|
Scala
|
apache-2.0
| 16,289
|
/**
* BuildDocTreeSpec.scala
*
* Unit tests for tree generation: find connected components, build thresholded tree
*
* Overview, created August 2012
* @author Jonathan Stray
*
*/
package com.overviewdocs.clustering
import com.overviewdocs.nlp.UnigramDocumentVectorGenerator
import org.specs2.mutable.Specification
// Spec for BuildDocTree.gatherEmptyDocs: documents whose vectors become empty
// after term pruning must be separated from documents that retain terms.
class BuildDocTreeSpec extends Specification {
"BuildDocTree" should {
"separate empty and non-empty nodes" in {
val vectorGen = new UnigramDocumentVectorGenerator
vectorGen.addDocument(1, Seq("word1","word2"))
vectorGen.addDocument(2, Seq("singular")) // will be removed, N=1
vectorGen.addDocument(3, Seq("word1","word2"))
vectorGen.addDocument(4, Seq("word1","word2")) // need at least three docs with same words as vector generator removes all words with N<3
vectorGen.minDocsToKeepTerm = 3 // this is the default, but changeable, so set here
val docVecs = vectorGen.documentVectors
// Doc 2's only term falls below minDocsToKeepTerm, so its vector is empty.
val (nonEmpty, empty) = BuildDocTree.gatherEmptyDocs(docVecs)
nonEmpty.docs should containTheSameElementsAs(Seq(1,3,4))
empty.docs should containTheSameElementsAs(Seq(2))
}
}
}
|
overview/overview-server
|
worker/src/test/scala/com/overviewdocs/clustering/BuildDocTreeSpec.scala
|
Scala
|
agpl-3.0
| 1,194
|
package com.metl.data
import java.time.ZoneId
import com.metl.utils._
import com.metl.model._
import scala.xml._
import net.liftweb.common._
import net.liftweb.util.Helpers._
import Privacy._
import com.metl.external.ForeignRelationship
// Defensive XML extraction helpers. Every accessor is wrapped in Lift's tryo
// and falls back to a sentinel ("" / -1 / false / PUBLIC) instead of throwing,
// so malformed messages degrade rather than crash the parser.
trait XmlUtils {
def getPrivacyByName(content:NodeSeq,name:String):Privacy = tryo(Privacy.parse((content \\\\ name).text)).openOr(Privacy.PUBLIC)
def getColorByName(content:NodeSeq,name:String):Color = tryo(ColorConverter.fromText(getValueOfNode(content,name))).openOr(Color.default)
def getStringByName(content:NodeSeq,name:String):String = tryo(getValueOfNode(content,name)).openOr("unknown "+name)
def getBooleanByName(content:NodeSeq,name:String):Boolean = tryo(getValueOfNode(content,name).toBoolean).openOr(false)
// Numeric getters use negative sentinels; callers compare against -1 to detect absence.
def getDoubleByName(content:NodeSeq,name:String):Double = tryo(getValueOfNode(content,name).toDouble).openOr(-1.0)
def getLongByName(content:NodeSeq,name:String):Long = tryo(getValueOfNode(content,name).toLong).openOr(-1L)
def getIntByName(content:NodeSeq,name:String):Int = tryo(getValueOfNode(content,name).toInt).openOr(-1)
def getListOfStringsByNameWithin(content:NodeSeq,name:String,containerName:String) = tryo((getXmlByName(content,containerName) \\\\ name).map(i => i.head.text).toList).openOr(List.empty[String])
def getValueOfNode(content:NodeSeq,nodeName:String):String = tryo((content \\\\ nodeName).head.text).openOr("")
def getXmlByName(content:NodeSeq,name:String):NodeSeq = tryo((content \\\\ name)).openOr(NodeSeq.Empty)
def getAttributeOfNode(content:NodeSeq,nodeName:String,attributeName:String):String = tryo((content \\\\ nodeName).seq(0).attribute(attributeName).getOrElse(NodeSeq.Empty).text).openOr("")
// Extracts the canvas-content envelope fields shared by all canvas stanzas.
def parseCanvasContent(i:NodeSeq):ParsedCanvasContent = {
val target = getStringByName(i,"target")
val privacy = getPrivacyByName(i,"privacy")
val slide = getStringByName(i,"slide")
val identity = getStringByName(i,"identity")
ParsedCanvasContent(target,privacy,slide,identity)
}
def parsedCanvasContentToXml(p:ParsedCanvasContent):Seq[Node] = {
<target>{p.target}</target>
<privacy>{p.privacy.toString.toLowerCase}</privacy>
<slide>{p.slide}</slide>
<identity>{p.identity}</identity>
}
// Extracts author/timestamp/audiences common to every MeTL stanza.
def parseMeTLContent(i:NodeSeq,config:ServerConfiguration = ServerConfiguration.empty):ParsedMeTLContent = {
val author = getStringByName(i,"author")
// Timestamp fallback chain: message/@time, then metlMetaData/timestamp,
// then message/@timestamp; -1 marks "not found".
val timestamp = {
val failed = -1L
tryo(getAttributeOfNode(i,"message","time").toLong).openOr({
getLongByName(getXmlByName(i,"metlMetaData"),"timestamp") match {
case l:Long if l == failed => tryo(getAttributeOfNode(i,"message","timestamp").toLong).openOr(failed)
case l:Long => l
//case _ => failed
}
})
}
// Audiences are encoded as <audience> elements with four mandatory attributes;
// an element missing any attribute is silently skipped by the for-comprehension.
val audiences = tryo(((i \\\\ "audiences") \\\\ "audience").flatMap(an => {
for (
domain <- (an \\ "@domain").headOption;
name <- (an \\ "@name").headOption;
audienceType <- (an \\ "@type").headOption;
action <- (an \\ "@action").headOption
) yield {
Audience(config,domain.text,name.text,audienceType.text,action.text)
}
})).openOr(List.empty[Audience]) //this is where I've got to parse it out.
//val timestamp = tryo(getAttributeOfNode(i,"message","timestamp").toLong).openOr(-1L)
//val timestamp = getLongByName(getXmlByName(i,"metlMetaData"),"timestamp")
ParsedMeTLContent(author,timestamp,audiences.toList)
}
def parsedMeTLContentToXml(p:ParsedMeTLContent):Seq[Node] = {
<author>{p.author}</author>
<audiences>{p.audiences.map(a => {
<audience domain={a.domain} name={a.name} type={a.audienceType} action={a.action}/>
})}</audiences>
}
// hasChild checks direct children only; hasSubChild searches all descendants.
def hasChild(in:NodeSeq,tagName:String):Boolean = (in \\ tagName).length > 0
def hasSubChild(in:NodeSeq,tagName:String):Boolean = (in \\\\ tagName).length > 0
}
// Envelope fields common to every MeTL stanza (timestamp is -1 when unparseable).
case class ParsedMeTLContent(author:String,timestamp:Long,audiences:List[Audience])
// Envelope fields common to every canvas-content stanza.
case class ParsedCanvasContent(target:String,privacy:Privacy,slide:String,identity:String)
class GenericXmlSerializer(config:ServerConfiguration) extends Serializer with XmlUtils with Logger {
// This serializer's wire representation is scala.xml.NodeSeq.
type T = NodeSeq
// Name of the server configuration this serializer was built for.
val configName = config.name
// Dispatches an incoming XML stanza to the matching typed parser by probing for
// characteristic child elements. Case ORDER matters: specific element names are
// tried first, then the generic canvas-content shape (target/privacy/slide/identity),
// then author+message as a generic stanza, and finally raw unhandled data.
override def toMeTLData(input:NodeSeq):MeTLData = Stopwatch.time("GenericXmlSerializer.toMeTLStanza",{
input match {
case i:NodeSeq if hasChild(i,"ink") => toMeTLInk(i)
case i:NodeSeq if hasChild(i,"textbox") => toMeTLText(i)
case i:NodeSeq if hasChild(i,"image") => toMeTLImage(i)
case i:NodeSeq if hasChild(i,"video") => toMeTLVideo(i)
case i:NodeSeq if hasChild(i,"dirtyInk") => toMeTLDirtyInk(i)
case i:NodeSeq if hasChild(i,"dirtyText") => toMeTLDirtyText(i)
case i:NodeSeq if hasChild(i,"dirtyImage") => toMeTLDirtyImage(i)
case i:NodeSeq if hasChild(i,"dirtyVideo") => toMeTLDirtyVideo(i)
case i:NodeSeq if hasChild(i,"moveDelta") => toMeTLMoveDelta(i)
case i:NodeSeq if hasChild(i,"quiz") => toMeTLQuiz(i)
case i:NodeSeq if hasChild(i,"quizResponse") => toMeTLQuizResponse(i)
case i:NodeSeq if hasChild(i,"screenshotSubmission") => toSubmission(i)
case i:NodeSeq if hasChild(i,"attendance") => toMeTLAttendance(i)
// case i:NodeSeq if hasChild(i,"body") => toMeTLCommand(i)
case i:NodeSeq if hasChild(i,"command") => toMeTLCommand(i)
case i:NodeSeq if hasChild(i,"fileResource") => toMeTLFile(i)
case i:NodeSeq if hasChild(i,"videoStream") => toMeTLVideoStream(i)
case i:NodeSeq if hasChild(i,"theme") => toTheme(i)
case i:NodeSeq if hasChild(i,"grade") => toGrade(i)
case i:NodeSeq if hasChild(i,"numericGradeValue") => toNumericGradeValue(i)
case i:NodeSeq if hasChild(i,"booleanGradeValue") => toBooleanGradeValue(i)
case i:NodeSeq if hasChild(i,"textGradeValue") => toTextGradeValue(i)
case i:NodeSeq if hasChild(i,"chatMessage") => toChatMessage(i)
case i:NodeSeq if hasChild(i,"undeletedCanvasContent") => toMeTLUndeletedCanvasContent(i)
case i:NodeSeq if hasSubChild(i,"target") && hasSubChild(i,"privacy") && hasSubChild(i,"slide") && hasSubChild(i,"identity") => toMeTLUnhandledCanvasContent(i)
case i:NodeSeq if (((i \\\\ "author").length > 0) && ((i \\\\ "message").length > 0)) => toMeTLUnhandledStanza(i)
case other:NodeSeq => toMeTLUnhandledData(other)
}
})
// Builds an Elem named rootName containing additionalNodes, optionally wrapped in a
// <message> envelope. Attributes are threaded onto the OUTER element as an
// UnprefixedAttribute chain (MetaData is a linked list, so we fold over pairs).
protected def metlXmlToXml(rootName:String,additionalNodes:Seq[Node],wrapWithMessage:Boolean = false,additionalAttributes:List[(String,String)] = List.empty[(String,String)]) = Stopwatch.time("GenericXmlSerializer.metlXmlToXml", {
val attrs = additionalAttributes.foldLeft(scala.xml.Null.asInstanceOf[scala.xml.MetaData])((acc,item) => {
item match {
case (k:String,v:String) => new UnprefixedAttribute(k,v,acc)
case _ => acc
}
})
wrapWithMessage match {
// When wrapping, the attributes go on <message>, not on the inner rootName element.
case true => {
new Elem(null, "message", attrs, TopScope, false, new Elem(null, rootName, Null, TopScope, false, additionalNodes: _*))
}
case _ => new Elem(null, rootName, attrs, TopScope, false, additionalNodes:_*)
}
})
// Serializes a stanza: author/audiences envelope plus caller nodes, wrapped in
// <message timestamp="..."> via metlXmlToXml.
protected def metlContentToXml(rootName:String,input:MeTLStanza,additionalNodes:Seq[Node]) = Stopwatch.time("GenericXmlSerializer.metlContentToXml", {
val pmc = parsedMeTLContentToXml(ParsedMeTLContent(input.author,input.timestamp,input.audiences)) ++ additionalNodes
metlXmlToXml(rootName,pmc,true,List(("timestamp",input.timestamp.toString)))
})
// Like metlContentToXml, but also prepends the canvas-content envelope
// (target/privacy/slide/identity) before the caller-supplied nodes.
protected def canvasContentToXml(rootName:String,input:MeTLCanvasContent,additionalNodes:Seq[Node]) = Stopwatch.time("GenericXmlSerializer.canvasContentToXml", {
metlContentToXml(rootName,input,parsedCanvasContentToXml(ParsedCanvasContent(input.target,input.privacy,input.slide,input.identity)) ++ additionalNodes)
})
// Renders a history as <history jid="...">, with live stanzas inline and deleted
// canvas contents grouped under a trailing <deletedContents> element.
override def fromHistory(input:History):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromHistory", {
<history jid={input.jid}>{input.getAll.map(i => fromMeTLData(i))}<deletedContents>{input.getDeletedCanvasContents.map(dcc => fromMeTLData(dcc))}</deletedContents></history>
})
// Rebuilds a History from <history> XML: children of <deletedContents> become
// deleted canvas contents; every other child element becomes a live stanza.
// Non-Elem input produces an empty History (the outer match has no other case).
override def toHistory(input:NodeSeq):History = Stopwatch.time("GenericXmlSerializer.toHistory",{
val history = new History((input \\ "@jid").headOption.map(_.text).getOrElse(""))
input match {
case e:Elem => e.child.foreach{
case el:Elem if el.label == "deletedContents" => {
el.child.foreach{
case c:NodeSeq => toMeTLData(c) match {
case ms:MeTLCanvasContent => history.addDeletedCanvasContent(ms)
case _ => {}
}
}
}
case c:NodeSeq => toMeTLData(c) match {
case ms:MeTLStanza => history.addStanza(ms)
case _ => {}
}
}
}
history
})
// Tag recorded on unhandled payloads so the from* side knows they were XML.
protected val xmlType = "xml"
// Wraps unrecognised XML verbatim so it can round-trip without data loss.
override def toMeTLUnhandledData(i:NodeSeq) = MeTLUnhandledData(config,i.toString,xmlType)
// Unrecognised stanza: keep author/timestamp/audiences plus the raw XML string.
override def toMeTLUnhandledStanza(i:NodeSeq) = {
val m = parseMeTLContent(i,config)
MeTLUnhandledStanza(config,m.author,m.timestamp,i.toString,xmlType,m.audiences)
}
// Unrecognised canvas content: keep both envelopes plus the raw XML string.
// The two 1.0 arguments are literal values passed positionally — their meaning
// (presumably x/y scale) depends on MeTLUnhandledCanvasContent's signature.
override def toMeTLUnhandledCanvasContent(i:NodeSeq) = {
val cc = parseCanvasContent(i)
val m = parseMeTLContent(i,config)
MeTLUnhandledCanvasContent(config,m.author,m.timestamp,cc.target,cc.privacy,cc.slide,cc.identity,m.audiences,1.0,1.0,i.toString,xmlType)
}
// Round-trips an unhandled payload back to XML; non-XML payloads yield Empty.
override def fromMeTLUnhandledData(i:MeTLUnhandledData) = i.valueType.toLowerCase.trim match {
case s:String if s == xmlType => XML.loadString(i.unhandled)
case _ => NodeSeq.Empty
}
// Round-trips an unhandled stanza back to XML; non-XML payloads yield Empty.
override def fromMeTLUnhandledStanza(i:MeTLUnhandledStanza) = i.valueType.toLowerCase.trim match {
case s:String if s == xmlType => XML.loadString(i.unhandled)
case _ => NodeSeq.Empty
}
// Round-trips unhandled canvas content back to XML; non-XML payloads yield Empty.
override def fromMeTLUnhandledCanvasContent(i:MeTLUnhandledCanvasContent) = i.valueType.toLowerCase.trim match {
case s:String if s == xmlType => XML.loadString(i.unhandled)
case _ => NodeSeq.Empty
}
// Parses an <undeletedCanvasContent> stanza: canvas envelope plus the type and
// old/new identities of the restored element.
override def toMeTLUndeletedCanvasContent(input:NodeSeq):MeTLUndeletedCanvasContent = {
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
val elementType = getStringByName(input,"elementType")
val oldIdentity = getStringByName(input,"oldIdentity")
val newIdentity = getStringByName(input,"newIdentity")
MeTLUndeletedCanvasContent(config,m.author,m.timestamp,c.target,c.privacy,c.slide,c.identity,elementType,oldIdentity,newIdentity,m.audiences)
}
// Inverse of toMeTLUndeletedCanvasContent.
override def fromMeTLUndeletedCanvasContent(input:MeTLUndeletedCanvasContent):NodeSeq = {
canvasContentToXml("undeletedCanvasContent",input,Seq(
<elementType>{input.elementType}</elementType>,
<oldIdentity>{input.oldElementIdentity}</oldIdentity>,
<newIdentity>{input.newElementIdentity}</newIdentity>
))
}
// Parses a <moveDelta>: per-type id lists, translation/scale, origin, new privacy
// and deletion flag. Missing numeric fields default to -1.0 (see getDoubleByName).
override def toMeTLMoveDelta(input:NodeSeq):MeTLMoveDelta = Stopwatch.time("GenericXmlSerializer.toMeTLMoveDelta", {
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
val inkIds = getListOfStringsByNameWithin(input,"inkId","inkIds")
val textIds = getListOfStringsByNameWithin(input,"textId","textIds")
val multiWordTextIds = getListOfStringsByNameWithin(input,"multiWordTextId","multiWordTextIds")
val imageIds = getListOfStringsByNameWithin(input,"imageId","imageIds")
val videoIds = getListOfStringsByNameWithin(input,"videoId","videoIds")
val xTranslate = getDoubleByName(input,"xTranslate")
val yTranslate = getDoubleByName(input,"yTranslate")
val xScale = getDoubleByName(input,"xScale")
val yScale = getDoubleByName(input,"yScale")
val newPrivacy = getPrivacyByName(input,"newPrivacy")
val isDeleted = getBooleanByName(input,"isDeleted")
val xOrigin = getDoubleByName(input,"xOrigin")
val yOrigin = getDoubleByName(input,"yOrigin")
MeTLMoveDelta(config,m.author,m.timestamp,c.target,c.privacy,c.slide,c.identity,xOrigin,yOrigin,inkIds,textIds,multiWordTextIds,imageIds,videoIds,xTranslate,yTranslate,xScale,yScale,newPrivacy,isDeleted,m.audiences)
})
// Inverse of toMeTLMoveDelta.
override def fromMeTLMoveDelta(input:MeTLMoveDelta):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLMoveDelta", {
canvasContentToXml("moveDelta",input, Seq(
<inkIds>{input.inkIds.map(i => <inkId>{i}</inkId>)}</inkIds>,
<imageIds>{input.imageIds.map(i => <imageId>{i}</imageId>)}</imageIds>,
<textIds>{input.textIds.map(i => <textId>{i}</textId>)}</textIds>,
<videoIds>{input.videoIds.map(i => <videoId>{i}</videoId>)}</videoIds>,
<multiWordTextIds>{input.multiWordTextIds.map(i => <multiWordTextId>{i}</multiWordTextId>)}</multiWordTextIds>,
<xTranslate>{input.xTranslate}</xTranslate>,
<yTranslate>{input.yTranslate}</yTranslate>,
<xScale>{input.xScale}</xScale>,
<yScale>{input.yScale}</yScale>,
<newPrivacy>{input.newPrivacy}</newPrivacy>,
<isDeleted>{input.isDeleted}</isDeleted>,
<xOrigin>{input.xOrigin}</xOrigin>,
<yOrigin>{input.yOrigin}</yOrigin>
))
})
// Parses an <attendance> stanza (location + present flag).
override def toMeTLAttendance(input:NodeSeq):Attendance = Stopwatch.time("GenericXmlSerializer.toMeTLAttendance",{
val m = parseMeTLContent(input,config)
val location = getStringByName(input,"location")
val present = getBooleanByName(input,"present")
Attendance(config,m.author,m.timestamp,location,present,m.audiences)
})
// Inverse of toMeTLAttendance.
// Fixed: Stopwatch label was misspelled "fromMeTLAttenance"; corrected so timing
// metrics line up with the method name like every other label in this class.
// NOTE(review): if any dashboard was keyed on the old misspelled label, update it.
override def fromMeTLAttendance(input:Attendance):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLAttendance",{
metlContentToXml("attendance",input,List(
<location>{input.location}</location>,
<present>{input.present}</present>
))
})
// Serializes a theme stanza (text, origin, location).
override def fromTheme(t:MeTLTheme):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromTheme",{
metlContentToXml("theme",t,List(
<text>{t.theme.text}</text>,
<origin>{t.theme.origin}</origin>,
<location>{t.location}</location>
))
})
// Parses a <theme> stanza.
// NOTE(review): parseMeTLContent is called WITHOUT config here (unlike most
// siblings), so parsed audiences carry ServerConfiguration.empty — confirm intended.
override def toTheme(x:NodeSeq):MeTLTheme = Stopwatch.time("GenericXmlSerializer.toTheme",{
val m = parseMeTLContent(x)
val text = getStringByName(x,"text")
val location = getStringByName(x,"location")
val origin = getStringByName(x,"origin")
MeTLTheme(config,m.author,m.timestamp,location,Theme(m.author,text,origin),m.audiences)
})
// Serializes a chat message stanza.
override def fromChatMessage(t:MeTLChatMessage):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromChatMessage",{
metlContentToXml("chatMessage",t,List(
<context>{t.context}</context>,
<content>{t.content}</content>,
<contentType>{t.contentType}</contentType>,
<identity>{t.identity}</identity>
))
})
// Parses a <chatMessage> stanza.
// NOTE(review): parseMeTLContent is called without config — see toTheme.
override def toChatMessage(x:NodeSeq):MeTLChatMessage = Stopwatch.time("GenericXmlSerializer.toChatMessage",{
val m = parseMeTLContent(x)
val context = getStringByName(x,"context")
val contentType = getStringByName(x,"contentType")
val content = getStringByName(x,"content")
val identity = getStringByName(x,"identity")
MeTLChatMessage(config,m.author,m.timestamp,identity,contentType,content,context,m.audiences)
})
// Parses an <ink> stanza. Legacy stanzas may lack an identity, in which case the
// startingSum doubles as the identity.
override def toMeTLInk(input:NodeSeq):MeTLInk = Stopwatch.time("GenericXmlSerializer.toMeTLInk",{
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
val checksum = getDoubleByName(input,"checksum")
val startingSum = getDoubleByName(input,"startingSum")
val points = tryo(PointConverter.fromText(getStringByName(input,"points"))).openOr(List.empty[Point])
val color = getColorByName(input,"color")
val thickness = getDoubleByName(input,"thickness")
val isHighlighter = getBooleanByName(input,"highlight")
val identity = c.identity match {
case "" => startingSum.toString
case other => other
}
MeTLInk(config,m.author,m.timestamp,checksum,startingSum,points,color,thickness,isHighlighter,c.target,c.privacy,c.slide,identity,m.audiences)
})
// Inverse of toMeTLInk; points and color are flattened to text by converters.
override def fromMeTLInk(input:MeTLInk):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLInk",{
canvasContentToXml("ink",input,List(
<checksum>{input.checksum}</checksum>,
<startingSum>{input.startingSum}</startingSum>,
<points>{PointConverter.toText(input.points)}</points>,
<color>{ColorConverter.toRGBAString(input.color)}</color>,
<thickness>{input.thickness}</thickness>,
<highlight>{input.isHighlighter}</highlight>
))
})
// Parses an <image> stanza. A usable <source> URL triggers an immediate resource
// fetch via config.getResource; sentinel values ("unknown url"/"none"/empty) map to Empty.
override def toMeTLImage(input:NodeSeq):MeTLImage = Stopwatch.time("GenericXmlSerializer.toMeTLImage",{
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
val tag = getStringByName(input,"tag")
val source = getStringByName(input,"source") match {
case s:String if (s.length > 0 && s != "unknown url" && s != "none") => Full(s)
case _ => Empty
}
val imageBytes = source.map(u => config.getResource(u))
val pngBytes = Empty
val width = getDoubleByName(input,"width")
val height = getDoubleByName(input,"height")
val x = getDoubleByName(input,"x")
val y = getDoubleByName(input,"y")
MeTLImage(config,m.author,m.timestamp,tag,source,imageBytes,pngBytes,width,height,x,y,c.target,c.privacy,c.slide,c.identity,m.audiences)
})
// Inverse of toMeTLImage; an absent source serializes as "unknown".
override def fromMeTLImage(input:MeTLImage):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLImage",{
canvasContentToXml("image",input,List(
<tag>{input.tag}</tag>,
<source>{input.source.openOr("unknown")}</source>,
<width>{input.width}</width>,
<height>{input.height}</height>,
<x>{input.x}</x>,
<y>{input.y}</y>
))
})
// Parses a <video> stanza; same source/resource-fetch behavior as toMeTLImage.
override def toMeTLVideo(input:NodeSeq):MeTLVideo = Stopwatch.time("GenericXmlSerializer.toMeTLVideo",{
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
val source = getStringByName(input,"source") match {
case s:String if (s.length > 0 && s != "unknown url" && s != "none") => Full(s)
case _ => Empty
}
val videoBytes = source.map(u => config.getResource(u))
val width = getDoubleByName(input,"width")
val height = getDoubleByName(input,"height")
val x = getDoubleByName(input,"x")
val y = getDoubleByName(input,"y")
MeTLVideo(config,m.author,m.timestamp,source,videoBytes,width,height,x,y,c.target,c.privacy,c.slide,c.identity,m.audiences)
})
// Inverse of toMeTLVideo; an absent source serializes as "unknown".
override def fromMeTLVideo(input:MeTLVideo):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLVideo",{
canvasContentToXml("video",input,List(
<source>{input.source.openOr("unknown")}</source>,
<width>{input.width}</width>,
<height>{input.height}</height>,
<x>{input.x}</x>,
<y>{input.y}</y>
))
})
// Serializes a single styled word of a multi-word text box, including its ARGB color.
override def fromMeTLWord(input:MeTLTextWord) = <word>
<text>{input.text}</text>
<bold>{input.bold}</bold>
<underline>{input.underline}</underline>
<italic>{input.italic}</italic>
<justify>{input.justify}</justify>
<font>{input.font}</font>
<size>{input.size}</size>
<color>
<alpha>{input.color.alpha}</alpha>
<red>{input.color.red}</red>
<green>{input.color.green}</green>
<blue>{input.color.blue}</blue>
</color>
</word>
// Serializes an Audience as element children (contrast parsedMeTLContentToXml,
// which encodes audiences as attributes).
// Fixed: <audienceType> and <action> previously emitted input.name (copy/paste
// error); they now emit input.audienceType and input.action, matching the fields
// used at the attribute-based encoding site.
def fromAudience(input:Audience) = <audience>
<domain>{input.domain}</domain>
<name>{input.name}</name>
<audienceType>{input.audienceType}</audienceType>
<action>{input.action}</action>
</audience>
// Parses a <word> element back into a MeTLTextWord. Any missing child yields
// MeTLTextWord.empty because the whole for-comprehension short-circuits to None.
// Note: .toBoolean/.toInt/.toDouble can throw here — this method is NOT tryo-wrapped.
override def toMeTLWord(input:NodeSeq) = (for {
text <- (input \\\\ "text").headOption.map(_.text)
bold <- (input \\\\ "bold").headOption.map(_.text.toBoolean)
underline <- (input \\\\ "underline").headOption.map(_.text.toBoolean)
italic <- (input \\\\ "italic").headOption.map(_.text.toBoolean)
justify <- (input \\\\ "justify").headOption.map(_.text)
font <- (input \\\\ "font").headOption.map(_.text)
size <- (input \\\\ "size").headOption.map(_.text.toDouble)
colorNode <- (input \\\\ "color").headOption
a <- (colorNode \\\\ "alpha").headOption.map(_.text.toInt)
r <- (colorNode \\\\ "red").headOption.map(_.text.toInt)
g <- (colorNode \\\\ "green").headOption.map(_.text.toInt)
b <- (colorNode \\\\ "blue").headOption.map(_.text.toInt)
} yield {
MeTLTextWord(text,bold,underline,italic,justify,Color(a,r,g,b),font,size)
}).getOrElse(MeTLTextWord.empty)
// Parses a <multiWordText> stanza; words are parsed individually via toMeTLWord.
override def toMeTLMultiWordText(input:NodeSeq):MeTLMultiWordText = Stopwatch.time("GenericXmlSerializer.toMeTLMultiWordText",{
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
val width = getDoubleByName(input,"width")
val requestedWidth = getDoubleByName(input,"requestedWidth")
val height = getDoubleByName(input,"height")
val x = getDoubleByName(input,"x")
val y = getDoubleByName(input,"y")
val words = (input \\\\ "words" \\\\ "word").toList.map(toMeTLWord _)
val tag = getStringByName(input,"tag")
MeTLMultiWordText(config,m.author,m.timestamp,height,width,requestedWidth,x,y,tag,c.identity,c.target,c.privacy,c.slide,words,m.audiences)
})
// Inverse of toMeTLMultiWordText. Note this also emits <audiences> explicitly,
// in addition to the envelope written by metlContentToXml.
override def fromMeTLMultiWordText(input:MeTLMultiWordText) = Stopwatch.time("GenericXmlSerializer.fromMeTLMultiWordText", canvasContentToXml("multiWordText",input,List(
<x>{input.x}</x>,
<y>{input.y}</y>,
<width>{input.width}</width>,
<height>{input.height}</height>,
<tag>{input.tag}</tag>,
<requestedWidth>{input.requestedWidth}</requestedWidth>,
<words>{input.words.map(fromMeTLWord _)}</words>,
<audiences>{input.audiences.map(fromAudience _)}</audiences>
)))
// Parses a single-run <textbox> stanza (legacy single-style text).
override def toMeTLText(input:NodeSeq):MeTLText = Stopwatch.time("GenericXmlSerializer.toMeTLText",{
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
val tag = getStringByName(input,"tag")
val caret = getIntByName(input,"caret")
val text = getStringByName(input,"text")
val style = getStringByName(input,"style")
val family = getStringByName(input,"family")
val weight = getStringByName(input,"weight")
val size = getDoubleByName(input,"size")
val decoration = getStringByName(input,"decoration")
val color = getColorByName(input,"color")
val width = getDoubleByName(input,"width")
val height = getDoubleByName(input,"height")
val x = getDoubleByName(input,"x")
val y = getDoubleByName(input,"y")
MeTLText(config,m.author,m.timestamp,text,height,width,caret,x,y,tag,style,family,weight,size,decoration,c.identity,c.target,c.privacy,c.slide,color,m.audiences)
})
// Inverse of toMeTLText; color is written as hex (ink uses RGBA — formats differ by type).
override def fromMeTLText(input:MeTLText):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLText",{
canvasContentToXml("textbox",input,List(
<tag>{input.tag}</tag>,
<caret>{input.caret}</caret>,
<text>{input.text}</text>,
<style>{input.style}</style>,
<family>{input.family}</family>,
<weight>{input.weight}</weight>,
<size>{input.size}</size>,
<decoration>{input.decoration}</decoration>,
<color>{ColorConverter.toHexString(input.color)}</color>,
<width>{input.width}</width>,
<height>{input.height}</height>,
<x>{input.x}</x>,
<y>{input.y}</y>
))
})
// Parses a <dirtyInk> deletion marker (envelope only; identity names the target).
override def toMeTLDirtyInk(input:NodeSeq):MeTLDirtyInk = Stopwatch.time("GenericXmlSerializer.toMeTLDirtyInk",{
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
MeTLDirtyInk(config,m.author,m.timestamp,c.target,c.privacy,c.slide,c.identity,m.audiences)
})
// Inverse of toMeTLDirtyInk (envelope only, no extra payload).
override def fromMeTLDirtyInk(input:MeTLDirtyInk):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLDirtyInk",{
canvasContentToXml("dirtyInk",input,NodeSeq.Empty)
})
// Parses a <dirtyImage> deletion marker.
override def toMeTLDirtyImage(input:NodeSeq):MeTLDirtyImage = Stopwatch.time("GenericXmlSerializer.toMeTLDirtyImage",{
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
MeTLDirtyImage(config,m.author,m.timestamp,c.target,c.privacy,c.slide,c.identity,m.audiences)
})
// Inverse of toMeTLDirtyImage.
override def fromMeTLDirtyImage(input:MeTLDirtyImage):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLDirtyImage",{
canvasContentToXml("dirtyImage",input,NodeSeq.Empty)
})
// Parses a <dirtyVideo> deletion marker.
override def toMeTLDirtyVideo(input:NodeSeq):MeTLDirtyVideo = Stopwatch.time("GenericXmlSerializer.toMeTLDirtyVideo",{
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
MeTLDirtyVideo(config,m.author,m.timestamp,c.target,c.privacy,c.slide,c.identity,m.audiences)
})
// Inverse of toMeTLDirtyVideo.
override def fromMeTLDirtyVideo(input:MeTLDirtyVideo):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLDirtyVideo",{
canvasContentToXml("dirtyVideo",input,NodeSeq.Empty)
})
// Parses a <dirtyText> deletion marker.
override def toMeTLDirtyText(input:NodeSeq):MeTLDirtyText = Stopwatch.time("GenericXmlSerializer.toMeTLDirtyText",{
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
MeTLDirtyText(config,m.author,m.timestamp,c.target,c.privacy,c.slide,c.identity,m.audiences)
})
// Inverse of toMeTLDirtyText.
override def fromMeTLDirtyText(input:MeTLDirtyText):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLDirtyText",{
canvasContentToXml("dirtyText",input,NodeSeq.Empty)
})
// Parses a <command> stanza: command name plus a list of string parameters.
override def toMeTLCommand(input:NodeSeq):MeTLCommand = Stopwatch.time("GenericXmlSerializer.toMeTLCommand",{
val m = parseMeTLContent(input,config)
val comm = getStringByName(input,"name")
val parameters = getListOfStringsByNameWithin(input,"parameter","parameters")
MeTLCommand(config,m.author,m.timestamp,comm,parameters,m.audiences)
})
// Inverse of toMeTLCommand.
override def fromMeTLCommand(input:MeTLCommand):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLCommand",{
metlContentToXml("command",input,List(
<name>{input.command}</name>,
<parameters>{input.commandParameters.map(cp => <parameter>{cp}</parameter>)}</parameters>
))
})
// Parses a <screenshotSubmission>: fetches the screenshot bytes eagerly and parses
// blacklist entries (username + highlight color).
// Note: c.slide.toInt is unguarded and throws NumberFormatException for non-numeric
// slide ids; likewise config.getResource(url) is called unconditionally.
override def toSubmission(input:NodeSeq):MeTLSubmission = Stopwatch.time("GenericXmlSerializer.toSubmission",{
val m = parseMeTLContent(input,config)
val c = parseCanvasContent(input)
val title = getStringByName(input,"title")
val url = getStringByName(input,"url")
val imageBytes = Full(config.getResource(url))
val blacklist = getXmlByName(input,"blacklist").map(bl => {
val username = getStringByName(bl,"username")
val highlight = getColorByName(bl,"highlight")
SubmissionBlacklistedPerson(username,highlight)
}).toList
MeTLSubmission(config,m.author,m.timestamp,title,c.slide.toInt,url,imageBytes,blacklist,c.target,c.privacy,c.identity,m.audiences)
})
// Inverse of toSubmission; blacklist entries are appended as repeated <blacklist> elements.
override def fromSubmission(input:MeTLSubmission):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromSubmission",{
canvasContentToXml("screenshotSubmission",input,List(
<url>{input.url}</url>,
<title>{input.title}</title>,
<time>{input.timestamp.toString}</time>
) ::: input.blacklist.map(bl => <blacklist><username>{bl.username}</username><highlight>{ColorConverter.toRGBAString(bl.highlight)}</highlight></blacklist> ).toList)
})
// Parses a <quiz> stanza. Falls back to <title> when <question> is empty; a usable
// <url> triggers an eager resource fetch for the quiz image.
override def toMeTLQuiz(input:NodeSeq):MeTLQuiz = Stopwatch.time("GenericXmlSerializer.toMeTLQuiz",{
val m = parseMeTLContent(input,config)
val created = getLongByName(input,"created")
val question = getStringByName(input,"question") match {
case q if (q.length > 0) => q
case _ => getStringByName(input,"title")
}
val id = getStringByName(input,"id")
val url = getStringByName(input,"url") match {
case s:String if (s.length > 0 && s != "unknown url" && s != "none") => Full(s)
case _ => Empty
}
val quizImage = url.map(u => config.getResource(u))
val isDeleted = getBooleanByName(input,"isDeleted")
val options = getXmlByName(input,"quizOption").map(qo => toQuizOption(qo)).toList
MeTLQuiz(config,m.author,m.timestamp,created,question,id,url,quizImage,isDeleted,options,m.audiences)
})
// Inverse of toMeTLQuiz; <url> is only emitted when the quiz actually has one.
override def fromMeTLQuiz(input:MeTLQuiz):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLQuiz",{
metlContentToXml("quiz",input,List(
<created>{input.created}</created>,
<question>{input.question}</question>,
<id>{input.id}</id>,
<isDeleted>{input.isDeleted}</isDeleted>,
<options>{input.options.map(o => fromQuizOption(o))}</options>
) ::: input.url.map(u => List(<url>{u}</url>)).openOr(List.empty[Node]))
})
// Parses a <fileResource> stanza; file bytes are fetched eagerly when a url is present.
override def toMeTLFile(input:NodeSeq):MeTLFile = Stopwatch.time("GenericXmlSerializer.toMeTLFile",{
val m = parseMeTLContent(input,config)
val name = getStringByName(input,"name")
val id = getStringByName(input,"identity")
val deleted = getBooleanByName(input,"deleted")
val url = (input \\ "url").headOption.map(_.text)
val bytes = url.map(u => config.getResource(u))
MeTLFile(config,m.author,m.timestamp,name,id,url,bytes)
})
// Inverse of toMeTLFile; <url> is only emitted when present.
override def fromMeTLFile(input:MeTLFile):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLFile",{
metlContentToXml("fileResource",input,List(
<name>{input.name}</name>,
<identity>{input.id}</identity>,
<deleted>{input.deleted}</deleted>
) :::
input.url.map(u => List(<url>{u}</url>)).getOrElse(List.empty[Node]))
})
// Parses a <videoStream> stanza (no eager resource fetch, unlike toMeTLFile).
override def toMeTLVideoStream(input:NodeSeq):MeTLVideoStream = Stopwatch.time("GenericXmlSerializer.toMeTLVideoStream",{
val m = parseMeTLContent(input,config)
val id = getStringByName(input,"identity")
val deleted = getBooleanByName(input,"deleted")
val url = (input \\ "url").headOption.map(_.text)
MeTLVideoStream(config,m.author,id,m.timestamp,url,deleted)
})
// Inverse of toMeTLVideoStream; <url> only emitted when present.
override def fromMeTLVideoStream(input:MeTLVideoStream):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLVideoStream",{
metlContentToXml("videoStream",input,List(
<identity>{input.id}</identity>,
<deleted>{input.isDeleted}</deleted>
) :::
input.url.map(u => List(<url>{u}</url>)).getOrElse(List.empty[Node]))
})
// Parses a single <quizOption> element.
def toQuizOption(input:NodeSeq):QuizOption = Stopwatch.time("GenericXmlSerializer.toMeTLQuizOption",{
val name = getStringByName(input,"name")
val text = getStringByName(input,"text")
val correct = getBooleanByName(input,"correct")
val color = getColorByName(input,"color")
QuizOption(name,text,correct,color)
})
// Inverse of toQuizOption. Note the color is written via Color.toString here,
// not via a ColorConverter format as the other from* methods do.
def fromQuizOption(input:QuizOption):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLQuizOption",{
metlXmlToXml("quizOption",List(
<name>{input.name}</name>,
<text>{input.text}</text>,
<correct>{input.correct}</correct>,
<color>{input.color}</color>
))
})
// Parses a <quizResponse> stanza (answer, answerer, quiz id).
override def toMeTLQuizResponse(input:NodeSeq):MeTLQuizResponse = Stopwatch.time("GenericXmlSerializer.toMeTLQuizResponse", {
val m = parseMeTLContent(input,config)
val answer = getStringByName(input,"answer")
val answerer = getStringByName(input,"answerer")
val id = getStringByName(input,"id")
MeTLQuizResponse(config,m.author,m.timestamp,answer,answerer,id,m.audiences)
})
// Inverse of toMeTLQuizResponse.
override def fromMeTLQuizResponse(input:MeTLQuizResponse):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromMeTLQuizResponse",{
metlContentToXml("quizResponse",input,List(
<answer>{input.answer}</answer>,
<answerer>{input.answerer}</answerer>,
<id>{input.id}</id>
))
})
// Zone applied to the zone-less legacy date patterns below.
protected val usZone: ZoneId = ZoneId.of("America/New_York")
// Multi-format parser for legacy <created> strings; patterns are tried in order,
// from the zoned Java Date.toString form down to assorted US/intl numeric forms.
val dateTimeFormatter = new MultiFormatDateFormatter(
Left("EEE MMM dd kk:mm:ss z yyyy"),
Right("MM/dd/yyyy h:mm:ss a", usZone),
Right("MM/d/yyyy h:mm:ss a", usZone),
Right("M/dd/yyyy h:mm:ss a", usZone),
Right("M/d/yyyy h:mm:ss a", usZone),
Right("MM/dd/yyyy HH:mm:ss", usZone),
Right("MM/d/yyyy HH:mm:ss", usZone),
Right("M/dd/yyyy HH:mm:ss", usZone),
Right("M/d/yyyy HH:mm:ss", usZone),
Right("dd/MM/yyyy h:mm:ss a", usZone),
Right("d/MM/yyyy h:mm:ss a", usZone),
Right("dd/M/yyyy h:mm:ss a", usZone),
Right("d/M/yyyy h:mm:ss a", usZone),
Right("dd/MM/yyyy HH:mm:ss a", usZone),
Right("d/MM/yyyy HH:mm:ss a", usZone),
Right("dd/M/yyyy HH:mm:ss a", usZone),
Right("d/M/yyyy HH:mm:ss a", usZone)
)
// Parses a <conversation>: prefers the numeric <creation> epoch, falling back to
// parsing the legacy <created> date string; resolves a foreign server config when
// <configName> names one.
override def toConversation(input:NodeSeq):Conversation = Stopwatch.time("GenericXmlSerializer.toConversation",{
val m = parseMeTLContent(input,config)
val author = getStringByName(input,"author")
val lastAccessed = getLongByName(input,"lastAccessed")
val slides = getXmlByName(input,"slide").map(s => toSlide(s)).toList
val subject = getStringByName(input,"subject")
val tag = getStringByName(input,"tag")
val jid = getIntByName(input,"jid")
val title = getStringByName(input,"title")
val creationString = getStringByName(input,"creation")
val created = try {
creationString.toLong
} catch {
case e:Exception => {
// Legacy path: no numeric <creation>, parse the human-readable <created> string.
dateTimeFormatter.parse(getStringByName(input, "created"))
}
}
val permissions = getXmlByName(input,"permissions").map(p => toPermissions(p)).headOption.getOrElse(Permissions.default(config))
val blacklist = getXmlByName(input,"blacklist").flatMap(bl => getXmlByName(bl,"user")).map(_.text)
val thisConfig = getStringByName(input,"configName") match {
case "unknown configName" => config
case "" => config
case other => ServerConfiguration.configForName(other)
}
val foreignRelationship = (input \\\\ "foreignRelationship").headOption.flatMap(n => {
for {
sys <- (n \\ "@system").headOption.map(_.text)
key <- (n \\ "@key").headOption.map(_.text)
// NOTE(review): displayName is extracted but never passed to ForeignRelationship,
// even though fromConversation writes a displayName attribute — confirm intent.
displayName = (n \\ "@displayName").headOption.map(_.text)
} yield {
ForeignRelationship(sys,key)
}
})
Conversation(thisConfig,author,lastAccessed,slides,subject,tag,jid,title,created,permissions,blacklist.toList,m.audiences,foreignRelationship)
})
// Inverse of toConversation: emits both the legacy <created> date string and the
// authoritative numeric <creation> epoch so old and new parsers can read it.
override def fromConversation(input:Conversation):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromConversation",{
metlXmlToXml("conversation",List(
<author>{input.author}</author>,
<lastAccessed>{input.lastAccessed}</lastAccessed>,
<slides>{input.slides.map(s => fromSlide(s))}</slides>,
<subject>{input.subject}</subject>,
<tag>{input.tag}</tag>,
<jid>{input.jid}</jid>,
<title>{input.title}</title>,
<created>{new java.util.Date(input.created).toString()}</created>,
<creation>{input.created}</creation>,
<blacklist>{
input.blackList.map(bu => <user>{bu}</user> )
}</blacklist>,
// <configName>{input.server.name}</configName>,
fromPermissions(input.permissions)
) ::: input.foreignRelationship.toList.map(t => {
<foreignRelationship system={t.system} key={t.key} displayName={t.displayName.map(dn => Text(dn))} />
}))
})
override def fromConversationList(input:List[Conversation]):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromConversationList",{
<conversations>{input.map(c => fromConversation(c))}</conversations>
})
// Deserializes a <slide> element, including any nested <groupSet> elements.
override def toSlide(input:NodeSeq):Slide = Stopwatch.time("GenericXmlSerializer.toSlide",{
  val m = parseMeTLContent(input,config)
  val author = getStringByName(input,"author")
  val id = getIntByName(input,"id")
  val index = getIntByName(input,"index")
  val defHeight = getIntByName(input,"defaultHeight")
  val defWidth = getIntByName(input,"defaultWidth")
  val exposed = getBooleanByName(input,"exposed")
  val slideType = getStringByName(input,"type")
  val groupSets = (input \\ "groupSet").map(gs => toGroupSet(gs)).toList
  Slide(config,author,id,index,defHeight,defWidth,exposed,slideType,groupSets,m.audiences)
})
// Serializes a Slide to its <slide> XML form; group sets are appended as
// sibling elements after the scalar fields.
override def fromSlide(input:Slide):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromSlide",{
  metlXmlToXml("slide",List(
    <id>{input.id}</id>,
    <author>{input.author}</author>,
    <index>{input.index}</index>,
    <defaultHeight>{input.defaultHeight}</defaultHeight>,
    <defaultWidth>{input.defaultWidth}</defaultWidth>,
    <exposed>{input.exposed}</exposed>,
    <type>{input.slideType}</type>
  ) ::: List(input.groupSet.map(gs => fromGroupSet(gs))).flatten.flatMap(_.theSeq).toList)
})
// Deserializes a <groupSet> element: its grouping strategy plus member groups.
override def toGroupSet(input:NodeSeq):GroupSet = Stopwatch.time("GenericXmlSerializer.toGroupSet",{
  val m = parseMeTLContent(input,config)
  val id = getStringByName(input,"id")
  val location = getStringByName(input,"location")
  val groupingStrategy = toGroupingStrategy((input \\ "groupingStrategy"))
  val groups = ((input \\ "groups") \\ "group").map(gn => toGroup(gn)).toList
  GroupSet(config,id,location,groupingStrategy,groups,m.audiences)
})
// Serializes a GroupSet to its <groupSet> XML form.
override def fromGroupSet(input:GroupSet):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromGroupSet",{
  metlXmlToXml("groupSet",List(
    <id>{input.id}</id>,
    <location>{input.location}</location>,
    // .head: fromGroupingStrategy returns a NodeSeq; we embed its single node.
    fromGroupingStrategy(input.groupingStrategy).head,
    <groups>{input.groups.map(g => fromGroup(g))}</groups>
  ))
})
// Deserializes a <groupingStrategy> element, dispatching on its <name>
// discriminator. Unknown names degrade to EveryoneInOneGroup.
override def toGroupingStrategy(input:NodeSeq):GroupingStrategy = {
  getStringByName(input,"name") match {
    case "byMaximumSize" => ByMaximumSize(getIntByName(input,"groupSize"))
    case "byTotalGroups" => ByTotalGroups(getIntByName(input,"groupCount"))
    case "onePersonPerGroup" => OnePersonPerGroup
    case "everyoneInOneGroup" => EveryoneInOneGroup
    // Complex strategies keep the entire raw XML for later interpretation.
    case "complexGroupingStrategy" => ComplexGroupingStrategy(Map("xml" -> input.toString))
    case _ => EveryoneInOneGroup
  }
}
// Serializes a GroupingStrategy to its <groupingStrategy> XML form. The
// <name> element is the discriminator consumed by toGroupingStrategy; unknown
// strategies degrade to everyoneInOneGroup, mirroring the parser's fallback.
override def fromGroupingStrategy(input:GroupingStrategy):NodeSeq = {
  input match {
    case ByMaximumSize(groupSize) => <groupingStrategy><name>byMaximumSize</name><groupSize>{groupSize.toString}</groupSize></groupingStrategy>
    case ByTotalGroups(groupCount) => <groupingStrategy><name>byTotalGroups</name><groupCount>{groupCount.toString}</groupCount></groupingStrategy>
    case OnePersonPerGroup => <groupingStrategy><name>onePersonPerGroup</name></groupingStrategy>
    case EveryoneInOneGroup => <groupingStrategy><name>everyoneInOneGroup</name></groupingStrategy>
    // Bug fix: the payload used to be emitted *between* <name> and an
    // always-empty <data></data> element; it now lives inside <data>, where a
    // reader would expect it.
    case ComplexGroupingStrategy(data) => <groupingStrategy><name>complexGroupingStrategy</name><data>{data.toString}</data></groupingStrategy>
    case _ => <groupingStrategy><name>everyoneInOneGroup</name></groupingStrategy>
  }
}
// Deserializes a <group> element; members are the text of <member> children.
override def toGroup(input:NodeSeq):Group = Stopwatch.time("GenericXmlSerializer.toGroup",{
  val m = parseMeTLContent(input,config)
  val id = getStringByName(input,"id")
  val location = getStringByName(input,"location")
  val timestamp = getLongByName(input,"timestamp")
  val members = ((input \\ "members") \\ "member").map(_.text).toList
  Group(config,id,location,timestamp,members,m.audiences)
})
// Serializes a Group to its <group> XML form.
override def fromGroup(input:Group):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromGroup",{
  metlXmlToXml("group",List(
    <id>{input.id}</id>,
    <location>{input.location}</location>,
    <timestamp>{input.timestamp}</timestamp>,
    <members>{input.members.map(m => {
      <member>{m}</member>
    })}</members>
  ))
})
// Deserializes a <permissions> element. Any parse failure falls back to the
// config's default permissions rather than propagating the error.
override def toPermissions(input:NodeSeq):Permissions = Stopwatch.time("GenericXmlSerializer.toPermissions",{
  try {
    val studentsCanOpenFriends = getBooleanByName(input,"studentCanOpenFriends")
    val studentsCanPublish = getBooleanByName(input,"studentCanPublish")
    val usersAreCompulsorilySynced = getBooleanByName(input,"usersAreCompulsorilySynced")
    // The two newer flags default to true when absent or malformed, so
    // documents written before they existed remain permissive.
    val studentsMayBroadcast = tryo(getValueOfNode(input,"studentsMayBroadcast").toBoolean).openOr(true)
    val studentsMayChatPublicly = tryo(getValueOfNode(input,"studentsMayChatPublicly").toBoolean).openOr(true)
    Permissions(config,studentsCanOpenFriends,studentsCanPublish,usersAreCompulsorilySynced,studentsMayBroadcast,studentsMayChatPublicly)
  } catch {
    case e:Exception => {
      Permissions.default(config)
    }
  }
})
// Serializes Permissions to a single <permissions> element.
override def fromPermissions(input:Permissions):Node = Stopwatch.time("GenericXmlSerializer.fromPermissions",{
  <permissions><studentCanOpenFriends>{input.studentsCanOpenFriends}</studentCanOpenFriends><studentCanPublish>{input.studentsCanPublish}</studentCanPublish><usersAreCompulsorilySynced>{input.usersAreCompulsorilySynced}</usersAreCompulsorilySynced><studentsMayBroadcast>{input.studentsMayBroadcast}</studentsMayBroadcast><studentsMayChatPublicly>{input.studentsMayChatPublicly}</studentsMayChatPublicly></permissions>
})
// Color and point (de)serialization are effectively stubs in this generic
// serializer: the to* methods return empty values, and the from* methods emit
// a plain space-separated string representation.
override def toColor(input:AnyRef):Color = Stopwatch.time("GenericXmlSerializer.toColor",{
  Color.empty
})
// "alpha red green blue"
override def fromColor(input:Color):AnyRef = "%s %s %s %s".format(input.alpha,input.red,input.green,input.blue)
override def toPointList(input:AnyRef):List[Point] = List.empty[Point]
override def fromPointList(input:List[Point]):AnyRef = ""
override def toPoint(input:AnyRef):Point = {
  Point.empty
}
// "x y thickness"
override def fromPoint(input:Point):String = "%s %s %s".format(input.x,input.y,input.thickness)
// Deserializes a <grade> element. Numeric bounds are only meaningful for
// numeric grade types, so they are None otherwise.
override def toGrade(input:NodeSeq):MeTLGrade = Stopwatch.time("GenericXmlSerializer.toGrade",{
  val m = parseMeTLContent(input,config)
  val id = getStringByName(input,"id")
  val name = getStringByName(input,"name")
  val description = getStringByName(input,"description")
  val location = getStringByName(input,"location")
  val visible = getBooleanByName(input,"visible")
  val gradeType = MeTLGradeValueType.parse(getStringByName(input,"gradeType"))
  val numericMaximum = if (gradeType == MeTLGradeValueType.Numeric){
    Some(getDoubleByName(input,"numericMaximum"))
  } else {
    None
  }
  val numericMinimum = if (gradeType == MeTLGradeValueType.Numeric){
    Some(getDoubleByName(input,"numericMinimum"))
  } else {
    None
  }
  // NOTE(review): the attribute here is "@sys", whereas toConversation reads
  // "@system" and fromGrade writes "sys" — consistent within the grade
  // round-trip, but differs from the conversation one.
  val foreignRelationship = (input \\\\ "foreignRelationship").headOption.flatMap(n => {
    for {
      sys <- (n \\ "@sys").headOption.map(_.text)
      key <- (n \\ "@key").headOption.map(_.text)
    } yield {
      (sys,key)
    }
  })
  val gradeReferenceUrl = (input \\\\ "gradeReferenceUrl").headOption.map(_.text)
  MeTLGrade(config,m.author,m.timestamp,id,location,name,description,gradeType,visible,foreignRelationship,gradeReferenceUrl,numericMaximum,numericMinimum,m.audiences)
})
// Serializes a MeTLGrade; optional fields are appended only when present.
override def fromGrade(input:MeTLGrade):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromGrade",{
  metlContentToXml("grade",input,List(
    <id>{input.id}</id>,
    <name>{input.name}</name>,
    <location>{input.location}</location>,
    <visible>{input.visible.toString}</visible>,
    <gradeType>{MeTLGradeValueType.print(input.gradeType)}</gradeType>,
    <description>{input.description}</description>
  ) ::: input.foreignRelationship.toList.map(t => {
    <foreignRelationship sys={t._1} key={t._2} />
  }) ::: input.gradeReferenceUrl.toList.map(s => {
    <gradeReferenceUrl>{s}</gradeReferenceUrl>
  }) ::: input.numericMaximum.toList.map(nm => {
    <numericMaximum>{nm.toString}</numericMaximum>
  }) ::: input.numericMinimum.toList.map(nm => {
    <numericMinimum>{nm.toString}</numericMinimum>
  }))
})
// The three grade-value pairs below (numeric, boolean, text) follow an
// identical shape: required gradeId / gradedUser / gradeValue, plus optional
// public and private comments that are only emitted when present.
override def toNumericGradeValue(input:NodeSeq):MeTLNumericGradeValue = Stopwatch.time("GenericXmlSerializer.toNumericGradeValue",{
  val m = parseMeTLContent(input,config)
  val gradeId = getStringByName(input,"gradeId")
  val gradedUser = getStringByName(input,"gradedUser")
  val gradeValue = getDoubleByName(input,"gradeValue")
  val gradeComment = (input \\\\ "gradeComment").headOption.map(_.text)
  val gradePrivateComment = (input \\\\ "gradePrivateComment").headOption.map(_.text)
  MeTLNumericGradeValue(config,m.author,m.timestamp,gradeId,gradedUser,gradeValue,gradeComment,gradePrivateComment,m.audiences)
})
override def fromNumericGradeValue(input:MeTLNumericGradeValue):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromNumericGradeValue",{
  metlContentToXml("numericGradeValue",input,List(
    <gradeId>{input.gradeId}</gradeId>,
    <gradedUser>{input.gradedUser}</gradedUser>,
    <gradeValue>{input.gradeValue.toString}</gradeValue>
  ) ::: input.gradeComment.toList.map(s => {
    <gradeComment>{s}</gradeComment>
  }) ::: input.gradePrivateComment.toList.map(s => {
    <gradePrivateComment>{s}</gradePrivateComment>
  }))
})
override def toBooleanGradeValue(input:NodeSeq):MeTLBooleanGradeValue = Stopwatch.time("GenericXmlSerializer.toBooleanGradeValue",{
  val m = parseMeTLContent(input,config)
  val gradeId = getStringByName(input,"gradeId")
  val gradedUser = getStringByName(input,"gradedUser")
  val gradeValue = getBooleanByName(input,"gradeValue")
  val gradeComment = (input \\\\ "gradeComment").headOption.map(_.text)
  val gradePrivateComment = (input \\\\ "gradePrivateComment").headOption.map(_.text)
  MeTLBooleanGradeValue(config,m.author,m.timestamp,gradeId,gradedUser,gradeValue,gradeComment,gradePrivateComment,m.audiences)
})
override def fromBooleanGradeValue(input:MeTLBooleanGradeValue):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromBooleanGradeValue",{
  metlContentToXml("booleanGradeValue",input,List(
    <gradeId>{input.gradeId}</gradeId>,
    <gradedUser>{input.gradedUser}</gradedUser>,
    <gradeValue>{input.gradeValue.toString}</gradeValue>
  ) ::: input.gradeComment.toList.map(s => {
    <gradeComment>{s}</gradeComment>
  }) ::: input.gradePrivateComment.toList.map(s => {
    <gradePrivateComment>{s}</gradePrivateComment>
  }))
})
override def toTextGradeValue(input:NodeSeq):MeTLTextGradeValue = Stopwatch.time("GenericXmlSerializer.toTextGradeValue",{
  val m = parseMeTLContent(input,config)
  val gradeId = getStringByName(input,"gradeId")
  val gradedUser = getStringByName(input,"gradedUser")
  val gradeValue = getStringByName(input,"gradeValue")
  val gradeComment = (input \\\\ "gradeComment").headOption.map(_.text)
  val gradePrivateComment = (input \\\\ "gradePrivateComment").headOption.map(_.text)
  MeTLTextGradeValue(config,m.author,m.timestamp,gradeId,gradedUser,gradeValue,gradeComment,gradePrivateComment,m.audiences)
})
override def fromTextGradeValue(input:MeTLTextGradeValue):NodeSeq = Stopwatch.time("GenericXmlSerializer.fromTextGradeValue",{
  metlContentToXml("textGradeValue",input,List(
    <gradeId>{input.gradeId}</gradeId>,
    <gradedUser>{input.gradedUser}</gradedUser>,
    <gradeValue>{input.gradeValue.toString}</gradeValue>
  ) ::: input.gradeComment.toList.map(s => {
    <gradeComment>{s}</gradeComment>
  }) ::: input.gradePrivateComment.toList.map(s => {
    <gradePrivateComment>{s}</gradePrivateComment>
  }))
})
}
|
StackableRegiments/analyticalmetlx
|
src/main/scala/com/metl/data/GenericXmlSerializer.scala
|
Scala
|
apache-2.0
| 46,330
|
/*
* Copyright (C) 2016 Vincibean <Andre Bessi>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.vincibean.scala.impatient.chapter6
import org.vincibean.scala.impatient.chapter6.exercise6.Seed
import org.vincibean.scala.impatient.chapter6.exercise6.Seed.Seed
/**
* Implement a function that checks whether a card suit value from the preceding
* exercise is red.
*
* Created by Vincibean on 20/01/16.
*/
package object exercise7 {
  /** Returns true if the given card suit is red.
    *
    * Bug fix: the red suits of a standard deck are Hearts and Diamonds; the
    * original tested Clubs, which is a black suit.
    */
  def isRedSeed(seed: Seed): Boolean = (seed == Seed.Hearts) || (seed == Seed.Diamonds)
}
|
Vincibean/ScalaForTheImpatient-Solutions
|
src/main/scala/org/vincibean/scala/impatient/chapter6/exercise7/package.scala
|
Scala
|
gpl-3.0
| 1,160
|
package collins.controllers.actions.asset
import scala.concurrent.Future
import play.api.data.Form
import play.api.data.Forms.of
import play.api.data.Forms.optional
import play.api.data.Forms.text
import play.api.data.Forms.tuple
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.mvc.Result
import collins.controllers.Api
import collins.controllers.actions.AssetAction
import collins.controllers.actions.RequestDataHolder
import collins.controllers.actions.SecureAction
import collins.controllers.actors.ActivationProcessor
import collins.controllers.actors.ProvisionerResult
import collins.controllers.actors.ProvisionerRun
import collins.controllers.actors.ProvisionerTest
import collins.controllers.forms.truthyFormat
import collins.models.Asset
import collins.models.AssetLifecycle
import collins.models.AssetMetaValue
import collins.models.{Status => AStatus}
import collins.models.Truthy
import collins.provisioning.Provisioner
import collins.provisioning.ProvisionerRequest
import collins.provisioning.{ProvisionerRoleData => ProvisionerRole}
import collins.util.concurrent.BackgroundProcessor
import collins.util.concurrent.BackgroundProcessor.SendType
import collins.util.config.Feature
import collins.softlayer.SoftLayer
import collins.softlayer.SoftLayerConfig
/**
 * Form parsing and validation shared by the asset-provisioning actions.
 * Validation is expressed as Either-chains: Left(RequestDataHolder) carries an
 * HTTP error; Right carries the successfully validated data.
 */
trait ProvisionUtil { self: SecureAction =>
  import collins.controllers.forms._
  // Tuple layout of the bound provisioning form, in field order.
  type ProvisionForm = Tuple7[
    String, // profile
    String, // contact
    Option[String], // suffix
    Option[String], // profile role
    Option[String], // pool
    Option[String], // secondary_role
    Option[Truthy] // active
  ]
  // Play form binding; field names match the ProvisionForm tuple positions.
  val provisionForm = Form(tuple(
    "profile" -> text,
    "contact" -> text,
    "suffix" -> optional(text(3)),
    "primary_role" -> optional(text),
    "pool" -> optional(text),
    "secondary_role" -> optional(text),
    "activate" -> optional(of[Truthy])
  ))
  // Fully validated provisioning request, ready for the background processors.
  case class ActionDataHolder(
    asset: Asset, request: ProvisionerRequest, activate: Boolean, attribs: Map[String,String] = Map.empty
  ) extends RequestDataHolder
  // Entry point: activation preconditions (if requested) and provisionability
  // are checked first, then the full provisioning form is validated.
  protected def validate(asset: Asset, form: ProvisionForm): Validation = {
    val activate = form._7
    if (activeBool(activate) == true)
      validateActivate(asset, form) match {
        case Some(error) =>
          return Left(error)
        case _ =>
      }
    else if (!Provisioner.canProvision(asset))
      return Left(
        RequestDataHolder.error403(
          "Provisioning prevented by configuration. Asset does not have allowed status"
        )
      )
    validateProvision(asset, form)
  }
  // Resolves the profile, then threads the profile's role through each role
  // validator in turn, finally packaging the result into an ActionDataHolder.
  protected def validateProvision(
    asset: Asset, form: ProvisionForm
  ): Validation = {
    val (profile, contact, suffix, primary_role, pool, secondary_role, activate) = form
    Provisioner.makeRequest(asset.tag, profile, Some(contact), suffix) match {
      case None =>
        Left(RequestDataHolder.error400("Invalid profile %s specified".format(profile)))
      case Some(request) =>
        val role = request.profile.role
        validatePrimaryRole(role, primary_role)
          .right.flatMap(vrole => validatePool(vrole, pool))
          .right.flatMap(vrole => validateSecondaryRole(vrole, secondary_role))
          .right.flatMap(vrole => validateAllowedHardware(vrole, asset))
          .right.map(frole => request.profile.copy(role = frole))
          .right.map(profile => request.copy(profile = profile))
          .right.map { frequest =>
            ActionDataHolder(asset, frequest, activeBool(activate), attribs(asset, frequest, form))
          }
    }
  }
  // Preconditions for SoftLayer activation; None means activation may proceed.
  protected def validateActivate(
    asset: Asset, form: ProvisionForm
  ): Option[RequestDataHolder] = {
    if (!asset.isIncomplete)
      Some(RequestDataHolder.error409("Asset status must be 'Incomplete'"))
    else if (!SoftLayerConfig.enabled)
      Some(RequestDataHolder.error501("SoftLayer plugin not enabled"))
    else if (!SoftLayer.isSoftLayerAsset(asset))
      Some(RequestDataHolder.error400("Asset not a SoftLayer asset"))
    else if (!SoftLayer.softLayerId(asset).isDefined)
      Some(RequestDataHolder.error400("Asset not a SoftLayer asset"))
    else
      None
  }
  // Computes the asset attributes to apply for this provisioning run. Maps are
  // merged left-to-right, so later maps override earlier ones.
  protected def attribs(asset: Asset, request: ProvisionerRequest, form: ProvisionForm): Map[String,String] = {
    val build_contact = form._2
    val suffix = form._3
    val role = request.profile.role
    val highPriorityAttrs =
      Map(
        "NODECLASS" -> request.profile.identifier,
        "CONTACT" -> role.contact.getOrElse(""),
        "CONTACT_NOTES" -> role.contact_notes.getOrElse(""),
        "SUFFIX" -> suffix.getOrElse(""),
        "PRIMARY_ROLE" -> role.primary_role.getOrElse(""),
        "POOL" -> role.pool.getOrElse(""),
        "SECONDARY_ROLE" -> role.secondary_role.getOrElse(""),
        "BUILD_CONTACT" -> build_contact
      )
    val lowPriorityAttrs = role.attributes
    // Mapping an attribute to "" marks it for deletion/clearing.
    val clearProfileAttrs = role.clear_attributes.map(a => (a -> "")).toMap
    // make sure high priority attrs take precedence over low priority
    // and make sure any explicitly set attrs override any that are to be cleared
    clearOnRepurposeAttrs(asset) ++ clearProfileAttrs ++ lowPriorityAttrs ++ highPriorityAttrs
  }
  // Attributes to clear when an asset is repurposed: either everything except
  // a configured whitelist, or an explicit configured blacklist.
  private[this] def clearOnRepurposeAttrs(asset: Asset): Map[String, String] = {
    if (Feature.useWhiteListOnRepurpose) {
      val keepAttributes = Feature.keepSomeMetaOnRepurpose.map(_.name)
      val allAttributes = AssetMetaValue.findByAsset(asset).map(_.getName()).toSet
      val deleteAttributes = allAttributes -- keepAttributes
      deleteAttributes.map(s => (s -> "")).toMap
    } else {
      Feature.deleteSomeMetaOnRepurpose.map(_.name).map(s => (s -> "")).toMap
    }
  }
  // Converts the first form-binding error into a 400 response; order matters
  // only in that the first matching field wins.
  protected def fieldError(form: Form[ProvisionForm]): Validation = (form match {
    case f if f.error("profile").isDefined => Option("Profile must be specified")
    case f if f.error("contact").isDefined => Option("Contact must be specified")
    case f if f.error("suffix").isDefined => Option("Suffix must be at least 3 characters")
    case f if f.error("primary_role").isDefined => Option("Invalid primary_role")
    case f if f.error("pool").isDefined => Option("Invalid pool specified")
    case f if f.error("secondary_role").isDefined => Option("Invalid secondary_role")
    case f if f.error("activate").isDefined => Option("activate must be truthy")
    case o => None
  }).map(s => Left(RequestDataHolder.error400(s)))
    .getOrElse(Left(RequestDataHolder.error400("An unknown error occurred")))
  // Absent "activate" means false.
  private def activeBool(activate: Option[Truthy]) = activate.map(_.toBoolean).getOrElse(false)
  type ValidOption = Either[RequestDataHolder,ProvisionerRole]
  // The three role validators below share a pattern: a value already set on
  // the profile wins; otherwise the user-supplied value (uppercased) is used;
  // otherwise it is an error only if the profile requires the field.
  protected def validatePrimaryRole(role: ProvisionerRole, prole: Option[String]): ValidOption = {
    if (role.primary_role.isDefined)
      Right(role)
    else if (prole.isDefined)
      Right(role.copy(primary_role = prole.map(_.toUpperCase)))
    else if (role.requires_primary_role)
      Left(RequestDataHolder.error400("A primary_role is required but none was specified"))
    else
      Right(role)
  }
  // Rejects assets whose node classification does not match the profile's
  // allowed hardware classes (when the profile restricts them at all).
  protected def validateAllowedHardware(role: ProvisionerRole, asset: Asset): ValidOption = role.allowed_classes match {
    case Some(classifiers) => asset.nodeClass match {
      case Some(nc) => {
        if (classifiers contains nc.tag)
          Right(role)
        else
          Left(RequestDataHolder.error400("Asset is classified as %s, but the provisioning profile requires assets matching: %s".format(nc.tag, classifiers.mkString(" or "))))
      }
      case _ => Left(RequestDataHolder.error400("Asset is unclassified, but the provisioning profile requires classified assets matching: %s".format(classifiers.mkString(" or "))))
    }
    case _ => Right(role)
  }
  protected def validateSecondaryRole(role: ProvisionerRole, srole: Option[String]): ValidOption = {
    if (role.secondary_role.isDefined)
      Right(role)
    else if (srole.isDefined)
      Right(role.copy(secondary_role = srole.map(_.toUpperCase)))
    else if (role.requires_secondary_role)
      Left(RequestDataHolder.error400("A secondary_role is required but none was specified"))
    else
      Right(role)
  }
  protected def validatePool(role: ProvisionerRole, pool: Option[String]): ValidOption = {
    if (role.pool.isDefined)
      Right(role)
    else if (pool.isDefined)
      Right(role.copy(pool = pool.map(_.toUpperCase)))
    else if (role.requires_pool)
      Left(RequestDataHolder.error400("A pool is required but none was specified"))
    else
      Right(role)
  }
}
/**
 * Drives the actual provisioning/activation work via background processors,
 * translating processor results into HTTP responses and audit messages.
 */
trait Provisions extends ProvisionUtil with AssetAction { self: SecureAction =>
  // Hook invoked after a successful run; default is a no-op.
  protected def onSuccess() {
    // Hook for rate limiting if needed
  }
  // Hook invoked after a failed run; default is a no-op.
  protected def onFailure() {
    // additional hook
  }
  // Records an audit message against the current asset at the appropriate
  // severity.
  protected def tattle(message: String, error: Boolean) {
    if (error)
      tattler.critical(message, definedAsset)
    else
      tattler.note(message, definedAsset)
  }
  // Activates a SoftLayer asset: applies any attributes, then runs the
  // activation processor in the background. On success the asset is moved to
  // the New status.
  protected def activateAsset(adh: ActionDataHolder): Future[Result] = {
    val ActionDataHolder(asset, pRequest, _, attribs) = adh
    // .get is safe-by-contract here: validateActivate already required a
    // SoftLayer id to be defined.
    val slId = SoftLayer.softLayerId(asset).get
    if (attribs.nonEmpty) {
      val lifeCycle = new AssetLifecycle(userOption(), tattler)
      lifeCycle.updateAssetAttributes(Asset.findById(asset.getId).get, attribs)
    }
    BackgroundProcessor.send(ActivationProcessor(slId)(request)) { res =>
      processProvisionAction(res) {
        case true =>
          // Re-fetch so the status update sees the latest row.
          val newAsset = Asset.findById(asset.getId).get
          Asset.partialUpdate(newAsset, None, AStatus.New.map(_.id))
          setAsset(newAsset)
          tattle("Asset successfully activated", false)
          None
        case false =>
          tattle("Asset activation failed", true)
          onFailure()
          Some(handleError(RequestDataHolder.error400("Asset activation failed")))
      }.getOrElse {
        onSuccess()
        Api.statusResponse(true)
      }
    }
  }
  // Provisions an asset in two phases: a dry-run (ProvisionerTest) first, and
  // only if that passes, attribute updates followed by the real run.
  protected def provisionAsset(adh: ActionDataHolder): Future[Result] = {
    import play.api.Play.current
    val ActionDataHolder(asset, pRequest, _, attribs) = adh
    BackgroundProcessor.send(ProvisionerTest(pRequest)) { res =>
      processProvisionAction(res) { result =>
        processProvision(result)
      }
    }.flatMap {
      case Some(err) =>
        onFailure()
        Future(err)
      case None =>
        if (attribs.nonEmpty) {
          val lifeCycle = new AssetLifecycle(userOption(), tattler)
          lifeCycle.updateAssetAttributes(
            Asset.findById(asset.getId).get, attribs
          )
          setAsset(Asset.findById(asset.getId))
        }
        BackgroundProcessor.send(ProvisionerRun(pRequest)) { res =>
          processProvisionAction(res) { result =>
            processProvision(result).map { err =>
              tattle("Provisioning failed. Exit code %d\n%s".format(result.commandResult.exitCode,
                result.commandResult.toString
              ), true)
              onFailure()
              err
            }.orElse {
              tattle(
                "Successfully provisioned server as %s".format(pRequest.profile.identifier), false
              )
              None
            }
          }.getOrElse {
            onSuccess()
            Api.statusResponse(true)
          }
        }
    }
  }
  // Unwraps a background-processor result: Left(exception) becomes a logged
  // 500 response; Right is handed to the supplied handler.
  protected def processProvisionAction[T, A](res: SendType[T])(f: T => Option[Result]): Option[Result] = res match {
    case Left(ex) =>
      tattle("Exception provisioning asset: %s".format(ex.getMessage), true)
      logger.error("Exception provisioning %s".format(getAsset), ex)
      Some(handleError(RequestDataHolder.error500(
        "There was an exception processing your request: %s".format(ex.getMessage),
        ex
      )))
    case Right(result) => f(result)
  }
  // Exit code 0 means success (None); anything else becomes a 500 response.
  protected def processProvision(result: ProvisionerResult): Option[Result] = result match {
    case success if success.commandResult.exitCode == 0 =>
      None
    case failure if failure.commandResult.exitCode != 0 =>
      Some(handleError(RequestDataHolder.error500(
        "There was an error processing your request. Exit Code %d".format(
          failure.commandResult.exitCode
        ), new Exception(failure.commandResult.toString)
      )))
  }
}
|
funzoneq/collins
|
app/collins/controllers/actions/asset/ProvisionUtil.scala
|
Scala
|
apache-2.0
| 12,285
|
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.server.testkit
import java.net.URI
import akka.NotUsed
import play.api.libs.typedmap.TypedMap
import play.api.mvc.request.RemoteConnection
import play.api.mvc.request.RequestTarget
import play.api.mvc.Headers
import play.api.mvc.Request
import play.core.parsers.FormUrlEncodedParser
/**
* This is a simplified FakeRequest inspired on Play-Test's FakeRequest. Creating this simple copy here
* avoids adding a dependency to play-test that brings in too much transitive baggage
*/
/**
 * A deliberately minimal stand-in for Play-Test's FakeRequest, kept local so
 * the test sources do not need a dependency on play-test and its transitive
 * baggage. Only the members the server tests actually touch are implemented:
 * an empty body, a localhost connection, and a target derived from `path`.
 */
class FakeRequest(override val method: String, path: String) extends Request[NotUsed] {
  private val rawPath = path

  override def body: NotUsed = NotUsed

  override def version: String = "HTTP/1.1"

  override def headers: Headers = new Headers(Seq("Host" -> "localhost"))

  override def attrs: TypedMap = TypedMap.empty

  // Every fake request appears to originate from localhost over plain HTTP.
  override def connection: RemoteConnection =
    RemoteConnection(remoteAddressString = "127.0.0.1", secure = false, clientCertificateChain = None)

  override def target: RequestTarget = new RequestTarget {
    override def uriString: String = rawPath
    override lazy val uri: URI = new URI(uriString)
    // The path proper is everything before the first '?', if any.
    override lazy val path: String = uriString.takeWhile(_ != '?')
    override lazy val queryMap: Map[String, Seq[String]] = FormUrlEncodedParser.parse(queryString)
  }
}
|
rcavalcanti/lagom
|
service/scaladsl/server/src/test/scala/com/lightbend/lagom/scaladsl/server/testkit/FakeRequest.scala
|
Scala
|
apache-2.0
| 1,432
|
import akka.actor.ActorSystem
import colossus.core.IOSystem
import colossus.protocols.http.HttpMethod._
import colossus.protocols.http.UrlParsing._
import colossus.protocols.http.{HttpServer, Initializer, RequestHandler}
import colossus.protocols.http.{ContentType, Http, HttpCodes, HttpHeader, HttpHeaders}
import colossus.service.GenRequestHandler.PartialHandler
import colossus.service.Callback.Implicits._
// Documentation-example server: the `// #exampleN` markers delimit snippets
// that are included into the colossus docs, so the code is written for
// illustration rather than execution.
object HttpService2 {
  implicit val actorSystem = ActorSystem()
  implicit val ioSystem = IOSystem()
  HttpServer.start("example-server", 9000) { initContext =>
    new Initializer(initContext) {
      override def onConnect: RequestHandlerFactory =
        serverContext =>
          new RequestHandler(serverContext) {
            override def handle: PartialHandler[Http] = {
              case request @ Get on Root =>
                // #example1
                request.ok("hello").withHeader("header-name", "header-value")
                request.ok("hello", HttpHeaders(HttpHeader("header-name", "header-value")))
                request.ok("hello", HttpHeaders(HttpHeader(HttpHeaders.CookieHeader, "header-value")))
                // #example1
                // #example1a
                request.ok("hello", HttpHeaders(HttpHeader("Content-Type", "header-value")))
                // #example1a
              // NOTE: this second `Get on Root` case is unreachable at runtime
              // (the first case matches identically); it exists only to host
              // the #example2/#example3 doc snippets.
              case request @ Get on Root =>
                // #example3
                val body: String = request.body.bytes.utf8String
                val contentType: Option[String] = request.head.headers.contentType
                val headers: HttpHeaders = request.head.headers
                val parameter: Option[String] = request.head.parameters.getFirst("key")
                // #example3
                // #example2
                request
                  .respond(
                    HttpCodes.CONFLICT,
                    """{"name":"value"}"""
                  )
                  .withContentType(ContentType.ApplicationJson)
                // #example2
            }
          }
    }
  }
}
|
tumblr/colossus
|
colossus-docs/src/main/scala/HttpService2.scala
|
Scala
|
apache-2.0
| 2,062
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import org.apache.spark.annotation.DeveloperApi
/**
* :: DeveloperApi ::
* A result of a job in the DAGScheduler.
*/
@DeveloperApi
sealed trait JobResult

/** The job completed successfully. */
@DeveloperApi
case object JobSucceeded extends JobResult

/** The job failed; `exception` carries the failure cause. Spark-internal. */
@DeveloperApi
private[spark] case class JobFailed(exception: Exception) extends JobResult
|
yelshater/hadoop-2.3.0
|
spark-core_2.10-1.0.0-cdh5.1.0/src/main/scala/org/apache/spark/scheduler/JobResult.scala
|
Scala
|
apache-2.0
| 1,145
|
package cn.edu.neu.chiewen.cknn.trajectory
import cn.edu.neu.chiewen.cknn.ResultKeeper
// An immutable sequence of 2D positions that is consumed one step at a time:
// each call to nextPosition yields a new Trajectory with the head removed.
class Trajectory(val positions: List[(Double, Double)], val factory: TrajectoryFactory) extends Serializable with ResultKeeper {
  // Current position. NOTE(review): throws on an exhausted trajectory (empty
  // list) — callers should check isEnd first.
  def nowAt = positions.head
  // True once every position has been consumed.
  def isEnd = positions.isEmpty
  // Advance one step; drop(1) (unlike tail) is safe on an empty list.
  def nextPosition = new Trajectory(positions.drop(1), factory)
  // Recursively prints results for all children. `children` presumably comes
  // from ResultKeeper — TODO confirm against that trait's definition.
  def showResult(s: String) {
    print("\\t")
    for (i <- children) i.showResult(s)
    println
  }
}
|
chiewen/CkNN
|
CkNN/src/main/scala/cn/edu/neu/chiewen/cknn/trajectory/Trajectory.scala
|
Scala
|
gpl-2.0
| 449
|
package fpinscala.datastructures
sealed trait List[+A] // `List` data type, parameterized on a type, `A`
case object Nil extends List[Nothing] // A `List` data constructor representing the empty list
/* Another data constructor, representing nonempty lists. Note that `tail` is another `List[A]`,
which may be `Nil` or another `Cons`.
*/
case class Cons[+A](head: A, tail: List[A]) extends List[A]
object List { // `List` companion object. Contains functions for creating and working with lists.
  /** Sums a list of integers; the empty list sums to 0. */
  def sum(ints: List[Int]): Int = ints match {
    case Nil => 0
    case Cons(x,xs) => x + sum(xs)
  }
  /** Multiplies a list of doubles; short-circuits on an explicit 0.0 head. */
  def product(ds: List[Double]): Double = ds match {
    case Nil => 1.0
    case Cons(0.0, _) => 0.0
    case Cons(x,xs) => x * product(xs)
  }
  /** Variadic constructor: List(1,2,3) builds Cons(1, Cons(2, Cons(3, Nil))). */
  def apply[A](as: A*): List[A] =
    if (as.isEmpty) Nil
    else Cons(as.head, apply(as.tail: _*))
  // Pattern-matching exercise: matches the third case, so x == 1 + 2 == 3.
  val x = List(1,2,3,4,5) match {
    case Cons(x, Cons(2, Cons(4, _))) => x
    case Nil => 42
    case Cons(x, Cons(y, Cons(3, Cons(4, _)))) => x + y
    case Cons(h, t) => h + sum(t)
    case _ => 101
  }
  /** Concatenates two lists; O(length of a1). */
  def append[A](a1: List[A], a2: List[A]): List[A] =
    a1 match {
      case Nil => a2
      case Cons(h,t) => Cons(h, append(t, a2))
    }
  /** Right fold; not stack-safe for long lists (not tail-recursive). */
  def foldRight[A,B](as: List[A], z: B)(f: (A, B) => B): B =
    as match {
      case Nil => z
      case Cons(x, xs) => f(x, foldRight(xs, z)(f))
    }
  def sum2(ns: List[Int]) =
    foldRight(ns, 0)((x,y) => x + y)
  def product2(ns: List[Double]) =
    foldRight(ns, 1.0)(_ * _)
  /** All elements but the first; error on the empty list. */
  def tail[A](l: List[A]): List[A] =
    l match {
      case Nil => sys.error("tail of empty list")
      case Cons(_, xs) => xs
    }
  /** Replaces the first element; error on the empty list. */
  def setHead[A](l: List[A], h: A): List[A] =
    l match {
      case Nil => sys.error("setHead for empty list")
      case Cons(_, xs) => Cons(h, xs)
    }
  /** Drops the first n elements; dropping past the end yields Nil. */
  def drop[A](l: List[A], n: Int): List[A] =
    if (n <= 0) l
    else l match {
      case Nil => Nil
      case Cons(_, xs) => drop(xs, n-1)
    }
  /** Drops the longest prefix whose elements satisfy f. */
  def dropWhile[A](l: List[A], f: A => Boolean): List[A] =
    l match {
      case Cons(x, xs) if f(x) => dropWhile(xs, f)
      case _ => l
    }
  /** All elements but the last; error on the empty list.
    *
    * Bug fix: the recursive case previously called `init(l)` (the whole list)
    * instead of `init(xs)` (the tail), causing infinite recursion and a
    * StackOverflowError on any list of length >= 2.
    */
  def init[A](l: List[A]): List[A] =
    l match {
      case Nil => sys.error("init of empty list")
      case Cons(_, Nil) => Nil
      case Cons(x, xs) => Cons(x, init(xs))
    }
  def length[A](l: List[A]): Int =
    foldRight(l, 0)((_, a) => a + 1)
  /** Left fold; tail-recursive and therefore stack-safe. */
  @annotation.tailrec
  def foldLeft[A,B](l: List[A], z: B)(f: (B, A) => B): B =
    l match {
      case Nil => z
      case Cons(x, xs) => foldLeft(xs, f(z,x))(f)
    }
  def sum3(l: List[Int]): Int =
    foldLeft(l, 0)(_ + _)
  def product3(l: List[Double]): Double =
    foldLeft(l, 1.0)(_ * _)
  def length2[A](l: List[A]): Int =
    foldLeft(l, 0)((acc, _) => acc + 1)
  def reverse[A](l: List[A]): List[A] =
    foldLeft(l, Nil: List[A])((acc, h) => Cons(h, acc))
  // Exercise: each fold expressed in terms of the other, either by reversing
  // first or by building up a function (continuation) and applying it to z.
  def foldLeftViaFoldRight[A,B](l: List[A], z: B)(f: (B, A) => B): B =
    foldRight(reverse(l), z)((x, acc) => f(acc, x))
  def foldLeftViaFoldRight2[A,B](l: List[A], z: B)(f: (B, A) => B): B =
    foldRight(l, (b: B) => b)((x, g) => (b: B) => g(f(b, x)))(z)
  def foldRightViaFoldLeft[A,B](l: List[A], z: B)(f: (A, B) => B): B =
    foldLeft(l, (b: B) => b)((g, h) => (b: B) => g(f(h, b)))(z)
  def foldRightViaFoldLeft2[A,B](l: List[A], z: B)(f: (A, B) => B) =
    foldLeft(reverse(l), z)((acc, h) => f(h, acc))
  def appendViaFoldRight[A](a: List[A], b: List[A]): List[A] =
    foldRight(a, b)((x, acc) => Cons(x, acc))
  /** Flattens a list of lists into a single list. */
  def concat[A](l: List[List[A]]): List[A] =
    foldLeft(l, List[A]())((acc, h) => append(acc, h))
  /** Adds 1 to each element. */
  def increase(l: List[Int]): List[Int] =
    foldRight(l, List[Int]())((x, acc) => Cons(x+1, acc))
  def map[A,B](l: List[A])(f: A => B): List[B] =
    foldRight(l, Nil: List[B])((x, acc) => Cons(f(x), acc))
  def filter[A](l: List[A])(p: A => Boolean): List[A] =
    foldRight(l, Nil: List[A])((x, acc) => if (p(x)) Cons(x,acc) else acc)
  def flatMap[A,B](l: List[A])(f: A => List[B]): List[B] =
    concat(map(l)(f))
  def filterViaFlatMap[A](l: List[A], p: A => Boolean): List[A] =
    flatMap(l)(a => if (p(a)) List(a) else Nil)
  /** Element-wise sum; the result has the length of the shorter input. */
  def addPairwise(a: List[Int], b: List[Int]): List[Int] =
    (a, b) match {
      case (_, Nil) => Nil
      case (Nil, _) => Nil
      case (Cons(h1, t1), Cons(h2, t2)) => Cons(h1+h2, addPairwise(t1, t2))
    }
  /** Generalization of addPairwise to an arbitrary combining function. */
  def zipWith[A,B,C](a: List[A], b: List[B])(f: (A, B) => C): List[C] =
    (a, b) match {
      case (_, Nil) => Nil
      case (Nil, _) => Nil
      case (Cons(h1, t1), Cons(h2, t2)) => Cons(f(h1, h2), zipWith(t1, t2)(f))
    }
  /** True if `prefix` is a prefix of `l`. */
  @annotation.tailrec
  def startWith[A](l: List[A], prefix: List[A]): Boolean =
    (l, prefix) match {
      case (_, Nil) => true
      case (Cons(h1, t1), Cons(h2, t2)) if (h1 == h2) => startWith(t1, t2)
      case _ => false
    }
  /** True if `sub` appears as a contiguous subsequence of `l`. */
  @annotation.tailrec
  def hasSubsequence[A](l: List[A], sub: List[A]): Boolean =
    l match {
      case Nil => sub == Nil
      case _ if startWith(l, sub) => true
      case Cons(_, t) => hasSubsequence(t, sub)
    }
}
|
724399396/function-programming-in-scala
|
exercises/src/main/scala/fpinscala/datastructures/List.scala
|
Scala
|
mit
| 5,304
|
/**
* Bolt
* ArrayValue
*
* Copyright (c) 2017 Osamu Takahashi
*
* This software is released under the MIT License.
* http://opensource.org/licenses/mit-license.php
*
* @author Osamu Takahashi
*/
package com.sopranoworks.bolt.values
import com.google.cloud.spanner.{Mutation, Type}
import scala.collection.JavaConversions._
/**
*
* @param values
* @param evaluated
* @param arrayType
*/
case class ArrayValue(var values:java.util.List[Value],var evaluated:Boolean = false,var arrayType:Type = null) extends Value with LiteralValue {
  // Cache of the evaluated, NullValue-filtered elements; rebuilt by eval.
  private var _evaluated = List.empty[Value]
  // Set when the last eval found an element that could not yet be resolved.
  private var _stayUnresolved = false
  override def stayUnresolved: Boolean = _stayUnresolved
  // An array is valid when it is empty or all evaluated elements share one
  // Spanner type. Side effect: pins `arrayType` to the detected type.
  private def _isValidArray:Boolean = {
    if (_evaluated.isEmpty) {
      true
    } else {
      val t = if (arrayType != null) arrayType else _evaluated(0).spannerType
      if (t != null) {
        arrayType = t
        _evaluated.forall(_.spannerType == t)
      } else {
        false
      }
    }
  }
  // Evaluates all elements once (or again when the backing java list changed
  // length since the cache was built — `values.length` works via the
  // JavaConversions import at the top of the file). Returns `this` early,
  // leaving the cache untouched, while any element stays unresolved.
  override def eval: Value = {
    if (!evaluated || values.length != _evaluated.length) {
      _stayUnresolved = values.foldLeft(false) {
        case (f,v) =>
          f || v.eval.stayUnresolved
      }
      if (_stayUnresolved)
        return this
      _evaluated = values.toList.map(_.asValue).filter(_ != NullValue)
      // NOTE(review): the exception carries an empty message — consider
      // describing the type mismatch; confirm callers don't match on it.
      if (!_isValidArray)
        throw new RuntimeException("")
      evaluated = true
    }
    this
  }
  // Drops the evaluation cache when any element is invalidated.
  // NOTE(review): the parameter shadows the `values` field, so the fold walks
  // the *argument* list and passes it back to each of its own elements —
  // confirm it was not meant to walk this array's own `values` field.
  override def invalidateEvaluatedValueIfContains(values: List[Value]): Boolean = {
    if (values.foldLeft(false) {
      case (f,v) => f || v.invalidateEvaluatedValueIfContains(values)
    }) {
      evaluated = false
      _evaluated = List.empty[Value]
      true
    } else false
  }
  // Threads the column map through every element, accumulating resolutions.
  override def resolveReference(columns: Map[String, TableColumnValue]): Map[String, TableColumnValue] = {
    values.foldLeft(columns) {
      case (c,v) =>
        v.resolveReference(c)
    }
  }
  // Literal text form, e.g. [1,2,3]; uses the raw (unevaluated) elements.
  override def text:String = values.map(_.text).mkString("[",",","]")
  // Element type of the array; forces evaluation so `arrayType` is pinned.
  override def spannerType: Type = {
    eval
    arrayType
  }
  override def asArray: ArrayValue = this
  // Number of evaluated (non-null) elements — may be smaller than values.size.
  def length:Int = {
    eval
    _evaluated.length
  }
  // SQL OFFSET(n): zero-based element access; a null index reads element 0.
  def offset(v:Value):Value = {
    val n = v.eval.asValue match {
      case NullValue =>
        0
      case IntValue(_,i,_) =>
        i.toInt
      case _ =>
        throw new RuntimeException("Array offset type must be int64")
    }
    getField(n)
  }
  // SQL ORDINAL(n): one-based element access; a null index maps to element 0.
  def ordinal(v:Value):Value = {
    val n = v.eval.asValue match {
      case NullValue =>
        0
      case IntValue(_,i,_) =>
        i.toInt - 1
      case _ =>
        throw new RuntimeException("Array offset type must be int64")
    }
    getField(n)
  }
  // Membership test against the evaluated elements.
  def contains(v:Value):Boolean = {
    this.eval
    _evaluated.exists(_.isEqualValue(v))
  }
  // NOTE(review): always writes via toStringArray regardless of arrayType —
  // confirm non-string element types round-trip correctly through Spanner.
  override def setTo(m: Mutation.WriteBuilder, key: String): Unit = {
    this.eval
    m.set(key).toStringArray(values.map(_.text))
  }
  // Indexes into the raw `values` list (not the filtered cache); bounds-checked.
  override def getField(fieldIdx:Int):Value = {
    if (fieldIdx < 0 || values.length <= fieldIdx) throw new ArrayIndexOutOfBoundsException
    values.get(fieldIdx)
  }
}
|
OsamuTakahashi/bolt
|
src/main/scala/com/sopranoworks/bolt/values/ArrayValue.scala
|
Scala
|
mit
| 3,106
|
import scala.quoted._
object Macros {
  // Inline entry point: splices the by-name assertion expression into the
  // macro implementation at compile time.
  inline def assert2(expr: => Boolean): Unit = ${ assertImpl('expr) }
  // Wraps the spliced $expr in a locally defined `foo`, so the quoted code's
  // owner changes — presumably a regression fixture for quote owner
  // re-assignment (the file lives under run-macros/quote-change-owner).
  def assertImpl(expr: Expr[Boolean])(using QuoteContext) = '{
    def foo(): Unit = $expr
    foo()
  }
}
|
som-snytt/dotty
|
tests/run-macros/quote-change-owner/Macro_1.scala
|
Scala
|
apache-2.0
| 216
|
package com.twitter.server.handler
import com.twitter.finagle.Service
import com.twitter.finagle.http.{Request, Response, Uri}
import com.twitter.finagle.server.ServerRegistry
import com.twitter.finagle.util.StackRegistry
import com.twitter.io.Buf
import com.twitter.server.util.HtmlUtils.escapeHtml
import com.twitter.server.util.HttpUtils.{new404, newResponse}
import com.twitter.server.util.MetricSource
import com.twitter.server.view.StackRegistryView
import com.twitter.util.Future
import java.net.URLDecoder
import java.nio.charset.StandardCharsets
private object ServerRegistryHandler {
  /** Renders the registered servers as an html fragment: one nav tab per
    * server plus a stats pane whose `data-key` attributes are polled from
    * /admin/metrics by the included JS.
    *
    * Fix: the "requests" `<dd>` was missing the space between its `id` and
    * `data-key` attributes (`id="..."data-key="..."`), producing invalid
    * markup that the metrics poller cannot bind to. */
  def render(servers: Seq[(String, StackRegistry.Entry)]): String =
    s"""<link type="text/css" href="/admin/files/css/server-registry.css" rel="stylesheet"/>
      <script type="application/javascript" src="/admin/files/js/chart-renderer.js"></script>
      <script type="application/javascript" src="/admin/files/js/server-registry.js"></script>
      <ul id="server-tabs" class="nav nav-tabs" data-refresh-uri="/admin/metrics">
      ${(for {
        (scope, entry) <- servers
      } yield {
        s"""<li><a href="#${entry.name}-entry" data-toggle="tab">${escapeHtml(scope)}</a></li>"""
      }).mkString("\\n")}
      </ul>
      <!-- Tab panes -->
      <div id="servers" class="tab-content">
      ${(for ((scope, entry) <- servers) yield {
        val scopeDash = scope.replace("/", "-")
        s"""<div class="tab-pane borders" id="${entry.name}-entry">
          <div class="row">
            <!-- server stats -->
            <div class="server-info col-md-3">
              <dl class="server-stats dl-horizontal">
                <dt><a href="/admin/metrics#$scope/pending">Pending:</a></dt>
                <dd id="${scopeDash}-pending" data-key="$scope/pending">...</dd>
                <dt><a href="/admin/metrics#$scope/failures">Failures:</a></dt>
                <dd id="${scopeDash}-failures" data-key="$scope/failures">...</dd>
                <dt><a href="/admin/metrics#$scope/success">Success:</a></dt>
                <dd id="${scopeDash}-success" data-key="$scope/success">...</dd>
                <dt><a href="/admin/metrics#$scope/requests">Requests:</a></dt>
                <dd id="${scopeDash}-requests" data-key="$scope/requests">...</dd>
              </dl>
            </div>
            <!-- graph -->
            <div id="server-graph" class="col-md-9"></div>
          </div>
        </div>"""
      }).mkString("\\n")}
      </div>"""
}
/**
* Renders information about servers registered to Finagle's ServerRegistry
* in an html fragment. Servers can be queried by passing in the server name
* as part of the uri (ex. "/admin/servers/myserver").
*/
/** Serves the server-registry admin pages: the index lists every named,
  * registered server; any other path is treated as a (URL-encoded) server
  * name and rendered in detail. */
class ServerRegistryHandler(
  uriPrefix: String,
  source: MetricSource = new MetricSource,
  registry: StackRegistry = ServerRegistry)
    extends Service[Request, Response] {
  // Search the metrics source for the stat scope that includes `serverName`.
  // The search namespace includes both "$serverName/" and "srv/$serverName"
  // to take into account finagle's ServerStatsReceiver. Note, unnamed servers are
  // ignored as we can't dissambiguate their stats.
  private[this] def findScope(serverName: String): Option[String] = {
    // Fix: the original wrapped the bare name in a pointless s"$serverName"
    // interpolation; the value is the name itself.
    val k0 = serverName
    val k1 = s"srv/$serverName"
    // "pending" is used as the probe stat to detect which scope exists.
    if (source.contains(s"$k0/pending")) Some(k0)
    else if (source.contains(s"$k1/pending")) Some(k1)
    else None
  }
  def apply(req: Request): Future[Response] = {
    val uri = Uri.fromRequest(req)
    uri.path.stripPrefix(uriPrefix) match {
      // Index page: every registered, *named* server that has metrics.
      case idx @ ("index.html" | "index.htm" | "index.txt" | "servers") =>
        val servers = (registry.registrants.flatMap {
          case e: StackRegistry.Entry if e.name.nonEmpty =>
            for (scope <- findScope(e.name)) yield (scope, e)
          case _ => Nil
        }).toSeq
        val html = ServerRegistryHandler.render(servers)
        // This is useful to avoid the returned fragment being wrapped
        // with an index in the context of an ajax call.
        val typ = if (idx.endsWith(".txt")) "text/plain" else "text/html"
        newResponse(
          contentType = s"$typ;charset=UTF-8",
          content = Buf.Utf8(html)
        )
      // Detail page for a single server, addressed by URL-encoded name.
      case name =>
        val decodedName = URLDecoder.decode(name, StandardCharsets.UTF_8.name)
        val entries = registry.registrants filter { _.name == decodedName }
        if (entries.isEmpty) new404(s"$name could not be found.")
        else {
          val server = entries.head
          val scope = findScope(server.name)
          val html = StackRegistryView.render(server, scope)
          newResponse(
            contentType = "text/html;charset=UTF-8",
            content = Buf.Utf8(html)
          )
        }
    }
  }
}
|
twitter/twitter-server
|
server/src/main/scala/com/twitter/server/handler/ServerRegistryHandler.scala
|
Scala
|
apache-2.0
| 4,978
|
package org.littlewings.javaee7.config
import javax.inject.Inject
import org.apache.deltaspike.core.api.config.ConfigResolver
import org.apache.deltaspike.core.api.projectstage.ProjectStage
import org.apache.deltaspike.testcontrol.api.junit.CdiTestRunner
import org.junit.runner.RunWith
import org.junit.{Before, Test}
import org.scalatest.Matchers
import org.scalatest.junit.JUnitSuite
@RunWith(classOf[CdiTestRunner])
class ConfigResolverSpec extends JUnitSuite with Matchers {
  // Static lookups without a project stage: plain values from the config
  // source(s), with null for a missing key unless a default is supplied.
  @Test
  def configResolver(): Unit = {
    ConfigResolver.getPropertyValue("database.server") should be("localhost")
    ConfigResolver.getPropertyValue("database.port") should be("3306")
    ConfigResolver.getPropertyValue("database.name") should be("test")
    ConfigResolver.getPropertyValue("missing.key") should be(null)
    ConfigResolver.getPropertyValue("missing.key", "defaultValue") should be("defaultValue")
    ConfigResolver.getPropertyValue("jdbc.url") should be("jdbc:mysql://localhost:3306/test")
  }
  @Inject
  var projectStage: ProjectStage = _
  // NOTE(review): pushes the CDI-injected stage into ConfigResolver's private
  // static `projectStage` field via reflection, so the stage-aware tests below
  // see it. This mutates global state shared across tests — confirm test
  // isolation if more specs touch ConfigResolver.
  @Before
  def setUp(): Unit = {
    val configResolverProjectStageField = classOf[ConfigResolver].getDeclaredField("projectStage")
    configResolverProjectStageField.setAccessible(true)
    configResolverProjectStageField.set(null, projectStage)
  }
  // Stage-aware lookups: "key.STAGE" entries override the base "key" values.
  @Test
  def configResolverProjectStageAware(): Unit = {
    ConfigResolver.getProjectStageAwarePropertyValue("database.server") should be("ut-server")
    ConfigResolver.getProjectStageAwarePropertyValue("database.port") should be("13306")
    ConfigResolver.getProjectStageAwarePropertyValue("database.name") should be("ut-test")
    ConfigResolver.getProjectStageAwarePropertyValue("jdbc.url") should be("jdbc:mysql://ut-server:13306/ut-test")
    // ConfigResolver.getProjectStageAwarePropertyValue("jdbc.url") should be("jdbc:mysql://localhost:3306/test")
  }
  // Typed resolver API: conversion, defaults, and ${...} variable expansion
  // (only when evaluateVariables(true) is set).
  @Test
  def typedResolver(): Unit = {
    ConfigResolver
      .resolve("database.server")
      .as(classOf[String])
      .getValue should be("ut-server")
    ConfigResolver
      .resolve("database.port")
      .as(classOf[Integer])
      .getValue should be(13306)
    ConfigResolver
      .resolve("missing.key")
      .as(classOf[String])
      .getValue should be(null)
    ConfigResolver
      .resolve("missing.key")
      .as(classOf[String])
      .withDefault("defaultValue")
      .getValue should be("defaultValue")
    ConfigResolver
      .resolve("jdbc.url")
      .as(classOf[String])
      .getValue should be("jdbc:mysql://${database.server}:${database.port}/${database.name}")
    ConfigResolver
      .resolve("jdbc.url")
      .as(classOf[String])
      .evaluateVariables(true)
      .getValue should be("jdbc:mysql://localhost:3306/test")
  }
}
|
kazuhira-r/javaee7-scala-examples
|
cdi-deltaspike-configuration/src/test/scala/org/littlewings/javaee7/config/ConfigResolverSpec.scala
|
Scala
|
mit
| 2,752
|
package fpinscala.gettingstarted
// A comment!
/* Another comment */
/** A documentation comment */
object MyModule {
  /** Absolute value of `n`. */
  def abs(n: Int): Int =
    if (n < 0) -n
    else n
  private def formatAbs(x: Int) = {
    val msg = "The absolute value of %d is %d"
    msg.format(x, abs(x))
  }
  def main(args: Array[String]): Unit =
    println(formatAbs(-42))
  // A definition of factorial, using a local, tail recursive function
  def factorial(n: Int): Int = {
    @annotation.tailrec
    def go(n: Int, acc: Int): Int =
      if (n <= 0) acc
      else go(n-1, n*acc)
    go(n, 1)
  }
  // Another implementation of `factorial`, this time with a `while` loop
  def factorial2(n: Int): Int = {
    var acc = 1
    var i = n
    while (i > 0) { acc *= i; i -= 1 }
    acc
  }
  /** Exercise 1 (solved): the nth Fibonacci number, zero-indexed so that
    * fib(0) = 0 and fib(1) = 1. A tail-recursive loop carries the previous
    * and current values, so it runs in constant stack and O(n) time —
    * the original left this as an unimplemented `???` stub. */
  def fib(n: Int): Int = {
    @annotation.tailrec
    def loop(k: Int, prev: Int, cur: Int): Int =
      if (k <= 0) prev
      else loop(k - 1, cur, prev + cur)
    loop(n, 0, 1)
  }
  // This definition and `formatAbs` are very similar..
  private def formatFactorial(n: Int) = {
    val msg = "The factorial of %d is %d."
    msg.format(n, factorial(n))
  }
  // We can generalize `formatAbs` and `formatFactorial` to
  // accept a _function_ as a parameter
  def formatResult(name: String, n: Int, f: Int => Int) = {
    val msg = "The %s of %d is %d."
    msg.format(name, n, f(n))
  }
}
object FormatAbsAndFactorial {
  import MyModule._
  // Now we can use our general `formatResult` function
  // with both `abs` and `factorial`
  // (both named methods are eta-expanded to Int => Int function values).
  def main(args: Array[String]): Unit = {
    println(formatResult("absolute value", -42, abs))
    println(formatResult("factorial", 7, factorial))
  }
}
// Functions get passed around so often in FP that it's
// convenient to have syntax for constructing a function
// *without* having to give it a name
object AnonymousFunctions {
  import MyModule._
  // Some examples of anonymous functions:
  // the five `increment` variants are the same function written with
  // progressively lighter syntax, ending with the `_` placeholder form.
  def main(args: Array[String]): Unit = {
    println(formatResult("absolute value", -42, abs))
    println(formatResult("factorial", 7, factorial))
    println(formatResult("increment", 7, (x: Int) => x + 1))
    println(formatResult("increment2", 7, (x) => x + 1))
    println(formatResult("increment3", 7, x => x + 1))
    println(formatResult("increment4", 7, _ + 1))
    println(formatResult("increment5", 7, x => { val r = x + 1; r }))
  }
}
object MonomorphicBinarySearch {
  /** Binary search specialized to `Double`, the 64-bit floating point
    * primitive. Returns the index of `key` when present; otherwise returns
    * `-lastProbe - 1`, where `lastProbe` is the midpoint of the final
    * comparison (a negative "not found" encoding).
    * Requires `ds` to be sorted ascending. */
  def binarySearch(ds: Array[Double], key: Double): Int = {
    @annotation.tailrec
    def loop(lo: Int, lastProbe: Int, hi: Int): Int =
      if (lo > hi) -lastProbe - 1
      else {
        val probe = (lo + hi) / 2
        // Arrays are indexed with the same syntax as function application.
        val candidate = ds(probe)
        if (candidate == key) probe
        else if (candidate > key) loop(lo, probe, probe - 1)
        else loop(probe + 1, probe, hi)
      }
    loop(0, 0, ds.length - 1)
  }
}
object PolymorphicFunctions {
  // Here's a polymorphic version of `binarySearch`, parameterized on
  // a function for testing whether an `A` is greater than another `A`.
  def binarySearch[A](as: Array[A], key: A, gt: (A,A) => Boolean): Int = {
    @annotation.tailrec
    def go(low: Int, mid: Int, high: Int): Int = {
      if (low > high) -mid - 1 // not found: encodes the last probed position
      else {
        val mid2 = (low + high) / 2
        val a = as(mid2)
        val greater = gt(a, key)
        if (!greater && !gt(key,a)) mid2 // neither greater => equal under gt
        else if (greater) go(low, mid2, mid2-1)
        else go(mid2 + 1, mid2, high)
      }
    }
    go(0, 0, as.length - 1)
  }
  /** Exercise 2 (solved): true when no element is greater than its successor
    * according to `gt`; empty and single-element arrays are trivially sorted.
    * The original left this as an unimplemented `???` stub. */
  def isSorted[A](as: Array[A], gt: (A,A) => Boolean): Boolean = {
    @annotation.tailrec
    def loop(i: Int): Boolean =
      if (i >= as.length - 1) true
      else if (gt(as(i), as(i + 1))) false
      else loop(i + 1)
    loop(0)
  }
  // Polymorphic functions are often so constrained by their type
  // that they only have one implementation! Here's an example:
  def partial1[A,B,C](a: A, f: (A,B) => C): B => C =
    (b: B) => f(a, b)
  /** Exercise 3 (solved): turn a two-argument function into a chain of
    * one-argument functions. Since `=>` associates to the right, the
    * return type reads as A => B => C. (Was a `???` stub.) */
  def curry[A,B,C](f: (A, B) => C): A => (B => C) =
    a => b => f(a, b)
  // NB: The `Function2` trait has a `curried` method already
  /** Exercise 4 (solved): the inverse of `curry`. (Was a `???` stub.) */
  def uncurry[A,B,C](f: A => B => C): (A, B) => C =
    (a, b) => f(a)(b)
  /*
  NB: There is a method on the `Function` object in the standard library,
  `Function.uncurried` that you can use for uncurrying.
  Note that we can go back and forth between the two forms. We can curry
  and uncurry and the two forms are in some sense "the same". In FP jargon,
  we say that they are _isomorphic_ ("iso" = same; "morphe" = shape, form),
  a term we inherit from category theory.
  */
  /** Exercise 5 (solved): right-to-left composition — apply `g`, then `f`.
    * (Was a `???` stub.) */
  def compose[A,B,C](f: B => C, g: A => B): A => C =
    a => f(g(a))
}
|
damien-neveu/functional-programming-in-scala
|
exercises/src/main/scala/fpinscala/gettingstarted/GettingStarted.scala
|
Scala
|
mit
| 4,993
|
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: blah@cliffano.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package org.openapitools.client.model
/** Clock difference record from the Jenkins REST API.
  * Auto-generated model — regenerate rather than editing (see file header). */
case class ClockDifference (
  `class`: Option[String] = None, // Jenkins "_class" type discriminator — TODO confirm exact payload key
  diff: Option[Integer] = None // clock skew; presumably milliseconds — verify against the Jenkins API
)
|
cliffano/swaggy-jenkins
|
clients/scala-httpclient-deprecated/generated/src/main/scala/org/openapitools/client/model/ClockDifference.scala
|
Scala
|
mit
| 485
|
package de.zalando.play.controllers
import java.util.Base64
import scala.language.implicitConversions
/**
* @since 20.02.2016.
*/
/** Wrapper holding a base64-encoded payload in `value`; `toString` yields the
  * decoded plain text. Fix: encode/decode now pin UTF-8 explicitly — the
  * original used the JVM's platform-default charset, so the same input could
  * produce different bytes on differently configured hosts. */
case class Base64String(value: String) {
  override val toString: String = Base64String.base64string2string(this)
}
object Base64String {
  import java.nio.charset.StandardCharsets
  /** Encodes plain text into its base64 wrapper. */
  implicit def string2base64string(s: String): Base64String =
    Base64String(Base64.getEncoder.encodeToString(s.getBytes(StandardCharsets.UTF_8)))
  /** Decodes the wrapped base64 payload back to plain text. */
  implicit def base64string2string(s: Base64String): String =
    new String(Base64.getDecoder.decode(s.value), StandardCharsets.UTF_8)
}
/** Wrapper around a raw byte payload. Fix: string conversions now pin UTF-8 —
  * the original relied on the platform-default charset, making round-trips
  * host-dependent. */
case class BinaryString(value: Array[Byte])
object BinaryString {
  import java.nio.charset.StandardCharsets
  def fromString(s: String): BinaryString = BinaryString(s.getBytes(StandardCharsets.UTF_8))
  implicit def binaryString2String(s: BinaryString): String = new String(s.value, StandardCharsets.UTF_8)
  implicit def byteArray2binaryString(s: Array[Byte]): BinaryString = BinaryString(s)
  implicit def binaryString2byteArray(s: BinaryString): Array[Byte] = s.value
}
|
zalando/play-swagger
|
api/src/main/scala/de/zalando/play/controllers/stringWrappers.scala
|
Scala
|
mit
| 903
|
// Copyright 2017 EPFL DATA Lab (data.epfl.ch)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package squid
package ir
import utils._
import meta.RuntimeUniverseHelpers.sru
sealed abstract class Variance(val asInt: Int) {
  // Variance composition follows sign multiplication: e.g. a contravariant
  // position nested in a contravariant position is covariant (-1 * -1 = 1).
  def * (that: Variance) = Variance(asInt * that.asInt) //(this, that) match {}
  // Single-character form used by toString, matching Scala's +/- markers.
  def symbol = this match {
    case Invariant => "="
    case Covariant => "+"
    case Contravariant => "-"
  }
  override def toString = s"[$symbol]"
}
object Variance {
  /** Maps the integer encoding (-1, 0, 1) back to a variance case.
    * Fix: any other input used to die with an opaque MatchError; it now
    * raises IllegalArgumentException with a descriptive message. Internal
    * callers (`*`) only ever produce values in {-1, 0, 1}. */
  def apply(asInt: Int): Variance = asInt match {
    case 0 => Invariant
    case 1 => Covariant
    case -1 => Contravariant
    case other =>
      throw new IllegalArgumentException(s"Variance encoding must be -1, 0 or 1; got $other")
  }
  /** Reads the declared variance off a Scala reflection type symbol. */
  def of (s: sru.TypeSymbol) =
    if (s.isCovariant) Covariant else if (s.isContravariant) Contravariant else Invariant
}
/** Position is read/write: neither sub- nor supertype may be substituted. */
case object Invariant extends Variance(0)
/** Producer/output position (like the A in `List[+A]`). */
case object Covariant extends Variance(1)
/** Consumer/input position (like the argument of `Function1[-A, +B]`). */
case object Contravariant extends Variance(-1)
|
epfldata/squid
|
core/src/main/scala/squid/ir/Variance.scala
|
Scala
|
apache-2.0
| 1,367
|
object Test extends App {
  // `fold` takes a combiner whose second parameter is by-name (`=> B`);
  // this regression test pins how lambdas are adapted to that shape.
  def fold[A, B](f: (A, => B) => B) = (b: B) => f(null.asInstanceOf[A], b)
  def f[A, B](x: A, y: B): B = y
  // Explicit-lambda and placeholder-lambda forms must both typecheck.
  def bip[A, B] = fold[A, B]((x, y) => f(x, y))
  def bop[A, B] = fold[A, B](f(_, _))
  // these work:
  fold[Int, Int]((x, y) => f(x, y))(0)
  fold[Int, Int](f(_, _))(0)
  // Used to throw a ClassCastException. Since the fix for scala/bug#7899, these issue type errors.
  // fold[Int, Int](f _)(0)
  // fold[Int, Int](f)(0)
}
|
lrytz/scala
|
test/files/run/t7584b.scala
|
Scala
|
apache-2.0
| 463
|
package com.arcusys.valamis.lesson.tincan.model
import com.arcusys.valamis.lesson.model.{ BaseManifest, LessonType }
import com.arcusys.valamis.lesson.model.LessonType.LessonType
import com.arcusys.valamis.model.PeriodTypes
import com.arcusys.valamis.model.PeriodTypes.PeriodType
import org.joda.time.DateTime
/** Manifest describing a Tin Can (xAPI) lesson package.
  * Field meanings follow [[BaseManifest]]; `rerunInterval` of 0 together with
  * `PeriodTypes.UNLIMITED` means no re-run restriction — TODO confirm against
  * BaseManifest's contract. */
case class TincanManifest(
  id: Long,
  title: String,
  summary: Option[String],
  courseId: Option[Int],
  assetRefId: Option[Long] = None,
  visibility: Option[Boolean] = None,
  logo: Option[String] = None,
  isDefault: Boolean,
  passingLimit: Int = 0,
  rerunInterval: Int = 0,
  rerunIntervalType: PeriodType = PeriodTypes.UNLIMITED,
  beginDate: Option[DateTime],
  endDate: Option[DateTime]) extends BaseManifest {
  // Discriminator used by the lesson framework to pick the Tin Can player.
  def getType: LessonType = LessonType.Tincan
}
|
ViLPy/Valamis
|
valamis-tincan-lesson/src/main/scala/com/arcusys/valamis/lesson/tincan/model/TincanManifest.scala
|
Scala
|
lgpl-3.0
| 812
|
package top.myetl
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import top.myetl.lucenerdd.convert.{BeanToDoc, DocToBean}
import top.myetl.lucenerdd.rdd.{IndexReadRDD, LuceneRDD}
import scala.reflect.ClassTag
/**
* Created by pengda on 17/5/12.
*/
package object lucenerdd {
  /** SparkContext function , load LuceneRDD from hdfs file */
  implicit def sparkContextFunctions(sc: SparkContext) = new SparkContextFunctions(sc)
  // Pimp-my-library wrapper: adds `sc.luceneRDD(...)` via the implicit above.
  class SparkContextFunctions(sc: SparkContext) extends Serializable{
    // Builds a LuceneRDD over the index named `tableName`; the underlying
    // IndexReadRDD is cached so repeated queries reuse the opened readers.
    def luceneRDD[T: ClassTag](tableName: String)(docConversion: DocToBean[T]): LuceneRDD[T] = {
      val indexReadRDD = new IndexReadRDD(sc, tableName).cache()
      new LuceneRDD[T](indexReadRDD)(docConversion)
    }
  }
  /** RDD function, save the RDD as Lucene format */
  // NOTE(review): dead (commented-out) write-side API kept for reference;
  // BeanToDoc and RDD imports at the top of the file exist only for it.
  // implicit def sparkRDDFunctions[T : ClassTag](rdd: RDD[T]) = new SparkRDDFunctions[T](rdd)
  //
  // class SparkRDDFunctions[T : ClassTag](rdd: RDD[T]) extends Serializable {
  //
  //   def saveToLucene(tableName: String)(docConversion: BeanToDoc[T]): LuceneWriteRDD[T] = {
  //     new LuceneWriteRDD[T](rdd, tableName)(docConversion)
  //   }
  //
  // }
}
|
myetl/sparkLu
|
src/main/scala/top/myetl/lucenerdd/package.scala
|
Scala
|
apache-2.0
| 1,165
|
package lila.insight
/** An insight query: plot `metric` against `dimension`, restricted by `filters`. */
case class Question[X](
  dimension: Dimension[X],
  metric: Metric,
  filters: List[Filter[_]]
)
/** Restriction of a question to a subset of one dimension's values. */
case class Filter[A](
  dimension: Dimension[A],
  selected: List[A]
) {
  // A filter that selects nothing — or every possible value — filters nothing.
  def isEmpty = selected.isEmpty || selected.sizeIs == Dimension.valuesOf(dimension).size
  import reactivemongo.api.bson._
  // BSON predicate for the aggregation pipeline, built by the dimension.
  def matcher: BSONDocument = Dimension.filtersOf(dimension, selected)
}
|
luanlv/lila
|
modules/insight/src/main/Question.scala
|
Scala
|
mit
| 404
|
package org.sbtidea
import android.AndroidSupport
import java.io.File
import xml.NodeSeq
// cheating for now
import sbt.ScalaInstance
// Empty marker class for the project-domain namespace; appears unused in this
// file — TODO confirm no external references before removing.
class IdeaProjectDomain
object IdeaLibrary {
  /** Dependency scope as written into IDEA module files; `configName` is the
    * scope attribute IDEA expects, absent for the default (compile) scope. */
  sealed abstract class Scope(val configName: Option[String])
  case object CompileScope extends Scope(None)
  case object RuntimeScope extends Scope(Some("RUNTIME"))
  case object TestScope extends Scope(Some("TEST"))
  case object ProvidedScope extends Scope(Some("PROVIDED"))
  object Scope {
    /** Maps an sbt configuration name to a scope; "compile" and anything
      * unrecognized both fall back to the compile scope. */
    def apply(conf: String): Scope =
      conf match {
        case "runtime"  => RuntimeScope
        case "test"     => TestScope
        case "provided" => ProvidedScope
        case _          => CompileScope
      }
  }
}
/** One library entry for the IDEA project: its jar/class roots plus optional
  * javadoc and source attachments. `evictionId` groups versions of the same
  * artifact for eviction handling. */
case class IdeaLibrary(id: String, name: String, evictionId: String, classes: Set[File], javaDocs: Set[File], sources: Set[File]) {
  // Idiom fix: `nonEmpty` instead of the double-negative `!classes.isEmpty`.
  def hasClasses = classes.nonEmpty
  def allFiles = classes ++ sources ++ javaDocs
}
/** A module's reference to a library, together with its dependency scope. */
case class IdeaModuleLibRef(config: IdeaLibrary.Scope, library: IdeaLibrary)
/** Source/resource roots plus the compile output directory for one scope. */
case class Directories(sources: Seq[File], resources: Seq[File], outDir: File) {
  /** Copy with extra source roots appended. */
  def addSrc(moreSources: Seq[File]): Directories =
    Directories(sources ++ moreSources, resources, outDir)
  /** Copy with extra resource roots appended. */
  def addRes(moreResources: Seq[File]): Directories =
    Directories(sources, resources ++ moreResources, outDir)
}
/** An inter-module dependency: the target module's name and its scope. */
case class DependencyProject(name: String, scope: IdeaLibrary.Scope)
/** Everything needed to emit one IDEA module file for an sbt sub-project. */
case class SubProjectInfo(baseDir: File, name: String,
                          dependencyProjects: List[DependencyProject],
                          classpathDeps: Seq[(File, Seq[File])], compileDirs: Directories,
                          testDirs: Directories, libraries: Seq[IdeaModuleLibRef], scalaInstance: ScalaInstance,
                          ideaGroup: Option[String], webAppPath: Option[File], basePackage: Option[String],
                          packagePrefix: Option[String], extraFacets: NodeSeq, scalacOptions: Seq[String],
                          includeScalaFacet: Boolean, androidSupport: Option[AndroidSupport]) {
  // IDEA language-level string, e.g. "Scala 2.9": the full version minus its
  // last ".patch" segment, with a " virtualized" suffix for scala-virtualized.
  lazy val languageLevel: String = {
    val version = scalaInstance.version
    val binaryScalaVersion = version.take(version.lastIndexOf('.'))
    val virtualized = if (version.contains("virtualized")) " virtualized" else ""
    "Scala " + binaryScalaVersion + virtualized
  }
}
/** Top-level project description: all modules plus the de-duplicated libraries. */
case class IdeaProjectInfo(baseDir: File, name: String, childProjects: List[SubProjectInfo], ideaLibs: List[IdeaLibrary])
/** Per-user generation toggles (currently only whether to emit a web facet). */
case class IdeaUserEnvironment(webFacet: Boolean)
/** Project-wide generation settings (JDK, language level, output layout,
  * compiler/FSC integration and library-refresh behavior). */
case class IdeaProjectEnvironment(projectJdkName :String, javaLanguageLevel: String,
                                  includeSbtProjectDefinitionModule: Boolean, projectOutputPath: Option[String],
                                  excludedFolders: Seq[String], compileWithIdea: Boolean, modulePath: String, useProjectFsc: Boolean,
                                  enableTypeHighlighting: Boolean, deleteExistingLibraries: Boolean)
|
mpeltonen/sbt-idea
|
src/main/scala/org/sbtidea/IdeaProjectDomain.scala
|
Scala
|
bsd-3-clause
| 2,913
|
/*
* Copyright 2015 Foundational Development
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package pro.foundev.reporting
/** Runtime settings for a benchmark run: target host, number of repetitions,
  * ingest volume, and whether the launcher is enabled. */
case class RunTimeOptions(host: String, runs: Int, recordsToIngest: Long, enableLauncher: Boolean)
|
rssvihla/datastax_work
|
spark_commons/benchmarks/low_latency_spark/src/main/scala/pro/foundev/reporting/RunTimeOptions.scala
|
Scala
|
apache-2.0
| 766
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.submit.steps
import java.io.File
import io.fabric8.kubernetes.api.model.ContainerBuilder
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.KubernetesUtils
import org.apache.spark.deploy.k8s.submit.KubernetesDriverSpec
/**
* Step that configures the classpath, spark.jars, and spark.files for the driver given that the
* user may provide remote files or files with local:// schemes.
*/
private[spark] class DependencyResolutionStep(
    sparkJars: Seq[String],
    sparkFiles: Seq[String],
    jarsDownloadPath: String,
    filesDownloadPath: String) extends DriverConfigurationStep {
  override def configureDriver(driverSpec: KubernetesDriverSpec): KubernetesDriverSpec = {
    // Rewrite remote/local:// URIs to the paths they will occupy inside the
    // driver container after download.
    val resolvedSparkJars = KubernetesUtils.resolveFileUris(sparkJars, jarsDownloadPath)
    val resolvedSparkFiles = KubernetesUtils.resolveFileUris(sparkFiles, filesDownloadPath)
    // Clone so the caller's SparkConf is not mutated.
    val sparkConf = driverSpec.driverSparkConf.clone()
    if (resolvedSparkJars.nonEmpty) {
      sparkConf.set("spark.jars", resolvedSparkJars.mkString(","))
    }
    if (resolvedSparkFiles.nonEmpty) {
      sparkConf.set("spark.files", resolvedSparkFiles.mkString(","))
    }
    // Classpath entries are plain container-local paths (no URI schemes),
    // exported via ENV_MOUNTED_CLASSPATH only when there is something to add.
    val resolvedClasspath = KubernetesUtils.resolveFilePaths(sparkJars, jarsDownloadPath)
    val resolvedDriverContainer = if (resolvedClasspath.nonEmpty) {
      new ContainerBuilder(driverSpec.driverContainer)
        .addNewEnv()
          .withName(ENV_MOUNTED_CLASSPATH)
          .withValue(resolvedClasspath.mkString(File.pathSeparator))
          .endEnv()
        .build()
    } else {
      driverSpec.driverContainer
    }
    driverSpec.copy(
      driverContainer = resolvedDriverContainer,
      driverSparkConf = sparkConf)
  }
}
|
saltstar/spark
|
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/steps/DependencyResolutionStep.scala
|
Scala
|
apache-2.0
| 2,556
|
/*
* MilmSearch is a mailing list searching system.
*
* Copyright (C) 2013 MilmSearch Project.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 3
* of the License, or any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program.
* If not, see <http://www.gnu.org/licenses/>.
*
* You can contact MilmSearch Project at mailing list
* milm-search-public@lists.sourceforge.jp.
*/
package org.milmsearch.common
import net.liftweb.common.LazyLoggable
import net.liftweb.common.Box
/**
* ログ関連ユーティリティ
*/
object LoggingUtil extends LazyLoggable {
  import scala.util.control.NonFatal

  /** Runs `f`; if a non-fatal exception escapes, logs it at error level and
    * rethrows it. Fix: the original bare `case e =>` caught *everything*
    * including fatal VM errors (OutOfMemoryError, etc.); those now propagate
    * untouched, per the standard NonFatal discipline.
    *
    * @param f computation that may throw
    * @return the value produced by `f`
    */
  def withErrlog[T](f: => T): T =
    try {
      f
    } catch {
      case NonFatal(e) =>
        logger.error(e.getMessage(), e)
        throw e
    }

  /** Runs `f`; logs and deliberately swallows any non-fatal exception
    * (best-effort execution). Fatal errors still propagate.
    *
    * @param f computation that may throw
    */
  def withErrlogQuietly(f: => Unit): Unit = {
    try {
      f
    } catch {
      case NonFatal(e) => logger.error(e.getMessage(), e)
    }
  }
}
|
mzkrelx/milm-search-core
|
src/main/scala/org/milmsearch/common/LoggingUtil.scala
|
Scala
|
gpl-3.0
| 1,689
|
package client.components.mui.policies.statements
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
import shared.responses.policies.Statement
object PolicyStatementList {
  // Props: the statements to render, in display order.
  case class Props(
    statements: List[Statement]
  )
  // Stateless component; State is an empty placeholder required by the builder.
  case class State()
  class Backend($ : BackendScope[Props, State]) {
    // One padded card per statement; the index doubles as the card's key/label.
    def render(p: Props, s: State) = {
      val statementsToRender =
        p.statements.zipWithIndex.map {
          case (statement, index) =>
            <.div(^.className := "card-nested-padded", PolicyStatementCard(index, statement)): VdomElement
        }
      <.div(statementsToRender: _*)
    }
  }
  val component = ScalaComponent
    .builder[Props]("PolicyStatementList")
    .initialState(State())
    .renderBackend[Backend]
    .build
  // Convenience constructor mirroring Props.
  def apply(
    statements: List[Statement]
  ) = component(
    Props(
      statements
    )
  )
}
|
beikern/foulkon-ui
|
client/src/main/scala/client/components/mui/policies/statements/PolicyStatementList.scala
|
Scala
|
apache-2.0
| 890
|
package controllers
import java.net.URLDecoder
import controllers.external.{TVContentShort, TVContentLong}
import models._
import org.joda.time.{DateTime, DateTimeZone}
import org.mockito.Mockito._
import org.scalatest.MustMatchers
import org.scalatest.mock.MockitoSugar
import org.scalatestplus.play.PlaySpec
import play.api.libs.json.Json
import play.api.mvc.SimpleResult
import play.api.test.FakeRequest
import play.api.test.Helpers._
import reactivemongo.bson.BSONObjectID
import utils.DomainBuilder.{TVLongWithTimeZone, TVShortWithTimeZone}
import utils.TimeProvider
import scala.concurrent.Future
/** Unit specification for `TVContentController`.
  *
  * Each scenario runs against a fresh `TVContentSetUpTest` fixture and follows the
  * GIVEN/WHEN/THEN structure: stub the mocked `ContentRepository`, invoke the
  * controller action with a `FakeRequest`, then assert on the HTTP status/body and
  * verify the expected repository interaction.
  *
  * Note: several scenarios pass lower/mixed-case channel, type or provider names
  * but stub/verify the repository with canonical casing — presumably the controller
  * normalises case before querying; confirm against `TVContentController`.
  */
class TVContentControllerSpec extends PlaySpec with MustMatchers {

  //TODO create explicit TVContentLong and TVContentShort per TVContent persisted

  "TVContentController" should {

    // --- all content for a channel (today) ---

    "return OK with empty list if there is no TV content for CHANNEL2 available today" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findDayContentByChannel("CHANNEL2")).thenReturn(Future.successful(Seq()))
      //WHEN
      val contentsResult: Future[SimpleResult] = controller.allContent("CHANNEL2").apply(FakeRequest())
      //THEN
      status(contentsResult) mustBe (OK)
      contentType(contentsResult) mustBe (Some("application/json"))
      val contentsInResponse = contentAsString(contentsResult)
      Nil mustEqual Json.parse(contentsInResponse).as[Seq[TVContentShort]]
      //AND
      verify(tvContentRepository).findDayContentByChannel("CHANNEL2")
    }

    // Lower-case request path; repository is still queried with "CHANNEL1".
    "return all the TV content for a channel1 available today" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findDayContentByChannel("CHANNEL1")).thenReturn(Future.successful(
        Seq(tvProgram1, tvProgram2, tvProgram3, tvProgram4, tvProgram5)))
      //WHEN
      val programResult: Future[SimpleResult] = controller.allContent("channel1").apply(FakeRequest())
      //THEN
      status(programResult) mustBe (OK)
      contentType(programResult) mustBe (Some("application/json"))
      val programInResponse = contentAsString(programResult)
      val tvprograms = Json.parse(programInResponse).as[Seq[TVContentShort]]
      tvprograms must contain(TVShortWithTimeZone(tvProgram1))
      tvprograms must contain(TVShortWithTimeZone(tvProgram2))
      tvprograms must contain(TVShortWithTimeZone(tvProgram3))
      tvprograms must contain(TVShortWithTimeZone(tvProgram4))
      tvprograms must contain(TVShortWithTimeZone(tvProgram5))
      //AND
      verify(tvContentRepository).findDayContentByChannel("CHANNEL1")
    }

    // Channel name containing a URL-encoded space ("CHANNEL+3" -> "CHANNEL 3").
    "return all the TV content for a CHANNEL 3 available today" in new TVContentSetUpTest() {
      //GIVEN
      val channel = URLDecoder.decode("CHANNEL+3", "UTF-8")
      when(tvContentRepository.findDayContentByChannel(channel)).thenReturn(Future.successful(
        Seq(tvProgram6)))
      //WHEN
      val programResult: Future[SimpleResult] = controller.allContent("CHANNEL+3").apply(FakeRequest())
      //THEN
      status(programResult) mustBe (OK)
      contentType(programResult) mustBe (Some("application/json"))
      val programInResponse = contentAsString(programResult)
      val tvprograms = Json.parse(programInResponse).as[Seq[TVContentShort]]
      tvprograms must contain(TVShortWithTimeZone(tvProgram6))
      //AND
      verify(tvContentRepository).findDayContentByChannel(channel)
    }

    // Upper-case variant of the channel1 scenario above.
    "return all the TV content for a CHANNEL1 available today" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findDayContentByChannel("CHANNEL1")).thenReturn(Future.successful(
        Seq(tvProgram1, tvProgram2, tvProgram3, tvProgram4, tvProgram5)))
      //WHEN
      val programResult: Future[SimpleResult] = controller.allContent("CHANNEL1").apply(FakeRequest())
      //THEN
      status(programResult) mustBe (OK)
      contentType(programResult) mustBe (Some("application/json"))
      val programInResponse = contentAsString(programResult)
      val tvprograms = Json.parse(programInResponse).as[Seq[TVContentShort]]
      tvprograms must contain(TVShortWithTimeZone(tvProgram1))
      tvprograms must contain(TVShortWithTimeZone(tvProgram2))
      tvprograms must contain(TVShortWithTimeZone(tvProgram3))
      tvprograms must contain(TVShortWithTimeZone(tvProgram4))
      tvprograms must contain(TVShortWithTimeZone(tvProgram5))
      //AND
      verify(tvContentRepository).findDayContentByChannel("CHANNEL1")
    }

    // --- current content for a channel ---

    "return NOT_FOUND if there is no TV content for CHANNEL2 available now" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findCurrentContentByChannel("CHANNEL2")).thenReturn(Future.successful(None))
      //WHEN
      val contentsResult: Future[SimpleResult] = controller.currentContent("CHANNEL2").apply(FakeRequest())
      //THEN
      status(contentsResult) mustBe (NOT_FOUND)
      val contentsInResponse = contentAsJson(contentsResult).as[NotFoundResponse]
      contentsInResponse mustEqual (NotFoundResponse(s"No TV content at this moment for the channel: CHANNEL2"))
      //AND
      verify(tvContentRepository).findCurrentContentByChannel("CHANNEL2")
    }

    "return the TV content for CHANNEL1 available now" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findCurrentContentByChannel("CHANNEL1")).thenReturn(Future.successful(Some(tvProgram3)))
      //WHEN
      val programsResult: Future[SimpleResult] = controller.currentContent("CHANNEL1").apply(FakeRequest())
      //THEN
      status(programsResult) mustBe (OK)
      contentType(programsResult) mustBe (Some("application/json"))
      val programInResponse = contentAsString(programsResult)
      val tvprogram = Json.parse(programInResponse).as[TVContentLong]
      tvprogram mustBe (TVLongWithTimeZone(tvProgram3))
      //AND
      verify(tvContentRepository).findCurrentContentByChannel("CHANNEL1")
    }

    // --- remaining content for a channel (from now until end of day) ---

    "return the TV content for CHANNEL1 available from now until the end of the day" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findLeftContentByChannel("CHANNEL1")).thenReturn(Future.successful(
        Seq(tvProgram3, tvProgram4, tvProgram5)))
      //WHEN
      val programsResult: Future[SimpleResult] = controller.contentLeft("CHANNEL1").apply(FakeRequest())
      //THEN
      status(programsResult) mustBe (OK)
      contentType(programsResult) mustBe (Some("application/json"))
      val programInResponse = contentAsString(programsResult)
      val tvprograms = Json.parse(programInResponse).as[Seq[TVContentShort]]
      tvprograms must contain(TVShortWithTimeZone(tvProgram3))
      tvprograms must contain(TVShortWithTimeZone(tvProgram4))
      tvprograms must contain(TVShortWithTimeZone(tvProgram5))
      //AND
      verify(tvContentRepository).findLeftContentByChannel("CHANNEL1")
    }

    "return OK with empty list if there is no TV content for CHANNEL2 available from now until the end of the day" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findLeftContentByChannel("CHANNEL2")).thenReturn(Future.successful(
        Seq()))
      //WHEN
      val contentsResult: Future[SimpleResult] = controller.contentLeft("CHANNEL2").apply(FakeRequest())
      //THEN
      status(contentsResult) mustBe (OK)
      contentType(contentsResult) mustBe (Some("application/json"))
      val contentsInResponse = contentAsString(contentsResult)
      Nil mustEqual Json.parse(contentsInResponse).as[Seq[TVContentShort]]
      //AND
      verify(tvContentRepository).findLeftContentByChannel("CHANNEL2")
    }

    // --- content details by ID ---

    "return TV content details for a specific TV Content ID" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findContentByID(tvProgram1.id.get.stringify)).thenReturn(Future.successful(Some(tvProgram1)))
      //WHEN
      val programResult: Future[SimpleResult] = controller.tvContentDetails(tvProgram1.id.get.stringify).apply(FakeRequest())
      //THEN
      status(programResult) mustBe (OK)
      val programInResponse = contentAsString(programResult)
      val tvprogram = Json.parse(programInResponse).as[TVContentLong]
      tvprogram mustBe (TVLongWithTimeZone(tvProgram1))
      //AND
      verify(tvContentRepository).findContentByID(tvProgram1.id.get.stringify)
    }

    "return NOT_FOUND if there is no TV content details for a specific TV Content ID" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findContentByID("noExistID")).thenReturn(Future.successful(None))
      //WHEN
      val contentsResult: Future[SimpleResult] = controller.tvContentDetails("noExistID").apply(FakeRequest())
      //THEN
      status(contentsResult) mustBe (NOT_FOUND)
      val contentsInResponse = contentAsJson(contentsResult).as[NotFoundResponse]
      contentsInResponse mustEqual (NotFoundResponse(s"No TV content details with id: noExistID"))
      //AND
      verify(tvContentRepository).findContentByID("noExistID")
    }

    // --- all content by type and provider (today) ---

    "return all the TV content available today by type SERIES and provider FREEVIEW with upper case" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findDayContentByTypeAndProvider("series", "FREEVIEW")).thenReturn(
        Future.successful(Seq(tvProgram1, tvProgram7)))
      //WHEN
      val programsResult: Future[SimpleResult] = controller.allContentByTypeAndProvider("SERIES", "FREEVIEW").apply(FakeRequest())
      //THEN
      status(programsResult) mustBe (OK)
      val programInResponse = contentAsString(programsResult)
      val tvprograms = Json.parse(programInResponse).as[Seq[TVContentShort]]
      tvprograms mustEqual Seq(
        TVShortWithTimeZone(tvProgram1), TVShortWithTimeZone(tvProgram7))
      //AND
      verify(tvContentRepository).findDayContentByTypeAndProvider("series", "FREEVIEW")
    }

    "return all the TV content available today by type film and provider freeview" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findDayContentByTypeAndProvider("film", "FREEVIEW")).thenReturn(
        Future.successful(Seq(tvProgram3, tvProgram4, tvProgram8)))
      //WHEN
      val programsResult: Future[SimpleResult] = controller.allContentByTypeAndProvider("film", "freeview").apply(FakeRequest())
      //THEN
      status(programsResult) mustBe (OK)
      val programInResponse = contentAsString(programsResult)
      val tvprograms = Json.parse(programInResponse).as[Seq[TVContentShort]]
      tvprograms mustEqual Seq(
        TVShortWithTimeZone(tvProgram3), TVShortWithTimeZone(tvProgram4), TVShortWithTimeZone(tvProgram8))
      //AND
      verify(tvContentRepository).findDayContentByTypeAndProvider("film", "FREEVIEW")
    }

    "return all the TV content available today by type program and provider FREEVIEW" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findDayContentByTypeAndProvider("program", "FREEVIEW")).thenReturn(
        Future.successful(Seq(tvProgram5, tvProgram9)))
      //WHEN
      val programsResult: Future[SimpleResult] = controller.allContentByTypeAndProvider("program", "FREEVIEW").apply(FakeRequest())
      //THEN
      status(programsResult) mustBe (OK)
      val programInResponse = contentAsString(programsResult)
      val tvprograms = Json.parse(programInResponse).as[Seq[TVContentShort]]
      tvprograms mustEqual Seq(
        TVShortWithTimeZone(tvProgram5), TVShortWithTimeZone(tvProgram9))
      //AND
      verify(tvContentRepository).findDayContentByTypeAndProvider("program", "FREEVIEW")
    }

    "return OK with empty list by provider notExist available today" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findDayContentByTypeAndProvider("program", "NOTEXIST")).thenReturn(
        Future.successful(Seq()))
      //WHEN
      val contentsResult: Future[SimpleResult] = controller.allContentByTypeAndProvider("program", "notExist").apply(FakeRequest())
      //THEN
      status(contentsResult) mustBe (OK)
      contentType(contentsResult) mustBe (Some("application/json"))
      val contentsInResponse = contentAsString(contentsResult)
      Nil mustEqual Json.parse(contentsInResponse).as[Seq[TVContentShort]]
      //AND
      verify(tvContentRepository).findDayContentByTypeAndProvider("program", "NOTEXIST")
    }

    // Unknown content type is rejected before the repository is touched.
    "return BAD_REQUEST by type notExist available today" in new TVContentSetUpTest() {
      //WHEN
      val contentsResult: Future[SimpleResult] = controller.allContentByTypeAndProvider("notExist", "freeview").apply(FakeRequest())
      //THEN
      status(contentsResult) mustBe (BAD_REQUEST)
      val contentsInResponse = contentAsJson(contentsResult).as[BadRequestResponse]
      contentsInResponse mustEqual (BadRequestResponse("TV content notExist does not exist"))
    }

    // --- current content by type and provider ---

    "return the TV content for type SERIES and provider FREEVIEW in upper case available now" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findCurrentContentByTypeAndProvider("series", "FREEVIEW")).thenReturn(
        Future.successful(Seq(tvProgram7)))
      //WHEN
      val contentsResult: Future[SimpleResult] = controller.currentContentByTypeAndProvider("SERIES", "FREEVIEW").apply(FakeRequest())
      //THEN
      status(contentsResult) mustBe (OK)
      contentType(contentsResult) mustBe (Some("application/json"))
      val contentInResponse = contentAsString(contentsResult)
      val tvcontents = Json.parse(contentInResponse).as[Seq[TVContentShort]]
      tvcontents mustEqual Seq(TVShortWithTimeZone(tvProgram7))
      //AND
      verify(tvContentRepository).findCurrentContentByTypeAndProvider("series", "FREEVIEW")
    }

    "return the TV content for type film and provider freeview available now" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findCurrentContentByTypeAndProvider("film", "FREEVIEW")).thenReturn(
        Future.successful(Seq(tvProgram3, tvProgram8)))
      //WHEN
      val contentsResult: Future[SimpleResult] = controller.currentContentByTypeAndProvider("film", "freeview").apply(FakeRequest())
      //THEN
      status(contentsResult) mustBe (OK)
      contentType(contentsResult) mustBe (Some("application/json"))
      val contentInResponse = contentAsString(contentsResult)
      val tvcontents = Json.parse(contentInResponse).as[Seq[TVContentShort]]
      tvcontents mustEqual Seq(
        TVShortWithTimeZone(tvProgram3),
        TVShortWithTimeZone(tvProgram8))
      //AND
      verify(tvContentRepository).findCurrentContentByTypeAndProvider("film", "FREEVIEW")
    }

    "return the TV content for type program and provider freeview available now" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findCurrentContentByTypeAndProvider("program", "FREEVIEW")).thenReturn(
        Future.successful(Seq(tvProgram9)))
      //WHEN
      val contentsResult: Future[SimpleResult] = controller.currentContentByTypeAndProvider("program", "freeview").apply(FakeRequest())
      //THEN
      status(contentsResult) mustBe (OK)
      contentType(contentsResult) mustBe (Some("application/json"))
      val contentInResponse = contentAsString(contentsResult)
      val tvcontents = Json.parse(contentInResponse).as[Seq[TVContentShort]]
      tvcontents mustEqual Seq(TVShortWithTimeZone(tvProgram9))
      //AND
      verify(tvContentRepository).findCurrentContentByTypeAndProvider("program", "FREEVIEW")
    }

    "return BAD_REQUEST by type notExist available now" in new TVContentSetUpTest() {
      //WHEN
      val contentsResult: Future[SimpleResult] = controller.currentContentByTypeAndProvider("notExist", "FREEVIEW").apply(FakeRequest())
      //THEN
      status(contentsResult) mustBe (BAD_REQUEST)
      val contentsInResponse = contentAsJson(contentsResult).as[BadRequestResponse]
      contentsInResponse mustEqual (BadRequestResponse("TV content notExist does not exist"))
    }

    // --- remaining content by type and provider (from now until end of day) ---

    "return the TV content for type PROGRAM and provider FREEVIEW available from now until the end of the day" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findLeftContentByTypeAndProvider("program", "FREEVIEW")).thenReturn(
        Future.successful(Seq(tvProgram9)))
      //WHEN
      val programsResult: Future[SimpleResult] = controller.contentLeftByTypeAndProvider("PROGRAM", "FREEVIEW").apply(FakeRequest())
      //THEN
      status(programsResult) mustBe (OK)
      contentType(programsResult) mustBe (Some("application/json"))
      val programInResponse = contentAsString(programsResult)
      val tvprograms = Json.parse(programInResponse).as[Seq[TVContentShort]]
      tvprograms mustEqual Seq(TVShortWithTimeZone(tvProgram9))
      //AND
      verify(tvContentRepository).findLeftContentByTypeAndProvider("program", "FREEVIEW")
    }

    "return the TV content for type series and provider freeview available from now until the end of the day" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findLeftContentByTypeAndProvider("series", "FREEVIEW")).thenReturn(
        Future.successful(Seq(tvProgram7)))
      //WHEN
      val programsResult: Future[SimpleResult] = controller.contentLeftByTypeAndProvider("series", "freeview").apply(FakeRequest())
      //THEN
      status(programsResult) mustBe (OK)
      contentType(programsResult) mustBe (Some("application/json"))
      val programInResponse = contentAsString(programsResult)
      val tvprograms = Json.parse(programInResponse).as[Seq[TVContentShort]]
      tvprograms mustEqual Seq(TVShortWithTimeZone(tvProgram7))
      //AND
      verify(tvContentRepository).findLeftContentByTypeAndProvider("series", "FREEVIEW")
    }

    "return the TV content for type FILM and provider freeview available from now until the end of the day" in new TVContentSetUpTest() {
      //GIVEN
      when(tvContentRepository.findLeftContentByTypeAndProvider("film", "FREEVIEW")).thenReturn(
        Future.successful(Seq(tvProgram8)))
      //WHEN
      val programsResult: Future[SimpleResult] = controller.contentLeftByTypeAndProvider("FILM", "freeview").apply(FakeRequest())
      //THEN
      status(programsResult) mustBe (OK)
      contentType(programsResult) mustBe (Some("application/json"))
      val programInResponse = contentAsString(programsResult)
      val tvprograms = Json.parse(programInResponse).as[Seq[TVContentShort]]
      tvprograms mustEqual Seq(TVShortWithTimeZone(tvProgram8))
      //AND
      verify(tvContentRepository).findLeftContentByTypeAndProvider("film", "FREEVIEW")
    }

    "return BAD_REQUEST by type notExist from now until the end of the day" in new TVContentSetUpTest() {
      //WHEN
      val contentsResult: Future[SimpleResult] = controller.contentLeftByTypeAndProvider("notExist", "FREEVIEW").apply(FakeRequest())
      //THEN
      status(contentsResult) mustBe (BAD_REQUEST)
      val contentsInResponse = contentAsJson(contentsResult).as[BadRequestResponse]
      contentsInResponse mustEqual (BadRequestResponse("TV content notExist does not exist"))
    }
  }
}
/** Shared fixture for the controller spec: a fixed reference time, nine sample
  * `TVContent` entries spanning channels/providers/content kinds, a mocked
  * repository and a controller wired against them.
  */
trait TVContentSetUpTest extends MockitoSugar {

  // Fixed "now" so the relative offsets (minusHours/plusHours) below are deterministic.
  val fakeNow = new DateTime(2014, 4, 4, 10, 0, 0, DateTimeZone.forID("UTC"))

  // CHANNEL1 series, already finished (3h..2h ago), on FREEVIEW and SKY.
  val tvProgram1 = TVContent("CHANNEL1", List("FREEVIEW", "SKY"), fakeNow.minusHours(3), fakeNow.minusHours(2), None,
    Some(Series("serie1", Some(Episode(Some("ep1"), None, None, None, None)), List(), List(), List(), List(), List(), None, None, None, Some("plotS1"), None, None)), None, None,
    Some(BSONObjectID.generate))
  // CHANNEL1 series, finished (2h..1h ago), SKY only.
  val tvProgram2 = TVContent("CHANNEL1", List("SKY"), fakeNow.minusHours(2), fakeNow.minusHours(1), None,
    Some(Series("serie1", Some(Episode(Some("ep1"), None, None, None, None)), List(), List(), List(), List(), List(), None, None, None, Some("plotS1"), None, None)), None, None,
    Some(BSONObjectID.generate))
  // CHANNEL1 film, currently on air (started 1h ago, ends in 1h).
  val tvProgram3 = TVContent("CHANNEL1", List("FREEVIEW", "SKY"), fakeNow.minusHours(1), fakeNow.plusHours(1), None,
    None,
    Some(Film("program1", List(), List(), List(), List(), List(), None, None, None, Some("plotF1"), None, None)),
    None,
    Some(BSONObjectID.generate))
  // CHANNEL1 film, upcoming (in 1h..3h).
  val tvProgram4 = TVContent("CHANNEL1", List("FREEVIEW", "SKY"), fakeNow.plusHours(1), fakeNow.plusHours(3), None,
    None,
    Some(Film("program1", List(), List(), List(), List(), List(), None, None, None, Some("plotE1"), None, None)),
    None,
    Some(BSONObjectID.generate))
  // CHANNEL1 plain program, upcoming (in 3h..4h).
  val tvProgram5 = TVContent("CHANNEL1", List("FREEVIEW", "SKY"), fakeNow.plusHours(3), fakeNow.plusHours(4), None,
    None,
    None,
    Some(Program("p5", Some("d5"))),
    Some(BSONObjectID.generate))
  // "CHANNEL 3" (space in name) program, upcoming, SKY only — used by the URL-decoding test.
  val tvProgram6 = TVContent("CHANNEL 3", List("SKY"), fakeNow.plusHours(3), fakeNow.plusHours(5), None,
    None,
    None,
    Some(Program("p6", Some("d6"))),
    Some(BSONObjectID.generate))
  // CHANNEL4 series, currently on air.
  val tvProgram7 = TVContent("CHANNEL4", List("FREEVIEW", "SKY"), fakeNow.minusHours(1), fakeNow.plusHours(2), None,
    Some(Series("serie1", Some(Episode(Some("ep1"), None, None, None, None)), List(), List(), List(), List(), List(), None, None, None, Some("plotS1"), None, None)),
    None,
    None,
    Some(BSONObjectID.generate))
  // CHANNEL5 film, currently on air.
  val tvProgram8 = TVContent("CHANNEL5", List("FREEVIEW", "SKY"), fakeNow.minusHours(1), fakeNow.plusHours(2), None,
    None,
    Some(Film("program1", List(), List(), List(), List(), List(), None, None, None, Some("plotF1"), None, None)),
    None,
    Some(BSONObjectID.generate))
  // CHANNEL5 plain program, long-running (started 4h ago, ends in 5h).
  val tvProgram9 = TVContent("CHANNEL5", List("FREEVIEW", "SKY"), fakeNow.minusHours(4), fakeNow.plusHours(5), None,
    None,
    None,
    Some(Program("p9", Some("d9"))),
    Some(BSONObjectID.generate))

  val tvContentRepository = mock[ContentRepository]

  implicit val hostUnderTest: String = "http://beta.tvlive.io"
  // NOTE(review): this returns the *actual* current time (in UTC), not fakeNow —
  // tests therefore only rely on the mocked repository, not on time arithmetic;
  // confirm this is intentional.
  implicit val timeUnderTest: TimeProvider = new TimeProvider {
    override def currentDate(): DateTime = new DateTime(DateTimeZone.forID("UTC"))
  }

  // Controller under test, wired with the mocked repository and the implicits above.
  class App extends TVContentController {
    override val contentRepository = tvContentRepository
    override implicit val host: String = hostUnderTest
    override implicit val time: TimeProvider = timeUnderTest
  }

  val controller = new App
}
|
tvlive/tv-api
|
test/unit/controllers/TVContentControllerSpec.scala
|
Scala
|
apache-2.0
| 22,132
|
package bad.robot.radiate.teamcity
import java.net.URL
import bad.robot.radiate.Hypermedia
import bad.robot.radiate.UrlSyntax._
import bad.robot.radiate.config.Authorisation
import bad.robot.radiate.teamcity.TeamCityEndpoints.buildsEndpointFor
/** Builds absolute TeamCity REST URLs from the server's base URL.
  *
  * @param baseUrl the root URL of the TeamCity server
  */
case class TeamCityUrl(baseUrl: URL) {

  /** Resolve a hypermedia link relative to the server base URL. */
  def urlFor(endpoint: Hypermedia) = baseUrl / endpoint.href

  /** Resolve the builds endpoint (which depends on the authorisation mode)
    * and append the given build locator to it.
    */
  def urlFor(locator: BuildLocatorBuilder, authorisation: Authorisation) = {
    val endpoint = buildsEndpointFor(authorisation)
    val path = endpoint.href + locator.build
    baseUrl / path
  }
}
|
tobyweston/radiate
|
src/main/scala/bad/robot/radiate/teamcity/TeamCityUrl.scala
|
Scala
|
apache-2.0
| 501
|
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @builder scalation.util.bld.BldMM_Array
* @version 1.3
* @date Thu Sep 24 14:03:17 EDT 2015
* @see LICENSE (MIT style license file).
*
* @see www.programering.com/a/MDO2cjNwATI.html
*/
package scalation
package util
import java.io.{RandomAccessFile, Serializable}
import java.lang.Cloneable
import java.nio.{ByteBuffer, MappedByteBuffer}
import java.nio.channels.FileChannel
import scala.collection._
import scala.collection.mutable.{AbstractSeq, IndexedSeq}
import scalation.math.Complex
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MM_ArrayC` class provides support for large, persistent arrays via memory
* mapped files. Currently, the size of a memory mapped array is limited to
* 2GB (2^31), since indices are signed 32-bit integers.
* FIX: use Long for indices and multiple files to remove 2GB limitation
* @see https://github.com/xerial/larray/blob/develop/README.md
* @param _length the number of elements in the `mem_mapped` array
*/
final class MM_ArrayC (_length: Int)
      extends AbstractSeq [Complex] with IndexedSeq [Complex] with Serializable with Cloneable
{
    import MM_ArrayC.{_count, E_SIZE}

    /** The number of bytes in this memory mapped file
     *  NOTE(review): Int multiplication — overflows for _length > 2^27; the class
     *  doc already flags the 2GB limit.
     */
    val nBytes = _length * E_SIZE

    /** The file name for this memory mapped file (unique via the shared counter)
     */
    val fname = { _count += 1; "mem_mapped_" + _count }

    /** The random/direct access file backing this array (created under MEM_MAPPED_DIR)
     */
    private val raf = new RandomAccessFile (MEM_MAPPED_DIR + fname, "rw");

    /** The random access file mapped into memory in READ_WRITE mode
     */
    private val mraf = raf.getChannel ().map (FileChannel.MapMode.READ_WRITE, 0, nBytes);

    /** The range of index positions for 'this' memory mapped array
     */
    private val range = 0 until _length

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the size of elements in the memory mapped file.
     */
    def length: Int = _length

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Get the `Complex` stored at position 'index' (real part at byte offset
     *  E_SIZE * index, imaginary part 8 bytes later).
     *  @param index  the index position in the file
     */
    def apply (index: Int): Complex =
    {
        Complex (mraf.getDouble (E_SIZE * index), mraf.getDouble (E_SIZE * index + 8))
    } // apply

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Put the `Complex` 'x' at position 'index' (two consecutive doubles).
     *  @param index  the index position in the file
     *  @param x      the complex value to put
     */
    def update (index: Int, x: Complex)
    {
        mraf.putDouble (E_SIZE * index, x.val1); mraf.putDouble (E_SIZE * index + 8, x.val2)
    } // update

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Fold left through 'this' array.
     *  @param s0  the initial value
     *  @param f   the function to apply
     */
    def foldLeft (s0: Complex)(f: (Complex, Complex) => Complex): Complex =
    {
        var s = s0
        for (i <- range) s = f (s, apply(i))
        s
    } // foldLeft

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Map elements of 'this' array by applying the function 'f'
     *  (allocates a new memory mapped file for the result).
     *  @param f  the function to be applied
     */
    def map (f: Complex => Complex): MM_ArrayC =
    {
        val c = new MM_ArrayC (_length)
        for (i <- range) c(i) = f(apply(i))
        c
    } // map

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Slice 'this' starting at 'from' and continuing until 'till'.
     *  @param from  the starting index for the slice (inclusive)
     *  @param till  the ending index for the slice (exclusive)
     */
    override def slice (from: Int, till: Int): MM_ArrayC =
    {
        MM_ArrayC (super.slice (from, till))
    } // slice

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Determine whether element 'x' is contained in this array.
     *  @param x  the element sought
     */
    def contains (x: Complex): Boolean =
    {
        // nonlocal return from inside the for-comprehension lambda — works in
        // Scala 2 but deprecated in Scala 3; consider range.exists if migrating
        for (i <- range if x == apply(i)) return true
        false
    } // contains

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create an in-memory immutable sequence copy of 'this' array.
     */
    def deep: immutable.IndexedSeq [Complex] = for (i <- range) yield apply(i)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Close the memory mapped file (the mapped buffer itself stays valid until GC).
     */
    def close () { raf.close () }

} // MM_ArrayC class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MM_ArrayC` companion object provides factory methods for the `MM_ArrayC`
* class.
*/
object MM_ArrayC
{
    /** The number of bytes required to store a `Complex` (two 8-byte doubles)
     */
    private val E_SIZE = 16

    /** The counter for ensuring file names are unique
     */
    var _count = 0

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a memory mapped array from one or more values (repeated values `Complex*`).
     *  @param x   the first `Complex` number
     *  @param xs  the rest of the `Complex` numbers
     */
    def apply (x: Complex, xs: Complex*): MM_ArrayC =
    {
        val c = new MM_ArrayC (1 + xs.length)
        c(0) = x
        // FIX: was 'for (i <- 0 until c.length) c(i+1) = xs(i)', which read
        // xs(xs.length) and wrote c(c.length) — out of bounds on every call
        // (even with xs empty). Iterate only over the indices of xs.
        for (i <- xs.indices) c(i+1) = xs(i)
        c
    } // apply

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a memory mapped array from a sequence of `Complex` numbers.
     *  @param xs  the sequence of `Complex` numbers
     */
    def apply (xs: Seq [Complex]): MM_ArrayC =
    {
        _count += 1                         // note: the constructor bumps _count again; numbers stay unique
        val c = new MM_ArrayC (xs.length)
        for (i <- 0 until c.length) c(i) = xs(i)
        c
    } // apply

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a memory mapped array with 'n' (uninitialized) elements.
     *  @param n  the number of elements
     */
    def ofDim (n: Int): MM_ArrayC =
    {
        _count += 1                         // note: the constructor bumps _count again; numbers stay unique
        new MM_ArrayC (n)
    } // ofDim

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Concatenate memory mapped arrays 'a' and 'b' into a new array.
     *  @param a  the first array
     *  @param b  the second array, appended after 'a'
     */
    def concat (a: MM_ArrayC, b: MM_ArrayC): MM_ArrayC =
    {
        val (na, nb) = (a.length, b.length)
        val c = new MM_ArrayC (na + nb)
        for (i <- 0 until na) c(i) = a(i)
        for (i <- 0 until nb) c(i + na) = b(i)
        c
    } // concat

} // MM_ArrayC object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MM_ArrayCTest` is used to test the `MM_ArrayC` class by writing and
 *  reading back a small memory mapped array.
 *  > run-main scalation.util.MM_ArrayCTest
 */
object MM_ArrayCTest extends App
{
    val n = 100 // number of elements
    val mraf = new MM_ArrayC (n) // memory mapped array

    // Write into the Memory Mapped File
    // (relies on an implicit Int => Complex conversion from scalation.math — TODO confirm)
    for (i <- 0 until n) mraf(i) = 2 * i
    println ("\nWRITE: memory mapped file '" + mraf.fname + "' now has " + mraf.nBytes + " bytes")

    // Read from the Memory Mapped File
    println ()
    // for (i <- 0 until n) print (mraf(i) + " ")
    println (mraf.deep)
    println ("READ: memory mapped file '" + mraf.fname + "' completed.")

    mraf.close ()

} // MM_ArrayCTest object
|
NBKlepp/fda
|
scalation_1.3/scalation_mathstat/src/main/scala/scalation/util/MM_ArrayC.scala
|
Scala
|
mit
| 7,461
|
package com.twitter.finagle.service
import com.twitter.util._
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito.{times, verify, when}
import org.mockito.{Matchers, Mockito}
import org.mockito.Matchers._
import com.twitter.finagle.Service
@RunWith(classOf[JUnitRunner])
/** Tests that `RefcountedService` defers the underlying service's `close()`
  * until all outstanding requests have completed.
  */
class RefcountedServiceTest extends FunSuite with MockitoSugar {

  // Fresh-per-test fixture: a mocked service whose single dispatch is controlled
  // by `promise`, wrapped in a spied RefcountedService.
  class PoolServiceWrapperHelper {
    val service = mock[Service[Any, Any]]
    when(service.close(any)) thenReturn Future.Done
    val promise = new Promise[Any]
    when(service(Matchers.any)) thenReturn promise
    val wrapper = Mockito.spy(new RefcountedService[Any, Any](service))
  }

  test("PoolServiceWrapper should call release() immediately when no requests have been made") {
    val h = new PoolServiceWrapperHelper
    import h._
    verify(service, times(0)).close(any)
    wrapper.close()
    // with no requests in flight, close propagates straight to the underlying service
    verify(service).close(any)
  }

  test("PoolServiceWrapper should call release() after pending request finishes") {
    val h = new PoolServiceWrapperHelper
    import h._
    val f = wrapper(123)
    assert(!f.isDefined)
    verify(service)(123)
    wrapper.close()
    // close must be deferred while the request is still pending...
    verify(service, times(0)).close(any)
    promise() = Return(123)
    // ...and fire once the pending request completes
    verify(service).close(any)
    assert(f.isDefined)
    assert(Await.result(f) === 123)
  }
}
|
latur19318/finagle
|
finagle-core/src/test/scala/com/twitter/finagle/service/RefcountedServiceTest.scala
|
Scala
|
apache-2.0
| 1,401
|
package com.tribbloids.spookystuff.parsing
// TODO: generalise!
// Marker trait for phase vectors used by the backtracking parser.
// TODO: generalise! (see file-level TODO)
trait PhaseVec

object PhaseVec {

  // Identity phase vector.
  case object Eye extends PhaseVec

  // Phase vector with no effect.
  case object NoOp extends PhaseVec

  // Skip `length` input positions.
  case class Skip(length: Int) extends PhaseVec {

    // Exploratory implementation kept for reference — not yet wired in:
    //  def next(bm: BacktrackingManager#LinearSearch): Option[Like] = {
    //
    //    skipOpt match {
    //      case Some(skip) => bm.length_+=(skip + 1)
    //      case None => bm.transitionQueueII += 1 //TODO: is it really useful?
    //    }
    //    None
    //  }
  }

  // Tracks nesting depth `v`.
  case class Depth(v: Int) extends PhaseVec {}

  // Exploratory transition trait kept for reference — not yet wired in:
  //  trait Transition extends Like {
  //
  //    def next(bm: BacktrackingManager#LinearSearch): Option[Like] = {
  //
  //      bm.transitionQueueII += 1
  //      bm.currentOutcome = transition._1 -> nextResult
  //      return transition._2 -> nextResult.nextPhaseVecOpt.asInstanceOf[PhaseVec]
  //    }
  //
  //  }
}
|
tribbloid/spookystuff
|
mldsl/src/main/scala/com/tribbloids/spookystuff/parsing/PhaseVec.scala
|
Scala
|
apache-2.0
| 898
|
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.