code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package io.buoyant.linkerd.protocol.http
import com.twitter.finagle.http.{Method, Request}
import com.twitter.finagle.{Dtab, Path}
import io.buoyant.config.Parser
import io.buoyant.linkerd.RouterConfig
import io.buoyant.linkerd.protocol.{HttpConfig, HttpInitializer}
import io.buoyant.router.Http
import io.buoyant.router.RoutingFactory.IdentifiedRequest
import io.buoyant.test.Awaits
import org.scalatest.FunSuite
class HttpConfigTest extends FunSuite with Awaits {

  /** Deserializes a YAML router config into an HttpConfig, with the
    * method-and-host and path identifier plugins registered. */
  def parse(yaml: String): HttpConfig = {
    val mapper = Parser.objectMapper(
      yaml,
      Iterable(Seq(HttpInitializer), Seq(MethodAndHostIdentifierInitializer, PathIdentifierInitializer)))
    mapper.readValue[RouterConfig](yaml).asInstanceOf[HttpConfig]
  }

  // Resolves `req` through the identifier configured on `config`, rooted at
  // /svc with an empty Dtab, and yields the identified destination path.
  private def identifiedPath(config: HttpConfig, req: Request): Path = {
    val identifier = config.routerParams[Http.param.HttpIdentifier]
      .id(Path.read("/svc"), () => Dtab.empty)
    await(identifier(req)).asInstanceOf[IdentifiedRequest[Request]].dst.path
  }

  // A GET /one/two/three request, optionally carrying a Host header.
  private def request(host: Option[String] = None): Request = {
    val req = Request(Method.Get, "/one/two/three")
    host.foreach(h => req.host = h)
    req
  }

  test("parse config") {
    val config = parse(
      """
        |protocol: http
        |httpAccessLog: access.log
        |identifier:
        | kind: io.l5d.methodAndHost
        |maxChunkKB: 8
        |maxHeadersKB: 8
        |maxInitialLineKB: 4
        |maxRequestKB: 5120
        |maxResponseKB: 5120
        |servers:
        |- port: 5000
      """.stripMargin)
    assert(config.maxChunkKB.get == 8)
    assert(config.maxHeadersKB.get == 8)
    assert(config.maxInitialLineKB.get == 4)
    assert(config.maxRequestKB.get == 5120)
    assert(config.maxResponseKB.get == 5120)
  }

  test("default identifier") {
    // With no identifier configured, requests are identified by Host header.
    val config = parse(
      """
        |protocol: http
        |servers:
        |- port: 5000
      """.stripMargin)
    assert(
      identifiedPath(config, request(host = Some("host.com"))) ==
        Path.read("/svc/host.com"))
  }

  test("single identifier") {
    // methodAndHost identifies by HTTP version, method and Host header.
    val config = parse(
      """
        |protocol: http
        |identifier:
        | kind: io.l5d.methodAndHost
        |servers:
        |- port: 5000
      """.stripMargin)
    assert(
      identifiedPath(config, request(host = Some("host.com"))) ==
        Path.read("/svc/1.1/GET/host.com"))
  }

  test("identifier list") {
    // The first identifier in the list that succeeds wins.
    val config = parse(
      """
        |protocol: http
        |identifier:
        |- kind: io.l5d.methodAndHost
        |- kind: io.l5d.path
        |servers:
        |- port: 5000
      """.stripMargin)
    assert(
      identifiedPath(config, request(host = Some("host.com"))) ==
        Path.read("/svc/1.1/GET/host.com"))
  }

  test("identifier list with fallback") {
    // Without a Host header, methodAndHost cannot identify and the request
    // falls through to the path identifier.
    val config = parse(
      """
        |protocol: http
        |identifier:
        |- kind: io.l5d.methodAndHost
        |- kind: io.l5d.path
        |servers:
        |- port: 5000
      """.stripMargin)
    assert(identifiedPath(config, request()) == Path.read("/svc/one"))
  }
}
| denverwilliams/linkerd | linkerd/protocol/http/src/test/scala/io/buoyant/linkerd/protocol/http/HttpConfigTest.scala | Scala | apache-2.0 | 3,867 |
package client.logger
import scala.annotation.elidable
import scala.annotation.elidable._
/**
 * Client-side logging interface. Plain logging methods are elidable so a
 * production build can strip them entirely; the generic overloads below add
 * Kestrel-style pass-through logging.
 */
trait Logger {
  /*
   * Use @elidable annotation to completely exclude functions from the compiler generated byte-code based on
   * the specified level. In a production build most logging functions will simply disappear with no runtime
   * performance penalty.
   *
   * Specify level as a compiler parameter
   * > scalac -Xelide-below INFO
   */
  @elidable(FINEST) def trace(msg: String, e: Exception): Unit
  @elidable(FINEST) def trace(msg: String): Unit
  @elidable(FINE) def debug(msg: String, e: Exception): Unit
  @elidable(FINE) def debug(msg: String): Unit
  @elidable(INFO) def info(msg: String, e: Exception): Unit
  @elidable(INFO) def info(msg: String): Unit
  @elidable(WARNING) def warn(msg: String, e: Exception): Unit
  @elidable(WARNING) def warn(msg: String): Unit
  @elidable(SEVERE) def error(msg: String, e: Exception): Unit
  @elidable(SEVERE) def error(msg: String): Unit
  @elidable(SEVERE) def fatal(msg: String, e: Exception): Unit
  @elidable(SEVERE) def fatal(msg: String): Unit

  /** Starts forwarding log events to the server at the given URL. */
  def enableServerLogging(url: String): Unit

  /** Stops forwarding log events to the server. */
  def disableServerLogging(): Unit

  /* Kestrel-like calls to avoid creating local variables in client code where the local variables would be there
   * solely to carry the value past the log statement */
  //TODO just translate levels in Scala to avoid all this duplication?
  // Each overload logs `msg` combined with `ft(t)` at its level, then returns
  // `t` unchanged so the call can be embedded in an expression.
  //def trace[T](t: T, msg: String, e: Exception, ft: T => String = empty): T = {trace(msg, e); t}
  def trace[T](t: T, msg: String, ft: T => String = empty): T = {trace(combine(t, msg, ft)); t}
  //def debug[T](t: T, msg: String, e: Exception, ft: T => String = empty): T = {debug(combine(t, msg, ft), e); t}
  def debug[T](t: T, msg: String, ft: T => String = empty): T = {debug(combine(t, msg, ft)); t}
  //def info[T](t: T, msg: String, e: Exception, ft: T => String = empty): T = {info(combine(t, msg, ft), e); t}
  def info[T](t: T, msg: String, ft: T => String = empty): T = {info(combine(t, msg, ft)); t}
  //def warn[T](t: T, msg: String, e: Exception, ft: T => String = empty): T = {warn(combine(t, msg, ft), e); t}
  def warn[T](t: T, msg: String, ft: T => String = empty): T = {warn(combine(t, msg, ft)); t}
  //def error[T](t: T, msg: String, e: Exception, ft: T => String = empty): T = {error(combine(t, msg, ft), e); t}
  def error[T](t: T, msg: String, ft: T => String = empty): T = {error(combine(t, msg, ft)); t}
  //def fatal[T](t: T, msg: String, e: Exception, ft: T => String = empty): T = {fatal(combine(t, msg, ft), e); t}
  def fatal[T](t: T, msg: String, ft: T => String = empty): T = {fatal(combine(t, msg, ft)); t}

  // Joins `msg` with the rendering of `t`, inserting a single space only when
  // both parts are non-blank.
  private def combine[T](t: T, msg: String, ft: T => String) = {
    val tPart = ft(t)
    msg + (if (msg.trim.nonEmpty && tPart.trim.nonEmpty) " " else "") + tPart
  }

  // Default renderer: the value contributes nothing to the log line.
  private def empty: Any => String = _ => ""
}
/**
 * Factory for [[Logger]] instances backed by the log4javascript library.
 * Appenders are shared so every logger writes to the same console / popup.
 */
object LoggerFactory {
  // No-op placeholder retained for source compatibility within the package.
  private[logger] def createLogger(name: String) = {}

  // Shared appenders, created lazily on first logger construction.
  lazy val consoleAppender = new BrowserConsoleAppender
  lazy val popupAppender = new PopUpAppender

  /**
   * Create a logger that outputs to browser console
   */
  def getLogger(name: String): Logger = {
    val nativeLogger = Log4JavaScript.log4javascript.getLogger(name)
    nativeLogger.addAppender(consoleAppender)
    // FIX: the identifier here had been corrupted to "Level42"; log4javascript
    // exposes its threshold constants on `Level`.
    consoleAppender.setThreshold(Level.DEBUG)
    new L4JSLogger(nativeLogger)
  }

  /**
   * Create a logger that outputs to a separate popup window
   */
  def getPopUpLogger(name: String): Logger = {
    val nativeLogger = Log4JavaScript.log4javascript.getLogger(name)
    nativeLogger.addAppender(popupAppender)
    new L4JSLogger(nativeLogger)
  }
}
| aholland/autowire-circe | client/src/main/scala/client/logger/LoggerFactory.scala | Scala | unlicense | 3,572 |
/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.webapi
import org.scalatest.FunSuite
class RequestIdSuite extends FunSuite {

  test("next") {
    // Consecutively generated ids must be distinct.
    val first = RequestId.next
    val second = RequestId.next
    assert(first !== second)
  }

  test("next with parent") {
    // A child id differs from its parent but records it.
    val parent = RequestId.next
    val child = RequestId.next(parent)
    assert(parent !== child)
    assert(parent === child.parent.get)
  }

  test("apply") {
    // Round-trip through the string representation.
    val original = RequestId.next
    val reparsed = RequestId(original.toString)
    assert(original === reparsed)
  }

  test("apply with parent") {
    // The parent survives a string round-trip of the child.
    val parent = RequestId.next
    val child = RequestId.next(parent)
    val reparsed = RequestId(child.toString)
    assert(child === reparsed)
  }

  test("retry id") {
    // A retry keeps parent and name but bumps the attempt counter.
    val original = RequestId.next
    val retried = original.retryId
    assert(original !== retried)
    assert(original.parent === retried.parent)
    assert(original.name === retried.name)
    assert(original.attempt < retried.attempt)
  }
}
| rspieldenner/atlas | atlas-webapi/src/test/scala/com/netflix/atlas/webapi/RequestIdSuite.scala | Scala | apache-2.0 | 1,454 |
package heist
package interpreter
import scala.collection.JavaConversions._
import com.google.api.client.auth.oauth2.{TokenResponse, Credential}
import com.google.gdata.client.spreadsheet._
import com.google.gdata.data.spreadsheet._
import com.google.gdata.util._
/**
 * Interprets the free `SheetOp` algebra against the live Google Spreadsheets
 * API, authenticating every call with the supplied OAuth2 credential.
 */
case class GoogleClientInterpreter(credentials: Credential) {
  import spreadsheet._
  import cats.{Id, ~>}
  // Prefer explicit converters over the deprecated implicit
  // scala.collection.JavaConversions imported at the top of the file.
  import scala.collection.JavaConverters._

  /** Spreadsheet service client, built once; OAuth2 credentials attached. */
  lazy val sheetService = {
    val ss = new SpreadsheetService("Wibble")
    ss.setOAuth2Credentials(credentials)
    ss
  }

  /** Runs a free SheetOp program by folding it through the impure compiler. */
  def run[T](prog: FreeSheetOp[T]): T =
    prog.foldMap(impureCompiler)

  /** Natural transformation executing each SheetOp eagerly (Id context),
    * performing real network calls as a side effect. */
  def impureCompiler = new (SheetOp ~> Id) {
    def apply[A](op: SheetOp[A]): Id[A] = op match {
      case OpenSheet(name, withSheet) =>
        val url = new java.net.URL("https://spreadsheets.google.com/feeds/spreadsheets/private/full")
        val query = new SpreadsheetQuery(url)
        query.setTitleQuery(name)
        val feed = sheetService.query(query, classOf[SpreadsheetFeed])
        // FIX: was a bare `.head`, which threw an opaque NoSuchElementException
        // when no spreadsheet matched the title query. Still silently picks
        // the first match when several spreadsheets share the title.
        val entry: SpreadsheetEntry = feed.getEntries.asScala.headOption
          .getOrElse(sys.error(s"No spreadsheet found with title '$name'"))
        withSheet(Sheet(entry))

      case OpenWorksheet(sheet, index, withWork) =>
        // getEntries returns a java.util.List, so positional access is direct.
        val worksheet =
          sheetService.getFeed(sheet.entry.getWorksheetFeedUrl(), classOf[WorksheetFeed])
            .getEntries()
            .get(index)
        val listFeedUrl = worksheet.getListFeedUrl()
        val listFeed = sheetService.getFeed(listFeedUrl, classOf[ListFeed])
        withWork(Worksheet(listFeed))

      case Rows(worksheet: Worksheet, withRows) =>
        withRows(worksheet.feed.getEntries.asScala.map(SingleRow.apply))

      case Cells(rows: Seq[SingleRow], withRow) =>
        // Flatten every row into its cell values, one value per custom tag.
        val values = for {
          row <- rows
          tag <- row.row.getCustomElements.getTags.asScala.toSeq
        } yield row.row.getCustomElements.getValue(tag)
        withRow(values)
    }
  }
}
package spray.servlet31
import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger}
import javax.servlet.{WriteListener, AsyncContext}
import scala.concurrent.duration.Duration
import akka.io.Tcp
import spray.http._
import akka.actor.{ActorSystem, ActorRef}
import java.io.{ByteArrayInputStream, IOException}
import javax.servlet.http.HttpServletResponse
import akka.spray.UnregisteredActorRef
import scala.util.control.NonFatal
import akka.event.LoggingAdapter
import spray.servlet.ConnectorSettings
import java.util.concurrent.ConcurrentLinkedQueue
import scala.concurrent.Future
import scala.util.Try
import scala.util.Failure
import spray.http.ChunkedResponseStart
import akka.actor.UnhandledMessage
import spray.http.HttpResponse
import scala.util.Success
import spray.http.HttpRequest
import spray.http.SetTimeoutTimeout
import spray.http.Timedout
import spray.http.SetRequestTimeout
/**
 * Actor that handles writing to the response stream, hooking to {@link javax.servlet.WriteListener }
 * @param system actorSystem that will contain this actor.
 * @param log facade to log events.
 * @param settings spray servlet connection settings.
 * @param asyncContext context to obtain the response OutputStream.
 * @param requestString friendly string that represents the request. Used for logging.
 * @param futureRequest Once completed, will hold the request data (or a failure)
 * @param serviceActor spray routing to pass the request (once completed) and wait for any response.
 */
private[servlet31] class Responder(system: ActorSystem, log: LoggingAdapter, settings: ConnectorSettings,
                                   asyncContext: AsyncContext, requestString: String, futureRequest: Future[HttpRequest],
                                   serviceActor: ActorRef) extends UnregisteredActorRef(system) {

  // Response life-cycle state machine:
  // OPEN -> COMPLETED (single HttpResponse) or
  // OPEN -> STARTED (ChunkedResponseStart) -> COMPLETED (ChunkedMessageEnd).
  // Constants and mutable state used when dealing with chunks of data. Copied from Spray Servlet 3.0
  final val OPEN = 0
  final val STARTED = 1
  final val COMPLETED = 2
  val state = new AtomicInteger(OPEN)

  /**
   * Will contain the request once its future is completed. Maybe it should be Atomic.
   */
  private var theRequest: Option[HttpRequest] = None

  /**
   * @return a friendly string that represents the request. Used for logging.
   */
  private def requestStringForLog: String = theRequest.map(_.toString).getOrElse(requestString)

  // Pending writes: (payload, action to run after the payload is fully
  // flushed, response description used only for logging).
  private val queue: ConcurrentLinkedQueue[(ByteArrayInputStream, PostProcessMessage, String)] =
    new ConcurrentLinkedQueue[(ByteArrayInputStream, PostProcessMessage, String)]

  /**
   * The first time data arrives and the queue is filled with something,
   * we hook the listener so it can start moving data from the queue to the OutputStream
   * whenever the Servlet Container wants.
   */
  private val writeListenerSet = new AtomicBoolean(false)

  /**
   * Servlet response.
   */
  private val hsResponse = asyncContext.getResponse.asInstanceOf[HttpServletResponse]

  /**
   * Servlet 3.1 Write Listener
   */
  private val writeListener = new WriteListener {
    /**
     * Log the error event.
     * @param t the error that occured.
     */
    def onError(t: Throwable) {
      log.error(t, "Error during async processing of {}", requestStringForLog)
    }

    /**
     * Will be called by the servlet container. The first time, it will extract the data from the request and
     * pass it to spray routing.
     */
    def onWritePossible() {
      tryWriteFromQueue()
    }
  }

  // Registered eagerly at construction: as soon as the request body has been
  // assembled (or has failed), hand it over to spray routing / error handling.
  futureRequest.onComplete(processRequestFromFuture)(system.dispatcher)

  /**
   * The timeout duration can vary with a call to SetTimeout
   */
  private var timeoutTimeout: Duration = settings.timeoutTimeout

  /**
   * Helper method to handle null strings.
   * @param s a possibly null String.
   * @return s or an empty String if s is null.
   */
  private def nullAsEmpty(s: String): String = if (s == null) "" else s

  // Dispatches a successfully parsed request to the service actor, or maps a
  // parse/processing failure to the corresponding error response and closes.
  private def processRequestFromFuture(successOrFailure: Try[HttpRequest]) {
    successOrFailure match {
      case Success(request: HttpRequest) ⇒ {
        theRequest = Some(request)
        serviceActor.tell(request, this)
      }
      case Failure(t: Throwable) ⇒ t match {
        case e: IllegalRequestException ⇒ {
          log.warning("Illegal request {}\\n\\t{}\\n\\tCompleting with '{}' response",
            requestStringForLog, e.info.formatPretty, e.status)
          writeResponse(HttpResponse(e.status, e.info.format(settings.verboseErrorMessages)), PostProcessMessage(close = true))
        }
        case e: RequestProcessingException ⇒ {
          log.warning("Request {} could not be handled normally\\n\\t{}\\n\\tCompleting with '{}' response",
            requestStringForLog, e.info.formatPretty, e.status)
          writeResponse(HttpResponse(e.status, e.info.format(settings.verboseErrorMessages)), PostProcessMessage(close = true))
        }
        case NonFatal(e) ⇒ {
          log.error(e, "Error during processing of request {}", requestStringForLog)
          writeResponse(HttpResponse(500, entity = "The request could not be handled"), PostProcessMessage(close = true))
        }
      }
    }
  }

  /**
   * If there is data in the queue, try to write it to the response OutputStream, while it's ready.
   */
  // NOTE(review): the head entry is peeked first and only polled once fully
  // written (or failed); this assumes the container invokes onWritePossible
  // serially so two drains never interleave on the same entry — confirm.
  private def tryWriteFromQueue() {
    if (!queue.isEmpty) {
      val (byteInputStream, postProcessMessage, responseAsStringForLog) = queue.peek()
      val tryToWrite: Try[Unit] = Try {
        // Write byte-by-byte while the container reports the stream as ready.
        while (hsResponse.getOutputStream.isReady && byteInputStream.available > 0) {
          hsResponse.getOutputStream.write(byteInputStream.read())
        }
      }
      tryToWrite match {
        case Failure(e) ⇒ e match {
          case ioe: IOException ⇒
            log.error("Could not write response body, probably the request has either timed out or the client has " +
              "disconnected\\nRequest: {}\\nResponse: {}\\nError: {}",
              requestStringForLog, responseAsStringForLog, ioe)
          case another ⇒
            log.error("Could not complete request\\nRequest: {}\\nResponse: {}\\nError: {}",
              requestStringForLog, responseAsStringForLog, another)
        }
      }
      //val error: Option[Throwable] = (tryToWrite map {case _ => None} recover { case t => Some(t)}).toOption.flatten
      // Remove the entry once it is exhausted or has failed, then run its
      // post-action (ack / close / error notification).
      if (tryToWrite.isFailure || byteInputStream.available() == 0) {
        queue.poll()
        postProcess(tryToWrite, postProcessMessage)
      }
    }
  }

  /**
   * Defines what to do after some data has been written to the stream.
   * @param ack if something, send it back to the sender.
   * @param close if true, close the stream (complete) and tell the sender that we are closed.
   * @param sender the actor that sent us the data.
   */
  private case class PostProcessMessage(close: Boolean, sender: Option[ActorRef] = None, ack: Option[Any] = None)

  // On success: deliver the ack, then optionally complete the async context
  // and notify the sender with Tcp.Closed. On failure: always complete and
  // report Tcp.ErrorClosed to the sender.
  private def postProcess(error: Try[_], postProcessMessage: PostProcessMessage) {
    error match {
      case Success(_) ⇒ {
        postProcessMessage.ack.foreach(ack => postProcessMessage.sender.foreach(sender => sender.tell(ack, this)))
        if (postProcessMessage.close) {
          asyncContext.complete()
          if (postProcessMessage.sender.isDefined) {
            postProcessMessage.sender.get.tell(Tcp.Closed, this)
          }
        }
      }
      case Failure(e) ⇒ {
        asyncContext.complete()
        if (postProcessMessage.sender.isDefined) {
          postProcessMessage.sender.get.tell(Tcp.ErrorClosed(nullAsEmpty(e.getMessage)), this)
        }
      }
    }
  }

  /**
   * Method to handle messages that this Actor receives.
   * @param message an actor message.
   * @param sender the actor that sent the message.
   */
  def handle(message: Any)(implicit sender: ActorRef) {
    val trueSender = sender
    message match {
      case wrapper: HttpMessagePartWrapper if wrapper.messagePart.isInstanceOf[HttpResponsePart] ⇒
        wrapper.messagePart.asInstanceOf[HttpResponsePart] match {
          // A single, complete response: only legal from the OPEN state.
          case response: HttpResponse ⇒
            if (state.compareAndSet(OPEN, COMPLETED)) {
              writeResponse(response, PostProcessMessage(close = true, Some(trueSender), wrapper.ack))
            } else state.get match {
              case STARTED ⇒
                log.warning("Received an HttpResponse after a ChunkedResponseStart, dropping ...\\nRequest: {}\\nResponse: {}", requestStringForLog, response)
              case COMPLETED ⇒
                log.warning("Received a second response for a request that was already completed, dropping ...\\nRequest: {}\\nResponse: {}", requestStringForLog, response)
            }
          // Beginning of a chunked response: transitions OPEN -> STARTED.
          case response: ChunkedResponseStart ⇒
            if (state.compareAndSet(OPEN, STARTED)) {
              writeResponse(response, PostProcessMessage(close = false, Some(trueSender), wrapper.ack))
            } else state.get match {
              case STARTED ⇒
                log.warning("Received a second ChunkedResponseStart, dropping ...\\nRequest: {}\\nResponse: {}", requestStringForLog, response)
              case COMPLETED ⇒
                log.warning("Received a ChunkedResponseStart for a request that was already completed, dropping ...\\nRequest: {}\\nResponse: {}", requestStringForLog, response)
            }
          // A body chunk: only legal while a chunked response is in progress.
          case MessageChunk(body, _) ⇒ state.get match {
            case OPEN ⇒
              log.warning("Received a MessageChunk before a ChunkedResponseStart, dropping ...\\nRequest: {}\\nChunk: {} bytes\\n", requestStringForLog, body.length)
            case STARTED ⇒
              writeChunk(body, PostProcessMessage(close = false, Some(trueSender), wrapper.ack))
            case COMPLETED ⇒
              log.warning("Received a MessageChunk for a request that was already completed, dropping ...\\nRequest: {}\\nChunk: {} bytes", requestStringForLog, body.length)
          }
          // End of a chunked response: transitions STARTED -> COMPLETED.
          case _: ChunkedMessageEnd ⇒
            if (state.compareAndSet(STARTED, COMPLETED)) {
              // NOTE(review): Success() relies on Scala's unit-value
              // adaptation, i.e. it is Success(()).
              postProcess(Success(), PostProcessMessage(close = true, Some(trueSender), wrapper.ack))
            } else state.get match {
              case OPEN ⇒
                log.warning("Received a ChunkedMessageEnd before a ChunkedResponseStart, dropping ...\\nRequest: {}", requestStringForLog)
              case COMPLETED ⇒
                log.warning("Received a ChunkedMessageEnd for a request that was already completed, dropping ...\\nRequest: {}", requestStringForLog)
            }
        }
      case msg@SetRequestTimeout(timeout) ⇒
        state.get match {
          case COMPLETED ⇒ notCompleted(msg)
          case _ ⇒
            // A non-finite timeout maps to 0, which disables the container's
            // async timeout.
            val millis = if (timeout.isFinite()) timeout.toMillis else 0
            asyncContext.setTimeout(millis)
        }
      case msg@SetTimeoutTimeout(timeout) ⇒
        state.get match {
          case COMPLETED ⇒ notCompleted(msg)
          case _ ⇒ timeoutTimeout = timeout
        }
      case x ⇒ system.eventStream.publish(UnhandledMessage(x, sender, this))
    }
  }

  /**
   * Log that a message came but the response was already committed.
   * @param msg the unwanted message.
   */
  private def notCompleted(msg: Any) {
    log.warning("Received a {} for a request that was already completed, dropping ...\\nRequest: {}",
      msg, requestStringForLog)
  }

  /**
   * Write a chunk of data to the queue, and maybe to the OutputStream if it's ready.
   * @param buffer data
   * @param postProcessMessage defines what to do after the data is sent to the stream.
   */
  private def writeChunk(buffer: Array[Byte], postProcessMessage: PostProcessMessage) {
    queue.add((new ByteArrayInputStream(buffer), postProcessMessage, hsResponse.toString))
    tryWriteFromQueue()
  }

  // Renders status, headers and (when present) the body of a response message
  // into the servlet response, queueing the body bytes for async writing.
  private def writeResponse(response: HttpMessageStart with HttpResponsePart,
                            postProcessMessage: PostProcessMessage) {
    val resp = response.message.asInstanceOf[HttpResponse]
    hsResponse.setStatus(resp.status.intValue)
    resp.headers.foreach {
      header ⇒
        header.lowercaseName match {
          case "content-type" ⇒ // we never render these headers here, because their production is the
          case "content-length" ⇒ // responsibility of the spray-servlet layer, not the user
          case _ ⇒ hsResponse.addHeader(header.name, header.value)
        }
    }
    resp.entity match {
      case EmptyEntity ⇒ {
        // Nothing to stream: run the post-action immediately.
        // NOTE(review): Success() relies on unit-value adaptation.
        postProcess(Success(), postProcessMessage)
      }
      case HttpBody(contentType, buffer) ⇒ {
        hsResponse.addHeader("Content-Type", contentType.value)
        // Content-Length is only known for non-chunked (full) responses.
        if (response.isInstanceOf[HttpResponse]) hsResponse.addHeader("Content-Length", buffer.length.toString)
        queue.add((new ByteArrayInputStream(buffer), postProcessMessage, response.toString))
        // The WriteListener may be registered exactly once per response;
        // afterwards we drain the queue directly.
        if (!writeListenerSet.get()) {
          writeListenerSet.set(true)
          hsResponse.getOutputStream.setWriteListener(writeListener)
        } else {
          tryWriteFromQueue()
        }
      }
    }
  }

  /**
   * public method to be called by AsyncListener.
   * @param timeoutHandler actor to tell that a timeOut happened.
   */
  // NOTE(review): both the default timeout response and any response produced
  // by the timeout handler are written with close = false, so this path never
  // calls asyncContext.complete() itself — presumably the container finishes
  // the timed-out context; confirm against the AsyncListener contract.
  def callTimeout(timeoutHandler: ActorRef) {
    val timeOutResponder = new UnregisteredActorRef(system) {
      def handle(message: Any)(implicit sender: ActorRef) {
        message match {
          case x: HttpResponse ⇒ writeResponse(x, PostProcessMessage(close = false))
          case x ⇒ system.eventStream.publish(UnhandledMessage(x, sender, this))
        }
      }
    }
    writeResponse(timeoutResponse(), PostProcessMessage(close = false))
    if (timeoutTimeout.isFinite() && theRequest.isDefined) {
      timeoutHandler.tell(Timedout(theRequest.get), timeOutResponder)
    }
  }

  /**
   * @return a Timeout Response to send to the client.
   */
  private def timeoutResponse(): HttpResponse = HttpResponse(
    status = 500,
    entity = "Ooops! The server was not able to produce a timely response to your request.\\n" +
      "Please try again in a short while!")
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.worker
import java.io._
import scala.collection.JavaConversions._
import com.google.common.base.Charsets.UTF_8
import com.google.common.io.Files
import org.apache.hadoop.fs.Path
import org.apache.spark.{Logging, SparkConf, SecurityManager}
import org.apache.spark.deploy.{DriverDescription, SparkHadoopUtil}
import org.apache.spark.deploy.DeployMessages.DriverStateChanged
import org.apache.spark.deploy.master.DriverState
import org.apache.spark.deploy.master.DriverState.DriverState
import org.apache.spark.rpc.RpcEndpointRef
import org.apache.spark.util.{Utils, Clock, SystemClock}
/**
 * Manages the execution of one driver, including automatically restarting the driver on failure.
 * This is currently only used in standalone cluster deploy mode.
 */
private[deploy] class DriverRunner(
    conf: SparkConf,
    val driverId: String,
    val workDir: File,
    val sparkHome: File,
    val driverDesc: DriverDescription,
    val worker: RpcEndpointRef,
    val workerUrl: String,
    val securityManager: SecurityManager)
  extends Logging {

  // Handle on the running driver process; written under `synchronized`
  // together with `killed` so a kill can never race a launch.
  @volatile private var process: Option[Process] = None
  @volatile private var killed = false

  // Populated once finished
  private[worker] var finalState: Option[DriverState] = None
  private[worker] var finalException: Option[Exception] = None
  private var finalExitCode: Option[Int] = None

  // Decoupled for testing
  def setClock(_clock: Clock): Unit = {
    clock = _clock
  }

  def setSleeper(_sleeper: Sleeper): Unit = {
    sleeper = _sleeper
  }

  private var clock: Clock = new SystemClock()
  // Sleeps one second at a time so a kill() can cut a retry back-off short.
  private var sleeper = new Sleeper {
    def sleep(seconds: Int): Unit = (0 until seconds).takeWhile(f => {Thread.sleep(1000); !killed})
  }

  /** Starts a thread to run and manage the driver. */
  private[worker] def start() = {
    new Thread("DriverRunner for " + driverId) {
      override def run() {
        try {
          // Prepare the per-driver working directory and fetch the user jar.
          val driverDir = createWorkingDirectory()
          val localJarFilename = downloadUserJar(driverDir)

          // Replaces command-line placeholders with their runtime values.
          def substituteVariables(argument: String): String = argument match {
            case "{{WORKER_URL}}" => workerUrl
            case "{{USER_JAR}}" => localJarFilename
            case other => other
          }

          // TODO: If we add ability to submit multiple jars they should also be added here
          val builder = CommandUtils.buildProcessBuilder(driverDesc.command, securityManager,
            driverDesc.mem, sparkHome.getAbsolutePath, substituteVariables)
          launchDriver(builder, driverDir, driverDesc.supervise)
        }
        catch {
          case e: Exception => finalException = Some(e)
        }

        // Terminal state: an explicit kill wins, then any thrown exception,
        // otherwise the last process exit code decides.
        val state =
          if (killed) {
            DriverState.KILLED
          } else if (finalException.isDefined) {
            DriverState.ERROR
          } else {
            finalExitCode match {
              case Some(0) => DriverState.FINISHED
              case _ => DriverState.FAILED
            }
          }

        finalState = Some(state)

        // Notify the worker so it can propagate the state change.
        worker.send(DriverStateChanged(driverId, state, finalException))
      }
    }.start()
  }

  /** Terminate this driver (or prevent it from ever starting if not yet started) */
  private[worker] def kill() {
    synchronized {
      process.foreach(p => p.destroy())
      killed = true
    }
  }

  /**
   * Creates the working directory for this driver.
   * Will throw an exception if there are errors preparing the directory.
   */
  private def createWorkingDirectory(): File = {
    val driverDir = new File(workDir, driverId)
    if (!driverDir.exists() && !driverDir.mkdirs()) {
      throw new IOException("Failed to create directory " + driverDir)
    }
    driverDir
  }

  /**
   * Download the user jar into the supplied directory and return its local path.
   * Will throw an exception if there are errors downloading the jar.
   */
  private def downloadUserJar(driverDir: File): String = {
    val jarPath = new Path(driverDesc.jarUrl)
    val hadoopConf = SparkHadoopUtil.get.newConfiguration(conf)
    val destPath = new File(driverDir.getAbsolutePath, jarPath.getName)
    val jarFileName = jarPath.getName
    val localJarFile = new File(driverDir, jarFileName)
    val localJarFilename = localJarFile.getAbsolutePath
    if (!localJarFile.exists()) { // May already exist if running multiple workers on one node
      logInfo(s"Copying user jar $jarPath to $destPath")
      Utils.fetchFile(
        driverDesc.jarUrl,
        driverDir,
        conf,
        securityManager,
        hadoopConf,
        System.currentTimeMillis(),
        useCache = false)
    }
    if (!localJarFile.exists()) { // Verify copy succeeded
      throw new Exception(s"Did not see expected jar $jarFileName in $driverDir")
    }
    localJarFilename
  }

  // Configures the process builder (cwd, stdout/stderr redirection) and runs
  // the driver command, retrying with back-off when supervision is enabled.
  private def launchDriver(builder: ProcessBuilder, baseDir: File, supervise: Boolean) {
    builder.directory(baseDir)
    def initialize(process: Process): Unit = {
      // Redirect stdout and stderr to files
      val stdout = new File(baseDir, "stdout")
      CommandUtils.redirectStream(process.getInputStream, stdout)

      val stderr = new File(baseDir, "stderr")
      // The launched command is echoed at the top of stderr for diagnostics.
      val header = "Launch Command: %s\\n%s\\n\\n".format(
        builder.command.mkString("\\"", "\\" \\"", "\\""), "=" * 40)
      Files.append(header, stderr, UTF_8)
      CommandUtils.redirectStream(process.getErrorStream, stderr)
    }
    runCommandWithRetry(ProcessBuilderLike(builder), initialize, supervise)
  }

  // Runs `command`, restarting it with exponential back-off while `supervise`
  // is set, the exit code is non-zero, and the driver has not been killed.
  def runCommandWithRetry(
      command: ProcessBuilderLike, initialize: Process => Unit, supervise: Boolean): Unit = {
    // Time to wait between submission retries.
    var waitSeconds = 1
    // A run of this many seconds resets the exponential back-off.
    val successfulRunDuration = 5

    var keepTrying = !killed

    while (keepTrying) {
      logInfo("Launch Command: " + command.command.mkString("\\"", "\\" \\"", "\\""))

      // Launch under the same lock kill() takes, so we never start a process
      // after the driver has been killed.
      synchronized {
        if (killed) { return }
        process = Some(command.start())
        initialize(process.get)
      }

      val processStart = clock.getTimeMillis()
      val exitCode = process.get.waitFor()
      // A sufficiently long successful run resets the back-off delay.
      if (clock.getTimeMillis() - processStart > successfulRunDuration * 1000) {
        waitSeconds = 1
      }

      if (supervise && exitCode != 0 && !killed) {
        logInfo(s"Command exited with status $exitCode, re-launching after $waitSeconds s.")
        sleeper.sleep(waitSeconds)
        waitSeconds = waitSeconds * 2 // exponential back-off
      }

      keepTrying = supervise && exitCode != 0 && !killed
      finalExitCode = Some(exitCode)
    }
  }
}
/**
 * Abstraction over sleeping so tests can substitute a no-op implementation
 * (see DriverRunner.setSleeper).
 */
private[deploy] trait Sleeper {
  // Explicit `: Unit` result type: procedure syntax (no result type) is
  // deprecated and ambiguous for abstract members.
  def sleep(seconds: Int): Unit
}
// Needed because ProcessBuilder is a final class and cannot be mocked
private[deploy] trait ProcessBuilderLike {
  /** Launches the underlying process. */
  def start(): Process
  /** The command line the process will be launched with. */
  def command: Seq[String]
}
private[deploy] object ProcessBuilderLike {
  /** Adapts a real java.lang.ProcessBuilder (a final class, hence unmockable)
    * to the ProcessBuilderLike trait by simple delegation. */
  def apply(processBuilder: ProcessBuilder): ProcessBuilderLike =
    new ProcessBuilderLike {
      override def command: Seq[String] = processBuilder.command()
      override def start(): Process = processBuilder.start()
    }
}
| ArvinDevel/onlineAggregationOnSparkV2 | core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala | Scala | apache-2.0 | 7,917 |
package com.cloudray.scalapress.plugin.listings.domain
import javax.persistence.{Column, ManyToOne, Entity, Table, GenerationType, GeneratedValue, Id}
import collection.mutable.ArrayBuffer
import com.cloudray.scalapress.item.ItemType
import scala.beans.BeanProperty
/** @author Stephen Samuel */
@Entity
@Table(name = "listings_packages")
class ListingPackage {

  @Id
  @GeneratedValue(strategy = GenerationType.AUTO)
  @BeanProperty var id: Long = _

  @Column(name = "maxcategories")
  @BeanProperty var maxFolders: Int = _

  @ManyToOne
  @BeanProperty var objectType: ItemType = _

  @BeanProperty var maxImages: Int = _

  @Column(name = "description", length = 3000)
  @BeanProperty var description: String = _

  @BeanProperty var maxCharacters: Int = _
  @BeanProperty var deleted: Boolean = _
  @BeanProperty var autoPublish: Boolean = _

  @Column(name = "labels", length = 3000)
  @BeanProperty var labels: String = _

  @Column(length = 3000)
  @BeanProperty var name: String = _

  @BeanProperty var fee: Int = _
  @BeanProperty var duration: Int = _
  @BeanProperty var folders: String = _

  /** Human-readable price, e.g. "Free", "£9.99 for 1 month" or
    * "£20.00 for 3 months". */
  def priceText = {
    // A zero fee means the package is free; otherwise fee is in pence.
    val feePart = if (fee == 0) "Free" else "£" + "%.2f".format(fee / 100.0)
    // Duration is in days; non-positive durations are simply omitted.
    val durationPart =
      if (duration <= 0) None
      else Some(duration match {
        case 365 => "for 1 year"
        case 30 => "for 1 month"
        case d if d % 30 == 0 => "for " + d / 30 + " months"
        case d => "for " + d + " days"
      })
    (feePart +: durationPart.toSeq).mkString(" ")
  }
}
package runnables.production
import ccn.packet.{CCNName, Content}
import com.typesafe.scalalogging.slf4j.Logging
import config.{ComputeNodeConfig, RouterConfig, StaticConfig}
import nfn.service.GPS.GPX.GPXOriginFilter
import nfn.service.GPS.GPX.GPXDistanceAggregator
import nfn.service.GPS.GPX.GPXDistanceComputer
import nfn.service.NBody
import nfn.service.Temperature.{ReadSensorData, ReadSensorDataSimu, StoreSensorData}
import nfn.service._
import node.LocalNode
import orgOpenmhealth.helperServices.SimpleToJSON
import orgOpenmhealth.services.{DistanceTo, PointCount}
import scopt.OptionParser
import sys.process._
import scala.io.Source
/** Default values for [[ComputeServerConfig]]; also referenced in the CLI help texts below. */
object ComputeServerConfigDefaults {
val mgmtSocket = "/tmp/ccn-lite-mgmt.sock"
val ccnLiteAddr = "127.0.0.1"
val ccnlPort = 9000
val computeServerPort = 9001
val isCCNLiteAlreadyRunning = false
val logLevel = "warning"
val prefix = CCNName("nfn", "node")
}
/** Runtime configuration for the NFN compute server.
  *
  * @param prefix                  CCN name under which this node publishes content and services
  * @param mgmtSocket              unix socket for ccn-lite management operations; the ccnl UDP socket is used when absent
  * @param ccnLiteAddr             address ccn-lite should use, or address of an already running ccn-lite
  * @param ccnlPort                port ccn-lite should use, or port of an already running ccn-lite
  * @param computeServerPort       port used by the compute server itself
  * @param isCCNLiteAlreadyRunning when true, ccn-lite is not started internally by nfn-scala
  * @param logLevel                log level name ("warning", "info" or "debug")
  * @param suite                   wire format suite; the empty string keeps the default
  */
case class ComputeServerConfig(prefix: CCNName = ComputeServerConfigDefaults.prefix,
mgmtSocket: Option[String] = None,
ccnLiteAddr: String = ComputeServerConfigDefaults.ccnLiteAddr,
ccnlPort: Int = ComputeServerConfigDefaults.ccnlPort,
computeServerPort: Int = ComputeServerConfigDefaults.computeServerPort,
isCCNLiteAlreadyRunning: Boolean = ComputeServerConfigDefaults.isCCNLiteAlreadyRunning,
logLevel: String = ComputeServerConfigDefaults.logLevel,
suite: String = "")
/** Command-line entry point that starts an NFN compute server and, unless told that one
  * is already running, a local ccn-lite relay, based on the parsed [[ComputeServerConfig]]. */
object ComputeServerStarter extends Logging {
// scopt CLI parser; defaults shown in the help texts come from ComputeServerConfigDefaults
val argsParser = new OptionParser[ComputeServerConfig]("") {
override def showUsageOnError = true
head("nfn-scala: compute-server starter", "v0.2.0")
opt[String]('m', "mgmtsocket") action { (ms, c) =>
c.copy(mgmtSocket = Some(ms))
} text s"unix socket name for ccnl mgmt ops or of running ccnl, if not specified ccnl UDP socket is used (example: ${ComputeServerConfigDefaults.mgmtSocket})"
opt[String]('a', "ccnl-addr") action { case (a, c) =>
c.copy(ccnLiteAddr = a)
} text s"address ccnl should use or address of running ccnl (default: ${ComputeServerConfigDefaults.ccnLiteAddr})"
opt[Int]('o', "ccnl-port") action { case (p, c) =>
c.copy(ccnlPort = p)
} text s"unused port ccnl should use or port of running ccnl (default: ${ComputeServerConfigDefaults.ccnlPort})"
opt[String]('s', "suite") action { case (s, c) =>
c.copy(suite = s)
} text s"wireformat to be used (default: ndntlv)"
opt[Int]('p', "cs-port") action { case (p, c) =>
c.copy(computeServerPort = p)
} text s"port used by compute server, (default: ${ComputeServerConfigDefaults.computeServerPort})"
opt[Unit]('r', "ccnl-already-running") action { (_, c) =>
c.copy(isCCNLiteAlreadyRunning = true)
} text s"flag to indicate that ccnl is already running and should not be started internally by nfn-scala"
opt[Unit]('v', "verbose") action { (_, c) =>
c.copy(logLevel = "info")
} text "loglevel 'info'"
opt[Unit]('d', "debug") action { (_, c) =>
c.copy(logLevel = "debug")
} text "loglevel 'debug'"
// note: sys.exit never returns, so the trailing `c` only satisfies the action's result type
opt[Unit]('h', "help") action { (_, c) =>
showUsage
sys.exit
c
} text { "prints usage" }
arg[String]("<node-prefix>") validate {
p => if(CCNName.fromString(p).isDefined) success else failure(s"Argument <node-prefix> must be a valid CCNName (e.g. ${ComputeServerConfigDefaults.prefix})")
} action {
case (p, c) => c.copy(prefix = CCNName.fromString(p).get)
} text s"prefix of this node, all content and services are published with this name (example: ${ComputeServerConfigDefaults.prefix})"
}
def main(args: Array[String]) = {
argsParser.parse(args, ComputeServerConfig()) match {
case Some(config) =>
StaticConfig.setDebugLevel(config.logLevel)
println("Suite is", config.suite)
if(config.suite != ""){
StaticConfig.setWireFormat(config.suite)
}
logger.debug(s"config: $config")
// Configuration of the router, so far always ccn-lite
// It requires the socket to the management interface, isCCNOnly = false indicates that it is a NFN node
// and isAlreadyRunning tells the system that it should not have to start ccn-lite
val routerConfig = RouterConfig(config.ccnLiteAddr, config.ccnlPort, config.prefix, config.mgmtSocket.getOrElse("") ,isCCNOnly = false, isAlreadyRunning = config.isCCNLiteAlreadyRunning)
// This configuration sets up the compute server
// withLocalAm = false tells the system that it should not start an abstract machine alongside the compute server
val computeNodeConfig = ComputeNodeConfig("127.0.0.1", config.computeServerPort, config.prefix, withLocalAM = false)
// Abstraction of a node which runs both the router and the compute server on localhost (over UDP sockets)
val node = LocalNode(routerConfig, Some(computeNodeConfig))
// Publish services
// This will internally get the Java bytecode for the compiled services, put them into jar files and
// put the data of the jar into a content object.
// The name of this service is inferred from the package structure of the service as well as the prefix of the local node.
// In this case the prefix is given with the commandline argument 'prefixStr' (e.g. /node/nodeA/nfn_service_WordCount)
node.publishServiceLocalPrefix(new WordCount())
// node.publishServiceLocalPrefix(new DelayedWordCount())
// node.publishServiceLocalPrefix(new IntermediateTest())
// node.publishServiceLocalPrefix(new FetchContentTest())
// node.publishServiceLocalPrefix(new NBody.SimulationService())
// node.publishServiceLocalPrefix(new NBody.RenderService())
node.publishServiceLocalPrefix(new ListIntervalService())
node.publishServiceLocalPrefix(new SensorDataProcessingService())
// node.publishServiceLocalPrefix(new NBody.SimulationRenderService())
// node.publishServiceLocalPrefix(new ChainIntermediates())
// node.publishServiceLocalPrefix(new PubSubBroker())
// node.publishServiceLocalPrefix(new ControlRequestTest())
//node.publishServiceLocalPrefix(new Pandoc())
//node.publishServiceLocalPrefix(new PDFLatex())
//node.publishServiceLocalPrefix(new Reverse())
// node.publishServiceLocalPrefix(new Echo())
// node.publishServiceLocalPrefix(new ChunkTest())
// node.publishServiceLocalPrefix(new Waypoint())
// node.publishServiceLocalPrefix(new EchoP())
//node.publishServiceLocalPrefix(new GPXOriginFilter())
//node.publishServiceLocalPrefix(new GPXDistanceComputer())
//node.publishServiceLocalPrefix(new GPXDistanceAggregator())
//node.publishServiceLocalPrefix(new ReadSensorData())
// node.publishServiceLocalPrefix(new PointCount())
// node.publishServiceLocalPrefix(new DistanceTo())
// node.publishServiceLocalPrefix(new SimpleToJSON())
//node.publishServiceLocalPrefix(new StoreSensorData())
//node.publishServiceLocalPrefix(new ReadSensorData())*/
// Gets the content of the ccn-lite tutorial
//node += PandocTestDocuments.tutorialMd(node.localPrefix)
// Publishes a very small two-line markdown file
//node += PandocTestDocuments.tinyMd(node.localPrefix)
//Read GPS Trackpoints for NDN Fit Experiment, uncomment if needed
//val files = ("ls trackpoints/" !!)
//val filelist = files.split('\n')
/*filelist.foreach(f => {
val data = Source.fromFile(s"trackpoints/$f").mkString
val num = f.substring(f.indexOf("_")+1, f.indexOf("."))
node += Content(CCNName(s"/ndn/ch/unibas/NDNfit/Joe/internal/running/training/2015/02/04/gpx/p$num".substring(1).split("/").toList, None), data.getBytes)
}
)*/
/*
val files = ("ls /home/claudio/trackpoints/" !!)
val filelist = files.split('\n')
filelist.foreach(f => {
val data = Source.fromFile(s"/home/claudio/trackpoints/$f").mkString
val num = f.substring(f.indexOf("_")+1, f.indexOf("."))
node += Content(CCNName(s"/ndn/ch/unibas/NDNfit/hidden/run1/gpx/data/p$num".substring(1).split("/").toList, None), data.getBytes)
}
)
*/
/*
* --------------
* GPX SCENARIO
* --------------
*
* Uncomment code above to put raw data into the cache!
*
* Arguments for this runnable: -m /tmp/mgmt.sock -o 9000 -p 9001 -d /nfn/node0
*
*
* Test Requests:
*
* ccn-lite-peek -w 10 -u 127.0.0.1/9000 /ndn/ch/unibas/NDNfit/hidden/run1/gpx/data/p1 | ccn-lite-pktdump -f2
* ccn-lite-simplenfn -w 10 -u 127.0.0.1/9000 "call 3 /nfn/node0/nfn_service_GPX_GPXOriginFilter '/run1/gpx/data' 1" | ccn-lite-pktdump -f2
* ccn-lite-simplenfn -w 10 -u 127.0.0.1/9000 "call 5 /nfn/node0/nfn_service_GPX_GPXDistanceComputer '/run1/gpx/data' 1 '/run1/gpx/data' 2" | ccn-lite-pktdump -f2
* ccn-lite-simplenfn -w 10 -u 127.0.0.1/9000 "call 3 /nfn/node0/nfn_service_GPX_GPXDistanceAggregator '/run1/gpx/data' 5" | ccn-lite-pktdump -f2
*
*
*/
case None => sys.exit(1)
}
}
}
| cn-uofbasel/nfn-scala | src/main/scala/runnables/production/ComputeServerStarter.scala | Scala | isc | 9,461 |
package spire.math
import org.scalatest.FunSuite
import scala.util.Random
/** Unit tests for spire's Rational: construction and canonical form, arithmetic,
  * primitive conversions, denominator limiting, and rational GCD. */
class RationalTest extends FunSuite {
test("rational canonical construction") {
val r = Rational(5,6)
assert(r.numerator === BigInt(5))
assert(r.denominator === BigInt(6))
}
// 30/345 must be reduced to lowest terms (2/23)
test("rational degenerate construction") {
val r = Rational(30, 345)
assert(r.numerator === BigInt(2))
assert(r.denominator === BigInt(23))
}
test("RationalIsFractional implicit exists") {
import spire.implicits._
def doStuff[NT:Fractional](a: NT, b: NT):NT = a / b
assertResult(Rational(1, 2)) {
doStuff(Rational(1), Rational(2))
}
}
test("equality of equivalent canonical and degenerate rationals") {
val a = Rational(1, 2)
val b = Rational(8, 16)
assert(a === b)
}
test("non-equivalent rationals are not equal") {
val a = Rational(1, 2)
val b = Rational(1, 3)
val c = Rational(2, 1)
assertResult(false)(a == b)
assertResult(false)(a == c)
}
test("comparisons") {
val a = Rational(1, 2)
val b = Rational(3, 4)
val c = Rational(-1, 2)
val d = Rational(1, 2)
assert(a < b)
assert(b > a)
assert(a > c)
assert(c < a)
assert(a <= d)
assert(a >= d)
}
// equality against Long/Int/Double/Float must work in both directions
test("primitive comparisons") {
val a = Rational("5000000000")
val b = Rational(-123456)
val c = Rational(1, 8)
assert(a === 5000000000L)
assert(5000000000L === a)
assert(b === -123456)
assert(-123456 === b)
assert(c === 0.125)
assert(0.125 === c)
assert(c === 0.125f)
assert(0.125f === c)
}
test("addition") {
val a = Rational(3, 10)
val b = Rational(4, 19)
// This will go through the coprime denominator path.
// Since, 97 and 190 are coprime, 97/190 is canonical too.
assertResult(Rational(97, 190)) {
a + b
}
val c = Rational(1, 2)
val d = Rational(1, 6)
// This will go through the non-coprime denominator path. Since the
// GCD of 2 and 6 is 2, the numerator 1 * 3 + 1 * 1 = 4 is tried first.
// The GCD of 4 and 2 is 2, so the numerator will need to be reduced.
assertResult(Rational(1 * 6 + 1 * 2, 2 * 6)) {
c + d
}
val e = Rational(1, 2)
val f = Rational(3, 4)
// This will go through the non-coprime denominator path. Since the
// GCD of 2 and 4 is 2, the numerator 5 is tried first, which is
// coprime with 2, so the numerator need not be reduced.
assertResult(Rational(1 * 4 + 3 * 2, 2 * 4)) {
e + f
}
}
test("subtraction") {
// Just ripped from addition
val a = Rational(3, 10)
val b = Rational(4, 19)
assertResult(Rational(3 * 19 - 4 * 10, 10 * 19)) {
a - b
}
val c = Rational(1, 2)
val d = Rational(1, 6)
assertResult(Rational(1 * 6 - 1 * 2, 2 * 6)) {
c - d
}
val e = Rational(1, 2)
val f = Rational(3, 4)
assertResult(Rational(1 * 4 - 3 * 2, 2 * 4)) {
e - f
}
}
test("multiplication") {
val a = Rational(2, 3)
val b = Rational(1, 2)
assertResult(Rational(1, 3)) {
a * b
}
val c = Rational(-321, 23)
val d = Rational(23, 13)
assertResult(Rational(-321 * 23, 23 * 13)) {
c * d
}
val e = Rational(-1, 2)
assertResult(Rational(1, 4)) {
e * e
}
}
test("division") {
val a = Rational(2, 3)
val b = Rational(1, 2)
assertResult(Rational(4, 3)) {
a / b
}
val c = Rational(-21, 5)
val d = Rational(7, 18)
assertResult(Rational(-54, 5)) {
c / d
}
val e = Rational(-23, 19)
assertResult(Rational.one) {
e / e
}
}
test("division by 0") {
intercept[ArithmeticException] {
Rational.one / 0
}
}
test("pow") {
val a = Rational(1, 2)
assertResult(Rational(1, BigInt("4294967296"))) {
a pow 32
}
val b = Rational(-3, 1)
assertResult(Rational(9, 1)) {
b pow 2
}
assertResult(Rational(-27, 1)) {
b pow 3
}
}
test("longValue") { assert(Rational("5000000000").toLong === 5000000000L) }
// toInt truncates towards zero
test("intValue") {
assert(Rational(3).toInt === 3)
assert(Rational(-5, 2).toInt === -2)
}
// narrowing conversions wrap around, matching Long#toShort / Long#toByte semantics
test("shortValue") {
assert(Rational(65535).toShort === -1)
assert(Rational(65536).toShort === 0)
assert(Rational(-5).toShort === -5)
}
test("byteValue") {
assert(Rational(-1).toByte === -1)
assert(Rational(256).toByte === 0)
}
test("toDouble and tFloat") {
assert(Rational(1, 2).toFloat === 0.5f)
val a = Rational("10000000000000002/10000000000000000")
assert(a.toDouble === 1.0000000000000002)
assert(a.toFloat === 1.0f)
assert(Rational(2, 3).toDouble === 2 / 3.0)
}
test("toString") {
assert(Rational(1, 2).toString === "1/2")
assert(Rational(1, -2).toString === "-1/2")
assert(Rational(2, 4).toString === "1/2")
}
test("hashCode is the same for equivalent rats") {
assert(Rational(1, 2).hashCode === Rational(2, 4).hashCode)
assert(Rational(0).hashCode === Rational(0, 5).hashCode)
assert(Rational(-1, 2).hashCode === Rational(1, -2).hashCode)
}
test("reverse primitive equality") {
assert(1 == Rational.one)
//assert(-23L == Rational(-23L, 1L))
}
test("limiting 0 to any number returns 0") {
assert(Rational.zero.limitDenominatorTo(1234) === Rational.zero)
assert(Rational.zero.limitDenominatorTo(1) === Rational.zero)
assert(Rational.zero.limitTo(23) === Rational.zero)
}
test("limiting to non-positive number throws exception") {
intercept[IllegalArgumentException] {
val a = Rational(123, 456).limitDenominatorTo(-1)
}
intercept[IllegalArgumentException] {
val a = Rational(123, 456).limitTo(-1)
}
}
/**
* Finds the closest `Rational` to `a` whose denominator is no greater than
* `limit` by brute-force. This is used to compare with the version used by
* `Rational` which is a little harder to reason about. This just literally
* tries every denominator between 1 and `limit` and returns the `Rational`
* that was closest to `a`.
*/
def bruteForceLimitDen(a: Rational, limit: Int): Rational =
(1 to limit) map (BigInt(_)) flatMap { d =>
val ln = (a * d).toBigInt
List(Rational(ln - 1, d), Rational(ln, d), Rational(ln + 1, d))
} minBy (b => (b - a).abs)
// FIXME: for some reason the commented files seem to throw SBT/scalac into
// some kind of continuous compilation loop... YMMV :/
test("limitDenominatorTo valid number returns correct result") {
assert(Rational(6, 5) === Rational(23, 19).limitDenominatorTo(10))
//assert(Rational(-6, 5) === Rational(-23, 19).limitDenominatorTo(10))
val rng = new Random(9281)
val rationals = List.fill(100)(Rational(rng.nextInt, rng.nextInt.abs + 1))
rationals foreach { a =>
//assert(a.limitDenominatorTo(255) === bruteForceLimitDen(a, 255), {
// "%s != %s (original: %s)" format (
// a.limitDenominatorTo(255),
// bruteForceLimitDen(a, 255),
// a
// )
//})
}
}
test("limit large number to small number returns small number") {
assert(Rational(1231, 2).limitTo(12) === Rational(12))
assert(Rational(-321, 3).limitTo(7) === Rational(-7))
}
test("limitToInt makes rationals fit in Ints") {
val rng = new Random(2919234)
val rationals = List.fill(100)(Rational(BigInt(128, rng), BigInt(128, rng).abs + 1))
rationals foreach { a =>
val b = a.limitToInt
assert(b.numerator.isValidInt && b.denominator.isValidInt,
"%s (from %s) doesn't fit in Ints" format (b.toString, a.toString))
}
}
test("gcd returns the correct rational GCD") {
assert(Rational(1, 2).gcd(Rational(1, 3)) === Rational(1, 6))
assert(Rational(11, 12).gcd(Rational(43, 22)) === Rational(1, 132))
assert(Rational(-1, 2).gcd(Rational(1, 3)) === Rational(1, 6))
assert(Rational(11, 12).gcd(Rational(-43, 22)) === Rational(1, 132))
val x = Rational("1234123412341234/87658765876587658764")
val y = Rational("1919191919191919191919/373737373737373737")
val z = Rational("1/287380324068203382157064120376241062")
assert(x.gcd(y) === z) // As confirmed by Wolfram Alpha
}
}
| lrytz/spire | tests/src/test/scala/spire/math/RationalTest.scala | Scala | mit | 8,372 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.util
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.data.DigitSplitter
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.SimpleFeatureType
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
/** Verifies that AccumuloSftBuilder configures table splitters identically whether the
  * splitter is given as a fully-qualified class name string or as a Class object. */
@RunWith(classOf[JUnitRunner])
class AccumuloSftBuilderTest extends Specification {
sequential
"SpecBuilder" >> {
"configure table splitters as strings" >> {
// variant 1: splitter identified by its fully qualified class name
val sft1 = new AccumuloSftBuilder()
.intType("i")
.longType("l")
.recordSplitter(classOf[DigitSplitter].getName, Map("fmt" ->"%02d", "min" -> "0", "max" -> "99"))
.build("test")
// better - uses class directly (or at least less annoying)
val sft2 = new AccumuloSftBuilder()
.recordSplitter(classOf[DigitSplitter], Map("fmt" ->"%02d", "min" -> "0", "max" -> "99"))
.intType("i")
.longType("l")
.build("test")
// both variants must yield the same attributes and splitter user-data
def test(sft: SimpleFeatureType) = {
sft.getAttributeCount mustEqual 2
sft.getAttributeDescriptors.map(_.getLocalName) must containAllOf(List("i", "l"))
sft.getUserData.get(SimpleFeatureTypes.TABLE_SPLITTER) must be equalTo classOf[DigitSplitter].getName
val opts = sft.getUserData.get(SimpleFeatureTypes.TABLE_SPLITTER_OPTIONS).asInstanceOf[Map[String, String]]
opts.size must be equalTo 3
opts("fmt") must be equalTo "%02d"
opts("min") must be equalTo "0"
opts("max") must be equalTo "99"
}
List(sft1, sft2) forall test
}
}
}
| mdzimmerman/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/util/AccumuloSftBuilderTest.scala | Scala | apache-2.0 | 2,118 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package statements
import com.intellij.psi._
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScConstructorPattern, ScInfixPattern}
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScConstructor, ScReferenceElement, ScStableCodeReferenceElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression.calculateReturns
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScTemplateBody
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScTypeDefinition}
import org.jetbrains.plugins.scala.lang.psi.light.{PsiClassWrapper, StaticTraitScFunctionWrapper}
import org.jetbrains.plugins.scala.lang.psi.types.api.StdTypes
import org.jetbrains.plugins.scala.lang.psi.types.result.Typeable
import org.jetbrains.plugins.scala.macroAnnotations.{Cached, ModCount}
/**
 * A Scala function definition (a `def` with a body), as opposed to a bare declaration.
 * Provides recursion analysis (used e.g. for tail-recursion checks) and return-usage
 * collection over the function body.
 *
 * @author Alexander Podkhalyuzin
 * Date: 22.02.2008
 * Time: 9:49:36
 */
trait ScFunctionDefinition extends ScFunction with ScControlFlowOwner {
def body: Option[ScExpression]
def hasAssign: Boolean
def assignment: Option[PsiElement]
// explicit `return` statements plus implicit result expressions of the body
def returnUsages: Set[ScExpression] = innerReturnUsages(calculateReturns)
// A call can only be tail-recursive if the method cannot be overridden:
// it must live in an object, or be final/private, or sit in a final class.
def canBeTailRecursive: Boolean = getParent match {
case (_: ScTemplateBody) && Parent(Parent(owner: ScTypeDefinition)) =>
owner.isInstanceOf[ScObject] ||
owner.getModifierList.isFinal || {
val methodModifiers = getModifierList
methodModifiers.isPrivate || methodModifiers.isFinal
}
case _ => true
}
// detects @scala.annotation.tailrec via the annotation's canonical type text
def hasTailRecursionAnnotation: Boolean =
annotations.map(_.typeElement)
.flatMap(_.`type`().toOption)
.map(_.canonicalText)
.contains("_root_.scala.annotation.tailrec")
/** All references inside the body that resolve back to this function,
  * each tagged with whether it occurs in tail-call position. */
def recursiveReferences: Seq[RecursiveReference] = {
// cheap syntactic pre-filter, applied before the expensive isReferenceTo resolve
def quickCheck(ref: ScReferenceElement): Boolean = {
ref match {
case _: ScStableCodeReferenceElement =>
ref.getParent match {
case ChildOf(_: ScConstructor) =>
this.isConstructor && containingClass.name == ref.refName
case cp: ScConstructorPattern if cp.ref == ref =>
this.name == "unapply" || this.name == "unapplySeq"
case inf: ScInfixPattern if inf.operation == ref =>
this.name == "unapply" || this.name == "unapplySeq"
case _ => false
}
case _: ScReferenceExpression =>
if (this.name == "apply")
if (this.containingClass.isInstanceOf[ScObject]) this.containingClass.name == ref.refName
else true
else this.name == ref.refName
case _ => false
}
}
val recursiveReferences = body.toSeq
.flatMap(_.depthFirst())
.collect {
case element: ScReferenceElement if quickCheck(element) => element
}.filter(_.isReferenceTo(this))
// expressions in result (i.e. potential tail-call) position; only computed when needed
val expressions: Set[PsiElement] = recursiveReferences match {
case Seq() => Set.empty
case _ =>
// like calculateReturns, but also descends into the right operand of
// short-circuiting Boolean && / || expressions
def calculateExpandedReturns(expression: ScExpression): Set[ScExpression] = {
val visitor = new ScExpression.ReturnsVisitor {
private val booleanInstance = StdTypes.instance.Boolean
override def visitInfixExpression(infix: ScInfixExpr): Unit = {
infix match {
case ScInfixExpr(Typeable(`booleanInstance`), ElementText("&&" | "||"), right@Typeable(`booleanInstance`)) =>
acceptVisitor(right)
case _ => super.visitInfixExpression(infix)
}
}
}
expression.accept(visitor)
visitor.result
}
// an `if` without an else contributes the returns of its then-branch as well
def expandIf(expression: ScExpression): Set[ScExpression] = (expression match {
case ScIfStmt(_, Some(thenBranch), None) => calculateReturns(thenBranch).flatMap(expandIf)
case _ => Set.empty
}) ++ Set(expression)
innerReturnUsages(calculateExpandedReturns).flatMap(expandIf)
}
// walks up from the reference to the outermost enclosing call/infix/return
// expression, which is the candidate tail call
@scala.annotation.tailrec
def possiblyTailRecursiveCallFor(element: PsiElement): PsiElement = element.getParent match {
case call@(_: ScMethodCall |
_: ScGenericCall) => possiblyTailRecursiveCallFor(call)
case infix: ScInfixExpr if infix.operation == element => possiblyTailRecursiveCallFor(infix)
case statement: ScReturnStmt => statement
case _ => element
}
recursiveReferences.map { reference =>
RecursiveReference(reference, expressions(possiblyTailRecursiveCallFor(reference)))
}
}
def recursionType: RecursionType = recursiveReferences match {
case Seq() => RecursionType.NoRecursion
case seq if seq.forall(_.isTailCall) => RecursionType.TailRecursion
case _ => RecursionType.OrdinaryRecursion
}
override def controlFlowScope: Option[ScalaPsiElement] = body
@Cached(ModCount.getBlockModificationCount, this)
def getStaticTraitFunctionWrapper(cClass: PsiClassWrapper): StaticTraitScFunctionWrapper = {
new StaticTraitScFunctionWrapper(this, cClass)
}
// collects explicit `return` statements (not descending into nested functions)
// plus the returns computed by the supplied strategy, restricted to this file
private def innerReturnUsages(calculateReturns: ScExpression => Set[ScExpression]): Set[ScExpression] = {
def returnsIn(expression: ScExpression) =
expression.depthFirst(!_.isInstanceOf[ScFunction]).collect {
case statement: ScReturnStmt => statement
} ++ calculateReturns(expression)
body.toSet
.flatMap(returnsIn)
.filter(_.getContainingFile == getContainingFile)
}
}
object ScFunctionDefinition {
/** Extractor matching a function definition that actually has a body. */
object withBody {
def unapply(fun: ScFunctionDefinition): Option[ScExpression] = Option(fun).flatMap(_.body)
}
}
/** A reference from a function body back to the enclosing function;
  * isTailCall is true when the call occurs in tail position. */
case class RecursiveReference(element: ScReferenceElement, isTailCall: Boolean)
/** Classification of a function definition's self-recursion. */
trait RecursionType
object RecursionType {
case object NoRecursion extends RecursionType
case object OrdinaryRecursion extends RecursionType
case object TailRecursion extends RecursionType
} | jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/api/statements/ScFunctionDefinition.scala | Scala | apache-2.0 | 5979 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.expressions.utils
import org.apache.flink.api.common.typeinfo.{SqlTimeTypeInfo, TypeInformation}
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.table.api.Types
import org.apache.flink.table.functions.{FunctionContext, ScalarFunction}
import org.apache.flink.table.typeutils.TimeIntervalTypeInfo
import org.apache.flink.types.Row
import org.apache.commons.lang3.StringUtils
import org.junit.Assert
import java.lang.{Long => JLong}
import java.sql.{Date, Time, Timestamp}
import java.util.Random
import scala.annotation.varargs
import scala.collection.mutable
import scala.io.Source
// Simple POJO passed as a scalar-function argument in tests (see Func2).
case class SimplePojo(name: String, age: Int)
/** Identity function on an Int argument. */
@SerialVersionUID(1L)
object Func0 extends ScalarFunction {
def eval(index: Int): Int = {
index
}
}
/** Increments its argument by one; overloaded for Integer, Byte, Short and Float. */
@SerialVersionUID(1L)
object Func1 extends ScalarFunction {
def eval(index: Integer): Integer = {
index + 1
}
def eval(b: Byte): Byte = (b + 1).toByte
def eval(s: Short): Short = (s + 1).toShort
def eval(f: Float): Float = f + 1
}
/** Formats an Integer, a String and a SimplePojo into one string. */
@SerialVersionUID(1L)
object Func2 extends ScalarFunction {
def eval(index: Integer, str: String, pojo: SimplePojo): String = {
s"$index and $str and $pojo"
}
}
/** Formats an Integer and a String into one string. */
@SerialVersionUID(1L)
object Func3 extends ScalarFunction {
def eval(index: Integer, str: String): String = {
s"$index and $str"
}
}
/** Always returns null (as a boxed Integer). */
@SerialVersionUID(1L)
object Func4 extends ScalarFunction {
def eval(): Integer = {
null
}
}
/** Always returns -1. */
@SerialVersionUID(1L)
object Func5 extends ScalarFunction {
def eval(): Int = {
-1
}
}
/** Passes a date, time and timestamp through unchanged as a tuple. */
@SerialVersionUID(1L)
object Func6 extends ScalarFunction {
def eval(date: Date, time: Time, timestamp: Timestamp): (Date, Time, Timestamp) = {
(date, time, timestamp)
}
}
/** Sum of two boxed Integers. */
@SerialVersionUID(1L)
object Func7 extends ScalarFunction {
def eval(a: Integer, b: Integer): Integer = {
a + b
}
}
/** Overload-resolution probe: the returned marker identifies which overload was picked. */
@SerialVersionUID(1L)
object Func8 extends ScalarFunction {
def eval(a: Int): String = {
"a"
}
def eval(a: Int, b: Int): String = {
"b"
}
def eval(a: String, b: String): String = {
"c"
}
}
/** Formats two Ints and a Long into one string. */
@SerialVersionUID(1L)
object Func9 extends ScalarFunction {
def eval(a: Int, b: Int, c: Long): String = {
s"$a and $b and $c"
}
}
/** Passes a Long through but declares SQL TIMESTAMP as its result type. */
@SerialVersionUID(1L)
object Func10 extends ScalarFunction {
def eval(c: Long): Long = {
c
}
override def getResultType(signature: Array[Class[_]]): TypeInformation[_] =
SqlTimeTypeInfo.TIMESTAMP
}
/** Formats an Int and a Long into one string. */
@SerialVersionUID(1L)
object Func11 extends ScalarFunction {
def eval(a: Int, b: Long): String = {
s"$a and $b"
}
}
/** Passes a Long through but declares INTERVAL_MILLIS as its result type. */
@SerialVersionUID(1L)
object Func12 extends ScalarFunction {
def eval(a: Long): Long = {
a
}
override def getResultType(signature: Array[Class[_]]): TypeInformation[_] = {
TimeIntervalTypeInfo.INTERVAL_MILLIS
}
}
/** Throws unconditionally; used to assert that an expression was never evaluated. */
@SerialVersionUID(1L)
object ShouldNotExecuteFunc extends ScalarFunction {
def eval(s: String): Boolean = {
throw new Exception("This func should never be executed")
}
}
/** Lifecycle-checking function: fails the test if open/eval/close are called
  * out of order or more than once; eval itself returns index + 1. */
@SerialVersionUID(1L)
class RichFunc0 extends ScalarFunction {
var openCalled = false
var closeCalled = false
override def open(context: FunctionContext): Unit = {
super.open(context)
if (openCalled) {
Assert.fail("Open called more than once.")
} else {
openCalled = true
}
if (closeCalled) {
Assert.fail("Close called before open.")
}
}
def eval(index: Int): Int = {
if (!openCalled) {
Assert.fail("Open was not called before eval.")
}
if (closeCalled) {
Assert.fail("Close called before eval.")
}
index + 1
}
override def close(): Unit = {
super.close()
if (closeCalled) {
Assert.fail("Close called more than once.")
} else {
closeCalled = true
}
if (!openCalled) {
Assert.fail("Open was not called before close.")
}
}
}
/** Adds the job parameter "int.value" (default 0) to its argument;
  * the addend is loaded in open() and reset in close(). */
@SerialVersionUID(1L)
class RichFunc1 extends ScalarFunction {
var added: Int = Int.MaxValue
override def open(context: FunctionContext): Unit = {
added = context.getJobParameter("int.value", "0").toInt
}
def eval(index: Int): Int = {
index + added
}
override def close(): Unit = {
added = Int.MaxValue
}
}
/** Prefixes its argument with the job parameter "string.value" and '#';
  * the prefix is loaded in open() and reset in close(). */
@SerialVersionUID(1L)
class RichFunc2 extends ScalarFunction {
var prefix = "ERROR_VALUE"
override def open(context: FunctionContext): Unit = {
prefix = context.getJobParameter("string.value", "")
}
def eval(value: String): String = {
prefix + "#" + value
}
override def close(): Unit = {
prefix = "ERROR_VALUE"
}
}
/** Membership test against a word list read from the distributed-cache file
  * registered under the name "words" (one trimmed word per line). */
@SerialVersionUID(1L)
class RichFunc3 extends ScalarFunction {
private val words = mutable.HashSet[String]()
override def open(context: FunctionContext): Unit = {
val file = context.getCachedFile("words")
for (line <- Source.fromFile(file.getCanonicalPath).getLines) {
words.add(line.trim)
}
}
def eval(value: String): Boolean = {
words.contains(value)
}
override def close(): Unit = {
words.clear()
}
}
/** Prepends the constructor-supplied prefix (joined by '-') to its argument. */
@SerialVersionUID(1L)
class Func13(prefix: String) extends ScalarFunction {
def eval(a: String): String = {
s"$prefix-$a"
}
}
/** Sum of a Java-compatible (@varargs) variable-length Int argument list. */
@SerialVersionUID(1L)
object Func14 extends ScalarFunction {
@varargs
def eval(a: Int*): Int = {
a.sum
}
}
/** Overloads a varargs eval with a fixed-arity one: the varargs form appends
  * the number of extra arguments; the unary form is the identity. */
@SerialVersionUID(1L)
object Func15 extends ScalarFunction {
@varargs
def eval(a: String, b: Int*): String = {
a + b.length
}
def eval(a: String): String = {
a
}
}
/** Joins a Seq of strings with ", ". */
@SerialVersionUID(1L)
object Func16 extends ScalarFunction {
def eval(a: Seq[String]): String = {
a.mkString(", ")
}
}
/** Scala varargs deliberately left without @varargs to provoke a failure. */
@SerialVersionUID(1L)
object Func17 extends ScalarFunction {
// Without @varargs, we will throw an exception
def eval(a: String*): String = {
a.mkString(", ")
}
}
/** True when str starts with the given prefix. */
@SerialVersionUID(1L)
object Func18 extends ScalarFunction {
def eval(str: String, prefix: String): Boolean = {
str.startsWith(prefix)
}
}
/** Null-tolerant hash function used in scalar-function tests. */
@SerialVersionUID(1L)
object Func19 extends ScalarFunction {
/** Hash code of obj, or 0 when obj is null. */
def eval(obj: Object): Int =
if (obj == null) 0 else obj.hashCode()
/** Hash code of obj; when obj is null, falls back to max(len, 0). */
def eval(obj: Object, len: Int): Int =
if (obj == null) Math.max(len, 0) else obj.hashCode()
}
/** Passes a Row through unchanged, declaring an explicit nested-row type
  * (INT, BOOLEAN, ROW(INT, INT, INT)) for both parameter and result. */
@SerialVersionUID(1L)
object Func20 extends ScalarFunction {
def eval(row: Row): Row = {
row
}
override def getParameterTypes(signature: Array[Class[_]]): Array[TypeInformation[_]] = {
Array(new RowTypeInfo(Types.INT, Types.BOOLEAN,
new RowTypeInfo(Types.INT, Types.INT, Types.INT)))
}
override def getResultType(signature: Array[Class[_]]): TypeInformation[_] = {
new RowTypeInfo(Types.INT, Types.BOOLEAN,
new RowTypeInfo(Types.INT, Types.INT, Types.INT))
}
}
/** Overload-resolution probe over a class hierarchy: the Student overload
  * marks its result with "student#". */
@SerialVersionUID(1L)
object Func21 extends ScalarFunction {
def eval(p: People): String = {
p.name
}
def eval(p: Student): String = {
"student#" + p.name
}
}
/** Like Func21, but resolves overloads on arrays of the hierarchy types;
  * returns the name of the first element. */
@SerialVersionUID(1L)
object Func22 extends ScalarFunction {
def eval(a: Array[People]): String = {
a.head.name
}
def eval(a: Array[Student]): String = {
"student#" + a.head.name
}
}
/** Builds a Row ("star", a, b, c) with an explicit result row type. */
@SerialVersionUID(1L)
object Func23 extends ScalarFunction {
def eval(a: Integer, b: JLong, c: String): Row = {
Row.of("star", a, b, c)
}
override def getResultType(signature: Array[Class[_]]): TypeInformation[_] =
Types.ROW(Types.STRING, Types.INT, Types.LONG, Types.STRING)
}
/** Non-deterministic function: adds the same random offset to the argument in
  * both fields of the result row; isDeterministic is overridden to false. */
@SerialVersionUID(1L)
object Func25 extends ScalarFunction {
private val random = new Random()
def eval(a: Integer): Row = {
// one draw per call, shared by both output columns
val col = random.nextInt()
Row.of(Integer.valueOf(a + col), Integer.valueOf(a + col))
}
override def isDeterministic: Boolean = false
override def getResultType(signature: Array[Class[_]]): TypeInformation[_] =
Types.ROW(Types.INT, Types.INT)
}
/** Builds a Row (a, b + 1, c, d) with an explicit result row type. */
@SerialVersionUID(1L)
object Func24 extends ScalarFunction {
def eval(a: String, b: Integer, c: JLong, d: String): Row = {
Row.of(a, Integer.valueOf(b + 1), c, d)
}
override def getResultType(signature: Array[Class[_]]): TypeInformation[_] =
Types.ROW(Types.STRING, Types.INT, Types.LONG, Types.STRING)
}
/**
 * A scalar function that always returns TRUE if opened correctly.
 */
@SerialVersionUID(1L)
class FuncWithOpen extends ScalarFunction {
// flipped to true by open() and back to false by close()
private var permitted: Boolean = false
override def open(context: FunctionContext): Unit = {
permitted = true
}
def eval(x: Int): Boolean = {
permitted
}
override def close(): Unit = {
permitted = false
}
}
/** Splits x on the whole separator sep and returns the index-th token,
  * or null when the index is out of range; determinism is configurable. */
@SerialVersionUID(1L)
class SplitUDF(deterministic: Boolean) extends ScalarFunction {
def eval(x: String, sep: String, index: Int): String = {
val parts = StringUtils.splitByWholeSeparator(x, sep)
if (index < parts.length) parts(index) else null
}
override def isDeterministic: Boolean = deterministic
}
// Small class hierarchy used to exercise overload resolution on (arrays of) subclasses.
class People(val name: String)
class Student(name: String) extends People(name)
class GraduatedStudent(name: String) extends Student(name)
| fhueske/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/expressions/utils/userDefinedScalarFunctions.scala | Scala | apache-2.0 | 9,739 |
package org.randi3.utility
import org.randi3.dao._
import org.randi3.schema.{LiquibaseUtil, AdaptiveRandomizationSchema}
object TestingEnvironmentAdaptive extends TestingEnvironment{
  // Schema definition for the adaptive-randomization tables, bound to the shared test driver.
  val schemaBlock = new AdaptiveRandomizationSchema(driver)
  // Side effect at object initialization: run the Liquibase changelog so the
  // database is migrated before the DAO below is ever used.
  LiquibaseUtil.updateDatabase(database, "db/db.changelog-master-adaptive.xml", this.getClass.getClassLoader)
  // lazy: the DAO is only constructed on first access, after the migration above.
  lazy val adaptiveRandomizationDao = new ResponseAdaptiveRandomizationDao(database, driver)
}
} | dschrimpf/randi3-method-adaptive | src/test/scala/org/randi3/utility/TestingEnvironmentAdaptive.scala | Scala | gpl-3.0 | 455 |
package mesosphere.marathon
package state
import com.wix.accord._
import com.wix.accord.combinators.GeneralPurposeCombinators
import com.wix.accord.dsl._
import mesosphere.marathon.Protos.Constraint
import mesosphere.marathon.api.serialization._
import mesosphere.marathon.api.v2.Validation._
import mesosphere.marathon.api.v2.validation.NetworkValidation
import mesosphere.marathon.core.externalvolume.ExternalVolumes
import mesosphere.marathon.core.health._
import mesosphere.marathon.core.plugin.PluginManager
import mesosphere.marathon.core.pod.{ HostNetwork, Network }
import mesosphere.marathon.core.readiness.ReadinessCheck
import mesosphere.marathon.plugin.validation.RunSpecValidator
import mesosphere.marathon.raml.{ App, Apps, Resources }
import mesosphere.marathon.state.Container.{ Docker, MesosAppC, MesosDocker }
import mesosphere.marathon.state.VersionInfo._
import mesosphere.marathon.stream.Implicits._
import mesosphere.mesos.TaskBuilder
import mesosphere.mesos.protos.{ Resource, ScalarResource }
import org.apache.mesos.{ Protos => mesos }
import scala.concurrent.duration._
case class AppDefinition(
id: PathId,
override val cmd: Option[String] = App.DefaultCmd,
override val args: Seq[String] = App.DefaultArgs,
user: Option[String] = App.DefaultUser,
env: Map[String, EnvVarValue] = AppDefinition.DefaultEnv,
instances: Int = App.DefaultInstances,
resources: Resources = Apps.DefaultResources,
executor: String = App.DefaultExecutor,
constraints: Set[Constraint] = AppDefinition.DefaultConstraints,
fetch: Seq[FetchUri] = AppDefinition.DefaultFetch,
portDefinitions: Seq[PortDefinition] = AppDefinition.DefaultPortDefinitions,
requirePorts: Boolean = App.DefaultRequirePorts,
backoffStrategy: BackoffStrategy = AppDefinition.DefaultBackoffStrategy,
override val container: Option[Container] = AppDefinition.DefaultContainer,
healthChecks: Set[HealthCheck] = AppDefinition.DefaultHealthChecks,
readinessChecks: Seq[ReadinessCheck] = AppDefinition.DefaultReadinessChecks,
taskKillGracePeriod: Option[FiniteDuration] = AppDefinition.DefaultTaskKillGracePeriod,
dependencies: Set[PathId] = AppDefinition.DefaultDependencies,
upgradeStrategy: UpgradeStrategy = AppDefinition.DefaultUpgradeStrategy,
labels: Map[String, String] = AppDefinition.DefaultLabels,
acceptedResourceRoles: Set[String] = AppDefinition.DefaultAcceptedResourceRoles,
networks: Seq[Network] = AppDefinition.DefaultNetworks,
versionInfo: VersionInfo = VersionInfo.OnlyVersion(Timestamp.now()),
secrets: Map[String, Secret] = AppDefinition.DefaultSecrets,
override val unreachableStrategy: UnreachableStrategy = AppDefinition.DefaultUnreachableStrategy,
override val killSelection: KillSelection = KillSelection.DefaultKillSelection,
tty: Option[Boolean] = AppDefinition.DefaultTTY) extends RunSpec
with plugin.ApplicationSpec with MarathonState[Protos.ServiceDefinition, AppDefinition] {
/**
* As an optimization, we precompute and cache the hash of this object
* This is done to speed up deployment plan computation.
*/
override val hashCode: Int = scala.util.hashing.MurmurHash3.productHash(this)
import mesosphere.mesos.protos.Implicits._
/* The following requirements are either validated at the API layer, or precluded by our normalization layer.
* However, we had instances of rogue definitions of apps in our tests that were causing related business logic to be
* overly complex and handle state that should not exist */
require(networks.nonEmpty, "an application must declare at least one network")
require(
(!networks.exists(!_.eq(HostNetwork))) || portDefinitions.isEmpty,
s"non-host-mode networking ($networks) and ports/portDefinitions ($portDefinitions) are not allowed at the same time")
require(
!(networks.exists(_.eq(HostNetwork)) && container.fold(false)(c => c.portMappings.nonEmpty)),
"port-mappings may not be used in conjunction with host networking")
require(
!(portDefinitions.nonEmpty && container.fold(false)(_.portMappings.nonEmpty)),
"portDefinitions and container.portMappings are not allowed at the same time"
)
// Our normalization layer replaces hostPort None to Some(0) for bridge networking
require(
!(networks.hasBridgeNetworking && container.fold(false)(c => c.portMappings.exists(_.hostPort.isEmpty))),
"bridge networking requires that every host-port in a port-mapping is non-empty (but may be zero)")
val portNumbers: Seq[Int] = portDefinitions.map(_.port)
override val version: Timestamp = versionInfo.version
override val isSingleInstance: Boolean = labels.get(Apps.LabelSingleInstanceApp).contains("true")
override val volumes: Seq[Volume] = container.map(_.volumes.map(_.volume)).getOrElse(Seq.empty)
override val volumeMounts: Seq[VolumeMount] = container.map(_.volumes.map(_.mount)).getOrElse(Seq.empty)
override val persistentVolumes: Seq[PersistentVolume] =
container.map(_.volumes.map(_.volume).collect { case pv: PersistentVolume => pv }).getOrElse(Seq.empty)
override val persistentVolumeMounts: Seq[VolumeMount] =
container.map(_.volumes.collect { case VolumeWithMount(_: PersistentVolume, m) => m }).getOrElse(Seq.empty)
override val externalVolumes: Seq[ExternalVolume] =
container.map(_.volumes.map(_.volume).collect { case pv: ExternalVolume => pv }).getOrElse(Seq.empty)
override val diskForPersistentVolumes: Double = persistentVolumes.map(_.persistent.size).sum.toDouble
private[state] val persistentVolumesWithMounts: Seq[VolumeWithMount[PersistentVolume]] =
container.map(_.volumes.collect {
case vm @ VolumeWithMount(_: PersistentVolume, _) => vm.asInstanceOf[VolumeWithMount[PersistentVolume]]
}).getOrElse(Seq.empty)
/**
 * Serializes this app definition to its protobuf representation.
 * Optional pieces (container, readiness checks, kill grace period, tty,
 * accepted resource roles) are only written when present; the scaling/config
 * timestamps are only written for a FullVersionInfo.
 */
def toProto: Protos.ServiceDefinition = {
  // Build the command info without any task-specific data (no task id, host or ports).
  val commandInfo = TaskBuilder.commandInfo(
    runSpec = this,
    taskId = None,
    host = None,
    hostPorts = Seq.empty,
    envPrefix = None
  )
  val cpusResource = ScalarResource(Resource.CPUS, resources.cpus)
  val memResource = ScalarResource(Resource.MEM, resources.mem)
  val diskResource = ScalarResource(Resource.DISK, resources.disk)
  val gpusResource = ScalarResource(Resource.GPUS, resources.gpus.toDouble)
  // App labels are carried as mesos key/value Parameters.
  val appLabels = labels.map {
    case (key, value) =>
      mesos.Parameter.newBuilder
        .setKey(key)
        .setValue(value)
        .build
  }
  val builder = Protos.ServiceDefinition.newBuilder
    .setId(id.toString)
    .setCmd(commandInfo)
    .setInstances(instances)
    .addAllPortDefinitions(portDefinitions.map(PortDefinitionSerializer.toProto).asJava)
    .setRequirePorts(requirePorts)
    .setBackoff(backoffStrategy.backoff.toMillis)
    .setBackoffFactor(backoffStrategy.factor)
    .setMaxLaunchDelay(backoffStrategy.maxLaunchDelay.toMillis)
    .setExecutor(executor)
    .addAllConstraints(constraints.asJava)
    .addResources(cpusResource)
    .addResources(memResource)
    .addResources(diskResource)
    .addResources(gpusResource)
    .addAllHealthChecks(healthChecks.map(_.toProto).asJava)
    .setUpgradeStrategy(upgradeStrategy.toProto)
    .addAllDependencies(dependencies.map(_.toString).asJava)
    .addAllLabels(appLabels.asJava)
    .addAllSecrets(secrets.map(SecretsSerializer.toProto).asJava)
    .addAllEnvVarReferences(env.flatMap(EnvVarRefSerializer.toProto).asJava)
    .setUnreachableStrategy(unreachableStrategy.toProto)
    .setKillSelection(killSelection.toProto)
  // Only serialize tty when it is Some(true).
  tty.filter(tty => tty).foreach(builder.setTty(_))
  networks.foreach { network => builder.addNetworks(Network.toProto(network)) }
  container.foreach { c => builder.setContainer(ContainerSerializer.toProto(c)) }
  readinessChecks.foreach { r => builder.addReadinessCheckDefinition(ReadinessCheckSerializer.toProto(r)) }
  taskKillGracePeriod.foreach { t => builder.setTaskKillGracePeriod(t.toMillis) }
  if (acceptedResourceRoles.nonEmpty) {
    val roles = Protos.ResourceRoles.newBuilder()
    roles.addAllRole(acceptedResourceRoles.asJava)
    builder.setAcceptedResourceRoles(roles)
  }
  builder.setVersion(version.toString)
  // Extra timestamps are only available on FullVersionInfo.
  versionInfo match {
    case fullInfo: FullVersionInfo =>
      builder.setLastScalingAt(fullInfo.lastScalingAt.millis)
      builder.setLastConfigChangeAt(fullInfo.lastConfigChangeAt.millis)
    case _ => // ignore
  }
  builder.build
}
override def withInstances(instances: Int): RunSpec = copy(instances = instances)
/**
 * Builds an AppDefinition from its protobuf representation, falling back to
 * this instance's values for resources that are absent from the proto.
 */
def mergeFromProto(proto: Protos.ServiceDefinition): AppDefinition = {
  // Plain environment variables from the command's environment block.
  val envMap: Map[String, EnvVarValue] = EnvVarValue(
    proto.getCmd.getEnvironment.getVariablesList.map {
      v => v.getName -> v.getValue
    }(collection.breakOut))
  // Secret-backed environment variable references, stored separately in the proto.
  val envRefs: Map[String, EnvVarValue] =
    proto.getEnvVarReferencesList.flatMap(EnvVarRefSerializer.fromProto)(collection.breakOut)
  val resourcesMap: Map[String, Double] =
    proto.getResourcesList.map {
      r => r.getName -> (r.getScalar.getValue: Double)
    }(collection.breakOut)
  val argsOption = proto.getCmd.getArgumentsList.toSeq
  //Precondition: either args or command is defined
  val commandOption =
    if (argsOption.isEmpty && proto.getCmd.hasValue && proto.getCmd.getValue.nonEmpty)
      Some(proto.getCmd.getValue)
    else None
  val containerOption = if (proto.hasContainer) Some(ContainerSerializer.fromProto(proto.getContainer)) else None
  val acceptedResourceRoles = proto.getAcceptedResourceRoles.getRoleList.toSet
  val versionInfoFromProto = AppDefinition.versionInfoFrom(proto)
  val networks: Seq[Network] = proto.getNetworksList.flatMap(Network.fromProto)(collection.breakOut)
  val tty: Option[Boolean] = if (proto.hasTty) Some(proto.getTty) else AppDefinition.DefaultTTY
  // TODO (gkleiman): we have to be able to read the ports from the deprecated field in order to perform migrations
  // until the deprecation cycle is complete.
  val portDefinitions =
    if (proto.getPortsCount > 0) PortDefinitions(proto.getPortsList.map(_.intValue)(collection.breakOut): _*)
    else proto.getPortDefinitionsList.map(PortDefinitionSerializer.fromProto).to[Seq]
  val unreachableStrategy =
    if (proto.hasUnreachableStrategy)
      UnreachableStrategy.fromProto(proto.getUnreachableStrategy)
    else
      UnreachableStrategy.default(isResident)
  AppDefinition(
    id = PathId(proto.getId),
    user = if (proto.getCmd.hasUser) Some(proto.getCmd.getUser) else None,
    cmd = commandOption,
    args = argsOption,
    executor = proto.getExecutor,
    instances = proto.getInstances,
    portDefinitions = portDefinitions,
    requirePorts = proto.getRequirePorts,
    backoffStrategy = BackoffStrategy(
      backoff = proto.getBackoff.milliseconds,
      factor = proto.getBackoffFactor,
      maxLaunchDelay = proto.getMaxLaunchDelay.milliseconds),
    constraints = proto.getConstraintsList.toSet,
    acceptedResourceRoles = acceptedResourceRoles,
    // Missing scalar resources fall back to the current (receiver's) values.
    resources = Resources(
      cpus = resourcesMap.getOrElse(Resource.CPUS, this.resources.cpus),
      mem = resourcesMap.getOrElse(Resource.MEM, this.resources.mem),
      disk = resourcesMap.getOrElse(Resource.DISK, this.resources.disk),
      gpus = resourcesMap.getOrElse(Resource.GPUS, this.resources.gpus.toDouble).toInt
    ),
    env = envMap ++ envRefs,
    fetch = proto.getCmd.getUrisList.map(FetchUri.fromProto)(collection.breakOut),
    container = containerOption,
    healthChecks = proto.getHealthChecksList.map(HealthCheck.fromProto).toSet,
    readinessChecks =
      proto.getReadinessCheckDefinitionList.map(ReadinessCheckSerializer.fromProto)(collection.breakOut),
    taskKillGracePeriod = if (proto.hasTaskKillGracePeriod) Some(proto.getTaskKillGracePeriod.milliseconds)
    else None,
    labels = proto.getLabelsList.map { p => p.getKey -> p.getValue }(collection.breakOut),
    versionInfo = versionInfoFromProto,
    upgradeStrategy =
      if (proto.hasUpgradeStrategy) UpgradeStrategy.fromProto(proto.getUpgradeStrategy)
      else UpgradeStrategy.empty,
    dependencies = proto.getDependenciesList.map(PathId(_))(collection.breakOut),
    networks = if (networks.isEmpty) AppDefinition.DefaultNetworks else networks,
    secrets = proto.getSecretsList.map(SecretsSerializer.fromProto)(collection.breakOut),
    unreachableStrategy = unreachableStrategy,
    killSelection = KillSelection.fromProto(proto.getKillSelection),
    tty = tty
  )
}
val hostPorts: Seq[Option[Int]] =
container.withFilter(_.portMappings.nonEmpty).map(_.hostPorts).getOrElse(portNumbers.map(Some(_)))
val servicePorts: Seq[Int] =
container.withFilter(_.portMappings.nonEmpty).map(_.servicePorts).getOrElse(portNumbers)
/** should be kept in sync with [[mesosphere.marathon.api.v2.validation.AppValidation.portIndices]] */
private val portIndices: Range = hostPorts.indices
val hasDynamicServicePorts: Boolean = servicePorts.contains(AppDefinition.RandomPortValue)
/** Parses the serialized ServiceDefinition and merges it into this app definition. */
def mergeFromProto(bytes: Array[Byte]): AppDefinition =
  mergeFromProto(Protos.ServiceDefinition.parseFrom(bytes))
/**
* Returns whether this is a scaling change only.
*/
def isOnlyScaleChange(to: RunSpec): Boolean = !isUpgrade(to) && (instances != to.instances)
/**
* True if the given app definition is a change to the current one in terms of runtime characteristics
* of all deployed tasks of the current app, otherwise false.
*/
def isUpgrade(to: RunSpec): Boolean = to match {
  case to: AppDefinition =>
    // Same app (id must match) but with at least one runtime-relevant field changed.
    // Note: `instances` and `versionInfo` are deliberately NOT compared here —
    // a pure scaling change is not an upgrade (see isOnlyScaleChange/needsRestart).
    id == to.id && {
      cmd != to.cmd ||
        args != to.args ||
        user != to.user ||
        env != to.env ||
        resources != to.resources ||
        executor != to.executor ||
        constraints != to.constraints ||
        fetch != to.fetch ||
        portDefinitions != to.portDefinitions ||
        requirePorts != to.requirePorts ||
        backoffStrategy != to.backoffStrategy ||
        container != to.container ||
        healthChecks != to.healthChecks ||
        taskKillGracePeriod != to.taskKillGracePeriod ||
        dependencies != to.dependencies ||
        upgradeStrategy != to.upgradeStrategy ||
        labels != to.labels ||
        acceptedResourceRoles != to.acceptedResourceRoles ||
        networks != to.networks ||
        readinessChecks != to.readinessChecks ||
        secrets != to.secrets ||
        unreachableStrategy != to.unreachableStrategy ||
        killSelection != to.killSelection ||
        tty != to.tty
    }
  case _ =>
    // A validation rule will ensure, this can not happen
    throw new IllegalStateException("Can't change app to pod")
}
/**
* Returns the changed app definition that is marked for restarting.
*/
def markedForRestarting: AppDefinition = copy(versionInfo = VersionInfo.NoVersion)
/**
* Returns true if we need to restart all tasks.
*
* This can either be caused by changed configuration (e.g. a new cmd, a new docker image version)
* or by a forced restart.
*/
def needsRestart(to: RunSpec): Boolean = this.versionInfo != to.versionInfo || isUpgrade(to)
val portNames: Seq[String] = {
def fromPortMappings = container.map(_.portMappings.flatMap(_.name)).getOrElse(Seq.empty)
def fromPortDefinitions = portDefinitions.flatMap(_.name)
if (networks.hasNonHostNetworking) fromPortMappings else fromPortDefinitions
}
}
@SuppressWarnings(Array("IsInstanceOf")) // doesn't work well in the validation macros?!
object AppDefinition extends GeneralPurposeCombinators {
type AppKey = PathId
val RandomPortValue: Int = 0
val RandomPortDefinition: PortDefinition = PortDefinition(RandomPortValue, "tcp", None, Map.empty[String, String])
// App defaults
val DefaultId = PathId.empty
val DefaultEnv = Map.empty[String, EnvVarValue]
val DefaultConstraints = Set.empty[Constraint]
val DefaultFetch: Seq[FetchUri] = FetchUri.empty
val DefaultPortDefinitions: Seq[PortDefinition] = Nil
val DefaultBackoffStrategy = BackoffStrategy(
App.DefaultBackoffSeconds.seconds, App.DefaultMaxLaunchDelaySeconds.seconds, App.DefaultBackoffFactor)
val DefaultContainer = Option.empty[Container]
val DefaultHealthChecks = Set.empty[HealthCheck]
val DefaultReadinessChecks = Seq.empty[ReadinessCheck]
val DefaultTaskKillGracePeriod = Option.empty[FiniteDuration]
val DefaultDependencies = Set.empty[PathId]
val DefaultUpgradeStrategy: UpgradeStrategy = UpgradeStrategy.empty
val DefaultSecrets = Map.empty[String, Secret]
val DefaultUnreachableStrategy = UnreachableStrategy.default(resident = false)
val DefaultLabels = Map.empty[String, String]
val DefaultIsResident = false
/**
* This default is only used in tests
*/
val DefaultAcceptedResourceRoles = Set.empty[String]
val DefaultTTY: Option[Boolean] = None
/**
* should be kept in sync with `Apps.DefaultNetworks`
*/
val DefaultNetworks = Seq[Network](HostNetwork)
def fromProto(proto: Protos.ServiceDefinition): AppDefinition =
AppDefinition(id = DefaultId).mergeFromProto(proto)
def versionInfoFrom(proto: Protos.ServiceDefinition): VersionInfo = {
if (proto.hasLastScalingAt)
FullVersionInfo(
version = Timestamp(proto.getVersion),
lastScalingAt = Timestamp(proto.getLastScalingAt),
lastConfigChangeAt = Timestamp(proto.getLastConfigChangeAt)
)
else
OnlyVersion(Timestamp(proto.getVersion))
}
/**
* We cannot validate HealthChecks here, because it would break backwards compatibility in weird ways.
* If users had already one invalid app definition, each deployment would cause a complete revalidation of
* the root group including the invalid one.
* Until the user changed all invalid apps, the user would get weird validation
* errors for every deployment potentially unrelated to the deployed apps.
*/
def validAppDefinition(
enabledFeatures: Set[String])(implicit pluginManager: PluginManager): Validator[AppDefinition] =
validator[AppDefinition] { app =>
app.id is valid and PathId.absolutePathValidator and PathId.nonEmptyPath
app.dependencies is every(PathId.pathIdValidator)
} and validBasicAppDefinition(enabledFeatures) and pluginValidators
private def pluginValidators(implicit pluginManager: PluginManager): Validator[AppDefinition] =
new Validator[AppDefinition] {
override def apply(app: AppDefinition): Result = {
val plugins = pluginManager.plugins[RunSpecValidator]
new And(plugins: _*).apply(app)
}
}
// Valid combinations of cmd/args/container:
//   - exactly one of cmd or args (container optional), or
//   - neither cmd nor args, but a container with an image (Docker/AppC).
// Declaring both cmd and args is always invalid.
private val containsCmdArgsOrContainer: Validator[AppDefinition] =
  isTrue("AppDefinition must either contain one of 'cmd' or 'args', and/or a 'container'.") { app =>
    val cmd = app.cmd.nonEmpty
    val args = app.args.nonEmpty
    // Only image-bearing container types count; a bare Mesos container does not.
    val container = app.container.exists {
      case _: MesosDocker => true
      case _: MesosAppC => true
      case _: Container.Docker => true
      case _ => false
    }
    // xor: exactly one of cmd/args; otherwise require no cmd+args clash and an image container.
    (cmd ^ args) || (!(cmd && args) && container)
  }
private val complyWithMigrationAPI: Validator[AppDefinition] =
  isTrue("DCOS_PACKAGE_FRAMEWORK_NAME and DCOS_MIGRATION_API_PATH must be defined" +
    " when using DCOS_MIGRATION_API_VERSION") { app =>
    // Apps that do not declare a (non-empty) migration API version are trivially
    // compliant. Apps that do must declare version "v1" together with a non-empty
    // framework name and migration API path.
    app.labels.get(Apps.LabelDcosMigrationApiVersion).forall { apiVersion =>
      apiVersion.isEmpty || (
        apiVersion == "v1" &&
        app.labels.get(Apps.LabelDcosPackageFrameworkName).exists(_.nonEmpty) &&
        app.labels.get(Apps.LabelDcosMigrationApiPath).exists(_.nonEmpty))
    }
  }
private val complyWithSingleInstanceLabelRules: Validator[AppDefinition] =
isTrue("Single instance app may only have one instance") { app =>
(!app.isSingleInstance) || (app.instances <= 1)
}
private val complyWithReadinessCheckRules: Validator[AppDefinition] = validator[AppDefinition] { app =>
app.readinessChecks.size should be <= 1
app.readinessChecks is every(ReadinessCheck.readinessCheckValidator(app))
}
private val complyWithUpgradeStrategyRules: Validator[AppDefinition] = validator[AppDefinition] { appDef =>
(appDef.isSingleInstance is false) or (appDef.upgradeStrategy is UpgradeStrategy.validForSingleInstanceApps)
(appDef.isResident is false) or (appDef.upgradeStrategy is UpgradeStrategy.validForResidentTasks)
}
private def complyWithGpuRules(enabledFeatures: Set[String]): Validator[AppDefinition] =
conditional[AppDefinition](_.resources.gpus > 0) {
isTrue[AppDefinition]("GPU resources only work with the Mesos containerizer") { app =>
app.container match {
case Some(_: Docker) => false
case _ => true
}
} and featureEnabled(enabledFeatures, Features.GPU_RESOURCES)
}
private val haveAtMostOneMesosHealthCheck: Validator[AppDefinition] =
isTrue[AppDefinition]("AppDefinition can contain at most one Mesos health check") { appDef =>
// Previous versions of Marathon allowed saving an app definition with more than one command health check, and
// we don't want to make them invalid
(appDef.healthChecks.count(_.isInstanceOf[MesosHealthCheck]) -
appDef.healthChecks.count(_.isInstanceOf[MesosCommandHealthCheck])) <= 1
}
private[state] val requireUnreachableDisabledForResidentTasks =
isTrue[AppDefinition]("unreachableStrategy must be disabled for resident tasks") { app =>
if (app.isResident)
app.unreachableStrategy == UnreachableDisabled
else
true
}
/**
 * Basic, feature-aware validation of an app definition, shared by create and
 * update paths. HealthCheck contents are deliberately not deep-validated here
 * (see the comment on validAppDefinition above).
 *
 * Fix: the `instances >= 0` rule was declared twice, which produced duplicate
 * violation messages for a single invalid value; it is now declared once.
 */
def validBasicAppDefinition(enabledFeatures: Set[String]) = validator[AppDefinition] { appDef =>
  appDef.upgradeStrategy is valid
  appDef.container is optional(Container.validContainer(appDef.networks, enabledFeatures))
  appDef.portDefinitions is PortDefinitions.portDefinitionsValidator
  appDef.executor should matchRegexFully("^(//cmd)|(/?[^/]+(/[^/]+)*)|$")
  appDef must containsCmdArgsOrContainer
  appDef.healthChecks is every(portIndexIsValid(appDef.portIndices))
  appDef must haveAtMostOneMesosHealthCheck
  appDef.instances should be >= 0
  appDef.fetch is every(fetchUriIsValid)
  // All scalar resources must be non-negative.
  appDef.resources.mem as "mem" should be >= 0.0
  appDef.resources.cpus as "cpus" should be >= 0.0
  appDef.resources.disk as "disk" should be >= 0.0
  appDef.resources.gpus as "gpus" should be >= 0
  appDef.secrets is valid(Secret.secretsValidator)
  appDef.secrets is empty or featureEnabled(enabledFeatures, Features.SECRETS)
  appDef.env is valid(EnvVarValue.envValidator)
  appDef.acceptedResourceRoles is empty or valid(ResourceRole.validAcceptedResourceRoles(appDef.isResident))
  appDef must complyWithGpuRules(enabledFeatures)
  appDef must complyWithMigrationAPI
  appDef must complyWithReadinessCheckRules
  appDef must complyWithSingleInstanceLabelRules
  appDef must complyWithUpgradeStrategyRules
  appDef should requireUnreachableDisabledForResidentTasks
  // constraints are validated in AppValidation
  appDef.unreachableStrategy is valid
  appDef.networks is valid(NetworkValidation.modelNetworksValidator)
} and ExternalVolumes.validApp and EnvVarValue.validApp
@SuppressWarnings(Array("TraversableHead"))
private def portIndexIsValid(hostPortsIndices: Range): Validator[HealthCheck] =
isTrue("Health check port indices must address an element of the ports array or container port mappings.") {
case hc: HealthCheckWithPort =>
hc.portIndex match {
case Some(PortReference.ByIndex(idx)) => hostPortsIndices.contains(idx)
case Some(PortReference.ByName(name)) => false // TODO(jdef) support port name as an index
case None => hc.port.nonEmpty || (hostPortsIndices.length == 1 && hostPortsIndices.head == 0)
}
case _ => true
}
@SuppressWarnings(Array("ComparingFloatingPointTypes"))
def residentUpdateIsValid(from: AppDefinition): Validator[AppDefinition] = {
val changeNoVolumes =
isTrue[AppDefinition]("Persistent volumes can not be changed!") { to =>
val fromVolumes = from.persistentVolumesWithMounts
val toVolumes = to.persistentVolumesWithMounts
def sameSize = fromVolumes.size == toVolumes.size
def noChange = fromVolumes.forall { fromVolume =>
toVolumes.find(_.mount.mountPath == fromVolume.mount.mountPath).contains(fromVolume)
}
sameSize && noChange
}
val changeNoResources =
isTrue[AppDefinition]("Resident Tasks may not change resource requirements!") { to =>
from.resources.cpus == to.resources.cpus &&
from.resources.mem == to.resources.mem &&
from.resources.disk == to.resources.disk &&
from.resources.gpus == to.resources.gpus &&
from.hostPorts.flatten.toSet == to.hostPorts.flatten.toSet &&
from.requirePorts == to.requirePorts
}
validator[AppDefinition] { app =>
app should changeNoVolumes
app should changeNoResources
app.upgradeStrategy is UpgradeStrategy.validForResidentTasks
}
}
def updateIsValid(from: RootGroup): Validator[AppDefinition] = {
new Validator[AppDefinition] {
override def apply(app: AppDefinition): Result = {
from.app(app.id) match {
case (Some(last)) if last.isResident || app.isResident => residentUpdateIsValid(last)(app)
case _ => Success
}
}
}
}
}
| janisz/marathon | src/main/scala/mesosphere/marathon/state/AppDefinition.scala | Scala | apache-2.0 | 26,120 |
package dk.gp.hgpc.util
import dk.gp.hgpc.HgpcModel
import breeze.linalg.DenseMatrix
import breeze.linalg.DenseVector
import dk.bayes.dsl.factor.DoubleFactor
import dk.bayes.math.gaussian.canonical.CanonicalGaussian
import dk.bayes.math.gaussian.canonical.DenseCanonicalGaussian
import dk.bayes.math.gaussian.canonical.SparseCanonicalGaussian
import breeze.numerics._
object calcHGPCLoglik {

  /**
   * Approximate log likelihood of the evidence for a hierarchical GP
   * classification model: builds the factor graph, calibrates it (max 10
   * iterations), then sums the per-task outcome log probabilities.
   */
  def apply(model: HgpcModel): Double = {
    val hgpcFactorGraph = HgpcFactorGraph(model)
    // NOTE(review): the returned `calib` graph and iteration count are unused;
    // this appears to rely on calibrateHgpcFactorGraph mutating
    // `hgpcFactorGraph` in place — TODO confirm, otherwise `calib` should be
    // passed to apply() below.
    val (calib, iters) = calibrateHgpcFactorGraph(hgpcFactorGraph, maxIter = 10)
    apply(hgpcFactorGraph)
  }

  /**
   * Sums log P(observed outcome) over all y-factors of all tasks in an
   * already calibrated factor graph.
   */
  def apply(calibratedHgpcFactorGraph: HgpcFactorGraph): Double = {
    val totalLoglik = calibratedHgpcFactorGraph.taskIds.map { taskId =>
      calibratedHgpcFactorGraph.taskYFactorsMap(taskId).map { taskYFactor =>
        // outcome1Prob is P(y = 1); use log1p(-p) for the complementary outcome
        // to keep precision when p is close to 0.
        val outcome1Prob = taskYFactor.calcNewMsgV2()
        if (taskYFactor.v2.k == 1) log(outcome1Prob) else log1p(-outcome1Prob)
      }.sum
    }.sum
    totalLoglik
  }

  /**
   * This function will update calibratedHgpcFactorGraph for the purpose of computing approximated loglikelihood of evidence given provided covFuncParams and gpMean parameters
   * This might be used for computing approximated derivatives of approximated loglikelihood(lowerBound) for covFuncParams and gpMean parameters
   */
  // Not implemented yet: calling this overload throws NotImplementedError.
  def apply(calibratedHgpcFactorGraph: HgpcFactorGraph,covFuncParams: DenseVector[Double], gpMean: Double): Double = {
    ???
  }
}
} | danielkorzekwa/bayes-scala-gp | src/main/scala/dk/gp/hgpc/util/calcHGPCLoglik.scala | Scala | bsd-2-clause | 1,469 |
/*
* Copyright 2011-2017 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.cdevreeze.yaidom.queryapi
/**
* Abstract node (marker) trait hierarchy. It offers a common minimal API for different kinds of nodes. It also shows
* what yaidom typically considers to be nodes, and what it does not consider to be nodes. For example, documents
* are not nodes in yaidom, so it is thus prevented to create documents as element children. Moreover, attributes
* are typically not nodes in yaidom, although custom element implementations may think otherwise.
*
* The down-side is that we have to consider mixing in (some or all of) these traits everywhere we create a node/element implementation.
*
* @author Chris de Vreeze
*/
object Nodes {

  /**
   * Arbitrary node. Every node exposes its [[NodeKind]] for cheap dispatch
   * without instanceOf checks.
   */
  trait Node {

    def nodeKind: NodeKind
  }

  /**
   * Potential document child, so an element, processing instruction or comment.
   * Text and entity-reference nodes deliberately do not extend this trait.
   */
  trait CanBeDocumentChild extends Node

  /**
   * Arbitrary element node
   */
  trait Elem extends CanBeDocumentChild {

    final def nodeKind: NodeKind = ElementKind
  }

  /**
   * Arbitrary text node
   */
  trait Text extends Node {

    final def nodeKind: NodeKind = TextKind

    def text: String
  }

  /**
   * Arbitrary comment node
   */
  trait Comment extends CanBeDocumentChild {

    final def nodeKind: NodeKind = CommentKind

    def text: String
  }

  /**
   * Arbitrary processing instruction node
   */
  trait ProcessingInstruction extends CanBeDocumentChild {

    final def nodeKind: NodeKind = ProcessingInstructionKind

    def target: String

    def data: String
  }

  /**
   * Arbitrary entity reference node
   */
  trait EntityRef extends Node {

    final def nodeKind: NodeKind = EntityRefKind

    // The name of the referenced entity.
    def entity: String
  }

  /** Node kind, which can be used for cheap pattern matching on kinds of nodes */
  sealed trait NodeKind

  case object ElementKind extends NodeKind

  case object TextKind extends NodeKind

  case object CommentKind extends NodeKind

  case object ProcessingInstructionKind extends NodeKind

  case object EntityRefKind extends NodeKind
}
| dvreeze/yaidom | shared/src/main/scala/eu/cdevreeze/yaidom/queryapi/Nodes.scala | Scala | apache-2.0 | 2,648 |
package com.github.spirom.sparkflights.fw
import org.apache.spark.SparkContext
import org.apache.spark.sql.DataFrame
abstract class CoreExperiment(name: String, sc: SparkContext) extends Experiment(name) {

  /**
   * Hook implemented by concrete experiments.
   * Receives the SparkContext (context may have been modified from the one in
   * the constructor), the input DataFrame and the base path for this run's output.
   */
  def runUserCode(sc: SparkContext, df: DataFrame, outputBase: String): Unit

  /**
   * Runs the experiment, directing output to
   * `<runOutputBase>/<zero-padded index>_<name>` so results sort in run order.
   */
  def runQuery(df: DataFrame, runOutputBase: String, index: Int): Unit = {
    // f-interpolator replaces String.format("%05d", int2Integer(index)):
    // same "%05d" formatting without the explicit boxing.
    val prefix = f"$index%05d"
    runUserCode(sc, df, s"$runOutputBase/${prefix}_$name")
  }
}
| spirom/SparkFlightExamples | src/main/scala/com/github/spirom/sparkflights/fw/CoreExperiment.scala | Scala | mit | 563 |
package com.twitter.finagle.http.exp
import com.twitter.finagle.Failure
import com.twitter.finagle.context.{Contexts, RemoteInfo}
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.CancelledRequestException
import com.twitter.finagle.util.DefaultTimer
import com.twitter.logging.Logger
import com.twitter.util._
import java.util.concurrent.atomic.AtomicReference
private[finagle] object GenSerialServerDispatcher {

  private val logger = Logger.get()

  // Note: this is a slightly different Eof than the finagle-core version, but I don't think it matters
  // Cached failed future returned when the dispatcher observes end-of-stream.
  private val Eof = Future.exception(Failure("EOF"))
  // Single shared exception used to interrupt the dispatch loop on close.
  private val cancelled = new CancelledRequestException

  // State machine of the dispatch loop: Idle (waiting for a request),
  // Running (dispatching one), Closing (shutdown requested/underway).
  private sealed trait DispatchState
  private case object Idle extends DispatchState
  private case object Running extends DispatchState
  private case object Closing extends DispatchState
}
/**
* A generic version of
* [[com.twitter.finagle.dispatch.SerialServerDispatcher SerialServerDispatcher]],
* allowing the implementor to furnish custom dispatchers & handlers.
*/
private[finagle] abstract class GenSerialServerDispatcher[Req, Rep, In, Out](trans: StreamTransport[In, Out])
extends Closable {
def this(trans: Transport[In, Out]) = this(new IdentityStreamTransport(trans))
import GenSerialServerDispatcher._
private[this] val state = new AtomicReference[DispatchState](Idle)
/**
* Dispatches a request. The first argument is the request. The second
* argument `eos` (end-of-stream promise) must be fulfilled when the request
* is complete.
*
* For non-streaming requests, `eos.setDone()` should be called immediately,
* since the entire request is present. For streaming requests,
* `eos.setDone()` must be called at the end of stream (in HTTP, this is on
* receipt of last chunk). Refer to the implementation in
* [[com.twitter.finagle.http.codec.HttpServerDispatcher]].
*/
protected def dispatch(req: Out): Future[Rep]
protected def handle(rep: Rep): Future[Unit]
/**
* Only the dispatch loop can make state transitions to Idle and Running but close
* operations can transition the state to Closing. If the loop finds that the state
* has been transitioned from Idle -> Closing, it is the closer's job to close the
* transport. If the loops finds that the state has transitioned from Running -> Closing,
* it has been given a chance to drain the last connection and will ensure that the
* transport is closed.
*/
// One iteration of the serial dispatch loop: read a message, dispatch and
// handle it, then decide (in continueLoopFn) whether to loop again or shut down.
private[this] def loop(): Future[Unit] = {
  trans.read()
    .flatMap(dispatchAndHandleFn)
    .transform(continueLoopFn)
}
private[this] val handleFn: Rep => Future[Unit] = handle(_)
// Dispatches and handles a message from the transport or closes down if necessary
private[this] val dispatchAndHandleFn: Multi[Out] => Future[Unit] = { case Multi(req, eos) =>
if (state.compareAndSet(Idle, Running)) {
val save = Local.save()
val dispatched = try {
Contexts.local.let(RemoteInfo.Upstream.AddressCtx, trans.remoteAddress) {
trans.peerCertificate match {
case None => dispatch(req)
case Some(cert) => Contexts.local.let(Transport.peerCertCtx, cert) {
dispatch(req)
}
}
}
} finally Local.restore(save)
val handled = dispatched.flatMap(handleFn)
// This version of `Future.join` doesn't collect the values from the Futures, but
// since they are both Future[Unit], we know what the result is and can avoid the
// overhead of collecting two Units just to throw them away via another flatMap.
Future.join(handled::eos::Nil)
} else {
// must have transitioned from Idle to Closing, by someone else who is
// responsible for closing the transport
val st = state.get
if (st == Closing) Eof
else {
// Something really bad happened. Shutdown and log as loudly as possible.
trans.close()
val msg = s"Dispatch loop found in illegal state: $st"
val ex = new IllegalStateException(msg)
logger.error(ex, msg)
Future.exception(ex)
}
}
}
// Checks the state after a dispatch and continues or shuts down the transport if necessary
private[this] val continueLoopFn: Try[Unit] => Future[Unit] = { res =>
if (res.isReturn && state.compareAndSet(Running, Idle)) loop()
else {
// The loop has been canceled and we have been given the opportunity to drain so
// we need to close the transport.
// Note: We don't sequence the transport.close() Future because we don't care to wait
// for it and also don't want to clobber the result of the loop.
trans.close()
Future.const(res)
}
}
// Clear all locals to start the loop; we want a clean slate.
private[this] val looping = Local.letClear { loop() }
trans.onClose.ensure {
state.set(Closing)
looping.raise(cancelled)
}
/** Exposed for testing */
protected[exp] def isClosing: Boolean = state.get() == Closing
/** Exposed for testing */
private[exp] def timer: Timer = DefaultTimer
// Note: this is racy, but that's inherent in draining (without
// protocol support). Presumably, half-closing a TCP connection is
// also possible.
def close(deadline: Time): Future[Unit] = {
// What to do next depends on the state of the dispatcher:
// - Idle: we can close the transport immediately.
// - Running: we need to allow time to drain. Set a timer to ensure it closes by the deadline
// - Closing: close has already been called or the transport closed: return the trans.onClose future.
state.getAndSet(Closing) match {
case Idle => trans.close(deadline)
case Running =>
trans.onClose.by(timer, deadline).onFailure { _ =>
trans.close(deadline) // The dispatcher took too long, ask the transport to close
}
case Closing => () // No action required.
}
trans.onClose.unit
}
}
| koshelev/finagle | finagle-http/src/main/scala/com/twitter/finagle/http/exp/ServerDispatcher.scala | Scala | apache-2.0 | 6,002 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.errors
import org.apache.spark.{SparkException, SparkRuntimeException}
import org.apache.spark.sql.{DataFrame, QueryTest}
import org.apache.spark.sql.test.SharedSparkSession
/**
 * Verifies the error classes, SQLSTATEs and messages raised during query
 * execution for the AES encryption/decryption SQL functions
 * (aes_encrypt / aes_decrypt).
 */
class QueryExecutionErrorsSuite extends QueryTest with SharedSparkSession {
  import testImplicits._

  // Returns (plaintext DataFrame, DataFrame of base64 ciphertexts produced
  // with 16-, 24- and 32-byte keys) used as fixtures by the tests below.
  private def getAesInputs(): (DataFrame, DataFrame) = {
    val encryptedText16 = "4Hv0UKCx6nfUeAoPZo1z+w=="
    val encryptedText24 = "NeTYNgA+PCQBN50DA//O2w=="
    val encryptedText32 = "9J3iZbIxnmaG+OIA9Amd+A=="
    val encryptedEmptyText16 = "jmTOhz8XTbskI/zYFFgOFQ=="
    val encryptedEmptyText24 = "9RDK70sHNzqAFRcpfGM5gQ=="
    val encryptedEmptyText32 = "j9IDsCvlYXtcVJUf4FAjQQ=="
    val df1 = Seq("Spark", "").toDF
    val df2 = Seq(
      (encryptedText16, encryptedText24, encryptedText32),
      (encryptedEmptyText16, encryptedEmptyText24, encryptedEmptyText32)
    ).toDF("value16", "value24", "value32")
    (df1, df2)
  }

  test("INVALID_PARAMETER_VALUE: invalid key lengths in AES functions") {
    val (df1, df2) = getAesInputs()
    // The runtime error is wrapped in a SparkException; the error class lives
    // on the SparkRuntimeException cause.
    def checkInvalidKeyLength(df: => DataFrame): Unit = {
      val e = intercept[SparkException] {
        df.collect
      }.getCause.asInstanceOf[SparkRuntimeException]
      assert(e.getErrorClass === "INVALID_PARAMETER_VALUE")
      assert(e.getSqlState === "22023")
      assert(e.getMessage.contains(
        "The value of parameter(s) 'key' in the aes_encrypt/aes_decrypt function is invalid: " +
        "expects a binary value with 16, 24 or 32 bytes, but got"))
    }
    // Encryption failure - invalid key length
    checkInvalidKeyLength(df1.selectExpr("aes_encrypt(value, '12345678901234567')"))
    checkInvalidKeyLength(df1.selectExpr("aes_encrypt(value, binary('123456789012345'))"))
    checkInvalidKeyLength(df1.selectExpr("aes_encrypt(value, binary(''))"))
    // Decryption failure - invalid key length
    Seq("value16", "value24", "value32").foreach { colName =>
      checkInvalidKeyLength(df2.selectExpr(
        s"aes_decrypt(unbase64($colName), '12345678901234567')"))
      checkInvalidKeyLength(df2.selectExpr(
        s"aes_decrypt(unbase64($colName), binary('123456789012345'))"))
      checkInvalidKeyLength(df2.selectExpr(
        s"aes_decrypt(unbase64($colName), '')"))
      checkInvalidKeyLength(df2.selectExpr(
        s"aes_decrypt(unbase64($colName), binary(''))"))
    }
  }

  test("INVALID_PARAMETER_VALUE: AES decrypt failure - key mismatch") {
    val (_, df2) = getAesInputs()
    // Keys of the correct length but with wrong content: decryption itself fails.
    Seq(
      ("value16", "1234567812345678"),
      ("value24", "123456781234567812345678"),
      ("value32", "12345678123456781234567812345678")).foreach { case (colName, key) =>
      val e = intercept[SparkException] {
        df2.selectExpr(s"aes_decrypt(unbase64($colName), binary('$key'), 'ECB')").collect
      }.getCause.asInstanceOf[SparkRuntimeException]
      assert(e.getErrorClass === "INVALID_PARAMETER_VALUE")
      assert(e.getSqlState === "22023")
      assert(e.getMessage.contains(
        "The value of parameter(s) 'expr, key' in the aes_encrypt/aes_decrypt function " +
        "is invalid: Detail message:"))
    }
  }

  test("UNSUPPORTED_MODE: unsupported combinations of AES modes and padding") {
    val key16 = "abcdefghijklmnop"
    val key32 = "abcdefghijklmnop12345678ABCDEFGH"
    val (df1, df2) = getAesInputs()
    def checkUnsupportedMode(df: => DataFrame): Unit = {
      val e = intercept[SparkException] {
        df.collect
      }.getCause.asInstanceOf[SparkRuntimeException]
      assert(e.getErrorClass === "UNSUPPORTED_FEATURE")
      assert(e.getSqlState === "0A000")
      assert(e.getMessage.matches("""The feature is not supported: AES-\\w+ with the padding \\w+""" +
        " by the aes_encrypt/aes_decrypt function."))
    }
    // Unsupported AES mode and padding in encrypt
    checkUnsupportedMode(df1.selectExpr(s"aes_encrypt(value, '$key16', 'CBC')"))
    checkUnsupportedMode(df1.selectExpr(s"aes_encrypt(value, '$key16', 'ECB', 'NoPadding')"))
    // Unsupported AES mode and padding in decrypt
    checkUnsupportedMode(df2.selectExpr(s"aes_decrypt(value16, '$key16', 'GSM')"))
    checkUnsupportedMode(df2.selectExpr(s"aes_decrypt(value16, '$key16', 'GCM', 'PKCS')"))
    checkUnsupportedMode(df2.selectExpr(s"aes_decrypt(value32, '$key32', 'ECB', 'None')"))
  }
}
| WeichenXu123/spark | sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala | Scala | apache-2.0 | 5,114 |
package progscala2.typesystem.typepaths
/**
 * Demonstrates a path-dependent nested type: every `Service` instance carries
 * its own distinct `Logger` type (`s1.Logger` vs. `s2.Logger`).
 */
class Service {
  /** Console logger scoped to the enclosing `Service` instance. */
  class Logger {
    def log(message: String): Unit = {
      val line = s"log: $message"
      println(line)
    }
  }
  val logger: Logger = new Logger
}
/** Entry point illustrating why path-dependent types are instance-specific. */
object ServiceMain {
  def main(args: Array[String]): Unit = {
    // The Logger types belonging to the s1 and s2 instances are different
    // (path-dependent) types.
    val s1 = new Service
    // Error: when the instances differ, the nested types are different types,
    // so s1.logger cannot be used as another instance's logger member.
    // val s2 = new Service {
    //   override val logger = s1.logger
    // }
  }
} | younggi/books | programming_scala/progscala2/src/main/scala/progscala2/typesystem/typepaths/type-path.scala | Scala | mit | 522 |
/**
* This file is part of mycollab-scheduler.
*
* mycollab-scheduler is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-scheduler is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-scheduler. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.schedule.email.project.service
import com.esofthead.mycollab.common.domain.SimpleRelayEmailNotification
import com.esofthead.mycollab.common.i18n.GenericI18Enum
import com.esofthead.mycollab.common.{MonitorTypeConstants, NotificationType}
import com.esofthead.mycollab.core.utils.StringUtils
import com.esofthead.mycollab.html.FormatUtils._
import com.esofthead.mycollab.html.LinkUtils
import com.esofthead.mycollab.module.mail.MailUtils
import com.esofthead.mycollab.module.project.domain._
import com.esofthead.mycollab.module.project.i18n.{BugI18nEnum, OptionI18nEnum}
import com.esofthead.mycollab.module.project.service.{MilestoneService, ProjectMemberService, ProjectNotificationSettingService, ProjectService}
import com.esofthead.mycollab.module.project.{ProjectLinkGenerator, ProjectResources, ProjectTypeConstants}
import com.esofthead.mycollab.module.tracker.domain.{BugWithBLOBs, SimpleBug}
import com.esofthead.mycollab.module.tracker.service.BugService
import com.esofthead.mycollab.module.user.AccountLinkGenerator
import com.esofthead.mycollab.module.user.domain.SimpleUser
import com.esofthead.mycollab.module.user.service.UserService
import com.esofthead.mycollab.schedule.email.format._
import com.esofthead.mycollab.schedule.email.project.BugRelayEmailNotificationAction
import com.esofthead.mycollab.schedule.email.{ItemFieldMapper, MailContext}
import com.esofthead.mycollab.spring.ApplicationContextUtil
import com.hp.gagawa.java.elements.{A, Img, Span, Text}
import org.slf4j.LoggerFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.beans.factory.config.BeanDefinition
import org.springframework.context.annotation.Scope
import org.springframework.stereotype.Component
/**
* @author MyCollab Ltd.
* @since 4.6.0
*/
@Component
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
/**
 * Builds the template variables, subject lines, and filtered recipient list
 * for relay email notifications about bug create/update/comment events.
 * Prototype-scoped Spring bean: a fresh instance is created per notification.
 */
class BugRelayEmailNotificationActionImpl extends SendMailToFollowersAction[SimpleBug] with BugRelayEmailNotificationAction {
  private val LOG = LoggerFactory.getLogger(classOf[BugRelayEmailNotificationActionImpl])
  @Autowired var bugService: BugService = _
  @Autowired var projectMemberService: ProjectMemberService = _
  @Autowired var projectNotificationService: ProjectNotificationSettingService = _
  // Maps bug fields to i18n labels and HTML formatters for the mail body.
  private val mapper = new BugFieldNameMapper
  // Populates heading, project title, bug summary and link variables used by
  // the email template.
  protected def buildExtraTemplateVariables(context: MailContext[SimpleBug]) {
    val currentProject = new WebItem(bean.getProjectname, ProjectLinkGenerator.generateProjectFullLink(siteUrl, bean.getProjectid))
    val emailNotification: SimpleRelayEmailNotification = context.getEmailNotification
    val summary = "#" + bean.getBugkey + " - " + bean.getSummary
    val summaryLink: String = ProjectLinkGenerator.generateBugPreviewFullLink(siteUrl, bean.getBugkey, bean.getProjectShortName)
    val projectMember: SimpleProjectMember = projectMemberService.findMemberByUsername(emailNotification.getChangeby,
      bean.getProjectid, emailNotification.getSaccountid)
    // Fall back to an empty avatar id when the change author is not a project member.
    val avatarId: String = if (projectMember != null) projectMember.getMemberAvatarId else ""
    val userAvatar: Img = LinkUtils.newAvatar(avatarId)
    val makeChangeUser: String = userAvatar.toString + emailNotification.getChangeByUserFullName
    // NOTE(review): no default case — an unknown action would throw a MatchError.
    val actionEnum: Enum[_] = emailNotification.getAction match {
      case MonitorTypeConstants.CREATE_ACTION => BugI18nEnum.MAIL_CREATE_ITEM_HEADING
      case MonitorTypeConstants.UPDATE_ACTION => BugI18nEnum.MAIL_UPDATE_ITEM_HEADING
      case MonitorTypeConstants.ADD_COMMENT_ACTION => BugI18nEnum.MAIL_COMMENT_ITEM_HEADING
    }
    contentGenerator.putVariable("actionHeading", context.getMessage(actionEnum, makeChangeUser))
    contentGenerator.putVariable("titles", List(currentProject))
    contentGenerator.putVariable("summary", summary)
    contentGenerator.putVariable("summaryLink", summaryLink)
  }
  // Loads the bug referenced by the notification context.
  protected def getBeanInContext(context: MailContext[SimpleBug]): SimpleBug = bugService.findById(context.getTypeid.toInt, context.getSaccountid)
  // Bug summary trimmed to 100 characters for use in subject lines.
  protected def getItemName: String = StringUtils.trim(bean.getSummary, 100)
  protected def getCreateSubject(context: MailContext[SimpleBug]): String = context.getMessage(BugI18nEnum.MAIL_CREATE_ITEM_SUBJECT,
    bean.getProjectname, context.getChangeByUserFullName, getItemName)
  protected def getUpdateSubject(context: MailContext[SimpleBug]): String = context.getMessage(BugI18nEnum.MAIL_UPDATE_ITEM_SUBJECT,
    bean.getProjectname, context.getChangeByUserFullName, getItemName)
  protected def getCommentSubject(context: MailContext[SimpleBug]): String = context.getMessage(BugI18nEnum.MAIL_COMMENT_ITEM_SUBJECT,
    bean.getProjectname, context.getChangeByUserFullName, getItemName)
  protected def getItemFieldMapper: ItemFieldMapper = mapper
  /**
   * Filters the notification's recipients by each member's per-project
   * notification level: None removes the member, Minimal keeps/adds only the
   * bug's assignee, Full always adds the (active) member.
   */
  protected def getListNotifyUsersWithFilter(notification: ProjectRelayEmailNotification): Set[SimpleUser] = {
    import scala.collection.JavaConverters._
    val notificationSettings: List[ProjectNotificationSetting] = projectNotificationService.
      findNotifications(notification.getProjectId, notification.getSaccountid).asScala.toList
    var notifyUsers: Set[SimpleUser] = notification.getNotifyUsers.asScala.toSet
    for (notificationSetting <- notificationSettings) {
      if (NotificationType.None.name == notificationSetting.getLevel) {
        // Level "None": this member never receives bug emails.
        notifyUsers = notifyUsers.filter(notifyUser => !(notifyUser.getUsername == notificationSetting.getUsername))
      }
      else if (NotificationType.Minimal.name == notificationSetting.getLevel) {
        val findResult: Option[SimpleUser] = notifyUsers.find(notifyUser => notifyUser.getUsername == notificationSetting.getUsername);
        findResult match {
          case None => {
            // Not already notified: add the member only if they are the bug's assignee.
            val bug: SimpleBug = bugService.findById(notification.getTypeid.toInt, notification.getSaccountid)
            if (notificationSetting.getUsername == bug.getAssignuser) {
              val prjMember: SimpleUser = projectMemberService.getActiveUserOfProject(notificationSetting.getUsername,
                notificationSetting.getProjectid, notificationSetting.getSaccountid)
              if (prjMember != null) {
                notifyUsers += prjMember
              }
            }
          }
          case Some(user) => {}
        }
      }
      else if (NotificationType.Full.name == notificationSetting.getLevel) {
        // Level "Full": always include the member while they are active in the project.
        val prjMember: SimpleUser = projectMemberService.getActiveUserOfProject(notificationSetting.getUsername,
          notificationSetting.getProjectid, notificationSetting.getSaccountid)
        if (prjMember != null) {
          notifyUsers += prjMember
        }
      }
    }
    notifyUsers
  }
  /** Declares, field by field, how bug attributes are labeled and rendered in the mail body. */
  class BugFieldNameMapper extends ItemFieldMapper {
    put(BugWithBLOBs.Field.summary, BugI18nEnum.FORM_SUMMARY, isColSpan = true)
    put(BugWithBLOBs.Field.environment, BugI18nEnum.FORM_ENVIRONMENT, isColSpan = true)
    put(BugWithBLOBs.Field.description, GenericI18Enum.FORM_DESCRIPTION, isColSpan = true)
    put(BugWithBLOBs.Field.assignuser, new AssigneeFieldFormat(BugWithBLOBs.Field.assignuser.name, GenericI18Enum.FORM_ASSIGNEE))
    put(BugWithBLOBs.Field.milestoneid, new MilestoneFieldFormat(BugWithBLOBs.Field.milestoneid.name, BugI18nEnum.FORM_PHASE))
    put(BugWithBLOBs.Field.status, new I18nFieldFormat(BugWithBLOBs.Field.status.name, BugI18nEnum.FORM_STATUS, classOf[OptionI18nEnum.BugStatus]))
    put(BugWithBLOBs.Field.resolution, new I18nFieldFormat(BugWithBLOBs.Field.resolution.name, BugI18nEnum.FORM_RESOLUTION, classOf[OptionI18nEnum.BugResolution]))
    put(BugWithBLOBs.Field.severity, new I18nFieldFormat(BugWithBLOBs.Field.severity.name, BugI18nEnum.FORM_SEVERITY, classOf[OptionI18nEnum.BugSeverity]))
    put(BugWithBLOBs.Field.priority, new I18nFieldFormat(BugWithBLOBs.Field.priority.name, BugI18nEnum.FORM_PRIORITY, classOf[OptionI18nEnum.BugPriority]))
    put(BugWithBLOBs.Field.duedate, new DateFieldFormat(BugWithBLOBs.Field.duedate.name, BugI18nEnum.FORM_DUE_DATE))
    put(BugWithBLOBs.Field.logby, new LogUserFieldFormat(BugWithBLOBs.Field.logby.name, BugI18nEnum.FORM_LOG_BY))
  }
  /** Renders the bug's milestone as an icon + link, or an empty span when unset. */
  class MilestoneFieldFormat(fieldName: String, displayName: Enum[_]) extends FieldFormat(fieldName, displayName) {
    // Formats from the wrapped bean (milestone id/name already joined in).
    def formatField(context: MailContext[_]): String = {
      val bug: SimpleBug = context.getWrappedBean.asInstanceOf[SimpleBug]
      if (bug.getMilestoneid == null || bug.getMilestoneName == null) {
        new Span().write
      } else {
        val img: Text = new Text(ProjectResources.getFontIconHtml(ProjectTypeConstants.MILESTONE));
        val milestoneLink: String = ProjectLinkGenerator.generateMilestonePreviewFullLink(context.siteUrl,
          bug.getProjectid, bug.getMilestoneid)
        val link: A = newA(milestoneLink, bug.getMilestoneName)
        newLink(img, link).write
      }
    }
    // Formats from a raw milestone-id string (e.g. an audit-log value); falls
    // back to the raw value when the id does not parse or is not found.
    def formatField(context: MailContext[_], value: String): String = {
      if (StringUtils.isBlank(value)) {
        new Span().write
      } else {
        try {
          val milestoneId: Int = value.toInt
          val milestoneService: MilestoneService = ApplicationContextUtil.getSpringBean(classOf[MilestoneService])
          val milestone: SimpleMilestone = milestoneService.findById(milestoneId, context.getUser.getAccountId)
          if (milestone != null) {
            val img: Text = new Text(ProjectResources.getFontIconHtml(ProjectTypeConstants.MILESTONE));
            val milestoneLink: String = ProjectLinkGenerator.generateMilestonePreviewFullLink(context.siteUrl, milestone
              .getProjectid, milestone.getId)
            val link: A = newA(milestoneLink, milestone.getName)
            return newLink(img, link).write
          }
        }
        catch {
          case e: Exception => LOG.error("Error", e)
        }
        value
      }
    }
  }
  /** Renders the bug's assignee as an avatar + profile link, or an empty span when unset. */
  class AssigneeFieldFormat(fieldName: String, displayName: Enum[_]) extends FieldFormat(fieldName, displayName) {
    def formatField(context: MailContext[_]): String = {
      val bug: SimpleBug = context.getWrappedBean.asInstanceOf[SimpleBug]
      if (bug.getAssignuser != null) {
        val userAvatarLink: String = MailUtils.getAvatarLink(bug.getAssignUserAvatarId, 16)
        val img: Img = newImg("avatar", userAvatarLink)
        val userLink: String = AccountLinkGenerator.generatePreviewFullUserLink(MailUtils.getSiteUrl(bug.getSaccountid), bug.getAssignuser)
        val link: A = newA(userLink, bug.getAssignuserFullName)
        newLink(img, link).write
      }
      else {
        new Span().write
      }
    }
    // Formats from a raw username string; falls back to the raw value when the
    // user cannot be resolved in the account.
    def formatField(context: MailContext[_], value: String): String = {
      if (org.apache.commons.lang3.StringUtils.isBlank(value)) {
        new Span().write
      } else {
        val userService: UserService = ApplicationContextUtil.getSpringBean(classOf[UserService])
        val user: SimpleUser = userService.findUserByUserNameInAccount(value, context.getUser.getAccountId)
        if (user != null) {
          val userAvatarLink: String = MailUtils.getAvatarLink(user.getAvatarid, 16)
          val userLink: String = AccountLinkGenerator.generatePreviewFullUserLink(MailUtils.getSiteUrl(user.getAccountId), user.getUsername)
          val img: Img = newImg("avatar", userAvatarLink)
          val link: A = newA(userLink, user.getDisplayName)
          newLink(img, link).write
        } else
          value
      }
    }
  }
  /** Renders the user who logged the bug as an avatar + profile link, or an empty span when unset. */
  class LogUserFieldFormat(fieldName: String, displayName: Enum[_]) extends FieldFormat(fieldName, displayName) {
    def formatField(context: MailContext[_]): String = {
      val bug: SimpleBug = context.getWrappedBean.asInstanceOf[SimpleBug]
      if (bug.getLogby != null) {
        val userAvatarLink: String = MailUtils.getAvatarLink(bug.getLoguserAvatarId, 16)
        val img: Img = newImg("avatar", userAvatarLink)
        val userLink: String = AccountLinkGenerator.generatePreviewFullUserLink(MailUtils.getSiteUrl(bug.getSaccountid), bug.getLogby)
        val link: A = newA(userLink, bug.getLoguserFullName)
        newLink(img, link).write
      }
      else
        new Span().write
    }
    // Formats from a raw username string; falls back to the raw value when the
    // user cannot be resolved in the account.
    def formatField(context: MailContext[_], value: String): String = {
      if (StringUtils.isBlank(value))
        return new Span().write
      val userService: UserService = ApplicationContextUtil.getSpringBean(classOf[UserService])
      val user: SimpleUser = userService.findUserByUserNameInAccount(value, context.getUser.getAccountId)
      if (user != null) {
        val userAvatarLink: String = MailUtils.getAvatarLink(user.getAvatarid, 16)
        val userLink: String = AccountLinkGenerator.generatePreviewFullUserLink(MailUtils.getSiteUrl(user.getAccountId), user.getUsername)
        val img: Img = newImg("avatar", userAvatarLink)
        val link: A = newA(userLink, user.getDisplayName)
        newLink(img, link).write
      } else
        value
    }
  }
} | maduhu/mycollab | mycollab-scheduler/src/main/scala/com/esofthead/mycollab/schedule/email/project/service/BugRelayEmailNotificationActionImpl.scala | Scala | agpl-3.0 | 14,706 |
package com.olvind
package sui
/**
 * Maps Semantic-UI React propType strings (as extracted from the JS sources)
 * to Scala `Type`s for the generated facade. Specific (component, field,
 * typeString) triples are matched first; generic propType shapes afterwards.
 * Unrecognized triples fall through to `js.Any` and print a TODO template.
 */
object SuiTypeMapper extends TypeMapper {
  val typeT = Normal("T").generic("T")
  val typeTJs = Normal("T").genericJs("T")
  def apply(compName: CompName, fieldName: PropName, typeString: String): Type = {
    // True when the (case-insensitive) field name contains `s`.
    def is(s: String) =
      fieldName.value.toLowerCase contains s.toLowerCase
    // Tokenizes a propType expression on quotes/parens/brackets/commas/whitespace,
    // discarding empties and the first `drop` tokens.
    def split(drop: Int, s: String) =
      s.split("[\\'\\"\\\\(\\\\)\\\\[\\\\],\\\\s]").map(_.trim).filterNot(_.isEmpty).drop(drop)
    (compName.value, fieldName.value, typeString) match {
      case ("Input", "icon", _) => Normal("SuiIconType")
      case ("Flag", "name", _) => Normal("String | SuiCountry")
      case ("Header", "as", _) => Normal("String | js.Function")
      case ("Header", "image", _) => Normal("String | React.Element")
      case ("Header", "icon", _) => Normal("String | js.Object | React.Element")
      case ("Button", "children", _) => Normal("VdomNode")
      case ("Button", "animated", _) => Normal("Boolean | ButtonAnimatedType")
      case ("Icon", "name", "_lib.customsuggest(_lib.SUI.ALL_ICONS_IN_ALL_CONTEXTS)") =>
        Normal("SuiIconType")
      case (_, "textAlign", _) =>
        Enum(compName, Seq("left", "center", "right", "justified"), "SuiTextAlignment")
      case (_, "size", _) =>
        Enum(compName,
          Seq("mini", "tiny", "small", "medium", "large", "big", "huge", "massive"),
          "SuiSize")
      case ("IconGroup", "name", "_lib.customsuggest(_lib.SUI.ALL_ICONS_IN_ALL_CONTEXTS)") =>
        Normal("SuiIconType")
      // oneOfType / some(...): map each member type recursively and join as a union.
      case (_, _, e) if e.contains("oneOfType") || e.contains("some(") => {
        val splitted = split(1, e)
        Normal(splitted.map(t => apply(compName, fieldName, t))
          .filter(_.name.nonEmpty)
          .map(_.name)
          .toSet
          .mkString(" | "))
      }
      case (_, _, "Mui.oneOf(_lib.SUI.WIDTHS)") => Normal("Double")
      case (_, _, "Mui.oneOf(_lib.SUI.COLORS)") =>
        Enum(compName,
          Seq("red",
            "orange",
            "yellow",
            "olive",
            "green",
            "teal",
            "blue",
            "violet",
            "purple",
            "pink",
            "brown",
            "grey",
            "black"),
          "SuiColor")
      case (_, _, "Mui.oneOf(_lib.SUI.FLOATS)") => Enum(compName, Seq("left", "right"), "SuiFloat")
      case (_, _, "Mui.oneOf(_lib.SUI.SIZES)") =>
        Enum(compName,
          Seq("mini", "tiny", "small", "medium", "large", "big", "huge", "massive"),
          "SuiSize")
      case (_, _, "Mui.oneOf(_lib.SUI.TEXT_ALIGNMENTS)") =>
        Enum(compName, Seq("left", "center", "right", "justified"), "SuiTextAlignment")
      case (_, _, "Mui.oneOf(_lib.SUI.VERTICAL_ALIGNMENTS)") =>
        Enum(compName, Seq("bottom", "middle", "top"), "SuiVerticalAlignment")
      // Inline oneOf([...]) literal list: pull the values out of the bracketed array.
      case (a, b, enum) if enum.contains("oneOf(") && enum.contains(']') =>
        val found = "(\\\\[.*?\\\\])".r.findAllIn(enum).toList
        val array =
          found.last.replaceAll("\\\\[|\\\\]", "").split(", ").map(_.replace("'", "").replace(" ", ""))
        Enum(compName, array)
      case (a, b, enum) if enum.contains("oneOf(") && !enum.contains(']') =>
        Enum(compName, split(1, enum))
      case (_, _, "_lib.customas") => Normal("js.Any") //TODO: what to do with this?
      /* general */
      case (_, "valueLink", "object") => Normal("js.Any")
      case (_, _, "string") => Normal("String")
      case (_, _, "bool") => Normal("Boolean")
      case (_, "children", "element") => Normal("VdomElement")
      case (_, _, "element") => Normal("React.Element")
      case (_, "children", "node") => Normal("VdomNode")
      case (_, _, "node") => Normal("React.Node")
      case (_, _, "number") => Normal("Double")
      case (_, "children", "arrayOf(element)") => Normal("js.Array[React.Element]")
      case (_, _, "Mui.arrayOf") => Normal("js.Array[js.Any]")
      case (_, "valueLink", "Mui.object") => Normal("js.Any")
      case (_, _, "Mui.string") => Normal("String")
      case (_, _, "Mui.bool") => Normal("Boolean")
      case (_, "children", "Mui.element") => Normal("VdomElement")
      case (_, _, "Mui.element") => Normal("React.Element")
      case (_, "children", "Mui.node") => Normal("VdomNode")
      case (_, _, "Mui.node") => Normal("React.Node")
      case (_, _, "Mui.object") => Normal("js.Object")
      case (_, _, "Mui.number") => Normal("Double")
      case (_, "children", "Mui.arrayOf(Mui.element)") => Normal("js.Array[React.Element]")
      case ("AutoComplete", "popoverProps", "object") => Normal("js.Any")
      case ("RadioButtonGroup", "defaultSelected", "any") => Normal("js.Any")
      case ("RadioButtonGroup", "valueSelected", "any") => Normal("js.Any")
      case ("Stepper", "children", "arrayOf(node)") => Normal("VdomElement")
      /*Added by roberto@leibman.net*/
      case ("Advertisement", "unit", _) => Enum(compName, Seq("medium", "rectanglelarge", "rectanglevertical", "rectanglesmall", "rectanglemobile", "bannerbannervertical", "bannertop", "bannerhalf", "bannerbuttonsquare", "buttonsmall", "buttonskyscraperwide", "skyscraperleaderboardlarge", "leaderboardmobile", "leaderboardbillboardpanoramanetboardhalf", "pagesquaresmall", "square"), "SuiAdvertisementUnit")
      case ("DatePicker", "utils", "object") => Normal("DatePickerUtils") //TODO ???
      case ("SelectField", "dropDownMenuProps", "object") => Normal("DropDownMenuProps") //TODO ???
      case (_, _, "_lib.customcontentShorthand") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case (_, _, "_lib.customcollectionShorthand") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case (_, _, "_lib.customitemShorthand") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case (_, "control", "_FormField2.default.propTypes.control") => Normal("js.Any") //TODO ???
      case ("GridColumn", "only", "_lib.customonlyProp(_lib.SUI.VISIBILITY)") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Image", "fluid", "_lib.customevery([Mui.bool, _lib.customdisallow(['size'])])") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Image", "spaced", _) => Enum(compName, Seq("left", "right"), "LeftRight")
      case ("List", "relaxed", _) => Normal("js.Any") //TODO Enum, very
      case ("Menu", "floated", _) => Normal("js.Any") //TODO Enum, right
      case ("Menu", "icon", _) => Normal("js.Any") //TODO Enum, labeled
      case ("Menu", "tabular", _) => Normal("js.Any") //TODO Enum, right
      case ("MenuItem", "fitted", _) => Enum(compName, Seq("horizontally", "vertically"), "SuiHorizontallyOrVertically")
      case ("Segment", "padded", _) => Normal("Boolean | String") //TODO |Enum "very"
      case ("Dropdown", "allowAdditions", "_lib.customevery([_lib.customdemand(['options', 'selection', 'search']), Mui.bool])") => Normal("Boolean")
      case ("Dropdown", "children", "_lib.customevery([_lib.customdisallow(['options', 'selection']), _lib.customgivenProps({ children: Mui.any }, Mui.element)])") => Normal("VdomElement")
      case ("Dropdown", "options", "_lib.customevery([_lib.customdisallow(['children']), Mui.arrayOf(Mui.shape(_DropdownItem2.default.propTypes))])") => Normal("js.Array[SuiDropDownOption]")
      case ("Dropdown", "pointing", _) => Normal("Boolean | PointingDirection")
      case ("Dropdown", "selectedLabel", "_lib.customdemand") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Dropdown", "selectedLabel", "multiple") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Dropdown", "selection", "_lib.customevery([_lib.customdisallow(['children']), _lib.customdemand(['options']), Mui.bool])") => Normal("Boolean")
      case ("Dropdown", "trigger", "_lib.customevery([_lib.customdisallow(['selection', 'text']), Mui.node])") => Normal("VdomNode")
      case ("Dropdown", "defaultSelectedLabel", "_lib.customdemand") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Dropdown", "defaultSelectedLabel", "multiple") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Dropdown", "searchInput", "Mui.array") => Normal("js.Array[js.Object] | VdomNode | js.Object")
      case ("FormField", "control", _) => Enum(compName, Seq("button", "input", "select", "textarea"), "SuiFormFieldControlType")
      case ("FormField", "type", "_lib.customevery([_lib.customdemand(['control'])])") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("FormGroup", "grouped", "_lib.customevery([_lib.customdisallow(['inline']), Mui.bool])") => Normal("Boolean")
      case ("FormGroup", "inline", "_lib.customevery([_lib.customdisallow(['grouped']), Mui.bool])") => Normal("Boolean")
      case ("Grid", "celled", _) => Normal("js.Any") //TODO Enum, internally
      case ("Grid", "divided", _) => Normal("js.Any") //TODO Enum, vertically
      case ("Grid", "padded", _) => Enum(compName, Seq("horizontally", "vertically"), "SuiHorizontallyOrVertically")
      case ("Grid", "relaxed", _) => Normal("js.Any") //TODO Enum, very
      case ("Grid", "reversed", "_lib.custommultipleProp(['computer', 'computer vertically', 'mobile', 'mobile vertically', 'tablet', 'tablet vertically'])") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("GridColumn", "only", "_lib.custommultipleProp(_lib.SUI.VISIBILITY)") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("ListItem", "icon", "_lib.customevery([_lib.customdisallow(['image']), _lib.customitemShorthand])") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("ListItem", "image", "_lib.customevery([_lib.customdisallow(['icon']), _lib.customitemShorthand])") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Modal", "actions", "Mui.arrayOf(_lib.customitemShorthand)") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Modal", "dimmer", _) => Enum(compName, Seq("inverted", "blurring"), "SuiModalDimmer")
      case ("Modal", "mountNode", "Mui.any") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("ModalActions", "actions", "_lib.customevery([_lib.customdisallow(['children']), Mui.arrayOf(_lib.customitemShorthand)])") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("ModalActions", "onActionClick", "_lib.customevery([_lib.customdisallow(['children']), Mui.func])") => Normal("ReactEvent => Callback") //TODO write this Missing in TypeMapper
      case ("Radio", "slider", "_Checkbox2.default.propTypes.slider") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Radio", "toggle", "_Checkbox2.default.propTypes.toggle") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Radio", "type", "_Checkbox2.default.propTypes.type") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Rating", "clearable", _) => Normal("js.Any") //TODO enum, auto
      case ("Checkbox", "radio", "_lib.customevery([Mui.bool, _lib.customdisallow(['slider', 'toggle'])])") => Normal("Boolean") //TODO write this Missing in TypeMapper
      case ("Checkbox", "slider", "_lib.customevery([Mui.bool, _lib.customdisallow(['radio', 'toggle'])])") => Normal("Boolean") //TODO write this Missing in TypeMapper
      case ("Checkbox", "toggle", "_lib.customevery([Mui.bool, _lib.customdisallow(['radio', 'slider'])])") => Normal("Boolean") //TODO write this Missing in TypeMapper
      case ("Popup", "on", _) => Normal("js.Any") //TODO Enum, hover, click or focus
      case ("Popup", "wide", _) => Normal("js.Any") //TODO Enum, very
      case ("Table", "basic", _) => Normal("js.Any") //TODO Enum, very
      case ("Table", "compact", _) => Normal("js.Any") //TODO Enum, very
      case ("Table", "padded", _) => Normal("js.Any") //TODO Enum, very
      case ("Table", "renderBodyRow", "_lib.customevery([_lib.customdisallow(['children']), _lib.customdemand(['tableData']), Mui.func])") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Table", "tableData", "_lib.customevery([_lib.customdisallow(['children']), _lib.customdemand(['renderBodyRow']), Mui.array])") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("GridRow", "only", "_lib.custommultipleProp(_lib.SUI.VISIBILITY)") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("GridRow", "reversed", "_lib.custommultipleProp(['computer', 'computer vertically', 'mobile', 'mobile vertically', 'tablet', 'tablet vertically'])") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Label", "corner", _) => Enum(compName, Seq("left", "right"), "LeftRight")
      case ("Label", "empty", "_lib.customevery([Mui.bool, _lib.customdemand(['circular'])])") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Label", "pointing", _) => Normal("js.Any") //TODO enum above, below, left, right
      case ("Label", "ribbon", _) => Enum(compName, Seq("left", "right"), "LeftRight")
      case ("Search", "results", _) => Normal("js.Object | js.Array[js.Object]") //TODO One of: - array of Search.Result props e.g. `{ title: '', description: '' }` or - object of categories e.g. `{ name: '', results: [{ title: '', description: '' }]`
      case ("SearchCategory", "results", "Mui.array") => Normal("js.Array[js.Object]") //TODO write this Missing in TypeMapper
      case (_, "attached", _) => Enum(compName, Seq("left", "right", "top", "bottom"), "SuiFourDirections")
      case ("Loader", "inline", "Mui.oneOf") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Loader", "inline", "centered") => Normal("js.Any") //TODO write this Missing in TypeMapper case ("Progress","percent","_lib.customdisallow") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "percent", "total") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "percent", "value") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "progress", "Mui.oneOf") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "progress", "percent") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "progress", "ratio") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "progress", "value") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "total", "_lib.customdemand") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "total", "value") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "total", "_lib.customdisallow") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "total", "percent") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "value", "_lib.customdisallow") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case ("Progress", "value", "percent") => Normal("js.Any") //TODO write this Missing in TypeMapper
      case (_, "onItemClick", "_lib.customevery([_lib.customdisallow(['children']), Mui.func])") =>
        Normal(SuiTypeMapperFunction(compName, fieldName))
      case (_, _, "Mui.func") =>
        Normal(SuiTypeMapperFunction(compName, fieldName))
      case (_, _, "func") =>
        Normal(SuiTypeMapperFunction(compName, fieldName))
      // Fallback: print a ready-to-paste case template and map to js.Any.
      case (a, b, c) =>
        println(s"""case ("$a","$b","$c") => Normal("js.Any") //TODO write this Missing in TypeMapper""")
        Normal("js.Any /*//TODO: fix this in the TypeMapper*/")
    }
  }
}
| aparo/scalajs-react-components | gen/src/main/scala/com/olvind/sui/SuiTypeMapper.scala | Scala | apache-2.0 | 15,683 |
package BIDMach.models
import BIDMat.{Mat,SBMat,CMat,DMat,FMat,IMat,HMat,GMat,GIMat,GSMat,SMat,SDMat}
import BIDMat.MatFunctions._
import BIDMat.SciFunctions._
import BIDMach.datasources._
import BIDMach.updaters._
import BIDMach._
/**
* Abstract class with shared code for Regression Models
*/
/**
 * Shared state and batch-dispatch logic for regression models.
 *
 * Concrete subclasses implement mupdate/mupdate2 (parameter updates) and
 * meval/meval2 (scoring). This class owns the model matrix, the optional
 * target map / target matrix / update mask, and the empirical feature
 * frequency vector `sp` computed from the first data matrix.
 */
abstract class RegressionModel(override val opts:RegressionModel.Opts) extends Model {
  var targmap:Mat = null   // optional mapping from data targets to model targets (from opts.targmap)
  var targets:Mat = null   // optional target matrix used when no second data source is supplied
  var mask:Mat = null      // optional mask restricting which coefficients may be updated (from opts.rmask)
  var sp:Mat = null        // normalized (smoothed) feature frequencies of the input data

  /** Copy this model's regression-specific state into another model instance. */
  override def copyTo(mod:Model) = {
    super.copyTo(mod);
    val rmod = mod.asInstanceOf[RegressionModel];
    rmod.targmap = targmap;
    rmod.targets = targets;
    rmod.mask = mask;
    rmod.sp = sp;
  }

  /** Initialize model/update matrices and convert option matrices to the compute device. */
  def init() = {
    useGPU = opts.useGPU && Mat.hasCUDA > 0
    val data0 = mats(0)
    val m = data0.nrows;
    // If only one input matrix is present, targets must come from the options.
    val targetData = mats.length > 1
    // Number of model rows: prefer opts.targmap, then opts.targets, else the second data matrix.
    val d = if (opts.targmap.asInstanceOf[AnyRef] != null) {
      opts.targmap.nrows
    } else if (opts.targets.asInstanceOf[AnyRef] != null) {
      opts.targets.nrows
    } else {
      mats(1).nrows
    }
    // Smoothed (+0.5) feature counts normalized into a probability vector.
    val sdat = (sum(data0,2).t + 0.5f).asInstanceOf[FMat]
    sp = sdat / sum(sdat)
    println("corpus perplexity=%f" format (math.exp(-(sp ddot ln(sp)))))
    if (refresh) {
      val mm = zeros(d,m);
      setmodelmats(Array(mm))
    }
    // Convert the model matrix (e.g. to GPU) and allocate a matching update matrix.
    modelmats(0) = convertMat(modelmats(0));
    updatemats = Array(modelmats(0).zeros(modelmats(0).nrows, modelmats(0).ncols));
    targmap = if (opts.targmap.asInstanceOf[AnyRef] != null) convertMat(opts.targmap) else opts.targmap
    if (! targetData) {
      targets = if (opts.targets.asInstanceOf[AnyRef] != null) convertMat(opts.targets) else opts.targets
      mask = if (opts.rmask.asInstanceOf[AnyRef] != null) convertMat(opts.rmask) else opts.rmask
    }
  }

  // Subclass hooks: single-matrix and (data, target) variants of update and evaluation.
  def mupdate(data:Mat, ipass:Int, i:Long)
  def mupdate2(data:Mat, targ:Mat, ipass:Int, i:Long)
  def meval(data:Mat):FMat
  def meval2(data:Mat, targ:Mat):FMat

  /** Dispatch one minibatch to the appropriate update hook. */
  def dobatch(gmats:Array[Mat], ipass:Int, i:Long) = {
    if (gmats.length == 1) {
      mupdate(gmats(0), ipass, i)
    } else {
      mupdate2(gmats(0), gmats(1), ipass, i)
    }
  }

  // NOTE(review): this reads the inherited `gmats` field rather than the `mats`
  // parameter — presumably intentional (gmats would hold device-converted copies),
  // but worth confirming against the Model base class.
  def evalbatch(mats:Array[Mat], ipass:Int, here:Long):FMat = {
    if (gmats.length == 1) {
      meval(gmats(0))
    } else {
      meval2(gmats(0), gmats(1))
    }
  }
}
object RegressionModel {
  /** Option set shared by all regression models. All fields are optional (null = unset). */
  trait Opts extends Model.Opts {
    var targets:FMat = null   // target values, one row per model target
    var targmap:FMat = null   // mapping from data targets to model targets
    var rmask:FMat = null     // mask selecting which model coefficients may change
  }
  class Options extends Opts {}
}
| yanqingmen/BIDMach | src/main/scala/BIDMach/models/Regression.scala | Scala | bsd-3-clause | 2,585 |
package edu.rice.habanero.benchmarks.piprecision
import java.math.BigDecimal
import java.util.concurrent.atomic.AtomicInteger
import edu.rice.habanero.actors.{FuncJavaActor, FuncJavaActorState, FuncJavaPool}
import edu.rice.habanero.benchmarks.piprecision.PiPrecisionConfig.{StartMessage, StopMessage}
import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner}
/**
*
* @author <a href="http://shams.web.rice.edu/">Shams Imam</a> (shams@rice.edu)
*/
object PiPrecisionFuncJavaActorBenchmark {

  def main(args: Array[String]) {
    BenchmarkRunner.runBenchmark(args, new PiPrecisionFuncJavaActorBenchmark)
  }

  /** Benchmark harness: computes pi to a configured precision with a master/worker actor pool. */
  private final class PiPrecisionFuncJavaActorBenchmark extends Benchmark {
    def initialize(args: Array[String]) {
      PiPrecisionConfig.parseArgs(args)
    }

    def printArgInfo() {
      PiPrecisionConfig.printArgs()
    }

    def runIteration() {
      val numWorkers: Int = PiPrecisionConfig.NUM_WORKERS
      val precision: Int = PiPrecisionConfig.PRECISION
      val master = new Master(numWorkers, precision)
      master.start()
      master.send(StartMessage.ONLY)
      // Block until all actors (master and workers) have exited.
      FuncJavaActorState.awaitTermination()
    }

    def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double) {
      if (lastIteration) {
        FuncJavaPool.shutdown()
      }
    }
  }

  /**
   * Coordinator actor: hands out series terms to workers, accumulates partial
   * results, and stops issuing work once a returned term drops to or below the
   * tolerance 10^(-scale).
   */
  private class Master(numWorkers: Int, scale: Int) extends FuncJavaActor[AnyRef] {
    private final val workers = Array.tabulate[Worker](numWorkers)(i => new Worker(this, i))
    private var result: BigDecimal = BigDecimal.ZERO
    // Smallest term magnitude that still matters at the requested scale.
    private final val tolerance = BigDecimal.ONE.movePointLeft(scale)
    private final val numWorkersTerminated: AtomicInteger = new AtomicInteger(0)
    private var numTermsRequested: Int = 0
    private var numTermsReceived: Int = 0
    private var stopRequests: Boolean = false

    override def onPostStart() {
      workers.foreach(loopWorker => {
        loopWorker.start()
      })
    }

    /**
     * Generates work for the given worker
     *
     * @param workerId the id of te worker to send work
     */
    private def generateWork(workerId: Int) {
      val wm: PiPrecisionConfig.WorkMessage = new PiPrecisionConfig.WorkMessage(scale, numTermsRequested)
      workers(workerId).send(wm)
      numTermsRequested += 1
    }

    def requestWorkersToExit() {
      workers.foreach(loopWorker => {
        loopWorker.send(StopMessage.ONLY)
      })
    }

    override def process(msg: AnyRef) {
      msg match {
        case rm: PiPrecisionConfig.ResultMessage =>
          numTermsReceived += 1
          result = result.add(rm.result)
          // Convergence: once a term is no larger than the tolerance, stop handing out work.
          if (rm.result.compareTo(tolerance) <= 0) {
            stopRequests = true
          }
          if (!stopRequests) {
            generateWork(rm.workerId)
          }
          // All outstanding work answered: tell workers to shut down.
          if (numTermsReceived == numTermsRequested) {
            requestWorkersToExit()
          }
        case _: PiPrecisionConfig.StopMessage =>
          // Workers acknowledge StopMessage back to the master; exit once all have done so.
          val numTerminated: Int = numWorkersTerminated.incrementAndGet
          if (numTerminated == numWorkers) {
            exit()
          }
        case _: PiPrecisionConfig.StartMessage =>
          // Seed the pipeline with up to 10 terms per worker (bounded by scale).
          var t: Int = 0
          while (t < Math.min(scale, 10 * numWorkers)) {
            generateWork(t % numWorkers)
            t += 1
          }
        case message =>
          val ex = new IllegalArgumentException("Unsupported message: " + message)
          ex.printStackTrace(System.err)
      }
    }

    def getResult: String = {
      result.toPlainString
    }
  }

  /** Worker actor: evaluates one series term per WorkMessage and reports it to the master. */
  private class Worker(master: Master, id: Int) extends FuncJavaActor[AnyRef] {
    private var termsProcessed: Int = 0

    override def process(msg: AnyRef) {
      msg match {
        case _: PiPrecisionConfig.StopMessage =>
          // Acknowledge shutdown to the master, then exit.
          master.send(new PiPrecisionConfig.StopMessage)
          exit()
        case wm: PiPrecisionConfig.WorkMessage =>
          termsProcessed += 1
          val result: BigDecimal = PiPrecisionConfig.calculateBbpTerm(wm.scale, wm.term)
          master.send(new PiPrecisionConfig.ResultMessage(result, id))
        case message =>
          val ex = new IllegalArgumentException("Unsupported message: " + message)
          ex.printStackTrace(System.err)
      }
    }
  }
}
| shamsmahmood/savina | src/main/scala/edu/rice/habanero/benchmarks/piprecision/PiPrecisionFuncJavaActorBenchmark.scala | Scala | gpl-2.0 | 4,218 |
package cromwell.logging
import java.util.UUID
import cromwell.CromwellTestkitSpec
import cromwell.core.WorkflowId
import cromwell.engine.WorkflowSourceFiles
import cromwell.engine.backend.local.LocalBackend
import cromwell.engine.backend.{BackendCallJobDescriptor, WorkflowDescriptorBuilder}
import cromwell.engine.workflow.BackendCallKey
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import wdl4s.values.WdlValue
/**
 * Verifies the log tags produced by backend workflow/job loggers for a fixed
 * workflow UUID: the tag embeds the backend name and a shortened workflow id,
 * plus the call name for job-level loggers.
 */
class WorkflowLoggerSpec extends FlatSpec with Matchers with BeforeAndAfterAll with WorkflowDescriptorBuilder {
  val testWorkflowManagerSystem = new CromwellTestkitSpec.TestWorkflowManagerSystem()
  override implicit val actorSystem = testWorkflowManagerSystem.actorSystem

  // Shut down the test actor system after all tests in this spec have run.
  override protected def afterAll() = {
    testWorkflowManagerSystem.shutdownTestActorSystem()
    super.afterAll()
  }

  // Minimal one-task workflow with a fixed UUID so tag assertions are deterministic.
  val descriptor = materializeWorkflowDescriptorFromSources(id = WorkflowId(UUID.fromString("fc6cfad9-65e9-4eb7-853f-7e08c1c8cf8e")),
    workflowSources = WorkflowSourceFiles(
      "task x {command {ps}} workflow w {call x}",
      "{}",
      "{}"
    )
  )
  val backend = LocalBackend(testWorkflowManagerSystem.actorSystem)

  // Job descriptor for the single call "x" (shard index None, attempt 1), no inputs.
  val jobDescriptor = BackendCallJobDescriptor(
    descriptor,
    BackendCallKey(descriptor.namespace.workflow.calls.find(_.unqualifiedName == "x").head, None, 1),
    Map.empty[String, WdlValue])

  "WorkflowLogger" should "create a valid tag" in {
    backend.workflowLogger(descriptor).tag shouldBe "LocalBackend [UUID(fc6cfad9)]"
  }

  it should "create a valid tag for backend call" in {
    backend.jobLogger(jobDescriptor).tag shouldBe "LocalBackend [UUID(fc6cfad9):x]"
  }
}
| cowmoo/cromwell | engine/src/test/scala/cromwell/logging/WorkflowLoggerSpec.scala | Scala | bsd-3-clause | 1,645 |
/**
* Copyright: Copyright (C) 2016, ATS Advanced Telematic Systems GmbH
* License: MPL-2.0
*/
package org.genivi.sota.device_registry.db
import org.genivi.sota.data.Uuid
import org.genivi.sota.db.SlickAnyVal._
import org.genivi.sota.db.SlickExtensions._
import org.genivi.sota.device_registry.common.Errors
import slick.driver.MySQLDriver.api._
import scala.concurrent.ExecutionContext
object PublicCredentialsRepository {

  /** One row: the public credentials blob stored for a device. */
  case class DevicePublicCredentials(device: Uuid, credentials: Array[Byte])

  /** Slick mapping for the DevicePublicCredentials table, keyed by device UUID. */
  class PublicCredentialsTable(tag: Tag) extends Table[DevicePublicCredentials] (tag, "DevicePublicCredentials") {
    def device = column[Uuid]("device_uuid")
    def publicCredentials = column[Array[Byte]]("public_credentials")
    def pk = primaryKey("device_uuid", device)
    def * = (device, publicCredentials).shaped <>
      ((DevicePublicCredentials.apply _).tupled, DevicePublicCredentials.unapply)
  }

  val allPublicCredentials = TableQuery[PublicCredentialsTable]

  /** Fetch the credentials for `uuid`; fails with MissingDevicePublicCredentials when absent. */
  def findByUuid(uuid: Uuid)(implicit ec: ExecutionContext): DBIO[Array[Byte]] = {
    val credentialsForDevice = allPublicCredentials
      .filter(_.device === uuid)
      .map(_.publicCredentials)
    credentialsForDevice.result.failIfNotSingle(Errors.MissingDevicePublicCredentials)
  }

  /** Insert or overwrite the credentials stored for `uuid`. */
  def update(uuid: Uuid, creds: Array[Byte])(implicit ec: ExecutionContext): DBIO[Unit] =
    allPublicCredentials
      .insertOrUpdate(DevicePublicCredentials(uuid, creds))
      .map(_ => ())
}
| PDXostc/rvi_sota_server | device-registry/src/main/scala/org/genivi/sota/device_registry/db/PublicCredentialsRepository.scala | Scala | mpl-2.0 | 1,439 |
/*
* Copyright © 2014 TU Berlin (emma@dima.tu-berlin.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.emmalanguage
package examples.ml.clustering
import KMeans.Solution
import api.Meta.Projections._
import api._
import examples.ml.model._
/**
 * Runs the shared k-means integration test suite on the Flink backend.
 * The body of `kMeans` is quoted by the `emma.onFlink` macro, which compiles
 * it to a Flink dataflow.
 */
class FlinkKMeansIntegrationSpec extends BaseKMeansIntegrationSpec with FlinkAware {

  override def kMeans(k: Int, epsilon: Double, iterations: Int, input: String): Set[Solution[Long]] =
    withDefaultFlinkEnv(implicit flink => emma.onFlink {
      // read the input: one tab-separated line per point, first field is the id
      val points = for (line <- DataBag.readText(input)) yield {
        val record = line.split("\\t")
        Point(record.head.toLong, record.tail.map(_.toDouble))
      }
      // do the clustering
      val result = KMeans(2, k, epsilon, iterations)(points)
      // return the solution as a local set
      result.collect().toSet[Solution[Long]]
    })
}
| aalexandrov/emma | emma-examples/emma-examples-flink/src/test/scala/org/emmalanguage/examples/ml/clustering/FlinkKMeansIntegrationSpec.scala | Scala | apache-2.0 | 1,401 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.Literal._
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
// scalastyle:off
/**
* Calculates and propagates precision for fixed-precision decimals. Hive has a number of
* rules for this based on the SQL standard and MS SQL:
* https://cwiki.apache.org/confluence/download/attachments/27362075/Hive_Decimal_Precision_Scale_Support.pdf
* https://msdn.microsoft.com/en-us/library/ms190476.aspx
*
* In particular, if we have expressions e1 and e2 with precision/scale p1/s2 and p2/s2
* respectively, then the following operations have the following precision / scale:
*
* Operation Result Precision Result Scale
* ------------------------------------------------------------------------
* e1 + e2 max(s1, s2) + max(p1-s1, p2-s2) + 1 max(s1, s2)
* e1 - e2 max(s1, s2) + max(p1-s1, p2-s2) + 1 max(s1, s2)
* e1 * e2 p1 + p2 + 1 s1 + s2
* e1 / e2 p1 - s1 + s2 + max(6, s1 + p2 + 1) max(6, s1 + p2 + 1)
* e1 % e2 min(p1-s1, p2-s2) + max(s1, s2) max(s1, s2)
* e1 union e2 max(s1, s2) + max(p1-s1, p2-s2) max(s1, s2)
*
* When `spark.sql.decimalOperations.allowPrecisionLoss` is set to true, if the precision / scale
* needed are out of the range of available values, the scale is reduced up to 6, in order to
* prevent the truncation of the integer part of the decimals.
*
* To implement the rules for fixed-precision types, we introduce casts to turn them to unlimited
* precision, do the math on unlimited-precision numbers, then introduce casts back to the
* required fixed precision. This allows us to do all rounding and overflow handling in the
* cast-to-fixed-precision operator.
*
* In addition, when mixing non-decimal types with decimals, we use the following rules:
* - BYTE gets turned into DECIMAL(3, 0)
* - SHORT gets turned into DECIMAL(5, 0)
* - INT gets turned into DECIMAL(10, 0)
* - LONG gets turned into DECIMAL(20, 0)
* - FLOAT and DOUBLE cause fixed-length decimals to turn into DOUBLE
* - Literals INT and LONG get turned into DECIMAL with the precision strictly needed by the value
*/
// scalastyle:on
object DecimalPrecision extends TypeCoercionRule {
  import scala.math.{max, min}

  // Float/Double: decimals mixed with these are widened to DoubleType (see nondecimalAndDecimal).
  private def isFloat(t: DataType): Boolean = t == FloatType || t == DoubleType

  // Returns the wider decimal type that's wider than both of them
  def widerDecimalType(d1: DecimalType, d2: DecimalType): DecimalType = {
    widerDecimalType(d1.precision, d1.scale, d2.precision, d2.scale)
  }

  // max(s1, s2) + max(p1-s1, p2-s2), max(s1, s2)
  def widerDecimalType(p1: Int, s1: Int, p2: Int, s2: Int): DecimalType = {
    val scale = max(s1, s2)
    val range = max(p1 - s1, p2 - s2)
    DecimalType.bounded(range + scale, scale)
  }

  // Cast `e` to `dataType` and tag it with PromotePrecision so that
  // decimalAndDecimal skips already-promoted expressions (see the guard below).
  private def promotePrecision(e: Expression, dataType: DataType): Expression = {
    PromotePrecision(Cast(e, dataType))
  }

  // Under non-ANSI semantics, CheckOverflow yields null on overflow instead of raising.
  private def nullOnOverflow: Boolean = !SQLConf.get.ansiEnabled

  override protected def coerceTypes(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
    // fix decimal precision for expressions
    case q => q.transformExpressionsUp(
      decimalAndDecimal.orElse(integralAndDecimalLiteral).orElse(nondecimalAndDecimal))
  }

  /** Decimal precision promotion for +, -, *, /, %, pmod, and binary comparison. */
  private[catalyst] val decimalAndDecimal: PartialFunction[Expression, Expression] = {
    // Skip nodes whose children have not been resolved yet
    case e if !e.childrenResolved => e

    // Skip nodes who is already promoted
    case e: BinaryArithmetic if e.left.isInstanceOf[PromotePrecision] => e

    case Add(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val resultScale = max(s1, s2)
      val resultType = if (SQLConf.get.decimalOperationsAllowPrecisionLoss) {
        DecimalType.adjustPrecisionScale(max(p1 - s1, p2 - s2) + resultScale + 1,
          resultScale)
      } else {
        DecimalType.bounded(max(p1 - s1, p2 - s2) + resultScale + 1, resultScale)
      }
      CheckOverflow(Add(promotePrecision(e1, resultType), promotePrecision(e2, resultType)),
        resultType, nullOnOverflow)

    case Subtract(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val resultScale = max(s1, s2)
      val resultType = if (SQLConf.get.decimalOperationsAllowPrecisionLoss) {
        DecimalType.adjustPrecisionScale(max(p1 - s1, p2 - s2) + resultScale + 1,
          resultScale)
      } else {
        DecimalType.bounded(max(p1 - s1, p2 - s2) + resultScale + 1, resultScale)
      }
      CheckOverflow(Subtract(promotePrecision(e1, resultType), promotePrecision(e2, resultType)),
        resultType, nullOnOverflow)

    case Multiply(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val resultType = if (SQLConf.get.decimalOperationsAllowPrecisionLoss) {
        DecimalType.adjustPrecisionScale(p1 + p2 + 1, s1 + s2)
      } else {
        DecimalType.bounded(p1 + p2 + 1, s1 + s2)
      }
      val widerType = widerDecimalType(p1, s1, p2, s2)
      CheckOverflow(Multiply(promotePrecision(e1, widerType), promotePrecision(e2, widerType)),
        resultType, nullOnOverflow)

    case Divide(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val resultType = if (SQLConf.get.decimalOperationsAllowPrecisionLoss) {
        // Precision: p1 - s1 + s2 + max(6, s1 + p2 + 1)
        // Scale: max(6, s1 + p2 + 1)
        val intDig = p1 - s1 + s2
        val scale = max(DecimalType.MINIMUM_ADJUSTED_SCALE, s1 + p2 + 1)
        val prec = intDig + scale
        DecimalType.adjustPrecisionScale(prec, scale)
      } else {
        // Legacy behavior: when the ideal precision exceeds MAX_SCALE, trade
        // fractional digits for integer digits (keeping roughly half the excess).
        var intDig = min(DecimalType.MAX_SCALE, p1 - s1 + s2)
        var decDig = min(DecimalType.MAX_SCALE, max(6, s1 + p2 + 1))
        val diff = (intDig + decDig) - DecimalType.MAX_SCALE
        if (diff > 0) {
          decDig -= diff / 2 + 1
          intDig = DecimalType.MAX_SCALE - decDig
        }
        DecimalType.bounded(intDig + decDig, decDig)
      }
      val widerType = widerDecimalType(p1, s1, p2, s2)
      CheckOverflow(Divide(promotePrecision(e1, widerType), promotePrecision(e2, widerType)),
        resultType, nullOnOverflow)

    case Remainder(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val resultType = if (SQLConf.get.decimalOperationsAllowPrecisionLoss) {
        DecimalType.adjustPrecisionScale(min(p1 - s1, p2 - s2) + max(s1, s2), max(s1, s2))
      } else {
        DecimalType.bounded(min(p1 - s1, p2 - s2) + max(s1, s2), max(s1, s2))
      }
      // resultType may have lower precision, so we cast them into wider type first.
      val widerType = widerDecimalType(p1, s1, p2, s2)
      CheckOverflow(Remainder(promotePrecision(e1, widerType), promotePrecision(e2, widerType)),
        resultType, nullOnOverflow)

    case Pmod(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val resultType = if (SQLConf.get.decimalOperationsAllowPrecisionLoss) {
        DecimalType.adjustPrecisionScale(min(p1 - s1, p2 - s2) + max(s1, s2), max(s1, s2))
      } else {
        DecimalType.bounded(min(p1 - s1, p2 - s2) + max(s1, s2), max(s1, s2))
      }
      // resultType may have lower precision, so we cast them into wider type first.
      val widerType = widerDecimalType(p1, s1, p2, s2)
      CheckOverflow(Pmod(promotePrecision(e1, widerType), promotePrecision(e2, widerType)),
        resultType, nullOnOverflow)

    case expr @ IntegralDivide(
        e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val widerType = widerDecimalType(p1, s1, p2, s2)
      val promotedExpr =
        IntegralDivide(promotePrecision(e1, widerType), promotePrecision(e2, widerType))
      if (expr.dataType.isInstanceOf[DecimalType]) {
        // This follows division rule
        val intDig = p1 - s1 + s2
        // No precision loss can happen as the result scale is 0.
        // Overflow can happen only in the promote precision of the operands, but if none of them
        // overflows in that phase, no overflow can happen, but CheckOverflow is needed in order
        // to return a decimal with the proper scale and precision
        CheckOverflow(promotedExpr, DecimalType.bounded(intDig, 0), nullOnOverflow)
      } else {
        promotedExpr
      }

    case b @ BinaryComparison(e1 @ DecimalType.Expression(p1, s1),
        e2 @ DecimalType.Expression(p2, s2)) if p1 != p2 || s1 != s2 =>
      // Comparisons only need both sides in the same (wider) type; no overflow check.
      val resultType = widerDecimalType(p1, s1, p2, s2)
      b.makeCopy(Array(Cast(e1, resultType), Cast(e2, resultType)))
  }

  /**
   * Strength reduction for comparing integral expressions with decimal literals.
   * 1. int_col > decimal_literal => int_col > floor(decimal_literal)
   * 2. int_col >= decimal_literal => int_col >= ceil(decimal_literal)
   * 3. int_col < decimal_literal => int_col < ceil(decimal_literal)
   * 4. int_col <= decimal_literal => int_col <= floor(decimal_literal)
   * 5. decimal_literal > int_col => ceil(decimal_literal) > int_col
   * 6. decimal_literal >= int_col => floor(decimal_literal) >= int_col
   * 7. decimal_literal < int_col => floor(decimal_literal) < int_col
   * 8. decimal_literal <= int_col => ceil(decimal_literal) <= int_col
   *
   * Note that technically this is an "optimization" and should go into the optimizer. However,
   * by the time the optimizer runs, these comparison expressions would be pretty hard to pattern
   * match because there are multiple (at least 2) levels of casts involved.
   *
   * There are a lot more possible rules we can implement, but we don't do them
   * because we are not sure how common they are.
   */
  private val integralAndDecimalLiteral: PartialFunction[Expression, Expression] = {

    case GreaterThan(i @ IntegralType(), DecimalLiteral(value)) =>
      // Literals outside the Long range fold the comparison to a constant.
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        TrueLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        FalseLiteral
      } else {
        GreaterThan(i, Literal(value.floor.toLong))
      }

    case GreaterThanOrEqual(i @ IntegralType(), DecimalLiteral(value)) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        TrueLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        FalseLiteral
      } else {
        GreaterThanOrEqual(i, Literal(value.ceil.toLong))
      }

    case LessThan(i @ IntegralType(), DecimalLiteral(value)) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        FalseLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        TrueLiteral
      } else {
        LessThan(i, Literal(value.ceil.toLong))
      }

    case LessThanOrEqual(i @ IntegralType(), DecimalLiteral(value)) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        FalseLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        TrueLiteral
      } else {
        LessThanOrEqual(i, Literal(value.floor.toLong))
      }

    case GreaterThan(DecimalLiteral(value), i @ IntegralType()) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        FalseLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        TrueLiteral
      } else {
        GreaterThan(Literal(value.ceil.toLong), i)
      }

    case GreaterThanOrEqual(DecimalLiteral(value), i @ IntegralType()) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        FalseLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        TrueLiteral
      } else {
        GreaterThanOrEqual(Literal(value.floor.toLong), i)
      }

    case LessThan(DecimalLiteral(value), i @ IntegralType()) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        TrueLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        FalseLiteral
      } else {
        LessThan(Literal(value.floor.toLong), i)
      }

    case LessThanOrEqual(DecimalLiteral(value), i @ IntegralType()) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        TrueLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        FalseLiteral
      } else {
        LessThanOrEqual(Literal(value.ceil.toLong), i)
      }
  }

  /**
   * Type coercion for BinaryOperator in which one side is a non-decimal numeric, and the other
   * side is a decimal.
   */
  private val nondecimalAndDecimal: PartialFunction[Expression, Expression] = {
    // Promote integers inside a binary expression with fixed-precision decimals to decimals,
    // and fixed-precision decimals in an expression with floats / doubles to doubles
    case b @ BinaryOperator(left, right) if left.dataType != right.dataType =>
      (left, right) match {
        // Promote literal integers inside a binary expression with fixed-precision decimals to
        // decimals. The precision and scale are the ones strictly needed by the integer value.
        // Requiring more precision than necessary may lead to a useless loss of precision.
        // Consider the following example: multiplying a column which is DECIMAL(38, 18) by 2.
        // If we use the default precision and scale for the integer type, 2 is considered a
        // DECIMAL(10, 0). According to the rules, the result would be DECIMAL(38 + 10 + 1, 18),
        // which is out of range and therefore it will become DECIMAL(38, 7), leading to
        // potentially loosing 11 digits of the fractional part. Using only the precision needed
        // by the Literal, instead, the result would be DECIMAL(38 + 1 + 1, 18), which would
        // become DECIMAL(38, 16), safely having a much lower precision loss.
        case (l: Literal, r) if r.dataType.isInstanceOf[DecimalType] &&
            l.dataType.isInstanceOf[IntegralType] &&
            SQLConf.get.literalPickMinimumPrecision =>
          b.makeCopy(Array(Cast(l, DecimalType.fromLiteral(l)), r))
        case (l, r: Literal) if l.dataType.isInstanceOf[DecimalType] &&
            r.dataType.isInstanceOf[IntegralType] &&
            SQLConf.get.literalPickMinimumPrecision =>
          b.makeCopy(Array(l, Cast(r, DecimalType.fromLiteral(r))))
        // Promote integers inside a binary expression with fixed-precision decimals to decimals,
        // and fixed-precision decimals in an expression with floats / doubles to doubles
        case (l @ IntegralType(), r @ DecimalType.Expression(_, _)) =>
          b.makeCopy(Array(Cast(l, DecimalType.forType(l.dataType)), r))
        case (l @ DecimalType.Expression(_, _), r @ IntegralType()) =>
          b.makeCopy(Array(l, Cast(r, DecimalType.forType(r.dataType))))
        case (l, r @ DecimalType.Expression(_, _)) if isFloat(l.dataType) =>
          b.makeCopy(Array(l, Cast(r, DoubleType)))
        case (l @ DecimalType.Expression(_, _), r) if isFloat(r.dataType) =>
          b.makeCopy(Array(Cast(l, DoubleType), r))
        case _ => b
      }
  }
}
| jkbradley/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecision.scala | Scala | apache-2.0 | 16,108 |
/*******************************************************************************
Copyright (c) 2013, KAIST.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.bug_detector
import kr.ac.kaist.jsaf.analysis.cfg._
import kr.ac.kaist.jsaf.analysis.typing.CallContext._
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.analysis.typing.{SemanticsExpr => SE}
import kr.ac.kaist.jsaf.compiler.Parser
import kr.ac.kaist.jsaf.nodes_util.{NodeRelation, Walkers}
import kr.ac.kaist.jsaf.scala_src.nodes._
/**
 * AST-based bug checks that need abstract-state lookups: validates string
 * arguments passed to `JSON.parse` and `new Function` against the parser,
 * reporting ParseJSON / ParseFunctionParams / ParseFunctionBody bugs.
 */
class ASTDetect(bugDetector: BugDetector) {
  val ast = bugDetector.ast
  val cfg = bugDetector.cfg
  val bugStorage = bugDetector.bugStorage
  val stateManager = bugDetector.stateManager

  /** Walk the whole AST once, running both checks on matching call sites. */
  def check(): Unit = {
    object walker extends Walkers {
      override def walkAST(parent: Any, node: Any): Unit = {
        node match {
          // Check parsing arguments of "JSON.parse"
          case n@SFunApp(info, SDot(_, SVarRef(_, obj), mem), args)
            if obj.getText.equals("JSON") && mem.getText.equals("parse") && args.length > 0 =>
            // Map this AST call back to its CFG instructions to query the analysis state.
            NodeRelation.ast2cfgMap.get(n) match {
              case Some(cfgList) =>
                for (cfgInst <- cfgList) {
                  cfgInst match {
                    case inst@CFGCall(_, _, _, _, arguments, _) =>
                      val cfgNode = cfg.findEnclosingNode(inst)
                      val cstate = stateManager.getInputCState(cfgNode, inst.getInstId, _MOST_SENSITIVE)
                      for ((callContext, state) <- cstate) {
                        // Abstract locations of the arguments object at this call site.
                        val argLocSet = SE.V(arguments, state.heap, state.context)._1.locset
                        for (argLoc <- argLocSet) {
                          val argObj = state.heap(argLoc)
                          if (argObj != null && argObj.map != null) {
                            // Check parsing the first argument of "JSON.parse"
                            val res = argObj.map.get("0")
                            if (res.isDefined) {
                              // NOTE(review): deep tuple projection into the abstract value
                              // lattice; presumably extracts the string component — fragile
                              // against lattice layout changes, confirm against the domain types.
                              res.get._1._1._1._1._5 match {
                                case OtherStrSingle(str) if !Parser.parseJSON(str) =>
                                  bugStorage.addMessage(info.getSpan, ParseJSON, inst, callContext, str)
                                case _ =>
                              }
                            }
                          }
                        }
                      }
                    case _ =>
                  }
                }
              case None =>
            }

          // Check parsing arguments of "new Function"
          case n@SNew(info, SFunApp(_, SVarRef(_, id), args)) if id.getText.equals("Function") && args.length > 0 =>
            NodeRelation.ast2cfgMap.get(n) match {
              case Some(cfgList) =>
                for (cfgInst <- cfgList) {
                  cfgInst match {
                    case inst@CFGConstruct(_, _, _, _, arguments, _) =>
                      val cfgNode = cfg.findEnclosingNode(inst)
                      val cstate = stateManager.getInputCState(cfgNode, inst.getInstId, _MOST_SENSITIVE)
                      for ((callContext, state) <- cstate) {
                        val argLocSet = SE.V(arguments, state.heap, state.context)._1.locset
                        for (argLoc <- argLocSet) {
                          val argObj = state.heap(argLoc)
                          if (argObj != null && argObj.map != null) {
                            // Check parsing arguments except the last of "new Function"
                            // (all leading arguments are parameter lists).
                            args.zipWithIndex.dropRight(1).foreach(p => {
                              val res = argObj.map.get(p._2.toString)
                              if (res.isDefined) {
                                for(pvalue <- res.get._1.objval.value.pvalue) {
                                  if(pvalue.isConcrete) {
                                    // Strip surrounding quotes from the concrete string form.
                                    var str = pvalue.toString
                                    if(str.startsWith("\\"") && str.endsWith("\\"")) str = str.substring(1, str.length-1)
                                    if(!Parser.parseFunctionParams(str))
                                      bugStorage.addMessage(info.getSpan, ParseFunctionParams, inst, callContext, str)
                                  }
                                }
                              }
                            })
                            // Check parsing the last argument of "new Function"
                            // (the last argument is the function body).
                            val res = argObj.map.get((args.length-1).toString)
                            if (res.isDefined) {
                              for(pvalue <- res.get._1.objval.value.pvalue) {
                                if(pvalue.isConcrete()) {
                                  var str = pvalue.toString
                                  if(str.startsWith("\\"") && str.endsWith("\\"")) str = str.substring(1, str.length - 1)
                                  // Wrap in braces so the body parses as a block.
                                  if(!Parser.parseFunctionBody("{"+str+"}"))
                                    bugStorage.addMessage(info.getSpan, ParseFunctionBody, inst, callContext, str)
                                }
                              }
                            }
                          }
                        }
                      }
                    case _ =>
                  }
                }
              case None =>
            }
          case _ =>
        }
        // Walk child nodes
        super.walkAST(parent, node)
      }
    }
    // Walk AST nodes to collect only strict mode code ASTs
    walker.walkAST(null, ast)
  }
}
| daejunpark/jsaf | src/kr/ac/kaist/jsaf/bug_detector/ASTDetect.scala | Scala | bsd-3-clause | 5,911 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package toolkit.neuralnetwork.policy
import libcog._
/** Placeholder weight-initialization policy; any attempt to use it fails fast. */
case object StubInit extends WeightInitPolicy {
  // Always throws: StubInit exists only to satisfy the WeightInitPolicy interface
  // where real initialization is supplied elsewhere.
  override def initState(fieldShape: Shape, tensorShape: Shape): Field =
    throw new RuntimeException("cannot initialize weights with a StubInit policy")
}
| hpe-cct/cct-nn | src/main/scala/toolkit/neuralnetwork/policy/StubInit.scala | Scala | apache-2.0 | 894 |
package blended.itest.runner
import akka.actor.Actor
import akka.actor.Props
import blended.util.logging.Logger
import scala.util.Try
import scala.concurrent.Future
import scala.concurrent.ExecutionContext
import akka.pattern.pipe
import scala.util.Success
import scala.util.Failure
object TestRunner {
  /** Props for a runner that executes template `t` under the given test id. */
  def props(t : TestTemplate, testId : String) : Props = Props(new TestRunner(t, testId))
}
/**
 * Executes a single test derived from a [[TestTemplate]] and publishes lifecycle
 * [[TestEvent]]s (Started, then Success or Failed) on the actor system's event stream.
 *
 * The actor triggers itself with `Start` from preStart, runs the template's test
 * on a Future (off the actor thread), and pipes the outcome back to itself as a
 * `Result` message while in the `running` state.
 */
class TestRunner(t : TestTemplate, testId : String) extends Actor {

  private val log : Logger = Logger[TestRunner]
  private implicit val eCtxt : ExecutionContext = context.system.dispatcher

  // Internal protocol: kick off the test / deliver its outcome.
  case object Start
  case class Result(result : Try[Unit])

  override def toString: String = s"TestRunner(template = [$t], id = [$testId])"

  override def preStart(): Unit = {
    self ! Start
  }

  override def receive: Actor.Receive = {
    case Start =>
      val s : TestEvent = TestEvent(
        factoryName = t.factory.name,
        testName = t.name,
        id = testId,
        state = TestEvent.State.Started,
        timestamp = System.currentTimeMillis()
      )
      log.info(s"Starting test for template [${t.factory.name}::${t.name}] with id [${s.id}]")
      context.system.eventStream.publish(s)
      // Run the (potentially blocking) test off the actor thread and send the outcome back.
      val f : Future[Try[Unit]] = Future{ t.test(testId) }
      f.map(r => Result(r)).pipeTo(self)
      context.become(running(s))
  }

  private def running(s : TestEvent) : Receive = {
    case Result(Success(())) =>
      log.info(s"$toString() -- Test has succeeded.")
      finish(s.copy(state = TestEvent.State.Success, timestamp = System.currentTimeMillis()))
    case Result(Failure(e)) =>
      log.warn(e, true)(s"$toString() -- Test has failed.")
      finish(s.copy(state = TestEvent.State.Failed, timestamp = System.currentTimeMillis(), cause = Some(e)))
    case m =>
      // Bug fix: the original built this message string and discarded it; log it.
      log.warn(s"$toString() - Received unexpected message [$m]")
  }

  // Publish the terminal event for this test run.
  private def finish(s : TestEvent) : Unit = {
    context.system.eventStream.publish(s)
  }
}
| woq-blended/blended | blended.itest.runner/src/main/scala/blended/itest/runner/TestRunner.scala | Scala | apache-2.0 | 1,945 |
package rxbehaviors
import rx.lang.scala.Observable
case class Transition(trigger: Observable[Unit], target: State)
| furuholm/ReactiveBehaviors | src/main/scala/rxbehaviors/Transition.scala | Scala | apache-2.0 | 118 |
package org.jetbrains.plugins.scala
package lang
package completion3
import org.junit.Assert.assertFalse
/**
 * Verifies how completion lookup items are rendered in the editor popup.
 *
 * @author Alefas
 * @since 23.03.12
 */
class ScalaLookupRenderingTest extends ScalaCodeInsightTestBase {

  import ScalaCodeInsightTestBase._

  // A Java varargs parameter (int... x) should be rendered in the completion
  // tail text as a Scala repeated parameter: "(x: Int*)".
  def testJavaVarargs(): Unit = {
    configureJavaFile(
      fileText =
        """
          |package a;
          |
          |public class Java {
          |  public static void foo(int... x) {}
          |}
        """.stripMargin,
      className = "Java",
      packageName = "a"
    )
    configureTest(
      fileText =
        """
          |import a.Java
          |class A {
          |  Java.fo<caret>
          |}
        """.stripMargin
    )

    // Collect lookup elements matching the expected rendering.
    val lookups = this.lookups {
      hasItemText(_, "foo", "foo", tailText = "(x: Int*)")
    }
    assertFalse(lookups.isEmpty)
  }
}
| jastice/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/completion3/ScalaLookupRenderingTest.scala | Scala | apache-2.0 | 857 |
package org.workcraft.services
import java.awt.geom.Point2D
import org.workcraft.scala.effects.IO
import java.awt.geom.Rectangle2D
import java.awt.geom.AffineTransform
/** Service hook producing a [[Layout]] computation for a model scope. */
object LayoutService extends Service[ModelScope, IO[Layout]]

/** Direction in which a computed layout is applied, expressed as an affine transform. */
sealed abstract class LayoutOrientation (val transform: AffineTransform)

object LayoutOrientation {
  // Identity transform: layout is applied as computed.
  case object Up extends LayoutOrientation(new AffineTransform())
  // Vertical flip.
  case object Down extends LayoutOrientation (AffineTransform.getScaleInstance(1, -1))
  // Quarter-turn rotation (vertical layout rendered horizontally).
  case object LeftToRight extends LayoutOrientation (AffineTransform.getRotateInstance(scala.math.Pi / 2))
  // Arbitrary caller-supplied transform.
  case class Custom (t: AffineTransform) extends LayoutOrientation(t)
}

/** Marker for objects that can be positioned by a layout algorithm. */
trait LayoutNode

/** A computed layout: its input spec plus an action applying node positions to the model. */
case class Layout (spec: LayoutSpec, apply: List[(LayoutNode, Point2D.Double)] => IO[Unit])
/**
 * Input for a layout algorithm.
 *
 * Note: the original line carried dataset-concatenation residue fused after the
 * closing parenthesis, which broke compilation; it has been removed.
 *
 * @param nodes          all nodes to be placed
 * @param size           per-node dimensions as (width, height)
 * @param outgoingArcs   adjacency: targets of each node's outgoing arcs
 * @param nodeSeparation minimum gap between nodes in the same rank
 * @param rankSeparation minimum gap between consecutive ranks
 * @param orientation    transform applied to the final coordinates
 */
case class LayoutSpec (
  nodes: List[LayoutNode],
  size: LayoutNode => (Double, Double), // (width, height)
  outgoingArcs: LayoutNode => List[LayoutNode],
  nodeSeparation: Double,
  rankSeparation: Double,
  orientation: LayoutOrientation)
// Compiler pattern-match test fixture (scalac/dotty patmat regression t3097):
// a sealed hierarchy whose two concrete branches are covered by the match in
// Test.main. Code is intentionally preserved as-is; only the dataset residue
// fused onto the final closing brace has been removed.
sealed trait ISimpleValue

sealed trait IListValue extends ISimpleValue {
  def items: List[IAtomicValue[_]]
}

sealed trait IAtomicValue[O] extends ISimpleValue {
  def data: O
}

sealed trait IAbstractDoubleValue[O] extends IAtomicValue[O] {
}

sealed trait IDoubleValue extends IAbstractDoubleValue[Double]

case class ListValue(val items: List[IAtomicValue[_]]) extends IListValue

class DoubleValue(val data: Double) extends IDoubleValue {
  def asDouble = data
}

object Test {
  /**
   * @param args the command line arguments
   */
  def main(args: Array[String]): Unit = {
    val v: ISimpleValue = new DoubleValue(1)
    v match {
      case m: IListValue => println("list")
      case a: IAtomicValue[_] => println("atomic")
    }
  }
}
package com.sksamuel.elastic4s
import com.sksamuel.elastic4s.ElasticDsl._
import org.scalatest.{ FlatSpec, Matchers }
import org.scalatest.concurrent.ScalaFutures
import com.sksamuel.elastic4s.testkit.ElasticSugar
/** @author Stephen Samuel */
class GetTest extends FlatSpec with Matchers with ScalaFutures with ElasticSugar {

  // Seed two documents into the "beer" index; id 4 additionally carries a
  // multi-valued "ingredients" field used by the last test.
  client.execute {
    bulk(
      index into "beer/lager" fields ("name" -> "coors light", "brand" -> "coors", "ingredients" -> Seq("hops",
        "barley",
        "water",
        "yeast")) id 4,
      index into "beer/lager" fields ("name" -> "bud lite", "brand" -> "bud") id 8
    )
  }.await

  refresh("beer")
  blockUntilCount(2, "beer")

  "A Get request" should "retrieve a document by id" in {
    val resp = client.execute {
      get id 8 from "beer/lager"
    }.await
    resp.id shouldBe "8"
  }

  it should "retrieve a document asynchronously by id" in {
    val resp = client.execute {
      get id 8 from "beer/lager"
    }
    whenReady(resp) { result =>
      result.isExists should be(true)
      result.id shouldBe "8"
    }
  }

  it should "retrieve a document asynchronously by id w/ source" in {
    val resp = client.execute {
      get id 8 from "beer/lager"
    }
    whenReady(resp) { result =>
      result.isExists should be(true)
      result.id shouldBe "8"
      // Source is returned by default; no stored fields were requested.
      result.source should not be null
      result.fields should have size 0
    }
  }

  it should "retrieve a document asynchronously by id w/o source" in {
    val resp = client.execute {
      // fetchSourceContext false suppresses the _source in the response.
      get id 8 from "beer/lager" fetchSourceContext false
    }
    whenReady(resp) { result =>
      result.isExists should be(true)
      result.id shouldBe "8"
      result.source shouldBe Map.empty
      result.fields should have size 0
    }
  }

  it should "retrieve a document asynchronously by id w/ name and w/o source" in {
    val resp = client.execute {
      // Requesting specific fields implicitly disables _source fetching.
      get id 8 from "beer/lager" fields "name"
    }
    whenReady(resp) { result =>
      result.isExists should be(true)
      result.id shouldBe "8"
      result.source shouldBe Map.empty
      result.fields should (contain key "name" and not contain key("brand"))
    }
  }

  it should "retrieve a document asynchronously by id w/ name and brand and source" in {
    val resp = client.execute {
      get id 4 from "beer/lager" fields "name" fetchSourceContext true
    }
    whenReady(resp) { result =>
      result.isExists should be(true)
      result.id shouldBe "4"
      result.source should not be null
      result.fields should (contain key "name" and not contain key("brand"))
    }
  }

  it should "not retrieve any documents w/ unknown id" in {
    val resp = client.execute {
      get id 1 from "beer/lager" fields "name" fetchSourceContext true
    }
    whenReady(resp) { result =>
      result.isExists should be(false)
    }
  }

  import scala.collection.JavaConverters._

  it should "retrieve multi value fields" in {
    val resp = client.execute {
      get id 4 from "beer/lager" fields "ingredients"
    }
    whenReady(resp) {
      result => println(result.field("ingredients").getValues.asScala)
    }
  }
}
| sjoerdmulder/elastic4s | elastic4s-core-tests/src/test/scala/com/sksamuel/elastic4s/GetTest.scala | Scala | apache-2.0 | 3,139 |
/*******************************************************************************
* Copyright (c) 2019. Carl Minden
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package com.anathema_roguelike
package environment.features
import com.anathema_roguelike.main.display.VisualRepresentation
import com.anathema_roguelike.main.utilities.position.Orientation.Orientation
/**
 * A doorway feature that is currently open (passable).
 *
 * Note: the original closing brace carried dataset-concatenation residue,
 * which broke compilation; it has been removed.
 *
 * @param direction orientation of the doorway within the environment
 */
class OpenDoorway(val direction: Orientation) extends Doorway(direction, true) {
  // Rendered as a floor-like '.' glyph while open.
  override def getRepresentation = new VisualRepresentation('.')
  // Closing is a no-op here; presumably state transition is handled by the
  // owning Doorway/environment — TODO confirm.
  override def close(): Unit = {}
}
package spinoco.protocol.http.header
import spinoco.protocol.http.header.value.HeaderCodecDefinition
import spinoco.protocol.common.codec._
/**
 * A Websocket header indicating websocket protocol version.
 *
 * @param version Version value
 */
case class `Sec-WebSocket-Version`(version: Int) extends DefaultHeader

object `Sec-WebSocket-Version` {
  // Codec mapping the header's integer-as-string wire form to/from the case class.
  val codec =
  HeaderCodecDefinition[`Sec-WebSocket-Version`](intAsString.xmap (`Sec-WebSocket-Version`.apply,_.version))
}
| Spinoco/protocol | http/src/main/scala/spinoco/protocol/http/header/Sec-WebSocket-Version.scala | Scala | mit | 480 |
//make linear regression
import java.io._
import com.cloudera.datascience.lsa._
import com.cloudera.datascience.lsa.ParseWikipedia._
import com.cloudera.datascience.lsa.RunLSA._
import org.apache.spark.rdd.EmptyRDD
import scala.collection.mutable.ListBuffer
import org.apache.spark.mllib.linalg._
import org.apache.spark.mllib.linalg.distributed.RowMatrix
import breeze.linalg.{DenseMatrix => BDenseMatrix, DenseVector => BDenseVector, SparseVector => BSparseVector}
import org.apache.spark.mllib.regression._
import org.apache.spark.rdd._
// Load the ASAP essay training set (TSV) from the local working directory.
val csv = sc.textFile("file:"+new File(".").getCanonicalPath()+"/data/training_set_rel3.tsv")
// split / clean data: tab-split each line, trim cells, strip surrounding quotes
val headerAndRows = csv.map(line => line.split("\t").map(_.trim).map(_.replaceAll("(^\"|\"$)","")))
// get header
val header = headerAndRows.first
// filter out header (eh. just check if the first val matches the first header name)
val data = headerAndRows.filter(_(0) != header(0))
//data.foreach(println)
// Column 1 is the essay-set id; split the corpus into its eight sets.
var essay1=data.filter(a=>a(1).equals("1"))
var essay2=data.filter(a=>a(1).equals("2"))
var essay3=data.filter(a=>a(1).equals("3"))
var essay4=data.filter(a=>a(1).equals("4"))
var essay5=data.filter(a=>a(1).equals("5"))
var essay6=data.filter(a=>a(1).equals("6"))
var essay7=data.filter(a=>a(1).equals("7"))
var essay8=data.filter(a=>a(1).equals("8"))
// NOTE(review): broadcast(...).value immediately unwraps the broadcast on the
// driver; the stop-word set is then captured by closures the usual way.
val stopWordsIn = sc.broadcast(ParseWikipedia.loadStopWords("deps/lsa/src/main/resources/stopwords.txt")).value
val numTerms = 500
val k = 30 // number of singular values to keep
val nbConcept = 30
/**
 * Runs LSA over one essay set and persists the intermediate artifacts:
 *  - IDF weights ("idf_<name>")
 *  - per-document concept scores joined with the grade label
 *    ("docConceptLSAWithLabel_<name>")
 *  - per-term concept scores ("termConceptLSA_<name>")
 *
 * @param essay     rows of the training TSV for a single essay set
 * @param column    index of the grade column used as the label
 * @param name      suffix for the output directories
 * @param stopWords stop words excluded during lemmatization
 * @return always 0 (results are communicated via the saved files)
 */
def makeLSAAndSave( essay:RDD[Array[String]], column:Int , name:String,stopWords:Set[String]) :Int = {
  // (essay id, lemmas) pairs; drop documents with fewer than two lemmas.
  val lemmatized = essay.map(s=> (s(0),ParseWikipedia.plainTextToLemmas(s(2), stopWords, ParseWikipedia.createNLPPipeline())))
  val filtered = lemmatized.filter(_._2.size > 1)
  // FIX: count on the cluster instead of collect()-ing the whole RDD to the
  // driver just to take its length.
  val documentSize = essay.count().toInt
  println("Documents Size : "+documentSize)
  println("Number of Terms : "+numTerms)
  val (termDocMatrix, termIds, docIds, idfs) = ParseWikipedia.termDocumentMatrix(filtered, stopWords, numTerms, sc)
  //save idf for validation process
  //sc.parallelize(idfs.toSeq).saveAsObjectFile("idfObj_"+name)
  sc.parallelize(idfs.toSeq).saveAsTextFile("idf_"+name)
  val mat = new RowMatrix(termDocMatrix)
  val svd = mat.computeSVD(k, computeU=true)
  val topConceptTerms = RunLSA.topTermsInTopConcepts(svd, nbConcept, numTerms, termIds)
  val topConceptDocs = RunLSA.topDocsInTopConcepts(svd, nbConcept, documentSize, docIds)
  import collection.mutable.HashMap
  // Accumulate, per document, one score per concept; pad with 0.0 so every
  // document has exactly one entry per concept seen so far.
  val docConcept = new HashMap[String,ListBuffer[Double]]()
  var count = 0
  for ( a <- topConceptDocs) {
    count += 1
    for ( (b,c) <- a) {
      if (!docConcept.contains(b)) {
        docConcept.put(b, new ListBuffer[Double]())
      }
      docConcept(b) += c
    }
    for((doc,scores) <- docConcept){
      while(scores.size < count){
        scores += 0.0
      }
    }
  }
  // Join each document's concept vector with its grade label and save.
  val docConceptRDD = sc.parallelize(docConcept.toSeq)
  val toJoin = essay.map(s=> (s(0),s(column).toDouble))
  val joined = toJoin.join(docConceptRDD)
  val toWrite = joined.map(a => (a._1,a._2._1, a._2._2.toArray.mkString(",")))
  toWrite.saveAsTextFile("docConceptLSAWithLabel_"+name)
  //make labeled point (currently unused downstream; kept for later regression work)
  val labeled = joined.map(a => LabeledPoint(a._2._1, Vectors.dense(a._2._2.toArray)))
  // Same padding scheme for the per-term concept scores.
  val termConcept = new HashMap[String,ListBuffer[Double]]()
  count = 0
  for ( a <- topConceptTerms) {
    count += 1
    for ( (b,c) <- a) {
      if (!termConcept.contains(b)) {
        termConcept.put(b, new ListBuffer[Double]())
      }
      termConcept(b) += c
    }
    for((term,scores) <- termConcept){
      while(scores.size < count){
        scores += 0.0
      }
    }
  }
  val parr = sc.parallelize(termConcept.toSeq)
  parr.map(a => (a._1,a._2.toArray.mkString(","))).saveAsTextFile("termConceptLSA_"+name)
  0
}
// Run the LSA pipeline once per essay set (grade label is in column 3).
makeLSAAndSave(essay1,3,"Essay1",stopWordsIn)
makeLSAAndSave(essay2,3,"Essay2",stopWordsIn)
makeLSAAndSave(essay3,3,"Essay3",stopWordsIn)
makeLSAAndSave(essay4,3,"Essay4",stopWordsIn)
makeLSAAndSave(essay5,3,"Essay5",stopWordsIn)
makeLSAAndSave(essay6,3,"Essay6",stopWordsIn)
makeLSAAndSave(essay7,3,"Essay7",stopWordsIn)
makeLSAAndSave(essay8,3,"Essay8",stopWordsIn)
| StatisticalProject/essays | project/makeLSA.scala | Scala | apache-2.0 | 4,196 |
package com.lot.marketEvent.model
import java.sql.Date
import slick.driver.MySQLDriver.api._
import com.github.tototoshi.slick.H2JodaSupport._
import spray.json.DefaultJsonProtocol
import spray.json.RootJsonFormat
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormatter
import spray.json.JsString
import spray.json.JsValue
import spray.json.DeserializationException
import org.joda.time.format.ISODateTimeFormat
import com.lot.utils.CustomJson
/**
 * A market event (news, announcement, etc.) that can influence instrument
 * prices. Direction/intensity strings are the constants declared in
 * [[MarketEventType]].
 */
case class MarketEvent(id: Option[Long],
                       name: String,
                       event_type: String,
                       summary: String,
                       description: Option[String],
                       direction: String,
                       intensity: String,
                       asset_class: Option[String],
                       region: Option[String],
                       sector: Option[String],
                       ticker: Option[String],
                       external_url: Option[String],
                       created_at: Option[DateTime],
                       updated_at: Option[DateTime]) {

  /**
   * Returns `price` shifted by this event's intensity (1%/3%/5%) in this
   * event's direction, scaled by `factor` and rounded to the nearest whole
   * number. Throws MatchError for unrecognized intensity/direction strings.
   */
  def priceMultiplier(price: Double, factor: Int = 1) = {
    // Percentage magnitude implied by the intensity.
    val pct = intensity match {
      case MarketEventType.INTENSITY_HIGH => 5.0
      case MarketEventType.INTENSITY_MED  => 3.0
      case MarketEventType.INTENSITY_LOW  => 1.0
    }
    // Signed percentage move: positive for Up, negative for Down.
    val signedMove = direction match {
      case MarketEventType.DIRECTION_UP   => factor * pct
      case MarketEventType.DIRECTION_DOWN => -(factor * pct)
    }
    Math.round(price * (1 + signedMove / 100.0))
  }
}
/**
 * The DB schema: Slick table mapping for `market_events`.
 */
class MarketEventTable(tag: Tag) extends Table[MarketEvent](tag, "market_events") {
  /*
   * Auto inc primary key
   */
  def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
  /*
   * Updated automatically by the DAO on save
   */
  def created_at = column[Option[DateTime]]("created_at", O.Nullable)
  /*
   * Updated automatically by the DAO on update
   */
  def updated_at = column[Option[DateTime]]("updated_at", O.Nullable)
  /*
   * The rest of the domain specific fields
   */
  def name = column[String]("name")
  def event_type = column[String]("event_type", O.Length(10, varying = true))
  def summary = column[String]("summary", O.Length(255, varying = true))
  def description = column[String]("description", O.Nullable)
  def direction = column[String]("direction", O.Length(5, varying = true))
  def intensity = column[String]("intensity", O.Length(10, varying = true))
  def asset_class = column[String]("asset_class", O.Length(10, varying = true), O.Nullable)
  def region = column[String]("region", O.Length(10, varying = true), O.Nullable)
  def sector = column[String]("sector", O.Length(20, varying = true), O.Nullable)
  def ticker = column[String]("ticker", O.Length(5, varying = true), O.Nullable)
  def external_url = column[String]("external_url", O.Nullable)
  /*
   * Projection betw the DB and the model. Column order here must match the
   * MarketEvent constructor parameter order.
   */
  def * = (id.?, name, event_type, summary, description.?, direction, intensity, asset_class.?, region.?, sector.?, ticker.?, external_url.?, created_at, updated_at) <> (MarketEvent.tupled, MarketEvent.unapply)
}
/**
 * The JSON protocol: spray-json format for (de)serializing MarketEvent
 * (14 fields, hence jsonFormat14).
 */
object MarketEventJsonProtocol extends CustomJson {
  implicit val marketEventFormat = jsonFormat14(MarketEvent)
}
/**
 * String constants used to categorize market events. Referenced by
 * MarketEvent.priceMultiplier for direction and intensity matching.
 *
 * Note: the original closing brace carried dataset-concatenation residue,
 * which broke compilation; it has been removed.
 */
object MarketEventType {

  /** Asset classes, regions and sectors an event may target. */
  val ASSET_CLASSES = List("Bond", "Stock", "Derivative")
  val REGIONS = List("NA", "EMEA", "APAC")
  val SECTORS = List("Tech", "Pharma", "Auto", "Finance", "Consumer")

  /** Whether the event is market-wide or targeted. */
  val TYPE_MARKET = "Market"
  val TYPE_NON_MARKET = "Non Market"

  /** Price-move direction implied by the event. */
  val DIRECTION_DOWN = "Down"
  val DIRECTION_UP = "Up"

  /** Intensity buckets; see priceMultiplier for the percentage each implies. */
  val INTENSITY_HIGH = "High"
  val INTENSITY_MED = "Medium"
  val INTENSITY_LOW = "Low"
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import java.lang.ref.WeakReference
import scala.collection.mutable.{HashSet, SynchronizedSet}
import scala.language.existentials
import scala.util.Random
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.PatienceConfiguration
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._
import org.apache.spark.rdd.{ReliableRDDCheckpointData, RDD}
import org.apache.spark.storage._
import org.apache.spark.shuffle.hash.HashShuffleManager
import org.apache.spark.shuffle.sort.SortShuffleManager
import org.apache.spark.storage.BroadcastBlockId
import org.apache.spark.storage.RDDBlockId
import org.apache.spark.storage.ShuffleBlockId
import org.apache.spark.storage.ShuffleIndexBlockId
/**
 * An abstract base class for context cleaner tests, which sets up a context with a config
 * suitable for cleaner tests and provides some utility functions. Subclasses can use different
 * config options, in particular, a different shuffle manager class.
 */
abstract class ContextCleanerSuiteBase(val shuffleManager: Class[_] = classOf[HashShuffleManager])
  extends SparkFunSuite with BeforeAndAfter with LocalSparkContext
{
  implicit val defaultTimeout = timeout(10000 millis)
  // Blocking cleanup + checkpoint cleaning enabled so tests can assert
  // deterministically after a cleanup request.
  val conf = new SparkConf()
    .setMaster("local[2]")
    .setAppName("ContextCleanerSuite")
    .set("spark.cleaner.referenceTracking.blocking", "true")
    .set("spark.cleaner.referenceTracking.blocking.shuffle", "true")
    .set("spark.cleaner.referenceTracking.cleanCheckpoints", "true")
    .set("spark.shuffle.manager", shuffleManager.getName)

  before {
    sc = new SparkContext(conf)
  }

  after {
    if (sc != null) {
      sc.stop()
      sc = null
    }
  }

  // ------ Helper functions ------

  protected def newRDD() = sc.makeRDD(1 to 10)
  protected def newPairRDD() = newRDD().map(_ -> 1)
  protected def newShuffleRDD() = newPairRDD().reduceByKey(_ + _)
  protected def newBroadcast() = sc.broadcast(1 to 100)

  // Builds a shuffle RDD and returns it together with all of its
  // (transitively collected) shuffle dependencies.
  protected def newRDDWithShuffleDependencies(): (RDD[_], Seq[ShuffleDependency[_, _, _]]) = {
    def getAllDependencies(rdd: RDD[_]): Seq[Dependency[_]] = {
      println("dependencies:"+rdd.dependencies)
      rdd.dependencies ++ rdd.dependencies.flatMap { dep =>
        getAllDependencies(dep.rdd)
      }
    }
    val rdd = newShuffleRDD()

    // Get all the shuffle dependencies
    val shuffleDeps = getAllDependencies(rdd)
      .filter(_.isInstanceOf[ShuffleDependency[_, _, _]])
      .map(_.asInstanceOf[ShuffleDependency[_, _, _]])
    (rdd, shuffleDeps)
  }

  // A random RDD: plain, shuffled, or a self-join; possibly persisted,
  // always materialized with count().
  protected def randomRdd() = {
    val rdd: RDD[_] = Random.nextInt(3) match {
      case 0 => newRDD()
      case 1 => newShuffleRDD()
      case 2 => newPairRDD.join(newPairRDD())
    }
    if (Random.nextBoolean()) rdd.persist()
    rdd.count()
    rdd
  }

  /** Run GC and make sure it actually has run */
  protected def runGC() {
    // A weak reference is cleared only after a GC actually runs, so it acts
    // as a witness that at least one collection happened.
    val weakRef = new WeakReference(new Object())
    val startTime = System.currentTimeMillis
    System.gc() // Make a best effort to run the garbage collection. It *usually* runs GC.
    // Wait (up to 10 seconds) until the weak reference object has been GCed.
    while (System.currentTimeMillis - startTime < 10000 && weakRef.get != null) {
      System.gc()
      Thread.sleep(200)
    }
  }

  protected def cleaner = sc.cleaner.get
}
/**
 * Basic ContextCleanerSuite. Uses the base suite's default shuffle manager
 * (hash-based — see ContextCleanerSuiteBase's constructor default; the
 * sort-based variant lives in SortShuffleContextCleanerSuite).
 */
class ContextCleanerSuite extends ContextCleanerSuiteBase {
  test("cleanup RDD") {
    val rdd = newRDD().persist()
    val collected = rdd.collect().toList
    val tester = new CleanerTester(sc, rddIds = Seq(rdd.id))

    // Explicit cleanup (blocking = wait for the cleanup to finish)
    cleaner.doCleanupRDD(rdd.id, blocking = true)
    tester.assertCleanup()

    // Verify that RDDs can be re-executed after cleaning up
    assert(rdd.collect().toList === collected)
  }

  test("cleanup shuffle") {
    val (rdd, shuffleDeps) = newRDDWithShuffleDependencies()
    val collected = rdd.collect().toList
    val tester = new CleanerTester(sc, shuffleIds = shuffleDeps.map(_.shuffleId))

    // Explicit cleanup of every shuffle dependency
    shuffleDeps.foreach(s => cleaner.doCleanupShuffle(s.shuffleId, blocking = true))
    tester.assertCleanup()

    // Verify that shuffles can be re-executed after cleaning up
    assert(rdd.collect().toList.equals(collected))
  }

  test("cleanup broadcast") {
    val broadcast = newBroadcast()
    val tester = new CleanerTester(sc, broadcastIds = Seq(broadcast.id))

    // Explicit cleanup
    cleaner.doCleanupBroadcast(broadcast.id, blocking = true)
    tester.assertCleanup()
  }

  test("automatically cleanup RDD") {
    var rdd = newRDD().persist()
    rdd.count()

    // Test that GC does not cause RDD cleanup due to a strong reference
    val preGCTester = new CleanerTester(sc, rddIds = Seq(rdd.id))
    runGC()
    intercept[Exception] {
      preGCTester.assertCleanup()(timeout(1000 millis))
    }

    // Test that GC causes RDD cleanup after dereferencing the RDD
    // Note rdd is used after previous GC to avoid early collection by the JVM
    val postGCTester = new CleanerTester(sc, rddIds = Seq(rdd.id))
    rdd = null // Make RDD out of scope
    runGC()
    postGCTester.assertCleanup()
  }

  test("automatically cleanup shuffle") {
    var rdd = newShuffleRDD()
    rdd.count()

    // Test that GC does not cause shuffle cleanup due to a strong reference
    val preGCTester = new CleanerTester(sc, shuffleIds = Seq(0))
    runGC()
    intercept[Exception] {
      preGCTester.assertCleanup()(timeout(1000 millis))
    }
    rdd.count() // Defeat early collection by the JVM

    // Test that GC causes shuffle cleanup after dereferencing the RDD
    val postGCTester = new CleanerTester(sc, shuffleIds = Seq(0))
    rdd = null // Make RDD out of scope, so that corresponding shuffle goes out of scope
    runGC()
    postGCTester.assertCleanup()
  }

  test("automatically cleanup broadcast") {
    var broadcast = newBroadcast()

    // Test that GC does not cause broadcast cleanup due to a strong reference
    val preGCTester = new CleanerTester(sc, broadcastIds = Seq(broadcast.id))
    runGC()
    intercept[Exception] {
      preGCTester.assertCleanup()(timeout(1000 millis))
    }

    // Test that GC causes broadcast cleanup after dereferencing the broadcast variable
    // Note broadcast is used after previous GC to avoid early collection by the JVM
    val postGCTester = new CleanerTester(sc, broadcastIds = Seq(broadcast.id))
    broadcast = null // Make broadcast variable out of scope
    runGC()
    postGCTester.assertCleanup()
  }

  test("automatically cleanup normal checkpoint") {
    val checkpointDir = java.io.File.createTempFile("temp", "")
    // delete() removes the temp file now; deleteOnExit() asks the JVM to remove
    // whatever remains at this path when the VM terminates normally.
    checkpointDir.deleteOnExit()
    checkpointDir.delete()
    var rdd = newPairRDD()
    sc.setCheckpointDir(checkpointDir.toString)
    rdd.checkpoint()
    rdd.cache()
    rdd.collect()
    var rddId = rdd.id

    // Confirm the checkpoint directory exists
    assert(ReliableRDDCheckpointData.checkpointPath(sc, rddId).isDefined)
    val path = ReliableRDDCheckpointData.checkpointPath(sc, rddId).get
    val fs = path.getFileSystem(sc.hadoopConfiguration)
    assert(fs.exists(path))

    // With cleanCheckpoints enabled (base conf), GC after dereferencing should
    // remove the checkpoint files.
    var postGCTester = new CleanerTester(sc, Seq(rddId), Nil, Nil, Seq(rddId))
    rdd = null // Make RDD out of scope, ok if collected earlier
    runGC()
    postGCTester.assertCleanup()
    assert(!fs.exists(ReliableRDDCheckpointData.checkpointPath(sc, rddId).get))

    // Verify that checkpoints are NOT cleaned up if the config is not enabled
    sc.stop()
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("cleanupCheckpoint")
      .set("spark.cleaner.referenceTracking.cleanCheckpoints", "false")
    sc = new SparkContext(conf)
    rdd = newPairRDD()
    sc.setCheckpointDir(checkpointDir.toString)
    rdd.checkpoint()
    rdd.cache()
    rdd.collect()
    rddId = rdd.id

    // Confirm the checkpoint directory exists
    assert(fs.exists(ReliableRDDCheckpointData.checkpointPath(sc, rddId).get))

    // Reference rdd to defeat any early collection by the JVM
    rdd.count()

    // Test that GC causes checkpoint data cleanup after dereferencing the RDD
    postGCTester = new CleanerTester(sc, Seq(rddId))
    rdd = null // Make RDD out of scope
    runGC()
    postGCTester.assertCleanup()
    // Checkpoint files survive because cleanCheckpoints is disabled here.
    assert(fs.exists(ReliableRDDCheckpointData.checkpointPath(sc, rddId).get))
  }

  test("automatically clean up local checkpoint") {
    // Note that this test is similar to the RDD cleanup
    // test because the same underlying mechanism is used!
    var rdd = newPairRDD().localCheckpoint()
    assert(rdd.checkpointData.isDefined)
    assert(rdd.checkpointData.get.checkpointRDD.isEmpty)
    rdd.count()
    assert(rdd.checkpointData.get.checkpointRDD.isDefined)

    // Test that GC does not cause checkpoint cleanup due to a strong reference
    val preGCTester = new CleanerTester(sc, rddIds = Seq(rdd.id))
    runGC()
    intercept[Exception] {
      preGCTester.assertCleanup()(timeout(1000 millis))
    }

    // Test that RDD going out of scope does cause the checkpoint blocks to be cleaned up
    val postGCTester = new CleanerTester(sc, rddIds = Seq(rdd.id))
    rdd = null
    runGC()
    postGCTester.assertCleanup()
  }

  test("automatically cleanup RDD + shuffle + broadcast") {
    val numRdds = 100
    val numBroadcasts = 4 // Broadcasts are more costly
    val rddBuffer = (1 to numRdds).map(i => randomRdd()).toBuffer
    val broadcastBuffer = (1 to numBroadcasts).map(i => newBroadcast()).toBuffer
    val rddIds = sc.persistentRdds.keys.toSeq
    val shuffleIds = 0 until sc.newShuffleId
    val broadcastIds = broadcastBuffer.map(_.id)

    // Strong references in the buffers must prevent cleanup on GC.
    val preGCTester = new CleanerTester(sc, rddIds, shuffleIds, broadcastIds)
    runGC()
    intercept[Exception] {
      preGCTester.assertCleanup()(timeout(1000 millis))
    }

    // Test that GC triggers the cleanup of all variables after the dereferencing them
    val postGCTester = new CleanerTester(sc, rddIds, shuffleIds, broadcastIds)
    broadcastBuffer.clear()
    rddBuffer.clear()
    runGC()
    postGCTester.assertCleanup()

    // Make sure the broadcasted task closure no longer exists after GC.
    val taskClosureBroadcastId = broadcastIds.max + 1
    assert(sc.env.blockManager.master.getMatchingBlockIds({
      case BroadcastBlockId(`taskClosureBroadcastId`, _) => true
      case _ => false
    }, askSlaves = true).isEmpty)
  }

  test("automatically cleanup RDD + shuffle + broadcast in distributed mode") {
    sc.stop()

    val conf2 = new SparkConf()
     // .setMaster("local-cluster[2, 1, 1024]")
      .setMaster("local[*]")
      .setAppName("ContextCleanerSuite")
      .set("spark.cleaner.referenceTracking.blocking", "true")
      .set("spark.cleaner.referenceTracking.blocking.shuffle", "true")
      .set("spark.shuffle.manager", shuffleManager.getName)
    sc = new SparkContext(conf2)

    val numRdds = 10
    val numBroadcasts = 4 // Broadcasts are more costly
    val rddBuffer = (1 to numRdds).map(i => randomRdd()).toBuffer
    val broadcastBuffer = (1 to numBroadcasts).map(i => newBroadcast()).toBuffer
    val rddIds = sc.persistentRdds.keys.toSeq
    val shuffleIds = 0 until sc.newShuffleId
    val broadcastIds = broadcastBuffer.map(_.id)

    val preGCTester = new CleanerTester(sc, rddIds, shuffleIds, broadcastIds)
    runGC()
    intercept[Exception] {
      preGCTester.assertCleanup()(timeout(1000 millis))
    }

    // Test that GC triggers the cleanup of all variables after the dereferencing them
    val postGCTester = new CleanerTester(sc, rddIds, shuffleIds, broadcastIds)
    broadcastBuffer.clear()
    rddBuffer.clear()
    runGC()
    postGCTester.assertCleanup()

    // Make sure the broadcasted task closure no longer exists after GC.
    val taskClosureBroadcastId = broadcastIds.max + 1
    assert(sc.env.blockManager.master.getMatchingBlockIds({
      case BroadcastBlockId(`taskClosureBroadcastId`, _) => true
      case _ => false
    }, askSlaves = true).isEmpty)
  }
}
/**
 * A copy of the shuffle tests for sort-based shuffle.
 */
class SortShuffleContextCleanerSuite extends ContextCleanerSuiteBase(classOf[SortShuffleManager]) {
  test("cleanup shuffle") {
    val (rdd, shuffleDeps) = newRDDWithShuffleDependencies()
    val collected = rdd.collect().toList
    val tester = new CleanerTester(sc, shuffleIds = shuffleDeps.map(_.shuffleId))

    // Explicit cleanup
    shuffleDeps.foreach(s => cleaner.doCleanupShuffle(s.shuffleId, blocking = true))
    tester.assertCleanup()

    // Verify that shuffles can be re-executed after cleaning up
    assert(rdd.collect().toList.equals(collected))
  }

  test("automatically cleanup shuffle") {
    var rdd = newShuffleRDD()
    rdd.count()

    // Test that GC does not cause shuffle cleanup due to a strong reference
    val preGCTester = new CleanerTester(sc, shuffleIds = Seq(0))
    runGC()
    intercept[Exception] {
      preGCTester.assertCleanup()(timeout(1000 millis))
    }
    rdd.count() // Defeat early collection by the JVM

    // Test that GC causes shuffle cleanup after dereferencing the RDD
    val postGCTester = new CleanerTester(sc, shuffleIds = Seq(0))
    rdd = null // Make RDD out of scope, so that corresponding shuffle goes out of scope
    runGC()
    postGCTester.assertCleanup()
  }

  test("automatically cleanup RDD + shuffle + broadcast in distributed mode") {
    sc.stop()

    val conf2 = new SparkConf()
     // .setMaster("local-cluster[2, 1, 1024]")
      .setMaster("local[*]")
      .setAppName("ContextCleanerSuite")
      .set("spark.cleaner.referenceTracking.blocking", "true")
      .set("spark.cleaner.referenceTracking.blocking.shuffle", "true")
      .set("spark.shuffle.manager", shuffleManager.getName)
    sc = new SparkContext(conf2)

    val numRdds = 10
    val numBroadcasts = 4 // Broadcasts are more costly
    val rddBuffer = (1 to numRdds).map(i => randomRdd).toBuffer
    val broadcastBuffer = (1 to numBroadcasts).map(i => newBroadcast).toBuffer
    val rddIds = sc.persistentRdds.keys.toSeq
    val shuffleIds = 0 until sc.newShuffleId()
    val broadcastIds = broadcastBuffer.map(_.id)

    // Strong references in the buffers must prevent cleanup on GC.
    val preGCTester = new CleanerTester(sc, rddIds, shuffleIds, broadcastIds)
    runGC()
    intercept[Exception] {
      preGCTester.assertCleanup()(timeout(1000 millis))
    }

    // Test that GC triggers the cleanup of all variables after the dereferencing them
    val postGCTester = new CleanerTester(sc, rddIds, shuffleIds, broadcastIds)
    broadcastBuffer.clear()
    rddBuffer.clear()
    runGC()
    postGCTester.assertCleanup()

    // Make sure the broadcasted task closure no longer exists after GC.
    val taskClosureBroadcastId = broadcastIds.max + 1
    assert(sc.env.blockManager.master.getMatchingBlockIds({
      case BroadcastBlockId(`taskClosureBroadcastId`, _) => true
      case _ => false
    }, askSlaves = true).isEmpty)
  }
}
/**
 * Class to test whether RDDs, shuffles, broadcasts and checkpoints have been
 * successfully cleaned by the ContextCleaner.
 *
 * The checkpoint ids here refer only to normal (reliable) checkpoints, not local
 * checkpoints.
 *
 * @param sc            the SparkContext whose cleaner is under test
 * @param rddIds        ids of persisted RDDs expected to be cleaned
 * @param shuffleIds    ids of registered shuffles expected to be cleaned
 * @param broadcastIds  ids of broadcast variables expected to be cleaned
 * @param checkpointIds ids of checkpointed RDDs expected to be cleaned
 */
class CleanerTester(
    sc: SparkContext,
    rddIds: Seq[Int] = Seq.empty,
    shuffleIds: Seq[Int] = Seq.empty,
    broadcastIds: Seq[Long] = Seq.empty,
    checkpointIds: Seq[Long] = Seq.empty)
  extends Logging {

  // Resources still awaiting cleanup. The listener callbacks below remove each id
  // once the ContextCleaner reports it cleaned.
  // NOTE(review): HashSet with SynchronizedSet is deprecated since Scala 2.11;
  // a java.util.concurrent-backed set would be the modern replacement.
  val toBeCleanedRDDIds = new HashSet[Int] with SynchronizedSet[Int] ++= rddIds
  val toBeCleanedShuffleIds = new HashSet[Int] with SynchronizedSet[Int] ++= shuffleIds
  val toBeCleanedBroadcstIds = new HashSet[Long] with SynchronizedSet[Long] ++= broadcastIds
  val toBeCheckpointIds = new HashSet[Long] with SynchronizedSet[Long] ++= checkpointIds
  val isDistributed = !sc.isLocal

  // Receives cleanup notifications from the ContextCleaner and ticks off the
  // corresponding ids from the to-be-cleaned sets above.
  val cleanerListener = new CleanerListener {
    def rddCleaned(rddId: Int): Unit = {
      toBeCleanedRDDIds -= rddId
      logInfo("RDD " + rddId + " cleaned")
    }

    def shuffleCleaned(shuffleId: Int): Unit = {
      toBeCleanedShuffleIds -= shuffleId
      logInfo("Shuffle " + shuffleId + " cleaned")
    }

    def broadcastCleaned(broadcastId: Long): Unit = {
      toBeCleanedBroadcstIds -= broadcastId
      logInfo("Broadcast " + broadcastId + " cleaned")
    }

    def accumCleaned(accId: Long): Unit = {
      // Accumulators are not tracked by this tester; just log the event.
      // (Fixed redundant wording of the original message: "Cleaned accId X cleaned".)
      logInfo("Cleaned accumulator " + accId)
    }

    def checkpointCleaned(rddId: Long): Unit = {
      toBeCheckpointIds -= rddId
      logInfo("checkpoint " + rddId + " cleaned")
    }
  }

  val MAX_VALIDATION_ATTEMPTS = 10
  val VALIDATION_ATTEMPT_INTERVAL = 100

  // Verify the preconditions *before* attaching the listener, so that cleanup
  // events cannot race with the initial validation.
  logInfo("Attempting to validate before cleanup:\\n" + uncleanedResourcesToString)
  preCleanupValidate()
  sc.cleaner.get.attachListener(cleanerListener)

  /**
   * Assert that all of the tracked resources have been cleaned up, retrying
   * until the given timeout elapses.
   */
  def assertCleanup()(implicit waitTimeout: PatienceConfiguration.Timeout) {
    try {
      eventually(waitTimeout, interval(100 millis)) {
        assert(isAllCleanedUp)
      }
      postCleanupValidate()
    } finally {
      // Log whatever (if anything) was left uncleaned, for diagnosis.
      logInfo("Resources left from cleaning up:\\n" + uncleanedResourcesToString)
    }
  }

  /**
   * Verify that the RDDs, shuffles and broadcasts under test actually occupy
   * resources before cleanup begins; otherwise the test would pass vacuously.
   */
  private def preCleanupValidate() {
    assert(rddIds.nonEmpty || shuffleIds.nonEmpty || broadcastIds.nonEmpty ||
      checkpointIds.nonEmpty, "Nothing to cleanup")

    // Verify the RDDs have been persisted and their blocks are present.
    rddIds.foreach { rddId =>
      assert(
        sc.persistentRdds.contains(rddId),
        "RDD " + rddId + " have not been persisted, cannot start cleaner test"
      )
      assert(
        !getRDDBlocks(rddId).isEmpty,
        "Blocks of RDD " + rddId + " cannot be found in block manager, " +
          "cannot start cleaner test"
      )
    }

    // Verify the shuffle ids are registered and their blocks are present.
    shuffleIds.foreach { shuffleId =>
      assert(
        mapOutputTrackerMaster.containsShuffle(shuffleId),
        "Shuffle " + shuffleId + " have not been registered, cannot start cleaner test"
      )
      assert(
        !getShuffleBlocks(shuffleId).isEmpty,
        "Blocks of shuffle " + shuffleId + " cannot be found in block manager, " +
          "cannot start cleaner test"
      )
    }

    // Verify that the broadcast blocks are present.
    broadcastIds.foreach { broadcastId =>
      assert(
        !getBroadcastBlocks(broadcastId).isEmpty,
        // Fixed missing space after the broadcast id in the original message.
        "Blocks of broadcast " + broadcastId + " cannot be found in block manager, " +
          "cannot start cleaner test"
      )
    }
  }

  /**
   * Verify that RDDs, shuffles, etc. no longer occupy resources after cleanup.
   * Called from within `eventually`, as there is no guarantee on how long the
   * cleanup will take.
   */
  private def postCleanupValidate() {
    // Verify the RDD blocks have been released.
    rddIds.foreach { rddId =>
      assert(
        !sc.persistentRdds.contains(rddId),
        "RDD " + rddId + " was not cleared from sc.persistentRdds"
      )
      assert(
        getRDDBlocks(rddId).isEmpty,
        "Blocks of RDD " + rddId + " were not cleared from block manager"
      )
    }

    // Verify the shuffles have been deregistered and their blocks released.
    shuffleIds.foreach { shuffleId =>
      assert(
        !mapOutputTrackerMaster.containsShuffle(shuffleId),
        "Shuffle " + shuffleId + " was not deregistered from map output tracker"
      )
      assert(
        getShuffleBlocks(shuffleId).isEmpty,
        "Blocks of shuffle " + shuffleId + " were not cleared from block manager"
      )
    }

    // Verify that the broadcast blocks have been released.
    broadcastIds.foreach { broadcastId =>
      assert(
        getBroadcastBlocks(broadcastId).isEmpty,
        "Blocks of broadcast " + broadcastId + " were not cleared from block manager"
      )
    }
  }

  // Human-readable summary of the ids still awaiting cleanup (for log output).
  private def uncleanedResourcesToString = {
    s"""
    |\\tRDDs = ${toBeCleanedRDDIds.toSeq.sorted.mkString("[", ", ", "]")}
    |\\tShuffles = ${toBeCleanedShuffleIds.toSeq.sorted.mkString("[", ", ", "]")}
    |\\tBroadcasts = ${toBeCleanedBroadcstIds.toSeq.sorted.mkString("[", ", ", "]")}
    """.stripMargin
  }

  // True once every tracked id has been reported cleaned by the listener.
  private def isAllCleanedUp =
    toBeCleanedRDDIds.isEmpty &&
      toBeCleanedShuffleIds.isEmpty &&
      toBeCleanedBroadcstIds.isEmpty &&
      toBeCheckpointIds.isEmpty

  // Block ids belonging to the given RDD, queried cluster-wide.
  private def getRDDBlocks(rddId: Int): Seq[BlockId] = {
    blockManager.master.getMatchingBlockIds( _ match {
      case RDDBlockId(`rddId`, _) => true
      case _ => false
    }, askSlaves = true)
  }

  // Data and index block ids belonging to the given shuffle, queried cluster-wide.
  private def getShuffleBlocks(shuffleId: Int): Seq[BlockId] = {
    blockManager.master.getMatchingBlockIds( _ match {
      case ShuffleBlockId(`shuffleId`, _, _) => true
      case ShuffleIndexBlockId(`shuffleId`, _, _) => true
      case _ => false
    }, askSlaves = true)
  }

  // Block ids belonging to the given broadcast variable, queried cluster-wide.
  private def getBroadcastBlocks(broadcastId: Long): Seq[BlockId] = {
    blockManager.master.getMatchingBlockIds( _ match {
      case BroadcastBlockId(`broadcastId`, _) => true
      case _ => false
    }, askSlaves = true)
  }

  private def blockManager = sc.env.blockManager

  private def mapOutputTrackerMaster = sc.env.mapOutputTracker.asInstanceOf[MapOutputTrackerMaster]
}
| tophua/spark1.52 | core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala | Scala | apache-2.0 | 25,648 |
package com.greencatsoft.angularjs.test
import com.greencatsoft.angularjs.core.Injector
import com.greencatsoft.angularjs.{Angular, Module, internal}
import org.scalajs.dom.document
import scala.language.experimental.macros
/** Provides an injector for your test suites.
*
* Setup for example like this:
* {{{
* class MyDirectiveSpec extends FunSpec with AngularTestEnvironment with ScopeOps with MustMatchers {
* override val module = Angular.module("app", Seq("ngAnimate", "ngMaterial")).directive[MyDirective]
* override val moduleName = "app"
*
* describe("MyDirective") {
* it("must render") {
* val scope = inject[RootScope].\\$new(true)
* scope.dynamic.greeting = "Hello World!"
*
* val tag = """<my-directive greeting="{{greeting}}"></my-directive>"""
* val element = inject[Compile](tag)(scope, null)
* scope.\\$digest()
*
* element.textContent must be ("Hello World!")
* }
* }
* }
* }}}
*/
trait AngularTestEnvironment {

  /** The angular module under test, e.g.
   *  {{{Angular.module("app", Seq("ngAnimate", "ngMaterial")).directive[MyDirective]}}}
   */
  val module: Module

  /** Name of the application module handed to the bootstrapper. */
  val moduleName: String

  /** Injector obtained by bootstrapping Angular on the document root.
   *
   *  Lazy so that bootstrapping only happens once, on first use. Prefer the
   *  `inject[A]` helper below for more readable access to services.
   */
  implicit lazy val injector: Injector =
    Angular.bootstrap(document.documentElement, moduleName)

  /** Resolves an angular service by its type via the bootstrapped injector. */
  def inject[A](implicit injector: Injector): A = macro internal.Injector.get[A]
}
| greencatsoft/scalajs-angular | src/main/scala/com/greencatsoft/angularjs/test/AngularTestEnvironment.scala | Scala | apache-2.0 | 1,829 |
/*
* Copyright 2009-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt.{Project => SbtProject, _}
// Base class for all sub-projects: standard sbt 0.7 project with IDEA support
// plus helpers to collect the full runtime classpath for distribution.
abstract class AbstractProject(info: ProjectInfo) extends DefaultProject(info) with IdeaProject {
  // Every jar needed at runtime: this project's jars plus those of all project
  // dependencies (recursively, via each dependency's own `jars`), de-duplicated.
  def transitiveDepJars = (jars +++ Path.lazyPathFinder { dependencies.flatMap(jars(_)) }).distinct

  // Scala library jars, this project's packaged jar, and managed/unmanaged libs.
  private def jars: PathFinder = mainDependencies.scalaJars +++ projectJar +++ managedDepJars +++ unmanagedDepJars
  // Jars contributed by a dependency project; only AbstractProject instances contribute.
  private def jars(p: SbtProject): Seq[Path] = p match { case cp: AbstractProject => cp.jars.get.toList; case _ => Nil }
  // The packaged jar of this project (`##` sets the base directory for relative
  // path mapping in sbt 0.7 — TODO confirm against sbt 0.7 docs).
  private def projectJar = ((outputPath ##) / defaultJarName)
  private def managedDepJars = descendents(managedDependencyPath / "compile" ##, "*.jar")
  private def unmanagedDepJars = descendents(info.projectPath / "lib" ##, "*.jar")
  // Compile with -unchecked everywhere.
  override def compileOptions = super.compileOptions ++ Seq(Unchecked)
}
// Root multi-module build: the ccf library, a demo text application and a
// performance test harness. Both applications depend on the library.
class Project(info: ProjectInfo) extends ParentProject(info) with IdeaProject { rootProject =>
  lazy val lib = project("ccf", "ccf", new CcfLibraryProject(_))
  lazy val app = project("app", "app", new TextAppProject(_), lib)
  lazy val perftest = project("perftest", "perftest", new PerftestProject(_), lib)

  // The core ccf library with its test-scoped dependencies.
  class CcfLibraryProject(info: ProjectInfo) extends AbstractProject(info) {
    override def mainClass = Some("TestMain")

    val testScopeDependency = "test"

    val databinder_net = "databinder.net repository" at "http://databinder.net/repo"
    val dispatchHttp = "net.databinder" %% "dispatch-http" % "0.7.4"
    val httpclient = "org.apache.httpcomponents" % "httpclient" % "4.0.1"
    val specs = "org.scala-tools.testing" % "specs_2.8.0" % "1.6.5" % testScopeDependency
    val mockito = "org.mockito" % "mockito-core" % "1.8.4" % testScopeDependency
    val scalacheck = "org.scala-tools.testing" % "scalacheck" % "1.5" % testScopeDependency
  }

  // Swing-based demo application built on Dispatch and lift-json.
  class TextAppProject(info: ProjectInfo) extends AbstractProject(info) {
    override def mainClass = Some("textapp.TextAppMain")

    val databinder_net = "databinder.net repository" at "http://databinder.net/repo"
    val dispatchHttp = "net.databinder" %% "dispatch-http" % "0.7.4"
    val dispatchJson = "net.databinder" %% "dispatch-json" % "0.7.4"
    val dispatchHttpJson = "net.databinder" %% "dispatch-http-json" % "0.7.4"
    val liftJson = "net.liftweb" % "lift-json_2.8.0" % "2.1-M1"
    val jGoodiesForms = "com.jgoodies" % "forms" % "1.2.0"
  }

  // Performance test harness; packaged as a zip containing all transitive jars,
  // which are referenced from the jar manifest's Class-Path.
  class PerftestProject(info: ProjectInfo) extends AbstractProject(info) {
    override def manifestClassPath = Some(distFileJars.map(_.getName).mkString(" "))
    override def mainClass = Some("perftest.Perftest")

    val jetty7 = "org.eclipse.jetty" % "jetty-webapp" % "7.1.0.RC0"

    // Build the distribution zip after packaging the project jar.
    lazy val dist = zipTask(transitiveDepJars, "dist", distName) dependsOn (`package`)
    private def distName = "%s-%s.zip".format(name, version)
    private def distFileJars = transitiveDepJars.getFiles.filter(_.getName.endsWith(".jar"))
  }
}
| akisaarinen/ccf | project/build/Project.scala | Scala | apache-2.0 | 3,467 |
package scala.collection.mutable
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._
import org.openjdk.jmh.runner.IterationType
import benchmark._
import java.util.concurrent.TimeUnit
import scala.collection.mutable
@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
@Threads(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
class HashMapBenchmark {
  /** Number of entries pre-loaded into the map before each measurement. */
  @Param(Array("10", "100", "1000"))
  var size: Int = _

  // Keys known to be present (mixed String/Char/Double/Int to exercise hashing
  // of different key classes) and keys known to be absent.
  var existingKeys: Array[Any] = _
  var missingKeys: Array[Any] = _

  @Setup(Level.Trial) def initKeys(): Unit = {
    existingKeys = (0 to size).map(i => (i % 4) match {
      case 0 => i.toString
      case 1 => i.toChar
      case 2 => i.toDouble
      case 3 => i.toInt
    }).toArray
    missingKeys = (size to 2 * size).toArray
  }

  var map = new mutable.HashMap[Any, Any]

  // Refill the shared map before every invocation; clear it afterwards so each
  // invocation sees exactly `size + 1` entries.
  @Setup(Level.Invocation) def initializeMutable(): Unit = existingKeys.foreach(v => map.put(v, v))
  @TearDown(Level.Invocation) def tearDown(): Unit = map.clear()

  /** Measures getOrElseUpdate for both present and absent keys. */
  @Benchmark def getOrElseUpdate(bh: Blackhole): Unit = {
    var i = 0
    while (i < size) {
      bh.consume(map.getOrElseUpdate(existingKeys(i), -1))
      bh.consume(map.getOrElseUpdate(missingKeys(i), -1))
      i += 1
    }
  }

  /** Measures get for both present and absent keys.
   *
   *  Bug fix: the original called `map.get(existingKeys(i), -1)`. Since `get`
   *  takes a single key argument, the compiler auto-tupled the call into
   *  `map.get((existingKeys(i), -1))` — a Tuple2 key lookup that can never
   *  succeed, so the benchmark never measured a hit.
   */
  @Benchmark def get(bh: Blackhole): Unit = {
    var i = 0
    while (i < size) {
      bh.consume(map.get(existingKeys(i)))
      bh.consume(map.get(missingKeys(i)))
      i += 1
    }
  }

  /** Measures inserting `size` entries into a fresh map. */
  @Benchmark def put(bh: Blackhole): Any = {
    val map = new mutable.HashMap[Any, Any]
    var i = 0
    while (i < size) {
      map.put(existingKeys(i), i)
      i += 1
    }
    map
  }
}
| felixmulder/scala | test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala | Scala | bsd-3-clause | 1,666 |
/* Copyright 2014 White Label Personal Clouds Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.welcomer.rulesets.welcomerId
import scala.collection.mutable.HashMap
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.{ Failure, Success }
import me.welcomer.framework.models.{ ECI, Pico }
import me.welcomer.framework.pico.{ EventedEvent, PicoRuleset }
import me.welcomer.framework.pico.service.PicoServicesComponent
import me.welcomer.framework.pico.ruleset.patterns.TransactionIdMapping
import me.welcomer.rulesets.welcomerId.WelcomerIdSchema._
import play.api.libs.json._
import scala.util.control.NonFatal
/**
 * Ruleset run inside a vendor pico. Subscribes to the welcomerId, vendor and
 * user event domains and dispatches each incoming evented event to the matching
 * typed handler defined in [[VendorRulesetHelper]].
 */
class VendorRuleset(picoServices: PicoServicesComponent#PicoServices)
  extends PicoRuleset(picoServices)
  with TransactionIdMapping
  with VendorRulesetHelper {
  import context._
  import me.welcomer.rulesets.welcomerId.WelcomerIdSchema._

  subscribeToEventDomain(EventDomain.WELCOMER_ID)
  subscribeToEventDomain(EventDomain.VENDOR)
  subscribeToEventDomain(EventDomain.USER)

  // Event dispatch table: deserialize each event's attributes into its schema
  // type and route it to the corresponding handler. Unknown event types are
  // logged and ignored.
  override def selectWhen = {
    case event @ EventedEvent(EventDomain.VENDOR, eventType, _, _, _) => {
      logEventInfo

      eventType match {
        case EventType.INITIALISE_USER => handleEvent[InitialiseUser](handleInitialiseUser)
        case EventType.RETRIEVE_USER_DATA => handleEvent[VendorRetrieveUserData](handleRetrieveUserData)
        case EventType.USER_DATA_AVAILABLE => handleEvent[VendorUserDataAvailable](handleUserDataAvailable)
        case EventType.RETRIEVE_EDENTITI_ID_OR_NEW_USER_DATA => handleEvent[VendorRetrieveEdentitiIdOrNewUserData](handleVendorRetrieveEdentitiIdOrNewUserData)
        case EventType.EDENTITI_NEW_USER => handleEvent[EdentitiNewUser](handleEdentitiNewUser)
        case EventType.USER_VERIFICATION_NOTIFICATION => handleEvent[UserVerificationNotification](handleUserVerificationNotification)
        case _ => log.debug("Unhandled {} EventedEvent Received ({})", EventDomain.VENDOR, event)
      }
    }
    case event @ EventedEvent(EventDomain.USER, eventType, _, _, _) => {
      logEventInfo

      eventType match {
        case EventType.USER_DATA => handleEvent[UserData](handleUserData)
        case EventType.EDENTITI_ID => handleEvent[EdentitiId](handleEdentitiId)
        case EventType.EDENTITI_NEW_USER_DATA => handleEvent[EdentitiNewUserData](handleEdentitiNewUserData)
        case _ => log.debug("Unhandled {} EventedEvent Received ({})", EventDomain.USER, event)
      }
    }
  }
}
/**
 * Event handlers and persistence helpers for [[VendorRuleset]].
 *
 * Vendor-domain handlers create/map user picos and forward events to them;
 * user-domain handlers relay replies back to the original requester via the
 * transaction-id mapping.
 */
trait VendorRulesetHelper { this: VendorRuleset =>
  import me.welcomer.rulesets.welcomerId.WelcomerIdSchema._
  import me.welcomer.framework.utils.ImplicitConversions._

  // PDS namespaces used by this ruleset.
  val USER_NAMESPACE = "user"
  val VENDOR_PREFERENCES_NAMESPACE = "vendorPreferences"

  // Rulesets installed into every newly created user pico.
  val USER_RULESETS = Set("welcomerId.UserRuleset")

  // -------
  // Vendor
  // -------

  /** Creates and maps a new user pico for the channel, unless one already exists. */
  protected def handleInitialiseUser(o: InitialiseUser, event: EventedEvent)(implicit ec: ExecutionContext): Unit = {
    // NOTE(review): `transactionId` is never read below (o.transactionId is used
    // instead); kept in case curTransactionId has mapping side effects — confirm.
    val transactionId = curTransactionId

    retrieveUserMapping(o.channelDetails) map {
      case Some(mapping) => log.info("ECI found for user, nothing to do: {} ({})", mapping.userEci, event)
      case None => {
        mapNewUserPico(o.channelDetails) onComplete {
          case Success((userEci, vendorEci, storeResult)) => {
            log.info("User pico created & mapped: {}->{} ({})", o.channelDetails, userEci, storeResult);

            val vendorData = Json.obj(
              "vendorId" -> vendorEci, // Do we actually have/need a 'vendorId' anymore?
              "vendorEci" -> vendorEci)

            val userData = Json.obj() // TODO: Will we ever have any data to store here? Handle certain known channels maybe? (email, mobile, etc)

            raiseRemoteEvent(
              EventDomain.USER,
              EventType.CREATED,
              Created(o.transactionId, vendorData, userData),
              userEci)
          }
          case Failure(e) => log.error(e, "Error creating/mapping new User pico: {} ({}, {})", e.getMessage(), o, event);
        }
      }
    }
  }

  /** Forwards a user-data retrieval request to the mapped user pico. */
  protected def handleRetrieveUserData(o: VendorRetrieveUserData, event: EventedEvent)(implicit ec: ExecutionContext): Unit = {
    mapReplyToEci(o.transactionId, o.replyTo)

    // Mapping found: send the request on to the user pico.
    def success(m: UserPicoMapping) = {
      log.info("Sending {}::{} to {} ({})", EventDomain.USER, EventType.RETRIEVE_USER_DATA, m.userEci, o.transactionId);

      EventedEvent(
        EventDomain.USER,
        EventType.RETRIEVE_USER_DATA,
        attributes = UserRetrieveUserData(o.transactionId, o.filter, m.vendorEci),
        entityId = m.userEci)
    }

    def failure = unknownUserFailure(o.transactionId, o.channelDetails)

    raiseRemoteEventWithChannel(o.channelDetails, success, failure)
  }

  /** Forwards new user data to the mapped user pico. */
  protected def handleUserDataAvailable(o: VendorUserDataAvailable, event: EventedEvent)(implicit ec: ExecutionContext): Unit = {
    mapReplyToEci(o.transactionId)

    def success(m: UserPicoMapping) = {
      log.info("Sending {}::{} to {} ({})", EventDomain.USER, EventType.USER_DATA_AVAILABLE, m.userEci, o.transactionId);

      EventedEvent(
        EventDomain.USER,
        EventType.USER_DATA_AVAILABLE,
        attributes = UserUserDataAvailable(o.transactionId, o.data),
        entityId = m.userEci)
    }

    def failure = unknownUserFailure(o.transactionId, o.channelDetails)

    raiseRemoteEventWithChannel(o.channelDetails, success, failure)
  }

  /** Asks the mapped user pico for its Edentiti id (or the data to create one). */
  protected def handleVendorRetrieveEdentitiIdOrNewUserData(o: VendorRetrieveEdentitiIdOrNewUserData, event: EventedEvent)(implicit ec: ExecutionContext): Unit = {
    mapReplyToEci(o.transactionId, o.replyTo)

    def success(m: UserPicoMapping) = {
      log.info("Sending {}::{} to {} ({})", EventDomain.USER, EventType.RETRIEVE_EDENTITI_ID_OR_NEW_USER_DATA, m.userEci, o.transactionId);

      EventedEvent(
        EventDomain.USER,
        EventType.RETRIEVE_EDENTITI_ID_OR_NEW_USER_DATA,
        attributes = UserRetrieveEdentitiIdOrNewUserData(o.transactionId, m.vendorEci),
        entityId = m.userEci)
    }

    def failure = unknownUserFailure(o.transactionId, o.channelDetails)

    raiseRemoteEventWithChannel(o.channelDetails, success, failure)
  }

  /**
   * Records the user's new Edentiti id as an additional channel (if not already
   * known) and notifies the user pico that Edentiti data is available.
   */
  protected def handleEdentitiNewUser(o: EdentitiNewUser, event: EventedEvent)(implicit ec: ExecutionContext): Unit = {
    //    mapReplyToEci(o.transactionId)

    def success(m: UserPicoMapping) = {
      log.debug("handleEdentitiNewUser Success: {} ({})", o, event)

      val newChannelDetails = ChannelDetails(EDENTITI_ID, o.edentitiId)

      // Only persist the channel if it isn't already part of the mapping.
      val result = m.channels.contains(newChannelDetails) match {
        case false => storeNewChannelDetails(m.userEci, newChannelDetails)
        case true => Future(Json.obj()) // Do Nothing
      }

      result onComplete {
        case Success(s) => {
          log.info("Sending {}::{} to {} ({})", EventDomain.USER, EventType.EDENTITI_DATA_AVAILABLE, m.userEci, o.transactionId);

          raiseRemoteEvent(
            EventedEvent(
              EventDomain.USER,
              EventType.EDENTITI_DATA_AVAILABLE,
              attributes = EdentitiDataAvailable(o.transactionId, o.edentitiId, o.data),
              entityId = m.userEci))
        }
        case Failure(e) => log.error(e, e.getMessage())
      }
    }

    def failure = unknownUserFailure(o.transactionId, o.channelDetails) map { raiseRemoteEvent(_) }

    forChannel(o.channelDetails, success, failure)
  }

  /**
   * Relays a verification result to the user pico and, if the user is verified,
   * fires the vendor's configured email/callback notifications.
   */
  protected def handleUserVerificationNotification(o: UserVerificationNotification, event: EventedEvent)(implicit ec: ExecutionContext): Unit = {
    //    mapReplyToEci(o.transactionId)

    def success(m: UserPicoMapping) = {
      log.debug("handleUserVerificationNotification Success: {} ({})", o, event)

      // Send the verification data to the user pico.
      raiseRemoteEvent(
        EventedEvent(
          EventDomain.USER,
          EventType.EDENTITI_DATA_AVAILABLE,
          attributes = EdentitiDataAvailable(o.transactionId, o.channelDetails.channelId, o.data),
          entityId = m.userEci))

      // Handle notification callbacks, but only when the result is verified.
      (for {
        pref <- retrieveVendorPreferences
        if isVerified(o.data)
      } yield {
        // TODO: These should probably be handled in their own events..
        handleNotificationEmail(o.data, pref)
        handleNotificationCallback(o.data, pref)
      }) recover {
        case NonFatal(e) => log.error(e, "handleUserVerificationNotification: Error: " + e.getMessage())
      }
    }

    def failure = unknownUserFailure(o.transactionId, o.channelDetails) map { raiseRemoteEvent(_) }

    forChannel(o.channelDetails, success, failure)
  }

  /** True when the result's "outcome" field is one of the accepted verified states. */
  def isVerified(result: JsObject): Boolean = {
    // TODO: This should probably be made 'vendor customizable" at some point in the future
    val verifiedStates = List[String]("VERIFIED", "VERIFIED_ADMIN", "VERIFIED_WITH_CHANGES", "PENDING")

    val isVerified = (result \\ "outcome").asOpt[String] match {
      case Some(outcome) => verifiedStates.contains(outcome)
      case None => false
    }

    log.debug("isVerified={}", isVerified)

    isVerified
  }

  /** Sends the "verified" notification email via Mandrill, if configured and active. */
  // TODO: Implement this
  def handleNotificationEmail(result: JsObject, pref: VendorPreferences)(implicit ec: ExecutionContext): Unit = {
    import me.welcomer.link.mandrill.MandrillProtocol._

    log.debug("WelcomerIdVendor::handleNotificationEmail")

    // TODO: Load/generate this from a template?
    val emailBody = "WelcomerID - Verification Result ID: TODOID TODONAME is verified. This occurred on 01 Jan 2014, 01:00am\\n\\n" + Json.prettyPrint(result)

    def verifiedMessage(email: String) = Message(
      List(ToStruct(email)),
      FromStruct("verification@welcomerid.me"), // TODO: Move this to config?
      "WelcomerID - Verified user", // TODO: Move this to config?
      text = emailBody)

    // Only send when both an API key and active email preferences are configured.
    val emailPrefs = for {
      key <- pref.apiKeys.mandrill
      emailPrefs <- pref.emailNotification
    } yield { (key, emailPrefs.email, emailPrefs.active) }

    emailPrefs map {
      case (key, email, active) =>
        val mandrill = _picoServices.link.mandrill(key)

        (for {
          response <- mandrill.messages.send(verifiedMessage(email)) // TODO: Add PDF attachment of verification outcome
        } yield {
          response
        }) onComplete {
          case default => log.debug("EmailSendResponse: {}", default)
        }
    } getOrElse { log.debug("Not sending notification email due to prefs: {}", emailPrefs) }
  }

  /** POSTs the filled-in callback template to the vendor's callback URL, if active. */
  // TODO: Implement this
  def handleNotificationCallback(result: JsObject, pref: VendorPreferences)(implicit ec: ExecutionContext): Unit = {
    log.debug("WelcomerIdVendor::handleNotificationCallback")

    pref.callbackNotification map { p =>
      if (p.active) {
        val filledTemplate = p.template
          .replace("<email>", "TODO GET EMAIL FROM AUTH OR SOMEWHERE")
          .replace("<userId>", (result \\ "userId").asOpt[String].getOrElse("")); // TODO: Replace placeholders with parts from ?result?

        val api = _picoServices.link.rawJsonApi

        // Bug fix: previously this posted the raw `p.template`, silently
        // discarding `filledTemplate` (the version with substituted placeholders).
        api.post(p.url, Json.parse(filledTemplate)) onComplete {
          case default => log.debug("CallbackSendResponse: {}", default)
        }
      } else { log.debug("Not sending notification callback due to prefs: active={}", p.active) }
    } getOrElse { log.debug("Not sending notification callback due to prefs: {}", pref.callbackNotification) }
  }

  // -----
  // User
  // -----

  /** Relays user data from the user pico back to the original requester. */
  protected def handleUserData(userData: UserData, event: EventedEvent)(implicit ec: ExecutionContext): Unit = {
    replyToTransaction(
      EventDomain.WELCOMER_ID,
      EventType.USER_DATA,
      userData)(event, userData.transactionId)
  }

  /** Relays an Edentiti id from the user pico back to the original requester. */
  protected def handleEdentitiId(edentitiId: EdentitiId, event: EventedEvent)(implicit ec: ExecutionContext): Unit = {
    replyToTransaction(
      EventDomain.WELCOMER_ID,
      EventType.EDENTITI_ID,
      edentitiId)(event, edentitiId.transactionId)
  }

  /** Relays new-user data, augmented with the vendor's ruleset preference. */
  protected def handleEdentitiNewUserData(o: EdentitiNewUserData, event: EventedEvent)(implicit ec: ExecutionContext): Unit = {
    retrieveVendorPreferences map { pref =>
      replyToTransaction(
        EventDomain.WELCOMER_ID,
        EventType.EDENTITI_CREATE_NEW_USER,
        EdentitiCreateNewUser(o.transactionId, o.data, pref.ruleset))(event, o.transactionId)
    }
  }

  // --------
  // Helpers
  // --------

  /**
   * Creates a new user pico, generates a vendor-side ECI for it, and persists
   * the channel->pico mapping.
   *
   * @return (userEci, vendorEci, storeResult)
   */
  protected def mapNewUserPico(channelDetails: ChannelDetails)(implicit ec: ExecutionContext): Future[(String, String, JsObject)] = {
    def channelType = channelDetails.channelType
    def channelId = channelDetails.channelId
    lazy val eciDescription = s"[UserPico] $channelType->$channelId"

    for {
      userEci <- _picoServices.picoManagement.createNewPico(USER_RULESETS)
      vendorEci <- _picoServices.eci.generate(Some(eciDescription))
      storeResult <- storePicoMapping(UserPicoMapping(userEci, vendorEci, Set(channelDetails)))
    } yield { (userEci, vendorEci, storeResult) }
  }

  /** Looks up the pico mapping that contains the given channel, if any. */
  // TODO: This is likely a fairly common pattern.. can we extract it?
  def retrieveUserMapping(channelDetails: ChannelDetails)(implicit ec: ExecutionContext): Future[Option[UserPicoMapping]] = {
    val selector = Json.obj(
      "mappings.channels" -> Json.obj(
        "$elemMatch" -> Json.obj(
          "type" -> channelDetails.channelType,
          "id" -> channelDetails.channelId)))

    val projection = Json.obj("mappings.channels.$" -> 1)

    _picoServices.pds.retrieve(selector, projection, USER_NAMESPACE) map {
      case Some(json) => (json \\ "mappings")(0).asOpt[UserPicoMapping]
      case None => None
    }
  }

  /** Appends a new channel to an existing mapping; fails if nothing was updated. */
  def storeNewChannelDetails(eci: String, newChannelDetails: ChannelDetails)(implicit ec: ExecutionContext): Future[JsObject] = {
    val selector = Json.obj("mappings.userEci" -> eci)
    val arrayKey = "mappings.$.channels"

    _picoServices.pds.pushArrayItem(arrayKey, newChannelDetails, USER_NAMESPACE, selector, unique = true) map { result =>
      (result \\ "n").asOpt[Int] match {
        case Some(n) if n > 0 => result
        case _ => throw new Throwable(s"No documents were updated ($result) arrayKey=$arrayKey, arrayItem=$newChannelDetails")
      }
    }
  }

  /** Loads this vendor's preferences from the PDS; fails if none are stored. */
  def retrieveVendorPreferences(implicit ec: ExecutionContext): Future[VendorPreferences] = {
    _picoServices.pds.retrieveAllItems(VENDOR_PREFERENCES_NAMESPACE) map {
      case Some(pref) => pref.as[VendorPreferences]
      case None => throw new Throwable("Vendor preferences weren't found")
    }
  }

  /** Persists a new user pico mapping; fails if nothing was updated. */
  // TODO: This is likely a fairly common pattern.. can we extract it?
  protected def storePicoMapping(userPicoMapping: UserPicoMapping)(implicit ec: ExecutionContext): Future[JsObject] = {
    val arrayKey = "mappings"
    val arrayItem = Json.toJson(userPicoMapping)

    _picoServices.pds.pushArrayItem(arrayKey, arrayItem, USER_NAMESPACE, unique = true) map { result =>
      (result \\ "n").asOpt[Int] match {
        case Some(n) if n > 0 => result
        case _ => throw new Throwable(s"No documents were updated ($result) arrayKey=$arrayKey, arrayItem=$arrayItem")
      }
    }
  }

  /**
   * Builds an UNKNOWN_USER event addressed to the transaction's replyTo ECI,
   * or None (with an error log) when no replyTo mapping exists.
   */
  // TODO: This is likely a fairly common pattern.. can we extract it?
  protected def unknownUserFailure(transactionId: String, channelDetails: ChannelDetails): Option[EventedEvent] = {
    retrieveReplyToEci(transactionId, true) map { replyTo =>
      // NOTE(review): mixes the s-interpolator with "{}" logger placeholders;
      // works (4 placeholders / 4 args) but is confusing — consider one style.
      log.warning(s"Sending {}::{}({}) to ${replyTo} ({})", EventDomain.WELCOMER_ID, EventType.UNKNOWN_USER, channelDetails, transactionId);

      Some(
        EventedEvent(
          EventDomain.WELCOMER_ID,
          EventType.UNKNOWN_USER,
          attributes = UnknownUser(transactionId, channelDetails),
          entityId = replyTo))
    } getOrElse {
      log.error("Giving Up: UnknownUser and no replyTo ECI found for transactionId ({})", transactionId)

      None
    }
  }

  /** Runs one of the two handlers depending on whether the channel is mapped. */
  // TODO: This is likely a fairly common pattern.. can we extract it?
  protected def forChannel(
    channelDetails: ChannelDetails,
    validChannelHandler: UserPicoMapping => Unit,
    invalidChannelHandler: => Unit)(implicit ec: ExecutionContext): Unit = {
    retrieveUserMapping(channelDetails) map {
      case Some(mapping) => {
        log.debug("[forChannel] ValidChannel: " + channelDetails)
        validChannelHandler(mapping)
      }
      case None => {
        log.debug("[forChannel] InvalidChannel: " + channelDetails)
        invalidChannelHandler
      }
    } recover {
      case NonFatal(e) => log.error(e, "[forChannel] Error: " + e.getMessage())
    }
  }

  /** Like [[forChannel]], but raises whichever event the chosen handler produces. */
  def raiseRemoteEventWithChannel(
    channelDetails: ChannelDetails,
    successEvent: UserPicoMapping => Future[Option[EventedEvent]],
    failureEvent: => Future[Option[EventedEvent]])(implicit ec: ExecutionContext): Unit = {
    def success(m: UserPicoMapping) = successEvent(m) map { _ map { raiseRemoteEvent(_) } }
    def failure = failureEvent map { _ map { raiseRemoteEvent(_) } }

    forChannel(channelDetails, success, failure)
  }
}
| welcomer/framework | src/main/scala/me/welcomer/rulesets/welcomerId/VendorRuleset.scala | Scala | apache-2.0 | 17,700 |
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2013-2014 Alexey Aksenov ezh@ezh.msk.ru
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Global License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
* BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
* Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Global License for more details.
* You should have received a copy of the GNU Affero General Global License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Global License.
*
* In accordance with Section 7(b) of the GNU Affero General Global License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: ezh@ezh.msk.ru
*/
package org.digimead.tabuddy.desktop.core.ui.definition.widget.status
import org.digimead.digi.lib.log.api.XLoggable
import org.digimead.tabuddy.desktop.core.Core
import org.digimead.tabuddy.desktop.core.definition.command.Command
import org.digimead.tabuddy.desktop.core.support.App
import org.eclipse.core.databinding.observable.{ ChangeEvent, IChangeListener }
import org.eclipse.core.databinding.observable.value.IObservableValue
import org.eclipse.core.runtime.IProgressMonitor
import org.eclipse.jface.action.{ StatusLineManager ⇒ JStatusLineManager }
import org.eclipse.jface.databinding.swt.SWTObservables
import org.eclipse.jface.fieldassist.{ ContentProposalAdapter, TextContentAdapter }
import org.eclipse.swt.SWT
import org.eclipse.swt.events.{ TraverseEvent, TraverseListener }
import org.eclipse.swt.layout.{ GridData, GridLayout }
import org.eclipse.swt.widgets.{ Composite, Control, Text }
import scala.concurrent.Future
/**
 * Composite status line manager for AppWindow: a JFace StatusLineManager whose
 * control additionally hosts a command-line text field with content-proposal
 * support for entering application commands.
 */
class StatusLineManager extends JStatusLineManager with XLoggable {
  /** Akka execution context. */
  implicit lazy val ec = App.system.dispatcher
  /** The status line control; <code>null</code> before creation and after disposal. */
  protected var statusLineContainer: Composite = null
  /** The command line control; <code>null</code> before creation and after disposal. */
  protected var commandLine: Text = null

  /** Creates and returns this manager's status line control: the command-line
   *  text field on the left and the JFace status line on the right. */
  override def createControl(parent: Composite, style: Int): Control = {
    statusLineContainer = new Composite(parent, SWT.NONE)
    statusLineContainer.setLayout(new GridLayout(2, false))
    commandLine = createCommandLine(statusLineContainer)
    // This is critical. Without setFocus SWT lost FocusIn and FocusOut events. This is INITIAL window element that gains focus.
    commandLine.setFocus()
    val statusLine = super.createControl(statusLineContainer, SWT.NONE)
    statusLine.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1))
    statusLineContainer
  }
  /**
   * Disposes of this status line manager and frees all allocated SWT resources.
   * Notifies all contribution items of the dispose. Note that this method does
   * not clean up references between this status line manager and its associated
   * contribution items. Use <code>removeAll</code> for that purpose.
   */
  override def dispose() {
    super.dispose
    commandLine.dispose()
    commandLine = null
    statusLineContainer.dispose()
    statusLineContainer = null
  }
  /** Returns the control used by this StatusLineManager. */
  override def getControl(): Control = statusLineContainer
  /** Returns the status line control (the last child of the container).
   *  NOTE(review): yields null after disposal, mirroring the container field. */
  def getStatusLine: Composite with IProgressMonitor =
    Option(statusLineContainer).map(_.getChildren().last).getOrElse(null).asInstanceOf[Composite with IProgressMonitor]
  /** Returns the command line control. */
  def getCommandLine: Text = commandLine

  /** Builds the command-line text field: wires its content to the command
   *  proposal provider and parses/executes the command on ENTER (TRAVERSE_RETURN). */
  protected def createCommandLine(parent: Composite): Text = {
    val textField = new Text(statusLineContainer, SWT.NONE)
    val textFieldLayoutData = new GridData()
    textFieldLayoutData.widthHint = 200
    textField.setLayoutData(textFieldLayoutData)
    val proposalProvider = Command.getProposalProvider()
    // Keep the proposal provider's input in sync with the text field's content.
    SWTObservables.observeText(textField, SWT.Modify).addChangeListener(new IChangeListener() {
      override def handleChange(event: ChangeEvent) =
        proposalProvider.setInput(event.getObservable().asInstanceOf[IObservableValue].getValue().asInstanceOf[String])
    })
    textField.addTraverseListener(new TraverseListener {
      def keyTraversed(event: TraverseEvent) {
        if (event.detail == SWT.TRAVERSE_RETURN) {
          // On ENTER: parse the typed command and, when a full match with a known
          // descriptor is found, clear the field and run the callback asynchronously.
          Command.parse(event.widget.asInstanceOf[Text].getText()) match {
            case Command.Success(uniqueId, result) ⇒
              Command.getContextParserInfo(uniqueId) match {
                case Some(info) ⇒
                  Command.getDescriptor(info.parserId) match {
                    case Some(commandDescriptor) ⇒
                      val activeContext = Core.context.getActiveLeaf()
                      textField.setText("")
                      Future {
                        log.info(s"Execute command '${commandDescriptor.name}' within context '${info.context}' with argument: " + result)
                        commandDescriptor.callback(activeContext, info.context, result)
                      }
                    case None ⇒
                      log.fatal("Unable to find command description for " + info)
                  }
                case None ⇒
                  log.fatal("Unable to find command information for unique Id " + uniqueId)
              }
            case Command.MissingCompletionOrFailure(completionList, message) ⇒
              log.debug("Autocomplete: " + message)
            case Command.Failure(message) ⇒
              log.debug(message)
            case Command.Error(message) ⇒
              log.fatal(message)
          }
        }
      }
    })
    // Attach content-proposal (autocomplete) behaviour to the text field.
    val controlContentAdapter = new TextContentAdapter()
    val adapter = new ContentProposalAdapter(textField, controlContentAdapter, proposalProvider, null, null)
    adapter.setPropagateKeys(true)
    adapter.setProposalAcceptanceStyle(ContentProposalAdapter.PROPOSAL_INSERT)
    textField
  }
}
| digimead/digi-TABuddy-desktop | part-core-ui/src/main/scala/org/digimead/tabuddy/desktop/core/ui/definition/widget/status/StatusLineManager.scala | Scala | agpl-3.0 | 7,604 |
package uk.gov.homeoffice.json
import org.json4s.JValue
import org.json4s.JsonAST.{JObject, JNothing}
import org.json4s.JsonDSL._
/**
 * Aggregates everything known about a JSON processing failure: the offending
 * JSON (if any), an optional error message and an optional underlying cause.
 */
case class JsonError(json: JValue = JNothing, error: Option[String] = None, throwable: Option[Throwable] = None) {
  // Renders this error as JSON by merging the raw JSON with an optional
  // "error" property and the JSON rendering of the optional throwable.
  @deprecated(message = "Use toJson instead which renders any JSON under a 'json' property", since = "30th March 2015")
  def asJson = JObject() merge json merge {
    error.fold(JObject()) { error => "error" -> error }
  } merge {
    throwable map { Json.toJson } getOrElse JObject()
  }
  // As asJson, but nests the original JSON under a "json" property (omitted
  // entirely when there is no JSON) instead of merging it at the top level.
  def toJson = (if (json == JNothing) JObject() else JObject("json" -> json)) merge {
    error.fold(JObject()) { error => "error" -> error }
  } merge {
    throwable map { Json.toJson } getOrElse JObject()
  }
  // Wraps this error in an exception for bridging to exception-based APIs.
  def toException = new JsonErrorException(this)
}
/**
 * Exception carrying a [[JsonError]]: its message is derived from the error
 * text and JSON, and its cause is the error's throwable when present.
 */
case class JsonErrorException(jsonError: JsonError) extends Exception(JsonErrorException.toString(jsonError), jsonError.throwable orNull)
object JsonErrorException {
  // Builds the exception message from the error text and/or associated JSON.
  // NOTE(review): the two branches render the JSON with different labels
  // ("json:" vs ", JSON:"), and the getOrElse branch yields a message with a
  // leading comma — this looks unintended; confirm before changing.
  def toString(jsonError: JsonError) = jsonError.json match {
    case JNothing => jsonError.error getOrElse ""
    case json => jsonError.error map { _ + s", json: $json"} getOrElse s", JSON: $json"
  }
} | UKHomeOffice/rtp-io-lib | src/main/scala/uk/gov/homeoffice/json/JsonError.scala | Scala | mit | 1,175 |
package com.theseventhsense.utils.persistence.db
import akka.stream.scaladsl.Source
import play.api.db.slick.{HasDatabaseConfig, HasDatabaseConfigProvider}
import slick.backend.DatabaseConfig
import slick.dbio.Effect.Write
import slick.driver.JdbcProfile
import com.theseventhsense.utils.logging.Logging
import com.theseventhsense.utils.persistence._
import slick.profile.FixedSqlAction
import scala.collection.immutable
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.language.implicitConversions
/** Marker trait for persisted DTOs: every row type exposes a database identifier. */
trait SlickDTO extends DTO {
  // Primary-key value of this row.
  def id: SlickDTO.Id
}
object SlickDTO {
  /** Database identifier type: rows are keyed by a Long primary key. */
  type Id = Long
}
/** Shared Slick table plumbing: id-bearing table shape and common column mappings. */
trait SlickTable extends HasDatabaseConfig[JdbcProfile] {
  import driver.api._
  // Base shape for tables whose rows are SlickDTOs: they must expose an id column.
  protected trait IdentifiedTable[T <: SlickDTO] extends Table[T] {
    def id: Rep[SlickDTO.Id]
  }
  // Maps joda DateTime columns to SQL TIMESTAMP via epoch milliseconds.
  implicit val dateTimeColumnType = MappedColumnType.base[org.joda.time.DateTime, java.sql.Timestamp](
    { dt => new java.sql.Timestamp(dt.getMillis) },
    { ts => new org.joda.time.DateTime(ts) }
  )
}
// Companion object for SlickDAO; intentionally empty at present.
object SlickDAO {
}
/**
 * Blocking Slick-backed DAO for SlickDTO row types.
 *
 * Most members return plain values and rely on the implicit `block`
 * conversion below to await the underlying database Future.
 */
trait SlickDAO[T <: SlickDTO]
  extends DAO[T]
  with HasDatabaseConfigProvider[JdbcProfile]
  with SlickTable
  with Logging {
  import driver.api._
  // NOTE(review): this implicit conversion silently blocks the calling thread
  // (Await with a 10 second timeout) wherever a Future is used as a plain
  // value in this trait. Convenient, but it hides blocking — confirm intended.
  implicit def block[A](fut: Future[A]): A = {
    Await.result(fut, 10.seconds)
  }
  // Concrete table row type; must expose the DTO's id column.
  type Items <: IdentifiedTable[T]
  // The Slick table query this DAO operates on.
  def table: TableQuery[Items]
  // Creates the backing table (blocks until done).
  def createTable(): Unit = {
    val createAction = table.schema.create
    logger.info(s"Creating table:\\n${createAction.statements}")
    block(db.run(createAction))
    ()
  }
  // Drops the backing table (blocks until done).
  def dropTable(): Unit = {
    block(db.run(table.schema.drop))
    ()
  }
  // Number of rows in the table (blocks via the implicit `block` conversion).
  def count: Long = {
    db.run(table.length.result).map(_.toLong)
  }
  // Looks up a row by primary key; None when absent (blocking).
  def get(id: Long): Option[T] = {
    val query = table.filter(_.id === id).result.headOption
    block(db.run(query))
  }
  // Returns the stored row with the item's id, inserting the item when absent.
  def getOrCreate(item: T): T = {
    get(item.id).getOrElse(create(item))
  }
  // Deletes the row with the given id, returning the number of rows removed.
  def delete(id: Long): Int = {
    val action = table.filter(_.id === id).delete
    block(db.run(action))
  }
  // Commented out since the type bounds don't quite work out (yet). Descendant
  // classes will need a message such as this if you want to be able to insert
  // records and get back a copy with the newly set id in it.
  //  def create(obj: T): T = {
  //    db withTransaction { implicit session =>
  //      (table returning table.map(_.id)
  //        into ((item, id) => item.copy(id = id))) += obj
  //    }
  //  }
  // Updates the row matching obj's id; returns obj (blocking).
  def save(obj: T): T = {
    val q = table
      .filter(_.id === obj.id)
      .update(obj)
    db.run(q).map(x => obj)
  }
  // Inserts obj keeping its preset id; returns obj (blocking).
  override def forceInsert(obj: T): T = {
    val q = table.forceInsert(obj)
    db.run(q).map(x => obj)
  }
  // Upserts obj by primary key; returns obj (blocking).
  override def insertOrUpdate(obj: T): T = {
    val q = table.insertOrUpdate(obj)
    db.run(q).map(x => obj)
  }
  // Upsert that preserves a preset id on insert: forceInsert when the id is
  // absent, otherwise a regular upsert. Returns obj (blocking).
  override def forceInsertOrUpdate(obj: T): T = {
    db.run(table.filter(_.id === obj.id).length.result)
      .flatMap { count: Int =>
        if (count == 0) {
          db.run(table.forceInsert(obj))
        } else {
          db.run(table.insertOrUpdate(obj))
        }
      }.map(x => obj)
  }
  // Updates every element of seq by id; returns seq (blocking).
  override def saveBulk(seq: Seq[T]): Seq[T] = {
    val q = seq map { obj =>
      db.run(table.filter(_.id === obj.id).update(obj)).map(x => obj)
    }
    Future.sequence(q)
  }
  // Runs query twice: once for the unfiltered total and once with paging and
  // sorting applied, combining both into a ListQueryResult.
  def createFindResult(query: Query[Items, T, Seq], meta: QueryMeta): Future[ListQueryResult[T]] = {
    val totalFut: Future[Int] = db.run(query.length.result)
    val recordsFut = db.run(filterByMeta(query, meta).result)
    val listQuery = for {
      total: Int <- totalFut
      records <- recordsFut
    } yield ListQueryResult(total.toLong, records.toList)
    listQuery
  }
  // Counts the rows matched by query (meta is currently unused here).
  def createCountResult(query: Query[Items, T, Seq], meta: QueryMeta): Future[Int] = {
    db.run(query.length.result)
  }
  // Deletes all rows matched by query, yielding the number of deleted rows.
  def createDeleteResult(query: Query[Items, T, Seq]): Future[Int] = {
    db.run(query.delete)
  }
  // Lists rows of the whole table using the paging/sorting in meta.
  def find(meta: QueryMeta): Future[ListQueryResult[T]] = {
    createFindResult(table, meta)
  }
  // Applies offset/limit and sorting from meta to the query.
  // NOTE(review): offset/limit are applied *before* sortBy, and the
  // Some(sort) branch is identical to the None branch (the requested sort
  // column is ignored; sorting always happens on id). Both look unintended —
  // confirm before relying on sorted pagination.
  protected def filterByMeta(query: Query[Items, T, Seq], meta: QueryMeta) = {
    var q = query.drop(meta.offset)
    meta.limit.foreach(limit => q = q.take(limit))
    meta.sort match {
      case None =>
        if (meta.sortAsc) {
          q = q.sortBy(_.id)
        } else {
          q = q.sortBy(_.id.desc)
        }
      case Some(s: String) =>
        if (meta.sortAsc) {
          q = q.sortBy(_.id)
        } else {
          q = q.sortBy(_.id.desc)
        }
    }
    q
  }
  // As createFindResult, but returns the records as an Akka Streams Source
  // backed by a Slick streaming publisher instead of a materialized list.
  def createStreamResult(query: Query[Items, T, Seq], meta: QueryMeta): Future[StreamQueryResult[T]] = {
    val totalFut: Future[Int] = db.run(query.length.result)
    val records = db.stream(filterByMeta(query, meta).result)
    val streamQuery: Future[StreamQueryResult[T]] = for {
      total <- totalFut
    } yield StreamQueryResult(total.toLong, Source(records))
    streamQuery
  }
  // Streams rows of the whole table using the paging/sorting in meta.
  def stream(meta: QueryMeta): Future[StreamQueryResult[T]] = {
    createStreamResult(table, meta)
  }
  // Resets the Postgres-style "<table>_id_seq" sequence to the current max id.
  // NOTE(review): `$sequence` inside the sqlu interpolator becomes a bind
  // parameter, and here it sits inside single quotes — verify the generated
  // SQL actually sets the sequence as intended.
  def resetSequence(): Unit = {
    db.run(table.map(_.id).max.result).map { max =>
      if (max.isDefined) {
        val sequence = s"${table.baseTableRow.tableName}_id_seq"
        logger.debug(s"Resetting $sequence sequence to ${max.get}")
        db.run(sqlu"SELECT setval('$sequence', ${max.get});")
      }
    }
    ()
  }
  // No-op.
  override def commit(): Unit = {}
}
| easel/utils-persistence | src/main/scala/com/theseventhsense/utils/persistence/db/SlickDAO.scala | Scala | mit | 5,386 |
package com.eharmony.aloha.models.vw.jni.multilabel
import java.io.File
import com.eharmony.aloha.dataset.vw.multilabel.VwMultilabelRowCreator
import com.eharmony.aloha.dataset.vw.multilabel.VwMultilabelRowCreator.LabelNamespaces
import com.eharmony.aloha.models.vw.jni.multilabel.VwSparseMultilabelPredictor.ExpectedLearner
import org.apache.commons.io.{FileUtils, IOUtils}
import vowpalWabbit.learner.VWLearners
import scala.util.matching.Regex
import scala.util.{Failure, Success, Try}
/**
* Created by ryan.deak on 10/5/17.
*/
protected trait VwMultilabelParamAugmentation {
  // A set of single-character VW namespace identifiers.
  protected type VWNsSet = Set[Char]
  // Pairs of VW namespaces, e.g. for quadratic cross products.
  protected type VWNsCrossProdSet = Set[(Char, Char)]
  /**
   * Adds VW parameters to make the parameters work as an Aloha multilabel model.
   *
   * The algorithm works as follows:
   *
   1. Ensure the VW `csoaa_ldf` or `wap_ldf` reduction is specified in the supplied VW
      parameter list (''with the appropriate option for the flag'').
   1. Ensure that no "''unrecoverable''" flags appear in the supplied VW parameter list.
      See `UnrecoverableFlagSet` for flags whose appearance is considered
      "''unrecoverable''".
   1. Ensure that ''ignore'' and ''interaction'' flags (`--ignore`, `--ignore_linear`, `-q`,
      `--quadratic`, `--cubic`) do not refer to namespaces not supplied in
      the `namespaceNames` parameter.
   1. Attempt to determine namespace names that can be used for the labels. For more
      information on the label namespace resolution algorithm, see:
      `com.eharmony.aloha.dataset.vw.multilabel.VwMultilabelRowCreator.determineLabelNamespaces`.
   1. Remove flags and options found in `FlagsToRemove`.
   1. Add `--noconstant` and `--csoaa_rank` flags. `--noconstant` is added because per-label
      intercepts will be included and take the place of a single intercept. `--csoaa_rank`
      is added to make the `VWLearner` a `VWActionScoresLearner`.
   1. Create interactions between features and the label namespaces created above.
      a. If a namespace in `namespaceNames` appears as an option to VW's `ignore_linear` flag,
         '''do not''' create a quadratic interaction between that namespace and the label
         namespace.
      a. For each interaction term (`-q`, `--quadratic`, `--cubic`, `--interactions`), replace it
         with an interaction term also interacted with the label namespace. This increases the
         arity of the interaction by 1.
   1. Finally, change the flag options that reference files to point to temp files so that
      VW doesn't change the files. This may represent a problem if VW needs to read the file
      in the option because although it should exist, it will be empty.
   1. Let VW doing any validations it can.
   *
   * ==Success Examples==
   *
   * {{{
   * import com.eharmony.aloha.models.vw.jni.multilabel.VwMultilabelModel.updatedVwParams
   *
   * // This is a basic example. 'y' and 'Y' in the output are label
   * // namespaces. Notice all namespaces are quadratically interacted
   * // with the label namespace.
   * val uvw1 = updatedVwParams(
   *   "--csoaa_ldf mc",
   *   Set("a", "b", "c")
   * )
   * // Right("--csoaa_ldf mc --noconstant --csoaa_rank --ignore y " +
   * //       "--ignore_linear abc -qYa -qYb -qYc")
   *
   * // Here since 'a' is in 'ignore_linear', no '-qYa' term appears
   * // in the output.
   * val uvw2 = updatedVwParams(
   *   "--csoaa_ldf mc --ignore_linear a -qbc",
   *   Set("a", "b", "c")
   * )
   * // Right("--csoaa_ldf mc --noconstant --csoaa_rank --ignore y " +
   * //       "--ignore_linear abc -qYb -qYc --cubic Ybc)
   *
   * // 'a' is in 'ignore', so no terms with 'a' are emitted. 'b' is
   * // in 'ignore_linear' so it does occur in any quadratic
   * // interactions in the output, but can appear in interaction
   * // terms of higher arity like the cubic interaction.
   * val uvw3 = updatedVwParams(
   *   "--csoaa_ldf mc --ignore a --ignore_linear b -qbc --cubic abc",
   *   Set("a", "b", "c")
   * )
   * // Right("--csoaa_ldf mc --noconstant --csoaa_rank --ignore ay " +
   * //       "--ignore_linear bc -qYc --cubic Ybc")
   * }}}
   *
   * ==Errors Examples==
   *
   * {{{
   * import com.eharmony.aloha.models.vw.jni.multilabel.VwMultilabelModel.updatedVwParams
   * import com.eharmony.aloha.models.vw.jni.multilabel.{
   *   NotCsoaaOrWap,
   *   NamespaceError
   * }
   *
   * assert( updatedVwParams("", Set()) == Left(NotCsoaaOrWap("")) )
   *
   * assert(
   *   updatedVwParams("--wap_ldf m -qaa", Set()) ==
   *     Left(NamespaceError("--wap_ldf m -qaa", Set(), Map("quadratic" -> Set('a'))))
   * )
   *
   * assert(
   *   updatedVwParams(
   *     "--wap_ldf m --ignore_linear b --ignore a -qbb -qbd " +
   *       "--cubic bcd --interactions dde --interactions abcde",
   *     Set()
   *   ) ==
   *     Left(
   *       NamespaceError(
   *         "--wap_ldf m --ignore_linear b --ignore a -qbb -qbd --cubic bcd " +
   *           "--interactions dde --interactions abcde",
   *         Set(),
   *         Map(
   *           "ignore"        -> Set('a'),
   *           "ignore_linear" -> Set('b'),
   *           "quadratic"     -> Set('b', 'd'),
   *           "cubic"         -> Set('b', 'c', 'd', 'e'),
   *           "interactions"  -> Set('a', 'b', 'c', 'd', 'e')
   *         )
   *       )
   *     )
   * )
   * }}}
   *
   * @param vwParams current VW parameters passed to the VW JNI
   * @param namespaceNames it is assumed that `namespaceNames` is a superset
   *                       of all of the namespaces referred to by any flags
   *                       found in `vwParams`.
   * @param numUniqueLabels the number of unique labels in the training set.
   *                        This is used to calculate the appropriate VW
   *                        `ring_size` parameter.
   * @return a `Left` describing the problem when the parameters cannot be
   *         augmented; otherwise a `Right` with the updated parameter string.
   */
  def updatedVwParams(
    vwParams: String,
    namespaceNames: Set[String],
    numUniqueLabels: Int
  ): Either[VwParamError, String] = {
    lazy val unrecovFlags = unrecoverableFlags(vwParams)
    if (WapOrCsoaa.findFirstMatchIn(vwParams).isEmpty)
      Left(NotCsoaaOrWap(vwParams))
    else if (unrecovFlags.nonEmpty)
      Left(UnrecoverableParams(vwParams, unrecovFlags))
    else {
      val is = interactions(vwParams)
      val i = ignored(vwParams)
      val il = ignoredLinear(vwParams)
      // This won't effect anything if the definition of UnrecoverableFlags contains
      // all of the flags referenced in the flagsRefMissingNss function.  If there
      // are flags referenced in flagsRefMissingNss but not in UnrecoverableFlags,
      // then this is a valid check.
      val flagsRefMissingNss = flagsReferencingMissingNss(namespaceNames, i, il, is)
      if (flagsRefMissingNss.nonEmpty)
        Left(NamespaceError(vwParams, namespaceNames, flagsRefMissingNss))
      else
        VwMultilabelRowCreator.determineLabelNamespaces(namespaceNames).fold(
          Left(LabelNamespaceError(vwParams, namespaceNames)): Either[VwParamError, String]
        ){ labelNs =>
          val paramsWithoutRemoved = removeParams(vwParams)
          val updatedParams =
            addParams(paramsWithoutRemoved, namespaceNames, i, il, is, labelNs, numUniqueLabels)
          val (finalParams, flagToFileMap) = replaceFileBasedFlags(updatedParams, FileBasedFlags)
          val ps = validateVwParams(
            vwParams, updatedParams, finalParams, flagToFileMap, !isQuiet(updatedParams)
          )
          flagToFileMap.values foreach FileUtils.deleteQuietly  // IO: Delete the files.
          ps
        }
    }
  }
  /**
   * VW Flags automatically resulting in an error.
   */
  protected val UnrecoverableFlagSet: Set[String] =
    Set("redefine", "stage_poly", "keep", "permutations", "autolink")
  /**
   * This is the capture group containing the content when the regex has been
   * padded with the pad function.
   */
  protected val CaptureGroupWithContent = 2
  // Flags whose option is a file path that VW may write to; these are
  // redirected to temp files by replaceFileBasedFlags before validation.
  private[this] val FileBasedFlags = Set(
    "-f", "--final_regressor",
    "--readable_model",
    "--invert_hash",
    "--output_feature_regularizer_binary",
    "--output_feature_regularizer_text",
    "-p", "--predictions",
    "-r", "--raw_predictions",
    "-c", "--cache",
    "--cache_file"
  )
  /**
   * Pad the regular expression with a prefix and suffix that makes matching work.
   * The prefix is `(^|\\s)` and means the if there's a character preceding the main
   * content in `s`, then that character should be whitespace.  The suffix is
   * `(?=\\s|$)` which means that if a character follows the main content matched by
   * `s`, then that character should be whitespace '''AND''' ''that character should
   * not'' be consumed by the Regex.  By allowing that character to be present for the
   * next matching of a regex, it is consumable by the prefix of a regex padded with
   * the `pad` function.
   * @param s a string
   * @return
   */
  private[this] def pad(s: String) = """(^|\\s)""" + s + """(?=\\s|$)"""
  // Matches an optionally signed decimal number with an optional exponent.
  private[this] val NumRegex = """-?(\\d+(\\.\\d*)?|\\d*\\.\\d+)([eE][+-]?\\d+)?"""
  // Extracts the from/to type names out of a ClassCastException message.
  private[this] val ClassCastMsg = """(\\S+) cannot be cast to (\\S+)""".r
  private[this] val CsoaaRank = pad("--csoaa_rank").r
  private[this] val WapOrCsoaa = pad("""--(csoaa|wap)_ldf\\s+(mc?)""").r
  private[this] val Quiet = pad("--quiet").r
  protected val Ignore : Regex = pad("""--ignore\\s+(\\S+)""").r
  protected val IgnoreLinear: Regex = pad("""--ignore_linear\\s+(\\S+)""").r
  private[this] val UnrecoverableFlags = pad("--(" + UnrecoverableFlagSet.mkString("|") + ")").r
  private[this] val QuadraticsShort = pad("""-q\\s*([\\S]{2})""").r
  private[this] val QuadraticsLong = pad("""--quadratic\\s+(\\S{2})""").r
  private[this] val Cubics = pad("""--cubic\\s+(\\S{3})""").r
  private[this] val Interactions = pad("""--interactions\\s+(\\S{2,})""").r
  private[this] val NoConstant = pad("""--noconstant""").r
  private[this] val ConstantShort = pad("""-C\\s*(""" + NumRegex + ")").r
  private[this] val ConstantLong = pad("""--constant\\s+(""" + NumRegex + ")").r
  // Flags stripped by removeParams; their multilabel-aware replacements are
  // re-added by addParams.
  private[this] val FlagsToRemove = Seq(
    QuadraticsShort,
    QuadraticsLong,
    Cubics,
    Interactions,
    NoConstant,
    ConstantShort,
    ConstantLong,
    CsoaaRank,
    IgnoreLinear,
    Ignore
  )
  /**
   * Remove flags (and options) for the flags listed in `FlagsToRemove`.
   * @param vwParams VW params passed to the `updatedVwParams` function.
   * @return
   */
  protected def removeParams(vwParams: String): String =
    FlagsToRemove.foldLeft(vwParams)((s, r) => r.replaceAllIn(s, ""))
  /**
   * Adds the multilabel-specific VW flags to `paramsAfterRemoved`:
   * `--noconstant --csoaa_rank`, a `--ring_size` derived from
   * `numUniqueLabels`, ignore/ignore_linear flags including the label
   * namespaces, and label-namespace-augmented interaction terms.
   */
  protected def addParams(
    paramsAfterRemoved: String,
    namespaceNames: Set[String],
    oldIgnored: VWNsSet,
    oldIgnoredLinear: VWNsSet,
    oldInteractions: Set[String],
    labelNs: LabelNamespaces,
    numUniqueLabels: Int
  ): String = {
    val i = oldIgnored + labelNs.dummyLabelNs
    // Don't include namespaces that are ignored in ignore_linear.
    val il = (toVwNsSet(namespaceNames) ++ oldIgnoredLinear) -- i
    // Don't turn a given namespace into quadratics interacted on label when the
    // namespace is listed in the ignore_linear flag.
    val qs = il.flatMap(n =>
      if (oldIgnored.contains(n) || oldIgnoredLinear.contains(n)) Nil
      else List(s"${labelNs.labelNs}$n")
    )
    // Turn quadratic into cubic and cubic into higher-order interactions.
    val cs = createLabelInteractions(oldInteractions, oldIgnored, labelNs, _ == 2)
    val hos = createLabelInteractions(oldInteractions, oldIgnored, labelNs, _ >= 3)
    val quadratics = qs.toSeq.sorted.map(q => s"-q$q" ).mkString(" ")
    val cubics = cs.toSeq.sorted.map(c => s"--cubic $c").mkString(" ")
    val ints = hos.toSeq.sorted.map(ho => s"--interactions $ho").mkString(" ")
    val igLin = if (il.nonEmpty) il.toSeq.sorted.mkString("--ignore_linear ", "", "") else ""
    val rs = s"--ring_size ${numUniqueLabels + VwSparseMultilabelPredictor.AddlVwRingSize}"
    // This is non-empty b/c i is non-empty.
    val ig = s"--ignore ${i.mkString("")}"
    // Consolidate whitespace because there shouldn't be whitespace in these flags' options.
    val additions = s" --noconstant --csoaa_rank $rs $ig $igLin $quadratics $cubics $ints"
      .replaceAll("\\\\s+", " ")
    (paramsAfterRemoved.trim + additions).trim
  }
  /**
   * VW will actually update / replace files if files appear as options to flags.  To overcome
   * this, an attempt is made to detect flags referencing files and if found, replace the the
   * files with temp files.  These files should be deleted before exiting the main program.
   * @param updatedParams the parameters after the updates.
   * @param flagsWithFiles the flag
   * @return a tuple2 of the final string to try with VW for validation along with the mapping
   *         from flag to file that was used.
   */
  protected def replaceFileBasedFlags(updatedParams: String, flagsWithFiles: Set[String]): (String, Map[String, File]) = {
    // This is rather hairy function.
    def flagRe(flags: Set[String], groupsForFlag: Int, c1: String, c2: String, c3: String) =
      if (flags.nonEmpty)
        Option(pad(flags.map(_ drop groupsForFlag).toVector.sorted.mkString(c1, c2, c3)).r)
      else None
    // Get short and long flags.
    val shrt = flagsWithFiles.filter(s => s.startsWith("-") && 2 == s.length && s.charAt(1).isLetterOrDigit)
    val lng = flagsWithFiles.filter(s => s.startsWith("--") && 2 < s.length)
    val regexes = List(
      flagRe(shrt, 1, "(-[", "", """])\\s*(\\S+)"""),
      flagRe(lng, 2, "(--(", "|", """))\\s+(\\S+)""")
    ).flatten
    regexes.foldLeft((updatedParams, Map[String, File]())) { case ((ps, ffm), r) =>
      // Fold right to not affect subsequent replacements.
      r.findAllMatchIn(ps).foldRight((ps, ffm)) { case (m, (ps1, ffm1)) =>
        val f = File.createTempFile("REPLACED_", "_FILE")
        f.deleteOnExit()  // Attempt to be safe here.
        val flag = m.group(CaptureGroupWithContent)
        val rep = s"$flag ${f.getCanonicalPath}"
        val ps2 = ps1.take(m.start) + rep + ps1.drop(m.end)
        val ffm2 = ffm1 + (flag -> f)
        (ps2, ffm2)
      }
    }
  }
  // Prefixes each interaction term of the arity selected by `filter` with the
  // label namespace, dropping interactions touching an ignored namespace.
  protected def createLabelInteractions(
    interactions: Set[String],
    ignored: VWNsSet,
    labelNs: LabelNamespaces,
    filter: Int => Boolean
  ): Set[String] =
    interactions.collect {
      case i if filter(i.length) &&                      // Filter based on arity.
                !i.toCharArray.exists(ignored.contains)  // Filter out ignored.
        => s"${labelNs.labelNs}$i"
    }
  /**
   * Get the set of interactions (encoded as Strings).  String length represents the
   * interaction arity.
   * @param vwParams VW params passed to the `updatedVwParams` function.
   * @return
   */
  protected def interactions(vwParams: String): Set[String] =
    List(
      QuadraticsShort,
      QuadraticsLong,
      Cubics,
      Interactions
    ).foldLeft(Set.empty[String]){(is, r) =>
      is ++ firstCaptureGroups(vwParams, r).map(s => s.sorted)
    }
  // Unrecoverable flags present in the supplied parameters, if any.
  protected def unrecoverableFlags(vwParams: String): Set[String] =
    firstCaptureGroups(vwParams, UnrecoverableFlags).toSet
  // True when --quiet appears in the VW parameters.
  protected def isQuiet(vwParams: String): Boolean = Quiet.findFirstIn(vwParams).nonEmpty
  // Namespaces listed under --ignore.
  protected def ignored(vwParams: String): VWNsSet = charsIn(Ignore, vwParams)
  // Namespaces listed under --ignore_linear.
  protected def ignoredLinear(vwParams: String): VWNsSet = charsIn(IgnoreLinear, vwParams)
  // Maps a ClassCastException raised during learner creation to a typed VwParamError.
  protected def handleClassCastException(
    orig: String,
    mod: String,
    ex: ClassCastException
  ): VwParamError =
    ex.getMessage match {
      case ClassCastMsg(from, _) => IncorrectLearner(orig, mod, from)
      case _ => ClassCastErr(orig, mod, ex)
    }
  // Splits the interactions by arity and delegates to the namespace check below.
  protected def flagsReferencingMissingNss(
    namespaceNames: Set[String],
    i: VWNsSet,
    il: VWNsSet,
    is: Set[String]
  ): Map[String, VWNsSet] = {
    val q = filterAndFlattenInteractions(is, _ == 2)
    val c = filterAndFlattenInteractions(is, _ == 3)
    val ho = filterAndFlattenInteractions(is, _ >= 4)
    flagsReferencingMissingNss(namespaceNames, i, il, q, c, ho)
  }
  // Namespaces appearing in interactions of the arity selected by `filter`.
  protected def filterAndFlattenInteractions(is: Set[String], filter: Int => Boolean): VWNsSet =
    is.flatMap {
      case interaction if filter(interaction.length) => interaction.toCharArray
      case _ => Nil
    }
  // For each flag kind, the namespaces it references that are not in namespaceNames.
  protected def flagsReferencingMissingNss(
    namespaceNames: Set[String],
    i: VWNsSet, il: VWNsSet, q: VWNsSet, c: VWNsSet, ho: VWNsSet
  ): Map[String, VWNsSet] =
    nssNotInNamespaceNames(
      namespaceNames,
      "ignore" -> i,
      "ignore_linear" -> il,
      "quadratic" -> q,
      "cubic" -> c,
      "interactions" -> ho
    )
  // Keeps only the (flag name -> namespaces) entries whose namespaces are not
  // covered by nsNames (comparing on the first character of each name).
  protected def nssNotInNamespaceNames(
    nsNames: Set[String],
    sets: (String, VWNsSet)*
  ): Map[String, VWNsSet] = {
    val vwNss = toVwNsSet(nsNames)
    sets.foldLeft(Map.empty[String, VWNsSet]){ case (m, (setName, nss)) =>
      val extra = nss diff vwNss
      if (extra.isEmpty) m
      else m + (setName -> extra)
    }
  }
  // TODO: Change file
  /**
   * Attempts to create (and immediately close) a VW learner with `finalPs`,
   * translating any failure into a VwParamError.  On success the *modified*
   * (not file-redirected) parameter string `mod` is returned.
   */
  protected def validateVwParams(
    orig: String,
    mod: String,
    finalPs: String,
    flagToFileMap: Map[String, File],
    addQuiet: Boolean
  ): Either[VwParamError, String] = {
    val ps = if (addQuiet) s"--quiet $finalPs" else finalPs
    Try { VWLearners.create[ExpectedLearner](ps) } match {
      case Success(m) =>
        IOUtils.closeQuietly(m)
        Right(mod)
      case Failure(cce: ClassCastException) =>
        Left(handleClassCastException(orig, mod, cce))
      case Failure(ex) =>
        Left(VwError(orig, mod, ex.getMessage))
    }
  }
  // More general functions.
  /**
   * Find all of the regex matches and extract the first capture group from the match.
   * @param vwParams VW params passed to the `updatedVwParams` function.
   * @param regex with at least one capture group (this is unchecked).
   * @return Iterator of the matches' first capture group.
   */
  protected def firstCaptureGroups(vwParams: String, regex: Regex): Iterator[String] =
    regex.findAllMatchIn(vwParams).map(m => m.group(CaptureGroupWithContent))
  // All characters captured by the regex's content capture group, across all matches.
  protected def charsIn(r: Regex, chrSeq: CharSequence): VWNsSet =
    r.findAllMatchIn(chrSeq).flatMap(m => m.group(CaptureGroupWithContent).toCharArray).toSet
  // First character of each namespace name — VW identifies namespaces by first char.
  private[multilabel] def toVwNsSet(nsNames: Set[String]): VWNsSet =
    nsNames.flatMap(_.take(1).toCharArray)
}
| eHarmony/aloha | aloha-vw-jni/src/main/scala/com/eharmony/aloha/models/vw/jni/multilabel/VwMultilabelParamAugmentation.scala | Scala | mit | 18,665 |
/*
* Copyright 2015 RONDHUIT Co.,LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.File
import java.nio.file.FileSystems
import org.apache.lucene.index._
import org.apache.lucene.search.TermQuery
import org.nlp4l.core.analysis.Analyzer
import org.nlp4l.core._
import scalax.file.Path
import scalax.file.PathSet
// Destination directory of the Lucene index built from the livedoor (ldcc) corpus.
val index = "/tmp/index-ldcc"

/**
 * Converts one ldcc article file into an indexable Document.
 *
 * Article layout: line 0 is the URL, line 1 the date, line 2 the title and the
 * remaining lines are the body. The category is taken from the directory name
 * (path element 3, e.g. corpora/ldcc/text/<category>/<article>.txt).
 */
def document(file: Path): Document = {
  val ps: Array[String] = file.path.split(File.separator)
  // for Windows
  // val ps: Array[String] = file.path.split("\\\\")
  val cat = ps(3)
  val lines = file.lines().toArray
  val url = lines(0)
  val date = lines(1)
  val title = lines(2)
  // Reuse the already-read lines instead of reading the file a second time.
  val body = lines.drop(3).toList
  Document(Set(
    Field("url", url), Field("date", date), Field("cat", cat),
    Field("title", title), Field("body", body)
  ))
}
// delete existing Lucene index
val p = Path(new File(index))
p.deleteRecursively()
// write documents into an index
val schema = SchemaLoader.loadFile("examples/schema/ldcc.conf")
val writer = IWriter(index, schema)
// Only category directories are processed: top-level *.txt entries are
// skipped, as is the LICENSE.txt file inside each category directory.
val c: PathSet[Path] = Path("corpora", "ldcc", "text").children()
c.filterNot( e => e.name.endsWith(".txt") ).foreach {
  f => f.children().filterNot( g => g.name.equals("LICENSE.txt") ).foreach( h => writer.write(document(h)) )
}
writer.close
// search: print the top 10 documents whose title contains the term "iphone"
val searcher = ISearcher(index)
val results = searcher.search(query=new TermQuery(new Term("title", "iphone")), rows=10)
results.foreach(doc => {
  printf("[DocID] %d: %s\n", doc.docId, doc.get("title"))
})
| gazimahmud/nlp4l | examples/index_ldcc.scala | Scala | apache-2.0 | 2,022 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate.rest
import javax.ws.rs.{ PUT, DELETE }
/**
 * JAX-RS sub-resource representing a single element stored in a [[Container]].
 * PUT replaces the stored element; DELETE removes it from the container.
 *
 * @version $Revision: 1.1 $
 */
class ElementResource[K, E](element: E, container: Container[K, E]) {

  /** Replaces the stored element with the supplied representation. */
  @PUT
  def put(updatedElement: E): Unit =
    // TODO validate the new element
    container.put(updatedElement)

  /** Removes this element from its container. */
  @DELETE
  def delete(): Unit = container.remove(element)
}
| scalate/scalate | scalate-jaxrs/src/main/scala/org/fusesource/scalate/rest/ElementResource.scala | Scala | apache-2.0 | 1,118 |
package chalk.text.segment
import org.scalatest._
import org.scalatest.junit._
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
class JavaSentenceSegmenterTest extends FunSuite {
  // Fixture: a five-sentence excerpt of the Gettysburg Address.
  val text = """
But, in a larger sense, we can not dedicate -- we can not consecrate -- we can not hallow -- this ground. The brave men, living and dead, who struggled here, have consecrated it, far above our poor power to add or detract. The world will little note, nor long remember what we say here, but it can never forget what they did here. It is for us the living, rather, to be dedicated here to the unfinished work which they who fought here have thus far so nobly advanced. It is rather for us to be here dedicated to the great task remaining before us -- that from these honored dead we take increased devotion to that cause for which they gave the last full measure of devotion -- that we here highly resolve that these dead shall not have died in vain -- that this nation, under God, shall have a new birth of freedom -- and that government of the people, by the people, for the people, shall not perish from the earth.
"""
  // The segmenter should find exactly five sentences and preserve their text.
  test("Gettysburg address") {
    val sentences = JavaSentenceSegmenter(text).toSeq
    assert(sentences.length === 5, sentences)
    assert(sentences(0).trim === "But, in a larger sense, we can not dedicate -- we can not consecrate -- we can not hallow -- this ground.")
  }
}
| yinxusen/chalk | src/test/scala/chalk/text/segment/JavaSentenceSegmenterTest.scala | Scala | apache-2.0 | 1,433 |
package scala
package reflect.internal.util
import java.lang.ref.{WeakReference, ReferenceQueue}
import scala.annotation.tailrec
import scala.collection.mutable.{Set => MSet}
/**
* A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other
* hard references are associated with them. Its primary use case is as a canonical reference
* identity holder (aka "hash-consing") via findEntryOrUpdate
*
* This Set implementation cannot hold null. Any attempt to put a null in it will result in a NullPointerException
*
* This set implementation is not in general thread safe without external concurrency control. However it behaves
* properly when GC concurrently collects elements in this set.
*/
final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with MSet[A] {
import WeakHashSet._
def this() = this(initialCapacity = WeakHashSet.defaultInitialCapacity, loadFactor = WeakHashSet.defaultLoadFactor)
type This = WeakHashSet[A]
  /**
   * queue of Entries that hold elements scheduled for GC
   * the removeStaleEntries() method works through the queue to remove
   * stale entries from the table
   */
  private[this] val queue = new ReferenceQueue[A]
  /**
   * the number of elements in this set
   * (decremented in remove(), so it may briefly include entries whose
   * referents were collected but not yet purged from the table)
   */
  private[this] var count = 0
/**
* from a specified initial capacity compute the capacity we'll use as being the next
* power of two equal to or greater than the specified initial capacity
*/
private def computeCapacity = {
if (initialCapacity < 0) throw new IllegalArgumentException("initial capacity cannot be less than 0")
var candidate = 1
while (candidate < initialCapacity) {
candidate *= 2
}
candidate
}
  /**
   * the underlying table of entries which is an array of Entry linked lists
   */
  private[this] var table = new Array[Entry[A]](computeCapacity)
  /**
   * the limit at which we'll increase the size of the hash table
   */
  private[this] var threshold = computeThreshold
  // Threshold scales with the current table size and the configured load factor.
  private[this] def computeThreshold: Int = (table.size * loadFactor).ceil.toInt
  /**
   * find the bucket associated with an element's hash code,
   * i.e. an index in the range [0, table.length)
   */
  private[this] def bucketFor(hash: Int): Int = {
    // spread the bits around to try to avoid accidental collisions using the
    // same algorithm as java.util.HashMap
    var h = hash
    h ^= h >>> 20 ^ h >>> 12
    h ^= h >>> 7 ^ h >>> 4
    // this is finding h % table.length, but takes advantage of the
    // fact that table length is a power of 2,
    // if you don't do bit flipping in your head, if table.length
    // is binary 100000.. (with n 0s) then table.length - 1
    // is 1111.. with n 1's.
    // In other words this masks on the last n bits in the hash
    h & (table.length - 1)
  }
/**
* remove a single entry from a linked list in a given bucket
*/
private[this] def remove(bucket: Int, prevEntry: Entry[A], entry: Entry[A]) {
prevEntry match {
case null => table(bucket) = entry.tail
case _ => prevEntry.tail = entry.tail
}
count -= 1
}
/**
* remove entries associated with elements that have been gc'ed
*/
private[this] def removeStaleEntries() {
def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]]
@tailrec
def queueLoop(): Unit = {
val stale = poll()
if (stale != null) {
val bucket = bucketFor(stale.hash)
@tailrec
def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = if (stale eq entry) remove(bucket, prevEntry, entry)
else if (entry != null) linkedListLoop(entry, entry.tail)
linkedListLoop(null, table(bucket))
queueLoop()
}
}
queueLoop()
}
/**
* Double the size of the internal table
*/
private[this] def resize() {
val oldTable = table
table = new Array[Entry[A]](oldTable.size * 2)
threshold = computeThreshold
@tailrec
def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) {
@tailrec
def linkedListLoop(entry: Entry[A]): Unit = entry match {
case null => ()
case _ => {
val bucket = bucketFor(entry.hash)
val oldNext = entry.tail
entry.tail = table(bucket)
table(bucket) = entry
linkedListLoop(oldNext)
}
}
linkedListLoop(oldTable(oldBucket))
tableLoop(oldBucket + 1)
}
tableLoop(0)
}
// from scala.reflect.internal.Set, find an element or null if it isn't contained
override def findEntry(elem: A): A = elem match {
case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
case _ => {
removeStaleEntries()
val hash = elem.hashCode
val bucket = bucketFor(hash)
@tailrec
def linkedListLoop(entry: Entry[A]): A = entry match {
case null => null.asInstanceOf[A]
case _ => {
val entryElem = entry.get
if (elem == entryElem) entryElem
else linkedListLoop(entry.tail)
}
}
linkedListLoop(table(bucket))
}
}
// add an element to this set unless it's already in there and return the element
def findEntryOrUpdate(elem: A): A = elem match {
case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
case _ => {
removeStaleEntries()
val hash = elem.hashCode
val bucket = bucketFor(hash)
val oldHead = table(bucket)
def add() = {
table(bucket) = new Entry(elem, hash, oldHead, queue)
count += 1
if (count > threshold) resize()
elem
}
@tailrec
def linkedListLoop(entry: Entry[A]): A = entry match {
case null => add()
case _ => {
val entryElem = entry.get
if (elem == entryElem) entryElem
else linkedListLoop(entry.tail)
}
}
linkedListLoop(oldHead)
}
}
// add an element to this set unless it's already in there and return this set
override def +(elem: A): this.type = elem match {
case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
case _ => {
removeStaleEntries()
val hash = elem.hashCode
val bucket = bucketFor(hash)
val oldHead = table(bucket)
def add() {
table(bucket) = new Entry(elem, hash, oldHead, queue)
count += 1
if (count > threshold) resize()
}
@tailrec
def linkedListLoop(entry: Entry[A]): Unit = entry match {
case null => add()
case _ if (elem == entry.get) => ()
case _ => linkedListLoop(entry.tail)
}
linkedListLoop(oldHead)
this
}
}
def +=(elem: A) = this + elem
// from scala.reflect.internal.Set
override def addEntry(x: A) { this += x }
// remove an element from this set and return this set
override def -(elem: A): this.type = elem match {
case null => this
case _ => {
removeStaleEntries()
val bucket = bucketFor(elem.hashCode)
@tailrec
def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = entry match {
case null => ()
case _ if (elem == entry.get) => remove(bucket, prevEntry, entry)
case _ => linkedListLoop(entry, entry.tail)
}
linkedListLoop(null, table(bucket))
this
}
}
def -=(elem: A) = this - elem
// empty this set
override def clear(): Unit = {
table = new Array[Entry[A]](table.size)
threshold = computeThreshold
count = 0
// drain the queue - doesn't do anything because we're throwing away all the values anyway
@tailrec def queueLoop(): Unit = if (queue.poll() != null) queueLoop()
queueLoop()
}
// true if this set is empty
override def empty: This = new WeakHashSet[A](initialCapacity, loadFactor)
// the number of elements in this set
override def size: Int = {
removeStaleEntries()
count
}
override def apply(x: A): Boolean = this contains x
override def foreach[U](f: A => U): Unit = iterator foreach f
// It has the `()` because iterator runs `removeStaleEntries()`
override def toList(): List[A] = iterator.toList
// Iterator over all the elements in this set in no particular order
override def iterator: Iterator[A] = {
removeStaleEntries()
new Iterator[A] {
/**
* the bucket currently being examined. Initially it's set past the last bucket and will be decremented
*/
private[this] var currentBucket: Int = table.size
/**
* the entry that was last examined
*/
private[this] var entry: Entry[A] = null
/**
* the element that will be the result of the next call to next()
*/
private[this] var lookaheadelement: A = null.asInstanceOf[A]
@tailrec
def hasNext: Boolean = {
while (entry == null && currentBucket > 0) {
currentBucket -= 1
entry = table(currentBucket)
}
if (entry == null) false
else {
lookaheadelement = entry.get
if (lookaheadelement == null) {
// element null means the weakref has been cleared since we last did a removeStaleEntries(), move to the next entry
entry = entry.tail
hasNext
} else {
true
}
}
}
def next(): A = if (lookaheadelement == null)
throw new IndexOutOfBoundsException("next on an empty iterator")
else {
val result = lookaheadelement
lookaheadelement = null.asInstanceOf[A]
entry = entry.tail
result
}
}
}
/**
* Diagnostic information about the internals of this set. Not normally
* needed by ordinary code, but may be useful for diagnosing performance problems
*/
private[util] class Diagnostics {
/**
* Verify that the internal structure of this hash set is fully consistent.
* Throws an assertion error on any problem. In order for it to be reliable
* the entries must be stable. If any are garbage collected during validation
* then an assertion may inappropriately fire.
*/
def fullyValidate: Unit = {
var computedCount = 0
var bucket = 0
while (bucket < table.size) {
var entry = table(bucket)
while (entry != null) {
assert(entry.get != null, s"$entry had a null value indicated that gc activity was happening during diagnostic validation or that a null value was inserted")
computedCount += 1
val cachedHash = entry.hash
val realHash = entry.get.hashCode
assert(cachedHash == realHash, s"for $entry cached hash was $cachedHash but should have been $realHash")
val computedBucket = bucketFor(realHash)
assert(computedBucket == bucket, s"for $entry the computed bucket was $computedBucket but should have been $bucket")
entry = entry.tail
}
bucket += 1
}
assert(computedCount == count, s"The computed count was $computedCount but should have been $count")
}
/**
* Produces a diagnostic dump of the table that underlies this hash set.
*/
def dump = table.deep
/**
* Number of buckets that hold collisions. Useful for diagnosing performance issues.
*/
def collisionBucketsCount: Int =
(table count (entry => entry != null && entry.tail != null))
/**
* Number of buckets that are occupied in this hash table.
*/
def fullBucketsCount: Int =
(table count (entry => entry != null))
/**
* Number of buckets in the table
*/
def bucketsCount: Int = table.size
}
private[util] def diagnostics = new Diagnostics
}
/**
 * Companion object for WeakHashSet
 */
object WeakHashSet {
  /**
   * A single entry in a WeakHashSet. It's a WeakReference plus a cached hash code and
   * a link to the next Entry in the same bucket
   */
  private class Entry[A](element: A, val hash: Int, var tail: Entry[A], queue: ReferenceQueue[A]) extends WeakReference[A](element, queue)

  /** Capacity used by the no-arg constructor and by `apply` when none is given. */
  val defaultInitialCapacity = 16
  /** Load factor used by the no-arg constructor and by `apply` when none is given. */
  val defaultLoadFactor = .75

  /**
   * Creates a new WeakHashSet.
   *
   * BUG FIX: the original implementation passed `defaultLoadFactor` to the
   * constructor, silently ignoring the caller-supplied `loadFactor` argument.
   * The parameter is now honored; behavior is unchanged for callers that
   * relied on the default.
   */
  def apply[A <: AnyRef](initialCapacity: Int = WeakHashSet.defaultInitialCapacity, loadFactor: Double = WeakHashSet.defaultLoadFactor) = new WeakHashSet[A](initialCapacity, loadFactor)
}
| felixmulder/scala | src/reflect/scala/reflect/internal/util/WeakHashSet.scala | Scala | bsd-3-clause | 12,505 |
package org.juanitodread.pitayafinch.nlp.tools.tokenize.pipeline
import org.juanitodread.pitayafinch.model.nlp.tokenizer.LemmaResult
import org.juanitodread.pitayafinch.nlp.tools.tokenize.EnglishLemmatizer
import org.juanitodread.pitayafinch.nlp.tools.tokenize.pipeline.Tokenizer.Tokens
/**
 * Pipeline stage that turns a sequence of tokens into their lemmas by
 * delegating to [[EnglishLemmatizer]].
 */
class Lemmatizer extends Result[Tokens, List[LemmaResult]] {

  /** Name under which this stage appears in the pipeline chain. */
  override val chain: String = "Lemmatizer"

  /** The lemmatization function: each token sequence is fed to the English lemmatizer. */
  override def produce: Tokens => List[LemmaResult] =
    (tokenSeq: Tokens) => EnglishLemmatizer(tokenSeq)
}
| juanitodread/pitaya-finch | src/main/scala/org/juanitodread/pitayafinch/nlp/tools/tokenize/pipeline/Lemmatizer.scala | Scala | apache-2.0 | 497 |
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
/**
* add your integration spec here.
* An integration test will fire up a whole play application in a real (or headless) browser
*/
@RunWith(classOf[JUnitRunner])
class IntegrationSpec extends Specification {

  "Application" should {

    // Boots the whole Play application on a random test port, drives it with a
    // (possibly headless) browser, and checks that the landing page renders.
    "work from within a browser" in new WithBrowser {

      browser.goTo("http://localhost:" + port)

      // Marker text expected on the default index page of this application.
      browser.pageSource must contain("Development has started")
    }
  }
}
| plamola/FeedR-V1 | test/IntegrationSpec.scala | Scala | gpl-2.0 | 557 |
package ca.uwo.eng.sel.cepsim.metric
import ca.uwo.eng.sel.cepsim.placement.Placement
import ca.uwo.eng.sel.cepsim.query.{EventConsumer, InputVertex, EventProducer, Vertex}
/*
TODO probably a good idea to rename this metric to TotalEventMetric or something and create a new
ThroughputMetric that depends on this one.
*/
/**
 * Throughput metric class. Actually, this metric is still not the throughput but the total number
 * of events that had to be produced in order to generate the events consumed by the vertex v. To obtain
 * the throughput, this calculated value must be divided by the period at which the vertex has been active.
 *
 * @param v Vertex (typically an EventConsumer) of which the metric is calculated.
 * @param time Time of the calculation.
 * @param _value Metric value (mutable, updated only by code in the metric package).
 */
case class ThroughputMetric(val v: Vertex, val time: Double, private var _value: Double) extends Metric {
  // Read-only view of the current metric value.
  def value: Double = _value
  // Setter restricted to the metric package so calculators can update the value in place.
  private [metric] def value_=(newValue: Double) = _value = newValue
}
/** Companion object holding the metric identifier and the calculator factory. */
object ThroughputMetric {

  /** Throughput metric identifier - used to register with QueryCloudlet. */
  val ID = "THROUGHPUT_METRIC"

  /**
   * Obtains a calculator for the throughput metric.
   *
   * @param placement Placement of which the metric will be calculated.
   * @return calculator for the throughput metric.
   */
  def calculator(placement: Placement): LatencyThroughputCalculator =
    new LatencyThroughputCalculator(placement)
}
| virsox/cepsim | cepsim-core/src/main/scala/ca/uwo/eng/sel/cepsim/metric/ThroughputMetric.scala | Scala | mit | 1,450 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.action.builder
import io.gatling.core.action.{ Action, RendezVous }
import io.gatling.core.structure.ScenarioContext
/**
 * Builds a rendez-vous action: virtual users are held at this point until
 * `users` of them have arrived, after which they all proceed together.
 */
class RendezVousBuilder(users: Int) extends ActionBuilder {

  /** Creates the [[RendezVous]] action wired to the scenario's actor system and stats engine. */
  override def build(ctx: ScenarioContext, next: Action): Action = {
    val statsEngine = ctx.coreComponents.statsEngine
    RendezVous(users, ctx.system, statsEngine, next)
  }
}
| GabrielPlassard/gatling | gatling-core/src/main/scala/io/gatling/core/action/builder/RendezVousBuilder.scala | Scala | apache-2.0 | 963 |
package scala.tools.nsc
package util
import scala.collection.mutable
/**
 * A single-consumer work queue shared between a "server" thread (which drains
 * work) and "client" threads (which post work, interrupts, and exceptions).
 * All public methods synchronize on `this`; the server blocks in
 * waitForMoreWork() until a client notifies it.
 */
class WorkScheduler {

  type Action = () => Unit

  // Pending actions, exceptions to re-raise on the server, and interrupt
  // requests, each drained in FIFO order under this object's monitor.
  private val todo = new mutable.Queue[Action]
  private val throwables = new mutable.Queue[Throwable]
  private val interruptReqs = new mutable.Queue[InterruptReq]

  /** Called from server: block until one of todo list, throwables or interruptReqs is nonempty */
  def waitForMoreWork() = synchronized {
    while (todo.isEmpty && throwables.isEmpty && interruptReqs.isEmpty) { wait() }
  }

  /** called from Server: test whether one of todo list, throwables, or InterruptReqs is nonempty */
  def moreWork: Boolean = synchronized {
    todo.nonEmpty || throwables.nonEmpty || interruptReqs.nonEmpty
  }

  /** Called from server: get first action in todo list, and pop it off */
  def nextWorkItem(): Option[Action] = synchronized {
    if (todo.isEmpty) None else Some(todo.dequeue())
  }

  // Removes every queued action for which `f` yields a value, returning those values.
  def dequeueAll[T](f: Action => Option[T]): Seq[T] = synchronized {
    todo.dequeueAll(a => f(a).isDefined).map(a => f(a).get)
  }

  // Drains all pending interrupt requests, applying `f` to each.
  def dequeueAllInterrupts(f: InterruptReq => Unit): Unit = synchronized {
    interruptReqs.dequeueAll { iq => f(iq); true }
  }

  /** Called from server: return optional exception posted by client
   * Reset to no exception.
   */
  def pollThrowable(): Option[Throwable] = synchronized {
    if (throwables.isEmpty)
      None
    else {
      val result = Some(throwables.dequeue())
      // If more throwables remain, post a no-op item so the server wakes up
      // again and polls the next one.
      if (!throwables.isEmpty)
        postWorkItem { () => }
      result
    }
  }

  def pollInterrupt(): Option[InterruptReq] = synchronized {
    if (interruptReqs.isEmpty) None else Some(interruptReqs.dequeue())
  }

  /** Called from client: have interrupt executed by server and return result */
  def doQuickly[A](op: () => A): A = {
    val ir = askDoQuickly(op)
    ir.getResult()
  }

  // Enqueues an interrupt request wrapping `op` and wakes the server;
  // returns the request so the caller can later block on its result.
  def askDoQuickly[A](op: () => A): InterruptReq { type R = A } = {
    val ir = new InterruptReq {
      type R = A
      val todo = op
    }
    synchronized {
      interruptReqs enqueue ir
      notify()
    }
    ir
  }

  /** Called from client: have action executed by server */
  def postWorkItem(action: Action) = synchronized {
    todo enqueue action
    notify()
  }

  /** Called from client: cancel all queued actions */
  def cancelQueued() = synchronized {
    todo.clear()
  }

  /** Called from client:
   * Require an exception to be thrown on next poll.
   */
  def raise(exc: Throwable) = synchronized {
    throwables enqueue exc
    // The empty action guarantees the server wakes up and polls the throwable.
    postWorkItem { new EmptyAction }
  }
}
/** A no-op action: a `() => Unit` whose invocation does nothing. */
class EmptyAction extends (() => Unit) {
  def apply(): Unit = ()
}
| felixmulder/scala | src/compiler/scala/tools/nsc/util/WorkScheduler.scala | Scala | bsd-3-clause | 2,595 |
/* Copyright 2017-18, Emmanouil Antonios Platanios. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.platanios.tensorflow.api.ops.rnn.attention
import org.platanios.tensorflow.api.core.Shape
import org.platanios.tensorflow.api.implicits.Implicits._
import org.platanios.tensorflow.api.ops._
import org.platanios.tensorflow.api.ops.control_flow.WhileLoopVariable
import org.platanios.tensorflow.api.ops.rnn.cell.{RNNCell, Tuple}
import org.platanios.tensorflow.api.types.{DataType, INT32}
/** RNN cell that wraps another RNN cell and adds support for attention to it.
*
* @param cell RNN cell being wrapped.
* @param attentions Attention mechanisms to use.
* @param attentionLayerWeights Attention layer weights to use for projecting the computed attention.
* @param cellInputFn Function that takes the original cell input tensor and the attention tensor as inputs
* and returns the mixed cell input to use. Defaults to concatenating the two tensors
* across their last axis.
* @param outputAttention If `true` (the default), the output of this cell at each step is the attention value.
* This is the behavior of Luong-style attention mechanisms. If `false`, the output at
* each step is the output of `cell`. This is the behavior of Bhadanau-style attention
* mechanisms. In both cases, the `attention` tensor is propagated to the next time step
* via the state and is used there. This flag only controls whether the attention
* mechanism is propagated up to the next cell in an RNN stack or to the top RNN output.
* @param storeAlignmentsHistory If `true`, the alignments history from all steps is stored in the final output state
* (currently stored as a time major `TensorArray` on which you must call `stack()`).
* Defaults to `false`.
* @param name Name prefix used for all new ops.
*
* @author Emmanouil Antonios Platanios
*/
class AttentionWrapperCell[S, SS, AS, ASS] private[attention] (
    val cell: RNNCell[Output, Shape, S, SS],
    val attentions: Seq[Attention[AS, ASS]], // TODO: Allow for varying supported types in the sequence.
    val attentionLayerWeights: Seq[Output] = null,
    val cellInputFn: (Output, Output) => Output = (input, attention) => Basic.concatenate(Seq(input, attention), -1),
    val outputAttention: Boolean = true,
    val storeAlignmentsHistory: Boolean = false,
    val name: String = "AttentionWrapperCell"
)(implicit
    evS: WhileLoopVariable.Aux[S, SS],
    evAS: WhileLoopVariable.Aux[AS, ASS]
) extends RNNCell[Output, Shape, AttentionWrapperState[S, SS, Seq[AS], Seq[ASS]], (SS, Shape, Shape, Seq[Shape], Seq[Shape], Seq[ASS])] {

  // Total size of the concatenated attention output: the sum of the attention
  // layer output sizes if projection weights were provided, otherwise the sum
  // of the attention mechanisms' value sizes. -1 if any component is unknown.
  private[this] val attentionLayersSize: Int = {
    if (attentionLayerWeights != null) {
      require(attentionLayerWeights.lengthCompare(attentions.size) == 0,
        s"The number of attention layer weights (${attentionLayerWeights.size}) must match the number of " +
            s"attention mechanisms (${attentions.size}).")
      val sizes = attentionLayerWeights.map(_.shape(-1))
      if (sizes.contains(-1)) -1 else sizes.sum
    } else {
      val sizes = attentions.map(_.values.shape(-1))
      if (sizes.contains(-1)) -1 else sizes.sum
    }
  }

  /** Returns an initial state for this attention cell wrapper.
    *
    * @param  initialCellState Initial state for the wrapped cell.
    * @param  dataType         Optional data type which defaults to the data type of the last tensor in
    *                          `initialCellState`.
    * @return Initial state for this attention cell wrapper.
    */
  def initialState(initialCellState: S, dataType: DataType = null): AttentionWrapperState[S, SS, Seq[AS], Seq[ASS]] = {
    if (initialCellState == null) {
      null
    } else {
      Op.createWithNameScope(s"$name/InitialState") {
        val state = evS.outputs(initialCellState).last
        val inferredDataType = if (dataType == null) state.dataType else dataType
        // Prefer the statically-known batch size; fall back to a dynamic shape op.
        val batchSize: Output = if (state.rank != -1 && state.shape(0) != -1) state.shape(0) else Basic.shape(state)(0)
        // Assert (at graph-execution time) that the memory batch size matches the
        // requested batch size, and tie the assertions to the state via control deps.
        val checkedCellState = Op.createWith(controlDependencies = attentions.map(a => Checks.assertEqual(
          a.batchSize, batchSize,
          message = s"When calling `initialState` of `AttentionWrapperCell` '$name': " +
              "Non-matching batch sizes between the memory (encoder output) and the requested batch size.")).toSet) {
          evS.map(initialCellState, {
            case s: TensorArray => s.identity
            case s: OutputLike => Basic.identity(s, "CheckedInitialCellState")
          })
        }
        val initialAlignments = attentions.map(_.initialAlignment)
        AttentionWrapperState(
          cellState = checkedCellState,
          time = Basic.zeros(INT32, Shape.scalar()),
          attention = Basic.fill(inferredDataType, Basic.stack(Seq(batchSize, attentionLayersSize)))(0),
          alignments = initialAlignments,
          alignmentsHistory = {
            if (storeAlignmentsHistory)
              initialAlignments.map(a =>
                TensorArray.create(0, inferredDataType, dynamicSize = true, elementShape = a.shape))
            else
              Seq.empty
          },
          attentionState = attentions.map(_.initialState))
      }
    }
  }

  // The cell's output is the attention vector (Luong-style) or the wrapped
  // cell's output (Bahdanau-style), depending on `outputAttention`.
  override def outputShape: Shape = if (outputAttention) Shape(attentionLayersSize) else cell.outputShape

  override def stateShape: (SS, Shape, Shape, Seq[Shape], Seq[Shape], Seq[ASS]) = {
    (cell.stateShape, Shape(1), Shape(attentionLayersSize),
        attentions.map(a => Output.constantValueAsShape(a.alignmentSize.expandDims(0)).getOrElse(Shape.unknown())),
        attentions.map(a => {
          if (storeAlignmentsHistory)
            Output.constantValueAsShape(a.alignmentSize.expandDims(0)).getOrElse(Shape.unknown())
          else
            Shape.scalar()
        }),
        attentions.map(_.stateSize))
  }

  /** Performs a step using this attention-wrapped RNN cell.
    *
    *  - Step 1: Mix the `inputs` and the previous step's `attention` output via `cellInputFn`.
    *  - Step 2: Call the wrapped `cell` with the mixed input and its previous state.
    *  - Step 3: Score the cell's output with `attentionMechanism`.
    *  - Step 4: Calculate the alignments by passing the score through the `normalizer`.
    *  - Step 5: Calculate the context vector as the inner product between the alignments and the attention mechanism's
    *    values (memory).
    *  - Step 6: Calculate the attention output by concatenating the cell output and context through the attention layer
    *    (a linear layer with `attentionLayerWeights.shape(-1)` outputs).
    *
    * @param  input Input tuple to the attention wrapper cell.
    * @return Next tuple.
    */
  override def forward(
      input: Tuple[Output, AttentionWrapperState[S, SS, Seq[AS], Seq[ASS]]]
  ): Tuple[Output, AttentionWrapperState[S, SS, Seq[AS], Seq[ASS]]] = {
    // Step 1: Calculate the true inputs to the cell based on the previous attention value.
    val cellInput = cellInputFn(input.output, input.state.attention)
    val nextTuple = cell.forward(Tuple(cellInput, input.state.cellState))
    val output = nextTuple.output
    val batchSize: Output = if (output.rank != -1 && output.shape(0) != -1) output.shape(0) else Basic.shape(output)(0)
    val checkedOutput = Op.createWith(controlDependencies = attentions.map(a => Checks.assertEqual(
      a.batchSize, batchSize, message =
          s"When calling `initialState` of `AttentionWrapperCell` '$name': Non-matching batch sizes between the " +
              "memory (encoder output) and the requested batch size.")).toSet) {
      Basic.identity(output, "CheckedCellOutput")
    }
    // Steps 3-6, applied per attention mechanism (paired with its previous state
    // and, when present, its projection weights).
    val weights = if (attentionLayerWeights != null) attentionLayerWeights else attentions.map(_ => null)
    val (allAttentions, allAlignments, allStates) = (attentions, input.state.attentionState, weights).zipped.map {
      case (mechanism, previousState, w) =>
        val (alignments, state) = mechanism.alignment(checkedOutput, previousState)
        // Reshape from [batchSize, memoryTime] to [batchSize, 1, memoryTime]
        val expandedAlignments = alignments.expandDims(1)
        // Context is the inner product of alignments and values along the memory time dimension.
        // The alignments shape is:       [batchSize, 1, memoryTime]
        // The mechanism values shape is: [batchSize, memoryTime, memorySize]
        // The batched matrix multiplication is over `memoryTime` and so the output shape is: [batchSize, 1, memorySize]
        // We then squeeze out the singleton dimension.
        val context = Math.matmul(expandedAlignments, mechanism.values).squeeze(Seq(1))
        val attention = {
          if (w != null)
            Math.matmul(Basic.concatenate(Seq(checkedOutput, context), 1), w)
          else
            context
        }
        (attention, alignments, state)
    }.unzip3
    // Append this step's alignments to the per-mechanism history, if tracking it.
    val histories = {
      if (storeAlignmentsHistory)
        input.state.alignmentsHistory.zip(allAlignments).map(p => p._1.write(input.state.time, p._2))
      else
        input.state.alignmentsHistory
    }
    val one = Basic.constant(1)
    val attention = Basic.concatenate(allAttentions, one)
    val nextState = AttentionWrapperState(
      nextTuple.state, input.state.time + one, attention, allAlignments, histories, allStates)
    if (outputAttention)
      Tuple(attention, nextState)
    else
      Tuple(checkedOutput, nextState)
  }
}
}
object AttentionWrapperCell {
  /** Creates a new [[AttentionWrapperCell]]; see the class documentation for parameter semantics. */
  def apply[S, SS, AS, ASS](
      cell: RNNCell[Output, Shape, S, SS],
      attentions: Seq[Attention[AS, ASS]],
      attentionLayerWeights: Seq[Output] = null,
      cellInputFn: (Output, Output) => Output = (input, attention) => Basic.concatenate(Seq(input, attention), -1),
      outputAttention: Boolean = true,
      storeAlignmentsHistory: Boolean = false,
      name: String = "AttentionWrapperCell"
  )(implicit
      evS: WhileLoopVariable.Aux[S, SS],
      evAS: WhileLoopVariable.Aux[AS, ASS]
  ): AttentionWrapperCell[S, SS, AS, ASS] = {
    new AttentionWrapperCell[S, SS, AS, ASS](
      cell = cell,
      attentions = attentions,
      attentionLayerWeights = attentionLayerWeights,
      cellInputFn = cellInputFn,
      outputAttention = outputAttention,
      storeAlignmentsHistory = storeAlignmentsHistory,
      name = name)
  }
}
| eaplatanios/tensorflow | tensorflow/scala/api/src/main/scala/org/platanios/tensorflow/api/ops/rnn/attention/AttentionWrapperCell.scala | Scala | apache-2.0 | 11,117 |
package Cheetah.Immutable
import java.util.NoSuchElementException
import Cheetah.Immutable.Vector.empty
import scala.annotation.tailrec
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.generic.CanBuildFrom
import scala.collection.immutable.Stream
import scala.collection.mutable.ArrayBuffer
import scala.collection.{GenIterable, GenMap, GenSeq, GenTraversable, immutable, mutable}
import scala.language.{higherKinds, postfixOps}
import scala.reflect.ClassTag
import scala.{Vector => ScalaVector}
object Vector {

  /** Returns a fresh builder for vectors with elements of type `A`. */
  def newBuilder[A: ClassTag]: VectorBuilder[A] = new VectorBuilder[A]()

  /** The generic builder that builds instances of $Coll
    * at arbitrary element types.
    */
  def genericBuilder[B: ClassTag]: VectorBuilder[B] = new VectorBuilder[B]()

  // Integrates with the standard-library collection framework so that
  // transformations on a Vector produce a Vector again.
  implicit def canBuildFrom[A: ClassTag]: CanBuildFrom[Vector[_], A, Vector[A]] =
    new CanBuildFrom[Vector[_], A, Vector[A]] {
      // Parameterless `apply` implements CanBuildFrom's empty-paren `apply()`
      // (Scala 2 permits this override form).
      def apply: VectorBuilder[A] = new VectorBuilder[A]()

      override def apply(from: Vector[_]): VectorBuilder[A] = new VectorBuilder[A]()
    }

  // Compile-time switch for internal assertions in the vector implementation.
  @inline private[Immutable] final val compileAssertions = false

  /** An empty vector of element type `A`. */
  def empty[A: ClassTag]: Vector[A] = new Vector[A](0)

  // Shared, pre-allocated two-slot node used to represent an empty transient block.
  final lazy private[Immutable] val emptyTransientBlock: Array[AnyRef] = new Array[AnyRef](2)
}
final class Vector[+A: ClassTag](override private[Immutable] val endIndex: Int)
extends VectorPointer[A@uncheckedVariance]
with Serializable {
self =>
private[Immutable] var transient: Boolean = false
// GenSeqLike
def seq: Vector[A] = this
/** Selects an element by its index in the type constructor Vector
*
* Example:
*
* {{{
* scala> val x = Vector(1, 2, 3, 4, 5)
* x: Vector[Int] = Vector(1, 2, 3, 4, 5)
*
* scala> x(3)
* res1: Int = 4
* }}}
*
* @param index The index to select.
* @return the element of this $coll at index `index`, where `0` indicates the first element.
* @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`.
*/
def apply(index: Int): A = {
val _focusStart: Int = this.focusStart
if (_focusStart <= index && index < this.focusEnd) {
val indexInFocus: Int = index - _focusStart
getElem(indexInFocus, indexInFocus ^ this.focus)
} else if (isDefinedAt(index)) {
if (this.transient) {
this.normalize(this.depth)
this.transient = false
}
getElementFromRoot(index)
} else {
throw new IndexOutOfBoundsException(index.toString)
}
}
/** The length/size of the $coll.
*
* $willNotTerminateInf
*
* Note: `xs.length` and `xs.size` yield the same result.
*
* @return the number of elements in this $coll.
*/
def length: Int = endIndex
def size: Int = endIndex
/** Tests whether this $coll contains given index.
*
* The implementations of methods `apply` and `isDefinedAt` turn a `Seq[A]` into
* a `PartialFunction[Int, A]`.
*
* @param index the index to test
* @return `true` if this $coll contains an element at position `idx`, `false` otherwise.
*/
@inline def isDefinedAt(index: Int): Boolean = (index >= 0) && (index < this.endIndex)
/** Computes length of longest segment whose elements all satisfy some predicate.
*
* $mayNotTerminateInf
*
* @param p the predicate used to test elements.
* @param from the index where the search starts.
* @return the length of the longest segment of this $coll starting from index `from`
* such that every element of the segment satisfies the predicate `p`.
*/
def segmentLength(p: A => Boolean, from: Int): Int = {
if (from >= endIndex) 0 else {
var i: Int = 0
val forward: VectorIterator[A] = iterator(from, endIndex)
while (forward.hasNext && p(forward.next())) i += 1
i
}
}
/** Returns the length of the longest prefix whose elements all satisfy some predicate.
*
* $mayNotTerminateInf
*
* @param p the predicate used to test elements.
* @return the length of the longest prefix of this $coll
* such that every element of the segment satisfies the predicate `p`.
*/
def prefixLength(p: A => Boolean): Int = segmentLength(p, 0)
/** Finds index of the first element satisfying some predicate after or at some start index.
*
* $mayNotTerminateInf
*
* @param p the predicate used to test elements.
* @param from the start index
* @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`,
* or `-1`, if none exists.
*/
def indexWhere(p: A => Boolean, from: Int): Int = {
var i: Int = from
val forward: VectorIterator[A] = iterator(from, endIndex)
while (forward.hasNext) {
if (p(forward.next())) return i
i += 1
}
-1
}
/** Finds index of first element satisfying some predicate.
*
* $mayNotTerminateInf
*
* @param p the predicate used to test elements.
* @return the index of the first element of this $coll that satisfies the predicate `p`,
* or `-1`, if none exists.
*/
def indexWhere(p: A => Boolean): Int = indexWhere(p, 0)
/** Finds index of first occurrence of some value in this $coll.
*
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @return the index of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
* @usecase def indexOf(elem: A): Int
* @inheritdoc
*
* $mayNotTerminateInf
*
*/
def indexOf[B >: A](elem: B): Int = indexOf(elem, 0)
/** Finds index of first occurrence of some value in this $coll after or at some start index.
*
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @param from the start index
* @return the index `>= from` of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
* @usecase def indexOf(elem: A, from: Int): Int
* @inheritdoc
*
* $mayNotTerminateInf
*
*/
def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem == _, from)
/** Finds index of last occurrence of some value in this $coll.
*
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @return the index of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
* @usecase def lastIndexOf(elem: A): Int
* @inheritdoc
*
* $willNotTerminateInf
*
*/
def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem == _)
/** Finds index of last occurrence of some value in this $coll before or at a given end index.
*
* @param elem the element value to search for.
* @param end the end index.
* @tparam B the type of the element `elem`.
* @return the index `<= end` of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
* @usecase def lastIndexOf(elem: A, end: Int): Int
* @inheritdoc
*/
def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem == _, end)
/** Finds index of last element satisfying some predicate.
*
* $willNotTerminateInf
*
* @param p the predicate used to test elements.
* @return the index of the last element of this $coll that satisfies the predicate `p`,
* or `-1`, if none exists.
*/
def lastIndexWhere(p: A => Boolean): Int = lastIndexWhere(p, endIndex)
/** Finds index of last element satisfying some predicate before or at given end index.
*
* @param p the predicate used to test elements.
* @return the index `<= end` of the last element of this $coll that satisfies the predicate `p`,
* or `-1`, if none exists.
*/
def lastIndexWhere(p: A => Boolean, end: Int): Int = {
var i: Int = endIndex - 1
val reverse: VectorReverseIterator[A] = reverseiterator(0, end)
while (reverse.hasNext && !p(reverse.next())) i -= 1
i
}
/** Builds a new $coll holding the elements in positions `[start, end)` in reversed order.
 *
 * $willNotTerminateInf
 *
 * @param start the inclusive start index
 * @param end the exclusive end index
 * @return a new $coll with the selected elements in reversed order.
 */
def reverse(start: Int, end: Int): Vector[A] = {
  val out: VectorBuilder[A] = newBuilder
  val backward: VectorReverseIterator[A] = reverseiterator(start, end)
  while (backward.hasNext) out += backward.next()
  out.result()
}
def reverse: Vector[A] = this.reverse(0, this.endIndex)
/** Builds a new collection by applying a function to all elements of this Vector
 * and collecting the results in reversed order.
 *
 * Note: `xs.reverseMap(f)` is equivalent to `xs.reverse.map(f)` but avoids the
 * intermediate reversed vector.
 *
 * $willNotTerminateInf
 *
 * @param f the function to apply to each element.
 * @tparam B the element type of the returned collection.
 * @return a new `Vector[B]` with `f` applied to each element, in reversed order.
 */
def reverseMap[B: ClassTag](f: A => B): Vector[B] = {
  val out: VectorBuilder[B] = genericBuilder[B]
  val backward: VectorReverseIterator[A] = reverseiterator(0, endIndex)
  while (backward.hasNext) out += f(backward.next())
  out.result()
}
/** Tests whether this $coll starts with the given sequence.
 *
 * @param that the sequence to test
 * @return `true` if this collection has `that` as a prefix, `false` otherwise.
 */
def startsWith[B](that: Vector[B]): Boolean = this.startsWith(that, 0)
/** Tests whether this $coll contains the given sequence at a given index.
 *
 * @param that the sequence to test
 * @param offset the index where the sequence is searched.
 * @return `true` if the sequence `that` occurs in this $coll starting at
 *         index `offset`, otherwise `false`.
 */
def startsWith[B](that: Vector[B], offset: Int): Boolean = {
  // The empty sequence is a prefix of everything, at any valid offset.
  if (that.endIndex == 0) true
  else if (offset < 0 || offset >= this.endIndex) false
  // BUG FIX: this previously used `!=`, which rejected every *proper* prefix —
  // e.g. Vector(1,2,3).startsWith(Vector(1)) was false. A prefix only needs the
  // remaining elements to be at least as many as `that`.
  else if ((this.endIndex - offset) < that.endIndex) false
  else {
    var i: Int = 0
    val thisforward: VectorIterator[A] = this.iterator(offset, this.endIndex)
    val thatforward: VectorIterator[B] = that.iterator(0, that.endIndex)
    while (thatforward.hasNext && thisforward.next() == thatforward.next()) i += 1
    i == that.endIndex
  }
}
/** Tests whether this $coll ends with the given sequence.
 * $willNotTerminateInf
 *
 * @param that the sequence to test
 * @return `true` if this $coll has `that` as a suffix, `false` otherwise.
 */
def endsWith[B](that: Vector[B]): Boolean = {
  if (that.endIndex > this.endIndex) false
  else {
    val selfBackward: VectorReverseIterator[A] = this.reverseiterator
    val thatBackward: VectorReverseIterator[B] = that.reverseiterator
    var matched: Int = 0
    while (thatBackward.hasNext && selfBackward.next() == thatBackward.next()) matched += 1
    matched == that.endIndex
  }
}
/** Produces a new $coll where a slice of elements in this $coll is replaced by another sequence.
 *
 * @param from the index of the first replaced element
 * @param patch the replacement sequence
 * @param replaced the number of elements to drop in the original $coll
 * @tparam B the element type of the returned $coll.
 * @return a new $coll consisting of all elements of this $coll
 *         except that `replaced` elements starting from `from` are replaced
 *         by `patch`.
 * @usecase def patch(from: Int, that: GenSeq[A], replaced: Int): $Coll[A]
 * @inheritdoc
 * @return a new $coll consisting of all elements of this $coll
 *         except that `replaced` elements starting from `from` are replaced
 *         by `patch`.
 */
def patch[B >: A : ClassTag](from: Int, patch: Vector[B], replaced: Int): Vector[B] = {
  // Work on a copy that shares this vector's tree; all mutation below goes
  // through the copy's (possibly transient) display.
  val vector: Vector[B] = new Vector[B](endIndex)
  vector.transient = this.transient
  vector.initWithFocusFrom(this.asInstanceOf[Vector[B]])
  // `index` = start of the 32-wide leaf block containing `from` (block-aligned),
  // or 0 when patching from the beginning.
  var index: Int = {
    if (from > 0) {
      // Refocus only if `from` lies outside the currently focused leaf block.
      if (from < vector.focusStart || vector.focusEnd <= from || ((from - vector.focusStart) & ~31) != (vector.focus & ~31)) {
        if (!vector.isDefinedAt(from)) throw new IndexOutOfBoundsException(from.toString)
        vector.normalizeAndFocusOn(from)
      }
      from - ((from - vector.focusStart) & 31)
    } else {
      0
    }
  }
  // Pad the patch with enough elements so that blocks are aligned
  var i: Int = from - 1
  while (i >= index) {
    // NOTE(review): `+:` returns a NEW vector and the result is discarded here,
    // so this loop has no observable effect — the intended block-alignment
    // padding of `patch` never happens. Likely should rebind `patch` (needs a
    // local var) or be removed; confirm against the block-copy branch below.
    this (i) +: patch
    i -= 1
  }
  // Overwrite elements in [from, from + min(replaced, patch.length)).
  while (index < from + spire.math.min(replaced, patch.length)) {
    // Focus on the index that needs to be updated
    if (index < vector.focusStart || vector.focusEnd <= index || ((index - vector.focusStart) & ~31) != (vector.focus & ~31)) {
      if (!vector.isDefinedAt(index)) throw new IndexOutOfBoundsException(index.toString)
      vector.normalizeAndFocusOn(index)
    }
    vector.makeTransientIfNeeded()
    if ((from + spire.math.min(replaced, patch.length) - index) < 32) {
      // Replace only the subset of the Array if less than a block of elements are left to be updated
      val d0: Array[B] = copyOf(vector.display0.asInstanceOf[Leaf]).asInstanceOf[Array[B]]
      while (index < from + spire.math.min(replaced, patch.length)) {
        d0.update((index - vector.focusStart) & 31, patch(index - from))
        index += 1
      }
      vector.display0 = d0
    } else {
      // Replace by a copy of the whole Array when all the elements of the block need to be updated
      // NOTE(review): this mutates `patch`'s focus/transient state as a side
      // effect of reading its leaf — confirm callers do not rely on `patch`
      // being left untouched.
      patch.normalizeAndFocusOn(index - from)
      patch.makeTransientIfNeeded()
      vector.display0 = copyOf(patch.display0.asInstanceOf[Leaf]).asInstanceOf[Array[B]]
      index += 32
    }
  }
  vector
}
/** A copy of this $coll with one single replaced element.
 *
 * @param index the position of the replacement
 * @param elem the replacing element
 * @tparam B the element type of the returned $coll.
 * @return a new $coll which is a copy of this $coll with the element at position `index` replaced by `elem`.
 * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`.
 * @usecase def update(index: Int, elem: A): $Coll[A]
 * @inheritdoc
 * @return a copy of this $coll with the element at position `index` replaced by `elem`.
 */
def update[B >: A : ClassTag](index: Int, elem: B): Vector[B] = {
  // Copy that shares this vector's tree; only the leaf holding `index` is
  // physically copied below (path-copying persistent update).
  val vector: Vector[B] = new Vector[B](endIndex)
  vector.transient = this.transient
  vector.initWithFocusFrom(this.asInstanceOf[Vector[B]])
  // Refocus only when `index` falls outside the currently focused 32-wide leaf block.
  if (index < focusStart || focusEnd <= index || ((index - focusStart) & ~31) != (focus & ~31)) {
    if (!vector.isDefinedAt(index)) throw new IndexOutOfBoundsException(index.toString)
    vector.normalizeAndFocusOn(index)
  }
  vector.makeTransientIfNeeded()
  // Copy-on-write of the focused leaf, then overwrite the slot within it.
  val d0: Array[B] = copyOf(vector.display0.asInstanceOf[Leaf]).asInstanceOf[Array[B]]
  d0.update((index - vector.focusStart) & 31, elem)
  vector.display0 = d0
  vector.asInstanceOf[Vector[B]]
}
/** A copy of this $coll with an element prepended.
 *
 * A mnemonic for `+:` vs. `:+` is: the Colon goes on the Collection side;
 * `:`-ending operators are right associative, so `x +: xs` prepends.
 * The original $coll is not modified.
 *
 * @param elem the prepended element
 * @tparam B the element type of the returned $coll.
 * @return a new $coll consisting of `elem` followed by all elements of this $coll.
 */
def +:[B >: A : ClassTag](elem: B): Vector[B] = {
  if (this.length == 0) {
    createSingletonVector(elem)
  } else {
    val prepended: Vector[B] = new Vector[B](this.endIndex + 1)
    prepended.transient = this.transient
    prepended.initWithFocusFrom(this.asInstanceOf[Vector[B]])
    prepended.prepend(elem)
    prepended
  }
}
/** A copy of this $coll with an element appended.
 *
 * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side.
 * The original $coll is not modified.
 *
 * $willNotTerminateInf
 *
 * @param elem the appended element
 * @tparam B the element type of the returned $coll.
 * @return a new $coll consisting of all elements of this $coll followed by `elem`.
 */
def :+[B >: A : ClassTag](elem: B): Vector[B] = {
  if (this.endIndex == 0) {
    createSingletonVector(elem)
  } else {
    val appended: Vector[B] = new Vector[B](this.endIndex + 1)
    appended.transient = this.transient
    appended.initWithFocusFrom(this.asInstanceOf[Vector[B]])
    appended.append(elem, this.endIndex)
    appended
  }
}
/** A copy of this $coll with an element value appended until a given target length is reached.
 *
 * @param length the target length
 * @param elem the padding value
 * @tparam B the element type of the returned $coll.
 * @return a new $coll consisting of all elements of this $coll followed by the
 *         minimal number of occurrences of `elem` so that the result has a
 *         length of at least `length`.
 */
def padTo[B >: A : ClassTag](length: Int, elem: B): Vector[B] = {
  val out: VectorBuilder[B] = genericBuilder[B]
  out ++= this
  var remaining: Int = length - this.length
  while (remaining > 0) {
    out += elem
    remaining -= 1
  }
  out.result()
}
/** Tests whether every element of this $coll relates to the
 * corresponding element of another sequence by satisfying a test predicate.
 *
 * @param that the other sequence
 * @param p the test predicate, which relates elements from both sequences
 * @tparam B the type of the elements of `that`
 * @return `true` if both sequences have the same length and `p(x, y)` holds
 *         for all corresponding elements, otherwise `false`.
 */
def corresponds[B](that: Vector[B])(p: (A, B) => Boolean): Boolean = {
  val left: VectorIterator[A] = this.iterator
  val right: VectorIterator[B] = that.iterator
  // A length mismatch fails immediately; otherwise stop at the first pair
  // that violates the predicate (no non-local return needed).
  var ok: Boolean = this.length == that.length
  while (ok && left.hasNext) ok = p(left.next(), right.next())
  ok
}
/** Produces a new sequence containing all elements of this $coll followed by
 * all elements of `that`. `xs union ys` is equivalent to `xs ++ ys`
 * (an order-preserving multi-set union, the counterpart of `diff`/`intersect`).
 *
 * $willNotTerminateInf
 *
 * @param that the sequence to add.
 * @tparam B the element type of the returned $coll.
 * @return a new $coll with all elements of this $coll followed by all elements of `that`.
 */
def union[B >: A : ClassTag](that: Vector[B]): Vector[B] = this.++(that)
/** Computes the multiset difference between this $coll and another sequence.
 *
 * If an element value `x` appears ''n'' times in `that`, then the first ''n''
 * occurrences of `x` do not form part of the result, but any following
 * occurrences do.
 *
 * $willNotTerminateInf
 *
 * @param that the sequence of elements to remove
 * @tparam B the element type of `that`.
 * @return a new $coll with all elements of this $coll except some occurrences
 *         of elements that also appear in `that`.
 */
def diff[B >: A](that: Vector[B]): Vector[A] = {
  val remaining: mutable.Map[B, Int] = that.occurrences
  val out: VectorBuilder[A] = newBuilder
  val it: VectorIterator[A] = this.iterator
  while (it.hasNext) {
    val elem: A = it.next()
    val n: Int = remaining(elem)
    if (n == 0) out += elem // no budget left in `that`: keep the element
    else remaining(elem) = n - 1 // consume one occurrence from `that`
  }
  out.result()
}
/** Computes the multiset intersection between this $coll and another sequence.
 *
 * If an element value `x` appears ''n'' times in `that`, then the first ''n''
 * occurrences of `x` are retained in the result, but any following occurrences
 * are omitted.
 *
 * $mayNotTerminateInf
 *
 * @param that the sequence of elements to intersect with.
 * @tparam B the element type of `that`.
 * @return a new $coll with all elements of this $coll that also appear in `that`.
 */
def intersect[B >: A](that: Vector[B]): Vector[A] = {
  val remaining: mutable.Map[B, Int] = that.occurrences
  val out: VectorBuilder[A] = newBuilder
  val it: VectorIterator[A] = this.iterator
  while (it.hasNext) {
    val elem: A = it.next()
    val n: Int = remaining(elem)
    if (n != 0) { // still have matching occurrences in `that`
      out += elem
      remaining(elem) = n - 1
    }
  }
  out.result()
}
/** Builds a new $coll from this $coll without any duplicate elements.
 * $willNotTerminateInf
 *
 * @return A new $coll containing the first occurrence of every element of this $coll.
 */
def distinct: Vector[A] = {
  val seen: mutable.HashSet[A] = new mutable.HashSet[A]
  val out: VectorBuilder[A] = newBuilder
  val it: VectorIterator[A] = this.iterator
  while (it.hasNext) {
    val elem: A = it.next()
    // `add` returns true only the first time the element is inserted.
    if (seen.add(elem)) out += elem
  }
  out.result()
}
/** Hashcodes for $Coll produce a value from the hashcodes of all the
 * elements of the $coll, so that equal vectors hash equally.
 */
override def hashCode(): Int = {
  import scala.util.hashing.MurmurHash3
  // BUG FIX: this previously returned `MurmurHash3.hashCode()` — the identity
  // hash of the MurmurHash3 *object* itself — so every vector hashed to the
  // same constant and hashCode was unrelated to equals. Mix every element
  // in order instead.
  var h: Int = MurmurHash3.seqSeed
  var n: Int = 0
  val forward: VectorIterator[A] = this.iterator
  while (forward.hasNext) {
    h = MurmurHash3.mix(h, forward.next().##)
    n += 1
  }
  MurmurHash3.finalizeHash(h, n)
}
/** The equals method for arbitrary sequences. Compares this sequence to
 * some other object.
 *
 * @param that The object to compare the sequence to
 * @return `true` if `that` is a sequence that has the same elements as
 *         this sequence in the same order, `false` otherwise
 */
override def equals(that: Any): Boolean = that match {
  case other: Vector[_] => this.corresponds(other)((x, y) => x == y)
  case _ => false
}
// GenericTraversableTemplate
/** Applies a function `f` to all elements of this $coll.
 *
 * Note: this method underlies the implementation of most other bulk operations.
 *
 * @param f the function applied for its side-effect to every element;
 *          its result is discarded.
 * @tparam B the result type of `f`; always ignored.
 */
def foreach[B](f: A => B): Unit = {
  val it: VectorIterator[A] = this.iterator
  while (it.hasNext) {
    f(it.next())
  }
}
/** Selects the first element of this $coll.
 *
 * @return the first element of this $coll.
 * @throws NoSuchElementException if the $coll is empty.
 */
def head: A = {
  if (this.endIndex != 0) {
    this.apply(0)
  } else {
    // FIX: previously threw UnsupportedOperationException, contradicting both
    // the documented @throws contract above and the sibling `last`, which
    // throws NoSuchElementException.
    throw new NoSuchElementException("empty.head")
  }
}
/** Tests whether this $coll is empty.
 *
 * @return `true` if the $coll contains no elements, `false` otherwise.
 */
def isEmpty: Boolean = this.endIndex == 0
/** Tests whether this $coll is not empty.
 *
 * @return `true` if the $coll contains at least one element, `false` otherwise.
 */
def nonEmpty: Boolean = !this.isEmpty
// /** The factory companion object that builds instances of class $Coll.
// * (or its `Iterable` superclass where class $Coll is not a `Seq`.)
// */
// def companion: GenericCompanion[Vector] = new GenericCompanion[Vector] {
// override def newBuilder[A]: VectorBuilder[A] = new VectorBuilder[A]
// }
/** The builder used to build instances of type $Coll[A].
 */
protected[this] def newBuilder: VectorBuilder[A] = new VectorBuilder[A]
/** The generic builder used to build instances of $Coll at arbitrary
 * element types.
 */
def genericBuilder[B: ClassTag]: VectorBuilder[B] = new VectorBuilder[B]
/** Returns a $coll formed from this $coll and another collection
 * by combining corresponding elements in pairs.
 * If one of the two collections is longer than the other, its remaining
 * elements are ignored.
 *
 * $orderDependent
 *
 * @param that the collection providing the second half of each result pair
 * @tparam B the type of the second half of the returned pairs
 * @return a new $coll of pairs of corresponding elements; its length is the
 *         minimum of the lengths of this $coll and `that`.
 */
def zip[B: ClassTag](that: Vector[B]): Vector[(A, B)] = {
  val out: VectorBuilder[(A, B)] = genericBuilder[(A, B)]
  val left: VectorIterator[A] = this.iterator
  val right: VectorIterator[B] = that.iterator
  while (left.hasNext && right.hasNext) {
    out += ((left.next(), right.next()))
  }
  out.result()
}
/** Zips this $coll with its indices.
 *
 * $orderDependent
 *
 * @tparam B the type of the first half of the returned pairs (always a
 *           supertype of the collection's element type `A`).
 * @return A new $coll containing pairs of all elements of this $coll paired
 *         with their index. Indices start at `0`.
 * @example
 * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))`
 */
def zipWithIndex[B >: A]: Vector[(B, Int)] = {
  val out: VectorBuilder[(B, Int)] = genericBuilder[(B, Int)]
  val it: VectorIterator[A] = this.iterator
  var i: Int = 0
  while (it.hasNext) {
    out += ((it.next(), i))
    i += 1
  }
  out.result()
}
/** Returns a $coll formed from this $coll and another collection
 * by combining corresponding elements in pairs.
 * If one of the two collections is shorter than the other,
 * placeholder elements are used to extend the shorter one to the length of the longer.
 *
 * $orderDependent
 *
 * @param that the collection providing the second half of each result pair
 * @param thisElem the element used to pad the result if this $coll is shorter than `that`.
 * @param thatElem the element used to pad the result if `that` is shorter than this $coll.
 * @tparam B the type of the second half of the returned pairs
 * @tparam A1 the type of the first half of the returned pairs
 * @return a new $coll of pairs of corresponding elements; its length is the
 *         maximum of the lengths of this $coll and `that`, padded as described.
 */
def zipAll[B, A1 >: A](that: Vector[B], thisElem: A1, thatElem: B): Vector[(A1, B)] = {
  val out: VectorBuilder[(A1, B)] = genericBuilder[(A1, B)]
  val left: VectorIterator[A] = this.iterator
  val right: VectorIterator[B] = that.iterator
  // Pair up the common prefix, then pad whichever side has elements left.
  while (left.hasNext && right.hasNext) out += ((left.next(), right.next()))
  while (left.hasNext) out += ((left.next(), thatElem))
  while (right.hasNext) out += ((thisElem, right.next()))
  out.result()
}
/** Converts this $coll of pairs into two collections of the first and second
 * half of each pair.
 *
 * {{{
 * val xs = $Coll((1, "one"), (2, "two"), (3, "three")).unzip
 * // xs == ($Coll(1, 2, 3), $Coll(one, two, three))
 * }}}
 *
 * @tparam A1 the type of the first half of the element pairs
 * @tparam A2 the type of the second half of the element pairs
 * @param asPair an implicit conversion asserting that the element type of
 *               this $coll is a pair.
 * @return a pair of ${coll}s containing the first, respectively second,
 *         half of each element pair of this $coll.
 */
def unzip[A1: ClassTag, A2: ClassTag](implicit asPair: A => (A1, A2)): (Vector[A1], Vector[A2]) = {
  val firsts: VectorBuilder[A1] = genericBuilder[A1]
  val seconds: VectorBuilder[A2] = genericBuilder[A2]
  foreach { elem =>
    val (a, b) = asPair(elem)
    firsts += a
    seconds += b
  }
  (firsts.result(), seconds.result())
}
/** Converts this $coll of triples into three collections of the first, second,
 * and third element of each triple.
 *
 * {{{
 * val xs = $Coll((1, "one", '1'), (2, "two", '2'), (3, "three", '3')).unzip3
 * // xs == ($Coll(1, 2, 3), $Coll(one, two, three), $Coll(1, 2, 3))
 * }}}
 *
 * @tparam A1 the type of the first member of the element triples
 * @tparam A2 the type of the second member of the element triples
 * @tparam A3 the type of the third member of the element triples
 * @param asTriple an implicit conversion asserting that the element type of
 *                 this $coll is a triple.
 * @return a triple of ${coll}s containing the first, second, respectively
 *         third, member of each element triple of this $coll.
 */
def unzip3[A1: ClassTag, A2: ClassTag, A3: ClassTag](implicit asTriple: A => (A1, A2, A3)): (Vector[A1], Vector[A2], Vector[A3]) = {
  val firsts: VectorBuilder[A1] = genericBuilder[A1]
  val seconds: VectorBuilder[A2] = genericBuilder[A2]
  val thirds: VectorBuilder[A3] = genericBuilder[A3]
  foreach { elem =>
    val (a, b, c) = asTriple(elem)
    firsts += a
    seconds += b
    thirds += c
  }
  (firsts.result(), seconds.result(), thirds.result())
}
/** Converts this $coll of vectors into a single $coll formed by concatenating
 * the elements of the inner ${coll}s, in order.
 *
 * @tparam B the type of the elements of each inner collection.
 * @param asVector an implicit conversion asserting that the element type of
 *                 this $coll is itself a `Vector[B]`.
 * @return a new $coll resulting from concatenating all element ${coll}s.
 */
def flatten[B: ClassTag](implicit asVector: A => Vector[B]): Vector[B] = {
  val out: VectorBuilder[B] = genericBuilder[B]
  foreach(inner => out ++= asVector(inner))
  out.result()
}
/** Transposes this $coll of traversable collections into
* a $coll of ${coll}s.
*
* The resulting collection's type will be guided by the
* static type of $coll. For example:
*
* {{{
* val xs = List(
* Set(1, 2, 3),
* Set(4, 5, 6)).transpose
* // xs == List(
* // List(1, 4),
* // List(2, 5),
* // List(3, 6))
*
* val ys = Vector(
* List(1, 2, 3),
* List(4, 5, 6)).transpose
* // ys == Vector(
* // Vector(1, 4),
* // Vector(2, 5),
* // Vector(3, 6))
* }}}
*
* @tparam B the type of the elements of each traversable collection.
* @return a two-dimensional $coll of ${coll}s which has as ''n''th row
* the ''n''th column of this $coll.
* @throws IllegalArgumentException if all collections in this $coll
* are not of the same size.
*/
/*
def transpose[B](implicit asVector: A => Vector[B]): Vector[Vector[B]@uncheckedVariance] = {
if (isEmpty) return genericBuilder[Vector[B]].result()
def fail: Nothing = throw new IllegalArgumentException("transpose requires all collections have the same size")
val headSize: Int = asVector(this.head).size
val bs: IndexedSeq[VectorBuilder[B]] = IndexedSeq.fill(headSize)(genericBuilder[B])
this.foreach(xs => {
var i: Int = 0
asVector(xs).foreach(x => {
if (i >= headSize) fail
bs(i) += x
i += 1
})
if (i != headSize) fail
})
val build: VectorBuilder[Vector[B]] = genericBuilder[Vector[B]]
bs.foreach(b => build += b.result)
build.result()
}
*/
// GenIterableLike
/** Constructs an iterator over the elements in positions `[start, end)`.
 *
 * @param start the start index
 * @param end the end index
 * @return an iterator from start to end
 */
final def iterator(start: Int, end: Int): VectorIterator[A] = {
  // NOTE(review): normalizes and clears the `transient` flag before iterating —
  // the iterator evidently requires a fully normalized tree; confirm that
  // clearing `transient` here is safe for concurrent readers.
  if (this.transient) {
    this.normalize(this.depth)
    this.transient = false
  }
  val iterator: VectorIterator[A] = new VectorIterator[A](start, end)
  iterator.initIteratorFrom(this)
  iterator
}
final def iterator: VectorIterator[A] = this.iterator(0, this.endIndex)
/** Constructs a reverse iterator over the elements in positions `[start, end)`.
 *
 * @param start the start index
 * @param end the end index
 * @return an iterator yielding the selected elements in reverse order
 */
final def reverseiterator(start: Int, end: Int): VectorReverseIterator[A] = {
  // NOTE(review): same normalization as `iterator(start, end)` — the reverse
  // iterator requires a normalized tree, and the `transient` flag is cleared
  // as a side effect.
  if (this.transient) {
    this.normalize(this.depth)
    this.transient = false
  }
  val reverseiterator: VectorReverseIterator[A] = new VectorReverseIterator[A](start, end)
  reverseiterator.initIteratorFrom(this)
  reverseiterator
}
final def reverseiterator: VectorReverseIterator[A] = this.reverseiterator(0, this.endIndex)
/** Checks if the other collection contains the same elements in the same order
 * as this $coll.
 *
 * $orderDependent
 * $willNotTerminateInf
 *
 * @param that the collection to compare with.
 * @tparam B the type of the elements of collection `that`.
 * @return `true` if both collections contain the same elements in the same
 *         order, `false` otherwise.
 */
def sameElements[B >: A](that: Vector[B]): Boolean = this.corresponds(that)(_ == _)
// GenTraversableLike
/** Optionally selects the first element.
 * $orderDependent
 *
 * @return the first element of this $coll if it is nonempty, `None` if it is empty.
 */
final def headOption: Option[A] = if (this.isEmpty) None else Some(this.head)
/** Tests whether this $coll can be repeatedly traversed.
 *
 * @return always `true`: this $coll is an immutable, materialized collection.
 */
final def isTraversableAgain: Boolean = true
/** Selects all elements except the first.
 * $orderDependent
 *
 * @return a $coll consisting of all elements of this $coll except the first one.
 */
final def tail: Vector[A] = drop(1)
/** Selects the last element.
 * $orderDependent
 *
 * @return The last element of this $coll.
 * @throws NoSuchElementException If the $coll is empty.
 */
final def last: A =
  if (this.isEmpty) throw new NoSuchElementException
  else this (this.length - 1)
/** Optionally selects the last element.
 * $orderDependent
 *
 * @return the last element of this $coll if it is nonempty, `None` if it is empty.
 */
final def lastOption: Option[A] =
  if (this.isEmpty) None
  else Some(this (this.length - 1))
/** Selects all elements except the last.
 * $orderDependent
 *
 * @return a $coll consisting of all elements of this $coll except the last one.
 */
final def init: Vector[A] = this.take(this.length - 1)
/** Computes a prefix scan of the elements of the collection.
 *
 * Note: The neutral element `z` may be applied more than once.
 *
 * @tparam B element type of the resulting collection
 * @param z neutral element for the operator `op`
 * @param op the associative operator for the scan
 * @return a new $coll containing the prefix scan of the elements in this $coll
 */
def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Vector[B] = this.scanLeft(z)(op)
/** Produces a collection containing cumulative results of applying the
 * operator going left to right, starting with the initial value `z`.
 *
 * The result has length `length + 1`; its first element is `z`, matching the
 * standard `scanLeft` contract.
 *
 * $willNotTerminateInf
 * $orderDependent
 *
 * @tparam B the type of the elements in the resulting collection
 * @param z the initial value
 * @param op the binary operator applied to the intermediate result and the element
 * @return collection with intermediate results
 */
def scanLeft[B: ClassTag](z: B)(op: (B, A) => B): Vector[B] = {
  val build: VectorBuilder[B] = genericBuilder[B]
  val forward: VectorIterator[A] = this.iterator
  var acc: B = z
  // FIX: the initial value is itself the first cumulative result; it was
  // previously omitted, yielding a result of length n instead of n + 1.
  build += acc
  while (forward.hasNext) {
    acc = op(acc, forward.next())
    build += acc
  }
  build.result()
}
/** Produces a collection containing cumulative results of applying the operator
 * going right to left. The head of the collection is the last cumulative result.
 * $willNotTerminateInf
 * $orderDependent
 *
 * Example:
 * {{{
 * Vector(1, 2, 3, 4).scanRight(0)(_ + _) == Vector(10, 9, 7, 4, 0)
 * }}}
 *
 * @tparam B the type of the elements in the resulting collection
 * @param z the initial value
 * @param op the binary operator applied to the intermediate result and the element
 * @return collection with intermediate results
 */
def scanRight[B: ClassTag](z: B)(op: (A, B) => B): Vector[B] = {
  // FIX: the previous version omitted `z` and appended the cumulative results
  // in reverse of the documented order ("the head is the last cumulative
  // result"). Accumulate right-to-left, then reverse so the total ends up at
  // the head and `z` at the end — the standard scanRight contract.
  val build: VectorBuilder[B] = genericBuilder[B]
  val backward: VectorReverseIterator[A] = this.reverseiterator
  var acc: B = z
  build += acc
  while (backward.hasNext) {
    acc = op(backward.next(), acc)
    build += acc
  }
  build.result().reverse
}
/** Builds a new collection by applying a function to all elements of this
 * $coll, memoizing `f` so it is evaluated at most once per distinct element.
 *
 * Behaves like `map` for pure `f`; useful when `f` is expensive and the $coll
 * contains many duplicate elements.
 *
 * @param f the function to apply to each element.
 * @tparam B the element type of the returned collection.
 * @return a new `Vector[B]` with `f` applied to each element, in order.
 */
def hashedmap[B: ClassTag](f: A => B): Vector[B] = {
  // FIX: the previous cache compared entries against `null` while its default
  // produced `empty.asInstanceOf[B]`, so lookups never reliably hit (and the
  // null comparison is unsound for primitive B). getOrElseUpdate performs the
  // intended compute-once-per-key memoization directly.
  val memo: mutable.HashMap[A, B] = new mutable.HashMap[A, B]
  val build: VectorBuilder[B] = genericBuilder[B]
  val forward: VectorIterator[A] = this.iterator
  while (forward.hasNext) {
    val x: A = forward.next()
    build += memo.getOrElseUpdate(x, f(x))
  }
  build.result()
}
/** Builds a new vector by applying `f` to every element, preserving order.
 *
 * @param f  the function applied to each element.
 * @tparam B the element type of the resulting vector.
 * @return   the vector of images of all elements under `f`.
 */
def map[B: ClassTag](f: A => B): Vector[B] = {
  val out: VectorBuilder[B] = genericBuilder[B]
  val it: VectorIterator[A] = this.iterator
  while (it.hasNext) {
    out += f(it.next())
  }
  out.result()
}
/** Builds a new vector by applying a partial function to every element on
 * which it is defined, preserving element order.
 *
 * @param pf the partial function that both filters and maps elements.
 * @tparam B the element type of the returned vector.
 * @return   a vector of `pf(x)` for every element `x` where `pf.isDefinedAt(x)`.
 */
def collect[B: ClassTag](pf: PartialFunction[A, B]): Vector[B] = {
  val out: VectorBuilder[B] = genericBuilder[B]
  val it: VectorIterator[A] = this.iterator
  while (it.hasNext) {
    val elem: A = it.next()
    if (pf.isDefinedAt(elem)) {
      out += pf(elem)
    }
  }
  out.result()
}
/** Builds a new vector by applying a vector-valued function to every element
 * and concatenating the resulting vectors, preserving element order.
 *
 * @param f  the function applied to each element.
 * @tparam B the element type of the returned vector.
 * @return   the concatenation of `f(x)` for each element `x` of this vector.
 */
def flatMap[B: ClassTag](f: A => Vector[B]): Vector[B] = {
  val out: VectorBuilder[B] = genericBuilder[B]
  val it: VectorIterator[A] = this.iterator
  while (it.hasNext) {
    out ++= f(it.next())
  }
  out.result()
}
/** Like `flatMap`, but memoizes the result of `f` per distinct element, so
 * `f` runs at most once for each distinct value in this $coll.
 *
 * @param f  the vector-valued function applied to each distinct element.
 * @tparam B the element type of the returned collection.
 * @return   the concatenation of (cached) `f(x)` for each element, in order.
 */
def hashedflatMap[B: ClassTag](f: A => Vector[B]): Vector[B] = {
  // Memoize f per distinct element with getOrElseUpdate. The previous
  // version checked `value(x) == null`, but the map's `default` returned a
  // non-null empty vector, so `f` was never invoked and only empty vectors
  // were concatenated — the result was always empty.
  val cache: mutable.HashMap[A, Vector[B]] = new mutable.HashMap[A, Vector[B]]
  val build: VectorBuilder[B] = genericBuilder[B]
  val forward: VectorIterator[A] = this.iterator
  while (forward.hasNext) {
    // TODO HashedflatMap: measure whether caching actually helps; if memory
    // is a concern a zero-allocation hashing scheme could be used instead.
    val x: A = forward.next()
    build ++= cache.getOrElseUpdate(x, f(x))
  }
  build.result()
}
/** Returns a new $coll containing the elements from the left hand operand followed by the elements from the
 * right hand operand. The element type of the $coll is the most specific superclass encompassing
 * the element types of the two operands.
 *
 * @param that the traversable to append.
 * @tparam B the element type of the returned collection.
 * @return a new collection of type `That` which contains all elements
 *         of this $coll followed by all elements of `that`.
 */
def ++[B >: A : ClassTag](that: Vector[B]): Vector[B] = {
  // Fast paths: when either side is empty the other can be returned as-is,
  // which is safe because these vectors are immutable.
  if (that.isEmpty) this.asInstanceOf[Vector[B]]
  else if (this.length == 0) {
    that.asInstanceOf[Vector[B]]
  } else {
    // Allocate a vector sized for both operands, seed it from this vector's
    // focus/display state, then splice `that` in starting at this.length.
    // NOTE(review): `transient` is copied before concatenate — presumably
    // concatenate needs the source's transient state; confirm against the
    // concatenate implementation.
    val vector: Vector[B] = new Vector[B](this.length + that.length)
    vector.initWithFocusFrom(this.asInstanceOf[Vector[B]])
    vector.transient = this.transient
    vector.concatenate(this.length, that)
    vector.asInstanceOf[Vector[B]]
  }
}
/** Selects all elements of this $coll that satisfy the predicate,
 * preserving their relative order.
 *
 * @param p the predicate used to test elements.
 * @return  a new vector of exactly those elements for which `p` holds.
 */
def filter(p: A => Boolean): Vector[A] = { // TODO Is it better to implement it via hashmaps
  val kept: VectorBuilder[A] = newBuilder
  val it: VectorIterator[A] = this.iterator
  while (it.hasNext) {
    val elem: A = it.next()
    if (p(elem)) {
      kept += elem
    }
  }
  kept.result()
}
/** Selects all elements of this $coll that do NOT satisfy the predicate,
 * preserving their relative order.
 *
 * @param p the predicate used to test elements.
 * @return  a new vector of exactly those elements for which `p` does not hold.
 */
def filterNot(p: A => Boolean): Vector[A] = this.filter(!p(_))
/** Splits this $coll into two vectors according to a predicate.
 *
 * Relative element order is preserved within each half.
 *
 * @param p the predicate on which to partition.
 * @return  a pair: first the elements satisfying `p`, then those that don't.
 */
def partition(p: A => Boolean): (Vector[A], Vector[A]) = {
  val matched: VectorBuilder[A] = newBuilder
  val rest: VectorBuilder[A] = newBuilder
  val it: VectorIterator[A] = this.iterator
  while (it.hasNext) {
    val elem: A = it.next()
    // Route each element to exactly one of the two builders.
    (if (p(elem)) matched else rest) += elem
  }
  (matched.result(), rest.result())
}
/** Partitions this $coll into a map of vectors according to a discriminator function.
 *
 * @param f the discriminator function.
 * @tparam K the type of keys returned by the discriminator function.
 * @return A map from keys to vectors such that the following invariant holds:
 *         {{{
 *         (xs groupBy f)(k) = xs filter (x => f(x) == k)
 *         }}}
 *         That is, every key `k` is bound to a vector of those elements `x`
 *         for which `f(x)` equals `k`. Unknown keys map to an empty vector
 *         via the map's `default`.
 */
def groupBy[B >: A : ClassTag, K](f: B => K): mutable.Map[K, Vector[B]] = {
  val group: mutable.HashMap[K, Vector[B]] = new mutable.HashMap[K, Vector[B]] {
    override def default(k: K) = new Vector[B](0)
  }
  val forward: VectorIterator[A] = this.iterator
  while (forward.hasNext) {
    val x: B = forward.next()
    val k: K = f(x)
    // Store the appended bucket back into the map. The previous version
    // computed `group(f(x)) :+ x` and discarded the result (Vector is
    // immutable, :+ returns a new vector), so no bucket was ever recorded
    // and the returned map was empty.
    group(k) = group(k) :+ x
  }
  group
}
/** Selects the first `n` elements.
 * $orderDependent
 *
 * @param n the number of elements to take from this $coll.
 * @return  a vector of the first `n` elements, or this whole vector if it
 *          has fewer than `n` elements; empty when `n <= 0`.
 */
def take(n: Int): Vector[A] =
  if (n <= 0) empty
  else if (n >= endIndex) this
  else takeFront0(n)
/** Selects all elements except the first `n` ones.
 * $orderDependent
 *
 * @param n the number of elements to drop from this $coll.
 * @return  a vector of all elements after the first `n`; empty when the
 *          vector has at most `n` elements, and this vector when `n <= 0`.
 */
def drop(n: Int): Vector[A] =
  if (n <= 0) this
  else if (n >= endIndex) empty
  else dropFront0(n)
/** Selects an interval of elements: all `x` with `from <= indexOf(x) < until`.
 * $orderDependent
 *
 * @param from  the lowest index to include from this $coll.
 * @param until the lowest index to EXCLUDE from this $coll.
 * @return      the elements at indices `[from, until)`.
 */
def slice(from: Int, until: Int): Vector[A] = {
  // Truncate at `until` first, then discard the leading `from` elements.
  val prefix: Vector[A] = this.take(until)
  prefix.drop(from)
}
/** Splits this $coll into two at a given position: `(take(n), drop(n))`.
 * $orderDependent
 *
 * @param n the position at which to split.
 * @return  a pair of the first `n` elements and the remaining elements.
 */
def splitAt(n: Int): (Vector[A], Vector[A]) = (this.take(n), this.drop(n))
/** Takes the longest prefix of elements that satisfy a predicate.
 * $orderDependent
 *
 * @param p the predicate used to test elements.
 * @return  the longest prefix whose elements all satisfy `p`.
 */
def takeWhile(p: A => Boolean): Vector[A] = this.take(this.prefixLength(p))
/** Drops the longest prefix of elements that satisfy a predicate.
 * $orderDependent
 *
 * @param p the predicate used to test elements.
 * @return  the suffix starting at the first element not satisfying `p`.
 */
def dropWhile(p: A => Boolean): Vector[A] = this.drop(this.prefixLength(p))
/** Splits this $coll into a prefix/suffix pair according to a predicate.
 *
 * Equivalent to `(takeWhile(p), dropWhile(p))` when `p` has no side effects.
 * $orderDependent
 *
 * @param p the test predicate.
 * @return  a pair of the longest prefix whose elements all satisfy `p`,
 *          and the rest of this $coll.
 */
def span(p: A => Boolean): (Vector[A], Vector[A]) = {
  // Compute the prefix length once. The previous takeWhile/dropWhile pair
  // scanned the prefix (and evaluated p over it) twice; behavior is
  // unchanged for a pure predicate.
  val n: Int = prefixLength(p)
  (this.take(n), this.drop(n))
}
/** Defines the prefix of this object's `toString` representation.
 *
 * @return the constant `"Vector"`, used as the leading text when this
 *         collection is rendered as a string.
 */
def stringPrefix: String = "Vector"
// Parallelizable
/** Returns a parallel implementation of this collection.
*
* For most collection types, this method creates a new parallel collection by copying
* all the elements. For these collection, `par` takes linear time. Mutable collections
* in this category do not produce a mutable parallel collection that has the same
* underlying dataset, so changes in one collection will not be reflected in the other one.
*
* Specific collections (e.g. `ParArray` or `mutable.ParHashMap`) override this default
* behaviour by creating a parallel collection which shares the same underlying dataset.
* For these collections, `par` takes constant or sublinear time.
*
* All parallel collections return a reference to themselves.
*
* @return a parallel implementation of this collection
*/
/* def par: ParRepr = {
val cb = parCombiner
for (x <- seq) cb += x
cb.result()
}*/
/** The default `par` implementation uses the combiner provided by this method
* to create a new parallel collection.
*
* @return a combiner for the parallel collection of type `ParRepr`
*/
/*
protected[this] def parCombiner: Combiner[A, ParRepr]
*/
// GenTraversableOnce
/** Tests whether this $coll is known to have a finite size.
 *
 * All strict collections are known to have finite size; this vector is
 * strict and fully materialized, so this is always `true`. (Non-strict
 * collections such as `Stream` may answer `false` while unevaluated.)
 *
 * @return `'''true'''` — this collection always has a known finite size.
 */
def hasDefiniteSize: Boolean = true
/** The size of this $coll, if it can be cheaply computed.
 *
 * The length of this vector is stored, so the size is always cheap here;
 * the -1 ("cannot be determined cheaply") case never occurs.
 *
 * @return the number of elements in this $coll.
 */
protected[Immutable] def sizeHintIfCheap: Int = this.length
/** Reduces the elements of this $coll using the specified associative binary operator,
 * combining left to right.
 *
 * $undefinedorder
 *
 * @tparam B A type parameter for the binary operator, a supertype of `A`.
 * @param op A binary operator that must be associative.
 * @return The result of applying reduce operator `op` between all the elements if the $coll is nonempty.
 * @throws UnsupportedOperationException if this $coll is empty.
 */
def reduce[B >: A](op: (B, B) => B): B = {
  // Honor the documented contract: an empty collection raises
  // UnsupportedOperationException rather than the iterator's
  // NoSuchElementException from calling next() on an empty iterator.
  if (this.isEmpty) throw new UnsupportedOperationException("empty.reduce")
  val forward: VectorIterator[A] = this.iterator
  var acc: B = forward.next()
  while (forward.hasNext) acc = op(acc, forward.next())
  acc
}
/** Reduces the elements of this $coll, if any, using the specified
 * associative binary operator.
 *
 * $undefinedorder
 *
 * @tparam B A type parameter for the binary operator, a supertype of `A`.
 * @param op A binary operator that must be associative. Note the argument
 *           order imposed by the signature: each freshly-read element is the
 *           FIRST argument and the accumulator the second — `op(element, acc)` —
 *           which differs from `reduceLeftOption`'s `op(acc, element)`.
 * @return An option value containing result of applying reduce operator `op` between all
 *         the elements if the collection is nonempty, and `None` otherwise.
 */
def reduceOption[B >: A](op: (A, B) => B): Option[B] = {
  if (this isEmpty) None
  else {
    // Seed the accumulator with the first element, then combine the
    // remaining elements in traversal order as op(element, acc).
    val forward = this.iterator
    var acc: B = forward.next()
    while (forward.hasNext) acc = op(forward.next(), acc)
    Some(acc)
  }
}
/** Folds the elements of this $coll using the specified associative binary
 * operator, seeded with the neutral element `z`.
 *
 * $undefinedorder
 * $willNotTerminateInf
 *
 * @tparam B a type parameter for the binary operator, a supertype of `A`.
 * @param z  a neutral element for the fold (e.g. 0 for addition); may be
 *           combined into the result any number of times without changing it.
 * @param op a binary operator that must be associative.
 * @return   `op` applied between `z` and all elements, or `z` if empty.
 */
final def fold[B >: A](z: B)(op: (B, B) => B): B =
  // Identical traversal to foldLeft: accumulator-first, left to right.
  this.foldLeft(z)(op)
/** Applies a binary operator to a start value and all elements of this $coll,
 * going left to right.
 *
 * Note: `/:` is alternate syntax for `foldLeft`; `z /: xs` is the same as
 * `xs foldLeft z`.
 *
 * $willNotTerminateInf
 * $orderDependentFold
 *
 * @param z the start value.
 * @param op the binary operator.
 * @tparam B the result type of the binary operator.
 * @return the result of inserting `op` between consecutive elements of this $coll,
 *         going left to right with the start value `z` on the left:
 *         {{{
 *         op(...op(op(z, x_1), x_2), ..., x_n)
 *         }}}
 *         where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
 *         Returns `z` if this $coll is empty.
 */
@tailrec final def /:[B](z: B)(op: (B, A) => B): B = {
  // Base case was missing: the previous version recursed unconditionally,
  // so the empty vector hit head/tail of empty instead of returning `z`.
  if (this.isEmpty) z
  else this.tail./:(op(z, this.head))(op)
}
/** Applies a binary operator to all elements of this $coll and a start value,
 * going right to left.
 *
 * Note: this is the symbolic alternate syntax for `foldRight`.
 * $willNotTerminateInf
 * $orderDependentFold
 *
 * @param z the start value
 * @param op the binary operator
 * @tparam B the result type of the binary operator.
 * @return the result of inserting `op` between consecutive elements of this $coll,
 *         going right to left with the start value `z` on the right:
 *         {{{
 *         op(x_1, op(x_2, ... op(x_n, z)...))
 *         }}}
 *         where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
 *         Returns `z` if this $coll is empty.
 */
@tailrec final def :\\[B](z: B)(op: (A, B) => B): B = {
  // Base case was missing: the previous version recursed unconditionally,
  // so the empty vector hit init/last of empty instead of returning `z`.
  if (this.isEmpty) z
  else this.init.:\\(op(this.last, z))(op)
}
/** Applies a binary operator to a start value and all elements of this $coll,
 * going left to right.
 *
 * $willNotTerminateInf
 * $orderDependentFold
 *
 * @param z  the start value.
 * @param op the binary operator.
 * @tparam B the result type of the binary operator.
 * @return   `op(...op(op(z, x_1), x_2), ..., x_n)`, or `z` if this $coll is empty.
 */
def foldLeft[B](z: B)(op: (B, A) => B): B = {
  var result: B = z
  val it: VectorIterator[A] = this.iterator
  while (it.hasNext) {
    result = op(result, it.next())
  }
  result
}
/** Applies a binary operator to all elements of this $coll and a start value,
 * going right to left.
 *
 * $willNotTerminateInf
 * $orderDependentFold
 *
 * @param z  the start value.
 * @param op the binary operator.
 * @tparam B the result type of the binary operator.
 * @return   `op(x_1, op(x_2, ... op(x_n, z)...))`, or `z` if this $coll is empty.
 */
def foldRight[B](z: B)(op: (A, B) => B): B = {
  var result: B = z
  val rit: VectorReverseIterator[A] = this.reverseiterator
  while (rit.hasNext) {
    result = op(rit.next(), result)
  }
  result
}
/** Aggregates the results of applying an operator to subsequent elements.
 *
 * A more general form of `fold`/`reduce`: the $coll is split into partitions,
 * each partition is folded sequentially with `seqop` starting from `z` (like
 * `foldLeft`), and the partial results are then combined with `combop` (like
 * `fold`). `combop` may be invoked any number of times, including zero.
 *
 * Example — summing the integer values of characters:
 * {{{
 * List('a', 'b', 'c').aggregate(0)({ (sum, ch) => sum + ch.toInt }, { (p1, p2) => p1 + p2 })
 * }}}
 *
 * @tparam B the type of accumulated results
 * @param z the initial value for the accumulated result of a partition —
 *          typically the neutral element of `seqop` (e.g. `Nil`, `0`);
 *          may be evaluated more than once
 * @param seqop an operator used to accumulate results within a partition
 * @param combop an associative operator used to combine results from different partitions
 */
def aggregate[B](z: => B)(seqop: (B, A) => B, combop: (B, B) => B): B = {
  var acc: B = z
  this.split.foreach { part =>
    // Fold one partition with seqop, then merge it on the RIGHT of the
    // running result. The previous version used combop(partial, acc),
    // which merged partitions in reverse order — wrong for associative
    // but non-commutative combop (e.g. concatenation). Also renamed the
    // inner accumulator, which previously shadowed the outer `acc`.
    var partial: B = z
    while (part.hasNext) partial = seqop(partial, part.next())
    acc = combop(acc, partial)
  }
  acc
}
/** Splits this vector into iterators over consecutive chunks of 32 elements
 * (used by `aggregate` to process partitions independently).
 *
 * @return one iterator per full 32-element chunk plus a final iterator over
 *         the remainder (possibly empty), or a single whole-vector iterator
 *         when the vector holds fewer than 32 elements.
 */
def split: Seq[VectorIterator[A]] = {
  val chunkSize: Int = 1 << 5
  val nsplits: Int = this.length / chunkSize
  if (nsplits > 0) {
    val splitted: ArrayBuffer[VectorIterator[A]] = new ArrayBuffer[VectorIterator[A]]
    var currentPos: Int = 0
    var i: Int = 0
    while (i < nsplits) {
      // The previous upper bound was written `currentPos + 1 << 5`, which
      // parses as `(currentPos + 1) << 5` (`+` binds tighter than `<<`),
      // producing wildly wrong chunk boundaries. Use an explicit chunk size.
      val forward: VectorIterator[A] = new VectorIterator[A](currentPos, currentPos + chunkSize)
      forward.initIteratorFrom(this)
      splitted += forward
      currentPos += chunkSize
      i += 1
    }
    // Trailing remainder (empty iterator when length is a multiple of 32,
    // matching the original behavior).
    val forward: VectorIterator[A] = new VectorIterator[A](currentPos, this.length)
    forward.initIteratorFrom(this)
    splitted += forward
    splitted
  } else {
    Seq(this.iterator)
  }
}
/** Applies a binary operator to all elements of this $coll, going right to left.
 * $willNotTerminateInf
 * $orderDependentFold
 *
 * @param op the binary operator.
 * @tparam B the result type of the binary operator.
 * @return the result of inserting `op` between consecutive elements of this $coll,
 *         going right to left:
 *         {{{
 *         op(x_1, op(x_2, ..., op(x_{n-1}, x_n)...))
 *         }}}
 *         where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
 * @throws UnsupportedOperationException if this $coll is empty.
 */
def reduceRight[B >: A](op: (A, B) => B): B = {
  // Honor the documented contract: raise UnsupportedOperationException on an
  // empty collection instead of the iterator's NoSuchElementException.
  if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceRight")
  val reverse: VectorReverseIterator[A] = this.reverseiterator
  var acc: B = reverse.next()
  while (reverse.hasNext) acc = op(reverse.next(), acc)
  acc
}
/** Optionally applies a binary operator to all elements of this $coll,
 * going right to left.
 * $willNotTerminateInf
 * $orderDependentFold
 *
 * @param op the binary operator.
 * @tparam B the result type of the binary operator.
 * @return   `Some(reduceRight-style result)` if this $coll is nonempty,
 *           `None` otherwise.
 */
def reduceRightOption[B >: A](op: (A, B) => B): Option[B] =
  if (this.isEmpty) None
  else {
    val rit: VectorReverseIterator[A] = this.reverseiterator
    var acc: B = rit.next()
    while (rit.hasNext) {
      acc = op(rit.next(), acc)
    }
    Some(acc)
  }
/** Applies a binary operator to all elements of this $coll, going left to right.
 * (The previous scaladoc said "right to left" — a copy-paste of reduceRight's doc.)
 * $willNotTerminateInf
 * $orderDependentFold
 *
 * @param op the binary operator.
 * @tparam B the result type of the binary operator.
 * @return the result of inserting `op` between consecutive elements of this $coll,
 *         going left to right:
 *         {{{
 *         op(op(...op(x_1, x_2)..., x_{n-1}), x_n)
 *         }}}
 *         where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
 * @throws UnsupportedOperationException if this $coll is empty.
 */
def reduceLeft[B >: A](op: (B, A) => B): B = {
  // Honor the documented contract: raise UnsupportedOperationException on an
  // empty collection instead of the iterator's NoSuchElementException.
  if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft")
  val forward: VectorIterator[A] = this.iterator
  var acc: B = forward.next()
  while (forward.hasNext) acc = op(acc, forward.next())
  acc
}
/** Optionally applies a binary operator to all elements of this $coll,
 * going left to right.
 * $willNotTerminateInf
 * $orderDependentFold
 *
 * @param op the binary operator.
 * @tparam B the result type of the binary operator.
 * @return   `Some(reduceLeft-style result)` if this $coll is nonempty,
 *           `None` otherwise.
 */
def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] =
  if (this.isEmpty) None
  else {
    val it: VectorIterator[A] = this.iterator
    var acc: B = it.next()
    while (it.hasNext) {
      acc = op(acc, it.next())
    }
    Some(acc)
  }
/** Counts the elements of this $coll that satisfy a predicate.
 *
 * @param p the predicate used to test elements.
 * @return  the number of elements for which `p` holds.
 */
def count(p: A => Boolean): Int = {
  var matches: Int = 0
  val it: VectorIterator[A] = this.iterator
  while (it.hasNext) {
    if (p(it.next())) matches += 1
  }
  matches
}
/** Tests whether a predicate holds for all elements of this $coll.
 *
 * $mayNotTerminateInf
 *
 * @param p the predicate used to test elements.
 * @return  `true` if this $coll is empty or `p` holds for every element;
 *          `false` otherwise (traversal stops at the first failure).
 */
def forall(p: A => Boolean): Boolean = {
  val it: VectorIterator[A] = this.iterator
  var holds: Boolean = true
  // Short-circuit: stop as soon as one element fails the predicate.
  while (holds && it.hasNext) holds = p(it.next())
  holds
}
/** Tests whether a predicate holds for at least one element of this $coll.
 *
 * $mayNotTerminateInf
 *
 * @param p the predicate used to test elements.
 * @return  `true` if `p` holds for at least one element, `false` otherwise
 *          (traversal stops at the first match).
 */
def exists(p: A => Boolean): Boolean = {
  val it: VectorIterator[A] = this.iterator
  var found: Boolean = false
  // Short-circuit: stop as soon as one element satisfies the predicate.
  while (!found && it.hasNext) found = p(it.next())
  found
}
/** Finds the first element of the $coll satisfying a predicate, if any.
 *
 * $mayNotTerminateInf
 * $orderDependent
 *
 * @param p the predicate used to test elements.
 * @return  `Some` of the first element satisfying `p`, or `None` if none does.
 */
def find(p: A => Boolean): Option[A] = {
  val it: VectorIterator[A] = this.iterator
  var result: Option[A] = None
  // Short-circuit on the first match.
  while (result.isEmpty && it.hasNext) {
    val elem: A = it.next()
    if (p(elem)) result = Some(elem)
  }
  result
}
/** Copies the elements of this $coll to an array.
 * Fills the given array `xs` with values of this $coll.
 * Copying stops once either the end of this $coll or the end of the target
 * array is reached.
 *
 * @param xs the array to fill.
 * @tparam B the type of the elements of the target array.
 */
def copyToArray[B >: A](xs: Array[B]): Unit = {
  val forward: VectorIterator[A] = this.iterator
  var index: Int = 0
  // Bound by xs.length: the previous version wrote unconditionally and
  // threw ArrayIndexOutOfBoundsException when xs was shorter than this
  // collection, contradicting the documented "stop at end of array".
  while (forward.hasNext && index < xs.length) {
    xs(index) = forward.next()
    index += 1
  }
}
/** Copies the elements of this $coll to an array.
 * Fills the given array `xs` with values of this $coll, beginning at array
 * index `start`. Copying stops once either the end of this $coll or the end
 * of the target array is reached.
 *
 * @param xs the array to fill.
 * @param start the starting index IN THE ARRAY.
 * @tparam B the type of the elements of the target array.
 */
def copyToArray[B >: A](xs: Array[B], start: Int): Unit = {
  // Per the documented contract, `start` is the destination offset in `xs`.
  // The previous version instead skipped `start` elements of the collection
  // and wrote from xs(0), and performed no bounds check on xs.
  val forward: VectorIterator[A] = this.iterator
  var index: Int = start
  while (forward.hasNext && index < xs.length) {
    xs(index) = forward.next()
    index += 1
  }
}
/** Copies the elements of this $coll to an array.
 * Fills the given array `xs` with at most `len` elements of this $coll,
 * starting at array position `start`. Copying stops once the end of this
 * $coll or of the target array is reached, or `len` elements were copied.
 *
 * @param xs the array to fill.
 * @param start the starting index IN THE ARRAY.
 * @param len the maximal number of elements to copy.
 * @tparam B the type of the elements of the target array.
 */
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = {
  // Per the documented contract, `start` is the destination offset in `xs`.
  // The previous version used it as a collection offset and wrote from
  // xs(0) without bounds checks. `toLong` guards start + len overflow.
  val forward: VectorIterator[A] = this.iterator
  val stop: Int = math.min(xs.length.toLong, start.toLong + len).toInt
  var index: Int = start
  while (forward.hasNext && index < stop) {
    xs(index) = forward.next()
    index += 1
  }
}
/** Displays all elements of this $coll in a string using start, end, and
 * separator strings.
 *
 * @param start the starting string.
 * @param sep   the separator string.
 * @param end   the ending string.
 * @return      `start` + elements joined by `sep` + `end`.
 * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"`
 */
def mkString(start: String, sep: String, end: String): String =
  this.addString(new StringBuilder(), start, sep, end).toString
/** Displays all elements of this $coll in a string using a separator string.
 *
 * @param sep the separator string.
 * @return    element string representations joined by `sep`.
 * @example `List(1, 2, 3).mkString("|") = "1|2|3"`
 */
def mkString(sep: String): String = this.mkString("", sep, "")
/** Displays all elements of this $coll in a string, with no separator.
 *
 * @return element string representations concatenated back to back.
 */
def mkString: String = this.mkString("")
/** Appends all elements of this $coll to a string builder using start, end,
 * and separator strings: `start`, then the elements' `toString` values
 * separated by `sep`, then `end`.
 *
 * Example:
 * {{{
 * scala> List(1,2,3,4).addString(new StringBuilder(), "List(", ", ", ")")
 * res5: StringBuilder = List(1, 2, 3, 4)
 * }}}
 *
 * @param b     the string builder to which elements are appended.
 * @param start the starting string.
 * @param sep   the separator string.
 * @param end   the ending string.
 * @return      the string builder `b` to which elements were appended.
 */
def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
  b.append(start)
  var isFirst = true
  for (x <- self) {
    // Prefix every element except the first with the separator.
    if (isFirst) isFirst = false
    else b.append(sep)
    b.append(x)
  }
  b.append(end)
  b
}
/** Appends all elements of this $coll to a string builder, separated by `sep`.
 *
 * Example:
 * {{{
 * scala> List(1,2,3,4).addString(new StringBuilder(), ", ")
 * res0: StringBuilder = 1, 2, 3, 4
 * }}}
 *
 * @param b   the string builder to which elements are appended.
 * @param sep the separator string.
 * @return    the string builder `b` to which elements were appended.
 */
def addString(b: StringBuilder, sep: String): StringBuilder = this.addString(b, "", sep, "")
/** Appends all elements of this $coll to a string builder with no separator.
 *
 * Example:
 * {{{
 * scala> List(1,2,3,4).addString(new StringBuilder())
 * h: StringBuilder = 1234
 * }}}
 *
 * @param b the string builder to which elements are appended.
 * @return  the string builder `b` to which elements were appended.
 */
def addString(b: StringBuilder): StringBuilder = this.addString(b, "")
/** Converts this $coll to an array.
 *
 * $willNotTerminateInf
 *
 * @tparam B the type of the elements of the array; a `ClassTag` for this
 *           type must be available.
 * @return   an array containing all elements of this $coll.
 */
def toArray[B >: A : ClassTag]: Array[B] = {
  if (!this.isTraversableAgain) this.toBuffer.toArray
  else {
    // Traversable-again path: allocate once and bulk-copy.
    val arr: Array[B] = new Array[B](size)
    this.copyToArray(arr, 0)
    arr
  }
}
/** Converts this $coll to a list.
 * $willNotTerminateInf
 *
 * @return a list containing all elements of this $coll.
 */
// Delegates to the generic `to` conversion with the List builder factory.
def toList: List[A] = to[List]
/** Converts this $coll to an indexed sequence.
 * $willNotTerminateInf
 *
 * @return an indexed sequence containing all elements of this $coll.
 */
// Delegates to the generic `to` conversion with the immutable.IndexedSeq factory.
def toIndexedSeq: immutable.IndexedSeq[A] = to[immutable.IndexedSeq]
/** Converts this $coll to a stream: the head is forced, the rest of the
 * stream is built lazily from `this.tail`.
 *
 * @return a stream containing all elements of this $coll.
 */
def toStream: Stream[A] = {
  val it = this.iterator
  if (!it.hasNext) Stream.empty[A]
  else Stream.cons(it.next(), this.tail.toStream)
}
/** Returns an Iterator over the elements in this $coll.
 * $willNotTerminateInf
 *
 * @return a fresh `VectorIterator` positioned at the start of this $coll.
 */
def toIterator: VectorIterator[A] = this.iterator
/** Uses the contents of this $coll to create a new mutable buffer.
 * $willNotTerminateInf
 *
 * @return an `ArrayBuffer` (widened to `mutable.Buffer[B]`) containing all
 *         elements of this $coll.
 */
def toBuffer[B >: A]: mutable.Buffer[B] = to[ArrayBuffer].asInstanceOf[mutable.Buffer[B]]
/** Views this $coll as an unspecified `Traversable`.
 *  $willNotTerminateInf
 *
 *  Implemented lazily on top of `toStream`.
 *
 *  @return a `Traversable` with all elements of this $coll.
 */
def toTraversable: GenTraversable[A] = this.toStream
/** Views this $coll as an `Iterable`.
 *
 *  Lazy in this implementation: the result is backed by `toStream`, so the
 *  elements are not forced up front.
 *
 *  $willNotTerminateInf
 *
 *  @return an `Iterable` with all elements of this $coll.
 */
def toIterable: GenIterable[A] = this.toStream
/** Views this $coll as a sequence.
 *
 *  As with `toIterable`, this is lazy: the result is backed by `toStream`.
 *
 *  $willNotTerminateInf
 *
 *  @return a sequence with all elements of this $coll.
 */
def toSeq: GenSeq[A] = this.toStream
/** Converts this $coll to a set, dropping duplicates.
 *  $willNotTerminateInf
 *
 *  @return a set with the distinct elements of this $coll.
 */
def toSet[B >: A]: Set[B] = this.to[immutable.Set].asInstanceOf[immutable.Set[B]]
/** Converts this $coll to a map. Only available when the element type is a
 *  `Tuple2`; each `(key, value)` pair becomes an entry in the map. Later
 *  duplicate keys overwrite earlier ones; for an unordered collection the
 *  surviving entry is unspecified.
 *
 *  $willNotTerminateInf
 *
 *  @return an immutable map with all key/value pairs of this $coll.
 */
def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V] = {
  val builder = immutable.Map.newBuilder[K, V]
  this.foreach(pair => builder += pair)
  builder.result()
}
/** Converts this $coll to a standard-library `Vector`.
 *  $willNotTerminateInf
 *
 *  @return a `scala.collection.immutable.Vector` with all elements of this $coll.
 */
def toVector: ScalaVector[A] = this.to[ScalaVector]
/** Copies all elements of this $coll into a freshly built collection.
 *
 *  $willNotTerminateInf
 *
 *  @tparam Col the target collection constructor; a `CanBuildFrom` instance
 *              for it must be available implicitly.
 *  @return a new `Col` holding all elements of this $coll.
 */
def to[Col[_]](implicit cbf: CanBuildFrom[Col[_], A, Col[A@uncheckedVariance]]): Col[A@uncheckedVariance] = {
  val builder: mutable.Builder[A, Col[A]] = cbf()
  builder ++= this.asInstanceOf[TraversableOnce[A]]
  builder.result()
}
/* --- Internal helper functions --- */

/** Switches the vector into its transient representation when it is more
 *  than one level deep and not already transient.
 */
private[Immutable] def makeTransientIfNeeded(): Unit = {
  val currentDepth = depth
  if (currentDepth > 1 && !transient) {
    copyDisplaysAndNullFocusedBranch(currentDepth, focus | focusRelax)
    transient = true
  }
}
/** Restores the canonical (non-transient) representation if necessary, then
 *  moves the focus to `index`.
 */
private[Immutable] def normalizeAndFocusOn(index: Int): Unit = {
  if (transient) {
    normalize(depth)
    transient = false
  }
  focusOn(index)
}
/** Builds a one-element vector holding `elem`. */
private def createSingletonVector[B >: A : ClassTag](elem: B): Vector[B] = {
  val singleton = new Vector[B](1)
  singleton.initSingleton(elem)
  singleton
}
/** Prepends `elem` in place.
 *
 *  Moves the focus onto the very first leaf block when it is not there
 *  already, then either grows that leaf (if it has room) or opens a new
 *  block at the front of the tree.
 */
private[Immutable] def prepend[B >: A : ClassTag](elem: B): Unit = {
  val focusedOnFirstBlock = focusStart == 0 && (focus & -32) == 0
  if (!focusedOnFirstBlock)
    normalizeAndFocusOn(0)
  val leaf = display0
  if (leaf.length < 32)
    prependOnCurrentBlock(elem, leaf.asInstanceOf[Array[B]])
  else
    prependFrontNewBlock(elem)
}
/** Inserts `elem` before the contents of the focused leaf block.
 *
 *  Allocates a leaf one element larger, writes `elem` at index 0 and shifts
 *  the previous contents right by one slot. Assumes `old_d0.length < 32`.
 */
private def prependOnCurrentBlock[B >: A](elem: B,
                                          old_d0: Array[B]): Unit = {
  val new_length: Int = old_d0.length + 1
  focusEnd = new_length
  val new_d0: Leaf = new Leaf(new_length)
  new_d0.update(0, elem.asInstanceOf[A])
  // Shift the old leaf contents one slot to the right.
  System.arraycopy(old_d0, 0, new_d0, 1, new_length - 1)
  display0 = new_d0
  makeTransientIfNeeded()
}
/** Prepends `elem` when the focused leaf is already full (32 elements),
 *  which requires opening a new block at the front of the tree.
 *
 *  Climbs the displays starting at the focus depth while each level is full
 *  (length 33 = 32 children + 1 sizes slot), lets
 *  `setupNewBlockInInitBranch` insert the new front branch at the first
 *  non-full level, and — when the tree did not grow a level — patches the
 *  copied levels above with transient size arrays.
 */
private def prependFrontNewBlock[B >: A](elem: B)(implicit ct: ClassTag[B]): Unit = {
  var currentDepth = focusDepth
  // Display of the level that may receive the new branch.
  var display: Node = currentDepth match {
    case 1 => display1
    case 2 => display2
    case 3 => display3
    case 4 => display4
    case 5 => display5
    case 6 => display6
    case 7 => display7
  }
  // Climb while the current level has no room for another child.
  while (display != null && display.length == 33) {
    currentDepth += 1
    currentDepth match {
      case 1 => display = display1
      case 2 => display = display2
      case 3 => display = display3
      case 4 => display = display4
      case 5 => display = display5
      case 6 => display = display6
      case 7 => display = display7
      case _ => throw new IllegalStateException()
    }
  }
  val oldDepth: Int = depth
  val _transient: Boolean = transient
  // May grow the tree by one level; mutates the display pointers in place.
  setupNewBlockInInitBranch(currentDepth, _transient)(ct.asInstanceOf[ClassTag[A]])
  if (oldDepth == depth) {
    var i: Int = currentDepth
    if (i < oldDepth) {
      val _focusDepth: Int = focusDepth
      var display: Node = i match {
        case 1 => display1
        case 2 => display2
        case 3 => display3
        case 4 => display4
        case 5 => display5
        case 6 => display6
        case 7 => display7
      }
      do {
        val displayLen: Int = display.length - 1
        // Levels at or above the focus depth carry a sizes array in the last slot.
        val newSizes: Array[Int] = {
          if (i >= _focusDepth) {
            makeTransientSizes(display(displayLen).asInstanceOf[Array[Int]], 1)
          }
          else {
            null
          }
        }
        val newDisplay: Node = new Node(display.length)
        // NOTE(review): only displayLen - 1 slots are copied, leaving the last
        // child slot null — confirm this is intended for the fresh front branch
        // (the equivalent loop in appendBackNewBlock does the same).
        System.arraycopy(display, 0, newDisplay, 0, displayLen - 1)
        if (i >= _focusDepth) {
          newDisplay.update(displayLen, newSizes)
        }
        i match {
          case 2 =>
            display1 = newDisplay
            display = display2
          case 3 =>
            display2 = newDisplay
            display = display3
          case 4 =>
            display3 = newDisplay
            display = display4
          case 5 =>
            display4 = newDisplay
            display = display5
          case 6 =>
            display5 = newDisplay
            display = display6
          case 7 =>
            display6 = newDisplay
            display = display7
        }
        i += 1
      } while (i < oldDepth)
    }
  }
  // The new element becomes the single occupant of the new front leaf.
  initFocus(0, 0, 1, 1, 0)
  display0.update(0, elem.asInstanceOf[A])
  transient = true
}
/** Appends `elem` in place; `_endIndex` is the size of the vector after the
 *  append (so the new element lands at index `_endIndex - 1`).
 *
 *  Moves the focus onto the last leaf block when the new element does not
 *  fall into the currently focused block, then either grows that leaf or
 *  opens a new block at the back of the tree.
 */
private[Immutable] def append[B >: A : ClassTag](elem: B, _endIndex: Int): Unit = {
  // Idiom cleanup: the original spelled these as desugared method calls
  // (`.-`, `.&`) and relied on operator precedence; the parenthesized forms
  // below are equivalent ('+'/'-' bind tighter than '^' in Scala).
  if (((focusStart + focus) ^ (_endIndex - 1)) >= 32)
    normalizeAndFocusOn(_endIndex - 1)
  // Index of the new element inside its 32-wide leaf block.
  val elemIndexInBlock = (_endIndex - focusStart) & 31
  if (elemIndexInBlock != 0)
    appendOnCurrentBlock(elem, elemIndexInBlock)
  else
    appendBackNewBlock(elem, elemIndexInBlock)
}
/** Appends `elem` into the focused leaf block at `elemIndexInBlock`.
 *
 *  Allocates a leaf one element larger, copies the old contents and writes
 *  `elem` into the new last slot. Assumes the leaf has room
 *  (`elemIndexInBlock != 0`).
 */
private def appendOnCurrentBlock[B >: A](elem: B,
                                         elemIndexInBlock: Int): Unit = {
  focusEnd = endIndex
  val d0: Leaf = new Leaf(elemIndexInBlock + 1)
  System.arraycopy(display0, 0, d0, 0, elemIndexInBlock)
  d0.update(elemIndexInBlock, elem.asInstanceOf[A])
  display0 = d0
  makeTransientIfNeeded()
}
/** Appends `elem` when the focused leaf block is full, which requires
 *  opening a new block at the back of the tree.
 *
 *  `setupNewBlockInNextBranch` inserts the new branch at the level given by
 *  the highest differing bit between the old and new relaxed indices; when
 *  the tree did not grow a level, the copied levels above are patched with
 *  transient size arrays.
 */
private def appendBackNewBlock[B >: A](elem: B,
                                       elemIndexInBlock: Int)(implicit m: ClassTag[B]): Unit = {
  val oldDepth: Int = depth
  val newRelaxedIndex: Int = endIndex - 1 - focusStart + focusRelax
  val focusJoined: Int = focus | focusRelax
  val xor: Int = newRelaxedIndex ^ focusJoined
  val _transient: Boolean = transient
  setupNewBlockInNextBranch(xor, _transient)(m.asInstanceOf[ClassTag[A]])
  if (oldDepth == depth) {
    // Level at which the new branch was inserted (5 index bits per level).
    // BUG FIX: the original chain ended with `else if (xor < (1 << 35)) 7
    // else 7`. An Int shift count is taken mod 32, so `1 << 35` is really
    // `1 << 3` = 8 and that branch was dead; both it and the fallback yield
    // 7, so collapsing them preserves behaviour while removing the
    // overflowing shift.
    var i: Int = {
      if (xor < (1 << 10)) 2
      else if (xor < (1 << 15)) 3
      else if (xor < (1 << 20)) 4
      else if (xor < (1 << 25)) 5
      else if (xor < (1 << 30)) 6
      else 7
    }
    if (i < oldDepth) {
      val _focusDepth: Int = focusDepth
      var display: Node = i match {
        case 1 => display1
        case 2 => display2
        case 3 => display3
        case 4 => display4
        case 5 => display5
        case 6 => display6
        case 7 => display7
      }
      do {
        val displayLen: Int = display.length - 1
        // Levels at or above the focus depth carry a sizes array in the last slot.
        val newSizes: Array[Int] = {
          if (i >= _focusDepth) {
            makeTransientSizes(display(displayLen).asInstanceOf[Array[Int]], displayLen - 1)
          }
          else {
            null
          }
        }
        val newDisplay: Node = new Node(display.length)
        System.arraycopy(display, 0, newDisplay, 0, displayLen - 1)
        if (i >= _focusDepth) {
          newDisplay.update(displayLen, newSizes)
        }
        i match {
          case 2 =>
            display1 = newDisplay
            display = display2
          case 3 =>
            display2 = newDisplay
            display = display3
          case 4 =>
            display3 = newDisplay
            display = display4
          case 5 =>
            display4 = newDisplay
            display = display5
          case 6 =>
            display5 = newDisplay
            display = display6
          case 7 =>
            display6 = newDisplay
            display = display7
        }
        i += 1
      } while (i < oldDepth)
    }
  }
  if (oldDepth == focusDepth)
    initFocus(endIndex - 1, 0, endIndex, depth, 0)
  else
    initFocus(endIndex - 1, endIndex - 1, endIndex, 1, newRelaxedIndex & -32)
  display0.update(elemIndexInBlock, elem.asInstanceOf[A])
  transient = true
}
/** Counts how many times each element appears in this collection.
 *
 *  @return a mutable map from element to occurrence count (default 0).
 */
private def occurrences[B >: A]: mutable.Map[B, Int] = {
  val counts = new mutable.HashMap[B, Int] {
    override def default(k: B) = 0
  }
  for (elem <- this.iterator)
    counts(elem) += 1
  counts
}
/** Concatenates `that` vector onto the end of this one (in place),
 *  rebalancing the touched right edge of `this` against the left edge of
 *  `that` level by level.
 *
 *  Both vectors are first normalized out of any transient state. The merge
 *  then dispatches on the larger of the two depths: the leaves are merged
 *  with `rebalancedLeafs`, each higher level with `rebalanced`, and the
 *  result is installed as the new root via `initFromRoot`.
 *
 *  @param currentSize number of elements currently in this vector
 *  @param that        the vector appended after this one
 */
private[Immutable] def concatenate[B >: A](currentSize: Int,
                                           that: Vector[B]): Unit = {
  if (this.transient) {
    this.normalize(this.depth)
    this.transient = false
  }
  if (that.transient) {
    that.normalize(that.depth)
    that.transient = false
  }
  this.focusOn(currentSize - 1)
  spire.math.max(this.depth, that.depth) match {
    case 1 =>
      val concat: Node = rebalancedLeafs(display0, that.display0.asInstanceOf[Leaf])
      initFromRoot(concat, 1)
    case 2 =>
      var d0: Leaf = null
      var d1: Node = null
      // NOTE(review): this condition tests `focus | focusRelax` while cases
      // 3-7 test only `focus`; left as-is, but the asymmetry looks accidental.
      if (((that.focus | that.focusRelax) & -32) == 0) {
        d1 = that.display1
        d0 = that.display0.asInstanceOf[Leaf]
      } else {
        if (that.display1 != null)
          d1 = that.display1
        if (d1 == null)
          d0 = that.display0.asInstanceOf[Leaf]
        else
          d0 = d1(0).asInstanceOf[Leaf]
      }
      // BUG FIX: these rebalance/initFromRoot steps were previously inside
      // the `else` branch above, so when the condition was true the method
      // set d0/d1 and then silently did nothing. Moved out of the `else` to
      // match the structure of cases 3-7.
      var concat: Node = rebalancedLeafs(this.display0, d0)
      concat = rebalanced(this.display1, concat, that.display1, 2)
      if (concat.length == 2)
        initFromRoot(concat(0).asInstanceOf[Node], 2)
      else
        initFromRoot(withComputedSizes(concat, 3), 3)
    case 3 =>
      var d0: Leaf = null
      var d1: Node = null
      var d2: Node = null
      if ((that.focus & -32) == 0) {
        d2 = that.display2
        d1 = that.display1
        d0 = that.display0.asInstanceOf[Leaf]
      } else {
        if (that.display2 != null)
          d2 = that.display2
        if (d2 == null)
          d1 = that.display1
        else
          d1 = d2(0).asInstanceOf[Node]
        if (d1 == null)
          d0 = that.display0.asInstanceOf[Leaf]
        else
          d0 = d1(0).asInstanceOf[Leaf]
      }
      var concat: Node = rebalancedLeafs(this.display0, d0)
      concat = rebalanced(this.display1, concat, d1, 2)
      concat = rebalanced(this.display2, concat, that.display2, 3)
      if (concat.length == 2)
        initFromRoot(concat(0).asInstanceOf[Node], 3)
      else
        initFromRoot(withComputedSizes(concat, 4), 4)
    case 4 =>
      var d0: Leaf = null
      var d1: Node = null
      var d2: Node = null
      var d3: Node = null
      if ((that.focus & -32) == 0) {
        d3 = that.display3
        d2 = that.display2
        d1 = that.display1
        d0 = that.display0.asInstanceOf[Leaf]
      } else {
        if (that.display3 != null)
          d3 = that.display3
        if (d3 == null)
          d2 = that.display2
        else
          d2 = d3(0).asInstanceOf[Node]
        if (d2 == null)
          d1 = that.display1
        else
          d1 = d2(0).asInstanceOf[Node]
        if (d1 == null)
          d0 = that.display0.asInstanceOf[Leaf]
        else
          d0 = d1(0).asInstanceOf[Leaf]
      }
      var concat: Node = rebalancedLeafs(this.display0, d0)
      concat = rebalanced(this.display1, concat, d1, 2)
      concat = rebalanced(this.display2, concat, d2, 3)
      concat = rebalanced(this.display3, concat, that.display3, 4)
      if (concat.length == 2)
        initFromRoot(concat(0).asInstanceOf[Node], 4)
      else
        initFromRoot(withComputedSizes(concat, 5), 5)
    case 5 =>
      var d0: Leaf = null
      var d1: Node = null
      var d2: Node = null
      var d3: Node = null
      var d4: Node = null
      if ((that.focus & -32) == 0) {
        d4 = that.display4
        d3 = that.display3
        d2 = that.display2
        d1 = that.display1
        d0 = that.display0.asInstanceOf[Leaf]
      } else {
        if (that.display4 != null)
          d4 = that.display4
        if (d4 == null)
          d3 = that.display3
        else
          d3 = d4(0).asInstanceOf[Node]
        if (d3 == null)
          d2 = that.display2
        else
          d2 = d3(0).asInstanceOf[Node]
        if (d2 == null)
          d1 = that.display1
        else
          d1 = d2(0).asInstanceOf[Node]
        if (d1 == null)
          d0 = that.display0.asInstanceOf[Leaf]
        else
          d0 = d1(0).asInstanceOf[Leaf]
      }
      var concat: Node = rebalancedLeafs(this.display0, d0)
      concat = rebalanced(this.display1, concat, d1, 2)
      concat = rebalanced(this.display2, concat, d2, 3)
      concat = rebalanced(this.display3, concat, d3, 4)
      concat = rebalanced(this.display4, concat, d4, 5)
      if (concat.length == 2)
        initFromRoot(concat(0).asInstanceOf[Node], 5)
      else
        initFromRoot(withComputedSizes(concat, 6), 6)
    case 6 =>
      var d0: Leaf = null
      var d1: Node = null
      var d2: Node = null
      var d3: Node = null
      var d4: Node = null
      var d5: Node = null
      if ((that.focus & -32) == 0) {
        d5 = that.display5
        d4 = that.display4
        d3 = that.display3
        d2 = that.display2
        d1 = that.display1
        d0 = that.display0.asInstanceOf[Leaf]
      } else {
        if (that.display5 != null)
          d5 = that.display5
        if (d5 == null)
          d4 = that.display4
        else
          d4 = d5(0).asInstanceOf[Node]
        if (d4 == null)
          d3 = that.display3
        else
          d3 = d4(0).asInstanceOf[Node]
        if (d3 == null)
          d2 = that.display2
        else
          d2 = d3(0).asInstanceOf[Node]
        if (d2 == null)
          d1 = that.display1
        else
          d1 = d2(0).asInstanceOf[Node]
        if (d1 == null)
          d0 = that.display0.asInstanceOf[Leaf]
        else
          d0 = d1(0).asInstanceOf[Leaf]
      }
      var concat: Node = rebalancedLeafs(this.display0, d0)
      concat = rebalanced(this.display1, concat, d1, 2)
      concat = rebalanced(this.display2, concat, d2, 3)
      concat = rebalanced(this.display3, concat, d3, 4)
      concat = rebalanced(this.display4, concat, d4, 5)
      concat = rebalanced(this.display5, concat, that.display5, 6)
      if (concat.length == 2)
        initFromRoot(concat(0).asInstanceOf[Node], 6)
      else
        initFromRoot(withComputedSizes(concat, 7), 7)
    case 7 =>
      var d0: Leaf = null
      var d1: Node = null
      var d2: Node = null
      var d3: Node = null
      var d4: Node = null
      var d5: Node = null
      var d6: Node = null
      if ((that.focus & -32) == 0) {
        d6 = that.display6
        d5 = that.display5
        d4 = that.display4
        d3 = that.display3
        d2 = that.display2
        d1 = that.display1
        d0 = that.display0.asInstanceOf[Leaf]
      } else {
        if (that.display6 != null)
          d6 = that.display6
        if (d6 == null)
          d5 = that.display5
        else
          d5 = d6(0).asInstanceOf[Node]
        if (d5 == null)
          d4 = that.display4
        else
          d4 = d5(0).asInstanceOf[Node]
        if (d4 == null)
          d3 = that.display3
        else
          d3 = d4(0).asInstanceOf[Node]
        if (d3 == null)
          d2 = that.display2
        else
          d2 = d3(0).asInstanceOf[Node]
        if (d2 == null)
          d1 = that.display1
        else
          d1 = d2(0).asInstanceOf[Node]
        if (d1 == null)
          d0 = that.display0.asInstanceOf[Leaf]
        else
          d0 = d1(0).asInstanceOf[Leaf]
      }
      var concat: Node = rebalancedLeafs(this.display0, d0)
      concat = rebalanced(this.display1, concat, d1, 2)
      concat = rebalanced(this.display2, concat, d2, 3)
      concat = rebalanced(this.display3, concat, d3, 4)
      concat = rebalanced(this.display4, concat, d4, 5)
      concat = rebalanced(this.display5, concat, d5, 6)
      concat = rebalanced(this.display6, concat, that.display6, 7)
      if (concat.length == 2)
        initFromRoot(concat(0).asInstanceOf[Node], 7)
      else
        initFromRoot(withComputedSizes(concat, 8), 8)
    case _ => throw new IllegalStateException("depth = " + spire.math.max(this.depth, that.depth).toString)
  }
}
/** Rebalances one tree level during concatenation.
 *
 *  Walks the children of `displayLeft`, `concat` (the already-merged lower
 *  level) and `displayRight` in order, repacking them into fresh 32-wide
 *  blocks arranged as a two-level `top`/`mid` structure, and returns `top`.
 *
 *  NOTE(review) — several expressions here look like Scala operator
 *  precedence bugs ('+'/'-' bind tighter than shifts, and shifts bind
 *  tighter than '&'); flagged inline but left unchanged because the intended
 *  values cannot be confirmed from this file alone.
 */
private def rebalanced[B >: A](displayLeft: Node,
                               concat: Node,
                               displayRight: Node,
                               currentDepth: Int)(implicit ct: ClassTag[B]): Node = {
  val leftLength: Int = {
    if (displayLeft == null)
      0
    else
      displayLeft.length - 1
  }
  val concatLength: Int = {
    if (concat == null)
      0
    else
      concat.length - 1
  }
  val rightLength: Int = {
    if (displayRight == null)
      0
    else
      displayRight.length - 1
  }
  val branching: Int = computeBranching(displayLeft, concat, displayRight, currentDepth)
  // NOTE(review): parses as `branching >> (10 + (if ((branching & (1 << 9)) == 0) 1 else 2))`;
  // the conventional RRB formula would be `(branching >> 10) + (if ((branching & ((1 << 10) - 1)) == 0) 1 else 2)`.
  val top: Node = new Node(branching >> 10 + (if ((branching & 1 << 10 - 1) == 0) 1 else 2))
  // NOTE(review): parses as `(branching + 31) >> (5 + 1)`; likely intended
  // `((branching + 31) >> 5) + 1`.
  var mid: Node = new Node(if ((branching >> 10) == 0) (branching + 31) >> 5 + 1 else 33)
  var bot: Node = null
  var iSizes: Int = 0
  var iTop: Int = 0
  var iMid: Int = 0
  var iBot: Int = 0
  var i: Int = 0
  var j: Int = 0
  var d: Int = 0
  var currentDisplay: Node = null
  var displayEnd: Int = 0
  // d selects the source being drained: 0 = left edge, 1 = merged lower
  // level, 2 = right edge.
  do {
    d match {
      case 0 =>
        if (displayLeft != null) {
          currentDisplay = displayLeft
          if (concat == null)
            displayEnd = leftLength
          else
            displayEnd = leftLength - 1
        }
      case 1 =>
        if (concat == null)
          displayEnd = 0
        else {
          currentDisplay = concat
          displayEnd = concatLength
        }
        i = 0
      case 2 =>
        if (displayRight != null) {
          currentDisplay = displayRight
          displayEnd = rightLength
          i = if (concat == null) 0 else 1
        }
    }
    while (i < displayEnd) {
      val displayValue: Node = currentDisplay(i).asInstanceOf[Node]
      val displayValueEnd: Int = {
        if (currentDepth == 2)
          displayValue.length
        else
          displayValue.length - 1
      }
      if (((iBot | j) == 0) && (displayValueEnd == 32)) {
        // Full, untouched block: reuse it as-is instead of copying.
        if ((currentDepth != 2) && (bot != null)) {
          // NOTE(review): the result of withComputedSizes is discarded here
          // and below — only safe if it mutates `bot` in place; confirm.
          withComputedSizes(bot, currentDepth - 1)
          bot = null
        }
        mid.update(iMid, displayValue)
        i += 1
        iMid += 1
        iSizes += 1
      } else {
        // Partially-filled block: copy element runs into `bot`.
        val numElementsToCopy: Int = spire.math.min(displayValueEnd - j, 32 - iBot)
        if (iBot == 0) {
          if ((currentDepth != 2) && (bot != null))
            withComputedSizes(bot, currentDepth - 1)
          // NOTE(review): `iTop << 10` here vs `iTop << 5` at the
          // remainingBranches computation below — the two disagree.
          bot = new Node(
            spire.math.min(branching - (iTop << 10) - (iMid << 5), 32)
            + (if (currentDepth == 2) 0 else 1)
          )
          mid.update(iMid, bot)
        }
        System.arraycopy(displayValue, j, bot, iBot, numElementsToCopy)
        j += numElementsToCopy
        iBot += numElementsToCopy
        if (j == displayValueEnd) {
          i += 1
          j = 0
        }
        if (iBot == 32) {
          iMid += 1
          iBot = 0
          iSizes += 1
          if (currentDepth != 2 && bot != null)
            withComputedSizes(bot, currentDepth - 1)
        }
      }
      if (iMid == 32) {
        top.update(iTop, withComputedSizes(mid, currentDepth))
        iTop += 1
        iMid = 0
        // NOTE(review): `iTop << 5` is suspicious — elsewhere the top level
        // is weighted with `iTop << 10` (1024 slots per top entry).
        val remainingBranches =
          branching - (iTop << 5 | (iMid << 5) | iBot)
        if (remainingBranches > 0)
          mid = new Node(
            // NOTE(review): parses as `(remainingBranches + 63) >> 5`;
            // compare with the `(branching + 31) >> 5 + 1` sizing above.
            if ((remainingBranches >> 10) == 0)
              remainingBranches + 63 >> 5 else 33
          )
        else
          mid = null
      }
    }
    d += 1
  } while (d < 3)
  if (currentDepth != 2 && bot != null)
    withComputedSizes(bot, currentDepth - 1)
  if (mid != null)
    top.update(iTop, withComputedSizes(mid, currentDepth))
  top
}
/** Merges the rightmost leaf of this vector with the leftmost leaf of the
 *  other vector during concatenation.
 *
 *  Returns a small parent `Node`: either two untouched leaves (left already
 *  full), one merged leaf (combined fits in 32), or two repacked leaves
 *  (combined overflows 32). The extra slot in `Node(n + 1)` is the
 *  (still-null) sizes slot filled in later by the caller.
 */
private def rebalancedLeafs[B >: A](displayLeft: Array[B],
                                    displayRight: Array[B]
                                   ): Node = {
  val leftLength = displayLeft.length
  val rightLength = displayRight.length
  if (leftLength == 32) {
    // Left leaf is full: keep both leaves as-is.
    val top = new Node(3)
    top.update(0, displayLeft)
    top.update(1, displayRight)
    top
  } else {
    if (leftLength + rightLength <= 32) {
      // Both leaves fit into a single 32-wide block.
      val mergedDisplay = new Node(leftLength + rightLength)
      System.arraycopy(displayLeft, 0, mergedDisplay, 0, leftLength)
      System.arraycopy(displayRight, 0, mergedDisplay, leftLength, rightLength)
      val top = new Node(2)
      top.update(0, mergedDisplay)
      top
    } else {
      // Overflow: pack a full 32-wide leaf plus one remainder leaf.
      val top = new Node(3)
      val arr0 = new Leaf(32)
      val arr1 = new Leaf(leftLength + rightLength - 32)
      System.arraycopy(displayLeft, 0, arr0, 0, leftLength)
      System.arraycopy(displayRight, 0, arr0, leftLength, 32 - leftLength)
      System.arraycopy(displayRight, 32 - leftLength, arr1, 0, rightLength - 32 + leftLength)
      top.update(0, arr0)
      top.update(1, arr1)
      top
    }
  }
}
/** Counts the total number of grandchildren ("branching") across the three
 *  inputs being rebalanced at `currentDepth`.
 *
 *  The last child of `displayLeft` and the first child of `displayRight`
 *  are skipped because they are already covered by `concat` (the merged
 *  lower level).
 */
private def computeBranching(displayLeft: Node,
                             concat: Node,
                             displayRight: Node,
                             currentDepth: Int): Int = {
  val leftLength =
    if (displayLeft == null)
      0
    else
      displayLeft.length - 1
  val concatLength =
    if (concat == null)
      0
    else
      concat.length - 1
  val rightLength =
    if (displayRight == null)
      0
    else
      displayRight.length - 1
  var branching = 0
  if (currentDepth == 1) {
    // At the leaf level the children themselves are the elements.
    branching = leftLength + concatLength + rightLength
    if (leftLength != 0)
      branching -= 1
    if (rightLength != 0)
      branching -= 1
  } else {
    var i = 0
    while (i < leftLength - 1) {
      branching += displayLeft(i).asInstanceOf[Node].length
      i += 1
    }
    i = 0
    while (i < concatLength) {
      // NOTE(review): cast to Leaf regardless of depth — harmless at runtime
      // (both are arrays, erasure) but Node would be the accurate type here.
      branching += concat(i).asInstanceOf[Leaf].length
      i += 1
    }
    i = 1
    while (i < rightLength) {
      branching += displayRight(i).asInstanceOf[Node].length
      i += 1
    }
    if (currentDepth != 2) {
      // Above depth 2 every child carries a sizes slot; subtract one per child.
      branching -= leftLength + concatLength + rightLength
      if (leftLength != 0)
        branching += 1
      if (rightLength != 0)
        branching += 1
    }
  }
  branching
}
/** Returns a new vector containing the first `n` elements of this one.
 *
 *  Copies the focus path of this vector into a fresh vector, truncates the
 *  focused leaf, cuts the spine above it with `cleanTopTake`, and rewrites
 *  the size arrays of every level above the focus depth to reflect the new
 *  length `n`.
 */
private def takeFront0(n: Int): Vector[A] = {
  if (transient) {
    normalize(depth)
    transient = false
  }
  val vector = new Vector[A](n)
  vector.initWithFocusFrom(this)
  if (depth > 1) {
    vector.focusOn(n - 1)
    val d0len = (vector.focus & 31) + 1
    if (d0len != 32) {
      // Truncate the focused leaf to the surviving prefix.
      val d0 = new Leaf(d0len)
      System.arraycopy(vector.display0, 0, d0, 0, d0len)
      vector.display0 = d0
    }
    val cutIndex = vector.focus | vector.focusRelax
    vector.cleanTopTake(cutIndex)
    vector.focusDepth = spire.math.min(vector.depth, vector.focusDepth)
    if (vector.depth > 1) {
      vector.copyDisplays(vector.focusDepth, cutIndex)
      var i = vector.depth
      var offset = 0
      var display: Node = null
      // Rewrite the sizes arrays from the root down to the focus depth.
      while (i > vector.focusDepth) {
        i match {
          case 2 => display = vector.display1
          case 3 => display = vector.display2
          case 4 => display = vector.display3
          case 5 => display = vector.display4
          case 6 => display = vector.display5
          case 7 => display = vector.display6
        }
        val oldSizes = display(display.length - 1).asInstanceOf[Array[Int]]
        val newLen = (vector.focusRelax >> 5 * (i - 1) & 31) + 1
        val newSizes = new Array[Int](newLen)
        System.arraycopy(oldSizes, 0, newSizes, 0, newLen - 1)
        newSizes.update(newLen - 1, n - offset)
        if (newLen > 1)
          offset += newSizes(newLen - 2)
        val newDisplay = new Node(newLen + 1)
        System.arraycopy(display, 0, newDisplay, 0, newLen)
        newDisplay.update(newLen - 1, null)
        newDisplay.update(newLen, newSizes)
        // NOTE(review): the read above maps i=2 -> display1 but this
        // write-back maps i=2 -> display2 (reads and writes are shifted by
        // one level relative to each other); one of the two mappings looks
        // off by one — confirm against the intended display numbering.
        i match {
          case 1 => vector.display1 = newDisplay
          case 2 => vector.display2 = newDisplay
          case 3 => vector.display3 = newDisplay
          case 4 => vector.display4 = newDisplay
          case 5 => vector.display5 = newDisplay
          case 6 => vector.display6 = newDisplay
          case 7 => vector.display7 = newDisplay
        }
        i -= 1
      }
      vector.stabilizeDisplayPath(vector.depth, cutIndex)
      vector.focusEnd = n
    } else
      vector.focusEnd = n
  } else if (n != 32) {
    // Depth 1: a single leaf suffices; truncate it unless already full.
    val d0 = new Leaf(n)
    System.arraycopy(vector.display0, 0, d0, 0, n)
    vector.display0 = d0
    vector.initFocus(0, 0, n, 1, 0)
  }
  vector
}
/** Returns a new vector with the first `n` elements of this one removed.
 *
 *  Copies the focus path into a fresh vector, trims the focused leaf's
 *  prefix, cuts the spine with `cleanTopDrop`, then rebuilds every level
 *  above the leaf so that slot 0 points at the surviving child path and the
 *  size arrays are recomputed via `withComputedSizes`.
 */
private def dropFront0(n: Int): Vector[A] = { // Need to fix, since depth = 1 means there is still a node
  if (transient) {
    normalize(depth)
    transient = false
  }
  val vector: Vector[A] = new Vector[A](this.endIndex - n)
  vector.initWithFocusFrom(this)
  //if (vector.depth >= 1) { // vector.depth > 1
  vector.focusOn(n)
  val cutIndex: Int = vector.focus | vector.focusRelax
  val d0Start: Int = cutIndex & 31
  if (d0Start != 0) {
    // Drop the leading d0Start elements from the focused leaf.
    val d0length: Int = vector.display0.length - d0Start
    val d0: Leaf = new Leaf(d0length)
    System.arraycopy(vector.display0, d0Start, d0, 0, d0length)
    vector.display0 = d0
  }
  vector.cleanTopDrop(cutIndex)
  // if (vector.depth >= 1) { // vector.depth > 1
  var i: Int = 2
  var display: Node = vector.display1
  // Rebuild each level: keep the children after the split point and splice
  // the (already trimmed) lower level into slot 0.
  while (i <= vector.depth) {
    val splitStart: Int = cutIndex >> 5 * (i - 1) & 31
    val newLen: Int = display.length - splitStart - 1
    val newDisplay: Node = new Node(newLen + 1)
    // Copies newLen - 1 slots, deliberately excluding the old sizes slot;
    // withComputedSizes below fills in fresh sizes.
    System.arraycopy(display, splitStart + 1, newDisplay, 1, newLen - 1)
    i match {
      case 2 =>
        newDisplay.update(0, vector.display0)
        vector.display1 = withComputedSizes(newDisplay, 2)
        display = vector.display2
      case 3 =>
        newDisplay.update(0, vector.display1)
        vector.display2 = withComputedSizes(newDisplay, 3)
        display = vector.display3
      case 4 =>
        newDisplay.update(0, vector.display2)
        vector.display3 = withComputedSizes(newDisplay, 4)
        display = vector.display4
      case 5 =>
        newDisplay.update(0, vector.display3)
        vector.display4 = withComputedSizes(newDisplay, 5)
        display = vector.display5
      case 6 =>
        newDisplay.update(0, vector.display4)
        vector.display5 = withComputedSizes(newDisplay, 6)
        display = vector.display6
      case 7 =>
        newDisplay.update(0, vector.display5)
        vector.display6 = withComputedSizes(newDisplay, 7)
    }
    i += 1
  }
  // }
  vector.initFocus(0, 0, vector.display0.length, 1, 0)
  // }
  //else
  /*{
  val newLen: Int = vector.display0.length - n
  val d0: Leaf = new Leaf(newLen)
  System.arraycopy(vector.display0, n, d0, 0, newLen)
  vector.display0 = d0
  vector.initFocus(0, 0, newLen, 1, 0)
  }
  */
  vector
}
/** Debug-only consistency check of the vector's internal structure.
 *
 *  No-op unless `Vector.compileAssertions` is set. Verifies the display
 *  chain (each level points at / nulls out the focused child depending on
 *  transience), the focus bookkeeping, and — via `checkSizes` — that every
 *  sizes array agrees with the actual subtree sizes.
 */
private[Immutable] final def assertVectorInvariant(): Unit = { // TODO Need to update to to my design of the data structure
  if (Vector.compileAssertions) {
    // NOTE(review): bounds say depth <= 6 although displays go up to
    // display7 — covered by the TODO above.
    assert(0 <= depth && depth <= 6, depth)
    assert(isEmpty == (depth == 0), (isEmpty, depth))
    assert(isEmpty == (length == 0), (isEmpty, length))
    assert(length == endIndex, (length, endIndex))
    // NOTE(review): the second disjunct of each line below uses `depth > 0`;
    // the analogous invariant in scala.collection.immutable.Vector uses
    // `depth > N` per level, which would be a stronger (and likely intended)
    // check.
    assert((depth <= 0 && display0 == null) || (depth > 0 && display0 != null))
    assert((depth <= 1 && display1 == null) || (depth > 0 && display1 != null))
    assert((depth <= 2 && display2 == null) || (depth > 0 && display2 != null))
    assert((depth <= 3 && display3 == null) || (depth > 0 && display3 != null))
    assert((depth <= 4 && display4 == null) || (depth > 0 && display4 != null))
    assert((depth <= 5 && display5 == null) || (depth > 0 && display5 != null))
    if (!transient) {
      // Canonical form: each display slot on the focus path points at the
      // child display.
      if (display5 != null) {
        assert(display4 != null)
        if (focusDepth <= 5) assert(display5((focusRelax >> 25) & 31) == display4)
        else assert(display5((focus >> 25) & 31) == display4)
      }
      if (display4 != null) {
        assert(display3 != null)
        if (focusDepth <= 4) assert(display4((focusRelax >> 20) & 31) == display3)
        else assert(display4((focus >> 20) & 31) == display3)
      }
      if (display3 != null) {
        assert(display2 != null)
        if (focusDepth <= 3) assert(display3((focusRelax >> 15) & 31) == display2)
        else assert(display3((focus >> 15) & 31) == display2)
      }
      if (display2 != null) {
        assert(display1 != null)
        if (focusDepth <= 2) assert(display2((focusRelax >> 10) & 31) == display1)
        else assert(display2((focus >> 10) & 31) == display1)
      }
      if (display1 != null) {
        assert(display0 != null)
        if (focusDepth <= 1) assert(display1((focusRelax >> 5) & 31) == display0)
        else assert(display1((focus >> 5) & 31) == display0)
      }
    } else {
      // Transient form: the focused branch is nulled out in each parent.
      assert(depth > 1)
      if (display5 != null) {
        assert(display4 != null)
        if (focusDepth <= 5) assert(display5((focusRelax >> 25) & 31) == null)
        else assert(display5((focus >> 25) & 31) == null)
      }
      if (display4 != null) {
        assert(display3 != null)
        if (focusDepth <= 4) assert(display4((focusRelax >> 20) & 31) == null)
        else assert(display4((focus >> 20) & 31) == null)
      }
      if (display3 != null) {
        assert(display2 != null)
        if (focusDepth <= 3) assert(display3((focusRelax >> 15) & 31) == null)
        else assert(display3((focus >> 15) & 31) == null)
      }
      if (display2 != null) {
        assert(display1 != null)
        if (focusDepth <= 2) assert(display2((focusRelax >> 10) & 31) == null)
        else assert(display2((focus >> 10) & 31) == null)
      }
      if (display1 != null) {
        assert(display0 != null)
        if (focusDepth <= 1) assert(display1((focusRelax >> 5) & 31) == null)
        else assert(display1((focus >> 5) & 31) == null)
      }
    }
    assert(0 <= focusStart && focusStart <= focusEnd && focusEnd <= endIndex, (focusStart, focusEnd, endIndex))
    assert(focusStart == focusEnd || focusEnd != 0, (focusStart, focusEnd))
    assert(0 <= focusDepth && focusDepth <= depth, scala.Tuple2(focusDepth, depth))
    // Recursively checks that the sizes array of each node (if present)
    // matches the element counts of its subtrees; `_endIndex` is the number
    // of elements expected under `node`.
    def checkSizes(node: Node, currentDepth: Int, _endIndex: Int): Unit = {
      if (currentDepth > 1) {
        if (node != null) {
          val sizes = node(node.length - 1).asInstanceOf[Size]
          if (sizes != null) {
            assert(node.length == sizes.length + 1)
            if (!transient)
              assert(sizes(sizes.length - 1) == _endIndex, (sizes(sizes.length - 1), _endIndex))
            var i = 0
            while (i < sizes.length - 1) {
              checkSizes(node(i).asInstanceOf[Node], currentDepth - 1, sizes(i) - (if (i == 0) 0 else sizes(i - 1)))
              i += 1
            }
            checkSizes(node(node.length - 2).asInstanceOf[Node], currentDepth - 1, if (sizes.length > 1) sizes(sizes.length - 1) - sizes(sizes.length - 2) else sizes(sizes.length - 1))
          } else {
            // No sizes array: the node must be fully balanced.
            var i = 0
            while (i < node.length - 2) {
              checkSizes(node(i).asInstanceOf[Node], currentDepth - 1, 1 << (5 * (currentDepth - 1)))
              i += 1
            }
            val expectedLast = _endIndex - (1 << (5 * (currentDepth - 1))) * (node.length - 2)
            assert(1 <= expectedLast && expectedLast <= (1 << (5 * currentDepth)))
            checkSizes(node(node.length - 2).asInstanceOf[Node], currentDepth - 1, expectedLast)
          }
        } else {
          assert(transient)
        }
      } else if (node != null) {
        assert(node.length == _endIndex)
      } else {
        assert(transient)
      }
    }
    depth match {
      case 1 => checkSizes(display1, 1, endIndex)
      case 2 => checkSizes(display2, 2, endIndex)
      case 3 => checkSizes(display3, 3, endIndex)
      case 4 => checkSizes(display4, 4, endIndex)
      case 5 => checkSizes(display5, 5, endIndex)
      case 6 => checkSizes(display6, 6, endIndex)
      case 7 => checkSizes(display7, 7, endIndex)
      case _ => ()
    }
  }
}
}
| das-projects/Cheetah | src/main/scala/Cheetah/Immutable/Vector.scala | Scala | apache-2.0 | 117,656 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.joins
import org.apache.spark.TaskContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.physical._
import org.apache.spark.sql.execution.{BinaryExecNode, SparkPlan}
import org.apache.spark.sql.execution.metric.SQLMetrics
/**
 * Performs a hash join of two child relations by first shuffling the data using the join keys.
 */
case class ShuffledHashJoinExec(
    leftKeys: Seq[Expression],
    rightKeys: Seq[Expression],
    joinType: JoinType,
    buildSide: BuildSide,
    condition: Option[Expression],
    left: SparkPlan,
    right: SparkPlan)
  extends BinaryExecNode with HashJoin {

  // SQL metrics surfaced for this operator (e.g. in the Spark UI).
  override lazy val metrics = Map(
    "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
    "buildDataSize" -> SQLMetrics.createSizeMetric(sparkContext, "data size of build side"),
    "buildTime" -> SQLMetrics.createTimingMetric(sparkContext, "time to build hash map"),
    "avgHashProbe" -> SQLMetrics.createAverageMetric(sparkContext, "avg hash probe"))

  // Both children must be hash-partitioned on their join keys so that
  // matching rows end up in the same partition pair.
  override def requiredChildDistribution: Seq[Distribution] =
    HashClusteredDistribution(leftKeys) :: HashClusteredDistribution(rightKeys) :: Nil

  /**
   * Builds the in-memory hash table for one partition of the build side,
   * recording build time and estimated size into the metrics above.
   */
  private def buildHashedRelation(iter: Iterator[InternalRow]): HashedRelation = {
    val buildDataSize = longMetric("buildDataSize")
    val buildTime = longMetric("buildTime")
    val start = System.nanoTime()
    val context = TaskContext.get()
    val relation = HashedRelation(iter, buildKeys, taskMemoryManager = context.taskMemoryManager())
    buildTime += (System.nanoTime() - start) / 1000000 // nanoseconds -> milliseconds
    buildDataSize += relation.estimatedSize
    // This relation is usually used until the end of task.
    context.addTaskCompletionListener(_ => relation.close())
    relation
  }

  /** Zips the two children partition-wise: hash the build side, then stream-join. */
  protected override def doExecute(): RDD[InternalRow] = {
    val numOutputRows = longMetric("numOutputRows")
    val avgHashProbe = longMetric("avgHashProbe")
    streamedPlan.execute().zipPartitions(buildPlan.execute()) { (streamIter, buildIter) =>
      val hashed = buildHashedRelation(buildIter)
      join(streamIter, hashed, numOutputRows, avgHashProbe)
    }
  }
}
| bravo-zhang/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/joins/ShuffledHashJoinExec.scala | Scala | apache-2.0 | 3,120 |
package com.twitter.finagle.param
import com.twitter.finagle.Stack
import com.twitter.finagle.factory.TimeoutFactory
import com.twitter.finagle.service.ExpiringService
import com.twitter.util.Duration
/**
 * Methods for configuring the session behaviour of Finagle clients.
 *
 * A session can be thought of as a logical connection wrapping a physical
 * one (a [[com.twitter.finagle.transport.Transport transport]]) and driving
 * its lifecycle: liveness, request cancellation, draining, and so on.
 *
 * @tparam A a [[Stack.Parameterized]] client to configure
 *
 * @see [[SessionPoolingParams]] for pooling-related configuration
 */
class SessionParams[A <: Stack.Parameterized[A]](self: Stack.Parameterized[A]) {

  /**
   * Configures the session acquisition `timeout` of this client
   * (default: unbounded).
   *
   * The timeout covers acquiring a service end-to-end: queueing (e.g. when
   * the connection limit is reached and requests are outstanding), physical
   * TCP connection establishment, and (amortized) resolution of logical
   * destinations. Futures returned from `factory()` are always satisfied
   * within this timeout.
   *
   * @see [[http://twitter.github.io/finagle/guide/Clients.html#timeouts-expiration]]
   */
  def acquisitionTimeout(timeout: Duration): A =
    self.configured(TimeoutFactory.Param(timeout))

  /**
   * Configures the maximum amount of time a session may remain idle before
   * being closed (default: unbounded).
   *
   * @see [[http://twitter.github.io/finagle/guide/Clients.html#timeouts-expiration]]
   */
  def maxIdleTime(timeout: Duration): A = {
    val expiring = self.params[ExpiringService.Param]
    self.configured(expiring.copy(idleTime = timeout))
  }

  /**
   * Configures the maximum amount of time a session may live before being
   * closed (default: unbounded).
   *
   * @see [[http://twitter.github.io/finagle/guide/Clients.html#timeouts-expiration]]
   */
  def maxLifeTime(timeout: Duration): A = {
    val expiring = self.params[ExpiringService.Param]
    self.configured(expiring.copy(lifeTime = timeout))
  }
}
| liamstewart/finagle | finagle-core/src/main/scala/com/twitter/finagle/param/SessionParams.scala | Scala | apache-2.0 | 2,317 |
package flagship.console.terminal
import flagship.console.input.ConsoleKey
import flagship.console.widget.Window
import java.awt.event.{MouseEvent, MouseAdapter}
/**
* User: mtrupkin
* Date: 7/5/13
*/
/** Ties a terminal to a root window: pumps input, updates and renders at a
  * fixed frame rate until either the window or the terminal is closed.
  */
class GUIConsole(val terminal: Terminal, val window: Window) {
  val framesPerSecond = 23
  // Minimum number of milliseconds between frames (~43ms for 23 fps).
  val refreshRate = (1f / framesPerSecond) * 1000
  val screen = Screen(terminal.terminalSize)
  // The event loop ends as soon as either side reports closed.
  def completed(): Boolean = (window.closed || terminal.closed)
  // Last key observed, used to detect key-state changes between frames.
  var consoleKey: Option[ConsoleKey] = None
  // Forwards terminal mouse clicks to the window.
  val mouseAdapter = new MouseAdapter {
    override def mouseClicked(e: MouseEvent) {
      window.mouseClicked(terminal.mouse)
    }
  }
  terminal.addMouseAdapter(mouseAdapter)
  // Draws the window into the off-screen buffer, then flushes it to the terminal.
  def render() {
    window.render(screen)
    terminal.flush(screen)
  }
  // Forwards any pending key press to the window and consumes it.
  def processInput() {
    for (key <- terminal.key) {
      window.keyPressed(key)
      terminal.key = None
    }
    // NOTE(review): the branches below appear to be dead code — the key was
    // consumed just above and both loop bodies are empty/commented out.
    // Candidate for removal; confirm before deleting.
    if (terminal.key != consoleKey) {
      consoleKey = terminal.key
      for (key <- consoleKey) {
        //window.keyPressed(key)
      }
    } else {
      for (key <- consoleKey) {
      }
    }
  }
  // Fixed-rate event loop. NOTE(review): this busy-waits between frames
  // (no sleep), so it will consume a full CPU core while running.
  def doEventLoop() {
    var lastUpdateTime = System.currentTimeMillis()
    while (!completed()) {
      val currentTime = System.currentTimeMillis()
      val elapsedTime = currentTime - lastUpdateTime
      if (elapsedTime > refreshRate) {
        lastUpdateTime = currentTime
        processInput
        window.update(elapsedTime.toInt)
        render
      }
    }
    terminal.close()
  }
}
| mtrupkin/brace-for-impact | console-lib/src/main/scala/flagship/console/terminal/GUIConsole.scala | Scala | mit | 1,488 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.http
import org.specs2.mutable.Specification
import play.api.{ Configuration, Environment, Mode, PlayException }
// Runs the shared spec against the current configuration path for the secret.
class ActualKeySecretConfigurationParserSpec extends SecretConfigurationParserSpec {
  override def secretKey: String = "play.http.secret.key"
}
// Runs the shared spec against the deprecated configuration path. The current
// key is explicitly nulled out so only the legacy path is consulted.
class DeprecatedKeySecretConfigurationParserSpec extends SecretConfigurationParserSpec {
  override def secretKey: String = "play.crypto.secret"
  override def parseSecret(mode: Mode.Mode, secret: Option[String] = None) = {
    HttpConfiguration.fromConfiguration(
      Configuration.reference ++ Configuration.from(
        secret.map(secretKey -> _).toMap ++ Map(
          "play.http.secret.key" -> null
        )
      ),
      Environment.simple(mode = mode)
    ).secret.secret
  }
}
/**
 * Shared behavior for parsing the application secret from configuration,
 * parameterized over the configuration path supplying the secret.
 */
trait SecretConfigurationParserSpec extends Specification {
  /** The configuration path under which the secret is supplied. */
  def secretKey: String

  val Secret = "abcdefghijklmnopqrs"

  /**
   * Builds an HttpConfiguration from the reference config plus the given
   * secret (if any) at `secretKey`, and returns the resolved secret value.
   */
  def parseSecret(mode: Mode.Mode, secret: Option[String] = None): String = {
    HttpConfiguration.fromConfiguration(
      Configuration.reference ++ Configuration.from(
        secret.map(secretKey -> _).toMap
      ),
      Environment.simple(mode = mode)
    ).secret.secret
  }

  "Secret config parser" should {
    "parse the secret" in {
      "load a configured secret in prod" in {
        parseSecret(Mode.Prod, Some(Secret)) must_== Secret
      }
      "load a configured secret in dev" in {
        parseSecret(Mode.Dev, Some(Secret)) must_== Secret
      }
      "throw an exception if secret is changeme in prod" in {
        parseSecret(Mode.Prod, Some("changeme")) must throwA[PlayException]
      }
      "throw an exception if no secret in prod" in {
        parseSecret(Mode.Prod, Some(null)) must throwA[PlayException]
      }
      "throw an exception if secret is blank in prod" in {
        parseSecret(Mode.Prod, Some(" ")) must throwA[PlayException]
      }
      "throw an exception if secret is empty in prod" in {
        parseSecret(Mode.Prod, Some("")) must throwA[PlayException]
      }
      "generate a secret if secret is changeme in dev" in {
        parseSecret(Mode.Dev, Some("changeme")) must_!= "changeme"
      }
      "generate a secret if no secret in dev" in {
        parseSecret(Mode.Dev) must_!= ""
      }
      "generate a secret if secret is blank in dev" in {
        parseSecret(Mode.Dev, Some(" ")) must_!= " "
      }
      "generate a secret if secret is empty in dev" in {
        parseSecret(Mode.Dev, Some("")) must_!= ""
      }
      "generate a stable secret in dev" in {
        // The dev-mode secret is derived deterministically, so two parses of
        // the same configuration must agree. (The previous version of this
        // example duplicated the "changeme" test and never checked stability.)
        parseSecret(Mode.Dev, Some("changeme")) must_== parseSecret(Mode.Dev, Some("changeme"))
      }
    }
  }
}
| aradchykov/playframework | framework/src/play/src/test/scala/play/api/http/SecretConfigurationParserSpec.scala | Scala | apache-2.0 | 2,700 |
package com.lookout.borderpatrol
import java.util.concurrent.TimeUnit
import argonaut.Argonaut._
import argonaut.CodecJson
import com.lookout.borderpatrol.session.secret._
import com.lookout.borderpatrol.session.tokens._
import com.lookout.borderpatrol.session.id._
import com.twitter.bijection.{Base64String, Injection}
import com.twitter.util.{Time, Duration}
import org.jboss.netty.buffer.ChannelBuffers
import com.lookout.borderpatrol.util.Combinators.tap
import org.jboss.netty.handler.codec.http.{DefaultHttpRequest, HttpMethod, HttpVersion, HttpRequest}
import scala.collection.JavaConversions._
// Session constants plus argonaut JSON codecs for (de)serializing sessions,
// and bijections for base64-encoding serialized session bytes.
package object session {
  object Constants {
    // Entropy (bytes) and lifetime defaults for session ids.
    object SessionId {
      val entropySize = 16
      val lifetime = Duration(1, TimeUnit.DAYS)
    }
    // Entropy (bytes) and lifetime defaults for signing secrets.
    object Secret {
      val entropySize = 16
      val lifetime = Duration(1, TimeUnit.DAYS)
    }
  }
  // Bytes round-trip through JSON numbers (via Int).
  implicit def ByteCodecJson: CodecJson[Byte] =
    CodecJson(
      (b: Byte) => jNumberOrNull(b.toInt),
      c => for (b <- c.as[Int]) yield b.toByte
    )
  // Time is encoded as an object {"ms": <epoch milliseconds>}.
  implicit def TimeCodecJson: CodecJson[Time] =
    CodecJson(
      (t: Time) =>
        ("ms" := t.inMilliseconds) ->: jEmptyObject,
      c => for {
        s <- (c --\\ "ms").as[Long]
      } yield Time.fromMilliseconds(s))
  // HttpRequest is encoded with short keys: u=uri, m=method, v=protocol
  // version, c=content bytes, h=headers (one single-entry map per header).
  implicit def HttpRequestCodecJson: CodecJson[HttpRequest] =
    CodecJson(
      (r: HttpRequest) =>
        ("u" := r.getUri) ->:
          ("m" := r.getMethod.getName) ->:
          ("v" := r.getProtocolVersion.getText) ->:
          ("c" := r.getContent.array.toList) ->:
          ("h" := r.headers.names.toList.map(n => Map[String, String](n -> r.headers.get(n)))) ->:
          jEmptyObject,
      c => for {
        uri <- (c --\\ "u").as[String]
        meth <- (c --\\ "m").as[String]
        ver <- (c --\\ "v").as[String]
        heads <- (c --\\ "h").as[List[Map[String, String]]]
        cont <- (c --\\ "c").as[List[Byte]]
      } yield tap(new DefaultHttpRequest(HttpVersion.valueOf(ver), HttpMethod.valueOf(meth), uri))(req => {
        heads.foreach(head => head.foreach(kv => req.headers.add(kv._1, kv._2)))
        req.setContent(ChannelBuffers.copiedBuffer(cont.toArray))
      })
    )
  // Sessions serialize as a three-field object: id, req, tokens.
  implicit def SessionCodecJson: CodecJson[Session] =
    casecodec3(Session.apply, Session.unapply)("id", "req", "tokens")
  // Syntax: session.asJson
  implicit class SessionJsonEncode(val s: Session) extends AnyVal {
    def asJson: String =
      SessionCodecJson.encode(s).toString
  }
  // Syntax: jsonString.asSession (None on malformed input)
  implicit class SessionJsonDecode(val s: String) extends AnyVal {
    def asSession: Option[Session] =
      s.decodeOption[Session]
  }
  // Codec chain: raw bytes <-> base64 <-> string.
  lazy val bytes264 = Injection.connect[Array [Byte], Base64String, String]
  // Codec chain: JSON string <-> raw bytes.
  lazy val json2bytes = Injection.connect[String, Array [Byte]]
}
| rtyler/borderpatrol | borderpatrol-core/src/main/scala/com/lookout/borderpatrol/session/package.scala | Scala | mit | 2,712 |
package com.letstalkdata.hexiles
package game
import com.letstalkdata.hexiles.graphics.Drawable
import com.letstalkdata.hexiles.shapes.Hexagon
import org.scalajs.dom
/**
* The hexagon grid onto which pieces are placed.
*
* Author: Phillip Johnson
* Date: 4/30/15
*/
class Board extends Drawable[Double] {

  // One hexagon per (column, row) cell: five staggered rows of five tiles,
  // each successive row shifted one column to the left.
  val tiles: Seq[Hexagon] =
    for {
      row <- 0 to 4
      column <- 7 - row to 11 - row
    } yield Hexagon(column, row)

  /** Renders every tile of the board onto the given canvas context. */
  override def draw(context: dom.CanvasRenderingContext2D): Unit =
    tiles.foreach(_.draw(context))

  /** True when the point falls inside any tile of the board. */
  override def contains(point: Point[Double]): Boolean =
    tiles.exists(_.contains(point))
}
| SeriousSoftware/ScalaJS-Hexiles-web | src/main/scala-2.11/com/letstalkdata/hexiles/game/Board.scala | Scala | mit | 643 |
/*
* Copyright 2011-2017 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.cdevreeze.yaidom.integrationtest
import eu.cdevreeze.yaidom.parse
/**
* See AbstractOtherNamespaceTest.
*
* Acknowledgments: This test uses the examples in http://www.datypic.com/books/defxmlschema/chapter03.html, that are also used
* in the excellent book Definitive XML Schema.
*
* @author Chris de Vreeze
*/
class OtherNamespaceTestUsingSaxParser extends AbstractOtherNamespaceTest {
  // Both parser slots use the same SAX-based implementation; the abstract
  // test distinguishes them only by which documents they are fed.
  val documentParser: parse.DocumentParser = parse.DocumentParserUsingSax.newInstance()
  val documentParserForXml11: parse.DocumentParser = parse.DocumentParserUsingSax.newInstance()
}
| dvreeze/yaidom | jvm/src/test/scala/eu/cdevreeze/yaidom/integrationtest/OtherNamespaceTestUsingSaxParser.scala | Scala | apache-2.0 | 1,197 |
package utilities
import play.api.mvc._
import models.UserSession
import scala.concurrent.Future
/** A request wrapper carrying the authenticated user's [[UserSession]]. */
class AuthenticatedRequest[A](val userSession: UserSession, request: Request[A]) extends WrappedRequest[A](request)
/**
* A custom Action for allowing requests with valid authentication credentials
*/
object Authenticated extends ActionBuilder[AuthenticatedRequest] with Results {
  // Resolves the session for the incoming request; on success runs the
  // wrapped block with an AuthenticatedRequest, otherwise replies 403,
  // discards the login cookie, and resets the Play session.
  def invokeBlock[A](request: Request[A], block: (AuthenticatedRequest[A]) => Future[SimpleResult]) = {
    AuthenticationHelper.getUserSessionForRequest(request) map {
      userSession: UserSession =>
        block(new AuthenticatedRequest(userSession, request))
    } getOrElse {
      // No valid credentials: serve the index page with 403 and clear auth state.
      Future.successful(Forbidden(views.html.index()).discardingCookies(DiscardingCookie("logged_user")).withNewSession)
    }
  }
} | mehmetakiftutuncu/simplelogin | app/utilities/Authenticated.scala | Scala | apache-2.0 | 798 |
import akka.sbt.AkkaKernelPlugin
import akka.sbt.AkkaKernelPlugin.{Dist, distJvmOptions, distMainClass, outputDirectory}
import sbt.Keys._
import sbt._
// sbt build definition for the riepete akka-kernel application.
object RiepeteKernelBuild extends Build {
  val Organization = "io.simao"
  val Version = "0.0.2"
  val ScalaVersion = "2.11.4"
  val Name = "riepete"
  // Single module packaged via the AkkaKernelPlugin `dist` task into
  // target/riepete-dist, booted through akka.kernel.Main.
  lazy val RiepeteKernel = Project(
    id = "riepete-kernel",
    base = file("."),
    settings = defaultSettings ++ AkkaKernelPlugin.distSettings ++ Seq(
      libraryDependencies ++= Dependencies.all,
      distJvmOptions in Dist := "-Xms256M -Xmx1024M",
      outputDirectory in Dist := file("target/riepete-dist"),
      distMainClass in Dist := "akka.kernel.Main io.simao.riepete.server.RiepeteKernel"
    )
  )
  lazy val buildSettings = Defaults.defaultSettings ++ Seq(
    organization := Organization,
    version := Version,
    scalaVersion := ScalaVersion,
    name := Name,
    crossPaths := false,
    organizationName := "simao.io",
    organizationHomepage := Some(url("https://simao.io"))
  )
  lazy val defaultSettings = buildSettings ++ Seq(
    // compile options
    scalacOptions ++= Seq("-encoding", "UTF-8", "-deprecation", "-unchecked"),
    javacOptions ++= Seq("-Xlint:unchecked", "-Xlint:deprecation"),
    resolvers += "clojars" at "http://clojars.org/repo/"
  )
}
// Library dependencies for the build; versions shared across akka modules
// are centralized in Versions.
object Dependencies {
  object Versions {
    val Akka = "2.3.6"
  }
  // Note: list order is preserved on the classpath.
  val all = List(
    "com.typesafe.akka" %% "akka-kernel" % Versions.Akka,
    "com.typesafe.akka" %% "akka-slf4j" % Versions.Akka,
    "com.typesafe.akka" %% "akka-actor" % Versions.Akka,
    "com.typesafe.akka" %% "akka-testkit" % Versions.Akka % "test",
    "ch.qos.logback" % "logback-classic" % "1.1.2",
    "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.1",
    "io.argonaut" %% "argonaut" % "6.0.4",
    "io.dropwizard.metrics" % "metrics-core" % "3.1.0",
    "com.aphyr" % "riemann-java-client" % "0.2.10",
    "org.scalatest" %% "scalatest" % "2.2.0" % "test",
    "org.scalacheck" %% "scalacheck" % "1.11.6" % "test"
  )
}
| simao/riepete | project/Build.scala | Scala | mit | 2,053 |
package cromwell.engine.workflow.lifecycle.execution.callcaching
import akka.actor.{Actor, ActorLogging, Props}
import cromwell.core.Dispatcher.EngineDispatcher
import cromwell.database.sql.tables.CallCachingEntry
import scala.concurrent.ExecutionContext
import scala.util.{Failure, Success}
/**
 * Fire-and-forget actor: invalidates a single call-cache entry on startup,
 * reports the outcome to its parent, then stops itself.
 */
class CallCacheInvalidateActor(callCache: CallCache, cacheId: CallCachingEntryId) extends Actor with ActorLogging {
  implicit val ec: ExecutionContext = context.dispatcher

  def receiver = context.parent

  /*
   * The invalidation Future completes on a dispatcher thread, where it is NOT
   * safe to touch `context` (e.g. `context.parent` or `context.stop`) — actor
   * context is only valid on the actor's own message-processing thread. The
   * result is therefore sent back to `self` (an immutable ActorRef, safe to
   * use from any thread) and forwarded to the parent from within `receive`.
   */
  callCache.invalidate(cacheId) onComplete {
    case Success(maybeEntry) => self ! CallCacheInvalidatedSuccess(maybeEntry)
    case Failure(t) => self ! CallCacheInvalidatedFailure(t)
  }

  override def receive: Receive = {
    case response: CallCacheInvalidatedResponse =>
      // Forward the outcome to the parent and shut down; this actor is single-use.
      receiver ! response
      context.stop(self)
    case any => log.error("Unexpected message to InvalidateCallCacheActor: " + any)
  }
}
object CallCacheInvalidateActor {
  // Props factory; runs on the engine dispatcher to keep invalidation work
  // off the default dispatcher.
  def props(callCache: CallCache, cacheId: CallCachingEntryId) = {
    Props(new CallCacheInvalidateActor(callCache: CallCache, cacheId: CallCachingEntryId)).withDispatcher(EngineDispatcher)
  }
}
// Replies sent to the requesting parent once invalidation completes
// (success carries the removed entry, if one existed) or is skipped.
sealed trait CallCacheInvalidatedResponse
case class CallCacheInvalidatedSuccess(maybeEntry: Option[CallCachingEntry]) extends CallCacheInvalidatedResponse
case object CallCacheInvalidationUnnecessary extends CallCacheInvalidatedResponse
case class CallCacheInvalidatedFailure(t: Throwable) extends CallCacheInvalidatedResponse | ohsu-comp-bio/cromwell | engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheInvalidateActor.scala | Scala | bsd-3-clause | 1,449 |
package backend.distributor
import akka.actor.ActorRef
import backend.PricerMsg
/**
* Internal API for stream linking
*/
object StreamLinkApi {
  // Handshake messages carrying each side's stream actor reference.
  case class DistributorStreamRef(ref: ActorRef)
  case class PricerStreamRef(ref: ActorRef)
  // Back-pressure signal: `sender` is ready to accept more payloads.
  case class Demand(sender: ActorRef)
  // A single pricer message routed over the link.
  case class Payload(sender: ActorRef, msg: PricerMsg)
}
| intelix/activator-reactive-fx | app/backend/distributor/StreamLinkApi.scala | Scala | apache-2.0 | 343 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
* Performance test for the Coinis entity.
*/
class CoinisGatlingTest extends Simulation {

    val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]

    // Log all HTTP requests
    //context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
    // Log failed HTTP requests
    //context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))

    // Target host; overridable via -DbaseURL=...
    val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""

    val httpConf = http
        .baseUrl(baseURL)
        .inferHtmlResources()
        .acceptHeader("*/*")
        .acceptEncodingHeader("gzip, deflate")
        .acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
        .connectionHeader("keep-alive")
        .userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")
        .silentResources // Silence all resources like css or css so they don't clutter the results

    val headers_http = Map(
        "Accept" -> """application/json"""
    )

    val headers_http_authentication = Map(
        "Content-Type" -> """application/json""",
        "Accept" -> """application/json"""
    )

    // "${access_token}" is a Gatling EL reference, resolved per virtual user.
    val headers_http_authenticated = Map(
        "Accept" -> """application/json""",
        "Authorization" -> "${access_token}"
    )

    // Scenario: unauthenticated probe (expects 401), JWT login, then CRUD on
    // the Coinis entity (list, create, read x5, delete).
    val scn = scenario("Test the Coinis entity")
        .exec(http("First unauthenticated request")
        .get("/api/account")
        .headers(headers_http)
        .check(status.is(401))
        ).exitHereIfFailed
        .pause(10)
        .exec(http("Authentication")
        .post("/api/authenticate")
        .headers(headers_http_authentication)
        .body(StringBody("""{"username":"admin", "password":"admin"}""")).asJson
        .check(header("Authorization").saveAs("access_token"))).exitHereIfFailed
        .pause(2)
        .exec(http("Authenticated request")
        .get("/api/account")
        .headers(headers_http_authenticated)
        .check(status.is(200)))
        .pause(10)
        .repeat(2) {
            exec(http("Get all coinis")
            .get("/api/coinis")
            .headers(headers_http_authenticated)
            .check(status.is(200)))
            .pause(10 seconds, 20 seconds)
            .exec(http("Create new coinis")
            .post("/api/coinis")
            .headers(headers_http_authenticated)
            .body(StringBody("""{
                "id":null
                , "closeprice":"SAMPLE_TEXT"
                , "highprice":"SAMPLE_TEXT"
                , "itemcode":"SAMPLE_TEXT"
                , "lowprice":"SAMPLE_TEXT"
                , "openprice":"SAMPLE_TEXT"
                , "prevcloseprice":"SAMPLE_TEXT"
                , "tradeamount":"SAMPLE_TEXT"
                , "tradedaebi":"SAMPLE_TEXT"
                , "tradedaebirate":"SAMPLE_TEXT"
                , "tradedate":"SAMPLE_TEXT"
                , "tradevolumn":"SAMPLE_TEXT"
                , "symbol":"SAMPLE_TEXT"
                , "createdat":"2020-01-01T00:00:00.000Z"
                }""")).asJson
            .check(status.is(201))
            .check(headerRegex("Location", "(.*)").saveAs("new_coinis_url"))).exitHereIfFailed
            .pause(10)
            .repeat(5) {
                exec(http("Get created coinis")
                .get("${new_coinis_url}")
                .headers(headers_http_authenticated))
                .pause(10)
            }
            .exec(http("Delete created coinis")
            .delete("${new_coinis_url}")
            .headers(headers_http_authenticated))
            .pause(10)
        }

    val users = scenario("Users").exec(scn)

    // Ramp `users` virtual users over `ramp` minutes (-Dusers / -Dramp).
    setUp(
        users.inject(rampUsers(Integer.getInteger("users", 100)) during (Integer.getInteger("ramp", 1) minutes))
    ).protocols(httpConf)
}
| iansoftdev/all-blockchain | src/test/gatling/user-files/simulations/CoinisGatlingTest.scala | Scala | mit | 4,041 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author Arash Fard, Usman Nisar, Ayushi Jain, Aravind Kalimurthy, John Miller
* @version 1.3
* @date Thu Nov 25 11:28:31 EDT 2013
* @see LICENSE (MIT style license file).
*/
package scalation.graphalytics
import scala.collection._
import scala.collection.immutable.{Set => SET}
import scala.collection.mutable.{ArrayStack, ListBuffer, Map, HashMap, MutableList, Set}
import scala.math.pow
import scala.util.control.Breaks.{break, breakable}
import scala.util.Random
import scalation.stat.Statistic
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The 'StrictSim' class provides an implementation for strict simulation
* graph pattern matching. This version uses `DualSim`.
* @see hipore.com/ijbd/2014/IJBD%20Vol%201%20No%201%202014.pdf
* @param q the query graph Q(U, D, k)
* @param g the data graph G(V, E, l)
*/
class StrictSim (g: Graph, q: Graph)
      extends GraphMatcher (g, q)
{
    // NOTE(review): `listOfDistinctReducedSet`, `mapOfBallWithSize` and
    // `listOfMatchedBallVertices` are never populated anywhere in this class,
    // so `calculateTotalVertices` always returns 0 and
    // `calculateBallDiameterMetrics` always tallies nothing — confirm whether
    // they were meant to be filled during `strictSim`/`dualFilter`.
    private val listOfDistinctReducedSet = new ListBuffer [SET [String]] ()  // contains total number of matches
                                                                             // after post processing
    private val mapOfBallWithSize = Map [Int, Long] ()                       // contains balls left after
                                                                             // post processing with diameter.
    private val listOfMatchedBallVertices = MutableList [Int] ()             // contains list of center vertices
    private val qmet = new GraphMetrics (q.clone, false)                     // creating graph metrics object of query graph
    private val dataSize = g.size                                            // size of the data graph
    private val querySize = q.size                                           // size of the query graph

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Apply the Strict Graph Simulation pattern matching algorithm to find the mappings
     *  from the query graph 'q' to the data graph 'g'. These are represented by a
     *  multi-valued function 'phi' that maps each query graph vertex 'u' to a
     *  set of data graph vertices '{v}'.
     */
    def mappings (): Array [SET [Int]] = merge (mappings2 ())

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Mapping results per ball.
     */
    def mappings2 (): HashMap [Int, Array [SET [Int]]] = strictSim (new DualSim (g, q).mappings ())

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Merged mapping results, the union over all balls.
     */
    def merge (matches: HashMap [Int, Array [SET [Int]]]): Array [SET [Int]] =
    {
        val phi_all = Array.ofDim [SET [Int]] (querySize)
        for (i <- 0 until querySize) phi_all (i) = SET [Int] ()
        for ((c, phi_c) <- matches) {
            println (s"(c, phi_c) = ($c, ${phi_c.deep})")
            for (i <- 0 until querySize) phi_all(i) ++= phi_c(i)
        } // for
        phi_all
    } // merge

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Performs strict simulation to find mappings with balls.
     *  @param phi  the initial mapping after applying Dual to the whole graph
     */
    def strictSim (phi: Array [SET [Int]]): HashMap [Int, Array [SET [Int]]] =
    {
        if (phi.size == 0) { println ("No dual match."); return null }   // exit if no match after dual simulation
        val newGraph = filterGraph (phi)                                 // if doing strong sim more than once, must clone g
        val prunedSize = phi.flatten.toSet.size                          // size of feasible matches after strict simulation
        val qDiameter = qmet.diam                                        // get the query diameter
        val balls = HashMap [Int, Ball] ()                               // map of balls: center -> ball
        val matches = HashMap [Int, Array [SET [Int]]] ()                // map of matches in balls: center -> match
        val gCenters = (0 until q.size).flatMap(phi(_))                  // set of mapped data graph centers
        val bCenters = Set [Int] ()                                      // set of centers for all balls
        var ballSum = 0
        for (center <- gCenters) {                                       // for each mapped data graph center
            val ball = new Ball (newGraph, center, qDiameter)            // create a new ball for that center vertex
            ballSum += ball.nodesInBall.size                             // calculate ball size
            val mat = dualFilter (phi.clone, ball)                       // perform dual filter on the ball
            println (s"center = $center, mat = ${mat.deep}")
            balls.put (center, ball)
            if (mat.size != 0) { bCenters += center; matches += center -> mat }
            else println ("No match for ball centered at " + center + "\\n")
        } // for
        println ("SEQUENTIAL: Data Graph Name: " + g.name +
            "\\n Number of Data Graph Nodes: " + dataSize +
            "\\n Query Graph Name: " + q.name +
            "\\n Number of Query Graph Nodes: " + querySize +
            "\\n Number of Strict Matches: " + bCenters.size +
            "\\n Graph Size after Pruning: " + prunedSize + " nodes" +
            "\\n Query Diameter: " + qDiameter +
            "\\n Average Ball Size: " + (ballSum / prunedSize.toDouble) +
            "\\n Total Distinct Edges: " + calculateTotalEdges (g, balls, bCenters) +
            "\\n Total Distinct Vertices: " + calculateTotalVertices ())
        println ("Ball Diameter Metrics(Min, Max, Mean, StdDev): " + calculateBallDiameterMetrics (balls) )
        matches
    } // strictSim

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Prune the data graph by consider only those vertices and edges which
     *  are part of feasible matches after performing initial dual simulation.
     *  Note: mutates `g.ch` in place before building the pruned copy.
     *  @param phi  mappings from a query vertex u_q to { graph vertices v_g }
     */
    def filterGraph (phi: Array [SET [Int]]): Graph =
    {
        val nodesInSimset = phi.flatten.toSet                    // get all the vertices of feasible matches
        for (i <- 0 until dataSize) g.ch(i) &= nodesInSimset     // prune via intersection
        val newCh = Array.ofDim [SET [Int]] (dataSize)
        for (i <- 0 until dataSize) newCh(i) = SET [Int] ()
        for (u <- 0 until q.size; w <- phi(u)) {                 // new ch and pa set for data graph based upon feasible vertices
            for (v <- q.ch(u)) newCh(w) |= (g.ch(w) & phi(v))
        } // for
        new Graph (newCh, g.label, g.inverse, g.name + "2")      // create a new data graph
    } // filterGraph

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Perform dual simulation onto the ball.
     *  @param phi   mappings from a query vertex u_q to { graph vertices v_g }
     *  @param ball  the Ball B(Graph, Center, Radius)
     */
    def dualFilter (phi: Array [SET [Int]], ball: Ball): Array [SET [Int]] =
    {
        for (v <- phi.indices) phi(v) &= ball.nodesInBall        // project simset onto ball
        val filterSet = new ArrayStack [(Int, Int)] ()
        var filtered = false
        for (u <- phi.indices; v <- phi(u) if ball.borderNodes contains v) {
            filtered = false                                     // filtering ball based on child relationship
            breakable { for (u1 <- q.ch(u)) {
                if ((ball.post (v) & phi (u1)).isEmpty) {
                    filterSet.push ((u, v))
                    filtered = true
                    break
                } // if
            }} // breakable for
            if (! filtered) {                                    // filtering ball based on parent relationship,
                breakable { for (u2 <- q.pa(u)) {                // if no child has been filtered out
                    if ((ball.pre (v) & phi(u2)).isEmpty) {
                        filterSet.push ((u, v))
                        break
                    } // if
                }} // breakable for
            } // if
        } // for
        while (! filterSet.isEmpty) {                            // refine ch and pa relationship for the vertex v,
            val (u, v) = filterSet.pop ()                        // which is now not a feasible match
            phi(u) -= v
            for (u2 <- q.pa(u); v2 <- (ball.pre (v) & phi(u2)) if (ball.post (v2) & phi(u)).isEmpty)
                filterSet.push ((u2, v2))
            for (u1 <- q.ch(u); v1 <- (ball.post (v) & phi(u1)) if (ball.pre (v1) & phi(u)).isEmpty)
                filterSet.push ((u1, v1))
        } // while
        val chSet = HashMap [Int, Set [Int]] ()
        val paSet = HashMap [Int, Set [Int]] ()
        // create new ch and pa set for the ball after above pruning
        for (u <- phi.indices; v <- phi(u); uc <- q.ch(u); vc <- (ball.post (v) & phi(uc))) {
            chSet.getOrElseUpdate (v, Set [Int] ()) += vc
            paSet.getOrElseUpdate (vc, Set [Int] ()) += v
        } // for
        // Finding max perfect subgraph: DFS from the ball's center over the
        // union of child/parent edges, keeping only reachable vertices.
        val stack = new ArrayStack [Int] ()
        val visited = Set (ball.center)
        stack.push (ball.center)
        while (! stack.isEmpty) {
            val v = stack.pop ()
            for (child <- (chSet.getOrElse (v, Set ()) | paSet.getOrElse (v, Set ()))) {
                if (! visited.contains (child)) {
                    stack.push (child)
                    visited += child
                } // if
            } // for
        } // while
        for ( v <- phi.indices) phi(v) = phi(v) & visited
        //fixes the edges in the ball
        //(note that it does not change the parent set; this is only used for printing)
        //uncomment if you want to see the ball after finding maximum perfect subgraph
        ball.chMap = Map [Int, Set [Int]] ()
        val matchNodes = phi.flatten.toSet
        for ((n, nset) <- chSet; nc <- nset) {
            if ((matchNodes contains n) && (matchNodes contains nc)) ball.chMap.getOrElseUpdate (n, Set () ) += nc
        } // for
        for (v <- phi.indices if phi(v).isEmpty) return Array [SET [Int]] ()
        phi
    } //dualFilter

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Count distinct vertices left after post processing.
     *  NOTE(review): depends on `listOfDistinctReducedSet`, which is never
     *  populated in this class, so this currently always returns 0.
     */
    def calculateTotalVertices (): Int =
    {
        val totalSet = Set [String] ()
        for (i <- 0 until listOfDistinctReducedSet.length) totalSet ++= listOfDistinctReducedSet(i)
        totalSet.size
    } // calculateTotalVertices

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Count distinct edges left after post processing.
     *  @param g             the data graph  G(V, E, l)
     *  @param balls         mappings from a center vertex to the Ball B(Graph, Center, Radius)
     *  @param matchCenters  set of all vertices which are considered as center
     */
    def calculateTotalEdges (g: Graph, balls: HashMap [Int, Ball], matchCenters: Set [Int]): Int =
    {
        val distinctEdges = Set [String] ()
        for (vert_id <- 0 until g.ch.length; if balls.keySet.contains (vert_id)) {
            balls.get (vert_id).get.chMap.foreach (i => i._2.foreach (j => distinctEdges += (i._1.toString + "_" + j.toString)))
        } // for
        distinctEdges.size
    } // calculateTotalEdges

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Calculate statistics (e.g., min, max, average diameter and standard deviation)
     *  on the balls left after post-processing.
     *  NOTE(review): iterates `listOfMatchedBallVertices`, which is never
     *  populated in this class, so the statistic is currently always empty.
     *  @param balls  mappings from a center vertex to the Ball B(Graph, Center, Radius)
     */
    def calculateBallDiameterMetrics (balls: HashMap [Int, Ball]): Statistic =
    {
        val ballStats = new Statistic ()
        for (vert_id <- listOfMatchedBallVertices) ballStats.tally (balls.get (vert_id).get.getBallDiameter)
        ballStats
    } // calculateBallDiameterMetrics

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the vertex from an array of central vertices, those which have
     *  highest 'ch' set size and lowest frequency of label in the query graph, i.e.,
     *  highest ratio.
     *  @param centr  the array of vertices whose eccentricity is equal to the radius
     */
    def selectivityCriteria (qmet: GraphMetrics): Int =
    {
        var index = 0
        var max = 0.0
        for (ctr <- qmet.central) {
            val ratio = qmet.g.ch(ctr).size.toDouble / qmet.g.labelMap (qmet.g.label(ctr)).size.toDouble
            if (max < ratio) { max = ratio; index = ctr }
        } // for
        index
    } // selectivityCriteria

} // StrictSim class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: ::::::::::::
/** The `StrictSimTest` object is used to test the `StrictSim` class.
* > run-main scalation.graphalytics.StrictSimTest
*/
object StrictSimTest extends App
{
    // Build the (data, query) graph pair, dump both, then run the matcher.
    val g = Graph.g1p
    val q = Graph.q1p
    println (s"g.checkEdges = ${g.checkEdges}")
    g.printG ()
    println (s"q.checkEdges = ${q.checkEdges}")
    q.printG ()
    (new StrictSim (g, q)).test ("StrictSim")    // Strict Graph Simulation Pattern Matcher
} // StrictSimTest object
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: ::::::::::::
/** The `StrictSimTest2` object is used to test the `StrictSim` class.
* > run-main scalation.graphalytics.StrictSimTest2
*/
object StrictSimTest2 extends App
{
    // Same as StrictSimTest, but on the second sample graph pair.
    val g = Graph.g2p
    val q = Graph.q2p
    println (s"g.checkEdges = ${g.checkEdges}")
    g.printG ()
    println (s"q.checkEdges = ${q.checkEdges}")
    q.printG ()
    (new StrictSim (g, q)).test ("StrictSim")    // Strict Graph Simulation Pattern Matcher
} // StrictSimTest2 object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `StrictSimTest3` object test the `StrictSim` class by passing data graph
* and query graph relative file paths.
* > run-main scalation.graphalytics.StrictSimTest3
*/
object StrictSimTest3 extends App
{
    // Loads the data and query graphs from files ("gfile"/"qfile") instead of
    // using the built-in samples.
    val g = GraphIO ("gfile")
    val q = GraphIO ("qfile")
    println (s"q.checkEdges = ${q.checkEdges}")
    q.printG ()
    (new StrictSim (g, q)).test ("StrictSim")    // Strict Graph Simulation Pattern Matcher
} // StrictSimTest3 object
| NBKlepp/fda | scalation_1.3/scalation_modeling/src/main/scala/scalation/graphalytics/StrictSim.scala | Scala | mit | 14,810 |
package com.whitepages.cloudmanager.action
import com.whitepages.cloudmanager.client.SolrRequestHelpers
import com.whitepages.cloudmanager.state.ClusterManager
import org.apache.solr.client.solrj.impl.CloudSolrServer
import org.apache.solr.common.params.ModifiableSolrParams
import org.apache.solr.common.params.CollectionParams.CollectionAction
import scala.collection.JavaConverters._
/** Deletes a SolrCloud collection, refusing if any alias still points at it. */
case class DeleteCollection(collection: String) extends Action {

  override val preConditions: List[StateCondition] = List(
    StateCondition("collection exists", Conditions.collectionExists(collection))
  )

  override def execute(clusterManager: ClusterManager): Boolean = {
    // Aliases aren't part of the SolrState object, so this check can't live
    // in preConditions.
    val aliasesPointingHere = clusterManager.aliasMap.collect {
      case (alias, target) if target == collection => alias
    }
    if (aliasesPointingHere.isEmpty) {
      val params = new ModifiableSolrParams
      params.set("action", CollectionAction.DELETE.toString)
      params.set("name", collection)
      val submitted = SolrRequestHelpers.submitRequest(clusterManager.client, params)
      // Only wait for the collection to vanish if the request was accepted.
      submitted && Conditions.waitForState(clusterManager, Conditions.collectionExists(collection).andThen(!_))
    } else {
      comment.warn("Refusing to delete a collection that an alias is currently pointing to. Delete or move the alias first.")
      comment.warn(s"Active aliases for $collection: ${aliasesPointingHere.mkString(", ")}")
      false
    }
  }

  override val postConditions: List[StateCondition] = List(
    StateCondition("collection doesn't exist", Conditions.collectionExists(collection).andThen(!_))
  )

  override def toString = s"DeleteCollection: name: $collection"
}
| randomstatistic/solrcloud_manager | src/main/scala/com/whitepages/cloudmanager/action/DeleteCollection.scala | Scala | apache-2.0 | 1,696 |
package com.sksamuel.elastic4s.handlers.searches.queries.text
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
import com.sksamuel.elastic4s.requests.searches.queries.matches.MatchPhrasePrefixQuery
object MatchPhrasePrefixBodyFn {

  /** Renders a match_phrase_prefix query as its JSON request body. */
  def apply(q: MatchPhrasePrefixQuery): XContentBuilder = {
    val body = XContentFactory.jsonBuilder()
    body.startObject("match_phrase_prefix")
    body.startObject(q.field)
    body.autofield("query", q.value)
    // Optional clauses are emitted only when present, preserving field order.
    q.queryName.foreach(name => body.field("_name", name))
    q.analyzer.foreach(analyzer => body.field("analyzer", analyzer))
    q.slop.foreach(slop => body.field("slop", slop))
    q.maxExpansions.foreach(expansions => body.field("max_expansions", expansions))
    q.boost.foreach(boost => body.field("boost", boost))
    body.endObject()
    body.endObject()
    body.endObject()
    body
  }
}
| sksamuel/elastic4s | elastic4s-handlers/src/main/scala/com/sksamuel/elastic4s/handlers/searches/queries/text/MatchPhrasePrefixBodyFn.scala | Scala | apache-2.0 | 832 |
/*
* =========================================================================================
* Copyright © 2013-2017 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
* =========================================================================================
*/
package kamon.netty.instrumentation
import io.netty.channel.{Channel, ChannelHandler, ChannelHandlerContext, ChannelInboundHandlerAdapter}
import kamon.util.Clock
import org.aspectj.lang.annotation._
@Aspect
class ServerBootstrapInstrumentation {
  import ServerBootstrapInstrumentation._

  /**
   * Labels the boss/worker event loop groups when a ServerBootstrap is
   * configured, so later instrumentation can tell them apart.
   * When one group serves both roles it gets the boss label.
   */
  @Before("execution(* io.netty.bootstrap.ServerBootstrap.group(..)) && args(bossGroup, workerGroup)")
  def onNewServerBootstrap(bossGroup:NamedEventLoopGroup, workerGroup:NamedEventLoopGroup):Unit = {
    bossGroup.name = BossGroupName
    workerGroup.name = if (bossGroup == workerGroup) BossGroupName else WorkerGroupName
  }

  /**
   * Installs the Kamon timing handler at the head of every accepted child
   * channel's pipeline, exactly once per channel.
   */
  @After("execution(* io.netty.bootstrap.ServerBootstrap.ServerBootstrapAcceptor.channelRead(..)) && args(ctx, child)")
  def onChannelRead(ctx: ChannelHandlerContext, child: Channel):Unit = {
    val childPipeline = child.pipeline()
    if (childPipeline.get(KamonHandler) == null) {
      childPipeline.addFirst(KamonHandler, new KamonHandler())
    }
  }
}
object ServerBootstrapInstrumentation {
  // Labels assigned to netty event loop groups by the aspect above.
  val BossGroupName = "boss-group"
  val WorkerGroupName = "worker-group"
  // Pipeline entry name under which the Kamon handler is registered.
  val KamonHandler = "kamon-handler"
  // First inbound handler on accepted channels; stamps the channel with the
  // read timestamp so downstream instrumentation can measure latency.
  @ChannelHandler.Sharable
  private class KamonHandler extends ChannelInboundHandlerAdapter {
    override def channelRead(ctx: ChannelHandlerContext, msg: AnyRef): Unit = {
      // toContextAware() is a project extension on Channel — records the
      // moment the first read happened for this channel.
      ctx.channel().toContextAware().startTime = Clock.microTimestamp()
      super.channelRead(ctx, msg)
    }
  }
}
@Aspect
class EventLoopMixin {
  // AspectJ inter-type declaration: every class implementing
  // io.netty.channel.EventLoopGroup (the '+' means subtypes) also gains the
  // NamedEventLoopGroup trait, i.e. a mutable `name` field.
  @DeclareMixin("io.netty.channel.EventLoopGroup+")
  def mixinEventLoopGroupWithNamedEventLoopGroup: NamedEventLoopGroup = new NamedEventLoopGroup {}
}
trait NamedEventLoopGroup {
  // Group label ("boss-group" / "worker-group"); assigned by
  // ServerBootstrapInstrumentation when the ServerBootstrap is configured.
  var name:String = _
}
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.lambda
import java.io.Closeable
import kafka.server.KafkaConfig
import kafka.utils.TestUtils
import kafka.zk.EmbeddedZookeeper
import org.apache.kafka.common.network.ListenerName
import org.locationtech.geomesa.utils.io.PathUtils
/**
 * Starts an in-process single-broker Kafka (plus embedded Zookeeper) for
 * tests. Call `close()` to shut everything down and remove the log directory.
 */
class EmbeddedKafka extends Closeable {

  private val zookeeper = new EmbeddedZookeeper()

  /** Zookeeper connection string for the embedded instance. */
  val zookeepers = s"127.0.0.1:${zookeeper.port}"

  // Temporary directory holding the broker's log segments.
  private val logs = TestUtils.tempDir()

  private val server = {
    val brokerConfig = TestUtils.createBrokerConfig(1, zookeepers)
    brokerConfig.setProperty("offsets.topic.num.partitions", "1")
    brokerConfig.setProperty("listeners", s"PLAINTEXT://127.0.0.1:${TestUtils.RandomPort}")
    brokerConfig.setProperty("log.dirs", logs.getAbsolutePath)
    TestUtils.createServer(new KafkaConfig(brokerConfig))
  }

  /** Bootstrap server string pointing at the embedded broker. */
  val brokers = s"127.0.0.1:${server.boundPort(ListenerName.normalised("PLAINTEXT"))}"

  override def close(): Unit = {
    // Best-effort teardown: a failing broker shutdown must not prevent the
    // zookeeper shutdown or the log-directory cleanup.
    try server.shutdown() catch { case _: Throwable => }
    try zookeeper.shutdown() catch { case _: Throwable => }
    PathUtils.deleteRecursively(logs.toPath)
  }
}
/**
* Copyright 2013 Robert Welin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mooo.nilewapps.bokbytarappen.server.authentication
import scala.concurrent.{ExecutionContext, Future}
import spray.http._
import spray.http.HttpHeaders._
import spray.httpx.unmarshalling._
import spray.routing._
import spray.routing.AuthenticationFailedRejection._
import spray.routing.authentication._
/**
 * A spray ContextAuthenticator that reads an authentication token from a
 * form-encoded request body field (default name: "token") and delegates
 * validation to the supplied `authenticator` function.
 *
 * @param realm realm reported in the WWW-Authenticate challenge
 * @param authenticator resolves an optional token to an optional user
 * @param fieldName name of the form field carrying the token
 */
class SimpleTokenAuthenticator[U](
  val realm: String,
  val authenticator: Option[String] => Future[Option[U]],
  val fieldName: String = "token")
  (implicit val executionContext: ExecutionContext)
  extends ContextAuthenticator[U] {

  def apply(ctx: RequestContext) = {
    // Pull the token out of the form-encoded entity, if it can be unmarshalled.
    val tokenField: Option[String] = ctx.request.entity.as[FormData] match {
      case Right(formData) => formData.fields.toMap.get(fieldName)
      case _ => None
    }
    authenticate(tokenField) map {
      case Some(user) => Right(user)
      case None =>
        // Distinguish "no token supplied" from "token rejected".
        val cause =
          if (tokenField.isEmpty) CredentialsMissing
          else CredentialsRejected
        Left(AuthenticationFailedRejection(cause, getChallengeHeaders(ctx.request)))
    }
  }

  /**
   * Extracts a password reset token from the request entity and
   * passes it to the authenticator.
   */
  def authenticate(field: Option[String]) = authenticator(field)

  /** Builds the WWW-Authenticate challenge header for failed authentication. */
  def getChallengeHeaders(httpRequest: HttpRequest) = {
    val challenge = HttpChallenge(scheme = "Nilewapp", realm = realm, params = Map.empty)
    List(`WWW-Authenticate`(challenge))
  }
}
| nilewapp/BokBytarAppenServer | src/main/scala/com/mooo/nilewapps/bokbytarappen/server/authentication/SimpleTokenAuthenticator.scala | Scala | apache-2.0 | 1,967 |
package org.kangmo.tradeapi
import org.kangmo.http._
import org.kangmo.helper._
import java.math.BigDecimal
import scala.concurrent._
/**
 * Base class for API channels that fetch public (unauthenticated) resources.
 */
abstract class AbstractChannel() {
  /**
   * Fetches a public resource asynchronously and deserializes the JSON
   * response into an instance of T.
   *
   * @param resource the resource path to fetch
   * @return a future completed once the response is deserialized
   */
  def getPublicFuture[T : Manifest](resource : String) : Future[T] = {
    // Promise[T]() replaces the deprecated scala.concurrent.promise helper
    // (removed in Scala 2.13).
    val p = Promise[T]()
    HTTPActor.dispatcher ! GetPublicResource(resource) { jsonResponse =>
      val obj : T = Json.deserialize[T](jsonResponse)
      p.success(obj)
    }
    p.future
  }
}
/**
 * Base class for API channels that access authenticated (per-user) resources.
 *
 * @param context authentication context sent with every request
 */
abstract class AbstractUserChannel(context : Context) {
  /**
   * Fetches an authenticated resource and deserializes the JSON response.
   *
   * @param resource the resource path to fetch
   * @return a future completed once the response is deserialized
   */
  def getUserFuture[T : Manifest](resource : String) : Future[T] = {
    // Promise[T]() replaces the deprecated scala.concurrent.promise helper
    // (removed in Scala 2.13).
    val p = Promise[T]()
    HTTPActor.dispatcher ! GetUserResource(context, resource) { jsonResponse =>
      val obj : T = Json.deserialize[T](jsonResponse)
      p.success(obj)
    }
    p.future
  }

  /**
   * Posts data to an authenticated resource and deserializes the JSON response.
   *
   * @param resource the resource path to post to
   * @param postData request body to send
   * @return a future completed once the response is deserialized
   */
  def postUserFuture[T : Manifest](resource : String, postData : String) : Future[T] = {
    val p = Promise[T]()
    HTTPActor.dispatcher ! PostUserResource(context, resource, postData) { jsonResponse =>
      val obj : T = Json.deserialize[T](jsonResponse)
      p.success(obj)
    }
    p.future
  }
}
| Kangmo/korbit-nodejs-sdk | main/src/main/scala/scala/AbstractChannel.scala | Scala | apache-2.0 | 1,025 |
package sri.sangria.mobile.routes
import sri.core.ReactElement
import sri.relay.container.RelayRootContainer
import sri.sangria.mobile.components.{HomeScreen, LoadingIndicator}
import sri.sangria.mobile.containers.TodosScreenContainer
import sri.sangria.mobile.queries.ViewerQuery
import sri.universal.components.DefaultNavigationBar.Style
import sri.universal.components.{DefaultNavigationBar, View}
import sri.universal.router.{NavigatorRoute, StaticPage, UniversalRouter, UniversalRouterConfig}
import sri.universal.styles.UniversalStyleSheet
object AppRouter {
  // Route identifiers for the app's two screens.
  object HomePage extends StaticPage
  object TodosPage extends StaticPage
  // Router configuration: maps pages to scenes and wraps every scene with
  // the shared navigation bar.
  object Config extends UniversalRouterConfig {
    // Home screen is the initial route (and also the not-found fallback below).
    override val initialRoute: (StaticPage, NavigatorRoute) = defineInitialRoute(HomePage, "SriSangria", HomeScreen())
    // Todos screen is rendered inside a Relay root container that issues the
    // viewer query and shows a spinner while it loads.
    staticRoute(TodosPage, "Todos", RelayRootContainer(Component = TodosScreenContainer.container, query = ViewerQuery(), renderLoading = () => LoadingIndicator()))
    override val notFound: (StaticPage, NavigatorRoute) = initialRoute
    // Each scene is rendered below the custom-themed navigation bar.
    override def renderScene(route: NavigatorRoute): ReactElement = {
      View(style = UniversalStyleSheet.wholeContainer)(
        DefaultNavigationBar(CustomNavigationBarTheme),
        super.renderScene(route)
      )
    }
  }
  // Application-wide router instance.
  val router = UniversalRouter(Config, style = styles.rootStyle)
}
object styles extends UniversalStyleSheet {
  // Background colour applied to the router's root view.
  val rootStyle = style(backgroundColor := "#f2606f")
}
// Navigation bar theme: dark red bar with white title/button text; the left
// button keeps the default style.
object CustomNavigationBarTheme extends Style {
  override val navBar = styleE(super.navBar)(backgroundColor := "#E84254", borderBottomColor := "#D66767")
  override val navBarTitleText = styleE(super.navBarTitleText)(color := "white")
  override val navBarLeftButton = super.navBarLeftButton
  override val navBarButtonText = styleE(super.navBarButtonText)(color := "white")
}
package poker.app
import poker.Utils._
import poker.core.handanalyzer.HandAnalyzer._
import poker.core.{Card, Hand}
/**
* App that takes five or more cards, and returns the best five-card hand
* that can be made with those cards.
*/
object Best extends App {
  private val lines = getLines(args)
  require(
    lines.size == 1,
    // Fixed message: the two literals previously ran together with no
    // separator ("...5D\"]Found 1").
    "The file should contain a single line with a JSON array " +
      "such as [\"TC\", \"JC\", \"QC\", \"KC\", \"AC\", \"5D\"]. " +
      s"Found ${lines.size}"
  )
  private val jsonArray = lines.head
  private val bestHands = doMain(jsonArray)
  // Several distinct hands can tie for best, hence the plural branch.
  if (bestHands.size == 1) {
    val bestHand = bestHands.head
    println(s"The best possible hand is $bestHand, which is a ${classify(bestHand)}.")
  } else {
    println(s"The best possible hands are ${bestHands.mkString(", ")}.")
  }
  /**
   * Parses a JSON array of card codes and computes the best possible
   * five-card hand(s) that can be made from them.
   *
   * @param jsonArray JSON array literal, e.g. ["TC", "JC", "QC", "KC", "AC", "5D"]
   * @return the set of best hands formable from the given cards
   * @throws IllegalArgumentException if fewer than five cards are supplied
   */
  private[app] def doMain(jsonArray: String): Set[Hand] = {
    val parsedHandStr = parseJsonArray(jsonArray)
    val parsedHandStrSize = parsedHandStr.size
    require(
      parsedHandStrSize >= 5,
      s"Expected hand size of 5 or more but found $parsedHandStrSize"
    )
    val cards = parsedHandStr.map(Card(_)).toVector
    bestPossibleHands(cards: _*)
  }
}
| kyuksel/poker | src/main/scala/poker/app/Best.scala | Scala | mit | 1,195 |
/*
* Copyright (c) 2014.
* Created by MrTJP.
* All rights reserved.
*/
package mrtjp.core.item
import net.minecraft.item.{Item, ItemStack}
import net.minecraft.nbt.CompoundNBT
import scala.collection.immutable.HashMap
object ItemKey
{
    // Wraps a stack's item + NBT tag into an identity key (count is ignored).
    def get(stack:ItemStack):ItemKey = new ItemKey(stack.getItem, stack.getTag)
    @deprecated("Use nonnull standard")
    def getOrNull(stack:ItemStack):ItemKey =
    {
        if (stack.isEmpty) null
        else get(stack)
    }
}
/**
 * Identity of an item (item + NBT tag), independent of stack size.
 * Instances are ordered by numeric item id.
 */
class ItemKey(val item:Item, val tag:CompoundNBT) extends Ordered[ItemKey]
{
    /** Cached single-item stack used to delegate queries to the vanilla ItemStack API. */
    lazy val testStack = makeStack(1)
    /** Numeric registry id of the wrapped item. */
    lazy val itemID = Item.getIdFromItem(item)
    // Precomputed hash combining item id with the NBT tag's hash (if any).
    private val hash = itemID*1000001+(if (tag != null) tag.hashCode else 0)
    override def hashCode = hash
    override def equals(other:Any) = other match
    {
        case that:ItemKey =>
            item == that.item &&
                tag == that.tag
        case _ => false
    }
    override def toString = getName.toString
    /**
     * Orders keys by item id. Integer.compare is used instead of plain
     * subtraction to avoid overflow on extreme id values.
     */
    def compare(that:ItemKey) = Integer.compare(itemID, that.itemID)
    /** Builds an ItemStack of the given size; the NBT tag is deep-copied. */
    def makeStack(size:Int):ItemStack =
    {
        val stack = new ItemStack(item, size)
        if (tag != null) stack.setTag(tag.copy())
        stack
    }
    def copy = new ItemKey(item, tag)
    def isEmpty = testStack.isEmpty
    /** Interactions **/
    def getItem = item
    def getMaxStackSize = testStack.getMaxStackSize
    def getName = testStack.getDisplayName
}
object ItemKeyStack
{
    // Pairs an item identity key with an explicit count.
    def get(key:ItemKey, size:Int) = new ItemKeyStack(key, size)
    // Captures both identity and count of an existing ItemStack.
    def get(stack:ItemStack):ItemKeyStack = new ItemKeyStack(ItemKey.get(stack), stack.getCount)
    @deprecated("Use nonnull standard")
    def getOrNull(stack:ItemStack):ItemKeyStack =
    {
        if (stack.isEmpty) null
        else get(stack)
    }
}
/**
 * An ItemKey together with a mutable stack count, ordered by key then size.
 */
class ItemKeyStack(val key:ItemKey, var stackSize:Int) extends Ordered[ItemKeyStack]
{
    // Hash only the key: equal instances always share a key, so the
    // equals/hashCode contract holds even though stackSize is mutable.
    override def hashCode = key.hashCode
    override def equals(other:Any) = other match
    {
        case that:ItemKeyStack =>
            key == that.key && stackSize == that.stackSize
        case _ => false
    }
    override def toString = "["+key.toString+", "+stackSize+"]"
    def makeStack = key.makeStack(stackSize)
    def copy = new ItemKeyStack(key.copy, stackSize)
    def isEmpty = key.isEmpty || stackSize <= 0
    /**
     * Orders by key first, then by stack size. Integer.compare is used for
     * the size tiebreak to avoid subtraction overflow.
     */
    def compare(that:ItemKeyStack) =
    {
        val byKey = key.compare(that.key)
        if (byKey != 0) byKey
        else Integer.compare(stackSize, that.stackSize)
    }
}
/**
 * Mutable tally of item counts keyed by ItemKey, backed by an immutable
 * HashMap held in a var. Not thread-safe.
 */
class ItemQueue
{
    private var collection = HashMap[ItemKey, Int]()
    // Adds `elem._2` to the running count for `elem._1`; returns this for chaining.
    def +=(elem:(ItemKey, Int)) =
    {
        val current = collection.getOrElse(elem._1, 0)
        collection += elem._1 -> (current+elem._2)
        this
    }
    def ++=(xs:IterableOnce[(ItemKey, Int)]) = {xs foreach +=; this}
    def ++=(that:ItemQueue) = {that.result.foreach(+=); this}
    def add(item:ItemKey, amount:Int)
    {
        this += item -> amount
    }
    // Subtracts from the count; the key is dropped entirely once it reaches
    // zero or below (no negative counts are stored).
    def -=(elem:(ItemKey, Int)) =
    {
        val remaining = apply(elem._1)-elem._2
        if (remaining > 0) collection += elem._1 -> remaining
        else collection -= elem._1
    }
    def --=(xs:IterableOnce[(ItemKey, Int)]) = {xs foreach -=; this}
    def --=(that:ItemQueue) = {that.result.foreach(-=); this}
    def remove(item:ItemKey, amount:Int)
    {
        this -= item -> amount
    }
    // Current count for the item; 0 when absent.
    def apply(item:ItemKey):Int = collection.getOrElse(item, 0)
    def clear(){collection = HashMap[ItemKey, Int]()}
    def isEmpty = collection.isEmpty
    def nonEmpty = collection.nonEmpty
    def keySet = collection.keySet
    // Sums the counts of all entries whose key satisfies the predicate.
    def count(p:ItemKey => Boolean) =
        collection.foldLeft(0){(i, pair) =>
            if (p(pair._1)) i+pair._2 else i
        }
    // Number of distinct keys satisfying the predicate (ignores counts).
    def countItems(p:ItemKey => Boolean) =
        collection.count(pair => p(pair._1))
    // Immutable snapshot of the current tallies.
    def result =
    {
        val b = HashMap.newBuilder[ItemKey, Int]
        b ++= collection
        b.result()
    }
}
// Implicit conversions between Item/ItemStack and the key wrappers.
// NOTE: stack-producing conversions (IKToStack, KToKS) use a size of 0.
object ItemKeyConversions
{
    implicit def itemToIK(item:Item):ItemKey = ItemKey.get(new ItemStack(item))
    implicit def IKToItem(i:ItemKey):Item = i.getItem
    implicit def stackToIK(stack:ItemStack):ItemKey = ItemKey.get(stack)
    implicit def IKToStack(key:ItemKey):ItemStack = key.makeStack(0)
    implicit def stackToIKS(stack:ItemStack):ItemKeyStack = ItemKeyStack.get(stack)
    implicit def IKSToStack(key:ItemKeyStack):ItemStack = key.makeStack
    implicit def KToKS(key:ItemKey):ItemKeyStack = ItemKeyStack.get(key, 0)
    implicit def KSToK(key:ItemKeyStack):ItemKey = key.key
}
| MrTJP/MrTJPCore | src/main/scala/mrtjp/core/item/itemident.scala | Scala | lgpl-3.0 | 4,503 |
/*
* Copyright (c) 2017-2022 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
import sbt._
/** Centralizes dependency versions and module definitions for the sbt build. */
object Dependencies {
  object V {
    // Java
    val aws = "1.11.105" // NOTE(review): presumably "won't work outside EMR" — the pinned hadoop-aws requires AWS SDK 1.7.4; confirm
    val hadoop = "2.7.3"
    // Scala
    val igluClient = "0.5.0"
    val spark = "2.1.0"
    val scopt = "3.5.0"
    val scalaz7 = "7.0.9"
    val json4sJackson = "3.2.11"
    // Scala (test only)
    val specs2 = "2.3.13"
    val scalazSpecs2 = "0.2"
    val scalaCheck = "1.12.2"
  }
  // Java
  val dynamodb = "com.amazonaws" % "aws-java-sdk-dynamodb" % V.aws
  val s3 = "com.amazonaws" % "aws-java-sdk-s3" % V.aws
  val hadoop = "org.apache.hadoop" % "hadoop-aws" % V.hadoop % "provided"
  // Scala
  val igluClient = "com.snowplowanalytics" %% "iglu-scala-client" % V.igluClient
  // Spark is "provided" because it is supplied by the EMR runtime at execution time.
  val spark = "org.apache.spark" %% "spark-core" % V.spark % "provided" withSources() withJavadoc()
  val scopt = "com.github.scopt" %% "scopt" % V.scopt
  val scalaz7 = "org.scalaz" %% "scalaz-core" % V.scalaz7
  val json4sJackson = "org.json4s" %% "json4s-jackson" % V.json4sJackson
  // Scala (test only)
  val specs2 = "org.specs2" %% "specs2" % V.specs2 % "test"
  val scalazSpecs2 = "org.typelevel" %% "scalaz-specs2" % V.scalazSpecs2 % "test"
  val scalaCheck = "org.scalacheck" %% "scalacheck" % V.scalaCheck % "test"
}
| snowplow/snowplow | 5-data-modeling/event-manifest-populator/project/Dependencies.scala | Scala | apache-2.0 | 2,375 |
/**
* Created on February 26, 2011
* Copyright (c) 2011, Wei-ju Wu
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Wei-ju Wu nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY WEI-JU WU ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL WEI-JU WU BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.dmpp.adf.util
/**
 * Implicit conversions so UnsignedInt32 values interoperate naturally with
 * Int and Long in arithmetic and assignments.
 *
 * Note: converting an Int treats its bit pattern as non-negative only if the
 * Int itself is non-negative; a negative Int raises IllegalArgumentException
 * in the UnsignedInt32 constructor.
 */
object UnsignedInt32Conversions {
  implicit def uint2Long(uint: UnsignedInt32): Long = uint.value
  implicit def uint2Int(uint: UnsignedInt32): Int = uint.intValue
  implicit def long2Uint(value: Long): UnsignedInt32 = UnsignedInt32(value)
  implicit def int2Uint(value: Int): UnsignedInt32 = {
    UnsignedInt32(value.asInstanceOf[Long])
  }
}
/**
 * Constants for UnsignedInt32 class.
 */
object UnsignedInt32 {
  /** Largest value representable in 32 unsigned bits (2^32 - 1). */
  val MaxValue = 4294967295l
}

/**
 * A class to emulate unsigned int behavior for addition, namely wrapping around
 * when an addition overflow occurs. In that case, the "overflowOccurred" flag
 * is also set on the resulting value.
 *
 * @constructor creates a new UnsignedInt32 value with optional overflow flag
 * @param value the represented value, must lie in [0, 4294967295]
 * @param overflowOccurred flag to indicate whether an overflow occurred during
 * an arithmetic operation
 * @throws IllegalArgumentException if value is negative or exceeds MaxValue
 */
case class UnsignedInt32(value: Long, overflowOccurred: Boolean = false) {
  import UnsignedInt32._
  if (value > MaxValue) {
    throw new IllegalArgumentException("value exceeds maximal unsigned 32 bit range")
  }
  if (value < 0) {
    // Fixed message: the two literals previously concatenated without a
    // space ("with anegative value").
    throw new IllegalArgumentException("attempted to create UnsignedInt with a " +
                                       "negative value")
  }

  /**
   * Addition of UnsignedInt32 values. If a 32-bit overflow occurs during the addition,
   * the result value's overflowOccurred flag will be set to true.
   *
   * @param aValue value that is added to this object
   * @return a new UnsignedInt representing the result
   */
  def +(aValue: UnsignedInt32): UnsignedInt32 = {
    // Reduce modulo 2^32. Two operands <= MaxValue can never overflow a Long.
    val result = (value + aValue.value) % (MaxValue + 1)
    // Wrap-around happened iff the reduced sum is smaller than either operand.
    val overflowOccurred = result < math.max(value, aValue.value)
    UnsignedInt32(result, overflowOccurred)
  }

  /**
   * A Scala Int value representing this value. It should be pointed out that
   * while UnsignedInt32 values are never negative, the resulting Int is
   * possibly negative.
   *
   * @return the corresponding Scala Int value
   */
  def intValue: Int = value.toInt // toInt: same numeric narrowing as asInstanceOf[Int], clearer intent
}
| weiju/adf-tools | adf-core/src/main/scala/org/dmpp/adf/util/UnsignedInt32.scala | Scala | bsd-3-clause | 3,737 |
package com.avsystem.commons
package mongo.typed
import com.avsystem.commons.annotation.{explicitGenerics, macroPrivate}
import com.avsystem.commons.macros.serialization.MongoMacros
trait DataRefDsl[E, T] {
// convenience type alias
type Ref[T0] = MongoPropertyRef[E, T0]
// ThisRef = MongoPropertyRef for MongoPropertyRef and MongoRef for all other types
// This makes it possible to refine the result type of `as`, `compose`, `andThen` etc. in MongoPropertyRef
// TODO: can we redesign this hierarchy to get rid of this abstraction and simplify things?
type ThisRef[E0, T0] <: MongoRef[E0, T0]
def SelfRef: ThisRef[E, T]
// called by .ref macro to ensure that the source type is not opaque and inner references are possible
@macroPrivate def asAdtRef(implicit ev: IsMongoAdtOrSubtype[T]): ThisRef[E, T] = SelfRef
/**
* A macro that interprets an anonymous function as a [[MongoPropertyRef]].
*
* Let's define a MongoDB entity:
* {{{
* case class Entity(
* id: String,
* number: Int,
* data: Data,
* dataOpt: Opt[Data],
* dataList: List[Data],
* dataMap: Map[String, Data]
* ) extends MongoEntity[String],
* object Entity extends MongoEntityCompanion[Entity]
*
* case class Data(
* value: Int,
* complexData: Map[String, List[Opt[Int]]]
* )
* object Data extends MongoDataCompanion[Data]
* }}}
*
* The `.ref` macro is available on its companion object.
*
* The function may be a reference to one of its fields:
* {{{
* val intRef: MongoPropertyRef[Entity, Int] =
* Entity.ref(_.int)
* val dataRef: MongoPropertyRef[Entity, Data] =
* Entity.ref(_.data)
* }}}
*
* Chaining is also possible:
*
* {{{
* val dataValueRef: MongoPropertyRef[Entity, Int] =
* Entity.ref(_.data.value)
* }}}
*
* When `T` is an `Option`, `Opt`, or similar `Option`-like type, the function may refer its `.get` method
* to return a reference to its inner value.
*
* {{{
* val dataRef: MongoPropertyRef[Entity, Data] =
* Entity.ref(_.dataOpt.get)
* }}}
*
* When `T` is a collection, the function may call its `apply` method to refer to an element at specific index.
* Also, `.head` may be used as an alias for `.apply(0)`.
*
* {{{
* val firstDataRef: MongoPropertyRef[Entity, Data] =
* Entity.ref(_.dataList.head)
* val secondDataRef: MongoPropertyRef[Entity, Data] =
* Entity.ref(_.dataList(1))
* }}}
*
* When `T` is a map, the function may call its `apply` method to refer to a value at specific key.
*
* {{{
* val dataAtOneRef: MongoPropertyRef[Entity, Data] =
* Entity.ref(_.dataMap("one"))
* }}}
*
* Now consider a MongoDB entity expressed as a sealed hierarchy with `@flatten` annotation:
*
* {{{
* @flatten sealed trait UnionEntity extends MongoEntity[UnionEntity] {
* def id: String
* }
* sealed trait HasNumber extends UnionEntity {
* def number: Int
* }
* case class FirstCase(id: String, flag: Boolean)
* extends UnionEntity
* case class SecondCase(id: String, number: Int, num: Double)
* extends HasNumber
* case class ThirdCase(id: String, number: Int, data: Data)
* extends HasNumber
* object UnionEntity extends MongoEntityCompanion[UnionEntity]
* }}}
*
* The function passed to `.ref` macro may now refer to fields shared by all case classes
* (represented as abstract `def`s on the sealed trait):
*
* {{{
* val idRef: MongoPropertyRef[UnionEntity, String] =
* UnionEntity.ref(_.id)
* }}}
*
* You may also access fields of individual case classes by "narrowing" the reference explicitly to one
* particular case class:
*
* {{{
* val flagRef: MongoPropertyRef[UnionEntity, Boolean] =
* UnionEntity.ref(_.as[CaseOne].flag)
* }}}
*
* The same may be done for a subset of case classes sharing some common field.
* This subset must be expressed with an intermediate sealed trait, like `HasNumber` in the above example:
*
* {{{
* val numberRef: MongoPropertyRef[UnionEntity, Int] =
* UnionEntity.ref(_.as[HasNumber].number)
* }}}
*
* Finally, you can chain all of the above references into more complex paths:
*
* {{{
* val deeplyNestedRef: MongoPropertyRef[UnionEntity, Int] =
* UnionEntity.ref(_.as[ThirdCase].data.complexData("key").head.get)
* }}}
*
*/
def ref[T0](fun: T => T0): MongoPropertyRef[E, T0] = macro MongoMacros.refImpl
/**
* Given a MongoDB union data type (defined with a sealed hierarchy with `@flatten` annotation), you can
* narrow it to one of its case classes or intermediate sealed traits.
*
* {{{
* @flatten sealed trait UnionEntity extends MongoEntity[UnionEntity] {
* def id: String
* }
* sealed trait HasNumber extends UnionEntity {
* def number: Int
* }
* case class FirstCase(id: String, flag: Boolean)
* extends UnionEntity
* case class SecondCase(id: String, number: Int, num: Double)
* extends HasNumber
* case class ThirdCase(id: String, number: Int, data: Data)
* extends HasNumber
* object UnionEntity extends MongoEntityCompanion[UnionEntity]
*
* val thirdCaseRef: MongoRef[UnionEntity, ThirdCase] =
* UnionEntity.as[ThirdCase]
* val hasNumberRef: MongoRef[UnionEntity, HasNumber] =
* UnionEntity.as[HasNumber]
* }}}
*
* You can use such "narrowed" reference as a prefix for accessing [[MongoPropertyRef]]s using the [[ref]] macro,
* e.g. `thirdCaseRef.ref(_.data)`. You can also use it as a [[MongoProjection]] passed to one of
* [[TypedMongoCollection]] methods. Note that in such case the projection also serves as a filter, limiting the
* results of the query only to selected cases.
*/
@explicitGenerics
def as[C <: T]: ThisRef[E, C] = macro MongoMacros.asSubtype[C]
/**
* Macro for obtaining a [[MongoDocumentFilter]] (condition) which is satisfied only by some specific subtype
* of an entity type. The entity must be a sealed trait/class and the subtype must be either one of its case classes
* or an intermediate sealed trait extended by some subset of its case classes.
*
* {{{
* @flatten sealed trait UnionEntity extends MongoEntity[UnionEntity] {
* def id: String
* }
* sealed trait HasNumber extends UnionEntity {
* def number: Int
* }
* case class FirstCase(id: String, flag: Boolean)
* extends UnionEntity
* case class SecondCase(id: String, number: Int, num: Double)
* extends HasNumber
* case class ThirdCase(id: String, number: Int, data: Data)
* extends HasNumber
* object UnionEntity extends MongoEntityCompanion[UnionEntity]
*
* val isThirdCase: MongoDocumentFilter[UnionEntity] =
* UnionEntity.is[ThirdCase]
* val hasNumber: MongoDocumentFilter[UnionEntity] =
* UnionEntity.is[HasNumber]
* }}}
*/
@explicitGenerics
def is[C <: T]: MongoDocumentFilter[E] = macro MongoMacros.isSubtype[C]
/**
* A negated version of [[is]].
*/
@explicitGenerics
def isNot[C <: T]: MongoDocumentFilter[E] = macro MongoMacros.isNotSubtype[C]
}
trait DataTypeDsl[T] extends DataRefDsl[T, T] {
type ThisRef[E0, T0] = MongoRef[E0, T0]
}
| AVSystem/scala-commons | commons-mongo/jvm/src/main/scala/com/avsystem/commons/mongo/typed/DataTypeDsl.scala | Scala | mit | 7,623 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval
import monix.execution.exceptions.DummyException
import concurrent.duration._
import scala.util.{Failure, Success}
object TaskDeferActionSuite extends BaseTestSuite {
test("Task.deferAction works") { implicit s =>
def measureLatency[A](source: Task[A]): Task[(A, Long)] =
Task.deferAction { implicit s =>
val start = s.clockMonotonic(MILLISECONDS)
source.map(a => (a, s.clockMonotonic(MILLISECONDS) - start))
}
val task = measureLatency(Task.now("hello").delayExecution(1.second))
val f = task.runToFuture
s.tick()
assertEquals(f.value, None)
s.tick(1.second)
assertEquals(f.value, Some(Success(("hello", 1000))))
}
test("Task.deferAction protects against user error") { implicit s =>
val dummy = DummyException("dummy")
val task = Task.deferAction(_ => throw dummy)
val f = task.runToFuture
s.tick()
assertEquals(f.value, Some(Failure(dummy)))
}
test("Task.deferAction is stack safe") { implicit sc =>
def loop(n: Int, acc: Int): Task[Int] =
Task.deferAction { _ =>
if (n > 0)
loop(n - 1, acc + 1)
else
Task.now(acc)
}
val f = loop(10000, 0).runToFuture; sc.tick()
assertEquals(f.value, Some(Success(10000)))
}
testAsync("deferAction(local.write) works") { _ =>
import monix.execution.Scheduler.Implicits.global
implicit val opts = Task.defaultOptions.enableLocalContextPropagation
val task = for {
l <- TaskLocal(10)
_ <- Task.deferAction(_ => l.write(100))
_ <- Task.shift
v <- l.read
} yield v
for (v <- task.runToFutureOpt) yield {
assertEquals(v, 100)
}
}
}
| monifu/monifu | monix-eval/shared/src/test/scala/monix/eval/TaskDeferActionSuite.scala | Scala | apache-2.0 | 2,377 |
package dao
import constants.SecurityRoleKey
import generated.Tables.{LinkedAccountRow, SecurityRoleRow, UserRow}
import org.scalatest.Matchers
import play.api.test.WithApplication
import helpers.AwaitHelpers
import be.objectify.deadbolt.scala.models.{Role, Permission}
import scala.concurrent.ExecutionContext.Implicits.global
import helpers.AwaitHelpers._
class UserDaoFunSpec extends AbstractDaoFunSpec with Matchers {
//------------------------------------------------------------------------
// public
//------------------------------------------------------------------------
describe("Create user") {
new WithApplication() {
val dao = daoContext
// ensure repeatability of the test
AwaitHelpers.await(dao.userDao.deleteAll)
val result = (for {
user <- dao.userDao.createAndFetch(UserRow(id = 0L, username = "test", email = "test@test.test", modified = None))
all <- dao.userDao.findAll
} yield (user, all))
val user: UserRow = result._1
val all = result._2
it("user should be correct") {
user.id should equal (1L)
user.username should equal ("test")
user.email should equal ("test@test.test")
user.modified should not be None
}
it("there must be only one user") {
all.size should equal(1)
}
}
}
//------------------------------------------------------------------------
describe("Create user with linked account and security role") {
new WithApplication() {
val dao = daoContext
// ensure repeatability of the test
AwaitHelpers.await(dao.userDao.deleteAll)
// initialize the security role as we know it exists
val securityRole = SecurityRoleRow(id = 1L, name = SecurityRoleKey.USER_ROLE.toString)
val result = (for {
user <- dao.userDao.create(UserRow(id = 0L, username = "test", email = "test@test.test",
active = true, modified = None), securityRole, LinkedAccountRow(0L, "xxx", "password", None))
linkedAccount <- dao.userDao.linkedAccounts(user)
securityRoles <- dao.userDao.roles(user)
permissions <- dao.userDao.permissions(user)
} yield (user, linkedAccount, securityRoles, permissions))
val user: UserRow = result._1
val linkedAccounts: Seq[LinkedAccountRow] = result._2
val securityRoles: Seq[Role] = result._3
val permissions: Seq[Permission] = result._4
it("user should be correct") {
user.id should equal (1L)
user.username should equal ("test")
user.email should equal ("test@test.test")
user.active should be (true)
}
it("user linked account should be correct") {
linkedAccounts.size should equal (1)
val linkedAccount = linkedAccounts.head
linkedAccount.userId should equal (user.id)
linkedAccount.providerUserId should equal("xxx")
linkedAccount.providerKey should equal ("password")
linkedAccount.modified should not be None
}
it("user security roles should be correct") {
securityRoles.size should equal (1)
securityRoles.head.name should equal (SecurityRoleKey.USER_ROLE.toString)
}
it("user permissions should be empty") {
permissions.isEmpty should be (true)
}
}
//------------------------------------------------------------------------
describe("Find active user by provider key and email") {
new WithApplication() {
val dao = daoContext
// reuses the user created in the previous test
val user: Option[UserRow] = (for {
user <- dao.userDao.findActiveByProviderKeyAndEmail("password", "test@test.test")
} yield user)
it("the user was found") {
user should not be (None)
}
}
}
//------------------------------------------------------------------------
describe("Find active user by provider key and password") {
new WithApplication() {
val dao = daoContext
// reuses the user created in the previous test
val user: Option[UserRow] = (for {
user <- dao.userDao.findActiveByProvider("password", "xxx")
} yield user)
it("the user was found") {
user should not be (None)
}
}
}
//------------------------------------------------------------------------
describe("Find user by email") {
new WithApplication() {
val dao = daoContext
// reuses the user created in the previous test
val users: Seq[UserRow] = (for {
user <- dao.userDao.findByEmail("test@test.test")
} yield user)
it("the user was found") {
users.size should equal (1)
}
}
}
//------------------------------------------------------------------------
describe("Merge two users") {
new WithApplication() {
val dao = daoContext
// we know it ...
val sourceUserId = 1L
// reuses the user created in the previous test
val result = (for {
targetUser <- dao.userDao.createAndFetch(UserRow(id = 0L, username = "target", active = true, email = "target@target.target", modified = None))
_ <- dao.userDao.merge(targetUser, dao.userDao.findById(sourceUserId).get)
linkedAccounts <- dao.userDao.linkedAccounts(targetUser)
sourceUser <- dao.userDao.findById(sourceUserId)
} yield (targetUser, linkedAccounts, sourceUser))
val targetUser: UserRow = result._1
val linkedAccounts: Seq[LinkedAccountRow] = result._2
val sourceUser: Option[UserRow] = result._3
it("target user should be correct") {
targetUser.id should equal (2L)
targetUser.username should equal ("target")
targetUser.email should equal ("target@target.target")
targetUser.active should be (true)
targetUser.modified should not be (None)
}
it("target user linked account should be correct") {
linkedAccounts.size should equal (1)
val linkedAccount = linkedAccounts.head
linkedAccount.userId should equal (targetUser.id)
linkedAccount.providerUserId should equal("xxx")
linkedAccount.providerKey should equal ("password")
linkedAccount.modified should not be (None)
}
it("source user should be inactive") {
sourceUser should not be None
sourceUser.get.active should be (false)
}
}
}
}
}
| bravegag/play-authenticate-usage-scala | test/dao/UserDaoFunSpec.scala | Scala | apache-2.0 | 6,556 |
package io.skysail.app.demo
import akka.actor.{ ActorLogging, Props }
import akka.persistence.PersistentActor
import io.skysail.app.demo.UserRepository.{ AddUser, ConfirmAddUser, GetUsers }
object UserRepository {
  // Name under which this actor is expected to be created in the actor system.
  final val Name = "user-repository"
  def props(): Props = Props(new UserRepository())
  // Protocol: request the current set of known users; replied to with Set[Contact].
  case object GetUsers
  // Protocol: add a user; deliveryId is echoed back in ConfirmAddUser once persisted.
  final case class AddUser(deliveryId: Long, user: Contact)
  // Protocol: confirmation that the AddUser with the same deliveryId was persisted.
  final case class ConfirmAddUser(deliveryId: Long)
}
/**
 * Event-sourced repository of [[Contact]]s. Commands are defined in the
 * companion object; the only persisted event type is the `Contact` itself.
 */
class UserRepository extends PersistentActor with ActorLogging {

  // Reuse the companion's Name constant (same value as before) so the
  // persistence id and the actor name can never drift apart.
  override val persistenceId: String = UserRepository.Name

  // In-memory projection of all persisted contacts, rebuilt on recovery.
  private var users = Set.empty[Contact]

  override def receiveCommand: Receive = {
    case GetUsers =>
      sender() ! users
    case AddUser(id, user) =>
      // Fixed log-message typo: "email;" -> "email:".
      log.info(s"Adding $id new user with email: ${user.email}")
      persist(user) { persistedUser =>
        // Route the persisted event through the recovery handler so command-time
        // and replay-time state updates are guaranteed to stay identical.
        receiveRecover(persistedUser)
        sender() ! ConfirmAddUser(id)
      }
  }

  override def receiveRecover: Receive = {
    case user: Contact => users += user
  }
}
| evandor/skysail-core | skysail.app.demo/src/io/skysail/app/demo/UserRepository.scala | Scala | apache-2.0 | 1,009 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.keras.layers
import com.intel.analytics.bigdl.dllib.nn.{Square => BSquare}
import com.intel.analytics.bigdl.dllib.keras.layers.{Square => ZSquare}
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.Shape
import com.intel.analytics.bigdl.dllib.keras.ZooSpecHelper
import com.intel.analytics.bigdl.dllib.keras.serializer.ModuleSerializationTest
// Compares the Keras-style zoo Square layer against BigDL's nn.Square for
// identical forward output and gradInput on small random/ranged tensors.
class SquareSpec extends ZooSpecHelper {
  "Square input size (1, 3) Zoo" should "be the same as BigDL" in {
    val blayer = BSquare[Float]()
    val zlayer = ZSquare[Float](inputShape = Shape(3))
    zlayer.build(Shape(-1, 3))
    zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 3))
    // 3 values (1..3) exactly fill the (1, 3) tensor
    val input = Tensor[Float](Array(1, 3)).range(1, 3, 1)
    compareOutputAndGradInput(blayer, zlayer, input)
  }
  "Square input size (2, 5) Zoo" should "be the same as BigDL" in {
    val blayer = BSquare[Float]()
    val zlayer = ZSquare[Float](inputShape = Shape(5))
    zlayer.build(Shape(-1, 5))
    zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 5))
    // NOTE(review): range(2, 12, 1) produces 11 values but the (2, 5) tensor
    // holds 10 elements — the first test's range exactly matches its size.
    // TODO confirm BigDL Tensor.range resizes the tensor; otherwise this
    // should probably be range(2, 11, 1).
    val input = Tensor[Float](Array(2, 5)).range(2, 12, 1)
    compareOutputAndGradInput(blayer, zlayer, input)
  }
}
// Round-trips a built Square layer through module serialization and checks
// that the deserialized module produces the same output for a random input.
class SquareSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    val square = Square[Float](inputShape = Shape(5))
    square.build(Shape(2, 5))
    val sample = Tensor[Float](2, 5).rand()
    runSerializationTest(square, sample)
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/keras/layers/SquareSpec.scala | Scala | apache-2.0 | 2,093 |
package org.jetbrains.plugins.scala
package lang
package resolve
package processor
import com.intellij.openapi.progress.ProgressManager
import com.intellij.psi._
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScPrimaryConstructor
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScReferencePattern
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameterClause
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypedDefinition
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScTypeDefinition}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager
import org.jetbrains.plugins.scala.lang.psi.implicits.ImplicitResolveResult
import org.jetbrains.plugins.scala.lang.psi.types.Compatibility.Expression
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.api.designator.ScDesignatorType
import org.jetbrains.plugins.scala.lang.psi.types.api.{Any, Nothing, _}
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.{Parameter, ScMethodType, ScTypePolymorphicType}
import org.jetbrains.plugins.scala.lang.psi.types.result._
import org.jetbrains.plugins.scala.project.ProjectContext
import scala.collection.Set
import scala.collection.mutable.ArrayBuffer
/**
 * Implements Scala's "most specific" overload/implicit resolution ordering
 * (SLS 6.26.3) for sets of [[ScalaResolveResult]]s: a candidate wins when it
 * is at least as specific as every other candidate and/or is defined in a
 * more derived class or companion.
 *
 * @param elem   the call-site element used as project/resolve-scope context
 * @param length number of supplied arguments, used to pad parameter lists
 *               when comparing applicability
 *
 * User: Alexander Podkhalyuzin
 * Date: 26.04.2010
 */
case class MostSpecificUtil(elem: PsiElement, length: Int) {
  implicit def ctx: ProjectContext = elem

  // Picks the single most specific candidate among resolve results, unwrapping
  // inner results (e.g. apply methods) when expandInnerResult is set.
  // NOTE(review): hasTypeParametersCall is not referenced in this body.
  def mostSpecificForResolveResult(applicable: Set[ScalaResolveResult],
                                   hasTypeParametersCall: Boolean = false,
                                   expandInnerResult: Boolean = true): Option[ScalaResolveResult] = {
    mostSpecificGeneric(applicable.map(r => r.innerResolveResult match {
      case Some(rr) if expandInnerResult =>
        new InnerScalaResolveResult(rr.element, rr.implicitConversionClass, r, r.substitutor)
      case _ =>
        new InnerScalaResolveResult(r.element, r.implicitConversionClass, r, r.substitutor)
    }), noImplicit = false).map(_.repr)
  }

  // Same selection, but for implicit-parameter candidates: each result carries
  // its own substitutor and implicit checking is disabled (noImplicit = true).
  def mostSpecificForImplicitParameters(applicable: Set[(ScalaResolveResult, ScSubstitutor)]): Option[ScalaResolveResult] = {
    mostSpecificGeneric(applicable.map{case (r, subst) => r.innerResolveResult match {
      case Some(rr) => new InnerScalaResolveResult(rr.element, rr.implicitConversionClass, r, subst, implicitCase = true)
      case None => new InnerScalaResolveResult(r.element, r.implicitConversionClass, r, subst, implicitCase = true)
    }}, noImplicit = true).map(_.repr)
  }

  // Drops candidates less specific than filterRest, then returns the next
  // "layer" winner among the remainder plus the still-unordered rest.
  def nextLayerSpecificForImplicitParameters(filterRest: Option[ScalaResolveResult],
                                             rest: Seq[ScalaResolveResult]): (Option[ScalaResolveResult], Seq[ScalaResolveResult]) = {
    val (next, r) = nextLayerSpecificGeneric(filterRest.map(toInnerSRR), rest.map(toInnerSRR))
    (next.map(_.repr), r.map(_.repr))
  }

  // Wraps a ScalaResolveResult for the generic comparison machinery
  // (always treated as the implicit case with an empty substitutor).
  private def toInnerSRR(r: ScalaResolveResult): InnerScalaResolveResult[ScalaResolveResult] = {
    r.innerResolveResult match {
      case Some(rr) => new InnerScalaResolveResult(rr.element, rr.implicitConversionClass, r, ScSubstitutor.empty, implicitCase = true)
      case None => new InnerScalaResolveResult(r.element, r.implicitConversionClass, r, ScSubstitutor.empty, implicitCase = true)
    }
  }

  def nextMostSpecific(rest: Set[ScalaResolveResult]): Option[ScalaResolveResult] = {
    nextMostSpecificGeneric(rest.map(toInnerSRR)).map(_.repr)
  }

  // Predicate: candidate is NOT more specific than `result` (implicits off).
  def notMoreSpecificThan(result: ScalaResolveResult): (ScalaResolveResult) => Boolean = {
    val inner = toInnerSRR(result)
    cand => !isMoreSpecific(inner, toInnerSRR(cand), checkImplicits = false)
  }

  // Keeps only those of `rest` that `result` does not strictly beat.
  def filterLessSpecific(result: ScalaResolveResult, rest: Set[ScalaResolveResult]): Set[ScalaResolveResult] = {
    val inners = rest.map(toInnerSRR)
    val innerResult = toInnerSRR(result)
    inners.filter(!isMoreSpecific(innerResult, _, checkImplicits = false)).map(_.repr)
  }

  // Most specific implicit conversion; single call-by-name parameters are
  // tracked because a by-name implicit loses to an equivalent by-value one.
  def mostSpecificForImplicit(applicable: Set[ImplicitResolveResult]): Option[ImplicitResolveResult] = {
    mostSpecificGeneric(applicable.map(r => {
      var callByName = false
      def checkCallByName(clauses: Seq[ScParameterClause]): Unit = {
        if (clauses.nonEmpty && clauses.head.parameters.length == 1 && clauses.head.parameters.head.isCallByNameParameter) {
          callByName = true
        }
      }
      r.element match {
        case f: ScFunction => checkCallByName(f.paramClauses.clauses)
        case f: ScPrimaryConstructor => checkCallByName(f.effectiveParameterClauses)
        case _ =>
      }
      new InnerScalaResolveResult(r.element, None, r, r.substitutor, callByName, implicitCase = true)
    }), noImplicit = true).map(_.repr)
  }

  // Uniform wrapper carrying everything the specificity comparison needs,
  // while remembering the original result (`repr`) to hand back to callers.
  private class InnerScalaResolveResult[T](val element: PsiNamedElement, val implicitConversionClass: Option[PsiClass],
                                           val repr: T, val substitutor: ScSubstitutor,
                                           val callByNameImplicit: Boolean = false,
                                           val implicitCase: Boolean = false)

  // Core of SLS 6.26.3 "as specific as": checks whether an invocation of r1
  // would be applicable to r2's parameter types (with existential abstraction
  // over r2's type parameters and undefined types for r1's).
  private def isAsSpecificAs[T](r1: InnerScalaResolveResult[T], r2: InnerScalaResolveResult[T],
                                checkImplicits: Boolean): Boolean = {
    def lastRepeated(params: Seq[Parameter]): Boolean = {
      val lastOption: Option[Parameter] = params.lastOption
      if (lastOption.isEmpty) return false
      lastOption.get.isRepeated
    }
    (r1.element, r2.element) match {
      case (m1@(_: PsiMethod | _: ScFun), m2@(_: PsiMethod | _: ScFun)) =>
        val (t1, t2) = (r1.substitutor.subst(getType(m1, r1.implicitCase)), r2.substitutor.subst(getType(m2, r2.implicitCase)))
        // Left(params): a method type's parameter list; Right(tp): a plain type.
        // `existential = true` abstracts type params as existential arguments,
        // otherwise they become undefined types to be inferred.
        def calcParams(tp: ScType, existential: Boolean): Either[Seq[Parameter], ScType] = {
          tp match {
            case ScMethodType(_, params, _) => Left(params)
            case ScTypePolymorphicType(ScMethodType(_, params, _), typeParams) =>
              if (!existential) {
                val s: ScSubstitutor = typeParams.foldLeft(ScSubstitutor.empty) {
                  (subst: ScSubstitutor, tp: TypeParameter) =>
                    subst.bindT(tp.nameAndId,
                      UndefinedType(TypeParameterType(tp.psiTypeParameter, None)))
                }
                Left(params.map(p => p.copy(paramType = s.subst(p.paramType))))
              } else {
                val s: ScSubstitutor = typeParams.foldLeft(ScSubstitutor.empty) {
                  (subst: ScSubstitutor, tp: TypeParameter) =>
                    subst.bindT(tp.nameAndId,
                      ScExistentialArgument(tp.name, List.empty /* todo? */ , tp.lowerType, tp.upperType))
                }
                val arguments = typeParams.toList.map(tp =>
                  ScExistentialArgument(tp.name, List.empty /* todo? */ , s.subst(tp.lowerType), s.subst(tp.upperType)))
                Left(params.map(p => p.copy(paramType = ScExistentialType(s.subst(p.paramType), arguments))))
              }
            case ScTypePolymorphicType(internal, typeParams) =>
              if (!existential) {
                val s: ScSubstitutor = typeParams.foldLeft(ScSubstitutor.empty) {
                  (subst: ScSubstitutor, tp: TypeParameter) =>
                    subst.bindT(tp.nameAndId,
                      UndefinedType(TypeParameterType(tp.psiTypeParameter, None)))
                }
                Right(s.subst(internal))
              } else {
                val s: ScSubstitutor = typeParams.foldLeft(ScSubstitutor.empty) {
                  (subst: ScSubstitutor, tp: TypeParameter) =>
                    subst.bindT(tp.nameAndId,
                      ScExistentialArgument(tp.name, List.empty /* todo? */ , tp.lowerType, tp.upperType))
                }
                val arguments = typeParams.toList.map(tp =>
                  ScExistentialArgument(tp.name, List.empty /* todo? */ , s.subst(tp.lowerType), s.subst(tp.upperType)))
                Right(ScExistentialType(s.subst(internal), arguments))
              }
            case _ => Right(tp)
          }
        }
        val conformance = (calcParams(t1, existential = true), calcParams(t2, existential = false)) match {
          case (Left(p1), Left(p2)) =>
            var (params1, params2) = (p1, p2)
            if ((t1.isInstanceOf[ScTypePolymorphicType] && t2.isInstanceOf[ScTypePolymorphicType] ||
              (!(m1.isInstanceOf[ScFunction] || m1.isInstanceOf[ScFun] || m1.isInstanceOf[ScPrimaryConstructor]) ||
                !(m2.isInstanceOf[ScFunction] || m2.isInstanceOf[ScFun] || m2.isInstanceOf[ScPrimaryConstructor]))) &&
              (lastRepeated(params1) ^ lastRepeated(params2))) return lastRepeated(params2) //todo: this is hack!!! see SCL-3846, SCL-4048
            // If only r1 is varargs, wrap its repeated parameter in Seq[...] so
            // the comparison against r2's non-varargs list is meaningful.
            if (lastRepeated(params1) && !lastRepeated(params2)) params1 = params1.map {
              case p: Parameter if p.isRepeated =>
                val seq = ScalaPsiManager.instance(r1.element.getProject).getCachedClass(r1.element.resolveScope,
                  "scala.collection.Seq").orNull
                if (seq != null) {
                  val newParamType = p.paramType match {
                    case ScExistentialType(q, wilds) =>
                      ScExistentialType(ScParameterizedType(ScDesignatorType(seq), Seq(q)), wilds)
                    case paramType => ScParameterizedType(ScDesignatorType(seq), Seq(paramType))
                  }
                  Parameter(p.name, p.deprecatedName, newParamType, p.expectedType,
                    p.isDefault, isRepeated = false, isByName = p.isByName)
                }
                else p
              case p => p
            }
            // Pad r1's argument list up to the call-site arity with its last
            // parameter type (covers varargs/under-application).
            val i: Int = if (params1.nonEmpty) 0.max(length - params1.length) else 0
            val default: Expression =
              new Expression(if (params1.nonEmpty) params1.last.paramType else Nothing, elem)
            val exprs: Seq[Expression] = params1.map(p => new Expression(p.paramType, elem)) ++
              Seq.fill(i)(default)
            Compatibility.checkConformance(checkNames = false, params2, exprs, checkImplicits)
          case (Right(type1), Right(type2)) =>
            type1.conforms(type2, ScUndefinedSubstitutor()) //todo: with implicits?
          //todo this is possible, when one variant is empty with implicit parameters, and second without parameters.
          //in this case it's logical that method without parameters must win...
          case (Left(_), Right(_)) if !r1.implicitCase => return false
          case _ => return true
        }
        var u = conformance._2
        if (!conformance._1) return false
        // Feed r2's type-parameter bounds back into the undefined-substitutor
        // and require it to still have a solution.
        t2 match {
          case ScTypePolymorphicType(_, typeParams) =>
            u.getSubstitutor match {
              case Some(uSubst) =>
                val nameAndIds = typeParams.map(_.nameAndId).toSet
                def hasRecursiveTypeParameters(typez: ScType): Boolean = typez.hasRecursiveTypeParameters(nameAndIds)
                typeParams.foreach(tp => {
                  if (tp.lowerType != Nothing) {
                    val substedLower = uSubst.subst(tp.lowerType)
                    if (!hasRecursiveTypeParameters(tp.lowerType)) {
                      u = u.addLower(tp.nameAndId, substedLower, additional = true)
                    }
                  }
                  if (tp.upperType != Any) {
                    val substedUpper = uSubst.subst(tp.upperType)
                    if (!hasRecursiveTypeParameters(tp.upperType)) {
                      u = u.addUpper(tp.nameAndId, substedUpper, additional = true)
                    }
                  }
                })
              case None => return false
            }
          case _ =>
        }
        u.getSubstitutor.isDefined
      // A non-method is always as specific as a method.
      case (_, _: PsiMethod) => true
      // Two non-methods: plain type conformance.
      case (e1, e2) =>
        val t1: ScType = getType(e1, r1.implicitCase)
        val t2: ScType = getType(e2, r2.implicitCase)
        t1.conforms(t2)
    }
  }

  // Containing class of a result's element, if any (via its name context).
  private def getClazz[T](r: InnerScalaResolveResult[T]): Option[PsiClass] = {
    val element = ScalaPsiUtil.nameContext(r.element)
    element match {
      case memb: PsiMember => Option(memb.containingClass)
      case _ => None
    }
  }

  /**
   * c1 is a subclass of c2, or
   * c1 is a companion object of a class derived from c2, or
   * c2 is a companion object of a class from which c1 is derived.
   *
   * @return true is c1 is derived from c2, false if c1 or c2 is None
   */
  def isDerived(c1: Option[PsiClass], c2: Option[PsiClass]): Boolean = {
    (c1, c2) match {
      case (Some(clazz1), Some(clazz2)) =>
        if (clazz1 == clazz2) return false
        if (ScalaPsiUtil.isInheritorDeep(clazz1, clazz2)) return true
        (clazz1, clazz2) match {
          case (clazz1: ScObject, _) => isDerived(ScalaPsiUtil.getCompanionModule(clazz1), Some(clazz2))
          case (_, clazz2: ScObject) => isDerived(Some(clazz1), ScalaPsiUtil.getCompanionModule(clazz2))
          case _ => false
        }
      case _ => false
    }
  }

  // SLS relative weight: +1 if r1 is as specific as r2, +1 if r1's class
  // derives from r2's class.
  private def relativeWeight[T](r1: InnerScalaResolveResult[T], r2: InnerScalaResolveResult[T],
                                checkImplicits: Boolean): Int = {
    val s1 = if (isAsSpecificAs(r1, r2, checkImplicits)) 1 else 0
    val s2 = if (isDerived(getClazz(r1), getClazz(r2))) 1 else 0
    s1 + s2
  }

  // r1 strictly beats r2: tie-breaks on implicit-conversion class hierarchy
  // and by-name implicits before comparing relative weights both ways.
  private def isMoreSpecific[T](r1: InnerScalaResolveResult[T], r2: InnerScalaResolveResult[T], checkImplicits: Boolean): Boolean = {
    ProgressManager.checkCanceled()
    (r1.implicitConversionClass, r2.implicitConversionClass) match {
      case (Some(t1), Some(t2)) => if (ScalaPsiUtil.isInheritorDeep(t1, t2)) return true
      case _ =>
    }
    if (r1.callByNameImplicit ^ r2.callByNameImplicit) return !r1.callByNameImplicit
    val weightR1R2 = relativeWeight(r1, r2, checkImplicits)
    val weightR2R1 = relativeWeight(r2, r1, checkImplicits)
    weightR1R2 > weightR2R1
  }

  // Finds a candidate more specific than every other one. First pass ignores
  // implicit conversions; if none wins and noImplicit is false, retry with
  // implicits enabled.
  private def mostSpecificGeneric[T](applicable: Set[InnerScalaResolveResult[T]],
                                     noImplicit: Boolean): Option[InnerScalaResolveResult[T]] = {
    def calc(checkImplicits: Boolean): Option[InnerScalaResolveResult[T]] = {
      val a1iterator = applicable.iterator
      while (a1iterator.hasNext) {
        val a1 = a1iterator.next()
        var break = false
        val a2iterator = applicable.iterator
        while (a2iterator.hasNext && !break) {
          val a2 = a2iterator.next()
          if (a1 != a2 && !isMoreSpecific(a1, a2, checkImplicits)) break = true
        }
        if (!break) return Some(a1)
      }
      None
    }
    val result = calc(checkImplicits = false)
    if (!noImplicit && result.isEmpty) calc(checkImplicits = true)
    else result
  }

  // After filtering out candidates beaten by filterRest, picks the candidate
  // whose class is most derived and returns it with the remaining candidates.
  private def nextLayerSpecificGeneric[T](filterRest: Option[InnerScalaResolveResult[T]],
                                          rest: Seq[InnerScalaResolveResult[T]]): (Option[InnerScalaResolveResult[T]], Seq[InnerScalaResolveResult[T]]) = {
    val filteredRest = filterRest match {
      case Some(r) => rest.filter(!isMoreSpecific(r, _, checkImplicits = false))
      case _ => rest
    }
    if (filteredRest.isEmpty) return (None, Seq.empty)
    if (filteredRest.length == 1) return (Some(filteredRest.head), Seq.empty)
    var found = filteredRest.head
    val iter = filteredRest.tail.iterator
    val out: ArrayBuffer[InnerScalaResolveResult[T]] = new ArrayBuffer[InnerScalaResolveResult[T]]()
    while (iter.hasNext) {
      val res = iter.next()
      if (isDerived(getClazz(res), getClazz(found))) {
        out += found
        found = res
      } else out += res
    }
    (Some(found), out)
  }

  // Picks the candidate declared in the most derived class (class hierarchy
  // only — does not compare parameter specificity).
  private def nextMostSpecificGeneric[T](rest: Set[InnerScalaResolveResult[T]]): Option[InnerScalaResolveResult[T]] = {
    if (rest.isEmpty) return None
    if (rest.size == 1) return Some(rest.head)
    val iter = rest.iterator
    var foundMax = iter.next
    while (iter.hasNext) {
      val res = iter.next()
      if (isDerived(getClazz(res), getClazz(foundMax)))
        foundMax = res
    }
    Some(foundMax)
  }

  //todo: implement existential dual
  // Type of a named element as used in specificity comparison. For the
  // implicit case, a method type with implicit parameters collapses to its
  // result type (the implicit section is filled by the compiler, not callers).
  def getType(e: PsiNamedElement, implicitCase: Boolean): ScType = {
    val res = e match {
      case fun: ScFun => fun.polymorphicType
      case f: ScFunction if f.isConstructor =>
        f.containingClass match {
          case td: ScTypeDefinition if td.hasTypeParameters =>
            ScTypePolymorphicType(f.methodType, td.typeParameters.map(TypeParameter(_)))
          case _ => f.polymorphicType()
        }
      case f: ScFunction => f.polymorphicType()
      case p: ScPrimaryConstructor => p.polymorphicType
      case m: PsiMethod => ResolveUtils.javaPolymorphicType(m, ScSubstitutor.empty, elem.resolveScope)
      case refPatt: ScReferencePattern => refPatt.getParent /*id list*/ .getParent match {
        case pd: ScPatternDefinition if PsiTreeUtil.isContextAncestor(pd, elem, true) =>
          pd.declaredType.getOrElse(Nothing)
        case vd: ScVariableDefinition if PsiTreeUtil.isContextAncestor(vd, elem, true) =>
          vd.declaredType.getOrElse(Nothing)
        case _ => refPatt.`type`().getOrAny
      }
      case typed: ScTypedDefinition => typed.`type`().getOrAny
      case f: PsiField => f.getType.toScType()
      case _ => Nothing
    }
    res match {
      case ScMethodType(retType, _, true) if implicitCase => retType
      case ScTypePolymorphicType(ScMethodType(retType, _, true), typeParameters) if implicitCase =>
        ScTypePolymorphicType(retType, typeParameters)
      case tp => tp
    }
  }
}
| triplequote/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/resolve/processor/MostSpecificUtil.scala | Scala | apache-2.0 | 17,895 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sumologic.shellbase
import org.apache.commons.io.IOUtils
/** Loads an ASCII-art banner from a classpath resource. */
class ShellBanner(resource: String) {

  /**
   * Reads the banner text from the configured classpath resource.
   *
   * @return the banner contents
   * @throws IllegalArgumentException if the resource cannot be found
   */
  def load(): String = {
    val in = getClass.getClassLoader.getResourceAsStream(resource)
    // getResourceAsStream returns null for a missing resource; fail with a
    // clear message instead of an NPE inside IOUtils.
    require(in != null, s"Banner resource not found: $resource")
    try {
      IOUtils.toString(in)
    } finally {
      // Close even when reading throws; the original leaked the stream on error.
      in.close()
    }
  }
}
object ShellBanner {
  // Pre-loaded warning banner; lazy so the resource is read only on first use.
  lazy val Warning = new ShellBanner("banners/warning.txt").load()
}
| SumoLogic/shellbase | shellbase-core/src/main/scala/com/sumologic/shellbase/ShellBanner.scala | Scala | apache-2.0 | 1,171 |
package nikhil.tcp.client
import akka.actor.{ActorSystem, Props}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.language.postfixOps
import scalaz.Scalaz._
import scalaz._
/**
* @author Nikhil
*/
object ClientUtils {
  /**
   * Start actors with interval between 2ms and 100ms.
   *
   * The interval is 10000 / numberOfActors, clamped to [2, 100] ms, so actor
   * creation is spread out rather than bursting all at once.
   *
   * @param numberOfActors Number of actors to start.
   * @param props Actor's Props object.
   * @param system Actor system.
   */
  def startActors(numberOfActors: Int, props: Props)(implicit system: ActorSystem): Unit = {
    val ratio = 10000 / numberOfActors
    val interval: Int = if (ratio > 100) 100 else if(ratio < 2) 2 else ratio
    (1 to numberOfActors).foreach(n => system.scheduler.scheduleOnce((n * interval) milliseconds)(system.actorOf(props)))
  }
  /**
   * Parse arguments.
   * Expects:
   * 0 - host (string)
   * 1 - port (int)
   * 2 - number of connections (int)
   * 3 - sleep time in milliseconds (int)
   *
   * Individual failures are accumulated via the scalaz applicative builder.
   *
   * @param args Arguments to parse
   * @return Validation object with host, port, number of connections and sleep time.
   */
  def parseArguments(args: Array[String]): Validation[String, (Host, Port, NumberOfClients, FiniteDuration)] = {
    // Missing positional args surface as IndexOutOfBoundsException from args(i);
    // convert that into a named validation failure.
    def parseArgument[A](
      argName: String,
      arg: => String,
      validator: (String) => Validation[String, A]): Validation[String, A] = try {
      validator(arg)
    } catch {
      case _: IndexOutOfBoundsException => Failure(s"$argName is not specified\\n")
    }
    (parseArgument[String]("host", args(0), notEmpty) ⊛
      parseArgument[Int]("port", args(1), parseInt) ⊛
      parseArgument[Int]("number of connections", args(2), parseInt) ⊛
      parseArgument[FiniteDuration]("sleep time", args(3), parseDuration)
      ) {
      (_, _, _, _)
    }
  }
  /**
   * Parse string to integer.
   *
   * @param string String to parse.
   * @return Validation object.
   */
  def parseInt(string: String): Validation[String, Int] = try {
    Success(string.toInt)
  } catch {
    case e: NumberFormatException =>
      Failure(s"$string is not an integer number\\n")
  }
  /**
   * Parse string to int but return it as a duration in milliseconds.
   *
   * @param string String to parse.
   * @return Validation object.
   */
  def parseDuration(string: String): Validation[String, FiniteDuration] = try {
    Success(string.toInt milliseconds)
  } catch {
    case e: NumberFormatException =>
      Failure(s"$string is not an integer number\\n")
  }
  /**
   * Validate if string is not empty.
   *
   * NOTE(review): unlike the other failure messages, this one has no trailing
   * newline — confirm whether that inconsistency is intentional.
   *
   * @param string String to validate.
   * @return Validation object.
   */
  def notEmpty(string: String): Validation[String, String] = if (string.isEmpty) Failure("string is empty") else Success(string)
  /**
   * Call function f only if the validation function returns true otherwise return the default value.
   *
   * @param data Data object.
   * @param validation Data validation function.
   * @param f Conversion function from data type to type B
   * @param default Default value.
   * @tparam A Data type.
   * @tparam B Result type.
   * @return result of function f or default value.
   */
  def applyIf[A, B](data: A, validation: A => Boolean, f: A => B, default: B): B = if (validation(data)) f(data) else default
}
| nikhilRP/akka_requests | src/main/scala/nikhil/tcp/client/ClientUtils.scala | Scala | apache-2.0 | 3,287 |
package oriana
import akka.actor.ActorSystem
import akka.stream.{ActorMaterializer, Materializer}
import akka.util.Timeout
import org.scalatest.{BeforeAndAfterAll, Suite}
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
/**
 * Mixin providing a per-suite ActorSystem (plus dispatcher, materializer and a
 * default ask timeout), created before the tests run and terminated afterwards.
 */
trait TestActorSystem extends BeforeAndAfterAll { self: Suite =>
  private var _system: ActorSystem = _

  implicit def system: ActorSystem = _system
  implicit lazy val ec: ExecutionContext = system.dispatcher
  implicit lazy val mat: Materializer = ActorMaterializer()
  implicit val timeout: Timeout = Timeout(10.seconds)

  override protected def beforeAll(): Unit = {
    // Call super first so stacked BeforeAndAfterAll traits run their setup,
    // mirroring the super.afterAll() call below (the original skipped this).
    super.beforeAll()
    // The filter keeps only ASCII letters so the class name yields a valid
    // actor-system name; membership is what matters, not the string's order.
    _system = ActorSystem("test-" + getClass.getName.filter("abcdefghijklmnopqrstuvwxzyABCDEFGHIJKLMNOPQRSTUVWXYZ".contains(_)))
  }

  override protected def afterAll() = {
    system.terminate()
    super.afterAll()
  }
}
| Norwae/oriana | src/test/scala/oriana/TestActorSystem.scala | Scala | bsd-2-clause | 832 |
package pl.writeonly.son2.funs.liners
import org.scalatest.Outcome
import pl.writeonly.son2.apis.converters.ConverterFake
import pl.writeonly.scalaops.specs.fixture.WhiteAssertSpec
import pl.writeonly.scalaops.pipe.Pipe
// Fixture-based spec: each test receives a fresh LinerOpt wrapping a fake
// converter (piped in via the |> operator from the Pipe mixin).
class LinerOptAssertSpec extends WhiteAssertSpec with Pipe {
  override type FixtureParam = LinerOpt
  override protected def withFixture(test: OneArgTest): Outcome =
    new LinerOpt(new ConverterFake) |> test
  "A LinerOpt" when {
    "apply empty string" should {
      "return ?" in { liner =>
        // An empty input line is expected to come back as just a line break.
        assertResult("\\n")(liner.apply(""))
      }
    }
  }
}
| writeonly/son2 | scallions-core/scallions-funs/src/test/scala/pl/writeonly/son2/funs/liners/LinerOptAssertSpec.scala | Scala | apache-2.0 | 588 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.examples.featurespec.markup
import collection.mutable
import org.scalatest._
// Documentation example: demonstrates FeatureSpec with GivenWhenThen plus the
// markup(...) facility for embedding Markdown in the test report. The markup
// bodies are runtime strings and are left exactly as written.
class SetSpec extends FeatureSpec with GivenWhenThen {
  markup { """
Mutable Set
-----------
A set is a collection that contains no duplicate elements.
To implement a concrete mutable set, you need to provide implementations
of the following methods:
def contains(elem: A): Boolean
def iterator: Iterator[A]
def += (elem: A): this.type
def -= (elem: A): this.type
If you wish that methods like `take`,
`drop`, `filter` return the same kind of set,
you should also override:
def empty: This
It is also good idea to override methods `foreach` and
`size` for efficiency.
""" }
  feature("An element can be added to an empty mutable Set") {
    scenario("When an element is added to an empty mutable Set") {
      Given("an empty mutable Set")
      val set = mutable.Set.empty[String]
      When("an element is added")
      set += "clarity"
      Then("the Set should have size 1")
      assert(set.size === 1)
      And("the Set should contain the added element")
      assert(set.contains("clarity"))
      markup("This test finished with a **bold** statement!")
    }
  }
}
| dotty-staging/scalatest | examples/src/test/scala/org/scalatest/examples/featurespec/markup/SetSpec.scala | Scala | apache-2.0 | 1,816 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 Ben Howell
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.benhowell.diminutives.core
import java.io._
/**
* Created by Ben Howell [ben@benhowell.net] on 25-Mar-2014.
*/
/**
 * Created by Ben Howell [ben@benhowell.net] on 25-Mar-2014.
 */
object Logger {

  /**
   * Builds an event callback that writes "[topic]payload" lines to the given
   * stream. I/O failures are reported to stdout rather than thrown.
   */
  def onEvent(ps: PrintStream) = (topic: String, payload: Any) => {
    try {
      // Bug fix: the original printed the topic twice ("[" + topic + "]" + topic)
      // and silently dropped the payload argument.
      ps.println("[" + topic + "]" + payload)
    }
    catch {
      case ioe: IOException => println("IOException: " + ioe.toString)
      // The original second clause also matched IOException and was unreachable;
      // broaden it so non-IO failures are reported as intended.
      case e: Exception => println("Exception: " + e.toString)
    }
  }

  /** Closes the stream used by the event callback. */
  def stop(ps: PrintStream) = ps.close()
}
| benhowell/diminutives | src/main/scala/net/benhowell/diminutives/core/Logger.scala | Scala | mit | 1,619 |
import scala.reflect.macros.blackbox.Context
// NOTE(review): this appears to be a compiler *negative* test — each
// `def ... = macro <prefix>.foo` references its macro implementation through a
// val/instance/nested-object prefix or with mismatched arities, which Scala 2
// rejects (macro impls must be static, stable references with matching
// signatures). The invalid references look intentional; do not "fix" them.
// TODO confirm against the repository's test/files/neg expectations.
object Macros1 {
  val impls = new Impls1
  def foo(x: Any) = macro impls.foo
}
object Macros2 {
  val impls = Impls2
  def foo(x: Any) = macro impls.foo
}
class Macros3 {
  object Impls3 {
    def foo(c: Context)(x: c.Expr[Any]) = ???
  }
  def foo(x: Any) = macro Impls3.foo
}
class Macros4 extends MacroHelpers {
  def foo(x: Any) = macro Impls4.foo
}
object Macros5 {
  def foo(x: Any) = macro Impls5.foo
  def foo(x: Any, y: Any) = macro Impls5.foo
}
object Macros6 {
  def foo1 = macro Impls6.fooEmpty
  def bar1() = macro Impls6.fooNullary
}
object Macros7 {
  def foo = macro Impls7.foo[String]
}
object Test extends App {
  println(Macros1.foo(42))
  println(Macros2.foo(42))
  println(new Macros3().foo(42))
  println(new Macros4().foo(42))
  println(Macros5.foo(42))
  println(Macros6.foo1)
  println(Macros6.bar1)
  println(Macros6.bar1())
  println(Macros7.foo)
}
package foo {
object Test extends App {
def foo = macro Impls8.impl
}
} | felixmulder/scala | test/files/neg/macro-invalidimpl/Macros_Test_2.scala | Scala | bsd-3-clause | 1,011 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils.tf.loaders
import com.intel.analytics.bigdl.dllib.tensor.Tensor
class MinimumSpec extends BinaryOpBaseSpec {

  // Bug fix: this spec is for the Minimum op loader, but the original returned
  // "Maximum" (apparent copy-paste from MaximumSpec), so Minimum was never
  // actually exercised.
  override def getOpName: String = "Minimum"

  // Two same-shaped random float vectors for the element-wise binary op.
  override def getInputs: Seq[Tensor[_]] =
    Seq(Tensor[Float](4).rand(), Tensor[Float](4).rand())
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/utils/tf/loaders/MinimumSpec.scala | Scala | apache-2.0 | 908 |
package com.dys.chatwork4s.http.parameters
import com.dys.chatwork4s.enum.IconPreset.IconPreset
/**
 * Parameters for creating a new group chat.
 *
 * (Translated from the original Japanese documentation.)
 *
 * @param name        Group chat name: the chat name for the group chat to create.
 * @param adminIds    Admin-privileged users: account IDs of participating members
 *                    to grant admin rights. At least one must be specified.
 * @param description Chat description: free-text summary of the group chat.
 * @param iconPreset  Icon type: preset icon for the group chat.
 * @param memberIds   Member-privileged users: account IDs of participating members
 *                    to grant regular member rights.
 * @param readonlyIds Read-only users: account IDs of participating members to
 *                    grant read-only rights.
 */
case class CreateRoom(
  name: String,
  adminIds: Seq[Int],
  description: Option[String] = None,
  iconPreset: Option[IconPreset] = None,
  memberIds: Option[Seq[Int]] = None,
  readonlyIds: Option[Seq[Int]] = None
) extends HttpParameter {
  // Serializes only the populated fields; list-valued fields are joined with
  // commas as the ChatWork API expects.
  def toParameters: Seq[(String, String)] = singleParameter(
    ("name", Some(name)),
    ("members_admin_ids", Some(adminIds.mkString(","))),
    ("description", description),
    ("icon_preset", iconPreset.map(_.name)),
    ("members_member_ids", memberIds.map(_.mkString(","))),
    ("members_readonly_ids", readonlyIds.map(_.mkString(",")))
  )
}
| kado-yasuyuki/chatwork4s | src/main/scala/com/dys/chatwork4s/http/parameters/CreateRoom.scala | Scala | apache-2.0 | 1,849 |
package com.github.vitalsoftware.scalaredox.receiver
import com.google.common.base.MoreObjects
import play.api.libs.json.JsValue
import play.api.mvc.Request
import com.github.vitalsoftware.scalaredox.receiver.RedoxRequest.TokenHeader
// Thin wrapper over an incoming Play JSON request from Redox, exposing the
// verification token and a log-friendly string form.
case class RedoxRequest(underlying: Request[JsValue]) {

  // Summary for logging/diagnostics; token value itself is printed as-is,
  // or "unspecified" when the header is absent.
  override def toString: String =
    MoreObjects
      .toStringHelper(this)
      .add("host", underlying.host)
      .add("remoteAddress", underlying.remoteAddress)
      .add("method", underlying.method)
      .add("path", underlying.path)
      .add("tokenHeader", token.getOrElse("unspecified"))
      .toString

  // Verification token from the request headers, if present.
  val token: Option[String] = underlying.headers.get(TokenHeader)
}
object RedoxRequest {
  // HTTP header carrying Redox's verification token.
  final val TokenHeader: String = "verification-token"
}
| vital-software/scala-redox | src/main/scala/com/github/vitalsoftware/scalaredox/receiver/RedoxRequest.scala | Scala | mit | 759 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and returns a sample of Scala code snippets matching specific criteria, giving a quick surface-level overview of the dataset's contents.