code
stringlengths 5
1M
| repo_name
stringlengths 5
109
| path
stringlengths 6
208
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 5
1M
|
|---|---|---|---|---|---|
/*
* @author Philip Stutz
*
* Copyright 2014 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.signalcollect.util
import scala.Array.canBuildFrom
import scala.reflect.ClassTag
/**
 * An open-addressing (linear-probing) hash set for non-null reference keys.
 *
 * The backing array always has a power-of-two size, so probe positions are
 * computed with a bit mask instead of a modulo. A `null` slot means "empty".
 *
 * @param initialSize    desired initial capacity; rounded up to the next power of two
 * @param rehashFraction load factor at which the table doubles in size
 */
class HashSet[Key <: AnyRef: ClassTag](
    initialSize: Int = 32768,
    rehashFraction: Float = 0.75f) {
  assert(initialSize > 0)
  final var maxSize = nextPowerOfTwo(initialSize)
  assert(1.0f >= rehashFraction && rehashFraction > 0.1f, "Unreasonable rehash fraction.")
  assert(maxSize > 0 && maxSize >= initialSize, "Initial size is too large.")
  // Number of entries at which the table is doubled (load-factor threshold).
  private[this] final var maxElements: Int = (rehashFraction * maxSize).floor.toInt
  private[this] final var keys = new Array[Key](maxSize) // null means empty
  private[this] final var mask = maxSize - 1
  private[this] final var nextPositionToProcess = 0

  /** Number of keys currently stored. */
  final def size: Int = numberOfElements

  /** True iff no keys are stored. */
  final def isEmpty: Boolean = numberOfElements == 0

  private[this] final var numberOfElements = 0

  /** Removes all keys; the current capacity is kept. */
  final def clear(): Unit = {
    keys = new Array[Key](maxSize)
    numberOfElements = 0
    nextPositionToProcess = 0
  }

  /** Copies the contents into an immutable Scala set. */
  def toScalaSet: Set[Key] = {
    keys.filter(_ != null).toSet
  }

  // Doubles the capacity and rehashes all entries.
  // 1073741824 (2^30) is the largest power-of-two Int size; at that cap the
  // table cannot grow and `add` will eventually fail.
  private[this] final def tryDouble(): Unit = {
    if (maxSize != 1073741824) {
      val oldKeys = keys
      val oldNumberOfElements = numberOfElements
      maxSize *= 2
      maxElements = (rehashFraction * maxSize).floor.toInt
      keys = new Array[Key](maxSize)
      mask = maxSize - 1
      numberOfElements = 0
      var i = 0
      var elementsMoved = 0
      // Reinsert entries until all old ones were moved; stops early once done.
      while (elementsMoved < oldNumberOfElements) {
        val oldKey = oldKeys(i)
        if (oldKey != null) {
          add(oldKey)
          elementsMoved += 1
        }
        i += 1
      }
    }
  }

  /** Applies `f` to every stored key (iteration order is unspecified). */
  final def foreach(f: Key => Unit): Unit = {
    var i = 0
    var elementsProcessed = 0
    while (elementsProcessed < numberOfElements) {
      val key = keys(i)
      if (key != null) {
        f(key)
        elementsProcessed += 1
      }
      i += 1
    }
  }

  /** Removes `key` if present and re-optimizes the affected probe chain. */
  final def remove(key: Key): Unit = {
    remove(key, true)
  }

  private final def remove(key: Key, optimize: Boolean): Unit = {
    // Linear probing: scan from the hash slot until the key or an empty slot is hit.
    var position = keyToPosition(key)
    var keyAtPosition = keys(position)
    while (keyAtPosition != null && key != keyAtPosition) {
      position = (position + 1) & mask
      keyAtPosition = keys(position)
    }
    // We can only remove the entry if it was found.
    if (keyAtPosition != null) {
      keys(position) = null.asInstanceOf[Key]
      numberOfElements -= 1
      if (optimize) {
        optimizeFromPosition((position + 1) & mask)
      }
    }
  }

  // Try to reinsert all elements that are not optimally placed until an empty position is found.
  // See http://stackoverflow.com/questions/279539/best-way-to-remove-an-entry-from-a-hash-table
  private[this] final def optimizeFromPosition(startingPosition: Int): Unit = {
    var currentPosition = startingPosition
    var keyAtPosition = keys(currentPosition)
    while (isCurrentPositionOccupied) {
      val perfectPositionForEntry = keyToPosition(keyAtPosition)
      if (perfectPositionForEntry != currentPosition) {
        // We try to optimize the placement of the entry by removing and then reinserting it.
        removeCurrentEntry()
        add(keyAtPosition)
      }
      advance()
    }
    @inline def advance(): Unit = {
      currentPosition = ((currentPosition + 1) & mask)
      keyAtPosition = keys(currentPosition)
    }
    @inline def isCurrentPositionOccupied: Boolean = {
      keyAtPosition != null
    }
    @inline def removeCurrentEntry(): Unit = {
      keys(currentPosition) = null.asInstanceOf[Key]
      numberOfElements -= 1
    }
  }

  /** Alias for [[contains]]. */
  final def apply(key: Key): Boolean = contains(key)

  /** True iff `key` is stored in this set. */
  @inline final def contains(key: Key): Boolean = {
    var position = keyToPosition(key)
    var keyAtPosition = keys(position)
    while (keyAtPosition != null && key != keyAtPosition) {
      position = (position + 1) & mask
      keyAtPosition = keys(position)
    }
    // The probe loop only stops on the key itself or on an empty slot.
    keyAtPosition != null
  }

  /**
   * Adds `key` to the set, growing the table when the load factor is exceeded.
   *
   * @return true iff this entry already existed.
   */
  def add(key: Key): Boolean = {
    assert(key != null, "Key cannot be null")
    var position = keyToPosition(key)
    var keyAtPosition = keys(position)
    while (keyAtPosition != null && key != keyAtPosition) {
      position = (position + 1) & mask
      keyAtPosition = keys(position)
    }
    val alreadyContained = keyAtPosition == key
    if (!alreadyContained) {
      keys(position) = key
      numberOfElements += 1
      if (numberOfElements >= maxElements) {
        tryDouble()
        if (numberOfElements >= maxSize) {
          // Only reachable when the table is at the 2^30 cap and completely full.
          throw new OutOfMemoryError("The hash map is full and cannot be expanded any further.")
        }
      }
    }
    alreadyContained
  }

  // Maps a key's hash code onto a slot index; maxSize is a power of two,
  // so masking keeps the result in [0, maxSize).
  private[this] final def keyToPosition(key: Key): Int = {
    key.hashCode & mask
  }

  // Smallest power of two >= x (classic bit-smearing trick).
  private[this] final def nextPowerOfTwo(x: Int): Int = {
    var r = x - 1
    r |= r >> 1
    r |= r >> 2
    r |= r >> 4
    r |= r >> 8
    r |= r >> 16
    r + 1
  }
}
|
uzh/signal-collect
|
src/main/scala/com/signalcollect/util/HashSet.scala
|
Scala
|
apache-2.0
| 5,679
|
package org.jetbrains.plugins.scala.conversion.ast
/**
* Created by Kate Ustyuzhanina
* on 10/27/15
*/
// Intermediate AST node representing an annotation in the Java-to-Scala converter.
// Fields (inferred from names — verify against callers):
//   inAnnotation: presumably true when this annotation occurs nested inside another annotation
//   attributes:   the annotation's attribute pairs; either the name or the value node may be absent
//   name:         the annotation's name node, if resolved
// NOTE(review): the class name is misspelled ("Annotaion"); kept as-is for source compatibility.
case class AnnotaionConstruction(inAnnotation: Boolean, attributes: Seq[(Option[String], Option[IntermediateNode])],
                                 name: Option[IntermediateNode]) extends IntermediateNode {
}
|
whorbowicz/intellij-scala
|
src/org/jetbrains/plugins/scala/conversion/ast/AnnotaionConstruction.scala
|
Scala
|
apache-2.0
| 323
|
package scala.concurrent
import org.junit.Assert.assertTrue
import org.junit.Test
import scala.tools.testkit.AssertUtil._
import scala.util.Try
import duration.Duration.Inf
class FutureTest {
  /**
   * `zip`/`zipWith`/`sequence`/`traverse` must fail fast with the earliest
   * failure, regardless of which side the failed future appears on.
   */
  @Test
  def testZipWithFailFastBothWays(): Unit = {
    import ExecutionContext.Implicits.global
    val p1 = Promise[Int]()
    val p2 = Promise[Int]()
    // Make sure that the combined future fails early, after the earlier failure occurs, and does not
    // wait for the later failure regardless of which one is on the left and which is on the right
    p1.failure(new Exception("Boom Early"))
    val f1 = p1.future
    val f2 = p2.future
    val scala.util.Failure(fa) = Try(Await.result(f1.zip(f2), Inf))
    val scala.util.Failure(fb) = Try(Await.result(f2.zip(f1), Inf))
    val scala.util.Failure(fc) = Try(Await.result(f1.zipWith(f2)((_, _)), Inf))
    val scala.util.Failure(fd) = Try(Await.result(f2.zipWith(f1)((_, _)), Inf))
    val scala.util.Failure(fe) = Try(Await.result(Future.sequence(Seq(f1, f2)), Inf))
    val scala.util.Failure(ff) = Try(Await.result(Future.sequence(Seq(f2, f1)), Inf))
    val scala.util.Failure(fg) = Try(Await.result(Future.traverse(Seq(0, 1))(Seq(f1, f2)(_)), Inf))
    // Fix: this case previously duplicated `fg` (both used Seq(f1, f2)); the order must be
    // reversed here so traverse is also exercised with the failed future on the right.
    val scala.util.Failure(fh) = Try(Await.result(Future.traverse(Seq(0, 1))(Seq(f2, f1)(_)), Inf))
    // Make sure the early failure is always reported, regardless of whether it's on
    // the left or right of the zip/zipWith/sequence/traverse
    assert(fa.getMessage == "Boom Early")
    assert(fb.getMessage == "Boom Early")
    assert(fc.getMessage == "Boom Early")
    assert(fd.getMessage == "Boom Early")
    assert(fe.getMessage == "Boom Early")
    assert(ff.getMessage == "Boom Early")
    assert(fg.getMessage == "Boom Early")
    assert(fh.getMessage == "Boom Early")
  }

  /**
   * `firstCompletedOf` must drop its references to the completed result once
   * it is done, even though callbacks stay registered on the losing promise.
   */
  @Test
  def `bug/issues#10513 firstCompletedOf must not leak references`(): Unit = {
    val unfulfilled = Promise[AnyRef]()
    val quick = Promise[AnyRef]()
    val result = new AnyRef
    // all callbacks will be registered
    val first = Future.firstCompletedOf(List(quick.future, unfulfilled.future))(ExecutionContext.parasitic)
    // callbacks run parasitically to avoid race or waiting for first future;
    // normally we have no guarantee that firstCompletedOf completed, so we assert that this assumption held
    assertNotReachable(result, unfulfilled) {
      quick.complete(Try(result))
      assertTrue("First must complete", first.isCompleted)
    }
    /* The test has this structure under the hood:
    val p = Promise[String]
    val q = Promise[String]
    val res = Promise[String]
    val s = "hi"
    p.future.onComplete(t => res.complete(t))
    q.future.onComplete(t => res.complete(t)) // previously, uncompleted promise held reference to promise completed with value
    assertNotReachable(s, q) {
      p.complete(Try(s))
    }
    */
  }
}
|
scala/scala
|
test/junit/scala/concurrent/FutureTest.scala
|
Scala
|
apache-2.0
| 2,890
|
package tholowka.diz.unmarshalling.terms
import tholowka.diz.interfaces._
/**
* This parser represents the parsing of a 'row' in a Json object definition.
* It assumes that there is a left side of the row, and a right side, delimited by a colon.
* The left side should parse to a string, the right can parse to any of the basic types.
* Architecturally, the parser is positioned as follows:
*
* {{{
JsonObjectContent
|
|
Row
| \
| \
RowKey RowValue
}}}
*/
// Companion object (currently holds no shared state).
private [terms] object Row {}
// NOTE(review): a case class without a parameter list is legacy (pre-2.10) syntax —
// confirm the Scala version this project targets.
private [terms] case class Row extends Parser[(String,Any)] {
  /**
   * Splits the input at the first colon that is neither escaped nor inside
   * parentheses, then parses the left part with RowKey and the right part
   * with RowValue.
   *
   * @param input the character stream of one JSON object row
   * @return the parsed (key, value) pair
   * @throws IllegalArgumentException when either the key or the value part fails to parse
   */
  def consume(input: Stream[Char]): Option[(String,Any)] = {
    // NOTE(review): the next six locals are never read after initialization in
    // this method — confirm they are dead before removing them.
    var keyElements = true
    var valueElements = false
    var keyPrevChars = EscapeSequenceCheck()
    var valuePrevChars = EscapeSequenceCheck()
    var colonCount = 0
    var keyParenthesesCount = InParenthesesCheck()
    // Escape/parenthesis state threaded through `splitter` while scanning.
    var splitStreamEscapeSeqCheck = EscapeSequenceCheck()
    var splitStreamParenthesesCheck = InParenthesesCheck()
    // Classifies each character into a group:
    //   0 = key side, 1 = value side, 2 = the first (delimiting) colon, which is dropped.
    def splitter(ch: Char) = {
      (ch, splitStreamEscapeSeqCheck.escapeSeqFound || splitStreamParenthesesCheck.inParentheses) match {
        case (':', false) => {
          colonCount = colonCount + 1
          splitStreamEscapeSeqCheck = EscapeSequenceCheck(splitStreamEscapeSeqCheck, ch)
          splitStreamParenthesesCheck = InParenthesesCheck(splitStreamParenthesesCheck, ch)
          colonCount match {
            case 1=> 2//ignored
            case _=> 1
          }
        }
        case (_,_) => {
          // Not a splitting colon: advance the escape/parenthesis state and
          // assign the character to the key side (before the first colon) or
          // the value side (after it).
          splitStreamEscapeSeqCheck = EscapeSequenceCheck(splitStreamEscapeSeqCheck, ch)
          splitStreamParenthesesCheck = InParenthesesCheck(splitStreamParenthesesCheck, ch)
          colonCount match {
            case 1 => 1
            case 0 => 0
            case _ => 1
          }
        }
      }
    }
    // print("row=> ")
    val streams = input.groupBy(splitter)
    var key = None : Option[String]
    var value = None : Option[Any]
    // Parse each side with the appropriate sub-parser; group 2 (the colon) is discarded.
    streams.foreach(stream => {
      stream._1 match {
        case 0 => key = RowKey().consume(stream._2)
        case 1 => value = RowValue().consume(stream._2)
        case _ => {}
      }
    })
    // print(">=row ")
    if (!key.isDefined) throw new IllegalArgumentException("Expecting a key to be provided in the Json object")
    if (!value.isDefined) throw new IllegalArgumentException("Expecting a value to be provided in the Json object")
    Some((key.get,value.get))
  }
}
|
tholowka/diz
|
src/main/scala/tholowka/diz/unmarshalling/terms/Row.scala
|
Scala
|
mit
| 2,787
|
package edu.rice.habanero.benchmarks.radixsort
import java.util.Random
import edu.rice.habanero.actors.{ScalazActor, ScalazActorState, ScalazPool}
import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner}
/**
* @author <a href="http://shams.web.rice.edu/">Shams Imam</a> (shams@rice.edu)
*/
object RadixSortScalazActorBenchmark {

  def main(args: Array[String]) {
    BenchmarkRunner.runBenchmark(args, new RadixSortScalazActorBenchmark)
  }

  /** Benchmark harness: wires a value source into a chain of per-bit sort actors ending in a validator. */
  private final class RadixSortScalazActorBenchmark extends Benchmark {
    def initialize(args: Array[String]) {
      RadixSortConfig.parseArgs(args)
    }

    def printArgInfo() {
      RadixSortConfig.printArgs()
    }

    def runIteration() {
      val validationActor = new ValidationActor(RadixSortConfig.N)
      validationActor.start()
      val sourceActor = new IntSourceActor(RadixSortConfig.N, RadixSortConfig.M, RadixSortConfig.S)
      sourceActor.start()
      // Build the sort chain from the highest radix bit (M / 2) down to bit 1;
      // each SortActor forwards into the previously created actor, so the chain
      // head sorts on the lowest bit and the validator sits at the end.
      var radix = RadixSortConfig.M / 2
      var nextActor: ScalazActor[AnyRef] = validationActor
      while (radix > 0) {
        val sortActor = new SortActor(RadixSortConfig.N, radix, nextActor)
        sortActor.start()
        radix /= 2
        nextActor = sortActor
      }
      sourceActor.send(NextActorMessage(nextActor))
      ScalazActorState.awaitTermination()
    }

    def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double): Unit = {
      if (lastIteration) {
        ScalazPool.shutdown()
      }
    }
  }

  /** Tells the source which actor heads the sort chain. */
  private case class NextActorMessage(actor: ScalazActor[AnyRef])

  /** One value travelling through the sort chain. */
  private case class ValueMessage(value: Long)

  /** Emits `numValues` pseudo-random values in [0, maxValue) to the chain head, then exits. */
  private class IntSourceActor(numValues: Int, maxValue: Long, seed: Long) extends ScalazActor[AnyRef] {
    val random = new Random(seed)
    override def process(msg: AnyRef) {
      msg match {
        case nm: NextActorMessage =>
          var i = 0
          while (i < numValues) {
            // Bug fix: Math.abs(Long.MinValue) is still negative, which would inject a
            // negative candidate and break the sort/validation downstream. Masking the
            // sign bit keeps the value non-negative for every possible nextLong() result.
            val candidate = (random.nextLong() & Long.MaxValue) % maxValue
            val message = new ValueMessage(candidate)
            nm.actor.send(message)
            i += 1
          }
          exit()
      }
    }
  }

  /**
   * Partitions values on one radix bit: zero-bit values pass through immediately,
   * one-bit values are buffered and forwarded once all inputs have arrived.
   */
  private class SortActor(numValues: Int, radix: Long, nextActor: ScalazActor[AnyRef]) extends ScalazActor[AnyRef] {
    private val orderingArray = Array.ofDim[ValueMessage](numValues)
    private var valuesSoFar = 0
    private var j = 0
    override def process(msg: AnyRef): Unit = {
      msg match {
        case vm: ValueMessage =>
          valuesSoFar += 1
          val current = vm.value
          if ((current & radix) == 0) {
            nextActor.send(vm)
          } else {
            orderingArray(j) = vm
            j += 1
          }
          if (valuesSoFar == numValues) {
            // Flush the buffered one-bit values after all zero-bit values went through.
            var i = 0
            while (i < j) {
              nextActor.send(orderingArray(i))
              i += 1
            }
            exit()
          }
      }
    }
  }

  /** Verifies that values arrive in non-decreasing order and prints their sum. */
  private class ValidationActor(numValues: Int) extends ScalazActor[AnyRef] {
    private var sumSoFar = 0.0
    private var valuesSoFar = 0
    private var prevValue = 0L
    // (value, index) of the first out-of-order element; (-1, -1) while none seen.
    private var errorValue = (-1L, -1)
    override def process(msg: AnyRef) {
      msg match {
        case vm: ValueMessage =>
          valuesSoFar += 1
          // Only the first ordering violation is recorded.
          if (vm.value < prevValue && errorValue._1 < 0) {
            errorValue = (vm.value, valuesSoFar - 1)
          }
          prevValue = vm.value
          sumSoFar += prevValue
          if (valuesSoFar == numValues) {
            if (errorValue._1 >= 0) {
              println("ERROR: Value out of place: " + errorValue._1 + " at index " + errorValue._2)
            } else {
              println("Elements sum: " + sumSoFar)
            }
            exit()
          }
      }
    }
  }
}
|
smarr/savina
|
src/main/scala/edu/rice/habanero/benchmarks/radixsort/RadixSortScalazActorBenchmark.scala
|
Scala
|
gpl-2.0
| 3,737
|
import sbt._
import Keys._
import play.Project._
// sbt build definition for a minimal Play 2 application (legacy Build-trait style).
object ApplicationBuild extends Build {

  // Application name and version used for packaging.
  val appName = "app"
  val appVersion = "1.0-SNAPSHOT"

  // Managed library dependencies (none beyond Play's defaults).
  val appDependencies = Seq(
    // Add your project dependencies here,
  )

  // The root Play project definition.
  val main = play.Project(appName, appVersion, appDependencies).settings(
    // Add your own project settings here
  )
}
|
cescoffier/maven-play2-plugin
|
src/test/resources/scala/app/project/Build.scala
|
Scala
|
apache-2.0
| 383
|
package org.thp.cortex.controllers
import javax.inject.{ Inject, Singleton }
import scala.concurrent.ExecutionContext
import play.api.Configuration
import play.api.http.Status
import play.api.libs.json.{ JsString, Json }
import play.api.libs.json.Json.toJsFieldJsValueWrapper
import play.api.mvc.{ AbstractController, Action, AnyContent, ControllerComponents }
import com.sksamuel.elastic4s.ElasticDsl
import org.thp.cortex.models.Worker
import org.elastic4play.database.DBIndex
import org.elastic4play.services.AuthSrv
import org.elastic4play.services.auth.MultiAuthSrv
@Singleton
class StatusCtrl @Inject() (
    configuration: Configuration,
    authSrv: AuthSrv,
    dbIndex: DBIndex,
    components: ControllerComponents,
    implicit val ec: ExecutionContext) extends AbstractController(components) with Status {

  /** Implementation version from the jar manifest of `c`'s package, or "SNAPSHOT" when absent. */
  private[controllers] def getVersion(c: Class[_]) = Option(c.getPackage.getImplementationVersion).getOrElse("SNAPSHOT")

  /**
   * Status endpoint: reports component versions, the configured authentication
   * provider(s), and the auth capabilities as a JSON document.
   *
   * Note: the original source's Unicode arrows (⇒/→) were mojibake'd in transit;
   * they are restored here as the equivalent ASCII `=>` / `->`.
   */
  def get: Action[AnyContent] = Action.async { _ =>
    dbIndex.clusterVersions.map { versions =>
      Ok(Json.obj(
        "versions" -> Json.obj(
          "Cortex" -> getVersion(classOf[Worker]),
          "Elastic4Play" -> getVersion(classOf[AuthSrv]),
          "Play" -> getVersion(classOf[AbstractController]),
          "Elastic4s" -> getVersion(classOf[ElasticDsl]),
          "ElasticSearch client" -> getVersion(classOf[org.elasticsearch.Build]),
          "ElasticSearch cluster" -> versions.mkString(", ")),
        "config" -> Json.obj(
          "authType" -> (authSrv match {
            // A MultiAuthSrv aggregates several providers; report each name.
            case multiAuthSrv: MultiAuthSrv => multiAuthSrv.authProviders.map { a => JsString(a.name) }
            case _ => JsString(authSrv.name)
          }),
          "capabilities" -> authSrv.capabilities.map(c => JsString(c.toString)))))
    }
  }

  /** Health check endpoint — not implemented yet. */
  def health: Action[AnyContent] = TODO
}
|
CERT-BDF/Cortex
|
app/org/thp/cortex/controllers/StatusCtrl.scala
|
Scala
|
agpl-3.0
| 1,881
|
package net.sansa_stack.inference.spark.data.writer
import org.apache.jena.rdf.model.{Model, ModelFactory}
import org.apache.jena.graph.{GraphUtil, NodeFactory, Triple}
import org.apache.jena.sparql.util.TripleComparator
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row}
import org.slf4j.LoggerFactory
import net.sansa_stack.inference.spark.data.model.RDFGraph
import net.sansa_stack.inference.utils.JenaTripleToNTripleString
/**
* A class that provides methods to write an RDF graph to disk.
*
* @author Lorenz Buehmann
*
*/
object RDFGraphWriter {
  private val logger = com.typesafe.scalalogging.Logger(LoggerFactory.getLogger(this.getClass.getName))

  /**
   * Write the graph to disk in N-Triples format.
   *
   * @param graph the RDF graph
   * @param path the output directory
   * @param singleFile whether to put all data into a single file
   * @param sorted whether to sort the triples by subject, predicate, object
   */
  def writeToDisk(graph: RDFGraph, path: String, singleFile: Boolean = false, sorted: Boolean = false): Unit = {
    writeTriplesToDisk(graph.triples, path, singleFile, sorted)
  }

  /**
   * Write the triples to disk in N-Triples format.
   *
   * @param triples the triples
   * @param path the output directory
   * @param singleFile whether to put all data into a single file
   * @param sorted whether to sort the triples by subject, predicate, object
   */
  def writeTriplesToDisk(triples: RDD[Triple],
                         path: String,
                         singleFile: Boolean = false,
                         sorted: Boolean = false): Unit = {
    logger.info("writing triples to disk...")
    val startTime = System.currentTimeMillis()
    // Explicit type annotation: an inferred anonymous-class type would leak into
    // the implicit scope and can trip lint warnings.
    implicit val tripleOrdering: Ordering[Triple] = new Ordering[Triple] {
      val comparator: TripleComparator = new TripleComparator()
      override def compare(t1: Triple, t2: Triple): Int = comparator.compare(t1, t2)
    }
    // sort triples if enabled (key the RDD by the triple itself and sort by key)
    val maybeSorted = if (sorted) {
      triples.map(t => (t, t)).sortByKey().map(_._1)
    } else {
      triples
    }
    // convert to N-Triples lines
    val ntLines = maybeSorted.map(new JenaTripleToNTripleString())
    // move all data to one partition if a single output file was requested
    // (might be very expensive and contradicts the Big Data paradigm on Hadoop in general)
    val output = if (singleFile) ntLines.coalesce(1, shuffle = true) else ntLines
    // finally, write to disk
    output.saveAsTextFile(path)
    logger.info("finished writing triples to disk in " + (System.currentTimeMillis() - startTime) + "ms.")
  }

  /**
   * Write the triples represented by the DataFrame to disk in N-Triples format.
   *
   * @param triples the DataFrame containing the triples
   * @param path the output directory
   * @param singleFile whether to put all data into a single file
   * @param sorted whether to sort the triples by subject, predicate, object
   */
  def writeDataframeToDisk(triples: DataFrame,
                           path: String,
                           singleFile: Boolean = false,
                           sorted: Boolean = false): Unit = {
    // Columns 0/1/2 are subject/predicate/object; an object value starting with
    // "http:" is treated as a URI, anything else as a literal.
    val rowToJenaTriple = (row: Row) => {
      Triple.create(
        NodeFactory.createURI(row.getString(0)),
        NodeFactory.createURI(row.getString(1)),
        if (row.getString(2).startsWith("http:")) NodeFactory.createURI(row.getString(2)) else NodeFactory.createLiteral(row.getString(2)))
    }
    writeTriplesToDisk(
      triples.rdd.map(rowToJenaTriple),
      path,
      singleFile,
      sorted
    )
  }

  /**
   * Converts an RDF graph to an Apache Jena in-memory model.
   *
   * @note For large graphs this can be too expensive
   * and lead to a OOM exception
   *
   * @param graph the RDF graph
   *
   * @return the in-memory Apache Jena model containing the triples
   */
  def convertToModel(graph: RDFGraph): Model = {
    val triples = graph.triples.collect()
    val model = ModelFactory.createDefaultModel()
    GraphUtil.add(model.getGraph, triples)
    model
  }
}
|
SANSA-Stack/SANSA-RDF
|
sansa-inference/sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/data/writer/RDFGraphWriter.scala
|
Scala
|
apache-2.0
| 4,112
|
package eu.gruchala.typelevel.full
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.language.implicitConversions
// Demonstrates the "magnet pattern": replacing an overload set with a single
// method taking an implicitly-built magnet whose dependent `Result` type
// determines the return type per argument type.
object F_MagnetPattern {

  class HttpResponse

  sealed trait StatusCode
  object StatusCode {
    case object Ok extends StatusCode
    case object Bad_Request extends StatusCode
  }

  trait RouteOps {
    //A problem: these overloads clash after type erasure (the commented-out
    //Future[StatusCode] variant erases to the same signature as Future[HttpResponse]).
    def complete(status: StatusCode): Unit
    def complete(response: HttpResponse): Int
    // def complete(future: Future[StatusCode]): Unit //does not compile
    def complete(future: Future[HttpResponse]): Unit
    def complete[T](obj: T): Int
    def complete[T](statusCode: StatusCode, obj: T): Int
  }

  //Magnet pattern is an alternative approach to method overloading. A fix for type erasure and code duplication.
  //A user-extensible type system.
  sealed trait CompletionMagnet {
    // Dependent result type: each implicit conversion fixes it per source type.
    type Result
    def apply(): Result
  }

  object CompletionMagnet {
    // StatusCode "overload": completes with an HTTP status number.
    implicit def fromStatusCode(statusCode: StatusCode): CompletionMagnet =
      new CompletionMagnet {
        override type Result = Int
        override def apply(): Int = if (statusCode == StatusCode.Ok) 200 else 500
      }
    // Future[StatusCode] "overload": fire-and-forget logging callback.
    // NOTE(review): `onSuccess` is deprecated since Scala 2.12 and removed in 2.13 —
    // confirm the Scala version this project targets.
    implicit def fromFutureStatusCode(future: Future[StatusCode]): CompletionMagnet =
      new CompletionMagnet {
        override type Result = Unit
        override def apply(): Result = future onSuccess { case resp => s"log: Got $resp" }
      }
    //etc.
  }

  object MagnetRoute {
    import CompletionMagnet._
    // One entry point covers all "overloads"; the return type follows the magnet.
    def complete(magnet: CompletionMagnet): magnet.Result = magnet()
    val statusResponse: CompletionMagnet#Result = complete(StatusCode.Bad_Request)
    statusResponse.isInstanceOf[Int] // true
    val futureStatusResponse: CompletionMagnet#Result = complete(Future(StatusCode.Ok))
    futureStatusResponse.isInstanceOf[Unit] // true
  }
}
|
leszekgruchala/typelevel-programming-scala
|
src/main/scala/eu/gruchala/typelevel/full/F_MagnetPattern.scala
|
Scala
|
mit
| 1,850
|
package sbt
package internal
import java.util.concurrent.atomic.AtomicLong
import java.io.Closeable
import Def.{ ScopedKey, Setting, Classpath }
import scala.concurrent.ExecutionContext
import Scope.GlobalScope
import java.io.File
import sbt.io.{ IO, Hash }
import sbt.io.syntax._
import sbt.util.{ Logger, LogExchange }
import sbt.internal.util.{ Attributed, ManagedLogger }
/**
* Interface between sbt and a thing running in the background.
*/
private[sbt] abstract class BackgroundJob {
  /** Name shown to users, e.g. in job listings. */
  def humanReadableName: String
  /** Blocks until the job has finished. */
  def awaitTermination(): Unit
  /** Requests the job to stop; does not wait for it to finish. */
  def shutdown(): Unit
  // this should be true on construction and stay true until
  // the job is complete
  def isRunning(): Boolean
  // called after stop or on spontaneous exit, closing the result
  // removes the listener
  def onStop(listener: () => Unit)(implicit ex: ExecutionContext): Closeable
  // do we need this or is the spawning task good enough?
  // def tags: SomeType
}
private[sbt] abstract class AbstractJobHandle extends JobHandle {
  // Human-oriented rendering: id, name, and the task key that spawned the job.
  override def toString =
    s"JobHandle(${id}, ${humanReadableName}, ${Def.showFullKey.show(spawningTask)})"
}
// Keeps a synchronized registry of live background jobs, gives each one a
// working directory under a per-service temp dir, and tears everything down
// on shutdown.
private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobService {
  // Monotonic id source for job handles.
  private val nextId = new AtomicLong(1)
  private val pool = new BackgroundThreadPool()
  // Per-service scratch space; deleted on shutdown.
  private val serviceTempDir = IO.createTemporaryDirectory
  // hooks for sending start/stop events
  protected def onAddJob(job: JobHandle): Unit = {}
  protected def onRemoveJob(job: JobHandle): Unit = {}
  // this mutable state could conceptually go on State except
  // that then every task that runs a background job would have
  // to be a command, so not sure what to do here.
  @volatile
  private final var jobSet = Set.empty[ThreadJobHandle]
  private def addJob(job: ThreadJobHandle): Unit = synchronized {
    onAddJob(job)
    jobSet += job
  }
  private def removeJob(job: ThreadJobHandle): Unit = synchronized {
    onRemoveJob(job)
    jobSet -= job
  }
  override def jobs: Vector[ThreadJobHandle] = jobSet.toVector

  /** Live handle for a running job; registers itself with the service on construction. */
  final class ThreadJobHandle(
      override val id: Long,
      override val spawningTask: ScopedKey[_],
      val logger: ManagedLogger,
      val workingDirectory: File,
      val job: BackgroundJob
  ) extends AbstractJobHandle {
    def humanReadableName: String = job.humanReadableName
    // EC for onStop handler below
    import ExecutionContext.Implicits.global
    // When the job stops: deregister, delete its working dir, unbind its log appenders.
    job.onStop { () =>
      // TODO: Fix this
      // logger.close()
      removeJob(this)
      IO.delete(workingDirectory)
      LogExchange.unbindLoggerAppenders(logger.name)
    }
    addJob(this)
    // Handles are equal iff their ids are equal (any JobHandle subtype qualifies).
    override final def equals(other: Any): Boolean = other match {
      case handle: JobHandle if handle.id == id => true
      case _ => false
    }
    override final def hashCode(): Int = id.hashCode
  }

  private val unknownTask = TaskKey[Unit]("unknownTask", "Dummy value")
  // we use this if we deserialize a handle for a job that no longer exists
  private final class DeadHandle(override val id: Long, override val humanReadableName: String)
      extends AbstractJobHandle {
    override val spawningTask: ScopedKey[_] = unknownTask
  }

  /** Builds the logger a background job will use; supplied by concrete services. */
  protected def makeContext(id: Long, spawningTask: ScopedKey[_], state: State): ManagedLogger

  // Allocates an id, a logger, and a fresh working directory, then starts the job.
  def doRunInBackground(spawningTask: ScopedKey[_],
                        state: State,
                        start: (Logger, File) => BackgroundJob): JobHandle = {
    val id = nextId.getAndIncrement()
    val logger = makeContext(id, spawningTask, state)
    val workingDir = serviceTempDir / s"job-$id"
    IO.createDirectory(workingDir)
    val job = try {
      new ThreadJobHandle(id, spawningTask, logger, workingDir, start(logger, workingDir))
    } catch {
      case e: Throwable =>
        // TODO: Fix this
        // logger.close()
        throw e
    }
    job
  }

  override def runInBackground(spawningTask: ScopedKey[_], state: State)(
      start: (Logger, File) => Unit): JobHandle = {
    pool.run(this, spawningTask, state)(start)
  }

  override final def close(): Unit = shutdown()

  // Stops and awaits every live job (handles remove themselves via onStop),
  // then tears down the pool and the temp dir.
  override def shutdown(): Unit = {
    while (jobSet.nonEmpty) {
      jobSet.headOption.foreach {
        case handle: ThreadJobHandle @unchecked =>
          handle.job.shutdown()
          handle.job.awaitTermination()
        case _ => //
      }
    }
    pool.close()
    IO.delete(serviceTempDir)
  }

  // Dispatches an operation to a live handle; dead handles are no-ops and
  // foreign handles are an error.
  private def withHandle(job: JobHandle)(f: ThreadJobHandle => Unit): Unit = job match {
    case handle: ThreadJobHandle @unchecked => f(handle)
    case dead: DeadHandle @unchecked => () // nothing to stop or wait for
    case other =>
      sys.error(
        s"BackgroundJobHandle does not originate with the current BackgroundJobService: $other")
  }
  override def stop(job: JobHandle): Unit =
    withHandle(job)(_.job.shutdown())
  override def waitFor(job: JobHandle): Unit =
    withHandle(job)(_.job.awaitTermination())
  override def toString(): String = s"BackgroundJobService(jobs=${jobs.map(_.id).mkString})"

  /**
   * Copies products to the working directory, and the rest to the serviceTempDir of this service,
   * both wrapped in a SHA-1 hash prefix of the file contents.
   * This is intended to minimize the file copying and accumulation of unused JAR files.
   * Since the working directory is wiped out when the background job ends, the product JAR is deleted too.
   * Meanwhile, the rest of the dependencies are cached for the duration of this service.
   */
  override def copyClasspath(products: Classpath,
                             full: Classpath,
                             workingDirectory: File): Classpath = {
    // Copies one entry into dir/<first-8-hex-of-SHA1>/<name>, skipping existing copies.
    def syncTo(dir: File)(source0: Attributed[File]): Attributed[File] = {
      val source = source0.data
      val hash8 = Hash.toHex(Hash(source)).take(8)
      val dest = dir / hash8 / source.getName
      if (!dest.exists) { IO.copyFile(source, dest) }
      Attributed.blank(dest)
    }
    val xs = (products.toVector map { syncTo(workingDirectory / "target") }) ++
      ((full diff products) map { syncTo(serviceTempDir / "target") })
    // NOTE(review): the purpose of this sleep is not evident from the code — confirm
    // whether it papers over a file-system visibility race before removing it.
    Thread.sleep(100)
    xs
  }
}
private[sbt] object BackgroundThreadPool {
  /** Lifecycle of a BackgroundRunnable: Waiting -> Running -> Stopped. */
  sealed trait Status
  case object Waiting extends Status
  final case class Running(thread: Thread) extends Status
  // the oldThread is None if we never ran
  final case class Stopped(oldThread: Option[Thread]) extends Status
}
// Bounded thread pool that runs background jobs and tracks each job's
// Waiting/Running/Stopped state machine for interruption and stop callbacks.
private[sbt] class BackgroundThreadPool extends java.io.Closeable {

  private val nextThreadId = new java.util.concurrent.atomic.AtomicInteger(1)
  private val threadGroup = Thread.currentThread.getThreadGroup()

  // Names background threads "sbt-bg-threads-N" within the creating thread's group.
  private val threadFactory = new java.util.concurrent.ThreadFactory() {
    override def newThread(runnable: Runnable): Thread = {
      val thread =
        new Thread(threadGroup, runnable, s"sbt-bg-threads-${nextThreadId.getAndIncrement}")
      // Do NOT setDaemon because then the code in TaskExit.scala in sbt will insta-kill
      // the backgrounded process, at least for the case of the run task.
      thread
    }
  }

  private val executor = new java.util.concurrent.ThreadPoolExecutor(
    0, /* corePoolSize */
    32, /* maxPoolSize, max # of bg tasks */
    2,
    java.util.concurrent.TimeUnit.SECONDS,
    /* keep alive unused threads this long (if corePoolSize < maxPoolSize) */
    new java.util.concurrent.SynchronousQueue[Runnable](),
    threadFactory
  )

  /** One queued unit of background work plus its job-control state machine. */
  private class BackgroundRunnable(val taskName: String, body: () => Unit)
      extends BackgroundJob
      with Runnable {
    import BackgroundThreadPool._

    // Released exactly once, when run() has fully finished (see awaitTermination).
    private val finishedLatch = new java.util.concurrent.CountDownLatch(1)

    // synchronize to read/write this, no sync to just read
    @volatile
    private var status: Status = Waiting

    // double-finally for extra paranoia that we will finishedLatch.countDown
    override def run() =
      try {
        // Transition Waiting -> Running atomically; a prior shutdown() leaves
        // status Stopped and the body is skipped entirely.
        val go = synchronized {
          status match {
            case Waiting =>
              status = Running(Thread.currentThread())
              true
            case Stopped(_) =>
              false
            case Running(_) =>
              throw new RuntimeException("Impossible status of bg thread")
          }
        }
        try { if (go) body() } finally cleanup()
      } finally finishedLatch.countDown()

    /** A registered stop callback; closing it deregisters the listener. */
    private class StopListener(val callback: () => Unit, val executionContext: ExecutionContext)
        extends Closeable {
      override def close(): Unit = removeListener(this)
      // Identity-based equality: each registration is its own set member.
      override def hashCode: Int = System.identityHashCode(this)
      override def equals(other: Any): Boolean = other match {
        case r: AnyRef => this eq r
        case _ => false
      }
    }

    // access is synchronized
    private var stopListeners = Set.empty[StopListener]

    private def removeListener(listener: StopListener): Unit = synchronized {
      stopListeners -= listener
    }

    // Invokes and drains all stop listeners on their own execution contexts.
    def cleanup(): Unit = {
      // avoid holding any lock while invoking callbacks, and
      // handle callbacks being added by other callbacks, just
      // to be all fancy.
      while (synchronized { stopListeners.nonEmpty }) {
        val listeners = synchronized {
          val list = stopListeners.toList
          stopListeners = Set.empty
          list
        }
        listeners.foreach { l =>
          l.executionContext.execute(new Runnable { override def run = l.callback() })
        }
      }
    }

    override def onStop(listener: () => Unit)(implicit ex: ExecutionContext): Closeable =
      synchronized {
        val result = new StopListener(listener, ex)
        stopListeners += result
        result
      }

    override def awaitTermination(): Unit = finishedLatch.await()
    override def humanReadableName: String = taskName

    override def isRunning(): Boolean =
      status match {
        case Waiting => true // we start as running from BackgroundJob perspective
        case Running(thread) => thread.isAlive()
        case Stopped(threadOption) => threadOption.map(_.isAlive()).getOrElse(false)
      }

    override def shutdown(): Unit =
      synchronized {
        status match {
          case Waiting =>
            status = Stopped(None) // makes run() not run the body
          case Running(thread) =>
            status = Stopped(Some(thread))
            thread.interrupt()
          case Stopped(threadOption) =>
            // try to interrupt again! woot!
            threadOption.foreach(_.interrupt())
        }
      }
  }

  /** Submits `work` to the pool, wrapping it into a JobHandle via the owning service. */
  def run(manager: AbstractBackgroundJobService, spawningTask: ScopedKey[_], state: State)(
      work: (Logger, File) => Unit): JobHandle = {
    def start(logger: Logger, workingDir: File): BackgroundJob = {
      val runnable = new BackgroundRunnable(spawningTask.key.label, { () =>
        work(logger, workingDir)
      })
      executor.execute(runnable)
      runnable
    }
    manager.doRunInBackground(spawningTask, state, start _)
  }

  override def close(): Unit = {
    executor.shutdown()
  }
}
private[sbt] class DefaultBackgroundJobService extends AbstractBackgroundJobService {
  /** Builds a managed logger for a background job from the project's log settings. */
  override def makeContext(id: Long, spawningTask: ScopedKey[_], state: State): ManagedLogger = {
    val extracted = Project.extract(state)
    LogManager.constructBackgroundLog(extracted.structure.data, state)(spawningTask)
  }
}
private[sbt] object DefaultBackgroundJobService {
  // Single shared service instance, wired into the global scope via the setting below.
  lazy val backgroundJobService: DefaultBackgroundJobService = new DefaultBackgroundJobService
  lazy val backgroundJobServiceSetting: Setting[_] =
    ((Keys.bgJobService in GlobalScope) :== backgroundJobService)
}
|
Duhemm/sbt
|
main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala
|
Scala
|
bsd-3-clause
| 11,606
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster.mesos
import java.io.File
import java.util.{ArrayList => JArrayList, Collections, List => JList}
import scala.collection.JavaConversions._
import scala.collection.mutable.{HashMap, HashSet}
import org.apache.mesos.{Scheduler => MScheduler, _}
import org.apache.mesos.Protos.{ExecutorInfo => MesosExecutorInfo, TaskInfo => MesosTaskInfo, _}
import org.apache.mesos.protobuf.ByteString
import org.apache.spark.{SparkContext, SparkException, TaskState}
import org.apache.spark.executor.MesosExecutorBackend
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.util.Utils
/**
* A SchedulerBackend for running fine-grained tasks on Mesos. Each Spark task is mapped to a
* separate Mesos task, allowing multiple applications to share cluster nodes both in space (tasks
* from multiple apps can run on different cores) and in time (a core can switch ownership).
*/
private[spark] class MesosSchedulerBackend(
    scheduler: TaskSchedulerImpl,
    sc: SparkContext,
    master: String)
  extends SchedulerBackend
  with MScheduler
  with MesosSchedulerUtils {

  // Stores the slave ids that have launched a Mesos executor.
  val slaveIdToExecutorInfo = new HashMap[String, MesosExecutorInfo]

  // Maps launched Spark task ids to the slave they run on, so executor state
  // can be cleaned up when a task finishes or its executor is lost.
  val taskIdToSlaveId = new HashMap[Long, String]

  // An ExecutorInfo for our tasks
  // (lazily serialized once by createExecArg and cached here)
  var execArgs: Array[Byte] = null

  // Context class loader captured in start(); re-installed around every Mesos
  // callback via inClassLoader(), since Mesos invokes us on its own threads.
  var classLoader: ClassLoader = null

  // The listener bus to publish executor added/removed events.
  val listenerBus = sc.listenerBus

  // CPUs set aside for the Mesos executor process itself (not for Spark tasks).
  private[mesos] val mesosExecutorCores = sc.conf.getDouble("spark.mesos.mesosExecutor.cores", 1)

  // Offer constraints
  private[this] val slaveOfferConstraints =
    parseConstraintString(sc.conf.get("spark.mesos.constraints", ""))

  // Framework id assigned by Mesos, set on registration.
  @volatile var appId: String = _

  override def start() {
    classLoader = Thread.currentThread.getContextClassLoader
    val driver = createSchedulerDriver(
      master, MesosSchedulerBackend.this, sc.sparkUser, sc.appName, sc.conf)
    startScheduler(driver)
  }

  /**
   * Creates a MesosExecutorInfo that is used to launch a Mesos executor.
   * @param availableResources Available resources that are offered by Mesos
   * @param execId The executor id to assign to this new executor.
   * @return A tuple of the new mesos executor info and the remaining available resources.
   */
  def createExecutorInfo(
      availableResources: JList[Resource],
      execId: String): (MesosExecutorInfo, JList[Resource]) = {
    val executorSparkHome = sc.conf.getOption("spark.mesos.executor.home")
      .orElse(sc.getSparkHome()) // Fall back to driver Spark home for backward compatibility
      .getOrElse {
        throw new SparkException("Executor Spark home `spark.mesos.executor.home` is not set!")
      }
    // Build the executor's environment: classpath, JVM opts, library path
    // prefix, and any user-provided executor environment variables.
    val environment = Environment.newBuilder()
    sc.conf.getOption("spark.executor.extraClassPath").foreach { cp =>
      environment.addVariables(
        Environment.Variable.newBuilder().setName("SPARK_CLASSPATH").setValue(cp).build())
    }
    val extraJavaOpts = sc.conf.getOption("spark.executor.extraJavaOptions").getOrElse("")
    val prefixEnv = sc.conf.getOption("spark.executor.extraLibraryPath").map { p =>
      Utils.libraryPathEnvPrefix(Seq(p))
    }.getOrElse("")
    environment.addVariables(
      Environment.Variable.newBuilder()
        .setName("SPARK_EXECUTOR_OPTS")
        .setValue(extraJavaOpts)
        .build())
    sc.executorEnvs.foreach { case (key, value) =>
      environment.addVariables(Environment.Variable.newBuilder()
        .setName(key)
        .setValue(value)
        .build())
    }
    // The launch command either runs spark-class from a local Spark home or
    // from a distribution fetched via spark.executor.uri / SPARK_EXECUTOR_URI.
    val command = CommandInfo.newBuilder()
      .setEnvironment(environment)
    val uri = sc.conf.getOption("spark.executor.uri")
      .orElse(Option(System.getenv("SPARK_EXECUTOR_URI")))
    val executorBackendName = classOf[MesosExecutorBackend].getName
    if (uri.isEmpty) {
      val executorPath = new File(executorSparkHome, "/bin/spark-class").getCanonicalPath
      command.setValue(s"$prefixEnv $executorPath $executorBackendName")
    } else {
      // Grab everything to the first '.'. We'll use that and '*' to
      // glob the directory "correctly".
      val basename = uri.get.split('/').last.split('.').head
      command.setValue(s"cd ${basename}*; $prefixEnv ./bin/spark-class $executorBackendName")
      command.addUris(CommandInfo.URI.newBuilder().setValue(uri.get))
    }
    // Carve the executor's own cpu and memory shares out of the offer; what is
    // left over (resourcesAfterMem) stays available for tasks.
    val builder = MesosExecutorInfo.newBuilder()
    val (resourcesAfterCpu, usedCpuResources) =
      partitionResources(availableResources, "cpus", mesosExecutorCores)
    val (resourcesAfterMem, usedMemResources) =
      partitionResources(resourcesAfterCpu, "mem", calculateTotalMemory(sc))
    builder.addAllResources(usedCpuResources)
    builder.addAllResources(usedMemResources)
    sc.conf.getOption("spark.mesos.uris").map { uris =>
      setupUris(uris, command)
    }
    val executorInfo = builder
      .setExecutorId(ExecutorID.newBuilder().setValue(execId).build())
      .setCommand(command)
      .setData(ByteString.copyFrom(createExecArg()))
    sc.conf.getOption("spark.mesos.executor.docker.image").foreach { image =>
      MesosSchedulerBackendUtil
        .setupContainerBuilderDockerInfo(image, sc.conf, executorInfo.getContainerBuilder())
    }
    (executorInfo.build(), resourcesAfterMem)
  }

  /**
   * Create and serialize the executor argument to pass to Mesos. Our executor arg is an array
   * containing all the spark.* system properties in the form of (String, String) pairs.
   */
  private def createExecArg(): Array[Byte] = {
    // NOTE(review): unsynchronized lazy init — at worst the arg is serialized
    // twice with identical content if two threads race here.
    if (execArgs == null) {
      val props = new HashMap[String, String]
      for ((key, value) <- sc.conf.getAll) {
        props(key) = value
      }
      // Serialize the map as an array of (String, String) pairs
      execArgs = Utils.serialize(props.toArray)
    }
    execArgs
  }

  override def offerRescinded(d: SchedulerDriver, o: OfferID) {}

  override def registered(d: SchedulerDriver, frameworkId: FrameworkID, masterInfo: MasterInfo) {
    inClassLoader() {
      appId = frameworkId.getValue
      logInfo("Registered as framework ID " + appId)
      markRegistered()
    }
  }

  // Runs `fun` with the driver's class loader installed as the thread's
  // context class loader, restoring the previous one afterwards. Needed
  // because Mesos calls back on threads it owns.
  private def inClassLoader()(fun: => Unit) = {
    val oldClassLoader = Thread.currentThread.getContextClassLoader
    Thread.currentThread.setContextClassLoader(classLoader)
    try {
      fun
    } finally {
      Thread.currentThread.setContextClassLoader(oldClassLoader)
    }
  }

  override def disconnected(d: SchedulerDriver) {}

  override def reregistered(d: SchedulerDriver, masterInfo: MasterInfo) {}

  // Renders a human-readable, multi-line summary of tasks for trace logging.
  private def getTasksSummary(tasks: JArrayList[MesosTaskInfo]): String = {
    val builder = new StringBuilder
    tasks.foreach { t =>
      builder.append("Task id: ").append(t.getTaskId.getValue).append("\\n")
        .append("Slave id: ").append(t.getSlaveId.getValue).append("\\n")
        .append("Task resources: ").append(t.getResourcesList).append("\\n")
        .append("Executor resources: ").append(t.getExecutor.getResourcesList)
        .append("---------------------------------------------\\n")
    }
    builder.toString()
  }

  /**
   * Method called by Mesos to offer resources on slaves. We respond by asking our active task sets
   * for tasks in order of priority. We fill each node with tasks in a round-robin manner so that
   * tasks are balanced across the cluster.
   */
  override def resourceOffers(d: SchedulerDriver, offers: JList[Offer]) {
    inClassLoader() {
      // Fail-fast on offers we know will be rejected
      val (usableOffers, unUsableOffers) = offers.partition { o =>
        val mem = getResource(o.getResourcesList, "mem")
        val cpus = getResource(o.getResourcesList, "cpus")
        val slaveId = o.getSlaveId.getValue
        val offerAttributes = toAttributeMap(o.getAttributesList)
        // check if all constraints are satisfied
        // 1. Attribute constraints
        // 2. Memory requirements
        // 3. CPU requirements - need at least 1 for executor, 1 for task
        val meetsConstraints = matchesAttributeRequirements(slaveOfferConstraints, offerAttributes)
        val meetsMemoryRequirements = mem >= calculateTotalMemory(sc)
        val meetsCPURequirements = cpus >= (mesosExecutorCores + scheduler.CPUS_PER_TASK)
        // Slaves that already run one of our executors only need task cpus,
        // since the executor's own resources were reserved earlier.
        val meetsRequirements =
          (meetsConstraints && meetsMemoryRequirements && meetsCPURequirements) ||
          (slaveIdToExecutorInfo.contains(slaveId) && cpus >= scheduler.CPUS_PER_TASK)
        // add some debug messaging
        val debugstr = if (meetsRequirements) "Accepting" else "Declining"
        val id = o.getId.getValue
        logDebug(s"$debugstr offer: $id with attributes: $offerAttributes mem: $mem cpu: $cpus")
        meetsRequirements
      }
      // Decline offers we ruled out immediately
      unUsableOffers.foreach(o => d.declineOffer(o.getId))
      val workerOffers = usableOffers.map { o =>
        val cpus = if (slaveIdToExecutorInfo.contains(o.getSlaveId.getValue)) {
          getResource(o.getResourcesList, "cpus").toInt
        } else {
          // If the Mesos executor has not been started on this slave yet, set aside a few
          // cores for the Mesos executor by offering fewer cores to the Spark executor
          (getResource(o.getResourcesList, "cpus") - mesosExecutorCores).toInt
        }
        new WorkerOffer(
          o.getSlaveId.getValue,
          o.getHostname,
          cpus)
      }
      val slaveIdToOffer = usableOffers.map(o => o.getSlaveId.getValue -> o).toMap
      val slaveIdToWorkerOffer = workerOffers.map(o => o.executorId -> o).toMap
      // Remaining resources per slave; updated as tasks are carved out below.
      val slaveIdToResources = new HashMap[String, JList[Resource]]()
      usableOffers.foreach { o =>
        slaveIdToResources(o.getSlaveId.getValue) = o.getResourcesList
      }
      val mesosTasks = new HashMap[String, JArrayList[MesosTaskInfo]]
      val slavesIdsOfAcceptedOffers = HashSet[String]()
      // Call into the TaskSchedulerImpl
      val acceptedOffers = scheduler.resourceOffers(workerOffers).filter(!_.isEmpty)
      acceptedOffers
        .foreach { offer =>
          offer.foreach { taskDesc =>
            val slaveId = taskDesc.executorId
            slavesIdsOfAcceptedOffers += slaveId
            taskIdToSlaveId(taskDesc.taskId) = slaveId
            val (mesosTask, remainingResources) = createMesosTask(
              taskDesc,
              slaveIdToResources(slaveId),
              slaveId)
            mesosTasks.getOrElseUpdate(slaveId, new JArrayList[MesosTaskInfo])
              .add(mesosTask)
            slaveIdToResources(slaveId) = remainingResources
          }
        }
      // Reply to the offers
      val filters = Filters.newBuilder().setRefuseSeconds(1).build() // TODO: lower timeout?
      mesosTasks.foreach { case (slaveId, tasks) =>
        slaveIdToWorkerOffer.get(slaveId).foreach(o =>
          listenerBus.post(SparkListenerExecutorAdded(System.currentTimeMillis(), slaveId,
            // TODO: Add support for log urls for Mesos
            new ExecutorInfo(o.host, o.cores, Map.empty)))
        )
        logTrace(s"Launching Mesos tasks on slave '$slaveId', tasks:\\n${getTasksSummary(tasks)}")
        d.launchTasks(Collections.singleton(slaveIdToOffer(slaveId).getId), tasks, filters)
      }
      // Decline offers that weren't used
      // NOTE: This logic assumes that we only get a single offer for each host in a given batch
      for (o <- usableOffers if !slavesIdsOfAcceptedOffers.contains(o.getSlaveId.getValue)) {
        d.declineOffer(o.getId)
      }
    }
  }

  /** Turn a Spark TaskDescription into a Mesos task and also resources unused by the task */
  def createMesosTask(
      task: TaskDescription,
      resources: JList[Resource],
      slaveId: String): (MesosTaskInfo, JList[Resource]) = {
    val taskId = TaskID.newBuilder().setValue(task.taskId.toString).build()
    // Reuse the slave's existing executor if one was launched before;
    // otherwise create it (which also reserves its cpu/mem from `resources`).
    val (executorInfo, remainingResources) = if (slaveIdToExecutorInfo.contains(slaveId)) {
      (slaveIdToExecutorInfo(slaveId), resources)
    } else {
      createExecutorInfo(resources, slaveId)
    }
    slaveIdToExecutorInfo(slaveId) = executorInfo
    val (finalResources, cpuResources) =
      partitionResources(remainingResources, "cpus", scheduler.CPUS_PER_TASK)
    val taskInfo = MesosTaskInfo.newBuilder()
      .setTaskId(taskId)
      .setSlaveId(SlaveID.newBuilder().setValue(slaveId).build())
      .setExecutor(executorInfo)
      .setName(task.name)
      .addAllResources(cpuResources)
      .setData(MesosTaskLaunchData(task.serializedTask, task.attemptNumber).toByteString)
      .build()
    (taskInfo, finalResources)
  }

  override def statusUpdate(d: SchedulerDriver, status: TaskStatus) {
    inClassLoader() {
      val tid = status.getTaskId.getValue.toLong
      val state = TaskState.fromMesos(status.getState)
      synchronized {
        if (TaskState.isFailed(TaskState.fromMesos(status.getState))
          && taskIdToSlaveId.contains(tid)) {
          // We lost the executor on this slave, so remember that it's gone
          removeExecutor(taskIdToSlaveId(tid), "Lost executor")
        }
        if (TaskState.isFinished(state)) {
          taskIdToSlaveId.remove(tid)
        }
      }
      scheduler.statusUpdate(tid, state, status.getData.asReadOnlyByteBuffer)
    }
  }

  override def error(d: SchedulerDriver, message: String) {
    inClassLoader() {
      logError("Mesos error: " + message)
      scheduler.error(message)
    }
  }

  override def stop() {
    if (mesosDriver != null) {
      mesosDriver.stop()
    }
  }

  override def reviveOffers() {
    mesosDriver.reviveOffers()
  }

  override def frameworkMessage(d: SchedulerDriver, e: ExecutorID, s: SlaveID, b: Array[Byte]) {}

  /**
   * Remove executor associated with slaveId in a thread safe manner.
   */
  private def removeExecutor(slaveId: String, reason: String) = {
    synchronized {
      listenerBus.post(SparkListenerExecutorRemoved(System.currentTimeMillis(), slaveId, reason))
      slaveIdToExecutorInfo -= slaveId
    }
  }

  // Common handling for slaveLost/executorLost callbacks: drop our executor
  // bookkeeping for the slave and notify the task scheduler.
  private def recordSlaveLost(d: SchedulerDriver, slaveId: SlaveID, reason: ExecutorLossReason) {
    inClassLoader() {
      logInfo("Mesos slave lost: " + slaveId.getValue)
      removeExecutor(slaveId.getValue, reason.toString)
      scheduler.executorLost(slaveId.getValue, reason)
    }
  }

  override def slaveLost(d: SchedulerDriver, slaveId: SlaveID) {
    recordSlaveLost(d, slaveId, SlaveLost())
  }

  override def executorLost(d: SchedulerDriver, executorId: ExecutorID,
                            slaveId: SlaveID, status: Int) {
    logInfo("Executor lost: %s, marking slave %s as lost".format(executorId.getValue,
      slaveId.getValue))
    recordSlaveLost(d, slaveId, ExecutorExited(status))
  }

  // NOTE(review): interruptThread is ignored here — Mesos' killTask API has
  // no equivalent; the task is killed at the Mesos level instead.
  override def killTask(taskId: Long, executorId: String, interruptThread: Boolean): Unit = {
    mesosDriver.killTask(
      TaskID.newBuilder()
        .setValue(taskId.toString).build()
    )
  }

  // TODO: query Mesos for number of cores
  override def defaultParallelism(): Int = sc.conf.getInt("spark.default.parallelism", 8)

  override def applicationId(): String =
    Option(appId).getOrElse {
      logWarning("Application ID is not initialized yet.")
      super.applicationId
    }
}
|
ArvinDevel/onlineAggregationOnSparkV2
|
core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
|
Scala
|
apache-2.0
| 16,160
|
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.avocado.algorithms.math
import scala.annotation.tailrec
import scala.math.log
object LogBinomial extends Serializable {

  // TODO: for efficiency, we should cache these values and not recompute

  /**
   * Computes log(n choose k) as a difference of partial log-factorial sums:
   * log(n! / (n - k)!) - log(k!).
   */
  def logBinomial(n: Int,
                  k: Int): Double = {
    // Sum of log(v) across an integer range; an empty range contributes 0.0.
    def logRangeSum(range: Range): Double =
      range.foldLeft(0.0)((acc, v) => acc + log(v.toDouble))

    logRangeSum((n - k + 1) to n) - logRangeSum(1 to k)
  }

  /**
   * For a binomial distribution with a given log success probability and
   * max number of events, returns the log probabilities associated with the
   * number of events between 0 and the max number of events.
   *
   * @param logP The log success probability of an event.
   * @param m The max number of events.
   * @return Returns an m + 1 length array with the log probability of each possible
   *         success count.
   */
  def calculateLogProbabilities(logP: Double,
                                m: Int): Array[Double] = {
    // log(1 - P), computed in log space
    val log1mP = LogUtils.logAdditiveInverse(logP)
    // The two endpoints (0 and m successes) are special-cased so the
    // binomial coefficient is never computed for them.
    Array.tabulate(m + 1) { successes =>
      if (successes == 0) {
        m.toDouble * log1mP
      } else if (successes == m) {
        m.toDouble * logP
      } else {
        logBinomial(m, successes) +
          (successes * logP).toDouble +
          ((m - successes).toDouble * log1mP)
      }
    }
  }
}
|
FusionWorks/avocado
|
avocado-core/src/main/scala/org/bdgenomics/avocado/algorithms/math/LogBinomial.scala
|
Scala
|
apache-2.0
| 2,325
|
package mesosphere.marathon.core.matcher.reconcile.impl
import mesosphere.marathon.core.launcher.TaskOp
import mesosphere.marathon.core.launcher.impl.TaskLabels
import mesosphere.marathon.core.matcher.base.OfferMatcher
import mesosphere.marathon.core.matcher.base.OfferMatcher.{ MatchedTaskOps, TaskOpSource, TaskOpWithSource }
import mesosphere.marathon.core.task.TaskStateOp
import mesosphere.marathon.core.task.Task.Id
import mesosphere.marathon.core.task.tracker.TaskTracker
import mesosphere.marathon.core.task.tracker.TaskTracker.TasksByApp
import mesosphere.marathon.state.{ Group, GroupRepository, Timestamp }
import mesosphere.util.state.FrameworkId
import org.apache.mesos.Protos.{ Offer, OfferID, Resource }
import org.slf4j.LoggerFactory
import scala.concurrent.Future
/**
* Matches task labels found in offer against known tasks/apps and
*
* * destroys unknown volumes
* * unreserves unknown reservations
*
* In the future, we probably want to switch to a less aggressive approach
*
* * by creating tasks in state "unknown" of unknown tasks which are then transitioned to state "garbage" after
* a delay
* * and creating unreserved/destroy operations for tasks in state "garbage" only
*/
private[reconcile] class OfferMatcherReconciler(taskTracker: TaskTracker, groupRepository: GroupRepository)
  extends OfferMatcher {

  private val log = LoggerFactory.getLogger(getClass)

  import scala.concurrent.ExecutionContext.Implicits.global

  override def matchOffer(deadline: Timestamp, offer: Offer): Future[MatchedTaskOps] = {
    val frameworkId = FrameworkId("").mergeFromProto(offer.getFrameworkId)
    // Group the offer's resources by the task id embedded in their labels,
    // keeping only resources that actually carry a task id for our framework.
    val resourcesByTaskId: Map[Id, Iterable[Resource]] = {
      import scala.collection.JavaConverters._
      offer.getResourcesList.asScala.groupBy(TaskLabels.taskIdForResource(frameworkId, _)).collect {
        case (Some(taskId), resources) => taskId -> resources
      }
    }
    processResourcesByTaskId(offer, resourcesByTaskId)
  }

  // Builds unreserve/destroy operations for resources whose task id is no
  // longer known to the task tracker or whose app no longer exists.
  private[this] def processResourcesByTaskId(
    offer: Offer, resourcesByTaskId: Map[Id, Iterable[Resource]]): Future[MatchedTaskOps] =
  {
    // do not query taskTracker in the common case
    if (resourcesByTaskId.isEmpty) Future.successful(MatchedTaskOps.noMatch(offer.getId))
    else {
      def createTaskOps(tasksByApp: TasksByApp, rootGroup: Group): MatchedTaskOps = {
        // A task id is spurious when neither the task nor its app is known.
        def spurious(taskId: Id): Boolean =
          tasksByApp.task(taskId).isEmpty || rootGroup.app(taskId.runSpecId).isEmpty

        val taskOps = resourcesByTaskId.iterator.collect {
          case (taskId, spuriousResources) if spurious(taskId) =>
            val unreserveAndDestroy =
              TaskOp.UnreserveAndDestroyVolumes(
                stateOp = TaskStateOp.ForceExpunge(taskId),
                oldTask = tasksByApp.task(taskId),
                resources = spuriousResources.to[Seq]
              )
            TaskOpWithSource(source(offer.getId), unreserveAndDestroy)
        }.to[Seq]

        MatchedTaskOps(offer.getId, taskOps, resendThisOffer = true)
      }

      // query in parallel
      val tasksByAppFuture = taskTracker.tasksByApp()
      val rootGroupFuture = groupRepository.rootGroupOrEmpty()
      for { tasksByApp <- tasksByAppFuture; rootGroup <- rootGroupFuture } yield createTaskOps(tasksByApp, rootGroup)
    }
  }

  // Logging-only sink for accept/reject notifications of the generated ops.
  private[this] def source(offerId: OfferID) = new TaskOpSource {
    override def taskOpAccepted(taskOp: TaskOp): Unit =
      log.info(s"accepted unreserveAndDestroy for ${taskOp.taskId} in offer [${offerId.getValue}]")
    override def taskOpRejected(taskOp: TaskOp, reason: String): Unit =
      log.info("rejected unreserveAndDestroy for {} in offer [{}]: {}", taskOp.taskId, offerId.getValue, reason)
  }
}
|
yp-engineering/marathon
|
src/main/scala/mesosphere/marathon/core/matcher/reconcile/impl/OfferMatcherReconciler.scala
|
Scala
|
apache-2.0
| 3,797
|
package iot.pood.storage.actor
import akka.actor.Actor.Receive
import akka.actor.Props
import iot.pood.base.actors.BaseActor
import iot.pood.base.messages.integration.IntegrationMessages.DataMessages.DataMessage
/**
* Created by rafik on 26.9.2017.
*/
/** Companion with the actor's well-known name and its Props factory. */
object DataStorage {
  val NAME = "dataStorage"
  def props(): Props = Props(new DataStorageActor)
}
/** Actor that currently only logs each incoming data message. */
class DataStorageActor extends BaseActor {
  override def receive: Receive = {
    case message: DataMessage =>
      log.info("Store data message: {}", message)
  }
}
|
rafajpet/iot-pood
|
iot-pood-storage/src/main/scala/iot/pood/storage/actor/DataStorage.scala
|
Scala
|
mit
| 498
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.descriptors
import java.util
import org.apache.flink.table.api.{Types, ValidationException}
import org.junit.Test
import scala.collection.JavaConverters._
class SchemaTest extends DescriptorTestBase {

  // An unknown type name in a schema property must fail validation.
  @Test(expected = classOf[ValidationException])
  def testInvalidType(): Unit = {
    addPropertyAndVerify(
      descriptors().get(0),
      "schema.1.type", "dfghj")
  }

  // Adding rowtime properties to the proctime field (index 2) must fail.
  @Test(expected = classOf[ValidationException])
  def testBothRowtimeAndProctime(): Unit = {
    addPropertyAndVerify(
      descriptors().get(0),
      "schema.2.rowtime.watermarks.type", "from-source")
  }

  // ----------------------------------------------------------------------------------------------

  // desc1 declares both a proctime and a rowtime attribute; desc2 replaces
  // the rowtime attribute with a plain timestamp field.
  override def descriptors(): util.List[Descriptor] = {
    val desc1 = new Schema()
      .field("myField", Types.BOOLEAN)
      .field("otherField", "VARCHAR").from("csvField")
      .field("p", Types.SQL_TIMESTAMP).proctime()
      .field("r", Types.SQL_TIMESTAMP).rowtime(
        new Rowtime().timestampsFromSource().watermarksFromSource())
    val desc2 = new Schema()
      .field("myField", Types.BOOLEAN)
      .field("otherField", "VARCHAR").from("csvField")
      .field("p", Types.SQL_TIMESTAMP).proctime()
      .field("r", Types.SQL_TIMESTAMP)
    util.Arrays.asList(desc1, desc2)
  }

  override def validator(): DescriptorValidator = {
    new SchemaValidator(true, true, true)
  }

  // Expected property-map encodings of desc1 and desc2, in the same order.
  override def properties(): util.List[util.Map[String, String]] = {
    val props1 = Map(
      "schema.0.name" -> "myField",
      "schema.0.type" -> "BOOLEAN",
      "schema.1.name" -> "otherField",
      "schema.1.type" -> "VARCHAR",
      "schema.1.from" -> "csvField",
      "schema.2.name" -> "p",
      "schema.2.type" -> "TIMESTAMP",
      "schema.2.proctime" -> "true",
      "schema.3.name" -> "r",
      "schema.3.type" -> "TIMESTAMP",
      "schema.3.rowtime.watermarks.type" -> "from-source",
      "schema.3.rowtime.timestamps.type" -> "from-source"
    )
    val props2 = Map(
      "schema.0.name" -> "myField",
      "schema.0.type" -> "BOOLEAN",
      "schema.1.name" -> "otherField",
      "schema.1.type" -> "VARCHAR",
      "schema.1.from" -> "csvField",
      "schema.2.name" -> "p",
      "schema.2.type" -> "TIMESTAMP",
      "schema.2.proctime" -> "true",
      "schema.3.name" -> "r",
      "schema.3.type" -> "TIMESTAMP"
    )
    util.Arrays.asList(props1.asJava, props2.asJava)
  }
}
|
fhueske/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/descriptors/SchemaTest.scala
|
Scala
|
apache-2.0
| 3,244
|
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.action.builder
import scala.concurrent.duration.Duration
import io.gatling.core.action.{ Action, Pace }
import io.gatling.core.structure.ScenarioContext
import io.gatling.core.session.Expression
/**
* Builder for the Pace action
*
* Originally contributed by James Pickering.
*/
class PaceBuilder(interval: Expression[Duration], counter: String) extends ActionBuilder {

  // `counter` is the session attribute name the Pace action uses for its
  // bookkeeping — see Pace for the exact semantics.
  override def build(ctx: ScenarioContext, next: Action): Action =
    new Pace(interval, counter, ctx.system, ctx.coreComponents.statsEngine, next)
}
|
MykolaB/gatling
|
gatling-core/src/main/scala/io/gatling/core/action/builder/PaceBuilder.scala
|
Scala
|
apache-2.0
| 1,171
|
package com.sksamuel.scrimage.filter
import com.sksamuel.scrimage.ImmutableImage
import org.scalatest.FunSuite
/** Verifies TelevisionFilter against a pre-rendered reference image. */
class TelevisionFilterTest extends FunSuite {

  private val source = ImmutableImage.fromResource("/bird_small.png")
  private val reference =
    ImmutableImage.fromResource("/com/sksamuel/scrimage/filters/bird_small_television.png")

  test("television filter output matches expected") {
    assert(source.filter(new TelevisionFilter) === reference)
  }
}
|
sksamuel/scrimage
|
scrimage-filters/src/test/scala/com/sksamuel/scrimage/filter/TelevisionFilterTest.scala
|
Scala
|
apache-2.0
| 469
|
package skinny.http
import org.specs2.mutable.Specification
class RequestSpec extends Specification {

  sequential

  // A single mutable Request instance is exercised by the spec below.
  val request = new Request("http://example.com/")

  "Request" should {
    "be available" in {
      // Smoke-test the fluent setters; only the header API's return values
      // are asserted.
      request.enableThrowingIOException(true)
      request.url("http://www.example.com")
      request.followRedirects(true)
      request.connectTimeoutMillis(100)
      request.readTimeoutMillis(100)
      request.referer("foo")
      request.userAgent("ua")
      request.contentType("text/html")
      request.header("foo") should equalTo(None)
      request.header("foo", "bar")
      request.headerNames.size should equalTo(1)
    }
  }
}
|
Kuchitama/skinny-framework
|
http-client/src/test/scala/skinny/http/RequestSpec.scala
|
Scala
|
mit
| 663
|
package sjr
import scalafix.v1._
class ScalaJsReactEffectAgnosticism extends SemanticRule("ScalaJsReactEffectAgnosticism") {

  // A single sub-rule instance is reused across fix invocations.
  private[this] val prohibitDefaultEffects = new ProhibitDefaultEffects

  // Delegates entirely to ProhibitDefaultEffects.
  override def fix(implicit doc: SemanticDocument): Patch =
    prohibitDefaultEffects.fix
}
|
japgolly/scalajs-react
|
scalafixRules/src/main/scala/sjr/ScalaJsReactEffectAgnosticism.scala
|
Scala
|
apache-2.0
| 294
|
package colossus.controller
import akka.actor._
import akka.util.ByteString
import colossus.RawProtocol.{Raw, RawServerCodec}
import colossus.core.{ConnectionManager, CoreDownstream, CoreUpstream, DynamicOutBuffer}
import org.scalamock.scalatest.MockFactory
import colossus.util.DataSize._
import colossus.streaming.{BufferedPipe, Pipe}
import colossus.testkit.FakeIOSystem
// Shared fixtures for controller tests: fake downstream/upstream endpoints
// and helpers to wire a Controller between stubs.
trait ControllerMocks extends MockFactory { self: org.scalamock.scalatest.MockFactory with org.scalatest.Suite =>

  val defaultConfig = ControllerConfig(4, 2000.bytes)

  // Downstream test double that buffers incoming messages in a small pipe.
  class TestDownstream[E <: Encoding](config: ControllerConfig)(implicit actorsystem: ActorSystem)
    extends ControllerDownstream[E] {
    val pipe = new BufferedPipe[E#Input](3)
    def incoming = pipe
    def controllerConfig = config
    def context = FakeIOSystem.fakeContext
    def namespace = colossus.metrics.MetricSystem.deadSystem
    override def onFatalError(reason: Throwable) = {
      println(s"FATAL : $reason")
      FatalErrorAction.Terminate
    }
  }

  // Upstream test double backed by a stubbed, always-connected connection.
  class TestUpstream[E <: Encoding](val outgoing: Pipe[E#Output, E#Output] = new BufferedPipe[E#Output](2))
    extends ControllerUpstream[E] {
    val connection = stub[ConnectionManager]
    (connection.isConnected _).when().returns(true)
    val pipe = outgoing
  }

  // Convenience overload using the raw server codec.
  def get(config: ControllerConfig = defaultConfig)(implicit sys: ActorSystem)
    : (CoreUpstream, Controller[Encoding.Server[Raw]], TestDownstream[Encoding.Server[Raw]]) = {
    get(RawServerCodec, config)
  }

  // Builds a Controller wired between a stubbed upstream and a TestDownstream.
  def get[E <: Encoding](codec: Codec[E], config: ControllerConfig)(
    implicit sys: ActorSystem): (CoreUpstream, Controller[E], TestDownstream[E]) = {
    val upstream = stub[CoreUpstream]
    val downstream = new TestDownstream[E](config)
    val controller = new Controller(downstream, codec)
    controller.setUpstream(upstream)
    (upstream, controller, downstream)
  }

  // Drains `c`'s pending output into a fresh buffer and asserts its bytes.
  def expectWrite(c: CoreDownstream, expected: ByteString, bufferSize: Int = 100) {
    val d = new DynamicOutBuffer(bufferSize)
    c.readyForData(d)
    assert(ByteString(d.data.takeAll) == expected)
  }
}
|
tumblr/colossus
|
colossus-tests/src/test/scala/colossus/controller/Common.scala
|
Scala
|
apache-2.0
| 2,106
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.ui
import java.text.SimpleDateFormat
import java.util.Date
import scala.xml.Node
import org.apache.spark.ui.{UIUtils => SparkUIUtils}
// Base class for the streaming UI batch tables; subclasses contribute extra
// columns and the row set via `columns` and `renderRows`.
private[ui] abstract class BatchTableBase(tableId: String, batchInterval: Long) {

  // Common column headers shared by all batch tables.
  protected def columns: Seq[Node] = {
    <th>Batch Time</th>
      <th>Input Size</th>
      <th>Scheduling Delay
        {SparkUIUtils.tooltip("Time taken by Streaming scheduler to submit jobs of a batch", "top")}
      </th>
      <th>Processing Time
        {SparkUIUtils.tooltip("Time taken to process all jobs of a batch", "top")}</th>
  }

  // Cells common to every row: batch time link, event count, scheduling
  // delay, and processing time. Missing durations render as "-" and sort last
  // via Long.MaxValue sort keys.
  protected def baseRow(batch: BatchUIData): Seq[Node] = {
    val batchTime = batch.batchTime.milliseconds
    val formattedBatchTime = UIUtils.formatBatchTime(batchTime, batchInterval)
    val eventCount = batch.numRecords
    val schedulingDelay = batch.schedulingDelay
    val formattedSchedulingDelay = schedulingDelay.map(SparkUIUtils.formatDuration).getOrElse("-")
    val processingTime = batch.processingDelay
    val formattedProcessingTime = processingTime.map(SparkUIUtils.formatDuration).getOrElse("-")
    val batchTimeId = s"batch-$batchTime"

    <td id={batchTimeId} sorttable_customkey={batchTime.toString}>
      <a href={s"batch?id=$batchTime"}>
        {formattedBatchTime}
      </a>
    </td>
      <td sorttable_customkey={eventCount.toString}>{eventCount.toString} events</td>
      <td sorttable_customkey={schedulingDelay.getOrElse(Long.MaxValue).toString}>
        {formattedSchedulingDelay}
      </td>
      <td sorttable_customkey={processingTime.getOrElse(Long.MaxValue).toString}>
        {formattedProcessingTime}
      </td>
  }

  // Assembles the sortable table element from `columns` and `renderRows`.
  private def batchTable: Seq[Node] = {
    <table id={tableId} class="table table-bordered table-striped table-condensed sortable">
      <thead>
        {columns}
      </thead>
      <tbody>
        {renderRows}
      </tbody>
    </table>
  }

  def toNodeSeq: Seq[Node] = {
    batchTable
  }

  /**
   * Return HTML for all rows of this table.
   */
  protected def renderRows: Seq[Node]
}
// Table of batches that are waiting or currently being processed.
private[ui] class ActiveBatchTable(
    runningBatches: Seq[BatchUIData],
    waitingBatches: Seq[BatchUIData],
    batchInterval: Long) extends BatchTableBase("active-batches-table", batchInterval) {

  override protected def columns: Seq[Node] = super.columns ++ <th>Status</th>

  override protected def renderRows: Seq[Node] = {
    // The "batchTime"s of "waitingBatches" must be greater than "runningBatches"'s, so display
    // waiting batches before running batches
    waitingBatches.flatMap(batch => <tr>{waitingBatchRow(batch)}</tr>) ++
      runningBatches.flatMap(batch => <tr>{runningBatchRow(batch)}</tr>)
  }

  private def runningBatchRow(batch: BatchUIData): Seq[Node] = {
    baseRow(batch) ++ <td>processing</td>
  }

  private def waitingBatchRow(batch: BatchUIData): Seq[Node] = {
    baseRow(batch) ++ <td>queued</td>
  }
}
// Table of finished batches, adding total delay and output-op success counts.
private[ui] class CompletedBatchTable(batches: Seq[BatchUIData], batchInterval: Long)
  extends BatchTableBase("completed-batches-table", batchInterval) {

  override protected def columns: Seq[Node] = super.columns ++ {
    <th>Total Delay {SparkUIUtils.tooltip("Total time taken to handle a batch", "top")}</th>
      <th>Output Ops: Succeeded/Total</th>
  }

  override protected def renderRows: Seq[Node] = {
    batches.flatMap(batch => <tr>{completedBatchRow(batch)}</tr>)
  }

  private def completedBatchRow(batch: BatchUIData): Seq[Node] = {
    val totalDelay = batch.totalDelay
    val formattedTotalDelay = totalDelay.map(SparkUIUtils.formatDuration).getOrElse("-")
    // One failureReason entry per failed output op.
    val numFailedOutputOp = batch.failureReason.size
    val outputOpColumn = if (numFailedOutputOp > 0) {
      s"${batch.numOutputOp - numFailedOutputOp}/${batch.numOutputOp}" +
        s" (${numFailedOutputOp} failed)"
    } else {
      s"${batch.numOutputOp}/${batch.numOutputOp}"
    }
    baseRow(batch) ++
      <td sorttable_customkey={totalDelay.getOrElse(Long.MaxValue).toString}>
        {formattedTotalDelay}
      </td>
      <td>{outputOpColumn}</td>
  }
}
|
practice-vishnoi/dev-spark-1
|
streaming/src/main/scala/org/apache/spark/streaming/ui/AllBatchesTable.scala
|
Scala
|
apache-2.0
| 4,877
|
import leon.lang._
import leon.annotation._
// Leon verification benchmark: proves that conversion to negation normal form
// (NNF) preserves the semantics of propositional formulas.
object SemanticsPreservation {

  sealed abstract class Formula
  case class And(lhs: Formula, rhs: Formula) extends Formula
  case class Or(lhs: Formula, rhs: Formula) extends Formula
  case class Not(f: Formula) extends Formula
  case class Variable(id: Int) extends Formula

  // Pushes negations inward until they only apply to variables; the
  // postcondition asserts the result is in NNF.
  @induct
  def nnf(formula: Formula): Formula = (formula match {
    case And(lhs, rhs) => And(nnf(lhs), nnf(rhs))
    case Or(lhs, rhs) => Or(nnf(lhs), nnf(rhs))
    case Not(And(lhs, rhs)) => Or(nnf(Not(lhs)), nnf(Not(rhs)))
    case Not(Or(lhs, rhs)) => And(nnf(Not(lhs)), nnf(Not(rhs)))
    case Not(Not(f)) => nnf(f)
    case n @ Not(_) => n
    case v @ Variable(_) => v
  }) ensuring(isNNF(_))

  // A formula is in NNF when negation is only ever applied directly to a variable.
  // Bug fix: the previous version rejected *every* Not, including Not(Variable),
  // which `nnf` legitimately produces (via `case n @ Not(_) => n`), making the
  // `ensuring(isNNF(_))` postcondition falsifiable on e.g. Not(Variable(0)).
  def isNNF(f: Formula): Boolean = f match {
    case And(lhs, rhs) => isNNF(lhs) && isNNF(rhs)
    case Or(lhs, rhs) => isNNF(lhs) && isNNF(rhs)
    case Not(Variable(_)) => true
    case Not(_) => false
    case Variable(_) => true
  }

  // Evaluation under the fixed valuation "Variable(id) is true iff id > 42";
  // the postcondition states that NNF conversion preserves the truth value.
  @induct
  def eval(formula: Formula): Boolean = (formula match {
    case And(lhs, rhs) => eval(lhs) && eval(rhs)
    case Or(lhs, rhs) => eval(lhs) || eval(rhs)
    case Not(f) => !eval(f)
    case Variable(id) => id > 42
  }) ensuring(res => res == eval(nnf(formula)))

  // The semantics-preservation property Leon is asked to prove.
  @induct
  def nnfPreservesSemantics(f: Formula): Boolean = {
    eval(f) == eval(nnf(f))
  } holds
}
|
ericpony/scala-examples
|
testcases/verification/compilation/SemanticsPreservation.scala
|
Scala
|
mit
| 1,310
|
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.hbase.jobs
import java.util.Base64
import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.conf.{Configurable, Configuration}
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{Result, Scan}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.{MultiTableInputFormat, TableInputFormat}
import org.apache.hadoop.hbase.protobuf.ProtobufUtil
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce._
import org.geotools.data.Query
import org.locationtech.geomesa.hbase.data.HBaseQueryPlan.ScanPlan
import org.locationtech.geomesa.hbase.data.{HBaseConnectionPool, HBaseDataStore}
import org.locationtech.geomesa.hbase.jobs.GeoMesaHBaseInputFormat.GeoMesaHBaseRecordReader
import org.locationtech.geomesa.index.api.QueryPlan.{FeatureReducer, ResultsToFeatures}
import org.locationtech.geomesa.jobs.GeoMesaConfigurator
import org.locationtech.geomesa.utils.collection.CloseableIterator
import org.locationtech.geomesa.utils.io.WithStore
import org.opengis.feature.simple.SimpleFeature
/**
* Input format that allows processing of simple features from GeoMesa based on a CQL query
*/
class GeoMesaHBaseInputFormat extends InputFormat[Text, SimpleFeature] with Configurable with LazyLogging {
// all split/scan handling is delegated to HBase's MultiTableInputFormat
private val delegate = new MultiTableInputFormat
/**
* Gets splits for a job.
*/
override def getSplits(context: JobContext): java.util.List[InputSplit] = {
val splits = delegate.getSplits(context)
logger.debug(s"Got ${splits.size()} splits")
splits
}
// Wraps the delegate reader so raw HBase Results are converted to SimpleFeatures
// (and optionally reduced) as they are read.
override def createRecordReader(
split: InputSplit,
context: TaskAttemptContext
): RecordReader[Text, SimpleFeature] = {
val toFeatures = GeoMesaConfigurator.getResultsToFeatures[Result](context.getConfiguration)
val reducer = GeoMesaConfigurator.getReducer(context.getConfiguration)
new GeoMesaHBaseRecordReader(toFeatures, reducer, delegate.createRecordReader(split, context))
}
override def setConf(conf: Configuration): Unit = {
delegate.setConf(conf)
// configurations aren't thread safe - if multiple input formats are configured at once,
// updating it could cause ConcurrentModificationExceptions
conf.synchronized {
// see TableMapReduceUtil.java
HBaseConfiguration.merge(conf, HBaseConfiguration.create(conf))
HBaseConnectionPool.configureSecurity(conf)
}
}
override def getConf: Configuration = delegate.getConf
}
object GeoMesaHBaseInputFormat {
/**
* Configure the input format based on a query
*
* @param job job to configure
* @param params data store parameters
* @param query query
*/
def configure(job: Job, params: java.util.Map[String, _], query: Query): Unit = {
// get the query plan to set up the iterators, ranges, etc
// the data store is only needed to plan the query, so it is opened and closed here
val plan = WithStore[HBaseDataStore](params) { ds =>
assert(ds != null, "Invalid data store parameters")
HBaseJobUtils.getSingleScanPlan(ds, query)
}
configure(job, plan)
}
/**
* Configure the input format based on a query plan
*
* @param job job to configure
* @param plan query plan
*/
def configure(job: Job, plan: ScanPlan): Unit = {
// set this class as the input format, then delegate all scan config to the conf-based overload
job.setInputFormatClass(classOf[GeoMesaHBaseInputFormat])
configure(job.getConfiguration, plan)
}
/**
* Configure the input format based on a query plan
*
* @param conf conf
* @param plan query plan
*/
def configure(conf: Configuration, plan: ScanPlan): Unit = {
// this input format only supports plans that read exactly one table
if (plan.scans.lengthCompare(1) != 0) {
throw new IllegalArgumentException(s"Query requires multiple tables: ${plan.scans.map(_.table).mkString(", ")}")
}
conf.set(TableInputFormat.INPUT_TABLE, plan.scans.head.table.getNameAsString)
// note: secondary filter is handled by scan push-down filter
// serialize each scan (protobuf + base64) the way MultiTableInputFormat expects
val scans = plan.scans.head.scans.map { scan =>
// need to set the table name in each scan
scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, plan.scans.head.table.getName)
Base64.getEncoder.encodeToString(ProtobufUtil.toScan(scan).toByteArray)
}
conf.setStrings(MultiTableInputFormat.SCANS, scans: _*)
GeoMesaConfigurator.setResultsToFeatures(conf, plan.resultsToFeatures)
// note: reduce and sorting have to be handled in the job reducers
plan.reducer.foreach(GeoMesaConfigurator.setReducer(conf, _))
plan.sort.foreach(GeoMesaConfigurator.setSorting(conf, _))
plan.projection.foreach(GeoMesaConfigurator.setProjection(conf, _))
}
/**
* Record reader for simple features
*
* @param toFeatures converts results to features
* @param reducer feature reducer, if any
* @param reader underlying hbase reader
*/
class GeoMesaHBaseRecordReader(
toFeatures: ResultsToFeatures[Result],
reducer: Option[FeatureReducer],
reader: RecordReader[ImmutableBytesWritable, Result]
) extends RecordReader[Text, SimpleFeature] with LazyLogging {
// iterator of converted features, with the optional reducer applied on top
private val features = {
val base = new RecordReaderIterator(reader, toFeatures)
reducer match {
case None => base
case Some(reduce) => reduce(base)
}
}
// key is the current feature's id; the Text instance is reused across records
private val key = new Text()
private var value: SimpleFeature = _
override def initialize(split: InputSplit, context: TaskAttemptContext): Unit = reader.initialize(split, context)
override def getProgress: Float = reader.getProgress
override def nextKeyValue(): Boolean = {
if (features.hasNext) {
value = features.next
key.set(value.getID)
true
} else {
false
}
}
override def getCurrentKey: Text = key
override def getCurrentValue: SimpleFeature = value
// closing the feature iterator closes the underlying hbase reader
override def close(): Unit = features.close()
}
/**
 * Adapts a Hadoop RecordReader into a closeable feature iterator,
 * converting each HBase Result to a SimpleFeature on the fly.
 */
private class RecordReaderIterator(
    reader: RecordReader[ImmutableBytesWritable, Result],
    toFeatures: ResultsToFeatures[Result]
  ) extends CloseableIterator[SimpleFeature] {

  // single-element lookahead buffer holding the next feature to hand out
  private var buffered: SimpleFeature = _

  override def hasNext: Boolean =
    buffered != null || (reader.nextKeyValue() && {
      buffered = toFeatures(reader.getCurrentValue)
      true
    })

  override def next(): SimpleFeature = {
    val out = buffered
    buffered = null
    out
  }

  override def close(): Unit = reader.close()
}
}
|
locationtech/geomesa
|
geomesa-hbase/geomesa-hbase-jobs/src/main/scala/org/locationtech/geomesa/hbase/jobs/GeoMesaHBaseInputFormat.scala
|
Scala
|
apache-2.0
| 6,888
|
package net.xylophones.planetoid.game
import net.xylophones.planetoid.game.maths.Vector2D
import net.xylophones.planetoid.game.model._
/** Assembles fully-initialised game containers for a pair of players. */
class GameContainerFactory(rocketFactory: RocketFactory) {

  /**
   * Creates a new game: planet at the centre of the universe, one rocket per
   * player at its initial position, and the round-start countdown armed.
   */
  def createGameContainer(player1Id: String, player2Id: String) = {
    val physics = new GamePhysics
    val centre = Vector2D(physics.universeWidth / 2, physics.universeHeight / 2)
    val planet = Planet(centre, physics.planetRadius)
    val players = Players(
      Player(rocketFactory.getRocketAtInitialPosition(PlayerIdentifier.Player1, physics), physics.numLives),
      Player(rocketFactory.getRocketAtInitialPosition(PlayerIdentifier.Player2, physics), physics.numLives)
    )
    val roundTimer = RoundCountdownTimer(remainingTimeMs = physics.roundStartDelayMilliseconds)
    val model = GameModel(planet, players, roundStartTimer = roundTimer)
    new GameContainer(freshId, player1Id, player2Id, physics, model, PlayerInput(), PlayerInput())
  }

  // random identifier for each new game
  private def freshId = java.util.UUID.randomUUID.toString
}
|
wjsrobertson/planetoid
|
game/src/main/scala/net/xylophones/planetoid/game/GameContainerFactory.scala
|
Scala
|
apache-2.0
| 1,068
|
package mimir.models;
import java.io.File
import java.sql.SQLException
import java.util
import scala.collection.JavaConversions._
import scala.util._
import scala.util.Random
import com.typesafe.scalalogging.Logger
import org.joda.time.{DateTime, Seconds, Days, Period}
import com.typesafe.scalalogging.Logger
import mimir.algebra._
import mimir.ctables._
import mimir.util.{RandUtils,TextUtils,TimeUtils}
import mimir.Database
import mimir.models._
import mimir.util._
import mimir.statistics.DetectSeries
import org.apache.spark.sql.Dataset
import org.apache.spark.sql.functions.{col, monotonically_increasing_id, lit, not, isnull,asc,desc}
import org.apache.spark.sql.DataFrame
import mimir.exec.spark.RAToSpark
import org.apache.spark.sql.Column
import org.apache.spark.sql.Row
//Upgrade: Move the series column detection to SeriesMissingValueModel
object SeriesMissingValueModel
{
val logger = Logger(org.slf4j.LoggerFactory.getLogger("mimir.models.SeriesMissingValueModel"))
// Trains a SimpleSeriesModel over `query` and returns, for each column that has
// at least one usable series, a (model, column-index, hint-expressions) triple.
// Note: the index kept per column is its position in the original `columns` seq,
// which matches the model's internal indexing.
def train(db: Database, name: ID, columns: Seq[ID], query:Operator, humanReadableName:String): Map[ID,(Model,Int,Seq[Expression])] =
{
logger.debug(s"Train on: $query")
val (schemaWProv, modelHT) = SparkUtils.getDataFrameWithProvFromQuery(db, query)
val model = new SimpleSeriesModel(name, columns, schemaWProv, modelHT, humanReadableName)
val usefulColumns = trainModel( modelHT, columns, schemaWProv, model)
columns.zip(usefulColumns)
.zipWithIndex
.filter(_._1._2)
.map { case ((column, _), idx) =>
(column -> (model, idx, Seq()))
}
.toMap
}
// For each target column, detects the best-matching series column(s) and feeds
// the candidate datasets to the model; returns one usable/not-usable flag per column.
def trainModel(queryDf: DataFrame, columns:Seq[ID], schema:Seq[(ID, Type)], model:SimpleSeriesModel) =
{
val predictions =
columns.zipWithIndex.map { case (col, idx) =>
DetectSeries.bestMatchSeriesColumn(
col,
schema.toMap.get(col).get,
queryDf,
schema,
0.1
)
}
model.train(predictions)
}
}
// A candidate series column together with the reason it was selected and its match score.
case class SeriesColumnItem(columnName: ID, reason: String, score: Double)
/**
* A model that estimates a missing value in a column based on another column that forms a series.
* Best Guess : performs the best guess based on the weighted average of the upper- and lower-bound values.
* Sample : picks a random value within the range of the upper and lower bounds.
* Train : performs a best guess on missing-value fields. For each missing value, a map is created:
* [ ROWID -> (Best Guess Value, Lower Bound Value, Upper Bound Value, Received Feedback) ]
*
* */
@SerialVersionUID(1000L)
class SimpleSeriesModel(name: ID, val seriesCols:Seq[ID], val querySchema: Seq[(ID, Type)], queryDf: DataFrame, humanReadableName:String)
extends Model(name)
with SourcedFeedback
with ModelCache
{
// One candidate-series dataset (series column id -> match score) per target column; filled by train()
var predictions:Seq[Dataset[(ID, Double)]] = Seq()
// all column names in the query schema
val colNames = querySchema.map { x => x._1 }
// cache key combines the column index, the row id (args) and the chosen series column (hints)
def getCacheKey(idx: Int, args: Seq[PrimitiveValue], hints: Seq[PrimitiveValue] ) : ID =
ID(s"${idx}_${args(0).asString}_${hints(0).asString}")
// feedback is keyed per column index and row id only
def getFeedbackKey(idx: Int, args: Seq[PrimitiveValue] ) : ID =
ID(s"${idx}_${args(0).asString}")
// Stores the candidate-series datasets and reports, for each target column,
// whether at least one candidate series was found (i.e. the dataset is non-empty).
def train(predictionsDs:Seq[Dataset[(ID, Double)]] ): Seq[Boolean] =
{
predictions = predictionsDs
SeriesMissingValueModel.logger.debug(s"Trained: $predictions")
predictions.map(!_.limit(1).collect().isEmpty)
}
// Columns for which this model can produce guesses.
// NOTE(review): the `Seq()` pattern can never match a Dataset, so every column with
// a predictions entry ends up in the result — confirm whether an emptiness check
// on the dataset was intended here.
def validFor: Set[ID] =
{
seriesCols
.zip(predictions)
.flatMap {
case (col, Seq()) => None
case (col, _) => Some(col)
}
.toSet
}
// Convenience overload: args(0) is expected to be the ROWID of the row to repair.
def interpolate(idx: Int, args:Seq[PrimitiveValue], series: ID): PrimitiveValue =
interpolate(idx, args(0).asInstanceOf[RowIdPrimitive], series)
// Estimates the missing value of column `seriesCols(idx)` on row `rowid` by locating
// the nearest non-null neighbours below and above the row's `series` key and
// interpolating between them. Caches and returns the result.
def interpolate(idx: Int, rowid:RowIdPrimitive, series: ID): PrimitiveValue =
{
val colName = seriesCols(idx)
// spark Row field -> mimir primitive (dates/timestamps read as longs for arithmetic)
val sp2m : (String, Row) => PrimitiveValue = (colName, row) => {
row match {
case null => NullPrimitive()
case _ => SparkUtils.convertField(
querySchema.toMap.get(ID(colName)).get match {
case TDate() => TInt()
case TTimestamp() => TInt()
case x => x
},
row,
row.fieldIndex(colName)) /*match {
case np@NullPrimitive() => np
case x => querySchema.toMap.get(colName).get match {
case TDate() => SparkUtils.convertDate(x.asLong)
case TTimestamp() => SparkUtils.convertTimestamp(x.asLong)
case _ => x
}
}*/
}
}
// mimir primitive -> spark external value, used to build filter literals
val m2sp : PrimitiveValue => Any = prim => RAToSpark.mimirPrimitiveToSparkExternalRowValue(prim)
val sprowid = m2sp(rowid)
// the last schema column carries the row id (provenance column)
val rowIdVar = col(colNames.last.id)//(monotonically_increasing_id()+1).alias(RowIdVar().toString()).cast(OperatorTranslation.getSparkType(rowIdType))
val rowDF = queryDf.filter(rowIdVar === sprowid )
// `key` is this row's value in the series column; interpolation is impossible without it
val key = sp2m(series.id,rowDF.select(series.id).limit(1).collect().headOption.getOrElse(null))
val nkey = querySchema.toMap.get(series).get match {
case TDate() => SparkUtils.convertDate(key.asLong)
case TTimestamp() => SparkUtils.convertTimestamp(key.asLong)
case _ => key
}
SeriesMissingValueModel.logger.debug(
s"Interpolate $rowid with key: $nkey for column: (${colName} -> ${querySchema.toMap.get(colName).get}) with series: ($series -> ${querySchema.toMap.get(series.id)})")
if(key == NullPrimitive()){ return NullPrimitive(); }
// nearest neighbour at or below the key, excluding this row and null targets
val low =
queryDf.filter((col(series.id) <= lit(m2sp(key))).and(not(isnull(col(colName.id)))).and(rowIdVar =!= sprowid))
.sort(desc(series.id))
.limit(1)
.select(series.id, colName.id).collect().map( row => {
(querySchema.toMap.get(series).get match {
case TDate() => SparkUtils.convertDate(sp2m(series.id,row).asLong)
case TTimestamp() => SparkUtils.convertTimestamp(sp2m(series.id,row).asLong)
case _ => sp2m(series.id,row)
},
querySchema.toMap.get(colName).get match {
case TDate() => SparkUtils.convertDate(sp2m(colName.id,row).asLong)
case TTimestamp() => SparkUtils.convertTimestamp(sp2m(colName.id,row).asLong)
case _ => sp2m(colName.id,row)
} )
} ).toSeq
// nearest neighbour at or above the key, excluding this row and null targets
val high =
queryDf.filter((col(series.id) >= lit(m2sp(key))).and(not(isnull(col(colName.id)))).and(rowIdVar =!= sprowid))
.sort(asc(series.id))
.limit(1)
.select(series.id, colName.id).collect().map( row => {
(querySchema.toMap.get(series).get match {
case TDate() => SparkUtils.convertDate(sp2m(series.id,row).asLong)
case TTimestamp() => SparkUtils.convertTimestamp(sp2m(series.id,row).asLong)
case _ => sp2m(series.id,row)
},
querySchema.toMap.get(colName).get match {
case TDate() => SparkUtils.convertDate(sp2m(colName.id,row).asLong)
case TTimestamp() => SparkUtils.convertTimestamp(sp2m(colName.id,row).asLong)
case _ => sp2m(colName.id,row)
} )
} ).toSeq
SeriesMissingValueModel.logger.debug(s" -> low = $low; high = $high")
// one neighbour -> use it directly; two -> interpolate proportionally to the key distance
val result = (low, high) match {
case (Seq(), Seq()) => NullPrimitive()
case (Seq((_, low_v)), Seq()) => low_v
case (Seq(), Seq((_, high_v))) => high_v
case (Seq((low_k, low_v)), Seq((high_k, high_v))) => {
val ratio = DetectSeries.ratio(low_k, nkey, high_k)
SeriesMissingValueModel.logger.debug(s" -> ratio = $ratio")
DetectSeries.interpolate(low_v, ratio, high_v)
}
}
SeriesMissingValueModel.logger.debug(s"result = $result; rowid = $rowid")
setCache(idx, Seq(rowid), Seq(StringPrimitive(series.id)),result)
result
}
// The series column with the lowest score for target column `idx`
// (scores are sorted ascending, so lower means a better match).
def bestSequence(idx: Int): ID = {
val df = predictions(idx)
val bestSeq = df.sort(asc(df.columns(1))).limit(1).head._1
SeriesMissingValueModel.logger.debug(s"bestSeq for col:${seriesCols(idx)} => $bestSeq")
bestSeq
}
// single argument: the ROWID of the row being repaired
def argTypes(idx: Int) = Seq(TRowId())
// the guessed value has the type of the repaired column
def varType(idx: Int, args: Seq[Type]): Type = querySchema.toMap.get(seriesCols(idx)).get
// Best guess for column `idx` on the row identified by args(0):
// explicit user feedback wins, then the cached value, then a fresh
// interpolation against the best-matching series column.
def bestGuess(idx: Int, args: Seq[PrimitiveValue], hints: Seq[PrimitiveValue] ): PrimitiveValue =
{
getFeedback(idx, args) match {
case Some(v) => v
case None => {
val bestSeries = bestSequence(idx)
getCache(idx, args, Seq(StringPrimitive(bestSeries.id))) match {
case Some(v) => v
case None => {
//throw new Exception(s"The Model is not trained: ${this.name}: idx: $idx args: [${args.mkString(",")}] series: $bestSeries" )
interpolate(idx, args, bestSeries)
}
}
}
}
}
// Samples a guess by picking a random candidate series column and interpolating
// along it; feedback, when present, is always returned verbatim.
def sample(idx: Int, randomness: Random, args: Seq[PrimitiveValue], hints: Seq[PrimitiveValue]): PrimitiveValue =
{
getFeedback(idx, args) match {
case Some(v) => v
case None => {
//TODO: this should probably be scaled by variance. For now... just pick entirely at random
//val df = predictions(idx)
val series = predictions(idx).sample(false, 0.1, randomness.nextInt()).limit(1).head()._1
getCache(idx, args, Seq(StringPrimitive(series.id))) match {
case Some(v) => v
case None => {
//throw new Exception(s"The Model is not trained: ${this.name}: idx: $idx args: [${args.mkString(",")}] series: $series" )
interpolate(idx, args, series)
}
}
}
}
}
// Human-readable explanation of the current guess for column `idx` on row args(0):
// either restates the user's feedback or describes the interpolation performed.
def reason(idx: Int, args: Seq[PrimitiveValue], hints: Seq[PrimitiveValue]): String = {
getFeedback(idx, args) match {
case Some(value) =>
s"You told me that $name.${seriesCols(idx)} = ${value} on row ${args(0)} (${args(0).getType})"
case None => {
val bestSeries = bestSequence(idx)
getCache(idx, args, Seq(StringPrimitive(bestSeries.id))) match {
case Some(value) =>
s"I interpolated $humanReadableName.${seriesCols(idx)}, ordered by $humanReadableName.${bestSequence(idx)} to get $value for row ${args(0)}"
case None =>{
//s"I interpolated $name.${colNames(idx)}, ordered by $name.${bestSequence(idx)} row ${args(0)}"
s"I interpolated $humanReadableName.${seriesCols(idx)}, ordered by $humanReadableName.${bestSeries} to get ${interpolate(idx, args, bestSeries)} for row ${args(0)}"
}
}
}
}
}
// Records a user-supplied ground-truth value for column `idx` on row args(0);
// subsequent guesses for this cell return `v` directly.
def feedback(idx: Int, args: Seq[PrimitiveValue], v: PrimitiveValue): Unit =
{
SeriesMissingValueModel.logger.debug(s"Feedback: $idx / $args (${args(0).getType}) <- $v")
setFeedback(idx, args, v)
SeriesMissingValueModel.logger.debug(s"Now: ${getFeedback(idx, args)}")
}
// A cell counts as acknowledged once the user has given feedback for it.
def isAcknowledged(idx: Int, args: Seq[PrimitiveValue]): Boolean =
hasFeedback(idx, args)
// this model takes no hint values
def hintTypes(idx: Int): Seq[mimir.algebra.Type] = Seq()
// Returns the best (lowest) series-match score for column `idx`.
// NOTE(review): scores sort ascending as "best", so a low value here means a good
// match — confirm callers expect that orientation for a "confidence".
def confidence (idx: Int, args: Seq[PrimitiveValue], hints:Seq[PrimitiveValue]) : Double = {
val df = predictions(idx)
df.sort(asc(df.columns(1))).limit(1).head._2
}
}
|
UBOdin/mimir
|
src/main/scala/mimir/models/SeriesMissingValueModel.scala
|
Scala
|
apache-2.0
| 11,150
|
//: ----------------------------------------------------------------------------
//: Copyright (C) 2015 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package funnel
package http
import argonaut.{DecodeJson, EncodeJson}
import java.io.InputStream
import java.util.concurrent.ExecutorService
import java.net.{URL,URI}
import scalaz.\\/
import scalaz.concurrent.Task
import scalaz.stream._
import scalaz.stream.{Process => P}
import scalaz.stream.async.mutable.{Queue,Signal}
object SSE {
import JSON._
// Encodes a value as an SSE "data:" payload; embedded newlines in the JSON each
// get their own "data: " prefix, per the SSE framing rules.
def dataEncode[A](a: A)(implicit A: EncodeJson[A]): String =
"data: " + A(a).nospaces.replace("\\n", "\\ndata: ")
/**
* Write a server-side event stream (http://www.w3.org/TR/eventsource/)
* of the given metrics to the `Writer`. This will block the calling
* thread indefinitely.
*/
def writeEvents(events: Process[Task, Datapoint[Any]],
sink: java.io.Writer): Unit =
// each datapoint becomes an "event: reportable" block; blocks are blank-line
// separated, and flatMap actually runs the per-line write actions
events.map(kv => s"event: reportable\\n${dataEncode(kv)(EncodeDatapoint[Any])}\\n")
.intersperse("\\n")
.flatMap(writeTo(sink))
.run.run
/**
 * Write a server-side event stream (http://www.w3.org/TR/eventsource/)
 * of the given keys to the given `Writer`. This will block the calling
 * thread indefinitely.
 */
def writeKeys(events: Process[Task, Key[Any]], sink: java.io.Writer): Unit =
  events.map(k => s"event: key\\n${dataEncode(k)}\\n")
    .intersperse("\\n")
    // bug fix: was `.map(writeTo(sink))`, which merely *emitted* the inner write
    // processes without ever running them, so no keys were actually written;
    // `flatMap` runs each write, matching `writeEvents` above
    .flatMap(writeTo(sink))
    .run.run
// Emits a single line to `sink`, flushing immediately; IOException (client
// disconnect) terminates the stream normally.
private def writeTo(sink: java.io.Writer): String => Process[Task, Unit] =
line => Process.eval(Task {
sink.write(line)
sink.flush // this is a line-oriented protocol,
// so we flush after each line, otherwise
// consumer may get delayed messages
}.attempt).flatMap(_.fold(e => e match {
case x: java.io.IOException =>
// when client disconnects we'll get a broken pipe
// IOException from the above `sink.write`. This
// gets translated to normal termination
// NOTE(review): non-IOException failures fall through this match and would
// surface as a MatchError — confirm whether they should propagate as-is
Process.halt.kill
}, x => Process.emit(x)))
/// parsing
// Raised when the incoming stream deviates from the expected SSE framing.
case class ParseError(message: String) extends Exception(message)
/**
 * Streaming parser for an SSE stream. Example, given:
 *
 * {{{
 * event: blah
 * data: uno
 * data: dos
 *
 * event: other-event
 * data: tres
 * }}}
 *
 * It will emit ("blah", "uno\\ndos"), ("other-event","tres")
 *
 * This parser is rather brittle, and doesn't implement all the
 * various features of arbitrary SSE streams, described here:
 * http://www.w3.org/TR/2012/WD-eventsource-20120426/ In the
 * event of any unexpected formatting, raises a `ParseError`
 * within the returned `Process`.
 */
def blockParser: Process1[String, (String,String)] = {
  // skips blank and ":" comment lines until an "event:" line arrives
  def awaitingEvent: Process1[String,(String,String)] =
    P.await1[String].flatMap { line =>
      if (line.forall(_.isWhitespace)) awaitingEvent
      else if (line.startsWith(":")) awaitingEvent
      else {
        val (k,v) = parseLine(line)
        if (k != "event") throw ParseError(s"expected 'event', got $k")
        collectingData(v, new StringBuilder)
      }
    }
  // accumulates "data:" payloads until a blank line terminates the block
  def collectingData(event: String, buf: StringBuilder):
      Process1[String,(String,String)] =
    P.await1[String].flatMap { line =>
      if (line.forall(_.isWhitespace)) P.emit(event -> buf.toString) ++ awaitingEvent
      else {
        val (k, v) = parseLine(line)
        if (k != "data") throw ParseError("expected 'data'")
        val nl = if (buf.isEmpty) "" else "\\n"
        // bug fix: was buf.append(v).append(nl), which joined payloads as
        // "unodos\n" instead of the documented "uno\ndos" — the separator must
        // go *before* the new payload
        collectingData(event, buf.append(nl).append(v))
      }
    }
  awaitingEvent
}
// "data: blah" -> ("data", "blah")
// "foo: bar" -> ("foo", "bar")
def parseLine(line: String): (String, String) = {
val c = line.indexOf(':')
if (c == -1) (line, "")
else {
val hd = line.substring(0, c)
val tl = line.drop(c+1).trim
(hd, tl)
}
}
/**
* Return a stream of all events from the given URL.
* Example: `readEvents("http://localhost:8001/stream/sliding/jvm")`.
*/
def readEvents(uri: URI)(implicit S: ExecutorService = Monitoring.serverPool):
Process[Task, Datapoint[Any]] = {
// lines from the URL are parsed as SSE blocks and decoded into datapoints
readUrl(urlLinesR(uri.toURL)(S))
}
//for testing purposes
// Parses raw SSE lines into (event, data) blocks, decodes each data payload as a
// Datapoint, and re-raises any upstream error into the output process.
private[http] def readUrl(urlData: Process[Task, String]) = {
urlData.attempt().
pipeO(
blockParser.map {
case (_, data) => parseOrThrow[Datapoint[Any]](data)
}
).
flatMap(_.fold(P.fail, P.emit))
}
// various helper functions
// Decodes `s` as JSON, raising ParseError on failure.
def parseOrThrow[A:DecodeJson](s: String): A =
argonaut.Parse.decodeEither[A](s).fold(e => throw ParseError(e), identity)
// Fetches the full body of `url` and decodes it as JSON.
def urlDecode[A:DecodeJson](url: URL)(implicit S: ExecutorService = Monitoring.serverPool): Task[A] =
urlFullR(url)(S).map(parseOrThrow[A])
// Streams the lines of `url`, deferring the connection until the process runs;
// a failure while opening the stream becomes a failed process.
def urlLinesR(url: URL)(implicit S: ExecutorService = Monitoring.serverPool): Process[Task, String] =
Process.suspend {
try linesR(url.openStream)(S)
catch { case e: Throwable => Process.fail(e) }
}
// Reads the entire body of `url` as a single newline-joined string.
def urlFullR(url: URL)(implicit S: ExecutorService = Monitoring.serverPool): Task[String] =
urlLinesR(url)(S).chunkAll.map(_.mkString("\\n")).runLastOr("")
/**
* Adapted from scalaz-stream, but this version is nonblocking.
*/
def linesR(in: InputStream)(implicit S: ExecutorService = Monitoring.serverPool): Process[Task,String] =
io.resource(Task(scala.io.Source.fromInputStream(in))(S))(
src => Task(src.close)(S)) { src =>
lazy val lines = src.getLines // A stateful iterator
// Terminated(End) signals normal end-of-stream to io.resource, which then closes the source.
Task { if (lines.hasNext) lines.next else throw Cause.Terminated(Cause.End) } (S)
}
}
|
neigor/funnel
|
http/src/main/scala/SSE.scala
|
Scala
|
apache-2.0
| 6,335
|
package org.jetbrains.plugins.dotty.lang.psi.impl.base.types
import com.intellij.lang.ASTNode
import org.jetbrains.plugins.dotty.lang.psi.api.base.types.DottyDesugarizableTypeElement
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElementImpl
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScInfixTypeElement
/**
* PSI implementation of a Dotty infix type element (`A op B`);
* desugaring support is mixed in via `DottyDesugarizableTypeElement`.
*
* @author adkozlov
*/
class DottyInfixTypeElementImpl(node: ASTNode) extends ScalaPsiElementImpl(node)
with ScInfixTypeElement with DottyDesugarizableTypeElement
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/dotty/lang/psi/impl/base/types/DottyInfixTypeElementImpl.scala
|
Scala
|
apache-2.0
| 499
|
package pl.touk.nussknacker.ui.api
import akka.http.scaladsl.model.{HttpEntity, MediaTypes, StatusCodes}
import akka.http.scaladsl.testkit.ScalatestRouteTest
import pl.touk.nussknacker.ui.util.ConfigWithScalaVersion
import org.scalatest.{FunSuite, Matchers}
import pl.touk.nussknacker.engine.api.DisplayJsonWithEncoder
import pl.touk.nussknacker.ui.api.ServiceRoutes.JsonThrowable
import pl.touk.nussknacker.ui.api.helpers.{TestPermissions, TestProcessingTypes}
import pl.touk.nussknacker.ui.security.api.{LoggedUser, Permission}
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport
import io.circe.generic.JsonCodec
import io.circe.Decoder
import pl.touk.nussknacker.engine.{ModelData, ProcessingTypeConfig}
import pl.touk.nussknacker.engine.util.service.query.ServiceQuery.{QueryResult, ServiceInvocationException, ServiceNotFoundException}
import pl.touk.nussknacker.ui.api.helpers.TestFactory.mapProcessingTypeDataProvider
// Route-level tests for ServiceRoutes: invoking enrichers over HTTP and
// verifying authorization checks and error reporting.
class ServiceRoutesSpec extends FunSuite with Matchers with ScalatestRouteTest with FailFastCirceSupport with TestPermissions{
private val category1Deploy = Map("Category1" -> Set(Permission.Deploy))
private implicit val user: LoggedUser = LoggedUser("1", "admin", category1Deploy)
private val modelData = ModelData(ProcessingTypeConfig.read(ConfigWithScalaVersion.streamingProcessTypeConfig))
private val serviceRoutes = new ServiceRoutes(mapProcessingTypeDataProvider(TestProcessingTypes.Streaming -> modelData))
// decodes only the "result" field of the query response; invocation list is left empty
implicit val queryResultDecoder: Decoder[QueryResult] = Decoder.decodeJson
.map(_.hcursor.downField("result").focus.flatMap(_.asString).getOrElse(""))
.map(QueryResult(_, List.empty))
test("invoke service") {
val entity = HttpEntity(MediaTypes.`application/json`,
"""
|[
| {
| "name": "param",
| "expression": {
| "language":"spel",
| "expression":"'parameterValue'"
| }
| },
| {
| "name": "tariffType",
| "expression": {
| "language": "spel",
| "expression": "null"
| }
| }
|]
""".stripMargin)
Post("/service/streaming/enricher", entity) ~> serviceRoutes.securedRoute ~> check {
status shouldEqual StatusCodes.OK
val result = entityAs[io.circe.Json]
result.asObject.flatMap(_.apply("result")).flatMap(_.asString).getOrElse("") shouldEqual "RichObject(parameterValue,123,Optional[rrrr])" //TODO: should be JSON
}
}
test("display valuable error message for invalid spell expression") {
val entity = HttpEntity(MediaTypes.`application/json`,
"""
|[
| {
| "name": "param",
| "expression": {
| "language":"spel",
| "expression":"not valid spell expression"
| }
| },
| {
| "name": "tariffType",
| "expression": {
| "language": "spel",
| "expression": "null"
| }
| }
|]
""".stripMargin)
Post("/service/streaming/enricher", entity) ~> serviceRoutes.securedRoute ~> check {
status shouldEqual StatusCodes.InternalServerError
entityAs[JsonThrowable].message shouldEqual Some("ExpressionParseError(EL1041E: After parsing a valid expression, there is still more data in the expression: 'spell',,Some(param),not valid spell expression)")
entityAs[JsonThrowable].className shouldEqual classOf[ServiceInvocationException].getCanonicalName
}
}
test("display valuable error message for mismatching parameters") {
// empty parameter list -> service invocation fails with MissingParameters
val entity = HttpEntity(MediaTypes.`application/json`, "[]")
Post("/service/streaming/enricher", entity) ~> serviceRoutes.securedRoute ~> check {
status shouldEqual StatusCodes.InternalServerError
entityAs[JsonThrowable].message shouldEqual Some( "MissingParameters(Set(param, tariffType),)")
entityAs[JsonThrowable].className shouldEqual classOf[ServiceInvocationException].getCanonicalName
}
}
test("display valuable error message for missing service") {
val entity = HttpEntity(MediaTypes.`application/json`, "[]")
Post("/service/streaming/unexcitingService", entity) ~> serviceRoutes.securedRoute ~> check {
status shouldEqual StatusCodes.NotFound
entityAs[JsonThrowable].message shouldEqual Some("Service unexcitingService not found")
entityAs[JsonThrowable].className shouldEqual classOf[ServiceNotFoundException].getCanonicalName
}
}
// a user without the Deploy permission may not invoke services
test("prevent unauthorized user service invocation") {
val user = LoggedUser("1", "nonAdmin")
serviceRoutes.canUserInvokeService(user, "enricher", modelData) shouldBe false
}
test("user with category invoke service") {
val user = LoggedUser("1", "nonAdmin", category1Deploy)
serviceRoutes.canUserInvokeService(user, "enricher", modelData) shouldBe true
}
// authorization check does not reject unknown services; the 404 comes later
test("canUserInvokeService always pass unexciting service") {
val user = LoggedUser("1", "nonAdmin", category1Deploy)
serviceRoutes.canUserInvokeService(user, "unexcitingService", modelData) shouldBe true
}
}
object ServiceRoutesSpec {
// sample response type used to exercise JSON encoding of service results
@JsonCodec case class Response(age: Int, name: String) extends DisplayJsonWithEncoder[Response]
}
|
TouK/nussknacker
|
ui/server/src/test/scala/pl/touk/nussknacker/ui/api/ServiceRoutesSpec.scala
|
Scala
|
apache-2.0
| 5,248
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Prettifier
import org.scalatest.prop.PropertyChecks
import Integer.MIN_VALUE
import org.scalatest.enablers.Length
import org.scalatest.enablers.Size
import org.scalatest.exceptions.TestFailedException
import org.scalatest.CompatParColls.Converters._
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
class ShouldLengthSpec extends AnyFunSpec with PropertyChecks with ReturnsNormallyThrowsAssertion {
private val prettifier = Prettifier.default
// Checking for a specific length
describe("The 'have length (Int)' syntax") {
describe("on String") {
// Exercises every syntactic form of `have length` on String: positive,
// negated, and inside and/or combinators, plus the failure messages.
// NOTE(review): each `it` repeats the assertion in several DSL shapes on
// purpose — the syntax itself is what is under test here.
it("should do nothing if string length matches specified length") {
"hi" should have length (2)
forAll((s: String) => s should have length (s.length))
}
it("should do nothing if string length does not match and used with should not") {
"hi" should not { have length (3) }
"hi" should not have length (3)
forAll((s: String, i: Int) => if (i != s.length) s should not { have length (i) } else succeed)
forAll((s: String, i: Int) => if (i != s.length) s should not have length (i) else succeed)
}
it("should do nothing when string length matches and used in a logical-and expression") {
"hi" should (have length (2) and (have length (3 - 1)))
"hi" should (have length (2) and have length (3 - 1))
}
it("should do nothing when string length matches and used in a logical-or expression") {
"hi" should { have length (77) or (have length (3 - 1)) }
"hi" should (have length (77) or have length (3 - 1))
}
it("should do nothing when string length doesn't match and used in a logical-and expression with not") {
"hi" should (not (have length (5)) and not (have length (3)))
"hi" should { not have length (5) and (not have length (3)) }
"hi" should (not have length (5) and not have length (3))
}
it("should do nothing when string length doesn't match and used in a logical-or expression with not") {
"hi" should (not (have length (2)) or not (have length (3)))
"hi" should ((not have length (2)) or (not have length (3)))
"hi" should (not have length (2) or not have length (3))
}
// Negative cases: the expected failure messages are built from FailureMessages
// so they stay in sync with the production message templates.
it("should throw TestFailedException if string length does not match specified length") {
val caught1 = intercept[TestFailedException] {
"hi" should have length (3)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 3))
forAll((s: String) => assertThrows[TestFailedException](s should have length (s.length + 1)))
}
it("should throw TestFailedException with normal error message if specified length is negative") {
val caught1 = intercept[TestFailedException] {
"hi" should have length (-2)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, -2))
forAll((s: String) => assertThrows[TestFailedException](s should have length (if (s.length == 0) -1 else -s.length)))
}
it("should throw an assertion error when string length doesn't match and used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
"hi" should { have length (5) and (have length (2 - 1)) }
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 5))
val caught2 = intercept[TestFailedException] {
"hi" should ((have length (5)) and (have length (2 - 1)))
}
assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 5))
val caught3 = intercept[TestFailedException] {
"hi" should (have length (5) and have length (2 - 1))
}
assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 5))
}
it("should throw an assertion error when string length doesn't match and used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
"hi" should { have length (55) or (have length (22)) }
}
assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 22))))
val caught2 = intercept[TestFailedException] {
"hi" should ((have length (55)) or (have length (22)))
}
assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 22))))
val caught3 = intercept[TestFailedException] {
"hi" should (have length (55) or have length (22))
}
assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 22))))
}
it("should throw an assertion error when string length matches and used in a logical-and expression with not") {
val caught1 = intercept[TestFailedException] {
"hi" should { not { have length (3) } and not { have length (2) }}
}
assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("\\"hi\\""), 2))))
val caught2 = intercept[TestFailedException] {
"hi" should { not have length (3) and (not have length (2)) }
}
assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("\\"hi\\""), 2))))
val caught3 = intercept[TestFailedException] {
"hi" should (not have length (3) and not have length (2))
}
assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("\\"hi\\""), 2))))
}
it("should throw an assertion error when string length matches and used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
"hi" should { not { have length (2) } or not { have length (2) }}
}
assert(caught1.getMessage === "\\"hi\\" had length 2, and \\"hi\\" had length 2")
val caught2 = intercept[TestFailedException] {
"hi" should { not have length (2) or (not have length (2)) }
}
assert(caught2.getMessage === "\\"hi\\" had length 2, and \\"hi\\" had length 2")
val caught3 = intercept[TestFailedException] {
"hi" should (not have length (2) or not have length (2))
}
assert(caught3.getMessage === "\\"hi\\" had length 2, and \\"hi\\" had length 2")
}
// SKIP-DOTTY-START
it("should give good error messages when more than two clauses are used with logical connectors") {
val caught1 = intercept[TestFailedException] {
"hi" should (not have length (1) and not have length (3) and not have length (2))
}
assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 1)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("\\"hi\\""), 2, 3)))), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("\\"hi\\""), 2))))
val caught2 = intercept[TestFailedException] {
"hi" should (not have length (2) or not equal ("hi") or equal ("frog"))
}
assert(caught2.getMessage === "\\"hi\\" had length 2, and \\"hi\\" equaled \\"hi\\", and \\"[hi]\\" did not equal \\"[frog]\\"")
}
// SKIP-DOTTY-END
}
describe("on Array") {
// Mirrors the String section for Array: every syntactic form of `have length`
// (positive, negated, and inside and/or combinators) plus the failure messages.
// Lines left as `// check(...)` are property checks disabled upstream; kept as-is.
it("should do nothing if array length matches specified length") {
Array(1, 2) should have length (2)
// check((arr: Array[Int]) => returnsNormally(arr should have length (arr.length)))
}
it("should do nothing if array length does not match and used with should not") {
Array(1, 2) should not { have length (3) }
Array(1, 2) should not have length (3)
// check((arr: Array[Int], i: Int) => i != arr.length ==> returnsNormally(arr should not { have length (i) }))
// check((arr: Array[Int], i: Int) => i != arr.length ==> returnsNormally(arr should not have length (i)))
}
it("should do nothing when array length matches and used in a logical-and expression") {
Array(1, 2) should { have length (2) and (have length (3 - 1)) }
Array(1, 2) should (have length (2) and have length (3 - 1))
}
it("should do nothing when array length matches and used in a logical-or expression") {
Array(1, 2) should { have length (77) or (have length (3 - 1)) }
Array(1, 2) should (have length (77) or have length (3 - 1))
}
it("should do nothing when array length doesn't match and used in a logical-and expression with not") {
Array(1, 2) should { not { have length (5) } and not { have length (3) }}
Array(1, 2) should { not have length (5) and (not have length (3)) }
Array(1, 2) should (not have length (5) and not have length (3))
}
it("should do nothing when array length doesn't match and used in a logical-or expression with not") {
Array(1, 2) should { not { have length (2) } or not { have length (3) }}
Array(1, 2) should { not have length (2) or (not have length (3)) }
// BUGFIX: this third form previously duplicated the logical-AND case
// `(not have length (5) and not have length (3))`; it now matches the
// OR form exercised by the String section.
Array(1, 2) should (not have length (2) or not have length (3))
}
it("should throw TestFailedException if array length does not match specified length") {
val caught1 = intercept[TestFailedException] {
Array(1, 2) should have length (3)
}
assert(caught1.getMessage.endsWith(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 3)))
// check((arr: Array[String]) => throwsTestFailedException(arr should have length (arr.length + 1)))
}
it("should throw TestFailedException with normal error message if specified length is negative") {
val caught1 = intercept[TestFailedException] {
Array(1, 2) should have length (-2)
}
assert(caught1.getMessage.endsWith(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, -2)))
// check((arr: Array[Int]) => throwsTestFailedException(arr should have length (if (arr.length == 0) -1 else -arr.length)))
}
it("should throw an assertion error when array length doesn't match and used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
Array(1, 2) should { have length (5) and (have length (2 - 1)) }
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 5))
val caught2 = intercept[TestFailedException] {
Array(1, 2) should ((have length (5)) and (have length (2 - 1)))
}
assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 5))
val caught3 = intercept[TestFailedException] {
Array(1, 2) should (have length (5) and have length (2 - 1))
}
assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 5))
}
it("should throw an assertion error when array length doesn't match and used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
Array(1, 2) should { have length (55) or (have length (22)) }
}
assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 22))))
val caught2 = intercept[TestFailedException] {
Array(1, 2) should ((have length (55)) or (have length (22)))
}
assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 22))))
val caught3 = intercept[TestFailedException] {
Array(1, 2) should (have length (55) or have length (22))
}
assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 22))))
}
it("should throw an assertion error when array length matches and used in a logical-and expression with not") {
val caught1 = intercept[TestFailedException] {
Array(1, 2) should { not { have length (3) } and not { have length (2) }}
}
assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("Array(1, 2)"), 2))))
val caught2 = intercept[TestFailedException] {
Array(1, 2) should { not have length (3) and (not have length (2)) }
}
assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("Array(1, 2)"), 2))))
val caught3 = intercept[TestFailedException] {
Array(1, 2) should (not have length (3) and not have length (2))
}
assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("Array(1, 2)"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("Array(1, 2)"), 2))))
}
it("should throw an assertion error when array length matches and used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
Array(1, 2) should { not { have length (2) } or not { have length (2) }}
}
assert(caught1.getMessage === "Array(1, 2) had length 2, and Array(1, 2) had length 2")
val caught2 = intercept[TestFailedException] {
Array(1, 2) should { not have length (2) or (not have length (2)) }
}
assert(caught2.getMessage === "Array(1, 2) had length 2, and Array(1, 2) had length 2")
val caught3 = intercept[TestFailedException] {
Array(1, 2) should (not have length (2) or not have length (2))
}
assert(caught3.getMessage === "Array(1, 2) had length 2, and Array(1, 2) had length 2")
}
// SKIP-SCALATESTJS,NATIVE-START
it("should work on parallel form") {
Array(1, 2).par should have length (2)
}
// SKIP-SCALATESTJS,NATIVE-END
}
describe("on scala.List") {
// Mirrors the String/Array sections for scala.List: every syntactic form of
// `have length` plus the failure messages.
it("should do nothing if list length matches specified length") {
List(1, 2) should have length (2)
forAll((lst: List[Int]) => lst should have length (lst.length))
}
it("should do nothing if list length does not match and used with should not") {
List(1, 2) should not { have length (3) }
List(1, 2) should not have length (3)
forAll((lst: List[Int], i: Int) => if (i != lst.length) lst should not { have length (i) } else succeed)
forAll((lst: List[Int], i: Int) => if (i != lst.length) lst should not have length (i) else succeed)
}
it("should do nothing when list length matches and used in a logical-and expression") {
List(1, 2) should { have length (2) and (have length (3 - 1)) }
List(1, 2) should (have length (2) and have length (3 - 1))
}
it("should do nothing when list length matches and used in a logical-or expression") {
List(1, 2) should { have length (77) or (have length (3 - 1)) }
List(1, 2) should (have length (77) or have length (3 - 1))
}
it("should do nothing when list length doesn't match and used in a logical-and expression with not") {
List(1, 2) should { not { have length (5) } and not { have length (3) }}
List(1, 2) should { not have length (5) and (not have length (3)) }
// Added the third syntactic form for consistency with the String/Array sections.
List(1, 2) should (not have length (5) and not have length (3))
}
it("should do nothing when list length doesn't match and used in a logical-or expression with not") {
List(1, 2) should { not { have length (2) } or not { have length (3) }}
List(1, 2) should { not have length (2) or (not have length (3)) }
// BUGFIX: this third form previously duplicated the logical-AND case
// `(not have length (5) and not have length (3))`; it now matches the
// OR form exercised by the String section.
List(1, 2) should (not have length (2) or not have length (3))
}
it("should throw TestFailedException if list length does not match specified length") {
val caught1 = intercept[TestFailedException] {
List(1, 2) should have length (3)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 3))
forAll((lst: List[String]) => assertThrows[TestFailedException](lst should have length (lst.length + 1)))
}
it("should throw TestFailedException with normal error message if specified length is negative") {
val caught1 = intercept[TestFailedException] {
List(1, 2) should have length (-2)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, -2))
forAll((lst: List[Int]) => assertThrows[TestFailedException](lst should have length (if (lst.length == 0) -1 else -lst.length)))
}
it("should throw an assertion error when list length doesn't match and used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
List(1, 2) should { have length (5) and (have length (2 - 1)) }
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 5))
val caught2 = intercept[TestFailedException] {
List(1, 2) should ((have length (5)) and (have length (2 - 1)))
}
assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 5))
val caught3 = intercept[TestFailedException] {
List(1, 2) should (have length (5) and have length (2 - 1))
}
assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 5))
}
it("should throw an assertion error when list length doesn't match and used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
List(1, 2) should { have length (55) or (have length (22)) }
}
assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 22))))
val caught2 = intercept[TestFailedException] {
List(1, 2) should ((have length (55)) or (have length (22)))
}
assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 22))))
val caught3 = intercept[TestFailedException] {
List(1, 2) should (have length (55) or have length (22))
}
assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 22))))
}
it("should throw an assertion error when list length matches and used in a logical-and expression with not") {
val caught1 = intercept[TestFailedException] {
List(1, 2) should { not { have length (3) } and not { have length (2) }}
}
assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("List(1, 2)"), 2))))
val caught2 = intercept[TestFailedException] {
List(1, 2) should { not have length (3) and (not have length (2)) }
}
assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("List(1, 2)"), 2))))
val caught3 = intercept[TestFailedException] {
List(1, 2) should (not have length (3) and not have length (2))
}
assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("List(1, 2)"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("List(1, 2)"), 2))))
}
it("should throw an assertion error when list length matches and used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
List(1, 2) should { not { have length (2) } or not { have length (2) }}
}
assert(caught1.getMessage === "List(1, 2) had length 2, and List(1, 2) had length 2")
val caught2 = intercept[TestFailedException] {
List(1, 2) should { not have length (2) or (not have length (2)) }
}
assert(caught2.getMessage === "List(1, 2) had length 2, and List(1, 2) had length 2")
val caught3 = intercept[TestFailedException] {
List(1, 2) should (not have length (2) or not have length (2))
}
assert(caught3.getMessage === "List(1, 2) had length 2, and List(1, 2) had length 2")
}
// SKIP-SCALATESTJS,NATIVE-START
it("should work on parallel form") {
List(1, 2).par should have length (2)
}
// SKIP-SCALATESTJS,NATIVE-END
}
// SKIP-SCALATESTJS,NATIVE-START
describe("on java.util.List") {
// Mirrors the scala.List section for a Java collection; note that the
// expected prettified representation is Java's "[1, 2]" rather than
// Scala's "List(1, 2)".
val javaList: java.util.List[Int] = new java.util.ArrayList
javaList.add(1)
javaList.add(2)
it("should do nothing if list length matches specified length") {
javaList should have length (2)
// check((lst: java.util.List[Int]) => returnsNormally(lst should have length (lst.length)))
}
it("should do nothing if list length does not match and used with should not") {
javaList should not { have length (3) }
javaList should not have length (3)
// check((lst: List[Int], i: Int) => i != lst.length ==> returnsNormally(lst should not { have length (i) }))
}
it("should do nothing when list length matches and used in a logical-and expression") {
javaList should { have length (2) and (have length (3 - 1)) }
javaList should (have length (2) and have length (3 - 1))
}
it("should do nothing when list length matches and used in a logical-or expression") {
javaList should { have length (77) or (have length (3 - 1)) }
javaList should (have length (77) or have length (3 - 1))
}
it("should do nothing when list length doesn't match and used in a logical-and expression with not") {
javaList should { not { have length (5) } and not { have length (3) }}
javaList should (not have length (5) and not have length (3))
}
it("should do nothing when list length doesn't match and used in a logical-or expression with not") {
javaList should { not { have length (2) } or not { have length (3) }}
javaList should (not have length (2) or not have length (3))
}
it("should throw TestFailedException if list length does not match specified length") {
val caught1 = intercept[TestFailedException] {
javaList should have length (3)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 3))
// check((lst: List[String]) => throwsTestFailedException(lst should have length (lst.length + 1)))
}
it("should throw TestFailedException with normal error message if specified length is negative") {
val caught1 = intercept[TestFailedException] {
javaList should have length (-2)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, -2))
// check((lst: List[Int]) => throwsTestFailedException(lst should have length (if (lst.length == 0) -1 else -lst.length)))
}
it("should throw an assertion error when list length doesn't match and used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
javaList should { have length (5) and (have length (2 - 1)) }
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 5))
val caught2 = intercept[TestFailedException] {
javaList should ((have length (5)) and (have length (2 - 1)))
}
assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 5))
val caught3 = intercept[TestFailedException] {
javaList should (have length (5) and have length (2 - 1))
}
assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 5))
}
it("should throw an assertion error when list length doesn't match and used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
javaList should { have length (55) or (have length (22)) }
}
assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 22))))
val caught2 = intercept[TestFailedException] {
javaList should ((have length (55)) or (have length (22)))
}
assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 22))))
val caught3 = intercept[TestFailedException] {
javaList should (have length (55) or have length (22))
}
assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 22))))
}
it("should throw an assertion error when list length matches and used in a logical-and expression with not") {
val caught1 = intercept[TestFailedException] {
javaList should { not { have length (3) } and not { have length (2) }}
}
assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("[1, 2]"), 2))))
val caught2 = intercept[TestFailedException] {
javaList should { not have length (3) and (not have length (2)) }
}
assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("[1, 2]"), 2))))
val caught3 = intercept[TestFailedException] {
javaList should (not have length (3) and not have length (2))
}
assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("[1, 2]"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("[1, 2]"), 2))))
}
it("should throw an assertion error when list length matches and used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
javaList should { not { have length (2) } or not { have length (2) }}
}
assert(caught1.getMessage === "[1, 2] had length 2, and [1, 2] had length 2")
val caught2 = intercept[TestFailedException] {
javaList should { not have length (2) or (not have length (2)) }
}
assert(caught2.getMessage === "[1, 2] had length 2, and [1, 2] had length 2")
val caught3 = intercept[TestFailedException] {
javaList should (not have length (2) or not have length (2))
}
assert(caught3.getMessage === "[1, 2] had length 2, and [1, 2] had length 2")
}
}
// SKIP-SCALATESTJS,NATIVE-END
// I repeat these with copy and paste, becuase I need to test that each static structural type works, and
// that makes it hard to pass them to a common "behaves like" method
describe("on an arbitrary object that has an empty-paren Int length method") {
// Verifies the Length type class route: any type with a Length instance can
// use the `have length` syntax. Here the instance delegates to an
// empty-paren `length()` method.
class Lengthy(len: Int) {
def length(): Int = len
override def toString = "lengthy"
}
val obj = new Lengthy(2)
implicit val lengthOfLengthy: Length[Lengthy] =
new Length[Lengthy] {
def lengthOf(o: Lengthy): Long = o.length()
}
it("should do nothing if object length matches specified length") {
obj should have length (2)
forAll((len: Int) => new Lengthy(len) should have length (len))
}
it("should do nothing if object length does not match and used with should not") {
obj should not { have length (3) }
obj should not have length (3)
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed )
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed )
}
it("should do nothing when object length matches and used in a logical-and expression") {
obj should { have length (2) and (have length (3 - 1)) }
obj should (have length (2) and have length (3 - 1))
}
it("should do nothing when object length matches and used in a logical-or expression") {
obj should { have length (77) or (have length (3 - 1)) }
obj should (have length (77) or have length (3 - 1))
}
it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
obj should { not { have length (5) } and not { have length (3) }}
obj should (not have length (5) and not have length (3))
}
it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
obj should { not { have length (2) } or not { have length (3) }}
obj should (not have length (2) or not have length (3))
}
it("should throw TestFailedException if object length does not match specified length") {
val caught1 = intercept[TestFailedException] {
obj should have length (3)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
}
it("should throw TestFailedException with normal error message if specified length is negative") {
val caught1 = intercept[TestFailedException] {
obj should have length (-2)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
// The MIN_VALUE guard avoids negating Int.MinValue, which would overflow back to itself.
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
}
it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (5) and (have length (2 - 1)) }
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught2 = intercept[TestFailedException] {
obj should ((have length (5)) and (have length (2 - 1)))
}
assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught3 = intercept[TestFailedException] {
obj should (have length (5) and have length (2 - 1))
}
assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
}
it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (55) or (have length (22)) }
}
assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught2 = intercept[TestFailedException] {
obj should ((have length (55)) or (have length (22)))
}
assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught3 = intercept[TestFailedException] {
obj should (have length (55) or have length (22))
}
assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
}
it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (3) } and not { have length (2) }}
}
assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught2 = intercept[TestFailedException] {
obj should { { not have length (3) } and { not have length (2) }}
}
assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught3 = intercept[TestFailedException] {
obj should (not have length (3) and not have length (2))
}
assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
}
it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (2) } or not { have length (2) }}
}
assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught2 = intercept[TestFailedException] {
obj should { { not have length (2) } or { not have length (2) }}
}
assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught3 = intercept[TestFailedException] {
obj should (not have length (2) or not have length (2))
}
assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
}
}
    // Verifies `have length (n)` against an object exposing length as a
    // parameterless Int method, via an explicit Length[Lengthy] instance.
    // The `it` bodies are intentionally identical to the sibling describes;
    // only the structure of the length member differs.
    describe("on an arbitrary object that has a parameterless Int length method") {
      class Lengthy(len: Int) {
        def length: Int = len // The only difference between the previous is the structure of this member
        override def toString = "lengthy" // failure messages assert against "lengthy"
      }
      val obj = new Lengthy(2)
      // Typeclass instance that enables `should have length` on Lengthy.
      implicit val lengthOfLengthy: Length[Lengthy] =
        new Length[Lengthy] {
          def lengthOf(o: Lengthy): Long = o.length
        }
      it("should do nothing if object length matches specified length") {
        obj should have length (2)
        forAll((len: Int) => new Lengthy(len) should have length (len))
      }
      it("should do nothing if object length does not match and used with should not") {
        obj should not { have length (3) }
        obj should not have length (3)
        forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
        forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
      }
      it("should do nothing when object length matches and used in a logical-and expression") {
        obj should { have length (2) and (have length (3 - 1)) }
        obj should (have length (2) and have length (3 - 1))
      }
      it("should do nothing when object length matches and used in a logical-or expression") {
        obj should { have length (77) or (have length (3 - 1)) }
        obj should (have length (77) or have length (3 - 1))
      }
      it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
        obj should { not { have length (5) } and not { have length (3) }}
        obj should (not have length (5) and not have length (3))
      }
      it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
        obj should { not { have length (2) } or not { have length (3) }}
        obj should (not have length (2) or not have length (3))
      }
      it("should throw TestFailedException if object length does not match specified length") {
        val caught1 = intercept[TestFailedException] {
          obj should have length (3)
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
        forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
      }
      it("should throw TestFailedException with normal error message if specified length is negative") {
        val caught1 = intercept[TestFailedException] {
          obj should have length (-2)
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
        // -1 is substituted when negating len would not yield a negative value:
        // len == 0, or len == MIN_VALUE (whose negation overflows back to itself).
        forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
      }
      it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
        val caught1 = intercept[TestFailedException] {
          obj should { have length (5) and (have length (2 - 1)) }
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
        val caught2 = intercept[TestFailedException] {
          obj should ((have length (5)) and (have length (2 - 1)))
        }
        assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
        val caught3 = intercept[TestFailedException] {
          obj should (have length (5) and have length (2 - 1))
        }
        assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
      }
      it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
        val caught1 = intercept[TestFailedException] {
          obj should { have length (55) or (have length (22)) }
        }
        assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
        val caught2 = intercept[TestFailedException] {
          obj should ((have length (55)) or (have length (22)))
        }
        assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
        val caught3 = intercept[TestFailedException] {
          obj should (have length (55) or have length (22))
        }
        assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
      }
      it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
        val caught1 = intercept[TestFailedException] {
          obj should { not { have length (3) } and not { have length (2) }}
        }
        assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
        val caught2 = intercept[TestFailedException] {
          obj should { { not have length (3) } and { not have length (2) }}
        }
        assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
        val caught3 = intercept[TestFailedException] {
          obj should (not have length (3) and not have length (2))
        }
        assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
      }
      it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
        val caught1 = intercept[TestFailedException] {
          obj should { not { have length (2) } or not { have length (2) }}
        }
        // NOTE(review): hard-coded expected message; siblings build it via FailureMessages.
        assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
        val caught2 = intercept[TestFailedException] {
          obj should { { not have length (2) } or { not have length (2) }}
        }
        assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
        val caught3 = intercept[TestFailedException] {
          obj should (not have length (2) or not have length (2))
        }
        assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
      }
    }
    // Verifies `have length (n)` against an object exposing length as an Int
    // field (val). Test bodies are intentionally identical to the sibling
    // describes; only the structure of the length member differs.
    describe("on an arbitrary object that has a Int length field") {
      class Lengthy(len: Int) {
        val length: Int = len // The only difference between the previous is the structure of this member
        override def toString = "lengthy" // failure messages assert against "lengthy"
      }
      val obj = new Lengthy(2)
      // Typeclass instance that enables `should have length` on Lengthy.
      implicit val lengthOfLengthy: Length[Lengthy] =
        new Length[Lengthy] {
          def lengthOf(o: Lengthy): Long = o.length
        }
      it("should do nothing if object length matches specified length") {
        obj should have length (2)
        forAll((len: Int) => new Lengthy(len) should have length (len))
      }
      it("should do nothing if object length does not match and used with should not") {
        obj should not { have length (3) }
        obj should not have length (3)
        forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
        forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
      }
      it("should do nothing when object length matches and used in a logical-and expression") {
        obj should { have length (2) and (have length (3 - 1)) }
        obj should (have length (2) and have length (3 - 1))
      }
      it("should do nothing when object length matches and used in a logical-or expression") {
        obj should { have length (77) or (have length (3 - 1)) }
        obj should (have length (77) or have length (3 - 1))
      }
      it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
        obj should { not { have length (5) } and not { have length (3) }}
        obj should (not have length (5) and not have length (3))
      }
      it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
        obj should { not { have length (2) } or not { have length (3) }}
        obj should (not have length (2) or not have length (3))
      }
      it("should throw TestFailedException if object length does not match specified length") {
        val caught1 = intercept[TestFailedException] {
          obj should have length (3)
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
        forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
      }
      it("should throw TestFailedException with normal error message if specified length is negative") {
        val caught1 = intercept[TestFailedException] {
          obj should have length (-2)
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
        // -1 is substituted when negating len would not yield a negative value:
        // len == 0, or len == MIN_VALUE (whose negation overflows back to itself).
        forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
      }
      it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
        val caught1 = intercept[TestFailedException] {
          obj should { have length (5) and (have length (2 - 1)) }
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
        val caught2 = intercept[TestFailedException] {
          obj should ((have length (5)) and (have length (2 - 1)))
        }
        assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
        val caught3 = intercept[TestFailedException] {
          obj should (have length (5) and have length (2 - 1))
        }
        assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
      }
      it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
        val caught1 = intercept[TestFailedException] {
          obj should { have length (55) or (have length (22)) }
        }
        assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
        val caught2 = intercept[TestFailedException] {
          obj should ((have length (55)) or (have length (22)))
        }
        assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
        val caught3 = intercept[TestFailedException] {
          obj should (have length (55) or have length (22))
        }
        assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
      }
      it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
        val caught1 = intercept[TestFailedException] {
          obj should { not { have length (3) } and not { have length (2) }}
        }
        assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
        val caught2 = intercept[TestFailedException] {
          obj should { { not have length (3) } and { not have length (2) }}
        }
        assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
        val caught3 = intercept[TestFailedException] {
          obj should (not have length (3) and not have length (2))
        }
        assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
      }
      it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
        val caught1 = intercept[TestFailedException] {
          obj should { not { have length (2) } or not { have length (2) }}
        }
        // NOTE(review): hard-coded expected message; siblings build it via FailureMessages.
        assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
        val caught2 = intercept[TestFailedException] {
          obj should { { not have length (2) } or { not have length (2) }}
        }
        assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
        val caught3 = intercept[TestFailedException] {
          obj should (not have length (2) or not have length (2))
        }
        assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
      }
    }
    // Verifies `have length (n)` against an object exposing length as an
    // empty-paren Int getLength() method. Test bodies are intentionally
    // identical to the sibling describes; only the member structure differs.
    describe("on an arbitrary object that has an empty-paren Int getLength method") {
      class Lengthy(len: Int) {
        def getLength(): Int = len // The only difference between the previous is the structure of this member
        override def toString = "lengthy" // failure messages assert against "lengthy"
      }
      val obj = new Lengthy(2)
      // Typeclass instance that enables `should have length` on Lengthy.
      implicit val lengthOfLengthy: Length[Lengthy] =
        new Length[Lengthy] {
          def lengthOf(o: Lengthy): Long = o.getLength()
        }
      it("should do nothing if object length matches specified length") {
        obj should have length (2)
        forAll((len: Int) => new Lengthy(len) should have length (len))
      }
      it("should do nothing if object length does not match and used with should not") {
        obj should not { have length (3) }
        obj should not have length (3)
        forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
        forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
      }
      it("should do nothing when object length matches and used in a logical-and expression") {
        obj should { have length (2) and (have length (3 - 1)) }
        obj should (have length (2) and have length (3 - 1))
      }
      it("should do nothing when object length matches and used in a logical-or expression") {
        obj should { have length (77) or (have length (3 - 1)) }
        obj should (have length (77) or have length (3 - 1))
      }
      it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
        obj should { not { have length (5) } and not { have length (3) }}
        obj should (not have length (5) and not have length (3))
      }
      it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
        obj should { not { have length (2) } or not { have length (3) }}
        obj should (not have length (2) or not have length (3))
      }
      it("should throw TestFailedException if object length does not match specified length") {
        val caught1 = intercept[TestFailedException] {
          obj should have length (3)
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
        forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
      }
      it("should throw TestFailedException with normal error message if specified length is negative") {
        val caught1 = intercept[TestFailedException] {
          obj should have length (-2)
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
        // -1 is substituted when negating len would not yield a negative value:
        // len == 0, or len == MIN_VALUE (whose negation overflows back to itself).
        forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
      }
      it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
        val caught1 = intercept[TestFailedException] {
          obj should { have length (5) and (have length (2 - 1)) }
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
        val caught2 = intercept[TestFailedException] {
          obj should ((have length (5)) and (have length (2 - 1)))
        }
        assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
        val caught3 = intercept[TestFailedException] {
          obj should (have length (5) and have length (2 - 1))
        }
        assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
      }
      it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
        val caught1 = intercept[TestFailedException] {
          obj should { have length (55) or (have length (22)) }
        }
        assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
        val caught2 = intercept[TestFailedException] {
          obj should ((have length (55)) or (have length (22)))
        }
        assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
        val caught3 = intercept[TestFailedException] {
          obj should (have length (55) or have length (22))
        }
        assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
      }
      it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
        val caught1 = intercept[TestFailedException] {
          obj should { not { have length (3) } and not { have length (2) }}
        }
        assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
        val caught2 = intercept[TestFailedException] {
          obj should { { not have length (3) } and { not have length (2) }}
        }
        assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
        val caught3 = intercept[TestFailedException] {
          obj should (not have length (3) and not have length (2))
        }
        assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
      }
      it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
        val caught1 = intercept[TestFailedException] {
          obj should { not { have length (2) } or not { have length (2) }}
        }
        // NOTE(review): hard-coded expected message; siblings build it via FailureMessages.
        assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
        val caught2 = intercept[TestFailedException] {
          obj should { { not have length (2) } or { not have length (2) }}
        }
        assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
        val caught3 = intercept[TestFailedException] {
          obj should (not have length (2) or not have length (2))
        }
        assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
      }
    }
    // Verifies `have length (n)` against an object exposing length as a
    // parameterless Int getLength method. Test bodies are intentionally
    // identical to the sibling describes; only the member structure differs.
    describe("on an arbitrary object that has a parameterless Int getLength method") {
      class Lengthy(len: Int) {
        def getLength: Int = len // The only difference between the previous is the structure of this member
        override def toString = "lengthy" // failure messages assert against "lengthy"
      }
      val obj = new Lengthy(2)
      // Typeclass instance that enables `should have length` on Lengthy.
      implicit val lengthOfLengthy: Length[Lengthy] =
        new Length[Lengthy] {
          def lengthOf(o: Lengthy): Long = o.getLength
        }
      it("should do nothing if object length matches specified length") {
        obj should have length (2)
        forAll((len: Int) => new Lengthy(len) should have length (len))
      }
      it("should do nothing if object length does not match and used with should not") {
        obj should not { have length (3) }
        obj should not have length (3)
        forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
        forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
      }
      it("should do nothing when object length matches and used in a logical-and expression") {
        obj should { have length (2) and (have length (3 - 1)) }
        obj should (have length (2) and have length (3 - 1))
      }
      it("should do nothing when object length matches and used in a logical-or expression") {
        obj should { have length (77) or (have length (3 - 1)) }
        obj should (have length (77) or have length (3 - 1))
      }
      it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
        obj should { not { have length (5) } and not { have length (3) }}
        obj should (not have length (5) and not have length (3))
      }
      it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
        obj should { not { have length (2) } or not { have length (3) }}
        obj should (not have length (2) or not have length (3))
      }
      it("should throw TestFailedException if object length does not match specified length") {
        val caught1 = intercept[TestFailedException] {
          obj should have length (3)
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
        forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
      }
      it("should throw TestFailedException with normal error message if specified length is negative") {
        val caught1 = intercept[TestFailedException] {
          obj should have length (-2)
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
        // -1 is substituted when negating len would not yield a negative value:
        // len == 0, or len == MIN_VALUE (whose negation overflows back to itself).
        forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
      }
      it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
        val caught1 = intercept[TestFailedException] {
          obj should { have length (5) and (have length (2 - 1)) }
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
        val caught2 = intercept[TestFailedException] {
          obj should ((have length (5)) and (have length (2 - 1)))
        }
        assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
        val caught3 = intercept[TestFailedException] {
          obj should (have length (5) and have length (2 - 1))
        }
        assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
      }
      it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
        val caught1 = intercept[TestFailedException] {
          obj should { have length (55) or (have length (22)) }
        }
        assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
        val caught2 = intercept[TestFailedException] {
          obj should ((have length (55)) or (have length (22)))
        }
        assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
        val caught3 = intercept[TestFailedException] {
          obj should (have length (55) or have length (22))
        }
        assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
      }
      it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
        val caught1 = intercept[TestFailedException] {
          obj should { not { have length (3) } and not { have length (2) }}
        }
        assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
        val caught2 = intercept[TestFailedException] {
          obj should { { not have length (3) } and { not have length (2) }}
        }
        assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
        val caught3 = intercept[TestFailedException] {
          obj should (not have length (3) and not have length (2))
        }
        assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
      }
      it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
        val caught1 = intercept[TestFailedException] {
          obj should { not { have length (2) } or not { have length (2) }}
        }
        // NOTE(review): hard-coded expected message; siblings build it via FailureMessages.
        assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
        val caught2 = intercept[TestFailedException] {
          obj should { { not have length (2) } or { not have length (2) }}
        }
        assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
        val caught3 = intercept[TestFailedException] {
          obj should (not have length (2) or not have length (2))
        }
        assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
      }
    }
    // Verifies `have length (n)` against an object exposing length as an Int
    // getLength field (val). Test bodies are intentionally identical to the
    // sibling describes; only the member structure differs.
    describe("on an arbitrary object that has an Int getLength field") {
      class Lengthy(len: Int) {
        val getLength: Int = len // The only difference between the previous is the structure of this member
        override def toString = "lengthy" // failure messages assert against "lengthy"
      }
      val obj = new Lengthy(2)
      // Typeclass instance that enables `should have length` on Lengthy.
      implicit val lengthOfLengthy: Length[Lengthy] =
        new Length[Lengthy] {
          def lengthOf(o: Lengthy): Long = o.getLength
        }
      it("should do nothing if object length matches specified length") {
        obj should have length (2)
        forAll((len: Int) => new Lengthy(len) should have length (len))
      }
      it("should do nothing if object length does not match and used with should not") {
        obj should not { have length (3) }
        obj should not have length (3)
        forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
        forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
      }
      it("should do nothing when object length matches and used in a logical-and expression") {
        obj should { have length (2) and (have length (3 - 1)) }
        obj should (have length (2) and have length (3 - 1))
      }
      it("should do nothing when object length matches and used in a logical-or expression") {
        obj should { have length (77) or (have length (3 - 1)) }
        obj should (have length (77) or have length (3 - 1))
      }
      it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
        obj should { not { have length (5) } and not { have length (3) }}
        obj should (not have length (5) and not have length (3))
      }
      it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
        obj should { not { have length (2) } or not { have length (3) }}
        obj should (not have length (2) or not have length (3))
      }
      it("should throw TestFailedException if object length does not match specified length") {
        val caught1 = intercept[TestFailedException] {
          obj should have length (3)
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
        forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
      }
      it("should throw TestFailedException with normal error message if specified length is negative") {
        val caught1 = intercept[TestFailedException] {
          obj should have length (-2)
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
        // -1 is substituted when negating len would not yield a negative value:
        // len == 0, or len == MIN_VALUE (whose negation overflows back to itself).
        forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
      }
      it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
        val caught1 = intercept[TestFailedException] {
          obj should { have length (5) and (have length (2 - 1)) }
        }
        assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
        val caught2 = intercept[TestFailedException] {
          obj should ((have length (5)) and (have length (2 - 1)))
        }
        assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
        val caught3 = intercept[TestFailedException] {
          obj should (have length (5) and have length (2 - 1))
        }
        assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
      }
      it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
        val caught1 = intercept[TestFailedException] {
          obj should { have length (55) or (have length (22)) }
        }
        assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
        val caught2 = intercept[TestFailedException] {
          obj should ((have length (55)) or (have length (22)))
        }
        assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
        val caught3 = intercept[TestFailedException] {
          obj should (have length (55) or have length (22))
        }
        assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
      }
      it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
        val caught1 = intercept[TestFailedException] {
          obj should { not { have length (3) } and not { have length (2) }}
        }
        assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
        val caught2 = intercept[TestFailedException] {
          obj should { { not have length (3) } and { not have length (2) }}
        }
        assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
        val caught3 = intercept[TestFailedException] {
          obj should (not have length (3) and not have length (2))
        }
        assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
      }
      it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
        val caught1 = intercept[TestFailedException] {
          obj should { not { have length (2) } or not { have length (2) }}
        }
        // NOTE(review): hard-coded expected message; siblings build it via FailureMessages.
        assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
        val caught2 = intercept[TestFailedException] {
          obj should { { not have length (2) } or { not have length (2) }}
        }
        assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
        val caught3 = intercept[TestFailedException] {
          obj should (not have length (2) or not have length (2))
        }
        assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
      }
    }
describe("on an arbitrary object that has an empty-paren Long length method") {
class Lengthy(len: Long) {
def length(): Long = len
override def toString = "lengthy"
}
val obj = new Lengthy(2)
implicit val lengthOfLengthy =
new Length[Lengthy] {
def lengthOf(o: Lengthy): Long = o.length()
}
it("should do nothing if object length matches specified length") {
obj should have length (2)
obj should have length (2L)
forAll((len: Int) => new Lengthy(len) should have length (len))
forAll((len: Long) => new Lengthy(len) should have length (len))
}
it("should do nothing if object length does not match and used with should not") {
obj should not { have length (3) }
obj should not { have length (3L) }
obj should not have length (3)
obj should not have length (3L)
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
forAll((len: Long, wrongLen: Long) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
forAll((len: Long, wrongLen: Long) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
}
it("should do nothing when object length matches and used in a logical-and expression") {
obj should { have length (2) and (have length (3 - 1)) }
obj should { have length (2L) and (have length (3 - 1)) }
}
it("should do nothing when object length matches and used in a logical-or expression") {
obj should { have length (77) or (have length (2L)) }
obj should { have length (77L) or (have length (2)) }
}
it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
obj should { not { have length (5) } and not { have length (3) }}
obj should (not have length (5) and not have length (3))
}
it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
obj should { not { have length (2) } or not { have length (3) }}
obj should (not have length (2) or not have length (3))
}
it("should throw TestFailedException if object length does not match specified length") {
val caught1 = intercept[TestFailedException] {
obj should have length (3)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
}
it("should throw TestFailedException with normal error message if specified length is negative") {
val caught1 = intercept[TestFailedException] {
obj should have length (-2)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
}
it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (5) and (have length (2 - 1)) }
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught2 = intercept[TestFailedException] {
obj should ((have length (5)) and (have length (2 - 1)))
}
assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught3 = intercept[TestFailedException] {
obj should (have length (5) and have length (2 - 1))
}
assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
}
it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (55) or (have length (22)) }
}
assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught2 = intercept[TestFailedException] {
obj should ((have length (55)) or (have length (22)))
}
assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught3 = intercept[TestFailedException] {
obj should (have length (55) or have length (22))
}
assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
}
it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (3) } and not { have length (2) }}
}
assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught2 = intercept[TestFailedException] {
obj should { { not have length (3) } and { not have length (2) }}
}
assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught3 = intercept[TestFailedException] {
obj should (not have length (3) and not have length (2))
}
assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
}
it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (2) } or not { have length (2) }}
}
assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught2 = intercept[TestFailedException] {
obj should { { not have length (2) } or { not have length (2) }}
}
assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught3 = intercept[TestFailedException] {
obj should (not have length (2) or not have length (2))
}
assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
}
}
describe("on an arbitrary object that has a parameterless Long length method") {
class Lengthy(len: Long) {
def length: Long = len // The only difference between the previous is the structure of this member
override def toString = "lengthy"
}
val obj = new Lengthy(2)
implicit val lengthOfLengthy =
new Length[Lengthy] {
def lengthOf(o: Lengthy): Long = o.length
}
it("should do nothing if object length matches specified length") {
obj should have length (2)
obj should have length (2L)
forAll((len: Int) => new Lengthy(len) should have length (len))
forAll((len: Long) => new Lengthy(len) should have length (len))
}
it("should do nothing if object length does not match and used with should not") {
obj should not { have length (3) }
obj should not { have length (3L) }
obj should not have length (3)
obj should not have length (3L)
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
forAll((len: Long, wrongLen: Long) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
forAll((len: Long, wrongLen: Long) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
}
it("should do nothing when object length matches and used in a logical-and expression") {
obj should { have length (2) and (have length (3 - 1)) }
obj should (have length (2) and have length (3 - 1))
}
it("should do nothing when object length matches and used in a logical-or expression") {
obj should { have length (77) or (have length (3 - 1)) }
obj should (have length (77) or have length (3 - 1))
}
it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
obj should { not { have length (5) } and not { have length (3) }}
obj should (not have length (5) and not have length (3))
}
it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
obj should { not { have length (2) } or not { have length (3) }}
obj should (not have length (2) or not have length (3))
}
it("should throw TestFailedException if object length does not match specified length") {
val caught1 = intercept[TestFailedException] {
obj should have length (3)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
}
it("should throw TestFailedException with normal error message if specified length is negative") {
val caught1 = intercept[TestFailedException] {
obj should have length (-2)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
}
it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (5) and (have length (2 - 1)) }
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught2 = intercept[TestFailedException] {
obj should ((have length (5)) and (have length (2 - 1)))
}
assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught3 = intercept[TestFailedException] {
obj should (have length (5) and have length (2 - 1))
}
assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
}
it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (55) or (have length (22)) }
}
assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught2 = intercept[TestFailedException] {
obj should ((have length (55)) or (have length (22)))
}
assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught3 = intercept[TestFailedException] {
obj should (have length (55) or have length (22))
}
assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
}
it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (3) } and not { have length (2) }}
}
assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught2 = intercept[TestFailedException] {
obj should { { not have length (3) } and { not have length (2) }}
}
assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught3 = intercept[TestFailedException] {
obj should (not have length (3) and not have length (2))
}
assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
}
it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (2) } or not { have length (2) }}
}
assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught2 = intercept[TestFailedException] {
obj should { { not have length (2) } or { not have length (2) }}
}
assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught3 = intercept[TestFailedException] {
obj should (not have length (2) or not have length (2))
}
assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
}
}
describe("on an arbitrary object that has a Long length field") {
class Lengthy(len: Long) {
val length: Long = len // The only difference between the previous is the structure of this member
override def toString = "lengthy"
}
val obj = new Lengthy(2)
implicit val lengthOfLengthy =
new Length[Lengthy] {
def lengthOf(o: Lengthy): Long = o.length
}
it("should do nothing if object length matches specified length") {
obj should have length (2)
obj should have length (2L)
forAll((len: Int) => new Lengthy(len) should have length (len))
forAll((len: Long) => new Lengthy(len) should have length (len))
}
it("should do nothing if object length does not match and used with should not") {
obj should not { have length (3) }
obj should not have length (3)
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
}
it("should do nothing when object length matches and used in a logical-and expression") {
obj should { have length (2) and (have length (3 - 1)) }
obj should (have length (2) and have length (3 - 1))
}
it("should do nothing when object length matches and used in a logical-or expression") {
obj should { have length (77) or (have length (3 - 1)) }
obj should (have length (77) or have length (3 - 1))
}
it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
obj should { not { have length (5) } and not { have length (3) }}
obj should (not have length (5) and not have length (3))
}
it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
obj should { not { have length (2) } or not { have length (3) }}
obj should (not have length (2) or not have length (3))
}
it("should throw TestFailedException if object length does not match specified length") {
val caught1 = intercept[TestFailedException] {
obj should have length (3)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
}
it("should throw TestFailedException with normal error message if specified length is negative") {
val caught1 = intercept[TestFailedException] {
obj should have length (-2)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
}
it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (5) and (have length (2 - 1)) }
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught2 = intercept[TestFailedException] {
obj should ((have length (5)) and (have length (2 - 1)))
}
assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught3 = intercept[TestFailedException] {
obj should (have length (5) and have length (2 - 1))
}
assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
}
it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (55) or (have length (22)) }
}
assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught2 = intercept[TestFailedException] {
obj should ((have length (55)) or (have length (22)))
}
assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught3 = intercept[TestFailedException] {
obj should (have length (55) or have length (22))
}
assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
}
it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (3) } and not { have length (2) }}
}
assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught2 = intercept[TestFailedException] {
obj should { { not have length (3) } and { not have length (2) }}
}
assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught3 = intercept[TestFailedException] {
obj should (not have length (3) and not have length (2))
}
assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
}
it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (2) } or not { have length (2) }}
}
assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught2 = intercept[TestFailedException] {
obj should { { not have length (2) } or { not have length (2) }}
}
assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught3 = intercept[TestFailedException] {
obj should (not have length (2) or not have length (2))
}
assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
}
}
describe("on an arbitrary object that has an empty-paren Long getLength method") {
class Lengthy(len: Long) {
def getLength(): Long = len // The only difference between the previous is the structure of this member
override def toString = "lengthy"
}
val obj = new Lengthy(2)
implicit val lengthOfLengthy =
new Length[Lengthy] {
def lengthOf(o: Lengthy): Long = o.getLength()
}
it("should do nothing if object length matches specified length") {
obj should have length (2)
obj should have length (2L)
forAll((len: Int) => new Lengthy(len) should have length (len))
forAll((len: Long) => new Lengthy(len) should have length (len))
}
it("should do nothing if object length does not match and used with should not") {
obj should not { have length (3) }
obj should not { have length (3L) }
obj should not have length (3)
obj should not have length (3L)
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
forAll((len: Long, wrongLen: Long) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
forAll((len: Long, wrongLen: Long) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
}
it("should do nothing when object length matches and used in a logical-and expression") {
obj should { have length (2) and (have length (3 - 1)) }
obj should (have length (2) and have length (3 - 1))
}
it("should do nothing when object length matches and used in a logical-or expression") {
obj should { have length (77) or (have length (3 - 1)) }
obj should (have length (77) or have length (3 - 1))
}
it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
obj should { not { have length (5) } and not { have length (3) }}
obj should (not have length (5) and not have length (3))
}
it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
obj should { not { have length (2) } or not { have length (3) }}
obj should (not have length (2) or not have length (3))
}
it("should throw TestFailedException if object length does not match specified length") {
val caught1 = intercept[TestFailedException] {
obj should have length (3)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
}
it("should throw TestFailedException with normal error message if specified length is negative") {
val caught1 = intercept[TestFailedException] {
obj should have length (-2)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
}
it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (5) and (have length (2 - 1)) }
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught2 = intercept[TestFailedException] {
obj should ((have length (5)) and (have length (2 - 1)))
}
assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught3 = intercept[TestFailedException] {
obj should (have length (5) and have length (2 - 1))
}
assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
}
it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (55) or (have length (22)) }
}
assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught2 = intercept[TestFailedException] {
obj should ((have length (55)) or (have length (22)))
}
assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught3 = intercept[TestFailedException] {
obj should (have length (55) or have length (22))
}
assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
}
it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (3) } and not { have length (2) }}
}
assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught2 = intercept[TestFailedException] {
obj should { { not have length (3) } and { not have length (2) }}
}
assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught3 = intercept[TestFailedException] {
obj should (not have length (3) and not have length (2))
}
assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
}
it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (2) } or not { have length (2) }}
}
assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught2 = intercept[TestFailedException] {
obj should { { not have length (2) } or { not have length (2) }}
}
assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught3 = intercept[TestFailedException] {
obj should (not have length (2) or not have length (2))
}
assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
}
}
describe("on an arbitrary object that has a parameterless Long getLength method") {
class Lengthy(len: Long) {
def getLength: Long = len // The only difference between the previous is the structure of this member
override def toString = "lengthy"
}
val obj = new Lengthy(2)
implicit val lengthOfLengthy =
new Length[Lengthy] {
def lengthOf(o: Lengthy): Long = o.getLength
}
it("should do nothing if object length matches specified length") {
obj should have length (2)
obj should have length (2L)
forAll((len: Int) => new Lengthy(len) should have length (len))
forAll((len: Long) => new Lengthy(len) should have length (len))
}
it("should do nothing if object length does not match and used with should not") {
obj should not { have length (3) }
obj should not { have length (3L) }
obj should not have length (3)
obj should not have length (3L)
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
forAll((len: Long, wrongLen: Long) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
forAll((len: Long, wrongLen: Long) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
}
it("should do nothing when object length matches and used in a logical-and expression") {
obj should { have length (2) and (have length (3 - 1)) }
obj should (have length (2) and have length (3 - 1))
}
it("should do nothing when object length matches and used in a logical-or expression") {
obj should { have length (77) or (have length (3 - 1)) }
obj should (have length (77) or have length (3 - 1))
}
it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
obj should { not { have length (5) } and not { have length (3) }}
obj should (not have length (5) and not have length (3))
}
it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
obj should { not { have length (2) } or not { have length (3) }}
obj should (not have length (2) or not have length (3))
}
it("should throw TestFailedException if object length does not match specified length") {
val caught1 = intercept[TestFailedException] {
obj should have length (3)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
}
it("should throw TestFailedException with normal error message if specified length is negative") {
val caught1 = intercept[TestFailedException] {
obj should have length (-2)
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
}
it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (5) and (have length (2 - 1)) }
}
assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught2 = intercept[TestFailedException] {
obj should ((have length (5)) and (have length (2 - 1)))
}
assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
val caught3 = intercept[TestFailedException] {
obj should (have length (5) and have length (2 - 1))
}
assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
}
it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
obj should { have length (55) or (have length (22)) }
}
assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught2 = intercept[TestFailedException] {
obj should ((have length (55)) or (have length (22)))
}
assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
val caught3 = intercept[TestFailedException] {
obj should (have length (55) or have length (22))
}
assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
}
it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (3) } and not { have length (2) }}
}
assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught2 = intercept[TestFailedException] {
obj should { { not have length (3) } and { not have length (2) }}
}
assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
val caught3 = intercept[TestFailedException] {
obj should (not have length (3) and not have length (2))
}
assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
}
it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
obj should { not { have length (2) } or not { have length (2) }}
}
assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught2 = intercept[TestFailedException] {
obj should { { not have length (2) } or { not have length (2) }}
}
assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
val caught3 = intercept[TestFailedException] {
obj should (not have length (2) or not have length (2))
}
assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
}
}
// Same scenarios as the preceding describe block, but the length is exposed
// as a parameterless Long *field* (val) rather than a method, verifying that
// a user-supplied Length type-class instance works regardless of member shape.
describe("on an arbitrary object that has a Long getLength field") {
  class Lengthy(len: Long) {
    val getLength: Long = len // The only difference between the previous is the structure of this member
    override def toString = "lengthy"
  }
  val obj = new Lengthy(2)
  // Bridges the getLength field into ScalaTest's Length type class so that
  // `have length` can be applied to Lengthy.
  implicit val lengthOfLengthy =
    new Length[Lengthy] {
      def lengthOf(o: Lengthy): Long = o.getLength
    }
  it("should do nothing if object length matches specified length") {
    obj should have length (2)
    obj should have length (2L)
    forAll((len: Int) => new Lengthy(len) should have length (len))
    forAll((len: Long) => new Lengthy(len) should have length (len))
  }
  it("should do nothing if object length does not match and used with should not") {
    obj should not { have length (3) }
    obj should not { have length (3L) }
    obj should not have length (3)
    obj should not have length (3L)
    forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
    forAll((len: Long, wrongLen: Long) => if (len != wrongLen) new Lengthy(len) should not { have length (wrongLen) } else succeed)
    forAll((len: Int, wrongLen: Int) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
    forAll((len: Long, wrongLen: Long) => if (len != wrongLen) new Lengthy(len) should not have length (wrongLen) else succeed)
  }
  it("should do nothing when object length matches and used in a logical-and expression") {
    obj should { have length (2) and (have length (3 - 1)) }
    obj should (have length (2) and have length (3 - 1))
  }
  it("should do nothing when object length matches and used in a logical-or expression") {
    obj should { have length (77) or (have length (3 - 1)) }
    obj should (have length (77) or have length (3 - 1))
  }
  it("should do nothing when object length doesn't match and used in a logical-and expression with not") {
    obj should { not { have length (5) } and not { have length (3) }}
    obj should (not have length (5) and not have length (3))
  }
  it("should do nothing when object length doesn't match and used in a logical-or expression with not") {
    obj should { not { have length (2) } or not { have length (3) }}
    obj should (not have length (2) or not have length (3))
  }
  it("should throw TestFailedException if object length does not match specified length") {
    val caught1 = intercept[TestFailedException] {
      obj should have length (3)
    }
    assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
    forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (len + 1)))
  }
  it("should throw TestFailedException with normal error message if specified length is negative") {
    val caught1 = intercept[TestFailedException] {
      obj should have length (-2)
    }
    assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
    // 0 and MIN_VALUE are special-cased: negating 0 is still 0 and negating
    // MIN_VALUE overflows back to itself, so -1 is used instead.
    forAll((len: Int) => assertThrows[TestFailedException](new Lengthy(len) should have length (if ((len == 0) || (len == MIN_VALUE)) -1 else -len)))
  }
  it("should throw an assertion error when object length doesn't match and used in a logical-and expression") {
    val caught1 = intercept[TestFailedException] {
      obj should { have length (5) and (have length (2 - 1)) }
    }
    assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
    val caught2 = intercept[TestFailedException] {
      obj should ((have length (5)) and (have length (2 - 1)))
    }
    assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
    val caught3 = intercept[TestFailedException] {
      obj should (have length (5) and have length (2 - 1))
    }
    assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
  }
  it("should throw an assertion error when object length doesn't match and used in a logical-or expression") {
    val caught1 = intercept[TestFailedException] {
      obj should { have length (55) or (have length (22)) }
    }
    assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
    val caught2 = intercept[TestFailedException] {
      obj should ((have length (55)) or (have length (22)))
    }
    assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
    val caught3 = intercept[TestFailedException] {
      obj should (have length (55) or have length (22))
    }
    assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
  }
  it("should throw an assertion error when object length matches and used in a logical-and expression with not") {
    val caught1 = intercept[TestFailedException] {
      obj should { not { have length (3) } and not { have length (2) }}
    }
    assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
    val caught2 = intercept[TestFailedException] {
      obj should { { not have length (3) } and { not have length (2) }}
    }
    assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
    val caught3 = intercept[TestFailedException] {
      obj should (not have length (3) and not have length (2))
    }
    assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
  }
  it("should throw an assertion error when object length matches and used in a logical-or expression with not") {
    val caught1 = intercept[TestFailedException] {
      obj should { not { have length (2) } or not { have length (2) }}
    }
    assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
    val caught2 = intercept[TestFailedException] {
      obj should { { not have length (2) } or { not have length (2) }}
    }
    assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
    val caught3 = intercept[TestFailedException] {
      obj should (not have length (2) or not have length (2))
    }
    assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
  }
}
// Exercises an object that offers BOTH a length and a size member, backed by a
// single implicit that is simultaneously a Length and a Size instance; every
// scenario is run for `have length` and mirrored for `have size`.
describe("on an arbitrary object that has both parameterless Int length and parameterless Int size methods") {
  class Lengthy(len: Int) {
    def length: Int = len
    def size: Int = len
    override def toString = "lengthy"
  }
  val obj = new Lengthy(2)
  // One instance provides both type classes so that `have length` and
  // `have size` resolve without ambiguity.
  implicit val lengthOfLengthy: Length[Lengthy] with Size[Lengthy] =
    new Length[Lengthy] with Size[Lengthy] {
      def lengthOf(o: Lengthy): Long = o.length
      def sizeOf(o: Lengthy): Long = o.size
    }
  it("should do nothing if object length or size matches specified length") {
    obj should have length (2)
    obj should have size (2)
  }
  it("should do nothing if object length or size does not match and used with should not") {
    obj should not { have length (3) }
    obj should not have length (3)
    obj should not { have size (3) }
    obj should not have size (3)
  }
  it("should do nothing when object length or size matches and used in a logical-and expression") {
    obj should { have length (2) and (have length (3 - 1)) }
    obj should (have length (2) and have length (3 - 1))
    obj should { have size (2) and (have size (3 - 1)) }
    obj should (have size (2) and have size (3 - 1))
  }
  it("should do nothing when object length or size matches and used in a logical-or expression") {
    obj should { have length (77) or (have length (3 - 1)) }
    obj should (have length (77) or have length (3 - 1))
    obj should { have size (77) or (have size (3 - 1)) }
    obj should (have size (77) or have size (3 - 1))
  }
  it("should do nothing when object length or size doesn't match and used in a logical-and expression with not") {
    obj should { not { have length (5) } and not { have length (3) }}
    obj should (not have length (5) and not have length (3))
    obj should { not { have size (5) } and not { have size (3) }}
    obj should (not have size (5) and not have size (3))
  }
  it("should do nothing when object length or size doesn't match and used in a logical-or expression with not") {
    obj should { not { have length (2) } or not { have length (3) }}
    obj should (not have length (2) or not have length (3))
    obj should { not { have size (2) } or not { have size (3) }}
    obj should (not have size (2) or not have size (3))
  }
  it("should throw TestFailedException if object length or size does not match specified length") {
    val caught1 = intercept[TestFailedException] {
      obj should have length (3)
    }
    assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3))
    val caught2 = intercept[TestFailedException] {
      obj should have size (3)
    }
    assert(caught2.getMessage === FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 3))
  }
  it("should throw TestFailedException with normal error message if specified length is negative") {
    val caught1 = intercept[TestFailedException] {
      obj should have length (-2)
    }
    assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, -2))
    val caught2 = intercept[TestFailedException] {
      obj should have size (-2)
    }
    assert(caught2.getMessage === FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, -2))
  }
  it("should throw an assertion error when object length or size doesn't match and used in a logical-and expression") {
    val caught1 = intercept[TestFailedException] {
      obj should { have length (5) and (have length (2 - 1)) }
    }
    assert(caught1.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
    val caught2 = intercept[TestFailedException] {
      obj should ((have length (5)) and (have length (2 - 1)))
    }
    assert(caught2.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
    val caught3 = intercept[TestFailedException] {
      obj should (have length (5) and have length (2 - 1))
    }
    assert(caught3.getMessage === FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 5))
    // NOTE(review): the "b" variants below use inconsistent names
    // (caught1b vs. caughtb2/caughtb3); kept as-is, purely cosmetic.
    val caught1b = intercept[TestFailedException] {
      obj should { have size (5) and (have size (2 - 1)) }
    }
    assert(caught1b.getMessage === FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 5))
    val caughtb2 = intercept[TestFailedException] {
      obj should ((have size (5)) and (have size (2 - 1)))
    }
    assert(caughtb2.getMessage === FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 5))
    val caughtb3 = intercept[TestFailedException] {
      obj should (have size (5) and have size (2 - 1))
    }
    assert(caughtb3.getMessage === FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 5))
  }
  it("should throw an assertion error when object length or size doesn't match and used in a logical-or expression") {
    val caught1 = intercept[TestFailedException] {
      obj should { have length (55) or (have length (22)) }
    }
    assert(caught1.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
    val caught2 = intercept[TestFailedException] {
      obj should ((have length (55)) or (have length (22)))
    }
    assert(caught2.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
    val caught3 = intercept[TestFailedException] {
      obj should (have length (55) or have length (22))
    }
    assert(caught3.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 22))))
    val caught1b = intercept[TestFailedException] {
      obj should { have size (55) or (have size (22)) }
    }
    assert(caught1b.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 22))))
    val caught2b = intercept[TestFailedException] {
      obj should ((have size (55)) or (have size (22)))
    }
    assert(caught2b.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 22))))
    val caught3b = intercept[TestFailedException] {
      obj should (have size (55) or have size (22))
    }
    assert(caught3b.getMessage === FailureMessages.commaAnd(prettifier, UnquotedString(FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 55)), UnquotedString(FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 22))))
  }
  it("should throw an assertion error when object length or size matches and used in a logical-and expression with not") {
    val caught1 = intercept[TestFailedException] {
      obj should { not { have length (3) } and not { have length (2) }}
    }
    assert(caught1.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
    val caught2 = intercept[TestFailedException] {
      obj should { { not have length (3) } and { not have length (2) }}
    }
    assert(caught2.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
    val caught3 = intercept[TestFailedException] {
      obj should (not have length (3) and not have length (2))
    }
    assert(caught3.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadLengthInsteadOfExpectedLength(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadLength(prettifier, UnquotedString("lengthy"), 2))))
    val caught1b = intercept[TestFailedException] {
      obj should { not { have size (3) } and not { have size (2) }}
    }
    assert(caught1b.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadSize(prettifier, UnquotedString("lengthy"), 2))))
    val caught2b = intercept[TestFailedException] {
      obj should { { not have size (3) } and { not have size (2) }}
    }
    assert(caught2b.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadSize(prettifier, UnquotedString("lengthy"), 2))))
    val caught3b = intercept[TestFailedException] {
      obj should (not have size (3) and not have size (2))
    }
    assert(caught3b.getMessage === FailureMessages.commaBut(prettifier, UnquotedString(FailureMessages.hadSizeInsteadOfExpectedSize(prettifier, UnquotedString("lengthy"), 2, 3)), UnquotedString(FailureMessages.hadSize(prettifier, UnquotedString("lengthy"), 2))))
  }
  it("should throw an assertion error when object length or size matches and used in a logical-or expression with not") {
    val caught1 = intercept[TestFailedException] {
      obj should { not { have length (2) } or not { have length (2) }}
    }
    assert(caught1.getMessage === "lengthy had length 2, and lengthy had length 2")
    val caught2 = intercept[TestFailedException] {
      obj should { { not have length (2) } or { not have length (2) }}
    }
    assert(caught2.getMessage === "lengthy had length 2, and lengthy had length 2")
    val caught3 = intercept[TestFailedException] {
      obj should (not have length (2) or not have length (2))
    }
    assert(caught3.getMessage === "lengthy had length 2, and lengthy had length 2")
    val caught1b = intercept[TestFailedException] {
      obj should { not { have size (2) } or not { have size (2) }}
    }
    assert(caught1b.getMessage === "lengthy had size 2, and lengthy had size 2")
    val caught2b = intercept[TestFailedException] {
      obj should { { not have size (2) } or { not have size (2) }}
    }
    assert(caught2b.getMessage === "lengthy had size 2, and lengthy had size 2")
    val caught3b = intercept[TestFailedException] {
      obj should (not have size (2) or not have size (2))
    }
    assert(caught3b.getMessage === "lengthy had size 2, and lengthy had size 2")
  }
}
// SKIP-SCALATESTJS,NATIVE-START
// JVM-only (uses java.net and java.awt types): checks that two Length
// instances for different type parameters can coexist without ambiguity.
it("should allow multiple implicits of the same type class (such as Length) to be resolve so long as the type param is not ambiguous") {
  import java.net.DatagramPacket
  val dp = new DatagramPacket(Array(0x0, 0x1, 0x2, 0x3), 4)
  dp.getLength // NOTE(review): result discarded — appears to be a leftover no-op; confirm it can be removed
  implicit val lengthOfDatagramPacket: Length[DatagramPacket] =
    new Length[DatagramPacket] {
      def lengthOf(dp: DatagramPacket): Long = dp.getLength
    }
  dp should have length 4
  dp should not have length (99)
  import java.awt.image.DataBufferByte
  val db = new DataBufferByte(4)
  // NOTE(review): named "sizeOf..." but it is a Length instance backed by
  // getSize — presumably intentional for this test; confirm the naming.
  implicit val sizeOfDataBufferByte: Length[DataBufferByte] =
    new Length[DataBufferByte] {
      def lengthOf(db: DataBufferByte): Long = db.getSize
    }
  db should have length 4
  db should not have length (99)
}
// SKIP-SCALATESTJS,NATIVE-END
}
}
|
scalatest/scalatest
|
jvm/scalatest-test/src/test/scala/org/scalatest/ShouldLengthSpec.scala
|
Scala
|
apache-2.0
| 137,353
|
package scratchpad.done
import scratchpad.done.Applicative
/** Scratchpad driver that prints sample outputs of the monad combinators
  * defined below (ListMonad, OptionMonad, EitherMonad). Output goes to
  * stdout only; there is no assertion of correctness here.
  */
object Main {
  def main(args: Array[String]): Unit = {
    // replicateM / filterM demos for the List and Option instances.
    println(ListMonad.replicateM(10, List[Int](1, 2, 3)))
    println(OptionMonad.replicateM(5, Some("a")))
    println(ListMonad.filterM(List(1, 2, 3, 4, 5, 6))((ele: Int) => List[Boolean](ele % 2 == 0)))
    println(OptionMonad.filterM(List(Some(1), Some(2), Some(3), Some(4)))((ele: Option[Int]) => {
      ele.flatMap((eleVal) => {
        if (eleVal % 2 == 0) {
          Some(true)
        } else {
          Some(false)
        }})
    }))
    // EitherMonad demo: flatMap short-circuits on Left, apply combines
    // a wrapped function with a wrapped value.
    val ei: Either[String, Int] = Right(1)
    val eitherMonad = new EitherMonad[String]
    println(eitherMonad.flatMap(ei)((a: Int) => if (a == 1) Left("Crazy") else Right(2)))
    println(eitherMonad.apply(Left("BadBad"))(ei))
    println(eitherMonad.apply(Right((a: Int) => a + 1))(ei))
  }
}
/** Minimal monad type class: implementors provide `unit`, `flatMap`, and the
  * applicative `apply`; every other combinator is derived from those.
  */
trait Monad[F[_]] { self =>

  /** Enables infix `fa.map(f)` / `fa.flatMap(f)` syntax on any F[A]. */
  implicit class Operations[A](fa: F[A]) {
    def map[B](f: A => B): F[B] = self.map(fa)(f)
    def flatMap[B](f: A => F[B]): F[B] = self.flatMap(fa)(f)
  }

  // Primitives
  def unit[A](a: A): F[A]
  def flatMap[A, B](fa: F[A])(f: A => F[B]): F[B]
  /** Applicative apply; left abstract here although it could be derived. */
  def apply[A, B](fab: F[(A) => B])(fa: F[A]): F[B]

  // Compounds, all derived from the primitives above.

  /** Combines two effects with a binary function. */
  def map2[A, B, C](fa: F[A], fb: F[B])(f: (A, B) => C): F[C] =
    fa flatMap (a => fb map (b => f(a, b)))

  def map[A, B](fa: F[A])(f: A => B): F[B] =
    fa.flatMap((a: A) => unit(f(a)))

  /** Turns a list of effects into one effect producing a list, in order. */
  def sequence[A](lma: List[F[A]]): F[List[A]] =
    lma.foldRight(unit(List.empty[A]))((cur, acc) => map2(cur, acc)(_ :: _))

  /** Maps each element to an effect and sequences the results, in order. */
  def traverse[A, B](la: List[A])(f: A => F[B]): F[List[B]] =
    la.foldRight(unit(List.empty[B]))((cur, acc) => map2(f(cur), acc)(_ :: _))

  /** Performs the effect `ma` exactly `n` times and collects the results.
    *
    * Fixed: the previous version extracted a single value from `ma` and
    * repeated it `n - 1` times (the author's own "//to? or until?" comment
    * flagged the off-by-one), instead of sequencing the effect n times.
    */
  def replicateM[A](n: Int, ma: F[A]): F[List[A]] =
    sequence(List.fill(n)(ma))

  def product[A, B](ma: F[A], mb: F[B]): F[(A, B)] = map2(ma, mb)((_, _))

  /** Prepends `a` to the list carried inside `liFA`. */
  def merge[A](liFA: F[List[A]], a: A): F[List[A]] =
    liFA.map(li => a :: li)

  /** Keeps the elements whose effectful predicate yields true, preserving
    * order. (The `if (_)` placeholder form was replaced with an explicit
    * lambda for clarity.)
    */
  def filterM[A](ms: List[A])(f: A => F[Boolean]): F[List[A]] =
    ms.foldRight(unit(List.empty[A]))((a, acc) =>
      f(a).flatMap(keep => if (keep) merge(acc, a) else acc))

  /** Kleisli composition: feeds the output effect of `f` into `g`. */
  def compose[A, B, C](f: A => F[B], g: B => F[C]): A => F[C] =
    (a: A) => f(a).flatMap(g)

  /** flatMap expressed via compose; the Unit argument is passed explicitly
    * instead of relying on deprecated auto-unit adaptation. */
  def flatMapViaCompose[A, B](fa: F[A])(f: A => F[B]): F[B] =
    compose((_: Unit) => fa, f)(())

  /** Flattens one layer of effect nesting. */
  def join[A](mma: F[F[A]]): F[A] = mma.flatMap(fa => fa)

  def flatMapViaJoinAndMap[A, B](fa: F[A])(f: A => F[B]): F[B] = join(fa.map(f))
}
/** Monad instance for [[Option]]: `unit` wraps in Some, `flatMap` and
  * `apply` short-circuit on None.
  */
object OptionMonad extends Monad[Option] {
  override def unit[A](a: A): Option[A] = Some(a)
  override def flatMap[A, B](fa: Option[A])(f: (A) => Option[B]): Option[B] =
    fa match {
      case Some(value) => f(value)
      case None        => None
    }
  override def apply[A, B](fab: Option[(A) => B])(fa: Option[A]): Option[B] =
    for {
      a <- fa
      g <- fab
    } yield g(a)
}
//object ListMonad {
// def m[A] = new ListMonad[A]
//}
/** Monad instance for [[List]]: `flatMap` concatenates the per-element
  * results; `apply` pairs every value with every function, outer loop over
  * the values, inner loop over the functions.
  */
object ListMonad extends Monad[List] {
  override def unit[A](a: A): List[A] = a :: Nil
  override def flatMap[A, B](fa: List[A])(f: (A) => List[B]): List[B] =
    for {
      a <- fa
      b <- f(a)
    } yield b
  override def apply[A, B](fab: List[(A) => B])(fa: List[A]): List[B] =
    for {
      a <- fa
      g <- fab
    } yield g(a)
}
/** Identity wrapper. Unusually, the combinators take the target as an
  * explicit argument (`fa`) instead of operating on `this`, so the receiver
  * instance is ignored; that public shape is kept as-is.
  *
  * Fixed: the method-level type parameters used to be named `A`, shadowing
  * the class's `A` (confusing and warning-prone). They are renamed to `C`;
  * call sites are unaffected because type parameters are inferred.
  */
case class Id[A](value: A) {
  /** Applies `f` to the value inside `fa`. */
  def flatMap[C, B](fa: Id[C])(f: (C) => Id[B]): Id[B] =
    f(fa.value)

  /** Transforms the value inside `fa` and re-wraps it. */
  def map[C, B](fa: Id[C])(f: (C) => B): Id[B] =
    unit(f(fa.value))

  /** Wraps a plain value in Id. */
  def unit[C](a: C): Id[C] =
    Id(a)
}
//def eitherMonad[E]: Monad[({type f[x] = Either[E, x]})#f]
/** Right-biased monad instance for Either with the error type fixed to E
  * (via a type-lambda). Left values short-circuit both flatMap and apply.
  */
class EitherMonad[E] extends Monad[({type f[x] = Either[E, x]})#f] {
  // Primitives
  override def unit[A](a: A): Either[E, A] =
    Right(a)
  override def flatMap[A, B](fa: Either[E, A])(f: (A) => Either[E, B]): Either[E, B] =
    fa.right.flatMap(a => f(a))
  override def apply[A, B](fab: Either[E, (A) => B])(fa: Either[E, A]): Either[E, B] =
    for {
      g <- fab.right
      a <- fa.right
    } yield g(a)
}
//class StateMonad extends Monad[State] {
// type AS[A, B] = (A, B)
//
// //Primitives
// override def unit[A, B](a: AS): State[A, B] = ???
//
// override def flatMap[A, B](fa: State[A])(f: (A) => State[B]): State[B] = ???
//}
|
waxmittmann/fpinscala
|
answers/src/main/scala/scratchpad/done/Monad.scala
|
Scala
|
mit
| 4,724
|
package net.paploo.diestats.statistics.util
/** Extra Ordering instances not provided by the standard library. */
object AdditionalOrderings {

  /** Lexicographic ordering on sequences: compares element-wise with `ord`;
    * when one sequence is a prefix of the other, the shorter sorts first.
    */
  class SeqOrdering[A](implicit ord: Ordering[A]) extends Ordering[Seq[A]] {
    override def compare(x: Seq[A], y: Seq[A]): Int = {
      val as = x.iterator
      val bs = y.iterator
      // Walk both sequences until the first differing element.
      var result = 0
      while (result == 0 && as.hasNext && bs.hasNext) {
        result = ord.compare(as.next(), bs.next())
      }
      // Equal so far: whichever iterator still has elements belongs to the
      // longer (greater) sequence. This replaces the previous
      // `x.length - y.length`, which re-traversed both sequences a second
      // time (O(n) for List); only the sign matters per Ordering's contract.
      if (result != 0) result
      else if (as.hasNext) 1
      else if (bs.hasNext) -1
      else 0
    }
  }

  implicit def SeqOrdering[A](implicit ord: Ordering[A]): Ordering[Seq[A]] = new SeqOrdering[A]()
}
|
paploo/DieStats
|
src/main/scala/net/paploo/diestats/statistics/util/AdditionalOrderings.scala
|
Scala
|
bsd-3-clause
| 689
|
package com.seanshubin.detangler.collection
/** Outcome of a comparison: `isSame` is true when the compared items were
  * identical; `messageLines` carries a human-readable description of the
  * result (presumably empty or summary lines when identical — confirm
  * against the producer of this value).
  */
case class DifferenceResult(isSame: Boolean, messageLines: Seq[String])
|
SeanShubin/detangler
|
collection/src/main/scala/com/seanshubin/detangler/collection/DifferenceResult.scala
|
Scala
|
unlicense
| 117
|
package org.chipmunk.value
import java.sql.{ Date => SQLDate }
import java.math.{ BigDecimal => JBigDecimal }
import java.sql.Timestamp
import java.util.UUID
import scala.annotation.implicitNotFound
import java.math.{BigDecimal => JBigDecimal}
import java.sql.{Date => SQLDate}
/** Type class supplying a canonical "zero"/default value for a type T,
  * used e.g. to populate columns or fields that need a placeholder.
  */
@implicitNotFound("No member of typeclass Defaultable in scope for ${T}")
trait Defaultable[T] {
  // The default value for T.
  def defaultVal: T
}

/** Companion holding the built-in instances; being in the companion puts
  * them in the implicit scope of Defaultable[T] automatically.
  */
object Defaultable {
  /** Summons the default value for T from the implicit instance in scope. */
  def defaultOf[T: Defaultable]: T =
    implicitly[Defaultable[T]].defaultVal
  implicit object DefaultableInt extends Defaultable[Int] {
    val defaultVal: Int = 0
  }
  implicit object DefaultableLong extends Defaultable[Long] {
    val defaultVal: Long = 0
  }
  implicit object DefaultableFloat extends Defaultable[Float] {
    val defaultVal: Float = 0.0f
  }
  implicit object DefaultableDouble extends Defaultable[Double] {
    val defaultVal: Double = 0.0
  }
  implicit object DefaultableBigDecimal extends Defaultable[JBigDecimal] {
    val defaultVal: JBigDecimal = new JBigDecimal(0)
  }
  implicit object DefaultableString extends Defaultable[String] {
    val defaultVal: String = ""
  }
  // java.sql.Date/Timestamp at epoch millisecond 0 (1970-01-01 UTC).
  implicit object DefaultableDate extends Defaultable[SQLDate] {
    val defaultVal: SQLDate = new SQLDate(0)
  }
  implicit object DefaultableTimestamp extends Defaultable[Timestamp] {
    val defaultVal: Timestamp = new Timestamp(0)
  }
  implicit object DefaultableByteArray extends Defaultable[Array[Byte]] {
    val defaultVal: Array[Byte] = Array[Byte]()
  }
  implicit object DefaultableBoolean extends Defaultable[Boolean] {
    val defaultVal: Boolean = false
  }
  implicit object DefaultableUUID extends Defaultable[UUID] {
    val defaultVal: UUID = new UUID(0, 0)
  }
  // NOTE(review): `Type` and `Duration` are not imported here — presumably
  // project types from this package; confirm their zero-arg/empty-string
  // constructors really represent sensible defaults.
  implicit object DefaultableType extends Defaultable[Type] {
    val defaultVal: Type = new Type("")
  }
  implicit object DefaultableDuration extends Defaultable[Duration] {
    val defaultVal: Duration = new Duration
  }
  /** Lifts a default for T into a default for Option[T]; note the default
    * is Some(defaultOf[T]), not None. */
  implicit def defaultableOption[T: Defaultable]: Defaultable[Option[T]] =
    new Defaultable[Option[T]] {
      val defaultVal: Option[T] = Some(defaultOf[T])
    }
}
|
kpjjpk/chipmunk
|
src/main/scala/org/chipmunk/value/Defaultable.scala
|
Scala
|
mit
| 2,113
|
package de.unifreiburg.cs.proglang.jgs.typing
import de.unifreiburg.cs.proglang.jgs.constraints.CTypes.CType
import de.unifreiburg.cs.proglang.jgs.constraints.{CTypes, Constraint}
import de.unifreiburg.cs.proglang.jgs.constraints.TypeVars.TypeVar
import de.unifreiburg.cs.proglang.jgs.instrumentation.Var
import de.unifreiburg.cs.proglang.jgs.typing.Environment.JoinResult
import de.unifreiburg.cs.proglang.jgs.util.Extra
import soot.jimple.Stmt
import scala.collection.JavaConversions._
/**
* A map from statements to pre- and post- environments
*/
/** A map from statements to the set of pc type variables active at the
  * statement plus its pre- and post-environments (each a MultiEnv mapping a
  * variable to the SET of type variables it may carry after joins).
  * Construction goes through the companion's `apply`/`empty`; the raw map is
  * private.
  */
case class EnvMap private (private val m : Map[Stmt, (Set[TypeVar], (EnvMap.MultiEnv, EnvMap.MultiEnv))]) {
  /** Adds an entry for a statement that must not already be present;
    * throws IllegalArgumentException on a duplicate rather than overwriting. */
  def putNew(s : Stmt, pc : TypeVar, pre : Environment, post : Environment) : EnvMap = {
    if (this.m.isDefinedAt(s)) {
      throw new IllegalArgumentException(s"Environment map already contains an entry for statement ${s}: ${this.m}")
    }
    new EnvMap(this.m + (s -> (Set(pc), (EnvMap.multiEnvFromEnv(pre), EnvMap.multiEnvFromEnv(post)))))
  }
  /** Merges two EnvMaps: for statements present in both, pc sets are unioned
    * and the pre/post multi-environments are joined pointwise. */
  def join(other : EnvMap) : EnvMap = {
    def joinPrePost(p1 : (Set[TypeVar], (EnvMap.MultiEnv, EnvMap.MultiEnv)),
                    p2 : (Set[TypeVar], (EnvMap.MultiEnv, EnvMap.MultiEnv)))
    : (Set[TypeVar], (EnvMap.MultiEnv, EnvMap.MultiEnv)) =
      (p1._1 ++ p2._1, Extra.combinePairs(EnvMap.joinMultis, p1._2, p2._2))
    new EnvMap(Extra.joinWith(joinPrePost, this.m, other.m))
  }
  /*
  /**
   *
   * Combine two envmaps. The entries of statements that occur in both envmaps need to be unified. If these entries
   * have different mappings for common variables then they are unified using the translation of `joinResult`
   * (effectively this maps them to a type variable that represents the lub).
   */
  def joinWith[Level](joinResult : JoinResult[Level], other : EnvMap) : EnvMap = {
    def joinEnv(env1 : Map[Var[_], TypeVar], env2 : Map[Var[_], TypeVar]) : Environment = {
      val combine = (v1 : TypeVar, v2 : TypeVar) => if (v1 != v2) { joinResult.translate(v1) } else {v1}
      Environments.fromMap(Extra.joinWith(combine, env1, env2))
      /*
      val commonKeys : Set[Var[_]] = env1.keySet & env2.keySet
      val joins : Map[Var[_], TypeVar] = (for {
        key <- commonKeys
        v1 = env1(key)
        v2 = env2(key)
        v = if (v1 != v2) { joinResult.translate(v1) } else { v1 }
      } yield key -> v).toMap
      Environments.fromMap(env1 ++ env2 ++ joins)
      */
    }
    type EnvPair = (Environment, Environment)
    def joinEnvs(envPair1 : EnvPair, envPair2 : EnvPair) : EnvPair =
      (joinEnv(envPair1._1.getMap.toMap, envPair2._1.getMap.toMap), joinEnv(envPair1._2.getMap.toMap, envPair2._2.getMap.toMap))
    val result : Map[Stmt, EnvPair] =
      Extra.joinWith[Stmt, EnvPair](joinEnvs, this.m, other.m)
    /*
    val commonDomain = for {
      key <- this.m.keySet & other.m.keySet
      thisVal = this.m.get(key).get
      otherVal = other.m.get(key).get
      preEnv = joinEnv(thisVal._1.getMap.toMap, otherVal._1.getMap.toMap)
      postEnv = joinEnv(thisVal._2.getMap.toMap, otherVal._2.getMap.toMap)
    } // TODO I need to map the individual vars to the joined ones... requires a map to see what was joined
      yield key ->(preEnv, postEnv)
    new EnvMap(
      this.m.filterKeys(k => !other.m.isDefinedAt(k)) ++
      other.m.filterKeys(k => !this.m.isDefinedAt(k)) ++
      commonDomain
    )
    */
    new EnvMap(result)
  }
  */
  // Accessors return None when the statement has no entry.
  def getPre(s : Stmt) : Option[EnvMap.MultiEnv] = m.get(s).map(_._2._1)
  def getPost(s : Stmt) : Option[EnvMap.MultiEnv] = m.get(s).map(_._2._2)
  def getPCs(s : Stmt) : Option[Set[TypeVar]] = m.get(s).map(_._1)
  // NOTE(review): these two call .get on the Option and will throw if `s`
  // has no entry — presumably callers guarantee presence; confirm.
  def preAsConstraints[Level](mkConstraint : CType[Level] => Constraint[Level], s : Stmt, x : Var[_]) =
    EnvMap.asConstraints(mkConstraint, this.getPre(s).get, x)
  def postAsConstraints[Level](mkConstraint : CType[Level] => Constraint[Level], s : Stmt, x : Var[_]) =
    EnvMap.asConstraints(mkConstraint, this.getPost(s).get, x)
}
/** Companion: helpers for building EnvMaps and for turning a MultiEnv's
  * bindings into constraints.
  */
object EnvMap {
  // A variable may map to several type variables after joins.
  type MultiEnv = Map[Var[_], Set[TypeVar]]
  /** Lifts a plain Environment into a MultiEnv with singleton sets. */
  def multiEnvFromEnv(env : Environment) : MultiEnv =
    (for ((v , tv) <- env.getMap) yield v -> Set(tv)).toMap
  /** Pointwise union of two MultiEnvs. */
  def joinMultis(m1 : MultiEnv, m2 : MultiEnv) = Extra.joinWith((v1 : Set[TypeVar], v2 : Set[TypeVar]) => v1 ++ v2, m1, m2)
  /** Varargs constructor from plain Environments (lifted via multiEnvFromEnv). */
  def apply(kv : (Stmt, (Set[TypeVar], (Environment, Environment)))*) = {
    val m = (for ((k, (pcs, (v1, v2))) <- kv) yield k -> (pcs, (multiEnvFromEnv(v1), multiEnvFromEnv(v2)))).toMap
    new EnvMap(m)
  }
  def empty(): EnvMap = {
    new EnvMap(Map())
  }
  /** One constraint per type variable bound to `v` in `env`; throws if `v`
    * is unbound (direct `env(v)` lookup). The Scala Set is converted to
    * java.util.Set implicitly via the JavaConversions import at the top of
    * this file.
    */
  def asConstraints[Level](mkConstraint : CType[Level] => Constraint[Level], env : MultiEnv, v : Var[_]) :
  java.util.Set[Constraint[Level]] =
    for (tv <- env(v)) yield mkConstraint(CTypes.variable[Level](tv))
}
|
luminousfennell/jgs
|
GradualConstraints/src/main/java/de/unifreiburg/cs/proglang/jgs/typing/EnvMap.scala
|
Scala
|
bsd-3-clause
| 4,781
|
package com.github.ldaniels528.trifecta
import com.github.ldaniels528.commons.helpers.OptionHelper._
import com.github.ldaniels528.commons.helpers.Resource
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfterEach, FeatureSpec, GivenWhenThen}
import scala.io.Source
/**
* Trifecta Configuration Specification
* @author lawrence.daniels@gmail.com
*/
/** Verifies that TxConfig exposes configuration properties; currently only
  * checks that decoders load from the user's .trifecta directory (the exact
  * expected-value assertion is commented out below).
  */
class TxConfigSpec() extends FeatureSpec with BeforeAndAfterEach with GivenWhenThen with MockitoSugar {
  info("As a TxConfig instance")
  info("I want to be able to provide configuration properties to the application")
  feature("Retrieve message decoders") {
    scenario("Load decoders from the user's .trifecta directory") {
      Given("a default configuration")
      val config = TxConfig.defaultConfig
      When("the messages decoders are retrieved")
      val decoders = config.getDecoders
      // NOTE(review): decoders are only printed, not asserted — this
      // scenario currently cannot fail on wrong content.
      decoders foreach(d => info(s"decoder: $d"))
      Then(s"Two message decoders should be found")
      /*
      decoders shouldBe Some(Array(
        TxDecoder("shocktrade.keystats.avro", AvroDecoder("keyStatistics.avsc", loadSchema("/avro/keyStatistics.avsc"))),
        TxDecoder("shocktrade.quotes.avro", AvroDecoder("quotes.avsc", loadSchema("/avro/stockQuotes.avsc")))
      ))*/
    }
  }
  /*
  Some(Array(
    TxDecoder(shocktrade.keystats.avro,AvroDecoder(keyStatistics.avsc)),
    TxDecoder(shocktrade.quotes.avro,AvroDecoder(quotes.avsc))))
  Some(Array(
    TxDecoder(shocktrade.keystats.avro,AvroDecoder(keyStatistics.avsc)),
    TxDecoder(shocktrade.quotes.avro,AvroDecoder(quotes.avsc))))
  */
  /** Reads a classpath resource fully into a string; fails the test with a
    * descriptive message when the resource is missing. */
  private def loadSchema(path: String): String = {
    Resource(path) map (url => Source.fromURL(url).getLines().mkString) orDie s"Resource $path not found"
  }
}
|
ldaniels528/trifecta
|
src/test/scala/com/github/ldaniels528/trifecta/TxConfigSpec.scala
|
Scala
|
apache-2.0
| 1,743
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.scalar.lang
import org.apache.ignite._
import org.apache.ignite.internal.util.lang.GridPredicate3X
/**
* Peer deploy aware adapter for Java's `GridPredicate3X`.
*/
/**
 * Peer-deploy-aware adapter that exposes a Scala ternary predicate function
 * as a `GridPredicate3X`.
 *
 * @param p non-null predicate to delegate to.
 */
class ScalarPredicate3X[T1, T2, T3](private val p: (T1, T2, T3) => Boolean) extends GridPredicate3X[T1, T2, T3] {
  assert(p != null)

  /** Delegates evaluation to the wrapped Scala function. */
  @throws(classOf[IgniteCheckedException])
  def applyx(e1: T1, e2: T2, e3: T3): Boolean = p(e1, e2, e3)
}
|
agoncharuk/ignite
|
modules/scalar/src/main/scala/org/apache/ignite/scalar/lang/ScalarPredicate3X.scala
|
Scala
|
apache-2.0
| 1,318
|
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.sdk
// Aggregated measure values together with a count of values.
// NOTE(review): semantics of `newValues` inferred from its name only — confirm with callers.
case class Measures(measuresValues: MeasuresValues, newValues: Int)
|
danielcsant/sparta
|
sdk/src/main/scala/com/stratio/sparta/sdk/Measures.scala
|
Scala
|
apache-2.0
| 720
|
/*
* Copyright 2016 org.NLP4L
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.nlp4l.framework.builtin.acronym
import com.typesafe.config.Config
import org.nlp4l.framework.models.{Record, Cell, Dictionary}
import org.nlp4l.framework.processors._
import play.api.Logger
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
/** Factory that builds an AcronymExtractionProcessor from the job settings. */
class AcronymExtractionProcessorFactory(settings: Config) extends ProcessorFactory(settings) {
  /** Reads the mandatory "textField" parameter and wires the processor. */
  override def getInstance: Processor = {
    val textField = getStrParamRequired("textField")
    new AcronymExtractionProcessor(textField)
  }
}
/**
 * Extracts acronyms (e.g. "CEO") and their expansions (e.g. "Chief Executive Officer")
 * from the text stored in `textField` of each input record. For every acronym whose
 * expansion can be verified, a record with a single "acronyms" cell of the form
 * "ACRONYM, expansion words" is emitted.
 */
class AcronymExtractionProcessor(val textField: String) extends Processor {

  private val logger = Logger(this.getClass)

  // Candidate acronyms: start and end with an upper-case letter; interior letters
  // may include '/', '\' or '-'. (Backslashes appear doubled in this file's literals.)
  private val regexAcronym = """[A-Z][A-Za-z/\\-]*[A-Z]""".r

  // Upper bound on the number of words considered for a single expansion.
  private val MAX_WORDS = 20

  /** Scans every record of the dictionary; only acronyms of length <= 10 are considered. */
  override def execute(data: Option[Dictionary]): Option[Dictionary] = {
    data match {
      case Some(dic) => {
        val result = ListBuffer.empty[Record]
        dic.recordList.foreach( record => {
          val value = record.cellValue(textField)
          if(value != None){
            val text = value.get.toString
            regexAcronym.findAllIn(text).matchData.foreach( m => {
              val length = m.end - m.start
              if(length <= 10){
                // Try to find a verified expansion near the acronym occurrence.
                val rec = expansion(text, m.toString, m.start, m.end, length)
                if(rec != None)
                  result += Record(Seq(Cell("acronyms", s"${m.toString}, ${rec.get}")))
              }
            })
          }
        })
        Some(Dictionary(result))
      }
      case _ => None
    }
  }

  /**
   * Verifies an expansion candidate for `acronym` found at [start, end) in `text`.
   * Tries three matching strategies in order: strict upper-case initials, strict
   * lower-case initials, then a looser upper-case match. Returns the expansion
   * words (in reading order) or None.
   */
  def expansion(text: String, acronym: String, start: Int, end: Int, length: Int): Option[String] = {
    expansionCandidate(text, acronym, start, end, length) match {
      case Some(ec) => {
        // ec = (acronym chars, candidate words, reverse flag, loose-end flag)
        val ac = ec._1
        val as = ec._2
        val reverse = ec._3
        val looseEnd = ec._4
        if(as.length >= 2){
          testUpperCaseStrict(acronym, ac, as, reverse, looseEnd, ArrayBuffer.empty[String]) match {
            case Some(expansion) => Some(expansion)
            case _ => {
              testLowerCaseStrict(acronym, ac, as, reverse, looseEnd, ArrayBuffer.empty[String]) match {
                case Some(expansion) => Some(expansion)
                case _ => {
                  testUpperCaseLoose(acronym, ac, as, reverse, looseEnd) match {
                    case Some(expansion) => Some(expansion)
                    case _ => None
                  }
                }
              }
            }
          }
        }
        else None
      }
      case _ => None
    }
  }

  /** Splits a phrase into at most MAX_WORDS words; reversed when matching right-to-left. */
  def stackWords(str: String, reverse: Boolean): Array[String] = {
    val words = str.trim.split("""\\s+""")
    if(reverse) words.reverse.take(MAX_WORDS) else words.take(MAX_WORDS)
  }

  /**
   * Locates the textual pattern around the acronym and returns
   * (acronym chars, candidate words, reverse flag, loose-end flag).
   * `reverse` means the expansion precedes the acronym, so matching runs backwards;
   * `looseEnd` means the far end of the candidate need not be consumed exactly.
   */
  def expansionCandidate(text: String, acronym: String, start: Int, end: Int, length: Int): Option[(Array[Char], Array[String], Boolean, Boolean)] = {
    // check for "Expansion (ACRONYM)"
    // ex) He is the Chief Executive Officer (CEO)
    //     0123456789012345678901234567890123456789
    //               1         2         3
    val test1 = if(text.length > end){
      val regex = """([A-Za-z/\\-\\s]+) \\(%s\\)$""".format(acronym).r
      val src = text.substring(0, end + 1)
      regex.findFirstMatchIn(src) match {
        case Some(m) => {
          val reverse = true
          val ac = acronym.toCharArray.reverse
          val as = stackWords(m.group(1), reverse)
          Some((ac, as, reverse, true))
        }
        case _ => {
          None
        }
      }
    }
    else None
    // check for "Expansion or ACRONYM" and "Expansion, or ACRONYM"
    // ex) He is the Chief Executive Officer or CEO
    val test2 = if(test1 != None) test1
    else{
      val regex = """([A-Za-z/\\-\\s]+),? or %s$""".format(acronym).r
      val src = text.substring(0, end)
      regex.findFirstMatchIn(src) match {
        case Some(m) => {
          val reverse = true
          val ac = acronym.toCharArray.reverse
          val as = stackWords(m.group(1), reverse)
          Some((ac, as, reverse, true))
        }
        case _ => {
          None
        }
      }
    }
    // check for "Expansion, ACRONYM"
    // ex) He is the Chief Executive Officer, CEO
    val test3 = if(test2 != None) test2
    else{
      val regex = """([A-Za-z/\\-\\s]+), %s$""".format(acronym).r
      val src = text.substring(0, end)
      regex.findFirstMatchIn(src) match {
        case Some(m) => {
          val reverse = true
          val ac = acronym.toCharArray.reverse
          val as = stackWords(m.group(1), reverse)
          Some((ac, as, reverse, true))
        }
        case _ => {
          None
        }
      }
    }
    // check for "ACRONYM (Expansion)"
    // ex) He is the CEO (Chief Executive Officer).
    val test4 = if(test3 != None) test3
    else{
      val regex = """^%s \\(([A-Za-z/\\-\\s]+)\\)""".format(acronym).r
      val src = text.substring(start)
      regex.findFirstMatchIn(src) match {
        case Some(m) => {
          val reverse = false
          val ac = acronym.toCharArray
          val as = stackWords(m.group(1), reverse)
          // parenthesized expansion must be consumed exactly -> looseEnd = false
          Some((ac, as, reverse, false))
        }
        case _ => {
          None
        }
      }
    }
    // check for "ACRONYM, Expansion"
    // ex) He is the CEO, Chief Executive Officer.
    val test5 = if(test4 != None) test4
    else{
      val regex = """^%s, ([A-Za-z/\\-\\s]+)""".format(acronym).r
      val src = text.substring(start)
      regex.findFirstMatchIn(src) match {
        case Some(m) => {
          val reverse = false
          val ac = acronym.toCharArray
          val as = stackWords(m.group(1), reverse)
          Some((ac, as, reverse, true))
        }
        case _ => {
          None
        }
      }
    }
    test5
  }

  /**
   * Strict match: consumes acronym chars (upper-cased) against word initials,
   * skipping non-matching words but requiring every acronym char to be matched.
   * '-' and '/' inside the acronym are skipped after a successful char match.
   * Accumulates the matched words in `temp` (recursive).
   */
  def testUpperCaseStrict(acronym: String, ac: Array[Char], as: Array[String], reverse: Boolean, looseEnd: Boolean, temp: ArrayBuffer[String]): Option[String] = {
    if(ac.size == 0){
      // all acronym chars consumed; with a strict end, all words must be consumed too
      if(looseEnd || (!looseEnd && as.size == 0)){
        val str = if(reverse) temp.reverse.mkString(" ")
        else temp.mkString(" ")
        // the expansion must start with the acronym's first letter
        if(str.charAt(0) == acronym.charAt(0)) Some(str)
        else None
      }
      else{
        None
      }
    } else if(as.size == 0){
      None
    } else{
      val c = ac(0).toUpper
      val word = as(0)
      if(c == word(0)){
        temp += word
        testUpperCaseStrict(acronym, ac.tail.dropWhile{ a => a == '-' || a == '/'}, as.tail, reverse, looseEnd, temp)
      }
      else{
        temp += word
        testUpperCaseStrict(acronym, ac, as.tail, reverse, looseEnd, temp)
      }
    }
  }

  /** Same as testUpperCaseStrict but compares lower-cased initials. */
  def testLowerCaseStrict(acronym: String, ac: Array[Char], as: Array[String], reverse: Boolean, looseEnd: Boolean, temp: ArrayBuffer[String]): Option[String] = {
    if(ac.size == 0){
      if(looseEnd || (!looseEnd && as.size == 0)){
        val str = if(reverse) temp.reverse.mkString(" ")
        else temp.mkString(" ")
        if(str.charAt(0) == acronym.charAt(0).toLower) Some(str)
        else None
      }
      else{
        None
      }
    } else if(as.size == 0){
      None
    } else{
      val c = ac(0).toLower
      val word = as(0)
      if(c == word(0)){
        temp += word
        testLowerCaseStrict(acronym, ac.tail.dropWhile{ a => a == '-' || a == '/'}, as.tail, reverse, looseEnd, temp)
      }
      else{
        temp += word
        testLowerCaseStrict(acronym, ac, as.tail, reverse, looseEnd, temp)
      }
    }
  }

  /**
   * Loose match: only requires the first and last candidate words to line up with
   * acronym letters; intermediate words need not contribute letters.
   */
  def testUpperCaseLoose(acronym: String, ac: Array[Char], as: Array[String], reverse: Boolean, looseEnd: Boolean): Option[String] = {
    if(!looseEnd){
      val expWords = if(reverse) as.reverse else as
      if(acronym.charAt(0) == expWords(0).charAt(0) && ac.contains(expWords(expWords.length - 1).charAt(0))) Some(expWords.mkString(" "))
      else None
    }
    else{
      val firstLetter = acronym.charAt(0)
      if(reverse){
        if(ac.contains(as(0).charAt(0))){
          // it is safe to use takeWhile() after using tail() because as.length is always larger than 1
          val intermediateWords = as.tail.takeWhile(_.charAt(0) != firstLetter)
          if(as.length >= intermediateWords.length + 2) Some(Array(Array(as(0)),intermediateWords,Array(as(intermediateWords.length + 1))).flatten.reverse.mkString(" "))
          else None
        }
        else None
      }
      else {
        if(as(0).charAt(0) == firstLetter){
          // it is safe to use takeWhile() after using tail() because as.length is always larger than 1
          val intermediateWords = as.tail.takeWhile(a => !ac.contains(a.charAt(0)))
          if(as.length >= intermediateWords.length + 2) Some(Array(Array(as(0)),intermediateWords,Array(as(intermediateWords.length + 1))).flatten.mkString(" "))
          else None
        }
        else None
      }
    }
  }
}
|
NLP4L/framework
|
app/org/nlp4l/framework/builtin/acronym/AcronymExtractionProcessor.scala
|
Scala
|
apache-2.0
| 9,366
|
package skinny.test
import skinny.controller.SkinnyServlet
import skinny.micro.context.SkinnyContext
/**
* Mock of SkinnyServlet.
*/
/**
 * Mock of SkinnyServlet for use in tests; mixes in the mock controller features
 * and papers over an NPE in path resolution (see below).
 */
trait MockServlet extends SkinnyServlet with MockControllerBase with MockWebPageControllerFeatures {

  // Work around for the following error
  // java.lang.NullPointerException
  //     at java.util.regex.Matcher.getTextLength(Matcher.java:1234)
  //     at java.util.regex.Matcher.reset(Matcher.java:308)
  //     at java.util.regex.Matcher.<init>(Matcher.java:228)
  //     at java.util.regex.Pattern.matcher(Pattern.java:1088)
  //     at scala.util.matching.Regex.findFirstIn(Regex.scala:388)
  //     at rl.UrlCodingUtils$class.isUrlEncoded(UrlCodingUtils.scala:24)
  //     at rl.UrlCodingUtils$.isUrlEncoded(UrlCodingUtils.scala:111)
  //     at rl.UrlCodingUtils$class.needsUrlEncoding(UrlCodingUtils.scala:32)
  //     at rl.UrlCodingUtils$.needsUrlEncoding(UrlCodingUtils.scala:111)
  //     at rl.UrlCodingUtils$class.ensureUrlEncoding(UrlCodingUtils.scala:35)
  //     at rl.UrlCodingUtils$.ensureUrlEncoding(UrlCodingUtils.scala:111)
  //     at org.scalatra.UriDecoder$.firstStep(ScalatraBase.scala:19)
  //     at org.scalatra.ScalatraServlet$.requestPath(ScalatraServlet.scala:30)
  //     at org.scalatra.ScalatraServlet$.org$scalatra$ScalatraServlet$$getRequestPath$1(ScalatraServlet.scala:19)
  //     at org.scalatra.ScalatraServlet$$anonfun$requestPath$3.apply(ScalatraServlet.scala:23)
  //     at org.scalatra.ScalatraServlet$$anonfun$requestPath$3.apply(ScalatraServlet.scala:22)
  //     at scala.Option.getOrElse(Option.scala:120)
  //     at org.scalatra.ScalatraServlet$.requestPath(ScalatraServlet.scala:22)
  //     at org.scalatra.ScalatraServlet.requestPath(ScalatraServlet.scala:68)
  //     at skinny.controller.feature.RequestScopeFeature$class.initializeRequestScopeAttributes(RequestScopeFeature.scala:98)
  //     at skinny.controller.SkinnyServlet.initializeRequestScopeAttributes(SkinnyServlet.scala:8)
  //     at skinny.test.MockControllerBase$class.$init$(MockControllerBase.scala:89)

  /** Returns the request path, falling back to "/" when the underlying
    * Scalatra machinery throws the NPE documented above. */
  override def requestPath(implicit ctx: SkinnyContext = skinnyContext): String = {
    try {
      super.requestPath(ctx)
    } catch {
      case e: NullPointerException =>
        logger.debug("[work around] skipped NPE when resolving requestPath", e)
        "/"
    }
  }
}
|
seratch/skinny-framework
|
test/src/main/scala/skinny/test/MockServlet.scala
|
Scala
|
mit
| 2,285
|
/*
* NOTICE: The original file was changed by Philip Stutz and Sara Magliacane.
*
* This file is part of the PSL software.
* Copyright 2011-2013 University of Maryland
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.signalcollect.psl
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import com.signalcollect.util.TestAnnouncements
/**
 * Checks that the squared-linear-loss optimizer converges to the expected
 * consensus values for a known problem instance.
 */
class SquaredLinearLossSpec extends FlatSpec with Matchers with TestAnnouncements {

  "SquaredLinearLoss" should "correctly solve problem A" in {
    val z = Array(0.4, 0.5, 0.1)
    val y = Array(0.0, 0.0, -0.05)
    val coeffs = Array(0.3, -1.0, 0.4)
    val constant = -20.0
    val weight = 0.5
    val stepSize = 2.0
    val expected = Array(-1.41569, 6.55231, -2.29593)
    testProblem(z, y, coeffs, constant, weight, stepSize, expected)
  }

  /**
   * Builds the optimizer for one problem instance, runs it and asserts every
   * result coordinate within +-5e-5 of `expected`.
   * (Fixed deprecated procedure syntax: added explicit `: Unit =`.)
   */
  def testProblem(
    z: Array[Double],
    y: Array[Double],
    coeffs: Array[Double],
    constant: Double,
    weight: Double,
    stepSize: Double,
    expected: Array[Double]): Unit = {
    // Consensus variables are keyed by their index.
    val zMap = z.indices.zip(z).toMap
    val hlt = Optimizer.squaredLinearLoss(
      stepSize,
      zMap,
      weight,
      constant,
      coeffs,
      zMap.keys.toArray,
      1)
    hlt.setY(y)
    val resultMap = hlt.optimize(zMap)
    for (i <- z.indices) {
      resultMap(i) should be(expected(i) +- 5e-5)
    }
  }
}
|
uzh/fox
|
src/test/scala/com/signalcollect/psl/SquaredLinearLossSpec.scala
|
Scala
|
apache-2.0
| 1,853
|
package com.karasiq.shadowcloud.exceptions
import java.io.FileNotFoundException
import java.nio.file.FileAlreadyExistsException
import com.karasiq.shadowcloud.index.diffs.IndexDiff
import com.karasiq.shadowcloud.model.{Chunk, Path, RegionId, StorageId}
/** Base class for all shadowcloud exceptions. */
abstract class SCException(message: String = null, cause: Throwable = null) extends Exception(message, cause)

object SCException {
  // -----------------------------------------------------------------------
  // Traits
  // -----------------------------------------------------------------------
  /** Marker for exceptions that wrap another error (inspect via getCause). */
  trait WrappedError
  trait IOError
  trait NotFound
  trait AlreadyExists
  trait ChunkAssociated {
    def chunk: Chunk
  }
  trait DiffAssociated {
    def diff: IndexDiff
  }
  trait PathAssociated {
    def path: Path
  }
  trait RegionAssociated {
    def regionId: RegionId
  }
  trait StorageAssociated {
    def storageId: StorageId
  }

  // -----------------------------------------------------------------------
  // Utils
  // -----------------------------------------------------------------------

  /** True when `error` — or, for wrapped errors, its cause chain — denotes "not found".
    * Fix: the Unicode `⇒` arrows had been corrupted to `β` in this file, which does
    * not compile; restored as `=>`.
    */
  def isNotFound(error: Throwable): Boolean = error match {
    case _: NotFound | _: FileNotFoundException | _: NoSuchElementException =>
      true
    case wrapped: WrappedError if wrapped.getCause != error => // guard against self-caused loops
      isNotFound(wrapped.getCause)
    case _ =>
      false
  }

  /** True when `error` — or its cause chain — denotes "already exists". */
  def isAlreadyExists(error: Throwable): Boolean = error match {
    case _: AlreadyExists | _: FileAlreadyExistsException =>
      true
    case wrapped: WrappedError if wrapped.getCause != error =>
      isAlreadyExists(wrapped.getCause)
    case _ =>
      false
  }
}
|
Karasiq/shadowcloud
|
model/src/main/scala/com/karasiq/shadowcloud/exceptions/SCException.scala
|
Scala
|
apache-2.0
| 1,633
|
package shop.infrastructure
import slick.driver.PostgresDriver.simple._
// import org.joda.time.DateTime
// import com.github.tototoshi.slick.PostgresJodaSupport._
import shop.model._
// import akka.event.Logging
import scala.slick.jdbc.meta.MTable
/** Slick schema for the "shopper" table: auto-increment id + unique username. */
class ShopperSchema(tag: Tag) extends Table[(Long,String)](tag,"shopper"){
  def id = column[Long]("id",O.PrimaryKey,O.AutoInc)
  def username = column[String]("username")
  // Usernames must be unique across all shoppers.
  def uniqueUsername = index("IDX_SHOPPER_USERNAME", username, unique = true)
  def * = (id, username) // <> (Shopper.tupled,Shopper.unapply)
}
/** Slick schema for "shopper_identity": stores a (hashed) password per shopper. */
class IdentitySchema(tag: Tag) extends Table[(Long,String,Long)](tag,"shopper_identity"){
  def id = column[Long]("id",O.PrimaryKey,O.AutoInc)
  def password = column[String]("password")
  def shopperId = column[Long]("shopper_id")
  // def shopper = foreignKey("IDENTITY_SHOPPER_FK", shopperId, TableQuery[ShopperSchema])(_.id, onUpdate=ForeignKeyAction.Restrict, onDelete=ForeignKeyAction.Cascade)
  def * = (id, password, shopperId)
}
/** Creates and cleans the database schema for all repositories. */
class RepositoryInitialiser(implicit val registry: ComponentRegistry) extends Repository {

  /** Creates every table that does not exist yet. */
  def initialiseDatabase = {
    database.withSession{ implicit session =>
      // foreach instead of map: we run these purely for the side effect.
      List(shoppers,identities,shoppingLists,shoppingItems,listParticipants).foreach( createTableIfNotExists(_))
    }
  }

  // True when no table with this name exists yet.
  // (Renamed from the misleading `isTableCreated`, which returned the opposite
  // of its name; the private rename keeps external behavior identical.)
  private def isTableMissing(table: TableQuery[_ <: Table[_]])(implicit session: Session): Boolean = {
    MTable.getTables(table.baseTableRow.tableName).list.isEmpty
  }

  /** Issues DDL for `table` only if it is not present in the database. */
  private def createTableIfNotExists(table: TableQuery[_ <: Table[_]])(implicit session: Session): Unit = {
    if(isTableMissing(table)) table.ddl.create
  }

  /** Unconditionally creates all tables in one combined DDL statement. */
  def createTables: Unit = {
    database.withSession{ implicit session =>
      (shoppers.ddl ++ identities.ddl ++ shoppingLists.ddl ++ shoppingItems.ddl ++ listParticipants.ddl).create
    }
  }

  /** Deletes all rows, children before parents to respect foreign keys. */
  def cleanDatabase = {
    database.withSession{ implicit session =>
      shoppingItems.delete
      listParticipants.delete
      shoppingLists.delete
      identities.delete
      shoppers.delete
    }
  }
}
/** Shared database handle and Slick table queries used by all repositories. */
trait Repository {
  // Supplies the datasource configuration.
  val registry: ComponentRegistry
  // Lazy so the datasource is only resolved on first use.
  lazy val database = Database.forDataSource(registry.datasourceConfig.datasource)
  val shoppers = TableQuery[ShopperSchema]
  val identities = TableQuery[IdentitySchema]
  val shoppingLists = TableQuery[ShoppingListSchema]
  val shoppingItems = TableQuery[ShoppingItemSchema]
  val listParticipants = TableQuery[ListParticipantSchema]
}
/** Persistence for shopper credentials (shopper_identity table). */
class IdentityRepository(implicit val registry: ComponentRegistry) extends Repository with Logging {
  val shopperRepository = registry.shopperRepository

  /** Inserts a password row for the shopper and returns the generated id.
    * The -1 id is a placeholder; the AutoInc column replaces it. */
  def save(shopperId: Long, password: String): Option[Long] = {
    database.withSession{ implicit session =>
      Some( (identities returning identities.map(_.id) += (-1,password,shopperId)) )
    }
  }

  /** Looks up the stored (encrypted) password for a shopper, if any. */
  def findEncryptedPassword(shopperId: Long): Option[String] = {
    database.withSession{ implicit session =>
      logger.info(s"find pw shopper id $shopperId")
      identities.
        filter( _.shopperId === shopperId ).
        map(_.password).
        firstOption
    }
  }
}
/** Persistence for shopper accounts (shopper table). */
class ShopperRepository(implicit val registry: ComponentRegistry) extends Repository with Logging {
  val identityRepository = registry.identityRepository

  /** Inserts a shopper and returns the generated id (-1 is an AutoInc placeholder). */
  def save(username: String): Option[Long] = {
    database.withSession{ implicit session =>
      Some( (shoppers returning shoppers.map(_.id) += (-1,username) ) )
    }
  }

  /** Finds a shopper by (unique) username. */
  def findShopper(username: String): Option[Shopper] = {
    database.withSession{ implicit session =>
      shoppers.filter(_.username === username).
        firstOption.map{
          case (id,username) => Shopper(Some(id),username)
        }
    }
  }

  /** Finds a shopper by primary key.
    * NOTE(review): constructs `new Shopper(id,username)` here but
    * `Shopper(Some(id),username)` above — presumably an auxiliary constructor;
    * confirm both produce equivalent instances. */
  def findShopperById(shopperId: Long): Option[Shopper] = {
    database.withSession{ implicit session =>
      logger.debug(s"Looking for $shopperId")
      shoppers.filter(_.id === shopperId).
        firstOption.map{
          case (id,username) => new Shopper(id,username)
        }
    }
  }
}
|
flurdy/shoppinglist
|
shopservice/src/main/scala/infrastructure/shopperrepository.scala
|
Scala
|
mit
| 4,129
|
package im.tox.antox.tox
import java.sql.Timestamp
import im.tox.antox.data.State
import im.tox.antox.utils.TimestampUtils
import im.tox.antox.wrapper._
import rx.lang.scala.subjects.BehaviorSubject
/**
 * Central hub of Rx subjects holding the app's reactive state (active chat,
 * friend/group lists, unread counts, ...) plus derived streams that combine
 * them into UI-ready view models.
 */
object Reactive {

  // Whether a chat window is currently open; mirrored into global State.
  val chatActive = BehaviorSubject[Boolean](false)
  val chatActiveSub = chatActive.subscribe(x => State.chatActive(x))

  // Key of the currently open conversation, if any; mirrored into State.
  val activeKey = BehaviorSubject[Option[String]](None)
  val activeKeySub = activeKey.subscribe(x => State.activeKey(x))

  val friendList = BehaviorSubject[Array[FriendInfo]](new Array[FriendInfo](0))
  val groupList = BehaviorSubject[Array[GroupInfo]](new Array[GroupInfo](0))
  val friendRequests = BehaviorSubject[Array[FriendRequest]](new Array[FriendRequest](0))
  val groupInvites = BehaviorSubject[Array[GroupInvite]](new Array[GroupInvite](0))
  // Last message text + timestamp, keyed by contact key / group id.
  val lastMessages = BehaviorSubject[Map[String, (String, Timestamp)]](Map.empty[String, (String, Timestamp)])
  val unreadCounts = BehaviorSubject[Map[String, Integer]](Map.empty[String, Integer])
  val typing = BehaviorSubject[Boolean](false)
  val updatedMessages = BehaviorSubject[Boolean](true)

  // Friend list enriched with last message + unread count per friend.
  val friendInfoList = friendList
    .combineLatestWith(lastMessages)((fl, lm) => (fl, lm))
    .combineLatestWith(unreadCounts)((tup, uc) => {
      tup match {
        case (fl, lm) =>
          fl.map(f => {
            val lastMessageTup: Option[(String, Timestamp)] = lm.get(f.key)
            val unreadCount: Option[Integer] = uc.get(f.key)
            (lastMessageTup, unreadCount) match {
              case (Some((lastMessage, lastMessageTimestamp)), Some(unreadCount)) =>
                new FriendInfo(f, lastMessage, lastMessageTimestamp, unreadCount)
              case (Some((lastMessage, lastMessageTimestamp)), None) =>
                new FriendInfo(f, lastMessage, lastMessageTimestamp, 0)
              case _ =>
                // no message yet: empty text and a zero timestamp
                new FriendInfo(f, "", new Timestamp(0, 0, 0, 0, 0, 0, 0), 0)
            }
          })
      }
    })

  // Group list enriched in place (mutates GroupInfo fields) with last message + unread count.
  val groupInfoList = groupList
    .combineLatestWith(lastMessages)((gl, lm) => (gl, lm))
    .combineLatestWith(unreadCounts)((tup, uc) => {
      tup match {
        case (gl, lm) =>
          gl.map(g => {
            val lastMessageTup: Option[(String, Timestamp)] = lm.get(g.id)
            val unreadCount: Option[Integer] = uc.get(g.id)
            // NOTE(review): matches on `uc` (the whole map), not `unreadCount` —
            // looks unintentional but is kept as-is; verify against history.
            (lastMessageTup, uc) match {
              case (Some((lastMessage, lastMessageTimestamp)), _) =>
                g.lastMessage = lastMessage
                g.lastMessageTimestamp = lastMessageTimestamp
                g.unreadCount = unreadCount.getOrElse(0).asInstanceOf[Int]
                g
              case _ =>
                g.lastMessage = ""
                g.lastMessageTimestamp = TimestampUtils.emptyTimestamp()
                g.unreadCount = 0
                g
            }
          })
      }
    })

  //this is bad FIXME
  val contactListElements = friendInfoList
    .combineLatestWith(friendRequests)((friendInfos, friendRequests) => (friendInfos, friendRequests)) //combine friendinfolist and friend requests and return them in a tuple
    .combineLatestWith(groupInvites)((a, gil) => (a._1, a._2, gil)) //return friendinfolist, friendrequests (a) and groupinvites (gi) in a tuple
    .combineLatestWith(groupInfoList)((a, gil) => (a._1, a._2, a._3, gil)) //return friendinfolist, friendrequests and groupinvites (a), and groupInfoList (gl) in a tuple
}
|
Ansa89/Antox
|
app/src/main/scala/im/tox/antox/tox/Reactive.scala
|
Scala
|
gpl-3.0
| 3,313
|
/*
This file is part of Intake24.
Copyright 2015, 2016 Newcastle University.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package security
/** Thrown when a request fails authentication. */
class AuthenticationException(msg: String) extends Exception(msg)
|
digitalinteraction/intake24
|
ApiPlayServer/app/security/AuthenticationException.scala
|
Scala
|
apache-2.0
| 690
|
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.models.application.debts
import iht.testhelpers.CommonBuilder
import org.scalatestplus.mockito.MockitoSugar
import common.CommonPlaySpec
/** Unit tests for BasicEstateElementLiabilities.isComplete (tri-state Option result). */
class BasicEstateElementLiabilitiesTest extends CommonPlaySpec with MockitoSugar{
  "isComplete" must {
    "return Some(true) when ElementLiability is complete" in {
      val estateElementLiability = CommonBuilder.buildBasicEstateElementLiabilities.copy(
        isOwned = Some(true),
        value = Some(BigDecimal(10000)))
      estateElementLiability.isComplete shouldBe Some(true)
    }
    "return Some(true) when ElementLiability has isOwned as false" in {
      // Not owned: a missing value is still considered complete.
      val estateElementLiability = CommonBuilder.buildBasicEstateElementLiabilities.copy(
        isOwned = Some(false), value = None)
      estateElementLiability.isComplete shouldBe Some(true)
    }
    "return Some(false) when one of the fields is None" in {
      val estateElementLiability = CommonBuilder.buildBasicEstateElementLiabilities.copy(
        isOwned = Some(true),
        value = None)
      estateElementLiability.isComplete shouldBe Some(false)
    }
    "return None when both the fields are None" in {
      val estateElementLiability = CommonBuilder.buildBasicEstateElementLiabilities.copy(None, None)
      estateElementLiability.isComplete shouldBe empty
    }
  }
}
|
hmrc/iht-frontend
|
test/iht/models/application/debts/BasicEstateElementLiabilitiesTest.scala
|
Scala
|
apache-2.0
| 1,923
|
/*
* tuProlog - Copyright (C) 2001-2002 aliCE team at deis.unibo.it
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.szadowsz.gospel.core.db.primitive
import com.szadowsz.gospel.core.data.{Struct, Term}
import com.szadowsz.gospel.util.InspectionUtils
import java.lang.reflect.{InvocationTargetException, Method}
import com.szadowsz.gospel.core.db.lib.Library
/**
* Primitive class
* referring to a builtin predicate or functor
*
* @see Struct
*/
/** Discriminator constants for the kind of primitive a PrimitiveInfo describes. */
object PrimitiveInfo {
  val DIRECTIVE: Int = 0
  val PREDICATE: Int = 1
  val FUNCTOR: Int = 2
}
@throws(classOf[NoSuchMethodException])
class PrimitiveInfo(theType: Int, key: String, lib: Library, m: Method, arity: Int) {
private val _type: Int = theType
/**
* method to be call when evaluating the built-in
*/
private val _method: Method = if (m == null) { throw new NoSuchMethodException} else m
/**
* lib object where the builtin is defined
*/
private val source: Library = lib
/**
* for optimization purposes
*/
private val primitive_args = new Array[Term](arity)
private var primitive_key: String = key
/**
* Method to invalidate primitives. It's called just mother library removed
*/
def invalidate: String = {
val key: String = primitive_key
primitive_key = null
key
}
def getKey: String = {
primitive_key
}
def isDirective: Boolean = {
(_type == PrimitiveInfo.DIRECTIVE)
}
def isFunctor: Boolean = {
(_type == PrimitiveInfo.FUNCTOR)
}
def isPredicate: Boolean = {
(_type == PrimitiveInfo.PREDICATE)
}
def getType: Int = {
_type
}
def getSource: Library = {
source
}
/**
* evaluates the primitive as a directive
*
* @throws InvocationTargetException
* @throws IllegalAccessException
*/
@throws(classOf[IllegalAccessException])
@throws(classOf[InvocationTargetException])
def evalAsDirective(g: Struct) {
{
var i: Int = 0
while (i < primitive_args.length) {
{
primitive_args(i) = g.getTerm(i)
}
({
i += 1; i - 1
})
}
}
InspectionUtils.methodCall(source, _method,primitive_args.asInstanceOf[Array[Object]])
}
/**
* evaluates the primitive as a predicate
* @throws Exception if invocation primitive failure
*/
@throws(classOf[Throwable])
def evalAsPredicate(g: Struct): Boolean = {
{
var i: Int = 0
while (i < primitive_args.length) {
{
primitive_args(i) = g.getArg(i)
}
({
i += 1; i - 1
})
}
}
try {
InspectionUtils.methodCall(source, _method,primitive_args.asInstanceOf[Array[Object]]).asInstanceOf[Boolean].booleanValue
}
catch {
case e: InvocationTargetException => {
throw e.getCause
}
}
}
/**
* evaluates the primitive as a functor
* @throws Throwable
*/
@throws(classOf[Throwable])
def evalAsFunctor(g: Struct): Term = {
try { {
var i: Int = 0
while (i < primitive_args.length) {
{
primitive_args(i) = g.getTerm(i)
}
({
i += 1; i - 1
})
}
}
InspectionUtils.methodCall(source, _method,primitive_args.asInstanceOf[Array[Object]]).asInstanceOf[Term]
}
catch {
case ex: Exception => {
throw ex.getCause
}
}
}
override def toString: String = {
"[ primitive: method " + _method.getName + " - " + primitive_args + " - N args: " + primitive_args.length + " - " + source.getClass.getName + " ]\\n"
}
}
|
zakski/project-soisceal
|
scala-core/src/main/scala/com/szadowsz/gospel/core/db/primitive/PrimitiveInfo.scala
|
Scala
|
lgpl-3.0
| 4,282
|
package com.pauldoo.euler.puzzle
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/** Checks Puzzle9's answer (Project Euler problem 9) against the known value. */
@RunWith(classOf[JUnitRunner])
class Puzzle9Test extends PuzzleTest {
  def puzzle = Puzzle9;
  def expectedAnswer = 31875000;
}
|
pauldoo/projecteuler
|
test/com/pauldoo/euler/puzzle/Puzzle9Test.scala
|
Scala
|
isc
| 233
|
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package kafka.manager
import akka.actor.{ActorRef, Cancellable, ActorPath}
import scala.collection.mutable
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.Try
/**
* @author hiral
*/
import ActorModel._
/**
 * Configuration for BrokerViewCacheActor.
 * `updatePeriod` is the interval between self-sent BVForceUpdate refresh messages.
 */
case class BrokerViewCacheActorConfig(kafkaStateActorPath: ActorPath,
                                      clusterConfig: ClusterConfig,
                                      longRunningPoolConfig: LongRunningPoolConfig,
                                      updatePeriod: FiniteDuration = 10 seconds)
class BrokerViewCacheActor(config: BrokerViewCacheActorConfig) extends LongRunningPoolActor {
// --- Cached state, refreshed on each BVForceUpdate tick (actor-confined) ---
private[this] val ZERO = BigDecimal(0)
// Handle to the scheduled refresh task, so it can be cancelled in postStop.
private[this] var cancellable : Option[Cancellable] = None
private[this] var topicIdentities : Map[String, TopicIdentity] = Map.empty
private[this] var topicDescriptionsOption : Option[TopicDescriptions] = None
private[this] var brokerListOption : Option[BrokerList] = None
private[this] var brokerMetrics : Map[Int, BrokerMetrics] = Map.empty
// Per-broker view, keyed by broker id.
private[this] val brokerTopicPartitions : mutable.Map[Int, BVView] = new mutable.HashMap[Int, BVView]
// Per-topic, per-broker metrics.
private[this] val topicMetrics: mutable.Map[String, mutable.Map[Int, BrokerMetrics]] =
  new mutable.HashMap[String, mutable.Map[Int, BrokerMetrics]]()
// Cluster-wide aggregate, used to compute per-broker percentages.
private[this] var combinedBrokerMetric : Option[BrokerMetrics] = None
private[this] val EMPTY_BVVIEW = BVView(Map.empty, config.clusterConfig, Option(BrokerMetrics.DEFAULT))
/** Logs startup and schedules the periodic BVForceUpdate self-message. */
override def preStart() = {
  log.info("Started actor %s".format(self.path))
  log.info("Scheduling updater for %s".format(config.updatePeriod))
  cancellable = Some(
    context.system.scheduler.schedule(0 seconds,
      config.updatePeriod,
      self,
      BVForceUpdate)(context.system.dispatcher,self)
  )
}
/** Cancels the scheduled updater; Try swallows any cancellation failure. */
@scala.throws[Exception](classOf[Exception])
override def postStop(): Unit = {
  log.info("Stopped actor %s".format(self.path))
  log.info("Cancelling updater...")
  Try(cancellable.map(_.cancel()))
  super.postStop()
}
// Thread-pool settings for long-running work, taken from the actor config.
override protected def longRunningPoolConfig: LongRunningPoolConfig = config.longRunningPoolConfig

/** Called when the long-running pool queue is saturated; work is dropped. */
override protected def longRunningQueueFull(): Unit = {
  log.error("Long running pool queue full, skipping!")
}
private def produceBViewWithBrokerClusterState(bv: BVView) : BVView = {
val bcs = for {
metrics <- bv.metrics
cbm <- combinedBrokerMetric
} yield {
val perMessages = if(cbm.messagesInPerSec.oneMinuteRate > 0) {
BigDecimal(metrics.messagesInPerSec.oneMinuteRate / cbm.messagesInPerSec.oneMinuteRate * 100D).setScale(3, BigDecimal.RoundingMode.HALF_UP)
} else ZERO
val perIncoming = if(cbm.bytesInPerSec.oneMinuteRate > 0) {
BigDecimal(metrics.bytesInPerSec.oneMinuteRate / cbm.bytesInPerSec.oneMinuteRate * 100D).setScale(3, BigDecimal.RoundingMode.HALF_UP)
} else ZERO
val perOutgoing = if(cbm.bytesOutPerSec.oneMinuteRate > 0) {
BigDecimal(metrics.bytesOutPerSec.oneMinuteRate / cbm.bytesOutPerSec.oneMinuteRate * 100D).setScale(3, BigDecimal.RoundingMode.HALF_UP)
} else ZERO
BrokerClusterStats(perMessages, perIncoming, perOutgoing)
}
if(bcs.isDefined) {
bv.copy(stats=bcs)
} else {
bv
}
}
private def allBrokerViews(): Seq[BVView] = {
var bvs = mutable.MutableList[BVView]()
for (key <- brokerTopicPartitions.keySet.toSeq.sorted) {
val bv = brokerTopicPartitions.get(key).map { bv => produceBViewWithBrokerClusterState(bv) }
if (bv.isDefined) {
bvs += bv.get
}
}
bvs.asInstanceOf[Seq[BVView]]
}
override def processActorRequest(request: ActorRequest): Unit = {
request match {
case BVForceUpdate =>
log.info("Updating broker view...")
//ask for topic descriptions
val lastUpdateMillisOption: Option[Long] = topicDescriptionsOption.map(_.lastUpdateMillis)
context.actorSelection(config.kafkaStateActorPath).tell(KSGetAllTopicDescriptions(lastUpdateMillisOption), self)
context.actorSelection(config.kafkaStateActorPath).tell(KSGetBrokers, self)
case BVGetViews =>
sender ! allBrokerViews()
case BVGetView(id) =>
sender ! brokerTopicPartitions.get(id).map { bv =>
produceBViewWithBrokerClusterState(bv)
}
case BVGetBrokerMetrics =>
sender ! brokerMetrics
case BVGetTopicMetrics(topic) =>
sender ! topicMetrics.get(topic).map(m => m.values.foldLeft(BrokerMetrics.DEFAULT)((acc,bm) => acc + bm))
case BVGetTopicIdentities =>
sender ! topicIdentities
case BVUpdateTopicMetricsForBroker(id, metrics) =>
metrics.foreach {
case (topic, bm) =>
val tm = topicMetrics.getOrElse(topic, new mutable.HashMap[Int, BrokerMetrics])
tm.put(id, bm)
topicMetrics.put(topic, tm)
}
case BVUpdateBrokerMetrics(id, metrics) =>
brokerMetrics += (id -> metrics)
combinedBrokerMetric = Option(brokerMetrics.values.foldLeft(BrokerMetrics.DEFAULT)((acc, m) => acc + m))
val updatedBVView = brokerTopicPartitions.getOrElse(id, EMPTY_BVVIEW).copy(metrics = Option(metrics))
brokerTopicPartitions.put(id, updatedBVView)
case any: Any => log.warning("bvca : processActorRequest : Received unknown message: {}", any)
}
}
override def processActorResponse(response: ActorResponse): Unit = {
response match {
case td: TopicDescriptions =>
topicDescriptionsOption = Some(td)
updateView()
case bl: BrokerList =>
brokerListOption = Some(bl)
updateView()
case any: Any => log.warning("bvca : processActorResponse : Received unknown message: {}", any)
}
}
private[this] def updateView(): Unit = {
for {
brokerList <- brokerListOption
topicDescriptions <- topicDescriptionsOption
} {
val topicIdentity : IndexedSeq[TopicIdentity] = topicDescriptions.descriptions.map(
TopicIdentity.from(brokerList.list.size,_,None, config.clusterConfig))
topicIdentities = topicIdentity.map(ti => (ti.topic, ti)).toMap
val topicPartitionByBroker = topicIdentity.flatMap(
ti => ti.partitionsByBroker.map(btp => (ti,btp.id,btp.partitions))).groupBy(_._2)
//check for 2*broker list size since we schedule 2 jmx calls for each broker
if (config.clusterConfig.jmxEnabled && hasCapacityFor(2*brokerListOption.size)) {
implicit val ec = longRunningExecutionContext
val brokerLookup = brokerList.list.map(bi => bi.id -> bi).toMap
topicPartitionByBroker.foreach {
case (brokerId, topicPartitions) =>
val brokerInfoOpt = brokerLookup.get(brokerId)
brokerInfoOpt.foreach {
broker =>
longRunning {
Future {
val tryResult = KafkaJMX.doWithConnection(broker.host, broker.jmxPort) {
mbsc =>
topicPartitions.map {
case (topic, id, partitions) =>
(topic.topic,
KafkaMetrics.getBrokerMetrics(config.clusterConfig.version, mbsc, Option(topic.topic)))
}
}
val result = tryResult match {
case scala.util.Failure(t) =>
log.error(t, s"Failed to get topic metrics for broker $broker")
topicPartitions.map {
case (topic, id, partitions) =>
(topic.topic, BrokerMetrics.DEFAULT)
}
case scala.util.Success(bm) => bm
}
self.tell(BVUpdateTopicMetricsForBroker(broker.id,result), ActorRef.noSender)
}
}
}
}
brokerList.list.foreach {
broker =>
longRunning {
Future {
val tryResult = KafkaJMX.doWithConnection(broker.host, broker.jmxPort) {
mbsc =>
KafkaMetrics.getBrokerMetrics(config.clusterConfig.version, mbsc)
}
val result = tryResult match {
case scala.util.Failure(t) =>
log.error(t, s"Failed to get broker metrics for $broker")
BrokerMetrics.DEFAULT
case scala.util.Success(bm) => bm
}
self.tell(BVUpdateBrokerMetrics(broker.id,result), ActorRef.noSender)
}
}
}
} else if(config.clusterConfig.jmxEnabled) {
log.warning("Not scheduling update of JMX for all brokers, not enough capacity!")
}
topicPartitionByBroker.foreach {
case (brokerId, topicPartitions) =>
val topicPartitionsMap : Map[TopicIdentity, IndexedSeq[Int]] = topicPartitions.map {
case (topic, id, partitions) =>
(topic, partitions)
}.toMap
brokerTopicPartitions.put(
brokerId,BVView(topicPartitionsMap, config.clusterConfig, brokerMetrics.get(brokerId)))
}
}
}
}
|
LastManStanding/kafka-manager
|
app/kafka/manager/BrokerViewCacheActor.scala
|
Scala
|
apache-2.0
| 9,435
|
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.compat.java8
import java.time.{Duration => JavaDuration}
import org.junit.Assert._
import org.junit.Test
import scala.util.Try
/**
 * Round-trip tests for scala <-> java.time.Duration conversions, exercising the
 * boundary values of each time unit and the unsupported-range failure path.
 */
class DurationConvertersTest {
  import DurationConverters._
  import scala.concurrent.duration._

  /**
   * Converts every input value and asserts the resulting java Duration splits
   * into the expected (seconds, nanos-of-second) parts.
   */
  private def assertParts(unit: String, cases: Seq[(Long, (Long, Int))])(convert: Long => JavaDuration): Unit = {
    for ((value, (expSecs, expNanos)) <- cases) {
      val converted = convert(value)
      assertEquals(s"toJava($value $unit) -> $expSecs s)", expSecs, converted.getSeconds)
      assertEquals(s"toJava($value $unit) -> $expNanos n)", expNanos, converted.getNano)
    }
  }

  @Test
  def scalaNanosToJavaDuration(): Unit = {
    assertParts("nanos", Seq[(Long, (Long, Int))](
      (Long.MinValue + 1) -> (-9223372037L, 145224193), // because java duration nanos are offset from the "wrong" direction
      -1000000001L -> (-2, 999999999),
      -1L -> (-1, 999999999),
      0L -> (0, 0),
      1L -> (0, 1),
      1000000001L -> (1, 1),
      Long.MaxValue -> (9223372036L, 854775807)
    ))(_.nanos.toJava)
  }

  @Test
  def scalaMilliSecondsToJavaDuration(): Unit = {
    assertParts("millis", Seq[(Long, (Long, Int))](
      -9223372036854L -> (-9223372037L, 146000000),
      -1L -> (-1L, 999000000),
      0L -> (0L, 0),
      1L -> (0L, 1000000),
      9223372036854L -> (9223372036L, 854000000)
    ))(_.millis.toJava)
  }

  @Test
  def scalaMicroSecondsToJavaDuration(): Unit = {
    assertParts("micros", Seq[(Long, (Long, Int))](
      -9223372036854775L -> (-9223372037L, 145225000),
      -1L -> (-1L, 999999000),
      0L -> (0L, 0),
      1L -> (0L, 1000),
      9223372036854775L -> (9223372036L, 854775000)
    ))(_.micros.toJava)
  }

  @Test
  def scalaSecondsToJavaDuration(): Unit = {
    // Whole seconds carry no nanos part; asserted without custom messages.
    val cases = Seq[(Long, (Long, Int))](
      -9223372036L -> (-9223372036L, 0),
      -1L -> (-1L, 0),
      0L -> (0L, 0),
      1L -> (1L, 0),
      9223372036L -> (9223372036L, 0)
    )
    for ((value, (expSecs, expNanos)) <- cases) {
      val converted = value.seconds.toJava
      assertEquals(expSecs, converted.getSeconds)
      assertEquals(expNanos, converted.getNano)
    }
  }

  @Test
  def javaSecondsToScalaDuration(): Unit = {
    for (secs <- Seq[Long](-9223372036L, -1L, 0L, 1L, 9223372036L)) {
      assertEquals(secs, toScala(JavaDuration.ofSeconds(secs)).toSeconds)
    }
  }

  @Test
  def javaNanosPartToScalaDuration(): Unit = {
    val nanosPerSecond = 1000000000L
    for (nanos <- Seq[Long](-nanosPerSecond - 1L, 0L, 1L, nanosPerSecond - 1L)) {
      assertEquals(nanos, toScala(JavaDuration.ofNanos(nanos)).toNanos)
    }
  }

  @Test
  def unsupportedJavaDurationThrows(): Unit = {
    // Java Durations outside the scala Duration range must fail the conversion.
    for (d <- Seq(JavaDuration.ofSeconds(-9223372037L), JavaDuration.ofSeconds(9223372037L))) {
      val res = Try { toScala(d) }
      assertTrue(s"Expected exception for $d but got success", res.isFailure)
    }
  }
}
|
scala/scala-java8-compat
|
src/test/scala/scala/compat/java8/DurationConvertersTest.scala
|
Scala
|
apache-2.0
| 3,588
|
package org.jetbrains.plugins.scala.components.libinjection
import java.io._
import java.net.URL
import java.util
import java.util.concurrent.atomic.AtomicBoolean
import com.intellij.ide.plugins.cl.PluginClassLoader
import com.intellij.notification._
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.components.AbstractProjectComponent
import com.intellij.openapi.diagnostic.Logger
import com.intellij.openapi.module._
import com.intellij.openapi.progress.ProgressManager
import com.intellij.openapi.progress.util.ProgressIndicatorBase
import com.intellij.openapi.project.{DumbService, Project}
import com.intellij.openapi.roots.libraries.{Library, LibraryTable, LibraryTablesRegistrar}
import com.intellij.openapi.util.io.FileUtil
import com.intellij.openapi.vfs.{JarFileSystem, VirtualFile, VirtualFileManager}
import com.intellij.psi.search.{FilenameIndex, GlobalSearchScope}
import org.jetbrains.plugins.scala.compiler.CompileServerLauncher
import org.jetbrains.plugins.scala.components.ScalaPluginVersionVerifier
import org.jetbrains.plugins.scala.components.ScalaPluginVersionVerifier.Version
import org.jetbrains.plugins.scala.debugger.evaluation.EvaluationException
import org.jetbrains.plugins.scala.project._
import org.jetbrains.plugins.scala.settings.ScalaProjectSettings
import org.jetbrains.plugins.scala.util.ScalaUtil
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal
/**
 * Serializable on-disk cache mapping a library jar path to its parsed injector
 * manifest, tagged with the plugin version that produced it (so a plugin update
 * invalidates the cache).
 *
 * @param pluginVersion plugin version the cache was written by
 * @param cache jarFilePath -> jarManifest
 */
@SerialVersionUID(-8361292897316544896L)
case class InjectorPersistentCache(pluginVersion: Version, cache: java.util.HashMap[String, JarManifest]) {

  /** Creates the injector cache directory if it does not exist yet. */
  def ensurePathExists(): Unit = {
    if (!LibraryInjectorLoader.myInjectorCacheDir.exists())
      FileUtil.createDirectory(LibraryInjectorLoader.myInjectorCacheDir)
  }

  /**
   * Serializes this cache to the on-disk index file. Recoverable failures are
   * reported via [[Error.cacheSaveError]] rather than rethrown; the stream is
   * always closed.
   */
  def saveJarCache(): Unit = {
    ensurePathExists()
    val stream = new ObjectOutputStream(
      new BufferedOutputStream(
        new FileOutputStream(LibraryInjectorLoader.myInjectorCacheIndex)
      ))
    try {
      stream.writeObject(this)
      stream.flush()
    } catch {
      // FIX: was `case e: Throwable`, which also swallowed fatal errors
      // (OutOfMemoryError, etc.). NonFatal lets those propagate.
      case NonFatal(e) =>
        Error.cacheSaveError(e)
    } finally {
      stream.close()
    }
  }
}
object InjectorPersistentCache {
  /**
   * Deserializes the injector cache index from disk. On any recoverable failure
   * (missing file, corrupt stream, class mismatch) logs a warning and returns a
   * fresh empty cache tagged with the current plugin version.
   */
  def loadJarCache: InjectorPersistentCache = {
    import LibraryInjectorLoader.LOG
    var stream: ObjectInputStream = null
    try {
      stream = new ObjectInputStream(new BufferedInputStream(new FileInputStream(LibraryInjectorLoader.myInjectorCacheIndex)))
      val cache = stream.readObject().asInstanceOf[InjectorPersistentCache]
      LOG.trace(s"Loaded cache with ${cache.cache.size()} entries")
      cache
    } catch {
      // FIX: was `case e: Throwable` — deliberately best-effort, but it also
      // swallowed fatal errors. NonFatal keeps the fallback while letting
      // OutOfMemoryError and friends propagate.
      case NonFatal(e) =>
        LOG.warn(s"Failed to load injector cache, continuing with empty(${e.getMessage})")
        InjectorPersistentCache(ScalaPluginVersionVerifier.getPluginVersion.getOrElse(Version.Snapshot), new util.HashMap())
    } finally {
      if (stream != null) stream.close()
    }
  }
}
/**
 * Project component that discovers "injector" extensions embedded in project
 * libraries (via an `intellij-compat.xml` manifest inside the jar), asks the
 * user to enable them, compiles their sources with the Scala compile server,
 * and loads the resulting classes through a dedicated mutable classloader.
 */
class LibraryInjectorLoader(val project: Project) extends AbstractProjectComponent(project) {

  import LibraryInjectorLoader.{LOG, _}

  // URLClassLoader that exposes addURL so compiled injector output dirs can be appended at runtime.
  class DynamicClassLoader(urls: Array[URL], parent: ClassLoader) extends java.net.URLClassLoader(urls, parent) {
    def addUrl(url: URL): Unit = {
      super.addURL(url)
    }
  }

  type AttributedManifest = (JarManifest, Seq[InjectorDescriptor])
  type ManifestToDescriptors = Seq[AttributedManifest]

  private val myListeners = mutable.HashSet[InjectorsLoadedListener]()
  // Rooted at the injector cache dir so previously compiled injectors are visible.
  private val myClassLoader = new DynamicClassLoader(Array(myInjectorCacheDir.toURI.toURL), this.getClass.getClassLoader)
  private val initialized = new AtomicBoolean(false)
  // In unit-test mode auto-acknowledge; otherwise ask the user via UI notifications.
  private val ackProvider = {
    if (ApplicationManager.getApplication.isUnitTestMode)
      new TestAcknowledgementProvider
    else
      new UIAcknowledgementProvider(GROUP, project)(LOG)
  }

  // reset cache if plugin has been updated
  // cache: jarFilePath -> jarManifest
  private var jarCache: InjectorPersistentCache = _
  def getJarCache: InjectorPersistentCache = jarCache

  // interface class -> implementation class names loaded for it
  private val loadedInjectors: mutable.HashMap[Class[_], mutable.HashSet[String]] = mutable.HashMap()

  // Re-run injector discovery when a (non-internal) library is added or removed.
  private val myLibraryTableListener = new LibraryTable.Listener {

    val skippedLibs = Array(HELPER_LIBRARY_NAME, "scala-sdk", ScalaLibraryName)

    override def afterLibraryRenamed(library: Library): Unit = ()

    override def beforeLibraryRemoved(library: Library): Unit = ()

    override def afterLibraryRemoved(newLibrary: Library): Unit = {
      if (!skippedLibs.contains(newLibrary.getName))
        initialized.set(false)
    }

    override def afterLibraryAdded(newLibrary: Library): Unit = {
      if (!skippedLibs.contains(newLibrary.getName))
        initialized.set(false)
    }
  }

  override def projectOpened(): Unit = {
    myInjectorCacheDir.mkdirs()
    LibraryTablesRegistrar.getInstance().getLibraryTable(project).addListener(myLibraryTableListener)
    jarCache = verifyAndLoadCache
    // init()
  }

  override def projectClosed(): Unit = {
    // Persist the manifest cache so the next open can skip re-parsing jars.
    jarCache.saveJarCache()
    LibraryTablesRegistrar.getInstance().getLibraryTable(project).removeListener(myLibraryTableListener)
  }

  def addListener(l: InjectorsLoadedListener) {
    myListeners += l
  }

  def deleteListener(l: InjectorsLoadedListener) {
    myListeners remove l
  }

  /** Loads cached injectors and rescans project jars once indexing is done (if extensions are enabled). */
  def init(): Unit = {
    initialized.set(true)
    if (ScalaProjectSettings.getInstance(project).isEnableLibraryExtensions) {
      DumbService.getInstance(project).smartInvokeLater {
        toRunnable {
          loadCachedInjectors()
          rescanAllJars()
        }
      }
    }
  }

  def conditionalInit(): Unit = if (!initialized.get()) init()

  override def getComponentName: String = "ScalaLibraryInjectorLoader"

  @inline def invokeLater(f: => Unit): Unit = ApplicationManager.getApplication.invokeLater(toRunnable(f))

  @inline def toRunnable(f: => Unit): Runnable = new Runnable { override def run(): Unit = f }

  @inline def inReadAction(f: => Unit): Unit = ApplicationManager.getApplication.runReadAction(toRunnable(f))

  // NOTE(review): takes f: => T but discards the result (returns Unit).
  @inline def inWriteAction[T](f: => T): Unit = ApplicationManager.getApplication.runWriteAction(toRunnable(f))

  /** All loaded injector implementation classes registered for the given interface. */
  def getInjectorClasses[T](interface: Class[T]): Seq[Class[T]] = {
    if (!initialized.get()) init()
    loadedInjectors.getOrElse(interface, Seq.empty).map(myClassLoader.loadClass(_).asInstanceOf[Class[T]]).toSeq
  }

  /** Fresh instances of all loaded injectors for the given interface (requires no-arg constructors). */
  def getInjectorInstances[T](interface: Class[T]): Seq[T] = {
    if (!initialized.get()) init()
    getInjectorClasses(interface).map(_.newInstance())
  }

  /**
   * Validates a manifest: checks jar existence, timestamp sanity, that each
   * injector has resolvable source roots and a loadable interface class, and
   * that a plugin descriptor matches the current plugin version. Returns the
   * manifest narrowed down to the single matching, verified descriptor.
   */
  private def verifyManifest(manifest: JarManifest): Option[JarManifest] = {
    def verifyInjector(injector: InjectorDescriptor): Option[InjectorDescriptor] = {
      if (injector.sources.isEmpty) {
        LOG.warn(s"Injector $injector has no sources, skipping")
        None
      } else {
        val sourcesValid = injector.sources.forall { source =>
          VirtualFileManager.getInstance.findFileByUrl(s"jar://${manifest.jarPath}!/$source") != null
        }
        if (!sourcesValid) {
          LOG.warn(s"Injector $injector has invalid source roots")
          None
        } else {
          try {
            myClassLoader.loadClass(injector.iface)
            Some(injector)
          } catch {
            case _: ClassNotFoundException =>
              LOG.warn(s"Interface class ${injector.iface} not found, skipping injector")
              None
            case NonFatal(e) =>
              LOG.warn(s"Error while verifying injector interface - ${e.getMessage}, skipping")
              None
          }
        }
      }
    }

    def verifyDescriptor(descriptor: PluginDescriptor): Option[PluginDescriptor] = {
      if (descriptor.since > descriptor.until || descriptor.since == descriptor.until) {
        LOG.warn(s"Plugin descriptor since >= until in $descriptor")
        None
      } else if (descriptor.injectors.isEmpty) {
        LOG.warn(s"Plugin descriptor has no injectors in $descriptor")
        None
      } else {
        val checkedInjectors = descriptor.injectors.flatMap(verifyInjector)
        if (checkedInjectors.nonEmpty)
          Some(descriptor.copy(injectors = checkedInjectors))
        else {
          LOG.warn(s"Descriptor $descriptor has no valid injectors, skipping")
          None
        }
      }
    }

    // The next three checks only warn; verification proceeds regardless.
    if (!new File(manifest.jarPath).exists)
      LOG.warn(s"Manifest has wrong JAR path(jar doesn't exist) - ${manifest.jarPath}")
    if (manifest.modTimeStamp > System.currentTimeMillis())
      LOG.warn(s"Manifest timestamp for ${manifest.jarPath} is in the future")
    if (manifest.pluginDescriptors.isEmpty) {
      LOG.warn(s"Manifest for ${manifest.jarPath} has no plugin descriptors")
    }
    val checkedDescriptor = findMatchingPluginDescriptor(manifest) match {
      case Some(descriptor) => verifyDescriptor(descriptor)
      case None =>
        LOG.info(s"No extensions found for current IDEA version")
        None
    }
    checkedDescriptor.map(descriptor => manifest.copy(pluginDescriptors = Seq(descriptor))(manifest.isBlackListed, manifest.isLoaded))
  }

  /** Loads injectors from the persisted cache for jars still present in the project; evicts stale entries. */
  private def loadCachedInjectors(): Unit = {
    val allProjectJars = getAllJarsWithManifest.map(_.getPath).toSet
    // Cache keys are raw jar paths; project jar paths end in "!/" (jar FS roots).
    val cachedProjectJars = jarCache.cache.asScala.filter(cacheItem => allProjectJars.contains(s"${cacheItem._1}!/")).values
    var numLoaded = 0
    for (manifest <- cachedProjectJars if !manifest.isBlackListed) {
      if (isJarCacheUpToDate(manifest)) {
        for (injector <- findMatchingInjectors(manifest)) {
          loadInjector(manifest, injector)
          jarCache.cache.put(manifest.jarPath, manifest.copy()(isBlackListed = false, isLoaded = true))
          numLoaded += 1
        }
      } else {
        jarCache.cache.remove(manifest.jarPath)
      }
    }
    LOG.trace(s"Loaded injectors from $numLoaded jars (${cachedProjectJars.size - numLoaded} filtered)")
  }

  /** Scans all project jars for manifests not yet in the cache and asks the user about new candidates. */
  private def rescanAllJars(): Unit = {
    val parsedManifests = getAllJarsWithManifest.flatMap(f=>extractLibraryManifest(f))
      .filterNot(jarCache.cache.values().contains)
    val validManifests = parsedManifests.flatMap(verifyManifest)
    val candidates = validManifests.map(manifest => manifest -> findMatchingInjectors(manifest))
    LOG.trace(s"Found ${candidates.size} new jars with embedded extensions")
    if (candidates.nonEmpty)
      askUser(candidates) else myListeners.foreach(_.onLoadingCompleted())
  }

  /** Jar roots (in the jar file system) of every project jar containing an injector manifest. */
  private def getAllJarsWithManifest: Seq[VirtualFile] = {
    val jarFS = JarFileSystem.getInstance
    val psiFiles = FilenameIndex.getFilesByName(project, INJECTOR_MANIFEST_NAME, GlobalSearchScope.allScope(project))
    psiFiles.map(f => jarFS.getJarRootForLocalFile(jarFS.getVirtualFileForJar(f.getVirtualFile)))
  }

  /** A cache entry is fresh when the jar still exists, is unmodified, and its compiled output dir is non-empty. */
  private def isJarCacheUpToDate(manifest: JarManifest): Boolean = {
    val jarFile = new File(manifest.jarPath)
    jarFile.exists() &&
      jarFile.isFile &&
      (jarFile.lastModified() == manifest.modTimeStamp) &&
      getLibraryCacheDir(jarFile).list().nonEmpty
  }

  /** Parses META-INF/intellij-compat.xml from the jar, optionally dropping manifests with no matching injectors. */
  private def extractLibraryManifest(jar: VirtualFile, skipIncompatible: Boolean = true): Option[JarManifest] = {
    val manifestFile = Option(jar.findFileByRelativePath(s"META-INF/$INJECTOR_MANIFEST_NAME"))
    manifestFile
      .map(JarManifest.deserialize(_, jar))
      .filterNot(m => skipIncompatible && findMatchingInjectors(m).isEmpty)
  }

  /** Compiles injector sources via the compile server into outDir; returns produced files or raises a compilation error. */
  private def compileInjectorFromLibrary(sources: Seq[File], outDir: File, m: Module): Seq[File] = {
    val platformJars = collectPlatformJars()
    CompileServerLauncher.ensureServerRunning(project)
    val connector = new InjectorServerConnector(m, sources, outDir, platformJars)
    try {
      connector.compile() match {
        case Left(output) => output.map(_._1)
        case Right(errors) => throw EvaluationException(errors.mkString("\\n"))
      }
    }
    catch {
      case e: Exception => Error.compilationError("Could not compile:\\n" + e.getMessage)
    }
  }

  /** Registers the compiled injector's output dir with the classloader and records its implementations. */
  private def loadInjector(jarManifest: JarManifest, injectorDescriptor: InjectorDescriptor): Unit = {
    myClassLoader.addUrl(getInjectorCacheDir(jarManifest)(injectorDescriptor).toURI.toURL)
    val injectors = findMatchingInjectors(jarManifest)
    for (injector <- injectors) {
      // NOTE(review): the interface is resolved with getClass.getClassLoader here,
      // while verification used myClassLoader — presumably equivalent since
      // myClassLoader delegates to it; confirm if iface classes can live elsewhere.
      loadedInjectors
        .getOrElseUpdate(
          getClass.getClassLoader.loadClass(injector.iface),
          mutable.HashSet(injector.impl)
        ) += injector.impl
    }
  }

  /** Picks the plugin descriptor whose (since, until) range contains the current plugin version (snapshots match anything). */
  private def findMatchingPluginDescriptor(libraryManifest: JarManifest): Option[PluginDescriptor] = {
    val curVer = ScalaPluginVersionVerifier.getPluginVersion.getOrElse(Version.Snapshot)
    libraryManifest.pluginDescriptors
      .find(d => (curVer > d.since && curVer < d.until) || curVer.isSnapshot)
  }

  private def findMatchingInjectors(libraryManifest: JarManifest): Seq[InjectorDescriptor] = {
    findMatchingPluginDescriptor(libraryManifest).map(_.injectors).getOrElse(Seq.empty)
  }

  // don't forget to remove temp directory after compilation
  /** Copies the injector's source files out of the jar into a temp directory for compilation. */
  private def extractInjectorSources(jar: File, injectorDescriptor: InjectorDescriptor): Seq[File] = {
    val tmpDir = ScalaUtil.createTmpDir("inject")
    def copyToTmpDir(virtualFile: VirtualFile): File = {
      val target = new File(tmpDir, virtualFile.getName)
      val targetStream = new BufferedOutputStream(new FileOutputStream(target))
      try {
        target.createNewFile()
        FileUtil.copy(virtualFile.getInputStream, targetStream)
        target
      } finally {
        targetStream.close()
      }
    }
    if (tmpDir.exists()) {
      val root = VirtualFileManager.getInstance().findFileByUrl("jar://"+jar.getAbsolutePath+"!/")
      if (root != null) {
        injectorDescriptor.sources.flatMap(path => {
          Option(root.findFileByRelativePath(path)).map { f =>
            if (f.isDirectory)
              f.getChildren.filter(!_.isDirectory).map(copyToTmpDir).toSeq
            else
              Seq(copyToTmpDir(f))
          }.getOrElse(Seq.empty)
        })
      } else {
        Error.noJarFound(jar)
      }
    } else {
      Error.extractFailed(injectorDescriptor.impl, tmpDir)
    }
  }

  /** Asks the user to enable extensions globally; on acceptance, reviews and compiles the candidates. */
  private def askUser(candidates: ManifestToDescriptors): Unit = {
    ackProvider.askGlobalInjectorEnable(acceptCallback = compile(showReviewDialogAndFilter(candidates)))
  }

  /** Shows the review dialog; blacklists rejected manifests and returns the accepted ones. */
  private def showReviewDialogAndFilter(candidates: ManifestToDescriptors): ManifestToDescriptors = {
    val (accepted, rejected) = ackProvider.showReviewDialogAndFilter(candidates)
    for ((manifest, _) <- rejected) {
      jarCache.cache.put(manifest.jarPath, manifest.copy()(isBlackListed = true, isLoaded = false))
    }
    accepted
  }

  /**
   * Compiles and loads all accepted injectors inside a temporary helper module,
   * reporting success/failure counts via a notification and to listeners.
   */
  private def compile(data: ManifestToDescriptors): Unit = {
    if (data.isEmpty) return
    val indicator = new ProgressIndicatorBase()
    indicator.setIndeterminate(true)
    val startTime = System.currentTimeMillis()
    var numSuccessful, numFailed = 0
    LOG.trace(s"Compiling ${data.size} injectors from ${data.size} jars")
    runWithHelperModule { module =>
      ProgressManager.getInstance().runProcess(toRunnable {
        for ((manifest, injectors) <- data) {
          for (injectorDescriptor <- injectors) {
            try {
              compileInjectorFromLibrary(
                extractInjectorSources(new File(manifest.jarPath), injectorDescriptor),
                getInjectorCacheDir(manifest)(injectorDescriptor),
                module
              )
              numSuccessful += 1
              loadInjector(manifest, injectorDescriptor)
              jarCache.cache.put(manifest.jarPath, manifest.copy()(isBlackListed = false, isLoaded = true))
            } catch {
              case e: InjectorCompileException =>
                LOG.error("Failed to compile injector", e)
                numFailed += 1
            }
          }
        }
        val msg = if (numFailed == 0)
          s"Compiled $numSuccessful injector(s) in ${(System.currentTimeMillis() - startTime) / 1000} seconds"
        else
          s"Failed to compile $numFailed injectors out of ${numSuccessful+numFailed}, see Event Log for details"
        val notificationDisplayType = if (numFailed == 0) NotificationType.INFORMATION else NotificationType.ERROR
        GROUP.createNotification("IDEA Extensions", msg, notificationDisplayType, null).notify(project)
        LOG.trace(msg)
        myListeners.foreach(_.onLoadingCompleted())
      }, indicator)
    }
  }

  /** Per-jar cache directory, keyed by jar name + plugin version (dots replaced). */
  private def getLibraryCacheDir(jar: File): File = {
    val file = new File(myInjectorCacheDir,
      (jar.getName + ScalaPluginVersionVerifier.getPluginVersion.get.toString).replaceAll("\\\\.", "_")
    )
    file.mkdir()
    file
  }

  /** Output directory for one injector: libraryCacheDir / abs(hash of impl class name). */
  private def getInjectorCacheDir(jarManifest: JarManifest)(injectorDescriptor: InjectorDescriptor): File = {
    val jarName = new File(jarManifest.jarPath).getName
    val pluginVersion = ScalaPluginVersionVerifier.getPluginVersion.get.toString
    val libraryDir = new File(myInjectorCacheDir, (jarName + pluginVersion).replaceAll("\\\\.", "_"))
    val injectorDir = new File(libraryDir, injectorDescriptor.impl.hashCode.abs.toString)
    injectorDir.mkdirs()
    injectorDir
  }

  /** Collects plugin + application classpath jars to put on the injector compilation classpath. */
  private def collectPlatformJars(): Seq[File] = {

    val buffer: ArrayBuffer[File] = mutable.ArrayBuffer()

    // these are actually different classes calling different methods which are surprisingly called the same
    this.getClass.getClassLoader match {
      case cl: PluginClassLoader =>
        buffer ++= cl.getUrls.asScala.map(u => new File(u.getFile))
      case cl: java.net.URLClassLoader =>
        buffer ++= cl.getURLs.map(u => new File(u.getFile))
    }
    // get application classloader urls using reflection :(
    ApplicationManager.getApplication.getClass.getClassLoader match {
      case cl: java.net.URLClassLoader =>
        val v = cl.getClass.getMethods.find(_.getName == "getURLs")
          .map(_.invoke(ApplicationManager.getApplication.getClass.getClassLoader)
            .asInstanceOf[Array[URL]].map(u => new File(u.getFile))).getOrElse(Array())
        buffer ++= v
      case cl: com.intellij.util.lang.UrlClassLoader =>
        val v = cl.getClass.getMethods
          .find(_.getName == "getUrls")
          .map { method =>
            method.invoke(ApplicationManager.getApplication.getClass.getClassLoader)
              .asInstanceOf[java.util.List[URL]]
              .asScala
              .map(url => new File(url.getFile))
          }
          .getOrElse(Seq.empty)
        buffer ++= v
      case other =>
        // Unknown classloader type: still try a reflective getUrls as a last resort.
        val v =
          other.getClass.getMethods
            .find(_.getName == "getUrls")
            .map { method =>
              method
                .invoke(ApplicationManager.getApplication.getClass.getClassLoader)
                .asInstanceOf[java.util.List[URL]]
                .asScala
                .map(url => new File(url.getFile))
            }
            .getOrElse(Seq.empty)
        buffer ++= v
    }
    buffer
  }

  /** Creates a throwaway Java module wired to the project's Scala SDK for compiling injectors. */
  private def createIdeaModule(): Module = {
    import org.jetbrains.plugins.scala.project._

    val scalaSDK = project.modulesWithScala.head.scalaSdk.get
    val model = project.modifiableModel
    val module = model.newModule(ScalaUtil.createTmpDir("injectorModule").getAbsolutePath +
      "/" + INJECTOR_MODULE_NAME, JavaModuleType.getModuleType.getId)
    model.commit()
    module.configureScalaCompilerSettingsFrom("Default", Seq())
    module.attach(scalaSDK)
    module
  }

  /** Disposes the helper module created by createIdeaModule, if present. */
  private def removeIdeaModule(): Unit = {
    val model = project.modifiableModel
    val module = model.findModuleByName(INJECTOR_MODULE_NAME.replaceAll("\\\\.iml$", ""))
    if (module != null) {
      model.disposeModule(module)
      model.commit()
    } else {
      LOG.warn(s"Failed to remove helper module - $INJECTOR_MODULE_NAME not found")
    }
  }

  /** Runs f with a freshly created helper module inside a write action, always cleaning the module up. */
  private def runWithHelperModule[T](f: Module => T): Unit = {
    inWriteAction {
      val module = createIdeaModule()
      try {
        f(module)
      } finally {
        removeIdeaModule()
      }
    }
  }
}
object LibraryInjectorLoader {

  /** Callback invoked once injector loading (cached + newly compiled) has finished. */
  trait InjectorsLoadedListener {
    def onLoadingCompleted(): Unit
  }

  // Name of the helper library skipped by the library-table listener.
  val HELPER_LIBRARY_NAME    = "scala-plugin-dev"
  // Manifest file name looked up inside library jars (META-INF/intellij-compat.xml).
  val INJECTOR_MANIFEST_NAME = "intellij-compat.xml"
  val INJECTOR_MODULE_NAME   = "ijscala-plugin-injector-compile.iml" // TODO: use UUID
  // On-disk locations of compiled injectors and the serialized manifest index.
  val myInjectorCacheDir     = new File(ScalaUtil.getScalaPluginSystemPath + "injectorCache/")
  val myInjectorCacheIndex   = new File(ScalaUtil.getScalaPluginSystemPath + "injectorCache/libs.index")
  implicit val LOG: Logger = Logger.getInstance(getClass)
  private val GROUP = new NotificationGroup("Injector", NotificationDisplayType.STICKY_BALLOON, false)

  def getInstance(project: Project): LibraryInjectorLoader = project.getComponent(classOf[LibraryInjectorLoader])

  /** Discards the cache if it was written by a different plugin version. */
  private def verifyLibraryCache(cache: InjectorPersistentCache): InjectorPersistentCache = {
    if (ScalaPluginVersionVerifier.getPluginVersion.exists(_ != cache.pluginVersion))
      InjectorPersistentCache(ScalaPluginVersionVerifier.getPluginVersion.get, new util.HashMap())
    else
      cache
  }

  /** Loads the cache from disk and validates it against the current plugin version. */
  def verifyAndLoadCache: InjectorPersistentCache = {
    verifyLibraryCache(InjectorPersistentCache.loadJarCache)
  }
}
|
triplequote/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/components/libinjection/LibraryInjectorLoader.scala
|
Scala
|
apache-2.0
| 21,300
|
package models.dao
import play.api.db.slick.Profile
import org.joda.time.DateTime
/**
 * A user-submitted testimonial row.
 *
 * @param id database id (None before insertion)
 * @param authorName display name of the author
 * @param authorEmail optional contact email
 * @param text testimonial body
 * @param addedAt submission timestamp
 * @param isApproved whether a moderator has approved it for display
 */
case class TestimonialRecord(id: Option[Long], authorName: String, authorEmail: Option[String], text: String, addedAt: DateTime, isApproved: Boolean)
/** Slick (lifted embedding) mapping for the `testimonials` table; mixed into a profile-providing cake. */
trait TestimonialComponent { this: Profile =>

  import profile.simple._
  // Joda DateTime <-> SQL timestamp column mapping.
  import com.github.tototoshi.slick.JodaSupport._

  class Testimonials extends Table[TestimonialRecord]("testimonials") {
    def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
    def authorName = column[String]("author_name", O.NotNull)
    def authorEmail = column[Option[String]]("author_email")
    def text = column[String]("text", O.NotNull, O.DBType("text"))
    def addedAt = column[DateTime]("added_at", O.NotNull)
    def isApproved = column[Boolean]("is_approved", O.NotNull)

    // Default projection: column order must match TestimonialRecord's fields.
    def * = id.? ~ authorName ~ authorEmail ~ text ~ addedAt ~ isApproved <> (TestimonialRecord, TestimonialRecord.unapply _)

    // Insert projection returning the generated auto-increment id.
    def autoInc = * returning id
  }
}
|
vokhotnikov/sevstone-play
|
app/models/dao/Testimonial.scala
|
Scala
|
mit
| 968
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.radanalytics.streaming.amqp
import org.apache.qpid.proton.message.Message
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.amqp.ReliableAMQPReceiver
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.receiver.Receiver
import scala.reflect.ClassTag
/**
* Input stream that receives messages from an AMQP sender node
* @param ssc Spark Streaming context
* @param host AMQP container hostname or IP address to connect
* @param port AMQP container port to connect
* @param username Username for SASL PLAIN authentication
* @param password Password for SASL PLAIN authentication
* @param address AMQP node address on which receive messages
* @param messageConverter Callback for converting AMQP message to custom type at application level
* @param storageLevel RDD storage level
*/
class AMQPInputDStream[T: ClassTag](
    ssc: StreamingContext,
    host: String,
    port: Int,
    username: Option[String],
    password: Option[String],
    address: String,
    messageConverter: Message => Option[T],
    useReliableReceiver: Boolean,
    storageLevel: StorageLevel
  ) extends ReceiverInputDStream[T](ssc) {

  /**
   * Builds the receiver for this stream: the reliable variant when
   * `useReliableReceiver` is set, otherwise the basic AMQP receiver.
   */
  def getReceiver(): Receiver[T] = {
    if (useReliableReceiver) {
      new ReliableAMQPReceiver(host, port, username, password, address, messageConverter, storageLevel)
    } else {
      new AMQPReceiver(host, port, username, password, address, messageConverter, storageLevel)
    }
  }
}
|
redhatanalytics/dstream-amqp
|
src/main/scala/io/radanalytics/streaming/amqp/AMQPInputDStream.scala
|
Scala
|
apache-2.0
| 2,432
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.utils
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{DataFrame, Row}
import org.mockito.Mockito._
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfterAll, FunSpec, Matchers}
import play.api.libs.json.{JsArray, JsDefined, JsString, JsValue, Json}
import test.utils.SparkContextProvider
import scala.collection.mutable
class DataFrameConverterSpec extends FunSpec with MockitoSugar with Matchers with BeforeAndAfterAll {
  // Shared SparkContext used only to build the one-row test RDD below.
  lazy val spark = SparkContextProvider.sparkContext
  override protected def afterAll(): Unit = {
    spark.stop()
    super.afterAll()
  }
  val dataFrameConverter: DataFrameConverter = new DataFrameConverter
  // Mocked DataFrame backed by a real single-row RDD containing an array
  // column, an integer column, and a null — covering the rendering of each.
  val mockDataFrame = mock[DataFrame]
  val mockRdd = spark.parallelize(Seq(Row(new mutable.WrappedArray.ofRef(Array("test1", "test2")), 2, null)))
  val mockStruct = mock[StructType]
  val columns = Array("foo", "bar")
  // Stubbing must happen before any test body dereferences the mock.
  doReturn(mockStruct).when(mockDataFrame).schema
  doReturn(columns).when(mockStruct).fieldNames
  doReturn(mockRdd).when(mockDataFrame).rdd
  describe("DataFrameConverter") {
    describe("#convert") {
      it("should convert to a valid JSON object") {
        val someJson = dataFrameConverter.convert(mockDataFrame, "json")
        val jsValue = Json.parse(someJson.get)
        // NOTE(review): `\\` is play-json's recursive lookup; confirm `\` was
        // not intended for a direct top-level field lookup here.
        (jsValue \\ "columns").as[Array[JsValue]] should contain theSameElementsAs Array(JsString("foo"), JsString("bar"))
        (jsValue \\ "rows").as[Array[JsValue]] should contain theSameElementsAs Array(
          JsArray(Seq(JsString("[test1, test2]"), JsString("2"), JsString("null")))
        )
      }
      it("should convert to csv") {
        val csv = dataFrameConverter.convert(mockDataFrame, "csv").get
        val values = csv.split("\\n")
        values(0) shouldBe "foo,bar"
        values(1) shouldBe "[test1, test2],2,null"
      }
      it("should convert to html") {
        val html = dataFrameConverter.convert(mockDataFrame, "html").get
        html.contains("<th>foo</th>") should be(true)
        html.contains("<th>bar</th>") should be(true)
        html.contains("<td>[test1, test2]</td>") should be(true)
        html.contains("<td>2</td>") should be(true)
        html.contains("<td>null</td>") should be(true)
      }
      it("should convert limit the selection") {
        // limit = 1 row of data => header line + one data line.
        val someLimited = dataFrameConverter.convert(mockDataFrame, "csv", 1)
        val limitedLines = someLimited.get.split("\\n")
        limitedLines.length should be(2)
      }
      it("should return a Failure for invalid types") {
        val result = dataFrameConverter.convert(mockDataFrame, "Invalid Type")
        result.isFailure should be(true)
      }
    }
  }
}
|
apache/incubator-toree
|
kernel/src/test/scala/org/apache/toree/utils/DataFrameConverterSpec.scala
|
Scala
|
apache-2.0
| 3,505
|
package lila.puzzle
import play.api.data._
import play.api.data.Forms._
object DataForm {

  // Difficulty picker: an integer from 1 (easiest) to 3 (hardest).
  val difficulty = Form(
    single("difficulty" -> number(min = 1, max = 3))
  )

  // A puzzle attempt submission: win flag (1 = won) and the time taken.
  val attempt = Form(
    mapping(
      "win" -> number,
      "time" -> number
    )(AttemptData.apply)(AttemptData.unapply)
  )

  case class AttemptData(win: Int, time: Int) {
    // A win is encoded as the integer 1.
    def isWin = win == 1
  }

  // Up/down vote on a puzzle.
  val vote = Form(
    single("vote" -> number)
  )
}
|
Happy0/lila
|
modules/puzzle/src/main/DataForm.scala
|
Scala
|
mit
| 440
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ly.stealth.mesos.kafka.scheduler.http.api
import javax.ws.rs.core.Response
import javax.ws.rs.core.Response.StatusType
import net.elodina.mesos.util.{Constraint, Strings}
import scala.collection.JavaConverters._
import scala.collection.mutable
/** Mutable string-to-string map initialised from a serialised "k=v,..." string. */
class StringMap(value: String) extends mutable.HashMap[String, String] {
  for ((k, v) <- Strings.parseMap(value).asScala) this(k) = v
}
/** Mutable map from attribute name to parsed [[Constraint]], initialised from a serialised string. */
class ConstraintMap(value: String) extends mutable.HashMap[String, Constraint] {
  for ((k, v) <- Strings.parseMap(value).asScala) this(k) = new Constraint(v)
}
object Status {

  /** HTTP 400 status whose reason phrase carries a custom error message. */
  class BadRequest(reason: String) extends StatusType {
    private[this] val base = Response.Status.BAD_REQUEST
    override def getStatusCode: Int = base.getStatusCode
    override def getReasonPhrase: String = reason
    override def getFamily: Response.Status.Family = base.getFamily
  }

  object BadRequest {
    /** Builds a complete 400 [[Response]] with the given reason phrase. */
    def apply(reason: String) = Response.status(new BadRequest(reason)).build()
  }
}
|
tc-dc/kafka-mesos
|
src/scala/main/ly/stealth/mesos/kafka/scheduler/http/api/Model.scala
|
Scala
|
apache-2.0
| 1,735
|
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright 2016-2020 Daniel Urban and contributors listed in NOTICE.txt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.tauri.choam
package kcas
import org.openjdk.jcstress.annotations.Expect._
import org.openjdk.jcstress.annotations.Outcome.Outcomes
import org.openjdk.jcstress.annotations._
import org.openjdk.jcstress.infra.results._
@JCStressTest
@State
@Description("IBR reservation rel/acq: store/load reordering")
@Outcomes(Array(
  new Outcome(id = Array("0, 0"), expect = ACCEPTABLE_INTERESTING, desc = "Neither thread sees the other store"),
  new Outcome(id = Array("1, 0"), expect = ACCEPTABLE, desc = "Only actor1 sees the other store"),
  new Outcome(id = Array("0, 1"), expect = ACCEPTABLE, desc = "Only actor2 sees the other store"),
  new Outcome(id = Array("1, 1"), expect = ACCEPTABLE, desc = "Both threads see the other store")
))
class IBRRelAcqTestReorder {
  // Shared reservation. Each actor does a release store to one bound and an
  // acquire load of the other; per the @Outcomes above, release/acquire does
  // not forbid the store/load reordering, so "0, 0" (neither thread seeing the
  // other's store) is the accepted-interesting outcome.
  private[this] val res =
    new IBRReservation(0L)
  @Actor
  @deprecated("it calls `setLowerRelease`", since = "0")
  def actor1(r: JJ_Result): Unit = {
    // these lines can be reordered:
    this.res.setLowerRelease(1L) // release
    r.r1 = this.res.getUpper() // acquire
  }
  @Actor
  @deprecated("it calls `setUpperRelease`", since = "0")
  def actor2(r: JJ_Result): Unit = {
    // these lines can be reordered:
    this.res.setUpperRelease(1L) // release
    r.r2 = this.res.getLower() // acquire
  }
}
|
durban/exp-reagents
|
stress/src/test/scala/dev/tauri/choam/kcas/IBRRelAcqTestReorder.scala
|
Scala
|
apache-2.0
| 1,971
|
package mobile.ionic
import scalatags.Text.all._
object IonicHtmlTags {

  // --- Ionic custom-element tags, exposed as scalatags tags (alphabetical) ---
  val ionContent = "ion-content".tag
  val ionItem = "ion-item".tag
  val ionList = "ion-list".tag
  val ionNavBackButton = "ion-nav-back-button".tag
  val ionNavBar = "ion-nav-bar".tag
  val ionNavView = "ion-nav-view".tag
  val ionOptionButton = "ion-option-button".tag
  val ionTab = "ion-tab".tag
  val ionTabs = "ion-tabs".tag
  val ionView = "ion-view".tag

  // --- Angular/Ionic attributes, exposed as scalatags attrs (alphabetical) ---
  val animation = "animation".attr
  val hasHeader = "has-header".attr
  val icon = "icon".attr
  val iconOff = "icon-off".attr
  val iconOn = "icon-on".attr
  val ionPadding = "padding".attr
  val ngApp = "ng-app".attr
  val ngClick = "ng-click".attr
  val ngModel = "ng-model".attr
  val ngRepeat = "ng-repeat".attr
  val ngSrc = "ng-src".attr
  val ngType = "type".attr
  val viewTitle = "view-title".attr
}
|
monifu/monix-ionic-sample
|
app-jvm/src/main/scala/mobile/ionic/IonicHtmlTags.scala
|
Scala
|
gpl-2.0
| 872
|
/**
* Copyright (C) 2020 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.util
import org.orbeon.dom.QName
import org.orbeon.oxf.externalcontext.ExternalContext
import org.orbeon.oxf.properties.PropertySet
trait CoreCrossPlatformSupportTrait {
  // Platform-specific type of an uploaded file item.
  type FileItemType
  // NOTE(review): presumably "is this the PE edition?" — confirm with implementors.
  def isPE: Boolean
  // NOTE(review): presumably "running in a JavaScript environment?" — confirm.
  def isJsEnv: Boolean
  // Fresh random identifier rendered as hexadecimal.
  def randomHexId: String
  def getApplicationResourceVersion: Option[String]
  // Global property set, plus lookup scoped to a specific processor.
  def properties: PropertySet
  def getPropertySet(processorName: QName): PropertySet
  def externalContext: ExternalContext
}
|
orbeon/orbeon-forms
|
core-cross-platform/shared/src/main/scala/org/orbeon/oxf/util/CoreCrossPlatformSupportTrait.scala
|
Scala
|
lgpl-2.1
| 1,113
|
package pb.shared.protocol
import java.util.UUID
// A session record shared between client and server: its `path`, a unique
// `id`, and `hangoutUrl` (presumably the video-call URL — confirm with callers).
case class Session(path: String, id: UUID, hangoutUrl: String)
|
pairing-buddy/pairing-buddy
|
shared/src/main/scala/pb/shared/protocol/Session.scala
|
Scala
|
mit
| 114
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.utils
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.tuple.{Tuple1, Tuple2}
import org.apache.flink.api.java.typeutils.{GenericTypeInfo, ListTypeInfo, PojoField, PojoTypeInfo, RowTypeInfo}
import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.api.scala.typeutils.Types
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.dataformat.{BaseRow, BinaryString}
import org.apache.flink.table.functions.{AggregateFunction, FunctionContext, ScalarFunction}
import org.apache.flink.types.Row
import com.google.common.base.Charsets
import com.google.common.io.Files
import java.io.File
import java.lang.{Iterable => JIterable}
import java.sql.{Date, Timestamp}
import java.time.{Instant, LocalDate, LocalDateTime, LocalTime}
import java.util
import java.util.TimeZone
import java.util.concurrent.atomic.AtomicInteger
import scala.annotation.varargs
/**
 * User-defined aggregate and scalar functions, POJO helpers and small job
 * utilities shared by the blink planner runtime tests.
 */
object UserDefinedFunctionTestUtils {
  // ------------------------------------------------------------------------------------
  // AggregateFunctions
  // ------------------------------------------------------------------------------------

  /** Sums the `f2` field of non-null [[MyPojo]] inputs; result is a pojo holding the total twice. */
  class MyPojoAggFunction extends AggregateFunction[MyPojo, CountAccumulator] {
    def accumulate(acc: CountAccumulator, value: MyPojo): Unit = {
      if (value != null) {
        acc.f0 += value.f2
      }
    }
    def retract(acc: CountAccumulator, value: MyPojo): Unit = {
      if (value != null) {
        acc.f0 -= value.f2
      }
    }
    override def getValue(acc: CountAccumulator): MyPojo = {
      // Long accumulator is narrowed to Int for the pojo fields.
      new MyPojo(acc.f0.asInstanceOf[Int], acc.f0.asInstanceOf[Int])
    }
    def merge(acc: CountAccumulator, its: JIterable[CountAccumulator]): Unit = {
      val iter = its.iterator()
      while (iter.hasNext) {
        acc.f0 += iter.next().f0
      }
    }
    override def createAccumulator(): CountAccumulator = {
      new CountAccumulator
    }
    def resetAccumulator(acc: CountAccumulator): Unit = {
      acc.f0 = 0L
    }
  }

  /** The initial accumulator for count aggregate function */
  class CountAccumulator extends Tuple1[Long] {
    f0 = 0L //count
  }

  /** Adds `value` plus every var-arg (parsed as Long) into the accumulator. */
  class VarArgsAggFunction extends AggregateFunction[Long, CountAccumulator] {
    @varargs
    def accumulate(acc: CountAccumulator, value: Long, args: String*): Unit = {
      acc.f0 += value
      args.foreach(s => acc.f0 += s.toLong)
    }
    @varargs
    def retract(acc: CountAccumulator, value: Long, args: String*): Unit = {
      acc.f0 -= value
      args.foreach(s => acc.f0 -= s.toLong)
    }
    override def getValue(acc: CountAccumulator): Long = {
      acc.f0
    }
    def merge(acc: CountAccumulator, its: JIterable[CountAccumulator]): Unit = {
      val iter = its.iterator()
      while (iter.hasNext) {
        acc.f0 += iter.next().f0
      }
    }
    override def createAccumulator(): CountAccumulator = {
      new CountAccumulator
    }
    def resetAccumulator(acc: CountAccumulator): Unit = {
      acc.f0 = 0L
    }
  }

  /** Counts how often the first argument was larger than the second argument. */
  class LargerThanCount extends AggregateFunction[Long, Tuple1[Long]] {
    def accumulate(acc: Tuple1[Long], a: Long, b: Long): Unit = {
      if (a > b) acc.f0 += 1
    }
    def retract(acc: Tuple1[Long], a: Long, b: Long): Unit = {
      if (a > b) acc.f0 -= 1
    }
    override def createAccumulator(): Tuple1[Long] = Tuple1.of(0L)
    override def getValue(acc: Tuple1[Long]): Long = acc.f0
  }

  /** Counts null vs non-null inputs; rendered as "nonNull|null". */
  class CountNullNonNull extends AggregateFunction[String, Tuple2[Long, Long]] {
    override def createAccumulator(): Tuple2[Long, Long] = Tuple2.of(0L, 0L)
    override def getValue(acc: Tuple2[Long, Long]): String = s"${acc.f0}|${acc.f1}"
    def accumulate(acc: Tuple2[Long, Long], v: String): Unit = {
      if (v == null) {
        acc.f1 += 1
      } else {
        acc.f0 += 1
      }
    }
    def retract(acc: Tuple2[Long, Long], v: String): Unit = {
      if (v == null) {
        acc.f1 -= 1
      } else {
        acc.f0 -= 1
      }
    }
  }

  /** Counts invocations; the two string arguments are ignored. */
  class CountPairs extends AggregateFunction[Long, Tuple1[Long]] {
    def accumulate(acc: Tuple1[Long], a: String, b: String): Unit = {
      acc.f0 += 1
    }
    def retract(acc: Tuple1[Long], a: String, b: String): Unit = {
      acc.f0 -= 1
    }
    override def createAccumulator(): Tuple1[Long] = Tuple1.of(0L)
    override def getValue(acc: Tuple1[Long]): Long = acc.f0
  }

  // ------------------------------------------------------------------------------------
  // ScalarFunctions
  // ------------------------------------------------------------------------------------
  @SerialVersionUID(1L)
  object MyHashCode extends ScalarFunction {
    def eval(s: String): Int = s.hashCode()
  }
  @SerialVersionUID(1L)
  object OldHashCode extends ScalarFunction {
    // Deliberately constant: used to test that function replacement takes effect.
    def eval(s: String): Int = -1
  }
  @SerialVersionUID(1L)
  object StringFunction extends ScalarFunction {
    def eval(s: String): String = s
  }
  @SerialVersionUID(1L)
  object MyStringFunc extends ScalarFunction {
    def eval(s: String): String = s + "haha"
  }
  @SerialVersionUID(1L)
  object BinaryStringFunction extends ScalarFunction {
    def eval(s: BinaryString): BinaryString = s
  }
  @SerialVersionUID(1L)
  object DateFunction extends ScalarFunction {
    def eval(d: Date): String = d.toString
  }
  @SerialVersionUID(1L)
  object LocalDateFunction extends ScalarFunction {
    def eval(d: LocalDate): String = d.toString
  }
  @SerialVersionUID(1L)
  object TimestampFunction extends ScalarFunction {
    def eval(t: java.sql.Timestamp): String = t.toString
  }
  @SerialVersionUID(1L)
  object DateTimeFunction extends ScalarFunction {
    def eval(t: LocalDateTime): String = t.toString
  }
  @SerialVersionUID(1L)
  object TimeFunction extends ScalarFunction {
    def eval(t: java.sql.Time): String = t.toString
  }
  @SerialVersionUID(1L)
  object LocalTimeFunction extends ScalarFunction {
    def eval(t: LocalTime): String = t.toString
  }
  @SerialVersionUID(1L)
  object InstantFunction extends ScalarFunction {
    def eval(t: Instant): Instant = t
    override def getResultType(signature: Array[Class[_]]) = Types.INSTANT
  }
  // Understand type: Row wrapped as TypeInfoWrappedDataType.
  @SerialVersionUID(1L)
  object RowFunc extends ScalarFunction {
    def eval(s: String): Row = Row.of(s)
    override def getResultType(signature: Array[Class[_]]) =
      new RowTypeInfo(Types.STRING)
  }
  @SerialVersionUID(1L)
  object RowToStrFunc extends ScalarFunction {
    def eval(s: BaseRow): String = s.getString(0).toString
  }
  // generic.
  @SerialVersionUID(1L)
  object ListFunc extends ScalarFunction {
    def eval(s: String): java.util.List[String] = util.Arrays.asList(s)
    override def getResultType(signature: Array[Class[_]]) =
      new ListTypeInfo(Types.STRING)
  }
  // internal but wrapped as TypeInfoWrappedDataType.
  @SerialVersionUID(1L)
  object StringFunc extends ScalarFunction {
    def eval(s: String): String = s
    override def getResultType(signature: Array[Class[_]]): TypeInformation[String] =
      Types.STRING
  }
  @SerialVersionUID(1L)
  object MyPojoFunc extends ScalarFunction {
    def eval(s: MyPojo): Int = s.f2
    // Parameter type must match MyToPojoFunc's declared pojo result type.
    override def getParameterTypes(signature: Array[Class[_]]): Array[TypeInformation[_]] =
      Array(MyToPojoFunc.getResultType(signature))
  }
  @SerialVersionUID(1L)
  object MyToPojoFunc extends ScalarFunction {
    def eval(s: Int): MyPojo = new MyPojo(s, s)
    override def getResultType(signature: Array[Class[_]]): PojoTypeInfo[MyPojo] = {
      val cls = classOf[MyPojo]
      new PojoTypeInfo[MyPojo](classOf[MyPojo], util.Arrays.asList(
        new PojoField(cls.getDeclaredField("f1"), Types.INT),
        new PojoField(cls.getDeclaredField("f2"), Types.INT)))
    }
  }
  @SerialVersionUID(1L)
  object ToCompositeObj extends ScalarFunction {
    def eval(id: Int, name: String, age: Int): CompositeObj = {
      CompositeObj(id, name, age, "0.0")
    }
    def eval(id: Int, name: String, age: Int, point: String): CompositeObj = {
      CompositeObj(id, name, age, point)
    }
  }
  @SerialVersionUID(1L)
  object TestWrapperUdf extends ScalarFunction {
    def eval(id: Int): Int = {
      id
    }
    def eval(id: String): String = {
      id
    }
  }

  /** Fails at eval time unless `open` was invoked; tracks alive instances via a shared counter. */
  @SerialVersionUID(1L)
  class TestAddWithOpen extends ScalarFunction {
    var isOpened: Boolean = false
    override def open(context: FunctionContext): Unit = {
      super.open(context)
      isOpened = true
      TestAddWithOpen.aliveCounter.incrementAndGet()
    }
    def eval(a: Long, b: Long): Long = {
      if (!isOpened) {
        throw new IllegalStateException("Open method is not called.")
      }
      a + b
    }
    def eval(a: Long, b: Int): Long = {
      eval(a, b.asInstanceOf[Long])
    }
    override def close(): Unit = {
      TestAddWithOpen.aliveCounter.decrementAndGet()
    }
  }
  object TestAddWithOpen {
    /** A thread-safe counter to record how many alive TestAddWithOpen UDFs */
    val aliveCounter = new AtomicInteger(0)
  }
  @SerialVersionUID(1L)
  object TestMod extends ScalarFunction {
    def eval(src: Long, mod: Int): Long = {
      src % mod
    }
  }
  @SerialVersionUID(1L)
  object TestExceptionThrown extends ScalarFunction {
    def eval(src: String): Int = {
      throw new NumberFormatException("Cannot parse this input.")
    }
  }
  @SerialVersionUID(1L)
  class ToMillis extends ScalarFunction {
    // Epoch millis shifted by the JVM default time-zone offset.
    def eval(t: Timestamp): Long = {
      t.toInstant.toEpochMilli + TimeZone.getDefault.getOffset(t.toInstant.toEpochMilli)
    }
  }
  @SerialVersionUID(1L)
  object MyNegative extends ScalarFunction {
    def eval(d: java.math.BigDecimal): java.lang.Object = d.negate()
    override def getResultType(signature: Array[Class[_]]): TypeInformation[_] = Types.JAVA_BIG_DEC
  }
  @SerialVersionUID(1L)
  object IsNullUDF extends ScalarFunction {
    def eval(v: Any): Boolean = v == null
    override def getResultType(signature: Array[Class[_]]): TypeInformation[_] = Types.BOOLEAN
  }

  // ------------------------------------------------------------------------------------
  // POJOs
  // ------------------------------------------------------------------------------------

  /**
   * Mutable two-field POJO used with [[PojoTypeInfo]].
   * NOTE: fields are mutable, so instances are unsafe as hash-map keys after mutation.
   */
  class MyPojo() {
    var f1: Int = 0
    var f2: Int = 0
    def this(f1: Int, f2: Int) {
      this()
      this.f1 = f1
      this.f2 = f2
    }
    override def equals(other: Any): Boolean = other match {
      case that: MyPojo =>
        (that canEqual this) &&
          f1 == that.f1 &&
          f2 == that.f2
      case _ => false
    }
    // FIX: equals was overridden without hashCode, breaking the equals/hashCode
    // contract (equal pojos could land in different hash buckets).
    override def hashCode(): Int = 31 * f1 + f2
    def canEqual(other: Any): Boolean = other.isInstanceOf[MyPojo]
    override def toString = s"MyPojo($f1, $f2)"
  }

  case class CompositeObj(id: Int, name: String, age: Int, point: String)

  // ------------------------------------------------------------------------------------
  // Utils
  // ------------------------------------------------------------------------------------

  /** Publishes `parameters` as global job parameters of a batch environment. */
  def setJobParameters(env: ExecutionEnvironment, parameters: Map[String, String]): Unit = {
    val conf = new Configuration()
    parameters.foreach {
      case (k, v) => conf.setString(k, v)
    }
    env.getConfig.setGlobalJobParameters(conf)
  }

  /** Publishes `parameters` as global job parameters of a Scala streaming environment. */
  def setJobParameters(env: StreamExecutionEnvironment, parameters: Map[String, String]): Unit = {
    val conf = new Configuration()
    parameters.foreach {
      case (k, v) => conf.setString(k, v)
    }
    env.getConfig.setGlobalJobParameters(conf)
  }

  /** Publishes `parameters` as global job parameters of a Java streaming environment. */
  def setJobParameters(
      env: org.apache.flink.streaming.api.environment.StreamExecutionEnvironment,
      parameters: Map[String, String]): Unit = {
    val conf = new Configuration()
    parameters.foreach {
      case (k, v) => conf.setString(k, v)
    }
    env.getConfig.setGlobalJobParameters(conf)
  }

  /** Writes `contents` to a self-deleting temp file and returns its absolute path. */
  def writeCacheFile(fileName: String, contents: String): String = {
    val tempFile = File.createTempFile(this.getClass.getName + "-" + fileName, "tmp")
    tempFile.deleteOnExit()
    Files.write(contents, tempFile, Charsets.UTF_8)
    tempFile.getAbsolutePath
  }
}
// Simple mutable holder deliberately lacking POJO accessors, so Flink treats it generically.
class RandomClass(var i: Int)
// Aggregate exercising the generic (non-POJO) type path: both result and
// accumulator types are forced to GenericTypeInfo.
class GenericAggregateFunction extends AggregateFunction[java.lang.Integer, RandomClass] {
  override def getValue(accumulator: RandomClass): java.lang.Integer = accumulator.i
  override def createAccumulator(): RandomClass = new RandomClass(0)
  override def getResultType: TypeInformation[java.lang.Integer] =
    new GenericTypeInfo[Integer](classOf[Integer])
  override def getAccumulatorType: TypeInformation[RandomClass] = new GenericTypeInfo[RandomClass](
    classOf[RandomClass])
  // "Last value wins": accumulate and retract both overwrite the held value.
  def accumulate(acc: RandomClass, value: Int): Unit = {
    acc.i = value
  }
  def retract(acc: RandomClass, value: Int): Unit = {
    acc.i = value
  }
  def resetAccumulator(acc: RandomClass): Unit = {
    acc.i = 0
  }
}
|
bowenli86/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/UserDefinedFunctionTestUtils.scala
|
Scala
|
apache-2.0
| 13,830
|
package nounou.elements.data.filters
import nounou.elements.data.NNData
import breeze.numerics.isOdd
import breeze.stats.median
import breeze.linalg.{DenseVector => DV}
import breeze.signal.{filterMedian, OptOverhang}
import nounou.elements.ranges.{SampleRangeValid, SampleRange}
/**This filter applies a median subtraction, which is a non-linear form of high-pass which is
* less biased by extreme transients like spiking.
* @author ktakagaki
* //@date 3/17/14.
*/
class NNDataFilterMedianSubtract( private var parenVar: NNData ) extends NNDataFilter( parenVar ) {
  // Median window length in frames; 1 means the filter is a pass-through.
  private var _windowLength = 1
  // Buffered view of the upstream data so repeated overlapping reads are cheap.
  private val upstreamBuff: NNData = new NNDataFilterBuffer(parenVar)
  // Half-width of the (odd) window, i.e. (_windowLength - 1) / 2.
  var windowLengthHalf = 0
  // Sets the window length; must be positive and odd so the window is
  // symmetric around the sample being filtered.
  def setWindowLength( value: Int ): Unit = {
    loggerRequire( value > 0, "Parameter windowLength must be bigger than 0, invalid value: {}", value.toString)
    loggerRequire( isOdd(value), "Parameter windowLength must be odd to account correctly for overhangs, invalid value: {}", value.toString)
    _windowLength = value
    windowLengthHalf = (_windowLength-1)/2
  }
  def getWindowLength(): Int = _windowLength
  // Scala property-style aliases for the getter/setter above.
  def windowLength_=( value: Int ) = setWindowLength( value )
  def windowLength() = _windowLength
  // <editor-fold defaultstate="collapsed" desc=" calculate data ">
  // Single-point read. NOTE(review): this returns the window MEDIAN itself,
  // not the sample minus its median as readTraceDVImpl does — confirm whether
  // this asymmetry is intentional.
  override def readPointImpl(channel: Int, frame: Int, segment: Int): Int =
    if(windowLength == 1){
      upstreamBuff.readPointImpl(channel, frame, segment)
    } else {
      //by calling upstream.readTrace instead of upstream.readTraceImpl, we can deal with cases where the kernel will overhang actual data, since the method will return zeros
      val tempData = upstreamBuff.readTraceDV(
        channel,
        new SampleRange(frame - windowLengthHalf, frame + windowLengthHalf, 1, segment) )
      median(tempData).toInt
    }
  // Trace read: the requested range padded by half a window on each side is
  // fetched, then the running median is subtracted from the trimmed centre.
  override def readTraceDVImpl(channel: Int, ran: SampleRangeValid): DV[Int] =
    if(windowLength == 1){
      upstreamBuff.readTraceDVImpl(channel, ran)
    } else {
      //by calling upstream.readTrace instead of upstream.readTraceImpl, we can deal with cases where the kernel will overhang actual data, since the method will return zeros
      val tempData = upstreamBuff.readTraceDV(
        channel,
        new SampleRange( ran.start - windowLengthHalf, ran.last + windowLengthHalf, 1, ran.segment) )
      tempData(windowLengthHalf to -windowLengthHalf-1) - filterMedian(tempData, windowLength, OptOverhang.None)
    }
  // override def readFrameImpl(frame: Int, segment: Int): Vector[Int] = super[XDataFilter].readFrameImpl(frame, segment)
  // override def readFrameImpl(frame: Int, channels: Vector[Int], segment: Int): Vector[Int] = super[XDataFilter].readFrameImpl(frame, channels, segment)
  // </editor-fold>
}
|
ktakagaki/nounou.rebooted150527
|
src/main/scala/nounou/elements/data/filters/NNDataFilterMedianSubtract.scala
|
Scala
|
apache-2.0
| 2,800
|
package com.pragmasoft.scaldingunit
import com.twitter.scalding._
import cascading.tuple.Tuple
import scala.Predef._
import scala.collection.mutable.Buffer
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import cascading.pipe.Pipe
import org.specs2.{mutable => mutableSpec}
import Dsl._
// Should stay here and not inside the class otherwise Hadoop will try to serialise the container too
object HadoopSpec2SupportSpecOperations {
  // Serializable wrapper: the closure is shipped to the (local) Hadoop run,
  // which is why this lives in an object rather than inside the spec class.
  implicit class OperationsWrapper(val pipe: Pipe) extends Serializable {
    // Maps 'col1 into a new 'col1_transf field carrying a "_transf" suffix.
    def changeColValue : Pipe = {
      pipe.map('col1 -> 'col1_transf) {
        col1: String => col1 + "_transf"
      }
    }
  }
  implicit def fromRichPipe(rp: RichPipe) = new OperationsWrapper(rp.pipe)
}
// Verifies that a Given/When/Then pipeline actually executes in Hadoop mode.
// Deliberately limited to a single test — see the serialization caveat below.
class HadoopSpec2SupportSpec extends mutableSpec.SpecificationWithJUnit with TupleConversions with Serializable {
  import HadoopSpec2SupportSpecOperations._
  // Can only do one test. If I try to execute the test inside an instance of HadoopTestInfrastrucutureWithSpy I have
  // serialization issues
  "A test with single source" should {
    new HadoopTestInfrastructureWithSpy {
      Given {
        List(("col1_1", "col2_1"), ("col1_2", "col2_2")) withSchema (('col1, 'col2))
      } When {
        pipe: RichPipe => pipe.changeColValue
      } Then {
        buffer: Buffer[(String, String, String)] =>
          "run in hadoop mode" in {
            testHasBeenExecutedInHadoopMode
          }
      }
    }
    () //without this spec2 gets annoyed
  }
}
|
scalding-io/ScaldingUnit
|
scalding-unit/src/test/scala/com/pragmasoft/scaldingunit/HadoopSpec2SupportSpec.scala
|
Scala
|
apache-2.0
| 1,528
|
package org.jetbrains.plugins.scala.lang.completion.weighter
import com.intellij.openapi.module.ModuleUtilCore
import com.intellij.openapi.util.Key
import com.intellij.psi._
import com.intellij.psi.util.proximity.ProximityWeigher
import com.intellij.psi.util.{ProximityLocation, PsiTreeUtil}
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScBindingPattern
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScValue, ScVariable}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportStmt
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScTemplateBody
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScMember, ScObject}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.{ScNamedElement, ScPackaging}
import org.jetbrains.plugins.scala.lang.psi.{ScImportsHolder, ScalaPsiUtil}
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
/**
* @author Alexander Podkhalyuzin
*/
class ScalaExplicitlyImportedWeigher extends ProximityWeigher {
  // Returns a proximity weight for a fully-qualified name `qual` as seen from
  // `position`: 4 if it is explicitly imported (directly, via selector, or via
  // a wildcard on its package/class), 2 for scala/scala.Predef defaults,
  // 1 for java.lang, None otherwise.
  def applyQualifier(qual: String, position: PsiElement): Option[Integer] = {
    if (position == null) return None
    val index = qual.lastIndexOf('.')
    // Qualifier with the last segment stripped; null when `qual` has no dot.
    val qualNoPoint = if (index < 0) null else qual.substring(0, index)
    // Visible import statements are cached in user data together with the PSI
    // modification count; recompute when absent or stale.
    val tuple: (ArrayBuffer[ScImportStmt], Long) = position.getUserData(ScalaExplicitlyImportedWeigher.key)
    var buffer: ArrayBuffer[ScImportStmt] = if (tuple != null) tuple._1 else null
    val currentModCount = position.getManager.getModificationTracker.getModificationCount
    if (buffer == null || tuple._2 != currentModCount) {
      // Walk outward through enclosing contexts, collecting imports from each
      // ScImportsHolder; stop at the file.
      @tailrec
      def treeWalkup(place: PsiElement, lastParent: PsiElement) {
        if (place == null) return
        place match {
          case holder: ScImportsHolder =>
            buffer ++= holder.getImportsForLastParent(lastParent)
            if (place.isInstanceOf[ScalaFile]) return
          case _ =>
        }
        treeWalkup(place.getContext, place)
      }
      buffer = new ArrayBuffer[ScImportStmt]()
      treeWalkup(position.getContext, position)
      position.putUserData(ScalaExplicitlyImportedWeigher.key, (buffer, currentModCount))
    }
    val iter = buffer.iterator
    while (iter.hasNext) {
      val stmt = iter.next()
      val exprIter = stmt.importExprs.iterator
      while (exprIter.hasNext) {
        val expr = exprIter.next()
        if (expr.isSingleWildcard && qualNoPoint != null) {
          // `import pkg._` / `import Clazz._`: match against the parent qualifier.
          for (resolve <- expr.qualifier.multiResolve(false))
            resolve match {
              case ScalaResolveResult(pack: PsiPackage, _) =>
                if (qualNoPoint == pack.getQualifiedName) return Some(4)
              case ScalaResolveResult(clazz: PsiClass, _) =>
                if (qualNoPoint == clazz.qualifiedName) return Some(4)
              case _ =>
            }
        } else if (expr.selectorSet.isDefined) {
          // `import pkg.{A, B => C}`: match each selector's resolved class.
          for (selector <- expr.selectors) {
            for (element <- selector.reference;
                 resolve <- element.multiResolve(false)) {
              resolve match {
                case ScalaResolveResult(clazz: PsiClass, _) =>
                  if (qual == clazz.qualifiedName) return Some(4)
                case _ =>
              }
            }
          }
        } else {
          // Plain `import pkg.Clazz`.
          expr.reference match {
            case Some(ref) =>
              for (resolve <- ref.multiResolve(false))
                resolve match {
                  case ScalaResolveResult(clazz: PsiClass, _) =>
                    if (qual == clazz.qualifiedName) return Some(4)
                  case _ =>
                }
            case None =>
          }
        }
      }
    }
    // Implicitly imported scopes get a smaller weight than explicit imports.
    if (qualNoPoint != null && qualNoPoint == "scala" ||
      qualNoPoint == "java.lang" || qualNoPoint == "scala.Predef") {
      if (qualNoPoint == "java.lang") return Some(1)
      else return Some(2)
    }
    None
  }
  // Weighs a member of an object (val/var/def) by the qualified name
  // "<object fqn>.<member name>" using the same import-based rules.
  def applyToMember(member: ScMember, position: PsiElement): Option[Integer] = {
    member.getContext match {
      case _: ScTemplateBody =>
        val clazz: PsiClass = member.containingClass
        clazz match {
          case obj: ScObject =>
            val qualNoPoint = obj.qualifiedName
            if (qualNoPoint != null) {
              val memberName = member match {
                case named: ScNamedElement => named.name
                case _ => member.getName
              }
              val qual = qualNoPoint + "." + memberName
              applyQualifier(qual, position) match {
                case Some(x) => return Some(x)
                case None =>
              }
            }
          case _ =>
        }
      case _ =>
    }
    None
  }
  // Entry point called by the IDE: higher return value = closer proximity.
  // 3 for same file or same module+package, 4/2/1 from applyQualifier,
  // 0 otherwise.
  def weigh(element: PsiElement, location: ProximityLocation): Integer = {
    val position: PsiElement = location.getPosition
    if (position == null) {
      return 0
    }
    val elementFile: PsiFile = element.getContainingFile
    val positionFile: PsiFile = position.getContainingFile
    if (!positionFile.isInstanceOf[ScalaFile]) return 0
    if (positionFile != null && elementFile != null && positionFile.getOriginalFile == elementFile.getOriginalFile) {
      return 3
    }
    element match {
      case clazz: PsiClass if clazz.qualifiedName != null =>
        val qual: String = clazz.qualifiedName
        applyQualifier(qual, position) match {
          case Some(x) => return x
          case None =>
        }
      case member: ScMember =>
        applyToMember(member, position) match {
          case Some(x) => return x
          case None =>
        }
      case member: PsiMember if member.hasModifierProperty("static") =>
        // Java static members are weighed like object members.
        val clazz = member.containingClass
        if (clazz != null && clazz.qualifiedName != null) {
          val qualNoPoint = clazz.qualifiedName
          val memberName = member match {
            case named: ScNamedElement => named.name
            case _ => member.getName
          }
          val qual = qualNoPoint + "." + memberName
          applyQualifier(qual, position) match {
            case Some(x) => return x
            case None =>
          }
        }
      case b: ScBindingPattern =>
        // A binding pattern is weighed through its enclosing val/var.
        ScalaPsiUtil.nameContext(b) match {
          case v: ScValue =>
            applyToMember(v, position) match {
              case Some(x) => return x
              case None =>
            }
          case v: ScVariable =>
            applyToMember(v, position) match {
              case Some(x) => return x
              case None =>
            }
          case _ =>
        }
      case _ =>
    }
    // Package name of an element, treating package objects as their package.
    def packageName(element: PsiElement): Option[String] = {
      val packageObject = Option(PsiTreeUtil.getContextOfType(element, classOf[ScObject]))
      val nameAsPackageObject = packageObject.collect { case po: ScObject if po.isPackageObject => po.qualifiedName }
      if (nameAsPackageObject.isEmpty) {
        Option(PsiTreeUtil.getContextOfType(element, classOf[ScPackaging])).map(_.fullPackageName)
      } else {
        nameAsPackageObject
      }
    }
    packageName(position).foreach { pName =>
      val elementModule = ModuleUtilCore.findModuleForPsiElement(element)
      if (location.getPositionModule == elementModule && packageName(element).contains(pName)) {
        return 3
      }
    }
    0
  }
}
object ScalaExplicitlyImportedWeigher {
  // Caches the import statements visible from a PSI element, paired with the
  // PSI modification count at which they were collected (for invalidation).
  private[weighter] val key: Key[(ArrayBuffer[ScImportStmt], Long)] = Key.create("scala.explicitly.imported.weigher.key")
}
|
loskutov/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/completion/weighter/ScalaExplicitlyImportedWeigher.scala
|
Scala
|
apache-2.0
| 7,620
|
package io.buoyant.router.http
import com.twitter.finagle.Path
import com.twitter.finagle.buoyant.Dst
import com.twitter.finagle.http.Request
import io.buoyant.router.RoutingFactory.{UnidentifiedRequest, IdentifiedRequest}
import io.buoyant.test.{Awaits, Exceptions}
import org.scalatest.FunSuite
class PathIdentifierTest extends FunSuite with Awaits with Exceptions {

  test("request with same number of segments as requested") {
    val identifier = PathIdentifier(Path.Utf8("http"), 2)
    val request = Request()
    request.uri = "/mysvc/subsvc"
    val dst = await(identifier(request)).asInstanceOf[IdentifiedRequest[Request]].dst
    assert(dst == Dst.Path(Path.read("/http/mysvc/subsvc")))
  }

  test("request with more segments than requested") {
    val identifier = PathIdentifier(Path.Utf8("http"), 2)
    val request = Request()
    request.uri = "/mysvc/subsvc/some/path?other=stuff"
    val dst = await(identifier(request)).asInstanceOf[IdentifiedRequest[Request]].dst
    assert(dst == Dst.Path(Path.read("/http/mysvc/subsvc")))
  }

  test("request with fewer segments than requested") {
    val identifier = PathIdentifier(Path.Utf8("http"), 2)
    val request = Request()
    request.uri = "/mysvc?other=stuff"
    // Too few path segments: the request cannot be identified.
    assert(await(identifier(request)).isInstanceOf[UnidentifiedRequest[Request]])
  }

  test("consumes path segments") {
    val identifier = PathIdentifier(Path.Utf8("http"), 2, consume = true)
    val request = Request()
    request.uri = "/mysvc/subsvc/some/path?other=stuff"
    val identified = await(identifier(request)).asInstanceOf[IdentifiedRequest[Request]]
    assert(identified.dst == Dst.Path(Path.read("/http/mysvc/subsvc")))
    // The identified segments are stripped; the query string survives.
    assert(identified.request.uri == "/some/path?other=stuff")
  }

  test("consumes entire path") {
    val identifier = PathIdentifier(Path.Utf8("http"), 2, consume = true)
    val request = Request()
    request.uri = "/mysvc/subsvc"
    val identified = await(identifier(request)).asInstanceOf[IdentifiedRequest[Request]]
    assert(identified.dst == Dst.Path(Path.read("/http/mysvc/subsvc")))
    assert(identified.request.uri == "/")
  }

  test("does not parse more segments than requested") {
    val identifier = PathIdentifier(Path.Utf8("http"), 2)
    val request = Request()
    // The "!" segment would be invalid if parsed, but only 2 segments are read.
    request.uri = "/mysvc/subsvc/!"
    val dst = await(identifier(request)).asInstanceOf[IdentifiedRequest[Request]].dst
    assert(dst == Dst.Path(Path.read("/http/mysvc/subsvc")))
  }

  test("preserve trailing slash with consume = true") {
    case class ExpParsedPath(segment: String, path: String)
    // Input URI -> expected (dst path, residual request uri).
    val cases = Map(
      "/mysvc/subsvc/" -> ExpParsedPath("/http/mysvc/subsvc", "/"),
      "/mysvc/subsvc/path1/" -> ExpParsedPath("/http/mysvc/subsvc", "/path1/"),
      "/mysvc/subsvc/path1" -> ExpParsedPath("/http/mysvc/subsvc", "/path1"),
      "/mysvc/subsvc/path1/path2" -> ExpParsedPath("/http/mysvc/subsvc", "/path1/path2"),
      "/mysvc/subsvc/path1/path2/" -> ExpParsedPath("/http/mysvc/subsvc", "/path1/path2/"),
      "/mysvc/subsvc/path1/?foo=bar" -> ExpParsedPath("/http/mysvc/subsvc", "/path1/?foo=bar"),
      "/mysvc/subsvc/path1?foo=bar/" -> ExpParsedPath("/http/mysvc/subsvc", "/path1?foo=bar/")
    )
    val identifier = PathIdentifier(Path.Utf8("http"), 2, consume = true)
    cases.foreach { case (uri, expected) =>
      val request = Request()
      request.uri = uri
      val identified = await(identifier(request)).asInstanceOf[IdentifiedRequest[Request]]
      assert(identified.dst == Dst.Path(Path.read(expected.segment)))
      assert(identified.request.uri == expected.path)
    }
  }

  test("preserve trailing slash with consume = false") {
    case class ExpParsedPath(segment: String, path: String)
    // With consume = false the request URI must pass through untouched.
    val cases = Map(
      "/mysvc/subsvc/" -> ExpParsedPath("/http/mysvc/subsvc", "/mysvc/subsvc/"),
      "/mysvc/subsvc/path1/" -> ExpParsedPath("/http/mysvc/subsvc", "/mysvc/subsvc/path1/"),
      "/mysvc/subsvc/path1" -> ExpParsedPath("/http/mysvc/subsvc", "/mysvc/subsvc/path1"),
      "/mysvc/subsvc/path1/path2" -> ExpParsedPath("/http/mysvc/subsvc", "/mysvc/subsvc/path1/path2"),
      "/mysvc/subsvc/path1/path2/" -> ExpParsedPath("/http/mysvc/subsvc", "/mysvc/subsvc/path1/path2/")
    )
    val identifier = PathIdentifier(Path.Utf8("http"), 2, consume = false)
    cases.foreach { case (uri, expected) =>
      val request = Request()
      request.uri = uri
      val identified = await(identifier(request)).asInstanceOf[IdentifiedRequest[Request]]
      assert(identified.dst == Dst.Path(Path.read(expected.segment)))
      assert(identified.request.uri == expected.path)
    }
  }
}
|
denverwilliams/linkerd
|
router/http/src/test/scala/io/buoyant/router/http/PathIdentifierTest.scala
|
Scala
|
apache-2.0
| 4,556
|
package org.bdgenomics.sparkbox
import org.apache.avro.file.{ DataFileWriter, DataFileReader }
import org.apache.avro.io.EncoderFactory
import org.apache.avro.specific.{ SpecificDatumWriter, SpecificDatumReader }
import org.apache.hadoop.fs.{ FileContext, AvroFSInput, Path, FileSystem }
import org.apache.spark.mllib.classification.LogisticRegressionModel
import org.apache.spark.mllib.linalg.DenseVector
import org.apache.spark.mllib.regression.GeneralizedLinearModel
import scala.collection.JavaConversions._
import scala.collection.JavaConversions
// A generalized linear model paired with a human-readable name for each of
// its weights, with Avro serialization support.
case class LinearModelWithFeatureNames(featureNames: Seq[String], model: GeneralizedLinearModel) {
// Invariant: exactly one name per model weight (relied on by toAvro/toString).
assume(featureNames.length == model.weights.size)
// Serializes this model as an Avro data file at `filename` on the
// Hadoop filesystem obtained from Common.hadoopConfiguration.
def write(filename: String) = {
val record = toAvro
val filesystem = FileSystem.get(Common.hadoopConfiguration)
val datumWriter = new SpecificDatumWriter[avro.LinearModel]()
val stream = filesystem.create(new Path(filename))
val writer = new DataFileWriter[avro.LinearModel](datumWriter)
/* TODO: support JSON encoding. */
/*
val encoder = if (filename.endsWith(".json")) {
EncoderFactory.get.jsonEncoder(record.getSchema, stream, true)
} else if (filename.endsWith(".avro")) {
EncoderFactory.get.binaryEncoder(stream, null)
} else {
throw new IllegalArgumentException("Unrecognized file extension for filename: %s".format(filename))
}
*/
writer.create(record.getSchema, stream)
writer.append(record)
writer.close()
// NOTE(review): DataFileWriter.close typically closes the underlying stream
// already, making this second close a no-op — confirm it does not throw.
stream.close()
}
// Builds the Avro record: provenance, intercept, and (name, weight) pairs.
def toAvro(): avro.LinearModel = {
val builder = avro.LinearModel.newBuilder
builder.setProvenance(Provenance.get())
builder.setIntercept(model.intercept)
val weights = featureNames.zip(model.weights.toArray).map(pair => new avro.Weight(pair._1, pair._2))
builder.setWeights(JavaConversions.seqAsJavaList(weights))
builder.build()
}
// Lists non-zero weights ordered by descending magnitude, then summarizes
// how many weights are exactly zero.
override def toString: String = {
val builder = StringBuilder.newBuilder
builder ++= "MODEL WEIGHTS:\\n"
(0 until featureNames.size).sortBy(i => math.abs(model.weights(i)) * -1).filter(model.weights(_) != 0.0).foreach(i => {
builder ++= " [%10d] %20s = %.15f\\n".format(i, featureNames(i), model.weights(i))
})
val numZeroFeatures = model.weights.toArray.count(_ == 0.0)
builder ++= "Remaining %,d / %,d = %,f%% features have 0 weight.\\n".format(
numZeroFeatures, model.weights.size, numZeroFeatures * 100.0 / model.weights.size)
builder.result
}
}
object LinearModelWithFeatureNames {
// Reads a single Avro LinearModel record from `filename` and converts it.
// NOTE(review): only the first record in the file is read; any additional
// records are ignored — confirm files always contain exactly one record.
def readFromFile(filename: String): LinearModelWithFeatureNames = {
val path = new Path(filename)
val input = new AvroFSInput(FileContext.getFileContext(Common.hadoopConfiguration), path)
val reader = new SpecificDatumReader[avro.LinearModel]()
reader.setSchema(avro.LinearModel.getClassSchema)
val fileReader = new DataFileReader(input, reader)
val result = fileReader.next()
fileReader.close()
fromAvro(result)
}
// Reconstructs the model from its Avro form. The model is rebuilt as a
// LogisticRegressionModel regardless of the original concrete model type.
def fromAvro(record: avro.LinearModel) = {
val featureNames = record.getWeights.map(_.getName).map(_.toString)
val weights = new DenseVector(record.getWeights.map(_.getValue.toDouble).toArray)
LinearModelWithFeatureNames(
featureNames,
new LogisticRegressionModel(weights, record.getIntercept))
}
}
|
timodonnell/sparkbox
|
src/main/scala/org/bdgenomics/sparkbox/LinearModelWithFeatureNames.scala
|
Scala
|
apache-2.0
| 3,298
|
package com.twitter.finagle.stats.buoyant
import com.twitter.finagle.stats.{BucketAndCount, BucketedHistogram, Counter => FCounter, Stat => FStat}
import com.twitter.util.{Duration, Time}
import java.util.concurrent.atomic.AtomicLong
// Marker hierarchy for the metric kinds tracked by this stats package.
sealed trait Metric
object Metric {
// Placeholder for a metric slot with nothing recorded.
object None extends Metric
// Counter backed by an AtomicLong; safe for concurrent increments.
class Counter extends FCounter with Metric {
private[this] val value = new AtomicLong
def incr(delta: Long): Unit = {
val _ = value.getAndAdd(delta)
}
def get: Long = value.get
}
// Histogram-backed stat. All access to `underlying` is synchronized on it.
class Stat extends FStat with Metric {
// Access must be synchronized
private[this] val underlying = BucketedHistogram()
// Last summary captured by snapshot(); null until snapshot() is first called.
private[this] var summarySnapshot: HistogramSummary = null
private[this] var resetTime = Time.now
// Time of the last reset() (or construction); start of the current window.
def startingAt: Time = resetTime
def add(value: Float): Unit = underlying.synchronized {
// TODO track update time to allow detection of stale stats.
underlying.add(value.toLong)
}
// Current bucket contents without clearing them.
def peek: Seq[BucketAndCount] = underlying.synchronized {
underlying.bucketAndCounts
}
// Captures and caches the current summary (see snapshottedSummary).
def snapshot(): HistogramSummary = underlying.synchronized {
summarySnapshot = summary
summarySnapshot
}
// Drains the histogram, returning its buckets and the elapsed window length.
def reset(): (Seq[BucketAndCount], Duration) = underlying.synchronized {
val buckets = underlying.bucketAndCounts
underlying.clear()
val now = Time.now
val delta = now - resetTime
resetTime = now
(buckets, delta)
}
// Computes a summary of the live histogram (count, extrema, percentiles).
def summary: HistogramSummary = underlying.synchronized {
HistogramSummary(
underlying.count,
underlying.minimum,
underlying.maximum,
underlying.sum,
underlying.percentile(0.50),
underlying.percentile(0.90),
underlying.percentile(0.95),
underlying.percentile(0.99),
underlying.percentile(0.999),
underlying.percentile(0.9999),
underlying.average
)
}
// Most recent result of snapshot(); null if snapshot() was never called.
def snapshottedSummary: HistogramSummary = summarySnapshot
}
// Gauge evaluating `f` lazily on every read.
class Gauge(f: => Float) extends Metric {
def get: Float = f
}
// Immutable summary of a histogram at a point in time.
// p9990 is the 99.9th percentile; p9999 is the 99.99th.
case class HistogramSummary(
count: Long,
min: Long,
max: Long,
sum: Long,
p50: Long,
p90: Long,
p95: Long,
p99: Long,
p9990: Long,
p9999: Long,
avg: Double
)
}
|
BuoyantIO/linkerd
|
telemetry/core/src/main/scala/com/twitter/finagle/stats/buoyant/Metric.scala
|
Scala
|
apache-2.0
| 2,251
|
package org.scalacvx
import org.scalacvx.atoms.Expression
import org.scalacvx.conic.ConicForm
import org.scalacvx.constraints.{ComparisonConstraint, ConeConstraint, Constraint}
import org.scalacvx.dcp.{AffineVexity, ConvexVexity, ConstantVexity, Vexity}
// A convex optimization problem: a convex objective to be minimized subject
// to a (possibly empty) set of comparison constraints.
// NOTE(review): Array fields make case-class equality reference-based for
// `constraints` — confirm structural equality is not relied upon.
case class ConvexProblem(objective: Expression, constraints:Array[ComparisonConstraint] = Array[ComparisonConstraint]()) {
// In Convex.jl, the problem class contains the solution. Can we do better ?
require(objective.vexity.isInstanceOf[ConvexVexity], "Objective should be convex for min problems and concave for max problems.")
// The conic form function converts the current problem into an equivalent one
// with linear function as objective subject to comparison and/or cone constraints
lazy val conicForm:ConicForm = ??? // ConicForm(objective, constraints)
// Returns a new problem with the extra constraint(s) appended; the receiver
// is left unchanged.
def subjectTo(const:ComparisonConstraint) = ConvexProblem(objective, constraints :+ const)
def subjectTo(consts:Array[ComparisonConstraint]) = ConvexProblem(objective, constraints ++ consts)
}
object ConvexProblem {
  /** Builds an unconstrained minimization problem over `expr`. */
  def minimize(expr: Expression) = ConvexProblem(expr, Array.empty[ComparisonConstraint])

  /** Builds a maximization problem by minimizing the negated expression. */
  def maximize(expr: Expression) = ConvexProblem(-expr, Array.empty[ComparisonConstraint])
}
// Termination status reported by the solver.
sealed trait ProblemStatus
case object Solved extends ProblemStatus
case object Unbounded extends ProblemStatus
case object Unfeasible extends ProblemStatus
// Solver output: primal and dual variable values, termination status, the
// optimal objective value, and whether the dual solution is meaningful.
case class Solution[T](primal:Array[T], dual:Array[T], status:ProblemStatus, optval:T, has_dual:Boolean)
|
lorenzolucido/ScalaCVX
|
src/main/scala/org/scalacvx/ConvexProblem.scala
|
Scala
|
mit
| 1,491
|
package io.iohk.ethereum.db.storage
import io.iohk.ethereum.ObjectGenerators
import io.iohk.ethereum.db.dataSource.EphemDataSource
import io.iohk.ethereum.security.SecureRandomBuilder
import io.iohk.ethereum.network.p2p.messages.CommonMessages
import io.iohk.ethereum.network.p2p.messages.CommonMessages.NewBlock
import org.bouncycastle.util.encoders.Hex
import org.scalacheck.Gen
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
import org.scalatest.wordspec.AnyWordSpec
// Property-based tests for BlockBodiesStorage over an in-memory data source.
class BlockBodiesStorageSpec
extends AnyWordSpec
with ScalaCheckPropertyChecks
with ObjectGenerators
with SecureRandomBuilder {
// Chain id "3d" used when generating blocks.
val chainId: Option[Byte] = Hex.decode("3d").headOption
"BlockBodiesStorage" should {
"insert block body properly" in {
forAll(Gen.listOfN(32, ObjectGenerators.newBlockGen(secureRandom, chainId))) { newBlocks =>
// Deduplicate: the generator may repeat blocks, which would double-insert.
val blocks = newBlocks.distinct
val totalStorage = insertBlockBodiesMapping(newBlocks)
blocks.foreach { case NewBlock(block, _) =>
assert(totalStorage.get(block.header.hash).contains(block.body))
}
}
}
"delete block body properly" in {
forAll(Gen.listOfN(32, ObjectGenerators.newBlockGen(secureRandom, chainId))) { newBlocks =>
val blocks = newBlocks.distinct
val storage = insertBlockBodiesMapping(newBlocks)
// Mapping of block bodies is deleted
// Split at a random point: delete a prefix, keep the rest.
val (toDelete, toLeave) = blocks.splitAt(Gen.choose(0, blocks.size).sample.get)
val batchUpdates = toDelete.foldLeft(storage.emptyBatchUpdate) { case (updates, NewBlock(block, _)) =>
updates.and(storage.remove(block.header.hash))
}
batchUpdates.commit()
// Kept entries remain readable; deleted entries are gone.
toLeave.foreach { case NewBlock(block, _) =>
assert(storage.get(block.header.hash).contains(block.body))
}
toDelete.foreach { case NewBlock(block, _) => assert(storage.get(block.header.hash).isEmpty) }
}
}
// Helper: inserts every block body into a fresh storage in one batch.
def insertBlockBodiesMapping(newBlocks: Seq[CommonMessages.NewBlock]): BlockBodiesStorage = {
val storage = new BlockBodiesStorage(EphemDataSource())
val batchUpdates = newBlocks.foldLeft(storage.emptyBatchUpdate) { case (updates, NewBlock(block, _)) =>
updates.and(storage.put(block.header.hash, block.body))
}
batchUpdates.commit()
storage
}
}
}
|
input-output-hk/etc-client
|
src/test/scala/io/iohk/ethereum/db/storage/BlockBodiesStorageSpec.scala
|
Scala
|
mit
| 2,344
|
/*
* Copyright 2013 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.core.iterators
import java.util.{UUID, Map => JMap}
import org.apache.accumulo.core.client.mapreduce.InputFormatBase
import org.apache.accumulo.core.client.{IteratorSetting, ScannerBase}
import org.apache.accumulo.core.data.{Key, Value}
import org.apache.accumulo.core.iterators.{IteratorEnvironment, SkippingIterator, SortedKeyValueIterator}
import org.apache.hadoop.mapreduce.Job
import org.locationtech.geomesa.core.iterators.TimestampSetIterator._
import scala.collection.SortedSet
object TimestampSetIterator {
  /** Option key under which the ';'-separated timestamp list is stored. */
  final val timestampsOption: String = "timestampsOption"

  /**
   * Builds the IteratorSetting shared by both setup variants: a uniquely
   * named TimestampSetIterator at priority 10 carrying the timestamps.
   * (Previously duplicated verbatim in both setupIterator overloads.)
   */
  private def newIteratorSetting(timestampLongs: Seq[Long]): IteratorSetting = {
    val iteratorName: String = "tsi-" + UUID.randomUUID.toString
    val cfg = new IteratorSetting(10, iteratorName, classOf[TimestampSetIterator])
    cfg.addOption(timestampsOption, timestampLongs.map(_.toString).mkString(";"))
    cfg
  }

  /** Attaches the iterator to a scanner, restricting it to `timestampLongs`. */
  def setupIterator(scanner: ScannerBase, timestampLongs: Long*) {
    scanner.addScanIterator(newIteratorSetting(timestampLongs))
  }

  /** Attaches the iterator to a map/reduce job's Accumulo input format. */
  def setupIterator(job: Job, timestampLongs: Long*) {
    InputFormatBase.addIterator(job, newIteratorSetting(timestampLongs))
  }
}
/**
 * Skips source key/value pairs whose key timestamp is not in the configured
 * set. `timestamps` is either supplied directly or parsed from iterator
 * options in init.
 *
 * Note: the original code annotated members with Java's @Override, which is
 * a no-op in Scala (the `override` modifier is the real compiler check);
 * those annotations are removed here.
 */
class TimestampSetIterator(var timestamps: SortedSet[Long])
    extends SkippingIterator {

  // No-arg constructor required for reflective instantiation; state is
  // populated later by init.
  def this() = this(null)

  /** Advances the source until its top key carries an allowed timestamp. */
  override protected def consume() {
    while (getSource.hasTop && !isValid(getSource.getTopKey)) {
      getSource.next()
    }
  }

  private def isValid(topKey: Key): Boolean = timestamps.contains(topKey.getTimestamp)

  /** Deep copying is not supported by this iterator. */
  override def deepCopy(env: IteratorEnvironment): SortedKeyValueIterator[Key, Value] = {
    throw new UnsupportedOperationException
  }

  /** Initializes the source and parses the ';'-separated timestamp option. */
  override def init(source: SortedKeyValueIterator[Key, Value],
                    options: JMap[String, String],
                    env: IteratorEnvironment) {
    super.init(source, options, env)
    timestamps = SortedSet(options.get(timestampsOption).split(";").map(_.toLong): _*)
  }
}
|
jwkessi/geomesa
|
geomesa-core/src/main/scala/org/locationtech/geomesa/core/iterators/TimestampSetIterator.scala
|
Scala
|
apache-2.0
| 2,718
|
package frameless
import org.scalacheck.Prop
import org.scalacheck.Prop._
import shapeless.test.illTyped
// Tests for TypedDataset.drop: compile-time schema checks plus runtime drops.
class DropTest extends TypedDatasetSuite {
import DropTest._
test("fail to compile on missing value") {
val f: TypedDataset[X] = TypedDataset.create(X(1, 1, false) :: X(1, 1, false) :: X(1, 10, false) :: Nil)
// XMissing lacks column j, so the drop must not type-check.
illTyped {
"""val fNew: TypedDataset[XMissing] = f.drop[XMissing]('j)"""
}
}
test("fail to compile on different column name") {
val f: TypedDataset[X] = TypedDataset.create(X(1, 1, false) :: X(1, 1, false) :: X(1, 10, false) :: Nil)
illTyped {
"""val fNew: TypedDataset[XDifferentColumnName] = f.drop[XDifferentColumnName]('j)"""
}
}
test("fail to compile on added column name") {
val f: TypedDataset[X] = TypedDataset.create(X(1, 1, false) :: X(1, 1, false) :: X(1, 10, false) :: Nil)
illTyped {
"""val fNew: TypedDataset[XAdded] = f.drop[XAdded]('j)"""
}
}
test("remove column in the middle") {
val f: TypedDataset[X] = TypedDataset.create(X(1, 1, false) :: X(1, 1, false) :: X(1, 10, false) :: Nil)
// Dropping j from X(i, j, k) yields XGood(i, k).
val fNew: TypedDataset[XGood] = f.drop[XGood]
fNew.collect().run().foreach(xg => assert(xg === XGood(1, false)))
}
test("drop four columns") {
// Drops one column at a time, X5 down to X1, for several element types.
def prop[A: TypedEncoder](value: A): Prop = {
val d5 = TypedDataset.create(X5(value, value, value, value, value) :: Nil)
val d4 = d5.drop[X4[A, A, A, A]]
val d3 = d4.drop[X3[A, A, A]]
val d2 = d3.drop[X2[A, A]]
val d1 = d2.drop[X1[A]]
X1(value) ?= d1.collect().run().head
}
check(prop[Int] _)
check(prop[Long] _)
check(prop[String] _)
check(prop[SQLDate] _)
check(prop[Option[X1[Boolean]]] _)
}
}
object DropTest {
// Full schema used by the tests.
case class X(i: Int, j: Int, k: Boolean)
// Invalid targets: missing a column, renamed column, extra column.
case class XMissing(i: Int)
case class XDifferentColumnName(ij: Int, k: Boolean)
case class XAdded(i: Int, j: Int, k: Boolean, l: Int)
// Valid target: X with column j dropped.
case class XGood(i: Int, k: Boolean)
}
|
adelbertc/frameless
|
dataset/src/test/scala/frameless/DropTest.scala
|
Scala
|
apache-2.0
| 1,962
|
package benchmark
import java.util.function.BiConsumer
import org.openjdk.jmh.annotations._
import mutabilite._
import org.openjdk.jmh.infra.Blackhole
import scala.collection.mutable.{OpenHashMap => StdlibMap}
import java.util.{HashMap => JavaMap}
@State(Scope.Thread)
/**
 * JMH benchmarks comparing the specialized map against Scala's OpenHashMap
 * and java.util.HashMap. All pre-populated maps hold the same `size` keys so
 * the read benchmarks compare equivalent states. (Debox comparisons are kept
 * commented out, matching the original file.)
 */
class MapBenchmark {

  import Benchmark._

  // Specialized map pre-populated with `size` keys mapped to themselves.
  val specMap: Map_Object_Object[Key, Key] = {
    val map = new Map_Object_Object[Key, Key](initialSize)
    var i = 0
    while (i < size) {
      map.put(keys(i), keys(i))
      i += 1
    }
    map
  }

  // val deboxMap: debox.Map[Key, Key] = {
  //   val map = debox.Map.ofSize[Key, Key](size)
  //   var i = 0
  //   while (i < size) {
  //     map.update(keys(i), keys(i))
  //     i += 1
  //   }
  //   map
  // }

  // Scala stdlib OpenHashMap with the same contents.
  val stdMap: StdlibMap[Key, Key] = {
    val map = new StdlibMap[Key, Key](initialSize)
    var i = 0
    while (i < size) {
      map.put(keys(i), keys(i))
      i += 1
    }
    map
  }

  // java.util.HashMap with the same contents.
  val javaMap: JavaMap[Key, Key] = {
    val map = new JavaMap[Key, Key](initialSize)
    var i = 0
    while (i < size) {
      map.put(keys(i), keys(i))
      i += 1
    }
    map
  }

  // Re-randomized before every invocation so lookups are not trivially cached.
  var randKey: Key = _
  var nonExistingKey: Key = _

  @Setup(Level.Invocation)
  def setup = {
    randKey = keys(random.nextInt(size))
    nonExistingKey = Key.generate
  }

  @Benchmark
  def getDirectSpecialized = specMap(randKey)

  // @Benchmark
  // def getDirectDebox = deboxMap(randKey)

  @Benchmark
  def getDirectStdlib = stdMap(randKey)

  @Benchmark
  def getDirectJavalib = javaMap.get(randKey)

  @Benchmark
  def getNonExistingSpecialized = specMap.get(nonExistingKey)

  // @Benchmark
  // def getNonExistingDebox = deboxMap.get(nonExistingKey)

  @Benchmark
  def getNonExistingStdlib = stdMap.get(nonExistingKey)

  @Benchmark
  def getNonExistingJavalib = javaMap.get(nonExistingKey)

  @Benchmark
  def putAllSpecialized = {
    val m = new Map_Object_Object[Key, Key](initialSize = initialSize)
    var i = 0
    while (i < size) {
      m.put(keys(i), keys(i))
      i += 1
    }
  }

  // @Benchmark
  // def putAllDebox = {
  //   val m = debox.Map.ofSize[Key, Key](size)
  //   var i = 0
  //   while (i < size) {
  //     m.update(keys(i), keys(i))
  //     i += 1
  //   }
  // }

  @Benchmark
  def putAllStdlib = {
    val m = new StdlibMap[Key, Key](initialSize = initialSize)
    var i = 0
    while (i < size) {
      m.put(keys(i), keys(i))
      i += 1
    }
  }

  @Benchmark
  def putAllJavalib = {
    val m = new JavaMap[Key, Key](initialSize)
    var i = 0
    while (i < size) {
      m.put(keys(i), keys(i))
      i += 1
    }
  }

  @Benchmark
  def foreachSpecialized(blackhole: Blackhole) =
    specMap foreach ((k, v) => blackhole.consume(k))

  // @Benchmark
  // def foreachDebox(blackhole: Blackhole) =
  //   deboxMap foreach ((k, v) => blackhole.consume(k))

  @Benchmark
  def foreachStdlib(blackhole: Blackhole) =
    stdMap foreach (blackhole.consume(_))

  @Benchmark
  def foreachJavalib(blackhole: Blackhole) =
    javaMap forEach (new BiConsumer[Key, Key] {
      def accept(t: Key, x: Key): Unit = blackhole.consume(t)
    })

  @Benchmark
  def putRemoveReadSpecialized(blackhole: Blackhole) = {
    val map = new Map_Object_Object[Key, Key](initialSize = initialSize)
    var i = 0
    while (i < size) { map.put(keys(i), keys(i)); i += 1 }
    i = 0
    while (i < size / 10) { map.remove(keys(i * 10)); i += 1 }
    i = 0
    while (i < size) { blackhole.consume(map.get(keys(i))); i += 1 }
  }

  // @Benchmark
  // def putRemoveReadDebox(blackhole: Blackhole) = {
  //   val map = debox.Map.ofSize[Key, Key](size)
  //   var i = 0
  //   while (i < size) { map.update(keys(i), keys(i)); i += 1 }
  //   i = 0
  //   while (i < size / 10) { map.remove(keys(i * 10)); i += 1 }
  //   i = 0
  //   while (i < size) { blackhole.consume(map.get(keys(i))); i += 1 }
  // }

  @Benchmark
  def putRemoveReadStdlib(blackhole: Blackhole) = {
    val map = new StdlibMap[Key, Key](initialSize = initialSize)
    var i = 0
    while (i < size) { map.put(keys(i), keys(i)); i += 1 }
    i = 0
    while (i < size / 10) { map.remove(keys(i * 10)); i += 1 }
    i = 0
    while (i < size) { blackhole.consume(map.get(keys(i))); i += 1 }
  }

  @Benchmark
  def putRemoveReadJavalib(blackhole: Blackhole) = {
    val map = new JavaMap[Key, Key](initialSize)
    var i = 0
    while (i < size) { map.put(keys(i), keys(i)); i += 1 }
    i = 0
    while (i < size / 10) { map.remove(keys(i * 10)); i += 1 }
    i = 0
    // FIX: previously looked up map.get(i) — an Int key against a Key-keyed
    // HashMap (compiles because get takes Object) — which always returned
    // null, so the read phase measured nothing comparable to the other maps.
    while (i < size) { blackhole.consume(map.get(keys(i))); i += 1 }
  }
}
@State(Scope.Thread)
/** Measures bulk removal from the specialized map (repopulated per run). */
class MapRemoveSpecializedBenchmark {

  import Benchmark._

  var map: Map_Object_Object[Key, Key] = _

  /** Fills the map with all keys before each invocation. */
  @Setup(Level.Invocation)
  def setup = {
    map = new Map_Object_Object[Key, Key](initialSize = initialSize)
    var i = 0
    while (i < size) { map.put(keys(i), keys(i)); i += 1 }
  }

  /** Removes every tenth key. */
  @Benchmark
  def benchmark = {
    0 until (size / 10) foreach (i => map.remove(keys(i * 10)))
  }
}
//@State(Scope.Thread)
//class MapRemoveDeboxBenchmark {
//
// import Benchmark._
//
// var map: debox.Map[Key, Key] = _
//
// @Setup(Level.Invocation)
// def setup = {
// map = debox.Map.ofSize[Key, Key](size)
// 0 until size foreach (i => map.update(keys(i), keys(i)))
// }
//
// @Benchmark
// def benchmark = {
// var i = 0
// while (i < size / 10) { map.remove(keys(i * 10)); i += 1 }
// }
//}
@State(Scope.Thread)
/** Measures bulk removal from Scala's OpenHashMap (repopulated per run). */
class MapRemoveStdlibBenchmark {

  import Benchmark._

  var map: StdlibMap[Key, Key] = _

  /** Fills the map with all keys before each invocation. */
  @Setup(Level.Invocation)
  def setup = {
    map = new StdlibMap[Key, Key](initialSize = initialSize)
    var i = 0
    while (i < size) { map.put(keys(i), keys(i)); i += 1 }
  }

  /** Removes every tenth key. */
  @Benchmark
  def benchmark = {
    0 until (size / 10) foreach (i => map.remove(keys(i * 10)))
  }
}
@State(Scope.Thread)
/** Measures bulk removal from java.util.HashMap (repopulated per run). */
class MapRemoveJavalibBenchmark {

  import Benchmark._

  var map: JavaMap[Key, Key] = _

  /** Fills the map with all keys before each invocation. */
  @Setup(Level.Invocation)
  def setup = {
    map = new JavaMap[Key, Key](initialSize)
    var i = 0
    while (i < size) { map.put(keys(i), keys(i)); i += 1 }
  }

  /** Removes every tenth key. */
  @Benchmark
  def benchmark = {
    0 until (size / 10) foreach (i => map.remove(keys(i * 10)))
  }
}
|
adamwy/scala-offheap-collections
|
benchmark/src/main/scala/MapBenchmark.scala
|
Scala
|
bsd-3-clause
| 6,141
|
package com.github.sstone.amqp.samples
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import akka.actor.ActorSystem
import akka.pattern.ask
import akka.util.Timeout
import com.github.sstone.amqp.{ConnectionOwner, RpcClient}
import com.github.sstone.amqp.RpcClient.Request
import com.github.sstone.amqp.Amqp.Publish
import com.rabbitmq.client.ConnectionFactory
/**
* start with mvn exec:java -Dexec.mainClass=com.github.sstone.amqp.samples.BasicRpcClient -Dexec.classpathScope="compile"
*/
// Sample RPC client: publishes a request to amq.direct every second and
// prints the first delivery of each response.
object BasicRpcClient extends App {
// NOTE(review): the global execution context is acceptable in a sample;
// production code should accept an ExecutionContext from the caller.
import ExecutionContext.Implicits.global
implicit val system = ActorSystem("mySystem")
implicit val timeout: Timeout = 5.seconds
// create an AMQP connection
val connFactory = new ConnectionFactory()
connFactory.setUri("amqp://guest:guest@localhost/%2F")
val conn = system.actorOf(ConnectionOwner.props(connFactory, 1.second))
val client = ConnectionOwner.createChildActor(conn, RpcClient.props())
// send 1 request every second
while(true) {
println("sending request")
// NOTE(review): failures of this Future (ask timeout, failed cast, empty
// deliveries) are silently dropped — consider a recover/onComplete handler.
(client ? Request(Publish("amq.direct", "my_key", "test".getBytes("UTF-8")))).mapTo[RpcClient.Response].map(response => {
// we expect 1 delivery
val delivery = response.deliveries.head
println("response : " + new String(delivery.body))
})
Thread.sleep(1000)
}
}
|
gawkermedia/amqp-client
|
src/main/scala/com/github.sstone/amqp/samples/BasicRpcClient.scala
|
Scala
|
mit
| 1,350
|
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.ejson
import slamdata.Predef.{Byte => SByte, Char => SChar, Int => _, Map => _, _}
import quasar.fp.PrismNT
import monocle.Prism
import scalaz.{==>>, Order}
/** Prisms into the cases of the EJson functor. */
object optics {
  import Common.{Optics => CO}
  import Extension.{Optics => EO}

  /** Prism focusing on the Common cases of EJson. */
  def com[A]: Prism[EJson[A], Common[A]] =
    PrismNT.inject[Common, EJson].asPrism[A]

  /** Prism focusing on the Extension cases of EJson. */
  def ext[A]: Prism[EJson[A], Extension[A]] =
    PrismNT.inject[Extension, EJson].asPrism[A]

  /** Arrays. */
  def arr[A]: Prism[EJson[A], List[A]] = com.composePrism(CO.arr)

  /** Booleans. */
  def bool[A]: Prism[EJson[A], Boolean] = com.composePrism(CO.bool)

  /** Bytes. */
  def byte[A]: Prism[EJson[A], SByte] = ext.composePrism(EO.byte)

  /** Characters. */
  def char[A]: Prism[EJson[A], SChar] = ext.composePrism(EO.char)

  /** Decimals. */
  def dec[A]: Prism[EJson[A], BigDecimal] = com.composePrism(CO.dec)

  /** Integers. */
  def int[A]: Prism[EJson[A], BigInt] = ext.composePrism(EO.int)

  /** Ordered maps (requires an Order on keys). */
  def imap[A: Order]: Prism[EJson[A], A ==>> A] = ext.composePrism(EO.imap)

  /** Association-list maps. */
  def map[A]: Prism[EJson[A], List[(A, A)]] = ext.composePrism(EO.map)

  /** Metadata annotations. */
  def meta[A]: Prism[EJson[A], (A, A)] = ext.composePrism(EO.meta)

  /** Null. */
  def nul[A]: Prism[EJson[A], Unit] = com.composePrism(CO.nul)

  /** Strings. */
  def str[A]: Prism[EJson[A], String] = com.composePrism(CO.str)
}
|
jedesah/Quasar
|
ejson/src/main/scala/quasar/ejson/optics.scala
|
Scala
|
apache-2.0
| 1,833
|
package is.launaskil
import is.launaskil.models.Timestamp$
import org.joda.time.DateTime
import scala.language.implicitConversions
package object models {
// Implicit bridge from the generated Timestamp wrapper to Joda's DateTime.
// NOTE(review): the file imports `Timestamp$` (the object's JVM-mangled
// name) rather than the Timestamp type — confirm the type resolves here
// (it likely does only because this package object lives in `models`).
implicit def time2joda(time: Timestamp): DateTime = new DateTime(time.millis)
}
|
olafurpg/slick-codegen-scalajs
|
server/app/is/launaskil/models/package.scala
|
Scala
|
mit
| 240
|
package org.bitcoins.spvnode.networking
import java.net.{InetSocketAddress, ServerSocket}
import akka.actor.ActorSystem
import akka.io.{Inet, Tcp}
import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe}
import org.bitcoins.core.config.TestNet3
import org.bitcoins.core.util.{BitcoinSLogger, BitcoinSUtil}
import org.bitcoins.spvnode.messages.control.VersionMessage
import org.bitcoins.spvnode.messages.{NetworkPayload, VersionMessage}
import org.bitcoins.spvnode.util.BitcoinSpvNodeUtil
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FlatSpecLike, MustMatchers}
import scala.concurrent.duration._
import scala.util.Try
/**
* Created by chris on 6/7/16.
*/
// Integration tests for Client against live testnet DNS seeds.
// NOTE(review): these tests depend on external network availability and a
// hard-coded local port, so they can fail for environmental reasons.
class ClientTest extends TestKit(ActorSystem("ClientTest")) with FlatSpecLike
with MustMatchers with ImplicitSender
with BeforeAndAfter with BeforeAndAfterAll {
"Client" must "connect to a node on the bitcoin network, " +
"send a version message to a peer on the network and receive a version message back, then close that connection" in {
val probe = TestProbe()
val client = TestActorRef(Client.props,probe.ref)
val remote = new InetSocketAddress(TestNet3.dnsSeeds(1), TestNet3.port)
val randomPort = 23521
//random port
client ! Tcp.Connect(remote, Some(new InetSocketAddress(randomPort)))
//val bound : Tcp.Bound = probe.expectMsgType[Tcp.Bound]
val conn : Tcp.Connected = probe.expectMsgType[Tcp.Connected]
//make sure the socket is currently bound
// While connected, binding a ServerSocket to the same port must fail.
Try(new ServerSocket(randomPort)).isSuccess must be (false)
client ! Tcp.Abort
val confirmedClosed = probe.expectMsg(Tcp.Aborted)
//make sure the port is now available
val boundSocket = Try(new ServerSocket(randomPort))
boundSocket.isSuccess must be (true)
boundSocket.get.close()
}
it must "bind connect to two nodes on one port" in {
//NOTE if this test case fails it is more than likely because one of the two dns seeds
//below is offline
val remote1 = new InetSocketAddress(TestNet3.dnsSeeds(1), TestNet3.port)
val remote2 = new InetSocketAddress(TestNet3.dnsSeeds(2), TestNet3.port)
val probe1 = TestProbe()
val probe2 = TestProbe()
val client1 = TestActorRef(Client.props, probe1.ref)
val client2 = TestActorRef(Client.props, probe2.ref)
val local1 = new InetSocketAddress(TestNet3.port)
// SO_REUSEADDR lets the second client bind while the first socket lingers.
val options = List(Inet.SO.ReuseAddress(true))
client1 ! Tcp.Connect(remote1,Some(local1),options)
probe1.expectMsgType[Tcp.Connected]
client1 ! Tcp.Abort
val local2 = new InetSocketAddress(TestNet3.port)
client2 ! Tcp.Connect(remote2,Some(local2),options)
probe2.expectMsgType[Tcp.Connected](5.seconds)
client2 ! Tcp.Abort
}
override def afterAll: Unit = {
TestKit.shutdownActorSystem(system)
}
}
|
Christewart/bitcoin-s-spv-node
|
src/test/scala/org/bitcoins/spvnode/networking/ClientTest.scala
|
Scala
|
mit
| 2,809
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.models.lm
import breeze.linalg.{DenseMatrix, DenseVector}
import io.github.mandar2812.dynaml.optimization.{RegularizedLSSolver, RegularizedOptimizer}
import org.apache.spark.rdd.RDD
/**
* @author mandar2812 date: 25/01/2017.
* A Generalized Linear Model applied to a
* single output regression task. The training
* set is an Apache Spark [[RDD]]
*
* @param data The training data as an [[RDD]]
* @param numPoints Number of training data points
* @param map A general non-linear feature mapping/basis function expansion.
*
*/
class SparkGLM(
data: RDD[(DenseVector[Double], Double)], numPoints: Long,
map: (DenseVector[Double]) => DenseVector[Double] = identity[DenseVector[Double]])
extends GenericGLM[
RDD[(DenseVector[Double], Double)],
(DenseMatrix[Double], DenseVector[Double])](data, numPoints, map) {
// Sample input used only to determine the feature dimension; lazy so the
// RDD is not touched until dimensions is first needed.
private lazy val sample_input = g.first()._1
/**
* The link function; in this case simply the identity map
* */
override val h: (Double) => Double = identity[Double]
// NOTE(review): featureMap must be assigned before initParams() runs below,
// since dimensions depends on it — preserve this ordering.
featureMap = map
override protected var params: DenseVector[Double] = initParams()
override protected val optimizer: RegularizedOptimizer[
DenseVector[Double], DenseVector[Double],
Double, (DenseMatrix[Double], DenseVector[Double])] = new RegularizedLSSolver
// Dimensionality of the mapped feature space (without the bias term).
def dimensions = featureMap(sample_input).length
// Parameters: one weight per mapped feature plus a trailing bias term.
override def initParams() = DenseVector.zeros[Double](dimensions + 1)
/**
* Input an [[RDD]] containing the data set and output
* a design matrix and response vector which can be solved
* in the OLS sense.
* */
override def prepareData(d: RDD[(DenseVector[Double], Double)]) = {
val phi = featureMap
val mapFunc = (xy: (DenseVector[Double], Double)) => {
// Append 1.0 to the mapped features so the bias is learned jointly.
val phiX = DenseVector(phi(xy._1).toArray ++ Array(1.0))
val phiY = phiX*xy._2
(phiX*phiX.t, phiY)
}
// Sum the per-point outer products and responses within each partition
// first, then across partitions, to minimize shuffled data.
d.mapPartitions((partition) => {
Iterator(partition.map(mapFunc).reduce((a,b) => (a._1+b._1, a._2+b._2)))
}).reduce((a,b) => (a._1+b._1, a._2+b._2))
}
}
|
transcendent-ai-labs/DynaML
|
dynaml-core/src/main/scala/io/github/mandar2812/dynaml/models/lm/SparkGLM.scala
|
Scala
|
apache-2.0
| 2,833
|
package fpinscala.errorhandling
import scala.{Option => _, Either => _, Left => _, Right => _, _} // hide std library `Option` and `Either`, since we are writing our own in this chapter
/** Hand-rolled `Either` for the fpinscala error-handling exercises.
  * `Left` carries an error of type `E`; `Right` carries a success of type `A`.
  * All combinators are right-biased and short-circuit on the first `Left`.
  */
sealed trait Either[+E,+A] {
  /** Applies `f` to the success value; a failure passes through unchanged. */
  def map[B](f: A => B): Either[E, B] = this match {
    case Right(value)      => Right(f(value))
    case failure @ Left(_) => failure
  }
  /** Sequences a dependent computation, short-circuiting on failure. */
  def flatMap[EE >: E, B](f: A => Either[EE, B]): Either[EE, B] = this match {
    case Right(value)      => f(value)
    case failure @ Left(_) => failure
  }
  /** Returns this on success, otherwise the (lazily evaluated) fallback. */
  def orElse[EE >: E, B >: A](b: => Either[EE, B]): Either[EE, B] = this match {
    case Right(_) => this
    case Left(_)  => b
  }
  /** Combines two successes with `f`; the first failure encountered wins. */
  def map2[EE >: E, B, C](b: Either[EE, B])(f: (A, B) => C): Either[EE, C] =
    flatMap(a => b.map(bb => f(a, bb)))
}
/** Failure case. */
case class Left[+E](get: E) extends Either[E,Nothing]
/** Success case. */
case class Right[+A](get: A) extends Either[Nothing,A]
object Either {
  /** Maps `f` over the list, collecting results; stops at the first `Left`. */
  def traverse[E,A,B](es: List[A])(f: A => Either[E, B]): Either[E, List[B]] =
    es match {
      case Nil          => Right(Nil)
      case head :: tail => f(head).map2(traverse(tail)(f))(_ :: _)
    }
  /** Same contract as `traverse`, expressed as a single foldRight. */
  def traverse_1[E,A,B](es: List[A])(f: A => Either[E, B]): Either[E, List[B]] =
    es.foldRight[Either[E, List[B]]](Right(Nil))((a, acc) => f(a).map2(acc)(_ :: _))
  /** Turns a list of Eithers into an Either of a list; first `Left` wins. */
  def sequence[E,A](es: List[Either[E,A]]): Either[E,List[A]] =
    es match {
      case Nil          => Right(Nil)
      case head :: tail => head.map2(sequence(tail))(_ :: _)
    }
  /** Fold-based variant of `sequence` with identical behavior. */
  def sequence_1 [E,A](es: List[Either[E,A]]): Either[E,List[A]] =
    es.foldRight[Either[E,List[A]]](Right(Nil))((a, acc) => a.map2(acc)(_ :: _))
  /** Arithmetic mean, or a descriptive error for an empty sequence. */
  def mean(xs: IndexedSeq[Double]): Either[String, Double] =
    if (xs.isEmpty) Left("mean of empty list!")
    else Right(xs.sum / xs.length)
  /** Integer division lifted into Either; division by zero becomes a Left. */
  def safeDiv(x: Int, y: Int): Either[Exception, Int] =
    Try(x / y)
  /** Evaluates `a`, capturing any thrown Exception as a Left. */
  def Try[A](a: => A): Either[Exception, A] =
    try Right(a)
    catch { case e: Exception => Left(e) }
}
|
mkunikow/fpinscala
|
exercises/src/main/scala/fpinscala/errorhandling/Either.scala
|
Scala
|
mit
| 1,997
|
/*
* Copyright 2015-2020 Noel Welsh
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package doodle
package algebra
package generic
import cats.data.State
import doodle.core.{BoundingBox, Transform => Tx}
import doodle.core.font.Font
// Generic (backend-independent) part of the Text algebra. Concrete backends
// supply a TextApi implementation; this trait wires it into Finalized pictures.
// (`?` is kind-projector syntax for a partially-applied type.)
trait GenericText[F[_]] extends Text[Finalized[F, ?]] {
trait TextApi {
/**
* The type of additional information that is useful for laying out text.
*
* Text layout is complicated. Doodle's layout only cares about the bounding
* box, with the usual assumption that the origin is the center of the
* bounding box. However, when we come to actually render the text we
* usually want additional information. In particular we usually specify the
* origin where we start rendering as the left-most point on the baseline of
* the text (and text may descend below the baseline). This is difficult to
* calculate just from the Doodle bounding box, so we allows methods to
* return an additional piece of information that can be used to layout the
* text.
*/
type Bounds
// Renders `text` with the given transform, styling, font and the
// backend-specific layout info previously computed by textBoundingBox.
def text(tx: Tx, fill: Option[Fill], stroke: Option[Stroke], font: Font, text: String, bounds: Bounds): F[Unit]
// Measures `text` in `font`: Doodle bounding box plus backend layout info.
def textBoundingBox(text: String, font: Font): (BoundingBox, Bounds)
}
// Backend-supplied implementation of the text API.
def TextApi: TextApi
// Changes the font in the drawing context used to render `image`.
def font[A](image: Finalized[F, A], font: Font): Finalized[F, A] =
Finalized.contextTransform(_.font(font))(image)
// Builds a leaf picture: measures the text eagerly (using the context's font),
// then defers rendering until the transform is known at drawing time.
def text(text: String): Finalized[F, Unit] = {
val api = TextApi
Finalized.leaf { dc =>
val (bb, bounds) = api.textBoundingBox(text, dc.font)
(bb, State.inspect(tx => api.text(tx, dc.fill, dc.stroke, dc.font, text, bounds)))
}
}
}
|
underscoreio/doodle
|
core/shared/src/main/scala/doodle/algebra/generic/GenericText.scala
|
Scala
|
apache-2.0
| 2,186
|
package dibl
import java.io.{ BufferedReader, File, FileInputStream, InputStreamReader }
import dibl.sheet.SheetSVG
import scala.collection.JavaConverters._
// Test helper that scrapes lace pattern definitions out of the gw-lace-to-gf
// documentation index and renders them as SVG sheets.
// NOTE(review): several regex string literals below (e.g. `".*\\""` and
// `"\\".*"`) look garbled — possibly mangled by extraction/escaping — and as
// written may not parse; verify against the original repository source.
object Patterns {
// Extracts the Tesselace pattern number from a documentation line.
private def getTesselaceNr(s: String) = {
s.replaceAll(". pattern.*", "")
.replaceAll(".*\\"", "")
}
// Renders "matrix;tile" sheet arguments into an SVG document.
private def toSheetSvg(str: String) = {
val Array(m, t) = str.split(";")
val patterns = new SheetSVG
patterns.add(m, t)
patterns.toSvgDoc()
}
// Documentation lines that define a pattern.
private lazy val tesselaceLines: Seq[String] = readLines("../gw-lace-to-gf/docs/index.md", "pattern=")
/** (nr, sheetArgs, svg, tileArgs) */
lazy val tesselaceSheets: Seq[(String, String, String, String)] = tesselaceLines.flatMap { line => line
.replaceAll(""".*"patch=""", "")
.replaceAll("""" *%}""", "")
.split("&patch=")
.zipWithIndex.toSeq
.map { case (sheetArgs: String, i: Int) =>
val svg = toSheetSvg(sheetArgs)
// Pull the tile-page query string out of the generated SVG's links.
val tileArgs = svg.split("\\n")
.filter(_.contains("tiles.html"))
.map(_.replaceAll(".*[?]","").replaceAll("'.*",""))
.head
(s"${getTesselaceNr(line)}-$i", sheetArgs, svg, tileArgs)
}
}
// (pattern number, tile arguments) pairs for Tesselace patterns.
lazy val tesselace: Seq[(String, String)] = tesselaceLines.map { s =>
(getTesselaceNr(s)
, s.replaceAll(".*pattern=.", "tile=")
.replaceAll("\\".*", "")
)
}
// (id, query string) pairs for Whiting patterns.
lazy val whiting: Seq[(String, String)] = readLines("../gw-lace-to-gf/docs/index.md", "tiles?whiting").map { s =>
(s.replaceAll(".*whiting=", "")
.replaceAll("&.*", "")
, s.replaceAll(".*tiles[?]", "")
)
}
lazy val all: Seq[(String, String)] = tesselace.toList ::: whiting.toList //::: tesselaceSheets.map(_._4)
// Distinct lines of `file` containing `content`.
// NOTE(review): the reader is never closed — acceptable for test code, but a
// resource leak if reused elsewhere.
private def readLines(file: String, content: String): Seq[String] = {
new BufferedReader(new InputStreamReader(new FileInputStream(file)))
.lines().iterator().asScala
.withFilter(_.contains(content))
.toSeq.distinct
}
}
|
d-bl/GroundForge
|
src/test/scala/dibl/Patterns.scala
|
Scala
|
gpl-3.0
| 1,903
|
package org.jetbrains.plugins.scala
package codeInspection
package shadow
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.openapi.project.Project
import com.intellij.psi.{PsiElement, PsiNamedElement, ResolveResult}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScCaseClause, ScReferencePattern}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.{createPatternFromText, createReferenceFromText}
import org.jetbrains.plugins.scala.lang.resolve.StdKinds
import org.jetbrains.plugins.scala.lang.resolve.processor.ResolveProcessor
/** Inspection that warns when a variable pattern in a case clause shadows an
  * existing stable identifier accessible at that position — a frequent bug
  * source, since the pattern silently binds a fresh name instead of comparing
  * against the existing value.
  */
class VariablePatternShadowInspection extends AbstractInspection("VariablePatternShadow", "Suspicious shadowing by a Variable Pattern") {
  def actionFor(holder: ProblemsHolder): PartialFunction[PsiElement, Any] = {
    case refPat: ScReferencePattern => check(refPat, holder)
  }
  private def check(refPat: ScReferencePattern, holder: ProblemsHolder) {
    // Only patterns that sit directly in a case clause can shadow this way.
    val isInCaseClause = ScalaPsiUtil.nameContext(refPat).isInstanceOf[ScCaseClause]
    if (isInCaseClause) {
      // Synthesize a reference with the same name at the same position and try
      // to resolve it; any accessible result means the pattern shadows it.
      val dummyRef: ScStableCodeReferenceElement = createReferenceFromText(refPat.name, refPat.getContext.getContext, refPat)
      if (dummyRef == null) return //can happen in invalid code, e.g. if ')' is absent in case pattern
      val proc = new ResolveProcessor(StdKinds.valuesRef, dummyRef, refPat.name)
      // dummyRef is already statically typed as ScStableCodeReferenceElement;
      // the previous redundant asInstanceOf cast has been removed.
      val results = dummyRef.doResolve(proc)
      def isAccessible(rr: ResolveResult): Boolean = rr.getElement match {
        case named: PsiNamedElement => proc.isAccessible(named, refPat)
        case _ => false
      }
      if (results.exists(isAccessible)) {
        holder.registerProblem(refPat.nameId, getDisplayName, new ConvertToStableIdentifierPatternFix(refPat), new RenameVariablePatternFix(refPat))
      }
    }
  }
}
// Quick fix: wrap the pattern name in backticks, turning the variable pattern
// into a stable identifier pattern that compares against the existing value
// instead of binding a fresh variable.
class ConvertToStableIdentifierPatternFix(r: ScReferencePattern)
extends AbstractFixOnPsiElement("Convert to Stable Identifier Pattern `%s`".format(r.getText), r) {
def doApplyFix(project: Project) {
val ref = getElement
// Rebuild the pattern as `name` (backticked) and swap it in place.
val stableIdPattern = createPatternFromText("`%s`".format(ref.getText))(ref.getManager)
ref.replace(stableIdPattern)
}
}
// Quick fix: launch the rename refactoring on the shadowing pattern.
class RenameVariablePatternFix(ref: ScReferencePattern) extends RenameElementQuickfix(ref, "Rename Variable Pattern")
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/codeInspection/shadow/VariablePatternShadowInspection.scala
|
Scala
|
apache-2.0
| 2,476
|
package api
import models.api.IntegrationToken
import models.{IntegrationTopic, IntegrationUpdate}
import play.api.Play
import play.api.mvc.{AnyContent, Request, Result}
import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration
/**
* @author Alefas
* @since 15/09/15
*/
// A chat-service integration: bundles the OAuth flow, optional webhook
// support, and the message/user handlers for one external service.
trait Integration {
// Stable identifier of the integration.
def id: String
// Human-readable name of the integration.
def name: String
// NOTE(review): "authentificator" is a misspelling of "authenticator",
// kept as-is for source compatibility with implementers.
def authentificator: OAuthAuthentificator
// Webhook support, when the service provides hooks.
def hookHandler: Option[HookHandler]
def messageHandler: MessageHandler
def userHandler: UserHandler
}
// OAuth flow for one integration. Client credentials are read from Play
// configuration under api.<integrationId>.*.
trait OAuthAuthentificator {
def integrationId: String
// Reads api.<integrationId>.clientId; `.get` throws if the key is missing,
// i.e. configuration is assumed complete at startup.
final def clientId: String = Play.current.configuration.getString(s"api.$integrationId.clientId").get
final def clientSecret: String = Play.current.configuration.getString(s"api.$integrationId.clientSecret").get
// Starts the OAuth flow for the current request.
def auth(redirectUrl: Option[String], state: String)(implicit request: Request[AnyContent]): Future[Result]
// Invalidates the given token; the Boolean presumably signals success — TODO confirm.
def logout(token: String): Future[Boolean]
// Exchanges an authorization code for an access token.
def token(redirectUri: String, code: String): Future[String]
}
/** Handler for incoming webhooks from an external service. */
trait HookHandler {
/** Prepares the handler using the given access token. */
def init(token: String): Unit
/** Processes an incoming hook. Explicit `: Unit` replaces the deprecated
  * procedure syntax; the declared result type is unchanged. */
def handle(): Unit //todo:
}
// Message-level operations against the external service.
trait MessageHandler {
// Fetches updates since the given marker (all available when None).
def collectMessages(integrationToken: IntegrationToken, since: Option[Long]): Future[CollectedMessages]
// Posts `message` into an existing topic; returns the created update, if any.
def sendMessage(integrationToken: IntegrationToken, groupId: String, topicId: String, message: SentMessage, messageId: Long): Future[Option[IntegrationUpdate]]
// Whether the service supports creating topics from this side.
def isNewTopicAvailable: Boolean
// Creates a new topic in the group; returns it, if creation succeeded.
def newTopic(integrationToken: IntegrationToken, groupId: String, message: SentNewTopic): Future[Option[IntegrationTopic]]
}
// User-profile lookups against the external service. When `login` is None the
// queries presumably refer to the token's own user — TODO confirm with implementers.
trait UserHandler {
def login(token: String): Future[String]
def name(token: String, login: Option[String] = None): Future[Option[String]]
def avatarUrl(token: String, login: Option[String] = None): Future[Option[String]]
def email(token: String, login: Option[String] = None): Future[Option[String]]
def groupName(token: String, groupId: String): Future[String]
}
// Result of MessageHandler.collectMessages: updates grouped by topic, the
// delay before the next poll, and the newest update marker observed.
case class CollectedMessages(messages: Map[IntegrationTopic, Seq[IntegrationUpdate]], nextCheck: FiniteDuration, lastUpdate: Long)
// Payloads that can be sent into an existing topic.
sealed trait SentMessage
case class TopicComment(text: String) extends SentMessage
// Payloads that can open a new topic.
sealed trait SentNewTopic
case class NewTopic(text: String) extends SentNewTopic
|
JetChat/JetChat
|
app/api/Integration.scala
|
Scala
|
apache-2.0
| 2,206
|
package im.tox.antox.callbacks
import android.app.{Notification, PendingIntent}
import android.content.{Context, Intent}
import android.preference.PreferenceManager
import android.support.v4.app.{NotificationCompat, TaskStackBuilder}
import android.util.Log
import im.tox.antox.R
import im.tox.antox.activities.MainActivity
import im.tox.antox.callbacks.AntoxOnMessageCallback._
import im.tox.antox.data.{AntoxDB, State}
import im.tox.antox.tox.{MessageHelper, ToxSingleton}
import im.tox.antox.utils.{Hex, Constants}
import im.tox.antox.wrapper.MessageType
import im.tox.tox4j.core.callbacks.{GroupMessageCallback, FriendMessageCallback}
/** Callback invoked by tox4j when a message arrives in a group chat; forwards
  * the decoded message to MessageHelper for storage/notification.
  */
class AntoxOnGroupMessageCallback(private var ctx: Context) extends GroupMessageCallback {
  override def groupMessage(groupNumber: Int, peerNumber: Int, timeDelta: Int, message: Array[Byte]): Unit = {
    // Look up the group id once instead of performing the same lookup twice.
    val groupId = ToxSingleton.getGroupList.getGroup(groupNumber).id
    println("new group message callback for id " + groupId)
    // Message bytes are decoded as UTF-8 before being handed off.
    MessageHelper.handleGroupMessage(ctx, groupNumber, peerNumber, groupId,
      new String(message, "UTF-8"), MessageType.GROUP_PEER)
  }
}
|
afkgeek/Antox
|
app/src/main/scala/im/tox/antox/callbacks/AntoxOnGroupMessageCallback.scala
|
Scala
|
gpl-3.0
| 1,164
|
/**
* Copyright (C) 2010 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.event.events
import org.orbeon.oxf.xforms.event.XFormsEvent
import org.orbeon.oxf.xforms.event.XFormsEventTarget
import org.orbeon.oxf.xforms.event.XFormsEvents._
import XFormsEvent._
import org.orbeon.saxon.om
// Event dispatched when instance content is replaced (xxforms-replace).
// Bubbles and is not cancelable.
class XXFormsReplaceEvent(target: XFormsEventTarget, properties: PropertyGetter)
extends XFormsEvent(XXFORMS_REPLACE, target, properties, bubbles = true, cancelable = false)
with InstanceEvent {
// Convenience constructor capturing the node before and after replacement.
def this(target: XFormsEventTarget, formerNode: om.NodeInfo, currentNode: om.NodeInfo) = {
this(target, Map("former-node" -> Option(formerNode), "current-node" -> Option(currentNode)))
}
// `.get` assumes both properties are always present — true for events built
// via the auxiliary constructor above; NOTE(review): confirm no caller
// constructs this event without them.
def formerNode = property[om.NodeInfo]("former-node").get
def currentNode = property[om.NodeInfo]("current-node").get
}
|
orbeon/orbeon-forms
|
xforms-runtime/shared/src/main/scala/org/orbeon/oxf/xforms/event/events/XXFormsReplaceEvent.scala
|
Scala
|
lgpl-2.1
| 1,411
|
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
package impl
import com.intellij.psi.PsiElement
import com.intellij.psi.stubs.{IStubElementType, StubElement}
import com.intellij.util.io.StringRef
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.stubs.elements.StringRefArrayExt
/**
* User: Alexander Podkhalyuzin
* Date: 14.10.2008
*/
// Stub-index entry for a Scala function: stores the indexed facts about a
// function (name, flags, annotation names, serialized type/body text) without
// requiring the full PSI tree to be built.
class ScFunctionStubImpl(parent: StubElement[_ <: PsiElement],
elementType: IStubElementType[_ <: StubElement[_ <: PsiElement], _ <: PsiElement],
nameRef: StringRef,
val isDeclaration: Boolean,
private val annotationsRefs: Array[StringRef],
protected[impl] val typeTextRef: Option[StringRef],
protected[impl] val bodyTextRef: Option[StringRef],
val hasAssign: Boolean,
val isImplicit: Boolean,
val isLocal: Boolean)
extends ScNamedStubBase[ScFunction](parent, elementType, nameRef) with ScFunctionStub {
// Materializes the interned annotation name refs as plain strings.
def annotations: Array[String] = annotationsRefs.asStrings
}
|
loskutov/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/stubs/impl/ScFunctionStubImpl.scala
|
Scala
|
apache-2.0
| 1,229
|
// Compiler/run test: exercises two encodings of Peano naturals (case-class
// based and Int based) through the abstract Numbers interface. The printed
// output is the test oracle — behavior must not change.
object Test {
def main(args: Array[String]): Unit = {
println("CaseNums")
test(CaseNums)
println()
println("IntNums")
test(IntNums)
}
// Runs the same pattern-matching scenarios against either Numbers encoding.
def test(numbers: Numbers) = {
import numbers._
val zero: Nat = Zero
val one: Nat = Succ(zero)
val two: Nat = Succ(one)
val three: Nat = Succ(two)
zero match {
case Succ(p) => println("error")
case Zero => println("ok") // extra argument removed by language extension
}
one match {
case Zero => println("error") // extra argument removed by language extension
case Succ(p) => println("ok")
}
zero match {
case s: Succ => println("ok - unchecked error")
case z: Zero => println("ok - unchecked no error")
}
// Division guarded by matching on Succ; the cast stands in for a refinement
// the proposed language extension would insert automatically.
def divOpt(a: Nat, b: Nat): Option[(Nat, Nat)] = b match {
case s @ Succ(p) =>
Some(safeDiv(a, s.asInstanceOf[Succ])) // this case will not be needed
case _ => None
}
println(divOpt(one, zero))
println(divOpt(three, two))
// Hand-expanded form of what the extension-generated match would look like.
def divOptExpanded(a: Nat, b: Nat): Option[(Nat, Nat)] = {
val x0 = Succ.unapply(b)
if (!x0.isEmpty) {
val s = b.asInstanceOf[x0.Refined] // safe unchecked cast inserted by the language extension
val p = x0.get
Some(safeDiv(a, s))
} else {
None
}
}
println(divOptExpanded(one, zero))
println(divOptExpanded(three, two))
}
}
// Fully abstract natural-number interface: concrete encodings choose the
// representations of Nat/Zero/Succ and supply the extractor and operations.
trait Numbers {
type Nat
type Zero <: Nat
type Succ <: Nat
val Zero: Zero
val Succ: SuccExtractor
// Constructor/extractor pair for successors; unapply's result refines the
// scrutinee's singleton type so matched values can be re-typed safely.
trait SuccExtractor {
def apply(nat: Nat): Succ
def unapply(nat: Nat): SuccOpt { type Refined <: nat.type } // check that SuccOpt#Refined <: nat.type. Could be forced by the compiler?
}
// Name-based extractor result (isEmpty/get) carrying the refinement type.
trait SuccOpt {
type Refined <: Succ // optionally added by language extension
def get: Nat
def isEmpty: Boolean
}
// Decorates a Succ with accessor methods.
implicit def SuccDeco(succ: Succ): SuccAPI
trait SuccAPI {
def pred: Nat
}
// Division with a statically non-zero divisor; returns (quotient, remainder).
def safeDiv(a: Nat, b: Succ): (Nat, Nat)
}
// Case-class (Peano) encoding: Zero is an object, Succ wraps its predecessor.
// SuccClass doubles as its own extractor result (name-based unapply).
object CaseNums extends Numbers {
trait NatClass
case object ZeroObj extends NatClass
case class SuccClass(pred: NatClass) extends NatClass with SuccOpt {
type Refined = this.type
def get: NatClass = pred
def isEmpty: Boolean = false
}
// Extractor result for the non-Succ case.
class EmptySuccOpt extends SuccOpt {
type Refined = Nothing
def isEmpty: Boolean = true
def get: NatClass = throw new Exception("empty")
}
type Nat = NatClass
type Zero = ZeroObj.type
type Succ = SuccClass
val Zero: Zero = ZeroObj
object Succ extends SuccExtractor {
def apply(nat: Nat): Succ = SuccClass(nat)
// The cast refines to the scrutinee's singleton type, matching the
// `Refined <: nat.type` bound declared in Numbers.
def unapply(nat: Nat) = nat match {
case succ: SuccClass => succ.asInstanceOf[nat.type & SuccClass]
case _ => new EmptySuccOpt
}
}
def SuccDeco(succ: Succ): SuccAPI = new SuccAPI {
def pred: Nat = succ.pred
}
// Repeated-subtraction division on Peano naturals.
def safeDiv(a: Nat, b: Succ): (Nat, Nat) = {
def sdiv(div: Nat, rem: Nat): (Nat, Nat) =
if (lessOrEq(rem, b)) (div, rem)
else sdiv(Succ(div), minus(rem, b))
sdiv(Zero, a)
}
private def lessOrEq(a: Nat, b: Nat): Boolean = (a, b) match {
case (Succ(a1), Succ(b1)) => lessOrEq(a1, b1)
case (Zero, _) => true // extra argument removed by language extension
case _ => false
}
// assumes a >= b
private def minus(a: Nat, b: Nat): Nat = (a, b) match {
case (Succ(a1), Succ(b1)) => minus(a1, b1)
case _ => a
}
}
// Int encoding: all three abstract types collapse to Int; the comments record
// the intended invariants (Zero is 0, Succ values are > 0).
object IntNums extends Numbers {
type Nat = Int
type Zero = Int // 0
type Succ = Int // n > 0
val Zero = 0
object Succ extends SuccExtractor {
def apply(nat: Nat): Int = nat + 1
// Always returns a SuccOpt; emptiness is decided by isEmpty (nat <= 0).
def unapply(nat: Nat) = new SuccOpt {
type Refined = nat.type
def isEmpty: Boolean = nat <= 0
def get: Int = nat - 1
}
}
def SuccDeco(succ: Succ): SuccAPI = new SuccAPI {
def pred: Int = succ - 1
}
// Native integer division/modulo.
def safeDiv(a: Nat, b: Succ): (Nat, Nat) = (a / b, a % b)
}
|
som-snytt/dotty
|
tests/run/fully-abstract-nat-7.scala
|
Scala
|
apache-2.0
| 3,890
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import java.io._
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.util.zip.GZIPOutputStream
import scala.io.Source
import com.google.common.io.Files
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io._
import org.apache.hadoop.io.compress.DefaultCodec
import org.apache.hadoop.mapred.{FileAlreadyExistsException, FileSplit, JobConf, TextInputFormat, TextOutputFormat}
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.input.{FileSplit => NewFileSplit, TextInputFormat => NewTextInputFormat}
import org.apache.hadoop.mapreduce.lib.output.{TextOutputFormat => NewTextOutputFormat}
import org.apache.spark.internal.config._
import org.apache.spark.rdd.{HadoopRDD, NewHadoopRDD, RDD}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.Utils
class FileSuite extends SparkFunSuite with LocalSparkContext {
// Scratch directory, created fresh for every test and deleted afterwards.
var tempDir: File = _
override def beforeEach() {
super.beforeEach()
tempDir = Utils.createTempDir()
}
override def afterEach() {
try {
Utils.deleteRecursively(tempDir)
} finally {
// Always run the parent teardown, even if cleanup throws.
super.afterEach()
}
}
// Round-trips an RDD through saveAsTextFile and verifies both the raw file
// contents and re-reading via sc.textFile.
// NOTE(review): the `"1\\n2\\n3\\n4\\n"` literal below looks like an escaping
// artifact of extraction (likely `\n` in the original) — verify before reuse.
test("text files") {
sc = new SparkContext("local", "test")
val outputDir = new File(tempDir, "output").getAbsolutePath
val nums = sc.makeRDD(1 to 4)
nums.saveAsTextFile(outputDir)
// Read the plain text file and check it's OK
val outputFile = new File(outputDir, "part-00000")
val bufferSrc = Source.fromFile(outputFile)
Utils.tryWithSafeFinally {
val content = bufferSrc.mkString
assert(content === "1\\n2\\n3\\n4\\n")
// Also try reading it in as a text file RDD
assert(sc.textFile(outputDir).collect().toList === List("1", "2", "3", "4"))
} {
bufferSrc.close()
}
}
// Writes the same data with and without a codec and checks contents match
// while the compressed file is smaller.
test("text files (compressed)") {
sc = new SparkContext("local", "test")
val normalDir = new File(tempDir, "output_normal").getAbsolutePath
val compressedOutputDir = new File(tempDir, "output_compressed").getAbsolutePath
val codec = new DefaultCodec()
val data = sc.parallelize("a" * 10000, 1)
data.saveAsTextFile(normalDir)
data.saveAsTextFile(compressedOutputDir, classOf[DefaultCodec])
val normalFile = new File(normalDir, "part-00000")
val normalContent = sc.textFile(normalDir).collect
assert(normalContent === Array.fill(10000)("a"))
val compressedFile = new File(compressedOutputDir, "part-00000" + codec.getDefaultExtension)
val compressedContent = sc.textFile(compressedOutputDir).collect
assert(compressedContent === Array.fill(10000)("a"))
assert(compressedFile.length < normalFile.length)
}
// Round-trips (Int, String) pairs through saveAsSequenceFile and reads them
// back as Writable types.
test("SequenceFiles") {
sc = new SparkContext("local", "test")
val outputDir = new File(tempDir, "output").getAbsolutePath
val nums = sc.makeRDD(1 to 3).map(x => (x, "a" * x)) // (1,a), (2,aa), (3,aaa)
nums.saveAsSequenceFile(outputDir)
// Try reading the output back as a SequenceFile
val output = sc.sequenceFile[IntWritable, Text](outputDir)
assert(output.map(_.toString).collect().toList === List("(1,a)", "(2,aa)", "(3,aaa)"))
}
// Same as above with compression; also checks the compressed file is smaller.
test("SequenceFile (compressed)") {
sc = new SparkContext("local", "test")
val normalDir = new File(tempDir, "output_normal").getAbsolutePath
val compressedOutputDir = new File(tempDir, "output_compressed").getAbsolutePath
val codec = new DefaultCodec()
val data = sc.parallelize(Seq.fill(100)("abc"), 1).map(x => (x, x))
data.saveAsSequenceFile(normalDir)
data.saveAsSequenceFile(compressedOutputDir, Some(classOf[DefaultCodec]))
val normalFile = new File(normalDir, "part-00000")
val normalContent = sc.sequenceFile[String, String](normalDir).collect
assert(normalContent === Array.fill(100)(("abc", "abc")))
val compressedFile = new File(compressedOutputDir, "part-00000" + codec.getDefaultExtension)
val compressedContent = sc.sequenceFile[String, String](compressedOutputDir).collect
assert(compressedContent === Array.fill(100)(("abc", "abc")))
assert(compressedFile.length < normalFile.length)
}
// Variants with Writable keys and/or values supplied directly.
test("SequenceFile with writable key") {
sc = new SparkContext("local", "test")
val outputDir = new File(tempDir, "output").getAbsolutePath
val nums = sc.makeRDD(1 to 3).map(x => (new IntWritable(x), "a" * x))
nums.saveAsSequenceFile(outputDir)
// Try reading the output back as a SequenceFile
val output = sc.sequenceFile[IntWritable, Text](outputDir)
assert(output.map(_.toString).collect().toList === List("(1,a)", "(2,aa)", "(3,aaa)"))
}
test("SequenceFile with writable value") {
sc = new SparkContext("local", "test")
val outputDir = new File(tempDir, "output").getAbsolutePath
val nums = sc.makeRDD(1 to 3).map(x => (x, new Text("a" * x)))
nums.saveAsSequenceFile(outputDir)
// Try reading the output back as a SequenceFile
val output = sc.sequenceFile[IntWritable, Text](outputDir)
assert(output.map(_.toString).collect().toList === List("(1,a)", "(2,aa)", "(3,aaa)"))
}
test("SequenceFile with writable key and value") {
sc = new SparkContext("local", "test")
val outputDir = new File(tempDir, "output").getAbsolutePath
val nums = sc.makeRDD(1 to 3).map(x => (new IntWritable(x), new Text("a" * x)))
nums.saveAsSequenceFile(outputDir)
// Try reading the output back as a SequenceFile
val output = sc.sequenceFile[IntWritable, Text](outputDir)
assert(output.map(_.toString).collect().toList === List("(1,a)", "(2,aa)", "(3,aaa)"))
}
// Reads SequenceFiles using plain Scala types that convert to/from Writables.
test("implicit conversions in reading SequenceFiles") {
sc = new SparkContext("local", "test")
val outputDir = new File(tempDir, "output").getAbsolutePath
val nums = sc.makeRDD(1 to 3).map(x => (x, "a" * x)) // (1,a), (2,aa), (3,aaa)
nums.saveAsSequenceFile(outputDir)
// Similar to the tests above, we read a SequenceFile, but this time we pass type params
// that are convertable to Writable instead of calling sequenceFile[IntWritable, Text]
val output1 = sc.sequenceFile[Int, String](outputDir)
assert(output1.collect().toList === List((1, "a"), (2, "aa"), (3, "aaa")))
// Also try having one type be a subclass of Writable and one not
val output2 = sc.sequenceFile[Int, Text](outputDir)
assert(output2.map(_.toString).collect().toList === List("(1,a)", "(2,aa)", "(3,aaa)"))
val output3 = sc.sequenceFile[IntWritable, String](outputDir)
assert(output3.map(_.toString).collect().toList === List("(1,a)", "(2,aa)", "(3,aaa)"))
}
// Round-trips primitive values through saveAsObjectFile / objectFile.
test("object files of ints") {
sc = new SparkContext("local", "test")
val outputDir = new File(tempDir, "output").getAbsolutePath
val nums = sc.makeRDD(1 to 4)
nums.saveAsObjectFile(outputDir)
// Try reading the output back as an object file
val output = sc.objectFile[Int](outputDir)
assert(output.collect().toList === List(1, 2, 3, 4))
}
// Same round-trip with tuple values.
test("object files of complex types") {
sc = new SparkContext("local", "test")
val outputDir = new File(tempDir, "output").getAbsolutePath
val nums = sc.makeRDD(1 to 3).map(x => (x, "a" * x))
nums.saveAsObjectFile(outputDir)
// Try reading the output back as an object file
val output = sc.objectFile[(Int, String)](outputDir)
assert(output.collect().toList === List((1, "a"), (2, "aa"), (3, "aaa")))
}
// Verifies deserialization uses the thread context classloader, so classes
// that exist only inside a dynamically-loaded JAR can be read back.
test("object files of classes from a JAR") {
// scalastyle:off classforname
val original = Thread.currentThread().getContextClassLoader
val className = "FileSuiteObjectFileTest"
val jar = TestUtils.createJarWithClasses(Seq(className))
val loader = new java.net.URLClassLoader(Array(jar), Utils.getContextOrSparkClassLoader)
Thread.currentThread().setContextClassLoader(loader)
try {
sc = new SparkContext("local", "test")
val objs = sc.makeRDD(1 to 3).map { x =>
val loader = Thread.currentThread().getContextClassLoader
Class.forName(className, true, loader).getConstructor().newInstance()
}
val outputDir = new File(tempDir, "output").getAbsolutePath
objs.saveAsObjectFile(outputDir)
// Try reading the output back as an object file
val ct = reflect.ClassTag[Any](Class.forName(className, true, loader))
val output = sc.objectFile[Any](outputDir)
assert(output.collect().size === 3)
assert(output.collect().head.getClass.getName === className)
}
finally {
// Restore the original classloader even on failure.
Thread.currentThread().setContextClassLoader(original)
}
// scalastyle:on classforname
}
// Writes with the new (mapreduce) Hadoop API and reads back with the old one.
test("write SequenceFile using new Hadoop API") {
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat
sc = new SparkContext("local", "test")
val outputDir = new File(tempDir, "output").getAbsolutePath
val nums = sc.makeRDD(1 to 3).map(x => (new IntWritable(x), new Text("a" * x)))
nums.saveAsNewAPIHadoopFile[SequenceFileOutputFormat[IntWritable, Text]](
outputDir)
val output = sc.sequenceFile[IntWritable, Text](outputDir)
assert(output.map(_.toString).collect().toList === List("(1,a)", "(2,aa)", "(3,aaa)"))
}
// Writes with the old API and reads back with the new (mapreduce) API.
test("read SequenceFile using new Hadoop API") {
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat
sc = new SparkContext("local", "test")
val outputDir = new File(tempDir, "output").getAbsolutePath
val nums = sc.makeRDD(1 to 3).map(x => (new IntWritable(x), new Text("a" * x)))
nums.saveAsSequenceFile(outputDir)
val output =
sc.newAPIHadoopFile[IntWritable, Text, SequenceFileInputFormat[IntWritable, Text]](outputDir)
assert(output.map(_.toString).collect().toList === List("(1,a)", "(2,aa)", "(3,aaa)"))
}
/** Writes `testOutputCopies` copies of `testOutput` into a binary fixture file
  * under `tempDir`, shifting every byte of copy i by i so the copies are
  * distinguishable. Returns the file that was written.
  */
private def writeBinaryData(testOutput: Array[Byte], testOutputCopies: Int): File = {
  val outFile = new File(tempDir, "record-bytestream-00000.bin")
  val stream = new FileOutputStream(outFile)
  val channel = stream.getChannel
  (0 until testOutputCopies).foreach { copy =>
    // Shift values by the copy index so each copy's bytes differ.
    val shifted = testOutput.map(b => (b + copy).toByte)
    val buffer = ByteBuffer.wrap(shifted)
    // FileChannel.write may write only part of the buffer; loop until drained.
    while (buffer.hasRemaining) {
      channel.write(buffer)
    }
  }
  channel.close()
  stream.close()
  outFile
}
test("binary file input as byte array") {
sc = new SparkContext("local", "test")
val testOutput = Array[Byte](1, 2, 3, 4, 5, 6)
val outFile = writeBinaryData(testOutput, 1)
val inRdd = sc.binaryFiles(outFile.getAbsolutePath)
val (infile, indata) = inRdd.collect().head
// Make sure the name and array match
assert(infile.contains(outFile.toURI.getPath)) // a prefix may get added
assert(indata.toArray === testOutput)
}
test("portabledatastream caching tests") {
  sc = new SparkContext("local", "test")
  val expected = Array[Byte](1, 2, 3, 4, 5, 6)
  val dataFile = writeBinaryData(expected, 1)
  val cached = sc.binaryFiles(dataFile.getAbsolutePath).cache()
  // Force the streams to be read so the data actually lands in the cache.
  cached.foreach(_._2.toArray())
  // The cached contents must still match what was written.
  assert(cached.values.collect().head.toArray === expected)
}
test("portabledatastream persist disk storage") {
  sc = new SparkContext("local", "test")
  val expected = Array[Byte](1, 2, 3, 4, 5, 6)
  val dataFile = writeBinaryData(expected, 1)
  // Same as the caching test, but persisted to disk instead of memory.
  val persisted = sc.binaryFiles(dataFile.getAbsolutePath).persist(StorageLevel.DISK_ONLY)
  persisted.foreach(_._2.toArray()) // force the file to be read
  assert(persisted.values.collect().head.toArray === expected)
}
test("portabledatastream flatmap tests") {
  sc = new SparkContext("local", "test")
  val expected = Array[Byte](1, 2, 3, 4, 5, 6)
  val dataFile = writeBinaryData(expected, 1)
  val streams = sc.binaryFiles(dataFile.getAbsolutePath)
  val copies = 3
  // Duplicate every stream `copies` times via flatMap, then verify each copy
  // still yields the original bytes.
  val duplicated = streams.flatMap { case (_, stream) => Seq.fill(copies)(stream) }
  val collected = duplicated.collect()
  assert(collected.length == copies)
  collected.foreach(stream => assert(stream.toArray === expected))
}
// Regression test for SPARK-22357: binaryFiles must honor the requested
// minPartitions even when files are tiny and spark.files.openCostInBytes is 0.
test("SPARK-22357 test binaryFiles minPartitions") {
  sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local")
    .set("spark.files.openCostInBytes", "0")
    .set("spark.default.parallelism", "1"))
  withTempDir { tempDir =>
    val tempDirPath = tempDir.getAbsolutePath
    // Eight small input files so every requested partition count is reachable.
    for (i <- 0 until 8) {
      val tempFile = new File(tempDir, s"part-0000$i")
      Files.write("someline1 in file1\\nsomeline2 in file1\\nsomeline3 in file1", tempFile,
        StandardCharsets.UTF_8)
    }
    for (p <- Seq(1, 2, 8)) {
      assert(sc.binaryFiles(tempDirPath, minPartitions = p).getNumPartitions === p)
    }
  }
}
// Per-node / per-rack minimum split sizes larger than the actual file must not
// break reading: the binaryFiles input format still returns the full content.
test("minimum split size per node and per rack should be less than or equal to maxSplitSize") {
  sc = new SparkContext("local", "test")
  val testOutput = Array[Byte](1, 2, 3, 4, 5)
  val outFile = writeBinaryData(testOutput, 1)
  // Deliberately much larger than the 5-byte file written above.
  sc.hadoopConfiguration.setLong(
    "mapreduce.input.fileinputformat.split.minsize.per.node", 5123456)
  sc.hadoopConfiguration.setLong(
    "mapreduce.input.fileinputformat.split.minsize.per.rack", 5123456)
  val (_, data) = sc.binaryFiles(outFile.getAbsolutePath).collect().head
  assert(data.toArray === testOutput)
}
test("fixed record length binary file as byte array") {
  sc = new SparkContext("local", "test")
  val record = Array[Byte](1, 2, 3, 4, 5, 6)
  val copies = 10
  val dataFile = writeBinaryData(record, copies)
  // Read back fixed-length records; record i was written shifted by i.
  val records = sc.binaryRecords(dataFile.getAbsolutePath, record.length)
  assert(records.count == copies)
  records.collect().zipWithIndex.foreach { case (bytes, i) =>
    assert(bytes === record.map(b => (b + i).toByte))
  }
}
test ("negative binary record length should raise an exception") {
  sc = new SparkContext("local", "test")
  val dataFile = writeBinaryData(Array[Byte](1, 2, 3, 4, 5, 6), 1)
  // A negative record length is invalid and must fail the job.
  intercept[SparkException] {
    sc.binaryRecords(dataFile.getAbsolutePath, -1).count()
  }
}
test("file caching") {
  sc = new SparkContext("local", "test")
  val inputPath = tempDir + "/input"
  val writer = new FileWriter(inputPath)
  writer.write("Hello world!\\n")
  writer.write("What's up?\\n")
  writer.write("Goodbye\\n")
  writer.close()
  // Counting a cached RDD repeatedly must keep returning the same result.
  val cached = sc.textFile(inputPath).cache()
  (1 to 3).foreach(_ => assert(cached.count() === 3))
}
test ("prevent user from overwriting the empty directory (old Hadoop API)") {
  sc = new SparkContext("local", "test")
  val pairs = sc.parallelize(Array((1, "a"), (1, "a"), (2, "b"), (3, "c")), 1)
  // tempDir already exists, so output-spec validation must reject it.
  intercept[FileAlreadyExistsException] {
    pairs.saveAsTextFile(tempDir.getPath)
  }
}
// Writing once must succeed; writing again to the same (now non-empty)
// directory must fail output-spec validation (old mapred API).
test ("prevent user from overwriting the non-empty directory (old Hadoop API)") {
  sc = new SparkContext("local", "test")
  val randomRDD = sc.parallelize(Array((1, "a"), (1, "a"), (2, "b"), (3, "c")), 1)
  randomRDD.saveAsTextFile(tempDir.getPath + "/output")
  assert(new File(tempDir.getPath + "/output/part-00000").exists() === true)
  intercept[FileAlreadyExistsException] {
    randomRDD.saveAsTextFile(tempDir.getPath + "/output")
  }
}
// With spark.hadoop.validateOutputSpecs=false, writing twice to the same
// directory is allowed (old Hadoop API); the second save silently overwrites.
test ("allow user to disable the output directory existence checking (old Hadoop API)") {
  val conf = new SparkConf()
  conf.setAppName("test").setMaster("local").set("spark.hadoop.validateOutputSpecs", "false")
  sc = new SparkContext(conf)
  val randomRDD = sc.parallelize(Array((1, "a"), (1, "a"), (2, "b"), (3, "c")), 1)
  randomRDD.saveAsTextFile(tempDir.getPath + "/output")
  assert(new File(tempDir.getPath + "/output/part-00000").exists() === true)
  randomRDD.saveAsTextFile(tempDir.getPath + "/output")
  assert(new File(tempDir.getPath + "/output/part-00000").exists() === true)
}
// tempDir already exists, so the new-API output committer must refuse it.
test ("prevent user from overwriting the empty directory (new Hadoop API)") {
  sc = new SparkContext("local", "test")
  val randomRDD = sc.parallelize(
    Array(("key1", "a"), ("key2", "a"), ("key3", "b"), ("key4", "c")), 1)
  intercept[FileAlreadyExistsException] {
    randomRDD.saveAsNewAPIHadoopFile[NewTextOutputFormat[String, String]](tempDir.getPath)
  }
}
// First save to <tempDir>/output succeeds; the follow-up save must be rejected.
test ("prevent user from overwriting the non-empty directory (new Hadoop API)") {
  sc = new SparkContext("local", "test")
  val randomRDD = sc.parallelize(
    Array(("key1", "a"), ("key2", "a"), ("key3", "b"), ("key4", "c")), 1)
  randomRDD.saveAsNewAPIHadoopFile[NewTextOutputFormat[String, String]](
    tempDir.getPath + "/output")
  assert(new File(tempDir.getPath + "/output/part-r-00000").exists() === true)
  // NOTE(review): the rejected save targets tempDir.getPath itself rather than
  // .../output; tempDir is non-empty by now so the intercept still holds, but
  // confirm the path is as intended.
  intercept[FileAlreadyExistsException] {
    randomRDD.saveAsNewAPIHadoopFile[NewTextOutputFormat[String, String]](tempDir.getPath)
  }
}
// Mirror of the old-API variant above: with spark.hadoop.validateOutputSpecs
// disabled, a second save to the same directory must succeed (new Hadoop API).
// Fixed: the test name was missing its closing parenthesis.
test ("allow user to disable the output directory existence checking (new Hadoop API)") {
  val conf = new SparkConf()
  conf.setAppName("test").setMaster("local").set("spark.hadoop.validateOutputSpecs", "false")
  sc = new SparkContext(conf)
  val randomRDD = sc.parallelize(
    Array(("key1", "a"), ("key2", "a"), ("key3", "b"), ("key4", "c")), 1)
  randomRDD.saveAsNewAPIHadoopFile[NewTextOutputFormat[String, String]](
    tempDir.getPath + "/output")
  assert(new File(tempDir.getPath + "/output/part-r-00000").exists() === true)
  randomRDD.saveAsNewAPIHadoopFile[NewTextOutputFormat[String, String]](
    tempDir.getPath + "/output")
  assert(new File(tempDir.getPath + "/output/part-r-00000").exists() === true)
}
// saveAsHadoopDataset with an old-API JobConf: the output format and target
// directory are carried entirely by the JobConf, not by method arguments.
test ("save Hadoop Dataset through old Hadoop API") {
  sc = new SparkContext("local", "test")
  val randomRDD = sc.parallelize(
    Array(("key1", "a"), ("key2", "a"), ("key3", "b"), ("key4", "c")), 1)
  val job = new JobConf()
  job.setOutputKeyClass(classOf[String])
  job.setOutputValueClass(classOf[String])
  job.set("mapred.output.format.class", classOf[TextOutputFormat[String, String]].getName)
  job.set("mapreduce.output.fileoutputformat.outputdir", tempDir.getPath + "/outputDataset_old")
  randomRDD.saveAsHadoopDataset(job)
  assert(new File(tempDir.getPath + "/outputDataset_old/part-00000").exists() === true)
}
// Same as the old-API dataset test, but configured through a mapreduce Job and
// saved via saveAsNewAPIHadoopDataset (note the part-r-* file naming).
test ("save Hadoop Dataset through new Hadoop API") {
  sc = new SparkContext("local", "test")
  val randomRDD = sc.parallelize(
    Array(("key1", "a"), ("key2", "a"), ("key3", "b"), ("key4", "c")), 1)
  val job = Job.getInstance(sc.hadoopConfiguration)
  job.setOutputKeyClass(classOf[String])
  job.setOutputValueClass(classOf[String])
  job.setOutputFormatClass(classOf[NewTextOutputFormat[String, String]])
  val jobConfig = job.getConfiguration
  jobConfig.set("mapreduce.output.fileoutputformat.outputdir",
    tempDir.getPath + "/outputDataset_new")
  randomRDD.saveAsNewAPIHadoopDataset(jobConfig)
  assert(new File(tempDir.getPath + "/outputDataset_new/part-r-00000").exists() === true)
}
// mapPartitionsWithInputSplit exposes the underlying FileSplit, letting the
// test recover exactly which files backed each partition (old Hadoop API).
test("Get input files via old Hadoop API") {
  sc = new SparkContext("local", "test")
  val outDir = new File(tempDir, "output").getAbsolutePath
  sc.makeRDD(1 to 4, 2).saveAsTextFile(outDir)
  val inputPaths =
    sc.hadoopFile(outDir, classOf[TextInputFormat], classOf[LongWritable], classOf[Text])
      .asInstanceOf[HadoopRDD[_, _]]
      .mapPartitionsWithInputSplit { (split, part) =>
        // `part` (the record iterator) is unused; only the split's path matters.
        Iterator(split.asInstanceOf[FileSplit].getPath.toUri.getPath)
      }.collect()
  val outPathOne = new Path(outDir, "part-00000").toUri.getPath
  val outPathTwo = new Path(outDir, "part-00001").toUri.getPath
  assert(inputPaths.toSet === Set(outPathOne, outPathTwo))
}
// New-API twin of the test above, going through NewHadoopRDD / NewFileSplit.
test("Get input files via new Hadoop API") {
  sc = new SparkContext("local", "test")
  val outDir = new File(tempDir, "output").getAbsolutePath
  sc.makeRDD(1 to 4, 2).saveAsTextFile(outDir)
  val inputPaths =
    sc.newAPIHadoopFile(outDir, classOf[NewTextInputFormat], classOf[LongWritable], classOf[Text])
      .asInstanceOf[NewHadoopRDD[_, _]]
      .mapPartitionsWithInputSplit { (split, part) =>
        // `part` (the record iterator) is unused; only the split's path matters.
        Iterator(split.asInstanceOf[NewFileSplit].getPath.toUri.getPath)
      }.collect()
  val outPathOne = new Path(outDir, "part-00000").toUri.getPath
  val outPathTwo = new Path(outDir, "part-00001").toUri.getPath
  assert(inputPaths.toSet === Set(outPathOne, outPathTwo))
}
// With IGNORE_CORRUPT_FILES off, reading a truncated gzip file must surface
// the EOFException through both the old (HadoopRDD) and new (NewHadoopRDD)
// input paths; with it on, the corrupt file is skipped and yields no rows.
test("spark.files.ignoreCorruptFiles should work both HadoopRDD and NewHadoopRDD") {
  val inputFile = File.createTempFile("input-", ".gz")
  try {
    // Create a corrupt gzip file
    val byteOutput = new ByteArrayOutputStream()
    val gzip = new GZIPOutputStream(byteOutput)
    try {
      gzip.write(Array[Byte](1, 2, 3, 4))
    } finally {
      gzip.close()
    }
    val bytes = byteOutput.toByteArray
    val o = new FileOutputStream(inputFile)
    try {
      // It's corrupt since we only write half of bytes into the file.
      o.write(bytes.take(bytes.length / 2))
    } finally {
      o.close()
    }
    // Reading a corrupt gzip file should throw EOFException
    sc = new SparkContext("local", "test")
    // Test HadoopRDD
    var e = intercept[SparkException] {
      sc.textFile(inputFile.toURI.toString).collect()
    }
    assert(e.getCause.isInstanceOf[EOFException])
    assert(e.getCause.getMessage === "Unexpected end of input stream")
    // Test NewHadoopRDD
    e = intercept[SparkException] {
      sc.newAPIHadoopFile(
        inputFile.toURI.toString,
        classOf[NewTextInputFormat],
        classOf[LongWritable],
        classOf[Text]).collect()
    }
    assert(e.getCause.isInstanceOf[EOFException])
    assert(e.getCause.getMessage === "Unexpected end of input stream")
    // Restart the context with corrupt-file tolerance enabled.
    sc.stop()
    val conf = new SparkConf().set(IGNORE_CORRUPT_FILES, true)
    sc = new SparkContext("local", "test", conf)
    // Test HadoopRDD
    assert(sc.textFile(inputFile.toURI.toString).collect().isEmpty)
    // Test NewHadoopRDD
    assert {
      sc.newAPIHadoopFile(
        inputFile.toURI.toString,
        classOf[NewTextInputFormat],
        classOf[LongWritable],
        classOf[Text]).collect().isEmpty
    }
  } finally {
    inputFile.delete()
  }
}
// With HADOOP_RDD_IGNORE_EMPTY_SPLITS enabled, empty output splits must be
// dropped when the files are read back (old Hadoop API).
test("spark.hadoopRDD.ignoreEmptySplits work correctly (old Hadoop API)") {
  val conf = new SparkConf()
    .setAppName("test")
    .setMaster("local")
    .set(HADOOP_RDD_IGNORE_EMPTY_SPLITS, true)
  sc = new SparkContext(conf)
  // Writes `data` with `actualPartitionNum` partitions, then asserts that
  // reading it back yields `expectedPartitionNum` partitions (empties dropped).
  def testIgnoreEmptySplits(
      data: Array[Tuple2[String, String]],
      actualPartitionNum: Int,
      expectedPartitionNum: Int): Unit = {
    val output = new File(tempDir, "output")
    sc.parallelize(data, actualPartitionNum)
      .saveAsHadoopFile[TextOutputFormat[String, String]](output.getPath)
    // Every partition produced a part file, even the empty ones.
    for (i <- 0 until actualPartitionNum) {
      assert(new File(output, s"part-0000$i").exists() === true)
    }
    val hadoopRDD = sc.textFile(new File(output, "part-*").getPath)
    assert(hadoopRDD.partitions.length === expectedPartitionNum)
    Utils.deleteRecursively(output)
  }
  // Ensure that if all of the splits are empty, we remove the splits correctly
  testIgnoreEmptySplits(
    data = Array.empty[Tuple2[String, String]],
    actualPartitionNum = 1,
    expectedPartitionNum = 0)
  // Ensure that if no split is empty, we don't lose any splits
  testIgnoreEmptySplits(
    data = Array(("key1", "a"), ("key2", "a"), ("key3", "b")),
    actualPartitionNum = 2,
    expectedPartitionNum = 2)
  // Ensure that if part of the splits are empty, we remove the splits correctly
  testIgnoreEmptySplits(
    data = Array(("key1", "a"), ("key2", "a")),
    actualPartitionNum = 5,
    expectedPartitionNum = 2)
}
// New-API twin of the test above: empty splits must also be dropped when
// reading through NewHadoopRDD (note the part-r-* file naming).
test("spark.hadoopRDD.ignoreEmptySplits work correctly (new Hadoop API)") {
  val conf = new SparkConf()
    .setAppName("test")
    .setMaster("local")
    .set(HADOOP_RDD_IGNORE_EMPTY_SPLITS, true)
  sc = new SparkContext(conf)
  // Writes `data` with `actualPartitionNum` partitions, then asserts that
  // reading it back yields `expectedPartitionNum` partitions (empties dropped).
  def testIgnoreEmptySplits(
      data: Array[Tuple2[String, String]],
      actualPartitionNum: Int,
      expectedPartitionNum: Int): Unit = {
    val output = new File(tempDir, "output")
    sc.parallelize(data, actualPartitionNum)
      .saveAsNewAPIHadoopFile[NewTextOutputFormat[String, String]](output.getPath)
    for (i <- 0 until actualPartitionNum) {
      assert(new File(output, s"part-r-0000$i").exists() === true)
    }
    val hadoopRDD = sc.newAPIHadoopFile(new File(output, "part-r-*").getPath,
      classOf[NewTextInputFormat], classOf[LongWritable], classOf[Text])
      .asInstanceOf[NewHadoopRDD[_, _]]
    assert(hadoopRDD.partitions.length === expectedPartitionNum)
    Utils.deleteRecursively(output)
  }
  // Ensure that if all of the splits are empty, we remove the splits correctly
  testIgnoreEmptySplits(
    data = Array.empty[Tuple2[String, String]],
    actualPartitionNum = 1,
    expectedPartitionNum = 0)
  // Ensure that if no split is empty, we don't lose any splits
  testIgnoreEmptySplits(
    data = Array(("1", "a"), ("2", "a"), ("3", "b")),
    actualPartitionNum = 2,
    expectedPartitionNum = 2)
  // Ensure that if part of the splits are empty, we remove the splits correctly
  testIgnoreEmptySplits(
    data = Array(("1", "a"), ("2", "b")),
    actualPartitionNum = 5,
    expectedPartitionNum = 2)
}
// ignoreMissingFiles must cover both failure points: a file already missing at
// getPartitions time, and a file deleted between getPartitions and compute —
// for both the old and new Hadoop input paths.
test("spark.files.ignoreMissingFiles should work both HadoopRDD and NewHadoopRDD") {
  // "file not found" can happen both when getPartitions or compute in HadoopRDD/NewHadoopRDD,
  // We test both cases here.
  val deletedPath = new Path(tempDir.getAbsolutePath, "test-data-1")
  val fs = deletedPath.getFileSystem(new Configuration())
  fs.delete(deletedPath, true)
  intercept[FileNotFoundException](fs.open(deletedPath))
  // Creates a file, forces the RDD's partitions to be computed and cached,
  // deletes the file, then collects — pushing the failure into compute().
  def collectRDDAndDeleteFileBeforeCompute(newApi: Boolean): Array[_] = {
    val dataPath = new Path(tempDir.getAbsolutePath, "test-data-2")
    val writer = new OutputStreamWriter(new FileOutputStream(new File(dataPath.toString)))
    writer.write("hello\\n")
    writer.write("world\\n")
    writer.close()
    val rdd = if (newApi) {
      sc.newAPIHadoopFile(dataPath.toString, classOf[NewTextInputFormat],
        classOf[LongWritable], classOf[Text])
    } else {
      sc.textFile(dataPath.toString)
    }
    rdd.partitions
    fs.delete(dataPath, true)
    // Exception happens when initialize record reader in HadoopRDD/NewHadoopRDD.compute
    // because partitions' info already cached.
    rdd.collect()
  }
  // collect HadoopRDD and NewHadoopRDD when spark.files.ignoreMissingFiles=false by default.
  sc = new SparkContext("local", "test")
  intercept[org.apache.hadoop.mapred.InvalidInputException] {
    // Exception happens when HadoopRDD.getPartitions
    sc.textFile(deletedPath.toString).collect()
  }
  var e = intercept[SparkException] {
    collectRDDAndDeleteFileBeforeCompute(false)
  }
  assert(e.getCause.isInstanceOf[java.io.FileNotFoundException])
  intercept[org.apache.hadoop.mapreduce.lib.input.InvalidInputException] {
    // Exception happens when NewHadoopRDD.getPartitions
    sc.newAPIHadoopFile(deletedPath.toString, classOf[NewTextInputFormat],
      classOf[LongWritable], classOf[Text]).collect
  }
  e = intercept[SparkException] {
    collectRDDAndDeleteFileBeforeCompute(true)
  }
  assert(e.getCause.isInstanceOf[java.io.FileNotFoundException])
  sc.stop()
  // collect HadoopRDD and NewHadoopRDD when spark.files.ignoreMissingFiles=true.
  val conf = new SparkConf().set(IGNORE_MISSING_FILES, true)
  sc = new SparkContext("local", "test", conf)
  assert(sc.textFile(deletedPath.toString).collect().isEmpty)
  assert(collectRDDAndDeleteFileBeforeCompute(false).isEmpty)
  assert(sc.newAPIHadoopFile(deletedPath.toString, classOf[NewTextInputFormat],
    classOf[LongWritable], classOf[Text]).collect().isEmpty)
  assert(collectRDDAndDeleteFileBeforeCompute(true).isEmpty)
}
}
|
hhbyyh/spark
|
core/src/test/scala/org/apache/spark/FileSuite.scala
|
Scala
|
apache-2.0
| 28,994
|
package com.atanana
import java.nio.ByteBuffer
import java.nio.channels.ServerSocketChannel
import java.util
import scala.io.Source
import scala.util.Try
/**
 * Polls a server socket for a single short text command per call.
 *
 * Fixes over the previous version: the client channel is now closed and the
 * shared buffer reset in a finally block (previously a failed read leaked the
 * connection and left stale bytes/position in the buffer for the next call),
 * and only the bytes actually read are decoded instead of the whole backing
 * array.
 *
 * @param socket server channel to accept connections from; when `accept()`
 *               returns null (no pending connection) the result is Success(None)
 */
class CommandProvider(socket: ServerSocketChannel) {
  // Reused across calls; commands longer than 128 bytes are truncated.
  private val buffer = ByteBuffer.allocate(128)

  /**
   * Accepts at most one pending connection and reads one command from it.
   *
   * @return Success(Some(command)) when a client sent data, Success(None) when
   *         no connection was pending or nothing was read, Failure on I/O error
   */
  def getCommand: Try[Option[String]] = {
    Try {
      var command: Option[String] = None
      val socketChannel = socket.accept()
      if (socketChannel != null) {
        try {
          val read = socketChannel.read(buffer)
          if (read > 0) {
            // Decode only the bytes actually read; trim strips the newline the
            // client typically appends.
            command = Some(new String(buffer.array(), 0, read).trim)
          }
        } finally {
          // Reset the shared buffer and release the client connection even if
          // the read threw.
          util.Arrays.fill(buffer.array(), 0.toByte)
          buffer.clear()
          socketChannel.close()
        }
      }
      command
    }
  }
}
|
atanana/rating-bot
|
src/main/scala/com/atanana/CommandProvider.scala
|
Scala
|
mit
| 720
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.suiteprop
import org.scalatest._
class FirstTestIgnoredExamples extends SuiteExamples {
// Expected reported test names shared by all example suites below; the
// property checks driven by the enclosing SuiteExamples compare against these.
trait Services {
  val theTestNames = Vector("first test", "second test")
}
// Name variants for nested suites using the "should" verb.
trait NestedTestNames extends Services {
  override val theTestNames = Vector("A subject should first test", "A subject should second test")
}
trait DeeplyNestedTestNames extends Services {
  override val theTestNames = Vector("A subject when created should first test", "A subject when created should second test")
}
// Name variants for the "must" verb.
trait NestedTestNamesWithMust extends Services {
  override val theTestNames = Vector("A subject must first test", "A subject must second test")
}
trait DeeplyNestedTestNamesWithMust extends Services {
  override val theTestNames = Vector("A subject when created must first test", "A subject when created must second test")
}
// Name variants for the "can" verb.
trait NestedTestNamesWithCan extends Services {
  override val theTestNames = Vector("A subject can first test", "A subject can second test")
}
trait DeeplyNestedTestNamesWithCan extends Services {
  override val theTestNames = Vector("A subject when created can first test", "A subject when created can second test")
}
type FixtureServices = Services
class SuiteExample extends Suite with Services {
@Ignore def testFirst {}
def testSecond {}
override val theTestNames = Vector("testFirst", "testSecond")
}
class FixtureSuiteExample extends StringFixtureSuite with Services {
@Ignore def testFirst(s: String) {}
def testSecond(s: String) {}
override val theTestNames = Vector("testFirst(FixtureParam)", "testSecond(FixtureParam)")
}
class FunSuiteExample extends FunSuite with Services {
ignore("first test") {}
test("second test") {}
}
class FixtureFunSuiteExample extends StringFixtureFunSuite with Services {
ignore("first test") { s => }
test("second test") { s => }
}
class FunSpecExample extends FunSpec with Services {
ignore("first test") {}
it("second test") {}
}
class NestedFunSpecExample extends FunSpec with NestedTestNames {
describe("A subject") {
ignore("should first test") {}
it("should second test") {}
}
}
class DeeplyNestedFunSpecExample extends FunSpec with DeeplyNestedTestNames {
describe("A subject") {
describe("when created") {
ignore("should first test") {}
it("should second test") {}
}
}
}
class FixtureFunSpecExample extends StringFixtureFunSpec with Services {
ignore("first test") { s => }
it("second test") { s => }
}
class NestedFixtureFunSpecExample extends StringFixtureFunSpec with NestedTestNames {
describe("A subject") {
ignore("should first test") { s => }
it("should second test") { s => }
}
}
class DeeplyNestedFixtureFunSpecExample extends StringFixtureFunSpec with DeeplyNestedTestNames {
describe("A subject") {
describe("when created") {
ignore("should first test") { s => }
it("should second test") { s => }
}
}
}
class PathFunSpecExample extends path.FunSpec with Services {
ignore("first test") {}
it("second test") {}
override def newInstance = new PathFunSpecExample
}
class NestedPathFunSpecExample extends path.FunSpec with NestedTestNames {
describe("A subject") {
ignore("should first test") {}
it("should second test") {}
}
override def newInstance = new NestedPathFunSpecExample
}
class DeeplyNestedPathFunSpecExample extends path.FunSpec with DeeplyNestedTestNames {
describe("A subject") {
describe("when created") {
ignore("should first test") {}
it("should second test") {}
}
}
override def newInstance = new DeeplyNestedPathFunSpecExample
}
class WordSpecExample extends WordSpec with Services {
"first test" ignore {}
"second test" in {}
}
class NestedWordSpecExample extends WordSpec with NestedTestNames {
"A subject" should {
"first test" ignore {}
"second test" in {}
}
}
class DeeplyNestedWordSpecExample extends WordSpec with DeeplyNestedTestNames {
"A subject" when {
"created" should {
"first test" ignore {}
"second test" in {}
}
}
}
class FixtureWordSpecExample extends StringFixtureWordSpec with Services {
"first test" ignore { s => }
"second test" in { s => }
}
class NestedFixtureWordSpecExample extends StringFixtureWordSpec with NestedTestNames {
"A subject" should {
"first test" ignore { s => }
"second test" in { s => }
}
}
class DeeplyNestedFixtureWordSpecExample extends StringFixtureWordSpec with DeeplyNestedTestNames {
"A subject" when {
"created" should {
"first test" ignore { s => }
"second test" in { s => }
}
}
}
class NestedWordSpecWithMustExample extends WordSpec with NestedTestNamesWithMust {
"A subject" must {
"first test" ignore {}
"second test" in {}
}
}
class DeeplyNestedWordSpecWithMustExample extends WordSpec with DeeplyNestedTestNamesWithMust {
"A subject" when {
"created" must {
"first test" ignore {}
"second test" in {}
}
}
}
class NestedFixtureWordSpecWithMustExample extends StringFixtureWordSpec with NestedTestNamesWithMust {
"A subject" must {
"first test" ignore { s => }
"second test" in { s => }
}
}
class DeeplyNestedFixtureWordSpecWithMustExample extends StringFixtureWordSpec with DeeplyNestedTestNamesWithMust {
"A subject" when {
"created" must {
"first test" ignore { s => }
"second test" in { s => }
}
}
}
class NestedWordSpecWithCanExample extends WordSpec with NestedTestNamesWithCan {
"A subject" can {
"first test" ignore {}
"second test" in {}
}
}
class DeeplyNestedWordSpecWithCanExample extends WordSpec with DeeplyNestedTestNamesWithCan {
"A subject" when {
"created" can {
"first test" ignore {}
"second test" in {}
}
}
}
class NestedFixtureWordSpecWithCanExample extends StringFixtureWordSpec with NestedTestNamesWithCan {
"A subject" can {
"first test" ignore { s => }
"second test" in { s => }
}
}
class DeeplyNestedFixtureWordSpecWithCanExample extends StringFixtureWordSpec with DeeplyNestedTestNamesWithCan {
"A subject" when {
"created" can {
"first test" ignore { s => }
"second test" in { s => }
}
}
}
class FlatSpecExample extends FlatSpec with Services {
it should "first test" ignore {}
it should "second test" in {}
override val theTestNames = Vector("should first test", "should second test")
}
class SubjectFlatSpecExample extends FlatSpec with NestedTestNames {
behavior of "A subject"
it should "first test" ignore {}
it should "second test" in {}
}
class ShorthandSubjectFlatSpecExample extends FlatSpec with NestedTestNames {
"A subject" should "first test" ignore {}
it should "second test" in {}
}
class FixtureFlatSpecExample extends StringFixtureFlatSpec with Services {
it should "first test" ignore { s => }
it should "second test" in { s => }
override val theTestNames = Vector("should first test", "should second test")
}
class SubjectFixtureFlatSpecExample extends StringFixtureFlatSpec with NestedTestNames {
behavior of "A subject"
it should "first test" ignore { s => }
it should "second test" in { s => }
}
class ShorthandSubjectFixtureFlatSpecExample extends StringFixtureFlatSpec with NestedTestNames {
"A subject" should "first test" ignore { s => }
it should "second test" in { s => }
}
class FlatSpecWithMustExample extends FlatSpec with Services {
it must "first test" ignore {}
it must "second test" in {}
override val theTestNames = Vector("must first test", "must second test")
}
class SubjectFlatSpecWithMustExample extends FlatSpec with NestedTestNamesWithMust {
behavior of "A subject"
it must "first test" ignore {}
it must "second test" in {}
}
class ShorthandSubjectFlatSpecWithMustExample extends FlatSpec with NestedTestNamesWithMust {
"A subject" must "first test" ignore {}
it must "second test" in {}
}
class FixtureFlatSpecWithMustExample extends StringFixtureFlatSpec with Services {
it must "first test" ignore { s => }
it must "second test" in { s => }
override val theTestNames = Vector("must first test", "must second test")
}
class SubjectFixtureFlatSpecWithMustExample extends StringFixtureFlatSpec with NestedTestNamesWithMust {
behavior of "A subject"
it must "first test" ignore { s => }
it must "second test" in { s => }
}
class ShorthandSubjectFixtureFlatSpecWithMustExample extends StringFixtureFlatSpec with NestedTestNamesWithMust {
"A subject" must "first test" ignore { s => }
it must "second test" in { s => }
}
class FlatSpecWithCanExample extends FlatSpec with Services {
it can "first test" ignore {}
it can "second test" in {}
override val theTestNames = Vector("can first test", "can second test")
}
class SubjectFlatSpecWithCanExample extends FlatSpec with NestedTestNamesWithCan {
behavior of "A subject"
it can "first test" ignore {}
it can "second test" in {}
}
class ShorthandSubjectFlatSpecWithCanExample extends FlatSpec with NestedTestNamesWithCan {
"A subject" can "first test" ignore {}
it can "second test" in {}
}
class FixtureFlatSpecWithCanExample extends StringFixtureFlatSpec with Services {
it can "first test" ignore { s => }
it can "second test" in { s => }
override val theTestNames = Vector("can first test", "can second test")
}
class SubjectFixtureFlatSpecWithCanExample extends StringFixtureFlatSpec with NestedTestNamesWithCan {
behavior of "A subject"
it can "first test" ignore { s => }
it can "second test" in { s => }
}
class ShorthandSubjectFixtureFlatSpecWithCanExample extends StringFixtureFlatSpec with NestedTestNamesWithCan {
"A subject" can "first test" ignore { s => }
it can "second test" in { s => }
}
class FreeSpecExample extends FreeSpec with Services {
"first test" ignore {}
"second test" in {}
}
class NestedFreeSpecExample extends FreeSpec with NestedTestNames {
"A subject" - {
"should first test" ignore {}
"should second test" in {}
}
}
class DeeplyNestedFreeSpecExample extends FreeSpec with DeeplyNestedTestNames {
"A subject" - {
"when created" - {
"should first test" ignore {}
"should second test" in {}
}
}
}
class FixtureFreeSpecExample extends StringFixtureFreeSpec with Services {
"first test" ignore { s => }
"second test" in { s => }
}
class NestedFixtureFreeSpecExample extends StringFixtureFreeSpec with NestedTestNames {
"A subject" - {
"should first test" ignore { s => }
"should second test" in { s => }
}
}
class DeeplyNestedFixtureFreeSpecExample extends StringFixtureFreeSpec with DeeplyNestedTestNames {
"A subject" - {
"when created" - {
"should first test" ignore { s => }
"should second test" in { s => }
}
}
}
class PathFreeSpecExample extends path.FreeSpec with Services {
"first test" ignore {}
"second test" in {}
override def newInstance = new PathFreeSpecExample
}
class NestedPathFreeSpecExample extends path.FreeSpec with NestedTestNames {
"A subject" - {
"should first test" ignore {}
"should second test" in {}
}
override def newInstance = new NestedPathFreeSpecExample
}
class DeeplyNestedPathFreeSpecExample extends path.FreeSpec with DeeplyNestedTestNames {
"A subject" - {
"when created" - {
"should first test" ignore {}
"should second test" in {}
}
}
override def newInstance = new DeeplyNestedPathFreeSpecExample
}
// FeatureSpec example: test names carry the "Scenario: " prefix.
class FeatureSpecExample extends FeatureSpec with Services {
  ignore("first test") {}
  scenario("second test") {}
  override val theTestNames = Vector("Scenario: first test", "Scenario: second test")
}
class NestedFeatureSpecExample extends FeatureSpec with Services {
  feature("A feature") {
    ignore("first test") {}
    scenario("second test") {}
  }
  // NOTE(review): only the first expected name carries the "Feature: " prefix;
  // the second does not — confirm this asymmetry is intentional and matches
  // how this ScalaTest version names tests inside feature blocks.
  override val theTestNames = Vector("Feature: A feature Scenario: first test", "A feature Scenario: second test")
}
// Fixture variant of FeatureSpecExample: each test takes the String fixture.
class FixtureFeatureSpecExample extends StringFixtureFeatureSpec with Services {
  ignore("first test") { s => }
  scenario("second test") { s => }
  override val theTestNames = Vector("Scenario: first test", "Scenario: second test")
}
class NestedFixtureFeatureSpecExample extends StringFixtureFeatureSpec with Services {
  feature("A feature") {
    ignore("first test") { s => }
    scenario("second test") { s => }
  }
  // NOTE(review): same prefix asymmetry as NestedFeatureSpecExample — only the
  // first expected name has "Feature: "; confirm against the framework's
  // actual naming before relying on it.
  override val theTestNames = Vector("Feature: A feature Scenario: first test", "A feature Scenario: second test")
}
class PropSpecExample extends PropSpec with Services {
ignore("first test") {}
property("second test") {}
}
class FixturePropSpecExample extends StringFixturePropSpec with Services {
ignore("first test") { s => }
property("second test") { s => }
}
lazy val suite = new SuiteExample
lazy val fixtureSuite = new FixtureSuiteExample
lazy val funSuite = new FunSuiteExample
lazy val fixtureFunSuite = new FixtureFunSuiteExample
lazy val funSpec = new FunSpecExample
lazy val nestedFunSpec = new NestedFunSpecExample
lazy val deeplyNestedFunSpec = new DeeplyNestedFunSpecExample
lazy val fixtureFunSpec = new FixtureFunSpecExample
lazy val nestedFixtureFunSpec = new NestedFixtureFunSpecExample
lazy val deeplyNestedFixtureFunSpec = new DeeplyNestedFixtureFunSpecExample
lazy val pathFunSpec = new PathFunSpecExample
lazy val nestedPathFunSpec = new NestedPathFunSpecExample
lazy val deeplyNestedPathFunSpec = new DeeplyNestedPathFunSpecExample
lazy val wordSpec = new WordSpecExample
lazy val nestedWordSpec = new NestedWordSpecExample
lazy val deeplyNestedWordSpec = new DeeplyNestedWordSpecExample
lazy val fixtureWordSpec = new FixtureWordSpecExample
lazy val nestedFixtureWordSpec = new NestedFixtureWordSpecExample
lazy val deeplyNestedFixtureWordSpec = new DeeplyNestedFixtureWordSpecExample
lazy val nestedWordSpecWithMust = new NestedWordSpecWithMustExample
lazy val deeplyNestedWordSpecWithMust = new DeeplyNestedWordSpecWithMustExample
lazy val nestedFixtureWordSpecWithMust = new NestedFixtureWordSpecWithMustExample
lazy val deeplyNestedFixtureWordSpecWithMust = new DeeplyNestedFixtureWordSpecWithMustExample
lazy val nestedWordSpecWithCan = new NestedWordSpecWithCanExample
lazy val deeplyNestedWordSpecWithCan = new DeeplyNestedWordSpecWithCanExample
lazy val nestedFixtureWordSpecWithCan = new NestedFixtureWordSpecWithCanExample
lazy val deeplyNestedFixtureWordSpecWithCan = new DeeplyNestedFixtureWordSpecWithCanExample
lazy val flatSpec = new FlatSpecExample
lazy val subjectFlatSpec = new SubjectFlatSpecExample
lazy val shorthandSubjectFlatSpec = new ShorthandSubjectFlatSpecExample
lazy val fixtureFlatSpec = new FixtureFlatSpecExample
lazy val subjectFixtureFlatSpec = new SubjectFixtureFlatSpecExample
lazy val shorthandSubjectFixtureFlatSpec = new ShorthandSubjectFixtureFlatSpecExample
lazy val flatSpecWithMust = new FlatSpecWithMustExample
lazy val subjectFlatSpecWithMust = new SubjectFlatSpecWithMustExample
lazy val shorthandSubjectFlatSpecWithMust = new ShorthandSubjectFlatSpecWithMustExample
lazy val fixtureFlatSpecWithMust = new FixtureFlatSpecWithMustExample
lazy val subjectFixtureFlatSpecWithMust = new SubjectFixtureFlatSpecWithMustExample
lazy val shorthandSubjectFixtureFlatSpecWithMust = new ShorthandSubjectFixtureFlatSpecWithMustExample
lazy val flatSpecWithCan = new FlatSpecWithCanExample
lazy val subjectFlatSpecWithCan = new SubjectFlatSpecWithCanExample
lazy val shorthandSubjectFlatSpecWithCan = new ShorthandSubjectFlatSpecWithCanExample
lazy val fixtureFlatSpecWithCan = new FixtureFlatSpecWithCanExample
lazy val subjectFixtureFlatSpecWithCan = new SubjectFixtureFlatSpecWithCanExample
lazy val shorthandSubjectFixtureFlatSpecWithCan = new ShorthandSubjectFixtureFlatSpecWithCanExample
lazy val freeSpec = new FreeSpecExample
lazy val nestedFreeSpec = new NestedFreeSpecExample
lazy val deeplyNestedFreeSpec = new DeeplyNestedFreeSpecExample
lazy val fixtureFreeSpec = new FixtureFreeSpecExample
lazy val nestedFixtureFreeSpec = new NestedFixtureFreeSpecExample
lazy val deeplyNestedFixtureFreeSpec = new DeeplyNestedFixtureFreeSpecExample
lazy val pathFreeSpec = new PathFreeSpecExample
lazy val nestedPathFreeSpec = new NestedPathFreeSpecExample
lazy val deeplyNestedPathFreeSpec = new DeeplyNestedPathFreeSpecExample
lazy val featureSpec = new FeatureSpecExample
lazy val nestedFeatureSpec = new NestedFeatureSpecExample
lazy val fixtureFeatureSpec = new FixtureFeatureSpecExample
lazy val nestedFixtureFeatureSpec = new NestedFixtureFeatureSpecExample
lazy val propSpec = new PropSpecExample
lazy val fixturePropSpec = new FixturePropSpecExample
// Two ways to ignore in a flat spec, so add two more examples
// per verb ("should"/"must"/"can"), each in a plain and a fixture form.
// These variants use the `ignore` keyword directly on the first test.
override def examples = super.examples ++
Vector(
new FlatSpecExample2,
new FixtureFlatSpecExample2,
new FlatSpecWithMustExample2,
new FixtureFlatSpecWithMustExample2,
new FlatSpecWithCanExample2,
new FixtureFlatSpecWithCanExample2
)
// FlatSpec whose first test is ignored via the `ignore` keyword ("should" verb).
class FlatSpecExample2 extends FlatSpec with Services {
ignore should "first test" in {}
it should "second test" in {}
override val theTestNames = Vector("should first test", "should second test")
}
// Fixture FlatSpec counterpart: first test ignored via `ignore` ("should" verb).
class FixtureFlatSpecExample2 extends StringFixtureFlatSpec with Services {
ignore should "first test" in { s => }
it should "second test" in { s => }
override val theTestNames = Vector("should first test", "should second test")
}
// FlatSpec using the "must" verb; first test ignored via `ignore`.
class FlatSpecWithMustExample2 extends FlatSpec with Services {
ignore must "first test" in {}
it must "second test" in {}
override val theTestNames = Vector("must first test", "must second test")
}
// Fixture FlatSpec using the "must" verb; first test ignored via `ignore`.
class FixtureFlatSpecWithMustExample2 extends StringFixtureFlatSpec with Services {
ignore must "first test" in { s => }
it must "second test" in { s => }
override val theTestNames = Vector("must first test", "must second test")
}
// FlatSpec using the "can" verb; first test ignored via `ignore`.
class FlatSpecWithCanExample2 extends FlatSpec with Services {
ignore can "first test" in {}
it can "second test" in {}
override val theTestNames = Vector("can first test", "can second test")
}
// Fixture FlatSpec using the "can" verb; first test ignored via `ignore`.
class FixtureFlatSpecWithCanExample2 extends StringFixtureFlatSpec with Services {
ignore can "first test" in { s => }
it can "second test" in { s => }
override val theTestNames = Vector("can first test", "can second test")
}
}
|
travisbrown/scalatest
|
src/test/scala/org/scalatest/suiteprop/FirstTestIgnoredExamples.scala
|
Scala
|
apache-2.0
| 19,745
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.connector.read.{InputPartition, Scan}
import org.apache.spark.sql.connector.read.streaming.{ContinuousPartitionReaderFactory, ContinuousStream, Offset}
import org.apache.spark.sql.execution.streaming.continuous._
/**
 * Physical plan node for scanning data from a streaming data source with continuous mode.
 */
case class ContinuousScanExec(
output: Seq[Attribute],
@transient scan: Scan,
@transient stream: ContinuousStream,
@transient start: Offset) extends DataSourceV2ScanExecBase {
// TODO: unify the equal/hashCode implementation for all data source v2 query plans.
// NOTE: equality is defined solely by the underlying stream — two scan nodes
// over the same ContinuousStream compare equal regardless of output/scan/start.
override def equals(other: Any): Boolean = other match {
case other: ContinuousScanExec => this.stream == other.stream
case _ => false
}
override def hashCode(): Int = stream.hashCode()
// Input partitions planned by the stream from the given start offset (lazy,
// computed once per plan node).
override lazy val partitions: Seq[InputPartition] = stream.planInputPartitions(start)
override lazy val readerFactory: ContinuousPartitionReaderFactory = {
stream.createContinuousReaderFactory()
}
override lazy val inputRDD: RDD[InternalRow] = {
// Register the reader partition count with the epoch coordinator (blocking
// askSync) BEFORE constructing the RDD, so the coordinator expects the
// right number of partitions when the continuous query starts pulling data.
EpochCoordinatorRef.get(
sparkContext.getLocalProperty(ContinuousExecution.EPOCH_COORDINATOR_ID_KEY),
sparkContext.env)
.askSync[Unit](SetReaderPartitions(partitions.size))
new ContinuousDataSourceRDD(
sparkContext,
sqlContext.conf.continuousStreamingExecutorQueueSize,
sqlContext.conf.continuousStreamingExecutorPollIntervalMs,
partitions,
schema,
readerFactory.asInstanceOf[ContinuousPartitionReaderFactory],
customMetrics)
}
}
|
BryanCutler/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ContinuousScanExec.scala
|
Scala
|
apache-2.0
| 2,572
|
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogdebugger.opengl
import java.awt.event._
import javax.swing.JComponent
import scala.swing._
import scala.swing.event._
import com.jogamp.opengl.{GLProfile, GLCapabilities, GLEventListener}
/**
 * Scala Swing wrapper for a GLCanvas (a heavyweight AWT component). GLCanvas
 * has better rendering performance than GLPanel, but it doesn't always play
 * nicely with Swing. It's pretty easy to change a class to use one or the
 * other, so it may be a good idea to start with this GLCanvas wrapper and
 * only switch to the GLPanel wrapper if you can't get the former to work in
 * your GUI.
 */
abstract class SGLCanvas(caps: GLCapabilities) extends Component with GLEventListener {
// Convenience constructors: derive capabilities from a GL profile, or fall
// back to the platform's default profile.
def this(profile: GLProfile) = this(new GLCapabilities(profile))
def this() = this(GLProfile.getDefault)
// The actual heavyweight AWT canvas; the Swing peer hosts it in a 1x1 grid
// so the canvas fills this component.
val wrappedGlCanvas = new com.jogamp.opengl.awt.GLCanvas(caps)
peer.setLayout(new java.awt.GridLayout(1, 1))
// Subclasses implement GLEventListener; register this instance so the canvas
// delivers init/display/reshape/dispose callbacks to it.
wrappedGlCanvas.addGLEventListener(SGLCanvas.this)
peer.add(wrappedGlCanvas)
// Scala Swing's normal event publishers don't work because our peer is using
// a "heavyweight" AWT component as a child instead of a "lightweight" Swing
// component. Scala won't let us override Component's mouse object, so we
// have to wire up our AWT canvas to the existing publishers. As long as the
// events appear to be coming from 'peer,' things ought to work.
// Focus handling is delegated entirely to the wrapped heavyweight canvas.
override def focusable: Boolean = wrappedGlCanvas.isFocusable
override def focusable_=(b: Boolean) { wrappedGlCanvas.setFocusable(b) }
override def requestFocus() { wrappedGlCanvas.requestFocus() }
override def requestFocusInWindow() = wrappedGlCanvas.requestFocusInWindow()
override def hasFocus: Boolean = wrappedGlCanvas.isFocusOwner
// Forward AWT mouse enter/exit/click/press/release events to the Scala Swing
// publishers, rewriting the source so they appear to come from 'peer'.
wrappedGlCanvas.addMouseListener(new MouseListener {
def mouseEntered(e: java.awt.event.MouseEvent) {
e.setSource(peer)
mouse.moves.publish(new MouseEntered(e))
}
def mouseExited(e: java.awt.event.MouseEvent) {
e.setSource(peer)
mouse.moves.publish(new MouseExited(e))
}
def mouseClicked(e: java.awt.event.MouseEvent) {
e.setSource(peer)
mouse.clicks.publish(new MouseClicked(e))
}
def mousePressed(e: java.awt.event.MouseEvent) {
e.setSource(peer)
mouse.clicks.publish(new MousePressed(e))
}
def mouseReleased(e: java.awt.event.MouseEvent) {
e.setSource(peer)
mouse.clicks.publish(new MouseReleased(e))
}
})
// Same forwarding for mouse motion (move/drag) events.
wrappedGlCanvas.addMouseMotionListener(new MouseMotionListener {
def mouseMoved(e: java.awt.event.MouseEvent) {
e.setSource(peer)
mouse.moves.publish(new MouseMoved(e))
}
def mouseDragged(e: java.awt.event.MouseEvent) {
e.setSource(peer)
mouse.moves.publish(new MouseDragged(e))
}
})
/* This may not be the correct implementation - it's not clear how it will
 * behave if this canvas is dropped into a scroll pane. Likely, it will
 * prevent mouse wheel movements from scrolling the pane.
 */
wrappedGlCanvas.addMouseWheelListener(new MouseWheelListener {
def mouseWheelMoved(e: MouseWheelEvent) {
e.setSource(peer)
mouse.wheel.publish(new MouseWheelMoved(e))
}
})
// Forward keyboard events; published directly on this component (not a
// sub-publisher like mouse events).
wrappedGlCanvas.addKeyListener(new KeyListener {
def keyPressed(e: java.awt.event.KeyEvent) {
e.setSource(peer)
publish(new KeyPressed(e))
}
def keyReleased(e: java.awt.event.KeyEvent) {
e.setSource(peer)
publish(new KeyReleased(e))
}
def keyTyped(e: java.awt.event.KeyEvent) {
e.setSource(peer)
publish(new KeyTyped(e))
}
})
/* Publishes focus events of the wrapped heavyweight canvas instance. */
wrappedGlCanvas.addFocusListener(new java.awt.event.FocusListener {
// Best-effort lookup of the Scala Swing wrapper for the "other" component
// involved in the focus change; None if it isn't a Swing component.
def other(e: java.awt.event.FocusEvent) = e.getOppositeComponent match {
case c: JComponent =>
// I'd prefer to use "UIElement.cachedWrapper[Component](c)" as a
// vanilla Scala-Swing Component does, but that's marked private. So we
// do it by hand.
Some(c.getClientProperty("scala.swingWrapper").asInstanceOf[Component])
case _ => None
}
def focusGained(e: java.awt.event.FocusEvent) {
e.setSource(peer)
publish(FocusGained(SGLCanvas.this, other(e), e.isTemporary))
}
def focusLost(e: java.awt.event.FocusEvent) {
e.setSource(peer)
publish(FocusLost(SGLCanvas.this, other(e), e.isTemporary))
}
})
}
|
hpe-cct/cct-core
|
src/main/scala/cogdebugger/opengl/SGLCanvas.scala
|
Scala
|
apache-2.0
| 5,058
|
def /*caret*/bar(a: Int, b: Int, c: String): String = ((a + b) * b).toString + c
bar(4, 5, "foo")
//((4 + 5) * 5).toString + "foo"
|
JetBrains/intellij-scala
|
scala/scala-impl/testdata/inline/arguments/MethodWithMultipleArgumentsWithMultipleReferenes.scala
|
Scala
|
apache-2.0
| 131
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.utils
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.box.CtValidation
/** Shared fixtures for additional-notes / footnotes box specs. */
trait AdditionalNotesAndFootnotesHelper extends WordSpec with Matchers with MockitoSugar {

  /** Sample free-text note used as test input. */
  val input: String = "Some very off balance arrangements"

  /** The empty validation set, i.e. a successful validation. */
  val validationSuccess: Set[CtValidation] = Set.empty

  /** Identifier of the box under test; supplied by the concrete spec. */
  val boxId: String

  val minNumberOfEmployees = 0
  val maxNumberOfEmployees = 99999

  /** Builds the expected "required" validation error for a given box id. */
  val fieldRequiredError: String => Set[CtValidation] = { id =>
    val requiredKey = s"error.$id.required"
    Set(CtValidation(Some(id), requiredKey, None))
  }
}
|
hmrc/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/accounts/utils/AdditionalNotesAndFootnotesHelper.scala
|
Scala
|
apache-2.0
| 1,204
|
package keystoneml.utils
import breeze.linalg.{SparseVector, DenseMatrix, DenseVector}
/**
* Provides conversions between MLlib vectors & matrices, and Breeze vectors & matrices
*/
/**
 * Provides conversions between MLlib vectors & matrices, and Breeze vectors & matrices.
 */
object MLlibUtils {

  /** Convert an MLlib vector to a Breeze dense vector.
   *
   * For an MLlib `DenseVector` the underlying value array is shared (no copy);
   * any other vector type is materialised via `toArray`.
   */
  def mllibVectorToDenseBreeze(vector: org.apache.spark.mllib.linalg.Vector): DenseVector[Double] = {
    vector match {
      case dense: org.apache.spark.mllib.linalg.DenseVector => new DenseVector[Double](dense.values)
      case _ => new DenseVector[Double](vector.toArray)
    }
  }

  /** Convert an MLlib matrix to a Breeze dense matrix.
   *
   * A transposed MLlib `DenseMatrix` stores its values in row-major order,
   * which is the column-major layout of the (numCols x numRows) matrix. We
   * therefore build that matrix and take the Breeze transpose to get back a
   * numRows x numCols view, matching Spark's own `Matrix.asBreeze`.
   * (The previous code passed (numRows, numCols) in the transposed branch,
   * producing a matrix of the wrong shape and content.)
   */
  def mllibMatrixToDenseBreeze(matrix: org.apache.spark.mllib.linalg.Matrix): DenseMatrix[Double] = {
    matrix match {
      case dense: org.apache.spark.mllib.linalg.DenseMatrix =>
        if (!dense.isTransposed) {
          new DenseMatrix[Double](dense.numRows, dense.numCols, dense.values)
        } else {
          new DenseMatrix[Double](dense.numCols, dense.numRows, dense.values).t
        }
      case _ => new DenseMatrix[Double](matrix.numRows, matrix.numCols, matrix.toArray)
    }
  }

  /** Convert a Breeze vector to an MLlib vector, maintaining underlying data
   * structure (sparse vs dense) and sharing backing arrays when it is safe
   * to do so (contiguous dense data, fully-used sparse arrays).
   */
  def breezeVectorToMLlib(breezeVector: breeze.linalg.Vector[Double]): org.apache.spark.mllib.linalg.Vector = {
    breezeVector match {
      case v: DenseVector[Double] =>
        if (v.offset == 0 && v.stride == 1 && v.length == v.data.length) {
          new org.apache.spark.mllib.linalg.DenseVector(v.data)
        } else {
          // Can't use the underlying array directly, so make a contiguous copy.
          new org.apache.spark.mllib.linalg.DenseVector(v.toArray)
        }
      case v: SparseVector[Double] =>
        if (v.index.length == v.used) {
          new org.apache.spark.mllib.linalg.SparseVector(v.length, v.index, v.data)
        } else {
          // Trim over-allocated index/data arrays down to the used prefix.
          new org.apache.spark.mllib.linalg.SparseVector(v.length, v.index.slice(0, v.used), v.data.slice(0, v.used))
        }
      case v: breeze.linalg.Vector[_] =>
        sys.error("Unsupported Breeze vector type: " + v.getClass.getName)
    }
  }
}
|
amplab/keystone
|
src/main/scala/keystoneml/utils/MLlibUtils.scala
|
Scala
|
apache-2.0
| 2,178
|
import math.{Ray, Vector3}
import org.scalacheck.{Arbitrary, Gen}
import org.scalacheck.Prop._
import org.specs2.{ScalaCheck, Specification}
/**
 * Property-based specification for [[math.Ray]]: checks reflection and
 * refraction directions for randomly generated origins and directions.
 */
class RaySpec extends Specification with ScalaCheck {
//TODO separate special cases of reflection and refraction
def is =
s2"""
A ray should
calculate the direction of a reflection correctly $testReflection
calculate the direction of a refraction correctly $testRefraction
calculate the position for a given marching direction
"""
// Reflecting about the reversed direction must flip the ray's direction
// (within tolerance), bump the recursion depth, and keep the refraction index.
val testReflection = forAll { (dir: Vector3, o: Vector3) =>
{
val ray = Ray(origin = o, direction = dir.normalized)
val rray = ray.reflectedAt(o, -ray.direction)
(rray.direction + ray.direction).length < 0.01 && rray.depth == ray.depth + 1 && ray.n == rray.n
}
}
// Refracting into a medium with the same index (newN = 1) must leave the
// direction unchanged (within tolerance), bump the depth, and set n to 1.
val testRefraction = forAll { (dir: Vector3, o: Vector3, norm: Vector3) =>
{
val ray = Ray(origin = o, direction = dir.normalized)
val refractedRay =
ray.refractedAt(position = o, normal = norm, newN = 1f)
refractedRay match {
case None => false
case Some(rray) =>
(rray.direction - ray.direction).length < 0.01 && rray.depth == ray.depth + 1 && rray.n == 1
}
}
}
// Component range (-0.9, 0.9) — presumably chosen to avoid degenerate
// (zero-length or extreme) vectors; confirm against Vector3.normalized.
implicit lazy val VectorGen: Arbitrary[Vector3] =
Arbitrary {
for {
x: Double <- Gen.choose(-.9, .9)
y: Double <- Gen.choose(-.9, .9)
z: Double <- Gen.choose(-.9, .9)
} yield Vector3(x, y, z)
}
}
|
wookenny/scalarty
|
src/test/scala/RaySpec.scala
|
Scala
|
mit
| 1,472
|
package org.jetbrains.plugins.scala.lang.psi.stubs
import com.intellij.psi.stubs.StubElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScEarlyDefinitions
/**
 * Stub element for [[org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScEarlyDefinitions]]
 * PSI elements. Marker trait: carries no data beyond the base [[StubElement]].
 *
 * User: Alexander Podkhalyuzin
 * Date: 17.06.2009
 */
trait ScEarlyDefinitionsStub extends StubElement[ScEarlyDefinitions]
|
gtache/intellij-lsp
|
intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/psi/stubs/ScEarlyDefinitionsStub.scala
|
Scala
|
apache-2.0
| 299
|
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import scala.concurrent.duration._
/** Gatling load-test simulation for the Offshore site's homepage. */
class OffshoreSimulation extends Simulation {
// HTTP protocol configuration; alternative target hosts are kept as comments.
val httpConf = http
//ec2-52-7-167-16.compute-1.amazonaws.com
//.baseURL("http://ec2-52-6-114-144.compute-1.amazonaws.com") // Here is the root for all relative URLs
// Current target: the AWS Elastic Beanstalk load balancer.
.baseURL("http://awseb-e-a-AWSEBLoa-BZ3R8GJHNH9W-1571054632.us-east-1.elb.amazonaws.com")
//.baseURL("http://offshore.lh")
.acceptHeader("text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8") // Here are the common headers
.doNotTrackHeader("1")
.acceptLanguageHeader("en-US,en;q=0.5")
.acceptEncodingHeader("gzip, deflate")
.userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:16.0) Gecko/20100101 Firefox/16.0")
val headers_10 = Map("Content-Type" -> """application/x-www-form-urlencoded""") // Note the headers specific to a given request
// Scenario: fetch the homepage and verify the Norwegian "Siste Nytt" heading
// is present, then pause. Further requests are kept disabled below.
val scn = scenario("Offshore_1") // A scenario is a chain of requests and pauses
.exec(http("homepage")
.get("/")
.check(regex("""<h2 class="visible-lg">Siste Nytt:</h2>""").exists))
.pause(10) // Note that Gatling has recorded real time pauses
// .exec(http("riggdata")
// .get("/rigg"))
// .pause(10)
// .exec(http("search1")
// .get("/?s=Viking"))
// .pause(10)
//setUp(scn.inject(atOnceUsers(1)).protocols(httpConf))
// Load profile: ramp 500 virtual users over 60 seconds.
setUp(scn.inject(rampUsers(500) over (60 seconds))).protocols(httpConf)
}
|
jacekelgda/gatling-offshore
|
user-files/simulations/offshore/offshore.scala
|
Scala
|
apache-2.0
| 1,478
|
package dedep.bonobo.utils
object MathUtils {

  /** Returns true iff `num` is a positive power of two (1, 2, 4, 8, ...).
   *
   * Uses the classic bit trick: a positive power of two has exactly one bit
   * set, so `num & (num - 1)` clears it to zero.
   */
  def isPowerOfTwo(num: Int): Boolean =
    num >= 1 && (num & (num - 1)) == 0

  /** Returns the largest power of two that is <= `number`.
   *
   * `Integer.highestOneBit` isolates the most significant set bit, which for
   * a positive int is exactly the floor power of two. The previous
   * Stream-based doubling overflowed for `number >= 2^30`: `2^30 * 2` wraps
   * to Int.MinValue, which still satisfies `_ <= number`, so `takeWhile`
   * never terminated. This implementation is overflow-safe and O(1).
   *
   * @throws IllegalArgumentException if `number < 1`
   */
  def getFloorPowerOfTwoNumber(number: Int): Int = {
    require(number >= 1, s"number must be >= 1, was $number")
    Integer.highestOneBit(number)
  }
}
|
dedep/bonobo-core
|
src/main/scala/dedep/bonobo/utils/MathUtils.scala
|
Scala
|
mit
| 311
|
package at.logic.gapt.examples
import at.logic.gapt.expr.fol.FOLSubstitution
import at.logic.gapt.expr.{ FOLConst, FOLVar }
import at.logic.gapt.formats.prover9.Prover9TermParserLadrStyle
import at.logic.gapt.proofs.lk._
import at.logic.gapt.proofs.lk.base.LKProof
object equation_example {
/**
 * Constructs a small LK proof exercising the equation rules, together with
 * a substitution { u -> b }. The exact order of rule applications and the
 * sequent-position indices below are load-bearing; do not reorder.
 */
def apply: ( LKProof, FOLSubstitution ) = {
// Atomic formulas parsed in LADR (Prover9) syntax.
val List(
uv, fuu, fuv, ab, fab ) = List(
"u = v",
"f(u)=f(u)", "f(u)=f(v)", "a=b", "f(a)=f(b)"
) map ( Prover9TermParserLadrStyle.parseFormula )
// Quantified congruence variants used as main formulas of the forall rules.
val List( uy, xy, ay ) = List(
"(all y (u = y -> f(u) = f(y)))",
"(all x all y (x = y -> f(x) = f(y)))",
"(all y (a = y -> f(a) = f(y)))"
) map ( Prover9TermParserLadrStyle
.parseFormula )
val List( u, v ) = List( "u", "v" ).map( s => FOLVar( s ) )
val List( a, b ) = List( "a", "b" ).map( s => FOLConst( s ) )
// Axioms (initial sequents).
val ax1 = Axiom( List( uv ), List( uv ) )
val ax2 = Axiom( List(), List( fuu ) )
val ax3 = Axiom( List( ab ), List( ab ) )
val ax4 = Axiom( List( fab ), List( fab ) )
// Left branch: derive the universally quantified congruence statement.
val i1 = EquationRight1Rule( ax1, ax2, ax1.root.succedent( 0 ), ax2.root.succedent( 0 ), fuv )
val i2 = ImpRightRule( i1, i1.root.antecedent( 0 ), i1.root.succedent( 0 ) )
println( i2.root )
val i3 = ForallRightRule( i2, i2.root.succedent( 0 ), uy, v )
val i4 = ForallRightRule( i3, i3.root.succedent( 0 ), xy, u )
// Right branch: instantiate the congruence statement at a, b.
val i5 = ImpLeftRule( ax3, ax4, ax3.root.succedent( 0 ), ax4.root.antecedent( 0 ) )
val i6 = ForallLeftRule( i5, i5.root.antecedent( 1 ), ay, b )
val i7 = ForallLeftRule( i6, i6.root.antecedent( 1 ), xy, a )
// Cut on the quantified formula to join the two branches.
val es = CutRule( i4, i7, i4.root.succedent( 0 ), i7.root.antecedent( 1 ) )
val sub = FOLSubstitution( ( u, b ) :: Nil )
( es, sub )
}
}
|
loewenheim/gapt
|
src/main/scala/at/logic/gapt/examples/equation_example.scala
|
Scala
|
gpl-3.0
| 1,758
|
import org.apache.spark._
import org.apache.spark.graphx._
import org.apache.spark.rdd._
import org.apache.spark.SparkContext._
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import java.io._
object PageRank {
  /**
   * Runs GraphX PageRank over an edge-list file and reports timing as JSON.
   *
   * args(0) - path to the edge-list input file
   * args(1) - tolerance passed to `Graph.pageRank` (named `alpha` here)
   */
  def main(args: Array[String]) {
    require(args.length >= 2, "usage: PageRank <edgeListFile> <alpha>")
    val conf = new SparkConf().setAppName("PageRank_CSLab")
    val sc = new SparkContext(conf)
    try {
      val inputGraph = GraphLoader.edgeListFile(sc, args(0))
      val start = java.lang.System.currentTimeMillis
      val alpha = args(1).toDouble
      val ranks = inputGraph.pageRank(alpha).vertices
      // Force evaluation of the (otherwise lazy) vertex RDD so execTime
      // measures the actual PageRank computation, not just plan construction.
      ranks.count()
      val execTime = java.lang.System.currentTimeMillis - start
      val output = ("operator" -> "PageRank_Spark") ~
        ("exec_time" -> execTime) ~
        ("alpha" -> alpha)
      // The original built this JSON but never emitted it; print it so the
      // metrics are actually reported.
      println(compact(render(output)))
    } finally {
      // Always release cluster resources, even if the job fails.
      sc.stop()
    }
  }
}
|
project-asap/IReS-Platform
|
asap-tools/spark/scala/src/main/scala/PageRank.scala
|
Scala
|
apache-2.0
| 789
|
package com.sksamuel.scapegoat.inspections.math
import com.sksamuel.scapegoat.PluginRunner
import org.scalatest.{ OneInstancePerTest, FreeSpec, Matchers }
/** @author Stephen Samuel */
// Verifies that the DivideByOne inspection warns on `x / 1` for each numeric
// primitive: Int, Double (via Float-style literal), Long and explicit Double.
// Each case compiles a small snippet and expects exactly one warning.
class DivideByOneTest
extends FreeSpec
with PluginRunner
with Matchers
with OneInstancePerTest {
override val inspections = Seq(new DivideByOne)
"divide by one" - {
"for int" - {
"should report warning" in {
val code = """object Test {
val a = 14
val b = a / 1
} """.stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
}
"for float" - {
"should report warning" in {
val code = """object Test {
val c = 10.0
val d = c / 1
} """.stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
}
"for long" - {
"should report warning" in {
val code = """object Test {
val e = 100l
val f = e / 1
} """.stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
}
"for double" - {
"should report warning" in {
val code = """object Test {
val g = 5.0d
val h = g / 1
} """.stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
}
}
}
|
jasonchaffee/scalac-scapegoat-plugin
|
src/test/scala/com/sksamuel/scapegoat/inspections/math/DivideByOneTest.scala
|
Scala
|
apache-2.0
| 1,616
|
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets matching specific criteria, giving a basic overview of the dataset's contents without revealing deeper insights.