code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.driver.test.exception
import com.stratio.sparta.driver.exception.DriverException
import org.junit.runner.RunWith
import org.scalatest._
import org.scalatest.junit.JUnitRunner
// Verifies DriverException.create: the produced Throwable must carry the
// supplied message and, when given, the supplied cause.
@RunWith(classOf[JUnitRunner])
class DriverExceptionTest extends FlatSpec with ShouldMatchers {

  behavior of "DriverException"

  it should "return a Throwable" in {
    val message = "my custom exception"
    val created = DriverException.create(message)
    created.getMessage should be(message)
  }

  it should "return a exception with the msg and a cause" in {
    val message = "my custom exception"
    val created = DriverException.create(message, new RuntimeException("cause"))
    created.getCause.getMessage should be("cause")
  }
}
| danielcsant/sparta | driver/src/test/scala/com/stratio/sparta/driver/test/exception/DriverExceptionTest.scala | Scala | apache-2.0 | 1,318 |
package com.googlecode.warikan.domain.contexts
import org.junit._
import Assert._
import com.googlecode.warikan.domain.contexts._
import com.googlecode.warikan.domain.models._
import com.googlecode.warikan.domain.repositories._
import com.googlecode.warikan.mocks._
class AccountingTest extends InjectorInitializer {
// JUnit lifecycle hook: reset both mock repositories before every test so
// that state written by one test cannot leak into the next.
// NOTE(review): the asInstanceOf casts assume the injector binds the Mock*Impl
// classes from com.googlecode.warikan.mocks — confirm in InjectorInitializer.
@Before
def setup = {
PartyRepository.repository.asInstanceOf[MockPartyRepositoryImpl].initialize
SlopeRepository.repository.asInstanceOf[MockSlopeRepositoryImpl].initialize
}
// End-to-end accounting scenario: a party of one Chief and two Novices pays a
// total of 10000, split by the Slope weights (Chief 10, Novice 5, Novice 5),
// so the expected payments are 5000 / 2500 / 2500.
@Test
def shouldExecuteAccounting = {
// id
val slopeId:String = 1.toString
val partyId:String = 2.toString
// Preparation
// A Slope maps each Role to its AllotWeight (relative share of the bill).
val slope:Slope = new Slope(slopeId)
slope.put(Role("Chief") -> AllotWeight(10))
slope.put(Role("Novice") -> AllotWeight(5))
SlopeRepository.add(slope)
// Party
val party:Party = new Party(partyId)
// Planning
party.putParticipant(UserName("Jack") -> new Participant(UserName("Jack"), Role("Chief")))
party.putParticipant(UserName("John") -> new Participant(UserName("John"), Role("Novice")))
party.putParticipant(UserName("Paul") -> new Participant(UserName("Paul"), Role("Novice")))
// Paying
party.sum = 10000
PartyRepository.add(party)
/* Accounting */
// adjustBy computes each participant's payment from the slope weights and
// commit persists the result, so that paymentOf can read it back below.
val accounting:Accounting = new Accounting(partyId)
accounting.adjustBy(slopeId)
accounting.commit
// Expected: 10000 * 10/(10+5+5) = 5000 for the Chief, 10000 * 5/20 = 2500 per Novice.
assertEquals(5000, PartyRepository.paymentOf(partyId, UserName("Jack")))
assertEquals(2500, PartyRepository.paymentOf(partyId, UserName("John")))
assertEquals(2500, PartyRepository.paymentOf(partyId, UserName("Paul")))
}
} | digitalsoul0124/warikan | test/scala/com/googlecode/warikan/domain/contexts/AccountingTest.scala | Scala | mit | 1,821 |
package com.codacy.client.bitbucket.v2.service
import java.net.URLEncoder
import com.codacy.client.bitbucket.client.{BitbucketClient, Request, RequestResponse}
import com.codacy.client.bitbucket.v2.CommitComment
import play.api.libs.json.{JsNumber, JsObject, JsString, Json}
/** Bitbucket v2 commit-comment operations: create, list and delete comments. */
class CommitServices(client: BitbucketClient) {

  /** Creates a comment on a commit. When both `file` and `line` are supplied,
    * the comment is attached inline to that file/line; otherwise it is a
    * plain commit-level comment.
    */
  def createComment(
      author: String,
      repository: String,
      commit: String,
      body: String,
      file: Option[String] = None,
      line: Option[Int] = None
  ): RequestResponse[CommitComment] = {
    val url = generateCommitCommentUrl(author, repository, commit)
    // The "inline" field is only emitted when a file AND a line are given.
    val inlineField = for (path <- file; to <- line)
      yield "inline" -> Json.obj("path" -> JsString(path), "to" -> JsNumber(to))
    val contentField = "content" -> Json.obj("raw" -> JsString(body))
    val payload = JsObject(inlineField.toSeq :+ contentField)
    client.postJson(Request(url, classOf[CommitComment]), payload)
  }

  /** Lists the comments of a commit, excluding the ones flagged as deleted. */
  def listComments(author: String, repository: String, commit: String): RequestResponse[Seq[CommitComment]] = {
    val url = generateCommitCommentUrl(author, repository, commit)
    val allComments = client.executePaginated(Request(url, classOf[Seq[CommitComment]]))
    allComments.map(_.filterNot(_.deleted))
  }

  /** Deletes a single comment of a commit by its numeric id. */
  def deleteComment(author: String, repository: String, commit: String, commentId: Long): RequestResponse[Boolean] =
    client.delete(s"${generateCommitCommentUrl(author, repository, commit)}/$commentId")

  // Builds the .../{owner}/{repo}/commit/{sha}/comments URL with each path
  // segment individually URL-encoded.
  private def generateCommitCommentUrl(author: String, repository: String, commit: String): String = {
    val Seq(owner, repo, sha) = Seq(author, repository, commit).map(URLEncoder.encode(_, "UTF-8"))
    s"${client.repositoriesBaseUrl}/$owner/$repo/commit/$sha/comments"
  }
}
| codacy/bitbucket-scala-client | src/main/scala/com/codacy/client/bitbucket/v2/service/CommitServices.scala | Scala | apache-2.0 | 1,965 |
package magento
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import io.gatling.jdbc.Predef._
import io.gatling.http.Headers.Names._
import scala.concurrent.duration._
import assertions._
// Gatling load test: ramps `users` anonymous browser sessions against a local
// Magento instance over `time` seconds and asserts on success rate / latency.
class BrowserSimulation_30 extends Simulation {
// Base URL of the Magento installation under test.
val defaultUrlBase = "http://127.0.0.1/magento-git-clone"
// Both knobs are overridable from the command line via -Dusers=... / -Dtime=...
val userOption: Int = Integer.getInteger("users", 3000).toInt
val timeOption: Int = Integer.getInteger("time", 100).toInt
// Shared HTTP protocol configuration; headers mimic a regular browser.
val httpProtocol = http
.baseURL(defaultUrlBase)
.acceptCharsetHeader("ISO-8859-1,utf-8;q=0.7,*;q=0.7")
.acceptHeader("text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
.acceptEncodingHeader("gzip, deflate")
.acceptLanguageHeader("en-us;q=0.9,en;q=0.3")
// .disableFollowRedirect
// Inject userOption users linearly over timeOption seconds; the run fails if
// any request fails or if the mean response time reaches 1000 ms.
setUp(AnonymousBrowserScenario.scn.inject(ramp(userOption) over (timeOption)))
.protocols(httpProtocol)
.assertions(
global.successfulRequests.percent.is(100),
global.responseTime.mean.lessThan(1000)
)
} | candes/magento-gatling2 | user-files/simulations/magento/BrowserSimulation_30.scala | Scala | apache-2.0 | 1,053 |
package me.m1key.audioliciousmigration.importer
import me.m1key.audioliciousmigration.AudioliciousImporter
import me.m1key.audioliciousmigration.repository.LibraryRepository
import com.google.inject.Inject
import me.m1key.audioliciousmigration.feeder.Feeder
private[audioliciousmigration] class RelativeDataImporter @Inject() (private val feeder: Feeder, private val libraryRepository: LibraryRepository) extends AudioliciousImporter {
/** Loads the most recent library from the repository and feeds it to the
  * configured Feeder; fails with a RuntimeException when none exists.
  * NOTE(review): despite the parameter, the lookup uses getLatestLibrary and
  * `libraryUuid` is only used in the log messages — confirm this is intended.
  */
def importLibrary(libraryUuid: String): Unit = {
  println("Importing library [%s]...".format(libraryUuid))
  libraryRepository.getLatestLibrary match {
    case None =>
      throw new RuntimeException("Library [%s] not found.".format(libraryUuid))
    case Some(library) =>
      println("Library [%s] imported.".format(libraryUuid))
      feeder.feed(library)
  }
}
} | m1key/audiolicious-migration | src/main/scala/me/m1key/audioliciousmigration/importer/RelativeDataImporter.scala | Scala | gpl-3.0 | 815 |
// A minimal, immutable, singly-linked list (Functional Programming in Scala,
// chapter 3 style). Covariant in its element type.
sealed trait List[+A]
case object Nil extends List[Nothing]
case class Cons[+A](head: A, tail: List[A]) extends List[A]

object List {
  /** Builds a List from the given varargs. */
  def apply[A](as: A*): List[A] = {
    if (as.isEmpty) Nil
    else Cons(as.head, apply(as.tail: _*))
  }

  /** Returns all elements but the first; Nil when the list is empty.
   *  (Previously unimplemented: threw NotImplementedError.)
   */
  def tail[A](items: List[A]): List[A] = items match {
    case Nil => Nil
    case Cons(_, rest) => rest
  }

  /** Replaces the first element with n; yields a one-element list when empty.
   *  (Previously unimplemented.)
   */
  def setHead[A](items: List[A], n: A): List[A] = items match {
    case Nil => Cons(n, Nil)
    case Cons(_, rest) => Cons(n, rest)
  }

  /** Drops the first n elements (all of them when n exceeds the length).
   *  (Previously unimplemented.)
   */
  @annotation.tailrec
  def drop[A](l: List[A], n: Int): List[A] =
    if (n <= 0) l
    else l match {
      case Nil => Nil
      case Cons(_, rest) => drop(rest, n - 1)
    }

  /** Drops elements from the front as long as the predicate holds.
   *  (Previously unimplemented.)
   */
  @annotation.tailrec
  def dropWhile[A](l: List[A])(f: A => Boolean): List[A] = l match {
    case Cons(h, rest) if f(h) => dropWhile(rest)(f)
    case _ => l
  }

  /** Right fold; not stack-safe for very long lists (not tail recursive). */
  def foldRight[A,B](as: List[A], z: B)(f:(A,B) => B): B = as match {
    case Nil => z
    case Cons(x, xs) => f(x, foldRight(xs,z)(f))
  }

  /** Sum of an integer list. */
  def sum(ns: List[Int]) = foldLeft(ns, 0)((x,y) => x + y)

  /** Product of a double list. */
  def product(ns: List[Double]) = foldLeft(ns, 1.0) (_ * _)

  /** Number of elements: the accumulator is incremented once per element. */
  def length[A](as: List[A]): Int = foldLeft(as, 0)((acc, _) => 1 + acc)

  /** Stack-safe left fold. */
  @annotation.tailrec
  def foldLeft[A,B](as: List[A], z: B)(f: (B, A) => B): B = as match {
    case Nil => z
    case Cons(x, xs) => foldLeft(xs, f(z, x))(f)
  }

  /*
  Turns each value in the list into its String representation.
  (The exercise text mentions List[Double]; this variant operates on List[Int],
  matching the asserts below.)
  */
  def doubleToString(as: List[Int]): List[String] = as match {
    case Nil => Nil
    case Cons(x, xs) => Cons(x.toString, doubleToString(xs))
  }
}
/*
tests — script-style top-level smoke checks for doubleToString;
a failing assert raises java.lang.AssertionError when the script runs.
*/
assert(List.doubleToString(Nil) == Nil)
assert(List.doubleToString(List(1)) == List("1"))
assert(List.doubleToString(List(1,2,3)) == List("1","2","3")) | wkimeria/fp_scala_for_mortals | chapter_3/exercises/exercise_17.scala | Scala | mit | 1,404 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers
/**
* Trait extended by matcher objects, which may appear after the word <code>be</code>, that can match a value of the specified type.
* The value to match is passed to the <code>BeMatcher</code>'s <code>apply</code> method. The result is a <code>MatchResult</code>.
* A <code>BeMatcher</code> is, therefore, a function from the specified type, <code>T</code>, to a <code>MatchResult</code>.
*
* <p>
* Although <code>BeMatcher</code>
* and <code>Matcher</code> represent very similar concepts, they have no inheritance relationship
* because <code>Matcher</code> is intended for use right after <code>should</code> or <code>must</code>
* whereas <code>BeMatcher</code> is intended for use right after <code>be</code>.
* </p>
*
* <p>
* As an example, you could create <code>BeMatcher[Int]</code>
* called <code>odd</code> that would match any odd <code>Int</code>, and one called <code>even</code> that would match
* any even <code>Int</code>.
* Given this pair of <code>BeMatcher</code>s, you could check whether an <code>Int</code> was odd or even with expressions like:
* </p>
*
* <pre class="stHighlight">
* num should be (odd)
* num should not be (even)
* </pre>
*
* <p>
* Here's is how you might define the odd and even <code>BeMatchers</code>:
* </p>
*
* <pre class="stHighlight">
* trait CustomMatchers {
*
* class OddMatcher extends BeMatcher[Int] {
* def apply(left: Int) =
* MatchResult(
* left % 2 == 1,
* left.toString + " was even",
* left.toString + " was odd"
* )
* }
* val odd = new OddMatcher
* val even = not (odd)
* }
*
* // Make them easy to import with:
* // import CustomMatchers._
* object CustomMatchers extends CustomMatchers
* </pre>
*
* <p>
* These <code>BeMatcher</code>s are defined inside a trait to make them easy to mix into any
* suite or spec that needs them.
* The <code>CustomMatchers</code> companion object exists to make it easy to bring the
* <code>BeMatcher</code>s defined in this trait into scope via importing, instead of mixing in the trait. The ability
* to import them is useful, for example, when you want to use the matchers defined in a trait in the Scala interpreter console.
* </p>
*
* <p>
* Here's an rather contrived example of how you might use <code>odd</code> and <code>even</code>:
* </p>
*
* <pre class="stHighlight">
* class DoubleYourPleasureSuite extends FunSuite with MustMatchers with CustomMatchers {
*
* def doubleYourPleasure(i: Int): Int = i * 2
*
* test("The doubleYourPleasure method must return proper odd or even values")
*
* val evenNum = 2
* evenNum must be (even)
* doubleYourPleasure(evenNum) must be (even)
*
* val oddNum = 3
* oddNum must be (odd)
* doubleYourPleasure(oddNum) must be (odd) // This will fail
* }
* }
* </pre>
*
* <p>
* The last assertion in the above test will fail with this failure message:
* </p>
*
* <pre class="stHighlight">
* 6 was even
* </pre>
*
* <p>
* For more information on <code>MatchResult</code> and the meaning of its fields, please
* see the documentation for <a href="MatchResult.html"><code>MatchResult</code></a>. To understand why <code>BeMatcher</code>
* is contravariant in its type parameter, see the section entitled "Matcher's variance" in the
* documentation for <a href="../Matcher.html"><code>Matcher</code></a>.
* </p>
*
* @author Bill Venners
*/
/**
 * Trait extended by matcher objects that may appear after the word <code>be</code>:
 * applying a <code>BeMatcher[T]</code> to a value of type <code>T</code> produces a
 * <code>MatchResult</code>, making it a function from <code>T</code> to <code>MatchResult</code>.
 *
 * <p>
 * Although similar in concept to <code>Matcher</code>, there is no inheritance relationship
 * between the two: a <code>Matcher</code> is used right after <code>should</code> or
 * <code>must</code>, whereas a <code>BeMatcher</code> is used right after <code>be</code>.
 * Given, say, an <code>odd</code> <code>BeMatcher[Int]</code>, you can write:
 * </p>
 *
 * <pre class="stHighlight">
 * num should be (odd)
 * num should not be (odd)
 * </pre>
 *
 * <p>
 * For details on <code>MatchResult</code> see <a href="MatchResult.html"><code>MatchResult</code></a>;
 * for why this trait is contravariant in its type parameter, see the section entitled
 * "Matcher's variance" in <a href="../Matcher.html"><code>Matcher</code></a>.
 * </p>
 *
 * @author Bill Venners
 */
trait BeMatcher[-T] extends Function1[T, MatchResult] { outer =>

  /**
   * Checks whether the given value matches and reports the outcome as a
   * <code>MatchResult</code>. The parameter is named <code>left</code> because it is
   * usually the value appearing to the left of <code>should</code> or <code>must</code>.
   *
   * @param left the value against which to match
   * @return the <code>MatchResult</code> that represents the result of the match
   */
  def apply(left: T): MatchResult

  /**
   * Composes this <code>BeMatcher</code> with the passed converter function, yielding a
   * <code>BeMatcher</code> over the function's domain: the returned matcher first applies
   * <code>g</code> to its input and then matches the converted value with this matcher.
   *
   * <p>
   * This method overrides <code>compose</code> on <code>Function1</code> only to narrow the
   * result type from <code>Function1</code> to <code>BeMatcher</code>. For example, given an
   * <code>odd</code> <code>BeMatcher[Int]</code>, a <code>BeMatcher[String]</code> that parses
   * its input first can be built with
   * <code>val oddAsInt = odd compose { (s: String) =&gt; s.toInt }</code>.
   * </p>
   */
  override def compose[U](g: U => T): BeMatcher[U] =
    new BeMatcher[U] {
      def apply(u: U): MatchResult = outer(g(u))
    }
}
/**
* Companion object for trait <code>BeMatcher</code> that provides a
* factory method that creates a <code>BeMatcher[T]</code> from a
* passed function of type <code>(T => MatchResult)</code>.
*
* @author Bill Venners
*/
object BeMatcher {

  /**
   * Factory method that creates a <code>BeMatcher[T]</code> whose <code>apply</code>
   * delegates to the passed function of type <code>(T =&gt; MatchResult)</code>;
   * <code>toString</code> reports the erased runtime class of <code>T</code>.
   *
   * @author Bill Venners
   */
  def apply[T](fun: T => MatchResult)(implicit ev: Manifest[T]): BeMatcher[T] = {
    val typeName = ev.erasure.getName
    new BeMatcher[T] {
      def apply(left: T): MatchResult = fun(left)
      override def toString: String = "BeMatcher[" + typeName + "](" + typeName + " => MatchResult)"
    }
  }
}
| travisbrown/scalatest | src/main/scala/org/scalatest/matchers/BeMatcher.scala | Scala | apache-2.0 | 7,751 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Sun Jan 20 13:01:21 EST 2013
* @see LICENSE (MIT style license file).
*/
package scalation.linalgebra
import scalation.math.double_exp
import scalation.util.Error
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Fac_QR_MGS` class provides methods to factor an 'm-by-n' matrix 'a' into the
* product of two matrices:
* <p>
* 'q' - an 'm-by-n' orthogonal matrix and
* 'r' - an 'n-by-n' right upper triangular matrix
* <p>
* such that 'a = q * r'. It uses Gram-Schmidt orthogonalization.
* Note, orthogonal means that 'q.t * q = I'.
* @see http://www.stat.wisc.edu/~larget/math496/qr.html
* @see http://en.wikipedia.org/wiki/Gram–Schmidt_process
* (stabilized Gram–Schmidt orthonormalization)
* @param a the matrix to be factor into q and r
*/
class Fac_QR_MGS (a: MatrixD)
      extends Factorization with Error
{
    private val m = a.dim1              // the number of rows in matrix a
    private val n = a.dim2              // the number of columns in matrix a
    private val q = new MatrixD (a)     // the orthogonal q matrix (copy of a, orthonormalized in place)
    private val r = new MatrixD (n, n)  // the right upper triangular r matrix

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Factor matrix 'a' into the product of two matrices, 'a = q * r', returning
     *  both the orthogonal 'q' matrix and the right upper triangular 'r' matrix.
     *  This algorithm uses Modified Gram-Schmidt (MGS) orthogonalization;
     *  the triple nested loop costs O(m n^2) operations.
     *  @see Algorithm 5.2.6 in Matrix Computations.
     */
    def factor (): Tuple2 [MatrixD, MatrixD] =
    {
        for (j <- 0 until n) {                          // for each column j
            val _norm = q.col(j).norm                   // norm of the jth column
            r(j, j) = _norm
            if (! (_norm =~ 0.0)) {                     // skip (near) zero columns, avoiding division by zero
                for (i <- 0 until m) q(i, j) /= _norm   // normalize column j
                for (k <- j + 1 until n) {              // orthogonalize remaining columns against column j
                    r(j, k) = q.col(j) dot q.col(k)
                    for (i <- 0 until m) q(i, k) -= q(i, j) * r(j, k)
                } // for
            } // if
        } // for
        raw = false                                     // factoring completed
        (q, r)
    } // factor

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the first factor, i.e., orthogonal 'q' matrix, factoring first if
     *  not yet done.
     */
    def factor1 (): MatrixD = { if (raw) factor (); q }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the second factor, i.e., the right upper triangular 'r' matrix,
     *  factoring first if not yet done.
     */
    def factor2 (): MatrixD = { if (raw) factor (); r }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Solve for 'x' in 'a*x = b' using the QR Factorization 'a = q*r' via
     *  'r*x = q.t * b'. FIX: factor first if not yet done — previously, calling
     *  'solve' before 'factor' silently used the unfactored 'q' and zero 'r'.
     *  @param b the constant vector
     */
    def solve (b: VectorD): VectorD = { if (raw) factor (); backSub (r, q.t * b) }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Perform backward substitution to solve for 'x' in 'r*x = b'.
     *  Requires 'r' to be nonsingular (divides by the diagonal entries).
     *  @param r the right upper triangular matrix
     *  @param b the constant vector
     */
    def backSub (r: MatrixD, b: VectorD): VectorD =
    {
        val x = new VectorD (n)                  // vector to solve for
        for (k <- n-1 to 0 by -1) {              // solve for x in r*x = b
            x(k) = (b(k) - (r(k) dot x)) / r(k, k)
        } // for
        x
    } // backSub

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the nullspace of matrix 'a: { x | a*x = 0 }' using QR Factorization
     *  'q*r*x = 0'. Gives a basis of dimension 'n' - rank for the nullspace
     *  @param rank the rank of the matrix (number of linearly independent row vectors)
     *  FIX: should work, but it does not — it reports a flaw and must not be used.
     */
    def nullspace (rank: Int): MatrixD =
    {
        flaw ("nullspace", "method has bugs - so do not use")
        (new Fac_QR_MGS (a.t)).factor1 ().slice (0, n, rank, n)   // last n - rank columns
    } // nullspace

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the nullspace of matrix 'a: { x | a*x = 0 }' using QR Factorization
     *  'q*r*x = 0'. Gives only one vector in the nullspace. FIX: factor first if
     *  not yet done, since this method reads the 'r' matrix directly.
     */
    def nullspaceV: VectorD =
    {
        if (raw) factor ()                       // ensure 'r' has been computed
        val x = new VectorD (n); x(n-1) = 1.0    // vector to solve for
        val b = new VectorD (n)                  // new rhs as -r_i,n-1
        for (i <- 0 until n) b(i) = -r(i, n-1)
        val rr = r.slice (0, n, 0, n-1)          // drop last column
        for (k <- n-2 to 0 by -1) {              // solve for x in rr*x = b
            x(k) = (b(k) - (rr(k) dot x)) / rr(k, k)
        } // for
        x
    } // nullspaceV
} // Fac_QR_MGS class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Fac_QR_MGSTest` object is used to test the `Fac_QR_MGS` class.
* @see http://www.ee.ucla.edu/~vandenbe/103/lectures/qr.pdf
* > run-main scalation.linalgebra.Fac_QR_MGSTest
*/
object Fac_QR_MGSTest extends App
{
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Factor the given matrix, print both factors and a nullspace point, and
     *  display the sanity products q*r (should rebuild a) and a*ns (should be 0).
     */
    def test (a: MatrixD)
    {
        val factorization = new Fac_QR_MGS (a)       // for factoring a into q * r
        val (q, r) = factorization.factor ()         // (q orthogonal, r upper triangular)
        val point = factorization.nullspaceV         // a point in the nullspace
        println ("--------------------------------------------------------")
        println ("a = " + a)
        println ("q = " + q)
        println ("r = " + r)
        println ("ns = " + point)
        println ("q*r = " + (q*r))                   // check that q*r = a
        println ("a*ns = " + (a*point))              // check that a*ns = 0
    } // test

    // Matrices exercising tall, square (singular) and wide cases.
    val a1 = new MatrixD ((4, 3), 9.0, 0.0, 26.0,
    12.0, 0.0, -7.0,
    0.0, 4.0, 4.0,
    0.0, -3.0, -3.0)
    val a2 = new MatrixD ((2, 2), 2.0, 1.0,
    -4.0, -2.0)
    val a3 = new MatrixD ((3, 3), 2.0, 1.0, 1.0,
    -5.0, -2.0, -2.0,
    -5.0, -2.0, -2.0)
    val a4 = new MatrixD ((2, 4), -1.0, 1.0, 2.0, 4.0,
    2.0, 0.0, 1.0, -7.0)
    val a5 = new MatrixD ((4, 4), -1.0, 1.0, 2.0, 4.0,
    2.0, 0.0, 1.0, -7.0,
    2.0, 0.0, 1.0, -7.0,
    2.0, 0.0, 1.0, -7.0)

    // Run every example through the same check, in the original order.
    List (a1, a2, a3, a4, a5) foreach test
} // Fac_QR_MGSTest object
| mvnural/scalation | src/main/scala/scalation/linalgebra/Fac_QR_MGS.scala | Scala | mit | 6,879 |
package at.logic.gapt.formats.ivy
import at.logic.gapt.formats.lisp.{ List => LispList, Atom => LispAtom, Cons => LispCons, SExpression, SExpressionParser }
import at.logic.gapt.expr._
import at.logic.gapt.language.fol.FOLSubstitution
import at.logic.gapt.proofs.resolution.Clause
import at.logic.gapt.proofs.lk.base.FSequent
import at.logic.gapt.proofs.occurrences.FormulaOccurrence
import at.logic.gapt.proofs.occurrences
import at.logic.gapt.expr.Ti
import at.logic.gapt.utils.logging.Logger
/**
* Implements parsing of ivy format: https://www.cs.unm.edu/~mccune/papers/ivy/ into Ivy's Resolution calculus.
*/
/* Constructor object, takes a filename and tries to parse as a lisp_file */
object IvyParser extends Logger {
// Name under which this parser logs (required by the Logger trait).
override def loggerName = "IvyParserLogger"
// Easy parametrization to choose a naming convention: the ivy format itself
// carries no information about which symbols are variables, so the caller
// must pick one of the conventions below. Each maps to one of the
// is_*_variable predicates (defined outside this view — semantics TODO confirm there).
sealed abstract class VariableNamingConvention;
case object PrologStyleVariables extends VariableNamingConvention;
case object LadrStyleVariables extends VariableNamingConvention;
case object IvyStyleVariables extends VariableNamingConvention;
/** Parses the ivy proof in file 'fn', choosing the variable-recognition
  * predicate from the given naming convention and delegating to apply_.
  */
def apply( fn: String, naming_convention: VariableNamingConvention ): IvyResolutionProof = {
  val variablePredicate: String => Boolean = naming_convention match {
    case PrologStyleVariables => is_prologstyle_variable
    case LadrStyleVariables   => is_ladrstyle_variable
    case IvyStyleVariables    => is_ivy_variable
  }
  apply_( fn, variablePredicate )
}
/** Parses file 'fn' into s-expressions and converts the first proof object
  * found; warns when the file contains more than one proof.
  */
def apply_( fn: String, is_variable_symbol: ( String => Boolean ) ): IvyResolutionProof = {
  val expressions = SExpressionParser( fn )
  require( expressions.length >= 1, "An ivy proof must contain at least one proof object, not " + expressions.length + "! " )
  if ( expressions.length > 1 ) warn( "WARNING: Ivy proof in " + fn + " contains more than one proof, taking the first one." )
  parse( expressions.head, is_variable_symbol )
}
// The type synonyms below make the signatures of the parsing functions more readable.
type ProofId = String // identifier of an inference step in the ivy output
type ProofMap = Map[ProofId, IvyResolutionProof] // steps parsed so far, addressable by id
type Position = List[Int] // a position (path of indices) into an s-expression
/** Entry point for a single proof object: it must be a non-empty lisp list of
  * inferences, which is unwrapped and handed to the list-based parse together
  * with an initially empty proof map.
  */
def parse( exp: SExpression, is_variable_symbol: ( String => Boolean ) ): IvyResolutionProof = exp match {
  case LispList( inferences ) if inferences.nonEmpty =>
    parse( inferences, Map.empty[String, IvyResolutionProof], is_variable_symbol )
  case LispList( _ ) => throw new Exception( "Trying to parse an empty proof!" )
  case _ => throw new Exception( "Parsing error: The proof object is not a list!" )
}
/* Traverses the list of inference s-expressions left to right, threading the
 * map of already-parsed steps through parse_step; the proof attached to the
 * last inference in the list is the overall result.
 * Note: an inference may only reference inferences occurring before it. */
@annotation.tailrec
def parse( exp: List[SExpression], found_steps: ProofMap, is_variable_symbol: String => Boolean ): IvyResolutionProof = exp match {
  case Nil =>
    throw new Exception( "Cannot create an object for an empty proof (list of inferences is empty)." )
  case step :: Nil =>
    val ( id, steps ) = parse_step( step, found_steps, is_variable_symbol )
    steps( id )
  case step :: rest =>
    val ( _, steps ) = parse_step( step, found_steps, is_variable_symbol )
    parse( rest, steps, is_variable_symbol )
}
/* parses an inference step and updates the proof map */
def parse_step( exp: SExpression, found_steps: ProofMap, is_variable_symbol: String => Boolean ): ( ProofId, ProofMap ) = {
exp match {
case LispList( LispAtom( id ) :: _ ) => ()
case _ => ()
}
exp match {
/* ================== Atom ========================== */
case LispList( LispAtom( id ) :: LispList( LispAtom( "input" ) :: Nil ) :: clause :: _ ) => {
val fclause = parse_clause( clause, is_variable_symbol )
val inference = InitialClause( id, clause,
Clause( fclause.antecedent map ( occurrences.factory.createFormulaOccurrence( _, Nil ) ),
fclause.succedent map ( occurrences.factory.createFormulaOccurrence( _, Nil ) ) ) )
require( inference.root.toFSequent setEquals fclause, "Error in Atom parsing: required result=" + fclause + " but got: " + inference.root )
( id, found_steps + ( ( id, inference ) ) )
}
/* ================== Instance ========================== */
case LispList( LispAtom( id ) :: LispList( LispAtom( "instantiate" ) :: LispAtom( parent_id ) :: subst_exp :: Nil ) :: clause :: rest ) => {
val parent_proof = found_steps( parent_id )
val sub: FOLSubstitution = parse_substitution( subst_exp, is_variable_symbol )
val fclause: FSequent = parse_clause( clause, is_variable_symbol )
def connect( ancestors: Seq[FormulaOccurrence], formulas: Seq[HOLFormula] ): Seq[FormulaOccurrence] =
( ancestors zip formulas ) map ( ( v: ( FormulaOccurrence, HOLFormula ) ) =>
occurrences.factory.createFormulaOccurrence( v._2, List( v._1 ) ) )
val inference = Instantiate( id, clause, sub,
Clause( connect( parent_proof.vertex.antecedent, fclause.antecedent ),
connect( parent_proof.vertex.succedent, fclause.succedent ) ), parent_proof )
require( inference.root.toFSequent setEquals fclause, "Error in Instance parsing: required result=" + fclause + " but got: " + inference.root )
( id, found_steps + ( ( id, inference ) ) )
}
/* ================== Resolution ========================== */
case LispList( LispAtom( id ) :: LispList( LispAtom( "resolve" ) ::
LispAtom( parent_id1 ) :: LispList( position1 ) ::
LispAtom( parent_id2 ) :: LispList( position2 ) :: Nil ) ::
clause :: rest ) => {
val parent_proof1 = found_steps( parent_id1 )
val parent_proof2 = found_steps( parent_id2 )
val fclause: FSequent = parse_clause( clause, is_variable_symbol )
val ( occ1, polarity1, _ ) = get_literal_by_position( parent_proof1.vertex, position1, parent_proof1.clause_exp, is_variable_symbol )
val ( occ2, polarity2, _ ) = get_literal_by_position( parent_proof2.vertex, position2, parent_proof2.clause_exp, is_variable_symbol )
require( occ1.formula == occ2.formula, "Resolved formula " + occ1.formula + " must be equal to " + occ2.formula + " !" )
def connect( c1: Clause, c2: Clause, conclusion: FSequent ): Clause = {
conclusion match {
//process antecedent
case FSequent( x :: xs, ys ) =>
val pos1 = c1.antecedent indexWhere ( _.formula == x )
if ( pos1 >= 0 ) {
val focc = c1.antecedent( pos1 ).factory.createFormulaOccurrence( x, c1.antecedent( pos1 ).parents )
val rec = connect( Clause( c1.antecedent.filterNot( _ == c1.antecedent( pos1 ) ), c1.succedent ), c2, FSequent( xs, ys ) )
Clause( focc :: rec.antecedent.toList, rec.succedent )
} else {
val pos2 = c2.antecedent indexWhere ( _.formula == x )
if ( pos2 >= 0 ) {
val focc = c2.antecedent( pos2 ).factory.createFormulaOccurrence( x, c2.antecedent( pos2 ).parents )
val rec = connect( c1, Clause( c2.antecedent.filterNot( _ == c2.antecedent( pos2 ) ), c2.succedent ), FSequent( xs, ys ) )
Clause( focc :: rec.antecedent.toList, rec.succedent )
} else throw new Exception( "Error in parsing resolution inference: resolved literal " + x + " not found!" )
}
//then succedent
case FSequent( Nil, y :: ys ) =>
val pos1 = c1.succedent indexWhere ( _.formula == y )
if ( pos1 >= 0 ) {
val focc = c1.succedent( pos1 ).factory.createFormulaOccurrence( y, c1.succedent( pos1 ).parents )
val rec = connect( Clause( c1.antecedent, c1.succedent.filterNot( _ == c1.succedent( pos1 ) ) ), c2, FSequent( Nil, ys ) )
Clause( rec.antecedent, focc :: rec.succedent.toList )
} else {
val pos2 = c2.succedent indexWhere ( _.formula == y )
if ( pos2 >= 0 ) {
val focc = c2.succedent( pos2 ).factory.createFormulaOccurrence( y, c2.succedent( pos2 ).parents )
val rec = connect( c1, Clause( c2.antecedent, c2.succedent.filterNot( _ == c2.succedent( pos2 ) ) ), FSequent( Nil, ys ) )
Clause( rec.antecedent, focc :: rec.succedent.toList )
} else throw new Exception( "Error in parsing resolution inference: resolved literal " + y + " not found!" )
}
//base case
case FSequent( Nil, Nil ) => Clause( Nil, Nil )
case _ => throw new Exception( "Unhandled case in calculation of ancestor relationship during creation of a resolution iference!" )
}
}
( polarity1, polarity2 ) match {
case ( true, false ) =>
val clause1 = Clause( parent_proof1.vertex.antecedent, parent_proof1.vertex.succedent filterNot ( _ == occ1 ) )
val clause2 = Clause( parent_proof2.vertex.antecedent filterNot ( _ == occ2 ), parent_proof2.vertex.succedent )
val inference = Resolution( id, clause, occ1, occ2, connect( clause1, clause2, fclause ), parent_proof1, parent_proof2 )
require( inference.root.toFSequent setEquals fclause, "Error in Resolution parsing: required result=" + fclause + " but got: " + inference.root )
( id, found_steps + ( ( id, inference ) ) )
case ( false, true ) =>
val clause1 = Clause( parent_proof1.vertex.antecedent filterNot ( _ == occ1 ), parent_proof1.vertex.succedent )
val clause2 = Clause( parent_proof2.vertex.antecedent, parent_proof2.vertex.succedent filterNot ( _ == occ2 ) )
val inference = Resolution( id, clause, occ1, occ2, connect( clause1, clause2, fclause ), parent_proof1, parent_proof2 )
require( inference.root.toFSequent setEquals fclause, "Error in Resolution parsing: required result=" + fclause + " but got: " + inference.root )
( id, found_steps + ( ( id, inference ) ) )
case _ =>
throw new Exception( "Error parsing resolution inference: must resolve over a positive and a negative literal!" )
}
}
/* ================== Flip ========================== */
case LispList( LispAtom( id ) :: LispList( LispAtom( "flip" ) :: LispAtom( parent_id ) :: LispList( position ) :: Nil ) :: clause :: rest ) =>
val parent_proof = found_steps( parent_id )
val fclause = parse_clause( clause, is_variable_symbol )
val ( occ, polarity, _ ) = get_literal_by_position( parent_proof.root, position, parent_proof.clause_exp, is_variable_symbol )
occ.formula match {
case Eq( left, right ) =>
//the negative literals are the same
def connect_directly( x: FormulaOccurrence ) = x.factory.createFormulaOccurrence( x.formula, x :: Nil )
polarity match {
case true =>
val neglits = parent_proof.root.negative map connect_directly
val ( pos1, pos2 ) = parent_proof.root.positive.splitAt( parent_proof.root.positive.indexOf( occ ) )
val ( pos1_, pos2_ ) = ( pos1 map connect_directly, pos2 map connect_directly )
val flipped = occ.factory.createFormulaOccurrence( Eq( right, left ), occ :: Nil )
val inference = Flip( id, clause, flipped, Clause( neglits, pos1_ ++ List( flipped ) ++ pos2_.tail ), parent_proof )
require( fclause setEquals inference.root.toFSequent,
"Error parsing flip rule: inferred clause " + inference.root.toFSequent +
" is not the same as given clause " + fclause )
( id, found_steps + ( ( id, inference ) ) )
case false =>
val poslits = parent_proof.root.positive map connect_directly
val ( neg1, neg2 ) = parent_proof.root.negative.splitAt( parent_proof.root.negative.indexOf( occ ) )
val ( neg1_, neg2_ ) = ( neg1 map connect_directly, neg2 map connect_directly )
val flipped = occ.factory.createFormulaOccurrence( Eq( right, left ), occ :: Nil )
val inference = Flip( id, clause, flipped, Clause( neg1_ ++ List( flipped ) ++ neg2_.tail, poslits ), parent_proof )
require( fclause setEquals inference.root.toFSequent,
"Error parsing flip rule: inferred clause " + inference.root.toFSequent +
" is not the same as given clause " + fclause )
( id, found_steps + ( ( id, inference ) ) )
}
case _ =>
throw new Exception( "Error parsing position in flip rule: literal " + occ.formula + " is not the equality predicate." )
}
/* ================== Paramodulation ========================== */
case LispList( LispAtom( id ) ::
LispList( LispAtom( "paramod" ) :: LispAtom( modulant_id ) :: LispList( mposition ) ::
LispAtom( parent_id ) :: LispList( pposition ) :: Nil ) ::
clause :: rest ) =>
val modulant_proof = found_steps( modulant_id )
val parent_proof = found_steps( parent_id )
val fclause = parse_clause( clause, is_variable_symbol )
val ( mocc, mpolarity, direction ) = get_literal_by_position( modulant_proof.root, mposition, modulant_proof.clause_exp, is_variable_symbol )
require( direction == List( 1 ) || direction == List( 2 ), "Must indicate if paramod or demod!" )
val orientation = if ( direction.head == 1 ) true else false //true = paramod (left to right), false = demod (right to left)
require( mpolarity == true, "Paramodulated literal must be positive!" )
val ( pocc, polarity, int_position ) = get_literal_by_position( parent_proof.root, pposition, parent_proof.clause_exp, is_variable_symbol )
mocc.formula match {
case Eq( left: FOLTerm, right: FOLTerm ) =>
def connect_directly( x: FormulaOccurrence ) = x.factory.createFormulaOccurrence( x.formula, x :: Nil )
polarity match {
case true =>
val neglits = parent_proof.root.negative map connect_directly
val ( pneg, ppos ) = ( modulant_proof.root.negative map connect_directly, modulant_proof.root.positive.filterNot( _ == mocc ) map connect_directly )
val ( pos1, pos2 ) = parent_proof.root.positive.splitAt( parent_proof.root.positive.indexOf( pocc ) )
val ( pos1_, pos2_ ) = ( pos1 map connect_directly, pos2 map connect_directly )
val paraformula = if ( orientation )
replaceTerm_by_in_at( left, right, pocc.formula.asInstanceOf[FOLFormula], int_position ).asInstanceOf[FOLFormula]
else
replaceTerm_by_in_at( right, left, pocc.formula.asInstanceOf[FOLFormula], int_position ).asInstanceOf[FOLFormula]
val para = pocc.factory.createFormulaOccurrence( paraformula, mocc :: pocc :: Nil )
val inferred_clause = Clause( pneg ++ neglits, ppos ++ pos1_ ++ List( para ) ++ pos2_.tail )
val inference = Paramodulation( id, clause, int_position, para, orientation, inferred_clause, modulant_proof, parent_proof )
require( inference.root.toFSequent setEquals fclause, "Error in Paramodulation parsing: required result=" + fclause + " but got: " + inference.root )
( id, found_steps + ( ( id, inference ) ) )
case false =>
val poslits = parent_proof.root.positive map connect_directly
val ( pneg, ppos ) = ( modulant_proof.root.negative map connect_directly, modulant_proof.root.positive.filterNot( _ == mocc ) map connect_directly )
val ( neg1, neg2 ) = parent_proof.root.negative.splitAt( parent_proof.root.negative.indexOf( pocc ) )
val ( neg1_, neg2_ ) = ( neg1 map connect_directly, neg2 map connect_directly )
val paraformula = if ( orientation )
replaceTerm_by_in_at( left, right, pocc.formula.asInstanceOf[FOLFormula], int_position ).asInstanceOf[FOLFormula]
else
replaceTerm_by_in_at( right, left, pocc.formula.asInstanceOf[FOLFormula], int_position ).asInstanceOf[FOLFormula]
val para = pocc.factory.createFormulaOccurrence( paraformula, mocc :: pocc :: Nil )
val inferred_clause = Clause( pneg ++ neg1_ ++ List( para ) ++ neg2_.tail, ppos ++ poslits )
val inference = Paramodulation( id, clause, int_position, para, orientation, inferred_clause, modulant_proof, parent_proof )
require( inference.root.toFSequent setEquals fclause, "Error in Paramodulation parsing: required result=" + fclause + " but got: " + inference.root )
( id, found_steps + ( ( id, inference ) ) )
}
case _ =>
throw new Exception( "Error parsing position in paramod rule: literal " + mocc.formula + " is not the equality predicate." )
}
/* ================== Propositional ========================== */
case LispList( LispAtom( id ) :: LispList( LispAtom( "propositional" ) :: LispAtom( parent_id ) :: Nil ) :: clause :: rest ) => {
val parent_proof = found_steps( parent_id )
val fclause: FSequent = parse_clause( clause, is_variable_symbol )
def list_withoutn[A]( l: List[A], n: Int ): List[A] = l match {
case x :: xs =>
if ( n == 0 ) xs else x :: list_withoutn( xs, n - 1 )
case Nil => Nil
}
//connects ancestors to formulas
def connect( ancestors: List[FormulaOccurrence], formulas: List[HOLFormula] ): List[FormulaOccurrence] = {
//find ancestor for every formula in conclusion clause
val ( occs, rem ) = connect_( ancestors, formulas )
//now connect the contracted formulas
val connected: List[FormulaOccurrence] = connect_missing( occs, rem )
connected
}
//connects each formula to an ancestor, returns a pair of connected formulas and unconnected ancestors
def connect_( ancestors: List[FormulaOccurrence], formulas: List[HOLFormula] ): ( List[FormulaOccurrence], List[FormulaOccurrence] ) = {
formulas match {
case x :: xs =>
val index = ancestors.indexWhere( _.formula == x )
require( index >= 0, "Error connecting ancestors in propositional ivy inference: formula " + x + " does not occur in ancestors " + ancestors )
val anc = ancestors( index )
val occ = anc.factory.createFormulaOccurrence( x, anc :: Nil )
val ( occs, rem ) = connect_( list_withoutn( ancestors, index ), xs )
( occ :: occs, rem )
case Nil => ( Nil, ancestors )
}
}
//connects unconnected (missing) ancestors to list of potential targets, returns list of updated targets
def connect_missing( targets: List[FormulaOccurrence], missing: List[FormulaOccurrence] ): List[FormulaOccurrence] = missing match {
case x :: xs =>
val targets_ = connect_missing_( targets, x )
connect_missing( targets_, xs )
case Nil =>
targets
}
//connects one missing occurence to possible tagets, returns list of updated targets
def connect_missing_( targets: List[FormulaOccurrence], missing: FormulaOccurrence ): List[FormulaOccurrence] = targets match {
case x :: xs =>
if ( missing.formula == x.formula )
List( x.factory.createFormulaOccurrence( x.formula, List( missing ) ++ x.parents ) ) ++ xs
else
List( x ) ++ connect_missing_( xs, missing )
case Nil =>
throw new Exception( "Error connecting factorized literal, no suitable successor found!" )
}
val inference = Propositional( id, clause,
Clause( connect( parent_proof.vertex.antecedent.toList, fclause.antecedent.toList ),
connect( parent_proof.vertex.succedent.toList, fclause.succedent.toList ) ), parent_proof )
require( inference.root.toFSequent setEquals fclause, "Error in Propositional parsing: required result=" + fclause + " but got: " + inference.root )
( id, found_steps + ( ( id, inference ) ) )
}
// new symbol
case LispList( LispAtom( id ) ::
LispList( LispAtom( "new_symbol" ) :: LispAtom( parent_id ) :: Nil ) ::
clause :: rest ) =>
val parent_proof = found_steps( parent_id )
val fclause: FSequent = parse_clause( clause, is_variable_symbol )
require( fclause.antecedent.isEmpty, "Expecting only positive equations in parsing of new_symbol rule " + id )
require( fclause.succedent.size == 1, "Expecting exactly one positive equation in parsing of new_symbol rule " + id )
val Eq( l: FOLTerm, r ) = fclause.succedent( 0 )
val nclause = Clause( Nil, List( parent_proof.root.occurrences( 0 ).factory.createFormulaOccurrence( fclause.succedent( 0 ), Nil ) ) )
val const: FOLConst = r match {
case f @ FOLConst( _ ) => f.asInstanceOf[FOLConst]
case _ => throw new Exception( "Expecting right hand side of new_symbol equation to be the introduced symbol!" )
}
val inference = NewSymbol( id, clause, nclause.succedent( 0 ), const, l, nclause, parent_proof )
( id, found_steps + ( ( id, inference ) ) )
case _ => throw new Exception( "Error parsing inference rule in expression " + exp )
}
}
  //extracts a literal from a clause - since the clause separates positive and negative clauses,
// we also need the original SEXpression to make sense of the position.
// paramodulation continues inside the term, so we return the remaining position together with the occurrence
// the boolean indicates a positive or negative formula
  /**
   * Extracts the occurrence of the literal at Ivy position `pos` from clause `c`.
   * Since the clause separates positive and negative literals, the original
   * s-expression `clauseexp` is needed to make sense of the position.
   *
   * @return the formula occurrence, its polarity (true = positive), and the
   *         remaining position pointing inside the literal's term (consumed
   *         later, e.g. by paramodulation).
   */
  def get_literal_by_position( c: Clause, pos: List[SExpression],
                               clauseexp: SExpression, is_variable_symbol: String => Boolean ): ( FormulaOccurrence, Boolean, List[Int] ) = {
    val ipos = parse_position( pos )
    val ( iformula, termpos ) = parse_clause_frompos( clauseexp, ipos, is_variable_symbol )
    //Remark: we actually return the first occurrence of the formula, not the one at the position indicated as
    // it should not make a difference. (if f occurs twice in the clause, it might be derived differently
    // but we usually don't care for that)
    // NOTE(review): this match is not exhaustive -- a literal that is neither an
    // atom nor a negated atom raises a MatchError rather than a parse error.
    iformula match {
      case a @ FOLAtom( sym, args ) =>
        c.positive.find( _.formula == a ) match {
          case Some( occ ) =>
            ( occ, true, termpos )
          case None =>
            throw new Exception( "Error in getting literal by position! Could not find " + iformula + " in " + c )
        }
      case Neg( a @ FOLAtom( sym, args ) ) =>
        c.negative.find( _.formula == a ) match {
          case Some( occ ) =>
            ( occ, false, termpos )
          case None =>
            throw new Exception( "Error in getting literal by position! Could not find " + iformula + " in " + c )
        }
    }
  }
//term replacement
//TODO: refactor replacement for lambda expressions
def replaceTerm_by_in_at( what: FOLTerm, by: FOLTerm, exp: FOLExpression, pos: List[Int] ): FOLExpression = pos match {
case p :: ps =>
exp match {
case FOLAtom( sym, args ) =>
require( 1 <= p && p <= args.length, "Error in parsing replacement: invalid argument position in atom!" )
val ( args1, rterm :: args2 ) = args.splitAt( p - 1 )
FOLAtom( sym, ( args1 ++ List( replaceTerm_by_in_at( what, by, rterm, ps ).asInstanceOf[FOLTerm] ) ++ args2 ) )
case FOLFunction( sym, args ) =>
require( 1 <= p && p <= args.length, "Error in parsing replacement: invalid argument position in function!" )
val ( args1, rterm :: args2 ) = args.splitAt( p - 1 )
FOLFunction( sym, ( args1 ++ List( replaceTerm_by_in_at( what, by, rterm, ps ).asInstanceOf[FOLTerm] ) ++ args2 ) )
case _ => throw new Exception( "Error in parsing replacement: unexpected (sub)term " + exp + " )" )
}
case Nil =>
if ( exp == what ) by else throw new Exception( "Error in parsing replacement: (sub)term " + exp + " is not the expected term " + what )
}
def parse_position( l: List[SExpression] ): List[Int] = l match {
case LispAtom( x ) :: xs => try {
x.toInt :: parse_position( xs )
} catch {
case e: Exception => throw new Exception( "Error parsing position: cannot convert atom " + x + " to integer!" )
}
case Nil => Nil
case x :: _ => throw new Exception( "Error parsing position: unexpected expression " + x )
case _ => throw new Exception( "Error parsing position: unexpected expression " + l )
}
def parse_substitution( exp: SExpression, is_variable_symbol: String => Boolean ): FOLSubstitution = exp match {
case LispList( list ) =>
FOLSubstitution( parse_substitution_( list, is_variable_symbol ) )
case _ => throw new Exception( "Error parsing substitution expression " + exp + " (not a list)" )
}
  //Note: substitutions are sometimes given as lists of cons cells and sometimes as two-element lists...
def parse_substitution_( exp: List[SExpression], is_variable_symbol: String => Boolean ): List[( FOLVar, FOLTerm )] = exp match {
case LispList( vexp :: texp ) :: xs =>
val v = parse_term( vexp, is_variable_symbol )
val t = parse_term( LispList( texp ), is_variable_symbol )
v match {
case v_ : FOLVar =>
( v_, t ) :: parse_substitution_( xs, is_variable_symbol )
case _ =>
throw new Exception( "Error parsing substitution expression " + exp + ": substiution variable was not parsed as variable!" )
}
case LispCons( vexp, texp ) :: xs =>
val v = parse_term( vexp, is_variable_symbol )
val t = parse_term( texp, is_variable_symbol )
v match {
case v_ : FOLVar =>
( v_, t ) :: parse_substitution_( xs, is_variable_symbol )
case _ =>
throw new Exception( "Error parsing substitution expression " + exp + ": substiution variable was not parsed as variable!" )
}
case Nil =>
Nil
case _ => throw new Exception( "Error parsing substitution expression " + exp + " (could not match substitution term!)" )
}
/* create_ladrstyle_symbol and create_prologstyle_symbol implement the logic for the prover9 and prolog style
* variable naming convention -- both are possible in prover9;
* see also http://www.cs.unm.edu/~mccune/mace4/manual/2009-11A/syntax.html
*/
val ladr_variable_regexp = """^[u-z].*$""".r
def is_ladrstyle_variable( s: String ) = ladr_variable_regexp.findFirstIn( s ) match {
case None => false
case _ => true
}
val prolog_variable_regexp = """^[A-Z].*$""".r
def is_prologstyle_variable( s: String ) = prolog_variable_regexp.findFirstIn( s ) match {
case None => false
case _ => true
}
val ivy_variable_regexp = """^v[0-9]+$""".r
def is_ivy_variable( s: String ) = ivy_variable_regexp.findFirstIn( s ) match {
case None => false
case _ => true
}
/* parses a clause sexpression to a fclause -- the structure is (or lit1 (or lit2 .... (or litn-1 litn)...)) */
def parse_clause( exp: SExpression, is_variable_symbol: String => Boolean ): FSequent = {
val clauses = parse_clause_( exp, is_variable_symbol )
var pos: List[HOLFormula] = Nil
var neg: List[HOLFormula] = Nil
for ( c <- clauses ) {
c match {
case Neg( formula ) =>
formula match {
case FOLAtom( _, _ ) => neg = formula :: neg
case _ => throw new Exception( "Error parsing clause: negative Literal " + formula + " is not an atom!" )
}
case FOLAtom( _, _ ) =>
pos = c :: pos
case _ =>
throw new Exception( "Error parsing clause: formula " + c + " is not a literal!" )
}
}
//the literals were prepended to the list, so we have to reverse them to get the original order
FSequent( neg.reverse, pos.reverse )
}
//TODO: merge code with parse_clause_
  /**
   * Extracts the literal addressed by `pos` from a clause s-expression of the
   * shape (or lit1 (or lit2 .... (or litn-1 litn)...)).
   * A leading 1 selects the left literal, a leading 2 descends into the right
   * subclause; whatever remains of the position addresses a term inside the
   * selected literal and is returned for further use (e.g. by paramodulation).
   *
   * @return the parsed literal together with the remaining term position
   */
  def parse_clause_frompos( exp: SExpression, pos: List[Int], is_variable_symbol: String => Boolean ): ( HOLFormula, List[Int] ) = exp match {
    case LispList( LispAtom( "or" ) :: left :: right :: Nil ) =>
      pos match {
        case 1 :: rest =>
          left match {
            case LispList( LispAtom( "not" ) :: LispList( LispAtom( name ) :: args ) :: Nil ) =>
              val npos = if ( rest.isEmpty ) rest else rest.tail //if we point to a term we have to strip the indicator for neg
              ( Neg( parse_atom( name, args, is_variable_symbol ) ), npos )
            case LispList( LispAtom( name ) :: args ) =>
              ( parse_atom( name, args, is_variable_symbol ), rest )
            case _ => throw new Exception( "Parsing Error: unexpected element " + exp + " in parsing of Ivy proof object." )
          }
        case 2 :: rest =>
          parse_clause_frompos( right, rest, is_variable_symbol )
        case _ => throw new Exception( "pos " + pos + " did not point to a literal!" )
      }
    case LispList( LispAtom( "not" ) :: LispList( LispAtom( name ) :: args ) :: Nil ) =>
      val npos = if ( pos.isEmpty ) pos else pos.tail //if we point to a term we have to strip the indicator for neg
      ( Neg( parse_atom( name, args, is_variable_symbol ) ), npos )
    case LispList( LispAtom( name ) :: args ) =>
      ( parse_atom( name, args, is_variable_symbol ), pos )
    //the empty clause is denoted by false
    case LispAtom( "false" ) =>
      throw new Exception( "Parsing Error: want to extract literal from empty clause!" )
    case _ => throw new Exception( "Parsing Error: unexpected element " + exp + " in parsing of Ivy proof object." )
  }
//directly converts a clause as nested or expression into a list with the literals in the same order
  /**
   * Converts a clause given as a nested (or lit1 (or lit2 ...)) s-expression
   * into the list of its literals, preserving their order. The empty clause is
   * denoted by the atom "false" and yields the empty list.
   */
  def parse_clause_( exp: SExpression, is_variable_symbol: String => Boolean ): List[HOLFormula] = exp match {
    case LispList( LispAtom( "or" ) :: left :: right :: Nil ) =>
      val rightclause = parse_clause_( right, is_variable_symbol )
      left match {
        case LispList( LispAtom( "not" ) :: LispList( LispAtom( name ) :: args ) :: Nil ) =>
          Neg( parse_atom( name, args, is_variable_symbol ) ) :: rightclause
        case LispList( LispAtom( name ) :: args ) =>
          parse_atom( name, args, is_variable_symbol ) :: rightclause
        case _ => throw new Exception( "Parsing Error: unexpected element " + exp + " in parsing of Ivy proof object." )
      }
    case LispList( LispAtom( "not" ) :: LispList( LispAtom( name ) :: args ) :: Nil ) =>
      Neg( parse_atom( name, args, is_variable_symbol ) ) :: Nil
    case LispList( LispAtom( name ) :: args ) =>
      parse_atom( name, args, is_variable_symbol ) :: Nil
    //the empty clause is denoted by false
    case LispAtom( "false" ) =>
      List()
    case _ => throw new Exception( "Parsing Error: unexpected element " + exp + " in parsing of Ivy proof object." )
  }
def parse_atom( name: String, args: List[SExpression], is_variable_symbol: String => Boolean ) = {
if ( is_variable_symbol( name ) ) throw new Exception( "Parsing Error: Predicate name " + name + " does not conform to naming conventions." )
val argterms = args map ( parse_term( _, is_variable_symbol ) )
if ( name == "=" ) {
require( args.length == 2, "Error parsing equality: = must be a binary predicate!" )
Eq( argterms( 0 ), argterms( 1 ) )
} else {
FOLAtom( name, argterms )
}
}
//some names are escaped for ivy, see also LADR-2009-11A/ladr/ivy.c in the Prover9 source
val ivy_escape_table = Map[String, String]( ( "zero_for_ivy", "0" ),
( "one_for_ivy", "1" ),
( "quote_for_ivy", "'" ),
( "backslash_for_ivy", "\\\\\\\\" ),
( "at_for_ivy", "@" ),
( "meet_for_ivy", "^" ) )
def rewrite_name( s: String ): String = if ( ivy_escape_table contains s ) ivy_escape_table( s ) else s
  /**
   * Parses a term s-expression into a FOL term. Atoms become variables or
   * constants depending on `is_variable_symbol` (after undoing Ivy's name
   * escaping via rewrite_name); non-empty lists become function applications.
   */
  def parse_term( ts: SExpression, is_variable_symbol: String => Boolean ): FOLTerm = ts match {
    case LispAtom( name ) =>
      val rname = rewrite_name( name )
      if ( is_variable_symbol( rname ) )
        FOLVar( rname )
      else
        FOLConst( rname )
    //the proof might contain the constant nil which is parsed to an empty LispList. in this case the empty list
    //corresponds to a constant
    case LispList( LispList( Nil ) :: Nil ) =>
      FOLConst( "nil" )
    case LispList( LispAtom( name ) :: args ) =>
      val rname = rewrite_name( name )
      // function names must not look like variables
      if ( is_variable_symbol( rname ) ) throw new Exception( "Parsing Error: Function name " + rname + " does not conform to naming conventions." )
      FOLFunction( rname, args.map( parse_term( _, is_variable_symbol ) ) )
    case _ =>
      throw new Exception( "Parsing Error: Unexpected expression " + ts + " in parsing of a term." )
  }
}
| gisellemnr/gapt | src/main/scala/at/logic/gapt/formats/ivy/Ivy.scala | Scala | gpl-3.0 | 33,838 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.sources.wmstrategies
import org.apache.flink.streaming.api.watermark.Watermark
import org.apache.flink.table.dataformat.BaseRow
import org.apache.flink.table.descriptors.Rowtime
import java.util
/**
 * A periodic watermark assigner.
 *
 * Implementations receive every observed rowtime value via [[nextTimestamp]]
 * and are queried via [[getWatermark]] for the watermark to emit.
 * NOTE(review): the query/emission interval is controlled by the runtime,
 * not by this class -- confirm against the table runtime's watermark operator.
 */
abstract class PeriodicWatermarkAssigner extends WatermarkStrategy {
  /**
   * Updates the assigner with the next timestamp.
   *
   * @param timestamp The next timestamp to update the assigner.
   */
  def nextTimestamp(timestamp: Long): Unit
  /**
   * Returns the current watermark.
   *
   * @return The current watermark.
   */
  def getWatermark: Watermark
}
/**
 * A punctuated watermark assigner.
 *
 * Unlike [[PeriodicWatermarkAssigner]], a watermark may be produced for each
 * individual row; returning null suppresses emission for that row.
 */
abstract class PunctuatedWatermarkAssigner extends WatermarkStrategy {
  /**
   * Returns the watermark for the current row or null if no watermark should be generated.
   *
   * @param row The current row.
   * @param timestamp The value of the timestamp attribute for the row.
   * @return The watermark for this row or null if no watermark should be generated.
   */
  def getWatermark(row: BaseRow, timestamp: Long): Watermark
}
/** A strategy which indicates the watermarks should be preserved from the underlying datastream.*/
final class PreserveWatermarks extends WatermarkStrategy {
  // Every instance behaves identically, so equality is purely type-based
  // (isInstanceOf also yields false for null, matching the former match).
  override def equals(obj: scala.Any): Boolean = obj.isInstanceOf[PreserveWatermarks]
  // Consistent with equals: all instances share the class's hash code.
  override def hashCode(): Int = {
    classOf[PreserveWatermarks].hashCode()
  }
  // Serializes this strategy as a "from-source" watermark descriptor property.
  override def toProperties: util.Map[String, String] = {
    val properties = new util.HashMap[String, String]()
    properties.put(Rowtime.ROWTIME_WATERMARKS_TYPE, Rowtime.ROWTIME_WATERMARKS_TYPE_VALUE_FROM_SOURCE)
    properties
  }
}
object PreserveWatermarks {
  // Shared singleton; exposed as a val so Java callers can use PreserveWatermarks.INSTANCE.
  val INSTANCE: PreserveWatermarks = new PreserveWatermarks
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/sources/wmstrategies/watermarkStrategies.scala | Scala | apache-2.0 | 2,638 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.clustering
import scala.util.Random
import org.jblas.{DoubleMatrix, SimpleBlas}
/**
 * A utility object to run K-means locally. This is private to the ML package because it's used
 * in the initialization of KMeans but not meant to be publicly exposed.
 */
private[mllib] object LocalKMeans {
  /**
   * Run K-means++ on the weighted point set `points`. This first does the K-means++
   * initialization procedure and then rounds of Lloyd's algorithm.
   *
   * @param seed seed for the local random number generator (makes the run deterministic)
   * @param points dense points; all are assumed to have the same dimension as points(0)
   * @param weights one weight per point, parallel to `points` -- assumed non-negative
   *                with positive total (TODO confirm callers guarantee this)
   * @param k number of centers to produce
   * @param maxIterations maximum number of Lloyd iterations
   * @return the k cluster centers
   */
  def kMeansPlusPlus(
      seed: Int,
      points: Array[Array[Double]],
      weights: Array[Double],
      k: Int,
      maxIterations: Int)
    : Array[Array[Double]] =
  {
    val rand = new Random(seed)
    val dimensions = points(0).length
    val centers = new Array[Array[Double]](k)
    // Initialize centers by sampling using the k-means++ procedure
    centers(0) = pickWeighted(rand, points, weights)
    for (i <- 1 until k) {
      // Pick the next center with a probability proportional to cost under current centers
      val curCenters = centers.slice(0, i)
      val sum = points.zip(weights).map { case (p, w) =>
        w * KMeans.pointCost(curCenters, p)
      }.sum
      val r = rand.nextDouble() * sum
      var cumulativeScore = 0.0
      var j = 0
      while (j < points.length && cumulativeScore < r) {
        cumulativeScore += weights(j) * KMeans.pointCost(curCenters, points(j))
        j += 1
      }
      // j was advanced one past the sampled point, hence j-1.
      // NOTE(review): if the total weighted cost is 0 the loop body never runs
      // and j == 0, making this points(-1) -- assumed unreachable for valid
      // inputs; confirm callers never pass degenerate weights/points.
      centers(i) = points(j-1)
    }
    // Run up to maxIterations iterations of Lloyd's algorithm
    val oldClosest = Array.fill(points.length)(-1)
    var iteration = 0
    var moved = true
    while (moved && iteration < maxIterations) {
      moved = false
      val sums = Array.fill(k)(new DoubleMatrix(dimensions))
      val counts = Array.fill(k)(0.0)
      // Assignment step: accumulate each point's weighted coordinates into its
      // closest center's sum; track whether any assignment changed.
      for ((p, i) <- points.zipWithIndex) {
        val index = KMeans.findClosest(centers, p)._1
        SimpleBlas.axpy(weights(i), new DoubleMatrix(p), sums(index))
        counts(index) += weights(i)
        if (index != oldClosest(i)) {
          moved = true
          oldClosest(i) = index
        }
      }
      // Update centers
      for (i <- 0 until k) {
        if (counts(i) == 0.0) {
          // Assign center to a random point
          centers(i) = points(rand.nextInt(points.length))
        } else {
          // New center = weighted mean of assigned points.
          centers(i) = sums(i).divi(counts(i)).data
        }
      }
      iteration += 1
    }
    centers
  }

  /**
   * Samples one element of `data` with probability proportional to its weight.
   * NOTE(review): like the sampling loop above, a zero total weight would lead
   * to data(-1) -- assumed unreachable; confirm.
   */
  private def pickWeighted[T](rand: Random, data: Array[T], weights: Array[Double]): T = {
    val r = rand.nextDouble() * weights.sum
    var i = 0
    var curWeight = 0.0
    while (i < data.length && curWeight < r) {
      curWeight += weights(i)
      i += 1
    }
    data(i - 1)
  }
}
| dotunolafunmiloye/spark | mllib/src/main/scala/org/apache/spark/mllib/clustering/LocalKMeans.scala | Scala | apache-2.0 | 3,522 |
package com.zobot.client.packet.definitions.clientbound.play
import com.zobot.client.packet.Packet
// Clientbound play-state "Remove Entity Effect" packet.
// NOTE(review): packet id 0x33 must match the targeted protocol version --
// confirm against the protocol specification this client implements.
case class RemoveEntityEffect(entityId: Int, effectId: Any) extends Packet {
  override lazy val packetId = 0x33
  // Wire format: VarInt-encoded entity id followed by the effect id encoded
  // via fromAny. NOTE(review): effectId is typed Any -- presumably a protocol
  // byte/varint; a narrower type would be safer, TODO confirm.
  override lazy val packetData: Array[Byte] =
    fromVarInt(entityId) ++
    fromAny(effectId)
}
| BecauseNoReason/zobot | src/main/scala/com/zobot/client/packet/definitions/clientbound/play/RemoveEntityEffect.scala | Scala | mit | 313 |
package at.logic.gapt.formats.ivy
import at.logic.gapt.utils.testing.ClasspathFileCopier
import org.specs2.mutable._
import at.logic.gapt.formats.lisp._
import java.io.File.separator
import scala.util.{ Success, Failure }
import util.parsing.input.Reader
/**
* Test for the Ivy interface.
*/
class IvyTest extends Specification with ClasspathFileCopier {
"The Ivy Parser " should {
" parse an empty list " in {
SExpressionParser.tryParseString( "()" ) must_== Success( List( LList() ) )
}
" not parse an empty list + garbage" in {
SExpressionParser.tryParseString( "())" ) must beLike { case Failure( _ ) => ok }
}
" parse the atom a1" in {
SExpressionParser.tryParseString( "a1" ) must_== Success( List( LAtom( "a1" ) ) )
}
" parse the atom a2(space)" in {
SExpressionParser.tryParseString( "a2 " ) must_== Success( List( LAtom( "a2" ) ) )
}
""" parse the atom "a b c" """ in {
SExpressionParser.tryParseString( """"a b c"""" ) must_== Success( List( LAtom( "a b c" ) ) )
}
" parse the list (c1 (c2 c2) c) " in {
SExpressionParser.tryParseString( "(c1 (c2 c2) c)" ) must_== Success(
LFun( "c1", LFun( "c2", LAtom( "c2" ) ), LAtom( "c" ) ) :: Nil
)
}
" parse the list c4;;comment" in {
SExpressionParser.tryParseString( "c4;;comment" ) must_== Success(
LAtom( "c4" ) :: Nil
)
}
" parse the comments ;;comment 1<newline>;;comment 2" in {
SExpressionParser.tryParseString( ";;comment 1\\r\\n;;comment 2" ) must_== Success( List() )
}
" parse the list ;;comment<newline>c5" in {
SExpressionParser.tryParseString( ";;comment\\nc5" ) must_== Success( List( LAtom( "c5" ) ) )
}
" parse the list (c1 (c2 c2) c) ;;comment" in {
SExpressionParser.tryParseString( "(c1 (c2 c2) c);;comment" ) must_== Success(
LFun( "c1", LFun( "c2", LAtom( "c2" ) ), LAtom( "c" ) ) :: Nil
)
}
" parse the list (c1 (c2 c2) ;;comment<newline>c)" in {
SExpressionParser.tryParseString( "(c1 (c2 c2) c);;comment" ) must_== Success(
LFun( "c1", LFun( "c2", LAtom( "c2" ) ), LAtom( "c" ) ) :: Nil
)
}
" parse the list (c1 \\"c2 c2\\" c) " in {
SExpressionParser.tryParseString( "(c1 \\"c2 c2\\" c)" ) must_== Success(
List( LFun( "c1", LAtom( "c2 c2" ), LAtom( "c" ) ) )
)
}
" parse the list_ a1 b " in {
SExpressionParser.tryParseString( "a1 b" ) must_== Success( List( LAtom( "a1" ), LAtom( "b" ) ) )
}
" parse the list ;;comment 1\\n(c1 (c2 c2) ;;comment 2\\nc)" in {
SExpressionParser.tryParseString( "(\\n;;comment 1\\nc1 (c2 c2) c);;comment 2" ) must_== Success(
List( LFun( "c1", LFun( "c2", LAtom( "c2" ) ), LAtom( "c" ) ) )
)
}
" parse the test file simple.ivy " in {
val result = SExpressionParser( tempCopyOfClasspathFile( "simple.ivy" ) )
result must not beEmpty
val proof = result.head
proof match {
case LList( input1, input2, instantiate8, paramod3, input4, input5, instantiate9, resolve6, resolve7 ) =>
val pinput1 = IvyParser.parse( LList( input1 ) )
//debug(pinput1)
val pinput2 = IvyParser.parse( LList( input2 ) )
//debug(pinput2)
val pinput3 = IvyParser.parse( LList( input1, instantiate8 ) )
//debug(pinput3)
case _ =>
// "The first two rules of simple.ivy must parse correctly" must beEqualTo("failed")
"The proof in simple.ivy must have 9 inferences" must beEqualTo( "failed" )
}
ok
}
" parse the test file instantiations.ivy " in {
val result = SExpressionParser( tempCopyOfClasspathFile( "instantiations.ivy" ) )
result must not beEmpty
val proof = result.head
proof match {
case LList( input1, input2, instantiate8, paramod3, input4, input5, instantiate9, resolve6, resolve7, instantiate10 ) =>
val pinput3 = IvyParser.parse( LList( paramod3, instantiate9 ) )
//debug(pinput3)
val pinput4 = IvyParser.parse( LList( instantiate10 ) )
//debug(pinput4)
/*
pinput4 match {
case Instantiate(id, exp, sub, clause, parent) =>
"instantiate" must beEqualTo("instantiate")
case _ =>
"last inference must be instantiate" must beEqualTo("failed")
} */
case _ =>
// "The first two rules of simple.ivy must parse correctly" must beEqualTo("failed")
"The proof in instantiations.ivy must have 9 inferences" must beEqualTo( "failed" )
}
ok
}
" parse the test file flip.ivy " in {
val result = SExpressionParser( tempCopyOfClasspathFile( "flip.ivy" ) )
result must not beEmpty
val proof = result.head
proof match {
case l @ LList( input0, input1, flip2, input3, para4a, inst6, resolve4 ) =>
val pinput3 = IvyParser.parse( LList( input1, flip2 ) )
//debug(pinput3)
val pinput4 = IvyParser.parse( l )
//println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
//println(pinput4)
//println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
case LList( list @ _* ) =>
//println(list)
//println(list.length)
"The proof in flip.ivy must have 7, not " + list.length + " inferences" must beEqualTo( "failed" )
case _ =>
"The proof in flip.ivy must be a nonempty list" must beEqualTo( "failed" )
}
ok
}
" parse the test file resulution.ivy " in {
val result = SExpressionParser( tempCopyOfClasspathFile( "resolution.ivy" ) )
result must not beEmpty
val proof = result.head
proof match {
case LList( input1, input2, instantiate8, paramod3, input4, input5, instantiate9, resolve6, resolve7 ) =>
val pinput = IvyParser.parse( proof )
//debug("resolution: "+pinput)
case _ =>
// "The first two rules of simple.ivy must parse correctly" must beEqualTo("failed")
"The proof in resolution.ivy must have 9 inferences" must beEqualTo( "failed" )
}
ok
}
" parse the test files factor.ivy and factor2.ivy " in {
val result = SExpressionParser( tempCopyOfClasspathFile( "factor.ivy" ) )
result must not beEmpty
val proof = result.head
proof match {
case LList( _* ) =>
val pinput = IvyParser.parse( proof )
//debug("resolution: "+pinput)
case _ =>
"The proof in factor.ivy must have some inferences" must beEqualTo( "failed" )
}
val result2 = SExpressionParser( tempCopyOfClasspathFile( "factor2.ivy" ) )
result2 must not beEmpty
val proof2 = result2.head
proof2 match {
case LList( _* ) =>
val pinput = IvyParser.parse( proof2 )
//debug("resolution: "+pinput)
case _ =>
"The proof in factor.ivy must have some inferences" must beEqualTo( "failed" )
}
ok
}
    " parse the test file manyliterals.ivy " in {
      // The file must parse into at least one S-expression.
      val result = SExpressionParser( tempCopyOfClasspathFile( "manyliterals.ivy" ) )
      result must not beEmpty
      val proof = result.head
      proof match {
        case LList( _* ) =>
          // Any non-empty list shape is accepted; parsing must not throw.
          val pinput = IvyParser.parse( proof )
        //debug("resolution: "+pinput)
        case _ =>
          // "The first two rules of simple.ivy must parse correctly" must beEqualTo("failed")
          "The proof in manyliterals.ivy must have some inferences" must beEqualTo( "failed" )
      }
      ok
    }
    " parse the test file simple2.ivy " in {
      // Only checks that the S-expression parser does not throw on the file.
      // NOTE(review): `result` is never inspected — consider asserting it is
      // non-empty like the sibling tests do.
      val result = SExpressionParser( tempCopyOfClasspathFile( "simple2.ivy" ) )
      ok
    }
}
  " parse the test file prime1-0sk.ivy (clause set of the 0 instance of the prime proof) " in {
    // The clause set must parse into at least one S-expression.
    val result = SExpressionParser( tempCopyOfClasspathFile( "prime1-0sk.ivy" ) )
    result must not beEmpty
    val proof = result.head
    proof match {
      case LList( _* ) =>
        // Parsing with IvyParser must succeed without throwing.
        val pinput = IvyParser.parse( proof )
      //debug("resolution: "+pinput)
      case _ =>
        // "The first two rules of simple.ivy must parse correctly" must beEqualTo("failed")
        "The proof in prime1-0sk.ivy must have some inferences" must beEqualTo( "failed" )
    }
    ok
  }
" parse the test file GRA014+1.ivy " in {
val result = SExpressionParser( tempCopyOfClasspathFile( "GRA014+1.ivy" ) )
result must not beEmpty
val proof = result.head
proof match {
case LList( _* ) =>
val pinput = IvyParser.parse( proof )
//debug("resolution: "+pinput)
case _ =>
// "The first two rules of simple.ivy must parse correctly" must beEqualTo("failed")
"The proof in manyliterals.ivy must have some inferences" must beEqualTo( "failed" )
}
ok
}
  " parse the test file GEO037-2.ivy " in {
    // The file must parse into at least one S-expression.
    val result = SExpressionParser( tempCopyOfClasspathFile( "GEO037-2.ivy" ) )
    result must not beEmpty
    val proof = result.head
    proof match {
      case LList( _* ) =>
        // Parsing with IvyParser must succeed without throwing.
        val pinput = IvyParser.parse( proof )
      //debug("resolution: "+pinput)
      case _ =>
        // "The first two rules of simple.ivy must parse correctly" must beEqualTo("failed")
        "The proof in GEO037-2.ivy must have some inferences" must beEqualTo( "failed" )
    }
    ok
  }
  " parse the test file issue221.ivy " in {
    // Regression file for issue 221: must parse into at least one S-expression.
    val result = SExpressionParser( tempCopyOfClasspathFile( "issue221.ivy" ) )
    result must not beEmpty
    val proof = result.head
    proof match {
      case LList( _* ) =>
        // Parsing with IvyParser must succeed without throwing.
        val pinput = IvyParser.parse( proof )
      //debug("resolution: "+pinput)
      case _ =>
        // "The first two rules of simple.ivy must parse correctly" must beEqualTo("failed")
        "The proof in issue221.ivy must have some inferences" must beEqualTo( "failed" )
    }
    ok
  }
}
| loewenheim/gapt | src/test/scala/at/logic/gapt/formats/ivy/IvyTest.scala | Scala | gpl-3.0 | 10,061 |
/**
* Copyright 2013 Bayes Technologies
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package olympian.party.repository
/**
* @author kevinbayes
*
*/
/**
 * Cake-pattern component: any concrete instance must also mix in
 * [[PersonStorage]] (enforced by the self-type below), which supplies the
 * storage backend.
 * NOTE(review): the class currently declares no members of its own.
 */
class PersonRepositoryComponent {
  this: PersonStorage =>
}
import org.junit.runner._
import org.specs2.mutable._
import org.specs2.runner._
import play.api.libs.json.{JsNull, JsNumber, JsObject, JsValue}
import play.api.test.Helpers._
import play.api.test._
import scala.concurrent._
@RunWith(classOf[JUnitRunner])
class GenerationSpec extends Specification {
val applicationJson = FakeHeaders(
Seq("Content-Type" -> Seq("application/json"))
)
val routeBase = "/api/generations"
def fakePostRequestWithNumber(number: Int) = fakePostRequest(
JsObject(
"number" -> JsNumber(number) ::
Nil
)
)
  /** Builds a POST to the collection endpoint carrying the given JSON body. */
  def fakePostRequest(json: JsValue) = FakeRequest(
    POST,
    routeBase,
    applicationJson,
    json
  )
def fakePutRequestWithNumber(number: Int, id: Int) = fakePutRequest(
JsObject(
"number" -> JsNumber(number) ::
Nil
),
id
)
  /** Builds a PUT to /api/generations/&lt;id&gt; carrying the given JSON body. */
  def fakePutRequest(json: JsValue, id: Int) = FakeRequest(
    PUT,
    routeBase + "/" + id,
    applicationJson,
    json
  )
val fakeGetRequest = FakeRequest(
GET,
routeBase
)
def fakeGetQuery(number: Int) = FakeRequest(
GET,
routeBase + "?number=" + number
)
def fakeGetByIdRequest(number: Int) = FakeRequest(
GET,
routeBase + "/" + number
)
def fakeDeleteRequest(number: Int) = FakeRequest(
DELETE,
routeBase + "/" + number
)
  /**
   * Asserts the HTTP status of a routed response and hands its JSON body to
   * `action` for further assertions.
   */
  def sendRequest(responseOption: Option[Future[play.api.mvc.Result]], expectedResponse: Int, action: JsValue => Unit) {
    responseOption match {
      case Some(response) => {
        status(response) must equalTo(expectedResponse)
        val jsonContent = contentAsJson(response)
        action(jsonContent)
      }
      // Deliberately-failing assertion: reached only when the router did not
      // match the request at all.
      case None => 1 must equalTo(2)
    }
  }
  /** Routes a request carrying a JSON body and delegates to [[sendRequest]]. */
  def sendJsonRequest(request: FakeRequest[JsValue], expectedResponse: Int, action: JsValue => Unit) {
    sendRequest(route(request), expectedResponse, action)
  }
  /** Routes a body-less request and delegates to [[sendRequest]]. */
  def sendEmptyRequest(request: FakeRequest[play.api.mvc.AnyContentAsEmpty.type], expectedResponse: Int, action: JsValue => Unit) {
    sendRequest(route(request), expectedResponse, action)
  }
"Generation POST" should {
"Reject if JSON structure isn't there" in new WithApplication {
sendJsonRequest(
fakePostRequest(JsNull),
400,
jsonContent => {
(jsonContent \\ "metadata" \\ "status").as[String] must equalTo("error")
(jsonContent \\ "metadata" \\ "messages")(0).as[String] must contain("Missing")
}
)
}
"Reject if JSON key number is missing" in new WithApplication {
sendJsonRequest(
fakePostRequest(
JsObject(
"hello" -> JsNull ::
Nil
)
),
400,
jsonContent => {
(jsonContent \\ "metadata" \\ "status").as[String] must equalTo("error")
(jsonContent \\ "metadata" \\ "messages")(0).as[String] must contain("Missing")
}
)
}
"Succesfully create" in new WithApplication {
sendJsonRequest(
fakePostRequestWithNumber(1),
200,
jsonContent => {
(jsonContent \\ "metadata" \\ "status").as[String] must equalTo("success")
(jsonContent \\ "data" \\ "number").as[Int] must equalTo(1)
}
)
}
"Reject create if generation with number already exists" in new WithApplication {
val fakePostRequestOne = fakePostRequestWithNumber(1)
sendJsonRequest(
fakePostRequestOne,
200,
jsonContent => sendJsonRequest(
fakePostRequestOne,
400,
jsonContent => {
(jsonContent \\ "metadata" \\ "status").as[String] must equalTo("error")
(jsonContent \\ "metadata" \\ "messages")(0).as[String] must contain("violation")
}
)
)
}
}
"Generation GET" should {
"return all generations" in new WithApplication {
val fakePostRequestOne = fakePostRequestWithNumber(1)
val fakePostRequestTwo = fakePostRequestWithNumber(2)
sendJsonRequest(
fakePostRequestOne,
200,
jsonContent => sendJsonRequest(
fakePostRequestTwo,
200,
jsonContent => sendEmptyRequest(
fakeGetRequest,
200,
jsonContent => {
(jsonContent \\ "metadata" \\ "status").as[String] must equalTo("success")
val data = (jsonContent \\ "data").as[List[JsValue]]
data.size must equalTo(2)
(data.head \\ "number").as[Int] must equalTo(1)
}
)
)
)
}
"only return a generation with a particular number" in new WithApplication {
val fakePostRequestOne = fakePostRequestWithNumber(1)
val fakePostRequestTwo = fakePostRequestWithNumber(2)
sendJsonRequest(
fakePostRequestOne,
200,
jsonContent => sendJsonRequest(
fakePostRequestTwo,
200,
jsonContent => sendEmptyRequest(
fakeGetQuery(1),
200,
jsonContent => {
(jsonContent \\ "metadata" \\ "status").as[String] must equalTo("success")
val data = (jsonContent \\ "data").as[List[JsValue]]
data.size must equalTo(1)
(data.head \\ "number").as[Int] must equalTo(1)
}
)
)
)
}
}
  // Reading a single generation by its database id.
  "Generation GET by Id" should {
    "fail if generation with id doesn't exist" in new WithApplication {
      // No generations exist yet, so any id yields a 404 error envelope.
      sendEmptyRequest(
        fakeGetByIdRequest(1),
        404,
        jsonContent => {
          (jsonContent \\ "metadata" \\ "status").as[String] must equalTo("error")
          (jsonContent \\ "metadata" \\ "messages")(0).as[String] must contain("found")
        }
      )
    }
    "return a particular generation" in new WithApplication {
      // Create a generation, then fetch it back via the id echoed in the
      // creation response.
      sendJsonRequest(
        fakePostRequestWithNumber(1),
        200,
        jsonContent => sendEmptyRequest(
          fakeGetByIdRequest((jsonContent \\ "data" \\ "id").as[Int]),
          200,
          jsonContent => {
            (jsonContent \\ "metadata" \\ "status").as[String] must equalTo("success")
            (jsonContent \\ "data" \\ "number").as[Int] must equalTo(1)
          }
        )
      )
    }
  }
  // Deleting generations by id.
  "Generation DELETE" should {
    "fail if generation with id doesn't exist" in new WithApplication {
      sendEmptyRequest(
        fakeDeleteRequest(1),
        404,
        jsonContent => {
          (jsonContent \\ "metadata" \\ "status").as[String] must equalTo("error")
          (jsonContent \\ "metadata" \\ "messages")(0).as[String] must contain("found")
        }
      )
    }
    "Succesfully delete" in new WithApplication {
      // Create, delete by the returned id, then verify a follow-up GET 404s.
      sendJsonRequest(
        fakePostRequestWithNumber(1),
        200,
        jsonContent => {
          val id = (jsonContent \\ "data" \\ "id").as[Int]
          sendEmptyRequest(
            fakeDeleteRequest(id),
            200,
            jsonContent => {
              (jsonContent \\ "metadata" \\ "status").as[String] must equalTo("success")
              sendEmptyRequest(
                fakeGetByIdRequest(id),
                404,
                jsonContent => ()
              )
            }
          )
        }
      )
    }
  }
"Generation PUT" should {
"fail if such a generation doesnt exist" in new WithApplication {
sendJsonRequest(
fakePutRequestWithNumber(1, 1),
404,
jsonContent => {
(jsonContent \\ "metadata" \\ "status").as[String] must equalTo("error")
(jsonContent \\ "metadata" \\ "messages")(0).as[String] must contain("found")
}
)
}
"Succesfully update" in new WithApplication {
sendJsonRequest(
fakePostRequestWithNumber(1),
200,
jsonContent => sendJsonRequest(
fakePutRequestWithNumber(2, (jsonContent \\ "data" \\ "id").as[Int]),
200,
jsonContent => {
(jsonContent \\ "metadata" \\ "status").as[String] must equalTo("success")
(jsonContent \\ "data" \\ "number").as[Int] must equalTo(2)
}
)
)
}
// Ones below need editing
"Reject update if generation with number already exists" in new WithApplication {
val fakePostRequestOne = fakePostRequestWithNumber(1)
val fakePostRequestTwo = fakePostRequestWithNumber(2)
sendJsonRequest(
fakePostRequestOne,
200,
jsonContent => sendJsonRequest(
fakePostRequestTwo,
200,
jsonContent => sendJsonRequest(
fakePutRequestWithNumber(1, (jsonContent \\ "data" \\ "id").as[Int]),
400,
jsonContent => {
(jsonContent \\ "metadata" \\ "status").as[String] must equalTo("error")
(jsonContent \\ "metadata" \\ "messages")(0).as[String] must contain("violation")
}
)
)
)
}
"Reject if JSON structure isn't there" in new WithApplication {
sendJsonRequest(
fakePutRequest(JsNull, 1),
400,
jsonContent => {
(jsonContent \\ "metadata" \\ "status").as[String] must equalTo("error")
(jsonContent \\ "metadata" \\ "messages")(0).as[String] must contain("nothing")
}
)
}
"Reject if JSON key number is missing" in new WithApplication {
sendJsonRequest(
fakePutRequest(
JsObject(
"hello" -> JsNull ::
Nil
),
1
),
400,
jsonContent => {
(jsonContent \\ "metadata" \\ "status").as[String] must equalTo("error")
(jsonContent \\ "metadata" \\ "messages")(0).as[String] must contain("nothing")
}
)
}
}
}
| ishakir/PokeStat | test/GenerationSpec.scala | Scala | mit | 9,630 |
package spire.math
import scala.math.{ScalaNumber, ScalaNumericConversions}
import spire.algebra.{Order, Trig, Signed}
import spire.syntax.nroot._
sealed trait Real extends ScalaNumber with ScalaNumericConversions { x =>
import Real.{roundUp, Exact, Inexact}
def apply(p: Int): SafeLong
def toRational(p: Int): Rational = this match {
case Exact(n) => n
case _ => Rational(x(p), SafeLong.two.pow(p))
}
def toRational: Rational = toRational(Real.bits)
// ugh scala.math
def doubleValue(): Double = toRational.toDouble
def floatValue(): Float = toRational.toFloat
def intValue(): Int = toRational.toInt
def longValue(): Long = toRational.toLong
def underlying(): Object = this
override def hashCode(): Int = toRational.hashCode
override def equals(y: Any): Boolean = y match {
case y: Real => x eqv y
case y => toRational.equals(y)
}
def eqv(y: Real): Boolean = (x compare y) == 0
def compare(y: Real): Int = (x, y) match {
case (Exact(nx), Exact(ny)) => nx compare ny
case _ => (x - y).signum
}
def min(y: Real): Real = (x, y) match {
case (Exact(nx), Exact(ny)) => Exact(nx min ny)
case _ => Real(p => x(p) min y(p))
}
def max(y: Real): Real = (x, y) match {
case (Exact(nx), Exact(ny)) => Exact(nx max ny)
case _ => Real(p => x(p) max y(p))
}
def abs(): Real = this match {
case Exact(n) => Exact(n.abs)
case _ => Real(p => x(p).abs)
}
def signum(): Int = this match {
case Exact(n) => n.signum
case _ => x(Real.bits).signum
}
def unary_-(): Real = this match {
case Exact(n) => Exact(-n)
case _ => Real(p => -x(p))
}
  /**
   * Multiplicative inverse.
   *
   * For the inexact case, first search for a precision `s` at which the
   * approximation is provably bounded away from zero (|x(s)| >= 3), then
   * evaluate at a correspondingly higher precision so the division is
   * accurate to `p` bits.
   * NOTE(review): `findNonzero` never terminates if this number is an
   * inexact representation of exactly zero — callers must not invert zero.
   */
  def reciprocal(): Real = {
    def findNonzero(i: Int): Int =
      if (SafeLong.three <= x(i).abs) i else findNonzero(i + 1)
    this match {
      case Exact(n) => Exact(n.reciprocal)
      case _ => Real({p =>
        val s = findNonzero(0)
        roundUp(Rational(SafeLong.two.pow(2 * p + 2 * s + 2), x(p + 2 * s + 2)))
      })
    }
  }
def +(y: Real): Real = (x, y) match {
case (Exact(nx), Exact(ny)) => Exact(nx + ny)
case (Exact(Rational.zero), _) => y
case (_, Exact(Rational.zero)) => x
case _ => Real(p => roundUp(Rational(x(p + 2) + y(p + 2), 4)))
}
def -(y: Real): Real = x + (-y)
def *(y: Real): Real = (x, y) match {
case (Exact(nx), Exact(ny)) => Exact(nx * ny)
case (Exact(Rational.zero), _) => Real.zero
case (_, Exact(Rational.zero)) => Real.zero
case (Exact(Rational.one), _) => y
case (_, Exact(Rational.one)) => x
case _ => Real({p =>
val x0 = x(0).abs + 2
val y0 = y(0).abs + 2
val sx = Real.sizeInBase(x0, 2) + 3
val sy = Real.sizeInBase(y0, 2) + 3
roundUp(Rational(x(p + sy) * y(p + sx), SafeLong.two.pow(p + sx + sy)))
})
}
def **(k: Int): Real = pow(k)
  /**
   * Integer power by square-and-multiply.
   *
   * Negative exponents invert first, `pow(0)` is one, and for k >= 2 `loop`
   * keeps the running square in `b` and folds odd bits of the exponent into
   * `extra`, performing O(log k) multiplications. `loop` requires k >= 1 at
   * the initial call, which the dispatch below guarantees.
   */
  def pow(k: Int): Real = {
    def loop(b: Real, k: Int, extra: Real): Real =
      if (k == 1)
        b * extra
      else
        loop(b * b, k >>> 1, if ((k & 1) == 1) b * extra else extra)
    this match {
      case Exact(n) =>
        Exact(n.pow(k))
      case _ =>
        if (k < 0) {
          reciprocal.pow(-k)
        } else if (k == 0) {
          Real.one
        } else if (k == 1) {
          this
        } else {
          loop(x, k - 1, x)
        }
    }
  }
def /(y: Real): Real = x * y.reciprocal
def %(y: Real): Real = (x, y) match {
case (Exact(nx), Exact(ny)) => Exact(nx % ny)
case _ => Real({ p =>
val d = x / y
val s = d(2)
val d2 = if (s >= 0) d.floor else d.ceil
(x - d2 * y)(p)
})
}
def /~(y: Real): Real = (x, y) match {
case (Exact(nx), Exact(ny)) => Exact(nx /~ ny)
case _ => Real({ p =>
val d = x / y
val s = d(2)
val d2 = if (s >= 0) d.floor else d.ceil
d2(p)
})
}
def gcd(y: Real): Real = (x, y) match {
case (Exact(nx), Exact(ny)) => Exact(nx gcd ny)
case _ => Real({ p =>
val g = x.toRational(p) gcd y.toRational(p)
roundUp(g * SafeLong.two.pow(p))
})
}
def ceil(): Real = x match {
case Exact(n) => Exact(n.ceil)
case _ => Real({ p =>
val n = x(p)
val t = SafeLong.two.pow(p)
val m = n % t
if (m == 0) n
else if (n.signum >= 0) n + t - m
else n - m
})
}
def floor(): Real = x match {
case Exact(n) => Exact(n.floor)
case _ => Real({ p =>
val n = x(p)
val t = SafeLong.two.pow(p)
val m = n % t
if (n.signum >= 0) n - m else n - t - m
})
}
def round(): Real = x match {
case Exact(n) => Exact(n.round)
case _ => Real({ p =>
val n = x(p)
val t = SafeLong.two.pow(p)
val h = t / 2
val m = n % t
if (m < h) n - m else n - m + t
})
}
def isWhole(): Boolean = x match {
case Exact(n) =>
n.isWhole
case _ =>
val n = x(Real.bits)
val t = SafeLong.two.pow(Real.bits)
(n % t) == 0
}
def sqrt(): Real = Real(p => x(p * 2).sqrt)
def nroot(k: Int): Real = Real(p => x(p * k).nroot(k))
def fpow(r: Rational): Real =
Real({ p =>
val r2 = r.limitToInt
val n = r2.numerator
val d = r2.denominator
x.pow(n.toInt).nroot(d.toInt)(p)
})
// a bit hand-wavy
def fpow(y: Real): Real = y match {
case Exact(n) => x.fpow(n)
case _ => Real({ p =>
x.fpow(Rational(y(p), SafeLong.two.pow(p)))(p)
})
}
override def toString: String = x match {
case Exact(n) => n.toString
case _ => getString(Real.digits)
}
def repr: String = x match {
case Exact(n) => s"Exact(${n.toString})"
case _ => s"Inexact(${toRational})"
}
  /**
   * Decimal string with `d` fractional digits of precision.
   *
   * Evaluates the number at enough binary precision for `d` decimal digits,
   * scales by 10^d, rounds to an integer, re-inserts the decimal point, and
   * strips trailing zeros (and a dangling point).
   */
  def getString(d: Int): String = {
    val b = Real.digitsToBits(d)
    // m is round(x * 10^d), computed from the 2^-b approximation x(b).
    val r = Rational(x(b) * SafeLong.ten.pow(d), SafeLong.two.pow(b))
    val m = roundUp(r)
    val (sign, str) = m.signum match {
      case -1 => ("-", m.abs.toString)
      case 0 => ("", "0")
      case 1 => ("", m.toString)
    }
    // i = digits before the decimal point; non-positive means the magnitude
    // is below 1 and needs "0." plus leading zeros.
    val i = str.length - d
    val s = if (i > 0) {
      sign + str.substring(0, i) + "." + str.substring(i)
    } else {
      sign + "0." + ("0" * -i) + str
    }
    s.replaceAll("0+$", "").replaceAll("\\\\.$", "")
  }
}
object Real {
val zero: Real = Exact(Rational.zero)
val one: Real = Exact(Rational.one)
val two: Real = Exact(Rational(2))
val four: Real = Exact(Rational(4))
def apply(f: Int => SafeLong): Real = Inexact(f)
implicit def apply(n: Int): Real = Exact(Rational(n))
implicit def apply(n: Long): Real = Exact(Rational(n))
implicit def apply(n: BigInt): Real = Exact(Rational(n))
implicit def apply(n: SafeLong): Real = Exact(Rational(n))
implicit def apply(n: Rational): Real = Exact(n)
implicit def apply(n: Double): Real = Exact(Rational(n))
implicit def apply(n: BigDecimal): Real = Exact(Rational(n))
def apply(s: String): Real = Exact(Rational(s))
lazy val pi: Real = Real(16) * atan(Real(Rational(1, 5))) - Real.four * atan(Real(Rational(1, 239)))
lazy val e: Real = exp(Real.one)
def log(x: Real): Real = {
val t = x(2)
val n = sizeInBase(t, 2) - 3
if (t < 0) sys.error("log of negative number")
else if (t < 4) -log(x.reciprocal)
else if (t < 8) logDr(x)
else logDr(div2n(x, n)) + Real(n) * log2
}
def exp(x: Real): Real = {
val u = x / log2
val n = u(0)
val s = x - Real(n) * log2
if (!n.isValidInt) sys.error("sorry")
else if (n < 0) div2n(expDr(s), -n.toInt)
else if (n > 0) mul2n(expDr(s), n.toInt)
else expDr(s)
}
def sin(x: Real): Real = {
val z = x / piBy4
val s = roundUp(Rational(z(2), 4))
val y = x - piBy4 * Real(s)
val m = (s % 8).toInt
val n = if (m < 0) m + 8 else m
n match {
case 0 => sinDr(y)
case 1 => sqrt1By2 * (cosDr(y) + sinDr(y))
case 2 => cosDr(y)
case 3 => sqrt1By2 * (cosDr(y) - sinDr(y))
case 4 => -sinDr(y)
case 5 => -sqrt1By2 * (cosDr(y) + sinDr(y))
case 6 => -cosDr(y)
case 7 => -sqrt1By2 * (cosDr(y) - sinDr(y))
}
}
def cos(x: Real): Real = {
val z = x / piBy4
val s = roundUp(Rational(z(2), 4))
val y = x - piBy4 * Real(s)
val m = (s % 8).toInt
val n = if (m < 0) m + 8 else m
n match {
case 0 => cosDr(y)
case 1 => sqrt1By2 * (cosDr(y) - sinDr(y))
case 2 => -sinDr(y)
case 3 => -sqrt1By2 * (cosDr(y) + sinDr(y))
case 4 => -cosDr(y)
case 5 => -sqrt1By2 * (cosDr(y) - sinDr(y))
case 6 => sinDr(y)
case 7 => sqrt1By2 * (cosDr(y) + sinDr(y))
}
}
def tan(x: Real): Real = sin(x) / cos(x)
def atan(x: Real): Real = {
val t = x(2)
val xp1 = x + Real.one
val xm1 = x - Real.one
if (t < -5) atanDr(-x.reciprocal) - piBy2
else if (t == -4) -piBy4 - atanDr(xp1 / xm1)
else if (t < 4) atanDr(x)
else if (t == 4) piBy4 + atanDr(xm1 / xp1)
else piBy2 - atanDr(x.reciprocal)
}
def atan2(y: Real, x: Real): Real = Real({ p =>
var pp = p
var sx = x(pp).signum
var sy = y(pp).signum
// val maxp = p * p
// while (sx == 0 && sy == 0 && pp < maxp) {
while (sx == 0 && sy == 0) {
sx = x(pp).signum
sy = y(pp).signum
pp += 1
}
if (sx > 0) {
atan(y / x)(p)
} else if (sy >= 0 && sx < 0) {
(atan(y / x) + Real.pi)(p)
} else if (sy < 0 && sx < 0) {
(atan(y / x) - Real.pi)(p)
} else if (sy > 0) {
(Real.pi / Real.two)(p)
} else if (sy < 0) {
(-Real.pi / Real.two)(p)
} else {
throw new IllegalArgumentException("atan2(0, 0) is undefined")
// // ugh
// Real.zero
// //sys.error("undefined sx=%s sy=%s" format (sx, sy))
}
})
def asin(x: Real): Real = {
val x0 = x(0)
val s = (Real.one - x * x).sqrt
x0.signum match {
case n if n > 0 => (Real.pi / Real.two) - atan(s / x)
case 0 => atan(x / s)
case _ => (-Real.pi / Real.two) - atan(s / x)
}
}
def acos(x: Real): Real = (Real.pi / Real.two) - asin(x)
def sinh(x: Real): Real = {
val y = exp(x)
(y - y.reciprocal) / Real.two
}
def cosh(x: Real): Real = {
val y = exp(x)
(y + y.reciprocal) / Real.two
}
def tanh(x: Real): Real = {
val y = exp(x);
val y2 = y.reciprocal
(y - y2) / (y + y2)
}
def asinh(x: Real): Real = log(x + (x * x + Real.one).sqrt)
def acosh(x: Real): Real = log(x + (x * x - Real.one).sqrt)
def atanh(x: Real): Real = log((Real.one + x) / (Real.one - x)) / Real.two
def digits: Int = 40
def bits: Int = digitsToBits(digits)
def digitsToBits(n: Int): Int =
spire.math.ceil(n * (spire.math.log(10.0) / spire.math.log(2.0))).toInt + 4
def sizeInBase(n: SafeLong, base: Int): Int = {
def loop(n: SafeLong, acc: Int): Int = if (n <= 1) acc + 1 else loop(n / base, acc + 1)
loop(n.abs, 0)
}
def roundUp(r: Rational): SafeLong = SafeLong(r.round.toBigInt)
def div2n(x: Real, n: Int): Real =
Real(p => if (p >= n) x(p - n) else roundUp(Rational(x(p), SafeLong.two.pow(n))))
def mul2n(x: Real, n: Int): Real =
Real(p => x(p + n))
lazy val piBy2 = div2n(pi, 1)
lazy val piBy4 = div2n(pi, 2)
lazy val log2 = div2n(logDrx(Real.two.reciprocal), 1)
lazy val sqrt1By2 = Real.two.reciprocal.sqrt
  /**
   * Sums `roundUp(c * x)` over the zipped streams, stopping when the
   * coefficient stream ends or a term rounds to zero.
   */
  def accumulate(total: SafeLong, xs: Stream[SafeLong], cs: Stream[Rational]): SafeLong = {
    (xs, cs) match {
      case (_, Stream.Empty) => total
      // The value stream is built with Stream.iterate and should be
      // infinite; exhausting it indicates a programming error.
      case (Stream.Empty, _) => sys.error("nooooo")
      case (x #:: xs, c #:: cs) =>
        val t = roundUp(c * Rational(x))
        if (t == 0) total else accumulate(total + t, xs, cs)
    }
  }
private[spire] def powerSeries(ps: Stream[Rational], terms: Int => Int, x: Real): Real = {
Real({p =>
val t = terms(p)
val l2t = 2 * sizeInBase(SafeLong(t) + 1, 2) + 6
val p2 = p + l2t
val xr = x(p2)
val xn = SafeLong.two.pow(p2)
if (xn == 0) sys.error("oh no")
def g(yn: SafeLong): SafeLong = roundUp(Rational(yn * xr, xn))
val num = accumulate(SafeLong.zero, Stream.iterate(xn)(g), ps.take(t))
val denom = SafeLong.two.pow(l2t)
roundUp(Rational(num, denom))
})
}
private[spire] def accSeq(f: (Rational, SafeLong) => Rational): Stream[Rational] = {
def loop(r: Rational, n: SafeLong): Stream[Rational] =
r #:: loop(f(r, n), n + 1)
loop(Rational.one, SafeLong.one)
}
def expDr(x: Real): Real =
powerSeries(accSeq((r, n) => r / n), n => n, x)
def logDr(x: Real): Real = {
val y = (x - Real.one) / x
y * logDrx(y)
}
def logDrx(x: Real): Real = {
powerSeries(Stream.from(1).map(n => Rational(1, n)), _ + 1, x)
}
def sinDr(x: Real): Real =
x * powerSeries(accSeq((r, n) => -r * Rational(1, 2*n*(2*n+1))), n => n, x * x)
def cosDr(x: Real): Real =
powerSeries(accSeq((r, n) => -r * Rational(1, 2*n*(2*n-1))), n => n, x * x)
def atanDr(x: Real): Real = {
val y = x * x + Real(1)
(x / y) * atanDrx((x * x) / y)
}
def atanDrx(x: Real): Real =
//powerSeries(accSeq((r, n) => r * (Rational(2*n, 2*n + 1))), _ + 1, x)
powerSeries(accSeq((r, n) => r * (Rational(2*n, 2*n + 1))), _ * 2, x)
implicit val algebra = new RealAlgebra {}
case class Exact(n: Rational) extends Real {
def apply(p: Int): SafeLong = Real.roundUp(Rational(2).pow(p) * n)
}
case class Inexact(f: Int => SafeLong) extends Real {
@volatile private[spire] var memo: Option[(Int, SafeLong)] = None
def apply(p: Int): SafeLong = memo match {
case Some((bits, value)) if bits >= p =>
Real.roundUp(Rational(value, SafeLong(2).pow(bits - p)))
case _ =>
val result = f(p)
memo = Some((p, result))
result
}
}
}
// Concrete, serializable bundle of all the Real type-class instances.
@SerialVersionUID(0L)
class RealAlgebra extends RealIsFractional {}
/**
 * Type-class instances for [[Real]]: fractional arithmetic, ordering, sign,
 * and trigonometry. Every member simply delegates to the corresponding
 * operation on `Real` or its companion object.
 */
trait RealIsFractional extends Fractional[Real] with Order[Real] with Signed[Real] with Trig[Real] {
  // Sign and ordering.
  def abs(x: Real): Real = x.abs
  def signum(x: Real): Int = x.signum
  override def eqv(x: Real, y: Real): Boolean = x eqv y
  def compare(x: Real, y: Real): Int = x compare y
  // Field operations.
  def zero: Real = Real.zero
  def one: Real = Real.one
  def negate(x: Real): Real = -x
  def plus(x: Real, y: Real): Real = x + y
  override def minus(x: Real, y: Real): Real = x - y
  def times(x: Real, y: Real): Real = x * y
  def gcd(x: Real, y: Real): Real = x gcd y
  def quot(x: Real, y: Real): Real = x /~ y
  def mod(x: Real, y: Real): Real = x % y
  override def reciprocal(x: Real): Real = x.reciprocal
  def div(x: Real, y: Real): Real = x / y
  // Roots and powers.
  override def sqrt(x: Real): Real = x.sqrt
  def nroot(x: Real, k: Int): Real = x.nroot(k)
  def fpow(x: Real, y: Real): Real = x fpow y
  // Trigonometric and exponential functions delegate to the companion.
  def acos(a: Real): Real = Real.acos(a)
  def asin(a: Real): Real = Real.asin(a)
  def atan(a: Real): Real = Real.atan(a)
  def atan2(y: Real, x: Real): Real = Real.atan2(y, x)
  def cos(a: Real): Real = Real.cos(a)
  def cosh(x: Real): Real = Real.cosh(x)
  def e: Real = Real.e
  def exp(x: Real): Real = Real.exp(x)
  def expm1(x: Real): Real = Real.exp(x) - Real.one
  def log(x: Real): Real = Real.log(x)
  def log1p(x: Real): Real = Real.log(Real.one + x)
  def pi: Real = Real.pi
  def sin(x: Real): Real = Real.sin(x)
  def sinh(x: Real): Real = Real.sinh(x)
  def tan(x: Real): Real = Real.tan(x)
  def tanh(x: Real): Real = Real.tanh(x)
  def toDegrees(a: Real): Real = a / (Real.two * Real.pi) * Real(360)
  def toRadians(a: Real): Real = a / Real(360) * (Real.two * Real.pi)
  // Rounding.
  def ceil(x: Real): Real = x.ceil
  def floor(x: Real): Real = x.floor
  def isWhole(x: Real): Boolean = x.isWhole
  def round(x: Real): Real = x.round
  // Conversions out of Real (via the rational approximation).
  def toByte(x: Real): Byte = x.toRational.toByte
  def toInt(x: Real): Int = x.toRational.toInt
  def toShort(x: Real): Short = x.toRational.toShort
  def toLong(x: Real): Long = x.toRational.toLong
  def toFloat(x: Real): Float = x.toRational.toFloat
  def toDouble(x: Real): Double = x.toRational.toDouble
  def toBigInt(x: Real): BigInt = x.toRational.toBigInt
  def toBigDecimal(x: Real): BigDecimal = x.toRational.toBigDecimal
  def toRational(x: Real): Rational = x.toRational
  def toAlgebraic(x: Real): Algebraic = Algebraic(x.toRational) //FIXME
  def toReal(x: Real): Real = x
  def toNumber(x: Real): Number = Number(x.toRational)
  def toString(x: Real): String = x.toString
  def toType[B](x: Real)(implicit ev: ConvertableTo[B]): B =
    ev.fromReal(x)
  // Conversions into Real.
  def fromByte(n: Byte): Real = Real(n)
  def fromShort(n: Short): Real = Real(n)
  def fromFloat(n: Float): Real = Real(n)
  def fromLong(n: Long): Real = Real(n)
  def fromBigInt(n: BigInt): Real = Real(n)
  def fromBigDecimal(n: BigDecimal): Real = Real(n)
  def fromRational(n: Rational): Real = Real(n)
  def fromAlgebraic(n: Algebraic): Real = Real(n.toRational)
  def fromReal(n: Real): Real = n
  def fromType[B](b: B)(implicit ev: ConvertableFrom[B]): Real =
    ev.toReal(b)
}
| lrytz/spire | core/src/main/scala/spire/math/Real.scala | Scala | mit | 16,810 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.builders
import java.util.concurrent.TimeUnit
import monix.execution.{Ack, Cancelable}
import monix.execution.Ack.{Continue, Stop}
import monix.execution.cancelables.MultiAssignCancelable
import monix.reactive.Observable
import monix.reactive.observers.Subscriber
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.{Failure, Success}
/**
 * Emits 0, 1, 2, ... at a fixed rate: each tick is re-armed relative to the
 * start of the previous `onNext`, with the emission time subtracted from the
 * period, so ticks do not drift as long as the consumer keeps up.
 */
private[reactive] final class IntervalFixedRateObservable(initialDelay: FiniteDuration, period: FiniteDuration)
  extends Observable[Long] {
  override def unsafeSubscribeFn(subscriber: Subscriber[Long]): Cancelable = {
    import subscriber.{scheduler => s}
    val o = subscriber
    // Re-assignable handle so cancellation stops whichever tick is pending.
    val task = MultiAssignCancelable()
    val runnable = new Runnable { self =>
      private[this] val periodNanos = period.toNanos
      private[this] var counter = 0L
      private[this] var startedAt = 0L
      // Arms the next tick, compensating for time already spent in the
      // current emission; the delay is clamped at zero when we are late.
      def scheduleNext(): Unit = {
        counter += 1
        val delay = {
          val durationNanos = s.clockMonotonic(NANOSECONDS) - startedAt
          val d = periodNanos - durationNanos
          if (d >= 0L) d else 0L
        }
        // No need to synchronize, since we have a happens-before
        // relationship between scheduleOnce invocations.
        task := s.scheduleOnce(delay, TimeUnit.NANOSECONDS, self)
        ()
      }
      // Waits for an asynchronous ack before re-arming; anything other than
      // Continue stops re-arming, and failures go to the scheduler reporter.
      def asyncScheduleNext(r: Future[Ack]): Unit =
        r.onComplete {
          case Success(ack) =>
            if (ack == Continue) scheduleNext()
          case Failure(ex) =>
            s.reportFailure(ex)
        }
      def run(): Unit = {
        startedAt = s.clockMonotonic(NANOSECONDS)
        val ack = o.onNext(counter)
        if (ack == Continue)
          scheduleNext()
        else if (ack != Stop)
          asyncScheduleNext(ack)
      }
    }
    // First emission: immediately for a non-positive delay, otherwise after
    // the requested initial delay.
    if (initialDelay.length <= 0)
      s.execute(runnable)
    else
      task := s.scheduleOnce(initialDelay.length, initialDelay.unit, runnable)
    task
  }
}
| alexandru/monifu | monix-reactive/shared/src/main/scala/monix/reactive/internal/builders/IntervalFixedRateObservable.scala | Scala | apache-2.0 | 2,658 |
package sorm.driver
import sorm.ddl._
import sorm.jdbc._
import sorm.sql.Sql
/**
 * SORM driver connection for MySQL.
 *
 * All behaviour comes from the standard driver mixins; only the points where
 * MySQL deviates from the common SQL dialect are overridden below.
 */
class Mysql (protected val connection : JdbcConnection)
  extends DriverConnection
  with StdConnection
  with StdTransaction
  with StdAbstractSqlToSql
  with StdQuote
  with StdSqlRendering
  with StdStatement
  with StdQuery
  with StdModify
  with StdCreateTable
  with StdListTables
  with StdDropTables
  with StdDropAllTables
  with StdNow
  {
    // MySQL quotes identifiers with backticks instead of double quotes.
    override protected def quote ( x : String ) = s"`$x`"

    // Unbounded text maps to LONGTEXT; every other column type falls through
    // to the standard DDL rendering.
    override protected def columnTypeDdl ( t : ColumnType )
      = {
        import ColumnType._
        t match {
          case Text => "LONGTEXT"
          case other => super.columnTypeDdl(other)
        }
      }

    // No dialect-specific rewriting of generated statements is required.
    override protected def postProcessSql(sql: Sql.Statement) = sql
  }
| cllu/sorm2 | src/main/scala/sorm/driver/Mysql.scala | Scala | mit | 754 |
package org.elinker.core.api.process
import akka.actor.SupervisorStrategy.Restart
import akka.actor.{Actor, OneForOneStrategy}
import akka.event.Logging
import edu.stanford.nlp.ie.crf.{CRFClassifier, CRFCliqueTree}
import edu.stanford.nlp.ling.CoreAnnotations
import edu.stanford.nlp.util.CoreMap
import org.apache.solr.client.solrj.SolrQuery
import org.apache.solr.client.solrj.impl.HttpSolrClient
import org.apache.solr.client.solrj.util.ClientUtils
import org.apache.solr.common.SolrDocumentList
import org.elinker.core.api.filter.SimilarityFilter
import org.elinker.core.api.java.utils.SPARQLProcessor
import org.elinker.core.api.process.Rest.{EnrichedOutput, RestMessage}
import org.nlp2rdf.NIFWrapper
import org.nlp2rdf.parser.NIFParser
import scala.collection.JavaConversions._
/**
* Actor for enriching raw text or NIF documents with entities, classes and URIs.
*
* @param nerClassifier Stanford CoreNLP CRFClassifier created from a particular NER model
* @param solrURI SOLR instance URI where entity URIs and labels are indexed
* @param sparqlEndpoint SPARQL endpoint URI for retrieving rdf:type of entities
* @author Nilesh Chakraborty <nilesh@nileshc.com>
* @todo Replace all printlns with proper logging.
*/
class EntityLinker[T <: CoreMap](nerClassifier: CRFClassifier[T], solrURI: String, sparqlEndpoint: String) extends Actor {
import EntityLinker._
import context._
implicit val system = context.system
implicit val executionContext = system.dispatcher
val log = Logging(system, getClass)
val solr = new HttpSolrClient(solrURI)
val similarityFilter = new SimilarityFilter(solr)
/**
* Disambiguate an entity mention against a knowledge base. Follows a naive approach currently: query for the mention
* against a SOLR index of URIs, surface forms and their respective usage counts on Wikipedia, and pick the most common
* sense from the candidates.
*
* @param mention String spotted by nerClassifier
* @param datasets DatasetMetadata name (eg. dbpedia)
* @param language Language code, eg. en
* @param maxLinks Maximum number of URIs to fetch (top-N)
* @return Seq of (URI, confidence score)
*/
private def linkToKB(mention: String, datasets: String, language: String, maxLinks: Int): Seq[(String, Double)] = {
// Find links to URIs in datasets by querying SOLR index
def e(s: String) = ClientUtils.escapeQueryChars(s)
val query = new SolrQuery()
val results = new SolrDocumentList
datasets.split(",").foreach(dataset => {
query.set("q", s"""label:"${e(mention)}"~3 AND dataset:"$dataset" AND (language:"$language" OR language:"xx")""")
query.set("sort", "score desc, count desc")
query.set("rows", 10)
val response = solr.query("elinker", query)
val solrResult = response.getResults
if (solrResult.isEmpty) {
Nil
} else {
results.addAll(solrResult.take(maxLinks))
}
})
results.map {
case document =>
val resource = document.get("resource").asInstanceOf[String]
val relevance = 0.0
(resource, relevance)
}
}
/**
* Get a list of spotted entity mentions. Spotting is currently done using StanfordNER and custom NER models.
*
* @param text Raw text
* @return Seq of entity annotations and confidence scores
*/
  def getMentions(text: String): Seq[Result] = {
    // Fetch entity mentions in text (only spotting) along with confidence scores
    (for (sentence <- nerClassifier.classify(text)) yield {
      // println(sentence)
      val p = nerClassifier.documentToDataAndLabels(sentence)
      // The clique tree provides per-token marginal probabilities for each class label.
      val cliqueTree: CRFCliqueTree[String] = nerClassifier.getCliqueTree(p)
      // Running state for the entity phrase currently being assembled:
      // character offsets, token count, label, summed probability and phrase text.
      var currentBegin = 0
      var currentEnd = 0
      var tokensInCurrentEntity = 0
      var currentClassLabel = "O"
      var currentProbs = 0.0
      var entityMention = ""
      // Iterate through NER-tagged words, join consecutive words into phrases and average their individual confidence scores.
      // Each Result is a single named entity with its position in text and averaged confidence score.
      val entities = (for ((doc, i) <- sentence.zipWithIndex;
                           //mention = doc.get(classOf[CoreAnnotations.TextAnnotation]);
                           begin = doc.get(classOf[CoreAnnotations.CharacterOffsetBeginAnnotation]);
                           end = doc.get(classOf[CoreAnnotations.CharacterOffsetEndAnnotation]);
                           mention = text.substring(begin, end);
                           // For token i, pick the label with the highest marginal probability.
                           (classLabel, prob) = (for ((classLabel, j) <- nerClassifier.classIndex.objectsList().zipWithIndex) yield (classLabel, cliqueTree.prob(i, j))).maxBy(_._2)
                           ) yield {
        // println(mention + " " + begin + " " + end)
        // Label changed away from a non-"O" label: the phrase being built is
        // complete, so emit it and reset the accumulator state.
        if (currentClassLabel != classLabel && currentClassLabel != "O") {
          val result = Result(currentClassLabel, entityMention, currentBegin, currentEnd, None, Some(currentProbs / tokensInCurrentEntity))
          currentBegin = 0
          currentEnd = 0
          tokensInCurrentEntity = 0
          currentClassLabel = classLabel
          entityMention = ""
          currentProbs = 0.0
          Seq(result)
        } else {
          // Same label (or "O"): extend the current phrase if the token is entity-tagged.
          if (classLabel != "O") {
            if (tokensInCurrentEntity == 0) currentBegin = begin
            tokensInCurrentEntity += 1
            currentEnd = end
            currentClassLabel = classLabel
            entityMention = if (entityMention.isEmpty) mention else entityMention + " " + mention
            currentProbs += prob
          }
          Nil
        }
      }).flatten
      // Flush a trailing entity that reached the end of the sentence.
      // NOTE(review): `+=` relies on `entities` being a mutable Buffer produced
      // by the implicit Java-collection conversion — confirm the conversion in scope.
      if (tokensInCurrentEntity > 0)
        entities += Result(currentClassLabel, entityMention, currentBegin, currentEnd, None, Some(currentProbs / tokensInCurrentEntity))
      entities
    }).flatten.filter(_.mention.nonEmpty)
  }
/**
*
* @param text Raw text to annotate
* @param language Language code, eg. en
* @param dataset DatasetMetadata name (eg. dbpedia)
* @param linksPerMention max. number of links/URIs to fetch for each spotted entity mention
* @return Seq of entity annotations, URI links and confidence scores
*/
def getEntities(text: String, language: String, dataset: String, linksPerMention: Int): Seq[Result] = {
// Spot entities and link to given dataset
(for (result@Result(entityType, phrase, begin, end, _, Some(score)) <- getMentions(text)) yield {
val links = linkToKB(phrase, dataset, language, linksPerMention)
if (links.isEmpty)
Seq(result)
else
for ((link, _) <- links) yield result.copy(taIdentRef = Some(link))
}).flatten
}
  // Helper for SPARQL lookups against the configured endpoint.
  val sparqlProc = new SPARQLProcessor(sparqlEndpoint)
  // All rdf:type URIs reported by the endpoint for the given resource.
  def getDbpediaTypes(uri: String): Set[String] = sparqlProc.getTypes(uri).toSet
  // Most specific DBpedia-ontology class(es) of the given resource.
  def getMostSpecificTypeFromDBpediaOntology(uri: String): Set[String] = sparqlProc.getMostSpecificTypeFromDBpediaOntology(uri).toSet;
  def receive = {
    // Spot-only request: annotate mentions, no knowledge-base linking.
    case SpotEntities(text, language, outputFormat, prefix, classify, nifVersion) =>
      val results = getMentions(text)
      val nif = new NIFWrapper(prefix, nifVersion, classify)
      nif.context(text)
      nif.entities(results.toList)
      sender ! EnrichedOutput(nif.getNIF(outputFormat))
      // One-shot actor: stop after answering.
      stop(self)
    // Spot + link request: annotate mentions and resolve them against `dataset`.
    case SpotLinkEntities(text, language, outputFormat, dataset, prefix, numLinks, types, classify, linkingMethod: String, nifVersion: String) =>
      val results = linkingMethod match {
        case similarityFilter.SURFACE_FORM_SIMILARITY => getEntities (text, language, dataset, numLinks).filter(similarityFilter.filterByStringSimilarity(_, dataset, language) )
        case _ => getEntities (text, language, dataset, numLinks)
      }
      val nif = new NIFWrapper(prefix, nifVersion, classify)
      nif.context(text)
      results.foreach {
        case Result(entityType, mention, begin, end, taIdentRef, score) =>
          // NOTE(review): `mentionModel` is never read, and the match below has
          // no case for (Some(_), None) or (None, None) — such results would
          // raise a MatchError. Confirm upstream always supplies a score.
          val mentionModel = (taIdentRef, score) match {
            // Single-link mode: keep the spotted entity type in the output.
            case (Some(ref), Some(s)) if numLinks == 1 =>
              if (types.isEmpty || types.intersect(getDbpediaTypes(ref)).nonEmpty) {
                if (classify) {
                  val otherTypes = getDbpediaTypes(ref).toArray
                  val taClassRef = getMostSpecificTypeFromDBpediaOntology(ref).toArray
                  nif.entity(Result.apply(entityType, mention, begin, end, taIdentRef, score), otherTypes, taClassRef)
                } else {
                  nif.entity(Result.apply(entityType, mention, begin, end, taIdentRef, score))
                }
              } else {
                // Entity filtered out by the requested types; nothing added to the model.
                null
              }
            // Multi-link mode: entity type is dropped (null) when not classifying.
            case (Some(ref), Some(s)) =>
              if (types.isEmpty || types.intersect(getDbpediaTypes(ref)).nonEmpty) {
                if (classify) {
                  val otherTypes = getDbpediaTypes(ref).toArray
                  val taClassRef = getMostSpecificTypeFromDBpediaOntology(ref).toArray
                  nif.entity(Result.apply(entityType, mention, begin, end, taIdentRef, score), otherTypes, taClassRef)
                } else {
                  nif.entity(Result.apply(null, mention, begin, end, taIdentRef, score))
                }
              } else {
                null
              }
            // Unlinked mention: emit the annotation without a taIdentRef.
            case (None, Some(score)) =>
              if (classify)
                nif.entity(Result.apply(entityType, mention, begin, end, taIdentRef, Some(score)))
              else
                nif.entity(Result.apply(null, mention, begin, end, taIdentRef, Some(score)))
          }
      }
      // Convert the model to String.
      sender ! EnrichedOutput(nif.getNIF(outputFormat))
      stop(self)
    // Link-only request: the input is already a NIF document with annotations.
    case LinkEntities(nifString, language, outputFormat, dataset, prefix, numLinks, types, linkingMethod, nifVersion) =>
      val document = NIFParser.getDocumentFromNIFString(nifString)
      val text = document.getText
      val annotations = document.getEntities
      val nif = new NIFWrapper(prefix, nifVersion, false)
      nif.context(text)
      // Re-link every pre-annotated mention; annotations with no KB candidates
      // are dropped by the `refs.nonEmpty` guard.
      for (annotation <- annotations;
           begin = annotation.getBeginIndex;
           end = annotation.getEndIndex;
           mention = annotation.getMention;
           refs = linkToKB(mention, dataset, language, numLinks)
           if refs.nonEmpty
           ) {
        for (ref <- refs; uri = ref._1) {
          if (types.isEmpty || types.intersect(getDbpediaTypes(uri)).nonEmpty)
            nif.entity(Result.apply("", mention, begin, end, Option.apply(uri), None))
        }
      }
      // Convert the model to String.
      sender ! EnrichedOutput(nif.getNIF(outputFormat, new NIFParser(nifString)))
      stop(self)
  }
  // Restart each failing child individually; the decider is widened to restart
  // on every failure that OneForOneStrategy hands it.
  override val supervisorStrategy =
    OneForOneStrategy() {
      case e => Restart
    }
}
object EntityLinker {
  // Request: spot mentions AND link them against `dataset`, optionally filtered
  // by `types` and restricted by `linkingMethod`.
  case class SpotLinkEntities(text: String, language: String, outputFormat: String, dataset: String, prefix: String, numLinks: Int, types: Set[String], classify: Boolean, linkingMethod: String, nifVersion: String) extends RestMessage
  // Request: spotting only, no knowledge-base linking.
  case class SpotEntities(text: String, language: String, outputFormat: String, prefix: String, classify: Boolean, nifVersion: String) extends RestMessage
  // Request: link the entities of an existing NIF document (`text` holds the NIF payload).
  case class LinkEntities(text: String, language: String, outputFormat: String, dataset: String, prefix: String, numLinks: Int, types: Set[String], linkingMethod: String, nifVersion: String) extends RestMessage
}
case class Result(entityType: String, mention: String, beginIndex: Int, endIndex: Int, taIdentRef: Option[String], score: Option[Double]) | freme-project/freme-ner | src/main/scala/org/elinker/core/api/process/EntityLinker.scala | Scala | apache-2.0 | 11,565 |
package org.jetbrains.plugins.scala.worksheet.settings.persistent
import com.intellij.openapi.vfs.VirtualFile
import com.intellij.openapi.vfs.newvfs.FileAttribute
import org.jetbrains.plugins.scala.worksheet.settings.WorksheetExternalRunType
import org.jetbrains.plugins.scala.worksheet.utils.FileAttributeUtilCache
/** Persists typed values as string-backed [[FileAttribute]]s on virtual files.
  * Implementors only provide the two string conversions; read/write plumbing
  * is shared here. */
private trait SerializableInFileAttribute[T] {

  /** Serializes a value of `T` into its stored string form. */
  def convertFrom(t: T): String

  /** Deserializes the stored string form back into a `T`. */
  def convertTo(s: String): T

  /** Reads the attribute from `file`, if present, and decodes it. */
  def readAttribute(attr: FileAttribute, file: VirtualFile): Option[T] =
    for (raw <- FileAttributeUtilCache.readAttribute(attr, file)) yield convertTo(raw)

  /** Encodes `t` and stores it as an attribute on `file`. */
  def writeAttribute(attr: FileAttribute, file: VirtualFile, t: T): Unit = {
    val encoded = convertFrom(t)
    FileAttributeUtilCache.writeAttribute(attr, file, encoded)
  }
}
private object SerializableInFileAttribute {
implicit val StringFileAttribute: SerializableInFileAttribute[String] = new SerializableInFileAttribute[String] {
override def convertFrom(t: String): String = t
override def convertTo(s: String): String = s
}
private val EnabledValue = "enabled"
private val DisabledValue = "disabled"
implicit val BooleanFileAttribute: SerializableInFileAttribute[Boolean] = new SerializableInFileAttribute[Boolean] {
override def convertFrom(t: Boolean): String = if (t) EnabledValue else DisabledValue
override def convertTo(s: String): Boolean = s == EnabledValue
}
  // Run types are stored by name; an unknown or renamed value falls back to the
  // default run type rather than failing.
  implicit val ExternalRunTypeAttribute: SerializableInFileAttribute[WorksheetExternalRunType] = new SerializableInFileAttribute[WorksheetExternalRunType] {
    override def convertFrom(t: WorksheetExternalRunType): String = t.getName
    override def convertTo(s: String): WorksheetExternalRunType = WorksheetExternalRunType.findRunTypeByName(s).getOrElse(WorksheetExternalRunType.getDefaultRunType)
  }
} | JetBrains/intellij-scala | scala/worksheet/src/org/jetbrains/plugins/scala/worksheet/settings/persistent/SerializableInFileAttribute.scala | Scala | apache-2.0 | 1,756 |
package com.karasiq.shadowcloud.crypto
import akka.util.ByteString
import com.karasiq.shadowcloud.model.crypto.{SignMethod, SignParameters}
/** One-shot signing/verification interface for a concrete [[SignMethod]]. */
trait SignModule extends CryptoModule {
  // The algorithm this module implements.
  def method: SignMethod
  // Creates fresh (e.g. key-bearing) parameters for this method.
  def createParameters(): SignParameters
  // Produces a signature over the whole payload in one call.
  def sign(data: ByteString, parameters: SignParameters): ByteString
  // Checks `signature` against the whole payload in one call.
  def verify(data: ByteString, signature: ByteString, parameters: SignParameters): Boolean
}
/** Incremental (streaming) counterpart of [[SignModule]]: call `init`, feed
  * chunks via `update`/`process`, then finish with either sign or verify. */
trait SignModuleStreamer extends CryptoModuleStreamer {
  // The parent module this streamer belongs to.
  def module: SignModule
  // Must be called first; `sign = true` for signing, false for verification.
  def init(sign: Boolean, parameters: SignParameters): Unit
  // Feeds one chunk of payload into the running computation.
  def update(data: ByteString): Unit
  // Finishes a verification stream against the supplied signature.
  def finishVerify(signature: ByteString): Boolean
  // Finishes a signing stream, returning the signature bytes.
  def finishSign(): ByteString

  // Streaming adapter: signature bytes are only produced at the end, so each
  // processed chunk yields an empty output.
  override def process(data: ByteString): ByteString = {
    update(data)
    ByteString.empty
  }

  // In the generic streaming API, "finish" means "emit the signature".
  override final def finish(): ByteString = finishSign()
}
/** A sign module that can also operate incrementally via streamers. */
trait StreamSignModule extends SignModule with StreamCryptoModule {
  def createStreamer(): SignModuleStreamer
}
/** Derives the one-shot [[SignModule]] operations from the streaming API:
  * each call creates a throwaway streamer, feeds it the entire payload, and
  * finishes it. */
trait OnlyStreamSignModule extends StreamSignModule {

  /** Signs `data` in one shot via a fresh streamer. */
  def sign(data: ByteString, parameters: SignParameters): ByteString = {
    val signer = createStreamer()
    signer.init(sign = true, parameters)
    val processed = signer.process(data)
    processed ++ signer.finishSign()
  }

  /** Verifies `signature` over `data` in one shot via a fresh streamer. */
  def verify(data: ByteString, signature: ByteString, parameters: SignParameters): Boolean = {
    val verifier = createStreamer()
    verifier.init(sign = false, parameters)
    verifier.process(data)
    verifier.finishVerify(signature)
  }
}
| Karasiq/shadowcloud | crypto/parent/src/main/scala/com/karasiq/shadowcloud/crypto/SignModule.scala | Scala | apache-2.0 | 1,466 |
package com.mesosphere.cosmos.model
import com.mesosphere.universe.{Resource, PackageDetails, Command}
import io.circe.JsonObject
/** Response payload of the package `describe` endpoint.
  *
  * @param `package`        package metadata (name, version, description, ...)
  * @param marathonMustache raw Marathon app-definition Mustache template
  * @param command          optional command definition shipped with the package
  * @param config           optional JSON schema for the package configuration
  * @param resource         optional resource descriptors (images, assets, ...)
  */
case class DescribeResponse(
  `package`: PackageDetails,
  marathonMustache: String,
  command: Option[Command] = None,
  config: Option[JsonObject] = None,
  resource: Option[Resource] = None
)
| movicha/cosmos | cosmos-model/src/main/scala/com/mesosphere/cosmos/model/DescribeResponse.scala | Scala | apache-2.0 | 328 |
package monix.nio.tcp
import java.net.{ InetSocketAddress, StandardSocketOptions }
import java.nio.channels.spi.AsynchronousChannelProvider
import java.nio.channels.{ AsynchronousChannelGroup, AsynchronousServerSocketChannel, AsynchronousSocketChannel, CompletionHandler }
import java.util.concurrent.Executors
import monix.execution.{ Callback, Cancelable, Scheduler }
import scala.concurrent.{ Future, Promise }
import scala.util.control.NonFatal
/**
* An asynchronous channel for stream-oriented listening sockets.
*
* On the JVM this is a wrapper around
* [[https://docs.oracle.com/javase/8/docs/api/java/nio/channels/AsynchronousServerSocketChannel.html java.nio.channels.AsynchronousServerSocketChannel]]
* (class available since Java 7 for doing async I/O on sockets).
*
* @example {{{
* val server = AsyncServerSocketChannel()
* server.bind(new InetSocketAddress(InetAddress.getByName(null), 9000))
*
* val bytes = ByteBuffer.wrap("Hello world!".getBytes("UTF-8"))
* val writeF = server
* .accept()
* .flatMap { conn =>
* val writeF0 = conn.write(bytes, None)
* conn.stopWriting()
* writeF0
* }
* .map { sentLen =>
* server.close()
* sentLen
* }
*
* writeF.onComplete {
* case Success(nr) =>
* println(f"Bytes sent: $nr%d")
*
* case Failure(exc) =>
* println(s"ERR: $exc")
* }
* }}}
*
* @define callbackDesc is the callback to be called with the result, once
* this asynchronous operation is complete
* @define acceptDesc Accepts a connection
* @define bindDesc Binds the channel's socket to a local address and configures the socket to listen for connections
* @define localDesc the local address to bind the socket, or null to bind to an automatically assigned socket address
* @define backlogDesc the maximum number of pending connections. If the backlog parameter has the value 0,
* or a negative value, then an implementation specific default is used.
* @define localAddressDesc Asks the socket address that this channel's socket is bound to
*/
abstract class AsyncServerSocketChannel extends AutoCloseable {
  /**
   * $acceptDesc
   *
   * @param cb $callbackDesc
   */
  def accept(cb: Callback[Throwable, AsyncSocketChannel]): Unit

  /** $acceptDesc
   *
   * Future-based convenience wrapper over the callback variant.
   */
  def accept(): Future[AsyncSocketChannel] = {
    val p = Promise[AsyncSocketChannel]()
    // The promise is completed by the callback-based overload.
    accept(Callback.fromPromise(p))
    p.future
  }

  /**
   * $bindDesc
   *
   * @param local $localDesc
   * @param backlog $backlogDesc
   */
  def bind(local: InetSocketAddress, backlog: Int = 0): Unit

  /** $localAddressDesc */
  def localAddress(): Option[InetSocketAddress]
}
object AsyncServerSocketChannel {
  /**
   * Opens a TCP server-socket channel
   *
   * @param reuseAddress [[java.net.ServerSocket#setReuseAddress]]
   * @param receiveBufferSize [[java.net.Socket#setReceiveBufferSize]] [[java.net.ServerSocket#setReceiveBufferSize]]
   *
   * @param s is the `Scheduler` used for asynchronous computations
   *
   * @return an [[monix.nio.tcp.AsyncServerSocketChannel AsyncServerSocketChannel]] instance for handling connections.
   */
  def apply(
    reuseAddress: Boolean = true,
    receiveBufferSize: Option[Int] = None)(implicit s: Scheduler): AsyncServerSocketChannel = {
    NewIOImplementation(reuseAddress, receiveBufferSize)
  }

  // Channel group shared by every server channel created through this factory.
  private lazy val acg =
    AsynchronousChannelGroup.withCachedThreadPool(Executors.newCachedThreadPool(), -1)

  private final case class NewIOImplementation(
    reuseAddress: Boolean = true,
    receiveBufferSize: Option[Int] = None)(implicit scheduler: Scheduler) extends AsyncServerSocketChannel {

    // Channel construction can fail (e.g. exhausted file descriptors); the
    // failure is kept so later operations can report it instead of crashing.
    private[this] lazy val asynchronousServerSocketChannel: Either[Throwable, AsynchronousServerSocketChannel] = try {
      val server = AsynchronousChannelProvider.provider().openAsynchronousServerSocketChannel(acg)
      server.setOption[java.lang.Boolean](StandardSocketOptions.SO_REUSEADDR, reuseAddress)
      receiveBufferSize.foreach(sz => server.setOption[Integer](StandardSocketOptions.SO_RCVBUF, sz))
      Right(server)
    } catch {
      case NonFatal(exc) =>
        scheduler.reportFailure(exc)
        Left(exc)
    }

    // Bridges Java NIO completion callbacks to monix Callbacks.
    private[this] val acceptHandler: CompletionHandler[AsynchronousSocketChannel, Callback[Throwable, AsyncSocketChannel]] = {
      new CompletionHandler[AsynchronousSocketChannel, Callback[Throwable, AsyncSocketChannel]] {
        override def completed(result: AsynchronousSocketChannel, attachment: Callback[Throwable, AsyncSocketChannel]) =
          attachment.onSuccess(new AsyncSocketChannel.NewIOImplementation(result))
        override def failed(exc: Throwable, attachment: Callback[Throwable, AsyncSocketChannel]) =
          attachment.onError(exc)
      }
    }

    override def accept(cb: Callback[Throwable, AsyncSocketChannel]): Unit = {
      // BUGFIX: previously, a failed channel construction silently dropped the
      // callback (fold(_ => (), ...)), leaving the caller's Future uncompleted
      // forever. Now the stored construction failure is propagated to `cb`.
      asynchronousServerSocketChannel.fold(
        exc => cb.onError(exc),
        s => try s.accept(cb, acceptHandler) catch {
          case NonFatal(exc) =>
            cb.onError(exc)
        })
    }

    override def bind(local: InetSocketAddress, backlog: Int = 0): Unit = {
      // Binding on a failed channel is a no-op; bind errors are reported to
      // the scheduler (callers have no result to observe here).
      asynchronousServerSocketChannel.fold(_ => (), s => try s.bind(local, backlog) catch {
        case NonFatal(ex) =>
          scheduler.reportFailure(ex)
      })
    }

    override def localAddress(): Option[InetSocketAddress] = {
      // None when the channel failed to construct or is not yet bound.
      asynchronousServerSocketChannel.fold(_ => None, c => try Option(c.getLocalAddress).map(_.asInstanceOf[InetSocketAddress]) catch {
        case NonFatal(exc) =>
          scheduler.reportFailure(exc)
          None
      })
    }

    // Idempotent close wrapped in a Cancelable.
    private[this] val cancelable: Cancelable = Cancelable { () =>
      asynchronousServerSocketChannel.fold(_ => (), c => try c.close() catch {
        case NonFatal(exc) =>
          scheduler.reportFailure(exc)
      })
    }

    override def close(): Unit =
      cancelable.cancel()
  }
}
| monix/monix-nio | src/main/scala/monix/nio/tcp/AsyncServerSocketChannel.scala | Scala | apache-2.0 | 5,942 |
package ui
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
/** Serves the static UI assets and redirects bare paths to the UI index page. */
object StaticContent {

  // Routes below the "ui" prefix: redirect "/ui" and "/ui/" to the index page,
  // otherwise serve files straight from the local "ui" directory.
  private val uiRoute =
    pathEnd {
      redirect("ui/index.html", StatusCodes.SeeOther)
    } ~
      pathSingleSlash {
        redirect("index.html", StatusCodes.SeeOther)
      } ~
      getFromDirectory("ui")

  // Top-level route: "/" goes to the UI index, everything under "/ui" is
  // handled by uiRoute. Matching order is identical to the original route.
  val route =
    pathEndOrSingleSlash {
      redirect("ui/index.html", StatusCodes.SeeOther)
    } ~
      pathPrefix("ui")(uiRoute)
}
| nettok/datosgob | web/src/main/scala/ui/StaticContent.scala | Scala | gpl-3.0 | 484 |
/**
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.remote
import akka.actor._
import akka.dispatch.sysmsg._
import akka.event.{ Logging, LoggingAdapter, EventStream }
import akka.event.Logging.Error
import akka.serialization.{ JavaSerializer, Serialization, SerializationExtension }
import akka.pattern.pipe
import scala.util.control.NonFatal
import akka.actor.SystemGuardian.{ TerminationHookDone, TerminationHook, RegisterTerminationHook }
import scala.util.control.Exception.Catcher
import scala.concurrent.{ ExecutionContext, Future }
import scala.concurrent.forkjoin.ThreadLocalRandom
import com.typesafe.config.Config
import akka.ConfigurationException
import akka.dispatch.{ RequiresMessageQueue, UnboundedMessageQueueSemantics }
/**
* INTERNAL API
*/
private[akka] object RemoteActorRefProvider {
  // Bundle of the lazily-created remoting internals, passed to the terminator.
  private case class Internals(transport: RemoteTransport, serialization: Serialization, remoteDaemon: InternalActorRef)
    extends NoSerializationVerificationNeeded

  // States of the RemotingTerminator FSM below.
  sealed trait TerminatorState
  case object Uninitialized extends TerminatorState
  case object Idle extends TerminatorState
  case object WaitDaemonShutdown extends TerminatorState
  case object WaitTransportShutdown extends TerminatorState
  case object Finished extends TerminatorState

  // Coordinates shutdown ordering: first the remote daemon, then the transport.
  private class RemotingTerminator(systemGuardian: ActorRef) extends Actor with FSM[TerminatorState, Option[Internals]]
    with RequiresMessageQueue[UnboundedMessageQueueSemantics] {
    import context.dispatcher

    startWith(Uninitialized, None)

    when(Uninitialized) {
      case Event(i: Internals, _) ⇒
        systemGuardian ! RegisterTerminationHook
        goto(Idle) using Some(i)
    }

    when(Idle) {
      case Event(TerminationHook, Some(internals)) ⇒
        log.info("Shutting down remote daemon.")
        internals.remoteDaemon ! TerminationHook
        goto(WaitDaemonShutdown)
    }

    // TODO: state timeout
    when(WaitDaemonShutdown) {
      case Event(TerminationHookDone, Some(internals)) ⇒
        log.info("Remote daemon shut down; proceeding with flushing remote transports.")
        internals.transport.shutdown() pipeTo self
        goto(WaitTransportShutdown)
    }

    when(WaitTransportShutdown) {
      case Event((), _) ⇒
        log.info("Remoting shut down.")
        systemGuardian ! TerminationHookDone
        stop()
    }

  }

  /*
   * Remoting wraps messages destined to a remote host in a remoting specific envelope: EndpointManager.Send
   * As these wrapped messages might arrive to the dead letters of an EndpointWriter, they need to be unwrapped
   * and handled as dead letters to the original (remote) destination. Without this special case, DeathWatch related
   * functionality breaks, like the special handling of Watch messages arriving to dead letters.
   */
  private class RemoteDeadLetterActorRef(_provider: ActorRefProvider,
                                         _path: ActorPath,
                                         _eventStream: EventStream) extends DeadLetterActorRef(_provider, _path, _eventStream) {
    import EndpointManager.Send

    override def !(message: Any)(implicit sender: ActorRef): Unit = message match {
      case Send(m, senderOption, _, seqOpt) ⇒
        // else ignore: it is a reliably delivered message that might be retried later, and it has not yet deserved
        // the dead letter status
        if (seqOpt.isEmpty) super.!(m)(senderOption.orNull)
      case DeadLetter(Send(m, senderOption, recipient, seqOpt), _, _) ⇒
        // else ignore: it is a reliably delivered message that might be retried later, and it has not yet deserved
        // the dead letter status
        if (seqOpt.isEmpty) super.!(m)(senderOption.orNull)
      case _ ⇒ super.!(message)(sender)
    }

    // Dead letters serialize to the canonical singleton representation.
    @throws(classOf[java.io.ObjectStreamException])
    override protected def writeReplace(): AnyRef = DeadLetterActorRef.serialized
  }
}
/**
* INTERNAL API
* Depending on this class is not supported, only the [[akka.actor.ActorRefProvider]] interface is supported.
*
* Remote ActorRefProvider. Starts up actor on remote node and creates a RemoteActorRef representing it.
*
*/
private[akka] class RemoteActorRefProvider(
  val systemName: String,
  val settings: ActorSystem.Settings,
  val eventStream: EventStream,
  val dynamicAccess: DynamicAccess) extends ActorRefProvider {
  import RemoteActorRefProvider._

  val remoteSettings: RemoteSettings = new RemoteSettings(settings.config)

  override val deployer: Deployer = createDeployer

  /**
   * Factory method to make it possible to override deployer in subclass
   * Creates a new instance every time
   */
  protected def createDeployer: RemoteDeployer = new RemoteDeployer(settings, dynamicAccess)

  // Purely local concerns (guardians, temp actors, dead letters) are delegated
  // to a wrapped LocalActorRefProvider; only dead letters get the remote-aware ref.
  private val local = new LocalActorRefProvider(systemName, settings, eventStream, dynamicAccess, deployer,
    Some(deadLettersPath ⇒ new RemoteDeadLetterActorRef(this, deadLettersPath, eventStream)))

  // Swapped for a remoting-specific logger once init() has run.
  @volatile
  private var _log = local.log
  def log: LoggingAdapter = _log

  override def rootPath: ActorPath = local.rootPath
  override def deadLetters: InternalActorRef = local.deadLetters

  // these are only available after init()
  override def rootGuardian: InternalActorRef = local.rootGuardian
  override def guardian: LocalActorRef = local.guardian
  override def systemGuardian: LocalActorRef = local.systemGuardian
  override def terminationFuture: Future[Unit] = local.terminationFuture
  override def registerTempActor(actorRef: InternalActorRef, path: ActorPath): Unit = local.registerTempActor(actorRef, path)
  override def unregisterTempActor(path: ActorPath): Unit = local.unregisterTempActor(path)
  override def tempPath(): ActorPath = local.tempPath()
  override def tempContainer: VirtualPathContainer = local.tempContainer

  // Populated by init(); accessing transport/serialization/remoteDaemon before
  // init() will NPE by design.
  @volatile private var _internals: Internals = _

  def transport: RemoteTransport = _internals.transport
  def serialization: Serialization = _internals.serialization
  def remoteDaemon: InternalActorRef = _internals.remoteDaemon

  // This actor ensures the ordering of shutdown between remoteDaemon and the transport
  @volatile private var remotingTerminator: ActorRef = _

  @volatile private var remoteWatcher: ActorRef = _
  @volatile private var remoteDeploymentWatcher: ActorRef = _

  // Wires up the remote daemon, serialization and transport, then starts the
  // transport and the watcher actors. Must run before any remote interaction.
  def init(system: ActorSystemImpl): Unit = {
    local.init(system)

    remotingTerminator = system.systemActorOf(
      remoteSettings.configureDispatcher(Props(classOf[RemotingTerminator], local.systemGuardian)),
      "remoting-terminator")

    val internals = Internals(
      remoteDaemon = {
        val d = new RemoteSystemDaemon(
          system,
          local.rootPath / "remote",
          rootGuardian,
          remotingTerminator,
          log,
          untrustedMode = remoteSettings.UntrustedMode)
        local.registerExtraNames(Map(("remote", d)))
        d
      },
      serialization = SerializationExtension(system),
      transport = new Remoting(system, this))

    _internals = internals
    remotingTerminator ! internals

    _log = Logging(eventStream, "RemoteActorRefProvider")

    // this enables reception of remote requests
    transport.start()

    remoteWatcher = createRemoteWatcher(system)
    remoteDeploymentWatcher = createRemoteDeploymentWatcher(system)
  }

  protected def createRemoteWatcher(system: ActorSystemImpl): ActorRef = {
    import remoteSettings._
    val failureDetector = createRemoteWatcherFailureDetector(system)
    system.systemActorOf(
      configureDispatcher(
        RemoteWatcher.props(
          failureDetector,
          heartbeatInterval = WatchHeartBeatInterval,
          unreachableReaperInterval = WatchUnreachableReaperInterval,
          heartbeatExpectedResponseAfter = WatchHeartbeatExpectedResponseAfter)),
      "remote-watcher")
  }

  protected def createRemoteWatcherFailureDetector(system: ExtendedActorSystem): FailureDetectorRegistry[Address] = {
    def createFailureDetector(): FailureDetector =
      FailureDetectorLoader.load(remoteSettings.WatchFailureDetectorImplementationClass, remoteSettings.WatchFailureDetectorConfig, system)

    new DefaultFailureDetectorRegistry(() ⇒ createFailureDetector())
  }

  protected def createRemoteDeploymentWatcher(system: ActorSystemImpl): ActorRef =
    system.systemActorOf(remoteSettings.configureDispatcher(Props[RemoteDeploymentWatcher]), "remote-deployment-watcher")

  // Creates an actor locally or, when the (merged) deployment configuration
  // demands a remote scope, a RemoteActorRef pointing at the remote node.
  def actorOf(system: ActorSystemImpl, props: Props, supervisor: InternalActorRef, path: ActorPath,
              systemService: Boolean, deploy: Option[Deploy], lookupDeploy: Boolean, async: Boolean): InternalActorRef =
    if (systemService) local.actorOf(system, props, supervisor, path, systemService, deploy, lookupDeploy, async)
    else {

      if (!system.dispatchers.hasDispatcher(props.dispatcher))
        throw new ConfigurationException(s"Dispatcher [${props.dispatcher}] not configured for path $path")

      /*
       * This needs to deal with “mangled” paths, which are created by remote
       * deployment, also in this method. The scheme is the following:
       *
       * Whenever a remote deployment is found, create a path on that remote
       * address below “remote”, including the current system’s identification
       * as “sys@host:port” (typically; it will use whatever the remote
       * transport uses). This means that on a path up an actor tree each node
       * change introduces one layer or “remote/scheme/sys@host:port/” within the URI.
       *
       * Example:
       *
       * akka.tcp://sys@home:1234/remote/akka/sys@remote:6667/remote/akka/sys@other:3333/user/a/b/c
       *
       * means that the logical parent originates from “akka.tcp://sys@other:3333” with
       * one child (may be “a” or “b”) being deployed on “akka.tcp://sys@remote:6667” and
       * finally either “b” or “c” being created on “akka.tcp://sys@home:1234”, where
       * this whole thing actually resides. Thus, the logical path is
       * “/user/a/b/c” and the physical path contains all remote placement
       * information.
       *
       * Deployments are always looked up using the logical path, which is the
       * purpose of the lookupRemotes internal method.
       */

      @scala.annotation.tailrec
      def lookupRemotes(p: Iterable[String]): Option[Deploy] = {
        p.headOption match {
          case None           ⇒ None
          case Some("remote") ⇒ lookupRemotes(p.drop(3))
          case Some("user")   ⇒ deployer.lookup(p.drop(1))
          case Some(_)        ⇒ None
        }
      }

      val elems = path.elements
      val lookup =
        if (lookupDeploy)
          elems.head match {
            case "user"   ⇒ deployer.lookup(elems.drop(1))
            case "remote" ⇒ lookupRemotes(elems)
            case _        ⇒ None
          }
        else None

      // Explicit deploy and configured deploy are merged; later entries win.
      val deployment = {
        deploy.toList ::: lookup.toList match {
          case Nil ⇒ Nil
          case l   ⇒ List(l reduce ((a, b) ⇒ b withFallback a))
        }
      }

      Iterator(props.deploy) ++ deployment.iterator reduce ((a, b) ⇒ b withFallback a) match {
        case d @ Deploy(_, _, _, RemoteScope(addr), _, _) ⇒
          if (hasAddress(addr)) {
            // "Remote" deployment to one of our own addresses is just local.
            local.actorOf(system, props, supervisor, path, false, deployment.headOption, false, async)
          } else if (props.deploy.scope == LocalScope) {
            throw new ConfigurationException(s"configuration requested remote deployment for local-only Props at [$path]")
          } else try {
            try {
              // for consistency we check configuration of dispatcher and mailbox locally
              val dispatcher = system.dispatchers.lookup(props.dispatcher)
              system.mailboxes.getMailboxType(props, dispatcher.configurator.config)
            } catch {
              case NonFatal(e) ⇒ throw new ConfigurationException(
                s"configuration problem while creating [$path] with dispatcher [${props.dispatcher}] and mailbox [${props.mailbox}]", e)
            }
            val localAddress = transport.localAddressForRemote(addr)
            val rpath = (RootActorPath(addr) / "remote" / localAddress.protocol / localAddress.hostPort / path.elements).
              withUid(path.uid)
            new RemoteActorRef(transport, localAddress, rpath, supervisor, Some(props), Some(d))
          } catch {
            case NonFatal(e) ⇒ throw new IllegalArgumentException(s"remote deployment failed for [$path]", e)
          }

        case _ ⇒
          local.actorOf(system, props, supervisor, path, systemService, deployment.headOption, false, async)
      }
    }

  @deprecated("use actorSelection instead of actorFor", "2.2")
  def actorFor(path: ActorPath): InternalActorRef = {
    if (hasAddress(path.address)) actorFor(rootGuardian, path.elements)
    else try {
      new RemoteActorRef(transport, transport.localAddressForRemote(path.address),
        path, Nobody, props = None, deploy = None)
    } catch {
      case NonFatal(e) ⇒
        log.error(e, "Error while looking up address [{}]", path.address)
        new EmptyLocalActorRef(this, path, eventStream)
    }
  }

  @deprecated("use actorSelection instead of actorFor", "2.2")
  def actorFor(ref: InternalActorRef, path: String): InternalActorRef = path match {
    case ActorPathExtractor(address, elems) ⇒
      if (hasAddress(address)) actorFor(rootGuardian, elems)
      else {
        val rootPath = RootActorPath(address) / elems
        try {
          new RemoteActorRef(transport, transport.localAddressForRemote(address),
            rootPath, Nobody, props = None, deploy = None)
        } catch {
          case NonFatal(e) ⇒
            log.error(e, "Error while looking up address [{}]", rootPath.address)
            new EmptyLocalActorRef(this, rootPath, eventStream)
        }
      }
    case _ ⇒ local.actorFor(ref, path)
  }

  @deprecated("use actorSelection instead of actorFor", "2.2")
  def actorFor(ref: InternalActorRef, path: Iterable[String]): InternalActorRef =
    local.actorFor(ref, path)

  // Root guardian of this system when the address is ours, otherwise a remote ref.
  def rootGuardianAt(address: Address): ActorRef =
    if (hasAddress(address)) rootGuardian
    else new RemoteActorRef(transport, transport.localAddressForRemote(address),
      RootActorPath(address), Nobody, props = None, deploy = None)

  /**
   * INTERNAL API
   * Called in deserialization of incoming remote messages where the correct local address is known.
   */
  private[akka] def resolveActorRefWithLocalAddress(path: String, localAddress: Address): InternalActorRef = {
    path match {
      case ActorPathExtractor(address, elems) ⇒
        if (hasAddress(address)) local.resolveActorRef(rootGuardian, elems)
        else
          new RemoteActorRef(transport, localAddress, RootActorPath(address) / elems,
            Nobody, props = None, deploy = None)
      case _ ⇒
        log.debug("resolve of unknown path [{}] failed", path)
        deadLetters
    }
  }

  // Resolves a string path to either a local ref, a remote ref, or (on any
  // failure) a dead-letters-like placeholder instead of throwing.
  def resolveActorRef(path: String): ActorRef = path match {
    case ActorPathExtractor(address, elems) ⇒
      if (hasAddress(address)) local.resolveActorRef(rootGuardian, elems)
      else {
        val rootPath = RootActorPath(address) / elems
        try {
          new RemoteActorRef(transport, transport.localAddressForRemote(address),
            rootPath, Nobody, props = None, deploy = None)
        } catch {
          case NonFatal(e) ⇒
            log.error(e, "Error while resolving address [{}]", rootPath.address)
            new EmptyLocalActorRef(this, rootPath, eventStream)
        }
      }
    case _ ⇒
      log.debug("resolve of unknown path [{}] failed", path)
      deadLetters
  }

  def resolveActorRef(path: ActorPath): ActorRef = {
    if (hasAddress(path.address)) local.resolveActorRef(rootGuardian, path.elements)
    else try {
      new RemoteActorRef(transport, transport.localAddressForRemote(path.address),
        path, Nobody, props = None, deploy = None)
    } catch {
      case NonFatal(e) ⇒
        log.error(e, "Error while resolving address [{}]", path.address)
        new EmptyLocalActorRef(this, path, eventStream)
    }
  }

  /**
   * Using (checking out) actor on a specific node.
   */
  def useActorOnNode(ref: ActorRef, props: Props, deploy: Deploy, supervisor: ActorRef): Unit = {
    log.debug("[{}] Instantiating Remote Actor [{}]", rootPath, ref.path)

    // we don’t wait for the ACK, because the remote end will process this command before any other message to the new actor
    // actorSelection can't be used here because then it is not guaranteed that the actor is created
    // before someone can send messages to it
    resolveActorRef(RootActorPath(ref.path.address) / "remote") !
      DaemonMsgCreate(props, deploy, ref.path.toSerializationFormat, supervisor)

    remoteDeploymentWatcher ! RemoteDeploymentWatcher.WatchRemote(ref, supervisor)
  }

  // Maps an address to the externally reachable form, if any.
  def getExternalAddressFor(addr: Address): Option[Address] = {
    addr match {
      case _ if hasAddress(addr)           ⇒ Some(local.rootPath.address)
      case Address(_, _, Some(_), Some(_)) ⇒ try Some(transport.localAddressForRemote(addr)) catch { case NonFatal(_) ⇒ None }
      case _                               ⇒ None
    }
  }

  def getDefaultAddress: Address = transport.defaultAddress

  // True when the address denotes this very actor system (any bound transport address).
  private def hasAddress(address: Address): Boolean =
    address == local.rootPath.address || address == rootPath.address || transport.addresses(address)

  /**
   * Marks a remote system as out of sync and prevents reconnects until the quarantine timeout elapses.
   * @param address Address of the remote system to be quarantined
   * @param uid UID of the remote system, if the uid is not defined it will not be a strong quarantine but
   *   the current endpoint writer will be stopped (dropping system messages) and the address will be gated
   */
  def quarantine(address: Address, uid: Option[Int]): Unit = transport.quarantine(address, uid)

  /**
   * INTERNAL API
   */
  private[akka] def afterSendSystemMessage(message: SystemMessage): Unit =
    message match {
      // Sending to local remoteWatcher relies strong delivery guarantees of local send, i.e.
      // default dispatcher must not be changed to an implementation that defeats that
      case rew: RemoteWatcher.Rewatch ⇒
        remoteWatcher ! RemoteWatcher.RewatchRemote(rew.watchee, rew.watcher)
      case Watch(watchee, watcher)   ⇒ remoteWatcher ! RemoteWatcher.WatchRemote(watchee, watcher)
      case Unwatch(watchee, watcher) ⇒ remoteWatcher ! RemoteWatcher.UnwatchRemote(watchee, watcher)
      case _                         ⇒
    }

}
// Marker scope for refs pointing at actors on other nodes; by definition
// such a ref is never local.
private[akka] trait RemoteRef extends ActorRefScope {
  final def isLocal = false
}
/**
 * INTERNAL API
 * Remote ActorRef that is used when referencing the Actor on a different node than its "home" node.
 * This reference is network-aware (remembers its origin) and immutable.
 */
private[akka] class RemoteActorRef private[akka] (
  remote: RemoteTransport,
  val localAddressToUse: Address,
  val path: ActorPath,
  val getParent: InternalActorRef,
  props: Option[Props],
  deploy: Option[Deploy])
  extends InternalActorRef with RemoteRef {
  // Resolves a relative path against this ref: empty means this ref itself,
  // ".." steps up to the parent, anything else yields a new RemoteActorRef
  // for the extended path (no deployment information attached).
  def getChild(name: Iterator[String]): InternalActorRef = {
    val s = name.toStream
    s.headOption match {
      case None ⇒ this
      case Some("..") ⇒ getParent getChild name
      case _ ⇒ new RemoteActorRef(remote, localAddressToUse, path / s, Nobody, props = None, deploy = None)
    }
  }
  @deprecated("Use context.watch(actor) and receive Terminated(actor)", "2.2") override def isTerminated: Boolean = false
  // Converts send-path failures into published Error events instead of letting
  // them escape to the caller; on interruption the interrupt flag is restored.
  private def handleException: Catcher[Unit] = {
    case e: InterruptedException ⇒
      remote.system.eventStream.publish(Error(e, path.toString, getClass, "interrupted during message send"))
      Thread.currentThread.interrupt()
    case NonFatal(e) ⇒
      remote.system.eventStream.publish(Error(e, path.toString, getClass, "swallowing exception during message send"))
  }
  // System messages additionally notify the provider so that remote
  // watch/unwatch bookkeeping stays consistent.
  def sendSystemMessage(message: SystemMessage): Unit =
    try {
      remote.send(message, None, this)
      provider.afterSendSystemMessage(message)
    } catch handleException
  override def !(message: Any)(implicit sender: ActorRef = Actor.noSender): Unit = {
    if (message == null) throw new InvalidMessageException("Message is null")
    try remote.send(message, Option(sender), this) catch handleException
  }
  override def provider: RemoteActorRefProvider = remote.provider
  // Only refs created for remote deployment carry props/deploy; for those,
  // trigger the actual remote instantiation on start.
  def start(): Unit =
    if (props.isDefined && deploy.isDefined) remote.provider.useActorOnNode(this, props.get, deploy.get, getParent)
  def suspend(): Unit = sendSystemMessage(Suspend())
  def resume(causedByFailure: Throwable): Unit = sendSystemMessage(Resume(causedByFailure))
  def stop(): Unit = sendSystemMessage(Terminate())
  def restart(cause: Throwable): Unit = sendSystemMessage(Recreate(cause))
  // Serializes as a SerializedActorRef so deserialization re-resolves the path.
  @throws(classOf[java.io.ObjectStreamException])
  private def writeReplace(): AnyRef = SerializedActorRef(this)
}
| Fincore/org.spark-project.akka | remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala | Scala | mit | 21,126 |
package filodb.core.metadata
import com.typesafe.config.{Config, ConfigFactory}
import filodb.core._
import filodb.core.query.ColumnInfo
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
// Exercises schema parsing and validation: DataSchema column-spec error
// reporting, PartitionSchema constraints, Schema column lookups, and Schemas
// multi-schema config parsing (hash uniqueness, downsample-schema wiring).
class SchemasSpec extends AnyFunSpec with Matchers {
  import Column.ColumnType._
  import Dataset._
  import NamesTestData._
  describe("DataSchema") {
    it("should return NotNameColonType if column specifiers not name:type format") {
      val resp1 = DataSchema.make("dataset", dataColSpecs :+ "column2", Nil, None, "first")
      resp1.isBad shouldEqual true
      // swap.get extracts the Bad side of the Or for inspection
      resp1.swap.get shouldEqual ColumnErrors(Seq(NotNameColonType("column2")))
    }
    it("should return BadColumnParams if name:type:params portion not valid key=value pairs") {
      val resp1 = DataSchema.make("dataset", dataColSpecs :+ "column2:a:b", Nil, None, "first")
      resp1.isBad shouldEqual true
      resp1.swap.get shouldBe a[ColumnErrors]
      val errors = resp1.swap.get.asInstanceOf[ColumnErrors].errs
      errors should have length 1
      errors.head shouldBe a[BadColumnParams]
    }
    it("should return BadColumnParams if required param config not specified") {
      // histogram columns require a valid boolean "counter" param
      val resp1 = DataSchema.make("dataset", dataColSpecs :+ "h:hist:foo=bar", Nil, None, "first")
      resp1.isBad shouldEqual true
      resp1.swap.get shouldBe a[ColumnErrors]
      val errors = resp1.swap.get.asInstanceOf[ColumnErrors].errs
      errors should have length 1
      errors.head shouldBe a[BadColumnParams]
      val resp2 = DataSchema.make("dataset", dataColSpecs :+ "h:hist:counter=bar", Nil, None, "first")
      resp2.isBad shouldEqual true
      resp2.swap.get shouldBe a[ColumnErrors]
      val errors2 = resp2.swap.get.asInstanceOf[ColumnErrors].errs
      errors2 should have length 1
      errors2.head shouldBe a[BadColumnParams]
    }
    it("should return BadColumnName if illegal chars in column name") {
      val resp1 = DataSchema.make("dataset", Seq("col, umn1:string"), Nil, None, "first")
      resp1.isBad shouldEqual true
      val errors = resp1.swap.get match {
        case ColumnErrors(errs) => errs
        case x => throw new RuntimeException(s"Did not expect $x")
      }
      errors should have length (1)
      errors.head shouldBe a[BadColumnName]
    }
    it("should return BadColumnType if unsupported type specified in column spec") {
      val resp1 = DataSchema.make("dataset", dataColSpecs :+ "part:linkedlist", Nil, None, "first")
      resp1.isBad shouldEqual true
      val errors = resp1.swap.get match {
        case ColumnErrors(errs) => errs
        case x => throw new RuntimeException(s"Did not expect $x")
      }
      errors should have length (1)
      errors.head shouldEqual BadColumnType("linkedlist")
    }
    it("should return BadColumnName if value column not one of other columns") {
      val conf2 = ConfigFactory.parseString("""
                   {
                     columns = ["first:string", "last:string", "age:long"]
                     value-column = "first2"
                     downsamplers = []
                   }""")
      val resp = DataSchema.fromConfig("dataset", conf2)
      resp.isBad shouldEqual true
      resp.swap.get shouldBe a[BadColumnName]
    }
    it("should return multiple column spec errors") {
      val resp1 = DataSchema.make("dataset", Seq("first:str", "age:long", "la(st):int"), Nil, None, "first")
      resp1.isBad shouldEqual true
      val errors = resp1.swap.get match {
        case ColumnErrors(errs) => errs
        case x => throw new RuntimeException(s"Did not expect $x")
      }
      errors should have length (2)
      errors.head shouldEqual BadColumnType("str")
    }
    it("should return BadDownsampler if no downsample-schema given when downsamplers present") {
      val conf2 = ConfigFactory.parseString("""
                   {
                     columns = ["timestamp:ts", "value:double:detectDrops=true"]
                     value-column = "value"
                     downsamplers = [ "tTime(0)", "dMin(1)", "dMax(1)", "dSum(1)", "dCount(1)", "dAvg(1)" ]
                   }""")
      val resp = DataSchema.fromConfig("dataset", conf2)
      resp.isBad shouldEqual true
      resp.swap.get shouldBe a[BadDownsampler]
    }
    it("should return NoTimestampRowKey if non timestamp used for row key / first column") {
      val ds1 = DataSchema.make("dataset", Seq("first:string", "age:long"), Nil, None, "first")
      ds1.isBad shouldEqual true
      ds1.swap.get shouldBe a[NoTimestampRowKey]
    }
    it("should return a valid Dataset when a good specification passed") {
      val conf2 = ConfigFactory.parseString("""
                   {
                     columns = ["timestamp:ts", "code:long", "event:string"]
                     value-column = "event"
                     downsamplers = []
                   }""")
      val schema = DataSchema.fromConfig("dataset", conf2).get
      schema.columns should have length (3)
      schema.columns.map(_.id) shouldEqual Seq(0, 1, 2)
      schema.columns.map(_.columnType) shouldEqual Seq(TimestampColumn, LongColumn, StringColumn)
      schema.timestampColumn.name shouldEqual "timestamp"
    }
  }
  // Shared partition-schema config reused by several tests below
  val partSchemaStr = """{
                      columns = ["tags:map"]
                      predefined-keys = ["_ns", "app", "__name__", "instance", "dc"]
                      options {
                        copyTags = {}
                        ignoreShardKeyColumnSuffixes = {}
                        ignoreTagsOnPartitionKeyHash = ["le"]
                        metricColumn = "__name__"
                        shardKeyColumns = ["__name__", "_ns"]
                      }
                      }"""
  describe("PartitionSchema") {
    it("should allow MapColumns only in last position of partition key") {
      val mapCol = "tags:map"
      // OK: only partition column is map
      val ds1 = PartitionSchema.make(Seq(mapCol), DatasetOptions.DefaultOptions).get
      ds1.columns.map(_.name) should equal (Seq("tags"))
      // OK: last partition column is map
      val ds2 = PartitionSchema.make(Seq("first:string", mapCol), DatasetOptions.DefaultOptions).get
      ds2.columns.map(_.name) should equal (Seq("first", "tags"))
      // Not OK: first partition column is map
      val resp3 = PartitionSchema.make(Seq(mapCol, "first:string"), DatasetOptions.DefaultOptions)
      resp3.isBad shouldEqual true
      resp3.swap.get shouldBe an[IllegalMapColumn]
    }
    it("should return BadColumnType if unsupported type specified in column spec") {
      val resp1 = PartitionSchema.make(Seq("first:strolo"), DatasetOptions.DefaultOptions)
      resp1.isBad shouldEqual true
      val errors = resp1.swap.get match {
        case ColumnErrors(errs) => errs
        case x => throw new RuntimeException(s"Did not expect $x")
      }
      errors should have length (1)
      errors.head shouldEqual BadColumnType("strolo")
    }
    it("should parse config with options") {
      val conf2 = ConfigFactory.parseString(partSchemaStr)
      val schema = PartitionSchema.fromConfig(conf2).get
      schema.columns.map(_.columnType) shouldEqual Seq(MapColumn)
      schema.predefinedKeys shouldEqual Seq("_ns", "app", "__name__", "instance", "dc")
    }
  }
  describe("Schema") {
    it("should return IDs for column names or seq of missing names") {
      val sch = largeDataset.schema
      sch.colIDs("first", "age").get shouldEqual Seq(1, 0)
      // partition columns have IDs starting at PartColStartIndex
      sch.colIDs("league").get shouldEqual Seq(Dataset.PartColStartIndex)
      val resp1 = sch.colIDs("last", "unknown")
      resp1.isBad shouldEqual true
      resp1.swap.get shouldEqual Seq("unknown")
    }
    it("should return ColumnInfos for colIDs") {
      val sch = largeDataset.schema
      val infos = sch.infosFromIDs(Seq(1, 0))
      infos shouldEqual Seq(ColumnInfo("first", StringColumn), ColumnInfo("age", LongColumn))
      val infos2 = sch.infosFromIDs(Seq(PartColStartIndex, 2))
      infos2 shouldEqual Seq(ColumnInfo("league", StringColumn), ColumnInfo("last", StringColumn))
    }
  }
  describe("Schemas") {
    it("should return all errors from every data schema") {
      val conf2 = ConfigFactory.parseString(s"""
        {
          partition-schema $partSchemaStr
          schemas {
            prom1 {
              columns = ["timestamp:tsa", "code:long", "event:string"]
              value-column = "event"
              downsamplers = []
            }
            prom2 {
              columns = ["timestamp:ts", "code:long", "ev. ent:string"]
              value-column = "foo"
              downsamplers = []
            }
            prom3 {
              columns = ["timestamp:ts", "code:long", "event:string"]
              value-column = "event"
              downsamplers = []
            }
          }
        }""")
      val resp = Schemas.fromConfig(conf2)
      resp.isBad shouldEqual true
      val errors = resp.swap.get
      // prom1 (bad type) and prom2 (bad name) fail; prom3 is fine
      errors should have length (2)
      errors.map(_._1).toSet shouldEqual Set("prom1", "prom2")
      errors.map(_._2.getClass).toSet shouldEqual Set(classOf[ColumnErrors])
    }
    it("should detect and report hash conflicts") {
      val conf2 = ConfigFactory.parseString(s"""
        {
          partition-schema $partSchemaStr
          schemas {
            prom {
              columns = ["timestamp:ts", "value:double"]
              value-column = "value"
              downsamplers = []
            }
            prom2 {
              columns = ["timestamp:ts", "value:double"]
              value-column = "timestamp"
              downsamplers = []
            }
          }
        }""")
      val resp = Schemas.fromConfig(conf2)
      resp.isBad shouldEqual true
      val errors = resp.swap.get
      errors.map(_._2.getClass) shouldEqual Seq(classOf[HashConflict])
    }
    it("should detect and report invalid downsample-schema references") {
      val conf2 = ConfigFactory.parseString(s"""
        {
          partition-schema $partSchemaStr
          schemas {
            prom {
              columns = ["timestamp:ts", "value:double"]
              value-column = "value"
              downsamplers = ["tTime(0)", "dMin(1)"]
              downsample-schema = "foo"
            }
            prom-ds-gauge {
              columns = ["timestamp:ts", "min:double"]
              value-column = "timestamp"
              downsamplers = []
            }
          }
        }""")
      val resp = Schemas.fromConfig(conf2)
      resp.isBad shouldEqual true
      val errors = resp.swap.get
      errors.map(_._2.getClass) shouldEqual Seq(classOf[BadDownsampler])
      errors.map(_._1) shouldEqual Seq("prom")
    }
    // Helper: builds a three-schema config around the given partition schema
    def schemasFromString(partConf: String): Config = ConfigFactory.parseString(s"""
      {
        partition-schema $partConf
        schemas {
          prom {
            columns = ["timestamp:ts", "value:double"]
            value-column = "value"
            downsamplers = ["tTime(0)", "dMin(1)"]
            downsample-schema = "prom-ds-gauge"
          }
          prom-ds-gauge {
            columns = ["timestamp:ts", "min:double"]
            value-column = "timestamp"
            downsamplers = []
          }
          hist {
            columns = ["timestamp:ts", "count:long", "sum:long", "h:hist:counter=true"]
            value-column = "h"
            downsamplers = []
          }
        }
      }""")
    it("should return Schemas instance with every schema parsed") {
      val conf2 = schemasFromString(partSchemaStr)
      val schemas = Schemas.fromConfig(conf2).get
      schemas.part.columns.map(_.columnType) shouldEqual Seq(MapColumn)
      schemas.part.columns.map(_.id) shouldEqual Seq(PartColStartIndex)
      schemas.part.predefinedKeys shouldEqual Seq("_ns", "app", "__name__", "instance", "dc")
      Dataset.isPartitionID(schemas.part.columns.head.id) shouldEqual true
      schemas.schemas.keySet shouldEqual Set("prom", "hist", "prom-ds-gauge")
      schemas.schemas("prom").data.columns.map(_.columnType) shouldEqual Seq(TimestampColumn, DoubleColumn)
      schemas.schemas("prom").data.columns.map(_.id) shouldEqual Seq(0, 1)
      schemas.schemas("prom").data.timestampColumn.name shouldEqual "timestamp"
      schemas.schemas("hist").data.columns.map(_.columnType) shouldEqual
        Seq(TimestampColumn, LongColumn, LongColumn, HistogramColumn)
      schemas.schemas("prom").downsample.get shouldEqual schemas.schemas("prom-ds-gauge")
    }
    // Same partition schema but with an explicit metric string column first
    val partSchemaStr2 = """{
                      columns = ["_metric_:string", "tags:map"]
                      predefined-keys = ["_ns", "app", "__name__", "instance", "dc"]
                      options {
                        copyTags = {}
                        ignoreShardKeyColumnSuffixes = {}
                        ignoreTagsOnPartitionKeyHash = ["le"]
                        metricColumn = "_metric_"
                        shardKeyColumns = ["_metric_", "_ns"]
                      }
                      }"""
    it("should return unique schema hashes when partition keys different") {
      val conf1 = schemasFromString(partSchemaStr)
      val conf2 = schemasFromString(partSchemaStr2)
      val schemas1 = Schemas.fromConfig(conf1).get
      val schemas2 = Schemas.fromConfig(conf2).get
      schemas1.schemas("prom").schemaHash should not equal (schemas2.schemas("prom").schemaHash)
      schemas1.schemas("hist").schemaHash should not equal (schemas2.schemas("hist").schemaHash)
    }
    it("should allow column type params to differentiate hash") {
      val conf3 = ConfigFactory.parseString(s"""
        {
          partition-schema $partSchemaStr
          schemas {
            prom {
              columns = ["timestamp:ts", "value:double"]
              value-column = "value"
              downsamplers = ["tTime(0)", "dMin(1)"]
              downsample-schema = "prom2"
            }
            # Everything exactly the same except for column params, which are different
            prom2 {
              columns = ["timestamp:ts", "value:double:detectDrops=true"]
              value-column = "timestamp"
              downsamplers = []
            }
          }
        }""")
      val schemas = Schemas.fromConfig(conf3).get
      schemas.schemas.keySet shouldEqual Set("prom", "prom2")
      schemas.schemas("prom").data.columns.map(_.columnType) shouldEqual Seq(TimestampColumn, DoubleColumn)
      schemas.schemas("prom").data.columns.map(_.id) shouldEqual Seq(0, 1)
      schemas.schemas("prom").data.timestampColumn.name shouldEqual "timestamp"
      schemas.schemas("prom").downsample.get shouldEqual schemas.schemas("prom2")
    }
  }
}
| tuplejump/FiloDB | core/src/test/scala/filodb.core/metadata/SchemasSpec.scala | Scala | apache-2.0 | 15,812 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.rules.dataSet
import org.apache.calcite.plan.volcano.RelSubset
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.calcite.rel.core._
import org.apache.flink.table.plan.nodes.FlinkConventions
import org.apache.flink.table.plan.nodes.dataset.DataSetSingleRowJoin
import org.apache.flink.table.plan.nodes.logical.FlinkLogicalJoin
/**
 * Rule that converts a [[FlinkLogicalJoin]] into a [[DataSetSingleRowJoin]] when
 * one join input is guaranteed to produce at most a single row (a global
 * aggregation, possibly wrapped in projections, filters or calcs).
 */
class DataSetSingleRowJoinRule
  extends ConverterRule(
    classOf[FlinkLogicalJoin],
    FlinkConventions.LOGICAL,
    FlinkConventions.DATASET,
    "DataSetSingleRowJoinRule") {

  /**
   * Matches inner joins where either side is single-row, left outer joins whose
   * right side is single-row, and right outer joins whose left side is single-row.
   */
  override def matches(call: RelOptRuleCall): Boolean = {
    val join = call.rel(0).asInstanceOf[FlinkLogicalJoin]
    join.getJoinType match {
      case JoinRelType.INNER if isSingleRow(join.getLeft) || isSingleRow(join.getRight) => true
      case JoinRelType.LEFT if isSingleRow(join.getRight) => true
      case JoinRelType.RIGHT if isSingleRow(join.getLeft) => true
      case _ => false
    }
  }

  /**
   * Recursively checks if a [[RelNode]] returns at most a single row.
   * Input must be a global aggregation possibly followed by projections or filters.
   */
  private def isSingleRow(node: RelNode): Boolean = {
    node match {
      case ss: RelSubset => isSingleRow(ss.getOriginal)
      case lp: Project => isSingleRow(lp.getInput)
      case lf: Filter => isSingleRow(lf.getInput)
      case lc: Calc => isSingleRow(lc.getInput)
      // an aggregate with no grouping columns produces exactly one row
      case la: Aggregate => la.getGroupSet.isEmpty
      case _ => false
    }
  }

  /** Converts the matched logical join into a [[DataSetSingleRowJoin]]. */
  override def convert(rel: RelNode): RelNode = {
    val join = rel.asInstanceOf[FlinkLogicalJoin]
    val traitSet = rel.getTraitSet.replace(FlinkConventions.DATASET)
    val dataSetLeftNode = RelOptRule.convert(join.getLeft, FlinkConventions.DATASET)
    val dataSetRightNode = RelOptRule.convert(join.getRight, FlinkConventions.DATASET)
    // tells the physical join which input is the (broadcastable) single row
    val leftIsSingle = isSingleRow(join.getLeft)
    new DataSetSingleRowJoin(
      rel.getCluster,
      traitSet,
      dataSetLeftNode,
      dataSetRightNode,
      leftIsSingle,
      rel.getRowType,
      join.getCondition,
      join.getRowType,
      join.getJoinType,
      "DataSetSingleRowJoinRule")
  }
}
object DataSetSingleRowJoinRule {
  /** Singleton instance used when registering this rule with the planner. */
  val INSTANCE: RelOptRule = new DataSetSingleRowJoinRule
}
| GJL/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/rules/dataSet/DataSetSingleRowJoinRule.scala | Scala | apache-2.0 | 3,265 |
package serverless
import com.amazonaws.services.lambda.runtime.events.{APIGatewayV2ProxyRequestEvent, APIGatewayV2ProxyResponseEvent}
import com.amazonaws.services.lambda.runtime.{Context, RequestHandler}
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
/** JSON serialization helper backed by a single shared Jackson mapper. */
object JsonUtil {
  // Configured once with the Scala module so case classes and Scala
  // collections serialize naturally.
  val mapper = new ObjectMapper() with ScalaObjectMapper
  mapper.registerModule(DefaultScalaModule)
  /** Serializes any value to its JSON string representation. */
  def toJson(value: Any): String = mapper.writeValueAsString(value)
}
/** AWS Lambda entry point: replies 200 with a fixed greeting payload. */
class Handler extends RequestHandler[APIGatewayV2ProxyRequestEvent, APIGatewayV2ProxyResponseEvent] {
  def handleRequest(input: APIGatewayV2ProxyRequestEvent, context: Context): APIGatewayV2ProxyResponseEvent = {
    val payload = Map("message" -> "Go Serverless v1.0! Your function executed successfully!")
    val response = new APIGatewayV2ProxyResponseEvent()
    response.setStatusCode(200)
    response.setBody(JsonUtil.toJson(payload))
    response
  }
}
| dherault/serverless-offline | tests/integration/scala/src/main/scala/serverless/Handler.scala | Scala | mit | 1,032 |
// Copyright 2014 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cba.omnia.edge
package source.memory
import cascading.tap.Tap
import cascading.tuple.Tuple
import cascading.tuple.Fields
import cascading.scheme.NullScheme
import com.twitter.scalding._
import java.io.{InputStream,OutputStream}
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapred.OutputCollector
import org.apache.hadoop.mapred.RecordReader
/**
* This is an implementation of an iterable backed scalding source.
*
* This works in much the same way as does the standard IterableSource
* but we can distribute the contents across a fixed number of mappers.
* This is useful for explosive map jobs that use small inputs to
* produce largs outputs.
*/
case class DistributableIterableSource[T](@transient iter: Iterable[T], mappers: Int, inFields: Fields = Fields.NONE)(
  implicit set: TupleSetter[T], conv: TupleConverter[T]) extends Source with Mappable[T] {
  override def converter[U >: T] =
    TupleConverter.asSuperConverter[T, U](conv)
  // If no explicit fields were given, synthesize positional int fields 0..arity-1.
  def fields =
    if (inFields.isNone && set.arity > 0)
      Dsl.intFields(0 until set.arity)
    else
      inFields
  // The iterable converted to cascading Tuples via the implicit setter.
  def data =
    iter.map(set(_))
  def buffer =
    data.toBuffer
  // Hadoop tap that spreads the in-memory tuples across `mappers` mappers.
  def tap: Tap[_,_,_] =
    new DistributableMemorySourceTap(buffer, fields, mappers)
  // Plain in-memory tap for local/test modes (no distribution).
  def memory: Tap[_,_,_] =
    new MemoryTap[InputStream, OutputStream](new NullScheme(fields, fields), buffer)
  // Read-only source: any attempt to write is an error. Hadoop modes use the
  // distributable tap, local/test modes the simple memory tap.
  override def createTap(readOrWrite: AccessMode)(implicit mode: Mode): Tap[_,_,_] =
    if (readOrWrite == Write)
      sys.error("Error using read only source.")
    else
      mode match {
        case Hdfs(_, _) => tap
        case HadoopTest(_,_) => tap
        case Local(_) => memory
        case Test(_) => memory
        case _ => sys.error(s"Unknown mode <$mode>")
      }
}
| CommBank/edge | src/main/scala/com/cba/omnia/edge/source/memory/DistributableIterableSource.scala | Scala | apache-2.0 | 2,390 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils
import java.io._
import java.nio._
import java.nio.channels._
import java.util.Random
import java.util.Properties
import charset.Charset
import org.apache.kafka.common.protocol.SecurityProtocol
import org.apache.kafka.common.utils.Utils._
import collection.mutable.ListBuffer
import org.I0Itec.zkclient.ZkClient
import kafka.server._
import kafka.producer._
import kafka.message._
import kafka.api._
import kafka.cluster.Broker
import kafka.consumer.{ConsumerTimeoutException, KafkaStream, ConsumerConfig}
import kafka.serializer.{StringEncoder, DefaultEncoder, Encoder}
import kafka.common.TopicAndPartition
import kafka.admin.AdminUtils
import kafka.producer.ProducerConfig
import kafka.log._
import junit.framework.AssertionFailedError
import junit.framework.Assert._
import org.apache.kafka.clients.producer.KafkaProducer
import scala.collection.Map
import org.apache.kafka.clients.consumer.KafkaConsumer
/**
* Utility functions to help with testing
*/
object TestUtils extends Logging {
  // Base directory for all temporary files/dirs created by these helpers.
  val IoTmpDir = System.getProperty("java.io.tmpdir")
  val Letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
  val Digits = "0123456789"
  val LettersAndDigits = Letters + Digits
  /* A consistent random number generator to make tests repeatable */
  val seededRandom = new Random(192348092834L)
  // Unseeded RNG for cases where repeatability is not needed (e.g. temp names).
  val random = new Random()
  /* 0 gives a random port; you can then retrieve the assigned port from the Socket object. */
  val RandomPort = 0
  /** Port to use for unit tests that mock/don't require a real ZK server. */
  val MockZkPort = 1
  /** Zookeeper connection string to use for unit tests that mock/don't require a real ZK server. */
  val MockZkConnect = "127.0.0.1:" + MockZkPort
/**
* Create a temporary directory
*/
def tempDir(): File = {
val f = new File(IoTmpDir, "kafka-" + random.nextInt(1000000))
f.mkdirs()
f.deleteOnExit()
Runtime.getRuntime().addShutdownHook(new Thread() {
override def run() = {
CoreUtils.rm(f)
}
})
f
}
  /** Generates a unique-ish topic name for tests. */
  def tempTopic(): String = "testTopic" + random.nextInt(1000000)
/**
* Create a temporary relative directory
*/
def tempRelativeDir(parent: String): File = {
val f = new File(parent, "kafka-" + random.nextInt(1000000))
f.mkdirs()
f.deleteOnExit()
f
}
/**
* Create a temporary file
*/
def tempFile(): File = {
val f = File.createTempFile("kafka", ".tmp")
f.deleteOnExit()
f
}
/**
* Create a temporary file and return an open file channel for this file
*/
def tempChannel(): FileChannel = new RandomAccessFile(tempFile(), "rw").getChannel()
  /**
   * Create a kafka server instance with appropriate test settings
   * USING THIS IS A SIGN YOU ARE NOT WRITING A REAL UNIT TEST
   * @param config The configuration of the server
   * @param time time abstraction used by the broker (real system time by default)
   * @return the started server; the caller is responsible for shutting it down
   */
  def createServer(config: KafkaConfig, time: Time = SystemTime): KafkaServer = {
    val server = new KafkaServer(config, time)
    server.startup()
    server
  }
/**
* Create a test config for the given node id
*/
def createBrokerConfigs(numConfigs: Int,
zkConnect: String,
enableControlledShutdown: Boolean = true,
enableDeleteTopic: Boolean = false): Seq[Properties] = {
(0 until numConfigs).map(node => createBrokerConfig(node, zkConnect, enableControlledShutdown, enableDeleteTopic))
}
  /** Builds a comma-separated host:port broker list for the given servers. */
  def getBrokerListStrFromServers(servers: Seq[KafkaServer]): String = {
    servers.map(s => formatAddress(s.config.hostName, s.boundPort())).mkString(",")
  }
/**
* Create a test config for the given node id
*/
def createBrokerConfig(nodeId: Int, zkConnect: String,
enableControlledShutdown: Boolean = true,
enableDeleteTopic: Boolean = false,
port: Int = RandomPort): Properties = {
val props = new Properties
if (nodeId >= 0) props.put("broker.id", nodeId.toString)
props.put("listeners", "PLAINTEXT://localhost:"+port.toString)
props.put("log.dir", TestUtils.tempDir().getAbsolutePath)
props.put("zookeeper.connect", zkConnect)
props.put("replica.socket.timeout.ms", "1500")
props.put("controller.socket.timeout.ms", "1500")
props.put("controlled.shutdown.enable", enableControlledShutdown.toString)
props.put("delete.topic.enable", enableDeleteTopic.toString)
props.put("controlled.shutdown.retry.backoff.ms", "100")
props
}
  /**
   * Create a topic in zookeeper.
   * Wait until the leader is elected and the metadata is propagated to all brokers.
   * Return the leader for each partition.
   * @return map of partition id to the elected leader's broker id (None if no leader)
   */
  def createTopic(zkClient: ZkClient,
                  topic: String,
                  numPartitions: Int = 1,
                  replicationFactor: Int = 1,
                  servers: Seq[KafkaServer],
                  topicConfig: Properties = new Properties) : scala.collection.immutable.Map[Int, Option[Int]] = {
    // create topic
    AdminUtils.createTopic(zkClient, topic, numPartitions, replicationFactor, topicConfig)
    // wait until the update metadata request for new topic reaches all servers
    (0 until numPartitions).map { case i =>
      TestUtils.waitUntilMetadataIsPropagated(servers, topic, i)
      i -> TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, i)
    }.toMap
  }
  /**
   * Create a topic in zookeeper using a customized replica assignment.
   * Wait until the leader is elected and the metadata is propagated to all brokers.
   * Return the leader for each partition.
   * @return map of partition id to the elected leader's broker id (None if no leader)
   */
  def createTopic(zkClient: ZkClient, topic: String, partitionReplicaAssignment: collection.Map[Int, Seq[Int]],
                  servers: Seq[KafkaServer]) : scala.collection.immutable.Map[Int, Option[Int]] = {
    // create topic
    AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, partitionReplicaAssignment)
    // wait until the update metadata request for new topic reaches all servers
    partitionReplicaAssignment.keySet.map { case i =>
      TestUtils.waitUntilMetadataIsPropagated(servers, topic, i)
      i -> TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, i)
    }.toMap
  }
/**
* Create a test config for a consumer
*/
def createConsumerProperties(zkConnect: String, groupId: String, consumerId: String,
consumerTimeout: Long = -1): Properties = {
val props = new Properties
props.put("zookeeper.connect", zkConnect)
props.put("group.id", groupId)
props.put("consumer.id", consumerId)
props.put("consumer.timeout.ms", consumerTimeout.toString)
props.put("zookeeper.session.timeout.ms", "6000")
props.put("zookeeper.sync.time.ms", "200")
props.put("auto.commit.interval.ms", "1000")
props.put("rebalance.max.retries", "4")
props.put("auto.offset.reset", "smallest")
props.put("num.consumer.fetchers", "2")
props
}
  /**
   * Wrap the message in a message set
   * @param payload The bytes of the message
   * @param codec compression codec applied to the set (none by default)
   * @param key optional message key; null means the message has no key
   */
  def singleMessageSet(payload: Array[Byte], codec: CompressionCodec = NoCompressionCodec, key: Array[Byte] = null) =
    new ByteBufferMessageSet(compressionCodec = codec, messages = new Message(payload, key))
/**
* Generate an array of random bytes
* @param numBytes The size of the array
*/
def randomBytes(numBytes: Int): Array[Byte] = {
val bytes = new Array[Byte](numBytes)
seededRandom.nextBytes(bytes)
bytes
}
/**
* Generate a random string of letters and digits of the given length
* @param len The length of the string
* @return The random string
*/
def randomString(len: Int): String = {
val b = new StringBuilder()
for(i <- 0 until len)
b.append(LettersAndDigits.charAt(seededRandom.nextInt(LettersAndDigits.length)))
b.toString
}
  /**
   * Check that the buffer content from buffer.position() to buffer.limit() is equal
   * Uses absolute gets, so neither buffer's position is modified.
   */
  def checkEquals(b1: ByteBuffer, b2: ByteBuffer) {
    assertEquals("Buffers should have equal length", b1.limit - b1.position, b2.limit - b2.position)
    for(i <- 0 until b1.limit - b1.position)
      assertEquals("byte " + i + " byte not equal.", b1.get(b1.position + i), b2.get(b1.position + i))
  }
  /**
   * Throw an exception if the two iterators are of differing lengths or contain
   * different messages on their Nth element
   * Note: both iterators are consumed by this check.
   */
  def checkEquals[T](expected: Iterator[T], actual: Iterator[T]) {
    var length = 0
    while(expected.hasNext && actual.hasNext) {
      length += 1
      assertEquals(expected.next, actual.next)
    }
    // check if the expected iterator is longer
    if (expected.hasNext) {
      var length1 = length;
      while (expected.hasNext) {
        expected.next
        length1 += 1
      }
      // assertFalse(msg, true) fails unconditionally; the message carries both lengths
      assertFalse("Iterators have uneven length-- first has more: "+length1 + " > " + length, true);
    }
    // check if the actual iterator was longer
    if (actual.hasNext) {
      var length2 = length;
      while (actual.hasNext) {
        actual.next
        length2 += 1
      }
      assertFalse("Iterators have uneven length-- second has more: "+length2 + " > " + length, true);
    }
  }
/**
* Throw an exception if an iterable has different length than expected
*
*/
def checkLength[T](s1: Iterator[T], expectedLength:Int) {
var n = 0
while (s1.hasNext) {
n+=1
s1.next
}
assertEquals(expectedLength, n)
}
  /**
   * Throw an exception if the two iterators are of differing lengths or contain
   * different messages on their Nth element
   * Note: both iterators are consumed by this check.
   */
  def checkEquals[T](s1: java.util.Iterator[T], s2: java.util.Iterator[T]) {
    while(s1.hasNext && s2.hasNext)
      assertEquals(s1.next, s2.next)
    assertFalse("Iterators have uneven length--first has more", s1.hasNext)
    assertFalse("Iterators have uneven length--second has more", s2.hasNext)
  }
def stackedIterator[T](s: Iterator[T]*): Iterator[T] = {
new Iterator[T] {
var cur: Iterator[T] = null
val topIterator = s.iterator
def hasNext() : Boolean = {
while (true) {
if (cur == null) {
if (topIterator.hasNext)
cur = topIterator.next
else
return false
}
if (cur.hasNext)
return true
cur = null
}
// should never reach her
throw new RuntimeException("should not reach here")
}
def next() : T = cur.next
}
}
/**
* Create a hexidecimal string for the given bytes
*/
def hexString(bytes: Array[Byte]): String = hexString(ByteBuffer.wrap(bytes))
/**
* Create a hexidecimal string for the given bytes
*/
def hexString(buffer: ByteBuffer): String = {
val builder = new StringBuilder("0x")
for(i <- 0 until buffer.limit)
builder.append(String.format("%x", Integer.valueOf(buffer.get(buffer.position + i))))
builder.toString
}
  /**
   * Create a producer with a few pre-configured properties.
   * If certain properties need to be overridden, they can be provided in producerProps.
   *
   * @param brokerList   "host:port,host:port" list used as metadata.broker.list
   * @param encoder      value serializer class name
   * @param keyEncoder   key serializer class name
   * @param partitioner  partitioner class name
   * @param producerProps extra properties; applied before the serializer/partitioner
   *                      settings, so those three always win
   */
  def createProducer[K, V](brokerList: String,
                           encoder: String = classOf[DefaultEncoder].getName,
                           keyEncoder: String = classOf[DefaultEncoder].getName,
                           partitioner: String = classOf[DefaultPartitioner].getName,
                           producerProps: Properties = null): Producer[K, V] = {
    val props: Properties = getProducerConfig(brokerList)
    //override any explicitly specified properties
    if (producerProps != null)
      props.putAll(producerProps)
    props.put("serializer.class", encoder)
    props.put("key.serializer.class", keyEncoder)
    props.put("partitioner.class", partitioner)
    new Producer[K, V](new ProducerConfig(props))
  }
  /**
   * Create a (new) producer with a few pre-configured properties.
   * Uses byte-array serializers for both key and value; callers control
   * acks/retries/buffering via the parameters.
   */
  def createNewProducer(brokerList: String,
                        acks: Int = -1,
                        metadataFetchTimeout: Long = 3000L,
                        blockOnBufferFull: Boolean = true,
                        bufferSize: Long = 1024L * 1024L,
                        retries: Int = 0,
                        lingerMs: Long = 0) : KafkaProducer[Array[Byte],Array[Byte]] = {
    import org.apache.kafka.clients.producer.ProducerConfig
    val producerProps = new Properties()
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
    producerProps.put(ProducerConfig.ACKS_CONFIG, acks.toString)
    producerProps.put(ProducerConfig.METADATA_FETCH_TIMEOUT_CONFIG, metadataFetchTimeout.toString)
    producerProps.put(ProducerConfig.BLOCK_ON_BUFFER_FULL_CONFIG, blockOnBufferFull.toString)
    producerProps.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferSize.toString)
    producerProps.put(ProducerConfig.RETRIES_CONFIG, retries.toString)
    // short, fixed backoffs keep tests fast while still exercising retry paths
    producerProps.put(ProducerConfig.RETRY_BACKOFF_MS_CONFIG, "100")
    producerProps.put(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG, "200")
    producerProps.put(ProducerConfig.LINGER_MS_CONFIG, lingerMs.toString)
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer")
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer")
    new KafkaProducer[Array[Byte],Array[Byte]](producerProps)
  }
  /**
   * Create a new consumer with a few pre-configured properties.
   * Uses byte-array deserializers for both key and value.
   */
  def createNewConsumer(brokerList: String,
                        groupId: String,
                        autoOffsetReset: String = "earliest",
                        partitionFetchSize: Long = 4096L) : KafkaConsumer[Array[Byte],Array[Byte]] = {
    import org.apache.kafka.clients.consumer.ConsumerConfig
    val consumerProps= new Properties()
    consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
    consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId)
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset)
    consumerProps.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, partitionFetchSize.toString)
    // short backoffs to keep tests fast
    consumerProps.put(ConsumerConfig.RETRY_BACKOFF_MS_CONFIG, "100")
    consumerProps.put(ConsumerConfig.RECONNECT_BACKOFF_MS_CONFIG, "200")
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer")
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer")
    new KafkaConsumer[Array[Byte],Array[Byte]](consumerProps)
  }
/**
* Create a default producer config properties map with the given metadata broker list
*/
def getProducerConfig(brokerList: String): Properties = {
val props = new Properties()
props.put("metadata.broker.list", brokerList)
props.put("message.send.max.retries", "5")
props.put("retry.backoff.ms", "1000")
props.put("request.timeout.ms", "2000")
props.put("request.required.acks", "-1")
props.put("send.buffer.bytes", "65536")
props.put("connect.timeout.ms", "100000")
props.put("reconnect.interval", "10000")
props
}
def getSyncProducerConfig(port: Int): Properties = {
val props = new Properties()
props.put("host", "localhost")
props.put("port", port.toString)
props.put("request.timeout.ms", "500")
props.put("request.required.acks", "1")
props.put("serializer.class", classOf[StringEncoder].getName)
props
}
def updateConsumerOffset(config : ConsumerConfig, path : String, offset : Long) = {
val zkClient = ZkUtils.createZkClient(config.zkConnect, config.zkSessionTimeoutMs, config.zkConnectionTimeoutMs)
ZkUtils.updatePersistentPath(zkClient, path, offset.toString)
}
  /**
   * Adapt an iterator of MessageAndOffset into an iterator over just the
   * underlying messages. Lazily pulls from `iter` as it is advanced.
   */
  def getMessageIterator(iter: Iterator[MessageAndOffset]): Iterator[Message] = {
    new IteratorTemplate[Message] {
      override def makeNext(): Message = {
        if (iter.hasNext)
          iter.next.message
        else
          allDone()
      }
    }
  }
  /**
   * Register brokers with the given ids (all on localhost:6667) in ZooKeeper
   * and return their descriptors.
   */
  def createBrokersInZk(zkClient: ZkClient, ids: Seq[Int]): Seq[Broker] = {
    val brokers = ids.map(id => new Broker(id, "localhost", 6667, SecurityProtocol.PLAINTEXT))
    brokers.foreach(b => ZkUtils.registerBrokerInZk(zkClient, b.id, "localhost", 6667, b.endPoints, 6000, jmxPort = -1))
    brokers
  }
def deleteBrokersInZk(zkClient: ZkClient, ids: Seq[Int]): Seq[Broker] = {
val brokers = ids.map(id => new Broker(id, "localhost", 6667, SecurityProtocol.PLAINTEXT))
brokers.foreach(b => ZkUtils.deletePath(zkClient, ZkUtils.BrokerIdsPath + "/" + b))
brokers
}
def getMsgStrings(n: Int): Seq[String] = {
val buffer = new ListBuffer[String]
for (i <- 0 until n)
buffer += ("msg" + i)
buffer
}
  /**
   * Create a wired format request based on simple basic information
   */
  // Convenience wrapper over produceRequestWithAcks for a single
  // topic/partition pair.
  def produceRequest(topic: String,
                     partition: Int,
                     message: ByteBufferMessageSet,
                     acks: Int = SyncProducerConfig.DefaultRequiredAcks,
                     timeout: Int = SyncProducerConfig.DefaultAckTimeoutMs,
                     correlationId: Int = 0,
                     clientId: String = SyncProducerConfig.DefaultClientId): ProducerRequest = {
    produceRequestWithAcks(Seq(topic), Seq(partition), message, acks, timeout, correlationId, clientId)
  }
  /**
   * Build a ProducerRequest carrying the same message set for the cartesian
   * product of the given topics and partitions.
   */
  def produceRequestWithAcks(topics: Seq[String],
                             partitions: Seq[Int],
                             message: ByteBufferMessageSet,
                             acks: Int = SyncProducerConfig.DefaultRequiredAcks,
                             timeout: Int = SyncProducerConfig.DefaultAckTimeoutMs,
                             correlationId: Int = 0,
                             clientId: String = SyncProducerConfig.DefaultClientId): ProducerRequest = {
    val data = topics.flatMap(topic =>
      partitions.map(partition => (TopicAndPartition(topic, partition), message))
    )
    new ProducerRequest(correlationId, clientId, acks.toShort, timeout, collection.mutable.Map(data:_*))
  }
  /**
   * Force the given brokers to be leaders for the given partitions of `topic`
   * by writing leader-and-ISR state directly into ZooKeeper.
   * Errors per partition are logged and swallowed so the remaining partitions
   * are still processed.
   */
  def makeLeaderForPartition(zkClient: ZkClient, topic: String,
                             leaderPerPartitionMap: scala.collection.immutable.Map[Int, Int],
                             controllerEpoch: Int) {
    leaderPerPartitionMap.foreach
    {
      leaderForPartition => {
        val partition = leaderForPartition._1
        val leader = leaderForPartition._2
        try{
          val currentLeaderAndIsrOpt = ZkUtils.getLeaderAndIsrForPartition(zkClient, topic, partition)
          var newLeaderAndIsr: LeaderAndIsr = null
          if(currentLeaderAndIsrOpt == None)
            newLeaderAndIsr = new LeaderAndIsr(leader, List(leader))
          else{
            // NOTE: mutates the existing LeaderAndIsr in place, bumping the
            // leader epoch and zk version as a real leader change would
            newLeaderAndIsr = currentLeaderAndIsrOpt.get
            newLeaderAndIsr.leader = leader
            newLeaderAndIsr.leaderEpoch += 1
            newLeaderAndIsr.zkVersion += 1
          }
          ZkUtils.updatePersistentPath(zkClient, ZkUtils.getTopicPartitionLeaderAndIsrPath(topic, partition),
            ZkUtils.leaderAndIsrZkData(newLeaderAndIsr, controllerEpoch))
        } catch {
          case oe: Throwable => error("Error while electing leader for partition [%s,%d]".format(topic, partition), oe)
        }
      }
    }
  }
  /**
   * If neither oldLeaderOpt nor newLeaderOpt is defined, wait until the leader of a partition is elected.
   * If oldLeaderOpt is defined, it waits until the new leader is different from the old leader.
   * If newLeaderOpt is defined, it waits until the new leader becomes the expected new leader.
   * @return The new leader or assertion failure if timeout is reached.
   */
  def waitUntilLeaderIsElectedOrChanged(zkClient: ZkClient, topic: String, partition: Int, timeoutMs: Long = 5000L,
                                        oldLeaderOpt: Option[Int] = None, newLeaderOpt: Option[Int] = None): Option[Int] = {
    require(!(oldLeaderOpt.isDefined && newLeaderOpt.isDefined), "Can't define both the old and the new leader")
    val startTime = System.currentTimeMillis()
    var isLeaderElectedOrChanged = false
    trace("Waiting for leader to be elected or changed for partition [%s,%d], older leader is %s, new leader is %s"
          .format(topic, partition, oldLeaderOpt, newLeaderOpt))
    var leader: Option[Int] = None
    // poll ZK until the desired leader condition holds or the timeout elapses
    while (!isLeaderElectedOrChanged && System.currentTimeMillis() < startTime + timeoutMs) {
      // check if leader is elected
      leader = ZkUtils.getLeaderForPartition(zkClient, topic, partition)
      leader match {
        case Some(l) =>
          if (newLeaderOpt.isDefined && newLeaderOpt.get == l) {
            trace("Expected new leader %d is elected for partition [%s,%d]".format(l, topic, partition))
            isLeaderElectedOrChanged = true
          } else if (oldLeaderOpt.isDefined && oldLeaderOpt.get != l) {
            trace("Leader for partition [%s,%d] is changed from %d to %d".format(topic, partition, oldLeaderOpt.get, l))
            isLeaderElectedOrChanged = true
          } else if (!oldLeaderOpt.isDefined) {
            trace("Leader %d is elected for partition [%s,%d]".format(l, topic, partition))
            isLeaderElectedOrChanged = true
          } else {
            trace("Current leader for partition [%s,%d] is %d".format(topic, partition, l))
          }
        case None =>
          trace("Leader for partition [%s,%d] is not elected yet".format(topic, partition))
      }
      // poll every 100ms (or the whole timeout if it is smaller)
      Thread.sleep(timeoutMs.min(100L))
    }
    if (!isLeaderElectedOrChanged)
      fail("Timing out after %d ms since leader is not elected or changed for partition [%s,%d]"
           .format(timeoutMs, topic, partition))
    leader
  }
  /**
   * Execute the given block. If it throws an assert error, retry. Repeat
   * until no error is thrown or the time limit ellapses
   */
  def retry(maxWaitMs: Long)(block: => Unit) {
    // backoff starts at 1ms and roughly doubles, with each increment capped
    // at 1000ms
    var wait = 1L
    val startTime = System.currentTimeMillis()
    while(true) {
      try {
        block
        return
      } catch {
        case e: AssertionFailedError =>
          val ellapsed = System.currentTimeMillis - startTime
          if(ellapsed > maxWaitMs) {
            // out of time: surface the most recent assertion failure
            throw e
          } else {
            info("Attempt failed, sleeping for " + wait + ", and then retrying.")
            Thread.sleep(wait)
            wait += math.min(wait, 1000)
          }
      }
    }
  }
  /**
   * Wait until the given condition is true or throw an exception if the given wait time elapses.
   */
  def waitUntilTrue(condition: () => Boolean, msg: String, waitTime: Long = 5000L): Boolean = {
    val startTime = System.currentTimeMillis()
    while (true) {
      if (condition())
        return true
      if (System.currentTimeMillis() > startTime + waitTime)
        fail(msg)
      // re-check every 100ms (or the whole waitTime if it is smaller)
      Thread.sleep(waitTime.min(100L))
    }
    // should never hit here
    throw new RuntimeException("unexpected error")
  }
def isLeaderLocalOnBroker(topic: String, partitionId: Int, server: KafkaServer): Boolean = {
val partitionOpt = server.replicaManager.getPartition(topic, partitionId)
partitionOpt match {
case None => false
case Some(partition) =>
val replicaOpt = partition.leaderReplicaIfLocal
replicaOpt match {
case None => false
case Some(_) => true
}
}
}
  /**
   * Serialize the request into a wire-format buffer: a 2-byte request id
   * followed by the request body. The returned buffer is rewound, ready
   * to be read.
   */
  def createRequestByteBuffer(request: RequestOrResponse): ByteBuffer = {
    val byteBuffer = ByteBuffer.allocate(request.sizeInBytes + 2)
    byteBuffer.putShort(request.requestId.get)
    request.writeTo(byteBuffer)
    byteBuffer.rewind()
    byteBuffer
  }
  /**
   * Wait until a valid leader is propagated to the metadata cache in each broker.
   * It assumes that the leader propagated to each broker is the same.
   * @param servers The list of servers that the metadata should reach to
   * @param topic The topic name
   * @param partition The partition Id
   * @param timeout The amount of time waiting on this condition before assert to fail
   * @return The leader of the partition.
   */
  def waitUntilMetadataIsPropagated(servers: Seq[KafkaServer], topic: String, partition: Int, timeout: Long = 5000L): Int = {
    var leader: Int = -1
    TestUtils.waitUntilTrue(() =>
      // true only when every server's metadata cache knows the partition and
      // reports a valid leader broker id
      servers.foldLeft(true) {
        (result, server) =>
          val partitionStateOpt = server.apis.metadataCache.getPartitionInfo(topic, partition)
          partitionStateOpt match {
            case None => false
            case Some(partitionState) =>
              leader = partitionState.leaderIsrAndControllerEpoch.leaderAndIsr.leader
              result && Request.isValidBrokerId(leader)
          }
      },
      "Partition [%s,%d] metadata not propagated after %d ms".format(topic, partition, timeout),
      waitTime = timeout)
    leader
  }
def writeNonsenseToFile(fileName: File, position: Long, size: Int) {
val file = new RandomAccessFile(fileName, "rw")
file.seek(position)
for(i <- 0 until size)
file.writeByte(random.nextInt(255))
file.close()
}
def appendNonsenseToFile(fileName: File, size: Int) {
val file = new FileOutputStream(fileName, true)
for(i <- 0 until size)
file.write(random.nextInt(255))
file.close()
}
def checkForPhantomInSyncReplicas(zkClient: ZkClient, topic: String, partitionToBeReassigned: Int, assignedReplicas: Seq[Int]) {
val inSyncReplicas = ZkUtils.getInSyncReplicasForPartition(zkClient, topic, partitionToBeReassigned)
// in sync replicas should not have any replica that is not in the new assigned replicas
val phantomInSyncReplicas = inSyncReplicas.toSet -- assignedReplicas.toSet
assertTrue("All in sync replicas %s must be in the assigned replica list %s".format(inSyncReplicas, assignedReplicas),
phantomInSyncReplicas.size == 0)
}
  /**
   * After a reassignment, wait until (1) the ISR size matches the new
   * assignment, (2) a leader exists, and (3) the leader broker reports zero
   * under-replicated partitions. Fails the test on timeout.
   */
  def ensureNoUnderReplicatedPartitions(zkClient: ZkClient, topic: String, partitionToBeReassigned: Int, assignedReplicas: Seq[Int],
                                                servers: Seq[KafkaServer]) {
    TestUtils.waitUntilTrue(() => {
        val inSyncReplicas = ZkUtils.getInSyncReplicasForPartition(zkClient, topic, partitionToBeReassigned)
        inSyncReplicas.size == assignedReplicas.size
      },
      "Reassigned partition [%s,%d] is under replicated".format(topic, partitionToBeReassigned))
    var leader: Option[Int] = None
    TestUtils.waitUntilTrue(() => {
        leader = ZkUtils.getLeaderForPartition(zkClient, topic, partitionToBeReassigned)
        leader.isDefined
      },
      "Reassigned partition [%s,%d] is unavailable".format(topic, partitionToBeReassigned))
    TestUtils.waitUntilTrue(() => {
        // leader.get is safe here: the previous wait guaranteed it is defined
        val leaderBroker = servers.filter(s => s.config.brokerId == leader.get).head
        leaderBroker.replicaManager.underReplicatedPartitionCount() == 0
      },
      "Reassigned partition [%s,%d] is under-replicated as reported by the leader %d".format(topic, partitionToBeReassigned, leader.get))
  }
  // True while the /admin/reassign_partitions znode exists, i.e. a partition
  // reassignment is still in flight.
  def checkIfReassignPartitionPathExists(zkClient: ZkClient): Boolean = {
    ZkUtils.pathExists(zkClient, ZkUtils.ReassignPartitionsPath)
  }
def verifyNonDaemonThreadsStatus() {
assertEquals(0, Thread.getAllStackTraces.keySet().toArray
.map(_.asInstanceOf[Thread])
.count(t => !t.isDaemon && t.isAlive && t.getClass.getCanonicalName.toLowerCase.startsWith("kafka")))
}
  /**
   * Create new LogManager instance with default configuration for testing
   */
  def createLogManager(logDirs: Array[File] = Array.empty[File],
                       defaultConfig: LogConfig = LogConfig(),
                       cleanerConfig: CleanerConfig = CleanerConfig(enableCleaner = false),
                       time: MockTime = new MockTime()): LogManager = {
    // scheduler/time come from MockTime so tests can advance the clock manually
    new LogManager(logDirs = logDirs,
                   topicConfigs = Map(),
                   defaultConfig = defaultConfig,
                   cleanerConfig = cleanerConfig,
                   ioThreads = 4,
                   flushCheckMs = 1000L,
                   flushCheckpointMs = 10000L,
                   retentionCheckMs = 1000L,
                   scheduler = time.scheduler,
                   time = time,
                   brokerState = new BrokerState())
  }
  /**
   * Send `numMessages` string messages ("test-<partition>-<i>") to the topic.
   * With partition >= 0, a fixed-value partitioner routes everything to that
   * partition; otherwise the topic name is used as the key and the default
   * partitioner decides. Returns the message payloads that were sent.
   */
  def sendMessages(servers: Seq[KafkaServer],
                   topic: String,
                   numMessages: Int,
                   partition: Int = -1,
                   compression: CompressionCodec = NoCompressionCodec): List[String] = {
    val header = "test-%d".format(partition)
    val props = new Properties()
    props.put("compression.codec", compression.codec.toString)
    val ms = 0.until(numMessages).map(x => header + "-" + x)
    // Specific Partition
    if (partition >= 0) {
      // message key is the target partition itself; FixedValuePartitioner
      // returns the key verbatim as the partition index
      val producer: Producer[Int, String] =
        createProducer(TestUtils.getBrokerListStrFromServers(servers),
          encoder = classOf[StringEncoder].getName,
          keyEncoder = classOf[IntEncoder].getName,
          partitioner = classOf[FixedValuePartitioner].getName,
          producerProps = props)
      producer.send(ms.map(m => new KeyedMessage[Int, String](topic, partition, m)):_*)
      debug("Sent %d messages for partition [%s,%d]".format(ms.size, topic, partition))
      producer.close()
      ms.toList
    } else {
      // Use topic as the key to determine partition
      val producer: Producer[String, String] = createProducer(
        TestUtils.getBrokerListStrFromServers(servers),
        encoder = classOf[StringEncoder].getName,
        keyEncoder = classOf[StringEncoder].getName,
        partitioner = classOf[DefaultPartitioner].getName,
        producerProps = props)
      producer.send(ms.map(m => new KeyedMessage[String, String](topic, topic, m)):_*)
      producer.close()
      debug("Sent %d messages for topic [%s]".format(ms.size, topic))
      ms.toList
    }
  }
  /**
   * Send a single string message to the topic, keyed by the topic name,
   * using a short-lived producer.
   */
  def sendMessage(servers: Seq[KafkaServer],
                  topic: String,
                  message: String) = {
    val producer: Producer[String, String] =
      createProducer(TestUtils.getBrokerListStrFromServers(servers),
        encoder = classOf[StringEncoder].getName(),
        keyEncoder = classOf[StringEncoder].getName())
    producer.send(new KeyedMessage[String, String](topic, topic, message))
    producer.close()
  }
  /**
   * Consume all messages (or a specific number of messages)
   * @param topicMessageStreams the Topic Message Streams
   * @param nMessagesPerThread an optional field to specify the exact number of messages to be returned.
   *                           ConsumerTimeoutException will be thrown if there are no messages to be consumed.
   *                           If not specified, then all available messages will be consumed, and no exception is thrown.
   *
   *
   * @return the list of messages consumed.
   */
  def getMessages(topicMessageStreams: Map[String, List[KafkaStream[String, String]]],
                  nMessagesPerThread: Int = -1): List[String] = {
    var messages: List[String] = Nil
    val shouldGetAllMessages = nMessagesPerThread < 0
    for ((topic, messageStreams) <- topicMessageStreams) {
      for (messageStream <- messageStreams) {
        val iterator = messageStream.iterator()
        try {
          var i = 0
          // In fixed-count mode (i < nMessagesPerThread) hasNext/next may block
          // until the consumer timeout fires, which raises
          // ConsumerTimeoutException handled below.
          while ((shouldGetAllMessages && iterator.hasNext()) || (i < nMessagesPerThread)) {
            assertTrue(iterator.hasNext)
            val message = iterator.next.message // will throw a timeout exception if the message isn't there
            messages ::= message
            debug("received message: " + message)
            i += 1
          }
        } catch {
          case e: ConsumerTimeoutException =>
            if (shouldGetAllMessages) {
              // swallow the exception
              debug("consumer timed out after receiving " + messages.length + " message(s).")
            } else {
              throw e
            }
        }
      }
    }
    // messages were prepended as consumed; reverse restores consumption order
    messages.reverse
  }
  /**
   * Assert that topic deletion fully completed: the admin and topic znodes are
   * gone, every broker dropped the partitions from its replica manager and log
   * manager, and the cleaner checkpoints no longer reference the topic.
   */
  def verifyTopicDeletion(zkClient: ZkClient, topic: String, numPartitions: Int, servers: Seq[KafkaServer]) {
    val topicAndPartitions = (0 until numPartitions).map(TopicAndPartition(topic, _))
    // wait until admin path for delete topic is deleted, signaling completion of topic deletion
    TestUtils.waitUntilTrue(() => !ZkUtils.pathExists(zkClient, ZkUtils.getDeleteTopicPath(topic)),
      "Admin path /admin/delete_topic/%s path not deleted even after a replica is restarted".format(topic))
    TestUtils.waitUntilTrue(() => !ZkUtils.pathExists(zkClient, ZkUtils.getTopicPath(topic)),
      "Topic path /brokers/topics/%s not deleted after /admin/delete_topic/%s path is deleted".format(topic, topic))
    // ensure that the topic-partition has been deleted from all brokers' replica managers
    TestUtils.waitUntilTrue(() =>
      servers.forall(server => topicAndPartitions.forall(tp => server.replicaManager.getPartition(tp.topic, tp.partition) == None)),
      "Replica manager's should have deleted all of this topic's partitions")
    // ensure that logs from all replicas are deleted if delete topic is marked successful in zookeeper
    assertTrue("Replica logs not deleted after delete topic is complete",
      servers.forall(server => topicAndPartitions.forall(tp => server.getLogManager().getLog(tp).isEmpty)))
    // ensure that topic is removed from all cleaner offsets
    TestUtils.waitUntilTrue(() => servers.forall(server => topicAndPartitions.forall { tp =>
      val checkpoints = server.getLogManager().logDirs.map { logDir =>
        new OffsetCheckpoint(new File(logDir, "cleaner-offset-checkpoint")).read()
      }
      checkpoints.forall(checkpointsPerLogDir => !checkpointsPerLogDir.contains(tp))
    }), "Cleaner offset for deleted partition should have been removed")
  }
/**
* Translate the given buffer into a string
* @param buffer The buffer to translate
* @param encoding The encoding to use in translating bytes to characters
*/
def readString(buffer: ByteBuffer, encoding: String = Charset.defaultCharset.toString): String = {
val bytes = new Array[Byte](buffer.remaining)
buffer.get(bytes)
new String(bytes, encoding)
}
}
/** Kafka encoder that renders an Int key as the bytes of its decimal string. */
class IntEncoder(props: VerifiableProperties = null) extends Encoder[Int] {
  override def toBytes(n: Int) = n.toString.getBytes
}
/** Partitions a String key by its length modulo the partition count. */
class StaticPartitioner(props: VerifiableProperties = null) extends Partitioner{
  def partition(data: Any, numPartitions: Int): Int = {
    (data.asInstanceOf[String].length % numPartitions)
  }
}
/** Partitions by the key's hashCode modulo the partition count. */
class HashPartitioner(props: VerifiableProperties = null) extends Partitioner {
  def partition(data: Any, numPartitions: Int): Int = {
    // Math.floorMod keeps the result in [0, numPartitions) even when hashCode
    // is negative; a plain % would return a negative, invalid partition index.
    Math.floorMod(data.hashCode, numPartitions)
  }
}
// Uses the Int key itself as the partition index; the caller is responsible
// for passing a value in [0, numPartitions) — no bounds check is performed.
class FixedValuePartitioner(props: VerifiableProperties = null) extends Partitioner {
  def partition(data: Any, numPartitions: Int): Int = data.asInstanceOf[Int]
}
| confluentinc/kafka-deprecated-fork | core/src/test/scala/unit/kafka/utils/TestUtils.scala | Scala | apache-2.0 | 35,595 |
package com.glowingavenger.plan.util
import org.jgrapht.DirectedGraph
import com.glowingavenger.plan.ActionEdge
import org.jgrapht.graph.DirectedMultigraph
import scala.collection.JavaConversions._
import ReachGraph._
import com.glowingavenger.plan.model.state.BeliefState
object ReachGraph {
  // Implicitly wraps any JGraphT directed graph of belief states so the
  // ReachGraph operators (+++, ++, +, <<>>) can be used on it directly.
  @inline implicit def graphToReachGraph(g: DirectedGraph[BeliefState, ActionEdge]): ReachGraph = new ReachGraph(g)
}
/**
 * Enriches a JGraphT directed graph of belief states with convenience
 * operators for merging graphs and appending action edges. Every operator
 * returns a brand-new DirectedMultigraph; the receiver is never mutated.
 */
class ReachGraph(g: DirectedGraph[BeliefState, ActionEdge]) {

  /** Union of this graph and `other`: all vertices and all edges of both. */
  def +++(other: DirectedGraph[BeliefState, ActionEdge]): DirectedGraph[BeliefState, ActionEdge] = {
    val merged = new DirectedMultigraph[BeliefState, ActionEdge](classOf[ActionEdge])
    for (source <- Seq(g, other)) {
      for (v <- source.vertexSet()) merged.addVertex(v)
    }
    for (source <- Seq(g, other)) {
      for (e <- source.edgeSet()) merged.addEdge(e.from, e.to, e)
    }
    merged
  }

  /** Copy of this graph with the given edges (and their endpoints) added. */
  def ++(edges: Iterable[ActionEdge]): DirectedGraph[BeliefState, ActionEdge] = {
    val extended = g +++ new DirectedMultigraph[BeliefState, ActionEdge](classOf[ActionEdge])
    for (e <- edges) {
      extended.addVertex(e.from)
      extended.addVertex(e.to)
      extended.addEdge(e.from, e.to, e)
    }
    extended
  }

  /** Copy of this graph with a single edge added; delegates to ++. */
  def +(edge: ActionEdge): DirectedGraph[BeliefState, ActionEdge] = this ++ List(edge)

  /** The set of edges leaving the given state. */
  def <<>>(v: BeliefState): Set[ActionEdge] = asScalaSet[ActionEdge](g.outgoingEdgesOf(v)).toSet
}
| dreef3/glowing-avenger | src/main/scala/com/glowingavenger/plan/util/ReachGraph.scala | Scala | mit | 1,636 |
package org.fusesource.insight.maven.resources
import collection.immutable.TreeMap
import collection.JavaConversions._
import java.io.StringWriter
import org.fusesource.insight.maven.util.CsvWriter
import org.sonatype.aether.graph.DependencyNode
import org.fusesource.insight.maven.aether.CompareDependencyNode
// Builds CSV legal reports over a Maven dependency graph: plain snapshots
// (legalCsv/toLegalReports) and version-to-version diffs (toLegalCompareReports).
object LegalCsvReport {

  // Known third-party vendors and their licenses, keyed by the groupId
  // prefixes they publish under.
  val vendors = List(
    Vendor("Apache Software Foundation", "Apache 2.0", "commons-", "org.apache"),
    Vendor("AOP Alliance", "Apache 2.0", "aopalliance", "org.aopalliance"),
    Vendor("VMWare SpringSource", "Apache 2.0", "org.springframework"),
    Vendor("EPFL", "Apache 2.0", "org.scala-lang"),
    Vendor("Google", "Apache 2.0", "com.google.gwt"),
    Vendor("QOS.ch", "MIT", "ch.qos", "org.slf4j"),
    Vendor("Oracle", "CDDL", "com.sun.jersey", "com.sun.xml", "com.sun.tools", "com.sun.msv", "com.sun.mail"),
    Vendor("Codehaus", "Apache 2.0", "org.codehaus.jackson"),
    Vendor("INRIA, France Telecom", "BSD", "asm")
  )

  // groupId prefixes of our own products (excluded from third-party reporting)
  val fuseProducts = List[String]("org.apache.activemq", "org.apache.camel", "org.apache.cxf", "org.apache.karaf", "org.apache.servicemix")
  // groupId prefixes skipped entirely when collecting legal dependencies
  val ignoreGroupIds = List[String]("org.apache", "org.fusesource", "com.fusesource", "commons-", "log4j", "org.eclipse", "org.osgi")

  /** Render the given reports as a CSV string. */
  def legalCsv(seq: Traversable[LegalReport]): String = {
    val buffer = new StringWriter()
    legalCsv(seq, new CsvWriter(buffer))
    buffer.toString
  }

  /** Write the CSV header plus one row per report to the given writer. */
  def legalCsv(seq: Traversable[LegalReport], out: CsvWriter): Unit = {
    // NOTE(review): the header below has 16 labels while each data row has 17
    // fields; "Notes Purpose/Nature ... Royalty Payments Owed?" looks like two
    // columns fused into one label — verify against the consuming spreadsheet.
    out.println("Third Party Product", "Vendor", "Changes from Previous Version?", "License Agreement",
      "License Type (by Legal)", "License Term (by Legal)",
      "Open Source? (Yes or No)", "Modified? (Yes or No)", "Embedded? (Yes or No)", "Source or Binary?",
      "Distribution Rights (by Legal)", "Copyright and TM Notice (by Legal)",
      "EULA Requirements (by Legal)", "Assignment (by Legal)",
      "Notes Purpose/Nature of Component Royalty Payments Owed? (Yes or No)",
      "Royalty Details and Product Code List")
    for (r <- seq) {
      out.println(r.product, r.vendor, r.change, r.license, "", "", "Yes", "No", "Yes", "Binary", "", "", "", r.notes, "Library", "No", "")
    }
  }

  /** Collect third-party dependencies of `node` and turn them into reports. */
  def toLegalReports(node: DependencyNode, productGroups: List[String]): Iterable[LegalReport] = {
    val map = legalDependencies(node, productGroups)
    toLegalReports(map)
  }

  /** One report per groupId, listing its artifacts and any known vendor/license. */
  def toLegalReports(depMap: Map[String, List[DependencyNode]]): Iterable[LegalReport] = {
    depMap.map{
      case (g, v) =>
        val artifacts = v.map{
          n =>
            val a = n.getDependency.getArtifact
            a.getArtifactId + "-" + a.getVersion + "." + a.getExtension
        }.mkString(" ")
        val product = g + " files: " + artifacts

        // TODO we should attempt to load the Project for the pom.xml for each of the
        // available artifacts and check the licenses and vendor..
        var vendor = ""
        var license = ""
        vendors.find(_.matches(g)) match {
          case Some(v) =>
            vendor = v.vendor
            license = v.license
          case _ =>
        }
        val change = "Added"
        val notes = ""
        LegalReport(product, vendor, license, change, notes)
    }
  }

  /**
   * Returns the legal dependencies ignoring our products and filtering out apache code
   */
  // Recursively walks the dependency tree, grouping reportable nodes by
  // groupId. Optional deps and product subtrees are pruned entirely; ignored
  // groupIds are skipped but their children are still visited.
  def legalDependencies(node: DependencyNode, productGroups: List[String], others: Map[String, List[DependencyNode]] = new TreeMap[String, List[DependencyNode]]()): Map[String, List[DependencyNode]] = {
    val d = node.getDependency
    val a = d.getArtifact
    val groupId = a.getGroupId
    var map = others
    if (d.isOptional) {
      println("Ignoring optional dependency: " + a)
    } else if (productGroups.exists(p => groupId.startsWith(p))) {
      println("Ignoring " + a + " as its a product")
    } else {
      if (ignoreGroupIds.exists(p => groupId.startsWith(p))) {
        println("Ignoring " + a + " as its an apache distro")
      } else {
        val list = map.getOrElse(groupId, List())
        val newList = list ++ List(node)
        map += (groupId -> newList)
      }
      for (c <- node.getChildren) {
        map = legalDependencies(c, productGroups, map)
      }
    }
    map
  }

  /** Collect add/update changes between two versions and turn them into reports. */
  def toLegalCompareReports(node: CompareDependencyNode, productGroups: List[String]): Iterable[LegalReport] = {
    val map = legalCompareDependencies(node, productGroups)
    toLegalCompareReports(map)
  }

  /** One report per groupId; notes capture each artifact's version transition. */
  def toLegalCompareReports(depMap: Map[String, List[CompareDependencyNode]]): Iterable[LegalReport] = {
    depMap.map{
      case (g, v) =>
        var notes = ""
        // "Updated" wins over "Added" if any node in the group is an update
        var change = "Added"
        val artifacts = v.map{
          n =>
            if (n.change.isUpdate) {
              change = "Updated"
            }
            val version1 = n.version1.getOrElse("??")
            val version2 = n.version2.getOrElse("??")
            notes += n.artifactId + "." + n.extension + "(" + version1 + " => " + version2 + ") "
            n.artifactId + "-" + version2 + "." + n.extension
        }.mkString(" ")
        val product = g + " files: " + artifacts
        var vendor = ""
        var license = ""
        vendors.find(_.matches(g)) match {
          case Some(v) =>
            vendor = v.vendor
            license = v.license
          case _ =>
        }
        LegalReport(product, vendor, license, change, notes)
    }
  }

  /**
   * Returns the legal dependencies ignoring our products and filtering out apache code
   */
  // Same pruning rules as legalDependencies, plus non-add/update changes are
  // dropped since only new or changed artifacts need legal review.
  def legalCompareDependencies(node: CompareDependencyNode, productGroups: List[String], others: Map[String, List[CompareDependencyNode]] = new TreeMap[String, List[CompareDependencyNode]]()): Map[String, List[CompareDependencyNode]] = {
    val groupId = node.groupId
    var map = others
    lazy val description = node.groupId + ":" + node.artifactId
    if (node.isOptional) {
      println("Ignoring optional dependency: " + description)
    } else if (!node.change.isAddOrUpdate) {
      println("Ignoring non add/update change: " + description + " " + node.change)
    } else if (productGroups.exists(p => groupId.startsWith(p))) {
      println("Ignoring " + description + " as its a product")
    } else {
      if (ignoreGroupIds.exists(p => groupId.startsWith(p))) {
        println("Ignoring " + description + " as its an apache distro")
      } else {
        val list = map.getOrElse(groupId, List())
        val newList = list ++ List(node)
        map += (groupId -> newList)
      }
      for (c <- node.children) {
        map = legalCompareDependencies(c, productGroups, map)
      }
    }
    map
  }
}
/** One row of the legal report CSV: a product plus its vendor/license metadata. */
case class LegalReport(product: String, vendor: String, license: String, change: String, notes: String)
/** A known third-party vendor, identified by the groupId prefixes it publishes under. */
case class Vendor(vendor: String, license: String, groupIdPrefixes: String*) {
  /** True when the given groupId starts with one of this vendor's prefixes. */
  def matches(aGroupId: String) = groupIdPrefixes.exists(prefix => aGroupId.startsWith(prefix))
}
| janstey/fuse | sandbox/insight-maven-web/src/main/scala/org/fusesource/insight/maven/resources/LegalCsvReport.scala | Scala | apache-2.0 | 6,928 |
package de.tu_berlin.dima.bdapro.flink.oddsemordnilaps.thaohtp
import org.apache.flink.api.scala.{ExecutionEnvironment, _}
import org.apache.flink.util.Collector
/**
* Created by JML on 11/7/16.
* Warm up task: Odd Semordnilap Number
*/
/**
 * Warm-up task: count "odd semordnilap" numbers in a Flink text input —
 * numbers that start and end with an odd digit and whose reversal also
 * appears in the (distinct) input.
 */
object OddSemordnilaps {

  def main(args: Array[String]): Unit = {
    if (args.length < 1) {
      Console.err.println("Usage: <jar> inputPath")
      System.exit(-1)
    }
    val inputFilePath = args(0)

    // set up the Flink batch environment and read the raw text
    val env = ExecutionEnvironment.getExecutionEnvironment
    val rawLines = env.readTextFile(inputFilePath)

    // tokenize, keep only distinct numbers starting AND ending with an odd
    // digit, then emit each survivor together with its reversal
    val candidates = rawLines
      .flatMap(_.split("\n"))
      .flatMap(_.split(" "))
      .distinct()
      .filter(x => filterOddNumber(x))
      .flatMap(x => Array(x, x.reverse))
      .map(x => (x, 1))

    // a group of size >= 2 means the number and its reversal both occur,
    // so the number is a semordnilap; emit one marker per such group
    val semordnilapMarkers = candidates
      .groupBy(0)
      .reduceGroup {
        (in, out: Collector[(Int)]) => {
          if (in.size >= 2) {
            out.collect(1)
          }
        }
      }

    val result = semordnilapMarkers.count()
    println("The result is " + result)
  }

  /**
   * True when the (non-null, non-empty) number string begins and ends with an
   * odd digit. The parity test uses the character code point directly: for
   * ASCII digits ('0' = 48, even) code-point parity equals digit parity, so
   * this matches the original nested-if logic exactly.
   */
  def filterOddNumber(numberStr: String): Boolean = {
    if (numberStr == null || numberStr.isEmpty) {
      false
    } else {
      numberStr.charAt(0).toInt % 2 != 0 &&
        numberStr.charAt(numberStr.length - 1).toInt % 2 != 0
    }
  }
}
| cristiprg/BDAPRO.GlobalStateML | bdapro-ws1617-flink-jobs/src/main/scala/de/tu_berlin/dima/bdapro/flink/oddsemordnilaps/thaohtp/OddSemordnilaps.scala | Scala | apache-2.0 | 1,679 |
package org.coursera.naptime.ari.graphql
import com.google.inject.Injector
import com.linkedin.data.schema.DataSchema
import com.linkedin.data.schema.RecordDataSchema
import org.coursera.naptime.ResourceName
import org.coursera.naptime.ari.FullSchema
import org.coursera.naptime.ari.LocalSchemaProvider
import org.coursera.naptime.ari.SchemaProvider
import org.coursera.naptime.ari.engine.CoursesResource
import org.coursera.naptime.ari.engine.InstructorsResource
import org.coursera.naptime.ari.engine.PartnersResource
import org.coursera.naptime.model.Keyed
import org.coursera.naptime.router2.NaptimeRoutes
import org.coursera.naptime.router2.ResourceRouterBuilder
import org.coursera.naptime.schema.Resource
import org.junit.Test
import org.mockito.Mockito._
import org.scalatest.junit.AssertionsForJUnit
import org.scalatest.mock.MockitoSugar
// Verifies that DefaultGraphqlSchemaProvider derives the expected GraphQL
// type set from a SchemaProvider, ignoring GraphQL's internal "__" metadata
// types in every assertion.
class DefaultGraphqlSchemaProviderTest extends AssertionsForJUnit {

  import DefaultGraphqlSchemaProviderTest._

  // With no resources, only the built-in scalar/root types should be present.
  @Test
  def checkEmptySchema(): Unit = {
    val emptySchema = new DefaultGraphqlSchemaProvider(emptySchemaProvider())

    val nonMetadataTypes = emptySchema.schema.allTypes.filterNot(_._1.startsWith("__"))
    assert(nonMetadataTypes.keySet === DEFAULT_TYPES, s"${nonMetadataTypes.keySet}")
  }

  // With the sample resources, the derived resource/connection types appear
  // alongside the defaults.
  @Test
  def checkBasicSchemaComputation(): Unit = {
    val simpleSchema = new DefaultGraphqlSchemaProvider(simpleSchemaProvider())

    val nonMetadataTypes = simpleSchema.schema.allTypes.filterNot(_._1.startsWith("__"))
    assert(nonMetadataTypes.keySet === DEFAULT_TYPES ++ COMPUTED_TYPES,
      s"${nonMetadataTypes.keySet}")
  }

  // A provider returning a fresh FullSchema on every call (never eq to the
  // previous one) must still yield the same computed type set.
  @Test
  def constantlyChanging(): Unit = {
    val regeneratingProvider = new SchemaProvider {
      val underlying = simpleSchemaProvider()

      override def mergedType(resourceName: ResourceName): Option[RecordDataSchema] = {
        underlying.mergedType(resourceName)
      }

      override def fullSchema: FullSchema = {
        FullSchema(
          Set.empty ++ underlying.fullSchema.resources,
          Set.empty ++ underlying.fullSchema.types)
      }
    }
    assert(!(regeneratingProvider.fullSchema eq regeneratingProvider.fullSchema))
    val regenerating = new DefaultGraphqlSchemaProvider(regeneratingProvider)

    val nonMetadataTypes = regenerating.schema.allTypes.filterNot(_._1.startsWith("__"))
    assert(nonMetadataTypes.keySet === DEFAULT_TYPES ++ COMPUTED_TYPES,
      s"${nonMetadataTypes.keySet}")
  }

  // TODO: check to ensure that it recomputes only when required.
}
/** Fixtures for [[DefaultGraphqlSchemaProviderTest]]: expected type sets and
  * mocked schema providers built from the EngineImplTest resources. */
object DefaultGraphqlSchemaProviderTest extends MockitoSugar {
  import org.coursera.naptime.ari.engine.EngineImplTest._
  // Types that every generated schema contains, even with no resources.
  val DEFAULT_TYPES = Set(
    "ID",
    "root",
    "Boolean",
    "Long",
    "Float",
    "Int",
    "BigInt",
    "String",
    "BigDecimal")
  // Types expected to be derived from the three mocked resources below.
  val COMPUTED_TYPES = Set(
    "CoursesV1",
    "CoursesV1Connection",
    "CoursesV1Resource",
    "InstructorsV1",
    "InstructorsV1Connection",
    "InstructorsV1Resource",
    "intMember",
    "org_coursera_naptime_ari_graphql_models_Coordinates",
    "org_coursera_naptime_ari_graphql_models_CoursePlatform",
    "org_coursera_naptime_ari_graphql_models_originalId",
    "PartnersV1",
    "ResponsePagination",
    "stringMember",
    "DataMap")
  // Pegasus type schemas wrapped as Keyed entries, shared by all mocked routers.
  val extraTypes = TYPE_SCHEMAS.map { case (key, value) => Keyed(key, value) }.toList
  /** Builds a schema provider backed by mocked routers for the courses,
    * instructors and partners resources. */
  def simpleSchemaProvider(): SchemaProvider = {
    val courseRouterBuilder = mock[ResourceRouterBuilder]
    when(courseRouterBuilder.schema).thenReturn(COURSES_RESOURCE)
    when(courseRouterBuilder.types).thenReturn(extraTypes)
    when(courseRouterBuilder.resourceClass()).thenReturn(
      classOf[CoursesResource].asInstanceOf[Class[courseRouterBuilder.ResourceClass]])
    val instructorRouterBuilder = mock[ResourceRouterBuilder]
    when(instructorRouterBuilder.schema).thenReturn(INSTRUCTORS_RESOURCE)
    when(instructorRouterBuilder.types).thenReturn(extraTypes)
    when(instructorRouterBuilder.resourceClass()).thenReturn(
      classOf[InstructorsResource].asInstanceOf[Class[instructorRouterBuilder.ResourceClass]])
    val partnerRouterBuilder = mock[ResourceRouterBuilder]
    when(partnerRouterBuilder.schema).thenReturn(PARTNERS_RESOURCE)
    when(partnerRouterBuilder.types).thenReturn(extraTypes)
    when(partnerRouterBuilder.resourceClass()).thenReturn(
      classOf[PartnersResource].asInstanceOf[Class[partnerRouterBuilder.ResourceClass]])
    val injector = mock[Injector]
    new LocalSchemaProvider(NaptimeRoutes(injector, Set(
      courseRouterBuilder,
      instructorRouterBuilder,
      partnerRouterBuilder)))
  }
  /** Builds a schema provider with no registered resources at all. */
  def emptySchemaProvider() = {
    val injector = mock[Injector]
    new LocalSchemaProvider(NaptimeRoutes(injector, Set.empty))
  }
}
| josh-newman/naptime | naptime-graphql/src/test/scala/org/coursera/naptime/ari/graphql/DefaultGraphqlSchemaProviderTest.scala | Scala | apache-2.0 | 4,712 |
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic.anyvals
import reflect.macros.Context
import org.scalactic.Resources
/** Compile-time factory macro backing `PosInt(...)` literal construction. */
private[scalactic] object PosIntMacro extends CompileTimeAssertions {
  /** Verifies at compile time that `value` is a positive Int literal and, if
    * so, expands to `PosInt.from(value).get`. Compilation fails with the
    * appropriate message when the expression is not a literal or not > 0, so
    * the `.get` in the expansion cannot fail at runtime. */
  def apply(c: Context)(value: c.Expr[Int]): c.Expr[PosInt] = {
    val notValidMsg = Resources.notValidPosInt
    val notLiteralMsg = Resources.notLiteralPosInt
    import c.universe._
    // Aborts compilation unless the argument is an Int literal satisfying i > 0.
    ensureValidIntLiteral(c)(value, notValidMsg, notLiteralMsg) { i => i > 0 }
    reify { PosInt.from(value.splice).get }
  }
}
| cheeseng/scalatest | scalactic-macro/src/main/scala/org/scalactic/anyvals/PosIntMacro.scala | Scala | apache-2.0 | 1,082 |
package com.intenthq.gander.extractors
import org.joda.time.DateTime
import org.specs2.mutable.Specification
/** Tests for ContentExtractor's URL date parsing. The expected behavior shown
  * here: a valid year segment is required; month and day segments are used
  * only when valid, otherwise the extractor falls back to the already-parsed
  * broader component (e.g. year-only, or year-and-month). */
class ContentExtractorSpec extends Specification {
  // Helper: builds Some(java.util.Date) at midnight for the given y/m/d.
  def date(year: Int, month: Int, day: Int) = Some(new DateTime(year, month, day, 0, 0).toDate)
  "extractDateFromURLUnsafe" >> {
    " should extract the date from the path, if present" >> {
      ContentExtractor.extractDateFromURL("http://a.com/no/date/in/this/path") must_== None
      ContentExtractor.extractDateFromURL("http://a.com/not/every/number/1900/is/a/date") must_== None
      ContentExtractor.extractDateFromURL("http://a.com/number/2000a/plus/letters") must_== None
      ContentExtractor.extractDateFromURL("http://a.com/a/year/2000/and/nothing/else") must_== date(2000, 1, 1)
      ContentExtractor.extractDateFromURL("http://a.com/a/year/2000/and/10/not/a/month") must_== date(2000, 1, 1)
      ContentExtractor.extractDateFromURL("http://a.com/a/year/2000/13/not/a/month") must_== date(2000, 1, 1)
      ContentExtractor.extractDateFromURL("http://a.com/a/year/2000/10/and/a/month") must_== date(2000, 10, 1)
      ContentExtractor.extractDateFromURL("http://a.com/not/2000/10/a/20/day") must_== date(2000, 10, 1)
      ContentExtractor.extractDateFromURL("http://a.com/not/2000/10/32/a/day") must_== date(2000, 10, 1)
      ContentExtractor.extractDateFromURL("http://a.com/not/2000/10/31/a/day") must_== date(2000, 10, 31)
    }
  }
}
| albertpastrana/gander | src/test/scala/com/intenthq/gander/extractors/ContentExtractorSpec.scala | Scala | apache-2.0 | 1,426 |
import collection._
/** Exercises GenMap/ParMap operations on parallel collections, including the
  * withDefault wrappers. Statement order matters: `dm.put(3, 3)` mutates the
  * shared underlying map `pm`, which a later assertion checks. */
object Test {
  def main(args: Array[String]): Unit = {
    val gm: GenMap[Int, Int] = GenMap(0 -> 0, 1 -> 1).par
    // ops
    assert(gm.isDefinedAt(1))
    assert(gm.contains(1))
    assert(gm.getOrElse(1, 2) == 1)
    assert(gm.getOrElse(2, 3) == 3)
    assert(gm.keysIterator.toSet == Set(0, 1))
    assert(gm.valuesIterator.toSet == Set(0, 1))
    assert(gm.keySet == Set(0, 1))
    assert(gm.keys.toSet == Set(0, 1))
    assert(gm.values.toSet == Set(0, 1))
    // A map without a declared default must throw on default(...).
    try {
      gm.default(-1)
      assert(false)
    } catch {
      case e: NoSuchElementException => // ok
    }
    assert(gm.filterKeys(_ % 2 == 0)(0) == 0)
    assert(gm.filterKeys(_ % 2 == 0).get(1) == None)
    assert(gm.mapValues(_ + 1)(0) == 1)
    // with defaults
    val pm = parallel.mutable.ParMap(0 -> 0, 1 -> 1)
    val dm = pm.withDefault(x => -x)
    assert(dm(0) == 0)
    assert(dm(1) == 1)
    assert(dm(2) == -2)
    assert(dm.updated(2, 2) == parallel.ParMap(0 -> 0, 1 -> 1, 2 -> 2))
    // withDefault wraps (not copies) pm, so this put is visible through both.
    dm.put(3, 3)
    assert(dm(3) == 3)
    assert(pm(3) == 3)
    assert(dm(4) == -4)
    val imdm = parallel.immutable.ParMap(0 -> 0, 1 -> 1).withDefault(x => -x)
    assert(imdm(0) == 0)
    assert(imdm(1) == 1)
    assert(imdm(2) == -2)
    assert(imdm.updated(2, 2) == parallel.ParMap(0 -> 0, 1 -> 1, 2 -> 2))
  }
}
| yusuke2255/dotty | tests/run/parmap-ops.scala | Scala | bsd-3-clause | 1,322 |
package gsd.graph
/** A directed edge from `source` to `target` over vertex type `V`. */
case class Edge[V](source: V, target: V)
/**
 * Base class for directed and undirected graphs over vertex type `V`.
 *
 * Invariants checked at construction: no self-loops, and every edge endpoint
 * must be a member of `vertices`.
 *
 * @param vertices the vertex set
 * @param edges    adjacency map from each vertex to its successor set
 */
abstract class Graph[V] protected (val vertices: Set[V], val edges: EdgeMap[V])
  extends GraphWriter[V] with Graphviz[V] {
  def this(vs: Set[V], es: Iterable[Edge[V]]) =
    this(vs, toMultiMap(es))
  // Fixed typo in the failure message ("selp-loops" -> "self-loops").
  assert (!(edges exists { case (x,y) => y contains x }),
    "self-loops not allowed: " + edges)
  assert ((edges forall {
    case (x,y) => (vertices contains x) && (y forall (vertices contains))
  }), "Edge contains vertex that is not in this graph!")
  /** Reverse adjacency map (target -> sources), defaulting to the empty set. */
  val revEdges: EdgeMap[V] = toMultiMap {
    edges flatMap {
      case (src,tars) => tars map { Edge(_, src) }
    }
  } withDefaultValue Set()
  type This <: Graph[V]
  /** Factory used by the add/remove operations to rebuild a concrete graph. */
  def New(newVs : Set[V], newEs: EdgeMap[V]) : This
  /** Adds a single edge; vertices are unchanged. */
  def +(t : Edge[V]): This = New(vertices, edges.toEdgeMap + t)
  /** Removes a single edge; vertices are unchanged. */
  def -(t : Edge[V]): This = New(vertices, edges.toEdgeMap - t)
  /** Adds a collection of edges. */
  def ++(ts : Iterable[Edge[V]]): This = New(vertices, edges.toEdgeMap ++ ts)
  /** Removes a collection of edges. */
  def --(ts : Iterable[Edge[V]]): This = New(vertices, edges.toEdgeMap -- ts)
  def successors(v: V): Set[V] = edges(v)
  def predecessors(v: V): Set[V] = revEdges(v)
  /** Vertices with no outgoing edges */
  lazy val sinks = vertices filter { successors(_).isEmpty }
  /** Vertices with no incoming edges */
  lazy val sources = vertices filter { v =>
    !edges.values.exists { _ contains v }
  }
  /** Renders this graph in the textual parse format (see GraphWriter). */
  def toParseString(implicit toOrdered: V => Ordered[V]): String
}
/** A directed graph; supports BFS traversal, clique operations, and
  * transitive reduction. */
case class DirectedGraph[V] protected (vs: Set[V], es: EdgeMap[V])
  extends Graph[V](vs,es) with BFS[V] with Cliques[V] {
  type This = DirectedGraph[V]
  def this(vs: Set[V], es: Iterable[Edge[V]]) =
    this(vs, toMultiMap(es) withDefaultValue Set())
  def New(newVs: Set[V], newEs: EdgeMap[V]) =
    new DirectedGraph(newVs,newEs)
  /** Returns a graph with the same vertices and every edge reversed. */
  def reverseEdges = New(vs, revEdges)
  def toParseString(implicit toOrdered: V => Ordered[V]) =
    mkParseString("->")
  /**
   * WARNING: Only works on DAGs, will cause an infinite loop on graphs with
   * cycles!
   *
   * For an implication graph g, call:
   *  g.reduceCliques.transitiveReduction.expandCliques
   *
   * to reduce cliques prior to the transitive reduction. Cliques are then
   * expanded out in the reduced graph.
   */
  def transitiveReduction: DirectedGraph[V] = {
    // Worklist reachability: expands f(head) until the worklist is exhausted.
    // NOTE(review): not annotated @tailrec; the recursive call is in tail
    // position, but deep graphs rely on that being the case.
    def visit[U](f: (V) => Iterable[V],
        toVisit: List[V], visited: Set[V] = Set()): Set[V] =
      (toVisit dropWhile { visited contains _ }) match {
        case Nil => visited
        case head::tail => visit(f, tail ::: f(head).toList, visited + head)
      }
    // Transitive successors
    def tsuccessors(v: V): Set[V] = visit(successors, successors(v).toList)
    // An edge v -> x is redundant when x is also reachable transitively
    // through some direct successor of v.
    def _doVertex(v : V) =
      for (x <- tsuccessors(v) & (successors(v) flatMap tsuccessors))
      yield Edge(v, x)
    this -- (vertices flatMap _doVertex)
  }
}
/** An undirected graph; edges are normalized via toUndirectedMultiMap, which
  * requires an ordering on the vertex type. */
case class UndirectedGraph[V <% Ordered[V]] protected (vs: Set[V], es: EdgeMap[V])
  extends Graph[V](vs, es) {
  type This = UndirectedGraph[V]
  def this(vs: Set[V], es: Iterable[Edge[V]]) =
    this(vs, toUndirectedMultiMap(es))
  def New(newVs: Set[V], newEs: EdgeMap[V]) =
    new UndirectedGraph(newVs,newEs)
  // Undirected edges are written with "--" in the parse format.
  def toParseString(implicit toOrdered: V => Ordered[V]) =
    mkParseString("--")(toOrdered)
}
/** Mixin that renders a [[Graph]] in a textual parse format. */
trait GraphWriter[V] {
  this: Graph[V] =>
  /**
   * Renders this graph as one "vertex: id;" line per vertex followed by
   * "id<edgeSep>id;" entries, soft-wrapped once a line exceeds 80 characters.
   *
   * @param edgeSep   separator between source and target ids
   *                  ("->" for directed, "--" for undirected graphs)
   * @param toOrdered ordering evidence used to emit output deterministically
   */
  def mkParseString(edgeSep: String)
      (implicit toOrdered: V => Ordered[V]): String = {
    val sb = new StringBuilder
    // Assign each vertex a stable numeric id, starting at 1.
    val fmap = Map() ++ (vertices.zipWithIndex map { case (f,i) => (f, i+1) })
    for ((id, v) <- fmap.iterator.toList sortWith { case ((_,i),(_,j)) => i < j })
      sb append v append ": " append id append ";\\n"
    // Track the running line length so edge entries wrap at roughly 80 chars.
    var len = 0
    for {
      (src, targets) <- edges.toList sortWith { case ((x,_),(y,_)) => x < y }
      tar <- targets.toList sortWith { _ < _ }
    } {
      val prev = sb.length
      sb append fmap(src) append edgeSep append fmap(tar) append ";"
      val curr = sb.length
      len += curr - prev
      if (len > 80) {
        len = 0
        sb append "\\n"
      }
    }
    // Explicit member selection: postfix notation (`sb toString`) is
    // feature-gated since Scala 2.13 and an error in Scala 3.
    sb.toString
  }
}
/** Mixin that renders a [[Graph]] in Graphviz DOT syntax. */
trait Graphviz[T] {
  this: Graph[T] =>
  /** Renders this graph as a DOT `digraph`, using `params` for the layout
    * direction and node shape. */
  def toGraphvizString(params: GraphvizParams = GraphvizParams()): String = {
    val sb = new StringBuilder
    //Header
    sb append "digraph {\\n"
    sb append "graph [ rankdir=%s ];\\n".format(params.rankDir)
    sb append "node [ shape=%s ];\\n".format(params.shape)
    // Assign each vertex a numeric id, starting at 1.
    val fmap = Map() ++ (vertices.zipWithIndex map { case (f,i) => (f, i+1) })
    //Vertices
    // NOTE(review): in this pattern `id` is the vertex and `v` its numeric id
    // (names read swapped). Indices are compared via toString, so declaration
    // order is lexicographic ("10" before "2"), not numeric — presumably only
    // determinism, not numeric order, is required here; confirm before changing.
    for ((id, v) <- fmap.iterator.toList sortWith
        { case ((_,i),(_,j)) => i.toString < j.toString })
      sb append """%d [label="%s"]""".format(v, id.toString replace ("\\"", "\\\\\\"")) append "\\n"
    for {
      (src, targets) <- edges.toList sortWith { case ((x,_),(y,_)) => x.toString < y.toString }
      tar <- targets.toList sortWith { _.toString < _.toString }
    } {
      sb append fmap(src) append "->" append fmap(tar) append "\\n"
    }
    sb append "}"
    sb.toString
  }
}
/** Rendering options for [[Graphviz.toGraphvizString]].
  *
  * @param rankDir layout direction for the DOT `rankdir` attribute
  *                ("TB" top-to-bottom, "LR" left-to-right)
  * @param shape   node shape for the DOT `shape` attribute
  */
case class GraphvizParams(rankDir: String = "TB", shape: String = "box")
| scas-mdd/linux-variability-analysis-tools.fm-translation | src/main/scala/gsd/graph/Graph.scala | Scala | gpl-3.0 | 5,102 |
/** Pedagogical demo: six progressively more idiomatic ways to build
  * Seq(0, 1, 4, 9, ..., 99*99) — the squares of 0 until 100. The trailing
  * assertions verify all six versions agree. */
object listBuilder extends App {
  // 1: direct translation from Java to Scala:
  import scala.collection.mutable
  def seqFromRangeVersion1: Seq[Int] = {
    val result = mutable.ArrayBuffer[Int]()
    (0 until 100).foreach { i =>
      result += i * i
    }
    result
  }
  // 2: getting rid of mutable collection:
  def seqFromRangeVersion2: Seq[Int] = {
    var result = Vector[Int]()
    (0 until 100).foreach { i =>
      result = result :+ i * i
    }
    result
  }
  // 3: removing local var using recursion:
  import scala.annotation.tailrec
  def seqFromRangeVersion3: Seq[Int] = {
    @tailrec
    def recurse(range: Range, accumulator: Seq[Int]): Seq[Int] = {
      if (range.isEmpty) {
        accumulator
      } else {
        val i = range.head
        recurse(range.tail, accumulator :+ i * i)
      }
    }
    recurse(0 until 100, Vector())
  }
  // 4: removing local var using foldLeft:
  def seqFromRangeVersion4: Seq[Int] = {
    (0 until 100).foldLeft(Vector[Int]()) {
      (accumulator, i) => accumulator :+ i * i
    }
  }
  // 5: shorthand version of foldLeft (the `/:` symbolic alias):
  def seqFromRangeVersion5: Seq[Int] = {
    (Vector[Int]() /: (0 until 100)) {
      (accumulator, i) => accumulator :+ i * i
    }
  }
  // 6: using map:
  def seqFromRangeVersion6: Seq[Int] = (0 until 100) map { i => i * i }
  // -----
  // println(s"version 1: $seqFromRangeVersion1")
  // println(s"version 2: $seqFromRangeVersion2")
  // println(s"version 3: $seqFromRangeVersion3")
  // println(s"version 4: $seqFromRangeVersion4")
  // println(s"version 5: $seqFromRangeVersion5")
  // println(s"version 6: $seqFromRangeVersion6")
  // All six implementations must produce the same sequence.
  assert(seqFromRangeVersion1 == seqFromRangeVersion2)
  assert(seqFromRangeVersion1 == seqFromRangeVersion3)
  assert(seqFromRangeVersion1 == seqFromRangeVersion4)
  assert(seqFromRangeVersion1 == seqFromRangeVersion5)
  assert(seqFromRangeVersion1 == seqFromRangeVersion6)
}
| sullivan-/six-ways | listBuilder.scala | Scala | apache-2.0 | 1,922 |
package com.containant.casestudies
/** The dependency injection problem can encode various combinatorial
* optimization problems easily and intuitively.
*
* The following encodes the subset-sum problem.
*
* The test instances are from
* https://people.sc.fsu.edu/~jburkardt/datasets/subset_sum/subset_sum.html
*/
import com.containant._
import com.containant.heuristics._
/** Case study: encodes subset-sum as a dependency-injection problem and
  * compares the MMA, grammatical-evolution, and random heuristics on the
  * P02 and P03 benchmark instances. */
object CS2SubsetSum {
  //////////////////////////////////////////////////////////////////////
  // Configuration
  val _seed: Int = 0xDEADBEEF
  val _runs: Int = 100
  // target fitness fn. evaluations ~ 100 for comparison with SMAC, which often
  // terminates under 100 on this prob.
  // Max-min ant system heuristic.
  object Hmma extends AntHeuristic {
    override val _maxPheromone: Double = 10
    override val _evaporationRate: Double = 0.4
    override val _iterations: Int = 1000
    override val _antNumber: Int = 3
    override val _minimumFraction: Double = 0.10
    override val _recursionDepth: Int = 10
    override val RNG: java.util.Random = new java.util.Random(_seed)
    override def toString: String = "mma"
  }
  // Grammatical-evolution heuristic.
  object Hgre extends GrEvoHeuristic {
    override val _population: Int = 100
    override val _length: Int = 9
    override val _maxChoice: Int = 7
    override val _tournamentSize = 5
    override val _generations = 10
    override val _recursionDepth = 10
    override val RNG: java.util.Random = new java.util.Random(_seed)
    override def toString: String = "gre"
  }
  // Random-search baseline.
  object Hran extends RandomHeuristic {
    override val _iterations = 100
    override val _recursionDepth = 10
    override val RNG: java.util.Random = new java.util.Random(_seed)
    override def toString: String = "ran"
  }
  //////////////////////////////////////////////////////////////////////
  // Problem Description
  type Solution = Set[Int]
  // Each benchmark instance exposes its numbers as vals; the container builds
  // candidate subsets via `empty`/`add`.
  trait SubsetSumModule extends Module {
    // val empty: Solution = Set.empty[Int]
    // def add(i: Int, xs: Solution): Solution = xs + i
    val empty: collection.immutable.Set[Int] = collection.immutable.Set.empty[Int]
    def add(i: Int, xs: collection.immutable.Set[Int]): collection.immutable.Set[Int] = xs + i
  }
  // Fitness: 2 for an exact hit, otherwise the reciprocal of the distance to
  // the target (larger is better).
  // def subsetSum(target: Int)(solution: Solution): Double = {
  def subsetSum(target: Int)(solution: collection.immutable.Set[Int]): Double = {
    if (solution.sum == target) 2
    else 1.0/ Math.abs(solution.sum - target).toDouble
  }
  object P02 extends SubsetSumModule {
    val i267: Int = 267
    val i493: Int = 493
    val i869: Int = 869
    val i961: Int = 961
    val i1000: Int = 1000
    val i1153: Int = 1153
    val i1246: Int = 1246
    val i1598: Int = 1598
    val i1766: Int = 1766
    val i1922: Int = 1922
  } // target: 5842
  object P03 extends SubsetSumModule {
    val i518533: Int = 518533
    val i1037066: Int = 1037066
    val i2074132: Int = 2074132
    val i1648264: Int = 1648264
    val i796528: Int = 796528
    val i1593056: Int = 1593056
    val i686112: Int = 686112
    val i1372224: Int = 1372224
    val i244448: Int = 244448
    val i488896: Int = 488896
    val i977792: Int = 977792
    val i1955584: Int = 1955584
    val i1411168: Int = 1411168
    val i322336: Int = 322336
    val i644672: Int = 644672
    val i1289344: Int = 1289344
    val i78688: Int = 78688
    val i157376: Int = 157376
    val i314752: Int = 314752
    val i629504: Int = 629504
    val i1259008: Int = 1259008
  } // target: 2463098
  //////////////////////////////////////////////////////////////////////
  // Experiment Details
  /** Runs the MMA-vs-GE comparison (with a random baseline) on both instances
    * and prints summary statistics plus a p-value per instance. */
  def main(args: Array[String]): Unit = {
    import com.containant.casestudies.Framework
    println("\\n-----------------------------")
    println("Case Study 2: Subset Sum (P02)")
    println("Runs: " + _runs)
    val comparison02 =
      Framework.experiment[Solution](Hmma, Hgre, _runs, P02, subsetSum(5842))
    val reference02 =
      Framework.experiment[Solution](Hran, Hran, _runs, P02, subsetSum(5842))
    println("heuristic,min,mean,max,var")
    println(comparison02.summary1)
    println(comparison02.summary2)
    println(reference02.summary1)
    println("p: " + comparison02.pvalue)
    println()
    println("\\n-----------------------------")
    println("Case Study 2: Subset Sum (P03)")
    println("Runs: " + _runs)
    val comparison03 =
      Framework.experiment[Solution](Hmma, Hgre, _runs, P03, subsetSum(2463098))
    val reference03 =
      Framework.experiment[Solution](Hran, Hran, _runs, P03, subsetSum(2463098))
    println("heuristic,min,mean,max,var")
    println(comparison03.summary1)
    println(comparison03.summary2)
    println(reference03.summary1)
    println("p: " + comparison03.pvalue)
    println()
  }
}
| zaklogician/ContainAnt-devel | src/main/scala/com/containant/casestudies/CS2SubsetSum.scala | Scala | bsd-3-clause | 4,725 |
object Test extends App {
  // Regression test (scala/bug t7215): transposing an empty nested collection
  // must not throw. The boolean results are intentionally discarded — the
  // test passes as long as no exception escapes.
  List[List[Any]]().transpose.isEmpty
  Array[Array[Any]]().transpose.isEmpty
  Vector[Vector[Any]]().transpose.isEmpty
  Stream[Stream[Any]]().transpose.isEmpty
}
| felixmulder/scala | test/files/run/t7215.scala | Scala | bsd-3-clause | 190 |
package beam.utils
import akka.actor.{Actor, ActorLogging, ActorRef}
import beam.agentsim.agents.ridehail.RideHailManager.DebugRideHailManagerDuringExecution
import beam.agentsim.scheduler.BeamAgentScheduler.Monitor
/** Debug actor: on each [[Tick]], logs post-GC memory usage and pokes the
  * ride-hail manager and scheduler for their debug/monitor output. Some
  * external timer is presumably responsible for sending Tick periodically. */
class DebugActorWithTimer(val rideHailManager: ActorRef, val scheduler: ActorRef) extends Actor with ActorLogging {
  def receive: PartialFunction[Any, Unit] = {
    case Tick =>
      // DebugLib.gcAndGetMemoryLogMessage triggers a GC before measuring.
      log.info(DebugLib.gcAndGetMemoryLogMessage("Memory use after GC: "))
      rideHailManager ! DebugRideHailManagerDuringExecution
      scheduler ! Monitor
  }
}
/** Message that triggers one round of debug logging. */
case object Tick
| colinsheppard/beam | src/main/scala/beam/utils/DebugActorWithTimer.scala | Scala | gpl-3.0 | 583 |
package tethys.writers
import tethys.JsonWriter
import tethys.writers.tokens.TokenWriter
/** Provides a no-op [[JsonWriter]] that discards its value and writes nothing
  * to the token stream, for either the named or unnamed write variant. */
trait EmptyWriters {
  def emptyWriter[A]: JsonWriter[A] = new JsonWriter[A] {
    override def write(name: String, value: A, tokenWriter: TokenWriter): Unit = ()
    override def write(value: A, tokenWriter: TokenWriter): Unit = ()
  }
}
object EmptyWriters extends EmptyWriters
| tethys-json/tethys | modules/core/src/main/scala/tethys/writers/EmptyWriters.scala | Scala | apache-2.0 | 372 |
package org.http4s
import cats.effect.IO
import cats.effect.laws.util.TestContext
import org.scalacheck.Prop
import scala.util.Success
/** Deprecated shims kept for binary/source compatibility after the testing
  * helpers moved to the org.http4s.laws packages. */
package object testing {
  // Media types used for testing
  @deprecated("Will be removed in a future version.", "0.21.0-M2")
  val `text/asp`: MediaType =
    new MediaType("text", "asp", MediaType.Compressible, MediaType.NotBinary, List("asp"))
  @deprecated("Will be removed in a future version.", "0.21.0-M2")
  val `text/x-h` = new MediaType("text", "x-h")
  @deprecated("Will be removed in a future version.", "0.21.0-M2")
  val `application/excel`: MediaType =
    new MediaType("application", "excel", true, false, List("xls"))
  @deprecated("Will be removed in a future version.", "0.21.0-M2")
  val `application/gnutar`: MediaType =
    new MediaType("application", "gnutar", true, false, List("tar"))
  @deprecated("Will be removed in a future version.", "0.21.0-M2")
  val `audio/aiff`: MediaType =
    new MediaType(
      "audio",
      "aiff",
      MediaType.Compressible,
      MediaType.Binary,
      List("aif", "aiff", "aifc"))
  @deprecated("Will be removed in a future version.", "0.21.0-M2")
  val `application/soap+xml`: MediaType =
    new MediaType("application", "soap+xml", MediaType.Compressible, MediaType.NotBinary)
  @deprecated("Will be removed in a future version.", "0.21.0-M2")
  val `audio/mod`: MediaType =
    new MediaType("audio", "mod", MediaType.Uncompressible, MediaType.Binary, List("mod"))
  /** Ticks the test context and reports true only when the IO completed with true. */
  @deprecated("Will be removed in a future version. Prefer IsEq[F[Boolean]].", "0.21.0-M2")
  def ioBooleanToProp(iob: IO[Boolean])(implicit ec: TestContext): Prop = {
    val f = iob.unsafeToFuture()
    ec.tick()
    f.value match {
      case Some(Success(true)) => true
      case _ => false
    }
  }
  @deprecated("Import from org.http4s.laws.discipline.arbitrary._.", "0.21.0-M2")
  type ArbitraryInstances
  @deprecated("Moved to org.http4s.laws.discipline.arbitrary.", "0.21.0-M2")
  val ArbitraryInstances = org.http4s.laws.discipline.arbitrary
  @deprecated("Moved to org.http4s.laws.discipline.arbitrary.", "0.21.0-M2")
  val instances = org.http4s.laws.discipline.arbitrary
  @deprecated("Moved to org.http4s.laws.EntityEncoderLaws.", "0.21.0-M2")
  type EntityEncoderLaws[F[_], A] = org.http4s.laws.EntityEncoderLaws[F, A]
  @deprecated("Moved to org.http4s.laws.EntityEncoderLaws.", "0.21.0-M2")
  val EntityEncoderLaws = org.http4s.laws.EntityEncoderLaws
  @deprecated("Moved to org.http4s.laws.discipline.EntityEncoderTests.", "0.21.0-M2")
  type EntityEncoderTests[F[_], A] = org.http4s.laws.discipline.EntityEncoderTests[F, A]
  @deprecated("Moved to org.http4s.laws.discipline.EntityEncoderTests.", "0.21.0-M2")
  val EntityEncoderTests = org.http4s.laws.discipline.EntityEncoderTests
  @deprecated("Moved to org.http4s.laws.EntityCodecLaws.", "0.21.0-M2")
  type EntityCodecLaws[F[_], A] = org.http4s.laws.EntityCodecLaws[F, A]
  @deprecated("Moved to org.http4s.laws.EntityCodecLaws.", "0.21.0-M2")
  val EntityCodecLaws = org.http4s.laws.EntityCodecLaws
  @deprecated("Moved to org.http4s.laws.discipline.EntityCodecTests.", "0.21.0-M2")
  type EntityCodecTests[F[_], A] = org.http4s.laws.discipline.EntityCodecTests[F, A]
  @deprecated("Moved to org.http4s.laws.discipline.EntityCodecTests.", "0.21.0-M2")
  val EntityCodecTests = org.http4s.laws.discipline.EntityCodecTests
  @deprecated("Moved to org.http4s.laws.HttpCodecLaws.", "0.21.0-M2")
  type HttpCodecLaws[A] = org.http4s.laws.HttpCodecLaws[A]
  // Bug fix: this alias previously pointed at EntityCodecLaws, contradicting
  // both the type alias above and the deprecation message.
  @deprecated("Moved to org.http4s.laws.HttpCodecLaws.", "0.21.0-M2")
  val HttpCodecLaws = org.http4s.laws.HttpCodecLaws
  @deprecated("Moved to org.http4s.laws.discipline.HttpCodecTests.", "0.21.0-M2")
  type HttpCodecTests[A] = org.http4s.laws.discipline.HttpCodecTests[A]
  @deprecated("Moved to org.http4s.laws.discipline.HttpCodecTests.", "0.21.0-M2")
  val HttpCodecTests = org.http4s.laws.discipline.HttpCodecTests
}
| ChristopherDavenport/http4s | testing/src/main/scala/org/http4s/testing/package.scala | Scala | apache-2.0 | 3,945 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.kafka010
import java.io.{File, IOException}
import java.lang.{Integer => JInt}
import java.net.InetSocketAddress
import java.util.{Map => JMap, Properties}
import java.util.concurrent.TimeUnit
import scala.collection.JavaConverters._
import scala.language.postfixOps
import scala.util.Random
import kafka.admin.AdminUtils
import kafka.api.Request
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.server.checkpoints.OffsetCheckpointFile
import kafka.utils.ZkUtils
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.clients.admin.{AdminClient, CreatePartitionsOptions, NewPartitions}
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.clients.producer._
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.network.ListenerName
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.zookeeper.server.{NIOServerCnxnFactory, ZooKeeperServer}
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.internal.Logging
import org.apache.spark.util.{ShutdownHookManager, Utils}
/**
* This is a helper class for Kafka test suites. This has the functionality to set up
* and tear down local Kafka servers, and to push data using Kafka producers.
*
* The reason to put Kafka test utility class in src is to test Python related Kafka APIs.
*/
class KafkaTestUtils(withBrokerProps: Map[String, Object] = Map.empty) extends Logging {
  // Zookeeper related configurations
  private val zkHost = "127.0.0.1"
  // 0 until the embedded server binds; the actual port is read back in setup.
  private var zkPort: Int = 0
  private val zkConnectionTimeout = 60000
  private val zkSessionTimeout = 10000
  private var zookeeper: EmbeddedZookeeper = _
  private var zkUtils: ZkUtils = _
  private var adminClient: AdminClient = null
  // Kafka broker related configurations
  private val brokerHost = "127.0.0.1"
  // 0 requests an ephemeral port; updated to the bound port during startup.
  private var brokerPort = 0
  private var brokerConf: KafkaConfig = _
  // Kafka broker server
  private var server: KafkaServer = _
  // Kafka producer
  private var producer: Producer[String, String] = _
  // Flag to test whether the system is correctly started
  private var zkReady = false
  private var brokerReady = false
  // Shutdown hook used to report KafkaTestUtils instances never torn down.
  private var leakDetector: AnyRef = null
  /** "host:port" of the embedded Zookeeper; only valid between setup and teardown. */
  def zkAddress: String = {
    assert(zkReady, "Zookeeper not setup yet or already torn down, cannot get zookeeper address")
    s"$zkHost:$zkPort"
  }
  /** "host:port" of the embedded Kafka broker; only valid between setup and teardown. */
  def brokerAddress: String = {
    assert(brokerReady, "Kafka not setup yet or already torn down, cannot get broker address")
    s"$brokerHost:$brokerPort"
  }
  /** The ZkUtils client, failing fast if it has not been initialized yet. */
  def zookeeperClient: ZkUtils = {
    assert(zkReady, "Zookeeper not setup yet or already torn down, cannot get zookeeper client")
    Option(zkUtils).getOrElse(
      throw new IllegalStateException("Zookeeper client is not yet initialized"))
  }
  // Set up the Embedded Zookeeper server and get the proper Zookeeper port
  private def setupEmbeddedZookeeper(): Unit = {
    // Zookeeper server startup
    zookeeper = new EmbeddedZookeeper(s"$zkHost:$zkPort")
    // Get the actual zookeeper binding port
    zkPort = zookeeper.actualPort
    zkUtils = ZkUtils(s"$zkHost:$zkPort", zkSessionTimeout, zkConnectionTimeout, false)
    zkReady = true
  }
  // Set up the Embedded Kafka server
  private def setupEmbeddedKafkaServer(): Unit = {
    assert(zkReady, "Zookeeper should be set up beforehand")
    // Kafka broker startup; startServiceOnPort retries on bind conflicts.
    Utils.startServiceOnPort(brokerPort, port => {
      brokerPort = port
      brokerConf = new KafkaConfig(brokerConfiguration, doLog = false)
      server = new KafkaServer(brokerConf)
      server.startup()
      // Read back the actually-bound PLAINTEXT listener port.
      brokerPort = server.boundPort(new ListenerName("PLAINTEXT"))
      (server, brokerPort)
    }, new SparkConf(), "KafkaBroker")
    brokerReady = true
    // Admin client pointed at the freshly started broker.
    val props = new Properties()
    props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, s"$brokerHost:$brokerPort")
    adminClient = AdminClient.create(props)
  }
  /** setup the whole embedded servers, including Zookeeper and Kafka brokers */
  def setup(): Unit = {
    // Set up a KafkaTestUtils leak detector so that we can see where the leak KafkaTestUtils is
    // created.
    val exception = new SparkException("It was created at: ")
    leakDetector = ShutdownHookManager.addShutdownHook { () =>
      logError("Found a leak KafkaTestUtils.", exception)
    }
    setupEmbeddedZookeeper()
    setupEmbeddedKafkaServer()
    // Block until the broker has registered itself in Zookeeper.
    eventually(timeout(60.seconds)) {
      assert(zkUtils.getAllBrokersInCluster().nonEmpty, "Broker was not up in 60 seconds")
    }
  }
  /** Teardown the whole servers, including Kafka broker and Zookeeper */
  def teardown(): Unit = {
    // Tearing down cleanly means this instance did not leak.
    if (leakDetector != null) {
      ShutdownHookManager.removeShutdownHook(leakDetector)
    }
    brokerReady = false
    zkReady = false
    if (producer != null) {
      producer.close()
      producer = null
    }
    if (adminClient != null) {
      adminClient.close()
    }
    if (server != null) {
      server.shutdown()
      server.awaitShutdown()
      server = null
    }
    // On Windows, `logDirs` is left open even after Kafka server above is completely shut down
    // in some cases. It leads to test failures on Windows if the directory deletion failure
    // throws an exception.
    brokerConf.logDirs.foreach { f =>
      try {
        Utils.deleteRecursively(new File(f))
      } catch {
        case e: IOException if Utils.isWindows =>
          logWarning(e.getMessage)
      }
    }
    if (zkUtils != null) {
      zkUtils.close()
      zkUtils = null
    }
    if (zookeeper != null) {
      zookeeper.shutdown()
      zookeeper = null
    }
  }
  /** Create a Kafka topic and wait until it is propagated to the whole cluster */
  def createTopic(topic: String, partitions: Int, overwrite: Boolean = false): Unit = {
    var created = false
    // Retry loop: when the topic already exists and overwrite is requested,
    // delete it and attempt creation again.
    while (!created) {
      try {
        AdminUtils.createTopic(zkUtils, topic, partitions, 1)
        created = true
      } catch {
        // Workaround fact that TopicExistsException is in kafka.common in 0.10.0 and
        // org.apache.kafka.common.errors in 0.10.1 (!)
        case e: Exception if (e.getClass.getSimpleName == "TopicExistsException") && overwrite =>
          deleteTopic(topic)
      }
    }
    // wait until metadata is propagated
    (0 until partitions).foreach { p =>
      waitUntilMetadataIsPropagated(topic, p)
    }
  }
  /** Returns (topic, partition count) for every topic known to Zookeeper. */
  def getAllTopicsAndPartitionSize(): Seq[(String, Int)] = {
    zkUtils.getPartitionsForTopics(zkUtils.getAllTopics()).mapValues(_.size).toSeq
  }
  /** Create a Kafka topic and wait until it is propagated to the whole cluster */
  def createTopic(topic: String): Unit = {
    createTopic(topic, 1)
  }
  /** Delete a Kafka topic and wait until it is propagated to the whole cluster */
  def deleteTopic(topic: String): Unit = {
    // Capture the partition count before deletion so we can verify cleanup.
    val partitions = zkUtils.getPartitionsForTopics(Seq(topic))(topic).size
    AdminUtils.deleteTopic(zkUtils, topic)
    verifyTopicDeletionWithRetries(zkUtils, topic, partitions, List(this.server))
  }
  /** Add new partitions to a Kafka topic */
  def addPartitions(topic: String, partitions: Int): Unit = {
    // NewPartitions.increaseTo sets the total partition count, not a delta.
    adminClient.createPartitions(
      Map(topic -> NewPartitions.increaseTo(partitions)).asJava,
      new CreatePartitionsOptions)
    // wait until metadata is propagated
    (0 until partitions).foreach { p =>
      waitUntilMetadataIsPropagated(topic, p)
    }
  }
  /** Java-friendly function for sending messages to the Kafka broker */
  def sendMessages(topic: String, messageToFreq: JMap[String, JInt]): Unit = {
    sendMessages(topic, Map(messageToFreq.asScala.mapValues(_.intValue()).toSeq: _*))
  }
  /** Send the messages to the Kafka broker */
  def sendMessages(topic: String, messageToFreq: Map[String, Int]): Unit = {
    // Expand each (message, frequency) pair into `frequency` copies.
    val messages = messageToFreq.flatMap { case (s, freq) => Seq.fill(freq)(s) }.toArray
    sendMessages(topic, messages)
  }
  /** Send the array of messages to the Kafka broker */
  def sendMessages(topic: String, messages: Array[String]): Seq[(String, RecordMetadata)] = {
    sendMessages(topic, messages, None)
  }
  /** Send the array of messages to the Kafka broker using specified partition */
  def sendMessages(
      topic: String,
      messages: Array[String],
      partition: Option[Int]): Seq[(String, RecordMetadata)] = {
    producer = new KafkaProducer[String, String](producerConfiguration)
    val offsets = try {
      messages.map { m =>
        // With no explicit partition the broker-side partitioner decides.
        val record = partition match {
          case Some(p) => new ProducerRecord[String, String](topic, p, null, m)
          case None => new ProducerRecord[String, String](topic, m)
        }
        // Block (up to 10s) for each send so returned metadata is final.
        val metadata =
          producer.send(record).get(10, TimeUnit.SECONDS)
        logInfo(s"\\tSent $m to partition ${metadata.partition}, offset ${metadata.offset}")
        (m, metadata)
      }
    } finally {
      if (producer != null) {
        producer.close()
        producer = null
      }
    }
    offsets
  }
  /** Forces the broker's log manager to run log cleanup immediately. */
  def cleanupLogs(): Unit = {
    server.logManager.cleanupLogs()
  }
  /** Earliest available offset per partition for the given topics, read via a
    * throwaway consumer. */
  def getEarliestOffsets(topics: Set[String]): Map[TopicPartition, Long] = {
    val kc = new KafkaConsumer[String, String](consumerConfiguration)
    logInfo("Created consumer to get earliest offsets")
    kc.subscribe(topics.asJavaCollection)
    // poll(0) triggers partition assignment without consuming records.
    kc.poll(0)
    val partitions = kc.assignment()
    kc.pause(partitions)
    kc.seekToBeginning(partitions)
    val offsets = partitions.asScala.map(p => p -> kc.position(p)).toMap
    kc.close()
    logInfo("Closed consumer to get earliest offsets")
    offsets
  }
  /** Latest offset per partition for the given topics, read via a throwaway
    * consumer. */
  def getLatestOffsets(topics: Set[String]): Map[TopicPartition, Long] = {
    val kc = new KafkaConsumer[String, String](consumerConfiguration)
    logInfo("Created consumer to get latest offsets")
    kc.subscribe(topics.asJavaCollection)
    // poll(0) triggers partition assignment without consuming records.
    kc.poll(0)
    val partitions = kc.assignment()
    kc.pause(partitions)
    kc.seekToEnd(partitions)
    val offsets = partitions.asScala.map(p => p -> kc.position(p)).toMap
    kc.close()
    logInfo("Closed consumer to get latest offsets")
    offsets
  }
  /**
   * Configuration for the embedded, single-node Kafka broker: binds to 127.0.0.1,
   * logs into a temp dir, and enables topic deletion so tests can clean up.
   * Extra/overriding properties come from `withBrokerProps`.
   */
  protected def brokerConfiguration: Properties = {
    val props = new Properties()
    props.put("broker.id", "0")
    props.put("host.name", "127.0.0.1")
    props.put("advertised.host.name", "127.0.0.1")
    props.put("port", brokerPort.toString)
    props.put("log.dir", Utils.createTempDir().getAbsolutePath)
    props.put("zookeeper.connect", zkAddress)
    props.put("zookeeper.connection.timeout.ms", "60000")
    // Flush every message: tests read immediately after writing.
    props.put("log.flush.interval.messages", "1")
    props.put("replica.socket.timeout.ms", "1500")
    props.put("delete.topic.enable", "true")
    // Single-partition, single-replica offsets topic keeps the embedded broker light.
    props.put("offsets.topic.num.partitions", "1")
    props.put("offsets.topic.replication.factor", "1")
    // Short rebalance delay speeds up consumer-group joins in tests.
    props.put("group.initial.rebalance.delay.ms", "10")
    // Can not use properties.putAll(propsMap.asJava) in scala-2.12
    // See https://github.com/scala/bug/issues/10418
    withBrokerProps.foreach { case (k, v) => props.put(k, v) }
    props
  }
private def producerConfiguration: Properties = {
val props = new Properties()
props.put("bootstrap.servers", brokerAddress)
props.put("value.serializer", classOf[StringSerializer].getName)
props.put("key.serializer", classOf[StringSerializer].getName)
// wait for all in-sync replicas to ack sends
props.put("acks", "all")
props
}
private def consumerConfiguration: Properties = {
val props = new Properties()
props.put("bootstrap.servers", brokerAddress)
props.put("group.id", "group-KafkaTestUtils-" + Random.nextInt)
props.put("value.deserializer", classOf[StringDeserializer].getName)
props.put("key.deserializer", classOf[StringDeserializer].getName)
props.put("enable.auto.commit", "false")
props
}
  /**
   * Verify a topic is fully deleted everywhere: ZooKeeper paths, broker replica
   * managers, log managers, and cleaner checkpoints. Throws (via `assert`) on
   * the first place the topic is still visible.
   */
  private def verifyTopicDeletion(
      topic: String,
      numPartitions: Int,
      servers: Seq[KafkaServer]): Unit = {
    val topicAndPartitions = (0 until numPartitions).map(new TopicPartition(topic, _))
    import ZkUtils._
    // wait until admin path for delete topic is deleted, signaling completion of topic deletion
    assert(
      !zkUtils.pathExists(getDeleteTopicPath(topic)),
      s"${getDeleteTopicPath(topic)} still exists")
    assert(!zkUtils.pathExists(getTopicPath(topic)), s"${getTopicPath(topic)} still exists")
    // ensure that the topic-partition has been deleted from all brokers' replica managers
    assert(servers.forall(server => topicAndPartitions.forall(tp =>
      server.replicaManager.getPartition(tp) == None)),
      s"topic $topic still exists in the replica manager")
    // ensure that logs from all replicas are deleted if delete topic is marked successful
    assert(servers.forall(server => topicAndPartitions.forall(tp =>
      server.getLogManager().getLog(tp).isEmpty)),
      s"topic $topic still exists in log mananger")
    // ensure that topic is removed from all cleaner offsets
    assert(servers.forall(server => topicAndPartitions.forall { tp =>
      // One checkpoint file per live log directory.
      val checkpoints = server.getLogManager().liveLogDirs.map { logDir =>
        new OffsetCheckpointFile(new File(logDir, "cleaner-offset-checkpoint")).read()
      }
      checkpoints.forall(checkpointsPerLogDir => !checkpointsPerLogDir.contains(tp))
    }), s"checkpoint for topic $topic still exists")
    // ensure the topic is gone
    assert(
      !zkUtils.getAllTopics().contains(topic),
      s"topic $topic still exists on zookeeper")
  }
/** Verify topic is deleted. Retry to delete the topic if not. */
private def verifyTopicDeletionWithRetries(
zkUtils: ZkUtils,
topic: String,
numPartitions: Int,
servers: Seq[KafkaServer]) {
eventually(timeout(60.seconds), interval(200.millis)) {
try {
verifyTopicDeletion(topic, numPartitions, servers)
} catch {
case e: Throwable =>
// As pushing messages into Kafka updates Zookeeper asynchronously, there is a small
// chance that a topic will be recreated after deletion due to the asynchronous update.
// Hence, delete the topic and retry.
AdminUtils.deleteTopic(zkUtils, topic)
throw e
}
}
}
  /**
   * Block until the broker's metadata cache reports a live, valid leader and a
   * non-empty replica list for the given partition, failing after 60 seconds.
   */
  private def waitUntilMetadataIsPropagated(topic: String, partition: Int): Unit = {
    // Propagated == leader elected in ZK, leader id valid on the broker, replicas known.
    def isPropagated = server.apis.metadataCache.getPartitionInfo(topic, partition) match {
      case Some(partitionState) =>
        zkUtils.getLeaderForPartition(topic, partition).isDefined &&
          Request.isValidBrokerId(partitionState.basePartitionState.leader) &&
          !partitionState.basePartitionState.replicas.isEmpty
      case _ =>
        false
    }
    eventually(timeout(60.seconds)) {
      assert(isPropagated, s"Partition [$topic, $partition] metadata not propagated after timeout")
    }
  }
private class EmbeddedZookeeper(val zkConnect: String) {
val snapshotDir = Utils.createTempDir()
val logDir = Utils.createTempDir()
val zookeeper = new ZooKeeperServer(snapshotDir, logDir, 500)
val (ip, port) = {
val splits = zkConnect.split(":")
(splits(0), splits(1).toInt)
}
val factory = new NIOServerCnxnFactory()
factory.configure(new InetSocketAddress(ip, port), 16)
factory.startup(zookeeper)
val actualPort = factory.getLocalPort
def shutdown() {
factory.shutdown()
// The directories are not closed even if the ZooKeeper server is shut down.
// Please see ZOOKEEPER-1844, which is fixed in 3.4.6+. It leads to test failures
// on Windows if the directory deletion failure throws an exception.
try {
Utils.deleteRecursively(snapshotDir)
} catch {
case e: IOException if Utils.isWindows =>
logWarning(e.getMessage)
}
try {
Utils.deleteRecursively(logDir)
} catch {
case e: IOException if Utils.isWindows =>
logWarning(e.getMessage)
}
}
}
}
| rikima/spark | external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala | Scala | apache-2.0 | 16,865 |
/**
* Illustrates a simple map partition to parse CSV data in Scala
*/
package com.oreilly.learningsparkexamples.scala
import java.io.StringReader
import org.apache.spark._
import play.api.libs.json._
import play.api.libs.functional.syntax._
import scala.util.parsing.json.JSON
import scala.collection.JavaConversions._
import au.com.bytecode.opencsv.CSVReader
object BasicParseWholeFileCsv {
  /**
   * Entry point: reads whole CSV files (one (path, content) pair per file) and
   * parses each file's full text with opencsv, so records may span lines.
   *
   * Expected args: [sparkmaster] [inputfile]
   */
  def main(args: Array[String]): Unit = {
    if (args.length < 2) {
      println("Usage: [sparkmaster] [inputfile]")
      // Fix: Predef.exit was removed in Scala 2.11; sys.exit is the supported API.
      sys.exit(1)
    }
    val master = args(0)
    val inputFile = args(1)
    val sc = new SparkContext(master, "BasicParseWholeFileCsv", System.getenv("SPARK_HOME"))
    try {
      // wholeTextFiles yields (fileName, fileContent); parse each file as one CSV document.
      val input = sc.wholeTextFiles(inputFile)
      val result = input.flatMap { case (_, txt) =>
        val reader = new CSVReader(new StringReader(txt))
        reader.readAll()
      }
      println(result.collect().map(_.toList).mkString(","))
    } finally {
      // Release the SparkContext even if the job fails.
      sc.stop()
    }
  }
}
| holdenk/learning-spark-examples | src/main/scala/com/oreilly/learningsparkexamples/scala/BasicParseWholeFileCsv.scala | Scala | mit | 921 |
package org.jetbrains.sbt.project.data.service
import java.io.File
import com.intellij.compiler.CompilerConfiguration
import com.intellij.openapi.externalSystem.model.DataNode
import com.intellij.openapi.externalSystem.model.project.ProjectData
import com.intellij.openapi.externalSystem.service.notification.{NotificationCategory, NotificationSource}
import com.intellij.openapi.module.{Module, ModuleManager}
import com.intellij.openapi.projectRoots
import com.intellij.openapi.projectRoots.ProjectJdkTable
import com.intellij.openapi.roots.{LanguageLevelModuleExtensionImpl, ModuleRootManager}
import com.intellij.pom.java.LanguageLevel
import com.intellij.testFramework.{IdeaTestUtil, UsefulTestCase}
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.project.external.{JdkByHome, JdkByName, SdkReference, SdkUtils}
import org.jetbrains.plugins.scala.project.settings.ScalaCompilerConfiguration
import org.jetbrains.plugins.scala.project.{DebuggingInfoLevel, Version}
import org.jetbrains.sbt.UsefulTestCaseHelper
import org.jetbrains.sbt.project.SbtProjectSystem
import org.jetbrains.sbt.project.data._
import org.jetbrains.sbt.project.data.service.ModuleExtDataService.NotificationException
import org.junit.Assert._
import scala.collection.JavaConverters._
import scala.util.{Failure, Try}
/**
* @author Nikolay Obedin
* @since 6/9/15.
*/
class ModuleExtDataServiceTest extends ProjectDataServiceTestCase with UsefulTestCaseHelper {

  import ExternalSystemDataDsl._

  override def setUp(): Unit = {
    super.setUp()
    setUpJdks()
  }

  override def tearDown(): Unit = {
    tearDownJdks()
    super.tearDown()
  }

  def testWithoutScalaLibrary(): Unit =
    importProjectData(generateScalaProject("2.11.5", None, Seq.empty))

  def testWithIncompatibleScalaLibrary(): Unit = {
    // Walks the cause chain looking for a PROJECT_SYNC WARNING notification from SBT.
    @scala.annotation.tailrec
    def checkFailure(t: Throwable): Boolean = {
      t match {
        case null => false
        case NotificationException(data, SbtProjectSystem.Id)
          if data.getNotificationSource == NotificationSource.PROJECT_SYNC &&
            data.getNotificationCategory == NotificationCategory.WARNING => true
        case _ if t.getCause != t => checkFailure(t.getCause)
        // Fix: this case was missing — a throwable whose getCause returns itself
        // previously produced a MatchError instead of reporting "not found".
        case _ => false
      }
    }

    Try(importProjectData(generateScalaProject("2.11.5", Some("2.10.4"), Seq.empty))) match {
      case Failure(t) if checkFailure(t) =>
      case _ => fail("Warning notification is expected")
    }
  }

  def testWithCompatibleScalaLibrary(): Unit = {
    doTestAndCheckScalaSdk("2.11.1", "2.11.5")
    doTestAndCheckScalaSdk("2.10.4", "2.10.3")
  }

  def testWithTheSameVersionOfScalaLibrary(): Unit = {
    doTestAndCheckScalaSdk("2.11.6", "2.11.6")
    doTestAndCheckScalaSdk("2.10.4", "2.10.4")
    doTestAndCheckScalaSdk("2.9.2", "2.9.2")
  }

  def testCompilerOptionsSetup(): Unit = {
    val options = Seq(
      "-g:source",
      "-Xplugin:test-plugin.jar",
      "-Xexperimental",
      "-P:continuations:enable",
      "-deprecation",
      "-language:dynamics",
      "-language:existentials",
      "-explaintypes",
      "-feature",
      "-language:higherKinds",
      "-language:implicitConversions",
      "-language:macros",
      "-optimise",
      "-language:postfixOps",
      "-language:reflectiveCalls",
      "-no-specialization",
      "-unchecked",
      "-nowarn",
      "-XmyCoolAdditionalOption"
    )

    importProjectData(generateScalaProject("2.11.5", Some("2.11.5"), options))

    // Every recognized scalac flag must map onto the corresponding IDE setting;
    // unknown flags must be preserved verbatim as "additional" options.
    val module = ModuleManager.getInstance(getProject).findModuleByName("Module 1")
    val compilerConfiguration = ScalaCompilerConfiguration.instanceIn(getProject).getSettingsForModule(module)
    assertEquals(compilerConfiguration.debuggingInfoLevel, DebuggingInfoLevel.Source)
    UsefulTestCase.assertContainsElements(compilerConfiguration.plugins.asJava, "test-plugin.jar")
    UsefulTestCase.assertContainsElements(compilerConfiguration.additionalCompilerOptions.asJava, "-XmyCoolAdditionalOption")
    assertTrue(compilerConfiguration.continuations)
    assertTrue(compilerConfiguration.experimental)
    assertTrue(compilerConfiguration.deprecationWarnings)
    assertTrue(compilerConfiguration.dynamics)
    assertTrue(compilerConfiguration.existentials)
    assertTrue(compilerConfiguration.explainTypeErrors)
    assertTrue(compilerConfiguration.featureWarnings)
    assertTrue(compilerConfiguration.higherKinds)
    assertTrue(compilerConfiguration.implicitConversions)
    assertTrue(compilerConfiguration.macros)
    assertTrue(compilerConfiguration.optimiseBytecode)
    assertTrue(compilerConfiguration.postfixOps)
    assertTrue(compilerConfiguration.reflectiveCalls)
    assertFalse(compilerConfiguration.specialization)
    assertTrue(compilerConfiguration.uncheckedWarnings)
    assertFalse(compilerConfiguration.warnings)
  }

  def testModuleIsNull(): Unit = {
    // ModuleExtNode attached directly to the project (no owning module) must not crash import.
    val testProject = new project {
      name := getProject.getName
      ideDirectoryPath := getProject.getBasePath
      linkedProjectPath := getProject.getBasePath
      arbitraryNodes += new ModuleExtNode(ModuleExtData("org.scala-lang", Some(Version("2.11.5")), Seq.empty, Seq.empty, None, Seq.empty))
    }.build.toDataNode

    importProjectData(testProject)
  }

  def testValidJavaSdk(): Unit =
    doTestSdk(Some(JdkByName("1.8")),
      ProjectJdkTable.getInstance().findJdk(IdeaTestUtil.getMockJdk18.getName),
      LanguageLevel.JDK_1_8)

  def testValidJavaSdkWithDifferentLanguageLevel(): Unit =
    doTestSdk(Some(JdkByName("1.8")),
      Seq("-source", "1.6"),
      ProjectJdkTable.getInstance().findJdk(IdeaTestUtil.getMockJdk18.getName),
      LanguageLevel.JDK_1_6)

  def testInvalidSdk(): Unit =
    doTestSdk(Some(JdkByName("20")), defaultJdk, LanguageLevel.JDK_1_7)

  def testAbsentSdk(): Unit =
    doTestSdk(None, defaultJdk, LanguageLevel.JDK_1_7)

  def testValidJdkByHome(): Unit = {
    val jdk = ProjectJdkTable.getInstance().findJdk(IdeaTestUtil.getMockJdk18.getName)
    doTestSdk(Some(JdkByHome(new File(jdk.getHomePath))), jdk, LanguageLevel.JDK_1_8)
  }

  def testJavacOptions(): Unit = {
    val options = Seq(
      "-g:none",
      "-nowarn",
      "-deprecation",
      "-target", "1.8",
      "-Werror"
    )
    importProjectData(generateJavaProject(None, options))

    val compilerConfiguration = CompilerConfiguration.getInstance(getProject)
    assertEquals("1.8", compilerConfiguration.getBytecodeTargetLevel(getModule))
  }

  def testScalaSdkForEvictedVersion(): Unit = {
    import org.jetbrains.plugins.scala.project._

    // The module depends on the *newer* library while ModuleExtData still references
    // the evicted version; the newer library must become the Scala SDK anyway.
    val evictedVersion = "2.11.2"
    val newVersion = "2.11.6"

    val projectData = new project {
      name := getProject.getName
      ideDirectoryPath := getProject.getBasePath
      linkedProjectPath := getProject.getBasePath
      arbitraryNodes += new SbtProjectNode(SbtProjectData(Seq.empty, None, Seq.empty, "", getProject.getBasePath))

      val evictedScalaLibrary: library = new library { name := s"org.scala-lang:scala-library:$evictedVersion" }
      val newScalaLibrary: library = new library { name := s"org.scala-lang:scala-library:$newVersion" }
      libraries ++= Seq(evictedScalaLibrary, newScalaLibrary)

      modules += new javaModule {
        val uri = new File(getProject.getBasePath).toURI
        val moduleName = "Module 1"
        projectId := ModuleNode.combinedId(moduleName, Option(uri))
        projectURI := uri
        name := moduleName
        moduleFileDirectoryPath := getProject.getBasePath + "/module1"
        externalConfigPath := getProject.getBasePath + "/module1"
        libraryDependencies += newScalaLibrary
        arbitraryNodes += new ModuleExtNode(ModuleExtData("org.scala-lang", Some(Version(evictedVersion)), Seq.empty, Seq.empty, None, Seq.empty))
      }
    }.build.toDataNode

    importProjectData(projectData)

    val isLibrarySetUp = getProject.libraries
      .filter(_.getName.contains(newVersion))
      .exists(_.isScalaSdk)
    assertTrue("Scala library is not set up", isLibrarySetUp)
  }

  private def generateScalaProject(scalaVersion: String, scalaLibraryVersion: Option[String], scalacOptions: Seq[String]): DataNode[ProjectData] =
    generateProject("org.scala-lang", Some(scalaVersion), scalaLibraryVersion, scalacOptions, None, Seq.empty)

  private def generateJavaProject(jdk: Option[SdkReference], javacOptions: Seq[String]): DataNode[ProjectData] =
    generateProject("org.scala-lang", None, None, Seq.empty, jdk, javacOptions)

  private def generateProject(scalaOrganization: String, scalaVersion: Option[String], scalaLibraryVersion: Option[String], scalacOptions: Seq[String], jdk: Option[SdkReference], javacOptions: Seq[String]): DataNode[ProjectData] =
    new project {
      name := getProject.getName
      ideDirectoryPath := getProject.getBasePath
      linkedProjectPath := getProject.getBasePath
      arbitraryNodes += new SbtProjectNode(SbtProjectData(Seq.empty, None, Seq.empty, "", getProject.getBasePath))

      val scalaLibrary: Option[library] = scalaLibraryVersion.map { version =>
        new library { name := "org.scala-lang:scala-library:" + version }
      }
      scalaLibrary.foreach(libraries += _)

      modules += new javaModule {
        val uri = new File(getProject.getBasePath).toURI
        val moduleName = "Module 1"
        projectId := ModuleNode.combinedId(moduleName, Option(uri))
        projectURI := uri
        name := moduleName
        moduleFileDirectoryPath := getProject.getBasePath + "/module1"
        externalConfigPath := getProject.getBasePath + "/module1"
        scalaLibrary.foreach(libraryDependencies += _)
        arbitraryNodes += new ModuleExtNode(ModuleExtData(scalaOrganization, scalaVersion.map(Version(_)), Seq.empty, scalacOptions, jdk, javacOptions))
      }
    }.build.toDataNode

  private def doTestAndCheckScalaSdk(scalaVersion: String, scalaLibraryVersion: String): Unit = {
    import org.jetbrains.plugins.scala.project._
    importProjectData(generateScalaProject(scalaVersion, Some(scalaLibraryVersion), Seq.empty))
    val isLibrarySetUp = getProject.libraries
      .filter(_.getName.contains("scala-library"))
      .exists(_.isScalaSdk)
    assertTrue("Scala library is not set up", isLibrarySetUp)
  }

  private def doTestSdk(sdk: Option[SdkReference], expectedSdk: projectRoots.Sdk, expectedLanguageLevel: LanguageLevel): Unit =
    doTestSdk(sdk, Seq.empty, expectedSdk, expectedLanguageLevel)

  private def doTestSdk(sdk: Option[SdkReference], javacOptions: Seq[String], expectedSdk: projectRoots.Sdk, expectedLanguageLevel: LanguageLevel): Unit = {
    importProjectData(generateJavaProject(sdk, javacOptions))

    val moduleRootManager = ModuleRootManager.getInstance(getModule)
    // Unresolvable SDK reference falls back to inheriting the project SDK.
    if (sdk.flatMap(SdkUtils.findProjectSdk).isEmpty) {
      assertTrue(moduleRootManager.isSdkInherited)
    } else {
      assertEquals(expectedSdk, moduleRootManager.getSdk)
      val languageLevelModuleExtension = LanguageLevelModuleExtensionImpl.getInstance(getModule)
      val actualLanguageLevel = languageLevelModuleExtension.getLanguageLevel
      assertEquals(expectedLanguageLevel, actualLanguageLevel)
    }
  }

  private def setUpJdks(): Unit = inWriteAction {
    val projectJdkTable = ProjectJdkTable.getInstance()
    projectJdkTable.getAllJdks.foreach(projectJdkTable.removeJdk)
    projectJdkTable.addJdk(IdeaTestUtil.getMockJdk17)
    projectJdkTable.addJdk(IdeaTestUtil.getMockJdk18)
    // TODO: find a way to create mock Android SDK
  }

  private def tearDownJdks(): Unit = inWriteAction {
    val projectJdkTable = ProjectJdkTable.getInstance()
    projectJdkTable.getAllJdks.foreach(projectJdkTable.removeJdk)
  }

  private def defaultJdk: projectRoots.Sdk =
    ProjectJdkTable.getInstance().getAllJdks.head

  override def getModule: Module =
    ModuleManager.getInstance(getProject).findModuleByName("Module 1")
}
| jastice/intellij-scala | scala/scala-impl/test/org/jetbrains/sbt/project/data/service/ModuleExtDataServiceTest.scala | Scala | apache-2.0 | 11,835 |
package shapes.overloading
import Math._
import shapes.untouchable._
object Calculator {

  /** Area of a circle: r * r * pi. */
  def area(shape: Circle): Double = shape.r * shape.r * PI

  /** Area of a square: a * a. */
  def area(shape: Square): Double = shape.a * shape.a

  /** Dispatch on the concrete shape so callers can work with the base type. */
  def area(shape: Shape): Double = shape match {
    case circle: Circle => area(circle)
    case square: Square => area(square)
  }

  /** Areas of all shapes, in the same order as the input list. */
  def area(shapes: List[Shape]): List[Double] =
    shapes.map(s => area(s))
}
| tupol/scala-patterns-tc-pml | src/main/scala/shapes/overloading/Calculator.scala | Scala | apache-2.0 | 423 |
package empress
import scalatags.Text.all._
import scalatags.Text.tags2.nav
object SlideView {

  /**
   * Render one slide as a complete HTML page: Bootstrap chrome, a fixed navbar
   * with Previous/Next navigation, the slide's markup in a container, and
   * highlight.js assets for code blocks.
   *
   * @param slide            pre-rendered slide HTML (inserted raw, unescaped)
   * @param prev             index of the previous slide, if any
   * @param next             index of the next slide, if any
   * @param presentationName shown as page title and navbar brand
   */
  def template(
    slide: String,
    prev: Option[Int],
    next: Option[Int],
    presentationName: String
  ) =
    "<!DOCTYPE html>" + html(
      head(
        scalatags.Text.tags2.title()(presentationName),
        link(rel := "stylesheet", href := "https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css"),
        link(rel := "stylesheet", href := "https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap-theme.min.css"),
        link(rel := "stylesheet", href := "//cdnjs.cloudflare.com/ajax/libs/highlight.js/8.5/styles/github.min.css"),
        meta(charset := "utf-8"),
        meta(httpEquiv := "X-UA-Compatible", content := "IE=edge"),
        meta(name := "viewport", content := "width=device-width, initial-scale=1")
      ),
      body(
        // Offset the content below the fixed-top navbar.
        style := "padding-top: 50px;",
        nav(cls := "navbar navbar-default navbar-fixed-top")(
          div(
            cls := "container"
          )(
            div(cls := "navbar-header")(
              button(
                `type` := "button",
                cls := "navbar-toggle collapsed",
                data.toggle := "collapse",
                data.target := "#navbar",
                aria.expanded := "false",
                aria.controls := "navbar"
              )(
                span(cls := "icon-bar"),
                span(cls := "icon-bar"),
                span(cls := "icon-bar")
              ),
              a(
                cls := "navbar-brand",
                href := "#"
              )(
                presentationName
              )
            ),
            div(
              id := "navbar",
              cls := "navbar-collapse collapse"
            )(
              ul(cls := "nav navbar-nav navbar-right")(
                SlideView.slide(prev, next)
              )
            )
          )
        ),
        div(cls := "container")(raw(slide.toString)),
        script(src := "https://ajax.googleapis.com/ajax/libs/jquery/1.11.2/jquery.min.js"),
        script(src := "https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"),
        script(src := "//cdnjs.cloudflare.com/ajax/libs/highlight.js/8.5/highlight.min.js")
      )
    )

  /**
   * Navigation entries for whichever neighbouring slides exist: "Previous" for
   * `prev`, then "Next" for `next`; an empty sequence when neither is defined.
   * (Replaces a four-way match that duplicated each link literal twice.)
   */
  def slide(prev: Option[Int], next: Option[Int]): Seq[scalatags.Text.Modifier] = {
    def navLink(index: Int, label: String) =
      li(a(href := s"/slides/$index")(label))
    prev.toSeq.map(navLink(_, "Previous")) ++ next.toSeq.map(navLink(_, "Next"))
  }
}
| ckampfe/empress | src/main/scala/empress/SlideView.scala | Scala | agpl-3.0 | 2,935 |
/*
* Copyright (c) 2014 Paul Bernard
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Spectrum Finance is based in part on:
* QuantLib. http://quantlib.org/
*
*/
package org.quantintel.ql.time.calendars
import org.quantintel.ql.time.Month._
import org.quantintel.ql.time.{Orthodox, Date, Calendar}
import org.quantintel.ql.time.Weekday._
/** Market identifiers for the Ukraine calendar; USE = Ukrainian stock exchange. */
object UkraineEnum extends Enumeration {

  type UkraineEnum = Value

  val USE = Value(1)

  /** Resolve a numeric market code to its enum value (only 1 is valid). */
  def valueOf(market: Int): UkraineEnum =
    if (market == 1) USE
    else throw new Exception("Valid units = 1")
}
object Ukraine {

  /** Ukrainian stock exchange calendar with the default (USE) market. */
  def apply(): Calendar = new Ukraine()

  /** Ukrainian stock exchange calendar for the given market. */
  def apply(market: org.quantintel.ql.time.calendars.UkraineEnum.UkraineEnum): Calendar =
    new Ukraine(market)
}
/**
*
* Holidays for the Ukrainian stock exchange
* Saturdays
* Sundays
* New Year's Day, JANUARY 1st
* Orthodox Christmas, JANUARY 7th
* International Women's Day, March 8th
* Easter Monday
* Holy Trinity Day, 50 days after Easter
* International Workers Solidarity Days, May 1st and 2n
* Victory Day, May 9th
* Constitution Day, June 28th
* Independence Day, August 24th
* Note: Holidays falling on a Saturday or Sunday are moved to the following Monday.
*
* Reference: http://www.ukrse.kiev.ua/eng/
*
* @author Paul Bernard
*/
class Ukraine extends Calendar {

  // Default market implementation.
  impl = new Use

  import org.quantintel.ql.time.calendars.UkraineEnum._

  /** Construct a calendar for the given market; only USE is currently supported. */
  def this(market: org.quantintel.ql.time.calendars.UkraineEnum.UkraineEnum ) {
    this
    market match {
      case USE => impl = new Use
      case _ => throw new Exception("Valid units = 1")
    }
  }

  /** Ukrainian stock exchange rules, built on the Orthodox Easter cycle. */
  private class Use extends Orthodox {

    override def name : String = "Ukrainian stock exchange"

    // A day is a business day unless it is a weekend or one of the fixed/movable
    // holidays below (fixed holidays falling on Sat/Sun move to the next Monday).
    override def isBusinessDay(date: Date): Boolean = {

      // standard dependencies
      val w: Weekday = date.weekday
      val d: Int = date.dayOfMonth
      val dd: Int = date.dayOfYear
      val m: Month = date.month
      val y: Int = date.year
      val em: Int = easterMonday(y)   // day-of-year of Orthodox Easter Monday

      if (isWeekend(w)
        || ((d == 1 || ((d == 2 || d == 3) && w == MONDAY)) && m == JANUARY) // New Year's Day (possibly moved to Monday)
        || ((d == 7 || ((d == 8 || d == 9) && w == MONDAY)) && m == JANUARY) // Orthodox Christmas
        || ((d == 8 || ((d == 9 || d == 10) && w == MONDAY)) && m == MARCH) // Women's Day
        || (dd == em) // Orthodox Easter MONDAY
        || (dd == em + 49) // Holy Trinity Day
        || ((d == 1 || d == 2 || (d == 3 && w == MONDAY)) && m == MAY) // Workers' Solidarity Days
        || ((d == 9 || ((d == 10 || d == 11) && w == MONDAY)) && m == MAY) // Victory Day
        || (d == 28 && m == JUNE) // Constitution Day
        || (d == 24 && m == AUGUST)) // Independence Day
        false
      else true
    }
  }

}
| quantintel/spectrum | financial/src/main/scala/org/quantintel/ql/time/calendars/Ukraine.scala | Scala | apache-2.0 | 3,335 |
package eventstreams.support
/*
* Copyright 2014-15 Intelix Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import akka.cluster.Cluster
import eventstreams.core.actors.DefaultTopicKeys
import eventstreams.core.components.routing.MessageRouterActor
import eventstreams.{Command, ComponentKey, LocalSubj, TopicKey}
import play.api.libs.json.{JsValue, Json}
/** Test helpers for starting a MessageRouterActor and firing commands at it. */
trait MessageRouterActorTestContext extends DefaultTopicKeys {

  /** Spawn a MessageRouterActor in the given test actor system. */
  def startMessageRouter(system: ActorSystemWrapper, cluster: Cluster) =
    system.start(MessageRouterActor.props(cluster, system.config), MessageRouterActor.id)

  /** Send a command with an arbitrary subject to the router. */
  def sendCommand(system: ActorSystemWrapper, subject: Any, data: Option[JsValue]) = {
    val payload = data.map(Json.stringify)
    messageRouterActorSelection(system) ! Command(subject, None, payload)
  }

  /** Send a command addressed to a local component/topic pair. */
  def sendCommand(system: ActorSystemWrapper, localRoute: String, topic: TopicKey, data: Option[JsValue]) = {
    val subject = LocalSubj(ComponentKey(localRoute), topic)
    sendCommand(system, subject, data)
  }

  /** Selection pointing at the router actor under the root user guardian. */
  def messageRouterActorSelection(system: ActorSystemWrapper) =
    system.rootUserActorSelection(MessageRouterActor.id)
}
| intelix/eventstreams | es-core/es-api/src/test/scala/eventstreams/support/MessageRouterActorTestContext.scala | Scala | apache-2.0 | 1,641 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity, TensorModule}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{Node, T, Table}
import scala.reflect.ClassTag
/**
* A graph container. Each node can have multiple inputs. The output of the node should be a tensor.
* The output tensor can be connected to multiple nodes. So the module in each node can have a
* tensor or table input, and should have a tensor output.
*
* The graph container can have multiple inputs and multiple outputs. If there's one input, the
* input data fed to the graph module should be a tensor. If there're multiple inputs, the input
* data fed to the graph module should be a table, which is actually an sequence of tensor. The
* order of the input tensors should be same with the order of the input nodes. This is also
* applied to the gradient from the module in the back propagation.
*
* All of the input modules must accept a tensor input. If your input module accept multiple
* tensors as input, you should add some Input module before it as input nodes and connect the
* output of the Input modules to that module.
*
* If there's one output, the module output is a tensor. If there're multiple outputs, the module
* output is a table, which is actually an sequence of tensor. The order of the output tensors is
* same with the order of the output modules. This is also applied to the gradient passed to the
* module in the back propagation.
*
* All inputs should be able to connect to outputs through some paths in the graph. It is
* allowed that some successors of the inputs node are not connect to outputs. If so, these nodes
* will be excluded in the computation.
*
* @param inputs input nodes
* @param outputs output nodes
* @tparam T Numeric type. Only support float/double now
*/
@SerialVersionUID(- 2896121321564992779L)
class Graph[T: ClassTag](inputs : Seq[ModuleNode[T]],
  outputs : Seq[ModuleNode[T]])(implicit ev: TensorNumeric[T])
  extends Container[Activity, Activity, T]{

  /**
   * Forward pass: run every node in topologically sorted order. A node's input is
   * the raw graph input (root), a single predecessor's tensor, or a table built
   * from all predecessors' outputs; each node input is cached in `inputsBP` for
   * the backward pass.
   */
  override def updateOutput(input: Activity): Activity = {
    var i = 0
    while(i < executions.length) {
      val node = executions(i)
      inputsBP(i) = if (node.prevNodes.length == 0) {
        inputData(node, input)
      } else if (node.prevNodes.length == 1) {
        node.prevNodes.head.element.output.toTensor[T]
      } else {
        seqToTable(node.prevNodes.map(_.element.output))
      }
      node.element.updateOutput(inputsBP(i))
      i += 1
    }

    // Single output -> tensor; multiple outputs -> table in output-node order.
    output = if (outputs.length == 1) {
      outputs(0).element.output
    } else {
      seqToTable(outputs.map(_.element.output))
    }
    output
  }

  /**
   * Full backward pass (gradInput + parameter gradients) in reverse topological
   * order. Each node's gradOutput is the sum over its successors' gradInput
   * slices; the per-node gradOutput is cached in `gradOutputBP`.
   */
  override def backward(input: Activity, gradOutput: Activity): Activity = {
    // The dummy sink holds the external gradOutput so output nodes can read it
    // through the uniform "successor gradInput" mechanism.
    dummyOutput.element.gradInput = gradOutput
    var i = executions.length - 1
    while(i >= 0) {
      val curNode = executions(i)
      var curGradOutput : Tensor[T] = null
      curNode.nextNodes.foreach(n => {
        // If the successor has several inputs, pick the table slot matching
        // this node's position among its predecessors (1-based).
        val nextGradOutput = if (n.prevNodes.length == 1) {
          n.element.gradInput.toTensor
        } else {
          val nextGradOutputTable = n.element.gradInput.toTable
          nextGradOutputTable[Tensor[T]](n.prevNodes.indexOf(curNode) + 1)
        }

        // Accumulate gradients coming from every successor.
        if (curGradOutput == null) {
          curGradOutput = nextGradOutput
        } else {
          curGradOutput.add(nextGradOutput)
        }
      })

      gradOutputBP(i) = curGradOutput

      curNode.element.backward(inputsBP(i), curGradOutput)
      i -= 1
    }

    // Single input -> tensor; multiple inputs -> table in input-node order.
    gradInput = if (inputs.length == 1) {
      inputs(0).element.gradInput
    } else {
      seqToTable(inputs.map(_.element.gradInput))
    }
    gradInput
  }

  /**
   * Like [[backward]] but only propagates gradInput (no parameter-gradient
   * accumulation); pair with [[accGradParameters]] for the full update.
   */
  override def updateGradInput(input: Activity, gradOutput: Activity): Activity = {
    dummyOutput.element.gradInput = gradOutput
    var i = executions.length - 1
    while(i >= 0) {
      val curNode = executions(i)
      var curGradOutput : Tensor[T] = null
      curNode.nextNodes.foreach(n => {
        val nextGradOutput = if (n.prevNodes.length == 1) {
          n.element.gradInput.toTensor
        } else {
          val nextGradOutputTable = n.element.gradInput.toTable
          nextGradOutputTable[Tensor[T]](n.prevNodes.indexOf(curNode) + 1)
        }

        if (curGradOutput == null) {
          curGradOutput = nextGradOutput
        } else {
          curGradOutput.add(nextGradOutput)
        }
      })

      gradOutputBP(i) = curGradOutput

      curNode.element.updateGradInput(inputsBP(i), curGradOutput)
      i -= 1
    }

    gradInput = if (inputs.length == 1) {
      inputs(0).element.gradInput
    } else {
      seqToTable(inputs.map(_.element.gradInput))
    }
    gradInput
  }

  /**
   * Accumulate parameter gradients node by node, reusing the node inputs and
   * gradOutputs cached by updateOutput/updateGradInput.
   * NOTE(review): the `scale` argument is not forwarded to the nodes — confirm
   * whether the per-node default scale is intended here.
   */
  override def accGradParameters(input: Activity, gradOutput: Activity, scale: Double): Unit = {
    var i = executions.length - 1
    while(i >= 0) {
      val curNode = executions(i)
      curNode.element.accGradParameters(inputsBP(i), gradOutputBP(i))
      i -= 1
    }
  }

  /** Graphs are immutable once built: adding modules afterwards is forbidden. */
  override def add(module: AbstractModule[_ <: Activity, _ <: Activity, T]): Graph.this.type = {
    throw new IllegalArgumentException("Graph: Please don't use add method in Graph container. " +
      "A graph container should not be changed after it is constructed")
  }

  // Add a dummy output node, to get an one end graph. So the nodes that are not dependent by
  // the outputs will be excluded
  private val dummyOutput = new ModuleNode[T](new Dummy[T]())
  outputs.foreach(_ -> dummyOutput)
  private val backGraph = dummyOutput.graph(reverse = true)

  // Build execution plan: topological order from the dummy sink, reversed to get
  // forward order, with the dummy itself filtered out.
  private val executions = backGraph.topologySort.filter(!_.element.isInstanceOf[Dummy[T]]).reverse
  modules.appendAll(executions.map(_.element.asInstanceOf[AbstractModule[Activity, Activity, T]]))

  // Check all inputs of the graph should be passed in
  checkRoots

  // Per-node caches shared between forward and backward passes.
  private val inputsBP = new Array[Activity](executions.length)
  private val gradOutputBP = new Array[Tensor[T]](executions.length)

  /** Ensure the declared input nodes are exactly the roots of the pruned graph. */
  private def checkRoots : Unit = {
    val roots = executions.filter(_.prevNodes.size == 0)
    require(roots.size == inputs.length,
      s"There're ${inputs.length} inputs, but graph has ${roots.size} roots")
    inputs.foreach(n =>
      require(roots.contains(n), "inputs and graph roots are not match")
    )
  }

  /**
   * Rotate `data` in place so the element at `from` ends up at `to`, shifting the
   * elements in between by one position. Returns the same array.
   */
  private[nn] def shift[B](data : Array[B], from : Int, to : Int): Array[B] = {
    require(from < data.length && from >= 0, s"invalid from $from array length is ${data.length}")
    require(to < data.length && to >= 0, s"invalid to $to array length is ${data.length}")
    if (from == to) {
      data
    } else if (from < to) {
      var i = from
      while(i < to) {
        val tmp = data(i)
        data(i) = data(i + 1)
        data(i + 1) = tmp
        i += 1
      }
      data
    } else {
      var i = from
      while(i > to) {
        val tmp = data(i)
        data(i) = data(i - 1)
        data(i - 1) = tmp
        i -= 1
      }
      data
    }
  }

  /** Pack a sequence into a 1-based Table, preserving order. */
  private def seqToTable(inputs: Seq[_]) : Table = {
    val t = T()
    var j = 1
    inputs.foreach(tensor => {
      t(j) = tensor
      j += 1
    })
    t
  }

  /**
   * Select the slice of the external graph input belonging to `node`: the whole
   * tensor when there is a single input node, otherwise the table entry at the
   * node's (1-based) position in the input list.
   */
  private def inputData(
    node: Node[AbstractModule[Activity, Tensor[T], T]],
    input: Activity
  ): Activity = {
    if (inputs.length == 1) {
      require(inputs(0).eq(node), "input node is not in the input list")
      input.toTensor
    } else {
      val i = inputs.indexOf(node)
      require(i != -1, "input node is not in the input list")
      input.toTable[Tensor[T]](i + 1)
    }
  }
}
object Graph {
  /**
   * Node for graph container. The module should have a tensor/table input while a tensor output
   * @tparam T The numeric type of the wrapped module, usually [[Float]] or [[Double]]
   */
  type ModuleNode[T] = Node[AbstractModule[Activity, Tensor[T], T]]

  /**
   * Build multiple inputs, multiple outputs graph container.
   * @param input input nodes
   * @param output output nodes
   * @return a graph container
   */
  def apply[T: ClassTag](input : Array[ModuleNode[T]], output : Array[ModuleNode[T]])
    (implicit ev: TensorNumeric[T]) : Graph[T] = {
    new Graph[T](input, output)
  }

  /**
   * Build a single input, multiple outputs graph container
   * @param input input node
   * @param output output nodes
   * @return a graph container
   */
  def apply[T: ClassTag](input : ModuleNode[T], output : Array[ModuleNode[T]])
    (implicit ev: TensorNumeric[T]) : Graph[T] = {
    new Graph[T](Array(input), output)
  }

  /**
   * Build a multiple inputs, single output graph container
   * @param input input nodes
   * @param output output node
   * @return a graph container
   */
  def apply[T: ClassTag](input : Array[ModuleNode[T]], output : ModuleNode[T])
    (implicit ev: TensorNumeric[T]) : Graph[T] = {
    new Graph[T](input, Array(output))
  }

  /**
   * Build a single input, single output graph container
   * @param input input node
   * @param output output node
   * @return a graph container
   */
  def apply[T: ClassTag](input : ModuleNode[T], output : ModuleNode[T])
    (implicit ev: TensorNumeric[T]) : Graph[T] = {
    new Graph[T](Array(input), Array(output))
  }
}
/**
 * Identity module used as an explicit entry point of a graph container.
 *
 * Each input node of the graph container should accept one tensor as input. If you want a module
 * accepting multiple tensors as input, you should add some Input module before it and connect
 * the outputs of the Input nodes to it.
 * @tparam T The numeric type in the module, usually [[Float]] or [[Double]]
 */
@SerialVersionUID(- 8525406230282608924L)
class Input[T: ClassTag]()(implicit ev: TensorNumeric[T]) extends TensorModule[T] {
  // Identity forward: expose the incoming tensor unchanged (no copy).
  override def updateOutput(input: Tensor[T]): Tensor[T] = {
    output = input
    output
  }
  // Identity backward: pass the gradient straight through (no copy).
  override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
    gradInput = gradOutput
    gradInput
  }
}
object Input {
  /** Create a graph node wrapping a fresh identity [[Input]] module. */
  def apply[T: ClassTag]()(implicit ev: TensorNumeric[T]): ModuleNode[T] = {
    new Node(new Input().asInstanceOf[AbstractModule[Activity, Tensor[T], T]])
  }
}
// Internal sentinel module used as the single sink of the graph. It performs no
// computation (forward and backward both return null) and is filtered out of
// the execution plan after the topological sort.
private class Dummy[T: ClassTag]()(implicit ev: TensorNumeric[T])
  extends AbstractModule[Activity, Tensor[T], T] {
  override def updateOutput(input: Activity): Tensor[T] = null
  override def updateGradInput(input: Activity, gradOutput: Tensor[T]): Activity = null
}
| psyyz10/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Graph.scala | Scala | apache-2.0 | 10,998 |
package org.zbizaca.electric
import org.zbizaca.axonic.tools.Color
/**
* Created by zbizaca on 11/24/16.
*/
/**
 * A scored hit for a node, with its identifiers packed into the `color` bits.
 *
 * @param color  packed bit field holding link-type bits plus hit/query ids
 * @param nodeId identifier of the matched node
 * @param score  relevance score of the hit
 */
case class Result(color: Long, nodeId: Long, score: Double) {
  /** True when every bit set in `linkType` is also present in `color`. */
  def isCompatible(linkType: Long): Boolean = {
    val shared = color & linkType
    shared == linkType
  }

  /** Hit identifier decoded from the packed `color` value. */
  def getId: Int = Color.getHitId(color)

  /** Query identifier decoded from the packed `color` value. */
  def getQueryId: Byte = Color.getQueryId(color)
}
| zbizaca/toy-land | src/main/scala/org/zbizaca/electric/Result.scala | Scala | apache-2.0 | 350 |
import java.io.File
import testgen.TestSuiteBuilder._
import testgen._
object LuhnGenerator {
  /**
   * Generate the Luhn exercise test suite from its canonical JSON data and
   * print the generated code to stdout, framed by separator lines.
   */
  def main(args: Array[String]): Unit = {
    val canonicalData = new File("src/main/resources/luhn.json")
    val generated = TestSuiteBuilder.build(canonicalData, fromLabeledTestFromInput("value"))
    println("-------------")
    println(generated)
    println("-------------")
  }
}
| ricemery/xscala | testgen/src/main/scala/LuhnTestGenerator.scala | Scala | mit | 358 |
package querious
import StringInterpolation._
import fastparse.core
import fastparse.core.Parsed
import fastparse.core.Parsed.{Failure, Success}
import hedgehog._
import hedgehog.runner._
/**
* @author Kevin Lee
* @since 2017-07-22
*/
object EscapeParserSpec extends Properties {
  // Test registry consumed by the hedgehog runner.
  override def tests: List[Test] = List(
    example(
      """Parsers.escape.parse("'") return Failure(_, 0, _)""",
      testParsersEscapeParseOneSingleQuotes
    ),
    example(
      """Parsers.escape.parse("''") return Success("'", 2)""",
      testParsersEscapeParseTwoSingleQuotes
    ),
    example(
      """Parsers.escape.parse("\\\\") should return Failure(_, 0, _)""",
      testParsersEscapeParseDoubleBackslash
    ),
    property(
      raw"""Parsers.escape.parse(one of ${TestData.escapingCharsToString})""",
      testParsersEscapeParseOneOfEscapingChars
    )
  )
  // A lone single quote is not a complete escape sequence, so parsing fails at index 0.
  // NOTE(review): `matchPattern { ... }` looks like ScalaTest matcher syntax yet these
  // methods must return a hedgehog Result — confirm the mixed-in syntax makes this compile.
  def testParsersEscapeParseOneSingleQuotes: Result = {
    val actual = Parsers.escape.parse("'")
    actual matchPattern { case Failure(_, 0, _) => }
  }
  // Two single quotes are the escape for a literal quote: captures "'", consumes 2 chars.
  def testParsersEscapeParseTwoSingleQuotes: Result = {
    val expected: Success[Any, Char, String] = Success("'", 2)
    val actual: Parsed[Any, Char, String] = Parsers.escape.parse("''")
    actual ==== expected
  }
  // A double backslash is not accepted by the escape parser.
  def testParsersEscapeParseDoubleBackslash: Result = {
    val actual = Parsers.escape.parse("\\\\")
    actual matchPattern { case Failure(_, 0, _) => }
  }
  // Property: every known escaping sequence parses fully, and capturing with `.!`
  // yields the original text and consumes its full length.
  def testParsersEscapeParseOneOfEscapingChars: Property = for {
    value <- Gen.elementUnsafe(TestData.escapingChars).log("value")
  } yield {
    val expected: Success[Any, Char, String] = Success((), value.length)
    val actual: Parsed[Any, Char, String] = Parsers.escape.parse(value)
    val expectedCapture: core.Parsed[String, Char, String] = Success(value, value.length)
    import fastparse.all._
    val actualCaptured: core.Parsed[String, Char, String] = Parsers.escape.!.parse(value)
    actual ==== expected and actualCaptured ==== expectedCapture
  }
}
| Kevin-Lee/sql-parser-scala | src/test/scala/querious/EscapeParserSpec.scala | Scala | mit | 1,973 |
import com.dongxiguo.fastring.Fastring.Implicits._
import io.gatling.commons.stats.assertion._
import io.gatling.commons.util.StringHelper.Eol
import io.gatling.core.stats.writer.RunMessage
import org.openjdk.jmh.annotations.{Benchmark, Scope, State}
package com.dongxiguo.fastring.benchmark {
/**
* @author 杨博 (Yang Bo)
*/
object AssertionsJUnitTemplateBenchmark {
  // Benchmark fixture; @volatile keeps JMH/JIT from constant-folding reads.
  @volatile
  var runMessage: RunMessage = RunMessage(
    simulationClassName = "MySimulationClassName",
    userDefinedSimulationId = Some("myUserDefinedSimulationId"),
    defaultSimulationId = "defaultSimulationId",
    start = 42L,
    runDescription = """My description:
blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah
"""
  )
  // Three representative results: pass with message only, pass with an actual
  // value, and a failure — together exercising every branch of the template.
  @volatile
  var assertionResults: List[AssertionResult] = List(
    AssertionResult(
      assertion = Assertion(
        path = ForAll,
        target = CountTarget(FailedRequests),
        condition = Gt(1000.0)
      ),
      result = true,
      message = "my assertion message blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah",
      None
    ),
    AssertionResult(
      assertion = Assertion(
        path = Details(List("foo", "bar", "baz")),
        target = CountTarget(AllRequests),
        condition = Gte(9999.99)
      ),
      result = true,
      message = "my assertion message blah blah blah blah blah blah blah blah blah blah blah blah blah blah",
      Some(12.3)
    ),
    AssertionResult(
      assertion = Assertion(
        path = Global,
        target = CountTarget(SuccessfulRequests),
        condition = Gte(9999.99)
      ),
      result = false,
      message = "my assertion message blah blah blah blah blah blah blah blah blah blah blah blah",
      None
    )
  )
  /**
   *
   * @note This benchmark is a modified version of [[https://github.com/gatling/gatling/blob/62340816e95d212a4cee07b296a5c8dee73eaf59/gatling-charts/src/main/scala/io/gatling/charts/template/AssertionsJUnitTemplate.scala]]
   */
  @State(Scope.Benchmark)
  class FastringAssertionsJUnitTemplate {
    // Render one result's payload: <system-out> on success, <failure> otherwise.
    private[this] def printMessage(assertionResult: AssertionResult): Fastring =
      if (assertionResult.result)
        fastraw"""<system-out>${assertionResult.message}</system-out>"""
      else
        fastraw"""<failure type="${assertionResult.assertion.path.printable}">Actual value: ${assertionResult.actualValue
          .getOrElse(-1)}</failure>"""
    // Render one <testcase> element wrapping the payload above.
    private[this] def print(assertionResult: AssertionResult): Fastring =
      fastraw"""<testcase name="${assertionResult.message}" status="${assertionResult.result}" time="0">
${printMessage(assertionResult)}
</testcase>"""
    // Assemble the full <testsuite> document as a lazy Fastring tree.
    def getOutput: Fastring =
      fastraw"""<testsuite name="${runMessage.simulationClassName}" tests="${assertionResults.size}" errors="0" failures="${assertionResults
        .count(_.result == false)}" time="0">
${assertionResults.map(print).mkFastring(Eol)}
</testsuite>"""
    // Measures Fastring tree construction only (no String materialization).
    @Benchmark
    def benchmark() = {
      getOutput
    }
    // Measures tree construction plus the final toString materialization.
    @Benchmark
    def benchmarkToString() = {
      getOutput.toString
    }
  }
}
}
package com.sizmek.fsi.workaround {
@State(Scope.Benchmark)
class SizmekFastStringInterpolatorAssertionsJUnitTemplate {
  import com.sizmek.fsi._
  import com.dongxiguo.fastring.benchmark.AssertionsJUnitTemplateBenchmark._
  // Mirror of FastringAssertionsJUnitTemplate using Sizmek's `fraw` interpolator,
  // which builds plain Strings eagerly instead of lazy Fastring trees.
  private[this] def printMessage(assertionResult: AssertionResult): String =
    if (assertionResult.result)
      fraw"""<system-out>${assertionResult.message}</system-out>"""
    else
      fraw"""<failure type="${assertionResult.assertion.path.printable}">Actual value: ${assertionResult.actualValue
        .getOrElse(-1)}</failure>"""
  // Render one <testcase> element for a result.
  private[this] def print(assertionResult: AssertionResult): String =
    fraw"""<testcase name="${assertionResult.message}" status="${assertionResult.result}" time="0">
${printMessage(assertionResult)}
</testcase>"""
  // Assemble the full <testsuite> document as a String.
  def getOutput: String = {
    fraw"""<testsuite name="${runMessage.simulationClassName}" tests="${assertionResults.size}" errors="0" failures="${assertionResults
      .count(_.result == false)}" time="0">
${assertionResults.map(print).mkString(Eol)}
</testsuite>"""
  }
  @Benchmark
  def benchmark() = {
    getOutput
  }
}
}
| Atry/fastring | benchmark/src/main/scala-2.12/com/dongxiguo/fastring/benchmark/AssertionsJUnitTemplateBenchmark.scala | Scala | apache-2.0 | 4,555 |
/*
* Copyright 2015 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mongodb.spark
import org.mongodb.scala.Document
import org.mongodb.spark.rdd.DocumentRDDFunctions
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContext
import org.bson.BsonValue
import scala.language.implicitConversions
package object streaming {
  /** Enrich a StreamingContext with MongoDB streaming helpers. */
  implicit def toStreamingContextFunctions(ssc: StreamingContext): StreamingContextFunctions =
    StreamingContextFunctions(ssc)
  /** Enrich an RDD of Documents with MongoDB save operations. */
  implicit def toDocumentRDDFunctions(rdd: RDD[Document]): DocumentRDDFunctions =
    DocumentRDDFunctions(rdd)
  /** Same enrichment for RDDs of (String, BsonValue) iterables, converting each element to a Document first. */
  implicit def iterableToDocumentRDDFunctions[D <: Iterable[(String, BsonValue)]](rdd: RDD[D]): DocumentRDDFunctions =
    DocumentRDDFunctions(rdd.map(Document(_)))
}
| rozza/sparked | mongo-spark/src/main/scala/org/mongodb/spark/streaming/package.scala | Scala | apache-2.0 | 1,301 |
package com.eevolution.context.dictionary.infrastructure.service
import java.util.UUID
import akka.NotUsed
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.PackageExport
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.api.{Service, ServiceCall}
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 15/11/17.
*/
/**
 * Package Export Service: read-only Lagom service descriptor exposing
 * PackageExport entities by id, by UUID, as a full list, and paginated.
 */
trait PackageExportService extends Service with api.service.PackageExportService {

  override def getAll() : ServiceCall[NotUsed, List[PackageExport]]

  override def getById(id: Int): ServiceCall[NotUsed, PackageExport]

  override def getByUUID(uuid :UUID): ServiceCall[NotUsed, PackageExport]

  override def getAllByPage(pageNo: Option[Int], pageSize: Option[Int]): ServiceCall[NotUsed, PaginatedSequence[PackageExport]]

  def descriptor = {
    import Service._
    named("packageExport").withCalls(
      pathCall("/api/v1_0_0/packageExport/all", getAll _) ,
      // NOTE(review): "/:id" and "/:uuid" describe the same URL shape, and "all"
      // also matches a single dynamic segment — these three routes overlap.
      // Confirm the intended resolution order against Lagom's routing rules.
      pathCall("/api/v1_0_0/packageExport/:id", getById _),
      pathCall("/api/v1_0_0/packageExport/:uuid", getByUUID _) ,
      pathCall("/api/v1_0_0/packageExport?pageNo&pageSize", getAllByPage _)
    )
  }
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/service/PackageExportService.scala | Scala | gpl-3.0 | 2,087 |
package com.breezecapsule.scalarest
import org.mortbay.jetty.handler.{AbstractHandler, DefaultHandler}
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import org.mortbay.jetty.{Request, Server}
import xml.Elem
import scala.collection.JavaConverters._
import io.Source
;
/**
* HTTP companion object for accessing the HTTP Server API.
*/
object HTTP {
    /**
     * Create a new HTTPServer bound to the given port (default 8080).
     * The server is constructed but not started; call start() on the result.
     */
    def createServer(port: Int = 8080): HTTPServer = new HTTPServer(port)
}
/**
 * Class representing a Response: how (or whether) a requested resource can be
 * represented back to the client.
 */
sealed abstract class ResourceRepresentation();
// Non-success outcomes: an HTTP status code plus an optional body with details.
// NOTE(review): case classes extending other case classes (ResourceNotFound,
// Text, Xml, ...) is rejected by Scala 2.10+; this hierarchy compiles only on
// older Scala versions — confirm the build's Scala version.
case class ResourceNotRepresentable(httpCode: Int, details: Option[MimeResource]) extends ResourceRepresentation();
case class ResourceNotFound() extends ResourceNotRepresentable(404, None);
case class ResourceUnauthorized() extends ResourceNotRepresentable(401, None);
case class ResourcePermanentlyRedirected(url: String) extends ResourceNotRepresentable(301, None);
case class ResourceTemporarilyRedirected(url: String) extends ResourceNotRepresentable(307, None);
// Successful representations: a MIME type plus a payload in various forms.
case class MimeResource(mimeType: String) extends ResourceRepresentation;
case class Text(text: String) extends MimeResource("text/plain");
case class Xml(xml: Elem) extends MimeResource("text/xml");
case class CharacterStream(override val mimeType: String, source: Source) extends MimeResource(mimeType);
case class ByteStream(override val mimeType: String, stream: Stream[Byte]) extends MimeResource(mimeType);
/**
 * Case class encapsulating a Request.
 *
 * @param path request URI pattern this request carries (default: match any)
 * @param parameters request parameters as single-valued name/value pairs
 */
abstract class ResourceRequest(path: String = ".*", parameters: Map[String, String] = Map());
/**
 * Case class representing HTTP GET
 */
case class Get(path: String = ".*", parameters: Map[String, String] = Map()) extends ResourceRequest(path, parameters);
/**
 * Case class representing HTTP PUT
 */
case class Put(path: String = ".*", parameters: Map[String, String] = Map()) extends ResourceRequest(path, parameters);
/**
 * Case class representing HTTP POST
 */
case class Post(path: String = ".*", parameters: Map[String, String] = Map()) extends ResourceRequest(path, parameters);
/**
 * Case class representing HTTP DELETE
 */
case class Delete(path: String = ".*", parameters: Map[String, String] = Map()) extends ResourceRequest(path, parameters);
/**
 * Lifecycle trait for components that can be started and stopped.
 */
trait Lifecycle {
    /**
     * Start the object.
     */
    def start();
    /**
     * Stop the object.
     */
    def stop();
}
/**
* HTTP server.
*/
class HTTPServer(port: Int) extends Lifecycle {

    /**
     * Underlying Jetty server instance bound to the configured port.
     */
    private val server = new Server(port)

    private val defaultHandler = new DefaultHandler()

    private var requestRouter = new RequestRouterHandler()

    // Route every incoming request through the reactor-based router.
    server.addHandler(requestRouter)

    /**
     * Start the server.
     */
    def start = server.start()

    /**
     * Stop the server.
     */
    def stop = server.stop()

    /**
     * Register a reactor that handles matching requests. Returns an AndWord
     * so that further reactors can be chained fluently.
     */
    def reactsTo(reactor: PartialFunction[ResourceRequest, ResourceRepresentation]): AndWord = {
        requestRouter.registerReactor(reactor)
        new AndWord {
            def and(next: PartialFunction[ResourceRequest, ResourceRepresentation]): AndWord = {
                requestRouter.registerReactor(next)
                this
            }
        }
    }
}
/**
 * "And" word of the reactor DSL, allowing fluent chaining of further reactors.
 */
trait AndWord {
    /**
     * Register an additional reactor.
     * @param reactor a PartialFunction covering the requests the reactor responds to.
     * @return this word, so that more reactors can be chained.
     */
    def and(reactor: PartialFunction[ResourceRequest, ResourceRepresentation]): AndWord;
}
/**
 * Jetty handler for providing the routing logic: translates servlet requests
 * into ResourceRequest values, dispatches them to the first matching reactor,
 * and writes the resulting ResourceRepresentation back to the response.
 */
protected class RequestRouterHandler extends AbstractHandler {

    /**
     * List of reactors, most recently registered first.
     */
    var reactors: List[PartialFunction[ResourceRequest, ResourceRepresentation]] = List();

    /**
     * Register a reactor (prepended, so later registrations win ties).
     */
    def registerReactor(reactor: PartialFunction[ResourceRequest, ResourceRepresentation]) = {
        reactors ::= reactor;
    }

    /**
     * Handle the request from Jetty.
     */
    override def handle(target: String, request: HttpServletRequest, response: HttpServletResponse, dispatch: Int) = {
        val parameters: scala.collection.mutable.Map[String, Array[String]] =
            request.getParameterMap.asScala.asInstanceOf[scala.collection.mutable.Map[String, Array[String]]];
        // Only the first value of each multi-valued parameter is kept.
        // NOTE(review): any verb other than GET/PUT/DELETE/POST (e.g. HEAD,
        // OPTIONS) hits no case and throws MatchError — confirm that is intended.
        val resourceRequest = request.getMethod match {
            case "GET" => Get(request.getRequestURI, parameters.mapValues(_(0)).toMap);
            case "PUT" => Put(request.getRequestURI, parameters.mapValues(_(0)).toMap);
            case "DELETE" => Delete(request.getRequestURI, parameters.mapValues(_(0)).toMap);
            case "POST" => Post(request.getRequestURI, parameters.mapValues(_(0)).toMap);
        };
        // First reactor whose partial function is defined for the request wins.
        reactors.find(_.isDefinedAt(resourceRequest)) match {
            case Some(reactor) => routeRequestToReactor(reactor, resourceRequest, request, response);
            case _ => handle404(request, response);
        }
        (request.asInstanceOf[Request]).setHandled(true);
    }

    /**
     * Route a request to the reactor and handle its response.
     */
    private def routeRequestToReactor(reactor: PartialFunction[ResourceRequest, ResourceRepresentation],
                                      resourceRequest: ResourceRequest, request: HttpServletRequest,
                                      response: HttpServletResponse) = {
        // Write a MIME body: sets Content-Type, then streams the payload.
        def mimeResponseHandler(r: MimeResource): Any = {
            response.setHeader("Content-Type", r.mimeType);
            r match {
                case Text(s) => response.getWriter.print(s);
                case Xml(xml) => response.getWriter.print(xml.toString);
                case ByteStream(_, stream) => {
                    val outputStream = response.getOutputStream;
                    stream.foreach(outputStream.write(_));
                }
                case CharacterStream(_, source) => {
                    val writer = response.getWriter;
                    source.foreach(writer.write(_));
                }
            }
        }
        // Map each representation variant onto status code + headers + body.
        reactor(resourceRequest) match {
            case r: MimeResource =>
                response.setStatus(HttpServletResponse.SC_OK);
                mimeResponseHandler(r);
            case r: ResourceTemporarilyRedirected =>
                response.sendRedirect(response.encodeRedirectURL(r.url));
            case r: ResourcePermanentlyRedirected =>
                response.setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY);
                response.setHeader("Location", response.encodeRedirectURL(r.url));
            case ResourceNotRepresentable(httpCode, None) =>
                response.setStatus(httpCode);
            case ResourceNotRepresentable(httpCode, Some(mimeDetails)) =>
                response.setStatus(httpCode);
                mimeResponseHandler(mimeDetails);
        }
    }

    /**
     * Handle the case where no reactor matches the request.
     */
    private def handle404(request: HttpServletRequest, response: HttpServletResponse) {
        response.setStatus(HttpServletResponse.SC_NOT_FOUND);
    }
}
package jp.co.cyberagent.aeromock.msgpack
import java.math.BigInteger
import jp.co.cyberagent.aeromock.test.SpecSupport
import org.msgpack.`type`.ValueFactory
import org.specs2.mutable.{Tables, Specification}
/**
*
* @author stormcat24
*/
/**
 * Verifies that MessagepackValue("compact") converts Scala Iterables into the
 * expected msgpack Value trees (maps are flattened to positional arrays in
 * compact mode, so keys are dropped and only values are encoded in order).
 */
class MessagepackValueSpec extends Specification with Tables with SpecSupport {

  "array type" should {
    // A homogeneous Int list maps to an IntegerValue array.
    "array simple" in {
      val expected = ValueFactory.createArrayValue(Array(
        ValueFactory.createIntegerValue(100),
        ValueFactory.createIntegerValue(200),
        ValueFactory.createIntegerValue(300)
      ))
      val input = List(100, 200, 300)
      MessagepackValue("compact").fromIterable(input) must_== expected
    }
    // Mixed scalar types: each element maps to its msgpack counterpart,
    // including null -> NilValue and String -> RawValue.
    "array complex" in {
      val expected = ValueFactory.createArrayValue(Array(
        ValueFactory.createIntegerValue(100),
        ValueFactory.createFloatValue(20.0f),
        ValueFactory.createFloatValue(30.0),
        ValueFactory.createBooleanValue(true),
        ValueFactory.createIntegerValue(400.toShort),
        ValueFactory.createIntegerValue(500L),
        ValueFactory.createIntegerValue(BigInteger.valueOf(600L)),
        ValueFactory.createNilValue,
        ValueFactory.createRawValue("700")
      ))
      val input = List(100, 20.0f, 30.0, true, 400.toShort, 500L, BigInteger.valueOf(600L), null, "700")
      MessagepackValue("compact").fromIterable(input) must_== expected
    }
  }

  "map type" should {
    // Nested maps become nested positional arrays (keys are not encoded).
    "simple" in {
      val expected = ValueFactory.createArrayValue(Array(
        ValueFactory.createIntegerValue(100),
        ValueFactory.createArrayValue(Array(
          ValueFactory.createIntegerValue(1000),
          ValueFactory.createRawValue("prop1mapValue")
        ))
      ))
      val input = Map(
        "id" -> 100,
        "detail" -> Map(
          "intprop" -> 1000,
          "stringprop" -> "prop1mapValue"
        )
      )
      MessagepackValue("compact").fromIterable(input) must_== expected
    }
    // Deep mix of maps, lists, and lists-of-maps, all flattened positionally.
    "complex" in {
      val expected = ValueFactory.createArrayValue(Array(
        ValueFactory.createIntegerValue(100),
        ValueFactory.createArrayValue(Array(
          ValueFactory.createIntegerValue(100),
          ValueFactory.createIntegerValue(200),
          ValueFactory.createIntegerValue(300)
        )),
        ValueFactory.createArrayValue(Array(
          ValueFactory.createArrayValue(Array(
            ValueFactory.createRawValue("prop1value")
          )),
          ValueFactory.createArrayValue(Array(
            ValueFactory.createRawValue("prop2value")
          ))
        )),
        ValueFactory.createArrayValue(Array(
          ValueFactory.createRawValue("prop1value"),
          ValueFactory.createArrayValue(Array(
            ValueFactory.createIntegerValue(10),
            ValueFactory.createIntegerValue(20),
            ValueFactory.createIntegerValue(30)
          ))
        ))
      ))
      val input = Map(
        "id" -> 100,
        "array" -> List(
          100, 200, 300
        ),
        "arraymap" -> List(
          Map("prop1" -> "prop1value"),
          Map("prop2" -> "prop2value")
        ),
        "map" -> Map(
          "prop1" -> "prop1value",
          "array" -> List(10, 20, 30)
        )
      )
      MessagepackValue("compact").fromIterable(input) must_== expected
    }
  }
}
| CyberAgent/aeromock | aeromock-server/src/test/scala/jp/co/cyberagent/aeromock/msgpack/MessagepackValueSpec.scala | Scala | mit | 3,308 |
package models
import play.api.libs.json.{JsNumber, JsObject, JsValue, Json}
import java.sql.Timestamp
/**
* Created by Engin Yoeyen on 02/10/14.
*/
// Mutable, dynamically-typed row: column name -> value, serializable to JSON.
// NOTE(review): `Logger` (used in select) and `Date` (matched in anyWriter) are
// not imported in this file's visible imports — confirm they resolve, otherwise
// this does not compile as-is. `Json.toJson` on Timestamp/Time also needs
// implicit Writes instances that Play does not provide by default.
class Row {
  // Backing store for the row's columns.
  private var map = Map.empty[String, Any]

  // Dynamic getter: row.select("name") (or row("name") via apply-like usage).
  // NOTE(review): on a missing key this evaluates Logger.error and therefore
  // returns Unit, not an error value — callers cannot distinguish a miss.
  def select(name: String) = {
    map get name getOrElse Logger.error("method not found")
  }

  // Dynamic setter: row.update("name")(value), i.e. row("name") = value.
  def update(name: String)(value: Any) {
    map += name -> value
  }

  // True when at least one column has been set.
  def hasElement = map.size > 0

  // NOTE(review): renders the Option wrapper ("Some({...})" / "None"), which is
  // probably not the intended external representation.
  override def toString = toJson().toString()

  // Serialize all columns into one merged JsObject; None when the row is empty.
  def toJson(): Option[JsObject] = {
    if(hasElement){
      val list = for ((k,v) <- map) yield Json.obj( k -> anyWriter(v))
      Some(list.reduce(_ ++ _))
    }else{
      None
    }
  }

  // Convert a supported runtime value to JsValue; throws for unsupported types.
  private def anyWriter(a:Any): JsValue = {
    a match {
      case a: Double => Json.toJson(a)
      case a: Float => Json.toJson(a)
      case a: Long => Json.toJson(a)
      case a: Int => Json.toJson(a)
      case a: String => Json.toJson(a)
      case a: Boolean => Json.toJson(a)
      case a: Timestamp => Json.toJson(a)
      case a: java.sql.Time => Json.toJson(a)
      case a: Date => Json.toJson(a)
      case a: java.math.BigInteger => Json.toJson( a.toString()) //TEMPORARY SOLUTION WRITE
      case a: java.math.BigDecimal => Json.toJson(JsNumber(a))
      case _ => throw new RuntimeException("Type not serializable : "+a)
    }
  }
}
| enginyoyen/postgresql-rest-api | app/models/Row.scala | Scala | mit | 1,310 |
package jp.co.dwango.s99
import jp.co.dwango.s99.binary_trees._
object P55 {
  object Tree {
    /**
     * Construct all completely balanced binary trees with `count` nodes, each
     * node holding `value` (S-99 problem 55). A tree is completely balanced
     * when the node counts of every node's two subtrees differ by at most one.
     */
    def cBalanced[T](count: Int, value: T): List[Tree[T]] = {
      require(count >= 0)
      if (count == 0) {
        List(End)
      } else if (count % 2 == 0) {
        // Even total: after removing the root, one subtree holds one node more
        // than the other; emit both left/right arrangements of each pairing.
        val ls = cBalanced((count - 1) / 2, value)
        val rs = cBalanced((count - 1) / 2 + 1, value)
        ls.flatMap { l =>
          rs.flatMap { r =>
            List(new Node(value, l, r), new Node(value, r, l))
          }
        }
      } else {
        // Odd total: both subtrees hold exactly count / 2 nodes, so pair every
        // candidate with every candidate (cartesian product of the same list).
        val childrenList = cBalanced(count / 2, value)
        for {
          l <- childrenList
          r <- childrenList
        } yield new Node(value, l, r)
      }
    }
  }
}
| dwango/S99 | src/main/scala/jp/co/dwango/s99/P55.scala | Scala | mit | 702 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package services
import _root_.models.api.Address
import config.FrontendAppConfig
import models._
import org.mockito.ArgumentMatchers.matches
import org.mockito.Mockito._
import org.mockito.{ArgumentMatchers => matchers}
import play.api.libs.json.Json
import testHelpers.VatRegSpec
import uk.gov.hmrc.http.HttpResponse
import uk.gov.hmrc.http.cache.client.CacheMap
import scala.concurrent.Future
class BusinessContactServiceSpec extends VatRegSpec {

  val testService: BusinessContactService = new BusinessContactService(
    mockVatRegistrationConnector,
    mockS4LService
  )

  implicit val appConfig: FrontendAppConfig = app.injector.instanceOf[FrontendAppConfig]

  val dummyCacheMap: CacheMap = CacheMap("", Map("" -> Json.toJson("")))

  /** Minimal contact-details fixture shared by most scenarios. */
  private val testContactDetails = CompanyContactDetails("test@test.com", None, None, None)

  /** Builds a partial [[BusinessContact]]; unspecified sections default to None. */
  private def partialContact(ppob: Option[Address] = None,
                             details: Option[CompanyContactDetails] = None,
                             preference: Option[ContactPreference] = None): BusinessContact =
    BusinessContact(ppobAddress = ppob, companyContactDetails = details, contactPreference = preference)

  /** Stubs the Save4Later fetch to return the given (possibly absent) partial model. */
  private def stubS4LFetch(contact: Option[BusinessContact]): Unit =
    when(mockS4LService.fetchAndGet[BusinessContact](matchers.any(), matchers.any(), matchers.any(), matchers.any()))
      .thenReturn(Future.successful(contact))

  /** Stubs a successful Save4Later save. */
  private def stubS4LSave(): Unit =
    when(mockS4LService.save(matchers.any())(matchers.any(), matchers.any(), matchers.any(), matchers.any()))
      .thenReturn(Future.successful(dummyCacheMap))

  /** Stubs the registration backend fetch. */
  private def stubBackendFetch(contact: Option[BusinessContact]): Unit =
    when(mockVatRegistrationConnector.getBusinessContact(matchers.any(), matchers.any()))
      .thenReturn(Future.successful(contact))

  /** Stubs a successful backend upsert plus the subsequent Save4Later cleanup,
    * i.e. the path taken once the model is complete. */
  private def stubBackendUpsertAndClear(): Unit = {
    when(mockVatRegistrationConnector.upsertBusinessContact(matchers.any())(matchers.any(), matchers.any()))
      .thenReturn(Future.successful(Json.parse("""{"abc" : "xyz"}""")))
    when(mockS4LService.clearKey(matchers.any(), matchers.any(), matchers.any()))
      .thenReturn(Future.successful(dummyCacheMap))
  }

  "getBusinessContact" should {
    "return a populated BusinessContact model" when {
      "there is a model in S4L" in {
        val businessContact = partialContact(
          ppob = Some(testAddress),
          details = Some(testContactDetails),
          preference = Some(Letter)
        )

        stubS4LFetch(Some(businessContact))

        val result = await(testService.getBusinessContact)
        result mustBe businessContact
      }

      "there is data in the backend" in {
        val businessContact = partialContact(
          ppob = Some(Address(
            line1 = "testLine1",
            line2 = Some("testLine2"),
            line3 = Some("testLine3"),
            line4 = Some("testLine4"),
            postcode = Some("TE57 7ET"),
            addressValidated = true
          )),
          details = Some(CompanyContactDetails(
            email = "test@test.com",
            phoneNumber = Some("1234567890"),
            mobileNumber = Some("9876545678"),
            websiteAddress = Some("/test/url")
          )),
          preference = Some(Email)
        )

        stubS4LFetch(None)
        stubBackendFetch(Some(businessContact))
        stubS4LSave()

        val result = await(testService.getBusinessContact)
        result mustBe businessContact
      }
    }

    "return an empty model" when {
      "there is no data in either S4L or the backend" in {
        stubS4LFetch(None)
        stubBackendFetch(None)
        stubS4LSave()

        val result = await(testService.getBusinessContact)
        result mustBe partialContact()
      }
    }
  }

  "updateBusinessContact" should {
    "determine that the model is incomplete and save in S4L - nothing pre-populated, update ppobAddress" in {
      stubS4LFetch(Some(partialContact()))
      stubBackendFetch(None)
      stubS4LSave()

      val result = await(testService.updateBusinessContact[Address](testAddress))
      result mustBe testAddress
    }

    "determine that the model is incomplete and save in S4L - nothing pre-populated, update companyContactDetails" in {
      stubS4LFetch(Some(partialContact()))
      stubBackendFetch(None)
      stubS4LSave()

      val result = await(testService.updateBusinessContact[CompanyContactDetails](testContactDetails))
      result mustBe testContactDetails
    }

    "determine that the model is incomplete and save in S4L - nothing pre-populated, update contactPreference" in {
      stubS4LFetch(Some(partialContact()))
      stubBackendFetch(None)
      stubS4LSave()

      val result = await(testService.updateBusinessContact[ContactPreference](Email))
      result mustBe Email
    }

    "determine that the model is incomplete and save in S4L - ppobAddress pre-populated, update companyContactDetails" in {
      stubS4LFetch(Some(partialContact(ppob = Some(testAddress))))
      stubS4LSave()

      val result = await(testService.updateBusinessContact[CompanyContactDetails](testContactDetails))
      result mustBe testContactDetails
    }

    "determine that the model is incomplete and save in S4L - ppobAddress pre-populated, update contactPreference" in {
      stubS4LFetch(Some(partialContact(ppob = Some(testAddress))))
      stubS4LSave()

      val result = await(testService.updateBusinessContact[ContactPreference](Email))
      result mustBe Email
    }

    "determine that the model is incomplete and save in S4L - companyContactDetails pre-populated, update ppobAddress" in {
      stubS4LFetch(Some(partialContact(details = Some(testContactDetails))))
      stubS4LSave()

      val result = await(testService.updateBusinessContact[Address](testAddress))
      result mustBe testAddress
    }

    "determine that the model is incomplete and save in S4L - companyContactDetails pre-populated, update contactPreference" in {
      stubS4LFetch(Some(partialContact(details = Some(testContactDetails))))
      stubS4LSave()

      val result = await(testService.updateBusinessContact[ContactPreference](Email))
      result mustBe Email
    }

    "determine that the model is incomplete and save in S4L - contactPreference pre-populated, update ppobAddress" in {
      stubS4LFetch(Some(partialContact(preference = Some(Letter))))
      stubS4LSave()

      val result = await(testService.updateBusinessContact[Address](testAddress))
      result mustBe testAddress
    }

    "determine that the model is incomplete and save in S4L - contactPreference pre-populated, update companyContactDetails" in {
      stubS4LFetch(Some(partialContact(preference = Some(Email))))
      stubS4LSave()

      val result = await(testService.updateBusinessContact[CompanyContactDetails](testContactDetails))
      result mustBe testContactDetails
    }

    "determine that the model is complete and save in the backend - update companyContactDetails" in {
      stubS4LFetch(Some(partialContact(ppob = Some(testAddress), preference = Some(Email))))
      stubBackendUpsertAndClear()

      val result = await(testService.updateBusinessContact[CompanyContactDetails](testContactDetails))
      result mustBe testContactDetails
    }

    "determine that the model is complete and save in the backend - update ppobAddress" in {
      stubS4LFetch(Some(partialContact(details = Some(testContactDetails), preference = Some(Email))))
      stubBackendUpsertAndClear()

      val result = await(testService.updateBusinessContact[Address](testAddress))
      result mustBe testAddress
    }

    "determine that the model is complete and save in the backend - update contactPreference" in {
      stubS4LFetch(Some(partialContact(ppob = Some(testAddress), details = Some(testContactDetails))))
      stubBackendUpsertAndClear()

      val result = await(testService.updateBusinessContact[ContactPreference](Letter))
      result mustBe Letter
    }
  }
}
| hmrc/vat-registration-frontend | test/services/BusinessContactServiceSpec.scala | Scala | apache-2.0 | 15,015 |
/**
* Copyright 2013 SelfishInc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import Keys._
import sbtassembly.Plugin._
import AssemblyKeys._
// sbt 0.13-era "full configuration" build definition (Build trait API).
// NOTE(review): setting-sequence order matters in the sbt DSL, so the code
// below is documented rather than restructured.
object ApplicationBuild extends Build {

  // Resolver pointing at the company Nexus; `r` selects the repository name
  // ("snapshots" or "releases").
  private def frumaticRepository(r : String) : Resolver =
    "Sonatype Nexus Repository Manager" at "http://nexus.frumatic.com/content/repositories/" + r

  val frumaticRepositorySnapshots = frumaticRepository("snapshots")
  val frumaticRepositoryReleases = frumaticRepository("releases")

  val appName = "scala-mongo-connector"

  // Toggles both the version suffix and the publish target below.
  val isSnapshot = true
  val version = "1.0" + (if (isSnapshot) "-SNAPSHOT" else "")

  val scalaStyleSettings = org.scalastyle.sbt.ScalastylePlugin.Settings

  // Base settings + sbt-assembly + scalastyle, extended with project-specific
  // overrides. `Keys.version` is qualified to avoid clashing with the local
  // `version` val above.
  val buildSettings = Defaults.defaultSettings ++ assemblySettings ++ scalaStyleSettings ++ Seq (
    organization := "SelfishInc",
    Keys.version := version,
    scalaVersion := Versions.ScalaVersion,
    scalacOptions in ThisBuild ++= Seq(
      "-feature",
      "-language:postfixOps",
      "-deprecation"
    ),
    retrieveManaged := true,
    test in assembly := {},

    // Mitigates GC overhead-limit errors observed under high load.
    javaOptions in run ++= Seq(
      "-d64", "-Xmx2G", "-XX:-UseConcMarkSweepGC"
    ),
    testOptions in Test := Nil,
    libraryDependencies ++= appDependencies,
    resolvers ++= Seq(
      "Local Maven Repository" at "file://"+Path.userHome.absolutePath+"/.m2/repository"
    ),
    exportJars := true,
    // Snapshot builds publish to the snapshots repo, otherwise to releases.
    publishTo := {
      if (isSnapshot)
        Some(frumaticRepositorySnapshots)
      else
        Some(frumaticRepositoryReleases)
    },
    credentials += Credentials(Path.userHome / ".ivy2" / ".credentials")
  )

  import Versions._
  val appDependencies = Seq(
    "com.typesafe.akka" %% "akka-actor" % AkkaVersion ,
    "com.typesafe.akka" %% "akka-slf4j" % AkkaVersion,
    "org.mongodb" % "mongo-java-driver" % "2.11.3",
    "org.scalatest" %% "scalatest" % "1.9.2",
    "org.apache.solr" % "solr-solrj" % "4.5.0",
    "ch.qos.logback" % "logback-classic" % "1.0.13",
    "com.typesafe" %% "scalalogging-slf4j" % "1.0.1",
    "commons-logging" % "commons-logging" % "1.1.1"
  )

  // Single root project rooted at the build directory.
  val main = Project(
    appName,
    file("."),
    settings = buildSettings
  )
}
// Centralised dependency versions shared across the build definition.
object Versions {
  val ScalaVersion = "2.10.3"
  val AkkaVersion = "2.2.2"
} | SelfishInc/solr-mongo-connector | project/Build.scala | Scala | apache-2.0 | 2,807 |
/*
* Copyright 2017-2022 John Snow Labs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.johnsnowlabs.nlp.annotators.classifier.dl
import com.johnsnowlabs.ml.tensorflow._
import com.johnsnowlabs.nlp.AnnotatorType.{CATEGORY, SENTENCE_EMBEDDINGS}
import com.johnsnowlabs.nlp._
import com.johnsnowlabs.nlp.annotators.ner.Verbose
import com.johnsnowlabs.nlp.pretrained.ResourceDownloader
import com.johnsnowlabs.nlp.serialization.StructFeature
import com.johnsnowlabs.storage.HasStorageRef
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.ml.param.{IntArrayParam, StringArrayParam}
import org.apache.spark.ml.util.Identifiable
import org.apache.spark.sql.{Dataset, SparkSession}
/**
* ClassifierDL for generic Multi-class Text Classification.
*
* ClassifierDL uses the state-of-the-art Universal Sentence Encoder as an input for text classifications.
* The ClassifierDL annotator uses a deep learning model (DNNs) we have built inside TensorFlow and supports up to
* 100 classes.
*
* This is the instantiated model of the [[ClassifierDLApproach]].
* For training your own model, please see the documentation of that class.
*
* Pretrained models can be loaded with `pretrained` of the companion object:
* {{{
* val classifierDL = ClassifierDLModel.pretrained()
* .setInputCols("sentence_embeddings")
* .setOutputCol("classification")
* }}}
* The default model is `"classifierdl_use_trec6"`, if no name is provided. It uses embeddings from the
* [[com.johnsnowlabs.nlp.embeddings.UniversalSentenceEncoder UniversalSentenceEncoder]] and is trained on the
* [[https://deepai.org/dataset/trec-6#:~:text=The%20TREC%20dataset%20is%20dataset,50%20has%20finer%2Dgrained%20labels TREC-6]] dataset.
* For available pretrained models please see the [[https://nlp.johnsnowlabs.com/models?task=Text+Classification Models Hub]].
*
* For extended examples of usage, see the
* [[https://github.com/JohnSnowLabs/spark-nlp-workshop/blob/master/tutorials/Certification_Trainings/Public/5.Text_Classification_with_ClassifierDL.ipynb Spark NLP Workshop]]
* and the [[https://github.com/JohnSnowLabs/spark-nlp/blob/master/src/test/scala/com/johnsnowlabs/nlp/annotators/classifier/dl/ClassifierDLTestSpec.scala ClassifierDLTestSpec]].
*
* ==Example==
* {{{
* import spark.implicits._
* import com.johnsnowlabs.nlp.base.DocumentAssembler
* import com.johnsnowlabs.nlp.annotator.SentenceDetector
* import com.johnsnowlabs.nlp.annotators.classifier.dl.ClassifierDLModel
* import com.johnsnowlabs.nlp.embeddings.UniversalSentenceEncoder
* import org.apache.spark.ml.Pipeline
*
* val documentAssembler = new DocumentAssembler()
* .setInputCol("text")
* .setOutputCol("document")
*
* val sentence = new SentenceDetector()
* .setInputCols("document")
* .setOutputCol("sentence")
*
* val useEmbeddings = UniversalSentenceEncoder.pretrained()
* .setInputCols("document")
* .setOutputCol("sentence_embeddings")
*
* val sarcasmDL = ClassifierDLModel.pretrained("classifierdl_use_sarcasm")
* .setInputCols("sentence_embeddings")
* .setOutputCol("sarcasm")
*
* val pipeline = new Pipeline()
* .setStages(Array(
* documentAssembler,
* sentence,
* useEmbeddings,
* sarcasmDL
* ))
*
* val data = Seq(
* "I'm ready!",
* "If I could put into words how much I love waking up at 6 am on Mondays I would."
* ).toDF("text")
* val result = pipeline.fit(data).transform(data)
*
* result.selectExpr("explode(arrays_zip(sentence, sarcasm)) as out")
* .selectExpr("out.sentence.result as sentence", "out.sarcasm.result as sarcasm")
* .show(false)
* +-------------------------------------------------------------------------------+-------+
* |sentence |sarcasm|
* +-------------------------------------------------------------------------------+-------+
* |I'm ready! |normal |
* |If I could put into words how much I love waking up at 6 am on Mondays I would.|sarcasm|
* +-------------------------------------------------------------------------------+-------+
* }}}
*
* @see [[MultiClassifierDLModel]] for multi-class classification
* @see [[SentimentDLModel]] for sentiment analysis
* @groupname anno Annotator types
* @groupdesc anno Required input and expected output annotator types
* @groupname Ungrouped Members
* @groupname param Parameters
* @groupname setParam Parameter setters
* @groupname getParam Parameter getters
* @groupname Ungrouped Members
* @groupprio param 1
* @groupprio anno 2
* @groupprio Ungrouped 3
* @groupprio setParam 4
* @groupprio getParam 5
* @groupdesc param A list of (hyper-)parameter keys this annotator can take. Users can set and get the parameter values through setters and getters, respectively.
*
* */
class ClassifierDLModel(override val uid: String)
  extends AnnotatorModel[ClassifierDLModel] with HasSimpleAnnotate[ClassifierDLModel]
    with WriteTensorflowModel
    with HasStorageRef
    with ParamsAndFeaturesWritable {
  def this() = this(Identifiable.randomUID("ClassifierDLModel"))

  /** Input annotator type : SENTENCE_EMBEDDINGS
    *
    * @group anno
    */
  override val inputAnnotatorTypes: Array[AnnotatorType] = Array(SENTENCE_EMBEDDINGS)
  /** Output annotator type : CATEGORY
    *
    * @group anno
    */
  override val outputAnnotatorType: String = CATEGORY

  /** ConfigProto from tensorflow, serialized into byte array. Get with config_proto.SerializeToString()
    *
    * @group param
    */
  val configProtoBytes = new IntArrayParam(this, "configProtoBytes", "ConfigProto from tensorflow, serialized into byte array. Get with config_proto.SerializeToString()")

  /** Tensorflow config Protobytes passed to the TF session
    *
    * @group setParam
    */
  def setConfigProtoBytes(
    bytes: Array[Int]
  ): ClassifierDLModel.this.type = set(this.configProtoBytes, bytes)

  /** Tensorflow config Protobytes passed to the TF session
    *
    * @group getParam
    */
  def getConfigProtoBytes: Option[Array[Byte]] =
    get(this.configProtoBytes).map(_.map(_.toByte))

  /** Dataset params
    *
    * @group param
    */
  val datasetParams = new StructFeature[ClassifierDatasetEncoderParams](this, "datasetParams")

  /** Labels used to train this model
    *
    * @group param
    */
  val classes = new StringArrayParam(this, "classes", "Labels used to train this model")

  /** Dataset params
    *
    * @group setParam
    */
  def setDatasetParams(params: ClassifierDatasetEncoderParams): ClassifierDLModel.this.type =
    set(this.datasetParams, params)

  // Spark broadcast holding the TF classifier so executors share one copy.
  private var _model: Option[Broadcast[TensorflowClassifier]] = None

  // Broadcasts the TensorFlow classifier exactly once per model instance;
  // subsequent calls are no-ops. Requires datasetParams to already be set
  // (the encoder is rebuilt from them).
  def setModelIfNotSet(spark: SparkSession, tf: TensorflowWrapper): this.type = {
    if (_model.isEmpty) {
      require(datasetParams.isSet, "datasetParams must be set before usage")
      val encoder = new ClassifierDatasetEncoder(datasetParams.get.get)
      _model = Some(
        spark.sparkContext.broadcast(
          new TensorflowClassifier(
            tf,
            encoder,
            Verbose.Silent
          )
        )
      )
    }
    this
  }

  // NOTE(review): assumes setModelIfNotSet was called first — throws on the
  // inner Option.get otherwise.
  def getModelIfNotSet: TensorflowClassifier = _model.get.value

  /** Labels used to train this model
    *
    * Side effect: also stores the tags into the `classes` param.
    *
    * @group getParam
    */
  def getClasses: Array[String] = {
    val encoder = new ClassifierDatasetEncoder(datasetParams.get.get)
    set(classes, encoder.tags)
    encoder.tags
  }

  // Validates that the incoming sentence-embeddings column was produced by the
  // embedding model this classifier was trained with (storageRef check).
  override protected def beforeAnnotate(dataset: Dataset[_]): Dataset[_] = {
    validateStorageRef(dataset, $(inputCols), AnnotatorType.SENTENCE_EMBEDDINGS)
    dataset
  }

  /**
    * takes a document and annotations and produces new annotations of this annotator's annotation type
    *
    * @param annotations Annotations that correspond to inputAnnotationCols generated by previous annotators if any
    * @return any number of annotations processed for every input annotation. Not necessary one to one relationship
    */
  override def annotate(annotations: Seq[Annotation]): Seq[Annotation] = {
    // Keep only sentence embeddings, grouped and ordered by their sentence
    // index (metadata key "sentence", defaulting to "0" when absent).
    val sentences = annotations
      .filter(_.annotatorType == SENTENCE_EMBEDDINGS)
      .groupBy(_.metadata.getOrElse[String]("sentence", "0").toInt)
      .toSeq
      .sortBy(_._1)

    if (sentences.nonEmpty)
      getModelIfNotSet.predict(sentences, getConfigProtoBytes)
    else Seq.empty[Annotation]
  }

  // Persists the TensorFlow graph next to the standard Spark ML metadata so
  // the model can be reloaded by ReadClassifierDLTensorflowModel.
  override def onWrite(path: String, spark: SparkSession): Unit = {
    super.onWrite(path, spark)
    writeTensorflowModel(
      path,
      spark,
      getModelIfNotSet.tensorflow,
      "_classifierdl",
      ClassifierDLModel.tfFile,
      configProtoBytes = getConfigProtoBytes
    )
  }
}
/** Provides the `pretrained(...)` download entry points for [[ClassifierDLModel]],
  * delegating to the ResourceDownloader with "classifierdl_use_trec6" as default. */
trait ReadablePretrainedClassifierDL
  extends ParamsAndFeaturesReadable[ClassifierDLModel]
    with HasPretrained[ClassifierDLModel] {
  override val defaultModelName: Some[String] = Some("classifierdl_use_trec6")

  override def pretrained(name: String, lang: String, remoteLoc: String): ClassifierDLModel = {
    ResourceDownloader.downloadModel(ClassifierDLModel, name, Option(lang), remoteLoc)
  }

  /** Java compliant-overrides: fixed-arity variants delegating to the full form. */
  override def pretrained(): ClassifierDLModel = pretrained(defaultModelName.get, defaultLang, defaultLoc)

  override def pretrained(name: String): ClassifierDLModel = pretrained(name, defaultLang, defaultLoc)

  override def pretrained(name: String, lang: String): ClassifierDLModel = pretrained(name, lang, defaultLoc)
}
/** Registers a reader that restores the TensorFlow graph when a saved
  * [[ClassifierDLModel]] is loaded from disk. */
trait ReadClassifierDLTensorflowModel extends ReadTensorflowModel {
  this: ParamsAndFeaturesReadable[ClassifierDLModel] =>

  override val tfFile: String = "classifierdl_tensorflow"

  // Rehydrates the broadcast TF classifier and re-derives the label tags.
  def readTensorflow(instance: ClassifierDLModel, path: String, spark: SparkSession): Unit = {

    val tf = readTensorflowModel(path, spark, "_classifierdl_tf", initAllTables = true)
    instance.setModelIfNotSet(spark, tf)
    // This allows for Python to access getClasses function
    val encoder = new ClassifierDatasetEncoder(instance.datasetParams.get.get)
    instance.set(instance.classes, encoder.tags)
  }

  // Hook the reader into the ParamsAndFeaturesReadable load pipeline.
  addReader(readTensorflow)
}
/**
 * This is the companion object of [[ClassifierDLModel]]. Please refer to that class for the documentation.
 *
 * Mixes in [[ReadablePretrainedClassifierDL]] (pretrained-model download helpers)
 * and [[ReadClassifierDLTensorflowModel]] (TensorFlow graph restore on load).
 */
object ClassifierDLModel extends ReadablePretrainedClassifierDL with ReadClassifierDLTensorflowModel
| JohnSnowLabs/spark-nlp | src/main/scala/com/johnsnowlabs/nlp/annotators/classifier/dl/ClassifierDLModel.scala | Scala | apache-2.0 | 10,917 |
package slick.codegen
import slick.SlickException
import slick.ast.ColumnOption
import slick.{model => m}
import slick.model.ForeignKeyAction
import slick.relational.RelationalProfile
import slick.sql.SqlProfile
/** Base implementation for a Source code String generator */
abstract class AbstractSourceCodeGenerator(model: m.Model)
extends AbstractGenerator[String,String,String](model)
with StringGeneratorHelpers{
/** Generates code for the complete model (not wrapped in a package yet)
@group Basic customization overrides */
def code = {
"import slick.model.ForeignKeyAction\\n" +
( if(tables.exists(_.hlistEnabled)){
"import slick.collection.heterogeneous._\\n"+
"import slick.collection.heterogeneous.syntax._\\n"
} else ""
) +
( if(tables.exists(_.PlainSqlMapper.enabled)){
"// NOTE: GetResult mappers for plain SQL are only generated for tables where Slick knows how to map the types of all columns.\\n"+
"import slick.jdbc.{GetResult => GR}\\n"
} else ""
) +
(if(ddlEnabled){
"\\n/** DDL for all tables. Call .create to execute. */" +
(
if(tables.length > 5)
"\\nlazy val schema: profile.SchemaDescription = Array(" + tables.map(_.TableValue.name + ".schema").mkString(", ") + ").reduceLeft(_ ++ _)"
else if(tables.nonEmpty)
"\\nlazy val schema: profile.SchemaDescription = " + tables.map(_.TableValue.name + ".schema").mkString(" ++ ")
else
"\\nlazy val schema: profile.SchemaDescription = profile.DDL(Nil, Nil)"
) +
"\\n@deprecated(\\"Use .schema instead of .ddl\\", \\"3.0\\")"+
"\\ndef ddl = schema" +
"\\n\\n"
} else "") +
tables.map(_.code.mkString("\\n")).mkString("\\n\\n")
}
protected def tuple(i: Int) = termName(s"_${i+1}")
abstract class TableDef(model: m.Table) extends super.TableDef(model){
def compoundType(types: Seq[String]): String = {
if(hlistEnabled){
def mkHList(types: List[String]): String = types match {
case Nil => "HNil"
case e :: tail => s"HCons[$e," + mkHList(tail) + "]"
}
mkHList(types.toList)
}
else compoundValue(types)
}
def compoundValue(values: Seq[String]): String = {
if(hlistEnabled) values.mkString(" :: ") + " :: HNil"
else if (values.size == 1) values.head
else if(values.size <= 22) s"""(${values.mkString(", ")})"""
else throw new Exception("Cannot generate tuple for > 22 columns, please set hlistEnable=true or override compound.")
}
def factory = if(columns.size == 1) TableClass.elementType else s"${TableClass.elementType}.tupled"
def extractor = s"${TableClass.elementType}.unapply"
trait EntityTypeDef extends super.EntityTypeDef{
def code = {
val args = columns.map(c=>
c.default.map( v =>
s"${c.name}: ${c.exposedType} = $v"
).getOrElse(
s"${c.name}: ${c.exposedType}"
)
).mkString(", ")
if(classEnabled){
val prns = (parents.take(1).map(" extends "+_) ++ parents.drop(1).map(" with "+_)).mkString("")
s"""case class $name($args)$prns"""
} else {
s"""
type $name = $types
/** Constructor for $name providing default values if available in the database schema. */
def $name($args): $name = {
${compoundValue(columns.map(_.name))}
}
""".trim
}
}
}
trait PlainSqlMapperDef extends super.PlainSqlMapperDef{
def code = {
val positional = compoundValue(columnsPositional.map(c => (if(c.fakeNullable || c.model.nullable)s"<<?[${c.rawType}]"else s"<<[${c.rawType}]")))
val dependencies = columns.map(_.exposedType).distinct.zipWithIndex.map{ case (t,i) => s"""e$i: GR[$t]"""}.mkString(", ")
val rearranged = compoundValue(desiredColumnOrder.map(i => if(hlistEnabled) s"r($i)" else tuple(i)))
def result(args: String) = if(mappingEnabled) s"$factory($args)" else args
val body =
if(autoIncLastAsOption && columns.size > 1){
s"""
val r = $positional
import r._
${result(rearranged)} // putting AutoInc last
""".trim
} else
result(positional)
s"""
implicit def ${name}(implicit $dependencies): GR[${TableClass.elementType}] = GR{
prs => import prs._
${indent(body)}
}
""".trim
}
}
trait TableClassDef extends super.TableClassDef{
def star = {
val struct = compoundValue(columns.map(c=>if(c.fakeNullable)s"Rep.Some(${c.name})" else s"${c.name}"))
val rhs = if(mappingEnabled) s"$struct <> ($factory, $extractor)" else struct
s"def * = $rhs"
}
def option = {
val struct = compoundValue(columns.map(c=>if(c.model.nullable)s"${c.name}" else s"Rep.Some(${c.name})"))
val rhs = if(mappingEnabled) s"""$struct.shaped.<>($optionFactory, (_:Any) => throw new Exception("Inserting into ? projection not supported."))""" else struct
s"def ? = $rhs"
}
def optionFactory = {
val accessors = columns.zipWithIndex.map{ case(c,i) =>
val accessor = if(columns.size > 1) tuple(i) else "r"
if(c.fakeNullable || c.model.nullable) accessor else s"$accessor.get"
}
val fac = s"$factory(${compoundValue(accessors)})"
val discriminator = columns.zipWithIndex.collect{ case (c,i) if !c.model.nullable => if(columns.size > 1) tuple(i) else "r" }.headOption
val expr = discriminator.map(d => s"$d.map(_=> $fac)").getOrElse(s"None")
if(columns.size > 1)
s"{r=>import r._; $expr}"
else
s"r => $expr"
}
def code = {
val prns = parents.map(" with " + _).mkString("")
val args = model.name.schema.map(n => s"""Some("$n")""") ++ Seq("\\""+model.name.table+"\\"")
s"""
class $name(_tableTag: Tag) extends Table[$elementType](_tableTag, ${args.mkString(", ")})$prns {
${indent(body.map(_.mkString("\\n")).mkString("\\n\\n"))}
}
""".trim()
}
}
trait TableValueDef extends super.TableValueDef{
def code = s"lazy val $name = new TableQuery(tag => new ${TableClass.name}(tag))"
}
// Generates the `column[...]` definition for a single table column, including its options
// and (where applicable) its default value literal.
class ColumnDef(model: m.Column) extends super.ColumnDef(model){
import ColumnOption._
import RelationalProfile.ColumnOption._
import SqlProfile.ColumnOption._
// Renders one ColumnOption as Scala source, or None for options that are not emitted.
// NOTE: this is a partial-function body; the result type comes from the overridden method.
def columnOptionCode = {
case ColumnOption.PrimaryKey => Some(s"O.PrimaryKey")
case Default(value) => Some(s"O.Default(${default.get})") // .get is safe here
case SqlType(dbType) => Some(s"""O.SqlType("$dbType")""")
case Length(length,varying) => Some(s"O.Length($length,varying=$varying)")
case AutoInc => Some(s"O.AutoInc")
case NotNull|Nullable => throw new SlickException( s"Please don't use Nullable or NotNull column options. Use an Option type, respectively the nullable flag in Slick's model model Column." )
case o => None // throw new SlickException( s"Don't know how to generate code for unexpected ColumnOption $o." )
}
// Renders a default value as a Scala literal. Option values recurse so that e.g.
// Some("x") becomes `Some("x")`; unsupported types fail loudly rather than emitting bad code.
def defaultCode = {
case Some(v) => s"Some(${defaultCode(v)})"
case s:String => "\\""+s+"\\""
case None => s"None"
case v:Byte => s"$v"
case v:Int => s"$v"
case v:Long => s"${v}L"
case v:Float => s"${v}F"
case v:Double => s"$v"
case v:Boolean => s"$v"
case v:Short => s"$v"
case v:Char => s"'$v'"
case v:BigDecimal => s"new scala.math.BigDecimal(new java.math.BigDecimal($v))"
case v => throw new SlickException( s"Dont' know how to generate code for default value $v of ${v.getClass}. Override def defaultCode to render the value." )
}
// Explicit type to allow overloading existing Slick method names.
// Explicit type argument for better error message when implicit type mapper not found.
def code = s"""val $name: Rep[$actualType] = column[$actualType]("${model.name}"${options.map(", "+_).mkString("")})"""
}
// Generates a compound primary-key definition over the listed columns.
class PrimaryKeyDef(model: m.PrimaryKey) extends super.PrimaryKeyDef(model){
def code = s"""val $name = primaryKey("$dbName", ${compoundValue(columns.map(_.name))})"""
}
// Generates a foreign-key definition, including referential actions and
// Option-lifting when the two sides disagree on nullability.
class ForeignKeyDef(model: m.ForeignKey) extends super.ForeignKeyDef(model){
// Maps a model-level referential action to its Slick source representation.
def actionCode(action: ForeignKeyAction) = action match{
case ForeignKeyAction.Cascade => "ForeignKeyAction.Cascade"
case ForeignKeyAction.Restrict => "ForeignKeyAction.Restrict"
case ForeignKeyAction.NoAction => "ForeignKeyAction.NoAction"
case ForeignKeyAction.SetNull => "ForeignKeyAction.SetNull"
case ForeignKeyAction.SetDefault => "ForeignKeyAction.SetDefault"
}
def code = {
val pkTable = referencedTable.TableValue.name
// Pair up referenced (pk) and referencing (fk) columns. When exactly one side is
// nullable, the non-nullable side is wrapped in Rep.Some so both sides have the
// same Option-ness and the join expression type-checks.
val (pkColumns, fkColumns) = (referencedColumns, referencingColumns).zipped.map { (p, f) =>
val pk = s"r.${p.name}"
val fk = f.name
if(p.model.nullable && !f.model.nullable) (pk, s"Rep.Some($fk)")
else if(!p.model.nullable && f.model.nullable) (s"Rep.Some($pk)", fk)
else (pk, fk)
}.unzip
s"""lazy val $name = foreignKey("$dbName", ${compoundValue(fkColumns)}, $pkTable)(r => ${compoundValue(pkColumns)}, onUpdate=${onUpdate}, onDelete=${onDelete})"""
}
}
// Generates an index definition; emits `unique=true` only when the model index is unique.
class IndexDef(model: m.Index) extends super.IndexDef(model){
def code = {
val unique = if(model.unique) s", unique=true" else ""
s"""val $name = index("$dbName", ${compoundValue(columns.map(_.name))}$unique)"""
}
}
}
}
/**
 * String-based implementations of the generator helper operations: Scaladoc rendering,
 * Option-type wrapping, and quoting of identifiers that are not valid plain Scala names.
 */
trait StringGeneratorHelpers extends slick.codegen.GeneratorHelpers[String,String,String]{
  /** Prefixes `code` with a Scaladoc block built from `doc`; empty docs add nothing. */
  def docWithCode(doc: String, code: String): String = {
    val header =
      if (doc.isEmpty) ""
      else "/** " + doc.split("\\n").mkString("\\n * ") + " */\\n"
    header + code
  }
  /** Wraps a rendered type in `Option[...]`. */
  final def optionType(t: String) = s"Option[$t]"
  /** Types are already strings in this representation, so parsing is the identity. */
  def parseType(tpe: String): String = tpe
  /** True when `s` must be back-quoted: it is a Scala keyword or not a plain Java identifier. */
  def shouldQuoteIdentifier(s: String) = {
    val isPlainIdent =
      s.nonEmpty &&
        Character.isJavaIdentifierStart(s.head) &&
        s.tail.forall(Character.isJavaIdentifierPart)
    scalaKeywords.contains(s) || !isPlainIdent
  }
  /** Renders a term name, back-quoting it when necessary. */
  def termName(name: String) = if (shouldQuoteIdentifier(name)) s"`$name`" else name
  /** Renders a type name, back-quoting it when necessary. */
  def typeName(name: String) = if (shouldQuoteIdentifier(name)) s"`$name`" else name
}
| nmartynenko/slick | slick-codegen/src/main/scala/slick/codegen/AbstractSourceCodeGenerator.scala | Scala | bsd-2-clause | 10,429 |
package net.ruippeixotog.scalascraper
import java.io.PrintStream
import java.net.{InetSocketAddress, Proxy}
import scala.collection.immutable.SortedMap
import net.ruippeixotog.scalascraper.ExampleMatchers._
import net.ruippeixotog.scalascraper.browser.{HtmlUnitBrowser, JsoupBrowser}
import net.ruippeixotog.scalascraper.dsl.DSL.Extract._
import net.ruippeixotog.scalascraper.dsl.DSL._
import net.ruippeixotog.scalascraper.model.Element
import net.ruippeixotog.scalascraper.scraper.HtmlValidator
import net.ruippeixotog.scalascraper.util.EitherRightBias._
// Validators shared by the example apps below.
object ExampleMatchers {
// Success validator: page title must contain "Observador" (status code 1 on match — TODO confirm
// the integer's exact semantics against HtmlValidator's API).
val succ = HtmlValidator(text("head > title"), 1)(_.matches(".*Observador.*"))
// Error validators with distinct status values (2 and 3) used to report why validation failed.
val errs = Seq(
HtmlValidator(attr("content")("meta[name=viewport]"), 2)(_.matches(".*initial-scale=2\\\\.0.*")),
HtmlValidator(elements("meta[name=viewport]"), 3)(_.nonEmpty)
)
}
// Demo: routes all JsoupBrowser traffic through a local SOCKS proxy on port 3128.
// Performs live network requests; expect failures when no such proxy is running.
object ProxyApp extends App {
val browser = new JsoupBrowser(proxy = new Proxy(Proxy.Type.SOCKS, new InetSocketAddress("localhost", 3128)))
val doc = browser.get("http://observador.pt")
println("=== OBSERVADOR HTTP & HTTPS PROXY ===")
Thread.sleep(2000)
// You should get a [java.net.SocketTimeoutException: connect timed out] if you are behind a proxy
browser.get("http://observador.pt")
}
// Demo: fetches the Observador homepage and prints the logo image URL, the page's
// meta description, and the text of each anchor in the small-news list.
object NewsApp extends App {
val browser = JsoupBrowser()
val doc = browser.get("http://observador.pt")
println()
println("=== OBSERVADOR ===")
doc >> extractor(".logo img", attr("src")) |> println
doc >> extractor("meta[name=description]", attr("content")) |> println
println("==================")
println()
doc >> ".small-news-list h4 > a" foreach println
}
// Demo: validates the fetched page with the shared validators (`succ`/`errs`) and,
// on success, prints the first h1 element; otherwise prints the validation status.
object HeadlineApp extends App {
val browser = JsoupBrowser()
browser.get("http://observador.pt") >/~ (succ, errs) >> "h1" match {
case Right(headline) => println("HEADLINE: " + headline.head)
case Left(status) => println("Error: " + status)
}
}
// Demo: same as HeadlineApp, but follows the headline link and prints its lead paragraph.
// The for-comprehension relies on the right-biased Either from EitherRightBias.
object HeadlineBetterApp extends App {
val browser = JsoupBrowser()
for {
headline <- browser.get("http://observador.pt") >/~ (succ, errs) >> element("h1 a")
headlineDesc = browser.get(headline.attr("href")) >> text(".lead")
} println("== " + headline.text + " ==\\n" + headlineDesc)
}
// Demo: identical behavior to HeadlineBetterApp, written with the word-based DSL
// (`validateWith`/`extract`) instead of the symbolic operators (`>/~`/`>>`).
object HeadlineVerboseApp extends App {
val browser = JsoupBrowser()
for {
headline <- browser.get("http://observador.pt") validateWith (succ, errs) extract element("h1 a")
headlineDesc = browser.get(headline.attr("href")) extract text(".lead")
} println("== " + headline.text + " ==\\n" + headlineDesc)
}
// Demo: scrapes rateyourmusic's genre page into a tree of GenreNode values and
// writes it out as a nested YAML-style list to `genres.yaml`.
object MusicGenreTreeApp extends App {
val browser = JsoupBrowser()
// One node of the genre tree, rooted at an element containing genre anchors.
case class GenreNode(root: Element) {
// Leaf genres: direct child anchors with class "genre".
def leaves = root >> elementList("> a.genre") map { e => e.text -> e }
// Subtree genres: child divs whose bold heading holds a genre anchor; pairs name with its blockquote body.
def nodes = root >> elementList("> div:has(b:has(a.genre))") >> (text(".genre"), element("blockquote"))
// All children keyed by genre name, sorted alphabetically via SortedMap.
def children: Map[String, GenreNode] = SortedMap(leaves ++ nodes: _*).mapValues(GenreNode.apply).toMap
// Renders the subtree as an indented YAML list; `d` is the current indent in spaces.
def renderYaml(d: Int = 0): String =
children.map {
case (k, v) if v.children.isEmpty => s"${" " * d}- $k\\n"
case (k, v) => s"${" " * d}- $k:\\n${v.renderYaml(d + 2)}"
}.mkString
}
val page = browser.get("http://rateyourmusic.com/rgenre/")
val out = new PrintStream("genres.yaml")
GenreNode(page >> element("#content")).renderYaml() |> out.println
}
// Demo: uses the typed HtmlUnit backend to click a link on example.com, then prints
// the page's h1 and paragraphs. NOTE(review): clicking mutates the underlying HtmlUnit
// page; presumably `doc` reflects the post-click state — confirm against HtmlUnitBrowser docs.
object PageInteractionApp extends App {
val browser = HtmlUnitBrowser.typed()
val doc = browser.get("http://example.com")
val moreInfoLink = doc >> pElement("a")
moreInfoLink.underlying.click()
doc >> text("h1").map("== " + _ + "==") |> println
doc >> texts("p") foreach println
}
| ruippeixotog/scala-scraper | core/src/test/scala/net/ruippeixotog/scalascraper/Examples.scala | Scala | mit | 3,620 |
/* Copyright 2017-19, Emmanouil Antonios Platanios. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.platanios.tensorflow.api.utilities
import scala.collection.mutable
import scala.util.Random
/** A key-value store using deterministic reservoir sampling.
*
* Items are added with an associated key. Items may be retrieved by the corresponding key, and a list of keys can also
* be retrieved. If `maxSize` is not zero, then it dictates the maximum number of items that will be stored for each
* key. Once there are more items for a given key, they are replaced via reservoir sampling, such that each item has an
* equal probability of being included in the sample.
*
* Deterministic means that for any given seed and bucket size, the sequence of values that are kept for any given key
* will always be the same, and that this is independent of any insertions for other keys. That is:
*
* {{{
* val reservoirA = ReservoirKVStore(10)
* val reservoirB = ReservoirKVStore(10)
* (0 until 100).foreach(i => reservoirA.add("key1", i))
* (0 until 100).foreach(i => reservoirA.add("key2", i))
* (0 until 100).foreach(i => {
* reservoirB.add("key1", i)
* reservoirB.add("key2", i)
* })
* }}}
*
* After executing this code, `reservoirA` and `reservoirB` will be in identical states.
*
* For more information on reservoir sampling, refer to [this page](https://en.wikipedia.org/wiki/Reservoir_sampling).
*
* Note that, adding items has amortized `O(1)` runtime cost.
*
* @param maxSize Maximum size of each bucket in this reservoir key-value store.
* @param seed Seed to use for the random number generator used while sampling.
* @param alwaysKeepLast Boolean flag indicating whether to always store the last seen item. If set to `true` and the
* last seen item was not sampled to be stored, then it replaces the last item in the
* corresponding bucket.
*
* @author Emmanouil Antonios Platanios
*/
case class Reservoir[K, V](maxSize: Int, seed: Long = 0L, alwaysKeepLast: Boolean = true) {
  require(maxSize >= 0, s"'maxSize' (= $maxSize) must be a non-negative integer.")

  // Guards all bucket access so that `add` and `filter` are thread-safe.
  private[this] object ItemsLock

  private[utilities] val buckets: mutable.Map[K, ReservoirBucket[V]] = mutable.Map.empty[K, ReservoirBucket[V]]

  /** All keys currently present in this store. */
  def keys: Iterable[K] = buckets.keys

  /** Items sampled for `key`; throws if the key has never been added to. */
  def items(key: K): List[V] = buckets(key).items

  /**
   * Adds `item` under `key`, creating the key's bucket on first use. Each bucket gets its
   * own generator seeded with `seed`, which is what makes sampling per-key deterministic.
   * `transformFn` is applied (lazily, inside the bucket) only if the item is actually kept.
   */
  def add(key: K, item: V, transformFn: V => V = identity[V]): Unit = ItemsLock synchronized {
    val bucket = buckets.getOrElseUpdate(key, ReservoirBucket(maxSize, new Random(seed), alwaysKeepLast))
    bucket.add(item, transformFn)
  }

  /**
   * Filters stored items with `filterFn`, either for a single `key` or (when `None`)
   * across every bucket. Returns the total number of items removed.
   */
  def filter(filterFn: V => Boolean, key: Option[K] = None): Int = ItemsLock synchronized {
    key match {
      case Some(k) => buckets.get(k).fold(0)(_.filter(filterFn))
      case None    => buckets.valuesIterator.map(_.filter(filterFn)).sum
    }
  }
}
/** Container for items coming from a stream, that implements reservoir sampling so that its size never exceeds
* `maxSize`.
*
* @param maxSize Maximum size of this bucket.
* @param random Random number generator to use while sampling.
* @param alwaysKeepLast Boolean flag indicating whether to always store the last seen item. If set to `true` and the
* last seen item was not sampled to be stored, then it replaces the last item in this bucket.
*/
case class ReservoirBucket[T](maxSize: Int, random: Random = new Random(0), alwaysKeepLast: Boolean = true) {
  require(maxSize >= 0, s"'maxSize' (= $maxSize) must be a non-negative integer.")

  // Guards `_items` and `numItemsSeen` so that `add` and `filter` are thread-safe.
  private[this] object ItemsLock

  private[this] var _items: List[T] = List.empty[T]
  private[utilities] var numItemsSeen: Int = 0

  /** Items currently kept in this bucket, in insertion order. */
  def items: List[T] = _items

  /**
   * Adds `item`, applying `transformFn` only when the item is actually kept.
   * Below capacity (or always, when `maxSize == 0`) the item is appended. At capacity,
   * reservoir sampling evicts a uniformly random survivor with probability
   * `maxSize / numItemsSeen`; otherwise, when `alwaysKeepLast` is set, the newest
   * item overwrites the current last element. Amortized O(1).
   */
  def add(item: T, transformFn: T => T = identity[T]): Unit = ItemsLock synchronized {
    val atCapacity = maxSize != 0 && _items.size >= maxSize
    if (!atCapacity) {
      _items = _items :+ transformFn(item)
    } else {
      // Always draw exactly one number here so the random stream (and therefore the
      // kept sample for a given seed) matches the documented deterministic behavior.
      val r = random.nextInt(numItemsSeen)
      if (r < maxSize) {
        // Evict the r-th survivor and append the new item at the end.
        _items = _items.take(r) ++ _items.drop(r + 1) :+ transformFn(item)
      } else if (alwaysKeepLast) {
        // Replace the last element so the newest item is always represented.
        _items = _items.init :+ transformFn(item)
      }
    }
    numItemsSeen += 1
  }

  /**
   * Removes items rejected by `filterFn` and returns how many were removed.
   * `numItemsSeen` is rescaled by the fraction of items that survived, approximating
   * how many of the historically-seen items would have passed the filter.
   */
  def filter(filterFn: T => Boolean): Int = ItemsLock synchronized {
    val before = _items.size
    _items = _items.filter(filterFn)
    val removed = before - _items.size
    val keptRatio = if (before > 0) _items.size.toFloat / before else 0.0f
    numItemsSeen = Math.round(keptRatio * numItemsSeen)
    removed
  }
}
| eaplatanios/tensorflow_scala | modules/api/src/main/scala/org/platanios/tensorflow/api/utilities/Reservoir.scala | Scala | apache-2.0 | 9,912 |
package framian
import org.specs2.mutable._
import org.specs2.ScalaCheck
import org.scalacheck._
import org.scalacheck.Arbitrary.arbitrary
import spire.algebra._
import spire.std.string._
import spire.std.double._
import spire.std.int._
import spire.std.iterable._
import shapeless._
/** Specs2 suite for framian's Frame: construction, indexing, merges, joins, grouping,
 * mapping, reductions and row appending. Fixtures f0..f6, s0, s1 are small frames/series
 * reused throughout; `people` is a labelled frame used for sorting tests. */
class FrameSpec extends Specification with ScalaCheck {
import Arbitrary.arbitrary
import Prop._
import FrameGenerators._
// Shared fixtures. f0/f1/f2 are 2-column (String, Int) frames; f3..f6 are single-column
// frames built from series with (partially) duplicated row keys.
val f0 = Frame.fromRows(
"a" :: 1 :: HNil,
"b" :: 2 :: HNil,
"c" :: 3 :: HNil)
val f1 = Frame.fromRows(
"a" :: 3 :: HNil,
"b" :: 2 :: HNil,
"c" :: 1 :: HNil)
val f2 = Frame.fromRows(
"a" :: 1 :: HNil,
"b" :: 2 :: HNil,
"b" :: 3 :: HNil)
val f3 = Series(
1 -> 3,
2 -> 2,
2 -> 1
).toFrame(0)
val f4 = Series(
1 -> 3,
2 -> 2,
2 -> 1
).toFrame(1)
val f5 = Series(
2 -> 3,
2 -> 2,
3 -> 1
).toFrame(1)
val f6 = Series(
2 -> 2,
2 -> 1
).toFrame(1)
val s0 = Series(
0 -> "s3",
1 -> "s2",
2 -> "s1")
val s1 = Series(
1 -> "s3",
2 -> "s2",
2 -> "s1")
val homogeneous = Frame.fromRows(
1.0 :: 2.0 :: 3.0 :: HNil,
0.5 :: 1.0 :: 1.5 :: HNil,
0.25 :: 0.5 :: 0.75 :: HNil
)
val people = Frame.fromRows(
"Bob" :: 32 :: "Manager" :: HNil,
"Alice" :: 24 :: "Employee" :: HNil,
"Charlie" :: 44 :: "Employee" :: HNil)
.withColIndex(Index.fromKeys("Name", "Age", "Level"))
.withRowIndex(Index.fromKeys("Bob", "Alice", "Charlie"))
// Basic construction, equality, sorting and index-replacement behavior.
"Frame" should {
"be fill-able" in {
val f = Frame.fill(1 to 3, 4 to 5) { (i, j) =>
val k = i + j
if (k % 2 == 0) NA else Value(k)
}
f must_== Frame.mergeColumns(
4 -> Series.fromCells(1 -> Value(5), 2 -> NA, 3 -> Value(7)),
5 -> Series.fromCells(1 -> NA, 2 -> Value(7), 3 -> NA)
)
}
"have sane equality" in {
f0 must_== f0
f0 must_!= f1
f1 must_!= f0
f0.column[String](0).toFrame("abc") must_== f1.column[String](0).toFrame("abc")
f0.column[Int](1).toFrame("123") must_!= f1.column[Int](1).toFrame("123")
}
"have sane hashCode" in {
f0.hashCode must_== f0.hashCode
f0.hashCode must_!= f1.hashCode
f1.hashCode must_!= f0.hashCode
f0.column[String](0).toFrame("abc").hashCode must_== f1.column[String](0).toFrame("abc").hashCode
f0.column[Int](1).toFrame("123").hashCode must_!= f1.column[Int](1).toFrame("123").hashCode
}
"sort columns" in {
people.sortColumns must_== Frame.fromRows(
32 :: "Manager" :: "Bob" :: HNil,
24 :: "Employee" :: "Alice" :: HNil,
44 :: "Employee" :: "Charlie" :: HNil)
.withColIndex(Index.fromKeys("Age", "Level", "Name"))
.withRowIndex(Index.fromKeys("Bob", "Alice", "Charlie"))
}
"sort rows" in {
people.sortRows must_== Frame.fromRows(
"Alice" :: 24 :: "Employee" :: HNil,
"Bob" :: 32 :: "Manager" :: HNil,
"Charlie" :: 44 :: "Employee" :: HNil)
.withColIndex(Index.fromKeys("Name", "Age", "Level"))
.withRowIndex(Index.fromKeys("Alice", "Bob", "Charlie"))
}
"use new row index" in {
f0.withRowIndex(Index(0 -> 2, 1 -> 0, 2 -> 1)) must_== Frame.fromRows(
"c" :: 3 :: HNil,
"a" :: 1 :: HNil,
"b" :: 2 :: HNil)
f0.withRowIndex(Index(0 -> 0, 1 -> 0, 2 -> 0)) must_== Frame.fromRows(
"a" :: 1 :: HNil,
"a" :: 1 :: HNil,
"a" :: 1 :: HNil)
f0.withRowIndex(Index(0 -> 2)) must_== Frame.fromRows("c" :: 3 :: HNil)
f0.withRowIndex(Index.empty[Int]) must_== Frame.empty[Int, Int].withColIndex(f0.colIndex)
}
"use new column index" in {
f0.withColIndex(Index(0 -> 1, 1 -> 0)) must_== Frame.fromRows(
1 :: "a" :: HNil,
2 :: "b" :: HNil,
3 :: "c" :: HNil)
f0.withColIndex(Index(0 -> 0, 1 -> 0)) must_== Frame.fromRows(
"a" :: "a" :: HNil,
"b" :: "b" :: HNil,
"c" :: "c" :: HNil)
f0.withColIndex(Index(0 -> 1)) must_== Frame.fromRows(
1 :: HNil,
2 :: HNil,
3 :: HNil)
f0.withColIndex(Index.empty[Int]) must_== Frame.fromRows[HNil, Int](HNil, HNil, HNil)
}
"have trivial column/row representation for empty Frame" in {
val frame = Frame.empty[String, String]
frame.columnsAsSeries must_== Series.empty[String, UntypedColumn]
frame.rowsAsSeries must_== Series.empty[String, UntypedColumn]
}
"be representable as columns" in {
val series = f0.columnsAsSeries mapValues { col =>
Series(f0.rowIndex, col.cast[Any])
}
series must_== Series(
0 -> Series(0 -> "a", 1 -> "b", 2 -> "c"),
1 -> Series(0 -> 1, 1 -> 2, 2 -> 3)
)
}
"be representable as rows" in {
val series = f0.rowsAsSeries mapValues { col =>
Series(f0.colIndex, col.cast[Any])
}
series must_== Series(
0 -> Series(0 -> "a", 1 -> 1),
1 -> Series(0 -> "b", 1 -> 2),
2 -> Series(0 -> "c", 1 -> 3)
)
}
}
// Merge semantics (inner/outer) with duplicated and mismatched row keys.
"Frame merges" should {
// these cases work as expected... tacking on a new column...
"inner merge with frame of same row index" in {
f3.merge(f4)(Merge.Inner) must_==
Frame.fromRows(
3 :: 3 :: HNil,
2 :: 2 :: HNil,
1 :: 1 :: HNil).withRowIndex(Index(Array(1,2,2)))
}
"outer merge with frame of same row index" in {
f3.merge(f4)(Merge.Outer) must_==
Frame.fromRows(
3 :: 3 :: HNil,
2 :: 2 :: HNil,
1 :: 1 :: HNil
).withRowIndex(Index(Array(1,2,2)))
}
"inner merge with an offset index with duplicates" in {
f3.merge(f5)(Merge.Inner) must_==
Frame.fromRows(
2 :: 3 :: HNil,
1 :: 2 :: HNil
).withRowIndex(Index(Array(2, 2)))
}
"outer merge with an offset index with duplicates" in {
f3.merge(f5)(Merge.Outer) must_==
Frame.fromRows(
3 :: NA :: HNil,
2 :: 3 :: HNil,
1 :: 2 :: HNil,
NA :: 1 :: HNil
).withRowIndex(Index(Array(1,2,2,3)))
}
"inner merge with a smaller index with duplicates" in {
f3.merge(f6)(Merge.Inner) must_==
Frame.fromRows(
2 :: 2 :: HNil,
1 :: 1 :: HNil
).withRowIndex(Index(Array(2, 2)))
}
"outer merge with a smaller index with duplicates" in {
f3.merge(f6)(Merge.Outer) must_==
Frame.fromRows(
3 :: NA :: HNil,
2 :: 2 :: HNil,
1 :: 1 :: HNil
).withRowIndex(Index(Array(1,2,2)))
}
"merge with a series" in {
f3.merge(1, s1)(Merge.Inner) must_==
Frame.fromRows(
3 :: "s3" :: HNil,
2 :: "s2" :: HNil,
1 :: "s1" :: HNil
).withRowIndex(Index(Array(1,2,2)))
}
}
// Join semantics: inner/left/right/outer, including empty frames and duplicate keys
// (inner join forms the cross-product of matching rows).
"Frame joins" should {
"inner join with empty frame" in {
val e = Frame.empty[Int, Int]
f0.join(e)(Join.Inner) must_== f0.withRowIndex(Index.empty[Int])
e.join(f0)(Join.Inner) must_== f0.withRowIndex(Index.empty[Int])
e.join(e)(Join.Inner) must_== e
}
"inner join with series" in {
f0.join(2, s0)(Join.Inner) must_== Frame.fromRows(
"a" :: 1 :: "s3" :: HNil,
"b" :: 2 :: "s2" :: HNil,
"c" :: 3 :: "s1" :: HNil)
}
"inner join with self" in {
f0.join(f0)(Join.Inner) must_== Frame.fromRows(
"a" :: 1 :: "a" :: 1 :: HNil,
"b" :: 2 :: "b" :: 2 :: HNil,
"c" :: 3 :: "c" :: 3 :: HNil)
.withColIndex(Index.fromKeys(0, 1, 0, 1))
}
"inner join only matching rows" in {
val a = Frame.fromRows(1 :: HNil, 2 :: HNil)
.withRowIndex(Index.fromKeys("a", "b"))
val b = Frame.fromRows(2.0 :: HNil, 3.0 :: HNil)
.withRowIndex(Index.fromKeys("b", "c"))
val c = Frame.fromRows(2 :: 2.0 :: HNil)
.withRowIndex(Index.fromKeys("b"))
.withColIndex(Index.fromKeys(0, 0))
a.join(b)(Join.Inner) must_== c
}
"inner join forms cross-product of matching rows" in {
val a = Frame.fromRows(1 :: HNil, 2 :: HNil)
.withRowIndex(Index.fromKeys("a", "a"))
val b = Frame.fromRows(2.0 :: HNil, 3.0 :: HNil)
.withRowIndex(Index.fromKeys("a", "a"))
val c = Frame.fromRows(
1 :: 2.0 :: HNil,
1 :: 3.0 :: HNil,
2 :: 2.0 :: HNil,
2 :: 3.0 :: HNil)
.withRowIndex(Index.fromKeys("a", "a", "a", "a"))
.withColIndex(Index.fromKeys(0, 0))
a.join(b)(Join.Inner) must_== c
}
"left join keeps left mismatched rows" in {
val a = Frame.fromRows(1 :: HNil, 2 :: HNil)
.withRowIndex(Index.fromKeys("a", "b"))
val b = Frame.fromRows(2.0 :: HNil, 3.0 :: HNil)
.withRowIndex(Index.fromKeys("b", "c"))
val c = Frame.mergeColumns(
0 -> Series.fromCells("a" -> Value(1), "b" -> Value(2)),
0 -> Series.fromCells("a" -> NA, "b" -> Value(2.0)))
a.join(b)(Join.Left) must_== c
}
"left join with empty frame" in {
val a = Frame.fromRows(1 :: HNil, 2 :: HNil)
.withRowIndex(Index.fromKeys("a", "b"))
val e = Frame.empty[String, Int]
a.join(e)(Join.Left) must_== a
e.join(a)(Join.Left) must_== e.withColIndex(Index.fromKeys(0))
}
"right join keeps right mismatched rows" in {
val a = Frame.fromRows(1 :: HNil, 2 :: HNil)
.withRowIndex(Index.fromKeys("a", "b"))
val b = Frame.fromRows(2.0 :: HNil, 3.0 :: HNil)
.withRowIndex(Index.fromKeys("b", "c"))
val c = Frame.mergeColumns(
0 -> Series.fromCells("b" -> Value(2), "c" -> NA),
0 -> Series.fromCells("b" -> Value(2.0), "c" -> Value(3.0)))
a.join(b)(Join.Right) must_== c
}
"right join with empty frame" in {
val a = Frame.fromRows(1 :: HNil, 2 :: HNil)
.withRowIndex(Index.fromKeys("a", "b"))
val e = Frame.empty[String, Int]
a.join(e)(Join.Right) must_== e.withColIndex(Index.fromKeys(0))
e.join(a)(Join.Right) must_== a
}
"outer join keeps all rows" in {
val a = Frame.fromRows(1 :: HNil, 2 :: HNil)
.withRowIndex(Index.fromKeys("a", "b"))
val b = Frame.fromRows(2.0 :: HNil, 3.0 :: HNil)
.withRowIndex(Index.fromKeys("b", "c"))
val c = Frame.mergeColumns(
0 -> Series.fromCells("a" -> Value(1), "b" -> Value(2), "c" -> NA),
0 -> Series.fromCells("a" -> NA, "b" -> Value(2.0), "c" -> Value(3.0)))
a.join(b)(Join.Outer) must_== c
}
"outer join with empty frame" in {
val a = Frame.fromRows(1 :: HNil, 2 :: HNil)
.withRowIndex(Index.fromKeys("a", "b"))
val e = Frame.empty[String, Int]
a.join(e)(Join.Outer) must_== a
e.join(a)(Join.Outer) must_== a
}
}
// Group-wise transformation of rows sharing a row-index key.
"mapRowGroups" should {
"not modify frame for identity" in {
f0.mapRowGroups { (_, f) => f } must_== f0
f1.mapRowGroups { (_, f) => f } must_== f1
}
val dups = Frame.fromRows(
1 :: 2.0 :: HNil,
2 :: 0.5 :: HNil,
3 :: 1.0 :: HNil,
4 :: 1.0 :: HNil,
5 :: 8.9 :: HNil,
6 :: 9.2 :: HNil
).withRowIndex(Index.fromKeys("a", "a", "b", "c", "c", "c"))
"reduce groups" in {
dups.mapRowGroups { (row, f) =>
val reduced = f.reduceFrame(reduce.Sum[Double]).to[List]
ColOrientedFrame(Index.fromKeys(row), Series(reduced map { case (key, value) =>
key -> TypedColumn(Column(value))
}: _*))
} must_== dups.reduceFrameByKey(reduce.Sum[Double])
}
"replace groups with constant" in {
val const = Frame.fromRows("repeat" :: HNil)
dups.mapRowGroups { (_, f) => const } must_== Frame.fromRows(
"repeat" :: HNil,
"repeat" :: HNil,
"repeat" :: HNil
).withRowIndex(Index.fromKeys(0, 0, 0))
}
}
// Typed extraction (Cols(...).as[...]), mapping, filtering and grouping.
"Frame" should {
"get row as HList" in {
f0.get(Cols(0, 1).as[String :: Int :: HNil])(0) must_== Value("a" :: 1 :: HNil)
f0.get(Cols(0, 1).as[String :: Int :: HNil])(1) must_== Value("b" :: 2 :: HNil)
f0.get(Cols(0, 1).as[String :: Int :: HNil])(2) must_== Value("c" :: 3 :: HNil)
f0.get(Cols(0, 1).as[String :: Int :: HNil])(3) must_== NA
f0.get(Cols(0).as[String :: HNil])(0) must_== Value("a" :: HNil)
f0.get(Cols(1).as[Int :: HNil])(2) must_== Value(3 :: HNil)
}
"convert to series" in {
f0.get(Cols(0).as[String]) must_== Series(0 -> "a", 1 -> "b", 2 -> "c")
f0.get(Cols(0).as[Int]) must_== Series(Index.fromKeys(0, 1, 2), Column[Int](NM, NM, NM))
f0.get(Cols(1).as[Int]) must_== Series(0 -> 1, 1 -> 2, 2 -> 3)
f0.get(Cols(0, 1).as[String :: Int :: HNil]) must_== Series(
0 -> ("a" :: 1 :: HNil),
1 -> ("b" :: 2 :: HNil),
2 -> ("c" :: 3 :: HNil))
}
"map to series" in {
f0.map(Cols(1).as[Int], 2)(_ + 1) must_== Frame.fromRows(
"a" :: 1 :: 2 :: HNil,
"b" :: 2 :: 3 :: HNil,
"c" :: 3 :: 4 :: HNil)
f0.map(Cols(0).as[String], 2)(_ => 42) must_== Frame.fromRows(
"a" :: 1 :: 42 :: HNil,
"b" :: 2 :: 42 :: HNil,
"c" :: 3 :: 42 :: HNil)
f0.map(Cols(1, 0).as[(Int, String)], 2) { case (x, y) =>
y + x
} must_== Frame.fromRows(
"a" :: 1 :: "a1" :: HNil,
"b" :: 2 :: "b2" :: HNil,
"c" :: 3 :: "c3" :: HNil)
}
"map with index to series" in {
f0.mapWithIndex(Cols(0).as[String], 2)(_ + _) must_== Frame.fromRows(
"a" :: 1 :: "0a" :: HNil,
"b" :: 2 :: "1b" :: HNil,
"c" :: 3 :: "2c" :: HNil)
f0.mapWithIndex(Cols(1).as[Int], 2)(_ + _) must_== Frame.fromRows(
"a" :: 1 :: 1 :: HNil,
"b" :: 2 :: 3 :: HNil,
"c" :: 3 :: 5 :: HNil)
}
"filter whole frame" in {
f0.filter(Cols(1).as[Int])(_ % 2 == 0) must_==
Frame.fromRows("b" :: 2 :: HNil).withRowIndex(Index.fromKeys(1))
}
"group by column values" in {
f0.group(Cols(0).as[String]) must_== f0.withRowIndex(Index.fromKeys("a", "b", "c"))
f0.group(Cols(1).as[Int].map(-_)) must_== f0.withRowIndex(Index(-3 -> 2, -2 -> 1, -1 -> 0))
f2.group(Cols(0).as[String]) must_== f2.withRowIndex(Index(("a",0), ("b",2), ("b",1)))
}
}
"reduceFrameWithCol" should {
"reduce with last" in {
f0.reduceFrameWithCol[String, Int, (String, Int)](0)(reduce.Last) must_==
Series(1 -> ("c", 3))
}
}
// `to` semantics: a new key appends a column, an existing key replaces it.
"map" should {
"append column when to is new" in {
f0.map(Cols(1).as[Int], to = 2)(_ + 2) must_== Frame.fromRows(
"a" :: 1 :: 3 :: HNil,
"b" :: 2 :: 4 :: HNil,
"c" :: 3 :: 5 :: HNil)
}
"replace column when `to` exists" in {
f0.map(Cols(1).as[Int], to = 1)(_ + 2) must_== Frame.fromRows(
"a" :: 3 :: HNil,
"b" :: 4 :: HNil,
"c" :: 5 :: HNil)
}
}
// Whole-axis reductions, including NA/NM propagation through reducers.
"reduce" should {
"reduce rows" in {
f0.reduce(Cols(1).as[Double], 2)(reduce.Mean) must_==
f0.merge(2, Series(0 -> 2D, 1 -> 2D, 2 -> 2D))(Merge.Outer)
}
"reduce cols" in {
f0.transpose.reduce(Rows(1).as[Double], 2)(reduce.Mean) must_==
f0.merge(2, Series(0 -> 2D, 1 -> 2D, 2 -> 2D))(Merge.Outer).transpose
}
"replace column when `to` exists" in {
f0.reduce(Cols(1).as[Int], to = 1)(reduce.Sum) must_== Frame.fromRows(
"a" :: 6 :: HNil,
"b" :: 6 :: HNil,
"c" :: 6 :: HNil)
f0.transpose.reduce(Rows(1).as[Int], to = 1)(reduce.Sum) must_== Frame.fromColumns(
"a" :: 6 :: HNil,
"b" :: 6 :: HNil,
"c" :: 6 :: HNil)
}
"respect NMs in reducer" in {
val f = Series.fromCells(1 -> Value(1), 2 -> NM, 3 -> Value(3)).toFrame("x")
f.reduce(Cols("x").as[Int], "y")(reduce.Sum) must_==
f.merge("y", Series(1 -> NM, 2 -> NM, 3 -> NM))(Merge.Outer)
}
"respect NAs in reducer" in {
val f = Series.fromCells[Int, Int](1 -> NA, 2 -> NA).toFrame("x")
f.reduce(Cols("x").as[Int], "y")(reduce.Max) must_==
f.merge("y", Series(1 -> NA, 2 -> NA))(Merge.Outer)
}
}
// Per-key reductions over duplicated row-index keys.
"reduceByKey" should {
val f = Frame.mergeColumns(
"x" -> Series.fromCells(0 -> Value(1), 2 -> Value(5), 2 -> Value(6)),
"y" -> Series.fromCells(0 -> Value(2), 0 -> Value(3), 1 -> NM, 1 -> Value(2)))
"reduce rows/cols" in {
f.reduceByKey(Cols("x").as[Int], "z")(reduce.Sum) must_==
f.join("z", Series.fromCells(0 -> Value(1), 1 -> Value(0), 2 -> Value(11)))(Join.Outer)
f.transpose.reduceByKey(Rows("x").as[Int], "z")(reduce.Sum) must_==
f.join("z", Series.fromCells(0 -> Value(1), 1 -> Value(0), 2 -> Value(11)))(Join.Outer).transpose
}
"respect NMs from reducer" in {
f.reduceByKey(Cols("y").as[Int], "z")(reduce.Sum) must_==
f.join("z", Series.fromCells(0 -> Value(5), 1 -> NM, 2 -> Value(0)))(Join.Outer)
}
"respect NAs from reducer" in {
f.reduceByKey(Cols("x").as[Int], "z")(reduce.Max) must_==
f.join("z", Series.fromCells(0 -> Value(1), 1 -> NA, 2 -> Value(6)))(Join.Outer)
}
}
// Row concatenation, including frames with differing column sets (missing cells → NA)
// and a ScalaCheck property over identically-schemed generated frames.
"appendRows" should {
"append rows to empty frame" in {
Frame.empty[Int, Int].appendRows(f0) must_== f0
f0.appendRows(Frame.empty[Int, Int]) must_== f0
}
"append 2 simple frames with same columns" in {
f0.appendRows(f1) must_== Frame.fromRows(
"a" :: 1 :: HNil,
"b" :: 2 :: HNil,
"c" :: 3 :: HNil,
"a" :: 3 :: HNil,
"b" :: 2 :: HNil,
"c" :: 1 :: HNil
).withRowIndex(Index(Array(0, 1, 2, 0, 1, 2)))
}
"append 2 simple frames with different columns" in {
val a = Frame.fromRows(
"a" :: 1 :: HNil,
"b" :: 2 :: HNil,
"c" :: 3 :: HNil)
val b = Frame.fromRows(
9 :: 4D ::HNil,
8 :: 5D ::HNil,
7 :: 6D ::HNil).withColIndex(Index(Array(1, 2)))
val col0 = Column(Value("a"), Value("b"), Value("c"), NA, NA, NA)
val col1 = Column(Value(1), Value(2), Value(3), Value(9), Value(8), Value(7))
val col2 = Column(NA, NA, NA, Value(4D), Value(5D), Value(6D))
a.appendRows(b) must_== ColOrientedFrame(Index(Array(0, 1, 2, 0, 1, 2)),
Series(0 -> TypedColumn(col0), 1 -> TypedColumn(col1), 2 -> TypedColumn(col2)))
}
"append frame rows with same column oriented schema" in {
val genFrame = genColOrientedFrame[Int, String](arbitrary[Int])(
"a" -> arbitrary[String],
"b" -> arbitrary[Int],
"c" -> arbitrary[Double])
forAll(Gen.zip(genFrame, genFrame)) { case (f0, f1) =>
val rows0 = f0.get(Cols("a", "b", "c").as[(String, Int, Double)])
val rows1 = f1.get(Cols("a", "b", "c").as[(String, Int, Double)])
val index = Index(rows0.index.keys ++ rows1.index.keys)
val values = rows0.values ++ rows1.values
val expected = Frame.fromRows(values: _*).withRowIndex(index).withColIndex(Index(Array("a", "b", "c")))
f0.appendRows(f1) must_== expected
}
}
}
}
| codeaudit/framian | framian/src/test/scala/framian/FrameSpec.scala | Scala | apache-2.0 | 19,094 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.encoders
import java.math.BigInteger
import java.sql.{Date, Timestamp}
import java.util.Arrays
import scala.collection.mutable.ArrayBuffer
import scala.reflect.runtime.universe.TypeTag
import org.apache.spark.sql.{Encoder, Encoders}
import org.apache.spark.sql.catalyst.{OptionalData, PrimitiveData}
import org.apache.spark.sql.catalyst.analysis.AnalysisTest
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.catalyst.plans.CodegenInterpretedPlanTest
import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
import org.apache.spark.sql.catalyst.util.ArrayData
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.ClosureCleaner
case class RepeatedStruct(s: Seq[PrimitiveData])
/**
 * Test fixture wrapping a nested array. Scala arrays compare by reference, so
 * equality and hashing are overridden to use element-wise deep comparison.
 */
case class NestedArray(a: Array[Array[Int]]) {
  // View an Array[Array[Int]] as Array[AnyRef] for java.util.Arrays deep ops.
  private def asRefs(arr: Array[Array[Int]]): Array[AnyRef] =
    arr.asInstanceOf[Array[AnyRef]]

  override def hashCode(): Int = java.util.Arrays.deepHashCode(asRefs(a))

  override def equals(other: Any): Boolean = other match {
    case NestedArray(that) => java.util.Arrays.deepEquals(asRefs(a), asRefs(that))
    case _                 => false
  }
}
// Test fixture: every primitive represented by its boxed (nullable) Java wrapper.
case class BoxedData(
    intField: java.lang.Integer,
    longField: java.lang.Long,
    doubleField: java.lang.Double,
    floatField: java.lang.Float,
    shortField: java.lang.Short,
    byteField: java.lang.Byte,
    booleanField: java.lang.Boolean)
// Test fixture: repeated (array/map) fields, including null-bearing variants,
// plus a nested product field.
case class RepeatedData(
    arrayField: Seq[Int],
    arrayFieldContainsNull: Seq[java.lang.Integer],
    mapField: scala.collection.Map[Int, Long],
    mapFieldNull: scala.collection.Map[Int, java.lang.Long],
    structField: PrimitiveData)
/**
 * For testing the Kryo serialization based encoder. Not a case class on
 * purpose: equality is defined manually over the wrapped int so that two
 * round-tripped instances compare equal.
 */
class KryoSerializable(val value: Int) {
  override def equals(other: Any): Boolean = other match {
    case that: KryoSerializable => value == that.value
    case _                      => false
  }

  override def hashCode(): Int = value
}
/**
 * For testing the Java serialization based encoder. Extends [[Serializable]]
 * so `ObjectOutputStream` can handle it; equality is defined over the wrapped
 * int so round-tripped instances compare equal.
 */
class JavaSerializable(val value: Int) extends Serializable {
  override def equals(other: Any): Boolean = other match {
    case that: JavaSerializable => value == that.value
    case _                      => false
  }

  override def hashCode(): Int = value
}
/** For testing a user-defined type (UDT) attached to a case class via annotation. */
@SQLUserDefinedType(udt = classOf[UDTForCaseClass])
case class UDTCaseClass(uri: java.net.URI)
// UDT that persists a UDTCaseClass as the string form of its URI.
class UDTForCaseClass extends UserDefinedType[UDTCaseClass] {
  // Backed by a plain SQL string column.
  override def sqlType: DataType = StringType
  override def serialize(obj: UDTCaseClass): UTF8String = {
    UTF8String.fromString(obj.uri.toString)
  }
  override def userClass: Class[UDTCaseClass] = classOf[UDTCaseClass]
  // NOTE: deliberately partial — any datum other than UTF8String throws MatchError.
  override def deserialize(datum: Any): UDTCaseClass = datum match {
    case uri: UTF8String => UDTCaseClass(new java.net.URI(uri.toString))
  }
}
case class PrimitiveValueClass(wrapped: Int) extends AnyVal
// Value class whose underlying value is a reference type (a nested case class).
case class ReferenceValueClass(wrapped: ReferenceValueClass.Container) extends AnyVal
object ReferenceValueClass {
  case class Container(data: Int)
}
// Round-trip tests for ExpressionEncoder: each encodeDecodeTest/productTest call
// below registers a test that encodes a value to an InternalRow and decodes it back.
class ExpressionEncoderSuite extends CodegenInterpretedPlanTest with AnalysisTest {
  // Register this suite instance as an outer scope so encoders for inner
  // classes (e.g. InnerClass below) can be instantiated reflectively.
  OuterScopes.addOuterScope(this)
  // Derives an ExpressionEncoder for T while asserting the derivation does not
  // leak entries in the Scala reflection undo log.
  implicit def encoder[T : TypeTag]: ExpressionEncoder[T] = verifyNotLeakingReflectionObjects {
    ExpressionEncoder()
  }
  // test flat encoders
  encodeDecodeTest(false, "primitive boolean")
  encodeDecodeTest(-3.toByte, "primitive byte")
  encodeDecodeTest(-3.toShort, "primitive short")
  encodeDecodeTest(-3, "primitive int")
  encodeDecodeTest(-3L, "primitive long")
  encodeDecodeTest(-3.7f, "primitive float")
  encodeDecodeTest(-3.7, "primitive double")
  encodeDecodeTest(java.lang.Boolean.FALSE, "boxed boolean")
  encodeDecodeTest(java.lang.Byte.valueOf(-3: Byte), "boxed byte")
  encodeDecodeTest(java.lang.Short.valueOf(-3: Short), "boxed short")
  encodeDecodeTest(java.lang.Integer.valueOf(-3), "boxed int")
  encodeDecodeTest(java.lang.Long.valueOf(-3L), "boxed long")
  encodeDecodeTest(java.lang.Float.valueOf(-3.7f), "boxed float")
  encodeDecodeTest(java.lang.Double.valueOf(-3.7), "boxed double")
  encodeDecodeTest(BigDecimal("32131413.211321313"), "scala decimal")
  encodeDecodeTest(new java.math.BigDecimal("231341.23123"), "java decimal")
  encodeDecodeTest(BigInt("23134123123"), "scala biginteger")
  encodeDecodeTest(new BigInteger("23134123123"), "java BigInteger")
  encodeDecodeTest(Decimal("32131413.211321313"), "catalyst decimal")
  encodeDecodeTest("hello", "string")
  encodeDecodeTest(Date.valueOf("2012-12-23"), "date")
  encodeDecodeTest(Timestamp.valueOf("2016-01-29 10:00:00"), "timestamp")
  encodeDecodeTest(Array(Timestamp.valueOf("2016-01-29 10:00:00")), "array of timestamp")
  encodeDecodeTest(Array[Byte](13, 21, -23), "binary")
  encodeDecodeTest(Seq(31, -123, 4), "seq of int")
  encodeDecodeTest(Seq("abc", "xyz"), "seq of string")
  encodeDecodeTest(Seq("abc", null, "xyz"), "seq of string with null")
  encodeDecodeTest(Seq.empty[Int], "empty seq of int")
  encodeDecodeTest(Seq.empty[String], "empty seq of string")
  encodeDecodeTest(Seq(Seq(31, -123), null, Seq(4, 67)), "seq of seq of int")
  encodeDecodeTest(Seq(Seq("abc", "xyz"), Seq[String](null), null, Seq("1", null, "2")),
    "seq of seq of string")
  encodeDecodeTest(Array(31, -123, 4), "array of int")
  encodeDecodeTest(Array("abc", "xyz"), "array of string")
  encodeDecodeTest(Array("a", null, "x"), "array of string with null")
  encodeDecodeTest(Array.empty[Int], "empty array of int")
  encodeDecodeTest(Array.empty[String], "empty array of string")
  encodeDecodeTest(Array(Array(31, -123), null, Array(4, 67)), "array of array of int")
  encodeDecodeTest(Array(Array("abc", "xyz"), Array[String](null), null, Array("1", null, "2")),
    "array of array of string")
  encodeDecodeTest(Map(1 -> "a", 2 -> "b"), "map")
  encodeDecodeTest(Map(1 -> "a", 2 -> null), "map with null")
  encodeDecodeTest(Map(1 -> Map("a" -> 1), 2 -> Map("b" -> 2)), "map of map")
  encodeDecodeTest(Tuple1[Seq[Int]](null), "null seq in tuple")
  encodeDecodeTest(Tuple1[Map[String, String]](null), "null map in tuple")
  encodeDecodeTest(List(1, 2), "list of int")
  encodeDecodeTest(List("a", null), "list with String and null")
  encodeDecodeTest(
    UDTCaseClass(new java.net.URI("http://spark.apache.org/")), "udt with case class")
  // Kryo encoders
  encodeDecodeTest("hello", "kryo string")(encoderFor(Encoders.kryo[String]))
  encodeDecodeTest(new KryoSerializable(15), "kryo object")(
    encoderFor(Encoders.kryo[KryoSerializable]))
  // Java encoders
  encodeDecodeTest("hello", "java string")(encoderFor(Encoders.javaSerialization[String]))
  encodeDecodeTest(new JavaSerializable(15), "java object")(
    encoderFor(Encoders.javaSerialization[JavaSerializable]))
  // test product encoders
  // Registers a round-trip test named after the product's class.
  private def productTest[T <: Product : ExpressionEncoder](input: T): Unit = {
    encodeDecodeTest(input, input.getClass.getSimpleName)
  }
  case class InnerClass(i: Int)
  productTest(InnerClass(1))
  encodeDecodeTest(Array(InnerClass(1)), "array of inner class")
  encodeDecodeTest(Array(Option(InnerClass(1))), "array of optional inner class")
  productTest(PrimitiveData(1, 1, 1, 1, 1, 1, true))
  productTest(
    OptionalData(Some(2), Some(2), Some(2), Some(2), Some(2), Some(2), Some(true),
      Some(PrimitiveData(1, 1, 1, 1, 1, 1, true))))
  productTest(OptionalData(None, None, None, None, None, None, None, None))
  encodeDecodeTest(Seq(Some(1), None), "Option in array")
  encodeDecodeTest(Map(1 -> Some(10L), 2 -> Some(20L), 3 -> None), "Option in map")
  productTest(BoxedData(1, 1L, 1.0, 1.0f, 1.toShort, 1.toByte, true))
  productTest(BoxedData(null, null, null, null, null, null, null))
  productTest(RepeatedStruct(PrimitiveData(1, 1, 1, 1, 1, 1, true) :: Nil))
  productTest((1, "test", PrimitiveData(1, 1, 1, 1, 1, 1, true)))
  productTest(
    RepeatedData(
      Seq(1, 2),
      Seq(Integer.valueOf(1), null, Integer.valueOf(2)),
      Map(1 -> 2L),
      Map(1 -> null),
      PrimitiveData(1, 1, 1, 1, 1, 1, true)))
  productTest(NestedArray(Array(Array(1, -2, 3), null, Array(4, 5, -6))))
  // Tuples of each primitive type inside Seq / ArrayBuffer containers.
  productTest(("Seq[(String, String)]",
    Seq(("a", "b"))))
  productTest(("Seq[(Int, Int)]",
    Seq((1, 2))))
  productTest(("Seq[(Long, Long)]",
    Seq((1L, 2L))))
  productTest(("Seq[(Float, Float)]",
    Seq((1.toFloat, 2.toFloat))))
  productTest(("Seq[(Double, Double)]",
    Seq((1.toDouble, 2.toDouble))))
  productTest(("Seq[(Short, Short)]",
    Seq((1.toShort, 2.toShort))))
  productTest(("Seq[(Byte, Byte)]",
    Seq((1.toByte, 2.toByte))))
  productTest(("Seq[(Boolean, Boolean)]",
    Seq((true, false))))
  productTest(("ArrayBuffer[(String, String)]",
    ArrayBuffer(("a", "b"))))
  productTest(("ArrayBuffer[(Int, Int)]",
    ArrayBuffer((1, 2))))
  productTest(("ArrayBuffer[(Long, Long)]",
    ArrayBuffer((1L, 2L))))
  productTest(("ArrayBuffer[(Float, Float)]",
    ArrayBuffer((1.toFloat, 2.toFloat))))
  productTest(("ArrayBuffer[(Double, Double)]",
    ArrayBuffer((1.toDouble, 2.toDouble))))
  productTest(("ArrayBuffer[(Short, Short)]",
    ArrayBuffer((1.toShort, 2.toShort))))
  productTest(("ArrayBuffer[(Byte, Byte)]",
    ArrayBuffer((1.toByte, 2.toByte))))
  productTest(("ArrayBuffer[(Boolean, Boolean)]",
    ArrayBuffer((true, false))))
  productTest(("Seq[Seq[(Int, Int)]]",
    Seq(Seq((1, 2)))))
  // test for ExpressionEncoder.tuple
  encodeDecodeTest(
    1 -> 10L,
    "tuple with 2 flat encoders")(
    ExpressionEncoder.tuple(ExpressionEncoder[Int], ExpressionEncoder[Long]))
  encodeDecodeTest(
    (PrimitiveData(1, 1, 1, 1, 1, 1, true), (3, 30L)),
    "tuple with 2 product encoders")(
    ExpressionEncoder.tuple(ExpressionEncoder[PrimitiveData], ExpressionEncoder[(Int, Long)]))
  encodeDecodeTest(
    (PrimitiveData(1, 1, 1, 1, 1, 1, true), 3),
    "tuple with flat encoder and product encoder")(
    ExpressionEncoder.tuple(ExpressionEncoder[PrimitiveData], ExpressionEncoder[Int]))
  encodeDecodeTest(
    (3, PrimitiveData(1, 1, 1, 1, 1, 1, true)),
    "tuple with product encoder and flat encoder")(
    ExpressionEncoder.tuple(ExpressionEncoder[Int], ExpressionEncoder[PrimitiveData]))
  encodeDecodeTest(
    (1, (10, 100L)),
    "nested tuple encoder") {
    val intEnc = ExpressionEncoder[Int]
    val longEnc = ExpressionEncoder[Long]
    ExpressionEncoder.tuple(intEnc, ExpressionEncoder.tuple(intEnc, longEnc))
  }
  encodeDecodeTest(
    PrimitiveValueClass(42), "primitive value class")
  encodeDecodeTest(
    ReferenceValueClass(ReferenceValueClass.Container(1)), "reference value class")
  encodeDecodeTest(Option(31), "option of int")
  encodeDecodeTest(Option.empty[Int], "empty option of int")
  encodeDecodeTest(Option("abc"), "option of string")
  encodeDecodeTest(Option.empty[String], "empty option of string")
  productTest(("UDT", new ExamplePoint(0.1, 0.2)))
  test("nullable of encoder schema") {
    // Asserts the per-field nullability of the derived schema for T.
    def checkNullable[T: ExpressionEncoder](nullable: Boolean*): Unit = {
      assert(implicitly[ExpressionEncoder[T]].schema.map(_.nullable) === nullable.toSeq)
    }
    // test for flat encoders
    checkNullable[Int](false)
    checkNullable[Option[Int]](true)
    checkNullable[java.lang.Integer](true)
    checkNullable[String](true)
    // test for product encoders
    checkNullable[(String, Int)](true, false)
    checkNullable[(Int, java.lang.Long)](false, true)
    // test for nested product encoders
    {
      val schema = ExpressionEncoder[(Int, (String, Int))].schema
      assert(schema(0).nullable === false)
      assert(schema(1).nullable)
      assert(schema(1).dataType.asInstanceOf[StructType](0).nullable)
      assert(schema(1).dataType.asInstanceOf[StructType](1).nullable === false)
    }
    // test for tupled encoders
    {
      val schema = ExpressionEncoder.tuple(
        ExpressionEncoder[Int],
        ExpressionEncoder[(String, Int)]).schema
      assert(schema(0).nullable === false)
      assert(schema(1).nullable)
      assert(schema(1).dataType.asInstanceOf[StructType](0).nullable)
      assert(schema(1).dataType.asInstanceOf[StructType](1).nullable === false)
    }
  }
  test("nullable of encoder serializer") {
    def checkNullable[T: Encoder](nullable: Boolean): Unit = {
      assert(encoderFor[T].objSerializer.nullable === nullable)
    }
    // test for flat encoders
    checkNullable[Int](false)
    checkNullable[Option[Int]](true)
    checkNullable[java.lang.Integer](true)
    checkNullable[String](true)
  }
  test("null check for map key: String") {
    val encoder = ExpressionEncoder[Map[String, Int]]()
    val e = intercept[RuntimeException](encoder.toRow(Map(("a", 1), (null, 2))))
    assert(e.getMessage.contains("Cannot use null as map key"))
  }
  test("null check for map key: Integer") {
    val encoder = ExpressionEncoder[Map[Integer, String]]()
    val e = intercept[RuntimeException](encoder.toRow(Map((1, "a"), (null, "b"))))
    assert(e.getMessage.contains("Cannot use null as map key"))
  }
  test("throw exception for tuples with more than 22 elements") {
    val encoders = (0 to 22).map(_ => Encoders.scalaInt.asInstanceOf[ExpressionEncoder[_]])
    val e = intercept[UnsupportedOperationException] {
      ExpressionEncoder.tuple(encoders)
    }
    assert(e.getMessage.contains("tuple with more than 22 elements are not supported"))
  }
  // Scala / Java big decimals ----------------------------------------------------------
  encodeDecodeTest(BigDecimal(("9" * 20) + "." + "9" * 18),
    "scala decimal within precision/scale limit")
  encodeDecodeTest(new java.math.BigDecimal(("9" * 20) + "." + "9" * 18),
    "java decimal within precision/scale limit")
  encodeDecodeTest(-BigDecimal(("9" * 20) + "." + "9" * 18),
    "negative scala decimal within precision/scale limit")
  encodeDecodeTest(new java.math.BigDecimal(("9" * 20) + "." + "9" * 18).negate,
    "negative java decimal within precision/scale limit")
  testOverflowingBigNumeric(BigDecimal("1" * 21), "scala big decimal")
  testOverflowingBigNumeric(new java.math.BigDecimal("1" * 21), "java big decimal")
  testOverflowingBigNumeric(-BigDecimal("1" * 21), "negative scala big decimal")
  testOverflowingBigNumeric(new java.math.BigDecimal("1" * 21).negate, "negative java big decimal")
  testOverflowingBigNumeric(BigDecimal(("1" * 21) + ".123"),
    "scala big decimal with fractional part")
  testOverflowingBigNumeric(new java.math.BigDecimal(("1" * 21) + ".123"),
    "java big decimal with fractional part")
  testOverflowingBigNumeric(BigDecimal(("1" * 21) + "." + "9999" * 100),
    "scala big decimal with long fractional part")
  testOverflowingBigNumeric(new java.math.BigDecimal(("1" * 21) + "." + "9999" * 100),
    "java big decimal with long fractional part")
  // Scala / Java big integers ----------------------------------------------------------
  encodeDecodeTest(BigInt("9" * 38), "scala big integer within precision limit")
  encodeDecodeTest(new BigInteger("9" * 38), "java big integer within precision limit")
  encodeDecodeTest(-BigInt("9" * 38),
    "negative scala big integer within precision limit")
  encodeDecodeTest(new BigInteger("9" * 38).negate(),
    "negative java big integer within precision limit")
  testOverflowingBigNumeric(BigInt("1" * 39), "scala big int")
  testOverflowingBigNumeric(new BigInteger("1" * 39), "java big integer")
  testOverflowingBigNumeric(-BigInt("1" * 39), "negative scala big int")
  testOverflowingBigNumeric(new BigInteger("1" * 39).negate, "negative java big integer")
  testOverflowingBigNumeric(BigInt("9" * 100), "scala very large big int")
  testOverflowingBigNumeric(new BigInteger("9" * 100), "java very big int")
  encodeDecodeTest("foo" -> 1L, "makeCopy") {
    Encoders.product[(String, Long)].makeCopy.asInstanceOf[ExpressionEncoder[(String, Long)]]
  }
  // Registers tests (one per ANSI mode) asserting that encoding a numeric that
  // overflows DecimalType either yields null (non-ANSI) or fails with an
  // ArithmeticException wrapped in the encoding error (ANSI).
  private def testOverflowingBigNumeric[T: TypeTag](bigNumeric: T, testName: String): Unit = {
    Seq(true, false).foreach { ansiEnabled =>
      testAndVerifyNotLeakingReflectionObjects(
        s"overflowing $testName, ansiEnabled=$ansiEnabled") {
        withSQLConf(
          SQLConf.ANSI_ENABLED.key -> ansiEnabled.toString
        ) {
          // Need to construct Encoder here rather than implicitly resolving it
          // so that SQLConf changes are respected.
          val encoder = ExpressionEncoder[T]()
          if (!ansiEnabled) {
            val convertedBack = encoder.resolveAndBind().fromRow(encoder.toRow(bigNumeric))
            assert(convertedBack === null)
          } else {
            val e = intercept[RuntimeException] {
              encoder.toRow(bigNumeric)
            }
            assert(e.getMessage.contains("Error while encoding"))
            assert(e.getCause.getClass === classOf[ArithmeticException])
          }
        }
      }
    }
  }
  // Registers a test that encodes `input` to an InternalRow, decodes it back,
  // checks analysis of serialize/deserialize plans, and compares the round-trip
  // result against the input (with deep comparison for arrays).
  private def encodeDecodeTest[T : ExpressionEncoder](
      input: T,
      testName: String): Unit = {
    testAndVerifyNotLeakingReflectionObjects(s"encode/decode for $testName: $input") {
      val encoder = implicitly[ExpressionEncoder[T]]
      // Make sure encoder is serializable.
      ClosureCleaner.clean((s: String) => encoder.getClass.getName)
      val row = encoder.toRow(input)
      val schema = encoder.schema.toAttributes
      val boundEncoder = encoder.resolveAndBind()
      val convertedBack = try boundEncoder.fromRow(row) catch {
        case e: Exception =>
          fail(
            s"""Exception thrown while decoding
               |Converted: $row
               |Schema: ${schema.mkString(",")}
               |${encoder.schema.treeString}
               |
               |Encoder:
               |$boundEncoder
               |
             """.stripMargin, e)
      }
      // Test the correct resolution of serialization / deserialization.
      val attr = AttributeReference("obj", encoder.deserializer.dataType)()
      val plan = LocalRelation(attr).serialize[T].deserialize[T]
      assertAnalysisSuccess(plan)
      // Arrays need element-wise (or deep) comparison; Comparable types use
      // compareTo so e.g. BigDecimal values with equal magnitude match.
      val isCorrect = (input, convertedBack) match {
        case (b1: Array[Byte], b2: Array[Byte]) => Arrays.equals(b1, b2)
        case (b1: Array[Int], b2: Array[Int]) => Arrays.equals(b1, b2)
        case (b1: Array[Array[_]], b2: Array[Array[_]]) =>
          Arrays.deepEquals(b1.asInstanceOf[Array[AnyRef]], b2.asInstanceOf[Array[AnyRef]])
        case (b1: Array[_], b2: Array[_]) =>
          Arrays.equals(b1.asInstanceOf[Array[AnyRef]], b2.asInstanceOf[Array[AnyRef]])
        case (left: Comparable[_], right: Comparable[_]) =>
          left.asInstanceOf[Comparable[Any]].compareTo(right) == 0
        case _ => input == convertedBack
      }
      if (!isCorrect) {
        val types = convertedBack match {
          case c: Product =>
            c.productIterator.filter(_ != null).map(_.getClass.getName).mkString(",")
          case other => other.getClass.getName
        }
        val encodedData = try {
          row.toSeq(encoder.schema).zip(schema).map {
            case (a: ArrayData, AttributeReference(_, ArrayType(et, _), _, _)) =>
              a.toArray[Any](et).toSeq
            case (other, _) =>
              other
          }.mkString("[", ",", "]")
        } catch {
          case e: Throwable => s"Failed to toSeq: $e"
        }
        fail(
          s"""Encoded/Decoded data does not match input data
             |
             |in:  $input
             |out: $convertedBack
             |types: $types
             |
             |Encoded Data: $encodedData
             |Schema: ${schema.mkString(",")}
             |${encoder.schema.treeString}
             |
             |fromRow Expressions:
             |${boundEncoder.deserializer.treeString}
         """.stripMargin)
      }
    }
  }
  /**
   * Verify the size of scala.reflect.runtime.JavaUniverse.undoLog before and after `func` to
   * ensure we don't leak Scala reflection garbage.
   *
   * @see org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects
   */
  private def verifyNotLeakingReflectionObjects[T](func: => T): T = {
    def undoLogSize: Int = {
      scala.reflect.runtime.universe
        .asInstanceOf[scala.reflect.runtime.JavaUniverse].undoLog.log.size
    }
    val previousUndoLogSize = undoLogSize
    val r = func
    assert(previousUndoLogSize == undoLogSize)
    r
  }
  // Wraps `test` with the reflection undo-log leak check above.
  private def testAndVerifyNotLeakingReflectionObjects(testName: String)(testFun: => Any): Unit = {
    test(testName) {
      verifyNotLeakingReflectionObjects(testFun)
    }
  }
}
| rezasafi/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala | Scala | apache-2.0 | 21,414 |
package blog2
import scalikejdbc._
import scalikejdbc.scalatest.AutoRollback
import org.scalatest.{ Tag => _, _ }
// Exercises a hasManyThrough association (posts <-> tags via post_tags) that is
// NOT loaded byDefault; every test runs inside a rolled-back transaction.
class BlogSpec extends fixture.FunSpec with Matchers with Connection with CreateTables with AutoRollback {
  // Run against the named "blog2" connection pool rather than the default one.
  override def db(): DB = NamedDB(Symbol("blog2")).toDB()
  // Fixture: one post tagged with both "Scala" and "Ruby"; rolled back per test.
  override def fixture(implicit session: DBSession): Unit = {
    val postId =
      Post.createWithAttributes(Symbol("title") -> "Hello World!", Symbol("body") -> "This is the first entry...")
    val scalaTagId = Tag.createWithAttributes(Symbol("name") -> "Scala")
    val rubyTagId = Tag.createWithAttributes(Symbol("name") -> "Ruby")
    val pt = PostTag.column
    // Join rows linking the post to both tags.
    insert.into(PostTag).namedValues(pt.postId -> postId, pt.tagId -> scalaTagId).toSQL.update.apply()
    insert.into(PostTag).namedValues(pt.postId -> postId, pt.tagId -> rubyTagId).toSQL.update.apply()
  }
  describe("hasManyThrough without byDefault") {
    it("should work as expected") { implicit session =>
      val id = Post.limit(1).apply().head.id
      // Tags must be loaded only because tagsRef is joined explicitly.
      val post = Post.joins(Post.tagsRef).findById(id)
      post.get.tags.size should equal(2)
    }
    it("should work when joining twice") { implicit session =>
      val id = Post.limit(1).apply().head.id
      // Joining the same ref twice must not duplicate the associated rows.
      val post = Post.joins(Post.tagsRef, Post.tagsRef).findById(id)
      post.get.tags.size should equal(2)
    }
  }
}
| skinny-framework/skinny-framework | orm/src/test/scala/blog2/BlogSpec.scala | Scala | mit | 1,356 |
package com.normation.rudder.domain.eventlog
import org.joda.time.DateTime
import com.normation.eventlog.EventActor
import com.normation.eventlog.EventLog
import com.normation.utils.HashcodeCaching
import scala.xml.NodeSeq
import com.normation.eventlog.EventLogDetails
import com.normation.eventlog.EventLogFilter
import com.normation.eventlog.EventLogType
import com.normation.rudder.domain.Constants
/**
 * Marker trait for event logs concerning the Rudder policy server itself
 * (e.g. updates to its allowed-networks configuration).
 */
sealed trait PolicyServerEventLog extends EventLog
// Event log recorded when the policy server configuration is updated.
final case class UpdatePolicyServer(
    override val eventDetails : EventLogDetails
) extends PolicyServerEventLog with HashcodeCaching {
  // Policy server updates are user-initiated, never caused by another event.
  override val cause = None
  override val eventType = UpdatePolicyServer.eventType
  override val eventLogCategory = PolicyServerLogCategory
}
// Before/after snapshot of the policy server's authorized networks.
final case class AuthorizedNetworkModification(
    oldNetworks: Seq[String]
  , newNetworks: Seq[String]
)
object UpdatePolicyServer extends EventLogFilter {
  override val eventType = UpdatePolicyServerEventType
  override def apply(x : (EventLogType, EventLogDetails)) : UpdatePolicyServer = UpdatePolicyServer(x._2)
  /**
   * Serialize an authorized-networks change to the XML payload stored with the
   * event log (old and new network lists, tagged with the current file format).
   */
  def buildDetails(modification: AuthorizedNetworkModification) : NodeSeq = {
    EventLog.withContent {
      <changeAuthorizedNetworks fileFormat={Constants.XML_CURRENT_FILE_FORMAT.toString}>
        <oldAuthorizedNetworks>{
          modification.oldNetworks.map { net => <net>{net}</net>}
        }</oldAuthorizedNetworks>
        <newAuthorizedNetworks>{
          modification.newNetworks.map { net => <net>{net}</net>}
        }</newAuthorizedNetworks>
      </changeAuthorizedNetworks>
    }
  }
}
object PolicyServerEventLogsFilter {
  // All event log filters contributed by this module, used for deserialization.
  final val eventList : List[EventLogFilter] = List(
      UpdatePolicyServer
    )
}
/**
* Copyright 2015 Aalto University, Ahmed Hussnain
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package main.scala
import java.sql.Timestamp
import java.util.Properties
import org.apache.spark.mllib.clustering.{ OnlineLDAOptimizer, LDA }
import org.apache.spark.mllib.linalg.{ Vector, Vectors }
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.types._
import scala.collection.mutable.HashMap
// Runs online LDA over recent tweets pulled from MySQL and stores the topic
// summaries (plus top hashtags) back into an LDAResults table.
object LDAObject {
  /*
   * Convert tweets to bag-of-words model
   */
  // Returns (documents as (id, sparse term-count vector), vocabulary array).
  def preprocess(data: RDD[String], stopwords: Array[String]) = {
    // Filter out words that are; less than 3 char long or contain any non letter chars. If the word is at the end of sentence drop last char.
    val tokenized: RDD[Array[String]] = data.map(d => d.toLowerCase().split("\\\\s")).map {
      // d = document, a = array, w = word, c = character
      a =>
        a.filter(word => word.length > 2).map {
          w =>
            w.last match {
              case ',' | '!' | '?' | '.' => w.dropRight(1)
              case _ => w
            }
        }.filter(w => w.forall(c => java.lang.Character.isLetter(c)))
    }
    // termcounts for every document
    // Sorted by descending frequency so the head holds the most common terms.
    val termCounts = tokenized.flatMap(d => d.map(w => (w, 1L))).reduceByKey(_ + _).collect().sortBy(-_._2)
    val numFreqwords = 5
    val vocabArray = // Remove stopwords. Remove few of the most freq words and words that only appear once
      termCounts.takeRight(termCounts.size - numFreqwords).filter(_._2 > 1).filterNot(w => stopwords.contains(w._1)).map(_._1)
    val vocabulary = vocabArray.zipWithIndex.toMap
    // Make every document to a bag of words
    val documents: RDD[(Long, Vector)] = tokenized.zipWithIndex.map {
      case (tokens, id) =>
        val counts = new HashMap[Int, Double]()
        tokens.foreach { term =>
          if (vocabulary.contains(term)) {
            val idx = vocabulary(term)
            counts(idx) = counts.getOrElse(idx, 0.0) + 1.0
          }
        }
        // (documentID, Vector(vocab.size,Seq(wordId, freq))
        (id, Vectors.sparse(vocabulary.size, counts.toSeq))
    }
    (documents, vocabArray)
  }
  // k = number of LDA topics; i = max iterations. Analyzes the 20-minute
  // window before `timestamp` and persists a topic/hashtag summary row.
  def main(sqlContext: SQLContext, timestamp: Timestamp, k: Int = 5, i: Int = 20) = {
    // Set up jdbc properties
    // NOTE(review): credentials and file paths are hardcoded — should come from
    // configuration/environment rather than source.
    val url = "jdbc:mysql://localhost/default"
    val table = "Tweets"
    val properties = new Properties
    properties.setProperty("user", "myuser")
    properties.setProperty("password", "mypassword")
    // Load up stopwords
    val stopwords = sqlContext.sparkContext.textFile("file:///path/to/spark-twitter-lda/stopwords.txt").collect()
    val dataFrame = sqlContext.read.jdbc(url, table, properties)
    dataFrame.registerTempTable("temp")
    // Get tweets from the last 30min (10 minutes from both side of the peak)
    // NOTE(review): start is 20 minutes before `timestamp`, end is `timestamp`
    // itself — a 20-minute window, not 30 as the comment above says; confirm.
    val start = new Timestamp(timestamp.getTime() - (60000 * 20))
    val peak_at = new Timestamp(timestamp.getTime() - (60000 * 10))
    val end = timestamp
    val tweets = // Query tweets from start to end
      sqlContext.sql("SELECT text, hashtags FROM temp WHERE '" +
        end + "'>CAST(created_at AS STRING) AND '" +
        start + "'<CAST(created_at AS STRING) AND lang='en'")
    val tweetsRDD = tweets.map(row => row.getString(0))
    val hashtagsRDD = tweets.map(row => row.getString(1))
    // Take ten most frequent hashtags
    // NOTE(review): take(11) grabs 11 entries, not 10 — presumably to absorb an
    // empty-string token from splitting; verify against the data.
    val hashtags10 = hashtagsRDD.flatMap(l => l.toLowerCase().split(" "))
      .map(w => (w, 1L)).reduceByKey(_ + _).sortBy(-_._2).map(_._1).take(11).mkString(" ")
    // Process the RDD
    val (documents, vocabArray) = preprocess(tweetsRDD, stopwords)
    val docCount = documents.count()
    // Setup LDA
    val lda = new LDA()
    val optimizer = new OnlineLDAOptimizer().setMiniBatchFraction(0.05)
    lda.setOptimizer(optimizer).setK(k).setMaxIterations(i)
    // Run LDA
    val ldaModel = lda.run(documents)
    val topicIndices = ldaModel.describeTopics(10)
    // Create a string from LDA result
    val topics = topicIndices.map {
      case (wordIds, termWeights) =>
        wordIds.zip(termWeights).map { case (wordId, weight) => (vocabArray(wordId.toInt), weight) }
    }
    // HTML fragment: one line per topic plus the hashtag summary.
    val results = topics.zipWithIndex.map {
      case (topic, i) =>
        "<b>Topic " + (i + 1) + "</b>: " + topic.map(_._1).mkString(" ") + "<br>"
    }.mkString("\\n") + "\\n<b>Top 10 hashtags</b>:<br> " + hashtags10
    // Schema for LDA results table
    val schema =
      StructType(
        StructField("peak_at", TimestampType) ::
          StructField("LDA", StringType) ::
          StructField("hashtags", StringType) ::
          Nil)
    val resultRow = sqlContext.sparkContext.parallelize(Array(Row(peak_at, results, hashtags10)))
    val dfToSave = sqlContext.createDataFrame(resultRow, schema)
    // Write to database
    dfToSave.write.mode("append").jdbc(url, "LDAResults", properties)
  }
}
| hari4171/spark-twitter-lda | src/main/scala/LDAObject.scala | Scala | apache-2.0 | 5,377 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.comm
import com.ibm.spark.annotations.Experimental
import com.ibm.spark.kernel.protocol.v5._
import scala.util.Try
@Experimental
object CommCallbacks {
  // Callback signatures: (writer used to respond, comm id, [target name,] message data).
  type OpenCallback = (CommWriter, UUID, String, MsgData) => Unit
  type MsgCallback = (CommWriter, UUID, MsgData) => Unit
  type CloseCallback = (CommWriter, UUID, MsgData) => Unit
}
import com.ibm.spark.comm.CommCallbacks._
/**
 * Immutable registry of callbacks fired on Comm open/msg/close events.
 * Every add/remove operation returns a new [[CommCallbacks]]; the receiver is
 * never mutated.
 *
 * @param openCallbacks The callbacks fired when a Comm is opened
 * @param msgCallbacks The callbacks fired when a Comm message arrives
 * @param closeCallbacks The callbacks fired when a Comm is closed
 */
@Experimental
class CommCallbacks(
  val openCallbacks: Seq[CommCallbacks.OpenCallback] = Nil,
  val msgCallbacks: Seq[CommCallbacks.MsgCallback] = Nil,
  val closeCallbacks: Seq[CommCallbacks.CloseCallback] = Nil
) {
  // Internal copy helper so each add/remove names only the field it changes.
  private def updated(
    open: Seq[OpenCallback] = openCallbacks,
    msg: Seq[MsgCallback] = msgCallbacks,
    close: Seq[CloseCallback] = closeCallbacks
  ): CommCallbacks = new CommCallbacks(open, msg, close)

  /** Returns a new instance with `openCallback` appended to the open callbacks. */
  def addOpenCallback(openCallback: OpenCallback): CommCallbacks =
    updated(open = openCallbacks :+ openCallback)

  /** Returns a new instance with `msgCallback` appended to the msg callbacks. */
  def addMsgCallback(msgCallback: MsgCallback): CommCallbacks =
    updated(msg = msgCallbacks :+ msgCallback)

  /** Returns a new instance with `closeCallback` appended to the close callbacks. */
  def addCloseCallback(closeCallback: CloseCallback): CommCallbacks =
    updated(close = closeCallbacks :+ closeCallback)

  /** Returns a new instance without any open callback equal to `openCallback`. */
  def removeOpenCallback(openCallback: OpenCallback): CommCallbacks =
    updated(open = openCallbacks.filterNot(_ == openCallback))

  /** Returns a new instance without any msg callback equal to `msgCallback`. */
  def removeMsgCallback(msgCallback: MsgCallback): CommCallbacks =
    updated(msg = msgCallbacks.filterNot(_ == msgCallback))

  /** Returns a new instance without any close callback equal to `closeCallback`. */
  def removeCloseCallback(closeCallback: CloseCallback): CommCallbacks =
    updated(close = closeCallbacks.filterNot(_ == closeCallback))

  /**
   * Invokes every registered open callback, capturing each outcome in a Try so
   * one failing callback does not prevent the rest from running.
   *
   * @param commWriter The Comm Writer that can be used for responses
   * @param commId The Comm Id to pass to all open callbacks
   * @param targetName The Comm Target Name to pass to all open callbacks
   * @param data The data to pass to all open callbacks
   *
   * @return One Try per registered open callback, in registration order
   */
  def executeOpenCallbacks(
    commWriter: CommWriter, commId: UUID, targetName: String, data: MsgData
  ) = openCallbacks.map(f => Try(f(commWriter, commId, targetName, data)))

  /**
   * Invokes every registered msg callback, capturing each outcome in a Try.
   *
   * @param commWriter The Comm Writer that can be used for responses
   * @param commId The Comm Id to pass to all msg callbacks
   * @param data The data to pass to all msg callbacks
   *
   * @return One Try per registered msg callback, in registration order
   */
  def executeMsgCallbacks(commWriter: CommWriter, commId: UUID, data: MsgData) =
    msgCallbacks.map(f => Try(f(commWriter, commId, data)))

  /**
   * Invokes every registered close callback, capturing each outcome in a Try.
   *
   * @param commWriter The Comm Writer that can be used for responses
   * @param commId The Comm Id to pass to all close callbacks
   * @param data The data to pass to all close callbacks
   *
   * @return One Try per registered close callback, in registration order
   */
  def executeCloseCallbacks(commWriter: CommWriter, commId: UUID, data: MsgData) =
    closeCallbacks.map(f => Try(f(commWriter, commId, data)))
}
| gef756/spark-kernel | protocol/src/main/scala/com/ibm/spark/comm/CommCallbacks.scala | Scala | apache-2.0 | 5,305 |
/*
* Copyright 2013 - 2020 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.phantom.thrift.tests.binary.suites
import com.datastax.driver.core.utils.UUIDs
import com.outworkers.phantom.dsl._
import com.outworkers.phantom.thrift.tests.binary.BinarySuite
import com.outworkers.util.samplers._
// Round-trips Thrift structs (and a set column of structs) through phantom
// columns, using the binary serialization suite's table definitions.
class ThriftColumnTest extends BinarySuite {
  it should "allow storing thrift columns" in {
    val id = UUIDs.timeBased()
    val sample = gen[ThriftTest]
    // Insert a generated struct keyed by a time-based UUID, then read the
    // same row back by id.
    val insert = thriftDb.thriftColumnTable.insert
      .value(_.id, id)
      .value(_.name, sample.name)
      .value(_.ref, sample)
      .future() flatMap {
      _ => thriftDb.thriftColumnTable.select.where(_.id eqs id).one()
    }
    whenReady(insert) { result =>
      result.value.struct shouldEqual sample
    }
  }
  it should "allow storing lists of thrift objects" in {
    val id = UUIDs.timeBased()
    val sample = gen[ThriftTest]
    val sample2 = gen[ThriftTest]
    val sampleList = Set(sample, sample2)
    // Persist both a single struct column and a set-of-structs column,
    // then verify both survive the round trip.
    val insert = thriftDb.thriftColumnTable.insert
      .value(_.id, id)
      .value(_.name, sample.name)
      .value(_.ref, sample)
      .value(_.thriftSet, sampleList)
      .future() flatMap {
      _ => thriftDb.thriftColumnTable.select.where(_.id eqs id).one()
    }
    whenReady(insert) { result =>
      result.value.struct shouldEqual sample
      result.value.thriftSet shouldEqual sampleList
    }
  }
}
| outworkers/phantom | phantom-thrift/src/test/scala/com/outworkers/phantom/thrift/tests/binary/suites/ThriftColumnTest.scala | Scala | apache-2.0 | 1,956 |
package com.clarifi.reporting
import scalaz._
import Scalaz._
import Show._
import Equal._
import Ordering._
import Order._
import java.util.{Date,Locale,UUID}
import java.text.{SimpleDateFormat,DateFormat,ParseException}
import PrimT._
/** Primitive relational expressions */
/** Primitive relational expressions.
  *
  * Each concrete case pairs a payload value with a nullability flag; `typ`
  * is the matching runtime type tag. Binary arithmetic is defined
  * constructor-wise and is null-propagating: when either operand is a
  * NullExpr, that NullExpr is returned unchanged.
  */
sealed abstract class PrimExpr(val typ: PrimT) extends Equals {
  type Value
  def value: Value
  def nullable: Boolean
  /** True exactly when this is a NullExpr. */
  def isNull: Boolean = this match {
    case NullExpr(_) => true
    case _ => false
  }
  /** Some(payload) for non-null expressions, None for NullExpr. */
  def valueOption: Option[Any] = this match {
    case NullExpr(pt) => None
    case _ => Some(value)
  }
  /** Renders the payload as a string; `ifNull` (evaluated lazily) supplies
    * the result for NullExpr. Dates use the shared GMT short-date format. */
  def extractNullableString(ifNull: => String): String = this match {
    case StringExpr(_, s) => s
    case DoubleExpr(_, d) => d.toString
    case ByteExpr(_, d) => d.toString
    case ShortExpr(_, d) => d.toString
    case LongExpr(_, d) => d.toString
    case IntExpr(_, i) => i.toString
    case DateExpr(_, d) => PrimExprs.dateFormatter.format(d)
    case BooleanExpr(_, b) => b.toString
    case UuidExpr(_, u) => u.toString
    case NullExpr(pt) => ifNull
  }
  /** As extractNullableString, but a null value is a fatal error. */
  def extractString: String = extractNullableString(sys.error("Could not extract a string value from null of type: " + typ))
  /** Numeric view of the payload; strings are parsed, dates map to epoch
    * millis. Fails for booleans, UUIDs and nulls. */
  def extractDouble: Double = this match {
    case StringExpr(_, s) => s.toDouble
    case DoubleExpr(_, d) => d
    case ByteExpr(_, i) => i.toDouble
    case ShortExpr(_, i) => i.toDouble
    case IntExpr(_, i) => i.toDouble
    case LongExpr(_, i) => i.toDouble
    case DateExpr(_, d) => d.getTime.toDouble
    case _ => sys.error("Could not extract a double value from " + this)
  }
  /** Date view of the payload: strings are parsed with the default locale
    * formatter, numbers are interpreted as epoch millis. */
  def extractDate: Date = this match {
    case StringExpr(_, s) => DateFormat.getDateInstance.parse(s)
    case DoubleExpr(_, d) => new Date(d.toLong)
    case IntExpr(_, i) => new Date(i)
    case DateExpr(_, d) => d
    case _ => sys.error("Could not extract a date value from " + this)
  }
  /** UUID view of the payload; strings are parsed via UUID.fromString. */
  def extractUuid: UUID = this match {
    case StringExpr(_, s) => UUID.fromString(s)
    case UuidExpr(_, s) => s
    case _ => sys.error("Could not extract a UUID value from " + this)
  }
  /** Minimally total catamorphism that preserves most value structure.
    * All numeric constructors are widened to Double. */
  def minCata[Z](string: String => Z, double: Double => Z, bool: Boolean => Z,
                 date: Date => Z, uuid: UUID => Z, nul: => Z): Z = this match {
    case StringExpr(_, s) => string(s)
    case _: DoubleExpr | _: ByteExpr | _: ShortExpr | _: IntExpr
       | _: LongExpr => double(extractDouble)
    case DateExpr(_, d) => date(d)
    case BooleanExpr(_, b) => bool(b)
    case UuidExpr(_, u) => uuid(u)
    case NullExpr(_) => nul
  }
  /** Pointwise addition; the result is nullable if either operand is.
    * Null operands propagate. Date addition sums epoch millis. */
  def +(p: PrimExpr) = (this, p) match {
    case (DoubleExpr(b1, x), DoubleExpr(b2, y)) => DoubleExpr(b1 || b2, (x + y).toDouble)
    case (ByteExpr(b1, x), ByteExpr(b2, y)) => ByteExpr(b1 || b2, (x + y).toByte)
    case (ShortExpr(b1, x), ShortExpr(b2, y)) => ShortExpr(b1 || b2, (x + y).toShort)
    case (IntExpr(b1, x), IntExpr(b2, y)) => IntExpr(b1 || b2, x + y)
    case (LongExpr(b1, x), LongExpr(b2, y)) => LongExpr(b1 || b2, (x + y).toLong)
    case (DateExpr(b1, x), DateExpr(b2, y)) => DateExpr(b1 || b2, new Date((x.getTime + y.getTime).toLong))
    case (n@NullExpr(_), y) => n
    case (y, n@NullExpr(_)) => n
    case _ => sys.error(this.toString + " and " + p + " do not support addition.")
  }
  /** Pointwise subtraction; see `+` for nullability and null propagation. */
  def -(p: PrimExpr) = (this, p) match {
    case (DoubleExpr(b1, x), DoubleExpr(b2, y)) => DoubleExpr(b1 || b2, (x - y).toDouble)
    case (ByteExpr(b1, x), ByteExpr(b2, y)) => ByteExpr(b1 || b2, (x - y).toByte)
    case (ShortExpr(b1, x), ShortExpr(b2, y)) => ShortExpr(b1 || b2, (x - y).toShort)
    case (IntExpr(b1, x), IntExpr(b2, y)) => IntExpr(b1 || b2, x - y)
    case (LongExpr(b1, x), LongExpr(b2, y)) => LongExpr(b1 || b2, x - y)
    case (DateExpr(b1, x), DateExpr(b2, y)) => DateExpr(b1 || b2, new Date(x.getTime - y.getTime))
    case (n@NullExpr(_), y) => n
    case (y, n@NullExpr(_)) => n
    case _ => sys.error(this.toString + " and " + p + " do not support subtraction.")
  }
  /** Pointwise multiplication; see `+` for nullability and null propagation. */
  def *(p: PrimExpr) = (this, p) match {
    case (DoubleExpr(b1, x), DoubleExpr(b2, y)) => DoubleExpr(b1 || b2, x * y)
    case (ByteExpr(b1, x), ByteExpr(b2, y)) => ByteExpr(b1 || b2, (x * y).toByte)
    case (ShortExpr(b1, x), ShortExpr(b2, y)) => ShortExpr(b1 || b2, (x * y).toShort)
    case (IntExpr(b1, x), IntExpr(b2, y)) => IntExpr(b1 || b2, x * y)
    case (LongExpr(b1, x), LongExpr(b2, y)) => LongExpr(b1 || b2, x * y)
    case (DateExpr(b1, x), DateExpr(b2, y)) => DateExpr(b1 || b2, new Date(x.getTime * y.getTime))
    case (n@NullExpr(_), y) => n
    case (y, n@NullExpr(_)) => n
    case _ => sys.error(this.toString + " and " + p + " do not support multiplication.")
  }
  /** Pointwise division; note dates are NOT divisible (unlike +, -, *). */
  def /(p: PrimExpr) = (this, p) match {
    case (DoubleExpr(b1, x), DoubleExpr(b2, y)) => DoubleExpr(b1 || b2, x / y)
    case (ByteExpr(b1, x), ByteExpr(b2, y)) => ByteExpr(b1 || b2, (x / y).toByte)
    case (ShortExpr(b1, x), ShortExpr(b2, y)) => ShortExpr(b1 || b2, (x / y).toShort)
    case (IntExpr(b1, x), IntExpr(b2, y)) => IntExpr(b1 || b2, x / y)
    case (LongExpr(b1, x), LongExpr(b2, y)) => LongExpr(b1 || b2, x / y)
    case (n@NullExpr(_), y) => n
    case (y, n@NullExpr(_)) => n
    case _ => sys.error(this.toString + " and " + p + " do not support division.")
  }
  /** Pointwise exponentiation via math.pow, truncated back to the
    * operand type; see `+` for nullability and null propagation. */
  def pow(p: PrimExpr) = (this, p) match {
    case (DoubleExpr(b1, x), DoubleExpr(b2, y)) => DoubleExpr(b1 || b2, math.pow(x, y))
    case (ByteExpr(b1, x), ByteExpr(b2, y)) => ByteExpr(b1 || b2, math.pow(x, y).toByte)
    case (ShortExpr(b1, x), ShortExpr(b2, y)) => ShortExpr(b1 || b2, math.pow(x, y).toShort)
    case (IntExpr(b1, x), IntExpr(b2, y)) => IntExpr(b1 || b2, math.pow(x, y).toInt)
    case (LongExpr(b1, x), LongExpr(b2, y)) => LongExpr(b1 || b2, math.pow(x, y).toLong)
    case (DateExpr(b1, x), DateExpr(b2, y)) => DateExpr(b1 || b2, new Date(math.pow(x.getTime, y.getTime).toLong))
    case (n@NullExpr(_), y) => n
    case (y, n@NullExpr(_)) => n
    case _ => sys.error(this.toString + " and " + p + " do not support exponentiation.")
  }
  /** Change type to nullable, if not already. */
  def withNull: PrimExpr = if (this nullable) this else (this match {
    case x: UuidExpr => x.copy(nullable=true)
    case x: StringExpr => x.copy(nullable=true)
    case x: ByteExpr => x.copy(nullable=true)
    case x: ShortExpr => x.copy(nullable=true)
    case x: IntExpr => x.copy(nullable=true)
    case x: LongExpr => x.copy(nullable=true)
    case x: DoubleExpr => x.copy(nullable=true)
    case x: DateExpr => x.copy(nullable=true)
    case x: BooleanExpr => x.copy(nullable=true)
    case _: NullExpr => this
  })
  // Equality delegates to the Order instance, which ignores nullability
  // flags. Previously a non-PrimExpr argument raised scala.MatchError; it
  // now compares unequal, as Object#equals requires.
  // NOTE(review): subclasses keep their case-class-generated hashCode,
  // which does include the nullable flag, so hashCode may disagree with
  // this equals — confirm before keying hash maps on PrimExprs.
  override def equals(a: Any): Boolean = a match {
    case a: PrimExpr => (a canEqual this) &&
      Order[PrimExpr].equal(this, a)
    case _ => false
  }
  // NullExpr does not compare equal to itself in some contexts (evaluating predicates,
  // doing joins in Mem)
  def equalsIfNonNull(a: PrimExpr): Boolean =
    if (this.isNull || a.isNull) false
    else this == a
  // NullExpr renders as the empty string; everything else as its payload.
  override def toString: String = cata(Option(value))(_.toString, "")
}
/** A UUID payload. */
case class UuidExpr(nullable: Boolean, value: UUID)
  extends PrimExpr(UuidT(nullable)) {
  type Value = UUID
}
/** A string payload; the declared StringT length is fixed at 0. */
case class StringExpr(nullable: Boolean, value: String)
  extends PrimExpr(StringT(0, nullable)) {
  type Value = String
}
/** A byte payload. */
case class ByteExpr(nullable: Boolean, value: Byte)
  extends PrimExpr(ByteT(nullable)) {
  type Value = Byte
}
/** A short payload. */
case class ShortExpr(nullable: Boolean, value: Short)
  extends PrimExpr(ShortT(nullable)) {
  type Value = Short
}
/** An int payload. */
case class IntExpr(nullable: Boolean, value: Int)
  extends PrimExpr(IntT(nullable)) {
  type Value = Int
}
/** A long payload. */
case class LongExpr(nullable: Boolean, value: Long)
  extends PrimExpr(LongT(nullable)) {
  type Value = Long
}
/** A date payload. */
case class DateExpr(nullable: Boolean, value: Date)
  extends PrimExpr(DateT(nullable)) {
  type Value = Date
}
/** A boolean payload. */
case class BooleanExpr(nullable: Boolean, value: Boolean)
  extends PrimExpr(BooleanT(nullable)) {
  type Value = Boolean
}
/** The null value, tagged with the (nulled) type it stands in for.
  * Deliberately never compares equal to anything — including itself —
  * mirroring SQL null semantics; hashCode is a constant 0. */
case class NullExpr(t: PrimT) extends PrimExpr(t.withNull) {
  type Value = None.type
  override def equals(that: Any): Boolean = false
  override def hashCode: Int = 0
  def value = None
  def nullable = true
}
/** A double payload. */
case class DoubleExpr(nullable: Boolean, value: Double)
  extends PrimExpr(PrimT.DoubleT(nullable)) { dbl =>
  type Value = Double
}
/** Shared date formatters and formatting helpers for PrimExpr rendering.
  *
  * NOTE(review): java.text DateFormat/SimpleDateFormat instances are
  * mutable; these shared vals assume they are not used concurrently —
  * confirm against callers.
  */
object PrimExprs {
  val dateFormatter = DateFormat.getDateInstance(DateFormat.SHORT, Locale.ENGLISH)
  val tz = java.util.TimeZone.getTimeZone("GMT")
  // Pin the shared short-date formatter to GMT so parse/format results do
  // not depend on the host timezone. The month/year formatters below are
  // NOT pinned and use the host default.
  dateFormatter.getCalendar.setTimeZone(tz)
  val mmmyyyy = new SimpleDateFormat("MMM yyyy")
  val mmm = new SimpleDateFormat("MMM")
  val yyyy = new SimpleDateFormat("yyyy")
  // Parses with the GMT short-date formatter; None on malformed input.
  def parseDate(s: String): Option[Date] =
    try { Some(dateFormatter.parse(s)) }
    catch { case e: ParseException => None }
  def formatDate(d: Date): String =
    dateFormatter.format(d)
  def formatMonthYear(d: Date): String =
    mmmyyyy.format(d)
  def formatYear(d: Date): String =
    yyyy.format(d)
  def formatMonth(d: Date): String =
    mmm.format(d)
}
/** Type-class instances and factory/monoid helpers for PrimExpr. */
object PrimExpr {
  implicit val PrimExprShow: Show[PrimExpr] = showFromToString[PrimExpr]
  // Fixed rank per constructor, used to totally order expressions of
  // different runtime types before payloads are compared.
  private def ctorOrder(a: PrimExpr) = a match {
    case StringExpr(_, _) => 0
    case DoubleExpr(_, _) => 1
    case ByteExpr(_, _) => 2
    case ShortExpr(_, _) => 3
    case LongExpr(_, _) => 4
    case IntExpr(_, _) => 5
    case DateExpr(_, _) => 6
    case BooleanExpr(_, _) => 7
    case UuidExpr(_, _) => 8
    case NullExpr(_) => 9
  }
  // Total order: constructor rank first, then payload. Nullability flags do
  // not participate, so values differing only in nullability compare EQ.
  implicit val PrimExprOrder: Order[PrimExpr] = order((a, b) => {
    val typeorder = ctorOrder(a) ?|? ctorOrder(b)
    // XXX as of this comment scalaz Ordering semigroup is strict on
    // right, so we have to fake it
    if (typeorder === EQ)
      (a, b) match {
        case (StringExpr(_, v1), StringExpr(_, v2)) => v1 ?|? v2
        case (IntExpr(_, v1), IntExpr(_, v2)) => v1 ?|? v2
        // doubleOrdering doesn't follow the Order laws, so we can't use it.
        case (DoubleExpr(_, v1), DoubleExpr(_, v2)) => v1 ?|? v2
        case (ByteExpr(_, v1), ByteExpr(_, v2)) => v1 ?|? v2
        case (ShortExpr(_, v1), ShortExpr(_, v2)) => v1 ?|? v2
        case (LongExpr(_, v1), LongExpr(_, v2)) => v1 ?|? v2
        case (DateExpr(_, v1), DateExpr(_, v2)) => v1.getTime ?|? v2.getTime
        case (BooleanExpr(_, v1), BooleanExpr(_, v2)) => v1 ?|? v2
        case (UuidExpr(_, v1), UuidExpr(_, v2)) => v1 ?|? v2
        case (NullExpr(t1), NullExpr(t2)) => t1 ?|? t2
        // Unreachable when ctorOrder is consistent; kept as a guard.
        case (a, b) => sys.error("Error in PrimExpr ordering: cannot compare " + a.shows + " with " + b.shows)
      }
    else typeorder
  })
  implicit val PrimOrdering = PrimExprOrder.toScalaOrdering
  // Builds a PrimExpr of runtime type `t` from an Int seed. The Date and
  // UUID conversions are acknowledged hacks (seed reused as millis/bits).
  def mkExpr(i: Int, t: PrimT) = t match {
    case StringT(l,n) => StringExpr(n, i.toString)
    case ByteT(n) => ByteExpr(n, i.toByte)
    case ShortT(n) => ShortExpr(n, i.toShort)
    case IntT(n) => IntExpr(n, i)
    case LongT(n) => LongExpr(n, i)
    case DoubleT(n) => DoubleExpr(n, i)
    case DateT(n) => DateExpr(n, new Date(i)) // Hack
    case BooleanT(n) => BooleanExpr(n, i != 0)
    case UuidT(n) => UuidExpr(n, new UUID(i, i)) // Hack
  }
  // Builds a PrimExpr of runtime type `t` from a Double seed; narrowing
  // conversions truncate.
  def mkExpr(d: Double, t: PrimT) = t match {
    case StringT(l,n) => StringExpr(n, d.toString)
    case ByteT(n) => ByteExpr(n, d.toByte)
    case ShortT(n) => ShortExpr(n, d.toShort)
    case IntT(n) => IntExpr(n, d.toInt)
    case LongT(n) => LongExpr(n, d.toLong)
    case DoubleT(n) => DoubleExpr(n, d)
    case DateT(n) => DateExpr(n, new Date(d.toLong)) // Hack
    case BooleanT(n) => BooleanExpr(n, d != 0.0)
    case UuidT(n) => UuidExpr(n, new UUID(d.toLong, d.toLong)) // Hack
  }
  // Monoid under PrimExpr `+` with mkExpr(0, t) as the identity. Note that
  // `+` propagates nulls, so any NullExpr absorbs the whole sum.
  def sumMonoid(t: PrimT) = new Monoid[PrimExpr] {
    val zero = mkExpr(0, t)
    def append(x: PrimExpr, y: => PrimExpr) = x + y
  }
  // Monoid taking the smaller operand; NullExpr acts as the identity but
  // forces the surviving value nullable. Boolean min is conjunction.
  def minMonoid(t: PrimT) = new Monoid[PrimExpr] {
    val zero = NullExpr(t)
    def append(x: PrimExpr, y: => PrimExpr) = (x, y) match {
      case (StringExpr(n, a), StringExpr(m, b)) => StringExpr(m && n, if (a <= b) a else b)
      case (ByteExpr(n, a), ByteExpr(m, b)) => ByteExpr(m && n, a min b)
      case (ShortExpr(n, a), ShortExpr(m, b)) => ShortExpr(m && n, a min b)
      case (IntExpr(n, a), IntExpr(m, b)) => IntExpr(m && n, a min b)
      case (LongExpr(n, a), LongExpr(m, b)) => LongExpr(m && n, a min b)
      case (DoubleExpr(n, a), DoubleExpr(m, b)) => DoubleExpr(m && n, a min b)
      case (DateExpr(n, a), DateExpr(m, b)) => DateExpr(m && n, if (a before b) a else b)
      case (BooleanExpr(n, a), BooleanExpr(m, b)) => BooleanExpr(m && n, a && b)
      case (NullExpr(t), e) => e.withNull
      case (e, NullExpr(t)) => e.withNull
      case (_, _) => sys.error("Type mismatch: " + x.typ + " is not " + y.typ)
    }
  }
  // Monoid taking the larger operand; NullExpr acts as the identity but
  // forces the surviving value nullable. Boolean max is disjunction.
  def maxMonoid(t: PrimT) = new Monoid[PrimExpr] {
    val zero = NullExpr(t)
    def append(x: PrimExpr, y: => PrimExpr) = (x, y) match {
      case (StringExpr(n, a), StringExpr(m, b)) => StringExpr(m && n, if (a >= b) a else b)
      case (ByteExpr(n, a), ByteExpr(m, b)) => ByteExpr(m && n, a max b)
      case (ShortExpr(n, a), ShortExpr(m, b)) => ShortExpr(m && n, a max b)
      case (IntExpr(n, a), IntExpr(m, b)) => IntExpr(m && n, a max b)
      case (LongExpr(n, a), LongExpr(m, b)) => LongExpr(m && n, a max b)
      case (DoubleExpr(n, a), DoubleExpr(m, b)) => DoubleExpr(m && n, a max b)
      case (DateExpr(n, a), DateExpr(m, b)) => DateExpr(m && n, if (a after b) a else b)
      case (BooleanExpr(n, a), BooleanExpr(m, b)) => BooleanExpr(m && n, a || b)
      case (NullExpr(t), e) => e.withNull
      case (e, NullExpr(t)) => e.withNull
      case (_, _) => sys.error("Type mismatch: " + x.typ + " is not " + y.typ)
    }
  }
}
| ermine-language/ermine-legacy | src/main/scala/com/clarifi/reporting/PrimExpr.scala | Scala | bsd-2-clause | 13,649 |
/*
* Copyright 2021 ABSA Group Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package za.co.absa.spline.consumer.service.repo
import za.co.absa.spline.consumer.service.model.{ExpressionGraph, Operation}
import scala.concurrent.{ExecutionContext, Future}
/** Read-side repository for expression graphs. */
trait ExpressionRepository {
  /**
   * Resolves the graph of expressions used by the given operation.
   *
   * @param operationId id of the operation whose expressions are requested
   * @param ec execution context the asynchronous lookup runs on
   * @return a future completing with the operation's expression graph
   */
  def expressionGraphUsedByOperation(operationId: Operation.Id)(implicit ec: ExecutionContext): Future[ExpressionGraph]
}
| AbsaOSS/spline | consumer-services/src/main/scala/za/co/absa/spline/consumer/service/repo/ExpressionRepository.scala | Scala | apache-2.0 | 934 |
/*
* Copyright (C) 2016 Department for Business, Energy and Industrial Strategy
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package controllers
import javax.inject.Inject
import forms.validation.CostItem
import models._
import play.api.libs.json.{JsArray, JsDefined, JsObject, JsValue}
import play.api.mvc.Result
import play.api.mvc.Results._
import services.{ApplicationFormOps, ApplicationOps, OpportunityOps}
import scala.concurrent.{ExecutionContext, Future}
/** Orchestrates save/complete/preview/submit actions on application form
  * sections, dispatching on the section type (simple form vs item list)
  * and rendering the appropriate redirect or redisplayed form. */
class ActionHandler @Inject()(applications: ApplicationOps, applicationForms: ApplicationFormOps, opportunities: OpportunityOps)(implicit ec: ExecutionContext)
  extends ApplicationResults {
  import ApplicationData._
  import FieldCheckHelpers._
  /** Saves a form section's field values (deleting the section when all
    * fields are empty) and returns to the overview. List sections are
    * saved item-by-item elsewhere, so they just redirect. */
  def doSave(app: ApplicationSectionDetail, fieldValues: JsObject): Future[Result] = {
    app.formSection.sectionType match {
      case SectionTypeForm => {
        if (JsonHelpers.allFieldsEmpty(fieldValues)) applications.deleteSection(app.id, app.sectionNumber)
        else applications.saveSection(app.id, app.sectionNumber, fieldValues)
      }.map(_ => redirectToOverview(app.id))
      case SectionTypeList => Future.successful(redirectToOverview(app.id))
    }
  }
  /** Marks a section complete; on validation errors the section form is
    * redisplayed with those errors. */
  def doComplete(app: ApplicationSectionDetail, fieldValues: JsObject): Future[Result] = {
    val answers = app.formSection.sectionType match {
      case SectionTypeForm => fieldValues
      // Instead of using the values that were passed in from the form we'll use the values that
      // have already been saved against the item list, since these were created by the add-item
      // form.
      case SectionTypeList => app.section.map(_.answers).getOrElse(JsObject(Seq()))
    }
    applications.completeSection(app.id, app.sectionNumber, answers).map {
      case Nil => redirectToOverview(app.id)
      case errs => redisplaySectionForm(app, answers, errs)
    }
  }
  /** Saves a single list item; an all-empty submission deletes the section.
    * Save errors redisplay the form. */
  def doSaveItem(app: ApplicationSectionDetail, fieldValues: JsObject): Future[Result] = {
    JsonHelpers.allFieldsEmpty(fieldValues) match {
      case true => applications.deleteSection(app.id, app.sectionNumber).map(_ => redirectToOverview(app.id))
      case false => applications.saveItem(app.id, app.sectionNumber, fieldValues).flatMap {
        case Nil => Future.successful(redirectToOverview(app.id))
        case errs => Future.successful(redisplaySectionForm(app, fieldValues, errs))
      }
    }
  }
  /** Runs preview-level checks; form sections are saved before previewing,
    * list sections go straight to the preview page. */
  def doPreview(app: ApplicationSectionDetail, fieldValues: JsObject): Future[Result] = {
    app.formSection.sectionType match {
      case SectionTypeForm =>
        val errs = check(fieldValues, previewChecksFor(app.formSection))
        if (errs.isEmpty) applications.saveSection(app.id, app.sectionNumber, fieldValues).map(_ => redirectToPreview(app.id, app.sectionNumber))
        else Future.successful(redisplaySectionForm(app, fieldValues, errs))
      case SectionTypeList => Future.successful(redirectToPreview(app.id, app.sectionNumber))
    }
  }
  /** Submits the whole application; None if submission was not possible. */
  def doSubmit(id: ApplicationId): Future[Option[SubmittedApplicationRef]] = {
    applications.submit(id)
  }
  /** Completes the section and, if both the preview checks and the
    * completion checks pass, jumps to the preview page. */
  def completeAndPreview(app: ApplicationSectionDetail, fieldValues: JsObject): Future[Result] = {
    val answers = app.formSection.sectionType match {
      case SectionTypeForm => fieldValues
      // Instead of using the values that were passed in from the form we'll use the values that
      // have already been saved against the item list, since these were created by the add-item
      // form.
      case SectionTypeList => app.section.map(_.answers).getOrElse(JsObject(Seq()))
    }
    val previewCheckErrs = check(answers, previewChecksFor(app.formSection))
    if (previewCheckErrs.isEmpty) {
      JsonHelpers.allFieldsEmpty(answers) match {
        case true => applications.deleteSection(app.id, app.sectionNumber).map(_ => redirectToOverview(app.id))
        case false => applications.completeSection(app.id, app.sectionNumber, answers).map {
          case Nil => redirectToPreview(app.id, app.sectionNumber)
          case errs => redisplaySectionForm(app, answers, errs)
        }
      }
    } else Future.successful(redisplaySectionForm(app, answers, previewCheckErrs))
  }
  def redirectToPreview(id: ApplicationId, sectionNumber: AppSectionNumber) =
    Redirect(routes.ApplicationPreviewController.previewSection(id, sectionNumber))
  /** Renders the section form using the answers already saved, if any. */
  def renderSectionForm(app: ApplicationSectionDetail,
                        errs: FieldErrors,
                        hints: FieldHints): Result = {
    val answers = app.section.map { s => s.answers }.getOrElse(JsObject(List.empty))
    selectSectionForm(app, answers, errs)
  }
  /** Re-renders the section form with the submitted answers and errors. */
  def redisplaySectionForm(app: ApplicationSectionDetail, answers: JsObject, errs: FieldErrors = noErrors): Result = {
    selectSectionForm(app, answers, errs)
  }
  /** Chooses the form vs list view; an empty item list redirects to the
    * add-item page instead of rendering an empty list. */
  def selectSectionForm(app: ApplicationSectionDetail, answers: JsObject, errs: FieldErrors): Result = {
    val checks = app.formSection.fields.map(f => f.name -> f.check).toMap
    val hints = hinting(answers, checks)
    app.formSection.sectionType match {
      case SectionTypeForm => Ok(views.html.sectionForm(app, answers, errs, hints))
      case SectionTypeList =>
        answers \\ "items" match {
          case JsDefined(JsArray(is)) if is.nonEmpty =>
            val itemValues: Seq[JsValue] = (answers \\ "items").validate[JsArray].asOpt.map(_.value).getOrElse(Seq())
            val costItems = itemValues.flatMap(_.validate[CostItem].asOpt)
            Ok(views.html.sectionList(app, costItems, answers, errs, hints))
          case _ => Redirect(controllers.routes.CostController.addItem(app.id, app.formSection.sectionNumber))
        }
    }
  }
  // Per-field preview-level checks for a form section.
  def previewChecksFor(formSection: ApplicationFormSection): Map[String, FieldCheck] =
    formSection.fields.map(f => f.name -> f.previewCheck).toMap
}
| UKGovernmentBEIS/rifs-frontend-play | src/main/scala/controllers/ActionHandler.scala | Scala | gpl-3.0 | 6,404 |
/*
* Select.scala
* Distributions with randomly chosen outcomes.
*
* Created By: Avi Pfeffer (apfeffer@cra.com)
* Creation Date: Jan 1, 2009
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.language
import com.cra.figaro.library.atomic.continuous._
import com.cra.figaro.util._
/**
* Distributions with randomly chosen outcomes. The probabilities can
* either be simple (Doubles) or complex (Elements).
*
* @param clauses The list of pairs of probability specifications and values.
* @tparam P The type of the probability specification.
* @tparam T The type of values of this element.
*/
abstract class Select[P, T](name: Name[T], val clauses: List[(P, T)], collection: ElementCollection)
  extends Element[T](name, collection) with Cacheable[T] {
  // Randomness is a uniform [0, 1) draw used to pick one outcome.
  type Randomness = Double
  def generateRandomness() = random.nextDouble()
  private[figaro] lazy val (probs, outcomes) = clauses.unzip // lazy to avoid uninitialized val bug
  override def toString = {
    val clauseStrings = clauses map (clause => clause._1.toString + " -> " + clause._2)
    "Select(" + clauseStrings.mkString(", ") + ")"
  }
}
/**
* A distribution in which both the probabilities and the outcomes are values. Each outcome is
* chosen with the corresponding probability.
*/
class AtomicSelect[T](name: Name[T], clauses: List[(Double, T)], collection: ElementCollection)
  extends Select(name, clauses, collection) with Atomic[T] {
  // Probabilities renormalized to sum to 1, paired back with their outcomes.
  private lazy val weights = normalize(probs)
  private lazy val weightedClauses = weights zip outcomes
  // Total normalized mass assigned to `outcome` (outcomes may repeat).
  def density(outcome: T) = weightedClauses.filter(_._2 == outcome).map(_._1).sum
  def generateValue(rand: Randomness) = selectMultinomial(rand, weightedClauses)
}
/**
* A distribution in which the probabilities are Elements and the outcomes are values.
*/
class CompoundSelect[T](name: Name[T], clauses: List[(Element[Double], T)], collection: ElementCollection)
  extends Select(name, clauses, collection) {
  // The probability elements are this element's dependencies.
  def args: List[Element[_]] = probs
  def generateValue(rand: Randomness) = {
    // This line generates a warning but it is not applicable
    // Force any probability element that has not been generated yet.
    probs.foreach(prob => if (prob.value == null) prob.generate())
    val unnormalized = probs map (_.value)
    // Normalize on each draw, since the probability elements may change.
    val normalized = normalize(unnormalized)
    selectMultinomial(rand, normalized zip outcomes)
  }
}
/**
* A distribution in which the probabilities are learnable parameters and the outcomes are values.
*/
class ParameterizedSelect[T](name: Name[T], override val parameter: AtomicDirichlet, outcomes: List[T], collection: ElementCollection)
  extends Select(name, parameter.alphas.toList zip outcomes, collection) with Parameterized[T] {
  // Normalized Dirichlet alphas, used only for value generation; density
  // reads the parameter's current value directly.
  private lazy val normalizedProbs = normalize(probs)
  def args: List[Element[_]] = List(parameter)
  private lazy val normalizedClauses = normalizedProbs zip outcomes
  // Projects a distribution over values onto the fixed outcome order,
  // filling in 0.0 for outcomes absent from the distribution.
  def distributionToStatistics(distribution: Stream[(Double, T)]): Seq[Double] = {
    val distList = distribution.toList
    for { outcome <- outcomes }
    yield {
      distList.find(_._2 == outcome) match {
        case Some((prob, _)) => prob
        case None => 0.0
      }
    }
  }
  // Density from the learned parameter; 0.0 for values not among outcomes.
  def density(value: T): Double = {
    outcomes.indexOf(value) match {
      case -1 => 0.0
      case i => parameter.value(i)
    }
  }
  def generateValue(rand: Randomness) = selectMultinomial(rand, normalizedClauses)
}
object Select {
  // Single construction point for parameterized selects, kept private so
  // the public overloads below are the only entry points.
  private def makeParameterizedSelect[T](name: Name[T], parameter: AtomicDirichlet, outcomes: List[T], collection: ElementCollection): ParameterizedSelect[T] = {
    new ParameterizedSelect(name, parameter, outcomes, collection)
  }
  /**
   * A distribution in which both the probabilities and the outcomes are values. Each outcome is
   * chosen with the corresponding probability.
   */
  def apply[T](clauses: (Double, T)*)(implicit name: Name[T], collection: ElementCollection) =
    new AtomicSelect(name, clauses.toList, collection)
  /**
   * A distribution in which both the probabilities and the outcomes are values. Each outcome is
   * chosen with the corresponding probability.
   */
  def apply[T](probabilities: List[Double], outcomes: List[T])(implicit name: Name[T], collection: ElementCollection) =
    new AtomicSelect(name, probabilities zip outcomes, collection)
  /**
   * A distribution in which the probabilities are Elements and the outcomes are values.
   */
  def apply[T](clauses: (Element[Double], T)*)(implicit name: Name[T], collection: ElementCollection) =
    new CompoundSelect(name, clauses.toList, collection)
  /**
   * A distribution in which the probabilities are Elements and the outcomes are values.
   */
  def apply[T](probabilities: List[Element[Double]], outcomes: List[T])(implicit name: Name[T], collection: ElementCollection) =
    new CompoundSelect(name, probabilities zip outcomes, collection)
  /**
   * A distribution in which the probabilities are specified by a learnable parameter and the outcomes are values.
   */
  def apply[T](parameter: AtomicDirichlet, outcomes: T*)(implicit name: Name[T], collection: ElementCollection) =
    makeParameterizedSelect(name, parameter, outcomes.toList, collection)
}
| wkretschmer/figaro | Figaro/src/main/scala/com/cra/figaro/language/Select.scala | Scala | bsd-3-clause | 5,362 |
package com.twitter.finagle.client
import com.twitter.conversions.time._
import com.twitter.finagle.Stack.Module0
import com.twitter.finagle._
import com.twitter.finagle.context.Contexts
import com.twitter.finagle.dispatch.SerialClientDispatcher
import com.twitter.finagle.factory.BindingFactory
import com.twitter.finagle.loadbalancer.LoadBalancerFactory
import com.twitter.finagle.naming.{DefaultInterpreter, NameInterpreter}
import com.twitter.finagle.netty3.Netty3Transporter
import com.twitter.finagle.server.StringServer
import com.twitter.finagle.service.FailFastFactory.FailFast
import com.twitter.finagle.service.PendingRequestFilter
import com.twitter.finagle.stats.InMemoryStatsReceiver
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.util.StackRegistry
import com.twitter.finagle.{param, Name}
import com.twitter.util._
import com.twitter.util.registry.{GlobalRegistry, SimpleRegistry, Entry}
import java.net.{InetAddress, InetSocketAddress}
import java.util.concurrent.RejectedExecutionException
import java.util.concurrent.atomic.AtomicInteger
import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfter, FunSuite}
import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import org.scalatest.junit.JUnitRunner
private object StackClientTest {
  /** A string client whose dispatcher fails the request if `localKey` is
    * present in the local context when the dispatcher is built — used to
    * prove that local-context values do not leak into client
    * materialization. */
  case class LocalCheckingStringClient(
    localKey: Contexts.local.Key[String],
    stack: Stack[ServiceFactory[String, String]] = StackClient.newStack,
    params: Stack.Params = Stack.Params.empty)
    extends StdStackClient[String, String, LocalCheckingStringClient] {
    protected def copy1(
      stack: Stack[ServiceFactory[String, String]] = this.stack,
      params: Stack.Params = this.params
    ): LocalCheckingStringClient = copy(localKey, stack, params)
    protected type In = String
    protected type Out = String
    protected def newTransporter(): Transporter[String, String] =
      Netty3Transporter(StringClientPipeline, params)
    protected def newDispatcher(
      transport: Transport[In, Out]
    ): Service[String, String] = {
      // If the local key leaked into this scope, fail every request with a
      // descriptive exception instead of dispatching.
      Contexts.local.get(localKey) match {
        case Some(s) =>
          Service.constant(
            Future.exception(
              new IllegalStateException("should not have a local context: " + s)))
        case None =>
          new SerialClientDispatcher(transport)
      }
    }
  }
}
@RunWith(classOf[JUnitRunner])
class StackClientTest extends FunSuite
with StringClient
with StringServer
with BeforeAndAfter
with Eventually
with IntegrationPatience {
  // Per-test fixture: a fresh in-memory stats receiver wired into a client.
  trait Ctx {
    val sr = new InMemoryStatsReceiver
    val client = stringClient
      .configured(param.Stats(sr))
  }
  // Some tests replace the global name interpreter; restore the default.
  after {
    NameInterpreter.global = DefaultInterpreter
  }
  // Stats-scope precedence: the dest string when no label is configured,
  // then param.Label, then the label evaluated from the dest itself.
  test("client stats are scoped to label")(new Ctx {
    // use dest when no label is set
    client.newService("inet!127.0.0.1:8080")
    eventually {
      val counter = sr.counters(Seq("inet!127.0.0.1:8080", "loadbalancer", "adds"))
      assert(counter == 1, s"The instance should be to the loadbalancer once instead of $counter times.")
    }
    // use param.Label when set
    client.configured(param.Label("myclient")).newService("127.0.0.1:8080")
    eventually {
      assert(sr.counters(Seq("myclient", "loadbalancer", "adds")) == 1)
    }
    // use evaled label when both are set
    client.configured(param.Label("myclient")).newService("othername=127.0.0.1:8080")
    eventually {
      assert(sr.counters(Seq("othername", "loadbalancer", "adds")) == 1)
    }
  })
  // Building two clients under the same label must produce exactly one
  // entry in the global client registry.
  test("Client added to client registry")(new Ctx {
    ClientRegistry.clear()
    val name = "testClient"
    client.newClient(Name.bound(Address(8080)), name)
    client.newClient(Name.bound(Address(8080)), name)
    assert(ClientRegistry.registrants.count { e: StackRegistry.Entry =>
      val param.Label(actual) = e.params[param.Label]
      name == actual
    } == 1)
  })
  // With a stack whose factory always fails, the host should be marked
  // dead and later requests fail fast — unless FailFast is disabled, in
  // which case the raw failure repeats and no dead-marking stat appears.
  test("FailFast is respected") {
    val ctx = new Ctx { }
    val ex = new RuntimeException("lol")
    // A stack module that simulates an unreachable host.
    val alwaysFail = new Module0[ServiceFactory[String, String]] {
      val role = Stack.Role("lol")
      val description = "lool"
      def make(next: ServiceFactory[String, String]) =
        ServiceFactory.apply(() => Future.exception(ex))
    }
    val alwaysFailStack = new StackBuilder(stack.nilStack[String, String])
      .push(alwaysFail)
      .result
    val stk = ctx.client.stack.concat(alwaysFailStack)
    // Builds a FactoryToService client with FailFast optionally overridden.
    def newClient(name: String, failFastOn: Option[Boolean]): Service[String, String] = {
      var stack = ctx.client
        .configured(param.Label(name))
        .withStack(stk)
      failFastOn.foreach { ffOn =>
        stack = stack.configured(FailFast(ffOn))
      }
      val client = stack.newClient("/$/inet/localhost/0")
      new FactoryToService[String, String](client)
    }
    def testClient(name: String, failFastOn: Option[Boolean]): Unit = {
      val svc = newClient(name, failFastOn)
      val e = intercept[RuntimeException] { Await.result(svc("hi")) }
      assert(e == ex)
      failFastOn match {
        case Some(on) if !on =>
          // FailFast disabled: no dead-marking, the original error repeats.
          assert(ctx.sr.counters.get(Seq(name, "failfast", "marked_dead")) == None)
          intercept[RuntimeException] { Await.result(svc("hi2")) }
        case _ =>
          // FailFast enabled or default: marked dead, then fails fast.
          eventually {
            assert(ctx.sr.counters(Seq(name, "failfast", "marked_dead")) == 1)
          }
          intercept[FailedFastException] { Await.result(svc("hi2")) }
      }
    }
    testClient("ff-client-default", None)
    testClient("ff-client-enabled", Some(true))
    testClient("ff-client-disabled", Some(false))
  }
  // Baseline: with an unpooled stack, closing the FactoryToService-wrapped
  // service closes the underlying service.
  test("FactoryToService close propagated to underlying service") {
    /*
     * This test ensures that the following one doesn't succeed vacuously.
     */
    var closed = false
    // Factory whose services record when close() reaches them.
    val underlyingFactory = new ServiceFactory[Unit, Unit] {
      def apply(conn: ClientConnection) = Future.value(new Service[Unit, Unit] {
        def apply(request: Unit): Future[Unit] = Future.Unit
        override def close(deadline: Time) = {
          closed = true
          Future.Done
        }
      })
      def close(deadline: Time) = Future.Done
    }
    val stack = StackClient.newStack[Unit, Unit]
      .concat(Stack.Leaf(Stack.Role("role"), underlyingFactory))
      // don't pool or else we don't see underlying close until service is ejected from pool
      .remove(DefaultPool.Role)
    val factory = stack.make(Stack.Params.empty +
      FactoryToService.Enabled(true) +
      // default Dest is /$/fail
      BindingFactory.Dest(Name.Path(Path.read("/$/inet/localhost/0"))))
    val service = new FactoryToService(factory)
    Await.result(service(()))
    assert(closed)
  }
test("prepFactory above FactoryToService") {
/*
* This approximates code in finagle-http which wraps services (in
* prepFactory) so the close is delayed until the chunked response
* has been read. We need prepFactory above FactoryToService or
* else FactoryToService closes the underlying service too soon.
*/
var closed = false
val underlyingFactory = new ServiceFactory[Unit, Unit] {
def apply(conn: ClientConnection) = Future.value(new Service[Unit, Unit] {
def apply(request: Unit): Future[Unit] = Future.Unit
override def close(deadline: Time) = {
closed = true
Future.Done
}
})
def close(deadline: Time) = Future.Done
}
val stack = StackClient.newStack[Unit, Unit]
.concat(Stack.Leaf(Stack.Role("role"), underlyingFactory))
// don't pool or else we don't see underlying close until service is ejected from pool
.remove(DefaultPool.Role)
.replace(StackClient.Role.prepFactory, { next: ServiceFactory[Unit, Unit] =>
next map { service: Service[Unit, Unit] =>
new ServiceProxy[Unit, Unit](service) {
override def close(deadline: Time) = Future.never
}
}
})
val factory = stack.make(Stack.Params.empty +
FactoryToService.Enabled(true) +
// default Dest is /$/fail
BindingFactory.Dest(Name.Path(Path.read("/$/inet/localhost/0"))))
val service = new FactoryToService(factory)
Await.result(service(()))
assert(!closed)
}
  // Shared fixture for the requeue tests: installs a stub load balancer whose
  // service always fails with a retryable WriteException, so every request
  // exercises the client's requeue module. Counters/gauges are inspected via
  // the in-memory stats receiver.
  trait RequeueCtx {
    // number of requests the stub service has seen so far
    var count = 0
    // status reported by the stub load balancer; tests mutate this to
    // simulate the balancer becoming Busy/Closed mid-run
    var _status: Status = Status.Open
    // when runSideEffect(count) is true, sideEffect() runs before the failure
    var runSideEffect = (_: Int) => false
    var sideEffect = () => ()
    val stubLB = new ServiceFactory[String, String] {
      def apply(conn: ClientConnection) = Future.value(new Service[String, String] {
        def apply(request: String): Future[String] = {
          count += 1
          if (runSideEffect(count)) sideEffect()
          // WriteException is marked retryable, triggering requeues
          Future.exception(WriteException(new Exception("boom")))
        }
        override def close(deadline: Time) = Future.Done
      })
      def close(deadline: Time) = Future.Done
      override def status = _status
    }
    val sr = new InMemoryStatsReceiver
    val client = stringClient.configured(param.Stats(sr))
    // splice the stub in place of the real load balancer
    val stk = client.stack.replace(
      LoadBalancerFactory.role,
      (_: ServiceFactory[String, String]) => stubLB
    )
    val cl = client
      .withStack(stk)
      .configured(param.Label("myclient"))
      .newClient("/$/inet/localhost/0")
    // convenience accessors for the requeue counter and remaining retry budget
    def requeues = sr.counters.get(Seq("myclient", "retries", "requeues"))
    def budget = sr.gauges(Seq("myclient", "retries", "budget"))()
  }
// we get 20% of the budget, which is given 100 minimum retries
private val DefaultRequeues = 20
test("requeue failing requests when the stack is Open")(new RequeueCtx {
val session = cl()
val b = budget
// failing request and Open load balancer => max requeues
Await.ready(session.map(_("hi")), 5.seconds)
assert(requeues == Some(DefaultRequeues))
assert(budget == b - DefaultRequeues)
})
for (status <- Seq(Status.Busy, Status.Closed)) {
test(s"don't requeue failing requests when the stack is $status")(new RequeueCtx {
// failing request and Busy | Closed load balancer => zero requeues
_status = status
Await.ready(cl().map(_("hi")), 5.seconds)
assert(requeues.isEmpty)
})
}
test("dynamically stop requeuing")(new RequeueCtx {
// load balancer begins Open, becomes Busy after 10 requeues => 10 requeues
_status = Status.Open
runSideEffect = _ > DefaultRequeues
sideEffect = () => _status = Status.Busy
Await.ready(cl().map(_("hi")), 5.seconds)
assert(requeues == Some(DefaultRequeues))
})
test("service acquisition requeues use a separate fixed budget")(new RequeueCtx {
override val stubLB = new ServiceFactory[String, String] {
def apply(conn: ClientConnection) = Future.exception(
Failure.rejected("unable to establish session")
)
def close(deadline: Time) = Future.Done
}
intercept[Failure] { Await.result(cl(), 5.seconds) }
assert(requeues.isDefined)
assert(budget > 0)
})
  // A non-restartable Failure during session acquisition must not be requeued
  // (asserted below via requeues.isEmpty).
  test("service acquisition requeues respect Failure.Restartable")(new RequeueCtx {
    override val stubLB = new ServiceFactory[String, String] {
      def apply(conn: ClientConnection) = Future.exception(
        Failure("don't restart this!")
      )
      def close(deadline: Time) = Future.Done
    }
    intercept[Failure] { Await.result(cl(), 5.seconds) }
    assert(requeues.isEmpty)
    assert(budget > 0)
  })
  // When the underlying factory reports Closed, acquisition fails without
  // consuming any of the requeue budget.
  test("service acquisition requeues respect Status.Open")(new RequeueCtx {
    _status = Status.Closed
    Await.result(cl(), 5.seconds)
    assert(requeues.isEmpty)
    assert(budget > 0)
  })
test("Requeues all go to the same cluster in a Union") {
/*
* Once we have distributed a request to a particular cluster (in
* BindingFactory), retries should go to the same cluster rather
* than being redistributed (possibly to a different cluster).
*/
class CountFactory extends ServiceFactory[Unit, Unit] {
var count = 0
val service = new Service[Unit, Unit] {
def apply(request: Unit): Future[Unit] = {
count = count + 1
Future.exception(WriteException(null))
}
}
def apply(conn: ClientConnection) = Future.value(service)
def close(deadline: Time) = Future.Done
}
val fac1 = new CountFactory
val fac2 = new CountFactory
val addr1 = Address(1729)
val addr2 = Address(1730)
val baseDtab = Dtab.read("/s=>/test")
// override name resolution to a Union of two addresses, and check
// that the base dtab is properly passed in
NameInterpreter.global = new NameInterpreter {
override def bind(dtab: Dtab, path: Path): Activity[NameTree[Name.Bound]] = {
assert(dtab == baseDtab)
Activity.value(NameTree.Union(
NameTree.Weighted(1D, NameTree.Leaf(Name.bound(addr1))),
NameTree.Weighted(1D, NameTree.Leaf(Name.bound(addr2)))))
}
}
val stack = StackClient.newStack[Unit, Unit]
// direct the two addresses to the two service factories instead
// of trying to connect to them
.replace(LoadBalancerFactory.role,
new Stack.Module1[LoadBalancerFactory.Dest, ServiceFactory[Unit, Unit]] {
val role = new Stack.Role("role")
val description = "description"
def make(dest: LoadBalancerFactory.Dest, next: ServiceFactory[Unit, Unit]) = {
val LoadBalancerFactory.Dest(va) = dest
va.sample() match {
case Addr.Bound(addrs, _) if addrs == Set(addr1) => fac1
case Addr.Bound(addrs, _) if addrs == Set(addr2) => fac2
case _ => throw new IllegalArgumentException("wat")
}
}
})
val sr = new InMemoryStatsReceiver
val service =
new FactoryToService(stack.make(Stack.Params.empty +
FactoryToService.Enabled(true) +
param.Stats(sr) +
BindingFactory.BaseDtab(() => baseDtab)))
intercept[ChannelWriteException] {
Await.result(service(()), 5.seconds)
}
val requeues = sr.counters(Seq("retries", "requeues"))
// all retries go to one service
assert(
(fac1.count == requeues+1 && fac2.count == 0) ||
(fac2.count == requeues+1 && fac1.count == 0))
}
  // Compile-time check: configured() on a StackBasedClient yields a value
  // still typed as StackBasedClient (the refined type is preserved).
  test("StackBasedClient.configured is a StackClient") {
    // compilation test
    val client: StackBasedClient[String, String] = stringClient
    val client2: StackBasedClient[String, String] =
      client.configured(param.Label("foo"))
    val client3: StackBasedClient[String, String] =
      client.configured[param.Label]((param.Label("foo"), param.Label.param))
  }
  // Same compile-time check for the StackClient type itself.
  test("StackClient.configured is a StackClient") {
    // compilation test
    val client: StackClient[String, String] = stringClient
    val client2: StackClient[String, String] =
      client.configured(param.Label("foo"))
    val client3: StackClient[String, String] =
      client.configured[param.Label]((param.Label("foo"), param.Label.param))
  }
  // Binds a client directly to an in-process ServiceFactory via
  // exp.Address.ServiceFactory (no network), and verifies requests reach it.
  test("StackClient binds to a local service via exp.Address.ServiceFactory") {
    val reverser = Service.mk[String, String] { in => Future.value(in.reverse) }
    val sf = ServiceFactory(() => Future.value(reverser))
    val addr = exp.Address(sf)
    val name = Name.bound(addr)
    val service = stringClient.newService(name, "sfsa-test")
    val forward = "a man a plan a canal: panama"
    val reversed = Await.result(service(forward), 1.second)
    assert(reversed == forward.reverse)
  }
  // filtered() should compose the given filter on top of the client stack:
  // the echo server receives the already-reversed request.
  test("filtered composes filters atop the stack") {
    val echoServer = Service.mk[String, String] { in => Future.value(in) }
    val sf = ServiceFactory(() => Future.value(echoServer))
    val addr = exp.Address(sf)
    val name = Name.bound(addr)
    val reverseFilter = new SimpleFilter[String, String] {
      def apply(str: String, svc: Service[String, String]) =
        svc(str.reverse)
    }
    val svc = stringClient.filtered(reverseFilter).newRichClient(name, "test_client")
    assert(Await.result(svc.ping(), 1.second) == "ping".reverse)
  }
test("endpointer clears Contexts") {
import StackClientTest._
val key = new Contexts.local.Key[String]
Contexts.local.let(key, "SomeCoolContext") {
val echoSvc = Service.mk[String, String]{ Future.value }
val server = stringServer.serve(
new InetSocketAddress(InetAddress.getLoopbackAddress, 0),
echoSvc)
val ia = server.boundAddress.asInstanceOf[InetSocketAddress]
val client = new LocalCheckingStringClient(key)
.newService(Name.bound(Address(ia)), "a-label")
val result = Await.result(client("abc"), 5.seconds)
assert("abc" == result)
}
}
test("pending request limit is per connection") {
class CountingService(p: Promise[Unit]) extends Service[Unit, Unit] {
var pending = new AtomicInteger()
val satisfied = new AtomicInteger()
def apply(req: Unit): Future[Unit] = {
pending.incrementAndGet()
p.ensure(satisfied.incrementAndGet())
}
}
val (p1, p2) = (new Promise[Unit], new Promise[Unit])
val (endpoint1, endpoint2) = (new CountingService(p1), new CountingService(p2))
var first = true
val stack = StackClient.newStack[Unit, Unit]
.concat(Stack.Leaf(Stack.Role("role"),
new ServiceFactory[Unit, Unit] {
def apply(conn: ClientConnection): Future[Service[Unit, Unit]] =
if (first) {
first = false
Future.value(endpoint1)
}
else {
Future.value(endpoint2)
}
def close(deadline: Time): Future[Unit] = Future.Done
}
))
.remove(DefaultPool.Role)
val sr = new InMemoryStatsReceiver
val params =
Stack.Params.empty +
param.Stats(sr) +
DefaultPool.Param(
low = 0,
high = 2,
bufferSize = 0,
idleTime = Duration.Zero,
maxWaiters = 0) +
FactoryToService.Enabled(false) +
PendingRequestFilter.Param(Some(2)) +
BindingFactory.Dest(Name.Path(Path.read("/$/inet/localhost/0")))
val svcFac = stack.make(params)
val session1 = Await.result(svcFac(), 3.seconds)
// pending
val e1r1 = session1(())
// pending
val e1r2 = session1(())
// rejected
val e1r3 = session1(())
val e1rejected = intercept[Failure] { Await.result(e1r3, 3.seconds) }
val session2 = Await.result(svcFac(), 3.seconds)
// pending
val e2r1 = session2(())
// pending
val e2r2 = session2(())
// rejected
val e2r3 = session2(())
val e2rejected = intercept[Failure] { Await.result(e2r3, 3.seconds) }
// endpoint1 and endpoint2 both only see the first two requests,
// meaning they get distinct pending request limits
assert(endpoint1.pending.get() == 2)
assert(endpoint2.pending.get() == 2)
assert(endpoint1.satisfied.get() == 0)
assert(endpoint2.satisfied.get() == 0)
assert(!e1r1.isDefined)
assert(!e1r2.isDefined)
intercept[RejectedExecutionException] { throw e1rejected.cause.get }
intercept[RejectedExecutionException] { throw e2rejected.cause.get }
// pending requests are satisfied
p1.setDone()
p2.setDone()
assert(endpoint1.satisfied.get() == 2)
assert(endpoint2.satisfied.get() == 2)
// subsequent requests aren't filtered
val e2r4 = session2(())
val e2r5 = session2(())
val e2r6 = session2(())
Await.result(e2r4, 3.seconds)
Await.result(e2r5, 3.seconds)
Await.result(e2r6, 3.seconds)
assert(endpoint2.satisfied.get() == 5)
}
test("exports transporter type to registry") {
val listeningServer = stringServer
.serve(":*", Service.mk[String, String](Future.value(_)))
val boundAddress = listeningServer.boundAddress.asInstanceOf[InetSocketAddress]
val label = "stringClient"
val svc = stringClient.newService(Name.bound(Address(boundAddress)), label)
val registry = new SimpleRegistry
Await.result(GlobalRegistry.withRegistry(registry) {
svc("hello world")
}, 5.seconds)
val expectedEntry = Entry(
key = Seq("client", StringClient.protocolLibrary, label, "Transporter"),
value = "Netty3Transporter")
assert(registry.iterator.contains(expectedEntry))
Await.result(listeningServer.close(), 5.seconds)
Await.result(svc.close(), 5.seconds)
}
}
| adriancole/finagle | finagle-core/src/test/scala/com/twitter/finagle/client/StackClientTest.scala | Scala | apache-2.0 | 20,329 |
/*
* Copyright (c) 2021, salesforce.com, inc.
* All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
package com.krux.hyperion
import com.krux.hyperion.PipelineLifeCycle.Status
/**
 * Callback hooks invoked at stages of a pipeline's life cycle.
 * Both callbacks are no-ops by default; implementations override
 * the ones they care about.
 */
trait PipelineLifeCycle {
  // Invoked once the pipeline has been created.
  def onCreated(id: String, name: String, status: Status.Value): Unit = {
  }
  // Invoked once the pipeline has been uploaded.
  def onUploaded(id: String, name: String, status: Status.Value): Unit = {
  }
}
object PipelineLifeCycle {
  // Outcome reported to the life-cycle callbacks.
  object Status extends Enumeration {
    val Success, Fail, SuccessWithWarnings = Value
  }
}
| realstraw/hyperion | core/src/main/scala/com/krux/hyperion/PipelineLifeCycle.scala | Scala | bsd-3-clause | 618 |
package scala.slick.queryable
import scala.language.implicitConversions
import scala.slick.driver._
import scala.slick.driver.{ExtendedTable => Table}
import scala.slick.ql._
import scala.slick.{ast => sq}
trait QueryableBackend
class SlickBackend(driver:BasicDriver) extends QueryableBackend{
import scala.reflect.mirror._
object removeTypeAnnotations extends reflect.mirror.Transformer {
def apply( tree:Tree ) = transform(tree)
override def transform(tree: Tree): Tree = {
super.transform {
tree match {
case TypeApply( tree, _ ) => tree
case Typed( tree, _ ) => tree
case tree => tree
}
}
}
}
type Scope = Map[Symbol,sq.Symbol]
def Scope() : Scope = Map()
class Query(
val node : sq.Node,
val scope : Scope
)
// // Why does this not work?
// invoke( n, classToType( n.getClass ).nonPrivateMember(newTermName("generator")) ).asInstanceOf[sq.Symbol]
def symbol2type( s:Symbol ) : Type = classToType(symbolToClass(s))
def classToQuery[T:reflect.ConcreteTypeTag] : Query = typetagToQuery( typeTag[T] )
def typetagToQuery(typetag:reflect.mirror.TypeTag[_]) : Query = {
val scala_symbol = classToSymbol(typetag.erasure)
val table =
new Table[Nothing]({
val ants = scala_symbol.annotations
ants match {
case AnnotationInfo(tpe,tree,_) :: Nil // FIXME:<- don't match list, match any annotation
//if tpe <:< classToType(classOf[table]) // genJVM bug
=>
{
val name = tree(0).toString
name.slice( 1,name.length-1 ) // FIXME: <- why needed?
}
case a => throw new Exception("Type argument passed to Queryable.apply needs database mapping annotations. None found on: " + typetag.erasure.toString )
}
}){def * = ???}
val sq_symbol = new sq.AnonSymbol
val columns =
classToType( typetag.erasure ).widen.members.collect{
case member if member.annotations.size > 0 && member.annotations.exists{
case x@AnnotationInfo(tpe,tree,_)
if tpe <:< classToType(classOf[column])
=> true
} => member.annotations.collect{
case x@AnnotationInfo(tpe,tree,_)
if tpe <:< classToType(classOf[column])
=>{ // FIXME: is this the right way to do it?
val name = tree(0).toString
name.slice( 1,name.length-1 ) // FIXME: <- why needed?
}
}.head
}.map(
column_name =>
sq.FieldRef(sq_symbol, sq.FieldSymbol(column_name)(Some(RawNamedColumn(column_name)(List(),null))) )
).toSeq
new Query( sq.Bind(sq_symbol, table, sq.Pure(sq.ProductNode(columns:_*))), Scope() )
}
/* def apply( tree:Tree, queryable:Queryable[_] ) : Query = {
this.apply(tree,queryable.query.scope)
}*/
def toQuery( tree:Tree, scope : Scope = Scope() ) : Query = {
val toolbox = mkToolBox(mkConsoleFrontEnd(),"")
// val typed_tree = toolbox.typeCheck(tree.asInstanceOf[reflect.runtime.Mirror.Tree] ).asInstanceOf[reflect.mirror.Tree]
val typed_tree = toolbox.typeCheck(tree)
scala2scalaquery_typed( removeTypeAnnotations(typed_tree), scope )
}
private def scala2scalaquery_typed( tree:Tree, scope : Scope ) : Query = {
def s2sq( tree:Tree, scope:Scope=scope ) : Query = scala2scalaquery_typed( tree, scope )
implicit def node2Query(node:sq.Node) = new Query( node, scope )
try{
tree match {
// explicitly state types here until SQ removes type parameters and type mapper from ConstColumn
case Literal(Constant(x:Int)) => ConstColumn[Int](x)
case Literal(Constant(x:String)) => ConstColumn[String](x)
case Literal(Constant(x:Double)) => ConstColumn[Double](x)
case node@Ident(name) if node.symbol.isInstanceOf[scala.reflect.internal.Symbols#FreeTerm] => // TODO: move this into a separate inlining step in queryable
node.symbol.asInstanceOf[scala.reflect.internal.Symbols#FreeTerm].value match{
case q:Queryable[_] => toQuery( q )
case x => s2sq( Literal(Constant(x)) )
}
case node@Ident(name) => {
val sq_symbol = scope(node.symbol)
sq.Ref(sq_symbol) // FIXME: this is probably wrong. what should go here?
}
// match columns
case Select(from,name)
if {
val annotations = from.tpe.widen.typeSymbol.annotations
annotations.length > 0 && (annotations match {
case AnnotationInfo(tpe,_,_) :: Nil
if tpe <:< classToType(classOf[table])
=> true
case _ => false
})
}
=>
val sq_symbol= scope(from.symbol)
val type_ = from.tpe.widen
val member = type_.members.filter(_.name == name).toList(0)
val column_name = member.annotations match {
case x@AnnotationInfo(_,tree,_) :: Nil =>
{ // FIXME: is this the right way to do it?
val name = tree(0).toString
name.slice( 1,name.length-1 ) // FIXME: <- why needed?
}
case a => throw new Exception(member.toString) // FIXME
}
sq.FieldRef(sq_symbol, sq.FieldSymbol(column_name)(Some(RawNamedColumn(column_name)(List(),null))) )
case Select(a:This,b) =>
val obj = companionInstance( a.symbol )
val value = invoke( obj, a.tpe.nonPrivateMember(b) )()
value match{
case q:Queryable[_] => toQuery( q )
case x => s2sq( Literal(Constant(x)) )
}
// match queryable methods
case Apply(Select(scala_lhs,term),Function( arg::Nil, body )::Nil)
if scala_lhs.tpe.erasure <:< classToType(classOf[Queryable[_]]).erasure
=>
val sq_lhs = s2sq( scala_lhs ).node
val sq_symbol = new sq.AnonSymbol
val new_scope = scope+(arg.symbol -> sq_symbol)
val rhs = s2sq(body, new_scope)
new Query( term.decoded match {
case "_filter_placeholder" => sq.Filter( sq_symbol, sq_lhs, rhs.node )
case "_map_placeholder" => sq.Bind( sq_symbol, sq_lhs, sq.Pure(rhs.node) )
case "_flatMap_placeholder" => sq.Bind( sq_symbol, sq_lhs, rhs.node )
case e => throw new UnsupportedMethodException( scala_lhs.tpe.erasure+"."+term.decoded )
},
new_scope
)
// match scalar operators
case Apply(Select(lhs,term),rhs::Nil)
if lhs.tpe <:< classToType( classOf[Boolean] )
&& rhs.tpe <:< classToType( classOf[Boolean] )
&& List("||", "&&").contains( term.decoded )
=>
ColumnOps.Relational(term.decoded, s2sq( lhs ).node, s2sq( rhs ).node )
case Apply(Select(lhs,term),rhs::Nil)
if (lhs.tpe <:< classToType( classOf[Int] )
&& rhs.tpe <:< classToType( classOf[Int] )
) || ( lhs.tpe <:< classToType( classOf[Double] )
&& rhs.tpe <:< classToType( classOf[Double] )
)
&& List("+").contains( term.decoded )
=>
ColumnOps.Relational(term.decoded, s2sq( lhs ).node, s2sq( rhs ).node )
case d@Apply(Select(lhs,term),rhs::Nil)
if {
/*println("_a__")
println(showRaw(d))
println(showRaw(lhs))
println(rhs.symbol.asInstanceOf[scala.reflect.internal.Symbols#FreeTerm].value)
println(rhs.tpe)
println("_b__")*/
(
(lhs.tpe <:< classToType( classOf[String] ))
&& (rhs.tpe <:< classToType( classOf[String] ))
&& (List("+").contains( term.decoded ))
)
}
=>
term.decoded match {
case "+" => ColumnOps.Relational("concat", s2sq( lhs ).node, s2sq( rhs ).node )
}
case Apply(Select(lhs,term),rhs::Nil)
if List("<",">","==","!=").contains( term.decoded )
=>
ColumnOps.Relational(term.decoded, s2sq( lhs ).node, s2sq( rhs ).node )
/*
// match other methods
case Apply(Select(lhs,term),rhs::Nil)
=>
throw new UnsupportedMethodException( lhs.tpe.erasedType+"."+term.decoded+"("+rhs.tpe.erasedType+")" )
*/
case tree => /*Expr[Any](tree).eval match{
case q:Queryable[_] => q.query
case x => s2sq( Literal(Constant(x)) )
}*/
throw new Exception( "no match for: " + showRaw(tree) )
}
} catch{
case e:java.lang.NullPointerException => { println("NPE in tree "+showRaw(tree));throw e}
}
}
  // Debugging aid: compiles the Queryable and dumps its ScalaQuery AST to stdout.
  protected[slick] def dump( queryable:Queryable[_] ) = {
    val query = this.toQuery(queryable)
    sq.Node(query.node).dump("")
  }
  // Compiles the Queryable down to a SQL string using the configured driver's
  // query builder.
  protected[slick] def toSql( queryable:Queryable[_] ) = {
    val query = this.toQuery(queryable)
    import driver._
    val node = processAST(query.node)
    sq.AnonSymbol.assignNames( node )
    val builder = new QueryBuilder( node, null )
    builder.buildSelect.sql
  }
  // Dispatches on how the Queryable was built: a bare type tag maps the
  // annotated entity class to a table query; a captured expression tree is
  // translated recursively.
  protected[slick] def toQuery(queryable:Queryable[_]) : this.Query = queryable.expr_or_typetag match {
    case Right(typetag) => this.typetagToQuery( typetag )
    case Left(expr_) => this.toQuery(expr_.tree)
  }
  // NOTE(review): toList looks like unfinished scaffolding — the query node is
  // computed and then discarded, and the method always returns null instead of
  // executing the query. Confirm before relying on this method.
  def toList[T]( queryable:Queryable[T] ) : List[T] = {
    import this.driver.Implicit._
    val node = this.toQuery(queryable).node : scala.slick.ast.Node
    null
  }
}
| szeiger/scala-query | src/main/scala/scala/slick/queryable/ScalaQueryDriver.scala | Scala | bsd-2-clause | 9,591 |
/*
* Copyright (C) 2015 Holmes Team at HUAWEI Noah's Ark Lab.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.spark.streamdm.utils
import java.io._
import java.util.Random
import scala.math.{ min, max, log }
import org.apache.spark.streamdm.classifiers.OnlineClassifier
/**
* Utility methods.
*
*/
object Utils {

  /**
   * Deep-copies a classifier via Java serialization.
   *
   * The streams are closed in `finally` blocks so they are released even if
   * (de)serialization throws.
   *
   * @param classifier the original classifier to copy
   * @return an independent copy of the classifier
   */
  def copyClassifier(classifier: OnlineClassifier): OnlineClassifier = {
    val baoStream: ByteArrayOutputStream = new ByteArrayOutputStream()
    val out: ObjectOutputStream = new ObjectOutputStream(
      new BufferedOutputStream(baoStream))
    try {
      out.writeObject(classifier)
      out.flush()
    } finally {
      out.close()
    }
    val byteArray: Array[Byte] = baoStream.toByteArray()
    val in: ObjectInputStream = new ObjectInputStream(new BufferedInputStream(
      new ByteArrayInputStream(byteArray)))
    try {
      in.readObject().asInstanceOf[OnlineClassifier]
    } finally {
      in.close()
    }
  }

  /**
   * Samples a random value from a Poisson distribution.
   *
   * For lambda < 100 the CDF is inverted by accumulating scaled terms of the
   * Poisson pmf until the running sum crosses a uniform threshold; for large
   * lambda the distribution is approximated by N(lambda, lambda).
   *
   * @param lambda the mean of the Poisson distribution
   * @param r the random generator
   * @return a random value sampled from the distribution
   */
  def poisson(lambda: Double, r: Random): Double = {
    if (lambda < 100.0) {
      var product = 1.0
      var sum = 1.0
      val threshold = r.nextDouble() * Math.exp(lambda)
      var i = 1.0
      // cap the search so pathological thresholds cannot loop forever
      val limit = Math.max(100, 10 * Math.ceil(lambda).toInt)
      while ((i < limit) && (sum <= threshold)) {
        product *= (lambda / i)
        sum += product
        i += 1.0
      }
      i - 1.0
    } else {
      // Gaussian approximation, clamped at zero
      val x = lambda + Math.sqrt(lambda) * r.nextGaussian()
      if (x < 0.0) 0.0 else Math.floor(x)
    }
  }

  /**
   * Returns the most frequent (integer-valued) label in the array.
   *
   * Ties are resolved in favor of the lowest label index (see argmax).
   *
   * @param array the array of numeric class labels, each in [0, size)
   * @param size the number of distinct labels
   * @return the label with the highest frequency
   */
  def majorityVote(array: Array[Double], size: Integer): Double = {
    val frequencyArray: Array[Double] = Array.fill(size)(0)
    array.foreach(v => frequencyArray(v.toInt) += 1)
    argmax(frequencyArray)
  }

  /**
   * Returns the index of the maximum value of an array of numeric values.
   * (The original scaladoc said "minimum"; the implementation has always
   * taken the maximum.) The first maximal element wins on ties.
   *
   * @param array the array of numeric values
   * @return the index of the maximum value
   */
  def argmax(array: Array[Double]): Double = array.zipWithIndex.maxBy(_._1)._2

  /**
   * Computes the base-2 logarithm of the input.
   *
   * @param v double value
   * @return the log2 of v
   */
  def log2(v: Double): Double = log(v) / log(2)

  /**
   * Transposes a rectangular matrix.
   *
   * @param input matrix in form of 2-D array
   * @return the transpose of the input matrix
   */
  def transpose(input: Array[Array[Double]]): Array[Array[Double]] = {
    val output: Array[Array[Double]] = Array.fill(input(0).length)(new Array[Double](input.length))
    for (r <- input.indices; c <- input(r).indices)
      output(c)(r) = input(r)(c)
    output
  }

  /**
   * Splits a matrix on a column index: row 0 of the result holds the selected
   * column, row 1 holds the per-row sum of all other columns.
   *
   * @param input matrix in form of 2-D array
   * @param fIndex index of the column to isolate
   * @return a matrix of 2 rows
   */
  def splitTranspose(input: Array[Array[Double]], fIndex: Int): Array[Array[Double]] = {
    val output: Array[Array[Double]] = Array.fill(2)(new Array[Double](input.length))
    for (r <- input.indices; c <- input(r).indices) {
      if (c == fIndex) output(0)(r) = input(r)(c)
      else output(1)(r) += input(r)(c)
    }
    output
  }

  /**
   * Normalizes a matrix so all entries sum to 1.
   *
   * @param input matrix in form of 2-D array
   * @return normalized matrix
   */
  def normal(input: Array[Array[Double]]): Array[Array[Double]] = {
    val total = input.map(_.sum).sum
    input.map(row => row.map(_ / total))
  }

  /**
   * Normalizes an array so its entries sum to 1.
   *
   * @param input double array
   * @return normalized array
   */
  def normal(input: Array[Double]): Array[Double] = {
    val total = input.sum
    input.map(_ / total)
  }
}
| gosubpl/akka-online | src/main/scala/org/apache/spark/streamdm/utils/Utils.scala | Scala | apache-2.0 | 4,769 |
package io.youi.path
/** A path action that moves the drawing cursor to (x, y) without drawing. */
case class MoveTo(x: Double, y: Double) extends PathAction {
  override def draw(context: Context, x: Double, y: Double, scaleX: Double, scaleY: Double): Unit = {
    // Scale this action's coordinates, then offset by the draw origin.
    val targetX = x + (this.x * scaleX)
    val targetY = y + (this.y * scaleY)
    context.moveTo(targetX, targetY)
  }

  override def toString: String = s"MoveTo(x: $x, y: $y)"
}
| outr/youi | ui/js/src/main/scala/io/youi/path/MoveTo.scala | Scala | mit | 315 |
/**
* Licensed to the Minutemen Group under one or more contributor license
* agreements. See the COPYRIGHT file distributed with this work for
* additional information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package silhouette.http
/**
 * Defines an HTTP authentication scheme.
 *
 * @param name The name of the scheme.
 */
sealed abstract class AuthScheme(name: String) {

  /**
   * Returns the string representation of the scheme.
   *
   * @return The string representation of the scheme.
   */
  override def toString: String = name

  /**
   * Creates a string representation of the scheme with the given value.
   *
   * @param value The value to append to the scheme.
   * @return The auth scheme in the form "[NAME] [VALUE]".
   */
  def apply(value: String): String = s"$name $value"

  /**
   * An extractor that extracts the value of the scheme.
   *
   * Only the leading "[NAME] " prefix is removed. The previous implementation
   * used String#replace, which stripped *every* occurrence of the scheme name
   * from the value — e.g. unapplying "Bearer abcBearer xyz" corrupted the
   * credentials. stripPrefix removes the prefix alone.
   *
   * @param value The complete authentication scheme.
   * @return The value of the auth scheme, or None if the value does not start
   *         with this scheme's name followed by a space.
   */
  def unapply(value: String): Option[String] =
    if (value.startsWith(s"$name ")) {
      Some(value.stripPrefix(s"$name "))
    } else {
      None
    }
}
/**
* The companion object.
*/
object AuthScheme {

  /**
   * The 'Basic' authentication scheme.
   *
   * @see https://tools.ietf.org/html/rfc7617
   */
  case object Basic extends AuthScheme("Basic")

  /**
   * The 'Digest' authentication scheme.
   *
   * @see https://tools.ietf.org/html/rfc7616
   */
  case object Digest extends AuthScheme("Digest")

  /**
   * The "Bearer" token authentication scheme.
   *
   * @see https://tools.ietf.org/html/rfc6750
   */
  case object Bearer extends AuthScheme("Bearer")
}
| mohiva/silhouette | modules/http/src/main/scala/silhouette/http/AuthScheme.scala | Scala | apache-2.0 | 2,144 |
import boxity.BlackboxParserMacroSuite
import suites.CorrectParserMacros

// Runs the shared "correct parser macros" test suite against the blackbox
// parser-macro implementation.
class BlackboxMetaCorrectMacros extends BlackboxParserMacroSuite with CorrectParserMacros
| Duhemm/parsermacros | tests/src/test/scala/BlackboxMetaCorrectMacros.scala | Scala | bsd-3-clause | 164 |
package com.sksamuel.elastic4s.http.search.aggs
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
import com.sksamuel.elastic4s.searches.aggs.ExtendedStatsAggregation
/** Serializes an extended_stats aggregation definition into its JSON body. */
object ExtendedStatsAggregationBuilder {

  def apply(agg: ExtendedStatsAggregation): XContentBuilder = {
    val builder = XContentFactory.jsonBuilder()
    builder.startObject("extended_stats")
    // Emit only the optional settings that were explicitly configured.
    agg.field.foreach(f => builder.field("field", f))
    agg.sigma.foreach(s => builder.field("sigma", s))
    agg.missing.foreach(m => builder.field("missing", m))
    // Append sub-aggregations and metadata before handing the builder back.
    SubAggsBuilderFn(agg, builder)
    AggMetaDataFn(agg, builder)
    builder
  }
}
| Tecsisa/elastic4s | elastic4s-http/src/main/scala/com/sksamuel/elastic4s/http/search/aggs/ExtendedStatsAggregationBuilder.scala | Scala | apache-2.0 | 624 |
package com.scout24.pipedsl.model
// Placeholder type for a period unit; currently has no members.
class PeriodUnit {
}
| matey-jack/pipe-dsl | src/main/scala/com/scout24/pipedsl/model/PeriodUnit.scala | Scala | gpl-2.0 | 57 |
// @GENERATOR:play-routes-compiler
// @SOURCE:D:/git/trask/glowroot/agent-parent/plugins/play-plugin/tmp-router-files/conf/routes
// @DATE:Sat Apr 09 15:58:25 PDT 2016
import play.api.routing.JavaScriptReverseRoute
import play.api.mvc.{ QueryStringBindable, PathBindable, Call, JavascriptLiteral }
import play.core.routing.{ HandlerDef, ReverseRouteContext, queryString, dynamicString }
import _root_.controllers.Assets.Asset
import _root_.play.libs.F
// @LINE:5
package controllers.javascript {
import ReverseRouteContext.empty
// @LINE:8
class ReverseAssets(_prefix: => String) {
def _defaultPrefix: String = {
if (_prefix.endsWith("/")) "" else "/"
}
// @LINE:8
def versioned: JavaScriptReverseRoute = JavaScriptReverseRoute(
"controllers.Assets.versioned",
"""
function(file) {
return _wA({method:"GET", url:"""" + _prefix + { _defaultPrefix } + """" + "assets/" + (""" + implicitly[PathBindable[Asset]].javascriptUnbind + """)("file", file)})
}
"""
)
}
// @LINE:9
class ReverseBadController(_prefix: => String) {
def _defaultPrefix: String = {
if (_prefix.endsWith("/")) "" else "/"
}
// @LINE:9
def bad: JavaScriptReverseRoute = JavaScriptReverseRoute(
"controllers.BadController.bad",
"""
function() {
return _wA({method:"GET", url:"""" + _prefix + { _defaultPrefix } + """" + "bad"})
}
"""
)
}
// @LINE:5
class ReverseHomeController(_prefix: => String) {
def _defaultPrefix: String = {
if (_prefix.endsWith("/")) "" else "/"
}
// @LINE:5
def index: JavaScriptReverseRoute = JavaScriptReverseRoute(
"controllers.HomeController.index",
"""
function() {
return _wA({method:"GET", url:"""" + _prefix + """"})
}
"""
)
}
// @LINE:6
class ReverseAsyncController(_prefix: => String) {
def _defaultPrefix: String = {
if (_prefix.endsWith("/")) "" else "/"
}
// @LINE:6
def message: JavaScriptReverseRoute = JavaScriptReverseRoute(
"controllers.AsyncController.message",
"""
function() {
return _wA({method:"GET", url:"""" + _prefix + { _defaultPrefix } + """" + "message"})
}
"""
)
}
// @LINE:7
class ReverseStreamController(_prefix: => String) {
def _defaultPrefix: String = {
if (_prefix.endsWith("/")) "" else "/"
}
// @LINE:7
def stream: JavaScriptReverseRoute = JavaScriptReverseRoute(
"controllers.StreamController.stream",
"""
function() {
return _wA({method:"GET", url:"""" + _prefix + { _defaultPrefix } + """" + "stream"})
}
"""
)
}
} | trask/glowroot | agent/plugins/play-plugin/src/test/app-2.4.x-java/scala/controllers/javascript/JavaScriptReverseRoutes.scala | Scala | apache-2.0 | 2,777 |
package org.scalatest
//import org.scalatest.matchers.ShouldMatchers
import scala.collection.immutable.TreeSet
/*import org.scalacheck._
import Arbitrary._
import Prop._ */
/**
 * Behaviour spec for `Filter`: constructor precondition checks, how ignored
 * tests are reported, the interaction between tagsToInclude and tagsToExclude
 * (exercised with randomised data as well as targeted single-test-name
 * cases), and the runnable-test count.
 */
class FilterSpec extends Spec {

  describe("A Filter") {

    it("should throw NPEs if constructed with nulls") {
      intercept[NullPointerException] {
        new Filter(null, null)
      }
      intercept[NullPointerException] {
        new Filter(None, null)
      }
      intercept[NullPointerException] {
        new Filter(null, Set())
      }
    }

    it("should throw IAE if passed a Some(Set()) for tagsToInclude") {
      intercept[IllegalArgumentException] {
        new Filter(Some(Set()), Set())
      }
    }

    it("should throw IAE if passed an empty set for testName in the apply method") {
      val caught = intercept[IllegalArgumentException] {
        val filter = new Filter(None, Set())
        filter.apply(Set("hi", "ho"), Map("hi" -> Set[String]()))
      }
      // "passsed" mirrors the (misspelled) message Filter actually emits.
      assert(caught.getMessage === "hi was associated with an empty set in the map passsed as tags")
    }

    it("should throw IAE if passed an empty set for testName in the includedTestCount method") {
      val caught = intercept[IllegalArgumentException] {
        val filter = new Filter(None, Set())
        filter.runnableTestCount(Set("hi", "ho"), Map("hi" -> Set()))
      }
      assert(caught.getMessage === "hi was associated with an empty set in the map passsed as tags")
    }

    // Name and tag pools for the randomised tests below.
    // NOTE(review): "n" is absent from the name pool, and the tag pool has no
    // "tag8" while "tag9" appears twice — presumably unintentional, harmless.
    val potentialTestNames = List("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z")
    val potentialTagNames = List("tag0", "tag1", "tag2", "tag3", "org.scalatest.Ignore", "tag5", "tag6", "tag7", "tag9", "tag9")

    // Pseudo-random Int in the inclusive range [0, max].
    def randomPositiveInt(max: Int) = (Math.random * 10000).toInt % (max + 1)

    // Expects a test tagged only with Ignore to be returned by apply, marked as ignored.
    def validateIgnoreBehavior(filter: Filter) {
      val filtered = filter(Set("myTestName"), Map("myTestName" -> Set("org.scalatest.Ignore")))
      assert(filtered exists (tuple => tuple._1 == "myTestName"), "myTestName was in the tags map, but did not show up in the result of apply")
      assert(filtered exists (tuple => tuple._1 == "myTestName" && tuple._2 == true), "myTestName was in the result of apply, but was not marked as ignored")
    }

    it("should report a test as ignored when None is passed to filter for tagsToInclude, and" +
      "org.scalatest.Ignore is not passed in the tagsToExclude") {
      val filter = new Filter(None, Set("no ignore here"))
      validateIgnoreBehavior(filter)
    }

    it("should report a test as ignored when None is passed to filter for tagsToInclude, and" +
      "org.scalatest.Ignore is passed in the tagsToExclude") {
      val filter = new Filter(None, Set("org.scalatest.Ignore"))
      validateIgnoreBehavior(filter)
    }

    it("should report a test as ignored when Some(Ignore) is passed to filter for tagsToInclude, and" +
      "org.scalatest.Ignore is not passed in the tagsToExclude") {
      val filter = new Filter(Some(Set("org.scalatest.Ignore")), Set("no ignore here"))
      validateIgnoreBehavior(filter)
    }

    it("should report a test as ignored when Some(Ignore) is passed to filter for tagsToInclude, and" +
      "org.scalatest.Ignore is passed in the tagsToExclude") {
      val filter = new Filter(Some(Set("org.scalatest.Ignore")), Set("org.scalatest.Ignore"))
      validateIgnoreBehavior(filter)
    }

    // Same as validateIgnoreBehavior, but the test also carries the "Other" tag.
    def validateIgnoreOtherBehavior(filter: Filter) {
      val filtered = filter(Set("myTestName"), Map("myTestName" -> Set("org.scalatest.Ignore", "Other")))
      assert(filtered exists (tuple => tuple._1 == "myTestName"), "myTestName was in the tags map, but did not show up in the result of apply")
      assert(filtered exists (tuple => tuple._1 == "myTestName" && tuple._2 == true), "myTestName was in the result of apply, but was not marked as ignored")
    }

    it("should report a test tagged as Other as ignored when Some(Other) is passed to filter" +
      "for tagsToInclude, and org.scalatest.Ignore is not passed in the tagsToExclude") {
      val filter = new Filter(Some(Set("Other")), Set("no ignore here"))
      validateIgnoreOtherBehavior(filter)
    }

    it("should report a test tagged as Other as ignored when Some(Other) is passed to filter" +
      "for tagsToInclude, and org.scalatest.Ignore is passed in the tagsToExclude") {
      val filter = new Filter(Some(Set("Other")), Set("org.scalatest.Ignore"))
      validateIgnoreOtherBehavior(filter)
    }

    // Expects an Ignore-tagged test to be dropped entirely when Ignore is not
    // among the tags to include.
    def validateNotReportingIgnoresBehavior(filter: Filter) {
      val filtered = filter(Set("myTestName"), Map("myTestName" -> Set("org.scalatest.Ignore")))
      assert(!(filtered exists (tuple => tuple._1 == "myTestName")), "myTestName's Ignore tag was not in tagsToInclude, but showed up in the result of apply")
    }

    it("should not report a test as ignored when Some(no ignore here) is passed to filter for" +
      "tagsToInclude, and org.scalatest.Ignore is not passed in the tagsToExclude") {
      val filter = new Filter(Some(Set("no ignore here")), Set("no ignore here"))
      validateNotReportingIgnoresBehavior(filter)
    }

    it("should not report a test as ignored when Some(no ignore here) is passed to filter for" +
      "tagsToInclude, and org.scalatest.Ignore is passed in the tagsToExclude") {
      val filter = new Filter(Some(Set("no ignore here")), Set("org.scalatest.Ignore"))
      validateNotReportingIgnoresBehavior(filter)
    }

    it("should work properly when None is passed to filter for tagsToInclude") {
      // I want to pass None in for includes, pick a set of test names. From those test names, put some of them in the tags map, grabbing an arbitrary nonzero number of tags
      for (i <- 0 to 1000) {
        val testNames = potentialTestNames.drop(randomPositiveInt(potentialTestNames.length))
        val testNamesWithTags = testNames.drop(randomPositiveInt(testNames.length))
        //val tuples = for (testName <- testNamesWithTags) yield (testName, Set() ++ potentialTagNames.drop(randomPositiveInt(potentialTagNames.length - 1)))
        val tuples =
          for (testName <- testNamesWithTags) yield
            (testName, Set() ++ potentialTagNames.drop(randomPositiveInt(potentialTagNames.length - 1))) // subtract one, so never end up with an empty list
        val tags = Map() ++ tuples
        val tagsToExclude = Set() ++ potentialTagNames.drop(randomPositiveInt(potentialTagNames.length)) // Do want an empty set here occasionally
        val filter = new Filter(None, tagsToExclude)
        val filtered = filter(TreeSet[String]() ++ testNames, tags)
        // assert(filtered.sort(_ < _) === filtered) TODO: COMMENTING OUT JUST TO GET THE 2.8 BUILD OUT THE DOOR. NEED TO FIX THIS.
        for ((testName, ignore) <- filtered) {
          // testName should not be in the tagsToExclude map unless it is ignored
          if (tagsToExclude contains testName)
            assert(tags(testName) exists (_ == "org.scalatest.Ignore"), testName + " was in the filtered list and in the tags, but didn't have an Ignore tag")
        }
        // Check that every test name that is not at all in the tags map, should be in the filtered
        for (testName <- testNames) {
          if (!tags.contains(testName)) {
            assert(filtered exists (tuple => tuple._1 == testName), testName + " was not in the tags map, but did not show up in the result of apply")
            assert(filtered exists (tuple => tuple._1 == testName && tuple._2 == false), testName + " was not in the tags map, and did show up in the result of apply, but was marked as ignored")
          }
        }
        // Check that every test name that is in the tags as ignored, should be in the filtered as ignored,
        // unless it is also tagged with some other tag that is in tagsToExclude. In the latter case, the
        // other exclude tag should "overpower" the Ignore tag.
        // (`**` is the pre-2.8 alias of Set intersection.)
        for (testName <- testNames) {
          if (tags.contains(testName) && tags(testName).exists(_ == "org.scalatest.Ignore") &&
            ((tags(testName) - "org.scalatest.Ignore") ** tagsToExclude).isEmpty)
            assert(filtered exists (tuple => tuple._1 == testName && tuple._2 == true), testName + " was in the tags map as ignored, but did not show up in the result of apply marked as ignored")
        }
        // Check that only the non-ignored tests are counted in the runnableTestsCount
        val runnableTests =
          for {
            (testName, ignore) <- filtered
            if !ignore
          } yield testName
        assert(filter.runnableTestCount(Set() ++ testNames, tags) === runnableTests.size, "runnableTests = " + runnableTests + ", testNames = " + testNames + ", tags = " + tags + ", tagsToExclude = " + tagsToExclude)
      }
    }

    it("should not include an excluded tag even if it also appears as an included tag") {
      val filter = new Filter(Some(Set("Slow")), Set("Slow"))
      val filtered = filter(Set("myTestName"), Map("myTestName" -> Set("Slow")))
      assert(filtered.size === 0)
    }

    it("should include an included tag if there are no excluded tags") {
      val filter = new Filter(Some(Set("Slow")), Set())
      val filtered = filter(Set("myTestName"), Map("myTestName" -> Set("Slow")))
      assert(filtered.size === 1)
    }

    it("should work properly when Some is passed to filter for tagsToInclude") {
      // I want to pass None in for includes, pick a set of test names. From those test names, put some of them in the tags map, grabbing an arbitrary nonzero number of tags
      for (i <- 0 to 1000) {
        val testNames = potentialTestNames.drop(randomPositiveInt(potentialTestNames.length))
        val testNamesWithTags = testNames.drop(randomPositiveInt(testNames.length))
        //val tuples = for (testName <- testNamesWithTags) yield (testName, Set() ++ potentialTagNames.drop(randomPositiveInt(potentialTagNames.length - 1)))
        val tuples =
          for (testName <- testNamesWithTags) yield
            (testName, Set() ++ potentialTagNames.drop(randomPositiveInt(potentialTagNames.length - 1))) // subtract one, so never end up with an empty list
        val tags = Map() ++ tuples
        val tagsToExclude = Set() ++ potentialTagNames.drop(randomPositiveInt(potentialTagNames.length)) // Do want an empty set here occasionally
        val tagsToInclude = Set() ++ potentialTagNames.drop(randomPositiveInt(potentialTagNames.length - 1)) // Again, subtracting one to avoid an empty set, which is an illegal argument.
        val filter = new Filter(Some(tagsToInclude), tagsToExclude)
        val filtered = filter(TreeSet[String]() ++ testNames, tags)
        // assert(filtered.sort(_ < _) === filtered) TODO: COMMENTING OUT JUST TO GET THE 2.8 BUILD OUT THE DOOR. NEED TO FIX THIS.
        // Anything that's not in the include tags should not appear in the output
        // Look at everything in the output, and make sure it is in the include tags
        for ((testName, _) <- filtered) {
          assert(tags contains testName, "tags did not contain as a key the test name: " + testName)
          val tagsForTestName = tags(testName)
          val intersection = tagsToInclude ** tagsForTestName
          assert(intersection.size != 0, "None of the tags for the test name showed up in the non-empty tags to include set")
        }
        for ((testName, ignore) <- filtered) {
          // testName should not be in the tagsToExclude map unless it is ignored
          if (tagsToExclude contains testName)
            assert(tags(testName) exists (_ == "org.scalatest.Ignore"), testName + " was in the filtered list and in the tags, but didn't have an Ignore tag")
        }
        // Check that every test name that is not at all in the tags map, should not be in the filtered, because it has to be tagged by one of the tags in tagsToInclude
        for (testName <- testNames) {
          if (!tags.contains(testName)) {
            assert(!filtered.exists(tuple => tuple._1 == testName), testName + " was not in the tags map, but showed up in the result of apply even though tagsToInclude was a Some")
          }
        }
        // Check that every test name that is in the tags as ignored, which also shared a tag in common
        // with tagsToInclude, should be in the filtered as ignored, unless it is also tagged with some
        // other tag that is in tagsToExclude. In the latter case, the
        // other exclude tag should "overpower" the Ignore tag.
        for (testName <- testNames) {
          if (tags.contains(testName) && tags(testName).exists(_ == "org.scalatest.Ignore") &&
            ((tags(testName) ** tagsToInclude).size > 0) &&
            ((tags(testName) - "org.scalatest.Ignore") ** tagsToExclude).isEmpty)
            assert(filtered exists (tuple => tuple._1 == testName && tuple._2 == true), testName + " was in the tags map as ignored, but did not show up in the result of apply marked as ignored")
        }
      }
    }

    describe("(when invoking the apply method that takes one test name)") {

      val emptyMap = Map[String, Set[String]]()

      // Each assertion below checks the (filterTest, ignoreTest) pair
      // returned by the single-test-name overload of apply.
      it("should return (false, false) if tagsToInclude is None and tagsToExclude is empty" +
        "and the test has no tags") {
        val filter = new Filter(None, Set[String]())
        assert(filter("myTestName", emptyMap) === (false, false))
      }

      it("should return (true, false) if tagsToInclude is None and tagsToExclude includes" +
        "SlowAsMolasses and the test is marked as SlowAsMolasses") {
        val filter = new Filter(None, Set("SlowAsMolasses"))
        assert(filter("myTestName", Map("myTestName" -> Set("SlowAsMolasses"))) === (true, false))
      }

      it("should return (false, true) if tagsToInclude is None and tagsToExclude is empty" +
        "and the test is marked as ignored") {
        val filter = new Filter(None, Set[String]())
        assert(filter("myTestName", Map("myTestName" -> Set("org.scalatest.Ignore"))) === (false, true))
      }

      it("should return (true, false) if tagsToInclude is None and tagsToExclude includes" +
        "SlowAsMolasses and the test is marked as SlowAsMolasses and ignored") {
        val filter = new Filter(None, Set("SlowAsMolasses"))
        assert(filter("myTestName", Map("myTestName" -> Set("SlowAsMolasses", "org.scalatest.Ignore"))) === (true, false))
      }

      it("should return (false, false) if tagsToInclude includes a tag for the test name and tagsToExclude" +
        "is empty and the test has no tags") {
        val filter = new Filter(Some(Set("SlowAsMolasses")), Set[String]())
        assert(filter("myTestName", Map("myTestName" -> Set("SlowAsMolasses"))) === (false, false))
      }

      it("should return (true, false) if tagsToInclude includes a tag for the test name and tagsToExclude" +
        "includes SlowAsMolasses and the test is marked as SlowAsMolasses") {
        val filter = new Filter(Some(Set("SlowAsMolasses")), Set("SlowAsMolasses"))
        assert(filter("myTestName", Map("myTestName" -> Set("SlowAsMolasses"))) === (true, false))
      }

      it("should return (false, true) if tagsToInclude includes a tag for the test name and tagsToExclude" +
        "is empty and the test is marked as ignored") {
        val filter = new Filter(Some(Set("SlowAsMolasses")), Set[String]())
        assert(filter("myTestName", Map("myTestName" -> Set("SlowAsMolasses", "org.scalatest.Ignore"))) === (false, true))
      }

      it("should return (true, false) if tagsToInclude includes a tag for the test name and tagsToExclude" +
        "includes SlowAsMolasses and the test is marked as SlowAsMolasses and ignored") {
        val filter = new Filter(Some(Set("SlowAsMolasses")), Set("SlowAsMolasses"))
        assert(filter("myTestName", Map("myTestName" -> Set("SlowAsMolasses", "org.scalatest.Ignore"))) === (true, false))
      }

      it("should return (true, false) if tagsToInclude is defined but does not include any tags for the" +
        "test name") {
        val filter = new Filter(Some(Set("SlowAsMolasses")), Set[String]())
        assert(filter("myTestName", Map("myTestName" -> Set("FastAsLight"))) === (true, false))
      }
    }
  }
}
| kevinwright/scalatest | src/test/scala/org/scalatest/FilterSpec.scala | Scala | apache-2.0 | 16,402 |
package zzb.datatype
import org.scalatest.WordSpec
import org.scalatest.MustMatchers
import zzb.datatype.demo._
/**
 * Tests for the zzb DataType "ensure" suite DSL: creating empty and
 * pre-populated suites, adding coverage items, key filtering, validation,
 * and the collection-style map/filter/foreach operations on a suite.
 *
 * Changes vs. previous revision: removed unused locals (`canUseItemCount`,
 * `ddd`, `sss`), removed a dead commented-out code block, and translated
 * the Chinese comments to English. No assertion was altered.
 */
class EnsureTest extends WordSpec with MustMatchers {

  "Ensure define" must {

    "can create new empty suite" in {
      import BizSuite._
      // Create an empty suite: no coverage items are used yet.
      val suite = BizSuite()
      val items = suite.Items
      items.usedKeys.size must equal(0)
      suite.fieldCount must equal(2) // Items plus the implicit version_ field
    }

    "can create suite with some field" in {
      import BizSuite._
      val suite = BizSuite(charge := 1000.1, discountRate := 0.9)
      suite.fieldCount must equal(3+1) // 3 explicit fields plus version_
      suite(charge).get.toString must equal("1000.1")
    }

    "can add some items to suite" in {
      import BizSuite._
      val suite0 = BizSuite(charge := 1000.1, discountRate := 0.9)
      // Items can be added one at a time ...
      val suite1 = suite0 ~ VehicleDemageIns.makeItem(1)
      suite1.Items.usedKeys.size must equal(1)
      // ... or as a list ...
      val suite2 = suite1 ~ List(NcfVehicleDemageIns.makeItem(1), ThirdPartyIns.makeItem(1))
      suite2.Items.usedKeys.size must equal(3)
      // ... or carried over wholesale into a new suite.
      val suite3 = BizSuite(charge := 1000.1, discountRate := 0.9, suite2.Items)
      suite3.Items.usedKeys.size must equal(3)
    }

    "can create empty suite with filter " in {
      import BizSuite._
      // Exclude three coverage items; the filter must shrink the allowed key set.
      val excludeItem = Set(NcfTheftIns, ThirdPartyIns, DriverIns)
      val keyFilter = (itemDef: ItemDef) => !excludeItem.contains(itemDef)
      val filter = (kv: (ItemDef, Item.Pack)) => !excludeItem.contains(kv._1)
      val suite0 = BizSuite(filter)
      val items = suite0.Items
      val availableCount = items.availableKeys.size
      val allowCount = items.allowKeys(keyFilter).size
      (availableCount - allowCount) must equal(excludeItem.size)
      items.usedKeys.size must equal(0)
      suite0.fieldCount must equal(1+1)
    }

    "can create not empty suite with filter" in {
      import BizSuite._
      val excludeItem = Set(NcfTheftIns, ThirdPartyIns, DriverIns)
      val keyFilter = (itemDef: ItemDef) => !excludeItem.contains(itemDef)
      val filter = (kv: (ItemDef, Item.Pack)) => !excludeItem.contains(kv._1)
      val suite0 = BizSuite.makeWithFilter(filter, charge := 1000.1, discountRate := 0.9)
      val items = suite0.Items
      val availableCount = items.availableKeys.size
      val allowCount = items.allowKeys(keyFilter).size
      (availableCount - allowCount) must equal(excludeItem.size)
      items.usedKeys.size must equal(0)
      suite0.fieldCount must equal(3+1)
      val suite1 = suite0 ~ NcfTheftIns.makeItem(1) // an excluded coverage item must not be addable
      suite1.Items.usedKeys.size must equal(0)
      val suite2 = suite1 ~ VehicleDemageIns.makeItem(1)
      suite2.Items.usedKeys.size must equal(1)
      val suite3 = suite2 ~ NcfTheftIns.makeItem(1) // the filter is preserved when the suite is copied
      suite3.Items.usedKeys.size must equal(1)
      val suite4 = suite3 ~ List(NcfTheftIns.makeItem(1), NcfVehicleDemageIns.makeItem(1)) // the filter is preserved when the suite is copied
      suite4.Items.usedKeys.size must equal(2)
      val suite5 = BizSuite.makeWithFilter(filter, charge := 1000.1, discountRate := 0.9, suite4.Items)
      suite5.Items.usedKeys.size must equal(2)
    }

    "can get selector from Item" in {
      import BizSuite._
      val suite0 = BizSuite(charge := 1000.1, discountRate := 0.9) ~ ThirdPartyIns.makeItem(2)
      val items = suite0.Items
      val item: Item.Pack = items.get(ThirdPartyIns).get
      // A selector remembers which variant index the item was built from.
      val selector = item.toSelector
      selector.selectIdx must equal(2)
    }

    "can do validate " in {
      {
        import BizSuite._
        val suite0 = BizSuite(charge := 1000.1, discountRate := 0.9)
        val messages0 = suite0.doValidate
        messages0.size must equal(1)
        val suite1 = suite0 ~ List(NcfVehicleDemageIns.makeItem(1), DriverIns.makeItem(1))
        val messages1 = suite1.doValidate
        messages1.size must equal(3)
        val suite2 = suite1 ~ DriverIns.makeItem(1).alter(Item.charge := 99) // a premium below 100 still fails validation
        val messages2 = suite2.doValidate
        messages2.size must equal(3)
        val suite3 = suite2 ~ DriverIns.makeItem(1).alter(Item.charge := 100) // violation fixed
        val messages3 = suite3.doValidate
        messages3.size must equal(2)
      }
      {
        import ForceSuite._
        // A fresh ForceSuite carries no validation errors.
        val s0 = ForceSuite()
        s0.doValidate.size must equal(0)
      }
    }

    "can do map filter foreach " in {
      import BizSuite._
      val suite0 = BizSuite(charge := 1000.1, discountRate := 0.9) ~ List(VehicleDemageIns.makeItem(1),
        NcfVehicleDemageIns.makeItem(1), ThirdPartyIns.makeItem(1))
      suite0.Items.usedKeys.size must equal(3)
      // filter is non-destructive: suite0 keeps its items.
      val suite1 = suite0.filter { kv => true }
      suite1.Items.usedKeys.size must equal(3)
      suite0.Items.usedKeys.size must equal(3)
      val suite2 = suite0.filter { kv => kv._1 != VehicleDemageIns }
      suite2.Items.usedKeys.size must equal(2)
      // Set the premium of every coverage item to 500.
      val suite3: BizSuite.Pack = for( kv <- suite0 ) yield {
        (kv._1,kv._2 <~ Item.charge()(500))
      }
      suite3.charge(VehicleDemageIns).get must equal(BigDecimal(500))
      // Update only one item, leaving the others untouched.
      val suite4 = for( (define,item) <- suite3 ) yield {
        if (define == VehicleDemageIns)
          (define,item <~ Item.charge()(800))
        else (define,item)
      }
      suite4.charge(VehicleDemageIns).get must equal(BigDecimal(800))
      suite4.charge(ThirdPartyIns).get must equal(BigDecimal(500))
      // A guarded comprehension keeps only the matching item.
      val suite5 = for( (define,item) <- suite4 if define == VehicleDemageIns) yield {
        (define,item <~ Item.charge()(900))
      }
      suite5.charge(VehicleDemageIns).get must equal(BigDecimal(900))
      suite5.charge(ThirdPartyIns) must be (None)
      // foreach-style traversal visits every used item exactly once.
      var defines = List[ItemDef]()
      for((define,item) <- suite3) defines = define :: defines
      defines.size must equal(3)
    }
  }
}
| stepover/zzb | zzb-datatype/src/test/scala/zzb/datatype/EnsureTest.scala | Scala | mit | 6,290 |
package com.dys.chatwork4s.http.parameters
/**
 * Parameter set used when requesting the list of files in a chat room.
 *
 * @param accountId account ID of the user who uploaded the files; when
 *                  defined, restricts the listing to that uploader
 */
case class GetFiles(
  accountId: Option[Int] = None
) extends HttpParameter {
  /** Renders the defined parameters as request key/value pairs. */
  override def toParameters: Seq[(String, String)] =
    singleParameter("account_id" -> accountId)
}
object GetFiles {
  // Shared instance with no filtering applied (all uploaders).
  val empty: GetFiles = GetFiles()
} | kado-yasuyuki/chatwork4s | src/main/scala/com/dys/chatwork4s/http/parameters/GetFiles.scala | Scala | apache-2.0 | 499 |
/*
* Copyright DataStax, Inc.
*
* Please see the included license file for details.
*/
package com.datastax.spark.connector.cql
import com.datastax.bdp.spark.ContinuousPagingScanner
import com.datastax.dse.driver.api.core.config.DseDriverOption
import com.datastax.oss.driver.api.core.cql.Statement
import com.datastax.spark.connector._
import com.datastax.spark.connector.cluster.DefaultCluster
import com.datastax.spark.connector.rdd.ReadConf
import org.mockito.ArgumentCaptor
import org.mockito.Mockito._
import org.scalatest.concurrent.Eventually
import scala.concurrent.Future
/**
 * Integration tests for [[ContinuousPagingScanner]]: verifies session reuse
 * semantics and that [[ReadConf]] throughput settings are translated into the
 * corresponding DSE continuous-paging driver options.
 *
 * FIXES vs. previous revision: two tests computed a Boolean but never
 * asserted it ("re-use a session in the same thread", and the `eventually`
 * block in "use a single CP session for all threads"), so they could never
 * fail; both now assert explicitly.
 */
class ContinuousPagingScannerSpec extends SparkCassandraITFlatSpecBase with DefaultCluster with Eventually {

  // Force the continuous-paging connection factory for this suite.
  sparkConf.set(CassandraConnectionFactory.continuousPagingParam.name, "true")

  override lazy val conn = CassandraConnector(sparkConf)

  override val ks = "continuous_paging"
  val table = "atab"

  // Creates the fixture keyspace/tables and seeds a few rows.
  override def beforeClass {
    conn.withSessionDo { session =>
      createKeyspace(session)
      awaitAll(
        Future {
          session.execute(s"""CREATE TABLE $ks.test1 (a INT, b INT, c INT, d INT, e INT, f INT, g INT, h INT, PRIMARY KEY ((a, b, c), d , e, f))""")
          session.execute(s"""INSERT INTO $ks.test1 (a, b, c, d, e, f, g, h) VALUES (1, 1, 1, 1, 1, 1, 1, 1)""")
          session.execute(s"""INSERT INTO $ks.test1 (a, b, c, d, e, f, g, h) VALUES (1, 1, 1, 1, 2, 1, 1, 2)""")
          session.execute(s"""INSERT INTO $ks.test1 (a, b, c, d, e, f, g, h) VALUES (1, 1, 1, 2, 1, 1, 2, 1)""")
          session.execute(s"""INSERT INTO $ks.test1 (a, b, c, d, e, f, g, h) VALUES (1, 1, 1, 2, 2, 1, 2, 2)""")
          session.execute(s"""INSERT INTO $ks.test1 (a, b, c, d, e, f, g, h) VALUES (1, 2, 1, 1, 1, 2, 1, 1)""")
          session.execute(s"""INSERT INTO $ks.test1 (a, b, c, d, e, f, g, h) VALUES (1, 2, 1, 1, 2, 2, 1, 2)""")
        }
      )
      session.execute(
        s"""CREATE KEYSPACE IF NOT EXISTS $ks
           |WITH REPLICATION = { 'class': 'SimpleStrategy', 'replication_factor': 1 }"""
          .stripMargin)
      session.execute(s"CREATE TABLE $ks.$table (p int, c int, d int, PRIMARY KEY (p,c))")
      session.execute(s"INSERT INTO $ks.$table (p,c,d) VALUES (1,1,1)")
    }
  }

  // Runs a scan through a spied session and captures the statement handed to
  // executeContinuously, so tests can inspect the applied driver options.
  private def executeContinuousPagingScan(readConf: ReadConf): Statement[_] = {
    // we don't want to use the session from CC as mockito is unable to spy on a Proxy
    val cqlSession = conn.conf.connectionFactory.createSession(conn.conf)
    try {
      val sessionSpy = spy(cqlSession)
      val scanner = ContinuousPagingScanner(readConf, conn.conf, IndexedSeq.empty, sessionSpy)
      val stmt = sessionSpy.prepare(s"SELECT * FROM $ks.test1").bind()
      val statementCaptor = ArgumentCaptor.forClass(classOf[Statement[_]])
      scanner.scan(stmt)
      verify(sessionSpy).executeContinuously(statementCaptor.capture())
      statementCaptor.getValue
    } finally {
      cqlSession.close()
    }
  }

  "ContinuousPagingScanner" should "re-use a session in the same thread" in {
    val sessions = for (x <- 1 to 10) yield {
      val cps = ContinuousPagingScanner(ReadConf(), conn.conf, IndexedSeq.empty)
      cps.close()
      cps.getSession()
    }
    // FIX: previously this Boolean was computed but never asserted.
    sessions.forall(session => session == sessions(0)) should be(true)
  }

  it should "use a different session than the one provided by the default connector" in {
    val scanner = ContinuousPagingScanner(ReadConf(), conn.conf, IndexedSeq.empty)
    scanner.close()
    conn.withSessionDo(session => session shouldNot be(scanner.getSession()))
  }

  it should "use a single CP session for all threads" in {
    CassandraConnector.evictCache()
    eventually {
      // FIX: previously the Boolean was only returned, so `eventually`
      // succeeded immediately regardless of the cache state.
      CassandraConnector.sessionCache.cache.isEmpty should be(true)
    }
    val rdd = sc.cassandraTable(ks, table).withReadConf(ReadConf(splitCount = Some(400)))
    rdd.partitions.length should be > 100 //Sanity check that we will have to reuse sessions
    rdd.count
    val sessions = CassandraConnector
      .sessionCache
      .cache
      .keys
    withClue(sessions.map(_.toString).mkString("\n"))(sessions.size should be(1))
  }

  it should "apply MB/s throughput limit" in dseOnly {
    val readConf = ReadConf(throughputMiBPS = Some(32.0))
    val executedStmt = executeContinuousPagingScan(readConf)
    executedStmt.getExecutionProfile.getBoolean(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE_BYTES) should be(true)
    executedStmt.getExecutionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND) should be(1000)
    executedStmt.getExecutionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE) should be(33554) // 32MB/s
  }

  it should "apply reads/s throughput limit" in dseOnly {
    val readConf = ReadConf(fetchSizeInRows = 999, readsPerSec = Some(5))
    val executedStmt = executeContinuousPagingScan(readConf)
    executedStmt.getExecutionProfile.getBoolean(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE_BYTES) should be(false)
    executedStmt.getExecutionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND) should be(5)
    executedStmt.getExecutionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE) should be(999)
  }

  it should "throw a meaningful exception when pages per second does not fall int (0, Int.MaxValue)" in dseOnly {
    val readConfs = Seq(
      ReadConf(throughputMiBPS = Some(1.0 + Int.MaxValue), readsPerSec = Some(1)),
      ReadConf(throughputMiBPS = Some(-1)),
      ReadConf(throughputMiBPS = Some(0)))
    for (readConf <- readConfs) {
      withClue(s"Expected IllegalArgumentException for invalid throughput argument: ${readConf.throughputMiBPS}.") {
        val exc = intercept[IllegalArgumentException] {
          executeContinuousPagingScan(readConf)
        }
        exc.getMessage should include(s"This number must be positive, non-zero and smaller than ${Int.MaxValue}")
      }
    }
  }
}
| datastax/spark-cassandra-connector | connector/src/it/scala/com/datastax/spark/connector/cql/ContinuousPagingScannerSpec.scala | Scala | apache-2.0 | 5,838 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.entity.test
import common.{StreamLogging, WskActorSystem}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}
import spray.json._
import spray.json.DefaultJsonProtocol._
import org.apache.openwhisk.core.entity.ExecManifest
import org.apache.openwhisk.core.entity.ExecManifest._
import org.apache.openwhisk.core.entity.size._
import org.apache.openwhisk.core.entity.ByteSize
import scala.util.Success
/**
 * Unit tests for [[ExecManifest]]: parsing of Docker image names, reading
 * runtime manifests (default kinds, image prefix/tag resolution, blackbox
 * images, stem cells), stem-cell serialization, and rejection of invalid
 * configurations.
 */
@RunWith(classOf[JUnitRunner])
class ExecManifestTests extends FlatSpec with WskActorSystem with StreamLogging with Matchers {

  behavior of "ExecManifest"

  // Wraps a runtimes JSON object in the top-level manifest envelope.
  private def manifestFactory(runtimes: JsObject) = {
    JsObject("runtimes" -> runtimes)
  }

  it should "parse an image name" in {
    // Valid names, with and without registry host, prefix, tag and digest.
    Map(
      "i" -> ImageName("i"),
      "i:t" -> ImageName("i", tag = Some("t")),
      "i:tt" -> ImageName("i", tag = Some("tt")),
      "ii" -> ImageName("ii"),
      "ii:t" -> ImageName("ii", tag = Some("t")),
      "ii:tt" -> ImageName("ii", tag = Some("tt")),
      "p/i" -> ImageName("i", Some("p")),
      "pre/img" -> ImageName("img", Some("pre")),
      "pre/img:t" -> ImageName("img", Some("pre"), Some("t")),
      "pre1/pre2/img:t" -> ImageName("img", Some("pre1/pre2"), Some("t")),
      "pre1/pre2/img" -> ImageName("img", Some("pre1/pre2")),
      "hostname.com:3121/pre1/pre2/img:t" -> ImageName("img", Some("hostname.com:3121/pre1/pre2"), Some("t")),
      "hostname.com:3121/pre1/pre2/img:t@sha256:77af4d6b9913e693e8d0b4b294fa62ade6054e6b2f1ffb617ac955dd63fb0182" ->
        ImageName("img", Some("hostname.com:3121/pre1/pre2"), Some("t")))
      .foreach {
        case (s, v) => ImageName.fromString(s) shouldBe Success(v)
      }

    // Malformed names must be rejected with a DeserializationException.
    Seq("ABC", "x:8080:10/abc", "p/a:x:y", "p/a:t@sha256:77af4d6b9").foreach { s =>
      a[DeserializationException] should be thrownBy ImageName.fromString(s).get
    }
  }

  it should "read a valid configuration without default prefix, default tag or blackbox images" in {
    val k1 = RuntimeManifest("k1", ImageName("???"))
    val k2 = RuntimeManifest("k2", ImageName("???"), default = Some(true))
    val p1 = RuntimeManifest("p1", ImageName("???"))
    val s1 = RuntimeManifest("s1", ImageName("???"), stemCells = Some(List(StemCell(2, 256.MB))))
    val mf = manifestFactory(JsObject("ks" -> Set(k1, k2).toJson, "p1" -> Set(p1).toJson, "s1" -> Set(s1).toJson))
    val runtimes = ExecManifest.runtimes(mf, RuntimeManifestConfig()).get

    Seq("k1", "k2", "p1", "s1").foreach {
      runtimes.knownContainerRuntimes.contains(_) shouldBe true
    }

    runtimes.knownContainerRuntimes.contains("k3") shouldBe false

    // Each concrete kind resolves to itself; the family's ":default" alias
    // resolves to the member flagged default (or the sole member).
    runtimes.resolveDefaultRuntime("k1") shouldBe Some(k1)
    runtimes.resolveDefaultRuntime("k2") shouldBe Some(k2)
    runtimes.resolveDefaultRuntime("p1") shouldBe Some(p1)
    runtimes.resolveDefaultRuntime("s1") shouldBe Some(s1)

    runtimes.resolveDefaultRuntime("ks:default") shouldBe Some(k2)
    runtimes.resolveDefaultRuntime("p1:default") shouldBe Some(p1)
    runtimes.resolveDefaultRuntime("s1:default") shouldBe Some(s1)
  }

  it should "read a valid configuration where an image may omit prefix or tag" in {
    val i1 = RuntimeManifest("i1", ImageName("???"))
    val i2 = RuntimeManifest("i2", ImageName("???", Some("ppp")), default = Some(true))
    val j1 = RuntimeManifest("j1", ImageName("???", Some("ppp"), Some("ttt")))
    val k1 = RuntimeManifest("k1", ImageName("???", None, Some("ttt")))
    val s1 = RuntimeManifest("s1", ImageName("???"), stemCells = Some(List(StemCell(2, 256.MB))))
    val mf =
      JsObject(
        "runtimes" -> JsObject(
          "is" -> Set(i1, i2).toJson,
          "js" -> Set(j1).toJson,
          "ks" -> Set(k1).toJson,
          "ss" -> Set(s1).toJson))
    val rmc = RuntimeManifestConfig()
    val runtimes = ExecManifest.runtimes(mf, rmc).get

    // The public image name is composed from whatever prefix/tag is present.
    runtimes.resolveDefaultRuntime("i1").get.image.publicImageName shouldBe "???"
    runtimes.resolveDefaultRuntime("i2").get.image.publicImageName shouldBe "ppp/???"
    runtimes.resolveDefaultRuntime("j1").get.image.publicImageName shouldBe "ppp/???:ttt"
    runtimes.resolveDefaultRuntime("k1").get.image.publicImageName shouldBe "???:ttt"
    runtimes.resolveDefaultRuntime("s1").get.image.publicImageName shouldBe "???"
    runtimes.resolveDefaultRuntime("s1").get.stemCells.get(0).count shouldBe 2
    runtimes.resolveDefaultRuntime("s1").get.stemCells.get(0).memory shouldBe 256.MB
  }

  it should "read a valid configuration with blackbox images but without default prefix or tag" in {
    val imgs = Set(
      ImageName("???"),
      ImageName("???", Some("ppp")),
      ImageName("???", Some("ppp"), Some("ttt")),
      ImageName("???", None, Some("ttt")))
    val mf = JsObject("runtimes" -> JsObject.empty, "blackboxes" -> imgs.toJson)
    val runtimes = ExecManifest.runtimes(mf, RuntimeManifestConfig()).get

    // Every declared blackbox image is known and skips the docker pull.
    runtimes.blackboxImages shouldBe imgs
    imgs.foreach(img => runtimes.skipDockerPull(img) shouldBe true)
    runtimes.skipDockerPull(ImageName("???", Some("bbb"))) shouldBe false
  }

  it should "read a valid configuration with blackbox images, which may omit prefix or tag" in {
    val imgs = List(
      ImageName("???"),
      ImageName("???", Some("ppp")),
      ImageName("???", Some("ppp"), Some("ttt")),
      ImageName("???", None, Some("ttt")))
    val mf = JsObject("runtimes" -> JsObject.empty, "blackboxes" -> imgs.toJson)
    val rmc = RuntimeManifestConfig()
    val runtimes = ExecManifest.runtimes(mf, rmc).get

    runtimes.blackboxImages shouldBe imgs.toSet
    imgs.forall(runtimes.skipDockerPull(_)) shouldBe true

    // Prefix/tag must match exactly; near-misses still require a pull.
    runtimes.skipDockerPull(ImageName("xxx")) shouldBe false
    runtimes.skipDockerPull(ImageName("???", Some("bbb"))) shouldBe false
    runtimes.skipDockerPull(ImageName("???", Some("ppp"), Some("test"))) shouldBe false
    runtimes.skipDockerPull(ImageName("???", None, Some("test"))) shouldBe false
  }

  it should "reject runtimes with multiple defaults" in {
    val k1 = RuntimeManifest("k1", ImageName("???"), default = Some(true))
    val k2 = RuntimeManifest("k2", ImageName("???"), default = Some(true))
    val mf = manifestFactory(JsObject("ks" -> Set(k1, k2).toJson))

    an[IllegalArgumentException] should be thrownBy ExecManifest.runtimes(mf, RuntimeManifestConfig()).get
  }

  it should "reject finding a default when none specified for multiple versions in the same family" in {
    val k1 = RuntimeManifest("k1", ImageName("???"))
    val k2 = RuntimeManifest("k2", ImageName("???"))
    val mf = manifestFactory(JsObject("ks" -> Set(k1, k2).toJson))

    an[IllegalArgumentException] should be thrownBy ExecManifest.runtimes(mf, RuntimeManifestConfig()).get
  }

  it should "prefix image name with overrides" in {
    val name = "xyz"
    // Empty-string prefix and tag behave as if absent.
    ExecManifest.ImageName(name, Some(""), Some("")).publicImageName shouldBe name

    Seq(
      (ExecManifest.ImageName(name), name),
      (ExecManifest.ImageName(name, None, Some("t")), s"$name:t"),
      (ExecManifest.ImageName(name, Some("pre")), s"pre/$name"),
      (ExecManifest.ImageName(name, Some("pre"), Some("t")), s"pre/$name:t")).foreach {
      case (image, exp) =>
        image.publicImageName shouldBe exp
        // localImageName prepends the given registry, normalising the slash.
        image.localImageName("") shouldBe exp
        image.localImageName("r") shouldBe s"r/$exp"
        image.localImageName("r/") shouldBe s"r/$exp"
    }
  }

  it should "indicate image is local if it matches deployment docker prefix" in {
    val mf = JsObject.empty
    val rmc = RuntimeManifestConfig(bypassPullForLocalImages = Some(true), localImagePrefix = Some("localpre"))
    val manifest = ExecManifest.runtimes(mf, rmc)

    manifest.get.skipDockerPull(ImageName(prefix = Some("x"), name = "y")) shouldBe false
    manifest.get.skipDockerPull(ImageName(prefix = Some("localpre"), name = "y")) shouldBe true
  }

  it should "de/serialize stem cell configuration" in {
    // Round-trip through JSON.
    val cell = StemCell(3, 128.MB)
    val cellAsJson = JsObject("count" -> JsNumber(3), "memory" -> JsString("128 MB"))
    stemCellSerdes.write(cell) shouldBe cellAsJson
    stemCellSerdes.read(cellAsJson) shouldBe cell

    // Constructor and reader both reject non-positive counts.
    an[IllegalArgumentException] shouldBe thrownBy {
      StemCell(-1, 128.MB)
    }

    an[IllegalArgumentException] shouldBe thrownBy {
      StemCell(0, 128.MB)
    }

    an[IllegalArgumentException] shouldBe thrownBy {
      val cellAsJson = JsObject("count" -> JsNumber(0), "memory" -> JsString("128 MB"))
      stemCellSerdes.read(cellAsJson)
    }

    // A memory string without a unit fails with the standard format error.
    the[IllegalArgumentException] thrownBy {
      val cellAsJson = JsObject("count" -> JsNumber(1), "memory" -> JsString("128"))
      stemCellSerdes.read(cellAsJson)
    } should have message {
      ByteSize.formatError
    }
  }

  it should "parse manifest from JSON string" in {
    val json = """
                 |{ "runtimes": {
                 | "nodef": [
                 | {
                 | "kind": "nodejs:6",
                 | "deprecated": true,
                 | "image": {
                 | "name": "nodejsaction"
                 | },
                 | "stemCells": [{
                 | "count": 1,
                 | "memory": "128 MB"
                 | }]
                 | }, {
                 | "kind": "nodejs:8",
                 | "default": true,
                 | "image": {
                 | "name": "nodejsaction"
                 | },
                 | "stemCells": [{
                 | "count": 1,
                 | "memory": "128 MB"
                 | }, {
                 | "count": 1,
                 | "memory": "256 MB"
                 | }]
                 | }
                 | ],
                 | "pythonf": [{
                 | "kind": "python",
                 | "image": {
                 | "name": "pythonaction"
                 | },
                 | "stemCells": [{
                 | "count": 2,
                 | "memory": "256 MB"
                 | }]
                 | }],
                 | "swiftf": [{
                 | "kind": "swift",
                 | "image": {
                 | "name": "swiftaction"
                 | },
                 | "stemCells": []
                 | }],
                 | "phpf": [{
                 | "kind": "php",
                 | "image": {
                 | "name": "phpaction"
                 | }
                 | }]
                 | }
                 |}
                 |""".stripMargin.parseJson.asJsObject

    // Expected manifests corresponding to the JSON above.
    val js6 = RuntimeManifest(
      "nodejs:6",
      ImageName("nodejsaction"),
      deprecated = Some(true),
      stemCells = Some(List(StemCell(1, 128.MB))))
    val js8 = RuntimeManifest(
      "nodejs:8",
      ImageName("nodejsaction"),
      default = Some(true),
      stemCells = Some(List(StemCell(1, 128.MB), StemCell(1, 256.MB))))
    val py = RuntimeManifest("python", ImageName("pythonaction"), stemCells = Some(List(StemCell(2, 256.MB))))
    val sw = RuntimeManifest("swift", ImageName("swiftaction"), stemCells = Some(List.empty))
    val ph = RuntimeManifest("php", ImageName("phpaction"))
    val mf = ExecManifest.runtimes(json, RuntimeManifestConfig()).get

    mf shouldBe {
      Runtimes(
        Set(
          RuntimeFamily("nodef", Set(js6, js8)),
          RuntimeFamily("pythonf", Set(py)),
          RuntimeFamily("swiftf", Set(sw)),
          RuntimeFamily("phpf", Set(ph))),
        Set.empty,
        None)
    }

    // NOTE(review): stemCellFactory below is defined but never used.
    def stemCellFactory(m: RuntimeManifest, cells: List[StemCell]) = cells.map { c =>
      (m.kind, m.image, c.count, c.memory)
    }

    // Only kinds with a non-empty stemCells list contribute entries.
    mf.stemcells.flatMap {
      case (m, cells) =>
        cells.map { c =>
          (m.kind, m.image, c.count, c.memory)
        }
    }.toList should contain theSameElementsAs List(
      (js6.kind, js6.image, 1, 128.MB),
      (js8.kind, js8.image, 1, 128.MB),
      (js8.kind, js8.image, 1, 256.MB),
      (py.kind, py.image, 2, 256.MB))
  }
}
| csantanapr/incubator-openwhisk | tests/src/test/scala/org/apache/openwhisk/core/entity/test/ExecManifestTests.scala | Scala | apache-2.0 | 12,996 |
package ucesoft.cbm.peripheral.rs232
import java.io._
import java.net.{ServerSocket, Socket}
import ucesoft.cbm.Log
import scala.util.matching.Regex
/** Callbacks through which the emulated modem notifies its host about line-state changes. */
trait ModemCommandListener {
  /** Invoked when the modem must drop the current connection (ATH). */
  def hangUp : Unit
  /** Invoked when the modem switches to command mode (on = true) or back to data mode. */
  def commandMode(on:Boolean) : Unit
  /** Invoked when ATDT requests a connection to the given address (expected form: host:port). */
  def connectTo(address:String) : Unit
  /** Invoked when an incoming call starts ringing (true) or stops (false). */
  def ring(ringing:Boolean) : Unit
}
/**
 * Hayes modem result codes. Each value's id is the numeric code reported to the guest
 * when verbose mode is off (V0); the value's name is used in verbose mode (V1).
 */
object HayesResultCode extends Enumeration {
  val OK = Value(0)
  val CONNECT = Value(1)
  val RING = Value(2)
  val NO_CARRIER = Value(3)
  val ERROR = Value(4)
  val CONNECT1200 = Value(5)
  val CONNECT2400 = Value(10)
  val CONNECT4800 = Value(11)
  val CONNECT9600 = Value(12)
  val CONNECT14400= Value(13)
  val CONNECT19200= Value(14)
  val CONNECT38400= Value(28)
}
/**
 * Hayes-style modem emulation bridging the emulated RS-232 line to TCP sockets.
 *
 * The modem starts in command mode: bytes written by the guest are buffered and
 * interpreted as Hayes "AT" command lines terminated by CR (13). In data mode bytes
 * are relayed to/from the current socket; three consecutive escape characters
 * (S-register 2, '+' by default) switch back to command mode. The modem can also
 * listen for incoming TCP connections (ATA<port>) and simulates RING/answer.
 *
 * @param mcl            receives hang-up / connect / ring / mode-change notifications
 * @param welcomeMessage optional banner shown in command mode; a default banner is used when null
 */
class Modem(mcl:ModemCommandListener,welcomeMessage:String = null) extends Runnable {
  // ===================== Hayes stuff =============================
  /**
   * Outcome of applying a single Hayes command to the head of the command string.
   *
   * @param matches        whether the command recognized (a prefix of) the string
   * @param rest           the unconsumed remainder of the command string
   * @param skipResultCode when true, no OK/ERROR result code must be emitted for this line
   */
  private case class HayesResult(matches:Boolean,rest:String,skipResultCode:Boolean = false)

  private abstract class HayesCommand {
    val RE1 : Option[Regex] = None // single-capture-group pattern
    val RE2 : Option[Regex] = None // two-capture-group pattern (no concrete command defines it at present)

    def matches(cmd:String) : HayesResult = {
      RE1 match {
        case Some(re1) =>
          cmd match {
            case re1(p1) =>
              cmd1(cmd,p1)
            case _ => HayesResult(false,cmd)
          }
        case None =>
          RE2 match {
            case Some(re2) =>
              cmd match {
                case re2(p1,p2) =>
                  cmd2(cmd,p1,p2)
                case _ => HayesResult(false,cmd) // fixed: a non-matching command previously threw a MatchError here
              }
            case None =>
              HayesResult(false,cmd)
          }
      }
    }

    protected def cmd1(cmd:String,p1:String) : HayesResult = HayesResult(false,cmd)
    protected def cmd2(cmd:String,p1:String,p2:String) : HayesResult = HayesResult(false,cmd)
  }

  /** Convenience command backed by a single-group regex, delegating the match to handle. */
  private class FastHayesCommand(re:String,handle:(String,String) => HayesResult) extends HayesCommand {
    override val RE1 = Some(re.r)

    override def cmd1(cmd:String,p1:String) : HayesResult = handle(cmd,p1)
  }

  // ========================= SUPPORTED HAYES COMMANDS ============================
  // ATDT<host:port>: dial, i.e. ask the host to open a TCP connection.
  private object ATDT extends FastHayesCommand("DT?(.*)", (_,p1) => {
    if (p1.trim.length > 0) {
      mcl.connectTo(p1.trim)
      HayesResult(true, "", true) // result code is emitted later by the connection outcome
    }
    else HayesResult(true,"")
  })
  // ATI: identification string.
  private object ATI extends FastHayesCommand("(I)", (_,_) => {
    modemIn.append("KERNAL64 INTERNAL MODEM EMULATOR" + 13.toChar)
    HayesResult(true,"")
  })
  // ATE/ATE0: command echo off. Must be tried AFTER ATE1 ("E0?.*" would also match "E1...").
  private object ATE extends FastHayesCommand("(E0?).*", (cmd,p1) => {
    echoMode = false
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATE1: command echo on.
  private object ATE1 extends FastHayesCommand("(E1).*", (cmd,p1) => {
    echoMode = true
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATH/ATH0: hang up.
  private object ATH extends FastHayesCommand("(H0?).*", (cmd,p1) => {
    mcl.hangUp
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATO: return online (back to data mode).
  private object ATO extends FastHayesCommand("(O)", (_,_) => {
    commandMode = false
    mcl.commandMode(false)
    HayesResult(true,"")
  })
  // ATQ1: quiet mode on (suppress result codes).
  private object Q1 extends FastHayesCommand("(Q1).*", (cmd,p1) => {
    quietMode = true
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATQ/ATQ0: quiet mode off.
  private object Q extends FastHayesCommand("(Q0?).*", (cmd,p1) => {
    quietMode = false
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATX1: CONNECT result codes include the connection speed.
  private object X1 extends FastHayesCommand("(X1).*", (cmd,p1) => {
    addConnectionSpeed = true
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATX0: plain CONNECT result code.
  private object X0 extends FastHayesCommand("(X0).*", (cmd,p1) => {
    addConnectionSpeed = false
    HayesResult(true,cmd.substring(p1.length))
  })
  // Accept-and-ignore other Xn/Mn settings for compatibility.
  private object Dummy extends FastHayesCommand("([XM]\\d).*", (cmd,p1) => {
    // do nothing
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATZ: reset to power-on defaults.
  private object Z extends FastHayesCommand("(Z).*", (cmd,p1) => {
    reset
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATSn: select S-register n for a following ? or = operation.
  private object Select extends FastHayesCommand("(S\\d{1,3}).*", (cmd,p1) => {
    currentS = p1.substring(1).toInt
    HayesResult(true,cmd.substring(p1.length))
  })
  // ?: print the currently selected S-register.
  private object QuestionM extends FastHayesCommand("(\\?).*", (cmd,p1) => {
    modemIn.append(S(currentS).toString + 13.toChar)
    HayesResult(true,cmd.substring(p1.length),true)
  })
  // =n: store n into the currently selected S-register.
  private object GetS extends FastHayesCommand("(=\\d{1,3}).*", (cmd,p1) => {
    S(currentS) = p1.substring(1).toInt
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATV/ATV0: numeric result codes. Must be tried AFTER V1.
  private object V extends FastHayesCommand("(V0?).*", (cmd,p1) => {
    englishResultCodes = false
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATV1: verbose (textual) result codes.
  private object V1 extends FastHayesCommand("(V1).*", (cmd,p1) => {
    englishResultCodes = true
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATA<port>: start listening for incoming calls on the given TCP port.
  private object ATAn extends FastHayesCommand("(A\\d{1,5}).*", (cmd,p1) => {
    allowListening(true,p1.substring(1).toInt)
    HayesResult(true,cmd.substring(p1.length))
  })
  // ATA: answer a currently ringing incoming call.
  private object ATA extends FastHayesCommand("(A).*", (cmd,p1) => {
    if (ringing) answerCall = true
    HayesResult(true,cmd.substring(p1.length))
  })
  // ===============================================================================
  private object HayesCommand {
    // Order matters: more specific patterns (ATE1, V1, ATAn) must precede the generic
    // ones (ATE, V, ATA) that would also match their input.
    private val commands = List(ATDT, ATI, ATE1, ATE, ATH, ATO, Q1, Q, X1, X0, Dummy, Z, Select, QuestionM, GetS, V1, V, ATAn, ATA).view

    /**
     * Applies the known commands repeatedly to the given string until it is consumed
     * or no command matches.
     *
     * @return Some(true) on success, Some(false) on error, None when the last matched
     *         command asked to suppress the OK/ERROR result code
     */
    def processCommands(_cmds:String) : Option[Boolean] = {
      var processing = true
      var cmds = _cmds.replaceAll("\\s+","")
      var skipResult = false
      while (cmds.length > 0 && processing) {
        commands map { _.matches(cmds) } find { case HayesResult(true,_,_) => true case HayesResult(false,_,_) => false } match {
          case Some(HayesResult(_,rest,sr)) =>
            cmds = rest
            skipResult = sr
          case None =>
            processing = false
        }
      }
      if (skipResult) None else Some(processing)
    }
  }
  // ===============================================================
  /** Detects the escape sequence: three consecutive S2 characters ("+++" by default). */
  private class CommandDetector {
    private[this] val lastThree = new StringBuilder

    def addAndCheck(c:Char) : Boolean = {
      lastThree.append(c)
      if (lastThree.length == 4) lastThree.deleteCharAt(0) // keep a sliding window of 3 chars
      val s2 = S(2)
      val commandModeReq = lastThree.length == 3 &&
        lastThree.charAt(0) == s2 &&
        lastThree.charAt(1) == s2 &&
        lastThree.charAt(2) == s2
      if (commandModeReq) lastThree.clear
      commandModeReq
    }

    def reset = lastThree.clear
  }

  /** In-memory stream through which the modem "types" messages for the guest to read. */
  private[this] class ModemCommandStream(welcomeMessage:String = "") extends InputStream {
    private[this] var msg = welcomeMessage

    override def available = msg.length

    def read = synchronized {
      if (msg.length > 0) {
        val b = msg.charAt(0).toInt
        msg = msg.substring(1)
        b
      }
      else -1
    }

    def append(s:String) = synchronized { msg += s }
  }

  /** Guest-side input: modem messages in command mode, socket bytes in data mode. */
  private class WrappedInputStream extends InputStream {
    override def available = {
      if (commandMode) modemIn.available
      else
      if (in != null) in.available
      else 0
    }

    def read = {
      if (commandMode) modemIn.read
      else {
        if (in != null) {
          val b = in.read
          // escape sequence received from the remote side switches back to command mode
          if (inCommandDetector.addAndCheck(b.toChar)) {
            mcl.commandMode(true)
            commandMode = true
          }
          b
        }
        else -1
      }
    }
  }

  /** Guest-side output: command-buffer editing in command mode, socket bytes in data mode. */
  private class WrappedOutputStream extends OutputStream {
    def write(b:Int): Unit = {
      if (commandMode) {
        if (echoMode) modemIn.append(b.toChar.toString)
        if (b == 13) { // CR terminates the command line
          try {
            processCommand(commandOutBuffer)
          }
          finally {
            commandOutBuffer.clear
          }
        }
        else
        if (b == S(5)) commandOutBuffer.delete(commandOutBuffer.length - 1,commandOutBuffer.length) // backspace
        else commandOutBuffer.append(b.toChar)
      }
      else
      if (out != null) {
        out.write(b)
        // escape sequence typed by the guest switches back to command mode
        if (outCommandDetector.addAndCheck(b.toChar)) {
          mcl.commandMode(true)
          commandMode = true
        }
      }
    }

    override def flush: Unit = {
      if (out != null) out.flush
    }
  }

  /**
   * Parses and executes one buffered "AT..." command line and, unless suppressed,
   * queues the appropriate OK/ERROR result code for the guest.
   */
  private def processCommand(commandBuffer:StringBuilder): Unit = {
    import HayesResultCode._
    val cmd = commandBuffer.toString.toUpperCase.trim
    Log.info("Processing command " + cmd) // was a leftover println: route diagnostics through the logger
    if (cmd.length == 0) return

    if (cmd == "AT") modemIn.append("OK" + 13.toChar)
    else
    if (cmd.startsWith("AT")) {
      HayesCommand.processCommands(cmd.substring(2)) match {
        case Some(true) =>
          if (!quietMode) commandModeMessage(OK)
        case Some(false) =>
          if (!quietMode) commandModeMessage(ERROR)
        case None =>
      }
    }
  }

  private[this] var in : InputStream = _   // data-mode input (socket side); null when disconnected
  private[this] var out : OutputStream = _ // data-mode output (socket side); null when disconnected
  private[this] val win = new WrappedInputStream
  private[this] val wout = new WrappedOutputStream
  private[this] val commandOutBuffer = new StringBuilder // AT command line being typed by the guest
  private[this] var commandMode = true
  private[this] val inCommandDetector, outCommandDetector = new CommandDetector
  private[this] val modemIn = new ModemCommandStream(if (welcomeMessage == null) "WELCOME TO KERNAL64 RS-232. ATDT<HOST:PORT> TO CONNECT" + 13.toChar else welcomeMessage)
  private[this] var echoMode = true
  private[this] var serverSocket : ServerSocket = _
  private[this] var socket : Socket = _
  private[this] var listeningThread : Thread = _
  private[this] var listeningPort = 0
  private[this] var quietMode = false // was misspelled 'quiteMode'
  private[this] var englishResultCodes = true // V1: verbose textual result codes
  private[this] var currentBaud = 1200
  private[this] var addConnectionSpeed = false // X1: CONNECT codes carry the speed
  private[this] var currentS = 0 // S-register selected by the last Sn command
  private[this] val S = Array.ofDim[Int](256) // Hayes S-registers
  @volatile private[this] var ringing,answerCall = false

  reset

  def setBaud(baud:Int) = {
    currentBaud = baud
  }

  /** Attaches (or detaches, when in is null) the data-mode socket streams. */
  def setStreams(in:InputStream,out:OutputStream): Unit = {
    this.in = in
    this.out = out
    commandMode = in == null
    Log.info(s"Modem switched to ${if (commandMode) "command" else "internet"} mode")
    if (in == null && socket != null) {
      socket.close()
      socket = null
    }
  }

  /** Emits a result code, translating CONNECT to a speed-specific code for the current baud rate. */
  def commandModeMessage(_code:HayesResultCode.Value): Unit = {
    import HayesResultCode._
    val code = _code match {
      case CONNECT =>
        currentBaud match {
          case 1200 => CONNECT1200
          case 2400 => CONNECT2400
          case 4800 => CONNECT4800
          case 9600 => CONNECT9600
          case 14400 => CONNECT14400
          case 19200 => CONNECT19200
          case 38400 => CONNECT38400
          case _ => _code
        }
      case _ => _code
    }
    if (englishResultCodes) {
      val codeString = if (code == NO_CARRIER) "NO CARRIER" else code.toString
      modemIn.append(codeString + 13.toChar)
    }
    else modemIn.append(code.id.toString + 13.toChar)
  }

  /** Emits a free-form message; ignored while in data mode. */
  def commandModeMessage(msg:String): Unit = {
    if (commandMode) modemIn.append(msg)
  }

  def inputStream : InputStream = win
  def outputStream : OutputStream = wout

  /** Restores power-on defaults (ATZ): command mode, flags and S-registers. */
  def reset: Unit = {
    commandMode = true
    commandOutBuffer.clear
    inCommandDetector.reset
    outCommandDetector.reset
    allowListening(false)
    quietMode = false
    englishResultCodes = true
    currentBaud = 1200
    addConnectionSpeed = false
    // init S
    java.util.Arrays.fill(S,0)
    S(2) = 43 // escape character '+'
    S(3) = 13 // carriage return
    S(4) = 10 // line feed
    S(5) = 20 // backspace/delete
    ringing = false
    answerCall = false
  }

  /**
   * Starts (allowed = true) or stops listening for incoming calls on the given port.
   * NOTE(review): stopping only interrupts the listener thread; ServerSocket.accept is
   * not interruptible, so the listener may linger until the server socket is closed — verify.
   */
  def allowListening(allowed:Boolean,port:Int = -1): Unit = {
    if (allowed) {
      if (listeningThread != null && listeningThread.isAlive) listeningThread.interrupt()
      listeningThread = new Thread(this,"ModemListener")
      listeningPort = port
      listeningThread.start
    }
    else {
      if (listeningThread != null) {
        listeningThread.interrupt()
        if (in != null) in.close
        if (out != null) out.close
      }
    }
  }

  /** Listener loop: accepts one incoming TCP connection at a time and simulates RING/answer. */
  def run: Unit = {
    try {
      serverSocket = new ServerSocket(listeningPort)
      Log.info(s"Modem listening on port $listeningPort ...")
    }
    catch {
      case io:IOException =>
        Log.info(s"Modem: Cannot listen on port $listeningPort: " + io)
        return
    }
    var running = true
    while (running) {
      try {
        val newSocket = serverSocket.accept()
        if (socket == null) {
          socket = newSocket
          Log.info(s"Modem: new incoming connection (${socket.getInetAddress.getHostAddress}) accepted. Sending RING")
          S(1) = 0 // S1 counts the rings
          ringing = true
          val autoAnswer = S(0) > 0 // S0 > 0 enables auto-answer after S0 rings
          val maxRings = if (autoAnswer) S(0) else 10
          mcl.ring(true)
          commandModeMessage(HayesResultCode.RING)
          while (S(1) < maxRings && !answerCall) {
            Thread.sleep(1000)
            S(1) += 1
          }
          mcl.ring(false)
          if (!autoAnswer && !answerCall) {
            newSocket.getOutputStream.write("SERVER NOT ANSWERED+++".getBytes) // removed redundant new String(...)
            newSocket.getOutputStream.flush
            newSocket.close()
            socket = null
            ringing = false
            answerCall = false
          }
          else {
            ringing = false
            answerCall = false
            commandModeMessage(HayesResultCode.CONNECT)
            Thread.sleep(1000)
            Log.info("Answered call")
            setStreams(new BufferedInputStream(socket.getInputStream),new BufferedOutputStream(socket.getOutputStream))
          }
        }
        else {
          Log.info("Modem busy, cannot answer new call")
          newSocket.getOutputStream.write("SERVER BUSY+++".getBytes) // removed redundant new String(...)
          newSocket.getOutputStream.flush
          newSocket.close()
        }
      }
      catch {
        case _:InterruptedException =>
          running = false
        case io:IOException =>
          Log.info(s"Modem: Error while listening on port $listeningPort: " + io)
      }
    }
    Log.info("Modem: listener closed")
  }
}
| abbruzze/kernal64 | Kernal64/src/ucesoft/cbm/peripheral/rs232/Modem.scala | Scala | mit | 13,965 |
package mesosphere.marathon.core.flow.impl
import akka.actor.{ Cancellable, Actor, ActorLogging, Props }
import akka.event.{ EventStream, LoggingReceive }
import mesosphere.marathon.MarathonSchedulerDriverHolder
import mesosphere.marathon.core.base.Clock
import mesosphere.marathon.core.flow.ReviveOffersConfig
import mesosphere.marathon.core.flow.impl.ReviveOffersActor.OffersWanted
import mesosphere.marathon.event.{ SchedulerReregisteredEvent, SchedulerRegisteredEvent }
import mesosphere.marathon.state.Timestamp
import rx.lang.scala.{ Observable, Subscription }
import scala.annotation.tailrec
import scala.concurrent.duration._
private[flow] object ReviveOffersActor {
  /** Creates Props for a [[ReviveOffersActor]] wired to the given clock, configuration,
    * event stream, offers-wanted signal and scheduler driver holder. */
  def props(
    clock: Clock, conf: ReviveOffersConfig,
    marathonEventStream: EventStream,
    offersWanted: Observable[Boolean], driverHolder: MarathonSchedulerDriverHolder): Props = {
    Props(new ReviveOffersActor(clock, conf, marathonEventStream, offersWanted, driverHolder))
  }

  /** Self-message: check whether a previously scheduled revive is now due. */
  private[impl] case object TimedCheck
  /** Notification derived from the offersWanted observable: interest in offers changed. */
  private[impl] case class OffersWanted(wanted: Boolean)
}
/**
 * Revive offers whenever interest is signaled but maximally every 5 seconds.
 *
 * Tracks whether anybody currently wants offers (via the offersWanted observable) and,
 * when interest is present, calls reviveOffers on the Mesos driver — rate-limited by
 * --min_revive_offers_interval and repeated --revive_offers_repetitions times.
 * When interest ceases, offers are suppressed to relieve the allocator.
 */
private[impl] class ReviveOffersActor(
    clock: Clock, conf: ReviveOffersConfig,
    marathonEventStream: EventStream,
    offersWanted: Observable[Boolean],
    driverHolder: MarathonSchedulerDriverHolder) extends Actor with ActorLogging {

  private[impl] var subscription: Subscription = _
  private[impl] var offersCurrentlyWanted: Boolean = false
  private[impl] var revivesNeeded: Int = 0
  private[impl] var lastRevive: Timestamp = Timestamp(0)
  private[impl] var nextReviveCancellableOpt: Option[Cancellable] = None

  override def preStart(): Unit = {
    // Forward interest changes from the observable to ourselves as OffersWanted messages.
    subscription = offersWanted.map(OffersWanted(_)).subscribe(self ! _)
    marathonEventStream.subscribe(self, classOf[SchedulerRegisteredEvent])
    marathonEventStream.subscribe(self, classOf[SchedulerReregisteredEvent])
  }

  override def postStop(): Unit = {
    subscription.unsubscribe()
    nextReviveCancellableOpt.foreach(_.cancel())
    nextReviveCancellableOpt = None
    marathonEventStream.unsubscribe(self)
  }

  /**
   * Revives immediately if the minimum interval has elapsed; otherwise schedules a
   * TimedCheck for when the next revive becomes due.
   */
  @tailrec
  private[this] def reviveOffers(): Unit = {
    val now: Timestamp = clock.now()
    val nextRevive = lastRevive + conf.minReviveOffersInterval().milliseconds

    if (nextRevive <= now) {
      log.info("=> revive offers NOW, canceling any scheduled revives")
      nextReviveCancellableOpt.foreach(_.cancel())
      nextReviveCancellableOpt = None

      driverHolder.driver.foreach(_.reviveOffers())
      lastRevive = now

      revivesNeeded -= 1
      if (revivesNeeded > 0) {
        log.info(
          "{} further revives still needed. Repeating reviveOffers according to --{} {}",
          revivesNeeded, conf.reviveOffersRepetitions.name, conf.reviveOffersRepetitions())
        reviveOffers()
      }
    } else {
      lazy val untilNextRevive = now until nextRevive
      if (nextReviveCancellableOpt.isEmpty) {
        log.info(
          "=> Schedule next revive at {} in {}, adhering to --{} {} (ms)",
          nextRevive, untilNextRevive, conf.minReviveOffersInterval.name, conf.minReviveOffersInterval())
        nextReviveCancellableOpt = Some(schedulerCheck(untilNextRevive))
      } else if (log.isDebugEnabled) {
        // Fixed: was log.info, which defeated the purpose of the isDebugEnabled guard.
        log.debug("=> Next revive already scheduled at {} not yet due for {}", nextRevive, untilNextRevive)
      }
    }
  }

  private[this] def suppressOffers(): Unit = {
    log.info("=> Suppress offers NOW")
    driverHolder.driver.foreach(_.suppressOffers())
  }

  override def receive: Receive = LoggingReceive {
    Seq(
      receiveOffersWantedNotifications,
      receiveReviveOffersEvents
    ).reduceLeft[Receive](_.orElse[Any, Unit](_))
  }

  private[this] def receiveOffersWantedNotifications: Receive = {
    case OffersWanted(true) =>
      log.info("Received offers WANTED notification")
      offersCurrentlyWanted = true
      initiateNewSeriesOfRevives()

    case OffersWanted(false) =>
      log.info("Received offers NOT WANTED notification, canceling {} revives", revivesNeeded)
      offersCurrentlyWanted = false
      revivesNeeded = 0
      nextReviveCancellableOpt.foreach(_.cancel())
      nextReviveCancellableOpt = None

      // When we don't want any more offers, we ask mesos to suppress
      // them. This alleviates load on the allocator, and acts as an
      // infinite duration filter for all agents until the next time
      // we call `Revive`.
      suppressOffers()
  }

  /** Restarts the configured number of revive repetitions. */
  def initiateNewSeriesOfRevives(): Unit = {
    revivesNeeded = conf.reviveOffersRepetitions()
    reviveOffers()
  }

  private[this] def receiveReviveOffersEvents: Receive = {
    case msg @ (_: SchedulerRegisteredEvent | _: SchedulerReregisteredEvent | OfferReviverDelegate.ReviveOffers) =>
      if (offersCurrentlyWanted) {
        log.info(s"Received reviveOffers notification: ${msg.getClass.getSimpleName}")
        initiateNewSeriesOfRevives()
      } else {
        log.info(s"Ignoring ${msg.getClass.getSimpleName} because no one is currently interested in offers")
      }

    case ReviveOffersActor.TimedCheck =>
      log.info(s"Received TimedCheck")
      if (revivesNeeded > 0) {
        reviveOffers()
      } else {
        log.info("=> no revives needed right now")
      }
  }

  // Overridable for testing: schedules the TimedCheck self-message.
  protected def schedulerCheck(duration: FiniteDuration): Cancellable = {
    import context.dispatcher
    context.system.scheduler.scheduleOnce(duration, self, ReviveOffersActor.TimedCheck)
  }
}
| yp-engineering/marathon | src/main/scala/mesosphere/marathon/core/flow/impl/ReviveOffersActor.scala | Scala | apache-2.0 | 5,514 |
package filodb.core.query
import scala.collection.Iterator
import filodb.memory.format.RowReader
/**
 * Please note this is not the ideal contract of a cursor.
 * Instead, it is a stop-gap implementation that gives us the ability to
 * release resources from a query. The earlier implementation purely on
 * Iterators didn't help us nicely with that. The expectation is that
 * moving to this trait will give us compile-time checks
 * that force the developer to care about "closing" the cursor before
 * completing the query.
 */
trait RangeVectorCursor extends Iterator[RowReader] with java.io.Closeable { self =>
  /**
   * This method must release all resources (for example locks) acquired
   * for the purpose of executing this query
   */
  def close(): Unit

  /** Returns a cursor that applies f to each row; closing it closes this cursor. */
  def mapRow(f: RowReader => RowReader): RangeVectorCursor = new RangeVectorCursor {
    def hasNext = self.hasNext
    def next() = f(self.next())
    def close(): Unit = self.close()
  }
}
/** Cursor over iter whose close() runs the supplied cleanup thunk cl. */
class CustomCloseCursor(iter: Iterator[RowReader])(cl: => Unit) extends RangeVectorCursor {
  override def close(): Unit = cl // invoke function
  override def hasNext: Boolean = iter.hasNext
  override def next(): RowReader = iter.next()
}
object NoCloseCursor {
  /** Implicitly adapts a plain RowReader iterator into a cursor whose close() is a no-op. */
  implicit class NoCloseCursor(iter: Iterator[RowReader]) extends RangeVectorCursor {
    override def close(): Unit = {}
    override def hasNext: Boolean = iter.hasNext
    override def next(): RowReader = iter.next()
  }
}
/**
 * Wraps another cursor, guaranteeing that resources are released (via close())
 * if fetching the next row throws.
 */
abstract class WrappedCursor(rows: RangeVectorCursor) extends RangeVectorCursor {
  final def next(): RowReader = {
    var completed = false
    try {
      val row = doNext()
      completed = true
      row
    } finally {
      // Auto-close only on the failure path; the exception itself still propagates.
      if (!completed) close()
    }
  }

  def hasNext: Boolean = rows.hasNext

  def close(): Unit = rows.close()

  // Subclass must implement this method.
  def doNext(): RowReader
}
| tuplejump/FiloDB | core/src/main/scala/filodb.core/query/RangeVectorCursor.scala | Scala | apache-2.0 | 1,906 |
package cz.jenda.pidifrky.ui
import android.os.Bundle
import android.text.method.ScrollingMovementMethod
import cz.jenda.pidifrky.R
import cz.jenda.pidifrky.logic.DebugReporter
import cz.jenda.pidifrky.logic.http.HttpRequester
import cz.jenda.pidifrky.logic.location.GpsLogger
import cz.jenda.pidifrky.ui.api.BasicActivity
class GpsLogActivity extends BasicActivity {
  /** Sets up the GPS log view, restores previously saved log text (if any) and
    * registers a listener that appends each new GPS event to the view. */
  protected override def onCreate(savedInstanceState: Bundle) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.activity_gps_log)

    // Restore the log text saved by onSaveInstanceState across configuration changes.
    for {
      bundle <- Option(savedInstanceState)
      textView <- findTextView(R.id.gpslog)
    } yield {
      textView.setText(bundle.getString("gpslog", ""))
    }

    findTextView(R.id.gpslog).foreach(_.setMovementMethod(new ScrollingMovementMethod()))

    // NOTE(review): the appended separator is the two-character literal backslash-n,
    // not a newline — presumably "\n" was intended; confirm against the UI.
    GpsLogger.setListener(event => findTextView(R.id.gpslog).foreach { textView =>
      val text = textView.getText
      textView.setText(text + event + "\\n")
    })
  }
  // NOTE(review): redundant override — it only delegates to super and could be removed.
  override protected def onStart(): Unit = {
    super.onStart()
  }
  /** Persists the current log text so onCreate can restore it after a configuration change. */
  override def onSaveInstanceState(outState: Bundle): Unit = {
    super.onSaveInstanceState(outState)
    findTextView(R.id.gpslog).foreach(t => outState.putString("gpslog", t.getText.toString))
  }
} | jendakol/pidifrky | client/src/main/scala/cz/jenda/pidifrky/ui/GpsLogActivity.scala | Scala | apache-2.0 | 1,230 |
package com.datawizards.sparklocal.rdd
import com.datawizards.sparklocal.SparkLocalBaseTest
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
@RunWith(classOf[JUnitRunner])
class RandomSplitTest extends SparkLocalBaseTest {

  // Input for all split tests.
  val data:Range = 1 to 10

  test("randomSplit result - Scala") {
    // 80/20 weights: verify relative sample sizes, that every sampled element comes
    // from the input, and that no element appears in a sample more than once.
    val Array(sample1, sample2) = RDDAPI(data).randomSplit(Array(80,20))
    assert(sample1.count() > sample2.count(), "First sample size > second sample size")
    assert(sample1.collect().forall(x => data contains x), "All sample elements from input data")
    assert(sample2.collect().forall(x => data contains x), "All sample elements from input data")
    assert(sample1.map(x => (x,1)).reduceByKey(_ + _).collectAsMap().values.forall(_ == 1), "All sample elements only once")
    assert(sample2.map(x => (x,1)).reduceByKey(_ + _).collectAsMap().values.forall(_ == 1), "All sample elements only once")
  }

  test("randomSplit result - negative weights") {
    intercept[IllegalArgumentException]{
      RDDAPI(data).randomSplit(Array(-1,-2))
    }
  }

  test("randomSplit result - sum of weights is negative") {
    intercept[IllegalArgumentException]{
      RDDAPI(data).randomSplit(Array(1,-2))
    }
  }

  test("randomSplit result - Spark") {
    // Same checks as the Scala-backed case, but on a Spark-backed RDD.
    val Array(sample1, sample2) = RDDAPI(sc.parallelize(data)).randomSplit(Array(80,20))
    assert(sample1.count() > sample2.count(), "First sample size > second sample size")
    assert(sample1.collect().forall(x => data contains x), "All sample elements from input data")
    assert(sample2.collect().forall(x => data contains x), "All sample elements from input data")
    assert(sample1.map(x => (x,1)).reduceByKey(_ + _).collectAsMap().values.forall(_ == 1), "All sample elements only once")
    assert(sample2.map(x => (x,1)).reduceByKey(_ + _).collectAsMap().values.forall(_ == 1), "All sample elements only once")
  }

}
} | piotr-kalanski/spark-local | src/test/scala/com/datawizards/sparklocal/rdd/RandomSplitTest.scala | Scala | apache-2.0 | 1,898 |
/**
* License
* =======
*
* The MIT License (MIT)
*
*
* Copyright (c) 2017 Antoine DOERAENE @sherpal
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package plot
import gui._
/**
 * MenuBand is a wide rectangle at the top of the screen that contains menu buttons.
 *
 * All widget construction happens as a side effect of object initialization,
 * i.e. the first time MenuBand is referenced.
 */
object MenuBand extends Frame("Menu", Some(UIParent)) {

  // Anchor to the full width of the screen top, fixed height.
  setPoint(TopLeft)
  setPoint(TopRight)
  setHeight(30)

  // Light grey background.
  private val bg = createTexture()
  bg.setAllPoints()
  bg.setVertexColor(200.0 / 255, 200.0 / 255, 200.0 / 255)

  // "New Plot" button: opens a new Plot on left-click release over the button.
  private val newPlotBut: Button = new Button("", Some(this))
  newPlotBut.setPoint(Left, this, Left, 10)
  newPlotBut.setSize(100, 25)
  newPlotBut.setText("New Plot")
  newPlotBut.setTextColor(0,0,0)
  newPlotBut.setNormalTexture()
  newPlotBut.normalTexture.get.setAllPoints()
  newPlotBut.normalTexture.get.setVertexColor(170.0 / 255, 170.0 / 255, 170.0 / 255)
  newPlotBut.setPushedTexture(Button.makeSimplePushedTexture(newPlotBut))
  newPlotBut.setHighlightTexture()
  newPlotBut.highlightTexture.get.setAllPoints()
  newPlotBut.highlightTexture.get.setVertexColor(89.0 / 255, 157.0 / 255, 220.0 / 255, 0.5)
  newPlotBut.setScript(ScriptKind.OnMouseReleased)((_: Frame, x: Double, y: Double, button: Int) => {
    // button == 0 is the left mouse button; presumably the release must occur over the
    // button for the click to count — TODO confirm against the gui library's semantics.
    if (button == 0 && newPlotBut.isMouseOver(x, y)) {
      new Plot()
    }
  })

}
| sherpal/holomorphic-maps | src/main/scala/plot/MenuBand.scala | Scala | mit | 2,416 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset down to code snippets matching specific criteria and returns a small sample of them, giving a quick overview of the dataset's contents.