code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package org.usagram.clarify
import org.usagram.clarify.error.Error
/** Result of validating a value of type `V`, carrying the validated value and its [[Tags]]. */
sealed trait Definite[+V] {
  /** The underlying value that was validated. */
  def value: V

  /** Tags attached to the value (used for error message rendering). */
  def tags: Tags

  /** Whether the value passed validation. */
  def isValid: Boolean

  /** Negation of [[isValid]]. */
  def isInvalid: Boolean = !isValid
}

/** A value that passed validation. */
case class Valid[+V](value: V, tags: Tags) extends Definite[V] {
  // explicit result type on the public member (Scala style guide)
  override val isValid: Boolean = true
}

/** A value that failed validation, carrying the [[Error]] describing why. */
case class Invalid[+V](value: V, tags: Tags, error: Error) extends Definite[V] {
  override val isValid: Boolean = false

  /** Human-readable error message rendered against this value's tags. */
  def message: String = error.message(tags)
}

/** A value whose validity could not be determined; treated as not valid. */
case class Unknown[+V](value: V, tags: Tags) extends Definite[V] {
  override val isValid: Boolean = false
}
| takkkun/clarify | core/src/main/scala/org/usagram/clarify/Definite.scala | Scala | mit | 543 |
package com.azavea.rasterfoundry
import org.apache.spark.rdd.RDD
import geotrellis.raster._
import geotrellis.spark.SpatialKey
import spire.syntax.cfor._
object InputImageRDD {

  /**
   * Splits one multi-band tile into a `tilesAcross` x `tilesAcross` grid of
   * smaller tiles, re-keying each sub-tile in the finer layout.
   *
   * @param key         spatial key of `image` in the coarse layout
   * @param image       the multi-band tile to split; assumed to span at least
   *                    `tilesAcross * TileMath.TILE_DIM` pixels per dimension
   * @param tilesAcross number of sub-tiles along each axis
   * @return one (key, tile) pair per sub-tile, in row-major order
   */
  def split(key: SpatialKey, image: MultiBandTile, tilesAcross: Int): Array[(SpatialKey, MultiBandTile)] = {
    // Top-left key of the sub-grid in the finer layout.
    val newKeyColMin = key.col * tilesAcross
    val newKeyRowMin = key.row * tilesAcross
    val tiles = Array.ofDim[(SpatialKey, MultiBandTile)](tilesAcross * tilesAcross)
    val bandCount = image.bandCount
    var i = 0
    cfor(0)(_ < tilesAcross, _ + 1) { layoutRow =>
      cfor(0)(_ < tilesAcross, _ + 1) { layoutCol =>
        // Pixel bounds of this sub-tile within the source image (inclusive).
        val firstCol = layoutCol * TileMath.TILE_DIM
        val lastCol = firstCol + TileMath.TILE_DIM - 1
        val firstRow = layoutRow * TileMath.TILE_DIM
        val lastRow = firstRow + TileMath.TILE_DIM - 1
        tiles(i) = {
          val gb = GridBounds(firstCol, firstRow, lastCol, lastRow)
          // Materialize each cropped band eagerly (toArrayTile) so sub-tiles
          // do not keep a reference to the large parent tile.
          val bands = Array.ofDim[Tile](bandCount)
          cfor(0)(_ < bandCount, _ + 1) { b =>
            bands(b) = CroppedTile(image.band(b), gb).toArrayTile
          }
          val newKey = SpatialKey(newKeyColMin + layoutCol, newKeyRowMin + layoutRow)
          (newKey, ArrayMultiBandTile(bands))
        }
        i += 1
      }
    }
    // The last expression is the method's result; an explicit `return` is
    // non-idiomatic Scala and has been removed.
    tiles
  }
}
/**
 * A set of keyed input image tiles at a given zoom level, plus the priority
 * used when ordering overlapping inputs.
 *
 * @param priority   ordering weight attached to every pixel of these images
 * @param zoom       zoom level the tiles were cut at
 * @param gridBounds extent of the keys covered by `images`
 * @param images     the keyed multi-band tiles themselves
 */
case class InputImageRDD(priority: Int, zoom: Int, gridBounds: GridBounds, images: RDD[(SpatialKey, MultiBandTile)]) {

  /** Pairs every tile with a constant tile holding this input's priority, so
    * downstream processing can order pixels by input priority. */
  def orderedImages: RDD[(SpatialKey, OrderedImage)] =
    images
      .mapValues { image =>
        OrderedImage(image, IntConstantTile(priority, image.cols, image.rows))
      }
}
| kdeloach/raster-foundry-tiler | mosaic/src/main/scala/com/azavea/rasterfoundry/InputImageRDD.scala | Scala | apache-2.0 | 1,600 |
package chrome.runtime.bindings
import scala.scalajs.js
/** Constants for the `reason` field of `chrome.runtime.onInstalled` details. */
object OnInstalledDetails {
  /** Type alias: Chrome reports the install reason as a plain string. */
  type InstallReason = String

  /** The reason values Chrome may report (see the chrome.runtime API docs). */
  object InstallReasons {
    val INSTALL: InstallReason = "install"
    val UPDATE: InstallReason = "update"
    val CHROME_UPDATE: InstallReason = "chrome_update"
    val SHARED_MODULE_UPDATE: InstallReason = "shared_module_update"
  }
}

/** Facade for the details object passed to `chrome.runtime.onInstalled` listeners.
  *
  * NOTE(review): members are `js.native` but the class itself is not annotated
  * `@js.native`; newer Scala.js versions require that annotation on native
  * facades — confirm against the project's Scala.js version.
  */
class OnInstalledDetails extends js.Object {
  val reason: OnInstalledDetails.InstallReason = js.native
  val previousVersion: js.UndefOr[String] = js.native
  val id: js.UndefOr[String] = js.native
}
| amsayk/scala-js-chrome | bindings/src/main/scala/chrome/runtime/bindings/OnInstalledDetails.scala | Scala | mit | 566 |
package spire.laws
import spire.algebra._
import spire.implicits._
import org.typelevel.discipline.{Laws, Predicate}
import org.scalacheck.{Arbitrary, Prop}
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop._
object RingLaws {
  /** Builds a [[RingLaws]] instance from implicit `Eq`/`Arbitrary` evidence plus
    * a predicate selecting the "non-zero" elements — the subset over which the
    * multiplicative group laws are checked (zero has no reciprocal).
    */
  def apply[A : Eq : Arbitrary](implicit _pred: Predicate[A]) = new RingLaws[A] {
    def Arb = implicitly[Arbitrary[A]]
    def pred = _pred
    // Group laws over the non-zero subset: generated values are filtered by the predicate.
    val nonZeroLaws = new GroupLaws[A] {
      def Arb = Arbitrary(arbitrary[A] filter _pred)
      def Equ = Eq[A]
    }
  }
}
/** Law sets for ring-like structures, layered on top of [[GroupLaws]].
  *
  * Additive structure comes from [[GroupLaws]]; the multiplicative structure is
  * defined here and, for field-like structures, is checked over the non-zero
  * subset selected by [[pred]] (see [[nonZeroLaws]]).
  */
trait RingLaws[A] extends GroupLaws[A] {

  // must be a val (stable identifier)
  val nonZeroLaws: GroupLaws[A]

  // selects the elements considered invertible (typically the non-zero ones)
  def pred: Predicate[A]

  // derive a new law set with the predicate replaced (or conjoined, when replace = false)
  def withPred(_pred: Predicate[A], replace: Boolean = true): RingLaws[A] = RingLaws[A](
    Equ,
    Arb,
    if (replace) _pred else pred && _pred
  )

  implicit def Arb: Arbitrary[A]
  implicit def Equ: Eq[A] = nonZeroLaws.Equ

  // multiplicative groups

  def multiplicativeSemigroup(implicit A: MultiplicativeSemigroup[A]) = new MultiplicativeProperties(
    base = _.semigroup(A.multiplicative),
    parent = None,
    "prodn(a, 1) === a" β forAll((a: A) =>
      A.prodn(a, 1) === a
    ),
    "prodn(a, 2) === a * a" β forAll((a: A) =>
      A.prodn(a, 2) === (a * a)
    ),
    "prodOption" β forAll((a: A) =>
      (A.prodOption(Seq.empty[A]) === None) &&
      (A.prodOption(Seq(a)) === Some(a)) &&
      (A.prodOption(Seq(a, a)) === Some(a * a)) &&
      (A.prodOption(Seq(a, a, a)) === Some(a * a * a))
    )
  )

  def multiplicativeMonoid(implicit A: MultiplicativeMonoid[A]) = new MultiplicativeProperties(
    base = _.monoid(A.multiplicative),
    parent = Some(multiplicativeSemigroup),
    "prodn(a, 0) === one" β forAll((a: A) =>
      A.prodn(a, 0) === A.one
    ),
    "prod(Nil) === one" β forAll((a: A) =>
      A.prod(Nil) === A.one
    ),
    "isOne" β forAll((a: A) =>
      a.isOne === (a === A.one)
    )
  )

  def multiplicativeGroup(implicit A: MultiplicativeGroup[A]) = new MultiplicativeProperties(
    base = _.group(A.multiplicative),
    parent = Some(multiplicativeMonoid),
    // only elements accepted by `pred` are required to have a reciprocal
    "reciprocal consistent" β forAll((x: A) =>
      pred(x) ==> ((A.one / x) === x.reciprocal)
    )
  )

  def multiplicativeAbGroup(implicit A: MultiplicativeAbGroup[A]) = new MultiplicativeProperties(
    base = _.abGroup(A.multiplicative),
    parent = Some(multiplicativeGroup)
  )

  // rings

  def semiring(implicit A: Semiring[A]) = new RingProperties(
    name = "semiring",
    al = additiveSemigroup,
    ml = multiplicativeSemigroup,
    parents = Seq.empty,
    "distributive" β forAll((x: A, y: A, z: A) =>
      (x * (y + z) === (x * y + x * z)) && (((x + y) * z) === (x * z + y * z))
    ),
    "pow" β forAll((x: A) =>
      ((x pow 1) === x) && ((x pow 2) === x * x) && ((x pow 3) === x * x * x)
    )
  )

  def rng(implicit A: Rng[A]) = new RingProperties(
    name = "rng",
    al = additiveAbGroup,
    ml = multiplicativeSemigroup,
    parents = Seq(semiring)
  )

  def rig(implicit A: Rig[A]) = new RingProperties(
    name = "rig",
    al = additiveMonoid,
    ml = multiplicativeMonoid,
    parents = Seq(semiring)
  )

  def ring(implicit A: Ring[A]) = new RingProperties(
    // TODO fromParents
    name = "ring",
    al = additiveAbGroup,
    ml = multiplicativeMonoid,
    parents = Seq(rig, rng)
  )

  def euclideanRing(implicit A: EuclideanRing[A]) = RingProperties.fromParent(
    // TODO tests?!
    name = "euclidean ring",
    parent = ring
  )

  // Everything below fields (e.g. rings) does not require their multiplication
  // operation to be a group. Hence, we do not check for the existence of an
  // inverse. On the other hand, fields require their multiplication to be an
  // abelian group. Now we have to worry about zero.
  // The usual text book definition says: Fields consist of two abelian groups
  // (set, +, zero) and (set \\ zero, *, one). We do the same thing here.
  // However, since law checking for the multiplication does not include zero
  // any more, it is not immediately clear that desired properties like
  // zero * x == x * zero hold.
  // Luckily, these follow from the other field and group axioms.
  def field(implicit A: Field[A]) = new RingProperties(
    name = "field",
    al = additiveAbGroup,
    ml = multiplicativeAbGroup,
    parents = Seq(euclideanRing)
  ) {
    // checked over the non-zero subset (see RingProperties._ml)
    override def nonZero = true
  }

  // property classes

  class MultiplicativeProperties(
    val base: GroupLaws[A] => GroupLaws[A]#GroupProperties,
    val parent: Option[MultiplicativeProperties],
    val props: (String, Prop)*
  ) extends RuleSet with HasOneParent {
    private val _base = base(RingLaws.this)

    val name = _base.name
    val bases = Seq("base" β _base)
  }

  object RingProperties {
    def fromParent(name: String, parent: RingProperties, props: (String, Prop)*) =
      new RingProperties(name, parent.al, parent.ml, Seq(parent), props: _*)
  }

  class RingProperties(
    val name: String,
    val al: AdditiveProperties,
    val ml: MultiplicativeProperties,
    val parents: Seq[RingProperties],
    val props: (String, Prop)*
  ) extends RuleSet {
    def nonZero: Boolean = false

    // when nonZero, re-run the multiplicative base laws over the non-zero subset
    def _ml =
      if (nonZero)
        new RuleSet with HasOneParent {
          val name = ml.name
          val bases = Seq("base-nonzero" β ml.base(nonZeroLaws))
          val parent = ml.parent
          val props = ml.props
        }
      else
        ml

    def bases = Seq("additive" β al, "multiplicative" β _ml)
  }
}
// vim: expandtab:ts=2:sw=2
| woparry/spire | scalacheck-binding/src/main/scala/spire/laws/RingLaws.scala | Scala | mit | 5,523 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Algolia
* http://www.algolia.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package algolia.objects
/** Typed values for the Algolia `typoTolerance` index setting. */
sealed trait TypoTolerance {
  /** Wire value sent to the Algolia API. */
  def name: String
}

object TypoTolerance {

  /** Typo tolerance fully enabled. */
  case object `true` extends TypoTolerance {
    val name: String = "true"
  }

  /** Typo tolerance disabled. */
  case object `false` extends TypoTolerance {
    val name: String = "false"
  }

  /** Keep only hits with the minimum number of typos. */
  case object min extends TypoTolerance {
    val name: String = "min"
  }

  /** Rank hits with typos strictly after exact matches. */
  case object strict extends TypoTolerance {
    val name: String = "strict"
  }
}
| algolia/algoliasearch-client-scala | src/main/scala/algolia/objects/TypoTolerance.scala | Scala | mit | 1,628 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import scala.collection.mutable
import org.apache.spark.ml.linalg._
import org.apache.spark.rdd.RDD
/**
 * Case class that represents an instance of a weighted data point with label
 * and features.
 *
 * @param label Label for this data point.
 * @param weight The weight of this instance.
 * @param features The vector of features for this data point.
 */
private[spark] case class Instance(label: Double, weight: Double, features: Vector)
/**
 * Class that represents a block of instances.
 * If all weights are 1, then an empty `weights` array is stored.
 */
private[spark] case class InstanceBlock(
    labels: Array[Double],
    weights: Array[Double],
    matrix: Matrix) {
  require(labels.length == matrix.numRows)
  // rows must be stored contiguously (transposed/row-major layout);
  // the iterators and accessors below rely on this
  require(matrix.isTransposed)
  if (weights.nonEmpty) {
    require(labels.length == weights.length)
  }

  /** Number of instances (rows) in this block. */
  def size: Int = labels.length

  /** Feature dimension shared by all rows. */
  def numFeatures: Int = matrix.numCols

  /** Reconstructs the individual instances; weight defaults to 1.0 when the
    * weight array is empty (all-unit-weight case). */
  def instanceIterator: Iterator[Instance] = {
    if (weights.nonEmpty) {
      labels.iterator.zip(weights.iterator).zip(matrix.rowIter)
        .map { case ((label, weight), vec) => Instance(label, weight, vec) }
    } else {
      labels.iterator.zip(matrix.rowIter)
        .map { case (label, vec) => Instance(label, 1.0, vec) }
    }
  }

  /** Label of the i-th instance. */
  def getLabel(i: Int): Double = labels(i)

  def labelIter: Iterator[Double] = labels.iterator

  // weight accessor by row index; constant 1.0 when all weights are unit
  @transient lazy val getWeight: Int => Double = {
    if (weights.nonEmpty) {
      (i: Int) => weights(i)
    } else {
      (i: Int) => 1.0
    }
  }

  def weightIter: Iterator[Double] = {
    if (weights.nonEmpty) {
      weights.iterator
    } else {
      Iterator.fill(size)(1.0)
    }
  }

  // directly get the non-zero iterator of i-th row vector without array copy or slice
  @transient lazy val getNonZeroIter: Int => Iterator[(Int, Double)] = {
    matrix match {
      case dm: DenseMatrix =>
        (i: Int) =>
          // transposed dense storage: row i occupies numFeatures consecutive values
          val start = numFeatures * i
          Iterator.tabulate(numFeatures)(j =>
            (j, dm.values(start + j))
          ).filter(_._2 != 0)
      case sm: SparseMatrix =>
        (i: Int) =>
          // transposed sparse storage: colPtrs delimits rows and rowIndices
          // holds the column index of each stored value
          val start = sm.colPtrs(i)
          val end = sm.colPtrs(i + 1)
          Iterator.tabulate(end - start)(j =>
            (sm.rowIndices(start + j), sm.values(start + j))
          ).filter(_._2 != 0)
    }
  }
}
private[spark] object InstanceBlock {

  /**
   * Suggested value for BlockSizeInMB in Level-2 routine cases.
   * According to performance tests of BLAS routine (see SPARK-31714) and
   * LinearSVC (see SPARK-32907), 1.0 MB should be an acceptable value for
   * linear models using Level-2 routine (GEMV) to perform prediction and
   * gradient computation.
   */
  val DefaultBlockSizeInMB = 1.0

  /** Estimated in-memory size (bytes) of a block with the given dimensions,
    * using whichever of the dense/sparse matrix layouts would be smaller. */
  private def getBlockMemUsage(
      numCols: Long,
      numRows: Long,
      nnz: Long,
      allUnitWeight: Boolean): Long = {
    val doubleBytes = java.lang.Double.BYTES
    val arrayHeader = 12L
    val denseSize = Matrices.getDenseSize(numCols, numRows)
    val sparseSize = Matrices.getSparseSize(nnz, numRows + 1)
    val matrixSize = math.min(denseSize, sparseSize)
    // all-unit-weight blocks store an empty weight array, saving one double per row
    if (allUnitWeight) {
      matrixSize + doubleBytes * numRows + arrayHeader * 2
    } else {
      matrixSize + doubleBytes * numRows * 2 + arrayHeader * 2
    }
  }

  /** Packs a batch of instances into a single block. */
  def fromInstances(instances: Seq[Instance]): InstanceBlock = {
    val labels = instances.map(_.label).toArray
    // store weights only when at least one differs from 1 (see class doc)
    val weights = if (instances.exists(_.weight != 1)) {
      instances.map(_.weight).toArray
    } else {
      Array.emptyDoubleArray
    }
    val matrix = Matrices.fromVectors(instances.map(_.features))
    new InstanceBlock(labels, weights, matrix)
  }

  /** Groups instances into fixed-count blocks of `blockSize` rows each. */
  def blokify(instances: RDD[Instance], blockSize: Int): RDD[InstanceBlock] = {
    instances.mapPartitions(_.grouped(blockSize).map(InstanceBlock.fromInstances))
  }

  /** Groups instances into blocks whose estimated memory footprint is close to
    * `maxMemUsage` bytes; every block contains at least one instance. */
  def blokifyWithMaxMemUsage(
      instanceIterator: Iterator[Instance],
      maxMemUsage: Long): Iterator[InstanceBlock] = {
    require(maxMemUsage > 0)
    new Iterator[InstanceBlock]() {
      // feature dimension, learned from the first instance; all rows must match
      private var numCols = -1L

      override def hasNext: Boolean = instanceIterator.hasNext

      override def next(): InstanceBlock = {
        val buff = mutable.ArrayBuilder.make[Instance]
        var buffCnt = 0L
        var buffNnz = 0L
        var buffUnitWeight = true
        var blockMemUsage = 0L
        while (instanceIterator.hasNext && blockMemUsage < maxMemUsage) {
          val instance = instanceIterator.next()
          if (numCols < 0L) numCols = instance.features.size
          require(numCols == instance.features.size)
          buff += instance
          buffCnt += 1L
          buffNnz += instance.features.numNonzeros
          buffUnitWeight &&= (instance.weight == 1)
          blockMemUsage = getBlockMemUsage(numCols, buffCnt, buffNnz, buffUnitWeight)
        }
        // the block memory usage may slightly exceed threshold, not a big issue.
        // and this ensure even if one row exceed block limit, each block has one row.
        InstanceBlock.fromInstances(buff.result())
      }
    }
  }

  /** RDD variant of [[blokifyWithMaxMemUsage]], applied per partition. */
  def blokifyWithMaxMemUsage(
      instances: RDD[Instance],
      maxMemUsage: Long): RDD[InstanceBlock] = {
    require(maxMemUsage > 0)
    instances.mapPartitions(iter => blokifyWithMaxMemUsage(iter, maxMemUsage))
  }
}
/**
 * Case class that represents an instance of data point with
 * label, weight, offset and features.
 * This is mainly used in GeneralizedLinearRegression currently.
 *
 * @param label Label for this data point.
 * @param weight The weight of this instance.
 * @param offset The offset used for this data point.
 * @param features The vector of features for this data point.
 */
private[ml] case class OffsetInstance(
    label: Double,
    weight: Double,
    offset: Double,
    features: Vector) {

  /** Converts to an [[Instance]] object by leaving out the offset. */
  def toInstance: Instance = Instance(label, weight, features)
}
| mahak/spark | mllib/src/main/scala/org/apache/spark/ml/feature/Instance.scala | Scala | apache-2.0 | 6,719 |
/*
comonads.scala
Examples for comonads blog post
*/
/** Worked examples of comonadic programming with Cats: stream smoothing,
  * image diffusion (heat equation), and an Ising model Gibbs sampler. */
object ComonadExamples {

  import cats._
  import cats.implicits._

  // linear filter for Stream
  def linearFilterS(weights: Stream[Double])(s: Stream[Double]): Double =
    (weights, s).parMapN(_*_).sum

  // a generic linear filter
  // (with help from Fabio Labella @SystemFw on Cats gitter channel)
  def linearFilter[F[_]: Foldable, G[_]](
    weights: F[Double], s: F[Double]
  )(implicit ev: NonEmptyParallel[F, G]): Double =
    (weights, s).parMapN(_*_).fold

  def filterExamples: Unit = {
    // define a stream (logistic map)
    val lam = 3.7
    def s = Stream.iterate(0.5)(x => lam*x*(1-x))
    println(s.take(10).toList)
    println(s.coflatten.take(3).map(_.take(4).toList).toList)
    // apply a filter
    println(s.coflatMap(linearFilterS(Stream(0.25,0.5,0.25))).take(5).toList)
    // now use generic filter
    def myFilter(s: Stream[Double]): Double =
      linearFilter(Stream(0.25, 0.5, 0.25),s)
    println(s.coflatMap(myFilter).take(5).toList)
    // using a lambda
    println(s.coflatMap(s => linearFilter(Stream(0.25,0.5,0.25),s)).
      take(5).toList)
    // check stack safety
    println(s.coflatMap(s => linearFilter(Stream(0.25,0.5,0.25),s)).
      take(1000000).drop(999995).toList)
    // use a List instead of a Stream
    val sl = s.take(10).toList
    println(sl)
    println(sl.coflatMap(sl => linearFilter(List(0.25,0.5,0.25),sl)) )
    // some plots in breeze...
    val n = 500
    import breeze.plot._
    import breeze.linalg._
    val fig = Figure(s"The (smoothed) logistic map (lambda=$lam)")
    val p0 = fig.subplot(3,1,0)
    p0 += plot(linspace(1,n,n),s.take(n))
    p0.ylim = (0.0,1.0)
    p0.title = s"The logistic map (lambda=$lam)"
    val p1 = fig.subplot(3,1,1)
    p1 += plot(linspace(1,n,n),s.coflatMap(myFilter).take(n))
    p1.ylim = (0.0,1.0)
    p1.title = "Smoothed by a simple linear filter"
    val p2 = fig.subplot(3,1,2)
    p2 += plot(linspace(1,n,n),s.coflatMap(myFilter).coflatMap(myFilter).coflatMap(myFilter).coflatMap(myFilter).coflatMap(myFilter).take(n))
    p2.ylim = (0.0,1.0)
    p2.title = "Smoothed with 5 applications of the linear filter"
    fig.refresh
  }

  // Numerical solution of the 2-D heat equation PDE
  // Or image processing, depending on your point of view
  // https://jaspervdj.be/posts/2014-11-27-comonads-image-processing.html
  // I'm going to go for column-major rather than row-major

  // Basic image class: w x h grid stored column-major in a parallel vector
  import scala.collection.parallel.immutable.ParVector
  case class Image[T](w: Int, h: Int, data: ParVector[T]) {
  //case class Image[T](w: Int, h: Int, data: Vector[T]) {
    def apply(x: Int, y: Int): T = data(x*h+y)
    def map[S](f: T => S): Image[S] = Image(w, h, data map f)
    def updated(x: Int, y: Int, value: T): Image[T] =
      Image(w,h,data.updated(x*h+y,value))
  }

  // Pointed image (with a focus/cursor)
  case class PImage[T](x: Int, y: Int, image: Image[T]) {
    def extract: T = image(x, y)
    def map[S](f: T => S): PImage[S] = PImage(x, y, image map f)
    // apply f at every possible focus to build the new image (the comonadic step)
    def coflatMap[S](f: PImage[T] => S): PImage[S] = PImage(
      x, y, Image(image.w, image.h,
        (0 until (image.w * image.h)).toVector.par.map(i => {
        //(0 until (image.w * image.h)).toVector.map(i => {
          val xx = i / image.h
          val yy = i % image.h
          f(PImage(xx, yy, image))
        })))

    // now a few methods for navigation - not part of the comonad interface
    // using periodic boundary conditions
    def up: PImage[T] = {
      val py = y-1
      val ny = if (py >= 0) py else (py + image.h)
      PImage(x,ny,image)
    }
    def down: PImage[T] = {
      val py = y+1
      val ny = if (py < image.h) py else (py - image.h)
      PImage(x,ny,image)
    }
    def left: PImage[T] = {
      val px = x-1
      val nx = if (px >= 0) px else (px + image.w)
      PImage(nx,y,image)
    }
    def right: PImage[T] = {
      val px = x+1
      val nx = if (px < image.w) px else (px - image.w)
      PImage(nx,y,image)
    }
  }

  // Provide evidence that PImage is a Cats Comonad
  implicit val pimageComonad = new Comonad[PImage] {
    def extract[A](wa: PImage[A]) = wa.extract
    def coflatMap[A,B](wa: PImage[A])(f: PImage[A] => B): PImage[B] =
      wa.coflatMap(f)
    def map[A,B](wa: PImage[A])(f: A => B): PImage[B] = wa.map(f)
  }

  // convert to and from Breeze matrices
  import breeze.linalg.{Vector => BVec, _}
  def BDM2I[T](m: DenseMatrix[T]): Image[T] =
    Image(m.cols, m.rows, m.data.toVector.par)
    //Image(m.cols, m.rows, m.data.toVector)
  def I2BDM(im: Image[Double]): DenseMatrix[Double] =
    new DenseMatrix(im.h,im.w,im.data.toArray)

  // image examples
  def imageExamples: Unit = {
    // a filter corresponding to one step of integration of the heat equation
    def fil(pi: PImage[Double]): Double = (2*pi.extract+pi.up.extract+pi.down.extract+pi.left.extract+pi.right.extract)/6.0
    // simulate a noisy image
    import breeze.stats.distributions.Gaussian
    val bdm = DenseMatrix.tabulate(200,250){case (i,j) => math.cos(0.1*math.sqrt((i*i+j*j))) + Gaussian(0.0,2.0).draw}
    val pim0 = PImage(0,0,BDM2I(bdm))
    // just checking that Cats Comonad syntax is picked up - not required
    pim0.coflatten
    // use filter to generate an infinite stream of pointed images
    def pims = Stream.iterate(pim0)(_.coflatMap(fil))
    // render the first few images from the stream
    import breeze.plot._
    val fig = Figure("Diffusing a noisy image")
    pims.take(25).zipWithIndex.foreach{case (pim,i) => {
      val p = fig.subplot(5,5,i)
      p += image(I2BDM(pim.image))
    }}
  }

  // Ising model Gibbs sampler
  def isingExamples: Unit = {
    import breeze.stats.distributions.{Binomial,Bernoulli}
    val beta = 0.4
    val bdm = DenseMatrix.tabulate(500,600){
      case (i,j) => (new Binomial(1,0.2)).draw
    }.map(_*2 - 1) // random matrix of +/-1s
    val pim0 = PImage(0,0,BDM2I(bdm))
    // single-site conditional update for the Ising model
    def gibbsKernel(pi: PImage[Int]): Int = {
      val sum = pi.up.extract+pi.down.extract+pi.left.extract+pi.right.extract
      val p1 = math.exp(beta*sum)
      val p2 = math.exp(-beta*sum)
      val probplus = p1/(p1+p2)
      if (new Bernoulli(probplus).draw) 1 else -1
    }
    // checkerboard updates: odd and even sites alternately, so neighbouring
    // sites are never updated in the same parallel sweep
    def oddKernel(pi: PImage[Int]): Int =
      if ((pi.x+pi.y) % 2 != 0) pi.extract else gibbsKernel(pi)
    def evenKernel(pi: PImage[Int]): Int =
      if ((pi.x+pi.y) % 2 == 0) pi.extract else gibbsKernel(pi)
    //def pims = Stream.iterate(pim0)(_.coflatMap(gibbsKernel))
    def pims = Stream.iterate(pim0)(_.coflatMap(oddKernel).coflatMap(evenKernel))
    // render
    import breeze.plot._
    val fig = Figure("Ising model Gibbs sampler")
    //fig.visible = false
    fig.width = 1000
    fig.height = 800
    pims.take(50).zipWithIndex.foreach{case (pim,i) => {
      print(s"$i ")
      fig.clear
      val p = fig.subplot(1,1,0)
      p.title = s"Ising model: frame $i"
      p += image(I2BDM(pim.image.map{_.toDouble}))
      fig.refresh
      //fig.saveas(f"ising$i%04d.png")
    }}
    println
  }

  def main(args: Array[String]): Unit = {
    filterExamples
    imageExamples
    isingExamples
  }
}
// eof
| darrenjw/blog | comonads/src/main/scala/comonads.scala | Scala | apache-2.0 | 7,150 |
package uk.gov.gds.ier.service.apiservice
import uk.gov.gds.common.model.LocalAuthority
import uk.gov.gds.ier.model._
/** A completed overseas-voter registration application, flattened to the
  * key/value map expected by the IER API by [[toApiMap]]. */
case class OverseasApplication(
    name: Option[Name],
    previousName: Option[PreviousName],
    dateLeftUk: Option[DateLeft],
    dateLeftSpecial: Option[DateLeftSpecial],
    overseasParentName: Option[OverseasParentName] = None,
    lastRegisteredToVote: Option[LastRegisteredToVote],
    dob: Option[DOB],
    nino: Option[Nino],
    address: Option[OverseasAddress],
    lastUkAddress: Option[Address] = None,
    parentsAddress: Option[Address] = None,
    openRegisterOptin: Option[Boolean],
    postalOrProxyVote: Option[PostalOrProxyVote],
    passport: Option[Passport],
    contact: Option[Contact],
    referenceNumber: Option[String],
    ip: Option[String],
    timeTaken: String,
    sessionId: String
) extends CompleteApplication {

  /** Flattens the application into the flat key/value map sent to the API.
    * Each optional section contributes its own prefixed keys when present.
    */
  def toApiMap = {
    // gssCode is taken from the last UK address when present, otherwise from
    // the parents' address.
    val authorityGssCodeSource = lastUkAddress.orElse(parentsAddress)
    // NOTE: the original code appended lastUkAddress.toApiMap("reg") twice;
    // the duplicate was redundant (identical key/value pairs) and is removed.
    val apiMap = Map.empty ++
      name.map(_.toApiMap("fn", "mn", "ln")).getOrElse(Map.empty) ++
      previousName.map(_.toApiMap("p")).getOrElse(Map.empty) ++
      lastRegisteredToVote.map(_.toApiMap).getOrElse(Map.empty) ++
      dateLeftUk.map(_.toApiMap()).getOrElse(Map.empty) ++
      dateLeftSpecial.map(_.toApiMap).getOrElse(Map.empty) ++
      overseasParentName.map(_.toApiMap("pgr")).getOrElse(Map.empty) ++
      nino.map(_.toApiMap).getOrElse(Map.empty) ++
      lastUkAddress.map(_.toApiMap("reg")).getOrElse(Map.empty) ++
      dob.map(_.toApiMap("dob")).getOrElse(Map.empty) ++
      address.map(_.toApiMap).getOrElse(Map.empty) ++
      parentsAddress.map(_.toApiMap("reg")).getOrElse(Map.empty) ++
      openRegisterOptin.map(open => Map("opnreg" -> open.toString)).getOrElse(Map.empty) ++
      postalOrProxyVote.map(_.toApiMap).getOrElse(Map.empty) ++
      passport.map(_.toApiMap).getOrElse(Map.empty) ++
      contact.map(_.toApiMap).getOrElse(Map.empty) ++
      referenceNumber.map(refNum => Map("refNum" -> refNum)).getOrElse(Map.empty) ++
      authorityGssCodeSource.flatMap(_.gssCode.map(gssCode => Map("gssCode" -> gssCode))).getOrElse(Map.empty) ++
      ip.map(ipAddress => Map("ip" -> ipAddress)).getOrElse(Map.empty) ++ Map(
        "applicationType" -> "overseas",
        "timeTaken" -> timeTaken,
        "webHash" -> sessionId
      )
    removeSpecialCharacters(apiMap)
  }
}
| michaeldfallen/ier-frontend | app/uk/gov/gds/ier/service/apiservice/OverseasApplication.scala | Scala | mit | 2,449 |
package knot.msgpack.serializers.collection
import knot.msgpack._
import scala.reflect.runtime.universe._
/** MessagePack serializer for `Map[A, B]`, delegating keys and values to the
  * serializers resolved for `A` and `B`.
  *
  * NOTE(review): `serialize` writes nil for a null map, but `deserialize`
  * always reads a map header — the asymmetry is preserved from the original;
  * confirm how callers handle nil payloads.
  */
class MapSerializer[A: TypeTag, B: TypeTag]() extends MsgPackSerializer[Map[A, B]] {

  // serializer for the map's keys
  protected[this] val keySer: MsgPackSerializer[A] = MsgPack().serializer[A]()

  // serializer for the map's values
  protected[this] val valueSer: MsgPackSerializer[B] = MsgPack().serializer[B]()

  /** Writes `obj` as a MessagePack map (nil when `obj` is null). */
  override def serialize(encoder: MsgPackEncoder, obj: Map[A, B]): Unit =
    if (obj == null) {
      encoder.out.putNil()
    } else {
      encoder.out.putMapHeader(obj.size)
      obj.foreach { case (key, value) =>
        keySer.serialize(encoder, key)
        valueSer.serialize(encoder, value)
      }
    }

  /** Reads a map header followed by that many key/value pairs. */
  override def deserialize(decoder: MsgPackDecoder): Map[A, B] = {
    val entryCount = decoder.in.getMapHeader()
    val builder = Map.newBuilder[A, B]
    var read = 0
    while (read < entryCount) {
      // key is decoded before its value, matching the wire order
      builder += keySer.deserialize(decoder) -> valueSer.deserialize(decoder)
      read += 1
    }
    builder.result()
  }
}
| defvar/knot | knot-msgpack/src/main/scala/knot/msgpack/serializers/collection/MapSerializer.scala | Scala | mit | 906 |
package lila.user
import scala._
/** ISO 3166-1 alpha-2 style country codes mapped to display names,
  * sorted by name for use in selection lists. */
object Countries {

  // (code, name) pairs; kept as a list to preserve the name-sorted order
  val all = List(
    "AD" -> "Andorra",
    "AE" -> "United Arab Emirates",
    "AF" -> "Afghanistan",
    "AG" -> "Antigua and Barbuda",
    "AL" -> "Albania",
    "AM" -> "Armenia",
    "AO" -> "Angola",
    "AR" -> "Argentina",
    "AT" -> "Austria",
    "AU" -> "Australia",
    "AZ" -> "Azerbaijan",
    "BA" -> "Bosnia-Herzegovina",
    "BB" -> "Barbados",
    "BD" -> "Bangladesh",
    "BE" -> "Belgium",
    "BF" -> "Burkina Faso",
    "BG" -> "Bulgaria",
    "BH" -> "Bahrain",
    "BI" -> "Burundi",
    "BJ" -> "Benin",
    "BN" -> "Brunei",
    "BO" -> "Bolivia",
    "BR" -> "Brazil",
    "BS" -> "Bahamas",
    "BT" -> "Bhutan",
    "BW" -> "Botswana",
    "BY" -> "Belarus",
    "BZ" -> "Belize",
    "CA" -> "Canada",
    "CD" -> "Congo (Democratic Rep.)",
    "CF" -> "Central African Republic",
    "CG" -> "Congo (Brazzaville)",
    "CH" -> "Switzerland",
    "CI" -> "Cote d'Ivoire",
    "CL" -> "Chile",
    "CM" -> "Cameroon",
    "CN" -> "China",
    "CO" -> "Colombia",
    "CR" -> "Costa Rica",
    "CU" -> "Cuba",
    "CV" -> "Cape Verde",
    "CY" -> "Cyprus",
    "CZ" -> "Czech Republic",
    "DE" -> "Germany",
    "DJ" -> "Djibouti",
    "DK" -> "Denmark",
    "DM" -> "Dominica",
    "DO" -> "Dominican Republic",
    "DZ" -> "Algeria",
    "EC" -> "Ecuador",
    "EE" -> "Estonia",
    "EG" -> "Egypt",
    "EH" -> "Western Sahara",
    "ER" -> "Eritrea",
    "ES" -> "Spain",
    "ET" -> "Ethiopia",
    "FI" -> "Finland",
    "FJ" -> "Fiji",
    "FM" -> "Micronesia",
    "FR" -> "France",
    "GA" -> "Gabon",
    "GB" -> "United Kingdom",
    "GD" -> "Grenada",
    "GE" -> "Georgia",
    "GH" -> "Ghana",
    "GL" -> "Greenland",
    "GM" -> "Gambia",
    "GN" -> "Guinea",
    "GQ" -> "Equatorial Guinea",
    "GR" -> "Greece",
    "GT" -> "Guatemala",
    "GW" -> "Guinea-Bissau",
    "GY" -> "Guyana",
    "HN" -> "Honduras",
    "HR" -> "Croatia",
    "HT" -> "Haiti",
    "HU" -> "Hungary",
    "ID" -> "Indonesia",
    "IE" -> "Ireland",
    "IL" -> "Israel",
    "IN" -> "India",
    "IQ" -> "Iraq",
    "IR" -> "Iran",
    "IS" -> "Iceland",
    "IT" -> "Italy",
    "JM" -> "Jamaica",
    "JO" -> "Jordan",
    "JP" -> "Japan",
    "KE" -> "Kenya",
    "KG" -> "Kyrgyzstan",
    "KH" -> "Cambodia",
    "KI" -> "Kiribati",
    "KM" -> "Comoros",
    "KN" -> "Saint Kitts and Nevis",
    "KP" -> "North Korea",
    "KR" -> "South Korea",
    "KW" -> "Kuwait",
    "KY" -> "Cayman Islands",
    "KZ" -> "Kazakhstan",
    "LA" -> "Laos",
    "LB" -> "Lebanon",
    "LC" -> "Saint Lucia",
    "LI" -> "Liechtenstein",
    "LK" -> "Sri Lanka",
    "LR" -> "Liberia",
    "LS" -> "Lesotho",
    "LT" -> "Lithuania",
    "LU" -> "Luxembourg",
    "LV" -> "Latvia",
    "LY" -> "Libya",
    "MA" -> "Morocco",
    "MD" -> "Moldova",
    "ME" -> "Montenegro",
    "MG" -> "Madagascar",
    "MH" -> "Marshall Islands",
    "MK" -> "Macedonia",
    "ML" -> "Mali",
    "MM" -> "Myanmar",
    "MN" -> "Mongolia",
    "MR" -> "Mauritania",
    "MT" -> "Malta",
    "MU" -> "Mauritius",
    "MV" -> "Maldives",
    "MW" -> "Malawi",
    "MX" -> "Mexico",
    "MY" -> "Malaysia",
    "MZ" -> "Mozambique",
    "NA" -> "Namibia",
    "NE" -> "Niger",
    "NG" -> "Nigeria",
    "NI" -> "Nicaragua",
    "NL" -> "Netherlands",
    "NO" -> "Norway",
    "NP" -> "Nepal",
    "NR" -> "Nauru",
    "NZ" -> "New Zealand",
    "OM" -> "Oman",
    "PA" -> "Panama",
    "PE" -> "Peru",
    "PG" -> "Papua New Guinea",
    "PH" -> "Philippines",
    "PK" -> "Pakistan",
    "PL" -> "Poland",
    "PS" -> "Palestine",
    "PT" -> "Portugal",
    "PW" -> "Palau",
    "PY" -> "Paraguay",
    "QA" -> "Qatar",
    "RO" -> "Romania",
    "RS" -> "Serbia",
    "RU" -> "Russia",
    "RW" -> "Rwanda",
    "SA" -> "Saudi Arabia",
    "SB" -> "Solomon Islands",
    "SC" -> "Seychelles",
    "SD" -> "Sudan",
    "SE" -> "Sweden",
    "SG" -> "Singapore",
    "SI" -> "Slovenia",
    "SK" -> "Slovakia",
    "SL" -> "Sierra Leone",
    "SM" -> "San Marino",
    "SN" -> "Senegal",
    "SO" -> "Somalia",
    "SR" -> "Suriname",
    "ST" -> "Sao Tome and Principe",
    "SV" -> "El Salvador",
    "SY" -> "Syria",
    "SZ" -> "Swaziland",
    "TD" -> "Chad",
    "TG" -> "Togo",
    "TH" -> "Thailand",
    "TJ" -> "Tajikistan",
    "TL" -> "Timor-Leste",
    "TM" -> "Turkmenistan",
    "TN" -> "Tunisia",
    "TO" -> "Tonga",
    "TR" -> "Turkey",
    "TT" -> "Trinidad and Tobago",
    "TV" -> "Tuvalu",
    "TW" -> "Taiwan",
    "TZ" -> "Tanzania",
    "UA" -> "Ukraine",
    "UG" -> "Uganda",
    "US" -> "United States",
    "UY" -> "Uruguay",
    "UZ" -> "Uzbekistan",
    "VC" -> "Saint Vincent and the Grenadines",
    "VE" -> "Venezuela",
    "VN" -> "Vietnam",
    "VU" -> "Vanuatu",
    "WS" -> "Samoa",
    "YE" -> "Yemen",
    "ZA" -> "South Africa",
    "ZM" -> "Zambia",
    "ZW" -> "Zimbabwe"
  ).sortBy(_._2)

  // lookup from country code to display name
  val map = all.toMap

  // the set of known country codes
  val codeSet = map.keySet

  /** Returns the (code, name) pair for `code`, if the code is known. */
  def info(code: String): Option[(String, String)] =
    map get code map { code -> _ }
}
| danilovsergey/i-bur | modules/user/src/main/Countries.scala | Scala | mit | 5,040 |
package com.cloudera.sa.apptrans.streaming.ingestion.solr
import java.text.SimpleDateFormat
import java.util.Date
import com.cloudera.sa.apptrans.common.SolrSupport
import com.cloudera.sa.apptrans.model.{AppEvent, AppEventBuilder}
import kafka.serializer.StringDecoder
import org.apache.solr.common.SolrInputDocument
import org.apache.solr.common.cloud.ZooKeeperException
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}
object SparkStreamingAppEventToSolR {

  /**
   * Entry point. Consumes AppEvent JSON messages from Kafka and indexes them
   * into a SolR collection via Spark Streaming.
   *
   * Expected args: <KafkaBrokerList> <kafkaTopicList> <checkpointDir>
   * <numberOfSeconds> <runLocal ("l" for a local master)> <solrCollection>
   * <zkHost>
   */
  def main(args: Array[String]): Unit = {
    println("Java Version:" + System.getProperty("java.version"))
    println("Java Home:" + System.getProperties().getProperty("java.home"))

    // Unused value; appears to exist only to force a compile-time dependency
    // on the ZooKeeper client classes. NOTE(review): confirm before removing.
    val v: ZooKeeperException = null

    // Guard against *any* missing argument, not just an empty argument list
    // (the original checked args.length == 0 and threw
    // ArrayIndexOutOfBoundsException on a partial invocation).
    if (args.length < 7) {
      println("Args: <KafkaBrokerList> " +
        "<kafkaTopicList> " +
        "<checkpointDir>" +
        "<numberOfSeconds>" +
        "<runLocal>" +
        "<solrCollection>" +
        "<zkHost>")
    } else {
      val kafkaBrokerList = args(0)
      val kafkaTopicList = args(1)
      val checkPointFolder = args(2)
      val numberOfSeconds = args(3).toInt
      val runLocal = args(4).equals("l")
      val solrCollection = args(5)
      val zkHost = args(6)

      println("kafkaBrokerList:" + kafkaBrokerList)
      println("kafkaTopicList:" + kafkaTopicList)
      println("numberOfSeconds:" + numberOfSeconds)
      println("runLocal:" + runLocal)
      println("solrCollection:" + solrCollection)
      println("zkHost:" + zkHost)

      val sc: SparkContext = if (runLocal) {
        val sparkConfig = new SparkConf()
        sparkConfig.set("spark.broadcast.compress", "false")
        sparkConfig.set("spark.shuffle.compress", "false")
        sparkConfig.set("spark.shuffle.spill.compress", "false")
        new SparkContext("local[2]", "TableStatsSinglePathMain", sparkConfig)
      } else {
        val sparkConf = new SparkConf().setAppName("Spark Streaming Ingestion to SolR")
        new SparkContext(sparkConf)
      }
      val ssc = new StreamingContext(sc, Seconds(numberOfSeconds))

      val topicsSet = kafkaTopicList.split(",").toSet
      val kafkaParams = Map[String, String]("metadata.broker.list" -> kafkaBrokerList)

      val messageStream = KafkaUtils.
        createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topicsSet)

      // the Kafka value carries the event payload; the key is unused
      val appEventDStream = messageStream.map { case (key, value) => AppEventBuilder.build(value) }

      val solrDocumentDStream = appEventDStream.map(convertToSolRDocuments)

      SolrSupport.indexDStreamOfDocs(zkHost,
        solrCollection,
        100,
        solrDocumentDStream)

      ssc.checkpoint(checkPointFolder)
      ssc.start()
      ssc.awaitTermination()
    }
  }

  /**
   * Maps an AppEvent onto a SolrInputDocument; the document id combines the
   * event type and event id.
   */
  def convertToSolRDocuments(appEvent: AppEvent): SolrInputDocument = {
    // The pattern's trailing 'Z' is a *literal* claiming Zulu/UTC time, so the
    // formatter must actually run in UTC. The original used the JVM default
    // time zone, producing local timestamps labelled as UTC.
    val dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")
    dateFormat.setTimeZone(java.util.TimeZone.getTimeZone("UTC"))

    val doc: SolrInputDocument = new SolrInputDocument
    doc.addField("id", appEvent.eventType + ":" + appEvent.eventId)
    doc.addField("account_id", appEvent.accountId)
    doc.addField("app_id", appEvent.appId)
    doc.addField("event_time_stamp", dateFormat.format(new Date(appEvent.eventTimestamp)))
    doc.addField("event_id", appEvent.eventId)
    doc.addField("event_type", appEvent.eventType)
    doc.addField("purchase", appEvent.purchase)
    doc.addField("payment_type", appEvent.paymentType)
    doc.addField("session_id", appEvent.sessionId)
    doc.addField("latitude", appEvent.latitude)
    doc.addField("longitude", appEvent.longitude)
    doc
  }
}
| tmalaska/AppTrans | src/main/scala/com/cloudera/sa/apptrans/streaming/ingestion/solr/SparkStreamingAppEventToSolR.scala | Scala | apache-2.0 | 3,551 |
/*
* scala-swing (https://www.scala-lang.org)
*
* Copyright EPFL, Lightbend, Inc., contributors
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.swing
package event
/** Event published when the source [[Window]] has been closed. */
case class WindowClosed(override val source: Window) extends WindowEvent(source)
| scala/scala-swing | src/main/scala/scala/swing/event/WindowClosed.scala | Scala | apache-2.0 | 423 |
package threesixty.ProcessingMethods.interpolation
import threesixty.data.metadata.{Resolution, Scaling}
import threesixty.data.{ProcessedData, TaggedDataPoint, InputDataSkeleton}
import threesixty.data.Data.{Identifier, Timestamp}
import threesixty.data.Implicits.timestamp2Long
import threesixty.data.tags.{Tag, Interpolated, Original}
import threesixty.processor.{ProcessingMixins, SingleProcessingMethod, ProcessingMethodCompanion, ProcessingStep}
import spray.json._
import DefaultJsonProtocol._
import threesixty.visualizer.VisualizationConfig
import threesixty.visualizer.visualizations.barChart.BarChartConfig
import threesixty.visualizer.visualizations.lineChart.LineChartConfig
import threesixty.visualizer.visualizations.pieChart.PieChartConfig
import threesixty.visualizer.visualizations.scatterChart.ScatterChartConfig
object LinearInterpolation extends ProcessingMethodCompanion with ProcessingMixins {

    trait Mixin extends ProcessingMixins {
        abstract override def processingInfos: Map[String, ProcessingMethodCompanion] =
            super.processingInfos + ("linearinterpolation" -> LinearInterpolation)
    }

    def name = "Linear Interpolation"

    def fromString: (String) => ProcessingStep = { json => apply(json).asProcessingStep }

    def usage =
        """ |Linear Interpolation(frequency, idmap), takes one additional argument.
          |Hereby the frequency is the desired max distance between two points in ms
          |The interpolated points will be tagged with interpolated,the others not
        """.stripMargin

    /** Deserializes a [[LinearInterpolation]] from its JSON representation. */
    def apply(jsonString: String): LinearInterpolation = {
        implicit val linearInterpolationFormat =
            jsonFormat(LinearInterpolation.apply, "frequency", "idMapping")
        jsonString.parseJson.convertTo[LinearInterpolation]
    }

    def default(idMapping: Map[Identifier, Identifier]): ProcessingStep =
        LinearInterpolation(1000, idMapping).asProcessingStep

    /** Scores how well this method suits the given input data. */
    def computeDegreeOfFit(inputData: InputDataSkeleton): Double = {
        val meta = inputData.metadata
        // Each satisfied criterion contributes its weight to the score.
        val weightedCriteria = Seq(
            (meta.scaling == Scaling.Ordinal)      -> 0.4,
            (meta.size >= 5)                       -> 0.2,
            (meta.size >= 50)                      -> 0.2, // overall 0.4, since >= 50 implies >= 5
            (meta.resolution == Resolution.High)   -> 0.1,
            (meta.resolution == Resolution.Middle) -> 0.2
        )
        weightedCriteria.collect { case (true, weight) => weight }.sum
    }

    /** Scores suitability for a concrete visualization by scaling the data score. */
    def computeDegreeOfFit(targetVisualization: VisualizationConfig, inputData: InputDataSkeleton): Double = {
        val visFactor = targetVisualization match {
            // well suited
            case _: LineChartConfig    => 1.0
            case _: BarChartConfig     => 0.8
            // poorly suited
            case _: ScatterChartConfig => 0.2
            case _: PieChartConfig     => 0.1
            // default
            case _                     => 0.5
        }
        computeDegreeOfFit(inputData) * visFactor
    }
}
/**
* Linear interpolator
*
* @author Thomas Weber
* @param frequency Desired max. time-distance between datapoints.
*/
case class LinearInterpolation(frequency: Int, idMapping: Map[Identifier, Identifier])
    extends SingleProcessingMethod {

    def companion: ProcessingMethodCompanion = LinearInterpolation

    /**
     * Creates a new dataset with ID as specified in idMapping.
     * Inserts interpolated values along the original ones into
     * this new dataset and adds tags to identify interpolated
     * and original values.
     *
     * @param data Data to interpolate
     * @return One element Set containing the new dataset
     */
    @throws[NoSuchElementException]("if data.id can not be found in idMapping")
    def apply(data: ProcessedData): Set[ProcessedData] = {

        /**
         * Interpolation function.
         * For each combination of two points it creates the linear
         * equation parameters m (slope) and b (offset).
         * It then generates the appropriate number of intermediary points
         * with the corresponding values and tags and inserts them into
         * the list of datapoints.
         *
         * @return list of datapoints with interpolated values and Interpolation-tags
         */
        def linearInterpolated: List[TaggedDataPoint] => List[TaggedDataPoint] = {
            case d1@TaggedDataPoint(t1, v1, tags1) :: (d2@TaggedDataPoint(t2, v2, tags2) :: ds) =>
                // Only interpolate when the gap between the two points exceeds the
                // target frequency; otherwise just tag both points as Original.
                if (t2 - t1 > frequency) {
                    // Line through the two points: slope m and intercept b.
                    val m = ((v2.value - v1.value) / (t2 - t1))
                    val b = v1.value - m * t1

                    def interpolFunc(x: Long): TaggedDataPoint =
                        TaggedDataPoint(new Timestamp(x), m * x + b, Set[Tag](Interpolated))

                    val diff: Int = (t2 - t1).toInt

                    // One interpolated point every `frequency` ms between t1 and t2.
                    TaggedDataPoint(t1, v1, tags1 + Original) ::
                        (for { i <- 1 to diff / frequency } yield { interpolFunc(t1 + i * frequency) }).toList ++
                        linearInterpolated(TaggedDataPoint(t2, v2, tags2 + Original) :: ds)

                } else {
                    TaggedDataPoint(t1, v1, tags1 + Original) ::
                        linearInterpolated(TaggedDataPoint(t2, v2, tags2 + Original) :: ds)
                }
            // Zero or one point left: nothing to interpolate.
            case otherwise => otherwise
        }

        // Interpolation assumes chronologically ordered points.
        val orderedDataPoints = data.dataPoints.sortBy(d => timestamp2Long(d.timestamp))
        val newID = idMapping(data.id)
        Set(data.copy(id = newID, dataPoints = linearInterpolated(orderedDataPoints)))
    }
}
| elordin/threesixty | src/main/scala/threesixty/ProcessingMethods/interpolation/LinearInterpolation.scala | Scala | mit | 5,695 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.scala.examples.iteration
import java.util.Random
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
/**
* Example illustrating iterations in Flink streaming.
*
* The program sums up random numbers and counts additions
* it performs to reach a specific threshold in an iterative streaming fashion.
*
* This example shows how to use:
*
* - streaming iterations,
* - buffer timeout to enhance latency,
* - directed outputs.
*
*/
object IterateExample {

  private val Bound = 100

  /**
   * Builds and runs the streaming iteration job: pairs of integers are iterated
   * through a Fibonacci step until either component exceeds [[Bound]]; the
   * output is the original pair plus the number of iterations taken.
   */
  def main(args: Array[String]): Unit = {

    // Checking input parameters
    val params = ParameterTool.fromArgs(args)

    // obtain execution environment and set setBufferTimeout to 1 to enable
    // continuous flushing of the output buffers (lowest latency)
    val env = StreamExecutionEnvironment.getExecutionEnvironment.setBufferTimeout(1)

    // make parameters available in the web interface
    env.getConfig.setGlobalJobParameters(params)

    // create input stream of integer pairs
    val inputStream: DataStream[(Int, Int)] =
      if (params.has("input")) {
        // map a list of strings to integer pairs
        env.readTextFile(params.get("input")).map { value: String =>
          val record = value.substring(1, value.length - 1)
          val splitted = record.split(",")
          (Integer.parseInt(splitted(0)), Integer.parseInt(splitted(1)))
        }
      } else {
        println("Executing Iterate example with default input data set.")
        println("Use --input to specify file input.")
        env.addSource(new RandomFibonacciSource)
      }

    def withinBound(value: (Int, Int)) = value._1 < Bound && value._2 < Bound

    // create an iterative data stream from the input with 5 second timeout
    val numbers: DataStream[((Int, Int), Int)] = inputStream
      // Map the inputs so that the next Fibonacci numbers can be calculated
      // while preserving the original input tuple
      // A counter is attached to the tuple and incremented in every iteration step
      .map(value => (value._1, value._2, value._1, value._2, 0))
      .iterate(
        (iteration: DataStream[(Int, Int, Int, Int, Int)]) => {
          // calculates the next Fibonacci number and increment the counter
          val step = iteration.map(value =>
            (value._1, value._2, value._4, value._3 + value._4, value._5 + 1))

          // testing which tuple needs to be iterated again
          // (explicit tuple construction: withinBound takes a single (Int, Int)
          // argument; relying on automatic argument tupling is deprecated)
          val feedback = step.filter(value => withinBound((value._3, value._4)))

          // giving back the input pair and the counter
          val output: DataStream[((Int, Int), Int)] = step
            .filter(value => !withinBound((value._3, value._4)))
            .map(value => ((value._1, value._2), value._5))
          (feedback, output)
        }
        // timeout after 5 seconds
        , 5000L
      )

    if (params.has("output")) {
      numbers.writeAsText(params.get("output"))
    } else {
      println("Printing result to stdout. Use --output to specify output path.")
      numbers.print()
    }

    env.execute("Streaming Iteration Example")
  }

  // *************************************************************************
  // USER FUNCTIONS
  // *************************************************************************

  /**
   * Generate BOUND number of random integer pairs from the range from 0 to BOUND/2
   */
  private class RandomFibonacciSource extends SourceFunction[(Int, Int)] {

    val rnd = new Random()

    var counter = 0
    @volatile var isRunning = true

    override def run(ctx: SourceContext[(Int, Int)]): Unit = {
      while (isRunning && counter < Bound) {
        val first = rnd.nextInt(Bound / 2 - 1) + 1
        val second = rnd.nextInt(Bound / 2 - 1) + 1

        ctx.collect((first, second))
        counter += 1
        Thread.sleep(50L)
      }
    }

    override def cancel(): Unit = isRunning = false
  }

}
| zimmermatt/flink | flink-examples/flink-examples-streaming/src/main/scala/org/apache/flink/streaming/scala/examples/iteration/IterateExample.scala | Scala | apache-2.0 | 4,982 |
package com.softwaremill.mqperf
import com.softwaremill.mqperf.config.TestConfig
import com.softwaremill.mqperf.mq.Mq
import com.softwaremill.mqperf.util.{Clock, PrometheusMetricServer, RealClock}
import com.typesafe.scalalogging.StrictLogging
import io.prometheus.client.{CollectorRegistry, Counter, Gauge, Histogram}
import scala.util.Random
object Sender {
  /**
   * Entry point for the sender side of the benchmark: starts the Prometheus
   * metrics server, instantiates the configured MQ, registers the
   * sent/threads-done/latency metrics, and runs the configured number of
   * sender threads to completion before closing the MQ.
   */
  def main(args: Array[String]): Unit = {
    import PrometheusMetricServer._
    withMetricsServerSync(CollectorRegistry.defaultRegistry) {
      println("Starting sender...")
      val testConfig = TestConfig.load()
      val mq = Mq.instantiate(testConfig)

      val labelValues = defaultLabelValues(testConfig, TestConfig.hostId)
      val c = Counter.build("mqperf_sent_total", "number of sent messages").labelNames(DefaultLabelNames: _*).register()
      val g = Gauge.build("mqperf_sent_threads_done", "number of sent threads done").labelNames(DefaultLabelNames: _*).register()
      g.labels(labelValues: _*).set(0)
      // Latency histogram buckets in milliseconds, denser at the low end.
      val h = Histogram.build("mqperf_send_latency_ms", "latency of sent messages")
        .buckets(0, 50, 100, 150, 200, 250, 300, 350, 400, 450, 500, 600, 700, 800, 900,
          1000, 1250, 1500, 1750,
          2000, 2500, 3000, 3500, 4000, 4500, 5000)
        .labelNames(DefaultLabelNames: _*).register()

      // A single runnable is shared by all threads; SenderRunnable.run keeps
      // its mutable state in locals, so sharing the instance is safe.
      val sr = new SenderRunnable(
        mq,
        testConfig.mqType, Msg.prefix(testConfig),
        testConfig.msgCountPerThread, testConfig.maxSendMsgBatchSize,
        c.labels(labelValues: _*), g.labels(labelValues: _*),
        h.labels(labelValues: _*)
      )

      val threads = (1 to testConfig.senderThreads).map { _ =>
        val t = new Thread(sr)
        t.start()
        t
      }
      threads.foreach(_.join())

      mq.close()
    }
  }
}
/**
 * Sends `msgCount` timestamped messages through the given MQ in randomly sized
 * batches, recording per-batch send latency and total sent count; increments
 * `sendDone` once on completion and always closes the sender.
 */
class SenderRunnable(mq: Mq, mqType: String,
    msgPrefix: String, msgCount: Int, maxSendMsgBatchSize: Int,
    sendCounter: Counter.Child, sendDone: Gauge.Child, sendLatency: Histogram.Child,
    clock: Clock = RealClock)
  extends Runnable with StrictLogging {

  override def run() = {
    val mqSender = mq.createSender()
    try {
      var remaining = msgCount
      logger.info(s"Sending $remaining messages")
      while (remaining > 0) {
        // Random batch size in [1, maxSendMsgBatchSize], capped by what is left.
        val batchSize = math.min(remaining, Random.nextInt(maxSendMsgBatchSize) + 1)
        val payload = Msg.addTimestamp(msgPrefix)
        logger.debug("Sending batch")
        val sendStart = clock.currentTimeMillis()
        mqSender.send(List.fill(batchSize)(payload))
        sendLatency.observe(clock.currentTimeMillis() - sendStart)
        remaining -= batchSize
        sendCounter.inc(batchSize)
      }
      sendDone.inc()
    } finally {
      mqSender.close()
    }
  }
}
| adamw/mqperf | src/main/scala/com/softwaremill/mqperf/Sender.scala | Scala | apache-2.0 | 2,728 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.hadoop.sources
import java.io.File
/**
* A collection of useful methods
*/
object Util {

  val ZIPKIN_TRACE_URL = "your.zipkin.url"

  /**
   * Returns whether or not a directory will contain data. A directory contains
   * data unless its name starts with '_' (e.g. Hadoop marker files such as
   * _SUCCESS or _logs).
   *
   * @param f a File
   * @return if the directory the file represents contains data
   */
  def isDataDir(f: File) = {
    // headOption guards against a File with an empty name, which previously
    // threw StringIndexOutOfBoundsException from charAt(0).
    f.getName.headOption.exists(_ != '_')
  }

  /**
   * Traverses a directory and applies a function to each file in the directory
   * @param func a function which takes a File and returns a Unit
   * @param f a File representing a directory and which we want to apply func on
   */
  def traverseFileTree(f: File)(func: File => Unit): Unit = {
    if (isDataDir(f)) {
      if (f.isDirectory) {
        val children = f.listFiles()
        for (child <- children) {
          traverseFileTree(child)(func)
        }
      } else {
        func(f)
      }
    }
  }

  /**
   * Sanitizes a service name so it can be used safely in a file or URL path:
   * trims surrounding whitespace and replaces '/' with '-'.
   * (The previous doc claimed an ".html" suffix was added; it is not.)
   *
   * @param s a service name
   * @return the sanitized service name
   */
  def toSafeHtmlName(s: String) = {
    s.trim().replace("/", "-")
  }
}
| dsias/zipkin | zipkin-hadoop-job-runner/src/main/scala/com/twitter/zipkin/hadoop/sources/Util.scala | Scala | apache-2.0 | 1,796 |
package org.littlewings.infinispan.query
import java.util.Objects
import org.hibernate.search.annotations.{Field, Indexed}
object Contents {
  /** Convenience factory that populates a new [[Contents]] with the given id and value. */
  def apply(id: String, value: String): Contents = {
    val c = new Contents
    c.id = id
    c.value = value
    c
  }
}
@Indexed
@SerialVersionUID(1L)
class Contents extends Serializable {
  // Both fields carry Hibernate Search @Field annotations so they are indexed.
  @Field
  var id: String = _

  @Field
  var value: String = _

  // NOTE(review): equals/hashCode are computed from mutable vars — instances
  // used as hash-map/set keys must not be mutated afterwards.
  override def hashCode: Int = Objects.hash(id, value)

  override def equals(o: Any): Boolean = o match {
    case other: Contents => Objects.equals(id, other.id) && Objects.equals(value, other.value)
    case _ => false
  }
}
| kazuhira-r/infinispan-getting-started | embedded-clustered-update-index/src/test/scala/org/littlewings/infinispan/query/Contents.scala | Scala | mit | 631 |
package com.twitter.finagle.client
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.{param, ServiceFactory, Stack, Stackable, StackBuilder}
import com.twitter.finagle.pool.{WatermarkPool, CachingPool, BufferingPool}
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.finagle.util.DefaultTimer
import com.twitter.util.{Timer, Duration}
object DefaultPool {
  // Roles identifying each pool layer within the client stack.
  implicit object Role extends Stack.Role("Pool") {
    val bufferingPool = Stack.Role("BufferingPool")
    val cachingPool = Stack.Role("CachingPool")
    val watermarkPool = Stack.Role("WatermarkPool")
  }

  /**
   * A class eligible for configuring a [[com.twitter.finagle.Stackable]]
   * default pool module.
   *
   * @param low The low watermark used in the Watermark pool. If there
   * is sufficient request concurrency, no fewer connections will be
   * maintained by the pool.
   *
   * @param high The high watermark. The pool will not maintain more
   * connections than this.
   *
   * @param bufferSize Specifies the size of the lock-free buffer in front of
   * the pool configuration. Skipped if 0.
   *
   * @param idleTime The amount of idle time for which a connection is
   * cached. This is applied to connections that number greater than
   * the low watermark but fewer than the high.
   *
   * @param maxWaiters The maximum number of connection requests that
   * are queued when the connection concurrency exceeds the high
   * watermark.
   */
  case class Param(low: Int, high: Int, bufferSize: Int, idleTime: Duration, maxWaiters: Int) {
    def mk(): (Param, Stack.Param[Param]) =
      (this, Param.param)
  }
  object Param {
    // Defaults: no minimum, unbounded maximum, no buffering, cache forever.
    implicit val param = Stack.Param(Param(0, Int.MaxValue, 0, Duration.Top, Int.MaxValue))
  }

  /**
   * A [[com.twitter.finagle.Stackable]] client connection pool.
   *
   * @see [[com.twitter.finagle.pool.BufferingPool]].
   * @see [[com.twitter.finagle.pool.WatermarkPool]].
   * @see [[com.twitter.finagle.pool.CachingPool]].
   */
  def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] =
    new Stack.Module[ServiceFactory[Req, Rep]] {
      import com.twitter.finagle.pool.{CachingPool, WatermarkPool, BufferingPool}

      val role = DefaultPool.Role
      val description = "Control client connection pool"
      val parameters = Seq(
        implicitly[Stack.Param[Param]],
        implicitly[Stack.Param[param.Stats]],
        implicitly[Stack.Param[param.Timer]]
      )

      def make(prms: Stack.Params, next: Stack[ServiceFactory[Req, Rep]]) = {
        val Param(low, high, bufferSize, idleTime, maxWaiters) = prms[Param]
        val param.Stats(statsReceiver) = prms[param.Stats]
        val param.Timer(timer) = prms[param.Timer]

        // Layers are pushed innermost-first: caching (if enabled), then
        // watermark, then buffering (if enabled) as the outermost layer.
        val stack = new StackBuilder[ServiceFactory[Req, Rep]](next)

        // Caching only makes sense when connections can idle between watermarks.
        if (idleTime > 0.seconds && high > low) {
          stack.push(
            Role.cachingPool,
            (sf: ServiceFactory[Req, Rep]) =>
              new CachingPool(sf, high - low, idleTime, timer, statsReceiver)
          )
        }

        stack.push(
          Role.watermarkPool,
          (sf: ServiceFactory[Req, Rep]) =>
            new WatermarkPool(sf, low, high, statsReceiver, maxWaiters)
        )

        if (bufferSize > 0) {
          stack.push(
            Role.bufferingPool,
            (sf: ServiceFactory[Req, Rep]) => new BufferingPool(sf, bufferSize)
          )
        }

        stack.result
      }
    }
}
/**
* Create a watermark pool backed by a caching pool. This is the
* default pooling setup of Finagle.
*
* @param low The low watermark used in the Watermark pool. If there
* is sufficient request concurrency, no fewer connections will be
* maintained by the pool.
*
* @param high The high watermark. The pool will not maintain more
* connections than this.
*
* @param bufferSize Specifies the size of the lock-free buffer in front of
* the pool configuration. Skipped if 0.
*
* @param idleTime The amount of idle time for which a connection is
* cached. This is applied to connections that number greater than
* the low watermark but fewer than the high.
*
* @param maxWaiters The maximum number of connection requests that
* are queued when the connection concurrency exceeds the high
* watermark.
*/
case class DefaultPool[Req, Rep](
  low: Int = 0,
  high: Int = Int.MaxValue,
  bufferSize: Int = 0,
  idleTime: Duration = Duration.Top,
  maxWaiters: Int = Int.MaxValue,
  timer: Timer = DefaultTimer)
  extends (StatsReceiver => Transformer[Req, Rep]) {
  // Returns a transformer wrapping the input factory in (inner to outer):
  // CachingPool (optional) -> WatermarkPool -> BufferingPool (optional).
  def apply(statsReceiver: StatsReceiver) = inputFactory => {
    // Caching only applies when connections can idle between the watermarks;
    // otherwise the factory passes through untouched.
    val factory =
      if (idleTime <= 0.seconds || high <= low) inputFactory
      else
        new CachingPool(inputFactory, high - low, idleTime, timer, statsReceiver)

    // NB: WatermarkPool conceals the first "low" closes from CachingPool, so that
    // CachingPool only caches the last "high - low", and WatermarkPool caches the first
    // "low".
    val pool = new WatermarkPool(factory, low, high, statsReceiver, maxWaiters)
    if (bufferSize <= 0) pool else new BufferingPool(pool, bufferSize)
  }
}
| luciferous/finagle | finagle-core/src/main/scala/com/twitter/finagle/client/DefaultPool.scala | Scala | apache-2.0 | 5,094 |
/**
* Copyright (C) 2018 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.xbl
import org.orbeon.dom.{Element, QName}
import org.orbeon.oxf.xforms.analysis.PartAnalysisForXblSupport
trait XBLSupport {

  /**
   * Decides whether a given element within an XBL-bound subtree should be kept.
   *
   * @param partAnalysisCtx analysis context for the part being processed
   * @param boundElement    element the XBL binding is attached to
   * @param directNameOpt   qualified name of the direct binding, if any
   * @param elem            candidate element under consideration
   * @return true to keep `elem`, false to discard it
   */
  def keepElement(
    partAnalysisCtx : PartAnalysisForXblSupport,
    boundElement    : Element,
    directNameOpt   : Option[QName],
    elem            : Element
  ): Boolean
}
| orbeon/orbeon-forms | xforms-compiler/jvm/src/main/scala/org/orbeon/oxf/xforms/xbl/XBLSupport.scala | Scala | lgpl-2.1 | 986 |
package p
import scala.language.experimental.macros
import scala.reflect.macros._
object Bar {
  /** Macro: expands at compile time to a string literal "Bar1: " + the argument's tree. */
  def printTree(arg: Any): Any = macro BarMacros.printTreeImpl
}
/** Macro bundle backing [[Bar.printTree]]. */
class BarMacros(val c: blackbox.Context) {
  import c.universe._
  // Produces a constant string literal; `arg` is rendered via Tree.toString.
  def printTreeImpl(arg: Tree): Tree = Literal(Constant("Bar1: " + arg))
}
| som-snytt/xsbt | sbt/src/sbt-test/source-dependencies/macro-log/macros/Bar.scala | Scala | bsd-3-clause | 303 |
package com.miraeclimate.climateconvergence.sst
import java.time.{LocalDate, Month}
/**
* Created by shawn on 14. 11. 18..
*/
/**
 * A single sea-surface temperature reading.
 *
 * @param dateIndex   days elapsed since the dataset epoch, 1800-01-01
 * @param lat         latitude of the reading
 * @param lng         longitude of the reading
 * @param temperature temperature value as recorded in the source data
 */
case class SeaSurfaceTemperature(dateIndex: Int, lat: Float, lng: Float, temperature: String) {

  /** ISO-8601 date string obtained by adding `dateIndex` days to 1800-01-01. */
  def date: String =
    LocalDate.of(1800, Month.JANUARY, 1).plusDays(dateIndex.toLong).toString
}
| practice/hycom-analysis | src/main/scala/com/miraeclimate/climateconvergence/sst/SeaSurfaceTemperature.scala | Scala | mit | 364 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.physical.stream
import org.apache.flink.table.planner.plan.`trait`.FlinkRelDistribution
import org.apache.flink.table.planner.plan.nodes.FlinkConventions
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalSort
import org.apache.flink.table.planner.plan.nodes.physical.stream.StreamPhysicalLimit
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
/**
* Rule that matches [[FlinkLogicalSort]] with empty sort fields,
* and converts it to [[StreamPhysicalLimit]].
*/
class StreamPhysicalLimitRule
  extends ConverterRule(
    classOf[FlinkLogicalSort],
    FlinkConventions.LOGICAL,
    FlinkConventions.STREAM_PHYSICAL,
    "StreamPhysicalLimitRule") {

  override def matches(call: RelOptRuleCall): Boolean = {
    val sort: FlinkLogicalSort = call.rel(0)
    // only matches Sort with empty sort fields (a pure limit/offset, no ordering)
    sort.getCollation.getFieldCollations.isEmpty
  }

  override def convert(rel: RelNode): RelNode = {
    val sort = rel.asInstanceOf[FlinkLogicalSort]
    val input = sort.getInput

    // require SINGLETON exchange: the input is forced onto a single partition
    val newTraitSet = input.getTraitSet
      .replace(FlinkConventions.STREAM_PHYSICAL)
      .replace(FlinkRelDistribution.SINGLETON)
    val newInput = RelOptRule.convert(input, newTraitSet)

    // create StreamPhysicalLimit, preserving the sort's offset and fetch
    val providedGlobalTraitSet = newTraitSet
    new StreamPhysicalLimit(
      rel.getCluster,
      providedGlobalTraitSet,
      newInput,
      sort.offset,
      sort.fetch)
  }
}
object StreamPhysicalLimitRule {
  /** Singleton instance used when registering this rule with the planner. */
  val INSTANCE: RelOptRule = new StreamPhysicalLimitRule
}
| wwjiang007/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/rules/physical/stream/StreamPhysicalLimitRule.scala | Scala | apache-2.0 | 2,510 |
package com.rasterfoundry.api
import java.net.{URI, URL, URLDecoder}
import java.nio.charset.StandardCharsets.UTF_8
import java.sql.Timestamp
import java.util.Date
import com.rasterfoundry.api.utils.Config
import com.rasterfoundry.common.S3
import com.rasterfoundry.datamodel.{Export, ExportOptions, User}
import io.circe.syntax._
package object exports extends Config {
  val s3Client = S3()

  // Lets APIs expecting List[String] accept the List[URL] results below.
  implicit def listUrlToListString(urls: List[URL]): List[String] =
    urls.map(_.toString)

  implicit class ExportOptionsMethods(exportOptions: ExportOptions) {

    /** Pre-signed URLs for objects under the export source; Nil for non-S3 schemes. */
    def getSignedUrls(): List[URL] = {
      (exportOptions.source.getScheme match {
        case "s3" | "s3a" | "s3n" =>
          Some(s3Client.getSignedUrls(exportOptions.source))
        case _ => None
      }).getOrElse(Nil)
    }

    /** Object keys under the export source; Nil for non-S3 schemes. */
    def getObjectKeys(): List[String] = {
      (exportOptions.source.getScheme match {
        case "s3" | "s3a" | "s3n" =>
          Some(s3Client.getObjectKeys(exportOptions.source))
        case _ => None
      }).getOrElse(Nil)
    }

    /** Pre-signed URL for a single object key under the export source. */
    def getSignedUrl(objectKey: String): URL = {
      val amazonURI = S3.createS3Uri(exportOptions.source + "/" + objectKey)
      val bucket: String = amazonURI.getBucket
      // Keys may be URL-encoded in the URI; decode before signing.
      val key: String = URLDecoder.decode(amazonURI.getKey, UTF_8.toString())

      (exportOptions.source.getScheme match {
        case "s3" | "s3a" | "s3n" => Some(s3Client.getSignedUrl(bucket, key))
        // NOTE(review): the new URL("") fallback throws MalformedURLException at
        // runtime — confirm non-S3 sources never reach this method.
        case _ => None
      }).getOrElse(new URL(""))
    }
  }

  implicit class UserMethods(user: User) {

    /**
     * Derives the user's default export source URI and, if not already present,
     * writes a small marker object to verify that RF has upload access.
     */
    def createDefaultExportSource(export: Export): URI = {
      val uri = user.getDefaultExportSource(export, dataBucket)
      val amazonURI = S3.createS3Uri(uri)
      val (bucket, key) = amazonURI.getBucket -> amazonURI.getKey
      val now = new Timestamp(new Date().getTime)
      if (!s3Client.doesObjectExist(bucket, s"${key}/RFUploadAccessTestFile")) {
        s3Client.putObjectString(
          dataBucket,
          s"${key}/RFUploadAccessTestFile",
          s"Allow Upload Access for RF: ${key} at ${now.toString}"
        )
      }
      uri
    }

    /** Fills in the export's source with the default when it is blank. */
    def updateDefaultExportSource(export: Export): Export = {
      val exportOptions =
        export.getExportOptions.map { exportOptions =>
          val source: URI =
            exportOptions.source match {
              case uri if uri.toString.trim != "" => uri
              case _                              => createDefaultExportSource(export)
            }
          exportOptions.copy(source = source)
        }
      export.copy(exportOptions = exportOptions.asJson)
    }
  }
}
| aaronxsu/raster-foundry | app-backend/api/src/main/scala/exports/package.scala | Scala | apache-2.0 | 2,604 |
package org.ciroque.ccr.core
import org.joda.time.DateTime
import spray.json.{JsString, JsValue, RootJsonFormat}
object CommonJsonFormatters {
  /** spray-json format that (de)serializes Joda [[DateTime]] values as ISO-8601 strings. */
  implicit object DateTimeFormatter extends RootJsonFormat[DateTime] {
    override def read(json: JsValue): DateTime = {
      json match {
        case JsString(string) => DateTime.parse(string)
        // Same exception type as before (so callers' handling is unchanged),
        // but with a descriptive message instead of the original "WTF".
        case _ => throw new IllegalArgumentException(s"Expected a JSON string for DateTime, got: $json")
      }
    }

    override def write(obj: DateTime): JsValue = JsString(obj.toString())
  }
}
| ciroque/central-configuration-repository | src/main/scala/org/ciroque/ccr/core/CommonJsonFormatters.scala | Scala | mit | 499 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.views.formatting
import play.twirl.api.Html
import uk.gov.hmrc.play.views.helpers.{MoneyPounds, RenderableMoneyMessage}
object Money {

  /**
   * Renders a monetary amount in pounds as HTML via [[RenderableMoneyMessage]].
   *
   * @param value the amount in pounds
   * @param decimalPlaces number of decimal places to render (default 0)
   */
  def pounds(value: BigDecimal, decimalPlaces: Int = 0) : Html = RenderableMoneyMessage(MoneyPounds(value, decimalPlaces)).render
}
| alexanderjamesking/play-ui | src/main/twirl/uk/gov/hmrc/play/views/formatting/Money.scala | Scala | apache-2.0 | 900 |
package com.codexica.s3crate.filetree
//TODO: sort events in a priority queue based on the time of modification
//TODO: make a new method on the FileSystem for watching for changes
/**
* A stream of events. Has no end. Will block if there are no more events.
* Returns events in order from highest priority to lowest priority, which is based on the modification date
* of the file (oldest should be synchronized first
* @author Josh Albrecht (joshalbrecht@gmail.com)
*/
class PathGenerator(allPaths: Set[FilePathEvent], files: ListenableFileTree) {
  // Single pass over the initial snapshot of events.
  // NOTE(review): `files` is currently unused — presumably reserved for the
  // change-watching described in the TODOs above; confirm before removing.
  private val iterator = allPaths.iterator

  /** True while the snapshot still has unvisited events. */
  def hasNext: Boolean = {
    iterator.hasNext
  }

  /** Returns the next event; throws NoSuchElementException when exhausted. */
  def next(): FilePathEvent = {
    iterator.next()
  }
}
| joshalbrecht/s3crate | src/main/scala/com/codexica/s3crate/filetree/PathGenerator.scala | Scala | mit | 711 |
/**
* Copyright (c) 2015, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.finance.ts
import org.apache.commons.math3.analysis.{MultivariateFunction, MultivariateVectorFunction}
import org.apache.commons.math3.optim.{MaxEval, MaxIter, InitialGuess, SimpleValueChecker}
import org.apache.commons.math3.optim.nonlinear.scalar.{ObjectiveFunction,
ObjectiveFunctionGradient}
import org.apache.commons.math3.optim.nonlinear.scalar.gradient.NonLinearConjugateGradientOptimizer
import org.apache.commons.math3.random.RandomGenerator
object GARCH {
  /**
   * Fits a GARCH(1, 1) model to the given time series.
   *
   * Maximizes the log likelihood with a Fletcher-Reeves nonlinear conjugate
   * gradient optimizer, using the analytic gradient from GARCHModel.gradient.
   * Parameters are read from the optimizer as (omega, alpha, beta).
   *
   * NOTE(review): the objective interprets params as (omega, alpha, beta) while
   * GARCHModel.gradient documents its result array as (alpha, beta, omega) —
   * confirm the two orderings are actually consistent.
   *
   * @param ts The time series to fit the model to.
   * @return The model and its log likelihood on the input data.
   */
  def fitModel(ts: Array[Double]): (GARCHModel, Double) = {
    val optimizer = new NonLinearConjugateGradientOptimizer(
      NonLinearConjugateGradientOptimizer.Formula.FLETCHER_REEVES,
      new SimpleValueChecker(1e-6, 1e-6))
    // Gradient of the log likelihood w.r.t. the three model parameters.
    val gradient = new ObjectiveFunctionGradient(new MultivariateVectorFunction() {
      def value(params: Array[Double]): Array[Double] = {
        new GARCHModel(params(0), params(1), params(2)).gradient(ts)
      }
    })
    val objectiveFunction = new ObjectiveFunction(new MultivariateFunction() {
      def value(params: Array[Double]): Double = {
        new GARCHModel(params(0), params(1), params(2)).logLikelihood(ts)
      }
    })
    val initialGuess = new InitialGuess(Array(.2, .2, .2)) // TODO: make this smarter
    val maxIter = new MaxIter(10000)
    val maxEval = new MaxEval(10000)
    val optimal = optimizer.optimize(objectiveFunction, gradient, initialGuess, maxIter, maxEval)
    val params = optimal.getPoint
    (new GARCHModel(params(0), params(1), params(2)), optimal.getValue)
  }
}
object ARGARCH {
  /**
   * Fits an AR(1) + GARCH(1, 1) model to the given time series.
   *
   * Not yet implemented: always throws [[UnsupportedOperationException]].
   *
   * @param ts The time series to fit the model to.
   * @return The model and its log likelihood on the input data.
   */
  def fitModel(ts: Array[Double]): (ARGARCHModel, Double) = {
    throw new UnsupportedOperationException()
  }
}
class GARCHModel(
    val omega: Double,
    val alpha: Double,
    val beta: Double) extends TimeSeriesModel {

  /**
   * Returns the log likelihood of the parameters on the given time series.
   *
   * Based on http://www.unc.edu/~jbhill/Bollerslev_GARCH_1986.pdf
   */
  def logLikelihood(ts: Array[Double]): Double = {
    var sum = 0.0
    iterateWithHAndEta(ts) { (i, h, eta, prevH, prevEta) =>
      sum += -.5 * math.log(h) - .5 * eta * eta / h
    }
    // Constant term: one -(1/2)log(2*pi) per observation visited (indices 1 until n).
    sum + -.5 * math.log(2 * math.Pi) * (ts.length - 1)
  }

  /**
   * Finds the gradient of the log likelihood with respect to the model parameters.
   *
   * Based on http://www.unc.edu/~jbhill/Bollerslev_GARCH_1986.pdf
   *
   * @return a 3-element array containing the gradient for the omega, alpha, and beta
   *         parameters, in that order -- matching the constructor's parameter order and
   *         therefore the parameter vector used by GARCH.fitModel's optimizer.
   */
  private[finance] def gradient(ts: Array[Double]): Array[Double] = {
    var omegaGradient = 0.0
    var alphaGradient = 0.0
    var betaGradient = 0.0
    var omegaDhdtheta = 0.0
    var alphaDhdtheta = 0.0
    var betaDhdtheta = 0.0
    iterateWithHAndEta(ts) { (i, h, eta, prevH, prevEta) =>
      // dh/dtheta is recursive because h(i) depends on h(i-1).
      omegaDhdtheta = 1 + beta * omegaDhdtheta
      alphaDhdtheta = prevEta * prevEta + beta * alphaDhdtheta
      betaDhdtheta = prevH + beta * betaDhdtheta
      val multiplier = (eta * eta / (h * h)) - (1 / h)
      omegaGradient += multiplier * omegaDhdtheta
      alphaGradient += multiplier * alphaDhdtheta
      betaGradient += multiplier * betaDhdtheta
    }
    // Fix: return in (omega, alpha, beta) order. The previous (alpha, beta, omega)
    // ordering disagreed with the (omega, alpha, beta) parameter vector built in
    // GARCH.fitModel, feeding the optimizer a permuted gradient.
    Array(omegaGradient * .5, alphaGradient * .5, betaGradient * .5)
  }

  /**
   * Drives `fn` over the series with the conditional-variance recursion
   * h(i) = omega + alpha * ts(i-1)^2 + beta * h(i-1), seeded with the model's
   * unconditional variance omega / (1 - alpha - beta).
   * `fn` receives (index, h, eta, prevH, prevEta) where eta = ts(i).
   */
  private def iterateWithHAndEta(ts: Array[Double])
      (fn: (Int, Double, Double, Double, Double) => Unit): Unit = {
    var prevH = omega / (1 - alpha - beta)
    var i = 1
    while (i < ts.length) {
      val h = omega + alpha * ts(i - 1) * ts(i - 1) + beta * prevH
      fn(i, h, ts(i), prevH, ts(i - 1))
      prevH = h
      i += 1
    }
  }

  /**
   * {@inheritDoc}
   */
  override def removeTimeDependentEffects(ts: Array[Double], dest: Array[Double]): Array[Double] = {
    var prevEta = ts(0)
    var prevVariance = omega / (1.0 - alpha - beta)
    dest(0) = prevEta / math.sqrt(prevVariance)
    for (i <- 1 until ts.length) {
      val variance = omega + alpha * prevEta * prevEta + beta * prevVariance
      val eta = ts(i)
      // Standardize each observation by its conditional standard deviation.
      dest(i) = eta / math.sqrt(variance)
      prevEta = eta
      prevVariance = variance
    }
    dest
  }

  /**
   * {@inheritDoc}
   */
  override def addTimeDependentEffects(ts: Array[Double], dest: Array[Double]): Array[Double] = {
    var prevVariance = omega / (1.0 - alpha - beta)
    var prevEta = ts(0) * math.sqrt(prevVariance)
    dest(0) = prevEta
    for (i <- 1 until ts.length) {
      val variance = omega + alpha * prevEta * prevEta + beta * prevVariance
      val standardizedEta = ts(i)
      // Re-scale each standardized innovation by its conditional standard deviation.
      val eta = standardizedEta * math.sqrt(variance)
      dest(i) = eta
      prevEta = eta
      prevVariance = variance
    }
    dest
  }

  /**
   * Samples a series of length n along with the conditional variance at each step.
   */
  private def sampleWithVariances(n: Int, rand: RandomGenerator): (Array[Double], Array[Double]) = {
    val ts = new Array[Double](n)
    val variances = new Array[Double](n)
    variances(0) = omega / (1 - alpha - beta)
    var eta = math.sqrt(variances(0)) * rand.nextGaussian()
    // Fix: record the initial observation. Previously ts(0) was left at its default
    // 0.0 even though the first innovation had already been drawn.
    ts(0) = eta
    for (i <- 1 until n) {
      variances(i) = omega + beta * variances(i - 1) + alpha * eta * eta
      eta = math.sqrt(variances(i)) * rand.nextGaussian()
      ts(i) = eta
    }
    (ts, variances)
  }

  /**
   * Samples a random time series of a given length with the properties of the model.
   *
   * @param n The length of the time series to sample.
   * @param rand The random generator used to generate the observations.
   * @return The sampled time series.
   */
  def sample(n: Int, rand: RandomGenerator): Array[Double] = sampleWithVariances(n, rand)._1
}
/**
 * A GARCH(1, 1) + AR(1) model, where
 *   y(i) = c + phi * y(i - 1) + eta(i),
 * and h(i), the variance of eta(i), is given by
 *   h(i) = omega + alpha * eta(i - 1) ** 2 + beta * h(i - 1)
 * (the recursion implemented below; the previous doc's "h(i - 1) ** 2" did not match
 * the code, which uses the previous variance unsquared).
 *
 * @param c The constant term.
 * @param phi The autoregressive term.
 * @param omega The constant term in the variance.
 * @param alpha The weight on the previous squared innovation in the variance.
 * @param beta The weight on the previous variance.
 */
class ARGARCHModel(
    val c: Double,
    val phi: Double,
    val omega: Double,
    val alpha: Double,
    val beta: Double) extends TimeSeriesModel {

  /**
   * {@inheritDoc}
   */
  override def removeTimeDependentEffects(ts: Array[Double], dest: Array[Double]): Array[Double] = {
    var prevEta = ts(0) - c
    var prevVariance = omega / (1.0 - alpha - beta)
    dest(0) = prevEta / math.sqrt(prevVariance)
    for (i <- 1 until ts.length) {
      val variance = omega + alpha * prevEta * prevEta + beta * prevVariance
      // Remove the AR(1) mean, then standardize by the conditional std deviation.
      val eta = ts(i) - c - phi * ts(i - 1)
      dest(i) = eta / math.sqrt(variance)
      prevEta = eta
      prevVariance = variance
    }
    dest
  }

  /**
   * {@inheritDoc}
   */
  override def addTimeDependentEffects(ts: Array[Double], dest: Array[Double]): Array[Double] = {
    var prevVariance = omega / (1.0 - alpha - beta)
    var prevEta = ts(0) * math.sqrt(prevVariance)
    dest(0) = c + prevEta
    for (i <- 1 until ts.length) {
      val variance = omega + alpha * prevEta * prevEta + beta * prevVariance
      val standardizedEta = ts(i)
      // Re-scale the innovation, then add the AR(1) mean back in.
      val eta = standardizedEta * math.sqrt(variance)
      dest(i) = c + phi * dest(i - 1) + eta
      prevEta = eta
      prevVariance = variance
    }
    dest
  }

  /**
   * Samples a series of length n along with the conditional variance at each step.
   */
  private def sampleWithVariances(n: Int, rand: RandomGenerator): (Array[Double], Array[Double]) = {
    val ts = new Array[Double](n)
    val variances = new Array[Double](n)
    variances(0) = omega / (1 - alpha - beta)
    var eta = math.sqrt(variances(0)) * rand.nextGaussian()
    // Fix: seed the AR recursion. Previously ts(0) was left at its default 0.0 even
    // though the loop below reads ts(i - 1), silently biasing the first sampled values.
    ts(0) = c + eta
    for (i <- 1 until n) {
      variances(i) = omega + beta * variances(i - 1) + alpha * eta * eta
      eta = math.sqrt(variances(i)) * rand.nextGaussian()
      ts(i) = c + phi * ts(i - 1) + eta
    }
    (ts, variances)
  }

  /**
   * Samples a random time series of a given length with the properties of the model.
   *
   * @param n The length of the time series to sample.
   * @param rand The random generator used to generate the observations.
   * @return The sampled time series.
   */
  def sample(n: Int, rand: RandomGenerator): Array[Double] = sampleWithVariances(n, rand)._1
}
class EGARCHModel(
    val omega: Double,
    val alpha: Double,
    val beta: Double) extends TimeSeriesModel {

  /**
   * Returns the log likelihood of the parameters on the given time series.
   *
   * Based on http://swopec.hhs.se/hastef/papers/hastef0564.pdf
   *
   * Not implemented yet: always throws [[UnsupportedOperationException]].
   */
  def logLikelihood(ts: Array[Double]): Double =
    throw new UnsupportedOperationException()

  /** Not implemented yet: always throws [[UnsupportedOperationException]]. */
  override def removeTimeDependentEffects(ts: Array[Double], dest: Array[Double]): Array[Double] =
    throw new UnsupportedOperationException()

  /** Not implemented yet: always throws [[UnsupportedOperationException]]. */
  override def addTimeDependentEffects(ts: Array[Double], dest: Array[Double]): Array[Double] =
    throw new UnsupportedOperationException()
}
| helio9cn/spark-finance | src/main/scala/com/cloudera/finance/ts/GARCH.scala | Scala | apache-2.0 | 9,619 |
/**
* Copyright 2009 Jorge Ortiz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**/
package com.github.nscala_time.time
import org.joda.time._
object DurationBuilder {
  /** Wraps the given [[Period]] in a [[DurationBuilder]]. */
  def apply(underlying: Period): DurationBuilder = new DurationBuilder(underlying)
}
// Duration Builder
class DurationBuilder(val underlying: Period) extends Super {
// DurationBuilder + DurationBuilder = DurationBuilder
// This is the only operation that can maintain a DurationBuilder
// Everything else kicks us out to DateTime, Duration, or Period
def +(that: DurationBuilder): DurationBuilder =
DurationBuilder(this.underlying.plus(that.underlying))
def -(that: DurationBuilder): DurationBuilder =
DurationBuilder(this.underlying.minus(that.underlying))
def ago(): DateTime =
StaticDateTime.now.minus(underlying)
def later(): DateTime =
StaticDateTime.now.plus(underlying)
def from(dt: DateTime): DateTime =
dt.plus(underlying)
def before(dt: DateTime): DateTime =
dt.minus(underlying)
def standardDuration: Duration =
underlying.toStandardDuration
def toDuration: Duration =
underlying.toStandardDuration
def toPeriod: Period =
underlying
def -(period: ReadablePeriod): Period =
underlying.minus(period)
def +(period: ReadablePeriod): Period =
underlying.plus(period)
def millis: Long =
underlying.toStandardDuration.getMillis
def seconds: Long =
underlying.toStandardDuration.getStandardSeconds
def -(amount: Long): Duration =
underlying.toStandardDuration.minus(amount)
def -(amount: ReadableDuration): Duration =
underlying.toStandardDuration.minus(amount)
def +(amount: Long): Duration =
underlying.toStandardDuration.plus(amount)
def +(amount: ReadableDuration): Duration =
underlying.toStandardDuration.plus(amount)
}
| beni55/nscala-time | src/main/scala/com/github/nscala_time/time/DurationBuilder.scala | Scala | apache-2.0 | 2,332 |
package lib.reasoner.lexical
import scala.util.parsing.combinator.lexical.{ Lexical => ScalaLexical }
import scala.util.parsing.input.CharArrayReader.EofCh
import scala.util.parsing.input.CharSequenceReader
object Lexical extends ScalaLexical with Tokens {
  /** Returns a lexical scanner for the given char sequence. */
  def apply(c: CharSequence): Scanner = new Scanner(new CharSequenceReader(c))
  /** A parser that produces a token (from a stream of characters). */
  // Alternatives are tried in order, so the "unclosed string literal" failures only
  // fire after a well-formed quoted literal (or EOF) has failed to match.
  def token: Parser[Token] =
    ( identChar ~ rep(identChar | digit) ^^ { case first ~ rest => processIdent(first :: rest mkString "") }
    | digit ~ rep(digit) ^^ { case first ~ rest => NumericLit(first :: rest mkString "") }
    | '\'' ~ rep(chrExcept('\'', '\n', EofCh)) ~ '\'' ^^ { case '\'' ~ chars ~ '\'' => StringLit(chars mkString "") }
    | '\"' ~ rep(chrExcept('\"', '\n', EofCh)) ~ '\"' ^^ { case '\"' ~ chars ~ '\"' => StringLit(chars mkString "") }
    | EofCh ^^^ EOF
    | '\'' ~> failure("unclosed string literal")
    | '\"' ~> failure("unclosed string literal")
    | delimiter
    | failure("illegal character"))
  /** Returns the legal identifier chars, except digits. */
  def identChar = letter
  /** A parser for white-space -- its result will be discarded. */
  // Both /* block */ and // line comments are swallowed as whitespace.
  def whitespace: Parser[Any] = rep[Any](
    whitespaceChar
    | '/' ~ '*' ~ comment
    | '/' ~ '/' ~ rep(chrExcept(EofCh, '\n'))
    | '/' ~ '*' ~ failure("unclosed comment"))
  // Consumes characters until the closing "*/" (recursive descent; nested block
  // comments are not supported -- the first "*/" ends the comment).
  protected def comment: Parser[Any] =
    ( '*' ~ '/' ^^ { case _ => ' ' }
    | chrExcept(EofCh) ~ comment)
  // Classifies a scanned identifier: reserved word -> Keyword (lower-cased),
  // leading uppercase -> ClassName, otherwise plain Identifier.
  // NOTE(review): the `reserved contains name` lookup is case-sensitive while the
  // emitted Keyword is lower-cased -- confirm `reserved` holds lower-case entries only.
  protected def processIdent(name: String) =
    if(reserved contains name) Keyword(name.toLowerCase)
    else if(name.head.isUpper) ClassName(name)
    else Identifier(name)
  // Turns every registered delimiter string into a parser and chains them with `|`.
  // NOTE(review): foldRight with `(x, y) => y | x` means alternatives are attempted in
  // reverse list order -- verify delimiters are registered so longer tokens match first.
  protected lazy val delimiter: Parser[Token] =
    delimiters.toList.map(s => accept(s.toList) ^^ { x => Keyword(s) })
      .foldRight(failure("no matching delimiter"): Parser[Token])((x, y) => y | x)
} | fynnfeldpausch/frame | app/lib/reasoner/lexical/Lexical.scala | Scala | mit | 2,081 |
/**
* Copyright 2014 Dropbox, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package djinni
import java.io.{IOException, FileNotFoundException, FileInputStream, InputStreamReader, File, BufferedWriter, FileWriter}
import djinni.generatorTools._
object Main {
  // Command-line entry point for the djinni generator: parse options, read and
  // resolve the IDL file, then emit code for each enabled target language.
  def main(args: Array[String]) {
    // --- Option storage. Defaults live here; the scopt callbacks below mutate them. ---
    var idlFile: File = null
    var idlIncludePaths: List[String] = List("")
    var cppOutFolder: Option[File] = None
    var cppNamespace: String = ""
    var cppIncludePrefix: String = ""
    var cppExtendedRecordIncludePrefix: String = ""
    var cppFileIdentStyle: IdentConverter = IdentStyle.underLower
    var cppOptionalTemplate: String = "std::optional"
    var cppOptionalHeader: String = "<optional>"
    var cppEnumHashWorkaround : Boolean = true
    var cppNnHeader: Option[String] = None
    var cppNnType: Option[String] = None
    var cppNnCheckExpression: Option[String] = None
    var cppUseWideStrings: Boolean = false
    var javaOutFolder: Option[File] = None
    var javaPackage: Option[String] = None
    var javaClassAccessModifier: JavaAccessModifier.Value = JavaAccessModifier.Public
    var javaCppException: Option[String] = None
    var javaAnnotation: Option[String] = None
    var javaNullableAnnotation: Option[String] = None
    var javaNonnullAnnotation: Option[String] = None
    var javaImplementAndroidOsParcelable : Boolean = false
    var javaUseFinalForRecord: Boolean = true
    var jniOutFolder: Option[File] = None
    var jniHeaderOutFolderOptional: Option[File] = None
    var jniNamespace: String = "djinni_generated"
    var jniClassIdentStyleOptional: Option[IdentConverter] = None
    var jniIncludePrefix: String = ""
    var jniIncludeCppPrefix: String = ""
    var jniFileIdentStyleOptional: Option[IdentConverter] = None
    var jniBaseLibClassIdentStyleOptional: Option[IdentConverter] = None
    var jniBaseLibIncludePrefix: String = ""
    var cppHeaderOutFolderOptional: Option[File] = None
    var cppExt: String = "cpp"
    var cppHeaderExt: String = "hpp"
    var javaIdentStyle = IdentStyle.javaDefault
    var cppIdentStyle = IdentStyle.cppDefault
    var cppTypeEnumIdentStyle: IdentConverter = null
    var objcOutFolder: Option[File] = None
    var objcppOutFolder: Option[File] = None
    var objcppExt: String = "mm"
    var objcHeaderExt: String = "h"
    var objcIdentStyle = IdentStyle.objcDefault
    var objcTypePrefix: String = ""
    var objcIncludePrefix: String = ""
    var objcExtendedRecordIncludePrefix: String = ""
    var objcSwiftBridgingHeader: Option[String] = None
    var objcppIncludePrefix: String = ""
    var objcppIncludeCppPrefix: String = ""
    var objcppIncludeObjcPrefixOptional: Option[String] = None
    var objcFileIdentStyleOptional: Option[IdentConverter] = None
    var objcppNamespace: String = "djinni_generated"
    var objcBaseLibIncludePrefix: String = ""
    var inFileListPath: Option[File] = None
    var outFileListPath: Option[File] = None
    var skipGeneration: Boolean = false
    var yamlOutFolder: Option[File] = None
    var yamlOutFile: Option[String] = None
    var yamlPrefix: String = ""
    // --- Command-line parser. Each opt's callback writes into the vars above. ---
    val argParser = new scopt.OptionParser[Unit]("djinni") {
      // Registers a string option whose value is an identifier-style spec
      // (e.g. "FooBar", "foo_bar"); invalid specs are reported as parse failures.
      def identStyle(optionName: String, update: IdentConverter => Unit) = {
        opt[String](optionName).valueName("...").foreach(spec =>
          IdentStyle.infer(spec) match {
            case None => failure("invalid ident spec: \\"" + spec + "\\"")
            case Some(func) => update(func)
          }
        )
      }
      override def showUsageOnError = false
      help("help")
      opt[File]("idl").valueName("<in-file>").required().foreach(idlFile = _)
        .text("The IDL file with the type definitions, typically with extension \\".djinni\\".")
      opt[String]("idl-include-path").valueName("<path> ...").optional().unbounded().foreach(x => idlIncludePaths = idlIncludePaths :+ x)
        .text("An include path to search for Djinni @import directives. Can specify multiple paths.")
      note("")
      // Java generator options.
      opt[File]("java-out").valueName("<out-folder>").foreach(x => javaOutFolder = Some(x))
        .text("The output for the Java files (Generator disabled if unspecified).")
      opt[String]("java-package").valueName("...").foreach(x => javaPackage = Some(x))
        .text("The package name to use for generated Java classes.")
      opt[JavaAccessModifier.Value]("java-class-access-modifier").valueName("<public/package>").foreach(x => javaClassAccessModifier = x)
        .text("The access modifier to use for generated Java classes (default: public).")
      opt[String]("java-cpp-exception").valueName("<exception-class>").foreach(x => javaCppException = Some(x))
        .text("The type for translated C++ exceptions in Java (default: java.lang.RuntimeException that is not checked)")
      opt[String]("java-annotation").valueName("<annotation-class>").foreach(x => javaAnnotation = Some(x))
        .text("Java annotation (@Foo) to place on all generated Java classes")
      opt[String]("java-nullable-annotation").valueName("<nullable-annotation-class>").foreach(x => javaNullableAnnotation = Some(x))
        .text("Java annotation (@Nullable) to place on all fields and return values that are optional")
      opt[String]("java-nonnull-annotation").valueName("<nonnull-annotation-class>").foreach(x => javaNonnullAnnotation = Some(x))
        .text("Java annotation (@Nonnull) to place on all fields and return values that are not optional")
      opt[Boolean]("java-implement-android-os-parcelable").valueName("<true/false>").foreach(x => javaImplementAndroidOsParcelable = x)
        .text("all generated java classes will implement the interface android.os.Parcelable")
      opt[Boolean]("java-use-final-for-record").valueName("<use-final-for-record>").foreach(x => javaUseFinalForRecord = x)
        .text("Whether generated Java classes for records should be marked 'final' (default: true). ")
      note("")
      // C++ generator options.
      opt[File]("cpp-out").valueName("<out-folder>").foreach(x => cppOutFolder = Some(x))
        .text("The output folder for C++ files (Generator disabled if unspecified).")
      opt[File]("cpp-header-out").valueName("<out-folder>").foreach(x => cppHeaderOutFolderOptional = Some(x))
        .text("The output folder for C++ header files (default: the same as --cpp-out).")
      opt[String]("cpp-include-prefix").valueName("<prefix>").foreach(cppIncludePrefix = _)
        .text("The prefix for #includes of header files from C++ files.")
      opt[String]("cpp-namespace").valueName("...").foreach(x => cppNamespace = x)
        .text("The namespace name to use for generated C++ classes.")
      opt[String]("cpp-ext").valueName("<ext>").foreach(cppExt = _)
        .text("The filename extension for C++ files (default: \\"cpp\\").")
      opt[String]("hpp-ext").valueName("<ext>").foreach(cppHeaderExt = _)
        .text("The filename extension for C++ header files (default: \\"hpp\\").")
      opt[String]("cpp-optional-template").valueName("<template>").foreach(x => cppOptionalTemplate = x)
        .text("The template to use for optional values (default: \\"std::optional\\")")
      opt[String]("cpp-optional-header").valueName("<header>").foreach(x => cppOptionalHeader = x)
        .text("The header to use for optional values (default: \\"<optional>\\")")
      opt[Boolean]("cpp-enum-hash-workaround").valueName("<true/false>").foreach(x => cppEnumHashWorkaround = x)
        .text("Work around LWG-2148 by generating std::hash specializations for C++ enums (default: true)")
      opt[String]("cpp-nn-header").valueName("<header>").foreach(x => cppNnHeader = Some(x))
        .text("The header to use for non-nullable pointers")
      opt[String]("cpp-nn-type").valueName("<header>").foreach(x => cppNnType = Some(x))
        .text("The type to use for non-nullable pointers (as a substitute for std::shared_ptr)")
      opt[String]("cpp-nn-check-expression").valueName("<header>").foreach(x => cppNnCheckExpression = Some(x))
        .text("The expression to use for building non-nullable pointers")
      opt[Boolean]( "cpp-use-wide-strings").valueName("<true/false>").foreach(x => cppUseWideStrings = x)
        .text("Use wide strings in C++ code (default: false)")
      note("")
      // JNI generator options.
      opt[File]("jni-out").valueName("<out-folder>").foreach(x => jniOutFolder = Some(x))
        .text("The folder for the JNI C++ output files (Generator disabled if unspecified).")
      opt[File]("jni-header-out").valueName("<out-folder>").foreach(x => jniHeaderOutFolderOptional = Some(x))
        .text("The folder for the JNI C++ header files (default: the same as --jni-out).")
      opt[String]("jni-include-prefix").valueName("<prefix>").foreach(jniIncludePrefix = _)
        .text("The prefix for #includes of JNI header files from JNI C++ files.")
      opt[String]("jni-include-cpp-prefix").valueName("<prefix>").foreach(jniIncludeCppPrefix = _)
        .text("The prefix for #includes of the main header files from JNI C++ files.")
      opt[String]("jni-namespace").valueName("...").foreach(x => jniNamespace = x)
        .text("The namespace name to use for generated JNI C++ classes.")
      opt[String]("jni-base-lib-include-prefix").valueName("...").foreach(x => jniBaseLibIncludePrefix = x)
        .text("The JNI base library's include path, relative to the JNI C++ classes.")
      note("")
      // Objective-C generator options.
      opt[File]("objc-out").valueName("<out-folder>").foreach(x => objcOutFolder = Some(x))
        .text("The output folder for Objective-C files (Generator disabled if unspecified).")
      opt[String]("objc-h-ext").valueName("<ext>").foreach(objcHeaderExt = _)
        .text("The filename extension for Objective-C[++] header files (default: \\"h\\")")
      opt[String]("objc-type-prefix").valueName("<pre>").foreach(objcTypePrefix = _)
        .text("The prefix for Objective-C data types (usually two or three letters)")
      opt[String]("objc-include-prefix").valueName("<prefix>").foreach(objcIncludePrefix = _)
        .text("The prefix for #import of header files from Objective-C files.")
      opt[String]("objc-swift-bridging-header").valueName("<name>").foreach(x => objcSwiftBridgingHeader = Some(x))
        .text("The name of Objective-C Bridging Header used in XCode's Swift projects.")
      note("")
      // Objective-C++ bridge generator options.
      opt[File]("objcpp-out").valueName("<out-folder>").foreach(x => objcppOutFolder = Some(x))
        .text("The output folder for private Objective-C++ files (Generator disabled if unspecified).")
      opt[String]("objcpp-ext").valueName("<ext>").foreach(objcppExt = _)
        .text("The filename extension for Objective-C++ files (default: \\"mm\\")")
      opt[String]("objcpp-include-prefix").valueName("<prefix>").foreach(objcppIncludePrefix = _)
        .text("The prefix for #import of Objective-C++ header files from Objective-C++ files.")
      opt[String]("objcpp-include-cpp-prefix").valueName("<prefix>").foreach(objcppIncludeCppPrefix = _)
        .text("The prefix for #include of the main C++ header files from Objective-C++ files.")
      opt[String]("objcpp-include-objc-prefix").valueName("<prefix>").foreach(x => objcppIncludeObjcPrefixOptional = Some(x))
        .text("The prefix for #import of the Objective-C header files from Objective-C++ files (default: the same as --objcpp-include-prefix)")
      opt[String]("cpp-extended-record-include-prefix").valueName("<prefix>").foreach(cppExtendedRecordIncludePrefix = _)
        .text("The prefix path for #include of the extended record C++ header (.hpp) files")
      opt[String]("objc-extended-record-include-prefix").valueName("<prefix>").foreach(objcExtendedRecordIncludePrefix = _)
        .text("The prefix path for #import of the extended record Objective-C header (.h) files")
      opt[String]("objcpp-namespace").valueName("<prefix>").foreach(objcppNamespace = _)
        .text("The namespace name to use for generated Objective-C++ classes.")
      opt[String]("objc-base-lib-include-prefix").valueName("...").foreach(x => objcBaseLibIncludePrefix = x)
        .text("The Objective-C++ base library's include path, relative to the Objective-C++ classes.")
      note("")
      // YAML (type metadata) output options.
      opt[File]("yaml-out").valueName("<out-folder>").foreach(x => yamlOutFolder = Some(x))
        .text("The output folder for YAML files (Generator disabled if unspecified).")
      opt[String]("yaml-out-file").valueName("<out-file>").foreach(x => yamlOutFile = Some(x))
        .text("If specified all types are merged into a single YAML file instead of generating one file per type (relative to --yaml-out).")
      opt[String]("yaml-prefix").valueName("<pre>").foreach(yamlPrefix = _)
        .text("The prefix to add to type names stored in YAML files (default: \\"\\").")
      note("")
      opt[File]("list-in-files").valueName("<list-in-files>").foreach(x => inFileListPath = Some(x))
        .text("Optional file in which to write the list of input files parsed.")
      opt[File]("list-out-files").valueName("<list-out-files>").foreach(x => outFileListPath = Some(x))
        .text("Optional file in which to write the list of output files produced.")
      opt[Boolean]("skip-generation").valueName("<true/false>").foreach(x => skipGeneration = x)
        .text("Way of specifying if file generation should be skipped (default: false)")
      note("\\nIdentifier styles (ex: \\"FooBar\\", \\"fooBar\\", \\"foo_bar\\", \\"FOO_BAR\\", \\"m_fooBar\\")\\n")
      // Per-language identifier style overrides.
      identStyle("ident-java-enum", c => { javaIdentStyle = javaIdentStyle.copy(enum = c) })
      identStyle("ident-java-field", c => { javaIdentStyle = javaIdentStyle.copy(field = c) })
      identStyle("ident-java-type", c => { javaIdentStyle = javaIdentStyle.copy(ty = c) })
      identStyle("ident-cpp-enum", c => { cppIdentStyle = cppIdentStyle.copy(enum = c) })
      identStyle("ident-cpp-field", c => { cppIdentStyle = cppIdentStyle.copy(field = c) })
      identStyle("ident-cpp-method", c => { cppIdentStyle = cppIdentStyle.copy(method = c) })
      identStyle("ident-cpp-type", c => { cppIdentStyle = cppIdentStyle.copy(ty = c) })
      identStyle("ident-cpp-enum-type", c => { cppTypeEnumIdentStyle = c })
      identStyle("ident-cpp-type-param", c => { cppIdentStyle = cppIdentStyle.copy(typeParam = c) })
      identStyle("ident-cpp-local", c => { cppIdentStyle = cppIdentStyle.copy(local = c) })
      identStyle("ident-cpp-file", c => { cppFileIdentStyle = c })
      identStyle("ident-jni-class", c => { jniClassIdentStyleOptional = Some(c)})
      identStyle("ident-jni-file", c => { jniFileIdentStyleOptional = Some(c)})
      identStyle("ident-objc-enum", c => { objcIdentStyle = objcIdentStyle.copy(enum = c) })
      identStyle("ident-objc-field", c => { objcIdentStyle = objcIdentStyle.copy(field = c) })
      identStyle("ident-objc-method", c => { objcIdentStyle = objcIdentStyle.copy(method = c) })
      identStyle("ident-objc-type", c => { objcIdentStyle = objcIdentStyle.copy(ty = c) })
      identStyle("ident-objc-type-param", c => { objcIdentStyle = objcIdentStyle.copy(typeParam = c) })
      identStyle("ident-objc-local", c => { objcIdentStyle = objcIdentStyle.copy(local = c) })
      identStyle("ident-objc-file", c => { objcFileIdentStyleOptional = Some(c) })
    }
    if (!argParser.parse(args)) {
      System.exit(1); return
    }
    // --- Derive defaults that depend on other options. ---
    val cppHeaderOutFolder = if (cppHeaderOutFolderOptional.isDefined) cppHeaderOutFolderOptional else cppOutFolder
    val jniHeaderOutFolder = if (jniHeaderOutFolderOptional.isDefined) jniHeaderOutFolderOptional else jniOutFolder
    val jniClassIdentStyle = jniClassIdentStyleOptional.getOrElse(cppIdentStyle.ty)
    val jniBaseLibClassIdentStyle = jniBaseLibClassIdentStyleOptional.getOrElse(jniClassIdentStyle)
    val jniFileIdentStyle = jniFileIdentStyleOptional.getOrElse(cppFileIdentStyle)
    var objcFileIdentStyle = objcFileIdentStyleOptional.getOrElse(objcIdentStyle.ty)
    val objcppIncludeObjcPrefix = objcppIncludeObjcPrefixOptional.getOrElse(objcppIncludePrefix)
    // Add ObjC prefix to identstyle
    objcIdentStyle = objcIdentStyle.copy(ty = IdentStyle.prefix(objcTypePrefix,objcIdentStyle.ty))
    objcFileIdentStyle = IdentStyle.prefix(objcTypePrefix, objcFileIdentStyle)
    if (cppTypeEnumIdentStyle != null) {
      cppIdentStyle = cppIdentStyle.copy(enumType = cppTypeEnumIdentStyle)
    }
    // Parse IDL file.
    System.out.println("Parsing...")
    // Optionally record every input file visited (for build-system dependency tracking).
    val inFileListWriter = if (inFileListPath.isDefined) {
      if (inFileListPath.get.getParentFile != null)
        createFolder("input file list", inFileListPath.get.getParentFile)
      Some(new BufferedWriter(new FileWriter(inFileListPath.get)))
    } else {
      None
    }
    val idl = try {
      (new Parser(idlIncludePaths)).parseFile(idlFile, inFileListWriter)
    }
    catch {
      case ex @ (_: FileNotFoundException | _: IOException) =>
        System.err.println("Error reading from --idl file: " + ex.getMessage)
        System.exit(1); return
    }
    finally {
      if (inFileListWriter.isDefined) {
        inFileListWriter.get.close()
      }
    }
    // Resolve names in IDL file, check types.
    System.out.println("Resolving...")
    resolver.resolve(meta.defaults, idl) match {
      case Some(err) =>
        System.err.println(err)
        System.exit(1); return
      case _ =>
    }
    System.out.println("Generating...")
    // Optionally record every output file produced.
    val outFileListWriter = if (outFileListPath.isDefined) {
      if (outFileListPath.get.getParentFile != null)
        createFolder("output file list", outFileListPath.get.getParentFile)
      Some(new BufferedWriter(new FileWriter(outFileListPath.get)))
    } else {
      None
    }
    // The Swift bridging header is only written when both the header name and an
    // Objective-C output folder were requested.
    val objcSwiftBridgingHeaderWriter = if (objcSwiftBridgingHeader.isDefined && objcOutFolder.isDefined) {
      val objcSwiftBridgingHeaderFile = new File(objcOutFolder.get.getPath, objcSwiftBridgingHeader.get + ".h")
      if (objcSwiftBridgingHeaderFile.getParentFile != null)
        createFolder("output file list", objcSwiftBridgingHeaderFile.getParentFile)
      Some(new BufferedWriter(new FileWriter(objcSwiftBridgingHeaderFile)))
    } else {
      None
    }
    // Bundle all resolved settings into the generator spec.
    val outSpec = Spec(
      javaOutFolder,
      javaPackage,
      javaClassAccessModifier,
      javaIdentStyle,
      javaCppException,
      javaAnnotation,
      javaNullableAnnotation,
      javaNonnullAnnotation,
      javaImplementAndroidOsParcelable,
      javaUseFinalForRecord,
      cppOutFolder,
      cppHeaderOutFolder,
      cppIncludePrefix,
      cppExtendedRecordIncludePrefix,
      cppNamespace,
      cppIdentStyle,
      cppFileIdentStyle,
      cppOptionalTemplate,
      cppOptionalHeader,
      cppEnumHashWorkaround,
      cppNnHeader,
      cppNnType,
      cppNnCheckExpression,
      cppUseWideStrings,
      jniOutFolder,
      jniHeaderOutFolder,
      jniIncludePrefix,
      jniIncludeCppPrefix,
      jniNamespace,
      jniClassIdentStyle,
      jniFileIdentStyle,
      jniBaseLibIncludePrefix,
      cppExt,
      cppHeaderExt,
      objcOutFolder,
      objcppOutFolder,
      objcIdentStyle,
      objcFileIdentStyle,
      objcppExt,
      objcHeaderExt,
      objcIncludePrefix,
      objcExtendedRecordIncludePrefix,
      objcppIncludePrefix,
      objcppIncludeCppPrefix,
      objcppIncludeObjcPrefix,
      objcppNamespace,
      objcBaseLibIncludePrefix,
      objcSwiftBridgingHeaderWriter,
      outFileListWriter,
      skipGeneration,
      yamlOutFolder,
      yamlOutFile,
      yamlPrefix)
    try {
      val r = generate(idl, outSpec)
      r.foreach(e => System.err.println("Error generating output: " + e))
    }
    finally {
      if (outFileListWriter.isDefined) {
        outFileListWriter.get.close()
      }
      if (objcSwiftBridgingHeaderWriter.isDefined) {
        objcSwiftBridgingHeaderWriter.get.close()
      }
    }
  }
}
| PSPDFKit-labs/djinni | src/source/Main.scala | Scala | apache-2.0 | 20,367 |
package com.atomist.tree.content.text
trait LineInputPosition extends InputPosition {

  /** Line number of this position, counted from 1. */
  def lineFrom1: Int

  /** Column number of this position, counted from 1. */
  def colFrom1: Int

  /** The complete input string this position refers to. */
  def input: String

  /**
   * Renders the line at this position followed by a second line whose caret
   * marks the column, e.g. for error reporting.
   */
  override def show: String = {
    val allLines = input.lines.toSeq
    // Clamp so an out-of-range line number still points at the last line.
    val lineIndex = math.min(lineFrom1 - 1, allLines.size - 1)
    val caretLine = (" " * (colFrom1 - 1)) + "^"
    allLines(lineIndex) + "\n" + caretLine
  }

  override def toString =
    s"${getClass.getSimpleName}: $lineFrom1/$colFrom1: offset=$offset"
}
/**
 * InputPosition implementation given line position, from 1:1.
 *
 * @param input input string
 * @param lineFrom1 line (from 1)
 * @param colFrom1 column (from 1)
 */
case class LineInputPositionImpl(input: String, lineFrom1: Int, colFrom1: Int)
  extends LineInputPosition {

  override val offset: Int = {
    if (lineFrom1 < 1) {
      0
    } else {
      // Walk the characters, counting until the requested (line, col) is reached.
      // NOTE(review): a "\r\n" pair advances the line counter twice, and the final
      // character of the input is excluded (dropRight(1)) -- both behaviors are
      // preserved from the original implementation; verify they are intended.
      val chars = input.dropRight(1)
      var offs = 0
      var line = 1
      var col = 1
      var i = 0
      while (i < chars.length) {
        // Once the target position is reached, counting stops for good.
        if (!(line >= lineFrom1 && col >= colFrom1)) {
          val ch = chars.charAt(i)
          if (ch == '\n' || ch == '\r') {
            line += 1
            col = 1
          } else {
            col += 1
          }
          offs += 1
        }
        i += 1
      }
      offs
    }
  }
}
| atomist/rug | src/main/scala/com/atomist/tree/content/text/LineInputPosition.scala | Scala | gpl-3.0 | 1,270 |
package nofs.restfs.query.ast
class ApplyTransformationOperation(fromTyp: String, toTyp: String) extends FSOperation {
  // The endpoint types never change after construction, so immutable vals replace
  // the original mutable vars; stray semicolons dropped and return types made explicit.
  private val _fromType: String = fromTyp
  private val _toType: String = toTyp

  /** Returns the source type of the transformation. */
  def getFromType(): String = _fromType

  /** Returns the target type of the transformation. */
  def getToType(): String = _toType
} | megacoder/restfs | src/main/scala/nofs/restfs/query/ast/ApplyTransformationOperation.scala | Scala | lgpl-2.1 | 258 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.models
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.numeric.NumericDouble
import com.intel.analytics.bigdl.utils.{T, Table}
/**
* models in this file is only for gradient check
*/
@com.intel.analytics.bigdl.tags.Serial
object GoogleNet_v1_test {
  /**
   * Builds a GoogLeNet (Inception) v1 network for gradient checking.
   * Dropout layers are commented out so the model is deterministic.
   *
   * The network has three classifier heads (two auxiliary, one main); their
   * log-softmax outputs are concatenated along dimension 2 via nested Concat
   * containers (split1/split2).
   *
   * @param classNum number of output classes per classifier head
   * @return the assembled model (weights freshly initialized via reset())
   */
  def apply(classNum: Int): Module[Double] = {
    // Stem + first inception stages, shared by all three heads.
    val feature1 = Sequential()
    feature1.add(SpatialConvolution(3, 64, 7, 7, 2, 2, 3, 3, 1, true).setInitMethod(Xavier)
      .setName("conv1/7x7_s2"))
    feature1.add(ReLU(true).setName("conv1/relu_7x7"))
    feature1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool1/3x3_s2"))
    feature1.add(SpatialCrossMapLRN(5, 0.0001, 0.75).setName("pool1/norm1"))
    feature1.add(SpatialConvolution(64, 64, 1, 1, 1, 1).setInitMethod(Xavier)
      .setName("conv2/3x3_reduce"))
    feature1.add(ReLU(true).setName("conv2/relu_3x3_reduce"))
    feature1.add(SpatialConvolution(64, 192, 3, 3, 1, 1, 1, 1).setInitMethod(Xavier)
      .setName("conv2/3x3"))
    feature1.add(ReLU(true).setName("conv2/relu_3x3"))
    feature1.add(SpatialCrossMapLRN(5, 0.0001, 0.75). setName("conv2/norm2"))
    feature1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool2/3x3_s2"))
    feature1.add(inception(192, T(T(64), T(96, 128), T(16, 32), T(32)), "inception_3a/"))
    feature1.add(inception(256, T(T(128), T(128, 192), T(32, 96), T(64)), "inception_3b/"))
    feature1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool3/3x3_s2"))
    feature1.add(inception(480, T(T(192), T(96, 208), T(16, 48), T(64)), "inception_4a/"))
    // First auxiliary classifier head (attached after inception_4a).
    val output1 = Sequential()
    output1.add(SpatialAveragePooling(5, 5, 3, 3).ceil().setName("loss1/ave_pool"))
    output1.add(SpatialConvolution(512, 128, 1, 1, 1, 1).setName("loss1/conv"))
    output1.add(ReLU(true).setName("loss1/relu_conv"))
    output1.add(View(128 * 4 * 4).setNumInputDims(3))
    output1.add(Linear(128 * 4 * 4, 1024).setName("loss1/fc"))
    output1.add(ReLU(true).setName("loss1/relu_fc"))
    // output1.add(Dropout(0.7).setName("loss1/drop_fc"))
    output1.add(Linear(1024, classNum).setName("loss1/classifier"))
    output1.add(LogSoftMax().setName("loss1/loss"))
    // Middle inception stages feeding the second auxiliary head.
    val feature2 = Sequential()
    feature2.add(inception(512, T(T(160), T(112, 224), T(24, 64), T(64)), "inception_4b/"))
    feature2.add(inception(512, T(T(128), T(128, 256), T(24, 64), T(64)), "inception_4c/"))
    feature2.add(inception(512, T(T(112), T(144, 288), T(32, 64), T(64)), "inception_4d/"))
    // Second auxiliary classifier head (attached after inception_4d).
    val output2 = Sequential()
    output2.add(SpatialAveragePooling(5, 5, 3, 3).setName("loss2/ave_pool"))
    output2.add(SpatialConvolution(528, 128, 1, 1, 1, 1).setName("loss2/conv"))
    output2.add(ReLU(true).setName("loss2/relu_conv"))
    output2.add(View(128 * 4 * 4).setNumInputDims(3))
    output2.add(Linear(128 * 4 * 4, 1024).setName("loss2/fc"))
    output2.add(ReLU(true).setName("loss2/relu_fc"))
    // output2.add(Dropout(0.7).setName("loss2/drop_fc"))
    output2.add(Linear(1024, classNum).setName("loss2/classifier"))
    output2.add(LogSoftMax().setName("loss2/loss"))
    // Main classifier head: remaining inception stages plus final pooling.
    val output3 = Sequential()
    output3.add(inception(528, T(T(256), T(160, 320), T(32, 128), T(128)), "inception_4e/"))
    output3.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool4/3x3_s2"))
    output3.add(inception(832, T(T(256), T(160, 320), T(32, 128), T(128)), "inception_5a/"))
    output3.add(inception(832, T(T(384), T(192, 384), T(48, 128), T(128)), "inception_5b/"))
    output3.add(SpatialAveragePooling(7, 7, 1, 1).setName("pool5/7x7_s1"))
    // output3.add(Dropout(0.4).setName("pool5/drop_7x7_s1"))
    output3.add(View(1024).setNumInputDims(3))
    output3.add(Linear(1024, classNum).setInitMethod(Xavier).setName("loss3/classifier"))
    output3.add(LogSoftMax().setName("loss3/loss3"))
    // Concatenate the heads' outputs along dim 2 (batch is dim 1).
    val split2 = Concat(2).setName("split2")
    split2.add(output3)
    split2.add(output2)
    val mainBranch = Sequential()
    mainBranch.add(feature2)
    mainBranch.add(split2)
    val split1 = Concat(2).setName("split1")
    split1.add(mainBranch)
    split1.add(output1)
    val model = Sequential()
    model.add(feature1)
    model.add(split1)
    model.reset()
    model
  }

  /**
   * Builds one GoogLeNet v1 inception block: four parallel branches
   * (1x1 conv, 3x3 conv with 1x1 reduce, 5x5 conv with 1x1 reduce,
   * 3x3 max-pool with 1x1 projection) concatenated along the channel dim.
   *
   * @param inputSize  number of input channels
   * @param config     nested table of branch widths:
   *                   T(T(1x1), T(3x3reduce, 3x3), T(5x5reduce, 5x5), T(poolProj))
   * @param namePrefix prefix for layer names, e.g. "inception_3a/"
   */
  def inception(inputSize: Int, config: Table, namePrefix : String = "") : Module[Double] = {
    val concat = Concat(2)
    val conv1 = Sequential()
    conv1.add(SpatialConvolution(inputSize,
      config[Table](1)(1), 1, 1, 1, 1).setInitMethod(Xavier).setName(namePrefix + "1x1"))
    conv1.add(ReLU(true).setName(namePrefix + "relu_1x1"))
    concat.add(conv1)
    val conv3 = Sequential()
    conv3.add(SpatialConvolution(inputSize,
      config[Table](2)(1), 1, 1, 1, 1).setInitMethod(Xavier).setName(namePrefix + "3x3_reduce"))
    conv3.add(ReLU(true).setName(namePrefix + "relu_3x3_reduce"))
    conv3.add(SpatialConvolution(config[Table](2)(1),
      config[Table](2)(2), 3, 3, 1, 1, 1, 1).setInitMethod(Xavier).setName(namePrefix + "3x3"))
    conv3.add(ReLU(true).setName(namePrefix + "relu_3x3"))
    concat.add(conv3)
    val conv5 = Sequential()
    conv5.add(SpatialConvolution(inputSize,
      config[Table](3)(1), 1, 1, 1, 1).setInitMethod(Xavier).setName(namePrefix + "5x5_reduce"))
    conv5.add(ReLU(true).setName(namePrefix + "relu_5x5_reduce"))
    conv5.add(SpatialConvolution(config[Table](3)(1),
      config[Table](3)(2), 5, 5, 1, 1, 2, 2).setInitMethod(Xavier).setName(namePrefix + "5x5"))
    conv5.add(ReLU(true).setName(namePrefix + "relu_5x5"))
    concat.add(conv5)
    val pool = Sequential()
    pool.add(SpatialMaxPooling(3, 3, 1, 1, 1, 1).ceil().setName(namePrefix + "pool"))
    pool.add(SpatialConvolution(inputSize,
      config[Table](4)(1), 1, 1, 1, 1).setInitMethod(Xavier).setName(namePrefix + "pool_proj"))
    pool.add(ReLU(true).setName(namePrefix + "relu_pool_proj"))
    concat.add(pool).setName(namePrefix + "output")
    concat
  }
}
object GoogleNet_v2_test {
  /**
   * Builds a GoogLeNet (Inception) v2 network with batch normalization,
   * used for gradient checking (dropout disabled).
   *
   * Like v1, it has three classifier heads whose log-softmax outputs are
   * concatenated along dimension 2 via nested Concat containers.
   *
   * @param classNum number of output classes per classifier head
   * @return the assembled model (weights freshly initialized via reset())
   */
  def apply(classNum: Int): Module[Double] = {
    // Stem + inception_3x stages shared by all heads.
    val features1 = Sequential()
    features1.add(SpatialConvolution(3, 64, 7, 7, 2, 2, 3, 3, 1, true)
      .setName("conv1/7x7_s2"))
    features1.add(SpatialBatchNormalization(64, 1e-3).setInit().setName("conv1/7x7_s2/bn"))
    features1.add(ReLU(true).setName("conv1/7x7_s2/bn/sc/relu"))
    features1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool1/3x3_s2"))
    features1.add(SpatialConvolution(64, 64, 1, 1).setName("conv2/3x3_reduce"))
    features1.add(SpatialBatchNormalization(64, 1e-3).setInit().setName("conv2/3x3_reduce/bn"))
    features1.add(ReLU(true).setName("conv2/3x3_reduce/bn/sc/relu"))
    features1.add(SpatialConvolution(64, 192, 3, 3, 1, 1, 1, 1).setName("conv2/3x3"))
    features1.add(SpatialBatchNormalization(192, 1e-3).setInit().setName("conv2/3x3/bn"))
    features1.add(ReLU(true).setName("conv2/3x3/bn/sc/relu"))
    features1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool2/3x3_s2"))
    features1.add(inception(192, T(T(64), T(64, 64), T(64, 96), T("avg", 32)), "inception_3a/"))
    features1.add(inception(256, T(T(64), T(64, 96), T(64, 96), T("avg", 64)), "inception_3b/"))
    features1.add(inception(320, T(T(0), T(128, 160), T(64, 96), T("max", 0)), "inception_3c/"))
    // First auxiliary classifier head.
    val output1 = Sequential()
    output1.add(SpatialAveragePooling(5, 5, 3, 3).ceil().setName("pool3/5x5_s3"))
    output1.add(SpatialConvolution(576, 128, 1, 1, 1, 1).setName("loss1/conv"))
    output1.add(SpatialBatchNormalization(128, 1e-3).setInit().setName("loss1/conv/bn"))
    output1.add(ReLU(true).setName("loss1/conv/bn/sc/relu"))
    output1.add(View(128 * 4 * 4).setNumInputDims(3))
    output1.add(Linear(128 * 4 * 4, 1024).setName("loss1/fc"))
    output1.add(ReLU(true).setName("loss1/fc/bn/sc/relu"))
    output1.add(Linear(1024, classNum).setName("loss1/classifier"))
    output1.add(LogSoftMax().setName("loss1/loss"))
    // inception_4x stages feeding the second auxiliary head.
    val features2 = Sequential()
    features2
      .add(inception(576, T(T(224), T(64, 96), T(96, 128), T("avg", 128)), "inception_4a/"))
      .add(inception(576, T(T(192), T(96, 128), T(96, 128), T("avg", 128)), "inception_4b/"))
      .add(inception(576, T(T(160), T(128, 160), T(128, 160), T("avg", 96)), "inception_4c/"))
      .add(inception(576, T(T(96), T(128, 192), T(160, 192), T("avg", 96)), "inception_4d/"))
      .add(inception(576, T(T(0), T(128, 192), T(192, 256), T("max", 0)), "inception_4e/"))
    // Second auxiliary classifier head.
    val output2 = Sequential()
    output2.add(SpatialAveragePooling(5, 5, 3, 3).ceil().setName("pool4/5x5_s3"))
    output2.add(SpatialConvolution(1024, 128, 1, 1, 1, 1).setName("loss2/conv"))
    output2.add(SpatialBatchNormalization(128, 1e-3).setInit().setName("loss2/conv/bn"))
    output2.add(ReLU(true).setName("loss2/conv/bn/sc/relu"))
    output2.add(View(128 * 2 * 2).setNumInputDims(3))
    output2.add(Linear(128 * 2 * 2, 1024).setName("loss2/fc"))
    output2.add(ReLU(true).setName("loss2/fc/bn/sc/relu"))
    output2.add(Linear(1024, classNum).setName("loss2/classifier"))
    output2.add(LogSoftMax().setName("loss2/loss"))
    // Main classifier head: inception_5x stages plus final pooling.
    val output3 = Sequential()
    output3.add(inception(1024, T(T(352), T(192, 320), T(160, 224), T("avg", 128)),
      "inception_5a/"))
    output3.add(inception(1024, T(T(352), T(192, 320), T(192, 224), T("max", 128)),
      "inception_5b/"))
    output3.add(SpatialAveragePooling(7, 7, 1, 1).ceil().setName("pool5/7x7_s1"))
    output3.add(View(1024).setNumInputDims(3))
    output3.add(Linear(1024, classNum).setName("loss3/classifier"))
    output3.add(LogSoftMax().setName("loss3/loss"))
    // Concatenate head outputs along dim 2 (batch is dim 1).
    val split2 = Concat(2)
    split2.add(output3)
    split2.add(output2)
    val mainBranch = Sequential()
    mainBranch.add(features2)
    mainBranch.add(split2)
    val split1 = Concat(2)
    split1.add(mainBranch)
    split1.add(output1)
    val model = Sequential()
    model.add(features1)
    model.add(split1)
    model.reset()
    model
  }

  /**
   * Same network as [[apply]] but built without batch-normalization layers.
   *
   * @param classNum number of output classes per classifier head
   */
  def applyNoBn(classNum: Int): Module[Double] = {
    val features1 = Sequential()
    features1.add(SpatialConvolution(3, 64, 7, 7, 2, 2, 3, 3, 1, true)
      .setName("conv1/7x7_s2"))
    features1.add(ReLU(true).setName("conv1/7x7_s2/bn/sc/relu"))
    features1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool1/3x3_s2"))
    features1.add(SpatialConvolution(64, 64, 1, 1).setName("conv2/3x3_reduce"))
    features1.add(ReLU(true).setName("conv2/3x3_reduce/bn/sc/relu"))
    features1.add(SpatialConvolution(64, 192, 3, 3, 1, 1, 1, 1).setName("conv2/3x3"))
    features1.add(ReLU(true).setName("conv2/3x3/bn/sc/relu"))
    features1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool2/3x3_s2"))
    features1.add(inceptionNoBn(192, T(T(64), T(64, 64), T(64, 96), T("avg", 32)),
      "inception_3a/"))
    features1.add(inceptionNoBn(256, T(T(64), T(64, 96), T(64, 96), T("avg", 64)),
      "inception_3b/"))
    features1.add(inceptionNoBn(320, T(T(0), T(128, 160), T(64, 96), T("max", 0)),
      "inception_3c/"))
    val output1 = Sequential()
    output1.add(SpatialAveragePooling(5, 5, 3, 3).ceil().setName("pool3/5x5_s3"))
    output1.add(SpatialConvolution(576, 128, 1, 1, 1, 1).setName("loss1/conv"))
    output1.add(ReLU(true).setName("loss1/conv/bn/sc/relu"))
    output1.add(View(128 * 4 * 4).setNumInputDims(3))
    output1.add(Linear(128 * 4 * 4, 1024).setName("loss1/fc"))
    output1.add(ReLU(true).setName("loss1/fc/bn/sc/relu"))
    output1.add(Linear(1024, classNum).setName("loss1/classifier"))
    output1.add(LogSoftMax().setName("loss1/loss"))
    val features2 = Sequential()
    features2
      .add(inceptionNoBn(576, T(T(224), T(64, 96), T(96, 128), T("avg", 128)),
        "inception_4a/"))
      .add(inceptionNoBn(576, T(T(192), T(96, 128), T(96, 128), T("avg", 128)),
        "inception_4b/"))
      .add(inceptionNoBn(576, T(T(160), T(128, 160), T(128, 160), T("avg", 96)),
        "inception_4c/"))
      .add(inceptionNoBn(576, T(T(96), T(128, 192), T(160, 192), T("avg", 96)),
        "inception_4d/"))
      .add(inceptionNoBn(576, T(T(0), T(128, 192), T(192, 256), T("max", 0)),
        "inception_4e/"))
    val output2 = Sequential()
    output2.add(SpatialAveragePooling(5, 5, 3, 3).ceil().setName("pool4/5x5_s3"))
    output2.add(SpatialConvolution(1024, 128, 1, 1, 1, 1).setName("loss2/conv"))
    output2.add(ReLU(true).setName("loss2/conv/bn/sc/relu"))
    output2.add(View(128 * 2 * 2).setNumInputDims(3))
    output2.add(Linear(128 * 2 * 2, 1024).setName("loss2/fc"))
    output2.add(ReLU(true).setName("loss2/fc/bn/sc/relu"))
    output2.add(Linear(1024, classNum).setName("loss2/classifier"))
    output2.add(LogSoftMax().setName("loss2/loss"))
    val output3 = Sequential()
    output3.add(inceptionNoBn(1024, T(T(352), T(192, 320), T(160, 224), T("avg", 128)),
      "inception_5a/"))
    output3.add(inceptionNoBn(1024, T(T(352), T(192, 320), T(192, 224), T("max", 128)),
      "inception_5b/"))
    output3.add(SpatialAveragePooling(7, 7, 1, 1).ceil().setName("pool5/7x7_s1"))
    output3.add(View(1024).setNumInputDims(3))
    output3.add(Linear(1024, classNum).setName("loss3/classifier"))
    output3.add(LogSoftMax().setName("loss3/loss"))
    val split2 = Concat(2)
    split2.add(output3)
    split2.add(output2)
    val mainBranch = Sequential()
    mainBranch.add(features2)
    mainBranch.add(split2)
    val split1 = Concat(2)
    split1.add(mainBranch)
    split1.add(output1)
    val model = Sequential()
    model.add(features1)
    model.add(split1)
    model.reset()
    model
  }

  /**
   * Builds one Inception-v2 block with batch normalization.
   *
   * Branches: optional 1x1 conv (skipped when config(1)(1) == 0), 3x3 conv
   * with 1x1 reduce, double-3x3 conv with 1x1 reduce, and a pooling branch.
   * When the pooling branch is "max" with 0 projection channels, the 3x3
   * convolutions use stride 2 (the block downsamples).
   *
   * @param inputSize  number of input channels
   * @param config     nested table:
   *                   T(T(1x1), T(3x3reduce, 3x3), T(d3x3reduce, d3x3), T(poolType, poolProj))
   * @param namePrefix prefix for layer names
   */
  def inception(inputSize: Int, config: Table, namePrefix : String): Module[Double] = {
    val concat = Concat(2)
    if (config[Table](1)[Int](1) != 0) {
      val conv1 = Sequential()
      conv1.add(SpatialConvolution(inputSize, config[Table](1)(1), 1, 1, 1, 1)
        .setName(namePrefix + "1x1"))
      conv1.add(SpatialBatchNormalization(config[Table](1)(1), 1e-3).setInit()
        .setName(namePrefix + "1x1/bn"))
      conv1.add(ReLU(true).setName(namePrefix + "1x1/bn/sc/relu"))
      concat.add(conv1)
    }
    val conv3 = Sequential()
    conv3.add(SpatialConvolution(inputSize, config[Table](2)(1), 1, 1, 1, 1)
      .setName(namePrefix + "3x3_reduce"))
    conv3.add(SpatialBatchNormalization(config[Table](2)(1), 1e-3).setInit()
      .setName(namePrefix + "3x3_reduce/bn"))
    conv3.add(ReLU(true). setName(namePrefix + "3x3_reduce/bn/sc/relu"))
    // Downsampling variant: stride-2 convolution when the pool branch is a
    // bare max-pool (no projection).
    if(config[Table](4)[String](1) == "max" && config[Table](4)[Int](2) == 0) {
      conv3.add(SpatialConvolution(config[Table](2)(1),
        config[Table](2)(2), 3, 3, 2, 2, 1, 1).setName(namePrefix + "3x3"))
    } else {
      conv3.add(SpatialConvolution(config[Table](2)(1),
        config[Table](2)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "3x3"))
    }
    conv3.add(SpatialBatchNormalization(config[Table](2)(2), 1e-3).setInit()
      .setName(namePrefix + "3x3/bn"))
    conv3.add(ReLU(true).setName(namePrefix + "3x3/bn/sc/relu"))
    concat.add(conv3)
    val conv3xx = Sequential()
    conv3xx.add(SpatialConvolution(inputSize, config[Table](3)(1), 1, 1, 1, 1)
      .setName(namePrefix + "double3x3_reduce"))
    conv3xx.add(SpatialBatchNormalization(config[Table](3)(1), 1e-3).setInit()
      .setName(namePrefix + "double3x3_reduce/bn"))
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3_reduce/bn/sc/relu"))
    conv3xx.add(SpatialConvolution(config[Table](3)(1),
      config[Table](3)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "double3x3a"))
    conv3xx.add(SpatialBatchNormalization(config[Table](3)(2), 1e-3).setInit()
      .setName(namePrefix + "double3x3a/bn"))
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3a/bn/sc/relu"))
    if(config[Table](4)[String](1) == "max" && config[Table](4)[Int](2) == 0) {
      conv3xx.add(SpatialConvolution(config[Table](3)(2),
        config[Table](3)(2), 3, 3, 2, 2, 1, 1).setName(namePrefix + "double3x3b"))
    } else {
      conv3xx.add(SpatialConvolution(config[Table](3)(2),
        config[Table](3)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "double3x3b"))
    }
    conv3xx.add(SpatialBatchNormalization(config[Table](3)(2), 1e-3).setInit()
      .setName(namePrefix + "double3x3b/bn"))
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3b/bn/sc/relu"))
    concat.add(conv3xx)
    val pool = Sequential()
    config[Table](4)[String](1) match {
      case "max" =>
        if (config[Table](4)[Int](2) != 0) {
          pool.add(SpatialMaxPooling(3, 3, 1, 1, 1, 1).ceil().setName(namePrefix + "pool"))
        } else {
          pool.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName(namePrefix + "pool"))
        }
      case "avg" => pool.add(SpatialAveragePooling(3, 3, 1, 1, 1, 1).ceil()
        .setName(namePrefix + "pool"))
      case _ => throw new IllegalArgumentException
    }
    if (config[Table](4)[Int](2) != 0) {
      pool.add(SpatialConvolution(inputSize, config[Table](4)[Int](2), 1, 1, 1, 1)
        .setName(namePrefix + "pool_proj"))
      pool.add(SpatialBatchNormalization(config[Table](4)(2), 1e-3).setInit()
        .setName(namePrefix + "pool_proj/bn"))
      pool.add(ReLU(true).setName(namePrefix + "pool_proj/bn/sc/relu"))
    }
    concat.add(pool)
    // setName returns the container, which is the block's return value.
    concat.setName(namePrefix + "output")
  }

  /**
   * Same block as [[inception]] but without batch-normalization layers.
   *
   * @param inputSize  number of input channels
   * @param config     same nested table layout as [[inception]]
   * @param namePrefix prefix for layer names
   */
  def inceptionNoBn(inputSize: Int, config: Table, namePrefix : String): Module[Double] = {
    val concat = Concat(2)
    if (config[Table](1)[Int](1) != 0) {
      val conv1 = Sequential()
      conv1.add(SpatialConvolution(inputSize, config[Table](1)(1), 1, 1, 1, 1)
        .setName(namePrefix + "1x1"))
      conv1.add(ReLU(true).setName(namePrefix + "1x1/bn/sc/relu"))
      concat.add(conv1)
    }
    val conv3 = Sequential()
    conv3.add(SpatialConvolution(inputSize, config[Table](2)(1), 1, 1, 1, 1)
      .setName(namePrefix + "3x3_reduce"))
    conv3.add(ReLU(true). setName(namePrefix + "3x3_reduce/bn/sc/relu"))
    if(config[Table](4)[String](1) == "max" && config[Table](4)[Int](2) == 0) {
      conv3.add(SpatialConvolution(config[Table](2)(1),
        config[Table](2)(2), 3, 3, 2, 2, 1, 1).setName(namePrefix + "3x3"))
    } else {
      conv3.add(SpatialConvolution(config[Table](2)(1),
        config[Table](2)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "3x3"))
    }
    conv3.add(ReLU(true).setName(namePrefix + "3x3/bn/sc/relu"))
    concat.add(conv3)
    val conv3xx = Sequential()
    conv3xx.add(SpatialConvolution(inputSize, config[Table](3)(1), 1, 1, 1, 1)
      .setName(namePrefix + "double3x3_reduce"))
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3_reduce/bn/sc/relu"))
    conv3xx.add(SpatialConvolution(config[Table](3)(1),
      config[Table](3)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "double3x3a"))
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3a/bn/sc/relu"))
    if(config[Table](4)[String](1) == "max" && config[Table](4)[Int](2) == 0) {
      conv3xx.add(SpatialConvolution(config[Table](3)(2),
        config[Table](3)(2), 3, 3, 2, 2, 1, 1).setName(namePrefix + "double3x3b"))
    } else {
      conv3xx.add(SpatialConvolution(config[Table](3)(2),
        config[Table](3)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "double3x3b"))
    }
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3b/bn/sc/relu"))
    concat.add(conv3xx)
    val pool = Sequential()
    config[Table](4)[String](1) match {
      case "max" =>
        if (config[Table](4)[Int](2) != 0) {
          pool.add(SpatialMaxPooling(3, 3, 1, 1, 1, 1).ceil().setName(namePrefix + "pool"))
        } else {
          pool.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName(namePrefix + "pool"))
        }
      case "avg" => pool.add(SpatialAveragePooling(3, 3, 1, 1, 1, 1).ceil()
        .setName(namePrefix + "pool"))
      case _ => throw new IllegalArgumentException
    }
    if (config[Table](4)[Int](2) != 0) {
      pool.add(SpatialConvolution(inputSize, config[Table](4)[Int](2), 1, 1, 1, 1)
        .setName(namePrefix + "pool_proj"))
      pool.add(ReLU(true).setName(namePrefix + "pool_proj/bn/sc/relu"))
    }
    concat.add(pool)
    concat.setName(namePrefix + "output")
  }
}
object VggLike_test {
  /**
   * Builds a VGG-like convolutional network for gradient checking.
   * Dropout layers are intentionally disabled to keep the model
   * deterministic.
   *
   * @param classNum number of output classes
   */
  def apply(classNum: Int): Module[Double] = {
    val net = Sequential[Double]()

    // Appends one Conv(3x3, pad 1) -> BatchNorm -> ReLU triple to `net`.
    def convBlock(nIn: Int, nOut: Int): Unit = {
      net.add(SpatialConvolution(nIn, nOut, 3, 3, 1, 1, 1, 1))
      net.add(SpatialBatchNormalization[Double](nOut, 1e-3).setInit())
      net.add(ReLU(true))
    }

    // Channel progression per stage; every stage ends with 2x2 max-pooling.
    val stages = Seq(
      Seq(3 -> 64, 64 -> 64),
      Seq(64 -> 128, 128 -> 128),
      Seq(128 -> 256, 256 -> 256, 256 -> 256),
      Seq(256 -> 512, 512 -> 512, 512 -> 512),
      Seq(512 -> 512, 512 -> 512, 512 -> 512))
    for (stage <- stages) {
      for ((nIn, nOut) <- stage) convBlock(nIn, nOut)
      net.add(SpatialMaxPooling[Double](2, 2, 2, 2).ceil())
    }
    net.add(View(512))

    // Fully-connected classifier head (dropout disabled here as well).
    val classifier = Sequential[Double]()
    classifier.add(Linear(512, 512))
    classifier.add(BatchNormalization[Double](512).setInit())
    classifier.add(ReLU(true))
    classifier.add(Linear(512, classNum))
    classifier.add(LogSoftMax())
    net.add(classifier)
    net
  }
}
object LeNet5_test {
  /**
   * LeNet-5 style network for 28x28 single-channel input, used for
   * gradient checking.
   *
   * @param classNum number of output classes
   */
  def apply(classNum: Int): Module[Double] = {
    val net = Sequential()
    net
      .add(Reshape(Array(1, 28, 28)))
      .add(SpatialConvolution(1, 6, 5, 5))
      .add(Tanh())
      .add(SpatialMaxPooling(2, 2, 2, 2))
      .add(Tanh())
      .add(SpatialConvolution(6, 12, 5, 5))
      .add(SpatialMaxPooling(2, 2, 2, 2))
      .add(Reshape(Array(12 * 4 * 4)))
      .add(Linear(12 * 4 * 4, 100))
      .add(Tanh())
      .add(Linear(100, classNum))
      .add(LogSoftMax())
    net
  }
}
object SimpleCNN_test {
  // Input image geometry (MNIST-sized, single channel).
  val rowN = 28
  val colN = 28
  val featureSize = rowN * colN

  /**
   * Small CNN for gradient checking: two conv/tanh/pool stages followed by
   * a two-layer fully-connected classifier.
   *
   * @param classNum number of output classes
   */
  def apply(classNum: Int): Module[Double] = {
    val linearInputNum = 64 * 2 * 2 // flattened size after the second pooling
    val hiddenNum = 200
    val model = Sequential()
    model
      .add(Reshape(Array(1, rowN, colN)))
      .add(SpatialConvolution(1, 32, 5, 5))
      .add(Tanh())
      .add(SpatialMaxPooling(3, 3, 3, 3))
      .add(SpatialConvolution(32, 64, 5, 5))
      .add(Tanh())
      .add(SpatialMaxPooling(2, 2, 2, 2))
      .add(Reshape(Array(linearInputNum)))
      .add(Linear(linearInputNum, hiddenNum))
      .add(Tanh())
      .add(Linear(hiddenNum, classNum))
      .add(LogSoftMax())
    model
  }
}
| psyyz10/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/models/ModelforCheck.scala | Scala | apache-2.0 | 23,438 |
package ca.dubey.music.midi.event
import javax.sound.midi.MidiEvent
import javax.sound.midi.MetaMessage
/**
 * Companion mix-in for MIDI meta-event decoders.
 *
 * @tparam E the decoded event type
 */
trait EventCompanion[E] {
  /** Meta-message type id that this companion knows how to decode. */
  val EVENT_ID : Int

  /**
   * Decodes a raw MIDI event into an `E`.
   *
   * @param m the raw MIDI event
   * @return the decoded event when `m` carries a meta message whose type is
   *         [[EVENT_ID]]; `None` otherwise
   */
  def fromMidiEvent(m : MidiEvent) : Option[E] =
    // Fixed: explicit `return` statements removed -- the match expression's
    // value is the method result, which is the idiomatic form.
    m.getMessage match {
      case e: MetaMessage if e.getType == EVENT_ID => fromMidiEventData(e.getData)
      case _ => None
    }

  /** Decodes the meta-message payload bytes into an `E`. */
  def fromMidiEventData(data : Array[Byte]) : Option[E]
}
/** Base class for events that can be rendered as raw MIDI events. */
abstract class Event {
  // Event timestamp in MIDI ticks; mutable so callers can (re)position
  // the event after construction.
  var ticks : Long = 0

  /** Converts this event into a `javax.sound.midi.MidiEvent`. */
  def toMidiEvent : MidiEvent
}
| adubey/music | src/main/scala/midi/event/Event.scala | Scala | gpl-2.0 | 517 |
package akka.persistence.kafka.snapshot
import scala.concurrent.{Promise, Future}
import scala.concurrent.duration._
import scala.util._
import akka.actor._
import akka.pattern.{PromiseActorRef, pipe}
import akka.persistence._
import akka.persistence.JournalProtocol._
import akka.persistence.kafka._
import akka.persistence.kafka.MetadataConsumer.Broker
import akka.persistence.kafka.BrokerWatcher.BrokersUpdated
import akka.serialization.SerializationExtension
import akka.util.Timeout
import _root_.kafka.producer.{Producer, KeyedMessage}
/**
 * Optimized and fully async version of [[akka.persistence.snapshot.SnapshotStore]].
 *
 * Receives the snapshot protocol messages and delegates to the abstract
 * async operations below, piping results back to the original sender.
 */
trait KafkaSnapshotStoreEndpoint extends Actor {
  import SnapshotProtocol._
  import context.dispatcher

  val extension = Persistence(context.system)
  // Whether delete confirmations should be published to the event stream.
  val publish = extension.settings.internal.publishPluginCommands

  def receive = {
    // NOTE: arrows restored to `=>` -- the checked-in text contained a
    // mojibake character in their place, which does not compile.
    case LoadSnapshot(persistenceId, criteria, toSequenceNr) =>
      // Capture the sender before crossing the async boundary.
      val p = sender
      loadAsync(persistenceId, criteria.limit(toSequenceNr)) map {
        sso => LoadSnapshotResult(sso, toSequenceNr)
      } recover {
        case e => LoadSnapshotResult(None, toSequenceNr)
      } pipeTo (p)
    case SaveSnapshot(metadata, snapshot) =>
      val p = sender
      val md = metadata.copy(timestamp = System.currentTimeMillis)
      saveAsync(md, snapshot) map {
        _ => SaveSnapshotSuccess(md)
      } recover {
        case e => SaveSnapshotFailure(metadata, e)
      } pipeTo (p)
    case d @ DeleteSnapshot(metadata) =>
      deleteAsync(metadata) onComplete {
        case Success(_) => if (publish) context.system.eventStream.publish(d)
        case Failure(_) =>
      }
    case d @ DeleteSnapshots(persistenceId, criteria) =>
      deleteAsync(persistenceId, criteria) onComplete {
        case Success(_) => if (publish) context.system.eventStream.publish(d)
        case Failure(_) =>
      }
  }

  /** Loads the youngest snapshot matching `criteria`, if any. */
  def loadAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Option[SelectedSnapshot]]

  /** Persists `snapshot` under `metadata`. */
  def saveAsync(metadata: SnapshotMetadata, snapshot: Any): Future[Unit]

  /** Deletes the single snapshot identified by `metadata`. */
  def deleteAsync(metadata: SnapshotMetadata): Future[Unit]

  /** Deletes all snapshots of `persistenceId` matching `criteria`. */
  def deleteAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Unit]
}
/**
 * Snapshot store backed by Kafka topics (one topic per persistence id).
 * Broker metadata is kept up to date via a ZooKeeper [[BrokerWatcher]].
 */
class KafkaSnapshotStore extends KafkaSnapshotStoreEndpoint with MetadataConsumer with ActorLogging {
  import context.dispatcher

  type RangeDeletions = Map[String, SnapshotSelectionCriteria]
  type SingleDeletions = Map[String, List[SnapshotMetadata]]

  val serialization = SerializationExtension(context.system)
  val config = new KafkaSnapshotStoreConfig(context.system.settings.config.getConfig("kafka-snapshot-store"))
  val brokerWatcher = new BrokerWatcher(config.zookeeperConfig, self)

  // Current broker list; refreshed via BrokersUpdated messages.
  var brokers: List[Broker] = brokerWatcher.start()

  override def postStop(): Unit = {
    brokerWatcher.stop()
    super.postStop()
  }

  def localReceive: Receive = {
    case BrokersUpdated(newBrokers) =>
      brokers = newBrokers
  }

  // Handle broker updates first, then fall back to the endpoint protocol.
  override def receive: Receive = localReceive.orElse(super.receive)

  // Transient deletions only to pass TCK (persistent not supported)
  var rangeDeletions: RangeDeletions = Map.empty.withDefaultValue(SnapshotSelectionCriteria.None)
  var singleDeletions: SingleDeletions = Map.empty.withDefaultValue(Nil)

  /** Records a range deletion in memory; nothing is removed from Kafka. */
  def deleteAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Unit] = Future.successful {
    rangeDeletions += (persistenceId -> criteria)
  }

  /** Records a single-snapshot deletion in memory. */
  def deleteAsync(metadata: SnapshotMetadata): Future[Unit] = Future.successful {
    singleDeletions.get(metadata.persistenceId) match {
      case Some(dels) => singleDeletions += (metadata.persistenceId -> (metadata :: dels))
      case None => singleDeletions += (metadata.persistenceId -> List(metadata))
    }
  }

  /** Serializes the snapshot and appends it to the persistence id's topic. */
  def saveAsync(metadata: SnapshotMetadata, snapshot: Any): Future[Unit] = Future {
    val snapshotBytes = serialization.serialize(KafkaSnapshot(metadata, snapshot)).get
    val snapshotMessage = new KeyedMessage[String, Array[Byte]](snapshotTopic(metadata.persistenceId), "static", snapshotBytes)
    val snapshotProducer = new Producer[String, Array[Byte]](config.producerConfig(brokers))
    try {
      // TODO: take a producer from a pool
      snapshotProducer.send(snapshotMessage)
    } finally {
      snapshotProducer.close()
    }
  }

  /**
   * Scans the topic backwards from the latest offset for the youngest
   * snapshot matching `criteria`, skipping in-memory deletions. When
   * `ignoreOrphan` is set, the criteria are capped to the journal's
   * highest sequence number.
   */
  def loadAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Option[SelectedSnapshot]] = {
    // Snapshot the deletion maps so the scan below sees a consistent view.
    val singleDeletions = this.singleDeletions
    val rangeDeletions = this.rangeDeletions
    for {
      highest <- if (config.ignoreOrphan) highestJournalSequenceNr(persistenceId) else Future.successful(Long.MaxValue)
      adjusted = if (config.ignoreOrphan &&
        highest < criteria.maxSequenceNr &&
        highest > 0L) criteria.copy(maxSequenceNr = highest) else criteria
      snapshot <- Future {
        val topic = snapshotTopic(persistenceId)
        def matcher(snapshot: KafkaSnapshot): Boolean = snapshot.matches(adjusted) &&
          !snapshot.matches(rangeDeletions(persistenceId)) &&
          !singleDeletions(persistenceId).contains(snapshot.metadata)
        leaderFor(topic, brokers) match {
          case Some(Broker(host, port)) => load(host, port, topic, matcher).map(s => SelectedSnapshot(s.metadata, s.snapshot))
          case None => None
        }
      }
    } yield snapshot
  }

  /**
   * Walks the topic from the newest offset down to 0 and returns the first
   * snapshot accepted by `matcher`.
   */
  def load(host: String, port: Int, topic: String, matcher: KafkaSnapshot => Boolean): Option[KafkaSnapshot] = {
    val offset = offsetFor(host, port, topic, config.partition)

    @annotation.tailrec
    def load(host: String, port: Int, topic: String, offset: Long): Option[KafkaSnapshot] =
      if (offset < 0) None else {
        val s = snapshot(host, port, topic, offset)
        if (matcher(s)) Some(s) else load(host, port, topic, offset - 1)
      }

    // offsetFor returns the next write position, so start one before it.
    load(host, port, topic, offset - 1)
  }

  /**
   * Fetches the highest sequence number for `persistenceId` from the journal actor.
   */
  private def highestJournalSequenceNr(persistenceId: String): Future[Long] = {
    val journal = extension.journalFor(persistenceId)
    // NOTE(review): `promise` appears unused -- the reply arrives via the
    // PromiseActorRef below; confirm whether this val can be removed.
    val promise = Promise[Any]()
    // We need to use a PromiseActorRef here instead of ask because the journal doesn't reply to ReadHighestSequenceNr requests
    val ref = PromiseActorRef(extension.system.provider, Timeout(config.consumerConfig.socketTimeoutMs.millis), journal.toString)
    journal ! ReadHighestSequenceNr(0L, persistenceId, ref)
    ref.result.future.flatMap {
      case ReadHighestSequenceNrSuccess(snr) => Future.successful(snr)
      case ReadHighestSequenceNrFailure(err) => Future.failed(err)
    }
  }

  /** Reads and deserializes the single message at `offset`. */
  private def snapshot(host: String, port: Int, topic: String, offset: Long): KafkaSnapshot = {
    val iter = new MessageIterator(host, port, topic, config.partition, offset, config.consumerConfig)
    try { serialization.deserialize(MessageUtil.payloadBytes(iter.next()), classOf[KafkaSnapshot]).get } finally { iter.close() }
  }

  /** Topic name for a persistence id, with the configured prefix applied. */
  private def snapshotTopic(persistenceId: String): String =
    s"${config.prefix}${journalTopic(persistenceId)}"
}
| krasserm/akka-persistence-kafka | src/main/scala/akka/persistence/kafka/snapshot/KafkaSnapshotStore.scala | Scala | apache-2.0 | 7,097 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import com.codahale.metrics.{Gauge,MetricRegistry}
import org.apache.spark.metrics.source.Source
import org.apache.spark.SparkContext
/**
 * Metrics source exposing block-manager memory and disk usage gauges
 * (in megabytes) for the application.
 */
private[spark] class BlockManagerSource(val blockManager: BlockManager, sc: SparkContext)
  extends Source {
  val metricRegistry = new MetricRegistry()
  val sourceName = "%s.BlockManager".format(sc.appName)

  // Total memory available across all block managers.
  metricRegistry.register(MetricRegistry.name("memory", "maxMem_MB"), new Gauge[Long] {
    override def getValue: Long = {
      val storageStatusList = blockManager.master.getStorageStatus
      // Fixed: `.reduce(_ + _)` throws UnsupportedOperationException on an
      // empty status list (e.g. before any executor registers); `.sum`
      // yields 0 instead, matching the disk gauge's defensive style.
      val maxMem = storageStatusList.map(_.maxMem).sum
      maxMem / 1024 / 1024
    }
  })

  // Memory still available across all block managers.
  metricRegistry.register(MetricRegistry.name("memory", "remainingMem_MB"), new Gauge[Long] {
    override def getValue: Long = {
      val storageStatusList = blockManager.master.getStorageStatus
      val remainingMem = storageStatusList.map(_.memRemaining).sum
      remainingMem / 1024 / 1024
    }
  })

  // Memory currently in use (max minus remaining).
  metricRegistry.register(MetricRegistry.name("memory", "memUsed_MB"), new Gauge[Long] {
    override def getValue: Long = {
      val storageStatusList = blockManager.master.getStorageStatus
      val maxMem = storageStatusList.map(_.maxMem).sum
      val remainingMem = storageStatusList.map(_.memRemaining).sum
      (maxMem - remainingMem) / 1024 / 1024
    }
  })

  // Disk space consumed by all stored blocks.
  metricRegistry.register(MetricRegistry.name("disk", "diskSpaceUsed_MB"), new Gauge[Long] {
    override def getValue: Long = {
      val storageStatusList = blockManager.master.getStorageStatus
      val diskSpaceUsed = storageStatusList
        .flatMap(_.blocks.values.map(_.diskSize))
        .sum
      diskSpaceUsed / 1024 / 1024
    }
  })
}
| mkolod/incubator-spark | core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala | Scala | apache-2.0 | 2,576 |
package io.prediction.data.storage.hbase.upgrade
import grizzled.slf4j.Logger
import io.prediction.data.storage.Storage
import io.prediction.data.storage.DataMap
import io.prediction.data.storage.hbase.HBLEvents
import io.prediction.data.storage.hbase.HBEventsUtil
import scala.collection.JavaConversions._
import scala.concurrent._
import ExecutionContext.Implicits.global
import io.prediction.data.storage.LEvents
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import java.lang.Thread
object CheckDistribution {
  /**
   * Tallies events of an app by (entityType, targetEntityType) pair.
   * The returned map yields 0 for keys that were never seen.
   */
  def entityType(eventClient: LEvents, appId: Int)
  : Map[(String, Option[String]), Int] = {
    val events = eventClient.find(appId = appId).right.get
    val zero = Map[(String, Option[String]), Int]().withDefaultValue(0)
    events.foldLeft(zero) { (counts, event) =>
      val key = (event.entityType, event.targetEntityType)
      counts.updated(key, counts(key) + 1)
    }
  }

  /** Prints the per-entity-type counts for `appId`, most frequent first. */
  def runMain(appId: Int) {
    val eventClient = Storage.getLEvents().asInstanceOf[HBLEvents]
    entityType(eventClient, appId)
      .toSeq
      .sortBy(-_._2)
      .foreach(println)
  }

  def main(args: Array[String]) {
    runMain(args(0).toInt)
  }
}
/* Experimental: one-off migration tool for upgrading event data from
 * PredictionIO 0.8.2 to 0.8.3. Copies all events from one appId to another,
 * renaming obsolete "pio_"-prefixed entity types and property keys, then
 * cross-checks the event distributions of source and target apps.
 */
object Upgrade_0_8_3 {

  // Obsolete 0.8.2 entity-type names and their 0.8.3 replacements.
  val NameMap = Map(
    "pio_user" -> "user",
    "pio_item" -> "item")
  // Reverse mapping, used to validate the target app against the source.
  val RevNameMap = NameMap.toSeq.map(_.swap).toMap

  val logger = Logger[this.type]

  def main(args: Array[String]) {
    val fromAppId = args(0).toInt
    val toAppId = args(1).toInt

    runMain(fromAppId, toAppId)
  }

  def runMain(fromAppId: Int, toAppId: Int) = {
    upgrade(fromAppId, toAppId)
  }

  // Entity types and property keys that used the obsolete "pio_" prefix.
  val obsEntityTypes = Set("pio_user", "pio_item")
  val obsProperties = Set(
    "pio_itypes", "pio_starttime", "pio_endtime",
    "pio_inactive", "pio_price", "pio_rating")

  /** True if any event of `appId` still uses an obsolete "pio_"-prefixed
    * entity type, target entity type, or property key.
    */
  def hasPIOPrefix(eventClient: LEvents, appId: Int): Boolean = {
    eventClient.find(appId = appId).right.get.filter( e =>
      (obsEntityTypes.contains(e.entityType) ||
        e.targetEntityType.map(obsEntityTypes.contains(_)).getOrElse(false) ||
        (!e.properties.keySet.forall(!obsProperties.contains(_)))
      )
    ).hasNext
  }

  /** True if `appId` has no events at all. */
  def isEmpty(eventClient: LEvents, appId: Int): Boolean =
    !eventClient.find(appId = appId).right.get.hasNext

  /** Copies every event of `fromAppId` into `toAppId`, renaming obsolete
    * entity types and stripping the "pio_" prefix from obsolete property
    * keys; afterwards compares the (entityType, targetEntityType) counts of
    * both apps in both directions and logs an error if they disagree.
    */
  def upgradeCopy(eventClient: LEvents, fromAppId: Int, toAppId: Int) {
    val fromDist = CheckDistribution.entityType(eventClient, fromAppId)

    logger.info("FromAppId Distribution")
    fromDist.toSeq.sortBy(-_._2).foreach { e => logger.info(e) }

    val events = eventClient
      .find(appId = fromAppId)
      .right
      .get
      .zipWithIndex
      .foreach { case (fromEvent, index) => {
        // Progress log every 50k events.
        if (index % 50000 == 0) {
          //logger.info(s"Progress: $fromEvent $index")
          logger.info(s"Progress: $index")
        }

        val fromEntityType = fromEvent.entityType
        // Rename obsolete entity types; leave others untouched.
        val toEntityType = NameMap.getOrElse(fromEntityType, fromEntityType)

        val fromTargetEntityType = fromEvent.targetEntityType
        val toTargetEntityType = fromTargetEntityType
          .map { et => NameMap.getOrElse(et, et) }

        // Strip the "pio_" prefix from obsolete property keys.
        val toProperties = DataMap(fromEvent.properties.fields.map {
          case (k, v) =>
            val newK = if (obsProperties.contains(k)) {
              val nK = k.stripPrefix("pio_")
              logger.info(s"property ${k} will be renamed to ${nK}")
              nK
            } else k
            (newK, v)
        })

        val toEvent = fromEvent.copy(
          entityType = toEntityType,
          targetEntityType = toTargetEntityType,
          properties = toProperties)

        eventClient.insert(toEvent, toAppId)
      }}

    val toDist = CheckDistribution.entityType(eventClient, toAppId)

    logger.info("Recap fromAppId Distribution")
    fromDist.toSeq.sortBy(-_._2).foreach { e => logger.info(e) }
    logger.info("ToAppId Distribution")
    toDist.toSeq.sortBy(-_._2).foreach { e => logger.info(e) }

    // Check source -> target: every source pair, after renaming, must occur
    // the same number of times in the target app.
    val fromGood = fromDist
      .toSeq
      .forall { case (k, c) => {
        val (et, tet) = k
        val net = NameMap.getOrElse(et, et)
        val ntet = tet.map(tet => NameMap.getOrElse(tet, tet))
        val nk = (net, ntet)
        val nc = toDist.getOrElse(nk, -1)
        val checkMatch = (c == nc)
        if (!checkMatch) {
          logger.info(s"${k} doesn't match: old has ${c}. new has ${nc}.")
        }
        checkMatch
      }}

    // Check target -> source: every target pair, mapped back through the
    // reverse rename, must occur the same number of times in the source app.
    val toGood = toDist
      .toSeq
      .forall { case (k, c) => {
        val (et, tet) = k
        val oet = RevNameMap.getOrElse(et, et)
        val otet = tet.map(tet => RevNameMap.getOrElse(tet, tet))
        val ok = (oet, otet)
        val oc = fromDist.getOrElse(ok, -1)
        val checkMatch = (c == oc)
        if (!checkMatch) {
          logger.info(s"${k} doesn't match: new has ${c}. old has ${oc}.")
        }
        checkMatch
      }}

    if (!fromGood || !toGood) {
      logger.error("Doesn't match!! There is an import error.")
    } else {
      logger.info("Count matches. Looks like we are good to go.")
    }
  }

  /* For upgrade from 0.8.2 to 0.8.3 only */
  def upgrade(fromAppId: Int, toAppId: Int) {
    val eventClient = Storage.getLEvents().asInstanceOf[HBLEvents]

    require(fromAppId != toAppId,
      s"FromAppId: $fromAppId must be different from toAppId: $toAppId")

    if (hasPIOPrefix(eventClient, fromAppId)) {
      // Refuse to copy into a non-empty target app.
      require(
        isEmpty(eventClient, toAppId),
        s"Target appId: $toAppId is not empty. Please run " +
        "`pio app data-delete <app_name>` to clean the data before upgrading")

      logger.info(s"$fromAppId isEmpty: " + isEmpty(eventClient, fromAppId))

      upgradeCopy(eventClient, fromAppId, toAppId)
    } else {
      logger.info(s"From appId: ${fromAppId} doesn't contain"
        + s" obsolete entityTypes ${obsEntityTypes} or"
        + s" obsolete properties ${obsProperties}."
        + " No need data migration."
        + s" You can continue to use appId ${fromAppId}.")
    }

    logger.info("Done.")
  }
}
| TheDataShed/PredictionIO | data/src/main/scala/storage/hbase/upgrade/Upgrade_0_8_3.scala | Scala | apache-2.0 | 5,903 |
/*
* Ported by Alistair Johnson from
* https://android.googlesource.com/platform/libcore/+/master/luni/src/main/java/java/math/Conversion.java
* Original license copied below:
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.math
import scala.annotation.tailrec
/** Provides {@link BigInteger} base conversions.
*
* Static library that provides {@link BigInteger} base conversion from/to any
* integer represented in a {@link java.lang.String} Object.
*/
private[math] object Conversion {

  /** Holds the maximal exponent for each radix.
   *
   *  Holds the maximal exponent for each radix, so that
   *  radix<sup>digitFitInInt[radix]</sup> fit in an {@code int} (32 bits).
   */
  final val DigitFitInInt = Array[Int](
    -1, -1, 31, 19, 15, 13, 11, 11, 10, 9, 9, 8, 8, 8, 8, 7, 7, 7, 7, 7, 7, 7,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 5)

  /** Precomputed maximal powers of radices.
   *
   *  BigRadices values are precomputed maximal powers of radices (integer
   *  numbers from 2 to 36) that fit into unsigned int (32 bits). bigRadices[0] =
   *  2 ^ 31, bigRadices[8] = 10 ^ 9, etc.
   */
  final val BigRadices = Array[Int](
    -2147483648, 1162261467, 1073741824, 1220703125, 362797056, 1977326743,
    1073741824, 387420489, 1000000000, 214358881, 429981696, 815730721,
    1475789056, 170859375, 268435456, 410338673, 612220032, 893871739,
    1280000000, 1801088541, 113379904, 148035889, 191102976, 244140625,
    308915776, 387420489, 481890304, 594823321, 729000000, 887503681,
    1073741824, 1291467969, 1544804416, 1838265625, 60466176)

  /** @see BigInteger#toString(int) */
  def bigInteger2String(bi: BigInteger, radix: Int): String = {
    val sign = bi.sign
    val numberLength = bi.numberLength
    val digits = bi.digits
    val radixOutOfBounds =
      radix < Character.MIN_RADIX || radix > Character.MAX_RADIX

    if (sign == 0) {
      "0"
    } else if (numberLength == 1) {
      // Single 32-bit word: delegate to Long.toString on the unsigned value.
      val highDigit = digits(numberLength - 1)
      var v = highDigit & 0xFFFFFFFFL
      if (sign < 0)
        v = -v
      java.lang.Long.toString(v, radix)
    } else if (radix == 10 || radixOutOfBounds) {
      // An out-of-range radix defaults to base 10, per the JDK contract.
      bi.toString
    } else {
      // Bits consumed per output digit in the requested radix.
      var bitsForRadixDigit: Double = 0.0
      bitsForRadixDigit = Math.log(radix) / Math.log(2)
      val addForSign = if (sign < 0) 1 else 0
      val biAbsLen = bi.abs().bitLength()
      // Upper bound on the number of characters in the result.
      val resLenInChars = (biAbsLen / bitsForRadixDigit + addForSign).toInt + 1
      var result: String = ""
      var currentChar = resLenInChars
      var resDigit: Int = 0
      if (radix != 16) {
        // General case: repeatedly divide by the largest power of the radix
        // that fits in 32 bits, then emit that chunk digit by digit.
        val temp = new Array[Int](numberLength)
        System.arraycopy(digits, 0, temp, 0, numberLength)
        var tempLen = numberLength
        val charsPerInt = DigitFitInInt(radix)
        val bigRadix = BigRadices(radix - 2)
        @inline
        @tailrec
        def loop(): Unit = {
          // resDigit receives the remainder: the next chunk of output digits.
          resDigit = Division.divideArrayByInt(temp, temp, tempLen, bigRadix)
          val previous = currentChar
          @inline
          @tailrec
          def innerLoop(): Unit = {
            // Emit the chunk's digits, least significant first (prepended).
            currentChar -= 1
            result = Character.forDigit(resDigit % radix, radix).toString + result
            resDigit /= radix
            if(resDigit != 0 && currentChar != 0)
              innerLoop()
          }
          innerLoop()
          // Zero-pad the chunk to charsPerInt characters: inner chunks must
          // be fixed width.
          val delta = charsPerInt - previous + currentChar
          var i: Int = 0
          while (i < delta && currentChar > 0) {
            currentChar -= 1
            result = "0" + result
            i += 1
          }
          // Drop leading zero words of the remaining dividend.
          i = tempLen - 1
          while (i > 0 && temp(i) == 0) {
            i -= 1
          }
          tempLen = i + 1
          if (!(tempLen == 1 && temp(0) == 0))
            loop()
        }
        loop()
      } else {
        // Hex fast path: four bits per digit, no division required.
        for (i <- 0 until numberLength) {
          var j = 0
          while (j < 8 && currentChar > 0) {
            resDigit = digits(i) >> (j << 2) & 0xf
            currentChar -= 1
            result = resDigit.toHexString + result
            j += 1
          }
        }
      }
      // strip leading zeros
      result = result.dropWhile(_ == '0')
      if (sign == -1) "-" + result
      else result
    }
  }

  /** The string representation scaled by zero.
   *
   *  Builds the correspondent {@code String} representation of {@code val} being
   *  scaled by 0.
   *
   *  @see BigInteger#toString()
   *  @see BigDecimal#toString()
   */
  def toDecimalScaledString(bi: BigInteger): String = {
    val sign: Int = bi.sign
    val numberLength: Int = bi.numberLength
    val digits: Array[Int] = bi.digits

    if (sign == 0) {
      "0"
    } else if (numberLength == 1) {
      // Single word: interpret it as an unsigned 32-bit value.
      val absStr = Integer.toUnsignedString(digits(0))
      if (sign < 0) "-" + absStr
      else absStr
    } else {
      var result: String = ""
      val temp = new Array[Int](numberLength)
      var tempLen = numberLength
      System.arraycopy(digits, 0, temp, 0, tempLen)
      do {
        // Divide the array of digits by 1000000000 and compute the remainder
        var rem: Int = 0
        var i: Int = tempLen - 1
        while (i >= 0) {
          val temp1 = (rem.toLong << 32) + (temp(i) & 0xFFFFFFFFL)
          val quot = java.lang.Long.divideUnsigned(temp1, 1000000000L).toInt
          temp(i) = quot
          rem = (temp1 - quot * 1000000000L).toInt
          i -= 1
        }
        // Convert the remainder to string, and add it to the result;
        // inner chunks are zero-padded to a fixed 9 digits.
        val remStr = rem.toString()
        val padding = "000000000".substring(remStr.length)
        result = padding + remStr + result
        // Drop leading zero words of the remaining dividend.
        while ((tempLen != 0) && (temp(tempLen - 1) == 0))
          tempLen -= 1
      } while (tempLen != 0)
      result = dropLeadingZeros(result)
      if (sign < 0) "-" + result
      else result
    }
  }

  /** Removes the leading '0' characters of `s` (may return an empty string). */
  private def dropLeadingZeros(s: String): String = {
    var zeroPrefixLength = 0
    val len = s.length
    while (zeroPrefixLength < len && s.charAt(zeroPrefixLength) == '0')
      zeroPrefixLength += 1
    s.substring(zeroPrefixLength)
  }

  /* can process only 32-bit numbers */
  def toDecimalScaledString(value: Long, scale: Int): String = {
    if (value == 0) {
      scale match {
        case 0 => "0"
        case 1 => "0.0"
        case 2 => "0.00"
        case 3 => "0.000"
        case 4 => "0.0000"
        case 5 => "0.00000"
        case 6 => "0.000000"
        case _ =>
          // -Int.MinValue would overflow, hence the hard-coded string.
          val scaleVal =
            if (scale == Int.MinValue) "2147483648"
            else java.lang.Integer.toString(-scale)
          val result = if (scale < 0) "0E+" else "0E"
          result + scaleVal
      }
    } else {
      // one 32-bit unsigned value may contain 10 decimal digits
      // Explanation why 10+1+7:
      // +1 - one char for sign if needed.
      // +7 - For "special case 2" (see below) we have 7 free chars for inserting necessary scaled digits.
      val resLengthInChars = 18
      val negNumber = value < 0
      var result = ""
      // Allocated [resLengthInChars+1] characters.
      // a free latest character may be used for "special case 1" (see below)
      var currentChar = resLengthInChars
      var v: Long = if (negNumber) -value else value
      do {
        // Emit decimal digits from least to most significant (prepended).
        val prev = v
        v /= 10
        currentChar -= 1
        result = (prev - v * 10).toInt.toString + result
      } while (v != 0)
      val exponent = resLengthInChars - currentChar - scale - 1
      if (scale > 0 && exponent >= -6) {
        val index = exponent + 1
        if (index > 0) {
          // special case 1: the decimal point falls inside the digit run.
          result = result.substring(0, index) + "." + result.substring(index)
        } else {
          // special case 2: prepend "0." plus -index leading zeros.
          for (j <- 0 until -index) {
            result = "0" + result
          }
          result = "0." + result
        }
      } else if (scale !=0) {
        // Scientific notation: append an explicit exponent.
        var result1 = exponent.toString
        if (exponent > 0)
          result1 = "+" + result1
        result1 = "E" + result1
        result =
          if (resLengthInChars - currentChar > 1)
            result.substring(0, 1) + "." + result.substring(1) + result1
          else
            result + result1
      }
      if (negNumber) "-" + result
      else result
    }
  }

  /** Converts `bi` to the nearest Double (IEEE-754 round-to-nearest-even). */
  def bigInteger2Double(bi: BigInteger): Double = {
    if (bi.numberLength < 2 || ((bi.numberLength == 2) && (bi.digits(1) > 0))) {
      // Fits in a positive Long: exact conversion path.
      bi.longValue()
    } else if (bi.numberLength > 32) {
      // More than 1024 bits: magnitude exceeds the Double range.
      if (bi.sign > 0) Double.PositiveInfinity
      else Double.NegativeInfinity
    } else {
      val bitLen = bi.abs().bitLength()
      var exponent: Long = bitLen - 1
      val delta = bitLen - 54
      // Keep the top 54 bits: a 53-bit mantissa plus one rounding bit.
      val lVal = bi.abs().shiftRight(delta).longValue()
      var mantissa = lVal & 0x1FFFFFFFFFFFFFL
      if (exponent == 1023 && mantissa == 0X1FFFFFFFFFFFFFL) {
        // Rounding would overflow the largest finite Double.
        if (bi.sign > 0) Double.PositiveInfinity
        else Double.NegativeInfinity
      } else if (exponent == 1023 && mantissa == 0x1FFFFFFFFFFFFEL) {
        if (bi.sign > 0) Double.MaxValue
        else -Double.MaxValue
      } else {
        // Round to nearest, ties to even; droppedBits reports whether any of
        // the discarded low-order bits were non-zero.
        val droppedBits = BitLevel.nonZeroDroppedBits(delta, bi.digits)
        if (((mantissa & 1) == 1) && (((mantissa & 2) == 2) || droppedBits))
          mantissa += 2
        mantissa >>= 1
        // Assemble sign, biased exponent and mantissa into the IEEE-754 bits.
        val resSign = if (bi.sign < 0) 0x8000000000000000L else 0
        exponent = ((1023 + exponent) << 52) & 0x7FF0000000000000L
        val result = resSign | exponent | mantissa
        java.lang.Double.longBitsToDouble(result)
      }
    }
  }
}
| SebsLittleHelpers/scala-js | javalib/src/main/scala/java/math/Conversion.scala | Scala | apache-2.0 | 10,230 |
package org.bitcoins.chain.models
import org.bitcoins.chain.config.ChainAppConfig
import org.bitcoins.core.crypto.DoubleSha256DigestBE
import org.bitcoins.db.{CRUD, SlickUtil}
import slick.jdbc.SQLiteProfile
import slick.lifted.TableQuery
import slick.jdbc.SQLiteProfile.api._
import scala.concurrent.{ExecutionContext, Future}
/** Slick-backed data access object for compact filter headers, keyed by the
  * filter header hash.
  */
case class CompactFilterHeaderDAO()(
    implicit ec: ExecutionContext,
    appConfig: ChainAppConfig)
    extends CRUD[CompactFilterHeaderDb, DoubleSha256DigestBE] {
  import org.bitcoins.db.DbCommonsColumnMappers._

  override val table = TableQuery[CompactFilterHeaderTable]

  /** Inserts all rows as given; primary keys come from the caller (no auto-increment). */
  override def createAll(filterHeaders: Vector[CompactFilterHeaderDb]): Future[
    Vector[CompactFilterHeaderDb]] = {
    SlickUtil.createAllNoAutoInc(ts = filterHeaders,
      database = database,
      table = table)
  }

  /** Finds the rows that correlate to the given primary keys */
  override protected def findByPrimaryKeys(
      ids: Vector[DoubleSha256DigestBE]): Query[
    Table[_],
    CompactFilterHeaderDb,
    Seq] =
    table.filter(_.hash.inSet(ids))

  override protected def findAll(ts: Vector[CompactFilterHeaderDb]): Query[
    Table[_],
    CompactFilterHeaderDb,
    Seq] =
    findByPrimaryKeys(ts.map(_.hashBE))

  /** Looks up a filter header by its own hash (the primary key). */
  def findByHash(
      hash: DoubleSha256DigestBE): Future[Option[CompactFilterHeaderDb]] = {
    read(hash)
  }

  /** Looks up the filter header associated with the given block hash, if any. */
  def findByBlockHash(
      hash: DoubleSha256DigestBE): Future[Option[CompactFilterHeaderDb]] = {
    // take(1): at most one header is expected per block hash.
    val query = table.filter(_.blockHash === hash).take(1)
    database.runVec(query.result).map(_.headOption)
  }

  /** Looks up all filter headers whose block hash is in `hashes`. */
  def findAllByBlockHashes(hashes: Vector[DoubleSha256DigestBE]): Future[
    Vector[CompactFilterHeaderDb]] = {
    val query = table.filter(_.blockHash.inSet(hashes))
    database.runVec(query)
  }

  /** Retrieves a [[CompactFilterHeaderDb]] at the given height */
  def getAtHeight(height: Int): Future[Vector[CompactFilterHeaderDb]] = {
    val query = getAtHeightQuery(height)
    database.runVec(query)
  }

  private def getAtHeightQuery(
      height: Int): SQLiteProfile.StreamingProfileAction[
    Seq[CompactFilterHeaderDb],
    CompactFilterHeaderDb,
    Effect.Read] = {
    table.filter(_.height === height).result
  }

  /** Returns the maximum block height from our database */
  def maxHeight: Future[Int] = {
    val query = maxHeightQuery
    val result = database.run(query)
    result
  }

  // Yields 0 when the table is empty (max over no rows).
  private def maxHeightQuery: SQLiteProfile.ProfileAction[
    Int,
    NoStream,
    Effect.Read] = {
    val query = table.map(_.height).max.getOrElse(0).result
    query
  }
}
| bitcoin-s/bitcoin-s-core | chain/src/main/scala/org/bitcoins/chain/models/CompactFilterHeaderDAO.scala | Scala | mit | 2,634 |
package knot.net.http.models
/** States of the HTTP message parser state machine. */
object ParserStates {

  /** Base type for all parser states. */
  trait ParserState

  // `case object` / `case class` variants (instead of plain `object`s) give
  // readable toString output and standard product/equality semantics;
  // existing pattern matches on these singletons are unaffected.
  case object Initial extends ParserState
  case object ReadHeader extends ParserState
  case object ReadBody extends ParserState
  case object MoreHeaderData extends ParserState
  /** Body bytes are still outstanding.
    * NOTE(review): assumed to be the declared total length and the bytes
    * still to read — confirm against the parser's usage.
    */
  case class MoreBodyData(contentLength: Int, remaining: Int) extends ParserState
  case object SuccessHeader extends ParserState
  case object SuccessBody extends ParserState
}
| defvar/knot | knot-net/src/main/scala/knot/net/http/models/ParserStates.scala | Scala | mit | 407 |
package edu.neu.coe.csye._7200.ingest
import edu.neu.coe.csye._7200.ingest._
import org.scalatest.{FlatSpec, Matchers}
/**
* Created by scalaprof on 9/13/16.
*/
/** Unit tests for the movie-ingest case classes: construction of Name,
  * Principal, Rating, Format, Production and Reviews from raw strings and
  * from pre-parsed values.
  */
class MovieSpec extends FlatSpec with Matchers {

  // Golden ratio, used as an arbitrary non-trivial Double fixture.
  val phi = (math.sqrt(5) + 1) / 2

  behavior of "Name"
  it should "work for String" in {
    val x = Name("Tom Brady")
    x should matchPattern {
      case Name("Tom", None, "Brady", None) =>
    }
    Name("NoΓ©mie Lenoir") should matchPattern {
      case Name("NoΓ©mie", None, "Lenoir", None) =>
    }
    // initials become first name + middle name
    Name("J.J. Abrams") should matchPattern {
      case Name("J.", Some("J."), "Abrams", None) =>
    }
    // generational suffix is captured separately
    Name("Robert Downey Jr.") should matchPattern {
      case Name("Robert", None, "Downey", Some("Jr.")) =>
    }
  }
  it should "work for Name" in {
    val x = Name("Tom", None, "Brady", None)
    x should matchPattern {
      case Name("Tom", None, "Brady", None) =>
    }
  }

  behavior of "Principal"
  it should "work for String, Int" in {
    val x = Principal("Tom Brady", 1)
    x should matchPattern { case Principal(Name("Tom", None, "Brady", None), 1) => }
  }
  it should "work for List[String]" in {
    // second list element is the billing number, parsed as Int
    Principal(List("Tom Brady", "1")) should matchPattern {
      case Principal(Name("Tom", None, "Brady", None), 1) =>
    }
    Principal(List("NoΓ©mie Lenoir", "2")) should matchPattern {
      case Principal(Name("NoΓ©mie", None, "Lenoir", None), 2) =>
    }
    Principal(List("J.J. Abrams", "3")) should matchPattern {
      case Principal(Name("J.", Some("J."), "Abrams", None), 3) =>
    }
    Principal(List("Robert Downey Jr.", "4")) should matchPattern {
      case Principal(Name("Robert", None, "Downey", Some("Jr.")), 4) =>
    }
  }

  behavior of "Rating"
  it should "work for String, Int" in {
    val x = Rating("PG", Some(13))
    x should matchPattern {
      case Rating("PG", Some(13)) =>
    }
  }
  it should "work for PG-13" in {
    // "PG-13" splits into code "PG" and age 13
    val x = Rating("PG-13")
    x should matchPattern {
      case Rating("PG", Some(13)) =>
    }
  }
  it should "work for R" in {
    // rating without an age component
    val x = Rating("R")
    x should matchPattern {
      case Rating("R", None) =>
    }
  }

  behavior of "Format"
  it should "work for Boolean, String, Double, Int" in {
    val x = Format(color = true, "Swahili", phi, 129)
    x should matchPattern {
      case Format(true, "Swahili", `phi`, 129) =>
    }
  }
  it should "work for List[String]" in {
    // "Color" parses to color = true; aspect ratio and duration parse numerically
    val x = Format(List("Color", "Swahili", phi.toString, "129"))
    x should matchPattern {
      case Format(true, "Swahili", `phi`, 129) =>
    }
  }

  behavior of "Production"
  it should "work for String, Int" in {
    val x = Production("Kenya", 1000000, 1000001, 2016)
    x should matchPattern {
      case Production("Kenya", 1000000, 1000001, 2016) =>
    }
  }
  it should "work for List[String]" in {
    val x = Production(List("Kenya", "1000000", "1000001", "2016"))
    x should matchPattern {
      case Production("Kenya", 1000000, 1000001, 2016) =>
    }
  }
  it should "define isKiwi properly" in {
    // isKiwi is true only for New Zealand productions
    Production("Kenya", 1000000, 1000001, 2016).isKiwi shouldBe false
    Production("New Zealand", 1000000, 1000001, 2016).isKiwi shouldBe true
  }

  behavior of "Reviews"
  it should "work for params" in {
    val x = Reviews(8.14, 42, Rating("PG-13"), 7, 10, 12, 99)
    x should matchPattern {
      case Reviews(8.14, 42, Rating("PG", Some(13)), 7, 10, 12, 99) =>
    }
  }
  it should "work for List[String]" in {
    // the third element is parsed into a nested Rating
    val x = Reviews(List("8.14", "42", "PG-13", "7", "10", "12", "99"))
    x should matchPattern {
      case Reviews(8.14, 42, Rating("PG", Some(13)), 7, 10, 12, 99) =>
    }
  }
}
| rchillyard/Scalaprof | FunctionalProgramming/src/test/scala/edu/neu/coe/csye/_7200/ingest/MovieSpec.scala | Scala | gpl-2.0 | 3,630 |
package sparklyr
import java.io.{File, FileWriter}
import org.apache.spark._
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
import scala.util.Random
/** Runs an R closure against one Spark partition.
  *
  * For each partition, starts a backend server and an Rscript process, hands
  * the partition's rows to R through a [[WorkerContext]], waits for the R
  * side to signal completion, and returns the rows produced by the closure.
  */
class WorkerApply(
    closure: Array[Byte],
    columns: Array[String],
    config: String,
    port: Int,
    groupBy: Array[String],
    closureRLang: Array[Byte],
    bundlePath: String,
    customEnv: Map[String, String],
    connectionTimeout: Int,
    context: Array[Byte],
    options: Map[String, String],
    timeZoneId: String,
    schema: StructType,
    genBarrierMap: () => Map[String, Any],
    genPartitionIndex: () => Int
) extends java.io.Serializable {

  // Exception raised on either helper thread, re-thrown on the caller thread.
  private[this] var exception: Option[Exception] = None
  private[this] var backendPort: Int = 0

  /** Writes the bundled R worker sources to a session-specific scratch file
    * and returns its absolute path.
    */
  def workerSourceFile(rscript: Rscript, sessionId: Int): String = {
    val source = Sources.sources

    val tempFile: File = new File(
      rscript.getScratchDir() + File.separator + "sparkworker_" + sessionId.toString + ".R")
    val outStream: FileWriter = new FileWriter(tempFile)
    // FIX: close the writer even if write() throws. The original only
    // flushed, leaking the file handle on every invocation.
    try {
      outStream.write(source)
      outStream.flush()
    } finally {
      outStream.close()
    }

    tempFile.getAbsolutePath()
  }

  /** Processes one partition: returns the rows produced by the R closure. */
  def apply(iterator: Iterator[Row]): Iterator[Row] = {
    val sessionId: Int = Random.nextInt(10000)
    val logger = new Logger("Worker", sessionId)
    // Used by the helper threads to signal completion (or failure).
    val lock: AnyRef = new Object()

    // No point in starting up R process to not process anything
    if (!iterator.hasNext) return Array[Row]().iterator

    val workerContext = new WorkerContext(
      iterator,
      lock,
      closure,
      columns,
      groupBy,
      closureRLang,
      bundlePath,
      context,
      timeZoneId,
      schema,
      options,
      genBarrierMap(),
      genPartitionIndex()
    )

    val tracker = new JVMObjectTracker()
    val contextId = tracker.put(workerContext)
    logger.log("is tracking worker context under " + contextId)

    logger.log("initializing backend")
    val backend: Backend = new Backend()
    backend.setTracker(tracker)

    /*
     * initialize backend as worker and service, since exceptions and
     * terminating the r session should not shutdown the process
     */
    backend.setType(
      true,  /* isService */
      false, /* isRemote */
      true,  /* isWorker */
      false  /* isBatch */
    )

    backend.setHostContext(
      contextId
    )

    backend.init(
      port,
      sessionId,
      connectionTimeout
    )

    backendPort = backend.getPort()

    new Thread("starting backend thread") {
      override def run(): Unit = {
        try {
          logger.log("starting backend")
          backend.run()
        } catch {
          case e: Exception =>
            logger.logError("failed while running backend: ", e)
            exception = Some(e)
            lock.synchronized {
              lock.notify
            }
        }
      }
    }.start()

    new Thread("starting rscript thread") {
      override def run(): Unit = {
        try {
          logger.log("is starting rscript")
          val rscript = new Rscript(logger)
          val sourceFilePath: String = workerSourceFile(rscript, sessionId)
          // Runs the R worker; signals the caller once it returns.
          rscript.init(
            List(
              sessionId.toString,
              backendPort.toString,
              config
            ),
            sourceFilePath,
            customEnv,
            options
          )
          lock.synchronized {
            lock.notify
          }
        } catch {
          case e: Exception =>
            logger.logError("failed to run rscript: ", e)
            exception = Some(e)
            lock.synchronized {
              lock.notify
            }
        }
      }
    }.start()

    logger.log("is waiting using lock for RScript to complete")
    // NOTE(review): wait() outside a condition loop can return on a spurious
    // wakeup; a completion flag checked in a while-loop would be more robust.
    lock.synchronized {
      lock.wait()
    }
    logger.log("completed wait using lock for RScript")

    if (exception.isDefined) {
      throw exception.get
    }

    logger.log("is returning RDD iterator with " + workerContext.getResultArray().length + " rows")
    return workerContext.getResultArray().iterator
  }
}
| rstudio/sparklyr | java/spark-1.6.0/workerapply.scala | Scala | apache-2.0 | 4,012 |
// Worksheet demonstrating Scala class basics: constructors, overriding,
// abstract members, apply, lazy vals, and singleton objects.

// let's define the simplest class
class User
val u = new User
println(u)

// constructor parameter copied into an explicit field
class User1(n: String) {
  val name: String = n
  def greet: String = "Hello from " + name
  override def toString = "User(" + name +")"
}
val u1 = new User1("Obul")
println(u1)

// `val` in the parameter list makes `name` a public field directly
class User2(val name: String) {
  def greet: String = "Hi " + name
  override def toString: String = "User: " + name
}
val u2 = new User2("Obul2")
println(u2)

// Polymorphism: the overridden method is dispatched at runtime
class A {
  def hi = "Hello from A"
  override def toString = getClass.getName
}
class B extends A {
  override def hi = "Hello from B"
}
println(new A().hi)
println(new B().hi)

// abstract classes: abstract members are implemented by subclasses
abstract class Car {
  val year: Int
  val automatic: Boolean = true
  def color: String
}
class RedMini(val year: Int) extends Car {
  def color = "Red"
}
println(new RedMini(2005))

// overloading methods allowed
// apply method: the default method, invoked when the instance is "called"
class Multiplier(factor: Int) {
  def apply(input: Int) = input * factor
}
val tripleMe = new Multiplier(3)
println(tripleMe.apply(10))
println(tripleMe(10))

// `x` is evaluated eagerly at construction; lazy `y` only on first access
class RandomPoint {
  val x = {println("Creating x"); util.Random.nextInt()}
  lazy val y = {println("Creating y"); util.Random.nextInt()}
}
val point = new RandomPoint()
println("Created class")
println(point, point.x, point.y)

// packages
// package <identifier>
// import <package>.<class>

// access modifiers
// private and protected
// fine grained privacy controls can be applied to specific packages as well

// final class: class members can never be overridden in subclasses.

// An object is a type of class that can have no more than one instance
// also known in object oriented design as a singleton.
// (The body runs once, on first access — note "in Hello" prints only once.)
object Hello {
  println("in Hello");
  def hi = "hi"
}
println(Hello.hi)
println(Hello.hi)

// A companion object is an object that shares the same name as a class
// and is defined together in the same file as the class.
// Having a companion object for a class is a common pattern in Scala
// but there is also a feature from which they can benefit.
// Companion objects and classes are considered a single unit
// in terms of access controls,
// so they can access each other's private and protected fields and methods.
// i.e. instance-level and "static"-level members kept separate
// class Multiplier(val x: Int) { def product(y: Int) = x * y }
// object Multiplier { def apply(x: Int) = new Multiplier(x) }
// val tripler = Multiplier(3)
// println(tripler.product(13))

// case classes: ?
// traits are like Javas interfaces, but a little richer
| obulpathi/scala | 06.Classes/classes.scala | Scala | gpl-3.0 | 2,533 |
package edu.cmu.dynet.examples
import edu.cmu.dynet._
import scala.language.implicitConversions
import java.nio.file.Paths
/** LSTM language model trained on mini-batches of equal-length, EOS-padded
  * sentences (DyNet Scala bindings).
  */
class RnnLanguageModelBatch(
    model: ParameterCollection,
    layers: Int,
    inputDim: Int,
    hiddenDim: Int,
    vocabSize: Int
) {
  val rnn = new LstmBuilder(layers, inputDim, hiddenDim, model)
  // word embedding table
  val p_c = model.addLookupParameters(vocabSize, Dim(inputDim))
  // output projection (hidden -> vocab logits) and bias
  val p_R = model.addParameters(Dim(vocabSize, hiddenDim))
  val p_bias = model.addParameters(Dim(vocabSize))

  /** Builds the summed negative log-likelihood for the batch of `bsize`
    * sentences starting at `sents(id)`, and increments `tokens` by the
    * number of non-EOS target tokens (used for per-token perplexity).
    */
  def getNegLogProb(
      sents: Seq[IntVector],
      id: Int,
      bsize: Int,
      tokens: IntPointer
  ): Expression = {
    val slen = sents(id).size
    // attach the RNN to the current computation graph
    rnn.newGraph()
    // fresh hidden state for this batch
    rnn.startNewSequence()
    // load the projection parameters into the graph
    val i_R = Expression.parameter(p_R)
    val i_bias = Expression.parameter(p_bias)
    // per-timestep batched losses
    val errs = new ExpressionVector()

    // Set all inputs to the SOS symbol
    val sos = sents(0)(0)
    val last_arr = new UnsignedVector()
    for (_ <- 1 to bsize) last_arr.add(sos)
    val next_arr = new UnsignedVector(bsize)

    // run rnn on batch
    for (t <- 1 until slen) {
      // fill next_arr with the target tokens at position t
      for (i <- 0 until bsize) {
        next_arr.update(i, sents(id + i)(t))
        // count non-EOS tokens
        if (next_arr(i) != sents(id).last) tokens.increment()
      }
      // embed the current tokens
      val i_x_t = Expression.lookup(p_c, last_arr)
      // one LSTM step
      val i_y_t = rnn.addInput(i_x_t)
      // project to vocabulary logits
      val i_r_t = i_bias + i_R * i_y_t
      // batched cross-entropy against the targets
      val i_err = Expression.pickNegLogSoftmax(i_r_t, next_arr)
      errs.add(i_err)
      // BUG FIX: the original did `last_arr = next_arr`, which aliases the
      // two vectors (reference assignment in Scala); on the next timestep,
      // refilling next_arr overwrote the inputs *before* they were embedded,
      // so the model was fed the very token it had to predict. The C++
      // example this was ported from copies by value here. Copy element-wise
      // instead of aliasing.
      for (i <- 0 until bsize) last_arr.update(i, next_arr(i))
    }
    // add all errors
    val i_nerr = Expression.sumBatches(Expression.sum(errs))
    i_nerr
  }

  /** Samples up to `maxLen` tokens from the model and prints them; `temp`
    * is a softmax temperature (higher = more random).
    */
  def randomSample(d: WordDict, maxLen: Int = 150, temp: Float = 1.0f) = {
    ComputationGraph.renew()
    rnn.newGraph()
    rnn.startNewSequence()
    // load the projection parameters into the fresh graph
    val i_R = Expression.parameter(p_R)
    val i_bias = Expression.parameter(p_bias)

    val kSOS = RnnLanguageModelBatch.kSOS
    val kEOS = RnnLanguageModelBatch.kEOS

    // start generating
    var len = 0
    var cur = kSOS
    while (len < maxLen) {
      //println("len", len, "cur", cur)
      len += 1
      val i_x_t = Expression.lookup(p_c, cur)
      //show(i_x_t.dim, "i_x_t ")
      val i_y_t = rnn.addInput(i_x_t)
      //show(i_y_t.dim, "i_y_t ")
      val i_r_t = i_bias + i_R * i_y_t
      //show(i_r_t.dim, "i_r_t ")
      val ydist = Expression.softmax(i_r_t / temp)
      //show(ydist.dim, "ydist ")

      // sample token, rejecting index 0 and the start symbol
      var w = 0
      while (w == 0 || w == kSOS) {
        // The C++ example uses cg.incremental_forward, but that doesn't work here.
        val dist = ComputationGraph.forward(ydist)
        w = Utilities.sample(dist.toVector)
      }

      if (w == kEOS) {
        // end of sentence: reset the RNN state and start a new line
        rnn.startNewSequence()
        println()
        cur = kSOS
      } else {
        print(if (cur == kSOS) "" else " ")
        print(d.convert(w))
        cur = w
      }
    }
  }
}
/** Driver: reads train/dev corpora, batches sentences by length, trains the
  * language model with Adam, and periodically reports perplexity and prints
  * sampled text.
  */
object RnnLanguageModelBatch {
  // sentence start / end marker ids, assigned from the dictionary in main()
  var kSOS = 0
  var kEOS = 0
  var INPUT_VOCAB_SIZE = 0
  var OUTPUT_VOCAB_SIZE = 0
  val BATCH_SIZE = 1
  val DEV_BATCH_SIZE = 16
  val LAYERS = 2
  val INPUT_DIM = 8
  val HIDDEN_DIM = 24
  // negative value means: train until interrupted
  val NUM_EPOCHS = -1

  val userDir = System.getProperty("user.dir")
  val TRAIN_FILE = Paths.get(userDir, "../../examples/cpp/example-data/train-hsm.txt").toString
  val DEV_FILE = Paths.get(userDir, "../../examples/cpp/example-data/dev-hsm.txt").toString

  def main(args: Array[String]) {
    Initialize.initialize()

    val d = new WordDict
    kSOS = d.convert("<s>")
    kEOS = d.convert("</s>")

    // datasets
    val training = new scala.collection.mutable.ArrayBuffer[IntVector]
    val dev = new scala.collection.mutable.ArrayBuffer[IntVector]

    // read the training corpus, counting lines and tokens
    var tlc = 0
    var ttoks = 0
    for (line <- scala.io.Source.fromFile(TRAIN_FILE).getLines) {
      tlc += 1
      val row = WordDict.read_sentence(line, d)
      training.append(row)
      ttoks += row.size
    }
    println(s"${tlc} lines, ${ttoks} tokens, ${d.size} types")

    // sort the training sentences in descending order of length (for minibatching)
    training.sortBy(row => -row.size).zipWithIndex.foreach {
      case (iv, i) => training(i) = iv
    }
    // pad the sentences in the same batch with EOS so they are the same length
    for (i <- 0 until training.size by BATCH_SIZE) {
      for (j <- 1 until BATCH_SIZE) {
        while (training(i + j).size < training(i).size) {
          training(i + j).add(kEOS)
        }
      }
    }

    // freeze dictionary
    d.freeze()
    d.set_unk("UNK")
    INPUT_VOCAB_SIZE = d.size()
    OUTPUT_VOCAB_SIZE = d.size()

    // read validation dataset
    var dlc = 0
    var dtoks = 0
    for (line <- scala.io.Source.fromFile(DEV_FILE).getLines) {
      dlc += 1
      val row = WordDict.read_sentence(line, d)
      dev.append(row)
      dtoks += row.size
    }
    println(s"${dlc} lines, ${dtoks} tokens")

    // sort the dev sentences in descending order of length
    dev.sortBy(row => -row.size).zipWithIndex.foreach {
      case (iv, i) => dev(i) = iv
    }
    // pad
    for (i <- 0 until dev.size by BATCH_SIZE) {
      for (j <- 1 until BATCH_SIZE) {
        while (dev(i + j).size < dev(i).size) {
          dev(i + j).add(kEOS)
        }
      }
    }

    val model = new ParameterCollection
    val adam = new AdamTrainer(model, 0.001f, 0.9f, 0.999f, 1e-8f)
    // scale gradient clipping with the batch size
    adam.clipThreshold = adam.clipThreshold * BATCH_SIZE
    val lm = new RnnLanguageModelBatch(model, LAYERS, INPUT_DIM, HIDDEN_DIM, INPUT_VOCAB_SIZE)

    val numBatches = training.size / BATCH_SIZE - 1
    val numDevBatches = dev.size / DEV_BATCH_SIZE - 1
    val sizeSamples = 200

    // random indexing
    val order = new IntVector(0 until numBatches)

    var first = true
    var epoch = 0
    while (epoch < NUM_EPOCHS || NUM_EPOCHS < 0) {
      // skip the epoch bump on the very first pass
      if (first) {
        first = false
      } else {
        adam.updateEpoch()
      }

      // reshuffle
      Utilities.shuffle(order)

      var loss = 0.0
      val tokens = new IntPointer
      for (si <- 0 until numBatches) {
        ComputationGraph.renew
        val id = order(si) * BATCH_SIZE
        val bsize = math.min(training.size - id, BATCH_SIZE)
        val loss_expr = lm.getNegLogProb(training, id, bsize, tokens)
        loss += ComputationGraph.forward(loss_expr).toFloat
        ComputationGraph.backward(loss_expr)
        adam.update()

        // report training perplexity ten times per epoch (and at the end)
        if ((si + 1) % (numBatches / 10) == 0 || si == numBatches - 1) {
          adam.status()
          val lt = loss / tokens.value
          println(s" E = ${lt} ppl=${math.exp(lt)}")
          loss = 0.0
          tokens.set(0)
        }
      }

      // evaluate on the dev set
      var dloss = 0.0
      val dtokens = new IntPointer
      for (i <- 0 until numDevBatches) {
        ComputationGraph.renew
        val id = i * DEV_BATCH_SIZE
        val bsize = math.min(dev.size - id, DEV_BATCH_SIZE)
        val loss_expr = lm.getNegLogProb(dev, id, bsize, dtokens)
        dloss += ComputationGraph.forward(loss_expr).toFloat
      }
      val dt = dloss / dtokens.value
      println(s"***DEV [epoch=${epoch}] E = ${dt} ppl=${math.exp(dt)}")

      // print text sampled from the current model
      println("-----------------------")
      lm.randomSample(d, sizeSamples)
      println("-----------------------")
      epoch += 1
    }
  }
}
| xunzhang/dynet | contrib/swig/src/main/scala/edu/cmu/dynet/examples/RnnLanguageModelBatch.scala | Scala | apache-2.0 | 7,351 |
package models
import scala.concurrent.Future
/** Persistence abstraction for customer shopping baskets. */
trait Repository {
  /** Fetches the basket for `customerId`; None when the customer has none. */
  def getBasket(customerId: String): Future[Option[CustomerBasket]]
  /** Stores the given basket and returns the persisted basket.
    * NOTE(review): Option semantics on failure are assumed — confirm with implementations. */
  def updateBasket(basket: CustomerBasket): Future[Option[CustomerBasket]]
  /** Deletes the basket with the given id; the Boolean presumably indicates
    * whether a basket was removed — confirm with implementations. */
  def deleteBasket(id: String): Future[Boolean]
} | ziyasal/Wildfire | eshop/apis/basket/src/main/scala/models/Repository.scala | Scala | mit | 262 |
package org.jetbrains.plugins.scala
package codeInspection.collections
import org.jetbrains.plugins.scala.codeInspection.InspectionBundle
/**
* Nikolay.Tropin
* 1/24/14
*/
/** Tests for the "sort then filter" inspection: `xs.sorted.filter(p)` should
  * be flagged and rewritten to `xs.filter(p).sorted` (filtering first is
  * cheaper) — except when the predicate has side effects.
  */
class SortFilterTest extends OperationsOnCollectionInspectionTest {
  override def hint: String = InspectionBundle.message("sort.filter.hint")

  def testWithoutParams() {
    // START/END mark the range expected to be highlighted
    val selected = s"List(0, 1).${START}sorted.filter(_ => true)$END"
    check(selected)
    val text = "List(0, 1).sorted.filter(_ => true)"
    val result = "List(0, 1).filter(_ => true).sorted"
    testFix(text, result, hint)
  }

  def testWithParameter() {
    val selected = s"List(0, 1).${START}sortWith((x, y) => x < y).filter(_ => true)$END"
    check(selected)
    val text = "List(0, 1).sortWith((x, y) => x < y).filter(_ => true)"
    val result = "List(0, 1).filter(_ => true).sortWith((x, y) => x < y)"
    testFix(text, result, hint)
  }

  def testWithGenericParameter() {
    // explicit type argument must survive the rewrite
    val selected = s"List(0, 1).${START}sortBy[String](_.toString).filter(_ => true)$END"
    check(selected)
    val text = "List(0, 1).sortBy[String](_.toString).filter(_ => true)"
    val result = "List(0, 1).filter(_ => true).sortBy[String](_.toString)"
    testFix(text, result, hint)
  }

  def testInfix() {
    // infix call style is normalized to dot notation by the fix
    val selected = s"List(0, 1).${START}sortBy[String](_.toString) filter (_ => true)$END"
    check(selected)
    val text = "List(0, 1).sortBy[String](_.toString) filter (_ => true)"
    val result = "List(0, 1).filter(_ => true).sortBy[String](_.toString)"
    testFix(text, result, hint)
  }

  // A predicate with side effects must not be reordered before sorted.
  def testWithSideEffect(): Unit = {
    checkTextHasNoErrors(
      """
        |var q = 1
        |Seq(3, 1, 2).sorted.filter {
        |  i =>
        |    q += 1
        |    i % 2 == 0
        |}
      """.stripMargin)
  }

  override val inspectionClass = classOf[SortFilterInspection]
}
| triggerNZ/intellij-scala | test/org/jetbrains/plugins/scala/codeInspection/collections/SortFilterTest.scala | Scala | apache-2.0 | 1,849 |
package ru.avhaliullin.whatever.semantic.function
import ru.avhaliullin.whatever.frontend.{syntax => syn}
import ru.avhaliullin.whatever.semantic.ImportsContext
import ru.avhaliullin.whatever.semantic.tpe.Tpe
/**
* @author avhaliullin
*/
class FnAnalyzer {
  /** Resolves a parsed function signature into its semantic counterpart.
    *
    * Argument and return types are looked up through the given imports
    * context; the resulting signature is tagged with the context's module.
    */
  def convertSignature(sig: syn.FnDefinition.Signature, ic: ImportsContext): FnSignature = {
    val resolvedArgs = sig.args.map { arg =>
      FnSignature.Arg(arg.name, Tpe.getTpe(arg.tpe, ic))
    }
    val returnType = Tpe.getTpe(sig.returnT, ic)
    FnSignature(sig.name, resolvedArgs, returnType, ic.module)
  }
}
/** Default, stateless instance of [[FnAnalyzer]]. */
object FnAnalyzer extends FnAnalyzer | avhaliullin/whatever-compiler | compiler/src/main/scala/ru/avhaliullin/whatever/semantic/function/FnAnalyzer.scala | Scala | mit | 569 |
package strategies.trading
import akka.agent.Agent
import actors.RandomLiquidityDemanderConfig
import markets.participants.strategies.RandomMarketOrderTradingStrategy
import markets.tickers.Tick
import markets.tradables.Tradable
import scala.collection.{immutable, mutable}
import scala.util.Random
/** Zero Intelligence (ZI) market order trading strategy from Gode and Sunder, JPE (1996). */
case class ZIMarketOrderTradingStrategy(config: RandomLiquidityDemanderConfig, prng: Random)
extends RandomMarketOrderTradingStrategy {
def askQuantity(ticker: Agent[immutable.Seq[Tick]], tradable: Tradable): Long = {
uniformRandomVariate(config.minAskQuantity, config.maxAskQuantity)
}
def bidQuantity(ticker: Agent[immutable.Seq[Tick]], tradable: Tradable): Long = {
uniformRandomVariate(config.minBidQuantity, config.maxBidQuantity)
}
def chooseOneOf(tickers: mutable.Map[Tradable, Agent[immutable.Seq[Tick]]]): Option[(Tradable, Agent[immutable.Seq[Tick]])] = {
if (tickers.isEmpty) None else Some(tickers.toIndexedSeq(prng.nextInt(tickers.size)))
}
protected def uniformRandomVariate(lower: Long, upper: Long): Long = {
(lower + (upper - lower) * prng.nextDouble()).toLong
}
} | ScalABM/models-library | london-stock-exchange/src/main/scala-2.11/strategies/trading/ZIMarketOrderTradingStrategy.scala | Scala | apache-2.0 | 1,218 |
package ru.primetalk.synapse.core.ext
import ru.primetalk.synapse.core.components.SignalsApi
import ru.primetalk.synapse.core.runtime.TrellisApi
/**
* Extension for declaring unhandled exception handler via simple DSL.
* @author zhizhelev, 25.03.15.
*/
trait ExceptionHandlingExt extends SignalsApi with TrellisApi {
  /** The type of a handler that will handle exceptions during signal processing.
    * If the exception is recoverable the handler returns the Context to continue
    * with; otherwise it throws (possibly rethrowing the original exception).
    */
  type UnhandledProcessingExceptionHandler = (Throwable, String, Signal[_], Context) => Context

  /** Default handler: wraps any Exception in a RuntimeException carrying the
    * system name, the offending signal and a dump of the context. Throwables
    * that are not Exceptions (i.e. fatal errors) are rethrown untouched.
    */
  val defaultUnhandledExceptionHandler: UnhandledProcessingExceptionHandler =
    (e, name, signal, context) =>
      e match {
        case e: Exception =>
          val message: String =
            s"Exception ${e.getClass.getSimpleName} in handler during processing '$signal' in system '$name'.\\n" +
              s"Context value before processing:\\n" + context.mkString("\\n")
          throw new RuntimeException(message, e)
        case fatal =>
          throw fatal
      }

  /** Handler that performs no wrapping at all — every Throwable is rethrown as-is. */
  val rethrowUnhandledExceptionHandler: UnhandledProcessingExceptionHandler =
    (e, _, _, _) => throw e

  /** Extension id under which a custom handler can be attached to a StaticSystem. */
  implicit object UnhandledExceptionHandlerExtensionId extends StaticSystemExtensionId[UnhandledProcessingExceptionHandler]

  /** Enrichment exposing the handler attached to a system, falling back to the default. */
  implicit class StaticSystemWithUnhandledExceptionHandler(s: StaticSystem) {
    def unhandledExceptionHandler: UnhandledProcessingExceptionHandler =
      s.extensionOpt(UnhandledExceptionHandlerExtensionId)
        .getOrElse(defaultUnhandledExceptionHandler)
  }
}
| ppopoff/SynapseGrid | synapse-grid-core/src/main/scala/ru/primetalk/synapse/core/ext/ExceptionHandlingExt.scala | Scala | bsd-2-clause | 1,699 |
/*
* Copyright (C) 2013 The Mango Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* The code of this project is a port of (or wrapper around) the Guava-libraries.
* See http://code.google.com/p/guava-libraries/
*
* @author Markus Schneider
*/
package org.feijoas.mango.common.cache
import java.util.concurrent.ExecutionException
import java.util.logging.{ Level, Logger }
import java.lang.{ Iterable => jIterable }
import scala.annotation.meta.{ beanGetter, beanSetter, field, getter, setter }
import scala.concurrent.duration.MILLISECONDS
import scala.concurrent.Future._
import scala.collection.mutable
import scala.util.{ Failure, Success }
import org.feijoas.mango.common.annotations.Beta
import org.feijoas.mango.common.base.Ticker.asMangoTickerConverter
import org.feijoas.mango.common.cache.LoadingCache.asMangoLoadingCacheConverter
import org.feijoas.mango.common.util.concurrent.Futures.asScalaFutureConverter
import org.junit.Assert.{ assertSame, assertEquals, assertTrue }
import org.mockito.ArgumentMatchers._
import org.mockito.Mockito.{ times, verify, when }
import org.scalatest._
import org.scalatest.mockito.MockitoSugar
import com.google.common.cache.CacheLoader.InvalidCacheLoadException
import com.google.common.cache.{ LoadingCache => GuavaLoadingCache }
import com.google.common.testing.FakeTicker
import com.google.common.util.concurrent.{ ExecutionError }
import scala.concurrent.Future
import scala.util.Failure
import org.scalatest.BeforeAndAfter
import com.google.common.util.concurrent.UncheckedExecutionException
import java.util.concurrent.atomic.AtomicInteger
import scala.ref.WeakReference
import java.util.concurrent.TimeUnit
import java.util.concurrent.CountDownLatch
import java.io.IOException
import java.util.concurrent.atomic.AtomicReferenceArray
/**
* Tests for [[LoadingCacheWrapperTest]]
*
* @author Markus Schneider
* @since 0.7
*/
class LoadingCacheWrapperTest extends FlatSpec
with CacheWrapperBehaviour
with Matchers
with MockitoSugar
with CacheStatsMatcher
with BeforeAndAfter {
  // ScalaTest fixture: runs before each test case.
  before {
    // disable Guava log messages — the tests below deliberately trigger
    // loader failures that Guava would otherwise log noisily
    import java.util.logging._
    val logger = Logger.getLogger("com.google.common.cache")
    logger.setLevel(Level.OFF)
  }
def wrappedCacheFixture = {
val wrapped = mock[GuavaLoadingCache[String, Int]]
val cache: LoadingCache[String, Int] = wrapped.asScala
(wrapped, cache)
}
"LoadingCacheWrapper" should behave like forwardingWrapper(wrappedCacheFixture)
behavior of "LoadingCacheWrapper"
it should "forward #get to the underlying LoadingCache" in {
val (wrapped, cache) = wrappedCacheFixture
val getExecutionException = new ExecutionException(new Throwable)
when(wrapped.get("foo")).thenReturn(3).thenThrow(getExecutionException)
cache.get("foo") should be(Success(3))
cache.get("foo") should be(Failure(getExecutionException))
verify(wrapped, times(2)).get("foo")
}
it should "forward #getUnchecked to the underlying LoadingCache" in {
val (wrapped, cache) = wrappedCacheFixture
when(wrapped.getUnchecked("foo")).thenReturn(4)
cache.getUnchecked("foo") should be(4)
verify(wrapped).getUnchecked("foo")
}
it should "forward #refresh to the underlying LoadingCache" in {
val (wrapped, cache) = wrappedCacheFixture
cache.refresh("foo")
verify(wrapped).refresh("foo")
}
it should "forward #getAll to the underlying LoadingCache" in {
val (wrapped, cache) = wrappedCacheFixture
val getAllExecutionException = new ExecutionException(new Throwable)
when(wrapped.getAll(isA(classOf[jIterable[String]])))
.thenReturn(jImmutableMap("a" -> 1, "b" -> 2))
.thenThrow(getAllExecutionException)
cache.getAll(List("a", "b")) should be(Success(Map("a" -> 1, "b" -> 2)))
cache.getAll(List("a", "b")) should be(Failure(getAllExecutionException))
verify(wrapped, times(2)).getAll(anyIterableWith("a", "b"))
}
it should "forward #apply(key) to the underlying LoadingCache" in {
val (wrapped, cache) = wrappedCacheFixture
when(wrapped.getUnchecked("foo")).thenReturn(4)
cache("foo") should be(4)
verify(wrapped).getUnchecked("foo")
}
it should "forward #apply(keys) to the underlying LoadingCache" in {
val (wrapped, cache) = wrappedCacheFixture
when(wrapped.getAll(isA(classOf[jIterable[String]])))
.thenReturn(jImmutableMap("a" -> 1, "b" -> 2))
cache(List("a", "b")) should be(Map("a" -> 1, "b" -> 2))
verify(wrapped).getAll(anyIterableWith("a", "b"))
}
it should "load values if not present" in {
val cache = CacheBuilder.newBuilder().recordStats().build((a: Any) => a)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key1 = new Object()
assertSame(key1, cache.get(key1).get)
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
val key2 = new Object()
assertSame(key2, cache.getUnchecked(key2))
cache.stats should have(missCount(2), loadSuccessCount(2), loadExceptionCount(0), hitCount(0))
val key3 = new Object()
cache.refresh(key3)
cache.stats should have(missCount(2), loadSuccessCount(3), loadExceptionCount(0), hitCount(0))
assertSame(key3, cache.get(key3).get)
cache.stats should have(missCount(2), loadSuccessCount(3), loadExceptionCount(0), hitCount(1))
// callable is not called
assertSame(key3, cache.getOrElseUpdate(key3, () => throw new Exception()))
cache.stats should have(missCount(2), loadSuccessCount(3), loadExceptionCount(0), hitCount(2))
val key4 = new Object()
val value4 = new Object()
assertSame(value4, cache.getOrElseUpdate(key4, () => value4))
cache.stats should have(missCount(3), loadSuccessCount(4), loadExceptionCount(0), hitCount(2))
}
it should "be able to reload values" in {
val one = new Integer(1)
val two = new Integer(2)
val loader = new CacheLoader[Any, Any] {
override def load(key: Any) = one
override def reload(key: Any, oldValue: Any) = Future.successful(two)
}
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object()
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
cache.refresh(key)
cache.stats should have(missCount(1), loadSuccessCount(2), loadExceptionCount(0), hitCount(0))
assertSame(two, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(2), loadExceptionCount(0), hitCount(1))
}
it should "be able to refresh values" in {
val one = new Object()
val two = new Object()
val ticker = new FakeTicker
val loader = new CacheLoader[Any, Any] {
override def load(key: Any) = one
override def reload(key: Any, oldValue: Any) = Future.successful(two)
}
val cache = CacheBuilder.newBuilder()
.recordStats()
.ticker(ticker.asScala)
.refreshAfterWrite(1, MILLISECONDS)
.build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object()
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
ticker.advance(1, MILLISECONDS)
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(1))
ticker.advance(1, MILLISECONDS)
assertSame(two, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(2), loadExceptionCount(0), hitCount(2))
ticker.advance(1, MILLISECONDS)
assertSame(two, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(2), loadExceptionCount(0), hitCount(3))
}
it should "reload the value if refreshAfterWrite expires" in {
val one = new Object()
val two = new Object()
val ticker = new FakeTicker()
val loader = new CacheLoader[Any, Any] {
override def load(key: Any) = one
override def reload(key: Any, oldValue: Any) = Future.successful(two)
}
val cache = CacheBuilder.newBuilder()
.recordStats()
.ticker(ticker.asScala)
.refreshAfterWrite(1, MILLISECONDS)
.build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object()
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
ticker.advance(1, MILLISECONDS);
assertSame(one, cache.getIfPresent(key).get)
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(1))
ticker.advance(1, MILLISECONDS);
assertSame(two, cache.getIfPresent(key).get)
cache.stats should have(missCount(1), loadSuccessCount(2), loadExceptionCount(0), hitCount(2))
ticker.advance(1, MILLISECONDS);
assertSame(two, cache.getIfPresent(key).get)
cache.stats should have(missCount(1), loadSuccessCount(2), loadExceptionCount(0), hitCount(3))
}
it should "be able to bulk load values" in {
val bulkLoader = new CacheLoader[Int, Int]() {
override def load(key: Int) = key
override def loadAll(keys: Traversable[Int]): Map[Int, Int] = {
keys.map { (key: Int) => (key, load(key)) }.toMap
}
}
val cache = CacheBuilder.newBuilder().recordStats().build(bulkLoader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
cache.getAll(List()) should be(Success(Map()))
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
cache.getAll(List(1)) should be(Success(Map(1 -> 1)))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
cache.getAll(List(1, 2, 3, 4)) should be(Success(Map(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4)))
cache.stats should have(missCount(4), loadSuccessCount(2), loadExceptionCount(0), hitCount(1))
cache.getAll(List(2, 3)) should be(Success(Map(2 -> 2, 3 -> 3)))
cache.stats should have(missCount(4), loadSuccessCount(2), loadExceptionCount(0), hitCount(3))
cache.getAll(List(4, 5)) should be(Success(Map(4 -> 4, 5 -> 5)))
cache.stats should have(missCount(5), loadSuccessCount(3), loadExceptionCount(0), hitCount(4))
}
it should "ignore extra values returned from a bulk loader" in {
val extraValueBulkLoader = new CacheLoader[Any, Any]() {
override def load(key: Any) = new Object()
override def loadAll(keys: Traversable[Any]): Map[Any, Any] = {
val kvs = keys.map { case key => (key, new Object) }
kvs.toMap ++ // add extra entries
kvs.map { case (key, value) => (value, key) }.toMap
}
}
val cache = CacheBuilder.newBuilder().build(extraValueBulkLoader)
val lookupKeys = List(new Object, new Object, new Object)
val result = cache.getAll(lookupKeys).get
result.keySet should be(lookupKeys.toSet)
result.foreach {
case (key: Any, value: Any) =>
assertSame(value, result.get(key).get)
result.get(value) should be(None)
assertSame(value, cache.asMap().get(key).get)
assertSame(key, cache.asMap().get(value).get)
}
}
it should "not call CacheLoader#load if bulk loading is required" in {
val extraKey: Any = new Object
val extraValue: Any = new Object
val clobbingBulkLoader = new CacheLoader[Any, Any]() {
override def load(key: Any) = throw new AssertionError()
override def loadAll(keys: Traversable[Any]): Map[Any, Any] = {
val kvs = keys.map { case key => (key, new Object) }.toMap
kvs.toMap + ((extraKey, extraValue))
}
}
val cache = CacheBuilder.newBuilder().build(clobbingBulkLoader)
cache.asMap().put(extraKey, extraKey)
assertSame(extraKey, cache.asMap().get(extraKey).get)
val lookupKeys = List(new Object, new Object, new Object)
val result = cache.getAll(lookupKeys).get
result.keySet should be(lookupKeys.toSet)
result.foreach {
case (key: Any, value: Any) =>
assertSame(value, result.get(key).get)
assertSame(value, cache.asMap().get(key).get)
}
result.get(extraKey) should be(None)
assertSame(extraValue, cache.asMap().get(extraKey).get)
}
it should "#getAll should be Failure if not all keys are in the Map returned by Loader#loadAll" in {
val extraKey: Any = new Object
val extraValue: Any = new Object
val ignoringBulkLoader = new CacheLoader[Any, Any]() {
override def load(key: Any) = throw new AssertionError()
override def loadAll(keys: Traversable[Any]): Map[Any, Any] = {
// ignore request keys
Map(extraKey -> extraValue)
}
}
val cache = CacheBuilder.newBuilder().build(ignoringBulkLoader)
val lookupKeys = List(new Object, new Object, new Object)
cache.getAll(lookupKeys) match {
case Failure(e: InvalidCacheLoadException) => // expected
case _ => fail
}
assertSame(extraValue, cache.asMap().get(extraKey).get)
}
it should "fail if the loader throws an error" in {
val e = new Error()
val loader = (arg: Any) => throw e
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
cache.get(new Object) match {
case Failure(expected: ExecutionError) => assertSame(e, expected.getCause())
case _ => fail
}
cache.stats should have(missCount(1), loadSuccessCount(0), loadExceptionCount(1), hitCount(0))
try {
cache.getUnchecked(new Object)
fail()
} catch {
case expected: ExecutionError => assertSame(e, expected.getCause())
}
cache.stats should have(missCount(2), loadSuccessCount(0), loadExceptionCount(2), hitCount(0))
cache.refresh(new Object)
cache.stats should have(missCount(2), loadSuccessCount(0), loadExceptionCount(3), hitCount(0))
val callableError = new Error()
try {
cache.getOrElseUpdate(key, () => throw callableError)
fail()
} catch {
case expected: ExecutionError => assertSame(callableError, expected.getCause())
}
cache.stats should have(missCount(3), loadSuccessCount(0), loadExceptionCount(4), hitCount(0))
cache.getAll(List(new Object)) match {
case Failure(expected: ExecutionError) => assertSame(e, expected.getCause())
case _ => fail
}
cache.stats should have(missCount(4), loadSuccessCount(0), loadExceptionCount(5), hitCount(0))
}
it should "handle execptions during reload" in {
val one = new Object
val e = new Error
val loader = new CacheLoader[Any, Any]() {
override def load(key: Any) = one
override def reload(key: Any, oldValue: Any) = throw e
}
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
cache.refresh(key)
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(0))
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(1))
}
it should "handle execptions in the Future returned by reload" in {
val one = new Object
val e = new Error
val loader = new CacheLoader[Any, Any]() {
override def load(key: Any) = one
override def reload(key: Any, oldValue: Any) = Future.failed(e)
}
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
cache.refresh(key)
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(0))
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(1))
}
it should "fail if the loader returns null" in {
val cache = CacheBuilder.newBuilder()
.recordStats()
.build((any: Any) => null)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
cache.get(new Object) match {
case Failure(e: InvalidCacheLoadException) => // expected
case _ => fail()
}
cache.stats should have(missCount(1), loadSuccessCount(0), loadExceptionCount(1), hitCount(0))
intercept[InvalidCacheLoadException] {
cache.getUnchecked(new Object)
}
cache.stats should have(missCount(2), loadSuccessCount(0), loadExceptionCount(2), hitCount(0))
cache.refresh(new Object())
cache.stats should have(missCount(2), loadSuccessCount(0), loadExceptionCount(3), hitCount(0))
intercept[InvalidCacheLoadException] {
cache.getOrElseUpdate(new Object, () => null)
}
cache.stats should have(missCount(3), loadSuccessCount(0), loadExceptionCount(4), hitCount(0))
cache.getAll(List(new Object)) match {
case Failure(e: InvalidCacheLoadException) => // expected
case _ => fail()
}
cache.stats should have(missCount(4), loadSuccessCount(0), loadExceptionCount(5), hitCount(0))
}
it should "ignore the reloaded value if reload returns null" in {
val one = new Object
val loader = new CacheLoader[Any, Any]() {
override def load(key: Any) = one
override def reload(key: Any, oldValue: Any) = null
}
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
cache.refresh(key)
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(0))
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(1))
}
it should "ignore the reloaded value if reload returns a Future with null" in {
val one = new Object
val loader = new CacheLoader[Any, Any]() {
override def load(key: Any) = one
override def reload(key: Any, oldValue: Any) = Future.successful(null)
}
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
cache.refresh(key);
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(0))
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(1))
}
it should "ignore the reloaded value if reload returns a Future with null (implicity by refreshAfterWrite)" in {
val one = new Object
val ticker = new FakeTicker()
val loader = new CacheLoader[Any, Any]() {
override def load(key: Any) = one
override def reload(key: Any, oldValue: Any) = Future.successful(null)
}
val cache = CacheBuilder.newBuilder()
.recordStats()
.ticker(ticker.asScala)
.refreshAfterWrite(1, MILLISECONDS)
.build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
ticker.advance(1, MILLISECONDS)
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(1))
ticker.advance(1, MILLISECONDS)
assertSame(one, cache.getUnchecked(key))
// refreshed
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(2))
ticker.advance(1, MILLISECONDS)
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(2), hitCount(3))
}
it should "fail if CacheLoader#loadAll has a null value in the returned Map" in {
val loader = new CacheLoader[Any, Any]() {
override def load(key: Any) = fail
override def loadAll(keys: Traversable[Any]) = keys.map { case key => (key, null) }.toMap
}
val cache = CacheBuilder.newBuilder()
.recordStats()
.build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
cache.getAll(List(new Object)) match {
case Failure(e: InvalidCacheLoadException) => // expected
case _ => fail
}
cache.stats should have(missCount(1), loadSuccessCount(0), loadExceptionCount(1), hitCount(0))
}
it should "fail if CacheLoader#loadAll returns null" in {
val loader = new CacheLoader[Any, Any]() {
override def load(key: Any) = fail
override def loadAll(keys: Traversable[Any]) = null
}
val cache = CacheBuilder.newBuilder()
.recordStats()
.build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
cache.getAll(List(new Object)) match {
case Failure(e: InvalidCacheLoadException) => // expected
case _ => fail
}
cache.stats should have(missCount(1), loadSuccessCount(0), loadExceptionCount(1), hitCount(0))
}
it should "fail if the loader throws an Error" in {
val error = new Error()
val cache = CacheBuilder.newBuilder().recordStats().build((any: Any) => throw error)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
cache.get(new Object) match {
case Failure(expected: ExecutionError) => assertSame(error, expected.getCause)
case _ => fail
}
cache.stats should have(missCount(1), loadSuccessCount(0), loadExceptionCount(1), hitCount(0))
try {
cache.getUnchecked(new Object())
fail
} catch {
case expected: ExecutionError => assertSame(error, expected.getCause)
case _: Throwable => fail
}
cache.stats should have(missCount(2), loadSuccessCount(0), loadExceptionCount(2), hitCount(0))
cache.refresh(new Object())
cache.stats should have(missCount(2), loadSuccessCount(0), loadExceptionCount(3), hitCount(0))
val callableError = new Error
try {
cache.getOrElseUpdate(new Object, () => throw callableError)
fail
} catch {
case expected: ExecutionError => assertSame(callableError, expected.getCause)
case _: Throwable => fail
}
cache.stats should have(missCount(3), loadSuccessCount(0), loadExceptionCount(4), hitCount(0))
cache.getAll(List(new Object)) match {
case Failure(expected: ExecutionError) => assertSame(error, expected.getCause)
case _ => fail
}
cache.stats should have(missCount(4), loadSuccessCount(0), loadExceptionCount(5), hitCount(0))
}
it should "only log the error thrown in #reload" in {
val one = new Object
val e = new Error
val loader = new CacheLoader[Any, Any]() {
override def load(any: Any) = one
override def reload(key: Any, oldValue: Any) = throw e
}
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
cache.refresh(key)
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(0))
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(1))
}
it should "only log the error returned as a failed Future by #reload" in {
val one = new Object
val e = new Error
val loader = new CacheLoader[Any, Any]() {
override def load(any: Any) = one
override def reload(key: Any, oldValue: Any) = Future.failed(e)
}
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
cache.refresh(key)
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(0))
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(1))
}
it should "not replace value if an execption occures during #reload" in {
val one = new Object
val e = new Error
val ticker = new FakeTicker()
val loader = new CacheLoader[Any, Any]() {
override def load(any: Any) = one
override def reload(key: Any, oldValue: Any) = Future.failed(e)
}
val cache = CacheBuilder.newBuilder()
.recordStats()
.ticker(ticker.asScala)
.refreshAfterWrite(1, MILLISECONDS)
.build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
ticker.advance(1, MILLISECONDS);
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(1))
ticker.advance(1, MILLISECONDS)
assertSame(one, cache.getUnchecked(key))
// refreshed
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(2))
ticker.advance(1, MILLISECONDS)
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(2), hitCount(3))
}
it should "return a failure from #getAll if loader#loadAll throws an error" in {
val e = new Error()
val loader = new CacheLoader[Any, Any]() {
override def load(any: Any) = throw e
override def loadAll(keys: Traversable[Any]) = throw e
}
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object
cache.getAll(List(new Object)) match {
case Failure(expected: ExecutionError) => assertSame(e, expected.getCause())
case _ => fail
}
cache.stats should have(missCount(1), loadSuccessCount(0), loadExceptionCount(1), hitCount(0))
}
it should "throw an UncheckedExecutionException in #getUncheched if loader throws an Exception" in {
val e = new Exception
val loader = (any: Any) => throw e
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
cache.get(new Object) match {
case Failure(expected: ExecutionException) => assertSame(e, expected.getCause())
case _ => fail
}
cache.stats should have(missCount(1), loadSuccessCount(0), loadExceptionCount(1), hitCount(0))
try {
cache.getUnchecked(new Object())
fail()
} catch {
case expected: UncheckedExecutionException => assertSame(e, expected.getCause())
case _: Throwable => fail
}
cache.stats should have(missCount(2), loadSuccessCount(0), loadExceptionCount(2), hitCount(0))
cache.refresh(new Object)
cache.stats should have(missCount(2), loadSuccessCount(0), loadExceptionCount(3), hitCount(0))
val callableException = new Exception()
try {
cache.getOrElseUpdate(new Object, () => throw callableException)
fail()
} catch {
case expected: ExecutionException => assertSame(callableException, expected.getCause())
}
cache.stats should have(missCount(3), loadSuccessCount(0), loadExceptionCount(4), hitCount(0))
cache.getAll(List(new Object)) match {
case Failure(expected: ExecutionException) => assertSame(e, expected.getCause())
case _ => fail
}
cache.stats should have(missCount(4), loadSuccessCount(0), loadExceptionCount(5), hitCount(0))
}
it should "not replace the value in the cache if Loader#reload throws an Exception" in {
val one = new Object()
val e = new Exception()
val loader = new CacheLoader[Any, Any]() {
override def load(any: Any) = one
override def reload(key: Any, oldValue: Any) = throw e
}
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
cache.refresh(key)
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(0))
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(1))
}
it should "not replace the value in the cache if Loader#reload returns a failed Future" in {
val one = new Object
val e = new Exception
val loader = new CacheLoader[Any, Any]() {
override def load(any: Any) = one
override def reload(key: Any, oldValue: Any) = Future.failed(e)
}
val cache = CacheBuilder.newBuilder().recordStats().build(loader)
cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
val key = new Object
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
cache.refresh(key)
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(0))
assertSame(one, cache.getUnchecked(key))
cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(1))
}
it should "not replace the value in the cache if Loader#reload returns a failed Future on refreshAfterWrite" in {
  val one = new Object
  val e = new Exception
  val ticker = new FakeTicker
  // Loader whose initial load succeeds but whose reload always fails.
  val loader = new CacheLoader[Any, Any]() {
    override def load(any: Any) = one
    override def reload(key: Any, oldValue: Any) = Future.failed(e)
  }
  // The fake ticker lets the test step time deterministically past the
  // 1 ms refreshAfterWrite threshold.
  val cache = CacheBuilder.newBuilder()
    .recordStats().ticker(ticker.asScala)
    .refreshAfterWrite(1, MILLISECONDS).build(loader)
  cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
  val key = new Object
  assertSame(one, cache.getUnchecked(key))
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
  ticker.advance(1, MILLISECONDS)
  // First read after the threshold still counts only a hit; no reload
  // failure has been recorded yet.
  assertSame(one, cache.getUnchecked(key))
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(1))
  ticker.advance(1, MILLISECONDS)
  assertSame(one, cache.getUnchecked(key))
  // refreshed
  // The failed reload shows up as loadExceptionCount, but the stale value
  // keeps being served (hit count still grows).
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(2))
  ticker.advance(1, MILLISECONDS)
  assertSame(one, cache.getUnchecked(key))
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(2), hitCount(3))
}
it should "return Failure on bulk-load if loadAll throws a checked exception" in {
  val thrown = new Exception
  // Both single and bulk loads fail with the same checked exception.
  val throwingLoader = new CacheLoader[Any, Any]() {
    override def load(any: Any) = throw thrown
    override def loadAll(keys: Traversable[Any]) = throw thrown
  }
  val cache = CacheBuilder.newBuilder().recordStats().build(throwingLoader)
  cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
  // A checked exception from loadAll surfaces as Failure(ExecutionException).
  cache.getAll(List(new Object)) match {
    case Failure(wrapped: ExecutionException) => assertSame(thrown, wrapped.getCause())
    case _ => fail
  }
  cache.stats should have(missCount(1), loadSuccessCount(0), loadExceptionCount(1), hitCount(0))
}
it should "return Failure if #load throws an unchecked exception" in {
  val e = new RuntimeException()
  val cache = CacheBuilder.newBuilder().recordStats().build((any: Any) => throw e)
  cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
  // get() surfaces the unchecked loader exception as a Failure wrapping
  // an UncheckedExecutionException.
  cache.get(new Object) match {
    case Failure(expected: UncheckedExecutionException) => assertSame(e, expected.getCause())
    case _ => fail
  }
  cache.stats should have(missCount(1), loadSuccessCount(0), loadExceptionCount(1), hitCount(0))
  // getUnchecked() rethrows it, also wrapped.
  try {
    cache.getUnchecked(new Object)
    fail()
  } catch {
    case expected: UncheckedExecutionException => assertSame(e, expected.getCause)
  }
  cache.stats should have(missCount(2), loadSuccessCount(0), loadExceptionCount(2), hitCount(0))
  // refresh() does not throw here, but the failure is still counted.
  cache.refresh(new Object)
  cache.stats should have(missCount(2), loadSuccessCount(0), loadExceptionCount(3), hitCount(0))
  val callableException = new RuntimeException();
  // getOrElseUpdate propagates the supplied callable's exception the same way.
  try {
    cache.getOrElseUpdate(new Object, () => throw callableException)
    fail()
  } catch {
    case expected: UncheckedExecutionException => assertSame(callableException, expected.getCause)
  }
  cache.stats should have(missCount(3), loadSuccessCount(0), loadExceptionCount(4), hitCount(0))
  // Bulk lookup fails with the same wrapped exception.
  cache.getAll(List(new Object)) match {
    case Failure(expected: UncheckedExecutionException) => assertSame(e, expected.getCause())
    case _ => fail
  }
  cache.stats should have(missCount(4), loadSuccessCount(0), loadExceptionCount(5), hitCount(0))
}
it should "not fail if reload throws an unchecked exception" in {
  val one = new Object
  val e = new RuntimeException
  // Loader whose initial load succeeds but whose reload throws synchronously.
  val loader = new CacheLoader[Any, Any]() {
    override def load(any: Any) = one
    override def reload(key: Any, oldValue: Any) = throw e
  }
  val cache = CacheBuilder.newBuilder().recordStats().build(loader)
  cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
  // Fix: `key` was used below but never declared in this test. Declare it
  // locally, as the sibling tests do, instead of relying on state defined
  // elsewhere in the spec.
  val key = new Object
  assertSame(one, cache.getUnchecked(key))
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
  // refresh() must not propagate the reload exception; it is only recorded.
  cache.refresh(key)
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(0))
  // The previously loaded value is still served after the failed reload.
  assertSame(one, cache.getUnchecked(key))
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(1))
}
it should "not fail on refresh if reload returns a Future with an unchecked exception" in {
  val one = new Object
  val e = new RuntimeException
  // Loader whose initial load succeeds but whose reload fails asynchronously.
  val loader = new CacheLoader[Any, Any]() {
    override def load(any: Any) = one
    override def reload(key: Any, oldValue: Any) = Future.failed(e)
  }
  val cache = CacheBuilder.newBuilder().recordStats().build(loader)
  cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
  // Fix: `key` was used below but never declared in this test. Declare it
  // locally, as the sibling tests do, instead of relying on state defined
  // elsewhere in the spec.
  val key = new Object
  assertSame(one, cache.getUnchecked(key))
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
  // The failed refresh is counted but must not evict the cached value.
  cache.refresh(key)
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(0))
  assertSame(one, cache.getUnchecked(key))
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(1))
}
it should "not fail on reload if reload returns a Future with an unchecked exception" in {
  val one = new Object
  val e = new RuntimeException
  // Loader whose initial load succeeds but whose reload fails asynchronously.
  val loader = new CacheLoader[Any, Any]() {
    override def load(any: Any) = one
    override def reload(key: Any, oldValue: Any) = Future.failed(e)
  }
  val ticker = new FakeTicker
  // Fake ticker drives time past the 1 ms refreshAfterWrite threshold.
  val cache = CacheBuilder.newBuilder()
    .recordStats().ticker(ticker.asScala)
    .refreshAfterWrite(1, MILLISECONDS).build(loader)
  cache.stats should be(CacheStats(0, 0, 0, 0, 0, 0))
  val key = new Object
  assertSame(one, cache.getUnchecked(key))
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(0))
  ticker.advance(1, MILLISECONDS)
  assertSame(one, cache.getUnchecked(key))
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(0), hitCount(1))
  ticker.advance(1, MILLISECONDS)
  // This read triggers the (failing) reload; the stale value keeps
  // being served while the failure only shows up in the stats.
  assertSame(one, cache.getUnchecked(key))
  // refreshed
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(1), hitCount(2))
  ticker.advance(1, MILLISECONDS)
  assertSame(one, cache.getUnchecked(key))
  cache.stats should have(missCount(1), loadSuccessCount(1), loadExceptionCount(2), hitCount(3))
}
it should "not notify removal listener if reload fails" in {
  val loadAttempts = new AtomicInteger
  val firstLoadFailure = new IllegalStateException("exception to trigger failure on first load()")
  // Fails the very first load, then answers key.toString for every later load.
  val failFirstLoader = new CacheLoader[Int, String]() {
    override def load(key: Int): String = {
      if (loadAttempts.getAndIncrement() == 0) {
        throw firstLoadFailure
      }
      key.toString
    }
  }
  val listener = new CountingRemovalListener[Int, String]
  val cache = CacheBuilder.newBuilder().removalListener(listener).build(failFirstLoader)
  // The first load fails and the wrapped cause is the triggering exception.
  try {
    cache.getUnchecked(1)
    fail()
  } catch {
    case ue: UncheckedExecutionException => assertSame(firstLoadFailure, ue.getCause())
  }
  // A retry succeeds, and the earlier failure caused no removal notification.
  cache.getUnchecked(1) should be("1")
  listener.getCount should be(0)
  // Reset the counter so the load triggered by refresh(2) fails again.
  loadAttempts.set(0)
  cache.refresh(2)
  cache.getUnchecked(2) should be("2")
  listener.getCount should be(0)
}
it should "reload after value reclamation" in {
  val countingLoader = new CountingLoader
  val cache = CacheBuilder.newBuilder()
    .weakValues().build(countingLoader)
  val map = cache.asMap()
  val iterations = 10
  var ref = new WeakReference[AnyRef](null)
  var expectedComputations = 0
  for (i <- 0 until iterations) {
    // The entry should get garbage collected and recomputed.
    // Fix: java.lang.ref.WeakReference#get returns null (not None) once the
    // referent has been reclaimed, so the comparison must be against null —
    // `oldValue == None` could never be true for a cleared reference.
    var oldValue = ref.get
    if (oldValue == null) {
      expectedComputations = expectedComputations + 1
    }
    ref = new WeakReference[AnyRef](cache.getUnchecked(1).asInstanceOf[AnyRef])
    oldValue = null // drop the strong reference so the value can be reclaimed
    Thread.sleep(i)
    System.gc()
  }
  assertEquals(expectedComputations, countingLoader.getCount())
  for (i <- 0 until iterations) {
    // The entry should get garbage collected and recomputed on refresh.
    var oldValue = ref.get
    if (oldValue == null) {
      expectedComputations = expectedComputations + 1
    }
    cache.refresh(1)
    ref = new WeakReference[AnyRef](map.get(1).get.asInstanceOf[AnyRef])
    oldValue = null
    Thread.sleep(i)
    System.gc()
  }
  assertEquals(expectedComputations, countingLoader.getCount())
}
it should "be able to load concurrently" in {
  // Exercises all four concurrent-loading scenarios with the default builder.
  testConcurrentLoading(CacheBuilder.newBuilder())
}
it should "be able to load concurrently with expiration" in {
  // Same scenarios, but with expireAfterWrite configured.
  testConcurrentLoading(CacheBuilder.newBuilder().expireAfterWrite(10, TimeUnit.SECONDS))
}
// Runs the four concurrent-loading scenarios against the given builder
// configuration: successful load, null result, unchecked failure, checked failure.
private def testConcurrentLoading(builder: CacheBuilder[Any, Any]) = {
  testConcurrentLoadingDefault(builder)
  testConcurrentLoadingNull(builder)
  testConcurrentLoadingUncheckedException(builder)
  testConcurrentLoadingCheckedException(builder)
}
/**
* On a successful concurrent computation, only one thread does the work,
* but all the threads get the same result.
*/
private def testConcurrentLoadingDefault(builder: CacheBuilder[Any, Any]) = {
  val threadCount = 10
  val loaderCalls = new AtomicInteger
  // One latch slot per getter thread plus one for the loader itself.
  val startSignal = new CountDownLatch(threadCount + 1)
  val expected = new Object
  // The loader parks on the latch so every getter piles up behind a single
  // in-flight computation.
  val cache = builder.build(new CacheLoader[String, AnyRef]() {
    override def load(key: String): AnyRef = {
      loaderCalls.incrementAndGet()
      startSignal.await()
      expected
    }
  })
  val results = doConcurrentGet(cache, "bar", threadCount, startSignal)
  // Exactly one thread performed the computation ...
  loaderCalls.get() should be(1)
  // ... and every thread observed the same value.
  for (i <- 0 until threadCount) {
    assertSame("result(" + i + ") didn't match expected", expected, results(i))
  }
}
/**
* On a concurrent computation that returns null, all threads should get an
* InvalidCacheLoadException, with the loader only called once. The result
* should not be cached (a later request should call the loader again).
*/
private def testConcurrentLoadingNull(builder: CacheBuilder[Any, Any]) = {
  val count = 10
  val callCount = new AtomicInteger
  val startSignal = new CountDownLatch(count + 1)
  // The loader blocks until all getters have started, then returns null,
  // which the cache rejects.
  val cache = builder.build(new CacheLoader[String, AnyRef]() {
    override def load(key: String): AnyRef = {
      callCount.incrementAndGet()
      startSignal.await()
      null
    }
  })
  val result = doConcurrentGet(cache, "bar", count, startSignal)
  // Only one thread performed the computation ...
  callCount.get() should be(1)
  // ... but every thread observed the InvalidCacheLoadException.
  for (i <- 0 until count) {
    assertTrue(result(i).isInstanceOf[InvalidCacheLoadException])
  }
  // subsequent calls should call the loader again, not get the old
  // exception
  try {
    cache.getUnchecked("bar")
    fail()
  } catch {
    case expected: InvalidCacheLoadException =>
  }
  callCount.get() should be(2)
}
/**
* On a concurrent computation that throws an unchecked exception, all
* threads should get the (wrapped) exception, with the loader called only
* once. The result should not be cached (a later request should call the
* loader again).
*/
// `private` added for consistency with the sibling testConcurrentLoading*
// helpers (testConcurrentLoadingDefault/Null/CheckedException); this one was
// the only one left public.
private def testConcurrentLoadingUncheckedException(builder: CacheBuilder[Any, Any]) = {
  val count = 10
  val callCount = new AtomicInteger
  val startSignal = new CountDownLatch(count + 1)
  val e = new RuntimeException()
  // The loader blocks until all getters have started, then throws.
  val cache = builder.build(new CacheLoader[String, AnyRef]() {
    override def load(key: String): AnyRef = {
      callCount.incrementAndGet()
      startSignal.await()
      throw e
    }
  })
  val result = doConcurrentGet(cache, "bar", count, startSignal)
  callCount.get() should be(1)
  for (i <- 0 until count) {
    // Whichever accessor a getter thread used (doConcurrentGet cycles
    // through get, getUnchecked, and refresh + get), an unchecked exception
    // thrown by the loader is always wrapped as an
    // UncheckedExecutionException.
    result(i) match {
      case _: UncheckedExecutionException => // expected
      case _ => fail
    }
    assertSame(e, result(i).asInstanceOf[UncheckedExecutionException].getCause)
  }
  // subsequent calls should call the loader again, not get the old
  // exception
  try {
    cache.getUnchecked("bar")
    fail()
  } catch {
    case expected: UncheckedExecutionException => // expected
    case _: Throwable => fail
  }
  callCount.get() should be(2)
}
/**
* On a concurrent computation that throws a checked exception, all threads
* should get the (wrapped) exception, with the loader called only once. The
* result should not be cached (a later request should call the loader
* again).
*/
private def testConcurrentLoadingCheckedException(builder: CacheBuilder[Any, Any]) = {
  val count = 10
  val callCount = new AtomicInteger
  val startSignal = new CountDownLatch(count + 1)
  val e = new IOException
  // The loader blocks until all getters have started, then throws a
  // checked exception.
  val cache = builder.build(new CacheLoader[String, AnyRef]() {
    override def load(key: String): AnyRef = {
      callCount.incrementAndGet()
      startSignal.await()
      throw e
    }
  })
  val result = doConcurrentGet(cache, "bar", count, startSignal)
  callCount.get() should be(1)
  for (i <- 0 until count) {
    // doConcurrentGet cycles through get (index % 3 == 0),
    // getUnchecked (== 1), and refresh + get (== 2). The get-based paths
    // surface the checked exception as an ExecutionException; getUnchecked
    // wraps it in an UncheckedExecutionException.
    val mod = i % 3
    if (mod == 0 || mod == 2) {
      result(i) match {
        case _: ExecutionException => // expected
        case _ => fail
      }
      assertSame(e, result(i).asInstanceOf[ExecutionException].getCause)
    } else {
      result(i) match {
        case _: UncheckedExecutionException => // expected
        case _ => fail
      }
      assertSame(e, result(i).asInstanceOf[UncheckedExecutionException].getCause)
    }
  }
  // subsequent calls should call the loader again, not get the old
  // exception
  try {
    cache.getUnchecked("bar")
    fail
  } catch {
    case expected: UncheckedExecutionException =>
    case _: Throwable => fail
  }
  callCount.get() should be(2)
}
/**
* Test-helper method that performs {@code nThreads} concurrent calls to
* {@code cache.get(key)} or {@code cache.getUnchecked(key)}, and returns a
* List containing each of the results. The result for any given call to
* {@code cache.get} or {@code cache.getUnchecked} is the value returned, or
* the exception thrown.
*
* <p>
 * As we iterate from {@code 0} to {@code nThreads}, threads whose index is
 * {@code 0 mod 3} call {@code get}, threads whose index is {@code 1 mod 3}
 * call {@code getUnchecked}, and threads whose index is {@code 2 mod 3} call
 * {@code refresh} followed by {@code get}. If the cache throws exceptions,
 * this difference may be visible in the returned List.
*/
private def doConcurrentGet[K](cache: LoadingCache[K, AnyRef], key: K, nThreads: Int, gettersStartedSignal: CountDownLatch): List[AnyRef] = {
  val result = new AtomicReferenceArray[AnyRef](nThreads)
  val gettersComplete = new CountDownLatch(nThreads)
  for (i <- 0 until nThreads) {
    val index = i
    val thread = new Thread(new Runnable() {
      override def run() = {
        gettersStartedSignal.countDown()
        var value: AnyRef = null
        try {
          // index % 3 == 0 -> get, == 1 -> getUnchecked, == 2 -> refresh + get
          val mod = index % 3
          if (mod == 0) {
            value = cache.get(key).get
          } else if (mod == 1) {
            value = cache.getUnchecked(key)
          } else {
            cache.refresh(key)
            value = cache.get(key).get
          }
          result.set(index, value)
        } catch {
          // Record the thrown exception in place of a value so callers can
          // assert on it.
          case t: Throwable => result.set(index, t)
        }
        gettersComplete.countDown()
      }
    })
    thread.start()
    // we want to wait until each thread is WAITING - one thread waiting
    // inside CacheLoader.load (in startSignal.await()), and the others
    // waiting for that thread's result.
    while (thread.isAlive() && thread.getState() != Thread.State.WAITING) {
      Thread.`yield`
    }
  }
  gettersStartedSignal.countDown()
  gettersComplete.await()
  // Snapshot the AtomicReferenceArray directly into an immutable List.
  // (Replaces the previous MutableList append loop + `List.empty ++ ...`.)
  (0 until nThreads).map(i => result.get(i)).toList
}
it should "be possible to view #asMap during loading" in {
  val getStartedSignal = new CountDownLatch(2)
  val letGetFinishSignal = new CountDownLatch(1)
  val getFinishedSignal = new CountDownLatch(2)
  val getKey = "get"
  val refreshKey = "refresh"
  val suffix = "Suffix"
  // The loader parks until the test releases letGetFinishSignal, so both a
  // getUnchecked and a refresh can be observed mid-computation.
  val computeFunction = new CacheLoader[String, String]() {
    override def load(key: String): String = {
      getStartedSignal.countDown()
      letGetFinishSignal.await()
      key + suffix
    }
  }
  val cache = CacheBuilder.newBuilder().build(computeFunction)
  val map = cache.asMap()
  map.put(refreshKey, refreshKey)
  map.size should be(1)
  map.keySet.contains(getKey) should be(false)
  assertSame(refreshKey, map.get(refreshKey).get)
  new Thread() {
    override def run {
      cache.getUnchecked(getKey)
      getFinishedSignal.countDown
    }
  }.start
  new Thread() {
    override def run {
      cache.refresh(refreshKey)
      getFinishedSignal.countDown
    }
  }.start
  getStartedSignal.await
  // computation is in progress; asMap shouldn't have changed
  map.size should be(1)
  map.keySet.contains(getKey) should be(false)
  assertSame(refreshKey, map.get(refreshKey).get)
  // let computation complete
  letGetFinishSignal.countDown
  getFinishedSignal.await
  // asMap view should have been updated
  cache.size should be(2)
  map.get(getKey) should be(Some(getKey + suffix))
  map.get(refreshKey) should be(Some(refreshKey + suffix))
}
it should "be able to invalidate during loading" in {
  // computation starts; invalidate() is called on the key being computed,
  // computation finishes
  val computationStarted = new CountDownLatch(2)
  val letGetFinishSignal = new CountDownLatch(1)
  val getFinishedSignal = new CountDownLatch(2)
  val getKey = "get"
  val refreshKey = "refresh"
  val suffix = "Suffix"
  // The loader parks until the test releases letGetFinishSignal.
  val computeFunction = (key: String) => {
    computationStarted.countDown()
    letGetFinishSignal.await()
    key + suffix
  }
  val cache = CacheBuilder.newBuilder().build(computeFunction)
  val map = cache.asMap()
  map.put(refreshKey, refreshKey)
  new Thread() {
    override def run() = {
      cache.getUnchecked(getKey)
      getFinishedSignal.countDown()
    }
  }.start
  new Thread() {
    override def run() = {
      cache.refresh(refreshKey)
      getFinishedSignal.countDown()
    }
  }.start
  computationStarted.await()
  // Invalidate both keys while their computations are still in flight.
  cache.invalidate(getKey)
  cache.invalidate(refreshKey)
  map.keySet.contains(getKey) should be(false)
  map.keySet.contains(refreshKey) should be(false)
  // let computation complete
  letGetFinishSignal.countDown()
  getFinishedSignal.await()
  // results should be visible
  cache.size should be(2)
  map.get(getKey).get should be(getKey + suffix)
  map.get(refreshKey).get should be(refreshKey + suffix)
  cache.size should be(2)
}
it should "be able to invalidate and reload during loading" in {
  // computation starts; clear() is called, computation finishes
  val computationStarted = new CountDownLatch(2)
  val letGetFinishSignal = new CountDownLatch(1)
  // Four computations finish in total: two before the invalidation, two after.
  val getFinishedSignal = new CountDownLatch(4)
  val getKey = "get"
  val refreshKey = "refresh"
  val suffix = "Suffix"
  // The loader parks until the test releases letGetFinishSignal.
  val computeFunction = (key: String) => {
    computationStarted.countDown()
    letGetFinishSignal.await()
    key + suffix
  }
  val cache = CacheBuilder.newBuilder().build(computeFunction)
  val map = cache.asMap()
  map.put(refreshKey, refreshKey)
  new Thread() {
    override def run() = {
      cache.getUnchecked(getKey)
      getFinishedSignal.countDown()
    }
  }.start
  new Thread() {
    override def run() = {
      cache.refresh(refreshKey)
      getFinishedSignal.countDown()
    }
  }.start
  computationStarted.await()
  // Invalidate while the first pair of computations is still in flight.
  cache.invalidate(getKey)
  cache.invalidate(refreshKey)
  map.keySet.contains(getKey) should be(false)
  map.keySet.contains(refreshKey) should be(false)
  // start new computations
  new Thread() {
    override def run() = {
      cache.getUnchecked(getKey)
      getFinishedSignal.countDown()
    }
  }.start
  new Thread() {
    override def run() = {
      cache.refresh(refreshKey)
      getFinishedSignal.countDown()
    }
  }.start
  // let computation complete
  letGetFinishSignal.countDown()
  getFinishedSignal.await()
  // results should be visible
  cache.size should be(2)
  map.get(getKey).get should be(getKey + suffix)
  map.get(refreshKey).get should be(refreshKey + suffix)
}
}
| feijoas/mango | src/test/scala/org/feijoas/mango/common/cache/LoadingCacheWrapperTest.scala | Scala | apache-2.0 | 52,504 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import scala.language.implicitConversions
import org.apache.spark.annotation.Experimental
import org.apache.spark.Logging
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.types._
private[sql] object Column {
  /** Creates a [[Column]] that resolves the given column name. */
  def apply(colName: String): Column = new Column(colName)
  /** Wraps an already-built Catalyst [[Expression]] in a [[Column]]. */
  def apply(expr: Expression): Column = new Column(expr)
  /** Extractor exposing the column's underlying expression. */
  def unapply(col: Column): Option[Expression] = Some(col.expr)
}
/**
* :: Experimental ::
* A column in a [[DataFrame]].
*
* @groupname java_expr_ops Java-specific expression operators
* @groupname expr_ops Expression operators
* @groupname df_ops DataFrame functions
* @groupname Ungrouped Support functions for DataFrames
*
* @since 1.3.0
*/
@Experimental
class Column(protected[sql] val expr: Expression) extends Logging {
def this(name: String) = this(name match {
  // "*" selects all columns; "prefix.*" selects every column of `prefix`;
  // anything else is parsed as a (possibly back-quoted) attribute name.
  case "*" => UnresolvedStar(None)
  case _ if name.endsWith(".*") => UnresolvedStar(Some(name.substring(0, name.length - 2)))
  case _ => UnresolvedAttribute.quotedString(name)
})
/** Creates a column based on the given expression. */
implicit private def exprToColumn(newExpr: Expression): Column = new Column(newExpr)
override def toString: String = expr.prettyString
// Two Columns are equal iff their underlying Catalyst expressions are equal.
override def equals(that: Any): Boolean = that match {
  case other: Column => this.expr == other.expr
  case _ => false
}
override def hashCode: Int = this.expr.hashCode
/**
* Extracts a value or values from a complex type.
* The following types of extraction are supported:
* - Given an Array, an integer ordinal can be used to retrieve a single value.
* - Given a Map, a key of the correct type can be used to retrieve an individual value.
* - Given a Struct, a string fieldName can be used to extract that field.
 * - Given an Array of Structs, a string fieldName can be used to extract that field
 * of every struct in that array, and return an Array of fields
*
* @group expr_ops
* @since 1.4.0
*/
def apply(extraction: Any): Column = UnresolvedExtractValue(expr, lit(extraction).expr)
/**
* Unary minus, i.e. negate the expression.
* {{{
* // Scala: select the amount column and negates all values.
* df.select( -df("amount") )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* df.select( negate(col("amount") );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def unary_- : Column = UnaryMinus(expr)
/**
* Inversion of boolean expression, i.e. NOT.
* {{{
* // Scala: select rows that are not active (isActive === false)
* df.filter( !df("isActive") )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* df.filter( not(df.col("isActive")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def unary_! : Column = Not(expr)
/**
* Equality test.
* {{{
* // Scala:
* df.filter( df("colA") === df("colB") )
*
* // Java
* import static org.apache.spark.sql.functions.*;
* df.filter( col("colA").equalTo(col("colB")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def === (other: Any): Column = {
  val rightExpr = lit(other).expr
  // Comparing an expression to itself is almost always a mistake (e.g. a
  // self-join without aliases), so warn rather than silently build a
  // trivially-true predicate.
  if (this.expr == rightExpr) {
    logWarning(
      s"Constructing trivially true equals predicate, '${this.expr} = $rightExpr'. " +
        "Perhaps you need to use aliases.")
  }
  EqualTo(expr, rightExpr)
}
/**
* Equality test.
* {{{
* // Scala:
* df.filter( df("colA") === df("colB") )
*
* // Java
* import static org.apache.spark.sql.functions.*;
* df.filter( col("colA").equalTo(col("colB")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def equalTo(other: Any): Column = this === other
/**
* Inequality test.
* {{{
* // Scala:
* df.select( df("colA") !== df("colB") )
* df.select( !(df("colA") === df("colB")) )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* df.filter( col("colA").notEqual(col("colB")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def !== (other: Any): Column = Not(EqualTo(expr, lit(other).expr))
/**
* Inequality test.
* {{{
* // Scala:
* df.select( df("colA") !== df("colB") )
* df.select( !(df("colA") === df("colB")) )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* df.filter( col("colA").notEqual(col("colB")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def notEqual(other: Any): Column = Not(EqualTo(expr, lit(other).expr))
/**
* Greater than.
* {{{
* // Scala: The following selects people older than 21.
* people.select( people("age") > 21 )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* people.select( people("age").gt(21) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def > (other: Any): Column = GreaterThan(expr, lit(other).expr)
/**
* Greater than.
* {{{
* // Scala: The following selects people older than 21.
* people.select( people("age") > lit(21) )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* people.select( people("age").gt(21) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def gt(other: Any): Column = this > other
/**
* Less than.
* {{{
* // Scala: The following selects people younger than 21.
* people.select( people("age") < 21 )
*
* // Java:
* people.select( people("age").lt(21) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def < (other: Any): Column = LessThan(expr, lit(other).expr)
/**
* Less than.
* {{{
* // Scala: The following selects people younger than 21.
* people.select( people("age") < 21 )
*
* // Java:
* people.select( people("age").lt(21) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def lt(other: Any): Column = this < other
/**
* Less than or equal to.
* {{{
* // Scala: The following selects people age 21 or younger than 21.
* people.select( people("age") <= 21 )
*
* // Java:
* people.select( people("age").leq(21) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def <= (other: Any): Column = LessThanOrEqual(expr, lit(other).expr)
/**
* Less than or equal to.
* {{{
* // Scala: The following selects people age 21 or younger than 21.
* people.select( people("age") <= 21 )
*
* // Java:
* people.select( people("age").leq(21) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def leq(other: Any): Column = this <= other
/**
* Greater than or equal to an expression.
* {{{
* // Scala: The following selects people age 21 or older than 21.
* people.select( people("age") >= 21 )
*
* // Java:
* people.select( people("age").geq(21) )
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def >= (other: Any): Column = GreaterThanOrEqual(expr, lit(other).expr)
/**
* Greater than or equal to an expression.
* {{{
* // Scala: The following selects people age 21 or older than 21.
* people.select( people("age") >= 21 )
*
* // Java:
* people.select( people("age").geq(21) )
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def geq(other: Any): Column = this >= other
/**
* Equality test that is safe for null values.
*
* @group expr_ops
* @since 1.3.0
*/
def <=> (other: Any): Column = EqualNullSafe(expr, lit(other).expr)
/**
* Equality test that is safe for null values.
*
* @group java_expr_ops
* @since 1.3.0
*/
def eqNullSafe(other: Any): Column = this <=> other
/**
* Evaluates a list of conditions and returns one of multiple possible result expressions.
* If otherwise is not defined at the end, null is returned for unmatched conditions.
*
* {{{
* // Example: encoding gender string column into integer.
*
* // Scala:
* people.select(when(people("gender") === "male", 0)
* .when(people("gender") === "female", 1)
* .otherwise(2))
*
* // Java:
* people.select(when(col("gender").equalTo("male"), 0)
* .when(col("gender").equalTo("female"), 1)
* .otherwise(2))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def when(condition: Column, value: Any): Column = this.expr match {
  // A CaseWhen stores its children flat as (cond1, val1, cond2, val2, ...,
  // [else]); appending the new pair preserves that alternating layout.
  case CaseWhen(branches: Seq[Expression]) =>
    CaseWhen(branches ++ Seq(lit(condition).expr, lit(value).expr))
  case _ =>
    throw new IllegalArgumentException(
      "when() can only be applied on a Column previously generated by when() function")
}
/**
* Evaluates a list of conditions and returns one of multiple possible result expressions.
* If otherwise is not defined at the end, null is returned for unmatched conditions.
*
* {{{
* // Example: encoding gender string column into integer.
*
* // Scala:
* people.select(when(people("gender") === "male", 0)
* .when(people("gender") === "female", 1)
* .otherwise(2))
*
* // Java:
* people.select(when(col("gender").equalTo("male"), 0)
* .when(col("gender").equalTo("female"), 1)
* .otherwise(2))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def otherwise(value: Any): Column = this.expr match {
  case CaseWhen(branches: Seq[Expression]) =>
    // An even number of children means only (condition, value) pairs so
    // far, i.e. no else branch has been attached yet.
    if (branches.size % 2 == 0) {
      CaseWhen(branches :+ lit(value).expr)
    } else {
      throw new IllegalArgumentException(
        "otherwise() can only be applied once on a Column previously generated by when()")
    }
  case _ =>
    throw new IllegalArgumentException(
      "otherwise() can only be applied on a Column previously generated by when()")
}
/**
* True if the current column is between the lower bound and upper bound, inclusive.
*
* @group java_expr_ops
* @since 1.4.0
*/
def between(lowerBound: Any, upperBound: Any): Column = {
  // Inclusive on both ends: lowerBound <= this <= upperBound.
  val atLeastLower = this >= lowerBound
  val atMostUpper = this <= upperBound
  atLeastLower && atMostUpper
}
/**
* True if the current expression is NaN.
*
* @group expr_ops
* @since 1.5.0
*/
def isNaN: Column = IsNaN(expr)
/**
* True if the current expression is null.
*
* @group expr_ops
* @since 1.3.0
*/
def isNull: Column = IsNull(expr)
/**
* True if the current expression is NOT null.
*
* @group expr_ops
* @since 1.3.0
*/
def isNotNull: Column = IsNotNull(expr)
/**
* Boolean OR.
* {{{
* // Scala: The following selects people that are in school or employed.
* people.filter( people("inSchool") || people("isEmployed") )
*
* // Java:
* people.filter( people("inSchool").or(people("isEmployed")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def || (other: Any): Column = Or(expr, lit(other).expr)
/**
* Boolean OR.
* {{{
* // Scala: The following selects people that are in school or employed.
* people.filter( people("inSchool") || people("isEmployed") )
*
* // Java:
* people.filter( people("inSchool").or(people("isEmployed")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def or(other: Column): Column = this || other
/**
* Boolean AND.
* {{{
* // Scala: The following selects people that are in school and employed at the same time.
* people.select( people("inSchool") && people("isEmployed") )
*
* // Java:
* people.select( people("inSchool").and(people("isEmployed")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def && (other: Any): Column = And(expr, lit(other).expr)
/**
* Boolean AND.
* {{{
* // Scala: The following selects people that are in school and employed at the same time.
* people.select( people("inSchool") && people("isEmployed") )
*
* // Java:
* people.select( people("inSchool").and(people("isEmployed")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def and(other: Column): Column = this && other
/**
* Sum of this expression and another expression.
* {{{
* // Scala: The following selects the sum of a person's height and weight.
* people.select( people("height") + people("weight") )
*
* // Java:
* people.select( people("height").plus(people("weight")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def + (other: Any): Column = Add(expr, lit(other).expr)
/**
* Sum of this expression and another expression.
* {{{
* // Scala: The following selects the sum of a person's height and weight.
* people.select( people("height") + people("weight") )
*
* // Java:
* people.select( people("height").plus(people("weight")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def plus(other: Any): Column = this + other
/**
* Subtraction. Subtract the other expression from this expression.
* {{{
* // Scala: The following selects the difference between people's height and their weight.
* people.select( people("height") - people("weight") )
*
* // Java:
* people.select( people("height").minus(people("weight")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def - (other: Any): Column = Subtract(expr, lit(other).expr)
/**
* Subtraction. Subtract the other expression from this expression.
* {{{
* // Scala: The following selects the difference between people's height and their weight.
* people.select( people("height") - people("weight") )
*
* // Java:
* people.select( people("height").minus(people("weight")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def minus(other: Any): Column = this - other
/**
* Multiplication of this expression and another expression.
* {{{
* // Scala: The following multiplies a person's height by their weight.
* people.select( people("height") * people("weight") )
*
* // Java:
* people.select( people("height").multiply(people("weight")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def * (other: Any): Column = Multiply(expr, lit(other).expr)
/**
* Multiplication of this expression and another expression.
* {{{
* // Scala: The following multiplies a person's height by their weight.
* people.select( people("height") * people("weight") )
*
* // Java:
* people.select( people("height").multiply(people("weight")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def multiply(other: Any): Column = this * other
/**
 * Divides this expression by another expression.
* {{{
* // Scala: The following divides a person's height by their weight.
* people.select( people("height") / people("weight") )
*
* // Java:
* people.select( people("height").divide(people("weight")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def / (other: Any): Column = Divide(expr, lit(other).expr)
/**
* Division this expression by another expression.
* {{{
* // Scala: The following divides a person's height by their weight.
* people.select( people("height") / people("weight") )
*
* // Java:
* people.select( people("height").divide(people("weight")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def divide(other: Any): Column = this / other
/**
* Modulo (a.k.a. remainder) expression.
*
* @group expr_ops
* @since 1.3.0
*/
def % (other: Any): Column = Remainder(expr, lit(other).expr)
/**
* Modulo (a.k.a. remainder) expression.
*
* @group java_expr_ops
* @since 1.3.0
*/
def mod(other: Any): Column = this % other
/**
 * A boolean expression that is evaluated to true if the value of this expression is contained
 * by the evaluated values of the arguments.
 *
 * Deprecated alias of [[isin]]; retained only for source compatibility.
 *
 * @group expr_ops
 * @since 1.3.0
 */
@deprecated("use isin", "1.5.0")
@scala.annotation.varargs
def in(list: Any*): Column = isin(list : _*)
/**
 * A boolean expression that is true when the value of this expression equals one of the
 * evaluated arguments (built on the Catalyst `In` expression).
 *
 * @group expr_ops
 * @since 1.5.0
 */
@scala.annotation.varargs
def isin(list: Any*): Column = {
  val elements = list.map(element => lit(element).expr)
  In(expr, elements)
}
/**
* SQL like expression.
*
* @group expr_ops
* @since 1.3.0
*/
def like(literal: String): Column = Like(expr, lit(literal).expr)
/**
* SQL RLIKE expression (LIKE with Regex).
*
* @group expr_ops
* @since 1.3.0
*/
def rlike(literal: String): Column = RLike(expr, lit(literal).expr)
/**
* An expression that gets an item at position `ordinal` out of an array,
* or gets a value by key `key` in a [[MapType]].
*
* @group expr_ops
* @since 1.3.0
*/
def getItem(key: Any): Column = UnresolvedExtractValue(expr, Literal(key))
/**
* An expression that gets a field by name in a [[StructType]].
*
* @group expr_ops
* @since 1.3.0
*/
def getField(fieldName: String): Column = UnresolvedExtractValue(expr, Literal(fieldName))
/**
 * An expression that returns a substring.
 * @param startPos expression for the starting position.
 * @param len expression for the length of the substring.
 *
 * NOTE(review): positions are presumably 1-based, following SQL `substring` semantics —
 * confirm against the Catalyst `Substring` expression.
 *
 * @group expr_ops
 * @since 1.3.0
 */
def substr(startPos: Column, len: Column): Column = Substring(expr, startPos.expr, len.expr)
/**
 * An expression that returns a substring, with position and length given as integer literals.
 * Delegates to the [[Column]]-based overload via `lit`, producing the same Catalyst tree.
 *
 * @param startPos starting position.
 * @param len length of the substring.
 *
 * @group expr_ops
 * @since 1.3.0
 */
def substr(startPos: Int, len: Int): Column = substr(lit(startPos), lit(len))
/**
* Contains the other element.
*
* @group expr_ops
* @since 1.3.0
*/
def contains(other: Any): Column = Contains(expr, lit(other).expr)
/**
* String starts with.
*
* @group expr_ops
* @since 1.3.0
*/
def startsWith(other: Column): Column = StartsWith(expr, lit(other).expr)
/**
* String starts with another string literal.
*
* @group expr_ops
* @since 1.3.0
*/
def startsWith(literal: String): Column = this.startsWith(lit(literal))
/**
* String ends with.
*
* @group expr_ops
* @since 1.3.0
*/
def endsWith(other: Column): Column = EndsWith(expr, lit(other).expr)
/**
* String ends with another string literal.
*
* @group expr_ops
* @since 1.3.0
*/
def endsWith(literal: String): Column = this.endsWith(lit(literal))
/**
 * Gives the column an alias. Same as `as`.
 * {{{
 * // Renames colA to colB in select output.
 * df.select($"colA".alias("colB"))
 * }}}
 *
 * @see [[as]]
 * @group expr_ops
 * @since 1.4.0
 */
def alias(alias: String): Column = as(alias)
/**
 * Gives the column an alias.
 * {{{
 * // Renames colA to colB in select output.
 * df.select($"colA".as("colB"))
 * }}}
 *
 * If this column wraps a named expression, its metadata is carried over to the alias;
 * use the metadata-taking overload of `as` with empty metadata if that is not desired.
 *
 * @group expr_ops
 * @since 1.3.0
 */
def as(alias: String): Column = expr match {
  case named: NamedExpression => Alias(expr, alias)(explicitMetadata = Some(named.metadata))
  case unnamed => Alias(unnamed, alias)()
}
/**
* (Scala-specific) Assigns the given aliases to the results of a table generating function.
* {{{
* // Renames colA to colB in select output.
* df.select(explode($"myMap").as("key" :: "value" :: Nil))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def as(aliases: Seq[String]): Column = MultiAlias(expr, aliases)
/**
* Assigns the given aliases to the results of a table generating function.
* {{{
* // Renames colA to colB in select output.
* df.select(explode($"myMap").as("key" :: "value" :: Nil))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def as(aliases: Array[String]): Column = MultiAlias(expr, aliases)
/**
 * Gives the column an alias, using a `Symbol` for the name.
 * {{{
 * // Renames colA to colB in select output.
 * df.select($"colA".as('colB))
 * }}}
 *
 * If this column wraps a named expression, its metadata is carried over to the alias;
 * use the metadata-taking overload of `as` with empty metadata if that is not desired.
 *
 * @group expr_ops
 * @since 1.3.0
 */
def as(alias: Symbol): Column = expr match {
  case named: NamedExpression => Alias(expr, alias.name)(explicitMetadata = Some(named.metadata))
  case unnamed => Alias(unnamed, alias.name)()
}
/**
* Gives the column an alias with metadata.
* {{{
* val metadata: Metadata = ...
* df.select($"colA".as("colB", metadata))
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def as(alias: String, metadata: Metadata): Column = {
Alias(expr, alias)(explicitMetadata = Some(metadata))
}
/**
 * Casts the column to a different data type.
 * {{{
 * // Casts colA to IntegerType.
 * import org.apache.spark.sql.types.IntegerType
 * df.select(df("colA").cast(IntegerType))
 *
 * // equivalent to
 * df.select(df("colA").cast("int"))
 * }}}
 *
 * @group expr_ops
 * @since 1.3.0
 */
def cast(to: DataType): Column = expr match {
  // Wrap named expressions in UnresolvedAlias so the column keeps its name after the cast.
  // The matched value is not needed, so a type-only pattern avoids an unused-binding warning.
  case _: NamedExpression => UnresolvedAlias(Cast(expr, to))
  case _ => Cast(expr, to)
}
/**
* Casts the column to a different data type, using the canonical string representation
* of the type. The supported types are: `string`, `boolean`, `byte`, `short`, `int`, `long`,
* `float`, `double`, `decimal`, `date`, `timestamp`.
* {{{
* // Casts colA to integer.
* df.select(df("colA").cast("int"))
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def cast(to: String): Column = cast(DataTypeParser.parse(to))
/**
* Returns an ordering used in sorting.
* {{{
* // Scala: sort a DataFrame by age column in descending order.
* df.sort(df("age").desc)
*
* // Java
* df.sort(df.col("age").desc());
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def desc: Column = SortOrder(expr, Descending)
/**
* Returns an ordering used in sorting.
* {{{
* // Scala: sort a DataFrame by age column in ascending order.
* df.sort(df("age").asc)
*
* // Java
* df.sort(df.col("age").asc());
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def asc: Column = SortOrder(expr, Ascending)
/**
 * Prints the underlying expression to the console, for debugging.
 *
 * @param extended if true, prints the expression's full tree form; otherwise its
 *                 pretty-printed form.
 * @group df_ops
 * @since 1.3.0
 */
def explain(extended: Boolean): Unit = {
  // scalastyle:off println
  val rendered = if (extended) expr.toString else expr.prettyString
  println(rendered)
  // scalastyle:on println
}
/**
* Compute bitwise OR of this expression with another expression.
* {{{
* df.select($"colA".bitwiseOR($"colB"))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def bitwiseOR(other: Any): Column = BitwiseOr(expr, lit(other).expr)
/**
* Compute bitwise AND of this expression with another expression.
* {{{
* df.select($"colA".bitwiseAND($"colB"))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def bitwiseAND(other: Any): Column = BitwiseAnd(expr, lit(other).expr)
/**
* Compute bitwise XOR of this expression with another expression.
* {{{
* df.select($"colA".bitwiseXOR($"colB"))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def bitwiseXOR(other: Any): Column = BitwiseXor(expr, lit(other).expr)
/**
* Define a windowing column.
*
* {{{
* val w = Window.partitionBy("name").orderBy("id")
* df.select(
* sum("price").over(w.rangeBetween(Long.MinValue, 2)),
* avg("price").over(w.rowsBetween(0, 4))
* )
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def over(window: expressions.WindowSpec): Column = window.withAggregate(this)
}
/**
 * :: Experimental ::
 * A convenient class used for constructing schema.
 *
 * @since 1.3.0
 */
@Experimental
class ColumnName(name: String) extends Column(name) {

  /**
   * Creates a new [[StructField]] of type boolean.
   * @since 1.3.0
   */
  def boolean: StructField = StructField(name, BooleanType)

  /**
   * Creates a new [[StructField]] of type byte.
   * @since 1.3.0
   */
  def byte: StructField = StructField(name, ByteType)

  /**
   * Creates a new [[StructField]] of type short.
   * @since 1.3.0
   */
  def short: StructField = StructField(name, ShortType)

  /**
   * Creates a new [[StructField]] of type int.
   * @since 1.3.0
   */
  def int: StructField = StructField(name, IntegerType)

  /**
   * Creates a new [[StructField]] of type long.
   * @since 1.3.0
   */
  def long: StructField = StructField(name, LongType)

  /**
   * Creates a new [[StructField]] of type float.
   * @since 1.3.0
   */
  def float: StructField = StructField(name, FloatType)

  /**
   * Creates a new [[StructField]] of type double.
   * @since 1.3.0
   */
  def double: StructField = StructField(name, DoubleType)

  /**
   * Creates a new [[StructField]] of type string.
   * @since 1.3.0
   */
  def string: StructField = StructField(name, StringType)

  /**
   * Creates a new [[StructField]] of type date.
   * @since 1.3.0
   */
  def date: StructField = StructField(name, DateType)

  /**
   * Creates a new [[StructField]] of type decimal.
   * @since 1.3.0
   */
  def decimal: StructField = StructField(name, DecimalType.USER_DEFAULT)

  /**
   * Creates a new [[StructField]] of type decimal.
   * @since 1.3.0
   */
  def decimal(precision: Int, scale: Int): StructField =
    StructField(name, DecimalType(precision, scale))

  /**
   * Creates a new [[StructField]] of type timestamp.
   * @since 1.3.0
   */
  def timestamp: StructField = StructField(name, TimestampType)

  /**
   * Creates a new [[StructField]] of type binary.
   * @since 1.3.0
   */
  def binary: StructField = StructField(name, BinaryType)

  /**
   * Creates a new [[StructField]] of type array.
   * @since 1.3.0
   */
  def array(dataType: DataType): StructField = StructField(name, ArrayType(dataType))

  /**
   * Creates a new [[StructField]] of type map.
   * @since 1.3.0
   */
  def map(keyType: DataType, valueType: DataType): StructField =
    map(MapType(keyType, valueType))

  /**
   * Creates a new [[StructField]] from an existing [[MapType]].
   * @since 1.3.0
   */
  def map(mapType: MapType): StructField = StructField(name, mapType)

  /**
   * Creates a new [[StructField]] of type struct.
   * @since 1.3.0
   */
  def struct(fields: StructField*): StructField = struct(StructType(fields))

  /**
   * Creates a new [[StructField]] of type struct.
   * @since 1.3.0
   */
  def struct(structType: StructType): StructField = StructField(name, structType)
}
| pronix/spark | sql/core/src/main/scala/org/apache/spark/sql/Column.scala | Scala | apache-2.0 | 27,806 |
package de.unihamburg.vsis.sddf.reading
import scala.reflect.ClassTag
import org.apache.spark.rdd.RDD
import de.unihamburg.vsis.sddf.pipe.PipeElement
import de.unihamburg.vsis.sddf.pipe.context.AbstractPipeContext
import de.unihamburg.vsis.sddf.visualisation.model.BasicAnalysable
/**
 * Pipe element that drops the last `lineCount` elements of an RDD, preserving order.
 * Typically used to strip trailing footer lines from a freshly read input.
 */
class PipeReaderOmitTail[A: ClassTag](lineCount: Long = 1) extends PipeElement[RDD[A], RDD[A]] {

  def step(input: RDD[A])(implicit pipeContext: AbstractPipeContext): RDD[A] = {
    log.debug("Omitting last " + lineCount + " lines.")
    analysable.values += ("lines omitted" -> lineCount.toString())
    // Number of leading elements to keep; never negative, even if lineCount exceeds the size.
    val cutoff = math.max(0L, input.count - lineCount)
    input.zipWithIndex()
      .filter { case (_, index) => index < cutoff }
      .map { case (element, _) => element }
  }
}
/** Factory for [[PipeReaderOmitTail]], mirroring the class constructor. */
object PipeReaderOmitTail {

  /**
   * Creates a pipe element that drops the last `lineCount` elements of an RDD.
   *
   * @param lineCount number of trailing elements to omit (defaults to 1)
   */
  def apply[A: ClassTag](lineCount: Long = 1): PipeReaderOmitTail[A] =
    new PipeReaderOmitTail[A](lineCount)
}
package helper.email
import controllers.routes
import helper.Commons
import models.EmailService
import org.joda.time.{Hours, DateTime}
import play.Configuration
import play.api.Logger
/**
* Created by manuel on 19.04.2016.
*/
object MailTemplates {
  /**
   * Emails the account link (papers & conferences overview) for `toEmail`.
   *
   * Looks the address up in [[EmailService]], creating it with a freshly generated secret on
   * first use. A mail is produced at most once per 12 hours per address, except that a newly
   * created address is always mailed; actual delivery additionally requires the
   * `helper.mailing.active` configuration value to be the string "true".
   */
  def sendAccountMail(toEmail : String, configuration: Configuration, emailService: EmailService): Unit = {
    var isNewEmailAddress = false
    // Fetch the stored record, or create one (with a generated secret) if this address is new.
    val email = emailService.findByEmail(toEmail).getOrElse({
      emailService.create(toEmail,Commons.generateSecret())
      isNewEmailAddress = true
      emailService.findByEmail(toEmail).get
    })
    // NOTE(review): assumes the persisted record always has an id — `.get` would throw otherwise.
    val link = configuration.getString("hcomp.ballot.baseURL") + routes.Account.account(email.id.get,email.secret).url
    val subject = "PaperValidator: Your papers and conferences"
    val content =
      s"""Dear user of PaperValidator,<br><br>
        |
        |Here is the link to your paper and conferences:<br>
        |<a href="$link">
        | <b>$link</b>
        |</a><br><br>
        |
        |You can edit your papers and conferences under this link.
        |
        |Have fun using PaperValidator!
      """.stripMargin
    Logger.debug(content)
    // Throttle: send at most once per 12 hours, unless the address was just created.
    if(Hours.hoursBetween(email.lastMail,DateTime.now()).getHours > 12 || isNewEmailAddress) {
      Logger.debug("Email would be sent!")
      // Record the send time before delivery so repeated calls within the window are suppressed.
      emailService.setLastMailNow(email.id.getOrElse(-1),email.secret)
      if(configuration.getString("helper.mailing.active") == "true") {
        MailSendingService.sendMail(toEmail,subject,content)
      }
    } else {
      Logger.debug("Email would not be sent! Daycount:" + Hours.hoursBetween(email.lastMail,DateTime.now()).getHours)
    }
  }
  /**
   * Builds the management-link email for conference `conferenceName`.
   * NOTE(review): delivery is currently disabled — the `sendMail` call is commented out,
   * so this method only logs the message body.
   */
  def sendConferenceMail(conferenceName : String, conferenceLink : String, toEmail : String): Unit = {
    val subject = "PaperValidator: About Conference " + conferenceName
    // NOTE(review): the template wraps the name as '"name"' (single AND double quotes) — confirm intended.
    val content =
      s"""Dear user of PaperValidator,<br><br>
        |
        |Here is the link to your Conference '"$conferenceName"':<br>
        |<a href="$conferenceLink">
        | <b>$conferenceLink</b>
        |</a><br><br>
        |
        |You can edit the conference settings or delete the conference under this link.
        |
        |Have fun using PaperValidator!
      """.stripMargin
    Logger.debug(content)
    //MailSendingService.sendMail(toEmail,subject,content)
  }
def sendPaperAnalyzedMail(paperName: String, paperLink : String, permutations: Int, toEmail: String, comment: String = ""): Unit = {
val subject = "PaperValidator: " + paperName + " analyzed!"
val content =
s"""Dear user of PaperValidator,<br><br>
|
|Your paper '$paperName' has been analyzed.<br>
|There where $permutations permutations found. Confirm with the following link that you would like to process the paper:<br>
|<a href="$paperLink">
| <b>$paperLink</b>
|</a><br><br>
|
|$comment
|
|Have fun using PaperValidator!
""".stripMargin
Logger.debug(content)
//MailSendingService.sendMail(toEmail,subject,content)
}
  /**
   * Builds the "analysis completed" notification for `toEmail`, linking to the results page.
   * NOTE(review): delivery is currently disabled — the `sendMail` call is commented out,
   * so this method only logs the message body.
   */
  def sendPaperCompletedMail(paperName: String, paperLink : String, toEmail: String): Unit = {
    val subject = "PaperValidator: " + paperName + " completed!"
    val content =
      s"""Dear user of PaperValidator,<br><br>
        |
        |The analysis of your paper '$paperName' has been completed.<br>
        |You can checkout the results under this link:
        |<a href="$paperLink">
        | <b>$paperLink</b>
        |</a><br><br>
        |
        |Have fun using PaperValidator!
      """.stripMargin
    Logger.debug(content)
    //MailSendingService.sendMail(toEmail,subject,content)
  }
}
| manuelroesch/PaperValidator | app/helper/email/MailTemplates.scala | Scala | mit | 3,711 |
package controllers.registration.business
import itutil.ControllerISpec
import models.{ApplicantDetails, BusinessContact}
import models.api.{Address, Country}
import models.view.HomeAddressView
import org.jsoup.Jsoup
import play.api.http.HeaderNames
import play.api.libs.json.Json
import play.api.test.Helpers._
// Integration spec for the international principal-place-of-business address page:
// GET pre-population (from Save-4-Later and from the backend) and POST validation/redirects.
class InternationalPpobAddressControllerISpec extends ControllerISpec {

  val url = "/principal-place-business/international"

  // A non-UK country; the POST tests below show the page rejects "United Kingdom".
  val testForeignCountry = Country(Some("NO"), Some("Norway"))
  // Address with only the fields submitted by the "minimal" POST scenario.
  val testShortForeignAddress = Address(testLine1, Some(testLine2), country = Some(testForeignCountry))
  val testForeignAddress = address.copy(country = Some(testForeignCountry))

  "GET /principal-place-business/international" when {
    "reading from S4L" must {
      "return OK when the ApplicantDetails block is empty" in new Setup {
        given
          .user.isAuthorised()
          .vatScheme.contains(emptyVatSchemeNetp)
          .vatScheme.has("business-contact", Json.toJson(BusinessContact())(BusinessContact.apiFormat))
          .s4lContainer[BusinessContact].contains(BusinessContact())

        insertCurrentProfileIntoDb(currentProfile, sessionId)

        val res = await(buildClient(url).get())

        res.status mustBe OK
      }
      "return OK and pre-populate when the ApplicantDetails block contains an address" in new Setup {
        given
          .user.isAuthorised()
          .vatScheme.contains(emptyVatSchemeNetp)
          .s4lContainer[BusinessContact].contains(BusinessContact(ppobAddress = Some(testForeignAddress)))

        insertCurrentProfileIntoDb(currentProfile, sessionId)

        val res = await(buildClient(url).get())

        res.status mustBe OK
        // The form fields and the country dropdown must reflect the stored address.
        val doc = Jsoup.parse(res.body)
        doc.select("input[id=line1]").`val`() mustBe testLine1
        doc.select("input[id=line2]").`val`() mustBe testLine2
        doc.select("option[value=Norway]").hasAttr("selected") mustBe true
      }
    }
    "when reading from the backend" must {
      "return OK and pre-populate the page" in new Setup {
        val businessContact = BusinessContact(ppobAddress = Some(testForeignAddress))
        // S4L is empty here, so the controller must fall back to the backend copy.
        given
          .user.isAuthorised()
          .s4lContainer[BusinessContact].isEmpty
          .vatScheme.contains(emptyVatSchemeNetp)
          .vatScheme.has("business-contact", Json.toJson(businessContact)(BusinessContact.apiFormat))

        insertCurrentProfileIntoDb(currentProfile, sessionId)

        val res = await(buildClient(url).get())

        res.status mustBe OK
        val doc = Jsoup.parse(res.body)
        doc.select("input[id=line1]").`val`() mustBe testLine1
        doc.select("input[id=line2]").`val`() mustBe testLine2
        doc.select("option[value=Norway]").hasAttr("selected") mustBe true
      }
    }
  }

  "POST /principal-place-business/international" must {
    "Store the address and redirect to the previous address page if a minimal address is provided" in new Setup {
      given
        .user.isAuthorised()
        .vatScheme.contains(emptyVatSchemeNetp)
        .vatScheme.has("business-contact", Json.toJson(BusinessContact())(BusinessContact.apiFormat))
        .s4lContainer[BusinessContact].isEmpty
        .s4lContainer[BusinessContact].isUpdatedWith(BusinessContact(ppobAddress = Some(testShortForeignAddress)))

      insertCurrentProfileIntoDb(currentProfile, sessionId)

      // Only line 1 and country — the smallest form submission the page accepts.
      val res = await(buildClient(url).post(Map(
        "line1" -> "testLine1",
        "country" -> "Norway"
      )))

      res.status mustBe SEE_OTHER
      res.header(HeaderNames.LOCATION) mustBe Some(routes.BusinessContactDetailsController.show.url)
    }
    "Store the address and redirect to the previous address page if a full address is provided" in new Setup {
      given
        .user.isAuthorised()
        .vatScheme.contains(emptyVatSchemeNetp)
        .vatScheme.has("business-contact", Json.toJson(BusinessContact())(BusinessContact.apiFormat))
        .s4lContainer[BusinessContact].contains(BusinessContact())
        .s4lContainer[BusinessContact].isUpdatedWith(BusinessContact(ppobAddress = Some(testForeignAddress)))

      insertCurrentProfileIntoDb(currentProfile, sessionId)

      val res = await(buildClient(url).post(Map(
        "line1" -> "testLine1",
        "line2" -> "testLine2",
        "line3" -> "testLine3",
        "line4" -> "testLine4",
        "line5" -> "testLine5",
        "postcode" -> "AB12 3YZ",
        "country" -> "Norway"
      )))

      res.status mustBe SEE_OTHER
      res.header(HeaderNames.LOCATION) mustBe Some(routes.BusinessContactDetailsController.show.url)
    }
    "return BAD_REQUEST if line 1 is missing" in new Setup {
      given
        .user.isAuthorised()
        .vatScheme.contains(emptyVatSchemeNetp)
        .vatScheme.has("business-contact", Json.toJson(BusinessContact())(BusinessContact.apiFormat))
        .s4lContainer[BusinessContact].contains(BusinessContact())
        .s4lContainer[BusinessContact].isUpdatedWith(BusinessContact(ppobAddress = Some(testForeignAddress)))

      insertCurrentProfileIntoDb(currentProfile, sessionId)

      val res = await(buildClient(url).post(Map(
        "line2" -> "testLine2",
        "line3" -> "testLine3",
        "line4" -> "testLine4",
        "line5" -> "testLine5",
        "postcode" -> "AB12 3YZ",
        "country" -> "Norway"
      )))

      res.status mustBe BAD_REQUEST
    }
    "return BAD_REQUEST if country is missing" in new Setup {
      given
        .user.isAuthorised()
        .vatScheme.contains(emptyVatSchemeNetp)
        .vatScheme.doesNotExistForKey("business-contact")
        .s4lContainer[BusinessContact].contains(BusinessContact())
        .s4lContainer[BusinessContact].isUpdatedWith(BusinessContact(ppobAddress = Some(testForeignAddress)))

      insertCurrentProfileIntoDb(currentProfile, sessionId)

      val res = await(buildClient(url).post(Map(
        "line1" -> "testLine1",
        "line2" -> "testLine2",
        "line3" -> "testLine3",
        "line4" -> "testLine4",
        "line5" -> "testLine5",
        "postcode" -> "AB12 3YZ",
      )))

      res.status mustBe BAD_REQUEST
    }
    "return BAD_REQUEST if country is UK" in new Setup {
      // UK addresses must go through the domestic capture flow, not this page.
      given
        .user.isAuthorised()
        .vatScheme.contains(emptyVatSchemeNetp)
        .vatScheme.doesNotExistForKey("business-contact")
        .s4lContainer[BusinessContact].contains(BusinessContact())

      insertCurrentProfileIntoDb(currentProfile, sessionId)

      val res = await(buildClient(url).post(Map(
        "line1" -> "testLine1",
        "line2" -> "testLine2",
        "line3" -> "testLine3",
        "line4" -> "testLine4",
        "line5" -> "testLine5",
        "postcode" -> "AB12 3YZ",
        "country" -> "United Kingdom"
      )))

      res.status mustBe BAD_REQUEST
    }
  }
}
| hmrc/vat-registration-frontend | it/controllers/registration/business/InternationalPpobAddressControllerISpec.scala | Scala | apache-2.0 | 6,819 |
package scala.reflect.reify
package phases
import scala.tools.nsc.symtab.Flags._
trait Reshape {
self: Reifier =>
import global._
import definitions._
import treeInfo.Unapplied
private val runDefinitions = currentRun.runDefinitions
import runDefinitions._
/**
* Rolls back certain changes that were introduced during typechecking of the reifee.
*
* These include:
* * Undoing macro expansions
* * Replacing type trees with TypeTree(tpe)
* * Reassembling CompoundTypeTrees into reifiable form
* * Transforming Modifiers.annotations into Symbol.annotations
* * Transforming Annotated annotations into AnnotatedType annotations
* * Transforming Annotated(annot, expr) into Typed(expr, TypeTree(Annotated(annot, _))
* * Non-idempotencies of the typechecker: https://issues.scala-lang.org/browse/SI-5464
*/
val reshape = new Transformer {
var currentSymbol: Symbol = NoSymbol
    /**
     * Single traversal step: first undoes any macro expansion on the node, then rewrites the
     * node into its pre-typer equivalent (delegating to the toPreTyper* helpers), and finally
     * recurses into the result via the default traversal.
     */
    override def transform(tree0: Tree) = {
      val tree = undoMacroExpansion(tree0)
      // Remember the symbol so transformModifiers can consult it for this node's Modifiers.
      currentSymbol = tree.symbol

      val preTyper = tree match {
        case tree if tree.isErroneous =>
          // Erroneous trees are left untouched.
          tree
        case tt @ TypeTree() =>
          toPreTyperTypeTree(tt)
        case ctt @ CompoundTypeTree(_) =>
          toPreTyperCompoundTypeTree(ctt)
        case toa @ TypedOrAnnotated(_) =>
          toPreTyperTypedOrAnnotated(toa)
        case ta @ TypeApply(_, _) if isCrossStageTypeBearer(ta) =>
          if (reifyDebug) println("cross-stage type bearer, retaining: " + tree)
          ta
        case ta @ TypeApply(hk, ts) =>
          // Drop the type application entirely if any type argument is a discarded
          // (compiler-synthesized) TypeTree; the types will be re-inferred later.
          val discard = ts collect { case tt: TypeTree => tt } exists isDiscarded
          if (reifyDebug && discard) println("discarding TypeApply: " + tree)
          if (discard) hk else ta
        case classDef @ ClassDef(mods, name, params, impl) =>
          // Strip typer-generated accessors and synthetic case-class members from the body.
          val Template(parents, self, body) = impl
          var body1 = trimAccessors(classDef, body)
          body1 = trimSyntheticCaseClassMembers(classDef, body1)
          val impl1 = Template(parents, self, body1).copyAttrs(impl)
          ClassDef(mods, name, params, impl1).copyAttrs(classDef)
        case moduledef @ ModuleDef(mods, name, impl) =>
          val Template(parents, self, body) = impl
          var body1 = trimAccessors(moduledef, body)
          body1 = trimSyntheticCaseClassMembers(moduledef, body1)
          val impl1 = Template(parents, self, body1).copyAttrs(impl)
          ModuleDef(mods, name, impl1).copyAttrs(moduledef)
        case template @ Template(parents, self, body) =>
          // Remove synthetic parents (discarded TypeTrees) and synthetic case-class companions.
          val discardedParents = parents collect { case tt: TypeTree => tt } filter isDiscarded
          if (reifyDebug && discardedParents.length > 0) println("discarding parents in Template: " + discardedParents.mkString(", "))
          val parents1 = parents diff discardedParents
          val body1 = trimSyntheticCaseClassCompanions(body)
          Template(parents1, self, body1).copyAttrs(template)
        case block @ Block(stats, expr) =>
          val stats1 = trimSyntheticCaseClassCompanions(stats)
          Block(stats1, expr).copyAttrs(block)
        case unapply @ UnApply(Unapplied(Select(fun, nme.unapply | nme.unapplySeq)), args) =>
          // Roll a typer-produced UnApply back into the surface-syntax Apply.
          if (reifyDebug) println("unapplying unapply: " + tree)
          Apply(fun, args).copyAttrs(unapply)
        case _ =>
          tree
      }

      super.transform(preTyper)
    }
    /**
     * If `tree` carries a MacroExpansionAttachment, rolls the expansion back to the recorded
     * expandee; otherwise returns `tree` unchanged. Tag-materializer expansions are special-cased
     * and replaced by equivalent `implicitly` calls, because the materializer macros themselves
     * cannot be referenced after reification (see the inline comment below).
     */
    private def undoMacroExpansion(tree: Tree): Tree =
      tree.attachments.get[analyzer.MacroExpansionAttachment] match {
        case Some(analyzer.MacroExpansionAttachment(original, _)) =>
          def mkImplicitly(tp: Type) = atPos(tree.pos)(
            gen.mkNullaryCall(Predef_implicitly, List(tp))
          )
          val sym = original.symbol
          original match {
            // this hack is necessary until I fix implicit macros
            // so far tag materialization is implemented by sneaky macros hidden in scala-compiler.jar
            // hence we cannot reify references to them, because noone will be able to see them later
            // when implicit macros are fixed, these sneaky macros will move to corresponding companion objects
            // of, say, ClassTag or TypeTag
            case Apply(TypeApply(_, List(tt)), _) if sym == materializeClassTag => mkImplicitly(appliedType(ClassTagClass, tt.tpe))
            case Apply(TypeApply(_, List(tt)), List(pre)) if sym == materializeWeakTypeTag => mkImplicitly(typeRef(pre.tpe, WeakTypeTagClass, List(tt.tpe)))
            case Apply(TypeApply(_, List(tt)), List(pre)) if sym == materializeTypeTag => mkImplicitly(typeRef(pre.tpe, TypeTagClass, List(tt.tpe)))
            case _ => original
          }
        case _ => tree
      }
override def transformModifiers(mods: Modifiers) = {
val mods1 = toPreTyperModifiers(mods, currentSymbol)
super.transformModifiers(mods1)
}
private def toPreTyperModifiers(mods: Modifiers, sym: Symbol) = {
if (!sym.annotations.isEmpty) {
val postTyper = sym.annotations filter (_.original != EmptyTree)
if (reifyDebug && !postTyper.isEmpty) println("reify symbol annotations for: " + sym)
if (reifyDebug && !postTyper.isEmpty) println("originals are: " + sym.annotations)
val preTyper = postTyper map toPreTyperAnnotation
mods.withAnnotations(preTyper)
} else {
mods
}
}
/** Restore pre-typer representation of a type.
*
* NB: This is the trickiest part of reification!
*
* In most cases, we're perfectly fine to reify a Type itself (see `reifyType`).
* However if the type involves a symbol declared inside the quasiquote (i.e. registered in `boundSyms`),
* then we cannot reify it, or otherwise subsequent reflective compilation will fail.
*
* Why will it fail? Because reified deftrees (e.g. ClassDef(...)) will generate fresh symbols during that compilation,
* so naively reified symbols will become out of sync, which brings really funny compilation errors and/or crashes, e.g.:
* https://issues.scala-lang.org/browse/SI-5230
*
* To deal with this unpleasant fact, we need to fall back from types to equivalent trees (after all, parser trees don't contain any types, just trees, so it should be possible).
* Luckily, these original trees get preserved for us in the `original` field when Trees get transformed into TypeTrees.
* And if an original of a type tree is empty, we can safely assume that this type is non-essential (e.g. was inferred/generated by the compiler).
* In that case the type can be omitted (e.g. reified as an empty TypeTree), since it will be inferred again later on.
*
* An important property of the original is that it isn't just a pre-typer tree.
* It's actually kind of a post-typer tree with symbols assigned to its Idents (e.g. Ident("List") will contain a symbol that points to immutable.this.List).
* This is very important, since subsequent reflective compilation won't have to resolve these symbols.
* In general case, such resolution cannot be performed, since reification doesn't preserve lexical context,
* which means that reflective compilation won't be aware of, say, imports that were provided when the reifee has been compiled.
*
* This workaround worked surprisingly well and allowed me to fix several important reification bugs, until the abstraction has leaked.
* Suddenly I found out that in certain contexts original trees do not contain symbols, but are just parser trees.
* To the moment I know only one such situation: typedAnnotations does not typecheck the annotation in-place, but rather creates new trees and typechecks them, so the original remains symless.
* Thus we apply a workaround for that in typedAnnotated. I hope this will be the only workaround in this department.
* upd. There are also problems with CompoundTypeTrees. I had to use attachments to retain necessary information.
*
* upd. Recently I went ahead and started using original for all TypeTrees, regardless of whether they refer to local symbols or not.
* As a result, `reifyType` is never called directly by tree reification (and, wow, it seems to work great!).
* The only usage of `reifyType` now is for servicing typetags, however, I have some ideas how to get rid of that as well.
*/
    // A TypeTree with no `original` was synthesized by the compiler, so it can be dropped
    // ("discarded") and safely re-inferred during the later reflective compilation.
    private def isDiscarded(tt: TypeTree) = tt.original == null
private def toPreTyperTypeTree(tt: TypeTree): Tree = {
if (!isDiscarded(tt)) {
// here we rely on the fact that the originals that reach this point
// have all necessary symbols attached to them (i.e. that they can be recompiled in any lexical context)
// if this assumption fails, please, don't be quick to add postprocessing here (like I did before)
// but rather try to fix this in Typer, so that it produces quality originals (like it's done for typedAnnotated)
if (reifyDebug) println("TypeTree, essential: %s (%s)".format(tt.tpe, tt.tpe.kind))
if (reifyDebug) println("verdict: rolled back to original %s".format(tt.original.toString.replaceAll("\\s+", " ")))
transform(tt.original)
} else {
// type is deemed to be non-essential
// erase it and hope that subsequent reflective compilation will be able to recreate it again
if (reifyDebug) println("TypeTree, non-essential: %s (%s)".format(tt.tpe, tt.tpe.kind))
if (reifyDebug) println("verdict: discarded")
TypeTree()
}
}
    /**
     * Restores a pre-typer CompoundTypeTree, recovering the original parents and stats stashed
     * by the typer in a CompoundTypeTreeOriginalAttachment (falling back to the typed ones when
     * no attachment is present). Refinements with a non-empty body cannot be reified.
     */
    private def toPreTyperCompoundTypeTree(ctt: CompoundTypeTree): Tree = {
      val CompoundTypeTree(tmpl @ Template(parents, self, stats)) = ctt
      if (stats.nonEmpty) CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt)
      assert(self eq noSelfType, self)
      val att = tmpl.attachments.get[CompoundTypeTreeOriginalAttachment]
      val CompoundTypeTreeOriginalAttachment(parents1, stats1) = att.getOrElse(CompoundTypeTreeOriginalAttachment(parents, stats))
      CompoundTypeTree(Template(parents1, self, stats1))
    }
    /**
     * Restores the pre-typer shape of Typed and Annotated nodes. Typed nodes whose ascription
     * was produced by a value annotation (`expr: @ann`) are unwound back to the original
     * Annotated tree; type annotations inside Annotated nodes are converted to their
     * pre-typer form via toPreTyperAnnotation.
     */
    private def toPreTyperTypedOrAnnotated(tree: Tree): Tree = tree match {
      case ty @ Typed(expr1, tpt) =>
        if (reifyDebug) println("reify typed: " + tree)
        val original = tpt match {
          case tt @ TypeTree() => tt.original
          case tpt => tpt
        }
        // Dig through nested Annotated layers to the innermost annotated argument.
        val annotatedArg = {
          def loop(tree: Tree): Tree = tree match {
            case annotated1 @ Annotated(ann, annotated2 @ Annotated(_, _)) => loop(annotated2)
            case annotated1 @ Annotated(ann, arg) => arg
            case _ => EmptyTree
          }
          loop(original)
        }
        if (annotatedArg != EmptyTree) {
          if (annotatedArg.isType) {
            if (reifyDebug) println("verdict: was an annotated type, reify as usual")
            ty
          } else {
            // A value annotation: the Typed node was typer-generated, so roll back to
            // the original Annotated tree instead.
            if (reifyDebug) println("verdict: was an annotated value, equivalent is " + original)
            toPreTyperTypedOrAnnotated(original)
          }
        } else {
          if (reifyDebug) println("verdict: wasn't annotated, reify as usual")
          ty
        }
      case at @ Annotated(annot, arg) =>
        if (reifyDebug) println("reify type annotations for: " + tree)
        assert(at.tpe.isInstanceOf[AnnotatedType], "%s (%s)".format(at.tpe, at.tpe.kind))
        // Convert the (single) annotation carried by the AnnotatedType back to tree form.
        val annot1 = toPreTyperAnnotation(at.tpe.asInstanceOf[AnnotatedType].annotations(0))
        if (reifyDebug) println("originals are: " + annot1)
        Annotated(annot1, arg).copyAttrs(at)
    }
    /** Restore pre-typer representation of an annotation.
     * The trick here is to retain the symbols that have been populated during typechecking of the annotation.
     * If we do not do that, subsequent reflective compilation will fail.
     */
    private def toPreTyperAnnotation(ann: AnnotationInfo): Tree = {
      val args = if (ann.assocs.isEmpty) {
        ann.args
      } else {
        // Java-style annotation: rebuild `name = value` pairs as named arguments,
        // converting classfile-annotation arguments back into trees.
        def toScalaAnnotation(jann: ClassfileAnnotArg): Tree = (jann: @unchecked) match {
          case LiteralAnnotArg(const) => Literal(const)
          case ArrayAnnotArg(arr) => Apply(Ident(definitions.ArrayModule), arr.toList map toScalaAnnotation)
          case NestedAnnotArg(ann) => toPreTyperAnnotation(ann)
        }
        ann.assocs map { case (nme, arg) => AssignOrNamedArg(Ident(nme), toScalaAnnotation(arg)) }
      }
      // Recover the annotated type's tree from the `new Ann(...)` original, keeping its symbols.
      def extractOriginal: PartialFunction[Tree, Tree] = { case Apply(Select(New(tpt), _), _) => tpt }
      assert(extractOriginal.isDefinedAt(ann.original), showRaw(ann.original))
      New(TypeTree(ann.atp) setOriginal extractOriginal(ann.original), List(args))
    }
    /** Reconstructs the pre-typer `ValDef` of a lazy val from its typer-generated accessor `DefDef`.
     *  Typer rewrites `lazy val x = rhs` into an accessor whose body assigns the underlying field;
     *  this extracts the original rhs and restores value-definition modifiers.
     */
    private def toPreTyperLazyVal(ddef: DefDef): ValDef = {
      // The accessor body has the shape `{ x$lzy = rhs; x$lzy }`; recover `rhs` from the assignment.
      def extractRhs(rhs: Tree) = rhs match {
        case Block(Assign(lhs, rhs)::Nil, _) if lhs.symbol.isLazy => rhs
        case _ => rhs // unit or trait case
      }
      val DefDef(mods0, name0, _, _, tpt0, rhs0) = ddef
      val name1 = name0.dropLocal
      val Modifiers(flags0, privateWithin0, annotations0) = mods0
      // Keep only the getter-related flags and strip the accessor/method markers added by typer.
      val flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
      val mods1 = Modifiers(flags1, privateWithin0, annotations0) setPositions mods0.positions
      val mods2 = toPreTyperModifiers(mods1, ddef.symbol)
      ValDef(mods2, name1, tpt0, extractRhs(rhs0))
    }
    /** Removes typer-generated field accessors from `stats` and folds their visibility and
     *  annotations back into the corresponding `ValDef`s, restoring the pre-typer template body.
     *
     *  @param deff the enclosing definition (currently unused; kept for symmetry with other trimmers)
     *  @param stats the statements of the template body
     *  @return the statements with accessor DefDefs removed and field ValDefs adjusted
     */
    private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = {
      // Index every val/def definition by its symbol so accessors can be matched to their fields.
      val symdefs = (stats collect { case vodef: ValOrDefDef => vodef } map (vodeff => vodeff.symbol -> vodeff)).toMap
      // Accumulates, per field, the accessor methods (getters/setters/bean accessors) that belong to it.
      val accessors = scala.collection.mutable.Map[ValDef, List[DefDef]]()
      stats collect { case ddef: DefDef => ddef } foreach (defdef => {
        // Direct getter/setter: the accessed symbol points straight at the underlying field.
        val valdef = symdefs get defdef.symbol.accessedOrSelf collect { case vdef: ValDef => vdef } getOrElse null
        if (valdef != null) accessors(valdef) = accessors.getOrElse(valdef, Nil) :+ defdef
        // Bean accessors (getFoo/setFoo/isFoo) are matched to their field by name instead.
        def detectBeanAccessors(prefix: String): Unit = {
          if (defdef.name.startsWith(prefix)) {
            val name = defdef.name.toString.substring(prefix.length)
            def uncapitalize(s: String) = if (s.length == 0) "" else { val chars = s.toCharArray; chars(0) = chars(0).toLower; new String(chars) }
            def findValDef(name: String) = symdefs.values collectFirst {
              case vdef: ValDef if vdef.name.dropLocal string_== name => vdef
            }
            // Try the exact name first, then the uncapitalized variant (getFoo -> foo).
            val valdef = findValDef(name).orElse(findValDef(uncapitalize(name))).orNull
            if (valdef != null) accessors(valdef) = accessors.getOrElse(valdef, Nil) :+ defdef
          }
        }
        detectBeanAccessors("get")
        detectBeanAccessors("set")
        detectBeanAccessors("is")
      })
      val stats1 = stats flatMap {
        case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isLazy =>
          // Restore the field's pre-typer modifiers from its accessors:
          // drop LOCAL, drop PRIVATE when the accessor was public, merge accessor annotations.
          val mods1 = if (accessors.contains(vdef)) {
            val ddef = accessors(vdef)(0) // any accessor will do
            val Modifiers(flags, _, annotations) = mods
            var flags1 = flags & ~LOCAL
            if (!ddef.symbol.isPrivate) flags1 = flags1 & ~PRIVATE
            val privateWithin1 = ddef.mods.privateWithin
            val annotations1 = accessors(vdef).foldLeft(annotations)((curr, acc) => curr ++ (acc.symbol.annotations map toPreTyperAnnotation))
            Modifiers(flags1, privateWithin1, annotations1) setPositions mods.positions
          } else {
            mods
          }
          val mods2 = toPreTyperModifiers(mods1, vdef.symbol)
          val name1 = name.dropLocal
          val vdef1 = ValDef(mods2, name1.toTermName, tpt, rhs)
          if (reifyDebug) println("resetting visibility of field: %s => %s".format(vdef, vdef1))
          Some(vdef1) // no copyAttrs here, because new ValDef and old symbols are now out of sync
        case ddef: DefDef if !ddef.mods.isLazy =>
          // lazy val accessors are removed in reshapeLazyVals
          // as they are needed to recreate lazy vals
          if (accessors.values.exists(_.contains(ddef))) {
            if (reifyDebug) println("discarding accessor method: " + ddef)
            None
          } else {
            Some(ddef)
          }
        case tree =>
          Some(tree)
      }
      stats1
    }
    /** Drops synthetic member definitions from `stats`, on the assumption that every synthetic
     *  member present at this point was generated for a case class (see comment below).
     */
    private def trimSyntheticCaseClassMembers(deff: Tree, stats: List[Tree]): List[Tree] =
      stats filterNot (memberDef => memberDef.isDef && {
        val isSynthetic = memberDef.symbol.isSynthetic
        // this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass)
        // that's why I replace the check with an assumption that all synthetic members are, in fact, generated of case classes
        // val isCaseMember = deff.symbol.isCaseClass || deff.symbol.companionClass.isCaseClass
        val isCaseMember = true
        if (isSynthetic && isCaseMember && reifyDebug) println("discarding case class synthetic def: " + memberDef)
        isSynthetic && isCaseMember
      })
    /** Drops synthetic module definitions from `stats`, on the assumption that every synthetic
     *  module present at this point is a compiler-generated case class companion (see comment below).
     */
    private def trimSyntheticCaseClassCompanions(stats: List[Tree]): List[Tree] =
      stats diff (stats collect { case moddef: ModuleDef => moddef } filter (moddef => {
        val isSynthetic = moddef.symbol.isSynthetic
        // this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass)
        // that's why I replace the check with an assumption that all synthetic modules are, in fact, companions of case classes
        // val isCaseCompanion = moddef.symbol.companionClass.isCaseClass
        val isCaseCompanion = true
        if (isSynthetic && isCaseCompanion && reifyDebug) println("discarding synthetic case class companion: " + moddef)
        isSynthetic && isCaseCompanion
      }))
}
}
| felixmulder/scala | src/compiler/scala/reflect/reify/phases/Reshape.scala | Scala | bsd-3-clause | 17,888 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.batch.sql.agg
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.tuple.{Tuple2 => JTuple2}
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.api.scala._
import org.apache.flink.table.api.{TableException, Types}
import org.apache.flink.table.dataformat.Decimal
import org.apache.flink.table.planner.runtime.utils.BatchTestBase
import org.apache.flink.table.planner.runtime.utils.BatchTestBase.row
import org.apache.flink.table.planner.runtime.utils.TestData._
import org.apache.flink.types.Row
import org.junit.{Before, Test}
import scala.collection.Seq
/**
* Aggregate IT case base class.
*/
abstract class AggregateITCaseBase(testName: String) extends BatchTestBase {
def prepareAggOp(): Unit
  @Before
  override def before(): Unit = {
    super.before()
    // Register the shared input tables queried by the aggregate IT cases below.
    registerCollection("SmallTable3", smallData3, type3, "a, b, c", nullablesOfSmallData3)
    registerCollection("Table3", data3, type3, "a, b, c", nullablesOfData3)
    registerCollection("Table5", data5, type5, "d, e, f, g, h", nullablesOfData5)
    registerCollection("EmptyTable5", Seq(), type5, "d, e, f, g, h")
    registerCollection("NullTable3", nullData3, type3, "a, b, c", nullablesOfNullData3)
    registerCollection("AllNullTable3", allNullData3, type3, "a, b, c", allNullablesOfNullData3)
    registerCollection("NullTable5", nullData5, type5, "d, e, f, g, h", nullablesOfNullData5)
    registerCollection("DuplicateTable5", duplicateData5, type5, "d, e, f, g, h",
      nullablesOfDuplicateData5)
    registerCollection("GenericTypedTable3", genericData3, genericType3, "i, j, k",
      nullablesOfData3)
    // Let the concrete subclass configure the aggregate operator being exercised.
    prepareAggOp()
  }
  @Test
  def testTypedGroupByKey(): Unit = {
    // Group by a generic (non-atomic) key column `i` together with another column.
    checkResult(
      "SELECT j, sum(k) FROM GenericTypedTable3 GROUP BY i, j",
      Seq(
        row("1,1", 2),
        row("1,1", 2),
        row("10,1", 3)
      )
    )
    checkResult(
      "SELECT k, count(j) FROM GenericTypedTable3 GROUP BY i, k",
      Seq(
        row(1, 2),
        row(3, 1),
        row(2, 1)
      )
    )
  }
@Test
def testBigData(): Unit = {
// for hash agg mode it will fallback
val largeData5 = for (i <- 0 until 100000) yield row(i, 1L, 10, "Hallo", 1L)
registerCollection("LargeTable5", largeData5, type5, "d, e, f, g, h")
val expected = for (i <- 0 until 100000) yield row(i, "Hallo", 1L, 10, 1L)
checkResult(
"SELECT d, g, sum(e), avg(f), min(h) FROM LargeTable5 GROUP BY d, g",
expected
)
// composite type group key fallback case
val largeTypedData5 =
for (i <- 0 until 100000) yield row(new JTuple2(i, i), 1L, 10, "Hallo", 1L)
registerCollection("LargeTypedTable5", largeTypedData5, genericType5, "d, e, f, g, h")
val expectedTypedData5 =
for (i <- 0 until 100000) yield
row(row(i, i), "Hallo", 1L, 10, 1L)
checkResult(
"SELECT d, g, sum(e), avg(f), min(h) FROM LargeTypedTable5 GROUP BY d, g",
expectedTypedData5
)
// for hash agg mode it wont fallback
val singleGrouplargeData5 = for (i <- 0 until 100000) yield row(999, 1L, 10, "Hallo", 1L)
registerCollection("SingleGroupLargeTable5", singleGrouplargeData5, type5, "d, e, f, g, h")
checkResult(
"SELECT d, g, sum(e), avg(f), min(h) FROM SingleGroupLargeTable5 GROUP BY d, g",
Seq(row(999, "Hallo", 100000L, 10, 1L))
)
}
@Test
def testGroupByOnly(): Unit = {
checkResult(
"SELECT h FROM Table5 GROUP BY h",
Seq(
row(1),
row(2),
row(3)
)
)
}
@Test
def testTwoPhasesAggregation(): Unit = {
checkResult(
"SELECT sum(d), avg(d), count(g), min(e), h FROM Table5 GROUP BY h",
Seq(
row(16, 16 / 5, 5, 1L, 1),
row(26, 26 / 7, 7, 2L, 2),
row(13, 13 / 3, 3, 6L, 3)
)
)
}
@Test
def testPhaseAggregation(): Unit = {
// TODO
}
@Test
def testEmptyInputAggregation(): Unit = {
checkResult("SELECT sum(d), avg(d), count(g), min(e) FROM EmptyTable5 GROUP BY h", Seq())
}
@Test
def testNullGroupKeyAggregation(): Unit = {
checkResult("SELECT sum(d), d, count(d) FROM NullTable5 GROUP BY d",
Seq(
row(1, 1, 1),
row(25, 5, 5),
row(null, null, 0),
row(16, 4, 4),
row(4, 2, 2),
row(9, 3, 3)
)
)
}
@Test
def testAggregationWithoutGroupby(): Unit = {
checkResult(
"SELECT sum(d), avg(d), count(g), min(e) FROM Table5",
Seq(
row(55, 55 / 15, 15, 1L)
)
)
}
@Test
def testEmptyInputAggregationWithoutGroupby(): Unit = {
checkResult(
"SELECT sum(d), avg(d), count(g), min(e) FROM EmptyTable5",
Seq(
row(null, null, 0, null)
)
)
}
@Test
def testAggregationAfterProjection(): Unit = {
checkResult(
"SELECT c, count(a) FROM " +
"(SELECT d as a, f as b, h as c FROM Table5) GROUP BY c",
Seq(
row(1, 5),
row(2, 7),
row(3, 3)
)
)
}
@Test
def testAggregationWithArithmetic(): Unit = {
checkResult(
"SELECT avg(d + 2) + 2 FROM Table5",
Seq(
row(85 / 15 + 2)
)
)
}
@Test
def testGroupedDistinctAggregate(): Unit = {
checkResult(
"SELECT count(distinct g), h FROM DuplicateTable5 GROUP BY h",
Seq(
row(5, 1),
row(5, 2),
row(2, 3)
)
)
}
@Test
def testDistinctAggregate(): Unit = {
checkResult(
"SELECT count(distinct h) FROM DuplicateTable5",
Seq(
row(3)
)
)
}
@Test
def testUV(): Unit = {
val data = (0 until 100).map { i => row("1", "1", s"${i % 10}", "1") }.toList
val type4 = new RowTypeInfo(
Types.STRING,
Types.STRING,
Types.STRING,
Types.STRING)
registerCollection(
"src",
data,
type4,
"a, b, c, d")
val sql =
s"""
|SELECT
| a,
| b,
| COUNT(distinct c) as uv
|FROM (
| SELECT
| a, b, c, d
| FROM
| src where b <> ''
| UNION ALL
| SELECT
| a, 'ALL' as b, c, d
| FROM
| src where b <> ''
|) t
|GROUP BY
| a, b
""".stripMargin
checkResult(sql, Seq(row("1", "1", 10), row("1", "ALL", 10)))
}
//
// tests borrowed from org.apache.spark.sql.DataFrameAggregateSuite
//
private var newTableId = 0
def checkQuery[T <: Product : TypeInformation](
tableData: Seq[T],
sqlQuery: String,
expected: Seq[_ <: Product],
tableName: String = "t")
: Unit = {
val toRow = (p: Product) =>
Row.of(p.productIterator.map(_.asInstanceOf[AnyRef]).toArray: _*)
val tableRows = tableData.map(toRow)
val tupleTypeInfo = implicitly[TypeInformation[T]]
val fieldInfos = tupleTypeInfo.getGenericParameters.values()
import scala.collection.JavaConverters._
val rowTypeInfo = new RowTypeInfo(fieldInfos.asScala.toArray: _*)
newTableId += 1
val tableName = "TestTableX" + newTableId
val fields = rowTypeInfo.getFieldNames.mkString(",")
registerCollection(tableName, tableRows, rowTypeInfo, fields)
val sqlQueryX = sqlQuery.replace("TableName", tableName)
checkResult(sqlQueryX, expected.map(toRow))
}
  // Shorthand constructors for java.math.BigDecimal test values.
  def big(i: Int): java.math.BigDecimal = new java.math.BigDecimal(i)
  def big(s: String): java.math.BigDecimal = new java.math.BigDecimal(s)
  val (b1, b2, b3) = (big(1), big(2), big(3))
  // with default scale for BigDecimal.class
  def bigX(i: Int): java.math.BigDecimal = big(i).setScale(
    Decimal.DECIMAL_SYSTEM_DEFAULT.getScale)
  val (b1x, b2x, b3x) = (bigX(1), bigX(2), bigX(3))
  // Deliberate null: stands in for SQL NULL in expected-result tuples.
  val bN = null: java.math.BigDecimal
@Test
def testGroupBy(): Unit = {
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select f0, sum(f1) from TableName group by f0",
Seq((1, 3), (2, 3), (3, 3))
)
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select sum(totB) from (select f0, sum(f1) as totB from TableName group by f0)",
Seq(Tuple1(9))
)
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select f0, count(*) from TableName group by f0",
Seq((1, 2L), (2, 2L), (3, 2L)) // count=>long
)
checkQuery(
Seq(("a", 1, 0), ("b", 2, 4), ("a", 2, 3)),
"select f0, min(f1), min(f2) from TableName group by f0",
Seq(("a", 1, 0), ("b", 2, 4))
)
checkQuery(
Seq((b1, b1), (b1, b2), (b2, b1), (b2, b2), (b3, b1), (b3, b2)),
"select f0, sum(f1) from TableName group by f0",
Seq((b1x, b3x), (b2x, b3x), (b3x, b3x))
)
// nulls in key/value
checkQuery(
Seq((b1, b1), (b1, bN), (b2, b1), (b2, bN), (b3, b1), (b3, b2), (bN, b2)),
"select f0, sum(f1) from TableName group by f0",
Seq((b1x, b1x), (b2x, b1x), (b3x, b3x), (bN, b2x))
)
}
@Test(expected = classOf[TableException])
def testCountCannotByMultiFields(): Unit = {
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select count(distinct f0, f1) from TableName",
Seq()
)
}
@Test
def testSpark17124(): Unit = {
checkQuery(
Seq(Tuple1(0L), Tuple1(1L)),
"select f0, sum(f0), count(f0), min(f0) from TableName group by f0",
Seq((0L, 0L, 1L, 0L), (1L, 1L, 1L, 1L))
)
}
@Test
def testGroupByRegexp(): Unit = {
val expr = "regexp_extract(f0, '([a-z]+)\\[', 1)"
checkQuery(
Seq(("some[thing]", "random-string")),
s"select $expr, count(*) from TableName group by $expr",
Seq(("some", 1L))
)
}
@Test
def testRollup(): Unit = {
checkQuery(
Seq(
("dotNET", 2012, 10000.0),
("Java", 2012, 20000.0),
("dotNET", 2012, 5000.0),
("dotNET", 2013, 48000.0),
("Java", 2013, 30000.0)
),
"select f0, f1, sum(f2) from TableName group by rollup(f0, f1)",
Seq(
("Java", 2012, 20000.0),
("Java", 2013, 30000.0),
("Java", null, 50000.0),
("dotNET", 2012, 15000.0),
("dotNET", 2013, 48000.0),
("dotNET", null, 63000.0),
(null, null, 113000.0)
)
)
}
@Test
def testCube(): Unit = {
checkQuery(
Seq(
("dotNET", 2012, 10000.0),
("Java", 2012, 20000.0),
("dotNET", 2012, 5000.0),
("dotNET", 2013, 48000.0),
("Java", 2013, 30000.0)
),
"select f0, f1, sum(f2) from TableName group by cube(f0, f1)",
Seq(
("Java", 2012, 20000.0),
("Java", 2013, 30000.0),
("Java", null, 50000.0),
("dotNET", 2012, 15000.0),
("dotNET", 2013, 48000.0),
("dotNET", null, 63000.0),
(null, 2012, 35000.0),
(null, 2013, 78000.0),
(null, null, 113000.0)
)
)
}
@Test
def testGrouping(): Unit = {
checkQuery(
Seq(
("dotNET", 2012, 10000.0),
("Java", 2012, 20000.0),
("dotNET", 2012, 5000.0),
("dotNET", 2013, 48000.0),
("Java", 2013, 30000.0)
),
"select f0, f1, grouping(f0), grouping(f1), grouping_id(f0,f1) " +
"from TableName group by cube(f0, f1)",
Seq(
("Java", 2012, 0, 0, 0),
("Java", 2013, 0, 0, 0),
("Java", null, 0, 1, 1),
("dotNET", 2012, 0, 0, 0),
("dotNET", 2013, 0, 0, 0),
("dotNET", null, 0, 1, 1),
(null, 2012, 1, 0, 2),
(null, 2013, 1, 0, 2),
(null, null, 1, 1, 3)
)
)
}
@Test
def testGroupingInsideWindowFunction(): Unit = {
checkQuery(
Seq(
("dotNET", 2012, 10000.0),
("Java", 2012, 20000.0),
("dotNET", 2012, 5000.0),
("dotNET", 2013, 48000.0),
("Java", 2013, 30000.0)
),
"select f0, f1, sum(f2), grouping_id(f0, f1), " +
"rank() over (partition by grouping_id(f0, f1) order by sum(f2)) " +
"from TableName group by cube(f0, f1)",
Seq(
("Java", 2012, 20000.0, 0, 2),
("Java", 2013, 30000.0, 0, 3),
("Java", null, 50000.0, 1, 1),
("dotNET", 2012, 15000.0, 0, 1),
("dotNET", 2013, 48000.0, 0, 4),
("dotNET", null, 63000.0, 1, 2),
(null, 2012, 35000.0, 2, 1),
(null, 2013, 78000.0, 2, 2),
(null, null, 113000.0, 3, 1)
)
)
}
@Test
def testRollupOverlappingColumns(): Unit = {
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select f0+f1, f1, sum(f0-f1) from TableName group by rollup(f0+f1, f1)",
Seq((2, 1, 0), (3, 2, -1), (3, 1, 1), (4, 2, 0), (4, 1, 2), (5, 2, 1),
(2, null, 0), (3, null, 0), (4, null, 2), (5, null, 1), (null, null, 3))
)
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select f0, f1, sum(f1) from TableName group by rollup(f0, f1)",
Seq((1, 1, 1), (1, 2, 2), (2, 1, 1), (2, 2, 2), (3, 1, 1), (3, 2, 2),
(1, null, 3), (2, null, 3), (3, null, 3), (null, null, 9))
)
}
@Test
def testCubeOverlappingColumns(): Unit = {
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select f0+f1, f1, sum(f0-f1) from TableName group by cube(f0+f1, f1)",
Seq((2, 1, 0), (3, 2, -1), (3, 1, 1), (4, 2, 0), (4, 1, 2), (5, 2, 1),
(2, null, 0), (3, null, 0), (4, null, 2), (5, null, 1), (null, 1, 3),
(null, 2, 0), (null, null, 3))
)
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select f0, f1, sum(f1) from TableName group by cube(f0, f1)",
Seq((1, 1, 1), (1, 2, 2), (2, 1, 1), (2, 2, 2), (3, 1, 1), (3, 2, 2),
(1, null, 3), (2, null, 3), (3, null, 3), (null, 1, 3), (null, 2, 6),
(null, null, 9))
)
}
@Test
def testAggWithoutGroups(): Unit = {
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select sum(f1) from TableName",
Seq(Tuple1(9))
)
}
@Test
def testAggWithoutGroupsAndFunctions(): Unit = {
val one = Tuple1
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select 1 from TableName",
List.fill(6)(Tuple1(1))
)
}
@Test
def testAverage(): Unit = {
checkQuery(
Seq[(Integer, Integer)]((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select avg(f0), avg(f0) from TableName", // spark has mean(), but we don't
Seq((2, 2))
)
checkQuery(
Seq((b1, b1), (b1, b2), (b2, b1), (b2, b2), (b3, b1), (b3, b2)),
"select avg(f0), sum(f0) from TableName",
Seq((bigX(2), bigX(12)))
)
checkQuery(
Seq((b1, b1), (b1, b2), (b2, b1), (b2, b2), (b3, b1), (b3, b2)),
"select avg(cast (f0 as decimal(10,2))) from TableName",
Seq(Tuple1(big("2.000000")))
)
}
@Test
def testAverageWithDistinct(): Unit = {
checkQuery(
Seq[(Integer, Integer)]((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select avg(f0), sum(distinct f0) from TableName",
Seq((2, 6))
)
checkQuery(
Seq((b1, b1), (b1, b2), (b2, b1), (b2, b2), (b3, b1), (b3, b2)),
"select avg(f0), sum(distinct f0) from TableName",
Seq((bigX(2), bigX(6)))
)
checkQuery(
Seq((b1, b1), (b1, b2), (b2, b1), (b2, b2), (b3, b1), (b3, b2)),
"select avg(f0), sum(distinct cast (f0 as decimal(10,2))) from TableName",
Seq((bigX(2), big(6).setScale(2)))
)
}
@Test
def testNullAverage(): Unit = {
val testData3: Seq[(Integer, Integer)] =
Seq((1, null), (2, 2))
checkQuery(
testData3,
"select avg(f1) from TableName",
Seq(Tuple1(2))
)
}
@Test
def testNullAverageWithDistinct(): Unit = {
val testData3: Seq[(Integer, Integer)] =
Seq((1, null), (2, 2))
checkQuery(
testData3,
"select avg(f1), count(distinct f1) from TableName",
Seq((2, 1L))
)
checkQuery(
testData3,
"select avg(f1), sum(distinct f1) from TableName",
Seq((2, 2))
)
}
@Test
def testZeroAvg(): Unit = {
checkQuery(
Seq[(Int, Int)](),
"select avg(f0) from TableName",
Seq(Tuple1(null))
)
}
@Test
def testZeroAvgWithDistinct(): Unit = {
checkQuery(
Seq[(Int, Int)](),
"select avg(f0), sum(distinct f0) from TableName",
Seq((null, null))
)
}
@Test
def testCount(): Unit = {
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select count(f0), sum(distinct f0) from TableName",
Seq((6L, 6))
)
}
@Test
def testNullCount(): Unit = {
checkQuery(
Seq[(Integer, Integer)]((1, null), (2, 2)),
"select f0, count(f1) from TableName group by f0",
Seq((1, 0L), (2, 1L))
)
checkQuery(
Seq[(Integer, Integer)]((1, null), (2, 2)),
"select f0, count(f0+f1) from TableName group by f0",
Seq((1, 0L), (2, 1L))
)
}
@Test
def testNullCountWithDistinct(): Unit = {
checkQuery(
Seq[(Integer, Integer)]((1, null), (2, 2)),
"select count(f0), count(f1), count(1), " +
"count(distinct f0), count(distinct f1) from TableName",
Seq((2L, 1L, 2L, 2L, 1L))
)
checkQuery(
Seq[(Integer, Integer)]((1, null), (2, 2)),
"select count(f1), count(distinct f1), sum(distinct f1) from TableName",
Seq((1L, 1L, 2))
)
}
@Test(expected = classOf[TableException])
def testMultipleColumnDistinctCount(): Unit = {
val testData = Seq(
("a", "b", "c"),
("a", "b", "c"),
("a", "b", "d"),
("x", "y", "z"),
("x", "q", null: String))
checkQuery(
testData,
"select count(distinct f0, f1) from TableName",
Seq(Tuple1(3L))
)
// Note: count distinct on multiple columns
// what if, in a row, some columns are null, some are not-null
// should the row be counted?
// Calcite doc says yes. Spark/MySQL says no.
checkQuery(
testData,
"select count(distinct f0, f1, f2) from TableName",
Seq(Tuple1(4L)) // NOTE: Spark and MySQL returns 3
)
checkQuery(
testData,
"select f0, count(distinct f1, f2) from TableName group by f0",
Seq(("a", 2L), ("x", 2L)) // NOTE: Spark and MySQL returns 2
)
}
@Test
def testZeroCount(): Unit = {
val emptyTable = Seq[(Int, Int)]()
checkQuery(
emptyTable,
"select count(f0), sum(distinct f0) from TableName",
Seq((0L, null))
)
}
@Test
def testStdDev(): Unit = {
// NOTE: if f0 is INT type, our stddev functions return INT.
checkQuery(
Seq((1.0, 1), (1.0, 2), (2.0, 1), (2.0, 2), (3.0, 1), (3.0, 2)),
"select stddev_pop(f0), stddev_samp(f0), stddev(f0) from TableName",
Seq((math.sqrt(4.0 / 6.0), math.sqrt(4.0 / 5.0), math.sqrt(4.0 / 5.0)))
)
}
@Test
def test1RowStdDev(): Unit = {
checkQuery(Seq((1.0, 1)),
"select stddev_pop(f0), stddev_samp(f0), stddev(f0) from TableName",
Seq((0.0, null, null))
)
}
@Test
def testVariance(): Unit = {
checkQuery(Seq((1.0, 1), (2.0, 1)),
"select var_pop(f0), var_samp(f0), variance(f0) from TableName",
Seq((0.25, 0.5, 0.5))
)
}
@Test
def test1RowVariance(): Unit = {
checkQuery(Seq((1.0, 1)),
"select var_pop(f0), var_samp(f0), variance(f0) from TableName",
Seq((0.0, null, null))
)
}
@Test
def testZeroStdDev(): Unit = {
val emptyTable = Seq[(Int, Int)]()
checkQuery(
emptyTable,
"select stddev_pop(f0), stddev_samp(f0) from TableName",
Seq((null, null))
)
}
@Test
def testZeroSum(): Unit = {
val emptyTable = Seq[(Int, Int)]()
checkQuery(
emptyTable,
"select sum(f0) from TableName",
Seq(Tuple1(null))
)
}
@Test
def testZeroSumDistinct(): Unit = {
val emptyTable = Seq[(Int, Int)]()
checkQuery(
emptyTable,
"select sum(distinct f0) from TableName",
Seq(Tuple1(null))
)
}
@Test
def testMoments(): Unit = {
checkQuery(
Seq((1.0, 1), (1.0, 2), (2.0, 1), (2.0, 2), (3.0, 1), (3.0, 2)),
"select var_pop(f0), var_samp(f0) from TableName",
Seq((4.0 / 6.0, 4.0 / 5.0))
)
// todo: Spark has skewness() and kurtosis()
}
@Test
def testZeroMoments(): Unit = {
checkQuery(
Seq((1.0, 2.0)),
"select stddev_samp(f0), stddev_pop(f0), var_samp(f0), var_pop(f0) from TableName",
Seq((null, 0.0, null, 0.0))
)
// todo: Spark returns Double.NaN instead of null
}
@Test
def testNullMoments(): Unit = {
checkQuery(
Seq[(Int, Int)](),
"select stddev_samp(f0), stddev_pop(f0), var_samp(f0), var_pop(f0) from TableName",
Seq((null, null, null, null))
)
}
// NOTE: select from values -- supported by Spark, but not Blink
// "select sum(a) over () from values 1.0, 2.0, 3.0 T(a)"
@Test
def testDecimalSumAvgOverWindow(): Unit = {
checkQuery(
Seq(Tuple1(1.0), Tuple1(2.0), Tuple1(3.0)),
"select sum(f0) over () from TableName",
Seq(Tuple1(6.0), Tuple1(6.0), Tuple1(6.0))
)
checkQuery(
Seq(Tuple1(1.0), Tuple1(2.0), Tuple1(3.0)),
"select avg(f0) over () from TableName",
Seq(Tuple1(2.0), Tuple1(2.0), Tuple1(2.0))
)
}
@Test
def testDecimals(): Unit = {
checkQuery(
Seq((b1, b1), (b1, b2), (b2, b1), (b2, b2), (b3, b1), (b3, b2)),
"select cast (f0 as decimal(10,2)), avg(cast (f1 as decimal(10,2))) " +
" from TableName group by cast (f0 as decimal(10,2))",
Seq((big("1.00"), big("1.500000")), (big("2.00"), big("1.500000")),
(big("3.00"), big("1.500000")))
)
}
@Test
def testLimitPlusAgg(): Unit = {
checkQuery(
Seq(("a", 1), ("b", 2), ("c", 1), ("d", 5)),
"select f0, count(*) from (select * from TableName limit 2) group by f0",
Seq(("a", 1L), ("b", 1L))
)
}
// TODO: supports `pivot`.
@Test
def testGroupByLiteral(): Unit = {
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"select 3, 4, sum(f1) from TableName group by 1, 2",
Seq((3, 4, 9))
)
checkQuery(
Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2)),
"SELECT 3, 4, SUM(f1) from TableName GROUP BY 3, 4",
Seq((3, 4, 9))
)
// NOTE: Spark runs this query
// "SELECT 3 AS c, 4 AS d, SUM(f1) FROM t GROUP BY c, d"
// with GROUP-BY clause referencing alias in SELECT clause.
// that doesn't make sense, and we do not support it.
}
// TODO support csv
// @Test
// def testMultiGroupBys(): Unit = {
// val csvPath = CommonTestData.writeToTempFile(
// "7369,SMITH,CLERK,7902,1980-12-17,800.00,,20$" +
// "7499,ALLEN,SALESMAN,7698,1981-02-20,1600.00,300.00,30$" +
// "7521,WARD,SALESMAN,7698,1981-02-22,1250.00,500.00,30",
// "csv-test", "tmp")
// tEnv.registerTableSource("emp",
// CsvTableSource.builder()
// .path(csvPath)
// .fields(Array("empno", "ename", "job", "mgr", "hiredate", "sal", "comm", "deptno"),
// Array(DataTypes.INT, DataTypes.STRING, DataTypes.STRING, DataTypes.INT, DataTypes.DATE,
// DataTypes.DOUBLE, DataTypes.DOUBLE, DataTypes.INT))
// .enableEmptyColumnAsNull()
// .fieldDelimiter(",")
// .lineDelimiter("$")
// .uniqueKeys(Set(Set("empno").asJava).asJava)
// .build())
//
// checkResult(
// """
// |SELECT empno, ename, hiredate, MIN(sal), MAX(comm)
// |FROM (SELECT empno, ename, hiredate, AVG(sal) AS sal, MIN(comm) AS comm
// |FROM emp
// |GROUP BY ename, empno, hiredate)
// |GROUP BY empno, ename, hiredate
// |FETCH NEXT 10 ROWS ONLY
// """.stripMargin,
// Seq(row(7369, "SMITH", "1980-12-17", 800.0, null),
// row(7499, "ALLEN", "1981-02-20", 1600.0, 300.0),
// row(7521, "WARD", "1981-02-22", 1250.0, 500.0)))
// }
}
| bowenli86/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/agg/AggregateITCaseBase.scala | Scala | apache-2.0 | 24,878 |
package com.aktaion.parser
import java.sql.Timestamp
/**
 * One parsed event from a generic web-proxy access log.
 * The trailing `//n` comments record the zero-based column index of each field in the raw line.
 * Events order by `tsJavaTime`, so sequences of events can be sorted chronologically.
 */
case class GenericProxyLogEvent(tsJavaTime: Timestamp, //0
                                timeString: String, //extra field
                                userName: String, //1
                                sourceIp: String, //2
                                destinationIp: String, //3
                                unknownField1: String, //4
                                statusCode: Int, //5
                                cacheResult: String, //6
                                httpMethod: String, //7
                                urlRequested: String, //8
                                httpVersion: String, //9
                                domainClass: String, //10
                                riskClass: String, //11
                                mimeType: String, //12
                                // encodingFormat: String, //13
                                bytesSent: Int, //14
                                bytesReceived: Int, //15
                                userAgent: String, //16
                                webReferrer: String, //17
                                urlMetaData: UrlDataVector
                               ) extends ParsedLogEvent with Ordered[GenericProxyLogEvent] {
  // Orders events by timestamp; enables implicit sorting on the ts field.
  def compare(that: GenericProxyLogEvent) =
    tsJavaTime.getTime.compareTo(that.tsJavaTime.getTime)
}
// Raw column layout of a Bro http.log record (27 fields):
// #fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p trans_depth method host uri referrer user_agent request_body_len response_body_len status_code status_msg info_code info_msg filename tags username password proxied orig_fuids orig_mime_types resp_fuids resp_mime_types
// #types time string addr port addr port count string string string string string count count count string count string string set[enum] string string set[string] vector[string] vector[string] vector[string] vector[string]
// 27 fields
/**
 * One parsed event from a Bro HTTP log, plus fields derived during parsing
 * (`urlMetaData`, `tsJavaTime`). Events order by the raw epoch timestamp `tsDouble`.
 */
case class BroHttpLogEvent(tsDouble: Double, //0
                           uid: String, //1
                           id_orig_host: String, //2
                           id_orig_port: Int, //3
                           id_resp_host: String, //4
                           id_resp_port: Int, //5
                           trans_depth: Int, //6
                           method: String, //7
                           host: String, //8
                           uri: String, //9
                           referrer: String, //10
                           user_agent: String, //11
                           request_body_len: String, //12
                           response_body_len: String, //13
                           status_code: Int, //14
                           status_msg: String, //15
                           info_code: String,
                           info_msg: String,
                           filename: String,
                           tags: String,
                           username: String,
                           password: String,
                           proxied: String,
                           orig_fuids: String,
                           orig_mime_types: String,
                           resp_fuids: String,
                           resp_mime_types: String,
                           urlMetaData: UrlDataVector,
                           tsJavaTime: Timestamp) extends ParsedLogEvent with Ordered[BroHttpLogEvent] {
  // Orders events by raw timestamp; enables implicit sorting on the ts field.
  def compare(that: BroHttpLogEvent) =
    tsDouble.compareTo(that.tsDouble)
}
/**
 * Common event shape that both proxy and Bro HTTP events are normalized into
 * (see [[ParsingNormalizationLogic]]), so downstream logic works with one type.
 * Events order by `tsJavaTime`.
 */
case class NormalizedLogEvent(tsJavaTime: Timestamp,
                              timeString: String,
                              sourceIp: String,
                              destinationIp: String,
                              uri: String,
                              httpVersion: String,
                              mimeType: String,
                              userAgent: String,
                              statusCode: Int,
                              webReferrer: String,
                              urlMetaData: UrlDataVector) extends Ordered[NormalizedLogEvent] {
  // Orders events by timestamp; enables implicit sorting on the ts field.
  def compare(that: NormalizedLogEvent) =
    tsJavaTime.getTime.compareTo(that.tsJavaTime.getTime)
}
/**
 * Converts concrete parsed log events (proxy, Bro HTTP) into the shared
 * [[NormalizedLogEvent]] representation.
 */
object ParsingNormalizationLogic {

  /**
   * Normalizes a generic proxy log event.
   *
   * Replaces the non-idiomatic `== None` check plus explicit `return` with `Option.map`.
   *
   * @param inputDataLog the proxy event, if one was parsed
   * @return the normalized event, or `None` when the input is absent
   */
  def normalizeProxyLog(inputDataLog: Option[GenericProxyLogEvent]): Option[NormalizedLogEvent] =
    inputDataLog.map { inputLog =>
      NormalizedLogEvent(inputLog.tsJavaTime, inputLog.timeString,
        inputLog.sourceIp, inputLog.destinationIp, inputLog.urlRequested, inputLog.httpVersion,
        inputLog.mimeType, inputLog.userAgent,
        inputLog.statusCode, inputLog.webReferrer,
        inputLog.urlMetaData)
    }

  /**
   * Normalizes a Bro HTTP log event. The HTTP version is not present in the Bro
   * record, so it is filled with an empty string.
   *
   * @param inputDataLog the Bro event, if one was parsed
   * @return the normalized event, or `None` when the input is absent
   */
  def normalizeBroLog(inputDataLog: Option[BroHttpLogEvent]): Option[NormalizedLogEvent] =
    inputDataLog.map { inputLog =>
      NormalizedLogEvent(inputLog.tsJavaTime, inputLog.tsDouble.toString,
        inputLog.id_orig_host, inputLog.id_resp_host,
        inputLog.uri, "", inputLog.orig_mime_types,
        inputLog.user_agent, inputLog.status_code,
        inputLog.referrer, inputLog.urlMetaData)
    }
}
package space.spacelift.mq.proxy
import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import akka.serialization.Serializer
import akka.util.Timeout
import akka.pattern.{ask, pipe}
import org.slf4j.LoggerFactory
import space.spacelift.mq.proxy.patterns.{ProcessResult, Processor, Publisher, RpcClient}
import space.spacelift.mq.proxy.serializers.{JsonSerializer, Serializers}
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
/**
 * Thrown when an error occurred on the "server" side and was sent back to the client
 * If you have a server Actor and create a proxy for it, then:
 * {{{
 * proxy ? message
 * }}}
 * will behave as if you had written;
 * {{{
 * server ? message
 * }}}
 * and server had sent back an `akka.actor.Status.ServerFailure(new ProxyException(message)))`
 *
 * @param message error message
 * @param throwableAsString string rendering of the server-side throwable.
 *                          NOTE(review): accepted but never stored or exposed by this class —
 *                          confirm whether it should be retained as a field.
 */
class ProxyException(message: String, throwableAsString: String) extends RuntimeException(message)
/**
 * Describes the properties of a message sent through the proxy
 *
 * @param clazz The fully qualified class name of the message
 * @param contentType The content type for serialization/deserialization purposes
 *                    (note: `Proxy.useLegacySerializerEncodingSwap` swaps the meaning
 *                    of the two fields for pre-2.0 compatibility)
 */
case class MessageProperties(clazz: String, contentType: String)
/**
 * Describes a delivered message
 *
 * @param body The message, serialized
 * @param properties The properties of the serialized message — the class name and
 *                   content type needed to deserialize `body`
 */
case class Delivery(body: Array[Byte], properties: MessageProperties)
object Proxy {
  /**
   * In pre-2.0, the contentType had the name of the class, and contentEncoding had the name of the content type. Don't
   * enable this unless you know you need it.
   */
  var useLegacySerializerEncodingSwap = false

  // Rewrites package prefixes of class names when they are written to / read from
  // MessageProperties (applied in serialize, reversed in deserialize).
  var namespaceMapping: Map[String, String] = Map()

  // Class loader used to resolve message classes in deserialize; when None, the
  // plain Class.forName(name) lookup is used.
  var classLoader: Option[ClassLoader] = None

  /**
   * "server" side failure, that will be serialized and sent back to the client proxy
   *
   * @param message error message
   * @param throwableAsString string rendering of the server-side throwable
   */
  case class ServerFailure(message: String, throwableAsString: String)
def serialize(serializer: Serializer, msg: AnyRef): (Array[Byte], MessageProperties) = {
(serializer.toBinary(msg),
if (useLegacySerializerEncodingSwap) {
MessageProperties(
Serializers.serializerToContentType(serializer),
msg.getClass.getName.split('.').toList.reverse match {
case x :: xs => namespaceMapping.getOrElse(xs.reverse.mkString("."), xs.reverse.mkString(".")) + s".${x}"
}
)
} else {
MessageProperties(
msg.getClass.getName.split('.').toList.reverse match {
case x :: xs => namespaceMapping.getOrElse(xs.reverse.mkString("."), xs.reverse.mkString(".")) + s".${x}"
},
Serializers.serializerToContentType(serializer)
)
})
}
def deserialize(body: Array[Byte], props: MessageProperties): (AnyRef, Serializer) = {
// scalastyle:off null
require(props.clazz != null && props.clazz != "", "Class is not specified")
val serializer = (if (useLegacySerializerEncodingSwap) { props.clazz } else { props.contentType }) match {
case "" | null => JsonSerializer // use JSON if not serialization format was specified
case contentType => Serializers.contentTypeToSerializer(contentType)
}
// scalastyle:on null
val name = (if (useLegacySerializerEncodingSwap) { props.contentType } else { props.clazz }).split('.').toList.reverse match {
case x :: xs => namespaceMapping.map(_.swap).getOrElse(xs.reverse.mkString("."), xs.reverse.mkString(".")) + s".${x}"
}
(serializer.fromBinary(body, Some(if (classLoader.isDefined) {
Class.forName(name, true, classLoader.get)
} else {
Class.forName(name)
})), serializer)
}
class ProxyServer(server: ActorRef, timeout: Timeout = 30 seconds) extends Processor {
import ExecutionContext.Implicits.global
lazy val logger = LoggerFactory.getLogger(classOf[ProxyServer])
def process(delivery: Delivery): Future[ProcessResult] = {
Try(deserialize(delivery.body, delivery.properties)) match {
case Success((request, serializer)) => {
logger.debug("handling delivery of type %s with serializer %s".format(request.getClass.getName, serializer.getClass.getName))
val future = for {
response <- (server ? request)(timeout).mapTo[AnyRef]
_ = logger.debug("sending response of type %s".format(response.getClass.getName))
(body, props) = serialize(serializer, response)
} yield ProcessResult(Some(body), Some(props))
future.onFailure {
case cause => logger.error(s"inner call to server actor $server failed", cause)
}
future
}
case Failure(cause) => {
logger.error("deserialization failed", cause)
Future.failed(cause)
}
}
}
def onFailure(delivery: Delivery, e: Throwable): ProcessResult = {
val (body, props) = serialize(Serializers.contentTypeToSerializer(
if (useLegacySerializerEncodingSwap) {
delivery.properties.clazz
} else {
delivery.properties.contentType
}
), ServerFailure(e.getMessage, e.toString))
ProcessResult(Some(body), Some(props))
}
}
class ProxySubscriber(subscriber: ActorRef, timeout: Timeout = 30 seconds) extends Processor {
import ExecutionContext.Implicits.global
lazy val logger = LoggerFactory.getLogger(classOf[ProxyServer])
def process(delivery: Delivery): Future[ProcessResult] = {
Try(deserialize(delivery.body, delivery.properties)) match {
case Success((request, serializer)) => {
logger.debug("handling delivery of type %s with serializer %s".format(request.getClass.getName, serializer.getClass.getName))
subscriber ! request
Future { ProcessResult(None, None) }
}
case Failure(cause) => {
logger.error("deserialization failed", cause)
Future.failed(cause)
}
}
}
def onFailure(delivery: Delivery, e: Throwable): ProcessResult = {
val (body, props) = serialize(Serializers.contentTypeToSerializer(
if (useLegacySerializerEncodingSwap) {
delivery.properties.clazz
} else {
delivery.properties.contentType
}
), ServerFailure(e.getMessage, e.toString))
ProcessResult(Some(body), Some(props))
}
}
object ProxyClient {
/**
* Defines a ProxyClient with a default serializer
* @param client The RPC Client
* @return Props containing the ProxyClient
*/
def props(client: ActorRef): Props = Props(new ProxyClient(client, JsonSerializer))
}
/**
* standard one-request/one response proxy, which allows to write (myActor ? MyRequest).mapTo[MyResponse]
* @param client RPC Client
*/
class ProxyClient(client: ActorRef, serializer: Serializer, timeout: Timeout = 30 seconds) extends Actor {
import ExecutionContext.Implicits.global
def receive: Actor.Receive = {
case msg: AnyRef => {
Try(serialize(serializer, msg)) match {
case Success((body, props)) => {
// publish the serialized message (and tell the RPC client that we expect one response)
val publish = Delivery(body, props)
val future = (client ? RpcClient.Request(publish :: Nil, 1))(timeout).mapTo[AnyRef].map {
case result : RpcClient.Response => {
val delivery = result.deliveries(0)
val (response, serializer) = deserialize(delivery.body, delivery.properties)
response match {
case ServerFailure(message, throwableAsString) => akka.actor.Status.Failure(new ProxyException(message, throwableAsString))
case _ => response
}
}
case undelivered : RpcClient.Undelivered => undelivered
}
future.pipeTo(sender)
}
case Failure(cause) => sender ! akka.actor.Status.Failure(new ProxyException("Serialization error", cause.getMessage))
}
}
}
}
object ProxySender {
/**
* Defines a ProxySender with a default serializer
* @param client RPC Client
* @return Props containing the ProxySender
*/
def props(client: ActorRef): Props = Props(new ProxySender(client, JsonSerializer))
}
/**
* "fire-and-forget" proxy, which allows to write myActor ! MyRequest
* TODO: Change this to use a Publisher rather than an RPC Client
* @param client RPC Client
*/
class ProxySender(client: ActorRef, serializer: Serializer) extends Actor with ActorLogging {
def receive: Actor.Receive = {
case msg: AnyRef => {
val (body, props) = serialize(serializer, msg)
val publish = Delivery(body, props)
log.debug("sending %s to %s".format(publish, client))
client ! Publisher.Publish(publish)
}
}
}
}
| Spacelift/akka-mq-proxies | akka-mq-proxies/src/main/scala/space/spacelift/mq/proxy/Proxy.scala | Scala | mit | 9,080 |
//
// MessagePack for Java
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.msgpack.core.buffer
import java.nio.ByteBuffer
import org.msgpack.core.MessagePackSpec
import scala.util.Random
/**
 * Created on 2014/05/01.
 */
class MessageBufferTest
  extends MessagePackSpec {
  "MessageBuffer" should {
    "check buffer type" in {
      val b = MessageBuffer.newBuffer(0)
      info(s"MessageBuffer type: ${b.getClass.getName}")
    }
    "wrap ByteBuffer considering position and remaining values" taggedAs ("wrap-bb") in {
      val d = Array[Byte](10, 11, 12, 13, 14, 15, 16, 17, 18, 19)
      // wrap a 2-byte window starting at offset 2: the visible contents are [12, 13]
      val subset = ByteBuffer.wrap(d, 2, 2)
      val mb = MessageBuffer.wrap(subset)
      mb.getByte(0) shouldBe 12
      mb.size() shouldBe 2
    }
    "have better performance than ByteBuffer" in {
      // micro-benchmark: N getInt reads against 64 MiB buffers of each flavour
      val N = 1000000
      val M = 64 * 1024 * 1024
      val ub = MessageBuffer.newBuffer(M)
      val ud = MessageBuffer.newDirectBuffer(M)
      val hb = ByteBuffer.allocate(M)
      val db = ByteBuffer.allocateDirect(M)
      // NOTE(review): `bench` is defined but never invoked; the timed loops below
      // are written out inline instead
      def bench(f: Int => Unit) {
        var i = 0
        while (i < N) {
          f((i * 4) % M)
          i += 1
        }
      }
      // fixed seed so the random access pattern is reproducible across runs
      val r = new
          Random(0)
      val rs = new
          Array[Int](N)
      (0 until N).map(i => rs(i) = r.nextInt(N))
      // NOTE(review): `randomBench` is also never invoked
      def randomBench(f: Int => Unit) {
        var i = 0
        while (i < N) {
          f((rs(i) * 4) % M)
          i += 1
        }
      }
      val rep = 3
      info(f"Reading buffers (of size:${M}%,d) ${N}%,d x $rep times")
      time("sequential getInt", repeat = rep) {
        block("unsafe array") {
          var i = 0
          while (i < N) {
            ub.getInt((i * 4) % M)
            i += 1
          }
        }
        block("unsafe direct") {
          var i = 0
          while (i < N) {
            ud.getInt((i * 4) % M)
            i += 1
          }
        }
        block("allocate") {
          var i = 0
          while (i < N) {
            hb.getInt((i * 4) % M)
            i += 1
          }
        }
        block("allocateDirect") {
          var i = 0
          while (i < N) {
            db.getInt((i * 4) % M)
            i += 1
          }
        }
      }
      time("random getInt", repeat = rep) {
        block("unsafe array") {
          var i = 0
          while (i < N) {
            ub.getInt((rs(i) * 4) % M)
            i += 1
          }
        }
        block("unsafe direct") {
          var i = 0
          while (i < N) {
            ud.getInt((rs(i) * 4) % M)
            i += 1
          }
        }
        block("allocate") {
          var i = 0
          while (i < N) {
            hb.getInt((rs(i) * 4) % M)
            i += 1
          }
        }
        block("allocateDirect") {
          var i = 0
          while (i < N) {
            db.getInt((rs(i) * 4) % M)
            i += 1
          }
        }
      }
    }
    "convert to ByteBuffer" in {
      for (t <- Seq(
        MessageBuffer.newBuffer(10),
        MessageBuffer.newDirectBuffer(10),
        MessageBuffer.newOffHeapBuffer(10))
      ) {
        val bb = t.toByteBuffer
        bb.position shouldBe 0
        bb.limit shouldBe 10
        bb.capacity shouldBe 10
      }
    }
    "put ByteBuffer on itself" in {
      for (t <- Seq(
        MessageBuffer.newBuffer(10),
        MessageBuffer.newDirectBuffer(10),
        MessageBuffer.newOffHeapBuffer(10))
      ) {
        // source payload provided via three ByteBuffer flavours: array-backed, heap, off-heap
        val b = Array[Byte](0x02, 0x03)
        val srcArray = ByteBuffer.wrap(b)
        val srcHeap = ByteBuffer.allocate(b.length)
        srcHeap.put(b).flip
        val srcOffHeap = ByteBuffer.allocateDirect(b.length)
        srcOffHeap.put(b).flip
        for (src <- Seq(srcArray, srcHeap, srcOffHeap)) {
          // Write header bytes
          val header = Array[Byte](0x00, 0x01)
          t.putBytes(0, header, 0, header.length)
          // Write src after the header
          t.putByteBuffer(header.length, src, header.length)
          t.getByte(0) shouldBe 0x00
          t.getByte(1) shouldBe 0x01
          t.getByte(2) shouldBe 0x02
          t.getByte(3) shouldBe 0x03
        }
      }
    }
  }
}
| jackyglony/msgpack-java | msgpack-core/src/test/scala/org/msgpack/core/buffer/MessageBufferTest.scala | Scala | apache-2.0 | 4,691 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.hmrcemailrenderer.templates.ats
import uk.gov.hmrc.hmrcemailrenderer.domain.{MessagePriority, MessageTemplate}
import uk.gov.hmrc.hmrcemailrenderer.templates.FromAddress
import uk.gov.hmrc.hmrcemailrenderer.templates.ServiceIdentifier.AnnualTaxSummary
object AtsTemplates {

  /** Builds the dynamic subject line from the `taxYear` parameter, failing fast when absent. */
  private val ats_year_for_subject: Map[String, String] => String =
    params =>
      params.get("taxYear") match {
        case Some(year) => s"Your Annual Tax Summary for $year is now ready"
        case None => throw new RuntimeException("Missing parameter taxYear")
      }

  /** Message templates registered for the Annual Tax Summary service. */
  val templates = Seq(
    MessageTemplate.createWithDynamicSubject(
      templateId = "annual_tax_summaries_message_alert",
      fromAddress = FromAddress.noReply("HMRC Your Annual Tax Summary"),
      service = AnnualTaxSummary,
      subject = ats_year_for_subject,
      plainTemplate = txt.annualTaxSummariesMessageAlert.f,
      htmlTemplate = html.annualTaxSummariesMessageAlert.f,
      priority = Some(MessagePriority.Background)
    )
  )
}
| saurabharora80/hmrc-email-renderer | app/uk/gov/hmrc/hmrcemailrenderer/templates/ats/AtsTemplates.scala | Scala | apache-2.0 | 1,582 |
package leibniz
package inhabitance
//import cats.Functor
import leibniz.macros.newtype
/**
 * Witnesses that [[A]] is inhabited.
 *
 * Encodes inhabitance as a double negation: a proof that `A => Void` itself
 * leads to `Void`. The `A =>` clause is a self-reference alias for this instance.
 */
@newtype final case class Inhabited[A](run: (A => Void) => Void) { A =>
  import Inhabited._
  // Feeding a refutation of A into the witness yields the contradiction directly.
  def contradicts(f: A => Void): Void = run(f)
//
//  /**
//   * If [[A]] is inhabited, then any supertype [[B]] of `A`
//   * is also inhabited.
//   */
//  def widen[B](implicit p: A <~< B): ¬¬[B] =
//    p.substCoF[Inhabited](this)
  // An inhabitance witness and an uninhabitance witness for the same A are contradictory.
  def notUninhabited(f: Uninhabited[A]): Void =
    contradicts(f.contradicts)
  /**
   * If [[A]] is inhabited, and there is a total function
   * from [[A]] to [[B]], then `B` is also inhabited.
   */
  def map[B](f: A => B): ¬¬[B] =
    witness[B](k => A.contradicts(a => k(f(a))))
  /**
   * If [[A]] is inhabited, and you can prove that [[B]] is
   * inhabited given a value of `A`, then `B` is also inhabited.
   */
  // NOTE(review): the result type ¬¬[B] is left to inference here, unlike map/zip.
  def flatMap[B](f: A => ¬¬[B]) =
    witness[B](k => A.contradicts(a => f(a).contradicts(k)))
  /**
   * If [[A]] and [[B]] are inhabited, then a tuple `(A, B)` is
   * also inhabited.
   */
  def zip[B](b: ¬¬[B]): ¬¬[(A, B)] =
    flatMap(a => b.flatMap(b => Inhabited.value((a, b))))
  // When A is a mere proposition, the witness can be collapsed into an actual value.
  def proved(implicit ev: Proposition[A]): A =
    ev.proved(A)
}
// Low-priority fallback: when no Inhabited[A] instance is otherwise in implicit
// scope, attempt to derive one at compile time via the MacroUtil macro.
trait InhabitedLowerPriority {
  implicit def mkInhabited[A]: ¬¬[A] =
    macro internal.MacroUtil.mkInhabited[A]
}
object Inhabited extends InhabitedLowerPriority {
  /** Summons an in-scope witness. */
  def apply[A](implicit A: ¬¬[A]): ¬¬[A] = A

  // implicit val covariant: cats.Functor[Inhabited] = new Functor[Inhabited] {
  //   override def map[A, B](fa: Inhabited[A])(f: A => B): Inhabited[B] = fa.map(f)
  // }

  /** Wraps a raw double-negation proof as an [[Inhabited]] witness. */
  def witness[A](a: (A => Void) => Void): ¬¬[A] =
    Inhabited(a)

  /** Any concrete value trivially witnesses inhabitance. */
  def value[A](a: A): ¬¬[A] =
    witness[A](f => f(a))

  /** Combines two witnesses through a binary function. */
  def map2[A, B, C](f: (A, B) => C)(implicit A: ¬¬[A], B: ¬¬[B]): ¬¬[C] =
    for { a <- A; b <- B } yield f(a, b)

  // Singleton types carry their unique value, so they are always inhabited.
  implicit def singleton[A <: Singleton](implicit A: ValueOf[A]): ¬¬[A] =
    witness(f => f(A.value))

  // A witness is itself a value, hence ¬¬[A] implies ¬¬[¬¬[A]].
  implicit def inhabited[A](implicit A: ¬¬[A]): ¬¬[¬¬[A]] =
    witness(f => f(A))

  // If A is uninhabited, then so is the type of witnesses of A's inhabitance.
  implicit def uninhabited[A](implicit na: Uninhabited[A]): Uninhabited[¬¬[A]] =
    Uninhabited.witness(A => A.notUninhabited(na))

  // Any two witnesses of the same inhabitance are equivalent (proof irrelevance).
  implicit def proposition[A]: Proposition[¬¬[A]] =
    (p: ¬¬[¬¬[A]]) => p.flatMap(identity)

  implicit def contractible[A](implicit A: ¬¬[A]): Contractible[Inhabited[A]] =
    Contractible.witness[¬¬[A]](inhabited, proposition[A])

  /**
   * Law of excluded middle.
   */
  def lem[A]: ¬¬[Either[A => Void, A]] =
    witness(k => k(Left(a => k(Right(a)))))

  // Classical de Morgan: not (A and B) implies (not A) or (not B), double-negated.
  def and[A, B](f: (A, B) => Void): ¬¬[Either[A => Void, B => Void]] =
    witness(p => p(Right(b => p(Left(a => f(a, b))))))

  // Material implication: (A => B) implies (not A) or B, double-negated.
  def imp[A, B](f: A => B): ¬¬[Either[A => Void, B]] =
    witness(k => k(Left(a => k(Right(f(a))))))

  /** Peirce's law, double-negated. */
  def pierce[A]: ¬¬[((A => Void) => A) => A] =
    witness(k => k((p: (A => Void) => A) => p((a: A) => k(_ => a))))
}
/*
* Copyright 2019 Scanamo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scanamo.joda
import org.scanamo.DynamoFormat
import org.joda.time.{ DateTime, Instant }
object JodaFormats {

  /** Format for dealing with points in time stored as the number of milliseconds since Epoch. */
  implicit val jodaInstantAsLongFormat: DynamoFormat[Instant] =
    DynamoFormat.coercedXmap[Instant, Long, ArithmeticException](
      millis => new Instant(millis),
      instant => instant.getMillis
    )

  /**
   * Convenient, readable format for Joda DateTime, but requires that all dates serialised
   * have a consistent chronology and time zone.
   */
  implicit val jodaStringFormat: DynamoFormat[DateTime] =
    DynamoFormat.coercedXmap[DateTime, String, IllegalArgumentException](
      text => DateTime.parse(text),
      dateTime => dateTime.toString
    )
}
| scanamo/scanamo | joda/src/main/scala/org/scanamo/joda/JodaFormats.scala | Scala | apache-2.0 | 1,313 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.kafka010
import scala.collection.JavaConverters._
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.kafka010.KafkaWriter.validateQuery
import org.apache.spark.sql.sources.v2.writer._
import org.apache.spark.sql.sources.v2.writer.streaming.{StreamingDataWriterFactory, StreamingWriteSupport}
import org.apache.spark.sql.types.StructType
/**
 * Dummy commit message. The DataSourceV2 framework requires a commit message implementation but we
 * don't need to really send one.
 */
// Stateless singleton marker returned by every per-task commit (see KafkaStreamDataWriter.commit).
case object KafkaWriterCommitMessage extends WriterCommitMessage
/**
 * A [[StreamingWriteSupport]] for Kafka writing. Responsible for generating the writer factory.
 *
 * @param topic The topic this writer is responsible for. If None, topic will be inferred from
 *              a `topic` field in the incoming data.
 * @param producerParams Parameters for Kafka producers in each task.
 * @param schema The schema of the input data.
 */
class KafkaStreamingWriteSupport(
    topic: Option[String], producerParams: Map[String, String], schema: StructType)
  extends StreamingWriteSupport {
  // Fail fast at construction time if the schema/topic combination cannot be written to Kafka.
  validateQuery(schema.toAttributes, producerParams.toMap[String, Object].asJava, topic)
  override def createStreamingWriterFactory(): KafkaStreamWriterFactory =
    KafkaStreamWriterFactory(topic, producerParams, schema)
  // No-ops: per-task writers flush their own sends in commit (see KafkaStreamDataWriter),
  // so there is nothing left to do at the coordinator on commit or abort.
  override def commit(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {}
  override def abort(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {}
}
/**
 * A [[StreamingDataWriterFactory]] for Kafka writing. Will be serialized and sent to executors to
 * generate the per-task data writers.
 * @param topic The topic that should be written to. If None, topic will be inferred from
 *              a `topic` field in the incoming data.
 * @param producerParams Parameters for Kafka producers in each task.
 * @param schema The schema of the input data.
 */
case class KafkaStreamWriterFactory(
    topic: Option[String], producerParams: Map[String, String], schema: StructType)
  extends StreamingDataWriterFactory {
  // Called once per (partition, task, epoch); the ids identify the writer instance but
  // only the constructor data above is needed to build it.
  override def createWriter(
      partitionId: Int,
      taskId: Long,
      epochId: Long): DataWriter[InternalRow] = {
    new KafkaStreamDataWriter(topic, producerParams, schema.toAttributes)
  }
}
/**
 * A [[DataWriter]] for Kafka writing. One data writer will be created in each partition to
 * process incoming rows.
 *
 * @param targetTopic The topic that this data writer is targeting. If None, topic will be inferred
 *                    from a `topic` field in the incoming data.
 * @param producerParams Parameters to use for the Kafka producer.
 * @param inputSchema The attributes in the input data.
 */
class KafkaStreamDataWriter(
    targetTopic: Option[String], producerParams: Map[String, String], inputSchema: Seq[Attribute])
  extends KafkaRowWriter(inputSchema, targetTopic) with DataWriter[InternalRow] {
  import scala.collection.JavaConverters._
  // Producer is obtained lazily from a cache keyed by the producer params
  // (see CachedKafkaProducer.getOrCreate) the first time a row is written.
  private lazy val producer = CachedKafkaProducer.getOrCreate(
    new java.util.HashMap[String, Object](producerParams.asJava))
  def write(row: InternalRow): Unit = {
    // Surface any asynchronous send failure from earlier rows before sending the next one.
    checkForErrors()
    sendRow(row, producer)
  }
  def commit(): WriterCommitMessage = {
    // Send is asynchronous, but we can't commit until all rows are actually in Kafka.
    // This requires flushing and then checking that no callbacks produced errors.
    // We also check for errors before to fail as soon as possible - the check is cheap.
    checkForErrors()
    producer.flush()
    checkForErrors()
    KafkaWriterCommitMessage
  }
  def abort(): Unit = {}
  def close(): Unit = {
    checkForErrors()
    // Flush pending sends, re-check callbacks, then release the cached producer entry.
    // NOTE(review): `producer` is a lazy val, so the null check is purely defensive.
    if (producer != null) {
      producer.flush()
      checkForErrors()
      CachedKafkaProducer.close(new java.util.HashMap[String, Object](producerParams.asJava))
    }
  }
}
| michalsenkyr/spark | external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaStreamingWriteSupport.scala | Scala | apache-2.0 | 4,703 |
package example.consumer
object ConsumerChunkExample {
  /** Entry point: reads one chunk from the given topics (default: "testTopic") and prints it. */
  def main(args: Array[String]): Unit = {
    val topicNames = args.toList match {
      case Nil => List("testTopic")
      case topics => topics
    }
    val consumer = ChunkConsumer(topicNames)
    println(consumer.read())
  }
}
| loveltyoic/scala | src/main/scala/example/consumer/ConsumerChunkExample.scala | Scala | apache-2.0 | 321 |
package recfun
import common._
object Main {
  /** Prints the first 11 rows of Pascal's triangle. */
  def main(args: Array[String]): Unit = {
    println("Pascal's Triangle")
    for (row <- 0 to 10) {
      for (col <- 0 to row)
        print(pascal(col, row) + " ")
      println()
    }
  }

  /**
   * Exercise 1: value at column `c` of row `r` of Pascal's triangle.
   *
   * The edges of the triangle are 1; every interior value is the sum of the
   * two values directly above it.
   */
  def pascal(c: Int, r: Int): Int = {
    if (c == 0 || c == r) 1 else pascal(c - 1, r - 1) + pascal(c, r - 1)
  }

  /**
   * Exercise 2: checks whether the parentheses in `chars` are balanced.
   *
   * Scans left to right keeping a count of currently-open parentheses; the
   * count going negative (a ')' with no matching '(') poisons the result
   * with Int.MinValue so the final equality test fails.
   */
  def balance(chars: List[Char]): Boolean = {
    @scala.annotation.tailrec // compile-time guarantee the scan runs in constant stack
    def moveLeft(known: Int, left: List[Char]): Int = {
      if (known < 0) Int.MinValue
      else if (left.isEmpty) known
      else if (left.head == '(') moveLeft(known + 1, left.tail)
      else if (left.head == ')') moveLeft(known - 1, left.tail)
      else moveLeft(known, left.tail)
    }
    moveLeft(0, chars) == 0
  }

  /**
   * Exercise 3: number of distinct ways to make `money` out of `coins`.
   *
   * Classic recursion: ways without the first coin plus ways using the
   * first coin at least once. The sort does not affect the count.
   */
  def countChange(money: Int, coins: List[Int]): Int = {
    def count(money: Int, coins: List[Int]): Int = {
      if (money == 0) 1
      else if (money < 0 || coins.isEmpty) 0 // overshot, or positive amount left with no coins
      else count(money, coins.tail) + count(money - coins.head, coins)
    }
    count(money, coins.sorted)
  }
}
| huajianmao/learning | coursera/progfun-004/recfun/src/main/scala/recfun/Main.scala | Scala | mit | 1,134 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.recommendation
import scala.language.implicitConversions
import scala.reflect.runtime.universe.TypeTag
import org.apache.spark.sql.{Encoder, Encoders}
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.expressions.Aggregator
import org.apache.spark.util.BoundedPriorityQueue
/**
 * Works on rows of the form (K1, K2, V) where K1 & K2 are IDs and V is the score value. Finds
 * the top `num` K2 items based on the given Ordering.
 */
private[recommendation] class TopByKeyAggregator[K1: TypeTag, K2: TypeTag, V: TypeTag]
  (num: Int, ord: Ordering[(K2, V)])
  extends Aggregator[(K1, K2, V), BoundedPriorityQueue[(K2, V)], Array[(K2, V)]] {

  // Fresh bounded queue retaining at most `num` entries under `ord`.
  override def zero: BoundedPriorityQueue[(K2, V)] = new BoundedPriorityQueue[(K2, V)](num)(ord)

  override def reduce(
      q: BoundedPriorityQueue[(K2, V)],
      a: (K1, K2, V)): BoundedPriorityQueue[(K2, V)] = {
    // drop the K1 key and offer the (K2, score) pair to the queue
    val candidate = (a._2, a._3)
    q += candidate
  }

  override def merge(
      q1: BoundedPriorityQueue[(K2, V)],
      q2: BoundedPriorityQueue[(K2, V)]): BoundedPriorityQueue[(K2, V)] = {
    q1 ++= q2
  }

  // Materialize the retained entries, best first.
  override def finish(r: BoundedPriorityQueue[(K2, V)]): Array[(K2, V)] = {
    val retained = r.toArray
    retained.sorted(ord.reverse)
  }

  override def bufferEncoder: Encoder[BoundedPriorityQueue[(K2, V)]] =
    Encoders.kryo[BoundedPriorityQueue[(K2, V)]]

  override def outputEncoder: Encoder[Array[(K2, V)]] = ExpressionEncoder[Array[(K2, V)]]()
}
| minixalpha/spark | mllib/src/main/scala/org/apache/spark/ml/recommendation/TopByKeyAggregator.scala | Scala | apache-2.0 | 2,257 |
import sbt._
// sbt 0.7-style build definition for the Recursivity JPA module.
class JpaProject(info: ProjectInfo) extends DefaultProject(info){//} with ChecksumPlugin{
  val slf4jVersion = "1.6.0"
  val hibernateEntityManager = "org.hibernate" % "hibernate-entitymanager" % "3.6.1.Final" % "provided"
  val sfl4japi = "org.slf4j" % "slf4j-api" % slf4jVersion % "provided"
  val sfl4jnop = "org.slf4j" % "slf4j-nop" % slf4jVersion % "provided"
  val hsqldb = "hsqldb" % "hsqldb" % "1.8.0.7" % "test"
  // scalatest is cross-built (%%) only for Scala 2.9; earlier Scala versions use the plain artifact
  val scalatest = {
    if(buildScalaVersion.contains("2.9"))
      "org.scalatest" %% "scalatest" % "1.4.1" % "test"
    else
      "org.scalatest" % "scalatest" % "1.3" % "test"
  }
  val jbossRepo = "JBoss repo" at "https://repository.jboss.org/nexus/content/repositories/releases/"
  Credentials(Path.userHome / ".ivy2" / ".credentials", log)
  // snapshots and releases go to different Sonatype repositories
  val publishTo = {
    if(version.toString.endsWith("-SNAPSHOT"))
      "Sonatype Nexus Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots"
    else "Sonatype Nexus Staging" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"
  }
  override def managedStyle = ManagedStyle.Maven
  override def deliverProjectDependencies = Nil
  override def packageDocsJar = defaultJarPath("-javadoc.jar")
  override def packageSrcJar= defaultJarPath("-sources.jar")
  lazy val sourceArtifact = Artifact.sources(artifactID)
  lazy val docsArtifact = Artifact.javadoc(artifactID)
  override def packageToPublishActions = super.packageToPublishActions ++ Seq(packageDocs, packageSrc)
  override def pomExtra = {
    // If these aren't lazy, then the build crashes looking for
    // ${moduleName}/project/build.properties.
    (
    <name>{name}</name>
    <description>Recursivity JPA Project POM</description>
    <url>http://github.com/wfaler/recursivity-jpa</url>
    <inceptionYear>2010</inceptionYear>
    <organization>
      <name>Recursivity Commons Project</name>
      <url>http://github.com/wfaler/recursivity-jpa</url>
    </organization>
    <licenses>
      <license>
        <name>BSD</name>
        <url>http://github.com/wfaler/recursivity-jpa/LICENSE</url>
        <distribution>repo</distribution>
      </license>
    </licenses>
    <scm>
      <connection>scm:git:git://github.com/wfaler/recursivity-jpa.git</connection>
      <url>http://github.com/wfaler/recursivity-jpa</url>
    </scm>
    <developers>
      <developer>
        <id>wfaler</id>
        <name>Wille Faler</name>
        <url>http://blog.recursivity.com</url>
      </developer>
    </developers>)
  }
}
package ch.ninecode
import java.io.BufferedOutputStream
import java.io.File
import java.io.FileInputStream
import java.io.FileOutputStream
import java.io.IOException
import java.util.zip.ZipEntry
import java.util.zip.ZipInputStream
trait Unzip
{
    trait Using
    {
        /**
         * Executes the block with the resource, guaranteeing close() runs afterwards.
         */
        def using[T <: AutoCloseable, R] (resource: T)(block: T => R): R =
        {
            try
            {
                block(resource)
            }
            finally
            {
                resource.close()
            }
        }
    }

    /**
     * This utility extracts files and directories of a standard zip file to
     * a destination directory.
     */
    class Unzip extends Using
    {
        // the maximum number of bytes at a time to read from the zip file
        val BUFFER_SIZE = 4096

        /**
         * Make directories.
         *
         * Uses mkdirs so that missing parent directories are created as well,
         * which lets archives without explicit directory entries extract correctly.
         *
         * @param directory the directory path to create
         */
        def mkdir (directory: String): Unit =
        {
            val dir = new File(directory)
            if (!dir.exists)
            {
                val _ = dir.mkdirs
            }
        }

        /**
         * Iterates over entries in the zip file.
         *
         * Note: Since there is no way to know if the zip is exhausted except by
         * attempting getNextEntry, calling hasNext will attempt to read the next entry
         * and store the value in a var for next() to return.
         *
         * @param zip the opened zip input stream
         */
        case class ZipEntries (zip: ZipInputStream) extends Iterator[ZipEntry]
        {
            @SuppressWarnings(Array("org.wartremover.warts.Var", "org.wartremover.warts.Null"))
            var entry: ZipEntry = _
            override def hasNext: Boolean =
            {
                if (null == entry)
                    entry = zip.getNextEntry
                null != entry
            }
            @SuppressWarnings(Array("org.wartremover.warts.Null"))
            override def next (): ZipEntry =
            {
                val ret = entry
                entry = null
                ret
            }
        }

        /**
         * Extracts a zip file specified by the file to a directory.
         *
         * The directory will be created if it does not exist. A trailing path
         * separator on the directory is optional. Entries whose names would
         * resolve outside the destination ("zip slip" traversal) are rejected
         * with an IOException.
         *
         * @param file The Zip file.
         * @param directory The directory to extract it to
         * @throws IOException If there is a problem with the zip extraction
         */
        @throws[IOException]
        def unzip (file: String, directory: String): Unit =
        {
            // normalize so entry names can simply be appended to the root
            val root = if (directory.endsWith(File.separator)) directory else s"$directory${File.separator}"
            mkdir(root)
            using(new ZipInputStream(new FileInputStream(file)))
            {
                zip =>
                    for (entry <- ZipEntries(zip))
                    {
                        val path = resolve(root, entry.getName)
                        if (entry.isDirectory)
                            mkdir(path) // if the entry is a directory, make the directory
                        else
                        {
                            // ensure parent directories exist even when the archive
                            // contains no explicit directory entries
                            Option(new File(path).getParentFile).foreach(_.mkdirs)
                            extractFile(zip, path) // if the entry is a file, extract it
                        }
                        zip.closeEntry()
                    }
            }
        }

        /**
         * Resolves an entry name against the destination and rejects names that
         * escape it via ".." components ("zip slip" directory traversal).
         *
         * @param root the destination directory, ending with a separator
         * @param entryName the name of the zip entry
         * @return the extraction path for the entry
         * @throws IOException if the entry would land outside the destination
         */
        @throws[IOException]
        private def resolve (root: String, entryName: String): String =
        {
            val canonicalRoot = new File(root).getCanonicalPath
            val canonicalTarget = new File(root, entryName).getCanonicalPath
            if (canonicalTarget != canonicalRoot && !canonicalTarget.startsWith(s"$canonicalRoot${File.separator}"))
                throw new IOException(s"zip entry $entryName would be extracted outside of $root")
            s"$root$entryName"
        }

        /**
         * Iterator over the bytes in the zip entry.
         *
         * Note: Since there is no way to know if the entry is exhausted except by reading,
         * since the available() method doesn't signal EOF until next() has returned -1,
         * we need to guard against a -1 return in next(),
         * which leads to one extra 0 byte write at the end.
         *
         * @param zip the zip stream opened at the entry
         * @param bytes an array to store the entry bytes in
         */
        case class Bytes (zip: ZipInputStream, bytes: Array[Byte]) extends Iterator[Int]
        {
            override def hasNext: Boolean = 0 != zip.available()
            override def next (): Int =
            {
                val n = zip.read(bytes)
                if (-1 == n) 0 else n
            }
        }

        /**
         * Extracts a zip entry (file entry).
         *
         * @param zip The Zip input stream for the file.
         * @param path The path to extract the file to.
         * @throws IOException If there is a problem with the zip extraction
         */
        @throws[IOException]
        private def extractFile (zip: ZipInputStream, path: String): Unit =
        {
            val bytes = new Array[Byte](BUFFER_SIZE)
            using(new BufferedOutputStream(new FileOutputStream(path)))
            {
                bos =>
                    for (read <- Bytes(zip, bytes))
                        bos.write(bytes, 0, read)
            }
        }
    }
}
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.it.routers
import com.lightbend.lagom.scaladsl.api.Service.named
import com.lightbend.lagom.scaladsl.api.Descriptor
import com.lightbend.lagom.scaladsl.api.Service
/** Minimal service API: a descriptor named "additional-routers" with no declared calls. */
trait AdditionalRoutersService extends Service {
  override def descriptor: Descriptor = named("additional-routers")
}
| rcavalcanti/lagom | service/scaladsl/integration-tests/src/test/scala/com/lightbend/lagom/scaladsl/it/routers/AdditionalRoutersService.scala | Scala | apache-2.0 | 402 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 Ben Howell
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.benhowell.diminutives.core
import com.typesafe.config.Config
import scala.util.Random
import scala.collection.mutable
/**
 * Created by Ben Howell [ben@benhowell.net] on 09-Mar-2014.
 *
 * Helpers for building trial attribute maps from Typesafe Config blocks, with
 * optional per-block randomization of trial order and name assignment.
 */
object Trial{

  /** Loads the example trials configured under `item`, in declaration order. */
  def loadExampleRun(config: Config, item: String): Vector[Map[String,String]] =
    Configuration.getConfigList(config, item).map(create).toVector

  /** Builds one randomized trial block per block configuration. */
  def getTrialBlocks(config: List[Config]): Vector[Vector[Map[String, String]]] =
    config.map(block => createRandomTrialBlock(block)).toVector

  /** Flattens every block under `item` and shuffles the combined trial sequence. */
  def createRandomTrialRun(config: Config, item: String): Vector[Map[String,String]] = {
    val blocks = getTrialBlocks(Configuration.getConfigList(config, item))
    Random.shuffle(blocks.flatten)
  }

  /**
   * Shuffles a block's names and trials independently, then pairs them up
   * positionally: each paired trial receives a "name" and the block's "category".
   */
  def createRandomTrialBlock(config: Config): Vector[Map[String, String]] = {
    val category = config.getString("category")
    val names = Random.shuffle(Configuration.getStringList(config, "names").toVector)
    val trials = Random.shuffle(Configuration.getConfigList(config, "trials").map(create).toVector)
    val paired = trials.zip(names).map {
      case (trial, name) => trial + ("name" -> name) + ("category" -> category)
    }
    // trials beyond the number of available names are kept unchanged, matching
    // the pairing semantics of the zipped foreach this replaces
    paired ++ trials.drop(names.length)
  }

  /** Converts one trial Config into an immutable attribute map. */
  def create(config: Config): Map[String,String] =
    Map(
      "id" -> config.getString("id"),
      "type" -> config.getString("type"),
      "imagePath" -> getClass().getResource("/img/").toString,
      "imageName" -> config.getString("image"),
      "text" -> config.getString("text"),
      "name" -> config.getString("name")
    )

  /** Expands a trial into (id, text with the name placeholder substituted, full image path). */
  def compose(trial: Map[String, String]): (String, String, String) = {
    val text = trial("text").replace("${name}", trial("name"))
    val imagePath = trial("imagePath") + trial("imageName")
    (trial("id"), text, imagePath)
  }
}
| benhowell/diminutives | src/main/scala/net/benhowell/diminutives/core/Trial.scala | Scala | mit | 3,246 |
/* Copyright 2009-2016 EPFL, Lausanne */
object MyTuple6 {
  // Verification regression test (Leon): identity on a pair of Ints.
  // The postcondition mirrors the precondition exactly, so the verifier can
  // discharge it; do not "simplify" — the shape of require/ensuring is the test.
  def foo(t: (Int, Int)): (Int, Int) = {
    require(t._1 > 0 && t._2 > 1)
    t
  } ensuring(res => res._1 > 0 && res._2 > 1)
}
| epfl-lara/leon | src/test/resources/regression/verification/purescala/valid/MyTuple6.scala | Scala | gpl-3.0 | 191 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar
import java.nio.{ByteBuffer, ByteOrder}
import scala.annotation.tailrec
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{UnsafeArrayData, UnsafeMapData, UnsafeRow}
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.execution.columnar.compression.CompressibleColumnAccessor
import org.apache.spark.sql.execution.vectorized.WritableColumnVector
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
/**
* An `Iterator` like trait used to extract values from columnar byte buffer. When a value is
* extracted from the buffer, instead of directly returning it, the value is set into some field of
* a [[InternalRow]]. In this way, boxing cost can be avoided by leveraging the setter methods
* for primitive values provided by [[InternalRow]].
*/
private[columnar] trait ColumnAccessor {
  // Run once at construction time (template method), e.g. to read null metadata.
  initialize()

  protected def initialize(): Unit

  // True while more values remain in the underlying buffer.
  def hasNext: Boolean

  // Extracts the next value from the buffer into `row` at `ordinal`.
  def extractTo(row: InternalRow, ordinal: Int): Unit

  protected def underlyingBuffer: ByteBuffer
}

// Generic accessor: delegates per-value extraction to the given ColumnType,
// advancing the buffer position on each extraction.
private[columnar] abstract class BasicColumnAccessor[JvmType](
    protected val buffer: ByteBuffer,
    protected val columnType: ColumnType[JvmType])
  extends ColumnAccessor {

  protected def initialize(): Unit = {}

  override def hasNext: Boolean = buffer.hasRemaining

  override def extractTo(row: InternalRow, ordinal: Int): Unit = {
    extractSingle(row, ordinal)
  }

  def extractSingle(row: InternalRow, ordinal: Int): Unit = {
    columnType.extract(buffer, row, ordinal)
  }

  protected def underlyingBuffer = buffer
}
// Concrete accessors, one per physical column type. Each pairs the generic
// extraction of BasicColumnAccessor with null handling and, for native
// (atomic) types, bulk decompression support.

private[columnar] class NullColumnAccessor(buffer: ByteBuffer)
  extends BasicColumnAccessor[Any](buffer, NULL)
  with NullableColumnAccessor

// Base for accessors over atomic (native) types; mixes in decompression.
private[columnar] abstract class NativeColumnAccessor[T <: AtomicType](
    override protected val buffer: ByteBuffer,
    override protected val columnType: NativeColumnType[T])
  extends BasicColumnAccessor(buffer, columnType)
  with NullableColumnAccessor
  with CompressibleColumnAccessor[T]

private[columnar] class BooleanColumnAccessor(buffer: ByteBuffer)
  extends NativeColumnAccessor(buffer, BOOLEAN)

private[columnar] class ByteColumnAccessor(buffer: ByteBuffer)
  extends NativeColumnAccessor(buffer, BYTE)

private[columnar] class ShortColumnAccessor(buffer: ByteBuffer)
  extends NativeColumnAccessor(buffer, SHORT)

private[columnar] class IntColumnAccessor(buffer: ByteBuffer)
  extends NativeColumnAccessor(buffer, INT)

private[columnar] class LongColumnAccessor(buffer: ByteBuffer)
  extends NativeColumnAccessor(buffer, LONG)

private[columnar] class FloatColumnAccessor(buffer: ByteBuffer)
  extends NativeColumnAccessor(buffer, FLOAT)

private[columnar] class DoubleColumnAccessor(buffer: ByteBuffer)
  extends NativeColumnAccessor(buffer, DOUBLE)

private[columnar] class StringColumnAccessor(buffer: ByteBuffer)
  extends NativeColumnAccessor(buffer, STRING)

private[columnar] class BinaryColumnAccessor(buffer: ByteBuffer)
  extends BasicColumnAccessor[Array[Byte]](buffer, BINARY)
  with NullableColumnAccessor

// NOTE(review): `dataType` is unused here — intervals have a single physical
// layout; presumably kept for constructor-signature uniformity.
private[columnar] class IntervalColumnAccessor(buffer: ByteBuffer, dataType: CalendarIntervalType)
  extends BasicColumnAccessor[CalendarInterval](buffer, CALENDAR_INTERVAL)
  with NullableColumnAccessor

private[columnar] class CompactDecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
  extends NativeColumnAccessor(buffer, COMPACT_DECIMAL(dataType))

private[columnar] class DecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
  extends BasicColumnAccessor[Decimal](buffer, LARGE_DECIMAL(dataType))
  with NullableColumnAccessor

private[columnar] class StructColumnAccessor(buffer: ByteBuffer, dataType: StructType)
  extends BasicColumnAccessor[UnsafeRow](buffer, STRUCT(dataType))
  with NullableColumnAccessor

private[columnar] class ArrayColumnAccessor(buffer: ByteBuffer, dataType: ArrayType)
  extends BasicColumnAccessor[UnsafeArrayData](buffer, ARRAY(dataType))
  with NullableColumnAccessor

private[columnar] class MapColumnAccessor(buffer: ByteBuffer, dataType: MapType)
  extends BasicColumnAccessor[UnsafeMapData](buffer, MAP(dataType))
  with NullableColumnAccessor
private[sql] object ColumnAccessor {
  // Tail-recursive because a UDT column is accessed through its underlying SQL type.
  @tailrec
  def apply(dataType: DataType, buffer: ByteBuffer): ColumnAccessor = {
    // Column byte buffers are written in the platform's native byte order.
    val buf = buffer.order(ByteOrder.nativeOrder)
    dataType match {
      case NullType => new NullColumnAccessor(buf)
      case BooleanType => new BooleanColumnAccessor(buf)
      case ByteType => new ByteColumnAccessor(buf)
      case ShortType => new ShortColumnAccessor(buf)
      case IntegerType | DateType => new IntColumnAccessor(buf)
      case LongType | TimestampType => new LongColumnAccessor(buf)
      case FloatType => new FloatColumnAccessor(buf)
      case DoubleType => new DoubleColumnAccessor(buf)
      case StringType => new StringColumnAccessor(buf)
      case BinaryType => new BinaryColumnAccessor(buf)
      // Small-precision decimals fit in a Long, so they use the compact encoding.
      case dt: DecimalType if dt.precision <= Decimal.MAX_LONG_DIGITS =>
        new CompactDecimalColumnAccessor(buf, dt)
      case dt: DecimalType => new DecimalColumnAccessor(buf, dt)
      case struct: StructType => new StructColumnAccessor(buf, struct)
      case array: ArrayType => new ArrayColumnAccessor(buf, array)
      case map: MapType => new MapColumnAccessor(buf, map)
      case udt: UserDefinedType[_] => ColumnAccessor(udt.sqlType, buffer)
      case other => throw QueryExecutionErrors.notSupportTypeError(other)
    }
  }

  // Bulk-decompresses a whole column into `columnVector`; only native
  // (primitive/atomic) accessors support this path.
  def decompress(columnAccessor: ColumnAccessor, columnVector: WritableColumnVector, numRows: Int):
    Unit = {
    if (columnAccessor.isInstanceOf[NativeColumnAccessor[_]]) {
      val nativeAccessor = columnAccessor.asInstanceOf[NativeColumnAccessor[_]]
      nativeAccessor.decompress(columnVector, numRows)
    } else {
      throw QueryExecutionErrors.notSupportNonPrimitiveTypeError()
    }
  }

  // Convenience overload: wraps the serialized column bytes and decompresses them.
  def decompress(
    array: Array[Byte], columnVector: WritableColumnVector, dataType: DataType, numRows: Int):
    Unit = {
    val byteBuffer = ByteBuffer.wrap(array)
    val columnAccessor = ColumnAccessor(dataType, byteBuffer)
    decompress(columnAccessor, columnVector, numRows)
  }
}
| BryanCutler/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnAccessor.scala | Scala | apache-2.0 | 7,071 |
/**
* Copyright (C) 2011 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.processor
import java.io._
import java.util.regex.Matcher
import org.orbeon.oxf.common.Version
import org.orbeon.oxf.controller.PageFlowControllerProcessor
import org.orbeon.oxf.externalcontext.{ExternalContext, URLRewriter}
import org.orbeon.oxf.pipeline.api.PipelineContext
import org.orbeon.oxf.resources.ResourceManagerWrapper
import org.orbeon.oxf.util.IOUtils._
import org.orbeon.oxf.util.TryUtils._
import org.orbeon.oxf.util._
import org.orbeon.oxf.xforms.AssetPath
import scala.util.Try
import scala.util.control.NonFatal
// NOTE: Should rename to XFormsAssetRewriter?
object XFormsResourceRewriter extends Logging {

  // NOTE(review): every Unicode arrow in this file (`⇒`, `→`, `←`) had been
  // mangled to `β`; they are restored here as ASCII `=>`, `->` and `<-`.

  // Generate the resources into the given OutputStream. The stream is flushed and closed when done.
  def generateAndClose(
    assetPaths   : List[AssetPath],
    namespaceOpt : Option[String],
    os           : OutputStream,
    isCSS        : Boolean,
    isMinimal    : Boolean)(implicit
    logger       : IndentedLogger
  ): Unit =
    useAndClose(os) { _ =>
      if (isCSS)
        generateCSS(assetPaths, namespaceOpt, os, isMinimal)
      else
        generateJS(assetPaths, os, isMinimal)
      os.flush()
    }

  // Recovery handler: log (but swallow) a failure to read one asset.
  private def logFailure[T](path: String)(implicit logger: IndentedLogger): PartialFunction[Throwable, Any] = {
    case NonFatal(_) =>
      error("could not read asset to aggregate", List("asset" -> path))
  }

  private def generateCSS(
    assetPaths   : List[AssetPath],
    namespaceOpt : Option[String],
    os           : OutputStream,
    isMinimal    : Boolean)(implicit
    logger       : IndentedLogger
  ): Unit = {

    val response = NetUtils.getExternalContext.getResponse
    val pipelineContext = PipelineContext.get

    // Create matcher that matches all paths in case resources are versioned
    if (pipelineContext.getAttribute(PageFlowControllerProcessor.PathMatchers) eq null) {
      val matchAllPathMatcher = URLRewriterUtils.getMatchAllPathMatcher
      pipelineContext.setAttribute(PageFlowControllerProcessor.PathMatchers, matchAllPathMatcher)
    }

    val rm = ResourceManagerWrapper.instance

    // NOTE: The idea is that:
    // - we recover and log resource read errors (a file can be missing for example during development)
    // - we don't recover when writing (writing the resources will be interrupted)
    def tryInputStream(path: String) =
      Try(rm.getContentAsStream(path)) onFailure logFailure(path)

    // Use iterators so that we don't open all input streams at once
    def inputStreamIterator =
      for {
        asset <- assetPaths.iterator
        path  = asset.assetPath(isMinimal)
        is    <- tryInputStream(path).iterator
      } yield
        path -> is

    def tryReadCSS(path: String, is: InputStream) =
      Try {
        val sbw = new StringBuilderWriter
        copyReader(new InputStreamReader(is, "utf-8"), sbw)
        sbw.toString
      } onFailure
        logFailure(path)

    val readCSSIterator =
      for {
        (path, is)  <- inputStreamIterator
        originalCSS <- tryReadCSS(path, is).iterator
      } yield
        path -> originalCSS

    val outputWriter = new OutputStreamWriter(os, "utf-8")

    // Output Orbeon Forms version if allowed
    Version.versionStringIfAllowed foreach { version =>
      outputWriter.write(s"/* This file was produced by $version */\n")
    }

    // Write and rewrite all resources one after the other
    readCSSIterator foreach {
      case (path, originalCSS) =>
        if (! isMinimal)
          outputWriter.write("/* Original CSS path: " + path + " */\n")
        outputWriter.write(rewriteCSS(originalCSS, path, namespaceOpt, response))
    }

    outputWriter.flush()
  }

  private val MatchSelectorAndBlock = """([^\{]*\s*)(\{[^\}]*\})""".r
  private val MatchId = """#([\w]+)""".r
  private val MatchURL = """url\(("|')?([^"^'^\)]*)("|')?\)""".r

  // Public for unit tests.
  // Rewrites a CSS file: prefixes every `#id` in selectors with the namespace
  // (if any), and rewrites `url(...)` references relative to `resourcePath`
  // through the response's resource URL rewriter.
  def rewriteCSS(
    css          : String,
    resourcePath : String,
    namespaceOpt : Option[String],
    response     : ExternalContext.Response)(implicit
    logger       : IndentedLogger
  ): String = {

    // Match and rewrite an id within a selector
    def rewriteSelector(s: String) = namespaceOpt match {
      case Some(namespace) => MatchId.replaceAllIn(s, e => Matcher.quoteReplacement("#" + namespace + e.group(1)))
      case None            => s
    }

    // Rewrite an individual URL; on failure keep the original URL and warn.
    def tryRewriteURL(url: String) =
      Try {
        val resolvedURI = NetUtils.resolveURI(url, resourcePath)
        val rewrittenURI = response.rewriteResourceURL(resolvedURI, URLRewriter.REWRITE_MODE_ABSOLUTE_PATH_OR_RELATIVE)
        "url(" + rewrittenURI + ")"
      } recover {
        case NonFatal(_) =>
          warn("found invalid URI in CSS file", Seq("uri" -> url))
          "url(" + url + ")"
      }

    // Match and rewrite a URL within a block
    def rewriteBlock(s: String) =
      MatchURL.replaceAllIn(s, e => Matcher.quoteReplacement(tryRewriteURL(e.group(2)).get))

    // Find approximately pairs of selectors/blocks and rewrite each part
    // Ids are rewritten only if the namespace is not empty
    MatchSelectorAndBlock.replaceAllIn(css, e => Matcher.quoteReplacement(rewriteSelector(e.group(1)) + rewriteBlock(e.group(2))))
  }

  private def generateJS(
    assetPaths : List[AssetPath],
    os         : OutputStream,
    isMinimal  : Boolean)(implicit
    logger     : IndentedLogger
  ): Unit = {

    val outputWriter = new OutputStreamWriter(os, "utf-8")

    // Output Orbeon Forms version if allowed
    Version.versionStringIfAllowed foreach { version =>
      outputWriter.write(s"// This file was produced by $version\n")
      outputWriter.flush()
    }

    val rm = ResourceManagerWrapper.instance

    def tryInputStream(path: String) =
      Try(rm.getContentAsStream(path)) onFailure logFailure(path)

    // Use iterators so that we don't open all input streams at once
    def inputStreamIterator =
      assetPaths.iterator flatMap (r => tryInputStream(r.assetPath(isMinimal)).iterator)

    // Hide any AMD/CommonJS loader while the bundled scripts run, so they
    // register globally instead of through `define`/`exports`.
    outputWriter.write(
      """
        |(function() {
        |  if (window.define || window.exports) {
        |    window.ORBEON = window.ORBEON || {};
        |    if (window.define) {
        |      window.ORBEON.define = window.define;
        |      window.define = null;
        |    }
        |    if (window.exports) {
        |      window.ORBEON.exports = window.exports;
        |      window.exports = null;
        |    }
        |  }
        |})();
      """.stripMargin)
    outputWriter.flush()

    // Write all resources one after the other
    inputStreamIterator foreach { is =>
      useAndClose(is)(NetUtils.copyStream(_, os))
      os.write('\n')
    }

    // Restore the loader saved above.
    outputWriter.write(
      """
        |(function() {
        |  if (window.ORBEON.define) {
        |    window.define = window.ORBEON.define;
        |    window.ORBEON.define = null;
        |  }
        |  if (window.ORBEON.exports) {
        |    window.exports = window.ORBEON.exports;
        |    window.ORBEON.exports = null;
        |  }
        |})();
      """.stripMargin)
    outputWriter.flush()
  }

  // Compute the last modification date of the given resources.
  def computeCombinedLastModified(assetPaths: List[AssetPath], isMinimal: Boolean): Long = {

    val rm = ResourceManagerWrapper.instance

    // NOTE: Actual aggregation will log missing files so we ignore them here
    def lastModified(r: AssetPath) =
      Try(rm.lastModified(r.assetPath(isMinimal), false)) getOrElse 0L

    if (assetPaths.isEmpty) 0L else assetPaths map lastModified max
  }

  // Caches the combined assets on disk as a single file, regenerating it when
  // any source asset is newer. Returns the cached file, or None when no real
  // filesystem path can be resolved for `resourcePath`.
  def cacheAssets(
    assetPaths           : List[AssetPath],
    resourcePath         : String,
    namespaceOpt         : Option[String],
    combinedLastModified : Long,
    isCSS                : Boolean,
    isMinimal            : Boolean
  ): Option[File] = {

    implicit val indentedLogger = XFormsResourceServer.indentedLogger

    val rm = ResourceManagerWrapper.instance

    Option(rm.getRealPath(resourcePath)) match {
      case Some(realPath) =>
        // We hope to be able to cache as a resource
        def logParameters = Seq("resource path" -> resourcePath, "real path" -> realPath)

        val resourceFile = new File(realPath)
        if (resourceFile.exists) {
          // Resources exist, generate if needed
          val resourceLastModified = resourceFile.lastModified
          if (resourceLastModified < combinedLastModified) {
            // Resource is out of date, generate
            debug("cached combined resources out of date, saving", logParameters)
            val fos = new FileOutputStream(resourceFile)
            generateAndClose(assetPaths, namespaceOpt, fos, isCSS, isMinimal)(indentedLogger)
          } else
            debug("cached combined resources exist and are up-to-date", logParameters)
        } else {
          // Resource doesn't exist, generate
          debug("cached combined resources don't exist, saving", logParameters)
          resourceFile.getParentFile.mkdirs()
          resourceFile.createNewFile()
          val fos = new FileOutputStream(resourceFile)
          generateAndClose(assetPaths, namespaceOpt, fos, isCSS, isMinimal)(indentedLogger)
        }
        Some(resourceFile)
      case None =>
        debug("unable to locate real path for cached combined resources, not saving", Seq("resource path" -> resourcePath))
        None
    }
  }
}
| brunobuzzi/orbeon-forms | xforms/jvm/src/main/scala/org/orbeon/oxf/xforms/processor/XFormsResourceRewriter.scala | Scala | lgpl-2.1 | 10,220 |
package com.github.gdefacci.raz
import shapeless._
import scalaz.{ -\\/, \\/, \\/- }
trait HPathEncoder[H <: HList, HR <: HList, S <: PathPosition, E <: PathPosition] {
  // Given an HList `h` of encoders, returns a function that encodes a matching
  // HList `HR` of values into a typed path spanning positions S to E.
  def apply(h: H): HR => TPath[S, E]
}
object HPathEncoder {

  // Internal factory: lifts a curried function into an HPathEncoder instance.
  private def apply[H <: HList, HR <: HList, S <: PathPosition, E <: PathPosition](f: H => HR => TPath[S, E]) = new HPathEncoder[H, HR, S, E] {
    def apply(h: H): HR => TPath[S, E] = f(h)
  }

  // Base case: a single-element HList encodes via the head's ToPathEncoder.
  implicit def hnilHPathEncoder[PE, T, S <: PathPosition, E <: PathPosition](implicit toPe: ToPathEncoder[PE, T, S, E]): HPathEncoder[PE :: HNil, T :: HNil, S, E] =
    HPathEncoder[PE :: HNil, T :: HNil, S, E](h => h1 => toPe(h.head).encode(h1.head))

  // Inductive case: encode the head, recursively encode the tail, then append
  // the two paths (PathAppender witnesses that end position E may be followed
  // by start position S1).
  implicit def hconsHPathEncoder[H <: HList, HR <: HList, PE, T, S <: PathPosition, E <: PathPosition, S1 <: PathPosition, E1 <: PathPosition](
    implicit hr: HPathEncoder[H, HR, S1, E1],
    toPe: ToPathEncoder[PE, T, S, E],
    pathAppender: PathAppender[E, S1]): HPathEncoder[PE :: H, T :: HR, S, E1] =
    HPathEncoder[PE :: H, T :: HR, S, E1]({ h =>
      h1 =>
        val tp1 = toPe(h.head).encode(h1.head)
        val tp2 = hr.apply(h.tail).apply(h1.tail)
        tp1.append(tp2)
    })

  // Earlier, PathCodec/PathConverter-specific instances, superseded by the
  // generic ToPathEncoder-based instances above; kept for reference.
  /*
  implicit def hnilHPathCodecEncoder[TD, TE, S <: PathPosition, E <: PathPosition]: HPathEncoder[PathCodec[TD, TE, S, E] :: HNil, TE :: HNil, S, E] =
    HPathEncoder[PathCodec[TD, TE, S, E] :: HNil, TE :: HNil, S, E](h => h1 => h.head.encoder.encode(h1.head))

  implicit def hconsHPathCodecEncoder[H <: HList, HR <: HList, TD, TE, S <: PathPosition, E <: PathPosition, S1 <: PathPosition, E1 <: PathPosition]
    (implicit hr: HPathEncoder[H, HR, S1, E1], pathAppender: PathAppender[E, S1]): HPathEncoder[PathCodec[TD, TE, S, E] :: H, TE :: HR, S, E1] =
    HPathEncoder[PathCodec[TD, TE, S, E] :: H, TE :: HR, S, E1] { h =>
      h1 =>
        val tp1 = h.head.encoder.encode(h1.head)
        val tp2 = hr.apply(h.tail).apply(h1.tail)
        tp1.append(tp2)
    }

  implicit def hnilHPathConverterEncoder[TD, TE, UT, S <: PathPosition, E <: PathPosition]: HPathEncoder[PathConverter[TD, TE, UT, S, E] :: HNil, TE :: HNil, S, E] =
    HPathEncoder[PathConverter[TD, TE, UT, S, E] :: HNil, TE :: HNil, S, E](h => h1 => h.head.encoder.encode(h1.head))

  implicit def hconsHPathConverterEncoder[H <: HList, HR <: HList, TD, TE, UT, S <: PathPosition, E <: PathPosition, S1 <: PathPosition, E1 <: PathPosition]
    (implicit hr: HPathEncoder[H, HR, S1, E1], pathAppender: PathAppender[E, S1]): HPathEncoder[PathConverter[TD, TE, UT, S, E] :: H, TE :: HR, S, E1] =
    HPathEncoder[PathConverter[TD, TE, UT, S, E] :: H, TE :: HR, S, E1]({ h =>
      h1 =>
        val tp1 = h.head.encoder.encode(h1.head)
        val tp2 = hr.apply(h.tail).apply(h1.tail)
        tp1.append(tp2)
    })
  *
  */
}
package at.forsyte.apalache.tla.bmcmt.rules.deprecated
import at.forsyte.apalache.tla.bmcmt._
import at.forsyte.apalache.tla.bmcmt.rewriter.ConstSimplifierForSmt
import at.forsyte.apalache.tla.bmcmt.rules.SubstRule
import at.forsyte.apalache.tla.bmcmt.types.BoolT
import at.forsyte.apalache.tla.lir.convenience.tla
import at.forsyte.apalache.tla.lir.oper.TlaBoolOper
import at.forsyte.apalache.tla.lir.{NameEx, OperEx}
/**
* Implements an equivalence A <=> B by rewriting it to A = B.
*
* @author Igor Konnov
*/
@deprecated("Normalizer takes care of it")
class EquivRule(rewriter: SymbStateRewriter) extends RewritingRule {
  // NOTE(review): neither field is used by this (deprecated) rule; kept so the
  // construction behavior of the class is unchanged.
  private val substRule = new SubstRule(rewriter)
  private val simplifier = new ConstSimplifierForSmt()

  /** This rule applies only to expressions of the form `A <=> B`. */
  override def isApplicable(symbState: SymbState): Boolean = {
    symbState.ex match {
      case OperEx(TlaBoolOper.equiv, _, _) => true
      case _                               => false
    }
  }

  /**
   * Rewrites both operands (threading the symbolic state left-to-right), then
   * introduces a fresh Boolean cell asserted at the SMT level to equal the
   * equivalence of the rewritten operands.
   */
  override def apply(state: SymbState): SymbState = {
    state.ex match {
      case OperEx(TlaBoolOper.equiv, left, right) =>
        // vals, not vars: these states are never reassigned.
        val leftState = rewriter.rewriteUntilDone(state.setRex(left))
        val rightState = rewriter.rewriteUntilDone(leftState.setRex(right))
        val nextState = rightState.updateArena(_.appendCell(BoolT()))
        val pred = nextState.arena.topCell
        rewriter.solverContext.assertGroundExpr(tla.eql(pred.toNameEx, tla.equiv(leftState.ex, rightState.ex)))
        nextState.setRex(pred.toNameEx)
      case _ =>
        throw new RewriterException("%s is not applicable".format(getClass.getSimpleName), state.ex)
    }
  }
}
| konnov/dach | tla-bmcmt/src/main/scala/at/forsyte/apalache/tla/bmcmt/rules/deprecated/EquivRule.scala | Scala | apache-2.0 | 1,581 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.ems.core.config.service
import org.beangle.cdi.bind.BindModule
import org.beangle.ems.core.config.service.impl._
class DefaultModule extends BindModule {
  // Registers the default config-service implementations with the container.
  // NOTE(review): registration order is preserved as written — confirm whether
  // the container is order-sensitive before reordering.
  protected override def binding(): Unit = {
    bind(classOf[DataSourceManagerImpl])
    bind(classOf[DbServiceImpl])
    bind(classOf[CredentialServiceImpl])
    bind(classOf[AppServiceImpl])
    bind(classOf[DomainServiceImpl])
  }
}
| beangle/ems | core/src/main/scala/org/beangle/ems/core/config/service/DefaultModule.scala | Scala | lgpl-3.0 | 1,136 |
package com.quant.TechAnalysis
/**
* Created by Frank Cash on 5/6/17.
*/
object Average {

  /**
   * Average resistance over a partitioned price series.
   * Reference on Resistance <http://www.investopedia.com/articles/technical/061801.asp>
   *
   * @param data price series split into sub-periods (the return of split(xs, n));
   *             every sub-list must be non-empty or `max` throws
   * @return mean of the per-period maxima (NaN when `data` is empty, as before)
   */
  def avgResistance(data: List[List[Double]]): Double =
    data.map(_.max).sum / data.length

  /**
   * Average support over a partitioned price series.
   * Reference on Support <http://www.investopedia.com/articles/technical/061801.asp>
   *
   * @param data price series split into sub-periods; every sub-list must be non-empty
   * @return mean of the per-period minima (NaN when `data` is empty, as before)
   */
  def avgSupport(data: List[List[Double]]): Double =
    data.map(_.min).sum / data.length

  /**
   * Simple moving average.
   * Reference on Moving Average <http://www.investopedia.com/terms/m/movingaverage.asp>
   *
   * @param movingAvgSize divisor for the window — presumably the window length;
   *                      confirm at call sites (kept as an explicit parameter
   *                      for interface compatibility)
   * @param data prices included in the window
   * @return sum of `data` divided by `movingAvgSize`
   */
  def movingAvg(movingAvgSize: Double, data: List[Double]): Double =
    data.sum / movingAvgSize
}
| frankcash/IFTT-Stock-Data-Manipulator | src/main/scala/com/quant/TechAnalysis/Average.scala | Scala | mit | 1,207 |
/*
* Copyright 2016 Renaud Bruneliere
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.bruneli.scalaopt.core.variable
import com.github.bruneli.scalaopt.core.linalg.FromToDoubleConversions.ToDouble
/**
* Define the different type of optimization variable supported
*
* @author bruneli
*/
trait Variable extends Any with ToDouble {

  /** Lower bound on variable value if any */
  def lower: Option[Double]

  /** Upper bound on variable value if any */
  def upper: Option[Double]

  /** Convert variable into a continuous variable */
  def toContinuousVariable: ContinuousVariable

  /** Build a new variable of the same kind with its value replaced by x */
  def build(x: Double): Variable
}

/**
 * Optimization variable with an unknown type
 */
case class Unknown(x: Double) extends AnyVal with ContinuousVariable {

  // Unbounded in both directions.
  override def lower: Option[Double] = None

  override def upper: Option[Double] = None

  /** Build a new variable with x modified */
  override def build(x: Double): Unknown = Unknown(x)
}

// Marker traits distinguishing continuous from discrete variables.
sealed trait Continuous extends Any

sealed trait Discrete extends Any

/**
 * Real-valued continuous variable
 */
trait ContinuousVariable extends Any with Variable with Continuous {

  // A continuous variable is already continuous.
  override def toContinuousVariable: ContinuousVariable = this
}
/**
* Real-valued continuous variable bounded from below and/or above
*
* @param x variable value
* @param lower lower bound on its value (optional)
* @param upper upper bound on its value (optional)
*/
case class BoundedVariable(
  x: Double,
  lower: Option[Double],
  upper: Option[Double]) extends ContinuousVariable {

  /** Build a new variable with x modified (bounds are preserved via copy) */
  override def build(x: Double): BoundedVariable = this.copy(x = x)
}

// A discrete variable relaxes to a bounded continuous variable and can be
// snapped to neighbouring feasible values via floor/ceil.
trait DiscreteVariable extends Any with Variable with Discrete {

  override def toContinuousVariable: ContinuousVariable = {
    BoundedVariable(this.x, this.lower, this.upper)
  }

  /** Find the closest discrete variable with a value lower or equal to v */
  def floor(v: Variable): Option[DiscreteVariable]

  /** Find the closest discrete variable with a value greater or equal to v */
  def ceil(v: Variable): Option[DiscreteVariable]
}
/**
* Discrete level
*/
case class Level(x: Double) extends AnyVal

/**
 * Discrete real-valued variable defined by a finite set of possible levels
 */
case class DiscreteLevelsVariable(
  level: Level, levels: Array[Level]) extends DiscreteVariable {

  val x = level.x

  // Levels sorted ascending by value; drives the bounds and floor/ceil searches.
  lazy val sortedLevels = levels.sortBy(_.x)

  override def lower: Option[Double] = sortedLevels.headOption.map(_.x)

  override def upper: Option[Double] = sortedLevels.lastOption.map(_.x)

  override def floor(v: Variable): Option[DiscreteVariable] = {
    // Largest level not exceeding v; None when every level is above v.
    sortedLevels.takeWhile(_.x <= v.x).lastOption.map(level => this.copy(level = level))
  }

  override def ceil(v: Variable): Option[DiscreteVariable] = {
    // Smallest level not below v; None when every level is below v.
    sortedLevels.find(_.x >= v.x).map(level => this.copy(level = level))
  }

  /** Build a new variable with x modified */
  override def build(x: Double): DiscreteLevelsVariable = {
    // NOTE(review): when x is above the highest level, this falls back to the
    // LOWEST level (sortedLevels.head), not the highest — confirm intended.
    val lvl = sortedLevels.find(_.x >= x).getOrElse(sortedLevels.head)
    this.copy(level = lvl)
  }
}
/**
* Integer variable
*/
case class IntegerVariable(
  i: Int,
  min: Option[Int] = None,
  max: Option[Int] = None) extends DiscreteVariable {

  // Continuous view of the integer value.
  val x = i.toDouble

  override def lower: Option[Double] = min.map(_.toDouble)

  override def upper: Option[Double] = max.map(_.toDouble)

  /** Largest integer <= v, or None when v lies strictly below the lower bound. */
  override def floor(v: Variable): Option[DiscreteVariable] =
    if (lower.exists(v.x < _)) None
    else Some(IntegerVariable(Math.floor(v.x).toInt, min, max))

  /** Smallest integer >= v, or None when v lies strictly above the upper bound. */
  override def ceil(v: Variable): Option[DiscreteVariable] =
    if (upper.exists(v.x > _)) None
    else Some(IntegerVariable(Math.ceil(v.x).toInt, min, max))

  /** Build a new variable with x rounded half-up to the nearest integer. */
  override def build(x: Double): IntegerVariable = {
    val down = Math.floor(x)
    val rounded = if (x - down >= 0.5) Math.ceil(x) else down
    copy(i = rounded.toInt)
  }
}
/**
* Binary variable
*/
case class BinaryVariable(b: Boolean) extends DiscreteVariable {

  // true -> 1.0, false -> 0.0
  val x = if (b) 1.0 else 0.0

  override def lower: Option[Double] = Some(0.0)

  override def upper: Option[Double] = Some(1.0)

  override def floor(v: Variable): Option[DiscreteVariable] = {
    // Always snaps to 0, regardless of v.
    Some(BinaryVariable(false))
  }

  override def ceil(v: Variable): Option[DiscreteVariable] = {
    // Always snaps to 1, regardless of v.
    Some(BinaryVariable(true))
  }

  /** Build a new variable with x modified (rounds at the 0.5 threshold) */
  override def build(x: Double): BinaryVariable = {
    BinaryVariable(x >= 0.5)
  }
}
object Solution {

  /**
   * HackerRank "Chocolate Feast": with `n` units of money, unit price `c`, and
   * a promotion trading `m` wrappers for one free chocolate, returns the total
   * number of chocolates obtained.
   *
   * (The method name keeps the original spelling for interface compatibility.)
   */
  def getNumOfCholocates(n: Int, c: Int, m: Int): Int = {
    // Keep trading wrappers for chocolates until fewer than m wrappers remain.
    @scala.annotation.tailrec
    def redeem(total: Int, wrappers: Int): Int =
      if (wrappers < m) total
      else {
        val extra = wrappers / m
        redeem(total + extra, extra + wrappers % m)
      }

    val bought = n / c
    redeem(bought, bought)
  }

  def main(args: Array[String]): Unit = {
    // Predef.readLine is deprecated and removed in Scala 2.13; use scala.io.StdIn.
    import scala.io.StdIn.readLine
    val t = readLine.toInt
    for (_ <- 1 to t) {
      val Array(n, c, m) = readLine.split(" ").map(_.toInt)
      println(getNumOfCholocates(n, c, m))
    }
  }
}
| advancedxy/hackerrank | algorithms/warmup/ChocolateFeast.scala | Scala | mit | 474 |
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.scalastyle.scalariform
import org.scalastyle.ScalariformChecker
import org.scalastyle.ScalastyleError
import org.scalastyle.scalariform.VisitorHelper.visit
import scalariform.parser.Generator
import scalariform.parser.CompilationUnit
import org.scalastyle.PositionError
import scalariform.parser.Expr
import scalariform.lexer.{Tokens, Token}
class GeneratorVariableLengthChecker extends ScalariformChecker {
  // Default maximum allowed length (in characters) of a generator variable name.
  val DefaultMaxVariableLength = 1
  val errorKey = "generator.variable.length"

  /**
   * Reports every for-comprehension generator variable whose simple name is
   * longer than the configured `maxVariableLength` parameter.
   */
  final def verify(ast: CompilationUnit): List[ScalastyleError] = {
    val maxVariableLength = getInt("maxVariableLength", DefaultMaxVariableLength)
    val it = for (
      t <- VisitorHelper.getAll[Generator](ast.immediateChildren(0));
      f <- localvisit(t.pattern)
      if (isTooLong(f, maxVariableLength))
    ) yield {
      PositionError(f.offset, List(maxVariableLength.toString()))
    }
    it.toList
  }

  private def isTooLong(t: Token, maxLength: Int): Boolean = {
    t.text.length > maxLength
  }

  // Collects VARID tokens that form whole single-token expressions in a
  // generator pattern (i.e. plain variable names, not tuples or extractors).
  private def localvisit(ast: Any): List[Token] = ast match {
    case t: Expr =>
      if (t.contents.length == 1 && t.firstToken.tokenType == Tokens.VARID) {
        // NOTE(review): also recurses into immediateChildren(0) after taking the
        // token — presumably to handle nested expression nodes; confirm.
        t.firstToken :: localvisit(t.immediateChildren(0))
      } else {
        localvisit(t.contents)
      }
    case t: Any => visit(t, localvisit)
  }
}
| dwango/scalastyle | src/main/scala/org/scalastyle/scalariform/GeneratorVariableLengthChecker.scala | Scala | apache-2.0 | 2,073 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.persistence.jdbc
import akka.actor.setup.ActorSystemSetup
import akka.actor.ActorSystem
import akka.actor.BootstrapSetup
import akka.cluster.Cluster
import com.lightbend.lagom.internal.persistence.ReadSideConfig
import com.lightbend.lagom.internal.persistence.jdbc.SlickDbTestProvider
import com.lightbend.lagom.internal.persistence.jdbc.SlickOffsetStore
import com.lightbend.lagom.internal.persistence.jdbc.SlickProvider
import com.lightbend.lagom.internal.persistence.testkit.AwaitPersistenceInit.awaitPersistenceInit
import com.lightbend.lagom.internal.scaladsl.persistence.jdbc.JdbcReadSideImpl
import com.lightbend.lagom.internal.scaladsl.persistence.jdbc.OffsetTableConfiguration
import com.lightbend.lagom.persistence.ActorSystemSpec
import com.lightbend.lagom.persistence.PersistenceSpec
import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import play.api.Configuration
import play.api.Environment
import scala.concurrent.Await
import scala.concurrent.duration._
/**
 * Base class for JDBC persistence tests. Boots an actor system configured
 * with the given serializer registry, then — in beforeAll — joins a
 * single-node cluster, binds a Slick database and creates the journal
 * tables before any test runs.
 */
abstract class JdbcPersistenceSpec private (_system: ActorSystem) extends ActorSystemSpec(_system) {
  // Builds the actor system: test config falls back to the Play application
  // config, and the registry's serializers are installed via ActorSystemSetup.
  def this(testName: String, config: Config, registry: JsonSerializerRegistry) =
    this(
      ActorSystem(
        testName,
        ActorSystemSetup(
          BootstrapSetup(
            config.withFallback(Configuration.load(Environment.simple()).underlying)
          ),
          JsonSerializerRegistry.serializationSetupFor(registry)
        )
      )
    )
  def this(config: Config, registry: JsonSerializerRegistry) =
    this(PersistenceSpec.getCallerName(getClass), config, registry)
  def this(registry: JsonSerializerRegistry) = this(ConfigFactory.empty(), registry)
  import system.dispatcher
  // Lazy: the provider/read-side are only materialised after beforeAll has
  // bound the database (see below).
  protected lazy val slick = new SlickProvider(system, coordinatedShutdown)
  protected lazy val jdbcReadSide: JdbcReadSide = new JdbcReadSideImpl(
    slick,
    new SlickOffsetStore(
      system,
      slick,
      new OffsetTableConfiguration(system.settings.config, ReadSideConfig())
    )
  )
  override def beforeAll(): Unit = {
    super.beforeAll()
    // Join ourselves - needed because we're using cluster singleton to create tables
    val cluster = Cluster(system)
    cluster.join(cluster.selfAddress)
    // Trigger database to be loaded and registered to JNDI
    SlickDbTestProvider.buildAndBindSlickDb(system.name, coordinatedShutdown)
    // Trigger tables to be created (blocks so tests never race table creation)
    Await.ready(slick.ensureTablesCreated(), 20.seconds)
    awaitPersistenceInit(system)
  }
}
| ignasi35/lagom | persistence-jdbc/scaladsl/src/test/scala/com/lightbend/lagom/scaladsl/persistence/jdbc/JdbcPersistenceSpec.scala | Scala | apache-2.0 | 2,686 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.oap.io
import org.apache.spark.sql.execution.datasources.parquet.ParquetDictionaryWrapper
import org.apache.spark.sql.execution.vectorized.{Dictionary, OapOnHeapColumnVector}
import org.apache.spark.sql.types.ByteType
/**
 * Round-trip tests for ParquetDataFiberWriter/ParquetDataFiberReader over
 * ByteType column vectors, covering every combination of dictionary vs
 * plain encoding and no/some/all nulls. Each test follows the same shape:
 *   1. fill an OapOnHeapColumnVector and dump it into a fiber cache,
 *   2. read it back with the sequential batch API (readBatch(start, num, _)),
 *   3. read it back with the random-access API (readBatch(rowIdList, _)).
 * NOTE(review): the random-access assertions assume ints(i) corresponds to
 * rowIdList(i) and (in the null tests) that ints(i) == i + start — confirm
 * against the fixtures in DataFiberReaderWriterSuite.
 */
class ByteTypeDataFiberReaderWriterSuite extends DataFiberReaderWriterSuite {
  // byte data use IntegerDictionary
  protected val dictionary: Dictionary = new ParquetDictionaryWrapper(
    IntegerDictionary(Array(0, 1, 2)))
  // Plain encoding, fully populated: values are simply i.toByte.
  test("no dic no nulls") {
    // write data
    val column = new OapOnHeapColumnVector(total, ByteType)
    (0 until total).foreach(i => column.putByte(i, i.toByte))
    fiberCache = ParquetDataFiberWriter.dumpToCache(column, total)
    // init reader
    val address = fiberCache.getBaseOffset
    val reader = ParquetDataFiberReader(address, ByteType, total)
    // read use batch api
    val ret1 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(start, num, ret1)
    (0 until num).foreach(i => assert(ret1.getByte(i) == (i + start).toByte))
    // read use random access api
    val ret2 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(rowIdList, ret2)
    ints.indices.foreach(i => assert(ret2.getByte(i) == ints(i).toByte))
  }
  // Dictionary encoding, fully populated: ids cycle through the dictionary.
  test("with dic no nulls") {
    // write data
    val column = new OapOnHeapColumnVector(total, ByteType)
    column.reserveDictionaryIds(total)
    val dictionaryIds = column.getDictionaryIds.asInstanceOf[OapOnHeapColumnVector]
    column.setDictionary(dictionary)
    (0 until total).foreach(i => dictionaryIds.putInt(i, i % column.dictionaryLength ))
    fiberCache = ParquetDataFiberWriter.dumpToCache(column, total)
    // init reader
    val address = fiberCache.getBaseOffset
    val reader = ParquetDataFiberReader(address, ByteType, total)
    // read use batch api
    val ret1 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(start, num, ret1)
    (0 until num).foreach(i =>
      assert(ret1.getByte(i) == ((i + start) % column.dictionaryLength).toByte))
    // read use random access api
    val ret2 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(rowIdList, ret2)
    ints.indices.foreach(i =>
      assert(ret2.getByte(i) == (ints(i) % column.dictionaryLength).toByte))
  }
  // Plain encoding, every slot null.
  test("no dic all nulls") {
    // write data
    val column = new OapOnHeapColumnVector(total, ByteType)
    column.putNulls(0, total)
    fiberCache = ParquetDataFiberWriter.dumpToCache(column, total)
    // init reader
    val address = fiberCache.getBaseOffset
    val reader = ParquetDataFiberReader(address, ByteType, total)
    // read use batch api
    val ret1 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(start, num, ret1)
    (0 until num).foreach(i => assert(ret1.isNullAt(i)))
    // read use random access api
    val ret2 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(rowIdList, ret2)
    ints.indices.foreach(i => assert(ret2.isNullAt(i)))
  }
  // Dictionary encoding, every slot null.
  test("with dic all nulls") {
    // write data
    val column = new OapOnHeapColumnVector(total, ByteType)
    column.reserveDictionaryIds(total)
    column.setDictionary(dictionary)
    column.putNulls(0, total)
    fiberCache = ParquetDataFiberWriter.dumpToCache(column, total)
    // init reader
    val address = fiberCache.getBaseOffset
    val reader = ParquetDataFiberReader(address, ByteType, total)
    // read use batch api
    val ret1 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(start, num, ret1)
    (0 until num).foreach(i => assert(ret1.isNullAt(i)))
    // read use random access api
    val ret2 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(rowIdList, ret2)
    ints.indices.foreach(i => assert(ret2.isNullAt(i)))
  }
  // Plain encoding, mixed: every third slot (i % 3 == 0) is null.
  test("no dic") {
    // write data
    val column = new OapOnHeapColumnVector(total, ByteType)
    (0 until total).foreach(i => {
      if (i % 3 == 0) column.putNull(i)
      else column.putByte(i, i.toByte)
    })
    fiberCache = ParquetDataFiberWriter.dumpToCache(column, total)
    // init reader
    val address = fiberCache.getBaseOffset
    val reader = ParquetDataFiberReader(address, ByteType, total)
    // read use batch api
    val ret1 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(start, num, ret1)
    (0 until num).foreach(i => {
      if ((i + start) % 3 == 0) assert(ret1.isNullAt(i))
      else assert(ret1.getByte(i) == (i + start).toByte)
    })
    // read use random access api
    val ret2 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(rowIdList, ret2)
    ints.indices.foreach(i => {
      if ((i + start) % 3 == 0) assert(ret2.isNullAt(i))
      else assert(ret2.getByte(i) == ints(i).toByte)
    })
  }
  // Dictionary encoding, mixed nulls: non-null slots hold cycling dictionary ids.
  test("with dic") {
    // write data
    val column = new OapOnHeapColumnVector(total, ByteType)
    column.reserveDictionaryIds(total)
    val dictionaryIds = column.getDictionaryIds.asInstanceOf[OapOnHeapColumnVector]
    column.setDictionary(dictionary)
    (0 until total).foreach(i => {
      if (i % 3 == 0) column.putNull(i)
      else dictionaryIds.putInt(i, i % column.dictionaryLength)
    })
    fiberCache = ParquetDataFiberWriter.dumpToCache(column, total)
    // init reader
    val address = fiberCache.getBaseOffset
    val reader = ParquetDataFiberReader(address, ByteType, total)
    // read use batch api
    val ret1 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(start, num, ret1)
    (0 until num).foreach(i => {
      if ((i + start) % 3 == 0) assert(ret1.isNullAt(i))
      else assert(ret1.getByte(i) == ((i + start) % column.dictionaryLength).toByte)
    })
    // read use random access api
    val ret2 = new OapOnHeapColumnVector(total, ByteType)
    reader.readBatch(rowIdList, ret2)
    ints.indices.foreach(i => {
      if ((i + start) % 3 == 0) assert(ret2.isNullAt(i))
      else assert(ret2.getByte(i) == (ints(i) % column.dictionaryLength).toByte)
    })
  }
}
| Intel-bigdata/OAP | oap-cache/oap/src/test/scala/org/apache/spark/sql/execution/datasources/oap/io/ByteTypeDataFiberReaderWriterSuite.scala | Scala | apache-2.0 | 6,794 |
import sbt._
// Auxiliary sbt build definition declaring a sub-project `root2` whose
// task `g` deliberately does nothing (the test only needs to invoke it).
object Ext extends Build {
  lazy val root2 = Project("root2", file("root2")).settings(
    TaskKey[Unit]("g") := {}
  )
}
| pdalpra/sbt | sbt/src/sbt-test/actions/reload/external/project/Ext.scala | Scala | bsd-3-clause | 133 |
package demo
package components
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import scala.scalajs.js
/**
 * A GitHub user (login, profile URL, avatar URL, plus a client-side
 * timestamp).
 *
 * Two users are equal when their logins match case-insensitively; all other
 * fields are ignored. `hashCode` is overridden to agree with that equality:
 * the case-class-generated hashCode would hash every field, so two "equal"
 * users with different URLs would land in different hash buckets, breaking
 * the equals/hashCode contract for sets and maps.
 */
case class Github(login: String = "", html_url: String = "", avatar_url: String = "", time: Double = 0) {
  override def equals(obj: Any): Boolean = obj match {
    case that: Github => that.login.equalsIgnoreCase(this.login)
    case _ => false
  }

  // Must stay consistent with equals: hash only the lower-cased login.
  override def hashCode: Int = login.toLowerCase.hashCode
}
/** Renders a GitHub user as a linked avatar image plus login name. */
object GithubUser {
  // Inline styles: the whole component is a single clickable anchor.
  object Styles {
    val userGroup = Seq(^.display := "inline-block", ^.textAlign := "center", ^.textDecoration := "none", ^.color := "black")
    val userIcon = Seq(^.margin := "10px", ^.display := "block", ^.width := "100px", ^.height := "100px", ^.borderRadius := "50%")
    val userName = Seq(^.fontSize := "18px", ^.fontWeight := 500)
  }
  case class Backend($: BackendScope[Props, _]){
    // Anchor linking to the user's GitHub profile, containing avatar + login.
    def render(P: Props) = {
      <.a( Styles.userGroup, ^.href := P.user.html_url)(
        <.img(Styles.userIcon, ^.src := P.user.avatar_url),
        <.span(Styles.userName)(P.user.login)
      )
    }
  }
  val component = ReactComponentB[Props]("GithubUser")
    .renderBackend[Backend]
    .build
  case class Props(user: Github)
  // NOTE(review): the default `key = {}` is Unit — presumably "no key"; confirm.
  def apply(user: Github,ref: js.UndefOr[String] = "", key: js.Any = {}) = component.set(key, ref)(Props(user))
}
| elacin/scalajs-react-components | demo/src/main/scala/demo/components/GithubUser.scala | Scala | apache-2.0 | 1,262 |
package lila.push
import akka.actor._
import akka.pattern.ask
import chess.format.Forsyth
import lila.common.LightUser
import lila.game.{ Game, GameRepo, Pov, Namer }
import lila.hub.actorApi.map.Ask
import lila.hub.actorApi.round.{ MoveEvent, IsOnGame }
import lila.user.User
import play.api.libs.json._
/**
 * Sends Google push notifications for correspondence games:
 * `finish` notifies both players when a game ends, `move` notifies the
 * player to move — in both cases only when the recipient is not currently
 * watching the game (see IfAway).
 */
private final class PushApi(
    googlePush: GooglePush,
    implicit val lightUser: String => Option[LightUser],
    roundSocketHub: ActorSelection) {
  // Notify both players of the result. Skipped for non-correspondence games
  // and for games involving the AI.
  def finish(game: Game): Funit =
    if (!game.isCorrespondence || game.hasAi) funit
    else game.userIds.map { userId =>
      Pov.ofUserId(game, userId) ?? { pov =>
        IfAway(pov) {
          googlePush(userId) {
            GooglePush.Data(
              title = pov.win match {
                case Some(true) => "You won!"
                case Some(false) => "You lost."
                case _ => "It's a draw."
              },
              body = s"Your game with ${opponentName(pov)} is over.",
              // Payload consumed by the mobile client to open the game.
              payload = Json.obj(
                "userId" -> userId,
                "userData" -> Json.obj(
                  "gameId" -> game.id,
                  "fullId" -> pov.fullId,
                  "color" -> pov.color.name,
                  "fen" -> Forsyth.exportBoard(game.toChess.board),
                  "lastMove" -> game.castleLastMoveTime.lastMoveString,
                  "win" -> pov.win)
              ))
          }
        }
      }
    }.sequenceFu.void
  // Notify the player to move after the opponent's move (only for moves
  // flagged mobilePushable). The SAN of the last move goes in the body.
  def move(move: MoveEvent): Funit = move.mobilePushable ?? {
    GameRepo game move.gameId flatMap {
      _ ?? { game =>
        val pov = Pov(game, !move.color)
        game.player(!move.color).userId ?? { userId =>
          game.pgnMoves.lastOption ?? { sanMove =>
            IfAway(pov) {
              googlePush(userId) {
                GooglePush.Data(
                  title = "It's your turn!",
                  body = s"${opponentName(pov)} played $sanMove",
                  payload = Json.obj(
                    "userId" -> userId,
                    "userData" -> Json.obj(
                      "gameId" -> game.id,
                      "fullId" -> pov.fullId,
                      "color" -> pov.color.name,
                      "fen" -> Forsyth.exportBoard(game.toChess.board),
                      "lastMove" -> game.castleLastMoveTime.lastMoveString,
                      "secondsLeft" -> pov.remainingSeconds)
                  ))
              }
            }
          }
        }
      }
    }
  }
  // Runs `f` only when the player is NOT connected to the game's round
  // socket (asks the round actor whether the color is on the game).
  private def IfAway(pov: Pov)(f: => Funit): Funit = {
    import makeTimeout.short
    roundSocketHub ? Ask(pov.gameId, IsOnGame(pov.color)) mapTo manifest[Boolean] flatMap {
      case true => funit
      case false => f
    }
  }
  private def opponentName(pov: Pov) = Namer playerString pov.opponent
}
| JimmyMow/lila | modules/push/src/main/PushApi.scala | Scala | mit | 2,809 |
object Test extends App {
  def assertEquals(a: Any, b: Any): Unit = { assert(a == b, s"$a != $b") }

  // @Test def combo: Unit =
  {
    // The @argumentative(1, 2) macro annotation expands object X so that
    // its toString renders the annotation arguments, i.e. "1 2".
    @argumentative(1, 2) object X
    assertEquals(X.toString, "1 2")
  }
}
| lrytz/scala | test/macro-annot/run/argumentative/argumentative_2.scala | Scala | apache-2.0 | 212 |
// Base vehicle: starts at position 10 and moves forward by d.
class Vehicle {
  var start = 10;
  var position = start;
  def move(d: Int) { position = position + d }
}
// Car: `await` pulls a trailing vehicle up to this car's position, or if
// the other vehicle is already at/ahead of us, advances this car by 10.
class Car extends Vehicle {
  var passengers = 0;
  def await(v: Vehicle) {
    if (v.position < position)
      v.move(position - v.position)
    else
      move(10)
  }
}
// Truck: for d <= 55 it advances normally; otherwise position is SET to 55
// (not advanced). NOTE(review): looks odd, but this fixture's expected
// output exercises exactly this override via dynamic dispatch — keep as is.
class Truck extends Vehicle {
  var load = 0;
  override def move(d: Int) {
    position = if (d <= 55) position + d else 55
  }
}
// Driver: prints positions after moves through both static Car references
// and a Vehicle-typed reference, checking dynamic dispatch of move/await.
object Main {
  def main(args: Array[String]) {
    val t = new Truck();
    val c = new Car();
    c.passengers = 2;
    print(c.position); print("\\n");
    c.move(60);
    print(c.position); print("\\n");
    val v: Vehicle = c;
    v.move(70);
    print(c.position); print("\\n");
    c.await(t);
    print(t.position); print("\\n");
    print(c.position); print("\\n")
  }
}
| tobast/compil-petitscala | tests/exec/vehicles.scala | Scala | gpl-3.0 | 792 |
package controllers
import javax.inject.Singleton
import models.DBName
import models.message.{AddThridParty, CheckPassword, UpdatePassword}
import org.slf4j.{Logger, LoggerFactory}
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.json._
import play.api.mvc._
import play.modules.reactivemongo.MongoController
import play.modules.reactivemongo.json.collection.JSONCollection
import reactivemongo.bson.BSONObjectID
import scala.concurrent.Future
/**
* Controller for processing request associated with streams.
*/
@Singleton
class Streams extends Controller with MongoController {
  private final val logger: Logger = LoggerFactory.getLogger(classOf[Streams])
  /** Stream collection. */
  def collection: JSONCollection = db.collection[JSONCollection](DBName.stream)
  /** Raw Stream collection. */
  def rawcollection: JSONCollection = db.collection[JSONCollection](DBName.rawStream)
  // ------------------------------------------ //
  // Using case classes + Json Writes and Reads //
  // ------------------------------------------ //
  import models.JsonFormats._
  import models._
  /**
   * Gets the stream with the specific id (matched against `_id`).
   * Returns Ok with the [[Stream]] as JSON on success, BadRequest when no
   * stream with that id exists.
   */
  def getStream(id:String) = Action.async {
    collection.find(Json.obj("_id" -> id)).one[Stream].map( _.map( stream =>
      Ok(Json.toJson(stream))
    ) getOrElse BadRequest("Stream not found")
    )
  }
  /**
   * Gets the raw stream with the specific id (matched against `_id`).
   * Returns Ok with the [[RawStream]] as JSON on success, BadRequest when no
   * raw stream with that id exists.
   */
  def getRawStream(id:String) = Action.async {
    rawcollection.find(Json.obj("_id" -> id)).one[RawStream].map( _.map( raw =>
      Ok(Json.toJson(raw))
    ) getOrElse BadRequest("Raw Stream not found")
    )
  }
}
| calvinsadewa/backend | app/controllers/Streams.scala | Scala | apache-2.0 | 1,792 |
package controllers
/**
* Created by gbecan on 9/23/15.
*/
/**
 * Requested rendering format for a result.
 * Sealed: all variants live in this file, so pattern matches over
 * [[ResultFormat]] get compiler exhaustiveness checking.
 */
sealed abstract class ResultFormat
/** JSON result format. */
case class JsonFormat() extends ResultFormat
/** Full HTML page result format. */
case class PageFormat() extends ResultFormat
/** Embeddable fragment result format. */
case class EmbedFormat() extends ResultFormat
| gbecan/OpenCompare | org.opencompare/play-app/app/controllers/ResultFormat.scala | Scala | apache-2.0 | 227 |
/*
Copyright 2015 GaΓ«tan La Marca
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* @author
*/
package models.entity.permission
import java.sql.Date
import play.api.db.slick.Profile
/**
 * Slick mapping for the PERMISSION table. The self-type requires the cake
 * to provide the database [[Profile]], keeping the mapping driver-independent.
 */
trait PermissionComponent { this : Profile =>
  import profile.simple._
  /** Table definition: one row per [[Permission]]. */
  class Permissions(tag: Tag) extends Table[(Permission)](tag, "PERMISSION") {
    def id: Column[Int] = column[Int]("ID", O.PrimaryKey, O.AutoInc)
    def name : Column[String] = column[String] ("NAME",O.NotNull)
    def refName : Column[String] = column[String] ("REF_NAME",O.NotNull)
    def creationDate : Column[Date] = column[Date]("CREATION_DATE",O.NotNull)
    def description : Column[String] = column[String]("DESCRIPTION",O.Nullable)
    def modificationDate : Column[Date] = column[Date]("MODIFICATION_DATE",O.NotNull)
    def updatingUser : Column[String] = column[String]("UPDATING_USER",O.NotNull)
    def applicationId : Column[Int] = column[Int]("APPLICATION_ID",O.NotNull)
    // Bidirectional projection between the column tuple and Permission.
    override def * = (id.?,name,refName,creationDate,description.?,modificationDate,updatingUser,applicationId) <>(Permission.tupled, Permission.unapply)
  }
}
case class Permission(id : Option[Int],name : String,refName : String,creationDate : Date,description : Option[String],modificationDate : Date,updatingUser : String,applicationId : Int)
| glamarca/cuam | app/models/entity/permission/Permission.scala | Scala | apache-2.0 | 1,789 |
package scala.lms
package common
import java.io.PrintWriter
import scala.reflect.SourceContext
/** User-facing interface: staged division for Fractional element types. */
trait FractionalOps extends ImplicitOps {
  // `/` on a Rep[T] with any rhs implicitly convertible to T; the rhs is
  // lifted via implicit_convert before delegating to fractional_divide.
  def infix_/[A,T](lhs: Rep[T], rhs: Rep[A])(implicit c: A => T, f: Fractional[T], mA: Manifest[A], mT: Manifest[T], pos: SourceContext) = fractional_divide(lhs,implicit_convert[A,T](rhs))
  def fractional_divide[T:Fractional:Manifest](lhs: Rep[T], rhs: Rep[T])(implicit pos: SourceContext): Rep[T]
}
/** IR layer: represents division as a FractionalDivide node and mirrors it. */
trait FractionalOpsExp extends FractionalOps with ImplicitOpsExp with EffectExp {
  // Carries the Fractional evidence and manifest as vals so that mirroring
  // can re-create the node with the original implicits.
  case class FractionalDivide[T](lhs: Exp[T], rhs: Exp[T])(implicit val f: Fractional[T], val mT: Manifest[T]) extends Def[T]
  def fractional_divide[T:Fractional:Manifest](lhs: Exp[T], rhs: Exp[T])(implicit pos: SourceContext) : Rep[T] = FractionalDivide(lhs, rhs)
  // Rebuilds pure and effectful divide nodes under the transformer `f`,
  // reusing the evidence stored on the original node.
  override def mirror[A:Manifest](e: Def[A], f: Transformer)(implicit pos: SourceContext): Exp[A] = (e match {
    case e@FractionalDivide(a,b) => fractional_divide(f(a),f(b))(e.f.asInstanceOf[Fractional[A]],mtype(e.mT),pos)
    case Reflect(e@FractionalDivide(a,b), u, es) => reflectMirrored(Reflect(FractionalDivide(f(a),f(b))(e.f.asInstanceOf[Fractional[A]],mtype(e.mT)), mapOver(f,u), f(es)))(mtype(manifest[A]), pos)
    case _ => super.mirror(e,f)
  }).asInstanceOf[Exp[A]]
}
/** Scala code generation: FractionalDivide emits `val sym = a / b`. */
trait ScalaGenFractionalOps extends ScalaGenBase {
  val IR: FractionalOpsExp
  import IR._

  override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
    case FractionalDivide(a,b) => emitValDef(sym, src"$a / $b")
    case _ => super.emitNode(sym, rhs)
  }
}
// Shared C-like (CUDA/OpenCL/C) code generation for fractional division.
trait CLikeGenFractionalOps extends CLikeGenBase {
  val IR: FractionalOpsExp
  import IR._

  // Emit `sym = a / b`; every other node is deferred to the parent generator.
  override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
    case FractionalDivide(a, b) => emitValDef(sym, src"$a / $b")
    case _                      => super.emitNode(sym, rhs)
  }
}
/** CUDA backend: reuses the C-like emitter. */
trait CudaGenFractionalOps extends CudaGenBase with CLikeGenFractionalOps
/** OpenCL backend: reuses the C-like emitter. */
trait OpenCLGenFractionalOps extends OpenCLGenBase with CLikeGenFractionalOps
/** C backend: reuses the C-like emitter. */
trait CGenFractionalOps extends CGenBase with CLikeGenFractionalOps
| scalan/virtualization-lms-core | src/common/FractionalOps.scala | Scala | bsd-3-clause | 2,089 |
package org.jetbrains.plugins.scala.finder
import com.intellij.openapi.project.Project
import com.intellij.psi.search.GlobalSearchScope
import org.jetbrains.plugins.scala.base.SimpleTestCase
import org.junit.Assert
/** Verifies equals/hashCode of the FilterScope wrappers. */
class FilterScopesTest extends SimpleTestCase {
  // All scope factories take the project implicitly.
  private implicit def getProject: Project = fixture.getProject
  // Scopes built from the same delegate (and, for worksheet scopes, the same
  // file) must compare equal and hash identically.
  def testEqualsAndHashCode(): Unit = {
    val delegate = GlobalSearchScope.projectScope(getProject)
    val scalaFilterScope1 = ScalaFilterScope(delegate)
    val scalaFilterScope2 = ScalaFilterScope(delegate)
    val sourceFilterScope1 = SourceFilterScope(delegate)
    val sourceFilterScope2 = SourceFilterScope(delegate)
    val resolveFilterScope1 = ResolveFilterScope(delegate)
    val resolveFilterScope2 = ResolveFilterScope(delegate)
    val file = fixture.addFileToProject("worksheet.sc", "2 + 2")
    val worksheetResolveFilterScope1 = WorksheetResolveFilterScope(delegate, file.getVirtualFile)
    val worksheetResolveFilterScope2 = WorksheetResolveFilterScope(delegate, file.getVirtualFile)
    checkEqualsAndHashcode(scalaFilterScope1, scalaFilterScope2)
    checkEqualsAndHashcode(sourceFilterScope1, sourceFilterScope2)
    checkEqualsAndHashcode(resolveFilterScope1, resolveFilterScope2)
    checkEqualsAndHashcode(worksheetResolveFilterScope1, worksheetResolveFilterScope2)
  }
  // Scopes of different kinds, different delegates, or different worksheet
  // files must all be pairwise distinct.
  def testNotEqualsAndHashCode(): Unit = {
    val worksheetFile1 = fixture.addFileToProject("worksheet1.sc", "2 + 2")
    val worksheetFile2 = fixture.addFileToProject("worksheet2.sc", "2 + 2")
    val scalaFile = fixture.addFileToProject("A.scala", "class A {}")
    val delegate1 = GlobalSearchScope.projectScope(getProject)
    val delegate2 = GlobalSearchScope.fileScope(scalaFile)
    val uniqueScopes = Seq(
      ScalaFilterScope(delegate1),
      ScalaFilterScope(delegate2),
      SourceFilterScope(delegate1),
      SourceFilterScope(delegate2),
      ResolveFilterScope(delegate1),
      ResolveFilterScope(delegate2),
      WorksheetResolveFilterScope(delegate1, worksheetFile1.getVirtualFile),
      WorksheetResolveFilterScope(delegate1, worksheetFile2.getVirtualFile),
      WorksheetResolveFilterScope(delegate2, worksheetFile1.getVirtualFile),
      WorksheetResolveFilterScope(delegate2, worksheetFile2.getVirtualFile),
    )
    // Compare every unordered pair exactly once.
    for {
      i <- uniqueScopes.indices
      j <- i + 1 until uniqueScopes.length
    } checkNotEqualsAndHashcode(uniqueScopes(i), uniqueScopes(j))
  }
  private def checkEqualsAndHashcode(scope1: FilterScope, scope2: FilterScope): Unit = {
    Assert.assertEquals("Scopes should be equal", scope1, scope2)
    Assert.assertEquals("Hashcodes should be equal", scope1.hashCode(), scope2.hashCode())
  }
  private def checkNotEqualsAndHashcode(scope1: FilterScope, scope2: FilterScope): Unit = {
    Assert.assertNotEquals("Scopes should not be equal", scope1, scope2)
    Assert.assertNotEquals("Hashcodes should not be equal", scope1.hashCode(), scope2.hashCode())
  }
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/finder/FilterScopesTest.scala | Scala | apache-2.0 | 2,955 |
/*
* CorrelatorCore.scala
* (LeereNull)
*
* Copyright (c) 2011-2014 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.leerenull
import de.sciss.leerenull.CorrelatorSelector.Search
import de.sciss.processor.Processor
import de.sciss.span.Span
import de.sciss.swingplus.GroupPanel
import de.sciss.strugatzki.{FeatureCorrelation, FeatureExtraction}
import FeatureCorrelation.{Match, ConfigBuilder => CSettingsBuilder}
import FeatureExtraction.{Config => ESettings}
import de.sciss.app.AbstractCompoundEdit
import de.sciss.kontur.session.{MatrixDiffusion, AudioTrack, AudioFileElement, FadeSpec, AudioRegion, Session, BasicTimeline}
import java.io.File
import de.sciss.synth.io.AudioFile
import scala.util.{Failure, Success}
import xml.{Node, NodeSeq}
object CorrelatorCore extends GUIGoodies with KonturGoodies with NullGoodies {
   object Transform {
      /**
       * Restores a [[Transform]] from the XML written by `Transform.toXML`.
       * A missing/empty <type> or "none" yields `TransformNone`.
       * NOTE(review): any other type string throws a MatchError — confirm
       * callers only feed XML produced by `toXML`.
       */
      def fromXML( n: NodeSeq ) : Transform = {
         (n \\ "type").text match {
            case "shift" => TransformShift.fromXML( n )
            case "resample" => TransformResample.fromXML( n )
            case "" => TransformNone
            case "none" => TransformNone
         }
      }
   }
   /** An offline audio transform applied to match material before/after correlation. */
   sealed trait Transform {
      /** Optional FScape rendering step: given (input, output) files, returns a
        * function taking a completion callback (success flag). None = no rendering. */
      def fscapeOption: Option[ (File, File) => (Boolean => Unit) => Unit ]
      /** Suffix appended to bounced file names to identify this transform. */
      def fileID: String
      /** Factor by which the transform scales time (1.0 = unchanged). */
      def timeScale : Double
      /** The transform that undoes this one. */
      def inverse: Transform
      final def toXML : Node = <transform>{toInnerXML}</transform>
      protected def toInnerXML : NodeSeq
   }
   /** Identity transform: no rendering, no file suffix, its own inverse. */
   case object TransformNone extends Transform {
      def fscapeOption = None
      def inverse = this
      def fileID = ""
      def timeScale = 1.0
      protected def toInnerXML = <type>none</type>
   }
   object TransformShift {
      /** Restores a shift transform from XML (inverse of `toInnerXML`). */
      def fromXML( n: NodeSeq ) : TransformShift = {
         TransformShift( (n \\ "amount").text.toDouble )
      }
   }
   /** @param amount shift amount in Hertz */
   final case class TransformShift( amount: Double ) extends Transform {
      def fileID = "_Hlb" + amount.toInt
      def inverse = copy( amount = -amount )
      def fscapeOption = Some( FScape.shift( amount ) _ )
      // Frequency shifting does not change duration.
      def timeScale = 1.0
      protected def toInnerXML = <type>shift</type><amount>{amount}</amount>
   }
   object TransformResample {
      /** Restores a resample transform from XML (inverse of `toInnerXML`). */
      def fromXML( n: NodeSeq ) : TransformResample = {
         TransformResample( (n \\ "amount").text.toDouble )
      }
   }
   /** @param amount resampling amount in semintone cents */
   final case class TransformResample( amount: Double ) extends Transform {
      def fileID = "_Rsmp" + amount.toInt
      def inverse = copy( amount = -amount )
      def fscapeOption = Some( FScape.resample( amount ) _ )
      // Pitching up by `amount` cents shortens durations: 2^(-amount/1200).
      def timeScale = math.pow( 2, amount / -1200 )
      protected def toInnerXML = <type>resample</type><amount>{amount}</amount>
   }
   /**
    * Opens a match editor for `search.matches(idx)`. When the search carries
    * an invertible transform, the matched file is first (optionally) cut to a
    * span around the punch and rendered through FScape; the match is then
    * replaced by one pointing at the rendered file before the editor opens.
    * Results are cached in LeereNull.bounceFolder (skipped if already there).
    */
   def makeMatchEditor( search: Search, idx: Int )( implicit doc: Session ): Unit = {
      search.transform.inverse.fscapeOption match {
         case Some( fsc ) =>
            val m = search.matches( idx )
            val spec = AudioFile.readSpec( m.file )
            val wholeSpan = Span( 0L, spec.numFrames )
            // Punch span padded by 176400 frames on each side (presumably
            // 4 s at 44.1 kHz — TODO confirm), clipped to the file bounds.
            val truncSpan = Span( math.max( wholeSpan.start, m.punch.start - 176400L ), math.min( wholeSpan.stop, m.punch.stop + 176400L ))
            val trunc = truncSpan != wholeSpan
            // Cache-file name encodes the cut span and the transform.
            val fName = plainName( m.file ) + (if( trunc ) "_" + m.punch.start + "_" + m.punch.stop else "") +
               search.transform.fileID + ".aif"
            val fTrns = new File( LeereNull.bounceFolder, fName )
            // After rendering: re-point the match at the rendered file, with
            // the punch re-based to the cut span's origin.
            def trnsDone(): Unit = {
               val m2 = m.copy( file = fTrns, punch = m.punch.shift( -truncSpan.start ))
               val s2 = search.copy( matches = search.matches.patch( idx, IndexedSeq( m2 ), 1 ))
               makeMatchEditor2( s2, idx )
            }
            def runTrns( f: File ): Unit = {
//               FScape.shift( f, fTrns, -freq ) { b =>
//                  if( b ) trnsDone()
//               }
               fsc( f, fTrns )( if( _ ) trnsDone() )
            }
            if( fTrns.isFile ) trnsDone() else {
               if( trunc ) {
                  // Cut the padded span to a temp file first, with progress UI.
                  val truncFile = File.createTempFile( "trunc", ".aif" )
                  truncFile.deleteOnExit()
                  val dlg = progressDialog( "Extracting span" )
                  val cutc = AudioFileCutter.Config(m.file, truncFile, truncSpan)
                  val cutter = AudioFileCutter(cutc)
                  cutter.addListener {
                     case Processor.Result(_, Success(())) =>
                        dlg.stop()
                        runTrns( truncFile )
                     case Processor.Result(_, Failure( e )) =>
                        dlg.stop()
                        e.printStackTrace()
                     case prog @ Processor.Progress(_, _) => dlg.progress = prog.toInt
                  }
                  dlg.start( cutter )
               } else runTrns( m.file )
            }
         case None => makeMatchEditor2( search, idx )
      }
   }
private def makeMatchEditor2( search: Search, idx: Int )( implicit doc: Session ): Unit = {
val tls = doc.timelines
val set = search.settings
// val itrns = search.transform.inverse
def frames( afe: AudioFileElement, secs: Double ) = (secs * afe.sampleRate + 0.5).toLong
def fromInputRate( sp: Span ) =
Span( (sp.start / search.transform.timeScale + 0.5).toLong,
(sp.stop / search.transform.timeScale + 0.5).toLong )
val m = search.matches( idx )
val mPunch = fromInputRate( m.punch )
val meta = ESettings.fromXMLFile( set.metaInput )
var ar1Off = -1L
var incorpOff = -1L
var splitPos = -1L
var afe1 : AudioFileElement = null
val vowels = "aeiouAEIOU".toSet
def regionName( id: String, afe: AudioFileElement, pre: String = "$" ) = {
val n1 = plainName( afe.path ) // .filter( _.isLetterOrDigit )
var n2 = n1; while( n2.size > 20 ) {
val i = n2.indexWhere( vowels.contains)
if( i >= 0 ) n2 = n2.substring( 0, i ) + n2.substring( i + 1 )
else n2 = n2.take( 20 )
}
pre + n2 + "_" + id
}
implicit val tl = tls.tryEdit[ BasicTimeline ]( "Add Matcher Timeline" ) { ce0: AbstractCompoundEdit =>
implicit val ce = ce0 // all because IDEA sucks
afe1 = provideAudioFile( meta.audioInput )
var arsStereo = IndexedSeq.empty[ AudioRegion ]
var arsLeft = IndexedSeq.empty[ AudioRegion ]
var arsRight = IndexedSeq.empty[ AudioRegion ]
val pi = set.punchIn
val piSpan = fromInputRate( pi.span )
val fOff1 = math.max( 0L, piSpan.start - frames( afe1, 10 ))
val start1 = 0L
val pre1 = piSpan.start - fOff1
val maxLen1 = set.punchOut.map( p => fromInputRate( p.span ).start ).getOrElse( afe1.numFrames ) - fOff1
val len1 = math.min( maxLen1, piSpan.stop - fOff1 + frames( afe1, 1 ))
val stop1 = start1 + len1
val fadeIn1 = FadeSpec( math.min( pre1, frames( afe1, 1 )))
val fadeOut1= FadeSpec( math.min( len1 - pre1, frames( afe1, 1 )))
val ar1 = AudioRegion( Span( start1, stop1 ), regionName( "pre", afe1, pre = "" ), afe1, fOff1,
fadeIn = Some( fadeIn1 ), fadeOut = Some( fadeOut1 ))
arsStereo :+= ar1
ar1Off = start1 - fOff1
incorpOff = search.offset - ar1Off
splitPos = ar1.span.stop + incorpOff
set.punchOut.foreach { po =>
val poSpan = fromInputRate( po.span )
val fOff2 = math.max( fOff1 + len1, poSpan.start - frames( afe1, 1 ))
val pre2 = poSpan.start - fOff2
// start1 + pre1 = timeline spot where punch begins
val start2 = start1 + pre1 + mPunch.length - pre2
val maxLen2 = afe1.numFrames - fOff2
val len2 = math.min( maxLen2, pre2 + poSpan.length + frames( afe1, 10 ))
val stop2 = start2 + len2
val fadeIn2 = FadeSpec( math.min( pre2, frames( afe1, 1 )))
val fadeOut2 = FadeSpec( math.min( len2 - pre2, frames( afe1, 1 )))
val ar2 = AudioRegion( Span( start2, stop2 ), regionName( "post", afe1, pre = "" ), afe1, fOff2,
fadeIn = Some( fadeIn2 ), fadeOut = Some( fadeOut2 ))
arsStereo :+= ar2
}
def matchRegions( m: Match, pre: String ) : IndexedSeq[ AudioRegion ] = {
val afe2 = provideAudioFile( m.file )
val fOff3 = mPunch.start
val start3 = start1 + pre1
val maxLen3 = afe2.numFrames - fOff3
// merge if gain difference is less than 6 dB
val split3 = set.punchOut.isDefined && (math.max( m.boostOut, m.boostIn ) / math.min( m.boostOut, m.boostIn )) > 2
val boost3 = if( !split3 && set.punchOut.isDefined ) math.sqrt( m.boostOut * m.boostIn ).toFloat else m.boostIn
val fdt3 = mPunch.length / 4
val len3 = if( split3 ) (mPunch.length / 2) + fdt3 else math.min( maxLen3, mPunch.length + frames( afe2, 1 ))
val stop3 = start3 + len3
val fade3 = FadeSpec( fdt3 )
val arStart = AudioRegion( Span( start3, stop3 ), regionName( if( split3 ) "pin" else "punch", afe2, pre = pre ),
afe2, fOff3,
gain = boost3, fadeIn = Some( fade3 ), fadeOut = Some( fade3 ))
//println( "ar3 : " + ar3 + " ; OFFSET = " + fOff3 + " ; m.punch = " + m.punch )
set.punchOut match {
case Some( po ) if split3 =>
val start4 = stop3 - fdt3
val fOff4 = fOff3 + start4 - start3
val len4 = math.min( afe2.numFrames - fOff4, mPunch.length - (start4 - start3) + frames( afe2, 1 ))
val stop4 = start4 + len4
val fade4 = FadeSpec( fdt3 )
val boost4 = m.boostOut
val arStop = AudioRegion( Span( start4, stop4 ), regionName( "pout", afe2, pre = pre ), afe2, fOff4,
gain = boost4, fadeIn = Some( fade4 ), fadeOut = Some( fade4 ))
IndexedSeq( arStart, arStop )
case _ => IndexedSeq( arStart )
}
}
search.master match {
case Some( m2 ) =>
arsLeft ++= matchRegions( m, "$L_" )
arsRight ++= matchRegions( m2, "$R_" )
case None =>
arsStereo ++= matchRegions( m, "$_" )
}
implicit val tl = BasicTimeline.newEmpty( doc )
tl.span = Span( 0L, (arsStereo ++ arsLeft ++ arsRight).map( _.span.stop ).max )
tl.name = uniqueName( tls, "$Matcher" )
tls.editInsert( ce, tls.size, tl )
var trackMap = Map.empty[ AudioTrack, IndexedSeq[ AudioRegion ]]
arsStereo.foreach { ar =>
val tr = placeStereo( ar, diffPrefix = "$", more = trackMap )
trackMap += tr -> (trackMap.getOrElse( tr, IndexedSeq.empty ) :+ ar)
}
arsLeft.foreach { ar =>
val tr = placeLeft( ar, diffPrefix = "$", more = trackMap )
trackMap += tr -> (trackMap.getOrElse( tr, IndexedSeq.empty ) :+ ar)
}
arsRight.foreach { ar =>
val tr = placeRight( ar, diffPrefix = "$", more = trackMap )
trackMap += tr -> (trackMap.getOrElse( tr, IndexedSeq.empty ) :+ ar)
}
tl
}
val tlf = TimelineFrame2 { f =>
// println( "Bye..." )
// f.dispose()
}
val butFlipChans = button( "Flip left/right" ) { b =>
val (trL, trR) = tl.tracks.toList.collect({
case at: AudioTrack if at.name.contains("-L") || at.name.contains("-R") => (at, at.diffusion)
}).collect({
case (at, Some( m: MatrixDiffusion )) => (at, m)
}).partition( _._1.name.contains( "-L" ))
if( trL.nonEmpty || trR.nonEmpty ) tls.joinEdit( "Flip chans" ) { implicit ce =>
def gugu( at: AudioTrack, m: MatrixDiffusion, in: String, out: String, diff: Int => MatrixDiffusion ): Unit = {
val nameOld = at.name
val i = nameOld.indexOf( "-" + in )
val nameNew = nameOld.substring( 0, i + 1 ) + out + nameOld.substring( i + 2 )
at.editRename( ce, nameNew )
val d = diff( m.numInputChannels )
at.editDiffusion( ce, Some( d ))
}
trL.foreach {
case (at, m1) => gugu( at, m1, "L", "R", provideRightDiffusion(diffPrefix = "$") )
}
trR.foreach {
case (at, m1) => gugu( at, m1, "R", "L", provideLeftDiffusion(diffPrefix = "$") )
}
}
}
butFlipChans.visible = search.master.isDefined
val butIncorporate = button( "Incorporate" ) { b =>
nonSyntheticTimelines.headOption.foreach { tl0 =>
// that is, collect all regions beginning with "$", remove this prefix,
// apply offset, and paste them to the main timeline
val arsMap = collectAudioRegions({
case (_, ar) if ar.name.startsWith("$") =>
val i = ar.name.indexOf( '_' )
val diff = ar.name.substring( 1, i )
val arNew = ar.copy( name = ar.name.substring( i + 1 ), span = ar.span.shift( incorpOff ))
(diff, arNew)
}).groupBy( _._1 ).mapValues( _.map( _._2 )) // hell, can this be more messy?
if( arsMap.nonEmpty ) {
implicit val tl = tl0
tl.joinEdit[ Unit ]( "Incorporate" ) { ce0: AbstractCompoundEdit =>
implicit val ce = ce0 // sucky IDEA
set.punchOut.foreach { po =>
val poSpan = fromInputRate( po.span )
val piSpan = fromInputRate( set.punchIn.span )
val splitDelta = mPunch.length - (poSpan.start - piSpan.start) // korrekt?
val splitThresh = (0.3 * tl.rate + 0.5).toLong // secsToFrames( 0.3 )
//println( "pos " + splitPos + " ; delta " + splitDelta )
if( splitDelta != 0L ) insertTimelineSpan( splitPos, splitDelta ) {
case (at, ar) =>
val start = ar.span.start
val stop = ar.span.stop
val mid = (start + stop) >> 1
if( splitPos < mid ) {
if( splitPos - start > splitThresh ) InsertSpan.Split
else InsertSpan.Move
} else {
if( stop - splitPos > splitThresh ) InsertSpan.Split
else InsertSpan.Ignore
}
}
}
var trackMap = Map.empty[ AudioTrack, IndexedSeq[ AudioRegion ]]
arsMap.foreach {
case ("", ars) => ars.foreach { ar =>
val tr = placeStereo( ar, more = trackMap )
trackMap += tr -> (trackMap.getOrElse( tr, IndexedSeq.empty ) :+ ar)
}
case ("L", ars) => ars.foreach { ar =>
val tr = placeLeft( ar, more = trackMap )
trackMap += tr -> (trackMap.getOrElse( tr, IndexedSeq.empty ) :+ ar)
}
case ("R", ars) => ars.foreach { ar =>
val tr = placeRight( ar, more = trackMap )
trackMap += tr -> (trackMap.getOrElse( tr, IndexedSeq.empty ) :+ ar)
}
}
}
}
}
}
//println( "incorpOff " + incorpOff + " : tl.rate " + tl.rate )
val lbIncorporate = label( "Regions will be offset by " + timeString( incorpOff, tl.rate ))
// var panelChildren = IndexedSeq[ Component ]( butIncorporate, lbIncorporate )
val butSearchSplit = button( "New search for this punch length" ) { b =>
// Match( sim: Float, file: File, punch: Span, boostIn: Float, boostOut: Float )
// Search: offset
// Settings( databaseFolder: File, metaInput: File, punchIn: Punch, punchOut: Option[Punch],
// minPunch: Long, maxPunch: Long, normalize: Boolean, maxBoost: Float, numMatches: Int,
// numPerFile: Int, minSpacing: Long )
val arIn = AudioRegion( Span( search.offset, search.offset + afe1.numFrames ), afe1.name, afe1, 0L )
val copy = CSettingsBuilder( search.settings )
copy.minPunch = m.punch.length // in input rate!
copy.maxPunch = m.punch.length // in input rate!
CorrelatorSetup.makeSetup( arIn, copy, search.metas, Some( m ), search.transform )
}
butSearchSplit.visible = search.master.isEmpty
val panel = new GroupPanel {
horizontal = Seq( butIncorporate, lbIncorporate, butSearchSplit, butFlipChans )
vertical = Par( Baseline )( butIncorporate, lbIncorporate, butSearchSplit, butFlipChans )
}
// val bp = new BorderPanel {
// add( panel, BorderPanel.Position.North )
// add( scroll, BorderPanel.Position.South )
// }
tlf.bottomPanel = Some( panel )
tlf.pack() // AndSetMinimum()
}
} | Sciss/LeereNull | src/main/scala/de/sciss/leerenull/CorrelatorCore.scala | Scala | gpl-3.0 | 17,823 |
package api_client
import io.circe.generic.semiauto._
import io.circe.{Decoder, Encoder}
/** Response envelope for a CampaignService `get` call: request id (`rid`),
  * an optional page of results (`rval`) and optional per-item errors.
  * NOTE(review): presumably mirrors the Yahoo Ads API response shape — confirm
  * against the API spec if fields are added.
  */
case class CampaignServiceGetResponse(rid: String, rval: Option[CampaignServicePage], errors: Option[Seq[Option[Error]]])

object CampaignServiceGetResponse {
  // circe codecs derived from the case-class shape; JSON keys map 1:1 to field names.
  implicit val decoder: Decoder[CampaignServiceGetResponse] = deriveDecoder[CampaignServiceGetResponse]
  implicit val encoder: Encoder[CampaignServiceGetResponse] = deriveEncoder[CampaignServiceGetResponse]
}
| t-mochizuki/scala-study | yahoo/ads-display-api/api-client/src/main/scala/api_client/CampaignServiceGetResponse.scala | Scala | mit | 404 |
package recfun
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class BalanceSuite extends FunSuite {
  import Main.balance

  // Each case converts its input string to a char list and checks the
  // parenthesis-balance verdict returned by Main.balance.

  test("simple balancing") {
    val chars = "()".toList
    assert(balance(chars))
  }

  test("balance: '(if (zero? x) max (/ 1 x))' is balanced") {
    val chars = "(if (zero? x) max (/ 1 x))".toList
    assert(balance(chars))
  }

  test("balance: 'I told him ...' is balanced") {
    val chars = "I told him (that it's not (yet) done).\\n(But he wasn't listening)".toList
    assert(balance(chars))
  }

  test("balance: ':-)' is unbalanced") {
    val chars = ":-)".toList
    assert(!balance(chars))
  }

  test("balance: counting is not enough") {
    // Equal counts of '(' and ')' are not sufficient: order matters.
    val chars = "())(".toList
    assert(!balance(chars))
  }
}
| mateusduboli/coursera-progfun | recfun/src/test/scala/recfun/BalanceSuite.scala | Scala | unlicense | 722 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.impl
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
/**
* This class helps with persisting and checkpointing RDDs.
* Specifically, it automatically handles persisting and (optionally) checkpointing, as well as
* unpersisting and removing checkpoint files.
*
* Users should call update() when a new RDD has been created,
* before the RDD has been materialized. After updating [[PeriodicRDDCheckpointer]], users are
* responsible for materializing the RDD to ensure that persisting and checkpointing actually
* occur.
*
* When update() is called, this does the following:
* - Persist new RDD (if not yet persisted), and put in queue of persisted RDDs.
* - Unpersist RDDs from queue until there are at most 3 persisted RDDs.
* - If using checkpointing and the checkpoint interval has been reached,
* - Checkpoint the new RDD, and put in a queue of checkpointed RDDs.
* - Remove older checkpoints.
*
* WARNINGS:
* - This class should NOT be copied (since copies may conflict on which RDDs should be
* checkpointed).
* - This class removes checkpoint files once later RDDs have been checkpointed.
* However, references to the older RDDs will still return isCheckpointed = true.
*
* Example usage:
* {{{
* val (rdd1, rdd2, rdd3, ...) = ...
* val cp = new PeriodicRDDCheckpointer(2, sc)
* rdd1.count();
* // persisted: rdd1
* cp.update(rdd2)
* rdd2.count();
* // persisted: rdd1, rdd2
* // checkpointed: rdd2
* cp.update(rdd3)
* rdd3.count();
* // persisted: rdd1, rdd2, rdd3
* // checkpointed: rdd2
* cp.update(rdd4)
* rdd4.count();
* // persisted: rdd2, rdd3, rdd4
* // checkpointed: rdd4
* cp.update(rdd5)
* rdd5.count();
* // persisted: rdd3, rdd4, rdd5
* // checkpointed: rdd4
* }}}
*
* @param checkpointInterval RDDs will be checkpointed at this interval
* @tparam T RDD element type
*
* TODO: Move this out of MLlib?
*/
private[spark] class PeriodicRDDCheckpointer[T](
    checkpointInterval: Int,
    sc: SparkContext)
  extends PeriodicCheckpointer[RDD[T]](checkpointInterval, sc) {

  /** Mark the RDD for checkpointing; materializing it is the caller's responsibility. */
  override protected def checkpoint(data: RDD[T]): Unit = data.checkpoint()

  /** Whether the RDD has already been checkpointed. */
  override protected def isCheckpointed(data: RDD[T]): Boolean = data.isCheckpointed

  /** Persist at the default storage level, but only when the caller has not
   * already chosen a level — re-persisting an RDD with a set level is illegal
   * in Spark. */
  override protected def persist(data: RDD[T]): Unit = {
    if (data.getStorageLevel == StorageLevel.NONE) {
      data.persist()
    }
  }

  /** Non-blocking unpersist: cached-block removal proceeds asynchronously. */
  override protected def unpersist(data: RDD[T]): Unit = data.unpersist(blocking = false)

  /** The RDD's checkpoint file, if one exists (zero or one element).
   * `getCheckpointFile` already yields an `Option[String]`; convert it
   * explicitly with `toList` instead of the original's redundant identity
   * `.map(x => x)` followed by an implicit Option-to-Iterable conversion. */
  override protected def getCheckpointFiles(data: RDD[T]): Iterable[String] = {
    data.getCheckpointFile.toList
  }
}
| Panos-Bletsos/spark-cost-model-optimizer | mllib/src/main/scala/org/apache/spark/mllib/impl/PeriodicRDDCheckpointer.scala | Scala | apache-2.0 | 3,503 |
/**
* The MIT License (MIT)
*
* Copyright (c) 2014 Matt Fellows (OneGeek)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package au.com.onegeek.sbtdotenv
import java.io.File
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
/**
* Created by mfellows on 20/07/2014.
*/
/** Specification for the `.env` parser in [[SbtDotenv]].
  *
  * Covers file-level behaviour (`parseFile`) and line-level parsing rules
  * (`parse`): quoting, comments, escapes, `export` prefixes, whitespace
  * handling and multi-line values.
  */
class SbtDotenvSpec extends AnyWordSpec with Matchers {
  "The plugin parser" should {
    "do nothing if no .env file exists" in {
      // parseFile returns None (not an empty map) when the file is absent.
      val file = new File("thisfilecannotexistunlessyoucreateit")
      val map = SbtDotenv.parseFile(file)
      map should equal(None)
    }
    "read .env file into an environment Map" in {
      // Fixture file lives under src/test/resources; note the value containing
      // an unexpanded $MONGO_PORT reference and a '#' inside a URL.
      val file = new File("./src/test/resources/.dotenv.valid")
      SbtDotenv.parseFile(file) should equal(Some(Map(
        "EMPTY_VARIABLE" -> "",
        "MONGO_PORT" -> "17017",
        "COVERALLS_REPO_TOKEN" -> "aoeucaPDc2rvkFugUGlNaCGu3EOeoaeu63WLo5",
        "MONGO_URL" -> "http://localhost:$MONGO_PORT/mongo#asdf"
      )))
    }
    "not accept empty lines" in {
      SbtDotenv.parse("") should equal(Map())
    }
    "not accept numeric variable names" in {
      // Variable names must not start with a digit.
      SbtDotenv.parse("1234=5678") should equal(Map())
    }
    "not accept lines with no assignment" in {
      SbtDotenv.parse("F") should equal(Map())
    }
    "accept empty variables" in {
      SbtDotenv.parse("EMPTY=\\nONE=TWO") should equal(Map("EMPTY" -> "","ONE" -> "TWO"))
    }
    "accept unquoted strings containing whitespace" in {
      SbtDotenv.parse("SOMETHING=I love kittens") should equal(Map("SOMETHING" -> "I love kittens"))
    }
    "accept lines with trailing comments" in {
      // '#' outside quotes starts a comment that is stripped from the value.
      SbtDotenv.parse("WITHOUT_COMMENT=ThisIsValue # here is a comment") should equal(Map("WITHOUT_COMMENT" -> "ThisIsValue"))
    }
    "accept lines with URLs containing # characters" in {
      // Quoting protects '#' from being treated as a comment delimiter.
      SbtDotenv.parse("WITH_HASH_URL='http://example.com#awesome-id'") should equal(Map("WITH_HASH_URL" -> "http://example.com#awesome-id"))
    }
    "accept lines with quoted variables and strips quotes" in {
      SbtDotenv.parse("FOO='a=b==ccddd'") should equal(Map("FOO" -> "a=b==ccddd"))
      SbtDotenv.parse("FOO=\\"blah # blah \\r blah \\n blah \\"") should equal(Map("FOO" -> "blah # blah \\r blah \\n blah "))
    }
    "accept lines with whitespace around assignment operator" in {
      SbtDotenv.parse("FOO = boo") should equal(Map("FOO" -> "boo"))
    }
    "accept lines with escaped characters and unescape them" in {
      SbtDotenv.parse("FOO=' \\\\\\' \\\\\\' '") should equal(Map("FOO" -> " \\' \\' "))
    }
    "accept lines with leading whitespace before variable name" in {
      SbtDotenv.parse(" FOO=noo") should equal(Map("FOO" -> "noo"))
    }
    "accept lines with leading export and ignore the export" in {
      // shell-style "export VAR=..." lines are accepted; the keyword is dropped.
      SbtDotenv.parse(" export FOO=noo") should equal(Map("FOO" -> "noo"))
    }
    "accept lines with variables containing undescores, periods, and hyphens" in {
      SbtDotenv.parse(" export F.OO=period") should equal(Map("F.OO" -> "period"))
      SbtDotenv.parse("FO-O=hyphen") should equal(Map("FO-O" -> "hyphen"))
      SbtDotenv.parse("FOO__ = underscore") should equal(Map("FOO__" -> "underscore"))
    }
    "accept multi-line variables" in {
      // A double-quoted value may span several physical lines (e.g. PEM blocks).
      val content = """MY_CERT="-----BEGIN CERTIFICATE-----
                      |123456789qwertyuiopasdfghjklzxcvbnm
                      |-----END CERTIFICATE-----
                      |"
                    """.stripMargin
      SbtDotenv.parse(content) should equal(Map("MY_CERT" -> """-----BEGIN CERTIFICATE-----
                                                               |123456789qwertyuiopasdfghjklzxcvbnm
                                                               |-----END CERTIFICATE-----
                                                               |""".stripMargin))
    }
    "validate correct lines in a .env file" in {
      SbtDotenv.parse("FOO=bar") should equal(Map("FOO" -> "bar"))
      SbtDotenv.parse("FOO=1234") should equal(Map("FOO" -> "1234"))
      SbtDotenv.parse("COVERALLS_REPO_TOKEN=NTHnTHSNthnTHSntNt09aoesNTH6") should equal(Map("COVERALLS_REPO_TOKEN" -> "NTHnTHSNthnTHSntNt09aoesNTH6"))
    }
  }
}
| mefellows/sbt-dotenv | src/test/scala/au/com/onegeek/sbtdotenv/SbtDotenvSpec.scala | Scala | mit | 5,120 |
// Placeholder for exercise 11.18 — the body is intentionally left as `???`,
// which throws scala.NotImplementedError when the object is first evaluated.
object ch11_18 {
  ???
}
import ch11_18._
/*
from repl you can test typing:
:load src/main/scala/fpinscala/ch11/Monad.scala
:load src/main/scala/fpinscala/ch11/Exercise18.scala
*/
| rucka/fpinscala | src/main/scala/fpinscala/ch11/Exercise18.scala | Scala | gpl-2.0 | 180 |
package services;
import play.api.libs.json._
import play.api.libs.functional.syntax._
/** Weather observation for a single location.
 *
 * Field names follow the payload illustrated in the sample JSON comment
 * below (coord.lon/lat, name, main.temp, rain/snow, dt).
 * NOTE(review): units are not established here — the sample temperature
 * looks like Kelvin; confirm against the service populating this class.
 */
case class WeatherInfo(
  lon: Double,
  lat: Double,
  name: String,
  temperature: Double,
  rain: Option[Double], // absent when the payload carries no rain figure
  snow: Option[Double], // absent when the payload carries no snow figure
  datetime: org.joda.time.DateTime // presumably converted from the payload's "dt" UNIX timestamp — TODO confirm at the call site
)
/*
{"coord":{"lon":11.07,"lat":49.45},"weather":[
{"id":500,"main":"Rain","description":"light rain","icon":"10n"}
],"base":"cmc stations",
"main":{"temp":281.913,"pressure":985.08,"humidity":97,"temp_min":281.913,"temp_max":281.913,"sea_level":1040.49,"grnd_level":985.08},
"wind":{"speed":7.53,"deg":295.002},
"rain":{"3h":2.0375},
"clouds":{"all":68},"dt":1447458072,
"sys":{"message":0.0096,"country":"DE","sunrise":1447395859,"sunset":1447428915},"id":2861650,"name":"Nuremberg","cod":200
}
*/
object WeatherInfo {
  // Brings the implicit Writes[DateTime] into scope so Json.writes can
  // serialize the joda DateTime field.
  import utils.JsonUtils.dateTimeWrites
  // Play-JSON serializer macro-derived from the case-class fields.
  implicit val writes: Writes[WeatherInfo] = Json.writes[WeatherInfo]
} | gessulat/hackathon_bmvi | bmvi-backend/app/services/WeatherInfo.scala | Scala | gpl-2.0 | 949 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers._
import org.scalactic.exceptions.NullArgumentException
import org.scalatest.exceptions.TestFailedException
import org.scalatest.exceptions.TestPendingException
import scala.reflect.NameTransformer.encode
import collection.immutable.TreeSet
// SKIP-SCALATESTJS,NATIVE-START
import org.scalatest.refspec.RefSpec
// SKIP-SCALATESTJS,NATIVE-END
import org.scalactic._
/** FunSuite test double whose nested suites are supplied at construction. */
protected[scalatest] class MandarinOrangeFunSuite(ns: Suite*) extends FunSuite {
  override def nestedSuites = ns.toVector
}

/** FunSpec test double whose nested suites are supplied at construction. */
protected[scalatest] class MandarinOrangeFunSpec(ns: Suite*) extends FunSpec {
  override def nestedSuites = ns.toVector
}
// SKIP-SCALATESTJS,NATIVE-START
/** RefSpec test double whose nested suites are supplied at construction. */
protected[scalatest] class MandarinOrangeSpec(ns: Suite*) extends RefSpec {
  override def nestedSuites = ns.toVector
}
// SKIP-SCALATESTJS,NATIVE-END
/** WordSpec test double whose nested suites are supplied at construction. */
protected[scalatest] class MandarinOrangeWordSpec(ns: Suite*) extends WordSpec {
  override def nestedSuites = ns.toVector
}

/** FlatSpec test double whose nested suites are supplied at construction. */
protected[scalatest] class MandarinOrangeFlatSpec(ns: Suite*) extends FlatSpec {
  override def nestedSuites = ns.toVector
}

/** FreeSpec test double whose nested suites are supplied at construction. */
protected[scalatest] class MandarinOrangeFreeSpec(ns: Suite*) extends FreeSpec {
  override def nestedSuites = ns.toVector
}

/** FeatureSpec test double whose nested suites are supplied at construction. */
protected[scalatest] class MandarinOrangeFeatureSpec(ns: Suite*) extends FeatureSpec {
  override def nestedSuites = ns.toVector
}

/** PropSpec test double whose nested suites are supplied at construction. */
protected[scalatest] class MandarinOrangePropSpec(ns: Suite*) extends PropSpec {
  override def nestedSuites = ns.toVector
}
// Named these with a MandarinOrange prefix so they wouldn't confict
// with anything else in the test suite. These need to be top level
// else they end up with dollar signs in the names.
/** Supplies a trivial String fixture to fixture-style suites under test:
  * every test simply receives the value "hi". */
trait MandarinOrangeFixture { this: fixture.TestSuite =>
  type FixtureParam = String
  def withFixture(test: OneArgTest): Outcome = test("hi")
}
/** fixture.FunSuite test double; nested suites supplied at construction. */
protected[scalatest] class MandarinOrangeFixtureFunSuite(ns: Suite*) extends fixture.FunSuite with MandarinOrangeFixture {
  override def nestedSuites = ns.toVector
}

/** fixture.FunSpec test double; nested suites supplied at construction. */
protected[scalatest] class MandarinOrangeFixtureFunSpec(ns: Suite*) extends fixture.FunSpec with MandarinOrangeFixture {
  override def nestedSuites = ns.toVector
}

/** fixture.WordSpec test double; nested suites supplied at construction. */
protected[scalatest] class MandarinOrangeFixtureWordSpec(ns: Suite*) extends fixture.WordSpec with MandarinOrangeFixture {
  override def nestedSuites = ns.toVector
}

/** fixture.FlatSpec test double; nested suites supplied at construction. */
protected[scalatest] class MandarinOrangeFixtureFlatSpec(ns: Suite*) extends fixture.FlatSpec with MandarinOrangeFixture {
  override def nestedSuites = ns.toVector
}

/** fixture.FreeSpec test double; nested suites supplied at construction. */
protected[scalatest] class MandarinOrangeFixtureFreeSpec(ns: Suite*) extends fixture.FreeSpec with MandarinOrangeFixture {
  override def nestedSuites = ns.toVector
}

/** fixture.FeatureSpec test double; nested suites supplied at construction. */
protected[scalatest] class MandarinOrangeFixtureFeatureSpec(ns: Suite*) extends fixture.FeatureSpec with MandarinOrangeFixture {
  override def nestedSuites = ns.toVector
}

/** fixture.PropSpec test double; nested suites supplied at construction. */
protected[scalatest] class MandarinOrangeFixturePropSpec(ns: Suite*) extends fixture.PropSpec with MandarinOrangeFixture {
  override def nestedSuites = ns.toVector
}
// Thin subclasses of Suites/Sequential/Stepwise used to exercise toString with
// user-defined class names; each forwards its varargs unchanged to the parent.
protected[scalatest] class MandarinOrangeSuites(suites: Suite*) extends Suites(suites: _*)
protected[scalatest] class MandarinOrangeSequential(suites: Suite*) extends Sequential(suites: _*)
protected[scalatest] class MandarinOrangeStepwise(suites: Suite*) extends Stepwise(suites: _*)
// SKIP-SCALATESTJS,NATIVE-START
import PrivateMethodTester._
// SKIP-SCALATESTJS,NATIVE-END
class SuiteSpec extends FunSpec {
describe("the toString method on Suites and SuiteLike traits other than TestNGSuiteLike") {
describe("when the suite contains no nested suites") {
it("should return the simple name of the class (and no parens)") {
import prop.TableDrivenPropertyChecks._
val examples =
Table(
( "suite", "simple name"),
( new FunSuite, "AnyFunSuite"),
( new FunSpec, "AnyFunSpec"),
// SKIP-SCALATESTJS,NATIVE-START
( new RefSpec, "RefSpec"),
// SKIP-SCALATESTJS,NATIVE-END
( new WordSpec, "AnyWordSpec"),
( new FlatSpec, "AnyFlatSpec"),
( new FreeSpec, "AnyFreeSpec"),
( new FeatureSpec, "AnyFeatureSpec"),
( new PropSpec, "AnyPropSpec"),
( new MandarinOrangeFunSuite, "MandarinOrangeFunSuite"),
( new MandarinOrangeFunSpec, "MandarinOrangeFunSpec"),
// SKIP-SCALATESTJS,NATIVE-START
( new MandarinOrangeSpec, "MandarinOrangeSpec"),
// SKIP-SCALATESTJS,NATIVE-END
( new MandarinOrangeWordSpec, "MandarinOrangeWordSpec"),
( new MandarinOrangeFlatSpec, "MandarinOrangeFlatSpec"),
( new MandarinOrangeFreeSpec, "MandarinOrangeFreeSpec"),
( new MandarinOrangeFeatureSpec, "MandarinOrangeFeatureSpec"),
( new MandarinOrangePropSpec, "MandarinOrangePropSpec"),
( new MandarinOrangeFixtureFunSuite, "MandarinOrangeFixtureFunSuite"),
( new MandarinOrangeFixtureFunSpec, "MandarinOrangeFixtureFunSpec"),
( new MandarinOrangeFixtureWordSpec, "MandarinOrangeFixtureWordSpec"),
( new MandarinOrangeFixtureFlatSpec, "MandarinOrangeFixtureFlatSpec"),
( new MandarinOrangeFixtureFreeSpec, "MandarinOrangeFixtureFreeSpec"),
( new MandarinOrangeFixtureFeatureSpec, "MandarinOrangeFixtureFeatureSpec"),
( new MandarinOrangeFixturePropSpec, "MandarinOrangeFixturePropSpec"),
// ( new path.FunSpec, "path.FunSpec"),
// ( new path.FreeSpec, "path.FreeSpec"),
( new Suites, "Suites"),
( new Sequential, "Sequential"),
( new Stepwise, "Stepwise"),
( new MandarinOrangeSuites, "MandarinOrangeSuites"),
( new MandarinOrangeSequential, "MandarinOrangeSequential"),
( new MandarinOrangeStepwise, "MandarinOrangeStepwise")
)
forAll (examples) { (suite, simpleName) =>
assert(suite.toString === simpleName)
}
}
}
describe("when the suite contains one nested suite") {
it("should return the simple name of the class and the nested suite toString wrapped in parens") {
import prop.TableDrivenPropertyChecks._
val examples =
Table(
( "suite", "simple name"),
( new MandarinOrangeFunSuite(new FunSuite), "MandarinOrangeFunSuite(AnyFunSuite)"),
( new MandarinOrangeFunSpec(new FunSuite), "MandarinOrangeFunSpec(AnyFunSuite)"),
// SKIP-SCALATESTJS,NATIVE-START
( new MandarinOrangeSpec(new FunSuite), "MandarinOrangeSpec(AnyFunSuite)"),
// SKIP-SCALATESTJS,NATIVE-END
( new MandarinOrangeWordSpec(new FunSuite), "MandarinOrangeWordSpec(AnyFunSuite)"),
( new MandarinOrangeFlatSpec(new FunSuite), "MandarinOrangeFlatSpec(AnyFunSuite)"),
( new MandarinOrangeFreeSpec(new FunSuite), "MandarinOrangeFreeSpec(AnyFunSuite)"),
( new MandarinOrangeFeatureSpec(new FunSuite), "MandarinOrangeFeatureSpec(AnyFunSuite)"),
( new MandarinOrangePropSpec(new FunSuite), "MandarinOrangePropSpec(AnyFunSuite)"),
( new MandarinOrangeFixtureFunSuite(new FunSuite), "MandarinOrangeFixtureFunSuite(AnyFunSuite)"),
( new MandarinOrangeFixtureFunSpec(new FunSuite), "MandarinOrangeFixtureFunSpec(AnyFunSuite)"),
( new MandarinOrangeFixtureWordSpec(new FunSuite), "MandarinOrangeFixtureWordSpec(AnyFunSuite)"),
( new MandarinOrangeFixtureFlatSpec(new FunSuite), "MandarinOrangeFixtureFlatSpec(AnyFunSuite)"),
( new MandarinOrangeFixtureFreeSpec(new FunSuite), "MandarinOrangeFixtureFreeSpec(AnyFunSuite)"),
( new MandarinOrangeFixtureFeatureSpec(new FunSuite), "MandarinOrangeFixtureFeatureSpec(AnyFunSuite)"),
( new MandarinOrangeFixturePropSpec(new FunSuite), "MandarinOrangeFixturePropSpec(AnyFunSuite)"),
// ( new path.FunSpec(new FunSuite), "path.FunSpec(FunSuite)"),
// ( new path.FreeSpec(new FunSuite), "path.FreeSpec(FunSuite)"),
( new Suites(new FunSuite), "Suites(AnyFunSuite)"),
( new Sequential(new FunSuite), "Sequential(AnyFunSuite)"),
( new Stepwise(new FunSuite), "Stepwise(AnyFunSuite)"),
( new MandarinOrangeSuites(new FunSuite), "MandarinOrangeSuites(AnyFunSuite)"),
( new MandarinOrangeSequential(new FunSuite), "MandarinOrangeSequential(AnyFunSuite)"),
( new MandarinOrangeStepwise(new FunSuite), "MandarinOrangeStepwise(AnyFunSuite)")
)
forAll (examples) { (suite, simpleName) =>
assert(suite.toString === simpleName)
}
}
}
describe("when the suite contains more than one nested suite") {
it("should return the simple name of the class and the nested suite toStrings wrapped in parens and separated by commas") {
import prop.TableDrivenPropertyChecks._
val examples =
Table(
( "suite", "simple name"),
( new MandarinOrangeFunSuite(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFunSuite(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeFunSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFunSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
// SKIP-SCALATESTJS,NATIVE-START
( new MandarinOrangeSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
// SKIP-SCALATESTJS,NATIVE-END
( new MandarinOrangeWordSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeWordSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeFlatSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFlatSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeFreeSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFreeSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeFeatureSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFeatureSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangePropSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangePropSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeFixtureFunSuite(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFixtureFunSuite(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeFixtureFunSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFixtureFunSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeFixtureWordSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFixtureWordSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeFixtureFlatSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFixtureFlatSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeFixtureFreeSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFixtureFreeSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeFixtureFeatureSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFixtureFeatureSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeFixturePropSpec(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeFixturePropSpec(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
// ( new path.FunSpec(new PropSpec, new FeatureSpec, new FunSuite), "path.FunSpec(PropSpec, FeatureSpec, FunSuite)"),
// ( new path.FreeSpec(new PropSpec, new FeatureSpec, new FunSuite), "path.FreeSpec(PropSpec, FeatureSpec, FunSuite)"),
( new Suites(new PropSpec, new FeatureSpec, new FunSuite), "Suites(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new Sequential(new PropSpec, new FeatureSpec, new FunSuite), "Sequential(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new Stepwise(new PropSpec, new FeatureSpec, new FunSuite), "Stepwise(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeSuites(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeSuites(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeSequential(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeSequential(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)"),
( new MandarinOrangeStepwise(new PropSpec, new FeatureSpec, new FunSuite), "MandarinOrangeStepwise(AnyPropSpec, AnyFeatureSpec, AnyFunSuite)")
)
forAll (examples) { (suite, simpleName) =>
assert(suite.toString === simpleName)
}
}
}
}
// SKIP-SCALATESTJS,NATIVE-START
describe("The simpleNameForTest method") {
it("should return the correct test simple name with or without Informer") {
val simpleNameForTest = PrivateMethod[String]('simpleNameForTest)
assert((Suite invokePrivate simpleNameForTest("testThis")) === "testThis")
assert((Suite invokePrivate simpleNameForTest("testThis(Informer)")) === "testThis")
assert((Suite invokePrivate simpleNameForTest("test(Informer)")) === "test")
assert((Suite invokePrivate simpleNameForTest("test")) === "test")
}
}
// SKIP-SCALATESTJS,NATIVE-END
describe("A Suite") {
describe("(when its pendingUntilFixed method is invoked)") {
it("should throw TestPendingException if the code block throws an exception") {
intercept[TestPendingException] {
pendingUntilFixed {
assert(1 + 1 === 3)
}
}
}
it("should throw TestFailedException if the code block doesn't throw an exception") {
intercept[TestFailedException] {
pendingUntilFixed {
assert(1 + 2 === 3)
}
}
}
}
}
describe("the stopper") {
it("should stop nested suites from being executed") {
class SuiteA extends Suite {
var executed = false;
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
class SuiteB extends Suite {
var executed = false;
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
class SuiteC extends Suite {
var executed = false;
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
class SuiteD extends Suite {
var executed = false;
override def run(testName: Option[String], args: Args): Status = {
executed = true
val status = super.run(testName, args)
args.stopper.requestStop()
status
}
}
class SuiteE extends Suite {
var executed = false;
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
class SuiteF extends Suite {
var executed = false;
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
class SuiteG extends Suite {
var executed = false;
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
val a = new SuiteA
val b = new SuiteB
val c = new SuiteC
val d = new SuiteD
val e = new SuiteE
val f = new SuiteF
val g = new SuiteG
class IgnoreStopRequestStopper extends Stopper {
def stopRequested: Boolean = false
def requestStop(): Unit = {}
def reset(): Unit = {}
}
val x = Suites(a, b, c, d, e, f, g)
x.run(None, Args(SilentReporter, new IgnoreStopRequestStopper))
assert(a.executed)
assert(b.executed)
assert(c.executed)
assert(d.executed)
assert(e.executed)
assert(f.executed)
assert(g.executed)
val h = new SuiteA
val i = new SuiteB
val j = new SuiteC
val k = new SuiteD
val l = new SuiteE
val m = new SuiteF
val n = new SuiteG
val y = Suites(h, i, j, k, l, m, n)
y.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap.empty, None, new Tracker, Set.empty))
assert(k.executed)
assert(i.executed)
assert(j.executed)
assert(k.executed)
assert(!l.executed)
assert(!m.executed)
assert(!n.executed)
}
}
// SKIP-SCALATESTJS,NATIVE-START
describe("A Suite's execute method") {
it("should throw NAE if passed null for configMap") {
class MySuite extends Suite
intercept[NullArgumentException] {
(new MySuite).execute(configMap = null)
}
}
it("should throw IAE if a testName is passed that does not exist on the suite") {
class MySuite extends Suite
intercept[IllegalArgumentException] {
(new MySuite).execute(testName = "fred")
}
}
}
// SKIP-SCALATESTJS,NATIVE-END
describe("NoArgTest") {
it("should offer a factory method that takes another NoArgTest and a function that implements apply") {
class SideEffectedFixtureWasSpec extends FunSpec {
type FixtureParam = String
var theFixture = ""
var sideEffectedFixtureWas = ""
override def withFixture(test: NoArgTest): Outcome = {
// These will fail the test if the wrapped tests's TestData is not passed through
assert(test.name == "some test")
assert(test.configMap == ConfigMap.empty)
assert(test.scopes == Seq.empty)
assert(test.text == "some test")
assert(test.tags == Set.empty)
theFixture = "hi"
test()
}
it("some test") { sideEffectedFixtureWas = theFixture }
}
val a = new SideEffectedFixtureWasSpec
a.run(None, Args(SilentReporter))
assert(a.sideEffectedFixtureWas === "hi")
class WrappedFixtureSpec extends SideEffectedFixtureWasSpec {
var withFixtureWasCalled = false
override def withFixture(test: NoArgTest): Outcome = {
super.withFixture(
new NoArgTest {
def apply(): Outcome = {
withFixtureWasCalled = true
theFixture = theFixture.toUpperCase
test()
}
val text: String = test.text
val configMap: ConfigMap = test.configMap
val scopes: collection.immutable.IndexedSeq[String] = test.scopes
val name: String = test.name
val tags: Set[String] = test.tags
val pos: Option[source.Position] = test.pos
}
)
}
}
val b = new WrappedFixtureSpec
b.run(None, Args(SilentReporter))
assert(b.sideEffectedFixtureWas === "HI")
class ShorthandWrappedFixtureSpec extends SideEffectedFixtureWasSpec {
var withFixtureWasCalled = false
override def withFixture(test: NoArgTest): Outcome = {
super.withFixture(
NoArgTest(test) {
withFixtureWasCalled = true
theFixture = theFixture.toUpperCase
test()
}
)
}
}
val c = new ShorthandWrappedFixtureSpec
c.run(None, Args(SilentReporter))
assert(c.sideEffectedFixtureWas === "HI")
}
}
describe("Suite's runNestedSuites method") {
it("should fire SuiteAborted event when after function in BeforeAndAfter nested suite throws RuntimeException") {
class NestedSuite extends FunSuite with BeforeAndAfter {
test("test 1") {}
after {
throw new RuntimeException("oops!")
}
}
class ExampleSuite extends Suite {
override def nestedSuites = Vector(new NestedSuite)
}
val suite = new ExampleSuite
val rep = new EventRecordingReporter
suite.run(None, Args(rep))
assert(rep.suiteStartingEventsReceived.length == 1)
assert(rep.suiteCompletedEventsReceived.length == 0)
assert(rep.suiteAbortedEventsReceived.length == 1)
}
it("should fire SuiteAborted event when afterAll function in BeforeAndAfterAll nested suite throws RuntimeException") {
class NestedSuite extends FunSuite with BeforeAndAfterAll {
test("test 1") {}
override protected def afterAll(): Unit = {
throw new RuntimeException("oops!")
}
}
class ExampleSuite extends Suite {
override def nestedSuites = Vector(new NestedSuite)
}
val suite = new ExampleSuite
val rep = new EventRecordingReporter
suite.run(None, Args(rep))
assert(rep.suiteStartingEventsReceived.length == 1)
assert(rep.suiteCompletedEventsReceived.length == 0)
assert(rep.suiteAbortedEventsReceived.length == 1)
}
it("should fire SuiteAborted event when afterAll function in BeforeAndAfterAllConfigMap nested suite throws RuntimeException") {
class NestedSuite extends FunSuite with BeforeAndAfterAllConfigMap {
test("test 1") {}
override protected def afterAll(configMap: ConfigMap): Unit = {
throw new RuntimeException("oops!")
}
}
class ExampleSuite extends Suite {
override def nestedSuites = Vector(new NestedSuite)
}
val suite = new ExampleSuite
val rep = new EventRecordingReporter
suite.run(None, Args(rep))
assert(rep.suiteStartingEventsReceived.length == 1)
assert(rep.suiteCompletedEventsReceived.length == 0)
assert(rep.suiteAbortedEventsReceived.length == 1)
}
it("should fire SuiteAborted event when afterAll function in BeforeAndAfterEach nested suite throws RuntimeException") {
class NestedSuite extends FunSuite with BeforeAndAfterEach {
test("test 1") {}
override protected def afterEach(): Unit = {
throw new RuntimeException("oops!")
}
}
class ExampleSuite extends Suite {
override def nestedSuites = Vector(new NestedSuite)
}
val suite = new ExampleSuite
val rep = new EventRecordingReporter
suite.run(None, Args(rep))
assert(rep.suiteStartingEventsReceived.length == 1)
assert(rep.suiteCompletedEventsReceived.length == 0)
assert(rep.suiteAbortedEventsReceived.length == 1)
}
it("should fire SuiteAborted event when afterAll function in BeforeAndAfterEachTestData nested suite throws RuntimeException") {
class NestedSuite extends FunSuite with BeforeAndAfterEachTestData {
test("test 1") {}
override protected def afterEach(test: TestData): Unit = {
throw new RuntimeException("oops!")
}
}
class ExampleSuite extends Suite {
override def nestedSuites = Vector(new NestedSuite)
}
val suite = new ExampleSuite
val rep = new EventRecordingReporter
suite.run(None, Args(rep))
assert(rep.suiteStartingEventsReceived.length == 1)
assert(rep.suiteCompletedEventsReceived.length == 0)
assert(rep.suiteAbortedEventsReceived.length == 1)
}
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/SuiteSpec.scala | Scala | apache-2.0 | 24,438 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.