code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package com.rasterfoundry.database
import com.rasterfoundry.common.Generators.Implicits._
import com.rasterfoundry.common.SceneWithProjectIdLayerId
import com.rasterfoundry.database.Implicits._
import com.rasterfoundry.datamodel._
import cats.implicits._
import doobie._, doobie.implicits._
import doobie.postgres.implicits._
import org.scalacheck.Prop.forAll
import org.scalatestplus.scalacheck.Checkers
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
/** Property-based integration tests for `SceneToLayerDao`, executed against a
  * real database through the transactor `xa` supplied by `DBTestConfig`.
  * Generators for the `*.Create` inputs come from `Generators.Implicits`.
  */
class SceneToLayerDaoSpec
    extends AnyFunSuite
    with Matchers
    with Checkers
    with DBTestConfig
    with PropTestHelpers {

  test("Insert scenes to a project and accept them") {
    check {
      forAll {
        (
            user: User.Create,
            org: Organization.Create,
            platform: Platform,
            project: Project.Create,
            scenes: List[Scene.Create],
            dsCreate: Datasource.Create
        ) =>
          {
            // Build one transaction that: inserts the user/org/platform/project,
            // creates a datasource, inserts the generated scenes, attaches them
            // (unaccepted) to the project's default layer, accepts them, and
            // finally reads back the scene-to-layer rows for that layer.
            val acceptedSceneAndStlIO = for {
              (dbUser, _, _, dbProject) <- insertUserOrgPlatProject(
                user,
                org,
                platform,
                project
              )
              datasource <- DatasourceDao.create(
                dsCreate.toDatasource(dbUser),
                dbUser
              )
              scenesInsert <- (scenes map {
                fixupSceneCreate(dbUser, datasource, _)
              }).traverse(
                (scene: Scene.Create) => SceneDao.insert(scene, dbUser)
              )
              _ <- ProjectDao.addScenesToProject(
                scenesInsert map { _.id },
                dbProject.id,
                dbProject.defaultLayerId,
                false // do not auto-accept; acceptance is exercised below
              )
              acceptedSceneCount <- SceneToLayerDao.acceptScenes(
                dbProject.defaultLayerId,
                scenesInsert map { _.id }
              )
              stls <- SceneToLayerDao.query
                .filter(fr"project_layer_id = ${dbProject.defaultLayerId}")
                .list
            } yield (acceptedSceneCount, stls)
            val (acceptedSceneCount, stls) =
              acceptedSceneAndStlIO.transact(xa).unsafeRunSync
            // Every inserted scene must be attached to the layer and accepted.
            acceptedSceneCount == scenes.length &&
            stls.length == scenes.length &&
            stls.filter(_.accepted).length == scenes.length
          }
      }
    }
  }

  test("Verify scenes are returned in correct order for mosaic definition") {
    check {
      forAll {
        (
            user: User.Create,
            org: Organization.Create,
            platform: Platform,
            project: Project.Create,
            scenes: List[Scene.Create],
            dsCreate: Datasource.Create
        ) =>
          {
            // Insert scenes, set an explicit manual ordering on the layer, then
            // fetch the mosaic definition restricted to a subset of scene ids.
            val mdAndStpsIO = for {
              (dbUser, _, _, dbProject) <- insertUserOrgPlatProject(
                user,
                org,
                platform,
                project
              )
              datasource <- DatasourceDao.create(
                dsCreate.toDatasource(dbUser),
                dbUser
              )
              scenesInsert <- (scenes map {
                fixupSceneCreate(dbUser, datasource, _)
              }).traverse(
                (scene: Scene.Create) => SceneDao.insert(scene, dbUser)
              )
              // Only the first two inserted scenes are queried for, to check
              // that subsetting preserves the manual order.
              selectedSceneIds = scenesInsert.take(2) map { _.id }
              _ <- ProjectDao.addScenesToProject(
                scenesInsert map { _.id },
                dbProject.id,
                dbProject.defaultLayerId,
                false
              )
              _ <- SceneToLayerDao.setManualOrder(
                dbProject.defaultLayerId,
                scenesInsert map { _.id }
              )
              mds <- SceneToLayerDao
                .getMosaicDefinition(
                  dbProject.defaultLayerId,
                  None,
                  sceneIdSubset = selectedSceneIds
                )
              stls <- SceneToLayerDao.query
                .filter(fr"project_layer_id = ${dbProject.defaultLayerId}")
                .filter(selectedSceneIds.toNel map {
                  Fragments.in(fr"scene_id", _)
                })
                .list
            } yield (mds, stls, selectedSceneIds)
            val (mds, stls, _) =
              mdAndStpsIO.transact(xa).unsafeRunSync
            // Mapping of scene ids to scene order
            val sceneMap =
              stls.map(s => (s.sceneId, s.sceneOrder.getOrElse(-1))).toMap
            // List of scene orders, ordered by the mosaic definitions
            val sceneOrders = mds.map(md => sceneMap.getOrElse(md.sceneId, -1))
            // If the scenes are returned in the correct order,
            // the scene orders of the mosaic definitions will be in order
            sceneOrders.sameElements(sceneOrders.sorted)
          }
      }
    }
  }

  test("Get layer ID and project ID of a scene") {
    check {
      forAll {
        (
            user: User.Create,
            org: Organization.Create,
            platform: Platform,
            projectCreate: Project.Create,
            scene: Scene.Create,
            dsCreate: Datasource.Create
        ) =>
          {
            // Attach a single scene (auto-accepted via `true`) and verify the
            // reverse lookup from scene id to (project, layer) pairs.
            val sceneLayerProjectIO: ConnectionIO[
              (Scene.WithRelated, List[SceneWithProjectIdLayerId], Project)
            ] = for {
              (dbUser, _, _, dbProject) <- insertUserOrgPlatProject(
                user,
                org,
                platform,
                projectCreate
              )
              datasource <- DatasourceDao.create(
                dsCreate.toDatasource(dbUser),
                dbUser
              )
              sceneInsert <- SceneDao.insert(
                fixupSceneCreate(dbUser, datasource, scene),
                dbUser
              )
              _ <- ProjectDao.addScenesToProject(
                List(sceneInsert.id),
                dbProject.id,
                dbProject.defaultLayerId,
                true
              )
              slp <- SceneToLayerDao.getProjectsAndLayersBySceneId(
                sceneInsert.id
              )
            } yield { (sceneInsert, slp, dbProject) }
            val (sceneInsert, slp, dbProject) =
              sceneLayerProjectIO.transact(xa).unsafeRunSync
            // The scene should map to exactly the project's default layer.
            slp.toSet == Set(
              SceneWithProjectIdLayerId(
                sceneInsert.id,
                dbProject.id,
                dbProject.defaultLayerId
              )
            )
          }
      }
    }
  }
}
| raster-foundry/raster-foundry | app-backend/db/src/test/scala/com/azavea/rf/database/SceneToLayerDaoSpec.scala | Scala | apache-2.0 | 6,545 |
package org.elasticsearch.spark.sql
import scala.collection.Map
import org.apache.commons.logging.Log
import org.apache.commons.logging.LogFactory
import org.apache.spark.Partition
import org.apache.spark.SparkContext
import org.apache.spark.TaskContext
import org.apache.spark.sql.api.java.Row
import org.elasticsearch.hadoop.cfg.Settings
import org.elasticsearch.hadoop.rest.InitializationUtils
import org.elasticsearch.hadoop.rest.RestService.PartitionDefinition
import org.elasticsearch.spark.rdd.AbstractEsRDD
import org.elasticsearch.spark.rdd.AbstractEsRDDIterator
import org.elasticsearch.spark.rdd.EsPartition
// see the comments in ScalaEsRowRDD
/** RDD of Java-API `Row`s backed by Elasticsearch shard partitions. */
private[spark] class JavaEsRowRDD(
    @transient sc: SparkContext,
    params: Map[String, String] = Map.empty,
    schema: SchemaUtils.Schema)
  extends AbstractEsRDD[Row](sc, params) {

  /** Builds the per-partition iterator that reads rows from the ES shard backing `split`. */
  override def compute(split: Partition, context: TaskContext): JavaEsRowRDDIterator = {
    val esPartition = split.asInstanceOf[EsPartition].esPartition
    new JavaEsRowRDDIterator(context, esPartition, schema)
  }
}
/** Iterator over one Elasticsearch partition, producing Java-API `Row`s. */
private[spark] class JavaEsRowRDDIterator(
    context: TaskContext,
    partition: PartitionDefinition,
    schema: SchemaUtils.Schema)
  extends AbstractEsRDDIterator[Row](context, partition) {

  override def getLogger() = LogFactory.getLog(classOf[JavaEsRowRDD])

  // Configures the value reader before any document is read; the row-order
  // info must be stored in the settings prior to reading so nested rows are
  // laid out in the order Spark requested.
  override def initReader(settings: Settings, log: Log) = {
    InitializationUtils.setValueReaderIfNotSet(settings, classOf[JavaEsRowValueReader], log)
    // parse the structure and save the order (requested by Spark) for each Row (root and nested)
    // since the data returned from Elastic is likely to not be in the same order
    SchemaUtils.setRowInfo(settings, schema.struct)
  }

  // `value` is a (document id, row) pair; only the row is returned.
  override def createValue(value: Array[Object]): Row = {
    // drop the ID
    value(1).asInstanceOf[JavaEsRow]
  }
}
} | costin/elasticsearch-hadoop | spark/sql-12/src/main/scala/org/elasticsearch/spark/sql/JavaEsRowRDD.scala | Scala | apache-2.0 | 1,801 |
/* *\
** \ \ / _) \ \ / \ | **
** \ \ / | __ \ _ \ __| \ \ / |\/ | **
** \ \ / | | | __/ | \ \ / | | **
** \_/ _| .__/ \___| _| \_/ _| _| **
** _| **
** **
** ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ **
** **
** http://www.vipervm.org **
** GPLv3 **
\* */
package org.vipervm.runtime.data
import org.vipervm.runtime.data.Primitives._
import org.vipervm.platform._
import org.vipervm.runtime.mm.DataManager
/**
 * 2D matrix of primitive elements backed by a linear byte buffer.
 *
 * `width` and `height` are expressed in elements; the underlying
 * `BufferView2D` stores its row width in bytes (`elem.size * width`).
 */
class Matrix2D[A](val width:Long, val height:Long)(implicit elem:Primitive[A]) extends MetaView with PrintableMetaView {

  type ViewType = BufferView2D

  /** Allocates a backing buffer of `elem.size * width * height` bytes in `memory`. */
  def allocate(memory:MemoryNode):BufferView2D = {
    //TODO: manage padding correctly
    val buffer = memory.allocate(elem.size*width*height)
    new BufferView2D(buffer, 0, elem.size*width, height, 0)
  }

  /** Byte offset of element (x, y) inside `view` (row-major, padded rows). */
  private def byteIndex(view:BufferView2D, x:Long, y:Long): Long =
    x * elem.size + y * (view.width + view.padding) + view.offset

  /**
   * Fills the matrix with `f(x, y)` for every cell.
   *
   * @throws Exception if the data has already been initialized
   */
  def initialize(dataManager:DataManager,f:(Long,Long)=>A):Unit = {
    if (isDefined)
      throw new Exception("Trying to initialize a data already initialized")
    onHost(dataManager) { (view,buf) => {
      // view.width is in bytes, so the element count per row is width/elem.size.
      // The previous code hard-coded a 4-byte stride (x*4, width/4), which is
      // only correct for floats and addressed the wrong bytes for doubles.
      for (y <- 0L until view.height; x <- 0L until (view.width / elem.size)) {
        val index = byteIndex(view, x, y)
        elem.typ match {
          case "float" => buf.peer.setFloat(index, f.asInstanceOf[(Long,Long)=>Float](x,y))
          case "double" => buf.peer.setDouble(index, f.asInstanceOf[(Long,Long)=>Double](x,y))
        }
      }
    }}.syncWait
  }

  /** Renders the matrix as rows of space-separated values, one row per line. */
  override protected def hostPrint(view:ViewType,buffer:HostBuffer):String = {
    val mem = buffer.peer
    val result = new StringBuilder
    for (y <- 0L until view.height) {
      // Use the element size for the stride (see initialize): fixes reads of
      // double matrices, identical behavior for floats (elem.size == 4).
      for (x <- 0L until (view.width / elem.size)) {
        val index = byteIndex(view, x, y)
        elem.typ match {
          case "float" => result.append(mem.getFloat(index) + " ")
          case "double" => result.append(mem.getDouble(index) + " ")
        }
      }
      result.append("\n")
    }
    result.mkString
  }
}
| hsyl20/Scala_ViperVM | src/main/scala/org/vipervm/runtime/data/Matrix2D.scala | Scala | gpl-3.0 | 2,332 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.la
import cc.factorie._
import cc.factorie.util._
/** A sparse Tensor that stores an array of indices having non-zero values and an aligned sized array storing those values. */
trait SparseTensor extends SparseDoubleSeq with Tensor {
  def isDense = false
  // Forces pending writes into sorted, deduplicated form.
  def _makeReadable(): Unit
  // unsafe - call makeReadable first
  def _unsafeActiveDomainSize: Int
  // unsafe - call makeReadable first (if you call this before making readable, you can get a reference to the wrong array)
  def _indices: Array[Int]
  // unsafe - call makeReadable first
  // this has to be a DoubleSeq and not an Array[Int] so we can efficiently return a UniformTensor for binary tensor values
  def _valuesSeq: DoubleSeq
  // Hints the expected number of non-zeros so storage can be pre-allocated.
  def sizeHint(size: Int): Unit
}
/** A SparseTensor whose non-zero values are stored explicitly as doubles. */
trait SparseIndexedTensor extends SparseTensor {
  // unsafe - call makeReadable first
  def _values: Array[Double]
  // unsafe - call makeReadable first
  def _valuesSeq = new ArrayDoubleSeq(_values)
}
/** Array-backed SparseIndexedTensor.
  *
  * Writes (`+=`) are appended unsorted to the tail of the index/value arrays;
  * reads call `makeReadable()`, which sorts the tail into the already-sorted
  * prefix (tracked by `_sorted`) and merges duplicate indices by summing their
  * values. All "unsafe" accessors assume `makeReadable()` has been called.
  */
trait ArraySparseIndexedTensor extends SparseIndexedTensor {
  // In subclasses either _length should be set > 0 or _sizeProxy should be set non-null, but not both.
  //private var _length: Int = 0
  //private var _sizeProxy: Iterable[Any] = null
  // TODO Alex and I confirmed that private var access in traits still has getter and setter methods - we need to find out a better way to do this -luke
  private var __values: Array[Double] = new Array[Double](4)
  private var __indices: Array[Int] = new Array[Int](4) // the indices, in order corresponding to _values
  private var _positions: Array[Int] = null // a dense array containing the index into _indices and _values; not yet implemented
  private var __npos = 0 // the number of positions in _values and _indices that are actually being used
  private var _sorted = 0 // The number of positions in _values & _indices where indices are sorted; if _sorted == _npos then ready for use
  // TODO Avoid making these public? But used in BP now. -akm
  def _values = __values
  def _indices = __indices
  def _unsafeActiveDomainSize: Int = __npos
  // Grows (never shrinks below __npos) both backing arrays to `cap` entries.
  private def setCapacity(cap:Int): Unit = {
    require(cap >= __npos)
    __indices = java.util.Arrays.copyOf(__indices, cap)
    __values = java.util.Arrays.copyOf(__values, cap)
  }
  // Amortized growth: expand by 50% when capacity is exceeded.
  def ensureCapacity(cap:Int): Unit = if (__indices.length < cap) setCapacity(math.max(cap, __indices.length + __indices.length/2))
  def trim(): Unit = setCapacity(__npos)
  // unsafe - call makeReadable first
  // TODO There must already be functions somewhere that do this.
  private def copyarray(a:Array[Double]): Array[Double] = { if (a eq null) null else java.util.Arrays.copyOf(a, a.length) }
  private def copyarray(a:Array[Int]): Array[Int] = { if (a eq null) null else java.util.Arrays.copyOf(a, a.length) }
  // Deep-copies this tensor's state into `t` (arrays are cloned, not shared).
  def copyInto(t:SparseIndexedTensor): Unit = t match {
    case t: ArraySparseIndexedTensor =>
      t.__values = copyarray(__values); t.__indices = copyarray(__indices); t._positions = copyarray(_positions); t.__npos = __npos; t._sorted = _sorted
  }
  //def length: Int = if (_sizeProxy ne null) _sizeProxy.size else _length
  override def activeDomainSize: Int = { makeReadable(); __npos }
  def activeDomain: IntSeq = { makeReadable() ; new TruncatedArrayIntSeq(__indices, __npos) } // TODO Consider making more efficient
  override def foreachActiveElement(f:(Int,Double)=>Unit): Unit = { makeReadable(); var i = 0; while (i < __npos) { f(__indices(i), __values(i)); i += 1 } }
  override def activeElements: Iterator[(Int,Double)] = {
    makeReadable()
    new Iterator[(Int,Double)] { // Must not change _indices and _values during iteration!
      var i = 0
      def hasNext = i < __npos
      def next() = { i += 1 ; (__indices(i-1), __values(i-1)) }
    }
  }
  // TODO need to assert that _sorted < __npos always. Add a "checkInvariants" method?? -luke
  override def zero(): Unit = { __npos = 0; _sorted = 0 }
  override def sum: Double = { var s = 0.0; var i = 0; while (i < __npos) { s += __values(i); i += 1 }; s }
  /** Return the position at which index occurs, or -1 if index does not occur. */
  def position(index:Int): Int = {
    makeReadable()
    val pos = java.util.Arrays.binarySearch(__indices, 0, _sorted, index)
    if (pos >= 0) pos else -1
  }
  // Variant restricting the binary search to [start, _sorted); used for
  // forward-only scans such as sparse-sparse dot products.
  def position(index:Int, start:Int): Int = { // Just linear search for now; consider binary search with memory of last position
    makeReadable()
    val pos = java.util.Arrays.binarySearch(__indices, start, _sorted, index)
    if (pos >= 0) pos else -1
  }
  override def toArray: Array[Double] = { val arr = new Array[Double](length); foreachActiveElement((i, v) =>arr(i) = v); arr }
  // Returns 0.0 for indices not present (standard sparse semantics).
  def apply(index:Int): Double = {
    // makeReadable is called in this.position
    val pos = position(index)
    if (pos < 0) 0.0 else __values(pos)
  }
  override def twoNormSquared: Double = {
    makeReadable()
    val l = __npos; var result = 0.0; var i = 0
    while (i < l) {
      val v = __values(i)
      result += v * v
      i += 1
    }
    result
  }
  override def oneNorm: Double = {
    // activeDomainSize calls makeReadable, so _values is valid afterwards.
    val len = activeDomainSize
    _values.take(len).map(_.abs).sum
  }
  // Dot product with fast paths per argument type; falls back to a logged
  // slow path for unrecognized Tensor types.
  override def dot(v:DoubleSeq): Double = {
    makeReadable()
    v match {
      // TODO add fast implementations for Dense here! it's better to keep things off of dense since it's easy to dot against -luke
      case v:SingletonBinaryTensor => apply(v.singleIndex)
      case v:SingletonIndexedTensor => apply(v.singleIndex) * v.singleValue
      case v:ArraySparseIndexedTensor => {
        v._makeReadable()
        // Iterate over the smaller tensor and binary-search the larger one;
        // since both are sorted, the search start only moves forward.
        val v1 = if (this.__npos <= v.__npos) this else v
        val v2 = if (v.__npos < this.__npos) this else v
        var i = 0; var j = -1; var j2 = 0
        var result = 0.0
        while (i < v1.__npos) {
          j2 = v2.position(v1.__indices(i), j+1)
          if (j2 >= 0) { result += v1.__values(i) * v2.__values(j2); j = j2 }
          i += 1
        }
        result
      }
      case v:SparseBinaryTensor => {
        v._makeReadable()
        var i = 0
        val len = v.activeDomainSize
        val indices = v._indices
        var result = 0.0
        var pos = 0
        while (i < len && pos >= 0) {
          val posTmp = position(indices(i), pos)
          if (posTmp >= 0) {
            pos = posTmp
            result += __values(pos)
          }
          i += 1
        }
        result
      }
      case t: DenseTensor => {
        val tArr = t.asArray
        val len = activeDomainSize
        val indices = _indices
        val values = _values
        var i = 0
        var dot = 0.0
        while (i < len) {
          dot += tArr(indices(i)) * values(i)
          i += 1
        }
        dot
      }
      case t: Tensor =>
        if (!SparseIndexedTensor.hasLogged) {
          SparseIndexedTensor.hasLogged = true
          println("Warning: SparseIndexedTensor slow dot with type " + t.getClass.getName)
        }
        var dot = 0.0
        t.foreachActiveElement((i, v) => dot += apply(i)*v)
        dot
    }
  }
  override def toString = "SparseIndexedTensor npos="+__npos+" sorted="+_sorted+" ind="+__indices.mkString(",")+" val="+__values.mkString(",")
  def _makeReadable(): Unit = makeReadable()
  // Produces the permutation that sorts __indices(0 until __npos); the keys
  // array passed to quickSort is a copy, so __indices itself is untouched.
  final private def doTheSort(): Array[Int] = {
    val newIndices = new Array[Int](__npos)
    val len = __npos; var i = 0
    while (i < len) { newIndices(i) = i; i += 1}
    FastSorting.quickSort(keys = java.util.Arrays.copyOf(__indices, __npos), values = newIndices)
    newIndices
  }
  // Full re-sort path: sorts all entries and collapses duplicate indices by
  // summing their values; used when the sorted prefix is small.
  final private def makeReadableEmpty(): Unit = {
    // We can assume that the "readable" part of the vector is empty, and hence we can just sort everything
    val sortedIndices = doTheSort()
    val newIndices = new Array[Int](__indices.length)
    val newValues = new Array[Double](__indices.length)
    var prevIndex = __indices(sortedIndices(0))
    newIndices(0) = prevIndex
    newValues(0) = __values(sortedIndices(0))
    var i = 1
    var j = 0
    while (i < __npos) {
      val idx = sortedIndices(i)
      if (prevIndex != __indices(idx)) {
        j += 1
        newIndices(j) = __indices(idx)
        prevIndex = __indices(idx)
      }
      newValues(j) += __values(idx)
      i += 1
    }
    _sorted = j+1
    __indices = newIndices
    __values = newValues
  }
  // Incremental path: insertion-sorts each unsorted tail entry into the
  // sorted prefix, merging values for indices already present.
  final private def makeReadableIncremental(): Unit = {
    var cp = _sorted // "current position", the position next to be placed into sorted order
    while (cp < __npos) {
      //println("cp="+cp)
      val ci = __indices(cp) // "current index", the index next to be placed into sorted order.
      val cv = __values(cp) // "current value"
      var i = _sorted - 1
      //println("i="+i)
      // Find the position at which the current index/value belongs
      while (i >= 0 && __indices(i) >= ci) i -= 1
      i += 1
      // Put it there, shifting to make room if necessary
      //println("Placing at position "+i)
      if (__indices(i) == ci) { if (i != cp) __values(i) += cv else _sorted += 1 }
      else insert(i, ci, cv, incrementNpos=false, incrementSorted=true)
      //println("sorted="+_sorted)
      cp += 1
    }
  }
  // Ensures the tensor is fully sorted/deduplicated; no-op when already so.
  final private def makeReadable(): Unit = {
    if (_sorted == __npos) return
    if ((_sorted <= 10) && (__npos > 0)) {
      makeReadableEmpty()
    } else {
      makeReadableIncremental()
    }
    __npos = _sorted
    // if (__npos * 1.5 > __values.length) trim()
  }
  // Caller is responsible for making sure there is enough capacity
  @inline private def insert(position:Int, index:Int, value:Double, incrementNpos:Boolean, incrementSorted:Boolean): Unit = {
    if (__npos - position > 0) {
      System.arraycopy(__values, position, __values, position+1, _sorted-position)
      System.arraycopy(__indices, position, __indices, position+1, _sorted-position)
    }
    __indices(position) = index
    __values(position) = value
    if (incrementNpos) __npos += 1
    if (incrementSorted) _sorted += 1
  }
  // Overwrites the value at `index` if present, otherwise appends it.
  override def update(index:Int, value:Double): Unit = {
    if (_sorted == 0) {
      +=(index,value)
    } else {
      val p = position(index)
      if (p >= 0) __values(p) = value
      else +=(index, value)
    }
  }
  // Efficiently support multiple sequential additions
  // Appends unsorted; duplicates are merged lazily in makeReadable.
  override def +=(index:Int, incr:Double): Unit = if (incr != 0.0) {
    ensureCapacity(__npos+1)
    __indices(__npos) = index
    __values(__npos) = incr
    __npos += 1
  }
  override def +=(s:Double): Unit = throw new Error("Method +=(Double) not defined on class "+getClass.getName)
  // this += t * f, with fast paths per tensor type (including outer products).
  override def +=(t:DoubleSeq, f:Double): Unit = t match {
    case t:SingletonBinaryTensor => +=(t.singleIndex, f)
    case t:SingletonIndexedTensor => +=(t.singleIndex, f * t.singleValue)
    case t:SparseBinaryTensor => { val len = t.activeDomainSize; val a = t._indices; var i = 0; while (i < len) { +=(a(i), f); i += 1 }}
    case t:SparseIndexedTensor => { val len = t.activeDomainSize; val as = t._indices; val vs = t._values; var i = 0; while (i < len) { +=(as(i), f * vs(i)); i += 1 }}
    case t:DenseTensor => { val arr = t.asArray; var i = 0; while (i < arr.length) {this += (i, arr(i)*f) ; i += 1} }
    case t:Outer2Tensor => {
      // Expand the outer product lazily, dispatching on the factor types.
      val ff = f*t.scale
      (t.tensor1, t.tensor2) match {
        case (t1: DenseTensor, t2: SparseBinaryTensor) =>
          var i = 0
          val arr = t1.asArray
          while (i < arr.length) {
            val indices = t2._indices
            var j = 0
            while (j < t2.activeDomainSize) {
              this += (t.singleFlatIndex(i, indices(j)), f*t1(i))
              j += 1
            }
            i += 1
          }
        case (t1: DenseTensor, t2: SparseTensor) =>
          var i = 0
          val arr = t1.asArray
          while (i < arr.length) {
            val len = t2.activeDomainSize
            val indices = t2._indices
            val values = t2._valuesSeq
            var j = 0
            while (j < len) {
              this += (t.singleFlatIndex(i, indices(j)), ff*t1(i)*values(j))
              j += 1
            }
            i += 1
          }
        case (t1: SingletonBinaryTensor, t2: DenseTensor) => {
          val i0 = t1.singleIndex
          val arr = t2.asArray
          var i = 0
          while (i < arr.length) {
            this += (t.singleFlatIndex(i0, i), ff*arr(i))
            i += 1
          }
        }
        case (t1: SparseTensor, t2: DenseTensor) =>
          var j = 0
          val arr = t2.asArray
          while (j < arr.length) {
            val len = t1.activeDomainSize
            val indices = t1._indices
            val values = t1._valuesSeq
            var i = 0
            while (i < len) {
              this += (t.singleFlatIndex(indices(i), j), ff*t2(j)*values(i))
              i += 1
            }
            j += 1
          }
        case (t1: SingletonTensor, t2: SparseTensor) => {
          val i0 = t1.singleIndex
          // Have to call activeDomainSize/makeReadable before reading out array of indices or this will have mismatched indices and values -luke
          val len = t2.activeDomainSize
          val arr = t2._indices
          val values = t2._valuesSeq
          var i = 0
          while (i < len) {
            val singleidx = t.singleFlatIndex(i0, arr(i))
            this += (singleidx, ff*t1.singleValue*values(i))
            i += 1
          }
        }
        case (t1: SparseTensor, t2: SparseTensor) => {
          val len1 = t1.activeDomainSize
          val indices1 = t1._indices
          val values1 = t1._valuesSeq
          val len2 = t2.activeDomainSize
          val indices2 = t2._indices
          val values2 = t2._valuesSeq
          var i = 0
          while (i < len1) {
            var j = 0
            while (j < len2) {
              this += (t.singleFlatIndex(indices1(i), indices2(j)), ff*values1(i)*values2(j))
              j += 1
            }
            i += 1
          }
        }
        case (t1: DenseTensor, t2: DenseTensor) => {
          val arr1 = t1.asArray
          val arr2 = t2.asArray
          var i = 0
          while (i < arr1.length) {
            var j = 0
            while (j < arr2.length) {
              this += (t.singleFlatIndex(i, j), arr1(i)*arr2(j)*ff)
              j += 1
            }
            i += 1
          }
        }
        case _ => throw new Error("types are " + t.tensor1.getClass.getName + " and " + t.tensor2.getClass.getName) }
    }
    case t:Tensor =>
      if (!SparseIndexedTensor.hasLogged) {
        SparseIndexedTensor.hasLogged = true
        println("Warning: SparseIndexedTensor slow += with type " + t.getClass.getName)
      }
      t.foreachActiveElement((i, v) => this += (i, v * f))
  }
  /** Increment Array "a" with the contents of this Tensor, but do so at "offset" into array and multiplied by factor "f". */
  override def =+(a:Array[Double], offset:Int, f:Double): Unit = {
    val indices = __indices
    val values = __values
    val npos = __npos
    var i = 0
    while (i < npos) {
      a(indices(i) + offset) += f * values(i)
      i += 1
    }
  }
  // In-place softmax over the active values (max-shifted for stability);
  // returns the normalizer (sum of exponentials).
  override def expNormalize(): Double = {
    var max = Double.MinValue
    var i = 0
    while (i < __npos) { if (max < __values(i)) max = __values(i); i += 1 }
    var sum = 0.0
    i = 0
    while (i < __npos) {
      val e = math.exp(__values(i) - max) //update(i, math.exp(apply(i) - max))
      __values(i) = e
      sum += e
      i += 1
    }
    i = 0
    while (i < __npos) {
      __values(i) /= sum
      i += 1
    }
    sum
  }
  override def exponentiate() {
    var i = 0
    while (i < __npos) {
      __values(i) = math.exp(__values(i))
      i += 1
    }
  }
  override def foldActiveElements(seed: Double, f: (Int, Double, Double) => Double): Double = {
    var acc = seed; var i = 0
    while (i < __npos) { acc = f(__indices(i), __values(i), acc); i += 1 }
    acc
  }
  // Replaces the tensor with a one-hot vector at the index of the max value.
  override def maxNormalize() {
    var maxi = 0
    var max = Double.MinValue
    var i = 0
    while (i < __npos) {
      if (__values(i) > max) {
        max = __values(i)
        maxi = __indices(i)
      }
      i += 1
    }
    zero()
    update(maxi, 1)
  }
  override def *=(other: Double) {
    _makeReadable()
    var i = 0
    val len = activeDomainSize
    while (i < len) {
      __values(i) *= other
      i += 1
    }
  }
  def sizeHint(size: Int): Unit = ensureCapacity(size)
}
object SparseIndexedTensor {
  // One-shot flag so the slow-path warnings in dot/+= are printed only once.
  var hasLogged = false
}
| hlin117/factorie | src/main/scala/cc/factorie/la/SparseIndexedTensor.scala | Scala | apache-2.0 | 17,115 |
/*
* Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.persistence.cassandra.testkit
import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraPersistenceSpec
import com.lightbend.lagom.scaladsl.persistence.testkit.AbstractEmbeddedPersistentActorSpec
import com.lightbend.lagom.scaladsl.playjson.EmptyJsonSerializerRegistry
class EmbeddedCassandraPersistentActorSpec extends CassandraPersistenceSpec(EmptyJsonSerializerRegistry) with AbstractEmbeddedPersistentActorSpec
| edouardKaiser/lagom | persistence-cassandra/scaladsl/src/test/scala/com/lightbend/lagom/scaladsl/persistence/cassandra/testkit/EmbeddedCassandraPersistentActorSpec.scala | Scala | apache-2.0 | 540 |
/************************************************************************\\
** Project **
** ______ ______ __ ______ ____ **
** / ____/ / __ / / / / __ / / __/ (c) 2011-2014 **
** / /__ / /_/ / / / / /_/ / / /_ **
** /___ / / ____/ / / / __ / / __/ Erik Osheim, Tom Switzer **
** ____/ / / / / / / / | | / /__ **
** /_____/ /_/ /_/ /_/ |_| /____/ All rights reserved. **
** **
** Redistribution and use permitted under the MIT license. **
** **
\\************************************************************************/
package spire
package random
package rng
import spire.syntax.cfor._
import spire.util.Pack
import java.nio.ByteBuffer
import java.util
/**
* This is a Scala implementation of the Well19937a PRNG based on WELL19937a.c.
*
* <p>The acronym WELL stands for Well Equidistributed Long-period Linear.
*
* <p><b>Reference: </b>
* François Panneton, Pierre L'Ecuyer and Makoto Matsumoto:
* "Improved Long-Period Generators Based on Linear Recurrences Modulo 2",
* <i>ACM Transactions on Mathematical Software,</i> Vol. 32, No. 1, January 2006, pp 1--16.
*
* @see <a href="http://www.iro.umontreal.ca/~panneton/well/WELL19937a.c">WELL19937a.c</a>
* @see <a href="http://www.iro.umontreal.ca/~panneton/WELLRNG.html">Well PRNG Home Page</a>
* @see <a href="http://en.wikipedia.org/wiki/Well_Equidistributed_Long-period_Linear">WELL @ Wikipedia</a>
* @author <a href="mailto:dusan.kysel@gmail.com">Dušan Kysel</a>
*/
final class Well19937a protected[random](state: Array[Int], i0: Int) extends IntBasedGenerator {

  import Well19937a.{UpperMask, LowerMask, R, BYTES, mat0pos, mat0neg, mat1, mat3pos}

  // Current position in the circular state buffer (length R).
  private var i : Int = i0

  /** Returns an independent generator with an identical copy of this state. */
  def copyInit: Well19937a = new Well19937a(state.clone(), i)

  /** Serializes the R state ints followed by the current index as big-endian bytes. */
  def getSeedBytes(): Array[Byte] = {
    val bytes = new Array[Byte](BYTES)
    val bb = ByteBuffer.wrap(bytes)
    cfor(0)(_ < R, _ + 1) { i => bb.putInt(state(i)) }
    bb.putInt(i)
    bytes
  }

  /** Restores state from bytes produced by getSeedBytes; short input is zero-padded. */
  def setSeedBytes(bytes: Array[Byte]): Unit = {
    val bs = if (bytes.length < BYTES) util.Arrays.copyOf(bytes, BYTES) else bytes
    val bb = ByteBuffer.wrap(bs)
    cfor(0)(_ < R, _ + 1) { i => state(i) = bb.getInt }
    i = bb.getInt
  }

  /** Advances the WELL19937a recurrence one step and returns the next 32-bit value.
    * The z0/z1/z2 mixing and the two state writes follow WELL19937a.c exactly;
    * statement order matters because the second write reads the first.
    */
  def nextInt(): Int = {
    import Well19937acIndexCache._
    val z0: Int = (state(vrm1(i)) & LowerMask) | (state(vrm2(i)) & UpperMask)
    val z1: Int = mat0neg(-25, state(i)) ^ mat0pos(27, state(vm1(i)))
    val z2: Int = mat3pos(9, state(vm2(i))) ^ mat0pos(1, state(vm3(i)))
    state(i) = z1 ^ z2
    state(vrm1(i)) = mat1(z0) ^ mat0neg(-9, z1) ^ mat0neg(-21, z2) ^ mat0pos(21, state(i))
    i = vrm1(i)
    state(i)
  }
}
/** Companion: algorithm constants, the matrix transforms, and seed factories. */
object Well19937a extends GeneratorCompanion[Well19937a, (Array[Int], Int)] {

  @inline private val UpperMask = 0x7FFFFFFF // = 0xFFFFFFFF ^ Int.MinValue
  @inline private val LowerMask = 0x80000000 // = Int.MinValue

  // Number of bits in the pool.
  @inline private final val K : Int = 19937
  // Length of the pool in ints.
  @inline private final val R : Int = (K + 31) / 32
  // Length of the pool in ints -1.
  // @inline private final val R_1 : Int = R - 1
  // Length of the pool in ints -2.
  // @inline private final val R_2 : Int = R - 2
  // Length of the pool and index in bytes
  @inline private final val BYTES = R * 4 + 4

  // First parameter of the algorithm.
  // @inline private final val M1 : Int = 70
  // Second parameter of the algorithm.
  // @inline private final val M2 : Int = 179
  // Third parameter of the algorithm.
  // @inline private final val M3 : Int = 449

  // The four linear transforms used by the recurrence (see WELL19937a.c).
  @inline private final def mat0pos(t: Int, v: Int) = v ^ (v >>> t)
  @inline private final def mat0neg(t: Int, v: Int) = v ^ (v << -t)
  @inline private final def mat1(v: Int) = v
  @inline private final def mat3pos(t: Int, v: Int) = v >>> t

  // Seeds are (R-int state array, starting index).
  def randomSeed(): (Array[Int], Int) =
    (Utils.seedFromInt(R, Utils.intFromTime()), 0)

  def fromSeed(seed: (Array[Int], Int)): Well19937a =
    seed match {
      case (state, stateIndex) =>
        assert(state.length == R)
        new Well19937a(state, stateIndex)
    }

  def fromArray(arr: Array[Int]): Well19937a =
    fromSeed((Utils.seedFromArray(R, arr), 0))

  def fromBytes(bytes: Array[Byte]): Well19937a =
    fromArray(Pack.intsFromBytes(bytes, bytes.length / 4))

  def fromTime(time: Long = System.nanoTime): Well19937a =
    fromSeed((Utils.seedFromInt(R, Utils.intFromTime(time)), 0))
}
| woparry/spire | core/src/main/scala/spire/random/rng/Well19937a.scala | Scala | mit | 4,749 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.classification
import com.github.fommil.netlib.BLAS.{getInstance => blas}
import org.apache.spark.Logging
import org.apache.spark.annotation.Experimental
import org.apache.spark.ml.{PredictionModel, Predictor}
import org.apache.spark.ml.param.{Param, ParamMap}
import org.apache.spark.ml.regression.DecisionTreeRegressionModel
import org.apache.spark.ml.tree.{DecisionTreeModel, GBTParams, TreeClassifierParams, TreeEnsembleModel}
import org.apache.spark.ml.util.{Identifiable, MetadataUtils}
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.tree.{GradientBoostedTrees => OldGBT}
import org.apache.spark.mllib.tree.configuration.{Algo => OldAlgo}
import org.apache.spark.mllib.tree.loss.{LogLoss => OldLogLoss, Loss => OldLoss}
import org.apache.spark.mllib.tree.model.{GradientBoostedTreesModel => OldGBTModel}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.DataFrame
/**
* :: Experimental ::
* [[http://en.wikipedia.org/wiki/Gradient_boosting Gradient-Boosted Trees (GBTs)]]
* learning algorithm for classification.
* It supports binary labels, as well as both continuous and categorical features.
* Note: Multiclass labels are not currently supported.
*/
@Experimental
final class GBTClassifier(override val uid: String)
extends Predictor[Vector, GBTClassifier, GBTClassificationModel]
with GBTParams with TreeClassifierParams with Logging {
def this() = this(Identifiable.randomUID("gbtc"))
// Override parameter setters from parent trait for Java API compatibility.
// Parameters from TreeClassifierParams:
override def setMaxDepth(value: Int): this.type = super.setMaxDepth(value)
override def setMaxBins(value: Int): this.type = super.setMaxBins(value)
override def setMinInstancesPerNode(value: Int): this.type =
super.setMinInstancesPerNode(value)
override def setMinInfoGain(value: Double): this.type = super.setMinInfoGain(value)
override def setMaxMemoryInMB(value: Int): this.type = super.setMaxMemoryInMB(value)
override def setCacheNodeIds(value: Boolean): this.type = super.setCacheNodeIds(value)
override def setCheckpointInterval(value: Int): this.type = super.setCheckpointInterval(value)
/**
* The impurity setting is ignored for GBT models.
* Individual trees are built using impurity "Variance."
*/
override def setImpurity(value: String): this.type = {
logWarning("GBTClassifier.setImpurity should NOT be used")
this
}
// Parameters from TreeEnsembleParams:
override def setSubsamplingRate(value: Double): this.type = super.setSubsamplingRate(value)
override def setSeed(value: Long): this.type = {
logWarning("The 'seed' parameter is currently ignored by Gradient Boosting.")
super.setSeed(value)
}
// Parameters from GBTParams:
override def setMaxIter(value: Int): this.type = super.setMaxIter(value)
override def setStepSize(value: Double): this.type = super.setStepSize(value)
// Parameters for GBTClassifier:
/**
 * Loss function which GBT tries to minimize. (case-insensitive)
 * Supported: "logistic"
 * (default = logistic)
 * @group param
 */
val lossType: Param[String] = new Param[String](this, "lossType", "Loss function which GBT" +
  " tries to minimize (case-insensitive). Supported options:" +
  s" ${GBTClassifier.supportedLossTypes.mkString(", ")}",
  // Validator: accept any casing of a supported loss name.
  (value: String) => GBTClassifier.supportedLossTypes.contains(value.toLowerCase))

setDefault(lossType -> "logistic")

/** @group setParam */
def setLossType(value: String): this.type = set(lossType, value)

/** @group getParam */
// Always returns the lowercase form, matching supportedLossTypes.
def getLossType: String = $(lossType).toLowerCase
/** (private[ml]) Convert new loss to old loss. */
override private[ml] def getOldLossType: OldLoss = {
  val lossName = getLossType
  if (lossName == "logistic") {
    OldLogLoss
  } else {
    // Should never happen because of check in setter method.
    throw new RuntimeException(s"GBTClassifier was given bad loss type: $lossName")
  }
}
/**
 * Trains a GBT model on the given dataset. Only binary classification is
 * supported: the label column must carry metadata declaring exactly two
 * classes (e.g. produced by StringIndexer), otherwise this throws
 * IllegalArgumentException. Training is delegated to the old-API
 * GradientBoostedTrees implementation and the result wrapped back into the
 * new API.
 */
override protected def train(dataset: DataFrame): GBTClassificationModel = {
  val categoricalFeatures: Map[Int, Int] =
    MetadataUtils.getCategoricalFeatures(dataset.schema($(featuresCol)))
  // The number of classes must be present in the label column's metadata.
  val numClasses: Int = MetadataUtils.getNumClasses(dataset.schema($(labelCol))) match {
    case Some(n: Int) => n
    case None => throw new IllegalArgumentException("GBTClassifier was given input" +
      s" with invalid label column ${$(labelCol)}, without the number of classes" +
      " specified. See StringIndexer.")
    // TODO: Automatically index labels: SPARK-7126
  }
  require(numClasses == 2,
    s"GBTClassifier only supports binary classification but was given numClasses = $numClasses")
  val oldDataset: RDD[LabeledPoint] = extractLabeledPoints(dataset)
  val boostingStrategy = super.getOldBoostingStrategy(categoricalFeatures, OldAlgo.Classification)
  val oldGBT = new OldGBT(boostingStrategy)
  val oldModel = oldGBT.run(oldDataset)
  GBTClassificationModel.fromOld(oldModel, this, categoricalFeatures)
}

/** Returns a copy of this estimator with the extra params applied. */
override def copy(extra: ParamMap): GBTClassifier = defaultCopy(extra)
}
@Experimental
object GBTClassifier {
  // The losses below should be lowercase.
  /** Accessor for supported loss settings: logistic */
  final val supportedLossTypes: Array[String] = Seq("logistic").map(_.toLowerCase).toArray
}
/**
 * :: Experimental ::
 * [[http://en.wikipedia.org/wiki/Gradient_boosting Gradient-Boosted Trees (GBTs)]]
 * model for classification.
 * It supports binary labels, as well as both continuous and categorical features.
 * Note: Multiclass labels are not currently supported.
 * @param _trees Decision trees in the ensemble.
 * @param _treeWeights Weights for the decision trees in the ensemble.
 */
@Experimental
final class GBTClassificationModel(
    override val uid: String,
    private val _trees: Array[DecisionTreeRegressionModel],
    private val _treeWeights: Array[Double])
  extends PredictionModel[Vector, GBTClassificationModel]
  with TreeEnsembleModel with Serializable {

  // The model is only valid with a non-empty ensemble and one weight per tree.
  require(numTrees > 0, "GBTClassificationModel requires at least 1 tree.")
  require(_trees.length == _treeWeights.length, "GBTClassificationModel given trees, treeWeights" +
    s" of non-matching lengths (${_trees.length}, ${_treeWeights.length}, respectively).")

  override def trees: Array[DecisionTreeModel] = _trees.asInstanceOf[Array[DecisionTreeModel]]

  override def treeWeights: Array[Double] = _treeWeights

  override protected def predict(features: Vector): Double = {
    // TODO: Override transform() to broadcast model: SPARK-7127
    // TODO: When we add a generic Boosting class, handle transform there? SPARK-7129
    // Classifies by thresholding sum of weighted tree predictions
    val treePredictions = _trees.map(_.rootNode.predict(features))
    val prediction = blas.ddot(numTrees, treePredictions, 1, _treeWeights, 1)
    if (prediction > 0.0) 1.0 else 0.0
  }

  override def copy(extra: ParamMap): GBTClassificationModel = {
    copyValues(new GBTClassificationModel(uid, _trees, _treeWeights), extra)
  }

  override def toString: String = {
    s"GBTClassificationModel with $numTrees trees"
  }

  /** (private[ml]) Convert to a model in the old API */
  private[ml] def toOld: OldGBTModel = {
    new OldGBTModel(OldAlgo.Classification, _trees.map(_.toOld), _treeWeights)
  }
}
private[ml] object GBTClassificationModel {

  /**
   * (private[ml]) Convert a model from the old API.
   *
   * @param oldModel old-API model; must have been trained for classification
   * @param parent new-API estimator that produced the model; may be null, in
   *               which case a fresh UID is generated
   * @param categoricalFeatures map of categorical feature index to arity
   */
  def fromOld(
      oldModel: OldGBTModel,
      parent: GBTClassifier,
      categoricalFeatures: Map[Int, Int]): GBTClassificationModel = {
    require(oldModel.algo == OldAlgo.Classification, "Cannot convert GradientBoostedTreesModel" +
      s" with algo=${oldModel.algo} (old API) to GBTClassificationModel (new API).")
    val newTrees = oldModel.trees.map { tree =>
      // parent for each tree is null since there is no good way to set this.
      DecisionTreeRegressionModel.fromOld(tree, null, categoricalFeatures)
    }
    val uid = if (parent != null) parent.uid else Identifiable.randomUID("gbtc")
    // Bug fix: use the null-safe `uid` computed above instead of `parent.uid`,
    // which threw a NullPointerException when `parent` was null — the very case
    // the preceding line guards against.
    new GBTClassificationModel(uid, newTrees, oldModel.treeWeights)
  }
}
| andrewor14/iolap | mllib/src/main/scala/org/apache/spark/ml/classification/GBTClassifier.scala | Scala | apache-2.0 | 9,045 |
package scalan.sql
import java.lang.reflect.Method
import scalan._
import scalan.sql.parser.SqlAST._
/**
 * Staged DSL for scannable data sources: full-table scans and index scans
 * that can be turned into relations.
 */
trait Scannables extends ScalanDsl {
  self: ScannablesDsl with ScalanSql =>

  type RScannable[A] = Rep[Scannable[A]]

  trait Scannable[Row] extends Def[Scannable[Row]] {
    // Staged element (type descriptor) of the row type.
    def eRow: Elem[Row]

    def sourceIter(): Rep[CursorIter[Row]]

    // A full scan is simply the source iterator wrapped as a relation.
    def fullScan(): RRelation[Row] = IterBasedRelation(sourceIter())(eRow)
  }

  /** Scans all rows of a table in the given direction. */
  abstract class TableScannable[Row](val table: Rep[Table], val scanId: Rep[Int], val direction: Rep[SortDirection], val fakeDep: Rep[Unit], val kernelInput: Rep[KernelInput])(implicit val eRow: Elem[Row]) extends Scannable[Row] {
    override def sourceIter() = TableIter(table, scanId, direction, fakeDep, kernelInput)

    // Produces the rows of this table whose rowids `f` extracts from `relation`.
    def byRowids[B](relation: RRelation[B], f: Rep[B => Rowid]): RRelation[Row] = {
      val iter = sourceIter().byRowids(relation.iter, f)
      iterBasedRelation(iter)
    }
  }

  /** Scans a table through one of its indices. */
  abstract class IndexScannable[Row](val table: Rep[Table], val index: Rep[Index], val scanId: Rep[Int], val direction: Rep[SortDirection], val fakeDep: Rep[Unit], val kernelInput: Rep[KernelInput])(implicit val eRow: Elem[Row]) extends Scannable[Row] {
    override def sourceIter() = IndexIter(table, index, scanId, direction, fakeDep, kernelInput)

    // FIXME assumes all columns in index are ASC
    /**
     * Builds a relation restricted by `bounds`: positions the cursor using the
     * fixed key values plus the start bound, then keeps taking rows while the
     * end bound and the fixed-value equalities still hold.
     */
    def search(bounds: SearchBounds): RRelation[Row] = {
      val index0 = index.asValue
      val direction0 = direction.asValue
      def inverseIfDescending(op: ComparisonOp) = op.inverseIfDescending(direction0)
      // When scanning in descending order, lower and upper bounds swap roles.
      val (startBound, endBound) = direction0 match {
        case Ascending => (bounds.lowerBound, bounds.upperBound)
        case Descending => (bounds.upperBound, bounds.lowerBound)
      }
      val fixedValues = bounds.fixedValues
      val (keyValues, startOpIfAscending) = startBound match {
        case None =>
          (fixedValues, GreaterEq)
        case Some(Bound(value, isInclusive)) =>
          (fixedValues :+ value, if (isInclusive) GreaterEq else Greater)
      }
      val startOp = inverseIfDescending(startOpIfAscending)
      // Predicate that holds while rows are still inside the requested range:
      // the end-bound comparison AND equality on every fixed column.
      val test = fun[Row, Boolean] { _x =>
        val x = _x.asRep[Struct]
        val firstCondition = endBound match {
          case None => toRep(true)
          case Some(Bound(value, isInclusive)) =>
            // The end bound applies to the first index column after the fixed prefix.
            val column = index0.columns(fixedValues.length)
            val y = field(x, column.name)
            val endOp = inverseIfDescending(if (isInclusive) LessEq else Less)
            comparisonOp(endOp, y, value)
        }
        (index0.columns, fixedValues).zipped.foldLeft(firstCondition) {
          case (cond, (column, value)) =>
            val y = field(x, column.name)
            cond && comparisonOp(Eq, y, value)
        }
      }
      val boundedIter = if (keyValues.nonEmpty) {
        val repKeyValues = SArray.fromSyms(keyValues.asInstanceOf[List[Rep[Any]]])(AnyElement)
        sourceIter().fromKeyWhile(repKeyValues, startOp, test)
      } else
        sourceIter().takeWhile(test)
      // if bounds.isEmpty this is the same as fullScan()
      IterBasedRelation(boundedIter)
    }
  }
}
trait ScannablesDsl extends impl.ScannablesAbs { self: ScalanSql =>
  // Lets any Elem[Scannable[A]] be treated as a ScannableElem (to reach eRow etc.).
  implicit def ScannableElemExtensions[A](ie: Elem[Scannable[A]]) = ie.asInstanceOf[ScannableElem[A, Scannable[A]]]

  /** One end of a search range: a key value plus whether the bound itself is included. */
  case class Bound(value: Rep[_], isInclusive: Boolean)

  /**
   * Bounds for an index search: exact-match values for a prefix of the index
   * columns plus optional lower/upper bounds on the next column.
   */
  case class SearchBounds(fixedValues: List[Rep[_]], lowerBound: Option[Bound], upperBound: Option[Bound]) {
    def isEmpty = fixedValues.isEmpty && lowerBound.isEmpty && upperBound.isEmpty

    // Prepends, so fixed values accumulate in reverse order of addition.
    def addFixedValue(value: Rep[_]) = copy(fixedValues = value :: fixedValues)
  }

  object SearchBounds {
    /** Unbounded search (equivalent to a full scan). */
    val empty = SearchBounds(Nil, None, None)

    /** Range-only bounds with no fixed key prefix. */
    def range(lowerBound: Option[Bound], upperBound: Option[Bound]) = SearchBounds(Nil, lowerBound, upperBound)
  }
}
/** Standard (unstaged) evaluation counterpart of the Scannables DSL; no extra members needed. */
trait ScannablesDslStd extends impl.ScannablesStd { self: ScalanSqlStd =>
}
/** Staged-expression counterpart of the Scannables DSL. */
trait ScannablesDslExp extends impl.ScannablesExp { self: ScalanSqlExp =>
  // Resolves result elements for staged method calls on Scannable values:
  // the scan/search methods all return relations over the scannable's row type.
  override def getResultElem(receiver: Exp[_], m: Method, args: List[AnyRef]) = receiver.elem match {
    case elem: ScannableElem[_, _] =>
      m.getName match {
        case "fullScan" | "search" | "byRowids" =>
          relationElement(elem.eRow)
        case _ => super.getResultElem(receiver, m, args)
      }
    case _ =>
      super.getResultElem(receiver, m, args)
  }

  // Pretty-prints table/index constants in the staged graph.
  override def formatConst(x: Any) = x match {
    case x: Table => s"Table ${x.name}"
    case x: Index => s"Index ${x.name} ON ${x.tableName}"
    case _ => super.formatConst(x)
  }
}
| scalan/scalan-sql | scalan-sql-core/src/main/scala/scalan/sql/Scannables.scala | Scala | apache-2.0 | 4,551 |
package com.blinkboxbooks.resourceserver
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import com.blinkbox.books.jar.JarManifest
import com.typesafe.scalalogging.StrictLogging
import org.slf4j.MDC
object HttpMonitoring {
  /** Request headers mirrored into the logging MDC, keyed by header name -> MDC key. */
  private val requestHeaderMdcKeys: Map[String, String] = Seq(
    "Accept-Encoding" -> "httpAcceptEncoding",
    "User-Agent" -> "httpUserAgent",
    "Via" -> "httpVia",
    "X-Forwarded-For" -> "httpXForwardedFor",
    "X-Requested-With" -> "httpXRequestedWith").toMap

  /** Response headers mirrored into the logging MDC, keyed by header name -> MDC key. */
  private val responseHeaderMdcKeys: Map[String, String] = Seq(
    "Cache-Control" -> "httpCacheControl",
    "Content-Length" -> "httpContentLength",
    "WWW-Authenticate" -> "httpWWWAuthenticate").toMap
}
/**
 * Mixin that wraps HTTP request handling with MDC-based logging: it records
 * request/response metadata in the MDC, times the request, and logs a summary
 * line whose level depends on the response status.
 */
trait HttpMonitoring extends StrictLogging {

  import HttpMonitoring._

  /**
   * Runs `func` (the actual request handling), recording request/response
   * details in the MDC and logging a one-line summary with timing.
   *
   * Log level: status >= 500 -> error; >= 400 (except 401) -> warn; otherwise info.
   *
   * NOTE(review): MDC entries are never cleared here, so on pooled threads the
   * values can leak into later, unrelated log statements — confirm whether a
   * downstream filter clears the MDC. Also, if `func` throws, the status/timing
   * MDC values and the summary log line are skipped entirely.
   */
  def monitor[T](request: HttpServletRequest, response: HttpServletResponse)(func: => T) = {
    val timestamp = System.currentTimeMillis
    MDC.put("timestamp", timestamp.toString)
    // "???" marks a missing manifest version rather than failing the request.
    MDC.put("facilityVersion", JarManifest.blinkboxDefault.flatMap(_.implementationVersion).getOrElse("???"))
    MDC.put("httpMethod", request.getMethod)
    MDC.put("httpPath", request.getPathInfo)
    MDC.put("httpPathAndQuery", request.getPathInfo + Option(request.getQueryString).map(q => s"?$q").getOrElse(""))
    MDC.put("httpClientIP", request.getRemoteAddr)
    // Only headers that are actually present are copied into the MDC.
    requestHeaderMdcKeys.foreach {
      case (name, key) => Option(request.getHeader(name)).foreach(MDC.put(key, _))
    }

    val result = func

    val duration = System.currentTimeMillis - timestamp
    MDC.put("httpStatus", response.getStatus.toString)
    MDC.put("httpApplicationTime", duration.toString)
    responseHeaderMdcKeys.foreach {
      case (name, key) => Option(response.getHeader(name)).foreach(MDC.put(key, _))
    }

    val message = s"${request.getMethod} ${request.getPathInfo} returned ${response.getStatus} in ${duration}ms"
    if (response.getStatus >= 500) logger.error(message)
    else if (response.getStatus >= 400 && response.getStatus != 401) logger.warn(message)
    else logger.info(message)

    result
  }
}
| blinkboxbooks/resource-server.scala | src/main/scala/com/blinkboxbooks/resourceserver/HttpMonitoring.scala | Scala | mit | 2,045 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.util.Calendar
import org.scalatest.exceptions.TestFailedException
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.plans.PlanTestBase
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
 * Unit tests for the CSV expressions: `CsvToStructs` (from_csv), `SchemaOfCsv`
 * (CSV schema inference) and `StructsToCsv` (to_csv). Covers parse modes,
 * malformed input, time-zone handling, nullability forcing and missing columns.
 */
class CsvExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper with PlanTestBase {
  // Deliberately malformed input used by the bad-UTF-8 test below.
  val badCsv = "\\u0000\\u0000\\u0000A\\u0001AAA"

  // Default time zone for most tests; individual tests override it where relevant.
  val gmtId = Option(DateTimeUtils.TimeZoneGMT.getID)

  test("from_csv") {
    val csvData = "1"
    val schema = StructType(StructField("a", IntegerType) :: Nil)
    checkEvaluation(
      CsvToStructs(schema, Map.empty, Literal(csvData), gmtId),
      InternalRow(1)
    )
  }

  test("from_csv - invalid data") {
    val csvData = "---"
    val schema = StructType(StructField("a", DoubleType) :: Nil)
    // Permissive mode turns an unparseable field into null rather than failing.
    checkEvaluation(
      CsvToStructs(schema, Map("mode" -> PermissiveMode.name), Literal(csvData), gmtId),
      InternalRow(null))

    // Default mode is Permissive
    checkEvaluation(CsvToStructs(schema, Map.empty, Literal(csvData), gmtId), InternalRow(null))
  }

  test("from_csv null input column") {
    val schema = StructType(StructField("a", IntegerType) :: Nil)
    checkEvaluation(
      CsvToStructs(schema, Map.empty, Literal.create(null, StringType), gmtId),
      null
    )
  }

  test("from_csv bad UTF-8") {
    val schema = StructType(StructField("a", IntegerType) :: Nil)
    checkEvaluation(
      CsvToStructs(schema, Map.empty, Literal(badCsv), gmtId),
      InternalRow(null))
  }

  test("from_csv with timestamp") {
    val schema = StructType(StructField("t", TimestampType) :: Nil)

    val csvData1 = "2016-01-01T00:00:00.123Z"
    var c = Calendar.getInstance(DateTimeUtils.TimeZoneGMT)
    c.set(2016, 0, 1, 0, 0, 0)
    c.set(Calendar.MILLISECOND, 123)
    checkEvaluation(
      CsvToStructs(schema, Map.empty, Literal(csvData1), gmtId),
      InternalRow(c.getTimeInMillis * 1000L)
    )
    // The result doesn't change because the CSV string includes timezone string ("Z" here),
    // which means the string represents the timestamp string in the timezone regardless of
    // the timeZoneId parameter.
    checkEvaluation(
      CsvToStructs(schema, Map.empty, Literal(csvData1), Option("PST")),
      InternalRow(c.getTimeInMillis * 1000L)
    )

    val csvData2 = "2016-01-01T00:00:00"
    for (tz <- DateTimeTestUtils.outstandingTimezones) {
      c = Calendar.getInstance(tz)
      c.set(2016, 0, 1, 0, 0, 0)
      c.set(Calendar.MILLISECOND, 0)
      // Without a zone in the string, the timeZoneId parameter is used...
      checkEvaluation(
        CsvToStructs(
          schema,
          Map("timestampFormat" -> "yyyy-MM-dd'T'HH:mm:ss"),
          Literal(csvData2),
          Option(tz.getID)),
        InternalRow(c.getTimeInMillis * 1000L)
      )
      // ...and an explicit timezone option takes precedence over that parameter.
      checkEvaluation(
        CsvToStructs(
          schema,
          Map("timestampFormat" -> "yyyy-MM-dd'T'HH:mm:ss",
            DateTimeUtils.TIMEZONE_OPTION -> tz.getID),
          Literal(csvData2),
          gmtId),
        InternalRow(c.getTimeInMillis * 1000L)
      )
    }
  }

  test("from_csv empty input column") {
    val schema = StructType(StructField("a", IntegerType) :: Nil)
    checkEvaluation(
      CsvToStructs(schema, Map.empty, Literal.create(" ", StringType), gmtId),
      InternalRow(null)
    )
  }

  test("forcing schema nullability") {
    val input = """1,,"foo""""
    val csvSchema = new StructType()
      .add("a", LongType, nullable = false)
      .add("b", StringType, nullable = false)
      .add("c", StringType, nullable = false)
    val output = InternalRow(1L, null, UTF8String.fromString("foo"))
    val expr = CsvToStructs(csvSchema, Map.empty, Literal.create(input, StringType), gmtId)
    checkEvaluation(expr, output)
    // The output schema of from_csv is always nullable, regardless of the requested schema.
    val schema = expr.dataType
    val schemaToCompare = csvSchema.asNullable
    assert(schemaToCompare == schema)
  }

  test("from_csv missing columns") {
    val schema = new StructType()
      .add("a", IntegerType)
      .add("b", IntegerType)
    checkEvaluation(
      CsvToStructs(schema, Map.empty, Literal.create("1"), gmtId),
      InternalRow(1, null)
    )
  }

  test("unsupported mode") {
    val csvData = "---"
    val schema = StructType(StructField("a", DoubleType) :: Nil)
    val exception = intercept[TestFailedException] {
      checkEvaluation(
        CsvToStructs(schema, Map("mode" -> DropMalformedMode.name), Literal(csvData), gmtId),
        InternalRow(null))
    }.getCause
    assert(exception.getMessage.contains("from_csv() doesn't support the DROPMALFORMED mode"))
  }

  test("infer schema of CSV strings") {
    checkEvaluation(new SchemaOfCsv(Literal.create("1,abc")), "struct<_c0:int,_c1:string>")
  }

  test("infer schema of CSV strings by using options") {
    checkEvaluation(
      new SchemaOfCsv(Literal.create("1|abc"), Map("delimiter" -> "|")),
      "struct<_c0:int,_c1:string>")
  }

  test("to_csv - struct") {
    val schema = StructType(StructField("a", IntegerType) :: Nil)
    val struct = Literal.create(create_row(1), schema)
    checkEvaluation(StructsToCsv(Map.empty, struct, gmtId), "1")
  }

  test("to_csv null input column") {
    val schema = StructType(StructField("a", IntegerType) :: Nil)
    val struct = Literal.create(null, schema)
    checkEvaluation(
      StructsToCsv(Map.empty, struct, gmtId),
      null
    )
  }

  test("to_csv with timestamp") {
    val schema = StructType(StructField("t", TimestampType) :: Nil)
    val c = Calendar.getInstance(DateTimeUtils.TimeZoneGMT)
    c.set(2016, 0, 1, 0, 0, 0)
    c.set(Calendar.MILLISECOND, 0)
    val struct = Literal.create(create_row(c.getTimeInMillis * 1000L), schema)

    checkEvaluation(StructsToCsv(Map.empty, struct, gmtId), "2016-01-01T00:00:00.000Z")
    checkEvaluation(
      StructsToCsv(Map.empty, struct, Option("PST")), "2015-12-31T16:00:00.000-08:00")

    checkEvaluation(
      StructsToCsv(
        Map("timestampFormat" -> "yyyy-MM-dd'T'HH:mm:ss",
          DateTimeUtils.TIMEZONE_OPTION -> gmtId.get),
        struct,
        gmtId),
      "2016-01-01T00:00:00"
    )
    checkEvaluation(
      StructsToCsv(
        Map("timestampFormat" -> "yyyy-MM-dd'T'HH:mm:ss",
          DateTimeUtils.TIMEZONE_OPTION -> "PST"),
        struct,
        gmtId),
      "2015-12-31T16:00:00"
    )
  }
}
| ahnqirage/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CsvExpressionsSuite.scala | Scala | apache-2.0 | 7,194 |
/*
* Skylark
* http://skylark.io
*
* Copyright 2012-2017 Quantarray, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.quantarray.skylark.measure
/**
 * Time converter: converts between time measures of the same dimension.
 * All conversion behavior is inherited from [[SameMeasureConverter]].
 *
 * @author Araik Grigoryan
 */
trait TimeConverter extends SameMeasureConverter[TimeMeasure]

object TimeConverter
{
  /** Creates a converter instance; no members need overriding. */
  def apply(): TimeConverter = new TimeConverter {}
}
| quantarray/skylark | skylark-measure/src/main/scala/com/quantarray/skylark/measure/TimeConverter.scala | Scala | apache-2.0 | 881 |
import org.http4s.server.blaze.BlazeServer
import org.http4s.server.HttpService
import org.http4s.dsl._
import java.net.URLDecoder
object Server extends App {

  /**
   * Parses an application/x-www-form-urlencoded body into a key -> value map.
   * Keys and values are URL-decoded as UTF-8.
   *
   * Fixes over the previous version: segments without '=' are kept with an
   * empty value, and empty segments (e.g. an empty body, or "a=1&&b=2") are
   * skipped — both previously threw ArrayIndexOutOfBoundsException.
   * As before, the last occurrence of a duplicated key wins.
   */
  def parseBody(body: String): Map[String, String] = {
    body.split("&").iterator
      .filter(_.nonEmpty)
      .map { segment =>
        segment.split("=", 2).map(part => URLDecoder.decode(part, "UTF-8")) match {
          case Array(key, value) => key -> value
          case Array(key) => key -> "" // no '=' present: key with empty value
        }
      }
      .toMap
  }

  // Logs selected form fields of any POST to stdout and acknowledges the webhook.
  val service: HttpService = {
    case req @ POST -> Root =>
      val data = parseBody(text(req).run)
      println("from: " + data.getOrElse("from", ""))
      println("to: " + data.getOrElse("to", ""))
      println("direction: " + data.getOrElse("direction", ""))
      Ok("roger that")
  }

  BlazeServer.newBuilder.mountService(service, "/").run()
}
| Drooids/sipgate.io | examples/scala/Server.scala | Scala | bsd-2-clause | 691 |
package at.ac.tuwien.ifs.corpussimplifier.trec.web.bin
import java.io.File
import at.ac.tuwien.ifs.corpussimplifier.{TerrierConfig, TerrierModel}
import at.ac.tuwien.ifs.trecify.trec.model.{TRECCollection, TRECDocument}
/**
* Created by aldo on 09/06/15.
*/
/**
 * Command-line entry point: builds a Terrier model over a TREC collection and
 * writes a simplified collection.
 *
 * args(0): path to the input TREC collection
 * args(1): path for the simplified output collection
 * args(2): (optional) path to a Terrier configuration file
 */
object Terrier extends App {

  override def main(args: Array[String]): Unit = {
    val pathCollection = new File(args(0))
    val pathSimplifiedCollection = new File(args(1))

    // NOTE(review): `config` is null when no config path is supplied —
    // presumably TerrierModel tolerates a null config; confirm before
    // replacing this with an Option.
    val config =
      if (args.length > 2)
        new TerrierConfig(args(2))
      else
        null

    println(pathCollection)
    println(pathSimplifiedCollection)

    val collection = new TRECCollection(pathCollection.getAbsolutePath)

    // Document path is made relative to the collection root (without ".gz");
    // indexed content is the h3 heading plus the document text.
    val model = new TerrierModel[TRECDocument](config,
      collection,
      (d: TRECDocument) => d.path.replace(pathCollection.getAbsolutePath, "").replace(".gz", ""),
      (d: TRECDocument) => d.docno,
      (d: TRECDocument) => d.h3 + "\\n" + d.text)

    model.doJob(pathCollection.getAbsolutePath, pathSimplifiedCollection.getAbsolutePath)
  }
}
| aldolipani/CorpusSimplifier | src/main/scala/at/ac/tuwien/ifs/corpussimplifier/trec/web/bin/Terrier.scala | Scala | mit | 1,038 |
package org.dhira.core.optimize.listeners
import lombok.Builder
import org.dhira.core.nnet.api.Model
import org.dhira.core.optimize.api.IterationListener
import org.nd4j.linalg.api.ndarray.INDArray
import org.nd4j.linalg.ops.transforms.Transforms
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import java.io.File
import java.io.IOException
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardOpenOption
/**
 * An iteration listener that provides details on parameters and gradients at each iteration during training.
 * Attempts to provide much of the same information as the UI histogram iteration listener, but in a text-based
 * format (for example, when learning on a system accessed via SSH etc).
 * i.e., is intended to aid network tuning and debugging<br>
 * This iteration listener is set up to calculate mean, min, max, and mean absolute value
 * of each type of parameter and gradient in the network at each iteration.<br>
 * These statistics can be written to the console, to a logger, and/or to a file, at a
 * configurable iteration frequency and with a configurable delimiter.
 *
 * @author Alex Black
 */
object ParamAndGradientIterationListener {
  // Cap on how many file-write failures are logged before going silent.
  private val MAX_WRITE_FAILURE_MESSAGES: Int = 10
  private val logger: Logger = LoggerFactory.getLogger(classOf[ParamAndGradientIterationListener])
}
class ParamAndGradientIterationListener extends IterationListener {
// Set once invoke() has been called (IterationListener contract).
private var isInvoked: Boolean = false
// Emit a statistics row every `iterations` calls to iterationDone.
private var iterations: Int = 1
private var totalIterationCount: Long = 0
// Which statistics to compute and print.
private var printMean: Boolean = true
private var printHeader: Boolean = true
private var printMinMax: Boolean = true
private var printMeanAbsValue: Boolean = true
// Output destinations; file/filePath stay null when file output is disabled.
private var file: File = null
private var filePath: Path = null
private var outputToConsole: Boolean = false
private var outputToFile: Boolean = false
private var outputToLogger: Boolean = false
// Column separator used in emitted rows.
private var delimiter: String = "\\t"
// Number of file-write failures seen so far (used to cap log spam).
private var writeFailureCount: Int = 0

/** Default constructor for output to console only every iteration, tab delimited */
// def this() {
// // this(1, true, true, true, true, true, false, false, null, "\\t")
// }
/** Full constructor with all options.
 * Note also: ParamAndGradientIterationListener.builder() can be used instead of this constructor.
 * @param iterations calculate and report values every 'iterations' iterations
 * @param printHeader Whether to output a header row (i.e., names for each column)
 * @param printMean Calculate and display the mean of parameters and gradients
 * @param printMinMax Calculate and display the min/max of the parameters and gradients
 * @param printMeanAbsValue Calculate and display the mean absolute value
 * @param outputToConsole If true, display the values to the console (System.out.println())
 * @param outputToFile If true, write the values to a file, one per line
 * @param outputToLogger If true, log the values
 * @param file File to write values to. May be null, not used if outputToFile == false
 * @param delimiter delimiter (for example, "\\t" or "," etc)
 */
@Builder def this(iterations: Int, printHeader: Boolean, printMean: Boolean, printMinMax: Boolean,
                  printMeanAbsValue: Boolean, outputToConsole: Boolean, outputToFile: Boolean,
                  outputToLogger: Boolean, file: File, delimiter: String) {
  this()
  this.printHeader = printHeader
  this.printMean = printMean
  this.printMinMax = printMinMax
  this.printMeanAbsValue = printMeanAbsValue
  this.iterations = iterations
  this.file = file
  // Cache the Path next to the File so write calls don't recompute it.
  if (this.file != null) {
    this.filePath = file.toPath
  }
  this.outputToConsole = outputToConsole
  this.outputToFile = outputToFile
  this.outputToLogger = outputToLogger
  this.delimiter = delimiter
}
/** Whether this listener has been invoked (IterationListener contract). */
// Idiom fix: dropped the redundant `return` and braces.
def invoked: Boolean = isInvoked

/** Marks this listener as invoked (IterationListener contract). */
// Idiom fix: replaced deprecated procedure syntax (`def invoke { ... }`)
// with an explicit Unit result type; the signature is unchanged.
def invoke: Unit = {
  isInvoked = true
}
/**
 * Called once per training iteration. Every `iterations` calls, computes the
 * configured statistics (mean, min/max, mean absolute value) for each
 * parameter and its gradient and writes one delimited row to the configured
 * outputs. On the very first call (when printHeader is set) a header row
 * naming every column is written first.
 *
 * NOTE(review): relies on `scala.collection.JavaConversions` implicits and
 * Java-style Map methods (keySet/entrySet/get returning values directly) —
 * presumably `Map` here resolves to java.util.Map; confirm against the
 * file's imports.
 */
def iterationDone(model: Model, iteration: Int) {
  totalIterationCount += 1

  // Write the header row exactly once, before the first data row.
  if (totalIterationCount == 1 && printHeader) {
    val params: Map[String, INDArray] = model.paramTable
    // NOTE(review): result of getVariables is discarded; likely a leftover
    // from the Java-to-Scala translation.
    model.conf.getVariables
    val sb: StringBuilder = new StringBuilder
    sb.append("n")
    sb.append(delimiter)
    sb.append("score")
    import scala.collection.JavaConversions._
    for (s <- params.keySet) {
      if (printMean) sb.append(delimiter).append(s).append("_mean")
      if (printMinMax) {
        sb.append(delimiter).append(s).append("_min").append(delimiter).append(s).append("_max")
      }
      if (printMeanAbsValue) sb.append(delimiter).append(s).append("_meanAbsValue")
      // Gradient columns carry a "G" suffix to distinguish them from parameters.
      if (printMean) sb.append(delimiter).append(s).append("_meanG")
      if (printMinMax) {
        sb.append(delimiter).append(s).append("_minG").append(delimiter).append(s).append("_maxG")
      }
      if (printMeanAbsValue) sb.append(delimiter).append(s).append("_meanAbsValueG")
    }
    sb.append("\\n")
    if (outputToFile) {
      try {
        // Header truncates any existing file; data rows below append.
        Files.write(filePath, sb.toString.getBytes, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)
      }
      catch {
        case e: IOException => {
          // Post-increment emulation: warn for only the first
          // MAX_WRITE_FAILURE_MESSAGES failures, then go silent.
          if (({
            writeFailureCount += 1; writeFailureCount - 1
          }) < ParamAndGradientIterationListener.MAX_WRITE_FAILURE_MESSAGES) {
            ParamAndGradientIterationListener.logger.warn("Error writing to file: {}", e)
          }
          if (writeFailureCount == ParamAndGradientIterationListener.MAX_WRITE_FAILURE_MESSAGES) {
            ParamAndGradientIterationListener.logger.warn("Max file write messages displayed. No more failure messages will be printed")
          }
        }
      }
    }
    if (outputToLogger) ParamAndGradientIterationListener.logger.info(sb.toString)
    if (outputToConsole) System.out.println(sb.toString)
  }

  // Only emit a data row every `iterations` calls.
  if (totalIterationCount % iterations != 0) return

  val params: Map[String, INDArray] = model.paramTable
  val grads: Map[String, INDArray] = model.gradient.gradientForVariable
  val sb: StringBuilder = new StringBuilder
  sb.append(totalIterationCount)
  sb.append(delimiter)
  sb.append(model.score)
  import scala.collection.JavaConversions._
  for (entry <- params.entrySet) {
    val currParams: INDArray = entry.getValue
    val currGrad: INDArray = grads.get(entry.getKey)
    // Parameter statistics first, then the matching gradient statistics,
    // in the same column order as the header above.
    if (printMean) {
      sb.append(delimiter)
      sb.append(currParams.meanNumber.doubleValue)
    }
    if (printMinMax) {
      sb.append(delimiter)
      sb.append(currParams.minNumber.doubleValue)
      sb.append(delimiter)
      sb.append(currParams.maxNumber.doubleValue)
    }
    if (printMeanAbsValue) {
      sb.append(delimiter)
      // dup() before abs — presumably so Transforms.abs does not mutate the
      // network's own arrays; confirm Transforms.abs semantics.
      val abs: INDArray = Transforms.abs(currParams.dup)
      sb.append(abs.meanNumber.doubleValue)
    }
    if (printMean) {
      sb.append(delimiter)
      sb.append(currGrad.meanNumber.doubleValue)
    }
    if (printMinMax) {
      sb.append(delimiter)
      sb.append(currGrad.minNumber.doubleValue)
      sb.append(delimiter)
      sb.append(currGrad.maxNumber.doubleValue)
    }
    if (printMeanAbsValue) {
      sb.append(delimiter)
      val abs: INDArray = Transforms.abs(currGrad.dup)
      sb.append(abs.meanNumber.doubleValue)
    }
  }
  sb.append("\\n")
  val out: String = sb.toString
  if (outputToLogger) ParamAndGradientIterationListener.logger.info(out)
  if (outputToConsole) System.out.print(out)
  if (outputToFile) {
    try {
      Files.write(filePath, out.getBytes, StandardOpenOption.CREATE, StandardOpenOption.APPEND)
    }
    catch {
      case e: IOException => {
        if (({
          writeFailureCount += 1; writeFailureCount - 1
        }) < ParamAndGradientIterationListener.MAX_WRITE_FAILURE_MESSAGES) {
          ParamAndGradientIterationListener.logger.warn("Error writing to file: {}", e)
        }
        if (writeFailureCount == ParamAndGradientIterationListener.MAX_WRITE_FAILURE_MESSAGES) {
          ParamAndGradientIterationListener.logger.warn("Max file write messages displayed. No more failure messages will be printed")
        }
      }
    }
  }
}
} | Mageswaran1989/aja | src/main/scala/org/aja/dhira/src/main/scala/org/dhira/core/optimize/listeners/ParamAndGradientIterationListener.scala | Scala | apache-2.0 | 8,116 |
import org.scalatest._
// Base class for specs: ScalaTest WordSpec syntax with a fresh test-class
// instance per test (OneInstancePerTest), plus Matchers and OptionValues.
abstract class PlaySpec extends WordSpec
  with OneInstancePerTest
  with Matchers
| scalatest/scalatest-play | test/PlaySpec.scala | Scala | apache-2.0 | 193 |
package mesosphere.marathon
package core.task.update.impl.steps
import akka.Done
import mesosphere.UnitTest
import mesosphere.marathon.core.instance.TestInstanceBuilder
import mesosphere.marathon.core.instance.update.{InstanceChange, InstanceChangeHandler}
import mesosphere.marathon.core.task.bus.TaskStatusUpdateTestHelper
import mesosphere.marathon.state.PathId
import mesosphere.marathon.test.CaptureLogEvents
import scala.concurrent.Future
/**
 * Tests for ContinueOnErrorStep: the wrapper must delegate to the nested
 * handler, derive its name from the nested handler's name, and swallow
 * failures from the nested handler while logging an error.
 */
class ContinueOnErrorStepTest extends UnitTest {
  "ContinueOnErrorStep" should {
    "name uses nested name" in {
      // Stub handler; process is never expected to run in this test.
      object nested extends InstanceChangeHandler {
        override def name: String = "nested"
        override def metricName: String = "metric"
        override def process(update: InstanceChange): Future[Done] = {
          throw new scala.RuntimeException("not implemted")
        }
      }

      ContinueOnErrorStep(nested).name should equal("continueOnError(nested)")
    }

    "A successful step should not produce logging output" in {
      val f = new Fixture
      Given("a nested step that is always successful")
      f.nested.process(any) returns Future.successful(Done)
      val step = ContinueOnErrorStep(f.nested)

      When("executing the step")
      val logEvents = CaptureLogEvents.forBlock {
        val resultFuture = step.process(TaskStatusUpdateTestHelper.running(f.dummyInstanceBuilder.getInstance()).wrapped)
        resultFuture.futureValue
      }

      Then("it should execute the nested step")
      verify(f.nested, times(1)).process(any)
      And("not produce any logging output")
      // No log line mentioning this instance id should have been emitted.
      logEvents.filter(_.getMessage.contains(s"[${f.dummyInstance.instanceId.idString}]")) should be(empty)
    }

    "A failing step should log the error but proceed" in {
      val f = new Fixture
      Given("a nested step that always fails")
      f.nested.name returns "nested"
      f.nested.process(any) returns Future.failed(new RuntimeException("error!"))
      val step = ContinueOnErrorStep(f.nested)

      When("executing the step")
      val logEvents = CaptureLogEvents.forBlock {
        // The wrapper must not propagate the nested failure.
        val resultFuture = step.process(TaskStatusUpdateTestHelper.running(f.dummyInstanceBuilder.getInstance()).wrapped)
        resultFuture.futureValue
      }

      Then("it should execute the nested step")
      verify(f.nested, times(1)).process(any)
      And("produce an error message in the log")
      logEvents.map(_.toString) should contain(
        s"[ERROR] while executing step nested for [${f.dummyInstance.instanceId.idString}], continue with other steps"
      )
    }
  }

  // Shared fixture: a dummy launched instance plus a mocked nested handler.
  class Fixture {
    private[this] val appId: PathId = PathId("/test")
    val dummyInstanceBuilder = TestInstanceBuilder.newBuilderWithLaunchedTask(appId)
    val dummyInstance = dummyInstanceBuilder.getInstance()
    val nested = mock[InstanceChangeHandler]
  }
}
| gsantovena/marathon | src/test/scala/mesosphere/marathon/core/task/update/impl/steps/ContinueOnErrorStepTest.scala | Scala | apache-2.0 | 2,841 |
package com.hyenawarrior.oldnorsedictionary.modelview
import com.hyenawarrior.OldNorseGrammar.grammar.adjectival.enums.AdjectiveType
import com.hyenawarrior.OldNorseGrammar.grammar.enums.Case._
import com.hyenawarrior.OldNorseGrammar.grammar.enums.GNumber._
import com.hyenawarrior.OldNorseGrammar.grammar.enums.Pronoun._
import com.hyenawarrior.OldNorseGrammar.grammar.verbs.enums.VerbModeEnum._
import com.hyenawarrior.OldNorseGrammar.grammar.verbs.enums.VerbTenseEnum._
import com.hyenawarrior.OldNorseGrammar.grammar.verbs.enums.VerbVoice._
import com.hyenawarrior.OldNorseGrammar.grammar.verbs.enums.{VerbModeEnum, VerbTenseEnum, VerbVoice}
import com.hyenawarrior.OldNorseGrammar.grammar.enums.{Case, GNumber, Gender, Pronoun}
import com.hyenawarrior.auxiliary.enum.EnumConst
/**
* Created by HyenaWarrior on 2018.02.24..
*/
/**
 * UI helpers: short display abbreviations for grammar categories (pronouns,
 * tenses, voices, moods, numbers, cases) and Orderings for the enum types.
 *
 * Note: "abbrevation" (sic) is the spelling used throughout callers, so the
 * public name is kept as-is.
 */
package object helpers {

  // Lookup table for simple (non-composite) values; composite tuple keys are
  // handled case-by-case in abbrevationOf below.
  private val ABBREVATIONS_OF = Map[Any, String](
    SG_1 -> "SG1",
    SG_2 -> "SG2",
    SG_3 -> "SG3",
    PL_1 -> "PL1",
    PL_2 -> "PL2",
    PL_3 -> "PL3",

    // Verb tenses
    PRESENT -> "PRS",
    PAST -> "PST",

    // Voices
    ACTIVE -> "ACT",
    MEDIO_PASSIVE -> "MID-PAS",

    // Moods
    INFINITIVE -> "INF",
    INDICATIVE -> "IND",
    SUBJUNCTIVE -> "SBJV",
    IMPERATIVE -> "IMP",
    PARTICIPLE -> "PTCP",

    // numbers
    SINGULAR -> "SG",
    PLURAL -> "PL",

    // cases
    NOMINATIVE -> "NOM",
    ACCUSATIVE -> "ACC",
    DATIVE -> "DAT",
    GENITIVE -> "GEN"
  )

  /**
   * Returns the display abbreviation of `obj`. Composite keys (noun and verb
   * forms encoded as tuples) are rendered part by part; anything unknown
   * falls back to its toString.
   */
  def abbrevationOf[T](obj: T): String = obj match {
    // Noun form: number + case, optionally flagged as definite.
    case ((number: GNumber, caze: Case), definite: Boolean) =>
      abbrevationOf(number) + " " + abbrevationOf(caze) + (if(definite) " DEF" else "")

    // Verb form: rendered as tense, mood, pronoun, voice — absent parts skipped.
    case (mood: VerbModeEnum, voice: VerbVoice, optTense: Option[VerbTenseEnum], optPronoun: Option[Pronoun]) =>
      val md = Some(helpers.abbrevationOf(mood))
      val vc = Some(helpers.abbrevationOf(voice))
      val ts = optTense.map(helpers.abbrevationOf)
      val pr = optPronoun.map(helpers.abbrevationOf)
      Seq(ts, md, pr, vc).flatten.mkString(" ")

    case _ => ABBREVATIONS_OF getOrElse(obj, obj.toString)
  }

  /** Orders enum constants by their declared id. */
  trait EnumOrdering[T <: EnumConst[T]] extends Ordering[T] {
    override def compare(x: T, y: T): Int = x.id() - y.id()
  }

  implicit object VerbVoiceOrdering extends EnumOrdering[VerbVoice]
  implicit object VerbModeOrdering extends EnumOrdering[VerbModeEnum]
  implicit object VerbTenseOrdering extends EnumOrdering[VerbTenseEnum]
  implicit object PronounOrdering extends EnumOrdering[Pronoun]
  implicit object GNumberOrdering extends EnumOrdering[GNumber]
  implicit object CaseOrdering extends EnumOrdering[Case]
  implicit object GenderOrdering extends EnumOrdering[Gender]
  implicit object AdjectiveTypeOrdering extends EnumOrdering[AdjectiveType]
}
| HyenaSoftware/IG-Dictionary | app/src/main/scala/com/hyenawarrior/oldnorsedictionary/modelview/helpers/package.scala | Scala | lgpl-3.0 | 2,793 |
/**
* © 2019 Refinitiv. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.driver
import java.net.InetSocketAddress
import com.datastax.driver.core._
import com.datastax.driver.core.policies.DefaultRetryPolicy
import com.google.common.util.concurrent.{FutureCallback, Futures, MoreExecutors}
import com.typesafe.scalalogging.LazyLogging
import scala.collection.mutable
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.util.{Failure, Success, Try}
/**
* Created with IntelliJ IDEA.
* User: markz
* Date: 3/2/14
* Time: 1:04 PM
* To change this template use File | Settings | File Templates.
*/
/** Abstraction over a Cassandra connection: lifecycle plus session/keyspace access. */
trait Dao extends LazyLogging {
  // Post-construction initialisation hook (e.g. logging cluster topology).
  def init()
  // The live driver session; only valid until shutdown() is called.
  def getSession: Session
  // The keyspace this DAO is bound to.
  def getKeyspace: String
  // Releases the underlying cluster/session resources.
  def shutdown()
}
/**
  * Dao implementation backed by the DataStax native driver.
  *
  * `host` may be a single host or a comma-separated list of contact points.
  * Connection failures at construction time are fatal: the process exits,
  * because nothing downstream can work without a session.
  */
class NativeDriver(clusterName: String, keyspaceName: String, host: String, port: Int, maxConnections: Int = 10, initCommands: Option[List[String]])
    extends Dao {

  import scala.collection.JavaConverters._

  private val pools: PoolingOptions = new PoolingOptions();
  pools.setNewConnectionThreshold(HostDistance.LOCAL, 128)
  // TODO: need understand what have been changed
  /*
  pools.setCoreConnectionsPerHost(HostDistance.LOCAL, maxConnections)
  pools.setMaxConnectionsPerHost(HostDistance.LOCAL, maxConnections)
  pools.setCoreConnectionsPerHost(HostDistance.REMOTE, maxConnections)
  pools.setMaxConnectionsPerHost(HostDistance.REMOTE, maxConnections)
  */

  val hosts = host.split(",")

  private val cluster = new Cluster.Builder()
    // BUGFIX: previously a single InetSocketAddress was built from the raw `host`
    // string, so a comma-separated list produced one bogus contact point and the
    // `hosts` val above was never used. Register every host as a contact point.
    .addContactPointsWithPorts(hosts.map(h => new InetSocketAddress(h.trim, port)).toList.asJava)
    .withPoolingOptions(pools)
    .withSocketOptions(new SocketOptions().setTcpNoDelay(true))
    .withoutJMXReporting() // datastax client depends on old io.dropwizard.metrics (3.2.2),
    .build(); // while metrics4.scala depends on newer version (4.0.1).
  // The 4.0.x release removed the jmx module to another artifact (metrics-jmx)
  // and package (com.codahale.metrics.jmx). while this is true,
  // we are better off without JMX reporting of the client.
  // In future: consider to re-enable this.

  // Optionally run bootstrap CQL (keyspace/table creation) on a keyspace-less
  // session. Any failure here is fatal for the process.
  initCommands.foreach { commands =>
    Try {
      val initSession: Session = cluster.connect()
      // BUGFIX: the bootstrap session used to be left open; close it once the
      // init commands have run (or failed).
      try commands.foreach(initSession.execute)
      finally initSession.close()
    } match {
      case Success(_) =>
      case Failure(err) =>
        logger.error("Initial session (needed for keyspace creation and initial tables) creation failed. Killing the process. The exception was: ", err)
        sys.exit(1)
    }
  }

  // The long-lived session bound to the target keyspace.
  private val session: Session = Try(cluster.connect(keyspaceName)) match {
    case Success(s) => s
    case Failure(err) =>
      logger.error(err.getMessage, err)
      logger.info("Now will kill the process.")
      sys.exit(1)
  }

  /** Logs cluster name and topology; the connection is already established. */
  def init() {
    import scala.collection.JavaConverters._
    val metaData: Metadata = cluster.getMetadata
    val allHosts = metaData.getAllHosts
    logger.info(
      s"Connected to cluster: ${metaData.getClusterName}, and Hosts: ${allHosts.asScala.map(_.toString).mkString("[", ",", "]")}"
    )
  }

  def getSession: Session = session

  def getKeyspace: String = keyspaceName

  def shutdown() {
    cluster.close()
  }
}
object Dao {
  /** Factory for the native-driver backed Dao implementation. */
  def apply(clusterName: String, keyspaceName: String, host: String, port: Int, maxConnections: Int = 10, initCommands: Option[List[String]]) =
    new NativeDriver(clusterName, keyspaceName, host, port, maxConnections, initCommands)
}
/** Helpers for preparing and asynchronously executing driver statements. */
trait DaoExecution {

  /** Prepares a CQL statement against the proxy's active session. */
  def prepare(stmt: String)(implicit daoProxy: Dao): PreparedStatement = daoProxy.getSession.prepare(stmt)

  // NOTE: no retries are attempted here; clients of this trait may wrap this
  // call with their own retry logic, if any.
  /**
    * Executes the statement asynchronously, bridging the driver's Guava
    * ListenableFuture to a Scala Future. The statement is marked idempotent and
    * given the driver's default retry policy. An exception thrown while merely
    * submitting the statement is surfaced as a failed Future rather than thrown.
    */
  def executeAsyncInternal(statmentToExec: Statement)(implicit daoProxy: Dao): Future[ResultSet] = {
    val p = Promise[ResultSet]()
    Try {
      daoProxy.getSession
        .executeAsync(
          statmentToExec
            .setIdempotent(true)
            .setRetryPolicy(DefaultRetryPolicy.INSTANCE)
        )
    } match {
      case Failure(e) => p.failure(e)
      case Success(f: ResultSetFuture) =>
        // Complete the promise on the driver's callback; directExecutor runs the
        // completion on the driver's I/O thread.
        Futures.addCallback(
          f,
          new FutureCallback[ResultSet]() {
            def onSuccess(result: ResultSet): Unit = p.success(result)
            def onFailure(t: Throwable): Unit = p.failure(t)
          },
          MoreExecutors.directExecutor()
        )
    }
    p.future
  }
}
| e-orz/CM-Well | server/cmwell-dao/src/main/scala/cmwell/driver/NativeDriver.scala | Scala | apache-2.0 | 5,042 |
package com.twitter.util
/** A Promise that records the first interrupt signal it receives. */
class HandledPromise[A] extends Promise[A] {
  @volatile var _handled: Option[Throwable] = None

  setInterruptHandler { case cause => _handled = Some(cause) }

  /** The interrupt cause, if this promise has been interrupted. */
  def handled: Option[Throwable] = _handled
}
| BuoyantIO/twitter-util | util-core/src/test/scala/com/twitter/util/HandledPromise.scala | Scala | apache-2.0 | 223 |
package org.smartjava
import akka.actor.{ActorSystem, ActorRef}
import java.net.InetSocketAddress
import org.java_websocket.WebSocket
import org.java_websocket.framing.CloseFrame
import org.java_websocket.handshake.ClientHandshake
import org.java_websocket.server.WebSocketServer
import scala.collection.mutable.Map
import akka.event.Logging
/**
* The WSserver companion objects defines a number of distinct messages sendable by this component
*/
/**
 * The WSServer companion object defines a number of distinct messages sendable by this component
 */
object WSServer {
  // Protocol of websocket events forwarded to the actor bound to a path.
  sealed trait WSMessage
  // An incoming text frame.
  case class Message(ws : WebSocket, msg : String) extends WSMessage
  // A completed opening handshake.
  case class Open(ws : WebSocket, hs : ClientHandshake) extends WSMessage
  // Connection closed; `external` indicates whether the remote end initiated it.
  case class Close(ws : WebSocket, code : Int, reason : String, external : Boolean) extends WSMessage
  // A transport or handler error on the connection.
  case class Error(ws : WebSocket, ex : Exception) extends WSMessage
}
/**
* Create a websocket server that listens on a specific address.
*
* @param port
*/
/**
 * A websocket server that listens on the given port and routes each event to
 * the actor registered for the socket's resource path (see forResource).
 *
 * @param port the TCP port to bind
 */
class WSServer(val port : Int)(implicit system : ActorSystem, db: DB ) extends WebSocketServer(new InetSocketAddress(port)) {

  // maps the path (resource descriptor) to the actor handling that path.
  private val reactors = Map[String, ActorRef]()

  // setup some logging based on the implicit passed in actorsystem
  private val log = Logging.getLogger(system, this);

  // Call this function to bind an actor to a specific path. All incoming
  // connections to a specific path will be routed to that specific actor.
  // Passing None unbinds the path.
  final def forResource(descriptor : String, reactor : Option[ActorRef]) {
    log.debug("Registering actor:" + reactor + " to " + descriptor);
    reactor match {
      case Some(actor) => reactors += ((descriptor, actor))
      case None        => reactors -= descriptor
    }
  }

  // Shared routing logic for every websocket event: forward the (lazily built)
  // message to the actor registered for the socket's path, or refuse the
  // connection when no actor is bound. Null sockets are ignored.
  private def dispatch(ws : WebSocket)(message : => WSServer.WSMessage) {
    if (null != ws) {
      reactors.get(ws.getResourceDescriptor) match {
        case Some(actor) => actor ! message
        case None        => ws.close(CloseFrame.REFUSE)
      }
    }
  }

  // onMessage is called when a websocket message is received.
  final override def onMessage(ws : WebSocket, msg : String) {
    dispatch(ws)(WSServer.Message(ws, msg))
  }

  final override def onOpen(ws : WebSocket, hs : ClientHandshake) {
    log.debug("OnOpen called {} :: {}", ws, hs);
    dispatch(ws)(WSServer.Open(ws, hs))
  }

  final override def onClose(ws : WebSocket, code : Int, reason : String, external : Boolean) {
    log.debug("Close called {} :: {} :: {} :: {}", ws, code, reason, external);
    dispatch(ws)(WSServer.Close(ws, code, reason, external))
  }

  final override def onError(ws : WebSocket, ex : Exception) {
    log.debug("onError called {} :: {}", ws, ex);
    dispatch(ws)(WSServer.Error(ws, ex))
  }
}
| djcoder100/smartjava | ws-akka/src/main/scala/WSServer.scala | Scala | mit | 3,130 |
package org.sbtidea
import sbt._
import scala.Some
/** Convenience accessors for sbt settings and tasks of a single project. */
case class Settings(projectRef: ProjectRef, buildStruct: BuildStructure, state: State) {

  /** Reads a setting for the given project, or None when it is undefined. */
  def optionalSetting[A](key: SettingKey[A], pr: ProjectRef = projectRef, bs: BuildStructure = buildStruct) : Option[A] =
    (key in pr).get(bs.data)

  /** Logs the message and aborts via IllegalArgumentException. */
  def logErrorAndFail(errorMessage: String): Nothing = {
    state.log.error(errorMessage)
    throw new IllegalArgumentException()
  }

  /** Reads a mandatory setting, failing with errorMessage when it is absent. */
  def setting[A](key: SettingKey[A], errorMessage: => String, pr: ProjectRef = projectRef) : A =
    optionalSetting(key, pr).getOrElse(logErrorAndFail(errorMessage))

  /** Reads a setting, falling back to defaultValue when it is undefined. */
  def settingWithDefault[A](key: SettingKey[A], defaultValue: => A) : A =
    optionalSetting(key).getOrElse(defaultValue)

  /** Evaluates a mandatory task, failing when it is missing or unsuccessful. */
  def task[A](key: TaskKey[A]): A =
    optionalTask(key).getOrElse(logErrorAndFail("Missing task key: " + key.key.label))

  /** Evaluates a task; None when it is missing or did not produce a Value. */
  def optionalTask[A](key: TaskKey[A]): Option[A] =
    EvaluateTask(buildStruct, key, state, projectRef).map(_._2).collect { case Value(v) => v }
}
| mpeltonen/sbt-idea | src/main/scala/org/sbtidea/Settings.scala | Scala | bsd-3-clause | 1,040 |
// My very first Scala script.
// http://docs.scala-lang.org/tutorials/scala-for-java-programmers.html
// My very first Scala tutorial object: prints a message once per second.
object Timer {

  /** Invokes `callback` once per second. Never returns. */
  def oncePerSecond(callback : () => Unit) {
    while (true) {
      callback()
      Thread.sleep(1000)
    }
  }

  /** The callback: prints a fixed message. */
  def timeFlies() {
    println("Time drops like a cannon ball!")
  }

  def main(args : Array[String]) {
    oncePerSecond(timeFlies)
  }
}
/** A minimal immutable complex number exposing its real and imaginary parts. */
class Complex(real : Double, imaginary : Double) {
  def re: Double = real
  def im: Double = imaginary
}
| SwiftsNamesake/Occlusion | helloworld.scala | Scala | mit | 455 |
package grammarcomp
package grammar
import org.scalatest.Matchers
import org.scalatest.FlatSpec
import grammar.examples.Olshansky1977
import clients.AmbiguityChecker
/** Checks that ambiguity detection distinguishes ambiguous and unambiguous grammars. */
class AmbiguityTest extends FlatSpec with Matchers {

  // Contexts required implicitly by the checker/enumeration machinery.
  implicit val gctx = new GlobalContext()
  implicit val opctx = new AmbiguityContext()
  implicit val ectx = new EnumerationContext()

  "AmbiguityChecker.checkAmbiguityInStudentGrammars" should " word correctly" in {
    // The IfThenElse reference grammar yields ambiguity witnesses (non-empty
    // result); Olshansky1977's reference grammar yields none.
    val grammar = examples.IfThenElse.reference.cfGrammar
    val res1 = new AmbiguityChecker(grammar).checkAmbiguityInStudentGrammar()
    res1.isEmpty should be(false)
    val res2 = new AmbiguityChecker(Olshansky1977.reference.cfGrammar).checkAmbiguityInStudentGrammar()
    res2.isEmpty should be(true)
  }
}
} | epfl-lara/GrammarComparison | src/test/scala/grammarcomp/grammar/AmbiguityTest.scala | Scala | mit | 769 |
package com.youdevise.muck
import scalaz._
import Scalaz._
import org.specs2.specification.{Given, When, Then}
import org.specs2.execute.{Result, ResultLike}
/** An immutable heterogeneous map keyed by identity-based, typed Slots. */
sealed case class Shelf(val innerMap: Map[Slot[_], Any]) {

  /** The value stored under `slot`, if any. */
  def get[T](slot: Slot[T]): Option[T] = innerMap.get(slot) match {
    case Some(value) => Some(value.asInstanceOf[T])
    case None        => None
  }

  /** The stored value, or the lazily-evaluated `default` when the slot is unset. */
  def getOrElse[T](slot: Slot[T], default: => T) = get(slot) getOrElse default

  /** Symbolic alias for get. */
  def \\[T](slot: Slot[T]): Option[T] = get(slot)

  /** Unchecked read: throws when the slot is unset. */
  def \\\\[T](slot: Slot[T]): T = get(slot).get

  /** A new shelf with `value` stored under `slot` (overwriting any old value). */
  def set[T](slot: Slot[T], value: T): Shelf = Shelf(innerMap + (slot -> value))

  /** A new shelf merging both; entries from `other` win on collisions. */
  def combine(other: Shelf) = copy(innerMap = innerMap ++ other.innerMap)
}
object Shelf {
  /** Builds a shelf from explicit slot/value pairs. */
  def apply(values: (Slot[_], Any)*): Shelf = Shelf(values.toMap)
  /** Merges shelves left to right; later shelves win on slot collisions.
    * NOTE(review): reduce throws on an empty iterable — confirm callers never
    * pass one. */
  def combineAll(shelves: Iterable[Shelf]): Shelf = shelves.reduce { _ combine _ }
}
// A typed key into a Shelf. Deliberately memberless: slots compare by object
// identity, so every instance is a distinct key.
sealed class Slot[T]() { }
/**
  * Glue between scalaz State-over-Shelf actions and specs2 Given/When/Then
  * steps: each spec step threads a shared Shelf through the scenario.
  */
object Muck {

  // A spec step: a State action over the shared Shelf yielding a T.
  type ShelfState[T] = State[Shelf, T]
  // A step parameterised by the free-form text of the Given/When/Then clause.
  type ShelfStateMaker[T] = (String) => ShelfState[T]

  // Marker result values distinguishing Given steps from When steps.
  sealed case class Givens()
  sealed case class Whens()

  val givens = Givens()
  val whens = Whens()

  /** Allocates a fresh typed slot; identity-based, so each call is a new key. */
  def slot[T] = new Slot[T]

  // Reads a slot as a state action. NOTE(review): uses the unchecked read, so
  // reading an unset slot throws — confirm that is the intended contract.
  implicit def slot2Reader[T](slot: Slot[T]): State[Shelf, T] = state[Shelf, T] { shelf:Shelf =>
    (shelf, (shelf \\\\ slot))
  }

  // Write (:=) and default-read (or) operations on slots, as state actions.
  sealed class ExtendedSlot[T](val slot: Slot[T]) {
    // Stores value under the slot and yields the stored value.
    def :=(value: T): State[Shelf, T] = state[Shelf, T] { shelf: Shelf =>
      (shelf.set(slot, value), value)
    }
    // Reads the slot, yielding `value` when unset; the shelf is unchanged.
    def or(value: T): State[Shelf, T] = state[Shelf, T] { shelf: Shelf =>
      (shelf, (shelf \\ slot).getOrElse(value))
    }
  }
  implicit def slot2Extended[T](slot: Slot[T]): ExtendedSlot[T] = new ExtendedSlot(slot)

  // Given: runs the action on an empty shelf and keeps the resulting shelf.
  implicit def given(shelfStateMaker: ShelfStateMaker[Givens]): Given[Shelf] = new Given[Shelf] {
    def extract(text: String) = shelfStateMaker(text)(Shelf())._1
  }
  implicit def given(shelfState: ShelfState[Givens]): Given[Shelf] = given { _: String => shelfState }

  // When: runs the action on the shelf produced by the preceding step(s); the
  // 2- and 3-ary variants combine the incoming shelves first.
  implicit def when(shelfStateMaker: ShelfStateMaker[Whens]): When[Shelf, Shelf] = new When[Shelf, Shelf] {
    def extract(shelf: Shelf, text: String) = shelfStateMaker(text)(shelf)._1
  }
  implicit def when(shelfState: ShelfState[Whens]): When[Shelf, Shelf] = when { _: String => shelfState }
  implicit def when2(shelfStateMaker: ShelfStateMaker[Whens]): When[(Shelf, Shelf), Shelf] = new When[(Shelf, Shelf), Shelf] {
    def extract(shelves: (Shelf, Shelf), text: String) = shelfStateMaker(text)(shelves._1 combine shelves._2)._1
  }
  implicit def when2(shelfState: ShelfState[Whens]): When[(Shelf, Shelf), Shelf] = when2 { _: String => shelfState }
  implicit def when3(shelfStateMaker: ShelfStateMaker[Whens]): When[(Shelf, Shelf, Shelf), Shelf] = new When[(Shelf, Shelf, Shelf), Shelf] {
    def extract(shelves: (Shelf, Shelf, Shelf), text: String) = shelfStateMaker(text)(Shelf.combineAll(shelves.toIndexedSeq))._1
  }
  implicit def when3(shelfState: ShelfState[Whens]): When[(Shelf, Shelf, Shelf), Shelf] = when3 { _: String => shelfState }

  // Then: runs the action on the final shelf and returns its Result.
  implicit def then(shelfStateMaker: ShelfStateMaker[ResultLike]): Then[Shelf] = new Then[Shelf] {
    def extract(shelf: Shelf, text: String): Result = shelfStateMaker(text)(shelf)._2.toResult
  }
  implicit def then(shelfState: ShelfState[ResultLike]): Then[Shelf] = then { _: String => shelfState }
}
} | tim-group/muck | src/main/scala/com/youdevise/muck/Muck.scala | Scala | mit | 3,229 |
/*
* Copyright ixias.net All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license
* For the full copyright and license information,
* please view the LICENSE file that was distributed with this source code.
*/
package ixias.persistence.lifted
import slick.lifted.Query
import ixias.persistence.model.Cursor
import scala.language.implicitConversions
/** Value-class wrapper adding cursor-based paging to Slick queries. */
final case class SlickQueryTransformer[R, U](val self: Query[R, U, Seq]) extends AnyVal {
  /** Applies the cursor's offset/limit window. With no limit and a non-positive
    * offset the query is returned untouched. */
  def seek(cursor: Cursor): Query[R, U, Seq] =
    cursor.limit.fold(
      if (0 < cursor.offset) self.drop(cursor.offset) else self
    )(lim => self.drop(cursor.offset).take(lim))
}
/** Mixin bringing the query-paging enrichment into scope. */
trait SlickQueryOps {
  // Implicit conversions should declare their result type explicitly: relying
  // on inference for implicit defs can yield surprising types and slows down
  // (or, with -Xfatal-warnings, breaks) typechecking.
  implicit def toQueryTransformer[R, U](a: Query[R, U, Seq]): SlickQueryTransformer[R, U] = SlickQueryTransformer(a)
}
| sp1rytus/ixias | framework/ixias-core/src/main/scala/ixias/persistence/lifted/SlickQueryOps.scala | Scala | mit | 816 |
package com.arcusys.learn.quiz.storage.impl.liferay
import com.arcusys.learn.storage.impl.KeyedEntityStorage
import com.arcusys.learn.persistence.liferay.service.LFQuizQuestionLocalServiceUtil
import com.arcusys.learn.persistence.liferay.model.LFQuizQuestion
import com.arcusys.valamis.questionbank.storage.QuestionStorage
import com.arcusys.valamis.quiz.model.QuizQuestion
import scala.collection.JavaConverters._
import com.arcusys.learn.storage.impl.liferay.LiferayCommon._
import com.arcusys.learn.quiz.storage.impl.QuizQuestionFieldsMapper
/**
* User: dkudinov
* Date: 15.3.2013
*/
/**
 * Liferay-backed KeyedEntityStorage for quiz questions. Lookups delegate to
 * LFQuizQuestionLocalServiceUtil; several generic KeyedEntityStorage operations
 * are intentionally unsupported.
 */
trait LFQuizQuestionStorageImpl extends KeyedEntityStorage[QuizQuestion] {

  // Wipes every quiz-question row.
  protected def doRenew() { LFQuizQuestionLocalServiceUtil.removeAll() }

  // Question-bank storage for the entities referenced by questionId.
  def questionStorage: QuestionStorage

  def getOne(parameters: (String, Any)*) = throw new UnsupportedOperationException

  // Supported lookups: by quizID; by quizID + categoryID (-1 means "no
  // category"); otherwise everything. Results are sorted by arrangementIndex.
  def getAll(parameters: (String, Any)*) = {
    val lfResult = parameters match {
      case Seq(("quizID", quizID: Int)) =>
        LFQuizQuestionLocalServiceUtil.findByQuizID(quizID)
      case Seq(("quizID", quizID: Int), ("categoryID", categoryID: Int)) =>
        LFQuizQuestionLocalServiceUtil.findByQuizAndCategory(quizID, if (categoryID == -1) null else categoryID)
      case _ => LFQuizQuestionLocalServiceUtil.getLFQuizQuestions(-1, -1)
    }
    lfResult.asScala.map { extract }.sortBy(_.arrangementIndex)
  }
  def execute(sqlKey: String, parameters: (String, Any)*) {
    throw new UnsupportedOperationException
  }
  def getAll(sqlKey: String, parameters: (String, Any)*) = throw new UnsupportedOperationException
  def getOne(sqlKey: String, parameters: (String, Any)*) = throw new UnsupportedOperationException
  def modify(sqlKey: String, parameters: (String, Any)*) {
    throw new UnsupportedOperationException
  }
  def create(parameters: (String, Any)*) { throw new UnsupportedOperationException }

  // Deletes the row identified by the "id" parameter; no-op when absent.
  def delete(parameters: (String, Any)*) {
    idParam(parameters: _*) foreach { LFQuizQuestionLocalServiceUtil.deleteLFQuizQuestion(_) }
  }

  // Updates the row identified by the "id" parameter with the remaining fields.
  def modify(parameters: (String, Any)*) {
    idParam(parameters: _*).flatMap(getLFEntityById(_)).foreach {
      lfEntity =>
        doUpdateEntity(null, lfEntity, LFQuizQuestionLocalServiceUtil.updateLFQuizQuestion(_), parameters: _*)
    }
  }
  def createAndGetID(entity: QuizQuestion, parameters: (String, Any)*) = doCreate(entity, parameters: _*).getId.toInt
  def createAndGetID(parameters: (String, Any)*): Int = doCreate(null, parameters: _*).getId.toInt
  def getByID(id: Int, parameters: (String, Any)*) = getLFEntityById(id) map { extract }
  private def getLFEntityById(id: Int) = Option(LFQuizQuestionLocalServiceUtil.getLFQuizQuestion(id))
  def create(entity: QuizQuestion, parameters: (String, Any)*) {
    doCreate(entity, parameters: _*)
  }
  private def doCreate(entity: QuizQuestion, parameters: (String, Any)*): LFQuizQuestion = {
    doUpdateEntity(entity, LFQuizQuestionLocalServiceUtil.createLFQuizQuestion(), LFQuizQuestionLocalServiceUtil.addLFQuizQuestion(_), parameters: _*)
  }

  // Copies the named parameters onto the Liferay entity and persists it via
  // `update` (either add or update in the local service).
  private def doUpdateEntity(entity: QuizQuestion, lfEntity: LFQuizQuestion, update: (LFQuizQuestion) => LFQuizQuestion, parameters: (String, Any)*): LFQuizQuestion = {
    // entity is not used - check QuizQuestion_insert.sql and QuizQuestionEntityStorage
    // NOTE(review): this match only covers entity == null; calling with a
    // non-null entity (e.g. createAndGetID(entity, ...)) raises MatchError.
    // Confirm callers always pass null, or drop the match on entity.
    (entity, parameters) match {
      case (null, _: Seq[(String, Any)]) => {
        parameters.foreach {
          param =>
            param match {
              case ("id", id: Int) => lfEntity.setId(id)
              // quizID: Int, categoryID: Option[Int], questionID: Int
              case ("quizID", quizID: Int) => lfEntity.setQuizId(quizID)
              // NOTE(review): Option[Int] is erased at runtime — this case
              // matches ("categoryID", Some(anything)) / None regardless of the
              // element type.
              case ("categoryID", categoryID: Option[Int]) => lfEntity.setCategoryId(categoryID)
              case ("questionID", questionID: Int) => lfEntity.setQuestionId(questionID)
              case ("title", title: String) => lfEntity.setTitle(title)
              case ("url", url: String) => lfEntity.setUrl(url)
              case ("questionType", questionType: String) => lfEntity.setQuestionType(questionType)
              case ("text", text: String) => lfEntity.setPlainText(text)
              case ("autoShowAnswer", flag: Boolean) => lfEntity.setAutoShowAnswer(flag)
              case ("arrangementIndex", arrangementIndex: Int) => lfEntity.setArrangementIndex(arrangementIndex)
              case ("groupId", groupId: Int) => lfEntity.setGroupId(groupId)
            }
        }
        update(lfEntity)
      }
    }
  }

  // Maps a Liferay entity to the domain QuizQuestion via the abstract factory.
  def extract(entity: LFQuizQuestion) = {
    createQuizQuestion(new QuizQuestionFieldsMapper {
      def questionTypeName = entity.getQuestionType
      def id = entity.getId.toInt
      def quizId = entity.getQuizId
      def categoryId = Option(entity.getCategoryId).map(_.toInt)
      def title = Option(entity.getTitle)
      def url = entity.getUrl
      def text = entity.getPlainText
      def questionId = entity.getQuestionId
      def autoShowAnswer = Option(entity.getAutoShowAnswer).map(_.asInstanceOf[Boolean]).getOrElse(false)
      def arrangementIndex = entity.getArrangementIndex
      def groupId = Option(entity.getGroupId)
    })
  }

  // Implemented by concrete storages to build the right QuizQuestion subtype.
  def createQuizQuestion(mapper: QuizQuestionFieldsMapper): QuizQuestion

  // Moves the question to another category (None clears the category).
  def modify(entity: QuizQuestion, parameters: (String, Any)*) {
    parameters match {
      // NOTE(review): Option[Int] is erased — any single ("parentID", Option)
      // pair matches here regardless of the element type.
      case Seq(("parentID", parentID: Option[Int])) => {
        val lfEntity = LFQuizQuestionLocalServiceUtil.getLFQuizQuestion(entity.id)
        if (parentID.isDefined)
          lfEntity.setCategoryId(parentID.get)
        else
          lfEntity.setCategoryId(null)
        LFQuizQuestionLocalServiceUtil.updateLFQuizQuestion(lfEntity)
      }
      case _ => None
    }
  }

  // Extracts the Int value of the "id" parameter, when present.
  def idParam(parameters: (String, Any)*): Option[Int] = {
    parameters find {
      _._1 == "id"
    } map { _._2.asInstanceOf[Int] }
  }
}
| ViLPy/Valamis | learn-persistence-liferay-wrapper/src/main/scala/com/arcusys/learn/quiz/storage/impl/liferay/LFQuizQuestionStorageImpl.scala | Scala | lgpl-3.0 | 6,008 |
package im.actor.server
import akka.actor.{ ActorRef, ActorSystem }
import akka.contrib.pattern.DistributedPubSubExtension
import akka.stream.Materializer
import akka.util.Timeout
import eu.codearte.jfairy.Fairy
import im.actor.api.{ rpc ⇒ rpcapi }
import im.actor.server.api.rpc.service.auth
import im.actor.server.api.rpc.RpcApiService
import im.actor.server.oauth.GoogleProvider
import im.actor.server.presences.{ PresenceManagerRegion, GroupPresenceManagerRegion }
import im.actor.server.sequence.WeakUpdatesManagerRegion
import im.actor.server.session.{ SessionRegion, Session, SessionConfig }
import org.scalatest.Suite
import slick.driver.PostgresDriver.api._
import scala.concurrent._
import scala.concurrent.duration._
import scalaz.{ -\\/, \\/- }
/** Blocking DB helpers for tests. */
trait PersistenceHelpers {
  // Shared timeout for the blocking lookups below.
  protected implicit val timeout = Timeout(5.seconds)
  // Blocking lookup of a user row; fails when the user does not exist.
  def getUserModel(userId: Int)(implicit db: Database) = Await.result(db.run(persist.User.find(userId).head), timeout.duration)
}
/** Enriches the RPC user struct with a lookup of its persistence model. */
trait UserStructExtensions {
  implicit class ExtUser(user: rpcapi.users.User) {
    // Blocking fetch of the DB row backing this RPC struct; fails when missing.
    def asModel()(implicit db: Database): models.User =
      Await.result(db.run(persist.User.find(user.id).head), 3.seconds)
  }
}
/**
 * Test-suite helpers for building users, auth ids, and server-side actor
 * infrastructure. All DB/RPC interaction is blocking (Await) — acceptable here
 * because these run only inside specs.
 */
trait ServiceSpecHelpers extends PersistenceHelpers with UserStructExtensions {
  this: Suite ⇒
  protected val system: ActorSystem
  protected lazy val mediator: ActorRef = DistributedPubSubExtension(system).mediator
  // Fake-data generator for names/emails.
  protected val fairy = Fairy.create()

  // A random phone number in the 7-555-xxx-xxxx test range.
  def buildPhone(): Long = {
    75550000000L + scala.util.Random.nextInt(999999)
  }

  // A random email; when `at` is given, the domain is replaced with it.
  def buildEmail(at: String = ""): String = {
    val email = fairy.person().email()
    if (at.isEmpty) email else email.substring(0, email.lastIndexOf("@")) + s"@$at"
  }

  // Persists and returns a fresh random auth id.
  def createAuthId()(implicit db: Database): Long = {
    val authId = scala.util.Random.nextLong()
    Await.result(db.run(persist.AuthId.create(authId, None, None)), 1.second)
    authId
  }

  // Persists a fresh auth id and signs the existing user up on it (simulating a
  // login from a new device). Fails the spec on an RPC error.
  def createAuthId(userId: Int)(implicit ec: ExecutionContext, system: ActorSystem, db: Database, service: rpcapi.auth.AuthService): Long = {
    val authId = scala.util.Random.nextLong()
    Await.result(db.run(persist.AuthId.create(authId, None, None)), 1.second)
    val phoneNumber = Await.result(db.run(persist.UserPhone.findByUserId(userId)) map (_.head.number), 1.second)
    val smsCode = getSmsCode(authId, phoneNumber)
    val res = Await.result(service.handleSignUpObsolete(
      phoneNumber = phoneNumber,
      smsHash = smsCode.smsHash,
      smsCode = smsCode.smsCode,
      name = fairy.person().fullName(),
      deviceHash = scala.util.Random.nextLong.toBinaryString.getBytes(),
      deviceTitle = "Specs virtual device",
      appId = 42,
      appKey = "appKey",
      isSilent = false
    )(rpcapi.ClientData(authId, scala.util.Random.nextLong(), None)), 5.seconds)
    res match {
      case \\/-(rsp) ⇒ rsp
      case -\\/(e) ⇒ fail(s"Got RpcError ${e}")
    }
    authId
  }

  def createSessionId(): Long =
    scala.util.Random.nextLong()

  // Requests an auth code and returns just the sms hash from the response.
  def getSmsHash(authId: Long, phoneNumber: Long)(implicit service: rpcapi.auth.AuthService, system: ActorSystem): String = withoutLogs {
    val rpcapi.auth.ResponseSendAuthCodeObsolete(smsHash, _) =
      Await.result(service.handleSendAuthCodeObsolete(phoneNumber, 1, "apiKey")(rpcapi.ClientData(authId, scala.util.Random.nextLong(), None)), 1.second).toOption.get
    smsHash
  }

  // Requests an auth code and reads back the persisted sms-code row.
  def getSmsCode(authId: Long, phoneNumber: Long)(implicit service: rpcapi.auth.AuthService, system: ActorSystem, db: Database): models.AuthSmsCodeObsolete = withoutLogs {
    val res = Await.result(service.handleSendAuthCodeObsolete(phoneNumber, 1, "apiKey")(rpcapi.ClientData(authId, scala.util.Random.nextLong(), None)), 1.second)
    res.toOption.get
    Await.result(db.run(persist.AuthSmsCodeObsolete.findByPhoneNumber(phoneNumber).head), 5.seconds)
  }

  // Creates a user on a fresh auth id and phone; returns (user, authId, phone).
  def createUser()(implicit service: rpcapi.auth.AuthService, db: Database, system: ActorSystem): (rpcapi.users.User, Long, Long) = {
    val authId = createAuthId()
    val phoneNumber = buildPhone()
    (createUser(authId, phoneNumber), authId, phoneNumber)
  }

  def createUser(phoneNumber: Long)(implicit service: rpcapi.auth.AuthService, system: ActorSystem, db: Database): rpcapi.users.User =
    createUser(createAuthId(), phoneNumber)

  //TODO: make same method to work with email
  // Full sign-up flow via the obsolete RPC API; fails the spec on RPC errors.
  def createUser(authId: Long, phoneNumber: Long)(implicit service: rpcapi.auth.AuthService, system: ActorSystem, db: Database): rpcapi.users.User =
    withoutLogs {
      val smsCode = getSmsCode(authId, phoneNumber)
      val res = Await.result(service.handleSignUpObsolete(
        phoneNumber = phoneNumber,
        smsHash = smsCode.smsHash,
        smsCode = smsCode.smsCode,
        name = fairy.person().fullName(),
        deviceHash = scala.util.Random.nextLong.toBinaryString.getBytes(),
        deviceTitle = "Specs virtual device",
        appId = 42,
        appKey = "appKey",
        isSilent = false
      )(rpcapi.ClientData(authId, scala.util.Random.nextLong(), None)), 5.seconds)
      res match {
        case \\/-(rsp) ⇒ rsp.user
        case -\\/(e) ⇒ fail(s"Got RpcError ${e}")
      }
    }

  // Spawns the RPC dispatcher actor hosting the given services.
  def buildRpcApiService(services: Seq[im.actor.api.rpc.Service])(implicit system: ActorSystem, db: Database) =
    system.actorOf(RpcApiService.props(services), "rpcApiService")

  // Starts the session cluster-sharding region.
  def buildSessionRegion(rpcApiService: ActorRef)(
    implicit
    weakUpdManagerRegion: WeakUpdatesManagerRegion,
    presenceManagerRegion: PresenceManagerRegion,
    groupPresenceManagerRegion: GroupPresenceManagerRegion,
    system: ActorSystem,
    materializer: Materializer
  ) = {
    implicit val sessionConfig = SessionConfig.load(system.settings.config.getConfig("session"))
    Session.startRegion(Some(Session.props(mediator)))
  }

  def buildSessionRegionProxy()(implicit system: ActorSystem) = Session.startRegionProxy()

  // Auth service wired to a dummy code-activation backend (no real SMS).
  def buildAuthService()(
    implicit
    sessionRegion: SessionRegion,
    oauth2Service: GoogleProvider,
    system: ActorSystem
  ) = new auth.AuthServiceImpl(new DummyCodeActivation, mediator)

  // Temporarily raises the log level to WARN around `f` to keep spec output
  // quiet, restoring the previous levels afterwards.
  protected def withoutLogs[A](f: ⇒ A)(implicit system: ActorSystem): A = {
    val logger = org.slf4j.LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[ch.qos.logback.classic.Logger]
    val logLevel = logger.getLevel()
    val esLogLevel = system.eventStream.logLevel
    logger.setLevel(ch.qos.logback.classic.Level.WARN)
    system.eventStream.setLogLevel(akka.event.Logging.WarningLevel)
    val res = f
    logger.setLevel(logLevel)
    system.eventStream.setLogLevel(esLogLevel)
    res
  }

  // A Future that completes after `delay` ms; uses blocking() so the pool can
  // compensate for the sleeping thread.
  protected def futureSleep(delay: Long)(implicit ec: ExecutionContext): Future[Unit] = Future { blocking { Thread.sleep(delay) } }
}
| chenbk85/actor-platform | actor-server/actor-testkit/src/main/scala/im/actor/server/ServiceSpecHelpers.scala | Scala | mit | 6,778 |
package com.twitter.inject.thrift.integration
import com.twitter.finagle.http.Status._
import com.twitter.finatra.http.test.{EmbeddedHttpServer, HttpTest}
import com.twitter.finatra.thrift.EmbeddedThriftServer
import com.twitter.inject.server.EmbeddedTwitterServer
import com.twitter.inject.thrift.integration.http_server.EchoHttpServer
import com.twitter.inject.thrift.integration.thrift_server.EchoThriftServer
/** End-to-end test: the HTTP echo server delegating to the Thrift echo server. */
class EchoHttpServerFeatureTest extends HttpTest {

  val thriftServer = new EmbeddedThriftServer(
    twitterServer = new EchoThriftServer)

  val httpServer = new EmbeddedHttpServer(
    twitterServer = new EchoHttpServer,
    extraArgs = Seq(
      "-thrift.clientId=echo-http-service",
      resolverMap("thrift-echo-service" -> thriftServer.thriftHostAndPort)))

  "EchoHttpServer" should {
    "Echo 3 times" in {
      // BUGFIX: close both embedded servers in a finally block. Previously the
      // close() calls were the last statements of the test body, so any failed
      // assertion skipped them and leaked both servers.
      try {
        httpServer.httpPost(
          path = "/config?timesToEcho=2",
          postBody = "",
          andExpect = Ok,
          withBody = "2")

        httpServer.httpPost(
          path = "/config?timesToEcho=3",
          postBody = "",
          andExpect = Ok,
          withBody = "3")

        httpServer.httpGet(
          path = "/echo?msg=Bob",
          andExpect = Ok,
          withBody = "BobBobBob")

        httpServer.assertStat("route/config/POST/response_size", Seq(1, 1))
        httpServer.assertStat("route/echo/GET/response_size", Seq(9))
      } finally {
        httpServer.close()
        thriftServer.close()
      }
    }
  }
}
| joecwu/finatra | inject/inject-thrift-client/src/test/scala/com/twitter/inject/thrift/integration/EchoHttpServerFeatureTest.scala | Scala | apache-2.0 | 1,435 |
package dotty.tools.dotc
package transform
import core._
import Names._
import dotty.tools.dotc.transform.TreeTransforms.{AnnotationTransformer, TransformerInfo, MiniPhaseTransform, TreeTransformer}
import ast.Trees._
import Flags._
import Types._
import Constants.Constant
import Contexts.Context
import Symbols._
import SymDenotations._
import Decorators._
import dotty.tools.dotc.core.Annotations.ConcreteAnnotation
import dotty.tools.dotc.core.Denotations.SingleDenotation
import scala.collection.mutable
import DenotTransformers._
import typer.Checking
import Names.Name
import NameOps._
import StdNames._
/** The first tree transform
* - ensures there are companion objects for all classes except module classes
* - eliminates some kinds of trees: Imports, NamedArgs
* - stubs out native methods
*/
class FirstTransform extends MiniPhaseTransform with IdentityDenotTransformer with AnnotationTransformer { thisTransformer =>
  import ast.tpd._

  override def phaseName = "firstTransform"

  // Infos pass through untouched (IdentityDenotTransformer).
  def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context): Type = tp

  // After this phase: every Select picks a member of its qualifier's type, and
  // the eliminated tree forms (Import, NamedArg, naked type trees in term
  // position) no longer occur.
  override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = tree match {
    case Select(qual, _) if tree.symbol.exists =>
      assert(qual.tpe derivesFrom tree.symbol.owner, i"non member selection of ${tree.symbol.showLocated} from ${qual.tpe}")
    case _: TypeTree =>
    case _: Import | _: NamedArg | _: TypTree =>
      assert(false, i"illegal tree: $tree")
    case _ =>
  }

  /** Reorder statements so that module classes always come after their companion classes, add missing companion classes */
  private def reorderAndComplete(stats: List[Tree])(implicit ctx: Context): List[Tree] = {
    val moduleClassDefs, singleClassDefs = mutable.Map[Name, Tree]()

    // Splices each module class in directly after its companion class; classes
    // without a module companion so far are recorded in singleClassDefs.
    def reorder(stats: List[Tree]): List[Tree] = stats match {
      case (stat: TypeDef) :: stats1 if stat.symbol.isClass =>
        if (stat.symbol is Flags.Module) {
          moduleClassDefs += (stat.name -> stat)
          singleClassDefs -= stat.name.stripModuleClassSuffix
          val stats1r = reorder(stats1)
          // Keep the module in place only if its companion class never showed up
          // later (otherwise it was moved to just after the class).
          if (moduleClassDefs contains stat.name) stat :: stats1r else stats1r
        } else {
          def stats1r = reorder(stats1)
          val normalized = moduleClassDefs remove stat.name.moduleClassName match {
            case Some(mcdef) =>
              mcdef :: stats1r
            case None =>
              singleClassDefs += (stat.name -> stat)
              stats1r
          }
          stat :: normalized
        }
      case stat :: stats1 => stat :: reorder(stats1)
      case Nil => Nil
    }

    // Synthesizes an empty companion object `name` for `forClass` and wires the
    // companion-class/companion-module lookup methods in both directions.
    def newCompanion(name: TermName, forClass: Symbol): Thicket = {
      val modul = ctx.newCompleteModuleSymbol(ctx.owner, name, Synthetic, Synthetic,
        defn.ObjectType :: Nil, Scopes.newScope)
      val mc = modul.moduleClass
      if (ctx.owner.isClass) modul.enteredAfter(thisTransformer)
      ctx.synthesizeCompanionMethod(nme.COMPANION_CLASS_METHOD, forClass, mc).enteredAfter(thisTransformer)
      ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, mc, forClass).enteredAfter(thisTransformer)
      ModuleDef(modul, Nil)
    }

    // Adds a companion object to each class still lacking one, picking an
    // alternative name when a parameterless member already claims it.
    def addMissingCompanions(stats: List[Tree]): List[Tree] = stats map {
      case stat: TypeDef if singleClassDefs contains stat.name =>
        val objName = stat.name.toTermName
        val nameClash = stats.exists {
          case other: MemberDef =>
            other.name == objName && other.symbol.info.isParameterless
          case _ =>
            false
        }
        val uniqueName = if (nameClash) objName.avoidClashName else objName
        Thicket(stat :: newCompanion(uniqueName, stat.symbol).trees)
      case stat => stat
    }
    addMissingCompanions(reorder(stats))
  }

  // Replaces the body of @native methods with a sys.error stub and clears the
  // Deferred flag so they are concrete from here on.
  override def transformDefDef(ddef: DefDef)(implicit ctx: Context, info: TransformerInfo) = {
    if (ddef.symbol.hasAnnotation(defn.NativeAnnot)) {
      ddef.symbol.resetFlag(Deferred)
      DefDef(ddef.symbol.asTerm,
        _ => ref(defn.Sys_errorR).withPos(ddef.pos)
          .appliedTo(Literal(Constant("native method stub"))))
    } else ddef
  }

  // Statement lists are normalized at the next phase's view of the context.
  override def transformStats(trees: List[Tree])(implicit ctx: Context, info: TransformerInfo): List[Tree] =
    ast.Trees.flatten(reorderAndComplete(trees)(ctx.withPhase(thisTransformer.next)))

  // Drops imports and unwraps named arguments to their underlying expression.
  override def transformOther(tree: Tree)(implicit ctx: Context, info: TransformerInfo) = tree match {
    case tree: Import => EmptyTree
    case tree: NamedArg => transform(tree.arg)
    case tree => tree
  }

  // invariants: all modules have companion objects
  //             all types are TypeTrees
  //             all this types are explicit
}
| reactormonk/dotty | src/dotty/tools/dotc/transform/FirstTransform.scala | Scala | bsd-3-clause | 4,640 |
package net.babel.graph
import net.babel.model._
import net.babel.model.TwitterJsonProtocol._
import akka.actor.{ ActorRef, Actor, Props, ActorSystem, FSM, Stash }
import org.apache.commons.codec.binary.Base64
import scala.concurrent.{ Future, future, ExecutionContext, Await }
import scala.concurrent.duration.DurationInt
import scala.util.{ Success, Failure }
import spray.http._
import spray.json.DefaultJsonProtocol
import spray.httpx.encoding.{ Gzip, Deflate }
import spray.httpx.SprayJsonSupport
import spray.client.pipelining._
import SprayJsonSupport._
/** Protocol messages exchanged with [[TwitterSource]]. */
sealed trait Messages
/** Request: look up a Twitter user by screen name. */
case class FetchUser(userName: String) extends Messages
/** Request: fetch a user's recent timeline tweets. */
case class FetchUserTweets(userName: String) extends Messages
/** Request: fetch the ids of accounts the given user follows. */
case class FetchFriendIds(userId: Long) extends Messages
/** Reply carrying a user's tweets. */
case class UserTweets(tweets: List[Tweet]) extends Messages
/** Reply carrying the friend ids of `userId`. */
case class FriendIds(userId: Long, friendIds: List[Long]) extends Messages
/** Request: perform OAuth2 app-only authentication.
  * NOTE(review): arg1/arg2 are the consumer key and secret (see the
  * Unauthenticated handler) — consider renaming for clarity. */
case class Authenticate(arg1: String, arg2: String) extends Messages

/** FSM states of [[TwitterSource]]. */
sealed trait sourceState
/** Holds a bearer token; ready to call the Twitter API. */
case object Authenticated extends sourceState
/** Rate-limit quota exhausted; waiting for the window to reset. */
case object Waiting extends sourceState
/** Initial state, before any token has been obtained. */
case object Unauthenticated extends sourceState

/** FSM state data of [[TwitterSource]]. */
sealed trait ClientData
/** No token yet. */
case object NoData extends ClientData
/** Bearer token used in the Authorization header. */
case class TokenData(token: String) extends ClientData
/** FSM actor that talks to the Twitter REST API (app-only OAuth2 flow).
  *
  * Lifecycle: Unauthenticated --Authenticate--> Authenticated; on quota
  * exhaustion Authenticated --> Waiting (requests are stashed), and after the
  * 16-minute state timeout the stash is replayed and the FSM returns to
  * Authenticated.
  *
  * NOTE(review): every API call blocks the actor thread via Await.result, and
  * the global ExecutionContext is used — acceptable for a crawler, but worth
  * revisiting (pipe the Future to self instead of blocking).
  */
class TwitterSource extends Actor with FSM[sourceState, ClientData] with Stash {
  import ExecutionContext.Implicits.global

  val twitterBaseUrl = "https://api.twitter.com"

  /** Request pipeline carrying the bearer token; gzip-encodes requests and
    * deflate-decodes responses. */
  def pipeline(token: String): HttpRequest => Future[HttpResponse] = {
    (
      addHeader("Authorization", s"Bearer $token")
      ~> encode(Gzip)
      ~> sendReceive
      ~> decode(Deflate)
    )
  }

  startWith(Unauthenticated, NoData)

  // Quota exhausted: stash incoming work until the rate-limit window resets.
  // 16 minutes slightly exceeds Twitter's 15-minute rate-limit window.
  when(Waiting, stateTimeout = 16 minutes) {
    case Event(FetchFriendIds(userId), TokenData(token)) =>
      stash
      stay using TokenData(token)
    case Event(StateTimeout, TokenData(token)) => {
      unstashAll
      goto(Authenticated) using TokenData(token)
    }
  }

  when(Authenticated) {
    case Event(FetchFriendIds(userId), TokenData(token)) =>
      println("Fetching friend ids from " + userId.toString)
      val response = pipeline(token) {
        Get(s"$twitterBaseUrl/1.1/friends/ids.json?user_id=$userId&count=5000")
      }
      // Blocking wait; the unmarshalled ids are sent back to the requester.
      val users = Await.result(response, 5 seconds)
      val friendIds = users ~> unmarshal[Friends]
      sender ! FriendIds(userId, friendIds.ids)
      // checking the status
      val headers = users.headers.map(x => (x.name, x.value)).toMap
      // When the remaining-quota header hits zero, back off to Waiting.
      if (headers("x-rate-limit-remaining") == "0") {
        println("Quota reached moving to idle.")
        goto(Waiting) using TokenData(token)
      } else {
        stay using TokenData(token)
      }
    case Event(FetchUser(userName), TokenData(token)) =>
      // Debug-only handler: fetches and prints user info, sends no reply.
      println("Fetching info from " + userName)
      val response = pipeline(token) {
        Get(s"$twitterBaseUrl/1.1/users/lookup.json?screen_name=$userName")
      }
      val user = Await.result(response, 5 seconds)
      println(user.entity)
      println(user ~> unmarshal[TwitterUser])
      stay using TokenData(token)
    case Event(FetchUserTweets(userName), TokenData(token)) =>
      println("Fetching tweets from " + userName)
      val response = pipeline(token) {
        Get(s"$twitterBaseUrl/1.1/statuses/user_timeline.json?screen_name=$userName&count=50&include_rts=true&exclude_replies=true")
      }
      val tweets = Await.result(response, 5 seconds) ~> unmarshal[List[Tweet]]
      sender ! UserTweets(tweets)
      stay using TokenData(token)
  }

  when(Unauthenticated) {
    case Event(Authenticate(consumerKey, consumerSecret), NoData) =>
      // OAuth2 client-credentials grant: basic auth with base64(key:secret).
      val credentials = Base64.encodeBase64String(s"$consumerKey:$consumerSecret".getBytes())
      val pipeline: HttpRequest => Future[TwitterToken] = (
        addHeader("Authorization", s"Basic $credentials")
        ~> encode(Gzip)
        ~> sendReceive
        ~> decode(Deflate)
        ~> unmarshal[TwitterToken]
      )
      val response = pipeline {
        Post(s"$twitterBaseUrl/oauth2/token", FormData(Map("grant_type" -> "client_credentials")))
      }
      val token = Await.result(response, 10 seconds).access_token
      println("Moving to Authenticated.")
      // Replay any requests received before authentication completed.
      unstashAll
      goto(Authenticated) using TokenData(token)
    case _ =>
      // Any non-Authenticate message arriving early is stashed for later.
      stash
      stay using NoData
  }
}
} | vallettea/Twitter-Communities | src/main/scala/net/babel/graph/TwitterSource.scala | Scala | mit | 4,307 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.controller.throttle
import io.gatling.core.akka.BaseActor
import akka.actor.FSM
/** States of the throttler controller FSM. */
private[throttle] sealed trait ThrottlerControllerState

private[throttle] object ThrottlerControllerState {
  /** Initial state, before the simulation has started. */
  case object WaitingToStart extends ThrottlerControllerState
  /** Running and throttling according to the configured profile. */
  case object Started extends ThrottlerControllerState
  /** Throttling profile replaced at runtime by an override. */
  case object Overridden extends ThrottlerControllerState
}

/** State data carried by the throttler controller FSM. */
private[throttle] sealed trait ThrottlerControllerData

private[throttle] object ThrottlerControllerData {
  /** No data (used while waiting to start). */
  case object NoData extends ThrottlerControllerData
  /** Data while started; `tick` is presumably the elapsed-tick counter — confirm in the controller implementation. */
  case class StartedData(tick: Int) extends ThrottlerControllerData
  /** Data while overridden: the override throttlings plus the current tick. */
  case class OverrideData(overrides: Throttlings, tick: Int) extends ThrottlerControllerData
}

/** Base class binding the FSM state/data type parameters for the throttler controller. */
private[throttle] class ThrottlerControllerFSM extends BaseActor with FSM[ThrottlerControllerState, ThrottlerControllerData]
| wiacekm/gatling | gatling-core/src/main/scala/io/gatling/core/controller/throttle/ThrottlerControllerFSM.scala | Scala | apache-2.0 | 1,459 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.stream.sql
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.api.config.OptimizerConfigOptions
import org.apache.flink.table.planner.runtime.stream.sql.SplitAggregateITCase.PartialAggMode
import org.apache.flink.table.planner.runtime.utils.StreamingWithAggTestBase.{AggMode, LocalGlobalOff, LocalGlobalOn}
import org.apache.flink.table.planner.runtime.utils.StreamingWithMiniBatchTestBase.MiniBatchOn
import org.apache.flink.table.planner.runtime.utils.StreamingWithStateTestBase.{HEAP_BACKEND, ROCKSDB_BACKEND, StateBackendMode}
import org.apache.flink.table.planner.runtime.utils.{StreamingWithAggTestBase, TestingRetractSink}
import org.apache.flink.table.planner.utils.DateTimeTestUtil.{localDate, localDateTime, localTime => mLocalTime}
import org.apache.flink.types.Row
import org.junit.Assert.assertEquals
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import org.junit.{Before, Test}
import java.lang.{Integer => JInt, Long => JLong}
import java.math.{BigDecimal => JBigDecimal}
import java.util
import scala.collection.JavaConversions._
import scala.collection.{Seq, mutable}
import scala.util.Random
/** Streaming-SQL integration tests for the distinct-aggregate split optimization
  * (TABLE_OPTIMIZER_DISTINCT_AGG_SPLIT_ENABLED). Parameterized over partial-agg
  * mode, local/global aggregation, and state backend; each test runs a query
  * against the shared table `T` registered in [[before]] and compares the
  * sorted retract-sink output against a literal expected list.
  */
@RunWith(classOf[Parameterized])
class SplitAggregateITCase(
    partialAggMode: PartialAggMode,
    aggMode: AggMode,
    backend: StateBackendMode)
  extends StreamingWithAggTestBase(aggMode, MiniBatchOn, backend) {

  @Before
  override def before(): Unit = {
    super.before()
    // Toggle the split-distinct-aggregate optimization per test parameter.
    if (partialAggMode.isPartialAggEnabled) {
      tEnv.getConfig.getConfiguration.setBoolean(
        OptimizerConfigOptions.TABLE_OPTIMIZER_DISTINCT_AGG_SPLIT_ENABLED, true)
    } else {
      tEnv.getConfig.getConfiguration.setBoolean(
        OptimizerConfigOptions.TABLE_OPTIMIZER_DISTINCT_AGG_SPLIT_ENABLED, false)
    }
    // Shared fixture: (a: Long, b: Int, c: String nullable), registered as "T".
    val data = List(
      (1L, 1, "Hello 0"),
      (1L, 2, "Hello 1"),
      (2L, 3, "Hello 1"),
      (3L, 5, "Hello 1"),
      (2L, 3, "Hello 2"),
      (2L, 4, "Hello 3"),
      (2L, 4, null),
      (2L, 5, "Hello 4"),
      (3L, 5, "Hello 0"),
      (2L, 4, "Hello 3"),
      (4L, 5, "Hello 2"),
      (2L, 4, "Hello 3"),
      (4L, 5, null),
      (4L, 5, "Hello 3"),
      (2L, 2, "Hello 0"),
      (4L, 6, "Hello 1"))
    val t = failingDataSource(data).toTable(tEnv, 'a, 'b, 'c)
    tEnv.registerTable("T", t)
  }

  // COUNT(DISTINCT ...) over many column types (timestamp, date, time, decimal,
  // int, long, string), including nulls, which must not be counted.
  @Test
  def testCountDistinct(): Unit = {
    val ids = List(
      1,
      2, 2,
      3, 3, 3,
      4, 4, 4, 4,
      5, 5, 5, 5, 5)
    val dateTimes = List(
      "1970-01-01 00:00:01",
      "1970-01-01 00:00:02", null,
      "1970-01-01 00:00:04", "1970-01-01 00:00:05", "1970-01-01 00:00:06",
      "1970-01-01 00:00:07", null, null, "1970-01-01 00:00:10",
      "1970-01-01 00:00:11", "1970-01-01 00:00:11", "1970-01-01 00:00:13",
      "1970-01-01 00:00:14", "1970-01-01 00:00:15")
    val dates = List(
      "1970-01-01",
      "1970-01-02", null,
      "1970-01-04", "1970-01-05", "1970-01-06",
      "1970-01-07", null, null, "1970-01-10",
      "1970-01-11", "1970-01-11", "1970-01-13", "1970-01-14", "1970-01-15")
    val times = List(
      "00:00:01",
      "00:00:02", null,
      "00:00:04", "00:00:05", "00:00:06",
      "00:00:07", null, null, "00:00:10",
      "00:00:11", "00:00:11", "00:00:13", "00:00:14", "00:00:15")
    val integers = List(
      "1",
      "2", null,
      "4", "5", "6",
      "7", null, null, "10",
      "11", "11", "13", "14", "15")
    val chars = List(
      "A",
      "B", null,
      "D", "E", "F",
      "H", null, null, "K",
      "L", "L", "N", "O", "P")
    val data = new mutable.MutableList[Row]
    for (i <- ids.indices) {
      val v = integers(i)
      // Null source strings map to null typed values across all columns.
      val decimal = if (v == null) null else new JBigDecimal(v)
      val int = if (v == null) null else JInt.valueOf(v)
      val long = if (v == null) null else JLong.valueOf(v)
      data.+=(Row.of(
        Int.box(ids(i)), localDateTime(dateTimes(i)), localDate(dates(i)),
        mLocalTime(times(i)), decimal, int, long, chars(i)))
    }
    // Shuffle to make sure the result does not depend on arrival order.
    val inputs = Random.shuffle(data)
    val rowType = new RowTypeInfo(
      Types.INT, Types.LOCAL_DATE_TIME, Types.LOCAL_DATE, Types.LOCAL_TIME,
      Types.DECIMAL, Types.INT, Types.LONG, Types.STRING)
    val t = failingDataSource(inputs)(rowType).toTable(tEnv, 'id, 'a, 'b, 'c, 'd, 'e, 'f, 'g)
    tEnv.createTemporaryView("MyTable", t)
    val t1 = tEnv.sqlQuery(
      s"""
         |SELECT
         | id,
         | count(distinct a),
         | count(distinct b),
         | count(distinct c),
         | count(distinct d),
         | count(distinct e),
         | count(distinct f),
         | count(distinct g)
         |FROM MyTable
         |GROUP BY id
       """.stripMargin)
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink)
    env.execute()
    val expected = List(
      "1,1,1,1,1,1,1,1",
      "2,1,1,1,1,1,1,1",
      "3,3,3,3,3,3,3,3",
      "4,2,2,2,2,2,2,2",
      "5,4,4,4,4,4,4,4")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  // Global (ungrouped) COUNT(DISTINCT c).
  @Test
  def testSingleDistinctAgg(): Unit = {
    val t1 = tEnv.sqlQuery("SELECT COUNT(DISTINCT c) FROM T")
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = List("5")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  // Two distinct aggregates on different columns in one ungrouped query.
  @Test
  def testMultiCountDistinctAgg(): Unit = {
    val t1 = tEnv.sqlQuery("SELECT COUNT(DISTINCT b), COUNT(DISTINCT c) FROM T")
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = List("6,5")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  // Mixing one distinct aggregate with non-distinct SUM/AVG under GROUP BY.
  @Test
  def testSingleDistinctAggAndOneOrMultiNonDistinctAgg(): Unit = {
    val t1 = tEnv.sqlQuery("SELECT a, SUM(b), COUNT(DISTINCT c), avg(b) FROM T GROUP BY a")
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink)
    env.execute()
    val expected = List("1,3,2,1", "2,29,5,3",
      "3,10,2,5", "4,21,3,5")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  // COUNT(DISTINCT c) grouped by a.
  @Test
  def testSingleDistinctAggWithGroupBy(): Unit = {
    val t1 = tEnv.sqlQuery("SELECT a, COUNT(DISTINCT c) FROM T GROUP BY a")
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink)
    env.execute()
    val expected = List("1,2", "2,5", "3,2", "4,3")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  // Distinct and non-distinct aggregates over the same column b.
  @Test
  def testSingleDistinctAggWithAndNonDistinctAggOnSameColumn(): Unit = {
    val t1 = tEnv.sqlQuery("SELECT a, COUNT(DISTINCT b), MAX(b), MIN(b) FROM T GROUP BY a")
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink)
    env.execute()
    val expected = List("1,2,2,1", "2,4,5,2", "3,1,5,5", "4,2,6,5")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  // Grouping key also appears inside a distinct aggregate.
  @Test
  def testSomeColumnsBothInDistinctAggAndGroupBy(): Unit = {
    val t1 = tEnv.sqlQuery("SELECT a, COUNT(DISTINCT a), COUNT(b) FROM T GROUP BY a")
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink)
    env.execute()
    val expected = List("1,1,2", "2,1,8", "3,1,2", "4,1,4")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  // FILTER clauses on distinct and non-distinct aggregates.
  @Test
  def testAggWithFilterClause(): Unit = {
    val t1 = tEnv.sqlQuery(
      s"""
         |SELECT
         | a,
         | COUNT(DISTINCT b) filter (where not b = 2),
         | MAX(b) filter (where not b = 5),
         | MIN(b) filter (where not b = 2)
         |FROM T
         |GROUP BY a
       """.stripMargin)
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink)
    env.execute()
    val expected = List("1,1,2,1", "2,3,4,3", "3,1,null,5", "4,2,6,5")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  // MIN/MAX over the retracting output of an inner aggregation.
  @Test
  def testMinMaxWithRetraction(): Unit = {
    val t1 = tEnv.sqlQuery(
      s"""
         |SELECT
         | c, MIN(b), MAX(b), COUNT(DISTINCT a)
         |FROM(
         | SELECT
         | a, COUNT(DISTINCT b) as b, MAX(b) as c
         | FROM T
         | GROUP BY a
         |) GROUP BY c
       """.stripMargin)
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink)
    env.execute()
    val expected = List("2,2,2,1", "5,1,4,2", "6,2,2,1")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  // Nested distinct aggregation joined back against the base table.
  @Test
  def testAggWithJoin(): Unit = {
    val t1 = tEnv.sqlQuery(
      s"""
         |SELECT *
         |FROM(
         | SELECT
         | c, MIN(b) as b, MAX(b) as d, COUNT(DISTINCT a) as a
         | FROM(
         | SELECT
         | a, COUNT(DISTINCT b) as b, MAX(b) as c
         | FROM T
         | GROUP BY a
         | ) GROUP BY c
         |) as T1 JOIN T ON T1.b + 2 = T.a
       """.stripMargin)
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink)
    env.execute()
    val expected = List("2,2,2,1,4,5,Hello 2", "2,2,2,1,4,5,Hello 3", "2,2,2,1,4,5,null",
      "2,2,2,1,4,6,Hello 1", "5,1,4,2,3,5,Hello 0", "5,1,4,2,3,5,Hello 1",
      "6,2,2,1,4,5,Hello 2", "6,2,2,1,4,5,Hello 3", "6,2,2,1,4,5,null",
      "6,2,2,1,4,6,Hello 1")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  // UV-style COUNT(DISTINCT) over a deduplicating inner query (retract input).
  @Test
  def testUvWithRetraction(): Unit = {
    val data = (0 until 1000).map {i => (s"${i%10}", s"${i%100}", s"$i")}.toList
    val t = failingDataSource(data).toTable(tEnv, 'a, 'b, 'c)
    tEnv.registerTable("src", t)
    val sql =
      s"""
         |SELECT
         | a,
         | COUNT(distinct b) as uv
         |FROM (
         | SELECT a, b, last_value(c)
         | FROM src
         | GROUP BY a, b
         |) t
         |GROUP BY a
       """.stripMargin
    val t1 = tEnv.sqlQuery(sql)
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink)
    env.execute()
    val expected = List("0,10", "1,10", "2,10", "3,10", "4,10",
      "5,10", "6,10", "7,10", "8,10", "9,10")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  // Regression test for an object-reuse bug with BinaryRowData/StringData input.
  @Test
  def testCountDistinctWithBinaryRowSource(): Unit = {
    // this case is failed before, because of object reuse problem
    val data = (0 until 100).map {i => ("1", "1", s"${i%50}", "1")}.toList
    // use BinaryRowData source here for StringData reuse
    val t = failingBinaryRowSource(data).toTable(tEnv, 'a, 'b, 'c, 'd)
    tEnv.registerTable("src", t)
    val sql =
      s"""
         |SELECT
         | a,
         | b,
         | COUNT(distinct c) as uv
         |FROM (
         | SELECT
         | a, b, c, d
         | FROM
         | src where b <> ''
         | UNION ALL
         | SELECT
         | a, 'ALL' as b, c, d
         | FROM
         | src where b <> ''
         |) t
         |GROUP BY
         | a, b
       """.stripMargin
    val t1 = tEnv.sqlQuery(sql)
    val sink = new TestingRetractSink
    t1.toRetractStream[Row].addSink(sink)
    env.execute()
    val expected = List("1,1,50", "1,ALL,50")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
}
/** Parameterization helpers for [[SplitAggregateITCase]]. */
object SplitAggregateITCase {
  /** Whether the split-distinct-aggregate optimization is enabled for a run. */
  case class PartialAggMode(isPartialAggEnabled: Boolean) {
    // Rendered into the JUnit parameterized test name ("PartialAgg=ON/OFF").
    override def toString: String = if (isPartialAggEnabled) "ON" else "OFF"
  }

  val PartialAggOn = PartialAggMode(isPartialAggEnabled = true)
  val PartialAggOff = PartialAggMode(isPartialAggEnabled = false)

  /** JUnit parameter matrix: split-agg always on, crossed with
    * local/global mode and the two state backends. */
  @Parameterized.Parameters(name = "PartialAgg={0}, LocalGlobal={1}, StateBackend={2}")
  def parameters(): util.Collection[Array[java.lang.Object]] = {
    Seq[Array[AnyRef]](
      Array(PartialAggOn, LocalGlobalOff, HEAP_BACKEND),
      Array(PartialAggOn, LocalGlobalOn, HEAP_BACKEND),
      Array(PartialAggOn, LocalGlobalOff, ROCKSDB_BACKEND),
      Array(PartialAggOn, LocalGlobalOn, ROCKSDB_BACKEND))
  }
}
| GJL/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/SplitAggregateITCase.scala | Scala | apache-2.0 | 12,753 |
package com.softwaremill.codebrag.service.commits
import com.softwaremill.codebrag.common.{Clock, EventBus}
import com.typesafe.scalalogging.slf4j.Logging
import com.softwaremill.codebrag.domain.{MultibranchLoadCommitsResult, PartialCommitInfo, NewCommitsLoadedEvent, RepositoryStatus}
import com.softwaremill.codebrag.dao.repositorystatus.RepositoryStatusDAO
import com.softwaremill.codebrag.repository.Repository
import com.softwaremill.codebrag.dao.branchsnapshot.BranchStateDAO
import com.softwaremill.codebrag.cache.RepositoriesCache
import com.softwaremill.codebrag.service.config.CommitCacheConfig
/** Imports commits from a Git repository into the in-memory commit cache.
  *
  * Pulls upstream changes (best-effort), loads commits newer than the last
  * persisted branch state, publishes a [[NewCommitsLoadedEvent]] for fresh
  * commits, updates the cache, and persists the repository's ready/not-ready
  * status.
  *
  * @param repoStatusDao     persists repository ready/not-ready status
  * @param branchStateDao    supplies the last-known state of each branch
  * @param repositoriesCache in-memory cache of per-repository commits
  * @param config            limits, e.g. max commits cached per branch
  * @param eventBus          destination for new-commit events
  * @param clock             implicit clock used by event construction
  */
class CommitImportService(
  repoStatusDao: RepositoryStatusDAO,
  branchStateDao: BranchStateDAO,
  repositoriesCache: RepositoriesCache,
  config: CommitCacheConfig,
  eventBus: EventBus
)(implicit clock: Clock) extends Logging {

  import scala.util.control.NonFatal

  /** Runs one import cycle for `repository`.
    *
    * A failed pull is logged and ignored (import proceeds from the local
    * clone); a failure while loading/caching commits marks the repository as
    * not ready with the error message.
    */
  def importRepoCommits(repository: Repository) {
    try {
      repository.pullChanges()
    } catch {
      // Best-effort: only non-fatal errors are swallowed so that fatal ones
      // (e.g. InterruptedException, OutOfMemoryError) still propagate.
      // TODO: refactor a bit and add info to user that remote repository is unavailable
      case NonFatal(e) => logger.error("Cannot pull changes from upstream", e)
    }
    try {
      // Load only commits newer than the last persisted per-branch state,
      // capped by the configured per-branch cache size.
      val loaded = repository.loadCommitsSince(branchStateDao.loadBranchesStateAsMap(repository.repoName), config.maxCommitsCachedPerBranch)
      if (loaded.commits.nonEmpty) {
        publishNewCommitsLoaded(repository, loaded)
      }
      repositoriesCache.addCommitsToRepo(repository.repoName, loaded)
      updateRepoReadyStatus(repository)
    } catch {
      case NonFatal(e) =>
        logger.error("Cannot import repository commits", e)
        updateRepoNotReadyStatus(repository, e.getMessage)
    }
  }

  /** Evicts branches that no longer exist upstream from the cached repository. */
  def cleanupStaleBranches(repository: Repository) {
    repositoriesCache.getRepo(repository.repoName).cleanupStaleBranches()
  }

  /** Persists a not-ready status with the failure message for the repository. */
  private def updateRepoNotReadyStatus(repository: Repository, errorMsg: String) {
    logger.debug(s"Saving repository-not-ready status data to DB with message: $errorMsg")
    val repoNotReadyStatus = RepositoryStatus.notReady(repository.repoName, Some(errorMsg))
    repoStatusDao.updateRepoStatus(repoNotReadyStatus)
  }

  /** Persists a ready status for the repository. */
  private def updateRepoReadyStatus(repository: Repository) {
    logger.debug(s"Saving repository-ready status data to DB")
    val repoReadyStatus = RepositoryStatus.ready(repository.repoName)
    repoStatusDao.updateRepoStatus(repoReadyStatus)
  }

  /** Publishes the newly loaded commits; the first flag marks a first-time
    * import (repository not yet present in the cache). */
  private def publishNewCommitsLoaded(repository: Repository, loaded: MultibranchLoadCommitsResult): Unit = {
    eventBus.publish(NewCommitsLoadedEvent(
      !repositoriesCache.hasRepo(repository.repoName),
      repository.repoName,
      repository.currentHead.toString,
      loaded.uniqueCommits.map { commit =>
        PartialCommitInfo(commit)
      }.toList
    ))
  }
}
| softwaremill/codebrag | codebrag-service/src/main/scala/com/softwaremill/codebrag/service/commits/CommitImportService.scala | Scala | agpl-3.0 | 2,750 |
package com.datastax.spark.connector.embedded
import java.net.InetAddress
import org.apache.commons.configuration.ConfigurationException
/** A utility trait for integration testing.
  * Manages *one* single Cassandra server at a time and enables switching its configuration.
  * This is not thread safe, and test suites must not be run in parallel,
  * because they will "steal" the server. */
trait EmbeddedCassandra {

  /** Implementation hook: invoked before reconfiguring, so implementors can
    * drop any cached connections/sessions tied to the old server. */
  def clearCache(): Unit

  /** Switches the Cassandra server to use the new configuration if the requested configuration
    * is different than the currently used configuration. When the configuration is switched, all
    * the state (including data) of the previously running cassandra cluster is lost.
    * @param configTemplates name of the cassandra.yaml template resources
    * @param forceReload if set to true, the server will be reloaded fresh
    *                    even if the configuration didn't change */
  def useCassandraConfig(configTemplates: Seq[String], forceReload: Boolean = false) {
    import EmbeddedCassandra._
    import UserDefinedProperty._
    require(hosts.isEmpty || configTemplates.size <= hosts.size,
      "Configuration templates can't be more than the number of specified hosts")
    // When IT_TEST_CASSANDRA_HOSTS is set, an externally managed cluster is
    // assumed and no embedded server is started or restarted here.
    if (getProperty(HostProperty).isEmpty) {
      clearCache()
      // Pair each requested template with the currently running one; the
      // "missing value" sentinel marks indices with no current template, which
      // therefore always compare unequal and trigger a (re)start.
      val templatePairs = configTemplates.zipAll(currentConfigTemplates, "missing value", null)
      for (i <- configTemplates.indices) {
        require(configTemplates(i) != null && configTemplates(i).trim.nonEmpty,
          "Configuration template can't be null or empty")
        // Restart runner i only if its template changed or a reload is forced.
        if (templatePairs(i)._2 != templatePairs(i)._1 || forceReload) {
          cassandraRunners.lift(i).flatten.foreach(_.destroy())
          cassandraRunners = cassandraRunners.patch(i,
            Seq(Some(new CassandraRunner(configTemplates(i), getProps(i)))), 1)
          currentConfigTemplates = currentConfigTemplates.patch(i, Seq(configTemplates(i)), 1)
        }
      }
    }
  }
}
/** Reads user-provided test-cluster coordinates from environment variables
  * (IT_TEST_CASSANDRA_HOSTS / IT_TEST_CASSANDRA_PORTS), with typed parsing. */
object UserDefinedProperty {

  /** A property whose string value parses into a specific type. */
  trait TypedProperty {
    type ValueType
    def convertValueFromString(str: String): ValueType
    // Runtime-checked cast used by getNodeProperty; throws on type mismatch.
    def checkValueType(obj: Any): ValueType
  }

  trait IntProperty extends TypedProperty {
    type ValueType = Int
    def convertValueFromString(str: String) = str.toInt
    def checkValueType(obj: Any) =
      obj match {
        case x: Int => x
        case _ => throw new ClassCastException (s"Expected Int but found ${obj.getClass.getName}")
      }
  }

  trait InetAddressProperty extends TypedProperty {
    type ValueType = InetAddress
    def convertValueFromString(str: String) = InetAddress.getByName(str)
    def checkValueType(obj: Any) =
      obj match {
        case x: InetAddress => x
        case _ => throw new ClassCastException (s"Expected InetAddress but found ${obj.getClass.getName}")
      }
  }

  /** Named environment property; sealed so the known set is exhaustive. */
  abstract sealed class NodeProperty(val propertyName: String) extends TypedProperty
  case object HostProperty extends NodeProperty("IT_TEST_CASSANDRA_HOSTS") with InetAddressProperty
  case object PortProperty extends NodeProperty("IT_TEST_CASSANDRA_PORTS") with IntProperty

  // Splits a comma-separated env value into trimmed items; empty if unset.
  private def getValueSeq(propertyName: String): Seq[String] = {
    sys.env.get(propertyName) match {
      case Some(p) => p.split(",").map(e => e.trim).toIndexedSeq
      case None => IndexedSeq()
    }
  }

  private def getValueSeq(nodeProperty: NodeProperty): Seq[nodeProperty.ValueType] =
    getValueSeq(nodeProperty.propertyName).map(x => nodeProperty.convertValueFromString(x))

  // Parsed once at object initialization from the current environment.
  val hosts = getValueSeq(HostProperty)
  val ports = getValueSeq(PortProperty)

  def getProperty(nodeProperty: NodeProperty): Option[String] =
    sys.env.get(nodeProperty.propertyName)

  def getPropertyOrThrowIfNotFound(nodeProperty: NodeProperty): String =
    getProperty(nodeProperty).getOrElse(
      throw new ConfigurationException(s"Missing ${nodeProperty.propertyName} in system environment"))
}
/** Shared state and per-node configuration for embedded Cassandra servers.
  *
  * Validates the user-supplied host/port environment variables on first use,
  * tracks the currently running [[CassandraRunner]]s, and derives per-node
  * ports, addresses and cassandra.yaml substitution properties. A JVM
  * shutdown hook destroys any still-running servers.
  */
object EmbeddedCassandra {

  import UserDefinedProperty._

  /** Number of comma-separated items in `s` (one more than the comma count). */
  private def countCommaSeparatedItemsIn(s: String): Int =
    s.count(_ == ',') + 1

  // Fail fast if hosts and ports were supplied with different cardinalities.
  getProperty(HostProperty) match {
    case None =>
    case Some(hostsStr) =>
      val hostCount = countCommaSeparatedItemsIn(hostsStr)
      val nativePortsStr = getPropertyOrThrowIfNotFound(PortProperty)
      val nativePortCount = countCommaSeparatedItemsIn(nativePortsStr)
      require(hostCount == nativePortCount,
        "IT_TEST_CASSANDRA_HOSTS must have the same size as IT_TEST_CASSANDRA_PORTS")
  }

  // Mutable registry of running servers and their config templates, indexed by
  // node; mutated only by EmbeddedCassandra.useCassandraConfig (single-threaded).
  private[connector] var cassandraRunners: IndexedSeq[Option[CassandraRunner]] = IndexedSeq(None)
  private[connector] var currentConfigTemplates: IndexedSeq[String] = IndexedSeq()

  /** Substitution properties for node `index`'s cassandra.yaml template. */
  def getProps(index: Integer): Map[String, String] = {
    require(hosts.isEmpty || index < hosts.length, s"$index index is overflow the size of ${hosts.length}")
    val host = getHost(index).getHostAddress
    Map(
      "seeds" -> host,
      "storage_port" -> getStoragePort(index).toString,
      "ssl_storage_port" -> getSslStoragePort(index).toString,
      "native_transport_port" -> getPort(index).toString,
      "jmx_port" -> getJmxPort(index).toString,
      "rpc_address" -> host,
      "listen_address" -> host,
      "cluster_name" -> getClusterName(index),
      "keystore_path" -> ClassLoader.getSystemResource("keystore").getPath)
  }

  // Per-node ports are derived by offsetting well-known base ports by the index.
  def getStoragePort(index: Integer) = 7000 + index
  def getSslStoragePort(index: Integer) = 7100 + index
  def getJmxPort(index: Integer) = CassandraRunner.DefaultJmxPort + index
  def getClusterName(index: Integer) = s"Test Cluster$index"
  def getHost(index: Integer): InetAddress = getNodeProperty(index, HostProperty)
  def getPort(index: Integer) = getNodeProperty(index, PortProperty)

  /** Resolves a typed node property: user-supplied value when the environment
    * provides one, otherwise an index-derived default (127.0.0.1 / 9042+index). */
  private def getNodeProperty(index: Integer, nodeProperty: NodeProperty): nodeProperty.ValueType = {
    nodeProperty.checkValueType {
      nodeProperty match {
        case PortProperty if ports.isEmpty => 9042 + index
        case PortProperty if index < hosts.size => ports(index)
        case HostProperty if hosts.isEmpty => InetAddress.getByName("127.0.0.1")
        case HostProperty if index < hosts.size => hosts(index)
        case _ => throw new RuntimeException(s"$index index is overflow the size of ${hosts.size}")
      }
    }
  }

  // Ensure embedded servers do not outlive the test JVM.
  Runtime.getRuntime.addShutdownHook(new Thread(new Runnable {
    override def run() = cassandraRunners.flatten.foreach(_.destroy())
  }))
}
/** Starts one embedded Cassandra daemon as a child JVM process.
  *
  * On construction: materializes a cassandra.yaml from `configTemplate` with
  * `props` substituted, spawns a CassandraDaemon JVM inheriting this class
  * path, and blocks until the native transport port is reachable.
  *
  * @param configTemplate classpath resource name of the cassandra.yaml template
  * @param props          substitution variables (ports, addresses, dirs, ...)
  */
private[connector] class CassandraRunner(val configTemplate: String, props: Map[String, String])
  extends Embedded {

  import java.io.{File, FileOutputStream, IOException}
  import org.apache.cassandra.io.util.FileUtils
  import com.google.common.io.Files
  import CassandraRunner._

  // Scratch directory layout mirroring a regular Cassandra installation.
  val tempDir = mkdir(new File(Files.createTempDir(), "cassandra-driver-spark"))
  val workDir = mkdir(new File(tempDir, "cassandra"))
  val dataDir = mkdir(new File(workDir, "data"))
  val commitLogDir = mkdir(new File(workDir, "commitlog"))
  val cachesDir = mkdir(new File(workDir, "saved_caches"))
  val confDir = mkdir(new File(tempDir, "conf"))
  val confFile = new File(confDir, "cassandra.yaml")

  private val properties = Map("cassandra_dir" -> workDir.toString) ++ props

  // Render the yaml template with variable substitution into confFile.
  closeAfterUse(ClassLoader.getSystemResourceAsStream(configTemplate)) { input =>
    closeAfterUse(new FileOutputStream(confFile)) { output =>
      copyTextFileWithVariableSubstitution(input, output, properties)
    }
  }

  private val classPath = System.getProperty("java.class.path")
  private val javaBin = System.getProperty("java.home") + "/bin/java"
  private val cassandraConfProperty = "-Dcassandra.config=file:" + confFile.toString
  private val superuserSetupDelayProperty = "-Dcassandra.superuser_setup_delay_ms=0"
  private val jmxPort = props.getOrElse("jmx_port", DefaultJmxPort)
  private val jmxPortProperty = s"-Dcassandra.jmx.local.port=$jmxPort"
  private val sizeEstimatesUpdateIntervalProperty =
    s"-Dcassandra.size_recorder_interval=$SizeEstimatesUpdateIntervalInSeconds"
  // Cassandra needs the jamm agent for memory metering; locate its jar on our class path.
  private val jammAgent = classPath.split(File.pathSeparator).find(_.matches(".*jamm.*\\\\.jar"))
  private val jammAgentProperty = jammAgent.map("-javaagent:" + _).getOrElse("")
  private val cassandraMainClass = "org.apache.cassandra.service.CassandraDaemon"

  // Launch the daemon in foreground mode (-f), inheriting stdout/stderr.
  private val process = new ProcessBuilder()
    .command(javaBin,
      "-Xms2G", "-Xmx2G", "-Xmn384M", "-XX:+UseConcMarkSweepGC",
      sizeEstimatesUpdateIntervalProperty,
      cassandraConfProperty, jammAgentProperty, superuserSetupDelayProperty, jmxPortProperty,
      "-cp", classPath, cassandraMainClass, "-f")
    .inheritIO()
    .start()

  // NOTE(review): props is assumed to always contain "native_transport_port"
  // and "rpc_address" (guaranteed by EmbeddedCassandra.getProps); the bare
  // .get calls would throw otherwise.
  val nativePort = props.get("native_transport_port").get.toInt
  if (!waitForPortOpen(InetAddress.getByName(props.get("rpc_address").get), nativePort, 100000))
    throw new IOException("Failed to start Cassandra.")

  /** Stops the daemon process, waits for exit, and removes the scratch dirs. */
  def destroy() {
    process.destroy()
    process.waitFor()
    FileUtils.deleteRecursive(tempDir)
    tempDir.delete()
  }
}
object CassandraRunner {
  // Interval (seconds) for Cassandra's table size-estimate recorder.
  val SizeEstimatesUpdateIntervalInSeconds = 5
  // Base JMX port; per-node ports are offset from this by the node index.
  val DefaultJmxPort = 7199
}
| IMCG/spark-cassandra | spark-cassandra-connector-embedded/src/main/scala/com/datastax/spark/connector/embedded/EmbeddedCassandra.scala | Scala | apache-2.0 | 9,142 |
import sbt._
import Keys._
import org.scalatra.sbt._
import org.scalatra.sbt.PluginKeys._
import com.earldouglas.xwp.JettyPlugin
import com.mojolly.scalate.ScalatePlugin._
import ScalateKeys._
object MyTestServerBuild extends Build {
val Organization = "com.google"
val Name = "My Test Server"
val Version = "0.1.0"
val ScalaVersion = "2.11.7"
val ScalatraVersion = "2.4.0"
lazy val project = Project (
"my-test-server",
file("."),
settings = ScalatraPlugin.scalatraSettings ++ scalateSettings ++ Seq(
organization := Organization,
name := Name,
version := Version,
scalaVersion := ScalaVersion,
resolvers += Classpaths.typesafeReleases,
libraryDependencies ++= Seq(
"org.scalatra" %% "scalatra" % ScalatraVersion,
"org.scalatra" %% "scalatra-scalate" % ScalatraVersion,
"org.scalatra" %% "scalatra-specs2" % ScalatraVersion % "test",
"ch.qos.logback" % "logback-classic" % "1.1.3" % "runtime",
"org.eclipse.jetty" % "jetty-webapp" % "9.2.14.v20151106" % "container",
"javax.servlet" % "javax.servlet-api" % "3.1.0" % "provided",
"org.scalatra" %% "scalatra-json" % "2.4.0",
"org.json4s" %% "json4s-jackson" % "3.3.0"
),
scalateTemplateConfig in Compile <<= (sourceDirectory in Compile){ base =>
Seq(
TemplateConfig(
base / "webapp" / "WEB-INF" / "templates",
Seq.empty, /* default imports should be added here */
Seq(
Binding("context", "_root_.org.scalatra.scalate.ScalatraRenderContext", importMembers = true, isImplicit = true)
), /* add extra bindings here */
Some("templates")
)
)
}
)
).enablePlugins(JettyPlugin)
}
| zhihan/janala2-gradle | server/project/build.scala | Scala | bsd-2-clause | 1,786 |
package is.hail.variant
import is.hail.annotations.Annotation
import is.hail.check.Gen
import is.hail.expr.Parser
import is.hail.utils._
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.json4s._
import scala.collection.JavaConverters._
import scala.language.implicitConversions
/** Factory, parsing and schema helpers for [[Locus]] values. */
object Locus {
  // Human contigs in conventional order: autosomes 1-22, then X, Y, MT.
  val simpleContigs: Seq[String] = (1 to 22).map(_.toString) ++ Seq("X", "Y", "MT")

  /** Builds a locus validated against the given reference genome;
    * `checkLocus` raises on an unknown contig or out-of-range position. */
  def apply(contig: String, position: Int, rg: ReferenceGenome): Locus = {
    rg.checkLocus(contig, position)
    Locus(contig, position)
  }

  /** Builds either a validated [[Locus]] (when a reference genome is supplied)
    * or a plain (contig, position) annotation row (when it is not). */
  def annotation(contig: String, position: Int, rg: Option[ReferenceGenome]): Annotation = {
    rg match {
      case Some(ref) => Locus(contig, position, ref)
      case None => Annotation(contig, position)
    }
  }

  /** Spark SQL schema matching [[Locus.toRow]]: non-null (contig, position). */
  def sparkSchema: StructType =
    StructType(Array(
      StructField("contig", StringType, nullable = false),
      StructField("position", IntegerType, nullable = false)))

  /** Inverse of [[Locus.toRow]]; expects (contig: String, position: Int). */
  def fromRow(r: Row): Locus = {
    Locus(r.getAs[String](0), r.getInt(1))
  }

  /** Generator for property tests: a contig of `rg` with a position in [1, length]. */
  def gen(rg: ReferenceGenome): Gen[Locus] = for {
    (contig, length) <- Contig.gen(rg)
    pos <- Gen.choose(1, length)
  } yield Locus(contig, pos)

  /** Parses "contig:pos". The position is the text after the LAST colon, so
    * contig names containing ':' are supported.
    * NOTE(review): a non-numeric position raises NumberFormatException. */
  def parse(str: String, rg: ReferenceGenome): Locus = {
    val elts = str.split(":")
    val size = elts.length
    if (size < 2)
      fatal(s"Invalid string for Locus. Expecting contig:pos -- found '$str'.")

    val contig = elts.take(size - 1).mkString(":")
    Locus(contig, elts(size - 1).toInt, rg)
  }

  /** Parses a locus-interval string (delegates to the expression parser). */
  def parseInterval(str: String, rg: ReferenceGenome, invalidMissing: Boolean = false): Interval =
    Parser.parseLocusInterval(str, rg, invalidMissing)

  def parseIntervals(arr: Array[String], rg: ReferenceGenome, invalidMissing: Boolean): Array[Interval] = arr.map(parseInterval(_, rg, invalidMissing))

  /** Java-friendly overload taking a java.util.List. */
  def parseIntervals(arr: java.util.List[String], rg: ReferenceGenome, invalidMissing: Boolean = false): Array[Interval] = parseIntervals(arr.asScala.toArray, rg, invalidMissing)

  /** Builds a single-contig interval [start, end] normalized by the reference
    * genome's interval rules. */
  def makeInterval(contig: String, start: Int, end: Int, includesStart: Boolean, includesEnd: Boolean,
    rgBase: ReferenceGenome, invalidMissing: Boolean = false): Interval = {
    // rgBase is already a ReferenceGenome; the former asInstanceOf self-cast was redundant.
    rgBase.toLocusInterval(Interval(Locus(contig, start), Locus(contig, end), includesStart, includesEnd), invalidMissing)
  }
}
/** A genomic position: a contig name paired with a 1-based coordinate. */
case class Locus(contig: String, position: Int) {

  /** Spark SQL row encoding: (contig, position). */
  def toRow: Row = Row(contig, position)

  /** JSON encoding with "contig" and "position" fields. */
  def toJSON: JValue =
    JObject("contig" -> JString(contig), "position" -> JInt(position))

  /** Like `copy`, but validates the resulting coordinates against `rg` first. */
  def copyChecked(rg: ReferenceGenome, contig: String = contig, position: Int = position): Locus = {
    rg.checkLocus(contig, position)
    Locus(contig, position)
  }

  /** Autosomal, or inside a pseudo-autosomal region of X or Y. */
  def isAutosomalOrPseudoAutosomal(rg: ReferenceGenome): Boolean =
    isAutosomal(rg) || inXPar(rg) || inYPar(rg)

  /** Neither on a sex chromosome nor mitochondrial. */
  def isAutosomal(rg: ReferenceGenome): Boolean =
    !inX(rg) && !inY(rg) && !isMitochondrial(rg)

  def isMitochondrial(rg: ReferenceGenome): Boolean = rg.isMitochondrial(contig)

  /** Inside a pseudo-autosomal region on X. */
  def inXPar(rg: ReferenceGenome): Boolean = rg.inXPar(this)

  /** Inside a pseudo-autosomal region on Y. */
  def inYPar(rg: ReferenceGenome): Boolean = rg.inYPar(this)

  /** On X but outside its pseudo-autosomal regions. */
  def inXNonPar(rg: ReferenceGenome): Boolean = inX(rg) && !inXPar(rg)

  /** On Y but outside its pseudo-autosomal regions. */
  def inYNonPar(rg: ReferenceGenome): Boolean = inY(rg) && !inYPar(rg)

  private def inX(rg: ReferenceGenome): Boolean = rg.inX(contig)

  private def inY(rg: ReferenceGenome): Boolean = rg.inY(contig)

  /** Canonical "contig:position" form (inverse of [[Locus.parse]]). */
  override def toString: String = contig + ":" + position
}
| danking/hail | hail/src/main/scala/is/hail/variant/Locus.scala | Scala | mit | 3,500 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen.agg.batch
import org.apache.flink.runtime.execution.Environment
import org.apache.flink.runtime.jobgraph.OperatorID
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord
import org.apache.flink.streaming.runtime.tasks.{OneInputStreamTask, OneInputStreamTaskTestHarness}
import org.apache.flink.table.data.{GenericRowData, RowData, StringData}
import org.apache.flink.table.planner.codegen.agg.AggTestBase
import org.apache.flink.table.planner.utils.RowDataTestUtil
import org.apache.flink.table.runtime.operators.CodeGenOperatorFactory
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo
import org.apache.flink.table.types.logical._
import org.apache.flink.util.function.FunctionWithException
import org.junit.Assert
import java.util
import scala.collection.JavaConverters._
/**
 * Base agg test.
 *
 * Provides helpers to build internal RowData rows and to run a code-generated
 * batch aggregation operator through a one-input stream-task test harness,
 * asserting its output against expected rows.
 */
abstract class BatchAggTestBase extends AggTestBase(isBatchMode = true) {

  // Output row type of the global aggregation stage: two pass-through fields
  // (f0, f4) followed by the three aggregate outputs.
  val globalOutputType = RowType.of(
    Array[LogicalType](
      new VarCharType(VarCharType.MAX_LENGTH), new VarCharType(VarCharType.MAX_LENGTH),
      new BigIntType(),
      new DoubleType(),
      new BigIntType()),
    Array(
      "f0", "f4",
      "agg1Output",
      "agg2Output",
      "agg3Output"))

  // Builds a GenericRowData from Scala values, boxing primitives and converting
  // String to the internal StringData representation.
  def row(args: Any*): GenericRowData = {
    GenericRowData.of(args.map {
      case str: String => StringData.fromString(str)
      case l: Long => Long.box(l)
      case d: Double => Double.box(d)
      case o: AnyRef => o
    }.toArray[AnyRef]: _*)
  }

  /**
   * Runs the operator built by the code generator inside a test harness.
   *
   * @param args           (operator factory, input row type, output row type)
   * @param input          rows fed to the operator, in order
   * @param expectedOutput rows the operator must emit (compared deeply as GenericRowData)
   */
  def testOperator(
      args: (CodeGenOperatorFactory[RowData], RowType, RowType),
      input: Array[RowData], expectedOutput: Array[GenericRowData]): Unit = {
    val testHarness = new OneInputStreamTaskTestHarness[RowData, RowData](
      new FunctionWithException[Environment, OneInputStreamTask[RowData, RowData], Exception] {
        override def apply(t: Environment) = new OneInputStreamTask(t)
      }, 1, 1, InternalTypeInfo.of(args._2), InternalTypeInfo.of(args._3))
    testHarness.memorySize = 32 * 100 * 1024
    testHarness.setupOutputForSingletonOperatorChain()
    val streamConfig = testHarness.getStreamConfig
    streamConfig.setStreamOperatorFactory(args._1)
    streamConfig.setOperatorID(new OperatorID)
    // Give nearly all managed memory to the operator under test.
    streamConfig.setManagedMemoryFraction(0.99)

    testHarness.invoke()
    testHarness.waitForTaskRunning()

    // All records share timestamp 0; batch operators here do not use event time.
    for (row <- input) {
      testHarness.processElement(new StreamRecord[RowData](row, 0L))
    }
    testHarness.waitForInputProcessing()
    testHarness.endInput()
    testHarness.waitForTaskCompletion()

    // Drain the output queue, deep-converting each record so equality is structural.
    val outputs = new util.ArrayList[GenericRowData]()
    val outQueue = testHarness.getOutput
    while (!outQueue.isEmpty) {
      outputs.add(RowDataTestUtil.toGenericRowDeeply(
        outQueue.poll().asInstanceOf[StreamRecord[RowData]].getValue, args._3.getChildren))
    }
    Assert.assertArrayEquals(expectedOutput.toArray[AnyRef], outputs.asScala.toArray[AnyRef])
  }
}
| tzulitai/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/codegen/agg/batch/BatchAggTestBase.scala | Scala | apache-2.0 | 3,768 |
package com.zobot.client.packet.definitions.clientbound.play
import com.zobot.client.packet.Packet
/**
 * Clientbound play packet 0x24 (Map): serializes all fields in declaration
 * order into a single byte array.
 */
case class Map(itemDamage: Any, scale: Any, trackingPosition: Any, iconCount: Any, iconDirectionAndType: Any, iconX: Any, iconZ: Any, columns: Any, rows: Any, x: Any, z: Any, length: Any, data: Any) extends Packet {
  override lazy val packetId = 0x24

  // Encode each field with fromAny and concatenate the resulting byte arrays,
  // preserving field declaration order.
  override lazy val packetData: Array[Byte] =
    Seq(itemDamage, scale, trackingPosition, iconCount, iconDirectionAndType,
      iconX, iconZ, columns, rows, x, z, length, data)
      .map(fromAny)
      .reduce(_ ++ _)
}
| BecauseNoReason/zobot | src/main/scala/com/zobot/client/packet/definitions/clientbound/play/Map.scala | Scala | mit | 713 |
// This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
package ducttape.workflow
import collection._
import ducttape.exec.UnpackedRealDagVisitor
import ducttape.exec.UnpackedDagVisitor
import ducttape.versioner.WorkflowVersionInfo
import ducttape.workflow.Types.UnpackedWorkVert
import ducttape.hyperdag.walker.Traversal
import ducttape.hyperdag.walker.Arbitrary
import grizzled.slf4j.Logging
/** Drives visitor passes over every unpacked vertex of a workflow. */
object Visitors extends Logging {
  // Walks the unpacked workflow and visits each vertex as a RealTask (no version info).
  // NOTE(review): the type parameter U appears unused — confirm before removing.
  def visitAllRealTasks[A <: UnpackedRealDagVisitor,U](
      workflow: HyperWorkflow,
      visitor: A,
      planPolicy: PlanPolicy,
      numCores: Int = 1,
      traversal: Traversal = Arbitrary): A = {
    debug(s"Visiting workflow using traversal: ${traversal}")
    workflow.unpackedWalker(planPolicy, traversal=traversal).foreach(numCores, { v: UnpackedWorkVert =>
      val taskT: TaskTemplate = v.packed.value.get
      val task: RealTask = taskT.toRealTask(v)
      debug(s"Visiting ${task}")
      visitor.visit(task)
    })
    // Returned so callers can read state the visitor accumulated.
    visitor
  }

  // Same walk as visitAllRealTasks, but attaches workflow version info to each task.
  def visitAll[A <: UnpackedDagVisitor](
      workflow: HyperWorkflow,
      visitor: A,
      planPolicy: PlanPolicy,
      workflowVersion: WorkflowVersionInfo,
      numCores: Int = 1,
      traversal: Traversal = Arbitrary): A = {
    debug(s"Visiting workflow using traversal: ${traversal}")
    workflow.unpackedWalker(planPolicy, traversal=traversal).foreach(numCores, { v: UnpackedWorkVert =>
      val taskT: TaskTemplate = v.packed.value.get
      val task: VersionedTask = taskT.toRealTask(v).toVersionedTask(workflowVersion)
      debug(s"Visiting ${task}")
      visitor.visit(task)
    })
    visitor
  }
}
| jhclark/ducttape | src/main/scala/ducttape/workflow/Visitors.scala | Scala | mpl-2.0 | 1,758 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600j.v3
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600.v3.retriever.CT600BoxRetriever
case class J5(value: Option[String]) extends SchemeReferenceNumberBox {
  /**
   * Validation only applies when box B140 is answered true: the scheme
   * reference number is then mandatory and must match the expected format.
   */
  override def validate(boxRetriever: CT600BoxRetriever): Set[CtValidation] = {
    val b140Ticked = boxRetriever.retrieveB140().value == Some(true)
    if (b140Ticked)
      validateStringAsMandatory(id, this) ++ validateOptionalStringByRegex(id, this, taxAvoidanceSchemeNumberRegex)
    else
      Set.empty
  }
}
| ahudspith-equalexperts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600j/v3/J5.scala | Scala | apache-2.0 | 1,080 |
package charts.builder.spreadsheet.external
import charts.builder.spreadsheet.SpreadsheetDataSource
import scala.concurrent.Future
import java.io.InputStream
/** A directed link copying one spreadsheet cell to another, addressed by selector strings. */
trait CellLink {
  // Selector format is defined by the spreadsheet implementation.
  type Selector = String

  /**
   * Source (ie. cell to copy from) selector
   */
  def source: Selector

  /**
   * Destination (ie. cell to copy to) selector
   */
  def destination: Selector

  // Renders the link as quoted source, rightwards arrow, quoted destination.
  override def toString = s""""$source" \\u2192 "$destination""""
}
/** Plain immutable [[CellLink]] holding its two selector strings directly. */
class SimpleCellLink(val source: String, val destination: String)
  extends CellLink
/**
 * A cell link together with some representation of its source spreadsheet.
 * S is a raw String before resolution and an Option[SpreadsheetDataSource] after.
 */
sealed trait ExternalCellRef[S] {
  type SpreadsheetReference = S

  def source: SpreadsheetReference
  def link: CellLink
}
/** External reference whose source spreadsheet is still only a textual identifier. */
// Case-class constructor parameters are public vals by default, so the explicit
// `val` modifiers were redundant and have been dropped.
case class UnresolvedRef(source: String, link: CellLink)
  extends ExternalCellRef[String]
/** External reference whose source has been resolved (None when resolution failed). */
// Case-class params are public vals by default; redundant `val` modifiers removed.
case class ResolvedRef(
  source: Option[SpreadsheetDataSource],
  link: CellLink) extends ExternalCellRef[Option[SpreadsheetDataSource]]
/** Detects external cell references inside a spreadsheet. */
trait ExternalCellRefDetector {
  // True when the spreadsheet contains at least one external reference.
  def hasAny(ds: SpreadsheetDataSource): Boolean
  // All external references found, with sources still unresolved.
  def scan(ds: SpreadsheetDataSource): Set[UnresolvedRef]
}
/** Rewrites a spreadsheet by applying a set of resolved external references. */
trait ExternalCellRefReplacer {
  // If None is returned, then use original
  def replace(ds: SpreadsheetDataSource, refs: Set[ResolvedRef]):
    Option[InputStream]
}
/** Resolves textual source references into concrete spreadsheet data sources. */
trait ExternalCellRefResolver {
  // Identifier of the destination (base) spreadsheet the refs belong to.
  type DestinationIdentifier = String

  def resolve(base: DestinationIdentifier, refs: Set[UnresolvedRef]):
    Set[ResolvedRef]
}
package inloopio.math.timeseries.descriptor
import inloopio.math.PersistenceManager
import inloopio.math.timeseries.TFreq
import java.util.logging.Level
import java.util.logging.Logger
import scala.reflect.ClassTag
/**
* Descriptor is something like NetBeans' DataObject
*
* [S] Service class type
*
* @author Caoyuan Deng
*/
abstract class Descriptor[S](
    private var _serviceClassName: String,
    private var _freq: TFreq,
    private var _active: Boolean)(protected implicit val m: ClassTag[S]) extends Cloneable {

  private val log = Logger.getLogger(this.getClass.getName)

  /**
   * @note According to http://bits.netbeans.org/dev/javadoc/org-openide-modules/org/openide/modules/doc-files/classpath.html:
   * The basic thing you need to understand about how modules control class loading is this:
   * If module B has a declared dependency on module A, then classes in B can refer to classes in A
   * (but A cannot refer to B). If B does not have a declared dependency on A, it cannot refer to A.
   * Furthermore, dependencies are not considered transitive for purposes of classloading: if C has
   * a declared dependency on B, it can refer to classes in B, but not to A (unless it also declares
   * an explicit dependency on A).
   *
   * Also @see http://wiki.netbeans.org/DevFaqModuleCCE
   * Also @see http://netbeans-org.1045718.n5.nabble.com/Class-forName-otherModuleClass-in-library-modules-td3021534.html
   */
  private val classLoader = Thread.currentThread.getContextClassLoader

  // Content of the container this descriptor belongs to; assigned externally after construction.
  var containerContent: Content = _

  /** @Note: covariant type S can not occur in contravariant position in type S of parameter of setter */
  private var _serviceInstance: Option[_] = None

  // Auxiliary constructor: no service class yet, daily frequency, inactive.
  def this()(implicit m: ClassTag[S]) {
    this(null, TFreq.DAILY, false)
  }

  // Sets both identity fields at once; freq is cloned so the caller cannot mutate our copy.
  def set(serviceClassName: String, freq: TFreq) {
    this.serviceClassName = serviceClassName
    this.freq = freq.clone
  }

  def serviceClassName = _serviceClassName
  def serviceClassName_=(serviceClassName: String) = {
    this._serviceClassName = serviceClassName
  }

  def freq = _freq
  def freq_=(freq: TFreq) = {
    this._freq = freq
  }

  def active = _active
  def active_=(active: Boolean) = {
    this._active = active
  }

  /** Human-readable name for this descriptor; supplied by concrete subclasses. */
  def displayName: String

  // Drops the cached service instance so the next serviceInstance() call recreates it.
  def resetInstance {
    _serviceInstance = None
  }

  // Descriptor identity is the (service class name, frequency) pair.
  def idEquals(serviceClassName: String, freq: TFreq): Boolean = {
    this.serviceClassName.equals(serviceClassName) && this.freq.equals(freq)
  }

  /**
   * init and return a server instance
   * @param args args to init server instance
   */
  def createdServerInstance: S = {
    assert(isServiceInstanceCreated, "This method should only be called after serviceInstance created!")
    serviceInstance().get
  }

  // Lazily creates and caches the service instance.
  // NOTE(review): args only take effect on the first call; subsequent calls with
  // different args return the originally created instance — confirm this is intended.
  def serviceInstance(args: Any*): Option[S] = {
    if (_serviceInstance.isEmpty) {
      // @Note to pass a variable args to another function, should use type "_*" to extract it as a plain seq,
      // other wise, it will be treated as one arg:Seq[_], and the accepting function will compose it as
      // Seq(Seq(arg1, arg2, ...)) instead of Seq(arg1, arg2, ...)
      _serviceInstance = createServiceInstance(args: _*)
    }
    _serviceInstance.asInstanceOf[Option[S]]
  }

  def isServiceInstanceCreated: Boolean = {
    _serviceInstance.isDefined
  }

  protected def createServiceInstance(args: Any*): Option[S]

  // --- helpers ---

  // Finds a registered service whose class name matches serviceClassName, tolerating
  // the trailing '$' of Scala singleton class names; if none is registered, falls back
  // to reflective loading (singleton MODULE$ first, then a no-arg constructor).
  protected def lookupServiceTemplate(tpe: Class[S], folderName: String): Option[S] = {
    val services = PersistenceManager().lookupAllRegisteredServices(tpe, folderName)
    services find { service =>
      val className = service.asInstanceOf[AnyRef].getClass.getName
      className == serviceClassName || className == (serviceClassName + "$") || (className + "$") == serviceClassName
    } match {
      case None =>
        try {
          log.warning("Cannot find registeredService of " + tpe + " in folder '" +
            folderName + "': " + services.map(_.asInstanceOf[AnyRef].getClass.getName) +
            ", try Class.forName call: serviceClassName=" + serviceClassName)
          val klass = Class.forName(serviceClassName, true, classLoader)
          getScalaSingletonInstance(klass) match {
            // ClassTag m in scope makes this an unerased, checked match against S.
            case Some(x: S) => Option(x)
            case _ => Option(klass.newInstance.asInstanceOf[S])
          }
        } catch {
          case ex: Exception =>
            log.log(Level.SEVERE, "Failed to call Class.forName of class: " + serviceClassName, ex)
            None
        }
      case some => some
    }
  }

  // True for Scala object classes: name ends in '$', marker interface present, MODULE$ field.
  protected def isScalaSingletonClass(klass: Class[_]) = {
    klass.getSimpleName.endsWith("$") && klass.getInterfaces.exists(_.getName == "scala.ScalaObject") &&
      klass.getDeclaredFields.exists(_.getName == "MODULE$")
  }

  // Returns the MODULE$ singleton value of a Scala object class, if this is one.
  protected def getScalaSingletonInstance(klass: Class[_]): Option[AnyRef] = {
    if (klass.getSimpleName.endsWith("$") && klass.getInterfaces.exists(_.getName == "scala.ScalaObject")) {
      klass.getDeclaredFields.find(_.getName == "MODULE$") match {
        case Some(x) => Option(x.get(klass))
        case None => None
      }
    } else None
  }

  override def clone: Descriptor[S] = {
    try {
      super.clone.asInstanceOf[Descriptor[S]]
    } catch {
      // Cloneable is implemented, so this should be unreachable; logged defensively.
      case ex: CloneNotSupportedException => log.log(Level.SEVERE, ex.getMessage, ex); null
    }
  }
}
| dcaoyuan/inloopio-libs | inloopio-math/src/main/scala/inloopio/math/timeseries/descriptor/Descriptor.scala | Scala | bsd-3-clause | 5,339 |
package di
package curry
import scala.language.higherKinds
import scala.language.implicitConversions
/**
 * Enrichment syntax for the Functor/Monad type classes, plus hand-specialized
 * variants for Function1 (whose binary type constructor must be partially
 * applied through a type lambda).
 */
object Syntax {
  // Adds `map` to any F[A] that has a Functor instance.
  implicit class FunctorSyntax[F[_]: Functor, A](a: F[A]) {
    def map[B](f: A => B) = Functor[F].map(a)(f)
  }

  // Reader-style functor syntax: map over the result of a function A1 => A.
  implicit class Function1FunctorSyntax[A1, A](a: Function1[A1, A]) {
    def map[B](f: A => B) = Functor[({type f[x] = Function1[A1, x]})#f].map(a)(f)
  }

  implicit class MonadSyntax[M[_]: Monad, A](a: M[A]) {
    // NOTE(review): the method-level type parameter A shadows the class-level A,
    // and `unit` ignores the receiver `a` — presumably intended as a forwarder; confirm.
    def unit[A](a: => A) = Monad[M].unit(a)
    def flatMap[B](f: A => M[B]) = Monad[M].flatMap(a)(f)
  }

  // Reader-style monad syntax for Function1 (same type-lambda partial application).
  implicit class Function1MonadSyntax[A1, A](a: Function1[A1, A]) {
    def unit[A](a: => A) = Monad[({type f[x] = Function1[A1, x]})#f].unit(a)
    def flatMap[B](f: A => A1 => B) = Monad[({type f[x] = Function1[A1, x]})#f].flatMap(a)(f)
  }
}
| debasishg/di-article | src/main/scala/di/curry/Syntax.scala | Scala | apache-2.0 | 802 |
package com.dt.scala.type_parameterization
/**
* @author Wang Jialin
* Date 2015/7/17
* Contact Information:
* WeChat: 18610086859
* QQ: 1740415547
* Email: 18610086859@126.com
* Tel: 18610086859
*/
/** Minimal logging facility that [[Auth]] requires via its self-type. */
trait Logger {
  def log(msg: String)
}
// Self-type annotation: any concrete Auth must also mix in Logger, making `log`
// available without Auth inheriting from Logger (dependency injection by self-type).
trait Auth {
  auth: Logger =>
  def act(msg: String) {
    log(msg)
  }
}
// Concrete wiring: satisfies Auth's Logger self-type by mixing in a console logger.
object DI extends Auth with Logger {
  override def log(msg: String) = println(msg);
}
/** Demo entry point exercising the self-type based dependency injection. */
object Dependency_Injection {
  def main(args: Array[String]): Unit = {
    DI.act("I hope you'll like it")
  }
}
package org.hammerlab.bam.check.full
import hammerlab.show._
import org.hammerlab.bam.check.full.error.Flags
import org.hammerlab.test.Suite
import scala.collection.immutable.BitSet
/** Verifies that Flags built from a BitSet render the expected flag names via Show. */
class FlagsTest
  extends Suite {
  test("show") {
    // Bits 1-3 decode to negativeReadIdx, tooLargeReadIdx and negativeReadPos;
    // the paired 0 is the second element Flags.fromBitSet expects with the BitSet.
    Flags.fromBitSet(
      BitSet(
        1, 2, 3
      ) → 0
    )
    .show should be(
      "negativeReadIdx,tooLargeReadIdx,negativeReadPos"
    )
  }
}
| ryan-williams/spark-bam | check/src/test/scala/org/hammerlab/bam/check/full/FlagsTest.scala | Scala | apache-2.0 | 397 |
package com.nabijaczleweli.minecrasmer.compat.waila
import com.nabijaczleweli.minecrasmer.entity.tile.TileEntityOverclocker
import com.nabijaczleweli.minecrasmer.reference.{Container, Reference}
import com.nabijaczleweli.minecrasmer.resource.{ReloadableString, ResourcesReloadedEvent}
import net.minecraft.world.World
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent
import net.minecraftforge.fml.relauncher.{Side, SideOnly}
import java.util.{List => jList}
@SideOnly(Side.CLIENT)
object ProviderOverclocker extends AccessoryWailaDataProvider[TileEntityOverclocker] {
  // Register on the mod event bus so onResourcesReloaded receives reload events.
  Container.eventBus register this

  // Localized HUD line for the overclocker multiplier; refreshed on resource reload.
  // NOTE(review): never reassigned here — likely could be a val, but kept var to
  // preserve the public setter in the API.
  var multiplierMessage = new ReloadableString(s"hud.${Reference.NAMESPACED_PREFIX}compat.waila.accessory.overclocker.multiplier.name")

  // Appends the tile entity's current multiplier to the Waila tooltip body.
  override protected def getWailaBodyImpl(world: World, currenttip: jList[String], te: TileEntityOverclocker) = {
    currenttip add multiplierMessage.format(te.multiplier)
    currenttip
  }

  @SubscribeEvent
  def onResourcesReloaded(event: ResourcesReloadedEvent) {
    multiplierMessage.reload()
  }
}
| nabijaczleweli/ASMifier | src/main/scala/com/nabijaczleweli/minecrasmer/compat/waila/ProviderOverclocker.scala | Scala | mit | 1,055 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import java.io.File
import java.net.{URL, URLClassLoader}
import java.nio.charset.StandardCharsets
import java.sql.Timestamp
import java.util.Locale
import java.util.concurrent.TimeUnit
import scala.collection.JavaConverters._
import scala.collection.mutable.HashMap
import scala.language.implicitConversions
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.common.`type`.HiveDecimal
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.apache.hadoop.hive.ql.session.SessionState
import org.apache.hadoop.hive.serde2.io.{DateWritable, TimestampWritable}
import org.apache.hadoop.util.VersionInfo
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.Logging
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.catalog.CatalogTable
import org.apache.spark.sql.execution.command.DDLUtils
import org.apache.spark.sql.hive.client._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf._
import org.apache.spark.sql.internal.StaticSQLConf.{CATALOG_IMPLEMENTATION, WAREHOUSE_PATH}
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
private[spark] object HiveUtils extends Logging {
  /** Forces the catalog implementation to Hive on the given SparkContext and returns it. */
  def withHiveExternalCatalog(sc: SparkContext): SparkContext = {
    sc.conf.set(CATALOG_IMPLEMENTATION.key, "hive")
    sc
  }
  /** The version of hive used internally by Spark SQL. */
  val builtinHiveVersion: String = "1.2.1"

  // --- User-facing configuration entries for the Hive integration ---

  val HIVE_METASTORE_VERSION = buildConf("spark.sql.hive.metastore.version")
    .doc("Version of the Hive metastore. Available options are " +
      s"<code>0.12.0</code> through <code>2.1.1</code>.")
    .stringConf
    .createWithDefault(builtinHiveVersion)

  // A fake config which is only here for backward compatibility reasons. This config has no effect
  // to Spark, just for reporting the builtin Hive version of Spark to existing applications that
  // already rely on this config.
  val FAKE_HIVE_VERSION = buildConf("spark.sql.hive.version")
    .doc(s"deprecated, please use ${HIVE_METASTORE_VERSION.key} to get the Hive version in Spark.")
    .stringConf
    .createWithDefault(builtinHiveVersion)

  val HIVE_METASTORE_JARS = buildConf("spark.sql.hive.metastore.jars")
    .doc(s"""
      | Location of the jars that should be used to instantiate the HiveMetastoreClient.
      | This property can be one of three options: "
      | 1. "builtin"
      |   Use Hive ${builtinHiveVersion}, which is bundled with the Spark assembly when
      |   <code>-Phive</code> is enabled. When this option is chosen,
      |   <code>spark.sql.hive.metastore.version</code> must be either
      |   <code>${builtinHiveVersion}</code> or not defined.
      | 2. "maven"
      |   Use Hive jars of specified version downloaded from Maven repositories.
      | 3. A classpath in the standard format for both Hive and Hadoop.
      """.stripMargin)
    .stringConf
    .createWithDefault("builtin")

  val CONVERT_METASTORE_PARQUET = buildConf("spark.sql.hive.convertMetastoreParquet")
    .doc("When set to true, the built-in Parquet reader and writer are used to process " +
      "parquet tables created by using the HiveQL syntax, instead of Hive serde.")
    .booleanConf
    .createWithDefault(true)

  val CONVERT_METASTORE_PARQUET_WITH_SCHEMA_MERGING =
    buildConf("spark.sql.hive.convertMetastoreParquet.mergeSchema")
      .doc("When true, also tries to merge possibly different but compatible Parquet schemas in " +
        "different Parquet data files. This configuration is only effective " +
        "when \\"spark.sql.hive.convertMetastoreParquet\\" is true.")
      .booleanConf
      .createWithDefault(false)

  val CONVERT_METASTORE_ORC = buildConf("spark.sql.hive.convertMetastoreOrc")
    .internal()
    .doc("When set to true, the built-in ORC reader and writer are used to process " +
      "ORC tables created by using the HiveQL syntax, instead of Hive serde.")
    .booleanConf
    .createWithDefault(false)

  val HIVE_METASTORE_SHARED_PREFIXES = buildConf("spark.sql.hive.metastore.sharedPrefixes")
    .doc("A comma separated list of class prefixes that should be loaded using the classloader " +
      "that is shared between Spark SQL and a specific version of Hive. An example of classes " +
      "that should be shared is JDBC drivers that are needed to talk to the metastore. Other " +
      "classes that need to be shared are those that interact with classes that are already " +
      "shared. For example, custom appenders that are used by log4j.")
    .stringConf
    .toSequence
    .createWithDefault(jdbcPrefixes)

  // Default shared prefixes: common JDBC driver packages used to reach the metastore DB.
  private def jdbcPrefixes = Seq(
    "com.mysql.jdbc", "org.postgresql", "com.microsoft.sqlserver", "oracle.jdbc")

  val HIVE_METASTORE_BARRIER_PREFIXES = buildConf("spark.sql.hive.metastore.barrierPrefixes")
    .doc("A comma separated list of class prefixes that should explicitly be reloaded for each " +
      "version of Hive that Spark SQL is communicating with. For example, Hive UDFs that are " +
      "declared in a prefix that typically would be shared (i.e. <code>org.apache.spark.*</code>).")
    .stringConf
    .toSequence
    .createWithDefault(Nil)

  val HIVE_THRIFT_SERVER_ASYNC = buildConf("spark.sql.hive.thriftServer.async")
    .doc("When set to true, Hive Thrift server executes SQL queries in an asynchronous way.")
    .booleanConf
    .createWithDefault(true)
/**
* The version of the hive client that will be used to communicate with the metastore. Note that
* this does not necessarily need to be the same version of Hive that is used internally by
* Spark SQL for execution.
*/
private def hiveMetastoreVersion(conf: SQLConf): String = {
conf.getConf(HIVE_METASTORE_VERSION)
}
/**
* The location of the jars that should be used to instantiate the HiveMetastoreClient. This
* property can be one of three options:
* - a classpath in the standard format for both hive and hadoop.
* - builtin - attempt to discover the jars that were used to load Spark SQL and use those. This
* option is only valid when using the execution version of Hive.
* - maven - download the correct version of hive on demand from maven.
*/
private def hiveMetastoreJars(conf: SQLConf): String = {
conf.getConf(HIVE_METASTORE_JARS)
}
/**
* A comma separated list of class prefixes that should be loaded using the classloader that
* is shared between Spark SQL and a specific version of Hive. An example of classes that should
* be shared is JDBC drivers that are needed to talk to the metastore. Other classes that need
* to be shared are those that interact with classes that are already shared. For example,
* custom appenders that are used by log4j.
*/
private def hiveMetastoreSharedPrefixes(conf: SQLConf): Seq[String] = {
conf.getConf(HIVE_METASTORE_SHARED_PREFIXES).filterNot(_ == "")
}
/**
* A comma separated list of class prefixes that should explicitly be reloaded for each version
* of Hive that Spark SQL is communicating with. For example, Hive UDFs that are declared in a
* prefix that typically would be shared (i.e. org.apache.spark.*)
*/
private def hiveMetastoreBarrierPrefixes(conf: SQLConf): Seq[String] = {
conf.getConf(HIVE_METASTORE_BARRIER_PREFIXES).filterNot(_ == "")
}
/**
* Change time configurations needed to create a [[HiveClient]] into unified [[Long]] format.
*/
  // Returns a map of config key -> plain numeric string for every time-valued Hive ConfVar.
  private[hive] def formatTimeVarsForHiveClient(hadoopConf: Configuration): Map[String, String] = {
    // Hive 0.14.0 introduces timeout operations in HiveConf, and changes default values of a bunch
    // of time `ConfVar`s by adding time suffixes (`s`, `ms`, and `d` etc.). This breaks backwards-
    // compatibility when users are trying to connecting to a Hive metastore of lower version,
    // because these options are expected to be integral values in lower versions of Hive.
    //
    // Here we enumerate all time `ConfVar`s and convert their values to numeric strings according
    // to their output time units.
    Seq(
      ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY -> TimeUnit.SECONDS,
      ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT -> TimeUnit.SECONDS,
      ConfVars.METASTORE_CLIENT_SOCKET_LIFETIME -> TimeUnit.SECONDS,
      ConfVars.HMSHANDLERINTERVAL -> TimeUnit.MILLISECONDS,
      ConfVars.METASTORE_EVENT_DB_LISTENER_TTL -> TimeUnit.SECONDS,
      ConfVars.METASTORE_EVENT_CLEAN_FREQ -> TimeUnit.SECONDS,
      ConfVars.METASTORE_EVENT_EXPIRY_DURATION -> TimeUnit.SECONDS,
      ConfVars.METASTORE_AGGREGATE_STATS_CACHE_TTL -> TimeUnit.SECONDS,
      ConfVars.METASTORE_AGGREGATE_STATS_CACHE_MAX_WRITER_WAIT -> TimeUnit.MILLISECONDS,
      ConfVars.METASTORE_AGGREGATE_STATS_CACHE_MAX_READER_WAIT -> TimeUnit.MILLISECONDS,
      ConfVars.HIVES_AUTO_PROGRESS_TIMEOUT -> TimeUnit.SECONDS,
      ConfVars.HIVE_LOG_INCREMENTAL_PLAN_PROGRESS_INTERVAL -> TimeUnit.MILLISECONDS,
      ConfVars.HIVE_STATS_JDBC_TIMEOUT -> TimeUnit.SECONDS,
      ConfVars.HIVE_STATS_RETRIES_WAIT -> TimeUnit.MILLISECONDS,
      ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES -> TimeUnit.SECONDS,
      ConfVars.HIVE_ZOOKEEPER_SESSION_TIMEOUT -> TimeUnit.MILLISECONDS,
      ConfVars.HIVE_ZOOKEEPER_CONNECTION_BASESLEEPTIME -> TimeUnit.MILLISECONDS,
      ConfVars.HIVE_TXN_TIMEOUT -> TimeUnit.SECONDS,
      ConfVars.HIVE_COMPACTOR_WORKER_TIMEOUT -> TimeUnit.SECONDS,
      ConfVars.HIVE_COMPACTOR_CHECK_INTERVAL -> TimeUnit.SECONDS,
      ConfVars.HIVE_COMPACTOR_CLEANER_RUN_INTERVAL -> TimeUnit.MILLISECONDS,
      ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME -> TimeUnit.MILLISECONDS,
      ConfVars.HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME -> TimeUnit.SECONDS,
      ConfVars.HIVE_SERVER2_THRIFT_HTTP_COOKIE_MAX_AGE -> TimeUnit.SECONDS,
      ConfVars.HIVE_SERVER2_THRIFT_LOGIN_BEBACKOFF_SLOT_LENGTH -> TimeUnit.MILLISECONDS,
      ConfVars.HIVE_SERVER2_THRIFT_LOGIN_TIMEOUT -> TimeUnit.SECONDS,
      ConfVars.HIVE_SERVER2_THRIFT_WORKER_KEEPALIVE_TIME -> TimeUnit.SECONDS,
      ConfVars.HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT -> TimeUnit.SECONDS,
      ConfVars.HIVE_SERVER2_ASYNC_EXEC_KEEPALIVE_TIME -> TimeUnit.SECONDS,
      ConfVars.HIVE_SERVER2_LONG_POLLING_TIMEOUT -> TimeUnit.MILLISECONDS,
      ConfVars.HIVE_SERVER2_SESSION_CHECK_INTERVAL -> TimeUnit.MILLISECONDS,
      ConfVars.HIVE_SERVER2_IDLE_SESSION_TIMEOUT -> TimeUnit.MILLISECONDS,
      ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT -> TimeUnit.MILLISECONDS,
      ConfVars.SERVER_READ_SOCKET_TIMEOUT -> TimeUnit.SECONDS,
      ConfVars.HIVE_LOCALIZE_RESOURCE_WAIT_INTERVAL -> TimeUnit.MILLISECONDS,
      ConfVars.SPARK_CLIENT_FUTURE_TIMEOUT -> TimeUnit.SECONDS,
      ConfVars.SPARK_JOB_MONITOR_TIMEOUT -> TimeUnit.SECONDS,
      ConfVars.SPARK_RPC_CLIENT_CONNECT_TIMEOUT -> TimeUnit.MILLISECONDS,
      ConfVars.SPARK_RPC_CLIENT_HANDSHAKE_TIMEOUT -> TimeUnit.MILLISECONDS
    ).map { case (confVar, unit) =>
      // Render each duration in its expected unit as a bare number (no suffix).
      confVar.varname -> HiveConf.getTimeVar(hadoopConf, confVar, unit).toString
    }.toMap
  }
/**
* Check current Thread's SessionState type
* @return true when SessionState.get returns an instance of CliSessionState,
* false when it gets non-CliSessionState instance or null
*/
def isCliSessionState(): Boolean = {
val state = SessionState.get
var temp: Class[_] = if (state != null) state.getClass else null
var found = false
while (temp != null && !found) {
found = temp.getName == "org.apache.hadoop.hive.cli.CliSessionState"
temp = temp.getSuperclass
}
found
}
/**
* Create a [[HiveClient]] used for execution.
*
* Currently this must always be Hive 13 as this is the version of Hive that is packaged
* with Spark SQL. This copy of the client is used for execution related tasks like
* registering temporary functions or ensuring that the ThreadLocal SessionState is
* correctly populated. This copy of Hive is *not* used for storing persistent metadata,
* and only point to a dummy metastore in a temporary directory.
*/
  protected[hive] def newClientForExecution(
      conf: SparkConf,
      hadoopConf: Configuration): HiveClientImpl = {
    logInfo(s"Initializing execution hive, version $builtinHiveVersion")
    val loader = new IsolatedClientLoader(
      version = IsolatedClientLoader.hiveVersion(builtinHiveVersion),
      sparkConf = conf,
      execJars = Seq.empty,
      hadoopConf = hadoopConf,
      // In-memory Derby metastore: the execution client never stores persistent metadata.
      config = newTemporaryConfiguration(useInMemoryDerby = true),
      // No classloader isolation: the builtin Hive classes are used directly.
      isolationOn = false,
      baseClassLoader = Utils.getContextOrSparkClassLoader)
    loader.createClient().asInstanceOf[HiveClientImpl]
  }
/**
* Create a [[HiveClient]] used to retrieve metadata from the Hive MetaStore.
*
* The version of the Hive client that is used here must match the metastore that is configured
* in the hive-site.xml file.
*/
  protected[hive] def newClientForMetadata(
      conf: SparkConf,
      hadoopConf: Configuration): HiveClient = {
    // Normalize Hive time configs to plain numeric strings for older metastore clients,
    // then delegate to the full overload.
    val configurations = formatTimeVarsForHiveClient(hadoopConf)
    newClientForMetadata(conf, hadoopConf, configurations)
  }
/**
 * Builds a [[HiveClient]] for retrieving metadata from the Hive MetaStore.
 *
 * The jar source is chosen by `spark.sql.hive.metastore.jars`:
 *  - "builtin": use Spark's own classpath (requires metastore version == builtin version);
 *  - "maven": resolve the requested Hive version from Maven;
 *  - anything else: a path-separated list of jar paths/directories (a trailing
 *    "*" expands to all jars in the directory).
 *
 * @param conf           Spark configuration
 * @param hadoopConf     Hadoop configuration handed to the isolated loader
 * @param configurations extra Hive configuration key/value overrides
 */
protected[hive] def newClientForMetadata(
    conf: SparkConf,
    hadoopConf: Configuration,
    configurations: Map[String, String]): HiveClient = {
  val sqlConf = new SQLConf
  sqlConf.setConf(SQLContext.getSQLProperties(conf))
  val hiveMetastoreVersion = HiveUtils.hiveMetastoreVersion(sqlConf)
  val hiveMetastoreJars = HiveUtils.hiveMetastoreJars(sqlConf)
  val hiveMetastoreSharedPrefixes = HiveUtils.hiveMetastoreSharedPrefixes(sqlConf)
  val hiveMetastoreBarrierPrefixes = HiveUtils.hiveMetastoreBarrierPrefixes(sqlConf)
  val metaVersion = IsolatedClientLoader.hiveVersion(hiveMetastoreVersion)
  val isolatedLoader = if (hiveMetastoreJars == "builtin") {
    if (builtinHiveVersion != hiveMetastoreVersion) {
      throw new IllegalArgumentException(
        "Builtin jars can only be used when hive execution version == hive metastore version. " +
          s"Execution: $builtinHiveVersion != Metastore: $hiveMetastoreVersion. " +
          // Fixed typo in user-facing message: "vaild" -> "valid". The "$" is
          // intentionally literal here (env-var style), so no s-interpolator.
          "Specify a valid path to the correct hive jars using $HIVE_METASTORE_JARS " +
          s"or change ${HIVE_METASTORE_VERSION.key} to $builtinHiveVersion.")
    }
    // We recursively find all jars in the class loader chain,
    // starting from the given classLoader.
    def allJars(classLoader: ClassLoader): Array[URL] = classLoader match {
      case null => Array.empty[URL]
      case urlClassLoader: URLClassLoader =>
        urlClassLoader.getURLs ++ allJars(urlClassLoader.getParent)
      case other => allJars(other.getParent)
    }
    val classLoader = Utils.getContextOrSparkClassLoader
    val jars = allJars(classLoader)
    if (jars.length == 0) {
      throw new IllegalArgumentException(
        "Unable to locate hive jars to connect to metastore. " +
          "Please set spark.sql.hive.metastore.jars.")
    }
    logInfo(
      s"Initializing HiveMetastoreConnection version $hiveMetastoreVersion using Spark classes.")
    new IsolatedClientLoader(
      version = metaVersion,
      sparkConf = conf,
      hadoopConf = hadoopConf,
      execJars = jars.toSeq,
      config = configurations,
      isolationOn = !isCliSessionState(),
      barrierPrefixes = hiveMetastoreBarrierPrefixes,
      sharedPrefixes = hiveMetastoreSharedPrefixes)
  } else if (hiveMetastoreJars == "maven") {
    // TODO: Support for loading the jars from an already downloaded location.
    logInfo(
      s"Initializing HiveMetastoreConnection version $hiveMetastoreVersion using maven.")
    IsolatedClientLoader.forVersion(
      hiveMetastoreVersion = hiveMetastoreVersion,
      hadoopVersion = VersionInfo.getVersion,
      sparkConf = conf,
      hadoopConf = hadoopConf,
      config = configurations,
      barrierPrefixes = hiveMetastoreBarrierPrefixes,
      sharedPrefixes = hiveMetastoreSharedPrefixes)
  } else {
    // Convert to files and expand any directories.
    val jars =
      hiveMetastoreJars
        .split(File.pathSeparator)
        .flatMap {
          case path if new File(path).getName == "*" =>
            val files = new File(path).getParentFile.listFiles()
            if (files == null) {
              logWarning(s"Hive jar path '$path' does not exist.")
              Nil
            } else {
              files.filter(_.getName.toLowerCase(Locale.ROOT).endsWith(".jar"))
            }
          case path =>
            new File(path) :: Nil
        }
        .map(_.toURI.toURL)
    logInfo(
      s"Initializing HiveMetastoreConnection version $hiveMetastoreVersion " +
        s"using ${jars.mkString(":")}")
    new IsolatedClientLoader(
      version = metaVersion,
      sparkConf = conf,
      hadoopConf = hadoopConf,
      execJars = jars.toSeq,
      config = configurations,
      isolationOn = true,
      barrierPrefixes = hiveMetastoreBarrierPrefixes,
      sharedPrefixes = hiveMetastoreSharedPrefixes)
  }
  isolatedLoader.createClient()
}
/**
 * Constructs a configuration for hive, where the metastore is located in a temp directory.
 *
 * @param useInMemoryDerby when true the Derby database lives purely in memory
 *                         ("jdbc:derby:memory:..."); otherwise it is backed by
 *                         files under the temp directory
 * @return Hive configuration overrides pointing at the temporary metastore
 */
def newTemporaryConfiguration(useInMemoryDerby: Boolean): Map[String, String] = {
  // Derby URL segment: "memory:" selects the in-memory subprotocol.
  val withInMemoryMode = if (useInMemoryDerby) "memory:" else ""
  val tempDir = Utils.createTempDir()
  val localMetastore = new File(tempDir, "metastore")
  val propMap: HashMap[String, String] = HashMap()
  // We have to mask all properties in hive-site.xml that relates to metastore data source
  // as we used a local metastore here.
  HiveConf.ConfVars.values().foreach { confvar =>
    if (confvar.varname.contains("datanucleus") || confvar.varname.contains("jdo")
      || confvar.varname.contains("hive.metastore.rawstore.impl")) {
      // Reset each masked property to its Hive default expression.
      propMap.put(confvar.varname, confvar.getDefaultExpr())
    }
  }
  propMap.put(WAREHOUSE_PATH.key, localMetastore.toURI.toString)
  propMap.put(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
    s"jdbc:derby:${withInMemoryMode};databaseName=${localMetastore.getAbsolutePath};create=true")
  propMap.put("datanucleus.rdbms.datastoreAdapterClassName",
    "org.datanucleus.store.rdbms.adapter.DerbyAdapter")
  // SPARK-11783: When "hive.metastore.uris" is set, the metastore connection mode will be
  // remote (https://cwiki.apache.org/confluence/display/Hive/AdminManual+MetastoreAdmin
  // mentions that "If hive.metastore.uris is empty local mode is assumed, remote otherwise").
  // Remote means that the metastore server is running in its own process.
  // When the mode is remote, configurations like "javax.jdo.option.ConnectionURL" will not be
  // used (because they are used by remote metastore server that talks to the database).
  // Because execution Hive should always connects to an embedded derby metastore.
  // We have to remove the value of hive.metastore.uris. So, the execution Hive client connects
  // to the actual embedded derby metastore instead of the remote metastore.
  // You can search HiveConf.ConfVars.METASTOREURIS in the code of HiveConf (in Hive's repo).
  // Then, you will find that the local metastore mode is only set to true when
  // hive.metastore.uris is not set.
  propMap.put(ConfVars.METASTOREURIS.varname, "")
  // The execution client will generate garbage events, therefore the listeners that are generated
  // for the execution clients are useless. In order to not output garbage, we don't generate
  // these listeners.
  propMap.put(ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, "")
  propMap.put(ConfVars.METASTORE_EVENT_LISTENERS.varname, "")
  propMap.put(ConfVars.METASTORE_END_FUNCTION_LISTENERS.varname, "")
  // SPARK-21451: Spark will gather all `spark.hadoop.*` properties from a `SparkConf` to a
  // Hadoop Configuration internally, as long as it happens after SparkContext initialized.
  // Some instances such as `CliSessionState` used in `SparkSQLCliDriver` may also rely on these
  // Configuration. But it happens before SparkContext initialized, we need to take them from
  // system properties in the form of regular hadoop configurations.
  SparkHadoopUtil.get.appendSparkHadoopConfigs(sys.props.toMap, propMap)
  propMap.toMap
}
// Catalyst types whose values are rendered with a plain `toString` by the
// `toHiveString` / `toHiveStructString` fallbacks below.
protected val primitiveTypes =
  Seq(StringType, IntegerType, LongType, DoubleType, FloatType, BooleanType, ByteType,
    ShortType, DateType, TimestampType, BinaryType)
/**
 * Renders a (value, dataType) pair the way Hive prints a top-level column value.
 * Nested values inside structs/arrays/maps are delegated to `toHiveStructString`,
 * which quotes strings and lower-cases null.
 *
 * NOTE(review): the match is not exhaustive — a value whose type is none of the
 * handled cases and not in `primitiveTypes` throws a MatchError.
 */
protected[sql] def toHiveString(a: (Any, DataType)): String = a match {
  case (struct: Row, StructType(fields)) =>
    struct.toSeq.zip(fields).map {
      case (v, t) => s""""${t.name}":${toHiveStructString((v, t.dataType))}"""
    }.mkString("{", ",", "}")
  case (seq: Seq[_], ArrayType(typ, _)) =>
    seq.map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]")
  case (map: Map[_, _], MapType(kType, vType, _)) =>
    // Entries are sorted to make the rendering deterministic.
    map.map {
      case (key, value) =>
        toHiveStructString((key, kType)) + ":" + toHiveStructString((value, vType))
    }.toSeq.sorted.mkString("{", ",", "}")
  // Top-level null is upper-case, unlike nested struct fields ("null").
  case (null, _) => "NULL"
  case (d: Int, DateType) => new DateWritable(d).toString
  case (t: Timestamp, TimestampType) => new TimestampWritable(t).toString
  case (bin: Array[Byte], BinaryType) => new String(bin, StandardCharsets.UTF_8)
  case (decimal: java.math.BigDecimal, DecimalType()) =>
    // Hive strips trailing zeros so use its toString
    HiveDecimal.create(decimal).toString
  case (other, tpe) if primitiveTypes contains tpe => other.toString
}
/**
 * Hive outputs fields of structs slightly differently than top level attributes.
 * Differences from `toHiveString`: strings are wrapped in quotes, null renders
 * as lower-case "null", and date/time/binary get no special treatment.
 */
protected def toHiveStructString(a: (Any, DataType)): String = a match {
  case (struct: Row, StructType(fields)) =>
    struct.toSeq.zip(fields).map {
      case (v, t) => s""""${t.name}":${toHiveStructString((v, t.dataType))}"""
    }.mkString("{", ",", "}")
  case (seq: Seq[_], ArrayType(typ, _)) =>
    seq.map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]")
  case (map: Map[_, _], MapType(kType, vType, _)) =>
    map.map {
      case (key, value) =>
        toHiveStructString((key, kType)) + ":" + toHiveStructString((value, vType))
    }.toSeq.sorted.mkString("{", ",", "}")
  case (null, _) => "null"
  case (s: String, StringType) => "\\"" + s + "\\""
  case (decimal, DecimalType()) => decimal.toString
  case (other, tpe) if primitiveTypes contains tpe => other.toString
}
/**
 * Returns `table` with its schema inferred from the underlying Hive table
 * definition. Data source tables, or tables that already carry a data schema,
 * are returned untouched.
 */
def inferSchema(table: CatalogTable): CatalogTable = {
  val alreadyTyped = DDLUtils.isDatasourceTable(table) || table.dataSchema.nonEmpty
  if (alreadyTyped) {
    table
  } else {
    val hiveTable = HiveClientImpl.toHiveTable(table)
    // Hive keeps partition columns apart from the data columns, but for us the
    // partition columns are part of the schema, appended after the data columns.
    val allCols = hiveTable.getCols.asScala ++ hiveTable.getPartCols.asScala
    table.copy(schema = StructType(allCols.map(HiveClientImpl.fromHiveColumn)))
  }
}
}
| cin/spark | sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala | Scala | apache-2.0 | 24,335 |
/*
* Shadowsocks - A shadowsocks client for Android
* Copyright (C) 2014 <max.c.lv@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
* ___====-_ _-====___
* _--^^^#####// \\\\#####^^^--_
* _-^##########// ( ) \\\\##########^-_
* -############// |\\^^/| \\\\############-
* _/############// (@::@) \\\\############\\_
* /#############(( \\\\// ))#############\\
* -###############\\\\ (oo) //###############-
* -#################\\\\ / VV \\ //#################-
* -###################\\\\/ \\//###################-
* _#/|##########/\\######( /\\ )######/\\##########|\\#_
* |/ |#/\\#/\\#/\\/ \\#/\\##\\ | | /##/\\#/ \\/\\#/\\#/\\#| \\|
* ` |/ V V ` V \\#\\| | | |/#/ V ' V V \\| '
* ` ` ` ` / | | | | \\ ' ' ' '
* ( | | | | )
* __\\ | | | | /__
* (vvv(VVV)(VVV)vvv)
*
* HERE BE DRAGONS
*
*/
package com.github.shadowsocks
import java.io.File
import java.util.Locale
import java.lang.{Process, ProcessBuilder}
import android.app._
import android.content._
import android.content.pm.{PackageInfo, PackageManager}
import android.net.VpnService
import android.os._
import android.support.v4.app.NotificationCompat
import android.support.v4.content.ContextCompat
import android.util.Log
import android.widget.Toast
import com.github.shadowsocks.aidl.Config
import com.github.shadowsocks.utils._
import org.apache.commons.net.util.SubnetUtils
import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
/**
 * VpnService-based Shadowsocks service: establishes the TUN interface and
 * spawns the native helper processes (ss-local, ss-tunnel, pdnsd, tun2socks).
 */
class ShadowsocksVpnService extends VpnService with BaseService {

  val TAG = "ShadowsocksVpnService"

  val VPN_MTU = 1500

  // Address templates for the TUN interface; "%s" is the host part
  // (".1" is used for the local side, ".2" for tun2socks).
  val PRIVATE_VLAN = "26.26.26.%s"
  val PRIVATE_VLAN6 = "fdfe:dcba:9876::%s"

  var conn: ParcelFileDescriptor = null       // established TUN interface, null when down
  var apps: Array[ProxiedApp] = null
  var vpnThread: ShadowsocksVpnThread = null
  var closeReceiver: BroadcastReceiver = null

  // Native helper processes; null when not running.
  var sslocalProcess: Process = null
  var sstunnelProcess: Process = null
  var pdnsdProcess: Process = null
  var tun2socksProcess: Process = null

  // NOTE(review): name is a typo for "isBypass"; kept to avoid breaking callers.
  /** True when the configured proxy address falls inside the given subnet. */
  def isByass(net: SubnetUtils): Boolean = {
    val info = net.getInfo
    info.isInRange(config.proxy)
  }

  /** First-octet heuristic for private IPv4 ranges (10/8, 192.168/16, 172.16/12). */
  def isPrivateA(a: Int): Boolean = {
    if (a == 10 || a == 192 || a == 172) {
      true
    } else {
      false
    }
  }

  /** Two-octet check for private IPv4 ranges (10/8, 192.168/16, 172.16-31/16). */
  def isPrivateB(a: Int, b: Int): Boolean = {
    if (a == 10 || (a == 192 && b == 168) || (a == 172 && b >= 16 && b < 32)) {
      true
    } else {
      false
    }
  }

  /**
   * Routes binds either to the platform VPN interface (system binding) or to
   * the app's own service binder; any other action yields null.
   */
  override def onBind(intent: Intent): IBinder = {
    val action = intent.getAction
    if (VpnService.SERVICE_INTERFACE == action) {
      return super.onBind(intent)
    } else if (Action.SERVICE == action) {
      return binder
    }
    null
  }

  /**
   * Promotes the service to the foreground with a notification carrying a
   * "stop" action; `visible` controls the notification priority.
   */
  def notifyForegroundAlert(title: String, info: String, visible: Boolean) {
    val openIntent = new Intent(this, classOf[Shadowsocks])
    val contentIntent = PendingIntent.getActivity(this, 0, openIntent, 0)
    val closeIntent = new Intent(Action.CLOSE)
    val actionIntent = PendingIntent.getBroadcast(this, 0, closeIntent, 0)
    val builder = new NotificationCompat.Builder(this)
    builder
      .setWhen(0)
      .setColor(ContextCompat.getColor(this, R.color.material_accent_500))
      .setTicker(title)
      .setContentTitle(getString(R.string.app_name))
      .setContentText(info)
      .setContentIntent(contentIntent)
      .setSmallIcon(R.drawable.ic_stat_shadowsocks)
      .addAction(android.R.drawable.ic_menu_close_clear_cancel, getString(R.string.stop),
        actionIntent)
    if (visible)
      builder.setPriority(NotificationCompat.PRIORITY_DEFAULT)
    else
      builder.setPriority(NotificationCompat.PRIORITY_MIN)
    startForeground(1, builder.build)
  }

  override def onCreate() {
    super.onCreate()
    ConfigUtils.refresh(this)
  }

  // The system revoked the VPN permission (e.g. another VPN took over).
  override def onRevoke() {
    stopRunner()
  }

  /** Tears everything down: thread, native processes, TUN fd, receiver, state. */
  override def stopRunner() {
    super.stopRunner()
    if (vpnThread != null) {
      vpnThread.stopThread()
      vpnThread = null
    }
    stopForeground(true)
    // change the state
    changeState(State.STOPPING)
    ShadowsocksApplication.track(TAG, "stop")
    // reset VPN
    killProcesses()
    // close connections
    if (conn != null) {
      conn.close()
      conn = null
    }
    // stop the service if no callback registered
    if (getCallbackCount == 0) {
      stopSelf()
    }
    // clean up receiver
    if (closeReceiver != null) {
      unregisterReceiver(closeReceiver)
      closeReceiver = null
    }
    // change the state
    changeState(State.STOPPED)
  }

  /** Version name of this package, or a placeholder when lookup fails. */
  def getVersionName: String = {
    var version: String = null
    try {
      val pi: PackageInfo = getPackageManager.getPackageInfo(getPackageName, 0)
      version = pi.versionName
    } catch {
      case e: PackageManager.NameNotFoundException =>
        version = "Package name not found"
    }
    version
  }

  /** Destroys every native helper process that is currently running. */
  def killProcesses() {
    if (sslocalProcess != null) {
      sslocalProcess.destroy()
      sslocalProcess = null
    }
    if (sstunnelProcess != null) {
      sstunnelProcess.destroy()
      sstunnelProcess = null
    }
    if (tun2socksProcess != null) {
      tun2socksProcess.destroy()
      tun2socksProcess = null
    }
    if (pdnsdProcess != null) {
      pdnsdProcess.destroy()
      pdnsdProcess = null
    }
  }

  /**
   * Starts the VPN: registers the close receiver, ensures the VPN permission
   * is granted (bouncing through ShadowsocksRunnerActivity when not), then
   * asynchronously resolves the server address and brings up the connection.
   */
  override def startRunner(config: Config) {
    super.startRunner(config)
    vpnThread = new ShadowsocksVpnThread(this)
    vpnThread.start()
    // register close receiver
    val filter = new IntentFilter()
    filter.addAction(Intent.ACTION_SHUTDOWN)
    filter.addAction(Action.CLOSE)
    closeReceiver = (context: Context, intent: Intent) => {
      Toast.makeText(context, R.string.stopping, Toast.LENGTH_SHORT).show()
      stopRunner()
    }
    registerReceiver(closeReceiver, filter)
    // ensure the VPNService is prepared
    if (VpnService.prepare(this) != null) {
      val i = new Intent(this, classOf[ShadowsocksRunnerActivity])
      i.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
      startActivity(i)
      return
    }
    ShadowsocksApplication.track(TAG, "start")
    changeState(State.CONNECTING)
    Future {
      // Magic address selects the bundled public config — NOTE(review): confirm
      // this sentinel against ConfigUtils.getPublicConfig usage.
      if (config.proxy == "198.199.101.152") {
        val holder = ShadowsocksApplication.containerHolder
        try {
          this.config = ConfigUtils.getPublicConfig(getBaseContext, holder.getContainer, config)
        } catch {
          case ex: Exception =>
            changeState(State.STOPPED, getString(R.string.service_failed))
            stopRunner()
            this.config = null
        }
      }
      if (config != null) {
        // reset the context
        killProcesses()
        // Resolve the server address
        var resolved: Boolean = false
        if (!Utils.isNumeric(config.proxy)) {
          Utils.resolve(config.proxy, enableIPv6 = true) match {
            case Some(addr) =>
              config.proxy = addr
              resolved = true
            case None => resolved = false
          }
        } else {
          resolved = true
        }
        if (resolved && handleConnection) {
          notifyForegroundAlert(getString(R.string.forward_success),
            getString(R.string.service_running).formatLocal(Locale.ENGLISH, config.profileName), false)
          changeState(State.CONNECTED)
        } else {
          changeState(State.STOPPED, getString(R.string.service_failed))
          stopRunner()
        }
      }
    }
  }

  /**
   * Starts all native daemons, brings up the TUN interface, and hands its file
   * descriptor to the protector with up to 4 backoff retries.
   *
   * @return true when the fd was successfully sent
   */
  def handleConnection: Boolean = {
    startShadowsocksDaemon()
    if (!config.isUdpDns) {
      startDnsDaemon()
      startDnsTunnel()
    }
    val fd = startVpn()
    if (fd != -1) {
      var tries = 1
      while (tries < 5) {
        // Linear backoff: 1s, 2s, 3s, 4s.
        Thread.sleep(1000 * tries)
        if (System.sendfd(fd) != -1) {
          return true
        }
        tries += 1
      }
    }
    false
  }

  /**
   * Writes the ss-local config (and optional ACL file) and launches ss-local.
   * NOTE(review): the route match has no case for Route.ALL — fine here because
   * the branch is guarded, but any new route value would throw a MatchError.
   */
  def startShadowsocksDaemon() {
    if (config.route != Route.ALL) {
      val acl: Array[String] = config.route match {
        case Route.BYPASS_LAN => getResources.getStringArray(R.array.private_route)
        case Route.BYPASS_CHN => getResources.getStringArray(R.array.chn_route_full)
      }
      ConfigUtils.printToFile(new File(Path.BASE + "acl.list"))(p => {
        acl.foreach(item => p.println(item))
      })
    }
    val conf = ConfigUtils
      .SHADOWSOCKS.formatLocal(Locale.ENGLISH, config.proxy, config.remotePort, config.localPort,
        config.sitekey, config.encMethod, 600)
    ConfigUtils.printToFile(new File(Path.BASE + "ss-local-vpn.conf"))(p => {
      p.println(conf)
    })
    val cmd = new ArrayBuffer[String]
    cmd += (Path.BASE + "ss-local", "-V", "-u"
      , "-b", "127.0.0.1"
      , "-t", "600"
      , "-c", Path.BASE + "ss-local-vpn.conf")
    if (config.isAuth) cmd += "-A"
    if (config.route != Route.ALL) {
      cmd += "--acl"
      cmd += (Path.BASE + "acl.list")
    }
    if (BuildConfig.DEBUG) Log.d(TAG, cmd.mkString(" "))
    sslocalProcess = new ProcessBuilder()
      .command(cmd)
      .redirectErrorStream(false)
      .start()
  }

  /** Launches ss-tunnel forwarding local port 8163 to 8.8.8.8:53 for DNS. */
  def startDnsTunnel() = {
    val conf = ConfigUtils
      .SHADOWSOCKS.formatLocal(Locale.ENGLISH, config.proxy, config.remotePort, 8163,
        config.sitekey, config.encMethod, 10)
    ConfigUtils.printToFile(new File(Path.BASE + "ss-tunnel-vpn.conf"))(p => {
      p.println(conf)
    })
    val cmd = new ArrayBuffer[String]
    cmd += (Path.BASE + "ss-tunnel"
      , "-V"
      , "-u"
      , "-t", "10"
      , "-b", "127.0.0.1"
      , "-l", "8163"
      , "-L", "8.8.8.8:53"
      , "-c", Path.BASE + "ss-tunnel-vpn.conf")
    if (config.isAuth) cmd += "-A"
    if (BuildConfig.DEBUG) Log.d(TAG, cmd.mkString(" "))
    sstunnelProcess = new ProcessBuilder()
      .command(cmd)
      .redirectErrorStream(false)
      .start()
  }

  /** Launches pdnsd on port 8153, resolving through the ss-tunnel on 8163. */
  def startDnsDaemon() {
    val ipv6 = if (config.isIpv6) "" else "reject = ::/0;"
    val conf = {
      if (config.route == Route.BYPASS_CHN) {
        val reject = ConfigUtils.getRejectList(getContext)
        val blackList = ConfigUtils.getBlackList(getContext)
        ConfigUtils.PDNSD_DIRECT.formatLocal(Locale.ENGLISH, "0.0.0.0", 8153,
          reject, blackList, 8163, ipv6)
      } else {
        ConfigUtils.PDNSD_LOCAL.formatLocal(Locale.ENGLISH, "0.0.0.0", 8153,
          8163, ipv6)
      }
    }
    ConfigUtils.printToFile(new File(Path.BASE + "pdnsd-vpn.conf"))(p => {
      p.println(conf)
    })
    val cmd = Path.BASE + "pdnsd -c " + Path.BASE + "pdnsd-vpn.conf"
    if (BuildConfig.DEBUG) Log.d(TAG, cmd)
    pdnsdProcess = new ProcessBuilder()
      .command(cmd.split(" ").toSeq)
      .redirectErrorStream(false)
      .start()
  }

  override def getContext = getBaseContext

  /**
   * Builds and establishes the TUN interface, then launches tun2socks over it.
   *
   * @return the TUN file descriptor, or -1 on failure (after stopping the runner)
   */
  def startVpn(): Int = {
    val builder = new Builder()
    builder
      .setSession(config.profileName)
      .setMtu(VPN_MTU)
      .addAddress(PRIVATE_VLAN.formatLocal(Locale.ENGLISH, "1"), 24)
      .addDnsServer("8.8.8.8")
    if (config.isIpv6) {
      builder.addAddress(PRIVATE_VLAN6.formatLocal(Locale.ENGLISH, "1"), 126)
      builder.addRoute("::", 0)
    }
    if (Utils.isLollipopOrAbove) {
      // Per-app VPN is only available on Lollipop and above.
      if (config.isProxyApps) {
        val apps = AppManager.getProxiedApps(this, config.proxiedAppString)
        val pkgSet: mutable.HashSet[String] = new mutable.HashSet[String]
        for (app <- apps) {
          if (app.proxied) {
            pkgSet.add(app.packageName)
          }
        }
        for (pkg <- pkgSet) {
          if (!config.isBypassApps) {
            builder.addAllowedApplication(pkg)
          } else {
            builder.addDisallowedApplication(pkg)
          }
        }
      }
    }
    if (config.route == Route.ALL) {
      builder.addRoute("0.0.0.0", 0)
    } else {
      val privateList = getResources.getStringArray(R.array.bypass_private_route)
      privateList.foreach(cidr => {
        val addr = cidr.split('/')
        builder.addRoute(addr(0), addr(1).toInt)
      })
    }
    // Always route the DNS server's /16 through the tunnel.
    builder.addRoute("8.8.0.0", 16)
    try {
      conn = builder.establish()
      if (conn == null) changeState(State.STOPPED, getString(R.string.reboot_required))
    } catch {
      case ex: IllegalStateException =>
        changeState(State.STOPPED, ex.getMessage)
        conn = null
      case ex: Exception =>
        ex.printStackTrace()
        conn = null
    }
    if (conn == null) {
      stopRunner()
      return -1
    }
    val fd = conn.getFd
    // NOTE(review): Path.BASE is passed as an extra format argument with no
    // matching "%s" in the template — likely a leftover; harmless but confirm.
    var cmd = (Path.BASE +
      "tun2socks --netif-ipaddr %s "
      + "--netif-netmask 255.255.255.0 "
      + "--socks-server-addr 127.0.0.1:%d "
      + "--tunfd %d "
      + "--tunmtu %d "
      + "--loglevel 3")
      .formatLocal(Locale.ENGLISH,
        PRIVATE_VLAN.formatLocal(Locale.ENGLISH, "2"),
        config.localPort, fd, VPN_MTU, Path.BASE)
    if (config.isIpv6)
      cmd += " --netif-ip6addr " + PRIVATE_VLAN6.formatLocal(Locale.ENGLISH, "2")
    if (config.isUdpDns)
      cmd += " --enable-udprelay"
    else
      cmd += " --dnsgw %s:8153".formatLocal(Locale.ENGLISH, PRIVATE_VLAN.formatLocal(Locale.ENGLISH, "1"))
    if (BuildConfig.DEBUG) Log.d(TAG, cmd)
    tun2socksProcess = new ProcessBuilder()
      .command(cmd.split(" ").toSeq)
      .redirectErrorStream(false)
      .start()
    fd
  }

  override def stopBackgroundService() {
    stopSelf()
  }

  override def getTag = TAG

  override def getServiceMode = Mode.VPN
}
| tenwx/shadowsocks-android | src/main/scala/com/github/shadowsocks/ShadowsocksVpnService.scala | Scala | gpl-3.0 | 14,396 |
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.appjet.ajstdlib;
import scala.collection.mutable.{HashMap,ListBuffer};
import java.util.concurrent.locks.ReentrantLock;
/**
 * Minimal global profiler: records wall-clock durations of named, possibly
 * nested operations. Nested `start` calls on the same thread produce dotted
 * labels ("outer.inner"). All recorded data is guarded by a single lock.
 */
object timer {
  // Op label (dotted call-stack path) -> recorded durations in milliseconds.
  // Uses `Double`/`Long` instead of the removed lowercase `double`/`long` aliases.
  var _timings = new HashMap[String, ListBuffer[Double]]
  var _lock = new ReentrantLock
  // Per-thread stack of currently open operation names, used to build labels.
  var _callstack = new ThreadLocal[ListBuffer[String]]

  /**
   * Starts timing `opname`; call `done()` on the returned handle to record the
   * elapsed time (the handle is a structural type, invoked reflectively).
   */
  def start(opname: String) = {
    var _localcallstack = _callstack.get()
    if (_localcallstack == null) {
      _callstack.set(new ListBuffer[String])
      _localcallstack = _callstack.get()
    }
    _localcallstack += opname
    val _oplabel = _localcallstack.mkString(".")
    val startTime: Long = System.nanoTime()
    new {
      def done(): Unit = {
        val elapsedTimeMs: Double = (System.nanoTime() - startTime) / 1.0e6
        _lock.lock()
        try {
          val times = _timings.getOrElse(_oplabel, new ListBuffer[Double])
          times += elapsedTimeMs
          _timings.put(_oplabel, times)
          _localcallstack.remove(_localcallstack.length - 1)
        } finally {
          _lock.unlock()
        }
      }
    }
  }

  /** All op labels recorded so far. */
  def getOpNames(): Array[String] = {
    _lock.lock()
    try {
      _timings.keys.toList.toArray
    } finally {
      _lock.unlock()
    }
  }

  /**
   * Returns Array(count, total ms, mean ms) for `opname`.
   * Throws NoSuchElementException for an unknown label (unchanged behavior).
   */
  def getStats(opname: String): Array[Double] = {
    _lock.lock()
    try {
      val times = _timings(opname)
      val total = times.foldRight(0.0)(_ + _)
      Array(times.size, total, total / times.size)
    } finally {
      _lock.unlock()
    }
  }

  /** Discards all recorded timings. Lock released via try/finally (fix: was unguarded). */
  def reset(): Unit = {
    _lock.lock()
    try {
      _timings = new HashMap[String, ListBuffer[Double]]
    } finally {
      _lock.unlock()
    }
  }
}
| OpeningDesign/SketchSpace | infrastructure/net.appjet.ajstdlib/timer.scala | Scala | apache-2.0 | 2,239 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.hbase
import java.text.SimpleDateFormat
import java.util.TimeZone
import com.datamountaineer.streamreactor.connect.hbase.BytesHelper._
import com.typesafe.scalalogging.StrictLogging
import org.apache.kafka.connect.data._
import org.apache.kafka.connect.errors.ConnectException
import scala.collection.JavaConverters._
/** Extracts (column name -> serialized bytes) pairs from a Kafka Connect [[Struct]]. */
trait FieldsValuesExtractor {
  def get(struct: Struct): Seq[(String, Array[Byte])]
}
/**
 * Extracts selected fields from a Kafka Connect [[Struct]] and serializes each
 * value to bytes.
 *
 * @param includeAllFields when true every field of the struct is extracted
 * @param fieldsAliasMap   source field name -> target column name; when
 *                         `includeAllFields` is false it also acts as the
 *                         selection filter
 */
case class StructFieldsExtractorBytes(includeAllFields: Boolean, fieldsAliasMap: Map[String, String]) extends FieldsValuesExtractor with StrictLogging {

  /** Returns (column name, serialized bytes) for every selected, non-null field. */
  def get(struct: Struct): Seq[(String, Array[Byte])] = {
    val schema = struct.schema()
    val fields: Seq[Field] = if (includeAllFields) {
      schema.fields().asScala
    }
    else {
      val selectedFields = schema.fields().asScala.filter(f => fieldsAliasMap.contains(f.name()))
      // Fail fast when the alias map references columns the schema lacks.
      val diffSet = fieldsAliasMap.keySet.diff(selectedFields.map(_.name()).toSet)
      if (diffSet.nonEmpty) {
        val errMsg = s"Following columns ${diffSet.mkString(",")} have not been found. Available columns:${fieldsAliasMap.keys.mkString(",")}"
        logger.error(errMsg)
        throw new ConnectException(errMsg)
      }
      selectedFields
    }
    // Null-valued fields are silently dropped (getFieldBytes yields None).
    val fieldsAndValues = fields.flatMap(field =>
      getFieldBytes(field, struct).map(bytes => fieldsAliasMap.getOrElse(field.name(), field.name()) -> bytes))
    fieldsAndValues
  }

  /**
   * Serializes one field's value, handling the Connect logical types
   * (Decimal, Time, Timestamp) first and falling back on the physical schema
   * type. Returns None when the value is null.
   */
  private def getFieldBytes(field: Field, struct: Struct): Option[Array[Byte]] = {
    Option(struct.get(field))
      .map { value =>
        Option(field.schema().name()).collect {
          case Decimal.LOGICAL_NAME =>
            value.asInstanceOf[Any] match {
              case _:java.math.BigDecimal => value.fromBigDecimal()
              // Raw bytes are first decoded back into a BigDecimal.
              case arr: Array[Byte] => Decimal.toLogical(field.schema, arr).asInstanceOf[Any].fromBigDecimal()
              case _ => throw new IllegalArgumentException(s"${field.name()} is not handled for value:$value")
            }
          case Time.LOGICAL_NAME =>
            value.asInstanceOf[Any] match {
              case i: Int => StructFieldsExtractorBytes.TimeFormat.format(Time.toLogical(field.schema, i)).asInstanceOf[Any].fromString()
              case d:java.util.Date => StructFieldsExtractorBytes.TimeFormat.format(d).asInstanceOf[Any].fromString()
              case _ => throw new IllegalArgumentException(s"${field.name()} is not handled for value:$value")
            }
          case Timestamp.LOGICAL_NAME =>
            value.asInstanceOf[Any] match {
              case d:java.util.Date => StructFieldsExtractorBytes.DateFormat.format(d).asInstanceOf[Any].fromString()
              case l: Long => StructFieldsExtractorBytes.DateFormat.format(Timestamp.toLogical(field.schema, l)).asInstanceOf[Any].fromString()
              case _ => throw new IllegalArgumentException(s"${field.name()} is not handled for value:$value")
            }
        }.getOrElse {
          // Not a recognized logical type: dispatch on the physical type.
          field.schema().`type`() match {
            case Schema.Type.BOOLEAN => value.fromBoolean()
            case Schema.Type.BYTES => value.fromBytes()
            case Schema.Type.FLOAT32 => value.fromFloat()
            case Schema.Type.FLOAT64 => value.fromDouble()
            case Schema.Type.INT8 => value.fromByte()
            case Schema.Type.INT16 => value.fromShort()
            case Schema.Type.INT32 => value.fromInt()
            case Schema.Type.INT64 => value.fromLong()
            case Schema.Type.STRING => value.fromString()
            case other => throw new ConnectException(s"$other is not a recognized schema!")
          }
        }
      }
  }
}
object StructFieldsExtractorBytes {
  // NOTE(review): SimpleDateFormat is not thread-safe; these shared instances
  // assume single-threaded use — confirm against the connector's task model.
  val DateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
  val TimeFormat = new SimpleDateFormat("HH:mm:ss.SSSZ")
  // Timestamps are rendered in UTC regardless of the JVM default zone.
  DateFormat.setTimeZone(TimeZone.getTimeZone("UTC"))
} | datamountaineer/stream-reactor | kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/StructFieldsExtractorBytes.scala | Scala | apache-2.0 | 4,480 |
package com.karasiq.shadowcloud.server.http.static
import akka.http.scaladsl.server.{Directives, Route}
import com.karasiq.shadowcloud.server.http.{SCAkkaHttpApiRoutes, SCHttpServerSettings}
/** Serves the bundled web UI: `/` maps to index.html, everything else to webapp resources. */
trait SCAkkaHttpStaticRoutes { self: SCAkkaHttpApiRoutes with SCHttpServerSettings with Directives ⇒
  val scStaticRoute: Route = encodeResponse {
    val indexPage = pathEndOrSingleSlash(getFromResource("webapp/index.html"))
    val staticAssets = getFromResourceDirectory("webapp")
    indexPage ~ staticAssets
  }
}
| Karasiq/shadowcloud | server/static-routes/src/main/scala/com/karasiq/shadowcloud/server/http/static/SCAkkaHttpStaticRoutes.scala | Scala | apache-2.0 | 498 |
import scala.collection.mutable.Stack
/**
 * Checks whether the brackets `()`, `[]`, `{}` in a string are balanced.
 * Non-bracket characters are ignored.
 */
object BalancedString2 {

  // Maps each closing bracket to its expected opening partner.
  private val pairs = Map(')' -> '(', ']' -> '[', '}' -> '{')
  private val openers = pairs.values.toSet

  /** Returns true when every bracket in `s` is properly matched and nested. */
  def isBalanced(s: String): Boolean = {
    val stack = Stack[Char]()
    // forall short-circuits on the first mismatched/unmatched closer.
    val wellNested = s.forall {
      case c if openers(c) =>
        stack.push(c)
        true
      case c if pairs.contains(c) =>
        // A closer must match the most recent unmatched opener.
        stack.nonEmpty && stack.pop() == pairs(c)
      case _ =>
        true // ignore everything that is not a bracket
    }
    // Any leftover openers mean the string is unbalanced.
    wellNested && stack.isEmpty
  }

  /**
   * Entry point. Uses the first command-line argument as input when provided,
   * falling back to the original hard-coded sample (backward-compatible).
   */
  def main(args: Array[String]): Unit = {
    val s = args.headOption.getOrElse("{ AAA BHARAT } ]")
    if (isBalanced(s)) println("Balanced String")
    else println("Unbalanced String")
  }
}
/*
* Copyright 2014 Intelix Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package common.actors
import com.typesafe.scalalogging.StrictLogging
/**
 * Mixin letting an actor swap between a custom behavior and a shared common
 * behavior. The common behavior simply logs [[Loggable]] messages at info level.
 */
trait ActorWithComposableBehavior extends ActorUtils with StrictLogging {

  override def receive: Receive = commonBehavior

  /** Baseline behavior that every custom behavior falls back to. */
  def commonBehavior: Receive = {
    case msg: Loggable =>
      logger.info(String.valueOf(msg))
  }

  /** Installs `customBehavior`, falling back to `commonBehavior` for unhandled messages. */
  final def switchToCustomBehavior(customBehavior: Receive, bid: Option[String] = None) = {
    logger.debug(s"Switched to custom behavior, id=$bid")
    val combined = customBehavior orElse commonBehavior
    context.become(combined)
  }

  /** Restores the baseline behavior. */
  final def switchToCommonBehavior() = {
    logger.debug("Switched to common behavior")
    context.become(commonBehavior)
  }
}
package org.jetbrains.plugins.scala
package lang.refactoring.introduceField
import com.intellij.psi.{PsiFile, PsiElement}
import com.intellij.openapi.project.Project
import com.intellij.openapi.editor.Editor
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
import org.jetbrains.plugins.scala.lang.refactoring.util.{ConflictsReporter, ScalaVariableValidator, ScalaRefactoringUtil}
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaRefactoringUtil.IntroduceException
import org.jetbrains.plugins.scala.lang.refactoring.namesSuggester.NameSuggester
import ScalaIntroduceFieldHandlerBase._
/**
 * Carries everything the introduce-field refactoring needs for one invocation:
 * the selected element, its possible types, the target class, occurrence
 * ranges, a conflict validator, and suggested names.
 *
 * Nikolay.Tropin
 * 7/15/13
 */
class IntroduceFieldContext[T <: PsiElement](val project: Project,
                                             val editor: Editor,
                                             val file: PsiFile,
                                             val element: T,
                                             val types: Array[ScType],
                                             val aClass: ScTemplateDefinition) {

  // Ranges of all occurrences of the (unparenthesized) expression within the
  // class body; null when the element is not an expression.
  val occurrences = element match {
    case expr: ScExpression =>
      ScalaRefactoringUtil.getOccurrenceRanges(ScalaRefactoringUtil.unparExpr(expr), aClass.extendsBlock)
    case _ => null
  }

  // Validator with a no-op conflicts reporter: conflicts are never surfaced here.
  // NOTE: depends on `occurrences` being initialized above — do not reorder.
  val validator = ScalaVariableValidator(new ConflictsReporter {
    def reportConflicts(conflicts: Array[String], project: Project): Boolean = false
  }, project, editor, file, element, occurrences)

  // Whether the field can be initialized right at its declaration site.
  // Non-expression elements are unsupported and abort the refactoring.
  val canBeInitInDecl = element match {
    case expr: ScExpression => canBeInitializedInDeclaration(expr, aClass)
    case _ => throw new IntroduceException
  }

  // Name suggestions for the new field, already checked against the validator.
  val possibleNames = element match {
    case expr: ScExpression => NameSuggester.suggestNames(expr, validator)
    case _ => throw new IntroduceException
  }

  def canBeInitLocally(replaceAll: Boolean) = ScalaIntroduceFieldHandlerBase.canBeInitInLocalScope(this, replaceAll)
}
| consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/refactoring/introduceField/IntroduceFieldContext.scala | Scala | apache-2.0 | 2,049 |
/*
* Copyright 2014 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.http4s.tomcat
import cats.effect._
import com.example.http4s.ssl
import org.http4s.server.Server
import org.http4s.tomcat.server.TomcatBuilder
object TomcatSslExample extends IOApp {
  // Acquire the Tomcat server resource and hold it open until the process is
  // terminated; the resource finalizer shuts the server down on exit.
  override def run(args: List[String]): IO[ExitCode] = {
    val serveForever = TomcatSslExampleApp.resource[IO].use(_ => IO.never)
    serveForever.as(ExitCode.Success)
  }
}
object TomcatSslExampleApp {

  /** Tomcat builder from the shared example app, reconfigured for HTTPS on port 8443. */
  def builder[F[_]: Async]: TomcatBuilder[F] = {
    val base = TomcatExampleApp.builder[F]
    base
      .bindHttp(8443)
      .withSSL(ssl.storeInfo, ssl.keyManagerPassword)
  }

  /** The configured server wrapped as a managed resource. */
  def resource[F[_]: Async]: Resource[F, Server] = builder[F].resource
}
| http4s/http4s | examples/tomcat/src/main/scala/com/example/http4s/tomcat/TomcatSslExample.scala | Scala | apache-2.0 | 1,204 |
/*
* Copyright 2009-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ccf.transport.json
import ccf.transport.Decoder
import ccf.transport.MalformedDataException
import ccf.transport.{TransportRequest, TransportResponse}
import com.twitter.json.{Json, JsonException}
/**
 * Decodes JSON message frames into transport requests/responses. A frame is
 * a JSON object with a mandatory "headers" map and an optional "content"
 * entry; anything else raises MalformedDataException.
 */
object JsonDecoder extends Decoder {
  /** Decodes a frame into a TransportResponse; None for an empty payload. */
  def decodeResponse(m: String) = decodeMessage[TransportResponse](m, toResponse)
  /** Decodes a frame into a TransportRequest; None for an empty payload. */
  def decodeRequest(m: String) = decodeMessage[TransportRequest](m, toRequest)
  // Shared decode path: empty input means "no message"; parser failures are
  // translated into MalformedDataException.
  private def decodeMessage[T](m: String, f: (Map[String, String], Option[Any]) => T) = {
    try { if (m.isEmpty) None else { Some(parse(m, f)) } }
    catch { case e: JsonException => malformedDataException(e.toString) }
  }
  // Parses the frame and feeds headers/content to the constructor `f`.
  // The Map match is unchecked due to type erasure, hence the explicit cast.
  private def parse[T](msg: String, f: (Map[String, String], Option[Any]) => T) = {
    Json.parse(msg) match {
      case m: Map[_, _] => {
        val typedMap = m.asInstanceOf[Map[Any, Any]]
        f(headers(typedMap), content(typedMap))
      }
      case _ => malformedDataException("Invalid message frame")
    }
  }
  // The "headers" entry is mandatory; its absence is a protocol violation.
  private def headers(m: Map[Any, Any]): Map[String, String] = m.get("headers") match {
    case Some(headers) => headersToMap(headers)
    case None => malformedDataException("Missing message header")
  }
  // Stringifies all header keys and values; non-map headers are rejected.
  private def headersToMap(headers: Any): Map[String, String] = headers match {
    case m: Map[_, _] => {
      val seqOfHeaders = for ((k, v) <- m) yield (k.toString, v.toString)
      Map[String, String](seqOfHeaders.toList: _*)
    }
    case _ => malformedDataException("Invalid message header")
  }
  // Content is optional and passed through untyped.
  private def content(m: Map[Any, Any]): Option[Any] = m.get("content")
  private def toResponse(h: Map[String, String], c: Option[Any]) = TransportResponse(h, c)
  private def toRequest(h: Map[String, String], c: Option[Any]) = TransportRequest(h, c)
  private def malformedDataException(s: String) = throw new MalformedDataException(s)
}
| akisaarinen/ccf | ccf/src/main/scala/ccf/transport/json/JsonDecoder.scala | Scala | apache-2.0 | 2,466 |
package me.aihe.dataframe
import me.aihe.dataframe.types.IntType
import org.scalatest.{BeforeAndAfterAll, FunSuite}
/**
* Created by aihe on 12/21/15.
*/
/**
 * End-to-end tests for DataFrame: CSV loading, collection-style combinators
 * (for-comprehension, partition, take/drop/filter), and column
 * add/remove/insert/update operations.
 *
 * All tests share a single frame loaded from data/sample.csv (36634 rows,
 * 18 columns) in beforeAll; column operations are expected to be
 * non-destructive, leaving the shared frame untouched.
 */
class DataFrameSuite extends FunSuite with BeforeAndAfterAll {
  // Shared fixture, initialized once before the suite runs.
  private var df: DataFrame = _
  override def beforeAll(): Unit = {
    super.beforeAll()
    df = DataFrame.loadCSV(path = "data/sample.csv", tableName = "fraud", header = true)
  }
  // Construction of empty and single-column frames.
  test("create") {
    val df1 = DataFrame("empty")
    assert(df1.length == 0)
    assert(df1.width == 0)
    val df2 = DataFrame("one", Seq(Column[Int]("c1", 0 until 5, IntType)))
    assert(df2.length == 5)
    assert(df2.width == 1)
  }
  test("load csv") {
    assert(df.length == 36634)
    assert(df.size == 36634)
    assert(df.width == 18)
  }
  // DataFrame supports for-comprehensions over its rows.
  test("for expression") {
    val rows = for {
      r <- df
      pid <- r.getAs[Int]("policyID")
      if pid.toString.startsWith("1111")
    } yield r
    val newDF = DataFrame.fromRows(df, rows)
    println(newDF.headOption)
    assert(newDF.forall(_.getAs[Int]("policyID").exists(_.toString.startsWith("1111"))))
  }
  test("partition") {
    val (df1, df2) = df.partition(r => r.getAs[Int]("policyID").exists(_.toString.startsWith("1")))
    println(df1.headOption)
    println(df2.headOption)
    println(df1.size)
    println(df2.length)
    assert(df1.forall(_.getAs[Int]("policyID").exists(_.toString.startsWith("1"))))
    assert(df2.forall(!_.getAs[Int]("policyID").exists(_.toString.startsWith("1"))))
    // The source frame itself must be unchanged.
    assert(df.length == 36634)
  }
  test("take") {
    val newDF = df.take(10)
    assert(newDF.isInstanceOf[DataFrame] && newDF.size == 10)
  }
  test("drop") {
    val newDF = df.drop(10)
    assert(newDF.isInstanceOf[DataFrame] && newDF.size == 36634 - 10)
  }
  test("filter") {
    val newDF = df.filter(r => r.getAs[Int]("policyID").exists(_.toString.startsWith("1")))
    println(newDF.headOption)
    assert(newDF.isInstanceOf[DataFrame] && newDF.forall(_.getAs[Int]("policyID").exists(_.toString.startsWith("1"))))
  }
  // Columns whose length differs from the frame's are rejected.
  test("add column with wrong length") {
    intercept[IllegalArgumentException] {
      df.addColumn(Column[Int]("user_id", 1 to 36633, IntType))
    }
  }
  test("add column") {
    val newDF = df.addColumn(Column[Int]("user_id", 1 to 36634, IntType))
    assert(newDF.exists(_.getAs[Int]("user_id").exists(_ == 1)))
    assert(!newDF.exists(_.getAs[Int]("user_id").exists(_ == 0)))
  }
  // Disabled variant exercising an addColumn overload; kept for reference.
  //  test("add column 2") {
  //    val newDF = df.addColumn("user_id", 1 to 36634, IntType)
  //    assert(newDF.exists(_.getAs[Int]("user_id").exists(_ == 1)))
  //    assert(!newDF.exists(_.getAs[Int]("user_id").exists(_ == 0)))
  //    assert(df.width == 18)
  //    assert(newDF.width == 19)
  //  }
  test("remove column by index") {
    val newDF = df.removeColumn(0)
    assert(df.width == 18)
    assert(newDF.width == 17)
    assert(newDF.firstColumn.name != "policyID")
  }
  // Invalid indices/names are no-ops returning the very same instance (eq).
  test("remove column by wrong index") {
    val newDF = df.removeColumn(-1)
    assert(df == newDF)
    assert(df eq newDF)
  }
  test("remove column by name") {
    val newDF = df.removeColumn("policyID")
    assert(df.width == 18)
    assert(newDF.width == 17)
  }
  test("remove column by wrong name") {
    val newDF = df.removeColumn("dummy")
    assert(df == newDF)
    assert(df eq newDF)
  }
  test("insert column") {
    val newDF = df.insertColumn(0, Column[Int]("user_id", 1 to 36634, IntType))
    assert(newDF.nameIndexMap("user_id") == 0)
    assert(newDF.exists(_.getAs[Int]("user_id").exists(_ == 1)))
    assert(!newDF.exists(_.getAs[Int]("user_id").exists(_ == 0)))
  }
  test("insert column with wrong length") {
    intercept[IllegalArgumentException] {
      df.insertColumn(0, Column[Int]("user_id", 1 to 36635, IntType))
    }
  }
  // The suite expects an out-of-range insert position to land at index 0.
  test("insert column at wrong position") {
    val newDF = df.insertColumn(-1, Column[Int]("user_id", 1 to 36634,
      IntType))
    assert(newDF.nameIndexMap("user_id") == 0)
    assert(newDF.exists(_.getAs[Int]("user_id").exists(_ == 1)))
    assert(!newDF.exists(_.getAs[Int]("user_id").exists(_ == 0)))
  }
  test("update column with wrong length") {
    intercept[IllegalArgumentException] {
      df.updateColumn(0, Column[Int]("user_id", 1 to 36635, IntType))
    }
  }
  test("update column by index") {
    val newDF = df.updateColumn(0, Column[Int]("user_id", 1 to 36634, IntType))
    assert(newDF.firstColumn.name == "user_id")
    assert(newDF.firstColumn.name != "policyID")
    assert(newDF.exists(_.getAs[Int]("user_id").exists(_ == 1)))
    assert(!newDF.exists(_.getAs[Int]("user_id").exists(_ == 0)))
  }
  test("update column by name") {
    val newDF = df.updateColumn("policyID", Column[Int]("user_id", 1 to 36634, IntType))
    assert(newDF.columnsNameMap.contains("user_id"))
    assert(!newDF.columnsNameMap.contains("policyID"))
    assert(newDF.exists(_.getAs[Int]("user_id").exists(_ == 1)))
    assert(!newDF.exists(_.getAs[Int]("user_id").exists(_ == 0)))
  }
  test("update column by wrong index") {
    val newDF = df.updateColumn(-1, Column[Int]("user_id", 1 to 36634, IntType))
    assert(df == newDF)
    assert(df eq newDF)
  }
  test("update column by wrong name") {
    val newDF = df.updateColumn("dummy", Column[Int]("user_id", 1 to 36634,
      IntType))
    assert(df == newDF)
    assert(df eq newDF)
  }
}
| AiHe/DataFrame | src/test/scala/me/aihe/dataframe/DataFrameSuite.scala | Scala | apache-2.0 | 5,312 |
package io.fintrospect.configuration
import java.net.InetSocketAddress
import org.scalatest.{FunSpec, Matchers}
/**
 * Tests for Authority (host:port pair): rendering, parsing via unapply
 * (including the port-80 default), rejection of malformed inputs, and
 * conversion to InetSocketAddress.
 */
class AuthorityTest extends FunSpec with Matchers {

  describe("Authority") {
    it("renders ok") {
      Authority(Host.localhost, Port(9999)).toString shouldBe "localhost:9999"
    }
    it("defaults no port to port 80") {
      Authority.unapply("localhost") shouldBe Some(Authority(Host.localhost, Port(80)))
    }
    it("defaults valid host and port") {
      Authority.unapply("localhost:123") shouldBe Some(Authority(Host.localhost, Port(123)))
    }
    // Non-numeric port must fail to parse.
    it("invalid port number") {
      Authority.unapply("localhost:asd") shouldBe None
    }
    // More than one colon-separated segment after the host must fail.
    it("too many parts") {
      Authority.unapply("localhost:123:123") shouldBe None
    }
    it("socket address") {
      Authority(Host.localhost, Port(9999)).socketAddress shouldBe new InetSocketAddress("localhost", 9999)
    }
  }
}
| daviddenton/fintrospect | core/src/test/scala/io/fintrospect/configuration/AuthorityTest.scala | Scala | apache-2.0 | 908 |
package uk.gov.dvla.vehicles.presentation.common.controllers
import com.google.inject.Inject
import play.api.mvc.{Action, Controller}
import play.api.data.Form
import uk.gov.dvla.vehicles.presentation.common.models
import uk.gov.dvla.vehicles.presentation.common.views
import models.ValtechRadioModel
import uk.gov.dvla.vehicles.presentation.common.clientsidesession.CookieImplicits.RichForm
import uk.gov.dvla.vehicles.presentation.common.clientsidesession.ClientSideSessionFactory
/**
 * Demo controller for the valtech radio-button view: presents the form and
 * echoes the selected keeper type on a successful submission.
 */
class ValtechRadioController @Inject()(implicit clientSideSessionFactory: ClientSideSessionFactory) extends Controller {

  private[controllers] val form = Form(
    ValtechRadioModel.Form.Mapping
  )

  // NOTE(review): fill() with no argument resolves via the imported RichForm
  // implicit (presumably pre-populating from the client-side session cookie)
  // — confirm against RichForm's definition.
  def present = Action { implicit request =>
    Ok(views.html.valtechRadioView(form.fill()))
  }

  def submit = Action {
    implicit request => {
      form.bindFromRequest.fold(
        // Re-render the view with validation errors on bad input.
        invalidForm => BadRequest(views.html.valtechRadioView(invalidForm)),
        validForm => {
          val msg = s"Success - you selected a keeper type of ${validForm.keeperType}"
          Ok(views.html.success(msg))
        }
      )
    }
  }
}
| dvla/vehicles-presentation-common | common-test/app/uk/gov/dvla/vehicles/presentation/common/controllers/ValtechRadioController.scala | Scala | mit | 1,124 |
package spinoco.fs2.cassandra
import cats.effect.{Async, ContextShift, Resource}
import com.datastax.driver.core.{Cluster, QueryLogger}
object client {

  /**
   * Establish a connection with the cluster given the configuration data
   * passed in. Once the resulting resource is released, the cluster
   * connection terminates too.
   */
  def cluster[F[_] : Async: ContextShift](config: Cluster.Builder, queryLogger: Option[QueryLogger] = None): Resource[F,CassandraCluster[F]] =
    CassandraCluster.instance(config, queryLogger)

  /**
   * Establish a connection with the cluster and acquire a session.
   * The session is closed and the cluster connection terminated when the
   * resulting resource is released.
   */
  def session[F[_] : Async: ContextShift](config: Cluster.Builder, queryLogger: Option[QueryLogger] = None): Resource[F,CassandraSession[F]] =
    cluster(config, queryLogger).flatMap(_.session)
}
| Spinoco/fs2-cassandra | core/src/main/scala/spinoco/fs2/cassandra/client.scala | Scala | mit | 896 |
/***********************************************************************
* Copyright (c) 2015-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.geomesa.nifi.processors.fs
import org.geomesa.nifi.datastore.processor.mixins.{AbstractDataStoreProcessor, AwsDataStoreProcessor}
import org.locationtech.geomesa.fs.data.FileSystemDataStoreFactory.FileSystemDataStoreParams
import org.locationtech.geomesa.utils.geotools.GeoMesaParam
/**
 * Base NiFi processor for GeoMesa FileSystem data stores: wires the
 * FileSystem-specific store properties and AWS credential handling into the
 * generic data store processor.
 */
abstract class FileSystemProcessor
    extends AbstractDataStoreProcessor(FileSystemDataStoreService.Properties) with AwsDataStoreProcessor {
  // Parameter used to pass additional store configuration strings.
  override protected def configParam: GeoMesaParam[String] = FileSystemDataStoreParams.ConfigsParam
}
| geomesa/geomesa-nifi | geomesa-fs-bundle/geomesa-fs-processors/src/main/scala/org/geomesa/nifi/processors/fs/FileSystemProcessor.scala | Scala | apache-2.0 | 1,006 |
package com.twitter.finagle.mux.lease.exp
import com.twitter.conversions.storage.intToStorageUnitableWholeNumber
import com.twitter.util.StorageUnit
import org.mockito.Mockito.{when, verify}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
@RunWith(classOf[JUnitRunner])
/**
 * Mockito-based unit tests for MemorySpace: remaining-byte computation
 * relative to minDiscount, and discount computation including clamping at
 * maxDiscount.
 */
class MemorySpaceTest extends FunSuite with MockitoSugar {
  test("MemorySpace#left should find the number of bytes left before we hit minDiscount") {
    val nfo = mock[JvmInfo]
    when(nfo.remaining()).thenReturn(10.megabytes)
    val range = StorageUnit.zero
    val minDiscount = 5.megabytes
    val maxDiscount = StorageUnit.zero
    val rSnooper = mock[RequestSnooper]
    val space = new MemorySpace(nfo, range, minDiscount, maxDiscount, rSnooper)
    // 10 MB remaining minus the 5 MB minimum discount.
    assert(space.left === 5.megabytes)
    verify(nfo).remaining()
  }
  test("MemorySpace should be able to compute a discount correctly") {
    val nfo = mock[JvmInfo]
    val range = 10.megabytes
    val minDiscount = 5.megabytes
    val maxDiscount = 10.megabytes
    val rSnooper = mock[RequestSnooper]
    when(rSnooper.handleBytes()).thenReturn(2.megabytes)
    // Deterministic "random" source so the discount is predictable.
    val rnd = mock[GenerationalRandom]
    when(rnd.apply()).thenReturn(107.megabytes.inBytes.toInt)
    val space =
      new MemorySpace(nfo, range, minDiscount, maxDiscount, rSnooper, NullLogsReceiver, rnd)
    assert(space.discount() === 9.megabytes)
    verify(rnd).apply()
    verify(rSnooper).handleBytes()
  }
  test("MemorySpace should be able to default to a max") {
    val nfo = mock[JvmInfo]
    val range = 10.megabytes
    val minDiscount = 5.megabytes
    val maxDiscount = 8.megabytes
    val rSnooper = mock[RequestSnooper]
    when(rSnooper.handleBytes()).thenReturn(2.megabytes)
    val rnd = mock[GenerationalRandom]
    when(rnd.apply()).thenReturn(107.megabytes.inBytes.toInt)
    val space =
      new MemorySpace(nfo, range, minDiscount, maxDiscount, rSnooper, NullLogsReceiver, rnd)
    // Same inputs as above, but the lower maxDiscount caps the result at 8 MB.
    assert(space.discount() === 8.megabytes)
    verify(rnd).apply()
    verify(rSnooper).handleBytes()
  }
}
| JustinTulloss/finagle | finagle-mux/src/test/scala/com/twitter/finagle/mux/lease/exp/MemorySpaceTest.scala | Scala | apache-2.0 | 2,106 |
import play.api.GlobalSettings
import play.api.mvc.WithFilters
import io.ino.play.ConcurrentRequestsLimiter
// Uses the default Play global settings; the ConcurrentRequestsLimiter
// filter is currently disabled (see the commented-out WithFilters form).
object Global extends GlobalSettings /* WithFilters(ConcurrentRequestsLimiter)*/
package scaladget.bootstrapnative
import org.scalajs.dom.raw.HTMLElement
import scalatags.JsDom.all._
/** A JavaScript file that must be loaded alongside rendered DOM content. */
trait JSDependency{
  // Path of the script, relative to the served resources.
  def path: String
}
object JSDependency {

  /** Bootstrap Native bundle shipped with the application resources. */
  lazy val BOOTSTRAP_NATIVE = new JSDependency{ def path = "js/bootstrap-native.min.js" }

  /** Renders `f` into the document body and then loads Bootstrap Native. */
  def withBootstrapNative[T <: HTMLElement](f: => T): Unit = withJS(BOOTSTRAP_NATIVE)(f)

  /**
   * Appends the element produced by `f` to the document body, then appends a
   * script tag for each dependency. Scripts are appended after the content so
   * that they can operate on the freshly inserted DOM nodes.
   */
  def withJS[T <: HTMLElement](js: JSDependency*)(f: => T): Unit = {
    org.scalajs.dom.document.body.appendChild(f)
    // Side-effecting traversal: the previous for/yield built a Seq of
    // appended nodes only to discard it.
    js.foreach { j =>
      org.scalajs.dom.document.body.appendChild(
        scalatags.JsDom.tags.script(`type` := "text/javascript", src := j.path).render
      )
    }
  }
}
package com.twitter.finagle.mux.transport
import com.twitter.finagle.{Dentry, Dtab, Failure, Path, tracing}
import com.twitter.io.Buf
import com.twitter.util.Time
import com.twitter.conversions.DurationOps._
import scala.collection.mutable
import org.scalatest.funsuite.AnyFunSuite
/**
 * Tests for the mux wire-protocol codec in Message: encode/decode round-trip
 * symmetry over generated message permutations, rejection of malformed
 * frames, fragment decoding, control-message extraction, context-buffer
 * backing, and pre-encoded message shortcuts.
 */
class MessageTest extends AnyFunSuite {
  import Message._
  // A Buf of n sequential byte values 0, 1, ..., n-1.
  def buf(n: Int) = Buf.ByteArray.Owned((0 until n).toArray.map(_.toByte))
  val body = buf(4)
  // Representative "good" values combined below into message permutations.
  // 8388607 is the largest valid tag (2^23 - 1).
  val goodTags = Seq(8388607, 1, 123)
  val goodVersions = Seq(100: Short, 200: Short, 300: Short)
  val goodTraceIds = Seq(None, Some(tracing.Trace.nextId))
  val goodBufs = Seq(Buf.Empty, buf(1), buf(4), buf(100))
  // Includes non-ASCII to exercise UTF-8 handling.
  val goodStrings = Seq("", "Hello, world!", "☺☹")
  val goodKeys = goodStrings.map(Buf.Utf8(_))
  val goodDentries = Seq("/a=>/b", "/foo=>/$/inet/twitter.com/80") map (Dentry.read)
  val goodDtabs = goodDentries.permutations map { ds => Dtab(ds.toIndexedSeq) }
  val goodDests = Seq("/", "/okay", "/foo/bar/baz") map (Path.read)
  val goodDurationLeases = Seq(Message.Tlease.MinLease, Message.Tlease.MaxLease)
  val goodTimeLeases = Seq(Time.epoch, Time.now, Time.now + 5.minutes)
  val goodContexts =
    Seq() ++ (for { k <- goodKeys; v <- goodBufs } yield (k, v)).combinations(2).toSeq
  // Round-trip: decoding an encoded message yields the original.
  test("d(e(m)) == m") {
    val ms = mutable.Buffer[Message]()
    ms ++= (for {
      tag <- goodTags
      version <- goodVersions
      ctx <- goodContexts
    } yield Tinit(tag, version, ctx))
    ms ++= (for {
      tag <- goodTags
      version <- goodVersions
      ctx <- goodContexts
    } yield Rinit(tag, version, ctx))
    ms ++= (for {
      tag <- goodTags
      traceId <- goodTraceIds
      body <- goodBufs
    } yield Treq(tag, traceId, body))
    ms ++= (for {
      tag <- goodTags
      body <- goodBufs
    } yield RreqOk(tag, body))
    ms ++= (for {
      tag <- goodTags
    } yield Tdrain(tag))
    ms ++= (for {
      tag <- goodTags
      reason <- goodStrings
    } yield Tdiscarded(tag, reason))
    ms ++= (for {
      tag <- goodTags
      ctx <- goodContexts
      dest <- goodDests
      dtab <- goodDtabs
      body <- goodBufs
    } yield Tdispatch(tag, ctx, dest, dtab, body))
    ms ++= (for {
      tag <- goodTags
      ctx <- goodContexts
      body <- goodBufs
    } yield RdispatchOk(tag, ctx, body))
    ms ++= (for {
      tag <- goodTags
      ctx <- goodContexts
      err <- goodStrings
    } yield RdispatchError(tag, ctx, err))
    ms ++= (for {
      tag <- goodTags
      ctx <- goodContexts
    } yield RdispatchNack(tag, ctx))
    ms ++= (for {
      lease <- goodDurationLeases
    } yield Tlease(lease))
    ms ++= (for {
      lease <- goodTimeLeases
    } yield Tlease(lease))
    // Dtabs need Equiv-based comparison; everything else uses plain equality.
    def assertEquiv(a: Message, b: Message) = (a, b) match {
      case (Tdispatch(tag1, ctxs1, dst1, dtab1, req1), Tdispatch(tag2, ctxs2, dst2, dtab2, req2)) =>
        assert(
          tag1 == tag2 && ctxs1 == ctxs2 && dst1 == dst2 &&
            Equiv[Dtab].equiv(dtab1, dtab2) && req1 == req2
        )
      case (a, b) => assert(a == b)
    }
    // Debugging tip: in an error message, 'm' is the RHS.
    for (m <- ms)
      assertEquiv(decode(encode(m)), m)
  }
  // Tags outside [0, 2^23) must be rejected at encode time.
  test("not encode invalid messages") {
    assert(intercept[Failure] {
      encode(Treq(-1, Some(tracing.Trace.nextId), body))
    } == Failure.wrap(BadMessageException("invalid tag number -1")))
    assert(intercept[Failure] {
      encode(Treq(1 << 24, Some(tracing.Trace.nextId), body))
    } == Failure.wrap(BadMessageException("invalid tag number 16777216")))
  }
  // Short frames and unknown message types must fail with descriptive errors.
  test("not decode invalid messages") {
    val short = intercept[Failure] { decode(Buf.Empty) }
    assert(short.why.startsWith("short message"))
    assert(short.cause.get.isInstanceOf[BadMessageException])
    assert(
      intercept[Failure] {
        decode(Buf.ByteArray.Owned(Array[Byte](0, 0, 0, 1)))
      } == Failure.wrap(
        BadMessageException(
          "unknown message type: 0 [tag=1]. Payload bytes: 0. First 0 bytes of the payload: ''"
        )
      )
    )
    assert(
      intercept[Failure] {
        decode(Buf.ByteArray.Owned(Array[Byte](0, 0, 0, 1, 0x01, 0x02, 0x0e, 0x0f)))
      } == Failure.wrap(
        BadMessageException(
          "unknown message type: 0 [tag=1]. Payload bytes: 4. First 4 bytes of the payload: '01020e0f'"
        )
      )
    )
    // The payload preview in the error is capped at 16 bytes.
    assert(
      intercept[Failure] {
        decode(Buf.ByteArray.Owned(Array[Byte](0, 0, 0, 1) ++ Seq.fill(32)(1.toByte).toArray[Byte]))
      } == Failure.wrap(
        BadMessageException(
          "unknown message type: 0 [tag=1]. Payload bytes: 32. First 16 bytes of the payload: '01010101010101010101010101010101'"
        )
      )
    )
  }
  // Messages with the tag MSB set decode as Fragment wrappers.
  test("decode fragments") {
    val msgs = Seq(
      Tdispatch(
        Message.Tags.setMsb(goodTags.head),
        goodContexts.head,
        goodDests.head,
        Dtab.empty,
        goodBufs.head
      ),
      RdispatchOk(Message.Tags.setMsb(goodTags.last), goodContexts.last, goodBufs.last)
    )
    for (m <- msgs) {
      assert(decode(encode(m)) == Fragment(m.typ, m.tag, m.buf))
    }
  }
  // ControlMessage extracts the tag only for session-control message types.
  test("extract control messages") {
    val tag = 0
    val buf = Buf.Empty
    assert(ControlMessage.unapply(Treq(tag, None, buf)) == None)
    assert(ControlMessage.unapply(RreqOk(0, buf)) == None)
    assert(ControlMessage.unapply(Tdispatch(tag, Seq.empty, Path.empty, Dtab.empty, buf)) == None)
    assert(ControlMessage.unapply(RdispatchOk(tag, Seq.empty, buf)) == None)
    assert(ControlMessage.unapply(Tdrain(tag)) == Some(tag))
    assert(ControlMessage.unapply(Rdrain(tag)) == Some(tag))
    assert(ControlMessage.unapply(Tping(tag)) == Some(tag))
    assert(ControlMessage.unapply(Rping(tag)) == Some(tag))
    assert(ControlMessage.unapply(Tdiscarded(tag, "")) == Some(tag))
    assert(ControlMessage.unapply(Tlease(0, 0L)) == Some(tag))
  }
  test("context entries are backed by Buf.Empty or an exact sized ByteArray") {
    def checkBuf(buf: Buf): Unit = buf match {
      case Buf.Empty => assert(true) // ok
      // The backing array must not be over-allocated.
      case Buf.ByteArray.Owned(array, 0, end) => assert(end == array.length)
      case msg => fail(s"Unexpected Buf: $msg")
    }
    val msg = RdispatchOk(0, goodContexts.flatten, Buf.Empty)
    val RdispatchOk(0, ctxs, Buf.Empty) = decode(Buf.ByteArray.coerce(encode(msg)))
    ctxs.foreach {
      case (k, v) =>
        checkBuf(k)
        checkBuf(v)
    }
  }
  // Pre-encoded messages must match a fresh encode and be reused by identity.
  test("Message.encode(Message.PreEncoded)") {
    val preEncodedMessages = Seq(
      PreEncoded.Rping,
      PreEncoded.Tping
    )
    preEncodedMessages.foreach { msg: PreEncoded =>
      assert(Message.encode(msg.underlying) == msg.encodedBuf)
      assert(msg.encodedBuf eq Message.encode(msg))
      assert(msg.toString == msg.underlying.toString)
    }
  }
}
| twitter/finagle | finagle-mux/src/test/scala/com/twitter/finagle/mux/transport/MessageTest.scala | Scala | apache-2.0 | 6,759 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.models.inception
import com.intel.analytics.bigdl.dataset.{ByteRecord, DataSet}
import com.intel.analytics.bigdl.dataset.image._
import com.intel.analytics.bigdl.nn.Module
import com.intel.analytics.bigdl.optim.{Top1Accuracy, Top5Accuracy, Validator}
import com.intel.analytics.bigdl.utils.Engine
import org.apache.hadoop.io.Text
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
object Test {
  // Silence framework logging so only evaluation results are printed.
  Logger.getLogger("org").setLevel(Level.ERROR)
  Logger.getLogger("akka").setLevel(Level.ERROR)
  Logger.getLogger("breeze").setLevel(Level.ERROR)

  import Options._

  // Input side length of the center crop fed to the Inception model.
  val imageSize = 224

  /**
   * Evaluates a saved Inception model against an ImageNet validation set
   * stored as Hadoop sequence files, reporting top-1 and top-5 accuracy.
   *
   * Dead code removed relative to the previous revision: an unused
   * `rawData` RDD (and the `partitionNum` it depended on) and an unused
   * `batchSize` local — `rddData` and `param.batchSize` were what was
   * actually used.
   */
  def main(args: Array[String]) {
    testParser.parse(args, new TestParams()).foreach { param =>
      val conf = Engine.createSparkConf().setAppName("Test Inception on ImageNet")
      val sc = new SparkContext(conf)
      Engine.init

      val rddData = DataSet.SeqFileFolder.filesToRdd(param.folder, sc, 1000)
      // NOTE(review): HFlip(0.5) randomly flips images during evaluation,
      // which makes the reported accuracy nondeterministic — confirm this
      // augmentation is intended for a test pass.
      val transformer = BytesToBGRImg() -> BGRImgCropper(imageSize, imageSize, CropCenter) ->
        HFlip(0.5) -> BGRImgNormalizer(0.485, 0.456, 0.406, 0.229, 0.224, 0.225) -> BGRImgToSample()
      val evaluationSet = transformer(rddData)

      val model = Module.load[Float](param.model)
      val result = model.evaluate(evaluationSet,
        Array(new Top1Accuracy[Float], new Top5Accuracy[Float]), param.batchSize)
      result.foreach(r => println(s"${r._2} is ${r._1}"))

      sc.stop()
    }
  }
}
| yiheng/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/models/inception/Test.scala | Scala | apache-2.0 | 2,550 |
/*
* Copyright 2014 porter <https://github.com/eikek/porter>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package porter.app
import _root_.akka.actor.ActorSystem
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.scalatest.matchers.ShouldMatchers
import com.typesafe.config.ConfigFactory
import scala.concurrent.Await
import porter.auth.{AuthToken, PasswordValidator}
import reactivemongo.api.MongoDriver
/**
 * Integration tests for MongoStore covering realm, group, and account CRUD
 * plus credential-based lookup. The database named in the test config is
 * dropped in afterAll.
 */
class MongoStoreTest extends FunSuite with ShouldMatchers with BeforeAndAfterAll {
  import scala.concurrent.ExecutionContext.Implicits.global
  import scala.language.postfixOps
  import scala.concurrent.duration._
  import porter.model._
  //note, these tests only work with a running mongodb instance
  val system = ActorSystem("MongoStoreTest")
  val config = ConfigFactory.load().getConfig("porter-mongo-test")
  val store = new MongoStore(system, config)
  // Marks created entities as coming from a mutable source.
  val mutable = PropertyList.mutableSource.toTrue
  override protected def afterAll() = {
    // Drop the test database and shut the actor system down.
    val client = MongoStore.createMongo(new MongoDriver(system), config)
    Await.ready(client.db(config.getString("dbname")).drop(), 5.seconds)
    system.shutdown()
  }
  // Persists a fresh realm with a random identifier.
  private def createRealm(): Realm = {
    val r = Realm(Ident.randomIdent, "Some Realm")
    Await.ready(store.updateRealm(r), 5 seconds)
    r
  }
  test("create and list realms") {
    val r = createRealm()
    val all = Await.result(store.allRealms, 5 seconds)
    all should contain (r)
  }
  test("create and list groups") {
    val r = createRealm()
    val g1 = Group("g1", mutable(Map("enabled" -> "true")), Set("some:perm:1"))
    val g2 = Group("g2", mutable(Map.empty), Set("some:perm:2", "!some:perm:3"))
    Await.result(store.updateGroup(r.id, g1), 5 seconds)
    Await.result(store.updateGroup(r.id, g2), 5 seconds)
    val all = Await.result(store.allGroups(r.id), 5 seconds).toSet
    all should contain (g1)
    all should contain (g2)
    val og1 = Await.result(store.findGroups(r.id, Set("g1")), 5 seconds)
    og1 should be (List(g1))
    // Clearing g2's rules must persist: name/props survive, rules do not.
    Await.ready(store.updateGroup(r.id, g2.updatedRules(r => Set.empty)), 5 seconds)
    val og2 = Await.result(store.findGroups(r.id, Set("g2")), 5 seconds)
    og2 should not be List(g2)
    og2(0).name should be (g2.name)
    og2(0).props should be (g2.props)
    Await.ready(store.deleteGroup(r.id, g2.name), 5 seconds)
    val empty = Await.result(store.findGroups(r.id, Set(g2.name)), 5 seconds)
    empty should be (List())
  }
  test("create and list accounts") {
    val r = createRealm()
    val passw = Password("test")
    val acc1 = Account("john", mutable(Map("enabled" -> "true")), Set("g1", "g3"), Seq(passw))
    val acc2 = Account("mary", mutable(Map("enabled" -> "false")), Set("g1", "g2"), Seq(passw))
    Await.ready(store.updateAccount(r.id, acc1), 5 seconds)
    Await.ready(store.updateAccount(r.id, acc2), 5 seconds)
    val all = Await.result(store.allAccounts(r.id), 5 seconds)
    all.toSet should be (Set(acc1, acc2))
    val l1 = Await.result(store.findAccounts(r.id, Set(acc1.name)), 5 seconds)
    l1 should be (List(acc1))
    // Lookup by credentials must return the same account as lookup by name.
    val cl1 = Await.result(store.findAccountsFor(r.id, Set(PasswordCredentials("john", "test"))), 5 seconds)
    cl1 should be (l1)
    Await.ready(store.deleteAccount(r.id, acc1.name), 5 seconds)
    val l2 = Await.result(store.allAccounts(r.id), 5 seconds)
    l2 should be (List(acc2))
    Await.ready(store.updateAccount(r.id, acc2.updatedGroups(s => Set.empty)), 5 seconds)
    val l2u = Await.result(store.findAccounts(r.id, Set(acc2.name)), 5 seconds)
    l2u should not be List(acc2)
    l2u(0).groups should have size 0
    // The stored password must still validate after the group update.
    val to = PasswordValidator.authenticate(AuthToken(r, acc2, Set(PasswordCredentials("mary", "test"))))
    to.toResult.successCount should be (1)
  }
  test("find groups") {
    val r = createRealm()
    val g1 = Group("g1", mutable(Map("enabled" -> "true")), Set("some:perm:1"))
    val g2 = Group("g2", mutable(Map.empty), Set("some:perm:2", "!some:perm:3"))
    Await.result(store.updateGroup(r.id, g1), 5 seconds)
    Await.result(store.updateGroup(r.id, g2), 5 seconds)
    val list = Await.result(store.findGroups(r.id, Set("g1", "g2")), 5 seconds)
    list.toSet should be (Set(g1, g2))
  }
}
| eikek/porter | app/src/test/scala/porter/app/MongoStoreTest.scala | Scala | apache-2.0 | 4,721 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats.races
import io.truthencode.ddo.model.feats.{FeatDisplayHelper, RacialFeat}
import org.concordion.api.FullOGNL
import org.concordion.api.option.{ConcordionOptions, MarkdownExtensions}
//import org.concordion.ext.EmbedExtension
//import org.concordion.ext.collapse.CollapseOutputExtension
import org.concordion.integration.junit4.ConcordionRunner
import org.junit.runner.RunWith
@FullOGNL
//@Extensions(Array(classOf[EmbedExtension], classOf[CollapseOutputExtension]))
@RunWith(classOf[ConcordionRunner])
@ConcordionOptions(
  declareNamespaces = Array("ext", "urn:concordion-extensions:2010"),
  markdownExtensions = Array(
    MarkdownExtensions.WIKILINKS,
    MarkdownExtensions.AUTOLINKS,
    MarkdownExtensions.TASKLISTITEMS)
)
/** Concordion specification displaying the racial feat enumeration. */
class Races extends FeatDisplayHelper {
  // Enumeration whose entries this specification renders.
  override val displayEnum: E = RacialFeat
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/specs/scala/io/truthencode/ddo/model/feats/races/Races.scala | Scala | apache-2.0 | 1,502 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn.ops
import com.intel.analytics.bigdl.nn.SoftPlus
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import scala.reflect.ClassTag
/**
 * Gradient operation for SoftPlus: a UnaryGrad whose backward computation is
 * delegated to the underlying SoftPlus module.
 */
class SoftplusGrad[T: ClassTag, D: ClassTag]
  (implicit ev: TensorNumeric[T], ev2: TensorNumeric[D])
  extends UnaryGrad[T, D](true, true) {

  // SoftPlus module used by the generic UnaryGrad machinery.
  override val module: Module = SoftPlus[T, D]()

  // Exposes both numeric type tags (model dtype T, op dtype D) for
  // serialization/reflection support.
  override def getClassTagNumerics() : (Array[ClassTag[_]], Array[TensorNumeric[_]]) = {
    (Array[ClassTag[_]](scala.reflect.classTag[T], scala.reflect.classTag[D]),
      Array[TensorNumeric[_]](ev, ev2))
  }
}

object SoftplusGrad {
  /** Factory mirroring the class constructor. */
  def apply[T: ClassTag, D: ClassTag]()
    (implicit ev: TensorNumeric[T], ev2: TensorNumeric[D]): SoftplusGrad[T, D] =
    new SoftplusGrad[T, D]()
}
| qiuxin2012/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/ops/SoftplusGrad.scala | Scala | apache-2.0 | 1,372 |
package pbt
import ReverseExample._
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{Matchers, FlatSpec}
/**
 * Demonstrates property-based testing against reverseStrings and
 * genericReverse: a classic example test followed by generated-input
 * properties (agreement with the standard reverse, length/element
 * preservation, and involution).
 */
class ReverseExampleTest extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {
  behavior of "reverseStrings"
  // normal unit test
  it should "reverse a string" in {
    val reversed = reverseStrings(List("A", "list", "of", "Strings"))
    reversed should equal(List("Strings", "of", "list", "A"))
  }
  // forall with native implementation
  it should "also reverse all strings" in {
    forAll {
      (ss: List[String]) => reverseStrings(ss) should equal(ss.reverse)
    }
  }
  it should "have the same size" in {
    forAll {
      (ss: List[String]) => reverseStrings(ss) should have length ss.length
    }
  }
  it should "have the same elements" in {
    forAll {
      (ss: List[String]) => reverseStrings(ss) should contain theSameElementsAs ss
    }
  }
  // Reversing is an involution: applying it twice yields the input.
  it should "give the same after reversing twice" in {
    forAll {
      (ss: List[String]) => reverseStrings(reverseStrings(ss)) should equal(ss)
    }
  }
  behavior of "genericReverse"
  // The generic implementation must agree with the string-specific one.
  it should "give the same reverse as reverseStrings" in
    forAll {
      (ss: List[String]) => genericReverse(ss) should equal(reverseStrings(ss))
    }
  it should "reverse for all all" in
    forAll {
      (a: List[Int]) => genericReverse(a) should equal(a.reverse)
    }
}
| TimSoethout/PropertyBasedTestingScalaCheck | code/src/test/scala/pbt/ReverseExampleTest.scala | Scala | mit | 1,394 |
package com.sfxcode.sapphire.core.demo.tutorial.controller
/**
 * Controller for the tutorial demo's workspace view.
 *
 * Adds no behaviour beyond [[AbstractViewController]];
 * NOTE(review): presumably referenced from a view/FXML definition elsewhere —
 * confirm before removing.
 *
 * Created by tom on 20.10.15.
 */
class WorkspaceController extends AbstractViewController {
}
| sfxcode/sapphire-demo | tutorial/src/main/scala/com/sfxcode/sapphire/core/demo/tutorial/controller/WorkspaceController.scala | Scala | apache-2.0 | 164 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.usc.irds.sparkler.base
//import ch.qos.logback.classic.{Level, Logger}
import edu.usc.irds.sparkler.base.Loggable.selectedLogLevel
import org.slf4j.LoggerFactory
/**
* Created by thammegr on 6/7/16.
*/
trait Loggable {
// Lazily created SLF4J logger named after the concrete runtime class of the mixer.
lazy val LOG = LoggerFactory.getLogger(getClass)
// Currently a no-op: the logback import above is commented out, so
// Loggable.selectedLogLevel is never applied anywhere in this file.
// NOTE(review): implement or remove — confirm intended behaviour.
def setLogLevel() : Unit ={
}
}
// Shared, mutable log-level setting.
// NOTE(review): a global `var` that nothing visible in this file reads
// (setLogLevel above is empty) — confirm whether it is still needed.
object Loggable {
var selectedLogLevel = "INFO"
} | USCDataScience/sparkler | sparkler-app/src/main/scala/edu/usc/irds/sparkler/base/Loggable.scala | Scala | apache-2.0 | 1,181 |
import sbt._
import sbt.Keys._
import com.typesafe.sbteclipse.core.EclipsePlugin.EclipseKeys._
import com.typesafe.sbteclipse.core.EclipsePlugin._
import com.typesafe.sbt.SbtScalariform
import com.typesafe.sbt.SbtScalariform.ScalariformKeys
import scalariform.formatter.preferences._
import xerial.sbt.Sonatype._
import xerial.sbt.Sonatype.SonatypeKeys._
import sbtassembly.AssemblyPlugin.autoImport._
// sbt 0.13-style build definition (Build trait, <<= / <+= operators). The
// deprecated operators are kept deliberately: migrating them is an sbt
// upgrade, not a drive-by edit.
object ScalariformBuild extends Build {
// This is to make sure nobody tries to compile with 1.6 as the target JDK.
// Not clear if this will actually work on 1.8, needs to be tested when that is out.
val validateJavaVersion = taskKey[Unit]("Check if we are running using required Java version")
val mismatchedSpecificationMessage =
"""|Java 1.7 is required for building the `misc` subproject of Scalariform.
|
|This is due to a dependency on the javax.swing library, which
|had an API change from 1.6 to 1.7.
|
|Using 1.7 to build requires setting SBT to use JDK 1.7 or higher -- if SBT is
|booting on JDK 1.6, you will get a javax.swing related compilation error.""".stripMargin
// Settings shared by every subproject: coordinates, cross-build matrix,
// Eclipse integration and Sonatype publishing.
lazy val commonSettings = Defaults.defaultSettings ++ SbtScalariform.defaultScalariformSettings ++ sonatypeSettings ++ Seq(
organization := "org.scalariform",
profileName := "org.scalariform",
version := "0.1.6",
scalaVersion := "2.10.5",
crossScalaVersions := Seq(
"2.11.6",
"2.10.5",
"2.9.3", "2.9.2" //"2.9.1-1", "2.9.1", "2.9.0-1", "2.9.0"
),
exportJars := true, // Needed for cli oneJar
retrieveManaged := true,
scalacOptions += "-deprecation",
EclipseKeys.withSource := true,
EclipseKeys.eclipseOutput := Some("bin"))
// Subprojects additionally self-format with the repo-level preferences file.
lazy val subprojectSettings = commonSettings ++ Seq(
ScalariformKeys.preferences <<= baseDirectory.apply(getScalariformPreferences))
def getScalariformPreferences(dir: File) =
PreferencesImporterExporter.loadPreferences((dir / ".." / "formatterPreferences.properties").getPath)
// Aggregate-only root: publishes nothing itself.
lazy val root: Project = Project("root", file("."), settings = commonSettings ++ Seq(
publish := (),
publishLocal := ())
) aggregate (scalariform, cli, misc)
// Enables `case r"..."` regex patterns in the version matches below.
implicit class Regex(sc: StringContext) {
def r = new util.matching.Regex(sc.parts.mkString, sc.parts.tail.map(_ => "x"): _*)
}
// Pick the newest ScalaTest that still supports the given Scala version.
def getScalaTestDependency(scalaVersion: String) = scalaVersion match {
case r"2.11.\\d+[-\\w]*" => "org.scalatest" % "scalatest_2.11" % "2.1.5" % "test"
case r"2.10.\\d+[-\\w]*" => "org.scalatest" % "scalatest_2.10" % "2.0" % "test"
case "2.9.3" => "org.scalatest" %% "scalatest" % "1.9.1" % "test"
case _ => "org.scalatest" %% "scalatest" % "1.7.2" % "test"
}
// scala-xml / parser-combinators moved out of the stdlib in 2.11, so they are
// extra dependencies only on that line.
def get2_11Dependencies(scalaVersion: String): List[ModuleID] = scalaVersion match {
case r"2.11.\\d+[-\\w]*" => List(
"org.scala-lang.modules" %% "scala-xml" % "1.0.1",
"org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.1"
)
case _ => Nil
}
def publishSettings(projectName: String) = Seq(
pomExtra := pomExtraXml,
publishMavenStyle := true,
publishArtifact in Test := false,
publishArtifact in (Compile, packageDoc) := true,
publishArtifact in (Compile, packageSrc) := true,
pomIncludeRepository := { _ ⇒ false },
sbtbuildinfo.Plugin.buildInfoKeys := Seq[sbtbuildinfo.Plugin.BuildInfoKey](version),
sourceGenerators in Compile <+= sbtbuildinfo.Plugin.buildInfo,
sbtbuildinfo.Plugin.buildInfoPackage := projectName
)
// The library itself.
lazy val scalariform: Project = Project("scalariform", file("scalariform"), settings =
subprojectSettings ++ sbtbuildinfo.Plugin.buildInfoSettings ++ publishSettings("scalariform") ++ eclipseSettings ++
Seq(
libraryDependencies <<= (scalaVersion, libraryDependencies) { (sv, deps) ⇒
deps ++ get2_11Dependencies(sv) :+ getScalaTestDependency(sv)
},
testOptions in Test += Tests.Argument("-oI"),
EclipseKeys.createSrc := EclipseCreateSrc.Default + EclipseCreateSrc.Managed,
publishTo <<= isSnapshot(getPublishToRepo)))
def getPublishToRepo(isSnapshot: Boolean) =
if (isSnapshot)
Some("snapshots" at "https://oss.sonatype.org/content/repositories/snapshots")
else
Some("releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2")
// Command-line front end; also publishes an assembly ("fat") jar classifier.
lazy val cli = Project("cli", file("cli"), settings = subprojectSettings ++ publishSettings("cli") ++
sbtbuildinfo.Plugin.buildInfoSettings ++
Seq(
libraryDependencies += "commons-io" % "commons-io" % "1.4",
mainClass in (Compile, packageBin) := Some("scalariform.commandline.Main"),
mainClass in assembly := Some("scalariform.commandline.Main"),
publishTo <<= isSnapshot(getPublishToRepo),
artifact in (Compile, assembly) := {
val art = (artifact in (Compile, assembly)).value
art.copy(`classifier` = Some("assembly"))
}
) ++ addArtifact(artifact in (Compile, assembly), assembly)
) dependsOn (scalariform)
// Swing GUI tools; never published, and requires JDK 1.7+ (see message above).
lazy val misc: Project = Project("misc", file("misc"), settings = subprojectSettings ++
Seq(
libraryDependencies ++= Seq(
"commons-io" % "commons-io" % "1.4",
"com.miglayout" % "miglayout" % "3.7.4"),
publish := (),
publishLocal := (),
validateJavaVersion := {
val specJavaVersion = sys.props("java.specification.version")
val compatibleJavaVersion = specJavaVersion == "1.7" || specJavaVersion == "1.8"
if (!compatibleJavaVersion)
sys.error(mismatchedSpecificationMessage)
},
// this means we'll validate required Java version only _right before_ running the compile
// command in misc subproject. In particular, build won't fail if user is not interested
// in building `misc` subproject.
compile in Compile := ((compile in Compile) dependsOn validateJavaVersion).value,
mainClass in (Compile, run) := Some("scalariform.gui.Main"))) dependsOn (scalariform, cli)
// Maven POM metadata required by Sonatype for publishing.
def pomExtraXml =
<inceptionYear>2010</inceptionYear>
<url>http://github.com/mdr/scalariform</url>
<licenses>
<license>
<name>MIT License</name>
<url>http://www.opensource.org/licenses/mit-license.php</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<url>git@github.com:daniel-trinh/scalariform.git</url>
<connection>scm:git:git@github.com:daniel-trinh/scalariform</connection>
</scm>
<developers>
<developer>
<id>mdr</id>
<name>Matt Russell</name>
<url>https://github.com/mdr/</url>
</developer>
<developer>
<id>daniel-trinh</id>
<name>Daniel Trinh</name>
<url>https://github.com/daniel-trinh/</url>
</developer>
</developers>
}
| triggerNZ/scalariform | project/Build.scala | Scala | mit | 6,827 |
/*
* Copyright (c) 2014 Dufresne Management Consulting LLC.
*/
package com.nickelsoftware.bettercare4me.hedis.hedis2014;
import org.scalatestplus.play.OneAppPerSuite
import org.scalatestplus.play.PlaySpec
import com.nickelsoftware.bettercare4me.hedis.HEDISRulesTestSpec
import com.nickelsoftware.bettercare4me.hedis.Scorecard
/**
 * Spec for the CDC LDL-C test-value HEDIS rule (diabetes lipid test < 100 mg/dL).
 * setupTest's three percentages are (eligible, excluded, meet-measure) rates
 * used to generate the simulated patient.
 */
class CDC_LDL_C_TestValueRuleTestSpec extends PlaySpec {

  "The CDC_LDL_C_TestValueRule class representing Diabetes Lipid Test < 100 mg/dL HEDIS rule" must {

    "validate patient that meet the measure criteria" in {
      // 100% eligible, 0% excluded, 100% meeting the measure.
      (1 to 20).foreach { _ =>
        val (patient, patientHistory, rule) =
          HEDISRulesTestSpec.setupTest(CDC_LDL_C_Value.name, 100, 0, 100)
        val scorecard = rule.scoreRule(Scorecard(), patient, patientHistory)

        rule.isPatientEligible(scorecard) mustBe true
        rule.isPatientExcluded(scorecard) mustBe false
        rule.isPatientMeetMeasure(scorecard) mustBe true
      }
    }

    "validate patient that does not meet the measure criteria and is not excluded" in {
      // 100% eligible, 0% excluded, 0% meeting the measure.
      (1 to 100).foreach { _ =>
        val (patient, patientHistory, rule) =
          HEDISRulesTestSpec.setupTest(CDC_LDL_C_Value.name, 100, 0, 0)
        val scorecard = rule.scoreRule(Scorecard(), patient, patientHistory)

        rule.isPatientEligible(scorecard) mustBe true
        rule.isPatientExcluded(scorecard) mustBe false
        if (rule.isPatientMeetMeasure(scorecard)) {
          fail("Meet Measure should have failed, but rule fired: " + scorecard.hedisRuleMap(rule.name).meetMeasure.criteriaScore.keySet)
        }
      }
    }
  }
}
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import controllers.actions.{AuthAction, NoSessionCheckAction}
import models.{AuthenticatedRequest, EmpRef, UserName}
import org.scalatestplus.play.PlaySpec
import play.api.Application
import play.api.i18n.{I18nSupport, Lang, MessagesApi}
import play.api.inject.bind
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.mvc._
import play.api.test.FakeRequest
import play.api.test.Helpers._
import support.TestAuthUser
import uk.gov.hmrc.auth.core.retrieve.Name
import utils.{TestAuthAction, TestNoSessionCheckAction}
/**
 * Spec for [[StartPageController]], run against a Guice application whose
 * auth and session-check actions are replaced with test stubs so requests
 * are always authenticated.
 */
class StartPageControllerSpec extends PlaySpec with FakePBIKApplication with TestAuthUser with I18nSupport {

  override def messagesApi: MessagesApi = app.injector.instanceOf[MessagesApi]

  implicit val lang: Lang = Lang("en-GB")

  // Metrics module disabled; auth/session actions overridden by test doubles.
  override lazy val fakeApplication: Application =
    GuiceApplicationBuilder(disabled = Seq(classOf[com.kenshoo.play.metrics.PlayModule]))
      .configure(config)
      .overrides(bind[AuthAction].to(classOf[TestAuthAction]))
      .overrides(bind[NoSessionCheckAction].to(classOf[TestNoSessionCheckAction]))
      .build()

  val startPageController: StartPageController = app.injector.instanceOf[StartPageController]

  "StartPage Controller" must {
    "return OK and the correct view for a GET" in {
      implicit val request: FakeRequest[AnyContentAsEmpty.type] = mockrequest
      implicit val authRequest: AuthenticatedRequest[AnyContentAsEmpty.type] =
        AuthenticatedRequest(EmpRef("taxOfficeNumber", "taxOfficeReference"), UserName(Name(None, None)), request)

      val response = startPageController.onPageLoad().apply(authRequest)

      status(response) mustEqual OK
      val body = contentAsString(response)
      body must include(messagesApi("StartPage.heading"))
      body must include(messagesApi("StartPage.p5"))
    }
  }
}
| hmrc/pbik-frontend | test/controllers/StartPageControllerSpec.scala | Scala | apache-2.0 | 2,450 |
package monocle.law
import cats.Id
import cats.data.Const
import cats.kernel.Monoid
import monocle.Prism
import monocle.internal.{IsEq, Monoids}
/**
 * Laws every well-behaved [[Prism]] must satisfy. Each method returns an
 * [[IsEq]] pair of values that a property test asserts are equal.
 */
case class PrismLaws[S, A](prism: Prism[S, A]) {
import IsEq.syntax
/** If `getOrModify` matches, `reverseGet` rebuilds the original source. */
def partialRoundTripOneWay(s: S): IsEq[S] =
prism.getOrModify(s).fold(identity, prism.reverseGet) <==> s
/** A constructed value is always matched back, recovering `a`. */
def roundTripOtherWay(a: A): IsEq[Option[A]] =
prism.getOption(prism.reverseGet(a)) <==> Some(a)
/** Modifying with `identity` is a no-op. */
def modifyIdentity(s: S): IsEq[S] =
prism.modify(identity)(s) <==> s
/** Two successive `modify` calls compose into one. */
def composeModify(s: S, f: A => A, g: A => A): IsEq[S] =
prism.modify(g)(prism.modify(f)(s)) <==> prism.modify(g compose f)(s)
/** `set` is `modify` with a constant function. */
def consistentSetModify(s: S, a: A): IsEq[S] =
prism.set(a)(s) <==> prism.modify(_ => a)(s)
/** `modify` agrees with `modifyF` specialised to the Id functor. */
def consistentModifyModifyId(s: S, f: A => A): IsEq[S] =
prism.modify(f)(s) <==> prism.modifyF[Id](f)(s)
/** `getOption` agrees with `modifyF` specialised to Const under the first-Option monoid. */
def consistentGetOptionModifyId(s: S): IsEq[Option[A]] = {
implicit val optionMonoid: Monoid[Option[A]] = Monoids.firstOption
prism.getOption(s) <==> prism.modifyF[Const[Option[A], ?]](a => Const(Some(a)))(s).getConst
}
}
| aoiroaoino/Monocle | core/shared/src/main/scala/monocle/law/PrismLaws.scala | Scala | mit | 1,089 |
package com.automatatutor.renderer
import com.automatatutor.SpecificationWithExamplesInsideBootedLiftSession
import com.automatatutor.model.User
import com.automatatutor.model.Course
import org.specs2.specification.Step
// specs2 acceptance spec for UserRenderer: the Step enrols `user` in a course
// before the examples run inside the booted Lift session. The s2 interpolated
// string below is the spec itself and must not be reformatted.
class UserRendererTest extends SpecificationWithExamplesInsideBootedLiftSession("") { def is = s2"""
For an enrolled user ${ Step(enrolledCourse.enroll(user))}
UserRenderer.renderDeleteLink should return a link ${renderer.renderDeleteLink.head.label must beEqualTo("a") }
that points to /users/index ${(renderer.renderDeleteLink \ "@href").text must startWith("/users/index") }
"""
// Fresh fixtures per example (specs2 `isolated`), so the lazy vals below are
// re-evaluated for each example.
isolated
lazy val user = { val user = User.create.firstName("Thomas").lastName("Andersen"); user.save(); user }
lazy val enrolledCourse = { val course = Course.create; course.save(); course }
// NOTE(review): supervisedCourse is never referenced by the spec text above — confirm it is still needed.
lazy val supervisedCourse = { val course = Course.create; course.save(); course }
lazy val renderer = new UserRenderer(user);
} | AutomataTutor/automatatutor-frontend | src/test/scala/com/automatatutor/renderer/UserRendererTest.scala | Scala | mit | 945 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.e2
/** Collection of engine and evaluation libraries that have no dependency on
* PredictionIO.
*/
package object engine {}
/** Evaluation libraries that likewise have no dependency on PredictionIO. */
package object evaluation {}
| ydanilenko/PredictionIO | e2/src/main/scala/io/prediction/e2/package.scala | Scala | apache-2.0 | 794 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.clustering
import scala.util.Random
import org.scalatest.FunSuite
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.mllib.util.{LocalClusterSparkContext, MLlibTestSparkContext}
import org.apache.spark.mllib.util.TestingUtils._
/**
 * Behavioural tests for MLlib's KMeans: single-cluster convergence across
 * init modes, degenerate inputs, determinism, sparse input and k-means||
 * initialisation.
 */
class KMeansSuite extends FunSuite with MLlibTestSparkContext {

  import org.apache.spark.mllib.clustering.KMeans.{K_MEANS_PARALLEL, RANDOM}

  test("single cluster") {
    val data = sc.parallelize(Array(
      Vectors.dense(1.0, 2.0, 6.0),
      Vectors.dense(1.0, 3.0, 0.0),
      Vectors.dense(1.0, 4.0, 6.0)
    ))

    val center = Vectors.dense(1.0, 3.0, 4.0)

    // No matter how many runs or iterations we use, we should get one cluster,
    // centered at the mean of the points.
    var model = KMeans.train(data, k = 1, maxIterations = 1)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(data, k = 1, maxIterations = 2)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(data, k = 1, maxIterations = 5)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    // (an exact duplicate of this invocation was removed here)
    model = KMeans.train(data, k = 1, maxIterations = 1, runs = 5)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(data, k = 1, maxIterations = 1, runs = 1, initializationMode = RANDOM)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(
      data, k = 1, maxIterations = 1, runs = 1, initializationMode = K_MEANS_PARALLEL)
    assert(model.clusterCenters.head ~== center absTol 1E-5)
  }

  test("no distinct points") {
    val data = sc.parallelize(
      Array(
        Vectors.dense(1.0, 2.0, 3.0),
        Vectors.dense(1.0, 2.0, 3.0),
        Vectors.dense(1.0, 2.0, 3.0)),
      2)

    // Make sure code runs: training with k > number of distinct points must
    // still yield k centers rather than fail.
    val model = KMeans.train(data, k = 2, maxIterations = 1)
    assert(model.clusterCenters.size === 2)
  }

  test("more clusters than points") {
    val data = sc.parallelize(
      Array(
        Vectors.dense(1.0, 2.0, 3.0),
        Vectors.dense(1.0, 3.0, 4.0)),
      2)

    // Make sure code runs when k exceeds the number of points.
    val model = KMeans.train(data, k = 3, maxIterations = 1)
    assert(model.clusterCenters.size === 3)
  }

  test("deterministic initialization") {
    // Create a large-ish set of points for clustering
    val points = List.tabulate(1000)(n => Vectors.dense(n, n))
    val rdd = sc.parallelize(points, 3)

    for (initMode <- Seq(RANDOM, K_MEANS_PARALLEL)) {
      // Create deterministic models with a fixed seed and compare cluster means.
      val model1 = KMeans.train(rdd, k = 10, maxIterations = 2, runs = 1,
        initializationMode = initMode, seed = 42)
      val centers1 = model1.clusterCenters

      val model2 = KMeans.train(rdd, k = 10, maxIterations = 2, runs = 1,
        initializationMode = initMode, seed = 42)
      val centers2 = model2.clusterCenters

      centers1.zip(centers2).foreach { case (c1, c2) =>
        assert(c1 ~== c2 absTol 1E-14)
      }
    }
  }

  test("single cluster with big dataset") {
    val smallData = Array(
      Vectors.dense(1.0, 2.0, 6.0),
      Vectors.dense(1.0, 3.0, 0.0),
      Vectors.dense(1.0, 4.0, 6.0)
    )
    val data = sc.parallelize((1 to 100).flatMap(_ => smallData), 4)

    // No matter how many runs or iterations we use, we should get one cluster,
    // centered at the mean of the points.
    val center = Vectors.dense(1.0, 3.0, 4.0)

    var model = KMeans.train(data, k = 1, maxIterations = 1)
    assert(model.clusterCenters.size === 1)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(data, k = 1, maxIterations = 2)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(data, k = 1, maxIterations = 5)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    // (an exact duplicate of this invocation was removed here)
    model = KMeans.train(data, k = 1, maxIterations = 1, runs = 5)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(data, k = 1, maxIterations = 1, runs = 1, initializationMode = RANDOM)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(data, k = 1, maxIterations = 1, runs = 1,
      initializationMode = K_MEANS_PARALLEL)
    assert(model.clusterCenters.head ~== center absTol 1E-5)
  }

  test("single cluster with sparse data") {
    val n = 10000
    val data = sc.parallelize((1 to 100).flatMap { i =>
      val x = i / 1000.0
      Array(
        Vectors.sparse(n, Seq((0, 1.0 + x), (1, 2.0), (2, 6.0))),
        Vectors.sparse(n, Seq((0, 1.0 - x), (1, 2.0), (2, 6.0))),
        Vectors.sparse(n, Seq((0, 1.0), (1, 3.0 + x))),
        Vectors.sparse(n, Seq((0, 1.0), (1, 3.0 - x))),
        Vectors.sparse(n, Seq((0, 1.0), (1, 4.0), (2, 6.0 + x))),
        Vectors.sparse(n, Seq((0, 1.0), (1, 4.0), (2, 6.0 - x)))
      )
    }, 4)

    data.persist()

    // No matter how many runs or iterations we use, we should get one cluster,
    // centered at the mean of the points.
    val center = Vectors.sparse(n, Seq((0, 1.0), (1, 3.0), (2, 4.0)))

    var model = KMeans.train(data, k = 1, maxIterations = 1)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(data, k = 1, maxIterations = 2)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(data, k = 1, maxIterations = 5)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    // (an exact duplicate of this invocation was removed here)
    model = KMeans.train(data, k = 1, maxIterations = 1, runs = 5)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(data, k = 1, maxIterations = 1, runs = 1, initializationMode = RANDOM)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    model = KMeans.train(data, k = 1, maxIterations = 1, runs = 1,
      initializationMode = K_MEANS_PARALLEL)
    assert(model.clusterCenters.head ~== center absTol 1E-5)

    data.unpersist()
  }

  test("k-means|| initialization") {

    // Ordering by descending squared norm, used only to sort centers for
    // comparison. Note: never returns 0, so it is not consistent with equals —
    // acceptable here since it is only used for sorting.
    case class VectorWithCompare(x: Vector) extends Ordered[VectorWithCompare] {
      // `override` (Scala keyword) rather than Java's @Override annotation,
      // which has no effect on Scala overriding checks.
      override def compare(that: VectorWithCompare): Int = {
        if (this.x.toArray.foldLeft[Double](0.0)((acc, x) => acc + x * x) >
          that.x.toArray.foldLeft[Double](0.0)((acc, x) => acc + x * x)) -1 else 1
      }
    }

    val points = Seq(
      Vectors.dense(1.0, 2.0, 6.0),
      Vectors.dense(1.0, 3.0, 0.0),
      Vectors.dense(1.0, 4.0, 6.0),
      Vectors.dense(1.0, 0.0, 1.0),
      Vectors.dense(1.0, 1.0, 1.0)
    )
    val rdd = sc.parallelize(points)

    // K-means|| initialization should place all clusters into distinct centers because
    // it will make at least five passes, and it will give non-zero probability to each
    // unselected point as long as it hasn't yet selected all of them
    var model = KMeans.train(rdd, k = 5, maxIterations = 1)
    assert(model.clusterCenters.sortBy(VectorWithCompare(_))
      .zip(points.sortBy(VectorWithCompare(_))).forall(x => x._1 ~== (x._2) absTol 1E-5))

    // Iterations of Lloyd's should not change the answer either
    model = KMeans.train(rdd, k = 5, maxIterations = 10)
    assert(model.clusterCenters.sortBy(VectorWithCompare(_))
      .zip(points.sortBy(VectorWithCompare(_))).forall(x => x._1 ~== (x._2) absTol 1E-5))

    // Neither should more runs
    model = KMeans.train(rdd, k = 5, maxIterations = 10, runs = 5)
    assert(model.clusterCenters.sortBy(VectorWithCompare(_))
      .zip(points.sortBy(VectorWithCompare(_))).forall(x => x._1 ~== (x._2) absTol 1E-5))
  }

  test("two clusters") {
    val points = Seq(
      Vectors.dense(0.0, 0.0),
      Vectors.dense(0.0, 0.1),
      Vectors.dense(0.1, 0.0),
      Vectors.dense(9.0, 0.0),
      Vectors.dense(9.0, 0.2),
      Vectors.dense(9.2, 0.0)
    )
    val rdd = sc.parallelize(points, 3)

    for (initMode <- Seq(RANDOM, K_MEANS_PARALLEL)) {
      // Two iterations are sufficient no matter where the initial centers are.
      val model = KMeans.train(rdd, k = 2, maxIterations = 2, runs = 1, initMode)

      val predicts = model.predict(rdd).collect()

      // First three points share a cluster, last three share the other.
      assert(predicts(0) === predicts(1))
      assert(predicts(0) === predicts(2))
      assert(predicts(3) === predicts(4))
      assert(predicts(3) === predicts(5))
      assert(predicts(0) != predicts(3))
    }
  }
}
// Runs against a local-cluster context so task serialization actually happens.
class KMeansClusterSuite extends FunSuite with LocalClusterSparkContext {
test("task size should be small in both training and prediction") {
val m = 4
val n = 200000
// Rows are generated inside each partition from a seeded RNG, so no large
// array is captured in the closure shipped to executors.
val points = sc.parallelize(0 until m, 2).mapPartitionsWithIndex { (idx, iter) =>
val random = new Random(idx)
iter.map(i => Vectors.dense(Array.fill(n)(random.nextDouble)))
}.cache()
for (initMode <- Seq(KMeans.RANDOM, KMeans.K_MEANS_PARALLEL)) {
// If we serialize data directly in the task closure, the size of the serialized task would be
// greater than 1MB and hence Spark would throw an error.
val model = KMeans.train(points, 2, 2, 1, initMode)
// Results are deliberately unused: the test only checks these calls complete
// without Spark rejecting an oversized serialized task.
val predictions = model.predict(points).collect()
val cost = model.computeCost(points)
}
}
}
| trueyao/spark-lever | mllib/src/test/scala/org/apache/spark/mllib/clustering/KMeansSuite.scala | Scala | apache-2.0 | 10,303 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.lang.reflect.{ParameterizedType, Type}
import scala.reflect.runtime.universe.TypeTag
import scala.util.Try
import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.internal.Logging
import org.apache.spark.sql.api.java._
import org.apache.spark.sql.catalyst.{JavaTypeInference, ScalaReflection}
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.catalyst.expressions.{Expression, ScalaUDF}
import org.apache.spark.sql.execution.aggregate.ScalaUDAF
import org.apache.spark.sql.execution.python.UserDefinedPythonFunction
import org.apache.spark.sql.expressions.{UserDefinedAggregateFunction, UserDefinedFunction}
import org.apache.spark.sql.types.DataType
import org.apache.spark.util.Utils
/**
* Functions for registering user-defined functions. Use `SparkSession.udf` to access this:
*
* {{{
* spark.udf
* }}}
*
* @note The user-defined functions must be deterministic.
*
* @since 1.3.0
*/
@InterfaceStability.Stable
class UDFRegistration private[sql] (functionRegistry: FunctionRegistry) extends Logging {
/**
 * Registers a Python UDF under `name`: logs the wrapped function's runtime
 * configuration at debug level, then installs its builder in the function
 * registry, replacing any existing temporary function of the same name.
 */
protected[sql] def registerPython(name: String, udf: UserDefinedPythonFunction): Unit = {
log.debug(
s"""
| Registering new PythonUDF:
| name: $name
| command: ${udf.func.command.toSeq}
| envVars: ${udf.func.envVars}
| pythonIncludes: ${udf.func.pythonIncludes}
| pythonExec: ${udf.func.pythonExec}
| dataType: ${udf.dataType}
""".stripMargin)
functionRegistry.createOrReplaceTempFunction(name, udf.builder)
}
/**
 * Register a user-defined aggregate function (UDAF).
 *
 * @param name the name of the UDAF.
 * @param udaf the UDAF needs to be registered.
 * @return the registered UDAF.
 *
 * @since 1.5.0
 */
def register(name: String, udaf: UserDefinedAggregateFunction): UserDefinedAggregateFunction = {
// Wrap the UDAF into a ScalaUDAF expression built from the call's children.
def builder(children: Seq[Expression]) = ScalaUDAF(children, udaf)
functionRegistry.createOrReplaceTempFunction(name, builder)
udaf
}
/**
 * Register a user-defined function (UDF), for a UDF that's already defined using the DataFrame
 * API (i.e. of type UserDefinedFunction).
 *
 * @param name the name of the UDF.
 * @param udf the UDF needs to be registered.
 * @return the registered UDF.
 *
 * @since 2.2.0
 */
def register(name: String, udf: UserDefinedFunction): UserDefinedFunction = {
// Apply the UDF to the call's children (as Columns) and take the resulting expression.
def builder(children: Seq[Expression]) = udf.apply(children.map(Column.apply) : _*).expr
functionRegistry.createOrReplaceTempFunction(name, builder)
udf
}
// scalastyle:off line.size.limit
/* register 0-22 were generated by this script
(0 to 22).map { x =>
val types = (1 to x).foldRight("RT")((i, s) => {s"A$i, $s"})
val typeTags = (1 to x).map(i => s"A${i}: TypeTag").foldLeft("RT: TypeTag")(_ + ", " + _)
val inputTypes = (1 to x).foldRight("Nil")((i, s) => {s"ScalaReflection.schemaFor[A$i].dataType :: $s"})
println(s"""
/**
* Register a Scala closure of ${x} arguments as user-defined function (UDF).
* @tparam RT return type of UDF.
* @since 1.3.0
*/
def register[$typeTags](name: String, func: Function$x[$types]): UserDefinedFunction = {
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
val inputTypes = Try($inputTypes).toOption
def builder(e: Seq[Expression]) = if (e.length == $x) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: $x; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}""")
}
(1 to 22).foreach { i =>
val extTypeArgs = (1 to i).map(_ => "_").mkString(", ")
val anyTypeArgs = (1 to i).map(_ => "Any").mkString(", ")
val anyCast = s".asInstanceOf[UDF$i[$anyTypeArgs, Any]]"
val anyParams = (1 to i).map(_ => "_: Any").mkString(", ")
println(s"""
|/**
| * Register a user-defined function with ${i} arguments.
| * @since 1.3.0
| */
|def register(name: String, f: UDF$i[$extTypeArgs, _], returnType: DataType): Unit = {
| val func = f$anyCast.call($anyParams)
|def builder(e: Seq[Expression]) = if (e.length == $i) {
| ScalaUDF(func, returnType, e)
|} else {
| throw new AnalysisException("Invalid number of arguments for function " + name +
| ". Expected: $i; Found: " + e.length)
|}
|functionRegistry.createOrReplaceTempFunction(name, builder)
|}""".stripMargin)
}
*/
/**
 * Register a Scala closure of 0 arguments as user-defined function (UDF).
 * @tparam RT return type of UDF.
 * @since 1.3.0
 */
// NOTE: generated by the script in the comment above — keep edits in sync with it.
def register[RT: TypeTag](name: String, func: Function0[RT]): UserDefinedFunction = {
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// Best-effort input schemas: Try yields None when a type is unsupported.
val inputTypes = Try(Nil).toOption
def builder(e: Seq[Expression]) = if (e.length == 0) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 0; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 1 arguments as user-defined function (UDF).
 * @tparam RT return type of UDF.
 * @since 1.3.0
 */
// NOTE: generated by the script in the comment above — keep edits in sync with it.
def register[RT: TypeTag, A1: TypeTag](name: String, func: Function1[A1, RT]): UserDefinedFunction = {
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: Nil).toOption
def builder(e: Seq[Expression]) = if (e.length == 1) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 1; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 2 arguments as user-defined function (UDF).
 * @tparam RT return type of UDF.
 * @since 1.3.0
 */
// NOTE: generated by the script in the comment above — keep edits in sync with it.
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag](name: String, func: Function2[A1, A2, RT]): UserDefinedFunction = {
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: Nil).toOption
def builder(e: Seq[Expression]) = if (e.length == 2) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 2; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 3 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag](name: String, func: Function3[A1, A2, A3, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: Nil).toOption
// Registry builder: requires exactly 3 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 3) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 3; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 4 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag](name: String, func: Function4[A1, A2, A3, A4, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: Nil).toOption
// Registry builder: requires exactly 4 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 4) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 4; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 5 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag](name: String, func: Function5[A1, A2, A3, A4, A5, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: Nil).toOption
// Registry builder: requires exactly 5 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 5) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 5; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 6 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag](name: String, func: Function6[A1, A2, A3, A4, A5, A6, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: Nil).toOption
// Registry builder: requires exactly 6 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 6) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 6; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 7 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag](name: String, func: Function7[A1, A2, A3, A4, A5, A6, A7, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: Nil).toOption
// Registry builder: requires exactly 7 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 7) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 7; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 8 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag](name: String, func: Function8[A1, A2, A3, A4, A5, A6, A7, A8, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: Nil).toOption
// Registry builder: requires exactly 8 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 8) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 8; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 9 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag](name: String, func: Function9[A1, A2, A3, A4, A5, A6, A7, A8, A9, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: Nil).toOption
// Registry builder: requires exactly 9 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 9) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 9; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 10 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag](name: String, func: Function10[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: Nil).toOption
// Registry builder: requires exactly 10 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 10) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 10; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 11 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag](name: String, func: Function11[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: Nil).toOption
// Registry builder: requires exactly 11 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 11) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 11; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 12 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag](name: String, func: Function12[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: ScalaReflection.schemaFor[A12].dataType :: Nil).toOption
// Registry builder: requires exactly 12 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 12) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 12; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 13 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag](name: String, func: Function13[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: ScalaReflection.schemaFor[A12].dataType :: ScalaReflection.schemaFor[A13].dataType :: Nil).toOption
// Registry builder: requires exactly 13 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 13) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 13; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 14 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag](name: String, func: Function14[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: ScalaReflection.schemaFor[A12].dataType :: ScalaReflection.schemaFor[A13].dataType :: ScalaReflection.schemaFor[A14].dataType :: Nil).toOption
// Registry builder: requires exactly 14 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 14) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 14; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 15 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag](name: String, func: Function15[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: ScalaReflection.schemaFor[A12].dataType :: ScalaReflection.schemaFor[A13].dataType :: ScalaReflection.schemaFor[A14].dataType :: ScalaReflection.schemaFor[A15].dataType :: Nil).toOption
// Registry builder: requires exactly 15 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 15) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 15; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 16 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag](name: String, func: Function16[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: ScalaReflection.schemaFor[A12].dataType :: ScalaReflection.schemaFor[A13].dataType :: ScalaReflection.schemaFor[A14].dataType :: ScalaReflection.schemaFor[A15].dataType :: ScalaReflection.schemaFor[A16].dataType :: Nil).toOption
// Registry builder: requires exactly 16 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 16) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 16; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 17 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag](name: String, func: Function17[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: ScalaReflection.schemaFor[A12].dataType :: ScalaReflection.schemaFor[A13].dataType :: ScalaReflection.schemaFor[A14].dataType :: ScalaReflection.schemaFor[A15].dataType :: ScalaReflection.schemaFor[A16].dataType :: ScalaReflection.schemaFor[A17].dataType :: Nil).toOption
// Registry builder: requires exactly 17 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 17) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 17; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 18 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag, A18: TypeTag](name: String, func: Function18[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: ScalaReflection.schemaFor[A12].dataType :: ScalaReflection.schemaFor[A13].dataType :: ScalaReflection.schemaFor[A14].dataType :: ScalaReflection.schemaFor[A15].dataType :: ScalaReflection.schemaFor[A16].dataType :: ScalaReflection.schemaFor[A17].dataType :: ScalaReflection.schemaFor[A18].dataType :: Nil).toOption
// Registry builder: requires exactly 18 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 18) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 18; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 19 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag](name: String, func: Function19[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: ScalaReflection.schemaFor[A12].dataType :: ScalaReflection.schemaFor[A13].dataType :: ScalaReflection.schemaFor[A14].dataType :: ScalaReflection.schemaFor[A15].dataType :: ScalaReflection.schemaFor[A16].dataType :: ScalaReflection.schemaFor[A17].dataType :: ScalaReflection.schemaFor[A18].dataType :: ScalaReflection.schemaFor[A19].dataType :: Nil).toOption
// Registry builder: requires exactly 19 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 19) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 19; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 20 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag, A20: TypeTag](name: String, func: Function20[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: ScalaReflection.schemaFor[A12].dataType :: ScalaReflection.schemaFor[A13].dataType :: ScalaReflection.schemaFor[A14].dataType :: ScalaReflection.schemaFor[A15].dataType :: ScalaReflection.schemaFor[A16].dataType :: ScalaReflection.schemaFor[A17].dataType :: ScalaReflection.schemaFor[A18].dataType :: ScalaReflection.schemaFor[A19].dataType :: ScalaReflection.schemaFor[A20].dataType :: Nil).toOption
// Registry builder: requires exactly 20 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 20) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 20; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 21 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag, A20: TypeTag, A21: TypeTag](name: String, func: Function21[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: ScalaReflection.schemaFor[A12].dataType :: ScalaReflection.schemaFor[A13].dataType :: ScalaReflection.schemaFor[A14].dataType :: ScalaReflection.schemaFor[A15].dataType :: ScalaReflection.schemaFor[A16].dataType :: ScalaReflection.schemaFor[A17].dataType :: ScalaReflection.schemaFor[A18].dataType :: ScalaReflection.schemaFor[A19].dataType :: ScalaReflection.schemaFor[A20].dataType :: ScalaReflection.schemaFor[A21].dataType :: Nil).toOption
// Registry builder: requires exactly 21 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 21) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 21; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
/**
 * Register a Scala closure of 22 arguments as user-defined function (UDF).
 * Installs an arity-checked expression builder in the session's temporary function
 * registry under `name`, replacing any previous temporary function with that name.
 * @param name name to register the function under.
 * @param func the closure executed when the UDF is invoked.
 * @tparam RT return type of UDF.
 * @return a `UserDefinedFunction` carrying the inferred return type, input types and nullability.
 * @since 1.3.0
 */
def register[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag, A20: TypeTag, A21: TypeTag, A22: TypeTag](name: String, func: Function22[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, RT]): UserDefinedFunction = {
// Catalyst return data type and nullability inferred from the Scala type RT.
val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
// schemaFor may throw for unsupported argument types; None then means "input types unknown".
val inputTypes = Try(ScalaReflection.schemaFor[A1].dataType :: ScalaReflection.schemaFor[A2].dataType :: ScalaReflection.schemaFor[A3].dataType :: ScalaReflection.schemaFor[A4].dataType :: ScalaReflection.schemaFor[A5].dataType :: ScalaReflection.schemaFor[A6].dataType :: ScalaReflection.schemaFor[A7].dataType :: ScalaReflection.schemaFor[A8].dataType :: ScalaReflection.schemaFor[A9].dataType :: ScalaReflection.schemaFor[A10].dataType :: ScalaReflection.schemaFor[A11].dataType :: ScalaReflection.schemaFor[A12].dataType :: ScalaReflection.schemaFor[A13].dataType :: ScalaReflection.schemaFor[A14].dataType :: ScalaReflection.schemaFor[A15].dataType :: ScalaReflection.schemaFor[A16].dataType :: ScalaReflection.schemaFor[A17].dataType :: ScalaReflection.schemaFor[A18].dataType :: ScalaReflection.schemaFor[A19].dataType :: ScalaReflection.schemaFor[A20].dataType :: ScalaReflection.schemaFor[A21].dataType :: ScalaReflection.schemaFor[A22].dataType :: Nil).toOption
// Registry builder: requires exactly 22 call-site arguments, otherwise fails analysis.
def builder(e: Seq[Expression]) = if (e.length == 22) {
ScalaUDF(func, dataType, e, inputTypes.getOrElse(Nil), Some(name), nullable)
} else {
throw new AnalysisException("Invalid number of arguments for function " + name +
". Expected: 22; Found: " + e.length)
}
functionRegistry.createOrReplaceTempFunction(name, builder)
// Also return the UDF object built from the same closure and inferred schema.
UserDefinedFunction(func, dataType, inputTypes).withName(name).withNullability(nullable)
}
//////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////
/**
* Register a Java UDF class using reflection, for use from pyspark
*
* @param name udf name
* @param className fully qualified class name of udf
* @param returnDataType return type of the UDF. If it is null, Spark will try to infer
* it via reflection.
*/
private[sql] def registerJava(name: String, className: String, returnDataType: DataType): Unit = {
try {
val clazz = Utils.classForName(className)
val udfInterfaces = clazz.getGenericInterfaces
.filter(_.isInstanceOf[ParameterizedType])
.map(_.asInstanceOf[ParameterizedType])
.filter(e => e.getRawType.isInstanceOf[Class[_]] && e.getRawType.asInstanceOf[Class[_]].getCanonicalName.startsWith("org.apache.spark.sql.api.java.UDF"))
if (udfInterfaces.length == 0) {
throw new AnalysisException(s"UDF class ${className} doesn't implement any UDF interface")
} else if (udfInterfaces.length > 1) {
throw new AnalysisException(s"It is invalid to implement multiple UDF interfaces, UDF class ${className}")
} else {
try {
val udf = clazz.newInstance()
val udfReturnType = udfInterfaces(0).getActualTypeArguments.last
var returnType = returnDataType
if (returnType == null) {
returnType = JavaTypeInference.inferDataType(udfReturnType)._1
}
udfInterfaces(0).getActualTypeArguments.length match {
case 2 => register(name, udf.asInstanceOf[UDF1[_, _]], returnType)
case 3 => register(name, udf.asInstanceOf[UDF2[_, _, _]], returnType)
case 4 => register(name, udf.asInstanceOf[UDF3[_, _, _, _]], returnType)
case 5 => register(name, udf.asInstanceOf[UDF4[_, _, _, _, _]], returnType)
case 6 => register(name, udf.asInstanceOf[UDF5[_, _, _, _, _, _]], returnType)
case 7 => register(name, udf.asInstanceOf[UDF6[_, _, _, _, _, _, _]], returnType)
case 8 => register(name, udf.asInstanceOf[UDF7[_, _, _, _, _, _, _, _]], returnType)
case 9 => register(name, udf.asInstanceOf[UDF8[_, _, _, _, _, _, _, _, _]], returnType)
case 10 => register(name, udf.asInstanceOf[UDF9[_, _, _, _, _, _, _, _, _, _]], returnType)
case 11 => register(name, udf.asInstanceOf[UDF10[_, _, _, _, _, _, _, _, _, _, _]], returnType)
case 12 => register(name, udf.asInstanceOf[UDF11[_, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case 13 => register(name, udf.asInstanceOf[UDF12[_, _, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case 14 => register(name, udf.asInstanceOf[UDF13[_, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case 15 => register(name, udf.asInstanceOf[UDF14[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case 16 => register(name, udf.asInstanceOf[UDF15[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case 17 => register(name, udf.asInstanceOf[UDF16[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case 18 => register(name, udf.asInstanceOf[UDF17[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case 19 => register(name, udf.asInstanceOf[UDF18[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case 20 => register(name, udf.asInstanceOf[UDF19[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case 21 => register(name, udf.asInstanceOf[UDF20[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case 22 => register(name, udf.asInstanceOf[UDF21[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case 23 => register(name, udf.asInstanceOf[UDF22[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _]], returnType)
case n =>
throw new AnalysisException(s"UDF class with ${n} type arguments is not supported.")
}
} catch {
case e @ (_: InstantiationException | _: IllegalArgumentException) =>
throw new AnalysisException(s"Can not instantiate class ${className}, please make sure it has public non argument constructor")
}
}
} catch {
case e: ClassNotFoundException => throw new AnalysisException(s"Can not load class ${className}, please make sure it is on the classpath")
}
}
/**
* Register a Java UDAF class using reflection, for use from pyspark
*
* @param name UDAF name
* @param className fully qualified class name of UDAF
*/
private[sql] def registerJavaUDAF(name: String, className: String): Unit = {
try {
val clazz = Utils.classForName(className)
if (!classOf[UserDefinedAggregateFunction].isAssignableFrom(clazz)) {
throw new AnalysisException(s"class $className doesn't implement interface UserDefinedAggregateFunction")
}
val udaf = clazz.newInstance().asInstanceOf[UserDefinedAggregateFunction]
register(name, udaf)
} catch {
case e: ClassNotFoundException => throw new AnalysisException(s"Can not load class ${className}, please make sure it is on the classpath")
case e @ (_: InstantiationException | _: IllegalArgumentException) =>
throw new AnalysisException(s"Can not instantiate class ${className}, please make sure it has public non argument constructor")
}
}
  //////////////////////////////////////////////////////////////////////////////////////////////
  // The 22 `register` overloads below are mechanical boilerplate: one per Java UDFn interface.
  // NOTE(review): unlike the Scala function-based overloads earlier in this class, these build
  // the ScalaUDF without input types, so no input-type information is attached — TODO confirm
  // this asymmetry is intentional.
  //////////////////////////////////////////////////////////////////////////////////////////////
  /**
   * Register a user-defined function with 1 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF1[_, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF1[Any, Any]].call(_: Any)
    def builder(e: Seq[Expression]) = if (e.length == 1) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 1; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 2 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF2[_, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF2[Any, Any, Any]].call(_: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 2) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 2; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 3 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF3[_, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF3[Any, Any, Any, Any]].call(_: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 3) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 3; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 4 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF4[_, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF4[Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 4) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 4; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 5 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF5[_, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF5[Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 5) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 5; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 6 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF6[_, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF6[Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 6) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 6; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 7 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF7[_, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF7[Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 7) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 7; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 8 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF8[_, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF8[Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 8) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 8; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 9 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF9[_, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF9[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 9) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 9; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 10 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF10[_, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF10[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 10) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 10; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 11 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF11[_, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF11[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 11) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 11; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 12 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF12[_, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF12[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 12) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 12; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 13 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF13[_, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF13[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 13) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 13; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 14 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF14[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF14[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 14) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 14; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 15 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF15[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF15[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 15) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 15; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 16 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF16[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF16[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 16) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 16; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 17 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF17[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF17[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 17) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 17; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 18 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF18[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF18[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 18) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 18; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 19 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF19[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF19[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 19) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 19; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 20 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF20[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF20[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 20) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 20; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 21 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF21[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF21[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 21) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 21; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
  /**
   * Register a user-defined function with 22 arguments.
   * @since 1.3.0
   */
  def register(name: String, f: UDF22[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], returnType: DataType): Unit = {
    val func = f.asInstanceOf[UDF22[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
    def builder(e: Seq[Expression]) = if (e.length == 22) {
      ScalaUDF(func, returnType, e)
    } else {
      throw new AnalysisException("Invalid number of arguments for function " + name +
        ". Expected: 22; Found: " + e.length)
    }
    functionRegistry.createOrReplaceTempFunction(name, builder)
  }
// scalastyle:on line.size.limit
}
| nilsgrabbert/spark | sql/core/src/main/scala/org/apache/spark/sql/UDFRegistration.scala | Scala | apache-2.0 | 60,827 |
package uk.co.pollett.flink.newsreader.nlp
import java.util.Date
import org.scalatest.FlatSpec
import org.scalamock.scalatest.MockFactory
import uk.co.pollett.flink.newsreader.rss.Entry
class EnrichSpec extends FlatSpec with MockFactory {
  // Fixture: a minimal Entry whose only meaningful field is the link to fetch.
  private def sampleEntry: Entry =
    Entry("", "", "https://blog.pollett.co.uk/chat/aws-s3-at-speed/", new Date(), "", None, None, None, None, None)

  "An entry" should "return more content" in {
    val enriched = Enrich.enrich(sampleEntry)
    assert(enriched.body.isDefined)
  }
}
| pollett/flink-newsreader | src/test/scala/uk/co/pollett/flink/newsreader/nlp/EnrichSpec.scala | Scala | mit | 490 |
package org.nexbook.performance.result
import scala.io.Source
import scala.util.matching.Regex
/**
* Created by milczu on 07.01.16.
*/
object LogFileTimeResultExtractor {
  type NanoTime = Long
  type ClOrdId = String
  // Capture-group positions inside the log-line pattern below.
  val nanoTimeGroupIndex = 1
  val clOrdIdGroupIndex = 4
  /**
   * Scans a log file and, for every line emitted by `className` that carries a UUID-shaped
   * client order id, extracts a (ClOrdId -> nano-timestamp) pair. When the same id occurs on
   * several matching lines, the last occurrence wins (Map semantics of toMap).
   *
   * Fix: the original implementation never closed the handle returned by Source.fromFile,
   * leaking a file descriptor per call; the source is now closed in a finally block.
   *
   * @param filePath path of the log file to scan
   * @param className class name expected in the log line (interpolated into the pattern)
   * @return map from ClOrdId to the captured nano time
   */
  def extractTimes(filePath: String, className: String): Map[ClOrdId, NanoTime] = {
    // NOTE(review): the quadrupled backslashes (e.g. "\\\\d") produce a regex matching a
    // literal backslash before 'd' rather than a digit — looks like double escaping; TODO
    // confirm against a real log line. Kept byte-identical to preserve current behavior.
    val regex: Regex = s"^\\\\d{2}:\\\\d{2}:\\\\d{2}\\\\.\\\\d{3}\\\\s(\\\\d*)\\\\s\\\\[.*\\\\]\\\\s(TRACE|DEBUG|INFO).*($className)\\\\s\\\\-\\\\s([0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}).*$$".r
    def matchingLine(line: String): Boolean = regex.findAllMatchIn(line).nonEmpty
    def extractResultFromLine(line: String): (ClOrdId, NanoTime) = {
      regex.findAllIn(line).matchData.toList.map(m => (m.group(clOrdIdGroupIndex), m.group(nanoTimeGroupIndex).toLong)).head
    }
    val source = Source.fromFile(filePath)
    try {
      // toMap materializes the lazy getLines() iterator before the stream is closed.
      source.getLines().filter(matchingLine).map(extractResultFromLine).toMap
    } finally {
      source.close()
    }
  }
}
| milczarekIT/nexbook | src/test/scala/org/nexbook/performance/result/LogFileTimeResultExtractor.scala | Scala | apache-2.0 | 943 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.events
import org.testng.annotations.Test
import org.scalatestplus.testng.TestNGSuite
import org.scalatest.DoNotDiscover
@DoNotDiscover
class TestLocationTestNGSuite extends TestNGSuite with TestLocationServices {
  // Fully qualified name of this suite, used by TestLocationServices to match events.
  val suiteTypeName = "org.scalatest.events.TestLocationTestNGSuite"

  // This suite fires no suite-level, success, or info events...
  val expectedSuiteStartingList = Nil
  val expectedSuiteCompletedList = Nil
  val expectedSuiteAbortedList = Nil
  val expectedTestSucceededList = Nil
  val expectedInfoProvidedList = Nil

  // ...only a single failing test, reported with a stack-depth-based location.
  val expectedTestFailedList = SeeStackDepthExceptionPair("testFail") :: Nil

  @Test
  def testFail(): Unit = fail()
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/events/TestLocationTestNGSuite.scala | Scala | apache-2.0 | 1,229 |
package dx.compiler
import java.nio.file.{Path, Paths}
import com.typesafe.config.{Config, ConfigFactory}
import dx.api.{DxApi, DxFile, DxPath, DxProject, DxRecord, Field, InstanceTypeDbQuery}
import dx.core.io.{DxFileAccessProtocol, DxFileDescCache, DxPathConfig}
import dx.core.languages.wdl.{ParseSource, WdlVarLinksConverter}
import spray.json.JsValue
import wdlTools.types.{TypedAbstractSyntax => TAT}
import wdlTools.util.{FileSourceResolver, FileUtils}
import scala.jdk.CollectionConverters._
/** Compilation stopping points: full compile, IR only, or native without the runtime asset. */
object CompilerFlag extends Enumeration {
  type CompilerFlag = Value

  // Declaration order fixes the numeric ids: All=0, IR=1, NativeWithoutRuntimeAsset=2.
  val All = Value
  val IR = Value
  val NativeWithoutRuntimeAsset = Value
}
// Tree printer types for the execTree option: how the compiled execution tree is rendered.
sealed trait TreePrinter
// Render the execution tree as JSON.
case object JsonTreePrinter extends TreePrinter
// Render the execution tree as indented, human-readable text.
case object PrettyTreePrinter extends TreePrinter
/**
 * Packing of all compiler flags in an easy to digest format.
 *
 * @param archire archive older applet/workflow versions instead of failing (see Native)
 * @param compileMode how far to take compilation (All / IR / NativeWithoutRuntimeAsset)
 * @param defaults optional JSON file with default workflow inputs
 * @param extras optional extra runtime/reorg attributes
 * @param fatalValidationWarnings treat validation warnings as fatal
 * @param force overwrite existing platform objects
 * @param importDirs directories searched for WDL imports
 * @param inputs Cromwell-style input JSON files to translate
 * @param leaveWorkflowsOpen do not close generated dx workflows
 * @param locked generate locked-down workflows
 * @param projectWideReuse reuse applets found anywhere in the project
 * @param reorg reorganize workflow outputs after completion
 * @param streamAllFiles stream (rather than download) all input files
 * @param execTree optional rendering of the compiled execution tree
 * @param runtimeTraceLevel verbosity of runtime tracing
 * @param dxApi handle for DNAnexus API calls
 */
case class CompilerOptions(archive: Boolean,
                           compileMode: CompilerFlag.Value,
                           defaults: Option[Path],
                           extras: Option[Extras],
                           fatalValidationWarnings: Boolean,
                           force: Boolean,
                           importDirs: Vector[Path],
                           inputs: Vector[Path],
                           leaveWorkflowsOpen: Boolean,
                           locked: Boolean,
                           projectWideReuse: Boolean,
                           reorg: Boolean,
                           streamAllFiles: Boolean,
                           execTree: Option[TreePrinter],
                           runtimeTraceLevel: Int,
                           dxApi: DxApi)
/**
 * Top-level compiler driver: parses WDL, generates IR, and (optionally) compiles the IR
 * into native dx applets and workflows.
 */
case class Top(cOpt: CompilerOptions) {
  private val dxApi = cOpt.dxApi
  private val logger = dxApi.logger
  // The mapping from region to project name is list of (region, proj-name) pairs.
  // Get the project for this region.
  private def getProjectWithRuntimeLibrary(region2project: Map[String, String],
                                           region: String): (String, String) = {
    val destination = region2project.get(region) match {
      case None => throw new Exception(s"Region ${region} is currently unsupported")
      case Some(dest) => dest
    }
    // The destination is something like "project-name:/folder/path";
    // a bare "project-name" means the project root folder.
    val parts = destination.split(":")
    if (parts.length == 1) {
      (parts(0), "/")
    } else if (parts.length == 2) {
      (parts(0), parts(1))
    } else {
      throw new Exception(s"Bad syntax for destination ${destination}")
    }
  }
  // the regions live in dxWDL.conf
  def getRegions: Map[String, String] = {
    val config = ConfigFactory.load(DX_WDL_RUNTIME_CONF_FILE)
    val l: Vector[Config] = config.getConfigList("dxWDL.region2project").asScala.toVector
    val region2project: Map[String, String] = l.map { pair =>
      val r = pair.getString("region")
      val projName = pair.getString("path")
      r -> projName
    }.toMap
    region2project
  }
  // Find the runtime dxWDL asset with the correct version. Look inside the
  // project configured for this region.
  private def getAssetId(region: String): String = {
    val region2project = getRegions
    val (projNameRt, folder) = getProjectWithRuntimeLibrary(region2project, region)
    val dxProjRt = dxApi.resolveProject(projNameRt)
    logger.trace(s"Looking for asset-id in ${projNameRt}:/${folder}")
    val assetDxPath = s"${DxPath.DX_URL_PREFIX}${dxProjRt.getId}:${folder}/${DX_WDL_ASSET}"
    val dxObj = dxApi.resolveOnePath(assetDxPath, Some(dxProjRt))
    if (!dxObj.isInstanceOf[DxRecord])
      throw new Exception(s"Found dx object of wrong type ${dxObj} at ${assetDxPath}")
    dxObj.getId
  }
  // We need the dxWDL runtime library cloned into this project, so it will
  // be available to all subjobs we run.
  private def cloneRtLibraryToProject(region: String,
                                      dxWDLrtId: String,
                                      dxProject: DxProject): Unit = {
    val region2project = getRegions
    val (projNameRt, _) = getProjectWithRuntimeLibrary(region2project, region)
    val dxProjRt = dxApi.resolveProject(projNameRt)
    dxApi.cloneAsset(dxApi.record(dxWDLrtId), dxProject, DX_WDL_ASSET, dxProjRt)
  }
  // Backend compiler pass: turn the IR bundle into dx:applets and dx:workflows.
  private def compileNative(
      bundle: IR.Bundle,
      folder: String,
      dxProject: DxProject,
      runtimePathConfig: DxPathConfig,
      wdlVarLinksConverter: WdlVarLinksConverter
  ): Native.Results = {
    val dxWDLrtId: Option[String] = cOpt.compileMode match {
      case CompilerFlag.IR =>
        throw new Exception("Invalid value IR for compilation mode")
      case CompilerFlag.NativeWithoutRuntimeAsset =>
        // Testing mode, we don't need the runtime library to check native
        // compilation.
        None
      case CompilerFlag.All =>
        // get billTo and region from the project, then find the runtime asset
        // in the current region.
        val region = dxProject.describe(Set(Field.Region)).region match {
          case Some(s) => s
          case None    => throw new Exception(s"Cannot get region for project ${dxProject}")
        }
        val lrtId = getAssetId(region)
        cloneRtLibraryToProject(region, lrtId, dxProject)
        Some(lrtId)
    }
    // get list of available instance types
    val instanceTypeDB = InstanceTypeDbQuery(dxApi).query(dxProject)
    // Efficiently build a directory of the currently existing applets.
    // We don't want to build them if we don't have to.
    val dxObjDir = DxObjectDirectory(bundle, dxProject, folder, cOpt.projectWideReuse, dxApi)
    // Generate dx:applets and dx:workflow from the IR
    Native(
        dxWDLrtId,
        folder,
        dxProject,
        dxObjDir,
        instanceTypeDB,
        runtimePathConfig,
        wdlVarLinksConverter,
        bundle.typeAliases,
        cOpt.extras,
        cOpt.runtimeTraceLevel,
        cOpt.leaveWorkflowsOpen,
        cOpt.force,
        cOpt.archive,
        cOpt.locked,
        cOpt.dxApi
    ).apply(bundle)
  }
  // check the declarations in [graph], and make sure they
  // do not contain the reserved '___' substring.
  private def checkDeclarations(varNames: Vector[String]): Unit = {
    for (varName <- varNames)
      if (varName contains "___")
        throw new Exception(s"Variable ${varName} is using the reserved substring ___")
  }
  // Validate a callable: reject reserved variable names; warn on ignored
  // workflow parameter_meta sections.
  private def validate(callable: TAT.Callable): Unit = {
    callable match {
      case wf: TAT.Workflow =>
        if (wf.parameterMeta.isDefined) {
          logger.warning("dxWDL workflows ignore their parameter meta section")
        }
        checkDeclarations(wf.inputs.map(_.name))
        checkDeclarations(wf.outputs.map(_.name))
        val allDeclarations: Vector[TAT.Declaration] = wf.body.collect {
          case d: TAT.Declaration => d
        }
        checkDeclarations(allDeclarations.map(_.name))
      case task: TAT.Task =>
        checkDeclarations(task.inputs.map(_.name))
        checkDeclarations(task.outputs.map(_.name))
    }
  }
  // Scan the JSON inputs files for dx:files, and batch describe them. This
  // reduces the number of API calls.
  private def bulkFileDescribe(
      bundle: IR.Bundle,
      dxProject: DxProject
  ): (Map[String, DxFile], DxFileDescCache) = {
    // Files referenced by the defaults file (if any)...
    val defResults: InputFileScanResults = cOpt.defaults match {
      case None => InputFileScanResults(Map.empty, Vector.empty)
      case Some(path) =>
        InputFileScan(bundle, dxProject, dxApi).apply(path)
    }
    // ...merged with files referenced by every input file.
    val allResults: InputFileScanResults = cOpt.inputs.foldLeft(defResults) {
      case (accu: InputFileScanResults, inputFilePath) =>
        val res = InputFileScan(bundle, dxProject, dxApi).apply(inputFilePath)
        InputFileScanResults(accu.path2file ++ res.path2file, accu.dxFiles ++ res.dxFiles)
    }
    val allFiles = dxApi.fileBulkDescribe(allResults.dxFiles)
    (allResults.path2file, DxFileDescCache(allFiles))
  }
  // Parse and type-check the WDL source, validate each callable, and
  // lower everything to the intermediate representation.
  private def wdlToIR(source: Path): IR.Bundle = {
    val (_, language, everythingBundle, allSources, adjunctFiles) =
      ParseSource(dxApi).apply(source, cOpt.importDirs)
    // validate
    everythingBundle.allCallables.foreach { case (_, c) => validate(c) }
    everythingBundle.primaryCallable match {
      case None    => ()
      case Some(x) => validate(x)
    }
    // Compile the WDL workflow into an Intermediate
    // Representation (IR)
    val defaultRuntimeAttrs = cOpt.extras match {
      case None     => WdlRuntimeAttrs(Map.empty)
      case Some(ex) => ex.defaultRuntimeAttributes
    }
    // Custom reorg attributes from extras take precedence over the --reorg flag.
    val reorgApp: Either[Boolean, ReorgAttrs] = cOpt.extras match {
      case None => Left(cOpt.reorg)
      case Some(ex) =>
        ex.customReorgAttributes match {
          case None       => Left(cOpt.reorg)
          case Some(cOrg) => Right(cOrg)
        }
    }
    // TODO: load default hints from attrs
    val defaultHintAttrs = WdlHintAttrs(Map.empty)
    GenerateIR(dxApi, defaultRuntimeAttrs, defaultHintAttrs)
      .apply(everythingBundle, allSources, language, cOpt.locked, reorgApp, adjunctFiles)
  }
  // Embed default values into the bundle and write out xxxx.dx.json input
  // files for each provided Cromwell-style input file.
  private def handleInputFiles(bundle: IR.Bundle,
                               fileResolver: FileSourceResolver,
                               pathToDxFile: Map[String, DxFile],
                               dxFileDescCache: DxFileDescCache): IR.Bundle = {
    val inputFile =
      InputFile(fileResolver, dxFileDescCache, pathToDxFile, bundle.typeAliases, dxApi)
    val bundle2: IR.Bundle = cOpt.defaults match {
      case None       => bundle
      case Some(path) => inputFile.embedDefaults(bundle, path)
    }
    // generate dx inputs from the Cromwell-style input specification.
    cOpt.inputs.foreach { path =>
      val dxInputs = inputFile.dxFromInputJson(bundle2, path)
      // write back out as xxxx.dx.json
      val filename = FileUtils.replaceFileSuffix(path, ".dx.json")
      val parent = path.getParent
      // path.getParent is null for a bare filename; fall back to the working directory.
      val dxInputFile =
        if (parent != null) {
          parent.resolve(filename)
        } else {
          Paths.get(filename)
        }
      FileUtils.writeFileContent(dxInputFile, dxInputs.prettyPrint)
      logger.trace(s"Wrote dx JSON input file ${dxInputFile}")
    }
    bundle2
  }
  /**
   * Compile and generate intermediate code only (no platform applets/workflows).
   *
   * @param source WDL source file
   * @param dxProject project used only to resolve dx:file references in inputs/defaults
   * @return the IR bundle, with defaults embedded
   */
  def applyOnlyIR(source: Path, dxProject: DxProject): IR.Bundle = {
    // generate IR
    val bundle: IR.Bundle = wdlToIR(source)
    // lookup platform files in bulk
    val (pathToDxFile, dxFileDescCache) = bulkFileDescribe(bundle, dxProject)
    val dxProtocol = DxFileAccessProtocol(dxApi, dxFileDescCache)
    val fileResolver =
      FileSourceResolver.create(userProtocols = Vector(dxProtocol), logger = logger)
    // handle changes resulting from setting defaults, and
    // generate DNAx input files.
    handleInputFiles(bundle, fileResolver, pathToDxFile, dxFileDescCache)
  }
  /**
   * Compile up to native dx applets and workflows.
   *
   * @param source WDL source file
   * @param folder destination folder on the platform
   * @param dxProject destination project
   * @param runtimePathConfig runtime path layout passed through to the backend
   * @param execTree optional rendering of the compiled execution tree
   * @return the generated executable id(s), plus the rendered tree when requested
   */
  def apply(source: Path,
            folder: String,
            dxProject: DxProject,
            runtimePathConfig: DxPathConfig,
            execTree: Option[TreePrinter]): (String, Option[Either[String, JsValue]]) = {
    val bundle: IR.Bundle = wdlToIR(source)
    // lookup platform files in bulk
    val (pathToDxFile, dxFileDescCache) = bulkFileDescribe(bundle, dxProject)
    val dxProtocol = DxFileAccessProtocol(dxApi, dxFileDescCache)
    val fileResolver =
      FileSourceResolver.create(userProtocols = Vector(dxProtocol), logger = logger)
    // generate IR
    val bundle2: IR.Bundle = handleInputFiles(bundle, fileResolver, pathToDxFile, dxFileDescCache)
    // Up to this point, compilation does not require
    // the dx:project. This allows unit testing without
    // being logged in to the platform. For the native
    // pass the dx:project is required to establish
    // (1) the instance price list and database
    // (2) the output location of applets and workflows
    val wdlVarLinksConverter =
      WdlVarLinksConverter(dxApi, fileResolver, dxFileDescCache, bundle2.typeAliases)
    val cResults =
      compileNative(bundle2, folder, dxProject, runtimePathConfig, wdlVarLinksConverter)
    cResults.primaryCallable match {
      case None =>
        // No primary workflow: report the ids of everything that was built.
        val ids = cResults.execDict.map { case (_, r) => r.dxExec.getId }.mkString(",")
        (ids, None)
      case Some(wf) =>
        val treeReprOpt = execTree.map { treePrinter =>
          val tree = new Tree(cResults.execDict)
          treePrinter match {
            case PrettyTreePrinter =>
              Left(
                  Tree.generateTreeFromJson(tree.apply(wf).asJsObject)
              )
            case JsonTreePrinter => Right(tree.apply(wf)) // Convert to string
          }
        }
        (wf.dxExec.getId, treeReprOpt)
    }
  }
}
| dnanexus-rnd/dxWDL | src/main/scala/dx/compiler/Top.scala | Scala | apache-2.0 | 12,844 |
/*
* Created on 2010/04/24
* Copyright (c) 2010-2014, Wei-ju Wu.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of Wei-ju Wu nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.zmpp.glulx
import java.util.logging._
import java.util.List
import java.util.ArrayList
import org.zmpp.base._
/**
* Implementation of the Glulx Heap. The current implementation works like this:
* <ol>
* <li>
* Always allocate from the highest current address and append new blocks
* to the end of the memory block list. This ensures that the list is always
* sorted in ascending address order, so we can perform binary search on it
* This means allocate() is O(1)
* </li>
* <li>
* Lookup and removal are performed using binary search, so these operations
* can be done in O(log n)
* </li>
* <li>
* The binary search we use compares an address with the range that
* a memory block represents.
* </li>
* </ol>
*/
class MemoryHeap(val address: Int) extends Memory {
  val logger = Logger.getLogger("glulx")

  // Invariant: blocks are only ever appended at the current high-water mark
  // (see allocate), so this list stays sorted by ascending start address and
  // binary search over it is valid.
  private var _memoryBlocks: List[DefaultMemory] = new ArrayList[DefaultMemory]
  // Next address to hand out; grows monotonically — freed addresses are
  // never reused.
  private var _highAddress = address

  // Three-way comparison of addr against memblock's [address, address + size) range.
  private def compareAddress(addr: Int, memblock: DefaultMemory) = {
    if (addr < memblock.address) -1
    else if (addr >= memblock.address && addr < memblock.address + memblock.size) 0
    else 1
  }

  // Binary search over the sorted block list, O(log n).
  // Returns null when no allocated block contains addr.
  private def memblockAtRecursive(addr: Int, left: Int, right: Int): DefaultMemory = {
    if (left > right) null
    else {
      val middle = left + (right - left) / 2
      val compVal = compareAddress(addr, _memoryBlocks.get(middle))
      if (compVal == 0) _memoryBlocks.get(middle)
      else if (compVal < 0) memblockAtRecursive(addr, left, middle - 1)
      else memblockAtRecursive(addr, middle + 1, right)
    }
  }

  /** A heap has no single contiguous backing buffer; accesses go through blocks. */
  def buffer: Array[Byte] = throw new UnsupportedOperationException("buffer() not supported")

  /** Undefined for a heap; each block carries its own size. */
  def size: Int = 0

  /** The block containing addr, or null when addr is outside every allocated block. */
  def memblockAt(addr: Int): DefaultMemory =
    memblockAtRecursive(addr, 0, _memoryBlocks.size - 1)

  /** Exclusive upper bound of all addresses allocated so far. */
  def maxAddress: Int = _highAddress

  /**
   * Allocates a new block of the given size at the current high-water mark
   * and returns its start address. O(1): the block is appended to the end
   * of the (sorted) block list.
   */
  def allocate(size: Int): Int = {
    val blockAddress = _highAddress
    logger.info("ALLOCATE HEAP MEM WITH SIZE: %d ADDR: $%02x".format(size, blockAddress))
    _highAddress += size
    val block = DefaultMemory.create(blockAddress, size)
    _memoryBlocks.add(block)
    blockAddress
  }

  /**
   * Frees the block containing addr. If addr belongs to no block,
   * memblockAt yields null and ArrayList.remove(null) is a harmless no-op.
   */
  def free(addr: Int): Unit = {
    logger.info("FREE HEAP MEM AT ADDR: $%02x".format(addr))
    _memoryBlocks.remove(memblockAt(addr))
  }

  /** True while at least one block is still allocated. */
  def active: Boolean = _memoryBlocks.size > 0

  // Memory interface: each access is delegated to the block owning the
  // address. An address outside every block yields null from memblockAt and
  // therefore an NPE — i.e. an invalid heap access fails fast.
  def byteAt   (addr: Int): Int = memblockAt(addr).byteAt(addr)
  def setByteAt (addr: Int, value: Int): Unit = memblockAt(addr).setByteAt(addr, value)
  def shortAt  (addr: Int): Int = memblockAt(addr).shortAt(addr)
  def setShortAt(addr: Int, value: Int): Unit = memblockAt(addr).setShortAt(addr, value)
  def intAt    (addr: Int): Int = memblockAt(addr).intAt(addr)
  def setIntAt (addr: Int, value: Int): Unit = memblockAt(addr).setIntAt(addr, value)

  // Copy helpers delegate to the block containing the given offset.
  // NOTE(review): this assumes the copied region lies entirely within one
  // block — behavior for a region spanning blocks is the block's own.
  def copyBytesTo(dest: Array[Byte], srcOffset: Int, numBytes: Int): Unit = {
    memblockAt(srcOffset).copyBytesTo(dest, srcOffset, numBytes)
  }
  def copyBytesTo(dstOffset: Int, srcOffset: Int, numBytes: Int): Unit = {
    memblockAt(srcOffset).copyBytesTo(dstOffset, srcOffset, numBytes)
  }
  def copyBytesFrom(src: Array[Byte], srcOffset: Int, destOffset: Int, numBytes: Int): Unit = {
    memblockAt(destOffset).copyBytesFrom(src, srcOffset, destOffset, numBytes)
  }
}
| weiju/zmpp2 | zmpp-glulx/src/main/scala/org/zmpp/glulx/MemoryHeap.scala | Scala | bsd-3-clause | 5,011 |
package mesosphere.marathon
package core.task.tracker.impl
import akka.Done
import akka.actor.{Status, Terminated}
import akka.testkit.{TestActorRef, TestProbe}
import com.typesafe.config.ConfigFactory
import mesosphere.AkkaUnitTest
import mesosphere.marathon.core.instance.update.{InstanceUpdateOpResolver, InstanceUpdateOperation}
import mesosphere.marathon.core.instance.{Goal, Instance, TestInstanceBuilder}
import mesosphere.marathon.core.task.TaskCondition
import mesosphere.marathon.core.task.bus.TaskStatusUpdateTestHelper
import mesosphere.marathon.core.task.tracker.impl.InstanceTrackerActor.UpdateContext
import mesosphere.marathon.core.task.tracker.{InstanceTracker, InstanceTrackerUpdateStepProcessor}
import mesosphere.marathon.state.{AppDefinition, PathId}
import mesosphere.marathon.storage.repository.InstanceView
import mesosphere.marathon.test.{SettableClock, TestCrashStrategy}
import org.scalatest.concurrent.Eventually
import org.scalatest.prop.TableDrivenPropertyChecks.{Table, forAll}
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
/**
* Most of the functionality is tested at a higher level in [[mesosphere.marathon.tasks.InstanceTrackerImplTest]].
*/
class InstanceTrackerActorTest extends AkkaUnitTest with Eventually {
  // Stopping supervisor strategy: a failing actor is terminated rather than
  // restarted, so tests can watch for Terminated to observe escalation.
  override lazy val akkaConfig =
    ConfigFactory.parseString(""" akka.actor.guardian-supervisor-strategy = "akka.actor.StoppingSupervisorStrategy" """)
      .withFallback(ConfigFactory.load())
  // Metrics backends under test; every spec below runs once per table row.
  val metricsModules = Table(
    ("name", "module"),
    ("dropwizard", MetricsModule(AllConf.withTestConfig()))
  )
  forAll (metricsModules) { (name: String, metricsModule: MetricsModule) =>
    s"InstanceTrackerActor (metrics = $name)" should {
      "failures while loading the initial data are escalated" in {
        val f = new Fixture
        Given("a failing task loader")
        f.instancesLoader.load() returns Future.failed(new RuntimeException("severe simulated loading failure"))
        When("the task tracker starts")
        f.instanceTrackerActor
        Then("it will call the failing load method")
        verify(f.instancesLoader).load()
        And("it will eventually die")
        watch(f.instanceTrackerActor)
        expectMsgClass(classOf[Terminated]).getActor should be(f.instanceTrackerActor)
      }
      "answers with loaded data (empty)" in {
        val f = new Fixture
        Given("an empty task loader result")
        val appDataMap = InstanceTracker.InstancesBySpec.empty
        f.instancesLoader.load() returns Future.successful(appDataMap)
        When("the task tracker actor gets a List query")
        val probe = TestProbe()
        probe.send(f.instanceTrackerActor, InstanceTrackerActor.List)
        Then("it will eventually answer")
        probe.expectMsg(appDataMap)
      }
      "answers with loaded data (some data)" in {
        val f = new Fixture
        Given("a task loader with one running instance")
        val appId: PathId = PathId("/app")
        val instance = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val appDataMap = InstanceTracker.InstancesBySpec.forInstances(instance)
        f.instancesLoader.load() returns Future.successful(appDataMap)
        When("the task tracker actor gets a List query")
        val probe = TestProbe()
        probe.send(f.instanceTrackerActor, InstanceTrackerActor.List)
        Then("it will eventually answer")
        probe.expectMsg(appDataMap)
      }
      "correctly calculates metrics for loaded data" in {
        val f = new Fixture
        Given("an task loader with one staged and two running instances")
        val appId: PathId = PathId("/app")
        val staged = TestInstanceBuilder.newBuilder(appId).addTaskStaged().getInstance()
        val runningOne = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val runningTwo = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val appDataMap = InstanceTracker.InstancesBySpec.forInstances(staged, runningOne, runningTwo)
        f.instancesLoader.load() returns Future.successful(appDataMap)
        When("the task tracker has started up")
        val probe = TestProbe()
        // The List round-trip guarantees initial loading has completed
        // before the metric values are inspected.
        probe.send(f.instanceTrackerActor, InstanceTrackerActor.List)
        probe.expectMsg(appDataMap)
        Then("it will have set the correct metric counts")
        f.actorMetrics.runningTasksMetric.value should be(2)
        f.actorMetrics.stagedTasksMetric.value should be(1)
      }
      "correctly updates metrics for staged task gets deleted" in {
        val f = new Fixture
        Given("an task loader with one staged and two running instances")
        val appId: PathId = PathId("/app")
        val staged = TestInstanceBuilder.newBuilder(appId).addTaskStaged().getInstance()
        val runningOne = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val runningTwo = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val appDataMap = InstanceTracker.InstancesBySpec.forInstances(staged, runningOne, runningTwo)
        f.instancesLoader.load() returns Future.successful(appDataMap)
        When("staged task gets deleted")
        val probe = TestProbe()
        val helper = TaskStatusUpdateTestHelper.killed(staged)
        val update = helper.operation.asInstanceOf[InstanceUpdateOperation.MesosUpdate]
        probe.send(f.instanceTrackerActor, UpdateContext(f.clock.now() + 3.days, update))
        probe.expectMsg(helper.effect)
        Then("it will have set the correct metric counts")
        f.actorMetrics.runningTasksMetric.value should be(2)
        f.actorMetrics.stagedTasksMetric.value should be(0)
      }
      "correctly updates metrics for running task gets deleted" in {
        val f = new Fixture
        Given("an task loader with one staged and two running instances")
        val appId: PathId = PathId("/app")
        val staged = TestInstanceBuilder.newBuilder(appId).addTaskStaged().getInstance()
        val runningOne = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val runningTwo = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val appDataMap = InstanceTracker.InstancesBySpec.forInstances(staged, runningOne, runningTwo)
        f.instancesLoader.load() returns Future.successful(appDataMap)
        When("running task gets deleted")
        val probe = TestProbe()
        val helper = TaskStatusUpdateTestHelper.killed(runningOne)
        val update = helper.operation.asInstanceOf[InstanceUpdateOperation.MesosUpdate]
        probe.send(f.instanceTrackerActor, UpdateContext(f.clock.now() + 3.days, update))
        probe.expectMsg(helper.effect)
        Then("it will have set the correct metric counts")
        f.actorMetrics.runningTasksMetric.value should be(1)
        f.actorMetrics.stagedTasksMetric.value should be(1)
        And("update steps have been processed 2 times")
        verify(f.stepProcessor, times(1)).process(any)(any[ExecutionContext])
      }
      "correctly updates metrics for updated tasks" in {
        val f = new Fixture
        Given("an task loader with one staged and two running instances")
        val appId: PathId = PathId("/app")
        val staged = TestInstanceBuilder.newBuilder(appId).addTaskStaged().getInstance()
        val runningOne = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val runningTwo = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val appDataMap = InstanceTracker.InstancesBySpec.forInstances(staged, runningOne, runningTwo)
        f.instancesLoader.load() returns Future.successful(appDataMap)
        When("staged task transitions to running")
        val probe = TestProbe()
        // Build a running twin of the staged instance (same instance id) to
        // produce a realistic Mesos status transition.
        val stagedInstanceNowRunning = TestInstanceBuilder.newBuilderWithInstanceId(staged.instanceId).addTaskRunning().getInstance()
        val (_, stagedTaskNowRunning) = stagedInstanceNowRunning.tasksMap.head
        val mesosStatus = stagedTaskNowRunning.status.mesosStatus.get
        val helper = TaskStatusUpdateTestHelper.taskUpdateFor(staged, TaskCondition(mesosStatus), mesosStatus)
        val update = helper.operation
        probe.send(f.instanceTrackerActor, UpdateContext(f.clock.now() + 3.days, update))
        probe.expectMsg(helper.effect)
        Then("it will have set the correct metric counts")
        f.actorMetrics.runningTasksMetric.value should be(3)
        f.actorMetrics.stagedTasksMetric.value should be(0)
        And("update steps are processed")
        verify(f.stepProcessor).process(any)(any[ExecutionContext])
      }
      "correctly updates metrics for created tasks" in {
        val f = new Fixture
        Given("an task loader with one staged and two running instances")
        val appId: PathId = PathId("/app")
        val appDef = AppDefinition(id = appId)
        val staged = TestInstanceBuilder.newBuilder(appId).addTaskStaged().getInstance()
        val scheduled = Instance.scheduled(appDef)
        val runningOne = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val runningTwo = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val appDataMap = InstanceTracker.InstancesBySpec.forInstances(staged, runningOne, runningTwo, scheduled)
        f.instancesLoader.load() returns Future.successful(appDataMap)
        When("a new staged task gets added")
        val probe = TestProbe()
        val helper = TaskStatusUpdateTestHelper.provision(scheduled, f.clock.now())
        val update = helper.operation
        probe.send(f.instanceTrackerActor, UpdateContext(f.clock.now() + 3.days, update))
        probe.expectMsg(helper.effect)
        Then("it will have set the correct metric counts")
        eventually {
          f.actorMetrics.runningTasksMetric.value should be(2)
          f.actorMetrics.stagedTasksMetric.value should be(2)
        }
        And("update steps are processed")
        verify(f.stepProcessor).process(any)(any[ExecutionContext])
      }
      "updates repository as well as internal state for instance update" in {
        Given("an task loader with one staged and two running instances")
        val f = new Fixture
        val appId: PathId = PathId("/app")
        val appDef = AppDefinition(id = appId)
        val staged = TestInstanceBuilder.newBuilder(appId).addTaskStaged().getInstance()
        val scheduled = Instance.scheduled(appDef)
        val runningOne = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val runningTwo = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val appDataMap = InstanceTracker.InstancesBySpec.forInstances(staged, runningOne, runningTwo, scheduled)
        f.instancesLoader.load() returns Future.successful(appDataMap)
        val probe = TestProbe()
        val helper = TaskStatusUpdateTestHelper.provision(scheduled, f.clock.now())
        val update = UpdateContext(f.clock.now() + 3.days, helper.operation)
        When("Instance update is received")
        probe.send(f.instanceTrackerActor, update)
        probe.expectMsg(helper.effect)
        Then("instance repository save is called")
        verify(f.repository).store(helper.wrapped.instance)
        And("internal state is updated")
        probe.send(f.instanceTrackerActor, InstanceTrackerActor.List)
        probe.expectMsg(InstanceTracker.InstancesBySpec.forInstances(staged, runningOne, runningTwo, helper.wrapped.instance))
      }
      "fails when repository call fails for update" in {
        val f = new Fixture
        Given("an task loader with one staged and two running instances")
        val appId: PathId = PathId("/app")
        val scheduled = Instance.scheduled(AppDefinition(appId))
        val appDataMap = InstanceTracker.InstancesBySpec.forInstances(scheduled)
        f.instancesLoader.load() returns Future.successful(appDataMap)
        And("repository that returns error for store operation")
        f.repository.store(any) returns Future.failed(new RuntimeException("fail"))
        When("an update to provisioned is sent")
        val probe = TestProbe()
        val helper = TaskStatusUpdateTestHelper.provision(scheduled, f.clock.now())
        val update = UpdateContext(f.clock.now() + 3.days, helper.operation)
        probe.send(f.instanceTrackerActor, update)
        Then("Failure message is received")
        probe.fishForSpecificMessage() {
          case _: Status.Failure => true
          case _ => false
        }
        And("Internal state did not change")
        probe.send(f.instanceTrackerActor, InstanceTrackerActor.List)
        probe.expectMsg(appDataMap)
      }
      "updates repository as well as internal state for instance expunge" in {
        Given("a task loader with update operation received")
        val f = new Fixture
        val appId: PathId = PathId("/app")
        val running = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        // Goal.Decommissioned means a terminal Mesos update expunges the
        // instance instead of merely updating it.
        val runningDecommissioned = running.copy(state = running.state.copy(goal = Goal.Decommissioned))
        val appDataMap = InstanceTracker.InstancesBySpec.forInstances(runningDecommissioned)
        f.instancesLoader.load() returns Future.successful(appDataMap)
        When("a running and decommissioned task is killed")
        val probe = TestProbe()
        val helper = TaskStatusUpdateTestHelper.killed(runningDecommissioned)
        val update = helper.operation.asInstanceOf[InstanceUpdateOperation.MesosUpdate]
        And("and expunged")
        probe.send(f.instanceTrackerActor, UpdateContext(f.clock.now() + 3.days, update))
        probe.expectMsg(helper.effect)
        Then("repository is updated")
        verify(f.repository).delete(helper.wrapped.id)
        And("internal state is updated")
        probe.send(f.instanceTrackerActor, InstanceTrackerActor.List)
        probe.expectMsg(InstanceTracker.InstancesBySpec.empty)
      }
      "fails after failure during repository call to expunge" in {
        val f = new Fixture
        Given("an task instance tracker with initial state")
        val appId: PathId = PathId("/app")
        val running = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
        val runningDecommissioned = running.copy(state = running.state.copy(goal = Goal.Decommissioned))
        val appDataMap = InstanceTracker.InstancesBySpec.forInstances(runningDecommissioned)
        f.instancesLoader.load() returns Future.successful(appDataMap)
        When("a task in decommissioned gets killed")
        val probe = TestProbe()
        val instance = TestInstanceBuilder.newBuilder(appId).addTaskStaged().getInstance()
        val helper = TaskStatusUpdateTestHelper.killed(instance)
        val update = helper.operation.asInstanceOf[InstanceUpdateOperation.MesosUpdate]
        And("repository store operation fails")
        f.repository.delete(instance.instanceId) returns Future.failed(new RuntimeException("fail"))
        probe.send(f.instanceTrackerActor, UpdateContext(f.clock.now() + 3.days, update))
        Then("failure message is sent")
        probe.fishForSpecificMessage() {
          case _: Status.Failure => true
          case _ => false
        }
        And("internal state did not change")
        probe.send(f.instanceTrackerActor, InstanceTrackerActor.List)
        probe.expectMsg(appDataMap)
      }
    }
    // Mocks and wiring for a single InstanceTrackerActor under test.
    // Defined inside forAll so it captures the metricsModule of the row.
    class Fixture {
      val clock = SettableClock.ofNow()
      val updateResolver = new InstanceUpdateOpResolver(clock)
      lazy val instancesLoader = mock[InstancesLoader]
      lazy val stepProcessor = mock[InstanceTrackerUpdateStepProcessor]
      lazy val metrics = metricsModule.metrics
      lazy val actorMetrics = new InstanceTrackerActor.ActorMetrics(metrics)
      lazy val repository = mock[InstanceView]
      // Default stubs: repository operations succeed; individual tests
      // override these to simulate failures.
      repository.store(any) returns Future.successful(Done)
      repository.delete(any) returns Future.successful(Done)
      val emptyInstances = InstanceTracker.InstancesBySpec.empty
      val crashStrategy = new TestCrashStrategy
      instancesLoader.load() returns Future.successful(emptyInstances)
      stepProcessor.process(any)(any[ExecutionContext]) returns Future.successful(Done)
      // lazy: tests stub the loader first, then touching this field starts
      // the actor with the stubbed initial state.
      lazy val instanceTrackerActor = TestActorRef[InstanceTrackerActor](InstanceTrackerActor.props(actorMetrics, instancesLoader, stepProcessor, updateResolver, repository, clock, crashStrategy))
      def verifyNoMoreInteractions(): Unit = {
        noMoreInteractions(instancesLoader)
        reset(instancesLoader)
      }
    }
  }
}
| gsantovena/marathon | src/test/scala/mesosphere/marathon/core/task/tracker/impl/InstanceTrackerActorTest.scala | Scala | apache-2.0 | 16,649 |
package controllers
import play.api.data.Form
import play.api.mvc._
import lila.api.Context
import lila.app._
import lila.qa.{ QuestionId, Question, AnswerId, Answer, QaAuth }
import views._
object QaAnswer extends QaController {

  // Creates an answer for question `id` from the submitted form, then
  // redirects to the question page anchored at the new answer.
  def create(id: QuestionId) = AuthBody { implicit ctx =>
    me =>
      WithQuestion(id) { q =>
        implicit val req = ctx.body
        forms.answer.bindFromRequest.fold(
          err => renderQuestion(q, Some(err)),
          data => api.answer.create(data, q, me) map { answer =>
            Redirect(routes.QaQuestion.show(q.id, q.slug) + "#answer-" + answer.id)
          }
        )
      }
  }

  // Accepts an answer; only users that QaAuth allows to edit the question
  // may accept, anything else is a 404.
  def accept(questionId: QuestionId, answerId: AnswerId) = AuthBody { implicit ctx =>
    me =>
      (api.question findById questionId) zip (api.answer findById answerId) flatMap {
        case (Some(q), Some(a)) if (QaAuth canEdit q) =>
          api.answer.accept(q, a) inject Redirect(routes.QaQuestion.show(q.id, q.slug))
        case _ => notFound
      }
  }

  // Registers a vote on an answer; the form value 1 is passed as an up-vote
  // flag, anything else as a down-vote.
  def vote(questionId: QuestionId, answerId: AnswerId) = AuthBody { implicit ctx =>
    me =>
      implicit val req = ctx.body
      forms.vote.bindFromRequest.fold(
        err => fuccess(BadRequest),
        v => api.answer.vote(answerId, me, v == 1) map {
          case Some(vote) => Ok(html.qa.vote(routes.QaAnswer.vote(questionId, answerId).url, vote, true))
          case None => NotFound
        }
      )
  }

  // Edits the body of the caller's own answer (WithOwnAnswer guards ownership).
  def doEdit(questionId: QuestionId, answerId: AnswerId) = AuthBody { implicit ctx =>
    me =>
      WithOwnAnswer(questionId, answerId) { q =>
        a =>
          implicit val req = ctx.body
          forms.editAnswer.bindFromRequest.fold(
            err => renderQuestion(q),
            body => api.answer.edit(body, a.id) map {
              case None => NotFound
              case Some(a2) => Redirect(routes.QaQuestion.show(q.id, q.slug) + "#answer-" + a2.id)
            }
          )
      }
  }

  // Moderator-only removal of an answer; the deletion is recorded in the
  // moderation log before redirecting back to the question.
  def remove(questionId: QuestionId, answerId: AnswerId) = Secure(_.ModerateQa) { implicit ctx =>
    me =>
      OptionFuRedirect(api.answer findById answerId) { a =>
        (api.answer remove a.id) >>
          Env.mod.logApi.deleteQaAnswer(me.id, a.userId, a.body) inject
          routes.QaQuestion.show(questionId, "redirect")
      }
  }

  // Converts the caller's own answer into a comment, either on the question
  // itself (form value "question") or on another answer (numeric answer id).
  def moveTo(questionId: QuestionId, answerId: AnswerId) = AuthBody { implicit ctx =>
    me =>
      WithOwnAnswer(questionId, answerId) { q =>
        a =>
          implicit val req = ctx.body
          forms.moveAnswer.bindFromRequest.fold(
            err => renderQuestion(q), {
              case "question" => api.answer.moveToQuestionComment(a, q) inject
                Redirect(routes.QaQuestion.show(q.id, q.slug))
              case str => parseIntOption(str).fold(renderQuestion(q)) { answerId =>
                api.answer.moveToAnswerComment(a, answerId) inject
                  Redirect(routes.QaQuestion.show(q.id, q.slug))
              }
            }
          )
      }
  }
}
| terokinnunen/lila | app/controllers/QaAnswer.scala | Scala | mit | 2,996 |
package com.twitter.finagle.pushsession
import com.twitter.finagle.Status
import com.twitter.finagle.ssl.session.SslSessionInfo
import com.twitter.util.{Future, Time, Try}
import java.net.SocketAddress
import java.util.concurrent.Executor
/**
* Base proxy implementation for [[PushChannelHandle]]
*
* Implementations should override methods as appropriate.
*/
abstract class PushChannelHandleProxy[In, Out](underlying: PushChannelHandle[In, Out])
    extends PushChannelHandle[In, Out] {

  // Every member below delegates verbatim to `underlying`; subclasses
  // override only the members whose behavior they need to change.

  def serialExecutor: Executor = underlying.serialExecutor

  def registerSession(newSession: PushSession[In, Out]): Unit =
    underlying.registerSession(newSession)

  // Write path: all four send variants are forwarded unchanged.
  def send(messages: Iterable[Out])(onComplete: (Try[Unit]) => Unit): Unit =
    underlying.send(messages)(onComplete)

  def send(message: Out)(onComplete: (Try[Unit]) => Unit): Unit =
    underlying.send(message)(onComplete)

  def sendAndForget(message: Out): Unit = underlying.sendAndForget(message)

  def sendAndForget(messages: Iterable[Out]): Unit = underlying.sendAndForget(messages)

  // Connection metadata and lifecycle, all delegated.
  def sslSessionInfo: SslSessionInfo = underlying.sslSessionInfo

  def status: Status = underlying.status

  def remoteAddress: SocketAddress = underlying.remoteAddress

  def localAddress: SocketAddress = underlying.localAddress

  def onClose: Future[Unit] = underlying.onClose

  def close(deadline: Time): Future[Unit] = underlying.close(deadline)
}
| twitter/finagle | finagle-core/src/main/scala/com/twitter/finagle/pushsession/PushChannelHandleProxy.scala | Scala | apache-2.0 | 1,412 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package toolkit.neuralnetwork.util
import libcog._
object NormalizedLowPass {

  /**
   * Low-pass filter whose input weight starts at 1 and decays as 1/step
   * until it reaches `timeConstant`, after which it stays fixed. During
   * warm-up the output is therefore the running mean of the inputs seen so
   * far (weights 1, 1/2, 1/3, ...) before settling into a conventional
   * first-order low-pass with the given time constant.
   *
   * @param input        the field to filter
   * @param timeConstant steady-state input weight; must lie in [0, 1]
   * @return a recurrent field holding the filtered state
   */
  def apply(input: Field, timeConstant: Float): Field = {
    require(timeConstant >= 0f && timeConstant <= 1f,
      s"timeConstant must be in [0, 1], got $timeConstant")

    // Step: input weight, recurrent weight
    //  0:      1.00           0.00
    //  1:      0.50           0.50
    //  2:      0.33           0.67
    //  3:      0.25           0.75
    //  4:      0.20           0.80
    val counter = ScalarField(1f)
    counter <== counter + 1f

    // Input weight decays as 1/step but never drops below timeConstant.
    val inWeight = max(1f / counter, timeConstant)
    val recWeight = 1f - inWeight

    // Recurrent state: weighted blend of previous state and new input.
    val state = Field(input.fieldType)
    state <== recWeight * state + inWeight * input
    state
  }
}
| hpe-cct/cct-nn | src/main/scala/toolkit/neuralnetwork/util/NormalizedLowPass.scala | Scala | apache-2.0 | 1,221 |
trait Foo {
type A
type B
}
object Foo {
type Bar = Foo { type B = <ref>A }
} | jastice/intellij-scala | scala/scala-impl/testdata/resolve/failed/typeAlias/SCL13742.scala | Scala | apache-2.0 | 84 |
package com.arcusys.valamis.web.servlet.social.response
import com.arcusys.valamis.web.servlet.course.CourseResponse
// Kinds of entries that can appear in the social activity feed.
// NOTE(review): scala.Enumeration is discouraged in favor of a sealed ADT,
// but changing it would break serialized values and callers.
object Activities extends Enumeration{
  val Lesson, Course, Certificate, UserStatus, LiferayEntry = Value
}

// Common shape of a single activity-feed entry payload.
sealed trait ActivityObjectResponse {
  // Discriminator telling clients which concrete payload this is.
  def tpe: Activities.Value
  // Whether the client should render an image/logo for this entry.
  val withImage: Boolean
}

// A lesson (package) activity entry, optionally linked to its course.
case class ActivityPackageResponse(
  id: Long,
  title: String,
  logo: Option[String],
  course: Option[CourseResponse],
  comment: Option[String],
  tpe: Activities.Value = Activities.Lesson,
  override val withImage: Boolean = true,
  url: Option[String]
) extends ActivityObjectResponse

// A course activity entry.
case class ActivityCourseResponse(
  id: Long,
  title: String,
  logoCourse: Option[String],
  tpe: Activities.Value = Activities.Course,
  override val withImage: Boolean = true
) extends ActivityObjectResponse

// A certificate activity entry.
case class ActivityCertificateResponse(
  id: Long,
  title: String,
  logo: Option[String],
  tpe: Activities.Value = Activities.Certificate,
  override val withImage: Boolean = true,
  url: Option[String]
) extends ActivityObjectResponse

// A plain user-status update (text only, no image).
case class ActivityUserStatusResponse(
  comment: String,
  tpe: Activities.Value = Activities.UserStatus,
  override val withImage:Boolean = false
) extends ActivityObjectResponse

// A native Liferay activity entry.
case class LActivityEntryResponse(
  id: Long,
  title: String,
  body: String,
  tpe: Activities.Value = Activities.LiferayEntry,
  liferayEntry: Boolean = true,
  override val withImage:Boolean = false
) extends ActivityObjectResponse | igor-borisov/valamis | valamis-portlets/src/main/scala/com/arcusys/valamis/web/servlet/social/response/ActivityObjectResponse.scala | Scala | gpl-3.0 | 1,489 |
import sbt._
// sbt 0.7-style plugin definition: declares the build's plugin dependencies.
class Plugins(info: ProjectInfo) extends PluginDefinition(info) {
  // Pulls in the sbt Android plugin.
  val android = "org.scala-tools.sbt" % "sbt-android-plugin" % "0.5.1"
}
| mike-burns/ohlaunch | project/plugins/plugins.scala | Scala | bsd-3-clause | 153 |
/*
* Original implementation (C) 2016 Eugene Yokota
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package giter8
import org.scalacheck._
import sbt.io._, syntax._
object FormatSpecification extends Properties("Format") {
  import Prop.forAll

  // Text containing neither '$' nor '\' passes through conversion unchanged.
  property("plainConversion") = forAll(nonDollar) { x =>
    conversion(x, Map.empty[String, String]) == x
  }

  // An escaped dollar ("\$") renders as a literal '$'.
  property("escapeDollar") = forAll(asciiString) { (x) =>
    conversion("\\$" + x, Map.empty[String, String]) == "$" + x
  }

  // $key;format="upper"$ substitutes the upper-cased value.
  property("formatUppercase") = forAll(asciiString, asciiString, nonDollar) { (x, y, z) =>
    conversion(s"""$$$x;format="upper"$$$z""", Map(x -> y)) == y.toUpperCase + z
  }

  // $key;format="lower"$ substitutes the lower-cased value.
  property("formatLowercase") = forAll(asciiString, asciiString, nonDollar) { (x, y, z) =>
    conversion(s"""$$$x;format="lower"$$$z""", Map(x -> y)) == y.toLowerCase + z
  }

  // snake format replaces '-' separators with underscores.
  property("formatSnakecase") =
    conversion("""$x;format="snake"$""", Map("x" -> "My-Example-Project")) == "My_Example_Project"

  // Hiragana code points, used to exercise non-ASCII pass-through.
  lazy val hiragana = (0x3041 to 0x3094).toList

  // Characters excluding '$' (0x24) and '\' (0x5c), so generated text
  // contains no template syntax.
  lazy val nonDollarChar: Gen[Char] = Gen.oneOf(
      ((0x20 to 0xff).toList ::: hiragana).filter(x => Character.isDefined(x) && x != 0x24 && x != 0x5c).map(_.toChar))
  lazy val nonDollar: Gen[String] = Gen.sized { size =>
    Gen.listOfN(size, nonDollarChar).map(_.mkString)
  } filter { _.nonEmpty }

  // Non-empty strings of ASCII letters only — safe to use as template keys.
  lazy val asciiChar: Gen[Char] =
    Gen.oneOf(((0x41 to 0x5a).toList ::: (0x61 to 0x7a).toList).filter(x => Character.isDefined(x)).map(_.toChar))
  lazy val asciiString: Gen[String] = Gen.sized { size =>
    Gen.listOfN(size, asciiChar).map(_.mkString)
  } filter { _.nonEmpty }

  // Runs the g8 template engine on a single file in a fresh temp directory
  // and returns the rendered content. Synchronized — presumably to keep
  // concurrently evaluated properties from interfering via the filesystem;
  // TODO confirm.
  def conversion(inContent: String, ps: Map[String, String]): String = synchronized {
    IO.withTemporaryDirectory { tempDir =>
      val in = tempDir / "in.txt"
      val out = tempDir / "out.txt"
      IO.write(in, inContent, IO.utf8)
      G8(in, out, tempDir, ps)
      val outContent = IO.read(out, IO.utf8)
      // println(outContent)
      outContent
    }
  }
}
| wolfendale/giter8 | library/src/test/scala/FormatSpecification.scala | Scala | apache-2.0 | 2,494 |
package org.openmole.gui.client.core
import org.openmole.gui.ext.data.GUIPluginAsJS
import org.openmole.gui.ext.api.Api
import org.openmole.gui.ext.data.{ AllPluginExtensionData, AuthenticationPluginFactory, GUIPluginFactory, WizardPluginFactory }
import autowire._
import scala.concurrent.ExecutionContext.Implicits.global
import boopickle.Default._
import rx._
import scala.scalajs.js
import scala.scalajs.js.annotation.JSExportTopLevel
/*
* Copyright (C) 30/11/16 // mathieu.leclaire@openmole.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
object Plugins {

  // Fetches the GUI plugin descriptors from the server, materializes the
  // authentication and wizard factories, and hands them to the callback.
  def fetch(f: Parameters ⇒ Unit) = {
    Post()[Api].getGUIPlugins.call().foreach { p ⇒
      val authFact = p.authentications.map { gp ⇒ Plugins.buildJSObject[AuthenticationPluginFactory](gp) }
      val wizardFactories = p.wizards.map { gp ⇒ Plugins.buildJSObject[WizardPluginFactory](gp) }
      f(Parameters(authFact, wizardFactories))
    }
  }

  // Resolves a plugin's exported JS object by evaluating the second-to-last
  // segment of its qualified name and casting to the expected factory type.
  // NOTE(review): relies on js.eval of a server-provided name — acceptable
  // only if the descriptor source is trusted; verify it is not user-controlled.
  def buildJSObject[T](obj: GUIPluginAsJS) = {
    scalajs.js.eval(s"${obj.jsObject.split('.').takeRight(2).head}").asInstanceOf[T]
  }

  // Factories extracted from the server-side plugin registry.
  case class Parameters(authenticationFactories: Seq[AuthenticationPluginFactory], wizardFactories: Seq[WizardPluginFactory])
}
| openmole/openmole | openmole/gui/client/org.openmole.gui.client.core/src/main/scala/org/openmole/gui/client/core/Plugins.scala | Scala | agpl-3.0 | 1,816 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui
import java.net.{URI, URL}
import javax.servlet.DispatcherType
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
import scala.collection.mutable.ArrayBuffer
import scala.language.implicitConversions
import scala.xml.Node
import org.eclipse.jetty.client.api.Response
import org.eclipse.jetty.client.HttpClient
import org.eclipse.jetty.client.http.HttpClientTransportOverHTTP
import org.eclipse.jetty.proxy.ProxyServlet
import org.eclipse.jetty.server.{HttpConnectionFactory, Request, Server, ServerConnector}
import org.eclipse.jetty.server.handler._
import org.eclipse.jetty.servlet._
import org.eclipse.jetty.servlets.gzip.GzipHandler
import org.eclipse.jetty.util.component.LifeCycle
import org.eclipse.jetty.util.thread.{QueuedThreadPool, ScheduledExecutorScheduler}
import org.json4s.JValue
import org.json4s.jackson.JsonMethods.{pretty, render}
import org.apache.spark.{SecurityManager, SparkConf, SSLOptions}
import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
* Utilities for launching a web server using Jetty's HTTP Server class
*/
private[spark] object JettyUtils extends Logging {
// Connector names used as Jetty virtual-host tags: handlers bound to the Spark
// connector serve real content; the redirect connector only bounces HTTP to HTTPS.
val SPARK_CONNECTOR_NAME = "Spark"
val REDIRECT_CONNECTOR_NAME = "HttpsRedirect"

// Base type for a function that returns something based on an HTTP request. Allows for
// implicit conversion from many types of functions to jetty Handlers.
type Responder[T] = HttpServletRequest => T

// Bundles a responder with its content type and a function that renders the
// responder's result to the response body (defaults to toString).
class ServletParams[T <% AnyRef](val responder: Responder[T],
  val contentType: String,
  val extractFn: T => String = (in: Any) => in.toString) {}

// Conversions from various types of Responder's to appropriate servlet parameters:
// JSON responders render via json4s, HTML responders are prefixed with a doctype,
// and String responders are served as plain text.
implicit def jsonResponderToServlet(responder: Responder[JValue]): ServletParams[JValue] =
  new ServletParams(responder, "text/json", (in: JValue) => pretty(render(in)))

implicit def htmlResponderToServlet(responder: Responder[Seq[Node]]): ServletParams[Seq[Node]] =
  new ServletParams(responder, "text/html", (in: Seq[Node]) => "<!DOCTYPE html>" + in.toString)

implicit def textResponderToServlet(responder: Responder[String]): ServletParams[String] =
  new ServletParams(responder, "text/plain")
/**
 * Build an HttpServlet that serves GET requests by rendering the result of
 * `servletParams.responder`, after checking UI view permissions with `securityMgr`.
 * Unauthorized users get 403; IllegalArgumentException from the responder maps to 400;
 * other exceptions are logged and rethrown. TRACE is rejected (SPARK-5983).
 */
def createServlet[T <% AnyRef](
    servletParams: ServletParams[T],
    securityMgr: SecurityManager,
    conf: SparkConf): HttpServlet = {

  // SPARK-10589 avoid frame-related click-jacking vulnerability, using X-Frame-Options
  // (see http://tools.ietf.org/html/rfc7034). By default allow framing only from the
  // same origin, but allow framing for a specific named URI.
  // Example: spark.ui.allowFramingFrom = https://example.com/
  val allowFramingFrom = conf.getOption("spark.ui.allowFramingFrom")
  val xFrameOptionsValue =
    allowFramingFrom.map(uri => s"ALLOW-FROM $uri").getOrElse("SAMEORIGIN")

  new HttpServlet {
    override def doGet(request: HttpServletRequest, response: HttpServletResponse) {
      try {
        if (securityMgr.checkUIViewPermissions(request.getRemoteUser)) {
          response.setContentType("%s;charset=utf-8".format(servletParams.contentType))
          response.setStatus(HttpServletResponse.SC_OK)
          val result = servletParams.responder(request)
          // Headers must be set before the body is written.
          response.setHeader("Cache-Control", "no-cache, no-store, must-revalidate")
          response.setHeader("X-Frame-Options", xFrameOptionsValue)
          response.getWriter.print(servletParams.extractFn(result))
        } else {
          // NOTE(review): sendError below also sets the status code, so this setStatus
          // call appears redundant (harmless, kept as-is).
          response.setStatus(HttpServletResponse.SC_FORBIDDEN)
          response.setHeader("Cache-Control", "no-cache, no-store, must-revalidate")
          response.sendError(HttpServletResponse.SC_FORBIDDEN,
            "User is not authorized to access this page.")
        }
      } catch {
        case e: IllegalArgumentException =>
          response.sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage)
        case e: Exception =>
          logWarning(s"GET ${request.getRequestURI} failed: $e", e)
          throw e
      }
    }
    // SPARK-5983 ensure TRACE is not supported
    protected override def doTrace(req: HttpServletRequest, res: HttpServletResponse): Unit = {
      res.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED)
    }
  }
}
/**
 * Create a context handler that responds to requests under the given path prefix,
 * delegating to a servlet built from `servletParams` with permission checks applied.
 */
def createServletHandler[T <% AnyRef](
    path: String,
    servletParams: ServletParams[T],
    securityMgr: SecurityManager,
    conf: SparkConf,
    basePath: String = ""): ServletContextHandler = {
  val servlet = createServlet(servletParams, securityMgr, conf)
  createServletHandler(path, servlet, basePath)
}
/**
 * Create a context handler that mounts `servlet` at `basePath + path`.
 * The bare root path with no base is kept as "/"; otherwise a trailing slash
 * is stripped so Jetty context paths stay canonical.
 */
def createServletHandler(
    path: String,
    servlet: HttpServlet,
    basePath: String): ServletContextHandler = {
  val contextPath =
    if (basePath == "" && path == "/") path
    else (basePath + path).stripSuffix("/")
  val handler = new ServletContextHandler
  handler.setContextPath(contextPath)
  handler.addServlet(new ServletHolder(servlet), "/")
  handler
}
/**
 * Create a handler that always redirects the user to `basePath + destPath`.
 * Only the HTTP methods listed in `httpMethods` trigger the redirect; others get 405.
 * `beforeRedirect` runs (e.g. for side effects like killing a stage) before redirecting.
 */
def createRedirectHandler(
    srcPath: String,
    destPath: String,
    beforeRedirect: HttpServletRequest => Unit = x => (),
    basePath: String = "",
    httpMethods: Set[String] = Set("GET")): ServletContextHandler = {
  val prefixedDestPath = basePath + destPath
  val servlet = new HttpServlet {
    override def doGet(request: HttpServletRequest, response: HttpServletResponse): Unit = {
      if (httpMethods.contains("GET")) {
        doRequest(request, response)
      } else {
        response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED)
      }
    }
    override def doPost(request: HttpServletRequest, response: HttpServletResponse): Unit = {
      if (httpMethods.contains("POST")) {
        doRequest(request, response)
      } else {
        response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED)
      }
    }
    private def doRequest(request: HttpServletRequest, response: HttpServletResponse): Unit = {
      beforeRedirect(request)
      // Make sure we don't end up with "//" in the middle: resolve the destination
      // against the request URL instead of concatenating strings.
      val newUrl = new URL(new URL(request.getRequestURL.toString), prefixedDestPath).toString
      response.sendRedirect(newUrl)
    }
    // SPARK-5983 ensure TRACE is not supported
    protected override def doTrace(req: HttpServletRequest, res: HttpServletResponse): Unit = {
      res.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED)
    }
  }
  createServletHandler(srcPath, servlet, basePath)
}
/**
 * Create a handler for serving files from a static classpath directory.
 * Throws if `resourceBase` cannot be resolved on the Spark class loader.
 */
def createStaticHandler(resourceBase: String, path: String): ServletContextHandler = {
  val handler = new ServletContextHandler
  handler.setInitParameter("org.eclipse.jetty.servlet.Default.gzip", "false")
  val holder = new ServletHolder(new DefaultServlet)
  val resource = Option(Utils.getSparkClassLoader.getResource(resourceBase))
    .getOrElse(throw new Exception("Could not find resource path for Web UI: " + resourceBase))
  holder.setInitParameter("resourceBase", resource.toString)
  handler.setContextPath(path)
  handler.addServlet(holder, "/")
  handler
}
/**
 * Create a handler for proxying requests to Workers and Application Drivers.
 * Requests under `prefix` are rewritten to `target`; Location response headers
 * pointing back at the target are rewritten to go through the proxy.
 */
def createProxyHandler(
    prefix: String,
    target: String): ServletContextHandler = {
  val servlet = new ProxyServlet {
    // Returning null tells ProxyServlet the request cannot be proxied (404 to client).
    override def rewriteTarget(request: HttpServletRequest): String = {
      val rewrittenURI = createProxyURI(
        prefix, target, request.getRequestURI(), request.getQueryString())
      if (rewrittenURI == null) {
        return null
      }
      if (!validateDestination(rewrittenURI.getHost(), rewrittenURI.getPort())) {
        return null
      }
      rewrittenURI.toString()
    }

    override def newHttpClient(): HttpClient = {
      // SPARK-21176: Use the Jetty logic to calculate the number of selector threads (#CPUs/2),
      // but limit it to 8 max.
      // Otherwise, it might happen that we exhaust the threadpool since in reverse proxy mode
      // a proxy is instantiated for each executor. If the head node has many processors, this
      // can quickly add up to an unreasonably high number of threads.
      val numSelectors = math.max(1, math.min(8, Runtime.getRuntime().availableProcessors() / 2))
      new HttpClient(new HttpClientTransportOverHTTP(numSelectors), null)
    }

    // Rewrite Location headers so redirects from the proxied server stay behind the proxy.
    override def filterServerResponseHeader(
        clientRequest: HttpServletRequest,
        serverResponse: Response,
        headerName: String,
        headerValue: String): String = {
      if (headerName.equalsIgnoreCase("location")) {
        val newHeader = createProxyLocationHeader(
          prefix, headerValue, clientRequest, serverResponse.getRequest().getURI())
        if (newHeader != null) {
          return newHeader
        }
      }
      super.filterServerResponseHeader(
        clientRequest, serverResponse, headerName, headerValue)
    }
  }
  val contextHandler = new ServletContextHandler
  val holder = new ServletHolder(servlet)
  contextHandler.setContextPath(prefix)
  contextHandler.addServlet(holder, "/")
  contextHandler
}
/**
 * Add servlet filters, if any, to the given list of ServletContextHandlers.
 * Filter class names come from `spark.ui.filters` (comma-separated). Each filter's
 * init parameters may be given in two forms:
 *  - `spark.<filter>.params` as a comma-separated list of `key=value` pairs, and
 *  - individual `spark.<filter>.param.<key>` entries.
 * Both forms are applied; the per-key form can override the list form.
 */
def addFilters(handlers: Seq[ServletContextHandler], conf: SparkConf) {
  val filters: Array[String] = conf.get("spark.ui.filters", "").split(',').map(_.trim())
  filters.foreach {
    case filter : String =>
      if (!filter.isEmpty) {
        logInfo("Adding filter: " + filter)
        val holder : FilterHolder = new FilterHolder()
        holder.setClassName(filter)
        // Get any parameters for each filter (comma-separated key=value list form).
        conf.get("spark." + filter + ".params", "").split(',').map(_.trim()).toSet.foreach {
          param: String =>
            if (!param.isEmpty) {
              val parts = param.split("=")
              if (parts.length == 2) holder.setInitParameter(parts(0), parts(1))
            }
        }
        // Per-key form: spark.<filter>.param.<name> = <value>.
        val prefix = s"spark.$filter.param."
        conf.getAll
          .filter { case (k, v) => k.length() > prefix.length() && k.startsWith(prefix) }
          .foreach { case (k, v) => holder.setInitParameter(k.substring(prefix.length()), v) }
        // Apply the filter to every dispatcher type on every handler.
        val enumDispatcher = java.util.EnumSet.of(DispatcherType.ASYNC, DispatcherType.ERROR,
          DispatcherType.FORWARD, DispatcherType.INCLUDE, DispatcherType.REQUEST)
        handlers.foreach { case(handler) => handler.addFilter(holder, "/*", enumDispatcher) }
      }
  }
}
/**
 * Attempt to start a Jetty server bound to the supplied hostName:port using the given
 * context handlers.
 *
 * If the desired port number is contended, continues incrementing ports until a free port is
 * found (via Utils.startServiceOnPort). Returns a ServerInfo holding the jetty Server,
 * the chosen HTTP port, the optional HTTPS port, and the mutable handler collection.
 *
 * When SSL is configured, the HTTP connector is demoted to a redirect-only connector and
 * all content handlers are bound to the HTTPS connector's virtual host.
 */
def startJettyServer(
    hostName: String,
    port: Int,
    sslOptions: SSLOptions,
    handlers: Seq[ServletContextHandler],
    conf: SparkConf,
    serverName: String = ""): ServerInfo = {

  addFilters(handlers, conf)

  // Wrap every handler in gzip compression and tag it with the Spark connector's
  // virtual host so it only serves on the content connector.
  val gzipHandlers = handlers.map { h =>
    h.setVirtualHosts(Array("@" + SPARK_CONNECTOR_NAME))
    val gzipHandler = new GzipHandler
    gzipHandler.setHandler(h)
    gzipHandler
  }

  // Bind to the given port, or throw a java.net.BindException if the port is occupied
  // (startServiceOnPort retries with incremented ports on failure).
  def connect(currentPort: Int): ((Server, Option[Int]), Int) = {
    val pool = new QueuedThreadPool
    if (serverName.nonEmpty) {
      pool.setName(serverName)
    }
    pool.setDaemon(true)

    val server = new Server(pool)
    val connectors = new ArrayBuffer[ServerConnector]()
    val collection = new ContextHandlerCollection

    // Create a connector on port currentPort to listen for HTTP requests
    val httpConnector = new ServerConnector(
      server,
      null,
      // Call this full constructor to set this, which forces daemon threads:
      new ScheduledExecutorScheduler(s"$serverName-JettyScheduler", true),
      null,
      -1,
      -1,
      new HttpConnectionFactory())
    httpConnector.setPort(currentPort)
    connectors += httpConnector

    val httpsConnector = sslOptions.createJettySslContextFactory() match {
      case Some(factory) =>
        // If the new port wraps around, do not try a privileged port.
        val securePort =
          if (currentPort != 0) {
            (currentPort + 400 - 1024) % (65536 - 1024) + 1024
          } else {
            0
          }
        val scheme = "https"
        // Create a connector on port securePort to listen for HTTPS requests
        val connector = new ServerConnector(server, factory)
        connector.setPort(securePort)
        connector.setName(SPARK_CONNECTOR_NAME)
        connectors += connector

        // redirect the HTTP requests to HTTPS port
        httpConnector.setName(REDIRECT_CONNECTOR_NAME)
        collection.addHandler(createRedirectHttpsHandler(connector, scheme))
        Some(connector)

      case None =>
        // No SSL, so the HTTP connector becomes the official one where all contexts bind.
        httpConnector.setName(SPARK_CONNECTOR_NAME)
        None
    }

    // As each acceptor and each selector will use one thread, the number of threads should at
    // least be the number of acceptors and selectors plus 1. (See SPARK-13776)
    var minThreads = 1
    connectors.foreach { connector =>
      // Currently we only use "SelectChannelConnector"
      // Limit the max acceptor number to 8 so that we don't waste a lot of threads
      connector.setAcceptQueueSize(math.min(connector.getAcceptors, 8))
      connector.setHost(hostName)
      // The number of selectors always equals to the number of acceptors
      minThreads += connector.getAcceptors * 2
    }
    pool.setMaxThreads(math.max(pool.getMaxThreads, minThreads))

    val errorHandler = new ErrorHandler()
    errorHandler.setShowStacks(true)
    errorHandler.setServer(server)
    server.addBean(errorHandler)

    gzipHandlers.foreach(collection.addHandler)
    server.setHandler(collection)

    server.setConnectors(connectors.toArray)
    try {
      server.start()
      ((server, httpsConnector.map(_.getLocalPort())), httpConnector.getLocalPort)
    } catch {
      case e: Exception =>
        // Clean up both server and pool on failure so startServiceOnPort can retry.
        server.stop()
        pool.stop()
        throw e
    }
  }

  val ((server, securePort), boundPort) = Utils.startServiceOnPort(port, connect, conf,
    serverName)
  ServerInfo(server, boundPort, securePort,
    server.getHandler().asInstanceOf[ContextHandlerCollection])
}
/**
 * Build the handler bound to the HTTP redirect connector: any insecure request is
 * answered with a redirect to the same path on the HTTPS connector's port.
 * Already-secure requests are left unhandled (so they fall through to real handlers).
 */
private def createRedirectHttpsHandler(
    httpsConnector: ServerConnector,
    scheme: String): ContextHandler = {
  val redirectHandler: ContextHandler = new ContextHandler
  redirectHandler.setContextPath("/")
  // Only serve on the redirect connector's virtual host.
  redirectHandler.setVirtualHosts(Array("@" + REDIRECT_CONNECTOR_NAME))
  redirectHandler.setHandler(new AbstractHandler {
    override def handle(
        target: String,
        baseRequest: Request,
        request: HttpServletRequest,
        response: HttpServletResponse): Unit = {
      if (baseRequest.isSecure) {
        return
      }
      val httpsURI = createRedirectURI(scheme, baseRequest.getServerName,
        httpsConnector.getLocalPort, baseRequest.getRequestURI, baseRequest.getQueryString)
      response.setContentLength(0)
      response.encodeRedirectURL(httpsURI)
      response.sendRedirect(httpsURI)
      baseRequest.setHandled(true)
    }
  })
  redirectHandler
}
/**
 * Rewrite a proxied request path to a URI on the proxy target.
 * Returns null when `path` does not start with `prefix` (request cannot be proxied).
 * The remainder of the path after `prefix` is appended to `target` (inserting a "/"
 * if needed), the original query string is carried over, and the result is normalized.
 */
def createProxyURI(prefix: String, target: String, path: String, query: String): URI = {
  if (!path.startsWith(prefix)) {
    null
  } else {
    val rest = path.substring(prefix.length())
    val sb = new StringBuilder(target)
    if (rest.nonEmpty) {
      if (!rest.startsWith("/")) {
        sb.append("/")
      }
      sb.append(rest)
    }
    val rewritten = URI.create(sb.toString())
    if (query == null) {
      rewritten.normalize()
    } else {
      new URI(
        rewritten.getScheme(),
        rewritten.getAuthority(),
        rewritten.getPath(),
        query,
        rewritten.getFragment()).normalize()
    }
  }
}
/**
 * Rewrite a Location header emitted by the proxied server so it points back through
 * the proxy. Returns null when the header does not reference the proxy target,
 * signalling the caller to leave the header untouched.
 */
def createProxyLocationHeader(
    prefix: String,
    headerValue: String,
    clientRequest: HttpServletRequest,
    targetUri: URI): String = {
  val targetPrefix = targetUri.getScheme() + "://" + targetUri.getAuthority()
  if (!headerValue.startsWith(targetPrefix)) {
    null
  } else {
    val proxiedPath = prefix + headerValue.substring(targetPrefix.length())
    clientRequest.getScheme() + "://" + clientRequest.getHeader("host") + proxiedPath
  }
}
// Create a new URI from the arguments. Bare IPv6 literals (contain ":" but are not
// already bracketed) are wrapped in "[...]" so the authority parses correctly.
private def createRedirectURI(
    scheme: String, server: String, port: Int, path: String, query: String) = {
  val host =
    if (server.contains(":") && !server.startsWith("[")) s"[$server]"
    else server
  new URI(scheme, s"$host:$port", path, query, null).toString
}
}
/**
 * Handle on a running Jetty server: the server itself, the bound HTTP port, the
 * optional HTTPS port, and the root handler collection to which UI tabs attach.
 */
private[spark] case class ServerInfo(
    server: Server,
    boundPort: Int,
    securePort: Option[Int],
    private val rootHandler: ContextHandlerCollection) {

  /** Attach a handler to the content connector and start it if the server is live. */
  def addHandler(handler: ContextHandler): Unit = {
    handler.setVirtualHosts(Array("@" + JettyUtils.SPARK_CONNECTOR_NAME))
    rootHandler.addHandler(handler)
    if (!handler.isStarted()) {
      handler.start()
    }
  }

  /** Detach a handler and stop it if it was started. */
  def removeHandler(handler: ContextHandler): Unit = {
    rootHandler.removeHandler(handler)
    if (handler.isStarted) {
      handler.stop()
    }
  }

  def stop(): Unit = {
    server.stop()
    // Stop the ThreadPool if it supports stop() method (through LifeCycle).
    // It is needed because stopping the Server won't stop the ThreadPool it uses.
    val threadPool = server.getThreadPool
    if (threadPool != null && threadPool.isInstanceOf[LifeCycle]) {
      threadPool.asInstanceOf[LifeCycle].stop
    }
  }
}
| spark0001/spark2.1.1 | core/src/main/scala/org/apache/spark/ui/JettyUtils.scala | Scala | apache-2.0 | 19,555 |
package p.q
// Helper class for the compiler test: identifies itself by its fully-qualified name.
class X {
  override def toString(): String = "p.q.X"
}
| scala/scala | test/files/neg/t10662b/pqx_1.scala | Scala | apache-2.0 | 62 |
/* Copyright 2009-2021 EPFL, Lausanne */
package stainless
package extraction
package methods
import stainless.ast.{Symbol, SymbolIdentifier}
import SymbolIdentifier.unsafeToSymbolIdentifier
/** Ensures invariants of ancestors are enforced by
* conjoining call to parent invariant in each invariant.
*/
/**
 * Extraction phase ensuring invariants of ancestors are enforced: each class
 * invariant is rewritten to conjoin a call to the nearest overridden parent
 * invariant with its own body.
 */
class SuperInvariants(override val s: Trees, override val t: Trees)
  (using override val context: inox.Context)
  extends oo.CachingPhase
    with IdentitySorts
    with SimpleFunctions
    with oo.IdentityTypeDefs
    with oo.IdentityClasses { self =>

  // A function's cached result depends on itself and on every ancestor invariant,
  // since any of them may be conjoined into its body.
  override protected final val funCache = new ExtractionCache({ (fd, context) =>
    FunctionKey(fd) + SetKey(context.ancestorsInvariants(fd).map(FunctionKey(_)))
  })

  override protected def getContext(symbols: s.Symbols) = new TransformerContext(self.s, self.t)(using symbols)

  protected class TransformerContext(override val s: self.s.type, override val t: self.t.type)
    (using val symbols: s.Symbols) extends oo.ConcreteTreeTransformer(s, t) {
    import s._
    import symbols._

    /** All invariant functions declared in strict ancestors of `fd`'s class. */
    def ancestorsInvariants(fd: s.FunDef): Set[s.FunDef] = {
      fd.getClassDef.map { cd =>
        cd.ancestors.flatMap(_.cd.methods).map(symbols.getFunction).filter(_.isInvariant).toSet
      }.getOrElse(Set.empty)
    }

    // NOTE(review): this override is a no-op (single wildcard case delegating to
    // super); it could be removed without changing behavior.
    override def transform(e: s.Expr): t.Expr = e match {
      case _ => super.transform(e)
    }

    /** Conjoin the super-invariant call into invariant methods; pass others through. */
    override def transform(fd: s.FunDef): t.FunDef = {
      if (!fd.isMethod || !fd.isInvariant) {
        super.transform(fd)
      } else {
        val nfd = superInvariantCall(fd).map { call =>
          fd.copy(fullBody = s.And(call, fd.fullBody).copiedFrom(fd.fullBody))
        }.getOrElse(fd)
        super.transform(nfd)
      }
    }

    /**
     * Walk up the ancestor chain from `tcd` to find the nearest class declaring a
     * method with the same symbol as `inv`, together with that invariant.
     */
    private def getSuperInvariant(tcd: s.TypedClassDef, inv: s.FunDef): Option[(s.TypedClassDef, s.FunDef)] = {
      val sym = inv.id.unsafeToSymbolIdentifier.symbol
      val superInv = tcd.cd.methods
        .find(_.symbol == sym)
        .map(fd => tcd -> symbols.getFunction(fd))

      superInv orElse {
        // Not declared here: recurse into the first parent, if any.
        tcd.parents.headOption.flatMap(getSuperInvariant(_, inv))
      }
    }

    /** Build `super.inv(...)` for the nearest overridden parent invariant, if one exists. */
    private def superInvariantCall(inv: s.FunDef): Option[s.Expr] = {
      require(inv.isMethod && inv.isInvariant)

      for {
        cd <- inv.getClassDef
        parent <- cd.ancestors.headOption
        (superClass, superInv) <- getSuperInvariant(parent, inv)
      } yield {
        s.MethodInvocation(
          s.Super(superClass.toType).copiedFrom(inv.fullBody),
          superInv.id,
          superInv.typeArgs,
          Seq.empty
        ).copiedFrom(inv.fullBody)
      }
    }
  }

  override protected def extractFunction(context: TransformerContext, fd: s.FunDef): t.FunDef = {
    context.transform(fd)
  }
}
object SuperInvariants {
  /**
   * Build the phase over a single tree language: source and target are the same
   * `Trees` instance, expressed via the structural refinement on the result type
   * so downstream pipelines keep the path-dependent identity `s.type = t.type = ts.type`.
   */
  def apply(ts: Trees)(using inox.Context): ExtractionPipeline {
    val s: ts.type
    val t: ts.type
  } = {
    class Impl(override val s: ts.type, override val t: ts.type) extends SuperInvariants(s, t)
    new Impl(ts, ts)
  }
}
| epfl-lara/stainless | core/src/main/scala/stainless/extraction/methods/SuperInvariants.scala | Scala | apache-2.0 | 3,064 |
// Verification benchmark fixture: exercises abstract type members refined in a
// subclass. The shape of these definitions is the point of the benchmark, so the
// code is intentionally minimal.
object TypeMembers1 {
  // Abstract type member `Bar` with an accessor returning it.
  abstract class Foo {
    type Bar
    def x: Bar
  }

  // Concrete refinement: Bar = Int, so `x` is an Int.
  case class SomeFoo() extends Foo {
    type Bar = Int
    def x: Int = 42
  }

  // Property checked by the verifier: the refined member is visible through SomeFoo.
  def test(sf: SomeFoo) = {
    assert(sf.x == 42)
  }
}
| epfl-lara/stainless | frontends/benchmarks/extraction/valid/TypeMembers1.scala | Scala | apache-2.0 | 219 |
package observatory.utils
import observatory.TestingContext
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FunSuite, Matchers}
@RunWith(classOf[JUnitRunner])
// Unit tests for Temperature.fahrenheitToCelsius at the freezing point and one
// negative / one positive sample, using the tolerance from TestingContext for
// the non-exact conversions.
class TemperatureTest extends FunSuite with Matchers with TestingContext {
  import Temperature._

  test("fahrenheitToCelsius should convert temperature to 0C correctly") {
    // 32°F is exactly 0°C, so no tolerance is needed here.
    fahrenheitToCelsius(32) shouldEqual 0
  }

  test("fahrenheitToCelsius should convert temperature to -23.33C correctly") {
    fahrenheitToCelsius(-10) shouldEqual -23.33 +- tolerance
  }

  test("fahrenheitToCelsius should convert temperature to 32.22C correctly") {
    fahrenheitToCelsius(90) shouldEqual 32.22 +- tolerance
  }
}
| masipauskas/coursera-scala | capstone/observatory/src/test/scala/observatory/utils/TemperatureTest.scala | Scala | unlicense | 717 |
package debop4s.timeperiod.timerange
import debop4s.timeperiod.TimeSpec._
import debop4s.timeperiod._
import debop4s.timeperiod.utils.Times
import org.joda.time.DateTime
/**
 * A time range covering exactly one calendar quarter of a given year.
 *
 * debop4s.timeperiod.timerange.QuarterRange
 * @author 배성혁 sunghyouk.bae@gmail.com
 * @since 2013. 12. 29. 오후 5:41
 */
@SerialVersionUID(-5373404703149628573L)
class QuarterRange(private[this] val _year: Int,
                   private[this] val _quarter: Quarter,
                   private[this] val _calendar: ITimeCalendar = DefaultTimeCalendar)
  extends QuarterTimeRange(_year, _quarter, 1, _calendar) {

  // Auxiliary constructors default to the current quarter / the quarter of `moment`.
  def this() = this(Times.today.getYear, Times.quarterOf(Times.today), DefaultTimeCalendar)

  def this(year: Int, quarter: Quarter) = this(year, quarter, DefaultTimeCalendar)

  def this(moment: DateTime) = this(moment.getYear, Times.quarterOf(moment), DefaultTimeCalendar)

  def this(moment: DateTime, calendar: ITimeCalendar) = this(moment.getYear, Times.quarterOf(moment), calendar)

  def year: Int = startYear

  // Java-style accessor alias for `year`.
  def getYear = year

  def quarter: Quarter = startQuarter

  // Java-style accessor alias for `quarter`.
  def getQuarter = quarter

  /** Range shifted by `quarters` quarters (may cross year boundaries; negative allowed). */
  def addQuarters(quarters: Int): QuarterRange = {
    val yq = Times.addQuarter(startYear, startQuarter, quarters)
    new QuarterRange(yq.year, yq.quarter, calendar)
  }

  def nextQuarter: QuarterRange = addQuarters(1)

  def previousQuarter: QuarterRange = addQuarters(-1)
}
object QuarterRange {

  /** Range for the current quarter. */
  def apply(): QuarterRange = apply(Times.now)

  def apply(year: Int, quarter: Quarter): QuarterRange = apply(year, quarter, DefaultTimeCalendar)

  def apply(year: Int, quarter: Quarter, calendar: ITimeCalendar): QuarterRange =
    new QuarterRange(year, quarter, calendar)

  def apply(moment: DateTime): QuarterRange = apply(moment, DefaultTimeCalendar)

  // NOTE(review): this overload derives the quarter with Times.quarterOfMonth, while
  // the class constructors use Times.quarterOf(moment). Presumably both agree for any
  // DateTime — verify against Times, or unify on one helper.
  def apply(moment: DateTime, calendar: ITimeCalendar): QuarterRange =
    new QuarterRange(moment.getYear, Times.quarterOfMonth(moment.getMonthOfYear), calendar)
}
} | debop/debop4s | debop4s-timeperiod/src/main/scala/debop4s/timeperiod/timerange/QuarterRange.scala | Scala | apache-2.0 | 1,927 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets that match specific criteria, giving a quick overview of the dataset's contents without deeper analysis.