code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package org.wquery.path.operations
import org.wquery.NonNumericValuesException
import org.wquery.lang._
import org.wquery.lang.operations._
import org.wquery.model._
import org.wquery.path._
import org.wquery.query.SetVariable
import org.wquery.utils.IntOptionW._
import scala.collection.mutable.ListBuffer
import scalaz.Scalaz._
/** Base class of all operators in the path-expression algebra. */
sealed abstract class PathOp extends AlgebraOp
/*
* Set operations
*/
/** Computes the (multiset) union of the paths produced by the two operand operators. */
case class UnionOp(leftOp: AlgebraOp, rightOp: AlgebraOp) extends PathOp {
  def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = {
    val leftPaths = leftOp.evaluate(wordNet, bindings, context).paths
    val rightPaths = rightOp.evaluate(wordNet, bindings, context).paths
    DataSet(leftPaths union rightPaths)
  }

  // A tuple of the union may come from either operand, so types are merged.
  def leftType(pos: Int) = leftOp.leftType(pos) ++ rightOp.leftType(pos)

  def rightType(pos: Int) = leftOp.rightType(pos) ++ rightOp.rightType(pos)

  val minTupleSize = leftOp.minTupleSize min rightOp.minTupleSize

  val maxTupleSize = leftOp.maxTupleSize max rightOp.maxTupleSize

  // Variable bindings are not propagated through set operations.
  def bindingsPattern = BindingsPattern()

  val referencedVariables = leftOp.referencedVariables ++ rightOp.referencedVariables
}
/** Computes the difference of the path sets produced by the two operand operators. */
case class ExceptOp(leftOp: AlgebraOp, rightOp: AlgebraOp) extends PathOp {
  def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = {
    val leftSet = leftOp.evaluate(wordNet, bindings, context)
    val rightSet = rightOp.evaluate(wordNet, bindings, context)
    // Materialize the right-hand paths as a Set so each membership test is
    // effectively O(1) instead of scanning the whole list (was O(n*m)).
    val excluded = rightSet.paths.toSet
    DataSet(leftSet.paths.filterNot(excluded.contains))
  }
  // The difference only retains left-hand tuples, so shape/typing follow leftOp.
  def leftType(pos: Int) = leftOp.leftType(pos)
  def rightType(pos: Int) = leftOp.rightType(pos)
  val minTupleSize = leftOp.minTupleSize
  val maxTupleSize = leftOp.maxTupleSize
  def bindingsPattern = BindingsPattern()
  val referencedVariables = leftOp.referencedVariables ++ rightOp.referencedVariables
}
/** Computes the intersection of the paths produced by the two operand operators. */
case class IntersectOp(leftOp: AlgebraOp, rightOp: AlgebraOp) extends PathOp {
  def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = {
    val leftPaths = leftOp.evaluate(wordNet, bindings, context).paths
    val rightPaths = rightOp.evaluate(wordNet, bindings, context).paths
    DataSet(leftPaths intersect rightPaths)
  }

  // A common tuple belongs to both operands, so its type is the type intersection.
  def leftType(pos: Int) = leftOp.leftType(pos) intersect rightOp.leftType(pos)

  def rightType(pos: Int) = leftOp.rightType(pos) intersect rightOp.rightType(pos)

  // Tuples in the intersection satisfy both operands' size bounds.
  val minTupleSize = leftOp.minTupleSize max rightOp.minTupleSize

  val maxTupleSize = leftOp.maxTupleSize min rightOp.maxTupleSize

  def bindingsPattern = BindingsPattern()

  val referencedVariables = leftOp.referencedVariables ++ rightOp.referencedVariables
}
/**
 * Concatenates every path of leftOp with every path of rightOp (a Cartesian
 * product join), rebasing the right-hand variable bindings so that they keep
 * pointing at the correct positions inside the concatenated tuples.
 */
case class JoinOp(leftOp: AlgebraOp, rightOp: AlgebraOp) extends PathOp {
def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = {
val leftSet = leftOp.evaluate(wordNet, bindings, context)
val rightSet = rightOp.evaluate(wordNet, bindings, context)
val leftPathVarNames = leftSet.pathVars.keySet
val leftStepVarNames = leftSet.stepVars.keySet
val rightPathVarNames = rightSet.pathVars.keySet
val rightStepVarNames = rightSet.stepVars.keySet
val pathBuffer = DataSetBuffers.createPathBuffer
val pathVarBuffers = DataSetBuffers.createPathVarBuffers(leftPathVarNames union rightPathVarNames)
val stepVarBuffers = DataSetBuffers.createStepVarBuffers(leftStepVarNames union rightStepVarNames)
// Cartesian product: pair every left path with every right path.
for (i <- 0 until leftSet.pathCount; j <- 0 until rightSet.pathCount) {
val leftTuple = leftSet.paths(i)
val rightTuple = rightSet.paths(j)
pathBuffer.append(leftTuple ++ rightTuple)
// Left-hand variable positions are unaffected by the concatenation.
leftPathVarNames.foreach(x => pathVarBuffers(x).append(leftSet.pathVars(x)(i)))
leftStepVarNames.foreach(x => stepVarBuffers(x).append(leftSet.stepVars(x)(i)))
// Right-hand variable positions must be shifted by the left tuple's length.
val offset = leftTuple.size
rightPathVarNames.foreach { x =>
val pos = rightSet.pathVars(x)(j)
pathVarBuffers(x).append((pos._1 + offset, pos._2 + offset))
}
rightStepVarNames.foreach(x => stepVarBuffers(x).append(rightSet.stepVars(x)(j) + offset))
}
DataSet.fromBuffers(pathBuffer, pathVarBuffers, stepVarBuffers)
}
// Type at position pos from the left end of a joined tuple: the position may
// still lie inside the left operand, already inside the right operand, or in
// the overlap region when the left operand has a variable tuple size.
def leftType(pos: Int) ={
val leftMinSize = leftOp.minTupleSize
val leftMaxSize = leftOp.maxTupleSize
if (pos < leftMinSize) {
leftOp.leftType(pos)
} else if (leftMaxSize.some(pos < _).none(true)) { // pos < leftMaxSize or leftMaxSize undefined
val rightOpTypes = for (i <- 0 to pos - leftMinSize) yield rightOp.leftType(i)
(rightOpTypes :+ leftOp.leftType(pos)).flatten.toSet
} else { // leftMaxSize defined and pos >= leftMaxSize
(for (i <- leftMinSize to leftMaxSize.get) yield rightOp.leftType(pos - i)).flatten.toSet
}
}
// Symmetric to leftType, counting positions from the right end of the tuple.
def rightType(pos: Int) ={
val rightMinSize = rightOp.minTupleSize
val rightMaxSize = rightOp.maxTupleSize
if (pos < rightMinSize) {
rightOp.rightType(pos)
} else if (rightMaxSize.some(pos < _).none(true)) { // pos < rightMaxSize or rightMaxSize undefined
val leftOpTypes = for (i <- 0 to pos - rightMinSize) yield leftOp.rightType(i)
(leftOpTypes :+ rightOp.rightType(pos)).flatten.toSet
} else { // rightMaxSize defined and pos >= rightMaxSize
(for (i <- rightMinSize to rightMaxSize.get) yield leftOp.rightType(pos - i)).flatten.toSet
}
}
val minTupleSize = leftOp.minTupleSize + rightOp.minTupleSize
val maxTupleSize = leftOp.maxTupleSize + rightOp.maxTupleSize
def bindingsPattern = leftOp.bindingsPattern union rightOp.bindingsPattern
val referencedVariables = leftOp.referencedVariables ++ rightOp.referencedVariables
}
/*
* Arithmetic operations
*/
/**
 * Base class of the binary arithmetic operators. Both operands are evaluated,
 * the last element of every tuple is taken, and every left value is combined
 * with every right value (Cartesian product of the operand value sets).
 */
abstract class BinaryArithmeticOp(leftOp: AlgebraOp, rightOp: AlgebraOp) extends PathOp {
  def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = {
    val leftValues = leftOp.evaluate(wordNet, bindings, context).paths.map(_.last)
    val rightValues = rightOp.evaluate(wordNet, bindings, context).paths.map(_.last)
    val results = for {
      leftValue <- leftValues
      rightValue <- rightValues
    } yield List(combine(leftValue, rightValue))
    DataSet(results)
  }

  /** Combines two operand values; implemented by the concrete operators. */
  def combine(left: Any, right: Any): Any

  def leftType(pos: Int) = pos match {
    case 0 =>
      val l = leftOp.leftType(pos)
      val r = rightOp.leftType(pos)
      // Mixed operand types are promoted to floats.
      if (l == r) l else Set(FloatType)
    case _ =>
      Set.empty
  }

  def rightType(pos: Int) = pos match {
    case 0 =>
      val l = leftOp.rightType(pos)
      val r = rightOp.rightType(pos)
      if (l == r) l else Set(FloatType)
    case _ =>
      Set.empty
  }

  // Arithmetic always yields single-element tuples.
  val minTupleSize = 1
  val maxTupleSize = some(1)
  def bindingsPattern = BindingsPattern()
  val referencedVariables = leftOp.referencedVariables ++ rightOp.referencedVariables
}
/** Addition; throws NonNumericValuesException on non-numeric operands. */
case class AddOp(leftOp: AlgebraOp, rightOp: AlgebraOp) extends BinaryArithmeticOp(leftOp, rightOp) {
  def combine(leftVal: Any, rightVal: Any): Any = (leftVal, rightVal) match {
    case (a: Int, b: Int)       => a + b
    case (a: Int, b: Double)    => a + b
    case (a: Double, b: Int)    => a + b
    case (a: Double, b: Double) => a + b
    case _                      => throw new NonNumericValuesException("+")
  }
}
/** Subtraction; throws NonNumericValuesException on non-numeric operands. */
case class SubOp(leftOp: AlgebraOp, rightOp: AlgebraOp) extends BinaryArithmeticOp(leftOp, rightOp) {
  def combine(leftVal: Any, rightVal: Any): Any = (leftVal, rightVal) match {
    case (a: Int, b: Int)       => a - b
    case (a: Int, b: Double)    => a - b
    case (a: Double, b: Int)    => a - b
    case (a: Double, b: Double) => a - b
    case _                      => throw new NonNumericValuesException("-")
  }
}
/** Multiplication; throws NonNumericValuesException on non-numeric operands. */
case class MulOp(leftOp: AlgebraOp, rightOp: AlgebraOp) extends BinaryArithmeticOp(leftOp, rightOp) {
  def combine(leftVal: Any, rightVal: Any): Any = (leftVal, rightVal) match {
    case (a: Int, b: Int)       => a * b
    case (a: Int, b: Double)    => a * b
    case (a: Double, b: Int)    => a * b
    case (a: Double, b: Double) => a * b
    case _                      => throw new NonNumericValuesException("*")
  }
}
/**
 * Division; always produces a floating point result, even for two integer
 * operands. Throws NonNumericValuesException on non-numeric operands.
 */
case class DivOp(leftOp: AlgebraOp, rightOp: AlgebraOp) extends BinaryArithmeticOp(leftOp, rightOp) {
  def combine(leftVal: Any, rightVal: Any): Any = (leftVal, rightVal) match {
    // Int/Int is widened so that, e.g., 1/2 yields 0.5 rather than 0.
    case (a: Int, b: Int)       => a.toDouble / b.toDouble
    case (a: Int, b: Double)    => a / b
    case (a: Double, b: Int)    => a / b
    case (a: Double, b: Double) => a / b
    case _                      => throw new NonNumericValuesException("/")
  }
}
/**
 * Integer (truncating) division. For floating point operands the quotient is
 * computed as (a - a % b) / b, which truncates toward zero like Int division.
 * Throws NonNumericValuesException on non-numeric operands.
 */
case class IntDivOp(leftOp: AlgebraOp, rightOp: AlgebraOp) extends BinaryArithmeticOp(leftOp, rightOp) {
  def combine(leftVal: Any, rightVal: Any): Any = (leftVal, rightVal) match {
    case (a: Int, b: Int)       => a / b
    case (a: Int, b: Double)    => (a - a % b) / b
    case (a: Double, b: Int)    => (a - a % b) / b
    case (a: Double, b: Double) => (a - a % b) / b
    case _                      => throw new NonNumericValuesException("div")
  }
}
/** Remainder (modulo); throws NonNumericValuesException on non-numeric operands. */
case class ModOp(leftOp: AlgebraOp, rightOp: AlgebraOp) extends BinaryArithmeticOp(leftOp, rightOp) {
  def combine(leftVal: Any, rightVal: Any): Any = (leftVal, rightVal) match {
    case (a: Int, b: Int)       => a % b
    case (a: Int, b: Double)    => a % b
    case (a: Double, b: Int)    => a % b
    case (a: Double, b: Double) => a % b
    case _                      => throw new NonNumericValuesException("mod")
  }
}
/** Unary negation of the last element of every tuple produced by op. */
case class MinusOp(op: AlgebraOp) extends PathOp {
  def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = {
    val negated = op.evaluate(wordNet, bindings, context).paths.map(_.last).map {
      case n: Int    => List(-n)
      case d: Double => List(-d)
      case _         => throw new NonNumericValuesException("-")
    }
    DataSet(negated)
  }

  def leftType(pos: Int) = op.leftType(pos)

  def rightType(pos: Int) = op.rightType(pos)

  // Negation yields single-element tuples.
  val minTupleSize = 1
  val maxTupleSize = some(1)

  def bindingsPattern = BindingsPattern()

  val referencedVariables = op.referencedVariables
}
/*
* Declarative operations
*/
/**
 * Filters the paths produced by op, keeping only those for which the
 * condition holds. Before evaluating the condition, the tuple and step
 * variables it references are bound from the current path.
 */
case class SelectOp(op: AlgebraOp, condition: Condition) extends PathOp {
def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = {
val dataSet = op.evaluate(wordNet, bindings, context)
val pathVarNames = dataSet.pathVars.keySet
// Only the variables actually referenced by the condition need to be bound.
val filterPathVarNames = pathVarNames.filter(pathVarName => condition.referencedVariables.contains(TupleVariable(pathVarName)))
val stepVarNames = dataSet.stepVars.keySet
val filterStepVarNames = stepVarNames.filter(stepVarName => condition.referencedVariables.contains(StepVariable(stepVarName)))
val pathBuffer = DataSetBuffers.createPathBuffer
val pathVarBuffers = DataSetBuffers.createPathVarBuffers(pathVarNames)
val stepVarBuffers = DataSetBuffers.createStepVarBuffers(stepVarNames)
// Child bindings frame; rebinding per path does not leak into the caller.
val binds = Bindings(bindings, false)
for (i <- 0 until dataSet.pathCount) {
val tuple = dataSet.paths(i)
// Tuple (path) variables are bound to the slice of the path they cover.
for (pathVar <- filterPathVarNames) {
val varPos = dataSet.pathVars(pathVar)(i)
binds.bindTupleVariable(pathVar, tuple.slice(varPos._1, varPos._2))
}
// Step variables are bound to a single path element.
for (stepVar <- filterStepVarNames) {
val varPos = dataSet.stepVars(stepVar)(i)
binds.bindStepVariable(stepVar, tuple(varPos))
}
// Keep the path (and its variable bindings) only if the condition holds.
if (condition.satisfied(wordNet, binds, context)) {
pathBuffer.append(tuple)
pathVarNames.foreach(x => pathVarBuffers(x).append(dataSet.pathVars(x)(i)))
stepVarNames.foreach(x => stepVarBuffers(x).append(dataSet.stepVars(x)(i)))
}
}
DataSet.fromBuffers(pathBuffer, pathVarBuffers, stepVarBuffers)
}
// Selection never changes tuple shape, typing or binding structure.
def leftType(pos: Int) = op.leftType(pos)
def rightType(pos: Int) = op.rightType(pos)
val minTupleSize = op.minTupleSize
val maxTupleSize = op.maxTupleSize
def bindingsPattern = op.bindingsPattern
val referencedVariables = op.referencedVariables ++ (condition.referencedVariables -- op.bindingsPattern.variables)
}
/**
 * Evaluates projectOp once for every path produced by op, with the variables
 * referenced by projectOp bound from the current path, and concatenates all
 * the resulting data sets.
 */
case class ProjectOp(op: AlgebraOp, projectOp: AlgebraOp) extends PathOp {
def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = {
val dataSet = op.evaluate(wordNet, bindings, context)
val buffer = new DataSetBuffer
val pathVarNames = dataSet.pathVars.keys
// Only the variables actually referenced by projectOp need to be bound.
val filterPathVarNames = pathVarNames.filter(pathVarName => projectOp.referencedVariables.contains(TupleVariable(pathVarName)))
val stepVarNames = dataSet.stepVars.keys
val filterStepVarNames = stepVarNames.filter(stepVarName => projectOp.referencedVariables.contains(StepVariable(stepVarName)))
// Child bindings frame; rebinding per path does not leak into the caller.
val binds = Bindings(bindings, false)
for (i <- 0 until dataSet.pathCount) {
val tuple = dataSet.paths(i)
// Tuple (path) variables are bound to the slice of the path they cover.
for (pathVar <- filterPathVarNames) {
val varPos = dataSet.pathVars(pathVar)(i)
binds.bindTupleVariable(pathVar, tuple.slice(varPos._1, varPos._2))
}
// Step variables are bound to a single path element.
for (stepVar <- filterStepVarNames) {
val varPos = dataSet.stepVars(stepVar)(i)
binds.bindStepVariable(stepVar, tuple(varPos))
}
buffer.append(projectOp.evaluate(wordNet, binds, context))
}
buffer.toDataSet
}
// The output shape and typing come entirely from projectOp.
def leftType(pos: Int) = projectOp.leftType(pos)
def rightType(pos: Int) = projectOp.rightType(pos)
val minTupleSize = projectOp.minTupleSize
val maxTupleSize = projectOp.maxTupleSize
def bindingsPattern = projectOp.bindingsPattern
val referencedVariables = op.referencedVariables ++ (projectOp.referencedVariables -- op.bindingsPattern.variables)
}
/**
 * Extends every path produced by op with tuples generated by the relational
 * pattern, and binds the variables of the extension according to the
 * variable template.
 */
case class ExtendOp(op: AlgebraOp, pattern: RelationalPattern, variables: VariableTemplate) extends PathOp {
def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = {
val dataSet = op.evaluate(wordNet, bindings, context)
val extensionSet = pattern.extend(wordNet, bindings, new DataExtensionSet(dataSet))
val dataSetPathVarNames = dataSet.pathVars.keySet
val dataSetStepVarNames = dataSet.stepVars.keySet
val pathBuffer = DataSetBuffers.createPathBuffer
// The template's path variable (if any) is added to the inherited ones.
val pathVarBuffers = DataSetBuffers.createPathVarBuffers(variables.pathVariableName.some(dataSetPathVarNames + _).none(dataSetPathVarNames))
val stepVarBuffers = DataSetBuffers.createStepVarBuffers(dataSetStepVarNames union variables.stepVariableNames)
for ((pathPos, extension) <- extensionSet.extensions) {
val path = dataSet.paths(pathPos)
val extensionSize = extension.size
// Existing path positions are unchanged; template variables are indexed
// relative to the end of the original path.
val pathShift = 0
val extensionShift = path.size
pathBuffer.append(path ++ extension)
for (pathVar <- dataSetPathVarNames) {
val (left, right) = dataSet.pathVars(pathVar)(pathPos)
pathVarBuffers(pathVar).append((left + pathShift, right + pathShift))
}
for (stepVar <- dataSetStepVarNames)
stepVarBuffers(stepVar).append(dataSet.stepVars(stepVar)(pathPos) + pathShift)
for (pathVar <- variables.pathVariableName)
pathVarBuffers(pathVar).append(variables.pathVariableIndexes(extensionSize, extensionShift))
for (stepVar <- variables.leftVariablesNames)
stepVarBuffers(stepVar).append(variables.leftIndex(stepVar, extensionSize, extensionShift))
for (stepVar <- variables.rightVariablesNames)
stepVarBuffers(stepVar).append(variables.rightIndex(stepVar, extensionSize, extensionShift))
}
DataSet.fromBuffers(pathBuffer, pathVarBuffers, stepVarBuffers)
}
// A position may fall inside op's tuples, inside the extension, or in the
// ambiguous overlap when op has a variable tuple size.
def leftType(pos: Int) = {
if (pos < op.minTupleSize) {
op.leftType(pos)
} else if (op.maxTupleSize.some(pos < _).none(true)) { // pos < maxTupleSize or maxTupleSize undefined
val extendOpTypes = for (i <- 0 to pos - op.minTupleSize) yield pattern.leftType(i)
(extendOpTypes :+ op.leftType(pos)).flatten.toSet
} else { // maxTupleSize defined and pos >= maxTupleSize
(for (i <- op.minTupleSize to op.maxTupleSize.get) yield pattern.leftType(pos - i)).flatten.toSet
}
}
// Symmetric to leftType, counting positions from the right (extension) end.
def rightType(pos: Int) = {
if (pos < pattern.minTupleSize) {
pattern.rightType(pos)
} else if (pattern.maxTupleSize.some(pos < _).none(true)) { // pos < maxSize or maxSize undefined
val extendOpTypes = for (i <- 0 to pos - pattern.minTupleSize) yield op.rightType(i)
(extendOpTypes :+ pattern.rightType(pos)).flatten.toSet
} else { // maxSize defined and pos >= maxSize
(for (i <- pattern.minTupleSize to pattern.maxTupleSize.get) yield op.rightType(pos - i)).flatten.toSet
}
}
val minTupleSize = op.minTupleSize + pattern.minTupleSize
val maxTupleSize = op.maxTupleSize + pattern.maxTupleSize
def bindingsPattern = {
val pattern = op.bindingsPattern
pattern.bindVariablesTypes(variables, this)
pattern
}
val referencedVariables = op.referencedVariables
}
/** Binds the variables of the template to the paths produced by op. */
case class BindOp(op: AlgebraOp, variables: VariableTemplate) extends PathOp {
  def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) =
    op.evaluate(wordNet, bindings, context).bindVariables(variables)

  // Binding variables does not change tuple shape or typing.
  def leftType(pos: Int) = op.leftType(pos)

  def rightType(pos: Int) = op.rightType(pos)

  val minTupleSize = op.minTupleSize

  val maxTupleSize = op.maxTupleSize

  def bindingsPattern = {
    val boundPattern = op.bindingsPattern
    boundPattern.bindVariablesTypes(variables, op)
    boundPattern
  }

  val referencedVariables = op.referencedVariables
}
/*
* Elementary operations
*/
/**
 * Resolves the values produced by op to synsets. In creation context the
 * operand values are interpreted as senses defining a (possibly new) synset;
 * otherwise words and senses are looked up in the wordnet store.
 */
case class SynsetFetchOp(op: AlgebraOp) extends AlgebraOp {
def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = {
if (context.creation) {
val senses = op.evaluate(wordNet, bindings, context).paths.map(_.last.asInstanceOf[Sense])
if (senses.nonEmpty) {
// Reuse an existing synset only if it contains exactly the given senses;
// otherwise wrap the senses in a NewSynset placeholder.
val synset = wordNet.getSynset(senses.head).some { synset =>
if (wordNet.getSenses(synset) == senses)
synset
else
new NewSynset(senses)
}.none(new NewSynset(senses))
DataSet.fromValue(synset)
} else {
DataSet.empty
}
} else {
val pathBuffer = new ListBuffer[List[Any]]
for (path <- op.evaluate(wordNet, bindings, context).paths if path.nonEmpty) {
// A word form may resolve to several senses and hence several synsets;
// a sense resolves to at most one synset; other values are skipped.
path.last match {
case word: String =>
val senses = wordNet.getSenses(word)
for (sense <- senses)
wordNet.getSynset(sense).foreach(synset => pathBuffer.append(List(synset)))
case sense: Sense =>
wordNet.getSynset(sense).foreach(synset => pathBuffer.append(List(synset)))
case _ =>
/* do nothing */
}
}
DataSet(pathBuffer.toList)
}
}
def leftType(pos: Int) = if (pos == 0) Set(SynsetType) else Set.empty
def rightType(pos: Int) = if (pos == 0) Set(SynsetType) else Set.empty
val minTupleSize = 1
val maxTupleSize = some(1)
def bindingsPattern = BindingsPattern()
val referencedVariables = op.referencedVariables
}
// A synset created on the fly from a list of senses; its id is derived from
// the first sense. NOTE(review): assumes `senses` is non-empty — `head`
// would throw otherwise; confirm callers guarantee this.
class NewSynset(val senses: List[Sense]) extends Synset("synset#" + senses.head.toString)
/**
 * Fetches tuples of the given relation from the wordnet store.
 *
 * @param relation the relation to fetch
 * @param from argument-name / allowed-values pairs constraining the fetch
 * @param to the argument names to return
 * @param withArcs whether arcs are interleaved with destination nodes
 */
case class FetchOp(relation: Relation, from: List[(String, List[Any])], to: List[String], withArcs: Boolean = true) extends PathOp {
  def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = {
    // In creation context, values of domain relations are returned directly
    // instead of being looked up (they may not exist in the store yet).
    if (context.creation && WordNet.domainRelations.contains(relation) && from.forall { case (_, values) => values.nonEmpty }) {
      // Each constraint's value list becomes one path.
      DataSet(from.map(_._2))
    } else {
      wordNet.fetch(relation, from, to, withArcs)
    }
  }

  def leftType(pos: Int) = typeAt(pos)

  def rightType(pos: Int) = typeAt(from.size + to.size - 1 - pos)

  // Type of the relation argument at the given position, if one exists there.
  private def typeAt(pos: Int): Set[DataType] = {
    val args = from.map(_._1) ++ to
    if (args.isDefinedAt(pos))
      Set(relation.demandArgument(args(pos)).nodeType)
    else
      Set.empty
  }

  // With arcs, destination nodes are interleaved with arcs: n nodes, n-1 arcs.
  val minTupleSize = if (withArcs) 2*to.size - 1 else to.size
  val maxTupleSize = some(if (withArcs) 2*to.size - 1 else to.size)

  def bindingsPattern = BindingsPattern()

  val referencedVariables = ∅[Set[Variable]]
}
/** Factory methods for common fetches over the built-in domain relations. */
object FetchOp {
  val words = FetchOp(WordNet.WordSet, List((Relation.Src, Nil)), List(Relation.Src))
  val senses = FetchOp(WordNet.SenseSet, List((Relation.Src, Nil)), List(Relation.Src))
  val synsets = FetchOp(WordNet.SynsetSet, List((Relation.Src, Nil)), List(Relation.Src))
  val possyms = FetchOp(WordNet.PosSet, List((Relation.Src, Nil)), List(Relation.Src))

  /** Fetches the word with the given word form. */
  def wordByValue(value: String) =
    FetchOp(WordNet.WordSet, List((Relation.Src, List(value))), List(Relation.Src))

  /** Fetches the given sense. */
  def senseByValue(sense: Sense) =
    FetchOp(WordNet.SenseSet, List((Relation.Src, List(sense))), List(Relation.Src))

  /** Fetches senses matching a word form and sense number. */
  def sensesByWordFormAndSenseNumber(word: String, num: Int) =
    FetchOp(WordNet.SenseToWordFormSenseNumberAndPos,
      List((Relation.Dst, List(word)), ("num", List(num))), List(Relation.Src))
}
/** Wraps a precomputed data set as an algebra operator. */
case class ConstantOp(dataSet: DataSet) extends PathOp {
def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = dataSet
def leftType(pos: Int) = dataSet.leftType(pos)
def rightType(pos: Int) = dataSet.rightType(pos)
val minTupleSize = dataSet.minTupleSize
val maxTupleSize = dataSet.maxTupleSize
def bindingsPattern = BindingsPattern() // assumed that a constant dataset does not contain variable bindings
val referencedVariables = ∅[Set[Variable]]
}
/** Convenience constructors for constant operators. */
object ConstantOp {
def fromValue(value: Any) = ConstantOp(DataSet.fromValue(value))
// A constant operator producing no paths at all.
val empty = ConstantOp(DataSet.empty)
}
/*
* Reference operations
*/
/**
 * References a previously bound set variable.
 * NOTE(review): `types` appears to carry (source operator, position shift,
 * spans-suffix flag) and is used only for static typing/sizing — confirm
 * against the code that constructs these ops.
 */
case class SetVariableRefOp(variable: SetVariable, types: (AlgebraOp, Int, Boolean)) extends PathOp {
def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = bindings.demandSetVariable(variable.name)
def leftType(pos: Int) = {
if (types._3)
types._1.leftType(pos + types._2)
else if (pos == 0)
types._1.leftType(types._2)
else DataType.empty
}
def rightType(pos: Int) = types._1.rightType(pos)
val minTupleSize = if (types._3) types._1.minTupleSize - types._2 else 1
val maxTupleSize = if (types._3) types._1.maxTupleSize.map(_ - types._2) else Some(1)
def bindingsPattern = BindingsPattern()
val referencedVariables = Set[Variable](variable)
}
/**
 * References a previously bound tuple (path) variable.
 * NOTE(review): `types` appears to carry (source operator, left shift,
 * right shift) used only for static typing/sizing — confirm against binder.
 */
case class PathVariableRefOp(variable: TupleVariable, types: (AlgebraOp, Int, Int)) extends PathOp {
def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = DataSet.fromTuple(bindings.demandTupleVariable(variable.name))
def leftType(pos: Int) = types._1.leftType(pos + types._2)
def rightType(pos: Int) = types._1.rightType(pos + types._3)
val minTupleSize = types._1.minTupleSize - types._2
val maxTupleSize = types._1.maxTupleSize.map(_ - types._3)
def bindingsPattern = BindingsPattern()
val referencedVariables = Set[Variable](variable)
}
/** References a previously bound step variable; yields its single value. */
case class StepVariableRefOp(variable: StepVariable, types: Set[DataType]) extends PathOp {
  def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) =
    DataSet.fromValue(bindings.demandStepVariable(variable.name))

  // A step variable always holds exactly one value, typed at position 0.
  def leftType(pos: Int) = pos match {
    case 0 => types
    case _ => Set.empty
  }

  def rightType(pos: Int) = pos match {
    case 0 => types
    case _ => Set.empty
  }

  val minTupleSize = 1
  val maxTupleSize = some(1)

  def bindingsPattern = BindingsPattern()

  val referencedVariables = Set[Variable](variable)
}
| marekkubis/wquery | src/main/scala/org/wquery/path/operations/pathOps.scala | Scala | bsd-3-clause | 23,583 |
package nexus.testbase.ops
import nexus.diff._
import nexus.diff.ops._
import nexus.prob._
import nexus._
import nexus.syntax._
import org.scalatest._
/**
* Tests R^n^ -> R functions.
* @author Tongfei Chen
*/
class OpVSTests[T[_], R](gen: Stochastic[R])(implicit T: IsRealTensorK[T, R], RToFloat: CastToFloat[R]) extends FunSuite {
// Phantom axis label for the test vectors.
class Axis
// Length of the randomly generated test vectors.
val len = 10
// Property: the analytic gradient of op must be close to the numerical
// central-difference gradient on randomly generated inputs.
class Prop(op: Op1[T[Axis], R], gen: Stochastic[T[Axis]]) extends ApproxProp[T[Axis], R](op, gen) {
implicit val R = T.R
// Gradient from the op's own backward pass, seeded with 1.
def autoGrad(x: T[Axis]) = {
val y = op.forward(x)
op.backward(R.one, y, x)
}
// Central-difference numerical gradient, perturbing one coordinate at a time.
def numGrad(x: T[Axis]) =
T.tabulate[Axis](x.shape(0)) { i =>
val z: R = x(i)
// Step size relative to the magnitude of the perturbed coordinate.
val δ = z * relativeDiff
val δx = T.tabulate[Axis](x.shape(0)) { j => if (j == i) δ else R.zero }
(op.forward(x + δx) - op.forward(x - δx)) / (δ * 2d)
}
// Relative L2 error between the automatic and numerical gradients.
def error(ag: T[Axis], ng: T[Axis]): R = {
L2Norm(Id(ag - ng)) / L2Norm(Id(ag))
}
}
// Vector-to-scalar operators under test.
val ops = Seq(
L1Norm.l1NormF[T, R, Axis],
L2Norm.l2NormF[T, R, Axis]
)
// Generate one gradient-check test per operator.
for (op <- ops) {
test(s"${op.name}'s automatic derivative is close to its numerical approximation on $T") {
val prop = new Prop(op, Stochastic.apply(T.tabulate(len)(_ => gen.sample)))
assert(prop.passedCheck())
}
}
}
| ctongfei/nexus | test-base/src/main/scala/nexus/testbase/ops/OpVSTests.scala | Scala | mit | 1,311 |
package latis.reader.tsml
import latis.reader.tsml.ml.Tsml
import latis.dm._
import latis.util.iterator.MappingIterator
import latis.data.SampleData
import latis.data.IterableData
import java.io.RandomAccessFile
import java.nio.ByteOrder
import java.nio.channels.FileChannel.MapMode
import latis.data.buffer.ByteBufferData
import latis.data.SampledData
import latis.data.seq.DataSeq
import latis.util.iterator.PeekIterator
import latis.util.DataUtils
import ucar.nc2.NetcdfFile
import ucar.nc2.dataset.NetcdfDataset
import ucar.nc2.util.EscapeStrings
import latis.data.Data
import ucar.ma2.InvalidRangeException
/**
 * Adapter intended to read NetCDF files through the tsml framework.
 * NOTE(review): the entire NetCDF-specific implementation below is commented
 * out, so this class currently behaves exactly like ColumnarBinaryAdapter.
 * Either restore the implementation or delete the dead code.
 */
class NetcdfAdapter2(tsml: Tsml) extends ColumnarBinaryAdapter(tsml){
// private var ncFile: NetcdfFile = null
//
// override def init {
// val location = getUrl.toString
// ncFile = NetcdfDataset.openFile(location, null)
// }
//
// override def findScalarData(s: Scalar): Option[IterableData] = {
// val name = s.getMetadata("origName") match {
// case Some(s) => s
// case None => s.getName
// }
//
// val section = s.getMetadata("section") match {
// case Some(s) => s
// case None => ""
// }
//
// val escapedName = EscapeStrings.backslashEscape(name, ".")
// val ncvar = ncFile.findVariable(escapedName)
//
// val it = new NetcdfIterator(ncvar, s)
//
// val recSize = s match {
// case _: Real => 8
// case t: Text => t.getMetadata("length").getOrElse("8").toInt
// }
//
// Some(IterableData(it, recSize))
// }
//
//
// override def close = {ncFile.close}
//
// class NetcdfIterator(v: ucar.nc2.Variable, lv: Variable) extends PeekIterator[Data] {
//
// val shape = v.getShape
// val sec = lv.getMetadata("section").getOrElse(":,".*(shape.length)).split(",")
// val varsec = sec.reverse.dropWhile(_ != ":").reverse //special logic for handling 0,:,n sections
// val div = varsec.zip(shape).scanRight(1)(_._2*_).drop(1)
// def section = (div.zip(shape).map(z => (counter/z._1)%z._2) ++ sec.drop(varsec.length)).mkString(",")
// var counter = -1
//
// def getNext = if(counter+1 >= div(0)) null else try {
// counter += 1
// val sec = section
// val arr = v.read(sec).reduce
// lv match {
// case _: Text => Data(arr.getObject(0).toString)
// case _: Real => Data(arr.getDouble(0))
// }
// } //catch {case e: InvalidRangeException => null}
// }
}
/*
* Copyright (C) 2009 Lalit Pant <pant.lalit@gmail.com>
*
* The contents of this file are subject to the GNU General Public License
* Version 3 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.gnu.org/copyleft/gpl.html
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
*/
package net.kogics.kojo.core
/**
 * A canvas on which labelled geometric objects (points, lines, line segments,
 * angles, and text) can be created and displayed.
 */
trait GeomCanvas {
// Concrete canvas implementations refine these member types.
type GPoint <: Point with Labelled with MoveablePoint
type GLine <: Line with Labelled
type GSegment <: LineSegment with Labelled
type GAngle <: Angle with Labelled
type GText <: Text with Labelled
/** Removes all objects from the canvas. */
def clear(): Unit
def showAxes(): Unit
def hideAxes(): Unit
/** Creates a free point at (x, y). */
def point(label: String, x: Double, y: Double): GPoint
/** Creates a point constrained to the given line, near (x, y). */
def point(label: String, on: GLine, x: Double, y: Double): GPoint
def line(label: String, p1: GPoint, p2: GPoint): GLine
def lineSegment(label: String, p1: GPoint, p2: GPoint): GSegment
/** Creates the intersection point of two lines. */
def intersect(label: String, l1: GLine, l2: GLine): GPoint
/** Creates the angle defined by the three points. */
def angle(label: String, p1: GPoint, p2: GPoint, p3: GPoint): GAngle
def text(content: String, x: Double, y: Double): GText
}
| richardfontana/fontana2007-t | KojoEnv/src/net/kogics/kojo/core/GeomCanvas.scala | Scala | gpl-3.0 | 1,329 |
/*
* Copyright (C) 2014 GRNET S.A.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gr.grnet.egi.vmcatcher.cmdline
import com.beust.jcommander.Parameters
/**
*
*/
/** JCommander command ("usage") that triggers printing of usage information. */
@Parameters(
commandNames = Array("usage"),
commandDescription = "Show usage"
)
class Usage
| grnet/snf-vmcatcher | src/main/scala/gr/grnet/egi/vmcatcher/cmdline/Usage.scala | Scala | gpl-3.0 | 877 |
// Copyright (C) 2012 Cyrus Innovation
package tmc.CrafterTracker.services
import com.google.gson.GsonBuilder
import tmc.CrafterTracker.adapters.DateTimeAdapter
import org.joda.time.DateTime
import com.mongodb.util.JSON
import tmc.CrafterTracker.domain.Player
import com.mongodb.{BasicDBObject, DBObject, DBCollection}
// Created by cyrus on 5/9/12 at 9:44 AM
/**
 * MongoDB-backed repository for Player documents, keyed by username.
 * Players are serialized to/from JSON with Gson.
 */
object PlayerRepository {
  // Injected externally before use; stays null until assigned.
  var collection: DBCollection = null
  val gson = new GsonBuilder().registerTypeAdapter(classOf[DateTime], DateTimeAdapter).create

  /** Upserts the player document identified by its username. */
  def save(player: Player) {
    val playerObject = JSON.parse(gson.toJson(player, classOf[Player])).asInstanceOf[DBObject]
    val updateWhere = new BasicDBObject("username", player.username)
    // upsert = true, multi = false: insert if missing, replace otherwise.
    collection.update(updateWhere, playerObject, true, false)
  }

  /** Looks up a player by username; returns null when no document matches. */
  def findByPlayerName(playerName: String): Player = {
    val playerObject: DBObject = collection.findOne(new BasicDBObject("username", playerName))
    // Keep the null-returning contract for existing callers; avoid `return`.
    if (playerObject == null) null
    else gson.fromJson(playerObject.toString, classOf[Player])
  }

  /** True when a player with the given username is stored. */
  def exists(playerName: String): Boolean = findByPlayerName(playerName) != null

  /** Total number of stored player documents. */
  def count(): Long = collection.count()
}
| TheMineCart/CrafterTracker | src/main/scala/tmc/CrafterTracker/services/PlayerRepository.scala | Scala | gpl-3.0 | 1,210 |
package TAPL2.FullIsoRec
import TAPL2.FullEquiRec.FullEquiRec
import TAPL2.Util._
import TAPL2.{Term, Ty}
/* Types of the full iso-recursive calculus. */
case class TyVar(i: String) extends Ty
case class TyRecord(els: List[(String, Ty)]) extends Ty
// Function (arrow) type t1 -> t2.
case class TyArr(t1: Ty, t2: Ty) extends Ty
case object TyNat extends Ty
case class TyVariant(els: List[(String, Ty)]) extends Ty
case object TyBool extends Ty
case object TyString extends Ty
case object TyFloat extends Ty
case object TyUnit extends Ty
// Iso-recursive type mu id. ty.
case class TyRec(id: String, ty: Ty) extends Ty
/* Terms of the full iso-recursive calculus. */
case object TmTrue extends Term
case object TmFalse extends Term
case class TmIf(cond: Term, t1: Term, t2: Term) extends Term
case class TmVar(i: String) extends Term
case class TmString(s: String) extends Term
// Type ascription: t as ty.
case class TmAscribe(t: Term, ty: Ty) extends Term
case class TmRecord(fields: List[(String, Term)]) extends Term
case class TmProj(t: Term, proj: String) extends Term
// Lambda abstraction with an annotated parameter type.
case class TmAbs(v: String, ty: Ty, t: Term) extends Term
case class TmApp(t1: Term, t2: Term) extends Term
case object TmZero extends Term
case class TmSucc(t: Term) extends Term
case class TmPred(t: Term) extends Term
case class TmIsZero(t: Term) extends Term
case class TmInert(ty: Ty) extends Term
// Case analysis over variant values: (tag, bound variable, branch body).
case class TmCase(sel: Term, branches: List[(String, String, Term)]) extends Term
case class TmTag(tag: String, t: Term, ty: Ty) extends Term
case class TmLet(l: String, t1: Term, t2: Term) extends Term
case object TmUnit extends Term
case class TmFix(t: Term) extends Term
// fold/unfold witnesses converting between a recursive type and its unrolling.
case class TmFold(ty: Ty) extends Term
case class TmUnfold(ty: Ty) extends Term
/*
/* <13> */
object Fold {
trait Parser[F <: {val pE : PackratParser[Term]; val pT : PackratParser[Ty]}] {
lexical.reserved += ("fold", "unfold")
lexical.delimiters += ("[", "]")
val pFoldE: (=> F) => PackratParser[Term] = l => {
lazy val e = l.pE
lazy val t = l.pT
List(
"fold" ~> ("[" ~> t <~ "]") ~ e ^^ { case ty ~ ex => TmFold(ex, ty) },
"unfold" ~> ("[" ~> t <~ "]") ~ e ^^ { case ty ~ ex => TmUnfold(ex, ty) }
).reduce((a, b) => a ||| b)
}
}
}
object FullIsoRec {
trait Parser[L <: {val pE : PackratParser[E]; val pT : PackratParser[T]}]
extends FullEquiRec.Parser[L] with Fold.Parser[L] {
val pFullIsoRecE = pFullEquiRecE | pFoldE
val pFullIsoRecT = pFullEquiRecT
}
}
object TestFullIsoRec {
import _
class List[E, T](pe: PackratParser[E], pt: PackratParser[T]) {
val pE = pe
val pT = pt
}
def parse[E, T](inp: String)(alg: FullIsoRec.Alg[E, T]) = {
def parser(l: => List[E, T]): List[E, T] = {
val lang = new FullIsoRec.Parser[E, T, List[E, T]] {}
new List[E, T](lang.pFullIsoRecE(alg)(l), lang.pFullIsoRecT(alg)(l))
}
runParser(fix(parser).pE)(inp)
}
def parseAndPrint(inp: String) = parse(inp)(new FullIsoRec.Print {})
}*/ | hy-zhang/parser | Scala/Old/TAPL2/FullIsoRec/FullIsoRec.scala | Scala | bsd-3-clause | 2,839 |
package org.apache.spark.streaming.talos
import org.apache.spark.Partition
/**
* Created by jiasheng on 16-3-15.
*/
private[talos]
// One RDD partition backed by a Talos offset range.
class TalosRDDPartition(
  val index: Int, // partition index required by Spark's Partition contract
  val offsetRange: OffsetRange // the span of Talos offsets this partition reads
) extends Partition {
  // Number of messages covered by this partition — assumes offsetRange is
  // end-exclusive ([fromOffset, untilOffset)); TODO confirm against OffsetRange.
  def count(): Long = offsetRange.untilOffset - offsetRange.fromOffset
}
| XiaoMi/galaxy-sdk-java | galaxy-talos-client/galaxy-talos-spark/src/main/scala/org/apache/spark/streaming/talos/TalosRDDPartition.scala | Scala | apache-2.0 | 304 |
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.
package com.microsoft.ml.spark
import org.apache.spark.ml.Transformer
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.types.StructType
/**
 * Tests for the SelectColumns transformer: selecting all, some, or one
 * column, columns whose names contain spaces, and the error raised when a
 * non-existent column is requested.
 */
class SelectColumnsSuite extends TransformerFuzzingTest {
  import session.implicits._
  test("Select all columns in a data frame") {
    // Selecting every column should yield a frame equal to the input.
    val input = makeBasicDF()
    val result = new SelectColumns()
      .setCols(Array("numbers", "words", "more"))
      .transform(input)
    assert(verifyResult(input, result))
  }
  test("Test: Select two columns in a data frame") {
    val expected = Seq(
      ("guitars", "drums"),
      ("piano", "trumpet"),
      ("bass", "cymbals")
    ).toDF("words", "more")
    val result = new SelectColumns()
      .setCols(Array("words", "more"))
      .transform(makeBasicDF())
    assert(verifyResult(expected, result))
  }
  test("Test: Select columns with spaces") {
    // Column names containing spaces must be handled verbatim.
    val expected = Seq(
      ("guitars", "drums"),
      ("piano", "trumpet"),
      ("bass", "cymbals")
    ).toDF("words", "Scored Labels")
    val result = new SelectColumns()
      .setCols(Array("words", "Scored Labels"))
      .transform(makeBasicDF().withColumnRenamed("more", "Scored Labels"))
    assert(verifyResult(expected, result))
  }
  test("Test: Select one column from the data frame") {
    val expected = Seq(
      "guitars",
      "piano",
      "bass"
    ).toDF("words")
    val result = new SelectColumns()
      .setCols(Array("words"))
      .transform(makeBasicDF())
    assert(verifyResult(expected, result))
  }
  test("Invalid column specified") {
    // intercept is the idiomatic ScalaTest replacement for the
    // try { ...; fail() } catch { ... } pattern: it fails the test if no
    // exception (or a different one) is thrown.
    intercept[NoSuchElementException] {
      new SelectColumns().setCol("four").transform(makeBasicDF())
    }
  }
  // Fuzzing-test plumbing required by TransformerFuzzingTest.
  override def setParams(fitDataset: DataFrame, transformer: Transformer): Transformer =
    transformer.asInstanceOf[SelectColumns].setCols(fitDataset.columns)
  override def createDataset: DataFrame = makeBasicDF()
  override def schemaForDataset: StructType = ??? // not used by these tests
  override def getTransformer(): Transformer = new SelectColumns()
}
| rastala/mmlspark | src/pipeline-stages/src/test/scala/SelectColumnsSuite.scala | Scala | mit | 2,181 |
package play
import play.navigator._
import play.api.mvc._
// Stub RESTful resource controller: each action simply echoes its own name,
// so the routing tests can assert which endpoint a request was dispatched to.
object TestResource extends PlayResourcesController[Int] {
  def index() = Action { Ok("index") }
  def `new`() = Action { Ok("new") } // backticks: `new` is a Scala keyword
  def create() = Action { Ok("create") }
  def show(id: Int) = Action { Ok("show") }
  def edit(id: Int) = Action { Ok("edit") }
  def update(id: Int) = Action { Ok("update") }
  def delete(id: Int) = Action { Ok("delete") }
}
// Two structurally identical sub-routers used to test module mounting; each
// answers at its mount point's root and at foo/bar/<int>, identifying itself
// in the response body.
case class FirstModule(parent: PlayNavigator) extends PlayModule(parent) with Controller {
  val home = GET on root to (() => Action { Ok("FirstModule index") } )
  val foobar = GET on "foo" / "bar" / * to ((i: Int) => Action { Ok("FirstModule foo/bar/" + i) } )
}
case class SecondModule(parent: PlayNavigator) extends PlayModule(parent) with Controller {
  val home = GET on root to (() => Action { Ok("SecondModule index") } )
  val foobar = GET on "foo" / "bar" / * to ((i: Int) => Action { Ok("SecondModule foo/bar/" + i) } )
}
/**
 * Exhaustive route table exercising the navigator DSL: every HTTP method,
 * typed path parameters, catch-alls, extensions, redirects, mounted modules,
 * namespaces (with and without reverse routing) and RESTful resources.
 * Routes registered without a `val` are still added to the table; the `val`
 * form additionally enables reverse routing.
 */
object RoutesDefinition extends PlayNavigator with Controller {
  // import controllers._
  // Sub-modules mounted under their own path prefixes.
  val first = "first" --> FirstModule
  val second = "second" / "module" --> SecondModule
  // Shared handlers taking 0..3 String parameters.
  val fun0 = () => Action { Ok("index") }
  val fun1 = (a: String) => Action { Ok("index") }
  val fun2 = (a: String, b: String) => Action { Ok("index") }
  val fun3 = (a: String, b: String, c: String) => Action { Ok("index") }
  // Basic
  val home = GET on root to fun0
  val a = GET on "a" to fun0
  val ab = GET on "a" / "b" to fun0
  val abc = GET on "a" / "b" / "c" to fun0
  val abcd = GET on "a" / "b" / "c" / "d" to fun0
  val abcde = GET on "a" / "b" / "c" / "d" / "e" to fun0
  // Methods
  val mOptions = OPTIONS on "options" to fun0
  val mGet = GET on "get" to fun0
  val mHead = HEAD on "head" to fun0
  val mPost = POST on "post" to fun0
  val mPut = PUT on "put" to fun0
  val mDelete = DELETE on "delete" to fun0
  val mTrace = TRACE on "trace" to fun0
  val mConnect = CONNECT on "connect" to fun0
  val param1 = GET on "param1" / * to fun1
  val param2 = GET on "param2" / * / * to fun2
  val param3 = GET on "param3" / * / * / * to fun3
  // Catches /long/a/b/c/.../z
  // (val rather than var: the route is never reassigned)
  val catchall = GET on "catchall" / ** to fun1
  GET on "reallycatchall" / * / * / ** to ((a: Int, b: Int, s: String) => Action { Ok("catchall and more = " + a + " " + b + " " + s) })
  GET on "reallycatchall" / ** to ((s: String) => Action { Ok("catchall = " + s) })
  // Require extension: /ext/{param}.{ext}
  val extjson = GET on "ext" / * as "json" to fun1
  val extxml = GET on "ext" / * as "xml" to fun1
  // Redirect
  GET on "redirect-me" to redirect("http://google.com")
  GET on "xa" / "ya" to (() => Action { Ok("xa & ya") })
  // Same path, different parameter types: dispatch is tried in order.
  GET on "x" / * to ((x: Int) => Action { Ok("xint = " + x) })
  GET on "x" / * to ((x: Double) => Action { Ok("xdouble = " + x) })
  GET on "x" / * to ((x: String) => Action { Ok("xstring = " + x) })
  GET on "mext" / * as "json" to ((x: Int) => Action { Ok("mext json = " + x) })
  GET on "mext" / * as "xml" to ((x: Int) => Action { Ok("mext xml = " + x) })
  GET on "b" / * / * / * / * / * / * to (
    (a: Boolean, b: Boolean, c: Boolean, d: Boolean, e: Boolean, f: Boolean) =>
      Action { Ok("bool = " + List(a,b,c,d,e,f).mkString(" ")) }
  )
  // REST routes
  val res = resources("test-resources", TestResource)
  // Namespace ...
  namespace("api"){
    namespace("v1"){
      GET on "index" to fun0
    }
  }
  // ... or with reverse routing support
  val api = new Namespace("api"){
    val about = GET on "about" to fun0
    val v2 = new Namespace("v2"){
      val about = GET on "about" to fun0
    }
  }
  // and back to top-level namespace
  val afternamespace = GET on "about" to fun0
  // }
  // object PlayRoutesDefinition extends PlayNavigator {
  //   GET on "redirect-me" to redirect("http://google.com")
}
| teamon/play-navigator | src/test/scala/play/navigator/RoutesDefinition.scala | Scala | mit | 4,015 |
package bad.robot.radiate.ui.swing
import java.awt.BasicStroke.{CAP_BUTT, JOIN_MITER}
import java.awt.Color.darkGray
import java.awt._
object Debug {
  /** Traces the border of `region` using the graphics context's current colour and stroke. */
  def drawOutlineOfRegion(region: Rectangle, graphics: Graphics2D) {
    val (w, h) = (region.width, region.height)
    graphics.drawRect(region.x, region.y, w, h)
  }
  /**
   * Traces the border of `region` as a 1px dashed line in `color`. The
   * previous colour is restored afterwards; the stroke is not restored
   * (same as the historical behaviour).
   */
  def drawOutlineOfRegion(region: Rectangle, graphics: Graphics2D, color: Color) {
    val previous = graphics.getColor
    val dashed = new BasicStroke(1, CAP_BUTT, JOIN_MITER, 5, Array(5f), 0.0f)
    graphics.setColor(color)
    graphics.setStroke(dashed)
    graphics.drawRect(region.x, region.y, region.width, region.height)
    graphics.setColor(previous)
  }
  /** Centre lines in the default debug colour (dark gray). */
  def drawCentreLines(region: Rectangle, graphics: Graphics2D) {
    drawCentreLines(region, graphics, darkGray)
  }
  /**
   * Draws the diagonals and centre lines of `region` as 1px dashed lines in
   * `color`, restoring the previous colour (but not the stroke) afterwards.
   * NOTE(review): width/height are used as absolute end coordinates rather
   * than offsets from region.x/region.y, so the drawing is only the region's
   * diagonals/centre lines when the region is anchored at the origin —
   * confirm this is intentional before reusing for arbitrary regions.
   */
  def drawCentreLines(region: Rectangle, graphics: Graphics2D, color: Color) {
    val previous = graphics.getColor
    graphics.setColor(color)
    graphics.setStroke(new BasicStroke(1, CAP_BUTT, JOIN_MITER, 5, Array(5f), 0.0f))
    graphics.drawLine(region.x, region.y, region.width, region.height)
    graphics.drawLine(region.width, region.y, region.x, region.height)
    graphics.drawLine(region.x, region.height / 2, region.width, region.height / 2)
    graphics.drawLine(region.x + region.width / 2, region.y, region.x + region.width / 2, region.height)
    graphics.setColor(previous)
  }
}
package nl.malienkolders.htm.admin.snippet
import net.liftweb._
import http._
import mapper._
import util.Helpers._
import scala.xml.NodeSeq
import nl.malienkolders.htm.lib.model._
// Lift snippet backing the "new tournament" form on the tournament list page.
class TournamentList {
  def render = {
    // Mutable form state, written by the SHtml.text callbacks bound below.
    var name = ""
    var identifier = ""
    var label = ""
    // Submit handler: ensures at least one arena exists, persists the new
    // tournament, then redirects back to the list.
    def process(): Unit = {
      if (Arena.count < 1) {
        Arena.create.name("Arena 1").save()
      }
      val t = Tournament.create.identifier(identifier).name(name).mnemonic(label)
      t.save()
      S.redirectTo("/tournaments/list")
    }
    // CSS-selector bindings wiring the template's inputs to the state above.
    "#newTournament" #> (
      "#name" #> SHtml.text(name, name = _) &
      "#identifier" #> SHtml.text(identifier, identifier = _) &
      "#label" #> SHtml.text(label, label = _) &
      "#submitNewTournament" #> SHtml.submit("Submit", process))
  }
}
} | hema-tournament-manager/htm | htm-admin/src/main/scala/nl/malienkolders/htm/admin/snippet/TournamentList.scala | Scala | apache-2.0 | 831 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.suiteprop
import org.scalatest._
// SKIP-SCALATESTJS,NATIVE-START
import refspec.RefSpec
// SKIP-SCALATESTJS,NATIVE-END
import org.scalatest.{ freespec, funspec }
import org.scalatest.featurespec.AnyFeatureSpec
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.propspec.AnyPropSpec
import org.scalatest.wordspec.AnyWordSpec
/**
 * Suite-property examples verifying that ignoring the first of two tests
 * behaves consistently across every ScalaTest style trait (funsuite, funspec,
 * wordspec, flatspec, freespec, featurespec, propspec, plus fixture and path
 * variants). Each nested example class declares the same pair of tests —
 * "first test" ignored, "second test" live — in one particular style, and
 * `theTestNames` records the exact test names that style is expected to
 * report.
 */
class DeprecatedFirstTestIgnoredExamples extends org.scalatest.suiteprop.SuiteExamples {
  // Expected test names for the basic (unnested) styles; the traits below
  // override them for nested subjects and for must/can verbs.
  trait Services {
    val theTestNames = Vector("first test", "second test")
  }
  trait NestedTestNames extends Services {
    override val theTestNames = Vector("A subject should first test", "A subject should second test")
  }
  trait DeeplyNestedTestNames extends Services {
    override val theTestNames = Vector("A subject when created should first test", "A subject when created should second test")
  }
  trait NestedTestNamesWithMust extends Services {
    override val theTestNames = Vector("A subject must first test", "A subject must second test")
  }
  trait DeeplyNestedTestNamesWithMust extends Services {
    override val theTestNames = Vector("A subject when created must first test", "A subject when created must second test")
  }
  trait NestedTestNamesWithCan extends Services {
    override val theTestNames = Vector("A subject can first test", "A subject can second test")
  }
  trait DeeplyNestedTestNamesWithCan extends Services {
    override val theTestNames = Vector("A subject when created can first test", "A subject when created can second test")
  }
  // Fixture styles report the same names as their no-fixture counterparts.
  type FixtureServices = Services
  // SKIP-SCALATESTJS,NATIVE-START
  class SpecExample extends RefSpec with Services {
    @Ignore def `test first`: Unit = {}
    def `test second`: Unit = {}
    override val theTestNames = Vector("test first", "test second")
  }
  // SKIP-SCALATESTJS,NATIVE-END
  class FunSuiteExample extends AnyFunSuite with Services {
    ignore("first test") {}
    test("second test") {}
  }
  class FixtureFunSuiteExample extends StringFixtureFunSuite with Services {
    ignore("first test") { s => }
    test("second test") { s => }
  }
  class FunSpecExample extends AnyFunSpec with Services {
    ignore("first test") {}
    it("second test") {}
  }
  class NestedFunSpecExample extends AnyFunSpec with NestedTestNames {
    describe("A subject") {
      ignore("should first test") {}
      it("should second test") {}
    }
  }
  class DeeplyNestedFunSpecExample extends AnyFunSpec with DeeplyNestedTestNames {
    describe("A subject") {
      describe("when created") {
        ignore("should first test") {}
        it("should second test") {}
      }
    }
  }
  class FixtureFunSpecExample extends StringFixtureFunSpec with Services {
    ignore("first test") { s => }
    it("second test") { s => }
  }
  class NestedFixtureFunSpecExample extends StringFixtureFunSpec with NestedTestNames {
    describe("A subject") {
      ignore("should first test") { s => }
      it("should second test") { s => }
    }
  }
  class DeeplyNestedFixtureFunSpecExample extends StringFixtureFunSpec with DeeplyNestedTestNames {
    describe("A subject") {
      describe("when created") {
        ignore("should first test") { s => }
        it("should second test") { s => }
      }
    }
  }
  // Path styles re-instantiate themselves per test, hence newInstance.
  class PathFunSpecExample extends funspec.PathAnyFunSpec with Services {
    ignore("first test") {}
    it("second test") {}
    override def newInstance = new PathFunSpecExample
  }
  class NestedPathFunSpecExample extends funspec.PathAnyFunSpec with NestedTestNames {
    describe("A subject") {
      ignore("should first test") {}
      it("should second test") {}
    }
    override def newInstance = new NestedPathFunSpecExample
  }
  class DeeplyNestedPathFunSpecExample extends funspec.PathAnyFunSpec with DeeplyNestedTestNames {
    describe("A subject") {
      describe("when created") {
        ignore("should first test") {}
        it("should second test") {}
      }
    }
    override def newInstance = new DeeplyNestedPathFunSpecExample
  }
  class WordSpecExample extends AnyWordSpec with Services {
    "first test" ignore {}
    "second test" in {}
  }
  class NestedWordSpecExample extends AnyWordSpec with NestedTestNames {
    "A subject" should {
      "first test" ignore {}
      "second test" in {}
    }
  }
  class DeeplyNestedWordSpecExample extends AnyWordSpec with DeeplyNestedTestNames {
    "A subject" when {
      "created" should {
        "first test" ignore {}
        "second test" in {}
      }
    }
  }
  class FixtureWordSpecExample extends StringFixtureWordSpec with Services {
    "first test" ignore { s => }
    "second test" in { s => }
  }
  class NestedFixtureWordSpecExample extends StringFixtureWordSpec with NestedTestNames {
    "A subject" should {
      "first test" ignore { s => }
      "second test" in { s => }
    }
  }
  class DeeplyNestedFixtureWordSpecExample extends StringFixtureWordSpec with DeeplyNestedTestNames {
    "A subject" when {
      "created" should {
        "first test" ignore { s => }
        "second test" in { s => }
      }
    }
  }
  class NestedWordSpecWithMustExample extends AnyWordSpec with NestedTestNamesWithMust {
    "A subject" must {
      "first test" ignore {}
      "second test" in {}
    }
  }
  class DeeplyNestedWordSpecWithMustExample extends AnyWordSpec with DeeplyNestedTestNamesWithMust {
    "A subject" when {
      "created" must {
        "first test" ignore {}
        "second test" in {}
      }
    }
  }
  class NestedFixtureWordSpecWithMustExample extends StringFixtureWordSpec with NestedTestNamesWithMust {
    "A subject" must {
      "first test" ignore { s => }
      "second test" in { s => }
    }
  }
  class DeeplyNestedFixtureWordSpecWithMustExample extends StringFixtureWordSpec with DeeplyNestedTestNamesWithMust {
    "A subject" when {
      "created" must {
        "first test" ignore { s => }
        "second test" in { s => }
      }
    }
  }
  class NestedWordSpecWithCanExample extends AnyWordSpec with NestedTestNamesWithCan {
    "A subject" can {
      "first test" ignore {}
      "second test" in {}
    }
  }
  class DeeplyNestedWordSpecWithCanExample extends AnyWordSpec with DeeplyNestedTestNamesWithCan {
    "A subject" when {
      "created" can {
        "first test" ignore {}
        "second test" in {}
      }
    }
  }
  class NestedFixtureWordSpecWithCanExample extends StringFixtureWordSpec with NestedTestNamesWithCan {
    "A subject" can {
      "first test" ignore { s => }
      "second test" in { s => }
    }
  }
  class DeeplyNestedFixtureWordSpecWithCanExample extends StringFixtureWordSpec with DeeplyNestedTestNamesWithCan {
    "A subject" when {
      "created" can {
        "first test" ignore { s => }
        "second test" in { s => }
      }
    }
  }
  class FlatSpecExample extends AnyFlatSpec with Services {
    it should "first test" ignore {}
    it should "second test" in {}
    override val theTestNames = Vector("should first test", "should second test")
  }
  class SubjectFlatSpecExample extends AnyFlatSpec with NestedTestNames {
    behavior of "A subject"
    it should "first test" ignore {}
    it should "second test" in {}
  }
  class ShorthandSubjectFlatSpecExample extends AnyFlatSpec with NestedTestNames {
    "A subject" should "first test" ignore {}
    it should "second test" in {}
  }
  class FixtureFlatSpecExample extends StringFixtureFlatSpec with Services {
    it should "first test" ignore { s => }
    it should "second test" in { s => }
    override val theTestNames = Vector("should first test", "should second test")
  }
  class SubjectFixtureFlatSpecExample extends StringFixtureFlatSpec with NestedTestNames {
    behavior of "A subject"
    it should "first test" ignore { s => }
    it should "second test" in { s => }
  }
  class ShorthandSubjectFixtureFlatSpecExample extends StringFixtureFlatSpec with NestedTestNames {
    "A subject" should "first test" ignore { s => }
    it should "second test" in { s => }
  }
  class FlatSpecWithMustExample extends AnyFlatSpec with Services {
    it must "first test" ignore {}
    it must "second test" in {}
    override val theTestNames = Vector("must first test", "must second test")
  }
  class SubjectFlatSpecWithMustExample extends AnyFlatSpec with NestedTestNamesWithMust {
    behavior of "A subject"
    it must "first test" ignore {}
    it must "second test" in {}
  }
  class ShorthandSubjectFlatSpecWithMustExample extends AnyFlatSpec with NestedTestNamesWithMust {
    "A subject" must "first test" ignore {}
    it must "second test" in {}
  }
  class FixtureFlatSpecWithMustExample extends StringFixtureFlatSpec with Services {
    it must "first test" ignore { s => }
    it must "second test" in { s => }
    override val theTestNames = Vector("must first test", "must second test")
  }
  class SubjectFixtureFlatSpecWithMustExample extends StringFixtureFlatSpec with NestedTestNamesWithMust {
    behavior of "A subject"
    it must "first test" ignore { s => }
    it must "second test" in { s => }
  }
  class ShorthandSubjectFixtureFlatSpecWithMustExample extends StringFixtureFlatSpec with NestedTestNamesWithMust {
    "A subject" must "first test" ignore { s => }
    it must "second test" in { s => }
  }
  class FlatSpecWithCanExample extends AnyFlatSpec with Services {
    it can "first test" ignore {}
    it can "second test" in {}
    override val theTestNames = Vector("can first test", "can second test")
  }
  class SubjectFlatSpecWithCanExample extends AnyFlatSpec with NestedTestNamesWithCan {
    behavior of "A subject"
    it can "first test" ignore {}
    it can "second test" in {}
  }
  class ShorthandSubjectFlatSpecWithCanExample extends AnyFlatSpec with NestedTestNamesWithCan {
    "A subject" can "first test" ignore {}
    it can "second test" in {}
  }
  class FixtureFlatSpecWithCanExample extends StringFixtureFlatSpec with Services {
    it can "first test" ignore { s => }
    it can "second test" in { s => }
    override val theTestNames = Vector("can first test", "can second test")
  }
  class SubjectFixtureFlatSpecWithCanExample extends StringFixtureFlatSpec with NestedTestNamesWithCan {
    behavior of "A subject"
    it can "first test" ignore { s => }
    it can "second test" in { s => }
  }
  class ShorthandSubjectFixtureFlatSpecWithCanExample extends StringFixtureFlatSpec with NestedTestNamesWithCan {
    "A subject" can "first test" ignore { s => }
    it can "second test" in { s => }
  }
  class FreeSpecExample extends AnyFreeSpec with Services {
    "first test" ignore {}
    "second test" in {}
  }
  class NestedFreeSpecExample extends AnyFreeSpec with NestedTestNames {
    "A subject" - {
      "should first test" ignore {}
      "should second test" in {}
    }
  }
  class DeeplyNestedFreeSpecExample extends AnyFreeSpec with DeeplyNestedTestNames {
    "A subject" - {
      "when created" - {
        "should first test" ignore {}
        "should second test" in {}
      }
    }
  }
  class FixtureFreeSpecExample extends StringFixtureFreeSpec with Services {
    "first test" ignore { s => }
    "second test" in { s => }
  }
  class NestedFixtureFreeSpecExample extends StringFixtureFreeSpec with NestedTestNames {
    "A subject" - {
      "should first test" ignore { s => }
      "should second test" in { s => }
    }
  }
  class DeeplyNestedFixtureFreeSpecExample extends StringFixtureFreeSpec with DeeplyNestedTestNames {
    "A subject" - {
      "when created" - {
        "should first test" ignore { s => }
        "should second test" in { s => }
      }
    }
  }
  class PathFreeSpecExample extends freespec.PathAnyFreeSpec with Services {
    "first test" ignore {}
    "second test" in {}
    override def newInstance = new PathFreeSpecExample
  }
  class NestedPathFreeSpecExample extends freespec.PathAnyFreeSpec with NestedTestNames {
    "A subject" - {
      "should first test" ignore {}
      "should second test" in {}
    }
    override def newInstance = new NestedPathFreeSpecExample
  }
  class DeeplyNestedPathFreeSpecExample extends freespec.PathAnyFreeSpec with DeeplyNestedTestNames {
    "A subject" - {
      "when created" - {
        "should first test" ignore {}
        "should second test" in {}
      }
    }
    override def newInstance = new DeeplyNestedPathFreeSpecExample
  }
  class FeatureSpecExample extends AnyFeatureSpec with Services {
    ignore("first test") {}
    Scenario("second test") {}
    override val theTestNames = Vector("Scenario: first test", "Scenario: second test")
  }
  class NestedFeatureSpecExample extends AnyFeatureSpec with Services {
    Feature("A feature") {
      ignore("first test") {}
      Scenario("second test") {}
    }
    // NOTE(review): the second entry lacks the "Feature: " prefix the first
    // entry has — confirm this asymmetry is intended.
    override val theTestNames = Vector("Feature: A feature Scenario: first test", "A feature Scenario: second test")
  }
  class FixtureFeatureSpecExample extends StringFixtureFeatureSpec with Services {
    ignore("first test") { s => }
    Scenario("second test") { s => }
    override val theTestNames = Vector("Scenario: first test", "Scenario: second test")
  }
  class NestedFixtureFeatureSpecExample extends StringFixtureFeatureSpec with Services {
    Feature("A feature") {
      ignore("first test") { s => }
      Scenario("second test") { s => }
    }
    // NOTE(review): same prefix asymmetry as NestedFeatureSpecExample — confirm.
    override val theTestNames = Vector("Feature: A feature Scenario: first test", "A feature Scenario: second test")
  }
  class PropSpecExample extends AnyPropSpec with Services {
    ignore("first test") {}
    property("second test") {}
  }
  class FixturePropSpecExample extends StringFixturePropSpec with Services {
    ignore("first test") { s => }
    property("second test") { s => }
  }
  // One shared instance of each example style, consumed by SuiteExamples.
  // SKIP-SCALATESTJS,NATIVE-START
  lazy val spec = new SpecExample
  // SKIP-SCALATESTJS,NATIVE-END
  lazy val funSuite = new FunSuiteExample
  lazy val fixtureFunSuite = new FixtureFunSuiteExample
  lazy val funSpec = new FunSpecExample
  lazy val nestedFunSpec = new NestedFunSpecExample
  lazy val deeplyNestedFunSpec = new DeeplyNestedFunSpecExample
  lazy val fixtureFunSpec = new FixtureFunSpecExample
  lazy val nestedFixtureFunSpec = new NestedFixtureFunSpecExample
  lazy val deeplyNestedFixtureFunSpec = new DeeplyNestedFixtureFunSpecExample
  lazy val pathFunSpec = new PathFunSpecExample
  lazy val nestedPathFunSpec = new NestedPathFunSpecExample
  lazy val deeplyNestedPathFunSpec = new DeeplyNestedPathFunSpecExample
  lazy val wordSpec = new WordSpecExample
  lazy val nestedWordSpec = new NestedWordSpecExample
  lazy val deeplyNestedWordSpec = new DeeplyNestedWordSpecExample
  lazy val fixtureWordSpec = new FixtureWordSpecExample
  lazy val nestedFixtureWordSpec = new NestedFixtureWordSpecExample
  lazy val deeplyNestedFixtureWordSpec = new DeeplyNestedFixtureWordSpecExample
  lazy val nestedWordSpecWithMust = new NestedWordSpecWithMustExample
  lazy val deeplyNestedWordSpecWithMust = new DeeplyNestedWordSpecWithMustExample
  lazy val nestedFixtureWordSpecWithMust = new NestedFixtureWordSpecWithMustExample
  lazy val deeplyNestedFixtureWordSpecWithMust = new DeeplyNestedFixtureWordSpecWithMustExample
  lazy val nestedWordSpecWithCan = new NestedWordSpecWithCanExample
  lazy val deeplyNestedWordSpecWithCan = new DeeplyNestedWordSpecWithCanExample
  lazy val nestedFixtureWordSpecWithCan = new NestedFixtureWordSpecWithCanExample
  lazy val deeplyNestedFixtureWordSpecWithCan = new DeeplyNestedFixtureWordSpecWithCanExample
  lazy val flatSpec = new FlatSpecExample
  lazy val subjectFlatSpec = new SubjectFlatSpecExample
  lazy val shorthandSubjectFlatSpec = new ShorthandSubjectFlatSpecExample
  lazy val fixtureFlatSpec = new FixtureFlatSpecExample
  lazy val subjectFixtureFlatSpec = new SubjectFixtureFlatSpecExample
  lazy val shorthandSubjectFixtureFlatSpec = new ShorthandSubjectFixtureFlatSpecExample
  lazy val flatSpecWithMust = new FlatSpecWithMustExample
  lazy val subjectFlatSpecWithMust = new SubjectFlatSpecWithMustExample
  lazy val shorthandSubjectFlatSpecWithMust = new ShorthandSubjectFlatSpecWithMustExample
  lazy val fixtureFlatSpecWithMust = new FixtureFlatSpecWithMustExample
  lazy val subjectFixtureFlatSpecWithMust = new SubjectFixtureFlatSpecWithMustExample
  lazy val shorthandSubjectFixtureFlatSpecWithMust = new ShorthandSubjectFixtureFlatSpecWithMustExample
  lazy val flatSpecWithCan = new FlatSpecWithCanExample
  lazy val subjectFlatSpecWithCan = new SubjectFlatSpecWithCanExample
  lazy val shorthandSubjectFlatSpecWithCan = new ShorthandSubjectFlatSpecWithCanExample
  lazy val fixtureFlatSpecWithCan = new FixtureFlatSpecWithCanExample
  lazy val subjectFixtureFlatSpecWithCan = new SubjectFixtureFlatSpecWithCanExample
  lazy val shorthandSubjectFixtureFlatSpecWithCan = new ShorthandSubjectFixtureFlatSpecWithCanExample
  lazy val freeSpec = new FreeSpecExample
  lazy val nestedFreeSpec = new NestedFreeSpecExample
  lazy val deeplyNestedFreeSpec = new DeeplyNestedFreeSpecExample
  lazy val fixtureFreeSpec = new FixtureFreeSpecExample
  lazy val nestedFixtureFreeSpec = new NestedFixtureFreeSpecExample
  lazy val deeplyNestedFixtureFreeSpec = new DeeplyNestedFixtureFreeSpecExample
  lazy val pathFreeSpec = new PathFreeSpecExample
  lazy val nestedPathFreeSpec = new NestedPathFreeSpecExample
  lazy val deeplyNestedPathFreeSpec = new DeeplyNestedPathFreeSpecExample
  lazy val featureSpec = new FeatureSpecExample
  lazy val nestedFeatureSpec = new NestedFeatureSpecExample
  lazy val fixtureFeatureSpec = new FixtureFeatureSpecExample
  lazy val nestedFixtureFeatureSpec = new NestedFixtureFeatureSpecExample
  lazy val propSpec = new PropSpecExample
  lazy val fixturePropSpec = new FixturePropSpecExample
  // Two ways to ignore in a flat spec, so add two more examples
  override def examples = super.examples ++
    Vector(
      new FlatSpecExample2,
      new FixtureFlatSpecExample2,
      new FlatSpecWithMustExample2,
      new FixtureFlatSpecWithMustExample2,
      new FlatSpecWithCanExample2,
      new FixtureFlatSpecWithCanExample2
    )
  class FlatSpecExample2 extends AnyFlatSpec with Services {
    ignore should "first test" in {}
    it should "second test" in {}
    override val theTestNames = Vector("should first test", "should second test")
  }
  class FixtureFlatSpecExample2 extends StringFixtureFlatSpec with Services {
    ignore should "first test" in { s => }
    it should "second test" in { s => }
    override val theTestNames = Vector("should first test", "should second test")
  }
  class FlatSpecWithMustExample2 extends AnyFlatSpec with Services {
    ignore must "first test" in {}
    it must "second test" in {}
    override val theTestNames = Vector("must first test", "must second test")
  }
  class FixtureFlatSpecWithMustExample2 extends StringFixtureFlatSpec with Services {
    ignore must "first test" in { s => }
    it must "second test" in { s => }
    override val theTestNames = Vector("must first test", "must second test")
  }
  class FlatSpecWithCanExample2 extends AnyFlatSpec with Services {
    ignore can "first test" in {}
    it can "second test" in {}
    override val theTestNames = Vector("can first test", "can second test")
  }
  class FixtureFlatSpecWithCanExample2 extends StringFixtureFlatSpec with Services {
    ignore can "first test" in { s => }
    it can "second test" in { s => }
    override val theTestNames = Vector("can first test", "can second test")
  }
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/suiteprop/DeprecatedFirstTestIgnoredExamples.scala | Scala | apache-2.0 | 20,223 |
package io.github.jpivarski
import scala.language.experimental.macros
import scala.language.postfixOps
import scala.reflect.macros.Context
import scala.annotation.StaticAnnotation
package object phases {
// only one annotation/macro in this package: "declare"; the rest are fake (syntax-only) annotations that get removed before compilation
// The only real macro annotation in this package: expands a state-machine
// declaration from (fromPhase -> toPhase) transition pairs; `debug = true`
// prints the expanded tree during compilation.
class declare(transitions: Tuple2[Phase, Phase]*)(debug: Boolean = false) extends StaticAnnotation {
  def macroTransform(annottees: Any*): Any = macro declare_impl
}
// ... but in some contexts (the console, for instance), the interpreter/compiler tries to resolve all annotations to real, existing classes before evaluating declare, so we need to give the user a way to assign dummy indexes
// Syntax-only marker annotation: it must resolve to a real class so the
// interpreter/compiler can typecheck it, but its macro expansion is a no-op
// (see Phase.impl below).
class Phase extends StaticAnnotation {
  def macroTransform(annottees: Any*): Any = macro Phase.impl
}
object Phase {
  def apply() = new Phase
  // Macro implementation for the fake @Phase annotation: expands to the
  // annottees unchanged, dropping only a leading val/type definition the
  // annotation was syntactically attached to (when others follow it).
  def impl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = {
    // applies to anything and does nothing
    import c.universe._
    val expandees =
      annottees.map(_.tree).toList match {
        case (_: ValDef) :: (rest @ (_ :: _)) => rest
        case (_: TypeDef) :: (rest @ (_ :: _)) => rest
        case x => x
      }
    // Expansion result: the surviving trees followed by unit.
    c.Expr[Any](Block(expandees, Literal(Constant(()))))
  }
}
def declare_impl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = {
import c.universe._
// incidentally, this is a purely functional algorithm... it just worked out that way
// get (what we think is the) class definition and maybe its companion object (for static declarations)
val companionPair = annottees.map(_.tree).toList
val classDefList = companionPair collect {case x: ClassDef => x}
// make sure this annotation is applied to a class
if (classDefList.size != 1)
c.error(c.enclosingPosition, "@phases.declare can only be used on classes")
val classDef = classDefList.head
val ClassDef(_, className, _, _) = classDef
// if there's no companion object, make one
val companionObjectDef = companionPair.collect({case x: ModuleDef => x}).
headOption.getOrElse(q"object ${newTermName(className.toString)}")
// extracting constructor arguments is a little tricky because all we have is the AST, not their evaluated values
val Apply(_, lastArgsList) = c.prefix.tree
val penultimateArgsList = c.prefix.tree collect {
case Apply(Apply(_, x), _) => x
} flatten
// determine if the user wants to debug their code (view the expanded form on the terminal during compilation)
val debugAsNamedArg = lastArgsList.collect({
case AssignOrNamedArg(Ident(x), Literal(Constant(y: java.lang.Boolean))) if (x.toString == "debug") =>
y.booleanValue
}).headOption
val debugAsFirstArg = lastArgsList.headOption.collect({
case Literal(Constant(y: java.lang.Boolean)) => y.booleanValue
})
val debug = debugAsNamedArg.getOrElse(debugAsFirstArg.getOrElse(false))
// walk through the annotation's arguments to find the transitions
// they should have the form State1 -> State2 and always go between distinct states (no reflexive)
val transitions = lastArgsList ++ penultimateArgsList collect {
case Apply(Select(x, y), z :: Nil) if (y.toString == "$minus$greater") =>
val from = x.toString
val to = z.toString
if (from == to)
c.error(c.enclosingPosition, "@phases.declare transition must go between two distinct states, not " + from + " -> " + to)
(from, to)
}
if (transitions.isEmpty)
c.error(c.enclosingPosition, "@phases.declare requires at least one transition")
// the phase names are distinct beginning or endpoints of the transitions
// by the above logic, we know that there are at least two of these
val phaseNames = (transitions.map(_._1) ++ transitions.map(_._2)).distinct
// ClassParam and classParams keep track of the class's parameter list for later use in making transition functions (only; not used for anything else)
case class ClassParam(flags: FlagSet, privateWithin: Name, partitionedAnnotations: (List[Tree], List[Tree]), paramName: TermName, tpt: Tree, rhs: Tree) {
def has(phase: String) = (partitionedAnnotations._1.isEmpty) || (partitionedAnnotations._1 exists {
case Apply(Select(New(x), _), Nil) if (phase == x.toString) => true
case _ => false
})
def toParameter = ValDef(Modifiers(flags, privateWithin, partitionedAnnotations._2), paramName, tpt, rhs)
def toArgument = Ident(paramName)
}
val classParams = {
val ClassDef(_, _, _, Template(_, _, body)) = classDef
body collect {case DefDef(_, methodName, _, vparamss, _, _) if (methodName.toString == "<init>") =>
vparamss flatMap {_ collect {case ValDef(mods, paramName, tpt, rhs) =>
ClassParam(
mods.flags,
mods.privateWithin,
mods.annotations partition {
case Apply(Select(New(x), _), Nil) if (phaseNames contains x.toString) => true
case _ => false
},
paramName,
tpt,
rhs)
}}} flatten
}
// for making transition functions between phases (this is MORE difficult to express as a quasiquote than a raw AST)
    // for making transition functions between phases (this is MORE difficult to express as a quasiquote than a raw AST)
    // Builds a method of the shape `def to<To>(newFields...) = new <className>.<To>(allToFields...)`:
    // the method's parameters are exactly the fields that exist in the `to` phase but not in
    // `from`, while the constructor call forwards every field of the `to` phase.
    def makeTransitionMethod(from: String, to: String): Tree = {
      val methodArgs = classParams filter {x => x.has(to) && !x.has(from)} map {_.toParameter}
      val constructorArgs = classParams filter {x => x.has(to)} map {_.toArgument}
      DefDef(
        Modifiers(),
        // method name is "to" + capitalized phase name, e.g. "toState2"
        newTermName("to" + to.head.toUpper + to.tail),
        List(),
        List(methodArgs),
        TypeTree(),  // return type left for the typer to infer
        Apply(Select(New(Select(Ident(newTermName(className.toString)), newTypeName(to))), nme.CONSTRUCTOR), constructorArgs))
    }
// the main Transformer class for removing fake annotations and definitions that are (fake-)annotated for a subclass
// when phasesToKeep is None, we're building the superclass (no phases)
// when phasesToKeep is Some(phase), we're building a subclass (one phase)
    class ClassDefTransformer(phaseToKeep: Option[String]) extends Transformer {
      // Extracts the phase name from a phase-annotation tree (e.g. `@State1`); defined
      // only for annotation nodes whose type name is one of the declared phase names.
      val getPhases = {
        case Apply(Select(New(x), _), Nil) if (phaseNames contains x.toString) => x.toString
      }: PartialFunction[Tree, String]
      // ValDefs can be parameters, vals, or vars; this function handles a variety of cases with boolean switches
      def transformValDef(valDef: ValDef, includeGeneral: Boolean, includeSpecific: Boolean): Tree = {
        val phases = valDef.mods.annotations collect getPhases
        val otherAnnotations = valDef.mods.annotations filter {!getPhases.isDefinedAt(_)}
        // no phase annotations (or all of them) means the member belongs to every phase
        val isGeneral = (phases.isEmpty || phases.size == phaseNames.size)
        val isSpecific = phaseToKeep match {
          case None => false
          case Some(phase) => (!isGeneral && phases.contains(phase))
        }
        // keep the ValDef (with phase annotations stripped) or drop it entirely
        if ((includeGeneral && isGeneral) || (includeSpecific && isSpecific))
          ValDef(
            transformModifiers(Modifiers(valDef.mods.flags, valDef.mods.privateWithin, otherAnnotations)),
            valDef.name,
            transform(valDef.tpt),
            transform(valDef.rhs))
        else
          EmptyTree
      }
      // the <init> methods for subclasses are very simple; however, this is still easier to express as an AST than a quasiquote
      // Rewrites the superclass-constructor call inside a subclass <init> so that it
      // forwards exactly the parameters named in `args`.
      def transformInit(tree: Tree, args: List[String]): Tree = tree match {
        case Block(Apply(fun, _) :: Nil, expr) => transform(Block(Apply(fun, args map {x => Ident(newTermName(x))}) :: Nil, expr))
        case x => transform(tree)
      }
      // the main transform function
      override def transform(tree: Tree): Tree = tree match {
        // several things happen to the ClassDef:
        // (1) constructor code (all non ValDef at this level) is removed from subclasses (to avoid double-execution when they call their superclass's constructor)
        // (2) transition methods are added
        // (3) subclasses get named after the phase they represent
        // (4) subclasses point to the superclass as their parent
        case ClassDef(mods, name, tparams, Template(parents, self, body)) =>
          val basicTransform = body map {transform(_)} filter {_ != EmptyTree}
          val transformedBody = phaseToKeep match {
            case Some(phase) =>
              // transition methods leaving `phase`, one per outgoing edge in `transitions`
              val transitionMethods = transitions filter {_._1 == phase} map {case (from, to) => makeTransitionMethod(from, to)}
              // drop everything but the field and methods declarations (to execute constructor code in superclass only once)
              // and then add transition methods
              (basicTransform collect {case x: ValDef => x; case x: DefDef => x}) ++ transitionMethods
            case None =>
              basicTransform
          }
          ClassDef(
            transformModifiers(mods),
            phaseToKeep match {
              case None => name // the superclass should keep its original name
              case Some(phase) => newTypeName(phase) // the subclasses should be named after their phase
            },
            transformTypeDefs(tparams),
            // NOTE: `transformValDef(self)` (one argument) resolves to the inherited
            // Transformer method, not the local three-argument overload above.
            Template(phaseToKeep match {
              case None => parents // the superclass should keep its original list of superclasses
              case Some(phase) => List(Ident(name)) // the subclasses should point to the superclass
            }, transformValDef(self), transformedBody))
        // the ValDef case is already handled in the transformValDef method above
        case x: ValDef => transformValDef(x, true, phaseToKeep != None)
        // DefDefs are function definitions, including the constructor (named <init>).
        case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
          val phases = mods.annotations collect getPhases
          val otherAnnotations = mods.annotations filter {!getPhases.isDefinedAt(_)}
          // parameter lists are filtered the same way fields are (phase-specific params kept only for their phase)
          val transformedVparamss = vparamss map {_ map {x => transformValDef(x, true, phaseToKeep != None)} collect {case y: ValDef => y}}
          val isGeneral = (phases.isEmpty || phases.size == phaseNames.size)
          val isSpecific = phaseToKeep match {
            case None => false
            case Some(phase) => (!isGeneral && phases.contains(phase))
          }
          // constructors are always kept; other methods are kept per their phase annotations
          if (name.toString == "<init>" || (phaseToKeep == None && isGeneral) || (phaseToKeep != None && isSpecific))
            DefDef(
              transformModifiers(Modifiers(mods.flags, mods.privateWithin, otherAnnotations)),
              name,
              transformTypeDefs(tparams),
              transformedVparamss,
              transform(tpt),
              if (name.toString == "<init>" && phaseToKeep != None)
                // subclass constructor: forward the (filtered) parameter names to the superclass call
                transformInit(rhs, vparamss flatMap {_ map {transformValDef(_, true, false)} collect {case ValDef(_, n, _, _) => n.toString}})
              else
                transform(rhs))
          else
            EmptyTree
        // all other cases refer to the general Transformer for a simple walk
        case _ => super.transform(tree)
      }
    }
// transform the original class to make the superclass
val superclassDef = (new ClassDefTransformer(None)).transform(classDef)
// transform the original class to make subclasses, one for each phase
val phaseDefs = phaseNames map {n => (new ClassDefTransformer(Some(n))).transform(classDef)}
// insert the subclasses into the companion object so that they are statically declared
val companionWithSubclasses = {
val ModuleDef(mods, name, Template(parents, self, body)) = companionObjectDef
ModuleDef(mods, name, Template(parents, self, body ++ phaseDefs))
}
// optionally print out what we've done
if (debug) {
println(show(superclassDef))
println(show(companionWithSubclasses))
}
// and send it to the Scala compiler
c.Expr[Any](Block(List(superclassDef, companionWithSubclasses), Literal(Constant(()))))
}
}
| jpivarski/phases | project_2.10.5/src/main/scala/io/github/jpivarski/phases.scala | Scala | apache-2.0 | 12,125 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.expressions
import org.apache.flink.table.api.DataTypes
import org.apache.flink.table.api.scala._
import org.apache.flink.table.planner.expressions.utils.ArrayTypeTestBase
import org.apache.flink.table.planner.utils.DateTimeTestUtil.{localDate, localDateTime, localTime => gLocalTime}
import org.junit.Test
/**
 * Tests for the ARRAY type across the Table API (Scala expressions and expression
 * strings) and SQL: literal construction, field access/indexing, built-in array
 * operations (CARDINALITY, ELEMENT, comparison) and casting.
 *
 * Cleanups vs. the previous revision: two byte-identical duplicate
 * `testAllApis` invocations were removed from `testArrayField`, and a
 * superfluous named argument (`Array(x = true)`) was simplified.
 */
class ArrayTypeTest extends ArrayTypeTestBase {

  @Test
  def testArrayLiterals(): Unit = {
    // primitive literals
    testAllApis(array(1, 2, 3), "array(1, 2, 3)", "ARRAY[1, 2, 3]", "[1, 2, 3]")

    testAllApis(
      array(true, true, true),
      "array(true, true, true)",
      "ARRAY[TRUE, TRUE, TRUE]",
      "[true, true, true]")

    // object literals
    testTableApi(array(BigDecimal(1), BigDecimal(1)), "array(1p, 1p)", "[1, 1]")

    testAllApis(
      array(array(array(1), array(1))),
      "array(array(array(1), array(1)))",
      "ARRAY[ARRAY[ARRAY[1], ARRAY[1]]]",
      "[[[1], [1]]]")

    // expressions inside the literal are evaluated
    testAllApis(
      array(1 + 1, 3 * 3),
      "array(1 + 1, 3 * 3)",
      "ARRAY[1 + 1, 3 * 3]",
      "[2, 9]")

    // null elements (NULLIF(1,1) produces NULL on the SQL side)
    testAllApis(
      array(nullOf(DataTypes.INT), 1),
      "array(Null(INT), 1)",
      "ARRAY[NULLIF(1,1), 1]",
      "[null, 1]")

    testAllApis(
      array(array(nullOf(DataTypes.INT), 1)),
      "array(array(Null(INT), 1))",
      "ARRAY[ARRAY[NULLIF(1,1), 1]]",
      "[[null, 1]]")

    // implicit conversion from Scala arrays
    testTableApi(
      Array(1, 2, 3),
      "array(1, 2, 3)",
      "[1, 2, 3]")

    testTableApi(
      Array[Integer](1, 2, 3),
      "array(1, 2, 3)",
      "[1, 2, 3]")

    testAllApis(
      Array(localDate("1985-04-11"), localDate("2018-07-26")),
      "array('1985-04-11'.toDate, '2018-07-26'.toDate)",
      "ARRAY[DATE '1985-04-11', DATE '2018-07-26']",
      "[1985-04-11, 2018-07-26]")

    testAllApis(
      Array(gLocalTime("14:15:16"), gLocalTime("17:18:19")),
      "array('14:15:16'.toTime, '17:18:19'.toTime)",
      "ARRAY[TIME '14:15:16', TIME '17:18:19']",
      "[14:15:16, 17:18:19]")

    testAllApis(
      Array(localDateTime("1985-04-11 14:15:16"), localDateTime("2018-07-26 17:18:19")),
      "array('1985-04-11 14:15:16'.toTimestamp, '2018-07-26 17:18:19'.toTimestamp)",
      "ARRAY[TIMESTAMP '1985-04-11 14:15:16', TIMESTAMP '2018-07-26 17:18:19']",
      "[1985-04-11 14:15:16.000, 2018-07-26 17:18:19.000]")

    testAllApis(
      Array(BigDecimal(2.0002), BigDecimal(2.0003)),
      "Array(2.0002p, 2.0003p)",
      "ARRAY[CAST(2.0002 AS DECIMAL(10,4)), CAST(2.0003 AS DECIMAL(10,4))]",
      "[2.0002, 2.0003]")

    testAllApis(
      Array(Array(true)),
      "Array(Array(true))",
      "ARRAY[ARRAY[TRUE]]",
      "[[true]]")

    testAllApis(
      Array(Array(1, 2, 3), Array(3, 2, 1)),
      "Array(Array(1, 2, 3), Array(3, 2, 1))",
      "ARRAY[ARRAY[1, 2, 3], ARRAY[3, 2, 1]]",
      "[[1, 2, 3], [3, 2, 1]]")

    // implicit type cast only works on SQL APIs.
    testSqlApi("ARRAY[CAST(1 AS DOUBLE), CAST(2 AS FLOAT)]", "[1.0, 2.0]")
  }

  @Test
  def testArrayField(): Unit = {
    // constructing an array from (possibly null) fields
    testAllApis(
      array('f0, 'f1),
      "array(f0, f1)",
      "ARRAY[f0, f1]",
      "[null, 42]")

    // reading array-typed fields directly
    testAllApis(
      'f2,
      "f2",
      "f2",
      "[1, 2, 3]")

    testAllApis(
      'f3,
      "f3",
      "f3",
      "[1984-03-12, 1984-02-10]")

    testAllApis(
      'f5,
      "f5",
      "f5",
      "[[1, 2, 3], null]")

    testAllApis(
      'f6,
      "f6",
      "f6",
      "[1, null, null, 4]")

    // element access (1-based indexing)
    testAllApis(
      'f2.at(1),
      "f2.at(1)",
      "f2[1]",
      "1")

    testAllApis(
      'f3.at(1),
      "f3.at(1)",
      "f3[1]",
      "1984-03-12")

    testAllApis(
      'f3.at(2),
      "f3.at(2)",
      "f3[2]",
      "1984-02-10")

    testAllApis(
      'f5.at(1).at(2),
      "f5.at(1).at(2)",
      "f5[1][2]",
      "2")

    // accessing inside a null nested array yields null
    testAllApis(
      'f5.at(2).at(2),
      "f5.at(2).at(2)",
      "f5[2][2]",
      "null")

    testAllApis(
      'f4.at(2).at(2),
      "f4.at(2).at(2)",
      "f4[2][2]",
      "null")

    testAllApis(
      'f11.at(1),
      "f11.at(1)",
      "f11[1]",
      "1")
  }

  @Test
  def testArrayOperations(): Unit = {
    // cardinality
    testAllApis(
      'f2.cardinality(),
      "f2.cardinality()",
      "CARDINALITY(f2)",
      "3")

    testAllApis(
      'f4.cardinality(),
      "f4.cardinality()",
      "CARDINALITY(f4)",
      "null")

    testAllApis(
      'f11.cardinality(),
      "f11.cardinality()",
      "CARDINALITY(f11)",
      "1")

    // element
    testAllApis(
      'f9.element(),
      "f9.element()",
      "ELEMENT(f9)",
      "1")

    testAllApis(
      'f8.element(),
      "f8.element()",
      "ELEMENT(f8)",
      "4.0")

    testAllApis(
      'f10.element(),
      "f10.element()",
      "ELEMENT(f10)",
      "null")

    testAllApis(
      'f4.element(),
      "f4.element()",
      "ELEMENT(f4)",
      "null")

    testAllApis(
      'f11.element(),
      "f11.element()",
      "ELEMENT(f11)",
      "1")

    // comparison
    testAllApis(
      'f2 === 'f5.at(1),
      "f2 === f5.at(1)",
      "f2 = f5[1]",
      "true")

    testAllApis(
      'f6 === array(1, 2, 3),
      "f6 === array(1, 2, 3)",
      "f6 = ARRAY[1, 2, 3]",
      "false")

    testAllApis(
      'f2 !== 'f5.at(1),
      "f2 !== f5.at(1)",
      "f2 <> f5[1]",
      "false")

    testAllApis(
      'f2 === 'f7,
      "f2 === f7",
      "f2 = f7",
      "false")

    testAllApis(
      'f2 !== 'f7,
      "f2 !== f7",
      "f2 <> f7",
      "true")

    testAllApis(
      'f11 === 'f11,
      "f11 === f11",
      "f11 = f11",
      "true")

    testAllApis(
      'f11 === 'f9,
      "f11 === f9",
      "f11 = f9",
      "true")

    testAllApis(
      'f11 !== 'f11,
      "f11 !== f11",
      "f11 <> f11",
      "false")

    testAllApis(
      'f11 !== 'f9,
      "f11 !== f9",
      "f11 <> f9",
      "false")
  }

  @Test
  def testArrayTypeCasting(): Unit = {
    testTableApi(
      'f3.cast(DataTypes.ARRAY(DataTypes.DATE)),
      "f3.cast(OBJECT_ARRAY(SQL_DATE))",
      "[1984-03-12, 1984-02-10]"
    )
  }
}
| fhueske/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/expressions/ArrayTypeTest.scala | Scala | apache-2.0 | 7,057 |
/*
Copyright 2012 Georgia Tech Research Institute
Author: lance.gatlin@gtri.gatech.edu
This file is part of org.gtri.util.scala.statemachine library.
org.gtri.util.scala.statemachine library is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
org.gtri.util.scala.statemachine library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with org.gtri.util.scala.statemachine library. If not, see <http://www.gnu.org/licenses/>.
*/
package org.gtri.util.scala.statemachine
/** Severity levels for reported issues, ordered from least (DEBUG) to most (FATAL) severe. */
object IssueSeverityCode extends Enumeration {
  // Alias so callers can refer to the enum's value type as `IssueSeverityCode.IssueSeverityCode`.
  type IssueSeverityCode = Value
  // One declaration per line; ids are assigned in declaration order (DEBUG = 0 ... FATAL = 4)
  // and each value's name is derived from the val it is assigned to.
  val DEBUG: Value = Value
  val INFO: Value = Value
  val WARN: Value = Value
  val ERROR: Value = Value
  val FATAL: Value = Value
}
| gtri-iead/org.gtri.util.scala | statemachine/src/main/scala/org/gtri/util/scala/statemachine/IssueSeverityCode.scala | Scala | gpl-3.0 | 1,076 |
package org.http4s
package client
package middleware
import cats.effect._
import fs2.io.readInputStream
import org.http4s.Uri.uri
import org.http4s.dsl.io._
import scala.io.Source
/**
* Common Tests for Logger, RequestLogger, and ResponseLogger
*/
class LoggerSpec extends Http4sSpec {
  // In-memory app under test: /post echoes the request body, /request returns a
  // fixed string, everything else is 404.
  val testApp = HttpApp[IO] {
    case req @ POST -> Root / "post" =>
      Ok(req.body)
    case GET -> Root / "request" =>
      Ok("request response")
    case _ =>
      NotFound()
  }
  // `def`, so every call opens a fresh InputStream on the classpath resource.
  // NOTE(review): these streams are never explicitly closed — acceptable in a
  // short-lived test, but worth confirming.
  def testResource = getClass.getResourceAsStream("/testresource.txt")
  def body: EntityBody[IO] =
    readInputStream[IO](IO.pure(testResource), 4096, testBlocker)
  // Full file contents, used as the expected echo of /post.
  val expectedBody: String = Source.fromInputStream(testResource).mkString
  "ResponseLogger" should {
    // log both headers and bodies of responses
    val responseLoggerClient =
      ResponseLogger(true, true)(Client.fromHttpApp(testApp))
    "not affect a Get" in {
      val req = Request[IO](uri = uri("/request"))
      responseLoggerClient.status(req).unsafeRunSync() must_== Status.Ok
    }
    "not affect a Post" in {
      val req = Request[IO](uri = uri("/post"), method = POST).withBodyStream(body)
      val res = responseLoggerClient.expect[String](req)
      res.unsafeRunSync() must_== expectedBody
    }
  }
  "RequestLogger" should {
    // log both headers and bodies of requests
    val requestLoggerClient = RequestLogger.apply(true, true)(Client.fromHttpApp(testApp))
    "not affect a Get" in {
      val req = Request[IO](uri = uri("/request"))
      requestLoggerClient.status(req).unsafeRunSync() must_== Status.Ok
    }
    "not affect a Post" in {
      val req = Request[IO](uri = uri("/post"), method = POST).withBodyStream(body)
      val res = requestLoggerClient.expect[String](req)
      res.unsafeRunSync() must_== expectedBody
    }
  }
  "Logger" should {
    // combined request+response logging, exercised through the HttpApp view of the client
    val loggerApp =
      Logger(true, true)(Client.fromHttpApp(testApp)).toHttpApp
    "not affect a Get" in {
      val req = Request[IO](uri = uri("/request"))
      loggerApp(req) must returnStatus(Status.Ok)
    }
    "not affect a Post" in {
      val req = Request[IO](uri = uri("/post"), method = POST).withBodyStream(body)
      val res = loggerApp(req)
      res must returnStatus(Status.Ok)
      res must returnBody(expectedBody)
    }
  }
}
| aeons/http4s | client/src/test/scala/org/http4s/client/middleware/LoggerSpec.scala | Scala | apache-2.0 | 2,216 |
package com.cloudera.hue.livy.repl
import com.cloudera.hue.livy.Logging
import com.cloudera.hue.livy.msgs.ExecuteRequest
import com.fasterxml.jackson.core.JsonParseException
import org.json4s.{DefaultFormats, MappingException}
import org.scalatra._
import org.scalatra.json.JacksonJsonSupport
import _root_.scala.concurrent.{Future, ExecutionContext}
// Companion object used only to obtain a `Logging` instance (see the error handler in the servlet).
object WebApp extends Logging
// JSON HTTP front-end for a single REPL `Session`: exposes session state, code
// execution, execution history, and shutdown.
class WebApp(session: Session) extends ScalatraServlet with FutureSupport with JacksonJsonSupport {
  override protected implicit def executor: ExecutionContext = ExecutionContext.global
  override protected implicit val jsonFormats = DefaultFormats
  // Every request returns JSON; reject all requests once the session is shutting down.
  before() {
    contentType = formats("json")
    session.state match {
      case Session.ShuttingDown() => halt(500, "Shutting down")
      case _ => {}
    }
  }
  // GET / — report the session's lifecycle state as a snake_case string.
  get("/") {
    val state = session.state match {
      case Session.NotStarted() => "not_started"
      case Session.Starting() => "starting"
      case Session.Idle() => "idle"
      case Session.Busy() => "busy"
      case Session.Error() => "error"
      case Session.ShuttingDown() => "shutting_down"
      case Session.ShutDown() => "shut_down"
    }
    Map("state" -> state)
  }
  // POST /execute — run the submitted code asynchronously and reply when it completes.
  post("/execute") {
    val req = parsedBody.extract[ExecuteRequest]
    val rep = session.execute(req.code)
    new AsyncResult { val is = rep }
  }
  // GET /history — full execution history of the session.
  get("/history") {
    session.history()
  }
  // GET /history/:statementId — a single history entry, 404 if absent.
  // NOTE(review): `params("statementId").toInt` throws on non-numeric input and
  // falls through to the generic error handler — confirm that is intended.
  get("/history/:statementId") {
    val statementId = params("statementId").toInt
    session.history(statementId) match {
      case Some(statement) => statement
      case None => NotFound("Statement not found")
    }
  }
  // DELETE / — close the session and terminate the JVM; the 1s delay is
  // presumably to let this HTTP response be written first — TODO confirm.
  delete("/") {
    session.close()
    Future {
      Thread.sleep(1000)
      System.exit(0)
    }
  }
  // Map JSON parse/mapping problems to 400; everything else to 500 (and log it).
  error {
    case e: JsonParseException => BadRequest(e.getMessage)
    case e: MappingException => BadRequest(e.getMessage)
    case e =>
      WebApp.error("internal error", e)
      InternalServerError(e.toString)
  }
}
| nvoron23/hue | apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/WebApp.scala | Scala | apache-2.0 | 1,951 |
package com.crealytics.google.analytics
import org.apache.spark.sql.sources.DataSourceRegister
// Spark 1.5+ data-source registration: `shortName` supplies the alias under which
// Spark's DataSourceRegister service lookup exposes this source
// (e.g. `spark.read.format("google-analytics")`).
class DefaultSource15 extends DefaultSource with DataSourceRegister {
  override def shortName(): String = "google-analytics"
}
| crealytics/spark-google-analytics | src/main/scala/com/crealytics/google/analytics/DefaultSource15.scala | Scala | apache-2.0 | 225 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package utils
import leon.purescala._
import leon.purescala.Definitions.Program
import leon.solvers.isabelle.AdaptationPhase
import leon.verification.InjectAsserts
import leon.xlang.{NoXLangFeaturesChecking, XLangDesugaringPhase, XLangCleanupPhase}
// Pipeline of preprocessing passes applied to a Leon `Program` before analysis.
// When `genc` is true (C code generation), the xlang desugaring and the
// assert/closure/adaptation passes are skipped.
class PreprocessingPhase(genc: Boolean = false) extends LeonPhase[Program, Program] {

  val name = "preprocessing"
  val description = "Various preprocessings on Leon programs"

  override def run(ctx: LeonContext, p: Program): (LeonContext, Program) = {

    // Debug pass that prints the tree, enabled only when the trees debug section is on.
    def debugTrees(title: String) =
      PrintTreePhase(title).when(ctx.reporter.isDebugEnabled(DebugSectionTrees))

    val pipeBegin =
      debugTrees("Program after extraction") andThen
      MethodLifting andThen
      TypingPhase andThen
      xlang.EffectsChecking andThen
      synthesis.ConversionPhase

    // Inlining is disabled under lazy evaluation (it would defeat laziness).
    val pipeBeginWithInlining =
      if(ctx.findOptionOrDefault(Main.MainComponent.optLazyEval)) {
        // here disable inlining
        pipeBegin
      } else pipeBegin andThen InliningPhase

    // Do not desugar xlang when generating C code
    val pipeX = (
      XLangDesugaringPhase andThen
      debugTrees("Program after xlang desugaring")
    ) when (!genc)

    def pipeEnd = (
      InjectAsserts andThen
      FunctionClosure andThen
      //XLangCleanupPhase andThen
      AdaptationPhase
    ) when (!genc)

    val phases =
      pipeBeginWithInlining andThen
      pipeX andThen
      pipeEnd andThen
      debugTrees("Program after pre-processing")

    phases.run(ctx, p)
  }
}
| epfl-lara/leon | src/main/scala/leon/utils/PreprocessingPhase.scala | Scala | gpl-3.0 | 1,667 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import scala.Predef.$conforms
import slamdata.Predef._
import org.scalacheck.{Gen, Arbitrary, Shrink}
import scalaz._, Scalaz._
import scalaz.scalacheck.ScalaCheckBinding._
package object scalacheck {
  /** Arbitrary `NonEmptyList` whose tail has at most `n` generated elements
    * (so the list holds at most `n + 1` elements in total). */
  def nonEmptyListSmallerThan[A: Arbitrary](n: Int): Arbitrary[NonEmptyList[A]] = {
    val listGen = Gen.containerOfN[List,A](n, Arbitrary.arbitrary[A])
    Apply[Arbitrary].apply2[A, List[A], NonEmptyList[A]](Arbitrary(Arbitrary.arbitrary[A]), Arbitrary(listGen))((a, rest) =>
      NonEmptyList.nel(a, IList.fromList(rest)))
  }

  /** Arbitrary `List` with at most `n` elements. */
  def listSmallerThan[A: Arbitrary](n: Int): Arbitrary[List[A]] =
    Arbitrary(Gen.containerOfN[List,A](n, Arbitrary.arbitrary[A]))

  /** Shrink `IList` by shrinking through the corresponding `List`. */
  implicit def shrinkIList[A](implicit s: Shrink[List[A]]): Shrink[IList[A]] =
    Shrink(as => s.shrink(as.toList).map(IList.fromFoldable(_)))

  /** Shrink `ISet` by shrinking through the corresponding `Set`. */
  implicit def shrinkISet[A: Order](implicit s: Shrink[Set[A]]): Shrink[ISet[A]] =
    Shrink(as => s.shrink(as.toSet).map(ISet.fromFoldable(_)))

  /** Resize a generator, applying a scale factor so that the resulting
    * values still respond to the incoming size value (which is controlled by
    * the `maxSize` parameter), but typically scaled down to produce more
    * modestly-sized values. */
  def scaleSize[A](gen: Gen[A], f: Int => Int): Gen[A] =
    Gen.sized(size => Gen.resize(f(size), gen))

  /** Scaling function raising the incoming size to some power, typically less
    * than 1. If your generator constructs values with dimension 2 (e.g.
    * `List[List[Int]]`), then 0.5 is a good choice. */
  def scalePow(exp: Double): Int => Int =
    size => scala.math.pow(size.toDouble, exp).toInt

  /** Scaling function which just adjusts the size so as to use at most
    * `desiredLimit`, assuming the default `maxSize` is in effect. */
  def scaleLinear(desiredLimit: Int): Int => Int = {
    val externalLimit = 200
    // Clamp to avoid a zero divisor: previously `externalLimit / desiredLimit`
    // was 0 whenever desiredLimit > externalLimit (making the returned function
    // divide by zero), and desiredLimit == 0 crashed immediately.
    val factor = math.max(1, externalLimit / math.max(1, desiredLimit))
    _ / factor
  }
}
| drostron/quasar | connector/src/test/scala/quasar/scalacheck/package.scala | Scala | apache-2.0 | 2,536 |
package org.raml.parser
import java.util
import cats.data.Validated.{Invalid, Valid}
import cats.data.{NonEmptyList, Validated, ValidatedNel, Xor}
import org.raml.domain.{Api, Method, Resource}
import org.yaml.snakeyaml.ObjectAndNodes
import java.util.{Map ⇒ JMap}
import java.util.ArrayList
import scala.collection.mutable
import scala.collection.immutable.IndexedSeq
import scala.reflect.ClassTag
import java.net.URI
//import cats._
//import cats.syntax.eq._
//import cats.std.all.Eq
//import cats.syntax.eq._
import scala.collection.JavaConverters._
import org.raml.utils.AsInstanceOfOption._
// Parses a snakeyaml-produced object tree into an `Api`, accumulating errors
// into a `ValidatedNel`. Positions for error reporting are looked up through
// the `object2node` map carried alongside the parsed objects.
class ApiParser(objectAndNodes: ObjectAndNodes) {
  type MapT = mutable.Map[String, AnyRef]
  type JMapT = JMap[String, AnyRef]
  // Root mapping of the YAML document, viewed as a Scala mutable.Map wrapper.
  private[this] val root: MapT = objectAndNodes.`object`.asInstanceOf[JMap[String, AnyRef]].asScala
  // Parsed-object -> YAML node, used to attach source positions to errors.
  private[this] val object2node = objectAndNodes.object2node.asScala
  private[this] val MethodNames = Set("get", "patch", "put", "post", "delete", "options", "head")
  // Mutable accumulators threaded through the parse; `apply` folds them into the result.
  private[this] var parsingErrors = List.empty[ParserError]
  private[this] var api = Api("")
  // Entry point: parse the whole document and return either the Api or all collected errors.
  def apply(): ValidatedNel[ParserError, Api] = {
    parse[String](root, "title", { s ⇒ api = api.copy(title = s) })
    parseOptional[String](root, "baseUri", { s ⇒ api = api.copy(baseUri = s) })
    val description = parseOption[String](root, "description")
    val version = parseOption[String](root, "version")
    val baseUri = parseOption[String](root, "baseUri")
    api = api.copy(
      description = description.getOrElse(""),
      version = version.getOrElse(""),
      baseUri = baseUri.getOrElse("")
    )
    api = api.copy(resources = parseMatching(root, isResourceKey, parseResource))
    api = api.copy(protocols = protocols(root, baseUri))
    if (parsingErrors.nonEmpty)
      Invalid(NonEmptyList(parsingErrors.head, parsingErrors.tail))
    else
      Valid(api)
  }
  /**
   * @return API protocols
   */
  // Union of the explicit `protocols` entry and the scheme of `baseUri` (text
  // before the first ':'); invalid entries are reported, valid ones lower-cased.
  def protocols(root: MapT, baseUri: Option[String]): IndexedSeq[String] = {
    var protocols: Set[String] = parseOption[ArrayList[String]](root, "protocols").map(_.asScala.toSet).getOrElse(Set.empty[String])
    baseUri.foreach { uri ⇒
      protocols = protocols + uri.split(":")(0)
    }
    val invalid = protocols.filterNot(validProtocol)
    invalid.foreach { protocol ⇒
      parsingErrors = parsingErrors :+ ParserError(s"Invalid protocol $protocol")
    }
    protocols.map(_.toLowerCase).toIndexedSeq
  }
  // Case-insensitive match against exactly "http" or "https".
  def validProtocol(protocol: String): Boolean = protocol match {
    case s: String if s matches "(?i)(http|https)" ⇒ true
    case _ ⇒ false
  }
  /**
   * @return true if the key identifies a resource, starts with '/'
   */
  def isResourceKey(key: String): Boolean =
    key.nonEmpty && key.head == '/'
  // Build a Resource from its mapping: optional display name/description plus
  // any nested HTTP-method mappings.
  def parseResource(resourceMap: MapT): Resource = {
    var resource = Resource()
    parseOptional[String](resourceMap, "displayName", { s ⇒ resource = resource.copy(displayName = s) })
    parseOptional[String](resourceMap, "description", { s ⇒ resource = resource.copy(description = s) })
    // TODO annotations
    resource = resource.copy(methods = parseMatching(resourceMap, isMethodKey, parseMethod))
    resource
  }
  /**
   * Applies parseF to elements from xs to which keyMatch returns true and return a sequence of its results
   */
  // Keys whose value is not a mapping are reported as errors and skipped.
  def parseMatching[T](xs: MapT, keyMatch: (String) ⇒ Boolean, parseF: (MapT) ⇒ T): IndexedSeq[T] = {
    xs.keys.filter(keyMatch).flatMap { key ⇒
      xs(key).asInstanceOfOption[JMapT] match {
        case Some(mapAtKey) ⇒
          Some(parseF(mapAtKey.asScala))
        case None ⇒
          parsingErrors = parsingErrors :+ ParserError(s"Expected a mapping at: $key", object2node.get(xs(key)))
          None
      }
    }.toIndexedSeq
  }
  // HTTP verb keys (case-insensitive membership in MethodNames).
  def isMethodKey(key: String): Boolean =
    MethodNames.contains(key.toLowerCase)
  // Build a Method from its mapping: optional display name/description only for now.
  def parseMethod(methodMap: MapT): Method = {
    var method = Method()
    parseOptional[String](methodMap, "displayName", { s ⇒ method = method.copy(displayName = s) })
    parseOptional[String](methodMap, "description", { s ⇒ method = method.copy(description = s) })
    // TODO
    method
  }
  /**
   * Look for an optional key, and run the side effect f when found and the type matches
   */
  // NOTE(review): `asInstanceOf[T]` on an erased type parameter is unchecked;
  // the ClassCastException is only raised when `f` actually uses the value with
  // a concrete type — confirm this covers all call sites.
  def parseOptional[T](xs: MapT, key: String, f: T ⇒ Unit): Unit = {
    xs.get(key).foreach { value ⇒
      try {
        f(value.asInstanceOf[T])
      } catch {
        case e: ClassCastException ⇒
          parsingErrors = parsingErrors :+ ParserError(e.toString, object2node.get(value))
      }
    }
  }
  // Fetch an optional key as type T; a cast failure is recorded and None returned.
  // NOTE(review): the `return` inside `foreach` is a nonlocal return (implemented
  // by throwing); an Option combinator would be the idiomatic form.
  def parseOption[T](xs: MapT, key: String): Option[T] = {
    xs.get(key).foreach { value ⇒
      try {
        return Some(value.asInstanceOf[T])
      } catch {
        case e: ClassCastException ⇒
          parsingErrors = parsingErrors :+ ParserError(e.toString, object2node.get(value))
      }
    }
    return None
  }
  /**
   * Look for a mandatory key, and run the side effect f when found and the type matches, otherwise is a parsing error
   */
  def parse[T](xs: MapT, key: String, f: T ⇒ Unit): Unit = {
    if (!xs.contains(key)) {
      parsingErrors = parsingErrors :+ ParserError(s"key not found: $key")
      return
    }
    val value = xs(key)
    try {
      f(value.asInstanceOf[T])
    } catch {
      case e: ClassCastException ⇒
        parsingErrors = parsingErrors :+ ParserError(e.toString, object2node.get(value))
    }
  }
}
| larroy/Scala_raml_parser | src/main/scala/org/raml/parser/ApiParser.scala | Scala | apache-2.0 | 5,453 |
package com.twitter.finagle.netty4.http.handler
import io.netty.buffer._
import io.netty.channel.embedded.EmbeddedChannel
import io.netty.handler.codec.http._
import org.junit.runner.RunWith
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.junit.JUnitRunner
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FunSuite, OneInstancePerTest}
@RunWith(classOf[JUnitRunner])
// Verifies that UnpoolHttpHandler copies pooled/direct HTTP content buffers onto
// the heap, releases the originals, and passes non-content messages through.
class UnpoolHttpHandlerTest
    extends FunSuite
    with GeneratorDrivenPropertyChecks
    with OneInstancePerTest {
  // Fresh embedded channel per test (OneInstancePerTest re-instantiates the suite).
  val channel = new EmbeddedChannel(UnpoolHttpHandler)
  // Generates random HTTP contents with:
  // - Capacity: [1..100]
  // - Write-Index: [0..Capacity]
  // - Read-Index: [0..Write-Index]
  def genHttpContent: Gen[HttpContent] =
    for {
      capacity <- Gen.choose(1, 100)
      bytes <- Gen.listOfN(capacity, Arbitrary.arbByte.arbitrary)
      writer <- Gen.choose(0, capacity)
      reader <- Gen.choose(0, writer)
      // either a heap-backed or a direct buffer, to exercise both code paths
      content <- Gen.oneOf(
        Unpooled
          .buffer(capacity)
          .setBytes(0, bytes.toArray)
          .writerIndex(writer)
          .readerIndex(reader),
        Unpooled
          .directBuffer(capacity)
          .setBytes(0, bytes.toArray)
          .writerIndex(writer)
          .readerIndex(reader)
      )
      // wrap the buffer in each of the HttpContent flavors the handler must handle
      httpContent <- Gen.oneOf(
        new DefaultHttpContent(content),
        new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK, content),
        new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/", content)
      )
    } yield httpContent
  test("convert to heap") {
    forAll(genHttpContent) { in: HttpContent =>
      // We need to retain-duplicate so we can check the equality on the input
      // buffer after its being released by DirectToHeap.
      channel.writeInbound(in.retainedDuplicate())
      val out = channel.readInbound[HttpContent]
      // The output buffer should never be direct (unless it's an `EmptyByteBuf`).
      assert(out.content.isInstanceOf[EmptyByteBuf] || !out.content.isDirect)
      // The output buffer should be equal to input.
      assert(!(in.content eq out.content) && ByteBufUtil.equals(in.content, out.content))
      // The input buffer should've been released.
      assert(in.release())
    }
  }
  test("bypass HTTP messages") {
    // HttpMessage without content must pass through by reference, untouched
    val msg = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/")
    channel.writeInbound(msg)
    assert(channel.readInbound[HttpMessage] eq msg)
  }
  test("bypass empty HTTP contents") {
    // the shared EMPTY_LAST_CONTENT singleton must not be copied
    channel.writeInbound(LastHttpContent.EMPTY_LAST_CONTENT)
    assert(channel.readInbound[HttpContent] eq LastHttpContent.EMPTY_LAST_CONTENT)
  }
  test("map unreadable buf holders into empty buf holders") {
    // zero-length slice of a direct buffer -> replaced with the shared EMPTY_BUFFER
    val in = new DefaultHttpContent(Unpooled.directBuffer().retainedSlice(0, 0))
    channel.writeInbound(in)
    assert(channel.readInbound[HttpContent].content eq Unpooled.EMPTY_BUFFER)
    assert(in.release())
  }
}
| mkhq/finagle | finagle-netty4-http/src/test/scala/com/twitter/finagle/netty4/http/handler/UnpoolHttpHandlerTest.scala | Scala | apache-2.0 | 2,952 |
/*
* Copyright (c) 2014 Dufresne Management Consulting LLC.
*/
package com.nickelsoftware.bettercare4me.utils
/**
 * Root of the application-level exception hierarchy; carries a human-readable message.
 *
 * NOTE(review): as a case class extending `Exception`, instances compare by
 * `message` (structural equality) rather than by reference — confirm callers rely on that.
 */
case class NickelException(message: String) extends Exception(message)
| reactivecore01/bettercare4.me | play/app/com/nickelsoftware/bettercare4me/utils/NickelException.scala | Scala | apache-2.0 | 252 |
package com.github.j5ik2o.dddbase.example.repository.util
import org.scalatest.{ BeforeAndAfter, BeforeAndAfterAll, Suite }
import scalikejdbc.config.DBs
import scalikejdbc.{ ConnectionPool, GlobalSettings, LoggingSQLAndTimeSettings }
// Test mixin that points ScalikeJDBC's singleton connection pool at the MySQL
// instance provisioned by FlywayWithMySQLSpecSupport (port via `jdbcPort`),
// enables SQL logging, and closes all pools after the suite.
trait SkinnySpecSupport extends BeforeAndAfter with BeforeAndAfterAll with JdbcSpecSupport {
  self: Suite with FlywayWithMySQLSpecSupport =>

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    // eagerly load the JDBC driver before creating the pool
    Class.forName("com.mysql.jdbc.Driver")
    ConnectionPool.singleton(s"jdbc:mysql://localhost:${jdbcPort}/dddbase?useSSL=false", "dddbase", "dddbase")
    // NOTE(review): symbol literals ('DEBUG, 'WARN) are deprecated in Scala 2.13 —
    // these would need Symbol("DEBUG") etc. on upgrade.
    GlobalSettings.loggingSQLAndTime = LoggingSQLAndTimeSettings(
      enabled = true,
      logLevel = 'DEBUG,
      warningEnabled = true,
      warningThresholdMillis = 1000L,
      warningLogLevel = 'WARN
    )
  }

  override protected def afterAll(): Unit = {
    DBs.closeAll()
    super.afterAll()
  }
}
| j5ik2o/scala-ddd-base-functional | example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/util/SkinnySpecSupport.scala | Scala | mit | 917 |
package com.wavesplatform.state
import java.io.File
import com.wavesplatform.Application
import com.wavesplatform.account.AddressScheme
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.database.{LevelDBWriter, openDB}
import com.wavesplatform.lang.directives.DirectiveSet
import com.wavesplatform.settings.WavesSettings
import com.wavesplatform.transaction.smart.WavesEnvironment
import com.wavesplatform.utils.ScorexLogging
import monix.eval.Coeval
import org.iq80.leveldb.DB
import org.openjdk.jmh.annotations.{Param, Scope, State, TearDown}
@State(Scope.Benchmark)
// JMH benchmark state: lazily opens the node's LevelDB store (read-only writer
// with the block cache minimized) and builds a WavesEnvironment over it.
abstract class DBState extends ScorexLogging {
  // Config file path, injectable per-benchmark via -p configFile=...
  @Param(Array("waves.conf"))
  var configFile = ""

  // All lazy: nothing is opened until a benchmark actually touches it.
  lazy val settings: WavesSettings = Application.loadApplicationConfig(Some(new File(configFile)).filter(_.exists()))

  lazy val db: DB = openDB(settings.dbSettings.directory)

  lazy val levelDBWriter: LevelDBWriter =
    LevelDBWriter.readOnly(
      db,
      // maxCacheSize = 1 so benchmark reads hit the DB rather than the cache
      settings.copy(dbSettings = settings.dbSettings.copy(maxCacheSize = 1))
    )

  // Mainnet chain id ('W'); set globally before building the environment.
  AddressScheme.current = new AddressScheme { override val chainId: Byte = 'W' }

  lazy val environment = new WavesEnvironment(
    AddressScheme.current.chainId,
    Coeval.raiseError(new NotImplementedError("`tx` is not implemented")),
    Coeval(levelDBWriter.height),
    levelDBWriter,
    null,
    DirectiveSet.contractDirectiveSet,
    ByteStr.empty
  )

  @TearDown
  def close(): Unit = {
    db.close()
  }
}
| wavesplatform/Waves | benchmark/src/main/scala/com/wavesplatform/state/DBState.scala | Scala | mit | 1,456 |
package org.deeplearning4j.app
import org.nd4j.linalg.factory.Nd4j
/**
* Created by agibsonccc on 2/9/15.
*/
// Minimal ND4J demo: build a row vector, add a scalar in place, transpose.
// NOTE(review): a `class` (not an `object`) cannot serve as a JVM entry point,
// and overriding `main` on `App` bypasses App's delayed-init body (overriding it
// is deprecated since 2.11) — confirm how this is meant to be launched.
class Run extends App {
  override def main(args: Array[String]) {
    //create a 5 length row vector
    val arr = Nd4j.zeros(5)
    //add 1 in place
    arr.addi(1)
    println(arr)
    //column vector
    // NOTE(review): arrT is computed but never used — demo-only?
    val arrT = arr.transpose()
  }
}
| SkymindIO/scala-spark-examples | src/main/scala/org/deeplearning4j/app/Run.scala | Scala | apache-2.0 | 359 |
package com.nvrun
import org.scalatest._
import scala.util.Success
import com.nvrun.RaceSeriesParser._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**
 * Grammar-level tests for [[ResultParser]]: each case feeds one raw race
 * result line through the full `InputLine` rule and checks the structured
 * parse result.
 */
@RunWith(classOf[JUnitRunner])
class RaceSeriesParserSpec extends FlatSpec with Matchers {

  /** Parses a single raw result line with the full InputLine rule. */
  private def parse(line: String) = new ResultParser(line).InputLine.run()

  "A Race Series Parser" should "parse the Resolution Run winner" in {
    parse(" 1 OpM First Last, 12, State College, PA 18:17") should be(
      Success(Vector(Result(Place(1), None, Category("OpM"), Name("First", "Last"), Age(12), Some(Location("State College", "PA")), Time(None, 18, 17), None))))
  }

  it should "parse the second place runner when their location is unknown" in {
    parse(" 2 OpM First Last, 24 16:09") should be(
      Success(Vector(Result(Place(2), None, Category("OpM"), Name("First", "Last"), Age(24), None, Time(None, 16, 9), None))))
  }

  it should "parse the starred women's results" in {
    // The trailing '*' after the age marks a women's-series entry.
    parse("13 OpW First Last, 24*, Boalsburg, PA 17:13") should be(
      Success(Vector(Result(Place(13), None, Category("OpW"), Name("First", "Last"), Age(24), Some(Location("Boalsburg", "PA")), Time(None, 17, 13), None))))
  }

  it should "parse a time with hours, minutes, and seconds" in {
    parse("13 OpW First Last, 87*, Boalsburg, PA 1:17:13") should be(
      Success(Vector(Result(Place(13), None, Category("OpW"), Name("First", "Last"), Age(87), Some(Location("Boalsburg", "PA")), Time(Some(1), 17, 13), None))))
  }

  it should "parse a hyphenated last name" in {
    parse("13 OpW First Before-After, 24*, Boalsburg, PA 17:13") should be(
      Success(Vector(Result(Place(13), None, Category("OpW"), Name("First", "Before-After"), Age(24), Some(Location("Boalsburg", "PA")), Time(None, 17, 13), None))))
  }

  it should "parse a line with a per-mile pace" in {
    // Keeps an explicit parser reference because this case also exercises
    // the parser's debug printing of the result.
    val parser = new ResultParser("1 OpM Chris Cipro, 27, Centre Hall, PA 16:22 5:17")
    val result = parser.InputLine.run()
    parser.print(result)
    result should be(
      Success(Vector(Result(Place(1), None, Category("OpM"), Name("Chris", "Cipro"), Age(27), Some(Location("Centre Hall", "PA")), Time(None, 16, 22), Some(Pace(5, 17))))))
  }

  it should "parse a first, middle, and compound last name" in {
    parse("183 12 M55 First M LastA LastB, 57, State College, PA 24:35") should be(
      Success(Vector(Result(Place(183), Some(ClassPlace(12)), Category("M55"), Name("First", "LastA LastB"), Age(57), Some(Location("State College", "PA")), Time(None, 24, 35), None))))
  }

  it should "parse the arbitrary-sized location names" in {
    parse("13 OpW First Last, 24*, Pine Grove Mills, PA 17:13") should be(
      Success(Vector(Result(Place(13), None, Category("OpW"), Name("First", "Last"), Age(24), Some(Location("Pine Grove Mills", "PA")), Time(None, 17, 13), None))))
  }

  it should "parse unknown runners" in {
    parse(" 80 1rr03a5 21:15") should be(
      Success(Vector(Unknown(Place(80), UnknownName("1rr03a5"), Time(None, 21, 15), None))))
  }
}
| aeffrig/scalaries | src/test/scala/com/nvrun/RaceSeriesParserSpec.scala | Scala | lgpl-3.0 | 3,864 |
package common.actors
import akka.actor.ActorRef
import akka.stream.scaladsl.{Sink, Source}
import akka.stream.{ActorMaterializer, OverflowStrategy}
import scala.concurrent.Future
/**
 * Helpers for request/response style messaging with plain actors: a temporary
 * stream-backed actor is materialized to act as the sender, and the first
 * message delivered to it completes the returned future.
 */
trait ActorMessageDelivering {

  /**
   * Materializes a single-use `Source.actorRef` (buffer of 1024 messages,
   * failing the stream on overflow), hands its `ActorRef` to `f`, and
   * completes with the first message that ref receives.
   */
  def sendMessageWithFunc[T](f: ActorRef => Unit)(implicit actorMaterializer: ActorMaterializer): Future[T] = {
    val replySource = Source.actorRef[T](1024, OverflowStrategy.fail)
    replySource.mapMaterializedValue(f).runWith(Sink.head[T])
  }

  /**
   * Sends `message` to `recipient` with the temporary stream actor as the
   * sender, completing with the first reply of type `T`.
   */
  def sendMessageToActor[T](recipient: ActorRef, message: Any)(
      implicit actorMaterializer: ActorMaterializer
  ): Future[T] = {
    sendMessageWithFunc[T](recipient.tell(message, _))
  }
}
| sysgears/apollo-universal-starter-kit | modules/core/server-scala/src/main/scala/common/actors/ActorMessageDelivering.scala | Scala | mit | 640 |
/*
* Copyright 2019 ABSA Group Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package za.co.absa.spline.persistence
import org.apache.commons.lang3.StringUtils.trimToNull
import za.co.absa.commons.lang.OptionImplicits.StringWrapper
import za.co.absa.spline.persistence.ArangoConnectionURL.ArangoSecureDbScheme
import java.net.MalformedURLException
import scala.util.matching.Regex
/**
 * Parsed ArangoDB connection URL.
 *
 * @param scheme   "arangodb" or "arangodbs"
 * @param user     optional user name (must be present when a password is set)
 * @param password optional password, masked when rendered
 * @param hosts    one or more (host, port) endpoints
 * @param dbName   target database name
 */
case class ArangoConnectionURL(scheme: String, user: Option[String], password: Option[String], hosts: Seq[(String, Int)], dbName: String) {

  require(user.isDefined || password.isEmpty, "user cannot be blank if password is specified")

  /** Renders the URL with the password replaced by "*****". */
  def asString: String = {
    val maskedCredentials = Seq(user, password.map(_ => "*****")).flatten.mkString(":")
    val commaSeparatedHosts = hosts.map { case (host, port) => s"$host:$port" }.mkString(",")
    // trimToNull yields null for an empty credential string, so the "@" prefix
    // is only emitted when there is actual user info to show.
    val userInfoPrefix = Option(trimToNull(maskedCredentials)).fold("")(creds => s"$creds@")
    s"$scheme://$userInfoPrefix$commaSeparatedHosts/$dbName"
  }

  /** True when the URL uses the TLS scheme. */
  def isSecure: Boolean = scheme == ArangoSecureDbScheme
}
object ArangoConnectionURL {

  private[persistence] val ArangoDbScheme = "arangodb"
  private[persistence] val ArangoSecureDbScheme = "arangodbs"

  private val DefaultPort = 8529

  /**
   * Regex for `scheme://[user[:password]@]host[:port][,host[:port]...]/db`.
   *
   * Fix: the `dbName` and port fragments previously used `"(\\\\S+)"` and
   * `"(?::\\\\d+)?"`, which compile to the regexes `\\S+` / `\\d+` — i.e. a
   * literal backslash followed by `S+` / `d+` — so a database name or an
   * explicit port could never match. The intended patterns are `\S+` / `\d+`,
   * written in a Scala string as `"\\S+"` / `"\\d+"`.
   */
  private val ArangoConnectionUrlRegex = {
    val scheme = s"^($ArangoDbScheme|$ArangoSecureDbScheme)"
    val user = "([^@:]+)"
    val password = "(.+)" // NOSONAR
    val dbName = "(\\S+)"
    val hostList = {
      val hostWithPort = "[^@:]+(?::\\d+)?"
      s"($hostWithPort(?:,$hostWithPort)*)"
    }
    new Regex(s"$scheme://(?:$user(?::$password)?@)?$hostList/$dbName")
  }

  /** Human-readable description of the accepted URL format, for error messages. */
  val HumanReadableFormat = s"$ArangoDbScheme|$ArangoSecureDbScheme://[user[:password]@]host[:port]/database"

  /**
   * Parses a connection URL string.
   *
   * @throws MalformedURLException when the string does not match [[HumanReadableFormat]]
   */
  def apply(url: String): ArangoConnectionURL = try {
    val ArangoConnectionUrlRegex(scheme, user, password, commaSeparatedHostWithPortList, dbName) = url
    val hosts: Array[(String, Int)] = commaSeparatedHostWithPortList
      .split(",")
      .map(hostPortString => {
        // A host without an explicit port falls back to the Arango default.
        val Array(host, port) = hostPortString.split(":").padTo(2, DefaultPort.toString)
        (host, port.toInt)
      })

    ArangoConnectionURL(
      scheme = scheme,
      user = user.nonBlankOption,
      password = password.nonBlankOption,
      hosts = hosts,
      dbName = dbName
    )
  } catch {
    // A non-matching extractor pattern surfaces as MatchError; re-throw as a
    // URL-specific exception for callers.
    case e: scala.MatchError => throw new MalformedURLException(e.getMessage)
  }
}
| AbsaOSS/spline | persistence/src/main/scala/za/co/absa/spline/persistence/ArangoConnectionURL.scala | Scala | apache-2.0 | 3,028 |
/*
* Copyright (C) 2012 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.core.workflow
import org.openmole.core.workflow.mole._
package task {
import org.openmole.core.context._
  trait TaskPackage {
    // Builds a task-scoped random number generator seeded from the context.
    def newRNG(context: Context) = Task.buildRNG(context)
    // DSL sugar: `implicits += p` records prototype p's name among MoleTask's
    // implicits. NOTE(review): this returns a structural type (`new { ... }`),
    // which relies on reflective calls at each use site; a small named class
    // would avoid that cost.
    def implicits = new {
      def +=(p: Val[_]) = MoleTask.implicits.modify(_ ++ Seq(p.name))
    }
  }
}
| openmole/openmole | openmole/core/org.openmole.core.workflow/src/main/scala/org/openmole/core/workflow/task/package.scala | Scala | agpl-3.0 | 1,040 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util.Properties
import junit.framework.Assert._
import kafka.api.{ApiVersion, KAFKA_082}
import kafka.message._
import kafka.utils.{TestUtils, CoreUtils}
import org.apache.kafka.common.config.ConfigException
import org.apache.kafka.common.protocol.SecurityProtocol
import org.junit.{Assert, Test}
import org.scalatest.Assertions.intercept
/**
 * Unit tests for KafkaConfig parsing and validation of broker properties:
 * retention/roll time precedence (ms > minutes > hours), listener and
 * advertised-listener resolution, inter-broker protocol versions, compression
 * types, unclean leader election flags, and per-property rejection of
 * malformed values.
 *
 * NOTE(review): many assertEquals calls below pass arguments as
 * (actual, expected); JUnit expects (expected, actual), so failure messages
 * would be reversed. The assertions themselves still hold either way.
 */
class KafkaConfigTest {

  @Test
  def testLogRetentionTimeHoursProvided() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put(KafkaConfig.LogRetentionTimeHoursProp, "1")

    val cfg = KafkaConfig.fromProps(props)
    assertEquals(60L * 60L * 1000L, cfg.logRetentionTimeMillis)
  }

  @Test
  def testLogRetentionTimeMinutesProvided() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put(KafkaConfig.LogRetentionTimeMinutesProp, "30")

    val cfg = KafkaConfig.fromProps(props)
    assertEquals(30 * 60L * 1000L, cfg.logRetentionTimeMillis)
  }

  @Test
  def testLogRetentionTimeMsProvided() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put(KafkaConfig.LogRetentionTimeMillisProp, "1800000")

    val cfg = KafkaConfig.fromProps(props)
    assertEquals(30 * 60L * 1000L, cfg.logRetentionTimeMillis)
  }

  // With no retention property set, the default is 7 days.
  @Test
  def testLogRetentionTimeNoConfigProvided() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)

    val cfg = KafkaConfig.fromProps(props)
    assertEquals(24 * 7 * 60L * 60L * 1000L, cfg.logRetentionTimeMillis)
  }

  // Minutes takes precedence over hours when both are given.
  @Test
  def testLogRetentionTimeBothMinutesAndHoursProvided() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put(KafkaConfig.LogRetentionTimeMinutesProp, "30")
    props.put(KafkaConfig.LogRetentionTimeHoursProp, "1")

    val cfg = KafkaConfig.fromProps(props)
    assertEquals( 30 * 60L * 1000L, cfg.logRetentionTimeMillis)
  }

  // Milliseconds takes precedence over minutes when both are given.
  @Test
  def testLogRetentionTimeBothMinutesAndMsProvided() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put(KafkaConfig.LogRetentionTimeMillisProp, "1800000")
    props.put(KafkaConfig.LogRetentionTimeMinutesProp, "10")

    val cfg = KafkaConfig.fromProps(props)
    assertEquals( 30 * 60L * 1000L, cfg.logRetentionTimeMillis)
  }

  // -1 means unlimited retention in any unit; -1 in a higher-precedence unit
  // wins over a concrete value in a lower one; 0 is rejected outright.
  @Test
  def testLogRetentionUnlimited() {
    val props1 = TestUtils.createBrokerConfig(0,TestUtils.MockZkConnect, port = 8181)
    val props2 = TestUtils.createBrokerConfig(0,TestUtils.MockZkConnect, port = 8181)
    val props3 = TestUtils.createBrokerConfig(0,TestUtils.MockZkConnect, port = 8181)
    val props4 = TestUtils.createBrokerConfig(0,TestUtils.MockZkConnect, port = 8181)
    val props5 = TestUtils.createBrokerConfig(0,TestUtils.MockZkConnect, port = 8181)

    props1.put("log.retention.ms", "-1")
    props2.put("log.retention.minutes", "-1")
    props3.put("log.retention.hours", "-1")

    val cfg1 = KafkaConfig.fromProps(props1)
    val cfg2 = KafkaConfig.fromProps(props2)
    val cfg3 = KafkaConfig.fromProps(props3)
    assertEquals("Should be -1", -1, cfg1.logRetentionTimeMillis)
    assertEquals("Should be -1", -1, cfg2.logRetentionTimeMillis)
    assertEquals("Should be -1", -1, cfg3.logRetentionTimeMillis)

    props4.put("log.retention.ms", "-1")
    props4.put("log.retention.minutes", "30")

    val cfg4 = KafkaConfig.fromProps(props4)
    assertEquals("Should be -1", -1, cfg4.logRetentionTimeMillis)

    props5.put("log.retention.ms", "0")

    // NOTE(review): the bound val inside intercept is unused; the expression
    // alone would suffice.
    intercept[IllegalArgumentException] {
      val cfg5 = KafkaConfig.fromProps(props5)
    }
  }

  // Zero retention is invalid in every unit.
  // NOTE(review): procedure-style definition without parentheses, unlike the
  // other tests; the vals bound inside intercept are unused.
  @Test
  def testLogRetentionValid {
    val props1 = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    val props2 = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    val props3 = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)

    props1.put("log.retention.ms", "0")
    props2.put("log.retention.minutes", "0")
    props3.put("log.retention.hours", "0")

    intercept[IllegalArgumentException] {
      val cfg1 = KafkaConfig.fromProps(props1)
    }
    intercept[IllegalArgumentException] {
      val cfg2 = KafkaConfig.fromProps(props2)
    }
    intercept[IllegalArgumentException] {
      val cfg3 = KafkaConfig.fromProps(props3)
    }
  }

  // Advertised listeners default to host.name/port when neither listeners nor
  // advertised.* are configured.
  @Test
  def testAdvertiseDefaults() {
    val port = "9999"
    val hostName = "fake-host"

    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect)
    props.remove(KafkaConfig.ListenersProp)
    props.put(KafkaConfig.HostNameProp, hostName)
    props.put(KafkaConfig.PortProp, port)

    val serverConfig = KafkaConfig.fromProps(props)

    val endpoints = serverConfig.advertisedListeners
    val endpoint = endpoints.get(SecurityProtocol.PLAINTEXT).get
    assertEquals(endpoint.host, hostName)
    assertEquals(endpoint.port, port.toInt)
  }

  // Explicit advertised.host.name/advertised.port take effect verbatim.
  @Test
  def testAdvertiseConfigured() {
    val advertisedHostName = "routable-host"
    val advertisedPort = "1234"

    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect)
    props.put(KafkaConfig.AdvertisedHostNameProp, advertisedHostName)
    props.put(KafkaConfig.AdvertisedPortProp, advertisedPort)

    val serverConfig = KafkaConfig.fromProps(props)

    val endpoints = serverConfig.advertisedListeners
    val endpoint = endpoints.get(SecurityProtocol.PLAINTEXT).get

    assertEquals(endpoint.host, advertisedHostName)
    assertEquals(endpoint.port, advertisedPort.toInt)
  }

  // advertised.port falls back to port when not set.
  @Test
  def testAdvertisePortDefault() {
    val advertisedHostName = "routable-host"
    val port = "9999"

    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect)
    props.put(KafkaConfig.AdvertisedHostNameProp, advertisedHostName)
    props.put(KafkaConfig.PortProp, port)

    val serverConfig = KafkaConfig.fromProps(props)

    val endpoints = serverConfig.advertisedListeners
    val endpoint = endpoints.get(SecurityProtocol.PLAINTEXT).get

    assertEquals(endpoint.host, advertisedHostName)
    assertEquals(endpoint.port, port.toInt)
  }

  // advertised.host.name falls back to host.name when not set.
  @Test
  def testAdvertiseHostNameDefault() {
    val hostName = "routable-host"
    val advertisedPort = "9999"

    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect)
    props.put(KafkaConfig.HostNameProp, hostName)
    props.put(KafkaConfig.AdvertisedPortProp, advertisedPort)

    val serverConfig = KafkaConfig.fromProps(props)

    val endpoints = serverConfig.advertisedListeners
    val endpoint = endpoints.get(SecurityProtocol.PLAINTEXT).get

    assertEquals(endpoint.host, hostName)
    assertEquals(endpoint.port, advertisedPort.toInt)
  }

  // Listener lists must be unique by both port and protocol.
  @Test
  def testDuplicateListeners() {
    val props = new Properties()
    props.put(KafkaConfig.BrokerIdProp, "1")
    props.put(KafkaConfig.ZkConnectProp, "localhost:2181")

    // listeners with duplicate port
    props.put(KafkaConfig.ListenersProp, "PLAINTEXT://localhost:9091,TRACE://localhost:9091")
    assert(!isValidKafkaConfig(props))

    // listeners with duplicate protocol
    props.put(KafkaConfig.ListenersProp, "PLAINTEXT://localhost:9091,PLAINTEXT://localhost:9092")
    assert(!isValidKafkaConfig(props))

    // advertised listeners with duplicate port
    props.put(KafkaConfig.AdvertisedListenersProp, "PLAINTEXT://localhost:9091,TRACE://localhost:9091")
    assert(!isValidKafkaConfig(props))
  }

  // Unknown security protocols in a listener URL are rejected.
  @Test
  def testBadListenerProtocol() {
    val props = new Properties()
    props.put(KafkaConfig.BrokerIdProp, "1")
    props.put(KafkaConfig.ZkConnectProp, "localhost:2181")
    props.put(KafkaConfig.ListenersProp, "BAD://localhost:9091")

    assert(!isValidKafkaConfig(props))
  }

  // host.name/port and advertised.* synthesize PLAINTEXT listeners when no
  // listener lists are configured.
  @Test
  def testListenerDefaults() {
    val props = new Properties()
    props.put(KafkaConfig.BrokerIdProp, "1")
    props.put(KafkaConfig.ZkConnectProp, "localhost:2181")

    // configuration with host and port, but no listeners
    props.put(KafkaConfig.HostNameProp, "myhost")
    props.put(KafkaConfig.PortProp, "1111")

    val conf = KafkaConfig.fromProps(props)
    assertEquals(CoreUtils.listenerListToEndPoints("PLAINTEXT://myhost:1111"), conf.listeners)

    // configuration with null host
    props.remove(KafkaConfig.HostNameProp)

    val conf2 = KafkaConfig.fromProps(props)
    assertEquals(CoreUtils.listenerListToEndPoints("PLAINTEXT://:1111"), conf2.listeners)
    assertEquals(CoreUtils.listenerListToEndPoints("PLAINTEXT://:1111"), conf2.advertisedListeners)
    assertEquals(null, conf2.listeners(SecurityProtocol.PLAINTEXT).host)

    // configuration with advertised host and port, and no advertised listeners
    props.put(KafkaConfig.AdvertisedHostNameProp, "otherhost")
    props.put(KafkaConfig.AdvertisedPortProp, "2222")

    val conf3 = KafkaConfig.fromProps(props)
    assertEquals(conf3.advertisedListeners, CoreUtils.listenerListToEndPoints("PLAINTEXT://otherhost:2222"))
  }

  // Inter-broker protocol defaults to the latest version; 0.8.2.0 and 0.8.2.1
  // both map to KAFKA_082.
  @Test
  def testVersionConfiguration() {
    val props = new Properties()
    props.put(KafkaConfig.BrokerIdProp, "1")
    props.put(KafkaConfig.ZkConnectProp, "localhost:2181")
    val conf = KafkaConfig.fromProps(props)
    assertEquals(ApiVersion.latestVersion, conf.interBrokerProtocolVersion)

    props.put(KafkaConfig.InterBrokerProtocolVersionProp,"0.8.2.0")
    val conf2 = KafkaConfig.fromProps(props)
    assertEquals(KAFKA_082, conf2.interBrokerProtocolVersion)

    // check that 0.8.2.0 is the same as 0.8.2.1
    props.put(KafkaConfig.InterBrokerProtocolVersionProp,"0.8.2.1")
    val conf3 = KafkaConfig.fromProps(props)
    assertEquals(KAFKA_082, conf3.interBrokerProtocolVersion)

    //check that latest is newer than 0.8.2
    assert(ApiVersion.latestVersion.onOrAfter(conf3.interBrokerProtocolVersion))
  }

  // Helper: true iff the properties parse without an IllegalArgumentException.
  private def isValidKafkaConfig(props: Properties): Boolean = {
    try {
      KafkaConfig.fromProps(props)
      true
    } catch {
      case e: IllegalArgumentException => false
    }
  }

  @Test
  def testUncleanLeaderElectionDefault() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    val serverConfig = KafkaConfig.fromProps(props)

    assertEquals(serverConfig.uncleanLeaderElectionEnable, true)
  }

  @Test
  def testUncleanElectionDisabled() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put(KafkaConfig.UncleanLeaderElectionEnableProp, String.valueOf(false))
    val serverConfig = KafkaConfig.fromProps(props)

    assertEquals(serverConfig.uncleanLeaderElectionEnable, false)
  }

  @Test
  def testUncleanElectionEnabled() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put(KafkaConfig.UncleanLeaderElectionEnableProp, String.valueOf(true))
    val serverConfig = KafkaConfig.fromProps(props)

    assertEquals(serverConfig.uncleanLeaderElectionEnable, true)
  }

  // Non-boolean values for the unclean election flag raise ConfigException.
  @Test
  def testUncleanElectionInvalid() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put(KafkaConfig.UncleanLeaderElectionEnableProp, "invalid")

    intercept[ConfigException] {
      KafkaConfig.fromProps(props)
    }
  }

  @Test
  def testLogRollTimeMsProvided() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put(KafkaConfig.LogRollTimeMillisProp, "1800000")

    val cfg = KafkaConfig.fromProps(props)
    assertEquals(30 * 60L * 1000L, cfg.logRollTimeMillis)
  }

  // Milliseconds takes precedence over hours for log roll time too.
  @Test
  def testLogRollTimeBothMsAndHoursProvided() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put(KafkaConfig.LogRollTimeMillisProp, "1800000")
    props.put(KafkaConfig.LogRollTimeHoursProp, "1")

    val cfg = KafkaConfig.fromProps(props)
    assertEquals( 30 * 60L * 1000L, cfg.logRollTimeMillis)
  }

  // Default roll time is 7 days.
  @Test
  def testLogRollTimeNoConfigProvided() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)

    val cfg = KafkaConfig.fromProps(props)
    assertEquals(24 * 7 * 60L * 60L * 1000L, cfg.logRollTimeMillis )
  }

  // Broker-side compression defaults to whatever the producer used.
  @Test
  def testDefaultCompressionType() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    val serverConfig = KafkaConfig.fromProps(props)

    assertEquals(serverConfig.compressionType, "producer")
  }

  @Test
  def testValidCompressionType() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put("compression.type", "gzip")
    val serverConfig = KafkaConfig.fromProps(props)

    assertEquals(serverConfig.compressionType, "gzip")
  }

  @Test
  def testInvalidCompressionType() {
    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181)
    props.put(KafkaConfig.CompressionTypeProp, "abc")
    intercept[IllegalArgumentException] {
      KafkaConfig.fromProps(props)
    }
  }

  // Exhaustively checks that each known config key rejects malformed values.
  // String-valued keys are skipped ("ignore string"); keys with no sensible
  // invalid literal have empty case bodies.
  @Test
  def testFromPropsInvalid() {
    def getBaseProperties(): Properties = {
      val validRequiredProperties = new Properties()
      validRequiredProperties.put(KafkaConfig.ZkConnectProp, "127.0.0.1:2181")
      validRequiredProperties
    }
    // to ensure a basis is valid - bootstraps all needed validation
    KafkaConfig.fromProps(getBaseProperties())

    KafkaConfig.configNames().foreach(name => {
      name match {
        case KafkaConfig.ZkConnectProp => // ignore string
        case KafkaConfig.ZkSessionTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ZkConnectionTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ZkSyncTimeMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")

        case KafkaConfig.BrokerIdProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.NumNetworkThreadsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.NumIoThreadsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.BackgroundThreadsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.QueuedMaxRequestsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")

        case KafkaConfig.PortProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.HostNameProp => // ignore string
        case KafkaConfig.AdvertisedHostNameProp => //ignore string
        case KafkaConfig.AdvertisedPortProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.SocketSendBufferBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.SocketReceiveBufferBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.MaxConnectionsPerIpOverridesProp =>
          assertPropertyInvalid(getBaseProperties(), name, "127.0.0.1:not_a_number")
        case KafkaConfig.ConnectionsMaxIdleMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")

        case KafkaConfig.NumPartitionsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.LogDirsProp => // ignore string
        case KafkaConfig.LogDirProp => // ignore string
        case KafkaConfig.LogSegmentBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", Message.MinHeaderSize - 1)

        case KafkaConfig.LogRollTimeMillisProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.LogRollTimeHoursProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")

        case KafkaConfig.LogRetentionTimeMillisProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.LogRetentionTimeMinutesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.LogRetentionTimeHoursProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")

        case KafkaConfig.LogRetentionBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.LogCleanupIntervalMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.LogCleanupPolicyProp => assertPropertyInvalid(getBaseProperties(), name, "unknown_policy", "0")
        case KafkaConfig.LogCleanerIoMaxBytesPerSecondProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.LogCleanerDedupeBufferSizeProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "1024")
        case KafkaConfig.LogCleanerDedupeBufferLoadFactorProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.LogCleanerEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean")
        case KafkaConfig.LogCleanerDeleteRetentionMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.LogCleanerMinCleanRatioProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.LogIndexSizeMaxBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "3")
        case KafkaConfig.LogFlushIntervalMessagesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.LogFlushSchedulerIntervalMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.LogFlushIntervalMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.NumRecoveryThreadsPerDataDirProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.AutoCreateTopicsEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean", "0")
        case KafkaConfig.MinInSyncReplicasProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")

        case KafkaConfig.ControllerSocketTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.DefaultReplicationFactorProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ReplicaLagTimeMaxMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ReplicaSocketTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "-2")
        case KafkaConfig.ReplicaSocketReceiveBufferBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ReplicaFetchMaxBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ReplicaFetchWaitMaxMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ReplicaFetchMinBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.NumReplicaFetchersProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ReplicaHighWatermarkCheckpointIntervalMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.FetchPurgatoryPurgeIntervalRequestsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ProducerPurgatoryPurgeIntervalRequestsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.AutoLeaderRebalanceEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean", "0")
        case KafkaConfig.LeaderImbalancePerBrokerPercentageProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.LeaderImbalanceCheckIntervalSecondsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.UncleanLeaderElectionEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean", "0")
        case KafkaConfig.ControlledShutdownMaxRetriesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ControlledShutdownRetryBackoffMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ControlledShutdownEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean", "0")
        case KafkaConfig.ConsumerMinSessionTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.ConsumerMaxSessionTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.OffsetMetadataMaxSizeProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
        case KafkaConfig.OffsetsLoadBufferSizeProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.OffsetsTopicReplicationFactorProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.OffsetsTopicPartitionsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.OffsetsTopicSegmentBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.OffsetsTopicCompressionCodecProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "-1")
        case KafkaConfig.OffsetsRetentionMinutesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.OffsetsRetentionCheckIntervalMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.OffsetCommitTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.OffsetCommitRequiredAcksProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "-2")
        case KafkaConfig.ProducerQuotaBytesPerSecondDefaultProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.ConsumerQuotaBytesPerSecondDefaultProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.ProducerQuotaBytesPerSecondOverridesProp => // ignore string
        case KafkaConfig.ConsumerQuotaBytesPerSecondOverridesProp => // ignore string
        case KafkaConfig.NumQuotaSamplesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
        case KafkaConfig.QuotaWindowSizeSecondsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")

        case KafkaConfig.DeleteTopicEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean", "0")

        case KafkaConfig.MetricNumSamplesProp => assertPropertyInvalid(getBaseProperties, name, "not_a_number", "-1", "0")
        case KafkaConfig.MetricSampleWindowMsProp => assertPropertyInvalid(getBaseProperties, name, "not_a_number", "-1", "0")
        case KafkaConfig.MetricReporterClassesProp => // ignore string

        //SSL Configs
        case KafkaConfig.PrincipalBuilderClassProp =>
        case KafkaConfig.SSLProtocolProp => // ignore string
        case KafkaConfig.SSLProviderProp => // ignore string
        case KafkaConfig.SSLEnabledProtocolsProp =>
        case KafkaConfig.SSLKeystoreTypeProp => // ignore string
        case KafkaConfig.SSLKeystoreLocationProp => // ignore string
        case KafkaConfig.SSLKeystorePasswordProp => // ignore string
        case KafkaConfig.SSLKeyPasswordProp => // ignore string
        case KafkaConfig.SSLTruststoreTypeProp => // ignore string
        case KafkaConfig.SSLTruststorePasswordProp => // ignore string
        case KafkaConfig.SSLTruststoreLocationProp => // ignore string
        case KafkaConfig.SSLKeyManagerAlgorithmProp =>
        case KafkaConfig.SSLTrustManagerAlgorithmProp =>
        case KafkaConfig.SSLClientAuthProp => // ignore string

        // NOTE(review): this is a lowercase variable pattern, not a constant —
        // it binds ANY remaining property name and acts as the default case
        // (every unlisted key is assumed to be a non-negative int).
        case nonNegativeIntProperty => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "-1")
      }
    })
  }

  // Spot-checks concrete parsed values, including precedence and fallback
  // behaviour (advertised host/port falling back to host.name/port, roll and
  // retention times derived from hours, flush interval from scheduler interval).
  @Test
  def testSpecificProperties(): Unit = {
    val defaults = new Properties()
    defaults.put(KafkaConfig.ZkConnectProp, "127.0.0.1:2181")
    // For ZkConnectionTimeoutMs
    defaults.put(KafkaConfig.ZkSessionTimeoutMsProp, "1234")
    defaults.put(KafkaConfig.MaxReservedBrokerIdProp, "1")
    defaults.put(KafkaConfig.BrokerIdProp, "1")
    defaults.put(KafkaConfig.HostNameProp, "127.0.0.1")
    defaults.put(KafkaConfig.PortProp, "1122")
    defaults.put(KafkaConfig.MaxConnectionsPerIpOverridesProp, "127.0.0.1:2, 127.0.0.2:3")
    defaults.put(KafkaConfig.LogDirProp, "/tmp1,/tmp2")
    defaults.put(KafkaConfig.LogRollTimeHoursProp, "12")
    defaults.put(KafkaConfig.LogRollTimeJitterHoursProp, "11")
    defaults.put(KafkaConfig.LogRetentionTimeHoursProp, "10")
    //For LogFlushIntervalMsProp
    defaults.put(KafkaConfig.LogFlushSchedulerIntervalMsProp, "123")
    defaults.put(KafkaConfig.OffsetsTopicCompressionCodecProp, SnappyCompressionCodec.codec.toString)

    val config = KafkaConfig.fromProps(defaults)
    Assert.assertEquals("127.0.0.1:2181", config.zkConnect)
    Assert.assertEquals(1234, config.zkConnectionTimeoutMs)
    Assert.assertEquals(1, config.maxReservedBrokerId)
    Assert.assertEquals(1, config.brokerId)
    Assert.assertEquals("127.0.0.1", config.hostName)
    Assert.assertEquals(1122, config.advertisedPort)
    Assert.assertEquals("127.0.0.1", config.advertisedHostName)
    Assert.assertEquals(Map("127.0.0.1" -> 2, "127.0.0.2" -> 3), config.maxConnectionsPerIpOverrides)
    Assert.assertEquals(List("/tmp1", "/tmp2"), config.logDirs)
    Assert.assertEquals(12 * 60L * 1000L * 60, config.logRollTimeMillis)
    Assert.assertEquals(11 * 60L * 1000L * 60, config.logRollTimeJitterMillis)
    Assert.assertEquals(10 * 60L * 1000L * 60, config.logRetentionTimeMillis)
    Assert.assertEquals(123L, config.logFlushIntervalMs)
    Assert.assertEquals(SnappyCompressionCodec, config.offsetsTopicCompressionCodec)
  }

  // Helper: sets `name` to each invalid value on a fresh copy of the base
  // properties (by-name parameter re-evaluated per value) and expects parsing
  // to throw.
  private def assertPropertyInvalid(validRequiredProps: => Properties, name: String, values: Any*) {
    values.foreach((value) => {
      val props = validRequiredProps
      props.setProperty(name, value.toString)
      intercept[Exception] {
        KafkaConfig.fromProps(props)
      }
    })
  }
}
| mpoindexter/kafka | core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala | Scala | apache-2.0 | 27,292 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import sbt.Keys._
import uk.gov.hmrc.versioning.SbtGitVersioning
/** sbt build definition for the play-auditing library. */
object HmrcBuild extends Build {

  import uk.gov.hmrc._
  import DefaultBuildSettings._

  val appName = "play-auditing"

  lazy val microservice = Project(appName, file("."))
    .enablePlugins(SbtAutoBuildPlugin, SbtGitVersioning)
    .settings(
      targetJvm := "jvm-1.7",
      version := "0.1.0",
      libraryDependencies ++= AppDependencies(),
      crossScalaVersions := Seq("2.11.6"),
      resolvers := Seq(
        Resolver.bintrayRepo("hmrc", "releases"),
        // Resolve over HTTPS: fetching artifacts over plain HTTP allows
        // man-in-the-middle substitution and is rejected by modern sbt versions.
        "typesafe-releases" at "https://repo.typesafe.com/typesafe/releases/"
      )
    )
}
/** Declares compile- and test-scope dependencies for the build. */
private object AppDependencies {

  import play.PlayImport._
  import play.core.PlayVersion

  // Compile-scope dependencies; Play itself tracks the version of the
  // Play plugin in use via PlayVersion.current.
  val compile = Seq(
    "com.typesafe.play" %% "play" % PlayVersion.current,
    ws,
    "uk.gov.hmrc" %% "time" % "1.1.0",
    "uk.gov.hmrc" %% "http-exceptions" % "0.3.0",
    "uk.gov.hmrc" %% "http-verbs" % "1.9.0-2-g06d7946"
  )

  // Template for scoped test dependency sets; `test` is filled in by Test below.
  trait TestDependencies {
    lazy val scope: String = "test"
    lazy val test: Seq[ModuleID] = ???
  }

  object Test {
    def apply() = new TestDependencies {
      override lazy val test = Seq(
        "com.typesafe.play" %% "play-test" % PlayVersion.current % scope,
        "commons-codec" % "commons-codec" % "1.7" % scope,
        "org.scalatest" %% "scalatest" % "2.2.4" % scope,
        "org.scalacheck" %% "scalacheck" % "1.12.2" % scope,
        "org.pegdown" % "pegdown" % "1.4.2" % scope,
        "com.github.tomakehurst" % "wiremock" % "1.52" % scope,
        // NOTE(review): unscoped — duplicates the compile-scope http-verbs entry
        // above; presumably this was meant to carry "% scope" — confirm.
        "uk.gov.hmrc" %% "http-verbs" % "1.9.0-2-g06d7946"
      )
    }.test
  }

  def apply() = compile ++ Test()
}
| beyond-code-github/play-auditing | project/HmrcBuild.scala | Scala | apache-2.0 | 2,265 |
package com.ajjpj.adiagram_.ui.presentation
import com.ajjpj.adiagram_.render.base.{PartialImageWithShadow, PartialImage}
import javafx.scene.canvas.Canvas
import com.ajjpj.adiagram_.ui.{Zoom, AScreenPos}
import com.ajjpj.adiagram_.geometry.APoint
/**
 * Helpers for copying rendered shape/shadow images onto their JavaFX canvases
 * and positioning those canvases within the diagram.
 *
 * @author arno
 */
object ShapePresentationHelper {
  /** Resizes the canvas to the image, clears it, and draws the image at the origin. */
  private def drawOnCanvas(image: PartialImage, target: Canvas) {
    target.setWidth (image.img.getWidth)
    target.setHeight (image.img.getHeight)
    val gc = target.getGraphicsContext2D
    gc.clearRect(0, 0, target.getWidth, target.getHeight) //TODO is there a better way for this?
    gc.drawImage(image.img, 0, 0)
  }

  /** Moves the canvas so that the model position (plus render offset) lands at the right screen point. */
  def refreshPos(canvas: Canvas, pos: APoint, offset: APoint, zoom: Zoom): Unit = {
    val screenPos = AScreenPos.fromModel(pos + offset, zoom)
    canvas.setLayoutX (screenPos.x)
    canvas.setLayoutY (screenPos.y)
  }

  /** Repositions shape and shadow canvases, each with its own render offset. */
  def refreshPos(shapeCanvas: Canvas, shadowCanvas: Canvas, pos: APoint, shapeOffset: APoint, shadowOffset: APoint, zoom: Zoom): Unit = {
    refreshPos(shapeCanvas, pos, shapeOffset, zoom)
    refreshPos(shadowCanvas, pos, shadowOffset, zoom)
  }

  /** Draws the shape (and its shadow, when present) and positions both canvases. */
  def drawShapeOnCanvas(pi: PartialImageWithShadow, pos: APoint, shapeCanvas: Canvas, shadowCanvas: Canvas, zoom: Zoom) {
    drawOnCanvas(pi.shape, shapeCanvas)
    refreshPos(shapeCanvas, pos, pi.shape.renderOffset, zoom)

    pi.shadow match {
      case None =>
        // No shadow: collapse the shadow canvas so nothing stale is shown.
        shadowCanvas.setWidth(0)
        shadowCanvas.setHeight(0)
      case Some(shadow) =>
        drawOnCanvas(shadow, shadowCanvas)
        refreshPos(shadowCanvas, pos, shadow.renderOffset, zoom)
    }
  }
}
| arnohaase/a-diagram | src/main/scala-old/com/ajjpj/adiagram_/ui/presentation/ShapePresentationHelper.scala | Scala | apache-2.0 | 1,535 |
/*
* Copyright 2016 Alexey Kuzin <amkuzink@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package choiceroulette.gui.roulette.arc
import javafx.css.{CssMetaData, _}
import javafx.scene.paint.Paint
import com.sun.javafx.css.converters.PaintConverter
/** [[StyleableObjectProperty]] for setting [[ArcsPane]]'s arc colors.
  *
  * Bridges a CSS-settable Paint value to the arc pane: whenever the property's
  * value changes (e.g. from a stylesheet), the new paint is forwarded to the
  * `setArcColor` callback.
  *
  * @author Alexey Kuzin <amkuzink@gmail.com>
  */
class ArcColorProperty(arcPane: ArcsPaneJfxDelegate,
                       name: String,
                       metaData: CssMetaData[ArcsPaneJfxDelegate, Paint]) extends StyleableObjectProperty[Paint] {

  // Callback invoked with the new paint on invalidation.
  // NOTE(review): initialized to null (`_`); invalidated() would NPE if it
  // fires before a callback is assigned — confirm assignment order at the call site.
  var setArcColor: Paint => Unit = _

  // Called by JavaFX when the property value changes; forward any Paint value.
  override def invalidated(): Unit = {
    get() match {
      case paint: Paint => setArcColor(paint)
      case _ =>
    }
  }

  override def getCssMetaData: CssMetaData[_ <: Styleable, Paint] = metaData

  override def getName: String = name

  // The styleable bean owning this property is the arc pane delegate.
  override def getBean: ArcsPaneJfxDelegate = arcPane
}
/** CSS metadata definitions for the two arc background-color properties. */
object ArcColorProperty {

  /** Binds a CSS property name to the delegate's corresponding styleable Paint property. */
  private class ArcColorMetaData(cssProperty: String,
                                 extract: ArcsPaneJfxDelegate => StyleableProperty[Paint]) extends
      CssMetaData[ArcsPaneJfxDelegate, Paint](cssProperty, PaintConverter.getInstance()) {

    // Arc colors can always be set from CSS.
    override def isSettable(styleable: ArcsPaneJfxDelegate): Boolean = true

    override def getStyleableProperty(styleable: ArcsPaneJfxDelegate): StyleableProperty[Paint] =
      extract(styleable)
  }

  /** Metadata for the "-arc-first-background-color" CSS property. */
  val firstCssMetaData: CssMetaData[ArcsPaneJfxDelegate, Paint] =
    new ArcColorMetaData("-arc-first-background-color", _.arcFirstColorProperty)

  /** Metadata for the "-arc-second-background-color" CSS property. */
  val secondCssMetaData: CssMetaData[ArcsPaneJfxDelegate, Paint] =
    new ArcColorMetaData("-arc-second-background-color", _.arcSecondColorProperty)
}
| leviathan941/choiceroulette | guiapp/src/main/scala/choiceroulette/gui/roulette/arc/ArcColorProperty.scala | Scala | apache-2.0 | 2,239 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.serializer
import java.io.{File}
import java.lang.reflect.Modifier
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.nn.ops.{Exp => ExpOps, Pow => PowOps, Select => SelectOps, Sum => SumOps, Tile => TileOps}
import com.intel.analytics.bigdl.nn.tf.{DecodeGif => DecodeGifOps, DecodeJpeg => DecodeJpegOps, DecodePng => DecodePngOps, DecodeRaw => DecodeRawOps}
import com.intel.analytics.bigdl.utils.RandomGenerator.RNG
import com.intel.analytics.bigdl.utils.tf.loaders.{Pack => _}
import com.intel.analytics.bigdl.utils.{Shape => KShape}
import org.reflections.Reflections
import org.reflections.scanners.SubTypesScanner
import org.reflections.util.{ClasspathHelper, ConfigurationBuilder, FilterBuilder}
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import scala.collection.JavaConverters._
import scala.collection.mutable
/**
 * Base class for module-serialization specs.
 *
 * Before the tests run it reflects over the classpath to collect every concrete
 * [[AbstractModule]] subclass under [[getPackage]] (minus explicit exclusions);
 * after the tests it asserts that every collected class was exercised by at
 * least one serialization round-trip.
 */
abstract class SerializerSpecHelper extends FlatSpec with Matchers with BeforeAndAfterAll{

  // Suffix for the temp files the serialized modules are written to.
  val postFix = "bigdl"

  // Classes/packages that subclasses may opt out of coverage checking.
  val excludedClass = new mutable.HashSet[String]()
  val excludedPackage = new mutable.HashSet[String]()

  // All concrete module classes discovered by reflection (the coverage target).
  private val expected = new mutable.HashSet[String]()
  // Classes actually exercised by runSerializationTest* during the run.
  val tested = new mutable.HashSet[String]()

  private var executedCount = 0

  // Package to scan; subclasses override to narrow the search.
  protected def getPackage(): String = ""

  // Hooks for subclasses to populate the exclusion sets before scanning.
  protected def addExcludedClass(): Unit = {}

  protected def addExcludedPackage(): Unit = {}

  protected def getExpected(): mutable.Set[String] = expected

  override protected def beforeAll() = {
    addExcludedClass
    addExcludedPackage
    val filterBuilder = new FilterBuilder()
    excludedPackage.foreach(filterBuilder.excludePackage(_))
    // Scan the classpath for subtypes of AbstractModule, keeping only concrete,
    // non-excluded classes as the expected coverage set.
    val reflections = new Reflections(new ConfigurationBuilder()
      .filterInputsBy(filterBuilder)
      .setUrls(ClasspathHelper.forPackage(getPackage()))
      .setScanners(new SubTypesScanner()))

    val subTypes = reflections.getSubTypesOf(classOf[AbstractModule[_, _, _]])
      .asScala.filter(sub => !Modifier.isAbstract(sub.getModifiers)).
      filter(sub => !excludedClass.contains(sub.getName))
    subTypes.foreach(sub => expected.add(sub.getName))
  }

  /**
   * Round-trips `module` through file serialization and checks that the loaded
   * copy produces the same forward output on `input`. Marks `cls` (or the
   * module's own class) as tested.
   */
  protected def runSerializationTest(module : AbstractModule[_, _, Float],
                                     input : Activity, cls: Class[_] = null) : Unit = {
    runSerializationTestWithMultiClass(module, input,
      if (cls == null) Array(module.getClass) else Array(cls))
  }

  /** Same as [[runSerializationTest]] but marks several classes as covered. */
  protected def runSerializationTestWithMultiClass(module : AbstractModule[_, _, Float],
      input : Activity, classes: Array[Class[_]]) : Unit = {
    val name = module.getName
    val serFile = File.createTempFile(name, postFix)
    // Forward pass before saving — reference output for the comparison below.
    val originForward = module.evaluate().forward(input)

    ModulePersister.saveToFile[Float](serFile.getAbsolutePath, null, module.evaluate(), true)
    // Re-seed so any randomness during load/forward is reproducible.
    RNG.setSeed(1000)
    val loadedModule = ModuleLoader.loadFromFile[Float](serFile.getAbsolutePath)

    val afterLoadForward = loadedModule.forward(input)

    if (serFile.exists) {
      serFile.delete
    }

    afterLoadForward should be (originForward)
    classes.foreach(cls => {
      if (getExpected.contains(cls.getName)) {
        tested.add(cls.getName)
      }
    })
  }

  // Fails the suite if any discovered module class was never serialization-tested.
  override protected def afterAll() = {
    println(s"total ${getExpected.size}, remaining ${getExpected.size - tested.size}")
    tested.filter(!getExpected.contains(_)).foreach(t => {
      println(s"$t do not need to be tested")
    })
    getExpected.foreach(exp => {
      require(tested.contains(exp), s" $exp not included in the test!")
    })
  }
}
| yiheng/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/utils/serializer/SerializerSpecHelper.scala | Scala | apache-2.0 | 4,174 |
package com.nekogata.backlogger.domain.setting
/** Abstraction over persistence of the configured Backlog user id. */
trait UserIdRepository {
  /** Returns the stored user id. */
  def get(): Int
  /** Persists the given user id. */
  def store(id: Int): Unit
}
| Shinpeim/BackLogger | scala/src/main/scala/com/nekogata/backlogger/domain/setting/UserIdRepository.scala | Scala | mit | 119 |
package examples.streaming
import org.apache.spark.streaming.{ StreamingContext, Seconds }
import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
/**
 * Use the NetCat server to send messages to this programme.
 * Counts the number of words whose length exceeds 5.
 * This is an example of stateless transformations.
 *
 * Usage: TestVanillaStreaming <host> <port> — connects a socket text stream
 * to the given address, splits each line into words, and every 5-second batch
 * prints how many words are longer than 5 characters.
 */
object TestVanillaStreaming {
  def main(args: Array[String]): Unit = {
    // Exactly two arguments required: host and port.
    if (args.length != 2) {
      println("Usage: examples.streaming.TestVanillaStreaming host port")
      sys.exit(-1)
    }

    // 5-second micro-batch interval.
    val ssc = new StreamingContext(new SparkConf().setAppName("TestVanillaStreamingJob"),
      Seconds(5))

    /* As the application is a simple test we are overriding the default
   Receiver's setting of StorageLevel.MEMORY_AND_DISK_SER_2 */
    val msg = ssc.socketTextStream(args(0), args(1).toInt, StorageLevel.MEMORY_ONLY)
    // Tokenize on single spaces and keep only words longer than 5 characters.
    val wordsMoreThanFiveChars = msg.flatMap(_.split(" ")).filter(_.length > 5)

    println(">>> Print number of words having more than 5 characters in the message...")
    // count() yields one count per batch; print() emits it to stdout.
    wordsMoreThanFiveChars.count().print()

    ssc.start()
    // Blocks until the streaming context is stopped externally.
    ssc.awaitTermination()
    ssc.stop()
  }
} | prithvirajbose/spark-dev | src/main/scala/examples/streaming/TestVanillaStreaming.scala | Scala | gpl-3.0 | 1,143 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.join
import org.apache.flink.api.common.state._
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.tuple.{Tuple2 => JTuple2}
import org.apache.flink.configuration.Configuration
import org.apache.flink.table.api.StreamQueryConfig
import org.apache.flink.table.runtime.types.CRow
import org.apache.flink.types.Row
import org.apache.flink.util.Collector
/**
  * Connect data for left stream and right stream. Base class for stream non-window outer Join.
  *
  * State layout (inherited): each side keeps a MapState from row to a pair
  * (record count, expiration timestamp). Outer-join semantics are implemented
  * by emitting/retracting rows padded with nulls when the match count for a
  * row transitions between zero and non-zero.
  *
  * @param leftType        the input type of left stream
  * @param rightType       the input type of right stream
  * @param resultType      the output type of join
  * @param genJoinFuncName the function code of other non-equi condition
  * @param genJoinFuncCode the function name of other non-equi condition
  * @param isLeftJoin      the type of join, whether it is the type of left join
  * @param queryConfig     the configuration for the query to generate
  */
abstract class NonWindowOuterJoin(
    leftType: TypeInformation[Row],
    rightType: TypeInformation[Row],
    resultType: TypeInformation[CRow],
    genJoinFuncName: String,
    genJoinFuncCode: String,
    isLeftJoin: Boolean,
    queryConfig: StreamQueryConfig)
  extends NonWindowJoin(
    leftType,
    rightType,
    resultType,
    genJoinFuncName,
    genJoinFuncCode,
    queryConfig) {

  // result row, all fields from right will be null. Used for output when there is no matched rows.
  protected var leftResultRow: Row = _
  // result row, all fields from left will be null. Used for output when there is no matched rows.
  protected var rightResultRow: Row = _

  override def open(parameters: Configuration): Unit = {
    super.open(parameters)
    // Pre-allocate the padded result rows; unset fields stay null by default.
    leftResultRow = new Row(resultType.getArity)
    rightResultRow = new Row(resultType.getArity)
    LOG.debug(s"Instantiating NonWindowOuterJoin")
  }

  /**
    * Join current row with other side rows. Preserve current row if there are no matched rows
    * from other side. The RowWrapper has been reset before we call preservedJoin and we also
    * assume that the current change of cRowWrapper is equal to value.change.
    *
    * @param inputRow         the input row
    * @param inputRowFromLeft the flag indicating whether the input row is from the left input
    * @param otherSideState   the other side state
    * @return the number of matched rows
    */
  def preservedJoin(
      inputRow: Row,
      inputRowFromLeft: Boolean,
      otherSideState: MapState[Row, JTuple2[Long, Long]]): Long = {

    val otherSideIterator = otherSideState.iterator()
    while (otherSideIterator.hasNext) {
      val otherSideEntry = otherSideIterator.next()
      val otherSideRow = otherSideEntry.getKey
      val otherSideCntAndExpiredTime = otherSideEntry.getValue
      // join
      // f0 holds the duplicate count of this row; emit the joined result that many times.
      cRowWrapper.setTimes(otherSideCntAndExpiredTime.f0)
      callJoinFunction(inputRow, inputRowFromLeft, otherSideRow, cRowWrapper)
      // clear expired data. Note: clear after join to keep closer to the original semantics
      if (stateCleaningEnabled && curProcessTime >= otherSideCntAndExpiredTime.f1) {
        otherSideIterator.remove()
      }
    }
    val joinCnt = cRowWrapper.getEmitCnt
    // The result is NULL from the other side, if there is no match.
    if (joinCnt == 0) {
      cRowWrapper.setTimes(1)
      collectAppendNull(inputRow, inputRowFromLeft, cRowWrapper)
    }
    joinCnt
  }

  /**
    * Join current row with other side rows. Retract previous output row if matched condition
    * changed, i.e, matched condition is changed from matched to unmatched or vice versa. The
    * RowWrapper has been reset before we call retractJoin and we also assume that the current
    * change of cRowWrapper is equal to value.change.
    */
  def retractJoin(
      value: CRow,
      inputRowFromLeft: Boolean,
      currentSideState: MapState[Row, JTuple2[Long, Long]],
      otherSideState: MapState[Row, JTuple2[Long, Long]]): Unit = {

    val inputRow = value.row
    val otherSideIterator = otherSideState.iterator()
    // approximate number of record in current side. We only check whether number equals to 0, 1
    // or bigger
    val recordNum: Long = approxiRecordNumInState(currentSideState)

    while (otherSideIterator.hasNext) {
      val otherSideEntry = otherSideIterator.next()
      val otherSideRow = otherSideEntry.getKey
      val otherSideCntAndExpiredTime = otherSideEntry.getValue

      cRowWrapper.setTimes(otherSideCntAndExpiredTime.f0)
      // retract previous preserved record append with null
      // (this accumulate message makes the other side's row matched for the first time)
      if (recordNum == 1 && value.change) {
        cRowWrapper.setChange(false)
        collectAppendNull(otherSideRow, !inputRowFromLeft, cRowWrapper)
        // recover for the next iteration
        cRowWrapper.setChange(true)
      }
      // do normal join
      callJoinFunction(inputRow, inputRowFromLeft, otherSideRow, cRowWrapper)

      // output preserved record append with null if have to
      // (this retract message leaves the other side's row with no matches)
      if (!value.change && recordNum == 0) {
        cRowWrapper.setChange(true)
        collectAppendNull(otherSideRow, !inputRowFromLeft, cRowWrapper)
        // recover for the next iteration
        cRowWrapper.setChange(false)
      }
      // clear expired data. Note: clear after join to keep closer to the original semantics
      if (stateCleaningEnabled && curProcessTime >= otherSideCntAndExpiredTime.f1) {
        otherSideIterator.remove()
      }
    }
  }

  /**
    * Return approximate number of records in corresponding state. Only check if record number is
    * 0, 1 or bigger.
    */
  def approxiRecordNumInState(currentSideState: MapState[Row, JTuple2[Long, Long]]): Long = {
    var recordNum = 0L
    val it = currentSideState.iterator()
    // Stop as soon as we know the count is >= 2; callers only need 0/1/many.
    while(it.hasNext && recordNum < 2) {
      recordNum += it.next().getValue.f0
    }
    recordNum
  }

  /**
    * Append input row with default null value if there is no match and Collect.
    */
  def collectAppendNull(
      inputRow: Row,
      inputFromLeft: Boolean,
      out: Collector[Row]): Unit = {

    var i = 0
    if (inputFromLeft) {
      // Left row occupies the leading fields; trailing (right) fields stay null.
      while (i < inputRow.getArity) {
        leftResultRow.setField(i, inputRow.getField(i))
        i += 1
      }
      out.collect(leftResultRow)
    } else {
      // Right row occupies the trailing fields; leading (left) fields stay null.
      while (i < inputRow.getArity) {
        val idx = rightResultRow.getArity - inputRow.getArity + i
        rightResultRow.setField(idx, inputRow.getField(i))
        i += 1
      }
      out.collect(rightResultRow)
    }
  }
}
| yew1eb/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/join/NonWindowOuterJoin.scala | Scala | apache-2.0 | 7,310 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval
import cats.laws._
import cats.laws.discipline._
import monix.execution.Callback
import monix.execution.ExecutionModel.AlwaysAsyncExecution
import monix.execution.exceptions.DummyException
import scala.util.{Failure, Success, Try}
/**
  * Tests for `Task.now` / `Task.raiseError`: eager evaluation at construction,
  * synchronous vs. `AlwaysAsyncExecution` run behavior, `map`/`flatMap` laws,
  * error protection, non-cancelability, and `runSyncStep`.
  */
object TaskNowSuite extends BaseTestSuite {
  test("Task.now should work synchronously") { implicit s =>
    var wasTriggered = false
    def trigger(): String = { wasTriggered = true; "result" }

    val task = Task.now(trigger())
    assert(wasTriggered, "wasTriggered")

    val f = task.runToFuture
    assertEquals(f.value, Some(Success("result")))
  }

  test("Task.now.runAsync: CancelableFuture should be synchronous for AlwaysAsyncExecution") { s =>
    implicit val s2 = s.withExecutionModel(AlwaysAsyncExecution)

    var wasTriggered = false
    def trigger(): String = { wasTriggered = true; "result" }

    val task = Task.now(trigger())
    assert(wasTriggered, "wasTriggered")

    val f = task.runToFuture
    assertEquals(f.value, Some(Success("result")))
  }

  test("Task.now.runAsync(callback) should work synchronously") { implicit s =>
    var result = Option.empty[Try[String]]
    var wasTriggered = false
    def trigger(): String = { wasTriggered = true; "result" }

    val task = Task.now(trigger())
    assert(wasTriggered, "wasTriggered")

    task.runAsync(Callback.fromTry[String](r => result = Some(r)))
    assertEquals(result, Some(Success("result")))
  }

  test("Task.now.runAsync(callback) should be asynchronous for AlwaysAsyncExecution") { s =>
    implicit val s2 = s.withExecutionModel(AlwaysAsyncExecution)

    var result = Option.empty[Try[String]]
    var wasTriggered = false
    def trigger(): String = { wasTriggered = true; "result" }

    val task = Task.now(trigger())
    assert(wasTriggered, "wasTriggered")

    task.runAsync(Callback.fromTry[String](r => result = Some(r)))
    assertEquals(result, None)

    s2.tick()
    assertEquals(result, Some(Success("result")))
  }

  test("Task.raiseError should work synchronously") { implicit s =>
    var wasTriggered = false
    val dummy = DummyException("dummy")
    def trigger(): Throwable = { wasTriggered = true; dummy }

    val task = Task.raiseError(trigger())
    assert(wasTriggered, "wasTriggered")

    val f = task.runToFuture
    assertEquals(f.value, Some(Failure(dummy)))
  }

  test("Task.raiseError.runAsync: CancelableFuture should be synchronous for AlwaysAsyncExecution") { s =>
    implicit val s2 = s.withExecutionModel(AlwaysAsyncExecution)

    val dummy = DummyException("dummy")
    var wasTriggered = false
    def trigger(): Throwable = { wasTriggered = true; dummy }

    val task = Task.raiseError[String](trigger())
    assert(wasTriggered, "wasTriggered")

    val f = task.runToFuture
    assertEquals(f.value, Some(Failure(dummy)))
  }

  test("Task.raiseError.runAsync(callback) should work synchronously") { implicit s =>
    var result = Option.empty[Try[String]]
    val dummy = DummyException("dummy")
    var wasTriggered = false
    def trigger(): Throwable = { wasTriggered = true; dummy }

    val task = Task.raiseError[String](trigger())
    assert(wasTriggered, "wasTriggered")

    task.runAsync(Callback.fromTry[String](r => result = Some(r)))
    assertEquals(result, Some(Failure(dummy)))
  }

  test("Task.raiseError.runAsync(callback) should be asynchronous for AlwaysAsyncExecution") { s =>
    implicit val s2 = s.withExecutionModel(AlwaysAsyncExecution)

    val dummy = DummyException("dummy")
    var result = Option.empty[Try[String]]
    var wasTriggered = false
    def trigger(): Throwable = { wasTriggered = true; dummy }

    val task = Task.raiseError[String](trigger())
    assert(wasTriggered, "wasTriggered")

    task.runAsync(Callback.fromTry[String](r => result = Some(r)))
    assertEquals(result, None)

    s2.tick()
    assertEquals(result, Some(Failure(dummy)))
  }

  test("Task.now.map should work") { implicit s =>
    // Removed a stray `Coeval.now(1).map(_ + 1).value()` statement that was
    // unrelated to this Task property and had no effect on the assertion.
    check1 { a: Int =>
      Task.now(a).map(_ + 1) <-> Task.now(a + 1)
    }
  }

  test("Task.raiseError.map should be the same as Task.raiseError") { implicit s =>
    check {
      val dummy = DummyException("dummy")
      Task.raiseError[Int](dummy).map(_ + 1) <-> Task.raiseError(dummy)
    }
  }

  test("Task.raiseError.flatMap should be the same as Task.flatMap") { implicit s =>
    check {
      val dummy = DummyException("dummy")
      Task.raiseError[Int](dummy).flatMap(Task.now) <-> Task.raiseError(dummy)
    }
  }

  test("Task.raiseError.flatMap should be protected") { implicit s =>
    check {
      val dummy = DummyException("dummy")
      val err = DummyException("err")
      Task.raiseError[Int](dummy).flatMap[Int](_ => throw err) <-> Task.raiseError(dummy)
    }
  }

  test("Task.now.flatMap should protect against user code") { implicit s =>
    val ex = DummyException("dummy")
    val t = Task.now(1).flatMap[Int](_ => throw ex)
    check(t <-> Task.raiseError(ex))
  }

  test("Task.now.flatMap should be tail recursive") { implicit s =>
    def loop(n: Int, idx: Int): Task[Int] =
      Task.now(idx).flatMap { a =>
        if (idx < n) loop(n, idx + 1).map(_ + 1)
        else
          Task.now(idx)
      }

    val iterations = s.executionModel.recommendedBatchSize * 20
    val f = loop(iterations, 0).runToFuture

    s.tickOne()
    assertEquals(f.value, None)
    s.tick()
    assertEquals(f.value, Some(Success(iterations * 2)))
  }

  test("Task.now should not be cancelable") { implicit s =>
    val t = Task.now(10)
    val f = t.runToFuture
    f.cancel()
    s.tick()
    assertEquals(f.value, Some(Success(10)))
  }

  test("Task.raiseError should not be cancelable") { implicit s =>
    val dummy = DummyException("dummy")
    val t = Task.raiseError(dummy)
    val f = t.runToFuture
    f.cancel()
    s.tick()
    assertEquals(f.value, Some(Failure(dummy)))
  }

  test("Task.now.coeval") { implicit s =>
    val result = Task.now(100).runSyncStep
    assertEquals(result, Right(100))
  }

  test("Task.raiseError.coeval") { implicit s =>
    val dummy = DummyException("dummy")
    val result = Task.raiseError(dummy).attempt.runSyncStep
    assertEquals(result, Right(Left(dummy)))
  }
}
| monifu/monix | monix-eval/shared/src/test/scala/monix/eval/TaskNowSuite.scala | Scala | apache-2.0 | 6,901 |
package opennlp.scalabha.tree
import opennlp.scalabha.log.SimpleLogger
import java.io.{OutputStreamWriter, BufferedWriter}
import org.clapper.argot.{ArgotUsageException, ArgotParser, ArgotConverters}
import opennlp.scalabha.model.TreeNode
import collection.mutable.HashMap
/**
 * Command-line tool that checks whether the token sequence of each parsed tree
 * in a tree file matches the corresponding line of a token file, modulo
 * Penn-Treebank bracket markup and a table of punctuation transformations.
 */
object TokenChecker {

  import ArgotConverters._

  // Argot command-line option definitions (parsed in main()).
  val parser = new ArgotParser(this.getClass.getName, preUsage = Some("Version 0.0"))
  val help = parser.flag[Boolean](List("h", "help"), "print help")
  val input = parser.option[String](List("i", "input"), "FILE", "input inputFile in which to check tokens")
  val tokens = parser.option[String](List("tok"), "FILE", "tokens to check")
  val silent = parser.flag[Boolean](List("s"), "Set this flag to silence warnings and errors in the tree parser.")

  val log = new SimpleLogger(this.getClass.getName, SimpleLogger.WARN, new BufferedWriter(new OutputStreamWriter(System.err)))

  // Penn-Treebank escape tokens for brackets: a tree token and a raw token are
  // considered equal if this table maps one to the other (either direction).
  val treeBankMarkup = Map(
    ("(" -> "-LRB-"),
    ("[" -> "-LSB-"),
    (")" -> "-RRB-"),
    ("]" -> "-RSB-")
  )

  // Typographic quote/apostrophe characters and their plain-ASCII equivalents.
  // A hit here is reported as a transformation error rather than a plain mismatch.
  val treeBankTransforms = Map(
    ("“" -> "\""),
    ("”" -> "\""),
    ("❞" -> "\""),
    ("❝" -> "\""),
    ("″" -> "\""),
    ("‟" -> "\""),
    ("ʹ" -> "'"),
    ("ʼ" -> "'"),
    ("ˈ" -> "'"),
    ("٬" -> "'"),
    ("‘" -> "'"),
    ("’" -> "'"),
    ("′" -> "'"),
    ("'" -> "'"),
    ("" -> "")
  )

  // Pretty-prints a sorted map by stripping the collection wrapper from its
  // toString and re-joining the entries with `join`.
  def spprintRepr(map: Map[String, Int], join: String): String = {
    val regex = "[^(]+\\((.*)\\)".r
    val regex(string) = map.toList.sorted.toString
    string.replace(", ", join)
  }

  // True if `dict` maps a->b or b->a (symmetric lookup).
  private def checkDict(dict: Map[String, String], a: String, b: String): Boolean = {
    (dict.contains(b) && dict(b).equals(a)) ||
      (dict.contains(a) && dict(a).equals(b))
  }

  def checkForMarkup(a: String, b: String): Boolean = {
    checkDict(treeBankMarkup, a, b)
  }

  def checkForTransform(a: String, b: String): Boolean = {
    checkDict(treeBankTransforms, a, b)
  }

  // Recursively compares the two token lists element-by-element (tail calls on
  // every recursive branch). Logs a descriptive error and returns false on the
  // first mismatch; bracket markup equivalences are accepted silently.
  def checkTokensInLine(treeTokens: List[String], tokFileTokens: List[String]): Boolean = {
    if (treeTokens.length != tokFileTokens.length) {
      //log.err("Lists should be the same length: %s %s\n".format(treeTokens, tokFileTokens))
      log.err("Fail: \n\ttree: %s is not the same length as \n\ttok: %s\n".format(treeTokens, tokFileTokens))
      false
    } else if (treeTokens.length == 0) {
      true
    } else {
      val a :: as = treeTokens
      val b :: bs = tokFileTokens
      if (a != b) {
        if (checkForMarkup(a, b)) {
          checkTokensInLine(as, bs)
        } else if (checkForTransform(a,b)) {
          log.err(("Fail: It looks like there is a tree character "+a+" that should have been the character "+b+":"+
            "\n\ttree:%s\n\t tok:%s\n").format(treeTokens,tokFileTokens))
          false
        } else {
          //log.err("%s does not match %s\n".format(a, b))
          log.err(("Fail: \"%s\" does not match \"%s\" in:" +
            "\n\ttree:%s\n\t tok:%s\n").format(a, b, treeTokens, tokFileTokens))
          false
        }
      } else {
        checkTokensInLine(as, bs)
      }
    }
  }

  // Pairs each tree line with its token line and produces a "pass"/"fail"
  // report string per line index.
  def checkTokens(infile: Iterator[String], tokfile: Iterator[String]): List[String] = {
    for (((inTreeLine, tokLine), index) <- (infile zip tokfile).toList.zipWithIndex) yield {
      val inTree = MultiLineTreeParser("trees", index, inTreeLine)
      inTree match {
        case Some(root) =>
          val inTreeTokens: List[String] = root.getTokenStrings
          // Strip sentence terminators before whitespace-tokenizing.
          val tokTokens = tokLine.replace("<EOS>", "").split("\\s+").toList
          checkTokensInLine(inTreeTokens, tokTokens) match {
            case true => "%d: pass".format(index)
            case false => "%d: fail".format(index)
          }
        case _ => "%d: Fail - Couldn't parse tree. See parser log messages.".format(index)
      }
    }
  }

  def main(args: Array[String]) {
    try {
      parser.parse(args)

      if (help.value.isDefined) {
        parser.usage()
      }

      // -s silences only the tree parser's log, not this tool's own log.
      MultiLineTreeParser.log.logLevel = silent.value match {
        case Some(_) => SimpleLogger.NONE
        case _ => MultiLineTreeParser.log.logLevel
      }

      val input_file = input.value match {
        case Some(filename: String) => scala.io.Source.fromFile(filename, "UTF-8").getLines()
        case _ => parser.usage()
      }
      val token_file = tokens.value match {
        case Some(filename: String) => scala.io.Source.fromFile(filename, "UTF-8").getLines()
        case _ => parser.usage()
      }

      log.trace("comparing tokens from %s to those in the trees in %s\n".format(tokens.value.get, input.value.get))

      println(checkTokens(input_file, token_file).mkString("\n"))

      // Summarize warning/error counts from both this tool and the parser.
      val (theseWarnings, theseErrors) = log.getStats()
      val (parseWarnings, parseErrors) = MultiLineTreeParser.log.getStats()
      val (warnings, errors) = (theseWarnings + parseWarnings, theseErrors + parseErrors)
      log.summary("Warnings,Errors: %s\n".format((warnings, errors)))
    } catch {
      case e: ArgotUsageException =>
        println(e.message)
    }
  }
}
| eponvert/Scalabha | src/main/scala/opennlp/scalabha/tree/TokenChecker.scala | Scala | apache-2.0 | 5,035 |
package com.github.caiiiycuk.ast2uml
// Marker hierarchy for per-node specifiers (currently only access levels).
abstract class AstNodeSpec
// Access specifier, e.g. "public", "protected", "private".
case class AstAccessSpec(level: String) extends AstNodeSpec

// A named AST node plus its specifiers; prints as just the name.
case class AstNode(name: String, specs: Seq[AstNodeSpec]) {
  override def toString = name
}
/**
 * Base class of the AST: a node with children, rendered as an indented tree
 * where each level adds one padding character.
 */
abstract class Ast(node: AstNode, childs: Seq[Ast]) {
  def astChilds = childs
  def astName = node.name

  protected def toString(paddingChar: Char = '~', padding: String = ""): String = {
    val childString = childs.map(_.toString(paddingChar, padding + paddingChar)).mkString
    val astClass = s"${padding}(${getClass.getSimpleName})"
    // Pad the class label to a 20-char column; for labels longer than 20 the
    // repeat count is negative and yields an empty string — presumably fine,
    // but confirm output alignment for deeply nested trees.
    val postfix = " " * (20 - astClass.length)
    s"${astClass}${postfix}${astName}\\n${childString}"
  }

  override def toString() = toString('~')
}
// Root of a parsed file; named "." with no specifiers.
case class AstRoot(childs: List[Ast]) extends Ast(AstNode(".", Seq()), childs)
// A namespace scope containing further declarations.
case class AstNamespace(node: AstNode, childs: List[Ast]) extends Ast(node, childs)
/**
 * Common base for record-like declarations (classes and structs), with
 * conversions between the two concrete forms.
 */
abstract class AstRecord(node: AstNode, childs: List[Ast]) extends Ast(node, childs) {
  // Fixed: previously constructed AstClass here as well (copy-paste defect),
  // making toStruct indistinguishable from toClass.
  def toStruct = AstStruct(node, childs)
  def toClass = AstClass(node, childs)
}
case class AstClass(node: AstNode, childs: List[Ast]) extends AstRecord(node, childs)
case class AstStruct(node: AstNode, childs: List[Ast]) extends AstRecord(node, childs)
case class AstUnknown(node: AstNode, childs: List[Ast]) extends Ast(node, childs)
/**
 * A field node. Its display name is prefixed with a UML visibility marker
 * derived from the first access specifier: '+' public, '#' protected,
 * '-' otherwise; an unknown type renders as "???".
 */
case class AstField(node: AstNode, `type`: Option[String], childs: List[Ast]) extends Ast(node, childs) {
  override def astName = {
    val visibility = node.specs.headOption match {
      case Some(AstAccessSpec("public"))    => "+"
      case Some(AstAccessSpec("protected")) => "#"
      case _                                => "-"
    }
    s"$visibility${`type`.getOrElse("???")} ${node}"
  }
}
/**
 * A method node. Display name is "<visibility><name>(<signature>)", using the
 * same visibility convention as field nodes; an unknown signature renders as "???".
 */
case class AstMethod(node: AstNode, signature: Option[String], childs: List[Ast]) extends Ast(node, childs) {
  override def astName = {
    val visibility = node.specs.headOption match {
      case Some(AstAccessSpec("public"))    => "+"
      case Some(AstAccessSpec("protected")) => "#"
      case _                                => "-"
    }
    s"$visibility${node}(${signature.getOrElse("???")})"
  }
}
/**
* Copyright 2009 Latterfrosken Software Development Limited
*
* This file is part of Lafros GUI-App.
*
* Lafros GUI-App is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* Lafros GUI-App is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with Lafros GUI-App. If not, see <http://www.gnu.org/licenses/>. */
package com.lafros.gui.app
import javax.swing.{JApplet, JMenuBar}
import scala.swing.{Component, Label, MenuBar, RootPanel, ScrollPane}
/**
* runs the <tt>App</tt> as an applet.
* <p>The web page in which the applet is to be
* embedded should contain an element similar to the following one:<pre>
* <applet code="com.lafros.gui.app.Applet"
* width="300"
* height="200"
 * archive="lafros-gui-app-1.0r1.jar, lafros-gui-cmds-1.0r1.jar, lafros-gui-alerts-1.0r1.jar">
* <param name="App" value="org.myorg.myapp.app">
* </applet></pre></p>
* <p>Note that the Alerts .jar file need only be included if Alerts are used by
* the app itself.</p>
*
* @author Rob Dickens */
class Applet extends JApplet { outer =>
  private var appCreated = false // dispatch-thread only
  private var app: App = _ // dispatch-thread only
  private var _appContext: AppletAppContext = _ // appContext already defined!
  private var startedBefore = false // dispatch-thread only

  // Bridges this JApplet into scala-swing; revalidates the peer whenever the content changes.
  val rootPanel = new RootPanel {
    def peer = outer
    override def contents_=(c: Component) {
      super.contents_=(c)
      peer.validate()
    }
  }

  // Applet lifecycle entry point: creates the App singleton once per classloader
  // and initialises it. All mutable state above is only touched on the
  // event-dispatch thread, via App.invokeAndWait.
  override def init() {
    //println("applet.init(): "+ this)
    App.invokeAndWait {
      appCreated = if (Applet.onlyTrueTheFirstTime) try {
        app = createApp
        true
      }
      catch {
        case ex: Exception =>
          displayWarning("failed to create app:<br>"+ ex.getMessage)
          false
      }
      else {
        // classloader being reused, indicating applet-page reloaded or returned to,
        // which scala.actors can't tolerate - that is, all bets are off
        displayWarning("To reload or load another instance of this applet, please do the following:"+
                       "<ol>"+
                       "<li>type 'x' into the Java console window[1];"+
                       "<li>press the browser's reload-page button."+
                       "</ol>"+
                       "1 Please consider altering your Java preferences if this is not shown.")
        false
      }
      if (appCreated) {
        _appContext = new AppletAppContext(this, args)
        wrap("init") {
          app.initApplet(_appContext)
        }
      }
    }
    //println("..applet.init()")
  }

  // First start() starts the App; later starts (page revisits) restart it instead.
  override def start() = App.invokeAndWait {
    if (appCreated) wrap("start") {
      if (startedBefore) app.restartApplet() else {
        app.start()
        startedBefore = true
      }
    }
  }

  override def stop() = App.invokeAndWait {
    if (appCreated) wrap("stop") {
      app.stopApplet()
    }
  }

  // Terminates the app and its context; swallows app exceptions so the
  // context is still terminated and destroy() never throws.
  override def destroy() = App.invokeAndWait {
    //println("applet.destroy()..")
    if (appCreated) {
      try {
        app.terminate()
      }
      catch {
        case ex: Exception => ex.printStackTrace
      }
      if (_appContext != null) _appContext.terminate()
    }
    //println("..applet.destroy()")
  }

  // Runs f, reporting any exception on the console and in the browser status bar.
  private def wrap(action: String)(f: => Unit) = try {
    f
  }
  catch {
    case ex: Exception =>
      val msg = "failed to "+ action +"() applet: "+ ex
      println(msg)
      showStatus(msg)
  }

  // Routes menu-bar changes through the app context, wrapping the peer in a scala-swing MenuBar.
  override def setJMenuBar(arg: JMenuBar) = _appContext.menuBar = arg match {
    case null => null
    case _ => new MenuBar {
      override lazy val peer = arg
    }
  }
  //
  private[app] def setJMenuBarPrivately(arg: JMenuBar) = super.setJMenuBar(arg)

  // Collects applet parameters "arg0", "arg1", ... stopping at the first
  // missing or blank one.
  // NOTE(review): values are prepended to the list, so the returned array is in
  // reverse order (argN .. arg0) — confirm this is intended.
  private def args = {
    var l: List[String] = Nil
    def appendedParam(i: Int) = getParameter("arg"+ i) match {
      case null => false
      case untrimmed => untrimmed.trim match {
        case "" => false
        case trimmed => l = (trimmed :: l)
          true
      }
    }
    {
      var i = 0
      while (appendedParam(i)) i += 1
    }
    l.toArray
  }

  // Reflectively loads the App singleton named by the "App" applet parameter
  // (appending '$' to reach the Scala object class if needed).
  private def createApp = {
    val name = {
      val p = getParameter("App")
      if (p == null) throw new RuntimeException("missing parameter: App - name of impl'n class to run as applet")
      if (p.endsWith("$")) p else p +"$"
    }
    try {
      val field = {
        val c1ass = Class.forName(name)
        c1ass.getDeclaredField("MODULE$")
      }
      field.get(null).asInstanceOf[App]
    }
    catch {
      case ex: ClassNotFoundException =>
        throw new RuntimeException("App impl'n class not found: "+ name)
      case ex: IllegalAccessException =>
        throw new RuntimeException("App impl'n class could not be instantiated: "+ ex)
      case ex: InstantiationException =>
        throw new RuntimeException("App impl'n class could not be instantiated: "+ ex)
    }
  }

  // Replaces the applet's content with an HTML warning message in a scroll pane.
  private def displayWarning(msg: String) {
    val sp = new ScrollPane
    rootPanel.contents = sp
    sp.viewportView = new Label("<html>"+ msg +"</html>")
  }
}
/**
 * Guards against classloader reuse: the flag reads true exactly once per
 * classloader and false on every subsequent query.
 */
private object Applet {
  private[this] var _onlyTrueTheFirstTime = true

  def onlyTrueTheFirstTime = synchronized {
    if (_onlyTrueTheFirstTime) {
      _onlyTrueTheFirstTime = false
      true
    } else false
  }
}
| robcd/lafros-gui | lafros-gui-app/src/main/scala/com/lafros/gui/app/Applet.scala | Scala | gpl-3.0 | 5,729 |
//import org.openstack.api.restful.ceilometer.v2.FilterExpressions.SimpleQueryPackage.{Operator, SimpleQuery}
import java.util.Date
import it.unibo.ing.utils._
import org.openstack.api.restful.ceilometer.v2.FilterExpressions.SimpleQueryPackage.Goodies._
import org.openstack.api.restful.ceilometer.v2.FilterExpressions.FieldValue._
import org.openstack.api.restful.ceilometer.v2.elements.{Resource, Meter}
import org.openstack.clients.ceilometer.v2.ICeilometerClient
import org.scalatest._
import scala.collection.IterableLike
import scala.collection.generic.CanBuildFrom
import scala.concurrent.Await
import scala.concurrent.duration._
/**
* Created by tmnd on 26/11/14.
*/
import java.net.URL
// Integration tests against a live OpenStack Ceilometer deployment.
// NOTE(review): live endpoint addresses and credentials are hard-coded in
// source — consider moving them to configuration before publishing this repo.
class ClientTests extends FlatSpec with Matchers{
  val keystoneURL = new URL("http://137.204.57.150:5000")
  val ceilometerURL = new URL("http://137.204.57.150:8777")
  val tenantName = "ceilometer_project"
  val username = "amurgia"
  val password = "PUs3dAs?"

  // Lazily initialised so the client only connects when a test first runs.
  lazy val client: ICeilometerClient = org.openstack.clients.ceilometer.v2.CeilometerClient.getInstance(ceilometerURL, keystoneURL, tenantName, username, password,30000, 360000)
  lazy val meters = client.listAllMeters
  lazy val resources = client.listAllResources

  "there " should " be some meters " in {
    meters.isEmpty should be (false)
    println(s"there are ${meters.size} meters")
  }

  "there " should " be some statistics about meters in the last 1 hour" in {
    val theMeter = meters.head.name
    // Window: one hour ago up to now.
    val startDate = new Date(new Date().getTime - 3600000)
    val endDate = new Date()
    val stats = client.getStatistics(theMeter, startDate, endDate)
    stats.isEmpty should be (false)
  }

  "there " should " be some resources " in {
    resources should not be None
    resources.isEmpty should be (false)
    println(s"there are ${resources.size} resources")
  }

  "there " should " be some samples about resources in the last 1 hour" in {
    val start = new Date((new Date().getTime - 3600000))
    val end = new Date(start.getTime + 3600000)
    println("start " + start)
    println("end " + end)
    val samples1 = client.getSamplesOfResource(resources.head.resource_id, start, end)
    val samples2 = client.getSamplesOfMeter(meters.head.name, start, end)
    samples1 should not be None
    samples1.isEmpty should be (false)
    samples2 should not be None
    samples2.isEmpty should be (false)
    for (s <- samples1.distinctBy(_.meter))
      println(s.meter)
    /*
    println(s"there are ${samples1.get.size} samples")
    println(s"there are ${samples2.get.size} samples")
    println("avaiable resources :")
    println("-----------------------------------")
    for(r <- resources.get)
      println(r.resource_id + " " + r.links)
    println("-----------------------------------")
    println("avaiable meters :")
    println("-----------------------------------")
    for(r <- meters.get.distinctBy(_.name))
      println(r.name + " " + r.`type`)
    println("-----------------------------------")
    */
  }

  // distinctBy: keeps the first element for each key produced by f
  // (predates the Scala 2.13 built-in of the same name).
  implicit class RichCollection[A, Repr](xs: IterableLike[A, Repr]){
    def distinctBy[B, That](f: A => B)(implicit cbf: CanBuildFrom[Repr, A, That]) = {
      val builder = cbf(xs.repr)
      val i = xs.iterator
      var set = Set[B]()
      while (i.hasNext) {
        val o = i.next
        val b = f(o)
        if (!set(b)) {
          set += b
          builder += o
        }
      }
      builder.result
    }
  }
}
| tmnd1991/ceilometerAPI4s | src/test/scala/ClientTests.scala | Scala | apache-2.0 | 3,439 |
package de.qualitune.checks.exceptions
/**
* CPSTextInterpreter - parses and interprets the CPSText DSL.
* Copyright (C) 2011 Max Leuthaeuser
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
 * Signals that a CPS device was declared more than once (raised by the
 * interpreter's duplicate-device check).
 *
 * @param errorMsg description of the duplicate declaration
 */
case class DuplicateCPSDeviceException(errorMsg: String) extends Exception(errorMsg)
package rescala.extra.lattices.delta.crdt.basic
import kofre.decompose.DotStore.{DotMap, DotSet}
import kofre.decompose.interfaces.AWSetInterface
import kofre.decompose.interfaces.AWSetInterface.{AWSetCompanion, State}
import kofre.decompose.{UIJDLattice}
import rescala.extra.replication.AntiEntropy
/** [[BasicCRDT Basic]] implementation of [[rescala.extra.lattices.delta.interfaces.AWSetInterface AWSetInterface]]
*
* Instead of the class constructor, you should use the apply method of the companion object to create new instances.
*
* @tparam E Type of the elements stored in the set
* @tparam C Type of the causal context used for this causal CRDT
*/
class AWSet[E](
    val state: State[E],
    protected val antiEntropy: AntiEntropy[State[E]]
) extends AWSetInterface[E, AWSet[E]] with BasicCRDT[State[E], AWSet[E]] {

  // Rebuilds the wrapper around an updated state, reusing the same anti-entropy channel.
  override protected def copy(state: State[E]): AWSet[E] = new AWSet(state, antiEntropy)
}

object AWSet extends AWSetCompanion {

  /** Creates a new AWSet instance, starting from the lattice's bottom state.
    *
    * @param antiEntropy AntiEntropy instance used for exchanging deltas with other replicas
    * @tparam E Type of the elements stored in the set
    */
  def apply[E](antiEntropy: AntiEntropy[State[E]]): AWSet[E] =
    new AWSet(UIJDLattice[State[E]].bottom, antiEntropy)
}
| guidosalva/REScala | Code/Extensions/Replication/src/main/scala/rescala/extra/lattices/delta/crdt/basic/AWSet.scala | Scala | apache-2.0 | 1,359 |
package io.peregrine
import io.peregrine.test.FlatSpecHelper
import com.twitter.finagle.http.Cookie
// Verifies the CSRF filter: GETs bypass the check, POSTs must carry a token
// that matches the _authenticity_token cookie, and views receive the token.
class CsrfFilterSpec extends ShouldSpec with FlatSpecHelper {

  class MockView extends View("csrf_test", "mock.test", "")

  override val server: PeregrineServer = new PeregrineServer()
  // Fixed token so the assertions below can compare against a known value.
  server.addFilter(new CsrfFilter {
    override def generateToken = "TEST_TOKEN"
  })
  server.register(testController)

  // NOTE(review): this renderer registration is commented out, yet the view
  // tests below expect the rendered body to equal the token — confirm the
  // default renderer for "csrf_test" provides that behaviour.
  /* server.registerViewRenderer(new ViewRenderer() {
    val format = "csrf_test"
    def render(template: String, view: View): String = {
      view.csrfToken.getOrElse("token_not_found")
    }
  })*/

  // default behaviour
  object testController extends Controller {
    get("/get_with_no_problems") { req =>
      render.plain("no error")
    }

    post("/get_with_no_problems") { req =>
      render.plain("no error")
    }

    post("/post_with_no_csrf_token") { req =>
      render.plain("souldn't see this")
    }

    post("/to_view") { req =>
      render.view(new MockView())
    }

    get("/to_view") { req =>
      render.view(new MockView())
    }
  }

  "GET" should "not check for csrf_token" in {
    get("/get_with_no_problems")
    response.code should equal(200)
    response.body should equal("no error")
  }

  "POST with no token" should "fail and show the correct message" in {
    post("/post_with_no_csrf_token")
    response.code should equal(403)
    response.body should equal("CSRF failed")
  }

  "POST with token" should "pass if it's the same token" in {
    post("/get_with_no_problems",
      params = Map("_csrf_token" -> "TEST_TOKEN"),
      headers = Map("Cookie" -> "_authenticity_token=TEST_TOKEN;")
    )
    response.code should equal(200)
    response.body should equal("no error")
  }

  "POST with token" should "fail if it's not the same token in params" in {
    post("/get_with_no_problems",
      params = Map("_csrf_token" -> "DIFF_TOKEN"),
      headers = Map("Cookie" -> "_authenticity_token=TEST_TOKEN;")
    )
    response.code should equal(403)
    response.body should equal("CSRF failed")
  }

  "POST with token" should "fail if it's not the same token in cookies" in {
    post("/get_with_no_problems",
      params = Map("_csrf_token" -> "TEST_TOKEN"),
      headers = Map("Cookie" -> "_authenticity_token=DIFF_TOKEN;")
    )
    response.code should equal(403)
    response.body should equal("CSRF failed")
  }

  "POST to view" should "apply the correct token to view to be rendered" in {
    post("/to_view",
      params = Map("_csrf_token" -> "TEST_TOKEN"),
      headers = Map("Cookie" -> "_authenticity_token=TEST_TOKEN;")
    )
    response.code should equal(200)
    response.body should equal("TEST_TOKEN")
  }

  "GET to view" should "apply the token to view no matter the GET verb" in {
    get("/to_view")
    response.code should equal(200)
    response.body should equal("TEST_TOKEN")
  }
}
| pairi/pairi | src/test/scala/io/peregrine/CsrfFilterSpec.scala | Scala | apache-2.0 | 2,884 |
/*
*
* o o o o o
* | o | |\\ /| | /
* | o-o o--o o-o oo | | O | oo o-o OO o-o o o
* | | | | | | | | | | | | | | | | \\ | | \\ /
* O---oo-o o--O | o-o o-o-o o o o-o-o o o o-o o
* |
* o--o
* o--o o o--o o o
* | | | | o | |
* O-Oo oo o-o o-O o-o o-O-o O-o o-o | o-O o-o
* | \\ | | | | | | | | | | | | | |-' | | | \\
* o o o-o-o o o-o o-o o o o o | o-o o o-o o-o
*
* Logical Markov Random Fields (LoMRF).
*
*
*/
package lomrf.mln.learning.structure.hypergraph
import lomrf.logic.AtomSignature
import lomrf.logic.parser.KBParser
import lomrf.mln.model.ConstantsSet
import org.scalatest.{ FunSpec, Matchers }
/**
* Specification test for path templates.
*/
/**
 * Specification test for path templates.
 */
final class PathTemplateSpecTest extends FunSpec with Matchers {

  // Predicate schema having template atoms, evidence atoms and non-evidence atoms
  private val predicateSchema = Map(
    AtomSignature("TemplateAtom_1", 2) -> Vector("X", "T"),
    AtomSignature("TemplateAtom_2", 2) -> Vector("X", "T"),
    AtomSignature("EvidenceAtom_1", 2) -> Vector("Y", "T"),
    AtomSignature("EvidenceAtom_2", 2) -> Vector("T", "T"),
    AtomSignature("NonEvidenceAtom_1", 2) -> Vector("X", "T"))

  // Empty function schema
  private val functionsSchema = Map.empty[AtomSignature, (String, Vector[String])]

  // Constants domain
  private val constantsDomain = Map(
    "T" -> ConstantsSet((1 to 10).map(_.toString)),
    "X" -> ConstantsSet("X1", "X2", "X3", "X4"),
    "Y" -> ConstantsSet("Y1", "Y2", "Y3", "Y4"))

  private val parser = new KBParser(predicateSchema, functionsSchema)

  // ------------------------------------------------------------------------------------------------------------------
  // --- TEST: The Event Calculus case
  // ------------------------------------------------------------------------------------------------------------------

  // Template atoms: one per axiom below, in the same order.
  private val templateAtomsPerAxiom = Seq(
    AtomSignature("TemplateAtom_1", 2),
    AtomSignature("TemplateAtom_2", 2),
    AtomSignature("TemplateAtom_2", 2),
    AtomSignature("TemplateAtom_1", 2))

  // Event-calculus-style hard axioms, parsed and converted to CNF over the domain.
  val axioms = Seq(
    "EvidenceAtom_2(t1, t0) ^ TemplateAtom_1(x, t0) => NonEvidenceAtom_1(x, t1).",
    "EvidenceAtom_2(t1, t0) ^ TemplateAtom_2(x, t0) => !NonEvidenceAtom_1(x, t1).",
    "EvidenceAtom_2(t1, t0) ^ NonEvidenceAtom_1(x, t0) ^ !TemplateAtom_2(x, t0) => NonEvidenceAtom_1(x, t1).",
    "EvidenceAtom_2(t1, t0) ^ !NonEvidenceAtom_1(x, t0) ^ !TemplateAtom_1(x, t0) => !NonEvidenceAtom_1(x, t1).").map(parser.parseLogicalSentence).flatMap(_.toCNF(constantsDomain))

  info(axioms.map(_.literals.map(_.toText).mkString(" v ")).mkString("\\n"))

  //val pathTemplate = PathTemplate(Set("X", "T"))
  //axioms zip templateAtomsPerAxiom foreach { case (axiom, template) => pathTemplate + (axiom, template) }
  //info(s"$pathTemplate")

  // TODO
}
| anskarl/LoMRF | src/test/scala/lomrf/mln/learning/structure/hypergraph/PathTemplateSpecTest.scala | Scala | apache-2.0 | 3,074 |
package livehl.common.tools
import collection.JavaConversions._
import scala.Array.canBuildFrom
/**
 * Helpers for trimming, collecting and rendering exception stack traces.
 */
object ExceptionTool {

  /** Package prefix of this project: the first three segments of this object's package name. */
  lazy val packageString = {
    ExceptionTool.getClass.getName.split('.').take(3).mkString(".")
  }

  /**
   * Trims the stack trace of `tx` to the frames belonging to the current project package.
   *
   * @return the same throwable, with its stack trace replaced
   */
  def cutStackTrace(tx: Throwable): Throwable = cutStackTrace(tx, packageString, false)

  /**
   * Trims the stack trace of `tx` to the frames whose rendering contains `packName`.
   *
   * @param printHead when true, the frames preceding the first project frame are
   *                  appended after the project frames, preserving the detailed head
   */
  def cutStackTrace(tx: Throwable, packName: String, printHead: Boolean) = {
    val projectFrames = tx.getStackTrace().filter(_.toString().contains(packName))
    if (printHead) {
      // Leading frames up to (but excluding) the first project frame.
      val leadingFrames = tx.getStackTrace().takeWhile(!_.toString().contains(packName))
      tx.setStackTrace(projectFrames ++ leadingFrames)
    } else {
      tx.setStackTrace(projectFrames)
    }
    tx
  }

  /** Trims to project frames while also keeping the leading (pre-project) frames. */
  def cutStackTraceWithHead(tx: Throwable) = cutStackTrace(tx, packageString, true)

  /** Renders the stack traces of all live threads as one text block. */
  def getAllStackTraces() = {
    val sb = new StringBuffer()
    val it = Thread.getAllStackTraces().entrySet().iterator()
    while (it.hasNext) {
      val entry = it.next()
      sb.append(entry.getKey().getName() + "\\r\\n")
      for (ste <- entry.getValue()) {
        sb.append("\\t" + ste.toString() + "\\r\\n")
      }
    }
    sb.toString()
  }

  /** Renders a throwable's message, frames and (recursively) its cause chain as text. */
  def getStackTraceString(tx: Throwable): String = {
    val sb = new StringBuffer(tx.getMessage() + "\\r\\n")
    for (st <- tx.getStackTrace()) {
      sb.append(st.toString() + "\\r\\n")
    }
    if (tx.getCause() != null) {
      sb.append("\\t" + getStackTraceString(tx.getCause()) + "\\r\\n")
    }
    sb.toString()
  }
}
| livehl/common | src/main/scala/livehl/common/tools/ExceptionTool.scala | Scala | apache-2.0 | 2,568 |
package model
import java.time.Instant
/**
 * A (very) simple representation of a trade.
 *
 * See: http://api.bitcoincharts.com/v1/trades.csv?symbol=bitstampUSD
 *
 * @param timestamp when the trade occurred
 * @param price     trade price
 * @param amount    traded amount
 */
case class Trade(timestamp: Instant, price: Double, amount: Double)

object Trade {

  /** Builds a trade from a Unix epoch timestamp in seconds. */
  def apply(unixtime: Long, price: Double, amount: Double): Trade =
    Trade(Instant.ofEpochSecond(unixtime), price, amount)

  /** Builds a trade stamped with the current instant. */
  def apply(price: Double, amount: Double): Trade =
    Trade(Instant.now(), price, amount)
}
| lancearlaus/akka-streams-http-presentation | src/main/scala/model/Trade.scala | Scala | apache-2.0 | 455 |
package com.twitter.finagle.service
import com.twitter.conversions.time._
import com.twitter.finagle._
import com.twitter.util.{Duration, Time, Timer, TimerTask, Try}
private[finagle] object FailureAccrualFactory {
  // Builds a ServiceFactoryWrapper that decorates factories with failure accrual.
  def wrapper(
    numFailures: Int, markDeadFor: Duration)(timer: Timer): ServiceFactoryWrapper = {
    new ServiceFactoryWrapper {
      def andThen[Req, Rep](factory: ServiceFactory[Req, Rep]) =
        new FailureAccrualFactory(factory, numFailures, markDeadFor, timer)
    }
  }

  val role = Stack.Role("FailureAccrual")

  /**
   * A class eligible for configuring a [[com.twitter.finagle.Stackable]]
   * [[com.twitter.finagle.service.FailureAccrualFactory]].
   * @param numFailures The number of consecutive failures before marking an endpoint as dead.
   * @param markDeadFor The duration to mark an endpoint as dead.
   */
  case class Param(numFailures: Int, markDeadFor: Duration)
  implicit object Param extends Stack.Param[Param] {
    val default = Param(5, 5.seconds)
  }

  /**
   * Creates a [[com.twitter.finagle.Stackable]] [[com.twitter.finagle.service.FailureAccrualFactory]].
   */
  def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] =
    new Stack.Simple[ServiceFactory[Req, Rep]] {
      val role = FailureAccrualFactory.role
      val description = "Backoff from hosts that we cannot successfully make requests to"
      def make(next: ServiceFactory[Req, Rep])(implicit params: Params) = {
        // Pull the accrual thresholds and timer from the stack parameters.
        val FailureAccrualFactory.Param(n, d) = get[FailureAccrualFactory.Param]
        val param.Timer(timer) = get[param.Timer]
        wrapper(n, d)(timer) andThen next
      }
    }
}
/**
* A [[com.twitter.finagle.ServiceFactory]] that accrues failures, marking
* itself unavailable when deemed unhealthy according to its parameterization.
*
* TODO: treat different failures differently (eg. connect failures
* vs. not), enable different backoff strategies.
*/
class FailureAccrualFactory[Req, Rep](
  underlying: ServiceFactory[Req, Rep],
  numFailures: Int,
  markDeadFor: Duration,
  timer: Timer
) extends ServiceFactory[Req, Rep]
{
  // Consecutive-failure count; guarded by this object's monitor.
  private[this] var failureCount = 0
  // Volatile: read lock-free from isAvailable.
  @volatile private[this] var markedDead = false
  // Pending revival task; present only while marked dead.
  private[this] var reviveTimerTask: Option[TimerTask] = None

  // Records a failure; trips the dead state once numFailures is reached.
  private[this] def didFail() = synchronized {
    failureCount += 1
    if (failureCount >= numFailures) markDead()
  }

  // Any success resets the consecutive-failure count.
  private[this] def didSucceed() = synchronized {
    failureCount = 0
  }

  // Marks this endpoint dead and schedules revival after markDeadFor.
  // NOTE: failureCount is not reset on revive, so a single failure after
  // revival re-marks the endpoint dead — appears intentional (probation).
  protected def markDead() = synchronized {
    if (!markedDead) {
      markedDead = true
      val timerTask = timer.schedule(markDeadFor.fromNow) { revive() }
      reviveTimerTask = Some(timerTask)
    }
  }

  // Clears the dead state and cancels any pending revival task.
  protected def revive() = synchronized {
    markedDead = false
    reviveTimerTask foreach { _.cancel() }
    reviveTimerTask = None
  }

  // Override to customise which responses count as successes.
  protected def isSuccess(response: Try[Rep]): Boolean = response.isReturn

  // Wraps each dispensed service so every response feeds the accrual state;
  // a failure to obtain a service also counts as a failure.
  def apply(conn: ClientConnection) =
    underlying(conn) map { service =>
      new Service[Req, Rep] {
        def apply(request: Req) = {
          service(request) respond { response =>
            if (isSuccess(response)) didSucceed()
            else didFail()
          }
        }
        override def close(deadline: Time) = service.close(deadline)
        override def isAvailable =
          service.isAvailable && FailureAccrualFactory.this.isAvailable
      }
    } onFailure { _ => didFail() }

  override def isAvailable = !markedDead && underlying.isAvailable

  def close(deadline: Time) = underlying.close(deadline) ensure {
    // We revive to make sure we've cancelled timer tasks, etc.
    revive()
  }

  override val toString = "failure_accrual_%s".format(underlying.toString)
}
| yancl/finagle-6.22.0 | finagle-core/src/main/scala/com/twitter/finagle/service/FailureAccrualFactory.scala | Scala | apache-2.0 | 3,668 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dnvriend.scaffold.play.scaffolds
import ammonite.ops.Path
import com.github.dnvriend.scaffold.play.enabler.EnablerResult
/**
 * Immutable context handed to scaffold generators: the project's directory
 * layout, its identifiers, and the results of previously enabled components.
 */
final case class ScaffoldContext(baseDir: Path, srcDir: Path, resourceDir: Path, testDir: Path, organization: String, projectName: String, enabled: List[EnablerResult])
| dnvriend/sbt-scaffold-play | src/main/scala/com/github/dnvriend/scaffold/play/scaffolds/ScaffoldContext.scala | Scala | apache-2.0 | 909 |
package com.airbnb.common.ml.search
import scala.util.Random
import com.airbnb.common.ml.util.{RandomUtil, ScalaLogging}
/** Contract for a model that can be scored during Monte-Carlo parameter search. */
trait MonteCarloSearch {

  /** Scores the given parameter assignment; lower values are better. */
  def eval(params: Map[String, Any]): Double

  /**
   * Called after each search; implementations holding native resources
   * should release them here.
   */
  def dispose(): Unit

  /** Serializes the parameter values, preserving list order, joined by '\\001'. */
  def toString(params: List[(String, Any)]): String =
    params.map(_._2).mkString("\\001")
}
object MonteCarloSearch extends ScalaLogging {

  // Random search: up to numIters iterations, keeping the best (lowest-loss)
  // parameter assignment; may stop early once the loss stops improving.
  // Returns (serialized best params, best loss).
  def run(
      model: MonteCarloSearch,
      dynamicParams: List[(String, List[Any])],
      stableParams: List[(String, Any)],
      numIters: Int,
      tol: Double,
      minLoss: Double
  ): (String, Double) = {
    val randomizer = scala.util.Random
    var iters = 0
    var prevLoss = Double.MaxValue
    var bestParams = ""
    while (iters < numIters) {
      val currentParams = getParams(dynamicParams, randomizer)
      val finalParams = currentParams ::: stableParams
      val loss = model.eval(finalParams.toMap)
      logger.info(s" prevLoss $prevLoss, loss $loss")
      if (loss < prevLoss) {
        // keep the model, its obviously better than the previous one
        bestParams = model.toString(currentParams)
        // TODO save best model
        prevLoss = loss
      }
      iters += 1
      // NOTE(review): when this iteration improved, prevLoss was just set to
      // loss, so diff is 0 and the search stops as soon as loss < minLoss —
      // confirm this early-stop behaviour is intended.
      if (iters > 1) {
        val diff = loss - prevLoss
        if (diff < tol && loss < minLoss) {
          logger.info(s"search stop by diff $diff loss $loss")
          iters = numIters
        }
      }
    }
    model.dispose()
    // save bestParams.
    logger.info(s" bestParams $bestParams $prevLoss")
    (bestParams, prevLoss)
  }

  // Draws one random assignment per parameter:
  //  - 2 candidate values: a random number between them,
  //  - 1 candidate value: that value,
  //  - otherwise: a random element of the candidate list.
  def getParams(
      params: List[(String, List[Any])],
      randomizer: Random
  ): List[(String, Any)] = {
    params.map((x) => {
      val choices = x._2
      if(choices.length == 2) {
        // pick random number between choices(0) and choices(1)
        (x._1, RandomUtil.randomNumber(choices, randomizer))
      } else if (choices.length == 1) {
        (x._1, choices.head)
      } else {
        // pick random index from choices
        (x._1, RandomUtil.randomIndex(choices, randomizer))
      }
    })
  }
}
| TDDFT/aerosolve | airlearner/airlearner-xgboost/src/main/scala/com/airbnb/common/ml/search/MonteCarloSearch.scala | Scala | apache-2.0 | 2,267 |
package cn.gridx.scala.lang.datetime
import java.util.Date
import org.joda.time._
/**
* Created by tao on 12/21/15.
*/
/**
 * Small demos of common Joda-Time operations (conversion, comparison,
 * arithmetic, intervals, day boundaries and day-of-week lookup).
 */
object jodatimes {

  def main(args: Array[String]): Unit = {
    getWeekDay
  }

  /** Converts an epoch-milliseconds value to a Joda DateTime and a java.util.Date. */
  def long2Date(): Unit = {
    // Bug fix: 1447372799*1000 was Int arithmetic and overflowed Int.MaxValue,
    // producing a wrong timestamp; force Long arithmetic instead.
    val ts = 1447372799L * 1000
    val dt = new DateTime(ts)
    println(dt)
    println(new Date(ts))
  }

  /** Compares two "yyyy-MM-dd HH:mm:ss" strings lexicographically. */
  def compareDate(): Unit = {
    val d = DateTime.now.toString("yyyy-MM-dd HH:mm:ss")
    val d1 = "2015-12-21 19:44:25"
    println(d1.compareTo(d)) // negative result means d1 is earlier
  }

  /** Moves today's date 365 days backward and forward. */
  def moveDate(): Unit = {
    val today = new DateTime(System.currentTimeMillis())
    println(s"今天 $today")
    val d1 = today.minusDays(365)
    val d2 = today.plusDays(365)
    println(s"365天之前 $d1")
    println(s"365天之后 $d2")
  }

  /** Prints the whole hour of the current moment (e.g. 3:55 -> 3). */
  def getSharpClock(): Unit = {
    val today = new DateTime(System.currentTimeMillis())
    val hour = today.getHourOfDay
    println(hour)
  }

  /** Computes the interval between two instants in days/hours/minutes/seconds. */
  def calcInterval(): Unit = {
    var d1 = new DateTime("2016-01-01T12:00:00")
    var d2 = new DateTime("2016-01-02T13:20:30")
    val days = Days.daysBetween(d1, d2).getDays
    println(s"days = $days")
    d1 = new DateTime("2016-01-01T12:00:00")
    d2 = new DateTime("2016-01-01T13:20:30")
    val hours = Hours.hoursBetween(d1, d2).getHours
    println(s"hours = $hours")
    val minutes = Minutes.minutesBetween(d1, d2).getMinutes
    println(s"minutes = $minutes")
    val seconds = Seconds.secondsBetween(d1, d2).getSeconds
    println(s"seconds = $seconds")
  }

  /** Returns 00:00:00 of the same day as `d`. */
  def startOfSameDay(d: DateTime) = {
    // Only the date components matter for the start of day.
    val year = d.getYear
    val month = d.getMonthOfYear
    val day = d.getDayOfMonth
    new DateTime(s"${year}-${month}-${day}")
  }

  /** 2015-02-01T13:59:00-08:00 -> 2015-02-02T00:00:00.000-08:00 */
  private def startOfNextDay(d: DateTime) = {
    val year = d.getYear
    val month = d.getMonthOfYear
    val day = d.getDayOfMonth
    new DateTime(s"${year}-${month}-${day}").plusDays(1)
  }

  /** Prints which ISO day of week a fixed date falls on (1 = Monday ... 7 = Sunday). */
  private def getWeekDay(): Unit = {
    val d = new DateTime("2017-01-01")
    println(d)
    println(s"dayOfWeek = ${d.getDayOfWeek}")
  }
}
| TaoXiao/Scala | lang/src/main/scala/cn/gridx/scala/lang/datetime/jodatimes.scala | Scala | apache-2.0 | 2,788 |
package com.hanhuy.android.common
import android.util.Log
import scala.concurrent.{Future, ExecutionContext}
import scala.util.Try
/**
* @author pfnguyen
*/
object Futures {

  // Executes immediately on the calling thread; failures are only logged.
  object CurrentThread extends ExecutionContext {
    override def execute(runnable: Runnable) = runnable.run()
    override def reportFailure(cause: Throwable) = Log.w(
      "Futures", cause.getMessage, cause)
  }

  // Dispatches work via UiBus (the UI thread); failures are rethrown.
  object MainThread extends ExecutionContext {
    override def execute(runnable: Runnable) = UiBus.run(runnable.run())
    override def reportFailure(cause: Throwable) = throw cause
  }

  // Default background context.
  // NOTE(review): `_threadpool` is not defined in this section of the file —
  // presumably a package-level executor; confirm where it is declared.
  implicit object AsyncThread extends ExecutionContext {
    override def execute(runnable: Runnable) =
      _threadpool.execute(runnable)
    override def reportFailure(cause: Throwable) = Log.w(
      "Futures", cause.getMessage, cause)
  }

  implicit class RichFuturesType(val f: Future.type) extends AnyVal {
    /** run on the UI thread immediately if on UI thread, otherwise post to UI */
    @inline final def main[A](b: => A) = f.apply(b)(MainThread)
    // ensure posting at the end of the event queue, rather than
    // running immediately if currently on the main thread
    /** run on the UI thread asynchronously regardless of current thread */
    @inline final def mainEx[A](b: => A) = f.apply(b)(iota.std.MainThreadExecutionContext)
  }

  // Callback variants pinned to a specific execution context
  // (Here = calling thread, Main = UI thread).
  implicit class RichFutures[T](val f: Future[T]) extends AnyVal {
    type S[U] = PartialFunction[T,U]
    type F[U] = PartialFunction[Throwable,U]
    type C[U] = Try[T] => U
    @inline final def onSuccessHere[U] = f.onSuccess( _: S[U])(CurrentThread)
    @inline final def onFailureHere[U] = f.onFailure( _: F[U])(CurrentThread)
    @inline final def onCompleteHere[U] = f.onComplete(_: C[U])(CurrentThread)
    @inline final def onSuccessMain[U] = f.onSuccess( _: S[U])(MainThread)
    @inline final def onFailureMain[U] = f.onFailure( _: F[U])(MainThread)
    @inline final def onCompleteMain[U] = f.onComplete(_: C[U])(MainThread)
    // Sequences `next` after this future, discarding this future's value.
    @inline final def ~[A >: T](next: => Future[A]): Future[A] = f.flatMap(_ => next)
  }

  // Maps an optional value through an async function, yielding a Future of Option.
  def traverseO[A, B](o: Option[A])(f: A => Future[B])(implicit ev: ExecutionContext): Future[Option[B]] =
    (o map f).fold(Future.successful(Option.empty[B]))(_.flatMap(x => Future.successful(Some(x)))(ev))

  // Turns an optional future inside out into a future optional value.
  def sequenceO[A](o: Option[Future[A]])(implicit ev: ExecutionContext): Future[Option[A]] = traverseO(o)(identity)(ev)
}
| pfn/android-common | src/main/scala/Futures.scala | Scala | apache-2.0 | 2,404 |
package com.sksamuel.elastic4s.requests.snapshots
import com.sksamuel.elastic4s.{Index, Indexes}
import com.sksamuel.exts.OptionImplicits._
/**
 * Request describing a snapshot of one or more indexes to be stored in the
 * named repository. The builder-style methods return updated copies.
 */
case class CreateSnapshotRequest(snapshotName: String,
                                 repositoryName: String,
                                 indices: Indexes = Indexes.Empty,
                                 ignoreUnavailable: Option[Boolean] = None,
                                 waitForCompletion: Option[Boolean] = None,
                                 partial: Option[Boolean] = None,
                                 includeGlobalState: Option[Boolean] = None) {
  require(snapshotName.nonEmpty, "snapshot name must not be null or empty")
  require(repositoryName.nonEmpty, "repo name must not be null or empty")

  def partial(p: Boolean): CreateSnapshotRequest = copy(partial = Some(p))
  def includeGlobalState(global: Boolean): CreateSnapshotRequest = copy(includeGlobalState = Some(global))
  def ignoreUnavailable(ignore: Boolean): CreateSnapshotRequest = copy(ignoreUnavailable = Some(ignore))
  def waitForCompletion(w: Boolean): CreateSnapshotRequest = copy(waitForCompletion = Some(w))
  def index(index: Index): CreateSnapshotRequest = copy(indices = index.toIndexes)
  def indices(indices: Indexes): CreateSnapshotRequest = copy(indices = indices)
}
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/snapshots/CreateSnapshotRequest.scala | Scala | apache-2.0 | 1,328 |
package io.circe.testing
import cats.instances.list._
import io.circe.{
Decoder,
DecodingFailure,
Encoder,
Json,
JsonBiggerDecimal,
JsonNumber,
JsonObject,
KeyDecoder,
KeyEncoder
}
import io.circe.numbers.BiggerDecimal
import io.circe.numbers.testing.{ IntegralString, JsonNumberString }
import org.scalacheck.{ Arbitrary, Cogen, Gen }
/** ScalaCheck `Arbitrary`/`Gen` instances for circe's core data types.
  *
  * Depth and size knobs below bound generated JSON so property tests stay fast.
  */
trait ArbitraryInstances extends ArbitraryJsonNumberTransformer with CogenInstances with ShrinkInstances {
  /**
   * The maximum depth of a generated JSON value.
   */
  protected def maxJsonDepth: Int = 5

  /**
   * The maximum number of values in a generated JSON array.
   */
  protected def maxJsonArraySize: Int = 10

  /**
   * The maximum number of key-value pairs in a generated JSON object.
   */
  protected def maxJsonObjectSize: Int = 10

  // Draws from several numeric sources so that edge representations
  // (arbitrary-precision strings, negative zero, etc.) are all exercised.
  implicit val arbitraryBiggerDecimal: Arbitrary[BiggerDecimal] = Arbitrary(
    Gen.oneOf(
      Arbitrary.arbitrary[JsonNumberString].map(s => BiggerDecimal.parseBiggerDecimalUnsafe(s.value)),
      Arbitrary.arbitrary[Long].map(BiggerDecimal.fromLong),
      Arbitrary.arbitrary[Double].map(BiggerDecimal.fromDoubleUnsafe),
      Arbitrary.arbitrary[BigInt].map(_.underlying).map(BiggerDecimal.fromBigInteger),
      Arbitrary.arbitrary[BigDecimal].map(_.underlying).map(BiggerDecimal.fromBigDecimal),
      Gen.const(BiggerDecimal.NegativeZero)
    )
  )

  // JSON numbers via every construction path circe offers; the final
  // transformJsonNumber hook lets mixers post-process generated numbers.
  implicit val arbitraryJsonNumber: Arbitrary[JsonNumber] = Arbitrary(
    Gen
      .oneOf(
        Arbitrary.arbitrary[IntegralString].map(input => JsonNumber.fromDecimalStringUnsafe(input.value)),
        Arbitrary.arbitrary[JsonNumberString].map(input => JsonNumber.fromDecimalStringUnsafe(input.value)),
        Arbitrary.arbitrary[BiggerDecimal].map(input => JsonBiggerDecimal(input, input.toString)),
        Arbitrary.arbitrary[BigDecimal].map(Json.fromBigDecimal(_).asNumber.get),
        Arbitrary.arbitrary[BigInt].map(Json.fromBigInt(_).asNumber.get),
        Arbitrary.arbitrary[Long].map(Json.fromLong(_).asNumber.get),
        Arbitrary.arbitrary[Double].map(Json.fromDoubleOrString(_).asNumber.get),
        Arbitrary.arbitrary[Float].map(Json.fromFloatOrString(_).asNumber.get)
      )
      .map(transformJsonNumber)
  )

  // Leaf generators for the recursive JSON generator below.
  private[this] val genNull: Gen[Json] = Gen.const(Json.Null)
  private[this] val genBool: Gen[Json] = Arbitrary.arbitrary[Boolean].map(Json.fromBoolean)
  private[this] val genString: Gen[Json] = Arbitrary.arbitrary[String].map(Json.fromString)
  private[this] val genNumber: Gen[Json] = Arbitrary.arbitrary[JsonNumber].map(Json.fromJsonNumber)

  private[this] def genArray(depth: Int): Gen[Json] = Gen.choose(0, maxJsonArraySize).flatMap { size =>
    Gen.listOfN(size, genJsonAtDepth(depth + 1)).map(Json.arr)
  }

  private[this] def genJsonObject(depth: Int): Gen[JsonObject] = Gen.choose(0, maxJsonObjectSize).flatMap { size =>
    val fields = Gen.listOfN(
      size,
      for {
        key <- Arbitrary.arbitrary[String]
        value <- genJsonAtDepth(depth + 1)
      } yield key -> value
    )
    // Build via both JsonObject construction paths so each gets coverage.
    Gen.oneOf(
      fields.map(JsonObject.fromIterable),
      fields.map(JsonObject.fromFoldable[List])
    )
  }

  private[this] def genJsonAtDepth(depth: Int): Gen[Json] = {
    // Only recurse into arrays/objects while under the depth budget; at the
    // maximum depth only scalar values are generated.
    val genJsons = List(genNumber, genString) ++ (
      if (depth < maxJsonDepth) List(genArray(depth), genJsonObject(depth).map(Json.fromJsonObject)) else Nil
    )
    Gen.oneOf(genNull, genBool, genJsons: _*)
  }

  implicit val arbitraryJson: Arbitrary[Json] = Arbitrary(genJsonAtDepth(0))
  implicit val arbitraryJsonObject: Arbitrary[JsonObject] = Arbitrary(genJsonObject(0))

  implicit val arbitraryDecodingFailure: Arbitrary[DecodingFailure] = Arbitrary(
    Arbitrary.arbitrary[String].map(DecodingFailure(_, Nil))
  )

  // Codec instances are generated from arbitrary functions; Cogen is required
  // wherever the encoded type appears in input position.
  implicit def arbitraryKeyEncoder[A: Cogen]: Arbitrary[KeyEncoder[A]] = Arbitrary(
    Arbitrary.arbitrary[A => String].map(KeyEncoder.instance)
  )

  implicit def arbitraryKeyDecoder[A: Arbitrary]: Arbitrary[KeyDecoder[A]] = Arbitrary(
    Arbitrary.arbitrary[String => Option[A]].map(KeyDecoder.instance)
  )

  implicit def arbitraryEncoder[A: Cogen]: Arbitrary[Encoder[A]] = Arbitrary(
    Arbitrary.arbitrary[A => Json].map(Encoder.instance)
  )

  implicit def arbitraryDecoder[A: Arbitrary]: Arbitrary[Decoder[A]] = Arbitrary(
    Arbitrary.arbitrary[Json => Either[DecodingFailure, A]].map(f => Decoder.instance(c => f(c.value)))
  )

  implicit def arbitraryAsObjectEncoder[A: Cogen]: Arbitrary[Encoder.AsObject[A]] = Arbitrary(
    Arbitrary.arbitrary[A => JsonObject].map(Encoder.AsObject.instance)
  )

  implicit def arbitraryAsArrayEncoder[A: Cogen]: Arbitrary[Encoder.AsArray[A]] = Arbitrary(
    Arbitrary.arbitrary[A => Vector[Json]].map(Encoder.AsArray.instance)
  )
}
| travisbrown/circe | modules/testing/shared/src/main/scala/io/circe/testing/ArbitraryInstances.scala | Scala | apache-2.0 | 4,719 |
/*
* Copyright 2016-2020 47 Degrees Open Source <https://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package microsites
import java.nio.file._
import cats.effect.{ContextShift, IO, Timer}
import com.typesafe.sbt.sbtghpages.GhpagesPlugin.autoImport._
import com.typesafe.sbt.site.SitePlugin.autoImport.makeSite
import io.circe._
import io.circe.generic.semiauto._
import io.circe.syntax._
import mdoc.MdocPlugin.autoImport._
import microsites.github.GitHubOps
import microsites.ioops.FileWriter._
import microsites.ioops._
import microsites.ioops.syntax._
import microsites.util.MicrositeHelper
import org.http4s.client.blaze.BlazeClientBuilder
import sbt.Keys._
import sbt._
import sbt.complete.DefaultParsers.OptNotSpace
import sbt.io.{IO => FIO}
import scala.concurrent.ExecutionContext
import scala.sys.process._
/** sbt setting and task keys for the sbt-microsites plugin, plus the small
  * ADTs (git hosting service, push mechanism, site version) those keys use.
  */
trait MicrositeKeys {

  // --- ADTs ---------------------------------------------------------------

  sealed abstract class GitHostingService(val name: String) extends Product with Serializable
  final case object GitHub extends GitHostingService("GitHub")
  final case object GitLab extends GitHostingService("GitLab")
  final case object Bitbucket extends GitHostingService("Bitbucket")
  final case class Other(value: String) extends GitHostingService(value)

  sealed abstract class PushWith(val name: String) extends Product with Serializable
  final case object GHPagesPlugin extends PushWith("ghPagesPlugin")
  final case object GitHub4s extends PushWith("github4s")

  object GitHostingService {
    // Case-insensitive lookup of a known hosting service; unknown names are
    // wrapped in Other rather than failing.
    implicit def string2GitHostingService(name: String): GitHostingService = {
      List(GitHub, GitLab, Bitbucket)
        .find(_.name.toLowerCase == name.toLowerCase)
        .getOrElse(Other(name))
    }
  }

  // A site version entry; `own` marks the version currently being built.
  case class Version(name: String, own: Boolean)

  object Version {
    // circe codecs used when writing versions.json.
    implicit val encoder: Encoder[Version] = deriveEncoder[Version]
    implicit val decoder: Decoder[Version] = deriveDecoder[Version]
  }

  // --- Task keys ----------------------------------------------------------

  val makeMicrosite: TaskKey[Unit] = taskKey[Unit]("Main task to build a microsite")
  val makeMdoc: TaskKey[Unit] =
    taskKey[Unit]("Sequential tasks to compile mdoc and move the result")
  val createMicrositeVersions: TaskKey[Unit] =
    taskKey[Unit](
      "Task to create the different microsites going through the list specified in the settings"
    )
  val moveMicrositeVersions: TaskKey[Unit] =
    taskKey[Unit](
      "Task to move the different microsites to their final publishing directory destination"
    )
  // NOTE(review): "formattted" typo below is in a runtime description string;
  // left untouched to avoid changing observable output.
  val makeVersionsJson: TaskKey[Unit] =
    taskKey[Unit](
      "Task that will create the expected formattted JSON with the versions specified in the settings"
    )
  val makeVersionedMicrosite: TaskKey[Unit] =
    taskKey[Unit]("Task similar to makeMicrosite, adding a version selector")
  val makeMultiversionMicrosite: TaskKey[Unit] = taskKey[Unit](
    "Main task to build a microsite, including version selector, and microsite different versions"
  )
  val pushMicrosite: TaskKey[Unit] =
    taskKey[Unit]("Task to just push files up.")
  val publishMicrosite: TaskKey[Unit] =
    taskKey[Unit]("Task helper that wraps the `publishMicrositeCommand`.")
  val publishMultiversionMicrosite: TaskKey[Unit] =
    taskKey[Unit]("Task helper that wraps the `publishMultiversionMicrositeCommand`.")
  val microsite: TaskKey[Seq[File]] = taskKey[Seq[File]]("Create microsite files")
  val micrositeMakeExtraMdFiles: TaskKey[File] =
    taskKey[File]("Create microsite extra md files")

  // --- Setting keys -------------------------------------------------------

  val micrositeName: SettingKey[String] = settingKey[String]("Microsite name")
  val micrositeDescription: SettingKey[String] = settingKey[String]("Microsite description")
  val micrositeAuthor: SettingKey[String] = settingKey[String]("Microsite author")
  val micrositeHomepage: SettingKey[String] = settingKey[String]("Microsite homepage")
  val micrositeOrganizationHomepage: SettingKey[String] =
    settingKey[String]("Microsite organisation homepage")
  val micrositeTwitter: SettingKey[String] = settingKey[String]("Microsite twitter")
  val micrositeTwitterCreator: SettingKey[String] = settingKey[String]("Microsite twitter")
  val micrositeShareOnSocial: SettingKey[Boolean] = settingKey[Boolean](
    "Optional. Includes links to share on social media in the layout. Enabled by default."
  )
  val micrositeUrl: SettingKey[String] = settingKey[String]("Microsite site absolute url prefix")
  val micrositeBaseUrl: SettingKey[String] = settingKey[String]("Microsite site base url")
  val micrositeDocumentationUrl: SettingKey[String] =
    settingKey[String]("Microsite site documentation url")
  val micrositeDocumentationLabelDescription: SettingKey[String] =
    settingKey[String]("Microsite site documentation Label Description")
  val micrositeHighlightTheme: SettingKey[String] = settingKey[String]("Microsite Highlight Theme")
  val micrositeHighlightLanguages: SettingKey[Seq[String]] =
    settingKey[Seq[String]]("Microsite Highlight Languages")
  val micrositeConfigYaml: SettingKey[ConfigYml] =
    settingKey[ConfigYml]("Microsite _config.yml file configuration.")
  val micrositeImgDirectory: SettingKey[File] = settingKey[File](
    "Optional. Microsite images directory. By default, it'll be the resourcesDirectory + '/microsite/img'"
  )
  val micrositeCssDirectory: SettingKey[File] = settingKey[File](
    "Optional. Microsite CSS directory. By default, it'll be the resourcesDirectory + '/microsite/css'"
  )
  val micrositeSassDirectory: SettingKey[File] = settingKey[File](
    "Optional. Microsite SASS directory. By default, it'll be the resourcesDirectory + '/microsite/sass'"
  )
  val micrositeJsDirectory: SettingKey[File] = settingKey[File](
    "Optional. Microsite Javascript directory. By default, it'll be the resourcesDirectory + '/microsite/js'"
  )
  val micrositeCDNDirectives: SettingKey[CdnDirectives] = settingKey[CdnDirectives](
    "Optional. Microsite CDN directives lists (for css and js imports). By default, both lists are empty."
  )
  val micrositeExternalLayoutsDirectory: SettingKey[File] = settingKey[File](
    "Optional. Microsite external layouts directory. By default, it'll be the resourcesDirectory + '/microsite/layout'"
  )
  val micrositeExternalIncludesDirectory: SettingKey[File] = settingKey[File](
    "Optional. Microsite external includes (partial layouts) directory. By default, it'll be the resourcesDirectory + '/microsite/includes'"
  )
  val micrositeDataDirectory: SettingKey[File] = settingKey[File](
    "Optional. Microsite Data directory, useful to define the microsite data files " +
      "(https://jekyllrb.com/docs/datafiles/). By default, it'll be the resourcesDirectory + '/microsite/data'"
  )
  val micrositeStaticDirectory: SettingKey[File] = settingKey[File](
    "Optional. Microsite static files directory. By default, it'll be the resourcesDirectory + '/microsite/static'"
  )
  val micrositeExtraMdFiles: SettingKey[Map[File, ExtraMdFileConfig]] =
    settingKey[Map[File, ExtraMdFileConfig]](
      "Optional. This key is useful when you want to include automatically markdown documents as a part of your microsite, and these files are located in different places. The map key is related with the source file, the map value corresponds with the target relative file path and the document meta-information configuration. By default, the map is empty."
    )
  val micrositeExtraMdFilesOutput: SettingKey[File] = settingKey[File](
    "Optional. Microsite output location for extra-md files. Default is resourceManaged + '/jekyll/extra_md'"
  )
  val micrositePluginsDirectory: SettingKey[File] = settingKey[File](
    "Optional. Microsite Plugins directory. By default, it'll be the resourcesDirectory + '/microsite/plugins'"
  )
  val micrositePalette: SettingKey[Map[String, String]] =
    settingKey[Map[String, String]]("Microsite palette")
  val micrositeFavicons: SettingKey[Seq[MicrositeFavicon]] = settingKey[Seq[MicrositeFavicon]](
    "Optional. List of filenames and sizes for the PNG/ICO files to be used as favicon for the generated site, located in '/microsite/img'. The sizes should be described with a string (i.e.: \"16x16\"). By default, favicons with different sizes will be generated from the navbar_brand2x.jpg file."
  )
  val micrositeGithubOwner: SettingKey[String] = settingKey[String](
    "Microsite Github owner, defaults to the information found in the 'origin' Git remote"
  )
  val micrositeGithubRepo: SettingKey[String] = settingKey[String](
    "Microsite Github repo, defaults to the information found in the 'origin' Git remote"
  )
  val micrositeGithubToken: SettingKey[Option[String]] =
    settingKey[Option[String]]("Microsite Github token for pushing the microsite")
  val micrositeGithubLinks: SettingKey[Boolean] = settingKey[Boolean](
    "Optional. Includes Github links (forks, stars) in the layout. Enabled by default."
  )
  val micrositeGitHostingService: SettingKey[GitHostingService] =
    settingKey[GitHostingService]("Service used for git hosting. By default, it'll be GitHub.")
  val micrositeGitHostingUrl: SettingKey[String] = settingKey[String](
    "In the case where your project isn't hosted on Github, use this setting to point users to git host (e.g. 'https://internal.gitlab.com/<user>/<project>')."
  )
  val micrositePushSiteWith: SettingKey[PushWith] =
    settingKey[PushWith](
      "Determines what will be chosen for pushing the site. The options are sbt-ghpages plugin and github4s library."
    )
  val micrositeAnalyticsToken: SettingKey[String] =
    settingKey[String](
      "Optional. Add your property id of Google Analytics to add a Google Analytics tracker"
    )
  val micrositeGitterChannel: SettingKey[Boolean] = settingKey[Boolean](
    "Optional. Includes Gitter sidecar Chat functionality. Enabled by default."
  )
  val micrositeGitterChannelUrl: SettingKey[String] = settingKey[String](
    "Optional. Add custom Gitter sidecar Chat URL. By default is owner/repository."
  )
  val micrositeFooterText: SettingKey[Option[String]] = settingKey[Option[String]](
    "Optional. Customize the second line in the footer."
  )

  // Command names registered by MicrositeAutoImportSettings.
  val pushMicrositeCommandKey: String = "pushMicrositeCommand"
  val publishMicrositeCommandKey: String = "publishMicrositeCommand"
  val publishMultiversionMicrositeCommandKey: String = "publishMultiversionMicrositeCommand"

  val micrositeEditButton: SettingKey[Option[MicrositeEditButton]] =
    settingKey[Option[MicrositeEditButton]](
      "Optional. Add a button in DocsLayout pages that links to the file in the repository."
    )
  val micrositeTheme: SettingKey[String] = settingKey[String](
    "Optional. 'light' by default. Set it to 'pattern' to generate the pattern theme design."
  )
  val micrositeVersionList: SettingKey[Seq[String]] =
    settingKey[Seq[String]]("Optional. Microsite available versions")
}
object MicrositeKeys extends MicrositeKeys
/** Implements the tasks/commands declared in [[MicrositeKeys]]: building the
  * Jekyll resources, generating versioned sites, and pushing the result either
  * via sbt-ghpages or github4s.
  *
  * NOTE(review): several helpers shell out to `git`/`sbt` via
  * `scala.sys.process`, so they require both tools on the PATH.
  */
trait MicrositeAutoImportSettings extends MicrositeKeys {
  lazy val fr = new FileReader

  /** Writes `content` as JSON to `<targetDir>jekyll/_data/versions.json` and
    * returns the created file.
    */
  def createVersionsJson(targetDir: String, content: List[Version]): File = {
    val jekyllDir = "jekyll"
    val targetPath = s"$targetDir$jekyllDir/_data/versions.json"
    createFile(targetPath)
    writeContentToFile(content.asJson.toString, targetPath)
    targetPath.toFile
  }

  /** Tags each version name with whether it is the one being built
    * (`own = true` when it equals `ownVersion`).
    */
  def generateVersionList(versionStringList: List[String], ownVersion: String) = {
    versionStringList
      .map(version => Version(version, own = (ownVersion == version)))
      .toList
  }

  /** Asks a forked sbt for its enabled plugins and returns the projects in
    * which `pluginName` is enabled, if any.
    */
  def pluginProjects(pluginName: String): Option[Array[String]] = {
    val sbtPluginsOutput = "sbt --error plugins".lineStream
    val pluginLine =
      sbtPluginsOutput.find(_.trim.startsWith(s"$pluginName: enabled in "))
    pluginLine.map(_.trim.stripPrefix(s"$pluginName: enabled in ").split(", "))
  }

  // Generate a microsite externally through sbt and sbt-microsites tasks
  def createMicrositeVersion(
      sourceDir: String,
      targetDir: String,
      baseUrl: String,
      version: String
  ): Unit = {
    val newBaseUrl =
      if (version.nonEmpty) s"$baseUrl/$version" else baseUrl
    val pluginName = "microsites.MicrositesPlugin"
    val sbtMicrositesProjects = pluginProjects(pluginName)
    sbtMicrositesProjects match {
      case Some(projects) =>
        // Build the site for this version in a forked sbt with the adjusted
        // base url, then move the output under targetDir/version.
        List(
          "sbt",
          s"""clean; project ${projects(0)}; set micrositeBaseUrl := "$newBaseUrl"; makeMicrosite"""
        ).!
        Files.move(
          Paths.get(sourceDir),
          Paths.get(s"$targetDir/$version"),
          StandardCopyOption.REPLACE_EXISTING
        )
        ()
      case None => System.err.println(s"$pluginName not found in version $version")
    }
  }

  /** Assembles the [[MicrositeHelper]] from all microsite settings, merging the
    * default Jekyll yaml properties with user-supplied ones (user values win).
    */
  lazy val micrositeHelper: Def.Initialize[MicrositeHelper] = Def.setting {
    // Normalise the base url to start with "/" when non-empty.
    val baseUrl =
      if (!micrositeBaseUrl.value.isEmpty && !micrositeBaseUrl.value.startsWith("/"))
        s"/${micrositeBaseUrl.value}"
      else micrositeBaseUrl.value
    val baseCssList = List(
      s"css/${micrositeTheme.value}-style.scss"
    )
    // Any css/scss files found in the user's css directory are included too.
    val customCssList =
      fr.fetchFilesRecursively(List(micrositeCssDirectory.value), validFile("css")) match {
        case Right(cssList) => cssList.map(css => s"css/${css.getName}")
        case _ => Nil
      }
    val customScssList =
      fr.fetchFilesRecursively(List(micrositeCssDirectory.value), validFile("scss")) match {
        case Right(scssList) => scssList.map(scss => s"css/${scss.getName}")
        case _ => Nil
      }
    val fullCssList = baseCssList ++ customCssList ++ customScssList
    val defaultYamlCustomVariables = Map(
      "name" -> micrositeName.value,
      "description" -> micrositeDescription.value,
      "version" -> version.value,
      "org" -> organizationName.value,
      "baseurl" -> baseUrl,
      "docs" -> true,
      "markdown" -> "kramdown",
      "highlighter" -> "rouge",
      "exclude" -> List("css"),
      "include" -> fullCssList,
      "sass" -> Map(
        "load_paths" -> List("_sass", "_sass_custom"),
        "style" -> "compressed",
        "sourcemap" -> "never"
      ),
      "collections" -> Map("mdoc" -> Map("output" -> true))
    )
    val userCustomVariables = micrositeConfigYaml.value
    // User-supplied yaml properties override the defaults above.
    val configWithAllCustomVariables = userCustomVariables.copy(
      yamlCustomProperties = defaultYamlCustomVariables ++ userCustomVariables.yamlCustomProperties
    )
    new MicrositeHelper(
      MicrositeSettings(
        identity = MicrositeIdentitySettings(
          name = micrositeName.value,
          description = micrositeDescription.value,
          author = micrositeAuthor.value,
          homepage = micrositeHomepage.value,
          organizationHomepage = micrositeOrganizationHomepage.value,
          twitter = micrositeTwitter.value,
          twitterCreator = micrositeTwitterCreator.value,
          analytics = micrositeAnalyticsToken.value
        ),
        visualSettings = MicrositeVisualSettings(
          highlightTheme = micrositeHighlightTheme.value,
          highlightLanguages = micrositeHighlightLanguages.value,
          palette = micrositePalette.value,
          favicons = micrositeFavicons.value,
          shareOnSocial = micrositeShareOnSocial.value,
          theme = micrositeTheme.value
        ),
        templateTexts = MicrositeTemplateTexts(
          micrositeFooterText.value
        ),
        configYaml = configWithAllCustomVariables,
        fileLocations = MicrositeFileLocations(
          micrositeImgDirectory = micrositeImgDirectory.value,
          micrositeCssDirectory = micrositeCssDirectory.value,
          micrositeSassDirectory = micrositeSassDirectory.value,
          micrositeJsDirectory = micrositeJsDirectory.value,
          micrositeCDNDirectives = micrositeCDNDirectives.value,
          micrositeExternalLayoutsDirectory = micrositeExternalLayoutsDirectory.value,
          micrositeExternalIncludesDirectory = micrositeExternalIncludesDirectory.value,
          micrositeDataDirectory = micrositeDataDirectory.value,
          micrositeStaticDirectory = micrositeStaticDirectory.value,
          micrositeExtraMdFiles = micrositeExtraMdFiles.value,
          micrositeExtraMdFilesOutput = micrositeExtraMdFilesOutput.value,
          micrositePluginsDirectory = micrositePluginsDirectory.value
        ),
        urlSettings = MicrositeUrlSettings(
          micrositeUrl = micrositeUrl.value,
          micrositeBaseUrl = micrositeBaseUrl.value,
          micrositeDocumentationUrl = micrositeDocumentationUrl.value,
          micrositeDocumentationLabelDescription = micrositeDocumentationLabelDescription.value
        ),
        gitSettings = MicrositeGitSettings(
          // GitHub owner/repo only apply when the hosting service is GitHub.
          githubOwner = micrositeGitHostingService.value match {
            case GitHub => micrositeGithubOwner.value
            case _ => ""
          },
          githubRepo = micrositeGitHostingService.value match {
            case GitHub => micrositeGithubRepo.value
            case _ => ""
          },
          githubLinks = micrositeGithubLinks.value,
          gitHostingService = micrositeGitHostingService.value.name,
          gitHostingUrl = micrositeGitHostingUrl.value,
          gitSidecarChat = micrositeGitterChannel.value,
          gitSidecarChatUrl = micrositeGitterChannelUrl.value
        ),
        editButtonSettings = MicrositeEditButtonSettings(
          micrositeEditButton.value
        ),
        multiversionSettings = MicrositeMultiversionSettings(
          micrositeVersionList.value
        )
      )
    )
  }

  // Wiring of all microsite tasks; added to project settings by the plugin.
  lazy val micrositeTasksSettings = Seq(
    microsite := micrositeHelper.value
      .createResources(resourceManagedDir = (resourceManaged in Compile).value),
    micrositeMakeExtraMdFiles := micrositeHelper.value.buildAdditionalMd(),
    makeMdoc :=
      Def.sequential(mdoc.toTask(""), micrositeMakeExtraMdFiles).value,
    makeMicrosite :=
      Def.sequential(microsite, makeMdoc, makeSite).value,
    makeVersionsJson := {
      // Fail fast when git is unavailable; the current branch/tag becomes the
      // "own" version in the generated versions.json.
      "which git".! match {
        case 0 => ()
        case n => sys.error("Could not run git, error: " + n)
      }
      val sourceDir = (resourceManaged in Compile).value
      val targetDir: String = sourceDir.getAbsolutePath.ensureFinalSlash
      val currentBranchTag = "git name-rev --name-only HEAD".!!.trim
      val versionList = generateVersionList(
        (currentBranchTag :: micrositeVersionList.value.toList),
        currentBranchTag
      )
      createVersionsJson(targetDir, versionList)
    },
    createMicrositeVersions := {
      "which git".! match {
        case 0 => ()
        case n => sys.error("Could not run git, error: " + n)
      }
      val publishingDir = (target in makeSite).value
      val genDocsDir = ".sbt-versioned-docs"
      val currentBranchTag = "git name-rev --name-only HEAD".!!.trim
      // Rebuild the scratch directory, then build each listed version by
      // checking out its tag; restore the original checkout afterwards.
      scala.reflect.io.Directory(new File(genDocsDir)).deleteRecursively()
      createDir(genDocsDir)
      micrositeVersionList.value.foreach { tag =>
        s"git checkout -f $tag".!
        createMicrositeVersion(
          publishingDir.getAbsolutePath,
          genDocsDir,
          micrositeBaseUrl.value,
          tag
        )
      }
      s"git checkout -f $currentBranchTag".!
    },
    moveMicrositeVersions := {
      val publishingDir = (target in makeSite).value
      val genDocsDir = ".sbt-versioned-docs"
      micrositeVersionList.value.foreach { tag =>
        Files.move(
          Paths.get(s"$genDocsDir/$tag"),
          Paths.get(s"${publishingDir.getPath()}/$tag"),
          StandardCopyOption.REPLACE_EXISTING
        )
      }
    },
    makeVersionedMicrosite :=
      Def.sequential(microsite, makeVersionsJson, makeMdoc, makeSite).value,
    makeMultiversionMicrosite :=
      Def
        .sequential(createMicrositeVersions, clean, makeVersionedMicrosite, moveMicrositeVersions)
        .value,
    ghpagesPrivateMappings := {
      sbt.Path.allSubpaths((target in makeSite).value).toList
    },
    pushMicrosite := Def.taskDyn {
      val siteDir: File = (target in makeSite).value
      val noJekyll: Boolean = ghpagesNoJekyll.value
      val branch: String = ghpagesBranch.value
      val pushSiteWith: PushWith = micrositePushSiteWith.value
      val gitHosting: GitHostingService = micrositeGitHostingService.value
      val githubOwner: String = micrositeGithubOwner.value
      val githubRepo: String = micrositeGithubRepo.value
      val githubToken: Option[String] = micrositeGithubToken.value
      implicit val cs: ContextShift[IO] = IO.contextShift(ExecutionContext.global)
      implicit val ec: ExecutionContext = ExecutionContext.global
      implicit val t: Timer[IO] = IO.timer(ExecutionContext.global)
      lazy val log: Logger = streams.value.log
      // Dispatch on the configured push mechanism + hosting service.
      (pushSiteWith.name, gitHosting.name) match {
        case (GHPagesPlugin.name, _) => ghpagesPushSite
        case (GitHub4s.name, GitHub.name) if githubToken.nonEmpty =>
          Def.task({
            val commitMessage = sys.env.getOrElse("SBT_GHPAGES_COMMIT_MESSAGE", "updated site")
            log.info(s"""Committing files from ${siteDir.getAbsolutePath} into branch '$branch'
                 | * repo: $githubOwner/$githubRepo
                 | * commitMessage: $commitMessage""".stripMargin)
            BlazeClientBuilder[IO](ec).resource
              .use { client =>
                val ghOps: GitHubOps[IO] =
                  new GitHubOps[IO](client, githubOwner, githubRepo, githubToken)
                if (noJekyll) FIO.touch(siteDir / ".nojekyll")
                ghOps
                  .commitDir(branch, commitMessage, siteDir)
                  .map(_ => log.info("Success committing files"))
                  .handleErrorWith { e =>
                    IO {
                      e.printStackTrace()
                      log.error(s"Error committing files")
                    }
                  }
              }
              .unsafeRunSync()
          })
        case (GitHub4s.name, GitHub.name) =>
          Def.task(
            log.error(
              s"You must provide a GitHub token through the `micrositeGithubToken` setting for pushing with github4s"
            )
          )
        case (GitHub4s.name, hosting) =>
          Def.task(log.warn(s"github4s doesn't have support for $hosting"))
        case _ =>
          Def.task(
            log.error(
              s"""Unexpected match case (pushSiteWith, gitHosting) = ("${pushSiteWith.name}", "${gitHosting.name}")"""
            )
          )
      }
    }.value,
    publishMicrosite :=
      Def.sequential(clean, makeMicrosite, pushMicrosite).value,
    publishMultiversionMicrosite :=
      Def.sequential(clean, makeMultiversionMicrosite, pushMicrosite).value
  )

  // Command wrappers so the tasks can be invoked from the sbt command line.
  val pushMicrositeCommand: Command = Command(pushMicrositeCommandKey)(_ => OptNotSpace) {
    (st, _) =>
      val extracted = Project.extract(st)
      extracted.runTask(pushMicrosite, st)._1
  }
  val publishMicrositeCommand: Command = Command(publishMicrositeCommandKey)(_ => OptNotSpace) {
    (st, _) =>
      val extracted = Project.extract(st)
      extracted.runTask(publishMicrosite, st)._1
  }
  val publishMultiversionMicrositeCommand: Command =
    Command(publishMultiversionMicrositeCommandKey)(_ => OptNotSpace) { (st, _) =>
      val extracted = Project.extract(st)
      extracted.runTask(publishMultiversionMicrosite, st)._1
    }

  // True when `file` has the given extension.
  private[this] def validFile(extension: String)(file: File): Boolean =
    file.getName.endsWith(s".$extension")
}
| 47deg/sbt-microsites | src/main/scala/microsites/MicrositeKeys.scala | Scala | apache-2.0 | 24,210 |
package org.hammerlab.bam.check.indexed
import hammerlab.path._
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.hammerlab.args.ByteRanges
import org.hammerlab.bam.check.Blocks
import org.hammerlab.bgzf.Pos
import org.hammerlab.bgzf.block.Metadata
import org.hammerlab.kryo.Registrar
import scala.collection.immutable.SortedSet
import scala.reflect.ClassTag
/** Pairs an RDD of BGZF-block [[Metadata]] with, per partition, the sorted
  * sets of record-start [[Pos]]itions obtained from an index file.
  */
case class BlocksAndIndexedRecords(blocks: RDD[Metadata],
                                   records: RDD[SortedSet[Pos]])
/** Factory: computes block metadata and partition bounds via [[Blocks]],
  * converts the byte-offset bounds into virtual-file positions, and
  * repartitions the index-derived record positions to match those bounds.
  */
object BlocksAndIndexedRecords
  extends Registrar {
  def apply[U: ClassTag]()(
      implicit
      path: Path,
      sc: SparkContext,
      rangesBroadcast: Broadcast[Option[ByteRanges]],
      blockArgs: Blocks.Args,
      recordArgs: IndexedRecordPositions.Args
  ): BlocksAndIndexedRecords = {
    val Blocks(blocks, bounds) = Blocks()
    // Each block byte-offset maps to virtual position (offset, 0), i.e. the
    // start of that block with intra-block offset zero.
    val posBounds =
      bounds
        .copy(
          partitions =
            bounds
              .partitions
              .map {
                _.map {
                  case (start, endOpt) ⇒
                    (
                      Pos(start, 0),
                      endOpt.map(Pos(_, 0))
                    )
                }
              }
        )
    val indexedRecords = IndexedRecordPositions(recordArgs.path)
    // Align record positions to the same partitioning as the blocks RDD.
    val repartitionedRecords = indexedRecords.toSets(posBounds)
    BlocksAndIndexedRecords(
      blocks,
      repartitionedRecords
    )
  }
  // Register Blocks' classes for Kryo serialization.
  register(
    Blocks
  )
}
| ryan-williams/spark-bam | check/src/main/scala/org/hammerlab/bam/check/indexed/BlocksAndIndexedRecords.scala | Scala | apache-2.0 | 1,521 |
package com.phasmid.laScala.parser
import scala.util._
import scala.util.matching.Regex
import scala.util.parsing.combinator.JavaTokenParsers
/** Base parser for "compound function strings" of the shape
  * `prefix functionString (word functionString)* suffix`, built on
  * [[JavaTokenParsers]]. Subclasses supply the grammar for `functionString`,
  * `prefix` and `suffix`.
  */
abstract class BaseFunctionStringParser extends JavaTokenParsers {

  /** Parses `s` as a compound function string, yielding
    * (prefix, function strings, suffix) or a descriptive `Failure`.
    */
  def parseCompoundFunctionString(s: String): Try[(String, Seq[String], String)] =
    parseAll(functionStringParser, s) match {
      case this.Success(p, _) => scala.util.Success(p)
      case this.Failure(x, _) => FunctionParser.failure(s, x, "compound function string")
      case this.Error(x, _) => FunctionParser.failure(s, x, "compound function string")
    }

  /** Parses `s` as a single function string, or a descriptive `Failure`. */
  def parseFunctionString(s: String): Try[String] =
    parseAll(functionString, s) match {
      case this.Success(p, _) => scala.util.Success(p)
      case this.Failure(x, _) => FunctionParser.failure(s, x, "function string")
      case this.Error(x, _) => FunctionParser.failure(s, x, "function string")
    }

  // prefix, then function strings separated by words, then suffix.
  def functionStringParser: Parser[(String, List[String], String)] = prefix ~ repsep(functionString, word) ~ suffix ^^ {
    case p ~ fs ~ s => (p, fs, s)
  }

  // Word separator, shared with FunctionParser.
  def word: Parser[String] = FunctionParser.word

  // Grammar hooks implemented by concrete subclasses.
  def functionString: Parser[String]
  def prefix: Parser[String]
  def suffix: Parser[String]
}
/** Helpers shared by the function-string parsers. */
object FunctionParser {

  /** Matches one word, optionally surrounded by whitespace.
    *
    * Fix: triple-quoted strings are raw (no escape processing), so the previous
    * pattern `"""\\s*\\w+\\s*"""` matched literal backslash characters followed
    * by 's'/'w' instead of whitespace/word characters. In a raw string the
    * regex classes must be written with a single backslash.
    */
  val word: Regex = """\s*\w+\s*""".r

  /** Builds a `Failure` describing why input `s` could not be parsed as a `z`,
    * with the parser's own message `x`.
    */
  def failure(s: String, x: String, z: String) = {
    scala.util.Failure(new Exception(
      s"""failed to parse "$s" as a $z: $x""".stripMargin))
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.allqueries
import java.util
import scala.collection.JavaConverters._
import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.{CarbonEnv, Row}
import org.apache.spark.sql.hive.CarbonRelation
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.datamap.dev.DataMap
import org.apache.carbondata.core.datamap.{DataMapChooser, DataMapStoreManager, Segment, TableDataMap}
import org.apache.carbondata.core.datastore.block.SegmentPropertiesAndSchemaHolder
import org.apache.carbondata.core.indexstore.blockletindex.{BlockDataMap, BlockletDataMap}
import org.apache.carbondata.core.indexstore.schema.CarbonRowSchema
import org.apache.carbondata.core.indexstore.Blocklet
import org.apache.carbondata.core.metadata.datatype.DataTypes
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension
import org.apache.carbondata.core.readcommitter.TableStatusReadCommittedScope
import org.apache.carbondata.core.scan.expression.conditional.NotEqualsExpression
import org.apache.carbondata.core.scan.expression.logical.AndExpression
import org.apache.carbondata.core.scan.expression.{ColumnExpression, LiteralExpression}
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf
/**
* test class for validating COLUMN_META_CACHE and CACHE_LEVEL
*/
class TestQueryWithColumnMetCacheAndCacheLevelProperty extends QueryTest with BeforeAndAfterAll {
  // Drop any leftover tables so the suite starts from a clean metastore.
  override def beforeAll(): Unit = {
    dropSchema
  }
  // Clean up all tables created by this suite.
  override def afterAll(): Unit = {
    dropSchema
  }
  // Drops every table this suite may create; "if exists" makes it idempotent.
  private def dropSchema: Unit = {
    sql("drop table if exists metaCache")
    sql("drop table if exists column_min_max_cache_test")
    sql("drop table if exists minMaxSerialize")
  }
private def createAndLoadTable(cacheLevel: String): Unit = {
sql(s"CREATE table column_min_max_cache_test (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp, attendance int, utilization int,salary int) STORED BY 'carbondata' TBLPROPERTIES('DICTIONARY_INCLUDE'='empno','column_meta_cache'='workgroupcategoryname,designation,salary,attendance', 'CACHE_LEVEL'= '$cacheLevel')")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO " +
"TABLE column_min_max_cache_test OPTIONS('DELIMITER'=',', " +
"'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
}
private def getDataMaps(dbName: String,
tableName: String,
segmentId: String,
isSchemaModified: Boolean = false): List[DataMap[_ <: Blocklet]] = {
val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore
.lookupRelation(Some(dbName), tableName)(sqlContext.sparkSession)
.asInstanceOf[CarbonRelation]
val carbonTable = relation.carbonTable
assert(carbonTable.getTableInfo.isSchemaModified == isSchemaModified)
val segment: Segment = Segment.getSegment(segmentId, carbonTable.getTablePath)
val defaultDataMap: TableDataMap = DataMapStoreManager.getInstance()
.getDefaultDataMap(carbonTable)
val dataMaps: List[DataMap[_ <: Blocklet]] = defaultDataMap.getDataMapFactory
.getDataMaps(segment).asScala.toList
dataMaps
}
private def validateMinMaxColumnsCacheLength(dataMaps: List[DataMap[_ <: Blocklet]],
expectedLength: Int, storeBlockletCount: Boolean = false): Boolean = {
val index = dataMaps(0).asInstanceOf[BlockDataMap].getSegmentPropertiesIndex
val summarySchema = SegmentPropertiesAndSchemaHolder.getInstance()
.getSegmentPropertiesWrapper(index).getTaskSummarySchema(storeBlockletCount, false)
val minSchemas = summarySchema(0).asInstanceOf[CarbonRowSchema.StructCarbonRowSchema]
.getChildSchemas
minSchemas.length == expectedLength
}
test("verify if number of columns cached are as per the COLUMN_META_CACHE property dataMap instance is as per CACHE_LEVEL property") {
sql("drop table if exists metaCache")
sql("create table metaCache(name string, c1 string, c2 string) stored by 'carbondata'")
sql("insert into metaCache select 'a','aa','aaa'")
checkAnswer(sql("select * from metaCache"), Row("a", "aa", "aaa"))
var dataMaps = getDataMaps("default", "metaCache", "0")
// validate dataMap is non empty, its an instance of BlockDataMap and minMaxSchema length is 3
assert(dataMaps.nonEmpty)
assert(dataMaps(0).isInstanceOf[BlockDataMap])
assert(validateMinMaxColumnsCacheLength(dataMaps, 3, true))
var segmentPropertyIndex = dataMaps(0).asInstanceOf[BlockDataMap].getSegmentPropertiesIndex
// alter table to add column_meta_cache and cache_level
sql(
"alter table metaCache set tblproperties('column_meta_cache'='c2,c1', 'CACHE_LEVEL'='BLOCKLET')")
var wrapper = SegmentPropertiesAndSchemaHolder.getInstance()
.getSegmentPropertiesWrapper(segmentPropertyIndex)
// after alter operation cache should be cleaned and cache should be evicted
assert(null == wrapper)
checkAnswer(sql("select * from metaCache"), Row("a", "aa", "aaa"))
// validate dataMap is non empty, its an instance of BlockletDataMap and minMaxSchema length
// is 1
dataMaps = getDataMaps("default", "metaCache", "0")
assert(dataMaps.nonEmpty)
assert(dataMaps(0).isInstanceOf[BlockletDataMap])
assert(validateMinMaxColumnsCacheLength(dataMaps, 2))
// alter table to add same value as previous with order change for column_meta_cache and cache_level
segmentPropertyIndex = dataMaps(0).asInstanceOf[BlockDataMap].getSegmentPropertiesIndex
sql(
"alter table metaCache set tblproperties('column_meta_cache'='c1,c2', 'CACHE_LEVEL'='BLOCKLET')")
wrapper = SegmentPropertiesAndSchemaHolder.getInstance()
.getSegmentPropertiesWrapper(segmentPropertyIndex)
// after alter operation cache should not be cleaned as value are unchanged
assert(null != wrapper)
// alter table to cache no column in column_meta_cache
segmentPropertyIndex = dataMaps(0).asInstanceOf[BlockDataMap].getSegmentPropertiesIndex
sql(
"alter table metaCache set tblproperties('column_meta_cache'='')")
wrapper = SegmentPropertiesAndSchemaHolder.getInstance()
.getSegmentPropertiesWrapper(segmentPropertyIndex)
// after alter operation cache should be cleaned and cache should be evicted
assert(null == wrapper)
checkAnswer(sql("select * from metaCache"), Row("a", "aa", "aaa"))
// validate dataMap is non empty, its an instance of BlockletDataMap and minMaxSchema length
// is 0
dataMaps = getDataMaps("default", "metaCache", "0")
assert(dataMaps.nonEmpty)
assert(dataMaps(0).isInstanceOf[BlockletDataMap])
assert(validateMinMaxColumnsCacheLength(dataMaps, 0))
// alter table to cache no column in column_meta_cache
segmentPropertyIndex = dataMaps(0).asInstanceOf[BlockDataMap].getSegmentPropertiesIndex
sql(
"alter table metaCache unset tblproperties('column_meta_cache', 'cache_level')")
wrapper = SegmentPropertiesAndSchemaHolder.getInstance()
.getSegmentPropertiesWrapper(segmentPropertyIndex)
// after alter operation cache should be cleaned and cache should be evicted
assert(null == wrapper)
checkAnswer(sql("select * from metaCache"), Row("a", "aa", "aaa"))
// validate dataMap is non empty, its an instance of BlockletDataMap and minMaxSchema length
// is 3
dataMaps = getDataMaps("default", "metaCache", "0")
assert(dataMaps.nonEmpty)
assert(dataMaps(0).isInstanceOf[BlockDataMap])
assert(validateMinMaxColumnsCacheLength(dataMaps, 3))
}
test("test UPDATE scenario after column_meta_cache") {
sql("drop table if exists metaCache")
sql("create table metaCache(name string, c1 string, c2 string) stored by 'carbondata' TBLPROPERTIES('COLUMN_META_CACHE'='')")
sql("insert into metaCache select 'a','aa','aaa'")
sql("insert into metaCache select 'b','bb','bbb'")
sql("update metaCache set(c1)=('new_c1') where c1='aa'").show()
checkAnswer(sql("select c1 from metaCache"), Seq(Row("new_c1"), Row("bb")))
}
test("test queries with column_meta_cache and cache_level='BLOCK'") {
dropSchema
// set cache_level
createAndLoadTable("BLOCK")
// check count(*)
checkAnswer(sql("select count(*) from column_min_max_cache_test"), Row(10))
// check query on cached dimension columns
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where workgroupcategoryname='developer' OR designation='PL'"),
Row(6))
// check query on cached dimension column and non cached column
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where empname='pramod' and " +
"workgroupcategoryname='developer'"),
Row(1))
// query on cached column
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where workgroupcategoryname='developer'"),
Row(5))
// check query on non cached column
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where empname='pramod' and " +
"deptname='network'"),
Row(0))
// check query on cached dimension and measure column
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where attendance='77' and " +
"salary='11248' and workgroupcategoryname='manager'"),
Row(1))
// check query on cached dimension and measure column with one non cached column
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where attendance='77' and " +
"salary='11248' OR deptname='network'"),
Row(4))
}
test("test queries with column_meta_cache and cache_level='BLOCKLET'") {
dropSchema
// set cache_level
createAndLoadTable("BLOCKLET")
// check count(*)
checkAnswer(sql("select count(*) from column_min_max_cache_test"), Row(10))
// check query on cached dimension columns
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where workgroupcategoryname='developer' OR designation='PL'"),
Row(6))
// check query on cached dimension column and non cached column
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where empname='pramod' and " +
"workgroupcategoryname='developer'"),
Row(1))
// query on cached column
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where workgroupcategoryname='developer'"),
Row(5))
// check query on non cached column
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where empname='pramod' and " +
"deptname='network'"),
Row(0))
// check query on cached dimension and measure column
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where attendance='77' and " +
"salary='11248' and workgroupcategoryname='manager'"),
Row(1))
// check query on cached dimension and measure column with one non cached column
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where attendance='77' and " +
"salary='11248' OR deptname='network'"),
Row(4))
}
test("test update on column cached") {
dropSchema
// set cache_level
createAndLoadTable("BLOCKLET")
sql("update column_min_max_cache_test set (designation)=('SEG') where empname='ayushi'").show()
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where empname='ayushi' and " +
"designation='SEG'"),
Row(1))
}
test("test update on column not cached") {
dropSchema
// set cache_level
createAndLoadTable("BLOCKLET")
sql(
"update column_min_max_cache_test set (workgroupcategoryname)=('solution engrr') where " +
"workgroupcategoryname='developer'")
.show()
checkAnswer(sql(
"select count(*) from column_min_max_cache_test where workgroupcategoryname='solution " +
"engrr'"),
Row(5))
}
test("verify column caching with alter add column") {
sql("drop table if exists alter_add_column_min_max")
sql("create table alter_add_column_min_max (imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId double,productionDate Timestamp,deliveryDate timestamp,deliverycharge double) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('table_blocksize'='1','COLUMN_META_CACHE'='AMSize','CACHE_LEVEL'='BLOCKLET')")
sql("insert into alter_add_column_min_max select '1AA1','8RAM size','4','Chinese','guangzhou',2738,1,'2014-07-01 12:07:28','2014-07-01 12:07:28',25")
sql("alter table alter_add_column_min_max add columns(age int, name string)")
sql("ALTER TABLE alter_add_column_min_max SET TBLPROPERTIES('COLUMN_META_CACHE'='age,name')")
sql("insert into alter_add_column_min_max select '1AA1','8RAM size','4','Chinese','guangzhou',2738,1,'2014-07-01 12:07:28','2014-07-01 12:07:28',25,29,'Rahul'")
checkAnswer(sql("select count(*) from alter_add_column_min_max where AMSize='8RAM size'"), Row(2))
sql("drop table if exists alter_add_column_min_max")
}
test("verify min/max getting serialized to executor when cache_level = blocklet") {
sql("drop table if exists minMaxSerialize")
sql("create table minMaxSerialize(name string, c1 string, c2 string) stored by 'carbondata' TBLPROPERTIES('CACHE_LEVEL'='BLOCKLET', 'COLUMN_META_CACHE'='c1,c2')")
sql("insert into minMaxSerialize select 'a','aa','aaa'")
checkAnswer(sql("select * from minMaxSerialize where name='a'"), Row("a", "aa", "aaa"))
checkAnswer(sql("select * from minMaxSerialize where name='b'"), Seq.empty)
val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore
.lookupRelation(Some("default"), "minMaxSerialize")(sqlContext.sparkSession)
.asInstanceOf[CarbonRelation]
val carbonTable = relation.carbonTable
// form a filter expression and generate filter resolver tree
val columnExpression = new ColumnExpression("name", DataTypes.STRING)
columnExpression.setDimension(true)
val dimension: CarbonDimension = carbonTable.getDimensionByName(carbonTable.getTableName, "name")
columnExpression.setDimension(dimension)
columnExpression.setCarbonColumn(dimension)
val literalValueExpression = new LiteralExpression("a", DataTypes.STRING)
val literalNullExpression = new LiteralExpression(null, DataTypes.STRING)
val notEqualsExpression = new NotEqualsExpression(columnExpression, literalNullExpression)
val equalsExpression = new NotEqualsExpression(columnExpression, literalValueExpression)
val andExpression = new AndExpression(notEqualsExpression, equalsExpression)
val resolveFilter: FilterResolverIntf =
CarbonTable.resolveFilter(andExpression, carbonTable.getAbsoluteTableIdentifier)
val exprWrapper = DataMapChooser.getDefaultDataMap(carbonTable, resolveFilter)
val segment = new Segment("0", new TableStatusReadCommittedScope(carbonTable
.getAbsoluteTableIdentifier, new Configuration(false)))
// get the pruned blocklets
val prunedBlocklets = exprWrapper.prune(List(segment).asJava, null)
prunedBlocklets.asScala.foreach { blocklet =>
// all the blocklets should have useMinMaxForPrune flag set to true
assert(blocklet.getDetailInfo.isUseMinMaxForPruning)
}
}
}
| sgururajshetty/carbondata | integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala | Scala | apache-2.0 | 16,391 |
package core
import akka.actor.{Props, ActorLogging, Actor}
import spray.routing.{StandardRoute, RequestContext, HttpService}
import akka.util.Timeout
import scala.concurrent.duration._
import spray.httpx.Json4sSupport
import org.apache.commons.math3.random.{Well44497b}
import reflect.ClassTag
import spray.routing.directives.{RouteDirectives, LogEntry}
import spray.http.{HttpHeader, StatusCode, HttpRequest}
import org.joda.time.DateTime
import spray.httpx.encoding.Gzip
import java.nio.ByteOrder
import spray.httpx.marshalling.Marshaller
import spray.http.HttpHeaders.RawHeader
// Protocol messages understood by RandomNumberGenerator.
// amount defaults to 1; the actor clamps it to the configured maximum.
case class GetFloatNumbers(amount: Int = 1)
case class GetLongNumbers(amount: Int = 1)
// Instructs the generator to re-seed its PRNG.
case class Reseed()
// Reply payload: the generated numbers plus a creation timestamp string.
case class RandomNumbers[T](numbers: Seq[T], createdAt: String = DateTime.now().toString())
/**
 * Actor that produces pseudo-random numbers using a WELL44497b generator.
 *
 * Replies to GetLongNumbers / GetFloatNumbers with a RandomNumbers message,
 * and re-seeds the generator on Reseed.
 */
class RandomNumberGenerator extends Actor with ActorLogging {
  // WELL44497b PRNG, seeded from scala.util.Random at construction.
  val rng = new Well44497b(util.Random.nextLong())
  // Upper bound on how many numbers a single request may produce (read from config).
  val maxNumber = context.system.settings.config.getInt("rng.max-amount-random-numbers")

  // Clamps the requested amount to the configured maximum.
  def getSaveAmount(amount: Int): Int = if (amount > maxNumber) maxNumber else amount

  // Generates `amount` (clamped) values by repeatedly invoking the supplied generator.
  def getRandomNumbers[T](amount: Int, nextRandomNumber: () => T): Seq[T] = {
    for (i <- 0 until getSaveAmount(amount)) yield nextRandomNumber()
  }

  def receive = {
    case GetLongNumbers(amount) =>
      // Fix: removed an unused `akka.util.ByteStringBuilder` that was allocated and discarded here.
      sender ! RandomNumbers(getRandomNumbers(amount, rng.nextLong))
    case GetFloatNumbers(amount) =>
      sender ! RandomNumbers(getRandomNumbers(amount, rng.nextFloat))
    case Reseed() =>
      log.info("Reseed")
      rng.setSeed(util.Random.nextLong())
    case msg =>
      // Fix: the interpolated string was previously built and silently discarded; log it instead.
      log.warning(s"Cannot map $msg")
  }
}
// Spray route exposing random numbers as JSON under the /rns prefix.
trait RngRoute extends HttpService {
  // Ask-timeout for requests to the generator actor.
  implicit val timeout = Timeout(5 seconds)
  // Single generator actor shared by all requests to this route.
  val rng = actorRefFactory.actorOf(Props(new RandomNumberGenerator), name = "RandomNumberGenerator")
  import akka.pattern._
  import spray.http.MediaTypes._
  import RngJsonProtocol.RngFormatLong
  import RngJsonProtocol.RngFormatFloat
  import scala.concurrent.ExecutionContext.Implicits.global
  import spray.httpx.SprayJsonSupport._
  // GET /rns/...: serves static resources from "public", plus JSON endpoints
  // .../long and .../float with an optional ?amount=<n> parameter (default 1).
  val rngRoute = pathPrefix("rns") {
    getFromResourceDirectory("public") ~
    get {
      clientIP { ip =>
        parameter("amount".as[Int] ? 1) { amount =>
          logRequest(showRequest _) {
            respondWithMediaType(`application/json`) {
              pathSuffix("long") {
                complete {
                  (rng ? GetLongNumbers(amount)).mapTo[RandomNumbers[Long]]
                }
              } ~
              pathSuffix("float") {
                complete {
                  (rng ? GetFloatNumbers(amount)).mapTo[RandomNumbers[Float]]
                }
              }
            }
          }
        }
      }
    }
  }
  import akka.event.Logging._
  // Logs each incoming request at INFO level.
  def showRequest(request: HttpRequest) = LogEntry(request.toString, InfoLevel)
}
// HTTP service actor hosting the RNG route; periodically reseeds the generator.
class RngService extends Actor with RngRoute with ActorLogging {
  implicit def actorRefFactory = context
  def receive = runRoute(rngRoute)
  import context.dispatcher
  // Sends Reseed() to self every 10 seconds (first one after an initial 10s delay).
  context.system.scheduler.schedule(10 seconds, 10 seconds, self, Reseed())
}
| matlockx/rng | src/main/scala/core/RngService.scala | Scala | apache-2.0 | 3,158 |
/**
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.crossdata.models
/** Connection endpoint expressed as a host/port pair (both kept as strings). */
case class ConnectionHostModel(host: String, port: String)
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.api
import com.typesafe.config.Config
import play.api.Configuration
trait LagomConfigComponent {
def configuration: Configuration
def config: Config = configuration.underlying
}
| rstento/lagom | service/scaladsl/api/src/main/scala/com/lightbend/lagom/scaladsl/api/LagomConfigComponent.scala | Scala | apache-2.0 | 299 |
package scalaprops
package scalazlaws
import scalaprops.Property.forAll
import scalaprops.Properties.properties
import scalaz._
/** Property-based checks of the scalaz Monoid laws. */
object monoid {

  /** `zero |+| a === a` for every generated `a`. */
  def leftIdentity[A: Equal: Gen](implicit A: Monoid[A]): Property =
    forAll { (a: A) => A.monoidLaw.leftIdentity(a) }

  /** `a |+| zero === a` for every generated `a`. */
  def rightIdentity[A: Equal: Gen](implicit A: Monoid[A]): Property =
    forAll { (a: A) => A.monoidLaw.rightIdentity(a) }

  /** The two identity laws bundled under the `monoid` label. */
  def laws[A: Monoid: Equal: Gen]: Properties[ScalazLaw] =
    properties(ScalazLaw.monoid)(
      (ScalazLaw.monoidLeftIdentity, leftIdentity[A]),
      (ScalazLaw.monoidRightIdentity, rightIdentity[A])
    )

  /** Monoid laws together with the underlying semigroup laws. */
  def all[A: Monoid: Equal: Gen]: Properties[ScalazLaw] = {
    val semigroupLaws = semigroup.laws[A]
    val monoidLaws = laws[A]
    Properties.fromProps(ScalazLaw.monoidAll, semigroupLaws, monoidLaws)
  }
}
| scalaprops/scalaprops | scalaz/src/main/scala/scalaprops/scalazlaws/monoid.scala | Scala | mit | 718 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.check.url
import io.gatling.core.check._
import io.gatling.core.check.extractor.regex._
import io.gatling.core.session._
import io.gatling.http.check.HttpCheck
import io.gatling.http.check.HttpCheckBuilders._
import io.gatling.http.response.Response
// Phantom type tagging checks built from `currentLocationRegex`.
trait CurrentLocationRegexCheckType
// Mixin that lets the caller change the extracted group type via `ofType[X]`.
trait CurrentLocationRegexOfType {
  self: CurrentLocationRegexCheckBuilder[String] =>
  def ofType[X: GroupExtractor] = new CurrentLocationRegexCheckBuilder[X](pattern, patterns)
}
object CurrentLocationRegexCheckBuilder {
  // Entry point: builds a String-typed check builder that can be retyped via `ofType`.
  def currentLocationRegex(pattern: Expression[String], patterns: Patterns) =
    new CurrentLocationRegexCheckBuilder[String](pattern, patterns) with CurrentLocationRegexOfType
  // Shared extractor factory; the name appears in check failure messages.
  private val ExtractorFactory = new RegexExtractorFactoryBase("currentLocationRegex")
}
// Check builder matching a regex against the response's current location (URL).
// X is the type the regex groups are decoded into.
class CurrentLocationRegexCheckBuilder[X: GroupExtractor](
    private[url] val pattern: Expression[String],
    private[url] val patterns: Patterns
)
  extends DefaultMultipleFindCheckBuilder[CurrentLocationRegexCheckType, CharSequence, X] {
  import CurrentLocationRegexCheckBuilder.ExtractorFactory._
  // Extractor for the n-th occurrence of the pattern.
  override def findExtractor(occurrence: Int) = pattern.map(newRegexSingleExtractor[X](_, occurrence, patterns))
  // Extractor for all occurrences.
  override def findAllExtractor = pattern.map(newRegexMultipleExtractor[X](_, patterns))
  // Extractor counting occurrences.
  override def countExtractor = pattern.map(newRegexCountExtractor(_, patterns))
}
// Wires the check type into the HTTP check machinery: the check runs against the
// response URL, prepared as a plain String.
object CurrentLocationRegexProvider extends CheckProtocolProvider[CurrentLocationRegexCheckType, HttpCheck, Response, CharSequence] {
  override val specializer: Specializer[HttpCheck, Response] = UrlSpecializer
  override val preparer: Preparer[Response, String] = UrlStringPreparer
}
| wiacekm/gatling | gatling-http/src/main/scala/io/gatling/http/check/url/CurrentLocationRegexCheckBuilder.scala | Scala | apache-2.0 | 2,308 |
package colossus.core
import scala.concurrent.duration._
/**
* Simple class which contains parameters for configuring a polling operation
* @param interval The interval of the poll
* @param maximumTries The number of times to execute the poll
*/
case class PollingDuration(interval : FiniteDuration, maximumTries : Option[Long]) {
  // NOTE(review): the name suggests "retries are used up", but this returns `true` when
  // maximumTries is None (documented in the companion as "poll indefinitely") and `true`
  // while maximumTries still exceeds `tries`. As written it behaves like "may continue",
  // not "is expended" — confirm against callers before renaming or inverting the logic.
  def isExpended(tries : Long) : Boolean = {
    maximumTries.fold(true)(_ > tries)
  }
}
object PollingDuration {
  /**
   * Adds support for specifying maximumTries in terms of a [[scala.concurrent.duration.FiniteDuration]]
   * @param interval The interval of the poll
   * @param maxDuration The maximum amount of time to execute the poll. None means indefinitely.
   * @return a PollingDuration whose try budget is maxDuration / interval, rounded
   */
  def apply(interval : FiniteDuration, maxDuration : FiniteDuration) : PollingDuration = {
    PollingDuration(interval, Some(Math.round(maxDuration / interval)))
  }
  // Configuration granting zero retries (maximumTries = Some(0)); the 100ms interval is unused.
  val NoRetry = PollingDuration(100.milliseconds, Some(0L))
}
| noikiy/colossus | colossus/src/main/scala/colossus/core/PollingDuration.scala | Scala | apache-2.0 | 960 |
package dotty.communitybuild
import java.nio.file._
import java.io.{PrintWriter, File}
import java.nio.charset.StandardCharsets.UTF_8
import org.junit.{Ignore, Test}
import org.junit.Assert.{assertEquals, fail}
import org.junit.experimental.categories.Category
import CommunityBuildRunner.run
// Marker class used as a JUnit @Category tag for all community-build tests.
class TestCategory
// Shared runner for the community-build projects; turns a build failure into a
// JUnit assertion failure (`???` is unreachable after `fail` throws).
given testRunner: CommunityBuildRunner with
  override def failWith(msg: String) = { fail(msg); ??? }
// Community-build batch A: one JUnit test per downstream project build.
@Category(Array(classOf[TestCategory]))
class CommunityBuildTestA:
  @Test def izumiReflect = projects.izumiReflect.run()
  @Test def scalaSTM = projects.scalaSTM.run()
  @Test def scalatest = projects.scalatest.run()
  @Test def scalatestplusTestNG = projects.scalatestplusTestNG.run()
  // 'Sciss/Lucre' dependencies:
  // @Test def scissEqual = projects.scissEqual .run()
  // @Test def scissFingerTree = projects.scissFingerTree.run()
  // @Test def scissLog = projects.scissLog .run()
  // @Test def scissModel = projects.scissModel .run()
  // @Test def scissNumbers = projects.scissNumbers .run()
  // @Test def scissSerial = projects.scissSerial .run()
  // @Test def scissAsyncFile = projects.scissAsyncFile .run()
  // @Test def scissSpan = projects.scissSpan .run()
  @Test def scissLucre = projects.scissLucre.run()
  @Test def zio = projects.zio.run()
end CommunityBuildTestA
// Community-build batch B (typelevel ecosystem): one JUnit test per project build.
@Category(Array(classOf[TestCategory]))
class CommunityBuildTestB:
  @Test def cats = projects.cats.run()
  @Test def catsEffect3 = projects.catsEffect3.run()
  @Test def catsMtl = projects.catsMtl.run()
  @Test def coop = projects.coop.run()
  @Test def discipline = projects.discipline.run()
  @Test def disciplineMunit = projects.disciplineMunit.run()
  @Test def disciplineSpecs2 = projects.disciplineSpecs2.run()
  @Test def fs2 = projects.fs2.run()
  @Test def munit = projects.munit.run()
  @Test def munitCatsEffect = projects.munitCatsEffect.run()
  @Test def perspective = projects.perspective.run()
  @Test def scalacheckEffect = projects.scalacheckEffect.run()
  @Test def scodec = projects.scodec.run()
  @Test def scodecBits = projects.scodecBits.run()
  @Test def monocle = projects.monocle.run()
  @Test def simulacrumScalafixAnnotations = projects.simulacrumScalafixAnnotations.run()
end CommunityBuildTestB
// Community-build batch C (remaining projects): one JUnit test per project build.
@Category(Array(classOf[TestCategory]))
class CommunityBuildTestC:
  @Test def akka = projects.akka.run()
  @Test def algebra = projects.algebra.run()
  @Test def betterfiles = projects.betterfiles.run()
  @Test def cask = projects.cask.run()
  // Temporarily disabled until problem discovered in comments to #9449 is fixed
  // @Test def dottyCpsAsync = projects.dottyCpsAsync.run()
  @Test def effpi = projects.effpi.run()
  @Test def endpoints4s = projects.endpoints4s.run()
  @Test def fansi = projects.fansi.run()
  @Test def fastparse = projects.fastparse.run()
  @Test def geny = projects.geny.run()
  @Test def intent = projects.intent.run()
  @Test def jacksonModuleScala = projects.jacksonModuleScala.run()
  @Test def libretto = projects.libretto.run()
  @Test def minitest = projects.minitest.run()
  @Test def onnxScala = projects.onnxScala.run()
  @Test def oslib = projects.oslib.run()
  // @Test def oslibWatch = projects.oslibWatch.run()
  @Test def playJson = projects.playJson.run()
  @Test def pprint = projects.pprint.run()
  @Test def protoquill = projects.protoquill.run()
  @Test def requests = projects.requests.run()
  @Test def scalacheck = projects.scalacheck.run()
  @Test def scalaCollectionCompat = projects.scalaCollectionCompat.run()
  @Test def scalaJava8Compat = projects.scalaJava8Compat.run()
  @Test def scalap = projects.scalap.run()
  @Test def scalaParallelCollections = projects.scalaParallelCollections.run()
  @Test def scalaParserCombinators = projects.scalaParserCombinators.run()
  @Test def scalaPB = projects.scalaPB.run()
  @Test def scalatestplusScalacheck = projects.scalatestplusScalacheck.run()
  @Test def scalaXml = projects.scalaXml.run()
  @Test def scalaz = projects.scalaz.run()
  @Test def scas = projects.scas.run()
  @Test def sconfig = projects.sconfig.run()
  @Test def shapeless = projects.shapeless.run()
  @Test def spire = projects.spire.run()
  @Test def sourcecode = projects.sourcecode.run()
  @Test def specs2 = projects.specs2.run()
  @Test def stdLib213 = projects.stdLib213.run()
  @Test def ujson = projects.ujson.run()
  @Test def upickle = projects.upickle.run()
  @Test def utest = projects.utest.run()
  @Test def verify = projects.verify.run()
  @Test def xmlInterpolator = projects.xmlInterpolator.run()
end CommunityBuildTestC
// Forward-compatibility variants of selected projects: one JUnit test per build.
@Category(Array(classOf[TestCategory]))
class CommunityBuildTestForwardCompat:
  @Test def catsEffect3ForwardCompat = projects.catsEffect3ForwardCompat.run()
  @Test def catsForwardCompat = projects.catsForwardCompat.run()
  @Test def catsMtlForwardCompat = projects.catsMtlForwardCompat.run()
  @Test def coopForwardCompat = projects.coopForwardCompat.run()
  @Test def disciplineForwardCompat = projects.disciplineForwardCompat.run()
  @Test def disciplineMunitForwardCompat = projects.disciplineMunitForwardCompat.run()
  @Test def disciplineSpecs2ForwardCompat = projects.disciplineSpecs2ForwardCompat.run()
  @Test def munitForwardCompat = projects.munitForwardCompat.run()
  @Test def scalacheckForwardCompat = projects.scalacheckForwardCompat.run()
  @Test def simulacrumScalafixAnnotationsForwardCompat = projects.simulacrumScalafixAnnotationsForwardCompat.run()
end CommunityBuildTestForwardCompat
| dotty-staging/dotty | community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala | Scala | apache-2.0 | 5,509 |
/*
* MIT License
*
* Copyright (c) 2016 Ramjet Anvil
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.ramjetanvil.padrone.util
/** Distance units as zero-overhead value classes with conversions and orderings. */
object UnitOfMeasure {

  /** A distance in meters. */
  case class Meters(value: Double) extends AnyVal {
    /** Converts to kilometers (divides by 1000). */
    def toKiloMeters: KiloMeters = KiloMeters(value / 1000)
  }

  /** A distance in kilometers. */
  case class KiloMeters(value: Double) extends AnyVal {
    /** Converts to meters (multiplies by 1000). */
    def toMeters: Meters = Meters(value * 1000)
  }

  // Orderings delegate to the underlying Double value.
  implicit val meterOrdering = Ordering.by[Meters, Double](_.value)
  implicit val kiloMeterOrdering = Ordering.by[KiloMeters, Double](_.value)
}
| RamjetAnvil/padrone | server/src/main/scala/com/ramjetanvil/padrone/util/UnitOfMeasure.scala | Scala | mit | 1,579 |
package epic.trees
/*
Copyright 2012 David Hall
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.io._
import epic.preprocess.TreebankTokenizer
import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
/**
* PennTreeReader due to Adam Pauls.
*
* This reader returns empty categories as leaves of the tree below the -NONE-. These leaves
* span 0 words.
*
* For example,
* (TOP (S-IMP (NP-SBJ (-NONE- *PRO*))
(VP (VB Look)
(PP-DIR (IN at)
(NP (DT that))))
(. /.)))
* will return (TOP[0:4] (S-IMP[0:4] (NP-SBJ[0:0] (-NONE-[0:0] (*PRO*[0:0]))) (VP[0:4]...)
*
* @author adpauls
* @author dlwh
*/
class PennTreeReader(reader: Reader,
isEmptyCategory: String=>Boolean = _ == "-NONE-",
rootLabel : String = "TOP",
unescapeTokens: Boolean = true) extends Iterator[(Tree[String],IndexedSeq[String])] {
def this(f: File) = this(new BufferedReader(new FileReader(f)))
private val in = new PushbackReader(reader, 4)
private var nextTree = readRootTree()
def hasNext = (nextTree != null)
def next() = {
if (!hasNext) throw new NoSuchElementException()
val tree = nextTree
nextTree = readRootTree()
if(nextTree == null) {
in.close()
}
tree
}
private def readRootTree() : (Tree[String], IndexedSeq[String]) = {
readWhiteSpace()
if (!isLeftParen(peek())) null
else {
val tree = readTree(true, 0)
tree
}
}
private def readTree(isRoot : Boolean, pos : Int) : (Tree[String],IndexedSeq[String]) = {
readLeftParen()
val label = {
val labelx = readLabel()
if (isRoot && labelx.length == 0) rootLabel else labelx
}
if (isEmptyCategory(label)) {
val emptyChild = readLeaf()
readRightParen()
Tree(label, IndexedSeq(Tree(emptyChild, IndexedSeq.empty, Span(pos, pos))), Span(pos, pos)) -> IndexedSeq.empty
} else {
val (children,words) = readChildren(pos)
val spanEnd = pos + words.length
readRightParen()
Tree[String](label, children, Span(pos, spanEnd)) -> words
}
}
private def readLabel() = {
readWhiteSpace()
readText(false, false)
}
private def readText(atLeastOnex : Boolean, skipLeftParen : Boolean) = {
var atLeastOne = atLeastOnex
val sb = new StringBuilder()
var ch = in.read()
while (atLeastOne || (!isWhiteSpace(ch) && (skipLeftParen || !isLeftParen(ch)) && !isRightParen(ch) && ch != -1)) {
sb.append(ch.toChar)
ch = in.read()
atLeastOne = false
}
in.unread(ch)
sb.toString()
}
private def readChildren(pos : Int) : (IndexedSeq[Tree[String]],IndexedSeq[String]) = {
val words = ArrayBuffer[String]()
var currPos = pos
readWhiteSpace()
val children = new ArrayBuffer[Tree[String]]()
while (!isRightParen(peek())) {
readWhiteSpace()
if (isLeftParen(peek())) {
if (isTextParen()) {
words += readLeaf()
currPos += 1
} else {
val (tree,w) = readTree(isRoot = false, pos = currPos)
currPos = tree.end
words ++= w
children.add(tree)
}
} else if (peek() == 65535) {
throw new RuntimeException("Unmatched parentheses in tree input.")
} else {
words += readLeaf()
currPos += 1
}
readWhiteSpace()
}
children -> words
}
private def isTextParen() = {
var numRead = 0
var ch = in.read()
while (isLeftParen(ch)) {
numRead += 1
ch = in.read()
}
val yes = numRead > 0 && (isRightParen(ch))
in.unread(ch)
for (i <- 0 until numRead) {
in.unread('(')
}
yes
}
private def peek() = {
val ch = in.read()
in.unread(ch)
ch
}
/** Reads one leaf token, undoing treebank escaping when configured. */
private def readLeaf() = {
  val raw = readText(true, true)
  val token = if (unescapeTokens) TreebankTokenizer.treebankTokenToToken(raw) else raw
  // OntoNotes escapes some punctuation with a leading slash (e.g. "/." for a
  // period): strip the slash from two-character "/x" tokens, but leave a
  // literal "//" untouched.
  if (token.length == 2 && token.startsWith("/") && token(1) != '/') token.substring(1)
  else token
}
/** Consumes the next non-whitespace character, which must be '('.
  *
  * @throws RuntimeException if the next character is not '('
  */
private def readLeftParen() = {
  readWhiteSpace()
  val ch = in.read()
  if (!isLeftParen(ch)) {
    // Previously the raw int code was appended (e.g. "got 97"), which is
    // unreadable and prints garbage at EOF; report the character itself.
    val got = if (ch == -1) "end of input" else s"'${ch.toChar}' (code $ch)"
    throw new RuntimeException("Format error reading tree. Expected '(' but got " + got)
  }
}
/** Consumes the next non-whitespace character, which must be ')'.
  *
  * @throws RuntimeException if the next character is not ')'
  */
private def readRightParen() = {
  readWhiteSpace()
  val ch = in.read()
  if (!isRightParen(ch)) throw new RuntimeException("Format error reading tree.")
}
/** Skips over whitespace, leaving the first non-whitespace character (or the
  * EOF marker) unconsumed.
  */
private def readWhiteSpace() = {
  var c = 0
  do {
    c = in.read()
  } while (isWhiteSpace(c))
  in.unread(c)
}
/** True for the ASCII whitespace characters that may separate tree tokens:
  * space, tab, form feed, carriage return, and newline.
  */
private def isWhiteSpace(ch : Int) = {
  // Fixed: the previous comparisons used two-character sequences like '\\t',
  // which is not a valid single char literal; the intended escape characters
  // are restored here.
  (ch == ' ' || ch == '\t' || ch == '\f' || ch == '\r' || ch == '\n')
}
/** True iff `ch` is the opening bracket '('. */
private def isLeftParen(ch : Int) = {
  ch == '('
}
/** True iff `ch` is the closing bracket ')'. */
private def isRightParen(ch : Int) = {
  ch == ')'
}
}
| maxim-rabinovich/epic | src/main/scala/epic/trees/PennTreeReader.scala | Scala | apache-2.0 | 5,409 |
package com.mesosphere.cosmos.finch
import cats.data.NonEmptyList
import com.mesosphere.cosmos.http.CompoundMediaType
import com.mesosphere.cosmos.http.MediaType
import io.circe.Encoder
import io.circe.Json
import io.circe.syntax._
import org.scalatest.FreeSpec
/** Exercises [[DispatchingMediaTypedEncoder]]'s media-type dispatch and
  * media-type introspection using the three-encoder fixture from the
  * companion object.
  */
final class DispatchingMediaTypedEncoderSpec extends FreeSpec {

  import DispatchingMediaTypedEncoderSpec._

  "encoderFor(MediaType) should" - {

    "return the first encoder with a media type compatible with the argument" in {
      // "foo/bar" matches the second registered encoder, which encodes () as 1.
      val Some(mediaTypedEncoder) = ThreeElementEncoder(CompoundMediaType(MediaType("foo", "bar")))
      assertResult(Json.fromInt(1))(mediaTypedEncoder.encoder(()))
      assertResult(NonEmptyList.of(MediaType("foo", "bar")))(mediaTypedEncoder.mediaTypes)
    }

    "indicate failure if no compatible encoder is found" - {

      "because there are no encoders" in {
        val dispatchingEncoder = DispatchingMediaTypedEncoder(Set.empty[MediaTypedEncoder[String]])
        assertResult(None)(dispatchingEncoder(CompoundMediaType(TestingMediaTypes.applicationJson)))
      }

      "because there are only incompatible encoders" in {
        // application/json does not match any of the foo/* encoders.
        assertResult(None)(ThreeElementEncoder(CompoundMediaType(TestingMediaTypes.applicationJson)))
      }

    }

  }

  "mediaTypes should return the media types of each of the encoders" - {

    "zero elements" in {
      assertResult(Set.empty)(DispatchingMediaTypedEncoder(Set.empty[MediaTypedEncoder[String]]).mediaTypes)
    }

    "three elements" in {
      val expected = Set(MediaType("foo", "foo"), MediaType("foo", "bar"), MediaType("foo", "baz"))
      assertResult(expected)(ThreeElementEncoder.mediaTypes)
    }

  }

}
object DispatchingMediaTypedEncoderSpec {

  /** Fixture: encoders for foo/foo, foo/bar and foo/baz that encode Unit as
    * 0, 1 and 2 respectively.
    */
  val ThreeElementEncoder: DispatchingMediaTypedEncoder[Unit] = {
    val encoders = Seq("foo", "bar", "baz").zipWithIndex.map { case (subtype, index) =>
      MediaTypedEncoder(Encoder.instance[Unit](_ => index.asJson), MediaType("foo", subtype))
    }
    DispatchingMediaTypedEncoder(encoders.toSet)
  }

}
| takirala/cosmos | cosmos-test-common/src/test/scala/com/mesosphere/cosmos/finch/DispatchingMediaTypedEncoderSpec.scala | Scala | apache-2.0 | 2,080 |
package com.simplex9.splendor.solver
import com.simplex9.splendor.actiongenerator.ActionGenerator
import com.simplex9.splendor.valueestimator.StateEvaluator
import com.simplex9.splendor.{Action, Param, State}
/**
* Created by hongbo on 6/21/17.
*/
/** Iterative-deepening minimax search with alpha-beta pruning.
  *
  * @param state       root game state to search from
  * @param playerIndex index of the player being maximized for
  * @param startPlayer index of the player who started the game (forwarded to
  *                    the action generator)
  */
class Solver(state: State, playerIndex: Int, startPlayer: Int) {
  // Search statistics: nodes expanded, branches pruned, deepest level reached.
  var totalStates = 0
  var pruned = 0
  var searchedLevel = 0

  /** Repeatedly deepens the search one level at a time until the node budget
    * (Param.MAX_SEARCH_STATES) is exhausted, then returns the best first move
    * found by the last *completed* deepening round, if any.
    */
  def solve() : Option[Action] = {
    var bestAction : List[Action] = Nil
    var bestScore : Int = -Param.INF
    var maxLevel = 0
    // searchedLevel < maxLevel after a round means the whole game tree was
    // exhausted before reaching maxLevel, so deepening further is pointless.
    while (searchedLevel == maxLevel) {
      try {
        maxLevel += 1
        val (score, action) = search(state, playerIndex, 0, maxLevel, -Param.INF, Param.INF)
        bestAction = action
        bestScore = score
      } catch {
        // Node budget hit mid-round: report and return the previous round's
        // (complete) answer.
        case e : LimitReachedException =>
          System.out.println(s"* Level=${maxLevel -1} Score=$bestScore pruned=$pruned")
          for (action <- bestAction) {
            System.out.println(" " + action)
          }
          return bestAction.headOption
      }
    }
    bestAction.headOption
  }

  /** Alpha-beta minimax.
    *
    * @param state              state at this node
    * @param currentPlayerIndex player to move at this node
    * @param level              current depth (root = 0)
    * @param maxLevel           depth at which leaves are evaluated
    * @param _alpha             best score the maximizing player can guarantee so far
    * @param _beta              best score the minimizing player can guarantee so far
    * @return best achievable score and the line of actions realizing it
    * @throws LimitReachedException when the node budget is exhausted
    */
  def search(state: State, currentPlayerIndex: Int,
             level: Int, maxLevel: Int,
             _alpha: Int, _beta: Int): (Int, List[Action]) = {
    if (level > searchedLevel) searchedLevel = level
    totalStates += 1
    if (totalStates > Param.MAX_SEARCH_STATES) throw LimitReachedException()
    // Opponents' turns are minimizing nodes.
    val isMinNode = currentPlayerIndex != playerIndex
    var isLeaf = level == maxLevel
    val actions = ActionGenerator.generate(state, currentPlayerIndex, startPlayer)
    // No legal actions: treat as a leaf and evaluate statically.
    if (actions == Nil) isLeaf = true
    if (isLeaf) {
      val score = StateEvaluator.evaluate(state, playerIndex)
      return (score, Nil)
    }
    // i counts explored actions so pruned-branch accounting is exact.
    var i = 1
    var bestAction : List[Action] = Nil
    if (isMinNode) {
      var minVal = Param.INF
      var beta = _beta
      for (action <- actions) {
        val newState = state.transform(action)
        val (score, nextActions) = search(newState, nextPlayer(currentPlayerIndex), level + 1, maxLevel,
          _alpha, beta)
        if (score < minVal) {
          minVal = score
          bestAction = action :: nextActions
        }
        if (score < beta) beta = score
        // Alpha cutoff: the maximizer already has a better option elsewhere.
        if (beta <= _alpha) {
          pruned += (actions.size - i)
          return (minVal, bestAction)
        }
        i += 1
      }
      (minVal, bestAction)
    } else {
      var maxVal = -Param.INF
      var alpha = _alpha
      for (action <- actions) {
        val newState = state.transform(action)
        val (score, nextActions) = search(newState, nextPlayer(currentPlayerIndex), level + 1, maxLevel,
          alpha, _beta)
        if (score > maxVal) {
          maxVal = score
          bestAction = action :: nextActions
        }
        if (score > alpha) alpha = score
        // Beta cutoff: the minimizer already has a better option elsewhere.
        if (_beta <= alpha) {
          pruned += (actions.size - i)
          return (maxVal, bestAction)
        }
        i += 1
      }
      (maxVal, bestAction)
    }
  }

  /** Index of the player to move after `currentPlayer`, wrapping around. */
  def nextPlayer(currentPlayer: Int) = (currentPlayer + 1) % state.players.length
}
/** Thrown by [[Solver.search]] purely as control flow when the node budget
  * (Param.MAX_SEARCH_STATES) is exhausted. Mixing in NoStackTrace avoids the
  * cost of filling in a stack trace on every iterative-deepening round; the
  * exception is always caught immediately in [[Solver.solve]].
  */
case class LimitReachedException() extends Exception with scala.util.control.NoStackTrace
| liuhb86/splendor-ai | app/com/simplex9/splendor/solver/Solver.scala | Scala | mit | 3,093 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.storehaus
import com.twitter.util.{ Future }
import com.twitter.concurrent.AsyncSemaphore
/**
* Adds batched writes to BatchedReadableStore
*
* @param store the store to read and write values to
* @param maxMultiPutSize a multiPut to `store` will fetch values for at most `maxMultiPutSize` keys
* @param maxConcurrentMultiPuts the maximum number of multiputs to concurrently issue
* @param maxMultiGetSize a multiGet to `store` will fetch values for at most `maxMultiGetSize` keys
* @param maxConcurrentMultiGets the maximum number of multigets to concurrently issue
*/
class BatchedStore[K, V](
  store: Store[K, V],
  maxMultiPutSize: Int,
  maxConcurrentMultiPuts: Int,
  maxMultiGetSize: Int,
  maxConcurrentMultiGets: Int)
  (implicit fc: FutureCollector[(K, V)])
  extends BatchedReadableStore[K, V](store, maxMultiGetSize, maxConcurrentMultiGets)
  with Store[K,V] {

  // do we need different knobs for gets and puts
  // or should we use the same max size and max concurrent for both?
  // Semaphore bounding the number of in-flight multiPut batches to
  // maxConcurrentMultiPuts.
  protected val writeConnectionLock = new AsyncSemaphore(maxConcurrentMultiPuts)

  /** Single-key puts go straight to the underlying store, unbatched. */
  override def put(kv: (K, Option[V])): Future[Unit] = store.put(kv)

  /** Splits `kvs` into batches of at most maxMultiPutSize keys and issues each
    * batch to the underlying store, holding a semaphore permit per in-flight
    * batch. Returns one Future per key, as required by the Store contract.
    */
  override def multiPut[K1 <: K](kvs: Map[K1, Option[V]]): Map[K1, Future[Unit]] = {
    kvs
      .grouped(maxMultiPutSize)
      .map{ keyBatch: Map[K1, Option[V]] =>
        // mapCollect the result of the multiput so we can release the permit at the end
        val batchResult: Future[Map[K1, Unit]] = writeConnectionLock
          .acquire()
          .flatMap { permit =>
            // ensure runs on success *and* failure, so permits never leak.
            FutureOps.mapCollect(store.multiPut(keyBatch)).ensure{ permit.release() }
          }
        // now undo the mapCollect to yield a Map of future
        FutureOps.liftValues(keyBatch.keySet, batchResult)
      }
      .reduceOption(_ ++ _)
      .getOrElse(Map.empty)
  }
}
| rubanm/storehaus | storehaus-core/src/main/scala/com/twitter/storehaus/BatchedStore.scala | Scala | apache-2.0 | 2,461 |
/*
* Copyright 2009-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb.json.scalaz
import scalaz.ValidationNel
import scalaz.Validation._
import scalaz.std.option._
import scalaz.std.list._
import scalaz.syntax.traverse._
import net.liftweb.json._
/** JSON codec instances for primitive types and the standard containers.
  * Every reader reports an [[UnexpectedJSONError]] when the JSON node does
  * not have the expected shape.
  */
trait Base { this: Types =>

  implicit def boolJSON: JSON[Boolean] = new JSON[Boolean] {
    def read(json: JValue) = json match {
      case JBool(b) => success(b)
      case other => failure(UnexpectedJSONError(other, classOf[JBool])).toValidationNel
    }
    def write(value: Boolean) = JBool(value)
  }

  implicit def intJSON: JSON[Int] = new JSON[Int] {
    def read(json: JValue) = json match {
      case JInt(n) => success(n.intValue)
      case other => failure(UnexpectedJSONError(other, classOf[JInt])).toValidationNel
    }
    def write(value: Int) = JInt(BigInt(value))
  }

  implicit def longJSON: JSON[Long] = new JSON[Long] {
    def read(json: JValue) = json match {
      case JInt(n) => success(n.longValue)
      case other => failure(UnexpectedJSONError(other, classOf[JInt])).toValidationNel
    }
    def write(value: Long) = JInt(BigInt(value))
  }

  implicit def doubleJSON: JSON[Double] = new JSON[Double] {
    def read(json: JValue) = json match {
      case JDouble(d) => success(d)
      case other => failure(UnexpectedJSONError(other, classOf[JDouble])).toValidationNel
    }
    def write(value: Double) = JDouble(value)
  }

  implicit def stringJSON: JSON[String] = new JSON[String] {
    def read(json: JValue) = json match {
      case JString(s) => success(s)
      case other => failure(UnexpectedJSONError(other, classOf[JString])).toValidationNel
    }
    def write(value: String) = JString(value)
  }

  implicit def bigintJSON: JSON[BigInt] = new JSON[BigInt] {
    def read(json: JValue) = json match {
      case JInt(n) => success(n)
      case other => failure(UnexpectedJSONError(other, classOf[JInt])).toValidationNel
    }
    def write(value: BigInt) = JInt(value)
  }

  // The identity codec: any JValue reads and writes as itself.
  implicit def jvalueJSON: JSON[JValue] = new JSON[JValue] {
    def read(json: JValue) = success(json)
    def write(value: JValue) = value
  }

  implicit def listJSONR[A: JSONR]: JSONR[List[A]] = new JSONR[List[A]] {
    def read(json: JValue) = json match {
      case JArray(elements) =>
        // Accumulate all element-level errors via ValidationNel's applicative.
        elements.map(fromJSON[A]).sequence[({type λ[α]=ValidationNel[Error, α]})#λ, A]
      case other => failure(UnexpectedJSONError(other, classOf[JArray])).toValidationNel
    }
  }

  implicit def listJSONW[A: JSONW]: JSONW[List[A]] = new JSONW[List[A]] {
    def write(values: List[A]) = JArray(values.map(toJSON(_)))
  }

  implicit def optionJSONR[A: JSONR]: JSONR[Option[A]] = new JSONR[Option[A]] {
    def read(json: JValue) = json match {
      // Both "absent" and explicit null decode to None.
      case JNothing | JNull => success(None)
      case other => fromJSON[A](other).map(some)
    }
  }

  implicit def optionJSONW[A: JSONW]: JSONW[Option[A]] = new JSONW[Option[A]] {
    def write(value: Option[A]) = value.map(toJSON(_)).getOrElse(JNothing)
  }

  implicit def mapJSONR[A: JSONR]: JSONR[Map[String, A]] = new JSONR[Map[String, A]] {
    def read(json: JValue) = json match {
      case JObject(fields) =>
        val readFields = fields.map(field => fromJSON[A](field.value).map(v => (field.name, v)))
        readFields.sequence[({type λ[α]=ValidationNel[Error, α]})#λ, (String, A)].map(_.toMap)
      case other => failure(UnexpectedJSONError(other, classOf[JObject])).toValidationNel
    }
  }

  implicit def mapJSONW[A: JSONW]: JSONW[Map[String, A]] = new JSONW[Map[String, A]] {
    def write(values: Map[String, A]) = {
      val fields = values.toList.map { case (name, v) => JField(name, toJSON(v)) }
      JObject(fields: _*)
    }
  }
}
| lift/framework | core/json-scalaz7/src/main/scala-2.13/net/liftweb/json/scalaz/Base.scala | Scala | apache-2.0 | 4,167 |
package xitrum
import scala.collection.mutable.{Map => MMap}
import xitrum.util.TypeCheck
/**
* @define none `None`
* @define some [[scala.Some]]
* @define option [[scala.Option]]
* @define p `p`
* @define f `f`
* @define coll option
* @define Coll `Option`
* @define orderDependent
* @define orderDependentFold
* @define mayNotTerminateInf
* @define willNotTerminateInf
* @define collectExample
* @define undefinedorder
* @define thatinfo the class of the returned collection. In the standard library configuration, `That` is `Iterable[B]`
* @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current
* representation type `Repr` and the new element type `B`.
*/
abstract class OptVar[+A](implicit m: Manifest[A]) {
  // Storage key: the concrete subclass's fully-qualified class name, so each
  // OptVar subclass occupies its own slot in the backing map.
  protected[this] val key = getClass.getName

  /** The backing map this variable lives in (e.g. session or request scope);
    * supplied by the concrete subclass. */
  def getAll(implicit action: Action): MMap[String, Any]

  /**
   * App developer may change type of a SessionVar when modifying his app code. If
   * the session is not reset, the user with old session version will be stuck.
   * He will always see 500 "server error" and won't be able to recover, unless
   * he removes the session cookie.
   *
   * We clear all, instead of just removing the key, to avoid inconsistent app logic,
   * which is worse than the ClassCastException.
   */
  private def clearAllOnClassCastException(maybeA: Any)(implicit action: Action) {
    val rClass = m.runtimeClass
    if (!TypeCheck.isInstance(rClass, maybeA)) {
      action.log.warn(s"Value $maybeA of key $key can't be cast to $rClass, $this is now cleared to try to recover from ClassCastException on next call")
      getAll.clear()
      throw new ClassCastException(s"Value $maybeA of key $key can't be cast to $rClass")
    }
  }

  /** Returns the stored value; throws NoSuchElementException if absent, or
    * ClassCastException (after clearing the backing map) on a type mismatch. */
  def get(implicit action: Action): A = {
    val a = getAll(action)(key)
    clearAllOnClassCastException(a)
    a.asInstanceOf[A]
  }

  /** Stores `value`, replacing any previous value. */
  def set[B >: A](value: B)(implicit action: Action) { getAll.update(key, value) }

  /** Removes and returns the stored value, if any. */
  def remove()(implicit action: Action): Option[A] = {
    getAll.remove(key) match {
      case None =>
        None
      case Some(a) =>
        clearAllOnClassCastException(a)
        Some(a.asInstanceOf[A])
    }
  }

  /** Non-destructive read: the stored value as an Option. */
  def toOption(implicit action: Action): Option[A] = {
    getAll.get(key) match {
      case None =>
        None
      case Some(a) =>
        clearAllOnClassCastException(a)
        Some(a.asInstanceOf[A])
    }
  }

  //----------------------------------------------------------------------------
  // Methods copied from Option (can't extend Option because it's sealed).

  /** Returns true if the option is $none, false otherwise.
   */
  def isEmpty(implicit action: Action) = !getAll.isDefinedAt(key)

  /** Returns true if the option is an instance of $some, false otherwise.
   */
  def isDefined(implicit action: Action) = getAll.isDefinedAt(key)

  /** Returns the option's value if the option is nonempty, otherwise
   * return the result of evaluating `default`.
   *
   * @param default the default expression.
   */
  @inline final def getOrElse[B >: A](default: => B)(implicit action: Action): B =
    if (isEmpty) default else this.get

  /** Returns the option's value if it is nonempty,
   * or `null` if it is empty.
   * Although the use of null is discouraged, code written to use
   * $option must often interface with code that expects and returns nulls.
   * @example {{{
   * val initalText: Option[String] = getInitialText
   * val textField = new JComponent(initalText.orNull,20)
   * }}}
   */
  @inline final def orNull[A1 >: A](implicit ev: Null <:< A1, action: Action): A1 = this getOrElse ev(null)

  /** Returns a $some containing the result of applying $f to this $option's
   * value if this $option is nonempty.
   * Otherwise return $none.
   *
   * @note This is similar to `flatMap` except here,
   * $f does not need to wrap its result in an $option.
   *
   * @param f the function to apply
   * @see flatMap
   * @see foreach
   */
  @inline final def map[B](f: A => B)(implicit action: Action): Option[B] =
    if (isEmpty) None else Some(f(this.get))

  /** Returns the result of applying $f to this $option's
   * value if the $option is nonempty. Otherwise, evaluates
   * expression `ifEmpty`.
   *
   * @note This is equivalent to `$option map f getOrElse ifEmpty`.
   *
   * @param ifEmpty the expression to evaluate if empty.
   * @param f the function to apply if nonempty.
   */
  @inline final def fold[B](ifEmpty: => B)(f: A => B)(implicit action: Action): B =
    if (isEmpty) ifEmpty else f(this.get)

  /** Returns the result of applying $f to this $option's value if
   * this $option is nonempty.
   * Returns $none if this $option is empty.
   * Slightly different from `map` in that $f is expected to
   * return an $option (which could be $none).
   *
   * @param f the function to apply
   * @see map
   * @see foreach
   */
  @inline final def flatMap[B](f: A => Option[B])(implicit action: Action): Option[B] =
    if (isEmpty) None else f(this.get)

  def flatten[B](implicit ev: A <:< Option[B], action: Action): Option[B] =
    if (isEmpty) None else ev(this.get)

  /** Returns this $option if it is nonempty '''and''' applying the predicate $p to
   * this $option's value returns true. Otherwise, return $none.
   *
   * @param p the predicate used for testing.
   */
  @inline final def filter(p: A => Boolean)(implicit action: Action): Option[A] =
    if (isEmpty || p(this.get)) toOption else None

  /** Returns this $option if it is nonempty '''and''' applying the predicate $p to
   * this $option's value returns false. Otherwise, return $none.
   *
   * @param p the predicate used for testing.
   */
  @inline final def filterNot(p: A => Boolean)(implicit action: Action): Option[A] =
    if (isEmpty || !p(this.get)) toOption else None

  /** Returns false if the option is $none, true otherwise.
   * @note Implemented here to avoid the implicit conversion to Iterable.
   */
  final def nonEmpty(implicit action: Action) = isDefined

  /** Necessary to keep $option from being implicitly converted to
   * [[scala.collection.Iterable]] in `for` comprehensions.
   */
  @inline final def withFilter(p: A => Boolean)(implicit action: Action): WithFilter = new WithFilter(p)

  /** We need a whole WithFilter class to honor the "doesn't create a new
   * collection" contract even though it seems unlikely to matter much in a
   * collection with max size 1.
   */
  class WithFilter(p: A => Boolean)(implicit action: Action) {
    def map[B](f: A => B): Option[B] = toOption filter p map f
    def flatMap[B](f: A => Option[B]): Option[B] = toOption filter p flatMap f
    def foreach[U](f: A => U): Unit = toOption filter p foreach f
    def withFilter(q: A => Boolean): WithFilter = new WithFilter(x => p(x) && q(x))
  }

  /** Tests whether the option contains a given value as an element.
   *
   * @example {{{
   * // Returns true because Some instance contains string "something" which equals "something".
   * Some("something") contains "something"
   *
   * // Returns false because "something" != "anything".
   * Some("something") contains "anything"
   *
   * // Returns false when method called on None.
   * None contains "anything"
   * }}}
   *
   * @param elem the element to test.
   * @return `true` if the option has an element that is equal (as
   * determined by `==`) to `elem`, `false` otherwise.
   */
  final def contains[A1 >: A](elem: A1)(implicit action: Action): Boolean =
    !isEmpty && this.get == elem

  /** Returns true if this option is nonempty '''and''' the predicate
   * $p returns true when applied to this $option's value.
   * Otherwise, returns false.
   *
   * @param p the predicate to test
   */
  @inline final def exists(p: A => Boolean)(implicit action: Action): Boolean =
    !isEmpty && p(this.get)

  /** Returns true if this option is empty '''or''' the predicate
   * $p returns true when applied to this $option's value.
   *
   * @param p the predicate to test
   */
  @inline final def forall(p: A => Boolean)(implicit action: Action): Boolean = isEmpty || p(this.get)

  /** Apply the given procedure $f to the option's value,
   * if it is nonempty. Otherwise, do nothing.
   *
   * @param f the procedure to apply.
   * @see map
   * @see flatMap
   */
  @inline final def foreach[U](f: A => U)(implicit action: Action) {
    if (!isEmpty) f(this.get)
  }

  /** Returns a $some containing the result of
   * applying `pf` to this $option's contained
   * value, '''if''' this option is
   * nonempty '''and''' `pf` is defined for that value.
   * Returns $none otherwise.
   *
   * @example {{{
   * // Returns Some(HTTP) because the partial function covers the case.
   * Some("http") collect {case "http" => "HTTP"}
   *
   * // Returns None because the partial function doesn't cover the case.
   * Some("ftp") collect {case "http" => "HTTP"}
   *
   * // Returns None because None is passed to the collect method.
   * None collect {case value => value}
   * }}}
   *
   * @param pf the partial function.
   * @return the result of applying `pf` to this $option's
   * value (if possible), or $none.
   */
  @inline final def collect[B](pf: PartialFunction[A, B])(implicit action: Action): Option[B] =
    if (!isEmpty) pf.lift(this.get) else None

  /** Returns this $option if it is nonempty,
   * otherwise return the result of evaluating `alternative`.
   * @param alternative the alternative expression.
   */
  @inline final def orElse[B >: A](alternative: => Option[B])(implicit action: Action): Option[B] =
    if (isEmpty) alternative else toOption

  /** Returns a singleton iterator returning the $option's value
   * if it is nonempty, or an empty iterator if the option is empty.
   */
  def iterator(implicit action: Action): Iterator[A] =
    if (isEmpty) collection.Iterator.empty else collection.Iterator.single(this.get)

  /** Returns a singleton list containing the $option's value
   * if it is nonempty, or the empty list if the $option is empty.
   */
  def toList(implicit action: Action): List[A] =
    if (isEmpty) List() else new ::(this.get, Nil)

  /** Returns a [[scala.util.Left]] containing the given
   * argument `left` if this $option is empty, or
   * a [[scala.util.Right]] containing this $option's value if
   * this is nonempty.
   *
   * @param left the expression to evaluate and return if this is empty
   * @see toLeft
   */
  @inline final def toRight[X](left: => X)(implicit action: Action) =
    if (isEmpty) Left(left) else Right(this.get)

  /** Returns a [[scala.util.Right]] containing the given
   * argument `right` if this is empty, or
   * a [[scala.util.Left]] containing this $option's value
   * if this $option is nonempty.
   *
   * @param right the expression to evaluate and return if this is empty
   * @see toRight
   */
  @inline final def toLeft[X](right: => X)(implicit action: Action) =
    if (isEmpty) Right(right) else Left(this.get)
}
| caiiiycuk/xitrum | src/main/scala/xitrum/OptVar.scala | Scala | mit | 11,074 |
package io.github.mandar2812.dynaml.utils.sumac
import org.scalatest.FunSuite
import org.scalatest.Matchers
import io.github.mandar2812.dynaml.utils.sumac.validation.{FileExists, Positive, Range, Required}
import java.io.File
//import io.github.mandar2812.dynaml.utils.sumac.ThreeOrFour
/** Tests for the annotation-driven argument validations (@Required, @Positive,
  * @FileExists, @Range) and for user-registered annotation validators.
  */
class ValidationSuite extends FunSuite with Matchers {

  /** Parses `args` with a freshly-built FieldArgs and asserts that parsing
    * fails with an ArgException whose message contains `msg`. */
  def parse(args: Map[String,String], msg: String)(builder: => FieldArgs) {
    val a = builder
    val exc = withClue(args){the[ArgException] thrownBy {a.parse(args)}}
    withClue(args){exc.getMessage should include(msg)}
  }

  test("@Required") {
    def parseInt(args: Map[String,String], msg: String) = {
      parse(args, msg){new IntRequiredArgs()}
    }
    parseInt(Map("a" -> "1"), "must specify a value for b")
    parseInt(Map("b" -> "1"), "must specify a value for a")
    //also an error if values are given, but they match the defaults
    parseInt(Map("a" -> "0", "b" -> "7"), "must specify a value for ")
    val intArgs = new IntRequiredArgs()
    intArgs.parse(Map("a" -> "1", "b" -> "0"))
    intArgs.a should be (1)
    intArgs.b should be (0)

    //make sure that the checks still apply when called programmatically (doesn't depend on strings at all)
    intArgs.a = 0
    the[ArgException] thrownBy {intArgs.runValidation()}

    def parseString(args: Map[String,String], msg: String) = {
      parse(args, msg){new StringRequiredArgs()}
    }
    parseString(Map("e" -> "a"), "must specify a value for f")
    parseString(Map("f" -> "hi"), "must specify a value for e")
    // "<null>" parses to a null String, which is still "unset" for @Required.
    parseString(Map("e" -> "<null>", "f" -> "hi"), "must specify a value for e")
    parseString(Map("e" -> "blah", "f" -> "blah"), "must specify a value for f")
  }

  test("@Positive") {
    def parseP(args: Map[String,String], msg: String) {
      parse(args, msg){new PositiveArgs()}
    }
    parseP(Map("c" -> "1.0"), "must specify a positive value for a")
    parseP(Map("a" -> "3"), "must specify a positive value for c")
    parseP(Map("a" -> "3", "c" -> "-3.8"), "must specify a positive value for c")
    parseP(Map("a" -> "-3", "c" -> "3.8"), "must specify a positive value for a")
    // Zero does not count as positive.
    parseP(Map("a" -> "0", "c" -> "1"), "must specify a positive value for a")
    val a = new PositiveArgs()
    a.parse(Map("a" -> "1", "c" -> "7.9"))
    a.a should be (1)
    a.c should be (7.9f)
  }

  test("@FileExists") {
    val tmpFile = File.createTempFile("file",".tmp")
    val tmpPath = tmpFile.getAbsolutePath
    tmpFile.deleteOnExit()

    def parseF(args: Map[String,String], msg: String) {
      parse(args, msg){new FileExistsArgs()}
    }
    parseF(Map("path" -> "fakeFile.tmp", "file" -> tmpPath), "must specify a file that exists for path, current value = fakeFile.tmp")
    parseF(Map("path" -> tmpPath, "file" -> "fakeFile.tmp"), "must specify a file that exists for file, current value = fakeFile.tmp")
    parseF(Map("path" -> null, "file" -> tmpPath), "must specify a valid file name for path")

    val a = new FileExistsArgs()
    a.parse(Map("path" -> tmpPath, "file" -> tmpPath))
    a.file should be(tmpFile)
    a.path should be(tmpPath)
  }

  test("@Range") {
    def parseR(args: Map[String,String], msg: String) {
      parse(args, msg) {new RangeArgs()}
    }
    val msgX = "must specify a value between 3.0 and 8.0 for x"
    parseR(Map("y" -> "-80"), msgX)
    parseR(Map("x"->"1", "y" -> "-80"), msgX)
    parseR(Map("x" -> "9", "y" -> "-80"), msgX)
    val msgY = "must specify a value between -83.0 and -72.0 for y"
    parseR(Map("x" -> "5"), msgY)
    parseR(Map("x" -> "5", "y" -> "5"), msgY)
    parseR(Map("x" -> "5", "y" -> "-90"), msgY)
    val a = new RangeArgs()
    a.parse(Map("x"->"4", "y" -> "-77"))
    a.x should be (4)
    a.y should be (-77)
  }

  test("user-defined") {
    //silly example of user-defined annotation validations
    parse(Map("x" -> "7"), "x must be 3 or 4"){new UserDefinedAnnotationArgs()}
    val a1 = new UserDefinedAnnotationArgs()
    a1.parse(Map("x" -> "3"))
    a1.x should be (3)

    //this arg class hasn't registered any validation w/ the annotation, so it is irrelevant
    val a2 = new UnregisteredAnnotationArgs()
    a2.parse(Map("x" -> "7"))
    a2.x should be (7)

    // An "update" validator silently rewrites out-of-range values instead of failing.
    val a3 = new UserDefinedAnnotationUpdateArgs()
    a3.parse(Map("x" -> "17"))
    a3.x should be (3)
    a3.parse(Map("x" -> "4"))
    a3.x should be (4)
  }

  test("multi-annotation") {
    parse(Map("b"->"-4"), "must specify a positive value for b"){new MultiAnnotationArgs}
    parse(Map("b"->"7"), "must specify a value for b"){new MultiAnnotationArgs}
    val a = new MultiAnnotationArgs()
    a.parse(Map("b" -> "3"))
    a.b should be (3)
  }

  test("nested validation") {
    val b = new BarArgs()
    b.parse(Array[String]("--bar.foo", "hi"))
    b.bar.foo should be ("hi")

    val b2 = new BarArgs()
    val exc = the[ArgException] thrownBy {b2.parse(Array[String]())}
    exc.getMessage should include("must specify a value for bar.foo")

    //make sure the args dont' get muddled at all if a nested arg has the same name
    val o = new OuterRequired()
    o.parse(Array("--x", "6", "--inner.x", "7"))
    o.x should be (6)
    o.inner.x should be (7)

    def t(s:String*): ArgException = {
      val a = new OuterRequired()
      the[ArgException] thrownBy {a.parse(s.toArray)}
    }
    val exc1 = t()
    val exc2 = t("--x", "6")
    val exc3 = t("--inner.x", "7")
    // Supplying the default value counts as "not specified" for @Required.
    val exc4 = t("--x", "1","--inner.x", "7")
    val exc5 = t("--x", "5","--inner.x", "567")
    exc1.getMessage should include ("must specify a value for")
    Seq(exc2,exc5).foreach{_.getMessage should include ("must specify a value for inner.x")}
    Seq(exc3,exc4).foreach{_.getMessage should include ("must specify a value for x")}

    val o2 = new OuterRequired()
    o2.parse(Array[String]("--x", "567","--inner.x", "1"))
    o2.x should be (567)
    o2.inner.x should be (1)
  }
}
/** Fixture: two @Required Int fields (one with a non-zero default) and one
  * unannotated field. */
class IntRequiredArgs extends FieldArgs {
  @Required
  var a: Int = _
  @Required
  var b: Int = 7
  var c = 19
}
/** Fixture: @Required String fields, one defaulting to null and one to "blah". */
class StringRequiredArgs extends FieldArgs {
  @Required
  var e: String = _
  @Required
  var f: String = "blah"
}
/** Fixture: @Positive on an Int and a Float, alongside unconstrained fields. */
class PositiveArgs extends FieldArgs {
  @Positive
  var a: Int = _
  var b: Int = _
  @Positive
  var c: Float = _
  var d: Float = _
}
/** Fixture: @Range bounds on an Int (positive range) and a Float (negative range). */
class RangeArgs extends FieldArgs {
  @Range(min=3,max=8)
  var x: Int = _
  @Range(min= -83, max= -72)
  var y: Float = _
}
/** Fixture: a field carrying both @Positive and @Required (default 7). */
class MultiAnnotationArgs extends FieldArgs {
  @Positive @Required
  var b = 7
}
/** Fixture: a custom @ThreeOrFour annotation backed by a registered validator
  * that rejects any value other than 3 or 4. */
class UserDefinedAnnotationArgs extends FieldArgs {
  @ThreeOrFour
  var x: Int = _

  registerAnnotationValidation(classOf[ThreeOrFour]){(_, value, _, name) =>
    if (value != 3 && value != 4) {
      throw new ArgException(name + " must be 3 or 4")
    }
  }
}
/** Fixture: an "update" validator for @ThreeOrFour that coerces out-of-range
  * values to 3 instead of raising an error. */
class UserDefinedAnnotationUpdateArgs extends FieldArgs {
  @ThreeOrFour
  var x: Int = _

  registerAnnotationValidationUpdate(classOf[ThreeOrFour]){(_, value, _, name, holder) =>
    if (value != 3 && value !=4) {
      holder.setValue(3)
    }
  }
}
/** Fixture: carries @ThreeOrFour but registers no validator, so the annotation
  * has no effect here. */
class UnregisteredAnnotationArgs extends FieldArgs {
  @ThreeOrFour
  var x: Int = _
}
/** Fixture: @FileExists applied to both a String path and a java.io.File. */
class FileExistsArgs extends FieldArgs {
  @FileExists
  var path: String = _
  @FileExists
  var file: File = _
}
/** Fixture: a single @Required field, used nested inside BarArgs. */
class FooArgs extends FieldArgs {
  @Required
  var foo: String = _
}
/** Fixture: nests FooArgs so its @Required surfaces as "bar.foo". */
class BarArgs extends FieldArgs {
  var bar = new FooArgs()
}
/** Fixture: an outer @Required "x" plus a nested arg with its own "x",
  * checking that same-named nested fields do not get confused. */
class OuterRequired extends FieldArgs {
  @Required
  var x = 1
  var inner: InnerRequired = _
}
/** Fixture: nested counterpart of OuterRequired with its own @Required "x". */
class InnerRequired extends FieldArgs {
  @Required
  var x = 567
}
| transcendent-ai-labs/DynaML | dynaml-core/src/test/scala/io/github/mandar2812/dynaml/utils/sumac/ValidationSuite.scala | Scala | apache-2.0 | 7,508 |
/**
* Copyright 2014 www.alaraph.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This is my solution to problem:
* https://www.hackerrank.com/challenges/functions-and-fractals-sierpinski-triangles
* mauro@alaraph.com
*/
package com.alaraph.hackerrank.sierpinski
object Solution {

  /** Renders a 63-wide by 32-tall ASCII Sierpinski triangle after `N` fractal
    * iterations (valid N: 0..5) and prints it to stdout.
    */
  def drawTriangles(N: Int) = {
    type Matrix = Vector[Vector[Char]]

    // Paints an upward-narrowing triangle of `c`: writes `base` copies of `c`
    // at (row, offset), then recurses one row up with the run shrunk by 2.
    def sieve(row: Int, offset: Int, base: Int, c: Char, matrix: Matrix): Matrix =
      if (base > 0)
        sieve(row + 1, offset + 1, base - 2, c, matrix.updated(row, matrix(row).patch(offset, c.toString * base, base)))
      else matrix

    // Initial canvas: a 63x32 field of '_' holding one solid triangle of '1's
    // (reversed so the triangle points upward).
    def init: Matrix =
      sieve(0, 0, 63, '1', (for (i <- 1 to 32) yield ("_" * 63).toVector).toVector).reverse

    // Recursively carves out the inverted central triangle ('_') and recurses
    // into the top, lower-left and lower-right sub-triangles until depth N.
    def drawTrianglesAcc(n: Int, row: Int, offset: Int, base: Int, acc: Matrix): Matrix = {
      if (n < N)
        drawTrianglesAcc(n + 1,
          row + base / 4 + 1,
          offset + base - base / 4,
          base / 2,
          drawTrianglesAcc(n + 1,
            row + base / 4 + 1,
            offset - base / 4 - 1,
            base / 2,
            drawTrianglesAcc(n + 1,
              row - base / 4 - 1,
              offset + base / 4 + 1,
              base / 2,
              sieve(row, offset, base, '_', acc))))
      else acc
    }
    if ((0 to 5).toSet.contains(N))
      // NOTE(review): "\\n" is a literal backslash-n, not a newline; a real
      // line separator ("\n") was presumably intended — this looks like an
      // escaping artifact. TODO confirm against the original source.
      print(drawTrianglesAcc(0, 16, 16, 31, init).map(_.mkString("", "", "")).mkString("", "\\n", ""))
    else
      println("Invalid parameter value N=%d".format(N))
  }

  def main(args: Array[String]) {
    // NOTE(review): Predef.readInt() is deprecated since Scala 2.11;
    // scala.io.StdIn.readInt() is the modern equivalent.
    drawTriangles(readInt())
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.rules.logical
import org.apache.calcite.adapter.enumerable.EnumerableTableScan
import org.apache.calcite.plan.RelOptRule.{any, operand}
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall, RelOptRuleOperand}
import org.apache.calcite.rel.logical.LogicalTableScan
/**
* Rule that converts an EnumerableTableScan into a LogicalTableScan.
* We need this rule because Calcite creates an EnumerableTableScan
* when parsing a SQL query. We convert it into a LogicalTableScan
* so we can merge the optimization process with any plan that might be created
* by the Table API.
*/
class EnumerableToLogicalTableScan(
    operand: RelOptRuleOperand,
    description: String) extends RelOptRule(operand, description) {

  /** Replaces the matched EnumerableTableScan with an equivalent LogicalTableScan. */
  override def onMatch(call: RelOptRuleCall): Unit = {
    val scan = call.rel(0).asInstanceOf[EnumerableTableScan]
    val logicalScan = LogicalTableScan.create(scan.getCluster, scan.getTable, scan.getHints)
    call.transformTo(logicalScan)
  }
}
object EnumerableToLogicalTableScan {
  /** Shared rule instance matching any EnumerableTableScan. */
  val INSTANCE: EnumerableToLogicalTableScan =
    new EnumerableToLogicalTableScan(
      operand(classOf[EnumerableTableScan], any),
      "EnumerableToLogicalTableScan")
}
| rmetzger/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/rules/logical/EnumerableToLogicalTableScan.scala | Scala | apache-2.0 | 2,013 |
/*
Copyright 2011 Ben Biddington
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.coriander.oauth.core.uri
abstract class UrlEncoder {

  /** Symbolic alias for [[encode]]. */
  def %%(value: String): String = encode(value)

  /** Encodes the given value; implemented by concrete encoders. */
  def encode(value: String): String
}
package mesosphere.marathon
package metrics
/** Common prefix for metric names; `name` is the leading path segment (e.g. "api", "service"). */
sealed trait MetricPrefix {
  val name: String
}
/**
* Metrics relating to our API.
*/
case object ApiMetric extends MetricPrefix {
  /** Prefix segment for API metrics. */
  override val name: String = "api"
}
/**
* Metrics relating to the application code.
*/
case object ServiceMetric extends MetricPrefix {
  /** Prefix segment for application/service metrics. */
  override val name: String = "service"
}
| guenter/marathon | src/main/scala/mesosphere/marathon/metrics/MetricPrefixes.scala | Scala | apache-2.0 | 333 |
/* __ __ *\
* / /____ ___ ____ ___ ___ _/ / lasius *
* / __/ -_) _ `/ _ \/ _ \/ _ `/ / contributed by tegonal *
* \__/\__/\_, /\___/_//_/\_,_/_/ http://tegonal.com/ *
* /___/ *
* *
* This program is free software: you can redistribute it and/or modify it *
* under the terms of the GNU General Public License as published by *
* the Free Software Foundation, either version 3 of the License, *
* or (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, but *
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for *
* more details. *
* *
* You should have received a copy of the GNU General Public License along *
* with this program. If not, see http://www.gnu.org/licenses/ *
* *
\* */
package models
import com.tegonal.play.json.TypedId._
import models.BaseFormat._
import org.joda.time.{DateTime, Duration}
import play.api.libs.json._
import reactivemongo.bson.BSONObjectID
/** Base trait for per-day booking aggregates.
  *
  * `E` is the concrete entity type, so `invert` and `duration` can return it
  * without casting (F-bounded-style encoding of the self type).
  */
trait OperatorEntity[I <: BaseId[_], E] extends BaseEntity[I] {
  // aggregated duration for the given day
  val duration: Duration
  // the day this aggregate belongs to
  val day: DateTime

  /** The same entry with a negated duration (used to subtract a booking). */
  def invert: E

  /** The same entry carrying the given duration. */
  def duration(duration: Duration): E
}
// Typed BSON id wrapper for BookingByProject documents.
case class BookingByProjectId(value: BSONObjectID = BSONObjectID.generate) extends BaseBSONObjectId

object BookingByProjectId {
  // JSON (de)serialization for the id wrapper
  implicit val idFormat: Format[BookingByProjectId] = BaseFormat.idformat[BookingByProjectId](BookingByProjectId.apply _)
}
// Typed BSON id wrapper for BookingByCategory documents.
case class BookingByCategoryId(value: BSONObjectID = BSONObjectID.generate) extends BaseBSONObjectId

object BookingByCategoryId {
  // JSON (de)serialization for the id wrapper
  implicit val idFormat: Format[BookingByCategoryId] = BaseFormat.idformat[BookingByCategoryId](BookingByCategoryId.apply _)
}
// Typed BSON id wrapper for BookingByTag documents.
case class BookingByTagId(value: BSONObjectID = BSONObjectID.generate) extends BaseBSONObjectId

object BookingByTagId {
  // JSON (de)serialization for the id wrapper
  implicit val idFormat: Format[BookingByTagId] = BaseFormat.idformat[BookingByTagId](BookingByTagId.apply _)
}
/** Per-day booking time aggregated by project. */
case class BookingByProject(_id: BookingByProjectId, userId: UserId, day: DateTime, projectId: ProjectId, duration: Duration) extends OperatorEntity[BookingByProjectId, BookingByProject] {
  val id = _id

  /** A copy of this aggregate with the duration negated (used to undo a booking). */
  def invert: BookingByProject = copy(duration = Duration.ZERO.minus(duration))

  /** A copy of this aggregate carrying the given duration. */
  def duration(duration: Duration): BookingByProject = copy(duration = duration)
}

object BookingByProject {
  implicit val bookingByProjectFormat = Json.format[BookingByProject]
}
/** Per-day booking time aggregated by category. */
case class BookingByCategory(_id: BookingByCategoryId, userId: UserId, day: DateTime, categoryId: CategoryId, duration: Duration) extends OperatorEntity[BookingByCategoryId, BookingByCategory] {
  val id = _id

  /** A copy of this aggregate with the duration negated (used to undo a booking). */
  def invert: BookingByCategory = copy(duration = Duration.ZERO.minus(duration))

  /** A copy of this aggregate carrying the given duration. */
  def duration(duration: Duration): BookingByCategory = copy(duration = duration)
}

object BookingByCategory {
  implicit val bookingByCategoryFormat = Json.format[BookingByCategory]
}
/** Per-day booking time aggregated by tag. */
case class BookingByTag(_id: BookingByTagId, userId: UserId, day: DateTime, tagId: TagId, duration: Duration) extends OperatorEntity[BookingByTagId, BookingByTag] {
  val id = _id

  /** A copy of this aggregate with the duration negated (used to undo a booking). */
  def invert: BookingByTag = copy(duration = Duration.ZERO.minus(duration))

  /** A copy of this aggregate carrying the given duration. */
  def duration(duration: Duration): BookingByTag = copy(duration = duration)
}

object BookingByTag {
  implicit val bookingByTagFormat = Json.format[BookingByTag]
}
| tegonal/lasius | app/models/BookingStatistics.scala | Scala | gpl-3.0 | 4,207 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
/**
* Interface used to listen for job completion or failure events after submitting a job to the
* DAGScheduler. The listener is notified each time a task succeeds, as well as if the whole
* job fails (and no further taskSucceeded events will happen).
 * A job is a submitted unit of work that may consist of one or more tasks. This
 * interface listens for completion/failure events of a job submitted to the
 * DAGScheduler: the listener is notified on each task success, and when the whole job fails.
*/
private[spark] trait JobListener {

  /** Called each time a task of the job completes successfully.
   *
   * @param index  index of the finished task within the job
   * @param result the task's result value
   */
  def taskSucceeded(index: Int, result: Any): Unit

  /** Called once if the whole job fails; no further taskSucceeded events follow. */
  def jobFailed(exception: Exception): Unit
}
| tophua/spark1.52 | core/src/main/scala/org/apache/spark/scheduler/JobListener.scala | Scala | apache-2.0 | 1,430 |
/*
* Copyright © 2017 University of Texas at Arlington
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uta.diql.core
import scala.reflect.macros.whitebox.Context
import java.io._
/** Holds the code generator shared across macro expansions.
  * NOTE(review): mutable global state — `distributed` is reassigned on every
  * call to `code_generator`; presumably safe because macro expansion is
  * single-threaded, but confirm.
  */
object DistributedEvaluator {
  // default generator with no macro context; replaced during code generation
  var distributed: ScaldingCodeGenerator = new { val c = null } with ScaldingCodeGenerator
}
abstract class QueryCodeGenerator {
  // macro context of the current expansion
  val context: Context

  // code generator and optimizer bound to this macro context
  val cg = new { val c: context.type = context } with ScaldingCodeGenerator
  val optimizer = new { val c: context.type = context } with Optimizer

  /** Translate a DIQL query to Scala byte code.
    *
    * @param e          the query AST
    * @param query_text the original query text (kept for diagnostics)
    * @param line       source line of the query, used for error positions
    * @param debug      if true, embed lineage/debugging wrappers in the generated code
    * @param env        initial typing environment
    * @return the generated Scala expression, or `()` if generation failed
    */
  def code_generator ( e: Expr, query_text: String, line: Int, debug: Boolean,
                       env: cg.Environment = Map() ): context.Expr[Any] = {
    import context.universe.{Expr=>_,_}
    import Normalizer.normalizeAll
    import Pretty.{print=>pretty_print}
    import scala.util.control.NonFatal
    try {
      cg.line = line
      distributed = cg
      cg.typecheck(e,env)
      // optimize, then re-normalize and re-typecheck the optimized term
      val oe = normalizeAll(optimizer.optimizeAll(e,env))
      if (diql_explain)
         println("Optimized term:\\n"+pretty_print(oe.toString))
      cg.typecheck(oe,env)
      // when debugging, wrap the term with lineage tracking
      val de = if (debug)
                  normalizeAll(Call("debug",
                        List(Provenance.embedLineage(oe,cg.isDistributed(_)),
                             BoolConst(cg.isDistributed(oe)),
                             Call("List",Provenance.exprs.map(StringConst(_))))))
               else oe
      if (debug && diql_explain)
         println("Debugging term:\\n"+pretty_print(de.toString))
      val ec = cg.codeGen(de,env)
      if (diql_explain)
         println("Scala code:\\n"+showCode(ec))
      val tp = cg.getType(ec,env)
      if (diql_explain)
         println("Scala type: "+showCode(tp))
      context.Expr[Any](ec)
    } catch {
      // Recover only from non-fatal errors so that fatal VM errors
      // (OutOfMemoryError, etc.) propagate instead of being swallowed;
      // the previous `case ex: Any` caught everything, including fatal ones.
      case NonFatal(ex)
        => println(ex)
           if (diql_explain) {
              val sw = new StringWriter
              ex.printStackTrace(new PrintWriter(sw))
              println(sw.toString)
           }
           // fall back to a unit expression so the macro expansion still compiles
           context.Expr[Any](q"()")
    }
  }
}
| fegaras/DIQL | src/scalding/scala/edu/uta/diql/QueryCodeGenerator.scala | Scala | apache-2.0 | 2,581 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600e.v2
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600e.v2.retriever.CT600EBoxRetriever
import uk.gov.hmrc.ct.ct600e.validations.ValidateDeclarationNameOrStatus
/** CT600E (v2) box E1031: the claimer's status declaration.
  * Optional free-text input, validated via the shared declaration
  * name-or-status rule.
  */
case class E1031(value: Option[String]) extends CtBoxIdentifier("Claimer's status") with CtOptionalString with Input
 with ValidatableBox[CT600EBoxRetriever] with ValidateDeclarationNameOrStatus[CT600EBoxRetriever] {

  // Delegates to the common name-or-status validation, reporting against box id "E1031".
  override def validate(boxRetriever: CT600EBoxRetriever): Set[CtValidation] = validateDeclarationNameOrStatus("E1031", this)
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600e/v2/E1031.scala | Scala | apache-2.0 | 1,148 |
package scommons.client.app
import scommons.react._
// Props for AppMainPanel: application name, current user, and footer data.
case class AppMainPanelProps(name: String = "App",
                             user: String = "user",
                             copyright: String = "copyright",
                             version: String = "version")
/** Main application layout: header, fluid content container, footer. */
object AppMainPanel extends FunctionComponent[AppMainPanelProps] {

  protected def render(compProps: Props): ReactElement = {
    val AppMainPanelProps(name, user, copyright, version) = compProps.wrapped
    <.>()(
      <(AppHeader())(^.wrapped := AppHeaderProps(name, user))(),
      <.div(^.className := "container-fluid")(
        compProps.children
      ),
      <(AppFooter())(^.wrapped := AppFooterProps(copyright, version))()
    )
  }
}
| viktor-podzigun/scommons | ui/src/main/scala/scommons/client/app/AppMainPanel.scala | Scala | apache-2.0 | 711 |
package lila.worldMap
import com.sanoma.cda.geoip.IpLocation
// A geographic point plus its country name, as plotted on the world map.
case class Location(
  country: String,
  lat: Double,
  lon: Double)
object Location {

  /** Builds a Location from GeoIP data; None when country or coordinates are missing. */
  def apply(ipLoc: IpLocation): Option[Location] =
    ipLoc.countryName flatMap { country =>
      ipLoc.geoPoint map { point =>
        Location(country, point.latitude, point.longitude)
      }
    }
}
| danilovsergey/i-bur | modules/worldMap/src/main/model.scala | Scala | mit | 334 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.kudu.index
import java.nio.charset.StandardCharsets
import java.util.Collections
import org.apache.kudu.Schema
import org.apache.kudu.client.{CreateTableOptions, PartialRow}
import org.locationtech.geomesa.index.conf.splitter.DefaultSplitter
import org.locationtech.geomesa.index.index.IndexKeySpace
import org.locationtech.geomesa.index.index.IndexKeySpace._
import org.locationtech.geomesa.index.index.attribute.{AttributeIndex, AttributeIndexKey, AttributeIndexKeySpace, AttributeIndexValues}
import org.locationtech.geomesa.kudu.data.KuduFeature
import org.locationtech.geomesa.kudu.schema.KuduColumnAdapter
import org.locationtech.geomesa.kudu.schema.KuduIndexColumnAdapter._
import org.locationtech.geomesa.kudu.{KuduAttributeFilterStrategy, KuduValue}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.index.ByteArrays
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
// Singleton instance of the Kudu attribute index.
case object KuduAttributeIndex extends KuduAttributeIndex
trait KuduAttributeIndex extends KuduTieredFeatureIndex[AttributeIndexValues[Any], AttributeIndexKey]
    with KuduAttributeFilterStrategy {

  import scala.collection.JavaConverters._

  override val name: String = AttributeIndex.Name

  override val version: Int = 1

  // Kudu primary key layout: attribute index (i), encoded attribute value,
  // tiered secondary key bytes, and the feature id.
  override protected val keyColumns: Seq[KuduColumnAdapter[_]] =
    Seq(IdxColumnAdapter, ValueColumnAdapter, SecondaryColumnAdapter, FeatureIdAdapter)

  override protected def keySpace: AttributeIndexKeySpace = AttributeIndexKeySpace

  // optional secondary key space used to tier the index, if the type supports one
  override protected def tieredKeySpace(sft: SimpleFeatureType): Option[IndexKeySpace[_, _]] =
    AttributeIndex.TieredOptions.find(_.supports(sft))

  /**
    * Configures table partitioning: hash partitions on the value column
    * (one per attribute shard), plus range partitions on
    * (attribute index, value) derived from configured splits for each
    * secondary-indexed attribute.
    */
  override protected def configurePartitions(sft: SimpleFeatureType,
                                             schema: Schema,
                                             config: Map[String, String],
                                             options: CreateTableOptions): Unit = {
    import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType

    // add hash splits based on our shards, which we don't need to actually store as a separate column
    val shards = sft.getAttributeShards
    if (shards > 1) {
      options.addHashPartitions(Collections.singletonList(ValueColumnAdapter.name), shards)
    }

    options.setRangePartitionColumns(Seq(IdxColumnAdapter.name, ValueColumnAdapter.name).asJava)

    SimpleFeatureTypes.getSecondaryIndexedAttributes(sft).foreach { d =>
      val splits = {
        val upperBound = new String(ByteRange.UnboundedUpperRange, StandardCharsets.UTF_8)
        val configured = DefaultSplitter.Parser.attributeSplits(d.getLocalName, d.getType.getBinding, config)
        if (configured.isEmpty) { Seq("", upperBound) } else {
          // add an upper and lower bound as our splits don't have endpoints
          val builder = Seq.newBuilder[String]
          builder.sizeHint(configured.length + 2)
          builder += ""
          builder ++= configured.sorted
          builder += upperBound
          builder.result.distinct
        }
      }
      val i = sft.indexOf(d.getLocalName).toShort
      // each adjacent pair of splits becomes one [lower, upper) range partition
      splits.sliding(2).foreach { case Seq(lo, hi) =>
        val lower = schema.newPartialRow()
        val upper = schema.newPartialRow()
        lower.addShort(0, i)
        upper.addShort(0, i)
        lower.addString(1, lo)
        upper.addString(1, hi)
        options.addRangePartition(lower, upper)
      }
    }
  }

  // Creates one primary-key row per index key (untiered: secondary key is empty).
  override protected def createKeyValues(toIndexKey: SimpleFeature => Seq[AttributeIndexKey])
                                        (kf: KuduFeature): Seq[Seq[KuduValue[_]]] = {
    val fid = KuduValue(kf.feature.getID, FeatureIdAdapter)
    toIndexKey(kf.feature).map { key =>
      Seq(
        KuduValue(key.i, IdxColumnAdapter),
        KuduValue(key.value, ValueColumnAdapter),
        KuduValue(Array.empty[Byte], SecondaryColumnAdapter),
        fid
      )
    }
  }

  // Creates the cross-product of index keys and tiered (secondary) keys.
  override protected def createKeyValues(toIndexKey: SimpleFeature => Seq[AttributeIndexKey],
                                         toTieredIndexKey: SimpleFeature => Seq[Array[Byte]])
                                        (kf: KuduFeature): Seq[Seq[KuduValue[_]]] = {
    val fid = KuduValue(kf.feature.getID, FeatureIdAdapter)
    val tiers = toTieredIndexKey(kf.feature)
    toIndexKey(kf.feature).flatMap { key =>
      tiers.map { secondary =>
        Seq(
          KuduValue(key.i, IdxColumnAdapter),
          KuduValue(key.value, ValueColumnAdapter),
          KuduValue(secondary, SecondaryColumnAdapter),
          fid
        )
      }
    }
  }

  // Maps an abstract scan range to (lower, upper) Kudu partial-row bounds.
  override protected def toRowRanges(sft: SimpleFeatureType,
                                     schema: Schema,
                                     range: ScanRange[AttributeIndexKey]): (Option[PartialRow], Option[PartialRow]) = {
    range match {
      case SingleRowRange(row) => (lower(schema, row), upper(schema, row))
      case BoundedRange(lo, hi) => (lower(schema, lo), upper(schema, hi))
      case PrefixRange(prefix) => (lower(schema, prefix, prefix = true), upper(schema, prefix, prefix = true))
      case LowerBoundedRange(lo) => (lower(schema, lo), upper(schema, lo.copy(value = null, inclusive = false)))
      case UpperBoundedRange(hi) => (lower(schema, hi.copy(value = null, inclusive = false)), upper(schema, hi))
      case UnboundedRange(empty) => (lower(schema, empty), upper(schema, empty))
      case _ => throw new IllegalArgumentException(s"Unexpected range type $range")
    }
  }

  // Like toRowRanges, but combines the primary scan range with tiered
  // (secondary key) sub-ranges where the primary range pins a single row.
  override protected def toTieredRowRanges(sft: SimpleFeatureType,
                                           schema: Schema,
                                           range: ScanRange[AttributeIndexKey],
                                           tiers: => Seq[ByteRange],
                                           minTier: => Array[Byte],
                                           maxTier: => Array[Byte]): Seq[(Option[PartialRow], Option[PartialRow])] = {
    range match {
      case SingleRowRange(row) =>
        // single row range - can use all the tiered values
        tiers.map {
          case BoundedByteRange(lo, hi) => (lower(schema, row, lo), upper(schema, row, hi))
          case SingleRowByteRange(r) => (lower(schema, row, r), upper(schema, row, r))
          case r => throw new IllegalArgumentException(s"Unexpected range type $r")
        }

      case BoundedRange(lo, hi) => Seq((lower(schema, lo, minTier), upper(schema, hi, maxTier)))
      case PrefixRange(prefix) => Seq((lower(schema, prefix, prefix = true), upper(schema, prefix, prefix = true)))
      case LowerBoundedRange(lo) => Seq((lower(schema, lo, minTier), upper(schema, lo.copy(value = null, inclusive = false))))
      case UpperBoundedRange(hi) => Seq((lower(schema, hi.copy(value = null, inclusive = false)), upper(schema, hi, maxTier)))
      case UnboundedRange(empty) => Seq((lower(schema, empty), upper(schema, empty)))
      case _ => throw new IllegalArgumentException(s"Unexpected range type $range")
    }
  }

  // Builds an (inclusive) lower-bound row; exclusive lower bounds are realized
  // by appending a zero byte to the value so the exact value is skipped.
  private def lower(schema: Schema,
                    key: AttributeIndexKey,
                    secondary: Array[Byte] = Array.empty,
                    prefix: Boolean = false): Some[PartialRow] = {
    val row = schema.newPartialRow()
    IdxColumnAdapter.writeToRow(row, key.i)
    if (key.value == null) {
      // no value bound: start from the beginning of this attribute's rows
      ValueColumnAdapter.writeToRow(row, "")
    } else if (prefix || key.inclusive) {
      ValueColumnAdapter.writeToRow(row, key.value)
    } else {
      ValueColumnAdapter.writeToRow(row, key.value + new String(ByteArrays.ZeroByteArray, StandardCharsets.UTF_8))
    }
    SecondaryColumnAdapter.writeToRow(row, secondary)
    FeatureIdAdapter.writeToRow(row, "")
    Some(row)
  }

  // Builds the (exclusive) upper-bound row for a scan.
  private def upper(schema: Schema,
                    key: AttributeIndexKey,
                    secondary: Array[Byte] = Array.empty,
                    prefix: Boolean = false): Some[PartialRow] = {
    val row = schema.newPartialRow()
    if (key.value == null) {
      // no value bound: end at the start of the next attribute index
      if (key.i + 1 == Short.MaxValue) {
        // push the exclusive value to the value column to avoid numeric overflow
        IdxColumnAdapter.writeToRow(row, key.i)
        ValueColumnAdapter.writeToRow(row, new String(ByteArrays.ZeroByteArray, StandardCharsets.UTF_8))
      } else {
        IdxColumnAdapter.writeToRow(row, (key.i + 1).toShort)
        ValueColumnAdapter.writeToRow(row, "")
      }
      SecondaryColumnAdapter.writeToRow(row, Array.empty)
    } else {
      IdxColumnAdapter.writeToRow(row, key.i)
      // note: we can't tier exclusive end points, as we can't calculate previous rows
      if (prefix || secondary.length == 0 || !key.inclusive) {
        if (key.inclusive) {
          // use 3 consecutive max-bytes as the exclusive upper bound - hopefully no values will match this...
          ValueColumnAdapter.writeToRow(row, key.value + new String(ByteRange.UnboundedUpperRange, StandardCharsets.UTF_8))
        } else {
          ValueColumnAdapter.writeToRow(row, key.value)
        }
        SecondaryColumnAdapter.writeToRow(row, Array.empty)
      } else {
        ValueColumnAdapter.writeToRow(row, key.value)
        SecondaryColumnAdapter.writeToRow(row, secondary)
      }
    }
    FeatureIdAdapter.writeToRow(row, "")
    Some(row)
  }
}
| ddseapy/geomesa | geomesa-kudu/geomesa-kudu-datastore/src/main/scala/org/locationtech/geomesa/kudu/index/KuduAttributeIndex.scala | Scala | apache-2.0 | 9,812 |
package org.jetbrains.plugins.scala
package lang
package completion
import com.intellij.codeInsight.completion._
import com.intellij.codeInsight.lookup.{InsertHandlerDecorator, LookupElement, LookupElementDecorator}
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.editor.Document
import com.intellij.openapi.util.Computable
import com.intellij.patterns.PlatformPatterns
import com.intellij.psi._
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.util.ProcessingContext
import org.jetbrains.annotations.Nullable
import org.jetbrains.plugins.scala.debugger.evaluation.ScalaRuntimeTypeEvaluator
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.completion.ScalaAfterNewCompletionUtil._
import org.jetbrains.plugins.scala.lang.completion.ScalaCompletionUtil._
import org.jetbrains.plugins.scala.lang.completion.lookups.{LookupElementManager, ScalaLookupItem}
import org.jetbrains.plugins.scala.lang.lexer.{ScalaLexer, ScalaTokenTypes}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScBindingPattern
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScInterpolated, ScReferenceElement, ScStableCodeReferenceElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScBlock, ScModificationTrackerOwner, ScNewTemplateDefinition, ScReferenceExpression}
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFun
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportStmt
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
import org.jetbrains.plugins.scala.lang.psi.fake.FakePsiMethod
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.impl.base.ScStableCodeReferenceElementImpl
import org.jetbrains.plugins.scala.lang.psi.impl.base.types.ScTypeProjectionImpl
import org.jetbrains.plugins.scala.lang.psi.impl.expr.ScReferenceExpressionImpl
import org.jetbrains.plugins.scala.lang.psi.types.{ScAbstractType, ScType}
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
import org.jetbrains.plugins.scala.lang.resolve.processor.CompletionProcessor
import org.jetbrains.plugins.scala.lang.resolve.{ResolveUtils, ScalaResolveResult}
import org.jetbrains.plugins.scala.lang.scaladoc.lexer.ScalaDocTokenType
import scala.annotation.tailrec
import scala.util.Random
/**
* @author Alexander Podkhalyuzin
* Date: 16.05.2008
*/
abstract class ScalaCompletionContributor extends CompletionContributor {

  /** The dummy identifier inserted at the caret before completion runs; subclasses may override. */
  def getDummyIdentifier(offset: Int, file: PsiFile): String = {
    CompletionInitializationContext.DUMMY_IDENTIFIER
  }

  /** Finds the PSI element to complete at: walks up from the original position
    * looking for a valid modification-tracker owner, and if that owner lives in
    * the original file, asks it for the mirror position with the dummy
    * identifier applied; otherwise falls back to `parameters.getPosition`.
    */
  def positionFromParameters(parameters: CompletionParameters): PsiElement = {
    @tailrec
    def inner(element: PsiElement): PsiElement = element match {
      case null => parameters.getPosition //we got to the top of the tree and didn't find a modificationTrackerOwner
      case owner: ScModificationTrackerOwner if owner.isValidModificationTrackerOwner() =>
        if (owner.containingFile.contains(parameters.getOriginalFile)) {
          owner.getMirrorPositionForCompletion(getDummyIdentifier(parameters.getOffset, parameters.getOriginalFile),
            parameters.getOffset - owner.getTextRange.getStartOffset).getOrElse(parameters.getPosition)
        } else parameters.getPosition
      case _ => inner(element.getContext)
    }
    inner(parameters.getOriginalPosition)
  }
}
class ScalaBasicCompletionContributor extends ScalaCompletionContributor {
private val addedElements = collection.mutable.Set[String]()
extend(CompletionType.BASIC, PlatformPatterns.psiElement(), new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext, result: CompletionResultSet) {
val dummyPosition = positionFromParameters(parameters)
val (position, inString, inInterpolatedString) = dummyPosition.getNode.getElementType match {
case ScalaTokenTypes.tIDENTIFIER | ScalaDocTokenType.DOC_TAG_VALUE_TOKEN => (dummyPosition, false, false)
case ScalaTokenTypes.tSTRING | ScalaTokenTypes.tMULTILINE_STRING =>
//it's ok to use parameters here as we want just to calculate offset
val offsetInString = parameters.getOffset - parameters.getPosition.getTextRange.getStartOffset + 1
val interpolated =
ScalaPsiElementFactory.createExpressionFromText("s" + dummyPosition.getText, dummyPosition.getContext)
(interpolated.findElementAt(offsetInString), true, false)
case ScalaTokenTypes.tINTERPOLATED_STRING | ScalaTokenTypes.tINTERPOLATED_MULTILINE_STRING =>
val position = dummyPosition.getContext
if (!position.isInstanceOf[ScInterpolated]) return
if (!parameters.getPosition.getParent.isInstanceOf[ScInterpolated]) return
val interpolated = position.asInstanceOf[ScInterpolated]
val dummyInterpolated = parameters.getPosition.getParent.asInstanceOf[ScInterpolated]
//we use here file copy as we want to work with offsets.
val offset = parameters.getOffset
val dummyInjections = dummyInterpolated.getInjections
val index = dummyInjections.lastIndexWhere { expr =>
expr.getTextRange.getEndOffset <= offset
}
//it's ok to use parameters here as we want just to calculate offset
val offsetInString = offset - dummyInterpolated.getTextRange.getStartOffset
val res = ScalaBasicCompletionContributor.getStartEndPointForInterpolatedString(interpolated, index, offsetInString)
if (res.isEmpty) return
val (exprStartInString, endPoint) = res.get
val stringText = interpolated.getText
val newInterpolated =
ScalaPsiElementFactory.createExpressionFromText(stringText.substring(0, exprStartInString) + "{" +
stringText.substring(exprStartInString, endPoint) + "}" +
stringText.substring(endPoint), position.getContext)
(newInterpolated.findElementAt(offsetInString + 1), false, true)
case _ => return
}
result.restartCompletionWhenNothingMatches()
val expectedTypesAfterNew: Array[ScType] =
if (afterNewPattern.accepts(position, context)) {
val element = position
val newExpr: ScNewTemplateDefinition = PsiTreeUtil.getContextOfType(element, classOf[ScNewTemplateDefinition])
newExpr.expectedTypes().map {
case ScAbstractType(_, lower, upper) => upper
case tp => tp
}
} else Array.empty
//if prefix is capitalized, class name completion is enabled
val classNameCompletion = shouldRunClassNameCompletion(positionFromParameters(parameters), parameters, result.getPrefixMatcher)
val insertedElement: PsiElement = position
if (!inString && !inInterpolatedString && !ScalaPsiUtil.fileContext(insertedElement).isInstanceOf[ScalaFile]) return
val lookingForAnnotations: Boolean =
Option(insertedElement.getContainingFile findElementAt (insertedElement.getTextOffset - 1)) exists {
_.getNode.getElementType == ScalaTokenTypes.tAT
}
var elementAdded = false
def addElement(el: LookupElement) {
if (result.getPrefixMatcher.prefixMatches(el))
elementAdded = true
result.addElement(el)
addedElements += el.getLookupString
}
position.getContext match {
case ref: ScReferenceElement =>
val isInImport = ScalaPsiUtil.getContextOfType(ref, true, classOf[ScImportStmt]) != null
def applyVariant(variant: Object, addElement: LookupElement => Unit = addElement) {
variant match {
case el: ScalaLookupItem =>
if (inString) el.isInSimpleString = true
if (inInterpolatedString) el.isInInterpolatedString = true
val elem = el.element
elem match {
case clazz: PsiClass =>
import scala.collection.mutable.{HashMap => MHashMap}
val renamedMap = new MHashMap[String, (String, PsiNamedElement)]
el.isRenamed.foreach(name => renamedMap += ((clazz.name, (name, clazz))))
val isExcluded: Boolean = ApplicationManager.getApplication.runReadAction(new Computable[Boolean] {
def compute: Boolean = {
JavaCompletionUtil.isInExcludedPackage(clazz, false)
}
})
if (!isExcluded && !classNameCompletion && (!lookingForAnnotations || clazz.isAnnotationType)) {
if (afterNewPattern.accepts(position, context)) {
addElement(getLookupElementFromClass(expectedTypesAfterNew, clazz, renamedMap))
} else {
addElement(el)
}
}
case _ if lookingForAnnotations =>
case f: FakePsiMethod if f.name.endsWith("_=") && parameters.getInvocationCount < 2 => //don't show _= methods for vars in basic completion
case fun: ScFun => addElement(el)
case param: ScClassParameter =>
addElement(el)
case patt: ScBindingPattern =>
val context = ScalaPsiUtil.nameContext(patt)
context match {
case memb: PsiMember =>
if (parameters.getInvocationCount > 1 ||
ResolveUtils.isAccessible(memb, position, forCompletion = true)) addElement(el)
case _ => addElement(el)
}
case memb: PsiMember =>
if (parameters.getInvocationCount > 1 || ResolveUtils.isAccessible(memb, position,
forCompletion = true))
addElement(el)
case _ => addElement(el)
}
case _ =>
}
}
def postProcessMethod(resolveResult: ScalaResolveResult) {
import org.jetbrains.plugins.scala.lang.psi.types.Nothing
val qualifierType = resolveResult.fromType.getOrElse(Nothing)
val lookupItems: Seq[ScalaLookupItem] = LookupElementManager.getLookupElement(
resolveResult,
isInImport = isInImport,
qualifierType = qualifierType,
isInStableCodeReference = ref.isInstanceOf[ScStableCodeReferenceElement])
lookupItems.foreach(applyVariant(_))
}
def completionProcessor(ref: ScReferenceElement,
collectImplicit: Boolean = false,
postProcess: ScalaResolveResult => Unit = postProcessMethod): CompletionProcessor =
new CompletionProcessor(ref.getKinds(incomplete = false, completion = true), ref, collectImplicit, postProcess = postProcess)
@tailrec
def addThisAndSuper(elem: PsiElement): Unit = {
elem match {
case t: ScNewTemplateDefinition => //do nothing, impossible to invoke
case t: ScTemplateDefinition =>
addElement(new ScalaLookupItem(t, t.name + ".this"))
addElement(new ScalaLookupItem(t, t.name + ".super"))
case _ =>
}
val context = elem.getContext
if (context != null) addThisAndSuper(context)
}
ref match {
case refImpl: ScStableCodeReferenceElementImpl => refImpl.doResolve(refImpl, completionProcessor(refImpl))
case refImpl: ScReferenceExpressionImpl =>
refImpl.doResolve(refImpl, completionProcessor(refImpl, collectImplicit = true))
if (ScalaCompletionUtil.completeThis(refImpl))
addThisAndSuper(refImpl)
case refImpl: ScTypeProjectionImpl => refImpl.doResolve(completionProcessor(refImpl))
case _ =>
for (variant <- ref.asInstanceOf[PsiReference].getVariants) {
applyVariant(variant)
}
}
if (!elementAdded && !classNameCompletion && ScalaCompletionUtil.shouldRunClassNameCompletion(
positionFromParameters(parameters), parameters,
result.getPrefixMatcher, checkInvocationCount = false, lookingForAnnotations = lookingForAnnotations)) {
ScalaClassNameCompletionContributor.completeClassName(dummyPosition, parameters, context, result)
}
//adds runtime completions for evaluate expression in debugger
val runtimeQualifierType: ScType = getQualifierCastType(ref, parameters)
if (runtimeQualifierType != null) {
def addElementWithDecorator(elem: LookupElement, decorator: InsertHandlerDecorator[LookupElement]) {
if (!addedElements.contains(elem.getLookupString)) {
val newElem = LookupElementDecorator.withInsertHandler(elem, decorator)
result.addElement(newElem)
addedElements += elem.getLookupString
}
}
def postProcess(resolveResult: ScalaResolveResult): Unit = {
val lookupItems: Seq[ScalaLookupItem] = LookupElementManager.getLookupElement(
resolveResult,
isInImport = isInImport,
qualifierType = runtimeQualifierType,
isInStableCodeReference = ref.isInstanceOf[ScStableCodeReferenceElement],
isInSimpleString = inString,
isInInterpolatedString = inInterpolatedString
)
val decorator = castDecorator(runtimeQualifierType.canonicalText)
lookupItems.foreach(item => applyVariant(item, addElementWithDecorator(_, decorator)))
}
val newRef = createReferenceWithQualifierType(runtimeQualifierType, ref.getContext, ref)
newRef match {
case refImpl: ScReferenceExpressionImpl =>
refImpl.doResolve(refImpl, completionProcessor(refImpl, collectImplicit = true, postProcess))
case _ =>
}
}
case _ =>
}
if (position.getNode.getElementType == ScalaDocTokenType.DOC_TAG_VALUE_TOKEN) result.stopHere()
}
})
/**
 * Returns an advertisement string shown in the completion popup, or `null`
 * for non-Scala files. The message pool currently contains a single `null`
 * entry, so no advertisement is effectively shown.
 */
override def advertise(parameters: CompletionParameters): String = {
  parameters.getOriginalFile match {
    case _: ScalaFile =>
      val messages = Array[String](
        null
      )
      messages((new Random).nextInt(messages.length))
    case _ => null
  }
}
/**
 * Chooses the dummy identifier that the platform inserts at the caret before
 * invoking completion, so that the file still parses.
 *
 * Rough strategy (NOTE(review): inferred from the branches below — confirm):
 *  - after an operator character, a run of `+` keeps the expression operator-like;
 *  - if the remaining reference text is a keyword, the non-trimmed dummy
 *    identifier is used so the keyword is not glued to following text;
 *  - a trailing backtick is appended next to a `tSTUB` token.
 */
override def getDummyIdentifier(offset: Int, file: PsiFile): String = {
  val element = file.findElementAt(offset)
  val ref = file.findReferenceAt(offset)
  if (element != null && ref != null) {
    val text = ref match {
      case ref: PsiElement => ref.getText
      case ref: PsiReference => ref.getElement.getText //this case for anonymous method in ScAccessModifierImpl
    }
    val id = if (isOpChar(text(text.length - 1))) {
      "+++++++++++++++++++++++"
    } else {
      // text of the reference that follows the caret position
      val rest = ref match {
        case ref: PsiElement => text.substring(offset - ref.getTextRange.getStartOffset + 1)
        case ref: PsiReference =>
          val from = offset - ref.getElement.getTextRange.getStartOffset + 1
          if (from < text.length && from >= 0) text.substring(from) else ""
      }
      if (ScalaNamesUtil.isKeyword(rest)) {
        CompletionUtil.DUMMY_IDENTIFIER
      } else {
        CompletionUtil.DUMMY_IDENTIFIER_TRIMMED
      }
    }
    if (ref.getElement != null &&
      ref.getElement.getPrevSibling != null &&
      ref.getElement.getPrevSibling.getNode.getElementType == ScalaTokenTypes.tSTUB) id + "`" else id
  } else {
    if (element != null && element.getNode.getElementType == ScalaTokenTypes.tSTUB) {
      CompletionUtil.DUMMY_IDENTIFIER_TRIMMED + "`"
    } else {
      val actualElement = file.findElementAt(offset + 1)
      if (actualElement != null && ScalaNamesUtil.isKeyword(actualElement.getText)) {
        CompletionUtil.DUMMY_IDENTIFIER
      } else {
        CompletionUtil.DUMMY_IDENTIFIER_TRIMMED
      }
    }
  }
}
/**
 * Invoked before a completion session starts: resets the per-session cache of
 * already-added lookup strings and installs the dummy identifier computed from
 * the character preceding the completion start offset.
 */
override def beforeCompletion(context: CompletionInitializationContext) {
  addedElements.clear()
  // inspect the character just before the start offset of the completion
  val offset: Int = context.getStartOffset - 1
  val file: PsiFile = context.getFile
  context.setDummyIdentifier(getDummyIdentifier(offset, file))
  super.beforeCompletion(context)
}
/** True if `c` can continue an operator identifier (checked by prefixing it with `+`). */
private def isOpChar(c: Char): Boolean =
  ScalaNamesUtil.isIdentifier(s"+$c")
/**
 * For a qualified reference expression, asks the debugger's runtime type
 * evaluator (if one is attached to the file) for the runtime type of the
 * qualifier. Returns `null` when there is no qualifier, no evaluator, or the
 * reference is not an expression.
 */
@Nullable
private def getQualifierCastType(ref: ScReferenceElement, parameters: CompletionParameters): ScType = {
  ref match {
    case refExpr: ScReferenceExpression =>
      refExpr.qualifier.map { qualifier =>
        val evaluator = refExpr.getContainingFile.getCopyableUserData(ScalaRuntimeTypeEvaluator.KEY)
        if (evaluator == null) null else evaluator(qualifier)
      }.orNull
    case _ => null
  }
}
/**
 * Synthesizes a reference expression whose qualifier is statically typed as
 * `qualType`, by parsing a small code block (`val xxx: T = null; xxx.xxx`)
 * in the given context and returning its last expression.
 */
private def createReferenceWithQualifierType(qualType: ScType, context: PsiElement, child: PsiElement): ScReferenceElement = {
  val text =
    s"""|{
| val xxx: ${qualType.canonicalText} = null
| xxx.xxx
|}""".stripMargin
  val block = ScalaPsiElementFactory.createExpressionWithContextFromText(text, context, child).asInstanceOf[ScBlock]
  block.exprs.last.asInstanceOf[ScReferenceElement]
}
/**
 * Insert handler decorator for runtime-type completions: after the delegate
 * item would be inserted, appends `.asInstanceOf[canonTypeName]` to the
 * reference's qualifier so the completed member type-checks.
 */
private def castDecorator(canonTypeName: String) = new InsertHandlerDecorator[LookupElement] {
  def handleInsert(context: InsertionContext, item: LookupElementDecorator[LookupElement]) {
    val document: Document = context.getEditor.getDocument
    // sync the PSI with the document before searching for the reference
    context.commitDocument()
    val file = PsiDocumentManager.getInstance(context.getProject).getPsiFile(document)
    val ref: ScReferenceElement =
      PsiTreeUtil.findElementOfClassAtOffset(file, context.getStartOffset, classOf[ScReferenceElement], false)
    if (ref != null) {
      ref.qualifier match {
        case None =>
        case Some(qual) =>
          val castString = s".asInstanceOf[$canonTypeName]"
          document.insertString(qual.getTextRange.getEndOffset, castString)
          // commit again so the cast is reflected in the PSI before adjusting types
          context.commitDocument()
          ScalaPsiUtil.adjustTypes(file)
          PsiDocumentManager.getInstance(file.getProject).doPostponedOperationsAndUnblockDocument(document)
          context.getEditor.getCaretModel.moveToOffset(context.getTailOffset)
      }
    }
    item.getDelegate.handleInsert(context)
  }
}
}
object ScalaBasicCompletionContributor {
/**
 * For a caret inside an interpolated string, computes the span (start, end) —
 * in offsets relative to the interpolated string — covering the injected
 * expression at `index` plus one trailing `.identifier` selection, or `None`
 * if there is no such selection covering `offsetInString`.
 *
 * NOTE(review): assumes `index` (when not -1) is a valid index into the
 * injections and that `pointPosition` lies inside the string text (the
 * closing quotes guarantee at least one following character) — confirm.
 */
def getStartEndPointForInterpolatedString(interpolated: ScInterpolated, index: Int, offsetInString: Int): Option[(Int, Int)] = {
  val injections = interpolated.getInjections
  if (index != -1) {
    val expr = injections(index)
    // block injections (${ ... }) are not handled here
    if (expr.isInstanceOf[ScBlock]) return None
    val stringText = interpolated.getText
    // offset of the character immediately after the injection, within the string text
    val pointPosition = expr.getTextRange.getEndOffset - interpolated.getTextRange.getStartOffset
    if (stringText.charAt(pointPosition) == '.') {
      val restString = stringText.substring(pointPosition + 1)
      val lexer = new ScalaLexer()
      // exclude the closing quotes from lexing: """ for multi-line strings, " otherwise
      val noQuotes = if (interpolated.isMultiLineString) 3 else 1
      lexer.start(restString, 0, restString.length - noQuotes)
      if (lexer.getTokenType == ScalaTokenTypes.tIDENTIFIER) {
        val endPoint = lexer.getTokenEnd + pointPosition + 1
        if (endPoint >= offsetInString) {
          val exprStartInString = expr.getTextRange.getStartOffset - interpolated.getTextRange.getStartOffset
          Some(exprStartInString, endPoint)
        } else None
      } else None
    } else None
  } else None
}
} | ghik/intellij-scala | src/org/jetbrains/plugins/scala/lang/completion/ScalaBasicCompletionContributor.scala | Scala | apache-2.0 | 20,130 |
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
/**
 * Reflection regression test (t5256e): obtains the runtime class symbol of the
 * inner class `C#A` and prints its symbol, fully-qualified name and info.
 * The printed output is pinned by the accompanying check file, so the code
 * must not be altered.
 */
object Test extends App {
  class C { class A { def foo = ??? } }
  val c = cm.classSymbol(classOf[C#A])
  println(c)
  println(c.fullName)
  println(c.info)
}
| som-snytt/dotty | tests/disabled/reflect/run/t5256e.scala | Scala | apache-2.0 | 252 |
package domain.model
/**
* GraPHPizer source code analytics engine
* Copyright (C) 2015 Martin Helmich <kontakt@martin-helmich.de>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import org.neo4j.graphdb.{Label, RelationshipType}
import scala.language.implicitConversions
/**
 * Graph node labels used when persisting the code model to Neo4j.
 *
 * NOTE(review): `scala.Enumeration` is generally discouraged in favour of a
 * sealed ADT, but the implicit conversion below relies on `Value.toString`
 * for the Neo4j label name, so the public API is kept as-is.
 */
object ModelLabelTypes extends Enumeration {
  // alias so call sites can write `ModelLabelTypes.LabelType`
  type LabelType = Value
  val Package, Class, Interface, Trait, Method, Type, Property, Parameter = Value

  /** Adapts a [[LabelType]] to a Neo4j [[Label]] whose name is the enum value's name. */
  implicit def conv(l: LabelType): Label = new Label {
    override def name(): String = l.toString
  }
}
| martin-helmich/graphpizer-server | app/domain/model/ModelLabelTypes.scala | Scala | gpl-3.0 | 1,143 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.scheduler
import java.util.concurrent.{ConcurrentHashMap, TimeUnit}
import scala.collection.JavaConverters._
import scala.util.Failure
import org.apache.spark.Logging
import org.apache.spark.rdd.PairRDDFunctions
import org.apache.spark.streaming._
import org.apache.spark.streaming.ui.UIUtils
import org.apache.spark.util.{EventLoop, ThreadUtils, Utils}
/** Messages processed by [[JobScheduler]]'s internal single-threaded event loop. */
private[scheduler] sealed trait JobSchedulerEvent
/** A streaming job has started executing on the job executor pool. */
private[scheduler] case class JobStarted(job: Job, startTime: Long) extends JobSchedulerEvent
/** A streaming job has finished (its result may still be a failure). */
private[scheduler] case class JobCompleted(job: Job, completedTime: Long) extends JobSchedulerEvent
/** An error reported from any part of the streaming machinery. */
private[scheduler] case class ErrorReported(msg: String, e: Throwable) extends JobSchedulerEvent
/**
 * This class schedules jobs to be run on Spark. It uses the JobGenerator to generate
 * the jobs and runs them using a thread pool.
 *
 * All internal state transitions go through the private single-threaded
 * [[EventLoop]], so the `handle*` methods below never run concurrently.
 */
private[streaming]
class JobScheduler(val ssc: StreamingContext) extends Logging {

  // Use of ConcurrentHashMap.keySet later causes an odd runtime problem due to Java 7/8 diff
  // https://gist.github.com/AlainODea/1375759b8720a3f9f094
  private val jobSets: java.util.Map[Time, JobSet] = new ConcurrentHashMap[Time, JobSet]

  // Number of streaming jobs allowed to run at once; defaults to one batch at a time.
  private val numConcurrentJobs = ssc.conf.getInt("spark.streaming.concurrentJobs", 1)
  private val jobExecutor =
    ThreadUtils.newDaemonFixedThreadPool(numConcurrentJobs, "streaming-job-executor")
  private val jobGenerator = new JobGenerator(this)
  val clock = jobGenerator.clock
  val listenerBus = new StreamingListenerBus()

  // These two are created only when scheduler starts.
  // eventLoop not being null means the scheduler has been started and not stopped
  var receiverTracker: ReceiverTracker = null
  // A tracker to track all the input stream information as well as processed record number
  var inputInfoTracker: InputInfoTracker = null

  private var eventLoop: EventLoop[JobSchedulerEvent] = null

  /** Starts the event loop, listener bus, receiver tracker and job generator. Idempotent. */
  def start(): Unit = synchronized {
    if (eventLoop != null) return // scheduler has already been started

    logDebug("Starting JobScheduler")
    eventLoop = new EventLoop[JobSchedulerEvent]("JobScheduler") {
      override protected def onReceive(event: JobSchedulerEvent): Unit = processEvent(event)

      override protected def onError(e: Throwable): Unit = reportError("Error in job scheduler", e)
    }
    eventLoop.start()

    // attach rate controllers of input streams to receive batch completion updates
    for {
      inputDStream <- ssc.graph.getInputStreams
      rateController <- inputDStream.rateController
    } ssc.addStreamingListener(rateController)

    listenerBus.start(ssc.sparkContext)
    receiverTracker = new ReceiverTracker(ssc)
    inputInfoTracker = new InputInfoTracker(ssc)
    receiverTracker.start()
    jobGenerator.start()
    logInfo("Started JobScheduler")
  }

  /**
   * Stops the scheduler. Idempotent.
   *
   * @param processAllReceivedData when true, queued jobs are allowed to drain
   *                               (waiting up to an hour) before forced shutdown
   */
  def stop(processAllReceivedData: Boolean): Unit = synchronized {
    if (eventLoop == null) return // scheduler has already been stopped
    logDebug("Stopping JobScheduler")

    if (receiverTracker != null) {
      // First, stop receiving
      receiverTracker.stop(processAllReceivedData)
    }

    // Second, stop generating jobs. If it has to process all received data,
    // then this will wait for all the processing through JobScheduler to be over.
    jobGenerator.stop(processAllReceivedData)

    // Stop the executor for receiving new jobs
    logDebug("Stopping job executor")
    jobExecutor.shutdown()

    // Wait for the queued jobs to complete if indicated
    val terminated = if (processAllReceivedData) {
      jobExecutor.awaitTermination(1, TimeUnit.HOURS) // just a very large period of time
    } else {
      jobExecutor.awaitTermination(2, TimeUnit.SECONDS)
    }
    if (!terminated) {
      jobExecutor.shutdownNow()
    }
    logDebug("Stopped job executor")

    // Stop everything else
    listenerBus.stop()
    eventLoop.stop()
    eventLoop = null
    logInfo("Stopped JobScheduler")
  }

  /** Submits the batch's jobs to the executor pool; empty job sets are only logged. */
  def submitJobSet(jobSet: JobSet) {
    if (jobSet.jobs.isEmpty) {
      logInfo("No jobs added for time " + jobSet.time)
    } else {
      listenerBus.post(StreamingListenerBatchSubmitted(jobSet.toBatchInfo))
      jobSets.put(jobSet.time, jobSet)
      jobSet.jobs.foreach(job => jobExecutor.execute(new JobHandler(job)))
      logInfo("Added jobs for time " + jobSet.time)
    }
  }

  /** Batch times whose job sets were submitted but have not yet fully completed. */
  def getPendingTimes(): Seq[Time] = {
    jobSets.asScala.keys.toSeq
  }

  /** Forwards an error into the event loop so it is handled on the scheduler thread. */
  def reportError(msg: String, e: Throwable) {
    eventLoop.post(ErrorReported(msg, e))
  }

  def isStarted(): Boolean = synchronized {
    eventLoop != null
  }

  /** Dispatches an event-loop message to the corresponding handler. */
  private def processEvent(event: JobSchedulerEvent) {
    try {
      event match {
        case JobStarted(job, startTime) => handleJobStart(job, startTime)
        case JobCompleted(job, completedTime) => handleJobCompletion(job, completedTime)
        case ErrorReported(m, e) => handleError(m, e)
      }
    } catch {
      case e: Throwable =>
        reportError("Error in job scheduler", e)
    }
  }

  /** Records a job start and posts batch/operation started events to listeners. */
  private def handleJobStart(job: Job, startTime: Long) {
    val jobSet = jobSets.get(job.time)
    val isFirstJobOfJobSet = !jobSet.hasStarted
    jobSet.handleJobStart(job)
    if (isFirstJobOfJobSet) {
      // "StreamingListenerBatchStarted" should be posted after calling "handleJobStart" to get the
      // correct "jobSet.processingStartTime".
      listenerBus.post(StreamingListenerBatchStarted(jobSet.toBatchInfo))
    }
    job.setStartTime(startTime)
    listenerBus.post(StreamingListenerOutputOperationStarted(job.toOutputOperationInfo))
    logInfo("Starting job " + job.id + " from job set of time " + jobSet.time)
  }

  /** Records a job completion; when the whole job set is done, notifies the generator and listeners. */
  private def handleJobCompletion(job: Job, completedTime: Long) {
    val jobSet = jobSets.get(job.time)
    jobSet.handleJobCompletion(job)
    job.setEndTime(completedTime)
    listenerBus.post(StreamingListenerOutputOperationCompleted(job.toOutputOperationInfo))
    logInfo("Finished job " + job.id + " from job set of time " + jobSet.time)
    if (jobSet.hasCompleted) {
      jobSets.remove(jobSet.time)
      jobGenerator.onBatchCompletion(jobSet.time)
      logInfo("Total delay: %.3f s for time %s (execution: %.3f s)".format(
        jobSet.totalDelay / 1000.0, jobSet.time.toString,
        jobSet.processingDelay / 1000.0
      ))
      listenerBus.post(StreamingListenerBatchCompleted(jobSet.toBatchInfo))
    }
    job.result match {
      case Failure(e) =>
        reportError("Error running job " + job, e)
      case _ =>
    }
  }

  private def handleError(msg: String, e: Throwable) {
    logError(msg, e)
    ssc.waiter.notifyError(e)
  }

  /**
   * Runnable executed on the job-executor pool: tags the Spark job with batch
   * metadata for the UI, runs it, and posts start/completion events.
   */
  private class JobHandler(job: Job) extends Runnable with Logging {
    import JobScheduler._

    def run() {
      try {
        val formattedTime = UIUtils.formatBatchTime(
          job.time.milliseconds, ssc.graph.batchDuration.milliseconds, showYYYYMMSS = false)
        val batchUrl = s"/streaming/batch/?id=${job.time.milliseconds}"
        val batchLinkText = s"[output operation ${job.outputOpId}, batch time ${formattedTime}]"

        ssc.sc.setJobDescription(
          s"""Streaming job from <a href="$batchUrl">$batchLinkText</a>""")
        ssc.sc.setLocalProperty(BATCH_TIME_PROPERTY_KEY, job.time.milliseconds.toString)
        ssc.sc.setLocalProperty(OUTPUT_OP_ID_PROPERTY_KEY, job.outputOpId.toString)

        // We need to assign `eventLoop` to a temp variable. Otherwise, because
        // `JobScheduler.stop(false)` may set `eventLoop` to null when this method is running, then
        // it's possible that when `post` is called, `eventLoop` happens to null.
        var _eventLoop = eventLoop
        if (_eventLoop != null) {
          _eventLoop.post(JobStarted(job, clock.getTimeMillis()))
          // Disable checks for existing output directories in jobs launched by the streaming
          // scheduler, since we may need to write output to an existing directory during checkpoint
          // recovery; see SPARK-4835 for more details.
          PairRDDFunctions.disableOutputSpecValidation.withValue(true) {
            job.run()
          }
          _eventLoop = eventLoop
          if (_eventLoop != null) {
            _eventLoop.post(JobCompleted(job, clock.getTimeMillis()))
          }
        } else {
          // JobScheduler has been stopped.
        }
      } finally {
        ssc.sc.setLocalProperty(JobScheduler.BATCH_TIME_PROPERTY_KEY, null)
        ssc.sc.setLocalProperty(JobScheduler.OUTPUT_OP_ID_PROPERTY_KEY, null)
      }
    }
  }
}
/** Local-property keys used to tag Spark jobs spawned by the streaming scheduler (read by the UI). */
private[streaming] object JobScheduler {
  val BATCH_TIME_PROPERTY_KEY = "spark.streaming.internal.batchTime"
  val OUTPUT_OP_ID_PROPERTY_KEY = "spark.streaming.internal.outputOpId"
}
| chenc10/Spark-PAF | streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobScheduler.scala | Scala | apache-2.0 | 9,486 |
package com.twitter.finagle.param
import com.twitter.finagle.Stack
/**
 * Provides the `withLoadBalancer` (default balancer) API entry point.
 *
 * The self-type requires the mixer to be `Stack.Parameterized`, so parameters
 * configured through the returned [[DefaultLoadBalancingParams]] can be applied
 * back onto the enclosing stack.
 *
 * @see [[DefaultLoadBalancingParams]]
 */
trait WithDefaultLoadBalancer[A <: Stack.Parameterized[A]] { self: Stack.Parameterized[A] =>

  /**
   * An entry point for configuring the client's load balancer that implements
   * a strategy for choosing one host/node from a replica set to service
   * a request.
   *
   * The default setup for a Finagle client is to use power of two choices
   * algorithm to distribute load across endpoints, and comparing nodes
   * via a least loaded metric.
   *
   * @see [[http://twitter.github.io/finagle/guide/Clients.html#load-balancing]]
   */
  val withLoadBalancer: DefaultLoadBalancingParams[A] = new DefaultLoadBalancingParams(self)
}
| liamstewart/finagle | finagle-core/src/main/scala/com/twitter/finagle/param/WithDefaultLoadBalancer.scala | Scala | apache-2.0 | 829 |
/** Compile-only test: ascriptions with singleton types and literal types. */
class Foo {
  def foo(x: Int) = {
    val a: x.type = x             // singleton type of a term parameter
    val b: Foo.type = Foo         // singleton type of a top-level object
    val c: Foo.Bar.type = Foo.Bar // singleton type of a nested object
    val d: 1 = 1                  // integer literal type
    val e: "abc" = "abc"          // string literal type
  }
}
/** Objects whose singleton types are used in `Foo#foo`. */
object Foo {
  object Bar
}
| som-snytt/dotty | tests/pos/simpleSingleton.scala | Scala | apache-2.0 | 193 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.joins
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.physical.Partitioning
import org.apache.spark.sql.catalyst.plans.{FullOuter, JoinType, LeftOuter, RightOuter}
import org.apache.spark.sql.execution.{BinaryNode, SparkPlan}
import org.apache.spark.sql.execution.metric.SQLMetrics
import org.apache.spark.util.collection.CompactBuffer
/**
 * Nested-loop join where one side (`buildSide`) is collected to the driver and
 * broadcast to every partition of the other ("streamed") side. Used for join
 * types/conditions the hash-based broadcast joins cannot handle.
 */
case class BroadcastNestedLoopJoin(
    left: SparkPlan,
    right: SparkPlan,
    buildSide: BuildSide,
    joinType: JoinType,
    condition: Option[Expression]) extends BinaryNode {
  // TODO: Override requiredChildDistribution.

  override private[sql] lazy val metrics = Map(
    "numLeftRows" -> SQLMetrics.createLongMetric(sparkContext, "number of left rows"),
    "numRightRows" -> SQLMetrics.createLongMetric(sparkContext, "number of right rows"),
    "numOutputRows" -> SQLMetrics.createLongMetric(sparkContext, "number of output rows"))

  /** BuildRight means the right relation <=> the broadcast relation. */
  private val (streamed, broadcast) = buildSide match {
    case BuildRight => (left, right)
    case BuildLeft => (right, left)
  }

  override def outputsUnsafeRows: Boolean = left.outputsUnsafeRows || right.outputsUnsafeRows
  override def canProcessUnsafeRows: Boolean = true

  // Projection applied to joined rows; converts to UnsafeRow only when downstream requires it.
  private[this] def genResultProjection: InternalRow => InternalRow = {
    if (outputsUnsafeRows) {
      UnsafeProjection.create(schema)
    } else {
      identity[InternalRow]
    }
  }

  override def outputPartitioning: Partitioning = streamed.outputPartitioning

  // Output attributes; the outer side of the join becomes nullable.
  override def output: Seq[Attribute] = {
    joinType match {
      case LeftOuter =>
        left.output ++ right.output.map(_.withNullability(true))
      case RightOuter =>
        left.output.map(_.withNullability(true)) ++ right.output
      case FullOuter =>
        left.output.map(_.withNullability(true)) ++ right.output.map(_.withNullability(true))
      case x =>
        throw new IllegalArgumentException(
          s"BroadcastNestedLoopJoin should not take $x as the JoinType")
    }
  }

  // Join predicate bound over the concatenated child schemas; absent condition means cross product.
  @transient private lazy val boundCondition =
    newPredicate(condition.getOrElse(Literal(true)), left.output ++ right.output)

  protected override def doExecute(): RDD[InternalRow] = {
    val (numStreamedRows, numBuildRows) = buildSide match {
      case BuildRight => (longMetric("numLeftRows"), longMetric("numRightRows"))
      case BuildLeft => (longMetric("numRightRows"), longMetric("numLeftRows"))
    }
    val numOutputRows = longMetric("numOutputRows")

    // Collect the build side on the driver and broadcast it to all partitions.
    val broadcastedRelation =
      sparkContext.broadcast(broadcast.execute().map { row =>
        numBuildRows += 1
        row.copy()
      }.collect().toIndexedSeq)

    /** All rows that either match both-way, or rows from streamed joined with nulls. */
    val matchesOrStreamedRowsWithNulls = streamed.execute().mapPartitions { streamedIter =>
      val matchedRows = new CompactBuffer[InternalRow]
      // TODO: Use Spark's BitSet.
      // tracks which broadcast rows matched at least once in this partition
      val includedBroadcastTuples =
        new scala.collection.mutable.BitSet(broadcastedRelation.value.size)
      val joinedRow = new JoinedRow

      val leftNulls = new GenericMutableRow(left.output.size)
      val rightNulls = new GenericMutableRow(right.output.size)
      val resultProj = genResultProjection

      streamedIter.foreach { streamedRow =>
        var i = 0
        var streamRowMatched = false
        numStreamedRows += 1

        while (i < broadcastedRelation.value.size) {
          val broadcastedRow = broadcastedRelation.value(i)
          buildSide match {
            case BuildRight if boundCondition(joinedRow(streamedRow, broadcastedRow)) =>
              matchedRows += resultProj(joinedRow(streamedRow, broadcastedRow)).copy()
              streamRowMatched = true
              includedBroadcastTuples += i
            case BuildLeft if boundCondition(joinedRow(broadcastedRow, streamedRow)) =>
              matchedRows += resultProj(joinedRow(broadcastedRow, streamedRow)).copy()
              streamRowMatched = true
              includedBroadcastTuples += i
            case _ =>
          }
          i += 1
        }

        // for outer joins, emit the unmatched streamed row padded with nulls
        (streamRowMatched, joinType, buildSide) match {
          case (false, LeftOuter | FullOuter, BuildRight) =>
            matchedRows += resultProj(joinedRow(streamedRow, rightNulls)).copy()
          case (false, RightOuter | FullOuter, BuildLeft) =>
            matchedRows += resultProj(joinedRow(leftNulls, streamedRow)).copy()
          case _ =>
        }
      }
      Iterator((matchedRows, includedBroadcastTuples))
    }

    // Union of per-partition match bitmaps: a broadcast row is "included" if any partition matched it.
    val includedBroadcastTuples = matchesOrStreamedRowsWithNulls.map(_._2)
    val allIncludedBroadcastTuples = includedBroadcastTuples.fold(
      new scala.collection.mutable.BitSet(broadcastedRelation.value.size)
    )(_ ++ _)

    val leftNulls = new GenericMutableRow(left.output.size)
    val rightNulls = new GenericMutableRow(right.output.size)
    val resultProj = genResultProjection

    /** Rows from broadcasted joined with nulls. */
    val broadcastRowsWithNulls: Seq[InternalRow] = {
      val buf: CompactBuffer[InternalRow] = new CompactBuffer()
      var i = 0
      val rel = broadcastedRelation.value
      (joinType, buildSide) match {
        case (RightOuter | FullOuter, BuildRight) =>
          val joinedRow = new JoinedRow
          joinedRow.withLeft(leftNulls)
          while (i < rel.length) {
            if (!allIncludedBroadcastTuples.contains(i)) {
              buf += resultProj(joinedRow.withRight(rel(i))).copy()
            }
            i += 1
          }
        case (LeftOuter | FullOuter, BuildLeft) =>
          val joinedRow = new JoinedRow
          joinedRow.withRight(rightNulls)
          while (i < rel.length) {
            if (!allIncludedBroadcastTuples.contains(i)) {
              buf += resultProj(joinedRow.withLeft(rel(i))).copy()
            }
            i += 1
          }
        case _ =>
      }
      buf.toSeq
    }

    // TODO: Breaks lineage.
    sparkContext.union(
      matchesOrStreamedRowsWithNulls.flatMap(_._1),
      sparkContext.makeRDD(broadcastRowsWithNulls)
    ).map { row =>
      // `broadcastRowsWithNulls` doesn't run in a job so that we have to track numOutputRows here.
      numOutputRows += 1
      row
    }
  }
}
| pronix/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.scala | Scala | apache-2.0 | 7,205 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.mvc
import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path, Paths }
import java.time.{ LocalDateTime, ZoneOffset }
import java.util.concurrent.atomic.AtomicInteger
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import org.specs2.mutable._
import play.api.http.HeaderNames._
import play.api.http._
import play.api.http.Status._
import play.api.i18n._
import play.api.{ Application, Play }
import play.core.test._
import scala.concurrent.Await
import scala.concurrent.duration._
/**
 * Specification for [[play.api.mvc.Result]]: status/header/cookie manipulation,
 * file/path serving (Content-Disposition handling), and redirect helpers.
 */
class ResultsSpec extends Specification {
  import scala.concurrent.ExecutionContext.Implicits.global

  import play.api.mvc.Results._

  implicit val fileMimeTypes: FileMimeTypes = new DefaultFileMimeTypesProvider(FileMimeTypesConfiguration()).get

  // monotonically increasing counter so every test gets a distinct temp file name
  val fileCounter = new AtomicInteger(1)
  def freshFileName: String = s"test${fileCounter.getAndIncrement}.tmp"

  /** Runs `block` with a freshly created empty file (deleted afterwards). */
  def withFile[T](block: (File, String) => T): T = {
    val fileName = freshFileName
    val file = new File(fileName)
    try {
      file.createNewFile()
      block(file, fileName)
    } finally file.delete()
  }

  /** NIO variant of [[withFile]]: runs `block` with a fresh `Path` (deleted afterwards). */
  def withPath[T](block: (Path, String) => T): T = {
    val fileName = freshFileName
    val file = Paths.get(fileName)
    try {
      Files.createFile(file)
      block(file, fileName)
    } finally Files.delete(file)
  }

  lazy val cookieHeaderEncoding = new DefaultCookieHeaderEncoding()
  lazy val sessionCookieBaker = new DefaultSessionCookieBaker()
  lazy val flashCookieBaker = new DefaultFlashCookieBaker()

  // bake the results cookies into the headers
  def bake(result: Result): Result = {
    result.bakeCookies(cookieHeaderEncoding, sessionCookieBaker, flashCookieBaker)
  }

  "Result" should {

    "have status" in {
      val Result(ResponseHeader(status, _, _), _, _, _, _) = Ok("hello")
      status must be_==(200)
    }

    "support Content-Type overriding" in {
      val Result(ResponseHeader(_, _, _), body, _, _, _) = Ok("hello").as("text/html")
      body.contentType must beSome("text/html")
    }

    "support headers manipulation" in {
      val Result(ResponseHeader(_, headers, _), _, _, _, _) =
        Ok("hello").as("text/html").withHeaders("Set-Cookie" -> "yes", "X-YOP" -> "1", "X-Yop" -> "2")

      headers.size must_== 2
      headers must havePair("Set-Cookie" -> "yes")
      // header names are case-insensitive: the later "X-Yop" replaces "X-YOP"
      headers must not havePair ("X-YOP" -> "1")
      headers must havePair("X-Yop" -> "2")
    }

    "support date headers manipulation" in {
      val Result(ResponseHeader(_, headers, _), _, _, _, _) =
        Ok("hello").as("text/html").withDateHeaders(DATE ->
          LocalDateTime.of(2015, 4, 1, 0, 0).atZone(ZoneOffset.UTC))
      headers must havePair(DATE -> "Wed, 01 Apr 2015 00:00:00 GMT")
    }

    "support cookies helper" in withApplication {
      val setCookieHeader = cookieHeaderEncoding.encodeSetCookieHeader(Seq(Cookie("session", "items"), Cookie("preferences", "blue")))

      val decodedCookies = cookieHeaderEncoding.decodeSetCookieHeader(setCookieHeader).map(c => c.name -> c).toMap
      decodedCookies.size must be_==(2)
      decodedCookies("session").value must be_==("items")
      decodedCookies("preferences").value must be_==("blue")

      val newCookieHeader = cookieHeaderEncoding.mergeSetCookieHeader(setCookieHeader, Seq(Cookie("lang", "fr"), Cookie("session", "items2")))

      val newDecodedCookies = cookieHeaderEncoding.decodeSetCookieHeader(newCookieHeader).map(c => c.name -> c).toMap
      newDecodedCookies.size must be_==(3)
      newDecodedCookies("session").value must be_==("items2")
      newDecodedCookies("preferences").value must be_==("blue")
      newDecodedCookies("lang").value must be_==("fr")

      val Result(ResponseHeader(_, headers, _), _, _, _, _) = bake {
        Ok("hello").as("text/html")
          .withCookies(Cookie("session", "items"), Cookie("preferences", "blue"))
          .withCookies(Cookie("lang", "fr"), Cookie("session", "items2"))
          .discardingCookies(DiscardingCookie("logged"))
      }

      val setCookies = cookieHeaderEncoding.decodeSetCookieHeader(headers("Set-Cookie")).map(c => c.name -> c).toMap
      setCookies must haveSize(4)
      setCookies("session").value must be_==("items2")
      setCookies("session").maxAge must beNone
      setCookies("preferences").value must be_==("blue")
      setCookies("lang").value must be_==("fr")
      setCookies("logged").maxAge must beSome(Cookie.DiscardedMaxAge)
    }

    "properly add and discard cookies" in {
      val result = Ok("hello").as("text/html")
        .withCookies(Cookie("session", "items"), Cookie("preferences", "blue"))
        .withCookies(Cookie("lang", "fr"), Cookie("session", "items2"))
        .discardingCookies(DiscardingCookie("logged"))

      result.newCookies.length must_== 4
      result.newCookies.find(_.name == "logged").map(_.value) must beSome("")

      val resultDiscarded = result.discardingCookies(DiscardingCookie("preferences"), DiscardingCookie("lang"))
      resultDiscarded.newCookies.length must_== 4
      resultDiscarded.newCookies.find(_.name == "preferences").map(_.value) must beSome("")
      resultDiscarded.newCookies.find(_.name == "lang").map(_.value) must beSome("")
    }

    "provide convenience method for setting cookie header" in withApplication {
      // helper: bakes two successive withCookies calls and compares the decoded Set-Cookie header
      def testWithCookies(
        cookies1: List[Cookie],
        cookies2: List[Cookie],
        expected: Option[Set[Cookie]]) = {
        val result = bake { Ok("hello").withCookies(cookies1: _*).withCookies(cookies2: _*) }
        result.header.headers.get("Set-Cookie").map(cookieHeaderEncoding.decodeSetCookieHeader(_).to[Set]) must_== expected
      }
      val preferencesCookie = Cookie("preferences", "blue")
      val sessionCookie = Cookie("session", "items")
      testWithCookies(
        List(),
        List(),
        None)
      testWithCookies(
        List(preferencesCookie),
        List(),
        Some(Set(preferencesCookie)))
      testWithCookies(
        List(),
        List(sessionCookie),
        Some(Set(sessionCookie)))
      testWithCookies(
        List(),
        List(sessionCookie, preferencesCookie),
        Some(Set(sessionCookie, preferencesCookie)))
      testWithCookies(
        List(sessionCookie, preferencesCookie),
        List(),
        Some(Set(sessionCookie, preferencesCookie)))
      testWithCookies(
        List(preferencesCookie),
        List(sessionCookie),
        Some(Set(preferencesCookie, sessionCookie)))
    }

    "support clearing a language cookie using clearingLang" in withApplication { app: Application =>
      implicit val messagesApi = app.injector.instanceOf[MessagesApi]
      val cookie = cookieHeaderEncoding.decodeSetCookieHeader(bake(Ok.clearingLang).header.headers("Set-Cookie")).head
      cookie.name must_== Play.langCookieName
      cookie.value must_== ""
    }

    "allow discarding a cookie by deprecated names method" in withApplication {
      cookieHeaderEncoding.decodeSetCookieHeader(bake(Ok.discardingCookies(DiscardingCookie("blah"))).header.headers("Set-Cookie")).head.name must_== "blah"
    }

    "allow discarding multiple cookies by deprecated names method" in withApplication {
      val baked = bake { Ok.discardingCookies(DiscardingCookie("foo"), DiscardingCookie("bar")) }
      val cookies = cookieHeaderEncoding.decodeSetCookieHeader(baked.header.headers("Set-Cookie")).map(_.name)
      cookies must containTheSameElementsAs(Seq("foo", "bar"))
    }

    "support sending a file with Ok status" in withFile { (file, fileName) =>
      val rh = Ok.sendFile(file).header

      (rh.status aka "status" must_== OK) and
        (rh.headers.get(CONTENT_DISPOSITION) aka "disposition" must beSome(s"""inline; filename="$fileName""""))
    }

    "support sending a file with Unauthorized status" in withFile { (file, fileName) =>
      val rh = Unauthorized.sendFile(file).header

      (rh.status aka "status" must_== UNAUTHORIZED) and
        (rh.headers.get(CONTENT_DISPOSITION) aka "disposition" must beSome(s"""inline; filename="$fileName""""))
    }

    "support sending a file attached with Unauthorized status" in withFile { (file, fileName) =>
      val rh = Unauthorized.sendFile(file, inline = false).header

      (rh.status aka "status" must_== UNAUTHORIZED) and
        (rh.headers.get(CONTENT_DISPOSITION) aka "disposition" must beSome(s"""attachment; filename="$fileName""""))
    }

    "support sending a file with PaymentRequired status" in withFile { (file, fileName) =>
      val rh = PaymentRequired.sendFile(file).header

      (rh.status aka "status" must_== PAYMENT_REQUIRED) and
        (rh.headers.get(CONTENT_DISPOSITION) aka "disposition" must beSome(s"""inline; filename="$fileName""""))
    }

    "support sending a file attached with PaymentRequired status" in withFile { (file, fileName) =>
      val rh = PaymentRequired.sendFile(file, inline = false).header

      (rh.status aka "status" must_== PAYMENT_REQUIRED) and
        (rh.headers.get(CONTENT_DISPOSITION) aka "disposition" must beSome(s"""attachment; filename="$fileName""""))
    }

    "support sending a file with filename" in withFile { (file, fileName) =>
      // non-ASCII file names are degraded to "?" plus an RFC 5987 filename* parameter
      val rh = Ok.sendFile(file, fileName = _ => "测 试.tmp").header

      (rh.status aka "status" must_== OK) and
        (rh.headers.get(CONTENT_DISPOSITION) aka "disposition" must beSome(s"""inline; filename="? ?.tmp"; filename*=utf-8''%e6%b5%8b%20%e8%af%95.tmp"""))
    }

    "support sending a path with Ok status" in withPath { (file, fileName) =>
      val rh = Ok.sendPath(file).header

      (rh.status aka "status" must_== OK) and
        (rh.headers.get(CONTENT_DISPOSITION) aka "disposition" must beSome(s"""inline; filename="$fileName""""))
    }

    "support sending a path with Unauthorized status" in withPath { (file, fileName) =>
      val rh = Unauthorized.sendPath(file).header

      (rh.status aka "status" must_== UNAUTHORIZED) and
        (rh.headers.get(CONTENT_DISPOSITION) aka "disposition" must beSome(s"""inline; filename="$fileName""""))
    }

    "support sending a path attached with Unauthorized status" in withPath { (file, fileName) =>
      val rh = Unauthorized.sendPath(file, inline = false).header

      (rh.status aka "status" must_== UNAUTHORIZED) and
        (rh.headers.get(CONTENT_DISPOSITION) aka "disposition" must beSome(s"""attachment; filename="$fileName""""))
    }

    "support sending a path with filename" in withPath { (file, fileName) =>
      val rh = Ok.sendPath(file, fileName = _ => "测 试.tmp").header

      (rh.status aka "status" must_== OK) and
        (rh.headers.get(CONTENT_DISPOSITION) aka "disposition" must beSome(s"""inline; filename="? ?.tmp"; filename*=utf-8''%e6%b5%8b%20%e8%af%95.tmp"""))
    }

    "allow checking content length" in withPath { (file, fileName) =>
      val content = "test"
      Files.write(file, content.getBytes(StandardCharsets.ISO_8859_1))
      val rh = Ok.sendPath(file)
      rh.body.contentLength must beSome(content.length)
    }

    "sendFile should honor onClose" in withFile { (file, fileName) =>
      implicit val system = ActorSystem()
      implicit val mat = ActorMaterializer()
      try {
        var fileSent = false
        val res = Results.Ok.sendFile(file, onClose = () => {
          fileSent = true
        })

        // Actually we need to wait until the Stream completes
        Await.ready(res.body.dataStream.runWith(Sink.ignore), 60.seconds)
        // and then we need to wait until the onClose completes
        // NOTE(review): fixed sleep makes this potentially flaky on slow machines
        Thread.sleep(500)

        fileSent must be_==(true)
      } finally {
        Await.ready(system.terminate(), 60.seconds)
      }
    }

    "support redirects for reverse routed calls" in {
      Results.Redirect(Call("GET", "/path")).header must_== Status(303).withHeaders(LOCATION -> "/path").header
    }

    "support redirects for reverse routed calls with custom statuses" in {
      Results.Redirect(Call("GET", "/path"), TEMPORARY_REDIRECT).header must_== Status(TEMPORARY_REDIRECT).withHeaders(LOCATION -> "/path").header
    }

    "redirect with a fragment" in {
      val url = "http://host:port/path?k1=v1&k2=v2"
      val fragment = "my-fragment"
      val expectedLocation = url + "#" + fragment
      Results.Redirect(Call("GET", url, fragment)).header.headers.get(LOCATION) must_== Option(expectedLocation)
    }

    "redirect with a fragment and status" in {
      val url = "http://host:port/path?k1=v1&k2=v2"
      val fragment = "my-fragment"
      val expectedLocation = url + "#" + fragment
      Results.Redirect(Call("GET", url, fragment), 301).header.headers.get(LOCATION) must_== Option(expectedLocation)
    }

    "brew coffee with a teapot, short and stout" in {
      val Result(ResponseHeader(status, _, _), body, _, _, _) = ImATeapot("no coffee here").as("short/stout")
      status must be_==(418)
      body.contentType must beSome("short/stout")
    }

    "brew coffee with a teapot, long and sweet" in {
      val Result(ResponseHeader(status, _, _), body, _, _, _) = ImATeapot("still no coffee here").as("long/sweet")
      status must be_==(418)
      body.contentType must beSome("long/sweet")
    }
  }
}
| Shruti9520/playframework | framework/src/play/src/test/scala/play/api/mvc/ResultsSpec.scala | Scala | apache-2.0 | 13,302 |
package skinny.micro.util
import skinny.micro.routing.PathPatternParser
import skinny.micro.rl.UrlCodingUtils
object UriDecoder {

  /**
   * First decoding pass: normalizes the raw URI into a consistently
   * percent-encoded form, then percent-decodes everything except the
   * characters the path-pattern parser treats as reserved (those are left
   * encoded for [[secondStep]]).
   */
  def firstStep(uri: String): String = {
    val normalized = UrlCodingUtils.ensureUrlEncoding(uri)
    UrlCodingUtils.urlDecode(
      toDecode = normalized,
      toSkip = PathPatternParser.PathReservedCharacters)
  }

  /**
   * Second decoding pass: resolves the reserved characters skipped by
   * [[firstStep]] — '#', '/' and '?' — from their percent-encoded forms,
   * applying each substitution in turn.
   */
  def secondStep(uri: String): String = {
    val substitutions = Seq("%23" -> "#", "%2F" -> "/", "%3F" -> "?")
    substitutions.foldLeft(uri) { case (decoded, (encoded, plain)) =>
      decoded.replaceAll(encoded, plain)
    }
  }
}
| xerial/skinny-micro | micro/src/main/scala/skinny/micro/util/UriDecoder.scala | Scala | bsd-2-clause | 462 |
package rest
import akka.actor.ActorSystem
import entities.JsonProtocol._
import entities.{Issue, SimpleIssue}
import org.scalatest.Matchers._
import service.IssuesService
import spray.http.HttpEntity
import spray.http.MediaTypes._
import spray.http.StatusCodes._
import spray.httpx.SprayJsonSupport._
import spray.routing.HttpService._
import utils.H2TestDbModule
/**
 * Route tests for the issue REST endpoints: each case drives a request
 * through the spray routes exposed by IssuesHttpService and checks the
 * resulting status, headers and entity.
 */
class IssuesRoutesTest extends AbstractRestTest {

  import scala.concurrent.Await
  import scala.concurrent.duration._

  // Service backed by the in-memory H2 test database.
  val service = new IssuesService with H2TestDbModule

  // The HTTP service under test, wired to the test actor system.
  val issues = new IssuesHttpService(service) {
    override implicit def actorRefFactory: ActorSystem = system
  }

  it should "return NonFound when Issue doesn't exist" in {
    Get("/issue/1") ~> issues.IssueGetRoute ~> check {
      status shouldEqual NotFound
      handled shouldBe true
    }
  }

  it should "return an issue" in {
    // Await the insert so the assertions below actually run inside the test
    // body. Previously they sat in a `for`-comprehension over the returned
    // Future, and that Future was discarded: the checks were never awaited,
    // so a failure could never surface. (Assumes insert yields a Future, as
    // the original `<-`/`yield` usage implies — confirm against IssuesService.)
    val issId = Await.result(service.insert(Issue(1, "", 0, "Foobar!", 0, 0)), 10.seconds)
    Get(s"/issue/$issId") ~> issues.IssueGetRoute ~> check {
      handled shouldBe true
      status shouldEqual OK
      responseAs[Option[Issue]].isEmpty shouldBe false
    }
  }

  it should "create a issue when getting POST request" in {
    Post("/issue", SimpleIssue("", 1, "iss", 2, 3)) ~> issues.IssuePostRoute ~> check {
      handled shouldBe true
      status shouldEqual Created
      // Location must point at the newly created resource.
      header("Location").get.value should fullyMatch regex "/issue/\\\\d*"
    }
  }

  it should "not handle the invalid json" in {
    Post("/issue", HttpEntity(`application/json`, """{"name":"foo", "password":"foo" }""")) ~>
      sealRoute(issues.IssuePostRoute) ~> check {
        status shouldEqual BadRequest
        response.entity.asString should include("malformed")
      }
  }

  it should "not handle an empty post" in {
    Post("/issue") ~> sealRoute(issues.IssuePostRoute) ~> check {
      status shouldEqual BadRequest
    }
  }

  it should "not handle wrong URL" in {
    Post("/issues") ~> sealRoute(issues.IssuePostRoute) ~> check {
      status shouldEqual NotFound
    }
  }

  it should "posted entity should be the same as original" in {
    val issue = Issue(0, "", 1, "Foobar!", 2, 3)
    Post("/issue", issue.toSimple) ~> issues.IssuePostRoute ~> check {
      handled shouldBe true
      status shouldEqual Created
      val location = header("Location")
      location shouldBe defined
      // Read the entity back through the Location header and compare,
      // ignoring the server-assigned id.
      Get(location.get.value) ~> issues.IssueGetRoute ~> check {
        val obtained = responseAs[Issue]
        obtained shouldEqual issue.copy(id = obtained.id)
      }
    }
  }
}
| Kanris826/spray-slick-swagger | src/test/scala/rest/IssuesRoutesTest.scala | Scala | apache-2.0 | 2,491 |
package edu.gemini.osgi.tools.idea
import java.io.File
import edu.gemini.osgi.tools.FileUtils._
object IdeaProject {

// True when node `n` carries attribute `a` whose text is exactly `v`.
// Used to find named entries (e.g. run configurations) inside an existing
// project XML tree so they can be replaced rather than duplicated.
def containsAttribute(n: xml.Node, a: String, v: String): Boolean =
n.attribute(a).exists(_.text == v)

// IDEA project components that never depend on the application being
// generated: compiler configuration (including the wildcard resource
// patterns copied verbatim into the build output), inspection profile,
// javadoc generation settings and nullability annotation configuration.
// Returned as a NodeBuffer so the caller can splice the sequence of
// <component> elements directly into the <project> element.
// (No comments inside the literal below: a Scala comment between adjacent
// XML literals would split the NodeBuffer expression.)
def fixedComponents: xml.NodeBuffer =
<component name="CompilerConfiguration">
<option name="DEFAULT_COMPILER" value="Javac" />
<resourceExtensions />
<wildcardResourcePatterns>
<entry name="?*.properties" />
<entry name="?*.xml" />
<entry name="?*.gif" />
<entry name="?*.png" />
<entry name="?*.jpeg" />
<entry name="?*.jpg" />
<entry name="?*.html" />
<entry name="?*.css" />
<entry name="?*.xsl" />
<entry name="?*.dtd" />
<entry name="?*.tld" />
<entry name="?*.txt" />
<entry name="?*.ftl" />
<entry name="?*.xsd" />
<entry name="?*.cfg" />
<entry name="?*.conf" />
<entry name="?*.csv" />
<entry name="?*.aiff" />
<entry name="?*.jks" />
<entry name="?*.vm" />
<entry name="?*.fits" />
</wildcardResourcePatterns>
<annotationProcessing enabled="false" useClasspath="true" />
</component>
<component name="DependencyValidationManager">
<option name="SKIP_IMPORT_STATEMENTS" value="false" />
</component>
<component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false" />
<component name="EntryPointsManager">
<entry_points version="2.0" />
</component>
<component name="InspectionProjectProfileManager">
<profiles>
<profile version="1.0" is_locked="false">
<option name="myName" value="Project Default" />
<option name="myLocal" value="false" />
<inspection_tool class="CloneDeclaresCloneNotSupported" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="JavaDoc" enabled="false" level="WARNING" enabled_by_default="false">
<option name="TOP_LEVEL_CLASS_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="INNER_CLASS_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="METHOD_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="@return@param@throws or @exception" />
</value>
</option>
<option name="FIELD_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="IGNORE_DEPRECATED" value="false" />
<option name="IGNORE_JAVADOC_PERIOD" value="true" />
<option name="IGNORE_DUPLICATED_THROWS" value="false" />
<option name="IGNORE_POINT_TO_ITSELF" value="false" />
<option name="myAdditionalJavadocTags" value="" />
</inspection_tool>
<inspection_tool class="NonJREEmulationClassesInClientCode" level="ERROR" enabled="false" />
<inspection_tool class="SimplifiableIfStatement" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="StringBufferReplaceableByStringBuilder" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="SuspiciousNameCombination" enabled="false" level="WARNING" enabled_by_default="false">
<group names="x,width,left,right" />
<group names="y,height,top,bottom" />
</inspection_tool>
</profile>
</profiles>
<option name="PROJECT_PROFILE" value="Project Default" />
<option name="USE_PROJECT_PROFILE" value="true" />
<version value="1.0" />
</component>
<component name="JavadocGenerationManager">
<option name="OUTPUT_DIRECTORY" />
<option name="OPTION_SCOPE" value="protected" />
<option name="OPTION_HIERARCHY" value="true" />
<option name="OPTION_NAVIGATOR" value="true" />
<option name="OPTION_INDEX" value="true" />
<option name="OPTION_SEPARATE_INDEX" value="true" />
<option name="OPTION_DOCUMENT_TAG_USE" value="false" />
<option name="OPTION_DOCUMENT_TAG_AUTHOR" value="false" />
<option name="OPTION_DOCUMENT_TAG_VERSION" value="false" />
<option name="OPTION_DOCUMENT_TAG_DEPRECATED" value="true" />
<option name="OPTION_DEPRECATED_LIST" value="true" />
<option name="OTHER_OPTIONS" value="" />
<option name="HEAP_SIZE" />
<option name="LOCALE" />
<option name="OPEN_IN_BROWSER" value="true" />
</component>
<component name="NullableNotNullManager">
<option name="myDefaultNullable" value="org.jetbrains.annotations.Nullable" />
<option name="myDefaultNotNull" value="org.jetbrains.annotations.NotNull" />
<option name="myNullables">
<value>
<list size="0" />
</value>
</option>
<option name="myNotNulls">
<value>
<list size="3">
<item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.NotNull" />
<item index="1" class="java.lang.String" itemvalue="javax.annotation.Nonnull" />
<item index="2" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.NonNull" />
</list>
</value>
</option>
</component>
}
import IdeaProject._
import sbt.ScalaInstance
// Generates the IntelliJ IDEA project file (.ipr) for an OSGi application:
// artifacts for each source bundle, module references for the given .iml
// files, JDK/output settings, run configuration and the Scala SDK library.
class IdeaProject(idea: Idea, scalaInstance: ScalaInstance, imls: List[File]) {

// Name and location of the generated .ipr file, derived from the app id.
val name = "%s.ipr".format(idea.app.id)
val iprFile = new File(idea.projDir, name)

// def imlFile(bl: BundleLoc) = new File(bl.loc, "%s.iml".format(IdeaModule.moduleName(bl)))

// Skeleton .ipr document: the fixed components from the companion object
// plus placeholder components that updatedProject overwrites
// (ArtifactManager, ProjectModuleManager, ProjectRunConfigurationManager).
private def emptyProject(javaVersion: String): xml.Elem =
<project version="4">
<component name="ArtifactManager"/>
{fixedComponents}
<component name="ProjectModuleManager">
<modules/>
</component>
<component name="ProjectResources">
<default-html-doctype>http://www.w3.org/1999/xhtml</default-html-doctype>
</component>
{rootComponent(javaVersion)}
<component name="ProjectRunConfigurationManager"/>
<component name="ScalacSettings">
<option name="SCALAC_BEFORE" value="false" />
</component>
<component name="ScalacSettings">
<option name="COMPILER_LIBRARY_NAME" value="scala-compiler" />
<option name="COMPILER_LIBRARY_LEVEL" value="Project" />
</component>
<component name="SvnBranchConfigurationManager">
<option name="mySupportsUserInfoFilter" value="true" />
</component>
<component name="VcsDirectoryMappings"/>
{libraryTable}
<component name="ScalaCompilerConfiguration" />
</project>

// Currently always starts from a fresh project; the commented-out line
// below shows a disabled load-from-existing-.ipr path.
private def initialProject(javaVersion: String): xml.Elem =
emptyProject(javaVersion: String)
// if (iprFile.exists()) xml.XML.loadFile(iprFile) else emptyProject(javaVersion: String)

// Replaces the generated components (keyed by their "name" attribute) while
// keeping any others, then re-emits the <project> with keys sorted for a
// stable, diff-friendly output order.
private def updatedProject(proj: xml.Elem): xml.Elem = {
val initMap = components(proj)
val compMap = initMap ++ List(
"ArtifactManager" -> artifactComponent,
"ProjectModuleManager" -> moduleComponent,
"ProjectRunConfigurationManager" -> updatedRunConfigurationComponent(initMap.get("ProjectRunConfigurationManager")) //,
// "VcsDirectoryMappings" -> vcsComponent
)
<project version="4">{ compMap.keys.toList.sorted.map(compMap) }</project>
}

// Indexes the project's direct children by their "name" attribute.
private def components(proj: xml.Elem): Map[String, xml.Node] =
(proj.child map { n => (n \ "@name").text -> n }).toMap

// Entry point: builds the complete .ipr document for the given JDK version.
def project(javaVersion: String): xml.Elem = updatedProject(initialProject(javaVersion))
/*
def project: xml.Elem =
<project version="4">
{artifactComponent}
{IdeaProject.fixedComponents}
{moduleComponent}
<component name="ProjectResources">
<default-html-doctype>http://www.w3.org/1999/xhtml</default-html-doctype>
</component>
{rootComponent}
{runConfigurationComponent}
<component name="ScalacSettings">
<option name="SCALAC_BEFORE" value="false" />
</component>
<component name="SvnBranchConfigurationManager">
<option name="mySupportsUserInfoFilter" value="true" />
</component>
{vcsComponent}
</project>
*/

// Renders `to` relative to the project directory using IDEA's
// $PROJECT_DIR$ macro.
private def projRelativePath(to: File): String =
"$PROJECT_DIR$/%s".format(relativePath(idea.projDir, to))

// One jar artifact per source bundle of the application.
def artifactComponent: xml.Elem =
<component name="ArtifactManager">
{idea.app.srcBundles.map(bl => artifact(bl))}
</component>

// Jar artifact for a bundle: its MANIFEST.MF, any jars from the bundle's
// lib/ directory, and the compiled module output.
private def artifact(bl: BundleLoc): xml.Elem =
<artifact type="jar" build-on-make="true" name={artifactName(bl)}>
<output-path>{projRelativePath(idea.distBundleDir)}</output-path>
<root id="archive" name={"%s-%s.jar".format(bl.name, bl.version)}>
<element id="directory" name="META-INF">
<element id="file-copy" path={projRelativePath(new File(bl.loc, "META-INF/MANIFEST.MF"))} />
</element>
<element id="directory" name="lib">
{privateLibs(bl).map(lib => <element id="file-copy" path={projRelativePath(lib)} />)}
</element>
<element id="module-output" name={IdeaModule.moduleName(bl)} />
</root>
</artifact>

// Jars bundled privately with the bundle (its lib/ directory, if present).
private def privateLibs(bl: BundleLoc): List[File] = {
val libDir = new File(bl.loc, "lib")
if (libDir.exists()) libDir.listFiles(jarFilter).toList else List.empty
}

// TODO: felix module, see below
// Registers one <module> entry per generated .iml file.
def moduleComponent: xml.Elem =
<component name="ProjectModuleManager">
<modules>
{imls.map(module)}
</modules>
</component>
// def moduleComponent: xml.Elem =
// <component name="ProjectModuleManager">
// <modules>
// {imls.map(module)}
// <module fileurl="file://$PROJECT_DIR$/felix.iml" filepath="$PROJECT_DIR$/felix.iml" />
// </modules>
// </component>

private def module(iml: File): xml.Elem = {
val imlPath = projRelativePath(iml)
<module fileurl={"file://%s".format(imlPath)} filepath={imlPath} />
}

// JDK selection and compile output directory; the language level string is
// derived from the version, e.g. "1.8" -> "JDK_1_8".
def rootComponent(javaVersion: String): xml.Elem =
<component name="ProjectRootManager" version="2" languageLevel={"JDK_" + javaVersion.replace('.','_') } assert-keyword="true" jdk-15="true" project-jdk-name={ javaVersion } project-jdk-type="JavaSDK">
<output url={"file://%s".format(projRelativePath(idea.distOutDir))} />
</component>

// NOTE(review): `vcs` ignores its BundleLoc argument, so this maps every
// source bundle to the same "$PROJECT_DIR$" Git mapping, emitting one
// duplicate entry per bundle. Currently unused — see the commented-out
// "VcsDirectoryMappings" entry in updatedProject.
def vcsComponent: xml.Elem =
<component name="VcsDirectoryMappings">
{idea.app.srcBundles.map(vcs)}
</component>

private def vcs(bl: BundleLoc): xml.Elem =
<mapping directory="$PROJECT_DIR$" vcs="Git" />

// Run configuration component built from scratch (no existing configs kept).
def runConfigurationComponent: xml.Elem =
<component name="ProjectRunConfigurationManager">
{appRunConfiguration}
</component>

// Run configuration component that preserves any configurations found in an
// existing project file (see runConfigurations below).
def updatedRunConfigurationComponent(existing: Option[xml.Node]): xml.Elem =
<component name="ProjectRunConfigurationManager">
{runConfigurations(existing)}
</component>

// Keep all existing run configurations (if any) except the main one, which
// is updated.
private def runConfigurations(existing: Option[xml.Node]): Seq[xml.Node] =
existing map { opt =>
appRunConfiguration +: opt.child.filterNot(containsAttribute(_, "name", idea.appName))
} getOrElse Seq(appRunConfiguration)

// Main run configuration: launches the Felix OSGi container and rebuilds
// all bundle artifacts before each run (see buildArtifacts).
private def appRunConfiguration: xml.Elem =
<configuration default="false" name={idea.appName} type="Application" factoryName="Application">
<option name="MAIN_CLASS_NAME" value="org.apache.felix.main.Main" />
<option name="VM_PARAMETERS" value={vmargs} />
<option name="PROGRAM_PARAMETERS" value="" />
<option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
<option name="ALTERNATIVE_JRE_PATH" value="" />
<option name="ENABLE_SWING_INSPECTOR" value="false" />
<option name="ENV_VARIABLES" />
<option name="PASS_PARENT_ENVS" value="true" />
<module name="felix" />
<envs />
<RunnerSettings RunnerId="Run" />
<ConfigurationWrapper RunnerId="Run" />
<method>
{buildArtifacts}
</method>
</configuration>

// Application VM args plus the pointer to the Felix configuration file.
private def vmargs: String =
"%s -Dfelix.config.properties=file:felix-config.properties".format(idea.app.vmargs.mkString(" "))

// Before-run step: build the artifact of every source bundle.
private def buildArtifacts: xml.Elem =
<option name="BuildArtifacts" enabled="true">
{idea.app.srcBundles.map(bl => <artifact name={artifactName(bl)} />)}
</option>

private def artifactName(bl: BundleLoc): String = "%s_%s".format(bl.name, bl.version)

// Scala language level of the running compiler, e.g. "2.11.8" -> "Scala_2_11".
private def languageLevel: String =
scala.util.Properties.versionNumberString.split('.').take(2).mkString("Scala_", "_", "")

// Project-level Scala SDK library pointing at the compiler jars of the
// provided ScalaInstance.
private def libraryTable: xml.Elem =
<component name="libraryTable">
<library name="scala-sdk" type="Scala">
<properties>
<option name="languageLevel" value={languageLevel} />
<compiler-classpath>
{scalaInstance.allJars.map(jarUrl)}
</compiler-classpath>
</properties>
<CLASSES />
<JAVADOC />
<SOURCES />
</library>
</component>

private def jarUrl(jarFile: File): xml.Elem =
<root url={s"jar://${projRelativePath(jarFile)}"} />
}
| arturog8m/ocs | project/src/main/scala/edu/gemini/osgi/tools/idea/IdeaProject.scala | Scala | bsd-3-clause | 13,714 |
package scalan
package it.lms
import scalan.community.ScalanCommunityDslExp
import scalan.compilation.lms._
import scalan.compilation.lms.scalac.LmsCompilerScala
import scalan.it.smoke.CommunitySqlItTests
import scalan.sql._
/**
 * LMS-backed integration tests for the SQL examples: each test compiles the
 * staged program through the Scala LMS backend and compares its output
 * against the sequential interpretation of the same program.
 */
class LmsSqlItTests extends CommunitySqlItTests {
  // Staged program wired to the community LMS backend / Scala compiler.
  class ProgExp extends ProgCommunity with ScalanCommunityDslExp with SqlDslExp with TablesDslExp with LmsCompilerScala { self =>
    def makeBridge[A, B] = new CoreBridge[A, B] {
      val scalan = self
      val lms = new CommunityLmsBackend
    }
  }

  override val progStaged = new ProgExp

  test("test24simpleSelectTest") {
    val in = Array((1, 1.1), (2, 2.2), (3, 3.3), (4, 4.4), (5, 5.5))
    compareOutputWithSequential(progStaged)(progSeq.selectUsingIndex, progStaged.selectUsingIndex, "selectUsingIndex", in)
  }

  test("test25innerJoin") {
    val in = (Array((1, 1.0), (2, 0.0), (3, 1.0), (4, 1.0), (5, 0.0)), Array(("one", 1), ("two", 2), ("three", 3), ("four", 4), ("five", 5)))
    compareOutputWithSequential(progStaged)(progSeq.innerJoin, progStaged.innerJoin, "innerJoin", in)
  }

  test("test26hashJoin") {
    val in = (Array((1, 1.0), (2, 0.0), (3, 1.0), (4, 1.0), (5, 0.0)), Array(("one", 1), ("two", 2), ("three", 1), ("four", 4), ("five", 5)))
    compareOutputWithSequential(progStaged)(progSeq.hashJoin, progStaged.hashJoin, "hashJoin", in)
  }

  test("test27simpleIf") {
    val in = (Array(2.0, 3.0), 4.0)
    compareOutputWithSequential(progStaged)(progSeq.simpleIf, progStaged.simpleIf, "simpleIf", in)
  }

  /*
  test("test27ifTest") {
    val in = (1, 0.0)
    compareOutputWithSequential(progStaged)(progSeq.ifTest, progStaged.ifTest, "ifTest", in)
  }
  */

  test("test28selectCount") {
    val in = Array((1, 1.1), (2, 2.2), (3, 3.3), (4, 4.4), (5, 5.5))
    compareOutputWithSequential(progStaged)(progSeq.selectCount, progStaged.selectCount, "selectCount", in)
  }

  test("test29sqlBenchmark") {
    val in = Array((1, "11243"), (2, "21235"), (3, "12343"), (4, "13455"), (5, "543123"))
    compareOutputWithSequential(progStaged)(progSeq.sqlBenchmark, progStaged.sqlBenchmark, "sqlBenchmark", in)
  }

  test("test30groupBy") {
    val in = Array((1, 1.1), (2, 2.2), (3, 3.3), (4, 4.4), (5, 5.5))
    compareOutputWithSequential(progStaged)(progSeq.groupBy, progStaged.groupBy, "groupBy", in)
  }

  test("test31columnarStore") {
    val in = Array((1, "11243"), (2, "21235"), (3, "12343"), (4, "13455"), (5, "543123"))
    compareOutputWithSequential(progStaged)(progSeq.columnarStore, progStaged.columnarStore, "columnarStore", in)
  }

  test("testColumnarStoreR3") {
    val in = Array(("11243", (1, 1.0)), ("21235", (2, 2.0)), ("12343", (3, 3.0)), ("13455", (4, 4.0)), ("543123", (5, 5.0)))
    compareOutputWithSequential(progStaged)(progSeq.columnarStoreR3, progStaged.columnarStoreR3, "columnarStoreR3", in)
  }

  test("test32sqlParBenchmark") {
    val in = Array((1, "11243"), (2, "21235"), (3, "12343"), (4, "13455"), (5, "543123"))
    compareOutputWithSequential(progStaged)(progSeq.sqlParBenchmark, progStaged.sqlParBenchmark, "sqlParBenchmark", in)
  }

  test("test34sqlIndexBenchmark") {
    val in = Array((1, "11243"), (2, "21235"), (3, "12343"), (4, "13455"), (5, "543123"))
    compareOutputWithSequential(progStaged)(progSeq.sqlIndexBenchmark, progStaged.sqlIndexBenchmark, "sqlIndexBenchmark", in)
  }

  test("test35sqlGroupBy") {
    val in = Array((1, "red"), (2, "green"), (3, "red"), (4, "blue"), (5, "blue"), (6, "green"), (7, "red"), (8, "blue"), (9, "red"))
    compareOutputWithSequential(progStaged)(progSeq.sqlGroupBy, progStaged.sqlGroupBy, "sqlGroupBy", in)
  }

  test("test36sqlMapReduce") {
    val in = (4, Array((1, "red"), (2, "green"), (3, "red"), (4, "blue"), (5, "blue"), (6, "green"), (7, "red"), (8, "blue"), (9, "red")))
    compareOutputWithSequential(progStaged)(progSeq.sqlMapReduce, progStaged.sqlMapReduce, "sqlMapReduce", in)
  }

  test("test37sqlParallelJoin") {
    val in = (Array((1, "red"), (2, "green"), (3, "blue")), (Array((1, "one"), (2, "two"), (3, "three")), Array((1, "true"), (2, "false"), (3, "unknown"))))
    compareOutputWithSequential(progStaged)(progSeq.sqlParallelJoin, progStaged.sqlParallelJoin, "sqlParallelJoin", in)
  }

  test("test38sqlAggJoin") {
    val in = (Array((1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10)),
      (Array((1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10)),
        (Array((1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10)),
          (Array((1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10)),
            (Array((1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10)), 4)))))
    compareOutputWithSequential(progStaged)(progSeq.sqlAggJoin, progStaged.sqlAggJoin, "sqlAggJoin", in)
  }

  test("test39sqlIndexJoin") {
    val in = (Array((1, "red"), (2, "green"), (3, "blue")), Array((1, "one"), (2, "two"), (1, "three"), (1, "four"), (2, "five")))
    compareOutputWithSequential(progStaged)(progSeq.sqlIndexJoin, progStaged.sqlIndexJoin, "sqlIndexJoin", in)
  }

  test("test40sqlColumnarStoreBenchmark") {
    val in = (4, Array((1, "11243"), (2, "21235"), (3, "12343"), (4, "13455"), (5, "543123")))
    compareOutputWithSequential(progStaged)(progSeq.sqlColumnarStoreBenchmark, progStaged.sqlColumnarStoreBenchmark, "sqlColumnarStoreBenchmark", in)
  }

  test("test41sqlDsl") {
    val in = 10
    compareOutputWithSequential(progStaged)(progSeq.sqlDsl, progStaged.sqlDsl, "sqlDsl", in)
  }

  val tpch_data_dir = "../benchmarks/sql/"

  // Loads a '|'-separated TPC-H table dump as one Array[String] of column
  // values per input line. Centralizes the file handling that was previously
  // copy-pasted into every TPC-H test below, and closes the file handle —
  // the old code leaked one open Source per table read. toArray materializes
  // the iterator before the finally clause closes the source.
  private def readTable(fileName: String): Array[Array[String]] = {
    val src = scala.io.Source.fromFile(tpch_data_dir + fileName)
    try src.getLines().map(s => s.split("\\\\|")).toArray
    finally src.close()
  }

  test("tpchQ1") {
    val in = readTable("lineitem-small.tbl")
    compareOutputWithSequential(progStaged)(progSeq.tpchQ1, progStaged.tpchQ1, "tpchQ1", in)
  }

  test("TPCH_Q1_hor_seq") {
    val in = readTable("lineitem-small.tbl")
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q1_hor_seq, progStaged.TPCH_Q1_hor_seq, "TPCH_Q1_hor_seq", in)
  }

  test("TPCH_Q1_ver_seq") {
    val in = readTable("lineitem-small.tbl")
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q1_ver_seq, progStaged.TPCH_Q1_ver_seq, "TPCH_Q1_ver_seq", in)
  }

  test("TPCH_Q1_hor_par") {
    val in = readTable("lineitem-small.tbl")
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q1_hor_par, progStaged.TPCH_Q1_hor_par, "TPCH_Q1_hor_par", in)
  }

  test("TPCH_Q1_ver_par") {
    val in = readTable("lineitem-small.tbl")
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q1_ver_par, progStaged.TPCH_Q1_ver_par, "TPCH_Q1_ver_par", in)
  }

  test("TPCH_Q3_hor_seq") {
    val in = Array(readTable("customer-small.tbl"), readTable("orders-small.tbl"), readTable("lineitem-small.tbl"))
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q3_hor_seq, progStaged.TPCH_Q3_hor_seq, "TPCH_Q3_hor_seq", in)
  }

  test("TPCH_Q3_ver_seq") {
    val in = Array(readTable("customer-small.tbl"), readTable("orders-small.tbl"), readTable("lineitem-small.tbl"))
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q3_ver_seq, progStaged.TPCH_Q3_ver_seq, "TPCH_Q3_ver_seq", in)
  }

  test("TPCH_Q3_hor_par") {
    val in = Array(readTable("customer-small.tbl"), readTable("orders-small.tbl"), readTable("lineitem-small.tbl"))
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q3_hor_par, progStaged.TPCH_Q3_hor_par, "TPCH_Q3_hor_par", in)
  }

  test("TPCH_Q3_ver_par") {
    val in = Array(readTable("customer-small.tbl"), readTable("orders-small.tbl"), readTable("lineitem-small.tbl"))
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q3_ver_par, progStaged.TPCH_Q3_ver_par, "TPCH_Q3_ver_par", in)
  }

  test("TPCH_Q10_hor_seq") {
    val in = Array(readTable("customer-small.tbl"), readTable("orders-small.tbl"), readTable("lineitem-small.tbl"), readTable("nation-small.tbl"))
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q10_hor_seq, progStaged.TPCH_Q10_hor_seq, "TPCH_Q10_hor_seq", in)
  }

  test("TPCH_Q10_ver_seq") {
    val in = Array(readTable("customer-small.tbl"), readTable("orders-small.tbl"), readTable("lineitem-small.tbl"), readTable("nation-small.tbl"))
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q10_ver_seq, progStaged.TPCH_Q10_ver_seq, "TPCH_Q10_ver_seq", in)
  }

  test("TPCH_Q10_hor_par") {
    val in = Array(readTable("customer-small.tbl"), readTable("orders-small.tbl"), readTable("lineitem-small.tbl"), readTable("nation-small.tbl"))
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q10_hor_par, progStaged.TPCH_Q10_hor_par, "TPCH_Q10_hor_par", in)
  }

  test("TPCH_Q10_ver_par") {
    // The nation table is loaded twice here, exactly as in the original test.
    // NOTE(review): looks intentional (the ver_par variant takes five inputs)
    // — confirm against the TPCH_Q10_ver_par program's expected arity.
    val in = Array(readTable("customer-small.tbl"), readTable("orders-small.tbl"), readTable("lineitem-small.tbl"), readTable("nation-small.tbl"), readTable("nation-small.tbl"))
    compareOutputWithSequential(progStaged)(progSeq.TPCH_Q10_ver_par, progStaged.TPCH_Q10_ver_par, "TPCH_Q10_ver_par", in)
  }

  test("test42Tuple") {
    val in = (Array((1, "red"), (2, "green"), (3, "blue")), Array((1, "one"), (2, "two"), (3, "three")))
    compareOutputWithSequential(progStaged)(progSeq.testTuple, progStaged.testTuple, "testTuple", in)
  }
}
| knizhnik/scalan-sql | lms-backend/src/test/scala/scalan/it/lms/LmsSqlItTests.scala | Scala | apache-2.0 | 11,803 |
package com.eevolution.context.dictionary.infrastructure.repository
import java.util.UUID
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.AccessLog
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcSession
import scala.concurrent.{ExecutionContext, Future}
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 19/10/17.
*/
/**
* Access Log Repository
* @param session
* @param executionContext
*/
class AccessLogRepository (session: JdbcSession)(implicit executionContext: ExecutionContext)
  extends api.repository.AccessLogRepository[AccessLog , Int]
    with AccessLogMapping {

  /** Loads the access-log row with the given surrogate id.
    * The Future fails with NoSuchElementException when no row matches
    * (headOption.get) — kept as-is since callers may rely on that failure
    * mode. */
  def getById(id: Int): Future[AccessLog] = {
    Future(run(queryAccessLog.filter(_.accessLogId == lift(id))).headOption.get)
  }

  /** Loads the access-log row identified by its UUID; fails the Future when
    * absent, like getById. */
  def getByUUID(uuid: UUID): Future[AccessLog] = {
    Future(run(queryAccessLog.filter(_.uuid == lift(uuid.toString))).headOption.get)
  }

  /** Loads every access-log row. */
  def getAll() : Future[List[AccessLog]] = {
    Future(run(queryAccessLog))
  }

  /** Returns one page of access-log rows plus paging metadata.
    * `offset` is the index of the page's first row and `limit` its exclusive
    * end index, i.e. (page + 1) * pageSize. */
  def getAllByPage(page: Int, pageSize: Int): Future[PaginatedSequence[AccessLog]] = {
    val offset = page * pageSize
    val limit = (page + 1) * pageSize
    for {
      count <- countAccessLog()
      elements <- if (offset > count) Future.successful(Nil)
      else selectAccessLog(offset, limit)
    } yield {
      PaginatedSequence(elements, page, pageSize, count)
    }
  }

  // Total number of access-log rows.
  private def countAccessLog() = {
    Future(run(queryAccessLog.size).toInt)
  }

  /** Selects the rows in [offset, limit) of the full result set.
    * `limit` is an exclusive end index, not a row count, so
    * slice(offset, limit) yields exactly one page of pageSize rows. The
    * previous drop(offset).take(limit) took `limit` rows *after* the offset
    * and could return up to a whole extra page per request.
    * NOTE(review): paging is still applied in memory after fetching all
    * rows; pushing drop/take into the quill query would avoid that, but is
    * left unchanged here. */
  private def selectAccessLog(offset: Int, limit: Int): Future[Seq[AccessLog]] = {
    Future(run(queryAccessLog).slice(offset, limit).toSeq)
  }
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/AccessLogRepository.scala | Scala | gpl-3.0 | 2,619 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.