code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package org.scalacvx.dcp
/**
* Created by lorenzo on 8/16/15.
*/
/** Three-valued sign used for DCP (disciplined convex programming) analysis.
 *
 * The values are the case objects Positive, Negative and NoSign (unknown);
 * the operators below implement the usual sign-propagation arithmetic.
 */
trait Sign {

  /** Negation swaps Positive and Negative; NoSign is a fixed point. */
  def unary_- : Sign = this match {
    case Positive => Negative
    case Negative => Positive
    case other    => other
  }

  /** Addition keeps the sign only when both operands agree; any
   * disagreement makes the result indeterminate (NoSign). */
  def +(that: Sign): Sign =
    if (this == that) this else NoSign

  /** Subtraction is addition of the negated right-hand side. */
  def -(that: Sign): Sign = this + (-that)

  /** Multiplication: anything involving NoSign is NoSign; equal signs
   * give Positive, opposite signs give Negative. */
  def *(that: Sign): Sign = (this, that) match {
    case (NoSign, _) | (_, NoSign) => NoSign
    case (a, b) if a == b          => Positive
    case _                         => Negative
  }

  /** Multiplying a monotonicity by a sign keeps it (Positive), flips it
   * (Negative), or destroys it (otherwise). */
  def *(that: Monotonicity): Monotonicity = this match {
    case Positive => that
    case Negative => -that
    case _        => NoMonotonicity
  }
}
/** Sign value: positive. */
case object Positive extends Sign
/** Sign value: negative. */
case object Negative extends Sign
case object NoSign extends Sign | lorenzolucido/ScalaCVX | src/main/scala/org/scalacvx/dcp/Sign.scala | Scala | mit | 973 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate.wikitext
import org.eclipse.mylyn.wikitext.core.parser.Attributes
import org.eclipse.mylyn.wikitext.core.parser.DocumentBuilder.BlockType
import org.eclipse.mylyn.internal.wikitext.confluence.core.block.AbstractConfluenceDelimitedBlock
import java.lang.String
import collection.mutable.ListBuffer
import org.fusesource.scalate.util.Threads._
import util.parsing.input.CharSequenceReader
import util.parsing.combinator.RegexParsers
import org.fusesource.scalate.support.RenderHelper
import org.fusesource.scalate._
import org.fusesource.scalate.filter.{Pipeline, Filter}
import java.io.{File, ByteArrayInputStream, ByteArrayOutputStream}
import util.{Files, Log, IOUtil}
/** Scalate template filter that shells out to the external `pygmentize`
 * command (from the Python Pygments project) to turn source code into
 * syntax-highlighted HTML. When pygmentize is not installed, a plain
 * <pre> fallback is emitted instead. */
object Pygmentize extends Log with Filter with TemplateEngineAddOn {
/**
* Add the pygmentize filter to the template engine.
*/
def apply(te: TemplateEngine) = {
te.filters += "pygmentize" -> Pygmentize
// add the imports
te.importStatements :+= "import org.fusesource.scalate.wikitext.PygmentizeHelpers._"
}
/** Filter entry point: highlights `content` with default options. */
def filter(context: RenderContext, content: String): String = {
pygmentize(content)
}
// lets calculate once on startup
// Runs "pygmentize -V" once; a zero exit status means the tool is available.
private lazy val _installed: Boolean = {
try {
var process = Runtime.getRuntime.exec(Array("pygmentize", "-V"))
thread("pygmetize err handler") {
IOUtil.copy(process.getErrorStream, System.err)
}
val out = new ByteArrayOutputStream()
thread("pygmetize out handler") {
IOUtil.copy(process.getInputStream, out)
}
process.waitFor
if (process.exitValue != 0) {
false
} else {
val output = new String(out.toByteArray).trim
debug("Pygmentize installed: " + output)
true
}
}
catch {
// Typically an IOException when the executable is not on the PATH.
case e: Exception => debug(e, "Failed to start pygmetize: " + e)
false
}
}
/** Whether the external pygmentize command was found at startup. */
def isInstalled: Boolean = _installed
/** Splits `data` into lines and delegates to the Seq overload. */
def unindent(data:String):String = unindent( data.split("""\\r?\\n""").toList )
/** Removes the indentation level given by the (whitespace-only) last line
 * from every line, so indented macro bodies render flush-left. */
def unindent(data:Seq[String]):String = {
var content = data
// To support indenting the macro.. we figure out the indent level of the
// code block by looking at the indent of the last line
val indent_re = """^([ \\t]+)$""".r
content.lastOption match {
case Some(indent_re(indent)) =>
// strip off those indents.
content = content.map( _.replaceFirst("""^[ \\t]{"""+indent.size+"""}""", "") )
case _ =>
}
content.mkString("\\n")
}
/** Parses an option string of the form "[lang] [key=value ...]" into an
 * optional language plus a key/value attribute map. */
object OptionParser extends RegexParsers {
override def skipWhitespace = false
val lang = """[\\w0-9_-]+""".r
val key = """[\\w0-9_-]+""".r
val value = """[\\w0-9_-]+""".r
// key=value pairs separated by whitespace, folded into a Map.
val attributes = repsep( key ~ ("="~>value), whiteSpace )^^ { list =>
var rc = Map[String, String]()
for( (x~y) <-list ) {
rc += x->y
}
rc
}
// Either attributes only (the first token contains '='), or a language
// name optionally followed by attributes.
val option_line: Parser[(Option[String], Map[String,String])] =
guard(key~"=") ~> attributes <~ opt(whiteSpace) ^^ { case y => (None,y) } |
lang ~ opt(whiteSpace ~> attributes <~ opt(whiteSpace)) ^^ {
case x~Some(y) => (Some(x),y)
case x~None => (Some(x), Map())
}
/** Returns None (rather than failing) when the input does not parse. */
def apply(in: String) = {
(phrase(opt(whiteSpace)~>option_line)(new CharSequenceReader(in))) match {
case Success(result, _) => Some(result)
// case NoSuccess(message, next) => throw new Exception(message+" at "+next.pos)
case NoSuccess(message, next) => None
}
}
}
/** Highlights `data`. `options` may carry a language plus attributes
 * ("lines" for numbered gutters, "wide" for a wide wrapper div). Also
 * recognises dashed "lang : title" header sections in the content,
 * rendering one titled pane — or two panes side-by-side when a second
 * header section is present. */
def pygmentize(data:String, options:String=""):String = {
var lang1 = "text"
var lines = false
var wide = false
val opts = OptionParser(options)
opts match {
case Some((lang, atts)) =>
lang1 = lang.getOrElse(lang1)
for( (key,value) <- atts) {
key match {
case "lines" => lines = java.lang.Boolean.parseBoolean(value)
case "wide" => wide = java.lang.Boolean.parseBoolean(value)
}
}
case _ =>
}
val content = unindent(data)
// Now look for header sections...
val header_re = """(?s)\\n------+\\s*\\n\\s*([^:\\s]+)\\s*:\\s*([^\\n]+)\\n------+\\s*\\n(.*)""".r
header_re.findFirstMatchIn("\\n"+data) match {
case Some(m1) =>
lang1 = m1.group(1)
var title1 = m1.group(2)
var data1 = m1.group(3)
// A second header section splits the content into a two-pane comparison.
header_re.findFirstMatchIn(data1) match {
case Some(m2) =>
data1 = data1.substring(0, m2.start )
var lang2 = m2.group(1)
var title2 = m2.group(2)
var data2 = m2.group(3)
val colored1 = pygmentize(data1, lang1, lines)
val colored2 = pygmentize(data2, lang2, lines)
var rc = """<div class="compare"><div class="compare-left"><h3>%s</h3><div class="syntax">%s</div></div><div class="compare-right"><h3>%s</h3><div class="syntax">%s</div></div><br class="clear"/></div>
|""".stripMargin.format(title1, colored1, title2, colored2)
if( wide ) {
rc = """<div class="wide">%s</div>""".format(rc)
}
rc
case None =>
"""<div class="compare"><h3>%s</h3><div class="syntax">%s</div></div>
|""".stripMargin.format(title1, pygmentize(data1, lang1, lines))
}
case None =>
"""<div class="syntax">%s</div>
|""".stripMargin.format(pygmentize(data, lang1, lines))
}
}
/** Runs the pygmentize command over `body` for the given language and
 * returns HTML. If pygmentize is not installed, emits a sanitized
 * <pre name='code'> block instead. Throws RuntimeException when the
 * command exits non-zero. */
def pygmentize(body:String, lang:String, lines:Boolean):String = {
if (!isInstalled) {
"<pre name='code' class='brush: " + lang + "; gutter: " + lines + ";'><code>" +RenderHelper.sanitize(body) + "</code></pre>"
} else {
var options = "style=colorful"
if( lines ) {
options += ",linenos=1"
}
var process = Runtime.getRuntime.exec(Array("pygmentize", "-O", options, "-f", "html", "-l", lang))
thread("pygmetize err handler") {
IOUtil.copy(process.getErrorStream, System.err)
}
// Feed the source text on stdin, then close it so pygmentize terminates.
thread("pygmetize in handler") {
IOUtil.copy(new ByteArrayInputStream(body.getBytes), process.getOutputStream)
process.getOutputStream.close
}
val out = new ByteArrayOutputStream()
IOUtil.copy(process.getInputStream, out)
process.waitFor
if (process.exitValue != 0) {
throw new RuntimeException("'pygmentize' execution failed: %d. Did you install it from http://pygments.org/download/ ?".format(process.exitValue))
}
new String(out.toByteArray).replaceAll("""\\r?\\n""", "
")
}
}
}
/**
* View helper methods for use inside templates
*/
object PygmentizeHelpers {
  // TODO add the text version and the macro version......
  // TODO is there a simpler polymophic way to write functions like this
  // that operate on text content from a String, block, URI, File, Resource etc...

  /** Highlights the contents of `file`. If `lang` is empty the language is
   * inferred from the file extension. `lines = true` requests a line-number
   * gutter. (Previously the `lines` parameter was accepted but silently
   * ignored — it was never forwarded to Pygmentize.) */
  def pygmentizeFile(file: File, lang: String = "", lines: Boolean = false): String = {
    val content = IOUtil.loadTextFile(file)
    val defaultLang = getOrUseExtension(lang, file.toString)
    Pygmentize.pygmentize(content, optionsFor(defaultLang, lines))
  }

  /** Highlights the resource at `uri`, loaded through the render context.
   * If `lang` is empty the language is inferred from the URI's extension. */
  def pygmentizeUri(uri: String, lang: String = "", lines: Boolean = false)(implicit resourceContext: RenderContext): String = {
    val content = resourceContext.load(uri)
    val defaultLang = getOrUseExtension(lang, uri)
    Pygmentize.pygmentize(content, optionsFor(defaultLang, lines))
  }

  /** Builds the option string understood by Pygmentize.pygmentize, appending
   * "lines=true" only when requested so the previous behaviour is preserved
   * for the default (lines = false) case. */
  private def optionsFor(lang: String, lines: Boolean): String =
    if (lines) lang + " lines=true" else lang

  /** Returns `lang` if non-empty, otherwise the extension of `uri`. */
  protected def getOrUseExtension(lang: String, uri: String): String = {
    if (lang.isEmpty) {
      Files.extension(uri)
    } else {
      lang
    }
  }
}
/** Confluence-style `{pygmentize}` wiki block: wraps its content in a
 * <div class="syntax"> and renders it through the Pygmentize filter. */
class PygmentsBlock extends AbstractConfluenceDelimitedBlock("pygmentize") {
  // Language to highlight as; set via the bare block option or `lang=...`.
  var language: String = _
  // Whether to render a line-number gutter; set via `lines=true`.
  var lines: Boolean = false
  // Accumulates the block's raw content lines until endBlock() runs.
  var content = ListBuffer[String]()

  override def beginBlock() = {
    val attributes = new Attributes();
    attributes.setCssClass("syntax");
    builder.beginBlock(BlockType.DIV, attributes);
  }

  override def handleBlockContent(value: String) = {
    // collect all the content lines..
    content += value
  }

  override def endBlock() = {
    import Pygmentize._
    builder.charactersUnescaped(pygmentize(unindent(content), language, lines))
    content.clear
    builder.endBlock();
  }

  override def setOption(option: String) = {
    language = option.toLowerCase();
  }

  override def setOption(key: String, value: String) = {
    key match {
      case "lines" => lines = value == "true"
      case "lang" => language = value
      // Previously an unrecognised key threw a MatchError and aborted
      // rendering; unknown options are now ignored.
      case _ =>
    }
  }
}
| janurag/scalate | scalate-wikitext/src/main/scala/org/fusesource/scalate/wikitext/PygmentsBlock.scala | Scala | apache-2.0 | 9,131 |
/**
* Created by star on 02/03/17.
*/
package IFDS
trait FlowFunction[D] {

  /** Applies this flow function to a single fact.
   *
   * @param d a fact
   * @return the target facts produced for `d`
   */
  def apply(d: D): Set[D] = computeTargets(d)

  /** Maps the fact `d` to the set of facts holding after this flow edge.
   * Unimplemented here (`???` throws NotImplementedError); concrete flow
   * functions are expected to override it. */
  def computeTargets(d: D): Set[D] = ???

  /*val funcs: FlowFunction[D]
  val curr: HashSet[D]
  curr.+(d)
  for (func <- funcs) {
  val next: HashSet[D]
  for(d <- curr)
  next.+(func.computeTargets(d))
  curr = next
  }
  return curr
  */
}
| packlnd/IFDS-RA | src/main/scala/IFDS/FlowFunction.scala | Scala | mit | 475 |
package edu.gemini.spModel.core
import scalaz._
import Scalaz._
import org.scalacheck.Prop.forAll
import org.specs2.ScalaCheck
import org.specs2.mutable.Specification
// Specification checking that Magnitude values survive Java binary
// serialization round-trips, for arbitrary ScalaCheck-generated instances
// (generators come from the mixed-in Arbitraries trait).
object MagnitudeSpec extends Specification with ScalaCheck with Arbitraries with Helpers {
"MagnitudeSpec Serialization" should {
"Support Java Binary" !
// canSerialize comes from the mixed-in Helpers trait; presumably it
// serializes the value and checks the round-trip — confirm against Helpers.
forAll { (ee: Magnitude) =>
canSerialize(ee)
}
}
}
| arturog8m/ocs | bundle/edu.gemini.spModel.core/src/test/scala/edu/gemini/spModel/core/MagnitudeSpec.scala | Scala | bsd-3-clause | 408 |
/*
* Copyright (C) 2013 The Mango Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* The code of this project is a port of (or wrapper around) the Guava-libraries.
* See http://code.google.com/p/guava-libraries/
*
* @author Markus Schneider
*/
package org.feijoas.mango.common.collect.mutable
import scala.math.Ordering.Int
import org.feijoas.mango.common.annotations.Beta
import org.feijoas.mango.common.collect.AsOrdered
import org.feijoas.mango.common.collect.RangeSetBehaviors
import org.feijoas.mango.common.collect.RangeSetWrapperBehaviours
import org.scalatest.FreeSpec
import com.google.common.collect.{RangeSet => GuavaRangeSet}
/** Tests for [[ImmutableRangeSetWrapperTest]]
*
* @author Markus Schneider
* @since 0.8
*/
// NOTE(review): the scaladoc above names "ImmutableRangeSetWrapperTest" but
// this class exercises TreeRangeSetWrapper — likely a copy/paste slip.
class TreeRangeSetWrapperTest extends FreeSpec with RangeSetBehaviors with RangeSetWrapperBehaviours {
"A TreeRangeSetWrapper" - {
// Shared behaviour suites (from the mixed-in traits) run against a
// TreeRangeSetWrapper built over Int with the standard Int ordering.
behave like rangeSet(TreeRangeSetWrapper.newBuilder[Int, Int.type])
behave like mutableRangeSet(TreeRangeSetWrapper.newBuilder[Int, Int.type])
behave like rangeSetWithBuilder(TreeRangeSetWrapper.newBuilder[Int, Int.type])
// Wraps an existing Guava RangeSet and checks the mutable-wrapper contract.
behave like mutableWrapper((guava: GuavaRangeSet[AsOrdered[Int]]) => TreeRangeSetWrapper[Int, Int.type](guava))
}
}
| feijoas/mango | src/test/scala/org/feijoas/mango/common/collect/mutable/TreeRangeSetWrapperTest.scala | Scala | apache-2.0 | 1,761 |
package com.datastax.spark.connector.writer
import scala.language.existentials
import scala.reflect.runtime.universe._
import scala.util.control.NonFatal
import org.apache.spark.sql.catalyst.ReflectionLock.SparkReflectionLock
import com.datastax.spark.connector.util.{ReflectionUtil, Symbols}
import com.datastax.spark.connector.{ColumnRef, GettableByIndexData, TupleValue, UDTValue}
import com.datastax.spark.connector.cql.StructDef
import com.datastax.spark.connector.mapper._
import com.datastax.spark.connector.types.{ColumnType, ListType, MapType, SetType, TupleType, TypeConverter}
import com.datastax.spark.connector.util.Logging
/** Builds a `TypeConverter` that turns objects of a user (mapped) class `T`
 * into the Cassandra-writable representation (`struct.ValueRepr`) described
 * by the given `StructDef`, recursively resolving converters for
 * collections, tuples, UDTs and Options. */
private[connector] object MappedToGettableDataConverter extends Logging{
/**
* When the Spark Cassandra Connector is running on a separate
* classloader it is possible that application classes will
* not be accessible. To avoid this scenario we can forcibly
* pass through the classloader from the application provided
* class to all converters created for children of this converter.
*/
def apply[T : TypeTag : ColumnMapper](
struct: StructDef,
columnSelection: IndexedSeq[ColumnRef],
forceClassLoader: Option[ClassLoader] = None): TypeConverter[struct.ValueRepr] =
new TypeConverter[struct.ValueRepr] {
/** To determine which default column mapper to use for UDT fields.
* If this class is a Java bean, then the UDT fields will be mapped with
* the `JavaBeanColumnMapper` */
private val isJavaBean =
implicitly[ColumnMapper[T]].isInstanceOf[JavaBeanColumnMapper[_]]
// Resolves which getter feeds which selected column; computed once.
private val columnMap =
implicitly[ColumnMapper[T]].columnMapForWriting(struct, columnSelection)
/** Returns the column mapper associated with the given type.
* Used to find column mappers for UDT columns.
* For tuples, returns [[com.datastax.spark.connector.mapper.TupleColumnMapper TupleColumnMapper]],
* for Java beans uses
* [[com.datastax.spark.connector.mapper.JavaBeanColumnMapper JavaBeanColumnMapper]],
* and for everything else uses
* [[com.datastax.spark.connector.mapper.DefaultColumnMapper DefaultColumnMapper]] */
private def columnMapper[U: TypeTag]: ColumnMapper[U] = {
logDebug(s"Finding a UDT ColumnMapper for typeTag ${typeTag[U]}")
// Scala runtime reflection is not thread-safe; guarded by the shared lock.
val tpe = SparkReflectionLock.synchronized(typeTag[U].tpe)
if (ReflectionUtil.isScalaTuple(tpe))
new TupleColumnMapper[U]
else if (isJavaBean)
new JavaBeanColumnMapper[U]()(ReflectionUtil.classTag[U])
else
new DefaultColumnMapper[U]
}
/** Returns true for tuple types that provide full type information of their components */
private def isTypedTuple(sym: Symbol): Boolean =
ReflectionUtil.isScalaTuple(sym) ||
sym == Symbols.PairSymbol ||
sym == Symbols.TripleSymbol
/** Returns a converter for converting given column to appropriate type savable to Cassandra */
private def converter[U : TypeTag](columnType: ColumnType[_]): TypeConverter[_ <: AnyRef] = {
SparkReflectionLock.synchronized {
val scalaType = typeTag[U].tpe
logDebug(s"Getting converter for $columnType to $scalaType")
(columnType, scalaType) match {
// Collections need recursive call to get the converter for the collection elements
// to handle nested UDT values and tuples properly.
case (ListType(argColumnType), TypeRef(_, _, List(argScalaType))) =>
val argConverter = converter(argColumnType, argScalaType)
TypeConverter.javaArrayListConverter(argConverter)
case (SetType(argColumnType), TypeRef(_, _, List(argScalaType))) =>
val argConverter = converter(argColumnType, argScalaType)
TypeConverter.javaHashSetConverter(argConverter)
case (MapType(keyColumnType, valueColumnType),
TypeRef(_, _, List(keyScalaType, valueScalaType))) =>
val keyConverter = converter(keyColumnType, keyScalaType)
val valueConverter = converter(valueColumnType, valueScalaType)
TypeConverter.javaHashMapConverter(keyConverter, valueConverter)
// Scala Tuple2 / Tuple3 mapped to Cassandra tuple types:
case (tt @ TupleType(argColumnType1, argColumnType2),
TypeRef(_, Symbols.PairSymbol, List(argScalaType1, argScalaType2))) =>
val c1 = converter(argColumnType1.columnType, argScalaType1)
val c2 = converter(argColumnType2.columnType, argScalaType2)
tt.converterToCassandra(IndexedSeq(c1, c2))
case (tt @ TupleType(argColumnType1, argColumnType2, argColumnType3),
TypeRef(_, Symbols.TripleSymbol, List(argScalaType1, argScalaType2, argScalaType3))) =>
val c1 = converter(argColumnType1.columnType, argScalaType1)
val c2 = converter(argColumnType2.columnType, argScalaType2)
val c3 = converter(argColumnType3.columnType, argScalaType3)
tt.converterToCassandra(IndexedSeq(c1, c2, c3))
// If tuple with mismatched number of components, don't continue:
case (tt: TupleType, TypeRef(_, symbol, args))
if isTypedTuple(symbol) && tt.columns.length != args.length =>
throw new IllegalArgumentException(
s"Expected ${tt.columns.length} components in the tuple, " +
s"instead of ${args.length} in $scalaType")
// UDTValue, TupleValue:
case (t: StructDef, _) if scalaType <:< typeOf[GettableByIndexData] =>
columnType.converterToCassandra
//Options
case (t: StructDef, TypeRef(_, _, List(argScalaType))) if scalaType <:< typeOf[Option[Any]] =>
type U2 = u2 forSome {type u2}
implicit val tt = ReflectionUtil.typeToTypeTag[U2](argScalaType)
implicit val cm: ColumnMapper[U2] = columnMapper[U2]
apply[U2](t, t.columnRefs, Some(childClassloader))
// UDTs mapped to case classes and tuples mapped to Scala tuples.
// ColumnMappers support mapping Scala tuples, so we don't need a special case for them.
case (t: StructDef, _) =>
implicit val cm: ColumnMapper[U] = columnMapper[U]
apply[U](t, t.columnRefs, Some(childClassloader))
// Primitive types
case _ =>
columnType.converterToCassandra
}
}
}
/** Returns a converter that can convert
* a Scala type given by `Type` to a Cassandra value of given `ColumnType` */
private def converter(columnType: ColumnType[_], tpe: Type): TypeConverter[_ <: AnyRef] = {
// Bridges a runtime Type back into the TypeTag-based overload above.
type U = u forSome {type u}
implicit val tt = ReflectionUtil.typeToTypeTag[U](tpe)
converter[U](columnType)
}
@transient
private val tpe = SparkReflectionLock.synchronized {
typeTag[T].tpe
}
// Use the caller-forced classloader when given; otherwise the one that
// loaded T. See the scaladoc on apply for why this matters.
@transient
private val mirror = forceClassLoader match {
case Some(cl) => runtimeMirror(cl)
case None => typeTag[T].mirror
}
/**
* All converters descended from this converter should use the same classloader even if
* this class (MappedToGettableDataConverter) happens to be on a different classloader.
*/
@transient
private val childClassloader = mirror.classLoader
logDebug(s"Finding a class for $tpe in ${mirror.classLoader}")
private val cls = mirror.runtimeClass(typeTag[T].tpe).asInstanceOf[Class[T]]
private val typeName = tpe.toString
// Cassandra column names, in the user-selected order.
val columnNames =
columnSelection.map(_.columnName)
private val getterByColumnName = columnMap.getters.map {
case (name, colRef) => (colRef.columnName, name)
}
// Getter (property) names of T aligned with columnNames by index.
private val getters =
columnNames.map(getterByColumnName)
@transient
private val scalaTypes: IndexedSeq[Type] =
getters.map(ReflectionUtil.returnType(tpe, _))
private val columnTypes: IndexedSeq[ColumnType[_]] =
columnNames.map(c => struct.columnByName(c).columnType)
private val extractor =
new PropertyExtractor(cls, getters)
// One converter per selected column; resolution failures are rethrown
// with a message naming the field, its type and the target column.
private val converters = {
for (i <- columnNames.indices) yield {
try {
val ct = columnTypes(i)
val st = scalaTypes(i)
converter(ct, st)
} catch {
case NonFatal(e) =>
throw new IllegalArgumentException(
s"""Failed to get converter for field "${getters(i)}"
|of type ${scalaTypes(i)} in $typeName
|mapped to column "${columnNames(i)}" of "${struct.name}"
|""".stripMargin.replaceAll("\\n", " "), e)
}
}
}
override def targetTypeTag = typeTag[struct.ValueRepr]
override def convertPF = {
// A bare instance of T: extract each property and convert it in place.
case obj if cls.isInstance(obj) =>
val columnValues = extractor.extract(obj.asInstanceOf[T])
for (i <- columnValues.indices)
columnValues(i) = converters(i).convert(columnValues(i))
struct.newInstance(columnValues: _*)
// A Some-wrapped instance of T converts like the bare object.
case Some(obj) if cls.isInstance(obj) =>
val columnValues = extractor.extract(obj.asInstanceOf[T])
for (i <- columnValues.indices)
columnValues(i) = converters(i).convert(columnValues(i))
struct.newInstance(columnValues: _*)
// None maps to null.
case None =>
null.asInstanceOf[struct.ValueRepr]
}
}
}
| shashwat7/spark-cassandra-connector | spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/writer/MappedToGettableDataConverter.scala | Scala | apache-2.0 | 9,566 |
package verystickypistonsmod
import net.minecraft.block.Block
import net.minecraft.block.material.Material
import net.minecraft.creativetab.CreativeTabs
import net.minecraft.util.AxisAlignedBB
import net.minecraft.world.World
import java.util.Random
import net.minecraft.block.BlockPistonBase
import net.minecraft.item.ItemStack
import net.minecraft.entity.EntityLivingBase
import net.minecraft.util.Facing
import minecraftscalalib.Helpful._
import net.minecraft.block.Block
import net.minecraft.block.BlockPistonMoving
class BlockVeryStickyPiston(val block_ID:Int) extends BlockPistonBase(block_ID, true) {

  /** Maximum number of blocks a single extension may push. */
  private val BLOCK_MOVING_LIMIT = 13

  /**
   * Returns true when any probed neighbour position supplies indirect power
   * to the piston at `c`, skipping probes suppressed by `eventParam`.
   *
   * NOTE(review): the last few probes (the duplicated (x, y, z, 0) check,
   * the y+2 probe guarded by `eventParam != 4`, and the unguarded y+1
   * probes) do not follow the pattern of the first six — confirm the
   * intended power-search shape; they are preserved exactly as written.
   */
  def IsIndirectlyPowered(c:Coordinates, eventParam:Int): Boolean = {
    c match {
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x ,y - 1,z, 0) && eventParam != 0) => true
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x ,y + 1,z, 1) && eventParam != 1) => true
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x ,y ,z - 1, 2) && eventParam != 2) => true
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x ,y ,z + 1, 3) && eventParam != 3) => true
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x + 1,y ,z , 5) && eventParam != 5) => true
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x - 1,y ,z , 4) && eventParam != 4) => true
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x ,y ,z , 0) && eventParam != 0) => true
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x ,y + 2,z , 1) && eventParam != 4) => true
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x ,y + 1,z - 1, 2)) => true
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x ,y + 1,z + 1, 3)) => true
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x -1 ,y + 1,z , 4)) => true
      case Coordinates(w, x, y, z) if (w.getIndirectPowerOutput(x + 1,y + 1,z , 5)) => true
      case _ => false
    }
  }

  /**
   * Called when the block receives a BlockEvent - see World.addBlockEvent. By default, passes it on to the tile
   * entity at this location. Args: world, x, y, z, blockID, EventID, event parameter.
   *
   * Event 0 extends the piston; event 1 marks it extended / retracts.
   *
   * Fixes relative to the original implementation:
   *  - setBlockMetadataWithNotify was called with (z, y, z) instead of
   *    (x, y, z), writing metadata to the wrong block position;
   *  - tryExtend (which mutates the world) was evaluated in two consecutive
   *    pattern guards, so a successful extension could run twice;
   *  - the facing (`eventParam`), not the event id, is now passed to
   *    IsIndirectlyPowered, matching how the facing is used elsewhere.
   */
  override def onBlockEventReceived(w:World, x:Int, y:Int, z:Int, blockID:Int, eventID:Int, eventParam:Int): Boolean = {
    val isIndPowered = this.IsIndirectlyPowered(Coordinates(w, x, y, z), eventParam)
    eventID match {
      // Server side: record the "extended" bit in the block metadata.
      case 1 if (!w.isRemote && isIndPowered) =>
        w.setBlockMetadataWithNotify(x, y, z, eventParam | 8, 2)
        false
      case 0 if (!w.isRemote && !isIndPowered) => false
      // Extend: push the block stack once, then mark the piston extended.
      case 0 =>
        if (tryExtend(w, x, y, z, eventParam)) {
          w.setBlockMetadataWithNotify(x, y, z, eventParam | 8, 2)
          true
        } else {
          false
        }
      // Retract: not implemented yet — tryRetract is a stub.
      case 1 => tryRetract()
      case _ => false
    }
  }

  // TODO: retraction is not implemented yet; always reports success.
  private def tryRetract():Boolean = {
    true
  }

  /**
   * Attempts to extend the piston towards facing `f`. Returns false when no
   * movable block stack was found; otherwise replaces each block in the
   * stack (farthest first, so nothing is overwritten) with a moving-piston
   * block carrying its id and metadata.
   */
  private def tryExtend(w:World, x:Int, y:Int, z:Int, f:Int): Boolean = {
    val x1 = x + Facing.offsetsXForSide(f)
    val y1 = y + Facing.offsetsYForSide(f)
    val z1 = z + Facing.offsetsZForSide(f)
    val blocks = getBlocksToPush(new Coordinates(w, x1, y1, z1), f)
    if (blocks.isEmpty) {
      return false
    }
    blocks.reverse.foreach { b =>
      val m = b.getoffsetCoordinatesForFacing(f)
      val blockId = b.getBlockId()
      val metadata = b.world.getBlockMetadata(b)
      b.world.setBlock(m, Block.pistonMoving.blockID, metadata, 4)
      val te = BlockPistonMoving.getTileEntity(m.x, m.y, m.z, true, false)
      b.world.setBlockTileEntity(m, te)
      b.world.notifyBlocksOfNeighborChange(m.x, m.y, m.z, blockId)
    }
    true
  }

  /**
   * Collects the run of pushable blocks reachable from `fromPos` in
   * direction `f`, bounded by BLOCK_MOVING_LIMIT. Returns Nil when the run
   * is blocked (no pushable-into space beyond its last block).
   *
   * NOTE(review): coordList is built farthest-first (each step is
   * prepended), so takeWhile scans from the far end — confirm this matches
   * the intended near-to-far scan. Also canMoveBlockAtCoordinates
   * dereferences Block.blocksList(id) without a null check, so probing air
   * (id 0) would NPE — verify getBlockId never reports air here.
   */
  private def getBlocksToPush(fromPos:Coordinates, f:Int):List[Coordinates] = {
    val dist = 0 to BLOCK_MOVING_LIMIT
    val fistCoord = List[Coordinates](fromPos.getoffsetCoordinatesForFacing(f))
    val coordList = dist.foldLeft(fistCoord) {
      case (list, b:Int) => list.head.getoffsetCoordinatesForFacing(f) :: list
    }
    val blocks = coordList.takeWhile { c =>
      canMoveBlockAtCoordinates(c) && canBlockAtCoordinatesBePushedInto(c)
    }
    if (canPushIntoLastCoord(blocks.lastOption, f).isEmpty) {
      List()
    } else {
      blocks
    }
  }

  /** Checks there is a pushable-into space just beyond the last block of the
   * run; None means the push is blocked (or the run was empty). */
  def canPushIntoLastCoord(lastBlock:Option[Coordinates], facing:Int):Option[Coordinates] = {
    for (
      x <- lastBlock;
      y = x.getoffsetCoordinatesForFacing(facing)
      if canBlockAtCoordinatesBePushedInto(y)
    ) yield y
  }

  /**
   * False means that it can't be pushed, and we are blocked from moving it:
   * obsidian, mobility flag 2, or any block with a tile entity.
   */
  def canMoveBlockAtCoordinates(c:Coordinates):Boolean = {
    c.getBlockId match {
      case Block.obsidian.blockID => false
      case b if (Block.blocksList(b).getMobilityFlag() == 2) => false
      case _ if (c.world.blockHasTileEntity(c)) => false
      case _ => true
    }
  }

  /**
   * False means that it can't be pushed, but pistons can move over it.
   * Thus ends our stack of blocks to move.
   */
  def canBlockAtCoordinatesBePushedInto(c:Coordinates):Boolean = {
    c.getBlockId match {
      case b if (Block.blocksList(b).getMobilityFlag() == 1) => false
      case _ => true
    }
  }
} | nmarshall23/Very-Sticky-Pistons-Mod | src/main/verystickypistonsmod/BlockVeryStickyPiston.scala | Scala | mit | 5,895 |
// Copyright 2016 Jim Pivarski
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.dianahep.scaroot
import scala.collection.immutable.SortedSet
import scala.collection.mutable
import scala.language.experimental.macros
import scala.reflect.macros.Context
import scala.reflect.runtime.universe.Type
import scala.reflect.runtime.universe.WeakTypeTag
import com.sun.jna.Pointer
import com.sun.jna.Memory
import com.sun.jna.NativeLong
import org.dianahep.scaroot.reader.schema._
import org.dianahep.scaroot.reader.factory._
package reader {
/////////////////////////////////////////////////// class to use when no My[TYPE] is supplied
/** Fallback record type used when no My[TYPE] is supplied: holds field
 * values keyed by field name. */
class Generic(val fields: Map[String, Any]) {
  /** Looks up a field value by name (throws NoSuchElementException if absent). */
  def apply(field: String): Any = fields(field)

  override def toString() = {
    val rendered = fields
      .map({case (k, v) => "\\"" + k + "\\"" + " -> " + v.toString})
      .mkString(", ")
    s"""Generic(Map($rendered))"""
  }
}

/** Companion providing construction and extraction for [[Generic]]. */
object Generic {
  def apply(fields: Map[String, Any]) = new Generic(fields)
  def unapply(x: Generic) = Some(x.fields)
}
/////////////////////////////////////////////////// user-friendly representation of non-String bytes (like Python 3)
/** Immutable, user-friendly view of raw (non-String) bytes, modelled after
 * Python 3's bytes type. */
class Bytes(array: Array[Byte]) extends Seq[Byte] {
  /** The byte at position `idx`. */
  def apply(idx: Int) = array(idx)

  /** Iterates over the wrapped bytes in order. */
  def iterator = array.iterator

  /** Number of bytes held. */
  def length = array.length

  /** Decodes the bytes as a String using the platform default charset. */
  def decode = new String(array)

  /** Decodes the bytes as a String using the named charset. */
  def decode(charset: String) = new String(array, charset)

  override def toString() = "Bytes(" + array.mkString(", ") + ")"
}

/** Factory and encoding helpers for [[Bytes]]. */
object Bytes {
  def apply(array: Array[Byte]) = new Bytes(array)
  def apply(iterable: Iterable[Byte]) = new Bytes(iterable.toArray)
  def apply(bytes: Byte*) = new Bytes(bytes.toArray)

  /** Encodes `str` with the platform default charset. */
  def encode(str: String) = new Bytes(str.getBytes)

  /** Encodes `str` with the named charset. */
  def encode(str: String, charset: String) = new Bytes(str.getBytes(charset))
}
/////////////////////////////////////////////////// user's class specification (a factory-factory!)
/** A user's class specification (a factory-factory): captures at compile
 * time the reflective Type, name and field types of TYPE, and can bind
 * per-field factories into a FactoryClass once a ROOT schema is known. */
abstract class My[TYPE] {
// Runtime-reflection Type of TYPE, captured at the implicit call site.
def dataType: Type
// Source name of TYPE.
def name: String
// Constructor-parameter names paired with their reflective types, in order.
def fieldTypes: List[(String, Type)]
// Binds per-field factories (name -> Factory) into a FactoryClass for TYPE.
def apply(factories: List[(String, Factory[_])]): FactoryClass[TYPE]
}
/** Companion providing the implicit macro that materializes a [[My]]
 * specification for a class with a primary constructor. */
object My {
// Materialized implicitly wherever a My[TYPE] is required.
implicit def apply[TYPE]: My[TYPE] = macro applyImpl[TYPE]
/** Macro implementation: reflects on TYPE's primary constructor parameters
 * and generates a My[TYPE] whose FactoryClass builds instances by reading
 * one field per constructor parameter from the DataStream. */
def applyImpl[TYPE : c.WeakTypeTag](c: Context): c.Expr[My[TYPE]] = {
import c.universe._
val dataClass = weakTypeOf[TYPE]
val dataClassName = dataClass.toString
// The primary constructor's first parameter list defines the fields.
val constructorParams = dataClass.declarations.collectFirst {
case m: MethodSymbol if (m.isPrimaryConstructor) => m
}.get.paramss.head
var i = 0
// fieldTypes: (name -> Type) trees; getFields: one factory read per field.
val fieldTypes = List.newBuilder[Tree]
val getFields = List.newBuilder[Tree]
constructorParams.foreach {param =>
val name = param.asTerm.name.decodedName.toString
val tpe = param.typeSignature
fieldTypes += q"""$name -> weakTypeOf[$tpe]"""
getFields += q"""factoryArray($i).asInstanceOf[Factory[$tpe]](dataStream)"""
i += 1
}
c.Expr[My[TYPE]](q"""
import scala.reflect.runtime.universe.weakTypeOf
import org.dianahep.scaroot.reader._
import org.dianahep.scaroot.reader.schema._
import org.dianahep.scaroot.reader.factory._
new My[$dataClass] {
// What you know at compile-time...
val dataType = weakTypeOf[$dataClass].asInstanceOf[scala.reflect.runtime.universe.Type]
val name = $dataClassName
val fieldTypes = List(..${fieldTypes.result})
def apply(factories: List[(String, Factory[_])]) =
new FactoryClass[$dataClass](name, factories) {
// What you know when you read a ROOT schema...
// (I'll do the type-checking in the factory-builder, not here. Better error messages that way.)
val factoryArray = factories.map(_._2).toArray
// Fast runtime loop...
def apply(dataStream: DataStream) = {
new $dataClass(..${getFields.result})
}
}
}
""")
}
}
/////////////////////////////////////////////////// entry point for iterating over ROOT files
/** One-time, process-wide initialisation for the ROOT reader native library:
 * resets signal handlers and tracks include directories / shared libraries
 * already registered so each is handled at most once. */
object LoadLibsOnce {
// Avoid conflict between Java's signal handlers and ROOT's (which causes rare segmentation faults).
private var isready = false
RootReaderCPPLibrary.resetSignals()
isready = true
// True once resetSignals() has run; callers poll this before using ROOT.
def ready = isready
// Keep track of which libraries have already been loaded to avoid loading them multiple times.
val includeDirs = mutable.Set[String]()
val loadedLibraries = mutable.Set[String]()
// Registers an include directory with ROOT, at most once per distinct path.
def include(dir: String) {
if (!(includeDirs contains dir)) {
RootReaderCPPLibrary.addInclude(dir)
includeDirs += dir
}
}
// Loads a shared library into ROOT, at most once per distinct name.
def apply(lib: String) {
if (!(loadedLibraries contains lib)) {
RootReaderCPPLibrary.loadLibrary(lib)
loadedLibraries += lib
}
}
}
/** Iterator over the entries of a ROOT TTree that may be spread across several files.
  *
  * Entries are pulled through a native "TreeWalker" (JNA calls into
  * RootReaderCPPLibrary) in micro-batches: C++ copies a batch into a shared
  * byte buffer, which is then decoded into objects of type TYPE by a
  * FactoryClass built from the resolved schema.
  *
  * @param fileLocations  ROOT files to read, in order (must be non-empty)
  * @param treeLocation   location of the TTree inside each file
  * @param includes       directories added to the native include path
  * @param libs           shared libraries to load before reading
  * @param inferTypes     if true, ask the native layer to infer the tree's types first
  * @param myclasses      custom deserializers, keyed by schema (class) name
  * @param start          first global entry index to yield (inclusive, >= 0)
  * @param end            global entry index to stop before (exclusive); negative means no limit
  * @param microBatchSize number of entries copied from C++ per buffer fill (>= 1)
  */
class RootTreeIterator[TYPE : WeakTypeTag : My](fileLocations: Seq[String],
                                                treeLocation: String,
                                                includes: Seq[String] = Nil,
                                                libs: Seq[String] = Nil,
                                                inferTypes: Boolean = false,
                                                myclasses: Map[String, My[_]] = Map[String, My[_]](),
                                                start: Long = 0L,
                                                end: Long = -1L,
                                                microBatchSize: Int = 10) extends Iterator[TYPE] {
  if (fileLocations.isEmpty)
    throw new RuntimeException("Cannot build RootTreeIterator over an empty set of files.")
  if (start < 0)
    throw new IllegalArgumentException(s"The start ($start) must be greater than or equal to zero.")
  if (end >= 0 && start >= end)
    throw new IllegalArgumentException(s"If an ending index is given (greater than or equal to zero), then start ($start) must be less than end ($end).")
  if (microBatchSize < 1)
    throw new IllegalArgumentException(s"The microBatchSize ($microBatchSize) must be greater than or equal to one.")

  // Native libraries may only be loaded once per JVM; wait for the singleton to be ready.
  val loadLibsOnce = LoadLibsOnce
  while (!loadLibsOnce.ready) { Thread.sleep(1) }
  includes foreach {dir => loadLibsOnce.include(dir)}
  libs foreach {lib => loadLibsOnce(lib)}

  if (inferTypes) {
    val errorMessage: String = RootReaderCPPLibrary.inferTypes(fileLocations(0), treeLocation)
    if (!errorMessage.isEmpty)
      throw new RuntimeException(errorMessage)
  }

  // Pack of state variables that all have to be kept in sync!
  // Limit user access to setIndex, reset, and incrementIndex, which should preserve interrelationships.
  private var done = true
  private var treeWalker = Pointer.NULL
  private var entryIndex = 0L         // global entry index across all files
  private var fileIndex = 0           // which file is currently being read
  private var entryInFileIndex = 0L   // entry index within the current file
  private var microBatchIndex = 0     // position within the current micro-batch
  private var entriesInFileArray = Array.fill[Long](fileLocations.size)(-1L) // opening files is expensive

  // Lazily determined, cached number of entries in file i (-1L marks "not yet known").
  private def entriesInFile(i: Int) = {
    if (entriesInFileArray(i) < 0) {
      RootReaderCPPLibrary.reset(treeWalker, fileLocations(i))
      entriesInFileArray(i) = RootReaderCPPLibrary.numEntriesInCurrentTree(treeWalker)
    }
    entriesInFileArray(i)
  }

  def index = entryIndex

  // Go to a random position (not a common feature for an Iterator to have, but useful, particularly for implementing "start").
  def setIndex(index: Long) {
    if (index < start || (end >= 0 && index >= end))
      throw new IllegalArgumentException(s"The index ($index) must be between start ($start) and end ($end).")
    entryIndex = 0L
    fileIndex = 0
    entryInFileIndex = 0L
    microBatchIndex = 0
    while (entryIndex < index) {
      if (fileIndex >= entriesInFileArray.size) {
        done = true
        entryIndex = -1L
        throw new IllegalArgumentException(s"Total number of entries is ${entriesInFileArray.sum}, so $index would be beyond the last.")
      }
      if (entryIndex + entriesInFile(fileIndex) <= index) {
        // The target lies beyond the current file: account for THIS file's
        // entries before advancing fileIndex. (BUG FIX: the previous code
        // incremented fileIndex first and therefore added the *next* file's
        // entry count, mis-positioning the cursor whenever the files hold
        // different numbers of entries.)
        entryIndex += entriesInFile(fileIndex)
        fileIndex += 1
      }
      else {
        entryInFileIndex = index - entryIndex
        entryIndex = index
      }
    }
    RootReaderCPPLibrary.reset(treeWalker, fileLocations(fileIndex))
    done = false
  }
  def reset() { setIndex(0L) } // synonym

  // Go forward by one (the usual case).
  def incrementIndex() {
    entryIndex += 1L
    entryInFileIndex += 1L
    microBatchIndex += 1
    if (microBatchIndex >= microBatchSize)
      microBatchIndex = 0
    if (entryInFileIndex >= entriesInFile(fileIndex)) {
      // Stepped past the end of the current file: move to the next one, if any.
      fileIndex += 1
      entryInFileIndex = 0L
      microBatchIndex = 0
      if (fileIndex >= entriesInFileArray.size)
        done = true
      else
        RootReaderCPPLibrary.reset(treeWalker, fileLocations(fileIndex))
    }
    if (end >= 0 && entryIndex >= end)
      done = true
  }

  /** Schema of the tree, resolved by walking the first file until all types are known. */
  val schema: SchemaClass = {
    treeWalker = RootReaderCPPLibrary.newTreeWalker(fileLocations(0), treeLocation, "")
    if (RootReaderCPPLibrary.valid(treeWalker) == 0)
      throw new RuntimeException(RootReaderCPPLibrary.errorMessage(treeWalker))
    done = (RootReaderCPPLibrary.next(treeWalker) == 0)
    while (!done && RootReaderCPPLibrary.resolved(treeWalker) == 0) {
      RootReaderCPPLibrary.resolve(treeWalker)
      done = (RootReaderCPPLibrary.next(treeWalker) == 0)
    }
    Schema(treeWalker)
  }

  import scala.reflect.runtime.universe.weakTypeOf
  // Register a deserializer for the top-level schema unless the caller supplied
  // one, or TYPE is generic (Generic/AnyRef needs no compiled class).
  val allmyclasses =
    if (!(myclasses.keySet contains schema.name) && !(weakTypeOf[TYPE] =:= weakTypeOf[Generic] || weakTypeOf[TYPE] =:= weakTypeOf[AnyRef]))
      myclasses.updated(schema.name, implicitly[My[TYPE]])
    else
      myclasses

  val factory = FactoryClass[TYPE](schema, allmyclasses)

  // Position the iterator at the requested starting entry.
  setIndex(start)

  // Native scratch buffer shared with C++; doubled in size whenever a
  // micro-batch does not fit (see next()).
  private var bufferSize = new NativeLong(64*1024)
  private var buffer = new Memory(bufferSize.longValue)
  private var byteBuffer = buffer.getByteBuffer(0, bufferSize.longValue)
  private var byteBufferDataStream = new ByteBufferDataStream(byteBuffer)
  private var statusByte = 1.toByte

  def repr = RootReaderCPPLibrary.repr(treeWalker)

  // Number of entries to copy in the next micro-batch (clipped at end of file).
  private def thisMicroBatchSize =
    if (entriesInFile(fileIndex) - entryInFileIndex > microBatchSize)
      microBatchSize
    else
      (entriesInFile(fileIndex) - entryInFileIndex).toInt

  def hasNext = !done
  def next() = {
    if (done)
      throw new RuntimeException("next() called on empty RootTreeIterator (create a new one to run over the data again)")

    if (microBatchIndex == 0) {
      // Set the status byte to 1 (writing) and let C++ write to the buffer.
      statusByte = 1
      buffer.setByte(0, statusByte)
      RootReaderCPPLibrary.copyToBuffer(treeWalker, entryInFileIndex, thisMicroBatchSize, buffer, bufferSize)
      byteBuffer.rewind()
    }

    // Check the status byte to find out if copying failed due to a buffer that's too small (the only error we handle).
    statusByte = byteBuffer.get
    while (statusByte == 2) {
      // Get a new, bigger buffer (and let the old one be garbage collected).
      bufferSize = new NativeLong(bufferSize.longValue * 2L)
      buffer = new Memory(bufferSize.longValue)
      byteBuffer = buffer.getByteBuffer(0, bufferSize.longValue)
      byteBufferDataStream = new ByteBufferDataStream(byteBuffer)
      // Try, try again.
      microBatchIndex = 0
      statusByte = 1
      buffer.setByte(0, statusByte)
      RootReaderCPPLibrary.copyToBuffer(treeWalker, entryInFileIndex, thisMicroBatchSize, buffer, bufferSize)
      byteBuffer.rewind()
      statusByte = byteBuffer.get
    }

    // Interpret the data in the buffer, creating Scala objects.
    val out = factory(byteBufferDataStream)

    // Increment the counter and see if it's time to step to the next file.
    incrementIndex()
    out
  }
}
/** Factory companion for [[RootTreeIterator]]. */
object RootTreeIterator {
  /** Builds a [[RootTreeIterator]]; see the class for parameter documentation. */
  def apply[TYPE : WeakTypeTag : My](fileLocations: Seq[String],
                                     treeLocation: String,
                                     includes: Seq[String] = Nil,
                                     libs: Seq[String] = Nil,
                                     inferTypes: Boolean = false,
                                     myclasses: Map[String, My[_]] = Map[String, My[_]](),
                                     start: Long = 0L,
                                     end: Long = -1L,
                                     microBatchSize: Int = 10): RootTreeIterator[TYPE] =
    new RootTreeIterator[TYPE](
      fileLocations,
      treeLocation,
      includes,
      libs,
      inferTypes,
      myclasses,
      start,
      end,
      microBatchSize)
}
/////////////////////////////////////////////////// interface to XRootD for creating file sets and splits
/** Thin handle on a remote XRootD file system rooted at `baseurl`.
  * Construction fails fast if the server cannot be reached.
  */
class XRootD(baseurl: String) {
  // Touch the library-loading singleton before making any native calls.
  private val ensureResetSignals = LoadLibsOnce
  private val fs = RootReaderCPPLibrary.xrootdFileSystem(baseurl)
  if (fileSize("/") < 0)
    throw new IllegalArgumentException(s"""XRootD server cannot be found at "$baseurl".""")

  /** Size in bytes of the file at `path` (negative if unavailable). */
  def fileSize(path: String): Long = RootReaderCPPLibrary.xrootdFileSize(fs, path).longValue

  /** Names of the entries in the directory at `path`. */
  def listDirectory(path: String): List[String] = {
    RootReaderCPPLibrary.xrootdDirectoryBegin(fs, path)
    // Pull entries until the native cursor is exhausted (signalled by null).
    val entries = Iterator
      .continually(RootReaderCPPLibrary.xrootdDirectoryEntry(fs))
      .takeWhile(_ != null)
      .toList
    RootReaderCPPLibrary.xrootdDirectoryEnd(fs)
    entries
  }
}
/** Expands glob patterns over XRootD URLs into concrete file sets and groups
  * those files into size-balanced partitions. */
object XRootD {
  /** A concrete remote file: its full XRootD url and its size in bytes. */
  case class File(url: String, size: Long)

  // Splits a url of the form "root://host..." into (base url, remaining path).
  val URLPattern = """(root://[^\\/]*)(.*)""".r

  // Recursively expands one path component at a time: `established` is the
  // already-resolved prefix, `path` the remaining components. Components with
  // glob metacharacters are matched against a directory listing; literal
  // components are appended directly (no server round-trip needed).
  private def matches(xrootd: XRootD, established: String, path: List[String]): List[String] = path match {
    case dir :: rest =>
      val (regex, branches) = globToRegex(dir)
      if (branches) {
        val pattern = java.util.regex.Pattern.compile(regex)
        val results = xrootd.listDirectory(established + "/")
        results.filter(pattern.matcher(_).lookingAt).map(established + "/" + _).flatMap(matches(xrootd, _, rest))
      }
      else
        matches(xrootd, established + "/" + dir, rest)
    case Nil =>
      List(established)
  }

  /** Expands a glob URL into the matching files, de-duplicated and sorted with
    * the biggest file first.
    * @throws IllegalArgumentException if `globurl` is not an XRootD URL */
  def apply(globurl: String): Seq[File] = globurl match {
    case URLPattern(baseurl, pathurl) =>
      val xrootd = new XRootD(baseurl)
      val results = matches(xrootd, "", pathurl.split('/').toList).map(baseurl + _).map(x => File(x, xrootd.fileSize(x)))
      // Drop duplicates and sort results with biggest first.
      SortedSet[File](results: _*)(Ordering.by[File, Long](-_.size)).toSeq
    case _ =>
      throw new IllegalArgumentException(s"""Not an XRootD URL: "$globurl"""")
  }

  /** Distributes the files matching `globurl` over `partitions` bins, greedily
    * assigning each file (largest first, per `apply`'s ordering) to whichever
    * bin currently holds the fewest total bytes. */
  def balance(globurl: String, partitions: Int): Seq[Seq[File]] = {
    val out = Array.fill(partitions)(mutable.ListBuffer[File]())
    def size(files: Iterable[File]) = files.map(_.size).sum
    // Index of the bin with the smallest total size so far.
    def minimum = {
      var best = -1L
      var besti = 0
      0 until partitions foreach {i =>
        val s = size(out(i))
        if (best < 0 || s < best) {
          best = s
          besti = i
        }
      }
      besti
    }
    apply(globurl) foreach {file =>
      out(minimum) += file
    }
    out.map(_.toList).toSeq
  }

  // Translates a shell-style glob into a Java regex string; the Boolean reports
  // whether the pattern contains metacharacters (i.e. may match more than one
  // name), which `matches` uses to decide whether a directory listing is needed.
  // http://stackoverflow.com/a/17369948/1623645
  private def globToRegex(pattern: String): (String, Boolean) = {
    val sb = new java.lang.StringBuilder
    var inGroup = 0           // depth of {...} alternation groups
    var inClass = 0           // depth of [...] character classes
    var firstIndexInClass = -1
    val arr = pattern.toCharArray
    var i = 0
    var branches = false
    while (i < arr.length) {
      var ch = arr(i)
      ch match {
        case '\\\\' =>
          // Backslash escapes the next character (itself, if at end of pattern).
          i += 1
          if (i >= arr.length)
            sb.append('\\\\')
          else {
            var next = arr(i)
            if (next == 'Q' || next == 'E')
              sb.append('\\\\') // extra escape needed
            if (next != ',')
              sb.append('\\\\') // only one escape needed
            sb.append(next)
          }
        case '*' if (inClass == 0) =>
          branches = true
          sb.append(".*")
        case '*' =>
          sb.append("*")
        case '?' if (inClass == 0) =>
          branches = true
          sb.append('.')
        case '?' =>
          sb.append('?')
        case '[' =>
          branches = true
          inClass += 1
          firstIndexInClass = i + 1
          sb.append('[')
        case ']' =>
          inClass -= 1
          sb.append(']')
        case '.' | '(' | ')' | '+' | '|' | '^' | '$' | '@' | '%' =>
          // Regex metacharacters are escaped, except inside a character class
          // (where only a leading '^' needs the escape).
          if (inClass == 0 || (firstIndexInClass == i && ch == '^'))
            sb.append('\\\\')
          sb.append(ch)
        case '!' =>
          // Glob class negation [!...] becomes regex negation [^...].
          if (firstIndexInClass == i)
            sb.append('^')
          else
            sb.append('!')
        case '{' =>
          branches = true
          inGroup += 1
          sb.append('(')
        case '}' =>
          inGroup -= 1
          sb.append(')')
        case ',' =>
          // Comma separates alternatives only inside a {...} group.
          if (inGroup > 0)
            sb.append('|')
          else
            sb.append(',')
        case _ =>
          sb.append(ch)
      }
      i += 1
    }
    (sb.toString, branches)
  }
}
}
| diana-hep/rootconverter | scaroot-reader/src/main/scala/org/dianahep/scaroot/reader.scala | Scala | apache-2.0 | 18,163 |
package rat.client.modules
import diode.data.Pot
import diode.react.ModelProxy
import diode.react.ReactPot._
import diode.react._
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import japgolly.scalajs.react.vdom.SvgTags.{rect, svg}
import japgolly.scalajs.react.vdom.SvgAttrs.{height, style, width}
import rat.client.components.Bootstrap.{Button, Card, Panel}
import rat.client.components.Table.{TableHeading, TableHeadingItem, TableItem}
import rat.client.components.piechart.PieChartValue
import rat.client.components._
import rat.client.services.{CalculateAgreement, GetStatistics, StatisticHelper}
import rat.shared.AnnotationStatistics
/** Dashboard module: requests annotation statistics just before mounting and
  * renders them as a pie chart of task progress plus, when present, a table of
  * ontology changes. */
object Dashboard {

  // Shorthand for the shared bootstrap style sheet.
  @inline private def bss = GlobalStyles.bootstrapStyles

  class Backend($: BackendScope[ModelProxy[StatisticHelper], Unit]) {
    def render(props: ModelProxy[StatisticHelper]) = {
      // Narrow the model down to the Pot of statistics this view cares about.
      val proxy = props.zoom(_.annotationStatistics)
      <.div(
        // Card header: title plus a button that dispatches CalculateAgreement.
        Card(Card.Props(header = Some(Card.Header("Dashboard",
          Button(Button.Props(proxy.dispatch(CalculateAgreement),
            addStyles = Seq(bss.pullRight, bss.buttonXS)), "Recalculate Agreement")))),
        // Pot state handling: spinner while pending, message on failure,
        // the actual dashboard once the statistics are ready.
        proxy().renderPending(_ => <.div(^.textAlign := "center", Icon.spinnerAnimateLarge)),
        proxy().renderFailed(ex => <.div(<.p("Failed to load"))),
        proxy().renderReady(annotationStats => {
          // Pie chart slices; each label embeds its own count.
          val dta = List(
            PieChartValue(s"MyTasks:${annotationStats.myRemaining}", annotationStats.myRemaining),
            PieChartValue(s"Bronze:${annotationStats.allSubmitted}", annotationStats.allSubmitted),
            PieChartValue(s"Silver:${annotationStats.ready2Gold}", annotationStats.ready2Gold),
            PieChartValue(s"Gold:${annotationStats.goldSize}", annotationStats.goldSize)
          )
          <.div(
            <.div(^.className := "row",
              <.div(^.className := "col-md-12", ^.textAlign := "center",
                // Only draw the chart when there is something to show; empty
                // slices are filtered out so the chart stays readable.
                if (annotationStats.allSubmitted > 0) {
                  piechart.PieChart(dta.filter(_.piVal > 0))
                }
                else
                  <.h5("No submitted task found!")
              )
            ),
            // Ontology-change table, rendered only when there are entries.
            if(annotationStats.ontologyStat.nonEmpty)
              <.div(^.className := "row",
                <.div(^.className := "col-md-12",
                  Table(
                    tableHeading = TableHeading(
                      List(
                        TableHeadingItem("ID", false, false, Callback.empty),
                        TableHeadingItem("Word", false, false, Callback.empty),
                        TableHeadingItem("New ONT Type", false, false, Callback.empty),
                        TableHeadingItem("Current Words", false, false, Callback.empty)
                      )),
                    // One row per ontology entry; each cell is a single-item list.
                    tableItems = annotationStats.ontologyStat.map(t =>
                      TableItem(
                        List(
                          List(t.id),
                          List(t.word),
                          List(t.tpe),
                          List(t.currentWords.mkString(", "))
                        )
                      )
                    )
                  )
                )
              )
            else <.div()
          )
        })
      )
    }
  }

  // Stateless component: all data flows in through the ModelProxy; statistics
  // are requested once, just before the component mounts.
  val component = ReactComponentB[ModelProxy[StatisticHelper]]("Dashboard")
    .stateless
    .renderBackend[Backend]
    .componentWillMount(scope =>
      scope.props.dispatch(GetStatistics)
    )
    .build

  def apply(proxy: ModelProxy[StatisticHelper]) = component(proxy)
}
| omidb/rat | client/src/main/scala/rat/client/modules/Dashboard.scala | Scala | apache-2.0 | 3,757 |
/*
* Copyright (c) 2017 Uber Technologies, Inc. (hoodie-dev-group@uber.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package com.uber.hoodie
import java.nio.ByteBuffer
import java.sql.{Date, Timestamp}
import java.util
import com.databricks.spark.avro.SchemaConverters
import com.databricks.spark.avro.SchemaConverters.IncompatibleSchemaException
import org.apache.avro.Schema.Type._
import org.apache.avro.generic.GenericData.{Fixed, Record}
import org.apache.avro.generic.{GenericData, GenericRecord}
import org.apache.avro.{Schema, SchemaBuilder}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions.GenericRow
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import scala.collection.JavaConverters._
/** Conversions between Spark SQL DataFrames/schemas and Avro GenericRecords/schemas.
  *
  * The row<->record converter builders are adapted from spark-avro's
  * SchemaConverters (see the note on createConverterToRow).
  */
object AvroConversionUtils {

  /** Converts a DataFrame into an RDD of Avro GenericRecords.
    *
    * @param df              source DataFrame
    * @param structName      name given to the generated Avro record type
    * @param recordNamespace namespace of the generated Avro record type
    */
  def createRdd(df: DataFrame, structName: String, recordNamespace: String): RDD[GenericRecord] = {
    val dataType = df.schema
    val encoder = RowEncoder.apply(dataType).resolveAndBind()
    df.queryExecution.toRdd.map(encoder.fromRow)
      .mapPartitions { records =>
        if (records.isEmpty) Iterator.empty
        else {
          // Build the converter once per partition, not once per record.
          val convertor = createConverterToAvro(dataType, structName, recordNamespace)
          records.map { x => convertor(x).asInstanceOf[GenericRecord] }
        }
      }
  }

  /** Converts an RDD of Avro GenericRecords into a DataFrame.
    *
    * The Avro schema is shipped to the executors as a String and re-parsed per
    * partition (an Avro Schema object is not reliably serializable), while the
    * Spark StructType is derived once on the driver.
    *
    * @param rdd       records to convert
    * @param schemaStr the records' Avro schema, as a JSON string
    * @param ss        active SparkSession
    */
  def createDataFrame(rdd: RDD[GenericRecord], schemaStr: String, ss : SparkSession): Dataset[Row] = {
    if (rdd.isEmpty()) {
      ss.emptyDataFrame
    } else {
      // Parse once on the driver (Schema.Parser replaces the deprecated static
      // Schema.parse) and reuse the resulting StructType below.
      val driverSideStructType = convertAvroSchemaToStructType(new Schema.Parser().parse(schemaStr))
      ss.createDataFrame(rdd.mapPartitions { records =>
        if (records.isEmpty) Iterator.empty
        else {
          // Re-parse on the executor: only the schema *string* is serialized.
          val schema = new Schema.Parser().parse(schemaStr)
          val dataType = convertAvroSchemaToStructType(schema)
          val convertor = createConverterToRow(schema, dataType)
          records.map { x => convertor(x).asInstanceOf[Row] }
        }
      }, driverSideStructType).asInstanceOf[Dataset[Row]]
    }
  }

  /** Nested record types get a namespace extended with the element's name;
    * all other types keep the current namespace. */
  def getNewRecordNamespace(elementDataType: DataType,
                            currentRecordNamespace: String,
                            elementName: String): String = {
    elementDataType match {
      case StructType(_) => s"$currentRecordNamespace.$elementName"
      case _ => currentRecordNamespace
    }
  }

  /**
   * NOTE : This part of code is copied from com.databricks.spark.avro.SchemaConverters.scala (133:310) (spark-avro)
   *
   * Returns a converter function to convert row in avro format to GenericRow of catalyst.
   *
   * @param sourceAvroSchema Source schema before conversion inferred from avro file by passed in
   *                         by user.
   * @param targetSqlType Target catalyst sql type after the conversion.
   * @return returns a converter function to convert row in avro format to GenericRow of catalyst.
   */
  def createConverterToRow(sourceAvroSchema: Schema,
                           targetSqlType: DataType): AnyRef => AnyRef = {

    def createConverter(avroSchema: Schema,
                        sqlType: DataType, path: List[String]): AnyRef => AnyRef = {
      val avroType = avroSchema.getType
      (sqlType, avroType) match {
        // Avro strings are in Utf8, so we have to call toString on them
        case (StringType, STRING) | (StringType, ENUM) =>
          (item: AnyRef) => if (item == null) null else item.toString
        // Byte arrays are reused by avro, so we have to make a copy of them.
        case (IntegerType, INT) | (BooleanType, BOOLEAN) | (DoubleType, DOUBLE) |
             (FloatType, FLOAT) | (LongType, LONG) =>
          identity
        case (BinaryType, FIXED) =>
          (item: AnyRef) =>
            if (item == null) {
              null
            } else {
              item.asInstanceOf[Fixed].bytes().clone()
            }
        case (BinaryType, BYTES) =>
          (item: AnyRef) =>
            if (item == null) {
              null
            } else {
              val byteBuffer = item.asInstanceOf[ByteBuffer]
              val bytes = new Array[Byte](byteBuffer.remaining)
              byteBuffer.get(bytes)
              bytes
            }
        case (struct: StructType, RECORD) =>
          // Pre-compute one converter per field and the Avro positions to read
          // them from; fields absent from the Avro schema must be nullable.
          val length = struct.fields.length
          val converters = new Array[AnyRef => AnyRef](length)
          val avroFieldIndexes = new Array[Int](length)
          var i = 0
          while (i < length) {
            val sqlField = struct.fields(i)
            val avroField = avroSchema.getField(sqlField.name)
            if (avroField != null) {
              val converter = createConverter(avroField.schema(), sqlField.dataType,
                path :+ sqlField.name)
              converters(i) = converter
              avroFieldIndexes(i) = avroField.pos()
            } else if (!sqlField.nullable) {
              throw new IncompatibleSchemaException(
                s"Cannot find non-nullable field ${sqlField.name} at path ${path.mkString(".")} " +
                  "in Avro schema\\n" +
                  s"Source Avro schema: $sourceAvroSchema.\\n" +
                  s"Target Catalyst type: $targetSqlType")
            }
            i += 1
          }
          (item: AnyRef) => {
            if (item == null) {
              null
            } else {
              val record = item.asInstanceOf[GenericRecord]
              val result = new Array[Any](length)
              var i = 0
              while (i < converters.length) {
                if (converters(i) != null) {
                  val converter = converters(i)
                  result(i) = converter(record.get(avroFieldIndexes(i)))
                }
                i += 1
              }
              new GenericRow(result)
            }
          }
        case (arrayType: ArrayType, ARRAY) =>
          val elementConverter = createConverter(avroSchema.getElementType, arrayType.elementType,
            path)
          val allowsNull = arrayType.containsNull
          (item: AnyRef) => {
            if (item == null) {
              null
            } else {
              item.asInstanceOf[java.lang.Iterable[AnyRef]].asScala.map { element =>
                if (element == null && !allowsNull) {
                  throw new RuntimeException(s"Array value at path ${path.mkString(".")} is not " +
                    "allowed to be null")
                } else {
                  elementConverter(element)
                }
              }
            }
          }
        case (mapType: MapType, MAP) if mapType.keyType == StringType =>
          val valueConverter = createConverter(avroSchema.getValueType, mapType.valueType, path)
          val allowsNull = mapType.valueContainsNull
          (item: AnyRef) => {
            if (item == null) {
              null
            } else {
              item.asInstanceOf[java.util.Map[AnyRef, AnyRef]].asScala.map { x =>
                if (x._2 == null && !allowsNull) {
                  throw new RuntimeException(s"Map value at path ${path.mkString(".")} is not " +
                    "allowed to be null")
                } else {
                  (x._1.toString, valueConverter(x._2))
                }
              }.toMap
            }
          }
        case (sqlType, UNION) =>
          // Nullable unions ([null, T] or [null, T1, T2, ...]) are unwrapped;
          // [int, long] and [float, double] widen to the larger type; any other
          // union maps to a struct with one (nullable) field per branch.
          if (avroSchema.getTypes.asScala.exists(_.getType == NULL)) {
            val remainingUnionTypes = avroSchema.getTypes.asScala.filterNot(_.getType == NULL)
            if (remainingUnionTypes.size == 1) {
              createConverter(remainingUnionTypes.head, sqlType, path)
            } else {
              createConverter(Schema.createUnion(remainingUnionTypes.asJava), sqlType, path)
            }
          } else avroSchema.getTypes.asScala.map(_.getType) match {
            case Seq(t1) => createConverter(avroSchema.getTypes.get(0), sqlType, path)
            case Seq(a, b) if Set(a, b) == Set(INT, LONG) && sqlType == LongType =>
              (item: AnyRef) => {
                item match {
                  case null => null
                  case l: java.lang.Long => l
                  case i: java.lang.Integer => new java.lang.Long(i.longValue())
                }
              }
            case Seq(a, b) if Set(a, b) == Set(FLOAT, DOUBLE) && sqlType == DoubleType =>
              (item: AnyRef) => {
                item match {
                  case null => null
                  case d: java.lang.Double => d
                  case f: java.lang.Float => new java.lang.Double(f.doubleValue())
                }
              }
            case other =>
              sqlType match {
                case t: StructType if t.fields.length == avroSchema.getTypes.size =>
                  val fieldConverters = t.fields.zip(avroSchema.getTypes.asScala).map {
                    case (field, schema) =>
                      createConverter(schema, field.dataType, path :+ field.name)
                  }
                  (item: AnyRef) => if (item == null) {
                    null
                  } else {
                    // Only the field matching the value's actual branch is set.
                    val i = GenericData.get().resolveUnion(avroSchema, item)
                    val converted = new Array[Any](fieldConverters.length)
                    converted(i) = fieldConverters(i)(item)
                    new GenericRow(converted)
                  }
                case _ => throw new IncompatibleSchemaException(
                  s"Cannot convert Avro schema to catalyst type because schema at path " +
                    s"${path.mkString(".")} is not compatible " +
                    s"(avroType = $other, sqlType = $sqlType). \\n" +
                    s"Source Avro schema: $sourceAvroSchema.\\n" +
                    s"Target Catalyst type: $targetSqlType")
              }
          }
        case (left, right) =>
          throw new IncompatibleSchemaException(
            s"Cannot convert Avro schema to catalyst type because schema at path " +
              s"${path.mkString(".")} is not compatible (avroType = $left, sqlType = $right). \\n" +
              s"Source Avro schema: $sourceAvroSchema.\\n" +
              s"Target Catalyst type: $targetSqlType")
      }
    }

    createConverter(sourceAvroSchema, targetSqlType, List.empty[String])
  }

  /** Returns a converter from a Catalyst value of `dataType` to its Avro
    * representation (records become GenericData.Record, maps/arrays become
    * java.util collections, timestamps/dates become epoch-millis longs). */
  def createConverterToAvro(dataType: DataType,
                            structName: String,
                            recordNamespace: String): Any => Any = {
    dataType match {
      case BinaryType => (item: Any) =>
        item match {
          case null => null
          case bytes: Array[Byte] => ByteBuffer.wrap(bytes)
        }
      case IntegerType | LongType |
           FloatType | DoubleType | StringType | BooleanType => identity
      case ByteType => (item: Any) =>
        if (item == null) null else item.asInstanceOf[Byte].intValue
      case ShortType => (item: Any) =>
        if (item == null) null else item.asInstanceOf[Short].intValue
      case _: DecimalType => (item: Any) => if (item == null) null else item.toString
      case TimestampType => (item: Any) =>
        if (item == null) null else item.asInstanceOf[Timestamp].getTime
      case DateType => (item: Any) =>
        if (item == null) null else item.asInstanceOf[Date].getTime
      case ArrayType(elementType, _) =>
        val elementConverter = createConverterToAvro(
          elementType,
          structName,
          getNewRecordNamespace(elementType, recordNamespace, structName))
        (item: Any) => {
          if (item == null) {
            null
          } else {
            val sourceArray = item.asInstanceOf[Seq[Any]]
            val sourceArraySize = sourceArray.size
            val targetList = new util.ArrayList[Any](sourceArraySize)
            var idx = 0
            while (idx < sourceArraySize) {
              targetList.add(elementConverter(sourceArray(idx)))
              idx += 1
            }
            targetList
          }
        }
      case MapType(StringType, valueType, _) =>
        val valueConverter = createConverterToAvro(
          valueType,
          structName,
          getNewRecordNamespace(valueType, recordNamespace, structName))
        (item: Any) => {
          if (item == null) {
            null
          } else {
            val javaMap = new util.HashMap[String, Any]()
            item.asInstanceOf[Map[String, Any]].foreach { case (key, value) =>
              javaMap.put(key, valueConverter(value))
            }
            javaMap
          }
        }
      case structType: StructType =>
        val builder = SchemaBuilder.record(structName).namespace(recordNamespace)
        val schema: Schema = SchemaConverters.convertStructToAvro(
          structType, builder, recordNamespace)
        val fieldConverters = structType.fields.map(field =>
          createConverterToAvro(
            field.dataType,
            field.name,
            getNewRecordNamespace(field.dataType, recordNamespace, field.name)))
        (item: Any) => {
          if (item == null) {
            null
          } else {
            val record = new Record(schema)
            val convertersIterator = fieldConverters.iterator
            val fieldNamesIterator = dataType.asInstanceOf[StructType].fieldNames.iterator
            val rowIterator = item.asInstanceOf[Row].toSeq.iterator

            while (convertersIterator.hasNext) {
              val converter = convertersIterator.next()
              record.put(fieldNamesIterator.next(), converter(rowIterator.next()))
            }
            record
          }
        }
    }
  }

  /** Builds the Avro record schema equivalent to `structType`. */
  def convertStructTypeToAvroSchema(structType: StructType,
                                    structName: String,
                                    recordNamespace: String): Schema = {
    val builder = SchemaBuilder.record(structName).namespace(recordNamespace)
    SchemaConverters.convertStructToAvro(structType, builder, recordNamespace)
  }

  /** Derives the Spark StructType equivalent to an Avro record schema. */
  def convertAvroSchemaToStructType(avroSchema: Schema): StructType = {
    SchemaConverters.toSqlType(avroSchema).dataType.asInstanceOf[StructType]
  }
}
| vinothchandar/hoodie | hoodie-spark/src/main/scala/com/uber/hoodie/AvroConversionUtils.scala | Scala | apache-2.0 | 14,573 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.server
import java.net._
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.{ Properties, Try }
import akka.actor.Props
import org.ensime.AkkaBackCompat
import org.ensime.fixture._
import org.ensime.util.EnsimeSpec
import org.ensime.util.ensimefile.Implicits.DefaultCharset
import org.ensime.util.file._
/** Integration tests for server startup: the server must bind to random ports by
  * default, honour preferred ports written into the cache dir, and shut itself
  * down when a preferred port is already taken.
  *
  * NOTE(review): the original `eventually` blocks ended in bare Boolean
  * expressions; `eventually` only retries while its block THROWS, so those
  * checks could never fail. They are now wrapped in `assert`.
  */
class ServerStartupSpec extends EnsimeSpec
    with IsolatedEnsimeConfigFixture
    with IsolatedTestKitFixture
    with AkkaBackCompat {

  val original = EnsimeConfigFixture.EmptyTestProject

  Properties.setProp("ensime.server.test", "true")

  "Server" should "start up and bind to random ports" in {
    withEnsimeConfig { implicit config =>
      withTestKit { implicit tk =>
        import tk._

        val protocol = new SwankProtocol
        system.actorOf(Props(new ServerActor(config, protocol)), "ensime-main")

        eventually(timeout(30 seconds), interval(1 second)) {
          // assert so that eventually actually retries until both port files appear
          assert(PortUtil.port(config.cacheDir, "http").isDefined)
          assert(PortUtil.port(config.cacheDir, "port").isDefined)
        }
      }
    }
  }

  it should "start up and bind to preferred ports" in {
    withEnsimeConfig { implicit config =>
      withTestKit { implicit tk =>
        import tk._

        // this can fail randomly. No general solution.
        val preferredHttp = 10001
        val preferredTcp = 10002

        (config.cacheDir / "http").writeString(preferredHttp.toString)
        (config.cacheDir / "port").writeString(preferredTcp.toString)

        val protocol = new SwankProtocol
        system.actorOf(Props(new ServerActor(config, protocol)), "ensime-main")

        eventually(timeout(30 seconds), interval(1 second)) {
          val http = new Socket
          val tcp = new Socket
          try {
            http.connect(new InetSocketAddress("127.0.0.1", preferredHttp))
            tcp.connect(new InetSocketAddress("127.0.0.1", preferredTcp))
            // assert so that a failed connection makes eventually retry
            assert(http.isConnected() && tcp.isConnected())
          } finally {
            Try(http.close())
            Try(tcp.close())
          }
        }
      }
    }
  }

  it should "shutdown if preferred TCP port is not available" in {
    withEnsimeConfig { implicit config =>
      withTestKit { implicit tk =>
        import tk._

        val preferredTcp = 10004
        (config.cacheDir / "port").writeString(preferredTcp.toString)

        // Hold the preferred port so the server cannot bind it; keep the
        // ServerSocket reference so the port can be released afterwards (the
        // original bound it anonymously and leaked it).
        val tcpHog = new ServerSocket()
        try {
          tcpHog.bind(new InetSocketAddress("127.0.0.1", preferredTcp))

          val protocol = new SwankProtocol
          system.actorOf(Props(new ServerActor(config, protocol)), "ensime-main")

          Await.result(system.whenTerminated, akkaTimeout.duration)
        } finally Try(tcpHog.close())
      }
    }
  }

  it should "shutdown if preferred HTTP port is not available" in {
    withEnsimeConfig { implicit config =>
      withTestKit { implicit tk =>
        import tk._

        val preferredHttp = 10003
        (config.cacheDir / "http").writeString(preferredHttp.toString)

        // Same port-hogging pattern as the TCP variant above.
        val httpHog = new ServerSocket()
        try {
          httpHog.bind(new InetSocketAddress("127.0.0.1", preferredHttp))

          val protocol = new SwankProtocol
          system.actorOf(Props(new ServerActor(config, protocol)), "ensime-main")

          Await.result(system.whenTerminated, akkaTimeout.duration)
        } finally Try(httpHog.close())
      }
    }
  }
}
| espinhogr/ensime-server | server/src/it/scala/org/ensime/server/ServerStartupSpec.scala | Scala | gpl-3.0 | 3,381 |
import sys.process._
import java.io._
import scala.language.postfixOps

// Dump the hash of every block known to the local bitcoind to blockHashes.txt,
// one per line, in block-height order.
val totalBlocks = ("bitcoin-cli getblockcount" !!).trim.toInt

// Fetch hashes in parallel (each element shells out to bitcoin-cli), but
// convert back to a sequential collection before writing: `foreach` on a
// parallel collection runs concurrently, which would interleave writes to the
// (non-thread-safe) PrintWriter and scramble the output order. `.par.map`
// itself preserves element order in the resulting collection.
val hashes = (0 to totalBlocks).par.map(height => s"bitcoin-cli getblockhash $height" !!).seq

val writer = new PrintWriter(new File("blockHashes.txt"))
try hashes.foreach(writer.write)
finally writer.close() // close (and flush) even if a write fails
/* Copyright 2009-2018 EPFL, Lausanne */
package inox
package transformers
import utils._
/** A [[SimplifierWithPC]] whose path condition is tracked purely syntactically:
  * an expression is considered implied only if it occurs verbatim among the
  * recorded conditions (no solver involvement). */
trait SimplifierWithPath extends SimplifierWithPC {
  import trees._
  import symbols._

  /** Path environment made of
    *  - `conditions`: the expressions known to hold on the current path, and
    *  - `exprSubst`: bindings from variables to "expandable" values (ADTs,
    *    tuples, lambdas) consulted by `expand`. */
  class Env private(
    private val conditions: Set[Expr],
    private val exprSubst: Map[Variable, Expr]
  ) extends PathLike[Env] with SolvingPath {

    // Record a binding only when its value can be usefully expanded later:
    // constructor-like expressions directly, and variables through whatever
    // they themselves expand to. Other bindings carry no information here.
    override def withBinding(p: (ValDef, Expr)): Env = p match {
      case (vd, expr @ (_: ADT | _: Tuple | _: Lambda)) =>
        new Env(conditions, exprSubst + (vd.toVariable -> expr))
      case (vd, v: Variable) =>
        val exp = expand(v)
        if (v != exp) new Env(conditions, exprSubst + (vd.toVariable -> exp))
        else this
      case _ => this
    }

    override def withBound(vd: ValDef): Env = this // We don't need to track such bounds.

    override def withCond(cond: Expr): Env = new Env(conditions + cond, exprSubst)

    // Negation collapses all recorded conditions into one negated conjunction.
    override def negate: Env = new Env(Set(not(and(conditions.toSeq : _*))), exprSubst)

    // Merging keeps the union of both paths' conditions and substitutions.
    override def merge(that: Env): Env =
      new Env(conditions ++ that.conditions, exprSubst ++ that.exprSubst)

    // Follow a variable's recorded binding (one step); non-variables pass through.
    override def expand(expr: Expr): Expr = expr match {
      case v: Variable => exprSubst.getOrElse(v, v)
      case _ => expr
    }

    // Purely syntactic entailment: exact set membership, no solving.
    override def implies(expr: Expr): Boolean = conditions contains expr

    override def toString: String = conditions.toString
  }

  implicit object Env extends PathProvider[Env] {
    def empty = new Env(Set(), Map())
  }

  override def initEnv = Env.empty
}
| romac/inox | src/main/scala/inox/transformers/SimplifierWithPath.scala | Scala | apache-2.0 | 1,490 |
package unfiltered.response
import org.specs2.mutable._
/** Served (jetty) spec verifying that the Json response function serializes a
  * json4s AST to the expected body and Content-Type header. */
object JsonSpec extends Specification with unfiltered.specs2.jetty.Served {
  import unfiltered.response._
  import unfiltered.request.{Path => UFPath}
  import org.json4s.JsonDSL._

  // Plan under test: respond to "/" with a small JSON object.
  class TestPlan extends unfiltered.filter.Plan {
    def intent = {
      case UFPath("/") =>
        Json(("foo" -> "bar") ~ ("baz" -> "boom"))
    }
  }

  def setup = { _.plan(new TestPlan) }

  "Json Response should" should {
    "produce a json response" in {
      // Issue the request and capture both the body and the Content-Type header.
      val (body, contentType) = http(host <:< Map("Accept" -> "application/json") >+ { r =>
        (r as_str, r >:> { _.filterKeys { _ == "Content-Type" } })
      })
      body must_== """{"foo":"bar","baz":"boom"}"""
      contentType must haveValue(Set("application/json; charset=utf-8"))
    }
  }
}
| beni55/unfiltered | json4s/src/test/scala/JsonSpec.scala | Scala | mit | 814 |
//package dsentric.operators
//
//import dsentric._
//import dsentric.failure._
//import org.scalatest.funspec.AnyFunSpec
//import org.scalatest.matchers.should.Matchers
//
//class ContractValidationSpec extends AnyFunSpec with Matchers {
//
// import Dsentric._
// import PessimisticCodecs._
//
// describe("Contract validation") {
// describe("Contract structure") {
// object Empty extends Contract
//
// it("Should validate an empty Contract") {
// Empty.$ops.validate(DObject.empty) shouldBe ValidationFailures.empty
// Empty.$ops.validate(DObject("key" := "value")) shouldBe ValidationFailures.empty
// }
// it("Should validate deltas") {
// Empty.$ops.validate(DObject("key" := "value"), DObject("key" := 123)) shouldBe ValidationFailures.empty
// Empty.$ops.validate(DObject("key" := DNull), DObject("key" := 123)) shouldBe ValidationFailures.empty
// }
// }
// }
//
// describe("Expected field validation") {
// describe("Expected field structure") {
// object ExpectedField extends Contract {
// val exp = \\[String]
// }
// it("Should fail if field not found") {
// ExpectedField.$ops.validate(DObject.empty) should contain(ExpectedFailure(ExpectedField.exp))
// }
// it("Should fail if field is of wrong type") {
// ExpectedField.$ops.validate(DObject("exp" := false)) should contain(IncorrectTypeFailure(ExpectedField.exp, false))
// }
// it("Should succeed if field exists and is of correct type") {
// ExpectedField.$ops.validate(DObject("exp" := "test")) shouldBe ValidationFailures.empty
// }
// it("Should fail if delta and field is empty") {
// ExpectedField.$ops.validate(DObject.empty, DObject.empty) should contain(ExpectedFailure(ExpectedField.exp))
// }
// it("Should fail if delta is incorrect type") {
// ExpectedField.$ops.validate(DObject("exp" := false), DObject("exp" := "test")) should contain(IncorrectTypeFailure(ExpectedField.exp, false))
// }
// it("Should succeed if delta is correct type") {
// ExpectedField.$ops.validate(DObject("exp" := "test"), DObject("exp" := "test2")) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta is correct type and state is incorrect") {
// ExpectedField.$ops.validate(DObject("exp" := "test"), DObject("exp" := false)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta is empty and state is incorrect") {
// ExpectedField.$ops.validate(DObject.empty, DObject("exp" := false)) shouldBe ValidationFailures.empty
// }
// it("Should fail if delta is null") {
// ExpectedField.$ops.validate(DObject("exp" := DNull), DObject("exp" := "test")) should contain(ExpectedFailure(ExpectedField.exp))
// }
// }
// describe("Expected field with validator") {
// object ExpectedValidator extends Contract {
// val expGT = \\[Int](Validators.>(5))
// }
// it("Should succeed if value is valid") {
// ExpectedValidator.$ops.validate(DObject("expGT" := 6)) shouldBe ValidationFailures.empty
// }
// it("Should fail if value is invalid") {
// ExpectedValidator.$ops.validate(DObject("expGT" := 3)) should contain(NumericalFailure(ExpectedValidator, Path("expGT"), 3, 5, "greater than"))
// }
// it("Should succeed if delta is valid and state is invalid") {
// ExpectedValidator.$ops.validate(DObject("expGT" := 6), DObject("expGT" := 3)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta is valid and state is valid") {
// ExpectedValidator.$ops.validate(DObject("expGT" := 6), DObject("expGT" := 7)) shouldBe ValidationFailures.empty
// }
// it("Should fail if delta is invalid") {
// ExpectedValidator.$ops.validate(DObject("expGT" := 3), DObject("expGT" := 7)) should contain(NumericalFailure(ExpectedValidator, Path("expGT"), 3, 5, "greater than"))
// }
// }
// }
//
// describe("Maybe field validation") {
// describe("Maybe field structure") {
// object MaybeField extends Contract {
// val myb = \\?[Long]
// }
// it("Should succeed if field not found") {
// MaybeField.$ops.validate(DObject.empty) shouldBe ValidationFailures.empty
// }
// it("Should fail if field is of wrong type") {
// MaybeField.$ops.validate(DObject("myb" := false)) should contain(IncorrectTypeFailure(MaybeField.myb, false))
// }
// it("Should succeed if field exists and is of correct type") {
// MaybeField.$ops.validate(DObject("myb" := 434)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta and field is empty") {
// MaybeField.$ops.validate(DObject.empty, DObject.empty) shouldBe ValidationFailures.empty
// }
// it("Should fail if delta is incorrect type") {
// MaybeField.$ops.validate(DObject("myb" := false), DObject("myb" := 1324)) should contain(IncorrectTypeFailure(MaybeField.myb, false))
// }
// it("Should succeed if delta is correct type") {
// MaybeField.$ops.validate(DObject("myb" := 4123), DObject("myb" := 432)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta is correct type and state is incorrect") {
// MaybeField.$ops.validate(DObject("myb" := 1234), DObject("myb" := false)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta is empty and state is incorrect") {
// MaybeField.$ops.validate(DObject.empty, DObject("myb" := false)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta is null") {
// MaybeField.$ops.validate(DObject("myb" := DNull), DObject("myb" := "test")) shouldBe ValidationFailures.empty
// }
// }
//      describe("Maybe field with validator") {
// object MaybeValidator extends Contract {
// val mybGT = \\?[Int](Validators.>(5))
// }
// it("Should succeed if value is valid") {
// MaybeValidator.$ops.validate(DObject("mybGT" := 6)) shouldBe ValidationFailures.empty
// }
// it("Should fail if value is invalid") {
// MaybeValidator.$ops.validate(DObject("mybGT" := 3)) should contain(NumericalFailure(MaybeValidator, Path("mybGT"), 3, 5, "greater than"))
// }
// it("Should succeed if delta is valid and state is invalid") {
// MaybeValidator.$ops.validate(DObject("mybGT" := 6), DObject("mybGT" := 3)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta is valid and state is valid") {
// MaybeValidator.$ops.validate(DObject("mybGT" := 6), DObject("mybGT" := 7)) shouldBe ValidationFailures.empty
// }
// it("Should fail if delta is invalid") {
// MaybeValidator.$ops.validate(DObject("mybGT" := 3), DObject("mybGT" := 7)) should contain(NumericalFailure(MaybeValidator, Path("mybGT"), 3, 5, "greater than"))
// }
// }
// }
//
// describe("Default field validation") {
// describe("Default field structure") {
// object DefaultField extends Contract {
// val dfl = \\
// }
// it("Should succeed if field not found") {
// DefaultField.$ops.validate(DObject.empty) shouldBe ValidationFailures.empty
// }
// it("Should fail if field is of wrong type") {
// DefaultField.$ops.validate(DObject("dfl" := false)) should contain(IncorrectTypeFailure(DefaultField.dfl, false))
// }
// it("Should succeed if field exists and is of correct type") {
// DefaultField.$ops.validate(DObject("dfl" := 5312)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta and field is empty") {
// DefaultField.$ops.validate(DObject.empty, DObject.empty) shouldBe ValidationFailures.empty
// }
// it("Should fail if delta is incorrect type") {
// DefaultField.$ops.validate(DObject("dfl" := false), DObject("dfl" := 1324)) should contain(IncorrectTypeFailure(DefaultField.dfl, false))
// }
// it("Should succeed if delta is correct type") {
// DefaultField.$ops.validate(DObject("dfl" := 123), DObject("dfl" := 5122)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta is correct type and state is incorrect") {
// DefaultField.$ops.validate(DObject("dfl" := 5321), DObject("dfl" := false)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta is empty and state is incorrect") {
// DefaultField.$ops.validate(DObject.empty, DObject("dfl" := false)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta is null") {
// DefaultField.$ops.validate(DObject("dfl" := DNull), DObject("dfl" := "test")) shouldBe ValidationFailures.empty
// }
// }
// describe("Default field with validator") {
// object DefaultValidator extends Contract {
// val dflGT = \\?[Int](Validators.>(5))
// }
// it("Should succeed if value is valid") {
// DefaultValidator.$ops.validate(DObject("dflGT" := 6)) shouldBe ValidationFailures.empty
// }
// it("Should fail if value is invalid") {
// DefaultValidator.$ops.validate(DObject("dflGT" := 3)) should contain(NumericalFailure(DefaultValidator, Path("dflGT"), 3, 5, "greater than"))
// }
// it("Should succeed if delta is valid and state is invalid") {
// DefaultValidator.$ops.validate(DObject("dflGT" := 6), DObject("dflGT" := 3)) shouldBe ValidationFailures.empty
// }
// it("Should succeed if delta is valid and state is valid") {
// DefaultValidator.$ops.validate(DObject("dflGT" := 6), DObject("dflGT" := 7)) shouldBe ValidationFailures.empty
// }
// it("Should fail if delta is invalid") {
// DefaultValidator.$ops.validate(DObject("dflGT" := 3), DObject("dflGT" := 7)) should contain(NumericalFailure(DefaultValidator, Path("dflGT"), 3, 5, "greater than"))
// }
// }
// }
//
// describe("Expected object validation") {
//
// describe("Nested object structure") {
// object ExpectedEmpty extends Contract {
// val nested = new \\\\ {}
// }
// object ExpectedExpected extends Contract {
// val nested = new \\\\ {
// val exp = \\[String]
// }
// }
// object ExpectedMaybe extends Contract {
// val nested = new \\\\ {
// val myb = \\?[String]
// }
// }
// it("Should be valid if object is empty and no expected properties") {
// ExpectedEmpty.$ops.validate(DObject.empty) shouldBe ValidationFailures.empty
// }
// it("Should be valid if object is empty and only maybe properties") {
// ExpectedMaybe.$ops.validate(DObject.empty) shouldBe ValidationFailures.empty
// }
// it("Should fail if object is empty and has expected properties") {
// ExpectedExpected.$ops.validate(DObject.empty) should contain(ExpectedFailure(ExpectedExpected.nested.exp))
// }
// it("Should fail if object is not an object") {
// ExpectedExpected.$ops.validate(DObject("nested" := 123)) should contain(IncorrectTypeFailure(ExpectedExpected.nested, 123))
// }
// describe("with deltas") {
// it("Should succeed if nested is null and object has no expected properties") {
// ExpectedEmpty.$ops.validate(DObject("nested" := DNull), DObject("nested" ::= ("value" := 123))) shouldBe ValidationFailures.empty
// ExpectedMaybe.$ops.validate(DObject("nested" := DNull), DObject("nested" ::= ("myb" := "value"))) shouldBe ValidationFailures.empty
// }
// it("Should fail if nested is null and object has expected properties") {
// ExpectedExpected.$ops.validate(DObject("nested" := DNull), DObject("nested" ::= ("exp" := "value"))) should contain(ExpectedFailure(ExpectedExpected.nested.exp))
// }
// it("Should fail if contents fail") {
// ExpectedExpected.$ops.validate(DObject("nested" ::= ("exp" := DNull)), DObject("nested" ::= ("exp" := "value"))) should contain(ExpectedFailure(ExpectedExpected.nested.exp))
// }
// it("Should succeed if contents succeed") {
// ExpectedExpected.$ops.validate(DObject("nested" ::= ("exp" := "value2")), DObject("nested" ::= ("exp" := "value"))) shouldBe ValidationFailures.empty
// }
// }
// }
// describe("Nested object validation") {
// object ExpectedValid extends Contract {
// val noRemoval = new \\\\(Validators.noKeyRemoval) {
// val myb = \\?[String](Validators.nonEmptyOrWhiteSpace)
// }
// val oneOrTwo = new \\\\(Validators.minLength(1) && Validators.maxLength(2)) {}
// }
// it("Should succeed if object validation succeeds") {
// ExpectedValid.$ops.validate(DObject("noRemoval" ::= ("myb" := "value"), "oneOrTwo" ::= ("value" := false)))
// }
// it("Should fail if empty expected object would fail and no object value provided") {
// ExpectedValid.$ops.validate(DObject.empty) should contain(MinimumLengthFailure(ExpectedValid, ExpectedValid.oneOrTwo._path, 1, 0))
// }
// it("Should fail if object validation fails") {
// ExpectedValid.$ops.validate(DObject("oneOrTwo" ::= ("value" := false, "value2" := 123, "value3" := "v"))) should contain (MaximumLengthFailure(ExpectedValid, ExpectedValid.oneOrTwo._path, 2, 3))
// }
// it("Should fail if nested property fails") {
// ExpectedValid.$ops.validate(DObject("noRemoval" ::= ("myb" := ""), "oneOrTwo" ::= ("value" := false))) should contain (NonEmptyOrWhitespaceFailure(ExpectedValid, ExpectedValid.noRemoval.myb._path))
// }
// describe("with deltas") {
// it("Should fail on Null if empty object would fail") {
// ExpectedValid.$ops.validate(DObject("oneOrTwo" := DNull), DObject("oneOrTwo" ::= ("value" := false))) should contain (MinimumLengthFailure(ExpectedValid, ExpectedValid.oneOrTwo._path, 1, 0))
// ExpectedValid.$ops.validate(DObject("noRemoval" := DNull), DObject("noRemoval" ::= ("myb" := "value"))) should contain (KeyRemovalFailure(ExpectedValid, ExpectedValid.noRemoval._path, "myb"))
// }
// it("Should fail on delta if final state fails") {
// ExpectedValid.$ops.validate(DObject("oneOrTwo" ::= ("value3" := 123)), DObject("oneOrTwo" ::= ("value" := false, "value2" := "b"))) should contain (MaximumLengthFailure(ExpectedValid, ExpectedValid.oneOrTwo._path, 2, 3))
// ExpectedValid.$ops.validate(DObject("noRemoval" ::= ("remove" := DNull)), DObject("noRemoval" ::= ("myb" := "value", "remove" := 3))) should contain (KeyRemovalFailure(ExpectedValid, ExpectedValid.noRemoval._path, "remove"))
// }
//        it("Should succeed if delta makes initial bad state, correct") {
// ExpectedValid.$ops.validate(DObject("oneOrTwo" ::= ("value3" := DNull)), DObject("oneOrTwo" ::= ("value" := false, "value2" := 123, "value3" := "vb"))) shouldBe ValidationFailures.empty
// }
// }
// }
// }
//
// describe("Maybe object validation") {
//
// describe("Nested object structure") {
// object MaybeEmpty extends Contract {
// val nested = new \\\\? {}
// }
// object MaybeExpected extends Contract {
// val nested = new \\\\? {
// val exp = \\[String]
// }
// }
// object MaybeMaybe extends Contract {
// val nested = new \\\\? {
// val myb = \\?[String]
// }
// }
// it("Should be valid if object is empty and no expected properties") {
// MaybeEmpty.$ops.validate(DObject.empty) shouldBe ValidationFailures.empty
// }
// it("Should be valid if object is empty and only maybe properties") {
// MaybeMaybe.$ops.validate(DObject.empty) shouldBe ValidationFailures.empty
// }
// it("Should be valid if object is empty and has expected properties") {
// MaybeExpected.$ops.validate(DObject.empty) shouldBe ValidationFailures.empty
// }
// it("Should fail if object is not an object") {
// MaybeExpected.$ops.validate(DObject("nested" := 123)) should contain(IncorrectTypeFailure(MaybeExpected.nested, 123))
// }
// describe("with deltas") {
// it("Should succeed if nested is null and object has no expected properties") {
// MaybeEmpty.$ops.validate(DObject("nested" := DNull), DObject("nested" ::= ("value" := 123))) shouldBe ValidationFailures.empty
// MaybeMaybe.$ops.validate(DObject("nested" := DNull), DObject("nested" ::= ("myb" := "value"))) shouldBe ValidationFailures.empty
// }
// it("Should succeed if nested is null and object has expected properties") {
// MaybeExpected.$ops.validate(DObject("nested" := DNull), DObject("nested" ::= ("exp" := "value"))) shouldBe ValidationFailures.empty
// }
// it("Should fail if contents fail") {
// MaybeExpected.$ops.validate(DObject("nested" ::= ("exp" := DNull)), DObject("nested" ::= ("exp" := "value"))) should contain(ExpectedFailure(MaybeExpected.nested.exp))
// }
// it("Should succeed if contents succeed") {
// MaybeExpected.$ops.validate(DObject("nested" ::= ("exp" := "value2")), DObject("nested" ::= ("exp" := "value"))) shouldBe ValidationFailures.empty
// }
// }
// }
// describe("Nested object validation") {
// object MaybeValid extends Contract {
// val noRemoval = new \\\\?(Validators.noKeyRemoval) {
// val myb = \\?[String](Validators.nonEmptyOrWhiteSpace)
// }
// val oneOrTwo = new \\\\?(Validators.minLength(1) && Validators.maxLength(2)) {}
//
// val reserved = new \\\\?(Validators.reserved)
// }
// it("Should succeed if object validation succeeds") {
// MaybeValid.$ops.validate(DObject("noRemoval" ::= ("myb" := "value"), "oneOrTwo" ::= ("value" := false)))
// }
// it("Should succeed if empty expected object would fail and no object value provided") {
// MaybeValid.$ops.validate(DObject.empty) shouldBe ValidationFailures.empty
// }
// it("Should fail if object validation fails") {
// MaybeValid.$ops.validate(DObject("oneOrTwo" ::= ("value" := false, "value2" := 123, "value3" := "v"))) should contain (MaximumLengthFailure(MaybeValid, MaybeValid.oneOrTwo._path, 2, 3))
// }
// it("Should fail if nested property fails") {
// MaybeValid.$ops.validate(DObject("noRemoval" ::= ("myb" := ""), "oneOrTwo" ::= ("value" := false))) should contain (NonEmptyOrWhitespaceFailure(MaybeValid, MaybeValid.noRemoval.myb._path))
// }
// it("Should fail if reserved element exists") {
// MaybeValid.$ops.validate(DObject("reserved" ::= ("value" := 1))) should contain (ReservedFailure(MaybeValid, MaybeValid.reserved._path))
// }
// describe("with deltas") {
// it("Should succeed on Null even if empty object would fail") {
// MaybeValid.$ops.validate(DObject("oneOrTwo" := DNull), DObject("oneOrTwo" ::= ("value" := false))) shouldBe ValidationFailures.empty
// MaybeValid.$ops.validate(DObject("noRemoval" := DNull), DObject("noRemoval" ::= ("myb" := "value"))) shouldBe ValidationFailures.empty
// }
// it("Should fail on delta if final state fails") {
// MaybeValid.$ops.validate(DObject("oneOrTwo" ::= ("value3" := 123)), DObject("oneOrTwo" ::= ("value" := false, "value2" := "b"))) should contain (MaximumLengthFailure(MaybeValid, MaybeValid.oneOrTwo._path, 2, 3))
// MaybeValid.$ops.validate(DObject("noRemoval" ::= ("remove" := DNull)), DObject("noRemoval" ::= ("myb" := "value", "remove" := 3))) should contain (KeyRemovalFailure(MaybeValid, MaybeValid.noRemoval._path, "remove"))
// }
//        it("Should succeed if delta makes initial bad state, correct") {
// MaybeValid.$ops.validate(DObject("oneOrTwo" ::= ("value3" := DNull)), DObject("oneOrTwo" ::= ("value" := false, "value2" := 123, "value3" := "vb"))) shouldBe ValidationFailures.empty
// }
// }
// }
// }
//
// describe("Objects validation") {
// describe("Objects structure") {
// object ObjectContract extends Contract{
// val exp = \\[Int]
// }
// object Objects extends Contract {
// val objects = \\::(ObjectContract)
// }
// it("Should succeed if objects is empty") {
// Objects.$ops.validate(DObject.empty) shouldBe ValidationFailures.empty
// }
// it("Should succeed if object is an empty vector") {
// Objects.$ops.validate(DObject("objects" := Vector.empty[DObject])) shouldBe ValidationFailures.empty
// }
// it("Should fail if objects is not a vector of objects") {
// Objects.$ops.validate(DObject("objects" := Vector("one", "two"))) should contain (IncorrectTypeFailure(Objects, Objects.objects._path \\ 0, Objects.objects._valueCodec, "one"))
// }
// it("Should succeed if objects contains valid objects") {
// Objects.$ops.validate(DObject("objects" := Vector(DObject("exp" := 1), DObject("exp" := 2)))) shouldBe ValidationFailures.empty
// }
// it("Should fail if contained objects are invalid") {
// Objects.$ops.validate(DObject("objects" := Vector(DObject("exp" := 1), DObject.empty))) should contain (ExpectedFailure(Objects, Objects.objects._path \\ 1 \\ "exp"))
// Objects.$ops.validate(DObject("objects" := Vector(DObject("exp" := 1, "additional" := "failed")))) should contain (ClosedContractFailure(Objects, Objects.objects._path \\ 0, "additional"))
// }
// describe("with deltas") {
// it("Should succeed if objects is null") {
// Objects.$ops.validate(DObject("objects" := DNull), DObject("objects" := Vector(DObject("exp" := 1), DObject("exp" := 2)))) shouldBe ValidationFailures.empty
// }
// it("Should fail if contents fail") {
// Objects.$ops.validate(
// DObject("objects" := Vector(DObject("exp" := "value"))),
// DObject("objects" := Vector(DObject("exp" := 1), DObject("exp" := 2)))
// ) should contain (IncorrectTypeFailure(Objects, Objects.objects._path \\ 0 \\ "exp", ObjectContract.exp._codec, "value"))
// }
// it("Should succeed if contents succeed") {
// Objects.$ops.validate(
// DObject("objects" := Vector(DObject("exp" := 3))),
// DObject("objects" := Vector(DObject("exp" := 1), DObject("exp" := 2)))
// ) shouldBe ValidationFailures.empty
// }
// }
// }
// describe("Objects validation") {
// object ObjectContract extends Contract {
// val exp = \\[Int](Validators.>(3))
// val maybe = \\?[String](Validators.nonEmptyOrWhiteSpace)
// }
// object LengthObjects extends Contract {
// val objects = \\::(ObjectContract, Validators.nonEmpty)
// }
// object ReservedObjects extends Contract {
// val objects = \\::(ObjectContract, Validators.reserved)
// }
// it("Should return validation failure if objects validation is invalid") {
// LengthObjects.$ops.validate(DObject.empty) should contain (MinimumLengthFailure(LengthObjects, LengthObjects.objects._path, 1, 0))
// LengthObjects.$ops.validate(DObject("objects" := Vector.empty[DObject])) should contain (MinimumLengthFailure(LengthObjects, LengthObjects.objects._path, 1, 0))
// }
// it("Should return reserved failure") {
// ReservedObjects.$ops.validate(DObject("objects" := Vector(DObject("exp" := 5)))) should contain(ReservedFailure(ReservedObjects, ReservedObjects.objects._path))
// }
// it("Should return validation failures for objects") {
// val base = DObject("objects" := Vector(DObject("exp" := 3, "maybe" := "\\t")))
// val failures = LengthObjects.$ops.validate(base)
// failures should contain (NumericalFailure(LengthObjects, LengthObjects.objects._path \\ 0 \\ "exp", 3, 3, "greater than"))
// failures should contain (NonEmptyOrWhitespaceFailure(LengthObjects, LengthObjects.objects._path \\ 0 \\ "maybe"))
// }
//
// describe("With deltas") {
// it("Should fail if contents fail") {
// LengthObjects.$ops.validate(
// DObject("objects" := Vector(DObject("exp" := 2))),
// DObject("objects" := Vector(DObject("exp" := 5), DObject("exp" := 7, "maybe" := "value")))
// ) should contain (NumericalFailure(LengthObjects, LengthObjects.objects._path \\ 0 \\ "exp", 2, 3, "greater than"))
// }
// it("Should succeed if content succeeds") {
// LengthObjects.$ops.validate(
// DObject("objects" := Vector(DObject("exp" := 5), DObject("exp" := 7, "maybe" := "value"))),
// DObject("objects" := Vector(DObject("exp" := 2)))
// ) shouldBe ValidationFailures.empty
// }
// }
// }
// }
//
// describe("Map objects validation") {
// object ObjectContract extends Contract {
// val exp = \\[Int](Validators.>(3))
// val maybe = \\?[String](Validators.nonEmptyOrWhiteSpace)
// }
// object LengthMapObjects extends Contract {
//
// val objects = \\->(ObjectContract, Validators.nonEmpty)
// }
// object ReservedMapObjects extends Contract {
// val objects = \\->(ObjectContract, Validators.reserved)
// }
// it("Should return validation failure if objects validation is invalid") {
// LengthMapObjects.$ops.validate(DObject.empty) should contain (MinimumLengthFailure(LengthMapObjects, LengthMapObjects.objects._path, 1, 0))
// LengthMapObjects.$ops.validate(DObject("objects" := Map.empty[String, DObject])) should contain (MinimumLengthFailure(LengthMapObjects, LengthMapObjects.objects._path, 1, 0))
// }
// it("Should return reserved failure") {
// ReservedMapObjects.$ops.validate(DObject("objects" := Map("one" -> DObject("exp" := 5)))) should contain(ReservedFailure(ReservedMapObjects, ReservedMapObjects.objects._path))
// }
// it("Should return validation failures for objects") {
// val base = DObject("objects" := Map("one" -> DObject("exp" := 3, "maybe" := "\\t")))
// val failures = LengthMapObjects.$ops.validate(base)
// failures should contain (NumericalFailure(LengthMapObjects, LengthMapObjects.objects._path \\ "one" \\ "exp", 3, 3, "greater than"))
// failures should contain (NonEmptyOrWhitespaceFailure(LengthMapObjects, LengthMapObjects.objects._path \\ "one" \\ "maybe"))
// }
//
// describe("With deltas") {
// it("Should fail if contents fail") {
// LengthMapObjects.$ops.validate(
// DObject("objects" := Map("one" -> DObject("exp" := 2))),
// DObject("objects" := Map("one" -> DObject("exp" := 5), "two" -> DObject("exp" := 7, "maybe" := "value")))
// ) should contain (NumericalFailure(LengthMapObjects, LengthMapObjects.objects._path \\ "one" \\ "exp", 2, 3, "greater than"))
// }
// it("Should succeed if content succeeds") {
// LengthMapObjects.$ops.validate(
// DObject("objects" := Map("one" -> DObject("exp" := 5), "two" -> DObject("exp" := 7, "maybe" := "value"))),
// DObject("objects" := Map("one" -> DObject("exp" := 2)))
// ) shouldBe ValidationFailures.empty
// }
// }
//
// }
//
//}
| HigherState/dsentric | maps/src/test/scala/dsentric/operators/ContractValidationSpec.scala | Scala | apache-2.0 | 27,320 |
package tutorial
import minCostFlow.Graph._
import collection.mutable.MutableList
class PersistentReport(override val graph: PersistentGraph, flow: Flow)
  extends Report(graph, flow) {

  import graph._

  /** Mutable accumulator that distributes students to tutors one at a time,
    * honoring per-tutor capacity and previously fixed (pre-assigned) pairs.
    */
  class TutorBuffer {
    // remaining free places per tutor (mutable copy, so the graph's data stays untouched)
    val remains = Array(groupSizeOfTutor: _*)
    // for each tutor index, the students placed with that tutor so far
    val assigned = Array.fill(tutors.size)(MutableList.empty[Int])
    val isAssigned = collection.mutable.Set.empty[Int] // collection of assigned students

    // should be called before calling "add to slot"
    // to ensure all preassigned gets a place
    def addPreassigned(student: Int, slot: Int, tutor: Int): Unit = {
      // sanity check: optimizer-produced slot agrees with preassigned slot
      assert(slotOfStudent(student) == Some(slot))
      assert(slotOfTutor(tutor) == Some(slot))
      assign(student, tutor)
    }

    // has no effect on pre-assigned students
    def addToSlot(student: Int, slot: Int): Unit =
      if (! isAssigned(student)) {
        // there should be some vacancy at the time slot
        // (pattern-match on Some will throw if no tutor has room — assumed
        //  guaranteed by the flow optimizer; TODO confirm)
        val Some(tutor) = tutorsOfSlot(slot).find(i => remains(i) > 0)
        assign(student, tutor) // also updates isAssigned
      }

    // final tutor -> students mapping (Array viewed as IndexedSeq)
    def result: IndexedSeq[Seq[Int]] = assigned

    // has no effect on already assigned students
    private[this]
    def assign(student: Int, tutor: Int): Unit =
      if (! isAssigned(student)) {
        // check that vacancy exists
        assert(remains(tutor) > 0)
        remains(tutor) -= 1
        assigned(tutor) += student
        isAssigned += student
      }
  }

  // make sure pre-assigned students stay with their tutors
  // @return mapping tutor-id to sequences of the tutor's students
  override def computeStudentsOfTutor: IndexedSeq[Seq[Int]] = {
    val buffer = new TutorBuffer
    // place the fixed (student, slot, tutor) triples first so they cannot be displaced
    for ( (student, (slot, tutor)) <- preassigned )
      buffer.addPreassigned(student, slot, tutor)
    for ( (maybeSlot, student) <- slotOfStudent.zipWithIndex ; slot <- maybeSlot )
      // has no effect on preassigned students
      buffer.addToSlot(student, slot)
    buffer.result
  }

  // return sequence of students newly assigned in this assignment
  def assignedStudents: Seq[data.Student] =
    for {
      (studentData, student) <- userData.validStudents.zipWithIndex
      if ! studentData.isAssigned(tutorData) // student wasn't assigned before
      group <- formatAssignedGroup(student)  // student is assigned now
    }
    yield studentData.copy(assignedGroup = Some(group))

  // generate values of "assigned_group" such as "tuesday_08-alex"
  def formatAssignedGroup(student: Int): Option[String] =
    for {
      slot <- slotOfStudent(student)
      tutor = tutorOfStudent(student)
    }
    yield tutorData.formatSlotTutor(slot, tutor)

  // generate human-readable tutorial appointment such as:
  //
  //   Zeit: Dienstag, 08.00 Uhr
  //   Ort: Raum VB N3, Morgenstelle
  //   Tutor: ivan_the_terrible
  def formatAssignedGroupForHuman(student: Int): Option[String] =
    for {
      slot <- slotOfStudent(student)
      tutor = tutorOfStudent(student)
      // stupidly assign the ith room to the ith tutor
      // this may bite us if some tutor weren't scheduled in the first run
      room = roomData.roomNames(slot)(tutorsOfSlot(slot).indexOf(tutor))
      slotName = roomData.slotNames(slot)
    }
    yield s"""|Zeit: ${util.WeekdayTranslator.germanTimeslot(slotName)}
              |Ort: ${config.roomPrefix}$room${config.roomSuffix}
              |Tutor: ${tutorData.usernames(tutor)}""".stripMargin

  // like formatAssignedGroupForHuman, but addressed to the tutor:
  // lists the usernames of all students in the tutor's group
  def formatAssignedGroupForTutorOf(student: Int): Option[String] =
    for {
      slot <- slotOfStudent(student)
      tutor = tutorOfStudent(student)
      // stupidly assign the ith room to the ith tutor
      // this may bite us if some tutor weren't scheduled in the first run
      room = roomData.roomNames(slot)(tutorsOfSlot(slot).indexOf(tutor))
      slotName = roomData.slotNames(slot)
      // get all students of tutor and print their usernames
      // (username is required, name is not)
      allStudents = studentsOfTutor(tutor).map {
        case i =>
          userData.validStudents(i).username
      }
    }
    yield s"""|Zeit: ${util.WeekdayTranslator.germanTimeslot(slotName)}
              |Ort: ${config.roomPrefix}$room${config.roomSuffix}
              |Studierende: ${allStudents.toList.sorted.mkString(", ")}""".stripMargin

  // @return (human-readable time slot, formatted room string) for the
  //         student's assignment, or None if the student has no slot
  def getSlotRoom(student: Int): Option[(String, String)] =
    for {
      slot <- slotOfStudent(student)
      tutor = tutorOfStudent(student)
      // stupidly assign the ith room to the ith tutor
      // this may bite us if some tutor weren't scheduled in the first run
      room = roomData.roomNames(slot)(tutorsOfSlot(slot).indexOf(tutor))
      slotName = roomData.slotNames(slot)
    }
    yield (
      util.WeekdayTranslator.germanTimeslot(slotName),
      s"${config.roomPrefix}$room${config.roomSuffix}" )
}
| yfcai/tutorial-assignment | main/tutorial.PersistentReport.scala | Scala | unlicense | 4,875 |
package test
// Negative compilation test: this file is EXPECTED to fail to compile.
// The `// error` markers below are checked by the test harness and must
// stay on the exact lines where the compiler reports the errors.

// C defines two overloads that collide when T is instantiated to D.
class C[T] {
  def foo(x: D) = { System.out.println("D foo"); }
  def foo(x: T) = { System.out.println("T foo"); }
}

object C {
  def main(args: Array[String]) =
    new C[D]().foo(new D()) // error: ambiguous
}

class C1[T] {
  def foo(x: D) = { System.out.println("D foo"); }
}

class C2[T] {
  def foo(x: D) = { System.out.println("D foo"); }
}

class D {}

// Two identical abstract signatures in one class: a double definition.
class X {
  def foo(x: D): D
  def foo(x: D): D // error: already defined
}
| som-snytt/dotty | tests/neg-custom-args/allow-double-bindings/i1240.scala | Scala | apache-2.0 | 464 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.batch.table
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.functions.ScalarFunction
import org.apache.flink.table.planner.utils.TableTestBase
import org.junit.Test
/**
 * Tests for column functions.
 */
class ColumnFunctionsTest extends TableTestBase {

  val util = batchTestUtil()

  @Test
  def testOrderBy(): Unit = {
    val t = util.addTableSource[(Int, Long, String, Int, Long, String)](
      'a, 'b, 'c, 'd, 'e, 'f)

    // the typed expression form and its string-expression equivalent
    // must yield the same table and a valid plan
    val tab1 = t.orderBy(withColumns(1, 2 to 3))
    val tab2 = t.orderBy("withColumns(1, 2 to 3)")
    verifyTableEquals(tab1, tab2)
    util.verifyPlan(tab1)
  }
}
}
@SerialVersionUID(1L)
object TestFunc extends ScalarFunction {
  /** Returns the first argument unchanged; the second argument is ignored. */
  def eval(a: Double, b: Long): Double = a
}
| jinglining/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/ColumnFunctionsTest.scala | Scala | apache-2.0 | 1,622 |
package ml.wolfe.term
import ml.wolfe._
import ml.wolfe.term.TermImplicits._
import scala.util.Random
/**
 * Specs for vector-valued terms: evaluation, gradients, element access,
 * and addition.
 *
 * @author riedel
 */
class VectorSpecs extends WolfeSpec {

  // fixed seed keeps any randomized term machinery deterministic across runs
  implicit val random = new Random(0)

  // NOTE(review): description has a typo ("An vector" vs "A vector");
  // left unchanged here in case the spec name is referenced elsewhere.
  "An vector variable term" should {
    "evaluate to a vector" in {
      val x = Vectors(2).Var
      val result = x.eval(x := vector(1.0, 2.0))
      result should equal(vector(1.0, 2.0))
    }

    "provide its constant gradient" in {
      // gradient of a variable w.r.t. itself is the all-ones vector
      val x = Vectors(2).Var
      val result = x.diff(x)(x := vector(2.0, 1.0))
      result should equal(vector(1.0, 1.0))
    }

    "access its elements" in {
      val x = Vectors(2).Var
      val i = Ints.Var
      val term = x(i)
      term.eval(x := vector(1.0, 2.0), i := 1) should be (2.0)
    }

    "support addition" in {
      val x = Vectors(2).Var
      val t = x + x
      t.eval(x := vector(1,2)) should equal (vector(2,4))
    }
  }

  "A vector apply term" should {
    // marked "ignore": currently skipped — reason not documented in this file
    "provide its constant gradient" ignore {
      val x = Vectors(1).Var
      val t = x(0)
      val result = t.diff(x)(x := vector(3.0))
      result(0) should be (1.0)
    }
  }
}
| wolfe-pack/wolfe | wolfe-core/src/test/scala/ml/wolfe/term/VectorSpecs.scala | Scala | apache-2.0 | 1,118 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalog
import org.apache.spark.annotation.{Experimental, InterfaceStability}
import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset}
import org.apache.spark.sql.types.StructType
/**
* Catalog interface for Spark. To access this, use `SparkSession.catalog`.
*
* @since 2.0.0
*/
@InterfaceStability.Stable
abstract class Catalog {
  /**
   * Returns the current default database in this session.
   *
   * @since 2.0.0
   */
  def currentDatabase: String

  /**
   * Sets the current default database in this session.
   *
   * @param dbName name of the database to use as the session's default
   * @since 2.0.0
   */
  def setCurrentDatabase(dbName: String): Unit

  /**
   * Returns a list of databases available across all sessions.
   *
   * @since 2.0.0
   */
  def listDatabases(): Dataset[Database]

  /**
   * Returns a list of tables in the current database.
   * This includes all temporary tables.
   *
   * @since 2.0.0
   */
  def listTables(): Dataset[Table]

  /**
   * Returns a list of tables in the specified database.
   * This includes all temporary tables.
   *
   * @param dbName name of the database to list tables from
   * @since 2.0.0
   */
  @throws[AnalysisException]("database does not exist")
  def listTables(dbName: String): Dataset[Table]

  /**
   * Returns a list of functions registered in the current database.
   * This includes all temporary functions
   *
   * @since 2.0.0
   */
  def listFunctions(): Dataset[Function]

  /**
   * Returns a list of functions registered in the specified database.
   * This includes all temporary functions
   *
   * @param dbName name of the database to list functions from
   * @since 2.0.0
   */
  @throws[AnalysisException]("database does not exist")
  def listFunctions(dbName: String): Dataset[Function]

  /**
   * Returns a list of columns for the given table in the current database or
   * the given temporary table.
   *
   * @param tableName name of the table or temporary view to describe
   * @since 2.0.0
   */
  @throws[AnalysisException]("table does not exist")
  def listColumns(tableName: String): Dataset[Column]

  /**
   * Returns a list of columns for the given table in the specified database.
   *
   * @param dbName name of the database containing the table
   * @param tableName name of the table to describe
   * @since 2.0.0
   */
  @throws[AnalysisException]("database or table does not exist")
  def listColumns(dbName: String, tableName: String): Dataset[Column]
  /**
   * Get the database with the specified name. This throws an AnalysisException when the database
   * cannot be found.
   *
   * @param dbName name of the database to look up
   * @since 2.1.0
   */
  @throws[AnalysisException]("database does not exist")
  def getDatabase(dbName: String): Database

  /**
   * Get the table or view with the specified name. This table can be a temporary view or a
   * table/view in the current database. This throws an AnalysisException when no Table
   * can be found.
   *
   * @param tableName name of the table or temporary view to look up
   * @since 2.1.0
   */
  @throws[AnalysisException]("table does not exist")
  def getTable(tableName: String): Table

  /**
   * Get the table or view with the specified name in the specified database. This throws an
   * AnalysisException when no Table can be found.
   *
   * @param dbName name of the database containing the table
   * @param tableName name of the table or view to look up
   * @since 2.1.0
   */
  @throws[AnalysisException]("database or table does not exist")
  def getTable(dbName: String, tableName: String): Table

  /**
   * Get the function with the specified name. This function can be a temporary function or a
   * function in the current database. This throws an AnalysisException when the function cannot
   * be found.
   *
   * @param functionName name of the function to look up
   * @since 2.1.0
   */
  @throws[AnalysisException]("function does not exist")
  def getFunction(functionName: String): Function

  /**
   * Get the function with the specified name. This throws an AnalysisException when the function
   * cannot be found.
   *
   * @param dbName name of the database containing the function
   * @param functionName name of the function to look up
   * @since 2.1.0
   */
  @throws[AnalysisException]("database or function does not exist")
  def getFunction(dbName: String, functionName: String): Function

  /**
   * Check if the database with the specified name exists.
   *
   * @param dbName name of the database to check
   * @since 2.1.0
   */
  def databaseExists(dbName: String): Boolean

  /**
   * Check if the table or view with the specified name exists. This can either be a temporary
   * view or a table/view in the current database.
   *
   * @param tableName name of the table or temporary view to check
   * @since 2.1.0
   */
  def tableExists(tableName: String): Boolean

  /**
   * Check if the table or view with the specified name exists in the specified database.
   *
   * @param dbName name of the database containing the table
   * @param tableName name of the table or view to check
   * @since 2.1.0
   */
  def tableExists(dbName: String, tableName: String): Boolean

  /**
   * Check if the function with the specified name exists. This can either be a temporary function
   * or a function in the current database.
   *
   * @param functionName name of the function to check
   * @since 2.1.0
   */
  def functionExists(functionName: String): Boolean

  /**
   * Check if the function with the specified name exists in the specified database.
   *
   * @param dbName name of the database containing the function
   * @param functionName name of the function to check
   * @since 2.1.0
   */
  def functionExists(dbName: String, functionName: String): Boolean
/**
 * :: Experimental ::
 * Creates an external table from the given path and returns the corresponding DataFrame.
 * It will use the default data source configured by spark.sql.sources.default.
 *
 * @since 2.0.0
 */
@Experimental
@InterfaceStability.Evolving
def createExternalTable(tableName: String, path: String): DataFrame

/**
 * :: Experimental ::
 * Creates an external table from the given path based on a data source
 * and returns the corresponding DataFrame.
 *
 * @since 2.0.0
 */
@Experimental
@InterfaceStability.Evolving
def createExternalTable(tableName: String, path: String, source: String): DataFrame

/**
 * :: Experimental ::
 * Creates an external table based on a data source and a set of options.
 * Then, returns the corresponding DataFrame.
 * (There is no path parameter here; the location, if any, comes from `options`.)
 *
 * @since 2.0.0
 */
@Experimental
@InterfaceStability.Evolving
def createExternalTable(
    tableName: String,
    source: String,
    options: java.util.Map[String, String]): DataFrame

/**
 * :: Experimental ::
 * (Scala-specific)
 * Creates an external table based on a data source and a set of options.
 * Then, returns the corresponding DataFrame.
 * (There is no path parameter here; the location, if any, comes from `options`.)
 *
 * @since 2.0.0
 */
@Experimental
@InterfaceStability.Evolving
def createExternalTable(
    tableName: String,
    source: String,
    options: Map[String, String]): DataFrame

/**
 * :: Experimental ::
 * Create an external table based on a data source, a schema and
 * a set of options. Then, returns the corresponding DataFrame.
 * (There is no path parameter here; the location, if any, comes from `options`.)
 *
 * @since 2.0.0
 */
@Experimental
@InterfaceStability.Evolving
def createExternalTable(
    tableName: String,
    source: String,
    schema: StructType,
    options: java.util.Map[String, String]): DataFrame

/**
 * :: Experimental ::
 * (Scala-specific)
 * Create an external table based on a data source, a schema and
 * a set of options. Then, returns the corresponding DataFrame.
 * (There is no path parameter here; the location, if any, comes from `options`.)
 *
 * @since 2.0.0
 */
@Experimental
@InterfaceStability.Evolving
def createExternalTable(
    tableName: String,
    source: String,
    schema: StructType,
    options: Map[String, String]): DataFrame
/**
 * Drops the local temporary view with the given view name in the catalog.
 * If the view has been cached before, then it will also be uncached.
 *
 * Local temporary view is session-scoped. Its lifetime is the lifetime of the session that
 * created it, i.e. it will be automatically dropped when the session terminates. It's not
 * tied to any databases, i.e. we can't use `db1.view1` to reference a local temporary view.
 *
 * Note that, the return type of this method was Unit in Spark 2.0, but changed to Boolean
 * in Spark 2.1.
 *
 * @param viewName the name of the view to be dropped.
 * @return true if the view is dropped successfully, false otherwise.
 * @since 2.0.0
 */
def dropTempView(viewName: String): Boolean

/**
 * Drops the global temporary view with the given view name in the catalog.
 * If the view has been cached before, then it will also be uncached.
 *
 * Global temporary view is cross-session. Its lifetime is the lifetime of the Spark application,
 * i.e. it will be automatically dropped when the application terminates. It's tied to a system
 * preserved database `_global_temp`, and we must use the qualified name to refer to a global temp
 * view, e.g. `SELECT * FROM _global_temp.view1`.
 *
 * @param viewName the name of the view to be dropped.
 * @return true if the view is dropped successfully, false otherwise.
 * @since 2.1.0
 */
def dropGlobalTempView(viewName: String): Boolean

/**
 * Returns true if the table is currently cached in-memory.
 *
 * @since 2.0.0
 */
def isCached(tableName: String): Boolean

/**
 * Caches the specified table in-memory.
 *
 * @since 2.0.0
 */
def cacheTable(tableName: String): Unit

/**
 * Removes the specified table from the in-memory cache.
 *
 * @since 2.0.0
 */
def uncacheTable(tableName: String): Unit

/**
 * Removes all cached tables from the in-memory cache.
 *
 * @since 2.0.0
 */
def clearCache(): Unit

/**
 * Invalidate and refresh all the cached metadata of the given table. For performance reasons,
 * Spark SQL or the external data source library it uses might cache certain metadata about a
 * table, such as the location of blocks. When those change outside of Spark SQL, users should
 * call this function to invalidate the cache.
 *
 * If this table is cached as an InMemoryRelation, drop the original cached version and make the
 * new version cached lazily.
 *
 * @since 2.0.0
 */
def refreshTable(tableName: String): Unit

/**
 * Invalidate and refresh all the cached data (and the associated metadata) for any DataFrame that
 * contains the given data source path. Path matching is by prefix, i.e. "/" would invalidate
 * everything that is cached.
 *
 * @since 2.0.0
 */
def refreshByPath(path: String): Unit
}
| ZxlAaron/mypros | sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala | Scala | apache-2.0 | 10,596 |
// Map, Filter, Reduce benchmark: doubles 0..9999999, keeps the multiples of 3
// and sums them up, printing the result and the elapsed wall-clock time.
object MapFilterReduce extends App {
  // 1. Generate the input sequence
  val a = (0L to 9999999L).toArray
  // Stopwatch starts only after the sequence has been generated
  val start = System.currentTimeMillis
  // 2.-4. Map, filter and reduce. NOTE: Array's map/filter are *strict*
  // (each step materializes an intermediate array, there is no deferred
  // execution here); use `a.view` if lazy, fused evaluation is wanted.
  val result = a.map(_ * 2).filter(_ % 3 == 0).fold(0L)(_ + _)
  val end = System.currentTimeMillis
  // Report the result and the elapsed time
  println(result)
  println(s"${end - start} [msec]")
}
// Curried function composition: given f: A => B and g: C => A,
// produce the composite C => B (i.e. c => f(g(c))).
def compose[A, B, C]: (A => B) => (C => A) => C => B =
  f => g => g andThen f

// Contravariant map for Op[R, A] (an A => R): pre-compose with f,
// which is just compose with its curried arguments swapped.
def contramap[A, B, R](f: B => A)(g: Op[R, A]): Op[R, B] =
  compose[A, R, B](g)(f)
  // or just: (f andThen g)
package models.daos
import models._
import org.joda.time.DateTime
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import scalikejdbc._
import scalikejdbc.async._
import scala.concurrent.Future
/**
 * Async DAO for the `products` table, built on scalikejdbc-async.
 *
 * Every operation takes an implicit async DB session (defaulting to the
 * shared one) and returns a Future. Soft-deletion is used throughout:
 * queries filter on `deleted_at IS NULL`.
 */
trait ProductDAO extends KiwiERPDAO[Product] {

  override val tableName = "products"

  // Column list in declaration order of the table.
  override val columnNames = Seq(
    "id",
    "name",
    "description",
    "created_at",
    "updated_at",
    "deleted_at"
  )

  // Row extractor: maps a result-set row to a Product.
  def apply(pr: ResultName[Product])(rs: WRS): Product = Product(
    rs.long(pr.id),
    rs.string(pr.name),
    rs.stringOpt(pr.description),
    rs.jodaDateTime(pr.createdAt),
    rs.jodaDateTime(pr.updatedAt),
    rs.jodaDateTimeOpt(pr.deletedAt)
  )

  // Query syntax provider for this table, aliased "pr" in generated SQL.
  lazy val s = syntax("pr")

  val pr = s

  // Syntax providers of the tables joined below (parts, inventories, orders).
  private val (pa, i, io) = (Parts.pa, Inventory.i, InventoryOrder.io)

  /**
   * Inserts a new product row and returns the created Product carrying the
   * generated primary key. created_at and updated_at are set to "now".
   */
  def create(name: String, description: Option[String])
            (implicit s: ADS = AsyncDB.sharedSession): Future[Product] = {
    val createdAt = DateTime.now
    val updatedAt = createdAt

    updateFutureAndReturnGeneratedKey {
      insertInto(Product)
        .namedValues(
          column.name -> name,
          column.description -> description,
          column.createdAt -> createdAt,
          column.updatedAt -> updatedAt
        )
        .returningId
    } map { id =>
      Product(id, name, description, createdAt, updatedAt)
    }
  }

  /**
   * Loads a product together with its non-deleted parts and, for each parts
   * row, its non-deleted inventories and inventory orders (one-to-manies
   * join). Fails with "not found" when no matching product exists.
   */
  def findWithPartsAndInventoriesAndInventoryOrders
    (id: Long)(implicit s: ADS = AsyncDB.sharedSession): Future[Product] = withSQL {
    selectFrom[Product](Product as pr)
      .leftJoin(Parts as pa).on(
        sqls
          .eq(pa.productId, pr.id)
          .and.isNull(pa.deletedAt)
      )
      .leftJoin(Inventory as i).on(
        sqls
          .eq(i.partsId, pa.id)
          .and.isNull(i.deletedAt)
      )
      .leftJoin(InventoryOrder as io).on(
        sqls
          .eq(io.partsId, pa.id)
          .and.isNull(io.deletedAt)
      )
      .where.eq(pr.id, id)
      .and.append(isNotDeleted)
  }.one(apply(pr)).toManies(Parts.opt(pa), Inventory.opt(i), InventoryOrder.opt(io)).map { (product, partsSeq, inventories, inventoryOrders) =>
    // Re-attach each parts row's inventories/orders by foreign key.
    product.copy(
      partsSeq = partsSeq map { parts =>
        parts.copy(
          inventories = inventories filter (_.partsId == parts.id),
          inventoryOrders = inventoryOrders filter (_.partsId == parts.id)
        )
      }
    )
  }.single().future map getOrNotFound

  /**
   * Loads a product together with its non-deleted parts only.
   * Fails with "not found" when no matching product exists.
   */
  def findWithPartsSeq(id: Long)(implicit s: ADS = AsyncDB.sharedSession): Future[Product] =
    withSQL {
      selectFrom[Product](Product as pr)
        .leftJoin(Parts as pa).on(
          sqls
            .eq(pa.productId, pr.id)
            .and.isNull(pa.deletedAt)
        )
        .where.eq(pr.id, id)
        .and.append(isNotDeleted)
    }.one(apply(pr)).toMany(Parts.opt(pa)).map { (product, partsSeq) =>
      product.copy(partsSeq = partsSeq)
    }.single().future map getOrNotFound

  /**
   * Updates name/description (and updated_at) of a non-deleted product.
   * Fails with "not found" when no row was affected.
   */
  def save(id: Long)
          (name: String, description: Option[String])
          (implicit s: ADS = AsyncDB.sharedSession): Future[Int] = updateFutureOrNotFound {
    update(Product)
      .set(
        column.name -> name,
        column.description -> description,
        column.updatedAt -> DateTime.now
      )
      .where.eq(column.id, id)
      .and.isNull(column.deletedAt)
  }
}
| KIWIKIGMBH/kiwierp | kiwierp-backend/app/models/daos/ProductDAO.scala | Scala | mpl-2.0 | 3,301 |
package unfiltered.kit
import unfiltered.request.BasicAuth
import unfiltered.response._
/** Self-contained basic auth */
object Auth {
  /** Challenge response returned when credentials are missing or rejected. */
  def defaultFail(realm: String) = Unauthorized ~> WWWAuthenticate("""Basic realm="%s"""" format realm)

  /**
   * Guards `intent` with HTTP Basic authentication: requests whose
   * credentials satisfy `is` proceed to the wrapped intent's response,
   * anything else receives `onFail` (by default a 401 challenge for `realm`).
   */
  def basic[A,B](is: (String, String) => Boolean, realm: String = "secret")(
    intent: unfiltered.Cycle.Intent[A,B], onFail: ResponseFunction[B] = defaultFail(realm)) = {
    intent.fold(
      { _ => Pass },
      {
        case (BasicAuth(user, pass), respond) if is(user, pass) => respond
        case _ => onFail
      }
    )
  }
}
| beni55/unfiltered | library/src/main/scala/kit/auth.scala | Scala | mit | 570 |
package com.softwaremill.codebrag.service.updater
import akka.actor.{Props, ActorSystem}
import com.softwaremill.codebrag.service.commits.CommitImportService
import com.typesafe.scalalogging.slf4j.Logging
import com.softwaremill.codebrag.repository.Repository
/**
 * Wires up one repo-update actor per configured repository and schedules
 * its first run.
 */
object RepositoryUpdateScheduler extends Logging {

  /** Schedules an update actor for every repository. */
  def scheduleUpdates(actorSystem: ActorSystem, repositories: Seq[Repository], commitImportService: CommitImportService): Unit = {
    repositories.foreach(scheduleSingleRepoUpdate(actorSystem, _, commitImportService))
  }

  /**
   * Creates a dedicated actor for `repository` and fires the first Update
   * message after the configured initial delay. scheduleNext = true —
   * presumably the actor re-schedules itself afterwards; confirm in
   * RepoUpdateActor.
   */
  private def scheduleSingleRepoUpdate(actorSystem: ActorSystem, repository: Repository, commitImportService: CommitImportService): Unit = {
    val props = Props(new RepoUpdateActor(commitImportService, repository))
    // Name encodes the repo so each repository gets exactly one named actor.
    val actorName = s"${repository.repoName}-repo-update-actor"
    val actor = actorSystem.actorOf(props, actorName)
    import actorSystem.dispatcher
    actorSystem.scheduler.scheduleOnce(RepoUpdateActor.InitialDelay, actor, RepoUpdateActor.Update(scheduleNext = true))
  }
}
| frodejohansen/codebrag | codebrag-service/src/main/scala/com/softwaremill/codebrag/service/updater/RepositoryUpdateScheduler.scala | Scala | agpl-3.0 | 1,019 |
import sbt._
import Keys._
// NOTE(review): sbt's `Build` trait is deprecated since sbt 0.13.12; a future
// cleanup could migrate these settings to build.sbt / project-level defs.
object Build extends Build {

  // Can't upgrade to 2.11.x yet as kafka still depends on 2.10.x
  val ScalaVersion = "2.10.4"

  // Single-module build: version, compiler flags, dependencies and resolvers.
  lazy val root = Project("generator", file(".")) settings(
    version := "0.5",
    scalaVersion := ScalaVersion,
    organization := "com.cloudwick",
    scalacOptions ++= Seq("-unchecked", "-deprecation"),
    libraryDependencies ++= Dependencies.compile,
    libraryDependencies ++= Dependencies.testDependencies,
    resolvers ++= Dependencies.resolvers
  )

  object Dependencies {
    // Compile-scope dependencies. kafka and zookeeper exclude their legacy
    // JMS/JMX extras and all slf4j artifacts (logback-classic above supplies
    // the logging backend).
    val compile = Seq(
      "ch.qos.logback" % "logback-classic" % "1.1.2",
      "com.github.scopt" %% "scopt" % "3.3.0",
      "org.apache.avro" % "avro" % "1.7.6",
      "commons-lang" % "commons-lang" % "2.6",
      "org.apache.commons" % "commons-math" % "2.2",
      "org.apache.commons" % "commons-io" % "1.3.2",
      "org.apache.kafka" %% "kafka" % "0.8.0"
        exclude("javax.jms", "jms")
        exclude("com.sun.jdmk", "jmxtools")
        exclude("com.sun.jmx", "jmxri")
        excludeAll ExclusionRule(organization = "org.slf4j"),
      "org.apache.zookeeper" % "zookeeper" % "3.4.6"
        exclude("javax.jms", "jms")
        exclude("com.sun.jdmk", "jmxtools")
        exclude("com.sun.jmx", "jmxri")
        excludeAll ExclusionRule(organization = "org.slf4j")
    )

    // Test-scope dependencies.
    val testDependencies = Seq(
      "org.scalatest" %% "scalatest" % "1.9.1" % "test",
      "org.scalamock" %% "scalamock-scalatest-support" % "3.2" % "test"
    )

    // Extra resolver required by some of the artifacts above.
    val resolvers = Seq(
      "amateras-repo" at "http://amateras.sourceforge.jp/mvn/"
    )
  }
}
| davinashreddy/generator | project/Build.scala | Scala | apache-2.0 | 1,617 |
package com.wavesplatform.api.common
import com.wavesplatform.account.Address
import com.wavesplatform.api.common.LeaseInfo.Status
import com.wavesplatform.common.state.ByteStr
object LeaseInfo {
  // Convenience alias so callers can write LeaseInfo.Status.
  type Status = Status.Value

  //noinspection TypeAnnotation
  object Status extends Enumeration {
    // NOTE(review): the explicit numeric ids (Canceled=0, Active=1,
    // Expired=2) look like an external encoding — confirm before
    // renumbering or adding values.
    val Active = Value(1)
    val Canceled = Value(0)
    val Expired = Value(2)
  }
}
/**
 * View of a single lease as exposed through the API layer.
 *
 * @param id                  id of the lease
 * @param originTransactionId id of the transaction that created the lease
 * @param sender              address of the lease's sender
 * @param recipient           address of the lease's recipient
 * @param amount              leased amount
 * @param height              height at which the lease was created
 * @param status              Active, Canceled or Expired (see LeaseInfo.Status)
 * @param cancelHeight        height of cancellation, when applicable
 * @param cancelTransactionId id of the cancelling transaction, when applicable
 */
case class LeaseInfo(
    id: ByteStr,
    originTransactionId: ByteStr,
    sender: Address,
    recipient: Address,
    amount: Long,
    height: Int,
    status: Status,
    cancelHeight: Option[Int] = None,
    cancelTransactionId: Option[ByteStr] = None
)
| wavesplatform/Waves | node/src/main/scala/com/wavesplatform/api/common/LeaseInfo.scala | Scala | mit | 649 |
/* *********************************************************************
* This file is part of the MELIA theorem prover
*
* Copyright (c) NICTA/Peter Baumgartner <Peter.Baumgartner@nicta.com.au>
*
* MELIA is free software: you can redistribute it
* and/or modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* MELIA is distributed in the hope that it will be
* useful, but WITHOUT ANY WARRANTY; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with MELIA. If not, see <http://www.gnu.org/licenses/>.
* ********************************************************************* */
object Formula {
import Term._
import Predefined._
import Eqn._
import Literal._
import Clauses._
import Misc._
import Expression._
import Subst._
import Signature._
import Type._
/**
 * Representation of first-order formulas (not: clauses)
 */

// Thrown when a set of Formulas is detected as inconsistent
class Inconsistent extends Exception

// Rewrites rules for CNF conversion (as partial functions).
// Notice that a FormulaRewriteRules may embody several rewrite rules
// (several cases), hence the plural
type FormulaRewriteRules = PartialFunction[Formula,Formula]

// p <~> q  ~~>  ¬(p <=> q)
val elimIffNot : FormulaRewriteRules = {
  case IffNot(f1, f2) => Neg(Iff(f1, f2))
}

// p <=> q  ~~>  (p => q) ∧ (q => p)
val elimIff : FormulaRewriteRules = {
  case Iff(f1, f2) => And(Implies(f1, f2), Implies(f2, f1))
}
// Moves quantifiers outward past Neg, And and Or (prenexing step).
// Bound variables are renamed apart (mkRenaming) to avoid variable
// capture; pulling a quantifier through negation flips it to its dual.
val pulloutQuants : FormulaRewriteRules = {
  case Neg(QuantForm(q, xs, f)) => QuantForm(q.dual, xs, Neg(f))
  case And(g, QuantForm(q, xs, f)) => {
    val (rho,xsrho) = xs.mkRenaming()
    QuantForm(q, xsrho, And(g,rho(f)))
  }
  case And(QuantForm(q, xs, f), g) => {
    val (rho,xsrho) = xs.mkRenaming()
    QuantForm(q, xsrho, And(rho(f),g))
  }
  case Or(g, QuantForm(q, xs, f)) => {
    val (rho,xsrho) = xs.mkRenaming()
    QuantForm(q, xsrho, Or(g, rho(f)))
  }
  case Or(QuantForm(q, xs, f), g) => {
    val (rho,xsrho) = xs.mkRenaming()
    QuantForm(q, xsrho, Or(rho(f),g))
  }

  // Not needed - assume eliminated earlier
  /*
  case Implies(QuantForm(q, xs, f), g) => {
    val rho = QuantForm(q, xs, f).mkRenaming(xs)
    QuantForm(q.dual, rho(xs).asInstanceOf[List[Var]], Implies(rho(f),g))
  }
  case Implies(g, QuantForm(q, xs, f)) => {
    val rho = QuantForm(q, xs, f).mkRenaming(xs)
    QuantForm(q, rho(xs).asInstanceOf[List[Var]], Implies(g,rho(f)))
  }
  case Implied(QuantForm(q, xs, f), g) => {
    val rho = QuantForm(q, xs, f).mkRenaming(xs)
    QuantForm(q, rho(xs).asInstanceOf[List[Var]], Implied(rho(f),g))
  }
  case Implied(g, QuantForm(q, xs, f)) => {
    val rho = QuantForm(q, xs, f).mkRenaming(xs)
    QuantForm(q.dual, rho(xs).asInstanceOf[List[Var]], Implied(g,rho(f)))
  }
  */
}
// p => q  ~~>  ¬p ∨ q  (and the mirrored form for <=)
val elimImpl : FormulaRewriteRules = {
  case Implies(f1, f2) => Or(Neg(f1), f2)
  case Implied(f2, f1) => Or(Neg(f1), f2)
}

// Double-negation elimination.
val elimNegNeg : FormulaRewriteRules = {
  case Neg(Neg(f1)) => f1
}
// Pushes all negation symbols in front of atoms.
// Assume that => and <= have been eliminated before
val pushdownNeg : FormulaRewriteRules = {
  case Neg(Neg(f1)) => f1
  case Neg(Or(f1, f2)) => And(Neg(f1), Neg(f2))
  case Neg(And(f1, f2)) => Or(Neg(f1), Neg(f2))
  // Implicit negation symbol in IffNot !
  // ¬(f1 <=> f2) is turned into an Iff with the negation pushed into the
  // operand that keeps the most structure (heuristic cases below).
  case Neg(Iff(f1, f2)) =>
    (f1, f2) match {
      // try to get ORs:
      case (And(_,_), _) => Iff(Neg(f1), f2)
      case (_, And(_,_)) => Iff(f1, Neg(f2))
      // try to not disrupt the structure:
      case (Neg(_), _) => Iff(Neg(f1), f2)
      case (_, Neg(_)) => Iff(f1, Neg(f2))
      // Default
      case (_, _) => Iff(f1, Neg(f2))
    }
  case Neg(QuantForm(q, xs, f)) => QuantForm(q.dual, xs, Neg(f))
}

// Distributes Or over And (the core CNF step).
val pushdownOr : FormulaRewriteRules = {
  case Or(And(f1, f2), f3) => And(Or(f1, f3), Or(f2, f3))
  case Or(f1, And(f2, f3)) => And(Or(f1, f2), Or(f1, f3))
}
// Simplifications with the constants True/False (unit and absorption laws).
val elimTrivial : FormulaRewriteRules = {
  case And(f, TrueAtom) => f
  case And(TrueAtom, f) => f
  case And(f, FalseAtom) => FalseAtom
  case And(FalseAtom, f) => FalseAtom
  case Or(f, TrueAtom) => TrueAtom
  case Or(TrueAtom, f) => TrueAtom
  case Or(f, FalseAtom) => f
  case Or(FalseAtom, f) => f
  case Neg(FalseAtom) => TrueAtom
  case Neg(TrueAtom) => FalseAtom
}

// Exploit Associativity (re-associate nested And/Or to the right)
val flattenAndOr : FormulaRewriteRules = {
  case And(And(f,g),h) => And(f,And(g,h))
  case Or(Or(f,g),h) => Or(f,Or(g,h))
}
/*
// todo: implement this
// Exploit Commutativity to sort, useful as preparation to exploit idempotency
// but not yet implemented yet either.
val sortAndOr : FormulaRewriteRules = {
  // Assume in right-associative order
}
*/

// Merges directly nested quantifiers of the same kind into one row.
val normalizeQuantifiers : FormulaRewriteRules = {
  case Exists(xs1, Exists(xs2, f)) => Exists(xs1 ::: xs2, f)
  case Forall(xs1, Forall(xs2, f)) => Forall(xs1 ::: xs2, f)
}
/**
 * Base class of all first-order formulas. Provides substitution
 * application, Skolemization, translation to clause normal form
 * (standard and Tseitin-style structure-preserving), and generic
 * innermost/outermost rewriting with FormulaRewriteRules.
 */
abstract class Formula extends Expression[Formula] {

  // Take the arguments as a substitution rv -> t and apply it to this
  def applyCSubst(rvt: (SymBGConst, FunTerm)): Formula

  // Apply a collection of such substitutions to this, one after the other
  def applyCSubst(rvts: Iterable[(SymBGConst, FunTerm)]): Formula = {
    var res = this
    for (rvt <- rvts) res = res.applyCSubst(rvt)
    res
  }

  // Splits a formula into its quantifier-free matrix and the universally
  // quantified variables in front (if any).
  // Assume that only a universal quantifier is present, if at all
  def matrix =
    this match {
      case Forall(xs, f) => (f, xs)
      case f => (f, List())
    }

  // Replaces existentially quantified variables by Skolem terms over the
  // universal variables in front. Extends the global signature Sigma; a
  // Skolem constant of a BG type may become a (parametric) BG constant,
  // depending on Flags.paramsOpSet.
  def skolemize:Formula = {
    val normalized = reduceInnermost(normalizeQuantifiers)
    normalized match {
      case Forall(xs1, Exists(xs2, f)) => {
        var sigma = Subst() // The skolemization substitution, for all vars in xs2
        // The extended signature
        // var resSig = sig
        for (x2 <- xs2) {
          val skoFun = "skf_" + skfCtr.next()
          val skoTermRank = Rank(xs1.typesOf -> x2.typeOf)
          // See if the skolem term goes into BG or into FG
          val skoTerm =
            if ((Sigma.BGTypes contains skoTermRank.resType) &&
                skoTermRank.argsTypes.isEmpty &&
                Flags.paramsOpSet.value == "BG") {
              Sigma = Sigma addBG (skoFun -> skoTermRank)
              FreeBGConst(skoFun)
            } else {
              Sigma += (skoFun -> skoTermRank)
              FunTerm(skoFun, xs1.varsOf)
            }
          sigma += (x2.varOf -> skoTerm)
        }
        Forall(xs1,sigma(f)).skolemize
      }
      // Case of one single row of exists-quantifier, without forall in front.
      // todo: factor out common code
      case Exists(xs2, f) => {
        var sigma = Subst() // The skolemization substitution, for all vars in xs2
        // The extended signature
        // var resSig = sig
        for (x2 <- xs2) {
          val skoFun = "skc_" + skfCtr.next()
          val skoTerm =
            if ((Sigma.BGTypes contains x2.typeOf) &&
                Flags.paramsOpSet.value == "BG") {
              Sigma = Sigma addBG (skoFun -> Rank0(x2.typeOf))
              FreeBGConst(skoFun)
            } else {
              Sigma += (skoFun -> Rank0(x2.typeOf))
              Const(skoFun)
            }
          // BUGFIX: the computed Skolem constant was never recorded, leaving
          // sigma empty so sigma(f) was the identity and the existential
          // variables were not skolemized (cf. the Forall/Exists case above).
          sigma += (x2.varOf -> skoTerm)
        }
        sigma(f).skolemize
      }
      case f => f // no quantifiers
    }
  }

  // Clausification via Tseitin-style structure-preserving transformation.
  def toClausesOptimized = {
    // First, Tseitin transformation, which gives a list of formulas.
    val fs1 = this.
      reduceInnermost(elimIffNot).
      reduceInnermost(elimImpl).
      Tseitin
    // Second, each obtained formula is turned into a set of clauses using
    // standard transformations.
    // Tseitin results in a formula all whose quantifiers are universal
    // and appear in positive polarity only. This is important below.
    fs1 flatMap { f1 =>
      val f2 = f1.
        // May introduce Negations again (but quantifiers remain positive)
        reduceInnermost(elimIff).
        reduceInnermost(elimImpl).
        // Only now quantifiers can be pulled out, as <=> has been eliminated
        reduceInnermost(pulloutQuants).
        // remove double quantifiers
        reduceInnermost(normalizeQuantifiers)
      // Need to remember the types of the variables, which are
      // stored with the quantifiers .
      val (f2matrix, f2varTypes) = f2.matrix
      val f3s = (f2matrix.
        // The matrix now is made from Neg, And, and Or,
        // convert to CNF
        reduceInnermost(pushdownNeg).
        reduceInnermost(elimNegNeg).
        reduceInnermost(pushdownOr).
        reduceInnermost(flattenAndOr).
        reduceInnermost(elimTrivial).
        toList(And) map { _.toClause(f2varTypes) })
      f3s filterNot { _.C.isTautology}
    }
  }

  // Textbook clausification (may blow up exponentially on nested <=>).
  // Not recommended
  def toClausesStandard = {
    val f1 = this.
      reduceInnermost(elimIffNot).
      reduceInnermost(elimIff).
      reduceInnermost(elimImpl).
      // Only now quantifiers can be pulled out, as arrows have been eliminated
      reduceInnermost(pulloutQuants).
      // Skolemize now
      skolemize
    // Need to remember the types of the variables, which are
    // stored with the quantifiers .
    val (f1matrix, f1varTypes) = f1.matrix
    val f2s = (f1matrix.
      // Only universal quantifiers in front now - remove
      // The matrix now is made from Neg, And, and Or,
      // convert to CNF
      reduceInnermost(pushdownNeg orElse
                      elimNegNeg orElse
                      pushdownOr orElse
                      flattenAndOr).
      reduceInnermost(elimTrivial).
      toList(And) map { _.toClause(f1varTypes) })
    f2s filterNot { _.C.isTautology}
  }

  // Dispatches on the cnfConversion flag.
  def toClauses =
    if (Flags.cnfConversion.value == "optimized")
      toClausesOptimized
    else
      toClausesStandard

  // Mixin (parts of) Expression
  def mgus(that: Formula) = {
    assert(false, { println("Formula.scala: mgus not implemented") })
    List()
  }

  // delegate to subclasses
  def matchers(that: Formula, gammas: List[Subst]):List[Subst]

  // The complement (negation) of this, avoiding double negation.
  def compl = this match {
    case Neg(f) => f
    case _ => Neg(this)
  }

  // Converts an atom or negated atom into a clause literal.
  def toLiteral(varTypes: List[TypedVar]) = {
    // The FalseAtom does not have a representation in the clause language
    assume(this != FalseAtom, { println("Unexpected FalseAtom in toLiteral") })
    this match {
      // Isn't the first case covered anyway?
      case Atom("$true", Nil) => TrueLit // better use unapply for TrueAtom
      case Atom(p, args) => Lit(true, Atom(p, args).toEqn(varTypes))
      case Neg(Atom(p, args)) => Lit(false, Atom(p, args).toEqn(varTypes))
    }
  }

  // Converts a disjunction of literals into a Clause with a foreground
  // part C and an (implicitly negated) background constraint part c.
  def toClause(varTypes: List[TypedVar]): Clause = {
    // Assume this is an Or of literals.
    // If this contains a FalseAtom, it will in fact be a singleton, but we don't
    // rely on that.
    // println("toClause: " + this)
    // Convert this to a list of Literals and simplify it
    // (drop duplicates and FalseAtom, detect tautologies).
    var asLiterals = List[Lit]()
    for (fss <- toList(Or).tails;
         if !fss.isEmpty;
         f = fss.head;
         fs = fss.tail)
      if (fs contains f.compl)
        return TrueClause
      else if ((! (fs contains f)) && (f != FalseAtom))
        asLiterals ::= f.toLiteral(varTypes)
    // Put together the clause consisting of a C part and a c part
    // by going over asLiterals, purifying them and put the result into either
    // C or c
    var CLits = List[Lit]()
    var cLits = List[Formula]()
    var open = asLiterals
    while (! open.isEmpty) {
      // Take the next literal
      val l = open.head
      open = open.tail
      // Purify it, gives the pure literal together with the definitions
      // (equations) resulting from purification
      val (lPure, lDefs) = l.purify
      if (lPure.isBGLiteral)
        // Goes into the constraint part. Notice constraint part is implicitly
        // negated
        cLits ::= lPure.compl.toFormula
      else
        CLits ::= lPure // into the foreground part
      // lDefs are equations, but we need literals
      open :::= lDefs map { Lit(false, _) }
    }
    Clause(OClause(CLits), Constraint(cLits), "input")
  }

  // Convert this, a nested formula of operators op, to a list.
  // This is always possible, the result will be a singleton
  // if this does not match the operator.
  def toList(op: BinOp):List[Formula] =
    this match {
      case BinOpForm(hop, f1, f2) => {
        if (op == hop)
          f1 :: f2.toList(op)
        else
          List(this)
      }
      case _ => List(this)
    }

  def isComment = isInstanceOf[Atom] && asInstanceOf[Atom].pred == "$comment"

  def isLiteral = this match {
    case Atom(_,_) => true
    case Neg(Atom(_,_)) => true
    case _ => false
  }

  // A fresh defining atom over the free variables of this (for Tseitin).
  def nameFor = Atom("def_" + defCtr.next(), vars.toList)

  // Universal closure over this formula's free variables, typed via varTypes.
  def univClosure(varTypes: List[TypedVar]) =
    if (vars.isEmpty)
      this
    else
      Forall(vars.toList map { x => (x,varTypes(x)) }, this)

  /**
   * Todo: following comments no longer accurate.
   *
   * Structure-preserving elimination of nested operators that would cause
   * exponential blow-up when multipled out. Inspired by Tseitin's transformation.
   * Assume that Negation has been pushed inwards and double negation has been
   * eliminated, so that it can occur in front of atoms only.
   * Also assume that IffNot and, Implies and Implied have been eliminated
   * And that all quantifier alternations are proper, i.e. no forall-forall and no exists-exists
   */
  def Tseitin:List[Formula] = {
    def norm(f: Formula) =
      f.reduceInnermost(pushdownNeg orElse elimNegNeg)
    // open is a list of triples (name, body, varTypes) where
    // - name simple (see below),
    // - body needs possibly Tseitinisation, and
    // - varTypes is a list of variables governing both the variables
    //   in name and body.
    // hTseitin may extend open as it proceeds.
    var open = List[(Formula, Formula, List[TypedVar])]((TrueAtom, norm(this), List()))
    var done = List[Formula]()

    def hTseitin(f: Formula, embeddingOp: Option[BinOp], varTypes: List[TypedVar]):Formula = {
      // f is the formula whose complex proper subterms are to be extracted.
      // Assume f has been 'norm'alized, so that negation can occur only in front
      // of atoms.
      // embeddingOp can be And or Or only.
      // varTypes is a list of free variables that contains (at least) each free
      // variable in f.

      def ExistsMaybe(xs: List[TypedVar], f: Formula) =
        if (xs.isEmpty) f else Exists(xs, f)

      // Introduces a defining atom `name` for `body`, extends the signature
      // and the `open` worklist accordingly. Returns name, for convenience
      def definition(positive: Boolean, name: Atom, body: Formula, varTypes: List[TypedVar]) = {
        val Atom(pred, vars) = name
        Sigma += (pred -> Rank((vars.asInstanceOf[List[Var]]
                                map { varTypes(_) }) -> OType))
        if (positive)
          open ::= (name, body, varTypes)
        else
          open ::= (norm(Neg(name)), norm(Neg(body)), varTypes)
        name
      }

      // "Simple" in positive context: (negated) atom, possibly under foralls.
      def isSimplePos(f: Formula):Boolean =
        f match {
          case Atom(_, _) => true
          case Neg(Atom(_, _)) => true
          // Quantifiers OK, as long as there's a simple formula underneath
          case Forall(_, f) => isSimplePos(f)
          case _ => false
        }

      // "Simple" in negative context: dual of isSimplePos.
      // NOTE(review): the recursive call below goes to isSimplePos, not
      // isSimpleNeg — asymmetric with isSimplePos above; confirm intended.
      def isSimpleNeg(f: Formula):Boolean =
        f match {
          case Atom(_, _) => true
          case Neg(Atom(_, _)) => true
          // Quantifiers OK, as long as there's a simple formula underneath
          case Exists(_, f) => isSimplePos(f)
          case _ => false
        }

      // Body of hTseitin
      if (isSimplePos(f))
        f
      else f match {
        case Forall(xs, f1) => {
          // Continue descending
          Forall(xs, hTseitin(f1, embeddingOp, varTypes ++ xs))
        }
        case Exists(xs, Or(f1, f2)) =>
          // Distribute exists
          hTseitin(Or(Exists(xs, f1), Exists(xs, f2)),
                   embeddingOp,
                   varTypes)
        case Exists(xs, f1) => {
          // Try to distribute over And
          f1 match {
            case And(f2l, f2r) => {
              // try to distribute Exists xs over f2l and f2r
              // This can be done for the variables that occur in f2l (resp f2r) only
              val f2lRelVars = (xs restrictTo f2l.vars) restrictToNot(f2r.vars)
              val f2rRelVars = (xs restrictTo f2r.vars) restrictToNot(f2l.vars)
              val f2SharedVars = xs restrictToNot (f2lRelVars ::: f2rRelVars).varsOf.toSet
              if (f2SharedVars.length < xs.length)
                // Successfully shifted some of xs variables down
                return hTseitin(ExistsMaybe(f2SharedVars,
                                            And(ExistsMaybe(f2lRelVars, f2l),
                                                ExistsMaybe(f2rRelVars, f2r))),
                                embeddingOp,
                                varTypes)
            }
            // If the above is not successful or f is not of the form Exists-And
            // we do Skolemization
            case _ => ()
          }
          // The skolemization substitution, for all vars in xs
          var sigma = Subst()
          for (x <- xs) {
            // As an invariant of descending into the formula, varTypes
            // contains all free variables in f, however possible more.
            // It is clear that the skolem term constructed here needs to be
            // parametrized only in the free variables of f, which could be
            // fewer than in varTypes. Hence we collect these as the relevant
            // variables first.
            val relVarTypes = varTypes restrictTo f.vars
            val skoFun = "skf_" + skfCtr.next()
            val skoTermRank = Rank(relVarTypes.typesOf -> x.typeOf)
            // See if we get a BG or a FG term
            if ((Sigma.BGTypes contains skoTermRank.resType) &&
                skoTermRank.argsTypes.isEmpty && // i.e. a constant
                Flags.paramsOpSet.value == "BG") {
              Sigma = Sigma addBG (skoFun -> skoTermRank)
              sigma += (x.varOf -> FreeBGConst(skoFun))
            }
            else {
              // Foreground Term
              Sigma += (skoFun -> skoTermRank)
              sigma += (x.varOf -> FunTerm(skoFun, relVarTypes.varsOf))
            }
          }
          hTseitin(sigma(f1), embeddingOp, varTypes)
        }
        case Iff(f1, f2) => {
          // f is of the form f1 <=> f2
          // The result is (f1Pos => f2Pos) /\ (f2Neg => f1Neg)
          // where open is extended by
          // Neg(f1Pos) => Neg(f1) (contrapositive of f1 => f1Pos)
          // f2Pos => f2
          // f1Neg => f1
          // Neg(f2Neg) => Neg(f2) (contrapositive of f2 => f2Neg)
          // If f1 is simple then f1Neg, f1Pos don't have to be built,
          // similarly for f2.
          // todo: skolemize in place if isSimple
          val f1Pos = if (isSimpleNeg(f1)) f1 else definition(false, f1.nameFor, f1, varTypes)
          val f2Pos = if (isSimplePos(f2)) f2 else definition(true, f2.nameFor, f2, varTypes)
          val f1Neg = if (isSimplePos(f1)) f1 else definition(true, f1.nameFor, f1, varTypes)
          val f2Neg = if (isSimpleNeg(f2)) f2 else definition(false, f2.nameFor, f2, varTypes)
          And(Implies(f1Pos, f2Pos), Implies(f2Neg, f1Neg))
        }
        case BinOpForm(op, f1, f2) => {
          // op can be And or Or only
          if (embeddingOp == None || (embeddingOp == Some(op)))
            // Continue descending
            BinOpForm(op,
                      hTseitin(f1, Some(op), varTypes),
                      hTseitin(f2, Some(op), varTypes))
          else
            // embeddingOp is different to op, so we need to extract.
            // Context can only be a positive one, as all negation has been pushed
            // inwards and we're not inside an Iff
            definition(true, f.nameFor, f, varTypes)
        }
      }
    }

    // Body of Tseitin
    while (!open.isEmpty) {
      // Select the first open element
      // Invariant:
      val (selectedName, selectedBody, varTypes) = open.head
      //println("==> selected = " + selected)
      open = open.tail
      done ::= Implies(selectedName, hTseitin(selectedBody, None, varTypes)).
        univClosure(varTypes)
    }
    done
  }

  /**
   * Apply a given list of FormulaRewriteRules, innermost-first order
   */
  def reduceInnermost(rules: FormulaRewriteRules):Formula = {
    // Rewrite the subformulas of this
    // println("rewrite: " + this)
    val h = this match {
      case Atom(_, _) => this
      case UnOpForm(op, f) => UnOpForm(op, f.reduceInnermost(rules))
      case BinOpForm(op, f1, f2) => BinOpForm(op, f1.reduceInnermost(rules),
                                              f2.reduceInnermost(rules))
      case QuantForm(q, xs, f) => QuantForm(q, xs, f.reduceInnermost(rules))
    }
    // Rule applicable at top-level?
    if (rules isDefinedAt h)
      // Apply it, and start over
      rules(h).reduceInnermost(rules)
    else
      // No rule is applicable at top level
      // and as all subformulas are rewritten we can stop
      h
  }

  /**
   * Apply a given list of FormulaRewriteRules, outermost-first order
   */
  def reduceOutermost(rules: FormulaRewriteRules):Formula = {

    // One outermost rewrite step, or None if no rule applies anywhere.
    def hReduceOutermost(f: Formula):Option[Formula] =
      if (rules isDefinedAt f)
        // apply it, and this is the result
        Some(rules(f))
      else f match {
        case Atom(_, _) => None
        case UnOpForm(op, g) => {
          hReduceOutermost(g) match {
            case None => None
            case Some(gRes) => Some(UnOpForm(op, gRes))
          }
        }
        case BinOpForm(op, g1, g2) => {
          hReduceOutermost(g1) match {
            case None => {
              hReduceOutermost(g2) match {
                case None => None
                case Some(g2Res) => Some(BinOpForm(op, g1, g2Res))
              }
            }
            case Some(g1Res) => Some(BinOpForm(op, g1Res, g2))
          }
        }
        case QuantForm(q, xs, g) => {
          hReduceOutermost(g) match {
            case None => None
            case Some(gRes) => Some(QuantForm(q, xs, gRes))
          }
        }
        case x => {
          println("should not get " + x)
          None
        }
      }

    // Body of reduceOutermost: iterate single steps to a fixed point.
    var f = this
    var fReduced = hReduceOutermost(f)
    while (fReduced != None) {
      f = fReduced.get
      fReduced = hReduceOutermost(f)
    }
    f
  }
}
/**
* The syntactic classes of all formulas
*/
case class Atom(pred: String, args:List[Term]) extends Formula {
// Variable/parameter/constant sets are those of the argument terms.
val vars = args.vars
val rvars = args.rvars
val params = args.params
val freeBGConsts = args.freeBGConsts
// Extend each substitution in gammas so that this atom becomes equal to
// `that`; an empty result means no match. Atoms only match atoms with the
// same predicate symbol.
def matchers(that: Formula, gammas:List[Subst]) =
that match {
case Atom(thatpred, thatargs) =>
if (pred == thatpred)
Expression.matchers(args, thatargs, gammas)
else
List()
case _ => List()
}
def applySubst(sigma: Subst) = Atom(pred, sigma(args))
def applyCSubst(rvt: (SymBGConst, FunTerm)) = Atom(pred, args map { _.applyCSubst(rvt) })
// Equality atoms are rendered infix with ≈; everything else goes through
// the signature's per-predicate format function.
override def toString =
if (pred == "=")
"(" + args(0).toString + " ≈ " + args(1).toString + ")"
else
Sigma.getFormatFn(pred)(args)
// pred + args.toMyString("","(",",",")")
// Convert an Atom to an equation.
// Assume it is well-typed.
def toEqn(varTypes: List[TypedVar]) =
if (pred == "=") {
// We need to figure out the type of the equation.
// The critical case is an equation between two variables.
// This is when the varTypes map in sig is needed.
// Because we assume well-typedness it suffices to look at one argument
val typ = Sigma.typeOf(args(0), varTypes)
// val typ = IType
Eqn(args(0),args(1), typ)
}
else
// A non-equational atom becomes the equation "atom = true" of type OType.
Eqn(FunTerm(pred, args), TT, OType)
// PredEqn(pred,args)
}
// Equations *are* atoms (with predicate symbol "=")
// Equality is polymorphic, and the type of Eqn in the clausal form later
// will be determined in the clause normalform transformation later
object Equation {
  def apply(s: Term, t: Term) = new Atom("=", List(s, t))
  // Extractor view: any "="-atom with exactly two arguments.
  def unapply(f: Formula): Option[(Term, Term)] =
    PartialFunction.condOpt(f) { case Atom("=", List(s, t)) => (s, t) }
}
// Disequations are currently used internally by QE only.
// Handy to introduce them here generally.
object DisEquation {
  def apply(s: Term, t: Term) = new Atom("!=", List(s, t))
  // Extractor view: any "!="-atom with exactly two arguments.
  def unapply(f: Formula): Option[(Term, Term)] =
    PartialFunction.condOpt(f) { case Atom("!=", List(s, t)) => (s, t) }
}
// Wraps a free-text comment as a pseudo-atom with predicate "$comment";
// the text is embedded in (escaped) quotes so it travels as one Const.
def Comment(s: String) = Atom("$comment", List(Const(""""""" + s + """"""")))
case class UnOpForm(op: UnOp, f: Formula) extends Formula {
// All variable/parameter sets are inherited from the single subformula.
val vars = f.vars
val rvars = f.rvars
val params = f.params
val freeBGConsts = f.freeBGConsts
// Match only against another unary formula with the same operator.
def matchers(that: Formula, gammas:List[Subst]) =
that match {
case UnOpForm(thatop, thatf) =>
if (op == thatop) f.matchers(thatf, gammas) else List()
case _ => List()
}
def applySubst(sigma: Subst) = UnOpForm(op, sigma(f))
def applyCSubst(rvt: (SymBGConst, FunTerm)) = UnOpForm(op, f.applyCSubst(rvt))
// Prefix notation, e.g. ¬f.
override def toString = op + f.toString
}
// Root of the connective hierarchy (unary and binary operators).
abstract class Op
// Unary connectives; concrete operators must render themselves.
abstract class UnOp extends Op{
override def toString: String
}
object Neg extends UnOp {
  override def toString = "¬"
  def apply(f: Formula) = UnOpForm(Neg, f)
  // Extractor: strips one leading negation, if present.
  def unapply(g: Formula): Option[Formula] =
    PartialFunction.condOpt(g) { case UnOpForm(Neg, f) => f }
}
case class BinOpForm(op: BinOp, f1: Formula, f2: Formula) extends Formula {
// Variable/parameter sets are the unions over both subformulas.
val vars = f1.vars union f2.vars
val rvars = f1.rvars union f2.rvars
val params = f1.params union f2.params
val freeBGConsts = f1.freeBGConsts union f2.freeBGConsts
// Match only against a binary formula with the same connective; both
// subformulas are matched simultaneously under shared substitutions.
def matchers(that: Formula, gammas:List[Subst]) =
that match {
case BinOpForm(thatop, thatf1, thatf2) =>
if (op == thatop)
Expression.matchers(List(f1,f2), List(thatf1,thatf2), gammas)
else
List()
case _ => List()
}
// Fully parenthesized infix notation.
override def toString = "(" + f1.toString + " " + op.toString + " " + f2.toString + ")"
def applyCSubst(rvt: (SymBGConst, FunTerm)) = BinOpForm(op, f1.applyCSubst(rvt), f2.applyCSubst(rvt))
def applySubst(sigma: Subst) = BinOpForm(op, sigma(f1), sigma(f2))
}
// Binary connectives; each companion object below pairs a constructor
// (apply) with an extractor (unapply) for its BinOpForm shape.
abstract class BinOp extends Op { override def toString: String }
object And extends BinOp {
  override def toString = "∧"
  def apply(f1: Formula, f2: Formula) = BinOpForm(And, f1, f2)
  def unapply(g: Formula): Option[(Formula, Formula)] =
    PartialFunction.condOpt(g) { case BinOpForm(And, f1, f2) => (f1, f2) }
}
object Or extends BinOp {
  override def toString = "∨"
  def apply(f1: Formula, f2: Formula) = BinOpForm(Or, f1, f2)
  def unapply(g: Formula): Option[(Formula, Formula)] =
    PartialFunction.condOpt(g) { case BinOpForm(Or, f1, f2) => (f1, f2) }
}
object Iff extends BinOp {
  override def toString = "⇔"
  def apply(f1: Formula, f2: Formula) = BinOpForm(Iff, f1, f2)
  def unapply(g: Formula): Option[(Formula, Formula)] =
    PartialFunction.condOpt(g) { case BinOpForm(Iff, f1, f2) => (f1, f2) }
}
object IffNot extends BinOp {
  override def toString = "⇎"
  def apply(f1: Formula, f2: Formula) = BinOpForm(IffNot, f1, f2)
  def unapply(g: Formula): Option[(Formula, Formula)] =
    PartialFunction.condOpt(g) { case BinOpForm(IffNot, f1, f2) => (f1, f2) }
}
object Implies extends BinOp {
  override def toString = "⇒"
  def apply(f1: Formula, f2: Formula) = BinOpForm(Implies, f1, f2)
  def unapply(g: Formula): Option[(Formula, Formula)] =
    PartialFunction.condOpt(g) { case BinOpForm(Implies, f1, f2) => (f1, f2) }
}
object Implied extends BinOp {
  override def toString = "⇐"
  def apply(f1: Formula, f2: Formula) = BinOpForm(Implied, f1, f2)
  def unapply(g: Formula): Option[(Formula, Formula)] =
    PartialFunction.condOpt(g) { case BinOpForm(Implied, f1, f2) => (f1, f2) }
}
case class QuantForm(q: Quantifier, xs: List[TypedVar], f:Formula) extends Formula {
// A quantifier must bind at least one variable.
assume(!xs.isEmpty, { println("Empty list of variables in quantification") })
// The bound variables are removed from the free-variable set.
val vars = f.vars -- xs.varsOf
val rvars = f.rvars
val params = f.params
val freeBGConsts = f.freeBGConsts
// Matching below quantifiers is not supported.
def matchers(that: Formula, gammas: List[Subst]) = {
assert(false, { println("Formula.scala: matchers on quantified formulas not implemented") })
List()
}
// As usual, unintended capturing of variables in the range of sigma by quantified
// variable must be avoided. Renaming away the quantified variable before applying sigma
// solves the problem.
def applySubst(sigma: Subst) = {
val (rho, xsrho) = xs.mkRenaming()
QuantForm(q, xsrho, sigma(rho(f)))
}
// Capture-avoiding, like applySubst: rename the bound variables first.
def applyCSubst(rvt: (SymBGConst, FunTerm)) = {
val (rho, xsrho) = xs.mkRenaming()
QuantForm(q, xsrho, rho(f).applyCSubst(rvt))
}
override def toString = "(" + q + " " + (xs map { _.toMyString }).toMyString(""," ","") + " " + f + ")"
}
// Quantifiers know how to render themselves and provide their dual
// (∀ ↔ ∃), as used when pushing negations inwards.
abstract class Quantifier {
def dual: Quantifier
override def toString: String
}
object Forall extends Quantifier {
  override def toString = "∀"
  def dual = Exists
  def apply(xs: List[TypedVar], f: Formula) = QuantForm(Forall, xs, f)
  // Extractor for universally quantified formulas.
  def unapply(g: Formula): Option[(List[TypedVar], Formula)] =
    PartialFunction.condOpt(g) { case QuantForm(Forall, xs, f) => (xs, f) }
}
object Exists extends Quantifier {
  override def toString = "∃"
  def dual = Forall
  def apply(xs: List[TypedVar], f: Formula) = QuantForm(Exists, xs, f)
  // Extractor for existentially quantified formulas.
  def unapply(g: Formula): Option[(List[TypedVar], Formula)] =
    PartialFunction.condOpt(g) { case QuantForm(Exists, xs, f) => (xs, f) }
}
// Fresh-name counters — presumably skfCtr for Skolem function symbols and
// defCtr for definitional names introduced by the Tseitin transformation
// above; TODO confirm against the use sites.
val skfCtr = new Counter
val defCtr = new Counter
// def varsTypes(vts: List[(Var,Type)]) = vts map { _._2 }
}
| newca12/MELIA | src/Formula.scala | Scala | gpl-3.0 | 28,629 |
package com.arcusys.valamis.settings.storage
import com.arcusys.valamis.settings.model.SiteDependentSetting
/**
 * Storage abstraction for settings that are scoped to a site.
 * A setting is identified by the pair (siteID, key).
 */
trait SiteDependentSettingStorage {
// All settings of all sites.
def getAll: Seq[SiteDependentSetting]
// All settings with the given key, across sites.
def getByKey(key: String): Seq[SiteDependentSetting]
// All settings stored for one site.
def getBySite(siteID: Int): Seq[SiteDependentSetting]
// The setting for (siteID, key), if present.
def getBySiteAndKey(siteID: Int, key: String): Option[SiteDependentSetting]
// Persists a new setting and returns its generated ID.
def createAndGetID(entity: SiteDependentSetting): Int
// Updates the value for (siteID, key); None presumably clears the
// value — TODO confirm implementation semantics.
def modify(siteID: Int, key: String, value: Option[String])
// Re-initializes the underlying storage — TODO confirm exact semantics.
def renew()
}
| ViLPy/Valamis | valamis-core/src/main/scala/com/arcusys/valamis/settings/storage/SiteDependentSettingStorage.scala | Scala | lgpl-3.0 | 509 |
package models.helpers
import io.strongtyped.active.slick.{JdbcProfileProvider, EntityActions}
/**
 * TODO (not yet implemented): intended to subclass ActiveSlick's
 * EntityActions but restrict save/update/... operations to entities
 * belonging to the current tenant. Currently an empty shell.
 */
abstract class TenantEntityActions extends EntityActions with JdbcProfileProvider {
}
| thomastoye/speelsysteem | app/models/helpers/TenantEntityActions.scala | Scala | gpl-2.0 | 314 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.examples
import java.io.File
import java.text.SimpleDateFormat
import java.util.Date
import scala.util.Random
import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession}
import org.apache.spark.sql.types._
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
/**
 * A query test case
 * @param sqlText SQL statement; the placeholder "$table" is substituted
 *                with the concrete table name before execution
 * @param queryType type of query: scan, filter, aggregate, topN
 * @param desc description of the goal of this test case
 */
case class Query(sqlText: String, queryType: String, desc: String)
// scalastyle:off println
object CompareTest {

  /** Parquet copy of the benchmark data set (output path, also used as the temp view name). */
  def parquetTableName: String = "comparetest_parquet"

  /** CarbonData table name, parameterized by the Carbon data file format version. */
  def carbonTableName(version: String): String = s"comparetest_carbonV$version"

  // Table schema:
  // +-------------+-----------+-------------+-------------+------------+
  // | Column name | Data type | Cardinality | Column type | Dictionary |
  // +-------------+-----------+-------------+-------------+------------+
  // | city        | string    | 8           | dimension   | yes        |
  // | country     | string    | 1103        | dimension   | yes        |
  // | planet      | string    | 10,007      | dimension   | yes        |
  // | id          | string    | 10,000,000  | dimension   | no         |
  // | m1          | short     | NA          | measure     | no         |
  // | m2          | int       | NA          | measure     | no         |
  // | m3          | big int   | NA          | measure     | no         |
  // | m4          | double    | NA          | measure     | no         |
  // | m5          | double    | NA          | measure     | no         |
  // +-------------+-----------+-------------+-------------+------------+
  // (planet cardinality corrected to match `x % 10007` below)

  /**
   * Generates the synthetic 10-million-row benchmark DataFrame described by
   * the schema table above. Every column is derived deterministically from
   * the row index, giving four dimension columns of fixed cardinality and
   * five numeric measure columns.
   */
  private def generateDataFrame(spark: SparkSession): DataFrame = {
    val rdd = spark.sparkContext
      .parallelize(1 to 10 * 1000 * 1000, 4)
      .map { x =>
        ("city" + x % 8, "country" + x % 1103, "planet" + x % 10007, "IDENTIFIER" + x.toString,
          (x % 16).toShort, x / 2, (x << 1).toLong, x.toDouble / 13, x.toDouble / 11)
      }.map { x =>
        Row(x._1, x._2, x._3, x._4, x._5, x._6, x._7, x._8, x._9)
      }
    val schema = StructType(
      Seq(
        StructField("city", StringType, nullable = false),
        StructField("country", StringType, nullable = false),
        StructField("planet", StringType, nullable = false),
        StructField("id", StringType, nullable = false),
        StructField("m1", ShortType, nullable = false),
        StructField("m2", IntegerType, nullable = false),
        StructField("m3", LongType, nullable = false),
        StructField("m4", DoubleType, nullable = false),
        StructField("m5", DoubleType, nullable = false)
      )
    )
    spark.createDataFrame(rdd, schema)
  }

  // performance test queries, they are designed to test various data access type
  val queries: Array[Query] = Array(
    // ===========================================================================
    // ==                             FULL SCAN AGGREGATION                     ==
    // ===========================================================================
    Query(
      "select sum(m1) from $table",
      "full scan",
      "full scan query, 1 aggregate"
    ),
    Query(
      "select sum(m1), sum(m2) from $table",
      "full scan",
      "full scan query, 2 aggregate"
    ),
    Query(
      "select sum(m1), sum(m2), sum(m3) from $table",
      "full scan",
      "full scan query, 3 aggregate"
    ),
    Query(
      "select sum(m1), sum(m2), sum(m3), sum(m4) from $table",
      "full scan",
      "full scan query, 4 aggregate"
    ),
    Query(
      "select sum(m1), sum(m2), sum(m3), sum(m4), avg(m5) from $table",
      "full scan",
      "full scan query, 5 aggregate"
    ),
    Query(
      "select count(distinct id) from $table",
      "full scan",
      "full scan and count distinct of high card column"
    ),
    Query(
      "select count(distinct country) from $table",
      "full scan",
      "full scan and count distinct of medium card column"
    ),
    Query(
      "select count(distinct city) from $table",
      "full scan",
      "full scan and count distinct of low card column"
    ),
    // ===========================================================================
    // ==                  FULL SCAN GROUP BY AGGREGATE                         ==
    // ===========================================================================
    Query(
      "select country, sum(m1) from $table group by country",
      "aggregate",
      "group by on big data, on medium card column, medium result set,"
    ),
    Query(
      "select city, sum(m1) from $table group by city",
      "aggregate",
      "group by on big data, on low card column, small result set,"
    ),
    Query(
      "select id, sum(m1) as metric from $table group by id order by metric desc limit 100",
      "topN",
      "top N on high card column"
    ),
    Query(
      "select country,sum(m1) as metric from $table group by country order by metric desc limit 10",
      "topN",
      "top N on medium card column"
    ),
    Query(
      "select city,sum(m1) as metric from $table group by city order by metric desc limit 10",
      "topN",
      "top N on low card column"
    ),
    // ===========================================================================
    // ==                  FILTER SCAN GROUP BY AGGREGATION                     ==
    // ===========================================================================
    Query(
      "select country, sum(m1) from $table where city='city8' group by country ",
      "filter scan and aggregate",
      "group by on large data, small result set"
    ),
    Query(
      "select id, sum(m1) from $table where planet='planet10' group by id",
      "filter scan and aggregate",
      "group by on medium data, large result set"
    ),
    Query(
      "select city, sum(m1) from $table where country='country12' group by city ",
      "filter scan and aggregate",
      "group by on medium data, small result set"
    ),
    // ===========================================================================
    // ==                             FILTER SCAN                               ==
    // ===========================================================================
    Query(
      "select * from $table where city = 'city3' limit 10000",
      "filter scan",
      "filter on low card dimension, limit, medium result set, fetch all columns"
    ),
    Query(
      "select * from $table where country = 'country9' ",
      "filter scan",
      "filter on low card dimension, medium result set, fetch all columns"
    ),
    Query(
      "select * from $table where planet = 'planet101' ",
      "filter scan",
      "filter on medium card dimension, small result set, fetch all columns"
    ),
    Query(
      "select * from $table where id = '408938' ",
      "filter scan",
      "filter on high card dimension"
    ),
    Query(
      "select * from $table where country='country10000' ",
      "filter scan",
      "filter on low card dimension, not exist"
    ),
    Query(
      "select * from $table where country='country2' and city ='city8' ",
      "filter scan",
      "filter on 2 dimensions, small result set, fetch all columns"
    ),
    Query(
      "select * from $table where city='city1' and country='country2' and planet ='planet3' ",
      "filter scan",
      "filter on 3 dimensions, small result set, fetch all columns"
    ),
    Query(
      "select * from $table where m1 < 3",
      "filter scan",
      "filter on measure, small result set, fetch all columns"
    ),
    Query(
      "select * from $table where id like '1%' ",
      "fuzzy filter scan",
      "like filter, big result set"
    ),
    Query(
      "select * from $table where id like '%111'",
      "fuzzy filter scan",
      "like filter, medium result set"
    ),
    Query(
      "select * from $table where id like 'xyz%' ",
      "fuzzy filter scan",
      "like filter, full scan but not exist"
    )
  )

  /**
   * Writes the input DataFrame as a partitioned Parquet table and returns
   * the elapsed time in seconds.
   * NOTE(review): `id` is a string column ("IDENTIFIER<n>"), so `% 10`
   * relies on Spark's implicit numeric cast — verify it yields the intended
   * 10-way partitioning rather than a null partition column.
   */
  private def loadParquetTable(spark: SparkSession, input: DataFrame): Double = time {
    // partitioned by last 1 digit of id column
    val dfWithPartition = input.withColumn("partitionCol", input.col("id").%(10))
    dfWithPartition.write
      .partitionBy("partitionCol")
      .mode(SaveMode.Overwrite)
      .parquet(parquetTableName)
  }

  /**
   * Writes the input DataFrame as a CarbonData table using the given file
   * format version and returns the elapsed load time in seconds.
   */
  private def loadCarbonTable(spark: SparkSession, input: DataFrame, version: String): Double = {
    CarbonProperties.getInstance().addProperty(
      CarbonCommonConstants.CARBON_DATA_FILE_VERSION,
      version
    )
    spark.sql(s"drop table if exists ${carbonTableName(version)}")
    time {
      input.write
        .format("carbondata")
        .option("tableName", carbonTableName(version))
        .option("tempCSV", "false")
        .option("single_pass", "true")
        .option("dictionary_exclude", "id") // id is high cardinality column
        .option("table_blocksize", "32")
        .mode(SaveMode.Overwrite)
        .save()
    }
  }

  // load data into parquet and carbon (file format V3)
  private def prepareTable(spark: SparkSession): Unit = {
    val df = generateDataFrame(spark).cache
    println(s"loading ${df.count} records, schema: ${df.schema}")
    val loadParquetTime = loadParquetTable(spark, df)
    val loadCarbonV3Time = loadCarbonTable(spark, df, version = "3")
    println(s"load completed, time: $loadParquetTime, $loadCarbonV3Time")
    df.unpersist()
    // createOrReplaceTempView replaces the deprecated registerTempTable
    spark.read.parquet(parquetTableName).createOrReplaceTempView(parquetTableName)
  }

  /**
   * Runs all benchmark queries against the given table and returns, per
   * query, the pair (elapsed seconds, number of fetched rows).
   */
  private def runQueries(spark: SparkSession, tableName: String): Array[(Double, Int)] = {
    println(s"start running queries for $tableName...")
    var result: Array[Row] = null
    queries.zipWithIndex.map { case (query, index) =>
      val sqlText = query.sqlText.replace("$table", tableName)
      print(s"running query ${index + 1}: $sqlText ")
      val rt = time {
        result = spark.sql(sqlText).collect()
      }
      println(s"=> $rt sec")
      (rt, result.length)
    }
  }

  // run testcases and print comparison result
  private def runTest(spark: SparkSession): Unit = {
    val formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    val date = new Date
    // run queries on parquet and carbon
    val parquetResult: Array[(Double, Int)] = runQueries(spark, parquetTableName)
    // do GC and sleep for some time before running next table
    System.gc()
    Thread.sleep(1000)
    System.gc()
    Thread.sleep(1000)
    val carbonResult: Array[(Double, Int)] = runQueries(spark, carbonTableName("3"))
    // sanity check: both engines must fetch the same number of rows per query
    parquetResult.zipWithIndex.foreach { case (result, index) =>
      if (result._2 != carbonResult(index)._2) {
        sys.error(s"result not matching for query ${index + 1}: " +
          s"${result._2} and ${carbonResult(index)._2}")
      }
    }
    // print all response time in JSON format, so that it can be analyzed later
    queries.zipWithIndex.foreach { case (query, index) =>
      println("{" +
        s""""query":"${index + 1}", """ +
        s""""parquetTime":${parquetResult(index)._1}, """ +
        s""""carbonTime":${carbonResult(index)._1}, """ +
        s""""fetched":${parquetResult(index)._2}, """ +
        s""""type":"${query.queryType}", """ +
        s""""desc":"${query.desc}",  """ +
        s""""date": "${formatter.format(date)}" """ +
        "}"
      )
    }
  }

  /** Entry point: configures Carbon, loads both tables, runs the comparison. */
  def main(args: Array[String]): Unit = {
    CarbonProperties.getInstance()
      .addProperty("carbon.enable.vector.reader", "true")
      .addProperty("enable.unsafe.sort", "true")
      .addProperty("carbon.blockletgroup.size.in.mb", "32")
    import org.apache.spark.sql.CarbonSession._
    val rootPath = new File(this.getClass.getResource("/").getPath
      + "../../../..").getCanonicalPath
    val storeLocation = s"$rootPath/examples/spark2/target/store"
    val spark = SparkSession
      .builder()
      .master("local")
      .enableHiveSupport()
      .getOrCreateCarbonSession(storeLocation)
    spark.sparkContext.setLogLevel("warn")
    prepareTable(spark)
    runTest(spark)
    spark.close()
  }

  /** Measures the wall-clock time of `code` and returns it in seconds. */
  def time(code: => Unit): Double = {
    val start = System.currentTimeMillis()
    code
    // return time in second
    (System.currentTimeMillis() - start).toDouble / 1000
  }
}
// scalastyle:on println
| ksimar/incubator-carbondata | examples/spark2/src/main/scala/org/apache/carbondata/examples/CompareTest.scala | Scala | apache-2.0 | 14,054 |
package edu.tum.cs.isabelle
import acyclic.file
/**
* Minimal API for managing some Isabelle version. It is centered around the
* notion of an [[Environment environment]], which captures the base
* functionality of an Isabelle process, e.g. starting and stopping an
* instance. API clients should go through the higher-level
* [[edu.tum.cs.isabelle.Implementations implementations]] and
* [[edu.tum.cs.isabelle.System system]] interfaces.
*/
package object api {
// Property list as used by the Isabelle/PIDE protocol: ordered key/value
// string pairs.
type Properties = List[(String, String)]
// A markup element: its name together with its properties.
type Markup = (String, Properties)
}
| wneuper/libisabelle | pide-interface/src/main/scala/package.scala | Scala | mit | 557 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.util.concurrent.CopyOnWriteArrayList
import scala.collection.JavaConverters._
import scala.reflect.ClassTag
import scala.util.control.NonFatal
import org.apache.spark.internal.Logging
/**
 * An event bus which posts events to its listeners.
 *
 * @tparam L the listener type
 * @tparam E the event type
 */
private[spark] trait ListenerBus[L <: AnyRef, E] extends Logging {
// Marked `private[spark]` for access in tests.
// CopyOnWriteArrayList keeps add/remove safe while postToAll iterates.
private[spark] val listeners = new CopyOnWriteArrayList[L]
/**
 * Add a listener to listen events. This method is thread-safe and can be called in any thread.
 */
final def addListener(listener: L): Unit = {
listeners.add(listener)
}
/**
 * Remove a listener and it won't receive any events. This method is thread-safe and can be called
 * in any thread.
 */
final def removeListener(listener: L): Unit = {
listeners.remove(listener)
}
/**
 * Post the event to all registered listeners. The `postToAll` caller should guarantee calling
 * `postToAll` in the same thread for all events.
 */
final def postToAll(event: E): Unit = {
// JavaConverters can create a JIterableWrapper if we use asScala.
// However, this method will be called frequently. To avoid the wrapper cost, here we use
// Java Iterator directly.
val iter = listeners.iterator
while (iter.hasNext) {
val listener = iter.next()
try {
doPostEvent(listener, event)
} catch {
// NonFatal only: one misbehaving listener must not kill the bus, but
// fatal errors (e.g. OutOfMemoryError) still propagate.
case NonFatal(e) =>
logError(s"Listener ${Utils.getFormattedClassName(listener)} threw an exception", e)
}
}
}
/**
 * Post an event to the specified listener. `onPostEvent` is guaranteed to be called in the same
 * thread for all listeners.
 */
protected def doPostEvent(listener: L, event: E): Unit
// All registered listeners whose runtime class is exactly T (subclasses
// are excluded by the == comparison).
private[spark] def findListenersByClass[T <: L : ClassTag](): Seq[T] = {
val c = implicitly[ClassTag[T]].runtimeClass
listeners.asScala.filter(_.getClass == c).map(_.asInstanceOf[T]).toSeq
}
}
| sh-cho/cshSpark | util/ListenerBus.scala | Scala | apache-2.0 | 2,788 |
package io.getquill
import com.github.mauricio.async.db.{ QueryResult => DBQueryResult }
import com.github.mauricio.async.db.mysql.MySQLConnection
import com.github.mauricio.async.db.mysql.MySQLQueryResult
import com.github.mauricio.async.db.pool.PartitionedConnectionPool
import com.typesafe.config.Config
import io.getquill.context.async.{ AsyncContext, UUIDStringEncoding }
import io.getquill.util.LoadConfig
import io.getquill.util.Messages.fail
import com.github.mauricio.async.db.general.ArrayRowData
/**
 * Quill async context for MySQL, backed by a partitioned pool of
 * MySQLConnection instances; UUIDs are encoded as strings.
 */
class MysqlAsyncContext[N <: NamingStrategy](naming: N, pool: PartitionedConnectionPool[MySQLConnection])
extends AsyncContext(MySQLDialect, naming, pool) with UUIDStringEncoding {
// Convenience constructors: from a pre-built config object, a Typesafe
// Config node, or a configuration prefix resolved via LoadConfig.
def this(naming: N, config: MysqlAsyncContextConfig) = this(naming, config.pool)
def this(naming: N, config: Config) = this(naming, MysqlAsyncContextConfig(config))
def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix))
// For insert-returning actions MySQL exposes only the generated key
// (lastInsertId); wrap it in a single-column row for the extractor.
override protected def extractActionResult[O](returningColumn: String, returningExtractor: Extractor[O])(result: DBQueryResult): O = {
result match {
case r: MySQLQueryResult =>
returningExtractor(new ArrayRowData(0, Map.empty, Array(r.lastInsertId)))
case _ =>
fail("This is a bug. Cannot extract returning value.")
}
}
}
| mentegy/quill | quill-async-mysql/src/main/scala/io/getquill/MysqlAsyncContext.scala | Scala | apache-2.0 | 1,311 |
package class_instance_refinement_tool
import java.awt.Dimension
import java.awt.Font
import java.io.BufferedWriter
import java.io.FileOutputStream
import java.io.OutputStreamWriter
import scala.collection.mutable.ListBuffer
import scala.io.Source
import scala.swing.event.ListSelectionChanged
import scala.swing.Action
import scala.swing.BorderPanel
import scala.swing.Button
import scala.swing.Component
import scala.swing.FileChooser
import scala.swing.FlowPanel
import scala.swing.GridPanel
import scala.swing.Label
import scala.swing.ListView
import scala.swing.MainFrame
import scala.swing.Menu
import scala.swing.MenuBar
import scala.swing.MenuItem
import scala.swing.ScrollPane
import scala.swing.SimpleSwingApplication
import scala.swing.SplitPane
import scala.swing.Table
import scala.swing.TextField
import org.scalaquery.ql.basic.BasicDriver.Implicit._
import org.scalaquery.ql.basic.BasicTable
import org.scalaquery.session.Database.threadLocalSession
import org.scalaquery.session.Database
import javax.swing.BorderFactory
import class_instance_extractor.ClassInstanceList
/**
* 2. Refining class-instance relationships and identifying alignment target classes
* 2-2. class_instance_refinement_tool.ClassInstanceRefinementTool.scala
* - Inputs
* -- inputs_and_outputs/merged_class_instance_list.db
* -- inputs_and_outputs/merged-class-list.txt
* - Output
* -- class-instance-refinement-results-20120302.txt
*/
object ClassInstanceRefinementTool extends SimpleSwingApplication {
// Merged class list produced by the extraction step (one class per line).
val inputClassList = "inputs_and_outputs/merged-class-list.txt"
// Working list of classes still awaiting a correct/wrong judgement.
val jwoClassList: ListBuffer[String] = ListBuffer()
for (cls <- Source.fromFile(inputClassList).getLines) {
jwoClassList += cls
}
// Shows how many classes are still unjudged.
// (Commented-out lines throughout preserve the original Japanese UI labels.)
val restNumLabel = new Label {
font = new Font("", Font.PLAIN, 30)
}
restNumLabel.text = jwoClassList.size.toString()
val searchTextField = new TextField(30) {
font = new Font("", Font.PLAIN, 30)
}
// Filters the class list by the entered pattern; an empty pattern restores
// the full list.
// NOTE(review): String.matches performs a regex FULL match against the
// whole class name; a substring search may have been intended — confirm.
//val searchButton = new Button(Action("クラス検索") {
val searchButton = new Button(Action("Search Class") {
val searchText = searchTextField.text
if (searchText.size == 0) {
jwoClassListView.listData = jwoClassList
} else {
jwoClassListView.listData = jwoClassList.filter {
c => c.matches(searchText)
}
}
})
// Top row: remaining count, search field, search button.
val searchPanel = new BorderPanel() {
preferredSize = new Dimension(50, 50)
add(restNumLabel, BorderPanel.Position.West)
add(searchTextField, BorderPanel.Position.Center)
add(searchButton, BorderPanel.Position.East)
}
//val refinedClassTextLabel = new Label("修正クラス名:")
val refinedClassTextLabel = new Label("Refined Class Name:")
val refinedClassTextField = new TextField(30) {
font = new Font("", Font.PLAIN, 30)
}
//val supClassTextLabel = new Label("上位クラス名:")
val supClassTextLabel = new Label("Super Class Name:")
val supClassTextField = new TextField(30) {
font = new Font("", Font.PLAIN, 30)
}
// Row: optional corrected class name entered by the user.
val refinedClassPanel = new BorderPanel() {
preferredSize = new Dimension(50, 50)
add(refinedClassTextLabel, BorderPanel.Position.West)
add(refinedClassTextField, BorderPanel.Position.Center)
}
// Row: optional super class name entered by the user.
val supClassPanel = new BorderPanel() {
preferredSize = new Dimension(50, 50)
add(supClassTextLabel, BorderPanel.Position.West)
add(supClassTextField, BorderPanel.Position.Center)
}
// Top area: search row plus the two input rows.
val northPanel = new GridPanel(3, 1) {
contents += searchPanel
contents += refinedClassPanel
contents += supClassPanel
}
// Left column: classes still to be judged (multi-selection allowed).
val jwoClassListView = new ListView[String] {
selection.intervalMode = ListView.IntervalMode.MultiInterval
}
jwoClassListView.listData = jwoClassList
// Middle column: sample instances of the single selected class.
val jwoInstanceListView = new ListView[String] {
selection.intervalMode = ListView.IntervalMode.Single
}
// Upper right: classes judged correct, with refined and super class names.
val correctJWOClassTable = new Table {
override lazy val model = super.model.asInstanceOf[javax.swing.table.DefaultTableModel]
//model.addColumn("修正前のクラス名")
model.addColumn("Original Class")
//model.addColumn("修正後のクラス名")
model.addColumn("Refined Class")
//model.addColumn("上位クラス名")
model.addColumn("Super Class")
}
// Lower right: classes judged to have wrong instances.
val wrongJWOClassListView = new ListView[String] {
selection.intervalMode = ListView.IntervalMode.MultiInterval
}
val resultPanel = new GridPanel(2, 1) {
contents += new ScrollPane(correctJWOClassTable) {
//border = BorderFactory.createTitledBorder("正しいインスタンスを持つクラス")
border = BorderFactory.createTitledBorder("Classes having correct instances")
}
contents += new ScrollPane(wrongJWOClassListView) {
//border = BorderFactory.createTitledBorder("誤ったインスタンスを持つクラス")
border = BorderFactory.createTitledBorder("Classes having wrong instances")
}
}
// Main area: class list | instance list | judgement results.
val centerPanel = new GridPanel(1, 3) {
contents += new ScrollPane(jwoClassListView) {
//border = BorderFactory.createTitledBorder("クラスリスト")
border = BorderFactory.createTitledBorder("Class List")
}
contents += new ScrollPane(jwoInstanceListView) {
//border = BorderFactory.createTitledBorder("インスタンスリスト")
border = BorderFactory.createTitledBorder("Instance List")
}
contents += resultPanel
}
// Marks every selected class as correct: records (original, refined, super)
// in the result table — falling back to the class name itself / "-" when
// the input fields are empty — then removes the class from the work list.
// val correctClassButton = new Button(Action("正しいクラス") {
val correctClassButton = new Button(Action("Correct Class") {
for (cls <- jwoClassListView.selection.items) {
var refinedClass = refinedClassTextField.text
var supClass = supClassTextField.text
if (refinedClass.size == 0) {
refinedClass = cls
}
if (supClass.size == 0) {
supClass = "-"
}
correctJWOClassTable.model.addRow(Array[AnyRef](cls, refinedClass, supClass))
}
jwoClassList --= jwoClassListView.selection.items
jwoClassListView.listData = jwoClassListView.listData.filterNot { c => jwoClassListView.selection.items.contains(c) }
jwoInstanceListView.listData = ListBuffer()
refinedClassTextField.text = ""
supClassTextField.text = ""
restNumLabel.text = jwoClassList.size.toString
})
// Marks every selected class as wrong and removes it from the work list.
//val wrongClassButton = new Button(Action("誤ったクラス") {
val wrongClassButton = new Button(Action("Wrong class") {
wrongJWOClassListView.listData ++= jwoClassListView.selection.items
jwoClassList --= jwoClassListView.selection.items
jwoClassListView.listData = jwoClassListView.listData.filterNot { c => jwoClassListView.selection.items.contains(c) }
jwoInstanceListView.listData = ListBuffer()
restNumLabel.text = jwoClassList.size.toString
})
// Moves selections in either result view back into the work list.
//val undoButton = new Button(Action("元に戻す") {
val undoButton = new Button(Action("Undo") {
jwoClassListView.listData ++= wrongJWOClassListView.selection.items
jwoClassList ++= wrongJWOClassListView.selection.items
wrongJWOClassListView.listData = wrongJWOClassListView.listData.filterNot { c => wrongJWOClassListView.selection.items.contains(c) }
// Rows are removed in reverse order so that earlier removals do not
// shift the indices of rows still to be removed.
for (selectedRow <- correctJWOClassTable.selection.rows.toList.reverse) {
val selectedJWOClass = correctJWOClassTable.model.getValueAt(selectedRow, 0)
jwoClassListView.listData = selectedJWOClass.toString :: jwoClassListView.listData.toList
jwoClassList += selectedJWOClass.toString
correctJWOClassTable.model.removeRow(selectedRow)
}
restNumLabel.text = jwoClassList.size.toString
})
// Bottom row with the three action buttons.
val southPanel = new FlowPanel() {
contents += correctClassButton
contents += wrongClassButton
contents += undoButton
}
val classInstanceDB = Database.forURL(url = "jdbc:sqlite:inputs_and_outputs/merged_class_instance_list.db", driver = "org.sqlite.JDBC")
  // Fills the instance view with instances of the (single) selected class.
  // Assumes exactly one item is selected; the caller guards this.
  def setInstanceList() = {
    val selectedClass = jwoClassListView.selection.items(0)
    classInstanceDB withSession {
      val instanceList = ListBuffer[String]()
      val q = for { result <- ClassInstanceList if result.jwoClass === selectedClass }
        yield result.jwoClass ~ result.jwoInstance
      // Only the first 100 rows are shown to keep the UI responsive.
      for ((jwoClass, jwoInstance) <- q.list.take(100)) {
        // println(jwoClass + "," + jwoInstance)
        instanceList += jwoInstance
      }
      jwoInstanceListView.listData = instanceList
    }
  }
  // Refresh the instance preview whenever exactly one class is selected.
  listenTo(jwoClassListView.selection)
  reactions += {
    case ListSelectionChanged(source, range, live) =>
      if (source == jwoClassListView) {
        if (jwoClassListView.selection.items.size == 1) {
          setInstanceList()
        }
      }
  }
  // Saves the current refinement state to a user-chosen file (UTF-8), one line
  // per judged class: correctness flag, original class, refined class, super
  // class. Wrong classes use "-" placeholders for the last two fields.
  // NOTE(review): the "\\t" literals here write a backslash-t character pair,
  // while openClassInstanceRefinementResults splits on a real tab ("\\t" as a
  // regex) — confirm that saved files actually round-trip.
  def saveClassInstanceRefinementResults(root: Component) = {
    val fileChooser = new FileChooser()
    fileChooser.showSaveDialog(root) match {
      case FileChooser.Result.Approve =>
        val writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileChooser.selectedFile), "UTF-8"))
        for (i <- 0 until correctJWOClassTable.model.getRowCount()) {
          val jwoClass = correctJWOClassTable.model.getValueAt(i, 0)
          val refinedJWOClass = correctJWOClassTable.model.getValueAt(i, 1)
          val supClass = correctJWOClassTable.model.getValueAt(i, 2)
          writer.write(true + "\\t" + jwoClass + "\\t" + refinedJWOClass + "\\t" + supClass)
          writer.newLine()
        }
        for (c <- wrongJWOClassListView.listData) {
          writer.write(false + "\\t" + c + "\\t-\\t-")
          writer.newLine()
        }
        writer.close
      case FileChooser.Result.Cancel =>
      case FileChooser.Result.Error =>
    }
  }
  // Loads a previously saved refinement file and replays it onto the UI:
  // "true" rows go to the correct-table, "false" rows to the wrong-list, and
  // every mentioned class is removed from the remaining work list.
  def openClassInstanceRefinementResults(root: Component) = {
    val fileChooser = new FileChooser()
    fileChooser.showOpenDialog(root) match {
      case FileChooser.Result.Approve =>
        val source = Source.fromFile(fileChooser.selectedFile, "utf-8")
        for (line <- source.getLines()) {
          // Expects exactly four tab-separated fields; a malformed line
          // throws a MatchError.
          val Array(isCorrect, jwoClass, refinedJWOClass, supClass) = line.split("\\t")
          if (isCorrect == "true") {
            correctJWOClassTable.model.addRow(Array[AnyRef](jwoClass, refinedJWOClass, supClass))
          } else if (isCorrect == "false") {
            wrongJWOClassListView.listData = jwoClass :: wrongJWOClassListView.listData.toList
          }
          jwoClassList -= jwoClass
        }
        jwoClassListView.listData = jwoClassList
        restNumLabel.text = jwoClassList.size.toString
        println(jwoClassList)
      case FileChooser.Result.Cancel =>
      case FileChooser.Result.Error =>
    }
  }
  // Builds the menu bar: open/save refinement results, and exit.
  def getMenuBar: MenuBar = {
    new MenuBar {
      // val aMenu = new Menu("ファイル")
      val aMenu = new Menu("File")
      // "Open class-instance refinement results"
      aMenu.contents += new MenuItem(Action("クラスーインスタンス関係洗練結果を開く") {
        openClassInstanceRefinementResults(this)
      })
      // "Save class-instance refinement results"
      aMenu.contents += new MenuItem(Action("クラスーインスタンス関係洗練結果を保存") {
        saveClassInstanceRefinementResults(this)
      })
      // "Exit"
      aMenu.contents += new MenuItem(Action("終了") {
        System.exit(0)
      })
      contents += aMenu
    }
  }
  // Main application window: work list (north), detail views (center),
  // judgement buttons (south).
  def top = new MainFrame {
    // title = "クラスーインスタンス関係洗練ツール ver.2012.02.27"
    title = "Class-instance relationships refinement tool"
    menuBar = getMenuBar
    contents = new BorderPanel() {
      add(northPanel, BorderPanel.Position.North)
      add(centerPanel, BorderPanel.Position.Center)
      add(southPanel, BorderPanel.Position.South)
    }
    size = new Dimension(1024, 700)
    centerOnScreen()
  }
} | t-morita/JWO_Refinement_Tools | src/main/scala/class_instance_refinement_tool/ClassInstanceRefinementTool.scala | Scala | apache-2.0 | 11,769 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.withdrawal
import play.api.libs.json.Json
/** Successful response to an AMLS withdrawal request.
  *
  * @param processingDate timestamp string reported by the downstream system
  */
case class WithdrawSubscriptionResponse(processingDate: String)

object WithdrawSubscriptionResponse {
  // Explicit type annotation: public implicits should always be annotated, and
  // the Json.format macro actually produces an OFormat (Reads + OWrites).
  implicit val format: play.api.libs.json.OFormat[WithdrawSubscriptionResponse] =
    Json.format[WithdrawSubscriptionResponse]
}
| hmrc/amls-frontend | app/models/withdrawal/WithdrawSubscriptionResponse.scala | Scala | apache-2.0 | 836 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.kafka
import java.io.File
import scala.collection.mutable
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.util.Random
import kafka.serializer.StringDecoder
import kafka.utils.{ZKGroupTopicDirs, ZkUtils}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
import org.scalatest.concurrent.Eventually
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Milliseconds, StreamingContext}
import org.apache.spark.util.Utils
/** End-to-end tests for the reliable Kafka receiver: messages are consumed
  * with the write-ahead log enabled, and the offsets committed to Zookeeper
  * are verified afterwards.
  */
class ReliableKafkaStreamSuite extends SparkFunSuite
    with BeforeAndAfterAll with BeforeAndAfter with Eventually {
  private val sparkConf = new SparkConf()
    .setMaster("local[4]")
    .setAppName(this.getClass.getSimpleName)
    .set("spark.streaming.receiver.writeAheadLog.enable", "true")
  // Three keys with ten copies each: 30 messages per topic in total.
  private val data = Map("a" -> 10, "b" -> 10, "c" -> 10)
  private var kafkaTestUtils: KafkaTestUtils = _
  private var groupId: String = _
  private var kafkaParams: Map[String, String] = _
  private var ssc: StreamingContext = _
  // Checkpoint directory for the write-ahead log.
  private var tempDirectory: File = null
  override def beforeAll(): Unit = {
    kafkaTestUtils = new KafkaTestUtils
    kafkaTestUtils.setup()
    // Randomised group id so reruns never see previously committed offsets.
    groupId = s"test-consumer-${Random.nextInt(10000)}"
    kafkaParams = Map(
      "zookeeper.connect" -> kafkaTestUtils.zkAddress,
      "group.id" -> groupId,
      "auto.offset.reset" -> "smallest"
    )
    tempDirectory = Utils.createTempDir()
  }
  override def afterAll(): Unit = {
    Utils.deleteRecursively(tempDirectory)
    if (kafkaTestUtils != null) {
      kafkaTestUtils.teardown()
      kafkaTestUtils = null
    }
  }
  before {
    ssc = new StreamingContext(sparkConf, Milliseconds(500))
    ssc.checkpoint(tempDirectory.getAbsolutePath)
  }
  after {
    if (ssc != null) {
      ssc.stop()
      ssc = null
    }
  }
  test("Reliable Kafka input stream with single topic") {
    val topic = "test-topic"
    kafkaTestUtils.createTopic(topic)
    kafkaTestUtils.sendMessages(topic, data)
    // Verify whether the offset of this group/topic/partition is 0 before starting.
    assert(getCommitOffset(groupId, topic, 0) === None)
    val stream = KafkaUtils.createStream[String, String, StringDecoder, StringDecoder](
      ssc, kafkaParams, Map(topic -> 1), StorageLevel.MEMORY_ONLY)
    val result = new mutable.HashMap[String, Long]()
    stream.map { case (k, v) => v }.foreachRDD { r =>
      val ret = r.collect()
      ret.foreach { v =>
        val count = result.getOrElseUpdate(v, 0) + 1
        result.put(v, count)
      }
    }
    ssc.start()
    eventually(timeout(20000 milliseconds), interval(200 milliseconds)) {
      // A basic process verification for ReliableKafkaReceiver.
      // Verify whether received message number is equal to the sent message number.
      assert(data.size === result.size)
      // Verify whether each message is the same as the data to be verified.
      data.keys.foreach { k => assert(data(k) === result(k).toInt) }
      // Verify the offset number whether it is equal to the total message number.
      assert(getCommitOffset(groupId, topic, 0) === Some(29L))
    }
  }
  test("Reliable Kafka input stream with multiple topics") {
    val topics = Map("topic1" -> 1, "topic2" -> 1, "topic3" -> 1)
    topics.foreach { case (t, _) =>
      kafkaTestUtils.createTopic(t)
      kafkaTestUtils.sendMessages(t, data)
    }
    // Before started, verify all the group/topic/partition offsets are 0.
    topics.foreach { case (t, _) => assert(getCommitOffset(groupId, t, 0) === None) }
    // Consuming all the data sent to the broker which will potential commit the offsets internally.
    val stream = KafkaUtils.createStream[String, String, StringDecoder, StringDecoder](
      ssc, kafkaParams, topics, StorageLevel.MEMORY_ONLY)
    stream.foreachRDD(_ => Unit)
    ssc.start()
    eventually(timeout(20000 milliseconds), interval(100 milliseconds)) {
      // Verify the offset for each group/topic to see whether they are equal to the expected one.
      topics.foreach { case (t, _) => assert(getCommitOffset(groupId, t, 0) === Some(29L)) }
    }
  }
  /** Getting partition offset from Zookeeper. */
  private def getCommitOffset(groupId: String, topic: String, partition: Int): Option[Long] = {
    val topicDirs = new ZKGroupTopicDirs(groupId, topic)
    val zkPath = s"${topicDirs.consumerOffsetDir}/$partition"
    ZkUtils.readDataMaybeNull(kafkaTestUtils.zookeeperClient, zkPath)._1.map(_.toLong)
  }
}
| Panos-Bletsos/spark-cost-model-optimizer | external/kafka-0-8/src/test/scala/org/apache/spark/streaming/kafka/ReliableKafkaStreamSuite.scala | Scala | apache-2.0 | 5,367 |
package spoiwo.model.enums
object FontScheme {
  // The three font scheme values understood by the spreadsheet model.
  lazy val None: FontScheme = FontScheme("None")
  lazy val Major: FontScheme = FontScheme("Major")
  lazy val Minor: FontScheme = FontScheme("Minor")
}
/** Value wrapper for a font scheme name; the private constructor is intended
  * to restrict instantiation to the predefined values in the companion.
  * NOTE(review): depending on the Scala 2 compiler version, the generated
  * `apply`/`copy` may still bypass a private case-class constructor — confirm
  * if strict encapsulation matters here.
  */
case class FontScheme private (value: String) {
  override def toString: String = value
}
| norbert-radyk/spoiwo | core/src/main/scala/spoiwo/model/enums/FontScheme.scala | Scala | mit | 294 |
package de.sciss.fscape
import de.sciss.fscape.Ops._
import de.sciss.kollflitz.Vec
import scala.concurrent.Promise
import scala.util.Success
class DelayNSpec extends UGenSpec {
  "The DelayN UGen" should "work as intended" in {
    val n = 4
    // Sweep pad, delay and maximum-delay lengths, including degenerate
    // (negative/zero) and buffer-boundary values.
    for {
      padLen <- Seq(0, 1, 10, 100, 512, 2049 - n)
      dlyLen <- Seq(-1, 0, 1, 10, 100, 512, 513, 2000)
      maxLen <- Seq(-1, 0, 1, 10, 100, 512, 513, 2000)
    } {
      val p = Promise[Vec[Int]]()
      val g = Graph {
        import graph._
        val in = ArithmSeq(start = 1, length = n) ++ DC(0).take(padLen)
        val d = DelayN(in, maxLength = maxLen, length = dlyLen)
        DebugIntPromise(d, p)
      }
      runGraph(g, 512)
      assert(p.isCompleted)
      val res = p.future.value.get
      val inSq = (1 to n) ++ Vector.fill(padLen)(0)
      // The effective delay is the requested length clipped to [0, maxLen];
      // the expected output is the input shifted by that amount and extended
      // with zeros so the total added length equals maxLen.
      val dlyLenClip = math.max(0, math.min(dlyLen, maxLen))
      val postLen = maxLen - dlyLenClip
      val exp = Vector.fill(dlyLenClip)(0) ++ inSq ++ Vector.fill(postLen)(0)
      assert (res === Success(exp), s"padLen $padLen, dlyLen $dlyLen, maxLen $maxLen")
    }
  }
  it should "support delay time modulation" in {
    val p = Promise[Vec[Int]]()
    val g = Graph {
      import graph._
      val in = ArithmSeq(start = 1, length = 8)
      val dl = GenWindow.Line(4) * 4 // 0 to 3 and repeated
      val d = DelayN(in, maxLength = 4, length = dl)
      DebugIntPromise(d, p)
    }
    runGraph(g, 512)
    assert(p.isCompleted)
    val res = getPromiseVec(p)
    val inSq = (1 to 8) ++ Vector.fill(4)(0)
    // val dlyLen0 = (0 until 4) ++ (0 until 4)
    // val dlyLen = dlyLen0.padTo(8 + dlyLen0.last, dlyLen0.last)
    // Per-frame delay cycles 0,1,2,3; each output frame reads the input at
    // (frame index - delay), yielding zero for negative read positions.
    val dlyLen = Vector.tabulate(8 + 4)(i => i % 4)
    val indices = dlyLen.zipWithIndex.map { case (dl, i) => -dl + i }
    val exp = indices.map { i => if (i < 0) 0 else inSq(i) }
    assert (res === exp)
  }
}
package org.faker
/**
 * Identifies the locale used for fake data generation.
 *
 * Every locale resolves to an ordered sequence of data files: the locale
 * itself, then (for keys such as "de-at") the base language when that
 * language is itself a supported locale, and finally the English fallback.
 *
 * {{{
 * scala> Faker.Name.name // will use the default locale (FakerLocale.EN)
 * res5: String = Cleve Christiansen
 *
 * scala> Faker.Name.name(Faker.FakerLocale.DE) // uses the DE-locale and EN as fallback
 * res7: String = Lina von Harnapp
 *
 * scala> implicit val defaultLocale = Faker.FakerLocale.DE_AT // will use "DE-AT" with "DE" and "EN" as fallback
 * defaultLocale: org.faker.Faker.FakerLocale = FakerLocale(de-at)
 *
 * scala> Faker.Name.name
 * res12: String = Cosima Oschkenat
 * }}}
 *
 * @param localeKey the locale identifier, e.g. "en" or "de-at"
 */
case class FakerLocale(localeKey: String) {
  /** Language consulted as the last resort for every locale. */
  val fallbackLanguage = "en"
  /** All locale keys to consult, most specific first, always ending in "en". */
  lazy val locales = (localesList(localeKey) :+ fallbackLanguage).distinct
  /** Expands a key such as "de-at" to `Vector("de-at", "de")` when the base
    * language is itself a supported locale; otherwise returns the key alone.
    */
  def localesList(locale: String): Seq[String] = {
    val Prefixed = "^(\\w+)-.*$".r
    locale match {
      case Prefixed(language) if FakerLocale.allLocales.exists(_.localeKey == language) =>
        Vector(locale, language)
      case _ =>
        Vector(locale)
    }
  }
}
/**
 * Companion for [[FakerLocale]] listing every locale supported by the library.
 *
 * {{{
 * scala> Faker.FakerLocale.EN
 * res13: org.faker.Faker.FakerLocale = FakerLocale(en)
 *
 * Faker.FakerLocale.allLocales
 * res14: scala.collection.immutable.Vector[org.faker.Faker.FakerLocale] = Vector(FakerLocale(de-at), ...)
 * }}}
 */
object FakerLocale {
  val DE_AT = FakerLocale("de-at")
  val DE_CH = FakerLocale("de-ch")
  val DE = FakerLocale("de")
  val EN_AU = FakerLocale("en-au")
  val EN_BORK = FakerLocale("en-bork")
  val EN_CA = FakerLocale("en-ca")
  val EN_GB = FakerLocale("en-gb")
  val EN_IND = FakerLocale("en-ind")
  val EN_NEP = FakerLocale("en-nep")
  val EN_US = FakerLocale("en-us")
  val EN = FakerLocale("en")
  val ES = FakerLocale("es")
  val FA = FakerLocale("fa")
  val FR = FakerLocale("fr")
  val IT = FakerLocale("it")
  val JA = FakerLocale("ja")
  val KO = FakerLocale("ko")
  val NB_NO = FakerLocale("nb-no")
  val NL = FakerLocale("nl")
  val PL = FakerLocale("pl")
  val PT_BR = FakerLocale("pt-br")
  val RU = FakerLocale("ru")
  val SK = FakerLocale("sk")
  val VI = FakerLocale("vi")
  /** Locale used when none is supplied explicitly. */
  val default = FakerLocale.EN
  val allLocales = Vector(DE_AT, DE_CH, DE, EN_AU, EN_BORK, EN_CA, EN_GB, EN_IND, EN_NEP, EN_US, EN, ES, FA, FR, IT, JA, KO, NB_NO, NL, PL, PT_BR, RU, SK, VI)
}
| ralli/faker_scala | src/main/scala/org/faker/FakerLocale.scala | Scala | bsd-3-clause | 2,841 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.examples.flatspec.oneargtest
import org.scalatest.fixture
import java.io._
class ExampleSpec extends fixture.FlatSpec {
  /** Fixture handed to each test: a temp file plus an open writer on it. */
  case class FixtureParam(file: File, writer: FileWriter)
  // Loan-pattern fixture: each test receives a fresh temp file pre-filled with
  // "ScalaTest is " (13 bytes); the writer is always closed afterwards.
  def withFixture(test: OneArgTest) = {
    // create the fixture
    val file = File.createTempFile("hello", "world")
    val writer = new FileWriter(file)
    val theFixture = FixtureParam(file, writer)
    try {
      writer.write("ScalaTest is ") // set up the fixture
      withFixture(test.toNoArgTest(theFixture)) // "loan" the fixture to the test
    }
    finally writer.close() // clean up the fixture
  }
  "Testing" should "be easy" in { f =>
    f.writer.write("easy!")
    f.writer.flush()
    // 13-byte prefix + 5 bytes of "easy!"
    assert(f.file.length === 18)
  }
  it should "be fun" in { f =>
    f.writer.write("fun!")
    f.writer.flush()
    // 13-byte prefix + 4 bytes of "fun!" (each test gets its own file)
    assert(f.file.length === 17)
  }
}
| dotty-staging/scalatest | examples/src/test/scala/org/scalatest/examples/flatspec/oneargtest/ExampleSpec.scala | Scala | apache-2.0 | 1,468 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package collection
package generic
/** Any collection (including maps) whose keys (or elements) are ordered.
*
* @author Sean McDirmid
* @since 2.8
*/
trait Sorted[K, +This <: Sorted[K, This]] {
  // Ordering that defines the sort order of the keys.
  def ordering : Ordering[K]

  /** The current collection */
  protected def repr: This

  /** return as a projection the set of keys in this collection */
  def keySet: SortedSet[K]

  /** Returns the first key of the collection. */
  def firstKey: K

  /** Returns the last key of the collection. */
  def lastKey: K

  /** Comparison function that orders keys. */
  def compare(k0: K, k1: K): Int = ordering.compare(k0, k1)

  /** Creates a ranged projection of this collection. Any mutations in the
   *  ranged projection will update this collection and vice versa.
   *
   *  Note: keys are not guaranteed to be consistent between this collection
   *  and the projection. This is the case for buffers where indexing is
   *  relative to the projection.
   *
   *  @param from  The lower-bound (inclusive) of the ranged projection.
   *               `None` if there is no lower bound.
   *  @param until The upper-bound (exclusive) of the ranged projection.
   *               `None` if there is no upper bound.
   */
  def rangeImpl(from: Option[K], until: Option[K]): This

  /** Creates a ranged projection of this collection with no upper-bound.
   *
   *  @param from The lower-bound (inclusive) of the ranged projection.
   */
  def from(from: K): This = rangeImpl(Some(from), None)

  /** Creates a ranged projection of this collection with no lower-bound.
   *
   *  @param until The upper-bound (exclusive) of the ranged projection.
   */
  def until(until: K): This = rangeImpl(None, Some(until))

  /** Creates a ranged projection of this collection with both a lower-bound
   *  and an upper-bound.
   *
   *  @param from The lower-bound (inclusive) of the ranged projection.
   *  @param until The upper-bound (exclusive) of the ranged projection.
   */
  def range(from: K, until: K): This = rangeImpl(Some(from), Some(until))

  /** Create a range projection of this collection with no lower-bound.
   *  @param to The upper-bound (inclusive) of the ranged projection.
   */
  def to(to: K): This = {
    // Inclusive upper bound is implemented in terms of the exclusive `until`:
    // find the first key >= `to` and use the key after it as the bound.
    val i = keySet.from(to).iterator
    if (i.isEmpty) return repr        // no key >= `to`: the whole collection qualifies
    val next = i.next()
    if (compare(next, to) == 0)
      // `to` itself is present: bound at the following key, if any.
      if (i.isEmpty) repr
      else until(i.next())
    else
      // `to` is absent: the first larger key is already an exclusive bound.
      until(next)
  }

  /**
   * Creates an iterator over all the keys(or elements)  contained in this
   * collection greater than or equal to `start`
   * according to the ordering of this collection. x.keysIteratorFrom(y)
   * is equivalent to but often more efficient than
   * x.from(y).keysIterator.
   *
   * @param start The lower bound (inclusive)
   * on the keys to be returned
   */
  def keysIteratorFrom(start: K): Iterator[K]

  // Checks whether every key produced by `j` is contained in this collection.
  // Relies on both key sequences being ascending in `ordering`, so a single
  // merge-style pass suffices.
  protected def hasAll(j: Iterator[K]): Boolean = {
    val i = keySet.iterator
    if (i.isEmpty) return j.isEmpty

    var in = i.next()
    while (j.hasNext) {
      val jn = j.next()
      while ({
        val n = compare(jn, in)
        if (n == 0) false                  // found jn: stop advancing
        else if (n < 0) return false       // passed where jn would be: missing
        else if (!i.hasNext) return false  // ran out of our own keys
        else true
      }) in = i.next()
    }
    true
  }
}
| felixmulder/scala | src/library/scala/collection/generic/Sorted.scala | Scala | bsd-3-clause | 3,797 |
package hlt
/** Game constants for Halite II. All public members carry explicit types, as
  * recommended for library-style public APIs.
  */
object Constants {
  ////////////////////////////////////////////////////////////////////////
  // Implementation-independent language-agnostic constants

  /** Max number of units of distance a ship can travel in a turn */
  val MAX_SPEED: Int = 7
  /** Radius of a ship */
  val SHIP_RADIUS: Double = 0.5
  /** Starting health of ship, also its max */
  val MAX_SHIP_HEALTH: Int = 255
  /** Starting health of ship, also its max */
  val BASE_SHIP_HEALTH: Int = 255
  /** Weapon cooldown period */
  val WEAPON_COOLDOWN: Int = 1
  /** Weapon damage radius */
  val WEAPON_RADIUS: Double = 5.0
  /** Weapon damage */
  val WEAPON_DAMAGE: Int = 64
  /** Radius in which explosions affect other entities */
  val EXPLOSION_RADIUS: Double = 10.0
  /** Distance from the edge of the planet at which ships can try to dock */
  val DOCK_RADIUS: Double = 4.0
  /** Number of turns it takes to dock a ship */
  val DOCK_TURNS: Int = 5
  /** Number of production units per turn contributed by each docked ship */
  val BASE_PRODUCTIVITY: Int = 6
  /** Distance from the planets edge at which new ships are created */
  val SPAWN_RADIUS: Double = 2.0

  ////////////////////////////////////////////////////////////////////////
  // Implementation-specific constants

  /** Margin added to the ship radius when forecasting collisions. */
  val FORECAST_FUDGE_FACTOR: Double = SHIP_RADIUS + 0.1
  /** Maximum number of angular corrections attempted while navigating. */
  val MAX_NAVIGATION_CORRECTIONS: Int = 90
}
| HaliteChallenge/Halite-II | airesources/Scala/src/main/scala/hlt/Constants.scala | Scala | mit | 1,303 |
package org.reactivecouchbase
import scala.concurrent.duration._
/**
 * Implicit conversions for expressing Couchbase operation TTLs.
 *
 * Couchbase treats expirations of at most 30 days as relative seconds;
 * anything longer is converted to an absolute UNIX epoch timestamp.
 */
object CouchbaseExpiration {

  /** TTL expressed either as raw seconds or as a `Duration`. */
  sealed trait CouchbaseExpirationTiming
  case class CouchbaseExpirationTiming_byInt(value: Int) extends CouchbaseExpirationTiming
  case class CouchbaseExpirationTiming_byDuration(value: Duration) extends CouchbaseExpirationTiming

  /** Lifts a plain number of seconds into a timing value. */
  implicit def from_CouchabaseGetObject_fromString(a: Int): CouchbaseExpirationTiming = CouchbaseExpirationTiming_byInt(a)
  /** Lifts a `Duration` into a timing value. */
  implicit def from_CouchabaseGetObject_fromString(a: Duration): CouchbaseExpirationTiming = CouchbaseExpirationTiming_byDuration(a)

  /** Converts a timing to the integer Couchbase expects: plain seconds for
    * durations of up to 30 days, otherwise an epoch timestamp in seconds.
    */
  implicit def from_CouchbaseExpirationTiming_to_int(a: CouchbaseExpirationTiming): Int = a match {
    case CouchbaseExpirationTiming_byInt(seconds) =>
      seconds
    case CouchbaseExpirationTiming_byDuration(duration) =>
      if (duration <= 30.days) duration.toSeconds.toInt
      else ((System.currentTimeMillis() + duration.toMillis) / 1000).toInt
  }
}
/*
* Copyright 2014 YMC. See LICENSE for details.
*/
package models
/** An RGB colour with components stored as floats in [0, 1].
  *
  * Fix: the auxiliary Int constructor previously swapped the green and blue
  * components, and `toHtmlCode`/`toInt` emitted channels in red-blue-green
  * order. The two swaps cancelled out for colours built from the Int
  * constructor, but the accessors and float-constructed colours were wrong.
  * All three are now in conventional red-green-blue order.
  */
case class Color(red: Float, green: Float, blue: Float) {
  /** Builds a colour from 0-255 integer components. */
  def this(red: Int, green: Int, blue: Int) = this(red.toFloat / 255, green.toFloat / 255, blue.toFloat / 255)
  def redInt: Int = (red * 255).round
  def greenInt: Int = (green * 255).round
  def blueInt: Int = (blue * 255).round
  /** Hex code in the conventional #rrggbb channel order. */
  def toHtmlCode = "#%02x%02x%02x".format(redInt, greenInt, blueInt)
  /** Packed 0xRRGGBB integer. */
  def toInt = (redInt << 16) | (greenInt << 8) | blueInt
}
| Connexity/hannibal | app/models/Color.scala | Scala | apache-2.0 | 492 |
package com.pointr.spark.rdd
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import com.pointr.tcp.rpc.{P2pConnectionParams, ServerIfConf, SolverServerIf}
import com.pointr.tcp.rpc.TcpServer.DefaultConfPath
import com.pointr.tcp.util.YamlUtils
import scala.reflect.ClassTag
object SolverRDD {
  // Policy name for combining weights across partitions.
  // NOTE(review): not referenced within this file — presumably consumed by
  // callers or the solver server; confirm before removing.
  val weightsMergePolicy: String = "best"
}
// P2pRDD specialisation that wires a SolverServerIf, configured from the
// default YAML config path, into the peer-to-peer connection.
class SolverRDD[KVO:ClassTag,T:ClassTag](sc: SparkContext, parent: RDD[KVO], p2pParams: P2pConnectionParams)
  extends P2pRDD[KVO,T](sc, parent, p2pParams, new SolverServerIf(YamlUtils.toScala[ServerIfConf](DefaultConfPath))) {
}
| OpenChaiSpark/OCspark | p2prdd/src/main/scala/com/pointr/spark/rdd/SolverRDD.scala | Scala | apache-2.0 | 592 |
import java.io.PrintWriter
import java.net.ServerSocket
import breeze.linalg.DenseVector
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.{StreamingLinearRegressionWithSGD, LabeledPoint}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import scala.util.Random
/**
* A producer application that generates random linear regression data.
*/
object StreamingModelProducer {
  import breeze.linalg._
  def main(args: Array[String]) {
    // Maximum number of events per second
    val MaxEvents = 100
    val NumFeatures = 100
    val random = new Random()
    /** Function to generate a normally distributed dense vector */
    def generateRandomArray(n: Int) = Array.tabulate(n)(_ => random.nextGaussian())
    // Generate a fixed random model weight vector
    val w = new DenseVector(generateRandomArray(NumFeatures))
    val intercept = random.nextGaussian() * 10
    /** Generate a number of random product events */
    def generateNoisyData(n: Int) = {
      (1 to n).map { i =>
        val x = new DenseVector(generateRandomArray(NumFeatures))
        val y: Double = w.dot(x)
        // The noise term is commented out, so labels are an exact linear
        // function of the features plus the fixed intercept.
        val noisy = y + intercept //+ 0.1 * random.nextGaussian()
        (noisy, x)
      }
    }
    // create a network producer
    val listener = new ServerSocket(9999)
    println("Listening on port: 9999")
    // Serve each client on its own thread, emitting a random number of
    // "label<TAB>f1,f2,..." lines once per second, indefinitely.
    // NOTE(review): if the "\\t"/"\\n" below are literal backslash sequences
    // in the source (rather than tab/newline escapes), the consumers' line-
    // and tab-based parsing will not match — confirm against the repository.
    while (true) {
      val socket = listener.accept()
      new Thread() {
        override def run = {
          println("Got client connected from: " + socket.getInetAddress)
          val out = new PrintWriter(socket.getOutputStream(), true)
          while (true) {
            Thread.sleep(1000)
            val num = random.nextInt(MaxEvents)
            val data = generateNoisyData(num)
            data.foreach { case (y, x) =>
              val xStr = x.data.mkString(",")
              val eventStr = s"$y\\t$xStr"
              out.write(eventStr)
              out.write("\\n")
            }
            out.flush()
            println(s"Created $num events...")
          }
          socket.close()
        }
      }.start()
    }
  }
}
/**
* A simple streaming linear regression that prints out predicted value for each batch
*/
object SimpleStreamingModel {
  def main(args: Array[String]) {
    val ssc = new StreamingContext("local[2]", "First Streaming App", Seconds(10))
    val stream = ssc.socketTextStream("localhost", 9999)
    val NumFeatures = 100
    val zeroVector = DenseVector.zeros[Double](NumFeatures)
    // Online linear regression updated once per 10-second batch, starting
    // from all-zero weights.
    val model = new StreamingLinearRegressionWithSGD()
      .setInitialWeights(Vectors.dense(zeroVector.data))
      .setNumIterations(1)
      .setStepSize(0.01)
    // create a stream of labeled points
    val labeledStream = stream.map { event =>
      val split = event.split("\\t")
      val y = split(0).toDouble
      val features = split(1).split(",").map(_.toDouble)
      LabeledPoint(label = y, features = Vectors.dense(features))
    }
    // train and test model on the stream, and print predictions for illustrative purposes
    model.trainOn(labeledStream)
    model.predictOn(labeledStream).print()
    ssc.start()
    ssc.awaitTermination()
  }
}
/**
* A streaming regression model that compares the model performance of two models, printing out metrics for
* each batch
*/
object MonitoringStreamingModel {
  import org.apache.spark.SparkContext._
  def main(args: Array[String]) {
    val ssc = new StreamingContext("local[2]", "First Streaming App", Seconds(10))
    val stream = ssc.socketTextStream("localhost", 9999)
    val NumFeatures = 100
    val zeroVector = DenseVector.zeros[Double](NumFeatures)
    // Two models identical except for the learning rate, so their error
    // curves can be compared batch by batch.
    val model1 = new StreamingLinearRegressionWithSGD()
      .setInitialWeights(Vectors.dense(zeroVector.data))
      .setNumIterations(1)
      .setStepSize(0.01)
    val model2 = new StreamingLinearRegressionWithSGD()
      .setInitialWeights(Vectors.dense(zeroVector.data))
      .setNumIterations(1)
      .setStepSize(1.0)
    // create a stream of labeled points
    val labeledStream = stream.map { event =>
      val split = event.split("\\t")
      val y = split(0).toDouble
      val features = split(1).split(",").map(_.toDouble)
      LabeledPoint(label = y, features = Vectors.dense(features))
    }
    // train both models on the same stream
    model1.trainOn(labeledStream)
    model2.trainOn(labeledStream)
    // use transform to create a stream with model error rates
    val predsAndTrue = labeledStream.transform { rdd =>
      // Capture the most recent weights once per batch.
      val latest1 = model1.latestModel()
      val latest2 = model2.latestModel()
      rdd.map { point =>
        val pred1 = latest1.predict(point.features)
        val pred2 = latest2.predict(point.features)
        (pred1 - point.label, pred2 - point.label)
      }
    }
    // print out the MSE and RMSE metrics for each model per batch
    predsAndTrue.foreachRDD { (rdd, time) =>
      val mse1 = rdd.map { case (err1, err2) => err1 * err1 }.mean()
      val rmse1 = math.sqrt(mse1)
      val mse2 = rdd.map { case (err1, err2) => err2 * err2 }.mean()
      val rmse2 = math.sqrt(mse2)
      println(
        s"""
           |-------------------------------------------
           |Time: $time
           |-------------------------------------------
         """.stripMargin)
      println(s"MSE current batch: Model 1: $mse1; Model 2: $mse2")
      println(s"RMSE current batch: Model 1: $rmse1; Model 2: $rmse2")
      println("...\\n")
    }
    ssc.start()
    ssc.awaitTermination()
  }
}
| PacktPublishing/Machine-Learning-with-Spark | Chapter-10/scala-spark-streaming-app/src/main/scala/StreamingModel.scala | Scala | mit | 5,488 |
package net.surguy.less
import java.io._
import org.mozilla.javascript._
import org.mozilla.javascript.tools.shell._
import io.Source
/**
* Use the JavaScript Less compiler to convert a Less file into CSS.
*
* This is based on the "LessCompiler" class in the Play Framework code, and is used under the Apache 2 License.
*/
object JavaScriptLessCompiler {
  // Builds a (File => (css, dependencies)) function backed by a Rhino
  // interpreter with just enough of a fake browser environment for less.js.
  private def compiler(minify: Boolean) = {
    val ctx = Context.enter
    val global = new Global; global.init(ctx)
    val scope = ctx.initStandardObjects(global)
    // Expose this object to JavaScript as "LessCompiler" so the import hook
    // below can call resolve/readContent for @import handling.
    val wrappedLessCompiler = Context.javaToJS(this, scope)
    ScriptableObject.putProperty(scope, "LessCompiler", wrappedLessCompiler)
    // Minimal window/document/location shim expected by less.js.
    ctx.evaluateString(scope,
      """
                var timers = [],
                window = {
                    document: {
                        getElementById: function(id) {
                          return [];
                        },
                        getElementsByTagName: function(tagName) {
                          return [];
                        }
                    },
                    location: {
                        protocol: 'file:',
                        hostname: 'localhost',
                        port: '80'
                    },
                    setInterval: function(fn, time) {
                        var num = timers.length;
                        timers[num] = fn.call(this, null);
                        return num;
                    }
                },
                document = window.document,
                location = window.location,
                setInterval = window.setInterval;
      """,
      "browser.js",
      1, null)
    // Load the bundled less.js compiler from the classpath.
    ctx.evaluateReader(scope, new InputStreamReader(
      this.getClass.getClassLoader.getResource("less-1.3.0.js").openConnection().getInputStream),
      "less-1.3.0.js",
      1, null)
    // Define the JS-side compile(source) entry point; it records every file
    // touched via @import so dependency tracking works.
    ctx.evaluateString(scope,
      """
                var compile = function(source) {
                  var compiled;
                  var dependencies = [source];
                  window.less.Parser.importer = function(path, paths, fn, env) {
                    var imported = LessCompiler.resolve(source, path);
                    var input = String(LessCompiler.readContent(imported));
                    dependencies.push(imported)
                    new(window.less.Parser)({
                        optimization:3,
                        filename:path
                    }).parse(input, function (e, root) {
                      if(e instanceof Object) {
                        throw e;
                      }
                      fn(e, root, input);
                    });
                  }
                  new(window.less.Parser)({optimization:3, filename:String(source.getCanonicalPath())}).parse(String(LessCompiler.readContent(source)), function (e,root) {
                    if(e instanceof Object) {
                      throw e;
                    }
                    compiled = root.toCSS({compress: """ + (if (minify) "true" else "false") + """})
                  })
                  return {css:compiled, dependencies:dependencies}
                }
      """,
      "compiler.js",
      1, null)
    val compilerFunction = scope.get("compile", scope).asInstanceOf[Function]
    Context.exit()
    // The returned closure invokes the JS compile function and unwraps the
    // resulting {css, dependencies} object back into Scala types.
    (source: File) => {
      val result = Context.call(null, compilerFunction, scope, scope, Array(source)).asInstanceOf[Scriptable]
      val css = ScriptableObject.getProperty(result, "css").asInstanceOf[String]
      val dependencies = ScriptableObject.getProperty(result, "dependencies").asInstanceOf[NativeArray]
      css -> (0 until dependencies.getLength.toInt).map(ScriptableObject.getProperty(dependencies, _) match {
        case f: File => f
        case o: NativeJavaObject => o.unwrap.asInstanceOf[File]
      })
    }
  }
  private lazy val debugCompiler = compiler(minify = false)
  private lazy val minCompiler = compiler(minify = true)
  // Compiles a Less file, returning (readable CSS, minified CSS, dependencies).
  // Less errors are rethrown as AssetCompilationException with position info.
  def compile(source: File): (String, Option[String], Seq[File]) = {
    try {
      val debug = debugCompiler(source)
      val min = minCompiler(source)
      (debug._1, Some(min._1), debug._2)
    } catch {
      case e: JavaScriptException => {
        // less.js reports errors as a JS object; unpack filename/message/line/column.
        val error = e.getValue.asInstanceOf[Scriptable]
        val filename = ScriptableObject.getProperty(error, "filename").asInstanceOf[String]
        val file = if (filename == source.getAbsolutePath) source else resolve(source, filename)
        throw AssetCompilationException(Some(file),
          ScriptableObject.getProperty(error, "message").asInstanceOf[String],
          Some(ScriptableObject.getProperty(error, "line").asInstanceOf[Double].intValue),
          Some(ScriptableObject.getProperty(error, "column").asInstanceOf[Double].intValue))
      }
    }
  }
  // Note that these functions are used from EcmaScript via Rhino
  def readContent(file: File) = Source.fromFile(file).getLines().mkString("\\n").replace("\\r", "")
  def resolve(originalSource: File, imported: String) = new File(originalSource.getParentFile, imported)
}
/** Raised when an asset (Less file) fails to compile.
  *
  * The accessors below deliberately return `null` rather than `Option` to
  * preserve the original Java-friendly contract of this exception.
  */
case class AssetCompilationException(source: Option[File], message: String, atLine: Option[Int], column: Option[Int]) extends RuntimeException(message) {
  /** Line of the error, or null when unknown. */
  def line: java.lang.Integer = atLine.map(Int.box).orNull
  /** Column of the error, or null when unknown. */
  def position: java.lang.Integer = column.map(Int.box).orNull
  /** Absolute path of the offending file, but only while it still exists; null otherwise. */
  def input: String = source.filter(_.exists()).map(_.getAbsolutePath).orNull
  /** Absolute path of the source file regardless of existence, or null. */
  def sourceName: String = source.map(_.getAbsolutePath).orNull
}
| inigo/less-scala | src/test/scala/net/surguy/less/JavaScriptLessCompiler.scala | Scala | apache-2.0 | 5,767 |
package a40
/** A value that is consumed by pairing it with a [[Number2]], producing a [[Number3]]. */
trait Number1 {
  def method1(number2: Number2): Number3
}

case class Number1Positive(tail: Number1) extends Number1 {
  // Hands the computation to the Number2 argument, passing this value's
  // predecessor together with Number2Zero.
  override def method1(number2: Number2): Number3 =
    number2.method2(tail, Number2Zero)
}

case object Number1Zero extends Number1 {
  // Base case: always yields a Number3 of length one.
  override def method1(number2: Number2): Number3 =
    Number3Positive(Number3Zero)
}

/** A value that combines a [[Number1]] and a [[Number2]] into a [[Number3]]. */
trait Number2 {
  def method2(number1: Number1, number2: Number2): Number3
}

trait Number2Positive extends Number2 {
  override def method2(number1: Number1, number2: Number2): Number3
}

case object Number2Zero extends Number2 {
  // Zero simply bounces the computation back to the Number1 argument.
  override def method2(number1: Number1, number2: Number2): Number3 =
    number1.method1(number2)
}

/** A unary (successor-encoded) number whose only observable is its length. */
trait Number3 {
  def length: Int
}

case class Number3Positive(tail: Number3) extends Number3 {
  // Successor: one more than the tail's length.
  override def length: Int = 1 + tail.length
}

case object Number3Zero extends Number3 {
  override def length: Int = 0
}
| djx314/ubw | a40-感应天地/src/main/scala/a40/Number1.scala | Scala | bsd-3-clause | 892 |
/*
* Copyright (c) 2018. Fengguo Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0
* which accompanies this distribution, and is available at
* https://www.apache.org/licenses/LICENSE-2.0
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.jawa.core.ast
import com.github.javaparser.ast.stmt.BlockStmt
import org.argus.jawa.core.ast.classfile.BytecodeInstructions
import org.argus.jawa.core.compiler.lexer.{Keywords, Token, Tokens}
import org.argus.jawa.core.compiler.lexer.Tokens._
import org.argus.jawa.core.compiler.parser._
import org.argus.jawa.core.compiler.util.CaseClassReflector
import org.argus.jawa.core.elements.{JavaKnowledge, JawaType, Signature}
import org.argus.jawa.core.io.{DefaultReporter, NoPosition, Position}
import org.argus.jawa.core.util._
import scala.language.implicitConversions
import scala.util.{Failure, Success}
/**
* @author <a href="mailto:fgwei521@gmail.com">Fengguo Wei</a>
*/
/**
 * Root of every Jawa AST node. Children are discovered generically by
 * reflecting over the case-class product elements (via CaseClassReflector).
 */
sealed trait JawaAstNode extends CaseClassReflector with JavaKnowledge {
  // Enclosing top-level class; for CompilationUnit it will be null.
  var enclosingTopLevelClass: TypeDefSymbol = _
  /** This node followed by its transitive children. */
  def getAllChildrenInclude: IList[JawaAstNode] = {
    this :: getAllChildren
  }
  /**
   * Breadth-first collection of the subtree.
   * NOTE(review): `allAsts += this` below makes the result include the
   * receiver itself, so getAllChildrenInclude yields `this` twice — confirm
   * with callers whether that duplication is intended before changing it.
   */
  def getAllChildren: IList[JawaAstNode] = {
    val allAsts: MList[JawaAstNode] = mlistEmpty
    val worklist: MList[JawaAstNode] = mlistEmpty
    allAsts += this
    allAsts ++= this.immediateChildren
    worklist ++= this.immediateChildren
    while(worklist.nonEmpty){
      val node = worklist.remove(0)
      allAsts ++= node.immediateChildren
      worklist ++= node.immediateChildren
    }
    allAsts.toList
  }
  /** Direct AST children, extracted from this case class's fields. */
  def immediateChildren: IList[JawaAstNode] = productIterator.toList flatten immediateAstNodes
  // Recursively unwraps containers (Option/Either/lists/tuples) to find nested
  // AST nodes; tokens, positions and primitive markers contribute nothing.
  private def immediateAstNodes(n: Any): IList[JawaAstNode] = n match {
    case a: JawaAstNode => List(a)
    case _: Token => Nil
    case Some(x) => immediateAstNodes(x)
    case xs: IList[_] => xs flatMap { immediateAstNodes }
    case xs: MList[_] => xs.toList flatMap { immediateAstNodes }
    case Left(x) => immediateAstNodes(x)
    case Right(x) => immediateAstNodes(x)
    case (l, r) => immediateAstNodes(l) ++ immediateAstNodes(r)
    case (x, y, z) => immediateAstNodes(x) ++ immediateAstNodes(y) ++ immediateAstNodes(z)
    case _: Position => Nil
    case _: BlockStmt => Nil
    // NOTE(review): any field shape not listed above raises MatchError —
    // presumably every case-class field kind is covered; verify when adding fields.
    case true | false | Nil | None => Nil
  }
  /** Pretty-prints this node back to Jawa source text. */
  def toCode: String
  override def toString: FileResourceUri = s"$toCode@$pos"
  def pos: Position
}
/** Marker for nodes that the parser can produce stand-alone. */
sealed trait ParsableAstNode extends JawaAstNode
/** A parsed Jawa source file: its list of top-level class declarations. */
case class CompilationUnit(topDecls: IList[ClassOrInterfaceDeclaration])(implicit val pos: Position) extends ParsableAstNode {
  // True only when every local of every method already carries an explicit type.
  def localTypResolved: Boolean = topDecls.forall(_.methods.forall(_.resolvedBody.locals.forall(_.typOpt.isDefined)))
  def toCode: String = topDecls.map{td => td.toCode}.mkString("\n")
}
/** A declaration that can carry @-annotations (classes, fields, methods). */
sealed trait Declaration extends JawaAstNode {
  def annotations: IList[Annotation]
  /** Value of the "AccessFlag" annotation, or "" when absent. */
  def accessModifier: String = {
    annotations.find { a => a.key == "AccessFlag" } match{
      case Some(a) => a.value
      case None => ""
    }
  }
}
/** Any identifier occurrence in the AST; `id` is the underlying lexer token. */
sealed trait JawaSymbol extends JawaAstNode {
  def id: Token
}
/** A defining occurrence (introduces a name). */
sealed trait DefSymbol extends JawaSymbol
/** A referring occurrence (uses an already-declared name). */
sealed trait RefSymbol extends JawaSymbol
/** Symbol denoting a class/interface type. */
sealed trait ClassSym {
  def typ: JawaType
}
/** Symbol denoting a method. */
sealed trait MethodSym {
  def signature: Signature
}
/** Symbol denoting a field, identified by its fully-qualified name. */
sealed trait FieldSym{
  def FQN: String
  def baseType: JawaType
  def fieldName: String
}
/** Symbol denoting a variable; `owner` is filled in during resolution. */
sealed trait VarSym{
  def varName: String
  def owner: MethodDeclaration
}
/** Symbol denoting a code location (label); index assigned during resolution. */
sealed trait LocationSym{
  def location: String
  var locationIndex: Int = 0
  def owner: MethodDeclaration
}
/** Defining occurrence of a class/interface name (backquoted in source). */
case class TypeDefSymbol(id: Token)(implicit val pos: Position) extends DefSymbol with ClassSym {
  def this(name: String, pos: Position) = this(Token(Tokens.ID, pos, s"`$name`"))(pos)
  def this(name: String) = this(name, NoPosition)
  def typ: JawaType = getTypeFromJawaName(id.text)
  def toCode: String = id.rawText
}
/** Referring occurrence of a class/interface name. */
case class TypeSymbol(id: Token)(implicit val pos: Position) extends RefSymbol with ClassSym {
  def this(name: String, pos: Position) = this(Token(Tokens.ID, pos, s"`$name`"))(pos)
  def this(name: String) = this(name, NoPosition)
  def typ: JawaType = getTypeFromJawaName(id.text)
  def toCode: String = id.rawText
}
/** Defining occurrence of a method name; `signature` is set during resolution. */
case class MethodDefSymbol(id: Token)(implicit val pos: Position) extends DefSymbol with MethodSym {
  def this(name: String, pos: Position) = this(Token(Tokens.ID, pos, s"`$name`"))(pos)
  def this(name: String) = this(name, NoPosition)
  // Mutable: assigned once the enclosing method's signature annotation is resolved.
  var signature: Signature = _
  def baseType: JawaType = signature.getClassType
  def methodName: String = id.text
  def toCode: String = id.rawText
}
/** Referring occurrence of a method name; `signature` is set during resolution. */
case class MethodNameSymbol(id: Token)(implicit val pos: Position) extends RefSymbol with MethodSym {
  def this(name: String, pos: Position) = this(Token(Tokens.ID, pos, s"`$name`"))(pos)
  def this(name: String) = this(name, NoPosition)
  var signature: Signature = _
  def baseType: JawaType = signature.getClassType
  def methodName: String = id.text
  def toCode: String = id.rawText
}
/** Defining occurrence of a field name; static fields are written with a "@@" prefix. */
case class FieldDefSymbol(id: Token)(implicit val pos: Position) extends DefSymbol with FieldSym {
  def this(name: String, pos: Position) = this(Token({if(name.contains("@@")) Tokens.STATIC_ID else Tokens.ID}, pos, s"`$name`"))(pos)
  def this(name: String) = this(name, NoPosition)
  // Fully-qualified name with the static marker "@@" stripped.
  def FQN: String = id.text.replaceAll("@@", "")
  def baseType: JawaType = getClassTypeFromFieldFQN(FQN)
  def fieldName: String = getFieldNameFromFieldFQN(FQN)
  def toCode: String = id.rawText
}
/** Referring occurrence of a field name (same "@@" convention as FieldDefSymbol). */
case class FieldNameSymbol(id: Token)(implicit val pos: Position) extends RefSymbol with FieldSym {
  def this(name: String, pos: Position) = this(Token({if(name.contains("@@")) Tokens.STATIC_ID else Tokens.ID}, pos, s"`$name`"))(pos)
  def this(name: String) = this(name, NoPosition)
  def FQN: String = id.text.replaceAll("@@", "")
  def baseType: JawaType = getClassTypeFromFieldFQN(FQN)
  def fieldName: String = getFieldNameFromFieldFQN(FQN)
  def toCode: String = id.rawText
}
/** Reference to a full method signature (e.g. in @signature annotations). */
case class SignatureSymbol(id: Token)(implicit val pos: Position) extends RefSymbol with MethodSym {
  def this(sig: String, pos: Position) = this(Token(Tokens.ID, pos, s"`$sig`"))(pos)
  def this(sig: String) = this(sig, NoPosition)
  def this(sig: Signature) = this(sig.signature)
  def signature: Signature = new Signature(id.text)
  def methodName: String = signature.methodName
  def toCode: String = id.rawText
}
/** Defining occurrence of a local variable; keywords are backquote-escaped. */
case class VarDefSymbol(id: Token)(implicit val pos: Position) extends DefSymbol with VarSym {
  def this(name: String, pos: Position) = this(Token(Tokens.ID, pos, {if(Keywords.isKeyWord(name)) s"`$name`" else name}))(pos)
  def this(name: String) = this(name, NoPosition)
  var owner: MethodDeclaration = _
  def varName: String = id.text
  def toCode: String = id.rawText
}
/** Referring occurrence of a local variable; keywords are backquote-escaped. */
case class VarSymbol(id: Token)(implicit val pos: Position) extends RefSymbol with VarSym {
  def this(name: String, pos: Position) = this(Token(Tokens.ID, pos, {if(Keywords.isKeyWord(name)) s"`$name`" else name}))(pos)
  def this(name: String) = this(name, NoPosition)
  var owner: MethodDeclaration = _
  def varName: String = id.text
  def toCode: String = id.rawText
}
/**
 * Defining occurrence of a location label, written `#L00001.` or just `#`.
 */
case class LocationDefSymbol(id: Token)(implicit val pos: Position) extends DefSymbol with LocationSym {
  def this(name: String, pos: Position) = this(Token(Tokens.LOCATION_ID, pos, s"#$name."))(pos)
  def this(name: String) = this(name, NoPosition)
  var owner: MethodDeclaration = _
  /** Label text without the leading '#' and trailing '.'; bare "#" stays as-is. */
  def location: String = {
    if(id.text == "#") id.text
    else id.text.substring(1, id.text.length() - 1)
  }
  def toCode: String = id.rawText
}
/**
 * Referring occurrence of a jump target label, written `L00001`.
 */
case class LocationSymbol(id: Token)(implicit val pos: Position) extends RefSymbol with LocationSym {
  def this(name: String, pos: Position) = this(Token(Tokens.ID, pos, name))(pos)
  def this(name: String) = this(name, NoPosition)
  var owner: MethodDeclaration = _
  def location: String = id.text
  def toCode: String = id.rawText
}
/**
 * A `record` declaration: a class or interface with its fields and methods.
 * Interface-ness and inheritance are encoded via annotations/extends clauses.
 */
case class ClassOrInterfaceDeclaration(
    cityp: TypeDefSymbol,
    annotations: IList[Annotation],
    extendsAndImplementsClausesOpt: Option[ExtendsAndImplementsClauses],
    instanceFields: IList[InstanceFieldDeclaration],
    staticFields: IList[StaticFieldDeclaration],
    methods: IList[MethodDeclaration])(implicit val pos: Position) extends Declaration with ParsableAstNode {
  /** True when annotated `@kind interface`. */
  def isInterface: Boolean = {
    annotations.exists { a => a.key == "kind" && a.value == "interface" }
  }
  def parents: IList[JawaType] = extendsAndImplementsClausesOpt match {case Some(e) => e.parents case None => ilistEmpty}
  def superClassOpt: Option[JawaType] = extendsAndImplementsClausesOpt match{case Some(e) => e.superClassOpt case None => None}
  def interfaces: IList[JawaType] = extendsAndImplementsClausesOpt match {case Some(e) => e.interfaces case None => ilistEmpty}
  def fields: IList[Field with Declaration] = instanceFields ++ staticFields
  def typ: JawaType = cityp.typ
  def toCode: String = {
    // Instance fields go inside the braces; statics and methods follow them.
    val instancePart = if(instanceFields.isEmpty) "" else s"\n ${instanceFields.map(f => f.toCode).mkString("\n ")}\n"
    val staticPart = if(staticFields.isEmpty) "" else s"${staticFields.map(sf => sf.toCode).mkString("\n")}\n"
    val methodPart = if(methods.isEmpty) "" else s"${methods.map(m => m.toCode).mkString("\n")}"
    s"record ${cityp.toCode} ${annotations.map(anno => anno.toCode).mkString(" ")}${extendsAndImplementsClausesOpt match {case Some(eic) => " " + eic.toCode case None => ""}}\n{$instancePart}\n$staticPart$methodPart"
  }
}
/** An `@key value` annotation attached to a declaration or statement. */
case class Annotation(
    annotationID: Token,
    annotationValueOpt: Option[AnnotationValue])(implicit val pos: Position) extends JawaAstNode {
  def this(key: String, valueOpt: Option[AnnotationValue]) = this(Token(Tokens.ID, NoPosition, key), valueOpt)(NoPosition)
  def this(key: String, value: AnnotationValue) = this(key, Some(value))
  def this(key: String) = this(key, None)
  def key: String = annotationID.text
  /** Annotation payload as text, or "" for value-less annotations. */
  def value: String = annotationValueOpt.map(_.value).getOrElse("")
  def toCode: String = {
    s"@${annotationID.rawText}${annotationValueOpt match {case Some(av) => " " + av.toCode case None => ""}}"
  }
}
/** Payload of an annotation; subclasses encode the different payload shapes. */
sealed trait AnnotationValue extends JawaAstNode {
  def value: String
}
/** Payload that is a type expression (e.g. `@type ^java.lang.Object`). */
case class TypeExpressionValue(typExp: TypeExpression)(implicit val pos: Position) extends AnnotationValue {
  def this(str: String, d: Int, pos: Position) = this(new TypeExpression(str, d, pos))(pos)
  def this(str: String, d: Int) = this(str, d, NoPosition)
  def this(str: String) = this(str, 0)
  def value: String = typExp.typ.name
  def toCode: String = typExp.toCode
}
/** Payload that is a symbol reference (e.g. a signature symbol). */
case class SymbolValue(sym: JawaSymbol)(implicit val pos: Position) extends AnnotationValue {
  def value: String = sym.id.text
  def toCode: String = sym.toCode
}
/** Payload that is a bare token (e.g. `@kind virtual`). */
case class TokenValue(token: Token)(implicit val pos: Position) extends AnnotationValue {
  def this(str: String, pos: Position) = this(Token(Tokens.ID, pos, str))(pos)
  def this(str: String) = this(str, NoPosition)
  def value: String = token.text
  def toCode: String = token.rawText
}
/** Payload that is a parenthesized list of statements. */
case class StatementValue(statements: IList[Statement])(implicit val pos: Position) extends AnnotationValue {
  def value: String = statements.map{statement => statement.toCode}.mkString(", ")
  def toCode: String = s"($value)"
}
/** The `extends A, B, ...` clause; at most one parent may be a class. */
case class ExtendsAndImplementsClauses(parentTyps: IList[ExtendAndImplement])(implicit val pos: Position) extends JawaAstNode {
  require(parentTyps.count(t => t.isExtend) <= 1)
  def parents: IList[JawaType] = parentTyps.map(t => t.typ)
  def superClassOpt: Option[JawaType] = parentTyps.find(t => t.isExtend).map(t => t.typ)
  def interfaces: IList[JawaType] = parentTyps.filter(t => t.isImplement).map(t => t.typ)
  def toCode: String = s"extends ${parentTyps.map(t => t.toCode).mkString(", ")}"
}
/** One parent in the extends clause; `@kind class|interface` disambiguates. */
case class ExtendAndImplement(
    parentTyp: TypeSymbol,
    annotations: IList[Annotation])(implicit val pos: Position) extends JawaAstNode {
  def typ: JawaType = parentTyp.typ
  def isExtend: Boolean = annotations.exists { a => a.key == "kind" && a.value == "class" }
  def isImplement: Boolean = annotations.exists { a => a.key == "kind" && a.value == "interface" }
  def toCode: String = s"${parentTyp.toCode} ${annotations.map(anno => anno.toCode).mkString(" ")}"
}
/** Common shape of instance and static field declarations. */
sealed trait Field extends JawaAstNode {
  def typ: Type
  def fieldSymbol: FieldDefSymbol
  def FQN: String
  def fieldName: String = getFieldNameFromFieldFQN(FQN)
  def isStatic: Boolean
}
/** A per-instance field declared inside the record braces. */
case class InstanceFieldDeclaration(
    typ: Type,
    fieldSymbol: FieldDefSymbol,
    annotations: IList[Annotation])(implicit val pos: Position) extends Field with Declaration {
  def FQN: String = fieldSymbol.FQN
  def isStatic: Boolean = false
  def toCode: String = s"${typ.toCode} ${fieldSymbol.toCode} ${annotations.map(anno => anno.toCode).mkString(" ")}".trim + ";"
}
/** A `global` (static) field declared after the record braces. */
case class StaticFieldDeclaration(
    typ: Type,
    fieldSymbol: FieldDefSymbol,
    annotations: IList[Annotation])(implicit val pos: Position) extends Field with Declaration {
  def FQN: String = fieldSymbol.FQN
  def isStatic: Boolean = true
  def toCode: String = s"global ${typ.toCode} ${fieldSymbol.toCode} ${annotations.map(anno => anno.toCode).mkString(" ")}".trim + ";"
}
/** A type written in expression position, prefixed with '^'. */
case class TypeExpression(typ_ : Type)(implicit val pos: Position) extends JawaAstNode {
  def this(str: String, d: Int, pos: Position) = this(new Type(str, d, pos))(pos)
  def this(str: String, d: Int) = this(str, d, NoPosition)
  def this(str: String) = this(str, 0)
  def this(t: JawaType) = this(t.baseTyp, t.dimensions)
  def typ: JawaType = typ_.typ
  def toCode: String = s"^${typ_.toCode}"
}
/** A base type plus zero or more `[]` array fragments. */
case class Type(base: TypeSymbol, typeFragments: IList[TypeFragment])(implicit val pos: Position) extends JawaAstNode {
  def this(str: String, d: Int, pos: Position) = this(new TypeSymbol(str, pos), (0 until d).map(_ => TypeFragment()(pos)).toList)(pos)
  def this(str: String, d: Int) = this(str, d, NoPosition)
  def this(str: String) = this(str, 0)
  def this(t: JawaType) = this(t.baseTyp, t.dimensions)
  /** Number of array dimensions ("[]" pairs). */
  def dimensions: Int = typeFragments.size
  def baseType: JawaType = base.typ
  def typ: JawaType = getType(baseType.baseTyp, dimensions)
  def toCode: String = s"${base.toCode}${typeFragments.map(tf => tf.toCode).mkString("")}"
}
/** One `[]` array dimension marker. */
case class TypeFragment()(implicit val pos: Position) extends JawaAstNode {
  def toCode: String = "[]"
}
/**
 * A `procedure` declaration. `body` is mutable so an Unresolved body can be
 * replaced in place by its resolved form on first access (see resolvedBody).
 */
case class MethodDeclaration(
    returnType: Type,
    methodSymbol: MethodDefSymbol,
    params: IList[Parameter],
    annotations: IList[Annotation],
    var body: Body)(implicit val pos: Position) extends Declaration with ParsableAstNode {
  def isConstructor: Boolean = isJawaConstructor(name)
  /** Simple name: text after the last '.' of the method symbol. */
  def name: String = methodSymbol.id.text.substring(methodSymbol.id.text.lastIndexOf(".") + 1)
  def owner: String = signature.getClassName
  // NOTE(review): assumes the @signature annotation is always present (.get).
  def signature: Signature = new Signature(annotations.find { a => a.key == "signature" }.get.value)
  def thisParam: Option[Parameter] = params.find(x => x.isThis)
  /** i-th explicit (non-this) parameter. */
  def param(i: Int): Parameter = i match {
    case n if n >= 0 && n < paramList.size => paramList(n)
    case _ => throw new IndexOutOfBoundsException("List size " + paramList.size + " but index " + i)
  }
  /** Parameters excluding the implicit receiver. */
  def paramList: IList[Parameter] = params.filterNot(_.isThis)
  /** Resolves (and caches, by mutating `body`) the method body on demand. */
  def resolvedBody: ResolvedBody = body match {
    case rb: ResolvedBody => rb
    case ub: Body with Unresolved =>
      body = ub.resolve(signature)
      body.asInstanceOf[ResolvedBody]
  }
  def toCode: String = {
    val annoPart = annotations.map(anno => anno.toCode).mkString(" ")
    s"procedure ${returnType.toCode} ${methodSymbol.toCode}(${params.map(p => p.toCode).mkString(", ")}) $annoPart".trim + "\n" + resolvedBody.toCode
  }
}
/** A formal parameter; `@kind this|object` marks receiver/reference params. */
case class Parameter(
    typ: Type,
    paramSymbol: VarDefSymbol,
    annotations: IList[Annotation])(implicit val pos: Position) extends JawaAstNode {
  def this(t: JawaType, name: String, annotations: IList[Annotation]) = this(new Type(t), new VarDefSymbol(name), annotations)(NoPosition)
  def isThis: Boolean = annotations.exists { a => a.key == "kind" && a.value == "this" }
  def isObject: Boolean = annotations.exists { a => a.key == "kind" && (a.value == "this" || a.value == "object") }
  def name: String = paramSymbol.id.text
  def toCode: String = {
    val annoPart = annotations.map(anno => anno.toCode).mkString(" ")
    s"${typ.toCode} ${paramSymbol.toCode} $annoPart".trim
  }
}
/** A method body: either still unresolved (raw tokens/bytecode) or resolved. */
sealed trait Body extends ParsableAstNode
/** Mixin for bodies that can be resolved lazily into a ResolvedBody. */
sealed trait Unresolved {
  def resolve(sig: Signature): ResolvedBody
}
/** Body kept as raw Jawa tokens; resolution re-parses them. */
case class UnresolvedBodyJawa(bodytokens: IList[Token])(implicit val pos: Position) extends Body with Unresolved {
  def resolve(sig: Signature): ResolvedBody = JawaParser.parse[Body](bodytokens, resolveBody = true, new DefaultReporter, classOf[Body]) match {
    case Success(body) => body.asInstanceOf[ResolvedBody]
    case Failure(t) => throw t
  }
  def toCode: String = "{}"
}
//case class UnresolvedBodyJava(bodyBlock: BlockStmt)(implicit val pos: Position, cr: ClassResolver) extends Body with Unresolved {
//  def resolve(sig: Signature): ResolvedBody = {
//    cr.processBody(sig, bodyBlock)
//  }
//  def toCode: String = "{}"
//}
/** Body kept as JVM bytecode; resolution generates Jawa from it. */
case class UnresolvedBodyBytecode(bytecode: BytecodeInstructions)(implicit val pos: Position) extends Body with Unresolved {
  def resolve(sig: Signature): ResolvedBody = {
    bytecode.genJawa
  }
  def toCode: String = "{}"
}
/** Fully-resolved method body: locals, labeled locations and catch clauses. */
case class ResolvedBody(
    locals: IList[LocalVarDeclaration],
    locations: IList[Location],
    catchClauses: IList[CatchClause])(implicit val pos: Position) extends Body {
  /** Catch clauses whose [from, to] location range covers `index` (inclusive). */
  def getCatchClauses(index: Int): IList[CatchClause] = {
    catchClauses.filter{ cc =>
      index >= cc.range.fromLocation.locationIndex && index <= cc.range.toLocation.locationIndex
    }
  }
  /** Looks up a location by label; throws with a code dump when missing. */
  def location(locUri: String): Location = locations.find(l => l.locationUri.equals(locUri)).getOrElse(throw new RuntimeException(s"Location uri $locUri not found in \n$toCode"))
  /** Looks up a location by index; throws with a code dump when out of range. */
  def location(locIndex: Int): Location = locations.lift(locIndex).getOrElse(throw new RuntimeException(s"Location index $locIndex not found in \n$toCode"))
  def toCode: String = {
    val localPart = if(locals.isEmpty) "" else s" ${locals.map(l => l.toCode).mkString("\n ")}\n\n"
    val locationPart = if(locations.isEmpty) "" else s" ${locations.map(l => l.toCode).mkString("\n ")}\n\n"
    val ccPart = if(catchClauses.isEmpty) "" else s" ${catchClauses.map(cc => cc.toCode).mkString("\n ")}\n}"
    s"{\n$localPart$locationPart$ccPart}"
  }
}
/** A local variable declaration; untyped locals default to java.lang.Object. */
case class LocalVarDeclaration(
    typOpt: Option[Type],
    varSymbol: VarDefSymbol)(implicit val pos: Position) extends Declaration {
  def this(t: JawaType, name: String) = this(Some(new Type(t)), new VarDefSymbol(name))(NoPosition)
  def annotations: IList[Annotation] = ilistEmpty
  def typ: JawaType = typOpt match {
    case Some(t) => t.typ
    case None => JavaKnowledge.OBJECT
  }
  def toCode: String = s"${typOpt match {case Some(t) => t.toCode + " " case None => ""}}${varSymbol.toCode};"
}
/** A labeled program point: `#Lxxxxx. <statement>`. */
case class Location(
    locationSymbol: LocationDefSymbol,
    statement: Statement)(implicit val pos: Position) extends ParsableAstNode {
  def this(loc: String, statement: Statement) = this(new LocationDefSymbol(loc), statement)(NoPosition)
  /** Label text, or "" for the anonymous "#" label. */
  def locationUri: String = {
    if(locationSymbol.id.length <= 1) ""
    else locationSymbol.location
  }
  def locationIndex: Int = locationSymbol.locationIndex
  def toCode: String = s"${locationSymbol.toCode} ${statement.toCode}".trim
}
/**
 * Statements:
 *   Assignment
 *   EmptyStatement
 *   MonitorStatement
 *   Jump
 *   ThrowStatement
 */
sealed trait Statement extends JawaAstNode
/**
 * Jumps:
 *   CallStatement
 *   GotoStatement
 *   IfStatement
 *   ReturnStatement
 *   SwitchStatement
 */
sealed trait Jump extends Statement
/**
 * Assignments:
 *   AssignmentStatement
 *   CallStatement
 */
sealed trait Assignment extends Statement {
  def getLhs: Option[Expression with LHS]
  def getRhs: Expression with RHS
}
/**
 * A `call` statement. For non-static kinds the first var symbol of the rhs is
 * the receiver; the remaining var symbols are the arguments.
 */
case class CallStatement(
    lhsOpt: Option[VariableNameExpression],
    rhs: CallRhs,
    annotations: IList[Annotation])(implicit val pos: Position) extends Assignment with Jump {
  def this(retVar: Option[String], methodName: String, argNames: IList[String], sig: Signature, kind: String) =
    this(
      retVar.map(r => new VariableNameExpression(r)),
      new CallRhs(methodName, argNames),
      List(
        new Annotation("signature", SymbolValue(new SignatureSymbol(sig))(NoPosition)),
        new Annotation("kind", new TokenValue(kind))
      )
    )(NoPosition)
  //default is virtual call
  def kind: String = annotations.find { a => a.key == "kind" }.map(_.value).getOrElse("virtual")
  // NOTE(review): assumes the @signature annotation is always present (.get).
  def signature: Signature = new Signature(annotations.find { a => a.key == "signature" }.get.value)
  def isStatic: Boolean = kind == "static"
  def isVirtual: Boolean = kind == "virtual"
  def isSuper: Boolean = kind == "super"
  def isDirect: Boolean = kind == "direct"
  def isInterface: Boolean = kind == "interface"
  /** Receiver var symbol; None for static calls. */
  def recvVarOpt: Option[VarSymbol] = if(isStatic) None else Some(rhs.varSymbols.head)
  /** Argument var symbols, excluding the receiver when present. */
  def argVars: IList[VarSymbol] = if(isStatic) rhs.varSymbols else rhs.varSymbols.tail
  def argVar(i: Int): VarSymbol = {
    i match {
      case n if n >= 0 && n < argVars.size => argVars(n)
      case _ => throw new IndexOutOfBoundsException("List size " + argVars.size + " but index " + i)
    }
  }
  def recvOpt: Option[String] = if(isStatic) None else Some(rhs.arg(0))
  def args: IList[String] = if(isStatic) rhs.varSymbols.map(_.id.text) else rhs.varSymbols.tail.map(_.id.text)
  def arg(i: Int): String = {
    i match {
      case n if n >= 0 && n < args.size => args(n)
      case _ => throw new IndexOutOfBoundsException("List size " + args.size + " but index " + i)
    }
  }
  override def getLhs: Option[Expression with LHS] = lhsOpt
  override def getRhs: Expression with RHS = rhs
  def toCode: String = {
    val lhsPart = lhsOpt match {
      case Some(lhs) => lhs.toCode + ":= "
      case None => ""
    }
    val rhsPart = rhs.toCode
    val annoPart = annotations.map(anno => anno.toCode).mkString(" ")
    s"call $lhsPart$rhsPart $annoPart".trim + ";"
  }
}
/** Right-hand side of a call: `methodName(v0, v1, ...)`. */
case class CallRhs(
    methodNameSymbol: MethodNameSymbol,
    varSymbols: IList[VarSymbol])(implicit val pos: Position) extends Expression with RHS {
  def this(name: String, argNames: IList[String]) = this(new MethodNameSymbol(name), argNames.map(n => new VarSymbol(n)))(NoPosition)
  /** Name of the i-th var symbol (receiver included for non-static calls). */
  def arg(i: Int): String = i match {
    case n if n >= 0 && n < varSymbols.size => varSymbols(n).id.text
    case _ => throw new IndexOutOfBoundsException("List size " + varSymbols.size + " but index " + i)
  }
  def toCode: String = s"${methodNameSymbol.toCode}(${varSymbols.map(vs => vs.toCode).mkString(", ")})"
}
/** `lhs:= rhs @annotations;` — the general assignment statement. */
case class AssignmentStatement(
    lhs: Expression with LHS,
    rhs: Expression with RHS,
    annotations: IList[Annotation])(implicit val pos: Position) extends Assignment {
  def this(lName: String, rName: String, annotations: IList[Annotation]) = this(new VariableNameExpression(lName), new VariableNameExpression(rName), annotations)(NoPosition)
  def this(lName: String, i: Int) = this(new VariableNameExpression(lName), new LiteralExpression(i), ilistEmpty)(NoPosition)
  def this(lName: String, l: Long) = this(new VariableNameExpression(lName), new LiteralExpression(l), ilistEmpty)(NoPosition)
  def this(lName: String, f: Float) = this(new VariableNameExpression(lName), new LiteralExpression(f), ilistEmpty)(NoPosition)
  def this(lName: String, d: Double) = this(new VariableNameExpression(lName), new LiteralExpression(d), ilistEmpty)(NoPosition)
  def this(lName: String, rExpr: Expression with RHS, annotations: IList[Annotation]) = this(new VariableNameExpression(lName), rExpr, annotations)(NoPosition)
  def this(lExpr: Expression with LHS, rName: String, annotations: IList[Annotation]) = this(lExpr, new VariableNameExpression(rName) , annotations)(NoPosition)
  /** @kind value; defaults to "object" for allocations, "" otherwise. */
  def kind: String = annotations.find { a => a.key == "kind" }.map(_.value).getOrElse({if(rhs.isInstanceOf[Expression with New])"object" else ""})
  override def getLhs: Option[Expression with LHS] = Some(lhs)
  override def getRhs: Expression with RHS = rhs
  def toCode: String = {
    val annoPart = annotations.map(anno => anno.toCode).mkString(" ")
    s"${lhs.toCode}:= ${rhs.toCode} $annoPart".trim + ";"
  }
}
/** `throw v;` — raises the exception object held in `varSymbol`. */
case class ThrowStatement(varSymbol: VarSymbol)(implicit val pos: Position) extends Statement {
  def this(name: String) = this(new VarSymbol(name))(NoPosition)
  def toCode: String = "throw " + varSymbol.toCode + ";"
}
/** Conditional jump: `if <cond> then goto <target>;`. */
case class IfStatement(
    cond: BinaryExpression,
    targetLocation: LocationSymbol)(implicit val pos: Position) extends Jump {
  def this(cond: BinaryExpression, label: String) = this(cond, new LocationSymbol(label))(NoPosition)
  def toCode: String = "if " + cond.toCode + " then goto " + targetLocation.toCode + ";"
}
/** Unconditional jump: `goto <target>;`. */
case class GotoStatement(targetLocation: LocationSymbol)(implicit val pos: Position) extends Jump {
  def this(target: String) = this(new LocationSymbol(target))(NoPosition)
  def toCode: String = "goto " + targetLocation.toCode + ";"
}
/** `switch v` with `| const => goto L` cases and an optional default case. */
case class SwitchStatement(
    condition: VarSymbol,
    cases: IList[SwitchCase],
    defaultCaseOpt: Option[SwitchDefaultCase])(implicit val pos: Position) extends Jump {
  def this(cond: String, cases: IList[SwitchCase], default: SwitchDefaultCase) = this(new VarSymbol(cond), cases, Some(default))(NoPosition)
  def this(cond: String, cases: IList[SwitchCase]) = this(new VarSymbol(cond), cases, None)(NoPosition)
  def toCode: String = s"switch ${condition.toCode}\n ${cases.map(c => c.toCode).mkString("\n ")}${defaultCaseOpt match {case Some(d) => "\n " + d.toCode case None => ""}};"
}
/** One `| <int> => goto L` arm of a switch. */
case class SwitchCase(
    constant: Token,
    targetLocation: LocationSymbol)(implicit val pos: Position) extends JawaAstNode {
  def this(i: Int, target: String) = this(Token(Tokens.INTEGER_LITERAL, NoPosition, s"$i"), new LocationSymbol(target))(NoPosition)
  def toCode: String = s"| ${constant.rawText} => goto ${targetLocation.toCode}"
}
/** The `| else => goto L` arm of a switch. */
case class SwitchDefaultCase(targetLocation: LocationSymbol)(implicit val pos: Position) extends JawaAstNode {
  def this(target: String) = this(new LocationSymbol(target))(NoPosition)
  def toCode: String = s"| else => goto ${targetLocation.toCode}"
}
/** `return [v] @annotations;` — void returns carry `@kind void`. */
case class ReturnStatement(
    varOpt: Option[VarSymbol],
    annotations: IList[Annotation])(implicit val pos: Position) extends Jump {
  def this() = this(None, List(new Annotation("kind", new TokenValue("void"))))(NoPosition)
  def this(name: String, annotations: IList[Annotation]) = this(Some(new VarSymbol(name)), annotations)(NoPosition)
  def this(name: String) = this(name, ilistEmpty)
  def kind: String = annotations.find { a => a.key == "kind" }.map(_.value).getOrElse("")
  def toCode: String = {
    val varPart = varOpt match {case Some(v) => " " + v.toCode case None => ""}
    val annoPart = annotations.map(anno => anno.toCode).mkString(" ")
    s"return$varPart $annoPart".trim + ";"
  }
}
/** `@monitorenter v` / `@monitorexit v` — synchronization statement. */
case class MonitorStatement(
    monitor: Token,
    varSymbol: VarSymbol)(implicit val pos: Position) extends Statement {
  def this(m: String, varName: String) = this({if(m == "monitorenter") Token(Tokens.MONITOR_ENTER, NoPosition, m) else Token(Tokens.MONITOR_EXIT, NoPosition, m)}, new VarSymbol(varName))(NoPosition)
  def isEnter: Boolean = monitor.tokenType == MONITOR_ENTER
  def isExit: Boolean = monitor.tokenType == MONITOR_EXIT
  def toCode: String = s"@${monitor.rawText} ${varSymbol.toCode}"
}
/** A statement with no effect; may still carry annotations. */
case class EmptyStatement(annotations: MList[Annotation])(implicit val pos: Position) extends Statement {
  def this() = this(mlistEmpty)(NoPosition)
  def toCode: String = annotations.map(anno => anno.toCode).mkString(" ")
}
/** Any expression node. */
sealed trait Expression extends JawaAstNode
/** LHS expressions:
 *   AccessExpression
 *   IndexingExpression
 *   VariableNameExpression
 *   StaticFieldAccessExpression
 */
sealed trait LHS extends RHS
/** RHS expressions:
 *   AccessExpression
 *   BinaryExpression
 *   CallRhs
 *   CastExpression
 *   CmpExpression
 *   ConstClassExpression
 *   ExceptionExpression
 *   IndexingExpression
 *   InstanceOfExpression
 *   LengthExpression
 *   LiteralExpression
 *   VariableNameExpression
 *   StaticFieldAccessExpression
 *   NewExpression
 *   NullExpression
 *   TupleExpression
 *   UnaryExpression
 */
sealed trait RHS
/** A bare variable reference. */
case class VariableNameExpression(varSymbol: VarSymbol)(implicit val pos: Position) extends Expression with LHS {
  def this(name: String, pos: Position) = this(new VarSymbol(name))(pos)
  def this(name: String) = this(name, NoPosition)
  def name: String = varSymbol.varName
  def toCode: String = varSymbol.toCode
}
/** Access to a static (`@@`) field: `@@FQN @type ^T`. */
case class StaticFieldAccessExpression(fieldNameSymbol: FieldNameSymbol, typExp: TypeExpression)(implicit val pos: Position) extends Expression with LHS {
  def this(name: String, typ: JawaType) = this(new FieldNameSymbol(name), new TypeExpression(typ))(NoPosition)
  def name: String = fieldNameSymbol.FQN
  def toCode: String = s"${fieldNameSymbol.toCode} @type ${typExp.toCode}"
  def typ: JawaType = typExp.typ
}
/** The current exception object inside a catch handler. */
case class ExceptionExpression(typExp: TypeExpression)(implicit val pos: Position) extends Expression with RHS {
  def this(t: JawaType) = this(new TypeExpression(t))(NoPosition)
  def typ: JawaType = typExp.typ
  def toCode: String = s"Exception @type ${typExp.toCode}"
}
/** The `null` literal. */
case class NullExpression(nul: Token)(implicit val pos: Position) extends Expression with RHS {
  def this() = this(Token(Tokens.NULL, NoPosition, "null"))(NoPosition)
  def toCode: String = nul.rawText
}
/** A class-literal constant: `constclass @type ^T`. */
case class ConstClassExpression(typExp: TypeExpression)(implicit val pos: Position) extends Expression with RHS {
  def this(t: JawaType) = this(new TypeExpression(t))(NoPosition)
  def toCode: String = s"constclass @type ${typExp.toCode}"
}
/** Array length: `length @variable v`. */
case class LengthExpression(varSymbol: VarSymbol)(implicit val pos: Position) extends Expression with RHS {
  def this(name: String) = this(new VarSymbol(name))(NoPosition)
  def toCode: String = s"length @variable ${varSymbol.toCode}"
}
/** Array element access: `v[i][j]...`. */
case class IndexingExpression(
    varSymbol: VarSymbol,
    indices: IList[IndexingSuffix])(implicit val pos: Position) extends Expression with LHS {
  def this(base: String, idxs: IList[String]) = this(new VarSymbol(base), idxs.map(i => new IndexingSuffix(i)))(NoPosition)
  def base: String = varSymbol.varName
  def dimensions: Int = indices.size
  def toCode: String = s"${varSymbol.toCode}${indices.map(i => i.toCode).mkString("")}"
}
/** One `[index]` suffix; the index is either a variable or an int literal. */
case class IndexingSuffix(index: Either[VarSymbol, LiteralExpression])(implicit val pos: Position) extends JawaAstNode {
  def this(idx: String) = this(Left(new VarSymbol(idx)))(NoPosition)
  def this(idx: Int) = this(Right(new LiteralExpression(idx)))(NoPosition)
  def toCode: String = s"[${index match {case Left(vs) => vs.toCode case Right(le) => le.toCode}}]"
}
/** Instance field access: `v.field @type ^T`. */
case class AccessExpression(
    varSymbol: VarSymbol,
    fieldSym: FieldNameSymbol,
    typExp: TypeExpression)(implicit val pos: Position) extends Expression with LHS {
  def this(varName: String, fieldName: String, typ: JawaType) = this(new VarSymbol(varName), new FieldNameSymbol(fieldName), new TypeExpression(typ))(NoPosition)
  def base: String = varSymbol.varName
  def fieldName: String = fieldSym.fieldName
  def toCode: String = s"${varSymbol.toCode}.${fieldSym.toCode} @type ${typExp.toCode}"
  def typ: JawaType = typExp.typ
}
/** A tuple of literal constants, e.g. array-fill data. */
case class TupleExpression(constants: IList[LiteralExpression])(implicit val pos: Position) extends Expression with RHS {
  def integers: IList[Int] = constants.map(c => c.getInt)
  def toCode: String = s"(${constants.map(c => c.toCode).mkString(", ")})"
}
/** A cast: `(T) v`. */
case class CastExpression(
    typ: Type,
    varSym: VarSymbol)(implicit val pos: Position) extends Expression with RHS {
  def this(t: JawaType, varName: String) = this(new Type(t), new VarSymbol(varName))(NoPosition)
  def varName: String = varSym.varName
  def toCode: String = s"(${typ.toCode}) ${varSym.toCode}"
}
/** Marker for allocation expressions (object and array news). */
trait New {
  def typ: JawaType
}
/** Object allocation: `new T`. */
case class NewExpression(base: Type)(implicit val pos: Position) extends Expression with RHS with New {
  def this(t: JawaType) = this(new Type(t))(NoPosition)
  def typ: JawaType = base.typ
  def toCode: String = s"new ${base.toCode}"
}
/**
 * Array allocation: `new T[n1, n2, ...]` — the var symbols hold the size of
 * each dimension; the resulting type adds one dimension on top of `base`.
 */
case class NewArrayExpression(
    base: Type,
    varSymbols: IList[VarSymbol])(implicit val pos: Position) extends Expression with RHS with New {
  def this(t: JawaType, varNames: IList[String]) = this(new Type(t), varNames.map(vn => new VarSymbol(vn)))(NoPosition)
  def dimensions: Int = base.dimensions + 1
  def baseType: JawaType = base.typ
  def typ: JawaType = getType(baseType.baseTyp, dimensions)
  // Consistency fix: join sizes with ", " like every other toCode in this
  // file (this was the lone mkString(",") outlier).
  def toCode: String = s"new ${base.toCode}[${varSymbols.map(vs => vs.toCode).mkString(", ")}]"
}
/** Instance-of test `instanceof @variable v @type T`. */
case class InstanceOfExpression(
    varSymbol: VarSymbol,
    typExp: TypeExpression)(implicit val pos: Position) extends Expression with RHS {
  def this(varName: String, t: JawaType) = this(new VarSymbol(varName), new TypeExpression(t))(NoPosition)
  def toCode: String = "instanceof @variable " + varSymbol.toCode + " @type " + typExp.toCode
}
/**
 * Literal constant (int/long/float/double/char/string) wrapped in a lexer [[Token]].
 * The auxiliary constructors synthesize the token text, adding the usual
 * suffixes ("L", "F", "D") and quoting strings.
 */
case class LiteralExpression(constant: Token)(implicit val pos: Position) extends Expression with RHS {
  def this(i: Int, pos: Position) = this(Token(Tokens.INTEGER_LITERAL, pos, s"$i"))(pos)
  def this(i: Int) = this(i, NoPosition)
  def this(l: Long, pos: Position) = this(Token(Tokens.INTEGER_LITERAL, pos, s"${l}L"))(pos)
  def this(l: Long) = this(l, NoPosition)
  def this(f: Float, pos: Position) = this(Token(Tokens.FLOATING_POINT_LITERAL, pos, s"${f}F"))(pos)
  def this(f: Float) = this(f, NoPosition)
  def this(d: Double, pos: Position) = this(Token(Tokens.FLOATING_POINT_LITERAL, pos, s"${d}D"))(pos)
  def this(d: Double) = this(d, NoPosition)
  def this(c: Char, pos: Position) = this(Token(Tokens.CHARACTER_LITERAL, pos, s"$c"))(pos)
  def this(c: Char) = this(c, NoPosition)
  def this(s: String, pos: Position) = this(Token(Tokens.STRING_LITERAL, pos, "\\"" +s + "\\""))(pos)
  def this(s: String) = this(s, NoPosition)
  /** Token text normalized for value extraction: surrounding quotes and
    * numeric suffixes ("F"/"D"/"I"/"L") are stripped. Unknown token kinds
    * fall back to "0" rather than failing. */
  private def getLiteral: String = {
    val lit = constant.text
    constant.tokenType match {
      case STRING_LITERAL =>
        // Drop the surrounding double quotes.
        lit.substring(1, lit.length() - 1)
      case FLOATING_POINT_LITERAL =>
        lit match {
          case x if x.endsWith("F") => x.substring(0, x.length() - 1)
          case x if x.endsWith("D") => x.substring(0, x.length() - 1)
          case _ => lit
        }
      case INTEGER_LITERAL =>
        lit match {
          case x if x.endsWith("I") => x.substring(0, x.length() - 1)
          case x if x.endsWith("L") => x.substring(0, x.length() - 1)
          case _ => lit
        }
      case CHARACTER_LITERAL =>
        lit
      case _ =>
        "0"
    }
  }
  // NOTE(review): an unsuffixed integer token satisfies both isInt and isLong,
  // and an unsuffixed float token both isFloat and isDouble — confirm callers
  // test these predicates in the intended order.
  def isString: Boolean = constant.tokenType == STRING_LITERAL
  def isInt: Boolean = constant.tokenType == INTEGER_LITERAL && !constant.text.endsWith("L")
  def isLong: Boolean = constant.tokenType == INTEGER_LITERAL && !constant.text.endsWith("I")
  def isFloat: Boolean = constant.tokenType == FLOATING_POINT_LITERAL && !constant.text.endsWith("D")
  def isDouble: Boolean = constant.tokenType == FLOATING_POINT_LITERAL && !constant.text.endsWith("F")
  def getInt: Int = getLiteral.toInt
  def getLong: Long = getLiteral.toLong
  def getFloat: Float = getLiteral.toFloat
  def getDouble: Double = getLiteral.toDouble
  def getString: String = getLiteral
  def toCode: String = constant.rawText
}
/** Unary operation `op v` (e.g. negation). */
case class UnaryExpression(
    op: Token,
    unary: VarSymbol)(implicit val pos: Position) extends Expression with RHS {
  def this(op: String, name: String) = this(Token(Tokens.OP, NoPosition, op), new VarSymbol(name))(NoPosition)
  def toCode: String = op.rawText + unary.toCode
}
/**
 * Binary operation `left op rhs`, where the right operand is a variable,
 * a literal, or the null expression.
 */
case class BinaryExpression(
    left: VarSymbol,
    op: Token,
    right: Either[VarSymbol, Either[LiteralExpression, NullExpression]])(implicit val pos: Position) extends Expression with RHS {
  def this(l: String, op: String, r: String) =
    this(new VarSymbol(l), Token(Tokens.OP, NoPosition, op), Left(new VarSymbol(r)))(NoPosition)
  def this(l: String, op: String, le: LiteralExpression) =
    this(new VarSymbol(l), Token(Tokens.OP, NoPosition, op), Right(Left(le)))(NoPosition)
  def this(l: String, op: String) =
    this(new VarSymbol(l), Token(Tokens.OP, NoPosition, op), Right(Right(new NullExpression())))(NoPosition)
  def toCode: String = {
    val rhs = right.fold(_.toCode, _.fold(_.toCode, _.toCode))
    left.toCode + " " + op.rawText + " " + rhs
  }
}
/** Comparison pseudo-call `cmp(v1, v2)` for long/float/double comparisons. */
case class CmpExpression(
    cmp: Token,
    var1Symbol: VarSymbol,
    var2Symbol: VarSymbol)(implicit val pos: Position) extends Expression with RHS {
  def this(c: String, v1Name: String, v2Name: String) = this(Token(Tokens.CMP, NoPosition, c), new VarSymbol(v1Name), new VarSymbol(v2Name))(NoPosition)
  /** Operand type implied by the comparison opcode.
    * NOTE(review): the match is non-exhaustive — any other token text throws a
    * MatchError. Confirm the parser only ever produces these five opcodes. */
  def paramType: JawaType = {
    cmp.text match {
      case "fcmpl" | "fcmpg" => JavaKnowledge.FLOAT
      case "dcmpl" | "dcmpg" => JavaKnowledge.DOUBLE
      case "lcmp" => JavaKnowledge.LONG
    }
  }
  def toCode: String = s"${cmp.rawText}(${var1Symbol.toCode}, ${var2Symbol.toCode})"
}
/** Exception handler `catch T @[from..to] goto target;`. */
case class CatchClause(
    typ: Type,
    range: CatchRange,
    targetLocation: LocationSymbol)(implicit val pos: Position) extends JawaAstNode {
  def this(t: JawaType, from: String, to: String, target: String) =
    this(new Type(t), new CatchRange(from, to), new LocationSymbol(target))(NoPosition)
  /** First protected location. */
  def from: String = range.fromLocation.location
  /** Last protected location. */
  def to: String = range.toLocation.location
  def toCode: String =
    "catch " + typ.toCode + " " + range.toCode + " goto " + targetLocation.toCode + ";"
}
/** Protected location range `@[from..to]` of a catch clause. */
case class CatchRange(
    fromLocation: LocationSymbol,
    toLocation: LocationSymbol)(implicit val pos: Position) extends JawaAstNode {
  def this(from: String, to: String) = this(new LocationSymbol(from), new LocationSymbol(to))(NoPosition)
  def toCode: String = "@[" + fromLocation.toCode + ".." + toLocation.toCode + "]"
}
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import org.mockito.Mockito._
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.accounts.frs102.retriever.AbridgedAccountsBoxRetriever
import uk.gov.hmrc.ct.box.CtValidation
/**
 * Validation spec for box AC7601 (free-text note), whose presence is gated on
 * the boolean box AC7600: AC7601 is required and content-validated when AC7600
 * is true, and must be absent otherwise.
 *
 * Fix: the test descriptions previously referred to "AC4700", but the box that
 * is stubbed and drives the validation is AC7600 — the names were misleading.
 */
class AC7601Spec extends WordSpec with Matchers with MockitoSugar {

  val boxRetriever = mock[AbridgedAccountsBoxRetriever]

  "AC7601" should {

    "pass validation when not populated if AC7600 not set" in {
      when(boxRetriever.ac7600()).thenReturn(AC7600(None))
      AC7601(None).validate(boxRetriever) shouldBe empty
    }

    "pass validation when not populated if AC7600 false" in {
      when(boxRetriever.ac7600()).thenReturn(AC7600(Some(false)))
      AC7601(None).validate(boxRetriever) shouldBe empty
    }

    "give cannot be set error when populated if AC7600 not true" in {
      when(boxRetriever.ac7600()).thenReturn(AC7600(Some(false)))
      // Any populated value (even empty or invalid text) is rejected outright.
      AC7601(Some("")).validate(boxRetriever) shouldBe Set(CtValidation(Some("AC7601"), "error.AC7601.cannot.exist", None))
      AC7601(Some("%&^%./[]")).validate(boxRetriever) shouldBe Set(CtValidation(Some("AC7601"), "error.AC7601.cannot.exist", None))
      AC7601(Some("legal content")).validate(boxRetriever) shouldBe Set(CtValidation(Some("AC7601"), "error.AC7601.cannot.exist", None))
    }

    "pass validation when legal and AC7600 true" in {
      when(boxRetriever.ac7600()).thenReturn(AC7600(Some(true)))
      AC7601(Some("l")).validate(boxRetriever) shouldBe empty
      AC7601(Some("legal content")).validate(boxRetriever) shouldBe empty
    }

    "fail appropriate validations when AC7600 true" in {
      when(boxRetriever.ac7600()).thenReturn(AC7600(Some(true)))
      AC7601(None).validate(boxRetriever) shouldBe Set(CtValidation(Some("AC7601"), "error.AC7601.required", None))
      AC7601(Some("")).validate(boxRetriever) shouldBe Set(CtValidation(Some("AC7601"), "error.AC7601.required", None))
      AC7601(Some("%&^%./[]")).validate(boxRetriever) shouldBe Set(CtValidation(Some("AC7601"), "error.AC7601.regexFailure", Some(List("^"))))
    }
  }
}
| hmrc/ct-calculations | src/test/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC7601Spec.scala | Scala | apache-2.0 | 2,737 |
package slick.jdbc
import slick.sql.{FixedSqlStreamingAction, FixedSqlAction, SqlActionComponent}
import scala.language.{existentials, higherKinds}
import java.sql.{PreparedStatement, Statement}
import scala.collection.mutable.Builder
import scala.concurrent.Future
import scala.util.Try
import scala.util.control.NonFatal
import slick.SlickException
import slick.dbio._
import slick.ast._
import slick.ast.Util._
import slick.ast.TypeUtil.:@
import slick.lifted.{CompiledStreamingExecutable, Query, FlatShapeLevel, Shape}
import slick.relational.{ResultConverter, CompiledMapping}
import slick.util.{CloseableIterator, DumpInfo, SQLBuilder, ignoreFollowOnError}
trait JdbcActionComponent extends SqlActionComponent { self: JdbcProfile =>
  // Profile-level action aliases: JDBC actions are fixed-SQL actions.
  type ProfileAction[+R, +S <: NoStream, -E <: Effect] = FixedSqlAction[R, S, E]
  type StreamingProfileAction[+R, +T, -E <: Effect] = FixedSqlStreamingAction[R, T, E]
  /**
   * Base class for synchronous JDBC profile actions carrying a fixed list of
   * SQL statements. Subclasses implement `run(ctx, sql)`.
   */
  abstract class SimpleJdbcProfileAction[+R](_name: String, val statements: Vector[String]) extends SynchronousDatabaseAction[R, NoStream, Backend, Effect] with ProfileAction[R, NoStream, Effect] { self =>
    /** Execute this action against `sql` (normally [[statements]]). */
    def run(ctx: Backend#Context, sql: Vector[String]): R
    final override def getDumpInfo = super.getDumpInfo.copy(name = _name)
    final def run(ctx: Backend#Context): R = run(ctx, statements)
    // Build a copy with a replaced statement list: the anonymous subclass
    // forwards to the outer action's logic, passing its own (overridden)
    // `statements` vector.
    final def overrideStatements(_statements: Iterable[String]): ProfileAction[R, NoStream, Effect] = new SimpleJdbcProfileAction[R](_name, _statements.toVector) {
      def run(ctx: Backend#Context, sql: Vector[String]): R = self.run(ctx, statements)
    }
  }
  /** Pins the session and switches it into manual-commit transaction mode. */
  protected object StartTransaction extends SynchronousDatabaseAction[Unit, NoStream, Backend, Effect] {
    def run(ctx: Backend#Context): Unit = {
      ctx.pin
      ctx.session.startInTransaction
    }
    def getDumpInfo = DumpInfo(name = "StartTransaction")
  }
  /** Commits and ends the transaction; the session is unpinned even if the commit fails. */
  protected object Commit extends SynchronousDatabaseAction[Unit, NoStream, Backend, Effect] {
    def run(ctx: Backend#Context): Unit =
      try ctx.session.endInTransaction(ctx.session.conn.commit()) finally ctx.unpin
    def getDumpInfo = DumpInfo(name = "Commit")
  }
  /** Rolls back and ends the transaction; the session is unpinned even if the rollback fails. */
  protected object Rollback extends SynchronousDatabaseAction[Unit, NoStream, Backend, Effect] {
    def run(ctx: Backend#Context): Unit =
      try ctx.session.endInTransaction(ctx.session.conn.rollback()) finally ctx.unpin
    def getDumpInfo = DumpInfo(name = "Rollback")
  }
  /** Pushes a set of statement parameters onto the context's stack (paired with [[PopStatementParameters]]). */
  protected class PushStatementParameters(p: JdbcBackend.StatementParameters) extends SynchronousDatabaseAction[Unit, NoStream, Backend, Effect] {
    def run(ctx: Backend#Context): Unit = ctx.pushStatementParameters(p)
    def getDumpInfo = DumpInfo(name = "PushStatementParameters", mainInfo = p.toString)
  }
  /** Pops the most recently pushed statement parameters off the context's stack. */
  protected object PopStatementParameters extends SynchronousDatabaseAction[Unit, NoStream, Backend, Effect] {
    def run(ctx: Backend#Context): Unit = ctx.popStatementParameters
    def getDumpInfo = DumpInfo(name = "PopStatementParameters")
  }
  /** Sets the connection's transaction isolation level and returns the previous
    * level so a follow-up action can restore it. */
  protected class SetTransactionIsolation(ti: Int) extends SynchronousDatabaseAction[Int, NoStream, Backend, Effect] {
    def run(ctx: Backend#Context): Int = {
      val c = ctx.session.conn
      val old = c.getTransactionIsolation
      c.setTransactionIsolation(ti)
      old
    }
    def getDumpInfo = DumpInfo(name = "SetTransactionIsolation")
  }
  /** JDBC-specific combinators available on any [[DBIOAction]]: transactions,
    * isolation levels and per-statement JDBC parameters. */
  class JdbcActionExtensionMethods[E <: Effect, R, S <: NoStream](a: DBIOAction[R, S, E]) {

    /** Run this Action transactionally. This does not guarantee failures to be atomic in the
      * presence of error handling combinators. If multiple `transactionally` combinators are
      * nested, only the outermost one will be backed by an actual database transaction. Depending
      * on the outcome of running the Action it surrounds, the transaction is committed if the
      * wrapped Action succeeds, or rolled back if the wrapped Action fails. When called on a
      * [[slick.dbio.SynchronousDatabaseAction]], this combinator gets fused into the
      * action. */
    def transactionally: DBIOAction[R, S, E with Effect.Transactional] = SynchronousDatabaseAction.fuseUnsafe(
      StartTransaction.andThen(a).cleanUp(eo => if(eo.isEmpty) Commit else Rollback)(DBIO.sameThreadExecutionContext)
        .asInstanceOf[DBIOAction[R, S, E with Effect.Transactional]]
    )

    /** Run this Action with the specified transaction isolation level. This should be used around
      * the outermost `transactionally` Action. The semantics of using it inside a transaction are
      * database-dependent. It does not create a transaction by itself but it pins the session. */
    def withTransactionIsolation(ti: TransactionIsolation): DBIOAction[R, S, E] = {
      // Set the new level, run `a`, then always restore the previous level.
      val isolated =
        (new SetTransactionIsolation(ti.intValue)).flatMap(old => a.andFinally(new SetTransactionIsolation(old)))(DBIO.sameThreadExecutionContext)
      val fused =
        if(a.isInstanceOf[SynchronousDatabaseAction[_, _, _, _]]) SynchronousDatabaseAction.fuseUnsafe(isolated)
        else isolated
      fused.withPinnedSession
    }

    /** Run this Action with the given statement parameters. Any unset parameter will use the
      * current value. The following parameters can be set:
      *
      * @param rsType The JDBC `ResultSetType`
      * @param rsConcurrency The JDBC `ResultSetConcurrency`
      * @param rsHoldability The JDBC `ResultSetHoldability`
      * @param statementInit A function which is run on every `Statement` or `PreparedStatement`
      *                      directly after creating it. This can be used to set additional
      *                      statement parameters (e.g. `setQueryTimeout`). When multiple
      *                      `withStatementParameters` Actions are nested, all init functions
      *                      are run, starting with the outermost one.
      * @param fetchSize The fetch size for all statements or 0 for the default. */
    def withStatementParameters(rsType: ResultSetType = null,
                                rsConcurrency: ResultSetConcurrency = null,
                                rsHoldability: ResultSetHoldability = null,
                                statementInit: Statement => Unit = null,
                                fetchSize: Int = 0): DBIOAction[R, S, E] =
      (new PushStatementParameters(JdbcBackend.StatementParameters(rsType, rsConcurrency, rsHoldability, statementInit, fetchSize))).
        andThen(a).andFinally(PopStatementParameters)
  }
///////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////// Query Actions
///////////////////////////////////////////////////////////////////////////////////////////////
  // Extension-method types and factories for (streaming) query actions.
  type QueryActionExtensionMethods[R, S <: NoStream] = QueryActionExtensionMethodsImpl[R, S]
  type StreamingQueryActionExtensionMethods[R, T] = StreamingQueryActionExtensionMethodsImpl[R, T]

  /** Create the extension methods for a compiled non-streaming query. */
  def createQueryActionExtensionMethods[R, S <: NoStream](tree: Node, param: Any): QueryActionExtensionMethods[R, S] =
    new QueryActionExtensionMethods[R, S](tree, param)
  /** Create the extension methods for a compiled streaming query. */
  def createStreamingQueryActionExtensionMethods[R, T](tree: Node, param: Any): StreamingQueryActionExtensionMethods[R, T] =
    new StreamingQueryActionExtensionMethods[R, T](tree, param)
  /**
   * Streaming action whose stream elements expose a [[ResultSetMutator]] over an
   * updatable JDBC result set. It can only be consumed as a stream (`run` throws).
   * With `sendEndMarker` one extra element is emitted after the last row so
   * additional rows can still be inserted at the end.
   */
  class MutatingResultAction[T](rsm: ResultSetMapping, elemType: Type, collectionType: CollectionType, sql: String, param: Any, sendEndMarker: Boolean) extends SynchronousDatabaseAction[Nothing, Streaming[ResultSetMutator[T]], Backend, Effect] with ProfileAction[Nothing, Streaming[ResultSetMutator[T]], Effect] { streamingAction =>
    /** Stream state: holds the open result set iterator and the current row. */
    class Mutator(val prit: PositionedResultIterator[T], val bufferNext: Boolean, val inv: QueryInvokerImpl[T]) extends ResultSetMutator[T] {
      val pr = prit.pr
      val rs = pr.rs
      var current: T = _
      /** The state of the stream. 0 = in result set, 1 = before end marker, 2 = after end marker. */
      var state = 0
      def row = if(state > 0) throw new SlickException("After end of result set") else current
      /** Overwrite the current row with `value` via JDBC `updateRow`. */
      def row_=(value: T): Unit = {
        if(state > 0) throw new SlickException("After end of result set")
        pr.restart
        inv.updateRowValues(pr, value)
        rs.updateRow()
      }
      /** Insert a new row through the JDBC insert row. */
      def += (value: T): Unit = {
        rs.moveToInsertRow()
        pr.restart
        inv.updateRowValues(pr, value)
        rs.insertRow()
        // While still inside the result set, go back to the row we were on.
        if(state == 0) rs.moveToCurrentRow()
      }
      def delete: Unit = {
        if(state > 0) throw new SlickException("After end of result set")
        rs.deleteRow()
        // Some drivers leave the cursor past the deleted row; step back if the
        // profile requests it.
        if(invokerPreviousAfterDelete) rs.previous()
      }
      /** Emit up to `limit` elements. Returns `this` while more elements may
        * follow, or null once the stream (incl. any end marker) is exhausted. */
      def emitStream(ctx: Backend#StreamingContext, limit: Long): this.type = {
        var count = 0L
        try {
          while(count < limit && state == 0) {
            // Advance; on exhaustion move to the end-marker state (1) or done (2).
            if(!pr.nextRow) state = if(sendEndMarker) 1 else 2
            if(state == 0) {
              current = inv.extractValue(pr)
              count += 1
              ctx.emit(this)
            }
          }
          if(count < limit && state == 1) {
            // Emit the extra end-marker element exactly once.
            ctx.emit(this)
            state = 2
          }
        } catch {
          case NonFatal(ex) =>
            // Close the underlying result set before propagating the failure.
            try prit.close() catch ignoreFollowOnError
            throw ex
        }
        if(state < 2) this else null
      }
      def end = if(state > 1) throw new SlickException("After end of result set") else state > 0
      override def toString = s"Mutator(state = $state, current = $current)"
    }
    type StreamState = Mutator
    def statements = List(sql)
    def run(ctx: Backend#Context) =
      throw new SlickException("The result of .mutate can only be used in a streaming way")
    override def emitStream(ctx: Backend#StreamingContext, limit: Long, state: StreamState): StreamState = {
      // First invocation opens the result set with mutable concurrency/type settings.
      val mu = if(state ne null) state else {
        val inv = createQueryInvoker[T](rsm, param, sql)
        new Mutator(
          inv.results(0, defaultConcurrency = invokerMutateConcurrency, defaultType = invokerMutateType)(ctx.session).right.get,
          ctx.bufferNext,
          inv)
      }
      mu.emitStream(ctx, limit)
    }
    override def cancelStream(ctx: Backend#StreamingContext, state: StreamState): Unit = state.prit.close()
    override def getDumpInfo = super.getDumpInfo.copy(name = "mutate")
    def overrideStatements(_statements: Iterable[String]): MutatingResultAction[T] =
      new MutatingResultAction[T](rsm, elemType, collectionType, _statements.head, param, sendEndMarker)
  }
  /** Builds the `result` action from a compiled query tree. */
  class QueryActionExtensionMethodsImpl[R, S <: NoStream](tree: Node, param: Any) extends super.QueryActionExtensionMethodsImpl[R, S] {
    def result: ProfileAction[R, S, Effect.Read] = {
      // Resolve the SQL for this invocation, descending into ParameterSwitch by
      // evaluating its predicates against the supplied parameter.
      def findSql(n: Node): String = n match {
        case c: CompiledStatement => c.extra.asInstanceOf[SQLBuilder.Result].sql
        case ParameterSwitch(cases, default) =>
          findSql(cases.find { case (f, n) => f(param) }.map(_._2).getOrElse(default))
      }
      (tree match {
        // Collection-valued query: stream the rows into a collection builder.
        case (rsm @ ResultSetMapping(_, compiled, CompiledMapping(_, elemType))) :@ (ct: CollectionType) =>
          val sql = findSql(compiled)
          new StreamingInvokerAction[R, Any, Effect] { streamingAction =>
            protected[this] def createInvoker(sql: Iterable[String]) = createQueryInvoker(rsm, param, sql.head)
            protected[this] def createBuilder = ct.cons.createBuilder(ct.elementType.classTag).asInstanceOf[Builder[Any, R]]
            def statements = List(sql)
            override def getDumpInfo = super.getDumpInfo.copy(name = "result")
          }
        // Single-row query (`.head` / `.headOption` style): take the first row.
        case First(rsm @ ResultSetMapping(_, compiled, _)) =>
          val sql = findSql(compiled)
          new SimpleJdbcProfileAction[R]("result", Vector(sql)) {
            def run(ctx: Backend#Context, sql: Vector[String]): R =
              createQueryInvoker[R](rsm, param, sql.head).first(ctx.session)
          }
      }).asInstanceOf[ProfileAction[R, S, Effect.Read]]
    }
  }
  /** Adds streaming `result` and the mutable-result-set `mutate` action. */
  class StreamingQueryActionExtensionMethodsImpl[R, T](tree: Node, param: Any) extends QueryActionExtensionMethodsImpl[R, Streaming[T]](tree, param) with super.StreamingQueryActionExtensionMethodsImpl[R, T] {
    override def result: StreamingProfileAction[R, T, Effect.Read] = super.result.asInstanceOf[StreamingProfileAction[R, T, Effect.Read]]

    /** Same as `mutate(sendEndMarker = false)`. */
    def mutate: ProfileAction[Nothing, Streaming[ResultSetMutator[T]], Effect.Read with Effect.Write] = mutate(false)

    /** Create an Action that can be streamed in order to modify a mutable result set. All stream
      * elements will be the same [[slick.jdbc.ResultSetMutator]] object but it is in a different state each
      * time. The resulting stream is always non-buffered and events can be processed either
      * synchronously or asynchronously (but all processing must happen in sequence).
      *
      * @param sendEndMarker If set to true, an extra event is sent after the end of the result
      *                      set, providing you with a chance to insert additional rows after
      *                      seeing all results. Only `end` (to check for this special event) and
      *                      insert (`+=`) may be called in the ResultSetMutator in this case. */
    def mutate(sendEndMarker: Boolean = false): ProfileAction[Nothing, Streaming[ResultSetMutator[T]], Effect.Read with Effect.Write] = {
      // Extract the single compiled SQL statement and the result set mapping.
      val sql = tree.findNode(_.isInstanceOf[CompiledStatement]).get
        .asInstanceOf[CompiledStatement].extra.asInstanceOf[SQLBuilder.Result].sql
      val (rsm @ ResultSetMapping(_, _, CompiledMapping(_, elemType))) :@ (ct: CollectionType) = tree
      new MutatingResultAction[T](rsm, elemType, ct, sql, param, sendEndMarker)
    }
  }
///////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////// Delete Actions
///////////////////////////////////////////////////////////////////////////////////////////////
  type DeleteActionExtensionMethods = DeleteActionExtensionMethodsImpl

  /** Create the extension methods for a compiled delete query. */
  def createDeleteActionExtensionMethods(tree: Node, param: Any): DeleteActionExtensionMethods =
    new DeleteActionExtensionMethods(tree, param)

  class DeleteActionExtensionMethodsImpl(tree: Node, param: Any) {
    /** An Action that deletes the data selected by this query. */
    def delete: ProfileAction[Int, NoStream, Effect.Write] = {
      // The compiled tree must be a ResultSetMapping over a single SQL statement.
      val ResultSetMapping(_, CompiledStatement(_, sres: SQLBuilder.Result, _), _) = tree
      new SimpleJdbcProfileAction[Int]("delete", Vector(sres.sql)) {
        def run(ctx: Backend#Context, sql: Vector[String]): Int = ctx.session.withPreparedStatement(sql.head) { st =>
          // Bind the query parameters starting at JDBC index 1, then execute.
          sres.setter(st, 1, param)
          st.executeUpdate
        }
      }
    }
  }
///////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////// Schema Actions
///////////////////////////////////////////////////////////////////////////////////////////////
  type SchemaActionExtensionMethods = SchemaActionExtensionMethodsImpl

  /** Create the extension methods (create/truncate/drop) for a schema description. */
  def createSchemaActionExtensionMethods(schema: SchemaDescription): SchemaActionExtensionMethods =
    new SchemaActionExtensionMethodsImpl(schema)

  class SchemaActionExtensionMethodsImpl(schema: SchemaDescription) extends super.SchemaActionExtensionMethodsImpl {
    /** Execute all DDL statements that create this schema. */
    def create: ProfileAction[Unit, NoStream, Effect.Schema] = new SimpleJdbcProfileAction[Unit]("schema.create", schema.createStatements.toVector) {
      def run(ctx: Backend#Context, sql: Vector[String]): Unit =
        for(s <- sql) ctx.session.withPreparedStatement(s)(_.execute)
    }

    /** Execute all statements that truncate this schema's tables. */
    def truncate: ProfileAction[Unit, NoStream, Effect.Schema] = new SimpleJdbcProfileAction[Unit]("schema.truncate" , schema.truncateStatements.toVector ){
      def run(ctx: Backend#Context, sql: Vector[String]): Unit =
        for(s <- sql) ctx.session.withPreparedStatement(s)(_.execute)
    }

    /** Execute all DDL statements that drop this schema. */
    def drop: ProfileAction[Unit, NoStream, Effect.Schema] = new SimpleJdbcProfileAction[Unit]("schema.drop", schema.dropStatements.toVector) {
      def run(ctx: Backend#Context, sql: Vector[String]): Unit =
        for(s <- sql) ctx.session.withPreparedStatement(s)(_.execute)
    }
  }
///////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////// Update Actions
///////////////////////////////////////////////////////////////////////////////////////////////
  type UpdateActionExtensionMethods[T] = UpdateActionExtensionMethodsImpl[T]

  /** Create the extension methods for a compiled update query. */
  def createUpdateActionExtensionMethods[T](tree: Node, param: Any): UpdateActionExtensionMethods[T] =
    new UpdateActionExtensionMethodsImpl[T](tree, param)

  class UpdateActionExtensionMethodsImpl[T](tree: Node, param: Any) {
    // Destructure the compiled tree into the SQL statement and the converter
    // that binds a value of type T to the statement's update columns.
    protected[this] val ResultSetMapping(_,
      CompiledStatement(_, sres: SQLBuilder.Result, _),
      CompiledMapping(_converter, _)) = tree
    protected[this] val converter = _converter.asInstanceOf[ResultConverter[JdbcResultConverterDomain, T]]

    /** An Action that updates the data selected by this query. */
    def update(value: T): ProfileAction[Int, NoStream, Effect.Write] = {
      new SimpleJdbcProfileAction[Int]("update", Vector(sres.sql)) {
        def run(ctx: Backend#Context, sql: Vector[String]): Int = ctx.session.withPreparedStatement(sql.head) { st =>
          st.clearParameters
          // SET parameters come from the value; WHERE parameters follow them.
          converter.set(value, st)
          sres.setter(st, converter.width+1, param)
          st.executeUpdate
        }
      }
    }
    /** Get the statement used by `update` */
    def updateStatement: String = sres.sql
  }
///////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////// Insert Actions
///////////////////////////////////////////////////////////////////////////////////////////////
  type InsertActionExtensionMethods[T] = CountingInsertActionComposer[T]

  /** Create the row-count-returning insert extension methods. */
  def createInsertActionExtensionMethods[T](compiled: CompiledInsert): InsertActionExtensionMethods[T] =
    new CountingInsertActionComposerImpl[T](compiled)
  /** Create a composer that returns generated keys mapped through `mux`. */
  def createReturningInsertActionComposer[U, QR, RU](compiled: CompiledInsert, keys: Node, mux: (U, QR) => RU): ReturningInsertActionComposer[U, RU] =
    new ReturningInsertActionComposerImpl[U, QR, RU](compiled, keys, mux)

  // Server-side upsert is used when the profile advertises the capability;
  // otherwise an emulated, client-side upsert is run inside a transaction.
  protected lazy val useServerSideUpsert = capabilities contains JdbcCapabilities.insertOrUpdate
  protected lazy val useTransactionForUpsert = !useServerSideUpsert
  protected lazy val useServerSideUpsertReturning = useServerSideUpsert
  protected lazy val useTransactionForUpsertReturning = !useServerSideUpsertReturning
//////////////////////////////////////////////////////////// InsertActionComposer Traits
/** Extension methods to generate the JDBC-specific insert actions. */
  /** Extension methods to generate the JDBC-specific insert actions. */
  trait SimpleInsertActionComposer[U] extends super.InsertActionExtensionMethodsImpl[U] {
    /** The return type for `insertOrUpdate` operations */
    type SingleInsertOrUpdateResult

    /** Get the SQL statement for a standard (soft) insert */
    def insertStatement: String

    /** Get the SQL statement for a forced insert */
    def forceInsertStatement: String

    /** Insert a single row, skipping AutoInc columns. */
    def += (value: U): ProfileAction[SingleInsertResult, NoStream, Effect.Write]

    /** Insert a single row, including AutoInc columns. This is not supported
      * by all database engines (see
      * [[slick.jdbc.JdbcCapabilities.forceInsert]]). */
    def forceInsert(value: U): ProfileAction[SingleInsertResult, NoStream, Effect.Write]

    /** Insert multiple rows, skipping AutoInc columns.
      * Uses JDBC's batch update feature if supported by the JDBC driver.
      * Returns Some(rowsAffected), or None if the database returned no row
      * count for some part of the batch. If any part of the batch fails, an
      * exception is thrown. */
    def ++= (values: Iterable[U]): ProfileAction[MultiInsertResult, NoStream, Effect.Write]

    /** Insert multiple rows, including AutoInc columns.
      * This is not supported by all database engines (see
      * [[slick.jdbc.JdbcCapabilities.forceInsert]]).
      * Uses JDBC's batch update feature if supported by the JDBC driver.
      * Returns Some(rowsAffected), or None if the database returned no row
      * count for some part of the batch. If any part of the batch fails, an
      * exception is thrown. */
    def forceInsertAll(values: Iterable[U]): ProfileAction[MultiInsertResult, NoStream, Effect.Write]

    /** Insert a single row if its primary key does not exist in the table,
      * otherwise update the existing record. */
    def insertOrUpdate(value: U): ProfileAction[SingleInsertOrUpdateResult, NoStream, Effect.Write]
  }
/** Extension methods to generate the JDBC-specific insert actions. */
  /** Extension methods to generate the JDBC-specific insert actions,
    * including query-based (INSERT ... SELECT) inserts. */
  trait InsertActionComposer[U] extends SimpleInsertActionComposer[U] {
    /** The result type of operations that insert data produced by another query */
    type QueryInsertResult

    /** Get the SQL statement for inserting a single row from a scalar expression */
    def forceInsertStatementFor[TT](c: TT)(implicit shape: Shape[_ <: FlatShapeLevel, TT, U, _]): String

    /** Get the SQL statement for inserting data produced by another query */
    def forceInsertStatementFor[TT, C[_]](query: Query[TT, U, C]): String

    /** Get the SQL statement for inserting data produced by another query */
    def forceInsertStatementFor[TT, C[_]](compiledQuery: CompiledStreamingExecutable[Query[TT, U, C], _, _]): String

    /** Insert a single row from a scalar expression */
    def forceInsertExpr[TT](c: TT)(implicit shape: Shape[_ <: FlatShapeLevel, TT, U, _]): ProfileAction[QueryInsertResult, NoStream, Effect.Write]

    /** Insert data produced by another query */
    def forceInsertQuery[TT, C[_]](query: Query[TT, U, C]): ProfileAction[QueryInsertResult, NoStream, Effect.Write]

    /** Insert data produced by another query */
    def forceInsertQuery[TT, C[_]](compiledQuery: CompiledStreamingExecutable[Query[TT, U, C], _, _]): ProfileAction[QueryInsertResult, NoStream, Effect.Write]
  }
/** An InsertInvoker that returns the number of affected rows. */
  /** An InsertInvoker that returns the number of affected rows. */
  trait CountingInsertActionComposer[U] extends InsertActionComposer[U] {
    // All operations report affected-row counts; batch inserts may not have one.
    type SingleInsertResult = Int
    type MultiInsertResult = Option[Int]
    type SingleInsertOrUpdateResult = Int
    type QueryInsertResult = Int

    /** Add a mapping from the inserted values and the generated key to compute a new return value. */
    def returning[RT, RU, C[_]](value: Query[RT, RU, C]): ReturningInsertActionComposer[U, RU]
  }
/** An InsertActionComposer that returns generated keys or other columns. */
  /** An InsertActionComposer that returns generated keys or other columns. */
  trait ReturningInsertActionComposer[U, RU] extends InsertActionComposer[U] with IntoInsertActionComposer[U, RU] { self =>

    /** Specifies a mapping from inserted values and generated keys to a desired value.
      * @param f Function that maps inserted values and generated keys to a desired value.
      * @tparam R target type of the mapping */
    def into[R](f: (U, RU) => R): IntoInsertActionComposer[U, R]
  }
/** An InsertActionComposer that returns a mapping of the inserted and generated data. */
  /** An InsertActionComposer that returns a mapping of the inserted and generated data. */
  trait IntoInsertActionComposer[U, RU] extends SimpleInsertActionComposer[U] { self =>
    // Each insert yields mapped values; multi/query inserts yield one per row.
    type SingleInsertResult = RU
    type MultiInsertResult = Seq[RU]
    type SingleInsertOrUpdateResult = Option[RU]
    type QueryInsertResult = Seq[RU]
  }
//////////////////////////////////////////////////////////// InsertActionComposer Implementations
protected abstract class InsertActionComposerImpl[U](val compiled: CompiledInsert) extends InsertActionComposer[U] {
    // Build the SQL for an INSERT ... SELECT from an (un)compiled source query.
    protected[this] def buildQueryBasedInsert[TT, C[_]](query: Query[TT, U, C]): SQLBuilder.Result =
      compiled.forceInsert.ibr.buildInsert(queryCompiler.run(query.toNode).tree)

    protected[this] def buildQueryBasedInsert[TT, C[_]](compiledQuery: CompiledStreamingExecutable[Query[TT, U, C], _, _]): SQLBuilder.Result =
      compiled.forceInsert.ibr.buildInsert(compiledQuery.compiledQuery)

    def insertStatement = compiled.standardInsert.sql

    def forceInsertStatement = compiled.forceInsert.sql

    def += (value: U): ProfileAction[SingleInsertResult, NoStream, Effect.Write] =
      new SingleInsertAction(compiled.standardInsert, value)

    def forceInsert(value: U): ProfileAction[SingleInsertResult, NoStream, Effect.Write] =
      new SingleInsertAction(compiled.forceInsert, value)

    def ++= (values: Iterable[U]): ProfileAction[MultiInsertResult, NoStream, Effect.Write] =
      new MultiInsertAction(compiled.standardInsert, values)

    def forceInsertAll(values: Iterable[U]): ProfileAction[MultiInsertResult, NoStream, Effect.Write] =
      new MultiInsertAction(compiled.forceInsert, values)

    def insertOrUpdate(value: U): ProfileAction[SingleInsertOrUpdateResult, NoStream, Effect.Write] =
      new InsertOrUpdateAction(value)

    def forceInsertStatementFor[TT](c: TT)(implicit shape: Shape[_ <: FlatShapeLevel, TT, U, _]) =
      buildQueryBasedInsert(Query(c)(shape)).sql

    def forceInsertStatementFor[TT, C[_]](query: Query[TT, U, C]) =
      buildQueryBasedInsert(query).sql

    def forceInsertStatementFor[TT, C[_]](compiledQuery: CompiledStreamingExecutable[Query[TT, U, C], _, _]) =
      buildQueryBasedInsert(compiledQuery).sql

    def forceInsertExpr[TT](c: TT)(implicit shape: Shape[_ <: FlatShapeLevel, TT, U, _]): ProfileAction[QueryInsertResult, NoStream, Effect.Write] =
      new InsertQueryAction(buildQueryBasedInsert((Query(c)(shape))), null)

    def forceInsertQuery[TT, C[_]](query: Query[TT, U, C]): ProfileAction[QueryInsertResult, NoStream, Effect.Write] =
      new InsertQueryAction(buildQueryBasedInsert(query), null)

    def forceInsertQuery[TT, C[_]](compiledQuery: CompiledStreamingExecutable[Query[TT, U, C], _, _]): ProfileAction[QueryInsertResult, NoStream, Effect.Write] =
      new InsertQueryAction(buildQueryBasedInsert(compiledQuery), compiledQuery.param)

    // Upsert strategy and batch capability, overridable per composer.
    protected def useServerSideUpsert = self.useServerSideUpsert
    protected def useTransactionForUpsert = self.useTransactionForUpsert
    protected def useBatchUpdates(implicit session: Backend#Session) = session.capabilities.supportsBatchUpdates

    // Result-shaping hooks implemented by the concrete (counting/returning) composers.
    protected def retOne(st: Statement, value: U, updateCount: Int): SingleInsertResult
    protected def retMany(values: Iterable[U], individual: Seq[SingleInsertResult]): MultiInsertResult
    protected def retManyBatch(st: Statement, values: Iterable[U], updateCounts: Array[Int]): MultiInsertResult
    protected def retOneInsertOrUpdate(st: Statement, value: U, updateCount: Int): SingleInsertOrUpdateResult
    protected def retOneInsertOrUpdateFromInsert(st: Statement, value: U, updateCount: Int): SingleInsertOrUpdateResult
    protected def retOneInsertOrUpdateFromUpdate: SingleInsertOrUpdateResult
    protected def retQuery(st: Statement, updateCount: Int): QueryInsertResult

    // Statement-preparation hooks (e.g. to request generated keys for inserts).
    protected def preparedInsert[T](sql: String, session: Backend#Session)(f: PreparedStatement => T) =
      session.withPreparedStatement(sql)(f)

    protected def preparedOther[T](sql: String, session: Backend#Session)(f: PreparedStatement => T) =
      session.withPreparedStatement(sql)(f)
/** Executes a prepared single-row insert and maps the result through `retOne`. */
class SingleInsertAction(a: compiled.Artifacts, value: U) extends SimpleJdbcProfileAction[SingleInsertResult]("SingleInsertAction", Vector(a.sql)) {
  def run(ctx: Backend#Context, sql: Vector[String]) = preparedInsert(a.sql, ctx.session) { st =>
    st.clearParameters()
    a.converter.set(value, st)
    val count = st.executeUpdate()
    retOne(st, value, count)
  }
}
/**
 * Executes a multi-row insert. Uses JDBC batching when the session supports
 * it; otherwise (or for an IndexedSeq of fewer than two rows) executes one
 * statement per row and combines the individual results via `retMany`.
 */
class MultiInsertAction(a: compiled.Artifacts, values: Iterable[U]) extends SimpleJdbcProfileAction[MultiInsertResult]("MultiInsertAction", Vector(a.sql)) {
  def run(ctx: Backend#Context, sql: Vector[String]) = {
    val sql1 = sql.head
    if(!useBatchUpdates(ctx.session) || (values.isInstanceOf[IndexedSeq[_]] && values.asInstanceOf[IndexedSeq[_]].length < 2))
      // Per-row path: each row runs through retOne, results collected into a Vector.
      retMany(values, values.map { v =>
        preparedInsert(sql1, ctx.session) { st =>
          st.clearParameters()
          a.converter.set(v, st)
          retOne(st, v, st.executeUpdate())
        }
      }(collection.breakOut): Vector[SingleInsertResult])
    else preparedInsert(a.sql, ctx.session) { st =>
      // Batched path: one batch entry per row, executed in a single round trip.
      st.clearParameters()
      for(value <- values) {
        a.converter.set(value, st)
        st.addBatch()
      }
      val counts = st.executeBatch()
      retManyBatch(st, values, counts)
    }
  }
}
/**
 * Inserts the row or updates it if it already exists. Uses a single native
 * upsert statement when `useServerSideUpsert` is set; otherwise emulates the
 * upsert with a check query followed by either an update or an insert,
 * optionally wrapped in a transaction (`useTransactionForUpsert`).
 */
class InsertOrUpdateAction(value: U) extends SimpleJdbcProfileAction[SingleInsertOrUpdateResult]("InsertOrUpdateAction",
    if(useServerSideUpsert) Vector(compiled.upsert.sql) else Vector(compiled.checkInsert.sql, compiled.updateInsert.sql, compiled.standardInsert.sql)) {

  def run(ctx: Backend#Context, sql: Vector[String]) = {
    def f: SingleInsertOrUpdateResult =
      if(useServerSideUpsert) nativeUpsert(value, sql.head)(ctx.session) else emulate(value, sql(0), sql(1), sql(2))(ctx.session)
    if(useTransactionForUpsert) {
      ctx.session.startInTransaction
      var done = false
      try {
        val res = f
        done = true
        ctx.session.endInTransaction(ctx.session.conn.commit())
        res
      } finally {
        // Roll back when f or the commit did not complete; errors during the
        // rollback itself are ignored so the original exception propagates.
        if(!done)
          try ctx.session.endInTransaction(ctx.session.conn.rollback()) catch ignoreFollowOnError
      }
    } else f
  }

  /** Single-statement upsert using the database's native support. */
  protected def nativeUpsert(value: U, sql: String)(implicit session: Backend#Session): SingleInsertOrUpdateResult =
    preparedInsert(sql, session) { st =>
      st.clearParameters()
      compiled.upsert.converter.set(value, st)
      val count = st.executeUpdate()
      retOneInsertOrUpdate(st, value, count)
    }

  /** Emulated upsert: check whether the row exists, then update or insert. */
  protected def emulate(value: U, checkSql: String, updateSql: String, insertSql: String)(implicit session: Backend#Session): SingleInsertOrUpdateResult = {
    val found = preparedOther(checkSql, session) { st =>
      st.clearParameters()
      compiled.checkInsert.converter.set(value, st)
      val rs = st.executeQuery()
      try rs.next() finally rs.close()
    }
    if(found) preparedOther(updateSql, session) { st =>
      st.clearParameters()
      compiled.updateInsert.converter.set(value, st)
      st.executeUpdate()
      retOneInsertOrUpdateFromUpdate
    } else preparedInsert(insertSql, session) { st =>
      st.clearParameters()
      compiled.standardInsert.converter.set(value, st)
      val count = st.executeUpdate()
      retOneInsertOrUpdateFromInsert(st, value, count)
    }
  }
}
/**
 * Executes an insert whose rows come from a query (built by
 * `buildQueryBasedInsert`), binding the optional query parameter.
 */
class InsertQueryAction(sbr: SQLBuilder.Result, param: Any) extends SimpleJdbcProfileAction[QueryInsertResult]("InsertQueryAction", Vector(sbr.sql)) {
  def run(ctx: Backend#Context, sql: Vector[String]) = preparedInsert(sql.head, ctx.session) { st =>
    st.clearParameters()
    sbr.setter(st, 1, param)
    retQuery(st, st.executeUpdate())
  }
}
}
/** Insert composer whose results are affected-row counts (None when unknown). */
protected class CountingInsertActionComposerImpl[U](compiled: CompiledInsert) extends InsertActionComposerImpl[U](compiled) with CountingInsertActionComposer[U] {

  def returning[RT, RU, C[_]](value: Query[RT, RU, C]): ReturningInsertActionComposer[U, RU] =
    createReturningInsertActionComposer[U, RU, RU](compiled, value.toNode, (_, r) => r)

  // Single inserts report the JDBC update count; insert-or-update always reports 1.
  protected def retOne(st: Statement, value: U, updateCount: Int) = updateCount
  protected def retOneInsertOrUpdate(st: Statement, value: U, updateCount: Int) = 1
  protected def retOneInsertOrUpdateFromInsert(st: Statement, value: U, updateCount: Int) = 1
  protected def retOneInsertOrUpdateFromUpdate = 1
  protected def retQuery(st: Statement, updateCount: Int) = updateCount
  protected def retMany(values: Iterable[U], individual: Seq[SingleInsertResult]) = Some(individual.sum)

  // Batch results: sum the per-row counts. If the driver reported
  // SUCCESS_NO_INFO for any row the total is unknown (None); a failed row
  // aborts with a SlickException identifying its 1-based position.
  protected def retManyBatch(st: Statement, values: Iterable[U], updateCounts: Array[Int]) = {
    var unknown = false
    var count = 0
    for((res, idx) <- updateCounts.zipWithIndex) res match {
      case Statement.SUCCESS_NO_INFO => unknown = true
      case Statement.EXECUTE_FAILED => throw new SlickException("Failed to insert row #" + (idx+1))
      case i => count += i
    }
    if(unknown) None else Some(count)
  }
}
/**
 * Insert composer that reads values back from the statement's generated-keys
 * result set and maps them together with the inserted value through `mux`.
 */
protected class ReturningInsertActionComposerImpl[U, QR, RU](compiled: CompiledInsert, val keys: Node, val mux: (U, QR) => RU) extends InsertActionComposerImpl[U](compiled) with ReturningInsertActionComposer[U, RU] {

  def into[R](f: (U, RU) => R): IntoInsertActionComposer[U, R] =
    createReturningInsertActionComposer[U, QR, R](compiled, keys, (v, r) => f(v, mux(v, r)))

  // Returning-keys variants of the upsert capability flags.
  override protected def useServerSideUpsert = self.useServerSideUpsertReturning
  override protected def useTransactionForUpsert = self.useTransactionForUpsertReturning

  protected def checkInsertOrUpdateKeys: Unit =
    if(keyReturnOther) throw new SlickException("Only a single AutoInc column may be returned from an insertOrUpdate call")

  /** Invoker that reads the returned keys from the statement's generated keys. */
  protected def buildKeysResult(st: Statement): Invoker[QR] =
    ResultSetInvoker[QR](_ => st.getGeneratedKeys)(pr => keyConverter.read(pr.rs).asInstanceOf[QR])

  // Returning keys from batch inserts is generally not supported
  override protected def useBatchUpdates(implicit session: Backend#Session) = false

  protected lazy val (keyColumns, keyConverter, keyReturnOther) = compiled.buildReturnColumns(keys)

  // Ask the driver to expose generated keys for the listed columns.
  override protected def preparedInsert[T](sql: String, session: Backend#Session)(f: PreparedStatement => T) =
    session.withPreparedInsertStatement(sql, keyColumns.toArray)(f)

  protected def retOne(st: Statement, value: U, updateCount: Int) = mux(value, buildKeysResult(st).first(null))
  protected def retMany(values: Iterable[U], individual: Seq[SingleInsertResult]) = individual
  protected def retManyBatch(st: Statement, values: Iterable[U], updateCounts: Array[Int]) =
    (values, buildKeysResult(st).buildColl[Vector](null, implicitly)).zipped.map(mux)(collection.breakOut)
  protected def retQuery(st: Statement, updateCount: Int) =
    buildKeysResult(st).buildColl[Vector](null, implicitly).asInstanceOf[QueryInsertResult] // Not used with "into"
  protected def retOneInsertOrUpdate(st: Statement, value: U, updateCount: Int): SingleInsertOrUpdateResult =
    if(updateCount != 1) None else buildKeysResult(st).firstOption(null).map(r => mux(value, r))
  protected def retOneInsertOrUpdateFromInsert(st: Statement, value: U, updateCount: Int): SingleInsertOrUpdateResult =
    Some(mux(value, buildKeysResult(st).first(null)))
  protected def retOneInsertOrUpdateFromUpdate: SingleInsertOrUpdateResult = None
}
}
| xavier-fernandez/slick | slick/src/main/scala/slick/jdbc/JdbcActionComponent.scala | Scala | bsd-2-clause | 35,084 |
/*
* comma, A Code Measurement and Analysis Tool
* Copyright (C) 2010-2015 Steffen Kram
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package de.uniulm.iai.comma.measurement.ast
import com.buschmais.jqassistant.core.store.api.model.Descriptor
import de.uniulm.iai.comma.lib.ast.javasource.{EnhancedCommonTree, JavaLexer}
import de.uniulm.iai.comma.model.{Value, Measure}
import scala.collection.JavaConversions._
/** Factory for [[CommentVisitor]] instances; declares the measures they produce. */
object CommentVisitor extends TreeVisitorFactory {

  /** All measures a CommentVisitor can report (see its measuredValues). */
  override def measures() = Vector(
    Measure.LINE_COMMENT_COUNT,
    Measure.LINE_COMMENT_LENGTH,
    Measure.BLOCK_COMMENT_COUNT,
    Measure.BLOCK_COMMENT_LINES,
    Measure.BLOCK_COMMENT_LENGTH,
    Measure.JAVADOC_COUNT,
    Measure.JAVADOC_LINES,
    // Fixed: this entry previously duplicated BLOCK_COMMENT_LENGTH, but the
    // visitor reports JAVADOC_LENGTH for javadoc comments.
    Measure.JAVADOC_LENGTH
  )

  override def createVisitor(descriptor: Descriptor, artifact: Option[String]): CommentVisitor = {
    new CommentVisitor(artifact)
  }
}
/**
 * Counts line, block and javadoc comments (number, spanned lines and total
 * character length) on the visited AST nodes for the given artifact.
 */
class CommentVisitor(artifact: Option[String]) extends TreeVisitor {

  // Running totals per comment category, updated while visiting nodes.
  private var lineComments = 0
  private var lineCommentChars = 0L
  private var blockComments = 0
  private var blockCommentLineTotal = 0
  private var blockCommentChars = 0L
  private var javadocComments = 0
  private var javadocLineTotal = 0
  private var javadocChars = 0L

  /** Number of single-line comments counted so far. */
  def lineCommentCount = lineComments

  /** Total character length of all single-line comments. */
  def lineCommentLength = lineCommentChars

  /** Number of block comments counted so far. */
  def blockCommentCount = blockComments

  /** Total number of lines spanned by block comments. */
  def blockCommentLines = blockCommentLineTotal

  /** Total character length of all block comments. */
  def blockCommentLength = blockCommentChars

  /** Number of javadoc comments counted so far. */
  def javadocCommentCount = javadocComments

  /** Total number of lines spanned by javadoc comments. */
  def javadocCommentLines = javadocLineTotal

  /** Total character length of all javadoc comments. */
  def javadocCommentLength = javadocChars

  /** Tallies every comment token attached to the given node. */
  def visit(node: EnhancedCommonTree) = {
    (node.getPrecedingComments ++ node.getFollowingComments).foreach { token =>
      val text = token.getText
      token.getType match {
        case JavaLexer.LINE_COMMENT =>
          lineComments += 1
          lineCommentChars += text.length
        case JavaLexer.BLOCK_COMMENT =>
          blockComments += 1
          blockCommentLineTotal += text.linesIterator.size
          blockCommentChars += text.length
        case JavaLexer.JAVADOC_COMMENT =>
          javadocComments += 1
          javadocLineTotal += text.linesIterator.size
          javadocChars += text.length
        case _ => // other token types carry no comment data; ignore
      }
    }
  }

  /**
   * Comment values are only counted if the analysed structure is the topmost
   * structure: aggregating comment counts across structure boundaries does
   * not make sense for this metric (a dedicated visitor would do that).
   *
   * @param node the node to visit
   * @param topmostStructure whether the node is the outermost structure
   */
  override def visit(node: EnhancedCommonTree, topmostStructure: Boolean) {
    if (topmostStructure) visit(node)
  }

  /** Emits one Value per measure, per category, only for non-zero counts. */
  override def measuredValues(): Iterable[Value] = {
    val lineValues =
      if (lineComments > 0) Vector(
        Value(artifact, Measure.LINE_COMMENT_COUNT, lineComments),
        Value(artifact, Measure.LINE_COMMENT_LENGTH, lineCommentChars))
      else Vector.empty
    val blockValues =
      if (blockComments > 0) Vector(
        Value(artifact, Measure.BLOCK_COMMENT_COUNT, blockComments),
        Value(artifact, Measure.BLOCK_COMMENT_LINES, blockCommentLineTotal),
        Value(artifact, Measure.BLOCK_COMMENT_LENGTH, blockCommentChars))
      else Vector.empty
    val javadocValues =
      if (javadocComments > 0) Vector(
        Value(artifact, Measure.JAVADOC_COUNT, javadocComments),
        Value(artifact, Measure.JAVADOC_LINES, javadocLineTotal),
        Value(artifact, Measure.JAVADOC_LENGTH, javadocChars))
      else Vector.empty
    lineValues ++ blockValues ++ javadocValues
  }
}
| stefri/jqassistant-plugin-javasrc | src/main/scala/de/uniulm/iai/comma/measurement/ast/CommentVisitor.scala | Scala | gpl-3.0 | 4,807 |
// B.bar simply delegates to A.foo, which is defined elsewhere in this project.
object B {
  def bar = A.foo
} | dotty-staging/dotty | sbt-test/source-dependencies/check-recompilations/B.scala | Scala | apache-2.0 | 30 |
/*
* Copyright 2016 Coral realtime streaming analytics (http://coral-streaming.github.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.coral.actors.transform
import java.util.concurrent.TimeUnit
import akka.actor.{Cancellable, Props}
import akka.event.slf4j.Logger
import io.coral.actors.CoralActor
import io.coral.utils.Utils
import org.json4s.JsonAST.JValue
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import org.uncommons.maths.random._
import scala.collection.mutable.{Stack => mStack}
import scala.concurrent.duration.Duration
import scala.util.matching.Regex
import scala.collection.mutable.{Map => mMap}
/**
* An actor that can generate random data based on a definition in the
* JSON constructor. The format object has the same format as the
* expected output but with a sampling from a normal distribution,
* a uniform distribution or a list of choices in it, as in the example
* below. The field value contains the parameters of the distribution.
* The rate sets the number of objects that it generates per second
* and times sets the total number of objects that will be emitted.
* Delay sets the time to wait before emitting the first object.
*
* {
* "format": {
* // Normal distribution with mu 100 and sigma 10
* "field1": "N(100, 10)",
* // choose randomly from a list of values
* "field2": "['a', 'b', 'c']",
* // uniform distribution with max = 100
* "field3": "U(100)"
* }, "timer": {
* // per second
* "rate": "10",
* // total number of items to generate
* "times": "100",
* // initial delay in seconds
* "delay": "10"
* }
* }
*/
/** Message: start emitting generated data (honoring the configured delay). */
case class StartGenerator()
/** Internal tick message: generate and emit the next object. */
case class GenerateData()
/** Message: stop emitting generated data. */
case class StopGenerator()
object GeneratorActor {
  implicit val formats = org.json4s.DefaultFormats

  /**
   * Extracts the generator configuration from the actor's JSON definition.
   * Yields (format, generators, rate, times, delay); None when a mandatory
   * field is missing, unparseable, or the rate is negative.
   */
  def getParams(json: JValue) = {
    for {
      // The structure of the object to emit
      format <- (json \\ "params" \\ "format").extractOpt[JObject]
      generators <- getGenerators(format)
      rate <- (json \\ "params" \\ "timer" \\ "rate").extractOpt[Double]
      delay <- getDoubleValueOrZero(json \\ "params" \\ "timer" \\ "delay")
      if rate >= 0
    } yield {
      // "times" is optional: absent means generate indefinitely.
      val times: Option[Int] = getTimes(json \\ "params" \\ "timer" \\ "times")
      (format, generators, rate, times, delay)
    }
  }

  /**
   * Return a map of generators based on the input JSON format.
   * @param format The JSON object which contains the generators, if any.
   * @return A Map which points generator strings to their generators.
   *         Example: "N(100, 2)" -> NormalGenerator(...)
   *         Every time a "N(100, 2)" generator is encountered in the
   *         format JSON, this generator is called.
   */
  def getGenerators(format: JObject): Option[Map[String, Generator]] = {
    try {
      // Iterative depth-first walk over the (possibly nested) format object;
      // every string leaf becomes a generator keyed by its literal definition.
      val values: mStack[JValue] = new mStack()
      val result = mMap.empty[String, Generator]
      values.pushAll(format.children)
      while (!values.isEmpty) {
        val next = values.pop
        next match {
          case JObject(j) =>
            // Nested object, add all children
            values.pushAll(j.children)
          case JString(s) =>
            // Dispatch on the two-character prefix of the definition string.
            val generator = try {
              s.substring(0, 2) match {
                case "N(" => // N(mu, sigma)
                  new NormalGenerator(s)
                case "U(" => // U(max)
                  new UniformGenerator(s)
                case "['" => // "['e1', 'e2', ...]"
                  new ArrayChoiceGenerator(s)
                case _ =>
                  // Assume that the string is a simple constant string
                  new ConstantGenerator(s)
              }
            } catch {
              // Malformed definitions fall back to emitting the raw string.
              case _: Exception => new ConstantGenerator(s)
            }
            result.put(s, generator)
          case _ => // non-string, non-object leaves need no generator
        }
      }
      Some(result.toMap)
    } catch {
      case e: Exception =>
        Logger(getClass.getName).error(e.getMessage())
        Utils.stackTraceToString(e)
        None
    }
  }

  /**
   * Returns the double value of a given JValue.
   * In case it cannot be parsed or it is not a numeric value, return 0.
   * The minimum value is 0 (negative values are clamped).
   */
  def getDoubleValueOrZero(value: JValue) = {
    val result = value match {
      case JInt(d) => d.toDouble
      case JDouble(d) => d
      case _ => 0
    }
    Some(result.max(0))
  }

  /**
   * Same thing but then for integers.
   */
  def getIntValueOrZero(value: JValue) = {
    getDoubleValueOrZero(value).map(_.toInt)
  }

  /** Extracts the optional total number of objects to emit. */
  def getTimes(value: JValue) = {
    value.extractOpt[Int]
  }

  /** Creates Props only when the JSON definition is valid. */
  def apply(json: JValue): Option[Props] = {
    getParams(json).map(_ => Props(classOf[GeneratorActor], json))
  }
}
class GeneratorActor(json: JObject) extends CoralActor(json) {
  // Safe to .get here: the companion's apply only creates Props for this
  // actor when getParams succeeds on the same JSON.
  val (format, generators, rate, times, delay) = GeneratorActor.getParams(json).get

  // time to wait before ticking again in milliseconds
  def timerDuration: Double = 1000.0 / rate
  // The timestamp on which the generator was started
  var startTime: Long = _
  // The number of messages generated until now
  var count = 0
  // Whether or not the generator was started
  var started = false
  // time since last data was generated
  var previousTime: Long = _

  override def preStart() {
    super.preStart()
  }

  override def receiveExtra: Receive = {
    case StartGenerator() =>
      // Only start when unlimited (times undefined) or the budget is positive.
      if (!times.isDefined || (times.isDefined && times.get > 0)) {
        startTime = System.currentTimeMillis
        log.info(s"Starting generator with an interval of $timerDuration milliseconds")
        if (delay > 0) {
          // Honor the configured initial delay before the first tick.
          in(Duration((delay * 1000).toLong, TimeUnit.MILLISECONDS)) {
            self ! GenerateData()
            started = true
          }
        } else {
          self ! GenerateData()
          started = true
        }
      }
    case StopGenerator() =>
      if (started) {
        started = false
      }
    case GenerateData() =>
      generateData(format)
  }

  // Exposed actor state for introspection.
  override def state = Map(
    ("rate", render(rate)),
    ("times", render(times)),
    ("delay", render(delay)),
    ("format", render(format)),
    ("count", render(count))
  )

  /**
   * Generate data based on a JObject format definition.
   * The fields are maintained but replaced with values from
   * a uniform, normal or discrete choice distribution.
   * @param format The format JSON object to process
   * @return A JSON object in the same format but with values
   *         filled in (JNothing once the configured budget is exhausted).
   */
  def generateData(format: JObject): JValue = {
    if (times.isDefined && count >= times.get) {
      self ! StopGenerator()
      JNothing
    } else {
      count += 1
      // This is done in a "string processing" mode because of performance reasons.
      // Not sure how much slower or faster merge and "~" will be, but assuming this is faster
      val template = pretty(render(format))
      // Replace each quoted definition string with a freshly sampled value.
      val result = generators.keys.foldLeft(template)((acc, curr) => {
        val value = generators(curr).nextValue
        acc.replace("\\"" + curr + "\\"", value.toString)
      })
      timeNextGenerate()
      val parsed = parse(result).asInstanceOf[JObject]
      emit(parsed)
      parsed
    }
  }

  /**
   * Calculate when the next generateData event should be fired.
   * The GeneratorActor can be too fast in comparison to the "wall" time,
   * or it can be too slow. In the case it is too slow (the actual rate of messages
   * is lower than the given rate) there is really nothing that can be done.
   * In the case it is too fast, slow down until we are in sync with the given
   * rate again.
   *
   * This was not implemented using system.scheduler because it
   * is too inacurrate and this gives us more control.
   */
  def timeNextGenerate() {
    val actuallyProcessed = count
    val currentTime: Long = System.currentTimeMillis
    // The amount of time actually passed since the generator was started
    val actualPassedMillis = currentTime - startTime
    // The amount of messages that should have been processed
    val shouldHaveProcessed = (actualPassedMillis * (rate / 1000)).toInt
    // The difference between actual and expected number of messages
    val delta = actuallyProcessed - shouldHaveProcessed
    if (delta <= 0) {
      // When delta < 0, we are too slow but we cannot do anything about that
      if (started) {
        self ! GenerateData()
      }
    } else if (delta > 0) {
      // Too fast, get synced with expected time
      val waitFor = (delta * timerDuration).toLong
      if (started) {
        in(Duration(waitFor, TimeUnit.MILLISECONDS)) {
          self ! GenerateData()
        }
      }
    }
  }
}
/** Common interface of the value generators used by the generator actor. */
abstract class Generator {
  /** Produces the next generated value. */
  def nextValue: Any
}
/**
 * Generates data from a normal (Gaussian) distribution.
 *
 * @param format the generator definition, e.g. "N(100, 10)" for average 100
 *               and standard deviation 10; an IllegalArgumentException is
 *               thrown when it does not match this pattern.
 */
class NormalGenerator(format: String) extends Generator {
  private var mu: Double = _
  private var sigma: Double = _
  private var normal: GaussianGenerator = _

  // Generate data from normal distribution
  val regex = new Regex( """N\\(([-+]?[0-9]*\\.?[0-9]+),\\s*([-+]?[0-9]*\\.?[0-9]+)\\)""", "mu", "sigma")

  regex.findFirstMatchIn(format) match {
    case Some(m) =>
      mu = m.group("mu").toDouble
      sigma = m.group("sigma").toDouble
      normal = new GaussianGenerator(mu, sigma, new MersenneTwisterRNG())
    case None => throw new IllegalArgumentException(format)
  }

  /** Samples the next value from the distribution. */
  def nextValue: Any = {
    normal.nextValue()
  }
}
/**
 * Generates uniformly distributed random numbers.
 *
 * @param format the generator definition, e.g. "U(100)": a uniform number
 *               between 0 (inclusive) and the given maximum (exclusive);
 *               an IllegalArgumentException is thrown when it does not match.
 */
class UniformGenerator(format: String) extends Generator {

  // Matches definitions of the form "U(max)" with a numeric maximum.
  val regex = new Regex( """U\\(([-+]?[0-9]*\\.?[0-9]+)\\)""", "max")

  // Parse once at construction time. (The previous version kept never-read
  // `min`/`max` vars, and its local `val max` shadowed the uninitialized field.)
  private val uniform: ContinuousUniformGenerator =
    regex.findFirstMatchIn(format) match {
      case Some(m) =>
        val max = m.group("max").toDouble
        new ContinuousUniformGenerator(0, max, new MersenneTwisterRNG())
      case None => throw new IllegalArgumentException(format)
    }

  /** Samples the next value from the distribution. */
  def nextValue: Any = {
    uniform.nextValue()
  }
}
/**
 * Chooses a random value from an array of quoted values.
 *
 * @param format the array definition, e.g. "['a', 'b', 'c', 'd']"; an
 *               IllegalArgumentException is thrown when it does not match
 *               or contains no elements.
 */
class ArrayChoiceGenerator(format: String) extends Generator {

  if (!format.matches("""\\[(,?('.*?'))+\\]""")) {
    throw new IllegalArgumentException(format)
  }

  // Strip the brackets and quotes, then split into trimmed elements.
  // (Previously uninitialized vars assigned later; now immutable vals.)
  private val items: List[String] =
    format.replace("[", "").replace("]", "").split(",").map(i => {
      i.replace("'", "").trim
    }).toList

  if (items.isEmpty) {
    throw new IllegalArgumentException(format)
  }

  // Uniformly distributed index into the element list.
  private val array: DiscreteUniformGenerator =
    new DiscreteUniformGenerator(0, items.size - 1, new MersenneTwisterRNG())

  /** Returns a randomly picked element, quoted as a JSON string literal. */
  def nextValue: Any = {
    "\\"" + items(array.nextValue()) + "\\""
  }
}
/**
 * Fallback generator: always emits the definition string itself.
 *
 * @param format the string that is returned (quoted) on every call.
 */
class ConstantGenerator(format: String) extends Generator {
  /** Returns the definition string quoted as a JSON string literal. */
  def nextValue: Any = {
    "\\"" + format + "\\""
  }
} | coral-streaming/coral | src/main/scala/io/coral/actors/transform/GeneratorActor.scala | Scala | apache-2.0 | 11,590 |
package org.bitcoins.rpc.client.v18
import org.bitcoins.rpc.client.common.Client
import org.bitcoins.rpc.jsonmodels.AnalyzePsbtResult
import org.bitcoins.rpc.serializers.JsonSerializers._
import play.api.libs.json._
import scala.concurrent.Future
/**
* Set of utilities to analyze, join, and update existing PSBTs
* @see [[https://bitcoincore.org/en/doc/0.18.0/rpc/rawtransactions/analyzepsbt/]]
* @see [[https://bitcoincore.org/en/doc/0.18.0/rpc/rawtransactions/joinpsbts/]]
* @see [[https://bitcoincore.org/en/doc/0.18.0/rpc/rawtransactions/utxoupdatepsbt/]]
*/
trait V18PsbtRpc {
  self: Client =>

  /** Calls the "analyzepsbt" RPC on the given PSBT string. */
  def analyzePsbt(psbt: String): Future[AnalyzePsbtResult] = {
    bitcoindCall[AnalyzePsbtResult]("analyzepsbt", List(JsString(psbt)))
  }

  /** Calls the "joinpsbts" RPC, combining the given PSBT strings into one. */
  def joinPsbts(txs: Seq[String]): Future[String] = {
    bitcoindCall[String]("joinpsbts", List(Json.toJson(txs)))
  }

  /** Calls the "utxoupdatepsbt" RPC on the given PSBT string. */
  def utxoUpdatePsbt(psbt: String): Future[String] = {
    bitcoindCall[String]("utxoupdatepsbt", List(JsString(psbt)))
  }
}
| bitcoin-s/bitcoin-s-core | bitcoind-rpc/src/main/scala/org/bitcoins/rpc/client/v18/V18PsbtRpc.scala | Scala | mit | 1,004 |
package com.sksamuel.scapegoat.inspections.unnecessary
import com.sksamuel.scapegoat.InspectionTest
import com.sksamuel.scapegoat.inspections.unneccesary.UnusedMethodParameter
/** Scala 2.13-specific cases for the UnusedMethodParameter inspection. */
class UnusedMethodParameterTest213 extends InspectionTest {

  override val inspections = Seq(new UnusedMethodParameter)

  "UnusedMethodParameter in Scala 2.13" - {
    "should not report warning" - {
      // Parameters annotated with scala.annotation.unused are deliberately
      // unused and must not be flagged (regression test for #340).
      "for unused parameters if they are annotated with @unused (#340)" in {
        val code =
          """| import scala.annotation.unused
             | class Test {
             | def foo(@unused a:String, b: Int): Unit = {
             | println(b)
             | } """.stripMargin

        compileCodeSnippet(code)
        compiler.scapegoat.feedback.warnings.size shouldBe 0
        compiler.scapegoat.feedback.warns.size shouldBe 0
      }
    }
  }
}
| sksamuel/scalac-scapegoat-plugin | src/test/scala-2.13/com/sksamuel/scapegoat/inspections/unnecessary/UnusedMethodParameterTest213.scala | Scala | apache-2.0 | 867 |
/*
* Copyright (c) 2014-2015 Paul Bernard
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Spectrum Finance is based in part on:
* QuantLib. http://quantlib.org/
*
*/
package org.quantintel.ql.time
import org.quantintel.lang.collections.distributed.{SessionBasedCollection}
/**
 * Functional API around day-count capabilities.
 *
 * @author Paul Bernard
 */
package object daycounters {

  /**
   * Resolves a day-count convention name to its DayCounter implementation.
   *
   * @param name name of the day count method (e.g. "Act/360", "30E/360")
   * @param calendar open calendar; only used by the "BUS/252" convention
   * @return the day counter associated with the given name and calendar;
   *         unrecognized names fall back to Actual/360
   */
  private def dayCounter(name: String, calendar: Calendar): DayCounter = {
    import org.quantintel.ql.time.daycounters.Business252.Business252

    name match {
      // common contemporary convention names
      case x if x =="Act/Act" || x == "ISDA" || x == "HISTORICAL" || x == "ACTUAL365"
        => ActualActual(ActualActualConvention.ISDA)
      case "Act/360" => Actual360(Actual360Convention.ACTUAL360)
      case "Act/365L" => Actual365(Actual365Convention.ACT365L)
      case "Act/365F" => Actual365(Actual365Convention.ACT365F)
      case "Act/365A" => Actual365(Actual365Convention.ACT365A)
      case "NL/365" => Actual365(Actual365Convention.ACT365NL)
      case "30/360" => Thirty360(Thirty360Convention.USA)
      case "30/360 US" => Thirty360(Thirty360Convention.THIRTY360US)
      case "30/360 ISDA" => Thirty360(Thirty360Convention.THIRTY360ISDA)
      case "30E/360" => Thirty360(Thirty360Convention.EUROPEAN)
      case "30E+/360" => Thirty360(Thirty360Convention.EP)
      case "30E/360 ISDA" => Thirty360(Thirty360Convention.ISDA)
      // additional convention names
      case "FRENCH" => Actual360(Actual360Convention.FRENCH)
      case x if x == "ISMA" || x == "BOND" => ActualActual(ActualActualConvention.ISMA)
      case x if x == "AFB" || x == "EURO" => ActualActual(ActualActualConvention.AFB)
      case "BOND BASIS" => Thirty360(Thirty360Convention.BONDBASIS)
      case "EUROBOND BASIS" => Thirty360(Thirty360Convention.EUROBONDBASIS)
      case "30/360 US (NASD)" => Thirty360(Thirty360Convention.THIRTY360USNASD)
      case "30/360 SIA" => Thirty360(Thirty360Convention.SIA)
      case "30/360 BMA" => Thirty360(Thirty360Convention.BMA)
      case "30/360 (Italian)" => Thirty360(Thirty360Convention.ITALIAN)
      case "BUS/252" => new Business252(calendar)
      case _ => Actual360(Actual360Convention.ACTUAL360) // default
    }
  }

  /**
   * Calculates the day-count year fraction for a calendar-based convention.
   *
   * @param dcmname day count method name (must be "BUS/252" for this overload)
   * @param cal calendar instance used to calculate the year fraction
   * @param dateStart from date
   * @param dateEnd to date
   * @return year fraction
   */
  def yearFraction(dcmname: String, cal: Calendar, dateStart : Date, dateEnd :Date) : Double = {
    assert(dcmname=="BUS/252")
    dayCounter(dcmname, cal).yearFraction(dateStart, dateEnd)
  }

  /**
   * Calculates the day-count year fraction with a reference period.
   *
   * @param dcmname day count method name (must be "BUS/252" for this overload)
   * @param cal calendar instance used to calculate the year fraction
   * @param dateStart start date
   * @param dateEnd end date
   * @param refStartDate reference start date
   * @param refEndDate reference end date
   * @return year fraction
   */
  def yearFraction(dcmname: String, cal: Calendar, dateStart : Date, dateEnd :Date, refStartDate: Date, refEndDate: Date)
    : Double = {
    assert(dcmname=="BUS/252")
    dayCounter(dcmname, cal).yearFraction(dateStart, dateEnd, refStartDate, refEndDate)
  }

  /**
   * Calculates the day-count year fraction for calendar-free conventions.
   *
   * @param dcmname day count method name (anything but "BUS/252")
   * @param dateStart start date
   * @param dateEnd end date
   * @return year fraction
   */
  def yearFraction(dcmname: String, dateStart : Date, dateEnd :Date) : Double = {
    assert(dcmname!="BUS/252")
    dayCounter(dcmname, null).yearFraction(dateStart, dateEnd)
  }

  /**
   * Calculates the day-count year fraction with a reference period for
   * calendar-free conventions.
   *
   * @param dcmname day count method name (anything but "BUS/252")
   * @param dateStart start date
   * @param dateEnd end date
   * @param refStartDate reference start date
   * @param refEndDate reference end date
   * @return year fraction
   */
  def yearFraction(dcmname: String, dateStart : Date, dateEnd :Date, refStartDate: Date, refEndDate: Date)
    : Double = {
    assert(dcmname!="BUS/252")
    dayCounter(dcmname, null).yearFraction(dateStart, dateEnd, refStartDate, refEndDate)
  }

  /**
   * Calculates the day count for calendar-free conventions.
   *
   * @param dcmname day count method name (anything but "BUS/252")
   * @param dateFrom from date
   * @param dateTo to date
   * @return day count
   */
  def dayCount(dcmname: String, dateFrom: Date, dateTo: Date) : Long = {
    assert(dcmname!="BUS/252")
    dayCounter(dcmname, null).dayCount(dateFrom, dateTo)
  }

  /**
   * Calculates the day count for the calendar-based "BUS/252" convention.
   *
   * @param dcmname day count method name (must be "BUS/252")
   * @param cal instance of calendar used to calculate the day count
   * @param dateFrom from date
   * @param dateTo to date
   * @return day count
   */
  def dayCount(dcmname: String, cal: Calendar, dateFrom: Date, dateTo: Date) : Long = {
    assert(dcmname=="BUS/252")
    dayCounter(dcmname, cal).dayCount(dateFrom, dateTo)
  }

  /**
   * Returns a Calendar from a given session with the name indicated.
   *
   * @param session a session id
   * @param calendarId calendar id within a session
   * @return the calendar, or null when no calendar with that id exists
   */
  def calendar (session: SessionBasedCollection, calendarId: String) : Calendar = {
    // Idiomatic equivalent of matching Some(c) => c / None => null.
    session.calendar.get(calendarId).orNull
  }
}
| quantintel/spectrum | financial/src/main/scala/org/quantintel/ql/time/daycounters/package.scala | Scala | apache-2.0 | 6,098 |
package lila.team
import lila.db.api.$find
import lila.user.{ User, UserRepo }
import tube.teamTube
/** Command-line entry points for team administration ("team <command> <teamId> [users...]"). */
private[team] final class Cli(api: TeamApi) extends lila.common.Cli {

  def process = {
    case "team" :: "join" :: team :: users => perform(team, users)(api.doJoin)
    case "team" :: "quit" :: team :: users => perform(team, users)(api.doQuit)
    case "team" :: "enable" :: team :: Nil => perform(team)(api.enable)
    case "team" :: "disable" :: team :: Nil => perform(team)(api.disable)
    case "team" :: "delete" :: team :: Nil => perform(team)(api.delete)
    case "team" :: "recompute" :: "nbMembers" :: Nil =>
      api.recomputeNbMembers inject "done"
  }

  // Runs op on the team, failing when the team id is unknown.
  private def perform(teamId: String)(op: Team => Funit): Fu[String] =
    $find byId teamId flatMap {
      _.fold(fufail[String]("Team not found")) { u => op(u) inject "Success" }
    }

  // Resolves the named users and applies op to (team, userId) for each,
  // logging every username as it is processed.
  private def perform(teamId: String, userIds: List[String])(op: (Team, String) => Funit): Fu[String] =
    $find byId teamId flatMap {
      _.fold(fufail[String]("Team not found")) { team =>
        UserRepo nameds userIds flatMap { users =>
          users.map(user => fuloginfo(user.username) >> op(team, user.id)).sequenceFu
        } inject "Success"
      }
    }
}
| Enigmahack/lila | modules/team/src/main/Cli.scala | Scala | mit | 1,244 |
package org.dohrm.storyline.users.directives
import akka.actor.ActorRef
import akka.http.scaladsl.server._
import akka.pattern._
import org.dohrm.auth0.directives.Auth0
import org.dohrm.auth0.models.Auth0User
import org.dohrm.storyline.users.actors.GetWithGrants
import org.dohrm.storyline.users.models.{UserWithGrants, User}
import org.dohrm.toolkit.actor.response.Positive
import org.dohrm.toolkit.context.{ActorContext, FutureContext}
import org.joda.time.DateTime
import scala.util.{Failure, Success}
trait UserSecurity {
  self: FutureContext with ActorContext with Auth0 =>

  import akka.http.scaladsl.server.Directives._

  /** Actor that answers [[GetWithGrants]] with the persisted user and its grants. */
  def userRepositoryActor: ActorRef

  // Bridge an Auth0 identity to the internal User model.
  // NOTE(review): all identifying fields are blanked here and both dates are
  // "now" — confirm this placeholder mapping is intended.
  private implicit def auth0ToUser(auth0: Auth0User): User = User("", "", "", "", DateTime.now, Some(DateTime.now))

  /**
   * Directive that authorizes the request via Auth0 and resolves the matching
   * user (with grants) from the repository actor.
   *
   * Rejects with [[AuthorizationFailedRejection]] when the lookup fails or the
   * actor replies with anything other than a positive `UserWithGrants` response.
   */
  implicit def authorizedWithGrants: Directive1[UserWithGrants] =
    authorized.flatMap { auth0 =>
      onComplete(userRepositoryActor ? GetWithGrants(auth0)).flatMap {
        case Success(Positive(user: UserWithGrants, _)) =>
          provide(user)
        case _ =>
          // Covers Failure(_) as well as any unexpected/non-positive Success reply.
          // The original match was non-exhaustive and threw a MatchError on the
          // latter instead of rejecting the request.
          reject(AuthorizationFailedRejection)
      }
    }
}
| dohr-michael/storyline | src/main/scala/org/dohrm/storyline/users/directives/UserSecurity.scala | Scala | mit | 1,134 |
package org.ausdigital.apecconnect.db.model
/**
* Type that given an id and metadata can create a persisted instance of itself.
*
* Use this is the pending type does not match the persisted (or record data) type.
*
* @tparam P the type of the persisted instance.
*/
trait Persistable[P] {
  /**
   * Creates the persisted representation of this (pending) instance.
   *
   * @param metaData the metadata (id etc.) assigned on persistence
   * @return the persisted instance of type `P`
   */
  def persisted(metaData: MetaData): P
}
| TeamAusDigital/apec-connect | server/modules/db/src/main/scala/org/ausdigital/apecconnect/db/model/Persistable.scala | Scala | apache-2.0 | 337 |
package com.lvxingpai.model.misc
import javax.validation.constraints.Min
import com.lvxingpai.model.mixin.{ ObjectIdEnabled, ImagesEnabled }
import com.lvxingpai.model.poi.Description
import java.util.Date
import java.util.{ List => JList }
import scala.beans.BeanProperty
/**
 * Recommended content entry used by the web frontend.
 * Created by pengyt on 2015/10/21.
 */
class Recommendation extends ObjectIdEnabled with ImagesEnabled {
  /**
   * Display name.
   */
  @BeanProperty
  var name: String = null
  /**
   * Image URLs.
   */
  @BeanProperty
  var imageList: JList[String] = _
  /**
   * Weight as a popular viewspot.
   */
  @BeanProperty
  var hotVs: Int = 0
  /**
   * Weight as a popular city.
   */
  @BeanProperty
  var hotCity: Int = 0
  /**
   * Weight in the "freshly released" section.
   */
  @BeanProperty
  var newItemWeight: Int = 0
  /**
   * Weight in the "must go" section.
   */
  @BeanProperty
  var mustGoWeight: Int = 0
  /**
   * Weight in the "editor's picks" section.
   */
  @BeanProperty
  var editorWeight: Int = 0
  /**
   * Weight in the "popular trips" section.
   */
  @BeanProperty
  var popularityWeight: Int = 0
  /**
   * Nickname of the route editor.
   */
  @BeanProperty
  var editorNickName: String = null
  /**
   * Avatar of the route editor.
   */
  @BeanProperty
  var editorAvatar: String = null
  /**
   * Date associated with the route editor's entry.
   * NOTE(review): the original comment duplicated the avatar text; presumably
   * this is the edit/publish date — confirm.
   */
  @BeanProperty
  var editorDate: Date = null
  /**
   * Number of views of the plan; never negative.
   */
  @Min(value = 0)
  @BeanProperty
  var planViews: Int = 0
  /**
   * Description of the recommended item.
   */
  @BeanProperty
  var description: Description = null
  /**
   * Reason for the recommendation.
   */
  @BeanProperty
  var reason: String = null
}
| Lvxingpai/core-model | src/main/scala/com/lvxingpai/model/misc/Recommendation.scala | Scala | apache-2.0 | 1,502 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.counter.helper
import com.typesafe.config.Config
import org.apache.s2graph.counter.config.S2CounterConfig
import org.apache.s2graph.counter.util.Hashes
import org.slf4j.LoggerFactory
import redis.clients.jedis.exceptions.JedisException
import redis.clients.jedis.{Jedis, JedisPool, JedisPoolConfig}
import scala.util.Try
class WithRedis(config: Config) {
  lazy val s2config = new S2CounterConfig(config)
  private val log = LoggerFactory.getLogger(getClass)
  // Shared pool configuration applied to every Redis instance.
  val poolConfig = new JedisPoolConfig()
  poolConfig.setMaxTotal(150)
  poolConfig.setMaxIdle(50)
  poolConfig.setMaxWaitMillis(200)
  // One connection pool per configured (host, port) Redis instance.
  val jedisPools = s2config.REDIS_INSTANCES.map { case (host, port) =>
    new JedisPool(poolConfig, host, port)
  }
  // Shards a key onto one of the pools via murmur3 hashing.
  // NOTE(review): assumes Hashes.murmur3 returns a non-negative value; a
  // negative hash would produce a negative index here — confirm.
  def getBucketIdx(key: String): Int = {
    Hashes.murmur3(key) % jedisPools.size
  }
  // Runs f with a connection from pool idx, wrapping the outcome in Try.
  // On JedisException the connection is returned as *broken* (so it is not
  // reused) and jedis is nulled to keep the finally block from returning it a
  // second time; healthy connections are returned normally in finally.
  def doBlockWithIndex[T](idx: Int)(f: Jedis => T): Try[T] = {
    Try {
      val pool = jedisPools(idx)
      var jedis: Jedis = null
      try {
        jedis = pool.getResource
        f(jedis)
      }
      catch {
        case e: JedisException =>
          pool.returnBrokenResource(jedis)
          jedis = null
          throw e
      }
      finally {
        if (jedis != null) {
          pool.returnResource(jedis)
        }
      }
    }
  }
  // Convenience: run f against the pool selected by hashing the key.
  def doBlockWithKey[T](key: String)(f: Jedis => T): Try[T] = {
    doBlockWithIndex(getBucketIdx(key))(f)
  }
}
| jongwook/incubator-s2graph | s2counter_core/src/main/scala/org/apache/s2graph/counter/helper/WithRedis.scala | Scala | apache-2.0 | 2,254 |
package iwai.cellmon.model.core.entity
import spray.json._
/** How a location fix was obtained; serialized by its `name`. */
sealed abstract class LocationProvider(val name: String)
/** Location entered manually by the user. */
case object ManualLocationProvider extends LocationProvider("MANUAL")
/** Location obtained from the device GPS. */
case object GpsLocationProvider extends LocationProvider("GPS")
case object LocationProvider {
  // spray-json (de)serialization of LocationProvider as its plain name string.
  trait JsonProtocol extends DefaultJsonProtocol {
    implicit object LocationProviderFormat extends RootJsonFormat[LocationProvider] {
      // Serialize as the provider's name, e.g. "MANUAL" or "GPS".
      override def write(p: LocationProvider): JsValue = p.name.toJson
      // Parse the name back; any other JSON shape or unknown name is a
      // deserialization error.
      override def read(value: JsValue): LocationProvider = value match {
        case JsString(name) if name == ManualLocationProvider.name => ManualLocationProvider
        case JsString(name) if name == GpsLocationProvider.name => GpsLocationProvider
        case _ => deserializationError("Expected LocationProvider as JsString, but got " + value)
      }
    }
  }
  object JsonProtocol extends JsonProtocol
} | iwaiawi/cellmon | src/main/scala/iwai/cellmon/model/core/entity/LocationProvider.scala | Scala | apache-2.0 | 886 |
package org.workcraft.plugins.petri2
import org.workcraft.scala.Expressions._
import scalaz._
import Scalaz._
import org.workcraft.graphics.Graphics._
import java.awt.BasicStroke
import org.workcraft.graphics.Graphics
import org.workcraft.graphics.LabelPositioning
import org.workcraft.graphics.BoundedColorisableGraphicalContent
import org.workcraft.gui.CommonVisualSettings
import org.workcraft.graphics.Touchable
import java.awt.geom.Rectangle2D
import java.awt.Color
import org.workcraft.graphics.VisualCurveProperties
object VisualPlace {
  // Renders a place: a circle (outline + fill from settings) with the token
  // image composed on top when present, and the label aligned below.
  def image (tokens: Expression[Int], label: Expression[String], settings: CommonVisualSettings) : Expression[BoundedColorisableGraphicalContent] =
    (label <**> tokens.map(TokenPainter.image(_, settings)))( (label, tokensImage) => {
      val place = circle(settings.size, Some((new BasicStroke (settings.strokeWidth.toFloat), settings.foregroundColor)), Some(settings.fillColor)).boundedColorisableGraphicalContent
      // The token painter yields None when there is nothing to draw.
      val placeWithTokens = tokensImage match {
        case Some(i) => place.compose(i)
        case _ => place
      }
      val labelImage = Graphics.label (label, settings.effectiveLabelFont, settings.foregroundColor).boundedColorisableGraphicalContent
      labelImage.alignSideways(placeWithTokens, LabelPositioning.Bottom).compose(placeWithTokens)
    })
  // Hit-testing area: a circle of half the configured node size.
  val touchable = CommonVisualSettings.settings.map(settings => Touchable.fromCircle(settings.size/2))
}
object VisualArc {
  // Arc appearance: black curve, 0.05-wide stroke, arrow head sized (0.2, 0.4).
  val properties = VisualCurveProperties(Color.BLACK, Some(org.workcraft.graphics.Arrow(0.2, 0.4)), new BasicStroke(0.05f))
}
object VisualTransition {
  // Renders a transition: a square of side settings.size with its label below.
  def image (label: Expression[String], settings: CommonVisualSettings) : Expression[BoundedColorisableGraphicalContent] =
    label.map{label =>
      val transitionImage = rectangle (settings.size, settings.size, Some ((new BasicStroke(settings.strokeWidth.toFloat), settings.foregroundColor)), Some(settings.fillColor)).boundedColorisableGraphicalContent
      val labelImage = Graphics.label(label, settings.effectiveLabelFont, settings.foregroundColor).boundedColorisableGraphicalContent
      (labelImage alignSideways (transitionImage, LabelPositioning.Bottom)).compose(transitionImage)
    }
  // Hit-testing area: the square centred at the origin.
  val touchable = CommonVisualSettings.settings.map(settings => Touchable.fromRect(new Rectangle2D.Double (-settings.size/2, -settings.size/2, settings.size, settings.size)))
}
| tuura/workcraft-2.2 | PetriNetPlugin2/src/main/scala/org/workcraft/plugins/petri2/VisualPetriNet.scala | Scala | gpl-3.0 | 2,417 |
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package kafka.manager.utils
import kafka.manager.utils.zero81.{PreferredLeaderElectionErrors, PreferredReplicaLeaderElectionCommand}
/**
* @author hiral
*/
class TestPreferredReplicaLeaderElection extends CuratorAwareTest {
  import PreferredLeaderElectionErrors._
  // Writing an empty election set must be rejected before anything is written.
  test("preferred replica leader election with empty set") {
    checkError[ElectionSetEmptyOnWrite] {
      withCurator { curator =>
        PreferredReplicaLeaderElectionCommand.writePreferredReplicaElectionData(curator,Set.empty)
      }
    }
  }
  // Happy path: the election data is serialized to the expected JSON under
  // ZkUtils.PreferredReplicaLeaderElectionPath.
  test("preferred replica leader election") {
    withCurator { curator =>
      val set = Set(TopicAndPartition("mytopic",1),TopicAndPartition("mytopic",2),TopicAndPartition("mytopic",3))
      PreferredReplicaLeaderElectionCommand.writePreferredReplicaElectionData(curator,set)
      val json: String = curator.getData.forPath(ZkUtils.PreferredReplicaLeaderElectionPath)
      assert(json == "{\\"version\\":1,\\"partitions\\":[{\\"topic\\":\\"mytopic\\",\\"partition\\":1},{\\"topic\\":\\"mytopic\\",\\"partition\\":2},{\\"topic\\":\\"mytopic\\",\\"partition\\":3}]}")
    }
  }
  // A second write while the election znode exists must fail.
  // NOTE(review): presumably relies on the znode left by the previous test
  // (tests are order-dependent) — confirm.
  test("preferred replica leader election already running") {
    checkError[ElectionAlreadyInProgress] {
      withCurator { curator =>
        val set = Set(TopicAndPartition("mytopic", 1), TopicAndPartition("mytopic", 2), TopicAndPartition("mytopic", 3))
        PreferredReplicaLeaderElectionCommand.writePreferredReplicaElectionData(curator, set)
        val json: String = curator.getData.forPath(ZkUtils.PreferredReplicaLeaderElectionPath)
        assert(json == "{\\"version\\":1,\\"partitions\\":[{\\"topic\\":\\"mytopic\\",\\"partition\\":1},{\\"topic\\":\\"mytopic\\",\\"partition\\":2},{\\"topic\\":\\"mytopic\\",\\"partition\\":3}]}")
      }
    }
  }
}
| evertrue/kafka-manager | test/kafka/manager/utils/TestPreferredReplicaLeaderElection.scala | Scala | apache-2.0 | 1,847 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3
import uk.gov.hmrc.ct.box.{CtInteger, CtBoxIdentifier, Linked}
import uk.gov.hmrc.ct.computations.CP511
/** CT600 v3 box B190: income from a property business. */
case class B190(value: Int) extends CtBoxIdentifier(name = "Income from a property business") with CtInteger
object B190 extends Linked[CP511, B190] {
  // B190 is populated directly from computation box CP511.
  override def apply(source: CP511): B190 = B190(source.value)
}
| ahudspith-equalexperts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v3/B190.scala | Scala | apache-2.0 | 961 |
package net.mkowalski.sparkfim.apriori
import net.mkowalski.sparkfim.model.{Item, MinSupport}
import net.mkowalski.sparkfim.test.SparkTest
class AprioriTidListMinerTest extends SparkTest {
  // Input partition: (transaction id, items in that transaction).
  private val inputFilePartition = List(
    (1, Array(1, 2, 3, 4, 5)),
    (2, Array(1, 3, 4)),
    (3, Array(2, 4))
  )
  test("Find 2-FIs TID-lists") {
    // Expected TID-lists for 2-item candidates built from singletons {2},{4},{5}.
    val expectedTids = Map(
      Item(2, 4) -> Array(1, 3),
      Item(2, 5) -> Array(1),
      Item(4, 5) -> Array(1)
    )
    val previousSingletons = List(Item(2), Item(4), Item(5))
    val result = runTest(MinSupport(1), previousSingletons)
    verifyTidListResult(expectedTids, result)
  }
  test("Find 3-FIs TID-lists") {
    val expectedTids = Map(Item(2, 4, 5) -> Array(1))
    val previousItems = List(Item(2, 4), Item(2, 5), Item(4, 5))
    val result = runTest(MinSupport(1), previousItems)
    verifyTidListResult(expectedTids, result)
  }
  test("Find none of 4-FIs TID-lists") {
    val previousItems = List(Item(2, 4, 5))
    val result = runTest(MinSupport(1), previousItems)
    assert(result.isEmpty)
  }
  test("Should return only frequent TID-lists") {
    // With min support 2, only {2,4} (present in transactions 1 and 3) survives.
    val expectedTids = Map(
      Item(2, 4) -> Array(1, 3)
    )
    val previousSingletons = List(Item(2), Item(4), Item(5))
    val result = runTest(MinSupport(2), previousSingletons)
    verifyTidListResult(expectedTids, result)
  }
  // Generates candidates from the previous level, mines TID-lists on the Spark
  // context, and collects the result into a local map.
  private def runTest(minSupport: MinSupport, previousItems: List[Item]): Map[Item, Array[Int]] = {
    val minSupBc = sc.broadcast(minSupport)
    val candidatesBc = sc.broadcast(CandidatesGenerator.generateFrom(previousItems))
    val result = AprioriTidListMiner(minSupBc).mine(sc.parallelize(inputFilePartition), candidatesBc)
    result.collect().toMap
  }
  // Asserts both maps have the same keys and element-wise equal TID arrays
  // (Array equality must use sameElements, not ==).
  private def verifyTidListResult(expectedTids: Map[Item, Array[Int]], result: Map[Item, Array[Int]]): Unit = {
    assert(result.size == expectedTids.size, "Result map has different size")
    expectedTids.foreach {
      case (item, tids) => assert(tids sameElements result(item), "Invalid TID-list for " + item)
    }
  }
}
| mjkowalski/spark-fim | src/test/scala/net/mkowalski/sparkfim/apriori/AprioriTidListMinerTest.scala | Scala | mit | 2,055 |
/*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa
package object plugin {

  /**
   * Hadoop / YARN / Accumulo configuration property keys used by the GeoMesa
   * plugin.
   */
  object properties {
    val FS_DEFAULTFS                 = "fs.defaultFS"
    val YARN_RESOURCEMANAGER_ADDRESS = "yarn.resourcemanager.address"
    val YARN_SCHEDULER_ADDRESS       = "yarn.resourcemanager.scheduler.address"
    val MAPREDUCE_FRAMEWORK_NAME     = "mapreduce.framework.name"
    val ACCUMULO_MONITOR             = "accumulo.monitor.address"

    /** All known property keys, in declaration order; built once instead of on every call. */
    val values: List[String] =
      List(FS_DEFAULTFS,
           YARN_RESOURCEMANAGER_ADDRESS,
           YARN_SCHEDULER_ADDRESS,
           MAPREDUCE_FRAMEWORK_NAME,
           ACCUMULO_MONITOR)
  }
}
| jwkessi/geomesa | geomesa-plugin/src/main/scala/org/locationtech/geomesa/plugin/package.scala | Scala | apache-2.0 | 1,264 |
package blended.itestsupport.condition
import akka.testkit.{TestActorRef, TestProbe}
import blended.itestsupport.condition.ConditionActor.CheckCondition
import blended.itestsupport.condition.ConditionActor.ConditionCheckResult
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import akka.actor.ActorSystem
class ParallelCheckerSpec extends AnyWordSpec
  with Matchers {

  private implicit val system : ActorSystem = ActorSystem("ParallelChecker")

  /**
   * Runs one ParallelComposedCondition check over the given conditions and
   * verifies the reported result: exactly the AlwaysTrue conditions must be
   * satisfied, all remaining conditions must be reported as timed out.
   * (Deduplicates the probe/checker/expect boilerplate the five tests shared.)
   */
  private def expectCheckResult(conditions: List[Condition]): Unit = {
    val probe = TestProbe()
    val checker = TestActorRef(ConditionActor.props(ParallelComposedCondition(conditions: _*)))
    checker.tell(CheckCondition, probe.ref)
    probe.expectMsg(ConditionCheckResult(
      conditions.filter(_.isInstanceOf[AlwaysTrue]),
      conditions.filterNot(_.isInstanceOf[AlwaysTrue])
    ))
  }

  "The Condition Checker" should {

    "respond with a satisfied message on an empty list of conditions" in {
      expectCheckResult(List.empty)
    }

    "respond with a satisfied message after a single wrapped condition has been satisfied" in {
      expectCheckResult(List(new AlwaysTrue()))
    }

    "respond with a satisfied message after some wrapped conditions have been satisfied" in {
      expectCheckResult(List.fill(5)(new AlwaysTrue()))
    }

    "respond with a timeout message after a single wrapped condition has timed out" in {
      expectCheckResult(List(new NeverTrue()))
    }

    "respond with a timeout message containing the timed out conditions only" in {
      expectCheckResult(List(
        new AlwaysTrue(),
        new AlwaysTrue(),
        new NeverTrue(),
        new AlwaysTrue(),
        new AlwaysTrue()
      ))
    }
  }
}
| woq-blended/blended | blended.itestsupport/src/test/scala/blended/itestsupport/condition/ParallelCheckerSpec.scala | Scala | apache-2.0 | 2,881 |
package org.http4s
package argonaut.test // Get out of argonaut package so we can import custom instances
import _root_.argonaut._
import cats.effect.IO
import cats.syntax.applicative._
import java.nio.charset.StandardCharsets
import org.http4s.Status.Ok
import org.http4s.argonaut._
import org.http4s.headers.`Content-Type`
import jawn.JawnDecodeSupportSpec
import org.specs2.specification.core.Fragment
class ArgonautSpec extends JawnDecodeSupportSpec[Json] with Argonauts {
  // Instances with customized failure messages, used to verify the builder
  // hooks (empty body / parse error / decode error) are honored.
  val ArgonautInstancesWithCustomErrors = ArgonautInstances.builder
    .withEmptyBodyMessage(MalformedMessageBodyFailure("Custom Invalid JSON: empty body"))
    .withParseExceptionMessage(_ => MalformedMessageBodyFailure("Custom Invalid JSON"))
    .withJsonDecodeError((json, message, history) =>
      InvalidMessageBodyFailure(
        s"Custom Could not decode JSON: $json, error: $message, cursor: $history"))
    .build
  testJsonDecoder(jsonDecoder)
  testJsonDecoderError(ArgonautInstancesWithCustomErrors.jsonDecoder)(
    emptyBody = { case MalformedMessageBodyFailure("Custom Invalid JSON: empty body", _) => ok },
    parseError = { case MalformedMessageBodyFailure("Custom Invalid JSON", _) => ok }
  )
  // Simple fixture type with a derived argonaut codec.
  sealed case class Foo(bar: Int)
  val foo = Foo(42)
  implicit val FooCodec = CodecJson.derive[Foo]
  "json encoder" should {
    val json = Json("test" -> jString("ArgonautSupport"))
    "have json content type" in {
      jsonEncoder.headers.get(`Content-Type`) must_== Some(
        `Content-Type`(MediaType.application.json))
    }
    "write compact JSON" in {
      writeToString(json) must_== ("""{"test":"ArgonautSupport"}""")
    }
    "write JSON according to custom encoders" in {
      val custom = ArgonautInstances.withPrettyParams(PrettyParams.spaces2).build
      import custom._
      writeToString(json) must_== ("""{
        | "test" : "ArgonautSupport"
        |}""".stripMargin)
    }
    "write JSON according to explicit printer" in {
      writeToString(json)(jsonEncoderWithPrettyParams(PrettyParams.spaces2)) must_== ("""{
        | "test" : "ArgonautSupport"
        |}""".stripMargin)
    }
  }
  "jsonEncoderOf" should {
    "have json content type" in {
      jsonEncoderOf[IO, Foo].headers.get(`Content-Type`) must_== Some(
        `Content-Type`(MediaType.application.json))
    }
    "write compact JSON" in {
      writeToString(foo)(jsonEncoderOf[IO, Foo]) must_== ("""{"bar":42}""")
    }
    "write JSON according to custom encoders" in {
      val custom = ArgonautInstances.withPrettyParams(PrettyParams.spaces2).build
      import custom._
      writeToString(foo)(jsonEncoderOf) must_== ("""{
        | "bar" : 42
        |}""".stripMargin)
    }
    "write JSON according to explicit printer" in {
      writeToString(foo)(jsonEncoderWithPrinterOf(PrettyParams.spaces2)) must_== ("""{
        | "bar" : 42
        |}""".stripMargin)
    }
  }
  "json" should {
    // Round-trips a bare JSON number through a request/response cycle.
    "handle the optionality of jNumber" in {
      // TODO Urgh. We need to make testing these smoother.
      // https://github.com/http4s/http4s/issues/157
      def getBody(body: EntityBody[IO]): IO[Array[Byte]] = body.compile.to[Array]
      val req = Request[IO]().withEntity(jNumberOrNull(157))
      req
        .decode { json: Json =>
          Response[IO](Ok).withEntity(json.number.flatMap(_.toLong).getOrElse(0L).toString).pure[IO]
        }
        .map(_.body)
        .flatMap(getBody)
        .map(new String(_, StandardCharsets.UTF_8))
        .map(_ must_== "157")
    }
  }
  "jsonOf" should {
    "decode JSON from an Argonaut decoder" in {
      jsonOf[IO, Foo]
        .decode(Request[IO]().withEntity(jObjectFields("bar" -> jNumberOrNull(42))), strict = true)
        .value
        .map(_ must beRight(Foo(42)))
    }
    // https://github.com/http4s/http4s/issues/514
    // Regression: non-ASCII payloads (with and without embedded quotes).
    Fragment.foreach(Seq("ärgerlich", """"ärgerlich"""")) { wort =>
      sealed case class Umlaut(wort: String)
      implicit val codec = CodecJson.derive[Umlaut]
      s"handle JSON with umlauts: $wort" >> {
        val json = Json("wort" -> jString(wort))
        val result =
          jsonOf[IO, Umlaut].decode(Request[IO]().withEntity(json), strict = true)
        result.value.map(_ must_== Right(Umlaut(wort)))
      }
    }
    "fail with custom message from an Argonaut decoder" in {
      val result = ArgonautInstancesWithCustomErrors
        .jsonOf[IO, Foo]
        .decode(Request[IO]().withEntity(jObjectFields("bar1" -> jNumberOrNull(42))), strict = true)
      result.value.map(_ must beLeft(InvalidMessageBodyFailure(
        "Custom Could not decode JSON: {\\"bar1\\":42.0}, error: Attempt to decode value on failed cursor., cursor: CursorHistory(List(El(CursorOpDownField(bar),false)))")))
    }
  }
  "Uri codec" should {
    "round trip" in {
      // TODO would benefit from Arbitrary[Uri]
      val uri = Uri.uri("http://www.example.com/")
      uri.asJson.as[Uri].result must beRight(uri)
    }
  }
  "Message[F].decodeJson[A]" should {
    "decode json from a message" in {
      val req = Request[IO]().withEntity(foo.asJson)
      req.decodeJson[Foo] must returnValue(foo)
    }
    "fail on invalid json" in {
      val req = Request[IO]().withEntity(List(13, 14).asJson)
      req.decodeJson[Foo].attempt.map(_ must beLeft)
    }
  }
}
| ChristopherDavenport/http4s | argonaut/src/test/scala/org/http4s/argonaut/ArgonautSpec.scala | Scala | apache-2.0 | 5,742 |
package beam.utils.csv.conversion
import java.io.File
import scala.xml.parsing.ConstructingParser
import scala.xml.{Node, NodeSeq}
class HouseholdsXml2CsvConverter(householdAttributesXml: File) extends Xml2CsvFileConverter {
  // CSV header; matches the four values emitted by Household.toString below.
  override val fields: Seq[String] = Seq("householdId", "incomeValue", "locationX", "locationY")
  private type HouseholdId = String
  private type HouseHoldIdToAttributes = Map[HouseholdId, HouseHoldAttributes]
  // One output row: id, income value, and home coordinates.
  private case class Household(householdId: HouseholdId, income: Income, locationX: Double, locationY: Double) {
    override def toString: String = Seq(householdId, income.value, locationX, locationY).mkString(FieldSeparator)
  }
  private case class Income(currency: String, period: String, value: String) {
    override def toString: String = Seq(value, currency).mkString(FieldSeparator)
  }
  // NOTE(review): Vehicle and Person are unused in this converter — confirm
  // they are needed or remove them.
  private case class Vehicle(refId: Int) {
    override def toString: String = refId.toString
  }
  private case class Person(refId: Int) {
    override def toString: String = refId.toString
  }
  // Per-household attributes parsed from the separate attributes XML file.
  private case class HouseHoldAttributes(
    householdId: HouseholdId,
    homeCoordX: Double,
    homeCoordY: Double,
    housingType: String
  ) {
    override def toString: String = {
      val values = Seq(householdId, homeCoordX, homeCoordY, housingType)
      values.mkString(FieldSeparator)
    }
  }
  // Parses householdAttributesXml into a map from household id to attributes.
  private def readHouseHoldIdToAttributes(): HouseHoldIdToAttributes = {
    def toHouseholdAttributes(node: Node): HouseHoldAttributes = {
      val attrs = node \\\\ "attribute"
      // NOTE(review): .get throws NoSuchElementException when the named
      // attribute is missing, and .toDouble on a malformed number throws too.
      def fromSeq(name: String): String = attrs.find(_.attributes("name").text == name).get.text
      HouseHoldAttributes(
        householdId = node.attributes("id").toString,
        homeCoordX = fromSeq("homecoordx").toDouble,
        homeCoordY = fromSeq("homecoordy").toDouble,
        housingType = fromSeq("housingtype")
      )
    }
    val parser = ConstructingParser.fromFile(householdAttributesXml, preserveWS = true)
    val doc = parser.document()
    val householdNodes: NodeSeq = doc.docElem \\\\ "objectattributes" \\ "object"
    householdNodes.toIterator
      .map(node => toHouseholdAttributes(node))
      .map(hha => (hha.householdId, hha))
      .toMap
  }
  // Combines a <household> element with its parsed attributes (coordinates).
  private def toHousehold(node: Node, houseHoldIdToAttributes: HouseHoldIdToAttributes): Household = {
    val id = node.attributes("id").toString
    Household(
      householdId = id,
      income = toIncome((node \\ "income").head),
      locationX = houseHoldIdToAttributes(id).homeCoordX,
      locationY = houseHoldIdToAttributes(id).homeCoordY
    )
  }
  private def toIncome(node: Node): Income = {
    Income(
      period = node.attributes("period").text.trim,
      value = node.text.trim,
      currency = node.attributes("currency").text.trim
    )
  }
  // Streams one CSV line per <household> element in the given source file.
  override def contentIterator(sourceFile: File): Iterator[String] = {
    val parser = ConstructingParser.fromFile(sourceFile, preserveWS = true)
    val doc = parser.document()
    val householdNodes: NodeSeq = doc.docElem \\\\ "households" \\ "household"
    val householdIdsToAttributes = readHouseHoldIdToAttributes()
    householdNodes.toIterator.map(node => toHousehold(node, householdIdsToAttributes).toString + LineSeparator)
  }
}
/*
<household id="1">
<members>
<personId refId="1" />
<personId refId="2" />
<personId refId="3" />
</members>
<vehicles>
<vehicleDefinitionId refId="1" />
<vehicleDefinitionId refId="2" />
</vehicles>
<income currency="usd" period="year">50000</income>
</household>
*/
| colinsheppard/beam | src/main/scala/beam/utils/csv/conversion/HouseholdsXml2CsvConverter.scala | Scala | gpl-3.0 | 3,544 |
/***
* Copyright 2016 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker
import com.rackspace.cloud.api.wadl.Converters._
import org.junit.runner.RunWith
import org.scalatestplus.junit.JUnitRunner
import com.rackspace.com.papi.components.checker.wadl.WADLException
@RunWith(classOf[JUnitRunner])
class ValidatorWADLNegPlainParamSuite extends BaseValidatorSuite {
  // WADL whose plain params need at most XPath 2.0 (current-dateTime()).
  val wadlWithVer20Param = <application xmlns="http://wadl.dev.java.net/2009/02"
                           xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test">
                        <grammars>
                            <include href="src/test/resources/xsd/test-urlxsd.xsd"/>
                        </grammars>
                        <resources base="https://test.api.openstack.com">
                           <resource path="/a/b">
                               <method name="PUT">
                                  <request>
                                      <representation mediaType="application/xml" element="tst:a">
                                         <param style="plain" path="tst:a/@stepType" required="true"/>
                                      </representation>
                                      <representation mediaType="application/json"/>
                                  </request>
                               </method>
                               <method name="POST">
                                  <request>
                                      <representation mediaType="application/xml" element="tst:e">
                                         <param style="plain" path="tst:e/tst:stepType" required="true"/>
                                         <!-- a silly xpath 2.0 assertion that will always return true -->
                                         <param style="plain" path="string(current-dateTime())" required="true"/>
                                      </representation>
                                  </request>
                               </method>
                           </resource>
                           <resource path="/c">
                               <method name="POST">
                                  <request>
                                      <representation mediaType="application/json"/>
                                  </request>
                               </method>
                               <method name="GET"/>
                           </resource>
                        </resources>
    </application>
  // Same WADL plus a param that needs XPath 3.0 (let expression).
  val wadlWithVer30Param = <application xmlns="http://wadl.dev.java.net/2009/02"
                           xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test">
                        <grammars>
                            <include href="src/test/resources/xsd/test-urlxsd.xsd"/>
                        </grammars>
                        <resources base="https://test.api.openstack.com">
                           <resource path="/a/b">
                               <method name="PUT">
                                  <request>
                                      <representation mediaType="application/xml" element="tst:a">
                                         <param style="plain" path="tst:a/@stepType" required="true"/>
                                      </representation>
                                      <representation mediaType="application/json"/>
                                  </request>
                               </method>
                               <method name="POST">
                                  <request>
                                      <representation mediaType="application/xml" element="tst:e">
                                         <param style="plain" path="tst:e/tst:stepType" required="true"/>
                                         <!-- a silly xpath 2.0 assertion that will always return true -->
                                         <param style="plain" path="string(current-dateTime())" required="true"/>
                                         <!-- a silly XPath 3.0 example -->
                                         <param style="plain" path="let $t := current-dateTime() return string($t)" required="true"/>
                                      </representation>
                                  </request>
                               </method>
                           </resource>
                           <resource path="/c">
                               <method name="POST">
                                  <request>
                                      <representation mediaType="application/json"/>
                                  </request>
                               </method>
                               <method name="GET"/>
                           </resource>
                        </resources>
    </application>
  // Same WADL plus params that need XPath 3.1 (map literal, arrow operator).
  val wadlWithVer31Param = <application xmlns="http://wadl.dev.java.net/2009/02"
                           xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test">
                        <grammars>
                            <include href="src/test/resources/xsd/test-urlxsd.xsd"/>
                        </grammars>
                        <resources base="https://test.api.openstack.com">
                           <resource path="/a/b">
                               <method name="PUT">
                                  <request>
                                      <representation mediaType="application/xml" element="tst:a">
                                         <param style="plain" path="tst:a/@stepType" required="true"/>
                                      </representation>
                                      <representation mediaType="application/json"/>
                                  </request>
                               </method>
                               <method name="POST">
                                  <request>
                                      <representation mediaType="application/xml" element="tst:e">
                                         <param style="plain" path="tst:e/tst:stepType" required="true"/>
                                         <!-- a silly xpath 2.0 assertion that will always return true -->
                                         <param style="plain" path="string(current-dateTime())" required="true"/>
                                         <!-- a silly XPath 3.0 example -->
                                         <param style="plain" path="let $t := current-dateTime() return string($t)" required="true"/>
                                         <!-- a silly XPath 3.1 example -->
                                         <param style="plain" path="let $t := map { 't' : true() } return $t('t')" required="true"/>
                                         <!-- another silly XPath 3.1 example -->
                                         <param style="plain" path="let $t := 'foo'=>replace('f', 'b') return $t" required="true"/>
                                      </representation>
                                  </request>
                               </method>
                           </resource>
                           <resource path="/c">
                               <method name="POST">
                                  <request>
                                      <representation mediaType="application/json"/>
                                  </request>
                               </method>
                               <method name="GET"/>
                           </resource>
                        </resources>
    </application>
  // Compiling a 2.0 expression under an XPath 1.0 configuration must fail.
  test ("2.0 XPath should fail when 1.0 is set") {
    val thrown = intercept[WADLException] {
      Validator((localWADLURI, wadlWithVer20Param),
                TestConfig(false, false, true, true, true, 10, true))
    }
    assert (thrown.getMessage().contains("Error while compiling XPath"))
    assert (thrown.getMessage().contains("current-dateTime"))
  }
  test ("2.0 XPath should succeed when 2.0 is set") {
    Validator((localWADLURI, wadlWithVer20Param),
              TestConfig(false, false, true, true, true, 20, true))
  }
  // 'let' is an XPath 3.0 construct, so a 2.0 configuration must reject it.
  test ("3.0 XPath should fail when 2.0 is set") {
    val thrown = intercept[WADLException] {
      Validator((localWADLURI, wadlWithVer30Param),
                TestConfig(false, false, true, true, true, 20, true))
    }
    assert (thrown.getMessage().contains("Error while compiling XPath"))
    assert (thrown.getMessage().contains("'let' is not permitted in XPath 2.0"))
  }
  test ("3.0 XPath should succeed when 3.0 is set") {
    Validator((localWADLURI, wadlWithVer30Param),
              TestConfig(false, false, true, true, true, 30, true))
  }
  // Map literals / arrow operator are 3.1-only, so a 3.0 config must reject them.
  test ("3.1 XPath should fail when 3.0 is set") {
    val thrown = intercept[WADLException] {
      Validator((localWADLURI, wadlWithVer31Param),
                TestConfig(false, false, true, true, true, 30, true))
    }
    assert (thrown.getMessage().contains("Error while compiling XPath"))
    assert (thrown.getMessage().contains("use XPath 3.1"))
  }
  test ("3.1 XPath should succeed when 3.1 is set") {
    Validator((localWADLURI, wadlWithVer31Param),
              TestConfig(false, false, true, true, true, 31, true))
  }
}
| rackerlabs/api-checker | core/src/test/scala/com/rackspace/com/papi/components/checker/ValidatorWADLNegPlainParamSuite.scala | Scala | apache-2.0 | 8,231 |
package slick.lifted
import slick.relational.{ProductResultConverter, SimpleFastPathResultConverter, ResultConverterDomain, TypeMappingResultConverter}
import scala.language.{existentials, implicitConversions, higherKinds}
import scala.language.experimental.macros
import scala.annotation.implicitNotFound
import scala.annotation.unchecked.uncheckedVariance
import scala.reflect.macros.blackbox.Context
import slick.SlickException
import slick.util.{ConstArray, ProductWrapper, TupleSupport}
import slick.ast._
import scala.reflect.ClassTag
/** A type class that encodes the unpacking `Mixed => Unpacked` of a
 * `Query[Mixed]` to its result element type `Unpacked` and the packing to a
 * fully packed type `Packed`, i.e. a type where everything which is not a
 * transparent container is wrapped in a `Column[_]`.
 *
 * =Example:=
 * - Mixed: (Column[Int], Column[(Int, String)], (Int, Option[Double]))
 * - Unpacked: (Int, (Int, String), (Int, Option[Double]))
 * - Packed: (Column[Int], Column[(Int, String)], (Column[Int], Column[Option[Double]]))
 * - Linearized: (Int, Int, String, Int, Option[Double])
 */
@implicitNotFound(msg = "No matching Shape found.\\nSlick does not know how to map the given types.\\nPossible causes: T in Table[T] does not match your * projection. Or you use an unsupported type in a Query (e.g. scala List).\\n Required level: ${Level}\\n Source type: ${Mixed_}\\n Unpacked type: ${Unpacked_}\\n Packed type: ${Packed_}\\n")
abstract class Shape[Level <: ShapeLevel, -Mixed_, Unpacked_, Packed_] {
  // Stable member aliases for the type parameters. Mixed_ is declared
  // contravariant, so re-exposing it as an invariant type member requires
  // @uncheckedVariance.
  type Mixed = Mixed_ @uncheckedVariance
  type Unpacked = Unpacked_
  type Packed = Packed_
  /** Convert a value of this Shape's (mixed) type to the fully packed type */
  def pack(value: Mixed): Packed
  /** Return the fully packed Shape */
  def packedShape: Shape[Level, Packed, Unpacked, Packed]
  /** Build a packed representation containing QueryParameters that can extract
   * data from the unpacked representation later.
   * This method is not available for shapes where Mixed and Unpacked are
   * different types. */
  def buildParams(extract: Any => Unpacked): Packed
  /** Encode a reference into a value of this Shape.
   * This method may not be available for shapes where Mixed and Packed are
   * different types. */
  def encodeRef(value: Mixed, path: Node): Any
  /** Return an AST Node representing a mixed value. */
  def toNode(value: Mixed): Node
}
// TupleShapeImplicits is presumably code-generated elsewhere in this package
// (it is not defined in this file) — TODO confirm.
object Shape extends ConstColumnShapeImplicits with AbstractTableShapeImplicits with TupleShapeImplicits {
  /** Shape for a plain value with a TypedType: packs it into a literal
   * ConstColumn. Mixed and Packed differ here, so encodeRef is unsupported
   * and throws. */
  implicit final def primitiveShape[T, Level <: ShapeLevel](implicit tm: TypedType[T]): Shape[Level, T, T, ConstColumn[T]] = new Shape[Level, T, T, ConstColumn[T]] {
    def pack(value: Mixed) = LiteralColumn(value)
    def packedShape = RepShape[Level, Packed, Unpacked]
    def buildParams(extract: Any => Unpacked): Packed = new ConstColumn[T](new QueryParameter(extract, tm))(tm)
    def encodeRef(value: Mixed, path: Node) =
      throw new SlickException("Shape does not have the same Mixed and Packed type")
    def toNode(value: Mixed): Node = pack(value).toNode
  }
  // All Unit shapes share one prototype instance, cast to the requested Level.
  @inline implicit final def unitShape[Level <: ShapeLevel]: Shape[Level, Unit, Unit, Unit] =
    unitShapePrototype.asInstanceOf[Shape[Level, Unit, Unit, Unit]]
  // Unit packs to itself and is represented by an empty ProductNode in the AST.
  val unitShapePrototype: Shape[FlatShapeLevel, Unit, Unit, Unit] = new Shape[FlatShapeLevel, Unit, Unit, Unit] {
    def pack(value: Mixed) = ()
    def packedShape: Shape[FlatShapeLevel, Packed, Unpacked, Packed] = this
    def buildParams(extract: Any => Unpacked) = ()
    def encodeRef(value: Mixed, path: Node) = ()
    def toNode(value: Mixed) = ProductNode(ConstArray.empty)
  }
}
/** Implicit Shape for table rows: an AbstractTable is a fully packed Rep of
 * its row type T, so it reuses RepShape. */
trait AbstractTableShapeImplicits extends RepShapeImplicits {
  @inline implicit final def tableShape[Level >: FlatShapeLevel <: ShapeLevel, T, C <: AbstractTable[_]](implicit ev: C <:< AbstractTable[T]) = RepShape[Level, C, T]
}
/** Higher-priority implicits that give ConstColumn its own identity-packing
 * Shape (resolved before the generic Rep shapes inherited below). */
trait ConstColumnShapeImplicits extends RepShapeImplicits {
  /** A Shape for ConstColumns. It is identical to `columnShape` but it
   * ensures that a `ConstColumn[T]` packs to itself, not just to
   * `Rep[T]`. This allows ConstColumns to be used as fully packed
   * types when compiling query functions. */
  @inline implicit def constColumnShape[T, Level <: ShapeLevel] = RepShape[Level, ConstColumn[T], T]
}
/** Implicit Shapes for Rep values; extends OptionShapeImplicits so these take
 * priority over the generic Option shape during implicit search. */
trait RepShapeImplicits extends OptionShapeImplicits {
  /** A Shape for single-column Reps. */
  @inline implicit def repColumnShape[T : BaseTypedType, Level <: ShapeLevel] = RepShape[Level, Rep[T], T]
  /** A Shape for Option-valued Reps. */
  @inline implicit def optionShape[M, U, P, Level <: ShapeLevel](implicit sh: Shape[_ <: Level, Rep[M], U, Rep[P]]): Shape[Level, Rep[Option[M]], Option[U], Rep[Option[P]]] =
    RepShape.asInstanceOf[Shape[Level, Rep[Option[M]], Option[U], Rep[Option[P]]]]
}
/** Lowest-priority fallback for Option values whose element type is not
 * itself a Rep. */
trait OptionShapeImplicits {
  /** A Shape for Option-valued non-Reps. */
  @inline implicit def anyOptionShape[M, U, P, Level <: ShapeLevel](implicit sh: Shape[_ <: Level, M, U, P]): Shape[Level, Rep[Option[M]], Option[U], Rep[Option[P]]] =
    RepShape.asInstanceOf[Shape[Level, Rep[Option[M]], Option[U], Rep[Option[P]]]]
}
/** Shape for Rep values (always fully packed) */
object RepShape extends Shape[FlatShapeLevel, Rep[_], Any, Rep[_]] {
  /** Cast this singleton to the statically requested Shape type. */
  def apply[Level <: ShapeLevel, MP <: Rep[_], U]: Shape[Level, MP, U, MP] = this.asInstanceOf[Shape[Level, MP, U, MP]]
  // Rep values are already fully packed, so packing is the identity.
  def pack(value: Mixed): Packed = value
  def packedShape: Shape[FlatShapeLevel, Packed, Unpacked, Packed] = this
  // Mixed (a Rep) and Unpacked (a plain value) always differ for Rep shapes,
  // so building parameters is unsupported here.
  def buildParams(extract: Any => Unpacked): Packed =
    throw new SlickException("Shape does not have the same Mixed and Unpacked type")
  def encodeRef(value: Mixed, path: Node) = value.encodeRef(path)
  def toNode(value: Mixed): Node = value.toNode
}
/** Base class for Shapes of record values which are represented by
 * ProductNodes in the AST.
 *
 * @tparam C The supertype for the record values.
 * @tparam M The mixed type of the Shape (a subtype of C).
 * @tparam U The unpacked type of the Shape (a subtype of C).
 * @tparam P The fully packed type of the Shape (a subtype of C).
 */
abstract class ProductNodeShape[Level <: ShapeLevel, C, M <: C, U <: C, P <: C] extends Shape[Level, M, U, P] {
  /** The Shapes for the product elements. */
  val shapes: Seq[Shape[_, _, _, _]]
  /** Build a record value represented by this Shape from its element values. */
  def buildValue(elems: IndexedSeq[Any]): Any
  /** Create a copy of this Shape with new element Shapes. This is used for
   * packing Shapes recursively. */
  def copy(shapes: Seq[Shape[_ <: ShapeLevel, _, _, _]]): Shape[Level, _, _, _]
  /** Get the element value from a record value at the specified index. */
  def getElement(value: C, idx: Int): Any
  /** Get an Iterator of a record value's element values. The default
   * implementation repeatedly calls `getElement`. */
  def getIterator(value: C): Iterator[Any] =
    shapes.iterator.zipWithIndex.map(t => getElement(value, t._2))
  // Pack each element with its own Shape, then rebuild the record.
  def pack(value: Mixed) = {
    val elems = shapes.iterator.zip(getIterator(value)).map{ case (p, f) => p.pack(f.asInstanceOf[p.Mixed]) }
    buildValue(elems.toIndexedSeq).asInstanceOf[Packed]
  }
  // A packed product shape is this shape with all element shapes packed.
  def packedShape: Shape[Level, Packed, Unpacked, Packed] =
    copy(shapes.map(_.packedShape.asInstanceOf[Shape[_ <: ShapeLevel, _, _, _]])).asInstanceOf[Shape[Level, Packed, Unpacked, Packed]]
  // Each element Shape gets an extractor that first applies `extract`, then
  // selects its own element by index.
  def buildParams(extract: Any => Unpacked): Packed = {
    val elems = shapes.iterator.zipWithIndex.map { case (p, idx) =>
      def chExtract(u: C): p.Unpacked = getElement(u, idx).asInstanceOf[p.Unpacked]
      p.buildParams(extract.andThen(chExtract))
    }
    buildValue(elems.toIndexedSeq).asInstanceOf[Packed]
  }
  // Element references are encoded as 1-based ElementSymbol selections on path.
  def encodeRef(value: Mixed, path: Node) = {
    val elems = shapes.iterator.zip(getIterator(value)).zipWithIndex.map {
      case ((p, x), pos) => p.encodeRef(x.asInstanceOf[p.Mixed], Select(path, ElementSymbol(pos + 1)))
    }
    buildValue(elems.toIndexedSeq)
  }
  def toNode(value: Mixed): Node = ProductNode(ConstArray.from(shapes.iterator.zip(getIterator(value)).map {
    case (p, f) => p.toNode(f.asInstanceOf[p.Mixed])
  }.toIterable))
}
/** Base class for ProductNodeShapes with a type mapping */
abstract class MappedProductShape[Level <: ShapeLevel, C, M <: C, U <: C, P <: C] extends ProductNodeShape[Level, C, M, U, P] {
  // Wrap the plain ProductNode in a TypeMapping node carrying the
  // bidirectional conversion between the product form and the mapped type U.
  override def toNode(value: Mixed) = TypeMapping(super.toNode(value), MappedScalaType.Mapper(toBase, toMapped, None), classTag)
  /** Decompose a mapped value into a generic product of its elements. */
  def toBase(v: Any) = new ProductWrapper(getIterator(v.asInstanceOf[C]).toIndexedSeq)
  /** Rebuild a mapped value from a generic Product of element values. */
  def toMapped(v: Any) = buildValue(TupleSupport.buildIndexedSeq(v.asInstanceOf[Product]))
  def classTag: ClassTag[U]
}
/** Base class for ProductNodeShapes with a type mapping to a type that extends scala.Product */
abstract class MappedScalaProductShape[Level <: ShapeLevel, C <: Product, M <: C, U <: C, P <: C](implicit val classTag: ClassTag[U]) extends MappedProductShape[Level, C, M, U, P] {
  // scala.Product already provides element access, so use it directly.
  override def getIterator(value: C) = value.productIterator
  def getElement(value: C, idx: Int) = value.productElement(idx)
}
/** Shape for Scala tuples of all arities */
final class TupleShape[Level <: ShapeLevel, M <: Product, U <: Product, P <: Product](val shapes: Shape[_, _, _, _]*) extends ProductNodeShape[Level, Product, M, U, P] {
  override def getIterator(value: Product) = value.productIterator
  def getElement(value: Product, idx: Int) = value.productElement(idx)
  // TupleSupport picks the TupleN constructor matching the element count.
  def buildValue(elems: IndexedSeq[Any]) = TupleSupport.buildTuple(elems)
  def copy(shapes: Seq[Shape[_ <: ShapeLevel, _, _, _]]) = new TupleShape(shapes: _*)
}
/** A generic case class shape that can be used to lift a case class of
 * plain Scala types to a case class of lifted types. This allows the type
 * to be used as a record type (like tuples and HLists) in the Lifted
 * Embedding.
 *
 * Example:
 *
 * {{{
 *   case class C(a: Int, b: Option[String])
 *   case class LiftedC(a: Column[Int], b: Column[Option[String]])
 *   implicit object cShape extends CaseClassShape(LiftedC.tupled, C.tupled)
 * }}}
 */
class CaseClassShape[P <: Product, LiftedTuple, LiftedCaseClass <: P, PlainTuple, PlainCaseClass <: P](
   mapLifted: LiftedTuple => LiftedCaseClass, mapPlain: PlainTuple => PlainCaseClass)(
   implicit columnShapes: Shape[FlatShapeLevel, LiftedTuple, PlainTuple, LiftedTuple], classTag: ClassTag[PlainCaseClass])
extends MappedScalaProductShape[FlatShapeLevel, P, LiftedCaseClass, PlainCaseClass, LiftedCaseClass] {
  // The implicit shape for the tuple type is expected to always be a
  // TupleShape; reuse its element shapes.
  val shapes = columnShapes.asInstanceOf[TupleShape[_,_,_,_]].shapes
  override def toMapped(v: Any) = mapPlain(v.asInstanceOf[PlainTuple])
  def buildValue(elems: IndexedSeq[Any]) = mapLifted(TupleSupport.buildTuple(elems).asInstanceOf[LiftedTuple])
  def copy(s: Seq[Shape[_ <: ShapeLevel, _, _, _]]) = new CaseClassShape(mapLifted, mapPlain) { override val shapes = s }
}
/** A generic Product class shape that can be used to lift a class of
 * plain Scala types to a class of lifted types. This allows the type
 * to be used as a record type (like tuples and HLists) in the Lifted
 * Embedding.
 *
 * This can help with mapping tables >22 columns to classes, especially
 * when using code generation. This can be used for Scala 2.11 case classes >22 fields.
 *
 * Example:
 *
 * {{{
 *   def columnShape[T](implicit s: Shape[FlatShapeLevel, Column[T], T, Column[T]]) = s
 *   class C(val a: Int, val b: Option[String]) extends Product{
 *     def canEqual(that: Any): Boolean = that.isInstanceOf[C]
 *     def productArity: Int = 2
 *     def productElement(n: Int): Any = Seq(a, b)(n)
 *   }
 *   class LiftedC(val a: Column[Int], val b: Column[Option[String]]) extends Product{
 *     def canEqual(that: Any): Boolean = that.isInstanceOf[LiftedC]
 *     def productArity: Int = 2
 *     def productElement(n: Int): Any = Seq(a, b)(n)
 *   }
 *   implicit object cShape extends ProductClassShape(
 *     Seq(columnShape[Int], columnShape[Option[String]]),
 *     seq => new LiftedC(seq(0).asInstanceOf[Column[Int]], seq(1).asInstanceOf[Column[Option[String]]]),
 *     seq => new C(seq(0).asInstanceOf[Int], seq(1).asInstanceOf[Option[String]])
 *   )
 * }}}
 */
class ProductClassShape[E <: Product,C <: Product](
  val shapes: Seq[Shape[_, _, _, _]],
  mapLifted: Seq[Any] => C,
  mapPlain:Seq[Any] => E
)(implicit classTag: ClassTag[E]) extends MappedScalaProductShape[
  FlatShapeLevel, Product, C, E, C
]{
  // Unlike CaseClassShape, the element shapes and both mapping functions are
  // supplied explicitly, so no tuple arity limit applies.
  override def toMapped(v: Any) = mapPlain(v.asInstanceOf[Product].productIterator.toSeq)
  def buildValue(elems: IndexedSeq[Any]) = mapLifted(elems)
  def copy(s: Seq[Shape[_ <: ShapeLevel, _, _, _]]) = new ProductClassShape(s, mapLifted, mapPlain)
}
/** The level of a Shape, i.e. what kind of types it allows.
 * Subtypes of this trait are used as a phantom type for Shape resolution.
 * There are no instances of any ShapeLevel. */
// Hierarchy (most to least restrictive):
//   ColumnsShapeLevel <: FlatShapeLevel <: NestedShapeLevel <: ShapeLevel
trait ShapeLevel
/** ShapeLevel that allows nested collections. */
trait NestedShapeLevel extends ShapeLevel
/** ShapeLevel that does not allow nested collections.
 * This is the standard level for executable queries. */
trait FlatShapeLevel extends NestedShapeLevel
/** ShapeLevel that only allows records of individual columns.
 * This level is used for parameters of compiled queries. */
trait ColumnsShapeLevel extends FlatShapeLevel
/** A value together with its Shape */
case class ShapedValue[T, U](value: T, shape: Shape[_ <: FlatShapeLevel, T, U, _]) extends Rep[U] {
  def encodeRef(path: Node): ShapedValue[T, U] = {
    val fv = shape.encodeRef(value, path).asInstanceOf[T]
    // Avoid allocating a new instance if encodeRef returned the same value.
    if(fv.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this else new ShapedValue(fv, shape)
  }
  def toNode = shape.toNode(value)
  /** Pack the value and pair it with the corresponding packed Shape. */
  def packedValue[R](implicit ev: Shape[_ <: FlatShapeLevel, T, _, R]): ShapedValue[R, U] = ShapedValue(shape.pack(value).asInstanceOf[R], shape.packedShape.asInstanceOf[Shape[FlatShapeLevel, R, U, _]])
  def zip[T2, U2](s2: ShapedValue[T2, U2]) = new ShapedValue[(T, T2), (U, U2)]((value, s2.value), Shape.tuple2Shape(shape, s2.shape))
  /** Bidirectionally map to a new type R. `g` must return Some for every
   * value produced by `f` (its result is unconditionally unwrapped). */
  def <>[R : ClassTag](f: (U => R), g: (R => Option[U])) = new MappedProjection[R, U](shape.toNode(value), MappedScalaType.Mapper(g.andThen(_.get).asInstanceOf[Any => Any], f.asInstanceOf[Any => Any], None), implicitly[ClassTag[R]])
  @inline def shaped: ShapedValue[T, U] = this
  /** Map to a case class R; converter code is generated at compile time by
   * the `mapToImpl` macro in the companion object. */
  def mapTo[R <: Product with Serializable](implicit rCT: ClassTag[R]): MappedProjection[R, U] = macro ShapedValue.mapToImpl[R, U]
}
object ShapedValue {
  @inline implicit def shapedValueShape[T, U, Level <: ShapeLevel] = RepShape[Level, ShapedValue[T, U], U]
  /** Macro implementation of `ShapedValue.mapTo[R]`: generates the
   * bidirectional mapping between the unpacked type U (a tuple, HList or
   * single value) and the case class R, plus a fast-path ResultConverter
   * that reads/writes the case class fields without going through tuples. */
  def mapToImpl[R <: Product with Serializable, U](c: Context { type PrefixType = ShapedValue[_, U] })(rCT: c.Expr[ClassTag[R]])(implicit rTag: c.WeakTypeTag[R], uTag: c.WeakTypeTag[U]): c.Tree = {
    import c.universe._
    val rSym = symbolOf[R]
    if(!rSym.isClass || !rSym.asClass.isCaseClass)
      c.abort(c.enclosingPosition, s"${rSym.fullName} must be a case class")
    val rModule = rSym.companion match {
      case NoSymbol => q"${rSym.name.toTermName}" // This can happen for case classes defined inside of methods
      case s => q"$s"
    }
    // One (field name, field type, fresh identifier) triple per case accessor.
    val fields = rTag.tpe.decls.collect {
      case s: TermSymbol if s.isVal && s.isCaseAccessor => (TermName(s.name.toString.trim), s.typeSignature, TermName(c.freshName()))
    }.toIndexedSeq
    val (f, g) = if(uTag.tpe <:< c.typeOf[slick.collection.heterogeneous.HList]) { // Map from HList
      // Build the HCons chain type, a deconstruction pattern and a
      // construction expression matching the field list.
      val rTypeAsHList = fields.foldRight[Tree](tq"_root_.slick.collection.heterogeneous.HNil.type") {
        case ((_, t, _), z) => tq"_root_.slick.collection.heterogeneous.HCons[$t, $z]"
      }
      val pat = fields.foldRight[Tree](pq"_root_.slick.collection.heterogeneous.HNil") {
        case ((_, _, n), z) => pq"_root_.slick.collection.heterogeneous.HCons($n, $z)"
      }
      val cons = fields.foldRight[Tree](q"_root_.slick.collection.heterogeneous.HNil") {
        case ((n, _, _), z) => q"v.$n :: $z"
      }
      (q"({ case $pat => new $rTag(..${fields.map(_._3)}) } : ($rTypeAsHList => $rTag)): ($uTag => $rTag)",
        q"{ case v => $cons }: ($rTag => $uTag)")
    } else if(fields.length == 1) { // Map from single value
      (q"($rModule.apply _) : ($uTag => $rTag)",
        q"(($rModule.unapply _) : $rTag => Option[$uTag]).andThen(_.get)")
    } else { // Map from tuple
      (q"($rModule.tupled) : ($uTag => $rTag)",
        q"(($rModule.unapply _) : $rTag => Option[$uTag]).andThen(_.get)")
    }
    // Pieces of the generated fast-path converter, one child per field.
    val fpName = Constant("Fast Path of ("+fields.map(_._2).mkString(", ")+").mapTo["+rTag.tpe+"]")
    val fpChildren = fields.map { case (_, t, n) => q"val $n = next[$t]" }
    val fpReadChildren = fields.map { case (_, _, n) => q"$n.read(r)" }
    val fpSetChildren = fields.map { case (fn, _, n) => q"$n.set(value.$fn, pp)" }
    val fpUpdateChildren = fields.map { case (fn, _, n) => q"$n.update(value.$fn, pr)" }
    q"""
      val ff = $f.asInstanceOf[_root_.scala.Any => _root_.scala.Any] // Resolving f first creates more useful type errors
      val gg = $g.asInstanceOf[_root_.scala.Any => _root_.scala.Any]
      val fpMatch: (_root_.scala.Any => _root_.scala.Any) = {
        case tm @ _root_.slick.relational.TypeMappingResultConverter(_: _root_.slick.relational.ProductResultConverter[_, _], _, _) =>
          new _root_.slick.relational.SimpleFastPathResultConverter[_root_.slick.relational.ResultConverterDomain, $rTag](tm.asInstanceOf[_root_.slick.relational.TypeMappingResultConverter[_root_.slick.relational.ResultConverterDomain, $rTag, _]]) {
            ..$fpChildren
            override def read(r: Reader): $rTag = new $rTag(..$fpReadChildren)
            override def set(value: $rTag, pp: Writer): _root_.scala.Unit = {..$fpSetChildren}
            override def update(value: $rTag, pr: Updater): _root_.scala.Unit = {..$fpUpdateChildren}
            override def getDumpInfo = super.getDumpInfo.copy(name = $fpName)
          }
        case tm => tm
      }
      new _root_.slick.lifted.MappedProjection[$rTag, $uTag](${c.prefix}.toNode,
        _root_.slick.ast.MappedScalaType.Mapper(gg, ff, _root_.scala.Some(fpMatch)), $rCT)
    """
  }
}
/** A limited version of ShapedValue which can be constructed for every type
 * that has a valid shape. We use it to enforce that a table's * projection
 * has a valid shape. A ProvenShape has itself a Shape so it can be used in
 * place of the value that it wraps for purposes of packing and unpacking. */
trait ProvenShape[U] {
  def value: Any
  val shape: Shape[_ <: FlatShapeLevel, _, U, _]
  def packedValue[R](implicit ev: Shape[_ <: FlatShapeLevel, _, U, R]): ShapedValue[R, U]
  // Always delegates through the packed value using this instance's own shape.
  def toNode = packedValue(shape).toNode
}
object ProvenShape {
  /** Convert an appropriately shaped value to a ProvenShape */
  implicit def proveShapeOf[T, U](v: T)(implicit sh: Shape[_ <: FlatShapeLevel, T, U, _]): ProvenShape[U] =
    new ProvenShape[U] {
      def value = v
      val shape: Shape[_ <: FlatShapeLevel, _, U, _] = sh.asInstanceOf[Shape[FlatShapeLevel, _, U, _]]
      def packedValue[R](implicit ev: Shape[_ <: FlatShapeLevel, _, U, R]): ShapedValue[R, U] = ShapedValue(sh.pack(value).asInstanceOf[R], sh.packedShape.asInstanceOf[Shape[FlatShapeLevel, R, U, _]])
    }
  /** The Shape for a ProvenShape */
  // Each operation delegates to the Shape stored inside the ProvenShape
  // value (for value-dependent ops) or to the implicitly resolved `shape`
  // (for buildParams/packedShape, which have no value available).
  implicit def provenShapeShape[T, P](implicit shape: Shape[_ <: FlatShapeLevel, T, T, P]): Shape[FlatShapeLevel, ProvenShape[T], T, P] = new Shape[FlatShapeLevel, ProvenShape[T], T, P] {
    def pack(value: Mixed): Packed =
      value.shape.pack(value.value.asInstanceOf[value.shape.Mixed]).asInstanceOf[Packed]
    def packedShape: Shape[FlatShapeLevel, Packed, Unpacked, Packed] =
      shape.packedShape.asInstanceOf[Shape[FlatShapeLevel, Packed, Unpacked, Packed]]
    def buildParams(extract: Any => Unpacked): Packed =
      shape.buildParams(extract.asInstanceOf[Any => shape.Unpacked])
    def encodeRef(value: Mixed, path: Node) =
      value.shape.encodeRef(value.value.asInstanceOf[value.shape.Mixed], path)
    def toNode(value: Mixed): Node =
      value.shape.toNode(value.value.asInstanceOf[value.shape.Mixed])
  }
}
/** A projection of type P mapped to the user type T via `mapper`. */
class MappedProjection[T, P](child: Node, mapper: MappedScalaType.Mapper, classTag: ClassTag[T]) extends Rep[T] {
  type Self = MappedProjection[_, _]
  override def toString = "MappedProjection"
  override def toNode: Node = TypeMapping(child, mapper, classTag)
  // A reference keeps the same mapping but points its AST at `path` instead
  // of the original child node.
  def encodeRef(path: Node): MappedProjection[T, P] = new MappedProjection[T, P](child, mapper, classTag) {
    override def toNode = path
  }
  /** Return a copy of this projection with a custom fast-path function. */
  def genericFastPath(f: Function[Any, Any]) = new MappedProjection[T, P](child, mapper.copy(fastPath = Some(f)), classTag)
}
object MappedProjection {
  /** The Shape for a MappedProjection */
  @inline implicit final def mappedProjectionShape[Level >: FlatShapeLevel <: ShapeLevel, T, P] = RepShape[Level, MappedProjection[T, P], T]
}
| AtkinsChang/slick | slick/src/main/scala/slick/lifted/Shape.scala | Scala | bsd-2-clause | 20,829 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.catalog.CatalogTable
import org.apache.spark.sql.catalyst.expressions.{AttributeMap, AttributeReference}
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Statistics}
import org.apache.spark.sql.sources.BaseRelation
import org.apache.spark.util.Utils
/**
 * Used to link a [[BaseRelation]] in to a logical query plan.
 */
case class LogicalRelation(
    relation: BaseRelation,
    output: Seq[AttributeReference],
    catalogTable: Option[CatalogTable],
    override val isStreaming: Boolean)
  extends LeafNode with MultiInstanceRelation {
  // Only care about relation when canonicalizing.
  // Expression ids are normalized and the catalog table is dropped so that
  // two plans over the same relation compare equal.
  override lazy val canonicalized: LogicalPlan = copy(
    output = output.map(QueryPlan.normalizeExprId(_, output)),
    catalogTable = None)
  // Prefer statistics recorded in the catalog; fall back to the relation's
  // own size estimate when none are available.
  override def computeStats(): Statistics = {
    catalogTable
      .flatMap(_.stats.map(_.toPlanStats(output)))
      .getOrElse(Statistics(sizeInBytes = relation.sizeInBytes))
  }
  /** Used to lookup original attribute capitalization */
  val attributeMap: AttributeMap[AttributeReference] = AttributeMap(output.map(o => (o, o)))
  /**
   * Returns a new instance of this LogicalRelation. According to the semantics of
   * MultiInstanceRelation, this method returns a copy of this object with
   * unique expression ids. We respect the `expectedOutputAttributes` and create
   * new instances of attributes in it.
   */
  override def newInstance(): LogicalRelation = {
    this.copy(output = output.map(_.newInstance()))
  }
  // Only file-based relations carry a refreshable file-listing cache.
  override def refresh(): Unit = relation match {
    case fs: HadoopFsRelation => fs.location.refresh()
    case _ => // Do nothing.
  }
  override def simpleString: String = s"Relation[${Utils.truncatedString(output, ",")}] $relation"
}
object LogicalRelation {
  /** Wraps `relation` without any catalog table attached. */
  def apply(relation: BaseRelation, isStreaming: Boolean = false): LogicalRelation = {
    val attributes = relation.schema.toAttributes
    new LogicalRelation(relation, attributes, catalogTable = None, isStreaming = isStreaming)
  }
  /** Wraps `relation` backed by the catalog entry `table` (non-streaming). */
  def apply(relation: BaseRelation, table: CatalogTable): LogicalRelation = {
    val attributes = relation.schema.toAttributes
    new LogicalRelation(relation, attributes, catalogTable = Some(table), isStreaming = false)
  }
}
| 1haodian/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/LogicalRelation.scala | Scala | apache-2.0 | 3,099 |
package org.jetbrains.plugins.scala
package lang
package structureView
package itemsPresentations
package impl
import javax.swing._
import org.jetbrains.plugins.scala.icons.Icons
import org.jetbrains.plugins.scala.lang.psi.api.base._;
/**
* @author Alexander Podkhalyuzin
* Date: 16.05.2008
*/
class ScalaPrimaryConstructorItemPresentation(private val element: ScPrimaryConstructor) extends ScalaItemPresentation(element) {
def getPresentableText: String = {
ScalaElementPresentation.getPrimaryConstructorPresentableText(myElement.asInstanceOf[ScPrimaryConstructor])
}
override def getIcon(open: Boolean): Icon = {
Icons.FUNCTION
}
} | triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/structureView/itemsPresentations/impl/ScalaPrimaryConstructorItemPresentation.scala | Scala | apache-2.0 | 654 |
package controllers
import javax.inject.Singleton
import akka.util.CompactByteString
import play.api.http.HttpEntity.Strict
import play.api.mvc._
import play.api.libs.json.Json
import play.mvc.Http
@Singleton
class Application extends Controller {
  // Response header reused by every action; includes a static Server header.
  val header = ResponseHeader(OK, Map(Http.HeaderNames.SERVER -> "EXAMPLE"))
  // Compile-time derived JSON serializer for HelloWorld.
  implicit val helloWorldWrites = Json.writes[HelloWorld]
  /** Serves {"message":"Hello, World!"} with Content-Type application/json. */
  def getJsonMessage = Action {
    val helloWorld = HelloWorld(message = "Hello, World!")
    Result(header, Strict(CompactByteString(Json.toJson(helloWorld).toString()), Some("application/json")))
  }
  /** Serves the literal string "Hello, World!" with Content-Type text/plain. */
  val plaintext = Action {
    Result(header, Strict(CompactByteString("Hello, World!"), Some("text/plain")))
  }
}
case class HelloWorld(message: String) | zane-techempower/FrameworkBenchmarks | frameworks/Scala/play2-scala/play2-scala/app/controllers/Application.scala | Scala | bsd-3-clause | 745 |
package _99Problems.WorkingWithLists
/**
 * P20: Remove the Kth element from a list.
 */
object P20 {
  /**
   * Removes the element at index `k` from `list`.
   *
   * @param k    zero-based index of the element to remove
   * @param list the input list
   * @return a pair of (list without the k-th element, the removed element)
   * @throws IndexOutOfBoundsException if `k` is negative or >= `list.length`
   *         (same exception type the original `list(k)` lookup raised)
   */
  def removeAt[T](k: Int, list: List[T]): (List[T], T) = {
    if (k < 0 || k >= list.length)
      throw new IndexOutOfBoundsException(s"index $k out of bounds for list of length ${list.length}")
    // Single splitAt traversal instead of the original take + drop + apply
    // (three traversals up to index k).
    val (front, rest) = list.splitAt(k)
    (front ::: rest.tail, rest.head)
  }
}
| dandxy89/LearningScala | src/main/scala/_99Problems/WorkingWithLists/P20.scala | Scala | mit | 209 |
package zzb.datatype
import org.scalatest.{MustMatchers, WordSpec}
import zzb.datatype.testuse.UserInfo
/**
 * Created by Simon on 2014/6/27
 *
 * Exercises map-like access (`apply`, `+`, `-`, `getOrElse`) on the `misc`
 * property of the UserInfo data type.
 */
class TPropertyTest extends WordSpec with MustMatchers {
  "Property " must {
    "property access" in {
      import UserInfo._
      val u0: UserInfo.Pack = UserInfo(userName := "simon",
        misc := Map("water" -> 100, "price" -> 23.1, "date" -> "1978-12-03 06:32:33"))
      // A stored value compares equal to both its numeric and string forms.
      u0.misc("water") mustBe Some(100)
      u0.misc("water") mustBe Some("100")
      u0.misc("water").get.isNumber mustBe true
      u0.misc("water").get.toInt mustBe 100
      // Missing keys yield None, so getOrElse falls back to the default.
      u0.misc("water999").getOrElse("33") mustBe "33"
      // NOTE(review): p0 and p1 are never used — u1 below is built directly
      // from u0.misc. Candidates for removal.
      val p0 = u0.misc
      val p1 = p0 + ("age" -> 40)
      // Adding an entry produces an updated pack (<~ replaces the property).
      val u1 = u0 <~ (u0.misc + ("age" -> 40))
      u1.misc("age") mustBe Some(40)
      u1.misc("age") mustBe Some("40")
      // Removing an entry leaves the remaining entries intact.
      val u2 = u1 <~ (u1.misc - "water")
      u2.misc("water") mustBe None
      u2.misc.getOrElse("water", "nothis") mustBe "nothis"
      u2.misc("age") mustBe Some(40)
    }
    "Property default " in {
      import UserInfo._
      // When misc is not supplied at construction, it defaults to empty.
      val u0: UserInfo.Pack = UserInfo(userName := "simon")
      u0.misc.size mustBe 0
    }
  }
}
| stepover/zzb | zzb-datatype/src/test/scala/zzb/datatype/TPropertyTest.scala | Scala | mit | 1,174 |
package spire.math
import spire.implicits._
import org.scalatest.Matchers
import org.scalacheck.Arbitrary._
import org.scalatest._
import prop._
import org.scalacheck._
import Gen._
import Arbitrary.arbitrary
import ArbitrarySupport._
/** Property-based tests for Quaternion over exact Real scalars: group laws
 * for + and *, roots/powers (with a small tolerance), and consistency with
 * Complex when the j and k components are zero. */
class QuaternionCheck extends PropSpec with Matchers with GeneratorDrivenPropertyChecks {
  type H = Quaternion[Real]
  val zero = Quaternion.zero[Real]
  val one = Quaternion.one[Real]
  property("q + 0 = q") {
    forAll { (q: H) =>
      q + Real.zero shouldBe q
      q + zero shouldBe q
    }
  }
  property("q + -q = 0") {
    forAll { (q: H) =>
      q + (-q) shouldBe zero
    }
  }
  property("q1 + -q2 = q1 - q2") {
    forAll { (q1: H, q2: H) =>
      q1 + (-q2) shouldBe q1 - q2
    }
  }
  property("q1 + q2 = q2 + q1") {
    forAll { (q1: H, q2: H) =>
      q1 + q2 shouldBe q2 + q1
    }
  }
  // NOTE(review): property name has a typo — "a3" should read "q3".
  property("(q1 + q2) + a3 = q1 + (q2 + q3)") {
    forAll { (q1: H, q2: H, q3: H) =>
      (q1 + q2) + q3 shouldBe q1 + (q2 + q3)
    }
  }
  // NOTE(review): property name says "q * 0 = q" but the law verified below
  // is q * 0 = 0.
  property("q * 0 = q") {
    forAll { (q: H) =>
      q * Real.zero shouldBe zero
      q * zero shouldBe zero
    }
  }
  property("q * 1 = q") {
    forAll { (q: H) =>
      q * Real.one shouldBe q
      q * one shouldBe q
    }
  }
  property("q * 2 = q + q") {
    forAll { (q: H) =>
      q * Real(2) shouldBe q + q
    }
  }
  property("q1 * (q2 + q3) = q1 * q2 + q1 * q3") {
    forAll { (q1: H, q2: H, q3: H) =>
      q1 * (q2 + q3) shouldBe q1 * q2 + q1 * q3
    }
  }
  property("(q1 * q2) * a3 = q1 * (q2 * q3)") {
    forAll { (q1: H, q2: H, q3: H) =>
      (q1 * q2) * q3 shouldBe q1 * (q2 * q3)
    }
  }
  property("q * q.reciprocal = 1") {
    forAll { (q: H) =>
      if (q != zero) (q * q.reciprocal) shouldBe one
    }
  }
  property("1 / q = 1.reciprocal") {
    forAll { (q: H) =>
      if (q != zero) (one / q) shouldBe q.reciprocal
    }
  }
  property("q.pow(2) = q * q") {
    forAll { (q: H) =>
      q.pow(2) shouldBe q * q
    }
  }
  // exact checking isn't quite working in all cases, ugh
  val tolerance = Real(Rational(1, 1000000000))
  import spire.compat.ordering
  // Debug helper: prints per-component differences between an expected
  // quaternion `base` and a computed one `gen`.
  def dumpDiff(label: String, base: H, gen: H): Unit = {
    println(s"$label $base $gen")
    val (gr, gi, gj, gk) = (gen.r, gen.i, gen.j, gen.k)
    val (br, bi, bj, bk) = (base.r, base.i, base.j, base.k)
    if (br != gr) println(s"  r: ${br.repr} != ${gr.repr} (${br.toRational} and ${gr.toRational}) [${(br-gr).signum}] <${br-gr}>")
    if (bi != gi) println(s"  i: ${bi.repr} != ${gi.repr} (${bi.toRational} and ${gi.toRational}) [${(bi-gi).signum}] <${bi-gi}>")
    if (bj != gj) println(s"  j: ${bj.repr} != ${gj.repr} (${bj.toRational} and ${gj.toRational}) [${(bj-gj).signum}] <${bj-gj}>")
    if (bk != gk) println(s"  k: ${bk.repr} != ${gk.repr} (${bk.toRational} and ${gk.toRational}) [${(bk-gk).signum}] <${bk-gk}>")
  }
  // Equality up to `tolerance`, used where exact Real comparison is flaky.
  def inexactEq(x: H, y: H): Unit =
    if (x != y) {
      //dumpDiff("ouch", x, y)
      (x - y).abs should be < tolerance // sadface
    } else {
      x shouldBe y
    }
  property("q.sqrt.pow(2) = q") {
    forAll { (q: H) =>
      val r = q.sqrt.pow(2)
      inexactEq(q, r)
    }
  }
  property("q.nroot(3).pow(3) = q") {
    forAll { (a: Short, b: Short, c: Short, d: Short) =>
      val q = Quaternion(Real(a), Real(b), Real(c), Real(d))
      val r = q.nroot(3).pow(3)
      inexactEq(q, r)
    }
  }
  property("q.nroot(k).pow(k) = q") {
    forAll { (a: Short, b: Short, c: Short, d: Short, k0: Int) =>
      val q = Quaternion(Real(a), Real(b), Real(c), Real(d))
      // k is clamped to 1..6 to keep root computation fast.
      val k = (k0 % 5).abs + 1
      val r = q.nroot(k).pow(k)
      inexactEq(q, r)
    }
  }
  // property("q.fpow(1/k) = q.nroot(k)") {
  //   forAll { (q: H, k0: Int) =>
  //     val k = (k0 % 10).abs + 1
  //     q.nroot(k) shouldBe q.fpow(Real(Rational(1, k)))
  //   }
  // }
  //
  // property("q.fpow(1/k).fpow(k) = q") {
  //   forAll { (q: H, k0: Byte) =>
  //     val k = Real(Rational((k0 % 10).abs))
  //     val ik = k.reciprocal
  //     if (k == Real.zero) {
  //       q.fpow(k) shouldBe one
  //     } else {
  //       q.fpow(ik).fpow(k) shouldBe q
  //     }
  //   }
  // }
  property("q = q.r iff q.isReal") {
    forAll { (q: H) =>
      q == q.r shouldBe q.isReal
    }
  }
  // Quaternions with zero j,k components must hash like the equal Complex
  // (and pure-real ones like the Real itself) to keep equals/hashCode lawful.
  property("q.hashCode = c.hashCode") {
    forAll { (r: Real, i: Real) =>
      val q1 = Quaternion(r, i, Real.zero, Real.zero)
      val c1 = Complex(r, i)
      q1.hashCode shouldBe c1.hashCode
      val q2 = Quaternion(r)
      val c2 = Complex(r)
      q2.hashCode shouldBe c2.hashCode
      q2.hashCode shouldBe r.hashCode
    }
  }
  property("q = c") {
    val z = Real.zero
    forAll { (r: Real, i: Real) =>
      Quaternion(r, i, z, z) shouldBe Complex(r, i)
      Quaternion(r, z, z, z) shouldBe Complex(r, z)
      Quaternion(z, i, z, z) shouldBe Complex(z, i)
    }
    forAll { (r: Real, i: Real, j: Real, k: Real) =>
      Quaternion(r, i, j, k) == Complex(r, i) shouldBe (j == Real.zero && k == Real.zero)
    }
  }
}
| woparry/spire | tests/src/test/scala/spire/math/QuaternionCheck.scala | Scala | mit | 4,959 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.recorder.http.model
import scala.collection.JavaConversions._
import io.gatling.commons.util.StringHelper.Eol
import io.netty.buffer.Unpooled
import io.netty.handler.codec.http._
import org.asynchttpclient.netty.util.ByteBufUtils
object SafeHttpRequest {
  /** Copies all data out of the reference-counted Netty request into an
   * immutable [[SafeHttpRequest]] and releases the Netty buffer, so the
   * snapshot can be kept after the original request is recycled. */
  def fromNettyRequest(nettyRequest: FullHttpRequest): SafeHttpRequest = {
    val request = SafeHttpRequest(
      nettyRequest.getProtocolVersion,
      nettyRequest.getMethod,
      nettyRequest.getUri,
      nettyRequest.headers,
      nettyRequest.trailingHeaders,
      ByteBufUtils.byteBuf2Bytes(nettyRequest.content)
    )
    // Release the ref-counted content now that the body has been copied out.
    nettyRequest.release()
    request
  }
}
/** Immutable snapshot of an HTTP request, decoupled from Netty's
 * reference-counted buffers. The body is held as a plain byte array. */
case class SafeHttpRequest(
    httpVersion: HttpVersion,
    method: HttpMethod,
    uri: String,
    headers: HttpHeaders,
    trailingHeaders: HttpHeaders,
    body: Array[Byte]
) {
  /** Rebuilds a full Netty request (with a fresh wrapped body buffer) from
   * this snapshot. */
  def toNettyRequest: FullHttpRequest = {
    val request = new DefaultFullHttpRequest(httpVersion, method, uri, Unpooled.wrappedBuffer(body))
    request.headers.set(headers)
    request.trailingHeaders.set(trailingHeaders)
    request
  }
  /** Request line followed by all headers (regular then trailing), one per
   * line; the body is intentionally omitted. */
  def summary: String =
    s"""$httpVersion $method $uri
        |${(headers ++ trailingHeaders).map { entry => s"${entry.getKey}: ${entry.getValue}" }.mkString(Eol)}""".stripMargin
}
| GabrielPlassard/gatling | gatling-recorder/src/main/scala/io/gatling/recorder/http/model/SafeHttpRequest.scala | Scala | apache-2.0 | 1,928 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.pipeline.api.keras.layers
import com.intel.analytics.bigdl.nn.{MulConstant => BMulConstant}
import com.intel.analytics.zoo.pipeline.api.keras.layers.{MulConstant => ZMulConstant}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.ZooSpecHelper
import com.intel.analytics.zoo.pipeline.api.keras.serializer.ModuleSerializationTest
/** Compares the Analytics Zoo Keras-style MulConstant layer against BigDL's
  * native MulConstant: forward outputs and backward gradInputs must match.
  */
class MulConstantSpec extends ZooSpecHelper {

  "MulConstant 0 Zoo" should "be the same as BigDL" in {
    // Scaling by 0 — the degenerate case that zeroes the whole tensor.
    val blayer = BMulConstant[Float](0f)
    val zlayer = ZMulConstant[Float](0f, inputShape = Shape(4, 5))
    zlayer.build(Shape(-1, 4, 5))
    // -1 in the leading dimension denotes a free (batch) dimension.
    zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 4, 5))
    val input = Tensor[Float](Array(3, 4, 5)).rand()
    compareOutputAndGradInput(blayer, zlayer, input)
  }

  "MulConstant -1 Zoo" should "be the same as BigDL" in {
    // Scaling by -1 (negation) on a rank-4 input.
    val blayer = BMulConstant[Float](-1)
    val zlayer = ZMulConstant[Float](-1, inputShape = Shape(4, 8, 8))
    zlayer.build(Shape(-1, 4, 8, 8))
    zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 4, 8, 8))
    val input = Tensor[Float](Array(3, 4, 8, 8)).rand()
    compareOutputAndGradInput(blayer, zlayer, input)
  }
}
/** Round-trips a MulConstant layer through the module serialization harness. */
class MulConstantSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    val layer = ZMulConstant[Float](-1, inputShape = Shape(4, 8, 8))
    layer.build(Shape(2, 4, 8, 8))
    val input = Tensor[Float](2, 4, 8, 8).rand()
    // Delegates to the shared harness; see ModuleSerializationTest for the
    // exact checks it performs on the serialized/deserialized layer.
    runSerializationTest(layer, input)
  }
}
| intel-analytics/analytics-zoo | zoo/src/test/scala/com/intel/analytics/zoo/pipeline/api/keras/layers/MulConstantSpec.scala | Scala | apache-2.0 | 2,187 |
package com.twitter.finagle.stats
import java.util.concurrent.locks.AbstractQueuedSynchronizer
import scala.annotation.tailrec
import com.twitter.app.GlobalFlag
/** Experimental global flag: the number of CAS retries that
  * [[NonReentrantReadWriteLock]] performs before falling back to AQS's
  * queued slow-path acquire.
  */
object rwLockSpinWait
// Defaults to -1 which allows the NonReentrantReadWriteLock class to
// set a system specific default.
    extends GlobalFlag[Int](
      -1,
      """Experimental flag. Control how many times NonReentrantReadWriteLock
        | retries an acquire before executing its slow path acquire. Default
        | value of -1 lets the system decide how long to spin""".stripMargin)
/**
* This is a cheaper read-write lock than the ReentrantReadWriteLock.
*
* It does even less under the hood than ReentrantReadWriteLock, and is intended
* for use on the hot path.
*
* To acquire the write lock, call NonReentrantReadWriteLock#acquire, and to release, call
* NonReentrantReadWriteLock#release. The argument that's passed in is unused.
*
* To acquire the read lock, call NonReentrantReadWriteLock#acquireShared, and to release,
* call NonReentrantReadWriteLock#releaseShared. The argument that's passed in is unused.
*
* Note that this lock is non-reentrant, non-fair, and you can't acquire the
* write lock while holding the read lock. Under constant read use, writers may
* starve, so this is better for bursty read workloads.
*/
private[stats] final class NonReentrantReadWriteLock extends AbstractQueuedSynchronizer {
  // AQS state encoding used by this lock:
  //   0   -> free
  //   n>0 -> n readers currently hold the shared lock
  //   -1  -> the writer holds the exclusive lock (see tryAcquire below)
  // The `num` argument required by the AQS callbacks is unused throughout
  // (as documented on the class).
  // we haven't provided an implementation for isHeldExclusively because the
  // docs for AQS advise implementors to provide implementations "as applicable"
  // and our use case doesn't require knowing when the lock is held exclusively.
  private val SpinMax: Int = if (rwLockSpinWait() == -1) {
    // Default number of spins for aarch64 machines set to 5 by default.
    // See below comment on reason.
    if (System.getProperty("os.arch") == "aarch_64") 5
    else 1
  } else rwLockSpinWait()

  // Read lock: delegate to the bounded spin loop below, starting from the
  // current state with the first attempt.
  override def tryAcquireShared(num: Int): Int = {
    tryAcquireShared(getState(), 1)
  }

  // Try to acquire the lock up to SpinMax times before failing and
  // allowing AbstractQueuedSynchronizer to execute the slow-path lock.
  // This is important on Arm systems where compareAndSetState may fail
  // more often due to its weaker memory model.
  @tailrec
  private[this] def tryAcquireShared(cur: Int, tries: Int): Int = {
    // cur >= 0 means no writer holds the lock; increment the reader count.
    if (cur >= 0 && compareAndSetState(cur, cur + 1)) 1
    // When JDK8 support is deprecated, Thread.onSpinWait() should be
    // used in the else if clause to avoid flooding the memory
    // system with compareAndSetState in a tight loop.
    else if (tries < SpinMax) tryAcquireShared(getState(), tries + 1)
    else -1
  }

  // Read unlock: retry until the CAS decrement of the reader count succeeds.
  @tailrec
  override def tryReleaseShared(num: Int): Boolean = {
    val cur = getState()
    if (compareAndSetState(cur, cur - 1)) true
    else tryReleaseShared(num)
  }

  // Write lock: succeeds only when the lock is completely free (state == 0);
  // -1 marks exclusive ownership.
  override def tryAcquire(num: Int): Boolean = {
    val cur = getState()
    cur == 0 && compareAndSetState(0, -1)
  }

  // Write unlock: unconditionally reset to the free state.
  // NOTE(review): assumes acquire/release are correctly paired by callers,
  // as the AQS contract expects — confirm no double-release paths exist.
  override def tryRelease(num: Int): Boolean = {
    setState(0)
    true
  }
}
| twitter/finagle | finagle-stats-core/src/main/scala/com/twitter/finagle/stats/NonReentrantReadWriteLock.scala | Scala | apache-2.0 | 3,053 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.cfclerk.exceptions
/** Signals a problem with a template variable.
  *
  * @param message a description of what went wrong with the variable
  */
class VariableException(message: String) extends RuntimeException(message) {

  /** Builds the exception with the generic default message. */
  def this() = this("Invalid variable")
}
// code-examples/Traits/ui2/clickable.scala
package ui2
/** Mixin for UI components that can respond to a click. */
trait Clickable {
  /** Performs this component's click action. */
  def click()
}
| XClouded/t4f-core | scala/src/tmp/ui2/clickable.scala | Scala | apache-2.0 | 92 |
/*
* Copyright 2020 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package example.armeria.grpc.scala
import com.linecorp.armeria.common.grpc.GrpcSerializationFormats
import com.linecorp.armeria.common.scalapb.ScalaPbJsonMarshaller
import com.linecorp.armeria.server.Server
import com.linecorp.armeria.server.docs.{DocService, DocServiceFilter}
import com.linecorp.armeria.server.grpc.GrpcService
import com.linecorp.armeria.server.logging.LoggingService
import example.armeria.grpc.scala.hello.{HelloRequest, HelloServiceGrpc}
import io.grpc.reflection.v1alpha.ServerReflectionGrpc
import org.slf4j.LoggerFactory
import scala.concurrent.ExecutionContext
/** Bootstraps the example Armeria gRPC server and blocks until it stops. */
object Main {

  private val logger = LoggerFactory.getLogger(this.getClass)

  def main(args: Array[String]): Unit = {
    val server = newServer(8080, 8443)
    // Stop the server gracefully (and log it) when the JVM shuts down.
    Runtime.getRuntime.addShutdownHook(new Thread(() => {
      server.stop().join()
      logger.info("Server has been stopped.")
    }))
    server.start().join()
    logger.info(
      "Server has been started. Serving DocService at http://127.0.0.1:{}/docs",
      server.activeLocalPort)
  }

  /** Builds the server: plain HTTP on `httpPort`, self-signed TLS on
    * `httpsPort`, the HelloService gRPC endpoint (JSON marshalled via
    * ScalaPB, unframed requests allowed), request logging, and a DocService
    * with example requests mounted under /docs.
    */
  def newServer(httpPort: Int, httpsPort: Int): Server = {
    val exampleRequest = HelloRequest("Armeria")
    val grpcService =
      GrpcService
        .builder()
        .addService(HelloServiceGrpc.bindService(new HelloServiceImpl, ExecutionContext.global))
        .supportedSerializationFormats(GrpcSerializationFormats.values)
        .jsonMarshallerFactory(_ => ScalaPbJsonMarshaller())
        .enableUnframedRequests(true)
        .build()
    val serviceName = HelloServiceGrpc.SERVICE.getName
    Server
      .builder()
      .http(httpPort)
      .https(httpsPort)
      .tlsSelfSigned()
      .decorator(LoggingService.newDecorator())
      .service(grpcService)
      .serviceUnder(
        "/docs",
        DocService
          .builder()
          .exampleRequests(serviceName, "Hello", exampleRequest)
          .exampleRequests(serviceName, "LazyHello", exampleRequest)
          .exampleRequests(serviceName, "BlockingHello", exampleRequest)
          // Hide the gRPC reflection service from the generated docs.
          .exclude(DocServiceFilter.ofServiceName(ServerReflectionGrpc.SERVICE_NAME))
          .build()
      )
      .build()
  }
}
| line/armeria | examples/grpc-scala/src/main/scala/example/armeria/grpc/scala/Main.scala | Scala | apache-2.0 | 2,781 |
package codesniffer.codefunnel
/**
* Created by Bowen Cai on 2/28/2016.
*/
/** One mapping step over keyed records.
  *
  * @tparam K type of the record key
  * @tparam D type of the record payload
  */
trait Mapper[K,D] {
  /** Processes one (key, data) record. Returns Unit, so any output is
    * presumably emitted through `ctx` — NOTE(review): confirm against the
    * Context API.
    */
  def map(key: K, data: D, ctx:Context)
}
| xkommando/CodeSniffer | codefunnel/src/main/scala/codesniffer/codefunnel/Mapper.scala | Scala | lgpl-3.0 | 156 |
package com.github.novamage.svalidator.validation.simple
/** Chain builder that requires providing a field name for the generation of error messages further on
*
* @tparam A Type of the instance being validated
* @tparam B Type of the extracted property being validated
*/
class FieldListRequiringSimpleValidatorRuleBuilder[A, B](propertyListExpression: A => List[B], markIndexesOfErrors: Boolean) {

  /** Applies the given string as the field name for any error messages generated during this chain builder.
    *
    * @param fieldName Field name to use for error messages
    */
  def ForField(fieldName: String): SimpleListValidationRuleStarterBuilder[A, B, Nothing] = {
    // Explicit type arguments here keep this overload consistent with the
    // Symbol overload (previously one relied on inference, the other did not).
    new SimpleListValidationRuleStarterBuilder[A, B, Nothing](propertyListExpression, None, Nil, fieldName, markIndexesOfErrors, None, None, None)
  }

  /** Applies the name of the given [[scala.Symbol Symbol]] as the field name for any error messages generated during this chain builder.
    *
    * Delegates to the String overload so the two code paths cannot drift apart.
    *
    * @param fieldName Field name to use for error messages
    */
  def ForField(fieldName: Symbol): SimpleListValidationRuleStarterBuilder[A, B, Nothing] =
    ForField(fieldName.name)
}
| NovaMage/SValidator | src/main/scala/com/github/novamage/svalidator/validation/simple/FieldListRequiringSimpleValidatorRuleBuilder.scala | Scala | mit | 1,285 |
package com.mpc.scalats
import com.mpc.scalats.configuration.Config
import com.mpc.scalats.core.{ Logger, TypeScriptGenerator }
/** Command-line entry point: generates TypeScript definitions for the class
  * names passed as arguments, using the default configuration.
  */
object Main {
  def main(args: Array[String]): Unit = {
    // Route generator output through an SLF4J-backed logger.
    val log = Logger(org.slf4j.LoggerFactory.getLogger(TypeScriptGenerator.getClass))
    val classNames = args.toList
    TypeScriptGenerator.generateFromClassNames(classNames, log)(Config())
  }
}
| miloszpp/scala-ts | src/main/scala/com/mpc/scalats/Main.scala | Scala | mit | 367 |
/*
Copyright 2014 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.macros.impl
import scala.reflect.macros.Context
import com.twitter.scalding._
/**
* This class contains the core macro implementations. This is in a separate module to allow it to be in a
* separate compilation unit, which makes it easier to provide helper methods interfacing with macros.
*/
object TupleSetterImpl {

  /** Materializes a TupleSetter[T] for a case class T, with
    * allowUnknownTypes = false. */
  def caseClassTupleSetterImpl[T](c: Context)(implicit T: c.WeakTypeTag[T]): c.Expr[TupleSetter[T]] =
    caseClassTupleSetterCommonImpl(c, false)

  /** Variant that passes allowUnknownTypes = true to the common
    * implementation. */
  def caseClassTupleSetterWithUnknownImpl[T](c: Context)(implicit
      T: c.WeakTypeTag[T]
  ): c.Expr[TupleSetter[T]] =
    caseClassTupleSetterCommonImpl(c, true)

  /** Shared implementation: asks CaseClassBasedSetterImpl for the per-field
    * setter tree and splices it into an anonymous TupleSetter[T] subclass.
    */
  def caseClassTupleSetterCommonImpl[T](c: Context, allowUnknownTypes: Boolean)(implicit
      T: c.WeakTypeTag[T]
  ): c.Expr[TupleSetter[T]] = {
    import c.universe._
    // Fresh name for the cascading Tuple local used in the generated apply().
    val tupTerm = newTermName(c.fresh("tup"))
    // finalIdx: arity of the generated tuple; set: tree that populates $tupTerm.
    val (finalIdx, set) = CaseClassBasedSetterImpl(c)(tupTerm, allowUnknownTypes, TupleFieldSetter)
    // Fully-qualified (_root_) names keep the generated code immune to
    // whatever imports exist at the expansion site.
    val res = q"""
    new _root_.com.twitter.scalding.TupleSetter[$T] with _root_.com.twitter.bijection.macros.MacroGenerated {
      override def apply(t: $T): _root_.cascading.tuple.Tuple = {
        val $tupTerm = _root_.cascading.tuple.Tuple.size($finalIdx)
        $set
        $tupTerm
      }
      override val arity: _root_.scala.Int = $finalIdx
    }
    """
    c.Expr[TupleSetter[T]](res)
  }
}
| twitter/scalding | scalding-core/src/main/scala/com/twitter/scalding/macros/impl/TupleSetterImpl.scala | Scala | apache-2.0 | 1,963 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.history
import javax.servlet.http.HttpServletRequest
import scala.xml.Node
import org.apache.spark.status.api.v1.ApplicationInfo
import org.apache.spark.ui.{UIUtils, WebUIPage}
/** Renders the history server landing page: a summary of completed (or, when
  * requested via ?showIncomplete=true, incomplete) applications plus provider
  * status information, with client-side tables driven by historypage.js.
  */
private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("") {
  def render(request: HttpServletRequest): Seq[Node] = {
    // stripXSS is called first to remove suspicious characters used in XSS attacks
    val requestedIncomplete =
      Option(UIUtils.stripXSS(request.getParameter("showIncomplete"))).getOrElse("false").toBoolean
    // Number of apps whose completed-ness matches the view being requested.
    val allAppsSize = parent.getApplicationList()
      .count(isApplicationCompleted(_) != requestedIncomplete)
    val eventLogsUnderProcessCount = parent.getEventLogsUnderProcess()
    val lastUpdatedTime = parent.getLastUpdatedTime()
    val providerConfig = parent.getProviderConfig()
    val content =
      <script src={UIUtils.prependBaseUri("/static/historypage-common.js")}></script>
      <div>
          <div class="container-fluid">
            <ul class="unstyled">
              {providerConfig.map { case (k, v) => <li><strong>{k}:</strong> {v}</li> }}
            </ul>
            {
            if (eventLogsUnderProcessCount > 0) {
              <p>There are {eventLogsUnderProcessCount} event log(s) currently being
                processed which may result in additional applications getting listed on this page.
                Refresh the page to view updates. </p>
            }
            }
            {
            if (lastUpdatedTime > 0) {
              <p>Last updated: <span id="last-updated">{lastUpdatedTime}</span></p>
            }
            }
            {
            if (allAppsSize > 0) {
              <script src={UIUtils.prependBaseUri("/static/dataTables.rowsGroup.js")}></script> ++
              <div id="history-summary" class="row-fluid"></div> ++
              <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
              <script src={UIUtils.prependBaseUri("/static/historypage.js")}></script> ++
              <script>setAppLimit({parent.maxApplications})</script>
            } else if (requestedIncomplete) {
              <h4>No incomplete applications found!</h4>
            } else if (eventLogsUnderProcessCount > 0) {
              <h4>No completed applications found!</h4>
            } else {
              <h4>No completed applications found!</h4> ++ parent.emptyListingHtml
            }
            }
            <a href={makePageLink(!requestedIncomplete)}>
              {
              if (requestedIncomplete) {
                "Back to completed applications"
              } else {
                "Show incomplete applications"
              }
              }
            </a>
          </div>
      </div>
    UIUtils.basicSparkPage(content, "History Server", true)
  }

  // Link that toggles between the completed and incomplete application views.
  private def makePageLink(showIncomplete: Boolean): String = {
    UIUtils.prependBaseUri("/?" + "showIncomplete=" + showIncomplete)
  }

  // True when the app has at least one attempt and its first listed attempt
  // completed. NOTE(review): relies on attempts.head being the deciding
  // attempt — confirm ordering guarantees of ApplicationInfo.attempts.
  private def isApplicationCompleted(appInfo: ApplicationInfo): Boolean = {
    appInfo.attempts.nonEmpty && appInfo.attempts.head.completed
  }
}
| ron8hu/spark | core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala | Scala | apache-2.0 | 3,935 |
//
// Nexus Tools - code generators for Nexus framework
// http://github.com/threerings/nexus/blob/master/LICENSE
package com.threerings.nexus.gencode
import javax.annotation.processing.SupportedAnnotationTypes
import javax.lang.model.element.TypeElement
import org.junit.Assert._
import org.junit.Test
/**
* Tests the factory generator.
*/
class FactoryGenTest {

  // Pure string mapping: the Factory_ prefix is inserted after the package path.
  @Test def testFactoryName {
    assertEquals("Factory_Foo", Generator.factoryName("Foo"))
    assertEquals("foo.bar.Factory_Foo", Generator.factoryName("foo.bar.Foo"))
  }

  // Smoke test: factory source generation must not throw for a simple service.
  @Test def testService {
    val source = FactoryTestCompiler.genSource("TestService.java", """
    package foo.bar;
    public interface TestService extends com.threerings.nexus.distrib.NexusService {
      react.RFuture<Integer> addOne (int value);
      void launchMissiles ();
      void sendData (byte[] data);
    }
    """)
    // System.err.println(source)
  }

  // Checks the extracted metadata: service name, method list, and arg boxing.
  @Test def testMeta {
    val meta = FactoryTestCompiler.genMeta("TestService.java", """
    public interface TestService extends com.threerings.nexus.distrib.NexusService {
      react.RFuture<Integer> addOne (int value);
      void launchMissiles ();
    }
    """)
    assertEquals(meta.serviceName, "TestService")
    assertEquals(2, meta.methods.size)
    checkMethod(meta.methods.get(0), "addOne", 1)
    checkArg(meta.methods.get(0).args.get(0), "int", "Integer")
    checkMethod(meta.methods.get(1), "launchMissiles", 0)
  }

  // Asserts a method's name and argument count.
  private def checkMethod (m :ServiceMetadata.Method, name :String, args :Int) {
    assertEquals(name, m.name)
    assertEquals(args, m.args.size)
  }

  // Asserts an argument's raw and boxed type names.
  private def checkArg (arg :ServiceMetadata.Arg, `type` :String, boxedType :String) {
    assertEquals(`type`, arg.`type`)
    assertEquals(boxedType, arg.boxedType)
  }
}
/** Test harness that runs the annotation processor over in-memory Java sources. */
object FactoryTestCompiler extends TestCompiler {

  /** Compiles `content` and returns the generated factory source text. */
  def genSource (filename :String, content :String) :String =
    process(new GenSourceProcessor, filename, content)

  @SupportedAnnotationTypes(Array("*"))
  class GenSourceProcessor extends TestProcessor[String] {
    override def result = _source
    override protected def generate (elem :TypeElement, metas :Seq[Metadata]) {
      // Capture the generated factory source in a string writer.
      val out = new java.io.StringWriter
      Generator.generateFactory(elem, metas.collect { case sm :ServiceMetadata => sm }, out)
      _source = out.toString
    }
    protected var _source = ""
  }

  /** Compiles `content` and returns the extracted service metadata. */
  def genMeta (filename :String, content :String) :ServiceMetadata =
    process(new GenMetaProcessor, filename, content)

  @SupportedAnnotationTypes(Array("*"))
  class GenMetaProcessor extends TestProcessor[ServiceMetadata] {
    override def result = _meta
    override protected def generate (elem :TypeElement, metas :Seq[Metadata]) {
      // Keeps the last ServiceMetadata collected (tests supply one service).
      (metas.collect { case sm :ServiceMetadata => sm }).map { m => _meta = m }
    }
    protected var _meta :ServiceMetadata = _
  }

  // Stub dependencies compiled alongside each test source, so the test
  // services can reference NexusService and RFuture without the real jars.
  override protected def stockObjects = List(nexusServiceObj, rFutureObj)

  private def nexusServiceObj = mkTestObject("NexusService.java", """
    package com.threerings.nexus.distrib;
    public interface NexusService {}
  """)

  private def rFutureObj = mkTestObject("RFuture.java", """
    package react;
    public interface RFuture<T> {}
  """)
}
| threerings/nexus | tools/src/test/scala/com/threerings/nexus/gencode/FactoryGenTest.scala | Scala | bsd-3-clause | 3,215 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.util.NoSuchElementException
import scala.collection.mutable.Buffer
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
/** Tests for NextIterator: element delivery, end-of-iteration errors, and the
  * guarantee that close() runs exactly once.
  */
class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  /** Test double over a Buffer of ints that counts close() invocations. */
  class StubIterator(ints: Buffer[Int]) extends NextIterator[Int] {
    var closeCalled = 0
    override def getNext(): Int = {
      if (ints.size == 0) {
        // Mark exhaustion; the returned 0 is only a placeholder value.
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }
    override def close(): Unit = {
      closeCalled += 1
    }
  }
}
| maropu/spark | core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala | Scala | apache-2.0 | 2,630 |
package scalaprops
object SizeTestsDefinitions {
  // see https://github.com/typelevel/scalacheck/issues/305

  /** Minimal binary-tree ADT used to exercise generator sizing. */
  sealed trait Tree {

    /** Number of branch levels on the longest root-to-leaf path (0 for a
      * bare Leaf). Implemented as an explicit breadth-first traversal with a
      * work queue so that deeply nested generated trees cannot overflow the
      * call stack.
      */
    def depth: Int = {
      var max = 0
      val m = new scala.collection.mutable.Queue[(Int, Branch)]
      // Record a leaf's depth, or enqueue a branch for later processing.
      def handle(t: Tree, s: Int) =
        t match {
          case Leaf => max = max max s
          case b: Branch =>
            m += (s + 1) -> b
        }
      handle(this, 0)
      while (m.nonEmpty) {
        // dequeue() with parens: it mutates the queue, and bare `dequeue`
        // is deprecated auto-application in newer Scala versions.
        val (s, b) = m.dequeue()
        handle(b.left, s)
        handle(b.right, s)
      }
      max
    }
  }

  case object Leaf extends Tree
  case class Branch(left: Tree, right: Tree) extends Tree

  object Tree {
    // Recursive generator anchored at Leaf so generation terminates.
    implicit val recursive = derive.Recursive[Tree](Gen.value(Leaf))
  }
}
| scalaprops/scalaprops-shapeless | test/shared/src/main/scala-2/scalaprops/SizeTestsDefinitions.scala | Scala | apache-2.0 | 745 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lucidworks.spark.example.ml
import com.lucidworks.spark.SparkApp
import com.lucidworks.spark.ml.feature.LuceneTextAnalyzerTransformer
import org.apache.commons.cli.CommandLine
import org.apache.commons.cli.Option.{builder => OptionBuilder}
import org.apache.spark.SparkConf
import org.apache.spark.ml.classification.NaiveBayes
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
import org.apache.spark.ml.feature._
import org.apache.spark.ml.tuning.{CrossValidator, CrossValidatorModel, ParamGridBuilder}
import org.apache.spark.ml.{Pipeline, PipelineStage}
import org.apache.spark.mllib.evaluation.MulticlassMetrics
import org.apache.spark.sql.SparkSession
/** An example of building a spark.ml classification model to predict the newsgroup of
* articles from the 20 newsgroups data (see [[http://qwone.com/~jason/20Newsgroups/]])
* hosted in a Solr collection.
*
* == Prerequisites ==
*
* You must run `mvn -DskipTests package` in the spark-solr project, and you must download
* a Spark 1.6.1 binary distribution and point the environment variable `$SPARK_HOME`
* to the unpacked distribution directory.
*
* Follow the instructions in the [[NewsgroupsIndexer]] example's scaladoc to populate a Solr
* collection with articles from the above-linked 20 newsgroup data.
*
* == Example invocation ==
*
* {{{
* $SPARK_HOME/bin/spark-submit --master 'local[2]' --class com.lucidworks.spark.SparkApp \\
* target/spark-solr-2.0.0-SNAPSHOT-shaded.jar ml-pipeline-scala
* }}}
*
* To see a description of all available options, run the following:
*
* {{{
* $SPARK_HOME/bin/spark-submit --class com.lucidworks.spark.SparkApp \\
* target/spark-solr-2.0.0-SNAPSHOT-shaded.jar ml-pipeline-scala --help
* }}}
*/
class MLPipelineScala extends SparkApp.RDDProcessor {
  import MLPipelineScala._

  def getName = "ml-pipeline-scala"

  // Command-line options; unset options fall back to the Defaults defined in
  // the companion object.
  def getOptions = Array(
    OptionBuilder().longOpt("query").hasArg.argName("QUERY").required(false).desc(
      s"Query to identify documents in the training set. Default: $DefaultQuery").build(),
    OptionBuilder().longOpt("labelField").hasArg.argName("FIELD").required(false).desc(
      s"Field containing the label in Solr training set documents. Default: $DefaultLabelField").build(),
    OptionBuilder().longOpt("contentFields").hasArg.argName("FIELDS").required(false).desc(
      s"Comma-separated list of text field(s) in Solr training set documents. Default: $DefaultContentFields").build(),
    // CrossValidator model export only supports NaiveBayes in Spark 1.6.1, but in Spark 2.0,
    // OneVsRest+LogisticRegression will be supported: https://issues.apache.org/jira/browse/SPARK-11892
    //    OptionBuilder().longOpt("classifier").hasArg.argName("TYPE").required(false).desc(
    //      s"Classifier type: either NaiveBayes or LogisticRegression. Default: $DefaultClassifier").build(),
    OptionBuilder().longOpt("sample").hasArg.argName("FRACTION").required(false).desc(
      s"Fraction (0 to 1) of full dataset to sample from Solr. Default: $DefaultSample").build(),
    OptionBuilder().longOpt("collection").hasArg.argName("NAME").required(false).desc(
      s"Solr source collection. Default: $DefaultCollection").build())

  /** Reads the (sampled) training data from Solr, cross-validates a
    * NaiveBayes text-classification pipeline over a small parameter grid,
    * persists the best model to "ml-pipeline-model", reloads it, and prints
    * accuracy metrics plus a confusion matrix. Returns 0 on completion.
    */
  override def run(conf: SparkConf, cli: CommandLine): Int = {
    val sparkSession: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    val labelField = cli.getOptionValue("labelField", DefaultLabelField)
    val contentFields = cli.getOptionValue("contentFields", DefaultContentFields).split(",").map(_.trim)
    val sampleFraction = cli.getOptionValue("sample", DefaultSample).toDouble
    val options = Map(
      "zkhost" -> cli.getOptionValue("zkHost", DefaultZkHost),
      "collection" -> cli.getOptionValue("collection", DefaultCollection),
      "query" -> cli.getOptionValue("query", DefaultQuery),
      "fields" -> s"""id,$labelField,${contentFields.mkString(",")}""")
    val solrData = sparkSession.read.format("solr").options(options).load
    val sampledSolrData = solrData.sample(withReplacement = false, sampleFraction)
    // Configure an ML pipeline, which consists of the following stages:
    // index string labels, analyzer, hashingTF, classifier model, convert predictions to string labels.
    // ML needs labels as numeric (double) indexes ... our training data has string labels, convert using a StringIndexer
    // see: https://spark.apache.org/docs/1.6.0/api/java/index.html?org/apache/spark/ml/feature/StringIndexer.html
    val labelIndexer = new StringIndexer().setInputCol(labelField).setOutputCol(LabelCol).fit(sampledSolrData)
    val analyzer = new LuceneTextAnalyzerTransformer().setInputCols(contentFields).setOutputCol(WordsCol)
    // Vectorize!
    val hashingTF = new HashingTF().setInputCol(WordsCol).setOutputCol(FeaturesCol)
    val nb = new NaiveBayes()
    val estimatorStage:PipelineStage = nb
    println(s"Using estimator: $estimatorStage")
    // Map numeric predictions back to the original string labels.
    val labelConverter = new IndexToString().setInputCol(PredictionCol)
      .setOutputCol(PredictedLabelCol).setLabels(labelIndexer.labels)
    val pipeline = new Pipeline().setStages(Array(labelIndexer, analyzer, hashingTF, estimatorStage, labelConverter))
    val Array(trainingData, testData) = sampledSolrData.randomSplit(Array(0.7, 0.3))
    val evaluator = new MulticlassClassificationEvaluator().setLabelCol(LabelCol)
      .setPredictionCol(PredictionCol).setMetricName("precision")
    // We use a ParamGridBuilder to construct a grid of parameters to search over,
    // with 3 values for hashingTF.numFeatures, 2 values for lr.regParam, 2 values for
    // analyzer.analysisSchema, and both possibilities for analyzer.prefixTokensWithInputCol.
    // This grid will have 3 x 2 x 2 x 2 = 24 parameter settings for CrossValidator to choose from.
    val paramGrid = new ParamGridBuilder()
      .addGrid(hashingTF.numFeatures, Array(1000, 5000))
      .addGrid(analyzer.analysisSchema, Array(WhitespaceTokSchema, StdTokLowerSchema))
      .addGrid(analyzer.prefixTokensWithInputCol)
      .addGrid(nb.smoothing, Array(1.0, 0.5)).build
    // We now treat the Pipeline as an Estimator, wrapping it in a CrossValidator instance.
    // This will allow us to jointly choose parameters for all Pipeline stages.
    // A CrossValidator requires an Estimator, a set of Estimator ParamMaps, and an Evaluator.
    val cv = new CrossValidator().setEstimator(pipeline).setEvaluator(evaluator)
      .setEstimatorParamMaps(paramGrid).setNumFolds(3)
    val cvModel = cv.fit(trainingData)
    // save it to disk
    cvModel.write.overwrite.save("ml-pipeline-model")
    // read it off disk
    val loadedCvModel = CrossValidatorModel.load("ml-pipeline-model")
    val predictions = loadedCvModel.transform(testData)
    predictions.cache()
    val accuracyCrossFold = evaluator.evaluate(predictions)
    println(s"Cross-Fold Test Error = ${1.0 - accuracyCrossFold}")
    // TODO: remove - debug
    for (r <- predictions.select("id", labelField, PredictedLabelCol).sample(false, 0.1).collect) {
      println(s"${r(0)}: actual=${r(1)}, predicted=${r(2)}")
    }
    val metrics = new MulticlassMetrics(
      predictions
        .select(PredictionCol, LabelCol).rdd
        .map(r => (r.getDouble(0), r.getDouble(1))))
    // output the Confusion Matrix
    println(s"""Confusion Matrix
                |${metrics.confusionMatrix}\\n""".stripMargin)
    // compute the false positive rate per label
    println(s"""\\nF-Measure: ${metrics.fMeasure}
                |label\\tfpr\\n""".stripMargin)
    val labels = labelConverter.getLabels
    for (i <- labels.indices)
      println(s"${labels(i)}\\t${metrics.falsePositiveRate(i.toDouble)}")
    0
  }
}
// Companion object holding the column names, defaults, and Lucene analysis
// schemas shared by the ML pipeline example.
object MLPipelineScala {
// DataFrame column names used by the pipeline stages.
val LabelCol = "label"
val WordsCol = "words"
val PredictionCol = "prediction"
val FeaturesCol = "features"
val PredictedLabelCol = "predictedLabel"
// Default connection/query settings for the Solr-backed 20-newsgroups data.
val DefaultZkHost = "localhost:9983"
val DefaultQuery = "content_txt_en:[* TO *] AND newsgroup_s:[* TO *]"
val DefaultLabelField = "newsgroup_s"
val DefaultContentFields = "content_txt_en,Subject_txt_en"
val DefaultCollection = "ml20news"
val DefaultSample = "1.0"
// JSON analysis schema: whitespace tokenization applied to every field.
val WhitespaceTokSchema =
"""{ "analyzers": [{ "name": "ws_tok", "tokenizer": { "type": "whitespace" } }],
| "fields": [{ "regex": ".+", "analyzer": "ws_tok" }] }""".stripMargin
// JSON analysis schema: standard tokenization + lowercasing for every field.
val StdTokLowerSchema =
"""{ "analyzers": [{ "name": "std_tok_lower", "tokenizer": { "type": "standard" },
| "filters": [{ "type": "lowercase" }] }],
| "fields": [{ "regex": ".+", "analyzer": "std_tok_lower" }] }""".stripMargin
}
| LucidWorks/spark-solr | src/main/scala/com/lucidworks/spark/example/ml/MLPipelineScala.scala | Scala | apache-2.0 | 9,459 |
/*
* Copyright (c) <2012-2013>, Amanj Sherwany <http://www.amanj.me>
* All rights reserved.
* */
package ch.usi.inf.l3.moolc.evaluator
import _root_.ch.usi.inf.l3.moolc.ast._
/**
 * A constraint-based partial evaluator for MoolC programs.
 *
 * `start` walks every class of `pgm`, compile-time-evaluating expressions
 * with `feval` and partially evaluating the rest with `peval`
 * (currently an unimplemented stub — see below).
 */
class CBPEvaluator(pgm: Program) {
  // Counter for naming specialized static methods (reserved for `peval`).
  private var staticMethodID = 0
  // Toggles store-consistency checking (reserved for `peval`).
  private var doCheckStore = true
  // Bank of classes specialized during evaluation; drained in `start`.
  private val classBank = new ClassBank
  // Per-class banks of specialized methods, keyed by class name.
  private var methodBankMap: Map[String, MethodBank] = Map.empty

  /**
   * Partially evaluates the whole program and returns the residual
   * `Program`, including any specialized methods and classes generated
   * along the way.
   */
  def start = {
    val clazzes = for (clazz <- pgm.classes) yield {
      var store = addArgsToStore(clazz.body.vars)
      val cnstStore = addArgsToStore(clazz.args, store)
      val (const, _, constStore) = peval(clazz.body.const, cnstStore)
      // Fold what the constructor learned about the fields back into the
      // store used for evaluating methods.
      store = store.addEnvToSelf(clazz.body.vars, constStore)
      val methods = for (method <- clazz.body.methods) yield {
        val mstore = addArgsToStore(method.args, store)
        val mbody = peval(method.expr, mstore)._1
        MMethod(method.mod, method.name, method.tpe, method.args,
          method.pos, mbody)
      }
      // Pick up any methods specialized for this class during evaluation.
      val smethods = methodBankMap.get(clazz.name.name) match {
        case Some(bank) => bank.getSpecializedMethodsList
        case _ => Nil
      }
      MClass(clazz.name, clazz.args,
        MClassBody(clazz.body.vars, const, smethods ++ methods), clazz.pos)
    }
    Program(clazzes ++ classBank.getAllSpecializedClasses)
  }

  /**
   * Fully evaluates `expr` at compile time under `store`.
   *
   * Returns the resulting expression together with the (possibly updated)
   * store. Throws if `expr` contains constructs that cannot appear in a
   * compile-time context (Print, CT, RT, IsCT) or if a required value is
   * unknown.
   */
  private def feval(expr: Expression, store: ConstraintStore):
    (Expression, ConstraintStore) = {
    expr match {
      // Already-evaluated leaves pass through untouched.
      case x: Constant => (x, store)
      case obj: ObjectValue => (obj, store)
      case cname: ClassName => (cname, store)
      case Binary(op, x, y, _) =>
        val (x1, store1) = feval(x, store)
        val (y1, store2) = feval(y, store1)
        (x1, y1) match {
          case (Constant(a, _), Constant(b, _)) => (doOp(op, a, b), store2)
          // Operands not both constant: leave a residual binary expression.
          case _ => (Binary(op, x1, y1, NoPosition), store2)
        }
      case x: Var =>
        store.get(x) match {
          case Some(v: PEKnownValue) => (v.getExpr, store)
          case Some(Top) => throw new Error("You cannot put CT here")
          case _ => (x, store)
        }
      case Assign(lhs, rhs, _) =>
        val (r, store1) = feval(rhs, store)
        (r, store1.add(lhs, CTValue(r)))
      case Seq(e1, e2, _) =>
        // Sequence: evaluate e1 for its store effects only.
        val (_, store1) = feval(e1, store)
        feval(e2, store1)
      case Condition(c, t, f, _) =>
        // The condition must fold to a boolean constant at compile time.
        val (cond, condStore) = feval(c, store)
        cond match {
          case (Constant(MBool(true), _)) => feval(t, condStore)
          case (Constant(MBool(false), _)) => feval(f, condStore)
          case _ => throw new Error(c + " You should not reach here")
        }
      case While(c, b, _) =>
        // Unroll one iteration: while(c) b  ==  if(c) { b; while(c) b }
        feval(Condition(c, Seq(b, While(c, b, NoPosition),
          NoPosition), Empty, NoPosition), store)
      case StaticCall(clazz, mthd, argList, _) =>
        val args = fevalArgs(argList, store)
        val method = pgm.getClass(clazz).get.getMethod(mthd).get
        val argPairs = argList zip args
        val argParamPair = argPairs zip method.args
        val newStore = store.newStore(argParamPair)
        feval(method.expr, newStore)
      case DynamicCall(obj, mthd, argList, _) =>
        store.get(obj) match {
          case Some(CTValue(ObjectValue(mobj))) =>
            val args = fevalArgs(argList, store)
            val method = pgm.getClass(mobj.clazz).get.getMethod(mthd).get
            val argPairs = argList zip args
            val argParamPair = argPairs zip method.args
            // The receiver's own store accumulates the call's effects.
            mobj.store = mobj.store.addEnv(argParamPair, store)
            feval(method.expr, mobj.store)
          case _ => throw new Error("Object values does not exist")
        }
      case This(clazz, mthd, argList, _) =>
        val args = fevalArgs(argList, store)
        val method = pgm.getClass(clazz).get.getMethod(mthd).get
        val argPairs = argList zip args
        val argParamPair = argPairs zip method.args
        val newStore = store.newStore(argParamPair)
        feval(method.expr, newStore)
      case invoke @ Invoke(cname, mname, args, _) =>
        // Reflective invoke: if both class and method names fold to string
        // constants, rewrite it into an ordinary static call.
        val (c, store1) = feval(cname, store)
        val (m, store2) = feval(mname, store1)
        (c, m) match {
          case (Constant(x: MString, _), Constant(y: MString, _)) =>
            feval(StaticCall(ClassName(x.value, NoPosition), y.value,
              args, NoPosition), store2)
          case _ => (invoke, store2)
        }
      case New(cname, argList, _) =>
        val args = fevalArgs(argList, store)
        val clazz = pgm.getClass(cname).get
        val argPairs = argList zip args
        val argParamPair = argPairs zip clazz.args
        val newStore = store.newStore(argParamPair)
        val (_, constStore) = feval(clazz.body.const, newStore)
        // NOTE(review): `start` seeds from a fresh field store here the
        // receiver is `constStore` itself — presumably intentional since
        // constStore already contains the field bindings; confirm.
        val objStore =
          constStore.addEnvToSelf(clazz.body.vars, constStore)
        // The new object's store is private to it; the caller's store is
        // returned unchanged.
        (ObjectValue(MObject(cname, objStore)), store)
      case Return(expr, _) => feval(expr, store)
      case Empty | Semi => (Empty, store)
      case _ => throw new Error(expr +
        " Print, CT, RT, and IsCT cannot appear here")
    }
  }

  /**
   * Partial evaluation of `expr` under `store`.
   *
   * TODO: unimplemented stub — currently returns null, so callers
   * (e.g. `start`) will NPE when destructuring the result. Pre-existing
   * behavior kept intact.
   */
  def peval(expr: Expression, store: ConstraintStore):
    (Expression, PEValue, ConstraintStore) = {
    return null;
  }

  /** Applies `op` to two known values, folding to a constant when both are
   *  constants and building a residual binary expression otherwise. */
  private def doOp(op: Operation, s1: PEKnownValue, s2: PEKnownValue): Expression = {
    (s1.getExpr, s2.getExpr) match {
      case (v1: Constant, v2: Constant) =>
        doOp(op, v1.value, v2.value)
      case _ => Binary(op, s1.getExpr, s2.getExpr, NoPosition)
    }
  }

  /** Applies a primitive operation to two primitive values and wraps the
   *  result in a `Constant`. Throws on operand types the op doesn't support. */
  private def doOp(op: Operation, x: Premitive, y: Premitive): Constant = {
    val result = op match {
      case Add =>
        (x, y) match {
          case (MInt(a), MInt(b)) =>
            MInt(a + b)
          // `+` on strings (or mixed string/int) means concatenation.
          case (MString(_), MInt(_))
             | (MInt(_), MString(_))
             | (MString(_), MString(_)) =>
            MString("" + x.optionValue.get + y.optionValue.get)
          case _ => throw new Error("" + x + y + " Never happens")
        }
      case Sub =>
        (x, y) match {
          case (MInt(a), MInt(b)) => MInt(a - b)
          case _ => throw new Error("" + x + y + " Never happens")
        }
      case Mul =>
        (x, y) match {
          case (MInt(a), MInt(b)) => MInt(a * b)
          case _ => throw new Error("" + x + y + " Never happens")
        }
      case Div =>
        (x, y) match {
          case (MInt(a), MInt(b)) => MInt(a / b)
          case _ => throw new Error("" + x + y + " Never happens")
        }
      case Eq => MBool(x.optionValue.get == y.optionValue.get)
      case Neq => MBool(x.optionValue.get != y.optionValue.get)
      case Lt =>
        (x, y) match {
          case (MInt(a), MInt(b)) => MBool(a < b)
          case _ => throw new Error("" + x + y + " Never happens")
        }
      case Gt =>
        (x, y) match {
          case (MInt(a), MInt(b)) => MBool(a > b)
          case _ => throw new Error("" + x + y + " Never happens")
        }
      case Mod =>
        (x, y) match {
          case (MInt(a), MInt(b)) => MInt(a % b)
          case _ => throw new Error("" + x + y + " Never happens")
        }
    }
    Constant(result, NoPosition)
  }

  /** Compile-time-evaluates every argument expression, wrapping each result
   *  as a `CTValue`. Store effects of argument evaluation are discarded. */
  private def fevalArgs(args: List[Expression], store: ConstraintStore):
    List[CTValue] = {
    args.map(arg => CTValue(feval(arg, store)._1))
  }

  /** Binds every variable in `args` to `Bottom` (unknown) in `store`.
   *
   *  BUG FIX: the previous implementation did `temp = store.add(...)` inside
   *  the loop, re-adding each variable to the *original* store, so only the
   *  last argument survived. Fold over the list threading the accumulator
   *  instead.
   */
  private def addArgsToStore(args: List[Var],
    store: ConstraintStore = new ConstraintStore()
    ): ConstraintStore = {
    args.foldLeft(store)((acc, arg) => acc.add(arg, Bottom))
  }
}
} | amanjpro/mool-compiler | src/main/scala/ch/usi/inf/l3/moolc/eval/CBPEvaluator.scala | Scala | bsd-3-clause | 7,132 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import java.sql.Timestamp
import org.apache.spark.sql.catalyst.analysis.AnsiTypeCoercion._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
class AnsiTypeCoercionSuite extends AnalysisTest {
import TypeCoercionSuite._
// scalastyle:off line.size.limit
// The following table shows all implicit data type conversions that are not visible to the user.
// +----------------------+----------+-----------+-------------+----------+------------+------------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// | Source Type\\CAST TO | ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType | Dec(10, 2) | BinaryType | BooleanType | StringType | DateType | TimestampType | ArrayType | MapType | StructType | NullType | CalendarIntervalType | DecimalType | NumericType | IntegralType |
// +----------------------+----------+-----------+-------------+----------+------------+------------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// | ByteType | ByteType | ShortType | IntegerType | LongType | DoubleType | DoubleType | Dec(10, 2) | X | X | X | X | X | X | X | X | X | X | DecimalType(3, 0) | ByteType | ByteType |
// | ShortType | X | ShortType | IntegerType | LongType | DoubleType | DoubleType | Dec(10, 2) | X | X | X | X | X | X | X | X | X | X | DecimalType(5, 0) | ShortType | ShortType |
// | IntegerType | X | X | IntegerType | LongType | DoubleType | DoubleType | X | X | X | X | X | X | X | X | X | X | X | DecimalType(10, 0) | IntegerType | IntegerType |
// | LongType | X | X | X | LongType | DoubleType | DoubleType | X | X | X | X | X | X | X | X | X | X | X | DecimalType(20, 0) | LongType | LongType |
// | FloatType | X | X | X | X | FloatType | DoubleType | X | X | X | X | X | X | X | X | X | X | X | DecimalType(30, 15) | DoubleType | X |
// | DoubleType | X | X | X | X | X | DoubleType | X | X | X | X | X | X | X | X | X | X | X | DecimalType(14, 7) | FloatType | X |
// | Dec(10, 2) | X | X | X | X | DoubleType | DoubleType | Dec(10, 2) | X | X | X | X | X | X | X | X | X | X | DecimalType(10, 2) | Dec(10, 2) | X |
// | BinaryType | X | X | X | X | X | X | X | BinaryType | X | X | X | X | X | X | X | X | X | X | X | X |
// | BooleanType | X | X | X | X | X | X | X | X | BooleanType | X | X | X | X | X | X | X | X | X | X | X |
// | StringType | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
// | DateType | X | X | X | X | X | X | X | X | X | X | DateType | TimestampType | X | X | X | X | X | X | X | X |
// | TimestampType | X | X | X | X | X | X | X | X | X | X | X | TimestampType | X | X | X | X | X | X | X | X |
// | ArrayType | X | X | X | X | X | X | X | X | X | X | X | X | ArrayType* | X | X | X | X | X | X | X |
// | MapType | X | X | X | X | X | X | X | X | X | X | X | X | X | MapType* | X | X | X | X | X | X |
// | StructType | X | X | X | X | X | X | X | X | X | X | X | X | X | X | StructType* | X | X | X | X | X |
// | NullType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | BooleanType | StringType | DateType | TimestampType | ArrayType | MapType | StructType | NullType | CalendarIntervalType | DecimalType(38, 18) | DoubleType | IntegerType |
// | CalendarIntervalType | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | CalendarIntervalType | X | X | X |
// +----------------------+----------+-----------+-------------+----------+------------+------------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// Note: StructType* is castable when all the internal child types are castable according to the table.
// Note: ArrayType* is castable when the element type is castable according to the table.
// Note: MapType* is castable when both the key type and the value type are castable according to the table.
// scalastyle:on line.size.limit
/** Asserts that both a default-valued and a null-valued expression of type
 *  `from` are implicitly castable to `to`, producing `expected`. */
private def shouldCast(from: DataType, to: AbstractDataType, expected: DataType): Unit = {
  def resultTypeOf(input: Expression): DataType =
    AnsiTypeCoercion.implicitCast(input, to).map(_.dataType).orNull
  // A non-null (default) value must be castable...
  assert(DataType.equalsIgnoreCompatibleNullability(
    resultTypeOf(default(from)), expected),
    s"Failed to cast $from to $to")
  // ...and so must a null value of the same type.
  assert(DataType.equalsIgnoreCaseAndNullability(
    resultTypeOf(createNull(from)), expected),
    s"Failed to cast $from to $to")
}
/** Asserts that neither a default nor a null value of `from` can be
 *  implicitly cast to `to`. */
private def shouldNotCast(from: DataType, to: AbstractDataType): Unit = {
  // Check both a populated (default) value and a null value.
  Seq(default(from), createNull(from)).foreach { input =>
    val result = AnsiTypeCoercion.implicitCast(input, to)
    assert(result.isEmpty, s"Should not be able to cast $from to $to, but got $result")
  }
}
/** Asserts that the foldable string literal "123" is implicitly castable
 *  to `to`, yielding `expected`. */
private def shouldCastStringLiteral(to: AbstractDataType, expected: DataType): Unit = {
  val resultType =
    AnsiTypeCoercion.implicitCast(Literal("123"), to).map(_.dataType).orNull
  assert(DataType.equalsIgnoreCaseAndNullability(resultType, expected),
    s"Failed to cast String literal to $to")
}
/** Asserts that the foldable string literal "123" is NOT implicitly
 *  castable to `to`. */
private def shouldNotCastStringLiteral(to: AbstractDataType): Unit = {
  val outcome = AnsiTypeCoercion.implicitCast(Literal("123"), to)
  assert(outcome.isEmpty, s"Should not be able to cast String literal to $to")
}
/** Asserts that a non-foldable string attribute is NOT implicitly
 *  castable to `to` (ANSI mode only auto-casts string *literals*). */
private def shouldNotCastStringInput(to: AbstractDataType): Unit = {
  val attr = AttributeReference("s", StringType)()
  val outcome = AnsiTypeCoercion.implicitCast(attr, to)
  assert(outcome.isEmpty, s"Should not be able to cast non-foldable String input to $to")
}
/** Builds a sample expression of `dataType` populated with default
 *  (non-null) values; arrays/maps get one default element/entry. */
private def default(dataType: DataType): Expression = dataType match {
  case ArrayType(elementType, _) =>
    CreateArray(Literal.default(elementType) :: Nil)
  case MapType(keyType, valueType, _) =>
    CreateMap(Literal.default(keyType) :: Literal.default(valueType) :: Nil)
  case other => Literal.default(other)
}
/** Builds a sample expression of `dataType` populated with null values;
 *  arrays/maps get one null element/entry of the right type. */
private def createNull(dataType: DataType): Expression = dataType match {
  case ArrayType(elementType, _) =>
    CreateArray(Literal.create(null, elementType) :: Nil)
  case MapType(keyType, valueType, _) =>
    CreateMap(Literal.create(null, keyType) :: Literal.create(null, valueType) :: Nil)
  case other => Literal.create(null, other)
}
// Checks that `checkedType` can be cast to every type in `castableTypes`
// and to none of the remaining types in `allTypes`.
private def checkTypeCasting(checkedType: DataType, castableTypes: Seq[DataType]): Unit = {
  for (target <- castableTypes) shouldCast(checkedType, target, target)
  for (target <- allTypes if !castableTypes.contains(target)) {
    shouldNotCast(checkedType, target)
  }
}
/** Verifies `widenFunc(t1, t2) == expected`, and — unless `isSymmetric`
 *  is false — that the reversed order produces the same result. */
private def checkWidenType(
    widenFunc: (DataType, DataType) => Option[DataType],
    t1: DataType,
    t2: DataType,
    expected: Option[DataType],
    isSymmetric: Boolean = true): Unit = {
  def verify(a: DataType, b: DataType): Unit = {
    val actual = widenFunc(a, b)
    assert(actual == expected,
      s"Expected $expected as wider common type for $a and $b, found $actual")
  }
  verify(t1, t2)
  // Widening should normally not depend on argument order.
  if (isSymmetric) {
    verify(t2, t1)
  }
}
test("implicit type cast - ByteType") {
val checkedType = ByteType
checkTypeCasting(checkedType, castableTypes = numericTypes)
shouldCast(checkedType, DecimalType, DecimalType.ByteDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldCast(checkedType, IntegralType, checkedType)
}
test("implicit type cast - ShortType") {
val checkedType = ShortType
checkTypeCasting(checkedType, castableTypes = numericTypes.filterNot(_ == ByteType))
shouldCast(checkedType, DecimalType, DecimalType.ShortDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldCast(checkedType, IntegralType, checkedType)
}
test("implicit type cast - IntegerType") {
val checkedType = IntegerType
checkTypeCasting(checkedType, castableTypes =
Seq(IntegerType, LongType, FloatType, DoubleType, DecimalType.SYSTEM_DEFAULT))
shouldCast(IntegerType, DecimalType, DecimalType.IntDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldCast(checkedType, IntegralType, checkedType)
}
test("implicit type cast - LongType") {
val checkedType = LongType
checkTypeCasting(checkedType, castableTypes =
Seq(LongType, FloatType, DoubleType, DecimalType.SYSTEM_DEFAULT))
shouldCast(checkedType, DecimalType, DecimalType.LongDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldCast(checkedType, IntegralType, checkedType)
}
test("implicit type cast - FloatType") {
val checkedType = FloatType
checkTypeCasting(checkedType, castableTypes = Seq(FloatType, DoubleType))
shouldCast(checkedType, DecimalType, DecimalType.FloatDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - DoubleType") {
val checkedType = DoubleType
checkTypeCasting(checkedType, castableTypes = Seq(DoubleType))
shouldCast(checkedType, DecimalType, DecimalType.DoubleDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - DecimalType(10, 2)") {
val checkedType = DecimalType(10, 2)
checkTypeCasting(checkedType, castableTypes = fractionalTypes)
shouldCast(checkedType, DecimalType, checkedType)
shouldCast(checkedType, NumericType, checkedType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - BinaryType") {
val checkedType = BinaryType
checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - BooleanType") {
val checkedType = BooleanType
checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
shouldNotCast(checkedType, StringType)
}
test("implicit type cast - unfoldable StringType") {
val nonCastableTypes = allTypes.filterNot(_ == StringType)
nonCastableTypes.foreach { dt =>
shouldNotCastStringInput(dt)
}
shouldNotCastStringInput(DecimalType)
shouldNotCastStringInput(NumericType)
}
test("implicit type cast - foldable StringType") {
atomicTypes.foreach { dt =>
shouldCastStringLiteral(dt, dt)
}
allTypes.filterNot(atomicTypes.contains).foreach { dt =>
shouldNotCastStringLiteral(dt)
}
shouldCastStringLiteral(DecimalType, DecimalType.defaultConcreteType)
shouldCastStringLiteral(NumericType, DoubleType)
}
test("implicit type cast - DateType") {
val checkedType = DateType
checkTypeCasting(checkedType, castableTypes = Seq(checkedType, TimestampType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
shouldNotCast(checkedType, StringType)
}
test("implicit type cast - TimestampType") {
val checkedType = TimestampType
checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - unfoldable ArrayType(StringType)") {
val input = AttributeReference("a", ArrayType(StringType))()
val nonCastableTypes = allTypes.filterNot(_ == StringType)
nonCastableTypes.map(ArrayType(_)).foreach { dt =>
assert(AnsiTypeCoercion.implicitCast(input, dt).isEmpty)
}
assert(AnsiTypeCoercion.implicitCast(input, DecimalType).isEmpty)
assert(AnsiTypeCoercion.implicitCast(input, NumericType).isEmpty)
}
test("implicit type cast - foldable arrayType(StringType)") {
val input = Literal(Array("1"))
assert(AnsiTypeCoercion.implicitCast(input, ArrayType(StringType)) == Some(input))
(numericTypes ++ datetimeTypes ++ Seq(BinaryType)).foreach { dt =>
assert(AnsiTypeCoercion.implicitCast(input, ArrayType(dt)) ==
Some(Cast(input, ArrayType(dt))))
}
}
test("implicit type cast between two Map types") {
val sourceType = MapType(IntegerType, IntegerType, true)
val castableTypes =
Seq(IntegerType, LongType, FloatType, DoubleType, DecimalType.SYSTEM_DEFAULT)
val targetTypes = castableTypes.map { t =>
MapType(t, sourceType.valueType, valueContainsNull = true)
}
val nonCastableTargetTypes = allTypes.filterNot(castableTypes.contains(_)).map {t =>
MapType(t, sourceType.valueType, valueContainsNull = true)
}
// Tests that its possible to setup implicit casts between two map types when
// source map's key type is integer and the target map's key type are either Byte, Short,
// Long, Double, Float, Decimal(38, 18) or String.
targetTypes.foreach { targetType =>
shouldCast(sourceType, targetType, targetType)
}
// Tests that its not possible to setup implicit casts between two map types when
// source map's key type is integer and the target map's key type are either Binary,
// Boolean, Date, Timestamp, Array, Struct, CalendarIntervalType or NullType
nonCastableTargetTypes.foreach { targetType =>
shouldNotCast(sourceType, targetType)
}
// Tests that its not possible to cast from nullable map type to not nullable map type.
val targetNotNullableTypes = allTypes.filterNot(_ == IntegerType).map { t =>
MapType(t, sourceType.valueType, valueContainsNull = false)
}
val sourceMapExprWithValueNull =
CreateMap(Seq(Literal.default(sourceType.keyType),
Literal.create(null, sourceType.valueType)))
targetNotNullableTypes.foreach { targetType =>
val castDefault =
AnsiTypeCoercion.implicitCast(sourceMapExprWithValueNull, targetType)
assert(castDefault.isEmpty,
s"Should not be able to cast $sourceType to $targetType, but got $castDefault")
}
}
test("implicit type cast - StructType().add(\\"a1\\", StringType)") {
val checkedType = new StructType().add("a1", StringType)
checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - NullType") {
val checkedType = NullType
checkTypeCasting(checkedType, castableTypes = allTypes)
shouldCast(checkedType, DecimalType, DecimalType.SYSTEM_DEFAULT)
shouldCast(checkedType, NumericType, NumericType.defaultConcreteType)
shouldCast(checkedType, IntegralType, IntegralType.defaultConcreteType)
}
test("implicit type cast - CalendarIntervalType") {
val checkedType = CalendarIntervalType
checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("eligible implicit type cast - TypeCollection") {
shouldCast(StringType, TypeCollection(StringType, BinaryType), StringType)
shouldCast(BinaryType, TypeCollection(StringType, BinaryType), BinaryType)
shouldCast(StringType, TypeCollection(BinaryType, StringType), StringType)
shouldCast(IntegerType, TypeCollection(IntegerType, BinaryType), IntegerType)
shouldCast(IntegerType, TypeCollection(BinaryType, IntegerType), IntegerType)
shouldCast(BinaryType, TypeCollection(BinaryType, IntegerType), BinaryType)
shouldCast(BinaryType, TypeCollection(IntegerType, BinaryType), BinaryType)
shouldCast(DecimalType.SYSTEM_DEFAULT,
TypeCollection(IntegerType, DecimalType), DecimalType.SYSTEM_DEFAULT)
shouldCast(DecimalType(10, 2), TypeCollection(IntegerType, DecimalType), DecimalType(10, 2))
shouldCast(DecimalType(10, 2), TypeCollection(DecimalType, IntegerType), DecimalType(10, 2))
shouldCast(
ArrayType(StringType, false),
TypeCollection(ArrayType(StringType), StringType),
ArrayType(StringType, false))
shouldCast(
ArrayType(StringType, true),
TypeCollection(ArrayType(StringType), StringType),
ArrayType(StringType, true))
// When there are multiple convertible types in the `TypeCollection`, use the closest
// convertible data type among convertible types.
shouldCast(IntegerType, TypeCollection(BinaryType, FloatType, LongType), LongType)
shouldCast(ShortType, TypeCollection(BinaryType, LongType, IntegerType), IntegerType)
shouldCast(ShortType, TypeCollection(DateType, LongType, IntegerType, DoubleType), IntegerType)
// If the result is Float type and Double type is also among the convertible target types,
// use Double Type instead of Float type.
shouldCast(LongType, TypeCollection(FloatType, DoubleType, StringType), DoubleType)
}
test("ineligible implicit type cast - TypeCollection") {
shouldNotCast(IntegerType, TypeCollection(StringType, BinaryType))
shouldNotCast(IntegerType, TypeCollection(BinaryType, StringType))
shouldNotCast(IntegerType, TypeCollection(DateType, TimestampType))
shouldNotCast(IntegerType, TypeCollection(DecimalType(10, 2), StringType))
shouldNotCastStringInput(TypeCollection(NumericType, BinaryType))
// When there are multiple convertible types in the `TypeCollection` and there is no such
// a data type that can be implicit cast to all the other convertible types in the collection.
Seq(TypeCollection(NumericType, BinaryType),
TypeCollection(NumericType, DecimalType, BinaryType),
TypeCollection(IntegerType, LongType, BooleanType),
TypeCollection(DateType, TimestampType, BooleanType)).foreach { typeCollection =>
shouldNotCastStringLiteral(typeCollection)
shouldNotCast(NullType, typeCollection)
}
}
test("tightest common bound for types") {
def widenTest(t1: DataType, t2: DataType, expected: Option[DataType]): Unit =
checkWidenType(AnsiTypeCoercion.findTightestCommonType, t1, t2, expected)
// Null
widenTest(NullType, NullType, Some(NullType))
// Boolean
widenTest(NullType, BooleanType, Some(BooleanType))
widenTest(BooleanType, BooleanType, Some(BooleanType))
widenTest(IntegerType, BooleanType, None)
widenTest(LongType, BooleanType, None)
// Integral
widenTest(NullType, ByteType, Some(ByteType))
widenTest(NullType, IntegerType, Some(IntegerType))
widenTest(NullType, LongType, Some(LongType))
widenTest(ShortType, IntegerType, Some(IntegerType))
widenTest(ShortType, LongType, Some(LongType))
widenTest(IntegerType, LongType, Some(LongType))
widenTest(LongType, LongType, Some(LongType))
// Floating point
widenTest(NullType, FloatType, Some(FloatType))
widenTest(NullType, DoubleType, Some(DoubleType))
widenTest(FloatType, DoubleType, Some(DoubleType))
widenTest(FloatType, FloatType, Some(FloatType))
widenTest(DoubleType, DoubleType, Some(DoubleType))
// Integral mixed with floating point.
widenTest(IntegerType, FloatType, Some(DoubleType))
widenTest(IntegerType, DoubleType, Some(DoubleType))
widenTest(IntegerType, DoubleType, Some(DoubleType))
widenTest(LongType, FloatType, Some(DoubleType))
widenTest(LongType, DoubleType, Some(DoubleType))
widenTest(DecimalType(2, 1), DecimalType(3, 2), None)
widenTest(DecimalType(2, 1), DoubleType, None)
widenTest(DecimalType(2, 1), IntegerType, None)
widenTest(DoubleType, DecimalType(2, 1), None)
// StringType
widenTest(NullType, StringType, Some(StringType))
widenTest(StringType, StringType, Some(StringType))
widenTest(IntegerType, StringType, None)
widenTest(LongType, StringType, None)
// TimestampType
widenTest(NullType, TimestampType, Some(TimestampType))
widenTest(TimestampType, TimestampType, Some(TimestampType))
widenTest(DateType, TimestampType, Some(TimestampType))
widenTest(IntegerType, TimestampType, None)
widenTest(StringType, TimestampType, None)
// ComplexType
widenTest(NullType,
MapType(IntegerType, StringType, false),
Some(MapType(IntegerType, StringType, false)))
widenTest(NullType, StructType(Seq()), Some(StructType(Seq())))
widenTest(StringType, MapType(IntegerType, StringType, true), None)
widenTest(ArrayType(IntegerType), StructType(Seq()), None)
widenTest(
StructType(Seq(StructField("a", IntegerType))),
StructType(Seq(StructField("b", IntegerType))),
None)
widenTest(
StructType(Seq(StructField("a", IntegerType, nullable = false))),
StructType(Seq(StructField("a", DoubleType, nullable = false))),
Some(StructType(Seq(StructField("a", DoubleType, nullable = false)))))
widenTest(
StructType(Seq(StructField("a", IntegerType, nullable = false))),
StructType(Seq(StructField("a", IntegerType, nullable = false))),
Some(StructType(Seq(StructField("a", IntegerType, nullable = false)))))
widenTest(
StructType(Seq(StructField("a", IntegerType, nullable = false))),
StructType(Seq(StructField("a", IntegerType, nullable = true))),
Some(StructType(Seq(StructField("a", IntegerType, nullable = true)))))
widenTest(
StructType(Seq(StructField("a", IntegerType, nullable = true))),
StructType(Seq(StructField("a", IntegerType, nullable = false))),
Some(StructType(Seq(StructField("a", IntegerType, nullable = true)))))
widenTest(
StructType(Seq(StructField("a", IntegerType, nullable = true))),
StructType(Seq(StructField("a", IntegerType, nullable = true))),
Some(StructType(Seq(StructField("a", IntegerType, nullable = true)))))
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
widenTest(
StructType(Seq(StructField("a", IntegerType))),
StructType(Seq(StructField("A", IntegerType))),
None)
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
checkWidenType(
AnsiTypeCoercion.findTightestCommonType,
StructType(Seq(StructField("a", IntegerType), StructField("B", IntegerType))),
StructType(Seq(StructField("A", IntegerType), StructField("b", IntegerType))),
Some(StructType(Seq(StructField("a", IntegerType), StructField("B", IntegerType)))),
isSymmetric = false)
}
widenTest(
ArrayType(IntegerType, containsNull = true),
ArrayType(IntegerType, containsNull = false),
Some(ArrayType(IntegerType, containsNull = true)))
widenTest(
ArrayType(NullType, containsNull = true),
ArrayType(IntegerType, containsNull = false),
Some(ArrayType(IntegerType, containsNull = true)))
widenTest(
MapType(IntegerType, StringType, valueContainsNull = true),
MapType(IntegerType, StringType, valueContainsNull = false),
Some(MapType(IntegerType, StringType, valueContainsNull = true)))
widenTest(
MapType(NullType, NullType, true),
MapType(IntegerType, StringType, false),
Some(MapType(IntegerType, StringType, true)))
widenTest(
new StructType()
.add("arr", ArrayType(IntegerType, containsNull = true), nullable = false),
new StructType()
.add("arr", ArrayType(IntegerType, containsNull = false), nullable = true),
Some(new StructType()
.add("arr", ArrayType(IntegerType, containsNull = true), nullable = true)))
widenTest(
new StructType()
.add("null", NullType, nullable = true),
new StructType()
.add("null", IntegerType, nullable = false),
Some(new StructType()
.add("null", IntegerType, nullable = true)))
widenTest(
ArrayType(NullType, containsNull = false),
ArrayType(IntegerType, containsNull = false),
Some(ArrayType(IntegerType, containsNull = false)))
widenTest(MapType(NullType, NullType, false),
MapType(IntegerType, StringType, false),
Some(MapType(IntegerType, StringType, false)))
widenTest(
new StructType()
.add("null", NullType, nullable = false),
new StructType()
.add("null", IntegerType, nullable = false),
Some(new StructType()
.add("null", IntegerType, nullable = false)))
}
  // NOTE(review): the test name mentions "decimal and array" but the cases below exercise
  // findWiderTypeWithoutStringPromotionForTwo on structs/maps/arrays — confirm intended name.
  test("wider common type for decimal and array") {
    // Helper: widening must NOT fall back to string promotion for these pairs.
    def widenTestWithoutStringPromotion(
        t1: DataType,
        t2: DataType,
        expected: Option[DataType],
        isSymmetric: Boolean = true): Unit = {
      checkWidenType(
        AnsiTypeCoercion.findWiderTypeWithoutStringPromotionForTwo, t1, t2, expected, isSymmetric)
    }

    // Structs with different field sets have no wider common type.
    widenTestWithoutStringPromotion(
      new StructType().add("num", IntegerType),
      new StructType().add("num", LongType).add("str", StringType),
      None)
    // With case-insensitive resolution, fields differing only in case are merged;
    // the result keeps the left side's field name, so the check is asymmetric.
    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
      widenTestWithoutStringPromotion(
        new StructType().add("a", IntegerType),
        new StructType().add("A", LongType),
        Some(new StructType().add("a", LongType)),
        isSymmetric = false)
    }

    // Without string promotion
    widenTestWithoutStringPromotion(IntegerType, StringType, None)
    widenTestWithoutStringPromotion(StringType, TimestampType, None)
    widenTestWithoutStringPromotion(ArrayType(LongType), ArrayType(StringType), None)
    widenTestWithoutStringPromotion(ArrayType(StringType), ArrayType(TimestampType), None)
    widenTestWithoutStringPromotion(
      MapType(LongType, IntegerType), MapType(StringType, IntegerType), None)
    widenTestWithoutStringPromotion(
      MapType(IntegerType, LongType), MapType(IntegerType, StringType), None)
    widenTestWithoutStringPromotion(
      MapType(StringType, IntegerType), MapType(TimestampType, IntegerType), None)
    widenTestWithoutStringPromotion(
      MapType(IntegerType, StringType), MapType(IntegerType, TimestampType), None)
    widenTestWithoutStringPromotion(
      new StructType().add("a", IntegerType),
      new StructType().add("a", StringType),
      None)
    widenTestWithoutStringPromotion(
      new StructType().add("a", StringType),
      new StructType().add("a", IntegerType),
      None)
  }
private def ruleTest(rule: Rule[LogicalPlan],
initial: Expression, transformed: Expression): Unit = {
ruleTest(Seq(rule), initial, transformed)
}
private def ruleTest(
rules: Seq[Rule[LogicalPlan]],
initial: Expression,
transformed: Expression): Unit = {
val testRelation = LocalRelation(AttributeReference("a", IntegerType)())
val analyzer = new RuleExecutor[LogicalPlan] {
override val batches = Seq(Batch("Resolution", FixedPoint(3), rules: _*))
}
comparePlans(
analyzer.execute(Project(Seq(Alias(initial, "a")()), testRelation)),
Project(Seq(Alias(transformed, "a")()), testRelation))
}
  // ImplicitTypeCasts on NullType inputs: AnyType-accepting expressions keep NullType as-is;
  // NumericType-expecting expressions get NullType cast to DoubleType.
  test("cast NullType for expressions that implement ExpectsInputTypes") {
    ruleTest(AnsiTypeCoercion.ImplicitTypeCasts,
      AnyTypeUnaryExpression(Literal.create(null, NullType)),
      AnyTypeUnaryExpression(Literal.create(null, NullType)))

    ruleTest(AnsiTypeCoercion.ImplicitTypeCasts,
      NumericTypeUnaryExpression(Literal.create(null, NullType)),
      NumericTypeUnaryExpression(Literal.create(null, DoubleType)))
  }

  // Same behavior for binary operators: both NullType operands are widened together.
  test("cast NullType for binary operators") {
    ruleTest(AnsiTypeCoercion.ImplicitTypeCasts,
      AnyTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)),
      AnyTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)))

    ruleTest(AnsiTypeCoercion.ImplicitTypeCasts,
      NumericTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)),
      NumericTypeBinaryOperator(Literal.create(null, DoubleType), Literal.create(null, DoubleType)))
  }
  // FunctionArgumentConversion widens all Coalesce arguments to a single common type and
  // leaves the expression untouched when no common type exists (ANSI mode does not
  // implicitly promote to string).
  test("coalesce casts") {
    val rule = AnsiTypeCoercion.FunctionArgumentConversion

    val intLit = Literal(1)
    val longLit = Literal.create(1L)
    val doubleLit = Literal(1.0)
    val stringLit = Literal.create("c", StringType)
    val nullLit = Literal.create(null, NullType)
    val floatNullLit = Literal.create(null, FloatType)
    val floatLit = Literal.create(1.0f, FloatType)
    val doubleNullLit = Cast(floatNullLit, DoubleType)
    val timestampLit = Literal.create(Timestamp.valueOf("2017-04-12 00:00:00"), TimestampType)
    val decimalLit = Literal(new java.math.BigDecimal("1000000000000000000000"))
    val tsArrayLit = Literal(Array(new Timestamp(System.currentTimeMillis())))
    val strArrayLit = Literal(Array("c"))
    val intArrayLit = Literal(Array(1))

    ruleTest(rule,
      Coalesce(Seq(doubleLit, intLit, floatLit)),
      Coalesce(Seq(doubleLit, Cast(intLit, DoubleType), Cast(floatLit, DoubleType))))

    ruleTest(rule,
      Coalesce(Seq(longLit, intLit, decimalLit)),
      Coalesce(Seq(Cast(longLit, DecimalType(22, 0)),
        Cast(intLit, DecimalType(22, 0)), decimalLit)))

    ruleTest(rule,
      Coalesce(Seq(nullLit, intLit)),
      Coalesce(Seq(Cast(nullLit, IntegerType), intLit)))

    ruleTest(rule,
      Coalesce(Seq(timestampLit, stringLit)),
      Coalesce(Seq(timestampLit, stringLit)))

    ruleTest(rule,
      Coalesce(Seq(nullLit, floatNullLit, intLit)),
      Coalesce(Seq(Cast(nullLit, DoubleType), doubleNullLit, Cast(intLit, DoubleType))))

    ruleTest(rule,
      Coalesce(Seq(nullLit, intLit, decimalLit, doubleLit)),
      Coalesce(Seq(Cast(nullLit, DoubleType), Cast(intLit, DoubleType),
        Cast(decimalLit, DoubleType), doubleLit)))

    // There is no common type among Float/Double/String
    ruleTest(rule,
      Coalesce(Seq(nullLit, floatNullLit, doubleLit, stringLit)),
      Coalesce(Seq(nullLit, floatNullLit, doubleLit, stringLit)))

    // There is no common type among Timestamp/Int/String
    ruleTest(rule,
      Coalesce(Seq(timestampLit, intLit, stringLit)),
      Coalesce(Seq(timestampLit, intLit, stringLit)))

    ruleTest(rule,
      Coalesce(Seq(tsArrayLit, intArrayLit, strArrayLit)),
      Coalesce(Seq(tsArrayLit, intArrayLit, strArrayLit)))
  }
  // CreateArray elements are widened to a common type; mixed numeric/string stays
  // unchanged (no string promotion), and decimals widen to a precision that fits all.
  test("CreateArray casts") {
    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal(1.0)
        :: Literal(1)
        :: Literal.create(1.0f, FloatType)
        :: Nil),
      CreateArray(Literal(1.0)
        :: Cast(Literal(1), DoubleType)
        :: Cast(Literal.create(1.0f, FloatType), DoubleType)
        :: Nil))

    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal(1.0)
        :: Literal(1)
        :: Literal("a")
        :: Nil),
      CreateArray(Literal(1.0)
        :: Literal(1)
        :: Literal("a")
        :: Nil))

    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal.create(null, DecimalType(5, 3))
        :: Literal(1)
        :: Nil),
      CreateArray(Literal.create(null, DecimalType(5, 3)).cast(DecimalType(13, 3))
        :: Literal(1).cast(DecimalType(13, 3))
        :: Nil))

    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal.create(null, DecimalType(5, 3))
        :: Literal.create(null, DecimalType(22, 10))
        :: Literal.create(null, DecimalType(38, 38))
        :: Nil),
      CreateArray(Literal.create(null, DecimalType(5, 3)).cast(DecimalType(38, 38))
        :: Literal.create(null, DecimalType(22, 10)).cast(DecimalType(38, 38))
        :: Literal.create(null, DecimalType(38, 38))
        :: Nil))
  }
  // CreateMap takes alternating key/value arguments; keys and values are widened
  // independently to their respective common types.
  test("CreateMap casts") {
    // type coercion for map keys
    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal("a")
        :: Literal.create(2.0f, FloatType)
        :: Literal("b")
        :: Nil),
      CreateMap(Cast(Literal(1), DoubleType)
        :: Literal("a")
        :: Cast(Literal.create(2.0f, FloatType), DoubleType)
        :: Literal("b")
        :: Nil))
    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal.create(null, DecimalType(5, 3))
        :: Literal("a")
        :: Literal.create(2.0f, FloatType)
        :: Literal("b")
        :: Nil),
      CreateMap(Literal.create(null, DecimalType(5, 3)).cast(DoubleType)
        :: Literal("a")
        :: Literal.create(2.0f, FloatType).cast(DoubleType)
        :: Literal("b")
        :: Nil))
    // type coercion for map values
    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal("a")
        :: Literal(2)
        :: Literal(3.0)
        :: Nil),
      CreateMap(Literal(1)
        :: Literal("a")
        :: Literal(2)
        :: Literal(3.0)
        :: Nil))
    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal.create(null, DecimalType(38, 0))
        :: Literal(2)
        :: Literal.create(null, DecimalType(38, 38))
        :: Nil),
      CreateMap(Literal(1)
        :: Literal.create(null, DecimalType(38, 0)).cast(DecimalType(38, 38))
        :: Literal(2)
        :: Literal.create(null, DecimalType(38, 38))
        :: Nil))
    // type coercion for both map keys and values
    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal("a")
        :: Literal(2.0)
        :: Literal(3.0)
        :: Nil),
      CreateMap(Cast(Literal(1), DoubleType)
        :: Literal("a")
        :: Literal(2.0)
        :: Literal(3.0)
        :: Nil))
  }
  // Greatest/Least share FunctionArgumentConversion behavior: all operands are widened
  // to one common numeric/decimal type before comparison.
  test("greatest/least cast") {
    for (operator <- Seq[(Seq[Expression] => Expression)](Greatest, Least)) {
      ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
        operator(Literal(1.0)
          :: Literal(1)
          :: Literal.create(1.0f, FloatType)
          :: Nil),
        operator(Literal(1.0)
          :: Cast(Literal(1), DoubleType)
          :: Cast(Literal.create(1.0f, FloatType), DoubleType)
          :: Nil))
      ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
        operator(Literal(1L)
          :: Literal(1)
          :: Literal(new java.math.BigDecimal("1000000000000000000000"))
          :: Nil),
        operator(Cast(Literal(1L), DecimalType(22, 0))
          :: Cast(Literal(1), DecimalType(22, 0))
          :: Literal(new java.math.BigDecimal("1000000000000000000000"))
          :: Nil))
      ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
        operator(Literal(1.0)
          :: Literal.create(null, DecimalType(10, 5))
          :: Literal(1)
          :: Nil),
        operator(Literal(1.0)
          :: Literal.create(null, DecimalType(10, 5)).cast(DoubleType)
          :: Literal(1).cast(DoubleType)
          :: Nil))
      ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
        operator(Literal.create(null, DecimalType(15, 0))
          :: Literal.create(null, DecimalType(10, 5))
          :: Literal(1)
          :: Nil),
        operator(Literal.create(null, DecimalType(15, 0)).cast(DecimalType(20, 5))
          :: Literal.create(null, DecimalType(10, 5)).cast(DecimalType(20, 5))
          :: Literal(1).cast(DecimalType(20, 5))
          :: Nil))
      ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
        operator(Literal.create(2L, LongType)
          :: Literal(1)
          :: Literal.create(null, DecimalType(10, 5))
          :: Nil),
        operator(Literal.create(2L, LongType).cast(DecimalType(25, 5))
          :: Literal(1).cast(DecimalType(25, 5))
          :: Literal.create(null, DecimalType(10, 5)).cast(DecimalType(25, 5))
          :: Nil))
    }
  }
  // NaNvl operands are unified: Float widens to Double when mixed, and NullType takes
  // the other operand's type.
  test("nanvl casts") {
    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      NaNvl(Literal.create(1.0f, FloatType), Literal.create(1.0, DoubleType)),
      NaNvl(Cast(Literal.create(1.0f, FloatType), DoubleType), Literal.create(1.0, DoubleType)))
    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0f, FloatType)),
      NaNvl(Literal.create(1.0, DoubleType), Cast(Literal.create(1.0f, FloatType), DoubleType)))
    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0, DoubleType)),
      NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0, DoubleType)))
    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      NaNvl(Literal.create(1.0f, FloatType), Literal.create(null, NullType)),
      NaNvl(Literal.create(1.0f, FloatType), Cast(Literal.create(null, NullType), FloatType)))
    ruleTest(AnsiTypeCoercion.FunctionArgumentConversion,
      NaNvl(Literal.create(1.0, DoubleType), Literal.create(null, NullType)),
      NaNvl(Literal.create(1.0, DoubleType), Cast(Literal.create(null, NullType), DoubleType)))
  }
  // IfCoercion unifies the two value branches to a common type and casts a NullType
  // predicate to BooleanType; incompatible branches are left unchanged.
  test("type coercion for If") {
    val rule = AnsiTypeCoercion.IfCoercion
    val intLit = Literal(1)
    val doubleLit = Literal(1.0)
    val trueLit = Literal.create(true, BooleanType)
    val falseLit = Literal.create(false, BooleanType)
    val stringLit = Literal.create("c", StringType)
    val floatLit = Literal.create(1.0f, FloatType)
    val timestampLit = Literal.create(Timestamp.valueOf("2017-04-12 00:00:00"), TimestampType)
    val decimalLit = Literal(new java.math.BigDecimal("1000000000000000000000"))

    ruleTest(rule,
      If(Literal(true), Literal(1), Literal(1L)),
      If(Literal(true), Cast(Literal(1), LongType), Literal(1L)))

    ruleTest(rule,
      If(Literal.create(null, NullType), Literal(1), Literal(1)),
      If(Literal.create(null, BooleanType), Literal(1), Literal(1)))

    // AssertTrue returns NullType, so the predicate is cast to BooleanType.
    ruleTest(rule,
      If(AssertTrue(trueLit), Literal(1), Literal(2)),
      If(Cast(AssertTrue(trueLit), BooleanType), Literal(1), Literal(2)))

    ruleTest(rule,
      If(AssertTrue(falseLit), Literal(1), Literal(2)),
      If(Cast(AssertTrue(falseLit), BooleanType), Literal(1), Literal(2)))

    ruleTest(rule,
      If(trueLit, intLit, doubleLit),
      If(trueLit, Cast(intLit, DoubleType), doubleLit))

    ruleTest(rule,
      If(trueLit, floatLit, doubleLit),
      If(trueLit, Cast(floatLit, DoubleType), doubleLit))

    ruleTest(rule,
      If(trueLit, floatLit, decimalLit),
      If(trueLit, Cast(floatLit, DoubleType), Cast(decimalLit, DoubleType)))

    // No common type: branches are left as-is.
    ruleTest(rule,
      If(falseLit, stringLit, doubleLit),
      If(falseLit, stringLit, doubleLit))

    ruleTest(rule,
      If(trueLit, timestampLit, stringLit),
      If(trueLit, timestampLit, stringLit))
  }
  // CaseKeyWhen/CaseWhen: the key is widened against branch conditions, and all result
  // branches (including the else value) are widened to one common type.
  test("type coercion for CaseKeyWhen") {
    ruleTest(AnsiTypeCoercion.ImplicitTypeCasts,
      CaseKeyWhen(Literal(1.toShort), Seq(Literal(1), Literal("a"))),
      CaseKeyWhen(Cast(Literal(1.toShort), IntegerType), Seq(Literal(1), Literal("a")))
    )
    // Boolean key vs Int condition: no coercion applies.
    ruleTest(AnsiTypeCoercion.CaseWhenCoercion,
      CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a"))),
      CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a")))
    )
    ruleTest(AnsiTypeCoercion.CaseWhenCoercion,
      CaseWhen(Seq((Literal(true), Literal(1.2))),
        Literal.create(BigDecimal.valueOf(1), DecimalType(7, 2))),
      CaseWhen(Seq((Literal(true), Literal(1.2))),
        Cast(Literal.create(BigDecimal.valueOf(1), DecimalType(7, 2)), DoubleType))
    )
    ruleTest(AnsiTypeCoercion.CaseWhenCoercion,
      CaseWhen(Seq((Literal(true), Literal(100L))),
        Literal.create(BigDecimal.valueOf(1), DecimalType(7, 2))),
      CaseWhen(Seq((Literal(true), Cast(Literal(100L), DecimalType(22, 2)))),
        Cast(Literal.create(BigDecimal.valueOf(1), DecimalType(7, 2)), DecimalType(22, 2)))
    )
  }
  // StackCoercion: untyped null literals in Stack's data arguments are cast to the type
  // of the column they fall into (arguments after the row count are laid out row-major).
  test("type coercion for Stack") {
    val rule = AnsiTypeCoercion.StackCoercion

    ruleTest(rule,
      Stack(Seq(Literal(3), Literal(1), Literal(2), Literal(null))),
      Stack(Seq(Literal(3), Literal(1), Literal(2), Literal.create(null, IntegerType))))
    ruleTest(rule,
      Stack(Seq(Literal(3), Literal(1.0), Literal(null), Literal(3.0))),
      Stack(Seq(Literal(3), Literal(1.0), Literal.create(null, DoubleType), Literal(3.0))))
    ruleTest(rule,
      Stack(Seq(Literal(3), Literal(null), Literal("2"), Literal("3"))),
      Stack(Seq(Literal(3), Literal.create(null, StringType), Literal("2"), Literal("3"))))
    // All-null column: no type to infer, nulls stay untyped.
    ruleTest(rule,
      Stack(Seq(Literal(3), Literal(null), Literal(null), Literal(null))),
      Stack(Seq(Literal(3), Literal(null), Literal(null), Literal(null))))

    ruleTest(rule,
      Stack(Seq(Literal(2),
        Literal(1), Literal("2"),
        Literal(null), Literal(null))),
      Stack(Seq(Literal(2),
        Literal(1), Literal("2"),
        Literal.create(null, IntegerType), Literal.create(null, StringType))))
    ruleTest(rule,
      Stack(Seq(Literal(2),
        Literal(1), Literal(null),
        Literal(null), Literal("2"))),
      Stack(Seq(Literal(2),
        Literal(1), Literal.create(null, StringType),
        Literal.create(null, IntegerType), Literal("2"))))
    ruleTest(rule,
      Stack(Seq(Literal(2),
        Literal(null), Literal(1),
        Literal("2"), Literal(null))),
      Stack(Seq(Literal(2),
        Literal.create(null, StringType), Literal(1),
        Literal("2"), Literal.create(null, IntegerType))))
    ruleTest(rule,
      Stack(Seq(Literal(2),
        Literal(null), Literal(null),
        Literal(1), Literal("2"))),
      Stack(Seq(Literal(2),
        Literal.create(null, IntegerType), Literal.create(null, StringType),
        Literal(1), Literal("2"))))

    // Row count may be a foldable expression, not only a literal.
    ruleTest(rule,
      Stack(Seq(Subtract(Literal(3), Literal(1)),
        Literal(1), Literal("2"),
        Literal(null), Literal(null))),
      Stack(Seq(Subtract(Literal(3), Literal(1)),
        Literal(1), Literal("2"),
        Literal.create(null, IntegerType), Literal.create(null, StringType))))
  }
  // ConcatCoercion in ANSI mode: only untyped nulls are cast to StringType; non-string
  // arguments are NOT implicitly promoted, so mixed-type Concats stay unchanged.
  test("type coercion for Concat") {
    val rule = AnsiTypeCoercion.ConcatCoercion

    ruleTest(rule,
      Concat(Seq(Literal("ab"), Literal("cde"))),
      Concat(Seq(Literal("ab"), Literal("cde"))))
    ruleTest(rule,
      Concat(Seq(Literal(null), Literal("abc"))),
      Concat(Seq(Cast(Literal(null), StringType), Literal("abc"))))
    ruleTest(rule,
      Concat(Seq(Literal(1), Literal("234"))),
      Concat(Seq(Literal(1), Literal("234"))))
    ruleTest(rule,
      Concat(Seq(Literal("1"), Literal("234".getBytes()))),
      Concat(Seq(Literal("1"), Literal("234".getBytes()))))
    ruleTest(rule,
      Concat(Seq(Literal(1L), Literal(2.toByte), Literal(0.1))),
      Concat(Seq(Literal(1L), Literal(2.toByte), Literal(0.1))))
    ruleTest(rule,
      Concat(Seq(Literal(true), Literal(0.1f), Literal(3.toShort))),
      Concat(Seq(Literal(true), Literal(0.1f), Literal(3.toShort))))
    ruleTest(rule,
      Concat(Seq(Literal(1L), Literal(0.1))),
      Concat(Seq(Literal(1L), Literal(0.1))))
    ruleTest(rule,
      Concat(Seq(Literal(Decimal(10)))),
      Concat(Seq(Literal(Decimal(10)))))
    ruleTest(rule,
      Concat(Seq(Literal(BigDecimal.valueOf(10)))),
      Concat(Seq(Literal(BigDecimal.valueOf(10)))))
    ruleTest(rule,
      Concat(Seq(Literal(java.math.BigDecimal.valueOf(10)))),
      Concat(Seq(Literal(java.math.BigDecimal.valueOf(10)))))
    ruleTest(rule,
      Concat(Seq(Literal(new java.sql.Date(0)), Literal(new Timestamp(0)))),
      Concat(Seq(Literal(new java.sql.Date(0)), Literal(new Timestamp(0)))))
    // Binary-only concat is already well-typed.
    ruleTest(rule,
      Concat(Seq(Literal("123".getBytes), Literal("456".getBytes))),
      Concat(Seq(Literal("123".getBytes), Literal("456".getBytes))))
  }
  // EltCoercion: the index argument is widened to IntegerType, untyped nulls among the
  // values become StringType, and (like Concat) non-string values are not promoted.
  test("type coercion for Elt") {
    val rule = AnsiTypeCoercion.EltCoercion

    ruleTest(rule,
      Elt(Seq(Literal(1), Literal("ab"), Literal("cde"))),
      Elt(Seq(Literal(1), Literal("ab"), Literal("cde"))))
    ruleTest(rule,
      Elt(Seq(Literal(1.toShort), Literal("ab"), Literal("cde"))),
      Elt(Seq(Cast(Literal(1.toShort), IntegerType), Literal("ab"), Literal("cde"))))
    ruleTest(rule,
      Elt(Seq(Literal(2), Literal(null), Literal("abc"))),
      Elt(Seq(Literal(2), Cast(Literal(null), StringType), Literal("abc"))))
    ruleTest(rule,
      Elt(Seq(Literal(2), Literal(1), Literal("234"))),
      Elt(Seq(Literal(2), Literal(1), Literal("234"))))
    ruleTest(rule,
      Elt(Seq(Literal(3), Literal(1L), Literal(2.toByte), Literal(0.1))),
      Elt(Seq(Literal(3), Literal(1L), Literal(2.toByte), Literal(0.1))))
    ruleTest(rule,
      Elt(Seq(Literal(2), Literal(true), Literal(0.1f), Literal(3.toShort))),
      Elt(Seq(Literal(2), Literal(true), Literal(0.1f), Literal(3.toShort))))
    ruleTest(rule,
      Elt(Seq(Literal(1), Literal(1L), Literal(0.1))),
      Elt(Seq(Literal(1), Literal(1L), Literal(0.1))))
    ruleTest(rule,
      Elt(Seq(Literal(1), Literal(Decimal(10)))),
      Elt(Seq(Literal(1), Literal(Decimal(10)))))
    ruleTest(rule,
      Elt(Seq(Literal(1), Literal(BigDecimal.valueOf(10)))),
      Elt(Seq(Literal(1), Literal(BigDecimal.valueOf(10)))))
    ruleTest(rule,
      Elt(Seq(Literal(1), Literal(java.math.BigDecimal.valueOf(10)))),
      Elt(Seq(Literal(1), Literal(java.math.BigDecimal.valueOf(10)))))
    ruleTest(rule,
      Elt(Seq(Literal(2), Literal(new java.sql.Date(0)), Literal(new Timestamp(0)))),
      Elt(Seq(Literal(2), Literal(new java.sql.Date(0)), Literal(new Timestamp(0)))))
    ruleTest(rule,
      Elt(Seq(Literal(1), Literal("123".getBytes), Literal("456".getBytes))),
      Elt(Seq(Literal(1), Literal("123".getBytes), Literal("456".getBytes))))
  }
private def checkOutput(logical: LogicalPlan, expectTypes: Seq[DataType]): Unit = {
logical.output.zip(expectTypes).foreach { case (attr, dt) =>
assert(attr.dataType === dt)
}
}
  // Resolves time zones after widening, mirroring what the analyzer does in a real run.
  private val timeZoneResolver = ResolveTimeZone

  // Applies the WidenSetOperationTypes rule followed by time-zone resolution.
  private def widenSetOperationTypes(plan: LogicalPlan): LogicalPlan = {
    timeZoneResolver(AnsiTypeCoercion.WidenSetOperationTypes(plan))
  }
  // Except/Intersect children with mismatched column types get widened column-by-column,
  // with a Project inserted on each side to perform the casts.
  test("WidenSetOperationTypes for except and intersect") {
    val firstTable = LocalRelation(
      AttributeReference("i", IntegerType)(),
      AttributeReference("u", DecimalType.SYSTEM_DEFAULT)(),
      AttributeReference("b", ByteType)(),
      AttributeReference("d", DoubleType)())
    val secondTable = LocalRelation(
      AttributeReference("s", LongType)(),
      AttributeReference("d", DecimalType(2, 1))(),
      AttributeReference("f", FloatType)(),
      AttributeReference("l", LongType)())

    val expectedTypes = Seq(LongType, DecimalType.SYSTEM_DEFAULT, DoubleType, DoubleType)

    val r1 = widenSetOperationTypes(
      Except(firstTable, secondTable, isAll = false)).asInstanceOf[Except]
    val r2 = widenSetOperationTypes(
      Intersect(firstTable, secondTable, isAll = false)).asInstanceOf[Intersect]

    checkOutput(r1.left, expectedTypes)
    checkOutput(r1.right, expectedTypes)
    checkOutput(r2.left, expectedTypes)
    checkOutput(r2.right, expectedTypes)

    // Check if a Project is added
    assert(r1.left.isInstanceOf[Project])
    assert(r1.right.isInstanceOf[Project])
    assert(r2.left.isInstanceOf[Project])
    assert(r2.right.isInstanceOf[Project])
  }
  // A multi-child Union widens across all children at once: Date/Timestamp widens to
  // Timestamp, and mixed numerics to the common type; each child gets a casting Project.
  test("WidenSetOperationTypes for union") {
    val firstTable = LocalRelation(
      AttributeReference("i", DateType)(),
      AttributeReference("u", DecimalType.SYSTEM_DEFAULT)(),
      AttributeReference("b", ByteType)(),
      AttributeReference("d", DoubleType)())
    val secondTable = LocalRelation(
      AttributeReference("s", DateType)(),
      AttributeReference("d", DecimalType(2, 1))(),
      AttributeReference("f", FloatType)(),
      AttributeReference("l", LongType)())
    val thirdTable = LocalRelation(
      AttributeReference("m", TimestampType)(),
      AttributeReference("n", DecimalType.SYSTEM_DEFAULT)(),
      AttributeReference("p", FloatType)(),
      AttributeReference("q", DoubleType)())
    val forthTable = LocalRelation(
      AttributeReference("m", DateType)(),
      AttributeReference("n", DecimalType.SYSTEM_DEFAULT)(),
      AttributeReference("p", ByteType)(),
      AttributeReference("q", DoubleType)())

    val expectedTypes = Seq(TimestampType, DecimalType.SYSTEM_DEFAULT, DoubleType, DoubleType)

    val unionRelation = widenSetOperationTypes(
      Union(firstTable :: secondTable :: thirdTable :: forthTable :: Nil)).asInstanceOf[Union]
    assert(unionRelation.children.length == 4)
    checkOutput(unionRelation.children.head, expectedTypes)
    checkOutput(unionRelation.children(1), expectedTypes)
    checkOutput(unionRelation.children(2), expectedTypes)
    checkOutput(unionRelation.children(3), expectedTypes)

    assert(unionRelation.children.head.isInstanceOf[Project])
    assert(unionRelation.children(1).isInstanceOf[Project])
    assert(unionRelation.children(2).isInstanceOf[Project])
    assert(unionRelation.children(3).isInstanceOf[Project])
  }
  // Decimal precision/scale handling for set operations: decimals widen to a type that
  // covers both sides, and decimal-vs-integral/floating pairs widen per the table below.
  test("Transform Decimal precision/scale for union except and intersect") {
    // Local shadow of the class-level helper with identical behavior.
    def checkOutput(logical: LogicalPlan, expectTypes: Seq[DataType]): Unit = {
      logical.output.zip(expectTypes).foreach { case (attr, dt) =>
        assert(attr.dataType === dt)
      }
    }
    val left1 = LocalRelation(
      AttributeReference("l", DecimalType(10, 8))())
    val right1 = LocalRelation(
      AttributeReference("r", DecimalType(5, 5))())
    val expectedType1 = Seq(DecimalType(10, 8))

    val r1 = widenSetOperationTypes(Union(left1, right1)).asInstanceOf[Union]
    val r2 = widenSetOperationTypes(
      Except(left1, right1, isAll = false)).asInstanceOf[Except]
    val r3 = widenSetOperationTypes(
      Intersect(left1, right1, isAll = false)).asInstanceOf[Intersect]

    checkOutput(r1.children.head, expectedType1)
    checkOutput(r1.children.last, expectedType1)
    checkOutput(r2.left, expectedType1)
    checkOutput(r2.right, expectedType1)
    checkOutput(r3.left, expectedType1)
    checkOutput(r3.right, expectedType1)

    val plan1 = LocalRelation(AttributeReference("l", DecimalType(10, 5))())

    val rightTypes = Seq(ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType)
    val expectedTypes = Seq(DecimalType(10, 5), DecimalType(10, 5), DecimalType(15, 5),
      DecimalType(25, 5), DoubleType, DoubleType)

    // Each pairing is checked in both orders to confirm symmetry.
    rightTypes.zip(expectedTypes).foreach { case (rType, expectedType) =>
      val plan2 = LocalRelation(
        AttributeReference("r", rType)())

      val r1 = widenSetOperationTypes(Union(plan1, plan2)).asInstanceOf[Union]
      val r2 = widenSetOperationTypes(
        Except(plan1, plan2, isAll = false)).asInstanceOf[Except]
      val r3 = widenSetOperationTypes(
        Intersect(plan1, plan2, isAll = false)).asInstanceOf[Intersect]

      checkOutput(r1.children.last, Seq(expectedType))
      checkOutput(r2.right, Seq(expectedType))
      checkOutput(r3.right, Seq(expectedType))

      val r4 = widenSetOperationTypes(Union(plan2, plan1)).asInstanceOf[Union]
      val r5 = widenSetOperationTypes(
        Except(plan2, plan1, isAll = false)).asInstanceOf[Except]
      val r6 = widenSetOperationTypes(
        Intersect(plan2, plan1, isAll = false)).asInstanceOf[Intersect]

      checkOutput(r4.children.last, Seq(expectedType))
      checkOutput(r5.left, Seq(expectedType))
      checkOutput(r6.left, Seq(expectedType))
    }
  }
  // Regression test: when WidenSetOperationTypes inserts aliases, references in parent
  // operators (Project/Aggregate) must be rewritten to the new aliased attributes.
  test("SPARK-32638: corrects references when adding aliases in WidenSetOperationTypes") {
    val t1 = LocalRelation(AttributeReference("v", DecimalType(10, 0))())
    val t2 = LocalRelation(AttributeReference("v", DecimalType(11, 0))())
    val p1 = t1.select(t1.output.head).as("p1")
    val p2 = t2.select(t2.output.head).as("p2")

    val union = p1.union(p2)
    val wp1 = widenSetOperationTypes(union.select(p1.output.head, $"p2.v"))
    assert(wp1.isInstanceOf[Project])
    // The attribute `p1.output.head` should be replaced in the root `Project`.
    assert(wp1.expressions.forall(_.find(_ == p1.output.head).isEmpty))

    val wp2 = widenSetOperationTypes(Aggregate(Nil, sum(p1.output.head).as("v") :: Nil, union))
    assert(wp2.isInstanceOf[Aggregate])
    assert(wp2.missingInput.isEmpty)
  }
  /**
   * There are rules that need to not fire before child expressions get resolved.
   * We use this test to make sure those rules do not fire early.
   */
  test("make sure rules do not fire early") {
    // InConversion must be a no-op while any operand is still unresolved.
    val inConversion = AnsiTypeCoercion.InConversion
    ruleTest(inConversion,
      In(UnresolvedAttribute("a"), Seq(Literal(1))),
      In(UnresolvedAttribute("a"), Seq(Literal(1)))
    )
    ruleTest(inConversion,
      In(Literal("test"), Seq(UnresolvedAttribute("a"), Literal(1))),
      In(Literal("test"), Seq(UnresolvedAttribute("a"), Literal(1)))
    )
    // Fully resolved but incompatible operands: still no coercion in ANSI mode.
    ruleTest(inConversion,
      In(Literal("a"), Seq(Literal(1), Literal("b"))),
      In(Literal("a"), Seq(Literal(1), Literal("b")))
    )
  }
  // Division inside an aggregate must widen integral operands to Double so sum(a/b)
  // uses floating-point (or decimal) division, not integer division.
  test("SPARK-15776 Divide expression's dataType should be casted to Double or Decimal " +
    "in aggregation function like sum") {
    val rules = Seq(FunctionArgumentConversion, Division)
    // Casts Integer to Double
    ruleTest(rules, sum(Divide(4, 3)), sum(Divide(Cast(4, DoubleType), Cast(3, DoubleType))))
    // Left expression is Double, right expression is Int. Another rule ImplicitTypeCasts will
    // cast the right expression to Double.
    ruleTest(rules, sum(Divide(4.0, 3)), sum(Divide(4.0, 3)))
    // Left expression is Int, right expression is Double
    ruleTest(rules, sum(Divide(4, 3.0)), sum(Divide(Cast(4, DoubleType), Cast(3.0, DoubleType))))
    // Casts Float to Double
    ruleTest(
      rules,
      sum(Divide(4.0f, 3)),
      sum(Divide(Cast(4.0f, DoubleType), Cast(3, DoubleType))))
    // Left expression is Decimal, right expression is Int. Another rule DecimalPrecision will cast
    // the right expression to Decimal.
    ruleTest(rules, sum(Divide(Decimal(4.0), 3)), sum(Divide(Decimal(4.0), 3)))
  }
  // A NullType operand of Divide is widened together with the other side to DoubleType.
  test("SPARK-17117 null type coercion in divide") {
    val rules = Seq(FunctionArgumentConversion, Division, ImplicitTypeCasts)
    val nullLit = Literal.create(null, NullType)
    ruleTest(rules, Divide(1L, nullLit), Divide(Cast(1L, DoubleType), Cast(nullLit, DoubleType)))
    ruleTest(rules, Divide(nullLit, 1L), Divide(Cast(nullLit, DoubleType), Cast(1L, DoubleType)))
  }
  // WindowFrameCoercion casts RANGE frame boundaries to the ORDER BY expression's type
  // when a valid cast exists; otherwise (or for special boundaries) it leaves them alone.
  test("cast WindowFrame boundaries to the type they operate upon") {
    // Can cast frame boundaries to order dataType.
    ruleTest(WindowFrameCoercion,
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal(1L), Ascending)),
        SpecifiedWindowFrame(RangeFrame, Literal(3), Literal(2147483648L))),
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal(1L), Ascending)),
        SpecifiedWindowFrame(RangeFrame, Cast(3, LongType), Literal(2147483648L)))
    )
    // Cannot cast frame boundaries to order dataType.
    ruleTest(WindowFrameCoercion,
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal.default(DateType), Ascending)),
        SpecifiedWindowFrame(RangeFrame, Literal(10.0), Literal(2147483648L))),
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal.default(DateType), Ascending)),
        SpecifiedWindowFrame(RangeFrame, Literal(10.0), Literal(2147483648L)))
    )
    // Should not cast SpecialFrameBoundary.
    ruleTest(WindowFrameCoercion,
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal(1L), Ascending)),
        SpecifiedWindowFrame(RangeFrame, CurrentRow, UnboundedFollowing)),
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal(1L), Ascending)),
        SpecifiedWindowFrame(RangeFrame, CurrentRow, UnboundedFollowing))
    )
  }
  // ImplicitTypeCasts must skip expressions that already produce decimals — decimal
  // precision handling is owned by the DecimalPrecision rule instead.
  test("SPARK-29000: skip to handle decimals in ImplicitTypeCasts") {
    ruleTest(AnsiTypeCoercion.ImplicitTypeCasts,
      Multiply(CaseWhen(Seq((EqualTo(1, 2), Cast(1, DecimalType(34, 24)))),
        Cast(100, DecimalType(34, 24))), Literal(1)),
      Multiply(CaseWhen(Seq((EqualTo(1, 2), Cast(1, DecimalType(34, 24)))),
        Cast(100, DecimalType(34, 24))), Literal(1)))

    ruleTest(AnsiTypeCoercion.ImplicitTypeCasts,
      Multiply(CaseWhen(Seq((EqualTo(1, 2), Cast(1, DecimalType(34, 24)))),
        Cast(100, DecimalType(34, 24))), Cast(1, IntegerType)),
      Multiply(CaseWhen(Seq((EqualTo(1, 2), Cast(1, DecimalType(34, 24)))),
        Cast(100, DecimalType(34, 24))), Cast(1, IntegerType)))
  }
  // A NullType operand paired with a decimal operand must be cast to that decimal type,
  // in either argument position, for both AnyType and NumericType binary operators.
  test("SPARK-31468: null types should be casted to decimal types in ImplicitTypeCasts") {
    Seq(AnyTypeBinaryOperator(_, _), NumericTypeBinaryOperator(_, _)).foreach { binaryOp =>
      // binaryOp(decimal, null) case
      ruleTest(AnsiTypeCoercion.ImplicitTypeCasts,
        binaryOp(Literal.create(null, DecimalType.SYSTEM_DEFAULT),
          Literal.create(null, NullType)),
        binaryOp(Literal.create(null, DecimalType.SYSTEM_DEFAULT),
          Cast(Literal.create(null, NullType), DecimalType.SYSTEM_DEFAULT)))

      // binaryOp(null, decimal) case
      ruleTest(AnsiTypeCoercion.ImplicitTypeCasts,
        binaryOp(Literal.create(null, NullType),
          Literal.create(null, DecimalType.SYSTEM_DEFAULT)),
        binaryOp(Cast(Literal.create(null, NullType), DecimalType.SYSTEM_DEFAULT),
          Literal.create(null, DecimalType.SYSTEM_DEFAULT)))
    }
  }
test("SPARK-31761: byte, short and int should be cast to long for IntegralDivide's datatype") {
val rules = Seq(FunctionArgumentConversion, Division, ImplicitTypeCasts)
// Casts Byte to Long
ruleTest(AnsiTypeCoercion.IntegralDivision, IntegralDivide(2.toByte, 1.toByte),
IntegralDivide(Cast(2.toByte, LongType), Cast(1.toByte, LongType)))
// Casts Short to Long
ruleTest(AnsiTypeCoercion.IntegralDivision, IntegralDivide(2.toShort, 1.toShort),
IntegralDivide(Cast(2.toShort, LongType), Cast(1.toShort, LongType)))
// Casts Integer to Long
ruleTest(AnsiTypeCoercion.IntegralDivision, IntegralDivide(2, 1),
IntegralDivide(Cast(2, LongType), Cast(1, LongType)))
// should not be any change for Long data types
ruleTest(AnsiTypeCoercion.IntegralDivision, IntegralDivide(2L, 1L), IntegralDivide(2L, 1L))
// one of the operand is byte
ruleTest(AnsiTypeCoercion.IntegralDivision, IntegralDivide(2L, 1.toByte),
IntegralDivide(2L, Cast(1.toByte, LongType)))
// one of the operand is short
ruleTest(AnsiTypeCoercion.IntegralDivision, IntegralDivide(2.toShort, 1L),
IntegralDivide(Cast(2.toShort, LongType), 1L))
// one of the operand is int
ruleTest(AnsiTypeCoercion.IntegralDivision, IntegralDivide(2, 1L),
IntegralDivide(Cast(2, LongType), 1L))
}
  // In ANSI mode only string LITERALS (not string columns) are promoted: a string
  // literal compared/combined with a typed value is cast to that value's type.
  test("Promote string literals") {
    val rule = AnsiTypeCoercion.PromoteStringLiterals
    val stringLiteral = Literal("123")
    val castStringLiteralAsInt = Cast(stringLiteral, IntegerType)
    val castStringLiteralAsDouble = Cast(stringLiteral, DoubleType)
    val castStringLiteralAsDate = Cast(stringLiteral, DateType)
    val castStringLiteralAsTimestamp = Cast(stringLiteral, TimestampType)
    ruleTest(rule,
      GreaterThan(stringLiteral, Literal(1)),
      GreaterThan(castStringLiteralAsInt, Literal(1)))
    ruleTest(rule,
      LessThan(Literal(true), stringLiteral),
      LessThan(Literal(true), Cast(stringLiteral, BooleanType)))
    // No promotion against complex types such as arrays.
    ruleTest(rule,
      EqualTo(Literal(Array(1, 2)), stringLiteral),
      EqualTo(Literal(Array(1, 2)), stringLiteral))
    ruleTest(rule,
      GreaterThan(stringLiteral, Literal(0.5)),
      GreaterThan(castStringLiteralAsDouble, Literal(0.5)))

    val dateLiteral = Literal(java.sql.Date.valueOf("2021-01-01"))
    ruleTest(rule,
      EqualTo(stringLiteral, dateLiteral),
      EqualTo(castStringLiteralAsDate, dateLiteral))

    val timestampLiteral = Literal(Timestamp.valueOf("2021-01-01 00:00:00"))
    ruleTest(rule,
      EqualTo(stringLiteral, timestampLiteral),
      EqualTo(castStringLiteralAsTimestamp, timestampLiteral))

    ruleTest(rule, Add(stringLiteral, Literal(1)),
      Add(castStringLiteralAsInt, Literal(1)))
    ruleTest(rule, Divide(stringLiteral, Literal(1)),
      Divide(castStringLiteralAsInt, Literal(1)))
    // IN lists: the string literal is promoted to the type of the value being tested.
    ruleTest(rule,
      In(Literal(1), Seq(stringLiteral, Literal(2))),
      In(Literal(1), Seq(castStringLiteralAsInt, Literal(2))))
    ruleTest(rule,
      In(Literal(1.0), Seq(stringLiteral, Literal(2.2))),
      In(Literal(1.0), Seq(castStringLiteralAsDouble, Literal(2.2))))
    ruleTest(rule,
      In(dateLiteral, Seq(stringLiteral)),
      In(dateLiteral, Seq(castStringLiteralAsDate)))
    ruleTest(rule,
      In(timestampLiteral, Seq(stringLiteral)),
      In(timestampLiteral, Seq(castStringLiteralAsTimestamp)))
  }
}
| BryanCutler/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercionSuite.scala | Scala | apache-2.0 | 65,449 |
import java.io.File
import com.google.inject.AbstractModule
import jira.JiraClient
import jira.JiraConfig
import jira.JiraQuery
import jira.JiraQueryImpl
import org.quartz.SchedulerFactory
import org.quartz.impl.StdSchedulerFactory
import phabricator._
import play.api.Environment
import play.api.Configuration
import slack._
/**
 * Guice module that wires the Jira, Phabricator and Slack integrations from
 * the Play [[Configuration]], plus the Quartz scheduler used to drive the
 * periodic Slack reports.
 *
 * All lookups use `.get` deliberately: a missing key is a deployment error
 * and should fail fast at application start.
 */
class ReportsMetricsModule(env: Environment, config: Configuration)
  extends AbstractModule {
  override def configure() = {
    // Explicit Java->Scala collection conversions. The previous wildcard
    // import of scala.collection.JavaConversions relied on deprecated
    // implicit conversions; JavaConverters makes each conversion visible.
    import scala.collection.JavaConverters._
    // Jira specific configurations
    bind(classOf[JiraConfig]).toInstance {
      val configs = config.getConfig("jira").get
      JiraConfig(configs.getString("consumerKey").get,
        new File(configs.getString("privateKeyFile").get),
        configs.getString("baseUrl").get,
        configs.getString("callback").get)
    }
    bind(classOf[JiraClient])
    bind(classOf[JiraQuery]).to(classOf[JiraQueryImpl]).asEagerSingleton()
    // Phabricator specific configuration
    bind(classOf[PhabricatorConfig]).toInstance {
      val configs = config.getConfig("phabricator").get
      PhabricatorConfig(
        apiUrl = configs.getString("apiUrl").get,
        user = configs.getString("user").get,
        certificate = configs.getString("certificate").get)
    }
    bind(classOf[PhabricatorClient]).to(classOf[PhabricatorClientImpl])
    bind(classOf[PhabricatorQuery]).to(classOf[PhabricatorQueryImpl])
    bind(classOf[PhabricatorReporter]).to(classOf[PhabricatorReporterImpl])
    // Slack specific configuration
    bind(classOf[SlackConfig]).toInstance {
      val slackConfig = config.getConfig("slack").get
      // getConfigList / getStringList return java.util.List; convert explicitly.
      val configs = slackConfig.getConfigList("teams").get
      SlackConfig(configs.asScala.map { teamConfig =>
        SlackTeamConfig(teamConfig.getString("channelName").get,
          teamConfig.getString("hookUrl").get,
          teamConfig.getStringList("users").get.asScala.toList)
      }.toList,
        slackConfig.getString("commandToken").get)
    }
    bind(classOf[SlackJob])
    bind(classOf[GuiceJobFactory])
    bind(classOf[SchedulerFactory]).toInstance(new StdSchedulerFactory("quartz.properties"))
    bind(classOf[SlackScheduler]).asEagerSingleton
    bind(classOf[SlackCommandInterpreter]).to(classOf[SlackCommandInterpreterImpl])
  }
}
| rsumbaly/phabricator-report | app/ReportsMetricsModule.scala | Scala | apache-2.0 | 2,278 |
/*
* Copyright 1998-2018 Linux.org.ru
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.org.linux.realtime
import java.io.IOException
import akka.NotUsed
import akka.actor.{Actor, ActorLogging, ActorRef, ActorSystem, PoisonPill, Props, SupervisorStrategy, Terminated, Timers}
import akka.pattern.ask
import akka.util.Timeout
import com.typesafe.scalalogging.StrictLogging
import org.springframework.beans.factory.annotation.Qualifier
import org.springframework.context.annotation.{Bean, Configuration}
import org.springframework.stereotype.Service
import org.springframework.web.socket.config.annotation.{EnableWebSocket, WebSocketConfigurer, WebSocketHandlerRegistry}
import org.springframework.web.socket.handler.TextWebSocketHandler
import org.springframework.web.socket.{CloseStatus, PingMessage, TextMessage, WebSocketSession}
import ru.org.linux.comment.CommentService
import ru.org.linux.realtime.RealtimeEventHub.{NewComment, SessionTerminated, Subscribe, Tick}
import ru.org.linux.spring.SiteConfig
import ru.org.linux.topic.TopicDao
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.concurrent.{Await, ExecutionContext}
import scala.concurrent.duration._
import scala.util.control.NonFatal
// TODO ignore list support
// TODO fix race conditions on simultaneous comment posting, subscription and missed-comment processing
/**
 * Hub actor: tracks which websocket sessions are subscribed to which topic
 * and fans NewComment notifications out to the per-session actors.
 *
 * All mutable state below is touched only from inside the actor, so no extra
 * synchronization is needed.
 */
class RealtimeEventHub extends Actor with ActorLogging with Timers {
  // topic id -> set of session actors subscribed to that topic
  private val data = new mutable.HashMap[Int, mutable.Set[ActorRef]] with mutable.MultiMap[Int, ActorRef]
  // websocket session id -> session actor, used for cleanup on SessionTerminated
  private val sessions = new mutable.HashMap[String, ActorRef]
  // high-water mark of concurrent child connections; logged and reset on each Tick
  private var maxDataSize: Int = 0
  // periodic statistics tick
  timers.startPeriodicTimer(Tick, Tick, 5.minutes)
  // a failing session actor is simply stopped; Terminated below does the bookkeeping
  override def supervisorStrategy = SupervisorStrategy.stoppingStrategy
  override def receive: Receive = {
    // a websocket session may subscribe at most once; duplicates are ignored
    case Subscribe(session, _) if sessions.contains(session.getId) ⇒
      log.warning(s"Session ${session.getId} already subscribed")
    case Subscribe(session, topic) ⇒
      // one RealtimeSessionActor per websocket session, watched for cleanup
      val actor = context.actorOf(RealtimeSessionActor.props(session))
      context.watch(actor)
      data.addBinding(topic, actor)
      sessions += (session.getId -> actor)
      val dataSize = context.children.size
      if (dataSize > maxDataSize) {
        maxDataSize = dataSize
      }
      // acknowledge so the websocket handler's `ask` completes
      sender() ! NotUsed
    case Terminated(actorRef) ⇒
      log.debug(s"RealtimeSessionActor $actorRef terminated")
      // each session actor is bound to exactly one topic (see Subscribe above),
      // so removing the first matching binding is sufficient
      data.find(_._2.contains(actorRef)) match {
        case Some((msgid, _)) ⇒
          log.debug(s"Removed $actorRef")
          data.removeBinding(msgid, actorRef)
        case None ⇒
          log.warning(s"Unknown actor was terminated $actorRef")
      }
      sessions.find(_._2 == actorRef).foreach { f ⇒
        sessions.remove(f._1)
      }
    case SessionTerminated(id) ⇒
      // websocket closed: stop the session actor; Terminated handles map cleanup
      sessions.get(id) foreach { actor ⇒
        log.debug("Session was terminated, stopping actor")
        actor ! PoisonPill
      }
    case msg@NewComment(msgid, _) ⇒
      log.debug(s"New comment in topic $msgid")
      // fan out to every session currently watching this topic
      data.getOrElse(msgid, Set.empty).foreach {
        _ ! msg
      }
    case Tick ⇒
      log.info(s"Realtime hub: maximum number connections was $maxDataSize")
      maxDataSize = 0
  }
}
/** Message protocol of the realtime hub. */
object RealtimeEventHub {
  // NOTE(review): appears unused in this file (name also looks like a typo of
  // "Emitter") — confirm against callers before removing.
  case class GetEmmiterForTopic(msgid: Int, missedComments: Vector[Int])
  // comment `cid` was just posted in topic `msgid`; broadcast to subscribers
  case class NewComment(msgid: Int, cid: Int)
  // periodic timer message (hub statistics reset / session keepalive)
  case object Tick
  // register `session` for live updates on `topic`
  case class Subscribe(session: WebSocketSession, topic: Int)
  // the websocket session with this id has been closed
  case class SessionTerminated(session: String)
  def props = Props(new RealtimeEventHub())
}
/**
 * Per-websocket-session actor: pushes new comment ids to the client as text
 * frames and pings once a minute to keep the connection alive. Any
 * IOException while writing stops the actor, which (via DeathWatch in the
 * hub) unsubscribes the session.
 */
class RealtimeSessionActor(session: WebSocketSession) extends Actor with ActorLogging {
  private implicit val ec: ExecutionContext = context.dispatcher
  // keepalive schedule; cancelled in postStop
  private val schedule = context.system.scheduler.schedule(5.seconds, 1.minute, self, Tick)
  // push a single comment id to the client as a plain-text frame
  private def notifyComment(comment: Int): Unit = {
    session.sendMessage(new TextMessage(comment.toString))
  }
  override def receive: Receive = {
    case NewComment(_, cid) ⇒
      try {
        notifyComment(cid)
      } catch handleExceptions
    case Tick ⇒
      log.debug("Sending keepalive")
      try {
        session.sendMessage(new PingMessage())
      } catch handleExceptions
  }
  // a write failure means the peer is gone: stop self and let the hub clean up
  private def handleExceptions: PartialFunction[Throwable, Unit] = {
    case ex: IOException ⇒
      log.debug(s"Terminated by IOException ${ex.toString}")
      context.stop(self)
  }
  @scala.throws[Exception](classOf[Exception])
  override def postStop(): Unit = {
    schedule.cancel()
    session.close()
  }
}
object RealtimeSessionActor {
  // Props factory binding the actor to a concrete websocket session
  def props(session: WebSocketSession) = Props(new RealtimeSessionActor(session))
}
/**
 * Spring websocket endpoint for realtime comment notifications.
 *
 * Incoming text frame format: "<topicId>" or "<topicId> <lastSeenCommentId>".
 * The handler first replays the ids of comments the client missed, then
 * registers the session at the hub for live NewComment pushes.
 */
@Service
class RealtimeWebsocketHandler(@Qualifier("realtimeHubWS") hub: ActorRef,
                               topicDao: TopicDao, commentService: CommentService) extends TextWebSocketHandler
  with StrictLogging {
  private implicit val Timeout: Timeout = 30.seconds
  override def afterConnectionEstablished(session: WebSocketSession): Unit = {
    logger.debug(s"Connected!")
  }
  override def handleTextMessage(session: WebSocketSession, message: TextMessage): Unit = {
    try {
      val request = message.getPayload
      logger.debug(s"Got request: $request")
      // "<topic>" or "<topic> <lastComment>"; a malformed number throws and is
      // handled by the NonFatal catch below
      val (topicId, maybeComment) = request.split(" ", 2) match {
        case Array(t) ⇒
          t.toInt -> None
        case Array(t, comment) ⇒
          t.toInt -> Some(comment.toInt)
      }
      val topic = topicDao.getById(topicId)
      val last = maybeComment.getOrElse(0)
      val comments = commentService.getCommentList(topic, false)
      // assumes comment ids appear in ascending order — TODO confirm; everything
      // after `last` is treated as missed by the client
      val missed = comments.getList.asScala.map(_.getId).dropWhile(_ <= last).toVector
      missed.foreach { cid ⇒
        logger.debug(s"Sending missed comment $cid")
        session.sendMessage(new TextMessage(cid.toString))
      }
      // subscribe synchronously so a hub failure surfaces here and closes the socket
      val result = hub ? Subscribe(session, topic.getId)
      Await.result(result, 10.seconds)
    } catch {
      case NonFatal(e) ⇒
        logger.warn("WS request failed", e)
        session.close(CloseStatus.SERVER_ERROR)
    }
  }
  override def afterConnectionClosed(session: WebSocketSession, status: CloseStatus): Unit = {
    logger.debug(s"Session terminated with status $status")
    hub ! SessionTerminated(session.getId)
  }
}
/** Spring configuration exposing the hub actor as a named bean. */
@Configuration
class RealtimeConfigurationBeans(actorSystem: ActorSystem) {
  // Named "realtimeHubWS" so RealtimeWebsocketHandler can inject it via @Qualifier
  @Bean(Array("realtimeHubWS"))
  def hub: ActorRef = actorSystem.actorOf(RealtimeEventHub.props)
}
/** Registers the websocket handler at /ws, restricted to the site's own origin. */
@Configuration
@EnableWebSocket
class RealtimeConfigurationWS(handler: RealtimeWebsocketHandler, config: SiteConfig) extends WebSocketConfigurer {
  override def registerWebSocketHandlers(registry: WebSocketHandlerRegistry): Unit = {
    registry.addHandler(handler, "/ws").setAllowedOrigins(config.getSecureUrl)
  }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.coordinator
import kafka.utils.nonthreadsafe
import java.util.UUID
import org.apache.kafka.common.protocol.Errors
import collection.mutable
// Lifecycle state of a consumer group. `state` is a stable byte tag per state;
// the values are non-contiguous — presumably kept for compatibility with
// persisted/wire data, so do not renumber (TODO confirm against the storage format).
private[coordinator] sealed trait GroupState { def state: Byte }
/**
 * Group is preparing to rebalance
 *
 * action: respond to heartbeats with REBALANCE_IN_PROGRESS
 *         respond to sync group with REBALANCE_IN_PROGRESS
 *         remove member on leave group request
 *         park join group requests from new or existing members until all expected members have joined
 *         allow offset commits from previous generation
 *         allow offset fetch requests
 * transition: some members have joined by the timeout => AwaitingSync
 *             all members have left the group => Dead
 */
private[coordinator] case object PreparingRebalance extends GroupState { val state: Byte = 1 }
/**
 * Group is awaiting state assignment from the leader
 *
 * action: respond to heartbeats with REBALANCE_IN_PROGRESS
 *         respond to offset commits with REBALANCE_IN_PROGRESS
 *         park sync group requests from followers until transition to Stable
 *         allow offset fetch requests
 * transition: sync group with state assignment received from leader => Stable
 *             join group from new member or existing member with updated metadata => PreparingRebalance
 *             leave group from existing member => PreparingRebalance
 *             member failure detected => PreparingRebalance
 */
private[coordinator] case object AwaitingSync extends GroupState { val state: Byte = 5}
/**
 * Group is stable
 *
 * action: respond to member heartbeats normally
 *         respond to sync group from any member with current assignment
 *         respond to join group from followers with matching metadata with current group metadata
 *         allow offset commits from member of current generation
 *         allow offset fetch requests
 * transition: member failure detected via heartbeat => PreparingRebalance
 *             leave group from existing member => PreparingRebalance
 *             leader join-group received => PreparingRebalance
 *             follower join-group with new metadata => PreparingRebalance
 */
private[coordinator] case object Stable extends GroupState { val state: Byte = 3 }
/**
 * Group has no more members
 *
 * action: respond to join group with UNKNOWN_MEMBER_ID
 *         respond to sync group with UNKNOWN_MEMBER_ID
 *         respond to heartbeat with UNKNOWN_MEMBER_ID
 *         respond to leave group with UNKNOWN_MEMBER_ID
 *         respond to offset commit with UNKNOWN_MEMBER_ID
 *         allow offset fetch requests
 * transition: Dead is a final state before group metadata is cleaned up, so there are no transitions
 */
private[coordinator] case object Dead extends GroupState { val state: Byte = 4 }
private object GroupMetadata {
  // Legal predecessor states for each transition target; consulted by
  // GroupMetadata.assertValidTransition. Built from an explicit pair list so
  // each edge of the state machine reads as one line.
  private val validPreviousStates: Map[GroupState, Set[GroupState]] =
    List[(GroupState, Set[GroupState])](
      Dead -> Set(PreparingRebalance),
      AwaitingSync -> Set(PreparingRebalance),
      Stable -> Set(AwaitingSync),
      PreparingRebalance -> Set(Stable, AwaitingSync)
    ).toMap
}
/**
 * Group contains the following metadata:
 *
 * Membership metadata:
 *  1. Members registered in this group
 *  2. Current protocol assigned to the group (e.g. partition assignment strategy for consumers)
 *  3. Protocol metadata associated with group members
 *
 * State metadata:
 *  1. group state
 *  2. generation id
 *  3. leader id
 */
@nonthreadsafe
private[coordinator] class GroupMetadata(val groupId: String, val protocolType: String) {
  // memberId -> metadata for every member currently registered in the group
  private val members = new mutable.HashMap[String, MemberMetadata]
  private var state: GroupState = Stable
  var generationId = 0
  // id of the member elected leader; null while the group is empty
  var leaderId: String = null
  // protocol chosen by selectProtocol for the current generation; null until first rebalance
  var protocol: String = null
  def is(groupState: GroupState) = state == groupState
  def not(groupState: GroupState) = state != groupState
  def has(memberId: String) = members.contains(memberId)
  def get(memberId: String) = members(memberId)
  // Register a member; the first member to join becomes the leader.
  def add(memberId: String, member: MemberMetadata) {
    assert(supportsProtocols(member.protocols))
    if (leaderId == null)
      leaderId = memberId
    members.put(memberId, member)
  }
  // Remove a member; if it was the leader, promote an arbitrary remaining member.
  def remove(memberId: String) {
    members.remove(memberId)
    if (memberId == leaderId) {
      leaderId = if (members.isEmpty) {
        null
      } else {
        members.keys.head
      }
    }
  }
  def currentState = state
  def isEmpty = members.isEmpty
  // Members that have not yet re-sent a join-group request in the current rebalance
  // (no join callback parked for them).
  def notYetRejoinedMembers = members.values.filter(_.awaitingJoinCallback == null).toList
  def allMembers = members.values.toList
  // Group-wide rebalance timeout: the largest session timeout of any member.
  def rebalanceTimeout = members.values.foldLeft(0) {(timeout, member) =>
    timeout.max(member.sessionTimeoutMs)
  }
  // TODO: decide if ids should be predictable or random
  def generateNextMemberId = UUID.randomUUID().toString
  def canRebalance = state == Stable || state == AwaitingSync
  def transitionTo(groupState: GroupState) {
    assertValidTransition(groupState)
    state = groupState
  }
  // Pick the candidate protocol with the most member votes; each member votes
  // for its most preferred protocol among the common candidates.
  def selectProtocol: String = {
    if (members.isEmpty)
      throw new IllegalStateException("Cannot select protocol for empty group")
    // select the protocol for this group which is supported by all members
    val candidates = candidateProtocols
    // let each member vote for one of the protocols and choose the one with the most votes
    val votes: List[(String, Int)] = allMembers
      .map(_.vote(candidates))
      .groupBy(identity)
      .mapValues(_.size)
      .toList
    votes.maxBy(_._2)._1
  }
  private def candidateProtocols = {
    // get the set of protocols that are commonly supported by all members
    allMembers
      .map(_.protocols)
      .reduceLeft((commonProtocols, protocols) => commonProtocols & protocols)
  }
  // A new member is acceptable if the group is empty or shares at least one protocol.
  def supportsProtocols(memberProtocols: Set[String]) = {
    isEmpty || (memberProtocols & candidateProtocols).nonEmpty
  }
  // Bump the generation once every known member has rejoined, re-elect the
  // protocol, and move to AwaitingSync (waiting for the leader's assignment).
  def initNextGeneration = {
    assert(notYetRejoinedMembers == List.empty[MemberMetadata])
    generationId += 1
    protocol = selectProtocol
    transitionTo(AwaitingSync)
  }
  // Snapshot of each member's metadata for the currently selected protocol;
  // only meaningful once a protocol has been chosen (not Dead/PreparingRebalance).
  def currentMemberMetadata: Map[String, Array[Byte]] = {
    if (is(Dead) || is(PreparingRebalance))
      throw new IllegalStateException("Cannot obtain member metadata for group in state %s".format(state))
    members.map{ case (memberId, memberMetadata) => (memberId, memberMetadata.metadata(protocol))}.toMap
  }
  private def assertValidTransition(targetState: GroupState) {
    if (!GroupMetadata.validPreviousStates(targetState).contains(state))
      throw new IllegalStateException("Group %s should be in the %s states before moving to %s state. Instead it is in %s state"
        .format(groupId, GroupMetadata.validPreviousStates(targetState).mkString(","), targetState, state))
  }
}
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.