code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package edu.rice.habanero.benchmarks.threadring
import edu.rice.habanero.actors.{ScalazActor, ScalazActorState, ScalazPool}
import edu.rice.habanero.benchmarks.threadring.ThreadRingConfig.{DataMessage, ExitMessage, PingMessage}
import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner}
/**
*
* @author <a href="http://shams.web.rice.edu/">Shams Imam</a> (shams@rice.edu)
*/
object ThreadRingScalazActorBenchmark {

  /** Entry point: delegates to the shared benchmark runner harness. */
  def main(args: Array[String]) {
    BenchmarkRunner.runBenchmark(args, new ThreadRingScalazActorBenchmark)
  }

  /** Thread-ring benchmark implemented on top of Scalaz actors. */
  private final class ThreadRingScalazActorBenchmark extends Benchmark {

    def initialize(args: Array[String]) {
      ThreadRingConfig.parseArgs(args)
    }

    def printArgInfo() {
      ThreadRingConfig.printArgs()
    }

    def runIteration() {
      val ringSize = ThreadRingConfig.N

      // Create and start every actor in the ring up front.
      val actors = Array.tabulate[ScalazActor[AnyRef]](ringSize) { idx =>
        val actor = new ThreadRingActor(idx, ringSize)
        actor.start()
        actor
      }

      // Wire each actor to its clockwise neighbour; the last wraps back to the first.
      var idx = 0
      while (idx < ringSize) {
        actors(idx).send(new DataMessage(actors((idx + 1) % ringSize)))
        idx += 1
      }

      // Start the ping circulating with the configured hop budget, then block
      // until every actor in the ring has terminated.
      actors(0).send(new PingMessage(ThreadRingConfig.R))
      ScalazActorState.awaitTermination()
    }

    def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double): Unit = {
      if (lastIteration) {
        ScalazPool.shutdown()
      }
    }
  }

  /** One node of the ring; forwards pings/exits to its clockwise neighbour. */
  private class ThreadRingActor(id: Int, numActorsInRing: Int) extends ScalazActor[AnyRef] {

    // Clockwise neighbour; assigned when the setup DataMessage arrives.
    private var nextActor: ScalazActor[AnyRef] = null

    override def process(msg: AnyRef) {
      msg match {
        case ping: PingMessage =>
          // Forward the ping while hops remain, otherwise start the exit wave.
          if (ping.hasNext) nextActor.send(ping.next())
          else nextActor.send(new ExitMessage(numActorsInRing))
        case ex: ExitMessage =>
          // Propagate the exit message around the ring, then stop this actor.
          if (ex.hasNext) nextActor.send(ex.next())
          exit()
        case data: DataMessage =>
          nextActor = data.data.asInstanceOf[ScalazActor[AnyRef]]
      }
    }
  }
}
| shamsmahmood/savina | src/main/scala/edu/rice/habanero/benchmarks/threadring/ThreadRingScalazActorBenchmark.scala | Scala | gpl-2.0 | 2,145 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package java.nio
import scala.scalajs.js.typedarray._
object DoubleBuffer {
  // Precomputed hash of the class name, mixed into element hashes by hashCode().
  private final val HashSeed = 2140173175 // "java.nio.DoubleBuffer".##

  /** Allocates a new buffer of the given capacity, backed by a fresh array. */
  def allocate(capacity: Int): DoubleBuffer =
    wrap(new Array[Double](capacity))

  /** Wraps an existing array in a heap buffer.
   *  Delegates to the internal HeapDoubleBuffer factory; presumably `offset` and
   *  `length` become the initial position/limit — confirm against HeapDoubleBuffer.wrap.
   */
  def wrap(array: Array[Double], offset: Int, length: Int): DoubleBuffer =
    HeapDoubleBuffer.wrap(array, 0, array.length, offset, length, false)

  /** Wraps the whole array (offset 0, length = array.length). */
  def wrap(array: Array[Double]): DoubleBuffer =
    wrap(array, 0, array.length)

  // Extended API

  /** Scala.js extension: wraps a JavaScript Float64Array in a typed-array buffer. */
  def wrap(array: Float64Array): DoubleBuffer =
    TypedArrayDoubleBuffer.wrap(array)
}
/** A double buffer, mirroring the `java.nio.DoubleBuffer` API.
 *
 *  `_array`/`_arrayOffset` back heap-based buffers; other buffer kinds pass
 *  `null` / -1 via the auxiliary constructor. Shared logic is delegated to the
 *  `GenBuffer` generic implementations.
 */
abstract class DoubleBuffer private[nio] (
    _capacity: Int, private[nio] val _array: Array[Double],
    private[nio] val _arrayOffset: Int)
    extends Buffer(_capacity) with Comparable[DoubleBuffer] {

  // Type members consumed by the shared GenBuffer implementation machinery.
  private[nio] type ElementType = Double
  private[nio] type BufferType = DoubleBuffer
  private[nio] type TypedArrayType = Float64Array

  // Constructor for buffers that are not backed by a Scala array.
  def this(_capacity: Int) = this(_capacity, null, -1)

  def slice(): DoubleBuffer

  def duplicate(): DoubleBuffer

  def asReadOnlyBuffer(): DoubleBuffer

  def get(): Double

  def put(d: Double): DoubleBuffer

  def get(index: Int): Double

  def put(index: Int, d: Double): DoubleBuffer

  // Bulk relative get into `dst`, delegated to the generic implementation.
  @noinline
  def get(dst: Array[Double], offset: Int, length: Int): DoubleBuffer =
    GenBuffer(this).generic_get(dst, offset, length)

  def get(dst: Array[Double]): DoubleBuffer =
    get(dst, 0, dst.length)

  // Bulk put of another buffer's content into this buffer.
  @noinline
  def put(src: DoubleBuffer): DoubleBuffer =
    GenBuffer(this).generic_put(src)

  @noinline
  def put(src: Array[Double], offset: Int, length: Int): DoubleBuffer =
    GenBuffer(this).generic_put(src, offset, length)

  final def put(src: Array[Double]): DoubleBuffer =
    put(src, 0, src.length)

  // Backing-array accessors, delegated to GenBuffer.
  @inline final def hasArray(): Boolean =
    GenBuffer(this).generic_hasArray()

  @inline final def array(): Array[Double] =
    GenBuffer(this).generic_array()

  @inline final def arrayOffset(): Int =
    GenBuffer(this).generic_arrayOffset()

  // The overrides below narrow Buffer's mutators so that they return
  // `DoubleBuffer` and therefore allow fluent call chaining.
  @inline override def position(newPosition: Int): DoubleBuffer = {
    super.position(newPosition)
    this
  }

  @inline override def limit(newLimit: Int): DoubleBuffer = {
    super.limit(newLimit)
    this
  }

  @inline override def mark(): DoubleBuffer = {
    super.mark()
    this
  }

  @inline override def reset(): DoubleBuffer = {
    super.reset()
    this
  }

  @inline override def clear(): DoubleBuffer = {
    super.clear()
    this
  }

  @inline override def flip(): DoubleBuffer = {
    super.flip()
    this
  }

  @inline override def rewind(): DoubleBuffer = {
    super.rewind()
    this
  }

  def compact(): DoubleBuffer

  def isDirect(): Boolean

  // toString(): String inherited from Buffer

  // Hash is computed over the remaining elements, seeded with HashSeed.
  @noinline
  override def hashCode(): Int =
    GenBuffer(this).generic_hashCode(DoubleBuffer.HashSeed)

  // Equality is defined as element-wise comparison of the remaining elements.
  override def equals(that: Any): Boolean = that match {
    case that: DoubleBuffer => compareTo(that) == 0
    case _                  => false
  }

  @noinline
  def compareTo(that: DoubleBuffer): Int =
    GenBuffer(this).generic_compareTo(that)(_.compareTo(_))

  def order(): ByteOrder

  // Internal API

  private[nio] def load(index: Int): Double

  private[nio] def store(index: Int, elem: Double): Unit

  // Bulk load/store between this buffer and an array, via GenBuffer.
  @inline
  private[nio] def load(startIndex: Int,
      dst: Array[Double], offset: Int, length: Int): Unit =
    GenBuffer(this).generic_load(startIndex, dst, offset, length)

  @inline
  private[nio] def store(startIndex: Int,
      src: Array[Double], offset: Int, length: Int): Unit =
    GenBuffer(this).generic_store(startIndex, src, offset, length)
}
| nicolasstucki/scala-js | javalib/src/main/scala/java/nio/DoubleBuffer.scala | Scala | apache-2.0 | 3,868 |
import scala.language.experimental.macros
import scala.reflect.macros.whitebox.Context
object Macros {
  /** Whitebox macro implementation that materializes a `SourceLocation1`
   *  describing the call site of the enclosing implicit search.
   */
  def impl(c: Context) = {
    import c.universe._
    // Guard against unbounded recursive expansion of this same macro: if it is
    // already expanding within itself, abort rather than overflow the stack.
    val thisMacro = c.macroApplication.symbol
    val depth = c.enclosingMacros.count(_.macroApplication.symbol == thisMacro)
    if (depth > 1) c.abort(c.enclosingPosition, "") // avoid StackOverflow
    // Reuse an already-in-scope SourceLocation as the `outer` link, if one exists;
    // otherwise the outer link is null.
    val inscope = c.inferImplicitValue(c.mirror.staticClass("SourceLocation").toType)
    val outer = c.Expr[SourceLocation](if (!inscope.isEmpty) inscope else Literal(Constant(null)))
    // Position information comes from the application tree that triggered the
    // implicit search (first entry in enclosingImplicits).
    val Apply(fun, args) = c.enclosingImplicits(0).tree
    val fileName = fun.pos.source.file.file.getName
    val line = fun.pos.line
    val charOffset = fun.pos.point
    // Lift a compile-time constant into an Expr tree.
    def literal[T](x: T) = c.Expr[T](Literal(Constant(x)))
    c.universe.reify { SourceLocation1(outer.splice, literal(fileName).splice, literal(line).splice, literal(charOffset).splice) }
  }

  /** Implicit materializer: summoning a `SourceLocation1` expands `impl` at the use site. */
  implicit def sourceLocation: SourceLocation1 = macro impl
}
/** Call-site location information, intended to be materialized implicitly
 *  by the `Macros.sourceLocation` macro.
 */
trait SourceLocation {
  /** Source location of the outermost call */
  val outer: SourceLocation
  /** The name of the source file */
  val fileName: String
  /** The line number */
  val line: Int
  /** The character offset */
  val charOffset: Int
}
/** Concrete [[SourceLocation]] carrying an outer link plus file/line/offset data.
 *  (Case-class parameters are public vals by default, so the explicit `val`
 *  modifiers were redundant and have been dropped.)
 */
case class SourceLocation1(outer: SourceLocation, fileName: String, line: Int, charOffset: Int) extends SourceLocation
| scala/scala | test/files/run/macro-sip19-revised/Impls_Macros_1.scala | Scala | apache-2.0 | 1,365 |
/**********************************************************************************************************************
* This file is part of Scrupal, a Scalable Reactive Web Application Framework for Content Management *
* *
* Copyright (c) 2015, Reactific Software LLC. All Rights Reserved. *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed *
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for *
* the specific language governing permissions and limitations under the License. *
**********************************************************************************************************************/
package scrupal.utils
import play.api.libs.json._
object Validation {

  /** Base class of a location where validation can occur.
    * A location is just a name of something but permits member selection and index subscripting for nested locations.
    */
  sealed trait Location {
    /** Human-readable name of this location, used in messages. */
    def location : String
    /** Derives the location of element `i` nested within this location. */
    def index(i : Int) : IndexedLocation = IndexedLocation(this, i)
    /** Derives the location of member `s` selected from this location. */
    def select[KT](s : KT) : SelectedLocation[KT] = SelectedLocation[KT](this, s)
  }

  /** The default validation location */
  object DefaultLocation extends Location { def location = "somewhere" }

  /** A simple, non-nested validation location */
  case class SimpleLocation(location : String) extends Location

  /** A location identified by the string form of an arbitrary value. */
  case class TypedLocation[VT](value : VT) extends Location {
    def location = value.toString
  }

  /** An index subscripted validation location */
  case class IndexedLocation(parent : Location, index : Int) extends Location {
    def location : String = s"${parent.location}[$index]"
  }

  /** A member selected validation location */
  case class SelectedLocation[KT](parent : Location, key : KT) extends Location {
    def location : String = s"${parent.location}.${key.toString}"
  }

  /** Exception carrying a validation result; thrown by [[Results.tossOnError]]. */
  case class Exception[VR](result : Results[VR]) extends java.lang.Exception(result.message)

  /** The most abstract kind of validation result.
    *
    * Note that results can be accumulated with the add(vr)
    */
  sealed trait Results[VT] {
    /** Where the validation occurred. */
    def ref : Location
    /** The value that was validated. */
    def value : VT
    /** Whether this result represents a failure. */
    def isError : Boolean

    /** Throws a [[Validation.Exception]] wrapping this result if it is an error. */
    def tossOnError = {
      if (isError)
        throw new Exception(this)
    }

    /** Message builder; subclasses append their specific detail. */
    def msgBldr : StringBuilder = { new StringBuilder }

    /** Complete message, suffixed with the location at which it occurred. */
    def message : String = {
      val bldr = msgBldr
      bldr.append(", at ").append(ref.location).append(".")
      bldr.toString()
    }

    /** JSON rendering of this result. */
    def jsonMessage : JsObject

    /** Accumulates another failure into this result.
      *
      * Bug fix: the previous pattern `case Failures(oref, oval, oerrors)` only
      * matched a `Failures` holding exactly one error (a varargs pattern needs
      * `@ _*` to bind the whole sequence), so a `Failures` with two or more
      * errors fell through to the generic `Failure` case and was nested inside
      * a new `Failures` instead of having the new error appended. Binding with
      * `oerrors @ _*` flattens correctly for any number of accumulated errors.
      */
    def add(vr : Failure[VT]) : Results[VT] = {
      this match {
        case Success(oref, oval) ⇒ Failures(oref, oval, vr)
        case Failures(oref, oval, oerrors @ _*) ⇒ Failures(oref, oval, (oerrors :+ vr) : _*)
        case x : Failure[VT] ⇒ Failures(ref, value, Seq(x, vr) : _*)
      }
    }

    /** Failures grouped by the location at which they occurred. */
    def errorMap : Map[Location, Seq[Results[_]]]
  }

  /** What is returned when the validation succeeds */
  case class Success[VT](ref : Location, value : VT) extends Results[VT] {
    def isError = false
    override def msgBldr = { super.msgBldr.append("Validation succeeded") }
    def errorMap = Map.empty[Location, Seq[Results[_]]]
    def jsonMessage = JsObject(Seq("form" → JsString(ref.location), "valid" → JsBoolean(value = true)))
  }

  /** Base class of the various kinds of error results */
  trait Failure[VT] extends Results[VT] {
    def ref : Location
    def isError = true
    def jsonMessage : JsObject = JsObject(Seq(ref.location → JsString(msgBldr.toString())))
    def errorMap : Map[Location, Seq[Results[_]]] = { Map(ref -> Seq(this)) }
  }

  /** Validation failure consisting of other error results */
  case class Failures[VT](ref : Location, value : VT, errors : Failure[_]*) extends Failure[VT] {
    override def msgBldr : StringBuilder = {
      val s = super.msgBldr
      s.append("Failed to validate ").append(ref.location).append(": \\n")
      for (err ← errors) {
        s.append(err.msgBldr).append("\\n")
      }
      s
    }
    override def jsonMessage : JsObject = {
      // Group the component errors by location so each location lists its own errors.
      val grouped = errors.groupBy { vr ⇒ vr.ref }
      JsObject(Seq(
        "location" → JsString(ref.location), "valid" → JsBoolean(value = false), "errors" → JsObject(
          grouped.map {
            case (ref, errs) ⇒ ref.location -> JsArray(errs.map { err ⇒ err.jsonMessage })
          }
        )
      ))
    }
    override def errorMap : Map[Location, Seq[Results[_]]] = { errors.groupBy { vr ⇒ vr.ref } }
  }

  /** Validation failure with a simple error message */
  case class StringFailure[VT](ref : Location, value : VT, errMsg : String) extends Failure[VT] {
    override def msgBldr : StringBuilder = {
      super.msgBldr.append(errMsg)
    }
  }

  /** Validation failure with an exception */
  case class ThrowableFailure[VT](ref : Location, value : VT, cause : java.lang.Throwable) extends Failure[VT] {
    override def msgBldr : StringBuilder = {
      super.msgBldr.append(cause.getClass.getName).append(": ").append(cause.getMessage)
    }
  }

  /** The Validator of a type of thing
    *
    * @tparam VT The type of the thing being validated
    */
  trait Validator[VT] {
    /** A Type alias for the ValidationResult, for brevity */
    type VResult = Results[VT]

    /** Validate value of type VType with this validator
      *
      * @param ref The location at which the value occurs
      * @param value the VType to be validated
      * @return Any of the ValidationResults
      */
    def validate(ref : Location, value : VT) : VResult

    /** Adapts a simple `VT => Option[String]` check into a full result:
      * `Some("")` signals a wrong-class error, `Some(msg)` a plain failure,
      * `None` a success.
      */
    protected def simplify(ref : Location, value : VT, classes : String)(validator : (VT) ⇒ Option[String]) : VResult = {
      validator(value) match {
        case Some("") ⇒ wrongClass(ref, value, classes)
        case Some(msg : String) ⇒ StringFailure(ref, value, msg)
        case None ⇒ Success(ref, value)
      }
    }

    /** Standard failure for a value of an unexpected runtime class. */
    protected def wrongClass(ref : Location, value : VT, expected : String) : VResult = {
      StringFailure(ref, value, s"Expected value of type $expected but got ${value.getClass.getSimpleName} instead.")
    }
  }

  /** A Validator for a sequence (array, list, vector) of things
    * This traverses the sequence, delegates the validation of the sequence's elements, and collects the results
    * @tparam ET The element Type
    * @tparam ST The sequence type being validated
    */
  trait SeqValidator[ET, ST] extends Validator[ST] {

    def toSeq(st : ST) : Seq[ET]

    def validateElement(ref : IndexedLocation, v : ET) : Results[ET]

    def validate(ref : Location, value : ST) : VResult = {
      // idx is advanced inside the generator so each element gets its own IndexedLocation.
      var idx : Int = -1
      val errors : Seq[Failure[ET]] = {
        for (
          v ← toSeq(value);
          e = validateElement(ref.index({ idx += 1; idx }), v) if e.isError
        ) yield {
          e.asInstanceOf[Failure[ET]]
        }
      }
      if (errors.isEmpty)
        Success(ref, value)
      else
        Failures[ST](ref, value, errors : _*)
    }
  }

  /** A Validator for maps: validates each (key, value) entry and collects the failures.
    * @tparam KT The key type
    * @tparam ET The entry value type
    * @tparam MT The map type being validated
    */
  trait MapValidator[KT, ET, MT] extends Validator[MT] {

    def toMap(mt : MT) : scala.collection.Map[KT, ET]

    def validateElement(ref : SelectedLocation[KT], k: KT, v : ET) : Results[ET]

    def validate(ref : Location, value : MT) : VResult = {
      val errors : Seq[Failure[ET]] = {
        for (
          (k, v) ← toMap(value);
          e = validateElement(ref.select(k), k, v) if e.isError
        ) yield { e.asInstanceOf[Failure[ET]] }
      }.toSeq
      if (errors.isEmpty)
        Success(ref, value)
      else
        Failures[MT](ref, value, errors : _*)
    }
  }

  /** Convenience trait for validating String-keyed maps. */
  trait StringMapValidator[ET] extends MapValidator[String, ET, Map[String, ET]]

  /** Validator over the elements of a JSON array. */
  trait JsArrayValidator extends SeqValidator[JsValue, JsArray] {
    override def toSeq(st : JsArray) : Seq[JsValue] = st.value
    def validateElement(ref : IndexedLocation, v : JsValue) : Results[JsValue]
  }

  /** Generic Value Validator as a Function. You can apply these validations in other validations making them
    * composable.
    */
  trait JsObjectValidator extends MapValidator[String, JsValue, JsObject] {
    override def toMap(mt : JsObject) : scala.collection.Map[String, JsValue] = mt.value
    def validateElement(ref : SelectedLocation[String], k: String, v : JsValue) : Results[JsValue]
  }
}
| scrupal/scrupal | scrupal-utils/src/main/scala/scrupal/utils/Validation.scala | Scala | apache-2.0 | 9,225 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.analytic
import java.util.Date
import org.geotools.data.collection.ListFeatureCollection
import org.geotools.data.simple.SimpleFeatureCollection
import org.geotools.process.ProcessException
import org.geotools.process.factory.{DescribeParameter, DescribeProcess, DescribeResult}
import org.locationtech.geomesa.process.GeoMesaProcess
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.util.ProgressListener
/**
  * Returns a single feature that is the head of a track of related simple features
  */
@DescribeProcess(
  title = "Track Label Process",
  description = "Returns a single feature appropriate for labelling a track of features"
)
class TrackLabelProcess extends GeoMesaProcess {

  /** Groups the input features by the `track` attribute and returns one
    * representative feature per track: the latest by `dtg` when a date
    * attribute is available, otherwise the first feature encountered.
    *
    * @param featureCollection input features to label
    * @param track attribute used to group features into tracks (required)
    * @param dtg optional date attribute for ordering; falls back to the
    *            schema's default date attribute when not supplied
    * @param monitor progress listener (unused)
    */
  @throws(classOf[ProcessException])
  @DescribeResult(name = "result", description = "Label features")
  def execute(@DescribeParameter(name = "data", description = "Input features")
              featureCollection: SimpleFeatureCollection,
              @DescribeParameter(name = "track", description = "Track attribute to use for grouping features")
              track: String,
              @DescribeParameter(name = "dtg", description = "Date attribute to use for ordering tracks", min = 0)
              dtg: String,
              monitor: ProgressListener): SimpleFeatureCollection = {

    import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType

    val sft = featureCollection.getSchema
    // only computed when needed for an error message
    lazy val sftString = s"${sft.getTypeName}: ${SimpleFeatureTypes.encodeType(sft)}"

    val trackField = Option(track).map(sft.indexOf).filter(_ != -1).getOrElse {
      throw new IllegalArgumentException(s"Invalid track field $track for schema $sftString")
    }

    // dtg is optional; fall back to the schema's default date attribute when absent
    val dtgField = Option(dtg).map(sft.indexOf).orElse(sft.getDtgIndex)
    // noinspection ExistsEquals
    if (dtgField.exists(_ == -1)) {
      // bug fix: this message previously said "Invalid track field $track",
      // copy-pasted from the track check above, even though it reports a bad dtg
      throw new IllegalArgumentException(s"Invalid dtg field $dtg for schema $sftString")
    }

    val results = new ListFeatureCollection(sft)
    // group features by their track id, then pick one label feature per group
    val grouped = SelfClosingIterator(featureCollection).toSeq.groupBy(_.getAttribute(trackField))
    dtgField match {
      case None    => grouped.foreach { case (_, features) => results.add(features.head) }
      case Some(d) => grouped.foreach { case (_, features) => results.add(features.maxBy(_.getAttribute(d).asInstanceOf[Date])) }
    }
    results
  }
}
| ddseapy/geomesa | geomesa-process/geomesa-process-vector/src/main/scala/org/locationtech/geomesa/process/analytic/TrackLabelProcess.scala | Scala | apache-2.0 | 2,960 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import scala.collection.mutable
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.SubExprUtils._
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.optimizer.BooleanSimplification
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.util.TypeUtils
import org.apache.spark.sql.connector.catalog.TableChange.{AddColumn, After, ColumnPosition, DeleteColumn, RenameColumn, UpdateColumnComment, UpdateColumnNullability, UpdateColumnPosition, UpdateColumnType}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
/**
* Throws user facing errors when passed invalid queries that fail to analyze.
*/
trait CheckAnalysis extends PredicateHelper {
protected def isView(nameParts: Seq[String]): Boolean
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
/**
* Override to provide additional checks for correct analysis.
* These rules will be evaluated after our built-in check rules.
*/
val extendedCheckRules: Seq[LogicalPlan => Unit] = Nil
/** Aborts analysis by raising an [[AnalysisException]] with the given message. */
protected def failAnalysis(msg: String): Nothing =
  throw new AnalysisException(msg)
/** Returns true when more than one [[Generator]] appears across the expression trees. */
protected def containsMultipleGenerators(exprs: Seq[Expression]): Boolean = {
  // Count every Generator node found in any of the trees.
  val generatorCount = exprs.map(_.collect { case g: Generator => g }.size).sum
  generatorCount > 1
}
/** Whether `dt` contains a [[MapType]] anywhere in its nested structure. */
protected def hasMapType(dt: DataType): Boolean =
  dt.existsRecursively {
    case _: MapType => true
    case _ => false
  }
/** Finds a map-typed column for operators that require comparable columns
 *  (set operations and deduplication); returns None for other plans.
 */
protected def mapColumnInSetOperation(plan: LogicalPlan): Option[Attribute] = plan match {
  case _: Intersect | _: Except | _: Distinct =>
    plan.output.find(attr => hasMapType(attr.dataType))
  case dedup: Deduplicate =>
    dedup.keys.find(attr => hasMapType(attr.dataType))
  case _ =>
    None
}
/** Validates a LIMIT/TAIL-like expression: it must be a foldable (constant),
 *  integer-typed, non-null, non-negative value. `name` is only used in
 *  the error messages (e.g. "limit" or "tail").
 */
private def checkLimitLikeClause(name: String, limitExpr: Expression): Unit = {
  limitExpr match {
    // Non-foldable expressions cannot be evaluated at analysis time.
    case e if !e.foldable => failAnalysis(
      s"The $name expression must evaluate to a constant value, but got " +
        limitExpr.sql)
    // Only IntegerType is accepted (not long, string, etc.).
    case e if e.dataType != IntegerType => failAnalysis(
      s"The $name expression must be integer type, but got " +
        e.dataType.catalogString)
    case e =>
      // Foldable and integer-typed: evaluate now to check the actual value.
      e.eval() match {
        case null => failAnalysis(
          s"The evaluated $name expression must not be null, but got ${limitExpr.sql}")
        case v: Int if v < 0 => failAnalysis(
          s"The $name expression must be equal to or greater than 0, but got $v")
        case _ => // OK
      }
  }
}
def checkAnalysis(plan: LogicalPlan): Unit = {
// We transform up and order the rules so as to catch the first possible failure instead
// of the result of cascading resolution failures.
plan.foreachUp {
case p if p.analyzed => // Skip already analyzed sub-plans
case u: UnresolvedNamespace =>
u.failAnalysis(s"Namespace not found: ${u.multipartIdentifier.quoted}")
case u: UnresolvedTable =>
u.failAnalysis(s"Table not found: ${u.multipartIdentifier.quoted}")
case u: UnresolvedTableOrView =>
u.failAnalysis(s"Table or view not found: ${u.multipartIdentifier.quoted}")
case u: UnresolvedRelation =>
u.failAnalysis(s"Table or view not found: ${u.multipartIdentifier.quoted}")
case InsertIntoStatement(u: UnresolvedRelation, _, _, _, _) =>
failAnalysis(s"Table not found: ${u.multipartIdentifier.quoted}")
case u: UnresolvedV2Relation if isView(u.originalNameParts) =>
u.failAnalysis(
s"Invalid command: '${u.originalNameParts.quoted}' is a view not a table.")
case u: UnresolvedV2Relation =>
u.failAnalysis(s"Table not found: ${u.originalNameParts.quoted}")
case AlterTable(_, _, u: UnresolvedV2Relation, _) if isView(u.originalNameParts) =>
u.failAnalysis(
s"Invalid command: '${u.originalNameParts.quoted}' is a view not a table.")
case AlterTable(_, _, u: UnresolvedV2Relation, _) =>
failAnalysis(s"Table not found: ${u.originalNameParts.quoted}")
case operator: LogicalPlan =>
// Check argument data types of higher-order functions downwards first.
// If the arguments of the higher-order functions are resolved but the type check fails,
// the argument functions will not get resolved, but we should report the argument type
// check failure instead of claiming the argument functions are unresolved.
operator transformExpressionsDown {
case hof: HigherOrderFunction
if hof.argumentsResolved && hof.checkArgumentDataTypes().isFailure =>
hof.checkArgumentDataTypes() match {
case TypeCheckResult.TypeCheckFailure(message) =>
hof.failAnalysis(
s"cannot resolve '${hof.sql}' due to argument data type mismatch: $message")
}
}
operator transformExpressionsUp {
case a: Attribute if !a.resolved =>
val from = operator.inputSet.toSeq.map(_.qualifiedName).mkString(", ")
a.failAnalysis(s"cannot resolve '${a.sql}' given input columns: [$from]")
case e: Expression if e.checkInputDataTypes().isFailure =>
e.checkInputDataTypes() match {
case TypeCheckResult.TypeCheckFailure(message) =>
e.failAnalysis(
s"cannot resolve '${e.sql}' due to data type mismatch: $message")
}
case c: Cast if !c.resolved =>
failAnalysis(s"invalid cast from ${c.child.dataType.catalogString} to " +
c.dataType.catalogString)
case g: Grouping =>
failAnalysis("grouping() can only be used with GroupingSets/Cube/Rollup")
case g: GroupingID =>
failAnalysis("grouping_id() can only be used with GroupingSets/Cube/Rollup")
case w @ WindowExpression(AggregateExpression(_, _, true, _, _), _) =>
failAnalysis(s"Distinct window functions are not supported: $w")
case w @ WindowExpression(_: OffsetWindowFunction,
WindowSpecDefinition(_, order, frame: SpecifiedWindowFrame))
if order.isEmpty || !frame.isOffset =>
failAnalysis("An offset window function can only be evaluated in an ordered " +
s"row-based window frame with a single offset: $w")
case w @ WindowExpression(e, s) =>
// Only allow window functions with an aggregate expression or an offset window
// function or a Pandas window UDF.
e match {
case _: AggregateExpression | _: OffsetWindowFunction | _: AggregateWindowFunction =>
w
case f: PythonUDF if PythonUDF.isWindowPandasUDF(f) =>
w
case _ =>
failAnalysis(s"Expression '$e' not supported within a window function.")
}
case s: SubqueryExpression =>
checkSubqueryExpression(operator, s)
s
}
operator match {
case etw: EventTimeWatermark =>
etw.eventTime.dataType match {
case s: StructType
if s.find(_.name == "end").map(_.dataType) == Some(TimestampType) =>
case _: TimestampType =>
case _ =>
failAnalysis(
s"Event time must be defined on a window or a timestamp, but " +
s"${etw.eventTime.name} is of type ${etw.eventTime.dataType.catalogString}")
}
case f: Filter if f.condition.dataType != BooleanType =>
failAnalysis(
s"filter expression '${f.condition.sql}' " +
s"of type ${f.condition.dataType.catalogString} is not a boolean.")
case Filter(condition, _) if hasNullAwarePredicateWithinNot(condition) =>
failAnalysis("Null-aware predicate sub-queries cannot be used in nested " +
s"conditions: $condition")
case j @ Join(_, _, _, Some(condition), _) if condition.dataType != BooleanType =>
failAnalysis(
s"join condition '${condition.sql}' " +
s"of type ${condition.dataType.catalogString} is not a boolean.")
case Aggregate(groupingExprs, aggregateExprs, child) =>
def isAggregateExpression(expr: Expression): Boolean = {
expr.isInstanceOf[AggregateExpression] || PythonUDF.isGroupedAggPandasUDF(expr)
}
def checkValidAggregateExpression(expr: Expression): Unit = expr match {
case expr: Expression if isAggregateExpression(expr) =>
val aggFunction = expr match {
case agg: AggregateExpression => agg.aggregateFunction
case udf: PythonUDF => udf
}
aggFunction.children.foreach { child =>
child.foreach {
case expr: Expression if isAggregateExpression(expr) =>
failAnalysis(
s"It is not allowed to use an aggregate function in the argument of " +
s"another aggregate function. Please use the inner aggregate function " +
s"in a sub-query.")
case other => // OK
}
if (!child.deterministic) {
failAnalysis(
s"nondeterministic expression ${expr.sql} should not " +
s"appear in the arguments of an aggregate function.")
}
}
case e: Attribute if groupingExprs.isEmpty =>
// Collect all [[AggregateExpressions]]s.
val aggExprs = aggregateExprs.filter(_.collect {
case a: AggregateExpression => a
}.nonEmpty)
failAnalysis(
s"grouping expressions sequence is empty, " +
s"and '${e.sql}' is not an aggregate function. " +
s"Wrap '${aggExprs.map(_.sql).mkString("(", ", ", ")")}' in windowing " +
s"function(s) or wrap '${e.sql}' in first() (or first_value) " +
s"if you don't care which value you get."
)
case e: Attribute if !groupingExprs.exists(_.semanticEquals(e)) =>
failAnalysis(
s"expression '${e.sql}' is neither present in the group by, " +
s"nor is it an aggregate function. " +
"Add to group by or wrap in first() (or first_value) if you don't care " +
"which value you get.")
case e if groupingExprs.exists(_.semanticEquals(e)) => // OK
case e => e.children.foreach(checkValidAggregateExpression)
}
def checkValidGroupingExprs(expr: Expression): Unit = {
if (expr.find(_.isInstanceOf[AggregateExpression]).isDefined) {
failAnalysis(
"aggregate functions are not allowed in GROUP BY, but found " + expr.sql)
}
// Check if the data type of expr is orderable.
if (!RowOrdering.isOrderable(expr.dataType)) {
failAnalysis(
s"expression ${expr.sql} cannot be used as a grouping expression " +
s"because its data type ${expr.dataType.catalogString} is not an orderable " +
s"data type.")
}
if (!expr.deterministic) {
// This is just a sanity check, our analysis rule PullOutNondeterministic should
// already pull out those nondeterministic expressions and evaluate them in
// a Project node.
failAnalysis(s"nondeterministic expression ${expr.sql} should not " +
s"appear in grouping expression.")
}
}
groupingExprs.foreach(checkValidGroupingExprs)
aggregateExprs.foreach(checkValidAggregateExpression)
case CollectMetrics(name, metrics, _) =>
if (name == null || name.isEmpty) {
operator.failAnalysis(s"observed metrics should be named: $operator")
}
// Check if an expression is a valid metric. A metric must meet the following criteria:
// - Is not a window function;
// - Is not nested aggregate function;
// - Is not a distinct aggregate function;
// - Has only non-deterministic functions that are nested inside an aggregate function;
// - Has only attributes that are nested inside an aggregate function.
def checkMetric(s: Expression, e: Expression, seenAggregate: Boolean = false): Unit = {
e match {
case _: WindowExpression =>
e.failAnalysis(
"window expressions are not allowed in observed metrics, but found: " + s.sql)
case _ if !e.deterministic && !seenAggregate =>
e.failAnalysis(s"non-deterministic expression ${s.sql} can only be used " +
"as an argument to an aggregate function.")
case a: AggregateExpression if seenAggregate =>
e.failAnalysis(
"nested aggregates are not allowed in observed metrics, but found: " + s.sql)
case a: AggregateExpression if a.isDistinct =>
e.failAnalysis(
"distinct aggregates are not allowed in observed metrics, but found: " + s.sql)
case a: AggregateExpression if a.filter.isDefined =>
e.failAnalysis("aggregates with filter predicate are not allowed in " +
"observed metrics, but found: " + s.sql)
case _: Attribute if !seenAggregate =>
e.failAnalysis (s"attribute ${s.sql} can only be used as an argument to an " +
"aggregate function.")
case _: AggregateExpression =>
e.children.foreach(checkMetric (s, _, seenAggregate = true))
case _ =>
e.children.foreach(checkMetric (s, _, seenAggregate))
}
}
metrics.foreach(m => checkMetric(m, m))
case Sort(orders, _, _) =>
orders.foreach { order =>
if (!RowOrdering.isOrderable(order.dataType)) {
failAnalysis(
s"sorting is not supported for columns of type ${order.dataType.catalogString}")
}
}
case GlobalLimit(limitExpr, _) => checkLimitLikeClause("limit", limitExpr)
case LocalLimit(limitExpr, _) => checkLimitLikeClause("limit", limitExpr)
case Tail(limitExpr, _) => checkLimitLikeClause("tail", limitExpr)
case _: Union | _: SetOperation if operator.children.length > 1 =>
def dataTypes(plan: LogicalPlan): Seq[DataType] = plan.output.map(_.dataType)
def ordinalNumber(i: Int): String = i match {
case 0 => "first"
case 1 => "second"
case i => s"${i}th"
}
val ref = dataTypes(operator.children.head)
operator.children.tail.zipWithIndex.foreach { case (child, ti) =>
// Check the number of columns
if (child.output.length != ref.length) {
failAnalysis(
s"""
|${operator.nodeName} can only be performed on tables with the same number
|of columns, but the first table has ${ref.length} columns and
|the ${ordinalNumber(ti + 1)} table has ${child.output.length} columns
""".stripMargin.replace("\\n", " ").trim())
}
// Check if the data types match.
dataTypes(child).zip(ref).zipWithIndex.foreach { case ((dt1, dt2), ci) =>
// SPARK-18058: we shall not care about the nullability of columns
if (TypeCoercion.findWiderTypeForTwo(dt1.asNullable, dt2.asNullable).isEmpty) {
failAnalysis(
s"""
|${operator.nodeName} can only be performed on tables with the compatible
|column types. ${dt1.catalogString} <> ${dt2.catalogString} at the
|${ordinalNumber(ci)} column of the ${ordinalNumber(ti + 1)} table
""".stripMargin.replace("\\n", " ").trim())
}
}
}
case create: V2CreateTablePlan =>
val references = create.partitioning.flatMap(_.references).toSet
val badReferences = references.map(_.fieldNames).flatMap { column =>
create.tableSchema.findNestedField(column) match {
case Some(_) =>
None
case _ =>
Some(s"${column.quoted} is missing or is in a map or array")
}
}
if (badReferences.nonEmpty) {
failAnalysis(s"Invalid partitioning: ${badReferences.mkString(", ")}")
}
create.tableSchema.foreach(f => TypeUtils.failWithIntervalType(f.dataType))
case write: V2WriteCommand if write.resolved =>
write.query.schema.foreach(f => TypeUtils.failWithIntervalType(f.dataType))
// If the view output doesn't have the same number of columns neither with the child
// output, nor with the query column names, throw an AnalysisException.
// If the view's child output can't up cast to the view output,
// throw an AnalysisException, too.
case v @ View(desc, output, child) if child.resolved && !v.sameOutput(child) =>
val queryColumnNames = desc.viewQueryColumnNames
val queryOutput = if (queryColumnNames.nonEmpty) {
if (output.length != queryColumnNames.length) {
// If the view output doesn't have the same number of columns with the query column
// names, throw an AnalysisException.
throw new AnalysisException(
s"The view output ${output.mkString("[", ",", "]")} doesn't have the same" +
"number of columns with the query column names " +
s"${queryColumnNames.mkString("[", ",", "]")}")
}
val resolver = SQLConf.get.resolver
queryColumnNames.map { colName =>
child.output.find { attr =>
resolver(attr.name, colName)
}.getOrElse(throw new AnalysisException(
s"Attribute with name '$colName' is not found in " +
s"'${child.output.map(_.name).mkString("(", ",", ")")}'"))
}
} else {
child.output
}
output.zip(queryOutput).foreach {
case (attr, originAttr) if !attr.dataType.sameType(originAttr.dataType) =>
// The dataType of the output attributes may be not the same with that of the view
// output, so we should cast the attribute to the dataType of the view output
// attribute. Will throw an AnalysisException if the cast is not a up-cast.
if (!Cast.canUpCast(originAttr.dataType, attr.dataType)) {
throw new AnalysisException(s"Cannot up cast ${originAttr.sql} from " +
s"${originAttr.dataType.catalogString} to ${attr.dataType.catalogString} " +
"as it may truncate\\n")
}
case _ =>
}
case alter: AlterTable if alter.table.resolved =>
val table = alter.table
def findField(operation: String, fieldName: Array[String]): StructField = {
// include collections because structs nested in maps and arrays may be altered
val field = table.schema.findNestedField(fieldName, includeCollections = true)
if (field.isEmpty) {
alter.failAnalysis(
s"Cannot $operation missing field ${fieldName.quoted} in ${table.name} schema: " +
table.schema.treeString)
}
field.get._2
}
def positionArgumentExists(
position: ColumnPosition,
struct: StructType,
fieldsAdded: Seq[String]): Unit = {
position match {
case after: After =>
val allFields = struct.fieldNames ++ fieldsAdded
if (!allFields.contains(after.column())) {
alter.failAnalysis(s"Couldn't resolve positional argument $position amongst " +
s"${allFields.mkString("[", ", ", "]")}")
}
case _ =>
}
}
def findParentStruct(operation: String, fieldNames: Array[String]): StructType = {
val parent = fieldNames.init
val field = if (parent.nonEmpty) {
findField(operation, parent).dataType
} else {
table.schema
}
field match {
case s: StructType => s
case o => alter.failAnalysis(s"Cannot $operation ${fieldNames.quoted}, because " +
s"its parent is not a StructType. Found $o")
}
}
def checkColumnNotExists(
operation: String,
fieldNames: Array[String],
struct: StructType): Unit = {
if (struct.findNestedField(fieldNames, includeCollections = true).isDefined) {
alter.failAnalysis(s"Cannot $operation column, because ${fieldNames.quoted} " +
s"already exists in ${struct.treeString}")
}
}
val colsToDelete = mutable.Set.empty[Seq[String]]
// 'colsToAdd' keeps track of new columns being added. It stores a mapping from a parent
// name of fields to field names that belong to the parent. For example, if we add
// columns "a.b.c", "a.b.d", and "a.c", 'colsToAdd' will become
// Map(Seq("a", "b") -> Seq("c", "d"), Seq("a") -> Seq("c")).
val colsToAdd = mutable.Map.empty[Seq[String], Seq[String]]
alter.changes.foreach {
case add: AddColumn =>
// If a column to add is a part of columns to delete, we don't need to check
// if column already exists - applies to REPLACE COLUMNS scenario.
if (!colsToDelete.contains(add.fieldNames())) {
checkColumnNotExists("add", add.fieldNames(), table.schema)
}
val parent = findParentStruct("add", add.fieldNames())
val parentName = add.fieldNames().init
val fieldsAdded = colsToAdd.getOrElse(parentName, Nil)
positionArgumentExists(add.position(), parent, fieldsAdded)
TypeUtils.failWithIntervalType(add.dataType())
colsToAdd(parentName) = fieldsAdded :+ add.fieldNames().last
case update: UpdateColumnType =>
val field = findField("update", update.fieldNames)
val fieldName = update.fieldNames.quoted
update.newDataType match {
case _: StructType =>
alter.failAnalysis(s"Cannot update ${table.name} field $fieldName type: " +
s"update a struct by updating its fields")
case _: MapType =>
alter.failAnalysis(s"Cannot update ${table.name} field $fieldName type: " +
s"update a map by updating $fieldName.key or $fieldName.value")
case _: ArrayType =>
alter.failAnalysis(s"Cannot update ${table.name} field $fieldName type: " +
s"update the element by updating $fieldName.element")
case u: UserDefinedType[_] =>
alter.failAnalysis(s"Cannot update ${table.name} field $fieldName type: " +
s"update a UserDefinedType[${u.sql}] by updating its fields")
case _: CalendarIntervalType =>
alter.failAnalysis(s"Cannot update ${table.name} field $fieldName to " +
s"interval type")
case _ =>
// update is okay
}
if (!Cast.canUpCast(field.dataType, update.newDataType)) {
alter.failAnalysis(
s"Cannot update ${table.name} field $fieldName: " +
s"${field.dataType.simpleString} cannot be cast to " +
s"${update.newDataType.simpleString}")
}
case update: UpdateColumnNullability =>
val field = findField("update", update.fieldNames)
val fieldName = update.fieldNames.quoted
if (!update.nullable && field.nullable) {
alter.failAnalysis(
s"Cannot change nullable column to non-nullable: $fieldName")
}
case updatePos: UpdateColumnPosition =>
findField("update", updatePos.fieldNames)
val parent = findParentStruct("update", updatePos.fieldNames())
val parentName = updatePos.fieldNames().init
positionArgumentExists(
updatePos.position(),
parent,
colsToAdd.getOrElse(parentName, Nil))
case rename: RenameColumn =>
findField("rename", rename.fieldNames)
checkColumnNotExists(
"rename", rename.fieldNames().init :+ rename.newName(), table.schema)
case update: UpdateColumnComment =>
findField("update", update.fieldNames)
case delete: DeleteColumn =>
findField("delete", delete.fieldNames)
// REPLACE COLUMNS has deletes followed by adds. Remember the deleted columns
// so that add operations do not fail when the columns to add exist and they
// are to be deleted.
colsToDelete += delete.fieldNames
case _ =>
// no validation needed for set and remove property
}
case _ => // Fallbacks to the following checks
}
operator match {
case o if o.children.nonEmpty && o.missingInput.nonEmpty =>
val missingAttributes = o.missingInput.mkString(",")
val input = o.inputSet.mkString(",")
val msgForMissingAttributes = s"Resolved attribute(s) $missingAttributes missing " +
s"from $input in operator ${operator.simpleString(SQLConf.get.maxToStringFields)}."
val resolver = plan.conf.resolver
val attrsWithSameName = o.missingInput.filter { missing =>
o.inputSet.exists(input => resolver(missing.name, input.name))
}
val msg = if (attrsWithSameName.nonEmpty) {
val sameNames = attrsWithSameName.map(_.name).mkString(",")
s"$msgForMissingAttributes Attribute(s) with the same name appear in the " +
s"operation: $sameNames. Please check if the right attribute(s) are used."
} else {
msgForMissingAttributes
}
failAnalysis(msg)
case p @ Project(exprs, _) if containsMultipleGenerators(exprs) =>
failAnalysis(
s"""Only a single table generating function is allowed in a SELECT clause, found:
| ${exprs.map(_.sql).mkString(",")}""".stripMargin)
case j: Join if !j.duplicateResolved =>
val conflictingAttributes = j.left.outputSet.intersect(j.right.outputSet)
failAnalysis(
s"""
|Failure when resolving conflicting references in Join:
|$plan
|Conflicting attributes: ${conflictingAttributes.mkString(",")}
|""".stripMargin)
case i: Intersect if !i.duplicateResolved =>
val conflictingAttributes = i.left.outputSet.intersect(i.right.outputSet)
failAnalysis(
s"""
|Failure when resolving conflicting references in Intersect:
|$plan
|Conflicting attributes: ${conflictingAttributes.mkString(",")}
""".stripMargin)
case e: Except if !e.duplicateResolved =>
val conflictingAttributes = e.left.outputSet.intersect(e.right.outputSet)
failAnalysis(
s"""
|Failure when resolving conflicting references in Except:
|$plan
|Conflicting attributes: ${conflictingAttributes.mkString(",")}
""".stripMargin)
// TODO: although map type is not orderable, technically map type should be able to be
// used in equality comparison, remove this type check once we support it.
case o if mapColumnInSetOperation(o).isDefined =>
val mapCol = mapColumnInSetOperation(o).get
failAnalysis("Cannot have map type columns in DataFrame which calls " +
s"set operations(intersect, except, etc.), but the type of column ${mapCol.name} " +
"is " + mapCol.dataType.catalogString)
case o if o.expressions.exists(!_.deterministic) &&
!o.isInstanceOf[Project] && !o.isInstanceOf[Filter] &&
!o.isInstanceOf[Aggregate] && !o.isInstanceOf[Window] =>
// The rule above is used to check Aggregate operator.
failAnalysis(
s"""nondeterministic expressions are only allowed in
|Project, Filter, Aggregate or Window, found:
| ${o.expressions.map(_.sql).mkString(",")}
|in operator ${operator.simpleString(SQLConf.get.maxToStringFields)}
""".stripMargin)
case _: UnresolvedHint =>
throw new IllegalStateException(
"Internal error: logical hint operator should have been removed during analysis")
case f @ Filter(condition, _)
if PlanHelper.specialExpressionsInUnsupportedOperator(f).nonEmpty =>
val invalidExprSqls = PlanHelper.specialExpressionsInUnsupportedOperator(f).map(_.sql)
failAnalysis(
s"""
|Aggregate/Window/Generate expressions are not valid in where clause of the query.
|Expression in where clause: [${condition.sql}]
|Invalid expressions: [${invalidExprSqls.mkString(", ")}]""".stripMargin)
case other if PlanHelper.specialExpressionsInUnsupportedOperator(other).nonEmpty =>
val invalidExprSqls =
PlanHelper.specialExpressionsInUnsupportedOperator(other).map(_.sql)
failAnalysis(
s"""
|The query operator `${other.nodeName}` contains one or more unsupported
|expression types Aggregate, Window or Generate.
|Invalid expressions: [${invalidExprSqls.mkString(", ")}]""".stripMargin
)
case _ => // Analysis successful!
}
}
checkCollectedMetrics(plan)
extendedCheckRules.foreach(_(plan))
plan.foreachUp {
case o if !o.resolved =>
failAnalysis(s"unresolved operator ${o.simpleString(SQLConf.get.maxToStringFields)}")
case _ =>
}
plan.setAnalyzed()
}
/**
 * Validates subquery expressions in the plan. Upon failure, reports a user-facing
 * analysis error via `failAnalysis`.
 *
 * Checks that scalar subqueries return exactly one column, that correlated scalar
 * subqueries are aggregated (so they yield one row per outer row), and that subquery
 * expressions only appear under operators that are allowed to host them.
 */
private def checkSubqueryExpression(plan: LogicalPlan, expr: SubqueryExpression): Unit = {
def checkAggregateInScalarSubquery(
conditions: Seq[Expression],
query: LogicalPlan, agg: Aggregate): Unit = {
// Make sure correlated scalar subqueries contain one row for every outer row by
// enforcing that they are aggregates containing exactly one aggregate expression.
val aggregates = agg.expressions.flatMap(_.collect {
case a: AggregateExpression => a
})
if (aggregates.isEmpty) {
failAnalysis("The output of a correlated scalar subquery must be aggregated")
}
// SPARK-18504/SPARK-18814: Block cases where GROUP BY columns
// are not part of the correlated columns.
val groupByCols = AttributeSet(agg.groupingExpressions.flatMap(_.references))
// Collect the local references from the correlated predicate in the subquery.
val subqueryColumns = getCorrelatedPredicates(query).flatMap(_.references)
.filterNot(conditions.flatMap(_.references).contains)
val correlatedCols = AttributeSet(subqueryColumns)
val invalidCols = groupByCols -- correlatedCols
// GROUP BY columns must be a subset of columns in the predicates
if (invalidCols.nonEmpty) {
failAnalysis(
"A GROUP BY clause in a scalar correlated subquery " +
"cannot contain non-correlated columns: " +
invalidCols.mkString(","))
}
}
// Skip subquery aliases added by the Analyzer.
// For projects, do the necessary mapping and skip to its child.
def cleanQueryInScalarSubquery(p: LogicalPlan): LogicalPlan = p match {
case s: SubqueryAlias => cleanQueryInScalarSubquery(s.child)
case p: Project => cleanQueryInScalarSubquery(p.child)
case child => child
}
// Validate the subquery plan itself before checking how it is hosted.
checkAnalysis(expr.plan)
expr match {
case ScalarSubquery(query, conditions, _) =>
// Scalar subquery must return one column as output.
if (query.output.size != 1) {
failAnalysis(
s"Scalar subquery must return only one column, but got ${query.output.size}")
}
// A non-empty `conditions` list means the scalar subquery is correlated.
if (conditions.nonEmpty) {
cleanQueryInScalarSubquery(query) match {
case a: Aggregate => checkAggregateInScalarSubquery(conditions, query, a)
case Filter(_, a: Aggregate) => checkAggregateInScalarSubquery(conditions, query, a)
case fail => failAnalysis(s"Correlated scalar subqueries must be aggregated: $fail")
}
// Only certain operators are allowed to host subquery expression containing
// outer references.
plan match {
case _: Filter | _: Aggregate | _: Project | _: SupportsSubquery => // Ok
case other => failAnalysis(
"Correlated scalar sub-queries can only be used in a " +
s"Filter/Aggregate/Project and a few commands: $plan")
}
}
// All remaining subquery kinds (IN / EXISTS) are only supported under a
// limited set of host operators.
case inSubqueryOrExistsSubquery =>
plan match {
case _: Filter | _: SupportsSubquery | _: Join => // Ok
case _ =>
failAnalysis(s"IN/EXISTS predicate sub-queries can only be used in" +
s" Filter/Join and a few commands: $plan")
}
}
// Validate to make sure the correlations appearing in the query are valid and
// allowed by spark.
checkCorrelationsInSubquery(expr.plan)
}
/**
 * Validates that collected metric names are unique. The same name cannot be used for
 * metrics with different results. However, multiple instances of metrics with the same
 * result and name are allowed (e.g. self-joins).
 */
private def checkCollectedMetrics(plan: LogicalPlan): Unit = {
// Maps each observed metric name to the first CollectMetrics node seen with that name.
val metricsMap = mutable.Map.empty[String, LogicalPlan]
def check(plan: LogicalPlan): Unit = plan.foreach { node =>
node match {
case metrics @ CollectMetrics(name, _, _) =>
metricsMap.get(name) match {
case Some(other) =>
// Exact duplicates are allowed. They can be the result
// of a CTE that is used multiple times or a self join.
if (!metrics.sameResult(other)) {
failAnalysis(
s"Multiple definitions of observed metrics named '$name': $plan")
}
case None =>
metricsMap.put(name, metrics)
}
case _ =>
}
// Also recurse into subquery plans, which may contain CollectMetrics nodes of their own.
node.expressions.foreach(_.foreach {
case subquery: SubqueryExpression =>
check(subquery.plan)
case _ =>
})
}
check(plan)
}
/**
 * Validates that the outer references appearing inside the subquery are allowed,
 * i.e. that correlated expressions only occur under operators that are able to
 * host them, and that no correlation path crosses an unsupported operator.
 */
private def checkCorrelationsInSubquery(sub: LogicalPlan): Unit = {
// Validate that correlated aggregate expressions do not contain a mixture
// of outer and local references.
def checkMixedReferencesInsideAggregateExpr(expr: Expression): Unit = {
expr.foreach {
case a: AggregateExpression if containsOuter(a) =>
val outer = a.collect { case OuterReference(e) => e.toAttribute }
val local = a.references -- outer
if (local.nonEmpty) {
val msg =
s"""
|Found an aggregate expression in a correlated predicate that has both
|outer and local references, which is not supported yet.
|Aggregate expression: ${SubExprUtils.stripOuterReference(a).sql},
|Outer references: ${outer.map(_.sql).mkString(", ")},
|Local references: ${local.map(_.sql).mkString(", ")}.
""".stripMargin.replace("\\n", " ").trim()
failAnalysis(msg)
}
case _ =>
}
}
// Make sure a plan's subtree does not contain outer references
def failOnOuterReferenceInSubTree(p: LogicalPlan): Unit = {
if (hasOuterReferences(p)) {
failAnalysis(s"Accessing outer query column is not allowed in:\\n$p")
}
}
// Make sure a plan's expressions do not contain :
// 1. Aggregate expressions that have mixture of outer and local references.
// 2. Expressions containing outer references on plan nodes other than Filter.
def failOnInvalidOuterReference(p: LogicalPlan): Unit = {
p.expressions.foreach(checkMixedReferencesInsideAggregateExpr)
if (!p.isInstanceOf[Filter] && p.expressions.exists(containsOuter)) {
failAnalysis(
"Expressions referencing the outer query are not supported outside of WHERE/HAVING " +
s"clauses:\\n$p")
}
}
// SPARK-17348: A potential incorrect result case.
// When a correlated predicate is a non-equality predicate,
// certain operators are not permitted from the operator
// hosting the correlated predicate up to the operator on the outer table.
// Otherwise, the pull up of the correlated predicate
// will generate a plan with a different semantics
// which could return incorrect result.
// Currently we check for Aggregate and Window operators
//
// Below shows an example of a Logical Plan during Analyzer phase that
// show this problem. Pulling the correlated predicate [outer(c2#77) >= ..]
// through the Aggregate (or Window) operator could alter the result of
// the Aggregate.
//
// Project [c1#76]
// +- Project [c1#87, c2#88]
// : (Aggregate or Window operator)
// : +- Filter [outer(c2#77) >= c2#88)]
// : +- SubqueryAlias t2, `t2`
// : +- Project [_1#84 AS c1#87, _2#85 AS c2#88]
// : +- LocalRelation [_1#84, _2#85]
// +- SubqueryAlias t1, `t1`
// +- Project [_1#73 AS c1#76, _2#74 AS c2#77]
// +- LocalRelation [_1#73, _2#74]
def failOnNonEqualCorrelatedPredicate(found: Boolean, p: LogicalPlan): Unit = {
if (found) {
// Report a non-supported case as an exception
failAnalysis(s"Correlated column is not allowed in a non-equality predicate:\\n$p")
}
}
// Tracks whether a non-equality correlated predicate has been seen so far
// during the bottom-up traversal below.
var foundNonEqualCorrelatedPred: Boolean = false
// Simplify the predicates before validating any unsupported correlation patterns in the plan.
AnalysisHelper.allowInvokingTransformsInAnalyzer { BooleanSimplification(sub).foreachUp {
// Whitelist operators allowed in a correlated subquery
// There are 4 categories:
// 1. Operators that are allowed anywhere in a correlated subquery, and,
// by definition of the operators, they either do not contain
// any columns or cannot host outer references.
// 2. Operators that are allowed anywhere in a correlated subquery
// so long as they do not host outer references.
// 3. Operators that need special handlings. These operators are
// Filter, Join, Aggregate, and Generate.
//
// Any operators that are not in the above list are allowed
// in a correlated subquery only if they are not on a correlation path.
// In other words, these operators are allowed only under a correlation point.
//
// A correlation path is defined as the sub-tree of all the operators that
// are on the path from the operator hosting the correlated expressions
// up to the operator producing the correlated values.
// Category 1:
// ResolvedHint, Distinct, LeafNode, Repartition, and SubqueryAlias
case _: ResolvedHint | _: Distinct | _: LeafNode | _: Repartition | _: SubqueryAlias =>
// Category 2:
// These operators can be anywhere in a correlated subquery.
// so long as they do not host outer references in the operators.
case p: Project =>
failOnInvalidOuterReference(p)
case s: Sort =>
failOnInvalidOuterReference(s)
case r: RepartitionByExpression =>
failOnInvalidOuterReference(r)
// Category 3:
// Filter is one of the two operators allowed to host correlated expressions.
// The other operator is Join. Filter can be anywhere in a correlated subquery.
case f: Filter =>
val (correlated, _) = splitConjunctivePredicates(f.condition).partition(containsOuter)
// Find any non-equality correlated predicates
foundNonEqualCorrelatedPred = foundNonEqualCorrelatedPred || correlated.exists {
case _: EqualTo | _: EqualNullSafe => false
case _ => true
}
failOnInvalidOuterReference(f)
// Aggregate cannot host any correlated expressions
// It can be on a correlation path if the correlation contains
// only equality correlated predicates.
// It cannot be on a correlation path if the correlation has
// non-equality correlated predicates.
case a: Aggregate =>
failOnInvalidOuterReference(a)
failOnNonEqualCorrelatedPredicate(foundNonEqualCorrelatedPred, a)
// Join can host correlated expressions.
case j @ Join(left, right, joinType, _, _) =>
joinType match {
// Inner join, like Filter, can be anywhere.
case _: InnerLike =>
failOnInvalidOuterReference(j)
// Left outer join's right operand cannot be on a correlation path.
// LeftAnti and ExistenceJoin are special cases of LeftOuter.
// Note that ExistenceJoin cannot be expressed externally in both SQL and DataFrame
// so it should not show up here in Analysis phase. This is just a safety net.
//
// LeftSemi does not allow output from the right operand.
// Any correlated references in the subplan
// of the right operand cannot be pulled up.
case LeftOuter | LeftSemi | LeftAnti | ExistenceJoin(_) =>
failOnInvalidOuterReference(j)
failOnOuterReferenceInSubTree(right)
// Likewise, Right outer join's left operand cannot be on a correlation path.
case RightOuter =>
failOnInvalidOuterReference(j)
failOnOuterReferenceInSubTree(left)
// Any other join types not explicitly listed above,
// including Full outer join, are treated as Category 4.
case _ =>
failOnOuterReferenceInSubTree(j)
}
// Generator with join=true, i.e., expressed with
// LATERAL VIEW [OUTER], similar to inner join,
// allows to have correlation under it
// but must not host any outer references.
// Note:
// Generator with requiredChildOutput.isEmpty is treated as Category 4.
case g: Generate if g.requiredChildOutput.nonEmpty =>
failOnInvalidOuterReference(g)
// Category 4: Any other operators not in the above 3 categories
// cannot be on a correlation path, that is they are allowed only
// under a correlation point but they and their descendant operators
// are not allowed to have any correlated expressions.
case p =>
failOnOuterReferenceInSubTree(p)
}}
}
}
| goldmedal/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala | Scala | apache-2.0 | 45,926 |
/* __ *\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ **
** /____/\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\* */
package org.scalajs.testsuite.javalib.math
import java.math.BigInteger
import java.util.Arrays
import org.scalajs.jasminetest.JasmineTest
/**
 * Test suite for the constructors and factory methods of `java.math.BigInteger`.
 *
 * Covers construction from two's-complement byte arrays, from a sign plus
 * magnitude byte array, from `Long` values via `BigInteger.valueOf`, and from
 * decimal strings, as well as round-tripping through `toByteArray`.
 */
object BigIntegerTest extends JasmineTest {

  // Note: the described class lives in java.math (not java.lang.Math); the
  // typo'd suite/test descriptions of the previous revision are fixed here.
  describe("java.math.BigInteger Constructors") {

    it("should accept 3 as a Byte Array") {
      val bi = new BigInteger(Array[Byte](3))
      expect(bi.intValue()).toEqual(3)
    }

    it("should accept 127 as a Byte Array") {
      val bi = new BigInteger(Array[Byte](127))
      expect(bi.intValue()).toEqual(127)
    }

    it("should accept 3 as a Long") {
      val bi = BigInteger.valueOf(3L)
      expect(bi.intValue()).toEqual(3)
      expect(bi.longValue()).toEqual(3L)
    }

    it("should accept 999999999 as a Long") {
      val bi = BigInteger.valueOf(999999999L)
      expect(bi.intValue()).toEqual(999999999)
      expect(bi.longValue()).toEqual(999999999L)
    }

    // 9999999999 exceeds Int range, so only longValue is checked.
    it("should accept 9999999999 as a Long") {
      val bi = BigInteger.valueOf(9999999999L)
      expect(bi.longValue()).toEqual(9999999999L)
    }

    it("should accept -999999999 as a Long") {
      val bi = BigInteger.valueOf(-999999999L)
      expect(bi.intValue()).toEqual(-999999999)
      expect(bi.longValue()).toEqual(-999999999L)
    }

    it("should accept -9999999999 as a Long") {
      val bi = BigInteger.valueOf(-9999999999L)
      expect(bi.longValue()).toEqual(-9999999999L)
    }

    it("should accept 99 as a string") {
      val bi = new BigInteger("99")
      expect(bi.intValue()).toEqual(99)
      expect(bi.longValue()).toEqual(99L)
    }

    it("should accept 999999999 as a string") {
      val bi = new BigInteger("999999999")
      expect(bi.intValue()).toEqual(999999999)
      expect(bi.longValue()).toEqual(999999999L)
    }

    it("should accept 9999999999 as a string") {
      val bi = new BigInteger("9999999999")
      expect(bi.longValue()).toEqual(9999999999L)
    }

    it("should accept -99 as a string") {
      val bi = new BigInteger("-99")
      expect(bi.intValue()).toEqual(-99)
      expect(bi.longValue()).toEqual(-99L)
    }

    it("should accept -999999999 as a string") {
      val bi = new BigInteger("-999999999")
      expect(bi.intValue()).toEqual(-999999999)
      expect(bi.longValue()).toEqual(-999999999L)
    }

    it("should accept -9999999999 as a string") {
      val bi = new BigInteger("-9999999999")
      expect(bi.longValue()).toEqual(-9999999999L)
    }

    it("should initialise from byte array of Pos two's complement") {
      val eBytesSignum = Array[Byte](27, -15, 65, 39)
      val eBytes = Array[Byte](27, -15, 65, 39)
      val expSignum = new BigInteger(eBytesSignum)
      expect(Arrays.equals(eBytes, expSignum.toByteArray)).toBeTruthy
    }

    it("should initialise from byte array of Neg two's complement") {
      val eBytesSignum = Array[Byte](-27, -15, 65, 39)
      val eBytes = Array[Byte](-27, -15, 65, 39)
      val expSignum = new BigInteger(eBytesSignum)
      expect(Arrays.equals(eBytes, expSignum.toByteArray)).toBeTruthy
    }

    it("should initialise from Pos byte array with explicit sign") {
      val eBytes = Array[Byte](27, -15, 65, 39)
      val eSign = 1
      val exp = new BigInteger(eSign, eBytes)
      expect(Arrays.equals(eBytes, exp.toByteArray)).toBeTruthy
    }

    it("should initialise from Zero byte array with explicit sign") {
      val eBytes = Array[Byte](0, 0, 0, 0)
      val eSign = 0
      val exp = new BigInteger(eSign, eBytes)
      // Zero is canonically represented by toByteArray as a single zero byte.
      expect(Arrays.equals(Array[Byte](0), exp.toByteArray)).toBeTruthy
    }

    it("should initialise from Neg small byte array with explicit sign") {
      val eBytes = Array[Byte](27)
      val eSign = -1
      val eRes = Array[Byte](-27)
      val exp = new BigInteger(eSign, eBytes)
      expect(Arrays.equals(eRes, exp.toByteArray)).toBeTruthy
    }

    it("should initialise from Neg byte array with explicit sign") {
      val eBytes = Array[Byte](27, -15, 65, 39)
      val eSign = -1
      // Expected two's-complement representation of the negated magnitude.
      val eRes = Array[Byte](-28, 14, -66, -39)
      val exp = new BigInteger(eSign, eBytes)
      expect(Arrays.equals(eRes, exp.toByteArray)).toBeTruthy
    }

    it("should initialise both Pos byte arrays the same") {
      val eBytes = Array[Byte](27, -15, 65, 39)
      val eSign = 1
      val exp = new BigInteger(eSign, eBytes)
      val eBytesSignum = Array[Byte](27, -15, 65, 39)
      val expSignum = new BigInteger(eBytesSignum)
      expect(expSignum.compareTo(exp)).toEqual(0)
      expect(Arrays.equals(eBytes, exp.toByteArray)).toBeTruthy
      expect(Arrays.equals(eBytes, expSignum.toByteArray)).toBeTruthy
      expect(Arrays.equals(exp.toByteArray, expSignum.toByteArray)).toBeTruthy
    }

    it("should initialise both Neg byte arrays the same") {
      val eBytes = Array[Byte](27, -15, 65, 39)
      val eSign = -1
      val eRes = Array[Byte](-28, 14, -66, -39)
      val exp = new BigInteger(eSign, eBytes)
      val eBytesSignum = Array[Byte](-28, 14, -66, -39)
      val expSignum = new BigInteger(eBytesSignum)
      expect(expSignum.toString).toEqual(exp.toString)
      expect(Arrays.equals(eRes, exp.toByteArray)).toBeTruthy
      expect(Arrays.equals(eBytesSignum, expSignum.toByteArray)).toBeTruthy
      expect(Arrays.equals(exp.toByteArray, expSignum.toByteArray)).toBeTruthy
    }
  }
}
| jmnarloch/scala-js | test-suite/src/test/scala/org/scalajs/testsuite/javalib/math/BigIntegerTest.scala | Scala | bsd-3-clause | 5,792 |
package example
import org.scalatra.test.scalatest.ScalatraFlatSpec
import skinny.micro._
import scala.concurrent._
import scala.concurrent.duration._
/**
 * Exercises an AsyncWebApp under sequential and concurrent HTTP requests,
 * covering plain synchronous actions, Future-based actions, and content-type
 * handling when `contentType` is set before or inside a Future.
 */
class ConcurrencyTestSpec extends ScalatraFlatSpec {
// Execution context shared by the app's Futures and the client-side Futures below.
implicit val context = SkinnyMicroBase.defaultExecutionContext
addFilter(new AsyncWebApp {
// Completes asynchronously on the implicit execution context.
get("/async") { implicit ctx =>
Future {
"OK"
}
}
// Plain synchronous action for comparison.
get("/sync") { implicit ctx =>
"OK"
}
// Sets contentType inside the Future, after additional delays.
get("/content-type-1") { implicit ctx =>
Thread.sleep(50)
Future {
Thread.sleep(50)
contentType = "text/csv"
Thread.sleep(20)
"foo,bar,baz"
}
}
// Sets contentType before the Future is created.
get("/content-type-2") { implicit ctx =>
Thread.sleep(50)
contentType = "text/csv"
Future {
Thread.sleep(20)
"foo,bar,baz"
}
}
// Supplies the content type via the Ok(...) response instead of assigning contentType.
get("/content-type-3") { implicit ctx =>
Thread.sleep(50)
Future {
Thread.sleep(50)
Ok(body = "foo,bar,baz", contentType = Some("text/csv"))
}
}
}, "/*")
it should "work fine in sync mode for concurrent requests" in {
// Issue three requests in parallel and check every body once all complete.
val listOfFutureBodies = (1 to 3).map(_ => Future { get("/sync") { body } })
val fListOfBodies = Future.sequence(listOfFutureBodies)
Await.result(fListOfBodies, 3.seconds).foreach(_ should equal("OK"))
}
it should "work fine in async mode for sequential requests" in {
(1 to 10).foreach { _ =>
get("/async") { body should equal("OK") }
}
}
it should "work fine in async mode for concurrent requests" in {
val listOfFutureBodies = (1 to 3).map(_ => Future { get("/async") { body } })
val fListOfBodies = Future.sequence(listOfFutureBodies)
Await.result(fListOfBodies, 3.seconds).foreach(_ should equal("OK"))
}
it should "work fine in sync mode for concurrent requests 2" in {
val listOfFutureBodies = (1 to 3).map(_ => Future { get("/sync") { body } })
val fListOfBodies = Future.sequence(listOfFutureBodies)
Await.result(fListOfBodies, 3.seconds).foreach(_ should equal("OK"))
}
it should "work fine in async mode for sequential requests 2" in {
(1 to 10).foreach { _ =>
get("/async") { body should equal("OK") }
}
}
it should "set content-type in async mode 1" in {
// 5 rounds of 10 concurrent requests each; each round must finish within 3 seconds.
(1 to 5).foreach { _ =>
Await.result(Future.sequence((1 to 10).map { _ =>
Future {
get("/content-type-1") {
status should equal(200)
header("Content-Type") should equal("text/csv; charset=UTF-8")
body should equal("foo,bar,baz")
}
}
}), 3.seconds)
}
}
it should "set content-type in async mode 2" in {
(1 to 5).foreach { _ =>
Await.result(Future.sequence((1 to 10).map { _ =>
Future {
get("/content-type-2") {
status should equal(200)
header("Content-Type") should equal("text/csv; charset=UTF-8")
body should equal("foo,bar,baz")
}
}
}), 3.seconds)
}
}
it should "set content-type in async mode 3" in {
(1 to 5).foreach { _ =>
Await.result(Future.sequence((1 to 10).map { _ =>
Future {
get("/content-type-3") {
status should equal(200)
header("Content-Type") should equal("text/csv; charset=UTF-8")
body should equal("foo,bar,baz")
}
}
}), 3.seconds)
}
}
}
| xerial/skinny-micro | micro/src/test/scala/example/ConcurrencyTestSpec.scala | Scala | bsd-2-clause | 3,381 |
package scatch
// Provides a `zero` member that becomes available on arbitrary values via the
// implicit conversions defined in objects A and B.
sealed trait Identity {
def zero: Int = 0
}
object A {
// Implicitly wraps any value in a plain Identity, enabling `.zero` on it.
implicit def IdentityTo[A](x: A) = new Identity
}
object B {
// Same conversion as A.IdentityTo, but returns an anonymous Identity subclass
// carrying an extra `value` member.
implicit def IdentityTo[A](x: A) = new Identity {
val value = null
}
}
object Scratch {
// Each block imports a different implicit conversion, so `.zero` resolves
// through A.IdentityTo in the first block and B.IdentityTo in the second.
{
import A._
"A".zero
}
{
import B._
"B".zero
}
}
/*package scatch
sealed trait Identity {
def zero: Int = 0
}
object A {
implicit def IdentityTo[A](x: A) = new Identity
}
object B {
implicit def IdentityTo[A](x: A) = new Identity {
val value = null
}
}
object Scratch {
{
import A._
"A".zero
}
{
import B._
"B".zero
}
}*/ | ilinum/intellij-scala | testdata/optimize/implicits/ImplicitReference2.scala | Scala | apache-2.0 | 619 |
package org.nisshiee.chatwork_slack_relay.domain.slack
import monocle.macros.Lenses
/**
 * Author information in the Slack domain model: a display name, a link and an
 * icon reference (presumably URLs for the link/icon — TODO confirm against callers).
 *
 * The `@Lenses` macro annotation generates a Monocle lens per field in the
 * companion object.
 */
@Lenses case class Author(
name: String,
link: String,
icon: String)
| nisshiee/chatwork-slack-relay | domain/src/main/scala/entities/slack/Author.scala | Scala | mit | 161 |
package mesosphere.marathon.core.leadership
import scala.concurrent.Future
/** Coordinates the start/stop lifecycle of leadership-dependent actors. */
trait LeadershipCoordinator {
/** Prepare for starting. After the Future completes, all actors are ready to receive messages. */
def prepareForStart(): Future[Unit]
/** Stops the coordinator. */
def stop(): Unit
}
| sepiroth887/marathon | src/main/scala/mesosphere/marathon/core/leadership/LeadershipCoordinator.scala | Scala | apache-2.0 | 267 |
package controllers
import scala.concurrent.Future
import play.api.mvc._
import play.api.data._
import play.api.data.Forms._
import play.api.Play.current
import play.api.i18n.Messages.Implicits._
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import views._
import model.PIMAidDBContext._
import model.PIMAidDBContext.driver.api._
object MedicationProductGenericTypesController extends Controller {
// Form binding a (genericTypeId, medicationProductId) pair. Each long number is
// converted to/from its typed ID wrapper in both directions of the binding.
val genericTypeMedicationProductForm = Form(
tuple(
"genericTypeId" -> longNumber.transform(
(id: Long) => GenericTypeID(id),
(genericTypeId: GenericTypeID) => genericTypeId.value
),
"medicationProductId" -> longNumber.transform(
(id: Long) => MedicationProductID(id),
(medicationProductId: MedicationProductID) => medicationProductId.value
)
)
)
def list(medicationProductId: MedicationProductID) = Action.async { implicit rs =>
db.run(for {
productOption <- MedicationProduct.one(medicationProductId).include(MedicationProduct.genericTypes).result
genericTypes <- GenericType.all.result
} yield productOption match {
case Some(medicationProduct) =>
Ok(html.medicationProductGenericTypes.list(medicationProduct, genericTypes, genericTypeMedicationProductForm))
case _ =>
NotFound
})
}
def save(medicationProductId: MedicationProductID) = Action.async { implicit rs =>
db.run(MedicationProduct.one(medicationProductId).include(MedicationProduct.genericTypes).result).flatMap {
case Some(medicationProduct) =>
genericTypeMedicationProductForm.bindFromRequest.fold(
formWithErrors =>
db.run(GenericType.all.result).map { genericTypes =>
BadRequest(html.medicationProductGenericTypes.list(medicationProduct, genericTypes, formWithErrors))
},
genericTypeMedicationProduct =>
db.run(TableQuery[GenericTypesMedicationProducts] += genericTypeMedicationProduct).map { _=>
Redirect(routes.MedicationProductGenericTypesController.list(medicationProductId))
.flashing("success" -> "The generic type was successfully added to the medication product.")
}
)
case _ =>
Future.successful(NotFound)
}
}
def remove(medicationProductId: MedicationProductID, id: GenericTypeID) = Action.async { implicit rs =>
db.run(MedicationProduct.one(medicationProductId).result).flatMap {
case Some(medicationProduct) =>
db.run(GenericType.one(id).result).map {
case Some(genericType) =>
Ok(html.medicationProductGenericTypes.remove(medicationProduct, genericType))
case _ =>
NotFound
}
case _ => Future.successful(NotFound)
}
}
def delete(medicationProductId: MedicationProductID, id: GenericTypeID) = Action.async { implicit rs =>
val action = TableQuery[GenericTypesMedicationProducts]
.filter(x => x.genericTypeId === id && x.medicationProductId === medicationProductId)
.delete
db.run(action).map { _ =>
Redirect(routes.MedicationProductGenericTypesController.list(medicationProductId))
.flashing("success" -> "The generic type was succesfully removed from the medication product.")
}
}
}
| RSSchermer/pim-aid | app/controllers/MedicationProductGenericTypesController.scala | Scala | mit | 3,301 |
package android
import Keys._
import sbt._
import sbt.Keys.onLoad
/** Auto-plugin entry point: wires the Android commands in on load and warns
  * about non-Android dependencies of Android projects that do not set
  * `exportJars := true`.
  */
object AndroidPlugin extends AutoPlugin {
  override def trigger = allRequirements
  override def requires = plugins.JvmPlugin

  val autoImport = android.Keys

  override def buildSettings = Plugin.androidCommands

  override def globalSettings = (onLoad := onLoad.value andThen { s =>
    val e = Project.extract(s)

    // Projects defining `projectLayout` are treated as Android projects.
    val androids = e.structure.allProjects map (p => ProjectRef(e.structure.root, p.id)) filter {
      ref => e.getOpt(projectLayout in ref).isDefined
    }
    val androidIds = androids.map(_.project).toSet

    // Transitively collects non-Android dependencies that do not export their jars.
    def checkForExport(p: ProjectRef): Seq[ProjectRef] = {
      Project.getProject(p, e.structure).toSeq flatMap { resolved =>
        val deps = resolved.dependencies map (_.project)
        // Fix: the inner parameter used to be named `prj`, shadowing the outer
        // lambda's `prj` with a different type; renamed to `dep` for clarity.
        val nonAndroid = deps filterNot (dep => androidIds(dep.project))
        (deps flatMap checkForExport) ++ (nonAndroid filterNot (d => e.getOpt(sbt.Keys.exportJars in d) exists (_ == true)))
      }
    }

    androids flatMap { p =>
      checkForExport(p)
    } foreach { unexported =>
      s.log.warn(s"${unexported.project} is an Android dependency but does not specify `exportJars := true`")
    }

    // Run the update check once (against the first Android project), then the
    // layout detector against each one, threading the sbt State through.
    val s2 = androids.headOption.fold(s)(a =>
      e.runTask(updateCheck in a, s)._1
    )
    androids.foldLeft(s2) { (s, ref) =>
      e.runTask(antLayoutDetector in ref, s)._1
    }
  }) :: Nil
}
| wpc009/android-sdk-plugin | src/autoPlugin.scala | Scala | bsd-3-clause | 1,386 |
/*^
===========================================================================
TwoBinManager
===========================================================================
Copyright (C) 2016-2017 Gianluca Costa
===========================================================================
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program. If not, see
<http://www.gnu.org/licenses/gpl-3.0.html>.
===========================================================================
*/
package info.gianlucacosta.twobinmanager.generators.spectrum.algorithm
import info.gianlucacosta.twobinpack.core.{BlockDimension, BlockPool}
import scala.util.Random
/**
* Spectrum generator algorithm.
*
* The name come from its algorithm, iterating over a quantized spectrum of block dimensions:
*
* <ol>
* <li>
* Generate a quantized grid whose X axis is [minBlockDimension.width .. maxBlockDimension.width]
* and whose Y axis is [minBlockDimension.height .. maxBlockDimension.height]
* </li>
*
* <li>
* For every cell in the grid, choose a random quantity, in the given quantityRange,
* for the block having the current width and height
* </li>
*
* <li>
* Generate a first block pool, gathering the blocks
* </li>
*
* <li>
* If rotation is enabled, grid symmetry makes horizontal blocks and their vertical
* counterparts sum up, possibly violating the quantity constraint: in such case,
* generate a new random quantity for the overall block pair
* </li>
* </ol>
*/
object SpectrumAlgorithm {

  /** Draws a random quantity for every block dimension inside the given bounds
    * and assembles the resulting [[BlockPool]] (see the class comment above
    * for the full algorithm description).
    */
  def createRandomPool(
    minBlockDimension: BlockDimension,
    maxBlockDimension: BlockDimension,
    quantityRange: Range,
    canRotateBlocks: Boolean
  ): BlockPool = {
    require(
      minBlockDimension.width <= maxBlockDimension.width,
      "It must be: minimum block width <= maximum block width"
    )
    require(
      minBlockDimension.height <= maxBlockDimension.height,
      "It must be: minimum block height <= maximum block height"
    )
    require(
      quantityRange.start >= 0,
      "The minimum quantity must be >= 0"
    )
    require(
      quantityRange.start <= quantityRange.end,
      "It must be: minimum quantity <= maximum quantity"
    )

    // One uniformly random quantity per (width, height) cell of the spectrum
    // grid; cells that drew a zero quantity are dropped afterwards.
    val cellQuantities =
      for {
        blockWidth <- minBlockDimension.width to maxBlockDimension.width
        blockHeight <- minBlockDimension.height to maxBlockDimension.height
      } yield BlockDimension(blockWidth, blockHeight) -> getRandomQuantity(quantityRange)

    val partiallyBoundedBlocks: Map[BlockDimension, Int] =
      cellQuantities.toMap.filter { case (_, quantity) => quantity > 0 }

    val partiallyBoundedPool =
      BlockPool.create(
        canRotateBlocks,
        partiallyBoundedBlocks
      )

    if (!canRotateBlocks)
      partiallyBoundedPool
    else {
      // With rotation enabled the pool merges each block with its rotated twin,
      // so merged quantities may exceed the configured maximum; redraw a fresh
      // random quantity for any such entry.
      val boundedBlocks: Map[BlockDimension, Int] =
        partiallyBoundedPool.blocks.map {
          case (blockDimension, quantity) if quantity > quantityRange.end =>
            blockDimension -> getRandomQuantity(quantityRange)
          case unchanged =>
            unchanged
        }

      BlockPool.create(
        canRotateBlocks,
        boundedBlocks
      )
    }
  }

  /** Uniformly distributed random value in [quantityRange.start, quantityRange.end]. */
  private def getRandomQuantity(quantityRange: Range): Int = {
    val spanSize = quantityRange.end - quantityRange.start + 1
    quantityRange.start + Random.nextInt(spanSize)
  }
}
| giancosta86/TwoBinManager | src/main/scala/info/gianlucacosta/twobinmanager/generators/spectrum/algorithm/SpectrumAlgorithm.scala | Scala | gpl-3.0 | 4,321 |
package com.nyavro.manythanks.ws.mark
import spray.json.DefaultJsonProtocol
/** Spray-JSON (de)serialization support for the Mark entity. */
trait MarkFormat extends DefaultJsonProtocol {
  // Format derived from Mark's three-argument case-class constructor.
  implicit val format = jsonFormat3(Mark)
}
| nyavro/manythanks | webService/src/main/scala/com/nyavro/manythanks/ws/mark/MarkFormat.scala | Scala | apache-2.0 | 169 |
package sbt
package std
import scala.annotation.tailrec
import scala.reflect.macros._
import sbt.util.OptJsonWriter
/** Macro implementations behind sbt's `settingKey` / `taskKey` / `inputKey`
  * helpers.  Each macro materialises a key whose name is the name of the `val`
  * the macro call is assigned to (hence the "must be assigned to a val" error).
  */
private[sbt] object KeyMacro {
  /** Expands to `SettingKey[T](<enclosing val name>, description)`. */
  def settingKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
      description: c.Expr[String]): c.Expr[SettingKey[T]] =
    keyImpl2[T, SettingKey[T]](c) { (name, mf, ojw) =>
      c.universe.reify { SettingKey[T](name.splice, description.splice)(mf.splice, ojw.splice) }
    }
  /** Expands to `TaskKey[T](<enclosing val name>, description)`. */
  def taskKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
      description: c.Expr[String]): c.Expr[TaskKey[T]] =
    keyImpl[T, TaskKey[T]](c) { (name, mf) =>
      c.universe.reify { TaskKey[T](name.splice, description.splice)(mf.splice) }
    }
  /** Expands to `InputKey[T](<enclosing val name>, description)`. */
  def inputKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
      description: c.Expr[String]): c.Expr[InputKey[T]] =
    keyImpl[T, InputKey[T]](c) { (name, mf) =>
      c.universe.reify { InputKey[T](name.splice, description.splice)(mf.splice) }
    }
  /** Builds a key expression from the enclosing-val name and the summoned Manifest. */
  def keyImpl[T: c.WeakTypeTag, S: c.WeakTypeTag](c: blackbox.Context)(
      f: (c.Expr[String], c.Expr[Manifest[T]]) => c.Expr[S]
  ): c.Expr[S] =
    f(getName(c), getImplicit[Manifest[T]](c))
  /** Like [[keyImpl]], additionally threading an implicit OptJsonWriter instance. */
  private def keyImpl2[T: c.WeakTypeTag, S: c.WeakTypeTag](c: blackbox.Context)(
      f: (c.Expr[String], c.Expr[Manifest[T]], c.Expr[OptJsonWriter[T]]) => c.Expr[S]
  ): c.Expr[S] =
    f(getName(c), getImplicit[Manifest[T]](c), getImplicit[OptJsonWriter[T]](c))
  /** Name of the val the macro call is assigned to, as a String-literal expression;
    * reports a compile error (via definingValName) when there is no such val.
    */
  private def getName[S: c.WeakTypeTag, T: c.WeakTypeTag](c: blackbox.Context): c.Expr[String] = {
    import c.universe._
    val enclosingValName = definingValName(
      c,
      methodName =>
        s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`.""")
    c.Expr[String](Literal(Constant(enclosingValName)))
  }
  /** Summons an implicit value of type T at the macro call site. */
  private def getImplicit[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[T] = {
    import c.universe._
    c.Expr[T](c.inferImplicitValue(weakTypeOf[T]))
  }
  /** Walks the enclosing typer trees looking for the val (or lazy val) the macro
    * result is assigned to; emits `invalidEnclosingTree` as a compile error and
    * returns a sentinel when none is found.
    */
  def definingValName(c: blackbox.Context, invalidEnclosingTree: String => String): String = {
    import c.universe.{ Apply => ApplyTree, _ }
    val methodName = c.macroApplication.symbol.name
    def processName(n: Name): String =
      n.decodedName.toString.trim // trim is not strictly correct, but macros don't expose the API necessary
    @tailrec def enclosingVal(trees: List[c.Tree]): String = {
      trees match {
        case vd @ ValDef(_, name, _, _) :: ts => processName(name)
        case (_: ApplyTree | _: Select | _: TypeApply) :: xs => enclosingVal(xs)
        // lazy val x: X = <methodName> has this form for some reason (only when the explicit type is present, though)
        case Block(_, _) :: DefDef(mods, name, _, _, _, _) :: xs if mods.hasFlag(Flag.LAZY) =>
          processName(name)
        case _ =>
          c.error(c.enclosingPosition, invalidEnclosingTree(methodName.decodedName.toString))
          "<error>"
      }
    }
    enclosingVal(enclosingTrees(c).toList)
  }
  /** Chain of enclosing typer-context trees at the macro call site (uses an
    * internal compiler API, hence the cast).
    */
  def enclosingTrees(c: blackbox.Context): Seq[c.Tree] =
    c.asInstanceOf[reflect.macros.runtime.Context]
      .callsiteTyper
      .context
      .enclosingContextChain
      .map(_.tree.asInstanceOf[c.Tree])
}
| Duhemm/sbt | main-settings/src/main/scala/sbt/std/KeyMacro.scala | Scala | bsd-3-clause | 3,167 |
package zzb.srvbox
import akka.actor.{Props, ActorSystem}
import zzb.rest._
import akka.routing.{RoundRobinPool, FromConfig}
import com.typesafe.config.{ConfigFactory, Config}
import com.typesafe.scalalogging.slf4j.Logging
import com.typesafe.config.ConfigException.NotResolved
import scala.collection.mutable
import zzb.config.EnvConfigLoader
import scala.concurrent.Future
import akka.util.Timeout
import scala.concurrent.duration._
import zzb.srvbox.SrvManageProtocol._
import akka.pattern._
/**
* Created by Simon on 2014/5/28
*/
/** Builds the service box: registers every configured service with the
  * [[BoxActor]] and then starts them strictly one after another.
  *
  * Created by Simon on 2014/5/28
  */
class BoxBuilder(system: ActorSystem, config: Config) extends Logging {

  val boxActor = system.actorOf(Props[BoxActor], name = "boxActor")

  /** Registers all services, starts them in order, and returns the collected
    * [[ServiceStatus]] replies.
    */
  private[srvbox] def startBoxedServices(services: Seq[ServiceOption]): Future[List[ServiceStatus]] = {
    import system.dispatcher

    def start(name: String)(implicit timeout: Timeout) = (boxActor ? RequestStart(name)).mapTo[ServiceStatus]

    // Starts the services sequentially: the next service is only launched after
    // the previous one has reported its status. (Translated from the original
    // Chinese comment.)
    def startServices(srvList: List[ServiceOption]): Future[List[ServiceStatus]] = {
      srvList match {
        // Robustness fix: an empty list now completes with an empty result
        // instead of throwing via the former `require(srvList.length > 0)`.
        case Nil => Future.successful(Nil)
        case s :: Nil =>
          logger.info(s"Start service: ${s.name} ")
          // Equivalent to the original `Future.sequence(List(...))` on one element.
          start(s.name)(s.startTimeout seconds).map(List(_))
        case s :: tail =>
          logger.info(s"Start service: ${s.name} ")
          for {
            status0 <- start(s.name)(s.startTimeout seconds)
            statusTail <- startServices(tail)
          } yield status0 :: statusTail
      }
    }

    for (s <- services) {
      logger.info(s"Register service : ${s.name}")
      boxActor ! Register(s.name, s.className, s.shareSystem == 1)
    }
    startServices(services.toList)
  }
}
object BoxBuilder extends EnvConfigLoader with Logging {
  /** Parses the command line (optional main config file name followed by the
    * names of the services to run), loads and merges the configuration, and
    * returns the updated main config together with the selected services'
    * options. (Comments translated from the original Chinese.)
    */
  private[srvbox] def getSelectService(args: List[String]) = {
    var mainConfigFile = "srvbox"
    if (args != Nil) {
      mainConfigFile = args.head.replace(".conf", "")
    }
    val selectService = if (args != Nil) args.tail else Nil
    var config = loadConfig(mainConfigFile).getOrElse(
      throw new NotResolved(s" Not found config file '$mainConfigFile.conf' ")
    ).resolve()
    // Configuration sanity checks
    val servicesOpts = mutable.Map[String, ServiceOption]()
    val boxConfig = config.getConfig("services")
    // Load the default-value configuration; the defaults file ships inside the resource bundle
    val defaultServiceConfig = ConfigFactory.load("defaultBoxedService.conf").getConfig("service")
    import scala.collection.JavaConversions._
    val configedServices = boxConfig.getStringList("names").toList
    // When service names were passed on the command line, run only those; otherwise run all configured services
    val exeService =
      if (selectService != Nil) {
        configedServices.filter(selectService.contains(_))
      } else configedServices
    for (serviceName ← exeService) {
      val serviceConfig = boxConfig.getConfig(serviceName).withFallback(defaultServiceConfig) // service section from the main config
      val serviceOwnConfig = loadConfig(serviceName, withSysConfig = false).getOrElse(serviceConfig) // the service's own config file; falls back to the main config's section
      val mergedServiceConfig = serviceOwnConfig.withFallback(serviceConfig).resolve() // merge both into the final service config; the standalone file takes precedence
      config = config.withValue(s"services.$serviceName", mergedServiceConfig.root()) // write the merged config back into the main config
      servicesOpts(serviceName) = ServiceOption(serviceName,
        serviceConfig.getInt("init-start"),
        serviceConfig.getInt("share-actorSystem"),
        serviceConfig.getString("serviceClass"),
        mergedServiceConfig,
        serviceConfig.getInt("start-timeout"),
        if (serviceConfig.hasPath("depend-on")) serviceConfig.getStringList("depend-on").toList else Nil
      )
    }
    // Fail fast if a configured service class cannot be found on the classpath
    servicesOpts.values.foreach(opt ⇒ Class.forName(opt.className))
    (config, servicesOpts.values.toList)
  }
}
/** Per-service options parsed from configuration: `initStart` (from "init-start";
  * presumably 1 = start at boot -- confirm), `shareSystem` (1 = share the box
  * ActorSystem, see the Register call in BoxBuilder), the fully qualified
  * service class name, the merged service config, the start timeout in seconds,
  * and the names of services this one depends on.
  */
case class ServiceOption(name: String, initStart: Int, shareSystem: Int, className: String, config: Config, startTimeout: Int, dependOn: List[String] = Nil)
| xiefeifeihu/zzb | zzb-box/src/main/scala/zzb/srvbox/BoxBuilder.scala | Scala | mit | 4,132 |
package uk.org.nbn.nbnv.importer.spatial
import uk.org.nbn.nbnv.importer.testing.BaseFunSuite
import org.mockito.Mockito._
/** Unit tests for BritishGridSquareInfo.
  *
  * Fix applied to all WKT tests: they previously evaluated
  * `x matches regex` and discarded the Boolean result, so they could never
  * fail; the result is now asserted with `should be (true)`.
  */
class BritishGridSquareInfoSuite extends BaseFunSuite {

  val knownGridRef_1m = "NN1663471237"
  val knownGridRef_10m = "NN16637123"
  val knownGridRef_100m = "NN166712"
  val knownGridRef_1000m = "NN1671"
  val knownGridRef_2000m = "NN17Q"
  val knownGridRef_10000m = "NN17"

  test("should identify projection as OSGB36") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m)
    bgr.projection should be ("OSGB36")
  }

  test("should identify EPSG code as 27700") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m)
    bgr.epsgCode should be (27700)
  }

  test("should output an unblurred grid referce") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m)
    bgr.gridReference should be (knownGridRef_100m)
  }

  test("1m grid ref should be blured to 100m grid automatically") {
    val bgr = BritishGridSquareInfo(knownGridRef_1m)
    bgr.gridReference should be (knownGridRef_100m)
    bgr.gridReferencePrecision should be (100)
  }

  test("10m grid ref should be blured to 100m grid automatically") {
    val bgr = BritishGridSquareInfo(knownGridRef_10m)
    bgr.gridReference should be (knownGridRef_100m)
    bgr.gridReferencePrecision should be (100)
  }

  test("100m grid ref should have precision = 100") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m)
    bgr.gridReferencePrecision should be (100)
  }

  test("1000m grid ref should have precision = 1000") {
    val bgr = BritishGridSquareInfo(knownGridRef_1000m)
    bgr.gridReferencePrecision should be (1000)
  }

  test("2000m DINTY grid ref should have precision = 2000") {
    val bgr = BritishGridSquareInfo(knownGridRef_2000m)
    bgr.gridReferencePrecision should be (2000)
  }

  test("10000m grid ref should have precision = 10000") {
    val bgr = BritishGridSquareInfo(knownGridRef_10000m)
    bgr.gridReferencePrecision should be (10000)
  }

  test("should blur 100m grid ref to 1000m") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m, Some(1000))
    bgr.gridReference should be (knownGridRef_1000m)
    bgr.gridReferencePrecision should be(1000)
  }

  test("should blur 100m grid ref to 2000m") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m, Some(2000))
    bgr.gridReference should be (knownGridRef_2000m)
    bgr.gridReferencePrecision should be (2000)
  }

  test("should blur 100m grid ref to 10000m") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m, Some(10000))
    bgr.gridReference should be (knownGridRef_10000m)
    bgr.gridReferencePrecision should be (10000)
  }

  test("should blur DINTY grid ref to 10000m") {
    val bgr = BritishGridSquareInfo(knownGridRef_2000m, Some(10000))
    bgr.gridReference should be (knownGridRef_10000m)
    bgr.gridReferencePrecision should be (10000)
  }

  test("should normailise precision of 30 to 100m") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m, Some(30))
    bgr.gridReferencePrecision should be (100)
    bgr.gridReference should be (knownGridRef_100m)
  }

  test("should normailise precision of 150 to 1000m") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m, Some(150))
    bgr.gridReferencePrecision should be (1000)
    bgr.gridReference should be (knownGridRef_1000m)
  }

  test("should normailise precision of 1200 to 2000m") {
    val bgr = BritishGridSquareInfo(knownGridRef_1000m, Some(1200))
    bgr.gridReferencePrecision should be (2000)
    bgr.gridReference should be (knownGridRef_2000m)
  }

  test("should normailise precision of 8000 to 10000m") {
    val bgr = BritishGridSquareInfo(knownGridRef_2000m, Some(8000))
    bgr.gridReferencePrecision should be (10000)
    bgr.gridReference should be (knownGridRef_10000m)
  }

  test("should give WKT for 100m grid square in WGS84") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m)
    // Was `bgr.wgs84Polygon matches (...)` with the Boolean result discarded.
    (bgr.wgs84Polygon matches TestResources.polygonWKTRegex) should be (true)
  }

  test("should give WKT for 1000m grid square in WGS84") {
    val bgr = BritishGridSquareInfo(knownGridRef_1000m)
    (bgr.wgs84Polygon matches TestResources.polygonWKTRegex) should be (true)
  }

  test("should give WKT for 2000m grid square in WGS84") {
    val bgr = BritishGridSquareInfo(knownGridRef_2000m)
    (bgr.wgs84Polygon matches TestResources.polygonWKTRegex) should be (true)
  }

  test("should give WKT for 10000m grid square in WGS84") {
    val bgr = BritishGridSquareInfo(knownGridRef_10000m)
    (bgr.wgs84Polygon matches TestResources.polygonWKTRegex) should be (true)
  }

  test("should give 1000m grid square as parent of 100m") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m)

    bgr.getParentGridSquareInfo match {
      case Some(parent) => {
        parent.gridReference should be (knownGridRef_1000m)
        parent.gridReferencePrecision should be (1000)
      }
      case None => fail("no parent grid reference")
    }
  }

  test("should give 2000m grid square as parent of 1000m") {
    val bgr = BritishGridSquareInfo(knownGridRef_1000m)

    bgr.getParentGridSquareInfo match {
      case Some(parent) => {
        parent.gridReference should be (knownGridRef_2000m)
        parent.gridReferencePrecision should be (2000)
      }
      case None => fail("no parent grid reference")
    }
  }

  test("should give 10000m grid square as parent of 2000m") {
    val bgr = BritishGridSquareInfo(knownGridRef_2000m)

    bgr.getParentGridSquareInfo match {
      case Some(parent) => {
        parent.gridReference should be (knownGridRef_10000m)
        parent.gridReferencePrecision should be (10000)
      }
      case None => fail("no parent grid reference")
    }
  }

  test("should be no parent of 10000m grid square") {
    val bgr = BritishGridSquareInfo(knownGridRef_10000m)
    bgr.getParentGridSquareInfo should be (None)
  }

  test("should compute 2000m grid ref from 100m grid ref") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m)
    val lowerBgr = bgr.getLowerPrecisionGridSquareInfo(2000)

    lowerBgr should not be (null)
    lowerBgr.gridReference should be (knownGridRef_2000m)
    lowerBgr.gridReferencePrecision should be (2000)
  }

  test("should compute 10000m grid ref from 1000m grid ref") {
    val bgr = BritishGridSquareInfo(knownGridRef_1000m)
    val lowerBgr = bgr.getLowerPrecisionGridSquareInfo(10000)

    lowerBgr should not be (null)
    lowerBgr.gridReference should be (knownGridRef_10000m)
    lowerBgr.gridReferencePrecision should be (10000)
  }

  test("should return same grid square if requested precision is lower") {
    val bgr = BritishGridSquareInfo(knownGridRef_1000m)
    val lowerBgr = bgr.getLowerPrecisionGridSquareInfo(100)

    lowerBgr should be (bgr)
  }

  test("should return same grid square if requested precision is the same") {
    val bgr = BritishGridSquareInfo(knownGridRef_1000m)
    val lowerBgr = bgr.getLowerPrecisionGridSquareInfo(1000)

    lowerBgr should be (bgr)
  }

  test("should give WKT for 100m grid square") {
    val bgr = BritishGridSquareInfo(knownGridRef_100m)
    (bgr.sourceProjectionPolygon matches TestResources.polygonWKTRegex) should be (true)
  }

  test("should give WKT for 1000m grid square") {
    val bgr = BritishGridSquareInfo(knownGridRef_1000m)
    (bgr.sourceProjectionPolygon matches TestResources.polygonWKTRegex) should be (true)
  }

  test("should give WKT for 2000m grid square") {
    val bgr = BritishGridSquareInfo(knownGridRef_2000m)
    (bgr.sourceProjectionPolygon matches TestResources.polygonWKTRegex) should be (true)
  }

  test("should give WKT for 10000m grid square") {
    val bgr = BritishGridSquareInfo(knownGridRef_10000m)
    (bgr.sourceProjectionPolygon matches TestResources.polygonWKTRegex) should be (true)
  }

  test("should give correct easting and northing for grid ref TQ24") {
    val bgr = BritishGridSquareInfo("TQ24")
    val (easting, northing) = bgr.getEastingNorthing

    easting should be (520000)
    northing should be (140000)
  }

  test("should give correct grid ref for easting 408759 & norhting 424612")
  {
    val bgr = BritishGridSquareInfo(408759, 424612)
    bgr.gridReference should be ("SE087246")
  }

  test("should give correct easting northing for grid ref SE087246")
  {
    val bgr = BritishGridSquareInfo("SE087246")
    val (easting, northing) = bgr.getEastingNorthing

    easting should be (408700)
    northing should be (424600)
  }

  test("should give correct grid ref for easting 520814 & norhting 296511")
  {
    val bgr = BritishGridSquareInfo(520814, 296511)
    bgr.gridReference should be ("TL208965")
  }

  test("should give correct easting northing for grid ref TL208965")
  {
    val bgr = BritishGridSquareInfo("TL208965")
    val (easting, northing) = bgr.getEastingNorthing

    easting should be (520800)
    northing should be (296500)
  }

  test("should give correct grid ref for easting 259207 & norhting 665548")
  {
    val bgr = BritishGridSquareInfo(259207, 665548)
    bgr.gridReference should be ("NS592655")
  }

  test("should give correct easting northing for grid ref NS592655")
  {
    val bgr = BritishGridSquareInfo("NS592655")
    val (easting, northing) = bgr.getEastingNorthing

    easting should be (259200)
    northing should be (665500)
  }

  test("should give correct grid ref for easting 447275 & norhting 1141792")
  {
    val bgr = BritishGridSquareInfo(447275, 1141792)
    bgr.gridReference should be ("HU472417")
  }

  test("should give correct easting northing for grid ref HU472417")
  {
    val bgr = BritishGridSquareInfo("HU472417")
    val (easting, northing) = bgr.getEastingNorthing

    easting should be (447200)
    northing should be (1141700)
  }

  test("should give correct grid ref for Lat 60.157057 lng -1.1515654") {
    val bgr = BritishGridSquareInfo(60.157942,-1.1501909)
    bgr.gridReference should be ("HU472417")
  }
}
| JNCC-dev-team/nbn-importer | importer/src/test/scala/uk/org/nbn/nbnv/importer/spatial/BritishGridSquareInfoSuite.scala | Scala | apache-2.0 | 10,124 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import scala.util.Random
/** Serialization round-trip test for the Echo layer: builds an Echo module,
  * feeds it a random 1-D tensor and delegates to `runSerializationTest`.
  */
class EchoSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    val echo = Echo[Float]().setName("echo")
    // 10-element tensor filled with uniformly random Float values.
    val input = Tensor[Float](10).apply1(_ => Random.nextFloat())
    runSerializationTest(echo, input)
  }
}
| yiheng/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/EchoSpec.scala | Scala | apache-2.0 | 1,031 |
package org.bitcoins.crypto
import scodec.bits.ByteVector
import scala.util.Try
/** Created by chris on 2/26/16.
* Trait to implement ubiquitous factory functions across our codebase
*/
abstract class Factory[+T] {

  /** Creates a T out of a hex string. */
  def fromHex(hex: String): T = fromBytes(CryptoBytesUtil.decodeHex(hex))

  /** Deserializes the given hex string to a T
    * If the hex is not correct, [[None]] is returned
    */
  def fromHexOpt(hex: String): Option[T] = {
    fromHexT(hex).toOption
  }

  /** Deserializes the given hex string
    * if the hex is not correct, we give you a [[Failure]]
    */
  def fromHexT(hex: String): Try[T] = {
    Try(fromHex(hex))
  }

  /** Creates a T out of a hex string in little endian. */
  def fromHexLE(hex: String): T = fromBytesLE(CryptoBytesUtil.decodeHex(hex))

  /** Creates a T out of a sequence of bytes. */
  def fromBytes(bytes: ByteVector): T

  /** Deserializes the given [[ByteVector]] to a T
    * If the [[ByteVector]] is not correct, [[None]] is returned
    */
  def fromBytesOpt(bytes: ByteVector): Option[T] = {
    fromBytesT(bytes).toOption
  }

  /** Deserializes the given [[ByteVector]] to a T
    * if the [[ByteVector]] is not correct, we give you a [[Failure]]
    */
  def fromBytesT(bytes: ByteVector): Try[T] = {
    Try(fromBytes(bytes))
  }

  /** Creates a T out of a sequence of bytes in little endian. */
  def fromBytesLE(bytes: ByteVector): T = fromBytes(bytes.reverse)

  /** Creates a T out of a sequence of bytes. */
  def apply(bytes: ByteVector): T = fromBytes(bytes)

  /** Creates a T from a hex string. */
  def apply(hex: String): T = fromHex(hex)

  /** Allows a `def foo[C: Factory]()` construction. */
  implicit def self: Factory[T] = this
}
| bitcoin-s/bitcoin-s | crypto/src/main/scala/org/bitcoins/crypto/Factory.scala | Scala | mit | 1,761 |
package com.lonelyplanet.akka.http.extensions
import org.zalando.jsonapi.JsonapiRootObjectWriter
import org.zalando.jsonapi.model.{Error, RootObject}
/** A single API error: human-readable message, optional error id and optional detail text. */
case class ErrorMessage(message: String, id: Option[String], detail: Option[String] = None)
object ErrorMessage {
  /** Serialises an ErrorMessage as a JSON-API root object carrying exactly one error. */
  implicit val errorMessageWriter = new JsonapiRootObjectWriter[ErrorMessage] {
    override def toJsonapi(msg: ErrorMessage): RootObject = {
      val error = Error(
        title = Some(msg.message),
        // Missing ids are rendered with the sentinel "undefined".
        id = Some(msg.id.getOrElse("undefined")),
        detail = msg.detail
      )
      RootObject(errors = Some(error :: Nil))
    }
  }
}
| lonelyplanet/akka-http-extensions | src/main/scala/com/lonelyplanet/akka/http/extensions/ErrorMessage.scala | Scala | apache-2.0 | 628 |
package com.evojam.nlp.date
import java.io.{FileNotFoundException, File}
import scala.collection.JavaConversions._
import com.google.inject.Inject
import epic.sequences.SemiCRF
import org.joda.time.{Interval, DateTime}
import org.ocpsoft.prettytime.nlp.PrettyTimeParser
import com.evojam.nlp.ner.NamedEntityRecognizer
import com.evojam.nlp.util.ObjectLoader
/** [[DateParser]] backed by PrettyTime for natural-language date extraction
  * and a semi-CRF tagger for recognising period words (year/month/week/day).
  */
private[nlp] class DateParserImpl @Inject() (
  prettyTimeParser: PrettyTimeParser,
  loader: ObjectLoader,
  ner: NamedEntityRecognizer,
  config: DateParserConfig) extends DateParser {

  // Loaded lazily: the CRF model is only read from disk/resources when first needed.
  private[this] lazy val datesCrf: SemiCRF[String, String] = {
    // `if` replaces the original `Boolean match { case true/false }` anti-pattern.
    val loaded =
      if (config.resource) loader.loadResource[SemiCRF[String, String]](config.datesSemiCrf)
      else loader.load[SemiCRF[String, String]](new File(config.datesSemiCrf))
    loaded.getOrElse(throw new FileNotFoundException(s"Unable to locate dates CRF: $config"))
  }

  /** First date mentioned in the sentence, if any. */
  override def parseDate(sentence: String) =
    prettyTimeParser
      .parse(sentence)
      .headOption
      .map(new DateTime(_))

  /** All dates mentioned in the sentence, in order of appearance. */
  override def parseDates(sentence: String) =
    prettyTimeParser
      .parse(sentence)
      .map(new DateTime(_))
      .toList

  /** Interval starting at the first parsed date, spanning the period word found
    * in the sentence (zero-length when no period word is recognised).
    */
  override def parseInterval(sentence: String) =
    prettyTimeParser
      .parse(sentence)
      .headOption
      .map(date =>
      new Interval(
        new DateTime(date),
        addInterval(new DateTime(date), sentence)))

  /** Interval from the first date in `fromSentence` to the first date in `toSentence`. */
  override def parseInterval(fromSentence: String, toSentence: String) =
    for {
      from <- prettyTimeParser.parse(fromSentence).headOption
      to <- prettyTimeParser.parse(toSentence).headOption
    } yield new Interval(new DateTime(from), new DateTime(to))

  /** Advances `startDate` by the period recognised in the sentence. */
  private def addInterval(startDate: DateTime, sentence: String): DateTime =
    periodFromSentence(sentence) match {
      case Year => startDate.plusYears(1)
      case Month => startDate.plusMonths(1)
      case Week => startDate.plusWeeks(1)
      case Day => startDate.plusDays(1)
      case UndefinedPeriod => startDate
    }

  /** Smallest-valued (i.e. shortest) period the CRF tags in the sentence;
    * UndefinedPeriod when none is found.
    */
  private def periodFromSentence(sentence: String): Period =
    ner.tokenize(sentence.toLowerCase)(datesCrf)
      .foldLeft(UndefinedPeriod: Period) {
        case (shortest, (tag, _)) =>
          val period = periodFromTag(tag)
          // `if` replaces the original `Boolean match { case true/false }`.
          if (period.value < shortest.value) period else shortest
      }

  /** Maps a CRF tag to its Period; unknown tags map to UndefinedPeriod. */
  private def periodFromTag(tag: String): Period =
    tag match {
      case "YEAR" => Year
      case "MONTH" => Month
      case "WEEK" => Week
      case "DAY" => Day
      case _ => UndefinedPeriod
    }
}
| evojam/scala-common | src/main/scala/com/evojam/nlp/date/DateParserImpl.scala | Scala | apache-2.0 | 2,592 |
package com.sksamuel.scapegoat.inspections.unneccesary
import com.sksamuel.scapegoat._
/** @author Stephen Samuel */
class UnnecessaryIf extends Inspection {
def inspector(context: InspectionContext): Inspector = new Inspector(context) {
override def postTyperTraverser = Some apply new context.Traverser {
import context.global._
override def inspect(tree: Tree): Unit = {
tree match {
case If(cond, Literal(Constant(true)), Literal(Constant(false))) =>
context.warn("Unnecessary if condition.",
tree.pos,
Levels.Info,
"If comparison is not needed. Use the condition. Eg, instead of if (a ==b) true else false, simply use a == b. : " + tree
.toString().take(500),
UnnecessaryIf.this)
case If(cond, Literal(Constant(false)), Literal(Constant(true))) =>
context.warn("Unncessary if condition.",
tree.pos,
Levels.Info,
"If comparison is not needed. Use the negated condition. Eg, instead of if (a ==b) false else true, simply use !(a == b). : " + tree
.toString().take(500),
UnnecessaryIf.this)
case _ => continue(tree)
}
}
}
}
} | pwwpche/scalac-scapegoat-plugin | src/main/scala/com/sksamuel/scapegoat/inspections/unneccesary/UnnecessaryIf.scala | Scala | apache-2.0 | 1,276 |
// Fix: the fixed point of a type constructor F -- a Fix[F] wraps an F[Fix[F]].
// (The trailing "| repo | path | ..." text below is dataset metadata residue, not code.)
case class Fix[F[_]](x: F[Fix[F]]) | hmemcpy/milewski-ctfp-pdf | src/content/3.8/code/scala/snippet10.scala | Scala | gpl-3.0 | 34 |
package feh.tec.nxt.run
import feh.tec.rubik.RubikCubeImage
import feh.util.{FileNameUtils, AbsolutePath}
import rinterface._
/** Pushes each cube side's colour values into an embedded R engine and renders
  * a per-side boxplot of the colour distributions.
  */
trait ColorStats {
  // Colour samples for each cube side (paired with rSideNames below).
  def sides: Seq[RubikCubeImage.Side[Int]]
  // Prefix used for the generated plot file name.
  def filePrefix: String
  // Directory the PNG plots are written into.
  def plotsDir: AbsolutePath
  // NOTE: starting the R engine is a side effect that runs when this trait is initialised.
  lazy val RInterface = new RInterface(System.out)
  RInterface.startR
  lazy val R = RInterface.engine
//  Rengine.DEBUG = 1
  lazy val rSideNames = SidesMaps.default.readOrder.map(_.name)
//  val colors = rSideNames.zip(1 to 6).toMap.mapValues(List.fill[Int](9)(_))
  // Forcing this lazy val also assigns each side's colour vector to an R
  // variable named after the side -- building the map has that side effect.
  lazy val colors = (
    for((RubikCubeImage.Side(colors, sideOpt), side) <- sides zip rSideNames)
    yield {
      val i = sideOpt.getOrElse(side)
      R.assign(i.toString, colors.values.toArray)
      i -> colors.values
    }
  ).toMap
  lazy val names = rSideNames.map(n => '\"' + n.toString + '\"')
  def boxplot() = {
    colors  // deliberately forces `colors` so the R variables exist before plotting
    R.withPng(plotsDir / FileNameUtils.formatDateFile(filePrefix, ".png")){
      _.eval(s"boxplot(${rSideNames.mkString(",")}, names=${names.mkString("c(", ",", ")")})")
    }
  }
}
| fehu/int-sis--Rubik | nxt-r/src/main/scala/feh/tec/nxt/run/ColorStats.scala | Scala | mit | 1,059 |
/*******************************************************************************
Copyright (c) 2012-2013, S-Core, KAIST.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.visualization
import kr.ac.kaist.jsaf.analysis.typing.OrderMap
import java.io.File
import java.io.FileWriter
import kr.ac.kaist.jsaf.exceptions.JSAFError
import kr.ac.kaist.jsaf.analysis.typing.Worklist
import scala.collection.mutable.ListBuffer
import java.io.IOException
import java.io.BufferedInputStream
import net.liftweb.json.JsonDSL._
import net.liftweb.json._
import kr.ac.kaist.jsaf.analysis.cfg._
/** Emits Graphviz dot files (and SVG renderings via the external `dot` binary)
  * for the per-function control-flow graphs of an analysed program. One file
  * pair (`fN.dot` / `fN.svg`) is produced per function reachable in the call graph.
  */
object FunCFGWriter {
  // Graphviz attribute fragments shared by all nodes/edges.
  def NormalNodeShape:String = "fontname=\\"Consolas\\" shape=record, fontsize=10"
  def NormalEdgeStyle:String = "fontname=\\"Consolas\\" style=solid"
  def ExcEdgeStyle:String = "fontname=\\"Consolas\\" style=dashed,label=\\"exc\\""
  def call2AftcallEdgeStyle:String = "fontname=\\"Consolas\\" style=dotted,color=gray,dir=none"
  def exit2ExcExitEdgeStyle:String = "fontname=\\"Consolas\\" style=invis,dir=none"
  def newLine = ";\\n\\t"

  /** Wraps a node attribute list in brackets. */
  def nodeShape(shape:String):String = "[" + shape + "]"
  /** Wraps an edge attribute list in brackets. */
  def edgeStyle(style:String):String = "[" + style + "]"

  /** Builds a record-shaped node label listing the block's instructions,
    * prefixed with the after-call return variable when present.
    */
  def nodeInstLabel(label:String, insts:List[CFGInst], returnVar:Option[CFGId]):String = {
    val sb = new StringBuilder
    // Characters with special meaning inside a Graphviz record label.
    val escapeChars = Array('<','>','|','"','\\\\')
    sb.append("[label=\\"").append(label).append("|{")
    var first = true;
    returnVar match {
      case Some(x) =>
        sb.append("[EDGE] after-call(")
        sb.append(escape(x.toString().toArray, escapeChars))
        sb.append(")")
        first = false;
      case None => ()
    }
    for(inst<-insts) {
      if (first) first = false
      else sb.append("\\\\l")
      sb.append(escape(inst.toString().toArray, escapeChars))
    }
    sb.append("\\\\l}\\"]")
    sb.toString
  }

  /** Backslash-escapes every character of `src` that occurs in `cs`. */
  def escape(src:Array[Char], cs:Array[Char]):String = {
    val sb = new StringBuilder
    for(ch <-src) {
      if(cs.contains(ch)) sb.append("\\\\".+(ch))
      else sb.append(ch);
    }
    sb.toString();
  }

  /** Graphviz identifier for a CFG node, unique within one function's graph. */
  def getLabel(node:Node):String = {
    node._2 match {
      case LBlock(id) => "Block" + id
      case LEntry => "Entry" + node._1
      case LExit => "Exit" + node._1
      case LExitExc => "ExitExc" + node._1
    }
  }

  /** Renders `label -> {succ1;succ2;...} [style]` for the given successors. */
  def connectEdge(label:String, succs:Set[Node], edgStyle:String, o:OrderMap):String = {
    val sb = new StringBuilder
    sb.append(label).append("->{")
    for(succ <-succs) {
      sb.append(getLabel(succ)).append(";")
    }
    sb.append("}").append(edgeStyle(edgStyle))
    sb.toString()
  }

  /** Renders one node declaration (`name [attrs];`), including its worklist
    * order (from `o`) and, for basic blocks, the instruction listing.
    */
  def drawNode(cfg:CFG, node:Node, o:OrderMap):String = node._2 match {
    case LBlock(id) =>
      // NOTE(review): inner match is non-exhaustive; an LBlock whose command is
      // not a Block would raise a MatchError — presumably impossible by construction.
      cfg.getCmd(node) match {
        case Block(insts) =>
          val order = o.get(node) match {
            case Some(i) => "["+i+"]"
            case None => ""
          }
          getLabel(node) +
            nodeShape(NormalNodeShape) +
            nodeInstLabel(getLabel(node)+"\\\\l"+order, insts, cfg.getReturnVar(node)) +
            newLine
      }
    case _ =>
      val order = o.get(node) match {
        case Some(i) => "["+i+"]"
        case None => ""
      }
      getLabel(node) +
        nodeShape(NormalNodeShape) +
        "[label=\\"" + getLabel(node)+"\\\\l" + order + "\\"]" +
        newLine
  }

  /** Renders all outgoing edges of a node: normal successors, exceptional
    * successors (dashed), call-to-aftercall links (dotted) and the invisible
    * Exit/ExitExc alignment edge.
    */
  def drawEdge(cfg:CFG, node:Node, o:OrderMap):String = node._2 match {
    case LBlock(id) =>
      cfg.getCmd(node) match {
        case Block(insts) =>
          val sb = new StringBuilder
          if (cfg.getCalls.contains(node)) {
            val ac = cfg.getAftercallFromCall(node)
            sb.append(connectEdge(getLabel(node), Set(ac), call2AftcallEdgeStyle, o)).append(newLine)
          }
          if(!cfg.getSucc(node).isEmpty) {
            sb.append(connectEdge(getLabel(node), cfg.getSucc(node), NormalEdgeStyle, o)).append(newLine)
          }
          cfg.getExcSucc.get(node) match {
            case Some(succ) =>
              sb.append(connectEdge(getLabel(node), Set(succ), ExcEdgeStyle, o)).append(newLine)
            case None => ()
          }
          sb.toString()
      }
    case LEntry|LExitExc if(!cfg.getSucc(node).isEmpty) => connectEdge(getLabel(node), cfg.getSucc(node), NormalEdgeStyle, o) + newLine
    case LExit => connectEdge(getLabel(node), Set((node._1,LExitExc)), exit2ExcExitEdgeStyle, o) + newLine + "{rank=same;" + getLabel(node) + " " + "ExitExc"+node._1 +"}" + newLine
    case _ => ""
  }

  /** Assembles the complete `digraph` source for the given nodes. */
  def drawGraph(cfg:CFG, nodes:List[Node], o:OrderMap) = {
    //val nodes = cfg.getNodes.reverse
    val sb = new StringBuilder
    sb.append("digraph \\"DirectedGraph\\" {\\n")
    sb.append("\\tfontsize=12;node [fontsize=12];edge [fontsize=12];\\n\\t")
    for(node <-nodes) {
      sb.append(drawNode(cfg, node, o)).append(drawEdge(cfg, node, o))
    }
    sb.append("\\n}\\n").toString()
  }

  /** Invokes the external Graphviz `dot` executable to render `dotFile` as SVG.
    * Polls the child every 500ms, draining its stdout/stderr so it cannot block
    * on a full pipe buffer, and aborts after ~60 seconds (120 polls).
    */
  def spawnDot(dotExe: String, outputFile: String, dotFile: File): Unit = {
    val cmdarray = Array(dotExe, "-Tsvg", "-o", outputFile, "-v", dotFile.getAbsolutePath)
    System.out.println("Spawning process" + cmdarray.foldLeft("")((r,s) => r + " " + s))
    try {
      val p = Runtime.getRuntime.exec(cmdarray)
      val output = new BufferedInputStream(p.getInputStream)
      val error = new BufferedInputStream(p.getErrorStream)
      var repeat = true
      var repeatCount = 0
      while (repeat) {
        try {
          Thread.sleep(500)
          repeatCount = repeatCount + 1
        } catch {
          case e1:InterruptedException =>
            e1.printStackTrace
            // just ignore and continue
        }
        // FIX: the previous code read into `ListBuffer().toArray`, i.e. a
        // zero-length array, so the streams were never actually drained and a
        // verbose `dot` run could deadlock on a full pipe. Read into a real
        // buffer sized by available(); the bytes themselves are discarded.
        if (output.available > 0) {
          val data = new Array[Byte](output.available)
          output.read(data)
        }
        if (error.available > 0) {
          val data = new Array[Byte](error.available)
          error.read(data)
        }
        try {
          if (repeatCount > 120) {
            p.destroy
            System.out.println("Drawing %s takes more than one minute. Aborted.".format(outputFile))
            return
          }
          p.exitValue
          // if we get here, the process has terminated
          repeat = false
        } catch {
          case _:IllegalThreadStateException =>
            // this means the process has not yet terminated.
            repeat = true
        }
      }
    } catch {
      case e:IOException =>
        e.printStackTrace
        JSAFError.error("IOException DotUtil.")
    }
  }

  /** Writes the dot source for `nodes` to `<dotpath>/f<funcId>.dot` and returns
    * the created File (errors are routed to JSAFError).
    */
  def writeDotFile(cfg:CFG, nodes:List[Node], o: OrderMap, dotpath: String, funcId:FunctionId) = {
    try {
      val f = new File(dotpath+"/f"+funcId+".dot")
      val fw = new FileWriter(f)
      fw.write(drawGraph(cfg, nodes, o));
      fw.close
      f
    } catch {
      case e:Throwable =>
        JSAFError.error("Error writing dot file " + dotpath + ".")
    }
  }

  /** Nodes belonging to function `funcId`, in reverse of their order in `nodes`
    * (preserves the prepend-accumulation order of the original implementation).
    */
  def getIdNodes(nodes:List[Node], funcId:Int):List[Node] =
    nodes.filter(_._1 == funcId).reverse

  /** Renders one SVG per function appearing in `callgraph`, in parallel. */
  def write(cfg:CFG, callgraph:List[FunctionId], nodes:List[Node], outputPath: String, dotExe: String) = {
    val o = Worklist.computes(cfg).getOrder()
    nodes.par.map(node => node._2 match {
      case LEntry if callgraph.contains(node._1) =>
        val funcId:FunctionId = node._1
        spawnDot(dotExe, outputPath+"/f"+funcId+".svg", writeDotFile(cfg, getIdNodes(nodes, funcId), o, outputPath, funcId))
      case _ =>
    })
  }
}
| daejunpark/jsaf | src/kr/ac/kaist/jsaf/analysis/visualization/FunCFGWriter.scala | Scala | bsd-3-clause | 8,160 |
package org.reactivebird.api
import play.api.libs.iteratee._
import scala.concurrent.{ExecutionContext, Future}
import org.reactivebird.models.{ResultSetWithMaxId, ResultSetWithCursor, CanBeIdentified}
import org.reactivebird.TwitterErrorRateLimitExceeded
import akka.actor.ActorSystem
/** Common interface of a pagination request. */
trait Page {
  /** Maximum number of items requested for this page, if bounded. */
  val count: Option[Int]
}
/** Page addressed by id bounds: items newer than `sinceId` and no newer than `maxId`. */
case class MaxIdPage(count: Option[Int], sinceId: Option[Long], maxId: Option[Long]) extends Page
/** Page addressed by an opaque server-provided cursor.
  * NOTE(review): unlike MaxIdPage this does not extend Page even though it has a
  * matching `count` field — possibly an oversight; confirm before relying on Page. */
case class CursorPage(count: Option[Int], cursor: Option[Long])
/** Streams pages of results as an Enumerator and offers accessors for
  * collecting either the flattened items or whole pages, optionally bounded.
  */
trait Paging[A] {

  implicit val system: ActorSystem
  // All futures produced here run on the actor system's dispatcher.
  implicit val exec = system.dispatcher

  /** Source of successive pages; each element is one page of items. */
  val enumerator: Enumerator[Seq[A]]

  /** Concatenates every page into one sequence (consumes the whole stream). */
  def items: Future[Seq[A]] = {
    val iterator = Iteratee.fold[Seq[A], Seq[A]](Seq.empty[A]){ (acc, elt) => acc ++ elt }
    enumerator run iterator
  }

  /** Collects at most `n` items, terminating the stream once satisfied. */
  def items(n: Int): Future[Seq[A]] = {
    enumerator run concateneN(n)
  }

  /** Collects every page, preserving page boundaries. */
  def pages: Future[Seq[Seq[A]]] = {
    val iterator = Iteratee.fold[Seq[A], Seq[Seq[A]]](Seq.empty[Seq[A]]){ (acc, elt) => acc :+ elt }
    enumerator run iterator
  }

  /** Collects at most `n` pages. */
  def pages(n: Int): Future[Seq[Seq[A]]] = {
    enumerator run takeN(n)
  }

  // Iteratee that concatenates incoming pages until at least `n` items have
  // been accumulated, then truncates the result to exactly `n`.
  private[this] def concateneN(n: Int): Iteratee[Seq[A], Seq[A]] = {
    def step(idx: Int, acc: Seq[A])(i: Input[Seq[A]]): Iteratee[Seq[A], Seq[A]] = {
      i match {
        case Input.EOF | Input.Empty => Done(acc, Input.EOF)
        case Input.El(e) =>
          if (idx < n)
            Cont[Seq[A], Seq[A]](i => step(idx + e.size, acc ++ e)(i))
          else
            Done(acc.take(n), Input.EOF)
      }
    }
    Cont[Seq[A], Seq[A]](i => step(0, Seq.empty[A])(i))
  }

  // Iteratee that keeps the first `n` pages and stops consuming afterwards.
  private[this] def takeN(n: Int): Iteratee[Seq[A], Seq[Seq[A]]] = {
    def step(idx: Int, acc: Seq[Seq[A]])(i: Input[Seq[A]]): Iteratee[Seq[A], Seq[Seq[A]]] = i match {
      case Input.EOF | Input.Empty => Done(acc, Input.EOF)
      case Input.El(e) =>
        if (idx < n)
          Cont[Seq[A], Seq[Seq[A]]](i => step(idx + 1, acc :+ e)(i))
        else
          Done(acc, Input.EOF)
    }
    Cont[Seq[A], Seq[Seq[A]]](i => step(0, Seq.empty[Seq[A]])(i))
  }
}
/** Paging driven by cursor-style endpoints: starts at cursor -1 and follows
  * each response's `nextCursor` until the server returns cursor 0.
  */
case class CursorPaging[A](pageable: CursorPage => Future[ResultSetWithCursor[A]], itemsPerPage: Int = 2000)(implicit val system: ActorSystem)
  extends Paging[A] {

  private val seedPage = CursorPage(Some(itemsPerPage), Some(-1))

  val enumerator: Enumerator[Seq[A]] = Enumerator.unfoldM[CursorPage, Seq[A]](seedPage){ currentPage =>
    // NOTE(review): `cursor.get` is safe only because every page built here has
    // Some(cursor); a CursorPage constructed with None would throw.
    if (currentPage.cursor.get == 0)
      Future.successful[Option[(CursorPage, Seq[A])]]{ None }
    else {
      pageable(currentPage) map {
        case ResultSetWithCursor(items, nextCursor) => Some(CursorPage(Some(itemsPerPage), Some(nextCursor)) -> items)
      } recoverWith {
        // Hitting the rate limit ends the stream rather than failing it.
        case e: TwitterErrorRateLimitExceeded => Future.successful[Option[(CursorPage, Seq[A])]]{ None }
      }
    }
  }
}
/** Paging driven by max-id endpoints: each page requests items older than the
  * previous page's `maxId` (and newer than `sinceId`), until an empty page is
  * returned or the rate limit is hit.
  */
case class IdPaging[A <: CanBeIdentified](
  pageable: MaxIdPage => Future[ResultSetWithMaxId[A]],
  itemsPerPage: Int = 200,
  sinceId: Option[Long] = None)(
  implicit val system: ActorSystem)
  extends Paging[A] {

  private val seedPage = MaxIdPage(Some(itemsPerPage), sinceId, None)

  override val enumerator: Enumerator[Seq[A]] = Enumerator.unfoldM[MaxIdPage, Seq[A]](seedPage){ currentPage =>
    pageable(currentPage) map { r =>
      if (r.items.nonEmpty)
        Some((MaxIdPage(Some(itemsPerPage), sinceId, Some(r.maxId)), r.items))
      else
        Option.empty[(MaxIdPage, Seq[A])]
    } recoverWith {
      // Hitting the rate limit ends the stream rather than failing it.
      case e: TwitterErrorRateLimitExceeded => Future.successful[Option[(MaxIdPage, Seq[A])]]{ None }
    }
  }
}
| benoitguigal/reactive-bird | src/main/scala/org/reactivebird/api/Paging.scala | Scala | mit | 3,566 |
package com.sksamuel.scrimage.canvas
import com.sksamuel.scrimage.Image
import org.scalatest.{WordSpec, Matchers}
/** Regression tests for the watermark filters: each filtered image must match
  * a pre-rendered reference resource byte-for-byte.
  */
class WatermarkTest extends WordSpec with Matchers {

  val image = Image.fromResource("/gibson.jpg")

  "watermarker" should {
    "add repeated watermark" in {
      val filtered = image.filter(new WatermarkCoverFilter("watermark", size = 36, antiAlias = false))
      filtered shouldBe Image.fromResource("/com/sksamuel/scrimage/canvas/watermarked_repeated.png")
    }
    "add stamped watermark" in {
      val filtered = image.filter(new WatermarkStampFilter("watermark", size = 48, alpha = 0.2, antiAlias = false))
      filtered shouldBe Image.fromResource("/com/sksamuel/scrimage/canvas/watermarked_centered.png")
    }
    "add located watermark" in {
      // Watermark positioned 100px above the bottom-left corner.
      val filter = new WatermarkFilter("watermark", 25, image.height - 100, size = 48, alpha = 0.2, antiAlias = false)
      image.filter(filter) shouldBe Image.fromResource("/com/sksamuel/scrimage/canvas/watermark_at.png")
    }
  }
}
| carlosFattor/scrimage | scrimage-core/src/test/scala/com/sksamuel/scrimage/canvas/WatermarkTest.scala | Scala | apache-2.0 | 1,014 |
/*
* Copyright 2009-2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.norbert
package network
package netty
import org.jboss.netty.channel.group.ChannelGroup
import org.jboss.netty.channel._
import server.{MessageExecutor, MessageHandlerRegistry, RequestContext => NorbertRequestContext}
import common.CachedNetworkStatistics
import protos.NorbertProtos
import logging.Logging
import java.util.UUID
import jmx.JMX.MBean
import org.jboss.netty.handler.codec.oneone.{OneToOneEncoder, OneToOneDecoder}
import jmx.{FinishedRequestTimeTracker, JMX}
import java.lang.String
import com.google.protobuf.{ByteString}
import norbertutils._
import util.ProtoUtils
/** Per-request server context: the wire request id plus the receive timestamp (ms since epoch). */
case class RequestContext(requestId: UUID, receivedAt: Long = System.currentTimeMillis) extends NorbertRequestContext
/** Upstream decoder pairing each inbound NorbertMessage with a fresh
  * RequestContext. A message whose status is not OK is answered with an error
  * response and rejected by throwing.
  */
@ChannelPipelineCoverage("all")
class RequestContextDecoder extends OneToOneDecoder {
  def decode(ctx: ChannelHandlerContext, channel: Channel, msg: Any) = {
    val norbertMessage = msg.asInstanceOf[NorbertProtos.NorbertMessage]
    // Request id travels on the wire as two longs (msb/lsb).
    val requestId = new UUID(norbertMessage.getRequestIdMsb, norbertMessage.getRequestIdLsb)

    if (norbertMessage.getStatus != NorbertProtos.NorbertMessage.Status.OK) {
      val ex = new InvalidMessageException("Invalid request, message has status set to ERROR")
      // Reply with an error frame before failing the pipeline.
      Channels.write(ctx, Channels.future(channel), ResponseHelper.errorResponse(requestId, ex))
      throw ex
    }

    (RequestContext(requestId), norbertMessage)
  }
}
/** Downstream encoder stripping the RequestContext off the (context, message)
  * pair, leaving only the protobuf message for the wire encoder below it.
  */
@ChannelPipelineCoverage("all")
class RequestContextEncoder extends OneToOneEncoder with Logging {
  def encode(ctx: ChannelHandlerContext, channel: Channel, msg: Any) = {
    val (context, norbertMessage) = msg.asInstanceOf[(RequestContext, NorbertProtos.NorbertMessage)]
    norbertMessage
  }
}
/** Pipeline stage running the registered server filters around each message:
  * `onMessage` on the way in (upstream) and `postMessage`, in reverse order,
  * on the way out (downstream). Filter failures are swallowed via continueOnError.
  */
@ChannelPipelineCoverage("all")
class ServerFilterChannelHandler(messageExecutor: MessageExecutor) extends SimpleChannelHandler with Logging {

  override def handleUpstream(ctx: ChannelHandlerContext, e: ChannelEvent) {
    if (e.isInstanceOf[MessageEvent]) {
      val (context, norbertMessage) = e.asInstanceOf[MessageEvent].getMessage.asInstanceOf[(RequestContext, NorbertProtos.NorbertMessage)]
      messageExecutor.filters.foreach { filter =>
        // NOTE(review): non-exhaustive match — a filter that is not a
        // NettyServerFilter would raise a MatchError here; confirm intended.
        filter match {
          case f : NettyServerFilter => continueOnError(f.onMessage(norbertMessage, context))
        }
      }
    }
    super.handleUpstream(ctx, e)
  }

  override def handleDownstream(ctx: ChannelHandlerContext, e: ChannelEvent) {
    if (e.isInstanceOf[MessageEvent]) {
      val (context, norbertMessage) = e.asInstanceOf[MessageEvent].getMessage.asInstanceOf[(RequestContext, NorbertProtos.NorbertMessage)]
      // Post-filters run in reverse registration order (onion semantics).
      messageExecutor.filters.reverse.foreach { filter =>
        filter match {
          case f :NettyServerFilter => continueOnError(f.postMessage(norbertMessage, context))
        }
      }
    }
    super.handleDownstream(ctx, e)
  }
}
/** Terminal server handler: registers channels, dispatches each decoded request
  * to the MessageExecutor, and writes back the serialized response (or an error
  * frame). Also tracks per-request timing statistics exposed over JMX.
  */
@ChannelPipelineCoverage("all")
class ServerChannelHandler(clientName: Option[String],
                           serviceName: String,
                           channelGroup: ChannelGroup,
                           messageHandlerRegistry: MessageHandlerRegistry,
                           messageExecutor: MessageExecutor,
                           requestStatisticsWindow: Long,
                           avoidByteStringCopy: Boolean) extends SimpleChannelHandler with Logging {
  // All requests are tracked under the single key 0; individual requests are
  // distinguished by their UUID.
  private val statsActor = CachedNetworkStatistics[Int, UUID](SystemClock, requestStatisticsWindow, 200L)

  val statsJmx = JMX.register(new NetworkServerStatisticsMBeanImpl(clientName, serviceName, statsActor))

  /** Unregisters the JMX statistics bean. */
  def shutdown: Unit = {
    statsJmx.foreach { JMX.unregister(_) }
  }

  // Every opened channel joins the group so it can be closed en masse at shutdown.
  override def channelOpen(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
    val channel = e.getChannel
    log.trace("channelOpen: " + channel)
    channelGroup.add(channel)
  }

  override def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent) {
    val (context, norbertMessage) = e.getMessage.asInstanceOf[(RequestContext, NorbertProtos.NorbertMessage)]
    val channel = e.getChannel

    val messageName = norbertMessage.getMessageName
    val requestBytes = ProtoUtils.byteStringToByteArray(norbertMessage.getMessage, avoidByteStringCopy)

    statsActor.beginRequest(0, context.requestId)

    // Look up handler and (de)serializers; an unknown message name produces an
    // error response and rethrows.
    val (handler, is, os) = try {
      val handler: Any => Any = messageHandlerRegistry.handlerFor(messageName)
      val is: InputSerializer[Any, Any] = messageHandlerRegistry.inputSerializerFor(messageName)
      val os: OutputSerializer[Any, Any] = messageHandlerRegistry.outputSerializerFor(messageName)
      (handler, is, os)
    } catch {
      case ex: InvalidMessageException =>
        Channels.write(ctx, Channels.future(channel), (context, ResponseHelper.errorResponse(context.requestId, ex)))
        statsActor.endRequest(0, context.requestId)
        throw ex
    }

    val request = is.requestFromBytes(requestBytes)
    try {
      // Asynchronous execution; responseHandler is invoked with the outcome.
      messageExecutor.executeMessage(request, Option((either: Either[Exception, Any]) => {
        responseHandler(context, e.getChannel, either)(is, os)
      }), Some(context))(is)
    }
    catch {
      // Executor rejected the request: report back-pressure to the client.
      case ex: HeavyLoadException =>
        Channels.write(ctx, Channels.future(channel), (context, ResponseHelper.errorResponse(context.requestId, ex, NorbertProtos.NorbertMessage.Status.HEAVYLOAD)))
        statsActor.endRequest(0, context.requestId)
    }
  }

  override def exceptionCaught(ctx: ChannelHandlerContext, e: ExceptionEvent) = log.info(e.getCause, "Caught exception in channel: %s".format(e.getChannel))

  /** Serializes the executor's result (or failure) and writes it back, closing
    * the timing record for this request.
    */
  def responseHandler[RequestMsg, ResponseMsg](context: RequestContext, channel: Channel, either: Either[Exception, ResponseMsg])
                                              (implicit is: InputSerializer[RequestMsg, ResponseMsg], os: OutputSerializer[RequestMsg, ResponseMsg]) {
    val response = either match {
      case Left(ex) => ResponseHelper.errorResponse(context.requestId, ex)
      case Right(responseMsg) =>
        ResponseHelper.responseBuilder(context.requestId)
          .setMessageName(os.responseName)
          .setMessage(ProtoUtils.byteArrayToByteString(os.responseToBytes(responseMsg), avoidByteStringCopy))
          .build
    }

    log.debug("Sending response: %s".format(response))
    channel.write((context, response))
    statsActor.endRequest(0, context.requestId)
  }
}
/** Builders for outbound NorbertMessage responses. */
private[netty] object ResponseHelper {
  /** New response builder echoing the request UUID (split into msb/lsb longs). */
  def responseBuilder(requestId: UUID) = {
    NorbertProtos.NorbertMessage.newBuilder.setRequestIdMsb(requestId.getMostSignificantBits).setRequestIdLsb(requestId.getLeastSignificantBits)
  }

  /** Error response carrying the exception's class name and (null-safe) message. */
  def errorResponse(requestId: UUID, ex: Exception, status: NorbertProtos.NorbertMessage.Status = NorbertProtos.NorbertMessage.Status.ERROR) = {
    responseBuilder(requestId)
      .setMessageName(ex.getClass.getName)
      .setStatus(status)
      .setErrorMessage(if (ex.getMessage == null) "" else ex.getMessage)
      .build
  }
}
/** JMX view of server-side request statistics (times in milliseconds). */
trait NetworkServerStatisticsMBean {
  def getRequestsPerSecond: Int
  def getAverageRequestProcessingTime: Double
  def getMedianTime: Double
  def get90PercentileTime: Double
  def get99PercentileTime: Double
}
/** JMX bean backed by the handler's CachedNetworkStatistics. Each getter sums
  * the per-key values (there is a single key, 0, on the server side).
  */
class NetworkServerStatisticsMBeanImpl(clientName: Option[String], serviceName: String, val stats: CachedNetworkStatistics[Int, UUID])
  extends MBean(classOf[NetworkServerStatisticsMBean], JMX.name(clientName, serviceName)) with NetworkServerStatisticsMBean {

  def getMedianTime = stats.getStatistics(0.5).map(_.finished.values.map(_.percentile)).flatten.sum

  def getRequestsPerSecond = stats.getStatistics(0.5).map(_.rps().values).flatten.sum

  // Total processing time divided by number of finished requests; 0.0 when no
  // statistics are available or the window is empty (safeDivide default).
  def getAverageRequestProcessingTime = stats.getStatistics(0.5).map { stats =>
    val total = stats.finished.values.map(_.total).sum
    val size = stats.finished.values.map(_.size).sum
    safeDivide(total.toDouble, size)(0.0)
  } getOrElse(0.0)

  def get90PercentileTime = stats.getStatistics(0.90).map(_.finished.values.map(_.percentile)).flatten.sum
  def get99PercentileTime = stats.getStatistics(0.99).map(_.finished.values.map(_.percentile)).flatten.sum
}
| linkedin-sna/norbert | network/src/main/scala/com/linkedin/norbert/network/netty/ServerChannelHandler.scala | Scala | apache-2.0 | 8,567 |
package julienrf.variants
import scala.language.experimental.macros
import play.api.libs.json.{Writes, Reads, Format, __}
import scala.reflect.macros.Context
/** Macro-based derivation of play-json Format/Reads/Writes for a sealed case
  * class hierarchy, using a discriminator field (default: "$variant") to tell
  * the concrete subtypes apart.
  */
object Variants {

  /**
   * @tparam A The base type of a case class hierarchy.
   * @return A [[play.api.libs.json.Format]] for the type hierarchy of `A`. It uses an additional field named `$variant`
   *         to discriminate between the possible subtypes of `A`.
   */
  def format[A]: Format[A] = macro Impl.format[A]

  /**
   * @param discriminator Format of the type discriminator field.
   * @tparam A Base type of case class hierarchy.
   * @return A [[play.api.libs.json.Format]] for the type hierarchy of `A`.
   */
  def format[A](discriminator: Format[String]): Format[A] = macro Impl.formatDiscriminator[A]

  /**
   * @tparam A The base type of a case class hierarchy.
   * @return A [[play.api.libs.json.Reads]] for the type hierarchy of `A`. It relies on an additional field named `$variant`
   *         to discriminate between the possible subtypes of `A`.
   */
  def reads[A]: Reads[A] = macro Impl.reads[A]

  /**
   * @param discriminator Decoder of the type discriminator field.
   * @tparam A Base type of case class hierarchy.
   * @return A [[play.api.libs.json.Reads]] for the type hierarchy of `A`.
   */
  def reads[A](discriminator: Reads[String]): Reads[A] = macro Impl.readsDiscriminator[A]

  /**
   * @tparam A The base type of a case class hierarchy.
   * @return A [[play.api.libs.json.Writes]] for the type hierarchy of `A`. It uses an additional field named `$variant`
   *         to discriminate between the possible subtypes of `A`.
   */
  def writes[A]: Writes[A] = macro Impl.writes[A]

  /**
   * @param discriminator Name of the type discriminator field.
   * @tparam A Base type of case class hierarchy.
   * @return A [[play.api.libs.json.Writes]] for the type hierarchy of `A`.
   */
  def writes[A](discriminator: Writes[String]): Writes[A] = macro Impl.writesDiscriminator[A]

  private object Impl {

    // Default discriminator: a top-level "$variant" string field.
    val defaultDiscriminator = (__ \\ "$variant").format[String]

    /**
     * Given the following definition of class hierarchy `Foo`:
     *
     * {{{
     *   sealed trait Foo
     *   case class Bar(x: Int) extends Foo
     *   case class Baz(s: String) extends Foo
     *   case object Bah extends Foo
     * }}}
     *
     * `Variants.format[Foo]` expands to the following:
     *
     * {{{
     *   {
     *     import play.api.libs.json.{Writes, Reads}
     *
     *     val writes = Writes[Foo] {
     *       case bar: Bar => Json.toJson(bar)(Json.writes[Bar]).as[JsObject] + ("$variant" -> JsString("Bar"))
     *       case baz: Baz => Json.toJson(baz)(Json.writes[Baz]).as[JsObject] + ("$variant" -> JsString("Baz"))
     *       case _: Bah => JsObject(Seq("$variant" -> JsString("Bah")))
     *     }
     *
     *     val reads = Reads[Foo] { json =>
     *       (json \\ "$variant").validate[String].flatMap {
     *         case "Bar" => Json.fromJson(json)(Json.reads[Bar])
     *         case "Baz" => Json.fromJson(json)(Json.reads[Baz])
     *         case "Bah" => JsSuccess(Bah)
     *       }
     *     }
     *
     *     Format(reads, writes)
     *   }
     *
     * }}}
     *
     */
    def format[A : c.WeakTypeTag](c: Context): c.Expr[Format[A]] = {
      import c.universe._
      formatDiscriminator[A](c)(reify(defaultDiscriminator))
    }

    def formatDiscriminator[A : c.WeakTypeTag](c: Context)(discriminator: c.Expr[Format[String]]): c.Expr[Format[A]] = {
      import c.universe._
      val (baseClass, variants) = baseAndVariants[A](c)
      val writes = writesTree(c)(baseClass, variants, discriminator)
      val reads = readsTree(c)(baseClass, variants, discriminator)
      c.Expr[Format[A]](q"play.api.libs.json.Format[$baseClass]($reads, $writes)")
    }

    def reads[A : c.WeakTypeTag](c: Context): c.Expr[Reads[A]] = {
      import c.universe._
      readsDiscriminator[A](c)(reify(defaultDiscriminator))
    }

    def readsDiscriminator[A : c.WeakTypeTag](c: Context)
                                             (discriminator: c.Expr[Reads[String]]): c.Expr[Reads[A]] = {
      import c.universe._
      val (baseClass, variants) = baseAndVariants[A](c)
      c.Expr[Reads[A]](readsTree(c)(baseClass, variants, discriminator))
    }

    def writes[A : c.WeakTypeTag](c: Context): c.Expr[Writes[A]] = {
      import c.universe._
      writesDiscriminator[A](c)(reify(defaultDiscriminator))
    }

    def writesDiscriminator[A : c.WeakTypeTag](c: Context)(discriminator: c.Expr[Writes[String]]): c.Expr[Writes[A]] = {
      val (baseClass, variants) = baseAndVariants[A](c)
      c.Expr[Writes[A]](writesTree(c)(baseClass, variants, discriminator))
    }

    /*
     * Get the class hierarchy and checks that the hierarchy is closed
     */
    def baseAndVariants[A : c.WeakTypeTag](c: Context): (c.universe.ClassSymbol, Set[c.universe.ClassSymbol]) = {
      import c.universe._
      val baseClass = weakTypeOf[A].typeSymbol.asClass
      baseClass.typeSignature // SI-7046
      if (!baseClass.isSealed) {
        c.abort(c.enclosingPosition, s"$baseClass is not sealed")
      }
      // Get all the possible variants of this type
      val variants = baseClass.knownDirectSubclasses.map(_.asClass)
      for (variant <- variants if !(variant.isCaseClass || variant.isModuleClass)) {
        c.abort(c.enclosingPosition, s"$variant is not a case class nor a case object")
      }
      baseClass -> variants
    }

    // Builds the Writes tree: case classes are serialized by a derived
    // Json.writes merged with the discriminator; case objects emit only the
    // discriminator field.
    def writesTree(c: Context)(baseClass: c.universe.ClassSymbol, variants: Set[c.universe.ClassSymbol], discriminator: c.Expr[Writes[String]]): c.Tree = {
      import c.universe._
      val writesCases = for (variant <- variants) yield {
        if (!variant.isModuleClass) {
          val term = newTermName(c.fresh())
          cq"""$term: $variant => play.api.libs.json.Json.toJson($term)(play.api.libs.json.Json.writes[$variant]).as[play.api.libs.json.JsObject] ++ $discriminator.writes(${variant.name.decodedName.toString})"""
        } else {
          cq"""_: $variant => $discriminator.writes(${variant.name.decodedName.toString})"""
        }
      }
      q"play.api.libs.json.Writes[$baseClass] { case ..$writesCases }"
    }

    // Builds the Reads tree: the discriminator value selects which subtype's
    // derived Json.reads (or the case object instance) to use.
    def readsTree(c: Context)(baseClass: c.universe.ClassSymbol, variants: Set[c.universe.ClassSymbol], discriminator: c.Expr[Reads[String]]): c.Tree = {
      import c.universe._
      val readsCases = for (variant <- variants) yield {
        if (!variant.isModuleClass) {
          cq"""${variant.name.decodedName.toString} => play.api.libs.json.Json.fromJson(json)(play.api.libs.json.Json.reads[$variant])"""
        } else {
          cq"""${variant.name.decodedName.toString} => play.api.libs.json.JsSuccess(${newTermName(variant.name.decodedName.toString)})"""
        }
      }
      q"""
        play.api.libs.json.Reads[$baseClass](json =>
          $discriminator.reads(json).flatMap { case ..$readsCases }
        )
      """
    }
  }
}
| lkt/play-json-variants | src/main/scala/julienrf/variants/Variants.scala | Scala | mit | 7,006 |
package com.cyrusinnovation.computation.persistence.writer
import org.joda.time.DateTime
import java.text.SimpleDateFormat
/** LibraryInspector variant rendering timestamps for YAML output. */
object LibraryInspectorForYaml extends LibraryInspector {
  /** Formats a Joda DateTime in `java.util.Date.toString` layout
    * (e.g. "Mon Jan 02 15:04:05 GMT 2006"), using the default locale/zone. */
  protected override def dateTime(d: DateTime): String =
    new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy").format(d.toDate)
}
| psfblair/computation-engine | persistence/src/main/scala/com/cyrusinnovation/computation/persistence/writer/LibraryInspectorForYaml.scala | Scala | apache-2.0 | 352 |
/*
* Copyright (c) 2011-14 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import org.junit.Test
import org.junit.Assert._
import poly._
import ops.hlist.Mapper
import test._
import testutil._
/** Polymorphic singleton function: wraps any value in a one-element `Set`. */
object singleton extends (Id ~> Set) {
  def apply[T](t : T) = Set(t)
}
/** Polymorphic function selecting an arbitrary element from a non-empty `Set`
  * (`None` when the set is empty). */
object choose extends (Set ~> Option) {
  def apply[T](s : Set[T]) = s.headOption
}
/** Polymorphic function creating singleton `List`s. */
object list extends (Id ~> List) {
  def apply[T](t : T) = List(t)
}
/** Polymorphic function returning the head of a `List`, or `None` if empty. */
object headOption extends (List ~> Option) {
  def apply[T](l : List[T]) = l.headOption
}
/** Polymorphic function which injects a value into an `Option`
  * (via `Option.apply`, so `null` maps to `None`). */
object option extends (Id ~> Option) {
  def apply[T](t : T) = Option(t)
}
/** Polymorphic function testing whether or not an `Option` is defined. */
object isDefined extends (Option ~>> Boolean) {
  def apply[T](o : Option[T]) = o.isDefined
}
/** Polymorphic function which opens an `Option`
  * (throws on `None`, like `Option.get`). */
object get extends (Option ~> Id) {
  def apply[T](o : Option[T]) = o.get
}
/** Polymorphic addition with type specific cases: numeric addition for
  * Int/Double, concatenation for String and List. */
object plus extends Poly2 {
  implicit val caseInt = at[Int, Int](_ + _)
  implicit val caseDouble = at[Double, Double](_ + _)
  implicit val caseString = at[String, String](_ + _)
  implicit def caseList[T] = at[List[T], List[T]](_ ::: _)
}
/** Polymorphic zero with type specific cases: the identity element for the
  * corresponding `plus` case (0, 0.0, "", Nil). */
object zero extends Poly0 {
  implicit val zeroInt = at(0)
  implicit val zeroDouble = at(0.0)
  implicit val zeroString = at("")
  implicit def zeroList[T] = at[List[T]](Nil)
}
class PolyTests {
object toInt extends (Id ~>> Int) {
def apply[T](t : T) = t.toString.toInt
}
object size extends Poly1 {
implicit def default[T] = at[T](_ => 1)
implicit def caseInt = at[Int](_ => 1)
implicit def caseString = at[String](_.length)
implicit def caseList[T] = at[List[T]](_.length)
implicit def caseOption[T](implicit st : Case.Aux[T, Int]) = at[Option[T]](t => 1+(t map size).getOrElse(0))
implicit def caseTuple[T, U](implicit st : Case.Aux[T, Int], su : Case.Aux[U, Int]) = at[(T, U)]{ case (t, u) => size(t)+size(u) }
}
@Test
def testHRFn {
implicitly[choose.Case[Set[Int]]]
implicitly[size.Case[Int]]
implicitly[option.Case[Int]]
implicitly[singleton.Case[Int]]
val si = size(23)
assertEquals(1, si)
val ss = size("foo")
assertEquals(3, ss)
val sl = size(List(1, 2, 3))
assertEquals(3, sl)
val so = size(Option(23))
assertEquals(2, so)
val st = size((23, "foo"))
assertEquals(4, st)
val ls = List("foo", "bar", "baz")
val lss = ls map size
typed[List[Int]](lss)
assertEquals(List(3, 3, 3), lss)
val lsi = ls map identity
typed[List[String]](lsi)
assertEquals(ls, lsi)
val is = identity("foo")
typed[String](is)
assertEquals("foo", is)
// Direct application
val s1 = singleton(23)
typed[Set[Int]](s1)
assertEquals(Set(23), s1)
val s2 = singleton("foo")
typed[Set[String]](s2)
assertEquals(Set("foo"), s2)
def app[G[_]](f : Int => G[Int]) = f(23)
val as = app(singleton)
typed[Set[Int]](as)
assertEquals(Set(23), as)
val al = app(list)
typed[List[Int]](al)
assertEquals(List(23), al)
// Implicit conversion to monomorphic function values
val l1 = List(1, 2, 3) map singleton
typed[List[Set[Int]]](l1)
assertEquals(List(Set(1), Set(2), Set(3)), l1)
val l2 = List("foo", "bar", "baz") map list
typed[List[List[String]]](l2)
assertEquals(List(List("foo"), List("bar"), List("baz")), l2)
val l3 = List(List(1), List(2), List(4)) map headOption
typed[List[Option[Int]]](l3)
assertEquals(List(Option(1), Option(2), Option(4)), l3)
// Use as polymorphic function values
def pairApply[G[_]](f : Id ~> G) = (f(23), f("foo"))
val a1 = pairApply(singleton)
typed[(Set[Int], Set[String])](a1)
assertEquals((Set(23), Set("foo")), a1)
val a2 = pairApply(list)
typed[(List[Int], List[String])](a2)
assertEquals((List(23), List("foo")), a2)
// Use as polymorphic function values with type specific cases
def pairApply2(f : Poly1)(implicit ci : f.Case[Int], cs : f.Case[String]) = (f(23), f("foo"))
val a4 = pairApply2(singleton)
typed[(Set[Int], Set[String])](a4)
assertEquals((Set(23), Set("foo")), a4)
val a5 = pairApply2(list)
typed[(List[Int], List[String])](a5)
assertEquals((List(23), List("foo")), a5)
val a6 = pairApply2(size)
typed[(Int, Int)](a6)
assertEquals((1, 3), a6)
def pairMap[G[_]](f : Id ~> G) = (List(1, 2, 3) map f, List("foo", "bar", "baz") map f)
val m1 = pairMap(singleton)
typed[(List[Set[Int]], List[Set[String]])](m1)
assertEquals((List(Set(1), Set(2), Set(3)), List(Set("foo"), Set("bar"), Set("baz"))), m1)
val m2 = pairMap(list)
typed[(List[List[Int]], List[List[String]])](m2)
assertEquals((List(List(1), List(2), List(3)), List(List("foo"), List("bar"), List("baz"))), m2)
val l5 = List(1, 2, 3)
val l6 = l5 map option
typed[List[Option[Int]]](l6)
assertEquals(List(Option(1), Option(2), Option(3)), l6)
val l7 = l6 map isDefined
typed[List[Boolean]](l7)
assertEquals(List(true, true, true), l7)
val lsi2 = List(Set(1), Set(2), Set(3))
val loi2 = lsi2 map choose
typed[List[Option[Int]]](loi2)
assertEquals(List(Option(1), Option(2), Option(3)), loi2)
val l8 = 23 :: "foo" :: List(1, 2, 3, 4) :: Option("bar") :: (23, "foo") :: 2.0 :: HNil
val l9 = l8 map size
typed[Int :: Int :: Int :: Int :: Int :: Int :: HNil](l9)
assertEquals(1 :: 3 :: 4 :: 4 :: 4 :: 1 :: HNil, l9)
def hlistMap(f : Poly)(implicit mapper : Mapper[f.type, Int :: String :: HNil]) =
(23 :: "foo" :: HNil) map f
val hm1 = hlistMap(singleton)
typed[Set[Int] :: Set[String] :: HNil](hm1)
assertEquals(Set(23) :: Set("foo") :: HNil, hm1)
val hm2 = hlistMap(list)
typed[List[Int] :: List[String] :: HNil](hm2)
assertEquals(List(23) :: List("foo") :: HNil, hm2)
}
@Test
def testCompose {
val so = singleton compose option
val sos = so("foo")
typed[Set[Option[String]]](sos)
assertEquals(Set(Option("foo")), sos)
val soi = so(23)
typed[Set[Option[Int]]](soi)
assertEquals(Set(Option(23)), soi)
}
// `zero` is a polymorphic value: the requested type parameter selects
// the identity element (0, "", Nil) it evaluates to.
@Test
def testPolyVal: Unit = {
  val zeroInt = zero[Int]
  typed[Int](zeroInt)
  assertEquals(0, zeroInt)

  val intPlusZero = 23 + zero[Int]
  typed[Int](intPlusZero)
  assertEquals(23, intPlusZero)

  val zeroString = zero[String]
  typed[String](zeroString)
  assertEquals("", zeroString)

  val stringPlusZero = "foo" + zero[String]
  typed[String](stringPlusZero)
  assertEquals("foo", stringPlusZero)

  val zeroList = zero[List[Int]]
  typed[List[Int]](zeroList)
  assertEquals(Nil, zeroList)

  val listPlusZero = List(23) ++ zero[List[Int]]
  typed[List[Int]](listPlusZero)
  assertEquals(List(23), listPlusZero)
}
// Polymorphic function value with type-specific cases for two
// argument types. The result type is dependent on the argument type:
// an Int is rendered as a String, a String is parsed into an Int.
object bidi extends Poly1 {
// Int => String
implicit val caseInt = at[Int](_.toString)
// String => Int (throws NumberFormatException on non-numeric input)
implicit val caseString = at[String](_.toInt)
}
// Applying `bidi` flips each element's type: Int => String, String => Int,
// both for direct application and when mapped over an HList.
@Test
def testBinary: Unit = {
  val fromInt = bidi(23)
  typed[String](fromInt)
  assertEquals("23", fromInt)

  val fromString = bidi("23")
  typed[Int](fromString)
  assertEquals(23, fromString)

  val mixed = 1 :: "2" :: 3 :: "4" :: HNil
  val flipped = mixed map bidi
  typed[String :: Int :: String :: Int :: HNil](flipped)
  assertEquals("1" :: 2 :: "3" :: 4 :: HNil, flipped)
}
@Test
def testRotateLeft: Unit = {
  // Poly3 with a single case; rotating left by N moves the first N
  // parameters to the end while preserving the result.
  object isd extends Poly3 {
    implicit val default = at[Int, String, Double] {
      case (i, s, d) => s"i: $i, s: $s, d: $d"
    }
  }
  val expected3 = s"i: 1, s: foo, d: ${2.0}"

  val direct3 = isd(1, "foo", 2.0)
  assertTypedEquals[String](expected3, direct3)

  val sdi = isd.rotateLeft[Nat._1]
  val rotated3Once = sdi("foo", 2.0, 1)
  assertTypedEquals[String](expected3, rotated3Once)

  val dis = isd.rotateLeft[Nat._2]
  val rotated3Twice = dis(2.0, 1, "foo")
  assertTypedEquals[String](expected3, rotated3Twice)

  // Same exercise with a Poly4.
  object isdc extends Poly4 {
    implicit val default = at[Int, String, Double, Char] {
      case (i, s, d, c) => s"i: $i, s: $s, d: $d, c: $c"
    }
  }
  val expected4 = s"i: 1, s: foo, d: ${2.0}, c: a"

  val direct4 = isdc(1, "foo", 2.0, 'a')
  assertTypedEquals[String](expected4, direct4)

  val sdci = isdc.rotateLeft[Nat._1]
  val rotated4Once = sdci("foo", 2.0, 'a', 1)
  assertTypedEquals[String](expected4, rotated4Once)

  val dcis = isdc.rotateLeft[Nat._2]
  val rotated4Twice = dcis(2.0, 'a', 1, "foo")
  assertTypedEquals[String](expected4, rotated4Twice)
}
@Test
def testRotateRight: Unit = {
  // Poly3 with a single case; rotating right by N moves the last N
  // parameters to the front while preserving the result.
  object isd extends Poly3 {
    implicit val default = at[Int, String, Double] {
      case (i, s, d) => s"i: $i, s: $s, d: $d"
    }
  }
  val expected3 = s"i: 1, s: foo, d: ${2.0}"

  val direct3 = isd(1, "foo", 2.0)
  assertTypedEquals[String](expected3, direct3)

  val dis = isd.rotateRight[Nat._1]
  val rotated3Once = dis(2.0, 1, "foo")
  assertTypedEquals[String](expected3, rotated3Once)

  val sdi = isd.rotateRight[Nat._2]
  val rotated3Twice = sdi("foo", 2.0, 1)
  assertTypedEquals[String](expected3, rotated3Twice)

  // Same exercise with a Poly4.
  object isdc extends Poly4 {
    implicit val default = at[Int, String, Double, Char] {
      case (i, s, d, c) => s"i: $i, s: $s, d: $d, c: $c"
    }
  }
  val expected4 = s"i: 1, s: foo, d: ${2.0}, c: a"

  val direct4 = isdc(1, "foo", 2.0, 'a')
  assertTypedEquals[String](expected4, direct4)

  val cisd = isdc.rotateRight[Nat._1]
  val rotated4Once = cisd('a', 1, "foo", 2.0)
  assertTypedEquals[String](expected4, rotated4Once)

  val dcis = isdc.rotateRight[Nat._2]
  val rotated4Twice = dcis(2.0, 'a', 1, "foo")
  assertTypedEquals[String](expected4, rotated4Twice)
}
// Builds a Poly1 incrementally from individual cases (all yielding Int)
// and applies it to each supported argument type.
@Test
def testPoly1Builder: Unit = {
  val builtPoly =
    Poly1.at[Int](x => x).at[String](_.length).at[Boolean](b => if (b) 1 else 0).build
  // Brings the generated implicit cases into scope.
  import builtPoly._

  val onInt = builtPoly(10)
  assertTypedEquals[Int](10, onInt)

  val onString = builtPoly("hello")
  assertTypedEquals[Int](5, onString)

  val onBoolean = builtPoly(true)
  assertTypedEquals[Int](1, onBoolean)
}
// Builds an accumulator-style Poly2 case-by-case: an Int combined with an
// Int, String or Boolean, always yielding Int.
@Test
def testPoly2Builder: Unit = {
  val builtPoly =
    Poly2.at[Int, Int]((acc, x) => acc + x)
      .at[Int, String]((acc, s) => acc + s.length)
      .at[Int, Boolean]((acc, b) => acc + (if (b) 1 else 0))
      .build
  // Brings the generated implicit cases into scope.
  import builtPoly._

  val plusInt = builtPoly(5, 10)
  assertTypedEquals[Int](15, plusInt)

  val plusString = builtPoly(5, "hello")
  assertTypedEquals[Int](10, plusString)

  val plusBoolean = builtPoly(5, true)
  assertTypedEquals[Int](6, plusBoolean)
}
}
| triggerNZ/shapeless | core/src/test/scala/shapeless/poly.scala | Scala | apache-2.0 | 11,386 |
package scala.meta.metai
import scala.collection.immutable.ListMap
import scala.meta.io.AbsolutePath
import scala.meta.io.Classpath
/**
 * Outcome of a metai run: a success flag per classpath entry, kept in
 * insertion order.
 */
final class Result private (val status: ListMap[AbsolutePath, Boolean]) {

  /** True when every classpath entry was processed successfully. */
  def isSuccess: Boolean = status.valuesIterator.forall(identity)

  /** The processed classpath; only available when every entry succeeded. */
  def classpath: Option[Classpath] =
    if (!isSuccess) None
    else Some(Classpath(status.keysIterator.toList))

  override def toString: String = s"Result($status)"
}
/** Factory for [[Result]] instances; the class constructor itself is private. */
object Result {
def apply(status: ListMap[AbsolutePath, Boolean]): Result = {
new Result(status)
}
}
| olafurpg/scalameta | semanticdb/metai/src/main/scala/scala/meta/metai/Result.scala | Scala | bsd-3-clause | 559 |
/*
* Copyright 2013 Michael Krolikowski
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mkroli.dns4s.dsl
import com.github.mkroli.dns4s.Message
import com.github.mkroli.dns4s.section.QuestionSection
/**
 * A transformation of a [[QuestionSection]]. Modifiers compose left-to-right
 * with `~`: `(a ~ b)(qs)` applies `a` first, then `b` on `a`'s output.
 */
trait QuestionSectionModifier { self =>
// Sequential composition: the right-hand modifier runs on this modifier's result.
def ~(qsm: QuestionSectionModifier): QuestionSectionModifier = new QuestionSectionModifier {
override def apply(qs: QuestionSection): QuestionSection = qsm(self(qs))
}
def apply(qs: QuestionSection): QuestionSection
}
/**
 * As a modifier, appends question sections to a DNS [[Message]] and bumps the
 * header's `qdcount` accordingly; as a pattern, extracts a message's
 * question sections.
 */
object Questions {
def apply[T](question: T*)(implicit toQuestionSection: T => QuestionSection): MessageModifier = new MessageModifier {
override def apply(msg: Message) =
msg.copy(header = msg.header.copy(qdcount = msg.header.qdcount + question.size), question = msg.question ++ question.map(toQuestionSection))
}
// Always matches; yields the message's question sections as a list.
def unapply(msg: Message): Option[Seq[QuestionSection]] = Some(msg.question.toList)
}
/**
 * Base class for extractors over a single [[QuestionSection]] field.
 *
 * @param e selects the field that `unapply` exposes (always matches)
 */
private[dsl] abstract class QuestionExtractor[T](e: QuestionSection => T) {
def unapply(qs: QuestionSection): Option[T] = Some(e(qs))
}
/** Extracts the query name (QNAME) of a question section, or builds a modifier that sets it. */
object QName extends QuestionExtractor(_.qname) {
  def apply(qname: String): QuestionSectionModifier =
    new QuestionSectionModifier {
      override def apply(qs: QuestionSection): QuestionSection =
        qs.copy(qname = qname)
    }
}
/** Extracts the query type (QTYPE) of a question section, or builds a modifier that sets it. */
object QType extends QuestionExtractor(_.qtype) {
  def apply(qtype: Int): QuestionSectionModifier =
    new QuestionSectionModifier {
      override def apply(qs: QuestionSection): QuestionSection =
        qs.copy(qtype = qtype)
    }
}
/** Extracts the query class (QCLASS) of a question section, or builds a modifier that sets it. */
object QClass extends QuestionExtractor(_.qclass) {
  def apply(qclass: Int): QuestionSectionModifier =
    new QuestionSectionModifier {
      override def apply(qs: QuestionSection): QuestionSection =
        qs.copy(qclass = qclass)
    }
}
| mkroli/dns4s | core/src/main/scala/com/github/mkroli/dns4s/dsl/Question.scala | Scala | apache-2.0 | 2,191 |
package com.github.kelebra.akka.js.snake.state
import com.github.kelebra.akka.js.snake.{Block, `↑`, `←`}
import org.scalatest.{Inside, Matchers, WordSpec}
// Behavioural tests for the pure state-transition functions mixed in via
// StateTransition. `↑` and `←` are direction values from the game model.
class StateTransitionTest extends WordSpec with Matchers with StateTransition with Inside {
"State transition" should {
"initialize empty state" in {
val empty = State()
val start = Block(0, 1, 1)
// Initialising seeds the body with exactly the start block and records
// the requested direction.
inside(initialized(empty, ↑, start)) { case State(direction, blocks, _) =>
blocks should be(start :: Nil)
direction should be(↑)
}
}
// `initialized` must not overwrite an already-initialised state: a second
// call with different arguments returns the state unchanged.
"not initialize state one more time" in {
val empty = State()
val block = Block(0, 0, 1)
val direction = ↑
val init = initialized(empty, direction, block)
init should be(initialized(init, direction.opposite, Block(0, 0, 2)))
}
// Turning to the opposite direction also reverses the body ordering.
"change direction and reverse blocks for opposite direction" in {
val direction = ↑
val blocks = Block(1, 2, 3) :: Block(3, 2, 1) :: Nil
val state = State(direction, blocks, None)
directed(state, direction.opposite) should be(state.copy(direction = direction.opposite, body = blocks.reverse))
}
// A non-opposite turn leaves the body untouched.
"change direction only" in {
val direction = `↑`
val blocks = Block(1, 2, 3) :: Block(3, 2, 1) :: Nil
val state = State(direction, blocks, None)
directed(state, `←`) should be(state.copy(direction = `←`))
}
}
}
| kelebra/akka-js-snake | src/test/scala/com/github/kelebra/akka/js/snake/state/StateTransitionTest.scala | Scala | mit | 1,401 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.vitreoussoftware.test
import org.scalatest._
import org.scalatest.prop.PropertyChecks
/**
* Base for setting up tests using ScalaTest: FlatSpec style with matchers
* and the Inside/OptionValues/EitherValues/Inspectors helper mixins.
*/
abstract class UnitSpec extends FlatSpec with Matchers with Inside with OptionValues with EitherValues with Inspectors
/**
* Base for property-based testing: [[UnitSpec]] plus ScalaCheck-backed
* property checks (`PropertyChecks`).
*/
abstract class PropertySpec extends UnitSpec with PropertyChecks
/**
* Describe / it syntax behavioural tests with a per-test fixture.
* Extends `fixture.FunSpec` directly (not [[UnitSpec]]) to get the
* fixture-passing variant of the spec trait.
*/
abstract class BehaviorSpec extends fixture.FunSpec with Matchers with Inside with OptionValues with EitherValues with Inspectors
/* RetryPolicy.scala
*
* Copyright (c) 2013-2014 linkedin.com
* Copyright (c) 2013-2015 zman.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package atmos
import scala.concurrent.{ blocking, ExecutionContext, Future, Promise }
import scala.concurrent.duration._
import scala.util.{ Try, Success, Failure }
import rummage.Clock
/**
* A policy that enables customizable retries for arbitrary operations.
*
* @param termination The strategy for determining when to abort a retry operation.
* @param backoff The strategy used to calculate delays between retries.
* @param monitor The monitor that is notified of retry events.
* @param classifier The classifier for errors raised during retry operations. This field is deprecated and will be
* used as a fallback for the `errors` classifier, which should be used instead.
* @param results The classifier for results returned during retry operations.
* @param errors The classifier for errors raised during retry operations.
*/
case class RetryPolicy(
termination: TerminationPolicy = RetryPolicy.defaultTermination,
backoff: BackoffPolicy = RetryPolicy.defaultBackoff,
monitor: EventMonitor = RetryPolicy.defaultMonitor,
@deprecated("Use `errors` instead of `classifier`.", "2.1") classifier: ErrorClassifier = RetryPolicy.defaultErrors,
results: ResultClassifier = RetryPolicy.defaultResults,
errors: ErrorClassifier = RetryPolicy.defaultErrors) {
import RetryPolicy.Outcome
/**
* Performs the specified operation synchronously, retrying according to this policy.
*
* @tparam T The return type of the operation being retried.
* @param operation The operation to repeatedly perform.
* @param clock The clock used to track time and wait out backoff delays.
*/
def retry[T]()(operation: => T)(implicit clock: Clock): T =
new SyncRetryOperation(None, operation _).run()
/**
* Performs the specified named operation synchronously, retrying according to this policy.
*
* @tparam T The return type of the operation being retried.
* @param name The name of the operation.
* @param operation The operation to repeatedly perform.
* @param clock The clock used to track time and wait out backoff delays.
*/
def retry[T](name: String)(operation: => T)(implicit clock: Clock): T =
new SyncRetryOperation(Some(name), operation _).run()
/**
* Performs the specified optionally named operation synchronously, retrying according to this policy.
*
* @tparam T The return type of the operation being retried.
* @param name The optional name of the operation.
* @param operation The operation to repeatedly perform.
* @param clock The clock used to track time and wait out backoff delays.
*/
def retry[T](name: Option[String])(operation: => T)(implicit clock: Clock): T =
new SyncRetryOperation(name, operation _).run()
/**
* Performs the specified operation asynchronously, retrying according to this policy.
*
* @tparam T The return type of the operation being retried.
* @param operation The operation to repeatedly perform.
* @param ec The execution context to retry on.
* @param clock The clock used to track time and schedule backoff notifications.
*/
def retryAsync[T]()(operation: => Future[T])(implicit ec: ExecutionContext, clock: Clock): Future[T] =
new AsyncRetryOperation(None, operation _).run()
/**
* Performs the specified named operation asynchronously, retrying according to this policy.
*
* @tparam T The return type of the operation being retried.
* @param name The name of the operation.
* @param operation The operation to repeatedly perform.
* @param ec The execution context to retry on.
* @param clock The clock used to track time and schedule backoff notifications.
*/
def retryAsync[T](name: String)(operation: => Future[T])(implicit ec: ExecutionContext, clock: Clock): Future[T] =
new AsyncRetryOperation(Some(name), operation _).run()
/**
* Performs the specified optionally named operation asynchronously, retrying according to this policy.
*
* @tparam T The return type of the operation being retried.
* @param name The optional name of the operation.
* @param operation The operation to repeatedly perform.
* @param ec The execution context to retry on.
* @param clock The clock used to track time and schedule backoff notifications.
*/
def retryAsync[T](name: Option[String])(operation: => Future[T])(implicit ec: ExecutionContext, clock: Clock): Future[T] =
new AsyncRetryOperation(name, operation _).run()
/**
* Base representation of a single operation being retried.
*
* @param name The name of this operation.
*/
private abstract class RetryOperation(name: Option[String])(implicit val clock: Clock) {
/** The time that this retry operation started at. */
val startedAt = clock.tick
/** The number of times the operation has been attempted. */
def failedAttempts: Int
/** Sets the number of times the operation has been attempted. */
def failedAttempts_=(failedAttempts: Int): Unit
/** Cached copy of the `classifier` field used as a fallback for the `errors` field. */
private val errorClassifier = errors orElse classifier
/**
* Analyzes the outcome of an attempt and determines the next step to take.
*
* Successful results are classified first: an `Unacceptable` result is
* treated like a failed attempt (and may be retried), while failures are
* classified via `errors` with `classifier` as the deprecated fallback.
*
* @tparam T The return type of the operation being retried.
* @param outcome The outcome of the most recent attempt.
*/
protected def afterAttempt[T](outcome: Try[T]): Outcome[T] = outcome match {
case Success(result) => results.applyOrElse(result, ResultClassification) match {
case ResultClassification.Acceptable =>
Outcome.Return(result)
case ResultClassification.Unacceptable(status) =>
afterAttemptFailed(outcome, Outcome.Return(result), status)
}
case Failure(thrown) =>
afterAttemptFailed(outcome, Outcome.Throw(thrown), errorClassifier.applyOrElse(thrown, ErrorClassification))
}
/**
* Analyzes the outcome of a failed attempt and determines the next step to take.
*
* @tparam T The return type of the operation being retried.
* @param outcome The outcome of the most recent attempt.
* @param terminate The result to return if the retry operation should terminate.
* @param status The classification of the failed attempt.
*/
private def afterAttemptFailed[T](outcome: Try[T], terminate: Outcome[T], status: ErrorClassification): Outcome[T] = {
failedAttempts += 1
// Fatal classifications interrupt immediately, bypassing the termination policy.
if (status isFatal) {
monitor.interrupted(name, outcome, failedAttempts)
terminate
} else {
// Ask the termination policy whether the *next* attempt would exceed its limits.
val nextBackoff = backoff.nextBackoff(failedAttempts, outcome)
val nextAttemptAt = clock.tick - startedAt + nextBackoff
if (termination.shouldTerminate(failedAttempts, nextAttemptAt)) {
monitor.aborted(name, outcome, failedAttempts)
terminate
} else {
monitor.retrying(name, outcome, failedAttempts, nextBackoff, status.isSilent)
Outcome.Continue(nextBackoff)
}
}
}
}
/**
* A retry operation that runs entirely on the calling thread.
*
* @tparam T The return type of the operation being retried.
* @param name The name of this operation.
* @param operation The operation to repeatedly perform.
* @param c The clock used to track time and wait out backoff delays.
*/
private final class SyncRetryOperation[T](name: Option[String], operation: () => T)(implicit c: Clock)
extends RetryOperation(name) {
override var failedAttempts = 0
/** Repeatedly performs this operation synchronously until interrupted or aborted. */
// Tail-recursive loop: blocks the calling thread during backoff waits.
@annotation.tailrec
def run(): T = afterAttempt {
try Success(operation()) catch { case thrown: Throwable => Failure(thrown) }
} match {
case Outcome.Continue(backoffDuration) =>
clock.syncWait(backoffDuration)
run()
case Outcome.Throw(thrown) =>
throw thrown
case Outcome.Return(result) =>
result
}
}
/**
* A retry operation that runs entirely on the provided execution context.
*
* @tparam T The return type of the operation being retried.
* @param name The name of this operation.
* @param operation The operation to repeatedly perform.
* @param ec The execution context to retry on.
* @param c The clock used to track time and schedule backoff notifications.
*/
private final class AsyncRetryOperation[T](name: Option[String], operation: () => Future[T])(
implicit ec: ExecutionContext, c: Clock) extends RetryOperation(name) with (Try[T] => Unit) {
// Volatile: attempts may complete on different execution-context threads.
@volatile override var failedAttempts = 0
/** The ultimate outcome of this operation. */
val promise = Promise[T]()
/** Repeatedly performs this operation asynchronously until interrupted or aborted. */
def run(): Future[T] = {
try spawn() onComplete this catch { case thrown: Throwable => promise.failure(thrown) }
promise.future
}
/* Respond to the completion of the future. */
override def apply(attempt: Try[T]) = {
// Guards against completing the promise twice: once a branch below has
// completed it, the outer catch must not complete it again.
var notifyOnError = true
try {
afterAttempt(attempt) match {
case Outcome.Continue(backoffDuration) =>
clock.asyncWait(backoffDuration) onComplete {
case Success(_) =>
try spawn() onComplete this catch {
case thrown: Throwable =>
notifyOnError = false
promise.failure(thrown)
}
case Failure(thrown) =>
notifyOnError = false
promise.failure(thrown)
}
case Outcome.Throw(thrown) =>
notifyOnError = false
promise.failure(thrown)
case Outcome.Return(result) =>
notifyOnError = false
promise.success(result)
}
} catch {
case t: Throwable if notifyOnError => promise.failure(t)
}
}
/** Runs the user-supplied function and spawns an asynchronous operation. */
private def spawn(): Future[T] =
try operation() catch { case thrown: Throwable => Future.failed(thrown) }
}
}
/**
* Factory for retry policies, holding the default strategies used by
* [[RetryPolicy]]'s constructor and the internal attempt-outcome model.
*/
object RetryPolicy {
/** The default strategy for determining when to abort a retry operation. */
val defaultTermination: TerminationPolicy = termination.LimitAttempts()
/** The default strategy used to calculate delays between retries. */
val defaultBackoff: BackoffPolicy = backoff.FibonacciBackoff()
/** The default monitor that is notified of retry events. */
val defaultMonitor: EventMonitor = monitor.IgnoreEvents
/** The default classifier for results returned during retry operations. */
val defaultResults: ResultClassifier = ResultClassifier.empty
/** The default classifier for errors raised during retry operations. */
val defaultErrors: ErrorClassifier = ErrorClassifier.empty
/** Deprecated alias for [[defaultErrors]]. */
@deprecated("Use `defaultErrors` instead of `defaultClassifier`.", "2.1")
val defaultClassifier: ErrorClassifier = defaultErrors
/**
* Internal representation of the outcome of a retry attempt.
*
* @tparam T The return type of the operation being retried.
*/
private sealed trait Outcome[+T]
/**
* Definitions of the supported outcome types.
*/
private object Outcome {
/**
* An outcome that signals an operation should be retried.
*
* @param backoffDuration The amount of time that should be allowed to pass before retrying.
*/
case class Continue(backoffDuration: FiniteDuration) extends Outcome[Nothing]
/**
* An outcome that signals an operation should terminate by throwing an exception.
*
* @param thrown The exception to terminate with.
*/
case class Throw(thrown: Throwable) extends Outcome[Nothing]
/**
* An outcome that signals an operation should terminate by returning a result.
*
* @tparam T The return type of the operation being retried.
* @param result The result to terminate with.
*/
case class Return[T](result: T) extends Outcome[T]
}
} | zmanio/atmos | src/main/scala/atmos/RetryPolicy.scala | Scala | apache-2.0 | 13,136 |
/*
* PatternObjView.scala
* (Mellite)
*
* Copyright (c) 2012-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.mellite.impl.patterns
import de.sciss.icons.raphael
import de.sciss.lucre.edit.UndoManager
import de.sciss.lucre.swing._
import de.sciss.lucre.swing.edit.EditVar
import de.sciss.lucre.synth.Txn
import de.sciss.lucre.{Ident, Obj, Plain, Source, SpanLikeObj, Txn => LTxn}
import de.sciss.mellite.impl.code.CodeFrameImpl
import de.sciss.mellite.impl.objview.ObjListViewImpl.NonEditable
import de.sciss.mellite.impl.objview.{NoArgsListObjViewFactory, ObjListViewImpl, ObjViewImpl}
import de.sciss.mellite.impl.timeline.ObjTimelineViewBasicImpl
import de.sciss.mellite.{CodeFrame, CodeView, GUI, ObjListView, ObjTimelineView, ObjView, RunnerToggleButton, Shapes, UniverseHandler}
import de.sciss.patterns
import de.sciss.patterns.graph.Pat
import de.sciss.proc.Implicits._
import de.sciss.proc.{Code, Pattern}
import javax.swing.Icon
import scala.swing.Button
// ObjView factory for Pattern objects: list/timeline representations plus a
// code-editor frame for editing a pattern's graph source.
object PatternObjView extends NoArgsListObjViewFactory with ObjTimelineView.Factory {
type E[~ <: LTxn[~]] = Pattern[~]
val icon : Icon = ObjViewImpl.raphaelIcon(Shapes.Pattern)
val prefix : String = "Pattern"
def humanName : String = prefix
def tpe : Obj.Type = Pattern
def category : String = ObjView.categComposition
def mkListView[T <: Txn[T]](obj: Pattern[T])(implicit tx: T): PatternObjView[T] with ObjListView[T] = {
// val vr = Pattern.Var.unapply(obj).getOrElse {
// val _vr = Pattern.newVar[T](obj)
// _vr
// }
new ListImpl(tx.newHandle(obj)).initAttrs(obj)
}
def mkTimelineView[T <: Txn[T]](id: Ident[T], span: SpanLikeObj[T], obj: Pattern[T],
context: ObjTimelineView.Context[T])(implicit tx: T): ObjTimelineView[T] = {
val res = new TimelineImpl[T](tx.newHandle(obj)).initAttrs(id, span, obj)
res
}
private final class ListImpl[T <: Txn[T]](val objH: Source[T, Pattern[T]])
extends Impl[T]
private final class TimelineImpl[T <: Txn[T]](val objH : Source[T, Pattern[T]])
extends Impl[T] with ObjTimelineViewBasicImpl[T]
// Creates a fresh (empty) pattern variable, optionally naming it.
def makeObj[T <: Txn[T]](name: String)(implicit tx: T): List[Obj[T]] = {
val obj = Pattern.newVar[T](Pattern.empty[T])
if (name.nonEmpty) obj.name = name
obj :: Nil
}
private abstract class Impl[T <: Txn[T]]
extends PatternObjView[T]
with ObjListView /* .Int */[T]
with ObjViewImpl.Impl[T]
with ObjListViewImpl.EmptyRenderer[T]
with NonEditable[T]
/* with NonViewable[T] */ {
override def objH: Source[T, Pattern[T]]
override def obj(implicit tx: T): Pattern[T] = objH()
type E[~ <: LTxn[~]] = Pattern[~]
final def factory: ObjView.Factory = PatternObjView
final def isViewable = true
// currently this just opens a code editor. in the future we should
// add a scans map editor, and a convenience button for the attributes
// Note: only `Pattern.Var` instances are viewable here; a constant
// pattern yields None and no window opens.
override def openView(parent: Option[Window[T]])
(implicit tx: T, handler: UniverseHandler[T]): Option[Window[T]] = {
Pattern.Var.unapply(obj).map { vr =>
import de.sciss.mellite.Mellite.compiler
val frame = codeFrame(vr)
frame
}
}
// ---- adapter for editing a Pattern's source ----
}
// Builds (or looks up via the handler) the code-editor frame for a pattern
// variable; saving compiles the edited source into a new pattern graph.
private def codeFrame[T <: Txn[T]](obj: Pattern.Var[T])
(implicit tx: T, universeHandler: UniverseHandler[T],
compiler: Code.Compiler): CodeFrame[T] =
universeHandler(obj, CodeFrame) {
import universeHandler.universe
val codeObj = CodeFrameImpl.mkSource(obj = obj, codeTpe = Pattern.Code)()
// A pending "swap" code attribute indicates unsaved/unbuilt source.
val swapObjOpt = codeObj.attr.$[Code.Obj](CodeView.attrSwap)
val built0 = swapObjOpt.isEmpty
val objH = tx.newHandle(obj)
val code0 = (swapObjOpt getOrElse codeObj).value match {
case cs: Pattern.Code => cs
case other => sys.error(s"Pattern source code does not produce patterns.Graph: ${other.tpe.humanName}")
}
// Saving replaces the variable's value with the newly compiled graph, undoably.
val handler = new CodeView.Handler[T, Unit, Pat[_]] {
override def in(): Unit = ()
override def save(in: Unit, out: Pat[_])(implicit tx: T, undo: UndoManager[T]): Unit = {
val obj = objH()
EditVar.exprUndo[T, Pat[_], Pattern]("Change Pattern Graph", obj, Pattern.newConst[T](out))
}
def dispose()(implicit tx: T): Unit = ()
}
// "Evaluate" button: expands the pattern and prints up to 60 elements; if
// the stream yields all 60, the last is dropped and " ..." marks truncation.
val viewEval = View.wrap[T, Button] {
val actionEval = new swing.Action("Evaluate") { self =>
import universe.cursor
def apply(): Unit = {
implicit val ctx: patterns.Context[Plain] = patterns.Context()
val n = 60
val res0 = cursor.step { implicit tx =>
val obj = objH()
val g = obj.value
val st = g.expand
st.toIterator.take(n).toList
}
val abbr = res0.lengthCompare(n) == 0
val res = if (abbr) res0.init else res0
println(res.mkString("[", ", ", if (abbr) " ...]" else "]"))
}
}
GUI.toolButton(actionEval, raphael.Shapes.Quote)
}
val viewPower = RunnerToggleButton(obj)
val bottom = viewEval :: viewPower :: Nil
implicit val undo: UndoManager[T] = UndoManager()
CodeFrameImpl.newInstance(obj, objH, codeObj, code0,
handler = Some(handler),
bottom = bottom,
rightViewOpt = None,
built0 = built0,
canBounce = true
)
}
}
/** Object view whose represented object is a [[Pattern]]. */
trait PatternObjView[T <: LTxn[T]] extends ObjView[T] {
type Repr = Pattern[T]
} | Sciss/Mellite | app/src/main/scala/de/sciss/mellite/impl/patterns/PatternObjView.scala | Scala | agpl-3.0 | 5,890 |
/*
* Copyright (c) 2008, Michael Pradel
* All rights reserved. See LICENSE for details.
*/
package applications;
import scala.collection.mutable.HashSet
import scala.roles.dp._
import util.PerformanceAnalysis._
// Demonstrates the Visitor pattern implemented with dynamic roles: the
// `-:` operator attaches visitor/element roles from `scala.roles.dp` to
// plain company objects.
object VisitorTestApp {
import CompanyDataStructure._
// Builds a small company tree (dep1 > dep2 > tom), prints it, raises all
// salaries by 10 via the salary visitor, then prints it again — timing the
// three traversals with measureTime.
def main(args : Array[String]) : Unit = {
val company = new Company { val name = "foo" }
val joe: Manager = new Manager { val name = "Joe"; var salary = 200 }
val paul: Manager = new Manager { val name = "Paul"; var salary = 300 }
val tom: Employee = new Employee { val name = "Tom"; var salary = 100 }
val dep1 = new Department { val name = "dep1"; var manager = joe }
val dep2 = new Department { val name = "dep2"; var manager = paul }
company.departments += dep1
dep1.subunits += dep2
dep2.subunits += tom
val visC = new Visitor{}
val salaryVis = new SalaryVisitor {}
val printVis = new PrintVisitor {}
measureTime {
(company -: visC.element).accept(printVis -: visC.visitor)
(company -: visC.element).accept(salaryVis -: visC.visitor)
(company -: visC.element).accept(printVis -: visC.visitor)
}
}
// Recursively raises the salary of every manager and employee by 10.
trait SalaryVisitor extends VisitorImpl {
def visitImpl[ElementType <: AnyRef](e: ElementType) {
// unfortunately, can only match on dynamic type of e, TODO: is that a problem?
e match {
case e: Company => e.departments.foreach(visitImpl)
case e: Department => e.manager.salary += 10; e.subunits.foreach(visitImpl)
case e: Employee => e.salary += 10
case _ => println("did not match: element has type " + e.getClass)
}
}
}
// Recursively prints the company hierarchy to stdout.
trait PrintVisitor extends VisitorImpl {
def visitImpl[ElementType <: AnyRef](e: ElementType) {
e match {
case e: Company => println("Company " + e.name + " with departments:"); e.departments.foreach(visitImpl)
case e: Department => println("Department " + e.name + " with Manager:"); visitImpl(e.manager); println("..and subunits:"); e.subunits.foreach(visitImpl)
case e: Employee => println("Employee " + e.name + ", salary: " + e.salary)
case _ => println("did not match: element has type " + e.getClass)
}
}
}
}
// Minimal mutable company model used by the visitor example.
object CompanyDataStructure {
trait Company {
val name: String
// mutable set of top-level departments
val departments = new HashSet[Department]
}
// Marker for anything a department may contain (departments or employees).
trait SubUnit
trait Department extends SubUnit {
val name: String
var manager: Manager
val subunits = new HashSet[SubUnit]
}
trait Employee extends SubUnit {
val name: String
var salary: Int
}
// Managers are modelled as plain employees via a type alias.
type Manager = Employee
}
| tupshin/Scala-Roles | examples/applications/VisitorTestApp.scala | Scala | bsd-3-clause | 2,675 |
/*
Copyright (c) 2017-2021, Robby, Kansas State University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sireum
import org.sireum.test._
// Exercises Sireum's built-in literal interpolators (z"", c"", f32"", f64"",
// string"") both as expressions and in pattern position (assertMatch).
class BuiltInTypesTest extends TestSuite {
val tests = Tests {
* - assert(z"1" == Z(1))
* - assert(z"1" == Z(1L))
* - assert(z"1" == Z(BigInt(1)))
* - assertMatch(Z(1)) { case z"1" => }
* - assertMatch(Z(2)) { case z"2" => }
* - assert(c"0" == C('0'))
* - assertMatch(C('0')) { case c"0" => }
* - assert(f32"0" == F32(0f))
* - assertMatch(F32(0f)) { case f32"0" => }
* - assert(f64"0" == F64(0d))
* - assertMatch(F64(0d)) { case f64"0" => }
* - assert(string"abc" == String("abc"))
* - assertMatch(String("abc")) { case string"abc" => }
}
}
| sireum/v3-logika-runtime | library/shared/src/test/scala/org/sireum/BuiltInTypesTest.scala | Scala | bsd-2-clause | 2,006 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015-2019 Helge Holzmann (Internet Archive) <helge@archive.org>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.archive.archivespark.specific.warc.specs
import org.archive.archivespark.dataspecs.TextDataSpec
import org.archive.archivespark.sparkling.cdx.CdxRecord
/**
 * Data spec that reads CDX index records from text files at `dataPath`.
 *
 * Each input line is parsed into one [[CdxRecord]]; malformed lines yield
 * `None` (presumably filtered out by `TextDataSpec` — confirm against its
 * implementation). Construction goes through the companion's `apply`.
 */
class CdxHdfsSpec private (val dataPath: String) extends TextDataSpec[CdxRecord] {
  /** Parses a single CDX line; returns None when the line is malformed. */
  override def parse(data: String): Option[CdxRecord] = CdxRecord.fromString(data)
}
/** Factory for [[CdxHdfsSpec]], whose constructor is private. */
object CdxHdfsSpec {
  // Explicit return type: public members should not rely on inference.
  def apply(path: String): CdxHdfsSpec = new CdxHdfsSpec(path)
}
package tu.knowledge.frame
/**
* @author max talanov
* date 2012-05-03
* time: 11:56 PM
*/
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
import tu.model.knowledge.primitive.KnowledgeString
import tu.model.knowledge.semanticnetwork.{SemanticNetworkLink, SemanticNetworkNode}
import tu.model.knowledge.frame.TypedFrame
import tu.model.knowledge.{Resource, Probability, KnowledgeURI}
@RunWith(classOf[JUnitRunner])
class FrameTest extends FunSuite {

  // Shared fixture: one URI reused for every resource created in this suite.
  val namespace = "testNamespace"
  val name = "name"
  val revision = "rev"
  val uri = new KnowledgeURI(namespace, name, revision)
  val probability = new Probability
  val sourceContent = "Source"
  val destinationContent = "Dest"
  // Two semantic-network nodes wrapping plain strings, with no outgoing links.
  val source: SemanticNetworkNode[KnowledgeString] = new SemanticNetworkNode(new KnowledgeString(sourceContent, uri), List[SemanticNetworkLink](), uri)
  val destination: SemanticNetworkNode[KnowledgeString] = new SemanticNetworkNode(new KnowledgeString(destinationContent, uri), List[SemanticNetworkLink](), uri)

  // Sanity check that the harness itself runs.
  test("test Ok") {
    assert(true)
  }

  // Stores two resources in a TypedFrame and checks retrieval by URI.
  test("TypedFrame should contain several resources") {
    val f = new TypedFrame(Map[KnowledgeURI, Resource](), uri)
    f.resources = Map[KnowledgeURI, Resource](source.uri -> source, destination.uri -> destination)
    // NOTE(review): `expect` is deprecated in later ScalaTest versions in
    // favour of `assertResult`; also `.get` would NPE-style fail with a poor
    // message if the lookup missed — consider migrating when upgrading.
    expect(f.resources.get(destination.uri).get)(destination)
  }
}
| keskival/2 | model.knowledge/src/test/scala/tu/knowledge/frame/FrameTest.scala | Scala | gpl-3.0 | 1,405 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package expressions
import base.Constructor
import lexer.ScalaTokenTypes
import builder.ScalaPsiBuilder
import util.ParserUtils
/**
* @author Alexander Podkhalyuzin
* Date: 06.03.2008
*/
/*
* AnnotationExpr ::= Constr [[nl] '{' {NameValuePair} '}']
*/
object AnnotationExpr {
  /**
   * Parses an annotation expression: a constructor invocation optionally
   * followed by a brace-enclosed, comma-separated list of NameValuePairs.
   * Returns true iff an annotation expression was produced; when the
   * constructor fails to parse, the marker is dropped and false is returned.
   */
  def parse(builder: ScalaPsiBuilder): Boolean = {
    val annotExprMarker = builder.mark
    if (!Constructor.parse(builder, isAnnotation = true)) {
      annotExprMarker.drop()
      return false
    }
    builder.getTokenType match {
      case ScalaTokenTypes.tLBRACE => {
        // A blank line (two newlines) before the `{` means the brace starts a
        // following block, not this annotation's argument list: finish here.
        if (builder.twoNewlinesBeforeCurrentToken) {
          annotExprMarker.done(ScalaElementTypes.ANNOTATION_EXPR)
          return true
        }
        builder.advanceLexer() // consume '{'
        builder.enableNewlines
        // Consumes comma-separated NameValuePairs; stray extra commas are
        // reported as errors but skipped so parsing can recover and continue.
        def foo() {
          while (NameValuePair.parse(builder)) {
            builder.getTokenType match {
              case ScalaTokenTypes.tCOMMA => builder.advanceLexer()
              case _ =>
            }
            while (builder.getTokenType == ScalaTokenTypes.tCOMMA) {
              builder.error(ScalaBundle.message("wrong.annotation.expression"))
              builder.advanceLexer()
            }
          }
        }
        // parseLoopUntilRBrace runs foo and handles consuming the closing '}'.
        ParserUtils.parseLoopUntilRBrace(builder, foo _)
        builder.restoreNewlinesState
        annotExprMarker.done(ScalaElementTypes.ANNOTATION_EXPR)
        true
      }
      // No brace: the annotation is just the constructor invocation.
      case _ => {
        annotExprMarker.done(ScalaElementTypes.ANNOTATION_EXPR)
        true
      }
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.client
import java.lang.{Boolean => JBoolean, Integer => JInteger, Long => JLong}
import java.lang.reflect.{InvocationTargetException, Method, Modifier}
import java.net.URI
import java.util.{ArrayList => JArrayList, List => JList, Locale, Map => JMap, Set => JSet}
import java.util.concurrent.TimeUnit
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.metastore.api.{EnvironmentContext, Function => HiveFunction, FunctionType}
import org.apache.hadoop.hive.metastore.api.{MetaException, PrincipalType, ResourceType, ResourceUri}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.io.AcidUtils
import org.apache.hadoop.hive.ql.metadata.{Hive, HiveException, Partition, Table}
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc
import org.apache.hadoop.hive.ql.processors.{CommandProcessor, CommandProcessorFactory}
import org.apache.hadoop.hive.ql.session.SessionState
import org.apache.hadoop.hive.serde.serdeConstants
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.analysis.NoSuchPermanentFunctionException
import org.apache.spark.sql.catalyst.catalog.{CatalogFunction, CatalogTablePartition, CatalogUtils, FunctionResource, FunctionResourceType}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{AtomicType, IntegralType, StringType}
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.Utils
/**
* A shim that defines the interface between [[HiveClientImpl]] and the underlying Hive library used
* to talk to the metastore. Each Hive version has its own implementation of this class, defining
* version-specific version of needed functions.
*
* The guideline for writing shims is:
* - always extend from the previous version unless really not possible
* - initialize methods in lazy vals, both for quicker access for multiple invocations, and to
* avoid runtime errors due to the above guideline.
*/
private[client] sealed abstract class Shim {

  /**
   * Set the current SessionState to the given SessionState. Also, set the context classloader of
   * the current thread to the one set in the HiveConf of this given `state`.
   */
  def setCurrentSessionState(state: SessionState): Unit

  /**
   * This shim is necessary because the return type is different on different versions of Hive.
   * All parameters are the same, though.
   */
  def getDataLocation(table: Table): Option[String]

  def setDataLocation(table: Table, loc: String): Unit

  /** Returns all partitions of the given table. */
  def getAllPartitions(hive: Hive, table: Table): Seq[Partition]

  /** Returns the partitions matching the given partition-pruning predicates. */
  def getPartitionsByFilter(hive: Hive, table: Table, predicates: Seq[Expression]): Seq[Partition]

  def getCommandProcessor(token: String, conf: HiveConf): CommandProcessor

  /** Collects the rows produced by a completed Hive Driver run as strings. */
  def getDriverResults(driver: Driver): Seq[String]

  /** Metastore client connect-retry delay, normalized to milliseconds. */
  def getMetastoreClientConnectRetryDelayMillis(conf: HiveConf): Long

  def alterTable(hive: Hive, tableName: String, table: Table): Unit

  def alterPartitions(hive: Hive, tableName: String, newParts: JList[Partition]): Unit

  def createPartitions(
      hive: Hive,
      db: String,
      table: String,
      parts: Seq[CatalogTablePartition],
      ignoreIfExists: Boolean): Unit

  def loadPartition(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean,
      isSrcLocal: Boolean): Unit

  def loadTable(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      replace: Boolean,
      isSrcLocal: Boolean): Unit

  def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit

  // Permanent-function catalog operations; unsupported before Hive 0.13
  // (see Shim_v0_12's implementations).
  def createFunction(hive: Hive, db: String, func: CatalogFunction): Unit

  def dropFunction(hive: Hive, db: String, name: String): Unit

  def renameFunction(hive: Hive, db: String, oldName: String, newName: String): Unit

  def alterFunction(hive: Hive, db: String, func: CatalogFunction): Unit

  def getFunctionOption(hive: Hive, db: String, name: String): Option[CatalogFunction]

  def listFunctions(hive: Hive, db: String, pattern: String): Seq[String]

  def dropIndex(hive: Hive, dbName: String, tableName: String, indexName: String): Unit

  def dropTable(
      hive: Hive,
      dbName: String,
      tableName: String,
      deleteData: Boolean,
      ignoreIfNotExists: Boolean,
      purge: Boolean): Unit

  def dropPartition(
      hive: Hive,
      dbName: String,
      tableName: String,
      part: JList[String],
      deleteData: Boolean,
      purge: Boolean): Unit

  /** Looks up a static method reflectively and fails fast if it is not static. */
  protected def findStaticMethod(klass: Class[_], name: String, args: Class[_]*): Method = {
    val method = findMethod(klass, name, args: _*)
    require(Modifier.isStatic(method.getModifiers()),
      s"Method $name of class $klass is not static.")
    method
  }

  /** Looks up a public method reflectively by name and parameter types. */
  protected def findMethod(klass: Class[_], name: String, args: Class[_]*): Method = {
    klass.getMethod(name, args: _*)
  }
}
/**
 * Shim for Hive 0.12, the oldest version handled in this file. Hive entry
 * points whose signatures changed in later versions are resolved reflectively
 * (and cached in lazy vals). Predicate pushdown to the metastore and
 * permanent functions are not supported at this version.
 */
private[client] class Shim_v0_12 extends Shim with Logging {

  // See HIVE-12224, HOLD_DDLTIME was broken as soon as it landed
  protected lazy val holdDDLTime = JBoolean.FALSE

  // deletes the underlying data along with metadata
  protected lazy val deleteDataInDropIndex = JBoolean.TRUE

  private lazy val startMethod =
    findStaticMethod(
      classOf[SessionState],
      "start",
      classOf[SessionState])
  private lazy val getDataLocationMethod = findMethod(classOf[Table], "getDataLocation")
  // Takes a URI at this version; later versions take a Path (see Shim_v0_13).
  private lazy val setDataLocationMethod =
    findMethod(
      classOf[Table],
      "setDataLocation",
      classOf[URI])
  private lazy val getAllPartitionsMethod =
    findMethod(
      classOf[Hive],
      "getAllPartitionsForPruner",
      classOf[Table])
  private lazy val getCommandProcessorMethod =
    findStaticMethod(
      classOf[CommandProcessorFactory],
      "get",
      classOf[String],
      classOf[HiveConf])
  private lazy val getDriverResultsMethod =
    findMethod(
      classOf[Driver],
      "getResults",
      classOf[JArrayList[String]])
  private lazy val createPartitionMethod =
    findMethod(
      classOf[Hive],
      "createPartition",
      classOf[Table],
      classOf[JMap[String, String]],
      classOf[Path],
      classOf[JMap[String, String]],
      classOf[String],
      classOf[String],
      JInteger.TYPE,
      classOf[JList[Object]],
      classOf[String],
      classOf[JMap[String, String]],
      classOf[JList[Object]],
      classOf[JList[Object]])
  private lazy val loadPartitionMethod =
    findMethod(
      classOf[Hive],
      "loadPartition",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadTableMethod =
    findMethod(
      classOf[Hive],
      "loadTable",
      classOf[Path],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadDynamicPartitionsMethod =
    findMethod(
      classOf[Hive],
      "loadDynamicPartitions",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val dropIndexMethod =
    findMethod(
      classOf[Hive],
      "dropIndex",
      classOf[String],
      classOf[String],
      classOf[String],
      JBoolean.TYPE)
  private lazy val alterTableMethod =
    findMethod(
      classOf[Hive],
      "alterTable",
      classOf[String],
      classOf[Table])
  private lazy val alterPartitionsMethod =
    findMethod(
      classOf[Hive],
      "alterPartitions",
      classOf[String],
      classOf[JList[Partition]])

  override def setCurrentSessionState(state: SessionState): Unit = {
    // Starting from Hive 0.13, setCurrentSessionState will internally override
    // the context class loader of the current thread by the class loader set in
    // the conf of the SessionState. So, for this Hive 0.12 shim, we add the same
    // behavior and make shim.setCurrentSessionState of all Hive versions have the
    // consistent behavior.
    Thread.currentThread().setContextClassLoader(state.getConf.getClassLoader)
    startMethod.invoke(null, state)
  }

  override def getDataLocation(table: Table): Option[String] =
    Option(getDataLocationMethod.invoke(table)).map(_.toString())

  override def setDataLocation(table: Table, loc: String): Unit =
    setDataLocationMethod.invoke(table, new URI(loc))

  // Follows exactly the same logic of DDLTask.createPartitions in Hive 0.12
  override def createPartitions(
      hive: Hive,
      database: String,
      tableName: String,
      parts: Seq[CatalogTablePartition],
      ignoreIfExists: Boolean): Unit = {
    val table = hive.getTable(database, tableName)
    parts.foreach { s =>
      // A missing location means "use the table's default"; null signals that
      // to Hive, except for views, which must not carry a LOCATION.
      val location = s.storage.locationUri.map(
        uri => new Path(table.getPath, new Path(uri))).orNull
      val params = if (s.parameters.nonEmpty) s.parameters.asJava else null
      val spec = s.spec.asJava
      if (hive.getPartition(table, spec, false) != null && ignoreIfExists) {
        // Ignore this partition since it already exists and ignoreIfExists == true
      } else {
        if (location == null && table.isView()) {
          throw new HiveException("LOCATION clause illegal for view partition");
        }
        createPartitionMethod.invoke(
          hive,
          table,
          spec,
          location,
          params, // partParams
          null, // inputFormat
          null, // outputFormat
          -1: JInteger, // numBuckets
          null, // cols
          null, // serializationLib
          null, // serdeParams
          null, // bucketCols
          null) // sortCols
      }
    }
  }

  override def getAllPartitions(hive: Hive, table: Table): Seq[Partition] =
    getAllPartitionsMethod.invoke(hive, table).asInstanceOf[JSet[Partition]].asScala.toSeq

  override def getPartitionsByFilter(
      hive: Hive,
      table: Table,
      predicates: Seq[Expression]): Seq[Partition] = {
    // getPartitionsByFilter() doesn't support binary comparison ops in Hive 0.12.
    // See HIVE-4888.
    logDebug("Hive 0.12 doesn't support predicate pushdown to metastore. " +
      "Please use Hive 0.13 or higher.")
    getAllPartitions(hive, table)
  }

  override def getCommandProcessor(token: String, conf: HiveConf): CommandProcessor =
    getCommandProcessorMethod.invoke(null, token, conf).asInstanceOf[CommandProcessor]

  override def getDriverResults(driver: Driver): Seq[String] = {
    // Hive 0.12 fills a caller-supplied list rather than returning one.
    val res = new JArrayList[String]()
    getDriverResultsMethod.invoke(driver, res)
    res.asScala
  }

  override def getMetastoreClientConnectRetryDelayMillis(conf: HiveConf): Long = {
    // The config value is an integer number of seconds at this version.
    conf.getIntVar(HiveConf.ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY) * 1000L
  }

  override def loadPartition(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean,
      isSrcLocal: Boolean): Unit = {
    // NOTE: `isSrcLocal` is accepted for interface compatibility but the Hive
    // 0.12 API has no such parameter, so it is ignored here. The literal
    // JBoolean.FALSE fills the holdDDLTime slot (broken; see HIVE-12224).
    loadPartitionMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      JBoolean.FALSE, inheritTableSpecs: JBoolean, isSkewedStoreAsSubdir: JBoolean)
  }

  override def loadTable(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      replace: Boolean,
      isSrcLocal: Boolean): Unit = {
    // `isSrcLocal` is likewise unused by the Hive 0.12 loadTable API.
    loadTableMethod.invoke(hive, loadPath, tableName, replace: JBoolean, holdDDLTime)
  }

  override def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit = {
    loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      numDP: JInteger, holdDDLTime, listBucketingEnabled: JBoolean)
  }

  override def dropIndex(hive: Hive, dbName: String, tableName: String, indexName: String): Unit = {
    dropIndexMethod.invoke(hive, dbName, tableName, indexName, deleteDataInDropIndex)
  }

  override def dropTable(
      hive: Hive,
      dbName: String,
      tableName: String,
      deleteData: Boolean,
      ignoreIfNotExists: Boolean,
      purge: Boolean): Unit = {
    // PURGE was introduced later; reject it explicitly rather than silently
    // performing a non-purging drop.
    if (purge) {
      throw new UnsupportedOperationException("DROP TABLE ... PURGE")
    }
    hive.dropTable(dbName, tableName, deleteData, ignoreIfNotExists)
  }

  override def alterTable(hive: Hive, tableName: String, table: Table): Unit = {
    alterTableMethod.invoke(hive, tableName, table)
  }

  override def alterPartitions(hive: Hive, tableName: String, newParts: JList[Partition]): Unit = {
    alterPartitionsMethod.invoke(hive, tableName, newParts)
  }

  override def dropPartition(
      hive: Hive,
      dbName: String,
      tableName: String,
      part: JList[String],
      deleteData: Boolean,
      purge: Boolean): Unit = {
    // As with dropTable, PURGE is not available until a later Hive version.
    if (purge) {
      throw new UnsupportedOperationException("ALTER TABLE ... DROP PARTITION ... PURGE")
    }
    hive.dropPartition(dbName, tableName, part, deleteData)
  }

  // Permanent functions do not exist in Hive 0.12: creation is an explicit
  // error, and the remaining operations behave as if the function is absent.
  override def createFunction(hive: Hive, db: String, func: CatalogFunction): Unit = {
    throw new AnalysisException("Hive 0.12 doesn't support creating permanent functions. " +
      "Please use Hive 0.13 or higher.")
  }

  def dropFunction(hive: Hive, db: String, name: String): Unit = {
    throw new NoSuchPermanentFunctionException(db, name)
  }

  def renameFunction(hive: Hive, db: String, oldName: String, newName: String): Unit = {
    throw new NoSuchPermanentFunctionException(db, oldName)
  }

  def alterFunction(hive: Hive, db: String, func: CatalogFunction): Unit = {
    throw new NoSuchPermanentFunctionException(db, func.identifier.funcName)
  }

  def getFunctionOption(hive: Hive, db: String, name: String): Option[CatalogFunction] = {
    None
  }

  def listFunctions(hive: Hive, db: String, pattern: String): Seq[String] = {
    Seq.empty[String]
  }
}
/**
 * Shim for Hive 0.13: adds metastore-side partition-predicate pushdown
 * (via `getPartitionsByFilter`) and permanent-function catalog support.
 */
private[client] class Shim_v0_13 extends Shim_v0_12 {

  private lazy val setCurrentSessionStateMethod =
    findStaticMethod(
      classOf[SessionState],
      "setCurrentSessionState",
      classOf[SessionState])
  // Now takes a Path instead of the URI used by Hive 0.12.
  private lazy val setDataLocationMethod =
    findMethod(
      classOf[Table],
      "setDataLocation",
      classOf[Path])
  private lazy val getAllPartitionsMethod =
    findMethod(
      classOf[Hive],
      "getAllPartitionsOf",
      classOf[Table])
  private lazy val getPartitionsByFilterMethod =
    findMethod(
      classOf[Hive],
      "getPartitionsByFilter",
      classOf[Table],
      classOf[String])
  private lazy val getCommandProcessorMethod =
    findStaticMethod(
      classOf[CommandProcessorFactory],
      "get",
      classOf[Array[String]],
      classOf[HiveConf])
  private lazy val getDriverResultsMethod =
    findMethod(
      classOf[Driver],
      "getResults",
      classOf[JList[Object]])

  override def setCurrentSessionState(state: SessionState): Unit =
    setCurrentSessionStateMethod.invoke(null, state)

  override def setDataLocation(table: Table, loc: String): Unit =
    setDataLocationMethod.invoke(table, new Path(loc))

  // Uses the batched AddPartitionDesc API introduced in Hive 0.13.
  override def createPartitions(
      hive: Hive,
      db: String,
      table: String,
      parts: Seq[CatalogTablePartition],
      ignoreIfExists: Boolean): Unit = {
    val addPartitionDesc = new AddPartitionDesc(db, table, ignoreIfExists)
    parts.zipWithIndex.foreach { case (s, i) =>
      addPartitionDesc.addPartition(
        s.spec.asJava, s.storage.locationUri.map(CatalogUtils.URIToString(_)).orNull)
      if (s.parameters.nonEmpty) {
        addPartitionDesc.getPartition(i).setPartParams(s.parameters.asJava)
      }
    }
    hive.createPartitions(addPartitionDesc)
  }

  override def getAllPartitions(hive: Hive, table: Table): Seq[Partition] =
    getAllPartitionsMethod.invoke(hive, table).asInstanceOf[JSet[Partition]].asScala.toSeq

  /** Converts a Spark CatalogFunction into Hive's metastore Function object. */
  private def toHiveFunction(f: CatalogFunction, db: String): HiveFunction = {
    val resourceUris = f.resources.map { resource =>
      new ResourceUri(ResourceType.valueOf(
        resource.resourceType.resourceType.toUpperCase(Locale.ROOT)), resource.uri)
    }
    new HiveFunction(
      f.identifier.funcName,
      db,
      f.className,
      null,
      PrincipalType.USER,
      (System.currentTimeMillis / 1000).toInt,
      FunctionType.JAVA,
      resourceUris.asJava)
  }

  override def createFunction(hive: Hive, db: String, func: CatalogFunction): Unit = {
    hive.createFunction(toHiveFunction(func, db))
  }

  override def dropFunction(hive: Hive, db: String, name: String): Unit = {
    hive.dropFunction(db, name)
  }

  // Rename is implemented as alterFunction on the old name with the new
  // identifier; fails if the old function does not exist.
  override def renameFunction(hive: Hive, db: String, oldName: String, newName: String): Unit = {
    val catalogFunc = getFunctionOption(hive, db, oldName)
      .getOrElse(throw new NoSuchPermanentFunctionException(db, oldName))
      .copy(identifier = FunctionIdentifier(newName, Some(db)))
    val hiveFunc = toHiveFunction(catalogFunc, db)
    hive.alterFunction(db, oldName, hiveFunc)
  }

  override def alterFunction(hive: Hive, db: String, func: CatalogFunction): Unit = {
    hive.alterFunction(db, func.identifier.funcName, toHiveFunction(func, db))
  }

  /** Converts a Hive metastore Function back into a Spark CatalogFunction. */
  private def fromHiveFunction(hf: HiveFunction): CatalogFunction = {
    val name = FunctionIdentifier(hf.getFunctionName, Option(hf.getDbName))
    val resources = hf.getResourceUris.asScala.map { uri =>
      val resourceType = uri.getResourceType() match {
        case ResourceType.ARCHIVE => "archive"
        case ResourceType.FILE => "file"
        case ResourceType.JAR => "jar"
        case r => throw new AnalysisException(s"Unknown resource type: $r")
      }
      FunctionResource(FunctionResourceType.fromString(resourceType), uri.getUri())
    }
    CatalogFunction(name, hf.getClassName, resources)
  }

  override def getFunctionOption(hive: Hive, db: String, name: String): Option[CatalogFunction] = {
    try {
      Option(hive.getFunction(db, name)).map(fromHiveFunction)
    } catch {
      // Hive wraps "does not exist" in varying exception types; detect it by
      // message anywhere in the cause chain and translate to None.
      case NonFatal(e) if isCausedBy(e, s"$name does not exist") =>
        None
    }
  }

  // Walks the cause chain looking for the given message fragment.
  // NOTE(review): "matchMassage" is a typo for "matchMessage" (private; safe
  // to rename when convenient). Also, `e.getMessage` may be null for some
  // throwables, which would NPE here — TODO confirm and guard.
  private def isCausedBy(e: Throwable, matchMassage: String): Boolean = {
    if (e.getMessage.contains(matchMassage)) {
      true
    } else if (e.getCause != null) {
      isCausedBy(e.getCause, matchMassage)
    } else {
      false
    }
  }

  override def listFunctions(hive: Hive, db: String, pattern: String): Seq[String] = {
    hive.getFunctions(db, pattern).asScala
  }

  /**
   * Converts catalyst expression to the format that Hive's getPartitionsByFilter() expects, i.e.
   * a string that represents partition predicates like "str_key=\"value\" and int_key=1 ...".
   *
   * Unsupported predicates are skipped.
   */
  def convertFilters(table: Table, filters: Seq[Expression]): String = {
    /**
     * An extractor that matches all binary comparison operators except null-safe equality.
     *
     * Null-safe equality is not supported by Hive metastore partition predicate pushdown
     */
    object SpecialBinaryComparison {
      def unapply(e: BinaryComparison): Option[(Expression, Expression)] = e match {
        case _: EqualNullSafe => None
        case _ => Some((e.left, e.right))
      }
    }

    // Renders a literal as its metastore-filter string form; only integral
    // and string literals are pushable, and nulls are never pushed.
    object ExtractableLiteral {
      def unapply(expr: Expression): Option[String] = expr match {
        case Literal(null, _) => None // `null`s can be cast as other types; we want to avoid NPEs.
        case Literal(value, _: IntegralType) => Some(value.toString)
        case Literal(value, _: StringType) => Some(quoteStringLiteral(value.toString))
        case _ => None
      }
    }

    object ExtractableLiterals {
      def unapply(exprs: Seq[Expression]): Option[Seq[String]] = {
        // SPARK-24879: The Hive metastore filter parser does not support "null", but we still want
        // to push down as many predicates as we can while still maintaining correctness.
        // In SQL, the `IN` expression evaluates as follows:
        //  > `1 in (2, NULL)` -> NULL
        //  > `1 in (1, NULL)` -> true
        //  > `1 in (2)` -> false
        // Since Hive metastore filters are NULL-intolerant binary operations joined only by
        // `AND` and `OR`, we can treat `NULL` as `false` and thus rewrite `1 in (2, NULL)` as
        // `1 in (2)`.
        // If the Hive metastore begins supporting NULL-tolerant predicates and Spark starts
        // pushing down these predicates, then this optimization will become incorrect and need
        // to be changed.
        val extractables = exprs
          .filter {
            case Literal(null, _) => false
            case _ => true
          }.map(ExtractableLiteral.unapply)
        if (extractables.nonEmpty && extractables.forall(_.isDefined)) {
          Some(extractables.map(_.get))
        } else {
          None
        }
      }
    }

    // Same idea as ExtractableLiterals, but for the raw values of an InSet.
    object ExtractableValues {
      private lazy val valueToLiteralString: PartialFunction[Any, String] = {
        case value: Byte => value.toString
        case value: Short => value.toString
        case value: Int => value.toString
        case value: Long => value.toString
        case value: UTF8String => quoteStringLiteral(value.toString)
      }

      def unapply(values: Set[Any]): Option[Seq[String]] = {
        val extractables = values.toSeq.map(valueToLiteralString.lift)
        if (extractables.nonEmpty && extractables.forall(_.isDefined)) {
          Some(extractables.map(_.get))
        } else {
          None
        }
      }
    }

    object NonVarcharAttribute {
      // hive varchar is treated as catalyst string, but hive varchar can't be pushed down.
      private val varcharKeys = table.getPartitionKeys.asScala
        .filter(col => col.getType.startsWith(serdeConstants.VARCHAR_TYPE_NAME) ||
          col.getType.startsWith(serdeConstants.CHAR_TYPE_NAME))
        .map(col => col.getName).toSet

      def unapply(attr: Attribute): Option[String] = {
        if (varcharKeys.contains(attr.name)) {
          None
        } else {
          Some(attr.name)
        }
      }
    }

    // The metastore filter grammar has no IN; expand to a disjunction.
    def convertInToOr(name: String, values: Seq[String]): String = {
      values.map(value => s"$name = $value").mkString("(", " or ", ")")
    }

    val useAdvanced = SQLConf.get.advancedPartitionPredicatePushdownEnabled

    // Peels off value-preserving casts so comparisons on cast attributes can
    // still be pushed down.
    object ExtractAttribute {
      def unapply(expr: Expression): Option[Attribute] = {
        expr match {
          case attr: Attribute => Some(attr)
          case Cast(child @ AtomicType(), dt: AtomicType, _)
            if Cast.canSafeCast(child.dataType.asInstanceOf[AtomicType], dt) => unapply(child)
          case _ => None
        }
      }
    }

    def convert(expr: Expression): Option[String] = expr match {
      case In(ExtractAttribute(NonVarcharAttribute(name)), ExtractableLiterals(values))
          if useAdvanced =>
        Some(convertInToOr(name, values))

      case InSet(ExtractAttribute(NonVarcharAttribute(name)), ExtractableValues(values))
          if useAdvanced =>
        Some(convertInToOr(name, values))

      case op @ SpecialBinaryComparison(
          ExtractAttribute(NonVarcharAttribute(name)), ExtractableLiteral(value)) =>
        Some(s"$name ${op.symbol} $value")

      case op @ SpecialBinaryComparison(
          ExtractableLiteral(value), ExtractAttribute(NonVarcharAttribute(name))) =>
        Some(s"$value ${op.symbol} $name")

      // AND may drop an unconvertible side: the remaining conjunct still
      // selects a superset of the requested partitions, which callers accept.
      case And(expr1, expr2) if useAdvanced =>
        val converted = convert(expr1) ++ convert(expr2)
        if (converted.isEmpty) {
          None
        } else {
          Some(converted.mkString("(", " and ", ")"))
        }

      // OR requires both sides to convert, or the whole disjunction is unsafe.
      case Or(expr1, expr2) if useAdvanced =>
        for {
          left <- convert(expr1)
          right <- convert(expr2)
        } yield s"($left or $right)"

      case _ => None
    }

    filters.flatMap(convert).mkString(" and ")
  }

  // Quotes a string literal for the metastore filter language, preferring
  // double quotes; a value containing both quote kinds cannot be expressed.
  private def quoteStringLiteral(str: String): String = {
    if (!str.contains("\"")) {
      s""""$str""""
    } else if (!str.contains("'")) {
      s"""'$str'"""
    } else {
      throw new UnsupportedOperationException(
        """Partition filter cannot have both `"` and `'` characters""")
    }
  }

  override def getPartitionsByFilter(
      hive: Hive,
      table: Table,
      predicates: Seq[Expression]): Seq[Partition] = {
    // Hive getPartitionsByFilter() takes a string that represents partition
    // predicates like "str_key=\"value\" and int_key=1 ..."
    val filter = convertFilters(table, predicates)
    val partitions =
      if (filter.isEmpty) {
        getAllPartitionsMethod.invoke(hive, table).asInstanceOf[JSet[Partition]]
      } else {
        logDebug(s"Hive metastore filter is '$filter'.")
        val tryDirectSqlConfVar = HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL
        // We should get this config value from the metaStore. otherwise hit SPARK-18681.
        // To be compatible with hive-0.12 and hive-0.13, In the future we can achieve this by:
        // val tryDirectSql = hive.getMetaConf(tryDirectSqlConfVar.varname).toBoolean
        val tryDirectSql = hive.getMSC.getConfigValue(tryDirectSqlConfVar.varname,
          tryDirectSqlConfVar.defaultBoolVal.toString).toBoolean
        try {
          // Hive may throw an exception when calling this method in some circumstances, such as
          // when filtering on a non-string partition column when the hive config key
          // hive.metastore.try.direct.sql is false
          getPartitionsByFilterMethod.invoke(hive, table, filter)
            .asInstanceOf[JArrayList[Partition]]
        } catch {
          case ex: InvocationTargetException if ex.getCause.isInstanceOf[MetaException] &&
              !tryDirectSql =>
            logWarning("Caught Hive MetaException attempting to get partition metadata by " +
              "filter from Hive. Falling back to fetching all partition metadata, which will " +
              "degrade performance. Modifying your Hive metastore configuration to set " +
              s"${tryDirectSqlConfVar.varname} to true may resolve this problem.", ex)
            // HiveShim clients are expected to handle a superset of the requested partitions
            getAllPartitionsMethod.invoke(hive, table).asInstanceOf[JSet[Partition]]
          case ex: InvocationTargetException if ex.getCause.isInstanceOf[MetaException] &&
              tryDirectSql =>
            throw new RuntimeException("Caught Hive MetaException attempting to get partition " +
              "metadata by filter from Hive. You can set the Spark configuration setting " +
              s"${SQLConf.HIVE_MANAGE_FILESOURCE_PARTITIONS.key} to false to work around this " +
              "problem, however this will result in degraded performance. Please report a bug: " +
              "https://issues.apache.org/jira/browse/SPARK", ex)
        }
      }
    partitions.asScala.toSeq
  }

  override def getCommandProcessor(token: String, conf: HiveConf): CommandProcessor =
    getCommandProcessorMethod.invoke(null, Array(token), conf).asInstanceOf[CommandProcessor]

  override def getDriverResults(driver: Driver): Seq[String] = {
    val res = new JArrayList[Object]()
    getDriverResultsMethod.invoke(driver, res)
    // Rows may come back as plain strings or single-column Object arrays.
    res.asScala.map { r =>
      r match {
        case s: String => s
        case a: Array[Object] => a(0).asInstanceOf[String]
      }
    }
  }
}
/**
 * Shim for Hive 0.14: the load methods gain isSrcLocal/ACID flags,
 * DROP TABLE supports PURGE, and the metastore retry delay is read via
 * the time-unit-aware `getTimeVar`.
 */
private[client] class Shim_v0_14 extends Shim_v0_13 {

  // true if this is an ACID operation
  protected lazy val isAcid = JBoolean.FALSE

  // true if list bucketing enabled
  protected lazy val isSkewedStoreAsSubdir = JBoolean.FALSE

  private lazy val loadPartitionMethod =
    findMethod(
      classOf[Hive],
      "loadPartition",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadTableMethod =
    findMethod(
      classOf[Hive],
      "loadTable",
      classOf[Path],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadDynamicPartitionsMethod =
    findMethod(
      classOf[Hive],
      "loadDynamicPartitions",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val dropTableMethod =
    findMethod(
      classOf[Hive],
      "dropTable",
      classOf[String],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val getTimeVarMethod =
    findMethod(
      classOf[HiveConf],
      "getTimeVar",
      classOf[HiveConf.ConfVars],
      classOf[TimeUnit])

  override def loadPartition(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadPartitionMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      holdDDLTime, inheritTableSpecs: JBoolean, isSkewedStoreAsSubdir: JBoolean,
      isSrcLocal: JBoolean, isAcid)
  }

  override def loadTable(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      replace: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadTableMethod.invoke(hive, loadPath, tableName, replace: JBoolean, holdDDLTime,
      isSrcLocal: JBoolean, isSkewedStoreAsSubdir, isAcid)
  }

  override def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit = {
    loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      numDP: JInteger, holdDDLTime, listBucketingEnabled: JBoolean, isAcid)
  }

  // PURGE is supported natively from this version on (cf. the explicit
  // rejection in Shim_v0_12).
  override def dropTable(
      hive: Hive,
      dbName: String,
      tableName: String,
      deleteData: Boolean,
      ignoreIfNotExists: Boolean,
      purge: Boolean): Unit = {
    dropTableMethod.invoke(hive, dbName, tableName, deleteData: JBoolean,
      ignoreIfNotExists: JBoolean, purge: JBoolean)
  }

  override def getMetastoreClientConnectRetryDelayMillis(conf: HiveConf): Long = {
    getTimeVarMethod.invoke(
      conf,
      HiveConf.ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY,
      TimeUnit.MILLISECONDS).asInstanceOf[Long]
  }
}
// No shim changes are needed for Hive 1.0 relative to 0.14.
private[client] class Shim_v1_0 extends Shim_v0_14
/**
 * Shim for Hive 1.1: `dropIndex` gains a "throw if absent" flag, which we
 * always enable so a missing index surfaces as an error.
 */
private[client] class Shim_v1_1 extends Shim_v1_0 {

  // throws an exception if the index does not exist
  protected lazy val throwExceptionInDropIndex = JBoolean.TRUE

  private lazy val dropIndexMethod =
    findMethod(
      classOf[Hive],
      "dropIndex",
      classOf[String],
      classOf[String],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE)

  override def dropIndex(hive: Hive, dbName: String, tableName: String, indexName: String): Unit = {
    dropIndexMethod.invoke(hive, dbName, tableName, indexName, throwExceptionInDropIndex,
      deleteDataInDropIndex)
  }
}
/**
 * Shim for Hive 1.2: `loadDynamicPartitions` gains a transaction-id
 * parameter, and `dropPartition` is driven through the new
 * PartitionDropOptions class, which adds PURGE support.
 */
private[client] class Shim_v1_2 extends Shim_v1_1 {

  // txnId can be 0 unless isAcid == true
  protected lazy val txnIdInLoadDynamicPartitions: JLong = 0L

  private lazy val loadDynamicPartitionsMethod =
    findMethod(
      classOf[Hive],
      "loadDynamicPartitions",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JLong.TYPE)
  // PartitionDropOptions only exists from 1.2 on, so it is loaded by name and
  // its public fields are set reflectively.
  private lazy val dropOptionsClass =
    Utils.classForName("org.apache.hadoop.hive.metastore.PartitionDropOptions")
  private lazy val dropOptionsDeleteData = dropOptionsClass.getField("deleteData")
  private lazy val dropOptionsPurge = dropOptionsClass.getField("purgeData")
  private lazy val dropPartitionMethod =
    findMethod(
      classOf[Hive],
      "dropPartition",
      classOf[String],
      classOf[String],
      classOf[JList[String]],
      dropOptionsClass)

  override def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit = {
    loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      numDP: JInteger, holdDDLTime, listBucketingEnabled: JBoolean, isAcid,
      txnIdInLoadDynamicPartitions)
  }

  override def dropPartition(
      hive: Hive,
      dbName: String,
      tableName: String,
      part: JList[String],
      deleteData: Boolean,
      purge: Boolean): Unit = {
    val dropOptions = dropOptionsClass.newInstance().asInstanceOf[Object]
    dropOptionsDeleteData.setBoolean(dropOptions, deleteData)
    dropOptionsPurge.setBoolean(dropOptions, purge)
    dropPartitionMethod.invoke(hive, dbName, tableName, part, dropOptions)
  }
}
// Hive 2.0 shim: the load methods dropped the `holdDDLTime` flag and `loadTable`
// gained `isSrcLocal`, so all three are re-resolved with the new signatures.
private[client] class Shim_v2_0 extends Shim_v1_2 {

  private lazy val loadPartitionMethod =
    findMethod(
      classOf[Hive],
      "loadPartition",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)

  private lazy val loadTableMethod =
    findMethod(
      classOf[Hive],
      "loadTable",
      classOf[Path],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)

  private lazy val loadDynamicPartitionsMethod =
    findMethod(
      classOf[Hive],
      "loadDynamicPartitions",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JLong.TYPE)

  override def loadPartition(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadPartitionMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      inheritTableSpecs: JBoolean, isSkewedStoreAsSubdir: JBoolean,
      isSrcLocal: JBoolean, isAcid)
  }

  override def loadTable(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      replace: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadTableMethod.invoke(hive, loadPath, tableName, replace: JBoolean, isSrcLocal: JBoolean,
      isSkewedStoreAsSubdir, isAcid)
  }

  override def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit = {
    // Note: no holdDDLTime argument in Hive 2.0, unlike the 1.x shims.
    loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      numDP: JInteger, listBucketingEnabled: JBoolean, isAcid, txnIdInLoadDynamicPartitions)
  }
}
// Hive 2.1 shim: the load methods gained a "has following stats task" flag,
// loadDynamicPartitions takes an AcidUtils.Operation, and alterTable /
// alterPartitions accept an EnvironmentContext.
private[client] class Shim_v2_1 extends Shim_v2_0 {

  // true if there is any following stats task
  protected lazy val hasFollowingStatsTask = JBoolean.FALSE

  // TODO: Now, always set environmentContext to null. In the future, we should avoid setting
  // hive-generated stats to -1 when altering tables by using environmentContext. See Hive-12730
  protected lazy val environmentContextInAlterTable = null

  private lazy val loadPartitionMethod =
    findMethod(
      classOf[Hive],
      "loadPartition",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)

  private lazy val loadTableMethod =
    findMethod(
      classOf[Hive],
      "loadTable",
      classOf[Path],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)

  private lazy val loadDynamicPartitionsMethod =
    findMethod(
      classOf[Hive],
      "loadDynamicPartitions",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JLong.TYPE,
      JBoolean.TYPE,
      classOf[AcidUtils.Operation])

  private lazy val alterTableMethod =
    findMethod(
      classOf[Hive],
      "alterTable",
      classOf[String],
      classOf[Table],
      classOf[EnvironmentContext])

  private lazy val alterPartitionsMethod =
    findMethod(
      classOf[Hive],
      "alterPartitions",
      classOf[String],
      classOf[JList[Partition]],
      classOf[EnvironmentContext])

  override def loadPartition(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadPartitionMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      inheritTableSpecs: JBoolean, isSkewedStoreAsSubdir: JBoolean,
      isSrcLocal: JBoolean, isAcid, hasFollowingStatsTask)
  }

  override def loadTable(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      replace: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadTableMethod.invoke(hive, loadPath, tableName, replace: JBoolean, isSrcLocal: JBoolean,
      isSkewedStoreAsSubdir, isAcid, hasFollowingStatsTask)
  }

  override def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit = {
    // Spark never writes ACID tables here, hence Operation.NOT_ACID.
    loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      numDP: JInteger, listBucketingEnabled: JBoolean, isAcid, txnIdInLoadDynamicPartitions,
      hasFollowingStatsTask, AcidUtils.Operation.NOT_ACID)
  }

  override def alterTable(hive: Hive, tableName: String, table: Table): Unit = {
    alterTableMethod.invoke(hive, tableName, table, environmentContextInAlterTable)
  }

  override def alterPartitions(hive: Hive, tableName: String, newParts: JList[Partition]): Unit = {
    alterPartitionsMethod.invoke(hive, tableName, newParts, environmentContextInAlterTable)
  }
}
// Hive 2.2 and 2.3 use the same reflected signatures as 2.1.
private[client] class Shim_v2_2 extends Shim_v2_1

private[client] class Shim_v2_3 extends Shim_v2_1
| tejasapatil/spark | sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala | Scala | apache-2.0 | 39,836 |
package japgolly.scalajs.react
import japgolly.scalajs.react.internal._
/**
* Created by alonsodomin on 13/03/2017.
*/
trait CatsReact extends CatsReactExt with CatsReactInstances with CatsReactState
object CatsReact extends CatsReact | matthughes/scalajs-react | cats/src/main/scala/japgolly/scalajs/react/CatsReact.scala | Scala | apache-2.0 | 241 |
package com.lateralthoughts.points.model.inputs
import java.time.{Clock, OffsetDateTime}
import java.util.UUID
import com.lateralthoughts.points.model.records.{RewardingAction, RewardingActionCategory}
sealed trait RewardingActionInput extends Input[RewardingAction]
/**
 * Input payload for creating a new rewarding action.
 *
 * @param name        display name of the action
 * @param category    inner category input, resolved to a full category before saving
 * @param description free-text description
 * @param points      number of points the action awards
 */
case class NewRewardingActionInput(name: String,
                                   category: InnerRewardingActionCategoryInput,
                                   description: String,
                                   points: Int) extends SaveInput[RewardingAction] with RewardingActionInput {

  /**
   * Builds a persistable [[RewardingAction]] from this input.
   *
   * @param category the resolved category record matching the inner category input
   * @return a new action with a random id and equal creation/update timestamps
   */
  def generate(category: RewardingActionCategory): RewardingAction = {
    // Read the clock once so createdAt and updatedAt are guaranteed identical
    // (two separate now() calls could straddle a millisecond boundary).
    val now = OffsetDateTime.now(Clock.systemUTC())
    RewardingAction(UUID.randomUUID(), this.name, category, this.description, this.points, now, now)
  }
}
/**
 * Partial-update payload for an existing rewarding action: every field is
 * optional and only the fields that are present overwrite the stored values.
 */
case class UpdateRewardingActionInput(name: Option[String],
                                      category: Option[InnerRewardingActionCategoryInput],
                                      description: Option[String],
                                      points: Option[Int]) extends UpdateInput[RewardingAction] with RewardingActionInput {

  // Applies the present fields on top of `rewardingAction`. `pick` keeps the old
  // value when the Option is empty; `updateDate` presumably bumps `updatedAt`
  // only when at least one picked field actually changed -- TODO confirm in
  // UpdateInput. `id` and `createdAt` are always carried over unchanged.
  def update(rewardingAction: RewardingAction, category: RewardingActionCategory) = {
    val id = rewardingAction.id
    val name = pick(this.name, rewardingAction.name)
    val description = pick(this.description, rewardingAction.description)
    val points = pick(this.points, rewardingAction.points)
    val createdAt = rewardingAction.createdAt
    val updatedAt = updateDate(rewardingAction, Seq(name, description, points))
    RewardingAction(id, name.field, category, description.field, points.field, createdAt, updatedAt)
  }
} | vincentdoba/points | points-server/src/main/scala/com/lateralthoughts/points/model/inputs/RewardingActionInput.scala | Scala | mit | 1,850 |
package SqlDsl
// Marker trait for comparison operators usable in the SQL DSL.
trait SqlOperator

// Greater-than comparison operator.
object > extends SqlOperator
object Eq extends SqlOperator | fadeddata/sqldsl | src/main/scala/SqlDsl/SqlOperator.scala | Scala | mit | 92 |
package de.unifreiburg.cs.proglang.jgs.jimpleutils
import soot.{RefType, SootClass, SootMethod, Type}
import scala.collection.JavaConversions._
object Supertypes {
/**
 * @return The supertypes of {@code sootClass}. Traverses the supertype DAG in preorder. The stream may contain
 *         duplicates. The class itself is never included.
 */
def enumerate(sootClass: SootClass): Iterator[SootClass] = {
  // Direct superclass (when present) plus all directly implemented interfaces,
  // then recurse into each of those.
  val immediateSuperTypes: List[SootClass] = (if (sootClass.hasSuperclass) List(sootClass.getSuperclass) else List()) ++ sootClass.getInterfaces
  immediateSuperTypes.iterator ++ immediateSuperTypes.flatMap(Supertypes.enumerate)
}
/**
 * @return The methods that m1 overrides, i.e. every method named like m1 in any
 *         (transitive) supertype of m1's declaring class that m1 overrides.
 */
def findOverridden(m1: SootMethod): Iterator[SootMethod] = {
  val c1: SootClass = m1.getDeclaringClass
  //Supertypes.enumerate(c1).flatMap(c -> c.getMethods().stream()).filter(m -> overrides(m1, m))
  for (c <- Supertypes.enumerate(c1);
       m <- c.getMethods if overrides(m1, m))
    yield m
}
/**
 * @return true if {@code m1} overrides {@code m2}.
 */
def overrides(m1: SootMethod, m2: SootMethod): Boolean = {
  // Static methods and constructors never take part in overriding. The
  // remaining checks mirror the original logic: same name, m1 declared in a
  // subtype of m2's declaring class, identical parameter types, and either a
  // covariant reference return type or an identical non-reference return type.
  // Short-circuiting && preserves the original check order without `return`.
  !m1.isStatic && !m2.isStatic &&
    m1.getName != "<init>" && m2.getName != "<init>" &&
    m1.getName == m2.getName &&
    subTypeOf(m1.getDeclaringClass, m2.getDeclaringClass) &&
    m1.getParameterTypes == m2.getParameterTypes && {
      (m1.getReturnType, m2.getReturnType) match {
        case (rt1: RefType, rt2: RefType) => subTypeOf(rt1.getSootClass, rt2.getSootClass)
        case (rt1, rt2)                   => rt1 == rt2
      }
    }
}
/**
 * @return true if {@code c1} is a subtype of {@code c2}.
 *         NOTE(review): enumerate never yields c1 itself, so this is *strict*
 *         subtyping -- subTypeOf(c, c) is false.
 */
def subTypeOf(c1: SootClass, c2: SootClass): Boolean = Supertypes.enumerate(c1).contains(c2)
} | luminousfennell/jgs | GradualConstraints/src/main/java/de/unifreiburg/cs/proglang/jgs/jimpleutils/Supertypes.scala | Scala | bsd-3-clause | 1,978 |
import sbt._
/** Finagle protocol extensions. */
object Finagle extends Base {

  // Shared Buoyant additions to finagle-core, built on the Netty 4 transport.
  val buoyantCore = projectDir("finagle/buoyant")
    .withTwitterLibs(Deps.finagle("netty4"))
    .withTests()

  // HTTP/2 support: Netty 4 HTTP/2 codec plus the static boringssl binding.
  val h2 = projectDir("finagle/h2")
    .dependsOn(buoyantCore)
    .withLibs(
      Deps.netty4("codec-http2"), Deps.netty4("handler"),
      "io.netty" % "netty-tcnative-boringssl-static" % "1.1.33.Fork23")
    .withTests()
    .withE2e()

  // Aggregate build target covering every finagle submodule above.
  val all = aggregateDir("finagle", buoyantCore, h2)
}
| hhtpcd/linkerd | project/Finagle.scala | Scala | apache-2.0 | 491 |
// Compiler regression test (pos): overload resolution must prefer the
// single-argument alternative over the repeated-parameter one, in both the
// monomorphic and the polymorphic case. Do not "clean up" the duplicated
// definitions -- they are the point of the test.
class C {
  // this always worked
  // during isApplicableToMethod will use formalTypes to eliminate the repeated param in the formal types,
  // but we keep the repeated marker in the arguments -- here's a debug log:
  /*
  isCompatibleArgs false (List(Int*), List(Int))
  isAsSpecific false: (xs: Int*)Int >> (x: Int)Int?
    --> the repeated case is not more specific than the single-arg case because
        you can't apply something of `Int*` to `Int`
  isCompatibleArgs true (List(Int), List(Int))
  isAsSpecific true: (x: Int)Int >> (xs: Int*)Int?
    --> the single param case is more specific than the repeated param case, because
        you can apply a single argument to the method with the repeated param
  isCompatibleArgs true (List(Int), List(Int))
  isAsSpecific true: (x: Int)Int >> (xs: Int*)Int?
  isCompatibleArgs false (List(Int*), List(Int))
  isAsSpecific false: (xs: Int*)Int >> (x: Int)Int?
  isCompatibleArgs true (List(Int), List(Int))
  isAsSpecific true: (x: Int)Int >> (xs: Int*)Int?
  isCompatibleArgs false (List(Int*), List(Int))
  isAsSpecific false: (xs: Int*)Int >> (x: Int)Int?
  inferMethodAlternative applicable List(method foo, method foo) --> ranked: List(method foo)
  */
  def foo(xs: Int*): Int = xs.toSeq.head
  def foo(x: Int): Int = x
  foo(2)

  // this should also type check, resolving to the non-repeated case,
  // but there was a bug in the polymorphic case of isApplicableToMethod
  // (adjustTypeArgs would remove the incompatibility in applying something
  // expecting type T to a T*, as the latter would be turned into Seq[T])
  /*
  isAsSpecific false: [T](xs: T*)T >> [T](x: T)T?
  isAsSpecific true: [T](x: T)T >> [T](xs: T*)T?
  isAsSpecific true: [T](x: T)T >> [T](xs: T*)T?
  isAsSpecific false: [T](xs: T*)T >> [T](x: T)T?
  isAsSpecific true: [T](x: T)T >> [T](xs: T*)T?
  isAsSpecific false: [T](xs: T*)T >> [T](x: T)T?
  inferMethodAlternative applicable List(method fooT, method fooT) --> ranked: List(method fooT)
  */
  def fooT[T](xs: T*): T = xs.toSeq.head
  def fooT[T](x: T): T = x
  fooT(2)

  // from 4775
  def f[T](x: T): T = x
  def f[T](x: T, xs: T*): T = x
  f(5)
}
| scala/scala | test/files/pos/overload_poly_repeated.scala | Scala | apache-2.0 | 2,099 |
package com.github.cdow.actor.debugger
import java.net.InetSocketAddress
import akka.actor.{FSM, Props, ActorRef}
import akka.io.Tcp._
import akka.io.{Tcp, IO}
import akka.util.ByteString
import com.github.cdow.actor.MainMessage
// FSM states of the debugger-facing TCP endpoint, in lifecycle order:
// not yet bound -> listening -> debugger attached -> handshake done.
sealed trait DebuggerState

object DebuggerState {
  // No TCP binding established yet.
  case object Idle extends DebuggerState
  // Listening on the configured port, waiting for a debugger to connect.
  case object Bound extends DebuggerState
  // A debugger is connected but the JDWP handshake has not completed.
  case object Connected extends DebuggerState
  // Handshake exchanged; payload bytes flow in both directions.
  case object Running extends DebuggerState
}
object DebuggerActor {
  // Props factory: `port` is the TCP port to listen on, `listener` receives
  // lifecycle messages and incoming debugger payloads.
  def props(port: Int, listener: ActorRef) = Props(new DebuggerActor(port, listener))
}
// TCP server end of a JDWP debugger connection. The FSM state data is the
// currently connected debugger's connection actor (None while listening).
// Incoming bytes are forwarded to `listener`; ByteStrings sent to this actor
// are written back to the debugger once Running.
class DebuggerActor(port: Int, listener: ActorRef) extends FSM[DebuggerState, Option[ActorRef]] {
  import context.system
  import DebuggerState._

  // Fixed JDWP handshake token, echoed back verbatim to the debugger.
  val HANDSHAKE = ByteString.fromString("JDWP-Handshake", "US-ASCII")

  // Start listening as soon as the actor is constructed.
  IO(Tcp) ! Bind(self, new InetSocketAddress(port))

  startWith(Idle, None)

  when(Idle) {
    case Event(Tcp.Bound(localAddress), None) =>
      goto(Bound)
    case Event(CommandFailed(_: Bind), None) =>
      // Could not bind the port; nothing useful left to do.
      context stop self
      stay()
  }

  when(Bound) {
    case Event(Tcp.Connected(remote, local), None) =>
      listener ! MainMessage.DebuggerConnected
      val connection = sender()
      // Register self as the handler for this connection's inbound data.
      connection ! Register(self)
      goto(Connected) using Some(connection)
  }

  when(Connected) {
    case Event(Received(HANDSHAKE), Some(connection)) =>
      // JDWP handshake: echo the token back, then start relaying payloads.
      connection ! Write(HANDSHAKE)
      goto(Running)
    case Event(_ :ConnectionClosed, Some(_)) =>
      listener ! MainMessage.DebuggerDisconnected
      goto(Bound) using None
  }

  when(Running) {
    case Event(data: ByteString, Some(connection)) =>
      // Outbound: raw bytes handed to this actor are written to the debugger.
      connection ! Write(data)
      stay
    case Event(Received(data), Some(_)) =>
      // Inbound: forward debugger bytes to the listener.
      listener ! data
      stay
    case Event(_: ConnectionClosed, Some(_)) =>
      listener ! MainMessage.DebuggerDisconnected
      goto(Bound) using None
  }

  initialize()
}
| cdow/sbt-debug-plugin | src/main/scala/com/github/cdow/actor/debugger/DebuggerActor.scala | Scala | isc | 1,791 |
package lib
import com.bryzek.apidoc.api.v0.models.{Original, OriginalForm, OriginalType}
import org.scalatest.{FunSpec, ShouldMatchers}
// Spec for OriginalUtil: wrapping a form into an Original and sniffing the
// original type (api.json / service.json / swagger / avro IDL) from raw content.
class OriginalUtilSpec extends FunSpec with ShouldMatchers {

  it("original") {
    val data = TestHelper.readFile("../spec/apidoc-api.json")

    OriginalUtil.toOriginal(OriginalForm(data = data)) should be(
      Original(
        OriginalType.ApiJson,
        data
      )
    )
  }

  describe("guessType") {

    it("apiJson") {
      OriginalUtil.guessType(TestHelper.readFile("../spec/apidoc-api.json")) should be(Some(OriginalType.ApiJson))
      OriginalUtil.guessType(TestHelper.readFile("../spec/apidoc-spec.json")) should be(Some(OriginalType.ApiJson))
    }

    it("serviceJson") {
      OriginalUtil.guessType(TestHelper.readFile("../core/src/test/resources/apidoc-service.json")) should be(Some(OriginalType.ServiceJson))
    }

    it("swaggerJson") {
      OriginalUtil.guessType(TestHelper.readFile("../swagger/src/test/resources/petstore-with-external-docs.json")) should be(Some(OriginalType.SwaggerJson))
    }

    it("avroIdl") {
      // Avro IDL is detected from keywords, so whitespace padding must not matter.
      OriginalUtil.guessType(" @namespace ") should be(Some(OriginalType.AvroIdl))
      OriginalUtil.guessType(" protocol bar {} ") should be(Some(OriginalType.AvroIdl))
    }

    it("unknown") {
      OriginalUtil.guessType("   ") should be(None)
    }

    it("poorly formatted json") {
      OriginalUtil.guessType("{   ") should be(None)
    }
  }
}
| Seanstoppable/apidoc | api/test/lib/OriginalUtilSpec.scala | Scala | mit | 1,455 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.controllers
import org.joda.time.format.ISODateTimeFormat
import play.api.libs.json._
import play.api.libs.json.JsString
import org.joda.time.{LocalDate, DateTime, DateTimeZone, LocalDateTime}
import scala.util.Try
object RestFormats extends RestFormats
/**
 * ISO-8601 JSON (de)serialisation for Joda-Time's `DateTime`, `LocalDateTime`
 * and `LocalDate`. Instants are always parsed and printed in UTC.
 */
trait RestFormats {

  // ISO-8601 printer/parser with millisecond precision, pinned to UTC.
  private val dateTimeFormat = ISODateTimeFormat.dateTime.withZoneUTC

  // Strict yyyy-MM-dd pattern. NOTE: inside a triple-quoted string a digit class
  // is a single backslash (`\d`); `\\d` would match a literal backslash followed
  // by 'd' and therefore reject every date.
  private val localDateRegex = """^(\d\d\d\d)-(\d\d)-(\d\d)$""".r

  /** Reads an ISO-8601 string into a UTC `LocalDateTime`. */
  implicit val localDateTimeRead: Reads[LocalDateTime] = new Reads[LocalDateTime] {
    override def reads(json: JsValue): JsResult[LocalDateTime] = json match {
      case JsString(s) =>
        Try(JsSuccess(new LocalDateTime(dateTimeFormat.parseDateTime(s), DateTimeZone.UTC)))
          .getOrElse(JsError(s"Could not parse $s as a DateTime with format ${dateTimeFormat.toString}"))
      case _ =>
        JsError(s"Expected value to be a string, was actually $json")
    }
  }

  /** Writes a `LocalDateTime` as an ISO-8601 string in UTC. */
  implicit val localDateTimeWrite: Writes[LocalDateTime] = new Writes[LocalDateTime] {
    def writes(dateTime: LocalDateTime): JsValue =
      JsString(dateTimeFormat.print(dateTime.toDateTime(DateTimeZone.UTC)))
  }

  /** Reads an ISO-8601 string into a `DateTime`. */
  implicit val dateTimeRead: Reads[DateTime] = new Reads[DateTime] {
    override def reads(json: JsValue): JsResult[DateTime] = json match {
      case JsString(s) =>
        Try(JsSuccess(dateTimeFormat.parseDateTime(s)))
          .getOrElse(JsError(s"Could not parse $s as a DateTime with format ${dateTimeFormat.toString}"))
      case _ =>
        JsError(s"Expected value to be a string, was actually $json")
    }
  }

  /** Writes a `DateTime` as an ISO-8601 string. */
  implicit val dateTimeWrite: Writes[DateTime] = new Writes[DateTime] {
    def writes(dateTime: DateTime): JsValue = JsString(dateTimeFormat.print(dateTime))
  }

  /** Reads a strict "yyyy-MM-dd" string into a `LocalDate`. */
  implicit val localDateRead: Reads[LocalDate] = new Reads[LocalDate] {
    override def reads(json: JsValue): JsResult[LocalDate] = json match {
      case JsString(s @ localDateRegex(y, m, d)) =>
        // The regex guarantees numeric fields, but values may still be out of
        // range (e.g. month 13), which the LocalDate constructor rejects.
        Try(JsSuccess(new LocalDate(y.toInt, m.toInt, d.toInt)))
          .getOrElse(JsError(s"$s is not a valid date"))
      case JsString(s) =>
        JsError(s"Cannot parse $s as a LocalDate")
      case _ =>
        JsError(s"Expected value to be a string, was actually $json")
    }
  }

  /** Writes a `LocalDate` as zero-padded "yyyy-MM-dd". */
  implicit val localDateWrite: Writes[LocalDate] = new Writes[LocalDate] {
    def writes(date: LocalDate): JsValue =
      JsString("%04d-%02d-%02d".format(date.getYear, date.getMonthOfYear, date.getDayOfMonth))
  }

  implicit val dateTimeFormats = Format(dateTimeRead, dateTimeWrite)
  implicit val localDateTimeFormats = Format(localDateTimeRead, localDateTimeWrite)
  implicit val localDateFormats = Format(localDateRead, localDateWrite)
}
| liquidarmour/http-verbs | src/main/scala/uk/gov/hmrc/play/controllers/RestFormats.scala | Scala | apache-2.0 | 3,364 |
/* ___ _ ___ _ _ *\\
** / __| |/ (_) | | The SKilL Generator **
** \\__ \\ ' <| | | |__ (c) 2013-16 University of Stuttgart **
** |___/_|\\_\\_|_|____| see LICENSE **
\\* */
package de.ust.skill.generator.skill
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import de.ust.skill.generator.common.KnownGenerators
import de.ust.skill.main.CommandLine
/**
* Java specific tests.
*
* @author Timm Felden
*/
@RunWith(classOf[JUnitRunner])
class EscapingTest extends FunSuite {

  // Runs CommandLine.checkEscaping for `language` and asserts the per-word
  // escaping decisions; `escaping(i)` is true when `words(i)` must be escaped.
  def check(language : String, words : Array[String], escaping : Array[Boolean]) {
    CommandLine.exit = { s ⇒ fail(s) }
    val result = CommandLine.checkEscaping(language, words)
    assert(result === escaping.mkString(" "))
  }

  val known = KnownGenerators.all.map(_.newInstance.getLanguageName)

  // if is a keyword in all real languages
  // (the excluded back-ends are specification/statistics generators, not
  // programming languages with keywords)
  for (l ← known if !Set[String]("sidl", "skill", "statistics", "ecore").contains(l))
    test(s"${l} - none")(check(l, Array("if"), Array(true)))

  // some language keywords
  test("Ada - keywords") {
    check("ada", Array("while", "others", "in", "out", "case"), Array(true, true, true, true, true))
  }

  test("Java - keywords") {
    check("java", Array("int", "is", "not", "a", "class"), Array(true, false, false, false, true))
  }
}
| skill-lang/skill | src/test/scala/de/ust/skill/generator/skill/EscapingTest.scala | Scala | bsd-3-clause | 1,557 |
package world
/**
* Created by franblas on 17/04/17.
*/
/**
 * An immutable 2-D point with integer coordinates.
 *
 * @param x horizontal coordinate
 * @param y vertical coordinate
 */
class Point(x: Int, y: Int) {

  /**
   * Euclidean distance from this point to (x2, y2), truncated to an Int
   * (e.g. a true distance of 1.41 yields 1).
   *
   * The squares are computed in Double to avoid Int overflow for
   * far-apart coordinates.
   */
  def getDistance(x2: Int, y2: Int): Int = {
    val dx = (x - x2).toDouble
    val dy = (y - y2).toDouble
    math.sqrt(dx * dx + dy * dy).toInt
  }

  /**
   * Whether this point lies strictly inside the circle of radius `radius`
   * centred at (centerX, centerY). Points exactly on the circle are excluded,
   * matching the original `dist >= radius^2 -> false` behaviour.
   *
   * Squares are accumulated in Long to avoid Int overflow.
   */
  def inRadius(centerX: Int, centerY: Int, radius: Int): Boolean = {
    val dx = (x - centerX).toLong
    val dy = (y - centerY).toLong
    dx * dx + dy * dy < radius.toLong * radius
  }
}
| franblas/NAOC | src/main/scala/world/Point.scala | Scala | mit | 449 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.data.hibernate.spring
import org.beangle.commons.lang.Assert
import org.hibernate.FlushMode
import org.hibernate.Session
import org.hibernate.Transaction
import org.springframework.transaction.support.ResourceHolderSupport
/**
* @author chaostone
*/
// Resource holder binding a Hibernate Session (and optionally its active
// transaction) to the current thread via Spring's resource management.
class SessionHolder(val session: Session) extends ResourceHolderSupport {
  Assert.notNull(session, "Session must not be null")

  // Transaction currently associated with the held session, if one was started.
  var transaction: Transaction = _
  // Flush mode in effect before it was changed for the transaction; presumably
  // restored by the transaction manager on cleanup -- TODO confirm at call site.
  var previousFlushMode: FlushMode = _

  // Clears the base-class state and drops the session-specific references.
  override def clear(): Unit = {
    super.clear()
    this.transaction = null
    this.previousFlushMode = null
  }
}
| beangle/data | hibernate/src/main/scala/org/beangle/data/hibernate/spring/SessionHolder.scala | Scala | lgpl-3.0 | 1,321 |
package com.ecfront.ez.framework.service.auth.model
import com.ecfront.common.Resp
import com.ecfront.ez.framework.core.i18n.I18NProcessor.Impl
import com.ecfront.ez.framework.service.auth.CacheManager
import com.ecfront.ez.framework.service.jdbc._
import scala.beans.BeanProperty
/**
* 资源实体
*/
/**
 * 资源实体 -- a protected resource identified by HTTP method + URI.
 * `code` is derived as "method@uri" (see EZ_Resource.assembleCode) and is the
 * unique lookup key used by the RBAC cache.
 */
@Entity("Resource")
case class EZ_Resource() extends BaseModel with SecureModel {

  // Derived unique key: method + separator + uri.
  @Unique
  @Require
  @Desc("Code", 1000, 0) // method@uri
  @BeanProperty var code: String = _
  // HTTP method (e.g. GET/POST).
  @Index
  @Require
  @Desc("Method", 10, 0)
  @BeanProperty var method: String = _
  // Resource URI pattern.
  @Index
  @Require
  @Desc("URI", 800, 0)
  @BeanProperty var uri: String = _
  // Human-readable name (stored i18n-translated, see companion apply).
  @Require
  @Desc("Name", 200, 0)
  @BeanProperty var name: String = _

}
// Storage companion: validates method/uri, derives `code`, and keeps the RBAC
// resource cache in sync on every save/update/delete.
object EZ_Resource extends SecureStorage[EZ_Resource] {

  def apply(method: String, uri: String, name: String): EZ_Resource = {
    val res = EZ_Resource()
    res.method = method
    res.uri = uri
    // `.x` is the i18n translation extension imported from I18NProcessor.Impl.
    res.name = name.x
    res
  }

  override def preSave(model: EZ_Resource): Resp[EZ_Resource] = {
    preSaveOrUpdate(model)
  }

  override def preUpdate(model: EZ_Resource): Resp[EZ_Resource] = {
    preSaveOrUpdate(model)
  }

  // Validation + code derivation shared by save and update: method and uri are
  // required, and uri must not contain the code separator.
  override def preSaveOrUpdate(model: EZ_Resource): Resp[EZ_Resource] = {
    if (model.method == null || model.method.trim.isEmpty || model.uri == null || model.uri.trim.isEmpty) {
      logger.warn(s"Require【method】and【uri】")
      Resp.badRequest("Require【method】and【uri】")
    } else {
      if (model.uri.contains(BaseModel.SPLIT)) {
        logger.warn(s"【uri】can't contains ${BaseModel.SPLIT}")
        Resp.badRequest(s"【uri】can't contains ${BaseModel.SPLIT}")
      } else {
        model.code = assembleCode(model.method, model.uri)
        super.preSaveOrUpdate(model)
      }
    }
  }

  // Post-persist hooks push the stored resource into the RBAC cache.
  override def postSave(saveResult: EZ_Resource, preResult: EZ_Resource): Resp[EZ_Resource] = {
    postAddExt(saveResult)
    super.postSave(saveResult, preResult)
  }

  override def postUpdate(updateResult: EZ_Resource, preResult: EZ_Resource): Resp[EZ_Resource] = {
    postAddExt(updateResult)
    super.postUpdate(updateResult, preResult)
  }

  override def postSaveOrUpdate(saveOrUpdateResult: EZ_Resource, preResult: EZ_Resource): Resp[EZ_Resource] = {
    postAddExt(saveOrUpdateResult)
    super.postSaveOrUpdate(saveOrUpdateResult, preResult)
  }

  // Pre-delete hooks evict the resource from the RBAC cache before removal.
  override def preDeleteById(id: Any): Resp[Any] = {
    preRemoveExt(doGetById(id).body)
    super.preDeleteById(id)
  }

  override def preDeleteByUUID(uuid: String): Resp[String] = {
    preRemoveExt(doGetByUUID(uuid).body)
    super.preDeleteByUUID(uuid)
  }

  override def preDeleteByCond(condition: String, parameters: List[Any]): Resp[(String, List[Any])] = {
    // Evict every resource matched by the condition, then delegate.
    doFind(condition, parameters).body.foreach(preRemoveExt)
    super.preDeleteByCond(condition, parameters)
  }

  def deleteByCode(code: String): Resp[Void] = {
    deleteByCond( s"""code = ?""", List(code))
  }

  private def postAddExt(obj: EZ_Resource): Unit = {
    if (obj != null) {
      CacheManager.RBAC.addResource(obj)
    }
  }

  private def preRemoveExt(obj: EZ_Resource): Unit = {
    if (obj != null) {
      CacheManager.RBAC.removeResource(obj.code)
    }
  }

  // Bulk update by condition is deliberately unsupported (code would go stale).
  override def preUpdateByCond(newValues: String, condition: String, parameters: List[Any]): Resp[(String, String, List[Any])] =
    Resp.notImplemented("")

  // Unique key format: "<method><SPLIT><uri>".
  def assembleCode(method: String, uri: String): String = {
    method + BaseModel.SPLIT + uri
  }
}
| gudaoxuri/ez-framework | services/auth/src/main/scala/com/ecfront/ez/framework/service/auth/model/EZ_Resource.scala | Scala | apache-2.0 | 3,458 |
package Scala
import math._
import scala.util._
/**
 * CodinGame "Chuck Norris" puzzle: encode a line of text in unary.
 * Each character becomes 7 bits; runs of equal bits become blocks of
 * "00"/"0" (bit value) followed by one '0' per bit in the run.
 */
object Solution {

  /** Reads one line from stdin and prints its unary encoding. */
  def main(args: Array[String]): Unit = {
    // scala.io.StdIn.readLine replaces the deprecated Predef.readLine.
    val message = scala.io.StdIn.readLine()
    val toTranslate = message.map(c => to7bits(c.toInt.toBinaryString)).mkString
    translateChar(toTranslate)
  }

  /** Left-pads a binary string with '0' to a width of 7 characters. */
  def to7bits(s: String): String =
    if (s.length >= 7) s else ("0" * (7 - s.length)) + s

  /** Prints the unary encoding of the given bit string ('0'/'1' characters). */
  def translateChar(s: String): Unit = {
    val out = new StringBuilder
    var previous = s.charAt(0)
    var runLength = 0
    for (i <- 1 until s.length) {
      val bit = s.charAt(i)
      runLength += 1
      if (bit != previous) {
        // Run ended: emit its block and start counting the new run.
        out ++= addDataToMessage(previous, runLength)
        out += ' '
        runLength = 0
      }
      previous = bit
    }
    // Emit the final run (its length includes the last character).
    runLength += 1
    out ++= addDataToMessage(previous, runLength)
    println(out.toString)
  }

  /** Block for one run: "00 " header for zeros, "0 " for ones, then `cpt` zeros. */
  def addDataToMessage(previous: Char, cpt: Int): String = {
    val header = if (previous == '0') "00 " else "0 "
    header + ("0" * cpt)
  }
}
| JulienBe/CodinGame | src/Scala/ChuckNorris.scala | Scala | gpl-2.0 | 1,191 |
package scaladex.server.route
import scala.collection.SeqView
import scala.collection.SortedSet
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.util.Failure
import scala.util.Success
import akka.http.scaladsl.model.Uri._
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server._
import com.typesafe.scalalogging.LazyLogging
import play.twirl.api.HtmlFormat
import scaladex.core.model.Artifact
import scaladex.core.model.ArtifactSelection
import scaladex.core.model.BinaryVersion
import scaladex.core.model.Category
import scaladex.core.model.Env
import scaladex.core.model.GithubStatus
import scaladex.core.model.Platform
import scaladex.core.model.Project
import scaladex.core.model.SemanticVersion
import scaladex.core.model.UserState
import scaladex.core.service.SearchEngine
import scaladex.core.service.Storage
import scaladex.core.service.WebDatabase
import scaladex.server.TwirlSupport._
import scaladex.server.service.SearchSynchronizer
import scaladex.view
class ProjectPages(env: Env, database: WebDatabase, searchEngine: SearchEngine, localStorage: Storage)(
implicit executionContext: ExecutionContext
) extends LazyLogging {
private val searchSynchronizer = new SearchSynchronizer(database, searchEngine)
// All project-scoped routes, built for the (possibly anonymous) current user:
//   POST /edit/:org/:repo                 save project settings, then resync search
//   GET  /artifacts/:org/:repo            artifact listing grouped by platform/version
//   GET  /edit/:org/:repo                 settings page (project admins only)
//   GET  /:org/:repo                      redirect to the default artifact page
//   GET  /:org/:repo/:artifact[/:version] project page for one artifact
// organizationM / repositoryM / artifactM / versionM are path matchers defined
// elsewhere in this package.
def route(user: Option[UserState]): Route =
  concat(
    post {
      path("edit" / organizationM / repositoryM) { (organization, repository) =>
        editForm { form =>
          val ref = Project.Reference(organization, repository)
          val updateF = for {
            _ <- database.updateProjectSettings(ref, form)
            _ <- searchSynchronizer.syncProject(ref)
          } yield ()
          // Both success and failure redirect back to the project page.
          onComplete(updateF) {
            case Success(()) =>
              redirect(
                Uri(s"/$organization/$repository"),
                StatusCodes.SeeOther
              )
            case Failure(e) =>
              logger.error(s"Cannot save settings of project $ref", e)
              redirect(
                Uri(s"/$organization/$repository"),
                StatusCodes.SeeOther
              ) // maybe we can print that it wasn't saved
          }
        }
      }
    },
    get {
      path("artifacts" / organizationM / repositoryM) { (org, repo) =>
        val ref = Project.Reference(org, repo)
        val res =
          for {
            projectOpt <- database.getProject(ref)
            project = projectOpt.getOrElse(throw new Exception(s"project ${ref} not found"))
            artifacts <- database.getArtifacts(project.reference)
          } yield (project, artifacts)
        onComplete(res) {
          case Success((project, artifacts)) =>
            // Distinct binary versions per platform, newest platform/version first.
            val binaryVersionByPlatforms = artifacts
              .map(_.binaryVersion)
              .distinct
              .groupBy(_.platform)
              .view
              .mapValues(_.sorted(BinaryVersion.ordering.reverse))
              .toSeq
              .sortBy(_._1)(Platform.ordering.reverse)
            // Artifacts grouped by version then by artifact name, newest version first.
            val artifactsByVersions = artifacts
              .groupBy(_.version)
              .map { case (version, artifacts) => (version, artifacts.groupBy(_.artifactName).toSeq.sortBy(_._1)) }
              .toSeq
              .sortBy(_._1)(SemanticVersion.ordering.reverse)
            complete(view.html.artifacts(env, project, user, binaryVersionByPlatforms, artifactsByVersions))
          case Failure(e) =>
            complete(StatusCodes.NotFound, view.html.notfound(env, user))
        }
      }
    },
    get {
      path("edit" / organizationM / repositoryM) { (organization, repository) =>
        val projectRef = Project.Reference(organization, repository)
        // Settings page is restricted to users who can edit this project.
        user match {
          case Some(userState) if userState.canEdit(projectRef) =>
            complete(getEditPage(projectRef, userState))
          case maybeUser =>
            complete((StatusCodes.Forbidden, view.html.forbidden(env, maybeUser)))
        }
      }
    },
    get {
      path(organizationM / repositoryM)((organization, repository) =>
        parameters("artifact".?, "version".?, "binaryVersion".?, "selected".?) {
          (artifact, version, binaryVersion, selected) =>
            val projectRef = Project.Reference(organization, repository)
            val fut: Future[StandardRoute] = database.getProject(projectRef).flatMap {
              // Moved projects redirect permanently to their new coordinates.
              case Some(Project(_, _, _, GithubStatus.Moved(_, newProjectRef), _, _)) =>
                Future.successful(redirect(Uri(s"/$newProjectRef"), StatusCodes.PermanentRedirect))
              case Some(project) =>
                // Resolve the artifact selection, then redirect to its page.
                val artifactRouteF: Future[StandardRoute] =
                  getSelectedArtifact(
                    database,
                    project,
                    binaryVersion = binaryVersion,
                    artifact = artifact.map(Artifact.Name.apply),
                    version = version,
                    selected = selected
                  ).map(_.map { artifact =>
                    val binaryVersionParam = s"?binaryVersion=${artifact.binaryVersion.label}"
                    redirect(
                      s"/$organization/$repository/${artifact.artifactName}/${artifact.version}/$binaryVersionParam",
                      StatusCodes.TemporaryRedirect
                    )
                  }.getOrElse(complete(StatusCodes.NotFound, view.html.notfound(env, user))))
                artifactRouteF
              case None =>
                Future.successful(
                  complete(StatusCodes.NotFound, view.html.notfound(env, user))
                )
            }
            onSuccess(fut)(identity)
        }
      )
    },
    get {
      path(organizationM / repositoryM / artifactM)((organization, repository, artifact) =>
        parameter("binaryVersion".?) { binaryVersion =>
          // No explicit version: getProjectPage picks the default one.
          val res = getProjectPage(
            organization,
            repository,
            binaryVersion,
            artifact,
            None,
            user
          )
          onComplete(res) {
            case Success((code, some)) => complete(code, some)
            case Failure(e) =>
              complete(StatusCodes.NotFound, view.html.notfound(env, user))
          }
        }
      )
    },
    get {
      path(organizationM / repositoryM / artifactM / versionM)((organization, repository, artifact, version) =>
        parameter("binaryVersion".?) { binaryVersion =>
          val res = getProjectPage(organization, repository, binaryVersion, artifact, Some(version), user)
          onComplete(res) {
            case Success((code, some)) => complete(code, some)
            case Failure(e) =>
              complete(StatusCodes.NotFound, view.html.notfound(env, user))
          }
        }
      )
    }
  )
// Renders the project settings page with the project's artifacts;
// yields 404 when the project does not exist.
private def getEditPage(ref: Project.Reference, user: UserState): Future[(StatusCode, HtmlFormat.Appendable)] =
  for {
    projectOpt <- database.getProject(ref)
    artifacts <- database.getArtifacts(ref)
  } yield projectOpt
    .map { p =>
      val page = view.project.html.editproject(env, p, artifacts, Some(user))
      (StatusCodes.OK, page)
    }
    .getOrElse((StatusCodes.NotFound, view.html.notfound(env, Some(user))))
/**
 * Sorts the given versions in descending order, keeping only semantic
 * versions when the project has opted into strict versioning.
 *
 * @param p           the project whose settings decide the filtering
 * @param allVersions lazily-evaluated versions of all the project's artifacts
 * @return the (possibly filtered) versions, sorted newest first
 */
private def filterVersions(p: Project, allVersions: SeqView[SemanticVersion]): SortedSet[SemanticVersion] = {
  // `allVersions` is already a view, so the previous extra `.view` calls were redundant
  val filtered =
    if (p.settings.strictVersions) allVersions.filter(_.isSemantic)
    else allVersions
  SortedSet.from(filtered)(SemanticVersion.ordering.reverse)
}
/**
 * Builds the main project page for the given coordinates.
 *
 * Resolves the project, selects a default artifact from the requested
 * binary version / artifact name / version, then loads the dependency
 * information needed to render the page.
 *
 * @param organization  GitHub organization of the project
 * @param repository    GitHub repository of the project
 * @param binaryVersion optional binary-version label used to narrow the selection
 * @param artifact      artifact name to select
 * @param version       optional artifact version to select
 * @param user          currently logged-in user, if any
 * @return OK with the rendered page, or NotFound when the project does not exist
 */
private def getProjectPage(
    organization: Project.Organization,
    repository: Project.Repository,
    binaryVersion: Option[String],
    artifact: Artifact.Name,
    version: Option[SemanticVersion],
    user: Option[UserState]
): Future[(StatusCode, HtmlFormat.Appendable)] = {
  val selection = ArtifactSelection.parse(
    binaryVersion = binaryVersion,
    artifactName = Some(artifact),
    version = version.map(_.toString),
    selected = None
  )
  val projectRef =
    Project.Reference(organization, repository)
  database.getProject(projectRef).flatMap {
    case Some(project) =>
      for {
        artifacts <- database.getArtifacts(projectRef)
        // NOTE(review): the throw here fails the enclosing Future (the `=`
        // binding runs inside map); callers appear to turn failures into a
        // 404 via onComplete — confirm that is the intended behavior.
        selectedArtifact = selection
          .defaultArtifact(artifacts, project)
          .getOrElse(throw new Exception(s"no artifact found for $projectRef"))
        directDependencies <- database.getDirectDependencies(selectedArtifact)
        reverseDependency <- database.getReverseDependencies(selectedArtifact)
      } yield {
        // Views over `artifacts` avoid building intermediate collections;
        // each one is materialized exactly once via SortedSet.from below.
        val allVersions = artifacts.view.map(_.version)
        val filteredVersions = filterVersions(project, allVersions)
        val binaryVersions = artifacts.view.map(_.binaryVersion)
        // badges only consider the platforms of the selected artifact's name
        val platformsForBadges = artifacts.view
          .filter(_.artifactName == selectedArtifact.artifactName)
          .map(_.binaryVersion.platform)
        val artifactNames = artifacts.view.map(_.artifactName)
        val twitterCard = project.twitterSummaryCard
        val html = view.project.html.project(
          env,
          project,
          SortedSet.from(artifactNames)(Artifact.Name.ordering),
          filteredVersions,
          SortedSet.from(binaryVersions)(BinaryVersion.ordering.reverse),
          SortedSet.from(platformsForBadges)(Platform.ordering.reverse),
          selectedArtifact,
          user,
          showEditButton = user.exists(_.canEdit(projectRef)), // shown only when you are admin on the project
          Some(twitterCard),
          artifacts.size,
          directDependencies,
          reverseDependency
        )
        (StatusCodes.OK, html)
      }
    case None =>
      Future.successful((StatusCodes.NotFound, view.html.notfound(env, user)))
  }
}
// TODO remove all unused parameters
/**
 * Directive that parses the project-settings edit form into a
 * `Project.Settings` instance.
 */
private val editForm: Directive1[Project.Settings] =
  formFieldSeq.tflatMap(fields =>
    formFields(
      "contributorsWanted".as[Boolean] ? false,
      "defaultArtifact".?,
      "defaultStableVersion".as[Boolean] ? false,
      "strictVersions".as[Boolean] ? false,
      "deprecated".as[Boolean] ? false,
      "artifactDeprecations".as[String].*,
      "cliArtifacts".as[String].*,
      "customScalaDoc".?,
      "category".?,
      "beginnerIssuesLabel".?,
      "selectedBeginnerIssues".as[String].*,
      "chatroom".?,
      "contributingGuide".?,
      "codeOfConduct".?
    ).tmap {
      case (
            contributorsWanted,
            rawDefaultArtifact,
            defaultStableVersion,
            strictVersions,
            deprecated,
            rawArtifactDeprecations,
            rawCliArtifacts,
            rawCustomScalaDoc,
            rawCategory,
            rawBeginnerIssuesLabel,
            selectedBeginnerIssues,
            rawChatroom,
            rawContributingGuide,
            rawCodeOfConduct
          ) =>
        // Documentation links arrive as indexed form fields whose keys start
        // with "documentationLinks[<index>": group the fields of each index
        // back into one (label, link) pair.
        val documentationLinks =
          fields._1
            .filter { case (key, _) => key.startsWith("documentationLinks") }
            .groupBy {
              case (key, _) =>
                // the index between the square brackets
                key
                  .drop("documentationLinks[".length)
                  .takeWhile(_ != ']')
            }
            .values
            // NOTE(review): this match assumes exactly two fields per index;
            // any other arity throws a MatchError — confirm the form always
            // submits label/link pairs.
            .map {
              case Vector((a, b), (_, d)) =>
                if (a.contains("label")) (b, d)
                else (d, b)
            }
            .flatMap {
              case (label, link) =>
                Project.DocumentationLink.from(label, link)
            }
            .toList
        // Blank form inputs are treated as "not set".
        def noneIfEmpty(value: String): Option[String] =
          if (value.isEmpty) None else Some(value)
        val settings: Project.Settings = Project.Settings(
          defaultStableVersion,
          rawDefaultArtifact.flatMap(noneIfEmpty).map(Artifact.Name.apply),
          strictVersions,
          rawCustomScalaDoc.flatMap(noneIfEmpty),
          documentationLinks,
          deprecated,
          contributorsWanted,
          rawArtifactDeprecations.map(Artifact.Name.apply).toSet,
          rawCliArtifacts.map(Artifact.Name.apply).toSet,
          rawCategory.flatMap(Category.byLabel.get),
          rawBeginnerIssuesLabel.flatMap(noneIfEmpty)
        )
        Tuple1(settings)
    }
  )
}
| scalacenter/scaladex | modules/server/src/main/scala/scaladex/server/route/ProjectPages.scala | Scala | bsd-3-clause | 12,693 |
package mesosphere.marathon
package storage.migration
import akka.Done
import akka.stream.scaladsl.Source
import akka.stream.{ ActorMaterializer, Materializer }
import com.typesafe.scalalogging.StrictLogging
import mesosphere.AkkaUnitTest
import mesosphere.marathon.core.pod.PodDefinition
import mesosphere.marathon.state._
import mesosphere.marathon.storage.migration.MigrationTo146.Environment
import mesosphere.marathon.storage.repository.{ AppRepository, PodRepository }
import mesosphere.marathon.test.GroupCreation
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContextExecutor, Future }
/**
 * Tests for MigrationTo146: migration of the unreachable strategy of stored
 * apps and pods, gated on an environment variable.
 */
class MigrationTo146Test extends AkkaUnitTest with GroupCreation with StrictLogging {

  "Migration to 1.4.6" should {
    "do nothing if env var is not configured" in new Fixture {
      MigrationTo146.migrateUnreachableApps(appRepository, podRepository)(env, ctx, mat).futureValue

      // Without the env var set, the repositories must not be read or written.
      // Bug fix: the previous `store(_: AppDefinition)` only constructed a
      // function value `(x) => verify(...).store(x)` and discarded it, so the
      // store verification never actually ran; use an argument matcher instead.
      verify(appRepository, never).all()
      verify(appRepository, never).store(any[AppDefinition])
      verify(podRepository, never).all()
      verify(podRepository, never).store(any[PodDefinition])
    }

    "do migration if env var is configured" in new Fixture(Map(MigrationTo146.MigrateUnreachableStrategyEnvVar -> "true")) {
      MigrationTo146.migrateUnreachableApps(appRepository, podRepository)(env, ctx, mat).futureValue

      // Expected migrated counterparts of the stored entities; the "case n"
      // markers refer to distinct code paths of the migration.
      val targetApp = app.copy(unreachableStrategy = UnreachableEnabled(0.seconds, 5.seconds)) // case 2
      val targetApp2 = app2.copy(unreachableStrategy = UnreachableEnabled(0.seconds, 0.seconds)) // case 1
      val targetPod = pod.copy(unreachableStrategy = UnreachableEnabled(0.seconds, 0.seconds)) // case 3

      logger.info(s"Migration app ($app, $app2) and pod ($pod)")
      verify(appRepository, once).all()
      verify(appRepository, once).store(targetApp)
      verify(appRepository, once).store(targetApp2)
      verify(podRepository, once).all()
      verify(podRepository, once).store(targetPod)
    }
  }

  /** Mocked repositories pre-loaded with two apps and one pod. */
  private class Fixture(val environment: Map[String, String] = Map.empty) {
    val appRepository: AppRepository = mock[AppRepository]
    val podRepository: PodRepository = mock[PodRepository]

    implicit lazy val env = Environment(environment)
    implicit lazy val mat: Materializer = ActorMaterializer()
    implicit lazy val ctx: ExecutionContextExecutor = system.dispatcher

    val app = AppDefinition(PathId("/app"), unreachableStrategy = UnreachableEnabled(1.seconds, 5.seconds))
    val app2 = AppDefinition(PathId("/app2"), unreachableStrategy = UnreachableEnabled(5.minutes, 10.minutes))
    val pod = PodDefinition(PathId("/pod"), unreachableStrategy = UnreachableEnabled(1.seconds, 2.seconds))

    // Stubbed reads/writes for the repositories the migration touches.
    appRepository.all() returns Source(Seq(app, app2))
    appRepository.store(any) returns Future.successful(Done)
    podRepository.all() returns Source.single(pod)
    podRepository.store(any) returns Future.successful(Done)
  }
}
| guenter/marathon | src/test/scala/mesosphere/marathon/storage/migration/MigrationTo146Test.scala | Scala | apache-2.0 | 2,908 |
package de.johoop.xplane.api
import java.net.InetAddress
import akka.{Done, NotUsed}
import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.stream.scaladsl.{Keep, Sink, Source}
import de.johoop.xplane.network
import de.johoop.xplane.network.{XPlaneConnection, XPlaneSource}
import de.johoop.xplane.network.protocol._
import de.johoop.xplane.network.protocol.Request._
import scala.concurrent.{ExecutionContext, Future}
/** Entry point for establishing a connection to a locally running X-Plane instance. */
object XPlane {
  /**
   * Resolves a local X-Plane instance via its multicast beacon and connects to it.
   *
   * @param multicastGroupName multicast group the beacon is announced on
   * @param multicastPort      multicast port the beacon is announced on
   * @return the established connection, once the beacon has been resolved
   */
  def connect(multicastGroupName: String = "239.255.1.1", multicastPort: Int = 49707)
             (implicit system: ActorSystem, ec: ExecutionContext): Future[ConnectedToXPlane] = {
    val group = InetAddress.getByName(multicastGroupName)
    network.resolveLocalXPlaneBeacon(group, multicastPort).map(beacon => new ConnectedToXPlane(beacon))
  }
}
/**
 * An established connection to an X-Plane instance described by `beacon`.
 *
 * @param beacon the resolved multicast beacon of the X-Plane instance
 */
class ConnectedToXPlane(val beacon: BECN)(implicit system: ActorSystem, ec: ExecutionContext) {
  // dedicated client used for one-shot outgoing requests (see setDataRef)
  private val connectionForSending: XPlaneConnection = network.createXPlaneClient(beacon)

  /** Subscribes to RPOS (position) updates at the given frequency; a frequency of 0 unsubscribes. */
  def subscribeToRPOS(frequency: Int): Source[RPOS, NotUsed] = {
    def requestRpos(rate: Int)(connection: XPlaneConnection): Unit =
      network.sendTo(connection)(RPOSRequest(rate))

    val graph = new XPlaneSource(beacon, subscribe = requestRpos(frequency), unsubscribe = requestRpos(0))
    Source.fromGraph(graph).mapConcat {
      case rpos: RPOS => rpos :: Nil
      case other =>
        // anything other than RPOS is unexpected here; log and drop it
        system.log.warning("received an unexpected response: {}", other)
        Nil
    }
  }

  /** Subscribes to the given dataRef paths; emits maps from path to current value. */
  def subscribeToDataRefs(frequency: Int, dataRefPaths: String*): Source[String Map Float, NotUsed] = {
    // assign each requested path a stable id, starting at 1
    val dataRefs: Vector[(String, Int)] =
      dataRefPaths.toVector.zipWithIndex.map { case (path, idx) => (path, idx + 1) }

    def requestDataRefs(rate: Int)(connection: XPlaneConnection): Unit =
      dataRefs foreach { case (path, id) => network.sendTo(connection)(RREFRequest(rate, id, path)) }

    def toPathValueMap(payload: Payload): String Map Float = payload match {
      case RREF(dataRefValues) =>
        // translate each received id back into its requested path
        dataRefValues.flatMap {
          case (wantedId, value) =>
            dataRefs.find { case (_, id) => id == wantedId }.map { case (path, _) => (path, value) }
        }
      case other =>
        system.log.warning("received an unexpected response: {}", other)
        Map.empty
    }

    val graph = new XPlaneSource(
      beacon, subscribe = requestDataRefs(frequency), unsubscribe = requestDataRefs(0))
    Source.fromGraph(graph).map(toPathValueMap)
  }

  /** Reads a single current value of the given dataRef. */
  def getDataRef(dataRefPath: String)(implicit mat: Materializer): Future[Float] =
    subscribeToDataRefs(100, dataRefPath)
      .toMat(Sink.head)(Keep.right)
      .run()
      .map(values => values(dataRefPath))

  /** Sets the given dataRef to the given value (fire-and-forget send). */
  def setDataRef(dataRef: String, value: Float): Future[Done] = Future {
    network.sendTo(connectionForSending)(DREFRequest(value, dataRef))
    Done
  }
}
| jmhofer/xplane-udp | src/main/scala/de/johoop/xplane/api/XPlane.scala | Scala | gpl-3.0 | 2,869 |
/*
* Copyright (C) 2014 HMPerson1 <hmperson1@gmail.com> and nathanfei123
*
* This file is part of AOCM.
*
* AOCM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package net.adorableoctocm.engine
import scala.IndexedSeq
import scala.concurrent.duration.DurationInt
import InputEvent._
import net.adorableoctocm.State
import rx.lang.scala.Observable
/**
* The game engine.
*/
/**
 * The game engine.
 *
 * Samples `input` every `Period`, folds each sample through `tick` starting
 * from an initial state, and pushes every resulting [[State]] to `renderer`.
 *
 * @param input    stream of currently-pressed input-event sets
 * @param renderer callback invoked with each new game state
 */
class Engine(input: Observable[InputEvents], renderer: (State => Unit)) {
  import Engine._

  // Temporary hard-coded level: each Int encodes one column of blocks as a
  // bitmask (bit i set => solid block at row i), unpacked into booleans.
  val tmpLevel = IndexedSeq[Int](0xffffffff, 0x80100001, 0x80100003, 0x80100007, 0x8090000f, 0x80900007, 0x80000003, 0x80000001, 0xffffffff).map(int => (0 to 31).map(idx => ((int >> idx) & 1) == 1))

  // Game loop: latch the newest input, sample it on a fixed timer, fold the
  // samples through `tick`, and hand every state to the renderer.
  input.compose(sampleOnEvery[InputEvents](Observable.interval(Period))(Set())).scan(State(State())(posx = 16, posy = 16, blocks = tmpLevel))(tick).subscribe(renderer)

  /** Computes the next state from the previous state and the sampled inputs. */
  private def tick(prev: State, input: InputEvents): State = {
    // Start a jump only while standing exactly on top of a block.
    val jump = (s: State) => {
      if (input(Up)) {
        val x = s.posx
        val y = s.posy
        // the columns the (one-block-wide) player overlaps horizontally
        val columns = s.blocks.slice(x / BlockSize, (x - 1) / BlockSize + 2)
        // NOTE(review): literal 16 duplicates BlockSize — grounded iff y is
        // block-aligned and a solid block sits directly underneath.
        if (y % 16 == 0 && columns.map(_(y / BlockSize - 1)).contains(true)) {
          State(s)(vely = JumpVel)
        } else s
      } else s
    }
    // Horizontal walking: XOR, so holding both directions cancels movement.
    val walk = (s: State) => {
      if (input(Left) ^ input(Right)) {
        if (input(Left)) State(s)(velx = -WalkVel, facing = false) else State(s)(velx = WalkVel, facing = true)
      } else State(s)(velx = 0)
    }
    // Clamps velocity so the 1x2-block player never passes through blocks.
    val collide = (s: State) => {
      // player's bounding box: (bx, by) bottom-left, (tx, ty) top-right, inclusive
      val bx = s.posx
      val by = s.posy
      val tx = bx + BlockSize - 1
      val ty = by + BlockSize * 2 - 1
      // block indices of the box corners
      val bxi = bx / BlockSize
      val byi = by / BlockSize
      val txi = tx / BlockSize
      val tyi = ty / BlockSize
      // first clamp against the world origin
      var vx = if (bx + s.velx <= 0) 0 - bx else s.velx
      var vy = if (by + s.vely <= 0) 0 - by else s.vely
      val columns = s.blocks.slice(bxi, txi + 1)
      if (vy < 0) {
        // falling: land on the highest block below the player
        val below = columns.map(_.lastIndexOf(true, byi - 1)).max
        val floor = (below + 1) * 16
        vy = if (by + s.vely <= floor) floor - by else vy
      }
      if (vy > 0) {
        // rising: stop just under the lowest block above the player
        val above = columns.map(_.indexOf(true, tyi + 1)).min
        val ceil = above * 16
        vy = if (ty + s.vely >= ceil) ceil - ty - 1 else vy
      }
      // rows the player will overlap after the (clamped) vertical move
      val rows = s.blocks.transpose.slice((by + vy) / BlockSize, (ty + vy) / BlockSize + 1)
      if (vx < 0) {
        // moving left: stop at the nearest wall on the left
        val left = rows.map(_.lastIndexOf(true, bxi - 1)).max
        val lwall = (left + 1) * 16
        vx = if (bx + s.velx <= lwall) lwall - bx else vx
      }
      if (vx > 0) {
        // moving right: stop at the nearest wall on the right
        val right = rows.map(_.indexOf(true, txi + 1)).min
        val rwall = right * 16
        vx = if (tx + s.velx >= rwall) rwall - tx - 1 else vx
      }
      State(s)(
        velx = vx,
        vely = vy
      )
    }
    // Integrate velocity into position and apply gravity.
    val physics = (s: State) => {
      State(s)(posx = s.posx + s.velx, posy = s.posy + s.vely, vely = s.vely - 1)
    }
    jump andThen walk andThen collide andThen physics apply prev
  }
}
object Engine {
  /** Interval at which input is sampled and the game state advances. */
  val Period = 20 millis
  /** Upward velocity applied when a jump starts (position units per tick). */
  val JumpVel = 6
  /** Horizontal walking speed (position units per tick). */
  val WalkVel = 2
  /** Side length of one block, in position units. */
  val BlockSize = 16

  /**
   * Returns a transformer that, applied to an observable `o`, emits the most
   * recent value of `o` every time `sampler` fires (`default` until `o` has
   * emitted anything).
   *
   * NOTE(review): the subscription to `o` is never released, and the sampler
   * subscription is not tied to downstream unsubscription — fine for the
   * app-lifetime game loop above, but a leak if reused elsewhere; confirm.
   */
  def sampleOnEvery[T](sampler: Observable[_])(default: T)(o: Observable[T]): Observable[T] = {
    // guards `value`, which is written on `o`'s thread and read on the sampler's
    val lock = new AnyRef
    var value = default
    o.subscribe { v => lock.synchronized { value = v } }
    Observable[T](subscriber => {
      sampler.subscribe { _ =>
        if (!subscriber.isUnsubscribed) {
          lock.synchronized {
            subscriber.onNext(value)
          }
        }
      }
    })
  }
}
| HMPerson1/adorable-octo-computing-machine | src/net/adorableoctocm/engine/Engine.scala | Scala | gpl-3.0 | 4,092 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.data
import java.util.{Collections, Date}
import com.vividsolutions.jts.geom.Coordinate
import org.geotools.data._
import org.geotools.factory.Hints
import org.geotools.feature.NameImpl
import org.geotools.filter.text.cql2.CQL
import org.geotools.filter.text.ecql.ECQL
import org.geotools.geometry.jts.JTSFactoryFinder
import org.geotools.util.Converters
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.index._
import org.locationtech.geomesa.accumulo.iterators.TestData
import org.locationtech.geomesa.accumulo.{AccumuloFeatureIndexType, TestWithMultipleSfts}
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.index.conf.QueryHints._
import org.locationtech.geomesa.index.conf.{QueryHints, QueryProperties}
import org.locationtech.geomesa.index.utils.{ExplainNull, ExplainString}
import org.locationtech.geomesa.utils.bin.BinaryOutputEncoder
import org.locationtech.geomesa.utils.bin.BinaryOutputEncoder.EncodedValues
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.filter.Filter
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
import scala.util.Random
@RunWith(classOf[JUnitRunner])
class AccumuloDataStoreQueryTest extends Specification with TestWithMultipleSfts {
import org.locationtech.geomesa.filter.ff
sequential
val defaultSft = createNewSchema("name:String:index=join,geom:Point:srid=4326,dtg:Date")
addFeature(defaultSft, ScalaSimpleFeature.create(defaultSft, "fid-1", "name1", "POINT(45 49)", "2010-05-07T12:30:00.000Z"))
"AccumuloDataStore" should {
"return an empty iterator correctly" in {
val fs = ds.getFeatureSource(defaultSft.getTypeName)
// compose a CQL query that uses a polygon that is disjoint with the feature bounds
val cqlFilter = CQL.toFilter(s"BBOX(geom, 64.9,68.9,65.1,69.1)")
val query = new Query(defaultSft.getTypeName, cqlFilter)
// Let's read out what we wrote.
val results = fs.getFeatures(query)
val features = results.features
"where schema matches" >> { results.getSchema mustEqual defaultSft }
"and there are no results" >> { features.hasNext must beFalse }
}
"process an exclude query correctly" in {
val fs = ds.getFeatureSource(defaultSft.getTypeName)
val query = new Query(defaultSft.getTypeName, Filter.EXCLUDE)
ds.getQueryPlan(query) must beEmpty
val features = fs.getFeatures(query).features
try {
features.hasNext must beFalse
} finally {
features.close()
}
}
"process a DWithin query correctly" in {
// compose a CQL query that uses a polygon that is disjoint with the feature bounds
val geomFactory = JTSFactoryFinder.getGeometryFactory
val q = ff.dwithin(ff.property("geom"),
ff.literal(geomFactory.createPoint(new Coordinate(45.000001, 48.99999))), 100.0, "meters")
val query = new Query(defaultSft.getTypeName, q)
// Let's read out what we wrote.
val results = ds.getFeatureSource(defaultSft.getTypeName).getFeatures(query)
val features = results.features
"with correct result" >> {
features.hasNext must beTrue
features.next().getID mustEqual "fid-1"
features.hasNext must beFalse
}
}
"process a DWithin of a Linestring and dtg During query correctly" >> {
val lineOfBufferCoords: Array[Coordinate] = Array(new Coordinate(-45, 0), new Coordinate(-90, 45))
val geomFactory = JTSFactoryFinder.getGeometryFactory
// create the data store
val sftPoints = createNewSchema("*geom:Point:srid=4326,dtg:Date")
// add the 150 excluded points
TestData.excludedDwithinPoints.zipWithIndex.foreach{ case (p, i) =>
addFeature(sftPoints, ScalaSimpleFeature.create(sftPoints, s"exfid$i", p, "2014-06-07T12:00:00.000Z"))
}
// add the 50 included points
TestData.includedDwithinPoints.zipWithIndex.foreach{ case (p, i) =>
addFeature(sftPoints, ScalaSimpleFeature.create(sftPoints, s"infid$i", p, "2014-06-07T12:00:00.000Z"))
}
// compose the query
val during = ECQL.toFilter("dtg DURING 2014-06-07T11:00:00.000Z/2014-06-07T13:00:00.000Z")
"with correct result when using a dwithin of degrees" >> {
val dwithinUsingDegrees = ff.dwithin(ff.property("geom"),
ff.literal(geomFactory.createLineString(lineOfBufferCoords)), 1.0, "degrees")
val filterUsingDegrees = ff.and(during, dwithinUsingDegrees)
val queryUsingDegrees = new Query(sftPoints.getTypeName, filterUsingDegrees)
val resultsUsingDegrees = ds.getFeatureSource(sftPoints.getTypeName).getFeatures(queryUsingDegrees)
SelfClosingIterator(resultsUsingDegrees.features).toSeq must haveLength(50)
}.pendingUntilFixed("Fixed Z3 'During And Dwithin' queries for a buffer created with unit degrees")
"with correct result when using a dwithin of meters" >> {
val dwithinUsingMeters = ff.dwithin(ff.property("geom"),
ff.literal(geomFactory.createLineString(lineOfBufferCoords)), 150000, "meters")
val filterUsingMeters = ff.and(during, dwithinUsingMeters)
val queryUsingMeters = new Query(sftPoints.getTypeName, filterUsingMeters)
val resultsUsingMeters = ds.getFeatureSource(sftPoints.getTypeName).getFeatures(queryUsingMeters)
SelfClosingIterator(resultsUsingMeters.features).toSeq must haveLength(50)
}
}
"handle bboxes without property name" in {
val filterNull = ff.bbox(ff.property(null.asInstanceOf[String]), 40, 44, 50, 54, "EPSG:4326")
val filterEmpty = ff.bbox(ff.property(""), 40, 44, 50, 54, "EPSG:4326")
val queryNull = new Query(defaultSft.getTypeName, filterNull)
val queryEmpty = new Query(defaultSft.getTypeName, filterEmpty)
val (planNull, explainNull) = {
val o = new ExplainString
val p = ds.getQueryPlan(queryNull, explainer = o)
(p, o.toString())
}
val (planEmpty, explainEmpty) = {
val o = new ExplainString
val p = ds.getQueryPlan(queryEmpty, explainer = o)
(p, o.toString())
}
planNull must haveLength(1)
planNull.head.table mustEqual Z2Index.getTableName(defaultSft.getTypeName, ds)
planEmpty must haveLength(1)
planEmpty.head.table mustEqual Z2Index.getTableName(defaultSft.getTypeName, ds)
explainNull must contain("Filter plan: FilterPlan[Z2Index[BBOX(geom, 40.0,44.0,50.0,54.0)][None]]")
explainEmpty must contain("Filter plan: FilterPlan[Z2Index[BBOX(geom, 40.0,44.0,50.0,54.0)][None]]")
val featuresNull = SelfClosingIterator(ds.getFeatureSource(defaultSft.getTypeName).getFeatures(queryNull).features).toSeq
val featuresEmpty = SelfClosingIterator(ds.getFeatureSource(defaultSft.getTypeName).getFeatures(queryEmpty).features).toSeq
featuresNull.map(_.getID) mustEqual Seq("fid-1")
featuresEmpty.map(_.getID) mustEqual Seq("fid-1")
}
"handle out-of-world bboxes" >> {
val sft = createNewSchema("name:String,*geom:Point:srid=4326", None)
val typeName = sft.getTypeName
val feature = ScalaSimpleFeature.create(sft, "1", "name1", "POINT (-100.236523 23)")
addFeature(sft, feature)
// example from geoserver open-layers preview
val ecql = "BBOX(geom, 254.17968736588955,16.52343763411045,264.02343736588955,26.36718763411045) OR " +
"BBOX(geom, -105.82031263411045,16.52343763411045,-95.97656263411045,26.36718763411045)"
val fs = ds.getFeatureSource(typeName)
val result = SelfClosingIterator(fs.getFeatures(new Query(typeName, ECQL.toFilter(ecql))).features).toList
result must haveLength(1)
result.head mustEqual feature
}
"process an OR query correctly obeying inclusion-exclusion principle" >> {
val sft = createNewSchema("name:String,geom:Point:srid=4326,dtg:Date")
val randVal: (Double, Double) => Double = {
val r = new Random(System.nanoTime())
(low, high) => {
(r.nextDouble() * (high - low)) + low
}
}
val features = (0 until 1000).map { i =>
val lat = randVal(-0.001, 0.001)
val lon = randVal(-0.001, 0.001)
ScalaSimpleFeature.create(sft, s"fid-$i", "testType", s"POINT($lat $lon)")
}
addFeatures(sft, features)
val fs = ds.getFeatureSource(sft.getTypeName)
val geomFactory = JTSFactoryFinder.getGeometryFactory
val urq = ff.dwithin(ff.property("geom"),
ff.literal(geomFactory.createPoint(new Coordinate( 0.0005, 0.0005))), 150.0, "meters")
val llq = ff.dwithin(ff.property("geom"),
ff.literal(geomFactory.createPoint(new Coordinate(-0.0005, -0.0005))), 150.0, "meters")
val orq = ff.or(urq, llq)
val andq = ff.and(urq, llq)
val urQuery = new Query(sft.getTypeName, urq)
val llQuery = new Query(sft.getTypeName, llq)
val orQuery = new Query(sft.getTypeName, orq)
val andQuery = new Query(sft.getTypeName, andq)
val urNum = SelfClosingIterator(fs.getFeatures(urQuery).features).length
val llNum = SelfClosingIterator(fs.getFeatures(llQuery).features).length
val orNum = SelfClosingIterator(fs.getFeatures(orQuery).features).length
val andNum = SelfClosingIterator(fs.getFeatures(andQuery).features).length
(urNum + llNum) mustEqual (orNum + andNum)
}
"process 'exists' queries correctly" in {
val fs = ds.getFeatureSource(defaultSft.getTypeName)
val exists = ECQL.toFilter("name EXISTS")
val doesNotExist = ECQL.toFilter("name DOES-NOT-EXIST")
val existsResults =
SelfClosingIterator(fs.getFeatures(new Query(defaultSft.getTypeName, exists)).features).toList
val doesNotExistResults =
SelfClosingIterator(fs.getFeatures(new Query(defaultSft.getTypeName, doesNotExist)).features).toList
existsResults must haveLength(1)
existsResults.head.getID mustEqual "fid-1"
doesNotExistResults must beEmpty
}
"handle between intra-day queries" in {
val filter =
CQL.toFilter("bbox(geom,40,40,60,60) AND dtg BETWEEN '2010-05-07T12:00:00.000Z' AND '2010-05-07T13:00:00.000Z'")
val query = new Query(defaultSft.getTypeName, filter)
val features = SelfClosingIterator(ds.getFeatureSource(defaultSft.getTypeName).getFeatures(query).features).toList
features.map(DataUtilities.encodeFeature) mustEqual List("fid-1=name1|POINT (45 49)|2010-05-07T12:30:00.000Z")
}
"handle 1s duration queries" in {
val filter = CQL.toFilter("bbox(geom,40,40,60,60) AND dtg DURING 2010-05-07T12:30:00.000Z/T1S")
val query = new Query(defaultSft.getTypeName, filter)
val features = SelfClosingIterator(ds.getFeatureSource(defaultSft.getTypeName).getFeatures(query).features).toList
features.map(DataUtilities.encodeFeature) mustEqual List("fid-1=name1|POINT (45 49)|2010-05-07T12:30:00.000Z")
}
"handle large ranges" in {
skipped("takes ~10 seconds")
val filter = ECQL.toFilter("contains(POLYGON ((40 40, 50 40, 50 50, 40 50, 40 40)), geom) AND " +
"dtg BETWEEN '2010-01-01T00:00:00.000Z' AND '2010-12-31T23:59:59.000Z'")
val query = new Query(defaultSft.getTypeName, filter)
val features = SelfClosingIterator(ds.getFeatureSource(defaultSft.getTypeName).getFeatures(query).features).toList
features.map(DataUtilities.encodeFeature) mustEqual List("fid-1=name1|POINT (45 49)|2010-05-07T12:30:00.000Z")
}
"handle out-of-bound longitude and in-bounds latitude bboxes" in {
val filter = ECQL.toFilter("BBOX(geom, -266.8359375,-75.5859375,279.4921875,162.7734375)")
val query = new Query(defaultSft.getTypeName, filter)
val features = SelfClosingIterator(ds.getFeatureSource(defaultSft.getTypeName).getFeatures(query).features).toList
features.map(DataUtilities.encodeFeature) mustEqual List("fid-1=name1|POINT (45 49)|2010-05-07T12:30:00.000Z")
}
"handle requests with namespaces" in {
import AccumuloDataStoreParams.NamespaceParam
import scala.collection.JavaConversions._
val ns = "mytestns"
val typeName = "namespacetest"
val sft = SimpleFeatureTypes.createType(typeName, "geom:Point:srid=4326")
val sftWithNs = SimpleFeatureTypes.createType(ns, typeName, "geom:Point:srid=4326")
ds.createSchema(sftWithNs)
ds.getSchema(typeName) mustEqual sft
ds.getSchema(new NameImpl(ns, typeName)) mustEqual sft
val dsWithNs = DataStoreFinder.getDataStore(dsParams ++ Map(NamespaceParam.key -> "ns0"))
val name = dsWithNs.getSchema(typeName).getName
name.getNamespaceURI mustEqual "ns0"
name.getLocalPart mustEqual typeName
}
"handle cql functions" in {
val sftName = defaultSft.getTypeName
val filters = Seq("name = 'name1'", "IN('fid-1')", "bbox(geom, 44, 48, 46, 50)",
"bbox(geom, 44, 48, 46, 50) AND dtg DURING 2010-05-07T12:00:00.000Z/2010-05-07T13:00:00.000Z")
val positives = filters.map(f => new Query(sftName, ECQL.toFilter(s"$f AND geometryType(geom) = 'Point'")))
val negatives = filters.map(f => new Query(sftName, ECQL.toFilter(s"$f AND geometryType(geom) = 'Polygon'")))
val pStrategies = positives.map(ds.getQueryPlan(_))
val nStrategies = negatives.map(ds.getQueryPlan(_))
forall(pStrategies ++ nStrategies)(_ must haveLength(1))
pStrategies.map(_.head.filter.index) mustEqual Seq(AttributeIndex, RecordIndex, Z2Index, Z3Index)
nStrategies.map(_.head.filter.index) mustEqual Seq(AttributeIndex, RecordIndex, Z2Index, Z3Index)
forall(positives) { query =>
val result = SelfClosingIterator(ds.getFeatureSource(sftName).getFeatures(query).features).toList
result must haveLength(1)
result.head.getID mustEqual "fid-1"
}
forall(negatives) { query =>
val result = SelfClosingIterator(ds.getFeatureSource(sftName).getFeatures(query).features).toList
result must beEmpty
}
}
"handle ANDed Filter.INCLUDE" in {
val filter = ff.and(Filter.INCLUDE,
ECQL.toFilter("dtg DURING 2010-05-07T12:00:00.000Z/2010-05-07T13:00:00.000Z and bbox(geom,40,44,50,54)"))
val reader = ds.getFeatureReader(new Query(defaultSft.getTypeName, filter), Transaction.AUTO_COMMIT)
val features = SelfClosingIterator(reader).toList
features must haveLength(1)
features.head.getID mustEqual "fid-1"
}
"short-circuit disjoint geometry predicates" in {
val filter = ECQL.toFilter("bbox(geom,0,0,10,10) AND bbox(geom,20,20,30,30)")
val query = new Query(defaultSft.getTypeName, filter)
val plans = ds.getQueryPlan(query)
plans must haveLength(1)
plans.head must beAnInstanceOf[EmptyPlan]
val reader = ds.getFeatureReader(new Query(defaultSft.getTypeName, filter), Transaction.AUTO_COMMIT)
val features = SelfClosingIterator(reader).toList
features must beEmpty
}
"short-circuit disjoint date predicates" in {
val filter = ECQL.toFilter("dtg DURING 2010-05-07T12:00:00.000Z/2010-05-07T13:00:00.000Z AND " +
"dtg DURING 2010-05-07T15:00:00.000Z/2010-05-07T17:00:00.000Z AND bbox(geom,0,0,10,10)")
val query = new Query(defaultSft.getTypeName, filter)
val plans = ds.getQueryPlan(query)
plans must haveLength(1)
plans.head must beAnInstanceOf[EmptyPlan]
val reader = ds.getFeatureReader(new Query(defaultSft.getTypeName, filter), Transaction.AUTO_COMMIT)
val features = SelfClosingIterator(reader).toList
features must beEmpty
}
"support multi-polygon and bbox predicates" in {
val bbox = "bbox(geom,44.9,48.9,45.1,49.1)"
val intersects = "intersects(geom, 'MULTIPOLYGON (((40 40, 55 60, 20 60, 40 40)),((25 15, 50 20, 20 30, 15 20, 25 15)))')"
val dtg = "dtg DURING 2010-05-07T12:29:50.000Z/2010-05-07T12:30:10.000Z"
val dtg2 = "dtg DURING 2010-05-07T12:00:00.000Z/2010-05-07T13:00:00.000Z"
val queries = Seq(s"$bbox AND $dtg AND $intersects AND $dtg2", s"$intersects AND $dtg2 AND $bbox AND $dtg")
forall(queries) { ecql =>
val query = new Query(defaultSft.getTypeName, ECQL.toFilter(ecql))
val reader = ds.getFeatureReader(query, Transaction.AUTO_COMMIT)
val features = SelfClosingIterator(reader).toList
features must haveLength(1)
features.head.getID mustEqual "fid-1"
}
}
"support complex OR queries" in {
val sft = createNewSchema("attr1:String,attr2:Double,dtg:Date,*geom:Point:srid=4326")
val date = "dtg > '2010-05-07T12:29:50.000Z' AND dtg < '2010-05-07T12:30:10.000Z'"
def attr(fuzz: Int) = s"(intersects(geom, POLYGON ((39.$fuzz 39.$fuzz, 44.$fuzz 39.$fuzz, 44.$fuzz 44.$fuzz, 39.$fuzz 44.$fuzz, 39.$fuzz 39.$fuzz))) AND attr1 like 'foo%' AND attr2 > 0.001 and attr2 < 2.001)"
val disjoint = "disjoint(geom, POLYGON ((40 40, 42 42, 42 44, 40 40)))"
val clauses = (0 until 128).map(attr).mkString(" OR ")
val filter = s"$date AND ($clauses) AND $disjoint"
val query = new Query(sft.getTypeName, ECQL.toFilter(filter))
val start = System.currentTimeMillis()
// makes sure this doesn't blow up
SelfClosingIterator(ds.getFeatureReader(query, Transaction.AUTO_COMMIT)).toList
// we give it 30 seconds due to weak build boxes
(System.currentTimeMillis() - start) must beLessThan(30000L)
}
"avoid deduplication when possible" in {
val sft = createNewSchema(s"name:String:index=join:cardinality=high,dtg:Date,*geom:Point:srid=4326")
addFeature(sft, ScalaSimpleFeature.create(sft, "1", "bob", "2010-05-07T12:00:00.000Z", "POINT(45 45)"))
val filter = "bbox(geom,-180,-90,180,90) AND dtg DURING 2010-05-07T00:00:00.000Z/2010-05-08T00:00:00.000Z" +
" AND (name = 'alice' OR name = 'bob' OR name = 'charlie')"
val query = new Query(sft.getTypeName, ECQL.toFilter(filter))
val plans = ds.getQueryPlan(query)
plans must haveLength(1)
plans.head.hasDuplicates must beFalse
plans.head must beAnInstanceOf[JoinPlan]
plans.head.asInstanceOf[JoinPlan].joinQuery.hasDuplicates must beFalse
val features = SelfClosingIterator(ds.getFeatureSource(sft.getTypeName).getFeatures(query).features).toList
features must haveLength(1)
features.head.getID mustEqual "1"
}
"support bin queries" in {
import org.locationtech.geomesa.utils.bin.BinaryOutputEncoder.BIN_ATTRIBUTE_INDEX
val sft = createNewSchema(s"name:String,dtg:Date,*geom:Point:srid=4326")
addFeature(sft, ScalaSimpleFeature.create(sft, "1", "name1", "2010-05-07T00:00:00.000Z", "POINT(45 45)"))
addFeature(sft, ScalaSimpleFeature.create(sft, "2", "name2", "2010-05-07T01:00:00.000Z", "POINT(45 45)"))
val query = new Query(sft.getTypeName, ECQL.toFilter("BBOX(geom,40,40,50,50)"))
query.getHints.put(BIN_TRACK, "name")
query.getHints.put(BIN_BATCH_SIZE, 1000)
val queryPlanner = new AccumuloQueryPlanner(ds)
val results = queryPlanner.runQuery(sft, query, Some(Z2Index), ExplainNull).map(_.getAttribute(BIN_ATTRIBUTE_INDEX)).toSeq
forall(results)(_ must beAnInstanceOf[Array[Byte]])
val bins = results.flatMap(_.asInstanceOf[Array[Byte]].grouped(16).map(BinaryOutputEncoder.decode))
bins must haveSize(2)
bins.map(_.trackId) must containAllOf(Seq("name1", "name2").map(_.hashCode))
}
// BIN queries over linestrings: each coordinate of the line becomes one bin record,
// with per-coordinate timestamps taken from the 'dtgs' list attribute (4 + 3 = 7 bins).
// Both a small batch size (2) and a large one (1000) must produce the same records.
"support bin queries with linestrings" in {
import org.locationtech.geomesa.utils.bin.BinaryOutputEncoder.BIN_ATTRIBUTE_INDEX
val sft = createNewSchema(s"name:String,dtgs:List[Date],dtg:Date,*geom:LineString:srid=4326")
val dtgs1 = new java.util.ArrayList[Date]
dtgs1.add(Converters.convert("2010-05-07T00:00:00.000Z", classOf[Date]))
dtgs1.add(Converters.convert("2010-05-07T00:01:00.000Z", classOf[Date]))
dtgs1.add(Converters.convert("2010-05-07T00:02:00.000Z", classOf[Date]))
dtgs1.add(Converters.convert("2010-05-07T00:03:00.000Z", classOf[Date]))
val dtgs2 = new java.util.ArrayList[Date]
dtgs2.add(Converters.convert("2010-05-07T01:00:00.000Z", classOf[Date]))
dtgs2.add(Converters.convert("2010-05-07T01:01:00.000Z", classOf[Date]))
dtgs2.add(Converters.convert("2010-05-07T01:02:00.000Z", classOf[Date]))
addFeature(sft, ScalaSimpleFeature.create(sft, "1", "name1", dtgs1, "2010-05-07T00:00:00.000Z", "LINESTRING(40 41, 42 43, 44 45, 46 47)"))
addFeature(sft, ScalaSimpleFeature.create(sft, "2", "name2", dtgs2, "2010-05-07T01:00:00.000Z", "LINESTRING(50 50, 51 51, 52 52)"))
forall(Seq(2, 1000)) { batch =>
val query = new Query(sft.getTypeName, ECQL.toFilter("BBOX(geom,40,40,55,55)"))
query.getHints.put(BIN_TRACK, "name")
query.getHints.put(BIN_BATCH_SIZE, batch)
query.getHints.put(BIN_DTG, "dtgs")
val bytes = SelfClosingIterator(ds.getFeatureSource(sft.getTypeName).getFeatures(query).features).map(_.getAttribute(BIN_ATTRIBUTE_INDEX)).toList
forall(bytes)(_ must beAnInstanceOf[Array[Byte]])
val bins = bytes.flatMap(_.asInstanceOf[Array[Byte]].grouped(16).map(BinaryOutputEncoder.decode))
bins must haveSize(7)
// sort by timestamp so decoded records line up with the insertion order of coordinates;
// EncodedValues is (trackId, lat, lon, dtg, label) — note lat/lon vs the WKT x/y order
val sorted = bins.sortBy(_.dtg)
sorted(0) mustEqual EncodedValues("name1".hashCode, 41, 40, dtgs1(0).getTime, -1L)
sorted(1) mustEqual EncodedValues("name1".hashCode, 43, 42, dtgs1(1).getTime, -1L)
sorted(2) mustEqual EncodedValues("name1".hashCode, 45, 44, dtgs1(2).getTime, -1L)
sorted(3) mustEqual EncodedValues("name1".hashCode, 47, 46, dtgs1(3).getTime, -1L)
sorted(4) mustEqual EncodedValues("name2".hashCode, 50, 50, dtgs2(0).getTime, -1L)
sorted(5) mustEqual EncodedValues("name2".hashCode, 51, 51, dtgs2(1).getTime, -1L)
sorted(6) mustEqual EncodedValues("name2".hashCode, 52, 52, dtgs2(2).getTime, -1L)
}
}
// IN-predicate + bbox on an attribute with no secondary index: must still return
// both features via the spatial index with the attribute filter applied post-scan.
"support IN queries without dtg on non-indexed string attributes" in {
val sft = createNewSchema(s"name:String,dtg:Date,*geom:Point:srid=4326")
addFeature(sft, ScalaSimpleFeature.create(sft, "1", "name1", "2010-05-07T00:00:00.000Z", "POINT(45 45)"))
addFeature(sft, ScalaSimpleFeature.create(sft, "2", "name2", "2010-05-07T01:00:00.000Z", "POINT(45 46)"))
val filter = ECQL.toFilter("name IN('name1','name2') AND BBOX(geom, 40.0,40.0,50.0,50.0)")
val query = new Query(sft.getTypeName, filter)
val features = SelfClosingIterator(ds.getFeatureSource(sft.getTypeName).getFeatures(query).features).toList
features.map(DataUtilities.encodeFeature) must containTheSameElementsAs {
List("1=name1|2010-05-07T00:00:00.000Z|POINT (45 45)", "2=name2|2010-05-07T01:00:00.000Z|POINT (45 46)")
}
}
// Same IN-predicate shape, but 'name' carries a join index; a whole-world bbox makes
// the attribute index the natural choice, and both features must still come back.
"support IN queries without dtg on indexed string attributes" in {
val sft = createNewSchema("name:String:index=join,dtg:Date,*geom:Point:srid=4326")
addFeature(sft, ScalaSimpleFeature.create(sft, "1", "name1", "2010-05-07T00:00:00.000Z", "POINT(45 45)"))
addFeature(sft, ScalaSimpleFeature.create(sft, "2", "name2", "2010-05-07T01:00:00.000Z", "POINT(45 46)"))
val filter = ECQL.toFilter("name IN('name1','name2') AND BBOX(geom, -180.0,-90.0,180.0,90.0)")
val query = new Query(sft.getTypeName, filter)
val features = SelfClosingIterator(ds.getFeatureSource(sft.getTypeName).getFeatures(query).features).toList
features.map(DataUtilities.encodeFeature).sorted mustEqual List("1=name1|2010-05-07T00:00:00.000Z|POINT (45 45)", "2=name2|2010-05-07T01:00:00.000Z|POINT (45 46)").sorted
}
// A data store configured with a 1s query timeout must close its readers automatically;
// `eventually(20, 200.millis)` polls for up to ~4s to tolerate slow build machines.
"kill queries after a configurable timeout" in {
import scala.concurrent.duration._
val params = dsParams ++ Map(AccumuloDataStoreParams.QueryTimeoutParam.getName -> "1s")
val dsWithTimeout = DataStoreFinder.getDataStore(params).asInstanceOf[AccumuloDataStore]
val reader = dsWithTimeout.getFeatureReader(new Query(defaultSft.getTypeName, Filter.INCLUDE), Transaction.AUTO_COMMIT)
reader.isClosed must beFalse
eventually(20, 200.millis)(reader.isClosed must beTrue)
}
// Covers the BlockFullTableScans switch: selective filters must keep working while
// full-scan filters throw, and the per-type system property can override in either
// direction. Thread-local + system-property state is cleaned up in the finally block.
"block full table scans" in {
val sft = createNewSchema("name:String:index=join,age:Int,geom:Point:srid=4326,dtg:Date")
val feature = ScalaSimpleFeature.create(sft, "fid-1", "name1", "23", "POINT(45 49)", "2010-05-07T12:30:00.000Z")
addFeature(sft, feature)
// each of these filters can hit an index, so none of them requires a full scan
val filters = Seq(
"IN ('fid-1')",
"name = 'name1'",
"name IN ('name1', 'name2')",
"bbox(geom,44,48,46,50)",
"bbox(geom,44,48,46,50) AND age < 25",
"dtg during 2010-05-07T12:25:00.000Z/2010-05-07T12:35:00.000Z",
"bbox(geom,44,48,46,50) AND dtg during 2010-05-07T12:25:00.000Z/2010-05-07T12:35:00.000Z AND age = 23"
)
// neither of these filters can be satisfied without scanning the whole table
val fullScans = Seq("INCLUDE", "age = 23")
// test that blocking full table scans doesn't interfere with regular queries
QueryProperties.BlockFullTableScans.threadLocalValue.set("true")
try {
foreach(filters) { filter =>
val query = new Query(sft.getTypeName, ECQL.toFilter(filter))
val features = SelfClosingIterator(ds.getFeatureSource(sft.getTypeName).getFeatures(query).features).toList
features mustEqual List(feature)
}
foreach(fullScans) { filter =>
val query = new Query(sft.getTypeName, ECQL.toFilter(filter))
ds.getFeatureSource(sft.getTypeName).getFeatures(query).features must throwA[RuntimeException]
}
// verify that we can override individually
System.setProperty(s"geomesa.scan.${sft.getTypeName}.block-full-table", "false")
foreach(fullScans) { filter =>
val query = new Query(sft.getTypeName, ECQL.toFilter(filter))
val features = SelfClosingIterator(ds.getFeatureSource(sft.getTypeName).getFeatures(query).features).toList
features mustEqual List(feature)
}
// verify that we can also block individually
QueryProperties.BlockFullTableScans.threadLocalValue.remove()
System.setProperty(s"geomesa.scan.${sft.getTypeName}.block-full-table", "true")
foreach(fullScans) { filter =>
val query = new Query(sft.getTypeName, ECQL.toFilter(filter))
ds.getFeatureSource(sft.getTypeName).getFeatures(query).features must throwA[RuntimeException]
}
} finally {
QueryProperties.BlockFullTableScans.threadLocalValue.remove()
System.clearProperty(s"geomesa.scan.${sft.getTypeName}.block-full-table")
}
}
// The chosen index can be forced three ways: the QUERY_INDEX hint, and the
// "QUERY_INDEX" / "STRATEGY" keys of the view-params map. Each must yield a single
// plan on the requested index that still returns the expected feature.
"allow query strategy to be specified via view params" in {
val filter = "BBOX(geom,40,40,50,50) and dtg during 2010-05-07T00:00:00.000Z/2010-05-08T00:00:00.000Z and name='name1'"
val query = new Query(defaultSft.getTypeName, ECQL.toFilter(filter))
def expectStrategy(strategy: AccumuloFeatureIndexType) = {
val plans = ds.getQueryPlan(query)
plans must haveLength(1)
plans.head.filter.index mustEqual strategy
val res = SelfClosingIterator(ds.getFeatureSource(defaultSft.getTypeName).getFeatures(query).features).map(_.getID).toList
res must containTheSameElementsAs(Seq("fid-1"))
}
forall(Seq(AttributeIndex, Z2Index, Z3Index, RecordIndex)) { index =>
query.getHints.put(QUERY_INDEX, index.identifier)
expectStrategy(index)
query.getHints.remove(QUERY_INDEX)
query.getHints.put(Hints.VIRTUAL_TABLE_PARAMETERS, Collections.singletonMap("QUERY_INDEX", index.name))
expectStrategy(index)
// NOTE(review): VIRTUAL_TABLE_PARAMETERS is never removed here, only overwritten by the
// next put (and QUERY_INDEX is removed a second time) — presumably the explicit hint
// takes precedence on the next iteration; confirm that ordering is intended.
query.getHints.remove(QUERY_INDEX)
query.getHints.put(Hints.VIRTUAL_TABLE_PARAMETERS, Collections.singletonMap("STRATEGY", index.name))
expectStrategy(index)
}
}
// Loose-vs-strict bbox matching: the bbox is chosen so that index-precision ("loose")
// matching returns a feature while the exact geometry check excludes it — inferred
// from the paired beTrue/beFalse expectations below; verify against defaultSft data.
// Covers the data-store-param default and per-query hint overrides, for z2 and z3.
"allow for loose bounding box config" >> {
val bbox = "bbox(geom,45.000000001,49.000000001,46,50)"
val z2Query = new Query(defaultSft.getTypeName, ECQL.toFilter(bbox))
val z3Query = new Query(defaultSft.getTypeName,
ECQL.toFilter(s"$bbox AND dtg DURING 2010-05-07T12:25:00.000Z/2010-05-07T12:35:00.000Z"))
// a second store over the same data, but with loose bbox disabled
val params = dsParams ++ Map(AccumuloDataStoreParams.LooseBBoxParam.getName -> "false")
val strictDs = DataStoreFinder.getDataStore(params).asInstanceOf[AccumuloDataStore]
"with loose bbox as default" >> {
"for z2 index" >> {
val looseReader = ds.getFeatureReader(z2Query, Transaction.AUTO_COMMIT)
try {
looseReader.hasNext must beTrue
} finally {
looseReader.close()
}
}
"for z3 index" >> {
val looseReader = ds.getFeatureReader(z3Query, Transaction.AUTO_COMMIT)
try {
looseReader.hasNext must beTrue
} finally {
looseReader.close()
}
}
}
"with strict configuration through data store params" >> {
"for z2 index" >> {
val strictReader = strictDs.getFeatureReader(z2Query, Transaction.AUTO_COMMIT)
try {
strictReader.hasNext must beFalse
} finally {
strictReader.close()
}
}
"for z3 index" >> {
val strictReader = strictDs.getFeatureReader(z3Query, Transaction.AUTO_COMMIT)
try {
strictReader.hasNext must beFalse
} finally {
strictReader.close()
}
}
}
"with query hints" >> {
"overriding loose config" >> {
"for z2 index" >> {
val strictZ2Query = new Query(z2Query)
strictZ2Query.getHints.put(QueryHints.LOOSE_BBOX, java.lang.Boolean.FALSE)
val strictReader = ds.getFeatureReader(strictZ2Query, Transaction.AUTO_COMMIT)
try {
strictReader.hasNext must beFalse
} finally {
strictReader.close()
}
}
"for z3 index" >> {
val strictZ3Query = new Query(z3Query)
strictZ3Query.getHints.put(QueryHints.LOOSE_BBOX, java.lang.Boolean.FALSE)
val strictReader = ds.getFeatureReader(strictZ3Query, Transaction.AUTO_COMMIT)
try {
strictReader.hasNext must beFalse
} finally {
strictReader.close()
}
}
}
"overriding strict config" >> {
"for z2 index" >> {
val looseZ2Query = new Query(z2Query)
looseZ2Query.getHints.put(QueryHints.LOOSE_BBOX, java.lang.Boolean.TRUE)
val looseReader = strictDs.getFeatureReader(looseZ2Query, Transaction.AUTO_COMMIT)
try {
looseReader.hasNext must beTrue
} finally {
looseReader.close()
}
}
"for z3 index" >> {
val looseZ3Query = new Query(z3Query)
looseZ3Query.getHints.put(QueryHints.LOOSE_BBOX, java.lang.Boolean.TRUE)
val looseReader = strictDs.getFeatureReader(looseZ3Query, Transaction.AUTO_COMMIT)
try {
looseReader.hasNext must beTrue
} finally {
looseReader.close()
}
}
}
}
// NOTE(review): brace-matching places this example *inside* the "allow for loose
// bounding box config" block rather than at the top level — likely unintentional
// nesting; it does not depend on the loose-bbox fixtures above.
"be able to run explainQuery" in {
val filter = ECQL.toFilter("INTERSECTS(geom, POLYGON ((41 28, 42 28, 42 29, 41 29, 41 28)))")
val query = new Query(defaultSft.getTypeName, filter)
val out = new ExplainString()
ds.getQueryPlan(query, explainer = out)
val explanation = out.toString()
explanation must not be null
explanation.trim must not(beEmpty)
}
}
// Query.ALL has no type name, so both read paths must reject it up front.
"handle Query.ALL" in {
ds.getFeatureSource(defaultSft.getTypeName).getFeatures(Query.ALL).features() must throwAn[IllegalArgumentException]
ds.getFeatureReader(Query.ALL, Transaction.AUTO_COMMIT) must throwAn[IllegalArgumentException]
}
}
}
| ddseapy/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/data/AccumuloDataStoreQueryTest.scala | Scala | apache-2.0 | 32,609 |
package com.proinnovate.activityscheduler
import org.joda.time.DateTime
import org.scalatest.FunSuite
object SlotTest {

  // Shared fixture: four one-hour slots on 14 Feb 2015, with a gap between 1pm and 2pm.
  val overallSlots: Set[Slot] = {
    // All slots fall on the same day; only the hour varies.
    def hour(h: Int): DateTime = new DateTime(2015, 2, 14, h, 0)
    Set(
      Slot("11am-12noon", hour(11), hour(12)),
      Slot("12noon-1pm", hour(12), hour(13)),
      Slot("2pm-3pm", hour(14), hour(15)),
      Slot("3pm-4pm", hour(15), hour(16))
    )
  }
}
| sroebuck/activity-scheduler | src/test/scala/com/proinnovate/activityscheduler/SlotTest.scala | Scala | apache-2.0 | 559 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.inject
package guice
import javax.inject.{ Inject, Provider, Singleton }
import com.google.inject.{ CreationException, ProvisionException }
import org.specs2.mutable.Specification
import play.api.i18n.I18nModule
import play.api.{ Configuration, Environment }
// Specs for GuiceApplicationBuilder: adding/overriding/disabling modules, custom
// configuration and module loaders, and eager vs lazy singleton instantiation.
// Fixture types (A/B/C, AModule, ExtendConfiguration) live in the companion object.
class GuiceApplicationBuilderSpec extends Specification {
"GuiceApplicationBuilder" should {
"add bindings" in {
val injector = new GuiceApplicationBuilder()
.bindings(
new GuiceApplicationBuilderSpec.AModule,
bind[GuiceApplicationBuilderSpec.B].to[GuiceApplicationBuilderSpec.B1])
.injector()
injector.instanceOf[GuiceApplicationBuilderSpec.A] must beAnInstanceOf[GuiceApplicationBuilderSpec.A1]
injector.instanceOf[GuiceApplicationBuilderSpec.B] must beAnInstanceOf[GuiceApplicationBuilderSpec.B1]
}
"override bindings" in {
// overrides() replaces both the Configuration provider and the A binding from AModule
val app = new GuiceApplicationBuilder()
.bindings(new GuiceApplicationBuilderSpec.AModule)
.overrides(
bind[Configuration] to new GuiceApplicationBuilderSpec.ExtendConfiguration("a" -> 1),
bind[GuiceApplicationBuilderSpec.A].to[GuiceApplicationBuilderSpec.A2])
.build()
app.configuration.get[Int]("a") must_== 1
app.injector.instanceOf[GuiceApplicationBuilderSpec.A] must beAnInstanceOf[GuiceApplicationBuilderSpec.A2]
}
"disable modules" in {
// disabling AModule removes the A binding, so lookup must fail
val injector = new GuiceApplicationBuilder()
.bindings(new GuiceApplicationBuilderSpec.AModule)
.disable(classOf[GuiceApplicationBuilderSpec.AModule])
.injector()
injector.instanceOf[GuiceApplicationBuilderSpec.A] must throwA[com.google.inject.ConfigurationException]
}
"set initial configuration loader" in {
val extraConfig = Configuration("a" -> 1)
val app = new GuiceApplicationBuilder()
.loadConfig(env => Configuration.load(env) ++ extraConfig)
.build()
app.configuration.get[Int]("a") must_== 1
}
"set module loader" in {
val injector = new GuiceApplicationBuilder()
.load((env, conf) => Seq(new BuiltinModule, new I18nModule, bind[GuiceApplicationBuilderSpec.A].to[GuiceApplicationBuilderSpec.A1]))
.injector()
injector.instanceOf[GuiceApplicationBuilderSpec.A] must beAnInstanceOf[GuiceApplicationBuilderSpec.A1]
}
"set loaded modules directly" in {
val injector = new GuiceApplicationBuilder()
.load(new BuiltinModule, new I18nModule, bind[GuiceApplicationBuilderSpec.A].to[GuiceApplicationBuilderSpec.A1])
.injector()
injector.instanceOf[GuiceApplicationBuilderSpec.A] must beAnInstanceOf[GuiceApplicationBuilderSpec.A1]
}
"eagerly load singletons" in {
// C1's constructor throws, so eager loading must fail at injector creation time
new GuiceApplicationBuilder()
.load(new BuiltinModule, new I18nModule, bind[GuiceApplicationBuilderSpec.C].to[GuiceApplicationBuilderSpec.C1])
.eagerlyLoaded()
.injector() must throwA[CreationException]
}
"work with built in modules and requireAtInjectOnConstructors" in {
new GuiceApplicationBuilder()
.load(new BuiltinModule, new I18nModule)
.requireAtInjectOnConstructors()
.eagerlyLoaded()
.injector() must not(throwA[CreationException])
}
"set lazy load singletons" in {
val builder = new GuiceApplicationBuilder()
.load(new BuiltinModule, new I18nModule, bind[GuiceApplicationBuilderSpec.C].to[GuiceApplicationBuilderSpec.C1])
// lazily-loaded: the throwing constructor only fires on first instanceOf lookup
builder.injector() must throwAn[CreationException].not
builder.injector().instanceOf[GuiceApplicationBuilderSpec.C] must throwAn[ProvisionException]
}
"display logger deprecation message" in {
// any recognized log level under a "logger*" path counts as deprecated usage
List("logger", "logger.resource", "logger.resource.test").forall { path =>
List("DEBUG", "WARN", "INFO", "ERROR", "TRACE", "OFF").forall { value =>
val data = Map(path -> value)
val builder = new GuiceApplicationBuilder()
builder.shouldDisplayLoggerDeprecationMessage(Configuration.from(data)) must_=== true
}
}
}
"not display logger deprecation message" in {
List("logger", "logger.resource", "logger.resource.test").forall { path =>
val data = Map(path -> "NOT_A_DEPRECATED_VALUE")
val builder = new GuiceApplicationBuilder()
builder.shouldDisplayLoggerDeprecationMessage(Configuration.from(data)) must_=== false
}
}
}
}
// Fixture types for the spec above.
object GuiceApplicationBuilderSpec {
// Configuration provider that extends the injector's base Configuration with extra
// entries. The Injector arrives via field injection, so `get` must stay lazy: it is
// only evaluated after Guice has populated `injector`.
class ExtendConfiguration(conf: (String, Any)*) extends Provider[Configuration] {
@Inject
var injector: Injector = _
lazy val get = {
val current = injector.instanceOf[ConfigurationProvider].get
current ++ Configuration.from(conf.toMap)
}
}
trait A
class A1 extends A
class A2 extends A
class AModule extends SimpleModule(bind[A].to[A1])
trait B
class B1 extends B
trait C
// Singleton whose constructor always throws — used to distinguish eager loading
// (fails at injector creation) from lazy loading (fails at first lookup).
@Singleton
class C1 extends C {
throw new EagerlyLoadedException
}
class EagerlyLoadedException extends RuntimeException
}
| aradchykov/playframework | framework/src/play-guice/src/test/scala/play/api/inject/guice/GuiceApplicationBuilderSpec.scala | Scala | apache-2.0 | 5,083 |
package com.mogproject.mogami.playground.view
import com.mogproject.mogami.frontend.view.footer.FooterLike
/**
 * Page footer for the playground view.
 *
 * Carries no behavior of its own — everything is inherited from [[FooterLike]].
 *
 * @param isDev   development-build flag — NOTE(review): semantics defined by FooterLike; confirm
 * @param isDebug debug-mode flag — NOTE(review): semantics defined by FooterLike; confirm
 */
case class Footer(isDev: Boolean, isDebug: Boolean) extends FooterLike | mogproject/mog-playground | src/main/scala/com/mogproject/mogami/playground/view/Footer.scala | Scala | apache-2.0 | 192
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, TableIdentifier}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, UnaryNode}
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.connector.catalog.{Identifier, TableCatalog}
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
import org.apache.spark.sql.types.{DataType, Metadata, StructType}
import org.apache.spark.sql.util.CaseInsensitiveStringMap
/**
 * Thrown when an invalid attempt is made to access a property of a tree that has yet to be fully
 * resolved.
 */
// e.g. reading `dataType` or `nullable` on an UnresolvedAttribute before analysis completes.
class UnresolvedException(function: String)
extends AnalysisException(s"Invalid call to $function on unresolved object")
/**
 * Holds the name of a relation that has yet to be looked up in a catalog.
 *
 * @param multipartIdentifier table name
 * @param options options to scan this relation. Only applicable to v2 table scan.
 */
case class UnresolvedRelation(
multipartIdentifier: Seq[String],
options: CaseInsensitiveStringMap = CaseInsensitiveStringMap.empty(),
override val isStreaming: Boolean = false)
extends LeafNode with NamedRelation {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
/** Returns a `.` separated name for this relation. */
def tableName: String = multipartIdentifier.quoted
override def name: String = tableName
// No attributes are known until the relation is looked up, so output is empty
// and the node is permanently unresolved.
override def output: Seq[Attribute] = Nil
override lazy val resolved = false
}
object UnresolvedRelation {

  /**
   * Builds an unresolved relation from a v1 [[TableIdentifier]], carrying scan options
   * and the streaming flag. The optional database becomes the leading name part.
   */
  def apply(
      tableIdentifier: TableIdentifier,
      extraOptions: CaseInsensitiveStringMap,
      isStreaming: Boolean): UnresolvedRelation = {
    val nameParts = tableIdentifier.database.toSeq :+ tableIdentifier.table
    UnresolvedRelation(nameParts, extraOptions, isStreaming)
  }

  /** Builds a non-streaming unresolved relation with no scan options. */
  def apply(tableIdentifier: TableIdentifier): UnresolvedRelation =
    apply(tableIdentifier, CaseInsensitiveStringMap.empty(), isStreaming = false)
}
/**
 * A variant of [[UnresolvedRelation]] which can only be resolved to a v2 relation
 * (`DataSourceV2Relation`), not v1 relation or temp view.
 *
 * @param originalNameParts the original table identifier name parts before catalog is resolved.
 * @param catalog The catalog which the table should be looked up from.
 * @param tableName The name of the table to look up.
 */
case class UnresolvedV2Relation(
originalNameParts: Seq[String],
catalog: TableCatalog,
tableName: Identifier)
extends LeafNode with NamedRelation {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
// Reports the pre-resolution name; output stays empty until the table is looked up.
override def name: String = originalNameParts.quoted
override def output: Seq[Attribute] = Nil
override lazy val resolved = false
}
/**
 * An inline table that has not been resolved yet. Once resolved, it is turned by the analyzer into
 * a [[org.apache.spark.sql.catalyst.plans.logical.LocalRelation]].
 *
 * @param names list of column names
 * @param rows expressions for the data
 */
case class UnresolvedInlineTable(
names: Seq[String],
rows: Seq[Seq[Expression]])
extends LeafNode {
// True once every cell expression in every row is resolved; the node itself still
// reports resolved = false until it is converted into a LocalRelation.
lazy val expressionsResolved: Boolean = rows.forall(_.forall(_.resolved))
override lazy val resolved = false
override def output: Seq[Attribute] = Nil
}
/**
 * A table-valued function, e.g.
 * {{{
 *   select id from range(10);
 *
 *   // Assign alias names
 *   select t.a from range(10) t(a);
 * }}}
 *
 * @param name qualified name of this table-value function
 * @param functionArgs list of function arguments
 * @param outputNames alias names of function output columns. If these names given, an analyzer
 *                    adds [[Project]] to rename the output columns.
 */
case class UnresolvedTableValuedFunction(
name: FunctionIdentifier,
functionArgs: Seq[Expression],
outputNames: Seq[String])
extends LeafNode {
// Output is unknown until the function is resolved to a concrete relation.
override def output: Seq[Attribute] = Nil
override lazy val resolved = false
}
object UnresolvedTableValuedFunction {

  /** Convenience factory wrapping a bare function name in a [[FunctionIdentifier]]. */
  def apply(
      name: String,
      functionArgs: Seq[Expression],
      outputNames: Seq[String]): UnresolvedTableValuedFunction = {
    val identifier = FunctionIdentifier(name)
    UnresolvedTableValuedFunction(identifier, functionArgs, outputNames)
  }
}
/**
 * Holds the name of an attribute that has yet to be resolved.
 */
case class UnresolvedAttribute(nameParts: Seq[String]) extends Attribute with Unevaluable {
// Re-quotes any part containing a '.' with backticks so the joined name round-trips.
def name: String =
nameParts.map(n => if (n.contains(".")) s"`$n`" else n).mkString(".")
override def exprId: ExprId = throw new UnresolvedException("exprId")
override def dataType: DataType = throw new UnresolvedException("dataType")
override def nullable: Boolean = throw new UnresolvedException("nullable")
override def qualifier: Seq[String] = throw new UnresolvedException("qualifier")
override lazy val resolved = false
// The with*/newInstance methods all return `this` unchanged: there is nothing to
// carry (nullability, qualifier, metadata, exprId) before resolution, except
// withName, which rebuilds with the new name as a single quoted part.
override def newInstance(): UnresolvedAttribute = this
override def withNullability(newNullability: Boolean): UnresolvedAttribute = this
override def withQualifier(newQualifier: Seq[String]): UnresolvedAttribute = this
override def withName(newName: String): UnresolvedAttribute = UnresolvedAttribute.quoted(newName)
override def withMetadata(newMetadata: Metadata): Attribute = this
override def withExprId(newExprId: ExprId): UnresolvedAttribute = this
override def toString: String = s"'$name"
override def sql: String = nameParts.map(quoteIfNeeded(_)).mkString(".")
}
object UnresolvedAttribute {
  /**
   * Creates an [[UnresolvedAttribute]], parsing segments separated by dots ('.').
   */
  def apply(name: String): UnresolvedAttribute =
    new UnresolvedAttribute(CatalystSqlParser.parseMultipartIdentifier(name))

  /**
   * Creates an [[UnresolvedAttribute]], from a single quoted string (for example using backticks in
   * HiveQL. Since the string is consider quoted, no processing is done on the name.
   */
  def quoted(name: String): UnresolvedAttribute = new UnresolvedAttribute(Seq(name))

  /**
   * Creates an [[UnresolvedAttribute]] from a string in an embedded language. In this case
   * we treat it as a quoted identifier, except for '.', which must be further quoted using
   * backticks if it is part of a column name.
   */
  def quotedString(name: String): UnresolvedAttribute =
    new UnresolvedAttribute(parseAttributeName(name))

  /**
   * Used to split attribute name by dot with backticks rule.
   * Backticks must appear in pairs, and the quoted string must be a complete name part,
   * which means `ab..c`e.f is not allowed.
   * We can use backtick only inside quoted name parts.
   *
   * Single pass over the input: characters accumulate into `tmp`, and each '.' outside
   * backticks flushes `tmp` as one name part. Any malformed name raises the uniform
   * attribute-name syntax error `e`.
   */
  def parseAttributeName(name: String): Seq[String] = {
    def e = QueryCompilationErrors.attributeNameSyntaxError(name)
    val nameParts = scala.collection.mutable.ArrayBuffer.empty[String]
    val tmp = scala.collection.mutable.ArrayBuffer.empty[Char]
    var inBacktick = false
    var i = 0
    while (i < name.length) {
      val char = name(i)
      if (inBacktick) {
        if (char == '`') {
          if (i + 1 < name.length && name(i + 1) == '`') {
            // An escaped backtick ("``") inside a quoted part yields a literal '`'.
            tmp += '`'
            i += 1
          } else {
            inBacktick = false
            // A quoted part must be complete: only '.' (or end of input) may follow
            // the closing backtick.
            if (i + 1 < name.length && name(i + 1) != '.') throw e
          }
        } else {
          tmp += char
        }
      } else {
        if (char == '`') {
          // An opening backtick is only legal at the start of a name part.
          if (tmp.nonEmpty) throw e
          inBacktick = true
        } else if (char == '.') {
          // Reject a leading dot, an empty part (".."), and a trailing dot. The
          // `i == 0` guard also fixes an out-of-bounds read: the previous code
          // evaluated `name(i - 1)` for a name starting with '.', throwing
          // StringIndexOutOfBoundsException instead of the intended syntax error.
          if (i == 0 || name(i - 1) == '.' || i == name.length - 1) throw e
          nameParts += tmp.mkString
          tmp.clear()
        } else {
          tmp += char
        }
      }
      i += 1
    }
    // An unterminated quoted part is malformed.
    if (inBacktick) throw e
    nameParts += tmp.mkString
    nameParts.toSeq
  }
}
/**
 * Represents an unresolved generator, which will be created by the parser for
 * the [[org.apache.spark.sql.catalyst.plans.logical.Generate]] operator.
 * The analyzer will resolve this generator.
 */
case class UnresolvedGenerator(name: FunctionIdentifier, children: Seq[Expression])
extends Generator {
// Every accessor and evaluation path throws: this node is purely a parse-time
// placeholder and must be replaced by the analyzer before execution.
override def elementSchema: StructType = throw new UnresolvedException("elementTypes")
override def dataType: DataType = throw new UnresolvedException("dataType")
override def foldable: Boolean = throw new UnresolvedException("foldable")
override def nullable: Boolean = throw new UnresolvedException("nullable")
override lazy val resolved = false
override def prettyName: String = name.unquotedString
override def toString: String = s"'$name(${children.mkString(", ")})"
override def eval(input: InternalRow = null): TraversableOnce[InternalRow] =
throw QueryExecutionErrors.cannotEvaluateExpressionError(this)
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
throw QueryExecutionErrors.cannotGenerateCodeForExpressionError(this)
override def terminate(): TraversableOnce[InternalRow] =
throw QueryExecutionErrors.cannotTerminateGeneratorError(this)
override protected def withNewChildrenInternal(
newChildren: IndexedSeq[Expression]): UnresolvedGenerator = copy(children = newChildren)
}
// An unresolved function call, possibly with DISTINCT, a FILTER clause, and
// IGNORE NULLS. The optional filter expression is appended as the LAST child;
// withNewChildrenInternal below depends on that ordering.
case class UnresolvedFunction(
name: FunctionIdentifier,
arguments: Seq[Expression],
isDistinct: Boolean,
filter: Option[Expression] = None,
ignoreNulls: Boolean = false)
extends Expression with Unevaluable {
override def children: Seq[Expression] = arguments ++ filter.toSeq
override def dataType: DataType = throw new UnresolvedException("dataType")
override def nullable: Boolean = throw new UnresolvedException("nullable")
override lazy val resolved = false
override def prettyName: String = name.unquotedString
override def toString: String = {
val distinct = if (isDistinct) "distinct " else ""
s"'$name($distinct${children.mkString(", ")})"
}
override protected def withNewChildrenInternal(
newChildren: IndexedSeq[Expression]): UnresolvedFunction = {
// When a filter is present it occupies the last child slot; split it back out.
if (filter.isDefined) {
copy(arguments = newChildren.dropRight(1), filter = Some(newChildren.last))
} else {
copy(arguments = newChildren)
}
}
}
object UnresolvedFunction {

  /** Convenience factory for a function referenced by bare name (no database qualifier). */
  def apply(name: String, arguments: Seq[Expression], isDistinct: Boolean): UnresolvedFunction = {
    val identifier = FunctionIdentifier(name, None)
    UnresolvedFunction(identifier, arguments, isDistinct)
  }
}
/**
 * Represents all of the input attributes to a given relational operator, for example in
 * "SELECT * FROM ...". A [[Star]] gets automatically expanded during analysis.
 */
abstract class Star extends LeafExpression with NamedExpression {
// All NamedExpression accessors throw: a star only exists to be expanded; `expand`
// is the sole meaningful operation, implemented by each concrete subclass.
override def name: String = throw new UnresolvedException("name")
override def exprId: ExprId = throw new UnresolvedException("exprId")
override def dataType: DataType = throw new UnresolvedException("dataType")
override def nullable: Boolean = throw new UnresolvedException("nullable")
override def qualifier: Seq[String] = throw new UnresolvedException("qualifier")
override def toAttribute: Attribute = throw new UnresolvedException("toAttribute")
override def newInstance(): NamedExpression = throw new UnresolvedException("newInstance")
override lazy val resolved = false
def expand(input: LogicalPlan, resolver: Resolver): Seq[NamedExpression]
}
/**
 * Represents all of the input attributes to a given relational operator, for example in
 * "SELECT * FROM ...".
 *
 * This is also used to expand structs. For example:
 * "SELECT record.* from (SELECT struct(a,b,c) as record ...)
 *
 * @param target an optional name that should be the target of the expansion. If omitted all
 *              targets' columns are produced. This can either be a table name or struct name. This
 *              is a list of identifiers that is the path of the expansion.
 */
case class UnresolvedStar(target: Option[Seq[String]]) extends Star with Unevaluable {
/**
 * Returns true if the nameParts is a subset of the last elements of qualifier of the attribute.
 *
 * For example, the following should all return true:
 *   - `SELECT ns1.ns2.t.* FROM ns1.ns2.t` where nameParts is Seq("ns1", "ns2", "t") and
 *     qualifier is Seq("ns1", "ns2", "t").
 *   - `SELECT ns2.t.* FROM ns1.ns2.t` where nameParts is Seq("ns2", "t") and
 *     qualifier is Seq("ns1", "ns2", "t").
 *   - `SELECT t.* FROM ns1.ns2.t` where nameParts is Seq("t") and
 *     qualifier is Seq("ns1", "ns2", "t").
 */
private def matchedQualifier(
attribute: Attribute,
nameParts: Seq[String],
resolver: Resolver): Boolean = {
// Compare against the trailing nameParts.length elements of the qualifier,
// so shorter (suffix) qualifications still match.
val qualifierList = if (nameParts.length == attribute.qualifier.length) {
attribute.qualifier
} else {
attribute.qualifier.takeRight(nameParts.length)
}
nameParts.corresponds(qualifierList)(resolver)
}
def isQualifiedByTable(input: LogicalPlan, resolver: Resolver): Boolean = {
target.exists(nameParts => input.output.exists(matchedQualifier(_, nameParts, resolver)))
}
override def expand(
input: LogicalPlan,
resolver: Resolver): Seq[NamedExpression] = {
// If there is no table specified, use all non-hidden input attributes.
if (target.isEmpty) return input.output
// If there is a table specified, use hidden input attributes as well
val hiddenOutput = input.metadataOutput.filter(_.supportsQualifiedStar)
val expandedAttributes = (hiddenOutput ++ input.output).filter(
matchedQualifier(_, target.get, resolver))
if (expandedAttributes.nonEmpty) return expandedAttributes
// Try to resolve it as a struct expansion. If there is a conflict and both are possible,
// (i.e. [name].* is both a table and a struct), the struct path can always be qualified.
val attribute = input.resolve(target.get, resolver)
if (attribute.isDefined) {
// This target resolved to an attribute in child. It must be a struct. Expand it.
attribute.get.dataType match {
// each struct field becomes GetStructField aliased with the field's name
case s: StructType => s.zipWithIndex.map {
case (f, i) =>
val extract = GetStructField(attribute.get, i)
Alias(extract, f.name)()
}
case _ =>
throw QueryCompilationErrors.starExpandDataTypeNotSupportedError(target.get)
}
} else {
val from = input.inputSet.map(_.name).mkString(", ")
val targetString = target.get.mkString(".")
throw QueryCompilationErrors.cannotResolveStarExpandGivenInputColumnsError(
targetString, from)
}
}
override def toString: String = target.map(_ + ".").getOrElse("") + "*"
}
/**
 * Represents all of the input attributes to a given relational operator, for example in
 * "SELECT `(id)?+.+` FROM ...".
 *
 * @param table an optional table that should be the target of the expansion. If omitted all
 *              tables' columns are produced.
 */
case class UnresolvedRegex(regexPattern: String, table: Option[String], caseSensitive: Boolean)
  extends Star with Unevaluable {

  override def expand(input: LogicalPlan, resolver: Resolver): Seq[NamedExpression] = {
    // Case-insensitive matching is delegated to the regex engine via the "(?i)" flag.
    val pattern = if (caseSensitive) regexPattern else s"(?i)$regexPattern"
    table match {
      // No table qualifier: keep every input attribute whose name matches the regex.
      case None =>
        input.output.filter(_.name.matches(pattern))
      // With a qualifier: restrict to attributes whose last qualifier part names the
      // table (per the resolver), then apply the regex to the attribute name.
      case Some(t) =>
        input.output.filter { attr =>
          attr.qualifier.nonEmpty && resolver(attr.qualifier.last, t) && attr.name.matches(pattern)
        }
    }
  }

  override def toString: String = table match {
    case Some(t) => t + "." + regexPattern
    case None => regexPattern
  }
}
/**
 * Assigns new names to a Generator's output (e.g. a Hive UDTF). For example the SQL
 * expression "stack(2, key, value, key, value) as (a, b)" is represented as
 * MultiAlias(stack_function, Seq(a, b)).
 *
 * @param child the computation being performed.
 * @param names the names to be associated with each output of computing [[child]].
 */
case class MultiAlias(child: Expression, names: Seq[String])
  extends UnaryExpression with NamedExpression with Unevaluable {

  // Placeholder node: it is never resolved, and none of the NamedExpression
  // members are available before the analyzer rewrites it.
  override lazy val resolved = false

  override def name: String = throw new UnresolvedException("name")
  override def exprId: ExprId = throw new UnresolvedException("exprId")
  override def qualifier: Seq[String] = throw new UnresolvedException("qualifier")
  override def dataType: DataType = throw new UnresolvedException("dataType")
  override def nullable: Boolean = throw new UnresolvedException("nullable")
  override def toAttribute: Attribute = throw new UnresolvedException("toAttribute")
  override def newInstance(): NamedExpression = throw new UnresolvedException("newInstance")

  override def toString: String = s"$child AS $names"

  override protected def withNewChildInternal(newChild: Expression): MultiAlias =
    copy(child = newChild)
}
/**
 * Represents all the resolved input attributes to a given relational operator.
 * Used in the DataFrame DSL, where the attributes are already known.
 *
 * @param expressions the already-resolved expressions the star expands to.
 */
case class ResolvedStar(expressions: Seq[NamedExpression]) extends Star with Unevaluable {

  // Expansion is trivial: the attribute list was fixed at construction time,
  // so the input plan and resolver are ignored.
  override def expand(input: LogicalPlan, resolver: Resolver): Seq[NamedExpression] =
    expressions

  override def newInstance(): NamedExpression = throw new UnresolvedException("newInstance")

  override def toString: String = expressions.mkString("ResolvedStar(", ", ", ")")
}
/**
 * Extracts a value or values from an Expression.
 *
 * @param child the expression to extract a value from; can be a Map, Array,
 *              Struct or array of Structs.
 * @param extraction describes the extraction: a Map key, Array index, or
 *                   Struct field name.
 */
case class UnresolvedExtractValue(child: Expression, extraction: Expression)
  extends BinaryExpression with Unevaluable {

  // BinaryExpression wiring: the container is the left operand, the key/index
  // the right one.
  override def left: Expression = child
  override def right: Expression = extraction

  // Unresolved placeholder: type information is unavailable until analysis.
  override lazy val resolved = false
  override def dataType: DataType = throw new UnresolvedException("dataType")
  override def nullable: Boolean = throw new UnresolvedException("nullable")

  override def toString: String = s"$child[$extraction]"
  override def sql: String = s"${child.sql}[${extraction.sql}]"

  override protected def withNewChildrenInternal(
      newLeft: Expression, newRight: Expression): UnresolvedExtractValue =
    copy(child = newLeft, extraction = newRight)
}
/**
 * Holds an expression that has yet to be aliased.
 *
 * @param child the computation that needs to be resolved during analysis.
 * @param aliasFunc optional function invoked to generate an alias for the
 *                  result of computing [[child]].
 */
case class UnresolvedAlias(
    child: Expression,
    aliasFunc: Option[Expression => String] = None)
  extends UnaryExpression with NamedExpression with Unevaluable {

  // Placeholder node: no NamedExpression member is defined before the
  // analyzer replaces it with a concrete Alias.
  override lazy val resolved = false

  override def name: String = throw new UnresolvedException("name")
  override def exprId: ExprId = throw new UnresolvedException("exprId")
  override def qualifier: Seq[String] = throw new UnresolvedException("qualifier")
  override def dataType: DataType = throw new UnresolvedException("dataType")
  override def nullable: Boolean = throw new UnresolvedException("nullable")
  override def toAttribute: Attribute = throw new UnresolvedException("toAttribute")
  override def newInstance(): NamedExpression = throw new UnresolvedException("newInstance")

  override protected def withNewChildInternal(newChild: Expression): UnresolvedAlias =
    copy(child = newChild)
}
/**
 * Aliased column names resolved by positions for subquery. We could add alias names for output
 * columns in the subquery:
 * {{{
 *   // Assign alias names for output columns
 *   SELECT col1, col2 FROM testData AS t(col1, col2);
 * }}}
 *
 * @param outputColumnNames the alias names, matched by position, for the subquery's
 *                          output columns. (The previous doc described this as "the
 *                          LogicalPlan" — the two @param descriptions were swapped.)
 * @param child the logical plan of this subquery on which the column aliases apply.
 */
case class UnresolvedSubqueryColumnAliases(
    outputColumnNames: Seq[String],
    child: LogicalPlan)
  extends UnaryNode {

  // Nothing is output until the aliases are resolved against the child's output.
  override def output: Seq[Attribute] = Nil

  override lazy val resolved = false

  override protected def withNewChildInternal(
      newChild: LogicalPlan): UnresolvedSubqueryColumnAliases = copy(child = newChild)
}
/**
 * Holds the deserializer expression and the attributes that are available during the resolution
 * for it. Deserializer expression is a special kind of expression that is not always resolved by
 * children output, but by given attributes, e.g. the `keyDeserializer` in `MapGroups` should be
 * resolved by `groupingAttributes` instead of children output.
 *
 * @param deserializer The unresolved deserializer expression
 * @param inputAttributes The input attributes used to resolve deserializer expression, can be empty
 *                        if we want to resolve deserializer by children output.
 */
case class UnresolvedDeserializer(deserializer: Expression, inputAttributes: Seq[Attribute] = Nil)
  extends UnaryExpression with Unevaluable with NonSQLExpression {

  // The input attributes used to resolve deserializer expression must be all resolved.
  require(inputAttributes.forall(_.resolved), "Input attributes must all be resolved.")

  // The deserializer doubles as the single child of this UnaryExpression.
  override def child: Expression = deserializer

  // Unresolved placeholder: no type information before analysis.
  override def dataType: DataType = throw new UnresolvedException("dataType")
  override def nullable: Boolean = throw new UnresolvedException("nullable")
  override lazy val resolved = false

  override protected def withNewChildInternal(newChild: Expression): UnresolvedDeserializer =
    copy(deserializer = newChild)
}
/**
 * Placeholder referencing a column by its position in the child's output rather
 * than by name; replaced during analysis. The expected [[DataType]] is carried
 * along for validation.
 */
case class GetColumnByOrdinal(ordinal: Int, dataType: DataType) extends LeafExpression
  with Unevaluable with NonSQLExpression {
  override def nullable: Boolean = throw new UnresolvedException("nullable")
  override lazy val resolved = false
}

/**
 * Placeholder referencing a view's output column by name and ordinal; replaced
 * during analysis.
 *
 * NOTE(review): `expectedNumCandidates` is presumably the number of columns with
 * this name the resolution may match — confirm against the analyzer rule that
 * consumes this node; the visible code does not use the field.
 */
case class GetViewColumnByNameAndOrdinal(
    viewName: String,
    colName: String,
    ordinal: Int,
    expectedNumCandidates: Int)
  extends LeafExpression with Unevaluable with NonSQLExpression {
  override def dataType: DataType = throw new UnresolvedException("dataType")
  override def nullable: Boolean = throw new UnresolvedException("nullable")
  override lazy val resolved = false
}
/**
 * Represents unresolved ordinal used in order by or group by.
 *
 * For example:
 * {{{
 *   select a from table order by 1
 *   select a from table group by 1
 * }}}
 * @param ordinal ordinal starts from 1, instead of 0
 */
case class UnresolvedOrdinal(ordinal: Int)
  extends LeafExpression with Unevaluable with NonSQLExpression {
  override def dataType: DataType = throw new UnresolvedException("dataType")
  override def nullable: Boolean = throw new UnresolvedException("nullable")
  override lazy val resolved = false
}

/**
 * Represents unresolved having clause, the child for it can be Aggregate, GroupingSets, Rollup
 * and Cube. It is turned by the analyzer into a Filter.
 */
case class UnresolvedHaving(
    havingCondition: Expression,
    child: LogicalPlan)
  extends UnaryNode {
  override lazy val resolved: Boolean = false
  // Pass the child's output through unchanged; HAVING only filters rows.
  override def output: Seq[Attribute] = child.output
  override protected def withNewChildInternal(newChild: LogicalPlan): UnresolvedHaving =
    copy(child = newChild)
}

/**
 * A place holder expression used in random functions, will be replaced after analyze.
 */
case object UnresolvedSeed extends LeafExpression with Unevaluable {
  override def nullable: Boolean = throw new UnresolvedException("nullable")
  override def dataType: DataType = throw new UnresolvedException("dataType")
  override lazy val resolved = false
}
| BryanCutler/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala | Scala | apache-2.0 | 24,648 |
package rere.sasl.scram.parsers

import org.parboiled2.{Parser, ParserInput}

/**
 * parboiled2 parser over a SCRAM message. The grammar rules themselves come
 * from the mixed-in RFC5802Parser trait (RFC 5802 defines SCRAM).
 */
class SCRAMParser(val input: ParserInput)
  extends Parser
  with RFC5802Parser
| pbaun/rere | modules/sasl/src/main/scala/rere/sasl/scram/parsers/SCRAMParser.scala | Scala | apache-2.0 | 158 |
package us.newsrdr.models
import us.newsrdr.tasks._
import us.newsrdr._
import slick.driver.{JdbcDriver, JdbcProfile, H2Driver, MySQLDriver}
import slick.jdbc.meta.{MTable}
import slick.jdbc.GetResult
import java.sql.Timestamp
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Await
import scala.concurrent.duration._
/** Aggregate site-wide usage counters (total users/feeds, users active in the last week/day). */
case class SiteStatistics(numUsers: Int, numFeeds: Int, numUsersInLastWeek: Int, numUsersInLastDay: Int)

/** A blog post written by a site admin; `id` is None until the row is inserted. */
case class BlogEntry(id: Option[Int], authorId: Int, postDate: Timestamp, subject: String, body: String)
class DataTables(val driver: JdbcProfile) {
import driver.api._
// The amount we need to subtract the add date by so we
// don't end up getting posts that are years old in the
// results. (Two weeks, expressed in milliseconds.)
val OLDEST_POST_DIFFERENCE_MS : Long = 1000 * 60 * 60 * 24 * 14
// Same window in seconds — use THIS one when comparing against
// UNIX_TIMESTAMP() results, which are in seconds.
val OLDEST_POST_DIFFERENCE_SEC = OLDEST_POST_DIFFERENCE_MS / 1000

// UNIX_TIMESTAMP support: lifts the SQL UNIX_TIMESTAMP() function (seconds
// since epoch) into Slick so queries can compare timestamps numerically.
val unixTimestampFn = SimpleFunction.unary[Option[Timestamp], Long]("UNIX_TIMESTAMP")
// Single-row table holding the site-wide "down for maintenance" flag.
case class SiteSetting(
  id: Option[Int],
  isDown: Boolean)

class SiteSettings(tag: Tag) extends Table[SiteSetting](tag, "SiteSettings") {
  def id = column[Option[Int]]("id", O.PrimaryKey, O.AutoInc)
  def isDown = column[Boolean]("isDown")
  // Default projection — order must match SiteSetting's constructor.
  def * = (id, isDown) <> (SiteSetting.tupled, SiteSetting.unapply)
}
val SiteSettings = TableQuery[SiteSettings]

// One row per failed fetch/parse attempt for a feed, kept for diagnostics.
case class FeedFailureLog(
  id: Option[Int],
  feedId: Int,
  failureDate: Timestamp,
  failureMessage: String)

class FeedFailureLogs(tag: Tag) extends Table[FeedFailureLog](tag, "FeedFailureLogs") {
  def id = column[Option[Int]]("id", O.PrimaryKey, O.AutoInc)
  def feedId = column[Int]("feedId")
  def failureDate = column[Timestamp]("failureDate")
  def failureMessage = column[String]("failureMessage")
  def * = (id, feedId, failureDate, failureMessage) <> (FeedFailureLog.tupled, FeedFailureLog.unapply)
  // FK to the feed whose fetch failed.
  def feed = foreignKey("feedIdentifierLogKey", feedId, NewsFeeds)(_.id)
}
val FeedFailureLogs = TableQuery[FeedFailureLogs]

// Lookup table of feed categories (the Category case class is declared elsewhere).
class Categories(tag: Tag) extends Table[Category](tag, "Categories") {
  def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
  def name = column[String]("name")
  def * = (id.?, name) <> (Category.tupled, Category.unapply)
}
val Categories = TableQuery[Categories]
// One row per syndicated feed (RSS/Atom channel metadata).
class NewsFeeds(tag: Tag) extends Table[NewsFeed](tag, "NewsFeeds") {
  def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
  def title = column[String]("title")
  def link = column[String]("link")
  def description = column[String]("description")
  def feedUrl = column[String]("feedUrl")
  def lastUpdate = column[Timestamp]("lastUpdate")
  def language = column[Option[String]]("language")
  def copyright = column[Option[String]]("copyright")
  def managingEditor = column[Option[String]]("managingEditor")
  def webMaster = column[Option[String]]("webMaster")
  def pubDate = column[Option[Timestamp]]("pubDate")
  def lastBuildDate = column[Option[Timestamp]]("lastBuildDate")
  def generator = column[Option[String]]("generator")
  def docs = column[Option[String]]("docs")
  def ttl = column[Option[Int]]("ttl")
  def imageUrl = column[Option[String]]("imageUrl")
  def imageTitle = column[Option[String]]("imageTitle")
  def imageLink = column[Option[String]]("imageLink")
  def hash = column[String]("hash")
  // NOTE: projection order follows the NewsFeed constructor, NOT the column
  // declaration order above (lastUpdate/hash come last). Keep them in sync.
  def * =
    (id.?, title, link, description, feedUrl, language, copyright, managingEditor,
    webMaster, pubDate, lastBuildDate, generator, docs, ttl, imageUrl,
    imageTitle, imageLink, lastUpdate, hash) <> (NewsFeed.tupled, NewsFeed.unapply)
}
val NewsFeeds = TableQuery[NewsFeeds]

// Many-to-many join table between feeds and categories.
class NewsFeedCategories(tag: Tag) extends Table[(Int, Int, Int)](tag, "NewsFeedCategories") {
  def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
  def feedId = column[Int]("feedId")
  def categoryId = column[Int]("categoryId")
  def * = (id, feedId, categoryId)
  def feed = foreignKey("feedIdentifierKey", feedId, NewsFeeds)(_.id)
  def category = foreignKey("categoryIdKey", categoryId, Categories)(_.id)
}
val NewsFeedCategories = TableQuery[NewsFeedCategories]
// Per-user copy of each feed article, carrying the user's read/saved state.
class UserNewsFeedArticles(tag: Tag) extends Table[UserNewsFeedArticle](tag, "UserNewsFeedArticles") {
  def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
  def userId = column[Int]("userId")
  def feedId = column[Int]("feedId")
  def title = column[String]("title")
  def link = column[String]("link")
  def description = column[String]("description")
  def author = column[Option[String]]("author")
  def comments = column[Option[String]]("comments")
  def enclosureUrl = column[Option[String]]("enclosureUrl")
  def enclosureLength = column[Option[Int]]("enclosureLength")
  def enclosureType = column[Option[String]]("enclosureType")
  def guid = column[Option[String]]("guid")
  def isGuidPermalink = column[Option[Boolean]]("isGuidPermalink")
  def pubDate = column[Option[Timestamp]]("pubDate")
  def source = column[Option[String]]("source")
  def isRead = column[Boolean]("isRead")
  def isSaved = column[Boolean]("isSaved")
  // Projection order must match the UserNewsFeedArticle constructor.
  def * =
    (id.?, userId, feedId, title, link, description, author, comments,
    enclosureUrl, enclosureLength, enclosureType, guid, isGuidPermalink,
    pubDate, source, isRead, isSaved) <> (UserNewsFeedArticle.tupled, UserNewsFeedArticle.unapply)
  def feed = foreignKey("feedIdKey", feedId, NewsFeeds)(_.id)
  def user = foreignKey("userIdKey", userId, Users)(_.id)
}
val UserNewsFeedArticles = TableQuery[UserNewsFeedArticles]

// Site accounts. `password` holds the hash produced by AuthenticationTools.
class Users(tag: Tag) extends Table[User](tag, "Users") {
  def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
  def username = column[String]("username")
  def password = column[String]("password")
  def email = column[String]("email")
  def friendlyName = column[String]("friendlyName")
  def optOutSharing = column[Boolean]("optOutSharing")
  def isAdmin = column[Boolean]("isAdmin")
  def * =
    (id.?, username, password, email, friendlyName, optOutSharing, isAdmin) <> (User.tupled, User.unapply)
}
val Users = TableQuery[Users]
// Active login sessions; (userId, sessionId) is unique, lastAccess/lastAccessIp
// are refreshed on each authenticated request.
class UserSessions(tag: Tag) extends Table[UserSession](tag, "UserSessions") {
  def userId = column[Int]("userId")
  def sessionId = column[String]("sessionId", O.SqlType("VARCHAR(128)"))
  def lastAccess = column[Timestamp]("lastAccess")
  def lastAccessIp = column[String]("lastAccessIp")
  def * = (userId, sessionId, lastAccess, lastAccessIp) <> (UserSession.tupled, UserSession.unapply)
  def bIdx1 = index("userSessionKey", (userId, sessionId), unique = true)
  def user = foreignKey("userSessionUserKey", userId, Users)(_.id)
}
val UserSessions = TableQuery[UserSessions]

// Subscription rows: which user follows which feed, and since when.
class UserFeeds(tag: Tag) extends Table[UserFeed](tag, "UserFeeds") {
  def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
  def userId = column[Int]("userId")
  def feedId = column[Int]("feedId")
  def addedDate = column[Timestamp]("addedDate")
  def * = (id.?, userId, feedId, addedDate) <> (UserFeed.tupled, UserFeed.unapply)
  def feed = foreignKey("userFeedIdKey", feedId, NewsFeeds)(_.id)
  def user = foreignKey("userFeedUserIdKey", userId, Users)(_.id)
}
val UserFeeds = TableQuery[UserFeeds]

// Admin blog posts shown on the site news page.
class BlogEntries(tag: Tag) extends Table[BlogEntry](tag, "BlogEntries") {
  def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
  def authorId = column[Int]("authorId")
  def postDate = column[Timestamp]("postDate")
  def subject = column[String]("subject")
  def body = column[String]("body")
  def * = (id.?, authorId, postDate, subject, body) <> (BlogEntry.tupled, BlogEntry.unapply)
  def user = foreignKey("blogEntryUserIdKey", authorId, Users)(_.id)
}
val BlogEntries = TableQuery[BlogEntries]
/**
 * Runs a Slick action synchronously with a 60-second timeout. Blocking with
 * Await is tolerated only because every accessor in this class deliberately
 * exposes a synchronous API; throws a TimeoutException if the DB stalls.
 */
private def executeNow[R](query: slick.dbio.DBIOAction[R, slick.dbio.NoStream, Nothing])(implicit db: Database) : R = {
  // `60.seconds` instead of postfix `60 second`, which requires the
  // deprecated scala.language.postfixOps feature under -Xfatal-warnings.
  Await.result(db.run(query), 60.seconds)
}
/**
 * Creates the full schema (all tables, indexes and FKs) in one DDL batch.
 * NOTE(review): unconditional — fails if the tables already exist; the
 * commented-out block below is the old per-table existence-checked variant,
 * kept for reference.
 */
def create()(implicit db: Database) = {
  val setup = DBIO.seq(
    (Categories.schema ++ NewsFeeds.schema ++ NewsFeedCategories.schema ++
     Users.schema ++ UserNewsFeedArticles.schema ++ UserFeeds.schema ++
     UserSessions.schema ++ FeedFailureLogs.schema ++ BlogEntries.schema ++
     SiteSettings.schema).create)
  executeNow(setup)

  /* if (!MTable.getTables.list.exists(_.name.name == Categories.tableName)) Categories.ddl.create
  if (!MTable.getTables.list.exists(_.name.name == NewsFeeds.tableName)) NewsFeeds.ddl.create
  if (!MTable.getTables.list.exists(_.name.name == NewsFeedCategories.tableName)) NewsFeedCategories.ddl.create
  if (!MTable.getTables.list.exists(_.name.name == Users.tableName)) Users.ddl.create
  if (!MTable.getTables.list.exists(_.name.name == UserNewsFeedArticles.tableName)) UserNewsFeedArticles.ddl.create
  //if (!MTable.getTables.list.exists(_.name.name == NewsFeedArticleCategories.tableName)) NewsFeedArticleCategories.ddl.create
  //if (!MTable.getTables.list.exists(_.name.name == UserArticles.tableName)) UserArticles.ddl.create
  if (!MTable.getTables.list.exists(_.name.name == UserFeeds.tableName)) UserFeeds.ddl.create
  if (!MTable.getTables.list.exists(_.name.name == UserSessions.tableName)) UserSessions.ddl.create
  if (!MTable.getTables.list.exists(_.name.name == FeedFailureLogs.tableName)) FeedFailureLogs.ddl.create
  if (!MTable.getTables.list.exists(_.name.name == BlogEntries.tableName)) BlogEntries.ddl.create
  if (!MTable.getTables.list.exists(_.name.name == SiteSettings.tableName)) SiteSettings.ddl.create*/
}
/** True when the maintenance flag is set; false when the settings row is absent. */
def isSiteDown()(implicit db: Database) : Boolean = {
  val flag = executeNow(SiteSettings.map(_.isDown).result.headOption)
  flag.getOrElse(false)
}
/**
 * Aggregate usage counters for the admin dashboard: total users, total feeds,
 * and distinct users with a session touched in the last week / last day.
 *
 * Improvement: counts are computed in the database (COUNT / COUNT DISTINCT)
 * instead of fetching every row and calling .length client-side.
 */
def getSiteStatistics()(implicit database: Database) : SiteStatistics = {
  val today = new java.sql.Timestamp(new java.util.Date().getTime())
  // Long arithmetic, so the window math can never silently overflow Int.
  val lastWeek = new java.sql.Timestamp(today.getTime() - 7L * 24 * 60 * 60 * 1000)
  val yesterday = new java.sql.Timestamp(today.getTime() - 24L * 60 * 60 * 1000)

  val userCount = executeNow(Users.length.result)
  val feedCount = executeNow(NewsFeeds.length.result)

  // Distinct users whose last session access is at or after `cutoff`.
  def activeUsersSince(cutoff: java.sql.Timestamp): Int =
    executeNow(
      UserSessions
        .filter(t => unixTimestampFn(t.lastAccess) >= unixTimestampFn(Some(cutoff)))
        .map(_.userId)
        .groupBy(x => x).map(_._1)   // de-duplicate user ids in SQL
        .length.result)

  SiteStatistics(userCount, feedCount, activeUsersSince(lastWeek), activeUsersSince(yesterday))
}
/** One page of blog posts, newest first, starting at `offset`. */
def getBlogPosts(offset: Int)(implicit db: Database) : List[BlogEntry] = {
  val page = BlogEntries
    .sortBy(_.postDate.desc)
    .drop(offset)
    .take(Constants.ITEMS_PER_PAGE)
  executeNow(page.to[List].result)
}
/** The blog post with the given id; throws if no such row exists. */
def getBlogPostById(id: Int)(implicit db: Database) : BlogEntry = {
  executeNow(BlogEntries.filter(_.id === id).result.head)
}
/** Inserts a new blog post by user `uid`, stamped with the current time. */
def insertBlogPost(uid: Int, subject: String, body: String)(implicit db: Database) {
  val now = new java.sql.Timestamp(new java.util.Date().getTime())
  executeNow(BlogEntries += BlogEntry(None, uid, now, subject, body))
}
/** Deletes the blog post with the given id (no-op if it does not exist). */
def deleteBlogPost(id: Int)(implicit db: Database) {
  executeNow(BlogEntries.filter(_.id === id).delete)
}
/** Updates the subject and body of the blog post with the given id. */
def editBlogPost(id: Int, subject: String, body: String)(implicit db: Database) {
  val query = for { be <- BlogEntries if be.id === id } yield (be.subject, be.body)
  // Explicit tuple: `update(subject, body)` relied on adapted-args auto-tupling,
  // which is deprecated and breaks under -Xfatal-warnings / Scala 2.13+.
  executeNow(query.update((subject, body)))
}
/**
 * All feeds the user subscribes to, each paired with its unread-article count.
 * Uses plain SQL with a correlated subquery; two dialect variants are needed
 * because H2 requires the quoted, case-sensitive identifiers Slick generated,
 * while MySQL uses bare identifiers.
 */
def getSubscribedFeeds(userId: Int)(implicit db: Database) : List[(NewsFeed, Int)] = {
  // Maps a raw result row positionally onto the 19-field NewsFeed case class;
  // column order must match the SELECT list (nf.* then the unread count).
  implicit val getNewsFeedResult = GetResult(
    r => NewsFeed(r.<<, r.<<, r.<<, r.<<, r.<<, r.<<, r.<<, r.<<, r.<<,
      r.<<, r.<<, r.<<, r.<<, r.<<, r.<<, r.<<, r.<<, r.<<, r.<<))
  val queryString = if (driver.isInstanceOf[H2Driver]) {
    sql"""
    select "nf".*, (
      select count("id") from "UserNewsFeedArticles" where "feedId" = "uf"."feedId" and "userId" = $userId and "isRead" = false
    ) as unread
    from "NewsFeeds" "nf"
    inner join "UserFeeds" "uf" on "nf"."id" = "uf"."feedId"
    where "uf"."userId" = $userId
    """
  } else {
    sql"""
    select nf.*, (
      select count(id) from UserNewsFeedArticles where feedId = uf.feedId and userId = $userId and isRead = false
    ) as unread
    from NewsFeeds nf
    inner join UserFeeds uf on nf.id = uf.feedId
    where uf.userId = $userId
    """
  }
  val unreadCountQuery = queryString.as[(NewsFeed, Int)]
  executeNow(unreadCountQuery.map { x => x.toList })
}
/**
 * Unread-post count for one of the user's subscribed feeds, ignoring posts
 * published more than two weeks before the user subscribed.
 */
def getUnreadCountForFeed(userId: Int, feedId: Int)(implicit db: Database) : Int = {
  val today = new Timestamp(new java.util.Date().getTime())
  val feed_posts = for {
    nfa <- UserNewsFeedArticles if nfa.feedId === feedId
    uf <- UserFeeds if uf.userId === userId && nfa.feedId === uf.feedId && nfa.userId === uf.userId
  } yield (nfa, uf)
  val recent_posts = for {
    (nfa, uf) <- feed_posts
    // BUG FIX: UNIX_TIMESTAMP() yields SECONDS, so the cutoff must use
    // OLDEST_POST_DIFFERENCE_SEC; the old code subtracted the millisecond
    // constant, making the window ~1000x wider than the intended two weeks.
    if unixTimestampFn(nfa.pubDate.getOrElse(today)) >= (unixTimestampFn(uf.addedDate) - OLDEST_POST_DIFFERENCE_SEC)
  } yield nfa
  // BUG FIX: the old guard `fq == false` compared a lifted Rep[Boolean]
  // column with universal equality, which evaluates to the Scala constant
  // `false` at query-build time — i.e. WHERE FALSE, so the count was always
  // zero. A lifted comparison on the column is required.
  // Also count in the database instead of fetching every row.
  executeNow(recent_posts.filter(_.isRead === false).length.result)
}
/** Looks up a feed by its feed URL or its site link; None if neither matches. */
def getFeedFromUrl(url: String)(implicit db: Database) : Option[NewsFeed] = {
  val matching = NewsFeeds.filter(f => f.feedUrl === url || f.link === url)
  executeNow(matching.result.headOption)
}
/**
 * Subscribes the user to the feed unless a subscription row already exists.
 * NOTE(review): check-then-insert is not atomic; concurrent calls could insert
 * duplicates — confirm whether a unique index guards (userId, feedId).
 */
def addSubscriptionIfNotExists(userId: Int, feedId: Int)(implicit db: Database) {
  val existing = UserFeeds.filter(uf => uf.userId === userId && uf.feedId === feedId)
  if (executeNow(existing.result.headOption).isEmpty) {
    val now = new Timestamp(new java.util.Date().getTime())
    executeNow(UserFeeds += UserFeed(None, userId, feedId, now))
  }
}
/**
 * Removes the user's subscription; if that left the feed with no subscribers,
 * the background refresh job for the feed is unscheduled.
 */
def unsubscribeFeed(userId: Int, feedId: Int)(implicit db: Database) {
  executeNow(UserFeeds.filter(uf => uf.userId === userId && uf.feedId === feedId).delete)
  val remaining = executeNow(UserFeeds.filter(_.feedId === feedId).length.result)
  if (remaining == 0) {
    val feedUrl = executeNow(NewsFeeds.filter(_.id === feedId).map(_.feedUrl).result.head)
    BackgroundJobManager.unscheduleFeedJob(feedUrl)
  }
}
/** The feed a given per-user article row belongs to; throws if the post is missing. */
def getFeedByPostId(postId: Long)(implicit db: Database) : NewsFeed = {
  val feedForPost = UserNewsFeedArticles
    .filter(_.id === postId)
    .join(NewsFeeds).on(_.feedId === _.id)
    .map { case (_, nf) => nf }
  executeNow(feedForPost.result.head)
}
/**
 * Up to ITEMS_PER_PAGE recent posts for the given user: posts from the user's
 * own subscriptions normally, or — when the user has opted out of sharing —
 * posts drawn from feeds of users who have NOT opted out.
 *
 * BUG FIX: `.take(Constants.ITEMS_PER_PAGE)` previously followed the
 * if/else directly, where method selection binds to the else-branch block
 * only (`if (c) {a} else {b}.take(n)` parses as `else ({b}.take(n))`), so the
 * opted-out path ran with NO row limit. The limit now wraps both branches.
 */
def getLatestPostsForUser(userId: Int)(implicit db: Database) : List[NewsFeedArticleInfo] = {
  val userOptedOut = executeNow(Users.filter(_.id === userId).result.map { x => x.head.optOutSharing })
  val baseQuery =
    if (userOptedOut) {
      for {
        (uf, u) <- UserFeeds join Users on (_.userId === _.id) if u.optOutSharing === false
        nf <- NewsFeeds if uf.feedId === nf.id
        nfa <- UserNewsFeedArticles if nfa.feedId === uf.feedId
      } yield nfa
    } else {
      for {
        nfa <- UserNewsFeedArticles
        nf <- NewsFeeds if nfa.feedId === nf.id
        uf <- UserFeeds if nf.id === uf.feedId
        u <- Users if u.id === uf.userId && u.id === userId
      } yield nfa
    }
  val articleQuery = baseQuery.take(Constants.ITEMS_PER_PAGE)
  executeNow(articleQuery.result.map { x => x.map(y =>
    NewsFeedArticleInfo(
      NewsFeedArticle(y.id, y.feedId, y.title, y.link, y.description, y.author,
        y.comments, y.enclosureUrl, y.enclosureLength, y.enclosureType,
        y.guid, y.isGuidPermalink, y.pubDate, y.source), false, false)).toList })
}
/** The article URL of the given per-user post; throws if the post is missing. */
def getLinkForPost(postId: Long)(implicit db: Database): String = {
  val linkQuery = UserNewsFeedArticles.filter(_.id === postId).map(_.link)
  executeNow(linkQuery.result.head)
}
/**
 * A page of the user's posts across the given feeds, newest first.
 * (latestPostDate, latestPostId) anchor the window so paging stays stable
 * while new posts arrive; `unreadOnly` additionally filters out read posts.
 */
def getPostsForFeeds(userId: Int, feedIds: List[Int], unreadOnly: Boolean, offset: Int, maxEntries: Int, latestPostDate: Long, latestPostId: Long)(implicit db: Database): List[NewsFeedArticleInfo] = {
  val inWindow = UserNewsFeedArticles.filter(unfa =>
    unfa.userId === userId && unfa.feedId.inSet(feedIds) &&
    unfa.id <= latestPostId && unixTimestampFn(unfa.pubDate) < latestPostDate)
  val wanted = if (unreadOnly) inWindow.filter(_.isRead === false) else inWindow
  val page = wanted.sortBy(_.pubDate.desc).drop(offset).take(maxEntries)
  executeNow(page.result.map { rows =>
    rows.map { y =>
      NewsFeedArticleInfo(
        NewsFeedArticle(y.id, y.feedId, y.title, y.link, y.description, y.author,
          y.comments, y.enclosureUrl, y.enclosureLength, y.enclosureType,
          y.guid, y.isGuidPermalink, y.pubDate, y.source),
        !y.isRead, y.isSaved)
    }.toList
  })
}
/**
 * A page of the user's posts for a single feed, newest first; same windowing
 * contract as getPostsForFeeds.
 */
def getPostsForFeed(userId: Int, feedId: Int, unreadOnly: Boolean, offset: Int, maxEntries: Int, latestPostDate: Long, latestPostId: Long)(implicit db: Database) : List[NewsFeedArticleInfo] = {
  val inWindow = UserNewsFeedArticles.filter(unfa =>
    unfa.userId === userId && unfa.feedId === feedId &&
    unfa.id <= latestPostId && unixTimestampFn(unfa.pubDate) < latestPostDate)
  val wanted = if (unreadOnly) inWindow.filter(_.isRead === false) else inWindow
  val page = wanted.sortBy(_.pubDate.desc).drop(offset).take(maxEntries)
  executeNow(page.result.map { rows =>
    rows.map { y =>
      NewsFeedArticleInfo(
        NewsFeedArticle(y.id, y.feedId, y.title, y.link, y.description, y.author, y.comments,
          y.enclosureUrl, y.enclosureLength, y.enclosureType, y.guid, y.isGuidPermalink,
          y.pubDate, y.source),
        !y.isRead, y.isSaved)
    }.toList
  })
}
/**
 * A page of the user's posts across ALL subscribed feeds, newest first; same
 * windowing contract as getPostsForFeeds.
 */
def getPostsForAllFeeds(userId: Int, unreadOnly: Boolean, offset: Int, maxEntries: Int, latestPostDate: Long, latestPostId: Long)(implicit db: Database) : List[NewsFeedArticleInfo] = {
  val inWindow = UserNewsFeedArticles.filter(unfa =>
    unfa.userId === userId && unfa.id <= latestPostId &&
    unixTimestampFn(unfa.pubDate) < latestPostDate)
  val wanted = if (unreadOnly) inWindow.filter(_.isRead === false) else inWindow
  val page = wanted.sortBy(_.pubDate.desc).drop(offset).take(maxEntries)
  executeNow(page.result.map { rows =>
    rows.map { y =>
      NewsFeedArticleInfo(
        NewsFeedArticle(y.id, y.feedId, y.title, y.link, y.description, y.author, y.comments,
          y.enclosureUrl, y.enclosureLength, y.enclosureType, y.guid, y.isGuidPermalink,
          y.pubDate, y.source),
        !y.isRead, y.isSaved)
    }.toList
  })
}
/**
 * A page of the user's saved posts, newest first; same windowing contract as
 * getPostsForFeeds. Saved posts are presented as read (flags false/true).
 */
def getSavedPosts(userId: Int, offset: Int, maxEntries: Int, latestPostDate: Long, latestPostId: Long)(implicit db: Database) : List[NewsFeedArticleInfo] = {
  val saved = UserNewsFeedArticles.filter(unfa =>
    unfa.userId === userId && unfa.id <= latestPostId &&
    unixTimestampFn(unfa.pubDate) < latestPostDate &&
    unfa.isSaved === true)
  val page = saved.sortBy(_.pubDate.desc).drop(offset).take(maxEntries)
  executeNow(page.result.map { rows =>
    rows.map { y =>
      NewsFeedArticleInfo(
        NewsFeedArticle(y.id, y.feedId, y.title, y.link, y.description, y.author, y.comments,
          y.enclosureUrl, y.enclosureLength, y.enclosureType, y.guid, y.isGuidPermalink,
          y.pubDate, y.source),
        false, true)
    }.toList
  })
}
/**
 * Flips the read flag on every post of one feed whose publish time falls in
 * [upTo, from] (UNIX seconds) and whose current flag equals `unread`.
 * Always reports success.
 */
def setPostStatusForAllPosts(userId: Int, feedId: Int, from: Long, upTo: Long, unread: Boolean)(implicit db: Database) : Boolean = {
  val flags = UserNewsFeedArticles
    .filter(unfa =>
      unfa.userId === userId && unfa.feedId === feedId &&
      unixTimestampFn(unfa.pubDate) <= from &&
      unixTimestampFn(unfa.pubDate) >= upTo &&
      unfa.isRead === unread)
    .map(_.isRead)
  executeNow(flags.update(!unread))
  true
}
/**
 * Flips the read flag on every post of the user (all feeds) whose publish time
 * falls in [upTo, from] (UNIX seconds) and whose current flag equals `unread`.
 * Always reports success.
 */
def setPostStatusForAllPosts(userId: Int, from: Long, upTo: Long, unread: Boolean)(implicit db: Database) : Boolean = {
  val flags = UserNewsFeedArticles
    .filter(unfa =>
      unfa.userId === userId &&
      unixTimestampFn(unfa.pubDate) <= from &&
      unixTimestampFn(unfa.pubDate) >= upTo &&
      unfa.isRead === unread)
    .map(_.isRead)
  executeNow(flags.update(!unread))
  true
}
/**
 * Flips one post's read flag, but only if the user is actually subscribed to
 * the feed; returns false when there is no such subscription.
 */
def setPostStatus(userId: Int, feedId: Int, postId: Long, unread: Boolean)(implicit db: Database) : Boolean = {
  val subscription = UserFeeds.filter(uf => uf.feedId === feedId && uf.userId === userId)
  if (executeNow(subscription.result.headOption).isEmpty) {
    false
  } else {
    val flag = UserNewsFeedArticles
      .filter(unfa =>
        unfa.userId === userId &&
        unfa.feedId === feedId &&
        unfa.id === postId &&
        unfa.isRead === unread)
      .map(_.isRead)
    executeNow(flag.update(!unread))
    true
  }
}
/**
 * Flips one post's read flag without pinning a feed; returns false when the
 * user has no subscriptions at all (the original gating behavior, preserved).
 */
def setPostStatus(userId: Int, postId: Long, unread: Boolean)(implicit db: Database) : Boolean = {
  val anySubscription = UserFeeds.filter(_.userId === userId)
  if (executeNow(anySubscription.result.headOption).isEmpty) {
    false
  } else {
    val flag = UserNewsFeedArticles
      .filter(unfa =>
        unfa.userId === userId &&
        unfa.id === postId &&
        unfa.isRead === unread)
      .map(_.isRead)
    executeNow(flag.update(!unread))
    true
  }
}
/**
 * Marks a post as saved, but only if the user subscribes to the feed;
 * returns false when there is no such subscription.
 */
def savePost(userId: Int, feedId: Int, postId: Long)(implicit db: Database) : Boolean = {
  val subscription = UserFeeds.filter(uf => uf.feedId === feedId && uf.userId === userId)
  if (executeNow(subscription.result.headOption).isEmpty) {
    false
  } else {
    val savedFlag = UserNewsFeedArticles
      .filter(unfa => unfa.userId === userId && unfa.feedId === feedId && unfa.id === postId)
      .map(_.isSaved)
    executeNow(savedFlag.update(true))
    true
  }
}
/**
 * Clears a post's saved flag, but only if the user subscribes to the feed;
 * returns false when there is no such subscription.
 */
def unsavePost(userId: Int, feedId: Int, postId: Long)(implicit db: Database) : Boolean = {
  val subscription = UserFeeds.filter(uf => uf.feedId === feedId && uf.userId === userId)
  if (executeNow(subscription.result.headOption).isEmpty) {
    false
  } else {
    val savedFlag = UserNewsFeedArticles
      .filter(unfa => unfa.userId === userId && unfa.feedId === feedId && unfa.id === postId)
      .map(_.isSaved)
    executeNow(savedFlag.update(false))
    true
  }
}
/** The session row for the given id; throws if no such session exists. */
def getUserSessionById(sessionId: String)(implicit db: Database) : UserSession = {
  executeNow(UserSessions.filter(_.sessionId === sessionId).result.head)
}
/**
 * Looks up a session and, when found, touches its lastAccess/lastAccessIp.
 * Returns the session as it was BEFORE the touch (original behavior).
 */
def getUserSession(sessionId: String, ip: String)(implicit db: Database) : Option[UserSession] = {
  val bySession = UserSessions.filter(_.sessionId === sessionId)
  val found = executeNow(bySession.result.headOption)
  found.foreach { s =>
    val now = new java.sql.Timestamp(new java.util.Date().getTime())
    executeNow(bySession.update(UserSession(s.userId, s.sessionId, now, ip)))
  }
  found
}
/** The user's username, or "" when the id is unknown. */
def getUserName(userId: Int)(implicit db: Database) : String = {
  val name = executeNow(Users.filter(_.id === userId).map(_.username).result.headOption)
  name.getOrElse("")
}
/** The user row for the given username, if any. */
def getUserInfoByUsername(username: String)(implicit db: Database) : Option[User] = {
  executeNow(Users.filter(_.username === username).result.headOption)
}
/** The user row for the given id; throws if no such user exists. */
def getUserInfo(userId: Int)(implicit db: Database) : User = {
  executeNow(Users.filter(_.id === userId).result.head)
}
/**
 * Sets the user's sharing opt-out flag.
 *
 * Improvement: updates just the one column instead of the previous
 * read-whole-row / rewrite-whole-row cycle — one round trip instead of two,
 * and no lost-update race against concurrent profile edits.
 */
def setOptOut(userId: Int, optOut: Boolean)(implicit db: Database) {
  val q = for { u <- Users if u.id === userId } yield u.optOutSharing
  executeNow(q.update(optOut))
}
/** Deletes the session with the given id, if any.
  *
  * Fix: the old code SELECTed the session first and only then deleted it.
  * The existence check was redundant (DELETE of a missing row is a no-op)
  * and raced with concurrent invalidations, so it was dropped — one DB
  * round-trip instead of two, identical end state.
  */
def invalidateSession(sessionId: String)(implicit db: Database) {
  val q = (for { sess <- UserSessions if sess.sessionId === sessionId } yield sess)
  executeNow(q.delete)
}
// Convenience overload: when no distinct username is available, the e-mail
// address doubles as the username (see the 5-argument overload below).
def startUserSession(sessionId: String, email: String, ip: String, friendlyName: String)(implicit db: Database) {
startUserSession(sessionId, email, email, ip, friendlyName)
}
/** Creates a user with a hashed password; returns false (and does nothing)
  * when the username is already taken. */
def createUser(username: String, password: String, email: String)(implicit db: Database) = {
  val byName = for { u <- Users if u.username === username } yield u
  val alreadyExists = executeNow(byName.result.map(_.headOption)).isDefined
  if (!alreadyExists) {
    // friendlyName defaults to the username; not opted out, not an admin.
    executeNow(Users += User(None, username, AuthenticationTools.hashPassword(password), email, username, false, false))
    true
  } else {
    false
  }
}
/** Replaces the password hash for the user with the given username. */
def setPassword(username: String, password: String)(implicit db: Database) = {
  val passwordColumn = for { u <- Users if u.username === username } yield u.password
  val hashed = AuthenticationTools.hashPassword(password)
  executeNow(passwordColumn.update(hashed))
}
/** Replaces the password hash for the user with the given id. */
def setPassword(uId: Int, password: String)(implicit db: Database) = {
  val passwordColumn = for { u <- Users if u.id === uId } yield u.password
  val hashed = AuthenticationTools.hashPassword(password)
  executeNow(passwordColumn.update(hashed))
}
/** Updates only the e-mail column of the given user's row. */
def setEmail(uId: Int, email: String)(implicit db: Database) = {
  val emailColumn = for { u <- Users if u.id === uId } yield u.email
  executeNow(emailColumn.update(email))
}
/** Ensures a user row exists for `username` (creating one with an empty
  * password hash on first login, or refreshing e-mail/friendly name on a
  * repeat login), then records a new session row for them.
  *
  * Fix: `queryResult` was declared `var` although it is never reassigned;
  * it is now a `val`.
  */
def startUserSession(sessionId: String, username: String, email: String, ip: String, friendlyName: String)(implicit db: Database) {
  val q = for { u <- Users if u.username === username } yield u
  val queryResult : Option[User] = executeNow(q.result.map { x => x.headOption })
  val userId = queryResult match {
    case Some(u) =>
      // Existing account: refresh the externally-provided profile fields.
      executeNow(q.update(User(u.id, u.username, u.password, email, friendlyName, u.optOutSharing, u.isAdmin)))
      u.id.get
    case None =>
      // New account: empty password hash, default flags; returns the new id.
      executeNow(Users returning Users.map(_.id) += User(None, username, "", email, friendlyName, false, false))
  }
  executeNow(UserSessions += UserSession(userId, sessionId, new java.sql.Timestamp(new java.util.Date().getTime()), ip))
}
/** Inserts or refreshes the NewsFeeds row for `feedUrl` from the freshly
  * downloaded `feed`, and returns the row as re-read from the database
  * (so callers see the generated id).
  *
  * Throws NotModifiedException when the stored content hash equals the
  * downloaded feed's hash, letting callers skip per-article processing.
  */
private def updateOrInsertFeedInfo(feedUrl: String, feed: XmlFeed)(implicit db: Database) : NewsFeed = {
  // 17-column projection of every mutable attribute of the feed row;
  // position 17 is the content hash used for the not-modified check.
  val newsFeed =
    for { f <- NewsFeeds if f.feedUrl === feedUrl } yield
      (f.copyright, f.description, f.docs, f.generator, f.imageLink,
       f.imageTitle, f.imageUrl, f.language, f.lastBuildDate, f.link,
       f.managingEditor, f.pubDate, f.title, f.ttl, f.webMaster, f.lastUpdate, f.hash)
  val newsFeedOption = executeNow(newsFeed.result.map { x => x.headOption })
  newsFeedOption match {
    case Some(fd) => {
      // fd._17 is the stored hash; identical hash means nothing changed.
      if (fd._17 == feed.feedProperties.hash) throw new NotModifiedException
      // Rewrite every column; lastUpdate is stamped with "now".
      executeNow(newsFeed.update(
        (feed.feedProperties.copyright,
         feed.feedProperties.description,
         feed.feedProperties.docs,
         feed.feedProperties.generator,
         feed.feedProperties.imageLink,
         feed.feedProperties.imageTitle,
         feed.feedProperties.imageUrl,
         feed.feedProperties.language,
         feed.feedProperties.lastBuildDate,
         feed.feedProperties.link,
         feed.feedProperties.managingEditor,
         feed.feedProperties.pubDate,
         feed.feedProperties.title,
         feed.feedProperties.ttl,
         feed.feedProperties.webMaster,
         new java.sql.Timestamp(new java.util.Date().getTime()),
         feed.feedProperties.hash)))
    }
    case None => {
      // First sighting of this URL: insert a fresh row (id is generated).
      executeNow(NewsFeeds.map(x => (x.feedUrl, x.copyright, x.description, x.docs, x.generator,
                                     x.imageLink, x.imageTitle, x.imageUrl, x.language, x.lastBuildDate,
                                     x.link, x.managingEditor, x.pubDate, x.title, x.ttl, x.webMaster,
                                     x.lastUpdate, x.hash)) += (
        feedUrl,
        feed.feedProperties.copyright,
        feed.feedProperties.description,
        feed.feedProperties.docs,
        feed.feedProperties.generator,
        feed.feedProperties.imageLink,
        feed.feedProperties.imageTitle,
        feed.feedProperties.imageUrl,
        feed.feedProperties.language,
        feed.feedProperties.lastBuildDate,
        feed.feedProperties.link,
        feed.feedProperties.managingEditor,
        feed.feedProperties.pubDate,
        feed.feedProperties.title,
        feed.feedProperties.ttl,
        feed.feedProperties.webMaster,
        new java.sql.Timestamp(new java.util.Date().getTime()),
        feed.feedProperties.hash))
    }
  }
  // Re-read the full row so the returned NewsFeed carries its id.
  val resultQuery = (for { f <- NewsFeeds if f.feedUrl === feedUrl } yield f)
  executeNow(resultQuery.result.map { x => x.head })
}
/** Refreshes the feed row from `feed`, then upserts every downloaded article
  * for the single user `userId`. Returns the refreshed feed row. */
def updateOrInsertFeed(userId: Int, feedUrl: String, feed: XmlFeed)(implicit db: Database) : NewsFeed = {
  val updatedFeed = updateOrInsertFeedInfo(feedUrl, feed)
  val newsFeedId = updatedFeed.id.get
  // Now update/insert each individual post in the feed.
  feed.entries.foreach(entry => insertOrUpdateEntry(userId, newsFeedId, entry))
  updatedFeed
}
/** Refreshes the feed row from `feed`, then upserts every downloaded article
  * for *every* user subscribed to the feed. Returns the refreshed feed row.
  *
  * Fix: the subscriber query's generator used to be named `f`, shadowing the
  * outer `val f` (the refreshed feed) — legal but confusing; renamed to `nf`.
  */
def updateOrInsertFeed(feedUrl: String, feed: XmlFeed)(implicit db: Database) : NewsFeed = {
  val f = updateOrInsertFeedInfo(feedUrl, feed)
  val newsFeedId = f.id.get
  // All users subscribed to this feed URL.
  val subscribed_users = for {
    nf <- NewsFeeds if nf.feedUrl === feedUrl
    uf <- UserFeeds if uf.feedId === nf.id
  } yield uf.userId
  val subscribedUsersList = executeNow(subscribed_users.result.map { x => x.toList })
  // Now update/insert each individual post in the feed, per subscriber.
  subscribedUsersList.foreach(uid =>
    for { p <- feed.entries } insertOrUpdateEntry(uid, newsFeedId, p)
  )
  f
}
/** Inserts the downloaded article `p._1` for (userId, feedId), or updates the
  * already-stored copy when the feed already contains a "matching" article.
  *
  * An existing row matches when any of these hold:
  *   - same non-empty link, or
  *   - same non-null guid, or
  *   - same title AND same description.
  *
  * On update, the previously stored pubDate (ent._11) is kept instead of the
  * incoming one — presumably so re-fetches don't reshuffle ordering; confirm
  * before changing.
  */
private def insertOrUpdateEntry(userId: Int, feedId: Int, p: (NewsFeedArticle, List[String]))(implicit db: Database) {
  val newPost = p._1
  // Insert or update article as needed.
  // Id of the matching row (same match predicate as `entry` below).
  val existingEntryId = for {
    e <- UserNewsFeedArticles if e.feedId === feedId && e.userId === userId &&
      ((e.link === newPost.link && !newPost.link.isEmpty()) ||
       (e.guid =!= (None : Option[String]) && e.guid === newPost.guid) ||
       (e.title === newPost.title && e.description === newPost.description))
  } yield e.id
  // 12-column projection of the matching row's mutable attributes.
  val entry = for {
    e <- UserNewsFeedArticles if e.feedId === feedId && e.userId === userId &&
      ((e.link === newPost.link && !newPost.link.isEmpty()) ||
       (e.guid =!= (None : Option[String]) && e.guid === newPost.guid) ||
       (e.title === newPost.title && e.description === newPost.description))
  } yield (
    e.title, e.link, e.description, e.author, e.comments, e.enclosureLength,
    e.enclosureType, e.enclosureUrl, e.guid, e.isGuidPermalink, e.pubDate,
    e.source)
  val entryResult = executeNow(entry.result.map { x => x.headOption })
  // entryId is computed but not used further; evaluating the match is what
  // performs the insert/update side effects.
  val entryId = entryResult match {
    case Some(ent) => {
      executeNow(entry.update(
        newPost.title,
        newPost.link,
        newPost.description,
        newPost.author,
        newPost.comments,
        newPost.enclosureLength,
        newPost.enclosureType,
        newPost.enclosureUrl,
        newPost.guid,
        newPost.isGuidPermalink,
        ent._11,  // keep the previously stored pubDate (see scaladoc)
        newPost.source))
      executeNow(existingEntryId.result.map { x => x.head })
    }
    case None => (
      // NB: this lambda's `p` shadows the method parameter `p` above.
      // New article: insert unread and unsaved; returns the generated id.
      executeNow(UserNewsFeedArticles.map(p =>
        (p.userId, p.feedId, p.title, p.link, p.description,
         p.author, p.comments, p.enclosureLength, p.enclosureType,
         p.enclosureUrl, p.guid, p.isGuidPermalink, p.pubDate, p.source,
         p.isRead, p.isSaved)) returning UserNewsFeedArticles.map(_.id) += (
        userId,
        feedId,
        newPost.title,
        newPost.link,
        newPost.description,
        newPost.author,
        newPost.comments,
        newPost.enclosureLength,
        newPost.enclosureType,
        newPost.enclosureUrl,
        newPost.guid,
        newPost.isGuidPermalink,
        newPost.pubDate,
        newPost.source,
        false,
        false
      )))
  }
}
/** Records a fetch/parse failure for a feed. Unknown feed URLs are ignored
  * (only known feeds get a failure-log row). */
def logFeedFailure(feedUrl: String, message: String)(implicit db: Database) {
  val feedQuery = for { nf <- NewsFeeds if nf.feedUrl === feedUrl } yield nf
  executeNow(feedQuery.result.map(_.headOption)).foreach { f =>
    executeNow(FeedFailureLogs += (
      FeedFailureLog(None, f.id.get, new java.sql.Timestamp(new java.util.Date().getTime()), message)))
  }
}
/** Deletes unsaved posts whose publish date is older than ~three months.
  *
  * Bug fix: the millisecond offset was computed with Int arithmetic, and
  * 60*60*24*30*3*1000 = 7,776,000,000 overflows Int (wrapping to a negative
  * value). That placed the cutoff in the *future*, so this routine deleted
  * every unsaved post. A Long literal (60L) keeps the arithmetic in Long.
  */
def deleteOldPosts()(implicit db: Database) {
  val threeMonthsAgo = new java.sql.Timestamp(new java.util.Date().getTime() - 60L*60*24*30*3*1000)
  val matchingOldPosts = for { unfa <- UserNewsFeedArticles if unixTimestampFn(unfa.pubDate) < unixTimestampFn(Some(threeMonthsAgo)) && unfa.isSaved === false } yield unfa
  executeNow(matchingOldPosts.delete)
}
/** Deletes sessions that have been idle for more than one week. */
def deleteOldSessions()(implicit db: Database) {
  // 60L keeps the arithmetic in Long (the value itself fits in Int).
  val cutoff = new java.sql.Timestamp(new java.util.Date().getTime() - 60L * 60 * 24 * 7 * 1000)
  val staleSessions = for { us <- UserSessions if unixTimestampFn(us.lastAccess) < unixTimestampFn(Some(cutoff)) } yield us
  executeNow(staleSessions.delete)
}
/** Deletes feed-failure log entries older than one week. */
def deleteOldFailLogs()(implicit db: Database) {
  // 60L keeps the arithmetic in Long (the value itself fits in Int).
  val cutoff = new java.sql.Timestamp(new java.util.Date().getTime() - 60L * 60 * 24 * 7 * 1000)
  val staleLogs = for { fl <- FeedFailureLogs if unixTimestampFn(fl.failureDate) < unixTimestampFn(Some(cutoff)) } yield fl
  executeNow(staleLogs.delete)
}
}
| tmiw/newsrdr | src/main/scala/us/newsrdr/models/DataTables.scala | Scala | bsd-3-clause | 37,584 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.serializers
import org.junit.Assert._
import org.junit.Test
import scala.collection.JavaConverters._
/** Round-trips a HashMap through JsonSerde and checks that each map entry is
  * at least *serializable* (entries are not deserialized back). */
class TestJsonSerde {
  @Test
  def testJsonSerdeShouldWork {
    val serde = new JsonSerde[java.util.HashMap[String, Object]]
    val obj = new java.util.HashMap[String, Object](Map[String, Object]("hi" -> "bye", "why" -> new java.lang.Integer(2)).asJava)
    val bytes = serde.toBytes(obj)
    assertEquals(obj, serde.fromBytes(bytes))
    val serdeHashMapEntry = new JsonSerde[java.util.Map.Entry[String, Object]]
    obj.entrySet().asScala.foreach(entry => {
      try {
        // Fix: result was bound to an unused val; we only care that
        // serialization does not throw.
        serdeHashMapEntry.toBytes(entry)
      } catch {
        // Fix: include the cause in the failure message instead of
        // swallowing it.
        case e: Exception => fail("HashMap Entry serialization failed: " + e)
      }
    })
  }
}
| prateekm/samza | samza-core/src/test/scala/org/apache/samza/serializers/TestJsonSerde.scala | Scala | apache-2.0 | 1,583 |
package org.brzy
/**
 * Calista is an API for accessing and manipulating Cassandra data stores.
 *
 * === Overview ===
 * Example usage...
 *
 * Session...
 *
 * ColumnFamily and SuperColumnFamily...
 *
 * Definition...
 *
 * Object Column Mapping...
 */
package object calista | m410/calista | src/main/scala/org/brzy/calista/package.scala | Scala | apache-2.0 | 275 |
package be.studiocredo.auth
import org.joda.time.{Duration, Interval, DateTime}
import models.ids.UserId
import play.api.cache.Cache
import play.api.Play.current
import com.google.inject.Inject
import be.studiocredo.Service
import akka.actor.Cancellable
import play.api.libs.concurrent.Akka
/** Common shape of both token kinds: creation, last-use and expiry instants. */
trait Token {
  def creationDate: DateTime
  def lastUsed: DateTime
  def expirationDate: DateTime
}
// Session token tied to an authenticated user.
case class AuthToken(id: String, userId: UserId, creationDate: DateTime, lastUsed: DateTime, expirationDate: DateTime) extends Token
// Token addressed to an e-mail; userId is optional (presumably the account
// may not exist yet — verify against callers).
case class EmailToken(id: String, email: String, userId: Option[UserId], creationDate: DateTime, lastUsed: DateTime, expirationDate: DateTime) extends Token
/** Persistence abstraction for auth and e-mail tokens. */
trait AuthTokenStore {
  /** Inserts or replaces the session token with the same id. */
  def save(token: AuthToken)
  /** Inserts or replaces the e-mail token with the same id. */
  def save(token: EmailToken)
  def findAuthToken(id: String): Option[AuthToken]
  def findEmailToken(id: String): Option[EmailToken]
  /** Removes the token (of either kind) with this id, if present. */
  def delete(id: String)
  /** Purges all tokens whose expiration date has passed. */
  def deleteExpiredTokens()
}
/** [[AuthTokenStore]] backed by Play's cache; expiry is delegated to the
  * cache's own TTL mechanism. */
class CacheAuthTokenStore extends AuthTokenStore {

  // Cache TTL in seconds; floored at 5 so an about-to-expire token is still
  // written rather than rejected by the cache.
  def expireSeconds(token: Token): Int = {
    val remaining = new Duration(DateTime.now(), token.expirationDate).getStandardSeconds.toInt
    math.max(5, remaining)
  }

  override def save(token: AuthToken) = Cache.set(token.id, token, expireSeconds(token))
  override def save(token: EmailToken) = Cache.set(token.id, token, expireSeconds(token))

  override def findAuthToken(id: String): Option[AuthToken] = Cache.getAs[AuthToken](id)
  override def findEmailToken(id: String): Option[EmailToken] = Cache.getAs[EmailToken](id)

  override def delete(id: String) = Cache.remove(id)

  override def deleteExpiredTokens() = {
    // Nothing to do: the cache evicts entries itself once their TTL passes.
  }
}
/** [[AuthTokenStore]] backed by the database via Slick. Expired tokens stay
  * in the tables until deleteExpiredTokens() runs (see AuthTokenExpireService). */
class DbAuthTokenStore extends AuthTokenStore {

  import play.api.db.slick.Config.driver.simple._
  import play.api.db.slick._
  import models.schema.tables._

  val AuthTokensQ = Query(AuthTokens)
  val EmailAuthTokensQ = Query(EmailAuthTokens)

  // Upsert inside one transaction: update when the id exists, insert otherwise.
  override def save(token: AuthToken) = {
    DB.withTransaction { implicit session: Session =>
      val byId = AuthTokens.filter(_.id === token.id)
      if (byId.exists.run) {
        byId.update(token)
      } else {
        AuthTokens.insert(token)
      }
    }
  }

  // Same upsert pattern for e-mail tokens.
  override def save(token: EmailToken) = {
    DB.withTransaction { implicit session: Session =>
      val byId = EmailAuthTokens.filter(_.id === token.id)
      if (byId.exists.run) {
        byId.update(token)
      } else {
        EmailAuthTokens.insert(token)
      }
    }
  }

  override def findAuthToken(id: String): Option[AuthToken] = {
    DB.withSession { implicit session: Session =>
      AuthTokensQ.filter(_.id === id).firstOption
    }
  }

  override def findEmailToken(id: String): Option[EmailToken] = {
    DB.withSession { implicit session: Session =>
      EmailAuthTokensQ.filter(_.id === id).firstOption
    }
  }

  // Deletes from both tables; ids are assumed unique across kinds, so at most
  // one row is removed.
  override def delete(id: String) = {
    DB.withSession { implicit session: Session =>
      AuthTokensQ.filter(_.id === id).delete
      EmailAuthTokensQ.filter(_.id === id).delete
    }
  }

  override def deleteExpiredTokens() {
    DB.withSession { implicit session: Session =>
      // Joda DateTime column comparison support.
      import com.github.tototoshi.slick.JodaSupport._
      AuthTokensQ.filter(_.expiration < DateTime.now()).delete
      EmailAuthTokensQ.filter(_.expiration < DateTime.now()).delete
    }
  }
}
/** Background service that purges expired tokens from the store every five
  * minutes (first run immediately on start).
  *
  * Fix: onStop used `cancellable.map(_.cancel())`, using `map` purely for a
  * side effect and discarding the resulting Option[Boolean]; `foreach`
  * expresses the intent.
  */
class AuthTokenExpireService @Inject()(authTokenStore: AuthTokenStore) extends Service {

  // Handle of the scheduled cleanup task, kept so it can be cancelled.
  var cancellable: Option[Cancellable] = None

  override def onStop() {
    cancellable.foreach(_.cancel())
  }

  override def onStart() {
    import play.api.Play.current
    import scala.concurrent.duration._
    import play.api.libs.concurrent.Execution.Implicits._

    cancellable = Some(
      Akka.system.scheduler.schedule(0.seconds, 5.minutes) {
        authTokenStore.deleteExpiredTokens()
      }
    )
  }
}
| studiocredo/ticket-reservation | app/be/studiocredo/auth/Tokens.scala | Scala | apache-2.0 | 3,763 |
package skinny.micro.implicits
import scala.language.implicitConversions
import scala.util.control.Exception.allCatch
// Object form of the trait, importable without mixing the trait in.
object TypeConverterSupport extends TypeConverterSupport

/**
 * Support types and implicits for [[TypeConverter]].
 */
trait TypeConverterSupport {

  /** Lifts a possibly-throwing function into a TypeConverter: any exception
    * thrown by `f` becomes None. */
  implicit def safe[S, T](f: S => T): TypeConverter[S, T] = new TypeConverter[S, T] {
    def apply(s: S): Option[T] = allCatch opt f(s)
  }

  /**
   * Implicitly converts an `S => Option[T]` function into a
   * `TypeConverter[S, T]`. Exceptions thrown by `f` are swallowed to None.
   */
  implicit def safeOption[S, T](f: S => Option[T]): TypeConverter[S, T] = new TypeConverter[S, T] {
    def apply(v1: S): Option[T] = allCatch.withApply(_ => None)(f(v1))
  }
}
| xerial/skinny-micro | micro/src/main/scala/skinny/micro/implicits/TypeConverterSupport.scala | Scala | bsd-2-clause | 686 |
import com.amazonaws.services.{ s3 => aws }
/** Converts the AWS SDK owner model into this library's wrapper. */
object Owner {
  def apply(o: aws.model.Owner) = new Owner(o.getId, o.getDisplayName)
}

// Extends the SDK model class, so values stay usable wherever the SDK's
// Owner type is expected.
case class Owner(id: String, displayName: String) extends aws.model.Owner(id, displayName)
| hirokikonishi/awscala | aws/s3/src/main/scala/Owner.scala | Scala | apache-2.0 | 226 |
package model.domain
import org.scalatest.{FlatSpec, Matchers}
/** Placeholder spec for model.domain products — no tests implemented yet. */
class ProductTests extends FlatSpec with Matchers {
}
| alexflav23/exercises | rps/test/model/domain/ProductTests.scala | Scala | apache-2.0 | 120 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.engine.output.task
import com.bwsw.sj.common.dal.model.stream.StreamDomain
import com.bwsw.sj.common.si.model.instance.OutputInstance
import com.bwsw.sj.common.engine.core.environment.{EnvironmentManager, OutputEnvironmentManager}
import com.bwsw.sj.common.engine.core.managment.TaskManager
import com.bwsw.sj.common.engine.core.output.OutputStreamingExecutor
import scaldi.Injector
import scala.collection.mutable
/**
 * Class allows to manage an environment of output streaming task
 *
 * @author Kseniya Tomskikh
 */
class OutputTaskManager(implicit injector: Injector) extends TaskManager {
  // Narrowed view of the generic `instance`; fails fast (ClassCastException)
  // if this manager is wired with a non-output instance.
  val outputInstance: OutputInstance = instance.asInstanceOf[OutputInstance]
  // Input streams with their assigned partition numbers, per execution plan.
  val inputs: mutable.Map[StreamDomain, Array[Int]] = getInputs(outputInstance.executionPlan)

  require(numberOfAgentsPorts >= 1, "Not enough ports for t-stream consumers. One or more ports are required")

  /** Reflectively instantiates the module's executor class, passing it the
    * given environment manager (the class must expose a single
    * OutputEnvironmentManager constructor). */
  def getExecutor(environmentManager: EnvironmentManager): OutputStreamingExecutor[AnyRef] = {
    logger.debug(s"Task: $taskName. Start loading of executor class from module jar.")
    val executor = executorClass.getConstructor(classOf[OutputEnvironmentManager])
      .newInstance(environmentManager)
      .asInstanceOf[OutputStreamingExecutor[AnyRef]]
    logger.debug(s"Task: $taskName. Create an instance of executor class.")
    executor
  }
}
| bwsw/sj-platform | core/sj-output-streaming-engine/src/main/scala/com/bwsw/sj/engine/output/task/OutputTaskManager.scala | Scala | apache-2.0 | 2,167 |
package spire
package benchmark
import scala.util.Random
import Random._
import spire.implicits._
import com.google.caliper.Runner
import com.google.caliper.SimpleBenchmark
import com.google.caliper.Param
import java.lang.Math
import java.math.BigInteger
import java.lang.Long.numberOfTrailingZeros
// Caliper entry point.
object GcdBenchmarks extends MyRunner(classOf[GcdBenchmarks])

/** Compares Euclidean vs. binary GCD over 200k random Longs. Each timed run
  * XOR-folds the per-pair gcds into one value (presumably so the JIT cannot
  * discard the computation — typical benchmark pattern; confirm). */
class GcdBenchmarks extends MyBenchmark {
  var longs: Array[Long] = null
  var bigs: Array[BigInteger] = null

  override def setUp(): Unit = {
    longs = init(200000)(nextLong)
    bigs = init(200000)(new BigInteger(nextLong.toString))
  }

  def timeXorEuclidGcdLong(reps:Int) = run(reps)(xorEuclidGcdLong(longs))
  def timeXorBinaryGcdLong(reps:Int) = run(reps)(xorBinaryGcdLong(longs))
  //def timeXorBuiltinGcdBigInteger(reps:Int) = run(reps)(xorBuiltinGcdBigInteger(bigs))

  // XOR of gcd over consecutive pairs, Euclid variant.
  def xorEuclidGcdLong(data:Array[Long]):Long = {
    var t = 0L
    var i = 0
    val len = data.length - 1
    while (i < len) {
      t ^= euclidGcdLong(data(i), data(i + 1))
      i += 1
    }
    t
  }

  // XOR of gcd over consecutive pairs, binary (Stein) variant.
  def xorBinaryGcdLong(data:Array[Long]):Long = {
    var t = 0L
    var i = 0
    val len = data.length - 1
    while (i < len) {
      t ^= binaryGcdLong(data(i), data(i + 1))
      i += 1
    }
    t
  }

  def xorBuiltinGcdBigInteger(data:Array[BigInteger]):BigInteger = {
    var t = BigInteger.ZERO
    var i = 0
    val len = data.length - 1
    while (i < len) {
      t = t.xor(data(i).gcd(data(i + 1)))
      i += 1
    }
    t
  }

  // Classic Euclidean algorithm. NOTE: Math.abs(Long.MinValue) is still
  // negative, so results for Long.MinValue inputs are not meaningful.
  @tailrec
  final def euclidGcdLong(x: Long, y: Long): Long = {
    if (y == 0L) Math.abs(x) else euclidGcdLong(y, x % y)
  }

  // Binary (Stein) GCD: strip common factors of two, then subtract.
  // Same Long.MinValue caveat as above (Math.abs overflow).
  def binaryGcdLong(_x: Long, _y: Long): Long = {
    if (_x == 0L) return _y
    if (_y == 0L) return _x

    var x = Math.abs(_x)
    var xz = numberOfTrailingZeros(x)
    x >>= xz

    var y = Math.abs(_y)
    var yz = numberOfTrailingZeros(y)
    y >>= yz

    while (x != y) {
      if (x > y) {
        x -= y
        x >>= numberOfTrailingZeros(x)
      } else {
        y -= x
        y >>= numberOfTrailingZeros(y)
      }
    }

    // Re-apply the smaller common power of two.
    if (xz < yz) x << xz else x << yz
  }
}
| tixxit/spire | benchmark/src/main/scala/spire/benchmark/GcdBenchmarks.scala | Scala | mit | 2,114 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs105.boxes
import uk.gov.hmrc.ct.accounts.frs105.retriever.Frs105AccountsBoxRetriever
import uk.gov.hmrc.ct.box.ValidatableBox._
import uk.gov.hmrc.ct.box._
/** FRS 105 "commitments by way of guarantee" free-text note.
  *
  * Validation, relative to the AC7991 boolean box (presumably "guarantee
  * commitments exist" — confirm against AC7991's definition):
  *  - the note must be absent when AC7991 is false;
  *  - when AC7991 is true the note is mandatory, limited to
  *    StandardCohoTextFieldLimit characters, and must not contain characters
  *    Companies House rejects.
  */
case class AC7995(value: Option[String]) extends CtBoxIdentifier(name = "Commitments by way of guarantee note") with CtOptionalString
  with Input
  with SelfValidatableBox[Frs105AccountsBoxRetriever, Option[String]] {

  override def validate(boxRetriever: Frs105AccountsBoxRetriever) = {
    import boxRetriever._
    collectErrors (
      cannotExistIf(value.nonEmpty && ac7991().isFalse),
      failIf (boxRetriever.ac7991().isTrue) (
        collectErrors (
          validateStringAsMandatory(),
          validateOptionalStringByLength(1, StandardCohoTextFieldLimit),
          validateCoHoStringReturnIllegalChars()
        )
      )
    )
  }
}
| pncampbell/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs105/boxes/AC7995.scala | Scala | apache-2.0 | 1,449 |
/* Copyright 2009-2021 EPFL, Lausanne */
import stainless.lang._
/** Stainless verification benchmark from the `invalid` suite: `failling_1`
  * claims `positive(list) == forall(list, gt(0))` for every suffix, and the
  * verifier is expected to find a counterexample. Do not "fix" the property —
  * being disprovable is the point of the benchmark. */
object Lists {
  sealed abstract class List[T]
  case class Cons[T](head: T, tail: List[T]) extends List[T]
  case class Nil[T]() extends List[T]

  // True iff f holds for every element.
  def forall[T](list: List[T], f: T => Boolean): Boolean = list match {
    case Cons(head, tail) => f(head) && forall(tail, f)
    case Nil() => true
  }

  // True iff no element is negative — note that 0 is accepted here, while
  // gt(0) rejects it; a list containing 0 refutes positive_lemma.
  def positive(list: List[Int]): Boolean = list match {
    case Cons(head, tail) => if (head < 0) false else positive(tail)
    case Nil() => true
  }

  def gt(i: Int): Int => Boolean = x => x > i

  // The (false) equivalence the benchmark asserts.
  def positive_lemma(list: List[Int]): Boolean = {
    positive(list) == forall(list, gt(0))
  }

  def failling_1(list: List[Int]): Boolean = {
    list match {
      case Nil() => positive_lemma(list)
      case Cons(head, tail) => positive_lemma(list) && failling_1(tail)
    }
  }.holds
}
// vim: set ts=4 sw=4 et:
| epfl-lara/stainless | frontends/benchmarks/verification/invalid/Lists.scala | Scala | apache-2.0 | 890 |
package org.jetbrains.plugins.scala.highlighter.usages
import com.intellij.codeInsight.highlighting.{HighlightUsagesHandlerBase, HighlightUsagesHandlerFactory}
import com.intellij.openapi.editor.Editor
import com.intellij.psi.{PsiElement, PsiFile}
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScCaseClause
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScVariableDefinition, ScPatternDefinition, ScFunction, ScFunctionDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
import com.intellij.codeInsight.TargetElementUtilBase
/**
* User: Alexander Podkhalyuzin
* Date: 22.12.2009
*/
class ScalaHighlightUsagesHandlerFactory extends HighlightUsagesHandlerFactory {
/** Picks a highlight-usages handler based on the Scala keyword/token at the
  * caret: `return`/`def` highlight a function's exit points; `val`/`var`/
  * `case`/`match`/`try`/`for`/`if`/`=>` highlight the relevant expression's
  * result; class/trait/object keywords highlight primary-constructor
  * expressions. Returns null (no special handling) for anything else or for
  * non-Scala files. */
def createHighlightUsagesHandler(editor: Editor, file: PsiFile): HighlightUsagesHandlerBase[_ <: PsiElement] = {
  if (!file.isInstanceOf[ScalaFile]) return null
  val offset = TargetElementUtilBase.adjustOffset(file, editor.getDocument, editor.getCaretModel.getOffset)
  val element: PsiElement = file.findElementAt(offset)
  if (element == null || element.getNode == null) return null
  element.getNode.getElementType match {
    case ScalaTokenTypes.kRETURN => {
      val fun = PsiTreeUtil.getParentOfType(element, classOf[ScFunctionDefinition])
      if (fun != null) return new ScalaHighlightExitPointsHandler(fun, editor, file, element)
    }
    case ScalaTokenTypes.kDEF => {
      // Only function *definitions* (with a body) have exit points.
      val fun = PsiTreeUtil.getParentOfType(element, classOf[ScFunction])
      fun match {
        case d: ScFunctionDefinition => return new ScalaHighlightExitPointsHandler(d, editor, file, element)
        case _ =>
      }
    }
    case ScalaTokenTypes.kVAL => {
      // Restricted to a single simple pattern (e.g. `val x = expr`).
      PsiTreeUtil.getParentOfType(element, classOf[ScPatternDefinition]) match {
        case pattern @ ScPatternDefinition.expr(expr) if pattern.pList.allPatternsSimple && pattern.pList.patterns.length == 1 =>
          return new ScalaHighlightExprResultHandler(expr, editor, file, element)
        case _ =>
      }
    }
    case ScalaTokenTypes.kVAR => {
      // NOTE(review): matches ScPatternDefinition, mirroring the kVAL case,
      // even though the parent lookup targets ScVariableDefinition.
      PsiTreeUtil.getParentOfType(element, classOf[ScVariableDefinition]) match {
        case pattern @ ScPatternDefinition.expr(expr) if pattern.pList.allPatternsSimple && pattern.pList.patterns.length == 1 =>
          return new ScalaHighlightExprResultHandler(expr, editor, file, element)
        case _ =>
      }
    }
    case ScalaTokenTypes.kCASE => {
      // The *Nullable suffix marks values that may be null (PSI lookups);
      // Option(...) guards below convert them safely.
      val caseClauseNullable = PsiTreeUtil.getParentOfType(element, classOf[ScCaseClause])
      for {
        cc <- Option(caseClauseNullable)
        expr <- cc.expr
      } {
        return new ScalaHighlightExprResultHandler(expr, editor, file, element)
      }
    }
    case ScalaTokenTypes.kMATCH => {
      val matchStmtNullable = PsiTreeUtil.getParentOfType(element, classOf[ScMatchStmt])
      Option(matchStmtNullable) match {
        case Some(matchStmt) =>
          return new ScalaHighlightExprResultHandler(matchStmt, editor, file, element)
        case _ =>
      }
    }
    case ScalaTokenTypes.kTRY => {
      val tryStmtNullable = PsiTreeUtil.getParentOfType(element, classOf[ScTryStmt])
      Option(tryStmtNullable) match {
        case Some(tryStmt) =>
          return new ScalaHighlightExprResultHandler(tryStmt, editor, file, element)
        case _ =>
      }
    }
    case ScalaTokenTypes.kFOR => {
      // Only `for ... yield` produces a result worth highlighting.
      val forStmtNullable = PsiTreeUtil.getParentOfType(element, classOf[ScForStatement])
      for {
        forStmt <- Option(forStmtNullable)
        if forStmt.isYield
        body <- forStmt.body
      } {
        return new ScalaHighlightExprResultHandler(body, editor, file, element)
      }
    }
    case ScalaTokenTypes.kIF => {
      val ifStmtNullable = PsiTreeUtil.getParentOfType(element, classOf[ScIfStmt])
      for {
        ifStmt <- Option(ifStmtNullable)
      } {
        return new ScalaHighlightExprResultHandler(ifStmt, editor, file, element)
      }
    }
    case ScalaTokenTypes.tFUNTYPE => {
      // Caret on the `=>` of a function literal: highlight its result expr.
      val funcExprNullable = PsiTreeUtil.getParentOfType(element, classOf[ScFunctionExpr])
      for {
        funcExpr <- Option(funcExprNullable)
        resultExpr <- funcExpr.result
      } {
        return new ScalaHighlightExprResultHandler(resultExpr, editor, file, element)
      }
    }
    case ScalaTokenTypes.kCLASS | ScalaTokenTypes.kTRAIT | ScalaTokenTypes.kOBJECT => {
      val templateDefOpt = PsiTreeUtil.getParentOfType(element, classOf[ScTemplateDefinition])
      for {
        templateDef <- Option(templateDefOpt)
      } {
        return new ScalaHighlightPrimaryConstructorExpressionsHandler(templateDef, editor, file, element)
      }
    }
    case _ =>
  }
  // No recognized token: let the platform's default handling apply.
  null
}
} | consulo/consulo-scala | src/org/jetbrains/plugins/scala/highlighter/usages/ScalaHighlightUsagesHandlerFactory.scala | Scala | apache-2.0 | 5,070 |
package k2b6s9j.singingKIA.Songs
/** Placeholder song object — no behavior implemented yet. */
object SexyAndIKnowIt {
}
| k2b6s9j/singingKIA | src/main/scala/k2b6s9j/singingKIA/Songs/SexyAndIKnowIt.scala | Scala | mit | 61 |
package app.circumstances
import app.FunctionalTestCommon
import utils.LightFakeApplication
import utils.pageobjects.{Page, XmlPage, TestData, PageObjects}
import utils.pageobjects.circumstances.start_of_process.GReportChangesPage
import utils.pageobjects.xml_validation.{XMLCircumstancesBusinessValidation, XMLBusinessValidation}
import utils.WithJsBrowser
/** Drives the circumstances claim flow with Test Case 36's CSV data and,
  * when the run ends on the generated XML page, validates that XML against
  * the circumstances business rules. Any other final page is only printed
  * for diagnosis (no assertion is made in that branch). */
class FunctionalTestCase36Spec extends FunctionalTestCommon {

  isolated

  section("functional")
  "The application Circumstances" should {
    "Successfully run Circumstances Test Case 36 for not started employment" in new WithJsBrowser with PageObjects {
      val page = GReportChangesPage(context)
      // Scenario inputs come from the checked-in CSV fixture.
      val circs = TestData.readTestDataFromFile("/functional_scenarios/circumstances/TestCase36.csv")
      page goToThePage()

      val lastPage = page runClaimWith(circs)

      lastPage match {
        case p: XmlPage => {
          val validator: XMLBusinessValidation = new XMLCircumstancesBusinessValidation
          validateAndPrintErrors(p, circs, validator) should beTrue
        }
        case p: Page => println(p.source)
      }
    }
  }
  section("functional")
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/test/app/circumstances/FunctionalTestCase36Spec.scala | Scala | mit | 1,143 |
package com.twitter.finagle
import com.twitter.util.{Await, Future}
import org.junit.runner.RunWith
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.FunSuite
import org.scalatest.junit.{JUnitRunner, AssertionsForJUnit}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
@RunWith(classOf[JUnitRunner])
class FailureTest extends FunSuite with AssertionsForJUnit with GeneratorDrivenPropertyChecks {
  val exc = Gen.oneOf[Throwable](
    null,
    new Exception("first"),
    new Exception("second"))

  val flag = Gen.oneOf(
    0L,
    Failure.Restartable,
    Failure.Interrupted,
    Failure.Wrapped,
    Failure.Naming)

  // Pairs of two *distinct* flags OR-ed together.
  val flag2 = for (f1 <- flag; f2 <- flag if f1 != f2) yield f1|f2

  test("simple failures with a cause") {
    val why = "boom!"
    val exc = new Exception(why)
    val failure = Failure(exc)
    val Failure(Some(`exc`)) = failure
    assert(failure.why == why)
  }

  test("equality") {
    // Bug fix: the forAll body used to be a bare Boolean expression, which
    // ScalaTest's GeneratorDrivenPropertyChecks discards — the property was
    // never actually checked. Wrapping it in assert() makes failures fail.
    forAll(flag2) { f =>
      val e1, e2 = new Exception
      assert(
        Failure(e1, f) == Failure(e1, f) &&
        Failure(e1, f) != Failure(e2, f) &&
        Failure(e1, f) != Failure(e1, ~f) &&
        Failure(e1, f).hashCode == Failure(e1, f).hashCode)
    }
  }

  test("flags") {
    val e = new Exception
    for (flags <- Seq(flag, flag2)) {
      forAll(flags.suchThat(_!=0)) { f =>
        // Same fix as "equality": assert the property instead of computing
        // and silently dropping a Boolean.
        assert(
          Failure(e, f).isFlagged(f) &&
          Failure(e).flagged(f) == Failure(e, f) &&
          Failure(e, f) != Failure(e, f).unflagged(f) &&
          Failure(e, f).isFlagged(f) &&
          !Failure(e, 0).isFlagged(f))
      }
    }
  }

  test("Failure.adapt(Failure)") {
    val parent = Failure("sadface", Failure.Restartable)

    val f = Failure.adapt(parent, Failure.Interrupted)
    assert(f.flags == (Failure.Restartable|Failure.Interrupted))
    assert(f.getCause == parent)
    assert(f.getMessage == "sadface")
    assert(f != parent)
  }

  test("Failure.adapt(Throwable)") {
    val parent = new Exception("sadface")

    val f = Failure.adapt(parent, Failure.Interrupted)
    assert(f.flags == Failure.Interrupted)
    assert(f.isFlagged(Failure.Interrupted))
    assert(f.getCause == parent)
    assert(f.getMessage == "sadface")
    assert(f != parent)
  }

  test("Failure.show") {
    // show() strips the Restartable flag and unwraps Wrapped failures.
    assert(Failure("ok", Failure.Restartable|Failure.Interrupted).show == Failure("ok", Failure.Interrupted))
    val inner = new Exception
    assert(Failure.wrap(inner).show == inner)
    assert(Failure.wrap(Failure.wrap(inner)).show == inner)
  }

  test("Invalid Failure") {
    intercept[IllegalArgumentException] {
      Failure(null: Throwable, Failure.Wrapped)
    }
  }

  test("Failure.ProcessFailures") {
    val echo = Service.mk((exc: Throwable) => Future.exception(exc))
    val service = (new Failure.ProcessFailures) andThen echo
    def assertFail(exc: Throwable, expect: Throwable) = {
      val exc1 = intercept[Throwable] { Await.result(service(exc)) }
      assert(exc1 == expect)
    }

    assertFail(Failure("ok", Failure.Restartable), Failure("ok"))
    assertFail(Failure("ok"), Failure("ok"))
    assertFail(Failure("ok", Failure.Interrupted), Failure("ok", Failure.Interrupted))
    assertFail(Failure("ok", Failure.Interrupted|Failure.Restartable), Failure("ok", Failure.Interrupted))

    val inner = new Exception
    assertFail(Failure.wrap(inner), inner)
  }
}
| cogitate/twitter-finagle-uuid | finagle-core/src/test/scala/com/twitter/finagle/FailureTest.scala | Scala | apache-2.0 | 3,340 |
/* Copyright 2009-2018 EPFL, Lausanne */
package inox
package ast
import org.scalatest._
class ExprOpsSuite extends FunSuite {
import inox.trees._
import inox.trees.exprOps._
implicit val ctx0 = TestContext.empty
// Fold step: concatenation of sub-results, plus this node's name when it is
// a variable.
private def foldConcatNames(e: Expr, subNames: Seq[String]): String = {
  val joined = subNames.mkString
  e match {
    case Variable(id, _, _) => joined + id.name
    case _ => joined
  }
}
// Fold step: total variable count of the subtrees, plus one when this node
// is itself a variable.
private def foldCountVariables(e: Expr, subCounts: Seq[Int]): Int = {
  val below = subCounts.sum
  e match {
    case Variable(_, _, _) => below + 1
    case _ => below
  }
}
// Shared leaf fixtures: two 32-bit ints, two mathematical integers and three
// booleans, used throughout the traversal/fold tests below.
val a = Variable.fresh("a", Int32Type())
val b = Variable.fresh("b", Int32Type())
val x = Variable.fresh("x", IntegerType())
val y = Variable.fresh("y", IntegerType())
val p = Variable.fresh("p", BooleanType())
val q = Variable.fresh("q", BooleanType())
val r = Variable.fresh("r", BooleanType())
// fold(...) over a lone variable visits exactly that node.
test("foldRight works on single variable expression") {
  assert(fold(foldConcatNames)(x) === x.id.name)
  assert(fold(foldConcatNames)(y) === y.id.name)
  assert(fold(foldCountVariables)(x) === 1)
  assert(fold(foldCountVariables)(y) === 1)
}

// One level deep: operands are folded left-to-right (name order matters).
test("foldRight works on simple expressions without nested structure") {
  assert(fold(foldConcatNames)(And(p, q)) === (p.id.name + q.id.name))
  assert(fold(foldConcatNames)(And(q, p)) === (q.id.name + p.id.name))
  assert(fold(foldConcatNames)(And(Seq(p, p, p, q, r))) ===
    (p.id.name + p.id.name + p.id.name + q.id.name + r.id.name))
  assert(fold(foldConcatNames)(Or(Seq(p, p, p, q, r))) ===
    (p.id.name + p.id.name + p.id.name + q.id.name + r.id.name))
  assert(fold(foldConcatNames)(Plus(x, y)) === (x.id.name + y.id.name))
  assert(fold(foldCountVariables)(And(p, q)) === 2)
  assert(fold(foldCountVariables)(And(q, p)) === 2)
  assert(fold(foldCountVariables)(And(p, p)) === 2)
  assert(fold(foldCountVariables)(And(Seq(p, p, p, q, r))) === 5)
  assert(fold(foldCountVariables)(Or(Seq(p, p, p, q, r))) === 5)
}

// Nested operators: folding recurses into subtrees in order.
test("foldRight works on simple structure of nested expressions") {
  assert(fold(foldConcatNames)(And(And(p, q), r)) === (p.id.name + q.id.name + r.id.name))
  assert(fold(foldConcatNames)(And(p, Or(q, r))) === (p.id.name + q.id.name + r.id.name))
}
private class LocalCounter {
private var c = 0
def inc() = c += 1
def get = c
}
test("preTraversal works on a single node") {
val c = new LocalCounter
preTraversal(e => c.inc())(x)
assert(c.get === 1)
preTraversal(e => c.inc())(y)
assert(c.get === 2)
var names: List[String] = List()
preTraversal({
case Variable(id, _, _) => names ::= id.name
case _ => ()
})(x)
assert(names === List(x.id.name))
}
test("preTraversal correctly applies on every nodes on a simple expression") {
val c1 = new LocalCounter
preTraversal(e => c1.inc())(And(Seq(p, q, r)))
assert(c1.get === 4)
val c2 = new LocalCounter
preTraversal(e => c2.inc())(Or(p, q))
assert(c2.get === 3)
preTraversal(e => c2.inc())(Plus(x, y))
assert(c2.get === 6)
}
test("preTraversal visits children from left to right") {
var names: List[String] = List()
preTraversal({
case Variable(id, _, _) => names ::= id.name
case _ => ()
})(And(List(p, q, r)))
assert(names === List(r.id.name, q.id.name, p.id.name))
}
test("preTraversal works on nexted expressions") {
val c = new LocalCounter
preTraversal(e => c.inc())(And(p, And(q, r)))
assert(c.get === 5)
}
test("preTraversal traverses in pre-order") {
var nodes: List[Expr] = List()
val node = And(List(p, q, r))
preTraversal(e => nodes ::= e)(node)
assert(nodes === List(r, q, p, node))
}
test("postTraversal works on a single node") {
val c = new LocalCounter
postTraversal(e => c.inc())(x)
assert(c.get === 1)
postTraversal(e => c.inc())(y)
assert(c.get === 2)
var names: List[String] = List()
postTraversal({
case Variable(id, _, _) => names ::= id.name
case _ => ()
})(x)
assert(names === List(x.id.name))
}
test("postTraversal correctly applies on every nodes on a simple expression") {
val c1 = new LocalCounter
postTraversal(e => c1.inc())(And(Seq(p, q, r)))
assert(c1.get === 4)
val c2 = new LocalCounter
postTraversal(e => c2.inc())(Or(p, q))
assert(c2.get === 3)
postTraversal(e => c2.inc())(Plus(x, y))
assert(c2.get === 6)
}
test("postTraversal visits children from left to right") {
var names: List[String] = List()
postTraversal({
case Variable(id, _, _) => names ::= id.name
case _ => ()
})(And(List(p, q, r)))
assert(names === List(r.id.name, q.id.name, p.id.name))
}
test("postTraversal works on nexted expressions") {
val c = new LocalCounter
postTraversal(e => c.inc())(And(p, And(q, r)))
assert(c.get === 5)
}
test("postTraversal traverses in pre-order") {
var nodes: List[Expr] = List()
val node = And(List(p, q, r))
postTraversal(e => nodes ::= e)(node)
assert(nodes === List(node, r, q, p))
}
/**
* If the formula consist of some top level AND, find a top level
* Equals and extract it, return the remaining formula as well
*/
def extractEquals(expr: Expr): (Option[Equals], Expr) = expr match {
case And(es) =>
// OK now I'm just messing with you.
val (r, nes) = es.foldLeft[(Option[Equals],Seq[Expr])]((None, Seq())) {
case ((None, nes), eq @ Equals(_,_)) => (Some(eq), nes)
case ((o, nes), e) => (o, e +: nes)
}
(r, And(nes.reverse))
case e => (None, e)
}
//def checkEq(ctx: InoxContext)(e1: Expr, e2: Expr): Unit = {
// val e = evaluators.RecursiveEvaluator.default(InoxProgram(ctx, new Symbols(Map.empty, Map.empty)))
// val r1 = e.eval(e1)
// val r2 = e.eval(e2)
// assert(r1 === r2, s"'$e1' != '$e2' ('$r1' != '$r2')")
//}
//test("simplifyArithmetic") { ctx =>
// val e1 = Plus(IntegerLiteral(3), IntegerLiteral(2))
// checkEq(ctx)(e1, simplifyArithmetic(e1))
// val e2 = Plus(x, Plus(IntegerLiteral(3), IntegerLiteral(2)))
// checkEq(ctx)(e2, simplifyArithmetic(e2))
// val e3 = Minus(IntegerLiteral(3), IntegerLiteral(2))
// checkEq(ctx)(e3, simplifyArithmetic(e3))
// val e4 = Plus(x, Minus(IntegerLiteral(3), IntegerLiteral(2)))
// checkEq(ctx)(e4, simplifyArithmetic(e4))
// val e5 = Plus(x, Minus(x, IntegerLiteral(2)))
// checkEq(ctx)(e5, simplifyArithmetic(e5))
// val e6 = Times(IntegerLiteral(9), Plus(Division(x, IntegerLiteral(3)), Division(x, IntegerLiteral(6))))
// checkEq(ctx)(e6, simplifyArithmetic(e6))
//}
test("extractEquals") {
val eq = Equals(a, b)
val lt1 = LessThan(a, b)
val lt2 = LessThan(b, a)
val lt3 = LessThan(x, y)
val f1 = And(Seq(eq, lt1, lt2, lt3))
val (eq1, r1) = extractEquals(f1)
assert(eq1 != None)
assert(eq1.get === eq)
assert(extractEquals(r1)._1 === None)
val f2 = And(Seq(lt1, lt2, eq, lt3))
val (eq2, r2) = extractEquals(f2)
assert(eq2 != None)
assert(eq2.get === eq)
assert(extractEquals(r2)._1 === None)
val f3 = And(Seq(lt1, eq, lt2, lt3, eq))
val (eq3, r3) = extractEquals(f3)
assert(eq3 != None)
assert(eq3.get === eq)
val (eq4, r4) = extractEquals(r3)
assert(eq4 != None)
assert(eq4.get === eq)
assert(extractEquals(r4)._1 === None)
}
test("pre and post traversal") {
val expr = Plus(IntegerLiteral(1), Minus(IntegerLiteral(2), IntegerLiteral(3)))
var res = ""
def f(e: Expr): Unit = e match {
case IntegerLiteral(i) => res += i
case _ : Plus => res += "P"
case _ : Minus => res += "M"
}
preTraversal(f)(expr)
assert(res === "P1M23")
res = ""
postTraversal(f)(expr)
assert(res === "123MP")
}
test("pre- and postMap") {
val expr = Plus(IntegerLiteral(1), Minus(IntegerLiteral(2), IntegerLiteral(3)))
def op(e : Expr ) = e match {
case Minus(IntegerLiteral(two), e2) if two == BigInt(2) => Some(IntegerLiteral(2))
case IntegerLiteral(one) if one == BigInt(1) => Some(IntegerLiteral(2))
case IntegerLiteral(two) if two == BigInt(2) => Some(IntegerLiteral(42))
case _ => None
}
assert( preMap(op, false)(expr) == Plus(IntegerLiteral(2), IntegerLiteral(2)) )
assert( preMap(op, true )(expr) == Plus(IntegerLiteral(42), IntegerLiteral(42)) )
assert( postMap(op, false)(expr) == Plus(IntegerLiteral(2), Minus(IntegerLiteral(42), IntegerLiteral(3))) )
assert( postMap(op, true)(expr) == Plus(IntegerLiteral(42), Minus(IntegerLiteral(42), IntegerLiteral(3))) )
}
test("simplestValue") {
val syms = new Symbols(Map.empty, Map.empty)
object program extends Program {
val trees: inox.trees.type = inox.trees
val symbols: syms.type = syms
val ctx = ctx0
}
import program._
import program.trees._
import program.symbols._
val types = Seq(BooleanType(),
Int32Type(),
IntegerType(),
SetType(BooleanType()),
TupleType(Seq(BooleanType(), BooleanType())),
MapType(Int32Type(), BooleanType()))
for (t <- types) {
val v = simplestValue(t)
assert(isSubtypeOf(v.getType, t), "SimplestValue of "+t+": "+v+":"+v.getType)
}
}
test("preMapWithContext") {
val expr = Plus(IntegerLiteral(1), Minus(IntegerLiteral(2), IntegerLiteral(3)))
def op(e : Expr, set: Set[Int]): (Option[Expr], Set[Int]) = e match {
case Minus(IntegerLiteral(two), e2) if two == BigInt(2) => (Some(IntegerLiteral(2)), set)
case IntegerLiteral(one) if one == BigInt(1) => (Some(IntegerLiteral(2)), set)
case IntegerLiteral(two) if two == BigInt(2) => (Some(IntegerLiteral(42)), set)
case _ => (None, set)
}
assert(preMapWithContext(op, false)(expr, Set()) === Plus(IntegerLiteral(2), IntegerLiteral(2)))
assert(preMapWithContext(op, true)(expr, Set()) === Plus(IntegerLiteral(42), IntegerLiteral(42)))
val expr2 = Let(x.toVal, IntegerLiteral(1), Let(y.toVal, IntegerLiteral(2), Plus(x, y)))
def op2(e: Expr, bindings: Map[Variable, BigInt]): (Option[Expr], Map[Variable, BigInt]) = e match {
case Let(vd, IntegerLiteral(v), body) => (None, bindings + (vd.toVariable -> v))
case v: Variable => (bindings.get(v).map(value => IntegerLiteral(value)), bindings)
case _ => (None, bindings)
}
assert(preMapWithContext(op2, false)(expr2, Map()) === Let(
x.toVal, IntegerLiteral(1), Let(y.toVal, IntegerLiteral(2), Plus(IntegerLiteral(1), IntegerLiteral(2)))))
def op3(e: Expr, bindings: Map[Variable, BigInt]): (Option[Expr], Map[Variable, BigInt]) = e match {
case Let(vd, IntegerLiteral(v), body) => (Some(body), bindings + (vd.toVariable -> v))
case v: Variable => (bindings.get(v).map(v => IntegerLiteral(v)), bindings)
case _ => (None, bindings)
}
assert(preMapWithContext(op3, true)(expr2, Map()) === Plus(IntegerLiteral(1), IntegerLiteral(2)))
val expr4 = Plus(Let(y.toVal, IntegerLiteral(2), y),
Let(y.toVal, IntegerLiteral(4), y))
def op4(e: Expr, bindings: Map[Variable, BigInt]): (Option[Expr], Map[Variable, BigInt]) = e match {
case Let(vd, IntegerLiteral(v), body) =>
(Some(body), if (bindings.contains(vd.toVariable)) bindings else (bindings + (vd.toVariable -> v)))
case v: Variable => (bindings.get(v).map(v => IntegerLiteral(v)), bindings)
case _ => (None, bindings)
}
assert(preMapWithContext(op4, true)(expr4, Map()) === Plus(IntegerLiteral(2), IntegerLiteral(4)))
}
}
| romac/inox | src/test/scala/inox/ast/ExprOpsSuite.scala | Scala | apache-2.0 | 11,737 |
/*
* ____ ____ _____ ____ ___ ____
* | _ \ | _ \ | ____| / ___| / _/ / ___| Precog (R)
* | |_) | | |_) | | _| | | | | /| | | _ Advanced Analytics Engine for NoSQL Data
* | __/ | _ < | |___ | |___ |/ _| | | |_| | Copyright (C) 2010 - 2013 SlamData, Inc.
* |_| |_| \_\ |_____| \____| /__/ \____| All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version
* 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this
* program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.precog.bifrost
package service
import com.precog.common._
import com.precog.yggdrasil._
import com.precog.yggdrasil.table._
import com.precog.yggdrasil.metadata._
import com.precog.yggdrasil.vfs._
import blueeyes.json._
import scalaz._
import scalaz.syntax.monad._
import scalaz.syntax.comonad._
import org.specs2.mutable._
/** Specs for metadata browsing support: listing child paths, reporting path
  * sizes, and describing the structure of stored columns, all run against a
  * stubbed in-memory VFS (`StubVFSMetadata`).
  *
  * @tparam M effect in which metadata is queried; the required `Comonad`
  *           lets the specs extract results synchronously via `copoint`.
  */
abstract class BrowseServiceSpecs[M[+_]](implicit val M: Monad[M] with Comonad[M]) extends Specification {
  // Builds column metadata carrying string-value stats of the given size.
  // NOTE(review): not referenced by any spec below — possibly leftover from
  // an earlier revision; confirm before removing.
  def colSizeMetadata(descriptor: ColumnRef, size: Long): ColumnMetadata = Map(
    descriptor -> Map(StringValueStats -> StringValueStats(size, "a", "z"))
  )
  // Fixture: per-path column sizes. "/foo/bar/" has two columns (50 + 60)
  // so its total size should be reported as 60... see the "size" spec below,
  // which expects 60 for "/foo/bar" — presumably the max, not the sum.
  lazy val projectionMetadata: Map[Path, Map[ColumnRef, Long]] = Map(
    Path("/foo/bar1/baz/quux1") -> Map(ColumnRef(CPath(), CString) -> 10L),
    Path("/foo/bar2/baz/quux1") -> Map(ColumnRef(CPath(), CString) -> 20L),
    Path("/foo/bar2/baz/quux2") -> Map(ColumnRef(CPath(), CString) -> 30L),
    Path("/foo2/bar1/baz/quux1") -> Map(ColumnRef(CPath(), CString) -> 40L),
    Path("/foo/bar/") -> Map(ColumnRef(CPath(".bar"), CLong) -> 50, ColumnRef(CPath(".baz"), CLong) -> 60L)
  )
  val metadata = new StubVFSMetadata[M](projectionMetadata)
  val client = new BrowseSupport(metadata)
  "browse" should {
    "find child paths" in {
      client.browse("", Path("/foo/")).valueOr(e => sys.error(e.toString)).copoint must beLike {
        case JArray(results) => results.map(_ \ "name") must haveTheSameElementsAs(JString("bar/") :: JString("bar1/") :: JString("bar2/") :: Nil)
      }
    }
  }
  "size" should {
    "find correct size for single-column path" in {
      client.size("", Path("/foo/bar1/baz/quux1")).valueOr(e => sys.error(e.toString)).copoint must beLike {
        case JNum(result) => result mustEqual 10
      }
    }
    "find correct size for multi-column path" in {
      client.size("", Path("/foo/bar")).valueOr(e => sys.error(e.toString)).copoint must beLike {
        case JNum(result) => result mustEqual 60
      }
    }
    "find default (0) size for non-existent path" in {
      client.size("", Path("/not/really")).valueOr(e => sys.error(e.toString)).copoint must beLike {
        case JNum(result) => result mustEqual 0
      }
    }
  }
  "structure" should {
    "find correct node information" in {
      client.structure("", Path("/foo/bar"), CPath.Identity).valueOr(e => sys.error(e.toString)).copoint must beLike {
        case result => result must_== JObject("children" -> JArray(JString(".bar") :: JString(".baz") :: Nil), "types" -> JObject())
      }
    }
    "find correct leaf types" in {
      client.structure("", Path("/foo/bar"), CPath("bar")).valueOr(e => sys.error(e.toString)).copoint must beLike {
        case result => result must_== JObject("children" -> JArray(), "types" -> JObject("Number" -> JNum(50)))
      }
    }
    "find default empty result for non-existent path" in {
      client.structure("", Path("/bar/foo"), CPath.Identity).valueOr(e => sys.error(e.toString)).copoint must beLike {
        case result => result must_== JUndefined
      }
    }
  }
}
// Concrete instance running the specs in the strict `Need` comonad.
object BrowseServiceSpecs extends BrowseServiceSpecs[Need]
// vim: set ts=4 sw=4 et:
| precog/platform | bifrost/src/test/scala/com/precog/bifrost/StorageMetadataClientSpecs.scala | Scala | agpl-3.0 | 4,214 |
package lila.game
/** Head-to-head record between two users.
  *
  * Scores are stored multiplied by 10 so that a draw (half a point, i.e.
  * 5 tenths per player) can be represented with integers.
  */
case class Crosstable(
    user1: Crosstable.User,
    user2: Crosstable.User,
    results: List[Crosstable.Result],
    nbGames: Int) {

  import Crosstable.Result

  /** This crosstable, or None when no games have been recorded. */
  def nonEmpty = if (results.nonEmpty) Some(this) else None

  /** Both users, second one first. */
  def users = List(user2, user1)

  /** Id of the leading user, or None when the score is level. */
  def winnerId =
    if (user1.score == user2.score) None
    else if (user1.score > user2.score) Some(user1.id)
    else Some(user2.id)

  def user(id: String) = users.find(u => u.id == id)

  /** Formats a user's score, e.g. "3" or "3½". */
  def showScore(userId: String) = {
    val tenths = user(userId) match {
      case Some(u) => u.score
      case None    => 0
    }
    val half = if (tenths % 10 != 0) "½" else ""
    s"${tenths / 10}$half"
  }

  /** Credits `wins` games to `userId`; None means the games were draws
    * (half a point to each side). */
  def addWins(userId: Option[String], wins: Int) = {
    def delta(u: Crosstable.User): Int = userId match {
      case None                   => wins * 5  // draw: half a point, x10
      case Some(id) if u.id == id => wins * 10 // full point, x10
      case _                      => 0
    }
    copy(
      user1 = user1.copy(score = user1.score + delta(user1)),
      user2 = user2.copy(score = user2.score + delta(user2)))
  }

  /** Reorients the crosstable so that `userId` is user1 when possible. */
  def fromPov(userId: String) =
    if (userId == user2.id) copy(user1 = user2, user2 = user1)
    else this

  lazy val size = results.size

  /** Range of placeholder slots needed to pad the display up to 20 games. */
  def fill = 1 to (20 - size)
}
object Crosstable {

  case class User(id: String, score: Int) // score is x10
  // One game between the two users; winnerId is None for a draw.
  case class Result(gameId: String, winnerId: Option[String])

  // Canonical document key for a pair of users: ids sorted then joined by '/'.
  private[game] def makeKey(u1: String, u2: String): String = List(u1, u2).sorted mkString "/"

  import reactivemongo.bson._
  import lila.db.BSON

  object BSONFields {
    val id = "_id"
    val score1 = "s1"
    val score2 = "s2"
    val results = "r"
    val nbGames = "n"
  }

  // BSON codec. Each result is serialized as the 8-character game id,
  // optionally suffixed with "+" (user1 won) or "-" (user2 won); no suffix
  // means a draw. The two user ids are recovered by splitting the _id key.
  implicit val crosstableBSONHandler = new BSON[Crosstable] {
    import BSONFields._
    def reads(r: BSON.Reader): Crosstable = r str id split '/' match {
      case Array(u1Id, u2Id) => Crosstable(
        // NOTE(review): reads use the literal field names "s1"/"s2" instead
        // of the score1/score2 constants defined in BSONFields above.
        user1 = User(u1Id, r intD "s1"),
        user2 = User(u2Id, r intD "s2"),
        // NOTE(review): the lambda parameter `r` shadows the outer reader `r`;
        // here `r` is the encoded result string.
        results = r.get[List[String]](results).map { r =>
          r drop 8 match {
            case "" => Result(r take 8, none)
            case "+" => Result(r take 8, Some(u1Id))
            case "-" => Result(r take 8, Some(u2Id))
            case _ => sys error s"Invalid result string $r"
          }
        },
        nbGames = r int nbGames)
      case x => sys error s"Invalid crosstable id $x"
    }
    // Encodes a single result relative to user1 (see format note above).
    def writeResult(result: Result, u1: String): String =
      result.gameId + (result.winnerId ?? { w => if (w == u1) "+" else "-" })
    def writes(w: BSON.Writer, o: Crosstable) = BSONDocument(
      id -> makeKey(o.user1.id, o.user2.id),
      score1 -> o.user1.score,
      score2 -> o.user2.score,
      results -> o.results.map { writeResult(_, o.user1.id) },
      nbGames -> w.int(o.nbGames))
  }
}
| danilovsergey/i-bur | modules/game/src/main/Crosstable.scala | Scala | mit | 2,802 |
package com.rklaehn.abc
import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import cats.kernel.instances.all._
import ScalaCollectionConverters._
import scala.collection.immutable.{HashSet, SortedSet}
/** The set-vs-set operations measured by the benchmark, abstracted over the
  * underlying set implementation. Results are `Any` because each
  * implementation returns its own collection type. */
sealed trait SetSetBenchOps {
  def union: Any
  def intersect: Any
  def diff: Any
  def subsetOf: Boolean
  def filter(f: Int => Boolean): Any
}
/** Factory for benchmark fixtures, selecting a set implementation by name. */
object SetSetBenchOps extends BenchUtil {

  /** Builds the two operand sets from `a` and `b` for the requested
    * implementation `kind`. Elements are passed through `mix` (from
    * BenchUtil) before insertion.
    *
    * @param kind one of "hashset", "sortedset", "arrayset", "arrayset2"
    */
  def apply(a: Seq[Int], b: Seq[Int], kind: String) = {
    val a1 = a.map(mix)
    val b1 = b.map(mix)
    kind match {
      case "hashset" => ScalaCollectionBench(HashSet(a1: _*), HashSet(b1: _*))
      case "sortedset" => ScalaCollectionBench(SortedSet(a1: _*), SortedSet(b1: _*))
      case "arrayset" => TypeClassBench(ArraySet(a1: _*), ArraySet(b1: _*))
      case "arrayset2" => ScalaCollectionBench(ArraySet(a1: _*).asCollection, ArraySet(b1: _*).asCollection)
      // Fail fast with a descriptive message rather than an opaque
      // MatchError when an unknown @Param value is configured.
      case other => sys.error(s"Unknown set kind: $other")
    }
  }

  // Runs the operations through the plain scala.collection.Set interface.
  private final case class ScalaCollectionBench(a: Set[Int], b: Set[Int]) extends SetSetBenchOps {
    override def union: Any = a union b
    override def diff: Any = a diff b
    override def subsetOf: Boolean = a subsetOf b
    override def intersect: Any = a intersect b
    override def filter(f: (Int) => Boolean): Any = a filter f
  }

  // Runs the operations through ArraySet's own (type-class based) methods.
  private final case class TypeClassBench(a: ArraySet[Int], b: ArraySet[Int]) extends SetSetBenchOps {
    override def union: Any = a union b
    override def diff: Any = a diff b
    override def subsetOf: Boolean = a subsetOf b
    override def intersect: Any = a intersect b
    override def filter(f: (Int) => Boolean): Any = a filter f
  }
}
/** JMH benchmark comparing binary set operations across implementations. */
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Thread)
class SetSetBench {

  // Number of elements in each operand set.
  @Param(Array("1", "10", "100", "1000", "10000", "100000"))
  var size = 0

  // Fraction of `size` by which the second set's element range is shifted:
  // 0.0 -> identical ranges, 1.0 -> disjoint ranges.
  @Param(Array("0.0", "0.5", "1.0"))
  //  @Param(Array("0.5"))
  var offset = 0.0

  // Which set implementation to benchmark.
  @Param(Array("arrayset", "hashset", "sortedset")) //, "arrayset2"))
  var kind = ""

  // Absolute element-range shift between the two sets (offset * size).
  var k: Int = 0
  var bench: SetSetBenchOps = _

  val shift = 1000000 // so we don't get the cached java.lang.Integer instances

  @Setup
  def setup(): Unit = {
    k = (offset * size).toInt
    bench = SetSetBenchOps(shift until (shift + size), (shift + k) until (shift + k + size), kind)
  }

  @Benchmark
  def union(x: Blackhole): Unit = {
    x.consume(bench.union)
  }

  @Benchmark
  def intersect(x: Blackhole): Unit = {
    x.consume(bench.intersect)
  }

  @Benchmark
  def diff(x: Blackhole): Unit = {
    x.consume(bench.diff)
  }

  @Benchmark
  def subsetOf(x: Blackhole): Unit = {
    x.consume(bench.subsetOf)
  }

  @Benchmark
  def filter(x: Blackhole): Unit = {
    // Keeps roughly the overlapping prefix of the first set.
    x.consume(bench.filter(_ < k + shift))
  }
}
| rklaehn/abc | jmhBenchmarks/src/main/scala/com/rklaehn/abc/SetSetBench.scala | Scala | apache-2.0 | 2,731 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.std
import slamdata.Predef._, BigDecimal.RoundingMode
import quasar.{Data, DataCodec, Qspec, Type}
import quasar.DateArbitrary._
import quasar.frontend.logicalplan._
import java.time._, ZoneOffset.UTC
import scala.collection.Traversable
import scala.math.abs
import scala.util.matching.Regex
import matryoshka.data.Fix
import matryoshka.implicits._
import org.specs2.execute.{Failure, Result}
import org.specs2.matcher.{Expectable, Matcher}
import org.specs2.specification.core.Fragment
import org.scalacheck.{Arbitrary, Gen}
import scalaz._, Scalaz._
/** Abstract spec for the standard library, intended to be implemented for each
* library implementation, of which there are one or more per backend.
*/
abstract class StdLibSpec extends Qspec {
def isPrintableAscii(c: Char): Boolean = c >= '\\u0020' && c <= '\\u007e'
def isPrintableAscii(s: String): Boolean = s.forall(isPrintableAscii)
  /** Matcher comparing `Data` values: numeric values may differ by a small
    * relative error (1e-9); all other values must be exactly equal.
    */
  def beCloseTo(expected: Data): Matcher[Data] = new Matcher[Data] {
    // True when x equals y or their relative difference is below `err`
    // (the denominator is floored at `err`, so comparisons near zero use
    // an absolute tolerance instead).
    def isClose(x: BigDecimal, y: BigDecimal, err: Double): Boolean =
      x == y || ((x - y).abs/(y.abs max err)).toDouble < err

    def apply[S <: Data](s: Expectable[S]) = {
      val v = s.value
      (v, expected) match {
        // Both numeric: compare approximately.
        case (Data.Number(x), Data.Number(exp)) =>
          result(isClose(x, exp, 1e-9),
            s"$x is a Number and matches $exp",
            s"$x is a Number but does not match $exp", s)
        // Anything else: require exact (typeclass) equality.
        case _ =>
          result(Equal[Data].equal(v, expected),
            s"$v matches $expected",
            s"$v does not match $expected", s)
      }
    }
  }
def tests(runner: StdLibTestRunner): Fragment = {
import runner._
implicit val arbBigInt = Arbitrary[BigInt] { runner.intDomain }
implicit val arbBigDecimal = Arbitrary[BigDecimal] { runner.decDomain }
implicit val arbString = Arbitrary[String] { runner.stringDomain }
implicit val arbDate = Arbitrary[LocalDate] { runner.dateDomain }
implicit val arbData = Arbitrary[Data] {
Gen.oneOf(
runner.intDomain.map(Data.Int(_)),
runner.decDomain.map(Data.Dec(_)),
runner.stringDomain.map(Data.Str(_)))
}
def commute(
prg: (Fix[LogicalPlan], Fix[LogicalPlan]) => Fix[LogicalPlan],
arg1: Data, arg2: Data,
expected: Data): Result =
binary(prg, arg1, arg2, expected) and
binary(prg, arg2, arg1, expected)
"StringLib" >> {
import StringLib._
"Concat" >> {
"any strings" >> prop { (str1: String, str2: String) =>
binary(Concat(_, _).embed, Data.Str(str1), Data.Str(str2), Data.Str(str1 + str2))
}
}
// NB: `Like` is simplified to `Search`
"Search" >> {
"find contents within string when case sensitive" >> {
ternary(Search(_, _, _).embed, Data.Str("church"), Data.Str(".*ch.*"), Data.Bool(false), Data.Bool(true)) and
ternary(Search(_, _, _).embed, Data.Str("China"), Data.Str("^Ch.*$"), Data.Bool(false), Data.Bool(true)) and
ternary(Search(_, _, _).embed, Data.Str("matching"), Data.Str(".*ch.*"), Data.Bool(false), Data.Bool(true))
}
"reject a non-matching string when case sensitive" >> {
ternary(Search(_, _, _).embed, Data.Str("church"), Data.Str("^bs.*$"), Data.Bool(false), Data.Bool(false)) and
ternary(Search(_, _, _).embed, Data.Str("china"), Data.Str("^bs.*$"), Data.Bool(false), Data.Bool(false)) and
ternary(Search(_, _, _).embed, Data.Str("matching"), Data.Str(".*bs.*"), Data.Bool(false), Data.Bool(false)) and
ternary(Search(_, _, _).embed, Data.Str("matching"), Data.Str(".*CH.*"), Data.Bool(false), Data.Bool(false))
}
"find contents within string when case insensitive" >> {
ternary(Search(_, _, _).embed, Data.Str("Church"), Data.Str(".*ch.*"), Data.Bool(true), Data.Bool(true)) and
ternary(Search(_, _, _).embed, Data.Str("cHina"), Data.Str("^ch.*$"), Data.Bool(true), Data.Bool(true)) and
ternary(Search(_, _, _).embed, Data.Str("matCHing"), Data.Str(".*ch.*"), Data.Bool(true), Data.Bool(true))
}
"reject a non-matching string when case insensitive" >> {
ternary(Search(_, _, _).embed, Data.Str("Church"), Data.Str("^bs.*$"), Data.Bool(true), Data.Bool(false)) and
ternary(Search(_, _, _).embed, Data.Str("cHina"), Data.Str("^bs.*$"), Data.Bool(true), Data.Bool(false)) and
ternary(Search(_, _, _).embed, Data.Str("matCHing"), Data.Str(".*bs.*"), Data.Bool(true), Data.Bool(false))
}
}
"Length" >> {
"multibyte chars" >> {
unary(Length(_).embed, Data.Str("€1"), Data.Int(2))
}
"any string" >> prop { (str: String) =>
unary(Length(_).embed, Data.Str(str), Data.Int(str.length))
}
}
"Lower" >> {
"any string" >> prop { (str: String) =>
unary(Lower(_).embed, Data.Str(str), Data.Str(str.toLowerCase))
}
}
"Upper" >> {
"any string" >> prop { (str: String) =>
unary(Upper(_).embed, Data.Str(str), Data.Str(str.toUpperCase))
}
}
"Substring" >> {
"simple" >> {
// NB: not consistent with PostgreSQL, which is 1-based for `start`
ternary(Substring(_, _, _).embed, Data.Str("Thomas"), Data.Int(1), Data.Int(3), Data.Str("hom"))
}
"multibyte chars" >> {
ternary(Substring(_, _, _).embed, Data.Str("cafétéria"), Data.Int(3), Data.Int(1), Data.Str("é"))
}
"empty string and any offsets" >> prop { (start0: Int, length0: Int) =>
// restrict the range to something that will actually exercise the behavior
val start = start0 % 1000
val length = length0 % 1000
ternary(Substring(_, _, _).embed, Data.Str(""), Data.Int(start), Data.Int(length), Data.Str(""))
}
"any string with entire range" >> prop { (str: String) =>
ternary(Substring(_, _, _).embed, Data.Str(str), Data.Int(0), Data.Int(str.length), Data.Str(str))
}
"any string with 0 length" >> prop { (str: String, start0: Int) =>
// restrict the range to something that will actually exercise the behavior
val start = start0 % 1000
ternary(Substring(_, _, _).embed, Data.Str(str), Data.Int(start), Data.Int(0), Data.Str(""))
}
"any string and offsets" >> prop { (str: String, start0: Int, length0: Int) =>
// restrict the range to something that will actually exercise the behavior
val start = start0 % 1000
val length = length0 % 1000
// NB: this is the MongoDB behavior, for lack of a better idea
val expected = StringLib.safeSubstring(str, start, length)
ternary(Substring(_, _, _).embed, Data.Str(str), Data.Int(start), Data.Int(length), Data.Str(expected))
}
}
"Split" >> {
"some string" >> {
binary(Split(_, _).embed, Data.Str("some string"), Data.Str(" "), Data.Arr(List("some", "string").map(Data.Str(_))))
}
"some string by itself" >> {
binary(Split(_, _).embed, Data.Str("some string"), Data.Str("some string"), Data.Arr(List("", "").map(Data.Str(_))))
}
"any string not containing delimiter" >> prop { (s: String, d: String) =>
(!d.isEmpty && !s.contains(d)) ==>
binary(Split(_, _).embed, Data.Str(s), Data.Str(d), Data.Arr(List(Data.Str(s))))
}
"any string with non-empty delimiter" >> prop { (s: String, d: String) =>
!d.isEmpty ==>
binary(Split(_, _).embed, Data.Str(s), Data.Str(d), Data.Arr(s.split(Regex.quote(d), -1).toList.map(Data.Str(_))))
}
}
"Boolean" >> {
"true" >> {
unary(Boolean(_).embed, Data.Str("true"), Data.Bool(true))
}
"false" >> {
unary(Boolean(_).embed, Data.Str("false"), Data.Bool(false))
}
// TODO: how to express "should execute and may produce any result"
}
"Integer" >> {
"any BigInt in the domain" >> prop { (x: BigInt) =>
unary(Integer(_).embed, Data.Str(x.toString), Data.Int(x))
}
// TODO: how to express "should execute and may produce any result"
}
"Decimal" >> {
"any BigDecimal in the domain" >> prop { (x: BigDecimal) =>
unary(Decimal(_).embed, Data.Str(x.toString), Data.Dec(x))
}
// TODO: how to express "should execute and may produce any result"
}
"Null" >> {
"null" >> {
unary(Null(_).embed, Data.Str("null"), Data.Null)
}
// TODO: how to express "should execute and may produce any result"
}
"ToString" >> {
"string" >> prop { (str: String) =>
unary(ToString(_).embed, Data.Str(str), Data.Str(str))
}
"null" >> {
unary(ToString(_).embed, Data.Null, Data.Str("null"))
}
"true" >> {
unary(ToString(_).embed, Data.Bool(true), Data.Str("true"))
}
"false" >> {
unary(ToString(_).embed, Data.Bool(false), Data.Str("false"))
}
"int" >> prop { (x: BigInt) =>
unary(ToString(_).embed, Data.Int(x), Data.Str(x.toString))
}
// TODO: re-parse and compare the resulting value approximately. It's
// not reasonable to expect a perfect match on formatted values,
// because of trailing zeros, round-off, and choive of notation.
// "dec" >> prop { (x: BigDecimal) =>
// unary(ToString(_).embed, Data.Dec(x), Data.Str(x.toString))
// }
"timestamp" >> {
def test(x: Instant) = unary(
ToString(_).embed,
Data.Timestamp(x),
Data.Str(x.atZone(UTC).format(DataCodec.dateTimeFormatter)))
"zero fractional seconds" >> test(Instant.EPOCH)
"any" >> prop (test(_: Instant))
}
"date" >> prop { (x: LocalDate) =>
unary(ToString(_).embed, Data.Date(x), Data.Str(x.toString))
}
"time" >> {
def test(x: LocalTime) = unary(
ToString(_).embed,
Data.Time(x),
Data.Str(x.format(DataCodec.timeFormatter)))
"zero fractional seconds" >> test(LocalTime.NOON)
"any" >> prop (test(_: LocalTime))
}
// TODO: Enable
// "interval" >> prop { (x: Duration) =>
// unary(ToString(_).embed, Data.Interval(x), Data.Str(x.toString))
// }
}
}
"DateLib" >> {
import DateLib._
"ExtractCentury" >> {
"0001-01-01" >> {
unary(ExtractCentury(_).embed, Data.Date(LocalDate.parse("0001-01-01")), Data.Int(1))
}
"2000-01-01" >> {
unary(ExtractCentury(_).embed, Data.Date(LocalDate.parse("2000-01-01")), Data.Int(20))
}
"2001-01-01" >> {
unary(ExtractCentury(_).embed, Data.Date(LocalDate.parse("2001-01-01")), Data.Int(21))
}
"midnight 0001-01-01" >> {
unary(ExtractCentury(_).embed, Data.Timestamp(Instant.parse("0001-01-01T00:00:00.000Z")), Data.Int(1))
}
"midnight 2000-01-01" >> {
unary(ExtractCentury(_).embed, Data.Timestamp(Instant.parse("2000-01-01T00:00:00.000Z")), Data.Int(20))
}
"midnight 2001-01-01" >> {
unary(ExtractCentury(_).embed, Data.Timestamp(Instant.parse("2001-01-01T00:00:00.000Z")), Data.Int(21))
}
}
"ExtractDayOfMonth" >> {
"2016-01-01" >> {
unary(ExtractDayOfMonth(_).embed, Data.Date(LocalDate.parse("2016-01-01")), Data.Int(1))
}
"midnight 2016-01-01" >> {
unary(ExtractDayOfMonth(_).embed, Data.Timestamp(Instant.parse("2016-01-01T00:00:00.000Z")), Data.Int(1))
}
"2016-02-29" >> {
unary(ExtractDayOfMonth(_).embed, Data.Date(LocalDate.parse("2016-02-29")), Data.Int(29))
}
"midnight 2016-02-29" >> {
unary(ExtractDayOfMonth(_).embed, Data.Timestamp(Instant.parse("2016-02-29T00:00:00.000Z")), Data.Int(29))
}
}
"ExtractDecade" >> {
"1999-12-31" >> {
unary(ExtractDecade(_).embed, Data.Date(LocalDate.parse("1999-12-31")), Data.Int(199))
}
"midnight 1999-12-31" >> {
unary(ExtractDecade(_).embed, Data.Timestamp(Instant.parse("1999-12-31T00:00:00.000Z")), Data.Int(199))
}
}
"ExtractDayOfWeek" >> {
"2016-09-28" >> {
unary(ExtractDayOfWeek(_).embed, Data.Date(LocalDate.parse("2016-09-28")), Data.Int(3))
}
"midnight 2016-09-28" >> {
unary(ExtractDayOfWeek(_).embed, Data.Timestamp(Instant.parse("2016-09-28T00:00:00.000Z")), Data.Int(3))
}
"2016-10-02" >> {
unary(ExtractDayOfWeek(_).embed, Data.Date(LocalDate.parse("2016-10-02")), Data.Int(0))
}
"midnight 2016-10-02" >> {
unary(ExtractDayOfWeek(_).embed, Data.Timestamp(Instant.parse("2016-10-02T00:00:00.000Z")), Data.Int(0))
}
"2016-10-08" >> {
unary(ExtractDayOfWeek(_).embed, Data.Date(LocalDate.parse("2016-10-08")), Data.Int(6))
}
"noon 2016-10-08" >> {
unary(ExtractDayOfWeek(_).embed, Data.Timestamp(Instant.parse("2016-10-08T12:00:00.000Z")), Data.Int(6))
}
}
"ExtractDayOfYear" >> {
"2016-03-01" >> {
unary(ExtractDayOfYear(_).embed, Data.Date(LocalDate.parse("2016-03-01")), Data.Int(61))
}
"midnight 2016-03-01" >> {
unary(ExtractDayOfYear(_).embed, Data.Timestamp(Instant.parse("2016-03-01T00:00:00.000Z")), Data.Int(61))
}
"2017-03-01" >> {
unary(ExtractDayOfYear(_).embed, Data.Date(LocalDate.parse("2017-03-01")), Data.Int(60))
}
"midnight 2017-03-01" >> {
unary(ExtractDayOfYear(_).embed, Data.Timestamp(Instant.parse("2017-03-01T00:00:00.000Z")), Data.Int(60))
}
}
"ExtractEpoch" >> {
"2016-09-29" >> {
unary(ExtractEpoch(_).embed, Data.Date(LocalDate.parse("2016-09-29")), Data.Dec(1475107200.0))
}
"2016-09-29 12:34:56.789" >> {
unary(ExtractEpoch(_).embed, Data.Timestamp(Instant.parse("2016-09-29T12:34:56.789Z")), Data.Dec(1475152496.789))
}
}
"ExtractHour" >> {
"2016-09-29" >> {
unary(ExtractHour(_).embed, Data.Date(LocalDate.parse("2016-09-29")), Data.Int(0))
}
"midnight 2016-09-29" >> {
unary(ExtractHour(_).embed, Data.Timestamp(Instant.parse("2016-03-01T00:00:00.000Z")), Data.Int(0))
}
"2016-09-29 12:34:56.789" >> {
unary(ExtractHour(_).embed, Data.Timestamp(Instant.parse("2016-03-01T12:34:56.789Z")), Data.Int(12))
}
}
"ExtractIsoDayOfWeek" >> {
"2016-09-28" >> {
unary(ExtractIsoDayOfWeek(_).embed, Data.Date(LocalDate.parse("2016-09-28")), Data.Int(3))
}
"midnight 2016-09-28" >> {
unary(ExtractIsoDayOfWeek(_).embed, Data.Timestamp(Instant.parse("2016-09-28T00:00:00.000Z")), Data.Int(3))
}
"2016-10-02" >> {
unary(ExtractIsoDayOfWeek(_).embed, Data.Date(LocalDate.parse("2016-10-02")), Data.Int(7))
}
"midnight 2016-10-02" >> {
unary(ExtractIsoDayOfWeek(_).embed, Data.Timestamp(Instant.parse("2016-10-02T00:00:00.000Z")), Data.Int(7))
}
}
"ExtractIsoYear" >> {
"2006-01-01" >> {
unary(ExtractIsoYear(_).embed, Data.Date(LocalDate.parse("2006-01-01")), Data.Int(2005))
}
"midnight 2006-01-01" >> {
unary(ExtractIsoYear(_).embed, Data.Timestamp(Instant.parse("2006-01-01T00:00:00.000Z")), Data.Int(2005))
}
"2006-01-02" >> {
unary(ExtractIsoYear(_).embed, Data.Date(LocalDate.parse("2006-01-02")), Data.Int(2006))
}
"midnight 2006-01-02" >> {
unary(ExtractIsoYear(_).embed, Data.Timestamp(Instant.parse("2006-01-02T00:00:00.000Z")), Data.Int(2006))
}
}
"ExtractMicroseconds" >> {
"2016-09-29" >> {
unary(ExtractMicroseconds(_).embed, Data.Date(LocalDate.parse("2016-09-29")), Data.Dec(0))
}
"midnight 2016-09-29" >> {
unary(ExtractMicroseconds(_).embed, Data.Timestamp(Instant.parse("2016-03-01T00:00:00.000Z")), Data.Dec(0))
}
"2016-09-29 12:34:56.789" >> {
unary(ExtractMicroseconds(_).embed, Data.Timestamp(Instant.parse("2016-03-01T12:34:56.789Z")), Data.Dec(56.789e6))
}
}
"ExtractMillennium" >> {
"0001-01-01" >> {
unary(ExtractMillennium(_).embed, Data.Date(LocalDate.parse("0001-01-01")), Data.Int(1))
}
"2000-01-01" >> {
unary(ExtractMillennium(_).embed, Data.Date(LocalDate.parse("2000-01-01")), Data.Int(2))
}
"2001-01-01" >> {
unary(ExtractMillennium(_).embed, Data.Date(LocalDate.parse("2001-01-01")), Data.Int(3))
}
"midnight 0001-01-01" >> {
unary(ExtractMillennium(_).embed, Data.Timestamp(Instant.parse("0001-01-01T00:00:00.000Z")), Data.Int(1))
}
"midnight 2000-01-01" >> {
unary(ExtractMillennium(_).embed, Data.Timestamp(Instant.parse("2000-01-01T00:00:00.000Z")), Data.Int(2))
}
"midnight 2001-01-01" >> {
unary(ExtractMillennium(_).embed, Data.Timestamp(Instant.parse("2001-01-01T00:00:00.000Z")), Data.Int(3))
}
}
"ExtractMilliseconds" >> {
"2016-09-29" >> {
unary(ExtractMilliseconds(_).embed, Data.Date(LocalDate.parse("2016-09-29")), Data.Dec(0))
}
"midnight 2016-09-29" >> {
unary(ExtractMilliseconds(_).embed, Data.Timestamp(Instant.parse("2016-03-01T00:00:00.000Z")), Data.Dec(0))
}
"2016-09-29 12:34:56.789" >> {
unary(ExtractMilliseconds(_).embed, Data.Timestamp(Instant.parse("2016-03-01T12:34:56.789Z")), Data.Dec(56.789e3))
}
}
"ExtractMinute" >> {
"2016-09-29" >> {
unary(ExtractMinute(_).embed, Data.Date(LocalDate.parse("2016-09-29")), Data.Int(0))
}
"midnight 2016-09-29" >> {
unary(ExtractMinute(_).embed, Data.Timestamp(Instant.parse("2016-03-01T00:00:00.000Z")), Data.Int(0))
}
"2016-09-29 12:34:56.789" >> {
unary(ExtractMinute(_).embed, Data.Timestamp(Instant.parse("2016-03-01T12:34:56.789Z")), Data.Int(34))
}
}
"ExtractMonth" >> {
"2016-01-01" >> {
unary(ExtractMonth(_).embed, Data.Date(LocalDate.parse("2016-01-01")), Data.Int(1))
}
"midnight 2016-01-01" >> {
unary(ExtractMonth(_).embed, Data.Timestamp(Instant.parse("2016-01-01T00:00:00.000Z")), Data.Int(1))
}
"2016-02-29" >> {
unary(ExtractMonth(_).embed, Data.Date(LocalDate.parse("2016-02-29")), Data.Int(2))
}
"midnight 2016-02-29" >> {
unary(ExtractMonth(_).embed, Data.Timestamp(Instant.parse("2016-02-29T00:00:00.000Z")), Data.Int(2))
}
}
"ExtractQuarter" >> {
"2016-10-03" >> {
unary(ExtractQuarter(_).embed, Data.Date(LocalDate.parse("2016-10-03")), Data.Int(4))
}
"midnight 2016-10-03" >> {
unary(ExtractQuarter(_).embed, Data.Timestamp(Instant.parse("2016-10-03T00:00:00.000Z")), Data.Int(4))
}
"2016-03-31 (leap year)" >> {
unary(ExtractQuarter(_).embed, Data.Date(LocalDate.parse("2016-03-31")), Data.Int(1))
}
"midnight 2016-03-31 (leap year)" >> {
unary(ExtractQuarter(_).embed, Data.Timestamp(Instant.parse("2016-03-31T00:00:00.000Z")), Data.Int(1))
}
"2016-04-01 (leap year)" >> {
unary(ExtractQuarter(_).embed, Data.Date(LocalDate.parse("2016-04-01")), Data.Int(2))
}
"midnight 2016-04-01 (leap year)" >> {
unary(ExtractQuarter(_).embed, Data.Timestamp(Instant.parse("2016-04-01T00:00:00.000Z")), Data.Int(2))
}
"2017-03-31" >> {
unary(ExtractQuarter(_).embed, Data.Date(LocalDate.parse("2017-03-31")), Data.Int(1))
}
"midnight 2017-03-31" >> {
unary(ExtractQuarter(_).embed, Data.Timestamp(Instant.parse("2017-03-31T00:00:00.000Z")), Data.Int(1))
}
"2017-04-01" >> {
unary(ExtractQuarter(_).embed, Data.Date(LocalDate.parse("2017-04-01")), Data.Int(2))
}
"midnight 2017-04-01" >> {
unary(ExtractQuarter(_).embed, Data.Timestamp(Instant.parse("2017-04-01T00:00:00.000Z")), Data.Int(2))
}
}
"ExtractSecond" >> {
"2016-09-29" >> {
unary(ExtractSecond(_).embed, Data.Date(LocalDate.parse("2016-09-29")), Data.Dec(0))
}
"midnight 2016-09-29" >> {
unary(ExtractSecond(_).embed, Data.Timestamp(Instant.parse("2016-03-01T00:00:00.000Z")), Data.Dec(0))
}
"2016-09-29 12:34:56.789" >> {
unary(ExtractSecond(_).embed, Data.Timestamp(Instant.parse("2016-03-01T12:34:56.789Z")), Data.Dec(56.789))
}
}
// TODO: ExtractTimezone
// TODO: ExtractTimezoneHour
// TODO: ExtractTimezoneMinute
"ExtractWeek" >> {
  // Week-of-year extraction. Note that 1 Jan can belong to week 53 of the
  // preceding week-year (as asserted below for 2016 and 2005).
  "2016-01-01" >> {
    unary(ExtractWeek(_).embed, Data.Date(LocalDate.parse("2016-01-01")), Data.Int(53))
  }
  "midnight 2016-01-01" >> {
    unary(ExtractWeek(_).embed, Data.Timestamp(Instant.parse("2016-01-01T00:00:00.000Z")), Data.Int(53))
  }
  "2001-02-16" >> {
    unary(ExtractWeek(_).embed, Data.Date(LocalDate.parse("2001-02-16")), Data.Int(7))
  }
  // FIXED: this example was mislabelled "midnight 2016-10-03" although the
  // instant under test is 2001-02-16.
  "midnight 2001-02-16" >> {
    unary(ExtractWeek(_).embed, Data.Timestamp(Instant.parse("2001-02-16T00:00:00.000Z")), Data.Int(7))
  }
  "2005-01-01" >> {
    unary(ExtractWeek(_).embed, Data.Date(LocalDate.parse("2005-01-01")), Data.Int(53))
  }
  "midnight 2005-01-01" >> {
    unary(ExtractWeek(_).embed, Data.Timestamp(Instant.parse("2005-01-01T00:00:00.000Z")), Data.Int(53))
  }
}
"ExtractYear" >> {
  // ExtractYear on the last day of 1999, both as a date and as a UTC timestamp.
  val lastDayOf1999 = LocalDate.parse("1999-12-31")
  "1999-12-31" >> {
    unary(ExtractYear(_).embed, Data.Date(lastDayOf1999), Data.Int(1999))
  }
  "midnight 1999-12-31" >> {
    unary(ExtractYear(_).embed, Data.Timestamp(Instant.parse("1999-12-31T00:00:00.000Z")), Data.Int(1999))
  }
}
// StartOfDay maps a temporal value to midnight (UTC) of its day.
"StartOfDay" >> {
  // For any instant, StartOfDay must agree with truncating the zoned value
  // to TemporalPart.Day; truncation failures are reported as spec failures.
  "timestamp" >> prop { (x: Instant) =>
    val t = x.atZone(UTC)
    truncZonedDateTime(TemporalPart.Day, t).fold(
      e => Failure(e.shows),
      tt => unary(
        StartOfDay(_).embed,
        Data.Timestamp(x),
        Data.Timestamp(tt.toInstant)))
  }
  // For a plain date, the result is that date at 00:00 UTC as a timestamp.
  "date" >> prop { (x: LocalDate) =>
    unary(
      StartOfDay(_).embed,
      Data.Date(x),
      Data.Timestamp(x.atStartOfDay(UTC).toInstant))
  }
}
"Now" >> {
  import MathLib.Subtract
  // `now` must be constant within a single evaluation: subtracting the same
  // Now expression from itself yields a zero-length interval. The ignored
  // Int parameter only serves to run the check repeatedly via `prop`.
  "now" >> prop { (_: Int) =>
    val now = Now[Fix[LogicalPlan]]
    nullary(Subtract(now.embed, now.embed).embed, Data.Interval(Duration.ZERO))
  }
}
"TemporalTrunc" >> {
  // Every TemporalPart exercised below, paired with the label used in the
  // example names. Labels are spelled out so example names stay stable even
  // if TemporalPart's toString changes.
  val allParts: List[(String, TemporalPart)] = List(
    "Century"     -> TemporalPart.Century,
    "Day"         -> TemporalPart.Day,
    "Decade"      -> TemporalPart.Decade,
    "Hour"        -> TemporalPart.Hour,
    "Microsecond" -> TemporalPart.Microsecond,
    "Millennium"  -> TemporalPart.Millennium,
    "Millisecond" -> TemporalPart.Millisecond,
    "Minute"      -> TemporalPart.Minute,
    "Month"       -> TemporalPart.Month,
    "Quarter"     -> TemporalPart.Quarter,
    "Second"      -> TemporalPart.Second,
    "Week"        -> TemporalPart.Week,
    "Year"        -> TemporalPart.Year)

  // TemporalTrunc on a timestamp must agree with truncating the zoned value.
  def truncZonedDateTimeTimestamp(p: TemporalPart, i: Instant): Result =
    truncZonedDateTime(p, i.atZone(UTC)).fold(
      e => Failure(e.shows),
      tt => unary(
        TemporalTrunc(p, _).embed,
        Data.Timestamp(i),
        Data.Timestamp(tt.toInstant)))

  // Regression example for issue Q#1966.
  "Q#1966" >>
    truncZonedDateTimeTimestamp(
      TemporalPart.Century,
      Instant.parse("2000-03-01T06:15:45.204Z"))

  allParts foreach { case (label, part) =>
    s"timestamp $label" >> prop { x: Instant =>
      truncZonedDateTimeTimestamp(part, x)
    }
  }

  // TemporalTrunc on a date truncates its start-of-day (UTC) and keeps the date.
  def truncZonedDateTimeDate(p: TemporalPart, d: LocalDate): Result =
    truncZonedDateTime(p, d.atStartOfDay(UTC)).fold(
      e => Failure(e.shows),
      tt => unary(
        TemporalTrunc(p, _).embed,
        Data.Date(d),
        Data.Date(tt.toLocalDate)))

  allParts foreach { case (label, part) =>
    s"date $label" >> prop { d: LocalDate =>
      truncZonedDateTimeDate(part, d)
    }
  }

  // TemporalTrunc on a local time must agree with truncLocalTime.
  def truncLocalTimeʹ(p: TemporalPart, t: LocalTime): Result =
    truncLocalTime(p, t).fold(
      e => Failure(e.shows),
      tt => unary(
        TemporalTrunc(p, _).embed,
        Data.Time(t),
        Data.Time(tt)))

  allParts foreach { case (label, part) =>
    s"time $label" >> prop { t: LocalTime =>
      truncLocalTimeʹ(part, t)
    }
  }
}
"TimeOfDay" >> {
  "timestamp" >> {
    // Capture a single instant so the input and the expectation agree.
    val instant  = Instant.now
    val localUtc = instant.atZone(ZoneOffset.UTC).toLocalTime
    unary(TimeOfDay(_).embed, Data.Timestamp(instant), Data.Time(localUtc))
  }
}
}
"MathLib" >> {
  import MathLib._

  // NB: testing only (32-bit) ints, to avoid overflowing 64-bit longs
  // and the 53 bits of integer precision in a 64-bit double.

  // TODO: BigDecimals (which can under/overflow)
  // TODO: mixed BigInt/BigDecimal (which can explode)

  "Add" >> {
    "any ints" >> prop { (x: Int, y: Int) =>
      binary(Add(_, _).embed, Data.Int(x), Data.Int(y), Data.Int(x.toLong + y.toLong))
    }
    "any doubles" >> prop { (x: Double, y: Double) =>
      binary(Add(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Dec(x + y))
    }
    // `commute` checks both argument orders (addition is commutative).
    "mixed int/double" >> prop { (x: Int, y: Double) =>
      commute(Add(_, _).embed, Data.Int(x), Data.Dec(y), Data.Dec(x + y))
    }
    // TODO: Timestamp + Interval, Date + Interval, Time + Interval
  }

  "Multiply" >> {
    "any ints" >> prop { (x: Int, y: Int) =>
      binary(Multiply(_, _).embed, Data.Int(x), Data.Int(y), Data.Int(x.toLong * y.toLong))
    }
    // TODO: figure out what domain can be tested here (tends to overflow)
    // "any doubles" >> prop { (x: Double, y: Double) =>
    //   binary(Multiply(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Dec(x * y))
    // }
    // TODO: figure out what domain can be tested here
    // "mixed int/double" >> prop { (x: Int, y: Double) =>
    //   commute(Multiply(_, _).embed, Data.Int(x), Data.Dec(y), Data.Dec(x * y))
    // }
    // TODO: Interval * Int
  }

  "Power" >> {
    // x^0 = 1 and x^1 = x for both integral and decimal bases.
    "Int to 0" >> prop { (x: BigInt) =>
      binary(Power(_, _).embed, Data.Int(x), Data.Int(0), Data.Int(1))
    }
    "Dec to 0" >> prop { (x: BigDecimal) =>
      binary(Power(_, _).embed, Data.Dec(x), Data.Int(0), Data.Int(1))
    }
    "Int to 1" >> prop { (x: BigInt) =>
      binary(Power(_, _).embed, Data.Int(x), Data.Int(1), Data.Int(x))
    }
    "Dec to 1" >> prop { (x: BigDecimal) =>
      binary(Power(_, _).embed, Data.Dec(x), Data.Int(1), Data.Dec(x))
    }
    // 0^y = 0 for positive y (exponent restricted to 1..9 to stay cheap).
    "0 to small positive int" >> prop { (y0: Int) =>
      val y = abs(y0 % 10)
      y != 0 ==>
        binary(Power(_, _).embed, Data.Int(0), Data.Int(y), Data.Int(0))
    }
    // TODO: figure out what domain can be tested here (negatives?)
    // "0 to Dec" >> prop { (y: BigDecimal) =>
    //   y != 0 ==>
    //     binary(Power(_, _).embed, Data.Int(0), Data.Dec(y), Data.Int(0))
    // }
    "Int squared" >> prop { (x: Int) =>
      binary(Power(_, _).embed, Data.Int(x), Data.Int(2), Data.Int(x.toLong * x.toLong))
    }
    // TODO: test as much of the domain as makes sense
  }

  "Subtract" >> {
    "any ints" >> prop { (x: Int, y: Int) =>
      binary(Subtract(_, _).embed, Data.Int(x), Data.Int(y), Data.Int(x.toLong - y.toLong))
    }
    "any doubles" >> prop { (x: Double, y: Double) =>
      binary(Subtract(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Dec(x - y))
    }
    // Subtraction is not commutative, so both orders are asserted explicitly.
    "mixed int/double" >> prop { (x: Int, y: Double) =>
      binary(Subtract(_, _).embed, Data.Int(x), Data.Dec(y), Data.Dec(x - y)) and
        binary(Subtract(_, _).embed, Data.Dec(y), Data.Int(x), Data.Dec(y - x))
    }
    // TODO:
    // Timestamp - Timestamp, Timestamp - Interval,
    // Date - Date, Date - Interval,
    // Time - Time, Time - Interval
  }

  "Divide" >> {
    "any ints" >> prop { (x: Int, y: Int) =>
      y != 0 ==>
        binary(Divide(_, _).embed, Data.Int(x), Data.Int(y), Data.Dec(x.toDouble / y.toDouble))
    }
    // TODO: figure out what domain can be tested here
    // "any doubles" >> prop { (x: Double, y: Double) =>
    //   binary(Divide(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Dec(x / y))
    // }
    // TODO: figure out what domain can be tested here
    // "mixed int/double" >> prop { (x: Int, y: Double) =>
    //   commute(Divide(_, _).embed, Data.Int(x), Data.Dec(y), Data.Dec(x / y))
    // }
    // TODO: Interval / Int
  }

  "Negate" >> {
    "any Int" >> prop { (x: BigInt) =>
      unary(Negate(_).embed, Data.Int(x), Data.Int(-x))
    }
    "any Dec" >> prop { (x: BigDecimal) =>
      unary(Negate(_).embed, Data.Dec(x), Data.Dec(-x))
    }
    // TODO: Interval
  }

  "Abs" >> {
    "any Int" >> prop { (x: BigInt) =>
      unary(Abs(_).embed, Data.Int(x), Data.Int(x.abs))
    }
    "any Dec" >> prop { (x: BigDecimal) =>
      unary(Abs(_).embed, Data.Dec(x), Data.Dec(x.abs))
    }
    // TODO: add support for interval
    // "any Interval" >> prop { (x: Duration) =>
    //   unary(Abs(_).embed, Data.Interval(x), if (x.isNegative) Data.Interval(x.negated) else Data.Interval(x))
    // }
  }

  // Trunc/Ceil/Floor are identities on Ints and round Decs toward
  // zero / +infinity / -infinity respectively.
  "Trunc" >> {
    "any Int" >> prop { (x: BigInt) =>
      unary(Trunc(_).embed, Data.Int(x), Data.Int(x))
    }
    "any Dec" >> prop { (x: BigDecimal) =>
      unary(Trunc(_).embed, Data.Dec(x), Data.Dec(x.setScale(0, RoundingMode.DOWN)))
    }
  }

  "Ceil" >> {
    "any Int" >> prop { (x: BigInt) =>
      unary(Ceil(_).embed, Data.Int(x), Data.Int(x))
    }
    "any Dec" >> prop { (x: BigDecimal) =>
      unary(Ceil(_).embed, Data.Dec(x), Data.Dec(x.setScale(0, RoundingMode.CEILING)))
    }
  }

  "Floor" >> {
    "any Int" >> prop { (x: BigInt) =>
      unary(Floor(_).embed, Data.Int(x), Data.Int(x))
    }
    "any Dec" >> prop { (x: BigDecimal) =>
      unary(Floor(_).embed, Data.Dec(x), Data.Dec(x.setScale(0, RoundingMode.FLOOR)))
    }
  }

  // Round uses half-to-even ("banker's") rounding: ties go to the nearest
  // even integer, hence 0.5 -> 0 and 2.5 -> 2 below.
  "Round" >> {
    "any Int" >> prop { (x: BigInt) =>
      unary(Round(_).embed, Data.Int(x), Data.Int(x))
    }
    "0.5 -> 0" >> {
      unary(Round(_).embed, Data.Dec(0.5), Data.Int(0))
    }
    "1.5 -> 2" >> {
      unary(Round(_).embed, Data.Dec(1.5), Data.Int(2))
    }
    "1.75 -> 2" >> {
      unary(Round(_).embed, Data.Dec(1.75), Data.Int(2))
    }
    "2.5 -> 2" >> {
      unary(Round(_).embed, Data.Dec(2.5), Data.Int(2))
    }
    "2.75 -> 3" >> {
      unary(Round(_).embed, Data.Dec(2.75), Data.Int(3))
    }
    "-0.5 -> 0" >> {
      unary(Round(_).embed, Data.Dec(-0.5), Data.Int(0))
    }
    "-1.5 -> -2" >> {
      unary(Round(_).embed, Data.Dec(-1.5), Data.Int(-2))
    }
    "-2.5 -> -2" >> {
      unary(Round(_).embed, Data.Dec(-2.5), Data.Int(-2))
    }
  }

  "CeilScale" >> {
    "scale 0" >> {
      val scale = 0
      "any Int" >> prop { (x: BigInt) =>
        binary(CeilScale(_, _).embed, Data.Int(x), Data.Int(scale), Data.Int(x))
      }
      "0.5 -> 1" >> {
        binary(CeilScale(_, _).embed, Data.Dec(0.5), Data.Int(scale), Data.Int(1))
      }
      "1.5 -> 2" >> {
        binary(CeilScale(_, _).embed, Data.Dec(1.5), Data.Int(scale), Data.Int(2))
      }
      "2.5 -> 3" >> {
        binary(CeilScale(_, _).embed, Data.Dec(2.5), Data.Int(scale), Data.Int(3))
      }
      "-0.5 -> 0" >> {
        binary(CeilScale(_, _).embed, Data.Dec(-0.5), Data.Int(scale), Data.Int(0))
      }
      "-1.5 -> -1" >> {
        binary(CeilScale(_, _).embed, Data.Dec(-1.5), Data.Int(scale), Data.Int(-1))
      }
      "-2.5 -> -2" >> {
        binary(CeilScale(_, _).embed, Data.Dec(-2.5), Data.Int(scale), Data.Int(-2))
      }
    }
    "scale 1" >> {
      val scale = 1
      "any Int" >> prop { (x: BigInt) =>
        binary(CeilScale(_, _).embed, Data.Int(x), Data.Int(scale), Data.Int(x))
      }
      "0.5 -> 0.5" >> {
        binary(CeilScale(_, _).embed, Data.Dec(0.5), Data.Int(scale), Data.Dec(0.5))
      }
      "0.25 -> 0.3" >> {
        binary(CeilScale(_, _).embed, Data.Dec(0.25), Data.Int(scale), Data.Dec(0.3))
      }
      "-0.5 -> -0.5" >> {
        binary(CeilScale(_, _).embed, Data.Dec(-0.5), Data.Int(scale), Data.Dec(-0.5))
      }
      // FIXED: description previously read "-0.25 -> 0.2" (sign lost).
      "-0.25 -> -0.2" >> {
        binary(CeilScale(_, _).embed, Data.Dec(-0.25), Data.Int(scale), Data.Dec(-0.2))
      }
    }
    "scale 2" >> {
      val scale = 2
      "any Int" >> prop { (x: BigInt) =>
        binary(CeilScale(_, _).embed, Data.Int(x), Data.Int(scale), Data.Int(x))
      }
      "0.5 -> 0.5" >> {
        binary(CeilScale(_, _).embed, Data.Dec(0.5), Data.Int(scale), Data.Dec(0.5))
      }
      "0.25 -> 0.25" >> {
        binary(CeilScale(_, _).embed, Data.Dec(0.25), Data.Int(scale), Data.Dec(0.25))
      }
      "0.125 -> 0.13" >> {
        binary(CeilScale(_, _).embed, Data.Dec(0.125), Data.Int(scale), Data.Dec(0.13))
      }
      "-0.5 -> -0.5" >> {
        binary(CeilScale(_, _).embed, Data.Dec(-0.5), Data.Int(scale), Data.Dec(-0.5))
      }
      // FIXED: description previously read "-0.25 -> 0.25" (sign lost).
      "-0.25 -> -0.25" >> {
        binary(CeilScale(_, _).embed, Data.Dec(-0.25), Data.Int(scale), Data.Dec(-0.25))
      }
      "-0.125 -> -0.12" >> {
        binary(CeilScale(_, _).embed, Data.Dec(-0.125), Data.Int(scale), Data.Dec(-0.12))
      }
    }
    // Negative scale rounds up to a multiple of 10^(-scale).
    "scale -1" >> {
      val scale = -1
      "1 -> 10" >> {
        binary(CeilScale(_, _).embed, Data.Int(1), Data.Int(scale), Data.Int(10))
      }
      "10 -> 10" >> {
        binary(CeilScale(_, _).embed, Data.Int(10), Data.Int(scale), Data.Int(10))
      }
      "12345 -> 12350" >> {
        binary(CeilScale(_, _).embed, Data.Int(12345), Data.Int(scale), Data.Int(12350))
      }
    }
  }

  "FloorScale" >> {
    "scale 0" >> {
      val scale = 0
      "any Int" >> prop { (x: BigInt) =>
        binary(FloorScale(_, _).embed, Data.Int(x), Data.Int(scale), Data.Int(x))
      }
      "0.5 -> 0" >> {
        binary(FloorScale(_, _).embed, Data.Dec(0.5), Data.Int(scale), Data.Int(0))
      }
      "1.5 -> 1" >> {
        binary(FloorScale(_, _).embed, Data.Dec(1.5), Data.Int(scale), Data.Int(1))
      }
      "2.5 -> 2" >> {
        binary(FloorScale(_, _).embed, Data.Dec(2.5), Data.Int(scale), Data.Int(2))
      }
      "-0.5 -> -1" >> {
        binary(FloorScale(_, _).embed, Data.Dec(-0.5), Data.Int(scale), Data.Int(-1))
      }
      "-1.5 -> -2" >> {
        binary(FloorScale(_, _).embed, Data.Dec(-1.5), Data.Int(scale), Data.Int(-2))
      }
      "-2.5 -> -3" >> {
        binary(FloorScale(_, _).embed, Data.Dec(-2.5), Data.Int(scale), Data.Int(-3))
      }
    }
    "scale 1" >> {
      val scale = 1
      "any Int" >> prop { (x: BigInt) =>
        binary(FloorScale(_, _).embed, Data.Int(x), Data.Int(scale), Data.Int(x))
      }
      "0.5 -> 0.5" >> {
        binary(FloorScale(_, _).embed, Data.Dec(0.5), Data.Int(scale), Data.Dec(0.5))
      }
      "0.25 -> 0.2" >> {
        binary(FloorScale(_, _).embed, Data.Dec(0.25), Data.Int(scale), Data.Dec(0.2))
      }
      "-0.5 -> -0.5" >> {
        binary(FloorScale(_, _).embed, Data.Dec(-0.5), Data.Int(scale), Data.Dec(-0.5))
      }
      "-0.25 -> -0.3" >> {
        binary(FloorScale(_, _).embed, Data.Dec(-0.25), Data.Int(scale), Data.Dec(-0.3))
      }
    }
    "scale 2" >> {
      val scale = 2
      "any Int" >> prop { (x: BigInt) =>
        binary(FloorScale(_, _).embed, Data.Int(x), Data.Int(scale), Data.Int(x))
      }
      "0.5 -> 0.5" >> {
        binary(FloorScale(_, _).embed, Data.Dec(0.5), Data.Int(scale), Data.Dec(0.5))
      }
      "0.25 -> 0.25" >> {
        binary(FloorScale(_, _).embed, Data.Dec(0.25), Data.Int(scale), Data.Dec(0.25))
      }
      "0.125 -> 0.12" >> {
        binary(FloorScale(_, _).embed, Data.Dec(0.125), Data.Int(scale), Data.Dec(0.12))
      }
      "-0.5 -> -0.5" >> {
        binary(FloorScale(_, _).embed, Data.Dec(-0.5), Data.Int(scale), Data.Dec(-0.5))
      }
      // FIXED: description previously read "-0.25 -> 0.25" (sign lost).
      "-0.25 -> -0.25" >> {
        binary(FloorScale(_, _).embed, Data.Dec(-0.25), Data.Int(scale), Data.Dec(-0.25))
      }
      "-0.125 -> -0.13" >> {
        binary(FloorScale(_, _).embed, Data.Dec(-0.125), Data.Int(scale), Data.Dec(-0.13))
      }
    }
    "scale -1" >> {
      val scale = -1
      "1 -> 0" >> {
        binary(FloorScale(_, _).embed, Data.Int(1), Data.Int(scale), Data.Int(0))
      }
      "10 -> 10" >> {
        binary(FloorScale(_, _).embed, Data.Int(10), Data.Int(scale), Data.Int(10))
      }
      "12345 -> 12340" >> {
        binary(FloorScale(_, _).embed, Data.Int(12345), Data.Int(scale), Data.Int(12340))
      }
    }
  }

  // RoundScale, like Round, uses half-to-even rounding at the given scale.
  "RoundScale" >> {
    "scale 0" >> {
      val scale = 0
      "any Int" >> prop { (x: BigInt) =>
        binary(RoundScale(_, _).embed, Data.Int(x), Data.Int(scale), Data.Int(x))
      }
      "0.5 -> 0" >> {
        binary(RoundScale(_, _).embed, Data.Dec(0.5), Data.Int(scale), Data.Int(0))
      }
      "1.5 -> 2" >> {
        binary(RoundScale(_, _).embed, Data.Dec(1.5), Data.Int(scale), Data.Int(2))
      }
      "1.75 -> 2" >> {
        binary(RoundScale(_, _).embed, Data.Dec(1.75), Data.Int(scale), Data.Int(2))
      }
      "2.5 -> 2" >> {
        binary(RoundScale(_, _).embed, Data.Dec(2.5), Data.Int(scale), Data.Int(2))
      }
      "2.75 -> 3" >> {
        binary(RoundScale(_, _).embed, Data.Dec(2.75), Data.Int(scale), Data.Int(3))
      }
      "-0.5 -> 0" >> {
        binary(RoundScale(_, _).embed, Data.Dec(-0.5), Data.Int(scale), Data.Int(0))
      }
      "-1.5 -> -2" >> {
        binary(RoundScale(_, _).embed, Data.Dec(-1.5), Data.Int(scale), Data.Int(-2))
      }
      "-2.5 -> -2" >> {
        binary(RoundScale(_, _).embed, Data.Dec(-2.5), Data.Int(scale), Data.Int(-2))
      }
    }
    "scale 1" >> {
      val scale = 1
      "any Int" >> prop { (x: BigInt) =>
        binary(RoundScale(_, _).embed, Data.Int(x), Data.Int(scale), Data.Int(x))
      }
      "0.5 -> 0.5" >> {
        binary(RoundScale(_, _).embed, Data.Dec(0.5), Data.Int(scale), Data.Dec(0.5))
      }
      "1.5 -> 1.5" >> {
        binary(RoundScale(_, _).embed, Data.Dec(1.5), Data.Int(scale), Data.Dec(1.5))
      }
      "1.75 -> 1.8" >> {
        binary(RoundScale(_, _).embed, Data.Dec(1.75), Data.Int(scale), Data.Dec(1.8))
      }
      "1.65 -> 1.6" >> {
        binary(RoundScale(_, _).embed, Data.Dec(1.65), Data.Int(scale), Data.Dec(1.6))
      }
      "-0.5 -> -0.5" >> {
        binary(RoundScale(_, _).embed, Data.Dec(-0.5), Data.Int(scale), Data.Dec(-0.5))
      }
      "-1.5 -> -1.5" >> {
        binary(RoundScale(_, _).embed, Data.Dec(-1.5), Data.Int(scale), Data.Dec(-1.5))
      }
      "-1.75 -> -1.8" >> {
        binary(RoundScale(_, _).embed, Data.Dec(-1.75), Data.Int(scale), Data.Dec(-1.8))
      }
      "-1.85 -> -1.8" >> {
        binary(RoundScale(_, _).embed, Data.Dec(-1.85), Data.Int(scale), Data.Dec(-1.8))
      }
    }
    "scale -1" >> {
      val scale = -1
      "1 -> 0" >> {
        binary(RoundScale(_, _).embed, Data.Int(1), Data.Int(scale), Data.Int(0))
      }
      "5 -> 0" >> {
        binary(RoundScale(_, _).embed, Data.Int(5), Data.Int(scale), Data.Int(0))
      }
      "10 -> 10" >> {
        binary(RoundScale(_, _).embed, Data.Int(10), Data.Int(scale), Data.Int(10))
      }
      "12345 -> 12340" >> {
        binary(RoundScale(_, _).embed, Data.Int(12345), Data.Int(scale), Data.Int(12340))
      }
      "12335 -> 12340" >> {
        binary(RoundScale(_, _).embed, Data.Int(12335), Data.Int(scale), Data.Int(12340))
      }
      "123 -> 120" >> {
        binary(RoundScale(_, _).embed, Data.Int(123), Data.Int(scale), Data.Int(120))
      }
    }
  }

  "Modulo" >> {
    "any int by 1" >> prop { (x: Int) =>
      binary(Modulo(_, _).embed, Data.Int(x), Data.Int(1), Data.Int(0))
    }
    // Remainder semantics: result takes the sign of the dividend (BigInt %).
    "any ints" >> prop { (x: Int, y: Int) =>
      y != 0 ==>
        binary(Modulo(_, _).embed, Data.Int(x), Data.Int(y), Data.Int(BigInt(x) % BigInt(y)))
    }
    // TODO analyze and optionally shortCircuit per connector
    // "any doubles" >> prop { (x: Double, y: Double) =>
    //   y != 0 ==>
    //     binary(Modulo(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Dec(BigDecimal(x).remainder(BigDecimal(y))))
    // }
    //
    // "any big decimals" >> prop { (x: BigDecimal, y: BigDecimal) =>
    //   !y.equals(0.0) ==>
    //     binary(Modulo(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Dec(x.remainder(y)))
    // }
    //
    // "mixed int/double" >> prop { (x: Int, y: Double) =>
    //   y != 0 ==>
    //     binary(Modulo(_, _).embed, Data.Int(x), Data.Dec(y), Data.Dec(BigDecimal(y).remainder(BigDecimal(x))))
    //   x != 0 ==>
    //     binary(Modulo(_, _).embed, Data.Dec(y), Data.Int(x), Data.Dec(BigDecimal(y).remainder(BigDecimal(x))))
    // }
  }
}
// Comparison and conditional operators. Reflexivity, pointwise comparison per
// primitive type, and cross-type behavior are exercised for each operator.
"RelationsLib" >> {
  import RelationsLib._
  "Eq" >> {
    "any Int with self" >> prop { (x: BigInt) =>
      binary(Eq(_, _).embed, Data.Int(x), Data.Int(x), Data.Bool(true))
    }
    "any two Ints" >> prop { (x: BigInt, y: BigInt) =>
      binary(Eq(_, _).embed, Data.Int(x), Data.Int(y), Data.Bool(x == y))
    }
    "any Dec with self" >> prop { (x: BigDecimal) =>
      binary(Eq(_, _).embed, Data.Dec(x), Data.Dec(x), Data.Bool(true))
    }
    "any two Decs" >> prop { (x: BigDecimal, y: BigDecimal) =>
      binary(Eq(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Bool(x == y))
    }
    "any Str with self" >> prop { (x: String) =>
      binary(Eq(_, _).embed, Data.Str(x), Data.Str(x), Data.Bool(true))
    }
    "any two Strs" >> prop { (x: String, y: String) =>
      binary(Eq(_, _).embed, Data.Str(x), Data.Str(y), Data.Bool(x == y))
    }
    "any value with self" >> prop { (x: Data) =>
      binary(Eq(_, _).embed, x, x, Data.Bool(true))
    }
    // Values of different types are never equal...
    "any values with different types" >> prop { (x: Data, y: Data) =>
      // ...provided they are not both Numeric (Int | Dec)
      (x.dataType != y.dataType &&
        !((Type.Numeric contains x.dataType) &&
          (Type.Numeric contains y.dataType))) ==>
        binary(Eq(_, _).embed, x, y, Data.Bool(false))
    }
    // Comparing a Date to a Timestamp is undefined, yielding NA.
    "any date & timestamp" >> prop { (d: LocalDate, i: Instant) =>
      binary(Eq(_, _).embed, Data.Date(d), Data.Timestamp(i), Data.NA)
    }
    // TODO: the rest of the types
  }
  "Neq" >> {
    "any Int with self" >> prop { (x: BigInt) =>
      binary(Neq(_, _).embed, Data.Int(x), Data.Int(x), Data.Bool(false))
    }
    "any two Ints" >> prop { (x: BigInt, y: BigInt) =>
      binary(Neq(_, _).embed, Data.Int(x), Data.Int(y), Data.Bool(x != y))
    }
    "any Dec with self" >> prop { (x: BigDecimal) =>
      binary(Neq(_, _).embed, Data.Dec(x), Data.Dec(x), Data.Bool(false))
    }
    "any two Decs" >> prop { (x: BigDecimal, y: BigDecimal) =>
      binary(Neq(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Bool(x != y))
    }
    "any Str with self" >> prop { (x: String) =>
      binary(Neq(_, _).embed, Data.Str(x), Data.Str(x), Data.Bool(false))
    }
    "any two Strs" >> prop { (x: String, y: String) =>
      binary(Neq(_, _).embed, Data.Str(x), Data.Str(y), Data.Bool(x != y))
    }
    "any value with self" >> prop { (x: Data) =>
      binary(Neq(_, _).embed, x, x, Data.Bool(false))
    }
    "any values with different types" >> prop { (x: Data, y: Data) =>
      // ...provided they are not both Numeric (Int | Dec)
      (x.dataType != y.dataType &&
        !((Type.Numeric contains x.dataType) &&
          (Type.Numeric contains y.dataType))) ==>
        binary(Neq(_, _).embed, x, y, Data.Bool(true))
    }
    // TODO: the rest of the types
  }
  "Lt" >> {
    "any Int with self" >> prop { (x: BigInt) =>
      binary(Lt(_, _).embed, Data.Int(x), Data.Int(x), Data.Bool(false))
    }
    "any two Ints" >> prop { (x: BigInt, y: BigInt) =>
      binary(Lt(_, _).embed, Data.Int(x), Data.Int(y), Data.Bool(x < y))
    }
    "any Dec with self" >> prop { (x: BigDecimal) =>
      binary(Lt(_, _).embed, Data.Dec(x), Data.Dec(x), Data.Bool(false))
    }
    "any two Decs" >> prop { (x: BigDecimal, y: BigDecimal) =>
      binary(Lt(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Bool(x < y))
    }
    "any Str with self" >> prop { (x: String) =>
      binary(Lt(_, _).embed, Data.Str(x), Data.Str(x), Data.Bool(false))
    }
    "any two Strs" >> prop { (x: String, y: String) =>
      binary(Lt(_, _).embed, Data.Str(x), Data.Str(y), Data.Bool(x < y))
    }
    // Ordering a Date against a Timestamp is undefined, yielding NA.
    "any date & timestamp" >> prop { (d: LocalDate, i: Instant) =>
      binary(Lt(_, _).embed, Data.Date(d), Data.Timestamp(i), Data.NA)
    }
    // TODO: Timestamp, Interval, cross-type comparison
  }
  "Lte" >> {
    "any Int with self" >> prop { (x: BigInt) =>
      binary(Lte(_, _).embed, Data.Int(x), Data.Int(x), Data.Bool(true))
    }
    "any two Ints" >> prop { (x: BigInt, y: BigInt) =>
      binary(Lte(_, _).embed, Data.Int(x), Data.Int(y), Data.Bool(x <= y))
    }
    "any Dec with self" >> prop { (x: BigDecimal) =>
      binary(Lte(_, _).embed, Data.Dec(x), Data.Dec(x), Data.Bool(true))
    }
    "any two Decs" >> prop { (x: BigDecimal, y: BigDecimal) =>
      binary(Lte(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Bool(x <= y))
    }
    "any Str with self" >> prop { (x: String) =>
      binary(Lte(_, _).embed, Data.Str(x), Data.Str(x), Data.Bool(true))
    }
    "any two Strs" >> prop { (x: String, y: String) =>
      binary(Lte(_, _).embed, Data.Str(x), Data.Str(y), Data.Bool(x <= y))
    }
    "any date & timestamp" >> prop { (d: LocalDate, i: Instant) =>
      binary(Lte(_, _).embed, Data.Date(d), Data.Timestamp(i), Data.NA)
    }
  }
  "Gt" >> {
    "any Int with self" >> prop { (x: BigInt) =>
      binary(Gt(_, _).embed, Data.Int(x), Data.Int(x), Data.Bool(false))
    }
    "any two Ints" >> prop { (x: BigInt, y: BigInt) =>
      binary(Gt(_, _).embed, Data.Int(x), Data.Int(y), Data.Bool(x > y))
    }
    "any Dec with self" >> prop { (x: BigDecimal) =>
      binary(Gt(_, _).embed, Data.Dec(x), Data.Dec(x), Data.Bool(false))
    }
    "any two Decs" >> prop { (x: BigDecimal, y: BigDecimal) =>
      binary(Gt(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Bool(x > y))
    }
    "any Str with self" >> prop { (x: String) =>
      binary(Gt(_, _).embed, Data.Str(x), Data.Str(x), Data.Bool(false))
    }
    "any two Strs" >> prop { (x: String, y: String) =>
      binary(Gt(_, _).embed, Data.Str(x), Data.Str(y), Data.Bool(x > y))
    }
    "any date & timestamp" >> prop { (d: LocalDate, i: Instant) =>
      binary(Gt(_, _).embed, Data.Date(d), Data.Timestamp(i), Data.NA)
    }
  }
  "Gte" >> {
    "any Int with self" >> prop { (x: BigInt) =>
      binary(Gte(_, _).embed, Data.Int(x), Data.Int(x), Data.Bool(true))
    }
    "any two Ints" >> prop { (x: BigInt, y: BigInt) =>
      binary(Gte(_, _).embed, Data.Int(x), Data.Int(y), Data.Bool(x >= y))
    }
    "any Dec with self" >> prop { (x: BigDecimal) =>
      binary(Gte(_, _).embed, Data.Dec(x), Data.Dec(x), Data.Bool(true))
    }
    "any two Decs" >> prop { (x: BigDecimal, y: BigDecimal) =>
      binary(Gte(_, _).embed, Data.Dec(x), Data.Dec(y), Data.Bool(x >= y))
    }
    "any Str with self" >> prop { (x: String) =>
      binary(Gte(_, _).embed, Data.Str(x), Data.Str(x), Data.Bool(true))
    }
    "any two Strs" >> prop { (x: String, y: String) =>
      binary(Gte(_, _).embed, Data.Str(x), Data.Str(y), Data.Bool(x >= y))
    }
    "any date & timestamp" >> prop { (d: LocalDate, i: Instant) =>
      binary(Gte(_, _).embed, Data.Date(d), Data.Timestamp(i), Data.NA)
    }
  }
  // Between(x, lo, hi) — inclusive on both ends, so the degenerate
  // (x, x, x) case is true; generated triples are sorted to pick lo/x/hi.
  "Between" >> {
    "any Int with self" >> prop { (x: BigInt) =>
      ternary(Between(_, _, _).embed, Data.Int(x), Data.Int(x), Data.Int(x), Data.Bool(true))
    }
    "any three Ints" >> prop { (x1: BigInt, x2: BigInt, x3: BigInt) =>
      val xs = List(x1, x2, x3).sorted
      ternary(Between(_, _, _).embed, Data.Int(xs(1)), Data.Int(xs(0)), Data.Int(xs(2)), Data.Bool(true))
    }
    "any Dec with self" >> prop { (x: BigDecimal) =>
      ternary(Between(_, _, _).embed, Data.Dec(x), Data.Dec(x), Data.Dec(x), Data.Bool(true))
    }
    "any three Decs" >> prop { (x1: BigDecimal, x2: BigDecimal, x3: BigDecimal) =>
      val xs = List(x1, x2, x3).sorted
      ternary(Between(_, _, _).embed, Data.Dec(xs(1)), Data.Dec(xs(0)), Data.Dec(xs(2)), Data.Bool(true))
    }
    "any Str with self" >> prop { (x: String) =>
      ternary(Between(_, _, _).embed, Data.Str(x), Data.Str(x), Data.Str(x), Data.Bool(true))
    }
    "any three Strs" >> prop { (x1: String, x2: String, x3: String) =>
      val xs = List(x1, x2, x3).sorted
      ternary(Between(_, _, _).embed, Data.Str(xs(1)), Data.Str(xs(0)), Data.Str(xs(2)), Data.Bool(true))
    }
    // TODO: Timestamp, Interval, cross-type comparison
  }
  // IfUndefined (`??`): substitutes the fallback when the left side is NA;
  // NA-valued object fields and array elements are simply dropped.
  "IfUndefined" >> {
    """NA ?? 42""" >> {
      binary(
        IfUndefined(_, _).embed,
        Data.NA,
        Data.Int(42),
        Data.Int(42))
    }
    """1 ?? 2""" >> {
      binary(
        IfUndefined(_, _).embed,
        Data.Int(1),
        Data.Int(2),
        Data.Int(1))
    }
    """{"a": 1} ?? 2""" >> {
      binary(
        IfUndefined(_, _).embed,
        Data.Obj("a" -> Data.Int(1)),
        Data.Int(2),
        Data.Obj("a" -> Data.Int(1)))
    }
    """{"a": NA, "b": 2} ?? 3""" >> {
      binary(
        IfUndefined(_, _).embed,
        Data.Obj("a" -> Data.NA, "b" -> Data.Int(2)),
        Data.Int(3),
        Data.Obj("b" -> Data.Int(2)))
    }
    """[NA, 2] ?? 3""" >> {
      binary(
        IfUndefined(_, _).embed,
        Data.Arr(Data.NA :: Data.Int(2) :: Nil),
        Data.Int(3),
        Data.Arr(Data.Int(2) :: Nil))
    }
  }
  // Full truth tables for the boolean connectives (commute covers the
  // mixed-argument cases in both orders).
  "And" >> {
    "false, false" >> {
      binary(And(_, _).embed, Data.Bool(false), Data.Bool(false), Data.Bool(false))
    }
    "false, true" >> {
      commute(And(_, _).embed, Data.Bool(false), Data.Bool(true), Data.Bool(false))
    }
    "true, true" >> {
      binary(And(_, _).embed, Data.Bool(true), Data.Bool(true), Data.Bool(true))
    }
  }
  "Or" >> {
    "false, false" >> {
      binary(Or(_, _).embed, Data.Bool(false), Data.Bool(false), Data.Bool(false))
    }
    "false, true" >> {
      commute(Or(_, _).embed, Data.Bool(false), Data.Bool(true), Data.Bool(true))
    }
    "true, true" >> {
      binary(Or(_, _).embed, Data.Bool(true), Data.Bool(true), Data.Bool(true))
    }
  }
  "Not" >> {
    "false" >> {
      unary(Not(_).embed, Data.Bool(false), Data.Bool(true))
    }
    "true" >> {
      unary(Not(_).embed, Data.Bool(true), Data.Bool(false))
    }
  }
  // Cond(test, ifTrue, ifFalse) selects between two arbitrary values.
  "Cond" >> {
    "true" >> prop { (x: Data, y: Data) =>
      ternary(Cond(_, _, _).embed, Data.Bool(true), x, y, x)
    }
    "false" >> prop { (x: Data, y: Data) =>
      ternary(Cond(_, _, _).embed, Data.Bool(false), x, y, y)
    }
  }
}
// Structural operators over objects, arrays and strings.
"StructuralLib" >> {
  import StructuralLib._
  // FIXME: No idea why this is necessary, but ScalaCheck arbContainer
  // demands it and can't seem to find one in this context.
  implicit def listToTraversable[A](as: List[A]): Traversable[A] = as
  // ConcatOp concatenates arrays and strings; a string combined with an
  // array is treated as an array of its single-character strings.
  "ConcatOp" >> {
    "array || array" >> prop { (xs: List[BigInt], ys: List[BigInt]) =>
      val (xints, yints) = (xs map (Data._int(_)), ys map (Data._int(_)))
      binary(ConcatOp(_, _).embed, Data._arr(xints), Data._arr(yints), Data._arr(xints ::: yints))
    }
    "array || string" >> prop { (xs: List[BigInt], y: String) =>
      val (xints, ystrs) = (xs map (Data._int(_)), y.toList map (c => Data._str(c.toString)))
      binary(ConcatOp(_, _).embed, Data._arr(xints), Data._str(y), Data._arr(xints ::: ystrs))
    }
    "string || array" >> prop { (x: String, ys: List[BigInt]) =>
      val (xstrs, yints) = (x.toList map (c => Data._str(c.toString)), ys map (Data._int(_)))
      binary(ConcatOp(_, _).embed, Data._str(x), Data._arr(yints), Data._arr(xstrs ::: yints))
    }
    "string || string" >> prop { (x: String, y: String) =>
      binary(ConcatOp(_, _).embed, Data._str(x), Data._str(y), Data._str(x + y))
    }
  }
  // Field projection; a missing key yields NA rather than an error.
  "ObjectProject" >> {
    """({"a":1}).a""" >> {
      binary(
        ObjectProject(_, _).embed,
        Data.Obj("a" -> Data.Int(1)),
        Data.Str("a"),
        Data.Int(1))
    }
    """({"a":1, "b":2}).b""" >> {
      binary(
        ObjectProject(_, _).embed,
        Data.Obj("a" -> Data.Int(1), "b" -> Data.Int(2)),
        Data.Str("b"),
        Data.Int(2))
    }
    """({"a":1, "b":2}).c""" >> {
      binary(
        ObjectProject(_, _).embed,
        Data.Obj("a" -> Data.Int(1), "b" -> Data.Int(2)),
        Data.Str("c"),
        Data.NA)
    }
    """({}).c""" >> {
      binary(
        ObjectProject(_, _).embed,
        Data.Obj(),
        Data.Str("c"),
        Data.NA)
    }
  }
  // Field removal; deleting an absent key leaves the object unchanged.
  "DeleteField" >> {
    "{a:1, b:2} delete .a" >> {
      binary(
        DeleteField(_, _).embed,
        Data.Obj("a" -> Data.Int(1), "b" -> Data.Int(2)),
        Data.Str("a"),
        Data.Obj("b" -> Data.Int(2)))
    }
    "{a:1, b:2} delete .b" >> {
      binary(
        DeleteField(_, _).embed,
        Data.Obj("a" -> Data.Int(1), "b" -> Data.Int(2)),
        Data.Str("b"),
        Data.Obj("a" -> Data.Int(1)))
    }
    "{a:1, b:2} delete .c" >> {
      binary(
        DeleteField(_, _).embed,
        Data.Obj("a" -> Data.Int(1), "b" -> Data.Int(2)),
        Data.Str("c"),
        Data.Obj("a" -> Data.Int(1), "b" -> Data.Int(2)))
    }
  }
  "Meta" >> {
    // FIXME: Implement once we've switched to EJson in LogicalPlan.
    "returns metadata associated with a value" >> pending("Requires EJson.")
  }
}
"SetLib" >> {
  import SetLib._
  // Within(x, arr): membership test, including structural (deep) equality
  // for nested arrays.
  "Within" >> {
    "0 in [1, 2, 3]" >> {
      binary(Within(_, _).embed, Data.Int(0), Data.Arr(List(Data.Int(1), Data.Int(2), Data.Int(3))), Data.False)
    }
    "1 in [1, 2, 3]" >> {
      binary(Within(_, _).embed, Data.Int(1), Data.Arr(List(Data.Int(1), Data.Int(2), Data.Int(3))), Data.True)
    }
    // The empty array contains nothing.
    "0 in []" >> {
      binary(Within(_, _).embed, Data.Int(0), Data.Arr(Nil), Data.False)
    }
    "[0] in [[1], 2, {a:3}, [0]]" >> {
      binary(
        Within(_, _).embed,
        Data.Arr(List(Data.Int(0))),
        Data.Arr(
          List(
            Data.Arr(List(Data.Int(1))),
            Data.Int(2),
            Data.Obj(ListMap("a" -> Data.Int(3))),
            Data.Arr(List(Data.Int(0))))),
        Data.True)
    }
    "[0, 1] in [[1], 2, {a:3}, [0, 1]]" >> {
      binary(
        Within(_, _).embed,
        Data.Arr(List(Data.Int(0), Data.Int(1))),
        Data.Arr(
          List(
            Data.Arr(List(Data.Int(1))),
            Data.Int(2),
            Data.Obj(ListMap("a" -> Data.Int(3))),
            Data.Arr(List(Data.Int(0), Data.Int(1))))),
        Data.True)
    }
  }
}
}
}
| drostron/quasar | frontend/src/test/scala/quasar/std/StdLibSpec.scala | Scala | apache-2.0 | 62,926 |
package scala
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
@RunWith(classOf[JUnit4])
// Checks that PartialFunction values — literals and the results of the
// standard combinators — are Java-serializable. The literal lambdas are kept
// exactly as written, since the compiler-generated closure classes are what
// is under test.
class PartialFunctionSerializationTest {
  // Two literal partial functions with disjoint domains, used directly and
  // as building blocks for the combinator cases below.
  val pf1: PartialFunction[Int, Int] = { case n if n > 0 => 1 }
  val pf2: PartialFunction[Int, Int] = { case n if n <= 0 => 2 }

  // Serializes `fn` into an in-memory stream; a non-serializable function
  // makes writeObject throw, failing the test. No assertion is needed.
  private def assertSerializable[A,B](fn: A => B): Unit = {
    import java.io._
    new ObjectOutputStream(new ByteArrayOutputStream()).writeObject(fn)
  }

  @Test def canSerializeLiteral = assertSerializable(pf1)
  @Test def canSerializeLifted = assertSerializable(pf1.lift)
  @Test def canSerializeOrElse = assertSerializable(pf1 orElse pf2)
  @Test def canSerializeUnlifted = assertSerializable(Function.unlift((x: Int) => Some(x)))
  @Test def canSerializeAndThen = assertSerializable(pf1.andThen((x: Int) => x))
  @Test def canSerializeEmpty = assertSerializable(PartialFunction.empty)
}
| martijnhoekstra/scala | test/junit/scala/PartialFunctionSerializationTest.scala | Scala | apache-2.0 | 900 |
package mgoeminne.scalagit
/**
 * Represents the mode of a node. A node being a file, in the Unix sense of the term.
 *
 * The mode is the octal word reported by Git (e.g. "100644"): the low 9 bits are
 * the rwx permission triplets for user/group/other, the next 3 bits are the
 * setuid/setgid/sticky flags, and the high bits form a 4-bit file-type field.
 *
 * @param rights the mode expressed as an octal string.
 */
class NodeMode(rights: String)
{
  /** The complete mode word, parsed from its octal string representation. */
  val permissions = Integer.parseInt(rights, 8)

  // (read, write, execute) bit masks for each permission class.
  private val userPrivilege = ( Integer.parseInt("00400", 8) , Integer.parseInt("00200", 8) , Integer.parseInt("00100", 8) )
  private val groupPrivilege = ( Integer.parseInt("00040", 8) , Integer.parseInt("00020", 8) , Integer.parseInt("00010", 8) )
  private val otherPrivilege = ( Integer.parseInt("00004", 8) , Integer.parseInt("00002", 8) , Integer.parseInt("00001", 8) )

  private val SET_UID = Integer.parseInt("0004000", 8)
  // BUGFIX: this constant previously duplicated SET_UID ("0004000");
  // the setgid bit is 02000.
  private val SET_GROUP_ID = Integer.parseInt("0002000", 8)
  private val STICKY = Integer.parseInt("0001000", 8)

  // File-type codes live in a 4-bit *field* (mask 0170000), not in independent flag
  // bits: e.g. SYMBOLIC_LINK (0120000) shares bits with REGULAR_FILE (0100000) and
  // CHARACTER_DEVICE (0020000), and BLOCK_DEVICE (0060000) shares bits with
  // DIRECTORY (0040000). Type checks therefore compare the whole field for
  // equality rather than testing individual bits.
  private val FILE_TYPE_MASK = Integer.parseInt("0170000", 8)
  private val FIFO = Integer.parseInt("0010000", 8)
  private val CHARACTER_DEVICE = Integer.parseInt("0020000", 8)
  private val DIRECTORY = Integer.parseInt("0040000", 8)
  private val BLOCK_DEVICE = Integer.parseInt("0060000", 8)
  private val REGULAR_FILE = Integer.parseInt("0100000", 8)
  private val SYMBOLIC_LINK = Integer.parseInt("0120000", 8)
  private val SOCKET = Integer.parseInt("0140000", 8)

  /** True if the mode's file-type field matches the given type code exactly. */
  private def hasType(typeCode: Int): Boolean = (permissions & FILE_TYPE_MASK) == typeCode

  /**
   * @return the privileges associated to the owner of the node.
   */
  def user: Privilege = ConcretePrivilege(permissions, userPrivilege)

  /**
   * @return the privileges associated to the group owning the node.
   */
  def group: Privilege = ConcretePrivilege(permissions, groupPrivilege)

  /**
   * @return the privileges associated to those who are not the user nor the group.
   */
  def other: Privilege = ConcretePrivilege(permissions, otherPrivilege)

  /**
   * @return the value of the UID bit.
   */
  def uIdBit: Boolean = (permissions & SET_UID) != 0

  /**
   * @return the value of the group ID bit.
   */
  def groupIdBit: Boolean = (permissions & SET_GROUP_ID) != 0

  /**
   * @return the value of the sticky bit.
   */
  def stickyBit: Boolean = (permissions & STICKY) != 0

  /**
   * @return true if the node is a fifo file; false otherwise.
   */
  def fifo: Boolean = hasType(FIFO)

  /**
   * @return true if the node is a character device; false otherwise.
   */
  def characterDevice: Boolean = hasType(CHARACTER_DEVICE)

  /**
   * @return true if the node is a directory; false otherwise.
   */
  def directory: Boolean = hasType(DIRECTORY)

  /**
   * @return true if the node is a block device; false otherwise.
   */
  def blockDevice: Boolean = hasType(BLOCK_DEVICE)

  /**
   *
   * @return true if the node is a regular file, false otherwise.
   */
  def isRegularFile: Boolean = hasType(REGULAR_FILE)

  /**
   *
   * @return true if the node is a symbolic link, false otherwise.
   */
  def isSymbolicLink: Boolean = hasType(SYMBOLIC_LINK)

  /**
   *
   * @return true if the node is a socket, false otherwise.
   */
  def isSocket: Boolean = hasType(SOCKET)

  /** The mode rendered back as an octal string (no leading zeros). */
  override def toString = Integer.toOctalString(permissions)
}

/** Read/write/execute capabilities of one permission class (user, group, other). */
trait Privilege
{
  def canRead: Boolean
  def canWrite: Boolean
  def canExecute: Boolean
}

/** Tests the mode word against the (read, write, execute) masks of one class. */
private case class ConcretePrivilege(permissions: Int, privileges: (Int, Int, Int)) extends Privilege
{
  override def canRead: Boolean = (permissions & privileges._1) != 0
  override def canWrite: Boolean = (permissions & privileges._2) != 0
  override def canExecute: Boolean = (permissions & privileges._3) != 0
}
| mgoeminne/scalagit | src/main/scala/mgoeminne/scalagit/NodeMode.scala | Scala | lgpl-3.0 | 3,271 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer (t3l@threelights.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe.blaze.modules.jvm
import edu.latrobe._
import edu.latrobe.blaze._
import edu.latrobe.blaze.modules._
/**
 * Baseline (non-optimized) JVM implementation of the add-uniform-noise module.
 * Perturbs each activation in place by adding a sample drawn from a uniform
 * distribution.
 */
final class AddUniformNoise_JVM_Baseline(override val builder: AddUniformNoiseBuilder,
                                         override val inputHints: BuildHints,
                                         override val seed: InstanceSeed,
                                         override val weightBufferBuilder: ValueTensorBufferBuilder)
  extends AddUniformNoise_JVM {

  // ---------------------------------------------------------------------------
  //    Forward propagation related.
  // ---------------------------------------------------------------------------
  // Mutates `output` in place: adds an independently drawn sample to every
  // element. NOTE(review): `uniform` is not defined in this class — presumably
  // inherited from AddUniformNoise_JVM; confirm its range/seeding there.
  override protected def doPredict(output: RealArrayTensor)
  : Unit = {
    ArrayEx.transform(
      output.values
    )(_ + uniform.sample())
  }

}
/**
 * Variant descriptor that instantiates [[AddUniformNoise_JVM_Baseline]] for a
 * given builder/hints/seed/weight-buffer combination.
 */
object AddUniformNoise_JVM_Baseline_Description
  extends ModuleVariant_JVM_Description[AddUniformNoiseBuilder] {

  override def build(builder: AddUniformNoiseBuilder,
                     hints: BuildHints,
                     seed: InstanceSeed,
                     weightBufferBuilder: ValueTensorBufferBuilder)
  : Module = new AddUniformNoise_JVM_Baseline(
    builder,
    hints,
    seed,
    weightBufferBuilder
  )
}
| bashimao/ltudl | blaze/src/main/scala/edu/latrobe/blaze/modules/jvm/AddUniformNoise_JVM_Baseline.scala | Scala | apache-2.0 | 2,035 |
package nodes.images
import breeze.linalg.DenseVector
import pipelines._
import utils.{ImageMetadata, ChannelMajorArrayVectorizedImage, Image}
/**
 * This node takes an image and performs pooling on regions of the image.
 *
 * Divides images into fixed size pools, but when fed with images of various
 * sizes may produce a varying number of pools.
 *
 * NOTE: By default strides start from poolSize/2.
 *
 * @param stride x and y stride to get regions of the image
 * @param poolSize size of the patch to perform pooling on
 * @param pixelFunction function to apply on every pixel before pooling
 * @param poolFunction pooling function to use on every region.
 */
class Pooler(
    stride: Int,
    poolSize: Int,
    pixelFunction: Double => Double,
    poolFunction: DenseVector[Double] => Double)
  extends Transformer[Image, Image] {

  val strideStart = poolSize / 2

  def apply(image: Image) = {
    val xDim = image.metadata.xDim
    val yDim = image.metadata.yDim
    val numChannels = image.metadata.numChannels

    val numPoolsX = math.ceil((xDim - strideStart).toDouble / stride).toInt
    val numPoolsY = math.ceil((yDim - strideStart).toDouble / stride).toInt
    val patch = new Array[Double]( numPoolsX * numPoolsY * numChannels)

    // Start at strideStart in (x, y) and visit pool centers on a `stride` grid.
    for (x <- strideStart until xDim by stride;
         y <- strideStart until yDim by stride) {
      // Clip the pool region to the image bounds (affects right/bottom edges only).
      val startX = x - poolSize/2
      val endX = math.min(x + poolSize/2, xDim)
      val startY = y - poolSize/2
      val endY = math.min(y + poolSize/2, yDim)
      val poolWidth = endX - startX
      val poolHeight = endY - startY

      var c = 0
      while (c < numChannels) {
        // BUGFIX: allocate a fresh, exactly-sized vector per (region, channel).
        // The previous code reused a single poolSize*poolSize vector across all
        // channels of a region, so for clipped edge regions poolFunction saw
        // stale entries: zeros for the first channel and the *previous channel's*
        // pixel values for subsequent channels.
        val pool = DenseVector.zeros[Double](poolWidth * poolHeight)
        var s = startX
        while (s < endX) {
          var b = startY
          while (b < endY) {
            // Row-major within the region: index = (s-startX) + (b-startY)*width.
            pool((s - startX) + (b - startY) * poolWidth) =
              pixelFunction(image.get(s, b, c))
            b = b + 1
          }
          s = s + 1
        }
        // Channel-major output layout: channel varies fastest, then x, then y.
        patch(c + (x - strideStart)/stride * numChannels +
          (y - strideStart)/stride * numPoolsX * numChannels) = poolFunction(pool)
        c = c + 1
      }
    }
    ChannelMajorArrayVectorizedImage(patch, ImageMetadata(numPoolsX, numPoolsY, numChannels))
  }
}
| shivaram/keystone | src/main/scala/nodes/images/Pooler.scala | Scala | apache-2.0 | 2,284 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.internal
import org.apache.spark.SparkConf
import org.apache.spark.annotation.Unstable
import org.apache.spark.sql.{ExperimentalMethods, SparkSession, UDFRegistration, _}
import org.apache.spark.sql.catalyst.analysis.{Analyzer, FunctionRegistry, ResolveSessionCatalog}
import org.apache.spark.sql.catalyst.catalog.SessionCatalog
import org.apache.spark.sql.catalyst.optimizer.Optimizer
import org.apache.spark.sql.catalyst.parser.ParserInterface
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.connector.catalog.CatalogManager
import org.apache.spark.sql.execution.{ColumnarRule, QueryExecution, SparkOptimizer, SparkPlanner, SparkSqlParser}
import org.apache.spark.sql.execution.aggregate.ResolveEncodersInScalaAgg
import org.apache.spark.sql.execution.analysis.DetectAmbiguousSelfJoin
import org.apache.spark.sql.execution.command.CommandCheck
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.execution.datasources.v2.{TableCapabilityCheck, V2SessionCatalog}
import org.apache.spark.sql.streaming.StreamingQueryManager
import org.apache.spark.sql.util.ExecutionListenerManager
/**
* Builder class that coordinates construction of a new [[SessionState]].
*
* The builder explicitly defines all components needed by the session state, and creates a session
* state when `build` is called. Components should only be initialized once. This is not a problem
* for most components as they are only used in the `build` function. However some components
* (`conf`, `catalog`, `functionRegistry`, `experimentalMethods` & `sqlParser`) are as dependencies
* for other components and are shared as a result. These components are defined as lazy vals to
* make sure the component is created only once.
*
* A developer can modify the builder by providing custom versions of components, or by using the
* hooks provided for the analyzer, optimizer & planner. There are some dependencies between the
* components (they are documented per dependency), a developer should respect these when making
* modifications in order to prevent initialization problems.
*
* A parent [[SessionState]] can be used to initialize the new [[SessionState]]. The new session
* state will clone the parent sessions state's `conf`, `functionRegistry`, `experimentalMethods`
* and `catalog` fields. Note that the state is cloned when `build` is called, and not before.
*/
@Unstable
abstract class BaseSessionStateBuilder(
    val session: SparkSession,
    val parentState: Option[SessionState] = None) {
  // Factory signature used by `createClone` to rebuild a session state of this concrete type.
  type NewBuilder = (SparkSession, Option[SessionState]) => BaseSessionStateBuilder

  /**
   * Function that produces a new instance of the `BaseSessionStateBuilder`. This is used by the
   * [[SessionState]]'s clone functionality. Make sure to override this when implementing your own
   * [[SessionStateBuilder]].
   */
  protected def newBuilder: NewBuilder

  /**
   * Session extensions defined in the [[SparkSession]].
   */
  protected def extensions: SparkSessionExtensions = session.extensions

  /**
   * Extract entries from `SparkConf` and put them in the `SQLConf`
   */
  protected def mergeSparkConf(sqlConf: SQLConf, sparkConf: SparkConf): Unit = {
    sparkConf.getAll.foreach { case (k, v) =>
      sqlConf.setConfString(k, v)
    }
  }

  /**
   * SQL-specific key-value configurations.
   *
   * These either get cloned from a pre-existing instance or newly created. The conf is merged
   * with its [[SparkConf]] only when there is no parent session.
   */
  protected lazy val conf: SQLConf = {
    parentState.map { s =>
      val cloned = s.conf.clone()
      // Legacy behavior flag: re-apply the SparkContext's SparkConf on top of the clone.
      if (session.sparkContext.conf.get(StaticSQLConf.SQL_LEGACY_SESSION_INIT_WITH_DEFAULTS)) {
        mergeSparkConf(cloned, session.sparkContext.conf)
      }
      cloned
    }.getOrElse {
      val conf = new SQLConf
      mergeSparkConf(conf, session.sparkContext.conf)
      conf
    }
  }

  /**
   * Internal catalog managing functions registered by the user.
   *
   * This either gets cloned from a pre-existing version or cloned from the built-in registry.
   */
  protected lazy val functionRegistry: FunctionRegistry = {
    parentState.map(_.functionRegistry.clone())
      .getOrElse(extensions.registerFunctions(FunctionRegistry.builtin.clone()))
  }

  /**
   * Experimental methods that can be used to define custom optimization rules and custom planning
   * strategies.
   *
   * This either gets cloned from a pre-existing version or newly created.
   */
  protected lazy val experimentalMethods: ExperimentalMethods = {
    parentState.map(_.experimentalMethods.clone()).getOrElse(new ExperimentalMethods)
  }

  /**
   * Parser that extracts expressions, plans, table identifiers etc. from SQL texts.
   *
   * Note: this depends on the `conf` field.
   */
  protected lazy val sqlParser: ParserInterface = {
    extensions.buildParser(session, new SparkSqlParser(conf))
  }

  /**
   * ResourceLoader that is used to load function resources and jars.
   */
  protected lazy val resourceLoader: SessionResourceLoader = new SessionResourceLoader(session)

  /**
   * Catalog for managing table and database states. If there is a pre-existing catalog, the state
   * of that catalog (temp tables & current database) will be copied into the new catalog.
   *
   * Note: this depends on the `conf`, `functionRegistry` and `sqlParser` fields.
   */
  protected lazy val catalog: SessionCatalog = {
    val catalog = new SessionCatalog(
      // Passed as thunks: the shared-state components are resolved lazily by the catalog.
      () => session.sharedState.externalCatalog,
      () => session.sharedState.globalTempViewManager,
      functionRegistry,
      conf,
      SessionState.newHadoopConf(session.sparkContext.hadoopConfiguration, conf),
      sqlParser,
      resourceLoader)
    parentState.foreach(_.catalog.copyStateTo(catalog))
    catalog
  }

  // v2 catalog facade over the v1 `catalog` above, plus the manager that tracks
  // both of them for catalog-plugin resolution.
  protected lazy val v2SessionCatalog = new V2SessionCatalog(catalog, conf)

  protected lazy val catalogManager = new CatalogManager(conf, v2SessionCatalog, catalog)

  /**
   * Interface exposed to the user for registering user-defined functions.
   *
   * Note 1: The user-defined functions must be deterministic.
   * Note 2: This depends on the `functionRegistry` field.
   */
  protected def udfRegistration: UDFRegistration = new UDFRegistration(functionRegistry)

  /**
   * Logical query plan analyzer for resolving unresolved attributes and relations.
   *
   * Note: this depends on the `conf` and `catalog` fields.
   */
  protected def analyzer: Analyzer = new Analyzer(catalogManager, conf) {
    override val extendedResolutionRules: Seq[Rule[LogicalPlan]] =
      new FindDataSourceTable(session) +:
        new ResolveSQLOnFile(session) +:
        new FallBackFileSourceV2(session) +:
        ResolveEncodersInScalaAgg +:
        new ResolveSessionCatalog(
          catalogManager, conf, catalog.isTempView, catalog.isTempFunction) +:
        customResolutionRules

    override val postHocResolutionRules: Seq[Rule[LogicalPlan]] =
      new DetectAmbiguousSelfJoin(conf) +:
        PreprocessTableCreation(session) +:
        PreprocessTableInsertion(conf) +:
        DataSourceAnalysis(conf) +:
        customPostHocResolutionRules

    override val extendedCheckRules: Seq[LogicalPlan => Unit] =
      PreWriteCheck +:
        PreReadCheck +:
        HiveOnlyCheck +:
        TableCapabilityCheck +:
        CommandCheck(conf) +:
        customCheckRules
  }

  /**
   * Custom resolution rules to add to the Analyzer. Prefer overriding this instead of creating
   * your own Analyzer.
   *
   * Note that this may NOT depend on the `analyzer` function.
   */
  protected def customResolutionRules: Seq[Rule[LogicalPlan]] = {
    extensions.buildResolutionRules(session)
  }

  /**
   * Custom post resolution rules to add to the Analyzer. Prefer overriding this instead of
   * creating your own Analyzer.
   *
   * Note that this may NOT depend on the `analyzer` function.
   */
  protected def customPostHocResolutionRules: Seq[Rule[LogicalPlan]] = {
    extensions.buildPostHocResolutionRules(session)
  }

  /**
   * Custom check rules to add to the Analyzer. Prefer overriding this instead of creating
   * your own Analyzer.
   *
   * Note that this may NOT depend on the `analyzer` function.
   */
  protected def customCheckRules: Seq[LogicalPlan => Unit] = {
    extensions.buildCheckRules(session)
  }

  /**
   * Logical query plan optimizer.
   *
   * Note: this depends on `catalog` and `experimentalMethods` fields.
   */
  protected def optimizer: Optimizer = {
    new SparkOptimizer(catalogManager, catalog, experimentalMethods) {
      override def earlyScanPushDownRules: Seq[Rule[LogicalPlan]] =
        super.earlyScanPushDownRules ++ customEarlyScanPushDownRules

      override def extendedOperatorOptimizationRules: Seq[Rule[LogicalPlan]] =
        super.extendedOperatorOptimizationRules ++ customOperatorOptimizationRules
    }
  }

  /**
   * Custom operator optimization rules to add to the Optimizer. Prefer overriding this instead
   * of creating your own Optimizer.
   *
   * Note that this may NOT depend on the `optimizer` function.
   */
  protected def customOperatorOptimizationRules: Seq[Rule[LogicalPlan]] = {
    extensions.buildOptimizerRules(session)
  }

  /**
   * Custom early scan push down rules to add to the Optimizer. Prefer overriding this instead
   * of creating your own Optimizer.
   *
   * Note that this may NOT depend on the `optimizer` function.
   */
  protected def customEarlyScanPushDownRules: Seq[Rule[LogicalPlan]] = Nil

  /**
   * Planner that converts optimized logical plans to physical plans.
   *
   * Note: this depends on the `conf` and `experimentalMethods` fields.
   */
  protected def planner: SparkPlanner = {
    new SparkPlanner(session, conf, experimentalMethods) {
      override def extraPlanningStrategies: Seq[Strategy] =
        super.extraPlanningStrategies ++ customPlanningStrategies
    }
  }

  /**
   * Custom strategies to add to the planner. Prefer overriding this instead of creating
   * your own Planner.
   *
   * Note that this may NOT depend on the `planner` function.
   */
  protected def customPlanningStrategies: Seq[Strategy] = {
    extensions.buildPlannerStrategies(session)
  }

  // Columnar execution rules contributed via session extensions.
  protected def columnarRules: Seq[ColumnarRule] = {
    extensions.buildColumnarRules(session)
  }

  /**
   * Create a query execution object.
   */
  protected def createQueryExecution: LogicalPlan => QueryExecution = { plan =>
    new QueryExecution(session, plan)
  }

  /**
   * Interface to start and stop streaming queries.
   */
  protected def streamingQueryManager: StreamingQueryManager = new StreamingQueryManager(session)

  /**
   * An interface to register custom [[org.apache.spark.sql.util.QueryExecutionListener]]s
   * that listen for execution metrics.
   *
   * This gets cloned from parent if available, otherwise a new instance is created.
   */
  protected def listenerManager: ExecutionListenerManager = {
    parentState.map(_.listenerManager.clone(session)).getOrElse(
      new ExecutionListenerManager(session, loadExtensions = true))
  }

  /**
   * Function used to make clones of the session state.
   */
  protected def createClone: (SparkSession, SessionState) => SessionState = {
    // Capture `newBuilder` once so the returned closure does not re-evaluate it per clone.
    val createBuilder = newBuilder
    (session, state) => createBuilder(session, Option(state)).build()
  }

  /**
   * Build the [[SessionState]].
   */
  def build(): SessionState = {
    // Several components (catalog, analyzer, optimizer, ...) are passed as thunks so
    // the SessionState can construct them lazily.
    new SessionState(
      session.sharedState,
      conf,
      experimentalMethods,
      functionRegistry,
      udfRegistration,
      () => catalog,
      sqlParser,
      () => analyzer,
      () => optimizer,
      planner,
      () => streamingQueryManager,
      listenerManager,
      () => resourceLoader,
      createQueryExecution,
      createClone,
      columnarRules)
  }
}
/**
 * Helper class for using SessionStateBuilders during tests.
 *
 * Overrides `conf` so that `overrideConfs` are re-applied on every `clear()`,
 * keeping the test defaults in place even after the conf is reset.
 */
private[sql] trait WithTestConf { self: BaseSessionStateBuilder =>
  def overrideConfs: Map[String, String]

  override protected lazy val conf: SQLConf = {
    val overrideConfigurations = overrideConfs
    parentState.map { s =>
      val cloned = s.conf.clone()
      if (session.sparkContext.conf.get(StaticSQLConf.SQL_LEGACY_SESSION_INIT_WITH_DEFAULTS)) {
        // BUGFIX: merge into the clone we are about to return. The previous code
        // called mergeSparkConf(conf, ...), referencing this very lazy val while it
        // is still being initialized (self-recursive), and diverged from the base
        // class's implementation which merges into `cloned`.
        mergeSparkConf(cloned, session.sparkContext.conf)
      }
      cloned
    }.getOrElse {
      val conf = new SQLConf {
        clear()
        override def clear(): Unit = {
          super.clear()
          // Make sure we start with the default test configs even after clear
          overrideConfigurations.foreach { case (key, value) => setConfString(key, value) }
        }
      }
      mergeSparkConf(conf, session.sparkContext.conf)
      conf
    }
  }
}
| spark-test/spark | sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala | Scala | apache-2.0 | 13,615 |
package com.twitter.finagle.thrift
import com.twitter.finagle.{Status, Service, WriteException}
import com.twitter.util.{Await, Future, Promise, Return, Throw}
import org.apache.thrift.TApplicationException
import org.apache.thrift.protocol.{TBinaryProtocol, TMessage, TMessageType}
import org.junit.runner.RunWith
import org.mockito.Matchers
import org.mockito.Mockito.{verify, when, times}
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
/**
 * Tests for `ValidateThriftService`, which inspects Thrift reply messages and
 * marks the underlying connection unusable when it observes certain
 * `TApplicationException` codes.
 */
@RunWith(classOf[JUnitRunner])
class ValidateThriftServiceTest extends FunSuite with MockitoSugar {
  // Per-test fixture: a mocked underlying service that answers every request with
  // the promise `p` and reports itself as open.
  case class ValidateThriftServiceContext(p: Promise[Array[Byte]] = new Promise[Array[Byte]]){
    def newValidate() = new ValidateThriftService(service, protocolFactory)
    lazy val service: Service[ThriftClientRequest, Array[Byte]] = {
      val service = mock[Service[ThriftClientRequest, Array[Byte]]]
      when(service(Matchers.any[ThriftClientRequest])).thenReturn(p)
      when(service.status).thenReturn(Status.Open)
      service
    }
    val req: ThriftClientRequest = mock[ThriftClientRequest]
    lazy val validate = newValidate()
    lazy val protocolFactory = new TBinaryProtocol.Factory()
  }

  test("ValidateThriftService should query availability from underlying") {
    val c = ValidateThriftServiceContext()
    import c._

    // Availability must be delegated to the wrapped service on every query.
    when(service.status).thenReturn(Status.Open)
    assert(validate.isAvailable)
    verify(service).status

    when(service.status).thenReturn(Status.Closed)
    assert(!validate.isAvailable)
    verify(service, times(2)).status
  }

  test("ValidateThriftService should handle no-exception messages") {
    val c = ValidateThriftServiceContext()
    import c._

    // A plain REPLY message must pass through untouched and keep the
    // connection available.
    val buf = new OutputBuffer(protocolFactory)
    buf().writeMessageBegin(new TMessage("ok123", TMessageType.REPLY, 0))
    buf().writeMessageEnd()
    val res = validate(req)
    assert(res.isDefined == false)
    verify(service).apply(req)
    p.setValue(buf.toArray)
    assert(res.isDefined)
    assert(validate.isAvailable)
  }

  test("ValidateThriftService should invalidate connection on bad TApplicationException") {
    val c = ValidateThriftServiceContext()
    import c._

    // These codes indicate a corrupted/out-of-sync connection; after receiving
    // one, further requests must fail with a (retryable) WriteException wrapping
    // InvalidThriftConnectionException.
    val codes = Seq(
      TApplicationException.BAD_SEQUENCE_ID,
      TApplicationException.INVALID_MESSAGE_TYPE,
      TApplicationException.MISSING_RESULT,
      TApplicationException.UNKNOWN,
      TApplicationException.WRONG_METHOD_NAME)

    for (typ <- codes) {
      val buf = new OutputBuffer(protocolFactory)
      buf().writeMessageBegin(new TMessage("ok123", TMessageType.EXCEPTION, 0))
      val exc = new TApplicationException(typ, "wtf")
      exc.write(buf())
      buf().writeMessageEnd()
      val validate = newValidate()
      val arr = buf.toArray
      when(service(Matchers.any[ThriftClientRequest])).thenReturn(Future.value(arr))
      assert(validate.isAvailable)
      // The offending response itself is still returned to the caller...
      val f = validate(req)
      assert(f.isDefined)
      assert(Await.result(f) == arr)
      // ...but the connection is marked dead for subsequent requests.
      assert(!validate.isAvailable)
      val resp = validate(req).poll
      assert(resp.isDefined)
      assert(resp.get.isThrow)
      val thrown = resp.get.asInstanceOf[Throw[Array[Byte]]].e
      assert(thrown.isInstanceOf[WriteException])
      assert(thrown.getCause.isInstanceOf[InvalidThriftConnectionException])
    }
  }

  test("ValidateThriftService should not invalidate connection on OK TApplicationException") {
    val c = ValidateThriftServiceContext()
    import c._

    // Application-level failures that do not imply a broken connection must
    // leave the service available.
    val codes = Seq(TApplicationException.INTERNAL_ERROR,
      TApplicationException.UNKNOWN_METHOD)

    for (typ <- codes) {
      val buf = new OutputBuffer(protocolFactory)
      buf().writeMessageBegin(new TMessage("foobar", TMessageType.EXCEPTION, 0))
      val exc = new TApplicationException(typ, "it's ok, don't worry about it!")
      exc.write(buf())
      buf().writeMessageEnd()
      val validate = newValidate()
      val arr = buf.toArray
      when(service(Matchers.any[ThriftClientRequest])).thenReturn(Future.value(arr))
      assert(validate.isAvailable)
      val f = validate(req)
      assert(f.isDefined)
      assert(Await.result(f) == arr)
      assert(validate.isAvailable)
      assert(validate(req).poll match {
        case Some(Return(_)) => true
        case _ => false
      })
    }
  }
}
| adriancole/finagle | finagle-thrift/src/test/scala/com/twitter/finagle/thrift/ValidateThriftServiceTest.scala | Scala | apache-2.0 | 4,297 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.metrics
import scala.collection.mutable.ArrayBuffer
import com.codahale.metrics.MetricRegistry
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.master.MasterSource
import org.apache.spark.internal.config._
import org.apache.spark.metrics.sink.Sink
import org.apache.spark.metrics.source.{Source, StaticSources}
/**
 * Tests for [[MetricsSystem]] creation and, mainly, for `buildRegistryName`:
 * the metric name is prefixed with "<namespace>.<executorId>." only for
 * driver/executor instances with the relevant configs set.
 */
class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateMethodTester {
  var filePath: String = _
  var conf: SparkConf = null
  var securityMgr: SecurityManager = null

  before {
    filePath = getClass.getClassLoader.getResource("test_metrics_system.properties").getFile
    conf = new SparkConf(false).set("spark.metrics.conf", filePath)
    securityMgr = new SecurityManager(conf)
  }

  /**
   * Builds the throwaway metrics [[Source]] used by the registry-name tests.
   * A fresh instance is returned per call, matching the identical anonymous
   * instances the tests previously declared inline.
   */
  private def dummySource(): Source = new Source {
    override val sourceName = "dummySource"
    override val metricRegistry = new MetricRegistry()
  }

  test("MetricsSystem with default config") {
    val metricsSystem = MetricsSystem.createMetricsSystem("default", conf, securityMgr)
    metricsSystem.start()
    val sources = PrivateMethod[ArrayBuffer[Source]]('sources)
    val sinks = PrivateMethod[ArrayBuffer[Sink]]('sinks)

    assert(metricsSystem.invokePrivate(sources()).length === StaticSources.allSources.length)
    assert(metricsSystem.invokePrivate(sinks()).length === 0)
    assert(metricsSystem.getServletHandlers.nonEmpty)
  }

  test("MetricsSystem with sources add") {
    val metricsSystem = MetricsSystem.createMetricsSystem("test", conf, securityMgr)
    metricsSystem.start()
    val sources = PrivateMethod[ArrayBuffer[Source]]('sources)
    val sinks = PrivateMethod[ArrayBuffer[Sink]]('sinks)

    assert(metricsSystem.invokePrivate(sources()).length === StaticSources.allSources.length)
    assert(metricsSystem.invokePrivate(sinks()).length === 1)
    assert(metricsSystem.getServletHandlers.nonEmpty)

    val source = new MasterSource(null)
    metricsSystem.registerSource(source)
    assert(metricsSystem.invokePrivate(sources()).length === StaticSources.allSources.length + 1)
  }

  test("MetricsSystem with Driver instance") {
    val source = dummySource()
    val appId = "testId"
    val executorId = "driver"
    conf.set("spark.app.id", appId)
    conf.set("spark.executor.id", executorId)

    val instanceName = "driver"
    val driverMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)

    val metricName = driverMetricsSystem.buildRegistryName(source)
    assert(metricName === s"$appId.$executorId.${source.sourceName}")
  }

  test("MetricsSystem with Driver instance and spark.app.id is not set") {
    val source = dummySource()
    val executorId = "driver"
    conf.set("spark.executor.id", executorId)

    val instanceName = "driver"
    val driverMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)

    // Without an app id there is no usable prefix: the bare source name is used.
    val metricName = driverMetricsSystem.buildRegistryName(source)
    assert(metricName === source.sourceName)
  }

  test("MetricsSystem with Driver instance and spark.executor.id is not set") {
    val source = dummySource()
    val appId = "testId"
    conf.set("spark.app.id", appId)

    val instanceName = "driver"
    val driverMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)

    val metricName = driverMetricsSystem.buildRegistryName(source)
    assert(metricName === source.sourceName)
  }

  test("MetricsSystem with Executor instance") {
    val source = dummySource()
    val appId = "testId"
    val executorId = "1"
    conf.set("spark.app.id", appId)
    conf.set("spark.executor.id", executorId)

    val instanceName = "executor"
    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)

    val metricName = executorMetricsSystem.buildRegistryName(source)
    assert(metricName === s"$appId.$executorId.${source.sourceName}")
  }

  test("MetricsSystem with Executor instance and spark.app.id is not set") {
    val source = dummySource()
    val executorId = "1"
    conf.set("spark.executor.id", executorId)

    val instanceName = "executor"
    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)

    val metricName = executorMetricsSystem.buildRegistryName(source)
    assert(metricName === source.sourceName)
  }

  test("MetricsSystem with Executor instance and spark.executor.id is not set") {
    val source = dummySource()
    val appId = "testId"
    conf.set("spark.app.id", appId)

    val instanceName = "executor"
    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)

    val metricName = executorMetricsSystem.buildRegistryName(source)
    assert(metricName === source.sourceName)
  }

  test("MetricsSystem with instance which is neither Driver nor Executor") {
    val source = dummySource()
    val appId = "testId"
    val executorId = "dummyExecutorId"
    conf.set("spark.app.id", appId)
    conf.set("spark.executor.id", executorId)

    val instanceName = "testInstance"
    val testMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)

    val metricName = testMetricsSystem.buildRegistryName(source)

    // Even if spark.app.id and spark.executor.id are set, they are not used for the metric name.
    assert(metricName != s"$appId.$executorId.${source.sourceName}")
    assert(metricName === source.sourceName)
  }

  test("MetricsSystem with Executor instance, with custom namespace") {
    val source = dummySource()
    val appId = "testId"
    val appName = "testName"
    val executorId = "1"
    conf.set("spark.app.id", appId)
    conf.set("spark.app.name", appName)
    conf.set("spark.executor.id", executorId)
    conf.set(METRICS_NAMESPACE, "${spark.app.name}")

    val instanceName = "executor"
    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)

    // The custom namespace expands against the conf, replacing the app-id prefix.
    val metricName = executorMetricsSystem.buildRegistryName(source)
    assert(metricName === s"$appName.$executorId.${source.sourceName}")
  }

  test("MetricsSystem with Executor instance, custom namespace which is not set") {
    val source = dummySource()
    val executorId = "1"
    val namespaceToResolve = "${spark.doesnotexist}"
    conf.set("spark.executor.id", executorId)
    conf.set(METRICS_NAMESPACE, namespaceToResolve)

    val instanceName = "executor"
    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)

    val metricName = executorMetricsSystem.buildRegistryName(source)
    // If the user set the spark.metrics.namespace property to an expansion of another property
    // (say ${spark.doesnotexist}, the unresolved name (i.e. literally ${spark.doesnotexist})
    // is used as the root logger name.
    assert(metricName === s"$namespaceToResolve.$executorId.${source.sourceName}")
  }

  test("MetricsSystem with Executor instance, custom namespace, spark.executor.id not set") {
    val source = dummySource()
    val appId = "testId"
    conf.set("spark.app.name", appId)
    conf.set(METRICS_NAMESPACE, "${spark.app.name}")

    val instanceName = "executor"
    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)

    val metricName = executorMetricsSystem.buildRegistryName(source)
    assert(metricName === source.sourceName)
  }

  test("MetricsSystem with non-driver, non-executor instance with custom namespace") {
    val source = dummySource()
    val appId = "testId"
    val appName = "testName"
    val executorId = "dummyExecutorId"
    conf.set("spark.app.id", appId)
    conf.set("spark.app.name", appName)
    conf.set(METRICS_NAMESPACE, "${spark.app.name}")
    conf.set("spark.executor.id", executorId)

    val instanceName = "testInstance"
    val testMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)

    val metricName = testMetricsSystem.buildRegistryName(source)

    // Even if spark.app.id and spark.executor.id are set, they are not used for the metric name.
    assert(metricName != s"$appId.$executorId.${source.sourceName}")
    assert(metricName === source.sourceName)
  }
}
| bravo-zhang/spark | core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala | Scala | apache-2.0 | 10,143 |
package com.twitter.querulous.database
import com.twitter.querulous.AutoDisabler
import com.twitter.util.Duration
import java.sql.{Connection, SQLException}
/**
 * A [[DatabaseFactory]] decorator: every [[Database]] it produces is wrapped in an
 * [[AutoDisablingDatabase]] so hosts that fail repeatedly are disabled for a while.
 */
class AutoDisablingDatabaseFactory(
  val databaseFactory: DatabaseFactory,
  val disableErrorCount: Int,
  val disableDuration: Duration)
  extends DatabaseFactory {

  def apply(dbhosts: List[String], dbname: String, username: String, password: String, urlOptions: Map[String, String], driverName: String) = {
    // Delegate construction to the wrapped factory, then decorate the result.
    val underlying = databaseFactory(dbhosts, dbname, username, password, urlOptions, driverName)
    new AutoDisablingDatabase(underlying, disableErrorCount, disableDuration)
  }
}
/**
 * A [[Database]] proxy that records operation outcomes via [[AutoDisabler]]: after
 * `disableErrorCount` consecutive SQL errors the host is disabled for `disableDuration`,
 * during which `open()` fails fast instead of hitting the database.
 */
class AutoDisablingDatabase(
  val database: Database,
  protected val disableErrorCount: Int,
  protected val disableDuration: Duration)
  extends Database
  with DatabaseProxy
  with AutoDisabler {

  /**
   * Opens a connection on the wrapped database, noting success or failure so the
   * disabler can trip after repeated SQL errors.
   *
   * @throws SQLException if the underlying open fails (the failure is recorded first)
   */
  def open() = {
    throwIfDisabled(database.hosts.head)
    try {
      val rv = database.open()
      noteOperationOutcome(true)
      rv
    } catch {
      case e: SQLException =>
        noteOperationOutcome(false)
        throw e
      // Non-SQL exceptions propagate unchanged and do not count toward disabling.
      // (The previous `case e: Exception => throw e` clause was a redundant
      // catch-and-rethrow and has been removed; behavior is identical.)
    }
  }

  def close(connection: Connection) { database.close(connection) }
}
| twitter/querulous | querulous-core/src/main/scala/com/twitter/querulous/database/AutoDisablingDatabase.scala | Scala | apache-2.0 | 1,186 |
package com.fourseasapp.facebookads.network
import java.io.File
import java.net.URLEncoder
import java.util.Locale
import java.util.function.BiConsumer
import javax.annotation.Nullable
import com.fourseasapp.facebookads.{APIContext, APIException, Mappable}
import com.google.inject.assistedinject.{Assisted, AssistedInject}
import org.slf4j.LoggerFactory
import org.asynchttpclient.{AsyncHttpClient, RequestBuilder, Response}
import org.asynchttpclient.request.body.multipart.{FilePart, StringPart}
import play.api.libs.functional.syntax._
import play.api.libs.json._
import play.api.libs.ws.ahc.AhcWSResponse
import play.api.libs.ws.{WSClient, WSResponse}
import scala.collection.mutable.ListBuffer
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.reflect.runtime.universe._
/**
* Created by hailegia on 3/12/2016.
*/
/**
 * One Facebook Graph API call bound to an [[APIContext]].
 *
 * A request targets the API root (both `nodeId` and `endpoint` null), a single node
 * (`endpoint` null), or an edge of a node. Responses are parsed with play-json and,
 * when the result is an [[APINode]], wired back to this request's context/factory so
 * follow-up calls on the node work. Instances are immutable: the `set*` methods
 * return modified copies.
 */
class APIRequest @AssistedInject()(wsClient: WSClient,
                                   apiRequestFactory: APIRequestFactory,
                                   @Assisted apiContext: APIContext,
                                   @Nullable @Assisted("nodeId") nodeId: String,
                                   @Nullable @Assisted("endpoint") endpoint: String,
                                   @Assisted("method") method: String,
                                   @Assisted returnFields: Seq[String],
                                   @Assisted params: Map[String, Any],
                                   @Assisted files: Map[String, File]) {

  // Populated from the "summary.total_count" section of list responses, when present.
  private var _totalResultCount: Option[Int] = None
  // Cursor-based paging info from the most recent list response; drives getNext().
  private var paging: Option[Paging] = None
  // When this request addresses an edge, the node owning the edge is the parent of results.
  private val parentId = if (endpoint != null) nodeId else null

  /** Fetches the first page of an edge as a typed list (Left = raw error JSON). */
  def getList[T <: APINode[T]](extraParams: Map[String, Any] = Map())
                              (implicit format: Format[T], ec: ExecutionContext): Future[Either[JsValue, List[T]]] = {
    callInternalList(APIRequest.METHOD_GET, extraParams)
  }

  /** Total result count from the last list response's summary, if the API returned one. */
  def totalResultCount = _totalResultCount

  /** True when the last list response carried a "next" paging link. */
  def canGoNext = paging.isDefined && paging.get.next.isDefined

  /** Fetches the next page using the stored "after" cursor; empty list when exhausted. */
  def getNext[T <: APINode[T]](extraParams: Map[String, Any] = Map())
                              (implicit format: Format[T], ec: ExecutionContext): Future[Either[JsValue, List[T]]] = {
    if (canGoNext) {
      var nextExtraParams = Map[String, Any]()
      if (extraParams != null) {
        nextExtraParams ++= extraParams
      }
      nextExtraParams += "after" -> paging.get.cursors.after
      callInternalList(APIRequest.METHOD_GET, nextExtraParams)
    } else {
      Future(Right(List()))
    }
  }

  /** Executes this request with its configured HTTP method and parses a single result. */
  def execute[T](extraParams: Map[String, Any] = Map())
                (implicit format: Format[T], ec: ExecutionContext): Future[Either[JsValue, T]] = {
    callInternal(method, extraParams)
  }

  /** Returns a copy of this request with a different HTTP method. */
  def setMethod(newMethod: String): APIRequest = {
    new APIRequest(wsClient, apiRequestFactory, apiContext, nodeId, endpoint, newMethod, returnFields, params, files)
  }

  /** Returns a copy of this request with different parameters. */
  def setParams(newParams: Map[String, Any]): APIRequest = {
    new APIRequest(wsClient, apiRequestFactory, apiContext, nodeId, endpoint, method, returnFields, newParams, files)
  }

  /** Returns a copy of this request with different requested return fields. */
  def setReturnFields(newReturnFields: List[String]): APIRequest = {
    new APIRequest(wsClient, apiRequestFactory, apiContext, nodeId, endpoint, method, newReturnFields, params, files)
  }

  /** Executes with `method` and validates the whole response body as a single `T`. */
  private def callInternal[T](method: String, extraParams: Map[String, Any])
                             (implicit format: Format[T], ec: ExecutionContext): Future[Either[JsValue, T]] = {
    val result: Future[WSResponse] = prepareRequest(method, extraParams)
    result map {
      wsResponse =>
        wsResponse.json.validate[T].fold(
          invalid => Left(wsResponse.json),
          obj => {
            // Result nodes need the context/factory wired in so they can issue further calls.
            if (obj.isInstanceOf[APINode[_]]) {
              val resultObj = obj.asInstanceOf[APINode[_]]
              resultObj.apiContext = apiContext
              resultObj.apiRequestFactory = apiRequestFactory
              if (parentId != null) {
                resultObj.parentId = parentId
              }
              Right(resultObj.asInstanceOf[T])
            } else {
              Right(obj)
            }
          }
        )
    }
  }

  /** Executes and returns the raw JSON body without validation (used by batch calls). */
  private[network] def callRaw(method: String)(implicit ec: ExecutionContext): Future[JsValue] = {
    prepareRequest(method, Map()).map(_.json)
  }

  /** Executes and parses the "data" array; also records paging and summary information. */
  private def callInternalList[T <: APINode[T]](method: String, extraParams: Map[String, Any])
                                               (implicit format: Format[T], ec: ExecutionContext): Future[Either[JsValue, List[T]]] = {
    val result: Future[WSResponse] = prepareRequest(method, extraParams)
    result map {
      wsResponse =>
        // Remember cursors / total count for getNext() and totalResultCount.
        paging = (wsResponse.json \ "paging").validate[Paging].asOpt
        _totalResultCount = (wsResponse.json \ "summary" \ "total_count").validate[Int].asOpt
        (wsResponse.json \ "data").validate[List[T]].fold(
          invalid => {
            APIRequest.logger.error(invalid.toString())
            APIRequest.logger.error(Json.stringify(wsResponse.json))
            Left(wsResponse.json)
          },
          data => {
            data.foreach(obj => {
              obj.apiContext = apiContext
              obj.apiRequestFactory = apiRequestFactory
              if (parentId != null) {
                obj.parentId = parentId
              }
            })
            Right(data)
          }
        )
    }
  }

  /** Builds and fires the HTTP request; multipart upload is used when files are attached. */
  private def prepareRequest(method: String, extraParams: Map[String, Any]): Future[WSResponse] = {
    val allParams = getAllParams(extraParams)
    val apiUrl = getApiUrl()
    val request = wsClient
      .url(apiUrl)
      // NOTE(review): if apiContext.timeOut is in seconds, multiplying by 1000 and then
      // calling .seconds yields a timeout 1000x longer than intended; `.millis` looks
      // like what was meant — confirm the unit of timeOut before changing.
      .withRequestTimeout((apiContext.timeOut * 1000).seconds)
      .withHeaders("User-Agent" -> APIContext.USER_AGENT)
    val result = method match {
      case APIRequest.METHOD_GET => {
        request
          .withQueryString(
            allParams.toSeq.map {
              case (k, v) => (k, v.toString)
            }: _*)
          .get()
      }
      case APIRequest.METHOD_POST => {
        val hasFile = files != null && files.size > 0
        if (hasFile) {
          // Multipart bodies are not supported through play-ws here, so drop down to the
          // underlying async-http-client.
          val client: AsyncHttpClient = wsClient.underlying
          // Fixed: RequestBuilder's no-arg constructor defaults to GET; this branch is
          // the POST path, so the method must be set explicitly.
          val requestBuilder = new RequestBuilder("POST").setUrl(apiUrl)
          allParams.foreach(e => {
            requestBuilder.addBodyPart(new StringPart(e._1, e._2.toString))
          })
          files.foreach(e => {
            requestBuilder.addBodyPart(new FilePart(e._1, e._2, APIRequest.getContentTypeForFile(e._2)))
          })
          val p = Promise[WSResponse]()
          val realRequest = requestBuilder.build()
          // Execute exactly once and bridge the Java CompletableFuture to a Scala Promise.
          // (A stray second client.executeRequest(...) call previously fired the request
          // twice; it has been removed.)
          val requestFuture = client.executeRequest(realRequest).toCompletableFuture
          requestFuture.whenComplete(new BiConsumer[Response, Throwable] {
            override def accept(r: Response, t: Throwable): Unit = {
              if (t != null) {
                p.failure(t)
              } else {
                p.success(AhcWSResponse(r))
              }
            }
          })
          p.future
        } else {
          request
            .post(allParams.mapValues(value => Seq(value.toString)))
        }
      }
      case APIRequest.METHOD_DELETE => {
        request
          .withQueryString(
            allParams.toSeq.map {
              case (k, v) => (k, v.toString)
            }: _*)
          .delete()
      }
      case _ => Future.failed(new scala.RuntimeException(s"HTTP METHOD ${method} is not supported."))
    }
    result
  }

  /** Merges constructor params, extra params, auth token, appsecret proof and fields. */
  private def getAllParams(extraParams: Map[String, Any]): Map[String, Any] = {
    var result = Map[String, Any]()
    if (params != null) {
      result ++= params
    }
    if (extraParams != null) {
      result ++= extraParams
    }
    result += ("access_token" -> apiContext.accessToken)
    if (apiContext.hasAppSecret) {
      result += ("appsecret_proof" -> apiContext.getAppSecretProof())
    }
    if (returnFields != null && returnFields.size > 0) {
      result += ("fields" -> returnFields.mkString(","))
    }
    result
  }

  /** Absolute request URL: endpointBase/version[/nodeId[/endpoint]]. */
  def getApiUrl(): String = {
    if (endpoint != null) {
      s"${apiContext.endpointBase}/${apiContext.version}/${nodeId}/${endpoint}"
    } else if (nodeId != null) {
      s"${apiContext.endpointBase}/${apiContext.version}/${nodeId}"
    } else {
      s"${apiContext.endpointBase}/${apiContext.version}"
    }
  }

  /** Snapshot of this request as one element of a batch call (see [[BatchAPIRequest]]). */
  def getBatchInfo[T <: APINode[T]](): BatchRequestInfo[T] = {
    var allParams = Map[String, Any]()
    if (params != null) {
      allParams ++= params
    }
    if (returnFields != null && returnFields.size > 0) {
      allParams += ("fields" -> returnFields.mkString(","))
    }
    // Fixed: the original filtered on the bare `isInstanceOf` (missing its type argument).
    val filesMap = allParams.filter(p => p._2.isInstanceOf[File]).mapValues(f => f.asInstanceOf[File])
    // NOTE(review): file-valued params are also URL-encoded into the body below (via
    // toString); they should probably be excluded from the body — confirm against the
    // Graph API batch documentation before changing.
    val bodies = for {p <- allParams } yield (p._1 + "=" + URLEncoder.encode(p._2.toString, "UTF-8"))
    val relativeUrl = method match {
      case APIRequest.METHOD_POST =>s"$nodeId/$endpoint"
      case _ => nodeId
    }
    BatchRequestInfo[T](method, relativeUrl, bodies.mkString("&"), parentId, apiRequestFactory, apiContext, filesMap, Promise[T]())
  }
}
/**
 * Accumulates [[APIRequest]]s and sends them to the Graph API as one batch POST.
 *
 * Each added request receives a Promise that is completed (or failed) with that
 * request's individual result once the batch response arrives.
 */
class BatchAPIRequest @AssistedInject()(apiRequestFactory: APIRequestFactory, @Assisted() context: APIContext) {
  // Requests are prepended, so this holds them in reverse order of addition. The batch
  // payload and the index-based response pairing in execute() both iterate this same
  // sequence, so request/response alignment is preserved.
  var requests: Seq[BatchRequestInfo[_]] = Seq()
  // Queues `request` for the next execute() and returns a Future of its individual result.
  def addRequest[T <: APINode[T]](request: APIRequest): Future[T] = {
    val requestInfo = request.getBatchInfo[T]()
    requests = requestInfo +: requests
    requestInfo.promise.future
  }
  // Sends all queued requests as a single batch POST and resolves every queued promise.
  // The returned list mirrors `requests` order: Left = raw error JSON, Right = parsed node.
  def execute[T <: APINode[T]](extraParams: Map[String, Any] = Map())
                              (implicit format: Format[T], m: Mappable[T], ec: ExecutionContext): Future[List[Either[JsValue, T]]] = {
    var allFiles = Map[String, File]()
    var batch = Json.arr()
    // Build the JSON "batch" array: one element per queued request.
    requests.foreach(info => {
      var element = Json.obj("method" -> info.method, "relative_url" -> info.relativePath)
      if (info.body != null) {
        element = element + ("body", JsString(info.body))
      }
      if (info.files != null && info.files.size > 0) {
        element = element + ("attached_files", JsString(info.files.keys.mkString(",")))
        allFiles ++= info.files
      }
      batch = batch.append(element)
    })
    val request = apiRequestFactory.createAPIRequest(context, null, null, APIRequest.METHOD_POST, List(), Map("batch" -> batch), allFiles)
    request.callRaw(APIRequest.METHOD_POST).map {
      case JsArray(allResponses) => {
        var results = ListBuffer[Either[JsValue, T]]()
        var index = 0
        // Responses come back in the same order as `requests`; pair them by index.
        for (response <- allResponses) {
          val requestInfo = requests(index)
          index += 1
          if (response == JsNull) {
            // A null entry means no result for this request; its promise resolves to null.
            results += null
            requestInfo.success(null)
          } else {
            val code = (response \ "code").get.as[Int]
            var node: Either[JsValue, T] = Left(response)
            if (code == 200) {
              (response \ "body").asOpt[String] foreach {body =>
                val bodyJson = Json.parse(body)
                bodyJson.validate[T].fold(
                  // Body did not validate as T: fall back to a bare node carrying only the id.
                  _ => (bodyJson \ "id").asOpt[String] foreach {id =>
                    node = Right(APINode.materialize(Map[String, Any]("id" -> id)))
                  },
                  x => node = Right(x)
                )
              }
            }
            results += node
            node match {
              case Left(_) => requestInfo.promise.failure(new APIException(response.toString()))
              case Right(nodeValue) => {
                // Wire the materialized node back to its originating context/factory.
                nodeValue.apiContext = requestInfo.apiContext
                nodeValue.apiRequestFactory = requestInfo.apiRequestFactory
                if (requestInfo.parentId != null) {
                  nodeValue.parentId = requestInfo.parentId
                }
                requestInfo.success(nodeValue)
              }
            }
          }
        }
        results.toList
      }
      case x => throw new APIException(x.toString())
    }
  }
}
/**
 * Everything needed to replay one request inside a batch call, plus the promise that
 * will be completed with that request's individual result.
 */
case class BatchRequestInfo[T <: APINode[T]](method: String, relativePath: String, body: String, parentId: String,
                                             apiRequestFactory: APIRequestFactory, apiContext: APIContext,
                                             files: Map[String, File], promise: Promise[T]) {
  // Unchecked cast: callers must pass a T (or null) — see BatchAPIRequest.execute.
  def success(x: Any) = promise.success(x.asInstanceOf[T])
}
/** Response shape for create/update/delete calls: an optional node id and/or success flag. */
case class CUDResponse(id: Option[String], success: Option[Boolean])
object CUDResponse {
  // Explicit play-json Format so both fields are read/written as nullable values.
  implicit val CUDResponseFormat: Format[CUDResponse] = (
    (JsPath \ "id").formatNullable[String] and
    (JsPath \ "success").formatNullable[Boolean]
  )(CUDResponse.apply, unlift(CUDResponse.unapply))
}
/** Cursor pair from the "paging.cursors" section of a list response. */
case class PagingCursor(after: String, before: String)
/** Paging block of a list response: cursors plus optional previous/next page URLs. */
case class Paging(cursors: PagingCursor, previous: Option[String], next: Option[String])
object Paging {
  // play-json (de)serializers; previous/next are nullable since edge pages may omit them.
  implicit val pagingCursorFormat = Json.format[PagingCursor]
  implicit val pagingFormats: Format[Paging] = (
    (JsPath \ "cursors").format[PagingCursor] and
    (JsPath \ "previous").formatNullable[String] and
    (JsPath \ "next").formatNullable[String]
  )(Paging.apply, unlift(Paging.unapply))
}
/** Constants and helpers shared by [[APIRequest]] instances. */
object APIRequest {
  val METHOD_POST = "POST"
  val METHOD_GET = "GET"
  val METHOD_DELETE = "DELETE"
  val logger = LoggerFactory.getLogger(classOf[APIRequest])
  // File extension -> MIME content type, used for multipart uploads.
  val fileToContentTypeMap = Map(
    ".atom" -> "application/atom+xml",
    ".rss" -> "application/rss+xml",
    ".xml" -> "application/xml",
    ".csv" -> "text/csv",
    ".txt" -> "text/plain"
  )

  /**
   * Looks up a content type for `file` by extension (case-insensitive).
   *
   * @return the matching MIME type, or null when no known extension matches
   *         (null is kept for compatibility with existing callers).
   */
  def getContentTypeForFile(file: File): String = {
    // Lowercase once instead of per map entry; collectFirst replaces the explicit
    // loop-with-return of the original.
    val lowerName = file.getName.toLowerCase(Locale.getDefault)
    fileToContentTypeMap
      .collectFirst { case (extension, contentType) if lowerName.endsWith(extension) => contentType }
      .orNull
  }
}
/**
 * Assisted-injection factory for [[APIRequest]] and [[BatchAPIRequest]] instances
 * (matches the @Assisted parameters on their constructors).
 */
trait APIRequestFactory {
  // Creates a request; nodeId/endpoint may be null to target a node or the API root.
  def createAPIRequest(@Assisted apiContext: APIContext,
                       @Assisted("nodeId") nodeId: String,
                       @Assisted("endpoint") endpoint: String,
                       @Assisted("method") method: String,
                       @Assisted returnFields: Seq[String],
                       @Assisted params: Map[String, Any],
                       @Assisted files: Map[String, File]): APIRequest
  // Creates an empty batch request bound to the given context.
  def createAPIBatchRequest(apiContext: APIContext): BatchAPIRequest
}
| hailg/facebook-scala-ads-sdk | src/main/scala/com/fourseasapp/facebookads/network/APIRequest.scala | Scala | mit | 14,164 |
package models.annotation.handler
import com.scalableminds.util.accesscontext.DBAccessContext
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import javax.inject.Inject
import models.annotation._
import models.task.{TaskDAO, TaskTypeDAO}
import models.user.{User, UserService}
import models.annotation.AnnotationState._
import utils.ObjectId
import scala.concurrent.ExecutionContext
/**
 * Provides "compound" annotations for a task type: all finished annotations of the task
 * type's tasks, merged into a single non-persisted annotation.
 */
class TaskTypeInformationHandler @Inject()(taskTypeDAO: TaskTypeDAO,
                                           taskDAO: TaskDAO,
                                           userService: UserService,
                                           annotationDAO: AnnotationDAO,
                                           annotationMerger: AnnotationMerger)(implicit val ec: ExecutionContext)
    extends AnnotationInformationHandler
    with FoxImplicits {

  // Collects every finished task annotation of the task type and merges them into one
  // CompoundTaskType annotation (not persisted). Fails when the user is missing, the
  // annotations span several datasets, or no finished annotation exists.
  override def provideAnnotation(taskTypeId: ObjectId, userOpt: Option[User])(
      implicit ctx: DBAccessContext): Fox[Annotation] =
    for {
      taskType <- taskTypeDAO.findOne(taskTypeId) ?~> "taskType.notFound"
      tasks <- taskDAO.findAllByTaskType(taskType._id)
      // One query per task; the per-task result lists are flattened into one list.
      annotations <- Fox
        .serialCombined(tasks)(task => annotationDAO.findAllByTaskIdAndType(task._id, AnnotationType.Task))
        .map(_.flatten)
        .toFox
      finishedAnnotations = annotations.filter(_.state == Finished)
      // Merging only makes sense within a single dataset, and needs at least one input.
      _ <- assertAllOnSameDataset(finishedAnnotations)
      _ <- assertNonEmpty(finishedAnnotations) ?~> "taskType.noAnnotations"
      user <- userOpt ?~> "user.notAuthorised"
      _dataSet <- finishedAnnotations.headOption.map(_._dataSet).toFox
      mergedAnnotation <- annotationMerger.mergeN(taskTypeId,
                                                  persistTracing = false,
                                                  user._id,
                                                  _dataSet,
                                                  taskType._team,
                                                  AnnotationType.CompoundTaskType,
                                                  finishedAnnotations) ?~> "annotation.merge.failed.compound"
    } yield mergedAnnotation

  // Access to a compound task-type annotation is limited to team managers/admins of the
  // task type's team; everyone else (including anonymous users) is denied.
  override def restrictionsFor(taskTypeId: ObjectId)(implicit ctx: DBAccessContext): Fox[AnnotationRestrictions] =
    for {
      taskType <- taskTypeDAO.findOne(taskTypeId) ?~> "taskType.notFound"
    } yield {
      new AnnotationRestrictions {
        override def allowAccess(userOption: Option[User]): Fox[Boolean] =
          (for {
            user <- userOption.toFox
            allowed <- userService.isTeamManagerOrAdminOf(user, taskType._team)
          } yield allowed).orElse(Fox.successful(false))
      }
    }
}
| scalableminds/webknossos | app/models/annotation/handler/TaskTypeInformationHandler.scala | Scala | agpl-3.0 | 2,684 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.util
import java.time.LocalDate
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.plans.SQLHelper
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.internal.SQLConf
class DateFormatterSuite extends SparkFunSuite with SQLHelper {

  /** Evaluates `body` once per outstanding time zone, with the session time zone set to it. */
  private def withEachTimeZone(body: => Unit): Unit = {
    for (zoneId <- DateTimeTestUtils.outstandingTimezonesIds) {
      withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zoneId) {
        body
      }
    }
  }

  test("parsing dates") {
    withEachTimeZone {
      val formatter = DateFormatter()
      assert(formatter.parse("2018-12-02") === 17867)
    }
  }

  test("format dates") {
    withEachTimeZone {
      val formatter = DateFormatter()
      assert(formatter.format(17867) === "2018-12-02")
    }
  }

  test("roundtrip date -> days -> date") {
    val dates = Seq(
      "0050-01-01",
      "0953-02-02",
      "1423-03-08",
      "1969-12-31",
      "1972-08-25",
      "1975-09-26",
      "2018-12-12",
      "2038-01-01",
      "5010-11-17")
    for (date <- dates) {
      withEachTimeZone {
        // Parsing and re-formatting must reproduce the original string exactly.
        val formatter = DateFormatter()
        assert(formatter.format(formatter.parse(date)) === date)
      }
    }
  }

  test("roundtrip days -> date -> days") {
    val dayCounts = Seq(
      -701265,
      -371419,
      -199722,
      -1,
      0,
      967,
      2094,
      17877,
      24837,
      1110657)
    for (days <- dayCounts) {
      withEachTimeZone {
        // Formatting and re-parsing must reproduce the original day count exactly.
        val formatter = DateFormatter()
        assert(formatter.parse(formatter.format(days)) === days)
      }
    }
  }

  test("parsing date without explicit day") {
    // A pattern lacking a day-of-month should default the day to 1.
    val formatter = DateFormatter("yyyy MMM")
    assert(formatter.parse("2018 Dec") === LocalDate.of(2018, 12, 1).toEpochDay)
  }

  test("formatting negative years with default pattern") {
    val epochDays = LocalDate.of(-99, 1, 1).toEpochDay.toInt
    assert(DateFormatter().format(epochDays) === "-0099-01-01")
  }
}
| techaddict/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/util/DateFormatterSuite.scala | Scala | apache-2.0 | 3,262 |
package models
import org.joda.time.DateTime
/** An Item in the system.
 *
 *  Field names follow the project's Spanish naming convention:
 *  @param id          unique identifier of this item
 *  @param idMascota   presumably the id of the pet ("mascota") this item belongs to — confirm against callers
 *  @param monto       amount ("monto") of the item
 *  @param descripcion optional description
 *  @param fecha       date/time ("fecha") associated with the item
 */
case class Item(
    id: Long,
    idMascota: Long,
    monto: BigDecimal,
    descripcion: Option[String],
    fecha: DateTime
)
| kdoomsday/kaminalapp | app/models/Item.scala | Scala | mit | 196 |
Subsets and Splits
Filtered Scala Code Snippets
		This query filters the dataset down to Scala code snippets that meet specific criteria and returns a sample of them, giving a quick overview of the dataset's contents.