code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package chandu0101.scalajs.react.components.materialui
import japgolly.scalajs.react._
import materialui.Mui
import scala.scalajs.js
object MuiAvatar {

  /**
   * Builds a Material-UI `Avatar` React element.
   *
   * Every parameter is optional; only the props that were actually supplied
   * are copied onto the dynamic props object handed to React's factory.
   */
  def apply(backgroundColor: js.UndefOr[String] = js.undefined,
            style: js.UndefOr[js.Any] = js.undefined,
            icon: js.UndefOr[ReactElement] = js.undefined,
            ref: js.UndefOr[String] = js.undefined,
            color: js.UndefOr[String] = js.undefined,
            key: js.UndefOr[String] = js.undefined,
            src: js.UndefOr[String] = js.undefined) = {
    val props = js.Dynamic.literal()
    // A `for` over a js.UndefOr runs its body only when the value is defined.
    for (v <- backgroundColor) props.updateDynamic("backgroundColor")(v)
    for (v <- style) props.updateDynamic("style")(v)
    for (v <- icon) props.updateDynamic("icon")(v)
    for (v <- ref) props.updateDynamic("ref")(v)
    for (v <- color) props.updateDynamic("color")(v)
    for (v <- key) props.updateDynamic("key")(v)
    for (v <- src) props.updateDynamic("src")(v)
    val factory = React.asInstanceOf[js.Dynamic].createFactory(Mui.Avatar)
    factory(props).asInstanceOf[ReactComponentU_]
  }
}
| coreyauger/scalajs-react-components | core/src/main/scala/chandu0101/scalajs/react/components/materialui/MuiAvatar.scala | Scala | apache-2.0 | 1,069 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.Literal._
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.types._
// scalastyle:off
/**
* Calculates and propagates precision for fixed-precision decimals. Hive has a number of
* rules for this based on the SQL standard and MS SQL:
* https://cwiki.apache.org/confluence/download/attachments/27362075/Hive_Decimal_Precision_Scale_Support.pdf
* https://msdn.microsoft.com/en-us/library/ms190476.aspx
*
 * In particular, if we have expressions e1 and e2 with precision/scale p1/s1 and p2/s2
* respectively, then the following operations have the following precision / scale:
*
* Operation Result Precision Result Scale
* ------------------------------------------------------------------------
* e1 + e2 max(s1, s2) + max(p1-s1, p2-s2) + 1 max(s1, s2)
* e1 - e2 max(s1, s2) + max(p1-s1, p2-s2) + 1 max(s1, s2)
* e1 * e2 p1 + p2 + 1 s1 + s2
* e1 / e2 p1 - s1 + s2 + max(6, s1 + p2 + 1) max(6, s1 + p2 + 1)
* e1 % e2 min(p1-s1, p2-s2) + max(s1, s2) max(s1, s2)
* e1 union e2 max(s1, s2) + max(p1-s1, p2-s2) max(s1, s2)
* sum(e1) p1 + 10 s1
* avg(e1) p1 + 4 s1 + 4
*
* To implement the rules for fixed-precision types, we introduce casts to turn them to unlimited
* precision, do the math on unlimited-precision numbers, then introduce casts back to the
* required fixed precision. This allows us to do all rounding and overflow handling in the
* cast-to-fixed-precision operator.
*
* In addition, when mixing non-decimal types with decimals, we use the following rules:
* - BYTE gets turned into DECIMAL(3, 0)
* - SHORT gets turned into DECIMAL(5, 0)
* - INT gets turned into DECIMAL(10, 0)
* - LONG gets turned into DECIMAL(20, 0)
* - FLOAT and DOUBLE cause fixed-length decimals to turn into DOUBLE
*/
// scalastyle:on
object DecimalPrecision extends TypeCoercionRule {
  import scala.math.{max, min}

  // FLOAT/DOUBLE mixed with a decimal makes the decimal side widen to DOUBLE.
  private def isFloat(t: DataType): Boolean = t == FloatType || t == DoubleType

  // Returns the decimal type wide enough to hold values of both input types.
  def widerDecimalType(d1: DecimalType, d2: DecimalType): DecimalType = {
    widerDecimalType(d1.precision, d1.scale, d2.precision, d2.scale)
  }

  // Result precision/scale: max(s1, s2) + max(p1-s1, p2-s2), max(s1, s2)
  def widerDecimalType(p1: Int, s1: Int, p2: Int, s2: Int): DecimalType = {
    val scale = max(s1, s2)
    // Widest integral (left-of-the-point) part of the two inputs.
    val range = max(p1 - s1, p2 - s2)
    DecimalType.bounded(range + scale, scale)
  }

  // Casts `e` to `dataType` and tags it so it is not promoted a second time
  // (see the PromotePrecision guard in `decimalAndDecimal`).
  private def promotePrecision(e: Expression, dataType: DataType): Expression = {
    PromotePrecision(Cast(e, dataType))
  }

  // Applies the three rule sets bottom-up to every expression in the plan.
  override protected def coerceTypes(plan: LogicalPlan): LogicalPlan = plan transformUp {
    // fix decimal precision for expressions
    case q => q.transformExpressionsUp(
      decimalAndDecimal.orElse(integralAndDecimalLiteral).orElse(nondecimalAndDecimal))
  }

  /** Decimal precision promotion for +, -, *, /, %, pmod, and binary comparison. */
  private val decimalAndDecimal: PartialFunction[Expression, Expression] = {
    // Skip nodes whose children have not been resolved yet
    case e if !e.childrenResolved => e
    // Skip nodes that have already been promoted
    case e: BinaryArithmetic if e.left.isInstanceOf[PromotePrecision] => e

    // Result: precision = max(s1, s2) + max(p1-s1, p2-s2) + 1, scale = max(s1, s2)
    case Add(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val dt = DecimalType.bounded(max(s1, s2) + max(p1 - s1, p2 - s2) + 1, max(s1, s2))
      CheckOverflow(Add(promotePrecision(e1, dt), promotePrecision(e2, dt)), dt)

    // Same sizing rule as Add.
    case Subtract(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val dt = DecimalType.bounded(max(s1, s2) + max(p1 - s1, p2 - s2) + 1, max(s1, s2))
      CheckOverflow(Subtract(promotePrecision(e1, dt), promotePrecision(e2, dt)), dt)

    // Result: precision = p1 + p2 + 1, scale = s1 + s2.
    case Multiply(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val resultType = DecimalType.bounded(p1 + p2 + 1, s1 + s2)
      val widerType = widerDecimalType(p1, s1, p2, s2)
      CheckOverflow(Multiply(promotePrecision(e1, widerType), promotePrecision(e2, widerType)),
        resultType)

    // Division: integral digits p1-s1+s2, fractional digits max(6, s1+p2+1);
    // when the sum exceeds MAX_SCALE, fractional digits are trimmed first so
    // the integral part keeps as much room as possible.
    case Divide(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      var intDig = min(DecimalType.MAX_SCALE, p1 - s1 + s2)
      var decDig = min(DecimalType.MAX_SCALE, max(6, s1 + p2 + 1))
      val diff = (intDig + decDig) - DecimalType.MAX_SCALE
      if (diff > 0) {
        decDig -= diff / 2 + 1
        intDig = DecimalType.MAX_SCALE - decDig
      }
      val resultType = DecimalType.bounded(intDig + decDig, decDig)
      val widerType = widerDecimalType(p1, s1, p2, s2)
      CheckOverflow(Divide(promotePrecision(e1, widerType), promotePrecision(e2, widerType)),
        resultType)

    // Result: precision = min(p1-s1, p2-s2) + max(s1, s2), scale = max(s1, s2).
    case Remainder(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val resultType = DecimalType.bounded(min(p1 - s1, p2 - s2) + max(s1, s2), max(s1, s2))
      // resultType may have lower precision, so we cast them into wider type first.
      val widerType = widerDecimalType(p1, s1, p2, s2)
      CheckOverflow(Remainder(promotePrecision(e1, widerType), promotePrecision(e2, widerType)),
        resultType)

    // Same sizing rule as Remainder.
    case Pmod(e1 @ DecimalType.Expression(p1, s1), e2 @ DecimalType.Expression(p2, s2)) =>
      val resultType = DecimalType.bounded(min(p1 - s1, p2 - s2) + max(s1, s2), max(s1, s2))
      // resultType may have lower precision, so we cast them into wider type first.
      val widerType = widerDecimalType(p1, s1, p2, s2)
      CheckOverflow(Pmod(promotePrecision(e1, widerType), promotePrecision(e2, widerType)),
        resultType)

    // Comparisons only need both sides cast to a common wider decimal type.
    case b @ BinaryComparison(e1 @ DecimalType.Expression(p1, s1),
        e2 @ DecimalType.Expression(p2, s2)) if p1 != p2 || s1 != s2 =>
      val resultType = widerDecimalType(p1, s1, p2, s2)
      b.makeCopy(Array(Cast(e1, resultType), Cast(e2, resultType)))

    // TODO: MaxOf, MinOf, etc might want other rules
    // SUM and AVERAGE are handled by the implementations of those expressions
  }

  /**
   * Strength reduction for comparing integral expressions with decimal literals.
   * 1. int_col > decimal_literal => int_col > floor(decimal_literal)
   * 2. int_col >= decimal_literal => int_col >= ceil(decimal_literal)
   * 3. int_col < decimal_literal => int_col < ceil(decimal_literal)
   * 4. int_col <= decimal_literal => int_col <= floor(decimal_literal)
   * 5. decimal_literal > int_col => ceil(decimal_literal) > int_col
   * 6. decimal_literal >= int_col => floor(decimal_literal) >= int_col
   * 7. decimal_literal < int_col => floor(decimal_literal) < int_col
   * 8. decimal_literal <= int_col => ceil(decimal_literal) <= int_col
   *
   * Note that technically this is an "optimization" and should go into the optimizer. However,
   * by the time the optimizer runs, these comparison expressions would be pretty hard to pattern
   * match because there are multiple (at least 2) levels of casts involved.
   *
   * There are a lot more possible rules we can implement, but we don't do them
   * because we are not sure how common they are.
   */
  private val integralAndDecimalLiteral: PartialFunction[Expression, Expression] = {
    case GreaterThan(i @ IntegralType(), DecimalLiteral(value)) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        // Literal is below the Long range: every integral value compares greater.
        TrueLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        FalseLiteral
      } else {
        GreaterThan(i, Literal(value.floor.toLong))
      }

    case GreaterThanOrEqual(i @ IntegralType(), DecimalLiteral(value)) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        TrueLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        FalseLiteral
      } else {
        GreaterThanOrEqual(i, Literal(value.ceil.toLong))
      }

    case LessThan(i @ IntegralType(), DecimalLiteral(value)) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        FalseLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        TrueLiteral
      } else {
        LessThan(i, Literal(value.ceil.toLong))
      }

    case LessThanOrEqual(i @ IntegralType(), DecimalLiteral(value)) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        FalseLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        TrueLiteral
      } else {
        LessThanOrEqual(i, Literal(value.floor.toLong))
      }

    // Mirror cases: the decimal literal is on the left-hand side.
    case GreaterThan(DecimalLiteral(value), i @ IntegralType()) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        FalseLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        TrueLiteral
      } else {
        GreaterThan(Literal(value.ceil.toLong), i)
      }

    case GreaterThanOrEqual(DecimalLiteral(value), i @ IntegralType()) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        FalseLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        TrueLiteral
      } else {
        GreaterThanOrEqual(Literal(value.floor.toLong), i)
      }

    case LessThan(DecimalLiteral(value), i @ IntegralType()) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        TrueLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        FalseLiteral
      } else {
        LessThan(Literal(value.floor.toLong), i)
      }

    case LessThanOrEqual(DecimalLiteral(value), i @ IntegralType()) =>
      if (DecimalLiteral.smallerThanSmallestLong(value)) {
        TrueLiteral
      } else if (DecimalLiteral.largerThanLargestLong(value)) {
        FalseLiteral
      } else {
        LessThanOrEqual(Literal(value.ceil.toLong), i)
      }
  }

  /**
   * Type coercion for BinaryOperator in which one side is a non-decimal numeric, and the other
   * side is a decimal.
   */
  private val nondecimalAndDecimal: PartialFunction[Expression, Expression] = {
    // Promote integers inside a binary expression with fixed-precision decimals to decimals,
    // and fixed-precision decimals in an expression with floats / doubles to doubles
    case b @ BinaryOperator(left, right) if left.dataType != right.dataType =>
      (left.dataType, right.dataType) match {
        case (t: IntegralType, DecimalType.Fixed(p, s)) =>
          b.makeCopy(Array(Cast(left, DecimalType.forType(t)), right))
        case (DecimalType.Fixed(p, s), t: IntegralType) =>
          b.makeCopy(Array(left, Cast(right, DecimalType.forType(t))))
        case (t, DecimalType.Fixed(p, s)) if isFloat(t) =>
          b.makeCopy(Array(left, Cast(right, DoubleType)))
        case (DecimalType.Fixed(p, s), t) if isFloat(t) =>
          b.makeCopy(Array(Cast(left, DoubleType), right))
        case _ =>
          b
      }
  }
}
| ron8hu/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecision.scala | Scala | apache-2.0 | 11,861 |
package pt.up.fe.iart.proj1.problem
import scala.collection.immutable.HashSet
/**
 * Immutable state of the search space.
 *
 * @param previousLocations ids of locations already visited
 * @param currentLocation   id of the location currently occupied
 * @param patientsAmbulance patients currently on board the ambulance
 * @param gasLevel          remaining fuel level
 */
class State(
    val previousLocations: HashSet[Int],
    val currentLocation: Int,
    val patientsAmbulance: List[Patient],
    val gasLevel: Double) {

  /** Number of patients currently on board. */
  def numberPatientsAmbulance = patientsAmbulance.length

  override def toString = s"{ $currentLocation, $previousLocations, $patientsAmbulance, $gasLevel }"
}
| migulorama/feup-iart-2014 | src/main/scala/pt/up/fe/iart/proj1/problem/State.scala | Scala | mit | 380 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.blueeyes
package json
package serialization
import DefaultSerialization._
import Extractor.Invalid
import scalaz._
import quasar.precog.JsonTestSupport._
// Specs for version-tagged JSON (de)serialization: a "schemaVersion" field is
// written on decompose and validated on extract.
object VersionedSpec extends Specification with ScalaCheck {
  import IsoSerializationSpec._
  import Versioned._

  "versioned serialization" should {
    "serialize a simple case class" in {
      val fooDecomp = decomposerV[Foo](fooSchema, Some("1.0".v))
      val result = fooDecomp.decompose(foo)
      // The decomposer injects the version under the "schemaVersion" key.
      result must_== JParser.parseUnsafe("""{ "s": "Hello world", "i": 23, "b": true, "schemaVersion": "1.0" }""")
    }
  }

  "versioned deserialization" should {
    "extract to a simple case class" in {
      val fooExtract = extractorV[Foo](fooSchema, Some("1.0".v))
      val result = fooExtract.extract(
        jobject(
          jfield("s", "Hello world"),
          jfield("i", 23),
          jfield("b", true),
          jfield("schemaVersion", "1.0")
        )
      )
      result must_== foo
    }

    "refuse to deserialize an object missing a version" in {
      val fooExtract = extractorV[Foo](fooSchema, Some("1.0".v))
      // No "schemaVersion" field present: validation must fail.
      val result = fooExtract.validated(
        jobject(
          jfield("s", "Hello world"),
          jfield("i", 23),
          jfield("b", true)
        )
      )
      result must beLike {
        case Failure(Invalid(message, None)) => message must startWith(".schemaVersion property missing")
      }
    }

    "refuse to deserialize an object from a future version" in {
      // Extractor expects 1.0 but the payload claims a newer 1.1.
      val fooExtract = extractorV[Foo](fooSchema, Some("1.0".v))
      val result = fooExtract.validated(
        jobject(
          jfield("s", "Hello world"),
          jfield("i", 23),
          jfield("b", true),
          jfield("schemaVersion", "1.1")
        )
      )
      result must beLike {
        case Failure(Invalid(message, None)) => message must contain("was incompatible with desired version")
      }
    }

    "deserialize an object from a major-compatible prior version" in {
      // Extractor expects 1.1; a 1.0 payload is accepted (same major version).
      val fooExtract = extractorV[Foo](fooSchema, Some("1.1".v))
      val result = fooExtract.validated(
        jobject(
          jfield("s", "Hello world"),
          jfield("i", 23),
          jfield("b", true),
          jfield("schemaVersion", "1.0")
        )
      )
      result must beLike {
        case Success(v) => v must_== foo
      }
    }
  }
}
| drostron/quasar | blueeyes/src/test/scala/quasar/blueeyes/json/serialization/VersionedSpec.scala | Scala | apache-2.0 | 2,972 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
// Relation provider resolved when a USING clause names only the package
// `org.apache.spark.sql.sources`; simply reuses SimpleScanSource's behavior.
class DefaultSource extends SimpleScanSource
// This class is used by pyspark tests. If this class is modified/moved, make sure pyspark
// tests still pass.
class SimpleScanSource extends RelationProvider {

  /** Builds a [[SimpleScan]] relation from the `from` / `TO` options. */
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation = {
    val lower = parameters("from").toInt
    val upper = parameters("TO").toInt
    SimpleScan(lower, upper)(sqlContext.sparkSession)
  }
}
/** Scannable relation producing one row per integer in `[from, to]`. */
case class SimpleScan(from: Int, to: Int)(@transient val sparkSession: SparkSession)
  extends BaseRelation with TableScan {

  override def sqlContext: SQLContext = sparkSession.sqlContext

  // Single non-nullable integer column named "i".
  override def schema: StructType =
    StructType(Seq(StructField("i", IntegerType, nullable = false)))

  override def buildScan(): RDD[Row] = {
    val values = from to to
    sparkSession.sparkContext.parallelize(values).map(i => Row(i))
  }
}
class AllDataTypesScanSource extends SchemaRelationProvider {

  /** Builds an [[AllDataTypesScan]] honoring the user-specified schema. */
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String],
      schema: StructType): BaseRelation = {
    // Check that weird parameters are passed correctly.
    parameters("option_with_underscores")
    parameters("option.with.dots")
    val lower = parameters("from").toInt
    val upper = parameters("TO").toInt
    AllDataTypesScan(lower, upper, schema)(sqlContext.sparkSession)
  }
}
// Fixture relation whose rows exercise every supported data type; the field
// order below must stay in sync with the column order of the schemas declared
// by callers (see TableScanSuite).
case class AllDataTypesScan(
  from: Int,
  to: Int,
  userSpecifiedSchema: StructType)(@transient val sparkSession: SparkSession)
  extends BaseRelation
  with TableScan {

  override def sqlContext: SQLContext = sparkSession.sqlContext

  // Reports exactly the schema the caller supplied.
  override def schema: StructType = userSpecifiedSchema

  override def needConversion: Boolean = true

  override def buildScan(): RDD[Row] = {
    sparkSession.sparkContext.parallelize(from to to).map { i =>
      Row(
        s"str_$i",
        s"str_$i".getBytes(StandardCharsets.UTF_8),
        i % 2 == 0,
        i.toByte,
        i.toShort,
        i,
        i.toLong,
        i.toFloat,
        i.toDouble,
        new java.math.BigDecimal(i),
        new java.math.BigDecimal(i),
        Date.valueOf("1970-01-01"),
        new Timestamp(20000 + i),
        s"varchar_$i",
        s"char_$i",
        Seq(i, i + 1),
        Seq(Map(s"str_$i" -> Row(i.toLong))),
        Map(i -> i.toString),
        Map(Map(s"str_$i" -> i.toFloat) -> Row(i.toLong)),
        Row(i, i.toString),
        Row(Seq(s"str_$i", s"str_${i + 1}"),
          Row(Seq(Date.valueOf(s"1970-01-${i + 1}")))))
    }
  }
}
// End-to-end tests for TableScan relations registered through the
// RelationProvider / SchemaRelationProvider data source APIs.
class TableScanSuite extends DataSourceTest with SharedSparkSession {
  protected override lazy val sql = spark.sql _

  // Expected rows of `tableWithSchema` — mirrors AllDataTypesScan.buildScan,
  // but binary columns come back cast to string in the queries below.
  private lazy val tableWithSchemaExpected = (1 to 10).map { i =>
    Row(
      s"str_$i",
      s"str_$i",
      i % 2 == 0,
      i.toByte,
      i.toShort,
      i,
      i.toLong,
      i.toFloat,
      i.toDouble,
      new java.math.BigDecimal(i),
      new java.math.BigDecimal(i),
      Date.valueOf("1970-01-01"),
      new Timestamp(20000 + i),
      s"varchar_$i",
      s"char_$i",
      Seq(i, i + 1),
      Seq(Map(s"str_$i" -> Row(i.toLong))),
      Map(i -> i.toString),
      Map(Map(s"str_$i" -> i.toFloat) -> Row(i.toLong)),
      Row(i, i.toString),
      Row(Seq(s"str_$i", s"str_${i + 1}"), Row(Seq(Date.valueOf(s"1970-01-${i + 1}")))))
  }.toSeq

  // Registers the two temporary views that the tests below query.
  override def beforeAll(): Unit = {
    super.beforeAll()
    sql(
      """
        |CREATE TEMPORARY VIEW oneToTen
        |USING org.apache.spark.sql.sources.SimpleScanSource
        |OPTIONS (
        | From '1',
        | To '10',
        | option_with_underscores 'someval',
        | option.with.dots 'someval'
        |)
      """.stripMargin)
    sql(
      """
        |CREATE TEMPORARY VIEW tableWithSchema (
        |`string$%Field` stRIng,
        |binaryField binary,
        |`booleanField` boolean,
        |ByteField tinyint,
        |shortField smaLlint,
        |int_Field iNt,
        |`longField_:,<>=+/~^` Bigint,
        |floatField flOat,
        |doubleField doubLE,
        |decimalField1 decimal,
        |decimalField2 decimal(9,2),
        |dateField dAte,
        |timestampField tiMestamp,
        |varcharField varchaR(12),
        |charField ChaR(18),
        |arrayFieldSimple Array<inT>,
        |arrayFieldComplex Array<Map<String, Struct<key:bigInt>>>,
        |mapFieldSimple MAP<iNt, StRing>,
        |mapFieldComplex Map<Map<stRING, fLOAT>, Struct<key:bigInt>>,
        |structFieldSimple StRuct<key:INt, Value:STrINg>,
        |structFieldComplex StRuct<key:Array<String>, Value:struct<`value_(2)`:Array<date>>>
        |)
        |USING org.apache.spark.sql.sources.AllDataTypesScanSource
        |OPTIONS (
        | From '1',
        | To '10',
        | option_with_underscores 'someval',
        | option.with.dots 'someval'
        |)
      """.stripMargin)
  }

  sqlTest(
    "SELECT * FROM oneToTen",
    (1 to 10).map(Row(_)).toSeq)

  sqlTest(
    "SELECT i FROM oneToTen",
    (1 to 10).map(Row(_)).toSeq)

  sqlTest(
    "SELECT i FROM oneToTen WHERE i < 5",
    (1 to 4).map(Row(_)).toSeq)

  sqlTest(
    "SELECT i * 2 FROM oneToTen",
    (1 to 10).map(i => Row(i * 2)).toSeq)

  sqlTest(
    "SELECT a.i, b.i FROM oneToTen a JOIN oneToTen b ON a.i = b.i + 1",
    (2 to 10).map(i => Row(i, i - 1)).toSeq)

  test("Schema and all fields") {
    val expectedSchema = StructType(
      StructField("string$%Field", StringType, true) ::
      StructField("binaryField", BinaryType, true) ::
      StructField("booleanField", BooleanType, true) ::
      StructField("ByteField", ByteType, true) ::
      StructField("shortField", ShortType, true) ::
      StructField("int_Field", IntegerType, true) ::
      StructField("longField_:,<>=+/~^", LongType, true) ::
      StructField("floatField", FloatType, true) ::
      StructField("doubleField", DoubleType, true) ::
      StructField("decimalField1", DecimalType.USER_DEFAULT, true) ::
      StructField("decimalField2", DecimalType(9, 2), true) ::
      StructField("dateField", DateType, true) ::
      StructField("timestampField", TimestampType, true) ::
      StructField("varcharField", VarcharType(12), true) ::
      StructField("charField", CharType(18), true) ::
      StructField("arrayFieldSimple", ArrayType(IntegerType), true) ::
      StructField("arrayFieldComplex",
        ArrayType(
          MapType(StringType, StructType(StructField("key", LongType, true) :: Nil))), true) ::
      StructField("mapFieldSimple", MapType(IntegerType, StringType), true) ::
      StructField("mapFieldComplex",
        MapType(
          MapType(StringType, FloatType),
          StructType(StructField("key", LongType, true) :: Nil)), true) ::
      StructField("structFieldSimple",
        StructType(
          StructField("key", IntegerType, true) ::
          StructField("Value", StringType, true) :: Nil), true) ::
      StructField("structFieldComplex",
        StructType(
          StructField("key", ArrayType(StringType), true) ::
          StructField("Value",
            StructType(
              StructField("value_(2)", ArrayType(DateType), true) :: Nil), true) :: Nil), true) ::
      Nil
    )

    // char/varchar types are stored as plain strings in the resolved schema.
    assert(CharVarcharUtils.replaceCharVarcharWithStringInSchema(expectedSchema) ==
      spark.table("tableWithSchema").schema)

    withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") {
      checkAnswer(
        sql(
          """SELECT
            | `string$%Field`,
            | cast(binaryField as string),
            | booleanField,
            | byteField,
            | shortField,
            | int_Field,
            | `longField_:,<>=+/~^`,
            | floatField,
            | doubleField,
            | decimalField1,
            | decimalField2,
            | dateField,
            | timestampField,
            | varcharField,
            | charField,
            | arrayFieldSimple,
            | arrayFieldComplex,
            | mapFieldSimple,
            | mapFieldComplex,
            | structFieldSimple,
            | structFieldComplex FROM tableWithSchema""".stripMargin),
        tableWithSchemaExpected
      )
    }
  }

  sqlTest(
    "SELECT count(*) FROM tableWithSchema",
    Seq(Row(10)))

  sqlTest(
    "SELECT `string$%Field` FROM tableWithSchema",
    (1 to 10).map(i => Row(s"str_$i")).toSeq)

  sqlTest(
    "SELECT int_Field FROM tableWithSchema WHERE int_Field < 5",
    (1 to 4).map(Row(_)).toSeq)

  sqlTest(
    "SELECT `longField_:,<>=+/~^` * 2 FROM tableWithSchema",
    (1 to 10).map(i => Row(i * 2.toLong)).toSeq)

  sqlTest(
    "SELECT structFieldSimple.key, arrayFieldSimple[1] FROM tableWithSchema a where int_Field=1",
    Seq(Row(1, 2)))

  sqlTest(
    "SELECT structFieldComplex.Value.`value_(2)` FROM tableWithSchema",
    (1 to 10).map(i => Row(Seq(Date.valueOf(s"1970-01-${i + 1}")))).toSeq)

  test("Caching") {
    // Cached Query Execution
    spark.catalog.cacheTable("oneToTen")
    assertCached(sql("SELECT * FROM oneToTen"))
    checkAnswer(
      sql("SELECT * FROM oneToTen"),
      (1 to 10).map(Row(_)).toSeq)

    assertCached(sql("SELECT i FROM oneToTen"))
    checkAnswer(
      sql("SELECT i FROM oneToTen"),
      (1 to 10).map(Row(_)).toSeq)

    assertCached(sql("SELECT i FROM oneToTen WHERE i < 5"))
    checkAnswer(
      sql("SELECT i FROM oneToTen WHERE i < 5"),
      (1 to 4).map(Row(_)).toSeq)

    assertCached(sql("SELECT i * 2 FROM oneToTen"))
    checkAnswer(
      sql("SELECT i * 2 FROM oneToTen"),
      (1 to 10).map(i => Row(i * 2)).toSeq)

    // Self-join touches the cached relation twice.
    assertCached(sql(
      "SELECT a.i, b.i FROM oneToTen a JOIN oneToTen b ON a.i = b.i + 1"), 2)
    checkAnswer(sql(
      "SELECT a.i, b.i FROM oneToTen a JOIN oneToTen b ON a.i = b.i + 1"),
      (2 to 10).map(i => Row(i, i - 1)).toSeq)

    // Verify uncaching
    spark.catalog.uncacheTable("oneToTen")
    assertCached(sql("SELECT * FROM oneToTen"), 0)
  }

  test("defaultSource") {
    // USING a bare package name resolves to its DefaultSource class.
    sql(
      """
        |CREATE TEMPORARY VIEW oneToTenDef
        |USING org.apache.spark.sql.sources
        |OPTIONS (
        | from '1',
        | to '10'
        |)
      """.stripMargin)
    checkAnswer(
      sql("SELECT * FROM oneToTenDef"),
      (1 to 10).map(Row(_)).toSeq)
  }

  test("exceptions") {
    // Make sure we do throw correct exception when users use a relation provider that
    // only implements the RelationProvider or the SchemaRelationProvider.
    Seq("TEMPORARY VIEW", "TABLE").foreach { tableType =>
      val schemaNotMatch = intercept[Exception] {
        sql(
          s"""
             |CREATE $tableType relationProviderWithSchema (i string)
             |USING org.apache.spark.sql.sources.SimpleScanSource
             |OPTIONS (
             | From '1',
             | To '10'
             |)
           """.stripMargin)
      }
      assert(schemaNotMatch.getMessage.contains(
        "The user-specified schema doesn't match the actual schema"))

      val schemaNeeded = intercept[Exception] {
        sql(
          s"""
             |CREATE $tableType schemaRelationProviderWithoutSchema
             |USING org.apache.spark.sql.sources.AllDataTypesScanSource
             |OPTIONS (
             | From '1',
             | To '10'
             |)
           """.stripMargin)
      }
      assert(schemaNeeded.getMessage.contains("A schema needs to be specified when using"))
    }
  }

  test("read the data source tables that do not extend SchemaRelationProvider") {
    Seq("TEMPORARY VIEW", "TABLE").foreach { tableType =>
      val tableName = "relationProviderWithSchema"
      withTable (tableName) {
        sql(
          s"""
             |CREATE $tableType $tableName
             |USING org.apache.spark.sql.sources.SimpleScanSource
             |OPTIONS (
             | From '1',
             | To '10'
             |)
           """.stripMargin)
        checkAnswer(spark.table(tableName), spark.range(1, 11).toDF())
      }
    }
  }

  test("SPARK-5196 schema field with comment") {
    sql(
      """
        |CREATE TEMPORARY VIEW student(name string comment "SN", age int comment "SA", grade int)
        |USING org.apache.spark.sql.sources.AllDataTypesScanSource
        |OPTIONS (
        | from '1',
        | to '10',
        | option_with_underscores 'someval',
        | option.with.dots 'someval'
        |)
      """.stripMargin)

    val planned = sql("SELECT * FROM student").queryExecution.executedPlan
    val comments = planned.schema.fields.map(_.getComment().getOrElse("NO_COMMENT")).mkString(",")
    assert(comments === "SN,SA,NO_COMMENT")
  }
}
| ueshin/apache-spark | sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala | Scala | apache-2.0 | 13,647 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.spark.cloud.utils
import com.github.lalyos.jfiglet.FigletFont
object Demo {

  /**
   * Renders a message as figlet ASCII art, prefixed with the two-character
   * sequence backslash-n (note the escaped backslash in the literal).
   *
   * see: https://github.com/lalyos/jfiglet
   */
  def text(m: String): String =
    "\\n".concat(FigletFont.convertOneLine(m))
}
| hortonworks-spark/cloud-integration | cloud-examples/src/main/scala/com/cloudera/spark/cloud/utils/Demo.scala | Scala | apache-2.0 | 1,079 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package akka.persistence.jdbc.journal
import akka.actor.ActorSystem
import akka.persistence.jdbc.dao.JournalDao
import akka.persistence.jdbc.extension.{ AkkaPersistenceConfig, DaoRepository }
import akka.persistence.jdbc.serialization.SerializationFacade
import akka.stream.{ ActorMaterializer, Materializer }
import scala.concurrent.ExecutionContext
// Concrete journal wiring: plugs the configured DAO and serialization facade
// into SlickAsyncWriteJournal.
class JdbcAsyncWriteJournal extends SlickAsyncWriteJournal {
  // The actor's dispatcher doubles as the execution context for async work.
  implicit val ec: ExecutionContext = context.dispatcher
  implicit val system: ActorSystem = context.system
  override implicit val mat: Materializer = ActorMaterializer()

  // NOTE: `journalDao` and `serializationFacade` read the vals above, so the
  // declaration order of these members matters.
  override val journalDao: JournalDao = DaoRepository(system).journalDao
  override val serializationFacade: SerializationFacade =
    SerializationFacade(system, separatorChar)

  // Configured separator for the serialization facade; falls back to ","
  // when the configured value is absent or empty.
  def separatorChar: String =
    Option(AkkaPersistenceConfig(context.system).persistenceQueryConfiguration.separator).filterNot(_.isEmpty)
      .getOrElse(",")

  // Whether journal entries should go through serialization, per configuration.
  override val serialize: Boolean = AkkaPersistenceConfig(system).serializationConfiguration.journal
}
| wwwiiilll/akka-persistence-jdbc | src/main/scala/akka/persistence/jdbc/journal/JdbcAsyncWriteJournal.scala | Scala | apache-2.0 | 1,636 |
package org.senkbeil.debugger.akka.messages
import org.senkbeil.debugger.akka.messages.structures._
/**
 * Message carrying JDI details for an access-watchpoint event.
 *
 * All fields default to `None`, so partially-populated events can be sent.
 *
 * @param thread       thread associated with the event, if available
 * @param location     code location of the event, if available
 * @param field        watched field involved, if available
 * @param `object`     object owning the field, if available
 * @param valueCurrent current value of the field, if available
 */
case class AccessWatchpointMessage(
  thread: Option[JDIThread] = None,
  location: Option[JDILocation] = None,
  field: Option[JDIField] = None,
  `object`: Option[JDIObject] = None,
  valueCurrent: Option[JDIValue] = None
) extends WatchpointMessageLike with MessageLike
| chipsenkbeil/scala-debugger-akka | src/main/scala/org/senkbeil/debugger/akka/messages/AccessWatchpointMessage.scala | Scala | apache-2.0 | 406 |
package microfluidic
import scala.concurrent.duration._
import org.scalatest.{ FlatSpec, ShouldMatchers }
// DSL examples: each spec asserts the exact intermediate representation
// (`Stage`) produced by `experiment` for a small microfluidic program.
class ExamplesSpec extends FlatSpec with ShouldMatchers {

  "Microfluidic" should "be able to store fluids" in {
    val storedWater = dispense("water", 100, 22).store
    experiment(storedWater) should be { // expected intermediate representation
      Stage(List(Store(Input("water",100,22.0))),List())
    }
  }

  it should "be able to waste fluids" in {
    val water = dispense("water", 100, 22).waste
    experiment(water) should be { // expected intermediate representation
      Stage(List(Waste(Input("water",100,22.0))),List())
    }
  }

  it should "mix fluids" in {
    val water = dispense("water", 60, 22)
    val syrup = dispense("syrup", 40, 22)
    val juice = water.mix(syrup, 5 seconds).store
    // we could do something like:
    // water mix syrup for 5 seconds
    experiment(juice) should be { // expected intermediate representation
      Stage(List(Store(Mix(Input("water",60,22.0),Input("syrup",40,22.0),"water with syrup"))),List())
    }
  }

  it should "split up fluids" in {
    // `split` yields two halves that share the same Split node in the IR.
    val (halfOfWater, otherHalf) = dispense("water", 100, 22).split
    experiment(Seq(halfOfWater.store, otherHalf.store)) should be { // expected intermediate representation
      Stage(List(Store(Split(Input("water",100,22.0),2)), Store(Split(Input("water",100,22.0),2))),List())
    }
  }

  it should "heat up fluids" in {
    val sterilizedWater = dispense("water", 100, 22).heat(95, 5 minutes).store
    experiment(sterilizedWater) should be { // expected intermediate representation
      Stage(List(Store(Heat(Input("water",100,22.0),95.0,5 minutes))),List())
    }
  }

  /*
   * Take measurements.
   */
  import microfluidic.sensors._

  it should "support taking measurements" in {
    val hotWater = dispense("water", 100, 22).heat(95, 1 seconds)
    val salt = dispense("salt", 10, 22)
    val saltyWater = hotWater.mix(salt, 5 seconds)
    // Measurements land in the second list of the Stage.
    experiment(saltyWater.store, saltyWater.conductivity) should be {
      Stage(List(Store(Mix(Heat(Input("water",100,22.0),95.0,1 second),Input("salt",10,22.0),"water with salt"))),List(Measurement(Mix(Heat(Input("water",100,22.0),95.0,1 second),Input("salt",10,22.0),"water with salt"),"conductivity")))
    }
  }
}
| vjovanov/microfluidic | src/test/scala/microfluidic/Examples.scala | Scala | bsd-3-clause | 2,312 |
package lila.mod
import lila.db.BSON.BSONJodaDateTimeHandler
import org.joda.time.DateTime
import reactivemongo.api._
import reactivemongo.api.bson._
import scala.concurrent.duration._
import lila.db.dsl._
import lila.memo.CacheApi._
import lila.report.Room
import lila.user.User
/**
 * Moderator "gamification": per-mod action and report counts, rolling
 * day/week/month leaderboards, and a persisted history of monthly champions.
 */
final class Gamify(
    logRepo: ModlogRepo,
    reportApi: lila.report.ReportApi,
    modApi: lila.mod.ModApi,
    cacheApi: lila.memo.CacheApi,
    historyRepo: HistoryRepo
)(implicit ec: scala.concurrent.ExecutionContext) {

  import Gamify._
  import lila.report.BSONHandlers.RoomBSONHandler

  implicit private val modMixedBSONHandler     = Macros.handler[ModMixed]
  implicit private val historyMonthBSONHandler = Macros.handler[HistoryMonth]

  // Champion history, most recent month first. If the latest fully elapsed
  // month is missing and `orCompute` is set, back-fill the gap and re-read
  // once (the recursive call passes orCompute = false, so it cannot loop).
  def history(orCompute: Boolean = true): Fu[List[HistoryMonth]] = {
    val until  = DateTime.now minusMonths 1 withDayOfMonth 1
    val lastId = HistoryMonth.makeId(until.getYear, until.getMonthOfYear)
    historyRepo.coll
      .find($empty)
      .sort(
        $doc(
          "year"  -> -1,
          "month" -> -1
        )
      )
      .cursor[HistoryMonth]()
      .list() flatMap { months =>
      months.headOption match {
        case Some(m) if m._id == lastId => fuccess(months)
        case _ if !orCompute            => fuccess(months)
        case Some(m)                    => buildHistoryAfter(m.year, m.month, until) >> history(false)
        case _                          => buildHistoryAfter(2017, 6, until) >> history(false)
      }
    }
  }

  // Recomputes the champion of every month in (afterYear/afterMonth, until]
  // and upserts one HistoryMonth document per month that had a champion.
  private def buildHistoryAfter(afterYear: Int, afterMonth: Int, until: DateTime): Funit =
    (afterYear to until.getYear)
      .flatMap { year =>
        ((if (year == afterYear) afterMonth + 1 else 1) to
          (if (year == until.getYear) until.getMonthOfYear else 12)).map { month =>
          mixedLeaderboard(
            after = new DateTime(year, month, 1, 0, 0).pp("compute mod history"),
            before = new DateTime(year, month, 1, 0, 0).plusMonths(1).some
          ).map {
            _.headOption.map { champ =>
              HistoryMonth(HistoryMonth.makeId(year, month), year, month, champ)
            }
          }
        }.toList
      }
      .toList
      .sequenceFu
      .map(_.flatten)
      .flatMap {
        _.map { month =>
          historyRepo.coll.update.one($doc("_id" -> month._id), month, upsert = true).void
        }.sequenceFu
      }
      .void

  def leaderboards = leaderboardsCache.getUnit

  // Day/week/month leaderboards, recomputed at most every 10 minutes.
  private val leaderboardsCache = cacheApi.unit[Leaderboards] {
    _.expireAfterWrite(10 minutes)
      .buildAsyncFuture { _ =>
        mixedLeaderboard(DateTime.now minusDays 1, none) zip
          mixedLeaderboard(DateTime.now minusWeeks 1, none) zip
          mixedLeaderboard(DateTime.now minusMonths 1, none) map { case ((daily, weekly), monthly) =>
          Leaderboards(daily, weekly, monthly)
        }
      }
  }

  // Combines action and report counts per moderator, restricted to current
  // mods (minus hidden accounts), sorted by descending combined score.
  private def mixedLeaderboard(after: DateTime, before: Option[DateTime]): Fu[List[ModMixed]] =
    for {
      actions <- actionLeaderboard(after, before)
      reports <- reportLeaderboard(after, before)
      modList <- modApi.allMods
    } yield actions.map(_.modId) intersect modList.map(_.id) diff hidden map { modId =>
      ModMixed(
        modId,
        action = actions.find(_.modId == modId) ?? (_.count),
        report = reports.find(_.modId == modId) ?? (_.count)
      )
    } sortBy (-_.score)

  // Mongo date-range selector: [from, to) when `to` is given, else [from, inf).
  private def dateRange(from: DateTime, toOption: Option[DateTime]) =
    $doc("$gte" -> from) ++ toOption.?? { to =>
      $doc("$lt" -> to)
    }

  // Accounts excluded from all leaderboards.
  private val hidden = List(User.lichessId, "irwin")

  // Number of modlog actions per moderator in the date range
  // (top 100 only, secondary reads).
  private def actionLeaderboard(after: DateTime, before: Option[DateTime]): Fu[List[ModCount]] =
    logRepo.coll
      .aggregateList(maxDocs = 100, readPreference = ReadPreference.secondaryPreferred) { framework =>
        import framework._
        Match(
          $doc(
            "date" -> dateRange(after, before),
            "mod"  -> $nin(hidden)
          )
        ) -> List(
          GroupField("mod")("nb" -> SumAll),
          Sort(Descending("nb"))
        )
      }
      .map {
        _.flatMap { obj =>
          import cats.implicits._
          (obj.string("_id"), obj.int("nb")) mapN ModCount.apply
        }
      }

  // Closed reports handled per moderator; cheat-room reports weigh 3x.
  private def reportLeaderboard(after: DateTime, before: Option[DateTime]): Fu[List[ModCount]] =
    reportApi.coll
      .aggregateList(
        maxDocs = Int.MaxValue,
        readPreference = ReadPreference.secondaryPreferred
      ) { framework =>
        import framework._
        Match(
          $doc(
            "done.at" -> dateRange(after, before),
            "done.by" -> $nin(hidden),
            "open"    -> false
          )
        ) -> List(
          GroupField("done.by")(
            "nb" -> Sum(
              $doc(
                "$cond" -> $arr($doc("$eq" -> $arr("$room", Room.Cheat.key)), 3, 1)
              )
            )
          ),
          Sort(Descending("nb"))
        )
      }
      .map { docs =>
        for {
          doc <- docs
          id  <- doc.string("_id")
          nb  <- doc.int("nb")
        } yield ModCount(id, nb)
      }
}
object Gamify {

  /** One month of history, keyed by "year/month", remembering that month's champion. */
  case class HistoryMonth(_id: String, year: Int, month: Int, champion: ModMixed) {
    def date = new DateTime(year, month, 1, 0, 0)
  }

  object HistoryMonth {
    def makeId(year: Int, month: Int) = s"$year/$month"
  }

  /** Leaderboard window; `name` is the lowercase object name ("day", "week", "month"). */
  sealed trait Period {
    def name = toString.toLowerCase
  }

  object Period {
    case object Day   extends Period
    case object Week  extends Period
    case object Month extends Period

    private val all = List(Day, Week, Month)

    /** Parses a period from its lowercase name, if valid. */
    def apply(p: String) = all.find(_.name == p)
  }

  /** The three rolling leaderboards, selectable by [[Period]]. */
  case class Leaderboards(daily: List[ModMixed], weekly: List[ModMixed], monthly: List[ModMixed]) {
    def apply(period: Period) =
      period match {
        case Period.Day   => daily
        case Period.Week  => weekly
        case Period.Month => monthly
      }
  }

  /** Raw count of one kind of activity for one moderator. */
  case class ModCount(modId: User.ID, count: Int)

  /** Combined activity of one moderator; `score` sums actions and reports. */
  case class ModMixed(modId: User.ID, action: Int, report: Int) {
    def score = action + report
  }
}
| luanlv/lila | modules/mod/src/main/Gamify.scala | Scala | mit | 6,056 |
package com.sfxcode.sapphire.core.control
import com.sfxcode.sapphire.core.application.ApplicationEnvironment
import javafx.geometry.Pos
import javafx.scene.control.IndexedCell
import javafx.scene.control.cell.{ TextFieldTableCell, TextFieldTreeTableCell }
import javafx.scene.text.TextAlignment
import javafx.util.StringConverter
import scala.beans.BeanProperty
/**
 * Mixin for cell factories: applies a text alignment and an optional
 * named [[StringConverter]] (looked up via [[ApplicationEnvironment]])
 * to the cells it produces.
 *
 * @tparam S the row item type
 * @tparam T the cell value type
 */
trait FXCellFactory[S, T] {

  // Package prefix used to resolve cell implementation classes.
  @BeanProperty
  var packageName: String = "javafx.scene.control.cell."

  @BeanProperty
  var simpleClassName: String = defaultClassName

  // Either a TextAlignment value or a string ("left"/"center"/"right");
  // anything else falls back to left alignment.
  @BeanProperty
  var alignment: Any = "left"

  // Name of a registered StringConverter; null means "no converter".
  @BeanProperty
  var converter: String = _

  /** Simple class name of the default cell implementation. */
  def defaultClassName: String

  /**
   * Applies the configured alignment and, when set, the configured
   * converter to the given cell.
   */
  protected def updateCell(cell: IndexedCell[T]): Unit = {
    if (alignment == TextAlignment.CENTER || alignment.toString.equalsIgnoreCase("center"))
      cell.setAlignment(Pos.CENTER)
    else if (alignment == TextAlignment.RIGHT || alignment.toString.equalsIgnoreCase("right"))
      cell.setAlignment(Pos.CENTER_RIGHT)
    else
      cell.setAlignment(Pos.CENTER_LEFT)

    if (converter != null)
      cell match {
        case textFieldCell: TextFieldTableCell[S, T]     => textFieldCell.setConverter(getConverterForName(converter))
        case textFieldCell: TextFieldTreeTableCell[S, T] => textFieldCell.setConverter(getConverterForName(converter))
        // Fix: previously this match was non-exhaustive, so any other cell
        // type with a converter configured threw a MatchError at runtime.
        case _                                           => // cell type does not support converters; ignore
      }
  }

  // Hoisted out of updateCell (was a nested def referenced before its definition).
  private def getConverterForName(name: String): StringConverter[T] =
    ApplicationEnvironment.getConverterByName(name)
}
| sfxcode/sapphire-core | src/main/scala/com/sfxcode/sapphire/core/control/FXCellFactory.scala | Scala | apache-2.0 | 1,449 |
package com.twitter.finagle.exp
import com.twitter.finagle._
import com.twitter.finagle.client.{StackClient, StdStackClient, DefaultPool, Transporter}
import com.twitter.finagle.exp.mysql._
import com.twitter.finagle.exp.mysql.transport.{MysqlTransporter, Packet}
import com.twitter.finagle.tracing._
import com.twitter.finagle.transport.Transport
import com.twitter.util.Duration
/**
 * Adds convenience constructors for rich mysql clients on top of a plain
 * [[com.twitter.finagle.Client]] of mysql requests/results.
 */
trait MysqlRichClient { self: com.twitter.finagle.Client[Request, Result] =>

  /**
   * Builds a rich client connected to the logical destination `dest`,
   * scoping client stats under `label`.
   */
  def newRichClient(dest: Name, label: String): mysql.Client = {
    val underlying = newClient(dest, label)
    mysql.Client(underlying)
  }

  /** Builds a rich client connected to the logical destination `dest`. */
  def newRichClient(dest: String): mysql.Client = {
    val underlying = newClient(dest)
    mysql.Client(underlying)
  }
}
/**
 * Mysql-aware client tracing: records the SQL text or statement id of each
 * request as trace annotations, replacing finagle's generic client tracing.
 */
object MySqlClientTracingFilter {

  // Stack module installed at the standard ClientTracingFilter position;
  // chains the generic send/recv annotations with the mysql TracingFilter.
  object Stackable extends Stack.Simple[ServiceFactory[Request, Result]] {
    val role = ClientTracingFilter.role
    val description = "Add MySql client specific annotations to the trace"
    def make(next: ServiceFactory[Request, Result])(implicit params: Params) = {
      val param.Label(label) = get[param.Label]
      // TODO(jeff): should be able to get this directly from ClientTracingFilter
      val annotations = new AnnotatingTracingFilter[Request, Result](
        label, Annotation.ClientSend(), Annotation.ClientRecv())
      annotations andThen TracingFilter andThen next
    }
  }

  // Records one annotation per request, only when the trace is sampled.
  object TracingFilter extends SimpleFilter[Request, Result] {
    def apply(request: Request, service: Service[Request, Result]) = {
      if (Trace.isActivelyTracing) {
        request match {
          case QueryRequest(sqlStatement) => Trace.recordBinary("mysql.query", sqlStatement)
          case PrepareRequest(sqlStatement) => Trace.recordBinary("mysql.prepare", sqlStatement)
          // TODO: save the prepared statement and put it in the executed request trace
          case ExecuteRequest(id, _, _, _) => Trace.recordBinary("mysql.execute", id)
          // Fallback: record the request's class name (strip the `$` of objects).
          case _ => Trace.record("mysql." + request.getClass.getSimpleName.replace("$", ""))
        }
      }
      service(request)
    }
  }
}
/**
 * Entry point for building mysql clients.
 *
 * @example {{{
 * val client = Mysql.client
 * .withCredentials("<username>", "<password>")
 * .withDatabase("<db>")
 * .newRichClient("inet!localhost:3306")
 * }}}
 */
object Mysql extends com.twitter.finagle.Client[Request, Result] with MysqlRichClient {

  /**
   * Implements a mysql client in terms of a
   * [[com.twitter.finagle.StackClient]]. The client inherits a wealth
   * of features from finagle including connection pooling and load
   * balancing.
   *
   * Additionally, this class provides methods for constructing a rich
   * client which exposes a rich mysql api.
   */
  case class Client(
    // Standard client stack with mysql-specific tracing swapped in.
    stack: Stack[ServiceFactory[Request, Result]] = StackClient.newStack
      .replace(ClientTracingFilter.role, MySqlClientTracingFilter.Stackable),
    // Pool configuration: at most one connection per host, unbounded waiters.
    params: Stack.Params = StackClient.defaultParams + DefaultPool.Param(
      low = 0, high = 1, bufferSize = 0,
      idleTime = Duration.Top,
      maxWaiters = Int.MaxValue)
  ) extends StdStackClient[Request, Result, Client] with MysqlRichClient {

    protected def copy1(
      stack: Stack[ServiceFactory[Request, Result]] = this.stack,
      params: Stack.Params = this.params
    ): Client = copy(stack, params)

    // The wire protocol is raw mysql packets in both directions.
    protected type In = Packet
    protected type Out = Packet

    protected def newTransporter() = MysqlTransporter(params)

    // Dispatches requests over the transport, performing the handshake
    // configured via `params` (credentials, database, charset).
    protected def newDispatcher(transport: Transport[Packet, Packet]): Service[Request, Result] =
      mysql.ClientDispatcher(transport, Handshake(params))

    /**
     * The credentials to use when authenticating a new session.
     */
    def withCredentials(u: String, p: String): Client =
      configured(Handshake.Credentials(Option(u), Option(p)))

    /**
     * Database to use when this client establishes a new session.
     */
    def withDatabase(db: String): Client =
      configured(Handshake.Database(Option(db)))

    /**
     * The default character set used when establishing
     * a new session.
     */
    def withCharset(charset: Short): Client =
      configured(Handshake.Charset(charset))
  }

  val client = Client()

  def newClient(dest: Name, label: String): ServiceFactory[Request, Result] =
    client.newClient(dest, label)

  /**
   * The credentials to use when authenticating a new session.
   */
  @deprecated("Use client.withCredentials", "6.22.0")
  def withCredentials(u: String, p: String): Client =
    client.configured(Handshake.Credentials(Option(u), Option(p)))

  /**
   * Database to use when this client establishes a new session.
   */
  @deprecated("Use client.withDatabase", "6.22.0")
  def withDatabase(db: String): Client =
    client.configured(Handshake.Database(Option(db)))

  /**
   * The default character set used when establishing
   * a new session.
   */
  @deprecated("Use client.withCharset", "6.22.0")
  def withCharset(charset: Short): Client =
    client.configured(Handshake.Charset(charset))

  /**
   * A client configured with parameter p.
   */
  @deprecated("Use client.configured", "6.22.0")
  def configured[P: Stack.Param](p: P): Client =
    client.configured(p)
}
| yancl/finagle-6.22.0 | finagle-mysql/src/main/scala/com/twitter/finagle/Mysql.scala | Scala | apache-2.0 | 5,481 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.common
/*
* We inherit from `Product` and `Serializable` because `case` objects and classes inherit from them and if we don't
* do it here, the compiler will infer types that unexpectedly include `Product` and `Serializable`, see
* http://underscore.io/blog/posts/2015/06/04/more-on-sealed.html for more information.
*/
/** Common contract for enum-like case objects that expose a stable `name`. */
trait BaseEnum extends Product with Serializable {
  /** Name of this enum member. */
  def name: String
}
| flange/drift-dev | kafka/00-kafka_2.11-0.10.1.0/libs/tmp/kafka/common/BaseEnum.scala | Scala | apache-2.0 | 1,210 |
package utilTests
import org.junit.Test
import xyz.nabijaczleweli.scala_game_of_life.util.NumberUtil
/** Unit tests for `NumberUtil.long2bin`: binary rendering of positive and
  * negative longs, including the extreme values.
  *
  * @author Jędrzej
  * @since 26.04.14
  */
class NumberUtilDecTest {

  @Test
  def `9to1001`(): Unit = {
    val t = NumberUtil long2bin 9
    assert(t == "1001", "Weird value: \\'" + t + "\\'.")
  }

  @Test
  def `-9to-1001`(): Unit = {
    val t = NumberUtil long2bin -9
    assert(t == "-1001", "Weird value: \\'" + t + "\\'.")
  }

  @Test
  def `100to1100100`(): Unit = {
    val t = NumberUtil long2bin 100
    assert(t == "1100100", "Weird value: \\'" + t + "\\'.")
  }

  // Renamed from `-100to1100100`: the test name now matches the expected
  // output "-1100100", consistent with `-9to-1001` above.
  @Test
  def `-100to-1100100`(): Unit = {
    val t = NumberUtil long2bin -100
    assert(t == "-1100100", "Weird value: \\'" + t + "\\'.")
  }

  @Test
  def `Long.MaxValueto111111111111111111111111111111111111111111111111111111111111111`(): Unit = {
    val t = NumberUtil long2bin Long.MaxValue
    assert(t == "111111111111111111111111111111111111111111111111111111111111111", "Weird value: \\'" + t + "\\'.")
  }

  // NOTE(review): this expects the negation of the MaxValue digits, i.e.
  // -(2^63 - 1), whereas Long.MinValue is -2^63 ("-1" followed by 63 zeros).
  // Presumably long2bin negates the magnitude in a way that cannot represent
  // |Long.MinValue| — confirm against NumberUtil's implementation.
  @Test
  def `Long.MinValueto-111111111111111111111111111111111111111111111111111111111111111`(): Unit = {
    val t = NumberUtil long2bin Long.MinValue
    assert(t == "-111111111111111111111111111111111111111111111111111111111111111", "Weird value: \\'" + t + "\\'.")
  }
}
| nabijaczleweli/Scala-Game-of-Life | src/test/scala/utilTests/NumberUtilDecTest.scala | Scala | mit | 1,180 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package base
import com.intellij.lang.ASTNode
import com.intellij.psi._
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.InferUtil.SafeCheckException
import org.jetbrains.plugins.scala.lang.psi.api.base._
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScParameterizedTypeElement, ScSimpleTypeElement, ScTypeElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScAssignStmt, ScExpression, ScNewTemplateDefinition, ScReferenceExpression}
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScTypeParam}
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScTypeAliasDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypeParametersOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.{ScClassParents, ScExtendsBlock}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
import org.jetbrains.plugins.scala.lang.psi.api.{InferUtil, ScalaElementVisitor}
import org.jetbrains.plugins.scala.lang.psi.impl.base.types.ScSimpleTypeElementImpl
import org.jetbrains.plugins.scala.lang.psi.types.Compatibility.Expression
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.{Parameter, ScMethodType, ScTypePolymorphicType, TypeParameter}
import org.jetbrains.plugins.scala.lang.psi.types.result.{Failure, Success, TypeResult, TypingContext}
import org.jetbrains.plugins.scala.lang.resolve.{ResolveUtils, ScalaResolveResult}
import org.jetbrains.plugins.scala.macroAnnotations.{Cached, ModCount}
import scala.collection.Seq
import scala.collection.immutable.HashMap
import scala.collection.mutable.ArrayBuffer
/**
 * PSI implementation of a constructor invocation (as found in `new`
 * expressions and class parents), providing expected-type computation and
 * constructor-type inference.
 *
 * @author Alexander Podkhalyuzin
 * Date: 22.02.2008
 */
class ScConstructorImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScConstructor {

  def typeElement: ScTypeElement = findNotNullChildByClass(classOf[ScTypeElement])

  override def toString: String = "Constructor"

  // The expected type, available only when this constructor is the sole
  // class parent of a `new` template definition.
  def expectedType: Option[ScType] = {
    getContext match {
      case parents: ScClassParents =>
        if (parents.allTypeElements.length != 1) None
        else {
          parents.getContext match {
            case e: ScExtendsBlock =>
              e.getContext match {
                case n: ScNewTemplateDefinition =>
                  n.expectedType()
                case _ => None
              }
            case _ => None
          }
        }
      case _ => None
    }
  }

  // The enclosing `new` template definition, if any.
  // NOTE(review): the inner match has no case for contexts other than
  // ScExtendsBlock — presumably unreachable when the parent is
  // ScClassParents, otherwise a MatchError would be thrown; confirm.
  def newTemplate = {
    getContext match {
      case parents: ScClassParents =>
        parents.getContext match {
          case e: ScExtendsBlock =>
            e.getContext match {
              case n: ScNewTemplateDefinition =>
                Some(n)
              case _ => None
            }
        }
      case _ => None
    }
  }

  //todo: duplicate ScSimpleTypeElementImpl
  // Wraps `tp` in a parameterized type over all of `clazz`'s type parameters.
  def parameterize(tp: ScType, clazz: PsiClass, subst: ScSubstitutor): ScType = {
    if (clazz.getTypeParameters.isEmpty) {
      tp
    } else {
      ScParameterizedType(tp, clazz.getTypeParameters.map {
        case tp: ScTypeParam => new ScTypeParameterType(tp, subst)
        case ptp => new ScTypeParameterType(ptp, subst)
      })
    }
  }

  // Shape type of parameter clause `i`, defined only when resolution is
  // unambiguous (exactly one candidate).
  def shapeType(i: Int): TypeResult[ScType] = {
    val seq = shapeMultiType(i)
    if (seq.length == 1) seq.head
    else Failure("Can't resolve type", Some(this))
  }

  def shapeMultiType(i: Int): Seq[TypeResult[ScType]] = innerMultiType(i, isShape = true)

  def multiType(i: Int): Seq[TypeResult[ScType]] = innerMultiType(i, isShape = false)

  // Computes one candidate type per resolved constructor for parameter
  // clause `i`. `isShape` selects shape resolution (argument-count based).
  private def innerMultiType(i: Int, isShape: Boolean): Seq[TypeResult[ScType]] = {
    def FAILURE = Failure("Can't resolve type", Some(this))

    // Builds the (possibly polymorphic) constructor type for one resolve result.
    def workWithResolveResult(constr: PsiMethod, r: ScalaResolveResult,
                              subst: ScSubstitutor, s: ScSimpleTypeElement,
                              ref: ScStableCodeReferenceElement): TypeResult[ScType] = {
      val clazz = constr.containingClass
      // The constructed type: either the alias target or the (parameterized)
      // reference type of the class.
      val tp = r.getActualElement match {
        case ta: ScTypeAliasDefinition => subst.subst(ta.aliasedType.getOrElse(return FAILURE))
        case _ =>
          parameterize(ScSimpleTypeElementImpl.calculateReferenceType(ref, shapesOnly = true).
            getOrElse(return FAILURE), clazz, subst)
      }
      // The method type of parameter clause `i`; Java constructors only
      // ever have a single parameter section.
      val res = constr match {
        case fun: ScMethodLike =>
          val methodType = ScType.nested(fun.methodType(Some(tp)), i).getOrElse(return FAILURE)
          subst.subst(methodType)
        case method: PsiMethod =>
          if (i > 0) return Failure("Java constructors only have one parameter section", Some(this))
          ResolveUtils.javaMethodType(method, subst, getResolveScope, Some(subst.subst(tp)))
      }
      // No type parameters => the method type is already complete.
      val typeParameters: Seq[TypeParameter] = r.getActualElement match {
        case tp: ScTypeParametersOwner if tp.typeParameters.nonEmpty =>
          tp.typeParameters.map(new TypeParameter(_))
        case ptp: PsiTypeParameterListOwner if ptp.getTypeParameters.nonEmpty =>
          ptp.getTypeParameters.toSeq.map(new TypeParameter(_))
        case _ => return Success(res, Some(this))
      }
      s.getParent match {
        // Explicit type arguments: substitute them directly.
        case p: ScParameterizedTypeElement =>
          val zipped = p.typeArgList.typeArgs.zip(typeParameters)
          val appSubst = new ScSubstitutor(new HashMap[(String, PsiElement), ScType] ++ zipped.map {
            case (arg, typeParam) =>
              ((typeParam.name, ScalaPsiUtil.getPsiElementId(typeParam.ptp)), arg.getType(TypingContext.empty).getOrAny)
          }, Map.empty, None)
          Success(appSubst.subst(res), Some(this))
        // No explicit type arguments: try local type inference against the
        // expected type; a failed (safe-check) inference leaves the
        // polymorphic type unrefined.
        case _ =>
          var nonValueType = ScTypePolymorphicType(res, typeParameters)
          expectedType match {
            case Some(expected) =>
              try {
                nonValueType = InferUtil.localTypeInference(nonValueType.internalType,
                  Seq(new Parameter("", None, expected, false, false, false, 0)),
                  Seq(new Expression(InferUtil.undefineSubstitutor(nonValueType.typeParameters).
                    subst(subst.subst(tp).inferValueType))),
                  nonValueType.typeParameters, shouldUndefineParameters = false, filterTypeParams = false)
              } catch {
                case s: SafeCheckException => //ignore
              }
            case _ =>
          }
          Success(nonValueType, Some(this))
      }
    }

    // Collects candidate types for every resolved constructor of the
    // simple type element's reference; annotation classes get a synthetic
    // method type built from their attribute methods.
    def processSimple(s: ScSimpleTypeElement): Seq[TypeResult[ScType]] = {
      s.reference match {
        case Some(ref) =>
          val buffer = new ArrayBuffer[TypeResult[ScType]]
          val resolve = if (isShape) ref.shapeResolveConstr else ref.resolveAllConstructors
          resolve.foreach {
            case r@ScalaResolveResult(constr: PsiMethod, subst) =>
              buffer += workWithResolveResult(constr, r, subst, s, ref)
            case ScalaResolveResult(clazz: PsiClass, subst) if !clazz.isInstanceOf[ScTemplateDefinition] && clazz.isAnnotationType =>
              val params = clazz.getMethods.flatMap {
                case p: PsiAnnotationMethod =>
                  val paramType = subst.subst(ScType.create(p.getReturnType, getProject, getResolveScope))
                  Seq(Parameter(p.getName, None, paramType, paramType, p.getDefaultValue != null, isRepeated = false, isByName = false))
                case _ => Seq.empty
              }
              buffer += Success(ScMethodType(ScDesignatorType(clazz), params, isImplicit = false)(getProject, getResolveScope), Some(this))
            case _ =>
          }
          buffer.toSeq
        case _ => Seq(Failure("Hasn't reference", Some(this)))
      }
    }

    simpleTypeElement.toSeq.flatMap(processSimple)
  }

  def reference: Option[ScStableCodeReferenceElement] = {
    simpleTypeElement.flatMap(_.reference)
  }

  // The underlying simple type element, unwrapping one level of
  // parameterization (e.g. `Foo[A]` -> `Foo`).
  def simpleTypeElement: Option[ScSimpleTypeElement] = typeElement match {
    case s: ScSimpleTypeElement => Some(s)
    case p: ScParameterizedTypeElement =>
      p.typeElement match {
        case s: ScSimpleTypeElement => Some(s)
        case _ => None
      }
    case _ => None
  }

  override def accept(visitor: ScalaElementVisitor) {
    visitor.visitConstructor(this)
  }

  override def accept(visitor: PsiElementVisitor) {
    visitor match {
      case s: ScalaElementVisitor => s.visitConstructor(this)
      case _ => super.accept(visitor)
    }
  }

  // Pairs every argument expression with its formal parameter, resolving
  // named arguments (`name = expr`) and mapping excess arguments onto the
  // last (presumably repeated) parameter. Cached per code block change.
  @Cached(true, ModCount.getBlockModificationCount, this)
  def matchedParameters: Seq[(ScExpression, Parameter)] = {
    val paramClauses = this.reference.flatMap(r => Option(r.resolve())) match {
      case Some(pc: ScPrimaryConstructor) => pc.parameterList.clauses.map(_.parameters)
      case Some(fun: ScFunction) if fun.isConstructor => fun.parameterList.clauses.map(_.parameters)
      case Some(m: PsiMethod) if m.isConstructor => Seq(m.getParameterList.getParameters.toSeq)
      case _ => Seq.empty
    }
    (for {
      (paramClause, argList) <- paramClauses.zip(arguments)
      (arg, idx) <- argList.exprs.zipWithIndex
    } yield {
      arg match {
        case ScAssignStmt(refToParam: ScReferenceExpression, Some(expr)) =>
          val param = paramClause.find(_.getName == refToParam.refName)
            .orElse(refToParam.resolve().asOptionOf[ScParameter])
          param.map(p => (expr, new Parameter(p))).toSeq
        case expr =>
          val paramIndex = Math.min(idx, paramClause.size - 1)
          paramClause.lift(paramIndex).map(p => (expr, new Parameter(p))).toSeq
      }
    }).flatten
  }
}
| jeantil/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/base/ScConstructorImpl.scala | Scala | apache-2.0 | 9,643 |
package lila.coach
import org.joda.time.DateTime
import lila.user.User
/**
 * A user's review of a coach. The id is deterministic ("user:coach"),
 * so each user can have at most one review per coach.
 */
case class CoachReview(
    _id: String, // user:coach
    userId: User.ID, // reviewer
    coachId: Coach.Id,
    score: Int,
    text: String,
    approved: Boolean, // only approved reviews count towards the average
    createdAt: DateTime,
    updatedAt: DateTime) {

  def id = _id
}
object CoachReview {

  /** Deterministic review id: one review per (reviewer, coach) pair. */
  def makeId(user: User, coach: Coach) = s"${user.id}:${coach.id.value}"

  /** Average review score (value class wrapper). */
  case class Score(value: Double) extends AnyVal

  /** All reviews of one coach. */
  case class Reviews(list: List[CoachReview]) {

    def approved = list.filter(_.approved)

    // Average over the approved reviews only.
    // Fix: the sum of approved scores was previously divided by `list.size`
    // (all reviews, approved or not), skewing the average downwards
    // whenever unapproved reviews existed.
    lazy val averageScore: Option[Score] = approved.nonEmpty option {
      Score(approved.map(_.score).sum.toDouble / approved.size)
    }
  }
}
| clarkerubber/lila | modules/coach/src/main/CoachReview.scala | Scala | agpl-3.0 | 701 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.sampler
import com.twitter.finagle.Service
import com.twitter.util.{Await, Future}
import com.twitter.zipkin.common._
import org.scalatest.FunSuite
class SpanSamplerFilterTest extends FunSuite {

  test("filters spans based on their traceId") {
    // Five spans with traceIds 0..4; the sampler predicate keeps traceId > 2.
    val spans = Seq(
      Span(0, "svc", 123L),
      Span(1, "svc", 123L),
      Span(2, "svc", 123L),
      Span(3, "svc", 123L),
      Span(4, "svc", 123L))

    // Downstream service captures whatever survives the filter.
    var rcvdSpans = Seq.empty[Span]
    val svc = new SpanSamplerFilter(_ > 2) andThen Service.mk[Seq[Span], Unit] { spans =>
      rcvdSpans = spans
      Future.Done
    }

    Await.ready(svc(spans))
    // Only the last two spans (traceIds 3 and 4) should pass.
    assert(rcvdSpans === spans.drop(3))
  }

  test("will not filter debug spans") {
    // The sampler rejects everything, but debug-flagged spans must still pass.
    val spans = Seq(
      Span(0, "svc", 123L, debug = Some(true)),
      Span(1, "svc", 123L, debug = Some(true)),
      Span(1, "svc", 123L))

    var rcvdSpans = Seq.empty[Span]
    val svc = new SpanSamplerFilter(_ => false) andThen Service.mk[Seq[Span], Unit] { spans =>
      rcvdSpans = spans
      Future.Done
    }

    Await.ready(svc(spans))
    // Only the two debug spans survive.
    assert(rcvdSpans === spans.take(2))
  }
}
| jkdcdlly/zipkin | zipkin-sampler/src/test/scala/com/twitter/zipkin/sampler/SpanSamplerFilterTest.scala | Scala | apache-2.0 | 1,725 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.hbase.tools.stats
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.hbase.data.HBaseDataStore
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand
import org.locationtech.geomesa.tools.CatalogParam
import org.locationtech.geomesa.tools.stats.{StatsHistogramCommand, StatsHistogramParams}
/** CLI command computing attribute histograms against an HBase-backed GeoMesa data store. */
class HBaseStatsHistogramCommand extends StatsHistogramCommand[HBaseDataStore] with HBaseDataStoreCommand {
  override val params = new HBaseStatsHistogramParams
}
// Command parameters: the generic histogram options plus the HBase catalog option.
@Parameters(commandDescription = "View or calculate counts of attribute in a GeoMesa feature type, grouped by sorted values")
class HBaseStatsHistogramParams extends StatsHistogramParams with CatalogParam
| ronq/geomesa | geomesa-hbase/geomesa-hbase-tools/src/main/scala/org/locationtech/geomesa/hbase/tools/stats/HBaseStatsHistogramCommand.scala | Scala | apache-2.0 | 1,192 |
/*
* Copyright 2011-2017 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.cdevreeze.yaidom.integrationtest
import java.time.DayOfWeek
import java.time.LocalDate
import java.time.Month
import scala.collection.immutable
import scala.reflect.classTag
import eu.cdevreeze.yaidom.core.EName
import eu.cdevreeze.yaidom.core.Path
import eu.cdevreeze.yaidom.core.QName
import eu.cdevreeze.yaidom.core.Scope
import eu.cdevreeze.yaidom.indexed
import eu.cdevreeze.yaidom.print.DocumentPrinterUsingDom
import eu.cdevreeze.yaidom.queryapi.BackingNodes
import eu.cdevreeze.yaidom.queryapi.ScopedElemLike
import eu.cdevreeze.yaidom.queryapi.ScopedNodes
import eu.cdevreeze.yaidom.queryapi.SubtypeAwareElemLike
import eu.cdevreeze.yaidom.resolved
import eu.cdevreeze.yaidom.simple
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite
/**
 * XML creation test: builds timesheet XML for several years using a small
 * "timesheet" yaidom dialect, checks invariants, then prints the result.
 *
 * @author Chris de Vreeze
 */
class XmlCreationTest extends AnyFunSuite with BeforeAndAfterAll {

  import XmlCreationTest._

  test("testCreateTimesheetXmlFor2017") {
    // First pass: populate the whole year with the default task assignments,
    // taking holidays, vacation and sick leave into account.
    val timesheetAfterFirstPass: Timesheet =
      getTimesheetAfterFirstPass(
        LocalDate.of(2017, 1, 1),
        LocalDate.of(2017, 12, 31),
        dutchHolidays2017,
        onVacation2017,
        sickLeaveDays2017,
        defaultTasksPerDay2017)

    // Second pass: apply day-specific refinements on top of the defaults.
    val timesheet =
      refineTimesheet(timesheetAfterFirstPass, refinementsPerDay2017)

    assertResult(true) {
      timesheet.days.size >= 250
    }
    assertResult(true) {
      timesheet.days.count(_.dayOfWeek == DayOfWeek.MONDAY) >= 35 &&
        timesheet.days.count(_.dayOfWeek == DayOfWeek.MONDAY) < 70
    }

    // The hackathon totals 24 hours, all of them in February.
    assertResult(24) {
      timesheet.allTasks.filter(_.taskName == "blockchain-hackathon").map(_.hours).sum
    }
    assertResult(Set(Month.FEBRUARY)) {
      timesheet.allTasks.filter(_.taskName == "blockchain-hackathon").map(_.date.getMonth).toSet
    }

    // Every day must account for exactly 8 hours.
    assertResult(Set(8)) {
      timesheet.days.map(_.totalHours).toSet
    }

    val docPrinter = DocumentPrinterUsingDom.newInstance()

    val timesheetSimpleElem = simple.Elem.from(timesheet)

    val xmlString = docPrinter.print(simple.Document(timesheetSimpleElem.prettify(2)))
    println()
    println(xmlString)
  }

  test("testCreateTimesheetXmlFor2018") {
    // Same two-pass construction as for 2017, with the 2018 calendar data.
    val timesheetAfterFirstPass: Timesheet =
      getTimesheetAfterFirstPass(
        LocalDate.of(2018, 1, 1),
        LocalDate.of(2018, 12, 31),
        dutchHolidays2018,
        onVacation2018,
        sickLeaveDays2018,
        defaultTasksPerDay2018)

    val timesheet =
      refineTimesheet(timesheetAfterFirstPass, refinementsPerDay2018)

    assertResult(true) {
      timesheet.days.size >= 250
    }
    assertResult(true) {
      timesheet.days.count(_.dayOfWeek == DayOfWeek.MONDAY) >= 35 &&
        timesheet.days.count(_.dayOfWeek == DayOfWeek.MONDAY) < 70
    }

    assertResult(Set(8)) {
      timesheet.days.map(_.totalHours).toSet
    }

    val docPrinter = DocumentPrinterUsingDom.newInstance()

    val timesheetSimpleElem = simple.Elem.from(timesheet)

    val xmlString = docPrinter.print(simple.Document(timesheetSimpleElem.prettify(2)))
    println()
    println(xmlString)
  }

  test("testCreateTimesheetXmlFor2019") {
    // 2019 is still in progress, hence the mid-year end date and the
    // weaker size thresholds below.
    val timesheetAfterFirstPass: Timesheet =
      getTimesheetAfterFirstPass(
        LocalDate.of(2019, 1, 1),
        LocalDate.of(2019, 9, 6), // Make that 2019-12-31 at the end of the year
        dutchHolidays2019,
        onVacation2019,
        sickLeaveDays2019,
        defaultTasksPerDay2019)

    val timesheet =
      refineTimesheet(timesheetAfterFirstPass, refinementsPerDay2019)

    assertResult(true) {
      timesheet.days.size >= 120 // Make that 250 at the end of the year
    }
    assertResult(true) {
      timesheet.days.count(_.dayOfWeek == DayOfWeek.MONDAY) >= 15 && // Make that at least 35 at the end of the year
        timesheet.days.count(_.dayOfWeek == DayOfWeek.MONDAY) < 70
    }

    assertResult(Set(8)) {
      timesheet.days.map(_.totalHours).toSet
    }

    val docPrinter = DocumentPrinterUsingDom.newInstance()

    val timesheetSimpleElem = simple.Elem.from(timesheet)

    val xmlString = docPrinter.print(simple.Document(timesheetSimpleElem.prettify(2)))
    println()
    println(xmlString)
  }
}
object XmlCreationTest {
// Yaidom dialect for timesheets
// Node hierarchy of the timesheet dialect: elements plus text nodes.
sealed trait TimesheetNode extends ScopedNodes.Node

// Text node wrapper holding plain string content.
final case class TimesheetText(text: String) extends TimesheetNode with ScopedNodes.Text
// Generic dialect element, implementing yaidom's ScopedElemLike and
// SubtypeAwareElemLike query APIs by delegating to the backing element.
sealed class TimesheetElem(val backingElem: BackingNodes.Elem)
  extends TimesheetNode with ScopedNodes.Elem with ScopedElemLike with SubtypeAwareElemLike {

  type ThisElem = TimesheetElem
  type ThisNode = TimesheetNode

  final def thisElem: ThisElem = TimesheetElem.this

  // Children converted to dialect nodes; node kinds other than elements
  // and text (e.g. comments) are dropped.
  final def children: immutable.IndexedSeq[TimesheetNode] = {
    backingElem.children flatMap {
      case e: BackingNodes.Elem => Some(TimesheetElem(e))
      case t: BackingNodes.Text => Some(TimesheetText(t.text))
      case _ => None
    }
  }

  final def findAllChildElems: immutable.IndexedSeq[TimesheetElem] = {
    backingElem.findAllChildElems.map(e => TimesheetElem(e))
  }

  final def resolvedName: EName = backingElem.resolvedName

  final def resolvedAttributes: immutable.IndexedSeq[(EName, String)] = backingElem.resolvedAttributes.toIndexedSeq

  final def text: String = backingElem.text

  final def scope: Scope = backingElem.scope

  final def qname: QName = backingElem.qname

  final def attributes: immutable.IndexedSeq[(QName, String)] = backingElem.attributes.toIndexedSeq
}
// Root "timesheet" element.
final class Timesheet(backingElem: BackingNodes.Elem) extends TimesheetElem(backingElem) {
  require(localName == "timesheet")

  // All day child elements, in document order.
  def days: immutable.IndexedSeq[Day] = findAllChildElemsOfType(classTag[Day])

  // Every task across all days.
  def allTasks: immutable.IndexedSeq[Task] = days.flatMap(_.tasks)
}
/** A "day" element; carries "date" and "dayOfWeek" attributes and holds "task" children. */
final class Day(backingElem: BackingNodes.Elem) extends TimesheetElem(backingElem) {
  require(localName == "day")

  def date: LocalDate = LocalDate.parse(attribute(EName("date")))

  def dayOfWeek: DayOfWeek = DayOfWeek.valueOf(attribute(EName("dayOfWeek")))

  def tasks: immutable.IndexedSeq[Task] = findAllChildElemsOfType(classTag[Task])

  /** Sum of the hours of all tasks on this day. */
  def totalHours: Int = tasks.map(_.hours).sum
}
/** A "task" element; name and hours are attributes, date/dayOfWeek come from the parent day. */
final class Task(backingElem: BackingNodes.Elem) extends TimesheetElem(backingElem) {
  require(localName == "task")

  def taskName: String = attribute(EName("name"))

  def hours: Int = attribute(EName("hours")).toInt

  // Navigates to the parent "day" element for the date attributes.
  def date: LocalDate = LocalDate.parse(backingElem.parent.attribute(EName("date")))

  def dayOfWeek: DayOfWeek = DayOfWeek.valueOf(backingElem.parent.attribute(EName("dayOfWeek")))
}
object TimesheetElem {

  /** Factory dispatching on the element's local name to the most specific dialect class. */
  def apply(backingElem: BackingNodes.Elem): TimesheetElem = {
    backingElem.resolvedName match {
      case EName(_, "timesheet") => new Timesheet(backingElem)
      case EName(_, "day") => new Day(backingElem)
      case EName(_, "task") => new Task(backingElem)
      case _ => new TimesheetElem(backingElem)
    }
  }
}
object Timesheet {

  /** Creates a Timesheet; the backing element's local name must be "timesheet". */
  def apply(backingElem: BackingNodes.Elem): Timesheet = {
    TimesheetElem(backingElem.ensuring(_.localName == "timesheet")).asInstanceOf[Timesheet]
  }
}
// Timesheet manipulation
// Task names that mark a whole day as leave (8 hours, no other tasks may be added).
val OfficialHoliday = "official-holiday"
val Vacation = "vacation"
val Sick = "sick"
// Weekends are considered free time here!
/** True exactly for Saturdays and Sundays, which are treated as free time. */
private def isWeekend(date: LocalDate): Boolean =
  Set(DayOfWeek.SATURDAY, DayOfWeek.SUNDAY).contains(date.getDayOfWeek)
/**
 * First pass: builds a timesheet skeleton with one "day" element per non-weekend day of
 * the period, then fills each day with either a full-day (8h) leave task or the default
 * tasks for that date. Leave categories take precedence in this order: official holiday,
 * then vacation, then sick leave, then default tasks.
 *
 * All dates in the parameter sets/maps must fall inside [startDate, endDate].
 */
private def getTimesheetAfterFirstPass(
  startDate: LocalDate,
  endDate: LocalDate,
  officialHolidays: Set[LocalDate],
  vacationDays: Set[LocalDate],
  sickLeaveDays: Set[LocalDate],
  defaultTasksPerDay: Map[LocalDate, Map[String, Int]]): Timesheet = {

  require(startDate.isBefore(endDate), s"$startDate not before $endDate")

  val allDates = getPeriodAsLocalDateSeq(startDate, endDate)

  require(
    officialHolidays.union(vacationDays).union(sickLeaveDays).union(defaultTasksPerDay.keySet).subsetOf(allDates.toSet),
    s"Not all used dates (in the first pass) fit in the period from $startDate to $endDate (maybe weekend?)")

  // Weekend days get no "day" element at all.
  val nonWeekendDays = allDates.filter(d => !isWeekend(d))

  import resolved.Node._

  // Skeleton: a "timesheet" root with one empty "day" element per working day.
  val emptyTimesheetElem: resolved.Elem =
    emptyElem(EName("timesheet"))
      .plusChildren(
        nonWeekendDays.map { day =>
          emptyElem(EName("day"))
            .plusAttribute(EName("date"), day.toString)
            .plusAttribute(EName("dayOfWeek"), day.getDayOfWeek.toString)
        })

  // Fill each day element according to the leave/default precedence described above.
  val timesheetElem = emptyTimesheetElem.transformChildElems {
    case elm if elm.localName == "day" =>
      val day = TimesheetElem(indexed.Elem(simple.Elem.from(elm, Scope.Empty))).asInstanceOf[Day]
      require(!isWeekend(day.date), s"Saturday or Sunday: ${day.date}")

      if (officialHolidays.contains(day.date)) {
        addTask(elm, OfficialHoliday, 8)
      } else if (vacationDays.contains(day.date)) {
        addTask(elm, Vacation, 8)
      } else if (sickLeaveDays.contains(day.date)) {
        addTask(elm, Sick, 8)
      } else if (defaultTasksPerDay.contains(day.date)) {
        defaultTasksPerDay(day.date).foldLeft(elm) {
          case (accDayElm, (taskName, hours)) =>
            addTask(accDayElm, taskName, hours)
        }
      } else {
        elm
      }
    case elm =>
      elm
  }

  val timesheet = Timesheet(indexed.Elem(simple.Elem.from(timesheetElem, Scope.Empty)))

  // Postcondition: every working day of the period is represented in the result.
  timesheet
    .ensuring(_.days.map(_.date).toSet == nonWeekendDays.toSet, s"Not all days filled from $startDate to $endDate")
}
/**
 * Second (or later) pass: for each refined date, carves the given tasks out of that day's
 * longest existing task, so the day's total hours stay unchanged. Tasks whose hours drop
 * to zero are removed at the end.
 */
private def refineTimesheet(
  prevTimesheet: Timesheet,
  refinedTasksPerDay: Map[LocalDate, Map[String, Int]]): Timesheet = {

  require(
    refinedTasksPerDay.keySet.subsetOf(prevTimesheet.days.map(_.date).toSet),
    s"Not all used dates (in the second pass or later) fit in the period of the given timesheet (maybe weekend?)")

  // Paths (relative to the root) of all "day" elements, keyed by date.
  val pathsPerDate: Map[LocalDate, Path] = {
    prevTimesheet.days.map(d => d.date -> d.backingElem.path).toMap
  }

  // Only the day elements that actually have refinements are updated below.
  // Method filterKeys deprecated since Scala 2.13.0.
  val paths: Set[Path] =
    pathsPerDate.filter { case (dt, _) => refinedTasksPerDay.keySet.contains(dt) }.values.toSet

  val rawResultTimesheetElem =
    resolved.Elem.from(prevTimesheet).updateElems(paths) {
      case (elm, _) =>
        assert(elm.localName == "day")
        val date = LocalDate.parse(elm.attribute(EName("date")))
        assert(refinedTasksPerDay.contains(date))

        val taskHours: Map[String, Int] = refinedTasksPerDay(date)

        // Each refinement steals its hours from an existing task (total hours preserved).
        taskHours.foldLeft(elm) {
          case (accDayElm, (taskName, hours)) =>
            tryToAddTaskWithoutChangingTotalHours(accDayElm, taskName, hours)
        }
    }

  // Drop tasks that lost all of their hours to refinements.
  val resultTimesheetElem = removeTasksWithoutHours(rawResultTimesheetElem)

  val resultTimesheet = Timesheet(indexed.Elem(simple.Elem.from(resultTimesheetElem, Scope.Empty)))
  resultTimesheet
}
/** True if the (non-weekend) day contains a holiday, vacation or sick-leave task. */
private def onLeave(day: Day): Boolean = {
  val leaveNames = Set(OfficialHoliday, Vacation, Sick)
  !isWeekend(day.date) && day.tasks.exists(t => leaveNames.contains(t.taskName))
}
// Adding and updating tasks
/** Appends a "task" child element with the given name and hours to the day element. */
private def addTask(dayElem: resolved.Elem, taskName: String, hours: Int): resolved.Elem = {
  assert(dayElem.localName == "day")

  dayElem
    .plusChild(
      resolved.Node.emptyElem(EName("task"), Map(EName("name") -> taskName, EName("hours") -> hours.toString)))
}
/**
 * Subtracts the given number of hours from the first task with the given name that has at
 * least that many hours. Returns None if no such task exists; the hours attribute of the
 * affected task may become zero (cleanup happens elsewhere).
 */
private def tryToSubtractHoursFromTask(dayElem: resolved.Elem, taskName: String, hours: Int): Option[resolved.Elem] = {
  assert(dayElem.localName == "day")
  val day = TimesheetElem(indexed.Elem(simple.Elem.from(dayElem, Scope.Empty))).asInstanceOf[Day]

  // Index (among the day's tasks) of the first matching task, or -1 if none qualifies.
  val taskIndex: Int =
    day.tasks.zipWithIndex.find(kv => kv._1.taskName == taskName && kv._1.hours >= hours).map(_._2).getOrElse(-1)

  if (taskIndex < 0) {
    None
  } else {
    // The path entry indexes among "task"-named children, matching the index in day.tasks.
    val pathEntry = Path.Entry(EName("task"), taskIndex)
    val prevTaskValue = day.tasks(taskIndex).ensuring(_.taskName == taskName)

    val result =
      dayElem.updateChildElem(pathEntry) { taskElm =>
        taskElm
          .plusAttribute(EName("hours"), (prevTaskValue.hours.ensuring(_ >= hours) - hours).toString)
      }
    Some(result)
  }
}
/**
 * Adds a task of the given name and hours, stealing those hours from an existing task with
 * the same name as the day's longest task, so the day's total hours remain unchanged. If
 * the day has no tasks, or no task is long enough to donate the hours, the day element is
 * returned unchanged. Must not be called on leave days (holiday/vacation/sick).
 */
private def tryToAddTaskWithoutChangingTotalHours(
  dayElem: resolved.Elem,
  taskName: String,
  hours: Int): resolved.Elem = {

  assert(dayElem.localName == "day")
  val day = TimesheetElem(indexed.Elem(simple.Elem.from(dayElem, Scope.Empty))).asInstanceOf[Day]
  require(!onLeave(day), s"Cannot add task (on ${day.date} the employee is on leave)")

  val longestTaskOption: Option[Task] = day.tasks.sortBy(_.hours).reverse.headOption

  if (longestTaskOption.isEmpty) {
    dayElem
  } else if (longestTaskOption.get.hours < hours) {
    // Not enough hours in any single task to carve the new task out of; leave unchanged.
    dayElem
  } else {
    val longestTask = longestTaskOption.get

    val tempResultOption: Option[resolved.Elem] =
      tryToSubtractHoursFromTask(dayElem, longestTask.taskName, hours)

    tempResultOption.map(d => addTask(d, taskName, hours)).getOrElse(dayElem)
  } ensuring { newDayElm =>
    // Postcondition: the day's total number of hours did not change.
    val newDay = TimesheetElem(indexed.Elem(simple.Elem.from(newDayElm, Scope.Empty))).asInstanceOf[Day]
    newDay.tasks.map(_.hours).sum == day.tasks.map(_.hours).sum
  }
}
/** Removes all "task" elements whose "hours" attribute equals zero. */
private def removeTasksWithoutHours(timesheetElem: resolved.Elem): resolved.Elem = {
  assert(timesheetElem.localName == "timesheet")

  timesheetElem transformElemsToNodeSeq {
    case e if e.localName == "task" && e.attribute(EName("hours")).toInt == 0 =>
      Vector()
    case e =>
      Vector(e)
  }
}
/** Returns all dates from startDate to endDate, both inclusive; startDate must be earlier. */
def getPeriodAsLocalDateSeq(startDate: LocalDate, endDate: LocalDate): immutable.IndexedSeq[LocalDate] = {
  require(startDate.isBefore(endDate), s"$startDate not before $endDate")
  val allDatesFromStart = Iterator.iterate(startDate)(_.plusDays(1))
  allDatesFromStart.takeWhile(!_.isAfter(endDate)).toIndexedSeq
}
/** Returns all dates of the given year, from January 1st to December 31st inclusive. */
def getYearAsPeriodAsLocalDateSeq(year: Int): immutable.IndexedSeq[LocalDate] = {
  getPeriodAsLocalDateSeq(LocalDate.of(year, 1, 1), LocalDate.of(year, 12, 31))
}
// Timesheet data 2017
// Pass 1
// Official (Dutch) public holidays in 2017; each becomes a full "official-holiday" day.
private val dutchHolidays2017: Set[LocalDate] = {
  Set(
    LocalDate.of(2017, 1, 1),
    LocalDate.of(2017, 4, 16),
    LocalDate.of(2017, 4, 17),
    LocalDate.of(2017, 4, 27),
    LocalDate.of(2017, 5, 5),
    LocalDate.of(2017, 5, 25),
    LocalDate.of(2017, 6, 4),
    LocalDate.of(2017, 6, 5),
    LocalDate.of(2017, 12, 25),
    LocalDate.of(2017, 12, 26))
}
// Vacation days in 2017: a few single days plus a summer and an end-of-year period.
private val onVacation2017: Set[LocalDate] = {
  Set(LocalDate.of(2017, 3, 3), LocalDate.of(2017, 5, 26), LocalDate.of(2017, 6, 2))
    .union(getPeriodAsLocalDateSeq(LocalDate.of(2017, 7, 21), LocalDate.of(2017, 8, 11)).toSet)
    .union(getPeriodAsLocalDateSeq(LocalDate.of(2017, 12, 22), LocalDate.of(2017, 12, 31)).toSet)
}
// Sick leave days in 2017.
private val sickLeaveDays2017: Set[LocalDate] = {
  Set(LocalDate.of(2017, 8, 21))
}
// Default: 8 hours of XBRL-validator work per day. Weekend keys are harmless here, since
// the first pass only creates "day" elements for non-weekend dates.
private val defaultTasksPerDay2017: Map[LocalDate, Map[String, Int]] = {
  getYearAsPeriodAsLocalDateSeq(2017)
    .map(d => d -> Map("XBRL-validator" -> 8))
    .toMap
}
// Pass 2
// TODO Check with agenda (e.g. recurring meetings) and commits in Github
// Pass-2 refinements for 2017: per date, the tasks (with hours) to carve out of that
// day's default work. The trailing `ensuring` rejects weekend dates up front.
private val refinementsPerDay2017: Map[LocalDate, Map[String, Int]] = {
  Map(
    LocalDate.of(2017, 1, 3) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 1, 4) -> Map("blockchain-preparation" -> 1),
    LocalDate.of(2017, 1, 12) -> Map("blockchain-training" -> 8),
    LocalDate.of(2017, 1, 13) -> Map("blockchain-training" -> 8),
    LocalDate.of(2017, 1, 17) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 1, 20) -> Map("performance-review" -> 1),
    LocalDate.of(2017, 1, 26) -> Map("blockchain-preparation" -> 8),
    LocalDate.of(2017, 1, 30) -> Map("training-sharepoint" -> 1),
    LocalDate.of(2017, 1, 31) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 2, 1) -> Map("blockchain-preparation" -> 8),
    LocalDate.of(2017, 2, 3) -> Map("blockchain-preparation" -> 1),
    LocalDate.of(2017, 2, 8) -> Map("blockchain-preparation" -> 4),
    LocalDate.of(2017, 2, 9) -> Map("blockchain-hackathon" -> 8),
    LocalDate.of(2017, 2, 10) -> Map("blockchain-hackathon" -> 8),
    LocalDate.of(2017, 2, 13) -> Map("blockchain-hackathon" -> 8),
    LocalDate.of(2017, 2, 14) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 2, 20) -> Map("HR" -> 1, "new-year" -> 3),
    LocalDate.of(2017, 2, 24) -> Map("brainstorm-session" -> 1),
    LocalDate.of(2017, 2, 28) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 3, 14) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 3, 17) -> Map("interview-applicant" -> 2),
    LocalDate.of(2017, 3, 24) -> Map("bila" -> 1),
    LocalDate.of(2017, 3, 28) -> Map("sprint-demo" -> 4, "interview-applicant" -> 2),
    LocalDate.of(2017, 4, 11) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 4, 14) -> Map("team-data" -> 8),
    LocalDate.of(2017, 4, 18) -> Map("team-data" -> 6),
    LocalDate.of(2017, 4, 25) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 5, 2) -> Map("team-data" -> 8),
    LocalDate.of(2017, 5, 9) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 5, 10) -> Map("team-data" -> 5, "knowledge-session" -> 2),
    LocalDate.of(2017, 5, 12) -> Map("team-data" -> 6),
    LocalDate.of(2017, 5, 15) -> Map("bila" -> 1),
    LocalDate.of(2017, 5, 16) -> Map("team-data" -> 8),
    LocalDate.of(2017, 5, 17) -> Map("team-data" -> 5),
    LocalDate.of(2017, 5, 18) -> Map("interview-applicant" -> 2),
    LocalDate.of(2017, 5, 19) -> Map("bila" -> 1),
    LocalDate.of(2017, 5, 23) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 5, 24) -> Map("team-data" -> 5),
    LocalDate.of(2017, 6, 6) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 6, 12) -> Map("team-data" -> 3, "strategy-team-orange" -> 2),
    LocalDate.of(2017, 6, 14) -> Map("team-data" -> 5),
    LocalDate.of(2017, 6, 20) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 7, 4) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 8, 15) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 8, 23) -> Map("team-data" -> 6, "bila" -> 1),
    LocalDate.of(2017, 8, 29) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 8, 31) -> Map("farewell" -> 2, "interview-applicant" -> 2),
    LocalDate.of(2017, 9, 1) -> Map("meeting-development-process" -> 2),
    LocalDate.of(2017, 9, 12) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 9, 18) -> Map("bila" -> 1),
    LocalDate.of(2017, 9, 26) -> Map("organisation-change" -> 2),
    LocalDate.of(2017, 10, 3) -> Map("team-data" -> 8),
    LocalDate.of(2017, 10, 10) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 10, 16) -> Map("team-data" -> 3, "bila" -> 1),
    LocalDate.of(2017, 10, 24) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 11, 7) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 11, 16) -> Map("organisation-change" -> 2),
    LocalDate.of(2017, 11, 17) -> Map("farewell" -> 2),
    LocalDate.of(2017, 11, 21) -> Map("sprint-demo" -> 4),
    LocalDate.of(2017, 11, 22) -> Map("bila" -> 1, "townhall" -> 2),
    LocalDate.of(2017, 11, 24) -> Map("chapter-meeting" -> 1),
    LocalDate.of(2017, 11, 28) -> Map("meeting-about-conferences" -> 2),
    LocalDate.of(2017, 11, 30) -> Map("organisation-change" -> 2),
    LocalDate.of(2017, 12, 5) -> Map("sprint-demo" -> 4),
    // Fixed typo: was "chapter-meeeting", now consistent with the Nov 24 entry above.
    LocalDate.of(2017, 12, 14) -> Map("meeting-performance-reviews" -> 2, "chapter-meeting" -> 1),
    LocalDate.of(2017, 12, 18) -> Map("bila" -> 1),
    LocalDate.of(2017, 12, 19) -> Map("organisation-change" -> 2))
} ensuring(_.keySet.forall(d => !isWeekend(d)), s"No weekend days allowed")
// Timesheet data 2018
// Pass 1
// Official (Dutch) public holidays in 2018.
private val dutchHolidays2018: Set[LocalDate] = {
  Set(
    LocalDate.of(2018, 1, 1),
    LocalDate.of(2018, 4, 1),
    LocalDate.of(2018, 4, 2),
    LocalDate.of(2018, 4, 27),
    LocalDate.of(2018, 5, 10),
    LocalDate.of(2018, 5, 20),
    LocalDate.of(2018, 5, 21),
    LocalDate.of(2018, 12, 25),
    LocalDate.of(2018, 12, 26))
}
// Vacation days in 2018: single days plus summer, autumn and end-of-year periods.
private val onVacation2018: Set[LocalDate] = {
  Set(LocalDate.of(2018, 5, 11), LocalDate.of(2018, 8, 22))
    .union(getPeriodAsLocalDateSeq(LocalDate.of(2018, 7, 20), LocalDate.of(2018, 8, 10)).toSet)
    .union(getPeriodAsLocalDateSeq(LocalDate.of(2018, 9, 17), LocalDate.of(2018, 9, 21)).toSet)
    .union(getPeriodAsLocalDateSeq(LocalDate.of(2018, 12, 21), LocalDate.of(2018, 12, 24)).toSet)
    .union(getPeriodAsLocalDateSeq(LocalDate.of(2018, 12, 27), LocalDate.of(2018, 12, 31)).toSet)
}
// Sick leave days in 2018.
private val sickLeaveDays2018: Set[LocalDate] = {
  Set(LocalDate.of(2018, 1, 19))
}
// Default tasks for 2018: maintenance work through February, development from March on.
private val defaultTasksPerDay2018: Map[LocalDate, Map[String, Int]] = {
  val maintenanceDates: immutable.IndexedSeq[LocalDate] =
    getPeriodAsLocalDateSeq(LocalDate.of(2018, 1, 1), LocalDate.of(2018, 2, 28))
  val devDates: immutable.IndexedSeq[LocalDate] =
    getPeriodAsLocalDateSeq(LocalDate.of(2018, 3, 1), LocalDate.of(2018, 12, 31))

  maintenanceDates.distinct.map(d => d -> Map("Gegevenstooling maintenance" -> 8)).toMap ++
    devDates.distinct.map(d => d -> Map("Gegevenstooling development" -> 8)).toMap
}
// Pass 2
// TODO Check with agenda (e.g. recurring meetings) and commits in Github
// Pass-2 refinements for 2018.
// NOTE(review): unlike the 2017 refinements, this map has no `ensuring` that rules out
// weekend dates - consider adding one for symmetry.
private val refinementsPerDay2018: Map[LocalDate, Map[String, Int]] = {
  Map(
    LocalDate.of(2018, 1, 8) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 1, 24) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 2, 5) -> Map("Meeting" -> 4),
    LocalDate.of(2018, 2, 6) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 2, 7) -> Map("Meeting" -> 2),
    LocalDate.of(2018, 2, 19) -> Map("Meeting" -> 5),
    LocalDate.of(2018, 2, 22) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 3, 7) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 3, 9) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 3, 13) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 3, 15) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 3, 22) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 3, 28) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 4, 6) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 4, 10) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 4, 16) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 4, 20) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 4, 24) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 4, 26) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 5, 1) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 5, 3) -> Map("Meeting" -> 4),
    LocalDate.of(2018, 5, 22) -> Map("Meeting" -> 2),
    LocalDate.of(2018, 5, 23) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 6, 13) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 6, 25) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 7, 12) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 8, 21) -> Map("Meeting" -> 4),
    LocalDate.of(2018, 8, 29) -> Map("Meeting" -> 2),
    LocalDate.of(2018, 9, 3) -> Map("Meeting" -> 2),
    LocalDate.of(2018, 9, 5) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 9, 13) -> Map("Meeting" -> 2),
    LocalDate.of(2018, 9, 27) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 10, 5) -> Map("Borrel" -> 3),
    LocalDate.of(2018, 10, 10) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 10, 12) -> Map("Meeting" -> 2),
    LocalDate.of(2018, 10, 23) -> Map("XBRL-Workshop" -> 3),
    LocalDate.of(2018, 10, 26) -> Map("Meeting" -> 3),
    LocalDate.of(2018, 11, 5) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 11, 9) -> Map("Company-Outing" -> 2),
    LocalDate.of(2018, 11, 20) -> Map("Meeting" -> 2),
    LocalDate.of(2018, 11, 23) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 11, 22) -> Map("Security-Training" -> 4),
    LocalDate.of(2018, 11, 28) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 11, 30) -> Map("Meeting" -> 1),
    LocalDate.of(2018, 12, 4) -> Map("Meeting" -> 3),
    LocalDate.of(2018, 12, 5) -> Map("Meeting" -> 2),
    LocalDate.of(2018, 12, 20) -> Map("Borrel" -> 1))
}
// Timesheet data 2019
// Pass 1
// Official (Dutch) public holidays in 2019; the Christmas dates are still commented out
// because the timesheet period does not yet extend to the end of the year.
private val dutchHolidays2019: Set[LocalDate] = {
  Set(
    LocalDate.of(2019, 1, 1),
    LocalDate.of(2019, 4, 22),
    LocalDate.of(2019, 5, 30),
    LocalDate.of(2019, 6, 10),
    /* LocalDate.of(2019, 12, 25), */
    /* LocalDate.of(2019, 12, 26) */)
}
// Vacation days in 2019 (so far).
private val onVacation2019: Set[LocalDate] = {
  Set(LocalDate.of(2019, 5, 31))
    .union(getPeriodAsLocalDateSeq(LocalDate.of(2019, 1, 1), LocalDate.of(2019, 1, 4)).toSet)
    .union(getPeriodAsLocalDateSeq(LocalDate.of(2019, 6, 3), LocalDate.of(2019, 6, 4)).toSet)
    .union(getPeriodAsLocalDateSeq(LocalDate.of(2019, 7, 17), LocalDate.of(2019, 8, 9)).toSet)
}
// Sick leave days in 2019 (a consecutive period).
private val sickLeaveDays2019: Set[LocalDate] = {
  getPeriodAsLocalDateSeq(LocalDate.of(2019, 2, 27), LocalDate.of(2019, 3, 1)).toSet
}
// Default tasks for 2019: maintenance through June, development from July onwards.
private val defaultTasksPerDay2019: Map[LocalDate, Map[String, Int]] = {
  val maintenanceDates: immutable.IndexedSeq[LocalDate] =
    getPeriodAsLocalDateSeq(LocalDate.of(2019, 1, 1), LocalDate.of(2019, 6, 30))
  val devDates: immutable.IndexedSeq[LocalDate] =
    getPeriodAsLocalDateSeq(LocalDate.of(2019, 7, 1), LocalDate.of(2019, 9, 6)) // Adapt at end of year

  maintenanceDates.distinct.map(d => d -> Map("Gegevenstooling maintenance" -> 8)).toMap ++
    devDates.distinct.map(d => d -> Map("Gegevenstooling development" -> 8)).toMap
}
// Pass 2
// TODO Check with agenda (e.g. recurring meetings) and commits in Github
// Pass-2 refinements for 2019 (to be extended as the year progresses).
private val refinementsPerDay2019: Map[LocalDate, Map[String, Int]] = {
  Map(
    LocalDate.of(2019, 6, 25) -> Map("Meeting" -> 1),
  )
}
}
| dvreeze/yaidom | jvm/src/test/scala/eu/cdevreeze/yaidom/integrationtest/XmlCreationTest.scala | Scala | apache-2.0 | 26,660 |
package sorm.reflection
import sext._
import scala.reflect.runtime.universe._
object `package` {

  /** Enriches any (type-tagged) value with a `reflected` view pairing the value with the
    * Reflection of its static type.
    */
  implicit class AnyReflected
    [ T : TypeTag ]
    ( any : T )
    {
      def reflected
        = new Reflected( any, Reflection( typeTag[T] ) )
    }

  /** Adds reflective instantiation helpers to `java.lang.Class`. */
  implicit class ClassAdapter
    [ T ]
    ( c : Class[T] )
    {
      /** Instantiates via the class's first declared constructor.
        *
        * On an argument mismatch, rethrows IllegalArgumentException with a diagnostic
        * (built via sext's `valueTreeString`) listing each constructor parameter type
        * whose argument is not assignable, together with the actual argument's class
        * and value.
        *
        * NOTE(review): assumes the class has at least one public constructor; otherwise
        * `getConstructors.head` throws - confirm callers guarantee this.
        */
      def instantiate
        ( args : Seq[Any] )
        : T
        = try {
            c .getConstructors.head
              .newInstance(args.asInstanceOf[Seq[Object]]: _*)
              .asInstanceOf[T]
          } catch {
            case e : IllegalArgumentException =>
              throw new IllegalArgumentException(
                e.getMessage + ":\\n" +
                "Incorrect values of parameter types:\\n" +
                c.getConstructors.head.getParameterTypes.view
                  .zip(args)
                  .filter{ case (t, v) => !t.isAssignableFrom(v.getClass) }
                  .map{ case (t, v) => t -> (v.getClass -> v) }
                  .valueTreeString,
                e
              )
          }
    }
}
| cllu/sorm2 | src/main/scala/sorm/reflection/package.scala | Scala | mit | 1,071 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.regression
import slamdata.Predef._
import argonaut._, Json._
/** Expected outcome of a regression test query.
  *
  * @param rows              the expected result rows, as JSON values
  * @param predicate         the predicate used to compare actual rows against `rows`
  * @param ignoredFields     JSON fields to ignore when comparing rows
  * @param ignoreFieldOrder  whether field order within a row is irrelevant
  * @param ignoreResultOrder whether the order of the result rows is irrelevant
  * @param backends          per-backend directives for this expectation
  */
case class ExpectedResult(
  rows: List[Json],
  predicate: Predicate,
  ignoredFields: List[JsonField],
  ignoreFieldOrder: Boolean,
  ignoreResultOrder: Boolean,
  backends: Directives)
| jedesah/Quasar | it/src/test/scala/quasar/regression/ExpectedResult.scala | Scala | apache-2.0 | 908 |
package idv.brianhsu.maidroid.plurk.view
import idv.brianhsu.maidroid.plurk._
import idv.brianhsu.maidroid.plurk.TypedResource._
import idv.brianhsu.maidroid.ui.util.CallbackConversions._
import android.content.Context
import android.widget.LinearLayout
import android.view.LayoutInflater
import android.view.View
import android.util.AttributeSet
import android.widget.Button
/**
 * Compound Android view showing an error message, optionally with a "retry" button.
 * Inflates `R.layout.module_error_notice` into itself on construction.
 */
class ErrorNoticeView(context: Context, attrs: AttributeSet) extends LinearLayout(context, attrs) {

  // Lazily resolved; the child views only exist after initView() has inflated the layout.
  private lazy val inflater = LayoutInflater from context
  private lazy val messageTextView = this.findView(TR.moduleErrorNoticeText)
  private lazy val retryButton = this.findView(TR.moduleErrorNoticeRetryButton)

  initView()

  // Inflates the module layout, attaching its children directly to this LinearLayout.
  private def initView() {
    inflater.inflate(R.layout.module_error_notice, this, true)
  }

  /** Shows the message without enabling the retry button. */
  def setMessage(message: String) {
    messageTextView.setText(message)
  }

  /** Shows the message and enables the retry button, invoking `callback` on each click. */
  def setMessageWithRetry(message: String)(callback: Button => Any) {
    messageTextView.setText(message)
    retryButton.setOnClickListener { view: View => callback(retryButton) }
    retryButton.setEnabled(true)
  }
}
| brianhsu/MaidroidPlurk | src/main/scala/view/ErrorNoticeView.scala | Scala | gpl-3.0 | 1,107 |
package edu.gemini.sp.vcs2
import edu.gemini.pot.sp._
import edu.gemini.pot.sp.version._
import edu.gemini.shared.util._
import edu.gemini.sp.vcs2.NodeDetail.Obs
import edu.gemini.sp.vcs2.VcsFailure._
import edu.gemini.spModel.rich.pot.sp._
import scalaz._
import Scalaz._
/** Describes the modifications required for a local program to complete a
* merge.
*/
case class MergePlan(update: Tree[MergeNode], delete: Set[Missing]) {

  /** True if the MergePlan contains no updates. */
  def isEmpty: Boolean = update.rootLabel match {
    case Unmodified(_) => delete.isEmpty
    case _             => false
  }

  def nonEmpty: Boolean = !isEmpty

  /** "Encode" for serialization.  The issue is that `scalaz.Tree` is not
    * `Serializable` but we need to send `MergePlan`s over `trpc`. */
  def encode: MergePlan.Transport = {
    def encodeTree(t: Tree[MergeNode]): MergePlan.TreeTransport =
      MergePlan.TreeTransport(t.rootLabel, t.subForest.toList.map(encodeTree))

    MergePlan.Transport(encodeTree(update), delete)
  }

  /** Gets the `VersionMap` of the provided program as it will be after the
    * updates in this plan have been applied. */
  def vm(p: ISPProgram): VersionMap = vm(p.getVersions)

  /** Gets the `VersionMap` with any modifications required that will be made
    * by this merge plan. */
  def vm(vm0: VersionMap): VersionMap = {
    // Extract the updates to the VersionMap from the MergePlan.
    // NOTE(review): the inner `vm0` deliberately shadows the parameter; the fold over
    // `delete` then adds the versions of the removed nodes.
    val vmUpdates: VersionMap = {
      val vm0 = update.sFoldRight(Map.empty[SPNodeKey, NodeVersions]) { (mn, m) =>
        mn match {
          case Modified(k, nv, _, _, _) => m.updated(k, nv)
          case _                        => m
        }
      }
      (vm0/:delete) { case (vm1, Missing(k, nv)) => vm1.updated(k, nv) }
    }
    vm0 ++ vmUpdates
  }

  /** Compares the version information in this merge plan with the given
    * version map.  The assumption here is that any unmodified parts of the
    * program tree have the same version information and do not need to be
    * considered in the calculation. */
  def compare(vm: VersionMap): VersionComparison = {
    val vm0 = vm.withDefaultValue(EmptyNodeVersions)

    // Combine the comparison of every modified node and every deleted node.
    val up  = update.foldMap {
      case Modified(k, nv, _, _, _) => VersionComparison.compare(nv, vm0(k))
      case Unmodified(_)            => VersionComparison.Same
    }

    val del = delete.foldMap {
      case Missing(k, nv) => VersionComparison.compare(nv, vm0(k))
    }

    up |+| del
  }

  /** True if this plan contains non-empty `Conflicts`, false otherwise.
    */
  def hasConflicts: Boolean =
    update.sFoldRight(false) { (mn, b) =>
      b || (mn match {
        case Modified(_, _, _, _, con) => !con.isEmpty
        case _                         => false
      })
    }

  /** Accepts a program and edits it according to this merge plan. */
  def merge(f: ISPFactory, p: ISPProgram): VcsAction[Unit] = {
    // Tries to create an ISPNode from the information in the MergeNode.
    def create(mn: MergeNode): TryVcs[ISPNode] =
      mn match {
        case Modified(k, _, dob, _, _) =>
          NodeFactory.mkNode(f, p, dob.getType, Some(k)) \\/>
            Unexpected("Could not create science program node of type: " + dob.getType)
        case Unmodified(k)             =>
          Unexpected(s"Unmodified node with key $k not found in program ${p.getProgramID}.").left
      }

    // Edit the ISPNode, applying the changes in the MergeNode if any.
    def edit(t: Tree[(MergeNode, ISPNode)]): Unit = {
      t.rootLabel match {
        case (Modified(_, nv, dob, det, con), n) =>
          n.setDataObject(dob)
          n.setConflicts(con)

          // If it is an observation, set the observation number.
          (det, n) match {
            case (Obs(num), o: ISPObservation) => o.setObservationNumber(num)
            case _                             => // not an observation
          }

          // Edit then set the children.
          t.subForest.foreach(edit)
          n.children = t.subForest.toList.map(_.rootLabel._2)

        case (Unmodified(_), _)                  => // do nothing
      }
    }

    // Pair up MergeNodes with their corresponding ISPNode, creating any missing
    // ISPNodes as necessary.
    val mergeTree: VcsAction[Tree[(MergeNode, ISPNode)]] = {
      val nodeMap = p.nodeMap
      update.traverseU { mn =>
        nodeMap.get(mn.key).fold(create(mn)) { _.right }.strengthL(mn)
      }
    }.liftVcs

    // Apply the edits and store the updated version map; any thrown exception is
    // captured as a VcsException.
    def doEdit(mt: Tree[(MergeNode, ISPNode)]): VcsAction[Unit] =
      \\/.fromTryCatch {
        edit(mt)
        p.setVersions(vm(p))
      }.leftMap(VcsException).liftVcs

    mergeTree >>= doEdit
  }
}
object MergePlan {
  /** A serializable Tree[MergeNode].  Sadly scalaz.Tree is not serializable. */
  case class TreeTransport(mn: MergeNode, children: List[TreeTransport]) {
    /** Rebuilds the scalaz.Tree from this serializable representation. */
    def decode: Tree[MergeNode] = Tree.node(mn, children.map(_.decode).toStream)
  }

  /** A serializable MergePlan.  Sadly the Tree[MergeNode] contained in the
    * MergePlan is not serializable.
    */
  case class Transport(update: TreeTransport, delete: Set[Missing]) {
    /** Rebuilds the MergePlan from this serializable representation. */
    def decode: MergePlan = MergePlan(update.decode, delete)
  }
}
| arturog8m/ocs | bundle/edu.gemini.sp.vcs/src/main/scala/edu/gemini/sp/vcs2/MergePlan.scala | Scala | bsd-3-clause | 5,156 |
package com.twitter.server.util
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.concurrent.Scheduler
object TwitterStats {

  /** Registers scheduler gauges ("productivity" and "dispatches") on the receiver. */
  def register(statsReceiver: StatsReceiver) = {
    val schedulerStats = statsReceiver.scope("scheduler")

    // Productivity is a very rough estimate of time spent not blocking: the
    // proportion of the threads' execution time spent on the CPU. It cannot
    // account for effects such as CPU scheduling.
    schedulerStats.provideGauge("productivity") {
      val cpuTime = Scheduler.cpuTime.toFloat
      val wallTime = Scheduler.wallTime.toFloat
      if (wallTime > 0F) cpuTime / wallTime else 0F
    }

    schedulerStats.provideGauge("dispatches") {
      Scheduler.numDispatches.toFloat
    }
  }
}
| cogitate/twitter-server-uuid | src/main/scala/com/twitter/server/util/TwitterStats.scala | Scala | apache-2.0 | 755 |
package org.locationtech.geomesa.jobs.mapred
import org.apache.accumulo.core.client.mapred.AccumuloInputFormat
import org.apache.accumulo.core.client.security.tokens.{AuthenticationToken, PasswordToken}
import org.apache.accumulo.core.security.Authorizations
import org.apache.hadoop.mapred.JobConf
import org.apache.log4j.Level
import org.locationtech.geomesa.accumulo.AccumuloVersion._
/**
 * Reflection-based adapter around Accumulo's mapred `InputFormatBase`, needed because the
 * relevant static configuration methods moved between Accumulo 1.5 and 1.6. Each public
 * method dispatches on the detected Accumulo version; unknown versions use the 1.6 path.
 */
object InputFormatBaseAdapter {

  def setConnectorInfo(job: JobConf, user: String, token: PasswordToken) = accumuloVersion match {
    case V15 => setConnectorInfo15(job, user, token)
    case V16 => setConnectorInfo16(job, user, token)
    case _   => setConnectorInfo16(job, user, token)
  }

  // Accumulo 1.5: the class is looked up by name since its binary signature differs.
  def setConnectorInfo15(job: JobConf, user: String, token: PasswordToken) = {
    val method = Class.forName("org.apache.accumulo.core.client.mapred.InputFormatBase")
      .getMethod("setConnectorInfo", classOf[JobConf], classOf[String], classOf[AuthenticationToken])
    method.invoke(null, job, user, token)
  }

  def setConnectorInfo16(job: JobConf, user: String, token: PasswordToken) = {
    val method = classOf[AccumuloInputFormat]
      .getMethod("setConnectorInfo", classOf[JobConf], classOf[String], classOf[AuthenticationToken])
    method.invoke(null, job, user, token)
  }

  def setZooKeeperInstance(job: JobConf, instance: String, zookeepers: String) = accumuloVersion match {
    case V15 => setZooKeeperInstance15(job, instance, zookeepers)
    case V16 => setZooKeeperInstance16(job, instance, zookeepers)
    case _   => setZooKeeperInstance16(job, instance, zookeepers)
  }

  def setZooKeeperInstance15(job: JobConf, instance: String, zookeepers: String) = {
    val method = Class.forName("org.apache.accumulo.core.client.mapred.InputFormatBase")
      .getMethod("setZooKeeperInstance", classOf[JobConf], classOf[String], classOf[String])
    method.invoke(null, job, instance, zookeepers)
  }

  def setZooKeeperInstance16(job: JobConf, instance: String, zookeepers: String) = {
    val method = classOf[AccumuloInputFormat]
      .getMethod("setZooKeeperInstance", classOf[JobConf], classOf[String], classOf[String])
    method.invoke(null, job, instance, zookeepers)
  }

  def setScanAuthorizations(job: JobConf, authorizations: Authorizations): Unit = accumuloVersion match {
    case V15 => setScanAuthorizations15(job, authorizations)
    case V16 => setScanAuthorizations16(job, authorizations)
    case _   => setScanAuthorizations16(job, authorizations)
  }

  // BUG FIX: the lookups below previously listed a spurious third classOf[String]
  // parameter, but setScanAuthorizations(JobConf, Authorizations) takes exactly two
  // parameters and invoke() only supplies two arguments - so getMethod would always
  // throw NoSuchMethodException at runtime.
  def setScanAuthorizations15(job: JobConf, authorizations: Authorizations): Unit = {
    val method = Class.forName("org.apache.accumulo.core.client.mapred.InputFormatBase")
      .getMethod("setScanAuthorizations", classOf[JobConf], classOf[Authorizations])
    method.invoke(null, job, authorizations)
  }

  def setScanAuthorizations16(job: JobConf, authorizations: Authorizations): Unit = {
    val method = classOf[AccumuloInputFormat]
      .getMethod("setScanAuthorizations", classOf[JobConf], classOf[Authorizations])
    method.invoke(null, job, authorizations)
  }

  def setLogLevel(job: JobConf, level: Level) = accumuloVersion match {
    case V15 => setLogLevel15(job, level)
    case V16 => setLogLevel16(job, level)
    case _   => setLogLevel16(job, level)
  }

  def setLogLevel15(job: JobConf, level: Level) = {
    val method = Class.forName("org.apache.accumulo.core.client.mapred.InputFormatBase")
      .getMethod("setLogLevel", classOf[JobConf], classOf[Level])
    method.invoke(null, job, level)
  }

  def setLogLevel16(job: JobConf, level: Level) = {
    val method = classOf[AccumuloInputFormat].getMethod("setLogLevel", classOf[JobConf], classOf[Level])
    method.invoke(null, job, level)
  }
}
| drackaer/geomesa | geomesa-jobs/src/main/scala/org/locationtech/geomesa/jobs/mapred/InputFormatBaseAdapter.scala | Scala | apache-2.0 | 3,758 |
/**
* Copyright 2014 Jorge Aliss (jaliss at gmail dot com) - twitter: @jaliss
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package securesocial.core
import play.api.mvc.{Cookies, Session, SimpleResult}
import play.api.http.HeaderNames
import securesocial.core.authenticator.Authenticator
/**
* Utility methods
*/
object utils {
/**
* Helper methods for SimpleResult
* @param r a SimpleResult instance
*/
implicit class SimpleResultMethods(val r: SimpleResult) {
def startingAuthenticator[A](authenticator: Authenticator[A]) = authenticator.starting(r)
def discardingAuthenticator[A](authenticator: Authenticator[A]) = authenticator.discarding(r)
def touchingAuthenticator[A](authenticator: Authenticator[A]) = authenticator.touching(r)
def addToSession(values: (String, String)*) = {
val cookies = Cookies(r.header.headers.get(HeaderNames.SET_COOKIE))
val resultSession = Session.decodeFromCookie(cookies.get(Session.COOKIE_NAME))
def addValues(list: List[(String, String)], session: Session): Session = {
if ( list.isEmpty) session else {
val s = session + list.head
addValues(list.tail, s)
}
}
r.withSession(addValues(values.toList, resultSession))
}
}
} | matthewchartier/securesocial | module-code/app/securesocial/core/utils.scala | Scala | apache-2.0 | 1,783 |
/*
* This software is licensed under the GNU Affero General Public License, quoted below.
*
* This file is a part of PowerAPI.
*
* Copyright (C) 2011-2014 Inria, University of Lille 1.
*
* PowerAPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* PowerAPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PowerAPI.
*
* If not, please consult http://www.gnu.org/licenses/agpl-3.0.html.
*/
package org.powerapi.core.power
import scala.concurrent.duration._
import org.apache.logging.log4j.LogManager
import RawPower._
object Power {
  /** Creates a power value from a magnitude and a typed unit. */
  def apply(value: Double, unit: PowerUnit): Power = new RawPower(value, unit)
  /** Creates a power value, resolving the unit from its string name. */
  def apply(value: Double, unit: String): Power = new RawPower(value, PowerUnitSystem(unit))
  /** Converts an energy in joules over a duration into an average power in watts. */
  def fromJoule(joule: Double, duration: FiniteDuration = 1.second) = new RawPower(joule / (duration.toMillis / 1000.0), WATTS)
  /**
   * The natural ordering of powers matches the natural ordering for Double.
   */
  implicit object PowerIsOrdered extends Ordering[Power] {
    def compare(a: Power, b: Power) = a compare b
  }
}
/**
 * A power value: a magnitude paired with a [[PowerUnit]].
 *
 * Implementations provide conversions to the standard units and closed
 * arithmetic (`+`, `-`, scaling). Ordering compares magnitudes after
 * conversion to a common unit.
 */
trait Power extends Ordered[Power] {
  def value: Double
  def unit: PowerUnit
  def toMilliWatts: Double
  def toWatts: Double
  def toKiloWatts: Double
  def toMegaWatts: Double
  // Converts this power's magnitude into the given unit.
  def toUnit(unit: PowerUnit): Double
  def +(other: Power): Power
  def -(other: Power): Power
  def *(factor: Double): Power
  def /(divisor: Double): Power
  def min(other: Power): Power = if (this < other) this else other
  def max(other: Power): Power = if (this > other) this else other

  // Java API: word-named aliases for the symbolic operators above.
  def div(divisor: Double) = this / divisor
  def gt(other: Power) = this > other
  def gteq(other: Power) = this >= other
  def lt(other: Power) = this < other
  def lteq(other: Power) = this <= other
  def minus(other: Power) = this - other
  def mul(factor: Double) = this * factor
  def plus(other: Power) = this + other
}
object RawPower {
  implicit object RawPowerIsOrdered extends Ordering[RawPower] {
    def compare(a: RawPower, b: RawPower) = a compare b
  }
  def apply(value: Double, unit: PowerUnit) = new RawPower(value, unit)
  def apply(value: Double, unit: String) = new RawPower(value, PowerUnitSystem(unit))

  // limit on abs. value of powers in their units
  // Each bound is Double.MaxValue expressed in that unit, so any accepted
  // value remains representable once converted to milliwatts.
  private final val max_mw = Double.MaxValue
  private final val max_w = max_mw / 1000.0
  private final val max_kw = max_w / 1000.0
  private final val max_Mw = max_kw / 1000.0
}
/**
* Defines a power value.
*
* @author Loïc Huertas <l.huertas.pro@gmail.com>
* @author Romain Rouvoy <romain.rouvoy@univ-lille1.fr>
*/
final class RawPower(val value: Double, val unit: PowerUnit) extends Power {

  // True when `value` lies in [0, max]; used by the construction guard below.
  private[this] def bounded(max: Double) = 0.0 <= value && value <= max

  // Reject negative powers and magnitudes that would overflow Double once
  // expressed in milliwatts. Unknown units are normalized to megawatts first.
  require(unit match {
    case MILLIWATTS => bounded(max_mw)
    case WATTS => bounded(max_w)
    case KILOWATTS => bounded(max_kw)
    case MEGAWATTS => bounded(max_Mw)
    case _ =>
      val v = MEGAWATTS.convert(value, unit)
      0.0 <= v && v <= max_Mw
  }, "Power value is limited to 1.79e308 mW and cannot be negative")

  def toMilliWatts = unit.toMilliWatts(value)
  def toWatts = unit.toWatts(value)
  def toKiloWatts = unit.toKiloWatts(value)
  def toMegaWatts = unit.toMegaWatts(value)
  def toUnit(u: PowerUnit) = toMilliWatts / MILLIWATTS.convert(1, u)

  override def toString() = s"$value $unit"

  // Powers compare by magnitude in a common unit (milliwatts).
  def compare(other: Power) = toMilliWatts compare other.toMilliWatts

  // Addition that rejects Double overflow and results below zero.
  private[this] def safeAdd(a: Double, b: Double): Double = {
    if ((b > 0.0) && (a > Double.MaxValue - b)) throw new IllegalArgumentException("double overflow")
    if ((b < 0.0) && (a < -b)) throw new IllegalArgumentException("negative power cannot exists")
    a + b
  }

  // Adds (or, with a negated argument, subtracts) another magnitude, keeping
  // the smaller of the two units so no precision is lost in the conversion.
  private[this] def add(otherValue: Double, otherUnit: PowerUnit): Power = {
    val commonUnit = if (otherUnit.convert(1, unit) < 1.0) unit else otherUnit
    val resultValue = safeAdd(commonUnit.convert(value, unit), commonUnit.convert(otherValue, otherUnit))
    new RawPower(resultValue, commonUnit)
  }

  def +(other: Power) = add(other.value, other.unit)
  def -(other: Power) = add(-other.value, other.unit)

  // Validates a multiplication/division result before wrapping it.
  private[this] def safeMul(a: Double): Double = {
    if (a.isInfinite) throw new IllegalArgumentException("multiplication's result is an infinite value")
    if (a.isNaN) throw new IllegalArgumentException("multiplication's result is an undefined value")
    if (a > Double.MaxValue) throw new IllegalArgumentException("double overflow")
    if (a < 0.0) throw new IllegalArgumentException("negative power cannot exists")
    a
  }

  def *(factor: Double) = new RawPower({
      if (factor.isInfinite || factor.isNaN) throw new IllegalArgumentException("factor must be a finite and defined value")
      else safeMul(value * factor)
    }, unit
  )

  def /(divisor: Double) = new RawPower({
      if (divisor.isInfinite || divisor.isNaN) throw new IllegalArgumentException("divisor must be a finite and defined value")
      else safeMul(value / divisor)
    }, unit
  )

  override def equals(other: Any) = other match {
    case x: RawPower => toMilliWatts == x.toMilliWatts
    case _ => super.equals(other)
  }

  // equals compares magnitudes in milliwatts (so 1 W == 1000 mW); hashCode
  // must agree, otherwise equal powers hash differently in sets/maps.
  override def hashCode(): Int = toMilliWatts.hashCode
}
| rouvoy/powerapi | powerapi-core/src/main/scala/org/powerapi/core/power/Power.scala | Scala | agpl-3.0 | 5,697 |
package com.crobox.clickhouse.dsl.column
import com.crobox.clickhouse.dsl._
import com.crobox.clickhouse.{DslIntegrationSpec, dsl => CHDsl}
class MiscellaneousFunctionsTest extends DslIntegrationSpec {

  // Runs each DSL miscellaneous function against a live ClickHouse instance
  // (via DslIntegrationSpec's `r`) and checks the textual result.
  it should "succeed for MiscFunctions" in {
    // 1/0 evaluates to +inf in ClickHouse; used to probe isFinite/isInfinite.
    val inf = const(1) / 0

    r(hostName()).length should be > 4
    r(visibleWidth("1")) shouldBe "1"
    r(toTypeName(toUInt64(1))) shouldBe "UInt64"
    r(blockSize()) shouldBe "1"
    r(materialize(1)) shouldBe "1"
    r(CHDsl.ignore()) shouldBe "0"
    r(sleep(0.1)) shouldBe "0"
    r(currentDatabase()) shouldBe "default"
    r(isFinite(inf)) shouldBe "0"
    r(isInfinite(inf)) shouldBe "1"
    r(isNaN(0)) shouldBe "0"
    r(hasColumnInTable("system", "one", "dummy")) shouldBe "1"
    r(bar(1, 0, 100, None)) shouldBe "▋"
    r(transform[Int, String](1, Seq(3, 2, 1), Seq("do", "re", "mi"), "fa")) shouldBe "mi"
    r(formatReadableSize(1)) shouldBe "1.00 B"
    r(least(3, 2)) shouldBe "2"
    r(greatest(3, 2)) shouldBe "3"
    // Environment-dependent values: only sanity-check their length.
    r(uptime()).length should be > 0
    r(version()).length should be > 4
    r(rowNumberInAllBlocks()) shouldBe "0"
    r(runningDifference(1)) shouldBe "0"
    r(mACNumToString(toUInt64(123))) shouldBe "00:00:00:00:00:7B"
    r(mACStringToNum("00:00:00:00:00:7B")) shouldBe "123"
    r(mACStringToOUI("00:00:00:00:00:7B")) shouldBe "0"
  }
}
| crobox/clickhouse-scala-client | dsl/src/test/scala/com/crobox/clickhouse/dsl/column/MiscellaneousFunctionsTest.scala | Scala | lgpl-3.0 | 1,335 |
package lore.compiler.types
/**
* A type path describes the path that must be taken to get to a specific subterm of a type. Type paths are used to
* deduce open type arguments.
*
* Targets inside sum and intersection types cannot be reached via type paths because these types are inherently
* unordered.
*/
case class TypePath(steps: Vector[TypePath.Step])

object TypePath {

  // One navigation step into a composite type's structure.
  sealed trait Step
  case class TupleElement(index: Int) extends Step
  case object FunctionInput extends Step
  case object FunctionOutput extends Step
  case object ListElement extends Step
  case object MapKey extends Step
  case object MapValue extends Step
  case class ShapeProperty(name: String) extends Step
  case class TypeArgument(schema: DeclaredSchema, index: Int) extends Step

  /**
   * Get all type paths that lead to `target` from `origin`.
   */
  def of(origin: Type, target: Type): Vector[TypePath] = {
    getSteps(origin, target).map(TypePath.apply)
  }

  /**
   * This function allows us to compute [[of]] without recreating countless instances of [[TypePath]].
   */
  private def getSteps(origin: Type, target: Type): Vector[Vector[Step]] = {
    // Base case: an exact match is reached via the empty path.
    if (origin == target) {
      return Vector(Vector.empty)
    }

    // Each possible branch in `origin` is tried and followed until `target` is found. If `target` cannot be found, the
    // list of paths for the branch is empty and the branch is ignored.
    // `getStep` is by-name so the Step is only allocated for successful branches.
    def handleBranch(subOrigin: Type, getStep: => Step) = {
      val possibilities = getSteps(subOrigin, target)
      if (possibilities.nonEmpty) {
        val step = getStep
        possibilities.map(steps => step +: steps)
      } else Vector.empty
    }

    // Recurse into every structural position of the origin type. Sum and
    // intersection types are intentionally absent (see the class comment).
    origin match {
      case TupleType(elements) => elements.zipWithIndex.flatMap {
        case (element, index) => handleBranch(element, TupleElement(index))
      }
      case FunctionType(input, output) => handleBranch(input, FunctionInput) ++ handleBranch(output, FunctionOutput)
      case ListType(element) => handleBranch(element, ListElement)
      case MapType(key, value) => handleBranch(key, MapKey) ++ handleBranch(value, MapValue)
      case ShapeType(properties) => properties.values.toVector.flatMap { property =>
        handleBranch(property.tpe, ShapeProperty(property.name))
      }
      case dt: DeclaredType => dt.typeArguments.zipWithIndex.flatMap {
        case (typeArgument, index) => handleBranch(typeArgument, TypeArgument(dt.schema, index))
      }
      case _ => Vector.empty
    }
  }
}
| marcopennekamp/lore | compiler/src/lore/compiler/types/TypePath.scala | Scala | mit | 2,513 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils
import java.nio.file.Paths
import com.google.protobuf.GeneratedMessage
import com.intel.analytics.bigdl.dllib.models.resnet.Convolution
import com.intel.analytics.bigdl.dllib.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.dllib.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.dllib.nn._
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import org.scalatest.{FlatSpec, Matchers}
import com.intel.analytics.bigdl.numeric.NumericDouble
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.caffe.{CaffeConversionException, CaffeLoader, CaffePersister, Customizable}
import scala.collection.mutable
import scala.reflect.ClassTag
import scala.util.Random
// Round-trip tests for persisting BigDL modules into Caffe's
// prototxt/caffemodel format and reloading them.
class CaffePersisterSpec extends FlatSpec with Matchers{

  // Pre-built caffe fixtures shipped with the test resources.
  val resource = getClass().getClassLoader().getResource("caffe")
  val prototxt = Paths.get(TestUtils.processPath(resource.getPath()), "test.prototxt").toString
  val modelPath = Paths.get(TestUtils.processPath(resource.getPath()), "test.caffemodel").toString
  val savedprototxt = Paths.get(TestUtils.processPath(resource.getPath()),
    "test_persist.prototxt").toString
  val savedmodelPath = Paths.get(TestUtils.processPath(resource.getPath()),
    "test_persist.caffemodel").toString

  // Maps unknown "DUMMY" caffe layers to an Identity module so the fixture
  // prototxt can be loaded without a real converter.
  private class LoadDummy[T: ClassTag](implicit ev: TensorNumeric[T]) extends Customizable[T] {
    override def convertor(layer: GeneratedMessage): Seq[ModuleNode[T]] = {
      Seq(Identity[T].setName("Dummy").inputs())
    }
  }

  val convertMap = new mutable.HashMap[String, Customizable[Double]]()
  convertMap("DUMMY") = new LoadDummy[Double]

  "Save graph module" should "Works properly" in {
    // conv1 -> conv2 -> view -> ip, persisted as a Graph.
    val convolution1 = new ModuleNode(Convolution(3, 4, 2, 2).
      setName("conv1").asInstanceOf[AbstractModule[Activity, Activity, Double]])
    val convolution2 = new ModuleNode(Convolution(4, 3, 2, 2).setName("conv2")
      .asInstanceOf[AbstractModule[Activity, Activity, Double]])
    val view = new ModuleNode(View(27).setName("view")
      .asInstanceOf[AbstractModule[Activity, Activity, Double]])
    val ip = new ModuleNode(Linear(2, 27, withBias = false).setName("ip")
      .asInstanceOf[AbstractModule[Activity, Activity, Double]])
    convolution1 -> convolution2
    convolution2 -> view
    view -> ip
    val module = Graph(convolution1, ip)
    CaffePersister.persist("/tmp/test.prototxt", "/tmp/test.caffemodel", module, overwrite = true)
  }

  "Persist V1 module" should "works properly" in {
    // useV2 = false exercises the legacy caffe V1LayerParameter format.
    val module = new ModuleNode(Linear[Double](100, 10).
      setName("simple linear").asInstanceOf[AbstractModule[Activity, Activity, Double]])
    val graph = Graph(module, module)
    CaffePersister.persist("/tmp/v1.prototxt", "/tmp/v1.caffemodel",
      graph, useV2 = false, overwrite = true)
  }

  "Persist V2 module" should "works properly" in {
    val module = new ModuleNode(Linear[Double](100, 10).
      setName("simple linear").asInstanceOf[AbstractModule[Activity, Activity, Double]])
    val graph = Graph(module, module)
    CaffePersister.persist("/tmp/v2.prototxt", "/tmp/v2.caffemodel",
      graph, useV2 = true, overwrite = true)
  }

  "A saved module" should "have same result as pre-saved one" in {
    // Identical random input fed to both models must yield identical output.
    val input1 = Tensor[Double](1, 3, 5, 5).apply1( e => Random.nextDouble())
    val input2 = Tensor[Double]()
    input2.resizeAs(input1).copy(input1)
    val preSavedModule = CaffeLoader.loadCaffe(prototxt, modelPath,
      customizedConverters = convertMap)._1
    val savedModule = CaffeLoader.loadCaffe(savedprototxt, savedmodelPath)._1
    val preSavedResult = preSavedModule.forward(input1)
    val savedResult = savedModule.forward(input2)
    preSavedResult should be (savedResult)
  }

  "Save sequantial module" should "throw exception" in {
    // Only Graph containers are persistable; Sequential must be rejected.
    val module = Sequential()
      .add(Convolution(3, 4, 2, 2).setName("conv"))
      .add(Convolution(4, 3, 2, 2).setName("conv2"))
      .add(View(27)).setName("view")
      .add(Linear(2, 27, withBias = false).setName("ip"))
    intercept[CaffeConversionException] {
      CaffePersister.persist("/tmp/test.prototxt", "/tmp/test.caffemodel", module)
    }
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/utils/CaffePersisterSpec.scala | Scala | apache-2.0 | 4,838 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.oap.execution
import com.intel.oap.ColumnarPluginConfig
import com.intel.oap.expression._
import com.intel.oap.vectorized._
import java.util.concurrent.TimeUnit._
import org.apache.spark.TaskContext
import org.apache.spark.memory.{SparkOutOfMemoryError, TaskMemoryManager}
import org.apache.spark.rdd.RDD
import org.apache.spark.util.{UserAddedJarUtils, Utils, ExecutorManager}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.errors._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.BindReferences.bindReferences
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.plans.physical._
import org.apache.spark.sql.catalyst.util.DateTimeUtils._
import org.apache.spark.sql.catalyst.util.truncatedString
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.aggregate.HashAggregateExec
import org.apache.spark.sql.execution.datasources.v2.arrow.SparkMemoryUtils
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
import org.apache.spark.sql.execution.vectorized.MutableColumnarRow
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.vectorized.{ColumnarBatch, ColumnVector}
import org.apache.spark.sql.types.{DecimalType, StringType, StructType}
import org.apache.spark.unsafe.KVIterator
import scala.collection.Iterator
/**
* Columnar Based HashAggregateExec.
*/
/**
 * Columnar replacement for Spark's [[HashAggregateExec]]: aggregates Arrow
 * batches natively, optionally through a pre-compiled (codegen) jar when
 * grouping keys are present and codegen is enabled.
 */
class ColumnarHashAggregateExec(
    requiredChildDistributionExpressions: Option[Seq[Expression]],
    groupingExpressions: Seq[NamedExpression],
    aggregateExpressions: Seq[AggregateExpression],
    aggregateAttributes: Seq[Attribute],
    initialInputBufferOffset: Int,
    resultExpressions: Seq[NamedExpression],
    child: SparkPlan)
    extends HashAggregateExec(
      requiredChildDistributionExpressions,
      groupingExpressions,
      aggregateExpressions,
      aggregateAttributes,
      initialInputBufferOffset,
      resultExpressions,
      child) {

  val sparkConf = sparkContext.getConf
  val numaBindingInfo = ColumnarPluginConfig.getConf(sparkContext.getConf).numaBindingInfo
  override def supportsColumnar = true

  // Disable code generation
  override def supportCodegen: Boolean = false

  override lazy val metrics = Map(
    "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
    "numOutputBatches" -> SQLMetrics.createMetric(sparkContext, "output_batches"),
    "numInputBatches" -> SQLMetrics.createMetric(sparkContext, "input_batches"),
    "aggTime" -> SQLMetrics.createTimingMetric(sparkContext, "time in aggregation process"),
    "totalTime" -> SQLMetrics
      .createTimingMetric(sparkContext, "totaltime_hashagg"))

  val numOutputRows = longMetric("numOutputRows")
  val numOutputBatches = longMetric("numOutputBatches")
  val numInputBatches = longMetric("numInputBatches")
  val aggTime = longMetric("aggTime")
  val totalTime = longMetric("totalTime")
  numOutputRows.set(0)
  numOutputBatches.set(0)
  numInputBatches.set(0)

  // Pre-compiles the group-by aggregation kernel at planning time (when
  // codegen is enabled and there are grouping keys) and registers the
  // resulting jar with the SparkContext so executors can fetch it.
  // `signature` identifies the compiled kernel; empty means "no codegen".
  val (listJars, signature): (Seq[String], String) =
    if (ColumnarPluginConfig
          .getConf(sparkConf)
          .enableCodegenHashAggregate && groupingExpressions.nonEmpty) {
      var signature: String = ""
      try {
        signature = ColumnarGroupbyHashAggregation.prebuild(
          groupingExpressions,
          child.output,
          aggregateExpressions,
          aggregateAttributes,
          resultExpressions,
          output,
          numInputBatches,
          numOutputBatches,
          numOutputRows,
          aggTime,
          totalTime,
          sparkConf)
      } catch {
        // Replaceable expressions cannot be codegen'd; fall back silently to
        // the non-codegen path (signature stays empty).
        case e: UnsupportedOperationException
            if e.getMessage == "Unsupport to generate native expression from replaceable expression." =>
          logWarning(e.getMessage())
        case e: Throwable =>
          throw e
      }
      if (signature != "") {
        if (sparkContext.listJars.filter(path => path.contains(s"${signature}.jar")).isEmpty) {
          val tempDir = ColumnarPluginConfig.getRandomTempDir
          val jarFileName =
            s"${tempDir}/tmp/spark-columnar-plugin-codegen-precompile-${signature}.jar"
          sparkContext.addJar(jarFileName)
        }
        (sparkContext.listJars.filter(path => path.contains(s"${signature}.jar")), signature)
      } else {
        (List(), "")
      }
    } else {
      (List(), "")
    }
  listJars.foreach(jar => logInfo(s"Uploaded ${jar}"))

  override def doExecuteColumnar(): RDD[ColumnarBatch] = {
    child.executeColumnar().mapPartitionsWithIndex { (partIndex, iter) =>
      ExecutorManager.tryTaskSet(numaBindingInfo)
      val hasInput = iter.hasNext
      val res = if (!hasInput) {
        // This is a grouped aggregate and the input iterator is empty,
        // so return an empty iterator.
        Iterator.empty
      } else {
        if (ColumnarPluginConfig
              .getConf(sparkConf)
              .enableCodegenHashAggregate && groupingExpressions.nonEmpty) {
          // Codegen path: fetch the pre-compiled kernel jar onto the executor
          // and run the group-by aggregation through it.
          val execTempDir = ColumnarPluginConfig.getTempFile
          val jarList = listJars
            .map(jarUrl => {
              logInfo(s"HashAggregate Get Codegened library Jar ${jarUrl}")
              UserAddedJarUtils.fetchJarFromSpark(
                jarUrl,
                execTempDir,
                s"spark-columnar-plugin-codegen-precompile-${signature}.jar",
                sparkConf)
              s"${execTempDir}/spark-columnar-plugin-codegen-precompile-${signature}.jar"
            })
          val aggregation = ColumnarGroupbyHashAggregation.create(
            groupingExpressions,
            child.output,
            aggregateExpressions,
            aggregateAttributes,
            resultExpressions,
            output,
            jarList,
            numInputBatches,
            numOutputBatches,
            numOutputRows,
            aggTime,
            totalTime,
            sparkConf)
          SparkMemoryUtils.addLeakSafeTaskCompletionListener[Unit](_ => {
            aggregation.close()
          })
          new CloseableColumnBatchIterator(aggregation.createIterator(iter))
        } else {
          // Interpreted (non-codegen) aggregation path.
          val aggregation = ColumnarAggregation.create(
            partIndex,
            groupingExpressions,
            child.output,
            aggregateExpressions,
            aggregateAttributes,
            resultExpressions,
            output,
            numInputBatches,
            numOutputBatches,
            numOutputRows,
            aggTime,
            totalTime,
            sparkConf)
          SparkMemoryUtils.addLeakSafeTaskCompletionListener[Unit](_ => {
            aggregation.close()
          })
          new CloseableColumnBatchIterator(aggregation.createIterator(iter))
        }
      }
      res
    }
  }

  override def canEqual(other: Any): Boolean = other.isInstanceOf[ColumnarHashAggregateExec]

  override def equals(other: Any): Boolean = other match {
    case that: ColumnarHashAggregateExec =>
      (that canEqual this) && super.equals(that)
    case _ => false
  }

  override def verboseString(maxFields: Int): String = toString(verbose = true, maxFields)

  override def simpleString(maxFields: Int): String = toString(verbose = false, maxFields)

  private def toString(verbose: Boolean, maxFields: Int): String = {
    val allAggregateExpressions = aggregateExpressions

    val keyString = truncatedString(groupingExpressions, "[", ", ", "]", maxFields)
    val functionString = truncatedString(allAggregateExpressions, "[", ", ", "]", maxFields)
    val outputString = truncatedString(output, "[", ", ", "]", maxFields)
    if (verbose) {
      s"ColumnarHashAggregate(keys=$keyString, functions=$functionString, output=$outputString)"
    } else {
      s"ColumnarHashAggregate(keys=$keyString, functions=$functionString)"
    }
  }
}
| Intel-bigdata/OAP | oap-native-sql/core/src/main/scala/com/intel/oap/execution/ColumnarHashAggregateExec.scala | Scala | apache-2.0 | 8,843 |
package com.azavea.opentransit.indicators.calculators
import org.joda.time.Seconds
import com.azavea.gtfs._
import com.azavea.opentransit._
import com.azavea.opentransit.indicators._
import com.azavea.opentransit.indicators.parameters._
/**
* This indicator calculates the average deviation between
* arrival times predicted and actually observed (in minutes)
**/
class OnTimePerformance(params: ObservedStopTimes)
    extends Indicator
       with AggregatesByAll {
  type Intermediate = Seq[Double]

  val name = "on_time_perf"

  def calculation(period: SamplePeriod) = {
    // Per trip: absolute gap between scheduled and observed arrival, in seconds.
    def map(trip: Trip): Seq[Double] =
      params.observedStopsByTrip(trip.id).map { case (sched, obs) =>
        Seconds.secondsBetween(sched.arrivalTime, obs.arrivalTime).getSeconds.abs.toDouble
      }

    // Across all trips: mean deviation, converted from seconds to minutes.
    def reduce(timeDeltas: Seq[Seq[Double]]): Double = {
      val deviations = timeDeltas.flatten
      if (deviations.isEmpty) 0.0
      else (deviations.sum / deviations.size) / 60 // div60 for minutes
    }

    perTripCalculation(map, reduce)
  }
}
| flibbertigibbet/open-transit-indicators | scala/opentransit/src/main/scala/com/azavea/opentransit/indicators/calculators/OnTimePerformance.scala | Scala | gpl-3.0 | 1,146 |
package io.taig.android
// Aggregates the implicit conversions/extensions of every toolbelt module
// into one mix-in trait.
trait implicits
    extends app.implicits
    // with compatibility.all
    with concurrent.implicits
    with context.implicits
    with functional.implicits
    with graphic.implicits
    with intent.implicits
    with log.implicits
    with monix.implicits
    with resource.implicits
    with system_service.implicits
    with unit.implicits
    with util.implicits
    with widget.implicits

// Importable entry point: `import io.taig.android.implicits._`
object implicits extends implicits
| Taig/Toolbelt | src/main/scala/io/taig/android/implicits.scala | Scala | mit | 457 |
package com.reactific.jfxtend
import org.specs2.mutable.{Before, Specification}
import javafx.application.Application
import javafx.stage.Stage
/**
 * JavaFX [[Application]] that captures the primary stage and signals the
 * waiting test thread through `barrier`.
 *
 * NOTE(review): `Application.launch` instantiates the application class via
 * its no-arg constructor; this class only has a constructor taking `barrier`,
 * so the launched instance may not be constructible — verify against the
 * JavaFX version in use.
 */
class JavaFXInitializer(val barrier : Object) extends Application {
  var stage : Stage = null

  def start(primaryStage : Stage) : Unit = {
    stage = primaryStage
    // notify() requires holding the monitor of the object being notified.
    // The original synchronized on `this` while notifying `barrier`, which
    // throws IllegalMonitorStateException at runtime.
    barrier.synchronized {
      barrier.notify()
    }
  }
}
/** Unit Tests For JFXtendSpecification */
/**
 * Base specification that boots the JavaFX runtime on a daemon thread before
 * each example and exposes the primary stage (if captured).
 */
abstract class JFXtendSpecification extends Specification with Before {
  // Set in before(); stays null until JavaFX delivers the primary stage.
  private var _stage : Stage = null

  /** The primary stage, when JavaFX initialization has completed. */
  def stage : Option[Stage] = Option(_stage)

  def before : Any = {
    val barrier = new Object()
    val initializer = new JavaFXInitializer(barrier)
    val task = new Runnable {
      def run : Unit = {
        Application.launch(classOf[JavaFXInitializer])
      }
    }
    val thread = new Thread(task, "JavaFX Init Thread")
    thread.setDaemon(true)
    thread.start()
    // wait() must be called while holding the monitor of the object waited
    // on. The original synchronized on `initializer` while waiting on
    // `barrier`, which throws IllegalMonitorStateException.
    barrier.synchronized {
      barrier.wait()
    }
    // NOTE(review): Application.launch creates its own application instance,
    // so `initializer` is likely never the launched one and its `stage` may
    // remain null; a notify before this wait would also hang — verify and
    // consider a CountDownLatch held in a companion object instead.
    _stage = initializer.stage
    success
  }
}
| reactific/jfxtensions | src/test/scala/com/reactific/jfxtend/JFXtendSpecification.scala | Scala | apache-2.0 | 1,013 |
// Value class whose public method delegates to a private[this] method —
// exercises correct compilation of such calls in value classes.
final class Foo(val i: Int) extends AnyVal {
  def foo() = go(i)
  private[this] def go(i: Int) = i * 2
}
object Test {
  def main(args: Array[String]): Unit = {
    // foo() forwards to go(1), which doubles its argument.
    assert(new Foo(1).foo() == 2)
  }
}
| yusuke2255/dotty | tests/run/t7019.scala | Scala | bsd-3-clause | 203 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.execution.Ack.Continue
import monix.reactive.{Observable, Observer}
import scala.concurrent.duration.Duration.Zero
// Operator suite for Observable.maxBy: a range 0..n keyed by x+1 must emit
// exactly one element, the maximum n.
object MaxBySuite extends BaseOperatorSuite {
  def createObservable(sourceCount: Int): Option[Sample] = Some {
    val o = Observable.range(0, sourceCount+1).maxBy[Long](x => x + 1)
    Sample(o, count(sourceCount), sum(sourceCount), Zero, Zero)
  }

  // maxBy always emits a single element ...
  def count(sourceCount: Int): Int = 1
  // ... whose value is the maximum of the range, i.e. sourceCount.
  def sum(sourceCount: Int): Int = sourceCount

  // An upstream error must propagate before any element is emitted.
  def observableInError(sourceCount: Int, ex: Throwable): Option[Sample] = Some {
    val o = Observable.range(0, sourceCount).endWithError(ex).maxBy(x => x)
    Sample(o, 0, 0, Zero, Zero)
  }

  // A throwing key selector must surface as a stream error.
  def brokenUserCodeObservable(sourceCount: Int, ex: Throwable): Option[Sample] = {
    val o = Observable.range(0, sourceCount+1).maxBy[Long](x => throw ex)
    Some(Sample(o, 0, 0, Zero, Zero))
  }

  override def cancelableObservables(): Seq[Sample] = {
    import scala.concurrent.duration._
    val o = Observable.now(1L).delayOnNext(1.second).maxBy(x => x)
    Seq(Sample(o,0,0,0.seconds,0.seconds))
  }

  test("empty observable should be empty") { implicit s =>
    val source: Observable[Long] = Observable.empty
    var received = 0
    var wasCompleted = false

    // maxBy over an empty source must complete without emitting.
    source.maxBy(x => 100 - x).unsafeSubscribeFn(new Observer[Long] {
      def onNext(elem: Long) = { received += 1; Continue }
      def onError(ex: Throwable) = ()
      def onComplete() = { wasCompleted = true }
    })

    assertEquals(received, 0)
    assert(wasCompleted)
  }
} | Wogan/monix | monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/MaxBySuite.scala | Scala | apache-2.0 | 2,238 |
package com.jakehschwartz.finatra.swagger
import io.swagger.v3.oas.models._
import io.swagger.v3.oas.models.media.{Content, MediaType}
import io.swagger.v3.oas.models.parameters._
import io.swagger.v3.oas.models.responses.{ApiResponse, ApiResponses}
import io.swagger.v3.oas.models.security.SecurityRequirement
import scala.jdk.CollectionConverters._
import scala.reflect.runtime.universe._
object FinatraOperation {
  // Implicit view letting callers invoke the builder helpers directly on a
  // swagger Operation.
  implicit def convert(operation: Operation): FinatraOperation = new FinatraOperation(operation)
}
/**
 * Fluent, Scala-friendly builder helpers over swagger's [[Operation]].
 *
 * Every method registers the relevant schema(s) on the implicit [[OpenAPI]]
 * instance, mutates the wrapped operation and returns it for chaining.
 */
class FinatraOperation(operation: Operation) {
  import FinatraSwagger._

  /**
   * Shared implementation for the path/query/header/cookie helpers below:
   * registers `T`'s schema, fills in the common parameter fields and attaches
   * the parameter to the operation.
   */
  private def addParam[T: TypeTag](param: Parameter,
                                   name: String,
                                   description: String,
                                   required: Boolean)
                                  (implicit openAPI: OpenAPI): Operation = {
    param
      .name(name)
      .description(description)
      .required(required)
      .schema(openAPI.registerModel[T])
    operation.addParametersItem(param)
  }

  /** Declares a path parameter of type `T` (required by default). */
  def pathParam[T: TypeTag](name: String, description: String = "", required: Boolean = true)
                           (implicit openAPI: OpenAPI): Operation =
    addParam[T](new PathParameter(), name, description, required)

  /** Replaces the operation's parameters with those derived from `T`'s fields. */
  def request[T <: Product : TypeTag](implicit openAPI: OpenAPI): Operation = {
    operation.setParameters(openAPI.register[T].asJava)
    operation
  }

  /** Declares a query parameter of type `T` (required by default). */
  def queryParam[T: TypeTag](name: String, description: String = "", required: Boolean = true)
                            (implicit openAPI: OpenAPI): Operation =
    addParam[T](new QueryParameter(), name, description, required)

  /** Declares a header parameter of type `T` (required by default). */
  def headerParam[T: TypeTag](name: String, description: String = "", required: Boolean = true)
                             (implicit openAPI: OpenAPI): Operation =
    addParam[T](new HeaderParameter(), name, description, required)

  /** Declares a cookie parameter of type `T` (required by default). */
  def cookieParam[T: TypeTag](name: String, description: String = "", required: Boolean = true)
                             (implicit openAPI: OpenAPI): Operation =
    addParam[T](new CookieParameter(), name, description, required)

  /** Declares a JSON request body of type `T`, optionally with an example value. */
  def bodyParam[T: TypeTag](description: String = "", example: Option[T] = None)
                           (implicit openAPI: OpenAPI): Operation = {
    val mediaType = new MediaType().schema(openAPI.registerModel[T])
    example.foreach(mediaType.example)
    val content = new Content().addMediaType("application/json", mediaType)
    operation.requestBody(new RequestBody().content(content).description(description))
  }

  /** Declares a response for `status` with body type `T` under `contentType`. */
  def responseWith[T: TypeTag](status: Int,
                               description: String = "",
                               contentType: String = "",
                               example: Option[T] = None)
                              (implicit openAPI: OpenAPI): Operation = {
    val schema = openAPI.registerModel[T]

    // //todo not working, sample is not in the generated api, waiting for swagger fix
    example.foreach { e =>
      if (schema != null) {
        schema.setExample(e)
      }
    }

    val mediaType = new MediaType().schema(schema)
    example.foreach(mediaType.example)
    val content = new Content().addMediaType(contentType, mediaType)
    val apiResponse = new ApiResponse()
      .description(description)
      .content(content)

    // Reuse any responses container created by earlier responseWith calls so
    // multiple status codes accumulate on the same operation.
    val responses = Option(operation.getResponses).getOrElse(new ApiResponses())
    responses.addApiResponse(status.toString, apiResponse)
    operation.responses(responses)
  }

  /** Attaches a security requirement (scheme name + scopes) to the operation. */
  def addSecurity(name: String, scopes: List[String]): Operation =
    operation.addSecurityItem(new SecurityRequirement().addList(name, scopes.asJava))

  /** Convenience for tagging with a single tag. */
  def tag(tag: String): Operation = tags(List(tag))

  /** Replaces the operation's tag list. */
  def tags(tags: List[String]): Operation = {
    operation.setTags(tags.asJava)
    operation
  }
}
| jakehschwartz/finatra-swagger | src/main/scala/com/jakehschwartz/finatra/swagger/FinatraOperation.scala | Scala | apache-2.0 | 4,230 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.consumers
import monix.execution.Callback
import monix.execution.{Cancelable, Scheduler}
import monix.execution.cancelables.{AssignableCancelable, SingleAssignCancelable}
import monix.reactive.{Consumer, Observer}
import monix.reactive.observers.Subscriber
import scala.util.{Failure, Success, Try}
/** Implementation for [[monix.reactive.Consumer.create]]. */
private[reactive]
final class CreateConsumer[-In,+Out]
  (f: (Scheduler, Cancelable, Callback[Throwable, Out]) => Observer[In])
  extends Consumer[In,Out] {

  def createSubscriber(cb: Callback[Throwable, Out], s: Scheduler): (Subscriber[In], AssignableCancelable) = {
    val conn = SingleAssignCancelable()
    // Guard against a user-supplied factory that throws: route the failure
    // through the standard raiseError consumer instead of propagating here.
    val attempt = Try(f(s, conn, cb))
    attempt match {
      case Success(out) =>
        (Subscriber(out, s), conn)
      case Failure(ex) =>
        Consumer.raiseError(ex).createSubscriber(cb, s)
    }
  }
}
| Wogan/monix | monix-reactive/shared/src/main/scala/monix/reactive/internal/consumers/CreateConsumer.scala | Scala | apache-2.0 | 1,590 |
/*
* Copyright 2016 Coursera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.coursera.naptime.courier
import com.linkedin.data.DataMap
import com.linkedin.data.codec.JacksonDataCodec
import com.linkedin.data.codec.TextDataCodec
import com.linkedin.data.schema.DataSchema
import com.linkedin.data.schema.TyperefDataSchema
import com.linkedin.data.schema.validation.ValidateDataAgainstSchema
import com.linkedin.data.schema.validation.ValidationOptions
import com.linkedin.data.schema.validation.ValidationResult
import com.linkedin.data.schema.validator.DataSchemaAnnotationValidator
import com.linkedin.data.template.DataTemplate
import com.linkedin.data.template.UnionTemplate
import org.coursera.courier.templates.DataTemplates.DataConversion
import org.coursera.courier.templates.DataValidationException
import org.coursera.pegasus.TypedDefinitionCodec
import scala.reflect.ClassTag
/**
* Provides methods for serializing and deserializing the Pegasus data types used by Courier
* to JSON.
*
* This uses [[org.coursera.pegasus.TypedDefinitionCodec]], the default codec for use with Courier
* at Coursera.
*
* For example, given a generated Courier data binding class named `Profile`, to serialize the
* Courier data binding class (a.k.a. data template) to JSON:
*
* ```
* val profile = Profile(...)
* val jsonString = CourierSerializer.write(profile)
* ```
*
* And to Deserialize JSON to the Courier data binding:
*
* ```
* val profile = CourierSerializer.read[Profile](jsonString)
* ```
*/
object CourierSerializer {

  /**
   * Reads a record template from a JSON string.
   *
   * @throws org.coursera.courier.templates.DataValidationException if validation fails.
   */
  def read[T <: DataTemplate[DataMap]](json: String)(implicit tag: ClassTag[T]): T = {
    val clazz = tag.runtimeClass.asInstanceOf[Class[T]]
    // Reuse the already-computed class instead of re-deriving it from the ClassTag.
    val dataMap = readDataMap(json, clazz)
    CourierSerializer.builder(clazz).validateAndBuild(dataMap) match {
      case Right(template) => template
      case Left(validationResult) => throw new DataValidationException(validationResult)
    }
  }

  /** Writes a record template to a JSON string. */
  def write[T <: DataTemplate[DataMap]](template: T)(implicit tag: ClassTag[T]): String = {
    writeDataMap(template.data(), tag.runtimeClass.asInstanceOf[Class[T]])
  }

  /**
   * Reads a union template from a JSON string.
   *
   * @throws org.coursera.courier.templates.DataValidationException if validation fails.
   */
  def readUnion[T <: UnionTemplate](json: String)(implicit tag: ClassTag[T]): T = {
    val clazz = tag.runtimeClass.asInstanceOf[Class[T]]
    val dataMap = readUnion(json, clazz)
    CourierSerializer.builder(clazz).validateAndBuild(dataMap) match {
      case Right(template) => template
      case Left(validationResult) => throw new DataValidationException(validationResult)
    }
  }

  /** Writes a union template to a JSON string; null union values are rejected. */
  def writeUnion[T <: UnionTemplate](template: T)(implicit tag: ClassTag[T]): String = {
    template.data() match {
      case dataMap: DataMap =>
        writeUnion(dataMap, tag.runtimeClass.asInstanceOf[Class[T]])
      case _ =>
        // A plain wildcard also covers null; the previous `_: AnyRef` type pattern
        // never matches null, so a null union escaped as a MatchError instead of
        // reaching this intended error message.
        throw new IllegalArgumentException("Null union values not supported by CourierSerializers")
    }
  }

  // Default validation options used for all record/union validation.
  private[this] val recordValidationOptions = new ValidationOptions()

  /**
   * Caches per-class reflection artifacts (companion instance, schema, annotation
   * validator and the generated `apply(DataMap, DataConversion)` method) so that
   * repeated (de)serialization of the same type avoids redundant lookups.
   */
  class TemplateBuilder[T <: DataTemplate[_ <: AnyRef]](private val clazz: Class[T]) {
    private[this] val companionInstance = companion(clazz)
    private[this] val schema = getSchema(clazz)
    private[this] val annotationValidator = new DataSchemaAnnotationValidator(schema)

    // Generated Courier templates expose `apply(DataMap, DataConversion)` on their companion.
    private[this] val applyMethod = {
      companionInstance.getClass.getDeclaredMethod(
        "apply",
        classOf[DataMap],
        classOf[DataConversion])
    }

    /** Builds a template from the data map without validating it. */
    def build(dataMap: DataMap): T = {
      applyMethod.invoke(companionInstance, dataMap, DataConversion.SetReadOnly).asInstanceOf[T]
    }

    /** Validates the data map against the schema, returning either the failures or the built template. */
    def validateAndBuild(dataMap: DataMap): Either[ValidationResult, T] = {
      val validationResult =
        ValidateDataAgainstSchema.validate(
          dataMap, schema, recordValidationOptions, annotationValidator)
      if (!validationResult.isValid) {
        Left(validationResult)
      } else {
        Right(
          applyMethod.invoke(
            companionInstance, dataMap, DataConversion.SetReadOnly).asInstanceOf[T])
      }
    }
  }

  /** Creates a [[TemplateBuilder]] for the given template class. */
  def builder[T <: DataTemplate[_ <: AnyRef]](clazz: Class[T]): TemplateBuilder[T] = {
    new TemplateBuilder(clazz)
  }

  /** Looks up the Pegasus schema declared on the template's companion object. */
  def getSchema[T <: DataTemplate[_]](implicit clazz: Class[T]): DataSchema = {
    getSchema(clazz, schemaFieldName)
  }

  /**
   * For unions declared in a typeref, gets the typeref schema.
   */
  def getDeclaringTyperefSchema[T <: DataTemplate[_]](
      implicit clazz: Class[T]): Option[TyperefDataSchema] = {
    try {
      getSchema(clazz, typerefSchemaFieldName) match {
        case schema: TyperefDataSchema => Some(schema)
        case unknown: DataSchema =>
          throw new IllegalStateException(
            s"$typerefSchemaFieldName must be a TyperefDataSchema but found $unknown")
      }
    } catch {
      // No TYPEREF_SCHEMA member means the union was not declared within a typeref.
      case _: NoSuchMethodException => None
    }
  }

  // Reflectively reads a schema member (SCHEMA / TYPEREF_SCHEMA) from the companion object.
  private[this] def getSchema[T <: DataTemplate[_]](
      clazz: Class[T], fieldName: String): DataSchema = {
    val companionInstance = companion(clazz)
    val companionClass = companionInstance.getClass
    companionClass.getDeclaredMethod(fieldName).invoke(companionInstance).asInstanceOf[DataSchema]
  }

  private[this] val schemaFieldName = "SCHEMA"
  private[this] val typerefSchemaFieldName = "TYPEREF_SCHEMA"

  // Resolves the companion object instance for the given class via Scala runtime reflection.
  private[this] def companion(clazz: Class[_]): AnyRef = {
    import scala.reflect.runtime.universe
    val mirror = universe.runtimeMirror(clazz.getClassLoader)
    val classSymbol = mirror.classSymbol(clazz)
    val companionMirror = mirror.reflectModule(classSymbol.companion.asModule)
    companionMirror.instance.asInstanceOf[AnyRef]
  }

  // JSON codec wrapped by the typed-definition codec below.
  private[this] val underlyingCodec = new JacksonDataCodec()

  private[this] def readDataMap[T <: DataTemplate[DataMap]](
      json: String, clazz: Class[T]): DataMap = {
    codec(clazz).stringToMap(json)
  }

  private[this] def readUnion[T <: UnionTemplate](
      json: String, clazz: Class[T]): DataMap = {
    codec(clazz).stringToMap(json)
  }

  private[this] def writeDataMap[T <: DataTemplate[DataMap]](
      dataMap: DataMap, clazz: Class[T]): String = {
    codec(clazz).mapToString(dataMap)
  }

  private[this] def writeUnion[T <: DataTemplate[AnyRef]](
      dataMap: DataMap, clazz: Class[T]): String = {
    codec(clazz).mapToString(dataMap)
  }

  private[this] def codec[T <: DataTemplate[_]](clazz: Class[T]): TextDataCodec = {
    codec(CourierSerializer.getSchema(clazz))
  }

  private[this] def codec[T <: DataTemplate[_]](schema: DataSchema): TextDataCodec = {
    new TypedDefinitionCodec(schema, underlyingCodec)
  }
}
| josh-newman/naptime | naptime-models/src/main/scala/org/coursera/naptime/courier/CourierSerializer.scala | Scala | apache-2.0 | 7,376 |
package com.orendainx.trucking.simulator.depots
import akka.actor.{ActorLogging, Props, Stash}
import com.orendainx.trucking.simulator.depots.ResourceDepot.{RequestRoute, RequestTruck, ReturnRoute, ReturnTruck}
import com.orendainx.trucking.simulator.generators.DataGenerator.NewResource
import com.orendainx.trucking.simulator.models._
import com.orendainx.trucking.simulator.services.RouteParser
import com.orendainx.trucking.simulator.models.EmptyRoute
import com.typesafe.config.Config
import scala.util.Random
/**
* This implementation of a [[ResourceDepot]] disallows [[com.orendainx.trucking.simulator.generators.DataGenerator]]
* objects from sharing resources. That is, any resource managed by this depot can only be checked out by a single generator.
*
* Sending a [[ResourceDepot.RequestRoute]] or [[ResourceDepot.RequestTruck]] message will return a new resource (that is different
* than the one specified as an argument in that message) as soon as one is available.
*
* @author Edgar Orendain <edgar@orendainx.com>
*/
object NoSharingDepot {
  /** Creates Props for a NoSharingDepot actor; resource counts/paths come from `config`. */
  def props()(implicit config: Config) =
    Props(new NoSharingDepot())
}
class NoSharingDepot(implicit config: Config) extends ResourceDepot with Stash with ActorLogging {

  // Shuffled pool of trucks; checked-out resources are removed and appended back on return.
  private val trucksAvailable = Random.shuffle(1 to config.getInt("resource-depot.trucks-available")).toList.map(Truck).toBuffer
  private val routesAvailable = RouteParser(config.getString("resource-depot.route-directory")).routes.toBuffer

  log.info("Trucks and routes initialized and ready for deployment")
  log.info(s"${trucksAvailable.length} trucks available.")
  log.info(s"${routesAvailable.length} routes available.")

  def receive = {
    // Generator wants a truck different from the one it currently holds.
    case RequestTruck(previous) if previous != EmptyTruck =>
      val ind = trucksAvailable.indexWhere(_ != previous)
      if (ind >= 0) sender() ! NewResource(trucksAvailable.remove(ind))
      else stash() // None available, stash request for later

    // First-time request (previous is EmptyTruck): any truck will do.
    case RequestTruck(_) =>
      if (trucksAvailable.nonEmpty) sender() ! NewResource(trucksAvailable.remove(0))
      else stash()

    // Same logic as above, for routes.
    case RequestRoute(previous) if previous != EmptyRoute =>
      val ind = routesAvailable.indexWhere(_ != previous)
      if (ind >= 0) sender() ! NewResource(routesAvailable.remove(ind))
      else stash()

    case RequestRoute(_) =>
      if (routesAvailable.nonEmpty) sender() ! NewResource(routesAvailable.remove(0))
      else stash()

    // Returned resources re-enter the pool; unstash so waiting requests are retried.
    case ReturnTruck(truck) =>
      trucksAvailable.append(truck)
      unstashAll()

    case ReturnRoute(route) =>
      routesAvailable.append(route)
      unstashAll()
  }
}
| orendain/trucking-iot | simulator/src/main/scala/com/orendainx/trucking/simulator/depots/NoSharingDepot.scala | Scala | apache-2.0 | 2,613 |
package pew
import java.awt.event.MouseAdapter
import java.awt.event.MouseWheelEvent
import java.awt.event.MouseEvent
import java.awt.event.MouseListener
import java.awt.event.MouseEvent._
import java.awt.Canvas
import pew.Pew._
/** Mutable mouse state written by the AWT event-dispatch thread and read elsewhere
  * (presumably polled by the render loop — fields are @volatile for cross-thread visibility).
  * (-1, -1) means "no position recorded yet".
  */
class Mouse private () {
  @volatile var x = -1
  @volatile var y = -1
  // True while the primary (BUTTON1) mouse button is held down.
  @volatile var m0 = false
}
object Mouse {

  /** Builds a fresh [[Mouse]] state holder together with the AWT adapter that feeds it. */
  def create(): (Mouse, MouseAdapter) = {
    val mouse = new Mouse()

    // Only the primary button (BUTTON1) is tracked.
    def handleButton(btn: Int, state: Boolean): Unit = {
      if (btn == BUTTON1) mouse.m0 = state
    }

    val adapt = new MouseAdapter {
      //      override def mouseClicked(e: MouseEvent): Unit =
      //        handleButton(e.getButton, true)

      override def mousePressed(e: MouseEvent): Unit =
        handleButton(e.getButton, true)

      override def mouseReleased(e: MouseEvent): Unit =
        handleButton(e.getButton, false)

      //      override def mouseEntered(e: MouseEvent): Unit = {}
      //      override def mouseExited(e: MouseEvent): Unit = {}
      //      override def mouseWheelMoved(e: MouseWheelEvent): Unit = {}

      override def mouseDragged(e: MouseEvent): Unit = {
        val p = e.getPoint
        // Screen coordinates are divided by the global render scale (from Pew._).
        mouse.x = p.x / scale
        mouse.y = p.y / scale
        // NOTE(review): for MOUSE_DRAGGED events AWT's getButton is typically NOBUTTON (0),
        // so this call likely never updates m0 — verify; mousePressed/Released already track it.
        handleButton(e.getButton, true)
      }

      override def mouseMoved(e: MouseEvent): Unit = {
        val p = e.getPoint
        mouse.x = p.x / scale
        mouse.y = p.y / scale
      }
    }
    (mouse, adapt)
  }

  /** Registers the adapter on the given panel if it is an AWT Canvas (no-op otherwise). */
  def register(panel: Any, adapter: MouseAdapter): Unit = {
    panel.is[Canvas].foreach { canvas =>
      canvas.addMouseListener(adapter)
      canvas.addMouseMotionListener(adapter)
      canvas.addMouseWheelListener(adapter)
    }
  }
}
| keddelzz/pixel-firework | src/main/scala/pew/Mouse.scala | Scala | mit | 1,661 |
/*^
===========================================================================
Helios - FX
===========================================================================
Copyright (C) 2013-2016 Gianluca Costa
===========================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================================
*/
package info.gianlucacosta.helios.fx.dialogs
import scalafx.Includes._
import scalafx.application.Platform
import scalafx.geometry.Insets
import scalafx.scene.Scene
import scalafx.scene.control.ProgressIndicator
import scalafx.scene.layout.BorderPane
import scalafx.stage.{Modality, Stage, WindowEvent}
/**
* A modal dialog showing a progress indicator while an action is performed.
*
* The dialog cannot be closed by the user.
*
* @param owner The owner stage
* @param dialogTitle The title
* @param progressIndicatorSize The size (width and height) of the progress indicator
*/
class BusyDialog(owner: Stage, dialogTitle: String, progressIndicatorSize: Double = 150) extends Stage {
initOwner(owner)
initModality(Modality.WindowModal)
title = dialogTitle
resizable = false
handleEvent(WindowEvent.WindowCloseRequest) {
(event: WindowEvent) => {
event.consume()
}
}
scene = new Scene {
root = new BorderPane {
center = new ProgressIndicator {
prefWidth = progressIndicatorSize
prefHeight = progressIndicatorSize
margin = Insets(50)
}
}
}
centerOnScreen()
/**
 * Shows the dialog, executing the given action in background.
 *
 * Updates to the JavaFX scene graph require Platform.runLater
 *
 * @param action the (by-name) work to run on a background daemon thread
 */
def run(action: => Unit): Unit = {
  new BusyThread(this, action).start()
}
/** Daemon worker: shows the dialog on the FX thread, runs the action, and always closes the dialog. */
private class BusyThread(busyDialog: BusyDialog, action: => Unit) extends Thread {
  // Daemon so a hung action cannot keep the JVM alive on shutdown.
  setDaemon(true)

  override def run(): Unit = {
    Platform.runLater {
      busyDialog.show()
    }

    try {
      action
    } finally {
      // Always dismiss the dialog, even if the action threw.
      Platform.runLater {
        busyDialog.close()
      }
    }
  }
}
} | giancosta86/Helios-fx | src/main/scala/info/gianlucacosta/helios/fx/dialogs/BusyDialog.scala | Scala | apache-2.0 | 2,693 |
package client
import scala.scalajs.concurrent.JSExecutionContext.Implicits.runNow
import scala.scalajs.js
import scala.scalajs.js.Dynamic._
import scala.scalajs.js.JSConverters._
import scala.scalajs.js.annotation.JSExport
import autowire._
import shared.api.CommentApi
import shared.model.Comment
import upickle.MapW
import scala.scalajs.js.annotation.JSExportAll
@JSExport
object CommentAction {
/** Pushes the given comments into the component's state as a JS array of literals. */
def setComments(comp: ReactComponent, comments: Seq[Comment]) {
  if (comments.nonEmpty) {
    val rows = comments.map(c => literal("key" -> c.id, "author" -> c.author, "text" -> c.text)).toJSArray
    comp.setState(literal("data" -> rows))
  } else {
    comp.setState(literal("data" -> js.Array()))
  }
}
@JSExport
def list(comp: ReactComponent) {
  // Fetches all comments from the server via autowire and pushes them into the component.
  // NOTE(review): Future failures are silently dropped here — consider onComplete for error handling.
  PostClient[CommentApi].list().call().onSuccess {
    case comments => setComments(comp, comments)
  }
}
@JSExport
def update(c: js.Dictionary[String], comp: ReactComponent) {
  // Builds a Comment from the raw JS dictionary, defaulting missing fields to "".
  val comment = new Comment(c.getOrElse("author", ""), c.getOrElse("text", ""))
  // Persists the comment server-side, then re-renders with the returned full list.
  PostClient[CommentApi].update(comment).call().onSuccess {
    case comments => setComments(comp, comments)
  }
}
@JSExport
def delete(id: Int, comp: ReactComponent) = {
  // Deletes the comment with the given id server-side, then re-renders with the returned list.
  PostClient[CommentApi].delete(id).call().onSuccess {
    case comments => setComments(comp, comments)
  }
}
} | MomoPain/scalajs-react-crud | js/src/main/scala/client/CommentAction.scala | Scala | mit | 1,346 |
/**
* COPYRIGHT (C) 2015 Alpine Data Labs Inc. All Rights Reserved.
*/
package com.alpine.plugin.core.io
import java.util
import com.alpine.plugin.core.annotation.AlpineSdkApi
import scala.collection.JavaConversions
/**
 * :: AlpineSdkApi ::
 * Describes the schema of a tabular dataset:
 * the known column definitions, whether the schema only partially covers the
 * real columns, and an optional expected storage format for the output.
 */
@AlpineSdkApi
case class TabularSchema private (
    definedColumns: Seq[ColumnDef],
    isPartial: Boolean,
    expectedOutputFormatAttributes: Option[TabularFormatAttributes]) {

  // Number of known column definitions.
  def getNumDefinedColumns: Int = definedColumns.length

  // Java-style accessor for the column definitions.
  def getDefinedColumns: Seq[ColumnDef] = definedColumns

  /**
   * This may be empty (the user is not required to specify the format).
   * @return TabularFormatAttributes Some(Avro, Parquet, TSV etc) or None.
   */
  def getExpectedOutputFormat: Option[TabularFormatAttributes] = {
    this.expectedOutputFormatAttributes
  }
}
object TabularSchema {

  /** Creates a complete (non-partial) schema with no expected output format. */
  def apply(columnDefs: Seq[ColumnDef]): TabularSchema = {
    TabularSchema(columnDefs, isPartial = false, None)
  }

  /** Creates a complete (non-partial) schema with the given expected output format. */
  def apply(columnDefs: Seq[ColumnDef],
    expectedOutputFormatAttributes: TabularFormatAttributes): TabularSchema = {
    TabularSchema(columnDefs, isPartial = false, Some(expectedOutputFormatAttributes))
  }

  // The overloads below are Java-friendly variants taking java.util.List.
  // NOTE(review): JavaConversions is deprecated in later Scala versions; consider
  // explicit JavaConverters (.asScala) when the codebase's Scala version allows.
  def apply(columnDefs: util.List[ColumnDef]): TabularSchema = {
    apply(columnDefs, isPartial = false)
  }

  def apply(columnDefs: util.List[ColumnDef],
    expectedOutputFormatAttributes: TabularFormatAttributes): TabularSchema = {
    apply(columnDefs, isPartial = false, expectedOutputFormatAttributes)
  }

  def apply(
    columnDefs: util.List[ColumnDef],
    isPartial: Boolean): TabularSchema = {
    TabularSchema(JavaConversions.asScalaBuffer(columnDefs), isPartial, None)
  }

  def apply(
    columnDefs: util.List[ColumnDef],
    isPartial: Boolean,
    expectedOutputFormatAttributes: TabularFormatAttributes): TabularSchema = {
    TabularSchema(JavaConversions.asScalaBuffer(columnDefs), isPartial, Some(expectedOutputFormatAttributes))
  }

  def apply(
    columnDefs: util.List[ColumnDef],
    isPartial: Boolean,
    expectedOutputFormatAttributes: Option[TabularFormatAttributes]): TabularSchema = {
    TabularSchema(JavaConversions.asScalaBuffer(columnDefs), isPartial, expectedOutputFormatAttributes)
  }

  def apply(
    columnDefs: Seq[ColumnDef],
    isPartial: Boolean,
    expectedOutputFormatAttributes: TabularFormatAttributes): TabularSchema = {
    TabularSchema(columnDefs, isPartial, Some(expectedOutputFormatAttributes))
  }
}
| holdenk/PluginSDK | plugin-core/src/main/scala/com/alpine/plugin/core/io/TabularSchema.scala | Scala | apache-2.0 | 2,454 |
/**
* Copyright (C) 2007 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.control.controls
import org.scalatest.junit.AssertionsForJUnit
import org.junit.Test
import XFormsUploadControl._
import org.orbeon.oxf.test.ResourceManagerTestBase
import org.orbeon.oxf.util.PathOps
/** Unit tests for the HMAC signing/verification of upload URLs. */
class XFormsUploadControlTest extends ResourceManagerTestBase with AssertionsForJUnit with PathOps {

  @Test def hmac(): Unit = {

    // (url, filename, mediatype, size) in the positional order expected by hmacURL
    val parameters = List(
      "file:/foo/tmp1.tmp",
      "bar & baz.png",
      "image/png",
      "1234"
    )

    def hmacFromList(p: List[String]) =
      hmacURL(p(0), Some(p(1)), Some(p(2)), Some(p(3)))

    val signed = hmacFromList(parameters)

    // Basic asserts
    assert("file:/foo/tmp1.tmp?filename=bar+%26+baz.png&mediatype=image%2Fpng&size=1234&mac=49acb231d3cf572cfce67f09a31d6669a3d0257f" === signed)
    assert(Some("49acb231d3cf572cfce67f09a31d6669a3d0257f") === getMAC(signed))
    assert("file:/foo/tmp1.tmp?filename=bar+%26+baz.png&mediatype=image%2Fpng&size=1234" === removeMAC(signed))
    assert(true === verifyMAC(signed))

    val names = List("filename", "mediatype", "size")

    // Check parameter values
    for ((name, expected) ← names zip parameters.tail)
      assert(Some(expected) === getFirstQueryParameter(signed, name))

    // Modify each parameter in turn and make sure the MAC is different
    for (pos ← parameters.indices) {
      val newParameters = parameters.updated(pos, parameters(pos) + 'x')
      assert(getMAC(signed) != getMAC(hmacFromList(newParameters)))
    }
  }
}
| wesley1001/orbeon-forms | src/test/scala/org/orbeon/oxf/xforms/control/controls/XFormsUploadControlTest.scala | Scala | lgpl-2.1 | 2,155 |
package mesosphere.marathon.state
import java.io.{ ByteArrayInputStream, ObjectInputStream }
import javax.inject.Inject
import mesosphere.marathon.Protos.{ MarathonTask, StorageVersion }
import mesosphere.marathon.metrics.Metrics
import mesosphere.marathon.state.StorageVersions._
import mesosphere.marathon.{ BuildInfo, MarathonConf, MigrationFailedException }
import mesosphere.util.Logging
import scala.concurrent.ExecutionContext.Implicits.global
import mesosphere.util.state.{ PersistentStore, PersistentStoreManagement }
import org.slf4j.LoggerFactory
import scala.collection.SortedSet
import scala.concurrent.duration._
import scala.concurrent.{ Await, Future }
import scala.util.control.NonFatal
/**
 * Applies storage-format migrations after leader election, upgrading persisted
 * state written by older Marathon versions to the current storage version.
 */
class Migration @Inject() (
    store: PersistentStore,
    appRepo: AppRepository,
    groupRepo: GroupRepository,
    taskRepo: TaskRepository,
    config: MarathonConf,
    metrics: Metrics) extends Logging {

  //scalastyle:off magic.number

  // A migration step: the storage version it upgrades to, paired with the action performing it.
  type MigrationAction = (StorageVersion, () => Future[Any])

  // Stored versions older than this cannot be migrated at all.
  private[state] val minSupportedStorageVersion = StorageVersions(0, 3, 0)

  /**
   * All the migrations, that have to be applied.
   * They get applied after the master has been elected.
   */
  def migrations: List[MigrationAction] = List(
    StorageVersions(0, 7, 0) -> { () =>
      Future.failed(new IllegalStateException("migration from 0.7.x not supported anymore"))
    },
    StorageVersions(0, 11, 0) -> { () =>
      new MigrationTo0_11(groupRepo, appRepo).migrateApps().recover {
        case NonFatal(e) => throw new MigrationFailedException("while migrating storage to 0.11", e)
      }
    },
    StorageVersions(0, 13, 0) -> { () =>
      new MigrationTo0_13(taskRepo, store).migrate().recover {
        case NonFatal(e) => throw new MigrationFailedException("while migrating storage to 0.13", e)
      }
    },
    StorageVersions(0, 16, 0) -> { () =>
      new MigrationTo0_16(groupRepo, appRepo).migrate().recover {
        case NonFatal(e) => throw new MigrationFailedException("while migrating storage to 0.16", e)
      }
    }
  )

  /**
   * Runs, in version order and strictly sequentially, every migration step newer than `from`.
   *
   * @param from the storage version found in the persistent store
   * @return the versions whose steps were applied
   * @throws MigrationFailedException if `from` is older than the minimum supported version
   */
  def applyMigrationSteps(from: StorageVersion): Future[List[StorageVersion]] = {
    if (from < minSupportedStorageVersion && from.nonEmpty) {
      val msg = s"Migration from versions < $minSupportedStorageVersion is not supported. Your version: $from"
      throw new MigrationFailedException(msg)
    }
    // The fold chains the futures so each step only starts after the previous one completed.
    migrations.filter(_._1 > from).sortBy(_._1).foldLeft(Future.successful(List.empty[StorageVersion])) {
      case (resultsFuture, (migrateVersion, change)) => resultsFuture.flatMap { res =>
        log.info(
          s"Migration for storage: ${from.str} to current: ${current.str}: " +
            s"apply change for version: ${migrateVersion.str} "
        )
        change.apply().map(_ => res :+ migrateVersion)
      }
    }
  }

  // Initializes the backing store if the implementation requires it (no-op otherwise).
  def initializeStore(): Future[Unit] = store match {
    case manager: PersistentStoreManagement => manager.initialize()
    case _: PersistentStore => Future.successful(())
  }

  /**
   * Entry point: initializes the store, applies all pending migration steps and
   * persists the new current version. Blocks until the migration has finished.
   *
   * @return the storage version persisted after a successful migration
   */
  def migrate(): StorageVersion = {
    val versionFuture = for {
      _ <- initializeStore()
      changes <- currentStorageVersion.flatMap(applyMigrationSteps)
      storedVersion <- storeCurrentVersion
    } yield storedVersion

    val result = versionFuture.map { version =>
      log.info(s"Migration successfully applied for version ${version.str}")
      version
    }.recover {
      // Preserve MigrationFailedException as-is; wrap any other failure.
      case ex: MigrationFailedException => throw ex
      case NonFatal(ex) => throw new MigrationFailedException("MigrationFailed", ex)
    }

    // Migration must complete before Marathon proceeds, hence the unbounded blocking wait.
    Await.result(result, Duration.Inf)
  }

  // Store key under which the storage format version is persisted.
  private val storageVersionName = "internal:storage:version"

  // Reads the persisted storage version; a missing entry means a fresh install at the current version.
  def currentStorageVersion: Future[StorageVersion] = {
    store.load(storageVersionName).map {
      case Some(variable) => StorageVersion.parseFrom(variable.bytes.toArray)
      case None => StorageVersions.current
    }
  }

  // Persists the current storage version, creating or updating the store entry as needed.
  def storeCurrentVersion: Future[StorageVersion] = {
    val bytes = StorageVersions.current.toByteArray
    store.load(storageVersionName).flatMap {
      case Some(entity) => store.update(entity.withNewContent(bytes))
      case None => store.create(storageVersionName, bytes)
    }.map{ _ => StorageVersions.current }
  }
}
/**
 * Implements the following migration logic:
 * * Add version info to the AppDefinition by looking at all saved versions.
 * * Make the groupRepository the ultimate source of truth for the latest app version.
 */
class MigrationTo0_11(groupRepository: GroupRepository, appRepository: AppRepository) {
  private[this] val log = LoggerFactory.getLogger(getClass)

  /** Entry point: enriches every known app (and all of its stored versions) with version info. */
  def migrateApps(): Future[Unit] = {
    log.info("Start 0.11 migration")
    val rootGroupFuture = groupRepository.rootGroup().map(_.getOrElse(Group.empty))
    val appIdsFuture = appRepository.allPathIds()

    for {
      rootGroup <- rootGroupFuture
      appIdsFromAppRepo <- appIdsFuture
      // Union of apps stored in the app repo and apps referenced from the root group.
      appIds = appIdsFromAppRepo.toSet ++ rootGroup.transitiveApps.map(_.id)
      _ = log.info(s"Discovered ${appIds.size} app IDs")
      appsWithVersions <- processApps(appIds, rootGroup)
      _ <- storeUpdatedAppsInRootGroup(rootGroup, appsWithVersions)
    } yield log.info("Finished 0.11 migration")
  }

  // Writes the updated app definitions back into the root group, making it the source of truth.
  private[this] def storeUpdatedAppsInRootGroup(
    rootGroup: Group,
    updatedApps: Iterable[AppDefinition]): Future[Unit] = {
    val updatedGroup = updatedApps.foldLeft(rootGroup){ (updatedGroup, updatedApp) =>
      updatedGroup.updateApp(updatedApp.id, _ => updatedApp, updatedApp.version)
    }
    groupRepository.store(groupRepository.zkRootName, updatedGroup).map(_ => ())
  }

  // Sequentially migrates each app; apps missing from the group data are expunged from the repo.
  private[this] def processApps(appIds: Iterable[PathId], rootGroup: Group): Future[Vector[AppDefinition]] = {
    appIds.foldLeft(Future.successful[Vector[AppDefinition]](Vector.empty)) { (otherStores, appId) =>
      otherStores.flatMap { storedApps =>
        val maybeAppInGroup = rootGroup.app(appId)
        maybeAppInGroup match {
          case Some(appInGroup) =>
            addVersionInfo(appId, appInGroup).map(storedApps ++ _)
          case None =>
            log.warn(s"App [$appId] will be expunged because it is not contained in the group data")
            appRepository.expunge(appId).map(_ => storedApps)
        }
      }
    }
  }

  // Walks all stored versions of one app in order and derives versionInfo for each of them.
  private[this] def addVersionInfo(id: PathId, appInGroup: AppDefinition): Future[Option[AppDefinition]] = {
    // Decides whether the next version is only a scale/restart (keep config version) or a new config.
    def addVersionInfoToVersioned(
      maybeLastApp: Option[AppDefinition],
      nextVersion: Timestamp,
      maybeNextApp: Option[AppDefinition]): Option[AppDefinition] = {
      maybeNextApp.map { nextApp =>
        maybeLastApp match {
          case Some(lastApp) if !lastApp.isUpgrade(nextApp) =>
            log.info(s"Adding versionInfo to ${nextApp.id} (${nextApp.version}): scaling or restart")
            nextApp.copy(versionInfo = lastApp.versionInfo.withScaleOrRestartChange(nextApp.version))
          case _ =>
            log.info(s"Adding versionInfo to ${nextApp.id} (${nextApp.version}): new config")
            nextApp.copy(versionInfo = AppDefinition.VersionInfo.forNewConfig(nextApp.version))
        }
      }
    }

    // Loads a specific app version, short-circuiting for the version embedded in the group.
    def loadApp(id: PathId, version: Timestamp): Future[Option[AppDefinition]] = {
      if (appInGroup.version == version) {
        Future.successful(Some(appInGroup))
      }
      else {
        appRepository.app(id, version)
      }
    }

    val sortedVersions = appRepository.listVersions(id).map(_.to[SortedSet])
    sortedVersions.flatMap { sortedVersionsWithoutGroup =>
      // Make sure the group's own version is part of the walk.
      val sortedVersions = sortedVersionsWithoutGroup ++ Seq(appInGroup.version)
      log.info(s"Add versionInfo to app [$id] for ${sortedVersions.size} versions")

      // Sequentially: each version's info depends on the previously stored app.
      sortedVersions.foldLeft(Future.successful[Option[AppDefinition]](None)) { (maybeLastAppFuture, nextVersion) =>
        for {
          maybeLastApp <- maybeLastAppFuture
          maybeNextApp <- loadApp(id, nextVersion)
          withVersionInfo = addVersionInfoToVersioned(maybeLastApp, nextVersion, maybeNextApp)
          storedResult <- withVersionInfo
            .map((newApp: AppDefinition) => appRepository.store(newApp).map(Some(_)))
            .getOrElse(maybeLastAppFuture)
        } yield storedResult
      }
    }
  }
}
/** Migrates task store keys to the 0.13 format and renames the frameworkId entry. */
class MigrationTo0_13(taskRepository: TaskRepository, store: PersistentStore) {
  private[this] val log = LoggerFactory.getLogger(getClass)

  val entityStore = taskRepository.store

  // the bytes stored via TaskTracker are incompatible to EntityRepo, so we have to parse them 'manually'
  def fetchLegacyTask(taskKey: String): Future[Option[MarathonTask]] = {
    // Legacy payload: an int length prefix followed by the serialized MarathonTask protobuf.
    def deserialize(taskKey: String, source: ObjectInputStream): Option[MarathonTask] = {
      if (source.available > 0) {
        try {
          val size = source.readInt
          val bytes = new Array[Byte](size)
          source.readFully(bytes)
          Some(MarathonTask.parseFrom(bytes))
        }
        catch {
          // Unparseable payloads are treated as missing rather than failing the migration.
          case e: com.google.protobuf.InvalidProtocolBufferException =>
            None
        }
      }
      else {
        None
      }
    }

    store.load("task:" + taskKey).map(_.flatMap { entity =>
      val source = new ObjectInputStream(new ByteArrayInputStream(entity.bytes.toArray))
      deserialize(taskKey, source)
    })
  }

  /** Rewrites every task stored under the old `appId:taskId` key format into the new format. */
  def migrateTasks(): Future[Unit] = {
    log.info("Start 0.13 migration")

    entityStore.names().flatMap { keys =>
      log.info("Found {} tasks in store", keys.size)
      // old format is appId:appId.taskId
      val oldFormatRegex = """^.*:.*\..*$""".r
      val namesInOldFormat = keys.filter(key => oldFormatRegex.pattern.matcher(key).matches)
      log.info("{} tasks in old format need to be migrated.", namesInOldFormat.size)

      // Migrate keys strictly one after the other.
      namesInOldFormat.foldLeft(Future.successful(())) { (f, nextKey) =>
        f.flatMap(_ => migrateKey(nextKey))
      }
    }.map { _ =>
      log.info("Completed 0.13 migration")
    }
  }

  // including 0.12, task keys are in format task:appId:taskId – the appId is
  // already contained the task, for example as in
  // task:my-app:my-app.13cb0cbe-b959-11e5-bb6d-5e099c92de61
  // where my-app.13cb0cbe-b959-11e5-bb6d-5e099c92de61 is the taskId containing
  // the appId as prefix. When using the generic EntityRepo, a colon
  // in the key after the prefix implicitly denotes a versioned entry, so this
  // had to be changed, even though tasks are not stored with versions. The new
  // format looks like this:
  // task:my-app.13cb0cbe-b959-11e5-bb6d-5e099c92de61
  private[state] def migrateKey(legacyKey: String): Future[Unit] = {
    // Store under the new key first, then expunge the legacy entry.
    fetchLegacyTask(legacyKey).flatMap {
      case Some(task) => taskRepository.store(task).flatMap { _ =>
        entityStore.expunge(legacyKey).map(_ => ())
      }
      case _ => Future.failed[Unit](new RuntimeException(s"Unable to load entity with key = $legacyKey"))
    }
  }

  /** Renames the `frameworkId` store entry to `framework:id`, if not already done. */
  def renameFrameworkId(): Future[Unit] = {
    val oldName = "frameworkId"
    val newName = "framework:id"

    // Copies the bytes to the new key, then removes the old one.
    def moveKey(bytes: IndexedSeq[Byte]): Future[Unit] = {
      for {
        _ <- store.create(newName, bytes)
        _ <- store.delete(oldName)
      } yield ()
    }

    store.load(newName).flatMap {
      case Some(_) =>
        log.info("framework:id already exists, no need to migrate")
        Future.successful(())
      case None =>
        store.load(oldName).flatMap {
          case None =>
            log.info("no frameworkId stored, no need to migrate")
            Future.successful(())
          case Some(entity) =>
            log.info("migrating frameworkId -> framework:id")
            moveKey(entity.bytes)
        }
    }
  }

  /** Runs both 0.13 migration steps in order. */
  def migrate(): Future[Unit] = for {
    _ <- migrateTasks()
    _ <- renameFrameworkId()
  } yield ()
}
/**
 * Implements the following migration logic:
 * * Load all apps, the logic in AppDefinition.mergeFromProto will create portDefinitions from the deprecated ports
 * * Save all apps, the logic in [[AppDefinition.toProto]] will save the new portDefinitions and skip the deprecated
 *   ports
 */
class MigrationTo0_16(groupRepository: GroupRepository, appRepository: AppRepository) {
  private[this] val log = LoggerFactory.getLogger(getClass)

  /** Entry point: re-stores every group and app version so they are re-serialized in the new format. */
  def migrate(): Future[Unit] = {
    log.info("Start 0.16 migration")
    val rootGroupFuture = groupRepository.rootGroup().map(_.getOrElse(Group.empty))

    for {
      rootGroup <- rootGroupFuture
      apps = rootGroup.transitiveApps
      _ = log.info(s"Discovered ${apps.size} apps")
      _ <- migrateRootGroup(rootGroup)
      _ <- migrateApps(rootGroup)
    } yield log.info("Finished 0.16 migration")
  }

  private[this] def migrateRootGroup(rootGroup: Group): Future[Unit] = {
    updateAllGroupVersions()
  }

  // Re-stores each app of the root group, strictly one after the other.
  private[this] def migrateApps(rootGroup: Group): Future[Unit] = {
    val apps = rootGroup.transitiveApps

    apps.foldLeft(Future.successful(())) { (future, app) =>
      future.flatMap { _ => updateAllAppVersions(app.id) }
    }
  }

  // Loads and re-stores every stored version of the root group, in version order.
  private[this] def updateAllGroupVersions(): Future[Unit] = {
    val id = groupRepository.zkRootName
    groupRepository.listVersions(id).map(d => d.toSeq.sorted).flatMap { sortedVersions =>
      sortedVersions.foldLeft(Future.successful(())) { (future, version) =>
        future.flatMap { _ =>
          groupRepository.group(id, version).flatMap {
            case Some(group) => groupRepository.store(id, group).map(_ => ())
            case None => Future.failed(new MigrationFailedException(s"Group $id:$version not found"))
          }
        }
      }
    }
  }

  // Loads and re-stores every stored version of one app, in version order.
  private[this] def updateAllAppVersions(appId: PathId): Future[Unit] = {
    appRepository.listVersions(appId).map(d => d.toSeq.sorted).flatMap { sortedVersions =>
      sortedVersions.foldLeft(Future.successful(())) { (future, version) =>
        future.flatMap { _ =>
          appRepository.app(appId, version).flatMap {
            case Some(app) => appRepository.store(app).map(_ => ())
            case None => Future.failed(new MigrationFailedException(s"App $appId:$version not found"))
          }
        }
      }
    }
  }
}
/** Helpers for constructing, parsing and ordering StorageVersion protobufs. */
object StorageVersions {
  // Matches "major.minor.patch" with an optional suffix (e.g. "-SNAPSHOT", "-RC1").
  val VersionRegex = """^(\d+)\.(\d+)\.(\d+).*""".r

  /** Builds a StorageVersion protobuf from its numeric components. */
  def apply(major: Int, minor: Int, patch: Int): StorageVersion = {
    StorageVersion
      .newBuilder()
      .setMajor(major)
      .setMinor(minor)
      .setPatch(patch)
      .build()
  }

  /**
   * The storage version corresponding to the running build.
   *
   * @throws IllegalArgumentException if the build version string cannot be parsed
   *                                  (previously this surfaced as an opaque MatchError)
   */
  def current: StorageVersion = {
    BuildInfo.version match {
      case VersionRegex(major, minor, patch) =>
        StorageVersions(
          major.toInt,
          minor.toInt,
          patch.toInt
        )
      case unparseable =>
        throw new IllegalArgumentException(
          s"Could not parse storage version from build version: $unparseable")
    }
  }

  /** Orders storage versions lexicographically by (major, minor, patch). */
  implicit class OrderedStorageVersion(val version: StorageVersion) extends AnyVal with Ordered[StorageVersion] {
    override def compare(that: StorageVersion): Int = {
      // Compare the next component only if the previous components are equal.
      def by(left: Int, right: Int, fn: => Int): Int = if (left.compareTo(right) != 0) left.compareTo(right) else fn
      by(version.getMajor, that.getMajor, by(version.getMinor, that.getMinor, by(version.getPatch, that.getPatch, 0)))
    }

    /** Compact human-readable rendering for log messages. */
    def str: String = s"Version(${version.getMajor}, ${version.getMinor}, ${version.getPatch})"

    /** True unless this is the all-zero "empty" version. */
    def nonEmpty: Boolean = !version.equals(empty)
  }

  /** The all-zero version, used as "no version stored". */
  def empty: StorageVersion = StorageVersions(0, 0, 0)
}
| ss75710541/marathon | src/main/scala/mesosphere/marathon/state/Migration.scala | Scala | apache-2.0 | 15,151 |
package edu.uchicago.cs.encsel.dataset.schema
import org.junit.Test
import org.junit.Assert._
import java.io.File
import edu.uchicago.cs.encsel.model.DataType
class SchemaGuesserTest {

  @Test
  def testTestType(): Unit = {
    val guesser = new SchemaGuesser
    // Each case is (raw value, currently assumed type, expected promoted type).
    val cases = Seq(
      ("22", DataType.BOOLEAN, DataType.INTEGER),
      ("22,232", DataType.BOOLEAN, DataType.INTEGER),
      ("1", DataType.BOOLEAN, DataType.BOOLEAN),
      ("0", DataType.BOOLEAN, DataType.BOOLEAN),
      ("true", DataType.BOOLEAN, DataType.BOOLEAN),
      ("False", DataType.BOOLEAN, DataType.BOOLEAN),
      ("Yes", DataType.BOOLEAN, DataType.BOOLEAN),
      ("nO", DataType.BOOLEAN, DataType.BOOLEAN),
      ("22,322", DataType.INTEGER, DataType.INTEGER),
      ("311,131,322,322", DataType.INTEGER, DataType.LONG),
      ("22.54", DataType.INTEGER, DataType.DOUBLE),
      ("33,222.54", DataType.INTEGER, DataType.DOUBLE),
      ("Goodman", DataType.INTEGER, DataType.STRING),
      ("32", DataType.LONG, DataType.LONG),
      ("32,942", DataType.LONG, DataType.LONG),
      ("32,323,232,234,234", DataType.LONG, DataType.LONG),
      ("32,323,232,234,234,432,234,234,234,234,234,234", DataType.LONG, DataType.STRING),
      ("Goodman", DataType.LONG, DataType.STRING),
      ("22.5", DataType.DOUBLE, DataType.DOUBLE),
      (".5", DataType.DOUBLE, DataType.DOUBLE),
      (".5E2", DataType.DOUBLE, DataType.DOUBLE),
      ("-3.5", DataType.DOUBLE, DataType.DOUBLE),
      ("3234", DataType.DOUBLE, DataType.DOUBLE),
      ("83,323.4", DataType.DOUBLE, DataType.DOUBLE),
      ("5", DataType.DOUBLE, DataType.DOUBLE),
      ("Goews", DataType.DOUBLE, DataType.STRING),
      ("Goews", DataType.STRING, DataType.STRING),
      ("32", DataType.STRING, DataType.STRING),
      ("32.323", DataType.STRING, DataType.STRING),
      ("E5230", DataType.DOUBLE, DataType.STRING)
    )
    cases.foreach { case (value, assumed, expected) =>
      assertEquals(expected, guesser.testType(value, assumed))
    }
  }

  @Test
  def testGuessSchema(): Unit = {
    val guesser = new SchemaGuesser()

    // CSV input: both column names and column types are guessed.
    val csvSchema = guesser.guessSchema(new File("src/test/resource/test_guess_schema.csv").toURI)
    assertEquals(5, csvSchema.columns.length)
    Seq("A_K", "B_M", "CWD", "DEE", "E").zipWithIndex.foreach { case (name, i) =>
      assertEquals(name, csvSchema.columns(i)._2)
    }
    Seq(DataType.DOUBLE, DataType.STRING, DataType.LONG, DataType.STRING, DataType.INTEGER).zipWithIndex.foreach {
      case (expectedType, i) => assertEquals(expectedType, csvSchema.columns(i)._1)
    }

    // Excel input: only the column types are asserted.
    val xlsxSchema = guesser.guessSchema(new File("src/test/resource/test_guess_schema.xlsx").toURI)
    assertEquals(4, xlsxSchema.columns.length)
    Seq(DataType.DOUBLE, DataType.STRING, DataType.LONG, DataType.STRING).zipWithIndex.foreach {
      case (expectedType, i) => assertEquals(expectedType, xlsxSchema.columns(i)._1)
    }

    // JSON input.
    val jsonSchema = guesser.guessSchema(new File("src/test/resource/test_guess_schema.json").toURI)
    assertEquals(4, jsonSchema.columns.length)
    Seq(DataType.DOUBLE, DataType.STRING, DataType.INTEGER, DataType.STRING).zipWithIndex.foreach {
      case (expectedType, i) => assertEquals(expectedType, jsonSchema.columns(i)._1)
    }

    // TSV input.
    val tsvSchema = guesser.guessSchema(new File("src/test/resource/test_guess_schema.tsv").toURI)
    assertEquals(5, tsvSchema.columns.length)
    Seq(DataType.DOUBLE, DataType.STRING, DataType.LONG, DataType.INTEGER, DataType.STRING).zipWithIndex.foreach {
      case (expectedType, i) => assertEquals(expectedType, tsvSchema.columns(i)._1)
    }
  }
}
} | harperjiang/enc-selector | src/test/scala/edu/uchicago/cs/encsel/dataset/schema/SchemaGuesserTest.scala | Scala | apache-2.0 | 4,583 |
/*
Copyright 2009 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package breeze;
package text;
package tokenize;
/**
 * Tokenizer that splits the input document on every match of the given
 * regular expression. The matched separators themselves are discarded and
 * are not part of the returned tokens.
 *
 * @author dramage
 */
case class RegexSplitTokenizer(pattern: String) extends Tokenizer {
  override def apply(doc: String) = doc.split(pattern)
}
| tjhunter/scalanlp-core | process/src/main/scala/breeze/text/tokenize/RegexSplitTokenizer.scala | Scala | apache-2.0 | 879 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.infer
import cc.factorie._
import cc.factorie.directed.{DirectedFactor, DirectedModel}
import cc.factorie.model.{Factor, Model}
import cc.factorie.variable._
import scala.collection.mutable.{ArrayBuffer, HashMap}
/** Sample a value for a single variable. This sampler works in one of two ways:
If the model is a DirectedModel, then sampling is performed based on a suite a handlers
selected according to the variable type and its neighboring factors.
If the model is not a DirectedModel, then the variable should inherit from IterableSettings
which is used to create a list of Proposals with alternative values. */
class GibbsSampler(val model:Model, val objective:Model = null)(implicit val random: scala.util.Random) extends ProposalSampler[Var] {
  type V = Var
  // Optional user-supplied handler suite; falls back to defaultHandlers when unset.
  private var _handlers: Iterable[GibbsSamplerHandler] = null
  def defaultHandlers = GibbsSamplerDefaultHandlers
  def setHandlers(h:Iterable[GibbsSamplerHandler]): Unit = _handlers = h
  def handlers: Iterable[GibbsSamplerHandler] = if (_handlers eq null) defaultHandlers else _handlers
  // When true, the closure built for a variable is cached and reused on later visits.
  val cacheClosures = true
  // Fix: this must be a single, stable map. It was previously declared as
  // `def closures`, which constructed a brand-new HashMap on every access, so
  // `closures.contains(v)` and `closures(v) = closure` in processByHandlers
  // operated on different maps and the cache never produced a hit.
  val closures = new HashMap[V, GibbsSamplerClosure]
  // Directed models are sampled via the handler suite; otherwise we fall back
  // to enumerating proposals via IterableSettings.
  val doProcessByHandlers = model.isInstanceOf[DirectedModel]
  override def process1(v:V): DiffList = if (doProcessByHandlers) processByHandlers(v) else processProposals(proposals(v))
  // Sample `v` using the first handler that can build a closure for its factors.
  def processByHandlers(v:V): DiffList = {
    val d = newDiffList
    // If we have a cached closure, just use it and return
    if (cacheClosures && closures.contains(v)) { closures(v).sample(d); return d }
    // Get factors, in sorted order of the their classname
    val factors = model.factors(v).toSeq.sortWith((f1:Factor,f2:Factor) => f1.getClass.getName < f2.getClass.getName).toSeq
    var done = false
    val handlerIterator = handlers.iterator
    while (!done && handlerIterator.hasNext) {
      val closure = handlerIterator.next().sampler(v, factors, this)
      if (closure ne null) {
        done = true
        closure.sample(d)
        if (cacheClosures) closures(v) = closure
      }
    }
    if (!done) throw new Error("GibbsSampler: No sampling handler found for "+factors)
    d
  }
  // Enumerate scored proposals for `v` (undirected models only).
  def proposals(v:V): Seq[Proposal[V]] = model match {
    case m:DirectedModel => throw new Error("Not yet implemented")
    case m:Model => v match {
      case v:DiscreteVariable => proposals(v)
      case v:Var with IterableSettings => proposals(v.settings, v)
    }
  }
  // Score each alternative setting by applying it, measuring the model and
  // objective score deltas, and undoing the change again.
  def proposals(si:SettingIterator, v:V): Seq[Proposal[V]] = {
    val dmodel = model: Model
    val dobjective = objective: Model
    val props = new ArrayBuffer[Proposal[V]]()
    while (si.hasNext) {
      val d = si.next()
      val (m,o) = d.scoreAndUndo(dmodel,dobjective)
      props += new Proposal(d, m, o, m/temperature, v)
    }
    props
  }
  // Special case for a bit more efficiency
  def proposals(dv:DiscreteVariable): Seq[Proposal[V]] = {
    var i = 0; val len = dv.domain.size
    val result = new ArrayBuffer[Proposal[V]](len)
    while (i < len) {
      val diff = new DiffList
      dv.set(i)(diff)
      val (modelScore, objectiveScore) = diff.scoreAndUndo(model, objective)
      result += new Proposal(diff, modelScore, objectiveScore, modelScore/temperature, dv)
      i += 1
    }
    result
  }
}
// Default handler suite used when GibbsSampler.setHandlers was never called.
// Currently it contains only the directed-model handler.
object GibbsSamplerDefaultHandlers extends ArrayBuffer[GibbsSamplerHandler] {
  this += GeneratedVarGibbsSamplerHandler
}
// A strategy that can recognize (variable, factors) combinations it knows how
// to sample. Returns null when this handler does not apply, letting the
// sampler try the next handler in the suite.
trait GibbsSamplerHandler {
  def sampler(v:Var, factors:Seq[Factor], sampler:GibbsSampler): GibbsSamplerClosure
}
// A reusable sampling action bound to one variable; GibbsSampler caches these
// per variable so the handler lookup only happens on the first visit.
trait GibbsSamplerClosure {
  def sample(implicit d:DiffList = null): Unit
}
object GeneratedVarGibbsSamplerHandler extends GibbsSamplerHandler {
  // Closure that re-samples `variable` directly from its single parent factor.
  class Closure(val variable:MutableVar, val factor:DirectedFactor)(implicit random: scala.util.Random) extends GibbsSamplerClosure {
    def sample(implicit d:DiffList = null): Unit = variable.set(factor.sampledValue.asInstanceOf[variable.Value])
  }
  // Applies only when the variable touches exactly one factor and that factor
  // is directed; otherwise returns null so the sampler can try other handlers.
  def sampler(v:Var, factors:Seq[Factor], sampler:GibbsSampler): GibbsSamplerClosure = {
    factors match {
      case List(factor:DirectedFactor) => {
        v match {
          // NOTE(review): this inner match is non-exhaustive — a non-mutable
          // Var with a single DirectedFactor raises a MatchError here instead
          // of falling through to null; confirm whether that is intended.
          case v:MutableVar => new Closure(v, factor)(sampler.random)
        }
      }
      case _ => null
    }
  }
}
// TODO Create MixtureChoiceGibbsSamplerHandler, IterableSettingsGibbsSamplerHandler
| patverga/factorie | src/main/scala/cc/factorie/infer/GibbsSampler.scala | Scala | apache-2.0 | 5,040 |
package org.psesd.srx.services.prs
import org.json4s.JValue
import org.mongodb.scala.bson.BsonValue
import org.mongodb.scala.{Document, MongoCollection, MongoDatabase}
import org.psesd.srx.shared.core.SrxResponseFormat.SrxResponseFormat
import org.psesd.srx.shared.core.{sif, _}
import org.psesd.srx.shared.core.exceptions.{ArgumentInvalidException, ArgumentNullException, SrxResourceNotFoundException}
import org.psesd.srx.shared.core.extensions.TypeExtensions._
import org.psesd.srx.shared.core.sif.SifRequestAction._
import org.psesd.srx.shared.core.sif.{SifHttpStatusCode, SifRequestAction, SifRequestParameter, SifRequestParameterCollection}
import org.psesd.srx.shared.data.exceptions.DatasourceDuplicateViolationException
import org.psesd.srx.shared.data.{Datasource, DatasourceResult}
import scala.collection.mutable.ArrayBuffer
import scala.xml.Node
/** Represents External Service.
*
* @version 1.0
* @since 1.0
* @author Stephen Pugmire (iTrellis, LLC)
*/
class ExternalService(
                       val id: Int,
                       val authorizedEntityId: Int,
                       val name: Option[String],
                       val description: Option[String]
                     ) extends SrxResource with PrsEntity {
  // JSON form is derived from the XML form with the root element dropped.
  def toJson: JValue = {
    toXml.toJsonStringNoRoot.toJson
  }
  // Serializes as an <externalService> element. The name and description
  // child elements are emitted only when the corresponding Option is defined
  // (the `optional` helper suppresses them for null values).
  def toXml: Node = {
    <externalService>
      <id>{id.toString}</id>
      <authorizedEntityId>{authorizedEntityId.toString}</authorizedEntityId>
      {optional(name.orNull, <externalServiceName>{name.orNull}</externalServiceName>)}
      {optional(description.orNull, <externalServiceDescription>{description.orNull}</externalServiceDescription>)}
    </externalService>
  }
}
/** Represents External Service method result.
*
* @version 1.0
* @since 1.0
* @author Stephen Pugmire (iTrellis, LLC)
*/
// Thin adapter over PrsEntityResult: fixes the row-to-entity mapper and the
// <externalServices/> collection root element for external-service responses.
class ExternalServiceResult(requestAction: SifRequestAction, httpStatusCode: Int, result: DatasourceResult, responseFormat: SrxResponseFormat) extends PrsEntityResult(
  requestAction,
  httpStatusCode,
  result,
  ExternalService.getExternalServiceFromResult,
  <externalServices/>,
  responseFormat
) {
}
/** PRS External Service methods.
*
* @version 1.0
* @since 1.0
* @author Stephen Pugmire (iTrellis, LLC)
* */
object ExternalService extends PrsEntityService {

  /** Constructs an ExternalService from its field values. */
  def apply(
             id: Int,
             authorizedEntityId: Int,
             name: Option[String],
             description: Option[String]
           ): ExternalService = new ExternalService(id, authorizedEntityId, name, description)

  /**
    * Constructs an ExternalService from an XML request body.
    *
    * @throws ArgumentNullException if the request body is null.
    */
  def apply(requestBody: SrxRequestBody, parameters: Option[List[SifRequestParameter]]): ExternalService = {
    if (requestBody == null) {
      throw new ArgumentNullException("requestBody parameter")
    }
    apply(requestBody.getXml.orNull, parameters)
  }

  /**
    * Constructs an ExternalService from an <externalService> XML element.
    *
    * The authorized entity id is NOT read from the XML body; it must be
    * supplied via the "authorizedEntityId" request parameter.
    *
    * @throws ArgumentNullException    if the XML or the authorizedEntityId parameter is missing.
    * @throws ArgumentInvalidException if the root element is not <externalService> or <root>.
    */
  def apply(externalServiceXml: Node, parameters: Option[List[SifRequestParameter]]): ExternalService = {
    if (externalServiceXml == null) {
      throw new ArgumentNullException("externalServiceXml parameter")
    }
    val authorizedEntityParam = {if (parameters.isDefined) parameters.get.find(p => p.key.toLowerCase == "authorizedentityid") else None}
    if (authorizedEntityParam.isEmpty) {
      throw new ArgumentNullException("authorizedEntityId parameter")
    }
    val rootElementName = externalServiceXml.label
    if (rootElementName != "externalService" && rootElementName != "root") {
      throw new ArgumentInvalidException("root element '%s'".format(rootElementName))
    }
    val id = (externalServiceXml \\ "id").textOption.getOrElse("0").toInt
    val authorizedEntityId = authorizedEntityParam.get.value.toInt
    val name = (externalServiceXml \\ "externalServiceName").textOption
    val description = (externalServiceXml \\ "externalServiceDescription").textOption
    new ExternalService(
      id,
      authorizedEntityId,
      name,
      description
    )
  }

  /** Inserts a new external service row, mirrors it into MongoDB, and logs the outcome. */
  def create(resource: SrxResource, parameters: List[SifRequestParameter]): SrxResourceResult = {
    try {
      if (resource == null) {
        throw new ArgumentNullException("resource parameter")
      }
      val externalService = resource.asInstanceOf[ExternalService]
      val datasource = new Datasource(datasourceConfig)
      val result = datasource.create(
        "insert into srx_services_prs.external_service (" +
          "id, authorized_entity_id, name, description) values (" +
          "DEFAULT, ?, ?, ?) " +
          "RETURNING id;",
        "id",
        externalService.authorizedEntityId,
        externalService.name.orNull,
        externalService.description.orNull
      )
      datasource.close()
      if (result.success) {
        // Mirror the new service into MongoDB so it is discoverable there too.
        val mongoDataSource = new MongoDataSource
        mongoDataSource.insertOrganization(externalService.authorizedEntityId.toString, result.id.get)
        PrsServer.logSuccessMessage(
          PrsResource.ExternalServices.toString,
          SifRequestAction.Create.toString,
          result.id,
          SifRequestParameterCollection(parameters),
          Some(externalService.toXml.toXmlString)
        )
        val responseFormat = SrxResponseFormat.getResponseFormat(parameters)
        // When the caller asked for the full object back, re-query the stored
        // row; otherwise return the raw write result (id only).
        val payload =
          if (responseFormat.equals(SrxResponseFormat.Object)) executeQuery(Some(result.id.get.toInt))
          else result
        new ExternalServiceResult(
          SifRequestAction.Create,
          SifRequestAction.getSuccessStatusCode(SifRequestAction.Create),
          payload,
          responseFormat
        )
      } else {
        throw result.exceptions.head
      }
    } catch {
      case dv: DatasourceDuplicateViolationException =>
        SrxResourceErrorResult(SifHttpStatusCode.BadRequest, dv)
      case e: Exception =>
        SrxResourceErrorResult(SifHttpStatusCode.InternalServerError, e)
    }
  }

  /**
    * Deletes an external service row and removes the matching organization
    * from MongoDB.
    *
    * Fix: the id is now validated BEFORE it is dereferenced. Previously
    * `id.get` was called (and two lookups performed) ahead of the
    * `id.isEmpty` guard, so a missing id parameter surfaced as an uncaught
    * NoSuchElementException instead of the intended 400 BadRequest, and
    * lookup failures escaped the try/catch entirely.
    */
  def delete(parameters: List[SifRequestParameter]): SrxResourceResult = {
    val id = getKeyIdFromRequestParameters(parameters)
    if (id.isEmpty || id.get == -1) {
      SrxResourceErrorResult(SifHttpStatusCode.BadRequest, new ArgumentInvalidException("id parameter"))
    } else {
      try {
        // Resolve the service and its authorized entity first; the entity XML
        // is needed below to delete the matching MongoDB organization.
        val externalServiceResult = ExternalService.query(List[SifRequestParameter](SifRequestParameter("id", id.get.toString)))
        val externalServiceXml = externalServiceResult.toXml.get
        val authorizedEntityId = (externalServiceXml \\ "externalService" \\ "authorizedEntityId").text
        val authorizedEntityXml = AuthorizedEntity.query(List[SifRequestParameter](SifRequestParameter("id", authorizedEntityId))).toXml.get
        val datasource = new Datasource(datasourceConfig)
        val result = datasource.execute(
          "delete from srx_services_prs.external_service where id = ?;",
          id.get
        )
        datasource.close()
        if (result.success) {
          val mongoDataSource = new MongoDataSource
          mongoDataSource.deleteOrganization(authorizedEntityXml)
          PrsServer.logSuccessMessage(
            PrsResource.ExternalServices.toString,
            SifRequestAction.Delete.toString,
            Some(id.get.toString),
            SifRequestParameterCollection(parameters),
            None
          )
          val esResult = new ExternalServiceResult(
            SifRequestAction.Delete,
            SifRequestAction.getSuccessStatusCode(SifRequestAction.Delete),
            result,
            SrxResponseFormat.getResponseFormat(parameters)
          )
          esResult.setId(id.get)
          esResult
        } else {
          throw result.exceptions.head
        }
      } catch {
        case e: Exception =>
          SrxResourceErrorResult(SifHttpStatusCode.InternalServerError, e)
      }
    }
  }

  /**
    * Queries one external service by id, or all services when no id is given.
    * Returns 404 when a specific id does not exist.
    */
  def query(parameters: List[SifRequestParameter]): SrxResourceResult = {
    val id = getKeyIdFromRequestParameters(parameters)
    if (id.isDefined && id.get == -1) {
      SrxResourceErrorResult(SifHttpStatusCode.BadRequest, new ArgumentInvalidException("id parameter"))
    } else {
      try {
        val result = executeQuery(id)
        if (result.success) {
          val resourceId = if (id.isEmpty) Some("all") else Some(id.get.toString)
          if (id.isDefined && result.rows.isEmpty) {
            PrsServer.logNotFoundMessage(
              PrsResource.ExternalServices.toString,
              SifRequestAction.Query.toString,
              resourceId,
              SifRequestParameterCollection(parameters),
              None
            )
            SrxResourceErrorResult(SifHttpStatusCode.NotFound, new SrxResourceNotFoundException(PrsResource.ExternalServices.toString))
          } else {
            PrsServer.logSuccessMessage(
              PrsResource.ExternalServices.toString,
              SifRequestAction.Query.toString,
              resourceId,
              SifRequestParameterCollection(parameters),
              None
            )
            new ExternalServiceResult(
              SifRequestAction.Query,
              SifHttpStatusCode.Ok,
              result,
              SrxResponseFormat.getResponseFormat(parameters)
            )
          }
        } else {
          throw result.exceptions.head
        }
      } catch {
        case e: Exception =>
          SrxResourceErrorResult(SifHttpStatusCode.InternalServerError, e)
      }
    }
  }

  // Runs the select for one id (when defined) or for all rows (ordered by id).
  private def executeQuery(id: Option[Int]): DatasourceResult = {
    val selectFrom = "select srx_services_prs.external_service.* from srx_services_prs.external_service "
    val datasource = new Datasource(datasourceConfig)
    val result = {
      if (id.isEmpty) {
        datasource.get(selectFrom + "order by srx_services_prs.external_service.id;")
      } else {
        datasource.get(selectFrom + "where srx_services_prs.external_service.id = ?;", id.get)
      }
    }
    datasource.close()
    result
  }

  /**
    * Updates an external service. The id may come either from the request
    * parameters or from the deserialized resource body.
    */
  def update(resource: SrxResource, parameters: List[SifRequestParameter]): SrxResourceResult = {
    if (resource == null) {
      throw new ArgumentNullException("resource parameter")
    }
    try {
      var id = getKeyIdFromRequestParameters(parameters)
      val externalService = resource.asInstanceOf[ExternalService]
      if ((id.isEmpty || id.get == 0) && externalService.id > 0) {
        id = Some(externalService.id)
      }
      if (id.isEmpty || id.get == -1) {
        SrxResourceErrorResult(SifHttpStatusCode.BadRequest, new ArgumentInvalidException("id parameter"))
      } else {
        val datasource = new Datasource(datasourceConfig)
        val result = datasource.execute(
          "update srx_services_prs.external_service set " +
            "authorized_entity_id = ?, " +
            "name = ?, " +
            "description = ? " +
            "where id = ?;",
          externalService.authorizedEntityId,
          externalService.name.orNull,
          externalService.description.orNull,
          id.get
        )
        datasource.close()
        if (result.success) {
          PrsServer.logSuccessMessage(
            PrsResource.ExternalServices.toString,
            SifRequestAction.Update.toString,
            Some(id.get.toString),
            SifRequestParameterCollection(parameters),
            Some(externalService.toXml.toXmlString)
          )
          val responseFormat = SrxResponseFormat.getResponseFormat(parameters)
          // Object format returns the freshly re-queried row; otherwise the
          // raw write result is returned (same behavior, no mutable var).
          val payload =
            if (responseFormat.equals(SrxResponseFormat.Object)) executeQuery(Some(id.get))
            else result
          val esResult = new ExternalServiceResult(
            SifRequestAction.Update,
            SifRequestAction.getSuccessStatusCode(SifRequestAction.Update),
            payload,
            responseFormat
          )
          esResult.setId(id.get)
          esResult
        } else {
          throw result.exceptions.head
        }
      }
    } catch {
      case dv: DatasourceDuplicateViolationException =>
        SrxResourceErrorResult(SifHttpStatusCode.BadRequest, dv)
      case e: Exception =>
        SrxResourceErrorResult(SifHttpStatusCode.InternalServerError, e)
    }
  }

  /** Maps datasource result rows onto ExternalService instances, preserving row order. */
  def getExternalServiceFromResult(result: DatasourceResult): List[ExternalService] = {
    result.rows.map { row =>
      ExternalService(
        row.getString("id").getOrElse("").toInt,
        row.getString("authorized_entity_id").getOrElse("").toInt,
        row.getString("name"),
        row.getString("description")
      )
    }.toList
  }
}
| PSESD/srx-services-prs | src/main/scala/org/psesd/srx/services/prs/ExternalService.scala | Scala | mit | 13,055 |
package score.money
import java.math.RoundingMode.{CEILING => JCEILING}
import java.math.RoundingMode.{DOWN => JDOWN}
import java.math.RoundingMode.{FLOOR => JFLOOR}
import java.math.RoundingMode.{HALF_DOWN => JHALF_DOWN}
import java.math.RoundingMode.{HALF_EVEN => JHALF_EVEN}
import java.math.RoundingMode.{HALF_UP => JHALF_UP}
import java.math.RoundingMode.{UNNECESSARY => JUNNECESSARY}
import java.math.RoundingMode.{UP => JUP}
import java.math.{BigDecimal => JBigDecimal}
import java.math.{MathContext => JMathContext}
import java.math.{RoundingMode => JRoundingMode}
import java.math.{RoundingMode => JRoundingMode}
import scala.math.BigDecimal.RoundingMode.CEILING
import scala.math.BigDecimal.RoundingMode.DOWN
import scala.math.BigDecimal.RoundingMode.FLOOR
import scala.math.BigDecimal.RoundingMode.HALF_DOWN
import scala.math.BigDecimal.RoundingMode.HALF_EVEN
import scala.math.BigDecimal.RoundingMode.HALF_UP
import scala.math.BigDecimal.RoundingMode.RoundingMode
import scala.math.BigDecimal.RoundingMode.UNNECESSARY
import scala.math.BigDecimal.RoundingMode.UP
import scala.math.BigDecimal.RoundingMode
import scala.math.ScalaNumber
import scala.math.ScalaNumericConversions
import score.money
/**
* 128-bit fixed point value
*/
object FixedPoint extends AnyRef with FixedPointExport {
  /**
   * Long constraints for integral part
   */
  private[money] val INT_VAL_MIN = Long.MinValue
  private[money] val INT_VAL_MAX = Long.MaxValue
  /**
   * Long constraints for fractional part
   */
  // Self-recursive function value computing v * 10^p; the val may reference
  // itself inside the lambda because the body only runs after initialization.
  // FRAC_VAL_FROM / FRAC_VAL_UNTIL are the exclusive bounds -10^FRAC_LEN_MAX
  // and +10^FRAC_LEN_MAX for the scaled fractional component.
  private[this] val p10: (Long, Int) => Long = (v: Long, p: Int) => if (p == 0) v else p10(v * 10, p - 1)
  private[money] val FRAC_VAL_FROM = p10(-1, FRAC_LEN_MAX)
  private[money] val FRAC_VAL_UNTIL = -FRAC_VAL_FROM
  /**
   * BigDecimal constraints for integral part
   */
  private[money] val INT_VAL_MIN_BIG = BigDecimal(INT_VAL_MIN, MC)
  private[money] val INT_VAL_MAX_BIG = BigDecimal(INT_VAL_MAX, MC)
  // NOTE(review): the *_JBIG values are private[this] and not referenced in
  // this object; they appear to be dead and could be removed.
  private[this] val INT_VAL_MIN_JBIG = new JBigDecimal(INT_VAL_MIN, MC)
  private[this] val INT_VAL_MAX_JBIG = new JBigDecimal(INT_VAL_MAX, MC)
  /**
   * BigDecimal constraints for fractional part
   */
  private[money] val FRAC_VAL_FROM_BIG = BigDecimal(FRAC_VAL_FROM, MC)
  private[money] val FRAC_VAL_UNTIL_BIG = BigDecimal(FRAC_VAL_UNTIL, MC)
  private[this] val FRAC_VAL_UNTIL_JBIG = new JBigDecimal(FRAC_VAL_UNTIL, MC)
  /**
   * Double constraints for integral part
   */
  private[money] val INT_VAL_MIN_DOUBLE = INT_VAL_MIN.toDouble
  private[money] val INT_VAL_MAX_DOUBLE = INT_VAL_MAX.toDouble
  /**
   * Double constraints for fractional part
   */
  private[money] val FRAC_VAL_FROM_DOUBLE = FRAC_VAL_FROM.toDouble
  private[money] val FRAC_VAL_UNTIL_DOUBLE = FRAC_VAL_UNTIL.toDouble
  // Splits a decimal into an (integral, scaled fractional) Long pair,
  // truncating toward zero. If the truncated fraction reaches a full unit
  // (+/-10^FRAC_LEN_MAX) it carries one into the integral part; overflow of
  // the integral part raises IllegalArgumentException.
  def fp(value: BigDecimal): (Long, Long) = {
    if (value.mc.getPrecision() >= PRECISION) {
      // integral part must fit Long
      val intValBig = value.setScale(0, RoundingMode.DOWN)
      require(intValBig >= INT_VAL_MIN_BIG && intValBig <= INT_VAL_MAX_BIG)
      val intValBeforeRound = intValBig.longValue()
      // fractional part must fit (FRAC_VAL_FROM_BIG, FRAC_VAL_UNTIL_BIG)
      val fracValBig = ((value - intValBig) * FRAC_VAL_UNTIL_BIG).setScale(0, BigDecimal.RoundingMode.DOWN)
      require(fracValBig >= FRAC_VAL_FROM_BIG && fracValBig <= FRAC_VAL_UNTIL_BIG)
      val fracValRounded = fracValBig.longValue
      // treat rounding and overflow
      val intVal = if (fracValRounded < FRAC_VAL_UNTIL) intValBeforeRound
      else if (intValBeforeRound > 0 && intValBeforeRound < INT_VAL_MAX) intValBeforeRound + 1
      else if (intValBeforeRound < 0 && intValBeforeRound > INT_VAL_MIN) intValBeforeRound - 1
      else throw new IllegalArgumentException("Overflow: " + (intValBeforeRound, fracValRounded))
      val fracVal = if (fracValRounded < FRAC_VAL_UNTIL) fracValRounded else 0
      (intVal, fracVal)
    // Precision too low: re-run with the module's MathContext applied.
    } else fp(BigDecimal(value.bigDecimal, MC))
  }
  def fp(value: JBigDecimal): (Long, Long) = fp(BigDecimal(value, MC))
  def fp(value: String): (Long, Long) = fp(BigDecimal(value, MC))
  // Double variant of fp: same contract as the BigDecimal overload, but
  // rejects NaN and infinities up front.
  def fp(value: Double): (Long, Long) = {
    // not NaN, not Infinity
    require((!value.isNaN) && (!value.isInfinity))
    // integral part must fit Long
    val intValDouble = if (value > 0.0) Math.floor(value) else if (value < 0.0) Math.ceil(value) else 0.0
    require(intValDouble >= INT_VAL_MIN_DOUBLE && intValDouble <= INT_VAL_MAX_DOUBLE)
    val intValBeforeRound = intValDouble.toLong
    import scala.Predef
    // NOTE(review): the `import scala.Predef` above and the boxing through
    // `double2Double(...)` below look like leftovers; they do not change
    // behavior but could be simplified to a plain .toLong.
    // fractional part must fit (FRAC_VAL_FROM_DOUBLE, FRAC_VAL_UNTIL_DOUBLE)
    val fracValDouble = (value - intValDouble) * FRAC_VAL_UNTIL_DOUBLE
    require(fracValDouble > FRAC_VAL_FROM_DOUBLE && fracValDouble < FRAC_VAL_UNTIL_DOUBLE)
    val fracValRounded = double2Double(if (fracValDouble > 0.0) Math.floor(fracValDouble)
    else if (fracValDouble < 0.0) Math.ceil(fracValDouble) else 0.0).longValue()
    // treat rounding and overflow
    val intVal = if (fracValRounded < FRAC_VAL_UNTIL) intValBeforeRound
    else if (intValBeforeRound > 0 && intValBeforeRound < INT_VAL_MAX) intValBeforeRound + 1
    else if (intValBeforeRound < 0 && intValBeforeRound > INT_VAL_MIN) intValBeforeRound - 1
    else throw new IllegalArgumentException("Overflow: " + (intValBeforeRound, fracValRounded))
    val fracVal = if (fracValRounded < FRAC_VAL_UNTIL) fracValRounded
    else 0
    (intVal, fracVal)
  }
  // Stable integer ids for rounding modes plus a mapping to the matching
  // java.math.RoundingMode constants; presumably used when (de)serializing the
  // packed `format` value of the Format trait — confirm against Currency.
  object Format {
    protected[money] val roundingMode_id = {
      Map[RoundingMode, Int](
        HALF_UP -> 0
        ,HALF_EVEN -> 1
        ,UNNECESSARY -> 2
        ,HALF_DOWN -> 3
        ,UP -> 4
        ,DOWN -> 5
        ,CEILING -> 6
        ,FLOOR -> 7
      )
    }
    // Inverse lookup of roundingMode_id.
    protected[money] val id_roundingMode: Map[Int, RoundingMode] =
      roundingMode_id.map(el => (el._2, el._1))
    protected[money] val roundingMode_jRoundingMode = {
      Map[RoundingMode, JRoundingMode](
        HALF_UP -> JHALF_UP
        ,HALF_EVEN -> JHALF_EVEN
        ,UNNECESSARY -> JUNNECESSARY
        ,HALF_DOWN -> JHALF_DOWN
        ,UP -> JUP
        ,DOWN -> JDOWN
        ,CEILING -> JCEILING
        ,FLOOR -> JFLOOR
      )
    }
  }
  // Formatting contract mixed into FixedPoint: display scale and rounding
  // mode, with an implementation-defined packed `format` integer.
  trait Format {
    protected def format: Int
    def scale: Int
    def roundingMode: RoundingMode
    def roundingModeJava: JRoundingMode
  }
  // Implicit widening conversions into the concrete Currency representation.
  implicit def int2FixedPoint(i: Int): FixedPoint = { new Currency(i.toLong) }
  implicit def long2FixedPoint(l: Long): FixedPoint = { new Currency(l) }
  implicit def jBigDecimal2FixedPoint(j: JBigDecimal): FixedPoint = { new Currency(j) }
  implicit def bigDecimal2FixedPoint(b: BigDecimal): FixedPoint = { new Currency(b) }
  implicit def double2FixedPoint(d: Double): FixedPoint = { new Currency(d) }
}
import score.money.FixedPoint.{long2FixedPoint => _, _}
abstract class FixedPoint protected (intArg: Long, fracArg: Long)
  extends /*AnyRef*/ ScalaNumber with Ordered[FixedPoint] with Format with ScalaNumericConversions with Serializable {
  // Invariants: the scaled fraction lies strictly inside
  // (FRAC_VAL_FROM, FRAC_VAL_UNTIL), and the integral and fractional parts
  // never carry opposite signs.
  require(fracArg > FRAC_VAL_FROM && fracArg < FRAC_VAL_UNTIL)
  require((intArg >= 0 && fracArg >= 0) || (intArg <= 0 && fracArg <= 0))
  // pack integral and fractional parts inside: int0, int1, frac0, frac1
  // (*0 holds the low 32 bits, *1 the high 32 bits of the respective Long)
  val int0 = intArg.toInt
  val int1 = (intArg >>> 32).toInt
  val frac0 = fracArg.toInt
  val frac1 = (fracArg >>> 32).toInt
  // Sign of the whole value. Because both parts share a sign (see require
  // above), OR-ing the two part signums yields -1, 0 or 1.
  def signum: Int = {
    ((int1.toLong << 32) | (int0.toLong & 0x00000000ffffffffL)).signum |
    ((frac1.toLong << 32) | (frac0.toLong & 0x00000000ffffffffL)).signum
  }
  // Convenience constructor taking the (integral, fractional) pair produced by
  // FixedPoint.fp.
  protected def this(intFracPair: (Long, Long)) =
    this(intFracPair._1, intFracPair._2)
  // Unrounded decimal rendering with trailing fraction zeros stripped.
  // NOTE(review): zero-padding uses `scale`; if scale can be smaller than the
  // fraction's full digit width this padding goes negative — confirm that
  // callers only reach this with scale == FRAC_LEN_MAX (see toString below).
  def toValueString = {
    val int = ((int1.toLong << 32) | (int0.toLong & 0x00000000ffffffffL)).toString
    val frac = ((frac1.toLong << 32) | (frac0.toLong & 0x00000000ffffffffL)).abs.toString
    // A negative value with a zero integral part still needs a leading "-".
    (if (signum < 0 && (! int.startsWith("-"))) "-" else "") +
    (if (frac == "0") int
    else {
      val withZeroes = int + "." + "0" * (scale - frac.length) + frac
      withZeroes.substring(0, withZeroes.findLastIndexOf(_ != '0') + 1)
    }
    )
  }
  // Rounded, fixed-width rendering honoring `scale` and `roundingMode`;
  // always shows exactly `scale` fraction digits (right-padded with zeros).
  override def toString = {
    val rounded = if (scale == FRAC_LEN_MAX) toValueString
    else BigDecimal(toValueString, MC).bigDecimal.setScale(scale, roundingModeJava).toPlainString
    if (scale == 0) rounded
    else {
      val dotPos = rounded.indexOf('.')
      if (dotPos != -1) rounded + "0" * ((scale - (rounded.length - (dotPos + 1))) max 0)
      else rounded + "." + "0" * scale
    }
  }
  def canEqual(other: Any): Boolean =
    other.isInstanceOf[FixedPoint] ||
    other.isInstanceOf[BigDecimal] ||
    other.isInstanceOf[Double] ||
    other.isInstanceOf[Float] ||
    other.isInstanceOf[Int] ||
    other.isInstanceOf[Long] ||
    other.isInstanceOf[Short] ||
    other.isInstanceOf[Byte] ||
    other.isInstanceOf[Char]
  // NOTE(review): comparison against a BigDecimal delegates to
  // underlying.equals, which is not symmetric with BigDecimal.equals(this
  // FixedPoint); confirm this asymmetry is acceptable.
  override def equals(other: Any): Boolean =
    other match {
      case that: FixedPoint =>
        (that canEqual this) &&
        (int0 == that.int0) && (frac1 == that.frac1) &&
        (int1 == that.int1) && (frac0 == that.frac0) &&
        (format == that.format)
      case that: BigDecimal =>
        underlying.equals(that)
      case _ => unifiedPrimitiveEquals(other)
    }
  // Mixes the four packed words plus the format id; consistent with the
  // FixedPoint branch of equals above.
  override def hashCode = {
    var hash = 37 + (int0 ^ int1)
    hash = (hash << 5) + (hash << 3) + hash
    hash += frac1 ^ frac0
    hash = (hash << 5) + (hash << 3) + hash
    hash += format
    hash
  }
  // Total order: integral parts first, then the scaled fractions.
  override def compare(that: FixedPoint) = {
    val intThis = (int1.toLong << 32) | (int0.toLong & 0x00000000ffffffffL)
    val intThat = (that.int1.toLong << 32) | (that.int0.toLong & 0x00000000ffffffffL)
    if (intThis < intThat) -1
    else if (intThis > intThat) 1
    else {
      val fracThis = (frac1.toLong << 32) | (frac0.toLong & 0x00000000ffffffffL)
      val fracThat = (that.frac1.toLong << 32) | (that.frac0.toLong & 0x00000000ffffffffL)
      if (fracThis < fracThat) -1
      else if (fracThis > fracThat) 1
      else 0
    }
  }
  /** ScalaNumber
   */
  // Boxed representation used by the numeric conversions below.
  def underlying = BigDecimal(toString, MC)
  protected def isWhole() = (((frac1.toLong << 32) | (frac0.toLong & 0x00000000ffffffffL)) == 0L)
  /** ScalaNumericConversions
   */
  // Note: intValue truncates to the low 32 bits of the integral part.
  override def intValue = int0
  override def longValue = ((int1.toLong << 32) | (int0.toLong & 0x00000000ffffffffL))
  override def floatValue = underlying.toFloat
  override def doubleValue = underlying.toDouble
  override def toByte = intValue.toByte
  override def toShort = intValue.toShort
  override def toInt = intValue
  override def toLong = longValue
  override def toFloat = floatValue
  override def toDouble = doubleValue
}
| idonnie/SCore | src/score/money/FixedPoint.scala | Scala | mit | 10,824 |
package io.backchat.scapulet.stanza
import org.specs2.Specification
class FeatureSpec extends Specification {
  // specs2 acceptance-style specification: `is` declares the full fragment
  // tree, with `^` sequencing fragments, `!` attaching example bodies, and
  // `bt`/`end` controlling the outline nesting.
  def is =
    "A feature should" ^
      "when extracting" ^
      "get the feature name for a valid stanza" ! {
        Feature.unapply(<feature var="hello world"/>) must beSome(("hello world"))
      } ^
      "not match when the stanza is not a feature with a name" ! {
        Feature.unapply(<feature/>) must beNone
      } ^ bt ^
      "when generating" ^
      "throw an exception when the name is null" ! {
        Feature(null) must throwA[Exception]
      } ^
      "throw an exception when the name is blank" ! {
        Feature(" ") must throwA[Exception]
      } ^
      "generate a nodeseq when the name is provided" ! {
        Feature("hello world").toXml must ==/(<feature var="hello world"/>)
      } ^
      end
}
} | backchatio/scapulet | src/test/scala/io/backchat/scapulet/stanza/FeatureSpec.scala | Scala | bsd-3-clause | 846 |
package org.jetbrains.plugins.scala.lang.scaladoc.psi.api
import org.jetbrains.plugins.scala.lang.psi.api.ScalaPsiElement
/** PSI element for a single item inside a Scaladoc list. */
trait ScDocListItem extends ScalaPsiElement {
  /** @return element containing item type text e.g. "-", "1.", "i." */
  def headToken: ScPsiDocToken
} | JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/scaladoc/psi/api/ScDocListItem.scala | Scala | apache-2.0 | 275 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import com.esotericsoftware.kryo.{Kryo, KryoSerializable}
import com.esotericsoftware.kryo.io.{Input, Output}
import com.google.common.cache.CacheBuilder
import java.sql.SQLException
import java.util.concurrent.TimeUnit
import org.apache.spark.{broadcast, Partition, SparkContext, TaskContext}
import org.apache.spark.rdd.{RDD, ZippedPartitionsBaseRDD, ZippedPartitionsPartition}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.plans.physical.Partitioning
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.aggregate.HashAggregateExec
import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, SortMergeJoinExec}
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.Utils
/**
* An interface for those physical operators that support codegen.
*/
trait CodegenSupport extends SparkPlan {
  /** Prefix used in the current operator's variable names. */
  private def variablePrefix: String = this match {
    case _: HashAggregateExec => "agg"
    case _: BroadcastHashJoinExec => "bhj"
    case _: SortMergeJoinExec => "smj"
    case _: RDDScanExec => "rdd"
    case _: DataSourceScanExec => "scan"
    case _ => nodeName.toLowerCase
  }
  /**
   * Creates a metric using the specified name.
   *
   * @return name of the variable representing the metric
   */
  def metricTerm(ctx: CodegenContext, name: String): String = {
    ctx.addReferenceObj(name, longMetric(name))
  }
  /**
   * Whether this SparkPlan support whole stage codegen or not.
   */
  def supportCodegen: Boolean = true
  /**
   * Which SparkPlan is calling produce() of this one. It's itself for the first SparkPlan.
   */
  protected var parent: CodegenSupport = null
  /**
   * Returns all the RDDs of InternalRow which generates the input rows.
   *
   * Note: right now we support up to two RDDs.
   */
  def inputRDDs(): Seq[RDD[InternalRow]]
  /**
   * Returns Java source code to process the rows from input RDD.
   * Side effects: records `parent` and switches the codegen context's
   * fresh-name prefix to this operator's prefix before delegating to doProduce().
   */
  final def produce(ctx: CodegenContext, parent: CodegenSupport): String = executeQuery {
    this.parent = parent
    ctx.freshNamePrefix = variablePrefix
    s"""
       |${ctx.registerComment(s"PRODUCE: ${this.simpleString}")}
       |${doProduce(ctx)}
     """.stripMargin
  }
  /**
   * Generate the Java source code to process, should be overridden by subclass to support codegen.
   *
   * doProduce() usually generate the framework, for example, aggregation could generate this:
   *
   *   if (!initialized) {
   *     # create a hash map, then build the aggregation hash map
   *     # call child.produce()
   *     initialized = true;
   *   }
   *   while (hashmap.hasNext()) {
   *     row = hashmap.next();
   *     # build the aggregation results
   *     # create variables for results
   *     # call consume(), which will call parent.doConsume()
   *      if (shouldStop()) return;
   *   }
   */
  protected def doProduce(ctx: CodegenContext): String
  /**
   * Consume the generated columns or row from current SparkPlan, call its parent's `doConsume()`.
   * Exactly one of `outputVars` (column ExprCodes) or `row` (a row variable name)
   * describes this operator's output; the other is null.
   */
  final def consume(ctx: CodegenContext, outputVars: Seq[ExprCode], row: String = null): String = {
    // Bind this operator's output either from the row variable or from the
    // already-computed column ExprCodes.
    val inputVars =
      if (row != null) {
        ctx.currentVars = null
        ctx.INPUT_ROW = row
        output.zipWithIndex.map { case (attr, i) =>
          BoundReference(i, attr.dataType, attr.nullable).genCode(ctx)
        }
      } else {
        assert(outputVars != null)
        assert(outputVars.length == output.length)
        // outputVars will be used to generate the code for UnsafeRow, so we should copy them
        outputVars.map(_.copy())
      }
    // rowVar is an ExprCode whose value is a row usable by consumers that want
    // whole rows (e.g. the terminal append in WholeStageCodegenExec.doConsume).
    val rowVar = if (row != null) {
      ExprCode("", "false", row)
    } else {
      if (outputVars.nonEmpty) {
        val colExprs = output.zipWithIndex.map { case (attr, i) =>
          BoundReference(i, attr.dataType, attr.nullable)
        }
        val evaluateInputs = evaluateVariables(outputVars)
        // generate the code to create a UnsafeRow
        ctx.INPUT_ROW = row
        ctx.currentVars = outputVars
        val ev = GenerateUnsafeProjection.createCode(ctx, colExprs, false)
        val code = s"""
          |$evaluateInputs
          |${ev.code.trim}
         """.stripMargin.trim
        ExprCode(code, "false", ev.value)
      } else {
        // There are no columns
        ExprCode("", "false", "unsafeRow")
      }
    }
    // Switch the fresh-name prefix to the consuming operator before generating
    // its code, so its variables carry its own prefix.
    ctx.freshNamePrefix = parent.variablePrefix
    val evaluated = evaluateRequiredVariables(output, inputVars, parent.usedInputs)
    s"""
       |${ctx.registerComment(s"CONSUME: ${parent.simpleString}")}
       |$evaluated
       |${parent.doConsume(ctx, inputVars, rowVar)}
     """.stripMargin
  }
  /**
   * Returns source code to evaluate all the variables, and clear the code of them, to prevent
   * them to be evaluated twice.
   */
  protected def evaluateVariables(variables: Seq[ExprCode]): String = {
    val evaluate = variables.filter(_.code != "").map(_.code.trim).mkString("\\n")
    // Mutates the ExprCodes in place: once emitted, their code must not be emitted again.
    variables.foreach(_.code = "")
    evaluate
  }
  /**
   * Returns source code to evaluate the variables for required attributes, and clear the code
   * of evaluated variables, to prevent them to be evaluated twice.
   */
  protected def evaluateRequiredVariables(
      attributes: Seq[Attribute],
      variables: Seq[ExprCode],
      required: AttributeSet): String = {
    val evaluateVars = new StringBuilder
    variables.zipWithIndex.foreach { case (ev, i) =>
      if (ev.code != "" && required.contains(attributes(i))) {
        evaluateVars.append(ev.code.trim + "\\n")
        ev.code = ""
      }
    }
    evaluateVars.toString()
  }
  /**
   * The subset of inputSet those should be evaluated before this plan.
   *
   * We will use this to insert some code to access those columns that are actually used by current
   * plan before calling doConsume().
   */
  def usedInputs: AttributeSet = references
  /**
   * Generate the Java source code to process the rows from child SparkPlan.
   *
   * This should be override by subclass to support codegen.
   *
   * For example, Filter will generate the code like this:
   *
   *   # code to evaluate the predicate expression, result is isNull1 and value2
   *   if (isNull1 || !value2) continue;
   *   # call consume(), which will call parent.doConsume()
   *
   * Note: A plan can either consume the rows as UnsafeRow (row), or a list of variables (input).
   */
  def doConsume(ctx: CodegenContext, input: Seq[ExprCode], row: ExprCode): String = {
    throw new UnsupportedOperationException
  }
}
/**
* InputAdapter is used to hide a SparkPlan from a subtree that support codegen.
*
* This is the leaf node of a tree with WholeStageCodegen that is used to generate code
* that consumes an RDD iterator of InternalRow.
*/
case class InputAdapter(child: SparkPlan) extends UnaryExecNode with CodegenSupport {
  // Pass-through: this node adds no semantics of its own.
  override def output: Seq[Attribute] = child.output
  override def outputPartitioning: Partitioning = child.outputPartitioning
  override def outputOrdering: Seq[SortOrder] = child.outputOrdering
  override def doExecute(): RDD[InternalRow] = {
    child.execute()
  }
  override def doExecuteBroadcast[T](): broadcast.Broadcast[T] = {
    child.doExecuteBroadcast()
  }
  override def inputRDDs(): Seq[RDD[InternalRow]] = {
    child.execute() :: Nil
  }
  /** Generates a pull loop over `inputs[0]`, consuming one InternalRow per iteration. */
  override def doProduce(ctx: CodegenContext): String = {
    val input = ctx.freshName("input")
    // Right now, InputAdapter is only used when there is one input RDD.
    ctx.addMutableState("scala.collection.Iterator", input, s"$input = inputs[0];")
    val row = ctx.freshName("row")
    s"""
       | while ($input.hasNext() && !stopEarly()) {
       |   InternalRow $row = (InternalRow) $input.next();
       |   ${consume(ctx, null, row).trim}
       |   if (shouldStop()) return;
       | }
     """.stripMargin
  }
  // Hide this adapter in plan-tree output: print only the child, with no prefix marker.
  override def generateTreeString(
      depth: Int,
      lastChildren: Seq[Boolean],
      builder: StringBuilder,
      verbose: Boolean,
      prefix: String = ""): StringBuilder = {
    child.generateTreeString(depth, lastChildren, builder, verbose, "")
  }
}
object WholeStageCodegenExec {
  // Metric key for the pipeline wall-clock duration.
  val PIPELINE_DURATION_METRIC = "duration"
  // Whether to dump the generated source when execution throws; controlled by
  // the "spark.sql.codegen.dump" system property and enabled by default.
  private[sql] val dumpGenCodeForException: Boolean =
    System.getProperty("spark.sql.codegen.dump", "true").toBoolean
  // De-duplication cache: a given generated source is dumped at most once per
  // 60 seconds (max 20 distinct sources tracked) to avoid log flooding.
  private[sql] lazy val dumpedGenCodes = CacheBuilder.newBuilder().maximumSize(20)
      .expireAfterWrite(60, TimeUnit.SECONDS).build[CodeAndComment, java.lang.Boolean]()
}
/**
 * WholeStageCodegen compiles a subtree of plans that support codegen together into a single Java
* function.
*
 * Here is the call graph used to generate Java source (plan A supports codegen, but plan B does not):
*
* WholeStageCodegen Plan A FakeInput Plan B
* =========================================================================
*
* -> execute()
* |
* doExecute() ---------> inputRDDs() -------> inputRDDs() ------> execute()
* |
* +-----------------> produce()
* |
* doProduce() -------> produce()
* |
* doProduce()
* |
* doConsume() <--------- consume()
* |
* doConsume() <-------- consume()
*
* SparkPlan A should override doProduce() and doConsume().
*
* doCodeGen() will create a CodeGenContext, which will hold a list of variables for input,
* used to generated code for BoundReference.
*/
case class WholeStageCodegenExec(child: SparkPlan) extends UnaryExecNode with CodegenSupport {
  override def output: Seq[Attribute] = child.output
  override def outputPartitioning: Partitioning = child.outputPartitioning
  override def outputOrdering: Seq[SortOrder] = child.outputOrdering
  // Wall-clock time spent in the compiled pipeline, reported as "pipelineTime".
  override lazy val metrics = Map(
    "pipelineTime" -> SQLMetrics.createTimingMetric(sparkContext,
      WholeStageCodegenExec.PIPELINE_DURATION_METRIC))
  /**
   * Generates code for this subtree.
   *
   * @return the tuple of the codegen context and the actual generated source.
   */
  def doCodeGen(): (CodegenContext, CodeAndComment) = {
    val ctx = new CodegenContext
    val code = child.asInstanceOf[CodegenSupport].produce(ctx, this)
    // Template for the generated class; the body produced by the child subtree
    // is spliced into processNext(). This string must remain valid Java.
    val source = s"""
      public Object generate(Object[] references) {
        return new GeneratedIterator(references);
      }
      ${ctx.registerComment(s"""Codegend pipeline for\\n${child.treeString.trim}""")}
      final class GeneratedIterator extends org.apache.spark.sql.execution.BufferedRowIterator {
        private Object[] references;
        private scala.collection.Iterator[] inputs;
        ${ctx.declareMutableStates()}
        public GeneratedIterator(Object[] references) {
          this.references = references;
        }
        public void init(int index, scala.collection.Iterator[] inputs) {
          partitionIndex = index;
          this.inputs = inputs;
          ${ctx.initMutableStates()}
          ${ctx.initPartition()}
        }
        ${ctx.declareAddedFunctions()}
        protected void processNext() throws java.io.IOException {
          ${code.trim}
        }
      }
      """.trim
    // try to compile, helpful for debug
    val cleanedSource = CodeFormatter.stripOverlappingComments(
      new CodeAndComment(CodeFormatter.stripExtraNewLines(source), ctx.getPlaceHolderToComments()))
    logDebug(s"\\n${CodeFormatter.format(cleanedSource)}")
    (ctx, cleanedSource)
  }
  override def doExecute(): RDD[InternalRow] = {
    val (ctx, cleanedSource) = doCodeGen()
    // try to compile and fallback if it failed
    try {
      CodeGenerator.compile(cleanedSource)
    } catch {
      case e: Exception if !Utils.isTesting && sqlContext.conf.wholeStageFallback =>
        // We should have already seen the error message by now
        logWarning(s"Whole-stage codegen disabled for this plan:\\n $treeString")
        // Fall back to interpreted execution of the child plan.
        return child.execute()
    }
    val references = ctx.references.toArray
    val durationMs = longMetric("pipelineTime")
    val rdds = child.asInstanceOf[CodegenSupport].inputRDDs()
    WholeStageCodegenRDD(sqlContext.sparkContext, cleanedSource,
      references, durationMs, rdds)
  }
  // This node is the root of a codegen stage; it never produces into, nor
  // consumes input RDDs for, another codegen parent.
  override def inputRDDs(): Seq[RDD[InternalRow]] = {
    throw new UnsupportedOperationException
  }
  override def doProduce(ctx: CodegenContext): String = {
    throw new UnsupportedOperationException
  }
  /** Terminal consume: emit the (optionally copied) row into the iterator's buffer. */
  override def doConsume(ctx: CodegenContext, input: Seq[ExprCode], row: ExprCode): String = {
    val doCopy = if (ctx.copyResult) {
      ".copy()"
    } else {
      ""
    }
    s"""
       |${row.code}
       |append(${row.value}$doCopy);
     """.stripMargin.trim
  }
  // Mark the codegened subtree with "*" in plan-tree output.
  override def generateTreeString(
      depth: Int,
      lastChildren: Seq[Boolean],
      builder: StringBuilder,
      verbose: Boolean,
      prefix: String = ""): StringBuilder = {
    child.generateTreeString(depth, lastChildren, builder, verbose, "*")
  }
}
/**
* Find the chained plans that support codegen, collapse them together as WholeStageCodegen.
*/
case class CollapseCodegenStages(conf: SQLConf) extends Rule[SparkPlan] {
  // Whether a single expression can be compiled. Note the case order: a leaf
  // expression is accepted even if it also mixes in CodegenFallback.
  private def supportCodegen(e: Expression): Boolean = e match {
    case e: LeafExpression => true
    // CodegenFallback requires the input to be an InternalRow
    case e: CodegenFallback => false
    case _ => true
  }
  // Counts leaf fields recursively so wide nested schemas are measured by
  // their true width, not their top-level column count.
  private def numOfNestedFields(dataType: DataType): Int = dataType match {
    case dt: StructType => dt.fields.map(f => numOfNestedFields(f.dataType)).sum
    case m: MapType => numOfNestedFields(m.keyType) + numOfNestedFields(m.valueType)
    case a: ArrayType => numOfNestedFields(a.elementType)
    case u: UserDefinedType[_] => numOfNestedFields(u.sqlType)
    case _ => 1
  }
  // Whether a plan node may participate in a codegen stage: it must opt in,
  // none of its expressions may fall back to interpretation, and its input and
  // output schemas must stay under the configured field limit (huge generated
  // classes would fail or perform poorly).
  private def supportCodegen(plan: SparkPlan): Boolean = plan match {
    case plan: CodegenSupport if plan.supportCodegen =>
      val willFallback = plan.expressions.exists(_.find(e => !supportCodegen(e)).isDefined)
      // the generated code will be huge if there are too many columns
      val hasTooManyOutputFields =
        numOfNestedFields(plan.schema) > conf.wholeStageMaxNumFields
      val hasTooManyInputFields =
        plan.children.map(p => numOfNestedFields(p.schema)).exists(_ > conf.wholeStageMaxNumFields)
      !willFallback && !hasTooManyOutputFields && !hasTooManyInputFields
    case _ => false
  }
  /**
   * Inserts an InputAdapter on top of those that do not support codegen.
   */
  private def insertInputAdapter(plan: SparkPlan): SparkPlan = plan match {
    case j @ SortMergeJoinExec(_, _, _, _, left, right) if j.supportCodegen =>
      // The children of SortMergeJoin should do codegen separately.
      j.copy(left = InputAdapter(insertWholeStageCodegen(left)),
        right = InputAdapter(insertWholeStageCodegen(right)))
    case p if !supportCodegen(p) =>
      // collapse them recursively
      InputAdapter(insertWholeStageCodegen(p))
    case p =>
      p.withNewChildren(p.children.map(insertInputAdapter))
  }
  /**
   * Inserts a WholeStageCodegen on top of those that support codegen.
   */
  private def insertWholeStageCodegen(plan: SparkPlan): SparkPlan = plan match {
    // For operators that will output domain object, do not insert WholeStageCodegen for it as
    // domain object can not be written into unsafe row.
    case plan if plan.output.length == 1 &&
        plan.output.head.dataType.isInstanceOf[ObjectType] =>
      plan.withNewChildren(plan.children.map(insertWholeStageCodegen))
    case plan: CodegenSupport => if (supportCodegen(plan)) {
      WholeStageCodegenExec(insertInputAdapter(plan))
    } else {
      plan.withNewChildren(plan.children.map(insertInputAdapter))
    }
    case other =>
      other.withNewChildren(other.children.map(insertWholeStageCodegen))
  }
  // Entry point: a no-op when whole-stage codegen is disabled by configuration.
  def apply(plan: SparkPlan): SparkPlan = {
    if (conf.wholeStageEnabled) {
      insertWholeStageCodegen(plan)
    } else {
      plan
    }
  }
}
/**
 * RDD that compiles the generated source on each executor and streams rows
 * through the resulting BufferedRowIterator. The `var` fields exist so the
 * custom Kryo read/write below can repopulate an instance; `write` and `read`
 * must stay in exact field-order lockstep with each other.
 */
case class WholeStageCodegenRDD(@transient sc: SparkContext, var source: CodeAndComment,
    var references: Array[Any], var durationMs: SQLMetric,
    inputRDDs: Seq[RDD[InternalRow]])
    extends ZippedPartitionsBaseRDD[InternalRow](sc, inputRDDs)
        with Serializable with KryoSerializable {
  // PooledKryoSerializer.serializer refers this class using productIterator
  // Any change to this class should be reflected there.
  override def getPartitions: Array[Partition] = {
    // Single-input stages reuse the child's partitions directly instead of zipping.
    if (rdds.length == 1) rdds.head.partitions
    else super.getPartitions
  }
  override def getPreferredLocations(s: Partition): Seq[String] = {
    if (rdds.length == 1) rdds.head.preferredLocations(s)
    else s.asInstanceOf[ZippedPartitionsPartition].preferredLocations
  }
  override def compute(split: Partition,
      context: TaskContext): Iterator[InternalRow] = {
    new Iterator[InternalRow] {
      private[this] var iter = computeInternal(split, context)
      override def hasNext: Boolean = try {
        try {
          iter.hasNext
        } catch {
          // A ClassCastException here indicates a stale compiled class
          // (presumably from the code cache); invalidate and recompile once.
          case _: ClassCastException =>
            logInfo(s"ClassCastException, hence recompiling")
            CodeGenerator.invalidate(source)
            iter = computeInternal(split, context)
            iter.hasNext
        }
      } catch {
        // Dump the generated source at most once per window, then rethrow.
        case e: Throwable =>
          if (WholeStageCodegenExec.dumpGenCodeForException && testNotLoggedAndSet(source)) {
            logFormattedError(e, s"\\n${CodeFormatter.format(source)}")
          }
          throw e
      }
      override def next(): InternalRow = try {
        iter.next()
      } catch {
        case e: Throwable =>
          if (WholeStageCodegenExec.dumpGenCodeForException && testNotLoggedAndSet(source)) {
            logFormattedError(e, s"\\n${CodeFormatter.format(source)}")
          }
          throw e
      }
    }
  }
  // Returns true exactly once per source per cache window, so a failing
  // pipeline's source is dumped to the log only once.
  private def testNotLoggedAndSet(source: CodeAndComment): Boolean = {
    if (WholeStageCodegenExec.dumpedGenCodes.getIfPresent(source) eq null) {
      WholeStageCodegenExec.dumpedGenCodes.put(source, java.lang.Boolean.TRUE)
      true
    } else false
  }
  // Logs the generated source with the exception, unless any exception in the
  // cause chain is a low/out-of-memory condition (dumping the large source
  // would make a memory situation worse).
  def logFormattedError(e: Throwable, source: String): Unit = {
    var cause = e
    while (cause ne null) {
      // Don't log the code when the exception is out of memory
      cause match {
        case e: SQLException if e.getSQLState == "XCL54.T" =>
          return
        case e: RuntimeException if e.getClass.getName.contains("LowMemoryException") =>
          return
        case _ =>
      }
      cause = cause.getCause
    }
    logError(s"\\nFailed with exception $e:$source")
  }
  // Compiles (or fetches from cache) the generated class, wires up the input
  // iterators, and wraps the BufferedRowIterator as a plain row iterator that
  // accumulates the pipeline duration metric on exhaustion.
  def computeInternal(split: Partition,
      context: TaskContext): Iterator[InternalRow] = {
    val clazz = CodeGenerator.compile(source)
    val buffer = clazz.generate(references).asInstanceOf[BufferedRowIterator]
    if (rdds.length == 1) {
      buffer.init(split.index, Array(rdds.head.iterator(split, context)
          .asInstanceOf[Iterator[InternalRow]]))
    } else {
      val zippedPartition = split.asInstanceOf[ZippedPartitionsPartition]
      val partitions = zippedPartition.partitions
      val iterators = new Array[Iterator[InternalRow]](partitions.length)
      for (i <- partitions.indices) {
        iterators(i) = rdds(i).iterator(partitions(i), context)
            .asInstanceOf[Iterator[InternalRow]]
      }
      buffer.init(zippedPartition.index, iterators)
    }
    new Iterator[InternalRow] {
      override def hasNext: Boolean = {
        val v = buffer.hasNext
        if (!v) durationMs += buffer.durationMs()
        v
      }
      override def next: InternalRow = buffer.next()
    }
  }
  // Custom Kryo serialization. Field order here defines the wire format and
  // must match read() below exactly.
  override def write(kryo: Kryo, output: Output): Unit = {
    // PooledKryoSerializer.serializer refers this class using productIterator
    // Any change to this class should be reflected there.
    output.writeInt(_id)
    // write CodeAndComment
    output.writeInt(source.hashCode())
    output.writeString(source.body)
    val comment = source.comment
    output.writeInt(comment.size)
    for ((k, v) <- comment) {
      output.writeString(k)
      output.writeString(v)
    }
    val refsLen = if (references != null) references.length else 0
    output.writeVarInt(refsLen, true)
    var i = 0
    while (i < refsLen) {
      kryo.writeClassAndObject(output, references(i))
      i += 1
    }
    durationMs.write(kryo, output)
    output.writeVarInt(rdds.length, true)
    for (rdd <- rdds) {
      kryo.writeClassAndObject(output, rdd)
    }
  }
  // Mirror of write(): repopulates this instance field by field in the same order.
  override def read(kryo: Kryo, input: Input): Unit = {
    _id = input.readInt()
    // Deserialized copies are never persisted/checkpointed on their own.
    storageLevel = StorageLevel.NONE
    checkpointData = None
    val hash = input.readInt()
    val body = input.readString()
    var commentSize = input.readInt()
    val comment = new scala.collection.mutable.HashMap[String, String]()
    while (commentSize > 0) {
      val k = input.readString()
      val v = input.readString()
      comment.put(k, v)
      commentSize -= 1
    }
    source = new CodeAndComment(body, comment)
    // Restore the precomputed hash so the dumpedGenCodes cache keys match
    // across serialization (presumably a fork-specific field — see CodeAndComment).
    source.hash = hash
    val refsLen = input.readVarInt(true)
    if (refsLen > 0) {
      references = new Array[Any](refsLen)
      var i = 0
      while (i < refsLen) {
        references(i) = kryo.readClassAndObject(input)
        i += 1
      }
    } else {
      references = null
    }
    durationMs = new SQLMetric(null)
    durationMs.read(kryo, input)
    val rddsBuilder = IndexedSeq.newBuilder[RDD[InternalRow]]
    var rddsLen = input.readVarInt(true)
    while (rddsLen > 0) {
      rddsBuilder += kryo.readClassAndObject(input).asInstanceOf[RDD[InternalRow]]
      rddsLen -= 1
    }
    rdds = rddsBuilder.result()
  }
}
| SnappyDataInc/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/WholeStageCodegenExec.scala | Scala | apache-2.0 | 22,869 |
/**
* Created on: Feb 12, 2013
*/
package com.tubros.constraints.api.solver.error
import scalaz._
import com.tubros.constraints.api.VariableName
/**
* The '''MissingVariables'''
* [[com.tubros.constraints.api.solver.error.SolverError]] type is produced
* when an [[com.tubros.constraints.api.problem.Equation]] references a
* [[com.tubros.constraints.api.solver.Variable]] which does not exist in
* the CPS evaluation environment.
*
* @author svickers
*
*/
case class MissingVariables (
val missing : Set[VariableName]
)
extends SolverError
{
/// Class Imports
import scalaz.syntax.std.boolean._
/// Instance Properties
override lazy val message : String =
"Missing %s variable%s: %s".format (
missing.size,
(missing.size > 1) ? "s" | "",
missing
);
}
| osxhacker/smocs | smocs-api/src/main/scala/com/tubros/constraints/api/solver/error/MissingVariables.scala | Scala | bsd-3-clause | 792 |
package com.socrata.datacoordinator.secondary.feedback
import com.socrata.datacoordinator.id.UserColumnId
sealed abstract class RequestFailure {
def english: String
}
case object FailedToDiscoverDataCoordinator extends RequestFailure {
val english = "Failed to discover data-coordinator host and port"
}
case object DataCoordinatorBusy extends RequestFailure {
val english = "Data-coordinator responded with 409"
}
case object DatasetDoesNotExist extends RequestFailure {
val english = "Dataset does not exist"
}
case class UnexpectedError(reason: String, cause: Throwable) extends RequestFailure {
val english = s"Unexpected error in data-coordinator client: $reason"
}
sealed abstract class SchemaFailure {
def english: String
}
sealed abstract class UpdateSchemaFailure extends SchemaFailure
case class TargetColumnDoesNotExist(id: UserColumnId) extends UpdateSchemaFailure {
val english = s"Target column ${id.underlying} does not exist"
}
case class ColumnsDoNotExist(columns: Set[UserColumnId]) extends SchemaFailure {
val english = s"Columns ${columns.map(_.underlying)} do not exist"
}
| socrata-platform/data-coordinator | secondarylib-feedback/src/main/scala/com/socrata/datacoordinator/secondary/feedback/RequestFailure.scala | Scala | apache-2.0 | 1,120 |
// match~case を使う
import java.util.Date
object Pre06 {
def main(args: Array[String]) {
// 例1
val num = 1
val daiji = num match {
case 1 => "壱"
case 2 => "弐"
case _ => "参以上"
}
println(daiji)
// 例2
val date = new Date
val msg = date.getDate match {
case d if (d % 2 == 0) => "今日は偶数日です。"
case _ => "今日は奇数日です。"
}
println(msg)
}
} | mzkrelx/wakuwaku-scala1 | Pre06.scala | Scala | mit | 466 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package toplevel
package typedef
/**
* @author ilyas
*/
import javax.swing.Icon
import com.intellij.lang.ASTNode
import com.intellij.lang.java.JavaLanguage
import com.intellij.navigation._
import com.intellij.openapi.editor.colors._
import com.intellij.openapi.util.Iconable
import com.intellij.psi._
import com.intellij.psi.impl._
import com.intellij.psi.javadoc.PsiDocComment
import com.intellij.psi.stubs.StubElement
import com.intellij.psi.tree.IElementType
import com.intellij.psi.util.{PsiTreeUtil, PsiUtil}
import com.intellij.util.VisibilityIcons
import org.jetbrains.plugins.scala.conversion.JavaToScala
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.lexer._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.ScModifierList
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScBlock
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScToplevelElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.packaging._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.{ScExtendsBlock, ScTemplateBody, ScTemplateParents}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.JavaIdentifier
import org.jetbrains.plugins.scala.lang.psi.stubs.{ScMemberOrLocal, ScTemplateDefinitionStub}
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.api.TypeParameterType
import org.jetbrains.plugins.scala.lang.psi.types.result.{Failure, Success, TypeResult, TypingContext}
import org.jetbrains.plugins.scala.macroAnnotations.{Cached, ModCount}
import _root_.scala.collection.mutable.ArrayBuffer
import scala.annotation.tailrec
import scala.collection.Seq
import scala.reflect.NameTransformer
abstract class ScTypeDefinitionImpl protected (stub: StubElement[ScTemplateDefinition], nodeType: IElementType, node: ASTNode)
extends ScalaStubBasedElementImpl(stub, nodeType, node) with ScTypeDefinition with PsiClassFake {
override def hasTypeParameters: Boolean = typeParameters.nonEmpty
override def add(element: PsiElement): PsiElement = {
element match {
case member: PsiMember if member.getLanguage.isKindOf(JavaLanguage.INSTANCE) =>
val newMemberText = JavaToScala.convertPsiToText(member).trim()
val mem: Option[ScMember] = member match {
case method: PsiMethod =>
Some(ScalaPsiElementFactory.createMethodFromText(newMemberText, getManager))
case _ => None
}
mem match {
case Some(m) => addMember(m, None)
case _ => super.add(element)
}
case mem: ScMember => addMember(mem, None)
case _ => super.add(element)
}
}
override def getSuperTypes: Array[PsiClassType] = {
superTypes.flatMap {
case tp =>
val psiType = tp.toPsiType(getProject, getResolveScope)
psiType match {
case c: PsiClassType => Seq(c)
case _ => Seq.empty
}
}.toArray
}
override def isAnnotationType: Boolean = {
val annotation = ScalaPsiManager.instance(getProject).getCachedClass("scala.annotation.Annotation",getResolveScope, ScalaPsiManager.ClassCategory.TYPE)
if (annotation == null) return false
ScalaPsiManager.instance(getProject).cachedDeepIsInheritor(this, annotation)
}
def getType(ctx: TypingContext) = {
val parentClass: ScTemplateDefinition = containingClass
if (typeParameters.isEmpty) {
if (parentClass != null) {
Success(ScProjectionType(ScThisType(parentClass), this, superReference = false), Some(this))
} else {
Success(ScalaType.designator(this), Some(this))
}
} else {
if (parentClass != null) {
Success(ScParameterizedType(ScProjectionType(ScThisType(parentClass), this, superReference = false),
typeParameters.map(TypeParameterType(_))), Some(this))
} else {
Success(ScParameterizedType(ScalaType.designator(this),
typeParameters.map(TypeParameterType(_))), Some(this))
}
}
}
def getTypeWithProjections(ctx: TypingContext, thisProjections: Boolean = false): TypeResult[ScType] = {
def args = typeParameters.map(TypeParameterType(_))
def innerType = if (typeParameters.isEmpty) ScalaType.designator(this)
else ScParameterizedType(ScalaType.designator(this), args)
val parentClazz = containingClass
if (parentClazz != null) {
val tpe: ScType = if (!thisProjections) parentClazz.getTypeWithProjections(TypingContext.empty, thisProjections = false).
getOrElse(return Failure("Cannot resolve parent class", Some(this)))
else ScThisType(parentClazz)
val innerProjection = ScProjectionType(tpe, this, superReference = false)
Success(if (typeParameters.isEmpty) innerProjection
else ScParameterizedType(innerProjection, args), Some(this))
} else Success(innerType, Some(this))
}
override def getModifierList: ScModifierList = super[ScTypeDefinition].getModifierList
override def hasModifierProperty(name: String): Boolean = super[ScTypeDefinition].hasModifierProperty(name)
override def getNavigationElement = getContainingFile match {
case s: ScalaFileImpl if s.isCompiled => getSourceMirrorClass
case _ => this
}
private def hasSameScalaKind(other: PsiClass) = (this, other) match {
case (_: ScTrait, _: ScTrait)
| (_: ScObject, _: ScObject)
| (_: ScClass, _: ScClass) => true
case _ => false
}
def getSourceMirrorClass: PsiClass = {
val classParent = PsiTreeUtil.getParentOfType(this, classOf[ScTypeDefinition], true)
val name = this.name
if (classParent == null) {
val classes: Array[PsiClass] = getContainingFile.getNavigationElement match {
case o: ScalaFile => o.typeDefinitions.toArray
case o: PsiClassOwner => o.getClasses
}
val classesIterator = classes.iterator
while (classesIterator.hasNext) {
val c = classesIterator.next()
if (name == c.name && hasSameScalaKind(c)) return c
}
} else {
val parentSourceMirror = classParent.asInstanceOf[ScTypeDefinitionImpl].getSourceMirrorClass
parentSourceMirror match {
case td: ScTypeDefinitionImpl => for (i <- td.typeDefinitions if name == i.name && hasSameScalaKind(i))
return i
case _ => this
}
}
this
}
override def isLocal: Boolean = {
val stub: StubElement[_ <: PsiElement] = this match {
case st: ScalaStubBasedElementImpl[_] => st.getStub
case _ => null
}
stub match {
case memberOrLocal: ScMemberOrLocal =>
return memberOrLocal.isLocal
case _ =>
}
containingClass == null && PsiTreeUtil.getParentOfType(this, classOf[ScTemplateDefinition]) != null
}
def nameId: PsiElement = findChildByType[PsiElement](ScalaTokenTypes.tIDENTIFIER)
override def getTextOffset: Int = nameId.getTextRange.getStartOffset
override def getContainingClass: PsiClass = {
super[ScTypeDefinition].getContainingClass match {
case o: ScObject => o.fakeCompanionClassOrCompanionClass
case containingClass => containingClass
}
}
override final def getQualifiedName: String = {
val stub = getStub
if (stub != null) stub.asInstanceOf[ScTemplateDefinitionStub].javaQualName
else javaQualName()
}
@Cached(synchronized = false, ModCount.getBlockModificationCount, this)
private def javaQualName(): String = {
var res = qualifiedName(".", encodeName = true).split('.').map { s =>
if (s.startsWith("`") && s.endsWith("`") && s.length > 2) s.drop(1).dropRight(1)
else s
}.mkString(".")
this match {
case o: ScObject =>
if (o.isPackageObject) res = res + ".package$"
else res = res + "$"
case _ =>
}
res
}
override def qualifiedName: String = {
val stub = getStub
if (stub != null) stub.asInstanceOf[ScTemplateDefinitionStub].qualName
else qualName()
}
@Cached(synchronized = false, ModCount.getBlockModificationCount, this)
private def qualName(): String = qualifiedName(".")
override def getExtendsListTypes: Array[PsiClassType] = innerExtendsListTypes
override def getImplementsListTypes: Array[PsiClassType] = innerExtendsListTypes
def getTruncedQualifiedName: String = qualifiedName(".", trunced = true)
def getQualifiedNameForDebugger: String = {
containingClass match {
case td: ScTypeDefinition => td.getQualifiedNameForDebugger + "$" + transformName(encodeName = true, name)
case _ =>
if (this.isPackageObject) qualifiedName("", encodeName = true) + ".package"
else qualifiedName("$", encodeName = true)
}
}
protected def transformName(encodeName: Boolean, name: String): String = {
if (!encodeName) name
else {
val deticked =
if (name.startsWith("`") && name.endsWith("`") && name.length() > 1)
name.substring(1, name.length() - 1)
else name
NameTransformer.encode(deticked)
}
}
  /** Builds a qualified name for this definition.
    *
    * @param classSeparator separator between nested type names ("." or "$")
    * @param trunced        if true, stop (yield "") at the first enclosing class/trait
    * @param encodeName     if true, run each identifier through NameTransformer encoding
    */
  protected def qualifiedName(classSeparator: String, trunced: Boolean = false,
                              encodeName: Boolean = false): String = {
    // Returns prefix with convenient separator sep
    // Tail-recursive continuation-passing walk up the PSI tree.
    @tailrec
    def _packageName(e: PsiElement, sep: String, k: (String) => String): String = e.getContext match {
      // a `package` object itself contributes nothing to the prefix
      case o: ScObject if o.isPackageObject && o.name == "`package`" => _packageName(o, sep, k)
      case _: ScClass | _: ScTrait if trunced => k("")
      case t: ScTypeDefinition => _packageName(t, sep, (s) => {
        val name = t.name
        k(s + transformName(encodeName, name) + sep)
      })
      case p: ScPackaging => _packageName(p, ".", (s) => k(s + p.getPackageName + "."))
      // NOTE(review): `pn` is always "", so this branch always yields k("");
      // looks like leftover from removed ScalaFile package-name logic — confirm.
      case f: ScalaFile => val pn = ""; k(if (pn.length > 0) pn + "." else "")
      case _: PsiFile | null => k("")
      case _: ScBlock => k("")
      // transparent wrappers: walk through without contributing a segment
      case parent: ScTemplateBody => _packageName(parent, sep, k)
      case parent: ScExtendsBlock => _packageName(parent, sep, k)
      case parent: ScTemplateParents => _packageName(parent, sep, k)
      // NOTE(review): this fallback discards the accumulated continuation `k`
      // — verify that is intended for unknown parent elements.
      case parent => _packageName(parent, sep, identity)
    }
    val packageName = _packageName(this, classSeparator, identity)
    packageName + transformName(encodeName, name)
  }
  /** Structure-view / goto presentation: display name, qualifier location and icon. */
  override def getPresentation: ItemPresentation = {
    val presentableName = this match {
      // show a package object by its package's simple name instead of `package`
      case o: ScObject if o.isPackageObject && o.name == "`package`" =>
        val packageName = o.qualifiedName.stripSuffix(".`package`")
        val index = packageName.lastIndexOf('.')
        if (index < 0) packageName else packageName.substring(index + 1, packageName.length)
      case _ => name
    }

    new ItemPresentation() {
      def getPresentableText: String = presentableName
      def getTextAttributesKey: TextAttributesKey = null
      // qualifier in parentheses; an empty path means the default package
      def getLocationString: String = getPath match {
        case "" => "<default>"
        case p => '(' + p + ')'
      }
      override def getIcon(open: Boolean) = ScTypeDefinitionImpl.this.getIcon(0)
    }
  }
  // The overrides below resolve the diamond between the Scala PSI and Java
  // PSI implementations inherited through multiple traits by explicitly
  // delegating to the ScTypeDefinition linearization.
  override def findMethodBySignature(patternMethod: PsiMethod, checkBases: Boolean): PsiMethod = {
    super[ScTypeDefinition].findMethodBySignature(patternMethod, checkBases)
  }

  override def findMethodsBySignature(patternMethod: PsiMethod, checkBases: Boolean): Array[PsiMethod] = {
    super[ScTypeDefinition].findMethodsBySignature(patternMethod, checkBases)
  }

  // local aliases to avoid clashing with Scala's own Collection/List/Pair
  import _root_.java.util.{Collection => JCollection, List => JList}
  import com.intellij.openapi.util.{Pair => IPair}

  override def findMethodsAndTheirSubstitutorsByName(name: String,
                                                     checkBases: Boolean): JList[IPair[PsiMethod, PsiSubstitutor]] = {
    super[ScTypeDefinition].findMethodsAndTheirSubstitutorsByName(name, checkBases)
  }

  override def getAllMethodsAndTheirSubstitutors: JList[IPair[PsiMethod, PsiSubstitutor]] = {
    super[ScTypeDefinition].getAllMethodsAndTheirSubstitutors
  }

  override def getVisibleSignatures: JCollection[HierarchicalMethodSignature] = {
    super[ScTypeDefinition].getVisibleSignatures
  }

  override def findMethodsByName(name: String, checkBases: Boolean): Array[PsiMethod] = {
    super[ScTypeDefinition].findMethodsByName(name, checkBases)
  }

  override def findFieldByName(name: String, checkBases: Boolean): PsiField = {
    super[ScTypeDefinition].findFieldByName(name, checkBases)
  }
  // Deliberate no-op: deleting a type definition needs no extra validation.
  override def checkDelete() {
  }
override def delete() {
var toDelete: PsiElement = this
var parent: PsiElement = getParent
while (parent.isInstanceOf[ScToplevelElement] && parent.asInstanceOf[ScToplevelElement].typeDefinitions.length == 1) {
toDelete = parent
parent = toDelete.getParent
}
toDelete match {
case file: ScalaFile => file.delete()
case _ => parent.getNode.removeChild(toDelete.getNode)
}
}
  // Java-PSI view of the type parameters declared on this definition.
  override def getTypeParameters = typeParameters.toArray
override def getSupers: Array[PsiClass] = {
val direct = extendsBlock.supers.toArray
val res = new ArrayBuffer[PsiClass]
res ++= direct
for (sup <- direct if !res.contains(sup)) res ++= sup.getSupers
// return strict superclasses
res.filter(_ != this).toArray
}
  // Disambiguate the inheritance check in favour of the Scala-aware implementation.
  override def isInheritor(baseClass: PsiClass, deep: Boolean): Boolean =
    super[ScTypeDefinition].isInheritor(baseClass, deep)

  /** Physical signatures named `name` declared in or inherited by this type,
    * plus matching synthetic (non-override) methods. */
  def signaturesByName(name: String): Seq[PhysicalSignature] = {
    (for ((s: PhysicalSignature, _) <- TypeDefinitionMembers.getSignatures(this).forName(name)._1) yield s) ++
      syntheticMethodsNoOverride.filter(_.name == name).map(new PhysicalSignature(_, ScSubstitutor.empty))
  }
override def getNameIdentifier: PsiIdentifier = {
Predef.assert(nameId != null, "Class hase null nameId. Class text: " + getText) //diagnostic for EA-20122
new JavaIdentifier(nameId)
}
  /** Icon for this definition. Currently returns the raw icon immediately;
    * the layered/visibility decoration below is intentionally unreachable
    * (performance workaround, see the todo) but kept for when it is re-enabled. */
  override def getIcon(flags: Int): Icon = {
    val icon = getIconInner
    return icon //todo: remove, when performance issues will be fixed
    if (!this.isValid) return icon //to prevent Invalid access: EA: 13535
    val isLocked = (flags & Iconable.ICON_FLAG_READ_STATUS) != 0 && !isWritable
    val rowIcon = ElementBase.createLayeredIcon(this, icon, ElementPresentationUtil.getFlags(this, isLocked))
    if ((flags & Iconable.ICON_FLAG_VISIBILITY) != 0) {
      val accessLevel = {
        if (hasModifierProperty("private")) PsiUtil.ACCESS_LEVEL_PRIVATE
        else if (hasModifierProperty("protected")) PsiUtil.ACCESS_LEVEL_PROTECTED
        else PsiUtil.ACCESS_LEVEL_PUBLIC
      }
      VisibilityIcons.setVisibilityIcon(accessLevel, rowIcon)
    }
    rowIcon
  }
  // Subclasses supply the raw (undecorated) icon for their definition kind.
  protected def getIconInner: Icon

  // Disambiguate in favour of the Scala doc-comment implementation.
  override def getDocComment: PsiDocComment = super[ScTypeDefinition].getDocComment
override def isDeprecated: Boolean = {
val stub = getStub
if (stub != null) {
return stub.asInstanceOf[ScTemplateDefinitionStub].isDeprecated
}
hasAnnotation("scala.deprecated").isDefined || hasAnnotation("java.lang.Deprecated").isDefined
}
  /** Inner classes as seen from Java: this type's own nested classes plus,
    * when a companion object exists, the companion's members. Objects
    * contribute both themselves and their fake companion class (when one
    * exists); traits contribute their fake companion class as well. */
  override def getInnerClasses: Array[PsiClass] = {
    def ownInnerClasses = members.filter(_.isInstanceOf[PsiClass]).map(_.asInstanceOf[PsiClass]).toArray

    ScalaPsiUtil.getBaseCompanionModule(this) match {
      case Some(o: ScObject) =>
        val res: ArrayBuffer[PsiClass] = new ArrayBuffer[PsiClass]()
        val innerClasses = ownInnerClasses
        res ++= innerClasses
        o.members.foreach {
          case o: ScObject => o.fakeCompanionClass match {
            case Some(clazz) =>
              res += o
              res += clazz
            case None =>
              res += o
          }
          case t: ScTrait =>
            res += t
            res += t.fakeCompanionClass
          case c: ScClass => res += c
          case _ =>
        }
        res.toArray
      case _ => ownInnerClasses
    }
  }
  // Remaining Java-PSI integration points: delegate to IntelliJ utilities or
  // the ScTypeDefinition implementation so Java callers see consistent results.
  override def getAllInnerClasses: Array[PsiClass] = {
    PsiClassImplUtil.getAllInnerClasses(this)
  }

  override def findInnerClassByName(name: String, checkBases: Boolean): PsiClass = {
    super[ScTypeDefinition].findInnerClassByName(name, checkBases)
  }

  override def getAllFields: Array[PsiField] = {
    super[ScTypeDefinition].getAllFields
  }

  // Maps a compiled-library element back to its source counterpart when available.
  override def getOriginalElement: PsiElement = {
    ScalaPsiImplementationHelper.getOriginalClass(this)
  }
} | katejim/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScTypeDefinitionImpl.scala | Scala | apache-2.0 | 16,501 |
package arrays
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pl.iterators.kebs.instances.time.YearMonthString
import slick.lifted.Isomorphism
/** Verifies that kebs derives slick `Isomorphism` instances for collections of
  * value-wrapped case classes, and does NOT derive `CaseClass1Rep` for types
  * only covered by instance traits such as [[YearMonthString]]. */
class ListIsomorphismTest extends AnyFunSuite with Matchers with YearMonthString {
  import pl.iterators.kebs._
  // Bug fix: this import previously sat in the middle of the class, *after*
  // the `shouldNot typeCheck` tests whose code strings mention YearMonth.
  // Class-body imports are only in scope from their point of declaration
  // onward, so those strings failed to type-check merely because YearMonth
  // was an unresolved symbol — making the negative tests pass vacuously.
  // Importing it up front makes them test the absence of the implicit.
  import java.time.YearMonth

  case class C(a: String)

  test("No CaseClass1Rep implicits derived") {
    import pl.iterators.kebs.macros.CaseClass1Rep

    "implicitly[CaseClass1Rep[YearMonth, String]]" shouldNot typeCheck
    "implicitly[CaseClass1Rep[String, YearMonth]]" shouldNot typeCheck
  }

  test("Case class isomorphism implies list isomorphism") {
    val iso = implicitly[Isomorphism[List[C], List[String]]]

    iso.map(List(C("a"), C("b"))) shouldBe List("a", "b")
    iso.comap(List("a", "b")) shouldBe List(C("a"), C("b"))
  }

  test("Case class isomorphism implies seq to list isomorphism") {
    val iso = implicitly[Isomorphism[Seq[C], List[String]]]

    iso.map(Seq(C("a"), C("b"))) shouldBe List("a", "b")
    iso.comap(List("a", "b")) shouldBe Seq(C("a"), C("b"))
  }

  test("List[Obj[String]] <-> List[String]") {
    "val iso = implicitly[Isomorphism[List[YearMonth], List[String]]]" should compile
  }

  test("Seq[Obj[String]] <-> List[String]") {
    "val iso = implicitly[Isomorphism[Seq[YearMonth], List[String]]]" should compile
  }
}
| theiterators/kebs | slick/src/test/scala/arrays/ListIsomorphismTest.scala | Scala | mit | 1,375 |
package com.advancedspark.pmml.spark.ml
import java.io.File
import scala.collection.JavaConverters._
import org.apache.spark.SparkConf
import org.apache.spark.api.java.JavaSparkContext
import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.Predictor
import org.apache.spark.ml.classification.DecisionTreeClassifier
import org.apache.spark.ml.feature.RFormula
import org.apache.spark.ml.regression.DecisionTreeRegressor
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.DataFrameReader
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.types.StructType
import org.dmg.pmml.FieldName
import org.dmg.pmml.DataField
import org.jpmml.evaluator.Evaluator
import org.jpmml.evaluator.FieldValue
import org.jpmml.evaluator.ModelEvaluatorFactory
import org.jpmml.evaluator.EvaluatorUtil
import org.jpmml.model.ImportFilter
import org.jpmml.model.JAXBUtil
import org.jpmml.model.MetroJAXBUtil
import org.jpmml.sparkml.ConverterUtil
import org.xml.sax.InputSource
import org.apache.http.client.methods.HttpPost
import org.apache.http.entity.StringEntity
import org.apache.http.impl.client.DefaultHttpClient // TODO: this is deprecated
/** Trains a decision-tree pipeline on the census CSV, exports the fitted
  * pipeline as PMML (to disk and to a local model server over HTTP), then
  * re-loads the PMML with JPMML and evaluates a single sample record. */
object PMMLSparkML {

  // Input dataset and model/export configuration.
  val datasetsHome = sys.env.get("DATASETS_HOME").getOrElse("/root/pipeline/datasets/")
  val csvInput: File = new File(s"${datasetsHome}/R/census.csv")
  val functionType: String = "classification" // or "regression"
  val formulaStr: String = "income ~ ."
  val pmmlName = "census"
  val pmmlOutput: File = new File(s"${pmmlName}.pmml")

  def main(args: Array[String]) = {
    val sparkConf: SparkConf = new SparkConf()
    val sparkContext: JavaSparkContext = new JavaSparkContext(sparkConf)
    val sqlContext: SQLContext = new SQLContext(sparkContext)

    val reader: DataFrameReader = sqlContext.read
      .format("com.databricks.spark.csv")
      .option("header", "true")
      .option("inferSchema", "true")

    val dataFrame: DataFrame = reader.load(csvInput.getAbsolutePath())

    val schema: StructType = dataFrame.schema
    System.out.println(schema.treeString)

    // RFormula turns "income ~ ." into label/features columns.
    val formula: RFormula = new RFormula().setFormula(formulaStr)

    var predictor: Predictor[_, _, _] = null
    if (functionType.equals("classification")) {
      predictor = new DecisionTreeClassifier().setMinInstancesPerNode(10)
    } else if (functionType.equals("regression")) {
      predictor = new DecisionTreeRegressor().setMinInstancesPerNode(10)
    } else {
      throw new IllegalArgumentException()
    }

    predictor.setLabelCol(formula.getLabelCol)
    predictor.setFeaturesCol(formula.getFeaturesCol)

    val pipeline = new Pipeline().setStages(Array[PipelineStage](formula, predictor))
    val pipelineModel = pipeline.fit(dataFrame)

    val predictorModel = pipeline.getStages(1).asInstanceOf[DecisionTreeClassifier]
    System.out.println(predictorModel.explainParams())

    // Note: This requires latest version of org.jpmml:jpmml-sparkml which requires shading
    // to avoid conflict with Spark 1.6.1
    val pmml = ConverterUtil.toPMML(schema, pipelineModel)
    System.out.println(pmml.getModels().get(0).toString())

    // Bug fix: the file stream was never closed, which could leave the .pmml
    // file unflushed/truncated and leaks the descriptor.
    val os = new java.io.FileOutputStream(pmmlOutput.getAbsolutePath())
    try MetroJAXBUtil.marshalPMML(pmml, os)
    finally os.close()

    // Second marshal into memory for the HTTP upload below.
    val baos = new java.io.ByteArrayOutputStream()
    MetroJAXBUtil.marshalPMML(pmml, baos)

    // POST the serialized PMML to the local model-update endpoint.
    println("--- HTTP POST UPDATED PMML ---")
    val post = new HttpPost(s"http://127.0.0.1:9040/update-pmml/${pmmlName}")
    post.setHeader("Content-type", "application/xml")
    post.setEntity(new StringEntity(baos.toString()))

    val response = (new DefaultHttpClient).execute(post)

    println("--- HTTP RESPONSE HEADERS ---")
    response.getAllHeaders.foreach(arg => println(arg))

    // Form the following: https://github.com/jpmml/jpmml-evaluator
    // Bug fix: close the input stream once the PMML document has been parsed.
    val is = new java.io.FileInputStream(pmmlOutput.getAbsolutePath())
    val pmml2 =
      try JAXBUtil.unmarshalPMML(ImportFilter.apply(new InputSource(is)))
      finally is.close()

    //val modelEvaluator = new TreeModelEvaluator(pmml2)
    val modelEvaluatorFactory = ModelEvaluatorFactory.newInstance()
    val modelEvaluator: Evaluator = modelEvaluatorFactory.newModelEvaluator(pmml2)

    System.out.println("Mining function: " + modelEvaluator.getMiningFunction())

    val inputFields = modelEvaluator.getInputFields().asScala

    System.out.println("Input schema:");
    System.out.println("\t" + "Input fields: " + inputFields)

    System.out.println("Output schema:");
    System.out.println("\t" + "Target fields: " + modelEvaluator.getTargetFields())
    System.out.println("\t" + "Output fields: " + modelEvaluator.getOutputFields())

    // One sample census record keyed by input-field name.
    val inputs: Map[String, _] = Map("age" -> 39,
      "workclass" -> "State-gov",
      "education" -> "Bachelors",
      "education_num" -> 13,
      "marital_status" -> "Never-married",
      "occupation" -> "Adm-clerical",
      "relationship" -> "Not-in-family",
      "race" -> "White",
      "sex" -> "Male",
      "capital_gain" -> 2174,
      "capital_loss" -> 0,
      "hours_per_week" -> 40,
      "native_country" -> "United-States")

    val arguments =
      ( for(inputField <- inputFields)
        // The raw value is passed through:
        // 1) outlier treatment, 2) missing value treatment,
        // 3) invalid value treatment, 4) type conversion
        yield (inputField.getName -> inputField.prepare(inputs(inputField.getName.getValue)))
      ).toMap.asJava

    val results = modelEvaluator.evaluate(arguments)
    val targetField = modelEvaluator.getTargetFields().asScala(0)
    val targetValue = results.get(targetField.getName)

    System.out.println(s"**** Predicted value for '${targetField.getName}': ${targetValue} ****")
  }
}
| fluxcapacitor/source.ml | apachespark.ml/demos/pmml/src/main/scala/com/advancedspark/pmml/spark/ml/PMMLSparkML.scala | Scala | apache-2.0 | 6,404 |
package edu.chop.cbmi.dataExpress.test.backends
import org.scalatest.{Tag, FeatureSpec, GivenWhenThen}
import org.scalatest.matchers.ShouldMatchers
import edu.chop.cbmi.dataExpress.backends.SqlServerBackend
import edu.chop.cbmi.dataExpress.test.util._
import edu.chop.cbmi.dataExpress.dataModels._
import edu.chop.cbmi.dataExpress.dataModels.sql._
import edu.chop.cbmi.dataExpress.dataModels.sql.IntegerDataType
import scala.language.reflectiveCalls
class SqlServerBackendFeatureSpec extends FeatureSpec with GivenWhenThen with ShouldMatchers {
  /** Fresh connection properties per test, read from the test configuration. */
  def fixture =
    new {
      val props = TestProps.getDbProps("sqlserver")
    }

  // NOTE(review): appears unused in the visible part of this spec — confirm
  // before removing.
  val identifierQuote = "`"

  /** Shared backend/connection/statement used for data setup and teardown. */
  def dataSetupFixture =
    new {
      val tf = fixture
      val targetBackend = new SqlServerBackend(tf.props)
      targetBackend.connect
      val targetConnection = targetBackend.connection
      val targetStatement = targetConnection.createStatement()
    }

  val setup = dataSetupFixture

  /** Drops the table created by these scenarios; returns true on success. */
  def removeTestDataSetup: Boolean = {
    setup.targetStatement.execute("DROP TABLE cars_deba_a")
    setup.targetBackend.commit
    true
  }
  // Creates cars_deba_a (dropping any pre-existing copy first) and verifies
  // its existence via sys.tables.
  scenario("The user can create a table with four columns", SqlServerTest) {
    val f = fixture
    val tableName = "cars_deba_a"
    val columnFixedWidth: Boolean = false
    val columnNames: List[String] = List("carid", "carnumber", "carmake", "carmodel")
    val dataTypes = List(CharacterDataType(20, columnFixedWidth), IntegerDataType, CharacterDataType(20, columnFixedWidth), CharacterDataType(20, columnFixedWidth))
    val verifyTableStatement: String = "SELECT COUNT(*) as 'count' FROM sys.tables WHERE name = '%s'".format(tableName)
    val backend = new SqlServerBackend(f.props)
    val cascade: Boolean = true

    Given("an active connection")
    assert(backend.connect().isInstanceOf[java.sql.Connection] )
    backend.connection.setAutoCommit(false)

    When("the user issues a valid create table instruction for a table that does not exist")
    val startTableCount = backend.executeQuery(verifyTableStatement)
    assert(startTableCount.next())
    if (startTableCount.getInt("count") != 0) {
      backend.dropTable(tableName,cascade)
    }
    startTableCount.close()
    val endTableCount = backend.executeQuery(verifyTableStatement)
    assert(endTableCount.next())
    if (endTableCount.getInt("count") != 0){
      fail( "Unable to drop existing table " + tableName )
    }
    /* Table should be dropped now if it existed) */
    backend.createTable(tableName,columnNames,dataTypes)

    Then("the table should exist")
    val tableExistResult = backend.executeQuery(verifyTableStatement)
    assert(tableExistResult.next())
    tableExistResult.getInt("count") should equal(1)
    backend.commit()
    backend.close()
  }
  // Truncates the (populated) table, commits, and checks the row count is 0.
  scenario("The user can truncate a table and commit", SqlServerTest) {
    val f = fixture
    val tableName: String = "cars_deba_a"
    val countStatement: String = """select count(*) as 'count' from """ + tableName
    val backend = new SqlServerBackend(f.props)

    Given("an active connection and a populated table")
    assert(backend.connect().isInstanceOf[java.sql.Connection] )
    backend.connection.setAutoCommit(false)

    When("the user issues truncate and Then commit instructions for that table")
    backend.truncateTable(tableName)
    backend.commit()

    Then("the table should be truncated")
    val countResult = backend.executeQuery(countStatement)
    assert(countResult.next())
    countResult.getInt("count") should equal (0)
    backend.close()
  }

  //TODO: This test needs to be re-written with an auto-incrementing sequence in the table to fully test insert returning keys
  // Inserts one row via a parameterized statement and verifies persistence.
  scenario("The inserted row can be committed", SqlServerTest) {
    val f = fixture
    val backend = new SqlServerBackend(f.props)
    val tableName = "cars_deba_a"
    val columnNames:List[String] = List("carid","carnumber","carmake","carmodel")
    val valuesHolders:List[String] = for (i <- (0 to (columnNames.length - 1)).toList) yield "?"
    val sqlStatement = """insert into %s(%s) values(%s)""".format(tableName,
                                                                 columnNames.mkString(", "),
                                                                 valuesHolders.mkString(", "))
    val carId = "K0000001"
    val carNumber = 1234567890
    val carMake = "MiniCoopeRa"
    val carModel = "One"
    val valuesList = List(carId,carNumber,carMake,carModel)
    val bindVars:DataRow[Any] = DataRow((columnNames(0),valuesList(0)),
                                        (columnNames(1),valuesList(1)),
                                        (columnNames(2),valuesList(2)),
                                        (columnNames(3),valuesList(3)))
    // NOTE(review): these two locals are never read — confirm before removing.
    var isDataRow = false
    var insertedRow:DataRow[Any] = DataRow.empty

    Given("an active connection")
    assert(backend.connect().isInstanceOf[java.sql.Connection] )
    backend.connection.setAutoCommit(false)

    When("an insert query is executed and committed")
    backend.execute(sqlStatement,bindVars)
    backend.commit()

    Then("the inserted row should be in the database")
    val rs = backend.executeQuery("select count(*) from %s where %s = ?".format(tableName, columnNames(0)), List(Option(carId)))
    rs.next
    rs.getInt(1) should equal(1)
    backend.close()
  }
  // Runs a parameterized select (row inserted by the previous scenario) and
  // checks that at least one result comes back.
  scenario("The user can obtain a record from executing a select query", SqlServerTest) {
    //Prerequisites: ignore 1: Passed
    val f = fixture
    val backend = new SqlServerBackend(f.props)
    val tableName = "cars_deba_a"
    val sqlStatement = "select * from " + tableName + " where carid = ?"
    val valuesList: List[String] = List("K0000001")
    val columnNames: List[String] = List("carid")
    val bindVars: DataRow[String] = DataRow((columnNames(0), valuesList(0)))
    var hasResults: Boolean = false

    Given("an active connection")
    assert(backend.connect().isInstanceOf[java.sql.Connection])
    backend.connection.setAutoCommit(false)

    When("a query that should generate results is executed")
    val resultSet = backend.executeQuery(sqlStatement, bindVars)

    Then("one or more results should be returned")
    hasResults = resultSet.next()
    hasResults should be(true)
    backend.close()
  }

  // Same query, but asserts non-emptiness via the ResultSet cursor directly.
  scenario("The user can determine whether a select query has returned a record", SqlServerTest) {
    //Prerequisites: ignore 1: Passed
    val f = fixture
    val backend = new SqlServerBackend(f.props)
    val tableName = "cars_deba_a"
    val sqlStatement = "select * from %s where carid = ?".format(tableName)
    val columnNames = List("carid")
    val valuesList = List("K0000001")
    val bindVars:DataRow[String] = DataRow((columnNames(0),valuesList(0)))

    Given("an active connection")
    assert(backend.connect().isInstanceOf[java.sql.Connection] )
    backend.connection.setAutoCommit(false)

    When("a select query that has a non empty result is executed")
    //resultSetReturned = backend.execute(sqlStatement,bindVars)
    //resultSetReturned seems to only be true only if it is an update count or if execute does not return anything at all
    val results = backend.executeQuery(sqlStatement, bindVars)

    Then("the query should have returned a non empty result set")
    val nonEmptyResultSet: Boolean = results.next()
    nonEmptyResultSet should be(true)
    backend.close()
  }
  // Starts a transaction, inserts, commits, then verifies persistence on a
  // brand-new connection.
  scenario("The user can commit an open transaction", SqlServerTest) {
    val f = fixture
    var backend = new SqlServerBackend(f.props)
    val tableName = "cars_deba_a"
    val columnNames = List("carid","carnumber","carmake","carmodel")
    val valuesHolders = for (i <- (0 to (columnNames.length - 1)).toList ) yield "?"
    val sqlStatement = """insert into %s(%s) values(%s)""".format(tableName,
                                                                 columnNames.mkString(", "),
                                                                 valuesHolders.mkString(", "))
    val carId = "K0000002"
    val carNumber = 1234567899
    val carMake = "MiniCoopeRa"
    val carModel = "Two"
    val valuesList = List(carId, carNumber, carMake, carModel)
    val bindVars: DataRow[Any] = DataRow(("carid", carId), ("carnumber", carNumber), ("carmake", carMake), ("carmodel", carModel))

    Given("an active connection with an open transaction ")
    assert(backend.connect().isInstanceOf[java.sql.Connection])
    backend.connection.setAutoCommit(false)
    backend.startTransaction()
    backend.execute(sqlStatement, bindVars)

    When("the user issues a commit instruction")
    backend.commit()

    Then("the data should be persisted")
    backend.close()
    backend.connection.isClosed should be (true)
    val confirmSqlStatement = """select * from %s
                  where carid = ?
                  and carnumber = ?
                  and carmake = ?
                  and carmodel = ?""".format(tableName)
    val newFixture = fixture
    backend = new SqlServerBackend(newFixture.props)
    assert(backend.connect().isInstanceOf[java.sql.Connection])
    backend.execute(confirmSqlStatement, bindVars) should be(true)
    backend.close()
  }

  // Checks a populated table goes to zero rows after truncate (no commit
  // needed to observe the effect on the same connection).
  scenario("The user can truncate a populated table", SqlServerTest) {
    val f = fixture
    val tableName = "cars_deba_a"
    val countStatement = "select count(1) as 'count' from %s".format(tableName)
    val backend = new SqlServerBackend(f.props)

    Given("an active connection and a populated table")
    assert(backend.connect().isInstanceOf[java.sql.Connection] )
    backend.connection.setAutoCommit(false)
    var countResult = backend.executeQuery(countStatement)
    countResult.next() should be (true)
    countResult.getInt("count") should be > (0)

    When("the user issues a truncate table instruction for that table")
    backend.truncateTable(tableName)

    Then("the table should be truncated")
    countResult = backend.executeQuery(countStatement)
    assert(countResult.next())
    countResult.getInt("count") should equal(0)
    backend.close()
  }
  // Inserts inside a transaction, rolls back, then verifies on a new
  // connection that nothing was persisted.
  scenario("The user can roll back an open transaction", SqlServerTest) {
    val f = fixture
    val backend = new SqlServerBackend(f.props)
    val tableName = "cars_deba_a"
    val columnNames: List[String] = List("carid", "carnumber", "carmake", "carmodel")
    val valuesHolders: List[String] = for (i <- (0 to (columnNames.length - 1)).toList) yield "?"
    val sqlStatement = """insert into %s(%s) values(%s)""".format(tableName,
                                                                 columnNames.mkString(", "),
                                                                 valuesHolders.mkString(", "))
    val carId = "K0000050"
    val carNumber = 1234567777
    val carMake = "MiniCoopeRa"
    val carModel = "Fifty"
    val valuesList = List(carId, carNumber, carMake, carModel)
    val bindVars: DataRow[Any] = DataRow(("carid", carId), ("carnumber", carNumber), ("carmake", carMake), ("carmodel", carModel))

    Given("an active connection with an open transaction ")
    assert(backend.connect().isInstanceOf[java.sql.Connection] )
    backend.connection.setAutoCommit(false)
    backend.startTransaction()
    backend.execute(sqlStatement,bindVars)
    //can't depend on the row coming back
    //assert(insertedRow.isInstanceOf[DataRow[Any]])

    When("the user issues a rollback instruction")
    backend.rollback()

    Then("the data should not be persisted")
    backend.close()
    backend.connection.isClosed should be(true)
    val sqlVerifyStatement = """select count(*) as count from %s
                  where carid = ?
                  and carnumber = ?
                  and carmake = ?
                  and carmodel = ?""".format(tableName)
    val newFixture = fixture
    val newBackend = new SqlServerBackend(newFixture.props)
    assert(newBackend.connect().isInstanceOf[java.sql.Connection])
    val persistentDataCount = newBackend.executeQuery(sqlVerifyStatement, bindVars)
    assert(persistentDataCount.next())
    persistentDataCount.getInt("count") should equal(0)
    newBackend.close()
  }
  // Full start/insert/end-transaction cycle, with persistence verified on a
  // fresh connection.
  scenario("The user can open a transaction, insert a row, and end the transaction", SqlServerTest) {
    val f = fixture
    val backend = new SqlServerBackend(f.props)
    val tableName = "cars_deba_a"
    val columnNames = List("carid","carnumber","carmake","carmodel")
    val valuesHolders = for (i <- (0 to (columnNames.length - 1)).toList ) yield "?"
    val sqlStatement = """insert into %s(%s) values(%s)""".format(tableName,
                                                                 columnNames.mkString(", "),
                                                                 valuesHolders.mkString(", "))
    val carId = "K0000055"
    val carNumber = 1234567755
    val carMake = "MiniCoopeRa"
    val carModel = "FiftyFive"
    val valuesList = List(carId,carNumber,carMake,carModel)
    val bindVars:DataRow[Any] = DataRow((columnNames(0),valuesList(0)),
                                        (columnNames(1),valuesList(1)),
                                        (columnNames(2),valuesList(2)),
                                        (columnNames(3),valuesList(3)))

    Given("an active connection")
    assert(backend.connect().isInstanceOf[java.sql.Connection] )
    backend.connection.setAutoCommit(false)

    When("the user issues a start transaction instruction")
    backend.startTransaction()

    And("the user inserts a row")
    backend.execute(sqlStatement,bindVars)

    And("the user ends the transaction")
    backend.endTransaction()
    backend.commit()

    Then("the data should be persisted")
    backend.close()
    backend.connection.isClosed should be (true)
    val sqlVerifyStatement = """select count(*) as count from %s
                  where carid = ?
                  and carnumber = ?
                  and carmake = ?
                  and carmodel = ?""".format(tableName)
    val newFixture = fixture
    val newBackend = new SqlServerBackend(newFixture.props)
    assert(newBackend.connect().isInstanceOf[java.sql.Connection] )
    val persistentDataCount = newBackend.executeQuery(sqlVerifyStatement,bindVars)
    assert(persistentDataCount.next() )
    persistentDataCount.getInt("count") should equal (1)
    newBackend.close()
  }
scenario("The user can create a table with 32 columns", SqlServerTest) {
val f = fixture
val tableName = "cars_deba_b"
val columnFixedWidth:Boolean = false
val columnNames:List[String] = List("carid","carnumber","carmake","carmodel")
val dataTypes = List( CharacterDataType(20,columnFixedWidth),IntegerDataType,CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),
CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),
CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),
CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),
CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),
CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),
CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),
CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth),CharacterDataType(20,columnFixedWidth)
)
val verifyTableStatement: String = "SELECT COUNT(*) as 'count' FROM sys.tables WHERE name = '%s'".format(tableName)
val backend = new SqlServerBackend(f.props)
Given("an active connection")
assert(backend.connect().isInstanceOf[java.sql.Connection] )
backend.connection.setAutoCommit(false)
When("the user issues a valid create table instruction")
backend.createTable(tableName,columnNames,dataTypes)
Then("the table should exist")
val tableExistResult = backend.executeQuery(verifyTableStatement)
assert(tableExistResult.next())
tableExistResult.getInt("count") should equal (1)
backend.close()
}
  // Uses the high-level insertRow API (no hand-written SQL) and verifies the
  // row count goes from 0 to 1.
  scenario("The user can insert a row without constructing an insert statement", SqlServerTest) {
    val f = fixture
    val tableName = "cars_deba_a"
    // NOTE(review): columnNames and valuesList are never read here — confirm
    // before removing.
    val columnNames = List("carid","carnumber","carmake","carmodel")
    val carId = "K0000003"
    val carNumber = 1234567888
    val carMake = "MiniCoopeRa"
    val carModel = "Three"
    val valuesList = List(carId,carNumber,carMake,carModel)
    val row = DataRow(("carid",carId),("carnumber",carNumber),("carmake",carMake),("carmodel",carModel))
    val backend = new SqlServerBackend(f.props)
    val verifyRecordStatement = "select count(*) as count from %s where carid = '%s'".format(tableName, row.carid.get)

    Given("an active connection")
    assert(backend.connect().isInstanceOf[java.sql.Connection] )
    backend.connection.setAutoCommit(false)

    When("the user issues a valid insert command for an existing table and a unique record")
    var recordCountResult = backend.executeQuery(verifyRecordStatement)
    assert(recordCountResult.next())
    var recordCount = recordCountResult.getInt("count")
    recordCount should be (0)
    backend.insertRow(tableName,row)

    And("the row should be inserted")
    recordCountResult = backend.executeQuery(verifyRecordStatement)
    assert(recordCountResult.next())
    recordCount = recordCountResult.getInt("count")
    recordCount should be (1)
    backend.commit()
    backend.close()
  }
  // Batch-inserts ten rows through the DataTable API and verifies both the
  // reported statement count and the persisted row count.
  scenario("The user can insert a batch of rows and commit without having to construct the insert statements", SqlServerTest) {
    val f = fixture
    val tableName = "cars_deba_a"
    val columnNames = List("carid","carnumber","carmake","carmodel")
    val rows = Seq(Seq("K0000201",1234901,"MiniCoopeRb","One"),
                   Seq("K0000202",1234902,"MiniCoopeRb","Two"),
                   Seq("K0000203",1234903,"MiniCoopeRb","Three"),
                   Seq("K0000204",1234904,"MiniCoopeRb","Four"),
                   Seq("K0000205",1234905,"MiniCoopeRb","Five"),
                   Seq("K0000206",1234906,"MiniCoopeRb","Six"),
                   Seq("K0000207",1234907,"MiniCoopeRb","Seven"),
                   Seq("K0000208",1234908,"MiniCoopeRb","Eight"),
                   Seq("K0000209",1234909,"MiniCoopeRb","Nine"),
                   Seq("K0000210",1234910,"MiniCoopeRb","Ten"))
    val table = DataTable(columnNames, rows: _*)
    val backend = new SqlServerBackend(f.props)
    var successfulStatementCount = 0
    val verifyRowsStatement = "select count(*) as count from %s where carid in (%s)".format(tableName, rows.map{r => "'%s'".format(r.head)}.mkString(", ") )

    Given("an active connection and an empty table")
    assert(backend.connect().isInstanceOf[java.sql.Connection] )
    backend.connection.setAutoCommit(false)
    backend.truncateTable(tableName)

    When("the user issues a batch insert command (with commit) to insert multiple rows into the table ")
    successfulStatementCount = backend.batchInsert(tableName, table)
    backend.commit()

    Then("the batch insert command should be successful")
    successfulStatementCount should equal (rows.length)

    And("the rows should be inserted")
    val recordCountResult = backend.executeQuery(verifyRowsStatement)
    assert(recordCountResult.next())
    val recordCount = recordCountResult.getInt("count")
    recordCount should be (10)
    backend.close()
  }
  // Creates a throwaway table, drops it, and verifies it is gone.
  scenario("The user can drop a table", SqlServerTest) {
    val f = fixture
    val tableName = "cars_deba_c"
    val columnFixedWidth = false
    val columnNames = List("carid","carnumber","carmake","carmodel")
    val dataTypes = List(CharacterDataType(20,columnFixedWidth),
                         IntegerDataType,
                         CharacterDataType(20,columnFixedWidth),
                         CharacterDataType(20,columnFixedWidth))
    val verifyTableStatement: String = "SELECT COUNT(*) as 'count' FROM sys.tables WHERE name = '%s'".format(tableName)
    val backend = new SqlServerBackend(f.props)

    Given("an active connection and an existing table")
    assert(backend.connect().isInstanceOf[java.sql.Connection] )
    backend.connection.setAutoCommit(false)
    backend.createTable(tableName,columnNames,dataTypes)
    val tableVerifiedResult = backend.executeQuery(verifyTableStatement)
    assert(tableVerifiedResult.next())
    tableVerifiedResult.getInt("count") should be (1)
    tableVerifiedResult.close()

    When("the user issues a drop table command for that table")
    backend.dropTable(tableName)

    Then("the table should be dropped")
    val tableExistResult = backend.executeQuery(verifyTableStatement)
    assert(tableExistResult.next())
    tableExistResult.getInt("count") should be (0)
    backend.close()
  }

  //Cascade on drop table is unsupported in sql server
  // Verifies the backend surfaces the lack of CASCADE support as an exception.
  scenario("The user gets an exception when trying to drop a table with cascade", SqlServerTest) {
    val f = fixture
    val tableName = "cars_deba_c"
    // NOTE(review): viewName is declared but no view is created or used — the
    // Given text mentions a view; confirm the scenario is complete.
    val viewName = "cars_deba_c_v"
    val columnFixedWidth = false
    val columnNames = List("carid", "carnumber", "carmake", "carmodel")
    val dataTypes = List(CharacterDataType(20, columnFixedWidth),
                         IntegerDataType,
                         CharacterDataType(20, columnFixedWidth),
                         CharacterDataType(20, columnFixedWidth))
    val verifyTableStatement = "SELECT COUNT(*) as 'count' FROM sys.tables WHERE name = '%s'".format(tableName)
    val backend = new SqlServerBackend(f.props)
    val cascade = true

    Given("an active connection, an existing table, and a view on the existing table")
    assert(backend.connect().isInstanceOf[java.sql.Connection] )
    backend.connection.setAutoCommit(false)
    backend.createTable(tableName,columnNames,dataTypes)
    val tableVerifiedResult = backend.executeQuery(verifyTableStatement)
    assert(tableVerifiedResult.next())
    tableVerifiedResult.getInt("count") should equal(1)

    When("the user issues a drop table command with cascade for that table")
    Then("A runtime exception should be thrown")
    intercept[RuntimeException] {
      backend.dropTable(tableName, cascade)
    }
    backend.close()
  }
scenario("The user can iterate over the results of a select query", SqlServerTest) {
//Prerequisites: Need Multiple Row in table cars_deba_a
val f = fixture
val backend = new SqlServerBackend(f.props)
val tableName = "cars_deba_a"
val sqlStatement = "select * from %s".format(tableName)
val bindVars: DataRow[String] = DataRow.empty
var resultsCount: Int = 0
Given("an active connection")
assert(backend.connect().isInstanceOf[java.sql.Connection] )
backend.connection.setAutoCommit(false)
When("a query that should generate multiple results is executed")
val resultSet = backend.executeQuery(sqlStatement,bindVars)
Then("the user should be able to iterate over the results")
while (resultSet.next()) { resultsCount+=1 }
And("multiple results should be returned")
resultsCount should be > (1)
backend.close()
}
// Verifies a parameterized UPDATE targeting a single row by carid.
scenario("The user can update a record in a table using a valid update statement", SqlServerTest) {
  //Prerequisites: Need Multiple Row in table cars_deba_a
  val f = fixture
  val backend = new SqlServerBackend(f.props)
  val tableName = "cars_deba_a"
  val columnNames = List("carid", "carnumber", "carmake", "carmodel")
  val carId = "K0000210"
  val carNumber = 1234567899
  val carMake = "MiniCoopeRa"
  val carModel = "FourteenMillion"
  // Builds "update <t> set carid = ?, carnumber = ?, ... where carid = '<id>'".
  val sqlStatement = "update %s set %s where carid = '%s'".format(tableName,
    columnNames.map("%s = ?".format(_)).mkString(", "),
    carId)
  // NOTE(review): valuesList and resultsCount are unused in this scenario — TODO: remove?
  val valuesList = List(carId, carNumber, carMake, carModel, carId)
  val bindVars = DataRow(("carid", carId),
    ("carnumber", carNumber),
    ("carmake", carMake),
    ("carmodel", carModel))
  var resultsCount: Int = 0
  Given("an active connection")
  assert(backend.connect().isInstanceOf[java.sql.Connection] )
  backend.connection.setAutoCommit(false)
  When("an update query is executed")
  backend.execute(sqlStatement,bindVars)
  Then("the record(s) should be updated")
  val sqlVerifyStatement = "select count(*) as count from %s where carmodel = ?".format(tableName)
  val recordCountResult = backend.executeQuery(sqlVerifyStatement, Seq(Some(carModel)))
  assert(recordCountResult.next())
  recordCountResult.getInt("count") should be(1)
  backend.close()
}
// Verifies an unfiltered UPDATE hits more than one row.
scenario("The user can update a multiple records in a table using a valid update statement", SqlServerTest) {
  //Prerequisites: Need Multiple Row in table cars_deba_a
  val f = fixture
  val backend = new SqlServerBackend(f.props)
  val tableName = "cars_deba_a"
  val columnNames = List("carmodel")
  // No WHERE clause: every row in the table gets the new carmodel.
  val sqlStatement = "update %s set %s".format(tableName,
    columnNames.map("%s = ?".format(_)).mkString(", "))
  val carModel = "SeventeenMillion"
  // NOTE(review): valuesList is unused — bindVars carries the actual value. TODO: remove?
  val valuesList = List(carModel)
  val bindVars = DataRow((columnNames(0), carModel))
  Given("an active connection")
  assert(backend.connect().isInstanceOf[java.sql.Connection])
  backend.connection.setAutoCommit(false)
  When("an update query for multiple records is executed")
  backend.execute(sqlStatement, bindVars)
  Then("multiple record(s) should be updated")
  val sqlVerifyStatement = "select count(*) as count from %s where carmodel = ?".format(tableName)
  val recordCountResult = backend.executeQuery(sqlVerifyStatement, bindVars)
  assert(recordCountResult.next())
  recordCountResult.getInt("count") should be > (1)
  //It would be better to compare the number of rows updated to the count i.e.:
  //http://www.coderanch.com/t/426288/JDBC/java/Row-count-update-statement
  //Statement st = connection.createStatement("update t_number set number = 2 where name='abcd");
  //int rowCount = st.executeUpdate();
  //However, this executeUpdate is not yet available on the backend
  backend.close()
}
// Verifies the higher-level updateRow API (no hand-written SQL) with a filter.
scenario("The user can update a multiple records in a table without constructing update statement", SqlServerTest) {
  //Prerequisites: Need Multiple Row in table cars_deba_a with carmake = 'MiniCoopeRb'
  val f = fixture
  val backend = new SqlServerBackend(f.props)
  val tableName = "cars_deba_a"
  var columnNames = List("carnumber", "carmake", "carmodel")
  val carNumber = 192837465
  val carMake = "MiniCoopeRaStyle004"
  val carModel = "SeventeenMillion"
  val valuesList = List(carNumber, carMake, carModel)
  // Only rows whose carmake is 'MiniCoopeRb' should be rewritten.
  val filter = List(("carmake", "MiniCoopeRb"))
  val updatesBindVars = DataRow((columnNames(0),valuesList(0)),
    (columnNames(1),valuesList(1)),
    (columnNames(2),valuesList(2)))
  Given("an active connection")
  assert(backend.connect().isInstanceOf[java.sql.Connection] )
  backend.connection.setAutoCommit(false)
  When("an update row instruction for multiple records is executed")
  backend.updateRow(tableName,updatesBindVars,filter)
  Then("multiple record(s) should be updated")
  val sqlStatement = "select count(*) as count from %s where carmake = ?".format(tableName)
  val bindVars = DataRow((columnNames(1), valuesList(1)))
  val recordCountResult = backend.executeQuery(sqlStatement, bindVars)
  assert(recordCountResult.next())
  recordCountResult.getInt("count") should be > (1)
  //It would be better to compare the number of rows updated to the count i.e.:
  //http://www.coderanch.com/t/426288/JDBC/java/Row-count-update-statement
  //Statement st = connection.createStatement("update t_number set number = 2 where name='abcd");
  //int rowCount = st.executeUpdate();
  //However, this executeUpdate is not yet available on the backend
  backend.close()
}
// Verifies row-by-row inserts via the insertRow API (no hand-written INSERT SQL).
scenario("The user can insert a multiple rows using a loop without constructing an insert statement", SqlServerTest) {
  //Prerequisites: None of theses record should exist
  val f = fixture
  val tableName = "cars_deba_a"
  val columnNames = List("carid", "carnumber", "carmake", "carmodel")
  val carIds = List("K0000500", "K0000501", "K0000502", "K0000503", "K0000504")
  val carNumbers = List(1234561000, 1234561001, 1234561002, 1234561003, 1234561004)
  val carMakes = List("MiniCoopeRd", "MiniCoopeRd", "MiniCoopeRd", "MiniCoopeRd", "MiniCoopeRd")
  val carModels = List("Zero", "One", "Ten", "Ten", "Ten")
  val backend = new SqlServerBackend(f.props)
  // Counts how many of the freshly inserted carids actually landed in the table.
  // NB: the second line below is part of the SQL string literal, so it is left unindented.
  val verifyRecordsStatement = """select count(*) as count
from %s where carid in (%s)""".format(tableName, carIds.map("'%s'".format(_)).mkString(", "))
  Given("an active connection")
  assert(backend.connect().isInstanceOf[java.sql.Connection] )
  backend.connection.setAutoCommit(false)
  When("the user issues valid insert row commands in a loop for an existing table")
  // Each index picks the matching carnumber/make/model for the carid.
  carIds.zipWithIndex.foreach{
    case (carId,index) => backend.insertRow(tableName, DataRow(("carid", carId),
      ("carnumber", carNumbers(index)),
      ("carmake", carMakes(index)),
      ("carmodel", carModels(index))))
  }
  Then("the rows should be inserted")
  val recordCountResult = backend.executeQuery(verifyRecordsStatement)
  assert(recordCountResult.next())
  recordCountResult.getInt("count") should equal(carIds.length)
  backend.close()
}
// Housekeeping "scenario": tears down the data created by the suite.
scenario("Remove Test Data Setup", SqlServerTest) {
  /**** Remove Test Data ****/
  removeTestDataSetup
  /**** ****/
}
// Housekeeping "scenario": closes the shared connection opened by the suite setup.
scenario("Close Test SetUp Connections", SqlServerTest) {
  setup.targetBackend.close
}
}
| chop-dbhi/dataexpress | src/test/scala/edu/chop/cbmi/dataExpress/test/backends/SqlServerBackendFeatureSpec.scala | Scala | bsd-2-clause | 29,575 |
package org.jetbrains.plugins.scala.decompiler.scalasig
/**
* Nikolay.Tropin
* 19-Jul-17
*/
// Extra pickle-entry tags that scala.reflect.internal.pickling.PickleFormat
// only gained in later compiler versions, duplicated here so the decompiler
// can read pickles produced by those compilers.
object PickleFormat {
  //37 LITERALsymbol len_Nat name_Ref
  final val LITERALsymbol = 37 //is added to PickleFormat in scala 2.13
}
//Literal implementation of scala.reflect.internal.pickling.PickleFormat
// Marker for anything that can appear as an entry in a pickle table.
sealed trait Entry
// A pickled symbol: a named entity (class, method, object, ...) with an optional
// owner, children, and annotations. Flag queries (isModule, isTrait, isStable, ...)
// come from the mixed-in Flags trait.
trait Symbol extends Flags with Entry {
  def name: String
  def parentRef: Option[Ref[Symbol]]
  // Dereferences the owner, if any.
  def parent: Option[Symbol] = parentRef.map(_.get)
  def children: Iterable[Symbol]
  def attributes: Iterable[SymAnnot]
  // True for type-like symbols: classes (but not module classes), type members,
  // and traits.
  def isType: Boolean = this match {
    case _: ClassSymbol if !isModule=> true
    case _: TypeSymbol => true
    case _ if isTrait => true
    case _ => false
  }
  // Dotted path from the outermost owner down to this symbol's name.
  lazy val path: String = parent match {
    case Some(NoSymbol) | None => name
    case Some(sym) => s"${sym.path}.$name"
  }
  // A symbol is a "stable object" when it is stable itself, or a module whose
  // whole owner chain consists of stable symbols (package objects count as stable).
  def isStableObject: Boolean = {
    @annotation.tailrec
    def loop(parent: Option[Symbol]): Boolean = parent match {
      // External symbols / the root are trusted to be stable.
      case Some(_: ExternalSymbol) | Some(NoSymbol) | None => true
      case Some(sym) =>
        (sym.isStable ||
          (sym.isModule && sym.name == "package")) &&
          loop(sym.parent)
    }
    if (isStable) true
    else if (!isModule) false
    else loop(parent)
  }
}
// Base for symbols that live inside a parsed ScalaSig: children and annotations
// are looked up lazily in the owning signature.
abstract class ScalaSigSymbol(protected val scalaSig: ScalaSig) extends Symbol {
  override def children: Iterable[Symbol] = scalaSig.children(this)
  override def attributes: Iterable[SymAnnot] = scalaSig.attributes(this)
}
// Symbol backed by a pickled SymbolInfo record (name, owner, flags, type info).
abstract class SymbolInfoSymbol(val symbolInfo: SymbolInfo) extends ScalaSigSymbol(symbolInfo.name.scalaSig) {
  // Pickled names may carry trailing spaces (e.g. for operators) — trim them.
  override lazy val name: String = symbolInfo.name.get.value.trim
  override def parentRef: Option[Ref[Symbol]] = Some(symbolInfo.owner)
  override def hasFlag(flag: Long): Boolean = (symbolInfo.flags & flag) != 0L
  // The symbol's pickled type.
  def infoType: Type = symbolInfo.info.get
}
// A name entry in the pickle table.
case class Name(value: String) extends Entry
//represents scala.Symbol
case class ScalaSymbol(value: String) extends Entry
// Sentinel for "no symbol" — used as the root of owner chains.
case object NoSymbol extends Symbol {
  override def name = "<no symbol>"
  override def parentRef: Option[Ref[Symbol]] = None
  override def hasFlag(flag: Long) = false
  override def children: Iterable[Symbol] = Iterable.empty
  override def attributes: Iterable[SymAnnot] = Iterable.empty
}
case class TypeSymbol(info: SymbolInfo) extends SymbolInfoSymbol(info)
case class AliasSymbol(info: SymbolInfo) extends SymbolInfoSymbol(info)
case class ClassSymbol(info: SymbolInfo, thisTypeRef: Option[Ref[Type]]) extends SymbolInfoSymbol(info)
case class ObjectSymbol(info: SymbolInfo) extends SymbolInfoSymbol(info) {
  // The class with the same name as this object, if pickled alongside it.
  def companionClass: Option[ClassSymbol] = scalaSig.findCompanionClass(this)
}
case class MethodSymbol(info: SymbolInfo, aliasRef: Option[Ref[Symbol]]) extends SymbolInfoSymbol(info)
// A symbol defined outside this signature (referenced by name + optional owner).
case class ExternalSymbol(nameRef: Ref[Name], ownerRef: Option[Ref[Symbol]], isObject: Boolean)
  extends ScalaSigSymbol(nameRef.scalaSig) {
  override def toString: String = path
  override def hasFlag(flag: Long) = false
  override lazy val name: String = nameRef.value
  override def parentRef: Option[Ref[Symbol]] = ownerRef
}
// Raw pickled symbol record; `flags` is the packed flag word.
case class SymbolInfo(name: Ref[Name], owner: Ref[Symbol], flags: Int, privateWithin: Option[Ref[Symbol]], info: Ref[Type]) {
  override def toString: String = s"SymbolInfo(${name.value})"
}
//sealed trait AnnotArg extends Entry //seems inconsistent with PickleFormat
// An annotation argument that is a constant (or constant-like) value.
sealed trait ConstAnnotArg extends Entry
case class Constant(value: Any) extends ConstAnnotArg
//separate class is useless
//case class AnnotInfoBody(infoRef: Ref[Type], annotArgs: Seq[Ref[ConstAnnotArg]], namedArgs: Seq[(Ref[Name], Ref[ConstAnnotArg])]) {
// override def toString: String = "AnnotInfoBody"
//}
// An annotation attached to a symbol: annotation type plus positional and named args.
case class SymAnnot(symbol: Ref[Symbol], infoRef: Ref[Type], annotArgs: Seq[Ref[ConstAnnotArg]], named: Seq[(Ref[Name], Ref[ConstAnnotArg])]) extends Entry {
  def typeRef: Type = infoRef.get
  // Positional args; Tree-valued args (unsupported here) are filtered out.
  def args: Seq[ConstAnnotArg] = annotArgs.collect {
    case ref if ref.get != Tree => ref.get
  }
  // Named args, likewise with Tree-valued entries dropped.
  def namedArgs: Seq[(String, ConstAnnotArg)] = named.collect {
    case (refName, refArg) if refArg.get != Tree => (refName.get.value, refArg.get)
  }
  def hasArgs: Boolean = args.size + namedArgs.size > 0
}
case object Children extends Entry
//case class AnnotInfo(body: AnnotInfoBody) extends ConstAnnotArg //seems inconsistent with PickleFormat
case object AnnotInfo extends Entry
case class AnnotArgArray(args: Seq[Ref[ConstAnnotArg]]) extends ConstAnnotArg
// Placeholder for pickled trees used as annotation arguments (not decoded here).
case object Tree extends ConstAnnotArg
// --- Pickled type entries, mirroring scalac's internal Type hierarchy. ---
trait Type extends Entry
trait TypeWithParams extends Type {
  def paramRefs: Seq[Ref[Symbol]]
  def paramSymbols: Seq[Symbol] = paramRefs.map(_.get)
}
trait FunctionType extends TypeWithParams {
  def resultType: Ref[Type]
}
case object NoType extends Type
case object NoPrefixType extends Type
case class ThisType(symbol: Ref[Symbol]) extends Type
case class SuperType(typerRef: Ref[Type], superTypeRef: Ref[Type]) extends Type
case class SingleType(typeRef: Ref[Type], symbol: Ref[Symbol]) extends Type
case class ConstantType(constant: Ref[Constant]) extends Type
case class TypeRefType(prefix: Ref[Type], symbol: Ref[Symbol], typeArgs: Seq[Ref[Type]]) extends Type
case class TypeBoundsType(lower: Ref[Type], upper: Ref[Type]) extends Type
case class RefinedType(classSym: Ref[Symbol], typeRefs: Seq[Ref[Type]]) extends Type
case class ClassInfoType(symbol: Ref[Symbol], typeRefs: Seq[Ref[Type]]) extends Type
case class ClassInfoTypeWithCons(symbol: Ref[Symbol], typeRefs: Seq[Ref[Type]], cons: String) extends Type
case class MethodType(override val resultType: Ref[Type], override val paramRefs: Seq[Ref[Symbol]]) extends FunctionType
case class NullaryMethodType(resultType: Ref[Type]) extends Type
case class PolyType(typeRef: Ref[Type], override val paramRefs: Seq[Ref[Symbol]]) extends TypeWithParams
case class PolyTypeWithCons(typeRef: Ref[Type], override val paramRefs: Seq[Ref[Symbol]], cons: String) extends TypeWithParams
case class ImplicitMethodType(override val resultType: Ref[Type], override val paramRefs: Seq[Ref[Symbol]]) extends FunctionType
//case class AnnotatedType(typeRef: Ref[Type], attribTreeRefs: Seq[Ref[AnnotInfo]]) extends Type
//we don't use AnnotInfos, and they seem inconsistent
case class AnnotatedType(typeRef: Ref[Type]) extends Type
case class AnnotatedWithSelfType(typeRef: Ref[Type], symbol: Ref[Symbol], attribTreeRefs: Seq[Int]) extends Type
case class DeBruijnIndexType(typeLevel: Int, typeIndex: Int) extends Type
case class ExistentialType(typeRef: Ref[Type], override val paramRefs: Seq[Ref[Symbol]]) extends TypeWithParams
//todo: should we use it somehow?
case class Modifiers(flags: Long, privateWithin: Ref[Name]) extends Entry
package com.lysdev.transperthcached.business
import android.util.Log
import java.util.Collections
import com.lysdev.transperthcached.timetable.Visit
import com.lysdev.transperthcached.timetable.VisitComparator
import com.lysdev.transperthcached.timetable.Timetable
import com.lysdev.transperthcached.exceptions.StateException
import org.joda.time.DateTime
import org.joda.time.LocalTime
import org.joda.time.format.DateTimeFormat
import scala.collection.JavaConverters._
import scala.collection.JavaConversions._
object StopTimetableBusinessLogic {
  /**
   * Returns the visits still to come today for the given stop, earliest first.
   *
   * @param stop_num      five-digit Transperth stop number, as a string
   * @param timetable     cached timetable to look the stop up in
   * @param show_for_date date (and time-of-day) to show visits for
   * @return the remaining visits for that stop/date, in ascending time order
   * @throws StateException if the stop number is malformed or unknown, or if
   *                        there are no (remaining) visits for the date
   */
  def getVisitsForStop(
    stop_num: String,
    timetable: Timetable,
    show_for_date: DateTime
  ) : List[Visit] = {
    // Transperth stop numbers are always five digits.
    if (stop_num.length() != 5) {
      throw new StateException(
        "Bad stop", "Please provide a 5 digit stop number"
      )
    }
    // NOTE(review): a non-numeric 5-character input still escapes as
    // NumberFormatException here rather than StateException — TODO confirm desired.
    val stop_timetable = timetable.getVisitsForStop(Integer.parseInt(stop_num))
    if (stop_timetable == null) {
      val error = s"No such stop as $stop_num"
      Log.d("TransperthCached", error)
      throw new StateException("Bad stop", error)
    }
    // Joda's getDayOfWeek returns 1 (Monday) through 7; the timetable is zero-indexed.
    val weekdayNumber = show_for_date.getDayOfWeek() - 1
    val forDayType = stop_timetable.getForWeekdayNumber(weekdayNumber)
    Log.d("TransperthCached", s"Showing for day number $weekdayNumber")
    if (forDayType == null || forDayType.isEmpty()) {
      val dateText = DateTimeFormat.forPattern("EEE, MMMM dd, yyyy").print(show_for_date)
      val error = s"No stops on $dateText for $stop_num"
      Log.d("TransperthCached", error)
      throw new StateException("No stops", error)
    }
    val forTime = show_for_date.toLocalTime()
    // Keep only visits later than now, sorted ascending by time of day.
    // Fix: the previous comparator used "compareTo(...) > 1", which is always
    // false (compareTo on Int yields -1/0/1), so the list was never actually
    // sorted; sortBy restores the ascending order the commented-out
    // VisitComparator sort was meant to provide.
    val valid = forDayType
      .filter(_.getTime.isAfter(forTime))
      .sortBy(_.getTime.getMillisOfDay)
    if (valid.isEmpty) {
      val error = "No more stops today"
      Log.d("TransperthCached", error)
      throw new StateException("No stops", error)
    }
    valid.toList
  }
}
| Mause/TransperthCached | src/com/lysdev/transperthcached/business/StopTimetableBusinessLogic.scala | Scala | apache-2.0 | 2,575 |
package scala.slick.driver
import scala.collection.mutable.Builder
import scala.slick.ast._
import scala.slick.ast.Util._
import scala.slick.ast.TypeUtil._
import scala.slick.util.SQLBuilder
import scala.slick.profile.SqlExecutorComponent
import scala.slick.lifted.{Shape, FlatShapeLevel}
import scala.slick.relational.CompiledMapping
/** The part of the driver cake that handles the <em>executor</em> API for
  * running queries. */
trait JdbcExecutorComponent extends SqlExecutorComponent { driver: JdbcDriver =>
  type QueryExecutor[T] = QueryExecutorDef[T]
  def createQueryExecutor[R](tree: Node, param: Any): QueryExecutor[R] = new QueryExecutorDef[R](tree, param)
  // Executes a compiled query tree against a JDBC session.
  class QueryExecutorDef[R](tree: Node, param: Any) extends super.QueryExecutorDef[R] {
    // SQL text of the compiled statement, extracted from the first
    // CompiledStatement node in the tree. Lazy: only computed when inspected.
    // NOTE(review): `.get` assumes compilation always produced such a node.
    lazy val selectStatement =
      tree.findNode(_.isInstanceOf[CompiledStatement]).get
        .asInstanceOf[CompiledStatement].extra.asInstanceOf[SQLBuilder.Result].sql
    // Runs the query. Collection-shaped results are accumulated into the
    // collection type recorded in the tree; `First` returns only the first row.
    // NOTE(review): the match is intentionally partial — any other tree shape
    // is a compiler bug and surfaces as a MatchError.
    def run(implicit session: Backend#Session): R = tree match {
      case rsm @ ResultSetMapping(_, _, CompiledMapping(_, elemType)) :@ CollectionType(cons, el) =>
        val b = cons.createBuilder(el.classTag).asInstanceOf[Builder[Any, R]]
        createQueryInvoker[Any](rsm, param).foreach({ x => b += x }, 0)(session)
        b.result()
      case First(rsm: ResultSetMapping) =>
        createQueryInvoker[R](rsm, param).first
    }
  }
}
| nuodb/slick | src/main/scala/scala/slick/driver/JdbcExecutorComponent.scala | Scala | bsd-2-clause | 1,386 |
package spark.deploy
import akka.actor.{ActorRef, Props, Actor, ActorSystem, Terminated}
import spark.deploy.worker.Worker
import spark.deploy.master.Master
import spark.util.AkkaUtils
import spark.{Logging, Utils}
import scala.collection.mutable.ArrayBuffer
/**
* Testing class that creates a Spark standalone process in-cluster (that is, running the
* spark.deploy.master.Master and spark.deploy.worker.Workers in the same JVMs). Executors launched
* by the Workers still run in separate JVMs. This can be used to test distributed operation and
* fault recovery without spinning up a lot of processes.
*/
private[spark]
class LocalSparkCluster(numWorkers: Int, coresPerWorker: Int, memoryPerWorker: Int) extends Logging {

  private val localIpAddress = Utils.localIpAddress
  private val masterActorSystems = ArrayBuffer[ActorSystem]()
  private val workerActorSystems = ArrayBuffer[ActorSystem]()

  /** Boots the master and the requested number of workers; returns the master's spark:// URL. */
  def start(): String = {
    logInfo(s"Starting a local Spark cluster with $numWorkers workers.")

    /* Start the Master first so its URL exists for the workers to register with */
    val (masterSystem, masterPort) = Master.startSystemAndActor(localIpAddress, 0, 0)
    masterActorSystems += masterSystem
    val masterUrl = s"spark://$localIpAddress:$masterPort"

    /* Start each Worker in its own actor system, pointed at the master above */
    (1 to numWorkers).foreach { workerNum =>
      val (workerSystem, _) = Worker.startSystemAndActor(localIpAddress, 0, 0, coresPerWorker,
        memoryPerWorker, masterUrl, null, Some(workerNum))
      workerActorSystems += workerSystem
    }

    masterUrl
  }

  /** Tears the cluster down: all workers first, then the master(s). */
  def stop() {
    logInfo("Shutting down local Spark cluster.")
    // Stop the workers before the master so they don't get upset that it disconnected
    Seq(workerActorSystems, masterActorSystems).foreach { systems =>
      systems.foreach(_.shutdown())
      systems.foreach(_.awaitTermination())
    }
  }
}
| koeninger/spark | core/src/main/scala/spark/deploy/LocalSparkCluster.scala | Scala | bsd-3-clause | 1,892 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.thriftserver
import java.io._
import java.nio.charset.StandardCharsets
import java.sql.Timestamp
import java.util.Date
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.Promise
import scala.concurrent.duration._
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.SparkFunSuite
import org.apache.spark.internal.Logging
import org.apache.spark.sql.hive.test.HiveTestJars
import org.apache.spark.sql.internal.StaticSQLConf
import org.apache.spark.sql.test.ProcessTestUtils.ProcessOutputCapturer
import org.apache.spark.util.{ThreadUtils, Utils}
/**
* A test suite for the `spark-sql` CLI tool.
*/
class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
// Scratch directories for the forked CLI's Hive warehouse, embedded Derby
// metastore, Hive scratch space, and an alternate Spark warehouse; created
// fresh per suite and cleaned up in before/afterAll.
val warehousePath = Utils.createTempDir()
val metastorePath = Utils.createTempDir()
val scratchDirPath = Utils.createTempDir()
val sparkWareHouseDir = Utils.createTempDir()
override def beforeAll(): Unit = {
  super.beforeAll()
  // createTempDir makes the directories eagerly; remove them so each run
  // starts clean (the CLI recreates them on demand).
  Seq(warehousePath, metastorePath, scratchDirPath).foreach(_.delete())
}
override def afterAll(): Unit = {
  try {
    // Remove the per-suite scratch directories and everything underneath them.
    Seq(warehousePath, metastorePath, scratchDirPath).foreach(Utils.deleteRecursively)
  } finally {
    // Always run the framework teardown, even if cleanup failed.
    super.afterAll()
  }
}
/**
 * Run a CLI operation and expect all the queries and expected answers to be returned.
 *
 * @param timeout maximum time for the commands to complete
 * @param extraArgs any extra arguments
 * @param errorResponses a sequence of strings whose presence in the stdout of the forked process
 *                       is taken as an immediate error condition. That is: if a line containing
 *                       one of these strings is found, fail the test immediately.
 *                       The default value is `Seq("Error:")`
 * @param maybeWarehouse an option for warehouse path, which will be set via
 *                       `hive.metastore.warehouse.dir`.
 * @param useExternalHiveFile whether to load the hive-site.xml from `src/test/noclasspath` or
 *                            not, disabled by default
 * @param metastore which path the embedded derby database for metastore locates. Use the
 *                  global `metastorePath` by default
 * @param queriesAndExpectedAnswers one or more tuples of query + answer
 */
def runCliWithin(
    timeout: FiniteDuration,
    extraArgs: Seq[String] = Seq.empty,
    errorResponses: Seq[String] = Seq("Error:"),
    maybeWarehouse: Option[File] = Some(warehousePath),
    useExternalHiveFile: Boolean = false,
    metastore: File = metastorePath)(
    queriesAndExpectedAnswers: (String, String)*): Unit = {
  // Explicitly adds ENTER for each statement to make sure they are actually entered into the CLI.
  val queriesString = queriesAndExpectedAnswers.map(_._1 + "\\n").mkString
  // spark-sql echoes the queries on STDOUT, expect first an echo of the query, then the answer.
  val expectedAnswers = queriesAndExpectedAnswers.flatMap {
    case (query, answer) =>
      if (query == "") {
        // empty query means a command launched with -e
        Seq(answer)
      } else {
        // spark-sql echoes the submitted queries
        val xs = query.split("\\n").toList
        val queryEcho = s"spark-sql> ${xs.head}" :: xs.tail.map(l => s"         > $l")
        // longer lines sometimes get split in the output,
        // match the first 60 characters of each query line
        queryEcho.map(_.take(60)) :+ answer
      }
  }
  // Optionally put the external hive-site.xml directory on the driver classpath.
  val extraHive = if (useExternalHiveFile) {
    s"--driver-class-path ${System.getProperty("user.dir")}/src/test/noclasspath"
  } else {
    ""
  }
  val warehouseConf =
    maybeWarehouse.map(dir => s"--hiveconf ${ConfVars.METASTOREWAREHOUSE}=$dir").getOrElse("")
  // Full spark-sql command line for the forked process, tokenized on whitespace.
  val command = {
    val cliScript = "../../bin/spark-sql".split("/").mkString(File.separator)
    val jdbcUrl = s"jdbc:derby:;databaseName=$metastore;create=true"
    s"""$cliScript
       |  --master local
       |  --driver-java-options -Dderby.system.durability=test
       |  $extraHive
       |  --conf spark.ui.enabled=false
       |  --hiveconf ${ConfVars.METASTORECONNECTURLKEY}=$jdbcUrl
       |  --hiveconf ${ConfVars.SCRATCHDIR}=$scratchDirPath
       |  --hiveconf conf1=conftest
       |  --hiveconf conf2=1
       |  $warehouseConf
     """.stripMargin.split("\\\\s+").toSeq ++ extraArgs
  }
  // Index of the next expected answer; advanced (under `lock`) as output arrives.
  var next = 0
  val foundMasterAndApplicationIdMessage = Promise.apply[Unit]()
  val foundAllExpectedAnswers = Promise.apply[Unit]()
  val buffer = new ArrayBuffer[String]()
  val lock = new Object
  // Invoked concurrently for every stdout/stderr line of the forked process;
  // synchronized so `next` and `buffer` are updated consistently.
  def captureOutput(source: String)(line: String): Unit = lock.synchronized {
    // This test suite sometimes gets extremely slow out of unknown reason on Jenkins. Here we
    // add a timestamp to provide more diagnosis information.
    val newLine = s"${new Timestamp(new Date().getTime)} - $source> $line"
    log.info(newLine)
    buffer += newLine
    if (line.startsWith("Spark master: ") && line.contains("Application Id: ")) {
      foundMasterAndApplicationIdMessage.trySuccess(())
    }
    // If we haven't found all expected answers and another expected answer comes up...
    if (next < expectedAnswers.size && line.contains(expectedAnswers(next))) {
      log.info(s"$source> found expected output line $next: '${expectedAnswers(next)}'")
      next += 1
      // If all expected answers have been found...
      if (next == expectedAnswers.size) {
        foundAllExpectedAnswers.trySuccess(())
      }
    } else {
      errorResponses.foreach { r =>
        if (line.contains(r)) {
          foundAllExpectedAnswers.tryFailure(
            new RuntimeException(s"Failed with error line '$line'"))
        }
      }
    }
  }
  // Fork spark-sql and feed it every query on stdin up front.
  val process = new ProcessBuilder(command: _*).start()
  val stdinWriter = new OutputStreamWriter(process.getOutputStream, StandardCharsets.UTF_8)
  stdinWriter.write(queriesString)
  stdinWriter.flush()
  stdinWriter.close()
  new ProcessOutputCapturer(process.getInputStream, captureOutput("stdout")).start()
  new ProcessOutputCapturer(process.getErrorStream, captureOutput("stderr")).start()
  try {
    val timeoutForQuery = if (!extraArgs.contains("-e")) {
      // Wait for the cli driver to boot, up to two minutes
      ThreadUtils.awaitResult(foundMasterAndApplicationIdMessage.future, 2.minutes)
      log.info("Cli driver is booted. Waiting for expected answers.")
      // Given timeout is applied after the cli driver is ready
      timeout
    } else {
      // There's no boot message if -e option is provided, just extend timeout long enough
      // so that the bootup duration is counted on the timeout
      2.minutes + timeout
    }
    ThreadUtils.awaitResult(foundAllExpectedAnswers.future, timeoutForQuery)
    log.info("Found all expected output.")
  } catch { case cause: Throwable =>
    // Dump the full captured output so CI failures are diagnosable.
    val message =
      s"""
         |=======================
         |CliSuite failure output
         |=======================
         |Spark SQL CLI command line: ${command.mkString(" ")}
         |Exception: $cause
         |Failed to capture next expected output "${expectedAnswers(next)}" within $timeout.
         |
         |${buffer.mkString("\\n")}
         |===========================
         |End CliSuite failure output
         |===========================
       """.stripMargin
    logError(message, cause)
    fail(message, cause)
  } finally {
    // Give the CLI a minute to exit on its own before killing it.
    if (!process.waitFor(1, MINUTES)) {
      try {
        log.warn("spark-sql did not exit gracefully.")
      } finally {
        process.destroy()
      }
    }
  }
}
// With no explicit warehouse settings, the value comes from the external hive-site.xml.
test("load warehouse dir from hive-site.xml") {
  // Fresh metastore so state from other tests cannot leak in.
  val metastore = Utils.createTempDir()
  metastore.delete()
  try {
    runCliWithin(1.minute,
      maybeWarehouse = None,
      useExternalHiveFile = true,
      metastore = metastore)(
      "desc database default;" -> "hive_one",
      "set spark.sql.warehouse.dir;" -> "hive_one")
  } finally {
    Utils.deleteRecursively(metastore)
  }
}
test("load warehouse dir from --hiveconf") {
  // --hiveconf will overrides hive-site.xml
  runCliWithin(2.minute, useExternalHiveFile = true)(
    "desc database default;" -> warehousePath.getAbsolutePath,
    "create database cliTestDb;" -> "",
    "desc database cliTestDb;" -> warehousePath.getAbsolutePath,
    "set spark.sql.warehouse.dir;" -> warehousePath.getAbsolutePath)
}
test("load warehouse dir from --conf spark(.hadoop).hive.*") {
  // override conf from hive-site.xml
  val metastore = Utils.createTempDir()
  metastore.delete()
  try {
    runCliWithin(2.minute,
      extraArgs =
        Seq("--conf", s"spark.hadoop.${ConfVars.METASTOREWAREHOUSE}=$sparkWareHouseDir"),
      maybeWarehouse = None,
      useExternalHiveFile = true,
      metastore = metastore)(
      "desc database default;" -> sparkWareHouseDir.getAbsolutePath,
      "create database cliTestDb;" -> "",
      "desc database cliTestDb;" -> sparkWareHouseDir.getAbsolutePath,
      "set spark.sql.warehouse.dir;" -> sparkWareHouseDir.getAbsolutePath)
    // override conf from --hiveconf too
    runCliWithin(2.minute,
      extraArgs = Seq("--conf", s"spark.${ConfVars.METASTOREWAREHOUSE}=$sparkWareHouseDir"),
      metastore = metastore)(
      "desc database default;" -> sparkWareHouseDir.getAbsolutePath,
      "create database cliTestDb;" -> "",
      "desc database cliTestDb;" -> sparkWareHouseDir.getAbsolutePath,
      "set spark.sql.warehouse.dir;" -> sparkWareHouseDir.getAbsolutePath)
  } finally {
    Utils.deleteRecursively(metastore)
  }
}
test("load warehouse dir from spark.sql.warehouse.dir") {
  // spark.sql.warehouse.dir overrides all hive ones
  val metastore = Utils.createTempDir()
  metastore.delete()
  try {
    runCliWithin(2.minute,
      extraArgs = Seq(
        "--conf", s"${StaticSQLConf.WAREHOUSE_PATH.key}=${sparkWareHouseDir}1",
        "--conf", s"spark.hadoop.${ConfVars.METASTOREWAREHOUSE}=${sparkWareHouseDir}2"),
      metastore = metastore)(
      "desc database default;" -> sparkWareHouseDir.getAbsolutePath.concat("1"))
  } finally {
    Utils.deleteRecursively(metastore)
  }
}
  test("Simple commands") {
    // End-to-end smoke test: create a Hive table, load the 5-row fixture file,
    // cache it, verify the row count, and drop the table again.
    val dataFilePath =
      Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
    runCliWithin(3.minute)(
      "CREATE TABLE hive_test(key INT, val STRING) USING hive;"
        -> "",
      "SHOW TABLES;"
        -> "hive_test",
      s"""LOAD DATA LOCAL INPATH '$dataFilePath'
         |OVERWRITE INTO TABLE hive_test;""".stripMargin
        -> "",
      "CACHE TABLE hive_test;"
        -> "",
      "SELECT COUNT(*) FROM hive_test;"
        -> "5",
      "DROP TABLE hive_test;"
        -> ""
    )
  }

  test("Single command with -e") {
    // -e executes a single statement non-interactively; success is enough here.
    runCliWithin(2.minute, Seq("-e", "SHOW DATABASES;"))("" -> "")
  }

  test("Single command with --database") {
    // Create a database containing one table, then check that a fresh session
    // started with --database lands in that database.
    runCliWithin(2.minute)(
      "CREATE DATABASE hive_db_test;"
        -> "",
      "USE hive_db_test;"
        -> "",
      "CREATE TABLE hive_table_test(key INT, val STRING);"
        -> "",
      "SHOW TABLES;"
        -> "hive_table_test"
    )
    runCliWithin(2.minute, Seq("--database", "hive_db_test", "-e", "SHOW TABLES;"))(
      "" -> "hive_table_test"
    )
  }
  test("Commands using SerDe provided in --jars") {
    // The JsonSerDe class only exists in the hive-hcatalog-core jar supplied
    // via --jars; writing to and reading back from the SerDe-backed table
    // proves the jar reached the session classpath.
    val jarFile = HiveTestJars.getHiveHcatalogCoreJar().getCanonicalPath
    val dataFilePath =
      Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
    runCliWithin(3.minute, Seq("--jars", s"$jarFile"))(
      """CREATE TABLE t1(key string, val string)
        |ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe';""".stripMargin
        -> "",
      "CREATE TABLE sourceTable (key INT, val STRING) USING hive;"
        -> "",
      s"""LOAD DATA LOCAL INPATH '$dataFilePath'
         |OVERWRITE INTO TABLE sourceTable;""".stripMargin
        -> "",
      "INSERT INTO TABLE t1 SELECT key, val FROM sourceTable;"
        -> "",
      "SELECT collect_list(array(val)) FROM t1;"
        -> """[["val_238"],["val_86"],["val_311"],["val_27"],["val_165"]]""",
      "DROP TABLE t1;"
        -> "",
      "DROP TABLE sourceTable;"
        -> ""
    )
  }

  test("SPARK-29022: Commands using SerDe provided in --hive.aux.jars.path") {
    // Same scenario as the --jars test above, but the SerDe jar is delivered
    // through the Hive hive.aux.jars.path setting instead.
    val dataFilePath =
      Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
    val hiveContribJar = HiveTestJars.getHiveHcatalogCoreJar().getCanonicalPath
    runCliWithin(
      3.minute,
      Seq("--conf", s"spark.hadoop.${ConfVars.HIVEAUXJARS}=$hiveContribJar"))(
      """CREATE TABLE addJarWithHiveAux(key string, val string)
        |ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe';""".stripMargin
        -> "",
      "CREATE TABLE sourceTableForWithHiveAux (key INT, val STRING) USING hive;"
        -> "",
      s"""LOAD DATA LOCAL INPATH '$dataFilePath'
         |OVERWRITE INTO TABLE sourceTableForWithHiveAux;""".stripMargin
        -> "",
      "INSERT INTO TABLE addJarWithHiveAux SELECT key, val FROM sourceTableForWithHiveAux;"
        -> "",
      "SELECT collect_list(array(val)) FROM addJarWithHiveAux;"
        -> """[["val_238"],["val_86"],["val_311"],["val_27"],["val_165"]]""",
      "DROP TABLE addJarWithHiveAux;"
        -> "",
      "DROP TABLE sourceTableForWithHiveAux;"
        -> ""
    )
  }

  test("SPARK-11188 Analysis error reporting") {
    // Analysis failures must surface as a readable "Error in query" line
    // instead of a raw stack trace.
    runCliWithin(timeout = 2.minute,
      errorResponses = Seq("AnalysisException"))(
      "select * from nonexistent_table;"
        -> "Error in query: Table or view not found: nonexistent_table;"
    )
  }
  test("SPARK-11624 Spark SQL CLI should set sessionState only once") {
    // The shell-out command (!echo) would fail if the Hive session state were
    // re-initialized; seeing the echoed text proves it was set exactly once.
    runCliWithin(2.minute, Seq("-e", "!echo \\"This is a test for Spark-11624\\";"))(
      "" -> "This is a test for Spark-11624")
  }

  test("list jars") {
    // LIST JARS must report a jar previously registered with ADD JAR.
    val jarFile = Thread.currentThread().getContextClassLoader.getResource("TestUDTF.jar")
    runCliWithin(2.minute)(
      s"ADD JAR $jarFile;" -> "",
      s"LIST JARS;" -> "TestUDTF.jar"
    )
  }

  test("list jar <jarfile>") {
    // LIST JAR with an explicit path (mixed-case keyword) reports that jar.
    val jarFile = Thread.currentThread().getContextClassLoader.getResource("TestUDTF.jar")
    runCliWithin(2.minute)(
      s"ADD JAR $jarFile;" -> "",
      s"List JAR $jarFile;" -> "TestUDTF.jar"
    )
  }

  test("list files") {
    val dataFilePath = Thread.currentThread().
      getContextClassLoader.getResource("data/files/small_kv.txt")
    runCliWithin(2.minute)(
      s"ADD FILE $dataFilePath;" -> "",
      s"LIST FILES;" -> "small_kv.txt"
    )
  }

  test("list file <filepath>") {
    val dataFilePath = Thread.currentThread().
      getContextClassLoader.getResource("data/files/small_kv.txt")
    runCliWithin(2.minute)(
      s"ADD FILE $dataFilePath;" -> "",
      s"LIST FILE $dataFilePath;" -> "small_kv.txt"
    )
  }

  test("apply hiveconf from cli command") {
    // conf1/conf2 are presumably injected via --hiveconf by the test harness
    // (runCliWithin) — confirm there. SET must read them back, and
    // ${hiveconf:...} substitution must work inside a SET command.
    runCliWithin(2.minute)(
      "SET conf1;" -> "conftest",
      "SET conf2;" -> "1",
      "SET conf3=${hiveconf:conf1};" -> "conftest",
      "SET conf3;" -> "conftest"
    )
  }

  test("Support hive.aux.jars.path") {
    // A UDF from hive-contrib is only resolvable when hive.aux.jars.path is honored.
    val hiveContribJar = HiveTestJars.getHiveContribJar().getCanonicalPath
    runCliWithin(
      1.minute,
      Seq("--conf", s"spark.hadoop.${ConfVars.HIVEAUXJARS}=$hiveContribJar"))(
      "CREATE TEMPORARY FUNCTION example_format AS " +
        "'org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat';" -> "",
      "SELECT example_format('%o', 93);" -> "135"
    )
  }
  test("SPARK-28840 test --jars command") {
    // A UDTF from a test jar passed with --jars must be registrable and callable.
    val jarFile = new File("../../sql/hive/src/test/resources/SPARK-21101-1.0.jar").getCanonicalPath
    runCliWithin(
      1.minute,
      Seq("--jars", s"$jarFile"))(
      "CREATE TEMPORARY FUNCTION testjar AS" +
        " 'org.apache.spark.sql.hive.execution.UDTFStack';" -> "",
      "SELECT testjar(1,'TEST-SPARK-TEST-jar', 28840);" -> "TEST-SPARK-TEST-jar\\t28840"
    )
  }

  test("SPARK-28840 test --jars and hive.aux.jars.path command") {
    // Functions from a --jars jar and from a hive.aux.jars.path jar must both
    // be usable in the same session.
    val jarFile = new File("../../sql/hive/src/test/resources/SPARK-21101-1.0.jar").getCanonicalPath
    val hiveContribJar = HiveTestJars.getHiveContribJar().getCanonicalPath
    runCliWithin(
      2.minutes,
      Seq("--jars", s"$jarFile", "--conf",
        s"spark.hadoop.${ConfVars.HIVEAUXJARS}=$hiveContribJar"))(
      "CREATE TEMPORARY FUNCTION testjar AS" +
        " 'org.apache.spark.sql.hive.execution.UDTFStack';" -> "",
      "SELECT testjar(1,'TEST-SPARK-TEST-jar', 28840);" -> "TEST-SPARK-TEST-jar\\t28840",
      "CREATE TEMPORARY FUNCTION example_max AS " +
        "'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax';" -> "",
      "SELECT concat_ws(',', 'First', example_max(1234321), 'Third');" -> "First,1234321,Third"
    )
  }

  test("SPARK-29022 Commands using SerDe provided in ADD JAR sql") {
    // Third delivery mechanism for the SerDe jar: an ADD JAR statement issued
    // inside the running session.
    val dataFilePath =
      Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
    val hiveContribJar = HiveTestJars.getHiveHcatalogCoreJar().getCanonicalPath
    runCliWithin(
      3.minute)(
      s"ADD JAR ${hiveContribJar};" -> "",
      """CREATE TABLE addJarWithSQL(key string, val string)
        |ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe';""".stripMargin
        -> "",
      "CREATE TABLE sourceTableForWithSQL(key INT, val STRING) USING hive;"
        -> "",
      s"""LOAD DATA LOCAL INPATH '$dataFilePath'
         |OVERWRITE INTO TABLE sourceTableForWithSQL;""".stripMargin
        -> "",
      "INSERT INTO TABLE addJarWithSQL SELECT key, val FROM sourceTableForWithSQL;"
        -> "",
      "SELECT collect_list(array(val)) FROM addJarWithSQL;"
        -> """[["val_238"],["val_86"],["val_311"],["val_27"],["val_165"]]""",
      "DROP TABLE addJarWithSQL;"
        -> "",
      "DROP TABLE sourceTableForWithSQL;"
        -> ""
    )
  }
  test("SPARK-26321 Should not split semicolon within quoted string literals") {
    // Semicolons inside quoted literals (including ones next to escaped
    // quotes) must not be treated as statement separators.
    runCliWithin(3.minute)(
      """select 'Test1', "^;^";""" -> "Test1\\t^;^",
      """select 'Test2', "\\";";""" -> "Test2\\t\\";",
      """select 'Test3', "\\';";""" -> "Test3\\t';",
      "select concat('Test4', ';');" -> "Test4;"
    )
  }

  test("Pad Decimal numbers with trailing zeros to the scale of the column") {
    // The output must carry all 18 fractional digits of the declared scale.
    runCliWithin(1.minute)(
      "SELECT CAST(1 AS DECIMAL(38, 18));"
        -> "1.000000000000000000"
    )
  }

  test("SPARK-30049 Should not complain for quotes in commented lines") {
    // An unbalanced quote inside a line comment must not break statement parsing.
    runCliWithin(1.minute)(
      """SELECT concat('test', 'comment') -- someone's comment here
        |;""".stripMargin -> "testcomment"
    )
  }

  test("SPARK-31102 spark-sql fails to parse when contains comment") {
    runCliWithin(1.minute)(
      """SELECT concat('test', 'comment'),
        | -- someone's comment here
        | 2;""".stripMargin -> "testcomment"
    )
  }

  test("SPARK-30049 Should not complain for quotes in commented with multi-lines") {
    // A comment continued across lines with trailing backslashes may contain
    // unbalanced quotes as well.
    runCliWithin(1.minute)(
      """SELECT concat('test', 'comment') -- someone's comment here \\
        | comment continues here with single ' quote \\
        | extra ' \\
        |;""".stripMargin -> "testcomment"
    )
  }

  test("SPARK-31595 Should allow unescaped quote mark in quoted string") {
    // NOTE: the .stripMargin calls below are no-ops on these single-line
    // strings; kept for byte-for-byte fidelity with the original commands.
    runCliWithin(1.minute)(
      "SELECT '\\"legal string a';select 1 + 234;".stripMargin -> "235"
    )
    runCliWithin(1.minute)(
      "SELECT \\"legal 'string b\\";select 22222 + 1;".stripMargin -> "22223"
    )
  }
  test("AnalysisException with root cause will be printStacktrace") {
    // If it is not in silent mode, will print the stacktrace
    runCliWithin(
      1.minute,
      extraArgs = Seq("--hiveconf", "hive.session.silent=false",
        "-e", "select date_sub(date'2011-11-11', '1.2');"),
      errorResponses = Seq("NumberFormatException"))(
      ("", "Error in query: The second argument of 'date_sub' function needs to be an integer."),
      ("", "NumberFormatException: invalid input syntax for type numeric: 1.2"))
    // If it is in silent mode, will print the error message only
    runCliWithin(
      1.minute,
      extraArgs = Seq("--conf", "spark.hive.session.silent=true",
        "-e", "select date_sub(date'2011-11-11', '1.2');"),
      errorResponses = Seq("AnalysisException"))(
      ("", "Error in query: The second argument of 'date_sub' function needs to be an integer."))
  }

  test("SPARK-30808: use Java 8 time API in Thrift SQL CLI by default") {
    // If Java 8 time API is enabled via the SQL config `spark.sql.datetime.java8API.enabled`,
    // the date formatter for `java.sql.LocalDate` must output negative years with sign.
    runCliWithin(1.minute)("SELECT MAKE_DATE(-44, 3, 15);" -> "-0044-03-15")
  }

  test("SPARK-33100: Ignore a semicolon inside a bracketed comment in spark-sql") {
    // Bracketed /* ... */ comments — including empty statements around them,
    // embedded semicolons, and nested-looking openers — must not split statements.
    runCliWithin(4.minute)(
      "/* SELECT 'test';*/ SELECT 'test';" -> "test",
      ";;/* SELECT 'test';*/ SELECT 'test';" -> "test",
      "/* SELECT 'test';*/;; SELECT 'test';" -> "test",
      "SELECT 'test'; -- SELECT 'test';" -> "test",
      "SELECT 'test'; /* SELECT 'test';*/;" -> "test",
      "/*$meta chars{^\\\\;}*/ SELECT 'test';" -> "test",
      "/*\\nmulti-line\\n*/ SELECT 'test';" -> "test",
      "/*/* multi-level bracketed*/ SELECT 'test';" -> "test"
    )
  }

  test("SPARK-33100: test sql statements with hint in bracketed comment") {
    // "/*+ ... */" is a join hint and must change the plan; "/* + ... */"
    // (space before '+') is a plain comment and must not.
    runCliWithin(2.minute)(
      "CREATE TEMPORARY VIEW t1 AS SELECT * FROM VALUES(1, 2) AS t1(k, v);" -> "",
      "CREATE TEMPORARY VIEW t2 AS SELECT * FROM VALUES(2, 1) AS t2(k, v);" -> "",
      "EXPLAIN SELECT /*+ MERGEJOIN(t1) */ t1.* FROM t1 JOIN t2 ON t1.k = t2.v;" -> "SortMergeJoin",
      "EXPLAIN SELECT /* + MERGEJOIN(t1) */ t1.* FROM t1 JOIN t2 ON t1.k = t2.v;"
        -> "BroadcastHashJoin"
    )
  }
}
| witgo/spark | sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala | Scala | apache-2.0 | 22,916 |
/*
* Copyright 2013 Turkcell Teknoloji Inc. and individual
* contributors by the 'Created by' comments.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.swarm
package security
import java.nio.ByteBuffer
import io.swarm.{UUIDGenerator, Config}
/**
 * Enumerates the kinds of principals that can authenticate (organization,
 * admin, domain, device, user) and maps each kind to a two-letter prefix and
 * to the base64 encoding of ":" + prefix.
 *
 * @author Anil Chalil
 */
object AuthPrincipalType extends Enumeration {
  type AuthPrincipalType = Value
  val Organization, Admin, Domain, Device, User = Value

  // Resolves a principal type from the base64-encoded ":xx" marker.
  // NOTE(review): both lookups below throw a MatchError (not a domain-specific
  // exception) for unknown input — confirm callers treat that as "malformed".
  def fromBase64(s: String) = s match {
    case AuthPrincipalTypeValue.orgBase64 => Organization
    case AuthPrincipalTypeValue.adminBase64 => Admin
    case AuthPrincipalTypeValue.databaseBase64 => Domain
    case AuthPrincipalTypeValue.deviceBase64 => Device
    case AuthPrincipalTypeValue.databaseUserBase64 => User
  }

  // Resolves a principal type from its plain two-letter prefix.
  def fromPrefix(s: String) = s match {
    case "oa" => Organization
    case "ad" => Admin
    case "do" => Domain
    case "dv" => Device
    case "us" => User
  }

  // Enrichment that gives every principal-type Value its prefix forms.
  class AuthPrincipalTypeValue(val principalType: Value) {
    // Two-letter short code; must stay in sync with fromPrefix above.
    def prefix = principalType match {
      case Organization => "oa"
      case Admin => "ad"
      case Domain => "do"
      case Device => "dv"
      case User => "us"
    }

    // Base64 form of ":" + prefix; must stay in sync with fromBase64 above.
    def base64Prefix = principalType match {
      case Organization => AuthPrincipalTypeValue.orgBase64
      case Admin => AuthPrincipalTypeValue.adminBase64
      case Domain => AuthPrincipalTypeValue.databaseBase64
      case Device => AuthPrincipalTypeValue.deviceBase64
      case User => AuthPrincipalTypeValue.databaseUserBase64
    }
  }

  // Implicit view used by the companion below (Organization.prefix etc.).
  implicit def value2AuthPrincipalTypeValue(ap: Value) = new AuthPrincipalTypeValue(ap)

  // Precomputed base64 encodings of ":" + prefix. ".base64" is presumably a
  // String enrichment from the io.swarm package object — confirm.
  object AuthPrincipalTypeValue {
    val orgBase64 = (":" + Organization.prefix).base64
    val adminBase64 = (":" + Admin.prefix).base64
    val databaseBase64 = (":" + Domain.prefix).base64
    val deviceBase64 = (":" + Device.prefix).base64
    val databaseUserBase64 = (":" + User.prefix).base64
    val prefixLength = 2
    val base64prefixLength = 4
  }
}
| Turkcell/swarm | core/src/main/scala/io/swarm/security/AuthPrincipalType.scala | Scala | apache-2.0 | 2,476 |
package scorex.transaction
import java.io.File
import org.h2.mvstore.MVStore
import org.scalacheck.Gen
import org.scalatest._
import org.scalatest.prop.{GeneratorDrivenPropertyChecks, PropertyChecks}
import scorex.transaction.state.database.blockchain.StoredState
import scorex.transaction.state.database.state._
class StoredStateUnitTests extends PropSpec with PropertyChecks with GeneratorDrivenPropertyChecks with Matchers
  with PrivateMethodTester with OptionValues with TransactionGen {

  // Fixed scratch location; the previous state file is deleted so each run
  // starts from an empty state.
  val folder = "/tmp/scorex/test/"
  new File(folder).mkdirs()
  val stateFile = folder + "state.dat"
  new File(stateFile).delete()

  val db = new MVStore.Builder().fileName(stateFile).compress().open()
  val state = new StoredState(db)
  val testAdd = "aPFwzRp5TXCzi6DSuHmpmbQunopXRuxLk"

  // StoredState.applyChanges is private; PrivateMethodTester lets the tests
  // drive it directly with hand-built account-state maps.
  val applyMethod = PrivateMethod[Unit]('applyChanges)

  property("private methods") {
    forAll(paymentGenerator, Gen.posNum[Long]) { (tx: PaymentTransaction,
                                                  balance: Long) =>
      state.balance(testAdd) shouldBe 0
      state invokePrivate applyMethod(Map(testAdd ->(AccState(balance), Seq(FeesStateChange(balance), tx, tx))))
      state.balance(testAdd) shouldBe balance
      state.included(tx).value shouldBe state.stateHeight
      // Reset the account so the next generated sample starts from zero.
      state invokePrivate applyMethod(Map(testAdd ->(AccState(0L), Seq(tx))))
    }
  }

  property("Reopen state") {
    // A balance written through one StoredState must survive closing the
    // MVStore and reopening the same file with a fresh instance.
    val balance = 1234L
    state invokePrivate applyMethod(Map(testAdd ->(AccState(balance), Seq(FeesStateChange(balance)))))
    state.balance(testAdd) shouldBe balance
    db.close()

    val state2 = new StoredState(new MVStore.Builder().fileName(stateFile).compress().open())
    state2.balance(testAdd) shouldBe balance
    state2 invokePrivate applyMethod(Map(testAdd ->(AccState(0L), Seq())))
  }
}
| ScorexProject/Scorex-Lagonaki | scorex-transaction/src/test/scala/scorex/transaction/StoredStateUnitTests.scala | Scala | cc0-1.0 | 1,800 |
/**
* Copyright (c) 2007-2011 Eric Torreborre <etorreborre@yahoo.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of
* the Software. Neither the name of specs nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written permission.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package org.specs.matcher
import scala.collection.mutable.Queue
import org.specs.runner._
import org.specs.Sugar._
import org.specs.specification._
import org.specs.execute._
import org.specs._
class MatchersSpecification extends SpecificationWithJUnit with ExpectationMatchers {
  // Stub example used to collect failure messages from the specs under test.
  var reported: Example = new Example("this example serves as a stub to collect failure messages", new Sus("", this))
  // an expression which knows how much time is had been evaluated
  case class exp[T](var a: T) { var evaluationsNb: Int= 0; def evaluate = {evaluationsNb += 1; a} }
  // a matcher which checks that a matcher is not evaluating twice the value to evaluate
  def evalOnce[T](a : exp[T]) = new Matcher[Matcher[T] ] {
    // Applies the inner matcher once and succeeds only if the expression was
    // evaluated exactly one time ("ok"/"ko" are the success/failure messages).
    def apply(m: =>Matcher[T]) = ({m.apply(a.evaluate); a.evaluationsNb == 1}, "ok", "ko")
  }
}
trait ExpectationMatchers { this: Specification =>
  /** Matcher asserting that a failure message is exactly `message`. */
  def failWith(message: String) = is_==(message)

  /** Matcher asserting that a failure message matches the regex `pattern`. */
  def failWithMatch(pattern: String) = beMatching(pattern)

  /**
   * Evaluates `value` and returns the message of the `FailureException` it
   * throws, or a fixed marker string when it does not fail.
   * Any other throwable propagates unchanged (the original caught and
   * immediately rethrew it, which was redundant; the explicit `return`s were
   * also dropped — the try/catch is now a single expression).
   */
  def expectation(value: => Any): String =
    try {
      value
      "this expectation has not failed"
    } catch {
      case FailureException(message) => message
    }
}
} | Muki-SkyWalker/specs | src/test/scala/org/specs/matcher/MatchersSpecification.scala | Scala | mit | 2,560 |
package com.wavesplatform.it.sync.activation
import com.typesafe.config.Config
import com.wavesplatform.features.api.NodeFeatureStatus
import com.wavesplatform.features.{BlockchainFeatureStatus, BlockchainFeatures}
import com.wavesplatform.it.api.SyncHttpApi._
import com.wavesplatform.it.{BaseFreeSpec, NodeConfigs, ReportingTestName}
class FeatureActivationTestSuite extends BaseFreeSpec with ActivationStatusRequest with ReportingTestName {

  // Length of one voting window (blocks) and the number of supporting blocks
  // required inside a window for the feature to be approved.
  private val votingInterval = 12
  private val blocksForActivation = 12 // should be even
  private val featureNum: Short = BlockchainFeatures.SmallerMinimalGeneratingBalance.id
  private val featureDescr = BlockchainFeatures.SmallerMinimalGeneratingBalance.description

  // Two nodes, both voting for the feature, with a shortened voting period so
  // the whole approve/activate cycle fits into a test run.
  override protected def nodeConfigs: Seq[Config] = {
    NodeConfigs.newBuilder
      .overrideBase(_.raw(s"""waves {
                             | blockchain.custom.functionality {
                             | pre-activated-features = {}
                             | feature-check-blocks-period = $votingInterval
                             | blocks-for-feature-activation = $blocksForActivation
                             | }
                             | features.supported = [$featureNum]
                             | miner.quorum = 1
                             |}""".stripMargin))
      .withDefault(2)
      .buildNonConflicting()
  }

  "supported blocks increased when voting starts" in {
    // Partway through the first voting window every node reports the feature
    // as undefined on-chain but voted-for locally.
    nodes.waitForHeight(votingInterval * 2 / 3)
    val status = nodes.map(_.featureActivationStatus(featureNum))
    status.foreach { s =>
      s.description shouldBe featureDescr
      assertVotingStatus(s, s.supportingBlocks.get, BlockchainFeatureStatus.Undefined, NodeFeatureStatus.Voted)
    }
  }

  "supported blocks counter resets on the next voting interval" in {
    // Mid-second-window the feature is still undefined: votes from the first
    // window do not carry over.
    nodes.waitForHeight(votingInterval * 2 - blocksForActivation / 2)
    val info = nodes.map(_.featureActivationStatus(featureNum))
    info.foreach(i => i.blockchainStatus shouldBe BlockchainFeatureStatus.Undefined)
  }

  "blockchain status is APPROVED in second voting interval" in {
    val checkHeight = votingInterval * 2
    nodes.waitForHeight(checkHeight)
    val statusInfo = nodes.map(_.featureActivationStatus(featureNum))
    statusInfo.foreach { si =>
      si.description shouldBe featureDescr
      // Activation will be on a next voting interval
      assertApprovedStatus(si, checkHeight + votingInterval, NodeFeatureStatus.Voted)
    }
  }

  "blockchain status is ACTIVATED in third voting interval" in {
    val checkHeight = votingInterval * 3
    nodes.waitForHeight(checkHeight)
    val statusInfo = nodes.map(_.featureActivationStatus(featureNum))
    statusInfo.foreach { si =>
      si.description shouldBe featureDescr
      assertActivatedStatus(si, checkHeight, NodeFeatureStatus.Implemented)
    }
  }
}
| wavesplatform/Waves | node-it/src/test/scala/com/wavesplatform/it/sync/activation/FeatureActivationTestSuite.scala | Scala | mit | 2,884 |
/*
* Copyright (c) 2013-2014. Regents of the University of California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.avocado.algorithms.hmm
import scala.annotation.tailrec
object HMMAligner {
  // Debug flag; off by default.
  val debug = false

  // Convenience entry point: aligns the two sequences with a freshly created
  // HMMAligner using the default log-space penalties.
  def align(refSequence: String, testSequence: String, testQualities: String): Alignment = {
    val hmm = new HMMAligner
    hmm.alignSequences(refSequence, testSequence, testQualities)
  }
}
/**
* Pairwise alignment HMM. See the Durbin textbook (1998), chapter 4.
*
* All likelihoods are computed in log-space as are the input parameters
*/
class HMMAligner(val LOG_GAP_OPEN_PENALTY: Double = -4.0,
                 val LOG_GAP_CONTINUE_PENALTY: Double = -2.0,
                 val LOG_SNP_RATE: Double = -3.0,
                 val LOG_INDEL_RATE: Double = -4.0) {

  // Dynamic-programming matrix shared between computePathLikelihood (which
  // fills it) and constructAlignment (which reads it back). NOTE(review):
  // because it is reused across calls, one HMMAligner instance must not run
  // alignSequences concurrently from multiple threads.
  val transitionMatrix = new TransitionMatrix(LOG_GAP_OPEN_PENALTY,
    LOG_GAP_CONTINUE_PENALTY,
    LOG_SNP_RATE,
    LOG_INDEL_RATE)

  /**
   * Aligns sequences.
   *
   * Runs the forward fill (computePathLikelihood) and then the traceback
   * (constructAlignment) over the shared transition matrix.
   *
   * @param refSequence Reference sequence over the active region.
   * @param testSequence Sequence being scored.
   * @param testQualities String of qualities. Not currently used.
   * @return Alignment which stores the aligned sequences and likelihoods
   */
  def alignSequences(refSequence: String, testSequence: String, testQualities: String): Alignment = {
    computePathLikelihood(refSequence, testSequence, testQualities)
    constructAlignment(refSequence, testSequence, transitionMatrix)
  }

  /**
   * Builds the aligned sequences by choosing the most likely state transitions
   * (traceback from the bottom-right corner of the filled matrix).
   *
   * @param refSequence Reference sequence over the active region.
   * @param testSequence Sequence being scored.
   * @return Alignment which stores the aligned sequences and likelihoods
   */
  def constructAlignment(refSequence: String, testSequence: String, transitionMatrix: TransitionMatrix): Alignment = {
    val alignmentLikelihood = transitionMatrix.getAlignmentLikelihood
    // Matrices are padded by one row/column for the empty-prefix state.
    val paddedRefLen = refSequence.length + 1
    val paddedTestLen = testSequence.length + 1
    val stride = paddedRefLen

    // Walks back from (testLen, refLen) to the origin, accumulating the
    // aligned strings in reverse; i indexes the test sequence, j the reference.
    @tailrec
    def constructAlignmentSequences(i: Int, j: Int, revAlignedRefSeq: String = "", revAlignedTestSeq: String = "", revAlignment: String = "", numSnps: Int = 0, numIndels: Int = 0): Alignment = {
      val idx = i * stride + j
      if (i <= 0 || j <= 0) {
        // Compute the prior probability of the alignments, with the Dindel numbers.
        val hasVariants: Boolean = numSnps > 0 || numIndels > 0
        val alignmentPrior = LOG_SNP_RATE * numSnps + LOG_INDEL_RATE * numIndels
        new Alignment(alignmentLikelihood,
          alignmentPrior,
          refSequence.take(j).toLowerCase + revAlignedRefSeq.reverse, // pads with first j bases of the reference
          "_" * j + revAlignedTestSeq.reverse,
          "P" * j + revAlignment.reverse,
          hasVariants)
      } else {
        /*
         * Check for scoring at each position, and then suggest next move. At this step, we identify
         * the next direction to move by looking at the "state" of the current coordinate. We call
         * our current 'state' by choosing the state with the highest cumulative likelihood.
         */
        val nextDirection = transitionMatrix.getMostLikelyState(idx)
        nextDirection match {
          case AlignmentState.Match => {
            // Consume one base from each sequence; mismatching bases count as a SNP.
            val isSnp = testSequence(i - 1) != refSequence(j - 1)
            val alignmentCharacter = if (isSnp) 'X' else '='
            val addSnp = if (isSnp) 1 else 0
            constructAlignmentSequences(i - 1, j - 1, revAlignedRefSeq + refSequence(j - 1), revAlignedTestSeq + testSequence(i - 1), revAlignment + alignmentCharacter, numSnps + addSnp, numIndels)
          }
          case AlignmentState.Insertion => {
            // Inserted base from reference, add gap in reference alignment, check next base in test
            constructAlignmentSequences(i - 1, j, revAlignedRefSeq + '_', revAlignedTestSeq + testSequence(i - 1), revAlignment + 'I', numSnps, numIndels + 1)
          }
          case AlignmentState.Deletion => {
            // Deleted base from reference, add gap in test alignment, check next base in reference
            constructAlignmentSequences(i, j - 1, revAlignedRefSeq + refSequence(j - 1), revAlignedTestSeq + '_', revAlignment + 'D', numSnps, numIndels + 1)
          }
          case AlignmentState.Padding => {
            // Padded reference sequence, gap in test alignment, check next base in reference
            constructAlignmentSequences(i, j - 1, revAlignedRefSeq + refSequence(j - 1).toLower, revAlignedTestSeq + '_', revAlignment + 'P', numSnps, numIndels)
          }
        }
      }
    }
    // Construct alignment sequence recursively starting at the end of the sequences
    constructAlignmentSequences(paddedTestLen - 1, paddedRefLen - 1)
  }

  /**
   * Compute the optimal transition matrix based on gap penalties.
   *
   * Row-major forward fill: for each (test, ref) cell the four state
   * likelihoods (match, insert, delete, padding) are computed from the cells
   * already filled above/left of it. Cell (0, 0) is the fixed origin and is
   * never overwritten.
   *
   * @param refSequence Reference sequence over the active region.
   * @param testSequence Sequence being scored.
   * @param testQualities String of qualities. Not currently used.
   * @return TransitionMatrix which stores the likelihoods of each state and every pair of positions in the reference and test
   */
  private def computePathLikelihood(refSequence: String, testSequence: String, testQualities: String): TransitionMatrix = {
    val paddedRefLen = refSequence.length + 1
    val paddedTestLen = testSequence.length + 1
    val stride = paddedRefLen
    transitionMatrix.reallocate(paddedRefLen, paddedTestLen)
    for (testSeqPos <- 0 until paddedTestLen) {
      for (refSeqPos <- 0 until paddedRefLen) {
        val m: Double = transitionMatrix.getMatchLikelihood(testSeqPos, refSeqPos, stride, refSequence, testSequence)
        val ins: Double = transitionMatrix.getInsertionLikelihood(testSeqPos, refSeqPos, stride)
        val del: Double = transitionMatrix.getDeletionLikelihood(testSeqPos, refSeqPos, stride)
        val p: Double = transitionMatrix.getPaddingLikelihood(testSeqPos, refSeqPos, stride, testSequence.length)
        if (testSeqPos > 0 || refSeqPos > 0) {
          val idx = testSeqPos * stride + refSeqPos
          transitionMatrix.matches(idx) = m
          transitionMatrix.inserts(idx) = ins
          transitionMatrix.deletes(idx) = del
          transitionMatrix.padding(idx) = p
        }
      }
    }
    transitionMatrix
  }
}
| hammerlab/avocado | avocado-core/src/main/scala/org/bdgenomics/avocado/algorithms/hmm/HMMAligner.scala | Scala | apache-2.0 | 6,936 |
package multitenantdb
import com.mnubo.dbevolv._
import com.typesafe.config.{ConfigFactory, Config}
/**
 * Test tenant-configuration provider: the tenant "awesomecustomer" receives a
 * dedicated shard setting, every other tenant an empty configuration.
 */
class TenantConfiguration(config: Config) extends TenantConfigurationProvider {
  private val shardedTenantConfig = ConfigFactory.parseString("shard_number = 2")
  private val emptyTenantConfig = ConfigFactory.empty()

  override def configFor(tenant: String) = tenant match {
    case "awesomecustomer" => shardedTenantConfig
    case _                 => emptyTenantConfig
  }

  override def close() = ()
}
| mnubo/dbevolv | plugin/src/sbt-test/schema-manager-generator/multitenantdb/src/main/scala/multitenantdb/TenantConfiguration.scala | Scala | apache-2.0 | 450 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.rdd
import scala.collection.JavaConverters._
import org.apache.hadoop.mapreduce.RecordReader
import org.apache.spark.{Partition, TaskContext}
import org.apache.spark.sql.carbondata.execution.datasources.tasklisteners.CarbonQueryTaskCompletionListener
import org.apache.spark.sql.profiler.{Profiler, QueryTaskEnd}
import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.memory.UnsafeMemoryManager
import org.apache.carbondata.core.stats.{QueryStatistic, QueryStatisticsConstants, QueryStatisticsRecorder}
import org.apache.carbondata.core.util.{DataTypeUtil, TaskMetricsMap, ThreadLocalTaskInfo}
import org.apache.carbondata.spark.InitInputMetrics
class QueryTaskCompletionListener(freeMemory: Boolean,
var reader: RecordReader[Void, Object],
inputMetricsStats: InitInputMetrics, executionId: String, taskId: Int, queryStartTime: Long,
queryStatisticsRecorder: QueryStatisticsRecorder, split: Partition, queryId: String)
extends CarbonQueryTaskCompletionListener {
override def onTaskCompletion(context: TaskContext): Unit = {
if (reader != null) {
try {
reader.close()
} catch {
case e: Exception =>
LogServiceFactory.getLogService(this.getClass.getCanonicalName).error(e)
}
reader = null
}
TaskMetricsMap.getInstance().updateReadBytes(Thread.currentThread().getId)
inputMetricsStats.updateAndClose()
logStatistics(executionId, taskId, queryStartTime, queryStatisticsRecorder, split)
if (freeMemory) {
UnsafeMemoryManager.INSTANCE
.freeMemoryAll(ThreadLocalTaskInfo.getCarbonTaskInfo.getTaskId)
ThreadLocalTaskInfo.clearCarbonTaskInfo()
DataTypeUtil.clearFormatter()
}
}
def logStatistics(
executionId: String,
taskId: Long,
queryStartTime: Long,
recorder: QueryStatisticsRecorder,
split: Partition
): Unit = {
if (null != recorder) {
val queryStatistic = new QueryStatistic()
queryStatistic.addFixedTimeStatistic(QueryStatisticsConstants.EXECUTOR_PART,
System.currentTimeMillis - queryStartTime)
recorder.recordStatistics(queryStatistic)
// print executor query statistics for each task_id
val statistics = recorder.statisticsForTask(taskId, queryStartTime)
if (statistics != null && executionId != null) {
Profiler.invokeIfEnable {
val inputSplit = split.asInstanceOf[CarbonSparkPartition].split.value
inputSplit.calculateLength()
val size = inputSplit.getLength
val files = inputSplit.getAllSplits.asScala.map { s =>
s.getSegmentId + "/" + s.getPath.getName
}.toArray[String]
Profiler.send(
QueryTaskEnd(
executionId.toLong,
queryId,
statistics.getValues,
size,
files
)
)
}
}
recorder.logStatisticsForTask(statistics)
}
}
}
| zzcclp/carbondata | integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala | Scala | apache-2.0 | 3,825 |
package hyperion.database
import java.sql.SQLException
import java.time.{LocalDate, Month}
import scala.collection.immutable.Seq
import scala.concurrent.ExecutionContextExecutor
import scala.concurrent.duration.DurationInt
import scala.util.{Failure, Success}
import akka.actor.{Actor, ActorLogging, ActorRef}
import hyperion.AppSettings
import hyperion.database.DatabaseActor._
import slick.jdbc.JdbcBackend.Database
object DatabaseActor {
  // Message protocol for DatabaseActor.
  // Asks for a product-name/version string of the underlying database.
  final case object GetDatabaseInfo
  // Asks for the reading recorded on exactly `date`.
  final case class RetrieveMeterReadingForDate(date: LocalDate)
  // Asks for all readings in the inclusive range [start, end].
  final case class RetrieveMeterReadingForDateRange(start: LocalDate, end: LocalDate)
  // Asks for all readings within the given calendar month.
  final case class RetrieveMeterReadingForMonth(month: Month, year: Int)
  // Reply carrying the readings found (empty on no match or on error).
  final case class RetrievedMeterReadings(readings: Seq[HistoricalMeterReading])
  // Fire-and-forget request to persist one reading.
  final case class StoreMeterReading(reading: HistoricalMeterReading)
}
class DatabaseActor extends Actor with ActorLogging with AppSettings {
implicit val executionContext: ExecutionContextExecutor = context.dispatcher
log.info("Connecting to database...")
private[this] val database = createDatabase()
protected def createDatabase(): Database = Database.forConfig("hyperion.database")
private[this] val dao = createDao()
protected def createDao(): MeterReadingDAO = new MeterReadingDAO(database)
override def postStop: Unit = {
database.close()
database.source.close()
}
  // Dispatches protocol messages; every retrieval replies to the sender with
  // a RetrievedMeterReadings, while StoreMeterReading sends no reply.
  override def receive: Receive = {
    case GetDatabaseInfo => getDatabaseInfo(sender())
    case RetrieveMeterReadingForDate(date) => retrieveMeterReadingByDate(sender(), date)
    case RetrieveMeterReadingForDateRange(start, end) => retrieveMeterReadingByDateRange(sender(), start, end)
    case RetrieveMeterReadingForMonth(month, year) => retrieveMeterReadingsByMonth(sender(), month, year)
    case StoreMeterReading(reading) => storeMeterReading(reading)
  }
private def getDatabaseInfo(receiver: ActorRef) = {
log.info("Retrieving database metadata")
val session = database.createSession()
val metadata = session.metaData
val result = s"${metadata.getDatabaseProductName} ${metadata.getDatabaseProductVersion}"
session.close()
receiver ! result
}
private def retrieveMeterReadingByDateRange(receiver: ActorRef, start: LocalDate, end: LocalDate) = {
log.info(s"Retrieve meter reading from $start to $end")
dao.retrieveMeterReadings(start, end) andThen {
case Success(result) if result.nonEmpty =>
receiver ! RetrievedMeterReadings(result)
case Success(result) if result.isEmpty =>
log.error(s"No meter readings between $start and $end")
receiver ! RetrievedMeterReadings(Seq.empty)
case Failure(reason) =>
log.error("Error retrieving meter readings from database: {}", reason)
receiver ! RetrievedMeterReadings(Seq.empty)
}
}
private def retrieveMeterReadingByDate(receiver: ActorRef, date: LocalDate) = {
log.info(s"Retrieve meter reading for $date")
dao.retrieveMeterReading(date) andThen {
case Success(Some(result)) =>
receiver ! RetrievedMeterReadings(Seq(result))
case Success(None) =>
log.error(s"No meter reading for date: $date")
receiver ! RetrievedMeterReadings(Seq.empty)
case Failure(reason) =>
log.error("Error retrieving meter reading from database: {}", reason)
receiver ! RetrievedMeterReadings(Seq.empty)
}
}
private def retrieveMeterReadingsByMonth(receiver: ActorRef, month: Month, year: Int) = {
log.info(s"Retrieve meter reading for $month $year")
val startDate = LocalDate.of(year, month, 1)
val endDate = startDate.plusMonths(1).minusDays(1)
retrieveMeterReadingByDateRange(receiver, startDate, endDate)
}
private def storeMeterReading(reading: HistoricalMeterReading) = {
log.info("Storing one record in database:")
log.info(s" Date : ${reading.recordDate}")
log.info(s" Gas : ${reading.gas}")
log.info(s" Electricity normal : ${reading.electricityNormal}")
log.info(s" Electricity low : ${reading.electricityLow}")
dao.recordMeterReading(reading)
.recover { case e: SQLException => scheduleRetry(e, reading) }
}
private def scheduleRetry(cause: Throwable, reading: HistoricalMeterReading): Unit = {
log.error(s"Inserting failed due to ${cause.getMessage}, retrying in an hour...")
context.system.scheduler.scheduleOnce(1 hour, self, StoreMeterReading(reading))
}
}
| mthmulders/hyperion | app/src/main/scala/hyperion/database/DatabaseActor.scala | Scala | mit | 4,475 |
package com.github.chengpohi.domain.tab
import akka.http.scaladsl.model.{ContentTypes, HttpEntity}
import akka.http.scaladsl.server.Route
import com.github.chengpohi.infrastructure.{BaseController, Repository}
import com.github.chengpohi.infrastructure.util.AdjointUtils
/**
 * HTTP routes for tab resources of the authenticated user.
 *
 * Exposes:
 *  - GET    /tab      — list all tabs of the user
 *  - POST   /tab      — create a tab from the JSON request entity
 *  - DELETE /tab/{id} — delete the tab with the given id
 */
trait TabController extends BaseController {
  repository: Repository =>

  def tabRoutes: Route =
    auth { user =>
      get {
        pathPrefix("tab") {
          // List every tab belonging to the authenticated user.
          responseJson(getAllTabs(user))
        }
      } ~
        post {
          pathPrefix("tab") {
            entity(as[Tab]) { tab =>
              complete(HttpEntity(
                ContentTypes.`application/json`, {
                  // Persist the tab. The generated id used to be copied into an
                  // unused local (`val t = tab.copy(id = Some(tabId))`); that
                  // dead code has been removed.
                  createTab(user, tab)
                  """{"result": "create success"}"""
                }
              ))
            }
          }
        } ~
        delete {
          pathPrefix("tab" / Segment) { _id =>
            complete(HttpEntity(ContentTypes.`application/json`, {
              deleteTabById(_id, user)
              """{"result": "delete success"}"""
            }))
          }
        }
    }
}
| chengpohi/coolmarks | src/main/scala/com/github/chengpohi/domain/tab/TabController.scala | Scala | apache-2.0 | 1,364 |
package com.binglihub.mllib.util
/**
* This class contains a data set.
* @param dataSet a 2D array contains data
* @tparam T the type of data
*/
/**
 * Immutable wrapper around a two-dimensional data set.
 *
 * Note: `Array` uses reference equality, so two `Data` instances wrapping
 * equal-but-distinct arrays will not compare equal via `==`.
 *
 * @param dataSet a 2D array containing the data
 * @tparam T the type of the data elements
 */
case class Data[T](dataSet: Array[Array[T]])
| binglihub/mllib | src/main/scala/com/binglihub/mllib/util/Data.scala | Scala | mit | 208 |
package org.apache.spark.mllib.treelib.core
import org.apache.spark._
import org.apache.spark.SparkContext._
import org.apache.spark.rdd._
import scala.collection.immutable.HashMap
import java.io._
import java.io.DataOutputStream
import java.io.FileOutputStream
import java.io.DataInputStream
import java.io.FileInputStream
import scala.util.Random
import org.apache.spark.mllib.treelib.cart._
/**
* Abstract class of tree builder
*
* @param featureSet all features in the training data
* @param usefulFeatureSet the features which we used to build the tree (included the target feature)
*/
abstract class TreeBuilder extends Serializable {

    // When true, extra diagnostics are printed during model updates.
    protected val DEBUG: Boolean = true

    /**
     * Temporary model file
     */
    val temporaryModelFile = "/tmp/model.temp"

    /********************************************************/
    /*    REGION OF MAIN COMPONENTS    */
    /********************************************************/

    /**
     * Contains raw information about features, which will be used to construct a feature set
     */
    //private var metadata: Array[String] = Array[String]()
    private var headerOfDataset = Array[String]()

    /**
     * Contains information of all features in data
     */
    var fullFeatureSet = new FeatureSet()

    /**
     * The number of features which will be used for building tree (include the target feature)
     */
    //var usefulFeatureSet = new FeatureSet()
    //var usefulFeatures : Set[Int] = null // means all features will be used for building tree
    protected var numberOfUsefulFeatures : Int = fullFeatureSet.numberOfFeature

    /**
     * Tree model
     */
    var treeModel: TreeModel = new CARTTreeModel()

    /**
     * Default value of the index of the target feature.
     * Here this value is the index of the last feature
     */
    var yIndexDefault = fullFeatureSet.numberOfFeature - 1 // = number_of_feature - 1

    /**
     * index of target feature,
     * default value is the index of the last feature in dataset
     */
    protected var yIndex = yIndexDefault

    /**
     * the indices/indexes of X features, which will be used to predict the target feature
     * this variable can be inferred from featureSet and yIndex
     * but because it will be used in functions processLine, and buildingTree
     * so we don't want to calculate it multiple time
     * The default value is the index of all features, except the last one
     */
    protected var xIndexes = fullFeatureSet.data.map(x => x.index).filter(x => (x != yIndex)).toSet[Int]

    /**
     * A value , which is used to mark a split point as invalid
     */
    protected val ERROR_SPLITPOINT_VALUE = ",,,@,,,"

    /**
     * The data will be used to build tree
     */
    var trainingData: RDD[String] = null;

    /**
     * In each node split, do we choose splitpoint on the random subset of features ?
     * This argument is often used when building tree with Random Forest
     */
    var useRandomSubsetFeature = false

    /**
     * Cache the dataset (Spark RDD caching) before building
     */
    var useCache = true

    /*****************************************************************/
    /*    REGION OF PARAMETERS   */
    /*****************************************************************/

    /**
     * minimum records to do a splitting, default value is 10
     */
    var minsplit = 10

    /**
     * delimiter of fields in data set, default value is ","
     */
    var delimiter = ','

    /**
     * coefficient of variation, default value is 0.1
     */
    var threshold: Double = 0.1

    /**
     * Max depth of the tree, default value is 62
     * (NOTE(review): an earlier comment said 30, but the initializer is 62)
     */
    protected var maxDepth: Int = 62

    /**
     * The maximum complexity of the tree
     */
    protected var maximumComplexity = 0.001

    /** Sets the field delimiter used when parsing dataset lines. */
    def setDelimiter(c: Char) = {
        delimiter = c
    }

    /**
     * Set the minimum records of splitting
     * It's mean if a node have the number of records <= minsplit, it can't be splitted anymore
     *
     * @param xMinSplit new minimum records for splitting
     */
    def setMinSplit(xMinSplit: Int) = {
        this.minsplit = xMinSplit
        this.treeModel.minsplit = xMinSplit
    }

    /**
     * Set threshold for stopping criterion. This threshold is coefficient of variation
     * A node will stop expand if Dev(Y)/E(Y) < threshold
     * In which:
     * Dev(Y) is standard deviation
     * E(Y) is medium of Y
     *
     * NOTE(review): the parameter name contains a typo ("xThreshlod"); renaming it
     * would break callers using named arguments, so it is left as-is.
     *
     * @param xThreshlod new threshold
     */
    def setThreshold(xThreshlod: Double) = {
        threshold = xThreshlod
        treeModel.threshold = xThreshlod
    }

    /**
     * Set the maximum of depth of tree
     */
    def setMaxDepth(value: Int) = {
        this.maxDepth = value
        this.treeModel.maxDepth = value
    }

    /** Sets the maximum complexity parameter (mirrored into the tree model). */
    def setMaximumComplexity(cp: Double) = {
        this.maximumComplexity = cp
        this.treeModel.maximumComplexity = cp
    }

    /*****************************************************************/
    /*    REGION OF UTILITIES FUNCTIONS    */
    /*****************************************************************/

    /**
     * Convert feature name (or feature info) to their index
     * @param xNames Set of feature names or set of FeatureInformation
     * @param yName Name of the target feature
     * @return A tuple with the first component is the set of indices of predictors, the second
     * component is the index of the target feature
     */
    private def getXIndexesAndYIndexByNames(xNames: Set[Any], yName: String): (Set[Int], Int) = {
        var yindex = fullFeatureSet.getIndex(yName)
        if (yName == "" && yindex < 0)
            yindex = this.yIndexDefault
        if (yindex < 0)
            throw new Exception("ERROR: Can not find attribute `" + yName + "` in (" + fullFeatureSet.data.map(f => f.Name).mkString(",") + ")")
        // index of features, which will be used to predict the target feature
        var xindexes =
            if (xNames.isEmpty) // if user didn't specify xFeature, we will process on all feature, exclude Y feature (to check stop criterion)
                fullFeatureSet.data.filter(_.index != yindex).map(x => x.index).toSet[Int]
            else //xNames.map(x => featureSet.getIndex(x)) //+ yindex
            {
                xNames.map(x => {
                    var index = x match {
                        case Feature(name, ftype, _) => {
                            val idx = fullFeatureSet.getIndex(name)
                            fullFeatureSet.update(Feature(name, ftype, idx), idx)
                            idx
                        }
                        case s: String => {
                            fullFeatureSet.getIndex(s)
                        }
                        case _ => { throw new Exception("Invalid feature. Expect as.String(feature_name) or as.Number(feature_name) or \\"feature_name\\"") }
                    }
                    if (index < 0)
                        throw new Exception("Could not find feature " + x)
                    else
                        index
                })
            }
        (xindexes, yindex)
    }

    /**
     * From the full training set, remove the unused columns
     * @param trainingData the training data
     * @param xIndexes the set of indices of predictors
     * @param yIndex the index of the target feature
     * @param removeInvalidRecord remove line which contains invalid feature values or not
     * @return the new RDD which will be use "directly" in building phase
     */
    /*
    def filterUnusedFeatures(trainingData: RDD[String], xIndexes: Set[Int], yIndex: Int, removeInvalidRecord: Boolean = true): RDD[String] = {
        var i = 0
        var j = 0
        var temp = trainingData.map(line => {
            var array = line.split(this.delimiter)
            i = 0
            j = 0
            var newLine = ""
            try {
                array.foreach(element => {
                    if (yIndex == i || xIndexes.contains(i)) {
                        if (newLine.equals(""))
                            newLine = element
                        else {
                            newLine = "%s,%s".format(newLine, element)
                        }
                        if (removeInvalidRecord) {
                            this.usefulFeatureSet.data(j).Type match {
                                case FeatureType.Categorical => element
                                case FeatureType.Numerical => element.toDouble
                            }
                        }
                        j = j + 1
                    }
                    i = i + 1
                })
                newLine
            } catch {
                case _: Throwable => ""
            }
        })
        temp.filter(line => !line.equals(""))
    }
    */

    /**
     * Convert index of the useful features into index in the full feature set
     * @param featureSet the full feature set
     * @param usefulFeatureSet the useful feature set
     * @param a tuple with the first component is the set of indices of predictors
     * the second component is the index of the target feature
     */
    /*
    private def mapFromUsefulIndexToOriginalIndex(featureSet: FeatureSet, usefulFeatureSet: FeatureSet): (Set[Int], Int) = {
        var xIndexes = treeModel.xIndexes.map(index => treeModel.fullFeatureSet.getIndex(treeModel.usefulFeatureSet.data(index).Name))
        var yIndex = treeModel.fullFeatureSet.getIndex(usefulFeatureSet.data(treeModel.yIndex).Name)
        (xIndexes, yIndex)
    }
    */

    /**
     * Get node by nodeID
     *
     * Node ids follow binary-heap numbering: root = 1, left child = 2*id,
     * right child = 2*id + 1. The loop below walks from the root, choosing
     * left/right at each level according to the corresponding bit of `id`.
     *
     * @param id node id
     * @return node (null when id == 0)
     */
    protected def getNodeByID(id: BigInt): CARTNode = {
        if (id != 0) {
            val level = (Math.log(id.toDouble) / Math.log(2)).toInt
            var i: Int = level - 1
            var TWO: BigInt = 2
            var parent = treeModel.tree.asInstanceOf[CARTNode]; // start adding from root node
            try {
                while (i >= 0) {
                    if ((id / (TWO << i - 1)) % 2 == 0) {
                        // go to the left
                        parent = parent.left
                    } else {
                        // go to the right
                        parent = parent.right
                    }
                    i -= 1
                } // end while
            } catch {
                case e: Throwable => {
                    e.printStackTrace()
                    if (DEBUG) println("currentID:" + id)
                    if (DEBUG) println("currentTree:\\n" + treeModel.tree)
                    throw e
                }
            }
            parent
        } else {
            null
        }
    }

    /*********************************************************************/
    /*    REGION FUNCTIONS OF BUILDING PHASE    */
    /*********************************************************************/

    /**
     * This function is used to build the tree
     *
     * @param yFeature name of target feature, the feature which we want to predict.
     * Default value is the name of the last feature
     * @param xFeatures set of names of features which will be used to predict the target feature
     * Default value is all features names, except target feature
     * @return <code>TreeModel</code> the root of tree
     * @see TreeModel
     */
    def buildTree(yFeature: String = "",
        xFeatures: Set[Any] = Set[Any]()): TreeModel = {
        if (this.trainingData == null) {
            throw new Exception("ERROR: Dataset can not be null.Set dataset first")
        }
        if (yIndexDefault < 0) {
            throw new Exception("ERROR:Dataset is invalid or invalid feature names")
        }

        // NOTE(review): any failure inside this try is only printed, not rethrown;
        // the caller receives the (possibly empty) treeModel either way.
        try {
            treeModel.tree = null

            // These information will be used in Pruning phase
            treeModel.xFeatures = xFeatures
            treeModel.yFeature = yFeature

            var (xIndexes, yIndex) = this.getXIndexesAndYIndexByNames(xFeatures, yFeature)
            println("current yIndex=" + yIndex + " xIndex:" + xIndexes.toString)

            // SET UP LIST OF USEFUL FEATURES AND ITS INDEXES //
            var usefulFeatureList = List[Feature]()
            var i = -1
            var usefulIndexes = List[Int]()
            var newYIndex = 0
            fullFeatureSet.data.foreach(feature => {
                if (xIndexes.contains(feature.index) || feature.index == yIndex) {
                    i = i + 1
                    if (feature.index == yIndex) {
                        newYIndex = i
                        println("new yindex:" + newYIndex)
                    }
                    usefulIndexes = usefulIndexes.:+(i)
                    usefulFeatureList = usefulFeatureList.:+(Feature(feature.Name, feature.Type, i))
                }
            })
            //this.usefulFeatureSet = new FeatureSet(usefulFeatureList)

            /*
            // FILTER OUT THE UNUSED FEATURES //
            this.trainingData = filterUnusedFeatures(this.trainingData, xIndexes, yIndex)
            // because we remove unused features, so the indices are changed
            var newXIndexes = usefulIndexes.filter(x => x != newYIndex)
            this.usefulFeatureSet = new FeatureSet(usefulFeatureList)
            this.yIndex = newYIndex
            this.xIndexes = newXIndexes.toSet
            treeModel.yIndex = newYIndex
            treeModel.xIndexes = this.xIndexes
            treeModel.usefulFeatureSet = this.usefulFeatureSet
            treeModel.fullFeatureSet = this.fullFeatureSet
            println("build tree with feature set:" + this.usefulFeatureSet + "\\n xIndexes:" + this.xIndexes + "\\nYIndex:" + this.yIndex)
            treeModel.treeBuilder = this

            // build tree
            if (this.useCache)
                this.startBuildTree(this.trainingData.cache, newXIndexes.toSet, newYIndex)
            else
                this.startBuildTree(this.trainingData, newXIndexes.toSet, newYIndex)
            */

            // Record the chosen predictor/target indices on both the builder and the model.
            this.yIndex = yIndex
            this.xIndexes = xIndexes
            treeModel.xIndexes = xIndexes
            treeModel.yIndex = yIndex
            treeModel.fullFeatureSet = this.fullFeatureSet
            treeModel.treeBuilder = this
            this.numberOfUsefulFeatures = this.xIndexes.size + 1

            println("xIndexes:" + xIndexes + " yIndex:" + yIndex)
            println("Building tree with predictors:" + this.xIndexes.map(i => fullFeatureSet.data(i).Name))
            println("Target feature:" + fullFeatureSet.data(yIndex).Name)

            //this.startBuildTree(this.trainingData, xIndexes, yIndex)
            if (this.useCache)
                this.startBuildTree(this.trainingData.cache, xIndexes, yIndex)
            else
                this.startBuildTree(this.trainingData, xIndexes, yIndex)

        } catch {
            case e: Throwable => {
                println("Error:" + e.getStackTraceString)
            }
        }

        this.trainingData.unpersist(true)
        this.treeModel
    }

    /**
     * This function is used to build the tree
     *
     * @param yFeature name of target feature, the feature which we want to predict.
     * @param xFeatures set of names of features which will be used to predict the target feature
     * @return <code>TreeModel</code> the root of tree
     * @see TreeModel
     */
    protected def startBuildTree(trainingData: RDD[String],
        xIndexes: Set[Int],
        yIndex: Int): Unit

    /** Extracts the predicted value for a node from its statistics (here: the Y value). */
    protected def getPredictedValue(info: StatisticalInformation): Any = {
        info.YValue
    }

    /**
     * Inserts newly computed nodes into the in-memory tree model.
     *
     * @param info       tuples of (node id, chosen split point, node statistics);
     *                   node ids follow binary-heap numbering (root = 1,
     *                   left child = 2*id, right child = 2*id + 1)
     * @param isStopNode true when the nodes are leaves (no further splitting)
     */
    protected def updateModel(info: Array[(BigInt, SplitPoint, StatisticalInformation)], isStopNode: Boolean = false) = {
        info.foreach(stoppedRegion =>
            {
                var (label, splitPoint, statisticalInformation) = stoppedRegion

                if (DEBUG) println("update model with label=%d splitPoint:%s".format(
                    label,
                    splitPoint))

                var newnode = (
                    if (isStopNode) {
                        new CARTLeafNode(splitPoint.point.toString)
                    } else {
                        val chosenFeatureInfoCandidate = fullFeatureSet.data.find(f => f.index == splitPoint.index)
                        chosenFeatureInfoCandidate match {
                            case Some(chosenFeatureInfo) => {
                                new CARTNonLeafNode(chosenFeatureInfo,
                                    splitPoint,
                                    new CARTLeafNode("empty.left"),
                                    new CARTLeafNode("empty.right"));
                            }
                            case None => { new CARTLeafNode(this.ERROR_SPLITPOINT_VALUE) }
                        }
                    }) // end of assign value for new node

                if (newnode.value == this.ERROR_SPLITPOINT_VALUE) {
                    println("Value of job id=" + label + " is invalid")
                } else {
                    if (!isStopNode) { // update predicted value for non-leaf node
                        newnode.value = getPredictedValue(statisticalInformation)
                    }
                    newnode.statisticalInformation = statisticalInformation

                    if (DEBUG) println("create node with statistical infor:" + statisticalInformation + "\\n new node:" + newnode.value)

                    // If tree has zero node, create a root node
                    if (treeModel.isEmpty) {
                        treeModel.tree = newnode;
                    } else // add new node to current model
                    {
                        // label >> 1 is the parent id; even labels are left children.
                        var parent = getNodeByID(label >> 1)
                        if (label % 2 == 0) {
                            parent.setLeft(newnode)
                        } else {
                            parent.setRight(newnode)
                        }
                    }
                }
            })
    }

    /************************************************************/
    /*    REGION: SET DATASET AND METADATA    */
    /************************************************************/

    /**
     * Set the training dataset to used for building tree
     *
     * Feature types are inferred from the first line: a field that parses as a
     * Double is treated as Numerical, otherwise as Categorical. Column names
     * default to "Column0", "Column1", ... until setFeatureNames is called.
     *
     * @param trainingData the training dataset
     * @throw Exception if the dataset contains less than 1 line
     */
    def setDataset(trainingData: RDD[String]) {
        var firstLine = trainingData.take(1)

        // If we can not get the first line of dataset, it's invalid dataset
        if (firstLine.length < 1) {
            throw new Exception("ERROR:Invalid dataset")
        } else {
            this.trainingData = trainingData;
            /*
            var header = firstLine(0)
            var temp_header = header.split(delimiter)
            if (hasHeader) {
                this.headerOfDataset = temp_header
            } else {
                var i = -1;
                this.headerOfDataset = temp_header.map(v => { i = i + 1; "Column" + i })
            }
            */
            // determine types of features automatically
            // Get the first line of dataset and try to parse each of them to double
            // If we can parse, it's a numerical feature, otherwise, it's a categorical feature
            var sampleData = firstLine(0).split(delimiter);
            var i = -1;
            this.headerOfDataset = sampleData.map(v => { i = i + 1; "Column" + i })

            i = 0
            var listOfFeatures = List[Feature]()
            // if we can parse value of a feature into double, this feature may be a numerical feature
            sampleData.foreach(v => {
                Utility.parseDouble(v.trim()) match {
                    case Some(d) => { listOfFeatures = listOfFeatures.:+(Feature(headerOfDataset(i), FeatureType.Numerical, i)) }
                    case None => { listOfFeatures = listOfFeatures.:+(Feature(headerOfDataset(i), FeatureType.Categorical, i)) }
                }
                i = i + 1
            })

            // update the dataset
            fullFeatureSet = new FeatureSet(listOfFeatures)
            updateFeatureSet()
        }
    }

    /**
     * Set feature names
     *
     * @param names a line contains names of features,
     * separated by by a delimiter, which you set before (default value is comma ',')
     * Example: Temperature,Age,"Type"
     * @throw Exception if the training set is never be set before
     */
    def setFeatureNames(names: Array[String]) = {
        if (this.trainingData == null)
            throw new Exception("Trainingset is null. Set dataset first")
        else {
            if (names.length != fullFeatureSet.data.length) {
                throw new Exception("Incorrect names")
            }

            var i = 0
            names.foreach(name => {
                fullFeatureSet.data(i).Name = name
                fullFeatureSet.update(fullFeatureSet.data(i), i)
                i = i + 1
            })

            updateFeatureSet()
        }
    }

    /**
     * Update the feature set based on the information of metadata
     */
    private def updateFeatureSet() = {
        yIndexDefault = fullFeatureSet.numberOfFeature - 1
        println("Set new yIndexDefault = " + yIndexDefault)
        //treeBuilder = new ThreadTreeBuilder(featureSet);
        //treeBuilder = treeBuilder.createNewInstance(featureSet, usefulFeatureSet)
    }

    /* END REGION DATASET AND METADATA */

    /************************************************/
    /*    REGION FUNCTIONS OF PREDICTION MAKING    */
    /************************************************/

    /**
     * Predict value of the target feature base on the values of input features
     *
     * @param record an array, which its each element is a value of each input feature (already remove unused features)
     * @return predicted value or '???' if input record is invalid
     */
    private def predictOnPreciseData(record: Array[String], ignoreBranchIDs: Set[BigInt]): String = {
        try {
            treeModel.predict(record, ignoreBranchIDs)
        } catch {
            case e: Exception => { println("Error P: " + e.getStackTraceString); "???" }
        }
    }

    /** Predicts the target value for a single record; returns "???" for an empty record. */
    def predictOneInstance(record: Array[String], ignoreBranchIDs: Set[BigInt] = Set[BigInt]()): String = {
        if (record.length == 0)
            "???"
        else {
            /*
            var (xIndexes, yIndex) = mapFromUsefulIndexToOriginalIndex(fullFeatureSet, usefulFeatureSet)
            var newRecord: Array[String] = Array[String]()
            var i = 0
            for (field <- record) {
                if (i == yIndex || xIndexes.contains(i)) {
                    newRecord = newRecord.:+(field)
                }
                i = i + 1
            }
            predictOnPreciseData(newRecord, ignoreBranchIDs)
            *
            */
            predictOnPreciseData(record, ignoreBranchIDs)
        }
    }

    /**
     * Predict value of the target feature base on the values of input features
     *
     * @param testingData the RDD of testing data
     * @return a RDD contain predicted values
     */
    def predict(testingData: RDD[String],
        delimiter: String = ",",
        ignoreBranchIDs: Set[BigInt] = Set[BigInt]()): RDD[String] = {
        /*
         * var (xIndexes, yIndex) = mapFromUsefulIndexToOriginalIndex(fullFeatureSet, usefulFeatureSet)
        var newTestingData = filterUnusedFeatures(testingData, xIndexes, yIndex, false)
        newTestingData.map(line => this.predictOnPreciseData(line.split(delimiter), ignoreBranchIDs))
        */
        testingData.map(line => this.predictOnPreciseData(line.split(delimiter), ignoreBranchIDs))
    }

    /***********************************************/
    /*    REGION WRITING AND LOADING MODEL    */
    /***********************************************/

    /**
     * Write the current tree model to file
     *
     * NOTE(review): the stream is not closed if writeObject throws — confirm
     * whether callers rely on this before wrapping in try/finally.
     *
     * @param path where we want to write to
     */
    def writeModelToFile(path: String) = {
        val ois = new ObjectOutputStream(new FileOutputStream(path))
        ois.writeObject(treeModel)
        ois.close()
    }

    /**
     * Load tree model from file
     *
     * @param path the location of file which contains tree model
     */
    def loadModelFromFile(path: String) = {
        //val js = new JavaSerializer(null, null)

        // resolveClass is overridden so deserialization uses this class's
        // classloader first (helps when running under Spark's classloading).
        val ois = new ObjectInputStream(new FileInputStream(path)) {
            override def resolveClass(desc: java.io.ObjectStreamClass): Class[_] = {
                try { Class.forName(desc.getName, false, getClass.getClassLoader) }
                catch { case ex: ClassNotFoundException => super.resolveClass(desc) }
            }
        }

        var rt = ois.readObject().asInstanceOf[TreeModel]
        //treeModel = rt
        //this.featureSet = treeModel.featureSet
        //this.usefulFeatureSet = treeModel.usefulFeatureSet
        setTreeModel(rt)

        ois.close()
    }

    /** Installs `tm` as the current model and syncs the builder's feature set with it. */
    def setTreeModel(tm: TreeModel) = {
        this.treeModel = tm
        this.fullFeatureSet = tm.fullFeatureSet
        //this.usefulFeatureSet = tm.usefulFeatureSet
        //this.usefulFeatures = tm.usefulFeatures
        updateFeatureSet()
    }

    /**
     * Recover, repair and continue build tree from the last state
     */
    def continueFromIncompleteModel(trainingData: RDD[String], path_to_model: String): TreeModel = {
        loadModelFromFile(path_to_model)
        this.treeModel = treeModel
        this.fullFeatureSet = treeModel.fullFeatureSet
        //this.usefulFeatureSet = treeModel.usefulFeatureSet
        //var (xIndexes, yIndex) = mapFromUsefulIndexToOriginalIndex(fullFeatureSet, usefulFeatureSet)
        //var newtrainingData = filterUnusedFeatures(trainingData, xIndexes, yIndex)

        if (treeModel == null) {
            throw new Exception("The tree model is empty because of no building. Please build it first")
        }

        if (treeModel.isComplete) {
            println("This model is already complete")
        } else {
            println("Recover from the last state")
            /* INITIALIZE */
            this.fullFeatureSet = treeModel.fullFeatureSet
            //this.usefulFeatureSet = treeModel.usefulFeatureSet
            this.xIndexes = treeModel.xIndexes
            this.yIndex = treeModel.yIndex

            startBuildTree(trainingData, xIndexes, yIndex)

        }

        treeModel
    }

    /** Creates a fresh builder of the same concrete type (used e.g. by ensemble builders). */
    def createNewInstance() : TreeBuilder
}
| bigfootproject/spark-dectree | spark/mllib/src/main/scala/org/apache/spark/mllib/treelib/core/TreeBuilder.scala | Scala | apache-2.0 | 26,685 |
package org.wquery.similarity
import java.io._
import jline.console.ConsoleReader
import org.rogach.scallop._
import org.rogach.scallop.exceptions.{Help, ScallopException, Version}
import org.wquery.emitter.WQueryEmitter
import org.wquery.lang.operations.{LowerFunction, TitleFunction, UpperFunction}
import org.wquery.lang.{WInteractiveMain, WTupleParsers}
import org.wquery.loader.WnLoader
import org.wquery.model._
import org.wquery.update.WUpdate
import org.wquery.{WQueryCommandLineException, WQueryProperties}
import scala.io.Source
object WSimMain extends WInteractiveMain {
  // Loads a compiled wordnet model (produced by wcompile, per the CLI help below).
  val loader = new WnLoader

  /**
   * Loads tag counts from `fileName` into `wordNet`, creating two `tag_count`
   * relations: one keyed by sense and one keyed by word. Lines starting with
   * '#' and lines that do not parse to a (sense|word, count) tuple are skipped.
   *
   * @return true if at least one per-sense count was loaded, false when only
   *         per-word counts (or none) were found
   */
  def loadCounts(wordNet: WordNet, fileName: String) = {
    val tupleParsers = new Object with WTupleParsers
    val senseCountRelation = Relation.binary("tag_count", SenseType, IntegerType)
    val wordCountRelation = Relation.binary("tag_count", StringType, IntegerType)
    var senseCounts = false

    wordNet.addRelation(senseCountRelation)
    wordNet.addRelation(wordCountRelation)

    for (line <- Source.fromFile(fileName).getLines()) {
      if (!line.startsWith("#")) {
        tupleParsers.parse(wordNet, line) match {
          case List(sense: Sense, count: Int) =>
            wordNet.addSuccessor(sense, senseCountRelation, count)
            senseCounts = true
          case List(word: String, count: Int) =>
            wordNet.addSuccessor(word, wordCountRelation, count)
          case _ =>
            /* do nothing - count for an unknown word or sense provided */
        }
      }
    }

    senseCounts
  }

  def main(args: Array[String]) {
    // Command-line definition (Scallop).
    val opts = Scallop(args)
      .version("wsim " + WQueryProperties.version + " " + WQueryProperties.copyright)
      .banner( """
                 |Computes semantic similarity among pairs of words, senses and synsets.
                 |
                 |usage:
                 |
                 | wsim [OPTIONS] [WORDNET] [IFILE] [OFILE]
                 |
                 |options:
                 | """.stripMargin)
      .opt[String]("counts", short = 'c', descr = "Word and/or sense counts for IC-based measures", required = false)
      .opt[String]("emitter", short = 'e', default = () => Some("tsv"),
        validate = arg => WQueryEmitter.emitters.contains(arg),
        descr = "Set result emitter (i.e. output format) - either raw, plain, escaping or tsv")
      .opt[String]("field-separator", short = 'F', descr = "Set field separator", default = () => Some("\\t"), required = false)
      .opt[Boolean]("help", short = 'h', descr = "Show help message")
      .opt[Boolean]("ignore-case", short = 'I', descr = "Ignore case while looking up words in the wordnet", required = false)
      .opt[Boolean]("interactive", short = 'i', descr = "Run in the interactive interpreter mode", required = false)
      .opt[String]("measure", short = 'm', default = () => Some("path"), descr = "Similarity measure")
      .opt[List[String]]("hypernymy-names", short = 'n', default = () => Some(List("hypernym")), descr = "Hypernymy relation name(s)")
      .opt[Boolean]("print-pairs", short = 'p', descr = "Print word/sense pairs to the output", required = false)
      .opt[Boolean]("root-node", short = 'r', descr = "Introduce root nodes in the nouns and verbs hierarchies", required = false)
      .opt[Boolean]("version", short = 'v', descr = "Show version")
      .trailArg[String](name = "WORDNET", required = false,
        descr = "A wordnet model as created by wcompile (read from stdin if not specified)")
      .trailArg[String](name = "IFILE", required = false,
        descr = "Tab separated pairs of words, senses or synsets (read from stdin if not specified)")
      .trailArg[String](name = "OFILE", required = false,
        descr = "Similarity values (printed to stdout if not specified)")

    try {
      opts.verify

      val emitter = WQueryEmitter.demandEmitter(opts[String]("emitter"))
      val hypernymyNames = opts[List[String]]("hypernymy-names")
      val ignoreCase = opts[Boolean]("ignore-case")
      val interactiveMode = opts[Boolean]("interactive")
      val separator = opts[String]("field-separator")
      val measure = opts[String]("measure")
      val printPairs = opts[Boolean]("print-pairs")
      val rootNode = opts[Boolean]("root-node")

      // Wordnet model is read from a file when given, otherwise from stdin.
      val wordNetInput = opts.get[String]("WORDNET")
        .map(inputName => new FileInputStream(inputName))
        .getOrElse(System.in)

      val wordNet = loader.load(wordNetInput)
      // When no counts file is given, IC-based measures fall back to per-sense counts.
      val senseCounts = opts.get[String]("counts").map(fileName => loadCounts(wordNet, fileName)).getOrElse(true)
      val wupdate = new WUpdate(wordNet)

      // Copy every extra hypernymy-like relation into the canonical `hypernym` relation.
      for (name <- hypernymyNames if name != "hypernym") {
        wupdate.execute(s"from !$name$$a$$_$$b update $$a hypernym += $$b")
      }

      // Optionally attach artificial ROOT synsets above the noun and verb hierarchies.
      if (rootNode) {
        wupdate.execute("update synsets += {ROOT:1:n}")
        wupdate.execute("update {}[empty(hypernym) and pos = `n`] hypernym := {ROOT:1:n}")
        wupdate.execute("update {ROOT:1:n} pos := `n`")
        wupdate.execute("update synsets += {ROOT:1:v}")
        wupdate.execute("update {}[empty(hypernym) and pos = `v`] hypernym := {ROOT:1:v}")
        wupdate.execute("update {ROOT:1:v} pos := `v`")
      }

      // Measures other than path/wup/lch need synset counts aggregated from tag counts.
      if (measure != "path" && measure != "wup" && measure != "lch") {
        if (senseCounts) {
          wupdate.execute("from {}$a update $a count := sum(last($a.senses.tag_count))")
        } else {
          wupdate.execute("from {}$a update $a count := sum(last($a.senses.word.tag_count))")
        }
      }

      if (interactiveMode) {
        executeInteractive(wupdate, "wsim", new ConsoleReader(System.in, System.out), emitter)
      } else {
        // NOTE(review): `left`/`right` are interpolated verbatim inside backticks;
        // an input token containing a backtick would corrupt the generated query.
        def escape(x: String) = "as_tuple(`" + x + "`)"
        def caseIgnoringQuery(x: String) = List(escape(x), escape(UpperFunction.upper(x)),
          escape(LowerFunction.lower(x)), escape(TitleFunction.title(x))).mkString(" union ")

        val input = opts.get[String]("IFILE")
          .map(ifile => new FileInputStream(ifile)).getOrElse(System.in)
        val output = opts.get[String]("OFILE")
          .map(outputName => new BufferedOutputStream(new FileOutputStream(outputName))).getOrElse(System.out)
        val writer = new BufferedWriter(new OutputStreamWriter(output))

        // One input line = one pair; split on the first separator occurrence only.
        for (line <- scala.io.Source.fromInputStream(input).getLines()) {
          val fields = line.split(separator, 2)
          val (left, right) = (fields(0), fields(1))

          val (leftSenseQuery, rightSenseQuery) = if (ignoreCase)
            (caseIgnoringQuery(left), caseIgnoringQuery(right))
          else
            (escape(left), escape(right))

          val pairQuery = if (printPairs) "`" + left + "`,`" + right + "`," else ""

          // Emit the maximal similarity over all sense combinations of the pair.
          val result = wupdate.execute(
            s"""
               |do
               | %l := {distinct($leftSenseQuery)}
               | %r := {distinct($rightSenseQuery)}
               | emit ${pairQuery}na(distinct(max(from (%l,%r)$$a$$b emit ${measure}_measure($$a,$$b))))
               |end
             """.stripMargin)

          writer.write(emitter.emit(result))
          writer.flush()
        }

        writer.close()
      }
    } catch {
      case e: Help =>
        opts.printHelp()
      // NOTE(review): the Version request also prints the help text —
      // possibly `opts.printVersion()` was intended here; confirm.
      case Version =>
        opts.printHelp()
      case e: ScallopException =>
        println("ERROR: " + e.message)
        println()
        opts.printHelp()
        sys.exit(1)
      case e: WQueryCommandLineException =>
        println("ERROR: " + e.message)
        println()
        opts.printHelp()
        sys.exit(1)
    }
  }
}
| marekkubis/wquery | src/main/scala/org/wquery/similarity/WSimMain.scala | Scala | bsd-3-clause | 7,553 |
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.testkit.ScalatestRouteTest
import common.graphql.UserContext
import common.routes.graphql.{GraphQLRoute, HttpHandler, WebSocketHandler}
import common.slick.SchemaInitializer
import core.guice.injection.InjectorProvider
import monix.execution.Scheduler
import org.scalamock.scalatest.MockFactory
import org.scalatest._
import sangria.execution.{Executor, QueryReducer}
import shapes.ServerModule
/** Base trait for route specs: wires ScalatestRouteTest, ScalaMock and Guice injection together. */
trait TestHelper
    extends WordSpec
    with ScalatestRouteTest
    with BeforeAndAfter
    with BeforeAndAfterEach
    with BeforeAndAfterAll
    with Matchers
    with MockFactory {

  // Path under which the GraphQL endpoint is exposed.
  val endpoint: String = "/graphql"

  // Monix scheduler resolved lazily from the application injector.
  lazy implicit val scheduler: Scheduler = inject[Scheduler]

  // Shortcut for pulling an instance of `T` out of the Guice injector.
  def inject[T: Manifest]: T = InjectorProvider.inject[T]

  /**
   * Builds the complete GraphQL route (HTTP handler + WebSocket handler) for the
   * given server module, with the schema guarded by the standard max-depth and
   * max-complexity query reducers.
   */
  def routesWithGraphQLSchema(serverModule: ServerModule[UserContext, SchemaInitializer[_]]): Route = {
    val schema = new TestGraphQLSchema(serverModule)
    val depthGuard = QueryReducer.rejectMaxDepth[UserContext](schema.maxQueryDepth)
    val complexityGuard = QueryReducer.rejectComplexQueries[UserContext](
      schema.maxQueryComplexity,
      (_, _) => new Exception("maxQueryComplexity")
    )
    val executor = Executor(schema = schema.schema, queryReducers = List(depthGuard, complexityGuard))
    val route = new GraphQLRoute(new HttpHandler(schema, executor), new WebSocketHandler(schema, executor), schema)
    route.routes
  }
}
| sysgears/apollo-universal-starter-kit | modules/core/server-scala/src/test/scala/TestHelper.scala | Scala | mit | 1,520 |
// Minimal trait declaring a single concrete value member.
trait A {
  val foo = 0
} | som-snytt/xsbt | sbt/src/sbt-test/source-dependencies/trait-private-object/A.scala | Scala | bsd-3-clause | 25 |
package mesosphere.marathon
package core.plugin
import play.api.libs.json.JsObject
/** Declarative description of a single plugin instance.
  *
  * @param id unique identifier of this plugin definition
  * @param plugin name of the plugin (extension point) being implemented
  * @param implementation name of the implementing class — presumably fully qualified; confirm against the loader
  * @param tags optional set of tag strings attached to this plugin
  * @param configuration optional plugin-specific configuration as raw JSON
  * @param info optional free-form metadata as raw JSON
  * @param enabled optional enabled flag; `None` means no explicit setting
  */
case class PluginDefinition(
    id: String,
    plugin: String,
    implementation: String,
    tags: Option[Set[String]],
    configuration: Option[JsObject],
    info: Option[JsObject],
    enabled: Option[Boolean]
)
/** Wrapper holding a collection of [[PluginDefinition]]s. */
case class PluginDefinitions(plugins: Seq[PluginDefinition])
object PluginDefinitions {
  // Shared empty instance. NOTE: the name `None` shadows scala.None inside this scope.
  lazy val None = PluginDefinitions(Seq.empty[PluginDefinition])
}
| mesosphere/marathon | src/main/scala/mesosphere/marathon/core/plugin/PluginDefinition.scala | Scala | apache-2.0 | 462 |
package org.bitcoins.core.protocol.script
import org.bitcoins.core.script.constant._
import scodec.bits.ByteVector
/** Created by chris on 1/19/16.
*/
/** Marker for the two representations a ScriptPubKey update can take. */
sealed trait ScriptPubKeyUpdateIndicator
/** Update expressed as already-parsed script assembly tokens. */
case class UpdateScriptPubKeyAsm(asm: Seq[ScriptToken])
    extends ScriptPubKeyUpdateIndicator
/** Update expressed as raw serialized script bytes. */
case class UpdateScriptPubKeyBytes(bytes: ByteVector)
    extends ScriptPubKeyUpdateIndicator
| bitcoin-s/bitcoin-s | core/src/main/scala/org/bitcoins/core/protocol/script/ScriptPubKeyFactory.scala | Scala | mit | 388 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.jobs
import java.util.Locale
import javax.servlet.http.HttpServletRequest
import scala.collection.mutable.{Buffer, ListBuffer}
import scala.xml.{Node, NodeSeq, Unparsed, Utility}
import org.apache.commons.text.StringEscapeUtils
import org.apache.spark.JobExecutionStatus
import org.apache.spark.resource.ResourceProfile
import org.apache.spark.status.AppStatusStore
import org.apache.spark.status.api.v1
import org.apache.spark.ui._
/** Page showing statistics and stage list for a given job */
private[ui] class JobPage(parent: JobsTab, store: AppStatusStore) extends WebUIPage("job") {
  // Inline SVG legend for the "Stages" timeline group. Rendered to a string with
  // newline characters stripped so it can be embedded in a JS string literal.
  private val STAGES_LEGEND =
    <div class="legend-area"><svg width="150px" height="85px">
      <rect class="completed-stage-legend"
        x="5px" y="5px" width="20px" height="15px" rx="2px" ry="2px"></rect>
      <text x="35px" y="17px">Completed</text>
      <rect class="failed-stage-legend"
        x="5px" y="30px" width="20px" height="15px" rx="2px" ry="2px"></rect>
      <text x="35px" y="42px">Failed</text>
      <rect class="active-stage-legend"
        x="5px" y="55px" width="20px" height="15px" rx="2px" ry="2px"></rect>
      <text x="35px" y="67px">Active</text>
    </svg></div>.toString.filter(_ != '\\n')
  // Inline SVG legend for the "Executors" timeline group, stripped like STAGES_LEGEND.
  private val EXECUTORS_LEGEND =
    <div class="legend-area"><svg width="150px" height="55px">
      <rect class="executor-added-legend"
        x="5px" y="5px" width="20px" height="15px" rx="2px" ry="2px"></rect>
      <text x="35px" y="17px">Added</text>
      <rect class="executor-removed-legend"
        x="5px" y="30px" width="20px" height="15px" rx="2px" ry="2px"></rect>
      <text x="35px" y="42px">Removed</text>
    </svg></div>.toString.filter(_ != '\\n')
  /**
   * Builds one JS timeline-event object literal (as a string) per stage.
   * NOTE(review): assumes every stage passed in has a submission time (`.get` below);
   * `render` only feeds it active/completed/failed stages — pending ones would throw.
   */
  private def makeStageEvent(stageInfos: Seq[v1.StageData]): Seq[String] = {
    stageInfos.map { stage =>
      val stageId = stage.stageId
      val attemptId = stage.attemptId
      val name = stage.name
      val status = stage.status.toString.toLowerCase(Locale.ROOT)
      val submissionTime = stage.submissionTime.get.getTime()
      val completionTime = stage.completionTime.map(_.getTime())
        .getOrElse(System.currentTimeMillis())
      // The timeline library treats contents as HTML, so we have to escape them. We need to add
      // extra layers of escaping in order to embed this in a JavaScript string literal.
      val escapedName = Utility.escape(name)
      // NOTE(review): the tooltip value HTML-escapes `escapedName` a second time — verify intended.
      val jsEscapedNameForTooltip = StringEscapeUtils.escapeEcmaScript(Utility.escape(escapedName))
      val jsEscapedNameForLabel = StringEscapeUtils.escapeEcmaScript(escapedName)
      s"""
         |{
         |  'className': 'stage job-timeline-object ${status}',
         |  'group': 'stages',
         |  'start': new Date(${submissionTime}),
         |  'end': new Date(${completionTime}),
         |  'content': '<div class="job-timeline-content" data-toggle="tooltip"' +
         |   'data-placement="top" data-html="true"' +
         |   'data-title="${jsEscapedNameForTooltip} (Stage ${stageId}.${attemptId})<br>' +
         |   'Status: ${status.toUpperCase(Locale.ROOT)}<br>' +
         |   'Submitted: ${UIUtils.formatDate(submissionTime)}' +
         |   '${
               if (status != "running") {
                 s"""<br>Completed: ${UIUtils.formatDate(completionTime)}"""
               } else {
                 ""
               }
             }">' +
         |   '${jsEscapedNameForLabel} (Stage ${stageId}.${attemptId})</div>',
         |}
       """.stripMargin
    }
  }
  /**
   * Builds JS timeline-event object literals for executor lifecycle events:
   * one "added" event per executor plus a "removed" event when a remove time exists.
   */
  def makeExecutorEvent(executors: Seq[v1.ExecutorSummary]): Seq[String] = {
    val events = ListBuffer[String]()
    executors.foreach { e =>
      val addedEvent =
        s"""
           |{
           |  'className': 'executor added',
           |  'group': 'executors',
           |  'start': new Date(${e.addTime.getTime()}),
           |  'content': '<div class="executor-event-content"' +
           |    'data-toggle="tooltip" data-placement="top"' +
           |    'data-title="Executor ${e.id}<br>' +
           |    'Added at ${UIUtils.formatDate(e.addTime)}"' +
           |    'data-html="true">Executor ${e.id} added</div>'
           |}
         """.stripMargin
      events += addedEvent
      e.removeTime.foreach { removeTime =>
        val removedEvent =
          s"""
             |{
             |  'className': 'executor removed',
             |  'group': 'executors',
             |  'start': new Date(${removeTime.getTime()}),
             |  'content': '<div class="executor-event-content"' +
             |    'data-toggle="tooltip" data-placement="top"' +
             |    'data-title="Executor ${e.id}<br>' +
             |    'Removed at ${UIUtils.formatDate(removeTime)}' +
             |    '${
                    e.removeReason.map { reason =>
                      s"""<br>Reason: ${StringEscapeUtils.escapeEcmaScript(
                        reason.replace("\\n", " "))}"""
                    }.getOrElse("")
                 }"' +
             |    'data-html="true">Executor ${e.id} removed</div>'
             |}
           """.stripMargin
        events += removedEvent
      }
    }
    events.toSeq
  }
  /**
   * Renders the expandable event-timeline section: the "stages" and "executors"
   * groups, their events, and the script call that draws the timeline.
   */
  private def makeTimeline(
      stages: Seq[v1.StageData],
      executors: Seq[v1.ExecutorSummary],
      appStartTime: Long): Seq[Node] = {
    val stageEventJsonAsStrSeq = makeStageEvent(stages)
    val executorsJsonAsStrSeq = makeExecutorEvent(executors)
    val groupJsonArrayAsStr =
      s"""
          |[
          |  {
          |    'id': 'executors',
          |    'content': '<div>Executors</div>${EXECUTORS_LEGEND}',
          |  },
          |  {
          |    'id': 'stages',
          |    'content': '<div>Stages</div>${STAGES_LEGEND}',
          |  }
          |]
        """.stripMargin
    val eventArrayAsStr =
      (stageEventJsonAsStrSeq ++ executorsJsonAsStrSeq).mkString("[", ",", "]")
    <span class="expand-job-timeline">
      <span class="expand-job-timeline-arrow arrow-closed"></span>
      <a data-toggle="tooltip" title={ToolTips.STAGE_TIMELINE} data-placement="top">
        Event Timeline
      </a>
    </span> ++
    <div id="job-timeline" class="collapsed">
      <div class="control-panel">
        <div id="job-timeline-zoom-lock">
          <input type="checkbox"></input>
          <span>Enable zooming</span>
        </div>
      </div>
    </div> ++
    <script type="text/javascript">
      {Unparsed(s"drawJobTimeline(${groupJsonArrayAsStr}, ${eventArrayAsStr}, " +
      s"${appStartTime}, ${UIUtils.getTimeZoneOffset()});")}
    </script>
  }
  /**
   * Renders the job detail page: a summary list, the event timeline, the DAG
   * visualization, and one collapsible table per stage state bucket.
   */
  def render(request: HttpServletRequest): Seq[Node] = {
    val parameterId = request.getParameter("id")
    require(parameterId != null && parameterId.nonEmpty, "Missing id parameter")
    val jobId = parameterId.toInt
    val (jobData, sqlExecutionId) = store.asOption(store.jobWithAssociatedSql(jobId)).getOrElse {
      val content =
        <div id="no-info">
          <p>No information to display for job {jobId}</p>
        </div>
      return UIUtils.headerSparkPage(
        request, s"Details for Job $jobId", content, parent)
    }
    val isComplete = jobData.status != JobExecutionStatus.RUNNING
    // Resolve the latest attempt for every stage of this job, substituting an
    // empty PENDING placeholder when no data is available.
    val stages = jobData.stageIds.map { stageId =>
      // This could be empty if the listener hasn't received information about the
      // stage or if the stage information has been garbage collected
      store.asOption(store.lastStageAttempt(stageId)).getOrElse {
        new v1.StageData(
          status = v1.StageStatus.PENDING,
          stageId = stageId,
          attemptId = 0,
          numTasks = 0,
          numActiveTasks = 0,
          numCompleteTasks = 0,
          numFailedTasks = 0,
          numKilledTasks = 0,
          numCompletedIndices = 0,
          submissionTime = None,
          firstTaskLaunchedTime = None,
          completionTime = None,
          failureReason = None,
          executorDeserializeTime = 0L,
          executorDeserializeCpuTime = 0L,
          executorRunTime = 0L,
          executorCpuTime = 0L,
          resultSize = 0L,
          jvmGcTime = 0L,
          resultSerializationTime = 0L,
          memoryBytesSpilled = 0L,
          diskBytesSpilled = 0L,
          peakExecutionMemory = 0L,
          inputBytes = 0L,
          inputRecords = 0L,
          outputBytes = 0L,
          outputRecords = 0L,
          shuffleRemoteBlocksFetched = 0L,
          shuffleLocalBlocksFetched = 0L,
          shuffleFetchWaitTime = 0L,
          shuffleRemoteBytesRead = 0L,
          shuffleRemoteBytesReadToDisk = 0L,
          shuffleLocalBytesRead = 0L,
          shuffleReadBytes = 0L,
          shuffleReadRecords = 0L,
          shuffleWriteBytes = 0L,
          shuffleWriteTime = 0L,
          shuffleWriteRecords = 0L,
          name = "Unknown",
          description = None,
          details = "Unknown",
          schedulingPool = null,
          rddIds = Nil,
          accumulatorUpdates = Nil,
          tasks = None,
          executorSummary = None,
          killedTasksSummary = Map(),
          ResourceProfile.UNKNOWN_RESOURCE_PROFILE_ID,
          peakExecutorMetrics = None,
          taskMetricsDistributions = None,
          executorMetricsDistributions = None)
      }
    }
    // Bucket stages by state; a missing submission time means the stage never started.
    val activeStages = Buffer[v1.StageData]()
    val completedStages = Buffer[v1.StageData]()
    // If the job is completed, then any pending stages are displayed as "skipped":
    val pendingOrSkippedStages = Buffer[v1.StageData]()
    val failedStages = Buffer[v1.StageData]()
    for (stage <- stages) {
      if (stage.submissionTime.isEmpty) {
        pendingOrSkippedStages += stage
      } else if (stage.completionTime.isDefined) {
        if (stage.status == v1.StageStatus.FAILED) {
          failedStages += stage
        } else {
          completedStages += stage
        }
      } else {
        activeStages += stage
      }
    }
    val basePath = "jobs/job"
    val pendingOrSkippedTableId =
      if (isComplete) {
        "skipped"
      } else {
        "pending"
      }
    // One collapsible table per bucket; only the active-stage table may expose kill links.
    val activeStagesTable =
      new StageTableBase(store, request, activeStages.toSeq, "active", "activeStage",
        parent.basePath, basePath, parent.isFairScheduler,
        killEnabled = parent.killEnabled, isFailedStage = false)
    val pendingOrSkippedStagesTable =
      new StageTableBase(store, request, pendingOrSkippedStages.toSeq, pendingOrSkippedTableId,
        "pendingStage", parent.basePath, basePath, parent.isFairScheduler,
        killEnabled = false, isFailedStage = false)
    val completedStagesTable =
      new StageTableBase(store, request, completedStages.toSeq, "completed", "completedStage",
        parent.basePath, basePath, parent.isFairScheduler,
        killEnabled = false, isFailedStage = false)
    val failedStagesTable =
      new StageTableBase(store, request, failedStages.toSeq, "failed", "failedStage",
        parent.basePath, basePath, parent.isFairScheduler,
        killEnabled = false, isFailedStage = true)
    val shouldShowActiveStages = activeStages.nonEmpty
    val shouldShowPendingStages = !isComplete && pendingOrSkippedStages.nonEmpty
    val shouldShowCompletedStages = completedStages.nonEmpty
    val shouldShowSkippedStages = isComplete && pendingOrSkippedStages.nonEmpty
    val shouldShowFailedStages = failedStages.nonEmpty
    // NOTE(review): the first three list items use <Strong> while later markup uses
    // <strong>; browsers treat tag names case-insensitively, but this is inconsistent.
    val summary: NodeSeq =
      <div>
        <ul class="list-unstyled">
          <li>
            <Strong>Status:</Strong>
            {jobData.status}
          </li>
          <li>
            <Strong>Submitted:</Strong>
            {JobDataUtil.getFormattedSubmissionTime(jobData)}
          </li>
          <li>
            <Strong>Duration:</Strong>
            {JobDataUtil.getFormattedDuration(jobData)}
          </li>
          {
            if (sqlExecutionId.isDefined) {
              <li>
                <strong>Associated SQL Query: </strong>
                {<a href={"%s/SQL/execution/?id=%s".format(
                  UIUtils.prependBaseUri(request, parent.basePath),
                  sqlExecutionId.get)
                }>{sqlExecutionId.get}</a>}
              </li>
            }
          }
          {
            if (jobData.jobGroup.isDefined) {
              <li>
                <strong>Job Group:</strong>
                {jobData.jobGroup.get}
              </li>
            }
          }
          {
            if (shouldShowActiveStages) {
              <li>
                <a href="#active"><strong>Active Stages:</strong></a>
                {activeStages.size}
              </li>
            }
          }
          {
            if (shouldShowPendingStages) {
              <li>
                <a href="#pending">
                  <strong>Pending Stages:</strong>
                </a>{pendingOrSkippedStages.size}
              </li>
            }
          }
          {
            if (shouldShowCompletedStages) {
              <li>
                <a href="#completed"><strong>Completed Stages:</strong></a>
                {completedStages.size}
              </li>
            }
          }
          {
            if (shouldShowSkippedStages) {
              <li>
                <a href="#skipped"><strong>Skipped Stages:</strong></a>
                {pendingOrSkippedStages.size}
              </li>
            }
          }
          {
            if (shouldShowFailedStages) {
              <li>
                <a href="#failed"><strong>Failed Stages:</strong></a>
                {failedStages.size}
              </li>
            }
          }
        </ul>
      </div>
    var content = summary
    val appStartTime = store.applicationInfo().attempts.head.startTime.getTime()
    content ++= makeTimeline((activeStages ++ completedStages ++ failedStages).toSeq,
      store.executorList(false), appStartTime)
    val operationGraphContent = store.asOption(store.operationGraphForJob(jobId)) match {
      case Some(operationGraph) => UIUtils.showDagVizForJob(jobId, operationGraph)
      case None =>
        <div id="no-info">
          <p>No DAG visualization information to display for job {jobId}</p>
        </div>
    }
    content ++= operationGraphContent
    // Append one collapsible section per non-empty stage bucket.
    if (shouldShowActiveStages) {
      content ++=
        <span id="active" class="collapse-aggregated-activeStages collapse-table"
            onClick="collapseTable('collapse-aggregated-activeStages','aggregated-activeStages')">
          <h4>
            <span class="collapse-table-arrow arrow-open"></span>
            <a>Active Stages ({activeStages.size})</a>
          </h4>
        </span> ++
        <div class="aggregated-activeStages collapsible-table">
          {activeStagesTable.toNodeSeq}
        </div>
    }
    if (shouldShowPendingStages) {
      content ++=
        <span id="pending" class="collapse-aggregated-pendingOrSkippedStages collapse-table"
            onClick="collapseTable('collapse-aggregated-pendingOrSkippedStages',
            'aggregated-pendingOrSkippedStages')">
          <h4>
            <span class="collapse-table-arrow arrow-open"></span>
            <a>Pending Stages ({pendingOrSkippedStages.size})</a>
          </h4>
        </span> ++
        <div class="aggregated-pendingOrSkippedStages collapsible-table">
          {pendingOrSkippedStagesTable.toNodeSeq}
        </div>
    }
    if (shouldShowCompletedStages) {
      content ++=
        <span id="completed" class="collapse-aggregated-completedStages collapse-table"
            onClick="collapseTable('collapse-aggregated-completedStages',
            'aggregated-completedStages')">
          <h4>
            <span class="collapse-table-arrow arrow-open"></span>
            <a>Completed Stages ({completedStages.size})</a>
          </h4>
        </span> ++
        <div class="aggregated-completedStages collapsible-table">
          {completedStagesTable.toNodeSeq}
        </div>
    }
    if (shouldShowSkippedStages) {
      content ++=
        <span id="skipped" class="collapse-aggregated-pendingOrSkippedStages collapse-table"
            onClick="collapseTable('collapse-aggregated-pendingOrSkippedStages',
            'aggregated-pendingOrSkippedStages')">
          <h4>
            <span class="collapse-table-arrow arrow-open"></span>
            <a>Skipped Stages ({pendingOrSkippedStages.size})</a>
          </h4>
        </span> ++
        <div class="aggregated-pendingOrSkippedStages collapsible-table">
          {pendingOrSkippedStagesTable.toNodeSeq}
        </div>
    }
    if (shouldShowFailedStages) {
      content ++=
        <span id ="failed" class="collapse-aggregated-failedStages collapse-table"
            onClick="collapseTable('collapse-aggregated-failedStages','aggregated-failedStages')">
          <h4>
            <span class="collapse-table-arrow arrow-open"></span>
            <a>Failed Stages ({failedStages.size})</a>
          </h4>
        </span> ++
        <div class="aggregated-failedStages collapsible-table">
          {failedStagesTable.toNodeSeq}
        </div>
    }
    UIUtils.headerSparkPage(
      request, s"Details for Job $jobId", content, parent, showVisualization = true)
  }
}
| BryanCutler/spark | core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala | Scala | apache-2.0 | 17,927 |
package com.esri.udt
import org.apache.spark.sql.catalyst.InternalRow
/**
*/
/**
 * Spark SQL user-defined type for [[PolygonType]]: serializes the polygon's
 * bounding box, ring offsets and coordinate array via the [[PolyUDT]] base.
 */
class PolygonUDT extends PolyUDT[PolygonType] {

  override def serialize(obj: Any): InternalRow = {
    val PolygonType(xmin, ymin, xmax, ymax, xyNum, xyArr) = obj
    serialize(xmin, ymin, xmax, ymax, xyNum, xyArr)
  }

  override def deserialize(xmin: Double, ymin: Double, xmax: Double, ymax: Double, xyNum: Array[Int], xyArr: Array[Double]) =
    PolygonType(xmin, ymin, xmax, ymax, xyNum, xyArr)

  override def userClass = classOf[PolygonType]

  override def pyUDT = "com.esri.udt.PolygonUDT"

  override def typeName = "polygon"

  // All PolygonUDT instances are interchangeable, so equality is by type alone.
  override def equals(o: Any) = o.isInstanceOf[PolygonUDT]

  // see [SPARK-8647], this achieves the needed constant hash code without constant no.
  override def hashCode(): Int = classOf[PolygonUDT].getName.hashCode()

  override def asNullable: PolygonUDT = this
}
| mraad/spark-gdb | src/main/scala/com/esri/udt/PolygonUDT.scala | Scala | apache-2.0 | 985 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.expression
package proxies.primitives
import scala.collection.JavaConversions._
import org.scalaide.debug.internal.expression.Names.Java
import org.scalaide.debug.internal.expression.Names.Scala
import org.scalaide.debug.internal.expression.context.JdiContext
import org.scalaide.debug.internal.expression.proxies.JdiProxy
import org.scalaide.debug.internal.expression.proxies.JdiProxyCompanion
import org.scalaide.debug.internal.expression.proxies.StringJdiProxy
import com.sun.jdi.ClassType
import com.sun.jdi.Method
import com.sun.jdi.ObjectReference
import com.sun.jdi.PrimitiveValue
/**
* Base for all primitive proxies.
*
* @tparam Primitive type to proxy
* @tparam ProxyType type of proxy
* @param companion companion object for this proxy
*/
abstract class PrimitiveJdiProxy[Primitive, ProxyType <: PrimitiveJdiProxy[Primitive, ProxyType, ValueType], ValueType <: PrimitiveValue](
  companion: PrimitiveJdiProxyCompanion[Primitive, ProxyType, ValueType])
  extends JdiProxy {
  self: ProxyType =>

  /** Underlying primitive value from this proxy. */
  override def __value: ValueType

  // Boxes the primitive inside the debugged VM by invoking the static
  // `valueOf(primitive)` factory on the corresponding wrapper class.
  final def boxed: ObjectReference = {
    val boxedClass: ClassType = __context.classByName(companion.name.javaBoxed)
    // `head` assumes the wrapper class declares exactly one valueOf overload
    // taking the primitive type (true for all java.lang wrappers).
    val boxingMethod: Method = boxedClass
      .methodsByName("valueOf")
      .filter(_.argumentTypeNames.toSeq == Seq(companion.name.java))
      .head
    boxedClass.invokeMethod(__context.currentThread(), boxingMethod, List(__value)).asInstanceOf[ObjectReference]
  }

  /** Underlying primitive name. */
  final def primitiveName: String = companion.name.java

  /** Underlying boxed name */
  final def boxedName: String = companion.name.javaBoxed

  // Reported static type is the Scala rich wrapper (e.g. scala.runtime.RichInt).
  override final protected[expression] def genericThisType: Option[String] = Some(companion.name.scalaRich)
}
/**
* Base for companions of [[org.scalaide.debug.internal.expression.proxies.primitives.PrimitiveJdiProxy]].
*
* It requires one to implement `mirror` method.
*
* @tparam Primitive type to proxy
* @tparam ProxyType type of proxy
* @param boxedName name of boxed type (for example 'java.lang.Character')
* @param unboxedName name of unboxed type (for example 'char')
*/
abstract class PrimitiveJdiProxyCompanion[Primitive, ProxyType <: PrimitiveJdiProxy[Primitive, ProxyType, ValueType], ValueType <: PrimitiveValue](
  val name: TypeNames.Primitive)
  extends JdiProxyCompanion[ProxyType, ValueType] {

  /** Creates a mirror of primitive value in debug context. */
  protected def mirror(value: Primitive, context: JdiContext): ValueType

  /** Creates proxy from primitive using proxy context */
  // First mirrors the raw JVM-local value in the debugged VM, then wraps the mirror.
  final def fromPrimitive(value: Primitive, context: JdiContext): ProxyType =
    apply(context, mirror(value, context))
}
private[expression] object PrimitiveJdiProxy {

  /** Maps java and scala primitive type names to appropriate proxies.
   *
   *  Bare capitalized names that carry no package (e.g. `Int`) are treated as
   *  Scala primitives with the `scala.` prefix stripped, so the prefix is
   *  restored before the lookup.
   *
   *  @param primitiveType primitive, boxed, rich or `String` type name
   *  @return simple class name of the matching proxy class
   *  @throws NoSuchElementException if the (possibly prefixed) name is unknown,
   *          or if `primitiveType` is empty (`head` fails)
   */
  def primitiveToProxy(primitiveType: String): String = {
    val prefix =
      if (primitiveType.head.isUpper && !primitiveType.startsWith("scala") && !primitiveType.startsWith("java.lang")) "scala."
      else ""
    primitiveToProxyMap(prefix + primitiveType)
  }

  // Built with a strict `map` rather than `mapValues`: `mapValues` returns a lazy
  // view that would re-run the reflective `getSimpleName` call on every lookup.
  private val primitiveToProxyMap: Map[String, String] = Map(
    Scala.primitives.Byte -> classOf[ByteJdiProxy],
    Scala.primitives.Short -> classOf[ShortJdiProxy],
    Scala.primitives.Int -> classOf[IntJdiProxy],
    Scala.primitives.Long -> classOf[LongJdiProxy],
    Scala.primitives.Double -> classOf[DoubleJdiProxy],
    Scala.primitives.Float -> classOf[FloatJdiProxy],
    Scala.primitives.Char -> classOf[CharJdiProxy],
    Scala.primitives.Boolean -> classOf[BooleanJdiProxy],

    Scala.unitType -> classOf[UnitJdiProxy],
    Scala.nullType -> classOf[NullJdiProxy],

    Scala.rich.Boolean -> classOf[BooleanJdiProxy],
    Scala.rich.Byte -> classOf[ByteJdiProxy],
    Scala.rich.Char -> classOf[CharJdiProxy],
    Scala.rich.Double -> classOf[DoubleJdiProxy],
    Scala.rich.Float -> classOf[FloatJdiProxy],
    Scala.rich.Int -> classOf[IntJdiProxy],
    Scala.rich.Long -> classOf[LongJdiProxy],
    Scala.rich.Short -> classOf[ShortJdiProxy],

    Java.boxed.Byte -> classOf[ByteJdiProxy],
    Java.boxed.Short -> classOf[ShortJdiProxy],
    Java.boxed.Integer -> classOf[IntJdiProxy],
    Java.boxed.Long -> classOf[LongJdiProxy],
    Java.boxed.Double -> classOf[DoubleJdiProxy],
    Java.boxed.Float -> classOf[FloatJdiProxy],
    Java.boxed.Character -> classOf[CharJdiProxy],
    Java.boxed.Boolean -> classOf[BooleanJdiProxy],

    Java.String -> classOf[StringJdiProxy]
  ).map { case (typeName, proxyClass) => typeName -> proxyClass.getSimpleName }
}
| stephenh/scala-ide | org.scala-ide.sdt.debug.expression/src/org/scalaide/debug/internal/expression/proxies/primitives/PrimitiveJdiProxy.scala | Scala | bsd-3-clause | 4,623 |
import sbt._
import Keys._
import xerial.sbt.Sonatype.sonatypeSettings
object Publish {
  // xerial sbt-sonatype's default publishing settings, extended with the POM
  // metadata (SCM coordinates and developer list) appended via `pomExtra`.
  lazy val settings = sonatypeSettings :+ (pomExtra :=
    <scm>
      <url>git@github.com:EventStore/EventStore.JVM.git</url>
      <connection>scm:git:git@github.com:EventStore/EventStore.JVM.git</connection>
      <developerConnection>scm:git:git@github.com:EventStore/EventStore.JVM.git</developerConnection>
    </scm>
      <developers>
        <developer>
          <id>t3hnar</id>
          <name>Yaroslav Klymko</name>
          <email>t3hnar@gmail.com</email>
        </developer>
      </developers>)
} | pawelkaczor/EventStore.JVM | project/Publish.scala | Scala | bsd-3-clause | 589 |
/*
* Copyright (c) 2014.
* Created by MrTJP.
* All rights reserved.
*/
package mrtjp.core.vec
import codechicken.lib.render.CCModel
import codechicken.lib.vec.Vector3
import net.minecraft.util.Direction
import net.minecraft.util.math.{BlockPos, BlockRayTraceResult, RayTraceResult, Vec3d}
/** Ray-traces against a CCModel's quad mesh to produce Minecraft block hit results. */
object ModelRayTracer
{
    // Traces the segment from1->to1 against `model` positioned at block (x, y, z).
    // Returns a BlockRayTraceResult for the nearest hit, or null on a miss.
    def raytraceModel(x:Double, y:Double, z:Double, from1:Vec3d, to1:Vec3d, model:CCModel):RayTraceResult =
    {
        val from = new Vector3(from1)
        val to = new Vector3(to1)
        val offset = new Vector3(x, y, z)
        // Ray origin in model-local space, plus the (un-normalized) ray direction.
        val start = from.copy.subtract(offset)
        val dir = to.copy.subtract(from)

        // Picks a face index (0..5 = -Y,+Y,-Z,+Z,-X,+X) from the dominant axis component of vec.
        def getSide(vec:Vector3) =
        {
            import vec.{x => x1, y => y1, z => z1}
            Seq((-y1, 0), (y1, 1), (-z1, 2), (z1, 3), (-x1, 4), (x1, 5)).maxBy(_._1)._2
        }

        raytraceModel(start, dir, model) match
        {
            case Some((dist, tri)) =>
                // Side is derived from the hit triangle's normal, biased slightly by the
                // hit position (the 0.001 factor) — presumably to break ties; verify.
                val side = getSide(tri.normal.copy.add(start.copy.add(dir).multiply(dist).multiply(0.001)))
                val mop = new BlockRayTraceResult(calcPlayerHit(new Vector3(x, y, z), from.copy.add(dir.copy.multiply(dist))).vec3(),
                        Direction.byIndex(side), new BlockPos(x.toInt, y.toInt, z.toInt), false)
                mop.subHit = 0
                mop
            case None => null
        }
    }

    // Snaps the hit point p onto the nearest face of block b (inset by 1/4096)
    // when it lies within `thresh` of that face; otherwise returns p unchanged.
    private def calcPlayerHit(b:Vector3, p:Vector3) =
    {
        val shift = 1/4096F
        val thresh = 0.5
        val c = p.copy.subtract(b).add(-0.5)
        val ac = new Vector3(c.x.abs, c.y.abs, c.z.abs)

        if (ac.x < thresh && ac.x >= ac.y && ac.x >= ac.z)
            new Vector3(if (c.x > 0) b.x + 1 - shift else b.x + shift, p.y, p.z)
        else if (ac.y < thresh && ac.y >= ac.z && ac.y >= ac.x)
            new Vector3(p.x, if (c.y > 0) b.y + 1 - shift else b.y + shift, p.z)
        else if (ac.z < thresh && ac.z >= ac.x && ac.z >= ac.y)
            new Vector3(p.x, p.y, if (c.z > 0) b.z + 1 - shift else b.z + shift)
        else p
    }

    // Groups the model's vertices into quads (4 verts each), fans each quad into two
    // triangles, and returns the closest intersected triangle with its hit distance.
    private def raytraceModel(from:Vector3, dir:Vector3, model:CCModel) =
    {
        val faces = for (i <- model.getVertices.indices by 4) yield
            Quad(model.verts(i).vec, model.verts(i+1).vec, model.verts(i+2).vec, model.verts(i+3).vec)

        val tfaces = faces.flatMap(_.toTri)

        var currentHit:Option[(Double, Tri)] = None
        tfaces.foreach(t => mt(from, dir, t.v0, t.v1, t.v2) match
        {
            case Some(dist) =>
                if (currentHit.isEmpty || dist < currentHit.get._1)
                    currentHit = Option((dist, t))
            case None =>
        })
        currentHit
    }

    // Möller–Trumbore ray/triangle intersection: returns the distance along `dir`
    // to the hit point, or None. When cullBack is set, back-facing triangles
    // (determinant < epsilon) are rejected.
    private def mt(origin:Vector3, dir:Vector3, v0:Vector3, v1:Vector3, v2:Vector3, cullBack:Boolean = true, epsilon:Double = 1e-6) =
    {
        // 2 edges of a triangle
        val e1 = v1.copy.subtract(v0)
        val e2 = v2.copy.subtract(v0)

        // determinant of the equation
        val p = dir.copy.crossProduct(e2)
        val det = e1.dotProduct(p)

        if (cullBack)
        {
            if (det < epsilon) None
            else
            {
                // du/dv are barycentric coordinates scaled by det; hit requires both in [0, det].
                val t = origin.copy.subtract(v0)
                val du = t.dotProduct(p)
                if (du < 0.0 || du > det) None
                else
                {
                    val q = t.copy.crossProduct(e1)
                    val dv = dir.dotProduct(q)
                    if (dv < 0.0 || du+dv > det) None
                    else Some(e2.dotProduct(q)/det)
                }
            }
        }
        else
        {
            // Two-sided test: near-zero determinant means the ray is parallel to the triangle.
            if (det < epsilon && det > -epsilon) None
            else
            {
                val invDet = 1.0/det
                val t = origin.copy.subtract(v0)
                val u = t.dotProduct(p)*invDet
                if (u < 0.0 || u > 1.0) None
                else
                {
                    val q = t.copy.crossProduct(e1)
                    val v = dir.dotProduct(q)*invDet
                    if (v < 0.0 || u+v > 1.0) None
                    else Some(e2.dotProduct(q)*invDet)
                }
            }
        }
    }

    // A triangle with its (normalized) face normal.
    private case class Tri(v0:Vector3, v1:Vector3, v2:Vector3)
    {
        val normal = v1.copy.subtract(v0).crossProduct(v2.copy.subtract(v0)).normalize()
    }

    // A quad face, fanned into two triangles sharing v0.
    private case class Quad(v0:Vector3, v1:Vector3, v2:Vector3, v3:Vector3)
    {
        def toTri = Seq(Tri(v0, v1, v2), Tri(v0, v2, v3))
    }
}
| MrTJP/MrTJPCore | src/main/scala/mrtjp/core/vec/ModelRayTracer.scala | Scala | lgpl-3.0 | 4,452 |
package com.daodecode.scalax
object NonBlankString extends (String => Option[String]) {

  /** Wraps a string in `Some` only when it carries visible content.
   *
   * The original string is returned untrimmed; trimming is used solely to
   * decide blankness.
   *
   * @param s the string to inspect; may be `null`
   * @return `None` when `s` is `null`, empty, or whitespace-only, `Some(s)` otherwise
   * @since 0.2.1
   *
   * Example
   * {{{
   * scala> NonBlankString(null)
   * res0: Option[String] = None
   * scala> NonBlankString(" \t ")
   * res1: Option[String] = None
   * scala> NonBlankString(" boo ")
   * res2: Option[String] = Some( boo )
   * }}}
   */
  @inline
  def apply(s: String): Option[String] =
    Option(s).filter(_.trim.nonEmpty)

  /** Extractor counterpart of [[apply]], letting `case NonBlankString(s) =>`
   * patterns match only non-blank strings.
   *
   * @param s the string to inspect; may be `null`
   * @return `None` for blank input, `Some(s)` otherwise
   * @since 0.2.1
   *
   * Example
   * {{{
   * scala> "works!" match {
   *      | case NonBlankString(s) => s
   *      | case _ => "no way!"
   *      |}
   * res0: String = works!
   * }}}
   */
  @inline
  def unapply(s: String): Option[String] = apply(s)
}
| jozic/scalax-collection | src/main/scala/com/daodecode/scalax/NonBlankString.scala | Scala | bsd-3-clause | 1,960 |
/* *\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\* */
package squants.space
import org.scalatest.{ FlatSpec, Matchers }
import squants.QuantityParseException
import squants.energy.{ Joules, JoulesPerCubicMeter }
import squants.mass.{ Kilograms, KilogramsPerCubicMeter }
import squants.motion.CubicMetersPerSecond
import squants.time.Seconds
/**
* @author garyKeorkunian
* @since 0.1
*
*/
class VolumeSpec extends FlatSpec with Matchers {
behavior of "Volume and its Units of Measure"
it should "create values using UOM factories" in {
CubicMeters(1).toCubicMeters should be(1)
Litres(1).toLitres should be(1)
Nanolitres(1).toNanolitres should be(1)
Microlitres(1).toMicrolitres should be(1)
Millilitres(1).toMillilitres should be(1)
Centilitres(1).toCentilitres should be(1)
Decilitres(1).toDecilitres should be(1)
Hectolitres(1).toHectolitres should be(1)
CubicUsMiles(1).toCubicMiles should be(1)
CubicYards(1).toCubicYards should be(1)
CubicFeet(1).toCubicFeet should be(1)
CubicInches(1).toCubicInches should be(1)
UsGallons(1).toUsGallons should be(1)
UsQuarts(1).toUsQuarts should be(1)
UsPints(1).toUsPints should be(1)
UsCups(1).toUsCups should be(1)
FluidOunces(1).toFluidOunces should be(1)
Tablespoons(1).toTablespoons should be(1)
Teaspoons(1).toTeaspoons should be(1)
}
it should "create values from properly formatted Strings" in {
Volume("10.22 m³").get should be(CubicMeters(10.22))
Volume("10.22 L").get should be(Litres(10.22))
Volume("10.22 nl").get should be(Nanolitres(10.22))
Volume("10.22 µl").get should be(Microlitres(10.22))
Volume("10.22 ml").get should be(Millilitres(10.22))
Volume("10.22 cl").get should be(Centilitres(10.22))
Volume("10.22 dl").get should be(Decilitres(10.22))
Volume("10.22 hl").get should be(Hectolitres(10.22))
Volume("10.22 mi³").get should be(CubicUsMiles(10.22))
Volume("10.22 yd³").get should be(CubicYards(10.22))
Volume("10.22 ft³").get should be(CubicFeet(10.22))
Volume("10.22 in³").get should be(CubicInches(10.22))
Volume("10.22 gal").get should be(UsGallons(10.22))
Volume("10.22 qt").get should be(UsQuarts(10.22))
Volume("10.22 pt").get should be(UsPints(10.22))
Volume("10.22 c").get should be(UsCups(10.22))
Volume("10.22 oz").get should be(FluidOunces(10.22))
Volume("10.22 tbsp").get should be(Tablespoons(10.22))
Volume("10.22 tsp").get should be(Teaspoons(10.22))
Volume("10.22 zz").failed.get should be(QuantityParseException("Unable to parse Volume", "10.22 zz"))
Volume("ZZ L").failed.get should be(QuantityParseException("Unable to parse Volume", "ZZ L"))
}
it should "properly convert to all supported Units of Measure" in {
val x = CubicMeters(1)
x.toCubicMeters should be(1)
x.toLitres should be(1000)
x.toNanolitres - 1000000000000.0d < 1 should be(right = true) // Some issues with conversion precision
x.toMicrolitres - 1000000000.0d < 1 should be(right = true) // Some issues with conversion precision
x.toMillilitres - 1000000.0d < 1 should be(right = true) // Some issues with conversion precision
x.toCentilitres - 100000 < 1 should be(right = true) // Some issues with conversion precision
x.toDecilitres should be(10000)
x.toHectolitres should be(10)
x.toCubicMiles should be(1 / math.pow(UsMiles.conversionFactor, 3))
x.toCubicYards should be(1 / BigDecimal(Yards.conversionFactor).pow(3).toDouble)
x.toCubicFeet should be(1 / BigDecimal(Feet.conversionFactor).pow(3).toDouble)
x.toCubicInches should be(1 / math.pow(Inches.conversionFactor, 3))
val litresPerUsGallon = 3.785411784
x.toUsGallons should be(1000d / litresPerUsGallon)
x.toUsQuarts should be(4000d / litresPerUsGallon)
x.toUsPints should be(8000d / litresPerUsGallon)
x.toUsCups should be(16000d / litresPerUsGallon)
x.toFluidOunces should be(128000d / litresPerUsGallon)
x.toTablespoons should be((128000d / litresPerUsGallon) * 2d)
x.toTeaspoons should be((128000d / litresPerUsGallon) * 6d)
val litresPerUsDryGallon = 4.4048837
x.toUsDryGallons should be(1000d / litresPerUsDryGallon)
x.toUsDryQuarts should be(4000d / litresPerUsDryGallon)
x.toUsDryPints should be(8000d / litresPerUsDryGallon)
x.toUsDryCups should be(16000d / litresPerUsDryGallon)
val litresPerImperialGallon = 4.54609
x.toImperialGallons should be(1000d / litresPerImperialGallon)
x.toImperialQuarts should be(4000d / litresPerImperialGallon)
x.toImperialPints should be(8000d / litresPerImperialGallon)
x.toImperialCups should be(16000d / litresPerImperialGallon)
}
  // Verifies the default string rendering (value + unit symbol) of every supported unit.
  it should "return properly formatted strings for all supported Units of Measure" in {
    CubicMeters(1).toString(CubicMeters) should be("1.0 m³")
    Litres(1).toString(Litres) should be("1.0 L")
    Nanolitres(1).toString(Nanolitres) should be("1.0 nl")
    Microlitres(1).toString(Microlitres) should be("1.0 µl")
    Millilitres(1).toString(Millilitres) should be("1.0 ml")
    Centilitres(1).toString(Centilitres) should be("1.0 cl")
    Decilitres(1).toString(Decilitres) should be("1.0 dl")
    Hectolitres(1).toString(Hectolitres) should be("1.0 hl")
    CubicUsMiles(1).toString(CubicUsMiles) should be("1.0 mi³")
    CubicYards(1).toString(CubicYards) should be("1.0 yd³")
    CubicFeet(1).toString(CubicFeet) should be("1.0 ft³")
    CubicInches(1).toString(CubicInches) should be("1.0 in³")
    UsGallons(1).toString(UsGallons) should be("1.0 gal")
    UsQuarts(1).toString(UsQuarts) should be("1.0 qt")
    UsPints(1).toString(UsPints) should be("1.0 pt")
    UsCups(1).toString(UsCups) should be("1.0 c")
  }
  // Dimensional arithmetic: combining Volume with other quantities must yield the correct dimension.
  it should "return Mass when multiplied by Density" in {
    CubicMeters(1) * KilogramsPerCubicMeter(10) should be(Kilograms(10))
  }
  it should "return Energy when multiplied by EnergyDensity" in {
    CubicMeters(1) * JoulesPerCubicMeter(10) should be(Joules(10))
  }
  it should "return Length when divided by Area" in {
    CubicMeters(1) / SquareMeters(1) should be(Meters(1))
  }
  it should "return Area when divided by Length" in {
    CubicMeters(1) / Meters(1) should be(SquareMeters(1))
  }
  it should "return VolumeFlowRate when divided by Time" in {
    CubicMeters(1) / Seconds(1) should be(CubicMetersPerSecond(1))
  }
  it should "return Time when divided by VolumeFlowRate" in {
    CubicMeters(1) / CubicMetersPerSecond(1) should be(Seconds(1))
  }
  it should "return Length when cube rooted" in {
    CubicMeters(27).cubeRoot should be(Meters(3))
  }
  behavior of "VolumeConversions"
  // Each alias constructs exactly one unit of the corresponding measure.
  it should "provide aliases for single unit values" in {
    import VolumeConversions._
    cubicMeter should be(CubicMeters(1))
    litre should be(Litres(1))
    nanoliter should be(Nanolitres(1))
    nanolitre should be(Nanolitres(1))
    microliter should be(Microlitres(1))
    microlitre should be(Microlitres(1))
    milliliter should be(Millilitres(1))
    millilitre should be(Millilitres(1))
    centiliter should be(Centilitres(1))
    centilitre should be(Centilitres(1))
    deciliter should be(Decilitres(1))
    decilitre should be(Decilitres(1))
    hectoliter should be(Hectolitres(1))
    hectolitre should be(Hectolitres(1))
    cubicMile should be(CubicUsMiles(1))
    cubicYard should be(CubicYards(1))
    cubicFoot should be(CubicFeet(1))
    cubicInch should be(CubicInches(1))
    gallon should be(UsGallons(1))
    quart should be(UsQuarts(1))
    pint should be(UsPints(1))
    cup should be(UsCups(1))
    fluidOunce should be(FluidOunces(1))
    tablespoon should be(Tablespoons(1))
    teaspoon should be(Teaspoons(1))
  }
  // Implicit enrichment of Double: d.<unit> builds the corresponding quantity.
  it should "provide implicit conversion from Double" in {
    import VolumeConversions._
    val d = 10d
    d.cubicMeters should be(CubicMeters(d))
    d.cubicMetres should be(CubicMeters(d))
    d.litres should be(Litres(d))
    d.nanoliters should be(Nanolitres(d))
    d.nanolitres should be(Nanolitres(d))
    d.microliters should be(Microlitres(d))
    d.microlitres should be(Microlitres(d))
    d.milliliters should be(Millilitres(d))
    d.millilitres should be(Millilitres(d))
    d.centiliters should be(Centilitres(d))
    d.centilitres should be(Centilitres(d))
    d.deciliters should be(Decilitres(d))
    d.decilitres should be(Decilitres(d))
    d.hectoliters should be(Hectolitres(d))
    d.hectolitres should be(Hectolitres(d))
    d.cubicMiles should be(CubicUsMiles(d))
    d.cubicYards should be(CubicYards(d))
    d.cubicFeet should be(CubicFeet(d))
    d.cubicInches should be(CubicInches(d))
    d.gallons should be(UsGallons(d))
    d.quarts should be(UsQuarts(d))
    d.pints should be(UsPints(d))
    d.cups should be(UsCups(d))
    d.fluidOunces should be(FluidOunces(d))
    d.tablespoons should be(Tablespoons(d))
    d.teaspoons should be(Teaspoons(d))
  }
  // The Numeric instance enables standard-library aggregation like .sum over volumes.
  it should "provide Numeric support" in {
    import VolumeConversions.VolumeNumeric
    val vs = List(CubicMeters(100), CubicMeters(1))
    vs.sum should be(CubicMeters(101))
  }
}
| derekmorr/squants | shared/src/test/scala/squants/space/VolumeSpec.scala | Scala | apache-2.0 | 9,578 |
package thesis.utils
import breeze.linalg._
/** Class that defines a convolution kernel.
  *
  * Both dimensions must be odd so the kernel has a well defined center cell.
  *
  * NOTE(review): this is a case class with a mutable `matrix` field (`t`
  * transposes it in place), so `equals`/`hashCode` can change over the
  * object's lifetime — do not use kernels as keys in hash-based collections.
  *
  * @param matrix the kernel weights
  * @param name   a human readable identifier, used in `toString`
  */
case class Kernel(var matrix:DenseMatrix[Double], name:String){
  require(matrix.rows%2==1, s"The number of rows of the kernel $name must be odd")
  require(matrix.cols%2==1, s"The number of cols of the kernel $name must be odd")
  override def toString:String = s"$name(${matrix.rows}x${matrix.cols})"
  /**
   * @return the sum of the values in this kernel
   */
  def sumKernel = sum(matrix)
  /**
   * @return the number of rows of this kernel
   */
  def rows = matrix.rows
  /**
   * @return the number of columns of this kernel
   */
  def cols = matrix.cols
  /**
   * @return true if this kernel is equal to its transpose
   */
  def isSymmetric:Boolean = matrix.equals(matrix.t)
  // Tracks whether `t` has been applied an odd number of times.
  private var isTranspose_ = false
  /** Transposes this kernel in place.
    * @return this kernel, transposed
    */
  def t:this.type = {
    matrix = matrix.t
    isTranspose_ = !isTranspose_
    this
  }
  /**
   * @return true if this kernel is transposed
   */
  def isTranspose = isTranspose_
  /**
   * @return a deep copy of this kernel with the same weights, name and transposition state
   */
  def copy:Kernel = {
    val k = Kernel(matrix.copy, name)
    // Bug fix: propagate the transposition flag. Previously a copy of a
    // transposed kernel reported isTranspose == false even though its
    // matrix was already transposed.
    k.isTranspose_ = isTranspose_
    k
  }
}
/** Different kernels that can be used for the convolution.
  *
  * All factory methods return [[Kernel]] instances; `normalize` rescales a
  * kernel so that its weights sum to 1.
  */
object Kernels {
  /**********************
   * KERNELS
   **********************/
  /** @return an identity kernel of size size x size (a single 1.0 at the center) */
  def identity(size:Int):Kernel = identity(size, size)
  /** @return an identity kernel of size sizeRow x sizeCol (a single 1.0 at the center) */
  def identity(sizeRow:Int, sizeCol:Int) = Kernel({
    val matrix = DenseMatrix.zeros[Double](sizeRow, sizeCol)
    matrix(sizeRow/2,sizeCol/2) = 1.0
    matrix
  }, s"identity")
  /** @return a normalized all-ones (box) kernel of size sizeRow x sizeCol */
  def ones(sizeRow:Int, sizeCol:Int) = normalize(Kernel(
    DenseMatrix.ones[Double](sizeRow, sizeCol)
  , s"ones"))
  /**
   * Kernels with negative values are currently not supported!
   */
  def almostIdentity = Kernel( DenseMatrix(
      (0.0, 1.0, 0.0),
      (0.0, 0.0, 0.0),
      (0.0, 0.0, 0.0)
    ), "almostIdentity") // For debug only
  def almostIdentity5 = Kernel( DenseMatrix(
      (1.0, 2.0, 1.0, 2.0, 1.0),
      (2.0, 3.0, 1.0, 3.0, 2.0),
      (1.0, 1.0, 4.0, 1.0, 1.0),
      (2.0, 3.0, 1.0, 3.0, 2.0),
      (1.0, 2.0, 1.0, 2.0, 1.0)
    ), "almostIdentity") // For debug only
  def edgeDetection1 = Kernel(DenseMatrix(
      (1.0, 0.0, -1.0),
      (0.0, 0.0, 0.0),
      (-1.0, 0.0, 1.0)
    ), "edgeDetection1")
  def edgeDetection2 = Kernel(DenseMatrix(
      (0.0, 1.0, 0.0),
      (1.0, -4.0, 1.0),
      (0.0, 1.0, 0.0)
    ), "edgeDetection2")
  def edgeDetection3 = Kernel(DenseMatrix(
      (-1.0, 1.0, -1.0),
      (-1.0, 8.0, -1.0),
      (-1.0, -1.0, -1.0)
    ), "edgeDetection3")
  def boxBlur = Kernel((1/9.0) :* DenseMatrix(
      (1.0, 1.0, 1.0),
      (1.0, 1.0, 1.0),
      (1.0, 1.0, 1.0)
    ), "boxBlur")
  def sharpen = Kernel(DenseMatrix(
      (0.0, -1.0, 0.0),
      (-1.0, 5.0, -1.0),
      (0.0, -1.0, 0.0)
    ), "sharpen")
  def gaussianBlur = Kernel((1/16.0) :* DenseMatrix(
      (1.0, 2.0, 1.0),
      (2.0, 4.0, 2.0),
      (1.0, 2.0, 1.0)
    ), "gaussianBlur")
  def diagonal = Kernel((1/63.0) :* (DenseMatrix(
      (10.0, 1.0, 1.0),
      (10.0, 10.0, 1.0),
      (10.0, 10.0, 10.0)
    )), "diagonal")
  def bruteKernel = Kernel((1/8.0) :* DenseMatrix(
      (0.0, 1.0, 0.0),
      (1.0, 4.0, 1.0),
      (0.0, 1.0, 0.0)
    ), "bruteKernel")
  def cross = Kernel((1/6.0) :* DenseMatrix(
      (0.0, 1.0, 0.0),
      (1.0, 2.0, 1.0),
      (0.0, 1.0, 0.0)
    ), "crossKernel")
  def rectangleKernel = Kernel((1/4.0) :* DenseMatrix(
      (1.0),
      (2.0),
      (1.0)
    ), "rectangleKernel")
  // Unnormalized 2D gaussian value at offset (x, y) from the center.
  def gaussian(x:Int, y:Int, sigma:Int) = math.exp(-(x*x+y*y)/(2.0*sigma*sigma)) // no need to multiply by 1/2pi*s^2, since that's constant, and we'll normalize afterwards anyways
  /** @return a copy of `k` scaled so that the sum of its weights is 1 */
  def normalize(k:Kernel):Kernel = {
    // `/` binds tighter than `:*`, so every weight is multiplied by 1/sum.
    Kernel(k.matrix :* (1.0)/sum(k.matrix), k.name)
  }
  /**
   * @param size The number of rows & columns of the kernel
   * @return a normalized exponential gaussian kernel (divide by 2 as we go further from center) of size size x size
   */
  def gaussianBlurExp(size:Int):Kernel = gaussianBlurExp(size, size)
  /**
   * @param sizeRows The number of rows of the kernel
   * @param sizeCols The number of columns of the kernel
   * @return a normalized exponential gaussian kernel (divide by 2 as we go further from center) of size sizeRows x sizeCols
   */
  def gaussianBlurExp(sizeRows:Int, sizeCols:Int):Kernel = {
    normalize(gaussianBlurExpNotNormalized(sizeRows,sizeCols))
  }
  /**
   * @param sizeRows The number of rows of the kernel
   * @param sizeCols The number of columns of the kernel
   * @return a non normalized exponential gaussian kernel (divide by 2 as we go further from center) of size sizeRows x sizeCols
   */
  def gaussianBlurExpNotNormalized(sizeRows:Int, sizeCols:Int):Kernel = {
    require(sizeRows%2==1, "sizeRows must be odd")
    require(sizeCols%2==1, "sizeCols must be odd")
    val center:Int = sizeRows.max(sizeCols)/2
    // Weight of the center cell. The original computed this value but never
    // used it, recomputing math.pow(2, 2 * center) for every cell instead;
    // it is now hoisted out of the tabulate lambda.
    val max = math.pow(2, 2 * center)
    val theKernel = DenseMatrix.tabulate[Double](sizeRows,sizeCols){case(i,j) =>
      // Manhattan distance from the center; the weight halves with every step.
      val dist = Math.abs(i-sizeRows/2) + Math.abs(j-sizeCols/2)
      max / math.pow(2, dist)
    }
    Kernel(theKernel, s"gaussianBlurExp")
  }
  /**
   * @param sizeRows The number of rows of the kernel
   * @param sizeCols The number of columns of the kernel
   * @return a normalized smoothed gaussian kernel (-1 as we go further from center)
   */
  def gaussianBlurSmooth(sizeRows:Int, sizeCols:Int):Kernel = {
    normalize(gaussianBlurSmoothNotNormalized(sizeRows,sizeCols))
  }
  /**
   * @param sizeRows The number of rows of the kernel
   * @param sizeCols The number of columns of the kernel
   * @return a non normalized smoothed gaussian kernel (-1 as we go further from center)
   */
  def gaussianBlurSmoothNotNormalized(sizeRows:Int, sizeCols:Int):Kernel = {
    require(sizeRows%2==1, "sizeRows must be odd")
    require(sizeCols%2==1, "sizeCols must be odd")
    val centerR:Int = sizeRows/2
    val centerC:Int = sizeCols/2
    val center = centerR.max(centerC)
    val theKernel = DenseMatrix.tabulate[Double](sizeRows,sizeCols){case(i,j) =>
      // Chebyshev distance from the center; the weight decreases by 1 per ring.
      center - Math.max(Math.abs(centerR-i), Math.abs(centerC-j)) + 1
    }
    Kernel(theKernel, s"gaussianBlurSmooth")
  }
  /**
   * @param size The number of rows & columns of the kernel
   * @return a real gaussian kernel
   */
  def gaussianBlur(size:Int):Kernel = {
    require(size%2==1, "size must be odd")
    val center:Int = size/2
    // NOTE(review): integer division — for size < 3 this yields sigma == 0 and
    // `gaussian` divides by zero below; callers should pass size >= 3.
    val sigma = size/3 // set the mask size to be 3 times the standard deviation
    val scale = gaussian(-center, -center,sigma) // Set the scale to have the corner's weight equal to 1
    val theKernel = DenseMatrix.tabulate[Double](size,size){case(i,j) =>
      math.floor(gaussian(i-center,j-center,sigma)/scale)
    }
    // The redundant `val scaledKernel = theKernel` alias was removed.
    Kernel(theKernel, s"gaussianBlur")
  }
  /**
   * @param size The number of rows & columns of the kernel
   * @return a normalized cross kernel of size size x size
   */
  def cross(size:Int):Kernel = cross(size, size)
  /**
   * @param sizeRows The number of rows of the kernel
   * @param sizeCols The number of columns of the kernel
   * @return a normalized cross kernel of size sizeRows x sizeCols
   */
  def cross(sizeRows:Int, sizeCols:Int):Kernel = {
    normalize(crossNotNormalized(sizeRows, sizeCols))
  }
  /**
   * @param sizeRows The number of rows of the kernel
   * @param sizeCols The number of columns of the kernel
   * @return a cross kernel with sum = 1 where the weight diminishes exponentially to the borders
   */
  def crossNotNormalized(sizeRows:Int, sizeCols:Int):Kernel = {
    require(sizeRows%2==1, "sizeRows must be odd")
    require(sizeCols%2==1, "sizeCols must be odd")
    val center:Int = sizeRows.max(sizeCols)/2
    val theKernel = DenseMatrix.tabulate[Double](sizeRows,sizeCols){case(i,j) =>
      // Non-zero only on the central row/column; weight halves per step outward.
      if(i == sizeRows/2) Math.pow(2,center-Math.abs(j-sizeCols/2))
      else if (j == sizeCols/2) Math.pow(2,center-Math.abs(i-sizeRows/2))
      else 0.0
    }
    Kernel(theKernel, s"cross")
  }
}
| GLeurquin/Faithful-visualization-of-categorical-datasets | src/main/scala/Utils/Kernels.scala | Scala | mit | 7,628 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.motion
import org.scalatest.{ Matchers, FlatSpec }
import squants.time.Seconds
import squants.mass.Kilograms
import squants.QuantityParseException
/**
 * Unit tests for Momentum and its single unit of measure, NewtonSeconds.
 *
 * @author garyKeorkunian
 * @since 0.1
 *
 */
class MomentumSpec extends FlatSpec with Matchers {
  behavior of "Momentum and its Units of Measure"
  it should "create values using UOM factories" in {
    NewtonSeconds(1).toNewtonSeconds should be(1)
  }
  // Parsing: well-formed strings round-trip; a bad value or unit yields a QuantityParseException.
  it should "create values from properly formatted Strings" in {
    Momentum("10.22 Ns").get should be(NewtonSeconds(10.22))
    Momentum("10.22 zz").failed.get should be(QuantityParseException("Unable to parse Momentum", "10.22 zz"))
    Momentum("zz Ns").failed.get should be(QuantityParseException("Unable to parse Momentum", "zz Ns"))
  }
  it should "properly convert to all supported Units of Measure" in {
    val x = NewtonSeconds(1)
    x.toNewtonSeconds should be(1)
  }
  it should "return properly formatted strings for all supported Units of Measure" in {
    NewtonSeconds(1).toString should be("1.0 Ns")
  }
  // Dimensional arithmetic: momentum = force * time = mass * velocity.
  it should "return Force when divided by Time" in {
    NewtonSeconds(1) / Seconds(1) should be(Newtons(1))
  }
  it should "return Time when divided by Force" in {
    NewtonSeconds(1) / Newtons(1) should be(Seconds(1))
  }
  it should "return Mass when divided by Velocity" in {
    NewtonSeconds(1) / MetersPerSecond(1) should be(Kilograms(1))
  }
  it should "return Velocity when divided by Mass" in {
    NewtonSeconds(1) / Kilograms(1) should be(MetersPerSecond(1))
  }
  behavior of "MomentumConversions"
  it should "provide aliases for single unit values" in {
    import MomentumConversions._
    newtonSecond should be(NewtonSeconds(1))
  }
  it should "provide implicit conversion from Double" in {
    import MomentumConversions._
    val d = 10d
    d.newtonSeconds should be(NewtonSeconds(d))
  }
  // The Numeric instance enables standard-library aggregation like .sum.
  it should "provide Numeric support" in {
    import MomentumConversions.MomentumNumeric
    val ms = List(NewtonSeconds(100), NewtonSeconds(10))
    ms.sum should be(NewtonSeconds(110))
  }
}
| derekmorr/squants | shared/src/test/scala/squants/motion/MomentumSpec.scala | Scala | apache-2.0 | 2,602 |
package slick.compiler
import slick.ast.Library.AggregateFunctionSymbol
import slick.ast.TypeUtil._
import slick.ast.Util._
import slick.ast._
import slick.util.ConstArray
/** Rewrite aggregation function calls to Aggregate nodes. */
class CreateAggregates extends Phase {
  val name = "createAggregates"
  // Only runs when assignUniqueSymbols reported an aggregate call (or did not run at all).
  def apply(state: CompilerState) = {
    if(state.get(Phase.assignUniqueSymbols).map(_.aggregate).getOrElse(true))
      state.map(_.replace({
        // Case 1: a direct application of an aggregate function to a collection.
        // Wrap it in an Aggregate node over a fresh symbol.
        case n @ Apply(f: AggregateFunctionSymbol, ConstArray(from)) =>
          logger.debug("Converting aggregation function application", n)
          val CollectionType(_, elType @ Type.Structural(StructType(els))) = from.nodeType
          val s = new AnonSymbol
          // CountAll counts rows via the constant 1; every other aggregate is
          // applied to the first field of the element struct.
          val a = Aggregate(s, from, Apply(f, ConstArray(f match {
            case Library.CountAll => LiteralNode(1)
            case _ => Select(Ref(s) :@ elType, els.head._1) :@ els.head._2
          }))(n.nodeType)).infer()
          logger.debug("Converted aggregation function application", a)
          inlineMap(a)
        // Case 2: a mapping Bind whose projection contains scalar Aggregate
        // sub-trees independent of s1 — lift them into cross joins.
        case n @ Bind(s1, from1, Pure(sel1, ts1)) if !from1.isInstanceOf[GroupBy] =>
          val (sel2, temp) = liftAggregates(sel1, s1)
          if(temp.isEmpty) n else {
            logger.debug("Lifting aggregates into join in:", n)
            logger.debug("New mapping with temporary refs:", sel2)
            // A Pure over an empty struct contributes nothing; drop it from the join sources.
            val sources = (from1 match {
              case Pure(StructNode(ConstArray()), _) => Vector.empty[(TermSymbol, Node)]
              case _ => Vector(s1 -> from1)
            }) ++ temp.map { case (s, n) => (s, Pure(n)) }
            // Fold the sources into a right-nested chain of inner joins on TRUE (cross joins).
            val from2 = sources.init.foldRight(sources.last._2) {
              case ((_, n), z) => Join(new AnonSymbol, new AnonSymbol, n, z, JoinType.Inner, LiteralNode(true))
            }.infer()
            logger.debug("New 'from' with joined aggregates:", from2)
            // For each source symbol, compute the ElementSymbol path that reaches
            // it inside the nested join tuples.
            val repl: Map[TermSymbol, List[TermSymbol]] = sources match {
              case Vector((s, n)) => Map(s -> List(s1))
              case _ =>
                val len = sources.length
                // NOTE(review): `it` appears to be unused — candidate for removal.
                val it = Iterator.iterate(s1)(_ => ElementSymbol(2))
                sources.zipWithIndex.map { case ((s, _), i) =>
                  val l = List.iterate(s1, i+1)(_ => ElementSymbol(2))
                  s -> (if(i == len-1) l else l :+ ElementSymbol(1))
                }.toMap
            }
            logger.debug("Replacement paths: " + repl)
            val scope = Type.Scope(s1 -> from2.nodeType.asCollectionType.elementType)
            // NOTE(review): on Scala 2.13 mapValues returns a lazy view that would be
            // re-evaluated at each use below — confirm when cross-building.
            val replNodes = repl.mapValues(ss => FwdPath(ss).infer(scope))
            logger.debug("Replacement path nodes: ", StructNode(ConstArray.from(replNodes)))
            val sel3 = sel2.replace({ case n @ Ref(s) => replNodes.getOrElse(s, n) }, keepType = true)
            val n2 = Bind(s1, from2, Pure(sel3, ts1)).infer()
            logger.debug("Lifted aggregates into join in:", n2)
            n2
          }
      }, keepType = true, bottomUp = true))
    else state
  }
  /** Recursively inline mapping Bind calls under an Aggregate */
  def inlineMap(a: Aggregate): Aggregate = a.from match {
    case Bind(s1, f1, Pure(StructNode(defs1), ts1)) if !f1.isInstanceOf[GroupBy] => // mergeToComprehensions always needs a Bind around a GroupBy
      logger.debug("Inlining mapping Bind under Aggregate", a)
      val defs1M = defs1.iterator.toMap
      // Substitute each path rooted at the Aggregate's symbol by the
      // corresponding projection expression from the inner Bind.
      val sel = a.select.replace({
        case FwdPath(s :: f :: rest) if s == a.sym =>
          rest.foldLeft(defs1M(f)) { case (n, s) => n.select(s) }.infer()
      }, keepType = true)
      val a2 = Aggregate(s1, f1, sel) :@ a.nodeType
      logger.debug("Inlining mapping Bind under Aggregate", a2)
      inlineMap(a2)
    case _ => a
  }
  /** Find all scalar Aggregate calls in a sub-tree that do not refer to the given Symbol,
    * and replace them by temporary Refs. */
  def liftAggregates(n: Node, outer: TermSymbol): (Node, Map[TermSymbol, Aggregate]) = n match {
    case a @ Aggregate(s1, f1, sel1) =>
      // An Aggregate that refers to `outer` cannot be lifted out of its scope.
      if(a.findNode {
        case n: PathElement => n.sym == outer
        case _ => false
      }.isDefined) (a, Map.empty)
      else {
        val s, f = new AnonSymbol
        val a2 = Aggregate(s1, f1, StructNode(ConstArray(f -> sel1))).infer()
        (Select(Ref(s) :@ a2.nodeType, f).infer(), Map(s -> a2))
      }
    // Do not descend into nested collection-typed sub-queries.
    case n :@ CollectionType(_, _) =>
      (n, Map.empty)
    case n =>
      val mapped = n.children.map(liftAggregates(_, outer))
      val m = mapped.iterator.flatMap(_._2).toMap
      val n2 =
        if(m.isEmpty) n else n.withChildren(mapped.map(_._1)) :@ n.nodeType
      (n2, m)
  }
}
| kwark/slick | slick/src/main/scala/slick/compiler/CreateAggregates.scala | Scala | bsd-2-clause | 4,605 |
package com.argcv.dvergar.ptcer.models
import java.util.concurrent.atomic.AtomicLong
/**
 * Thread-safe counter over a fixed number of pattern slots.
 *
 * @param psize the number of distinct patterns to count (valid indices: 0 until psize)
 * @author yu
 */
class PatternCounter(psize: Int) {
  // One atomic cell per pattern id. Array.fill replaces the original
  // (0 until psize).map(i => ...).toArray, which ignored its index variable.
  val counter: Array[AtomicLong] = Array.fill(psize)(new AtomicLong())

  /**
   * Increments the counter of pattern `i` by one.
   *
   * @param i pattern index
   * @return the updated count
   */
  def add(i: Int): Long = counter(i).incrementAndGet()

  /**
   * Increments the counter of pattern `i` by `c`.
   *
   * @param i index
   * @param c count
   * @return the updated count
   */
  def add(i: Int, c: Long): Long = counter(i).addAndGet(c)

  /**
   * @param i pattern index
   * @return the current count of pattern `i`
   */
  def count(i: Int): Long = counter(i).get()

  /** Prints a formatted summary of all counters to stdout, five per line. */
  def printAll(): Unit = {
    println("Pattern Summary:")
    counter.zipWithIndex.foreach { c =>
      //logger.info(s"[${c._2}] = ${c._1.get()}")
      if (c._2 % 5 == 4) {
        println(f"[${c._2}%2d] = ${c._1.get()}%-6d")
      } else {
        print(f"[${c._2}%2d] = ${c._1.get()}%-6d ")
      }
    }
    println()
  }
}
| yuikns/pattern-counter | src/main/scala/com/argcv/dvergar/ptcer/models/PatternCounter.scala | Scala | mit | 772 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.javalib.lang
import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.AssertThrows._
import org.scalajs.testsuite.utils.Platform._
class IntegerTest {
  // Defined as instance members (not literal constants) to avoid any compile-time
  // constant folding of the values at their use sites.
  // NOTE(review): the original comment said "var"s, but `val`s are declared here.
  val MaxValue: Int = Int.MaxValue
  val MinValue: Int = Int.MinValue
@Test def `reverseBytes`(): Unit = {
assertEquals(0xefbeadde, Integer.reverseBytes(0xdeadbeef))
}
@Test def rotateLeft(): Unit = {
assertEquals(0x689cd401, Integer.rotateLeft(0x689cd401, 0))
assertEquals(0xd139a802, Integer.rotateLeft(0x689cd401, 1))
assertEquals(0x9cd40168, Integer.rotateLeft(0x689cd401, 8))
assertEquals(0x9a802d13, Integer.rotateLeft(0x689cd401, 13))
assertEquals(0x689cd401, Integer.rotateLeft(0x689cd401, 32))
assertEquals(0xd139a802, Integer.rotateLeft(0x689cd401, 33))
assertEquals(0xe6a00b44, Integer.rotateLeft(0x689cd401, 43))
assertEquals(0xb44e6a00, Integer.rotateLeft(0x689cd401, -1))
assertEquals(0x89cd4016, Integer.rotateLeft(0x689cd401, -28))
assertEquals(0x2d139a8, Integer.rotateLeft(0x689cd401, -39))
}
@Test def rotateRight(): Unit = {
assertEquals(0x689cd401, Integer.rotateRight(0x689cd401, 0))
assertEquals(0xb44e6a00, Integer.rotateRight(0x689cd401, 1))
assertEquals(0x1689cd4, Integer.rotateRight(0x689cd401, 8))
assertEquals(0xa00b44e6, Integer.rotateRight(0x689cd401, 13))
assertEquals(0x689cd401, Integer.rotateRight(0x689cd401, 32))
assertEquals(0xb44e6a00, Integer.rotateRight(0x689cd401, 33))
assertEquals(0x802d139a, Integer.rotateRight(0x689cd401, 43))
assertEquals(0xd139a802, Integer.rotateRight(0x689cd401, -1))
assertEquals(0x1689cd40, Integer.rotateRight(0x689cd401, -28))
assertEquals(0x4e6a00b4, Integer.rotateRight(0x689cd401, -39))
}
@Test def bitCount(): Unit = {
assertEquals(0, Integer.bitCount(0))
assertEquals(1, Integer.bitCount(1))
assertEquals(1, Integer.bitCount(2))
assertEquals(2, Integer.bitCount(3))
assertEquals(31, Integer.bitCount(Int.MaxValue))
assertEquals(1, Integer.bitCount(Int.MinValue))
assertEquals(2, Integer.bitCount(Int.MinValue + 1))
assertEquals(32, Integer.bitCount(-1))
assertEquals(31, Integer.bitCount(-2))
assertEquals(18, Integer.bitCount(-155937076))
assertEquals(12, Integer.bitCount(830524462))
assertEquals(17, Integer.bitCount(-1468950275))
assertEquals(22, Integer.bitCount(1878189982))
assertEquals(16, Integer.bitCount(1369853111))
assertEquals(16, Integer.bitCount(993872011))
assertEquals(17, Integer.bitCount(-419203945))
assertEquals(18, Integer.bitCount(-1529972891))
assertEquals(15, Integer.bitCount(-560981166))
assertEquals(19, Integer.bitCount(-1083297551))
assertEquals(19, Integer.bitCount(-1513915437))
assertEquals(19, Integer.bitCount(-774144288))
assertEquals(13, Integer.bitCount(1617041908))
assertEquals(15, Integer.bitCount(-799619923))
assertEquals(11, Integer.bitCount(1630552297))
assertEquals(15, Integer.bitCount(1893565724))
assertEquals(20, Integer.bitCount(-167512165))
assertEquals(17, Integer.bitCount(-1226735856))
assertEquals(13, Integer.bitCount(-1602623352))
assertEquals(14, Integer.bitCount(73385109))
assertEquals(17, Integer.bitCount(1843595740))
assertEquals(16, Integer.bitCount(-2005160623))
assertEquals(21, Integer.bitCount(-220474394))
assertEquals(14, Integer.bitCount(-1474261577))
assertEquals(18, Integer.bitCount(-2015504620))
assertEquals(20, Integer.bitCount(1450835633))
assertEquals(13, Integer.bitCount(-1877059561))
assertEquals(16, Integer.bitCount(-864957023))
assertEquals(17, Integer.bitCount(-1423863837))
assertEquals(16, Integer.bitCount(661877472))
assertEquals(14, Integer.bitCount(-1297344862))
assertEquals(20, Integer.bitCount(-1084965589))
assertEquals(20, Integer.bitCount(-169792549))
assertEquals(19, Integer.bitCount(-1175303521))
assertEquals(14, Integer.bitCount(-2075407535))
assertEquals(21, Integer.bitCount(-34407382))
assertEquals(18, Integer.bitCount(-686482061))
assertEquals(18, Integer.bitCount(-1280254298))
assertEquals(14, Integer.bitCount(-1236753591))
assertEquals(20, Integer.bitCount(-629695246))
assertEquals(15, Integer.bitCount(-1041379007))
assertEquals(12, Integer.bitCount(1133674695))
assertEquals(17, Integer.bitCount(-673156775))
assertEquals(15, Integer.bitCount(1634657308))
assertEquals(15, Integer.bitCount(-1634571160))
assertEquals(17, Integer.bitCount(-1394160814))
assertEquals(13, Integer.bitCount(57693078))
assertEquals(15, Integer.bitCount(788250760))
assertEquals(19, Integer.bitCount(-1217888690))
assertEquals(12, Integer.bitCount(-1568144709))
assertEquals(16, Integer.bitCount(827170343))
assertEquals(21, Integer.bitCount(-341950555))
assertEquals(14, Integer.bitCount(1287166354))
assertEquals(19, Integer.bitCount(-1639223942))
assertEquals(17, Integer.bitCount(532815708))
assertEquals(17, Integer.bitCount(-768179729))
assertEquals(15, Integer.bitCount(760154173))
assertEquals(15, Integer.bitCount(2000995890))
assertEquals(10, Integer.bitCount(1468010757))
assertEquals(17, Integer.bitCount(295957433))
assertEquals(17, Integer.bitCount(-1594421450))
assertEquals(16, Integer.bitCount(-1110692843))
assertEquals(10, Integer.bitCount(72567553))
assertEquals(14, Integer.bitCount(1008258604))
assertEquals(18, Integer.bitCount(1017279739))
assertEquals(14, Integer.bitCount(-649579130))
assertEquals(12, Integer.bitCount(-1743090924))
assertEquals(15, Integer.bitCount(-1321851761))
assertEquals(21, Integer.bitCount(1995849614))
assertEquals(19, Integer.bitCount(1874069759))
assertEquals(18, Integer.bitCount(57468414))
assertEquals(13, Integer.bitCount(-159055416))
assertEquals(15, Integer.bitCount(-770646612))
assertEquals(15, Integer.bitCount(1274257460))
assertEquals(17, Integer.bitCount(-1728268856))
assertEquals(14, Integer.bitCount(-131769823))
assertEquals(18, Integer.bitCount(1810706244))
assertEquals(14, Integer.bitCount(881236344))
assertEquals(11, Integer.bitCount(-536176288))
assertEquals(20, Integer.bitCount(-371993265))
assertEquals(13, Integer.bitCount(-1257692889))
assertEquals(11, Integer.bitCount(38550368))
assertEquals(14, Integer.bitCount(-196060824))
assertEquals(18, Integer.bitCount(-218909520))
assertEquals(21, Integer.bitCount(-735195141))
assertEquals(16, Integer.bitCount(-1122922843))
assertEquals(19, Integer.bitCount(-269171126))
assertEquals(18, Integer.bitCount(2002409940))
assertEquals(18, Integer.bitCount(-106797451))
assertEquals(17, Integer.bitCount(-1412648370))
assertEquals(20, Integer.bitCount(-342432881))
assertEquals(20, Integer.bitCount(-294768321))
assertEquals(14, Integer.bitCount(586296006))
assertEquals(19, Integer.bitCount(-1627992562))
assertEquals(17, Integer.bitCount(-1567624079))
assertEquals(13, Integer.bitCount(453182827))
assertEquals(16, Integer.bitCount(-704549035))
assertEquals(15, Integer.bitCount(1722304234))
assertEquals(19, Integer.bitCount(-747553362))
assertEquals(18, Integer.bitCount(-1535508973))
}
  @Test def numberOfLeadingZeros(): Unit = {
    /* The optimizer can *constant-fold* Integer.numberOfLeadingZeros,
     * so if we want to actually test anything happening at runtime, we have
     * to prevent the optimizer to see the connection between the actual
     * value of i, hence testNoInline. We also test the constant-folding
     * logic with testInline.
     */
    @inline def testInline(i: Int, expected: Int): Unit =
      assertEquals(expected, Integer.numberOfLeadingZeros(i))

    @noinline def testNoInline(i: Int, expected: Int): Unit =
      testInline(i, expected)

    // Each case is exercised twice: once inlined (constant-foldable) and once
    // through the noinline indirection (forced runtime evaluation).
    @inline def test(i: Int, expected: Int): Unit = {
      testInline(i, expected)
      testNoInline(i, expected)
    }

    // Boundary cases followed by arbitrary regression values.
    test(0, 32)
    test(1, 31)
    test(5, 29)
    test(-1, 0)

    test(454050, 13)
    test(5623, 19)
    test(31, 27)
    test(9903091, 8)
    test(1692, 21)
    test(2, 30)
    test(1109670822, 1)
    test(3453, 20)
    test(38, 26)
    test(5, 29)
    test(4966, 19)
    test(1, 31)
    test(11552, 18)
    test(3, 30)
    test(7973478, 9)
    test(29285, 17)
    test(286646607, 3)
    test(226, 24)
    test(3934789, 10)
    test(661375551, 2)
    test(96414310, 5)
    test(983679, 12)
    test(1277, 21)
    test(168316711, 4)
    test(23440, 17)
    test(4609, 19)
    test(1757, 21)
    test(307973987, 3)
    test(3260121, 10)
    test(52257, 16)
    test(402, 23)
    test(1046482241, 2)
    test(12785, 18)
    test(4, 29)
    test(422, 23)
    test(19, 27)
    test(2991, 20)
    test(2036, 21)
    test(13, 28)
    test(38, 26)
    test(101, 25)
    test(85138295, 5)
    test(13225, 18)
    test(7768, 19)
    test(7630814, 9)
    test(226, 24)
    test(177422164, 4)
    test(32, 26)
    test(407, 23)
    test(860691653, 2)
    test(34, 26)
    test(25786, 17)
    test(55778847, 6)
    test(1439057, 11)
    test(671565896, 2)
    test(603, 22)
    test(741, 22)
    test(1459118, 11)
    test(78, 25)
    test(177510, 14)
    test(926370, 12)
    test(685733055, 2)
    test(217, 24)
    test(523651, 13)
    test(3, 30)
    test(8, 28)
    test(-1253180290, 0)
    test(134956860, 4)
    test(1255403863, 1)
    test(30, 27)
    test(351465, 13)
    test(1281021, 11)
    test(12073178, 8)
    test(1034035, 12)
    test(978759, 12)
    test(63, 26)
    test(49700708, 6)
    test(31, 27)
    test(20, 27)
    test(6686565, 9)
    test(31, 27)
    test(621614, 12)
    test(81224072, 5)
    test(2687, 20)
    test(19236, 17)
    test(129429186, 5)
    test(4, 29)
    test(978, 22)
    test(24137647, 7)
    test(150728, 14)
    test(3825, 20)
    test(34, 26)
    test(100111471, 5)
    test(92028, 15)
    test(-1198731278, 0)
    test(250395, 14)
    test(2753, 20)
    test(491505965, 3)
    test(30716590, 7)
    test(213241, 14)
  }
@Test def numberOfTrailingZeros(): Unit = {
assertEquals(32, Integer.numberOfTrailingZeros(0))
assertEquals(0, Integer.numberOfTrailingZeros(1))
assertEquals(2, Integer.numberOfTrailingZeros(12))
assertEquals(3, Integer.numberOfTrailingZeros(1024 + 64 + 8))
assertEquals(0, Integer.numberOfTrailingZeros(-1))
assertEquals(31, Integer.numberOfTrailingZeros(Int.MinValue))
assertEquals(0, Integer.numberOfTrailingZeros(Int.MaxValue))
assertEquals(29, Integer.numberOfTrailingZeros(1610612736))
assertEquals(29, Integer.numberOfTrailingZeros(-1610612736))
assertEquals(26, Integer.numberOfTrailingZeros(1409286144))
assertEquals(31, Integer.numberOfTrailingZeros(-2147483648))
assertEquals(27, Integer.numberOfTrailingZeros(1207959552))
assertEquals(13, Integer.numberOfTrailingZeros(-536928256))
assertEquals(2, Integer.numberOfTrailingZeros(-1096256668))
assertEquals(20, Integer.numberOfTrailingZeros(-923795456))
assertEquals(29, Integer.numberOfTrailingZeros(-1610612736))
assertEquals(19, Integer.numberOfTrailingZeros(-2077753344))
assertEquals(23, Integer.numberOfTrailingZeros(-1602224128))
assertEquals(4, Integer.numberOfTrailingZeros(-136456432))
assertEquals(10, Integer.numberOfTrailingZeros(1711399936))
assertEquals(21, Integer.numberOfTrailingZeros(467664896))
assertEquals(22, Integer.numberOfTrailingZeros(1270874112))
assertEquals(16, Integer.numberOfTrailingZeros(-1163198464))
assertEquals(8, Integer.numberOfTrailingZeros(-1830050048))
assertEquals(4, Integer.numberOfTrailingZeros(929614128))
assertEquals(24, Integer.numberOfTrailingZeros(-83886080))
assertEquals(29, Integer.numberOfTrailingZeros(-1610612736))
assertEquals(31, Integer.numberOfTrailingZeros(-2147483648))
assertEquals(15, Integer.numberOfTrailingZeros(-1549762560))
assertEquals(7, Integer.numberOfTrailingZeros(1810143104))
assertEquals(21, Integer.numberOfTrailingZeros(1398800384))
assertEquals(31, Integer.numberOfTrailingZeros(-2147483648))
assertEquals(16, Integer.numberOfTrailingZeros(-81723392))
assertEquals(2, Integer.numberOfTrailingZeros(1061913644))
assertEquals(7, Integer.numberOfTrailingZeros(-1964724608))
assertEquals(20, Integer.numberOfTrailingZeros(1995440128))
assertEquals(26, Integer.numberOfTrailingZeros(-335544320))
assertEquals(0, Integer.numberOfTrailingZeros(-1193917187))
assertEquals(18, Integer.numberOfTrailingZeros(1714683904))
assertEquals(31, Integer.numberOfTrailingZeros(-2147483648))
assertEquals(14, Integer.numberOfTrailingZeros(1003175936))
assertEquals(31, Integer.numberOfTrailingZeros(-2147483648))
assertEquals(29, Integer.numberOfTrailingZeros(1610612736))
assertEquals(26, Integer.numberOfTrailingZeros(-1946157056))
assertEquals(17, Integer.numberOfTrailingZeros(-1907228672))
assertEquals(28, Integer.numberOfTrailingZeros(-268435456))
assertEquals(9, Integer.numberOfTrailingZeros(-1535911424))
assertEquals(7, Integer.numberOfTrailingZeros(523888512))
assertEquals(5, Integer.numberOfTrailingZeros(358260320))
assertEquals(23, Integer.numberOfTrailingZeros(981467136))
assertEquals(23, Integer.numberOfTrailingZeros(1669332992))
assertEquals(27, Integer.numberOfTrailingZeros(-1207959552))
assertEquals(29, Integer.numberOfTrailingZeros(536870912))
assertEquals(18, Integer.numberOfTrailingZeros(-748945408))
assertEquals(14, Integer.numberOfTrailingZeros(1289338880))
assertEquals(18, Integer.numberOfTrailingZeros(-82051072))
assertEquals(31, Integer.numberOfTrailingZeros(-2147483648))
assertEquals(12, Integer.numberOfTrailingZeros(-675172352))
assertEquals(15, Integer.numberOfTrailingZeros(77234176))
assertEquals(0, Integer.numberOfTrailingZeros(-276596511))
assertEquals(5, Integer.numberOfTrailingZeros(222966496))
assertEquals(6, Integer.numberOfTrailingZeros(-1981260992))
assertEquals(14, Integer.numberOfTrailingZeros(1689927680))
assertEquals(31, Integer.numberOfTrailingZeros(-2147483648))
assertEquals(8, Integer.numberOfTrailingZeros(-1499141888))
assertEquals(3, Integer.numberOfTrailingZeros(1441572168))
assertEquals(30, Integer.numberOfTrailingZeros(-1073741824))
assertEquals(10, Integer.numberOfTrailingZeros(-1175759872))
assertEquals(19, Integer.numberOfTrailingZeros(781713408))
assertEquals(30, Integer.numberOfTrailingZeros(1073741824))
assertEquals(7, Integer.numberOfTrailingZeros(117264512))
assertEquals(1, Integer.numberOfTrailingZeros(-808865046))
assertEquals(23, Integer.numberOfTrailingZeros(-394264576))
assertEquals(26, Integer.numberOfTrailingZeros(-1946157056))
assertEquals(23, Integer.numberOfTrailingZeros(-1619001344))
assertEquals(16, Integer.numberOfTrailingZeros(-1236860928))
assertEquals(5, Integer.numberOfTrailingZeros(-134639968))
assertEquals(26, Integer.numberOfTrailingZeros(-1946157056))
assertEquals(28, Integer.numberOfTrailingZeros(-1342177280))
assertEquals(29, Integer.numberOfTrailingZeros(1610612736))
assertEquals(28, Integer.numberOfTrailingZeros(1879048192))
assertEquals(30, Integer.numberOfTrailingZeros(-1073741824))
assertEquals(21, Integer.numberOfTrailingZeros(1994391552))
assertEquals(24, Integer.numberOfTrailingZeros(-1862270976))
assertEquals(18, Integer.numberOfTrailingZeros(-2055471104))
assertEquals(20, Integer.numberOfTrailingZeros(349175808))
assertEquals(16, Integer.numberOfTrailingZeros(-1247477760))
assertEquals(12, Integer.numberOfTrailingZeros(-1874415616))
assertEquals(16, Integer.numberOfTrailingZeros(1336606720))
assertEquals(0, Integer.numberOfTrailingZeros(565581745))
assertEquals(31, Integer.numberOfTrailingZeros(-2147483648))
assertEquals(23, Integer.numberOfTrailingZeros(562036736))
assertEquals(3, Integer.numberOfTrailingZeros(1372687704))
assertEquals(15, Integer.numberOfTrailingZeros(1147437056))
assertEquals(10, Integer.numberOfTrailingZeros(941499392))
assertEquals(9, Integer.numberOfTrailingZeros(938859008))
assertEquals(2, Integer.numberOfTrailingZeros(1987222364))
assertEquals(24, Integer.numberOfTrailingZeros(251658240))
assertEquals(3, Integer.numberOfTrailingZeros(1598124120))
assertEquals(14, Integer.numberOfTrailingZeros(-1177763840))
assertEquals(28, Integer.numberOfTrailingZeros(1879048192))
assertEquals(27, Integer.numberOfTrailingZeros(-939524096))
assertEquals(21, Integer.numberOfTrailingZeros(-622854144))
assertEquals(25, Integer.numberOfTrailingZeros(-1577058304))
assertEquals(16, Integer.numberOfTrailingZeros(1751711744))
assertEquals(24, Integer.numberOfTrailingZeros(-1392508928))
assertEquals(19, Integer.numberOfTrailingZeros(303562752))
}
@Test def toBinaryString(): Unit = {
assertEquals("11111111111111111111111111111111", Integer.toBinaryString(-1))
assertEquals("11111111111111111101100011101111", Integer.toBinaryString(-10001))
assertEquals("10000000000000000000000000000000", Integer.toBinaryString(MinValue))
assertEquals("1111111111111111111111111111111", Integer.toBinaryString(MaxValue))
}
@Test def toHexString(): Unit = {
assertEquals("ffffffff", Integer.toHexString(-1))
assertEquals("ffffd8ef", Integer.toHexString(-10001))
assertEquals("80000000", Integer.toHexString(MinValue))
assertEquals("8007613e", Integer.toHexString(-2147000002))
assertEquals("7fffffff", Integer.toHexString(MaxValue))
}
@Test def toOctalString(): Unit = {
assertEquals("37777777777", Integer.toOctalString(-1))
assertEquals("37777754357", Integer.toOctalString(-10001))
assertEquals("20000000000", Integer.toOctalString(MinValue))
assertEquals("17777777777", Integer.toOctalString(MaxValue))
}
@Test def compareTo(): Unit = {
def compare(x: Int, y: Int): Int =
new Integer(x).compareTo(new Integer(y))
assertTrue(compare(0, 5) < 0)
assertTrue(compare(10, 9) > 0)
assertTrue(compare(-2, -1) < 0)
assertEquals(0, compare(3, 3))
}
@Test def should_be_a_Comparable(): Unit = {
def compare(x: Any, y: Any): Int =
x.asInstanceOf[Comparable[Any]].compareTo(y)
assertTrue(compare(0, 5) < 0)
assertTrue(compare(10, 9) > 0)
assertTrue(compare(-2, -1) < 0)
assertEquals(0, compare(3, 3))
}
  @Test def should_parse_strings(): Unit = {
    // Verifies that parseInt, valueOf and (for radix 10) the String constructor
    // all agree, across signs, magnitudes and radixes.
    def test(s: String, v: Int, radix: Int = 10): Unit = {
      assertEquals(v, Integer.parseInt(s, radix))
      assertEquals(v, Integer.valueOf(s, radix).intValue())
      if (radix == 10)
        assertEquals(v, new Integer(s).intValue())
    }
    test("0", 0)
    test("5", 5)
    test("127", 127)
    test("-100", -100)
    test("30000", 30000)
    test("-90000", -90000)
    test("Kona", 411787, 27) // letters are valid digits in radix 27
    if (!executingInJVMOnJDK6)
      test("+42", 42) // a leading '+' is only accepted from JDK 7 on
    test("-0", 0)
    test("-FF", -255, 16)
  }
@Test def should_reject_invalid_strings_when_parsing(): Unit = {
def test(s: String, radix: Int = 10): Unit =
expectThrows(classOf[NumberFormatException], Integer.parseInt(s, radix))
test("abc")
test("5a")
test("2147483648")
test("99", 8)
test("-")
test("")
}
@Test def should_parse_strings_in_base_16(): Unit = {
def test(s: String, v: Int): Unit = {
assertEquals(v, Integer.parseInt(s, 16))
assertEquals(v, Integer.valueOf(s, 16).intValue())
}
test("0", 0x0)
test("5", 0x5)
test("ff", 0xff)
test("-24", -0x24)
test("30000", 0x30000)
test("-90000", -0x90000)
}
  @Test def highestOneBit(): Unit = {
    /* Spec ported from
     * https://github.com/gwtproject/gwt/blob/master/user/test/com/google/gwt/emultest/java/lang/IntegerTest.java
     */
    assertEquals(0, Integer.highestOneBit(0))
    // Every negative number has the sign bit set, so its highest bit is MIN_VALUE.
    assertEquals(Integer.MIN_VALUE, Integer.highestOneBit(-1))
    assertEquals(Integer.MIN_VALUE, Integer.highestOneBit(-256))
    assertEquals(1, Integer.highestOneBit(1))
    assertEquals(0x80, Integer.highestOneBit(0x88))
    assertEquals(0x40000000, Integer.highestOneBit(Int.MaxValue))
    assertEquals(Int.MinValue, Integer.highestOneBit(Int.MinValue))
  }
@Test def lowestOneBit(): Unit = {
assertEquals(0, Integer.lowestOneBit(0))
assertEquals(1, Integer.lowestOneBit(-1))
assertEquals(256, Integer.lowestOneBit(-256))
assertEquals(4, Integer.lowestOneBit(12))
assertEquals(0x8, Integer.lowestOneBit(0x88))
assertEquals(1, Integer.lowestOneBit(Int.MaxValue))
assertEquals(Int.MinValue, Integer.lowestOneBit(Int.MinValue))
}
  @Test def toString_without_radix(): Unit = {
    /* Spec ported from
     * https://github.com/gwtproject/gwt/blob/master/user/test/com/google/gwt/emultest/java/lang/IntegerTest.java
     */
    // Covers instance toString, the String constructor, and the static helper,
    // including both extremes of the Int range.
    assertEquals("12345", new Integer(12345).toString)
    assertEquals("-12345", new Integer("-12345").toString)
    assertEquals("-80765", Integer.toString(-80765))
    assertEquals("2147483647", Integer.toString(Int.MaxValue))
    assertEquals("-2147483647", Integer.toString(-Int.MaxValue))
    assertEquals("-2147483648", Integer.toString(Int.MinValue))
    assertEquals("0", Integer.toString(0))
  }
  @Test def toString_with_radix(): Unit = {
    /* Spec ported from
     * https://github.com/gwtproject/gwt/blob/master/user/test/com/google/gwt/emultest/java/lang/IntegerTest.java
     */
    // Unlike toBinaryString/toHexString/toOctalString, toString(i, radix)
    // renders negative values with a '-' sign rather than as unsigned bits.
    assertEquals("17777777777", Integer.toString(2147483647, 8))
    assertEquals("7fffffff", Integer.toString(2147483647, 16))
    assertEquals("1111111111111111111111111111111", Integer.toString(2147483647, 2))
    assertEquals("2147483647", Integer.toString(2147483647, 10))
    assertEquals("-17777777777", Integer.toString(-2147483647, 8))
    assertEquals("-7fffffff", Integer.toString(-2147483647, 16))
    assertEquals("-1111111111111111111111111111111", Integer.toString(-2147483647, 2))
    assertEquals("-2147483647", Integer.toString(-2147483647, 10))
    assertEquals("-20000000000", Integer.toString(-2147483648, 8))
    assertEquals("-80000000", Integer.toString(-2147483648, 16))
    assertEquals("-10000000000000000000000000000000", Integer.toString(-2147483648, 2))
    assertEquals("-2147483648", Integer.toString(-2147483648, 10))
  }
}
| mdedetrich/scala-js | test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/lang/IntegerTest.scala | Scala | bsd-3-clause | 23,003 |
package zzb.storage
import com.mongodb.casbah.{query, Imports}
import com.mongodb.casbah.query.dsl.QueryExpressionObject
import com.mongodb.casbah.query.Imports._
import com.typesafe.scalalogging.slf4j.Logging
import zzb.datatype.StructPath
/**
 * Helper for building MongoDB (Casbah) query DBObjects from typed keys,
 * values and comparison operators (eq / lt / lte / gt / gte).
 * @author 何伟波
 * @since 0.1.0
 */
trait DBObjectHelper{
  // --- "$gt" (greater-than) builders, one partial function per runtime type. ---
  // NOTE(review): Int values are widened via toString.toDouble and Boolean via
  // toString.toBoolean — presumably so numbers match Mongo's double storage;
  // confirm against how documents are written.
  val gtStringMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"gt") if value.isInstanceOf[String]=> Some( key $gt value.toString )
  }
  val gtDoubleMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"gt") if value.isInstanceOf[Double]=> Some( key $gt value.asInstanceOf[Double] )
  }
  val gtIntMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"gt") if value.isInstanceOf[Int]=> Some( key $gt value.toString.toDouble )
  }
  val gtBooleanMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"gt") if value.isInstanceOf[Boolean]=> Some( key $gt value.toString.toBoolean )
  }
  // --- "$lt" (less-than) builders. ---
  val ltStringMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"lt") if value.isInstanceOf[String]=> Some( key $lt value.toString )
  }
  val ltDoubleMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"lt") if value.isInstanceOf[Double]=> Some( key $lt value.asInstanceOf[Double] )
  }
  val ltIntMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"lt") if value.isInstanceOf[Int]=> Some( key $lt value.toString.toDouble )
  }
  val ltBooleanMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"lt") if value.isInstanceOf[Boolean]=> Some( key $lt value.toString.toBoolean )
  }
  // --- "$gte" (greater-than-or-equal) builders. ---
  val gteStringMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"gte") if value.isInstanceOf[String]=> Some( key $gte value.toString )
  }
  val gteDoubleMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"gte") if value.isInstanceOf[Double]=> Some( key $gte value.asInstanceOf[Double] )
  }
  val gteIntMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"gte") if value.isInstanceOf[Int]=> Some( key $gte value.toString.toDouble )
  }
  val gteBooleanMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"gte") if value.isInstanceOf[Boolean]=> Some( key $gte value.toString.toBoolean )
  }
  // --- "$lte" (less-than-or-equal) builders. ---
  val lteStringMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"lte") if value.isInstanceOf[String]=> Some( key $lte value.toString )
  }
  val lteDoubleMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"lte") if value.isInstanceOf[Double]=> Some( key $lte value.asInstanceOf[Double] )
  }
  val lteIntMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"lte") if value.isInstanceOf[Int]=> Some( key $lte value.toString.toDouble )
  }
  val lteBooleanMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"lte") if value.isInstanceOf[Boolean]=> Some( key $lte value.toString.toBoolean )
  }
  // --- "$eq" (equality) builders. ---
  val eqStringMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"eq") if value.isInstanceOf[String]=> Some( key $eq value.toString )
  }
  val eqDoubleMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"eq") if value.isInstanceOf[Double]=> Some( key $eq value.asInstanceOf[Double] )
  }
  val eqIntMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"eq") if value.isInstanceOf[Int]=> Some( key $eq value.toString.toDouble )
  }
  val eqBooleanMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case (key,value,"eq") if value.isInstanceOf[Boolean]=> Some( key $eq value.toString.toBoolean )
  }
  // Fallback: unsupported (value type, operator) combinations yield None.
  val otherMethod :PartialFunction[(String,Any,String),Option[Imports.DBObject with QueryExpressionObject]]={
    case _=> None
  }
  // Per-operator dispatchers, each trying String, Double, Int then Boolean.
  val gtfilterMethod= gtStringMethod orElse gtDoubleMethod orElse gtIntMethod orElse gtBooleanMethod
  val gtefilterMethod=gteStringMethod orElse gteDoubleMethod orElse gteIntMethod orElse gteBooleanMethod
  val ltfilterMethod=ltStringMethod orElse ltDoubleMethod orElse ltIntMethod orElse ltBooleanMethod
  val ltefilterMethod=lteStringMethod orElse lteDoubleMethod orElse lteIntMethod orElse lteBooleanMethod
  val eqfilterMethod=eqStringMethod orElse eqDoubleMethod orElse eqIntMethod orElse eqBooleanMethod
  // Single entry point: tries every operator family, falling back to None.
  val dbObjectFilter= gtfilterMethod orElse gtefilterMethod orElse ltfilterMethod orElse ltefilterMethod orElse eqfilterMethod orElse otherMethod
  // Converts a StructPath (e.g. "/a/b/c") into a Mongo dotted key ("a.b.c").
  def structPathToString(structPath:StructPath)=
    structPath.relativeStr.drop(1).replace("/",".")
  /**
   * Builds an AND query combining all given conditions.
   * @param params conditions to combine
   * @return the combined query object
   */
  def makeAndFilter(params:Imports.DBObject *): query.Imports.DBObject ={
    $and(params: _*)
  }
  /**
   * Builds an AND query combining all conditions in the list.
   * @param params conditions to combine
   * @return the combined query object
   */
  def makeAndFilter(params:List[Imports.DBObject]): query.Imports.DBObject ={
    makeAndFilter(params: _*)
  }
  /**
   * Builds an OR query combining all given conditions.
   * @param params conditions to combine
   * @return the combined query object
   */
  def makeOrFilter(params:Imports.DBObject *): query.Imports.DBObject ={
    $or(params: _*)
  }
  /**
   * Builds an OR query combining all conditions in the list.
   * @param params conditions to combine
   * @return the combined query object
   */
  def makeOrFilter(params:List[Imports.DBObject]): query.Imports.DBObject ={
    makeOrFilter(params: _*)
  }
  /**
   * Builds a single query condition for the given operator name.
   * @param key document field (dotted Mongo key)
   * @param value comparison value (String, Double, Int or Boolean)
   * @param option operator: eq (equals), lt (less than), lte (less than or
   *               equal), gt (greater than), gte (greater than or equal)
   * @return Some(query) when the (type, operator) pair is supported, else None
   */
  def makeFilterByConditionName(key:String , value:Any,option:String): Option[query.Imports.DBObject]={
    dbObjectFilter(key,value,option)
  }
  /**
   * Builds a single query condition, deriving the key from a StructPath.
   * @param structPath path to the field, converted to a dotted Mongo key
   * @param value comparison value (String, Double, Int or Boolean)
   * @param option operator name (see the String overload)
   * @return Some(query) when supported, else None
   */
  def makeFilterByConditionName(structPath:StructPath , value:Any,option:String): Option[query.Imports.DBObject]={
    val key = structPathToString(structPath)
    dbObjectFilter(key,value,option)
  }
  /**
   * Builds a less-than ("$lt") condition.
   * @param key document field
   * @param value comparison value
   * @return Some(query) when the value type is supported, else None
   */
  def makeLtFilter(key:String , value:Any): Option[query.Imports.DBObject]={
    makeFilterByConditionName(key,value,"lt")
  }
  /**
   * Builds a less-than ("$lt") condition from a StructPath.
   * @param structPath path to the field
   * @param value comparison value
   * @return Some(query) when the value type is supported, else None
   */
  def makeLtFilter(structPath:StructPath , value:Any): Option[query.Imports.DBObject]={
    makeFilterByConditionName(structPath,value,"lt")
  }
  /**
   * Builds a less-than-or-equal ("$lte") condition.
   * @param key document field
   * @param value comparison value
   * @return Some(query) when the value type is supported, else None
   */
  def makeLteFilter(key:String , value:Any): Option[query.Imports.DBObject]={
    makeFilterByConditionName(key,value,"lte")
  }
  /**
   * Builds a less-than-or-equal ("$lte") condition from a StructPath.
   * @param structPath path to the field
   * @param value comparison value
   * @return Some(query) when the value type is supported, else None
   */
  def makeLteFilter(structPath:StructPath , value:Any): Option[query.Imports.DBObject]={
    makeFilterByConditionName(structPath,value,"lte")
  }
  /**
   * Builds a greater-than ("$gt") condition.
   * @param key document field
   * @param value comparison value
   * @return Some(query) when the value type is supported, else None
   */
  def makeGtFilter(key:String , value:Any): Option[query.Imports.DBObject]={
    makeFilterByConditionName(key,value,"gt")
  }
  /**
   * Builds a greater-than ("$gt") condition from a StructPath.
   * @param structPath path to the field
   * @param value comparison value
   * @return Some(query) when the value type is supported, else None
   */
  def makeGtFilter(structPath:StructPath , value:Any): Option[query.Imports.DBObject]={
    makeFilterByConditionName(structPath,value,"gt")
  }
  /**
   * Builds a greater-than-or-equal ("$gte") condition.
   * @param key document field
   * @param value comparison value
   * @return Some(query) when the value type is supported, else None
   */
  def makeGteFilter(key:String , value:Any): Option[query.Imports.DBObject]={
    makeFilterByConditionName(key,value,"gte")
  }
  /**
   * Builds a greater-than-or-equal ("$gte") condition from a StructPath.
   * @param structPath path to the field
   * @param value comparison value
   * @return Some(query) when the value type is supported, else None
   */
  def makeGteFilter(structPath:StructPath , value:Any): Option[query.Imports.DBObject]={
    makeFilterByConditionName(structPath,value,"gte")
  }
  /**
   * Builds an equality ("$eq") condition.
   * @param key document field
   * @param value comparison value
   * @return Some(query) when the value type is supported, else None
   */
  def makeEqFilter(key:String , value:Any): Option[query.Imports.DBObject]={
    makeFilterByConditionName(key,value,"eq")
  }
  /**
   * Builds an equality ("$eq") condition from a StructPath.
   * @param structPath path to the field
   * @param value comparison value
   * @return Some(query) when the value type is supported, else None
   */
  def makeEqFilter(structPath:StructPath , value:Any): Option[query.Imports.DBObject]={
    makeFilterByConditionName(structPath,value,"eq")
  }
}
| stepover/zzb | zzb-storage/src/main/scala/zzb/storage/DBObjectHelper.scala | Scala | mit | 9,226 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.joins
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.physical.Partitioning
import org.apache.spark.sql.execution.{RowIterator, SparkPlan}
import org.apache.spark.sql.execution.metric.SQLMetric
import org.apache.spark.sql.types.{IntegralType, LongType}
/**
 * Shared machinery for hash-based joins: one side (the "build" side) is turned
 * into a [[HashedRelation]] and probed with rows coming from the other (the
 * "streamed" side). Concrete physical operators mix this in and call `join`.
 */
trait HashJoin {
  self: SparkPlan =>

  val leftKeys: Seq[Expression]
  val rightKeys: Seq[Expression]
  val joinType: JoinType
  val buildSide: BuildSide
  val condition: Option[Expression]
  val left: SparkPlan
  val right: SparkPlan

  // Output schema depends on join type: the preserved side keeps its
  // nullability, the outer side's columns become nullable; semi/anti joins
  // only expose the left side, and existence joins append the `exists` flag.
  override def output: Seq[Attribute] = {
    joinType match {
      case Inner =>
        left.output ++ right.output
      case LeftOuter =>
        left.output ++ right.output.map(_.withNullability(true))
      case RightOuter =>
        left.output.map(_.withNullability(true)) ++ right.output
      case j: ExistenceJoin =>
        left.output :+ j.exists
      case LeftExistence(_) =>
        left.output
      case x =>
        throw new IllegalArgumentException(s"HashJoin should not take $x as the JoinType")
    }
  }

  override def outputPartitioning: Partitioning = streamedPlan.outputPartitioning

  // Map (left, right) children onto (build, streamed) roles per buildSide.
  protected lazy val (buildPlan, streamedPlan) = buildSide match {
    case BuildLeft => (left, right)
    case BuildRight => (right, left)
  }

  // Key expressions bound to their child's output, after the optional
  // long-packing rewrite (see HashJoin.rewriteKeyExpr). Both sides must be
  // rewritten identically, which requires matching key types.
  protected lazy val (buildKeys, streamedKeys) = {
    require(leftKeys.map(_.dataType) == rightKeys.map(_.dataType),
      "Join keys from two sides should have same types")
    val lkeys = HashJoin.rewriteKeyExpr(leftKeys).map(BindReferences.bindReference(_, left.output))
    val rkeys = HashJoin.rewriteKeyExpr(rightKeys)
      .map(BindReferences.bindReference(_, right.output))
    buildSide match {
      case BuildLeft => (lkeys, rkeys)
      case BuildRight => (rkeys, lkeys)
    }
  }

  protected def buildSideKeyGenerator(): Projection =
    UnsafeProjection.create(buildKeys)

  protected def streamSideKeyGenerator(): UnsafeProjection =
    UnsafeProjection.create(streamedKeys)

  // Optional non-equi predicate evaluated on the joined row; defaults to
  // always-true when no extra condition was supplied.
  @transient private[this] lazy val boundCondition = if (condition.isDefined) {
    newPredicate(condition.get, streamedPlan.output ++ buildPlan.output)
  } else {
    (r: InternalRow) => true
  }

  protected def createResultProjection(): (InternalRow) => InternalRow = joinType match {
    case LeftExistence(_) =>
      UnsafeProjection.create(output, output)
    case _ =>
      // Always put the stream side on left to simplify implementation
      // both of left and right side could be null
      UnsafeProjection.create(
        output, (streamedPlan.output ++ buildPlan.output).map(_.withNullability(true)))
  }

  // Inner join: emit one joined row per hash match that passes the condition.
  private def innerJoin(
      streamIter: Iterator[InternalRow],
      hashedRelation: HashedRelation): Iterator[InternalRow] = {
    val joinRow = new JoinedRow
    val joinKeys = streamSideKeyGenerator()
    streamIter.flatMap { srow =>
      joinRow.withLeft(srow)
      val matches = hashedRelation.get(joinKeys(srow))
      if (matches != null) {
        matches.map(joinRow.withRight(_)).filter(boundCondition)
      } else {
        Seq.empty
      }
    }
  }

  // Outer join: like innerJoin, but a streamed row with no passing match is
  // emitted once, padded with nulls on the build side.
  private def outerJoin(
      streamedIter: Iterator[InternalRow],
      hashedRelation: HashedRelation): Iterator[InternalRow] = {
    val joinedRow = new JoinedRow()
    val keyGenerator = streamSideKeyGenerator()
    val nullRow = new GenericInternalRow(buildPlan.output.length)

    streamedIter.flatMap { currentRow =>
      val rowKey = keyGenerator(currentRow)
      joinedRow.withLeft(currentRow)
      val buildIter = hashedRelation.get(rowKey)
      new RowIterator {
        private var found = false
        override def advanceNext(): Boolean = {
          while (buildIter != null && buildIter.hasNext) {
            val nextBuildRow = buildIter.next()
            if (boundCondition(joinedRow.withRight(nextBuildRow))) {
              found = true
              return true
            }
          }
          // No match at all for this streamed row: emit it once with nulls.
          if (!found) {
            joinedRow.withRight(nullRow)
            found = true
            return true
          }
          false
        }
        override def getRow: InternalRow = joinedRow
      }.toScala
    }
  }

  // Left semi join: keep a streamed row iff at least one build row matches.
  private def semiJoin(
      streamIter: Iterator[InternalRow],
      hashedRelation: HashedRelation): Iterator[InternalRow] = {
    val joinKeys = streamSideKeyGenerator()
    val joinedRow = new JoinedRow
    streamIter.filter { current =>
      val key = joinKeys(current)
      lazy val buildIter = hashedRelation.get(key)
      !key.anyNull && buildIter != null && (condition.isEmpty || buildIter.exists {
        (row: InternalRow) => boundCondition(joinedRow(current, row))
      })
    }
  }

  // Existence join: emit every streamed row, appending a boolean column that
  // records whether a matching build row exists.
  private def existenceJoin(
      streamIter: Iterator[InternalRow],
      hashedRelation: HashedRelation): Iterator[InternalRow] = {
    val joinKeys = streamSideKeyGenerator()
    val result = new GenericMutableRow(Array[Any](null))
    val joinedRow = new JoinedRow
    streamIter.map { current =>
      val key = joinKeys(current)
      lazy val buildIter = hashedRelation.get(key)
      val exists = !key.anyNull && buildIter != null && (condition.isEmpty || buildIter.exists {
        (row: InternalRow) => boundCondition(joinedRow(current, row))
      })
      result.setBoolean(0, exists)
      joinedRow(current, result)
    }
  }

  // Left anti join: keep a streamed row iff no build row matches (null keys
  // never match, so rows with null keys are always kept).
  private def antiJoin(
      streamIter: Iterator[InternalRow],
      hashedRelation: HashedRelation): Iterator[InternalRow] = {
    val joinKeys = streamSideKeyGenerator()
    val joinedRow = new JoinedRow
    streamIter.filter { current =>
      val key = joinKeys(current)
      lazy val buildIter = hashedRelation.get(key)
      key.anyNull || buildIter == null || (condition.isDefined && !buildIter.exists {
        row => boundCondition(joinedRow(current, row))
      })
    }
  }

  // Dispatches to the strategy for this joinType, then projects each joined
  // row to the operator's output schema while counting output rows.
  protected def join(
      streamedIter: Iterator[InternalRow],
      hashed: HashedRelation,
      numOutputRows: SQLMetric): Iterator[InternalRow] = {

    val joinedIter = joinType match {
      case Inner =>
        innerJoin(streamedIter, hashed)
      case LeftOuter | RightOuter =>
        outerJoin(streamedIter, hashed)
      case LeftSemi =>
        semiJoin(streamedIter, hashed)
      case LeftAnti =>
        antiJoin(streamedIter, hashed)
      case j: ExistenceJoin =>
        existenceJoin(streamedIter, hashed)
      case x =>
        throw new IllegalArgumentException(
          s"BroadcastHashJoin should not take $x as the JoinType")
    }

    val resultProj = createResultProjection
    joinedIter.map { r =>
      numOutputRows += 1
      resultProj(r)
    }
  }
}
object HashJoin {
  /**
   * Try to rewrite the key as LongType so we can use getLong(), if the keys
   * can fit within a single long.
   *
   * If not, returns the original expressions.
   */
  private[joins] def rewriteKeyExpr(keys: Seq[Expression]): Seq[Expression] = {
    assert(keys.nonEmpty)
    // TODO: support BooleanType, DateType and TimestampType
    // Only integral keys whose combined width is at most 8 bytes can be packed.
    if (keys.exists(!_.dataType.isInstanceOf[IntegralType])
      || keys.map(_.dataType.defaultSize).sum > 8) {
      return keys
    }

    var keyExpr: Expression = if (keys.head.dataType != LongType) {
      Cast(keys.head, LongType)
    } else {
      keys.head
    }
    // Pack the remaining keys: shift the accumulator left by the next key's
    // bit width and OR in that key's (masked, zero-extended) bits.
    keys.tail.foreach { e =>
      val bits = e.dataType.defaultSize * 8
      keyExpr = BitwiseOr(ShiftLeft(keyExpr, Literal(bits)),
        BitwiseAnd(Cast(e, LongType), Literal((1L << bits) - 1)))
    }
    keyExpr :: Nil
  }
}
| gioenn/xSpark | sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashJoin.scala | Scala | apache-2.0 | 8,342 |
package co.blocke.scalajack
package mongo
import model._
import java.time._
import java.util.UUID
import org.bson._
import org.bson.types.ObjectId
import TestUtil._
import munit._
import munit.internal.console
import scala.jdk.CollectionConverters._
import co.blocke.scala_reflection.RType
// AnyVal wrapper around OffsetDateTime — presumably exercises value-class
// (de)serialization in the tests below; confirm usage in the full suite.
class WrappedOffsetDateTime(val offsetDateTime: OffsetDateTime) extends AnyVal
class MongoSpec extends FunSuite:
  // Named boolean constants; the fixtures below read better than raw literals.
  val TRUE = true
  val FALSE = false

  // Shared fixture exercising nested case classes, lists, Option, Map,
  // an enum value and numeric fields in one object.
  val data = One(
    "Greg",
    List("a", "b"),
    List(Two("x", FALSE), Two("y", TRUE)),
    Two("Nest!", TRUE),
    Some("wow"),
    Map("hey" -> 17, "you" -> 21),
    TRUE,
    99123986123L,
    Num.C,
    46
  )

  // A def, so every call builds a fresh flavor — presumably to keep per-test
  // configuration (e.g. withHintModifiers) from leaking between tests.
  def mongoScalaJack: JackFlavor[BsonValue] = ScalaJack(MongoFlavor())
  // --- Primitive / scalar round-trip tests: each renders a value to BSON,
  // --- checks the JSON form of the document, then reads it back.
  test("Naked Map support") {
    describe(
      "---------------------------\\n: Mongo Tests (MongoDB) :\\n---------------------------", Console.BLUE
    )
    // NOTE(review): "Prinitives" is a typo for "Primitives" in the section
    // label (runtime string, left unchanged here).
    describe("Prinitives")
    val li = Map("a" -> 1, "b" -> 2, "c" -> 3)
    val dbo: BsonValue = mongoScalaJack.render(li)
    assertEquals(dbo.asDocument.toJson, """{"a": 1, "b": 2, "c": 3}""")
    assertEquals(mongoScalaJack.read[Map[String, Int]](dbo), li)
  }
  test("UUID support") {
    // UUIDs serialize as plain strings in the BSON document.
    val thing = UuidThing(
      "Foo",
      UUID.fromString("1e6c2b31-4dfe-4bf6-a0a0-882caaff0e9c"),
      List(
        UUID.fromString("1e6c2b31-4dfe-4bf6-a0a0-882caaff0e9c"),
        UUID.fromString("1e6c2b31-4dfe-4bf6-a0a0-882caaff0e9c")
      ),
      Some(UUID.fromString("1e6c2b31-4dfe-4bf6-a0a0-882caaff0e9c"))
    )
    val dbo = mongoScalaJack.render(thing)
    assertEquals(dbo.asDocument.toJson,
      """{"name": "Foo", "uuid": "1e6c2b31-4dfe-4bf6-a0a0-882caaff0e9c", "many": ["1e6c2b31-4dfe-4bf6-a0a0-882caaff0e9c", "1e6c2b31-4dfe-4bf6-a0a0-882caaff0e9c"], "maybe": "1e6c2b31-4dfe-4bf6-a0a0-882caaff0e9c"}"""
    )
    val b = mongoScalaJack.read[UuidThing](dbo)
    assertEquals(b, thing)
  }
  test("Misc number primitives support") {
    // Char renders as a 1-character string; Short/Byte render as ints.
    val inst = Loose('A', 1.23F, 15.toShort, 3.toByte)
    val dbo = mongoScalaJack.render(inst)
    assertEquals(dbo.asDocument.toJson,
      """{"a": "A", "b": 1.23, "c": 15, "d": 3}"""
    )
    assertEquals(mongoScalaJack.read[Loose](dbo), inst)
  }
  test("OffsetDateTime support") {
    // Date-times render as BSON $date (epoch millis).
    val t = LocalDate
      .parse("1986-07-01")
      .atTime(OffsetTime.of(LocalTime.MIDNIGHT, ZoneOffset.UTC))
    val thing = JodaThing("Foo", t, List(t, t), Some(t))
    val dbo = mongoScalaJack.render(thing)
    assertEquals(dbo.asDocument.toJson,
      """{"name": "Foo", "dt": {"$date": 520560000000}, "many": [{"$date": 520560000000}, {"$date": 520560000000}], "maybe": {"$date": 520560000000}}"""
    )
    val b = mongoScalaJack.read[JodaThing](dbo)
    assertEquals(b, thing)
  }
  test("ZonedDateTime must work") {
    val inst = SampleZonedDateTime(
      ZonedDateTime.parse("2007-12-03T10:15:30Z[UTC]"),
      ZonedDateTime.parse("2007-12-03T10:15:30Z[UTC]")
    )
    val dbo = mongoScalaJack.render(inst)
    assertEquals(dbo.asDocument.toJson,
      """{"o1": {"$date": 1196676930000}, "o2": {"$date": 1196676930000}}"""
    )
    val b = mongoScalaJack.read[SampleZonedDateTime](dbo)
    assertEquals(b, inst)
  }
  test("Permissives work") {
    // With permissive primitives enabled, a numeric string coerces to Int.
    val bd = new BsonDocument()
    bd.append("name", new BsonString("Fido"))
    bd.append("legs", new BsonString("3"))
    val wPerm = mongoScalaJack.allowPermissivePrimitives()
    assertEquals(wPerm.read[Animal](bd), Animal("Fido", 3))
  }
  test(
    "Case class having List parameter - Foo[A](x:A) where A -> List of simple type"
  ) {
    describe("Basic Collection Support")
    val w = Carry("Trey", Wrap("Hobbies", List(true, true, false), "all"))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Trey", "w": {"name": "Hobbies", "data": [true, true, false], "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[List[Boolean]]](db), w)
  }
  // --- Collections of simple types and of parameterized case classes inside
  // --- generic carriers; None fields are omitted entirely from the document.
  test(
    "Case class having Map parameter - Foo[A](x:A) where A -> Map of simple type"
  ) {
    val w = Carry("Troy", Wrap("Articles", Map("OK" -> 59), "all"))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Troy", "w": {"name": "Articles", "data": {"OK": 59}, "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Map[String, Int]]](db), w)
  }
  test(
    "Case class having Option parameter - Foo[A](x:A) where A -> Option of simple type"
  ) {
    val w = Carry(
      "Terri",
      Wrap("Hobbies", Some(17).asInstanceOf[Option[Int]], "all")
    )
    val x = Carry[Option[Int]]("Terry", Wrap("Hobbies", None, "all"))
    val db = mongoScalaJack.render(w)
    val db2 = mongoScalaJack.render(x)
    assertEquals(db.asDocument.toJson,
      """{"s": "Terri", "w": {"name": "Hobbies", "data": 17, "stuff": "all"}}"""
    )
    // None: the "data" field is absent from the rendered document.
    assertEquals(db2.asDocument.toJson,
      """{"s": "Terry", "w": {"name": "Hobbies", "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Option[Int]]](db), w)
    assertEquals(mongoScalaJack.read[Carry[Option[Int]]](db2), x)
  }
  test(
    "Case class having List of parameterized value - Foo[A](x:List[A]) - where A is a simple type"
  ) {
    val w = BagList("list", List(1, 2, 3))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "list", "many": [1, 2, 3]}"""
    )
    assertEquals(mongoScalaJack.read[BagList[Int]](db), w)
  }
  test(
    "Case class having Map of parameterized value - Foo[A,B](x:Map[A,B]) - where A,B are simple types"
  ) {
    val w = BagMap(5, Map("one" -> true, "two" -> false))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"i": 5, "items": {"one": true, "two": false}}"""
    )
    assertEquals(mongoScalaJack.read[BagMap[Boolean]](db), w)
  }
  test(
    "Case class having Option of parameterized value - Foo[A](x:Option[A]) - where A is a simple type"
  ) {
    val w = BagOpt(1, Some("ok"))
    val x = BagOpt[String](1, None)
    val db = mongoScalaJack.render(w)
    val db2 = mongoScalaJack.render(x)
    assertEquals(db.asDocument.toJson, """{"i": 1, "maybe": "ok"}""")
    assertEquals(db2.asDocument.toJson, """{"i": 1}""")
    assertEquals(mongoScalaJack.read[BagOpt[String]](db), w)
    assertEquals(mongoScalaJack.read[BagOpt[String]](db2), x)
  }
  test(
    "Case class having List parameter - Foo[A](x:A) where A -> List of Bar[Int]"
  ) {
    describe(
      "Advanced Collection Support - collections of parameterized case class"
    )
    val w = Carry(
      "Trey",
      Wrap("Hobbies", List(Zoo("one", 1), Zoo("two", 2)), "all")
    )
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Trey", "w": {"name": "Hobbies", "data": [{"name": "one", "z": 1}, {"name": "two", "z": 2}], "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[List[Zoo[Int]]]](db), w)
  }
  test(
    "Case class having Map parameter - Foo[A](x:A) where A -> Map of Bar[Int,String]"
  ) {
    val w =
      Carry("Troy", Wrap("Articles", Map("OK" -> Zoo("q", false)), "all"))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Troy", "w": {"name": "Articles", "data": {"OK": {"name": "q", "z": false}}, "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Map[String, Zoo[Boolean]]]](db),
      w
    )
  }
  test(
    "Case class having Option parameter - Foo[A](x:A) where A -> Option of Bar[Int]"
  ) {
    val w = Carry(
      "Terri",
      Wrap(
        "Hobbies",
        Some(Zoo("a", "b")).asInstanceOf[Option[Zoo[String]]],
        "all"
      )
    )
    val x = Carry[Option[Int]]("Terry", Wrap("Hobbies", None, "all"))
    val db = mongoScalaJack.render(w)
    val db2 = mongoScalaJack.render(x)
    assertEquals(db.asDocument.toJson,
      """{"s": "Terri", "w": {"name": "Hobbies", "data": {"name": "a", "z": "b"}, "stuff": "all"}}"""
    )
    assertEquals(db2.asDocument.toJson,
      """{"s": "Terry", "w": {"name": "Hobbies", "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Option[Zoo[String]]]](db), w)
    // NOTE(review): plain assert here, assertEquals elsewhere — inconsistent
    // but equivalent.
    assert(mongoScalaJack.read[Carry[Option[Zoo[String]]]](db2) == x)
  }
  // --- Value classes (AnyVal wrappers) unwrap to their underlying primitive
  // --- in the rendered document, inside List/Map/Option containers.
  test(
    "Case class having List parameter - Foo[A](x:A) where A -> List of value class"
  ) {
    val w = Carry(
      "Trey",
      Wrap("Hobbies", List(new Wrapper(99), new Wrapper(100)), "all")
    )
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Trey", "w": {"name": "Hobbies", "data": [99, 100], "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[List[Wrapper]]](db), w)
  }
  test(
    "Case class having Map parameter - Foo[A](x:A) where A -> Map of Bar[String,value class]"
  ) {
    val w =
      Carry("Troy", Wrap("Articles", Map("OK" -> new Wrapper(2)), "all"))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Troy", "w": {"name": "Articles", "data": {"OK": 2}, "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Map[String, Wrapper]]](db), w)
  }
  test(
    "Case class having Option parameter - Foo[A](x:A) where A -> Option of value class"
  ) {
    val w = Carry(
      "Terri",
      Wrap(
        "Hobbies",
        Some(new Wrapper(-2)).asInstanceOf[Option[Wrapper]],
        "all"
      )
    )
    val x = Carry[Option[Wrapper]]("Terry", Wrap("Hobbies", None, "all"))
    val db = mongoScalaJack.render(w)
    val db2 = mongoScalaJack.render(x)
    assertEquals(db.asDocument.toJson,
      """{"s": "Terri", "w": {"name": "Hobbies", "data": -2, "stuff": "all"}}"""
    )
    assertEquals(db2.asDocument.toJson,
      """{"s": "Terry", "w": {"name": "Hobbies", "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Option[Wrapper]]](db), w)
    assertEquals(mongoScalaJack.read[Carry[Option[Wrapper]]](db2), x)
  }
  test(
    "Case class having List of parameterized value - Foo[A](x:List[A]) - where A -> Bar[Int]"
  ) {
    val w = BagList("list", List(Zoo("a", 1), Zoo("b", 2)))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "list", "many": [{"name": "a", "z": 1}, {"name": "b", "z": 2}]}"""
    )
    assertEquals(mongoScalaJack.read[BagList[Zoo[Int]]](db), w)
  }
  test(
    "Case class having Map of parameterized value - Foo[A,B](x:Map[A,B]) - where A,B -> String,Bar[Int]"
  ) {
    val w = BagMap(5, Map("one" -> Zoo("a", 1), "two" -> Zoo("b", 2)))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"i": 5, "items": {"one": {"name": "a", "z": 1}, "two": {"name": "b", "z": 2}}}"""
    )
    assertEquals(mongoScalaJack.read[BagMap[Zoo[Int]]](db), w)
  }
  test(
    "Case class having Option of parameterized value - Foo[A](x:Option[A]) - where A -> Bar[Int]"
  ) {
    val w = Carry(
      "Terri",
      Wrap(
        "Hobbies",
        Some(Truck(false, Two("aaa", true)))
          .asInstanceOf[Option[Truck[Boolean]]],
        "all"
      )
    )
    val x =
      Carry[Option[Truck[Boolean]]]("Terry", Wrap("Hobbies", None, "all"))
    val db = mongoScalaJack.render(w)
    val db2 = mongoScalaJack.render(x)
    assertEquals(db.asDocument.toJson,
      """{"s": "Terri", "w": {"name": "Hobbies", "data": {"s": false, "t": {"foo": "aaa", "bar": true}}, "stuff": "all"}}"""
    )
    assertEquals(db2.asDocument.toJson,
      """{"s": "Terry", "w": {"name": "Hobbies", "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Option[Truck[Boolean]]]](db), w)
    assertEquals(mongoScalaJack.read[Carry[Option[Truck[Boolean]]]](db2), x)
  }
test(
"Case class having List of parameterized value - Foo[A](x:List[A]) - where A -> value class"
) {
val w = BagList(
"list",
List(Zoo("a", new Wrapper(1)), Zoo("b", new Wrapper(2)))
)
val db = mongoScalaJack.render(w)
assertEquals(db.asDocument.toJson,
"""{"s": "list", "many": [{"name": "a", "z": 1}, {"name": "b", "z": 2}]}"""
)
assertEquals(mongoScalaJack.read[BagList[Zoo[Wrapper]]](db), w)
}
test(
"Case class having Map of parameterized value - Foo[A,B](x:Map[A,B]) - where A,B -> String,value class"
) {
val w = BagMap(
5,
Map(
"one" -> Zoo("a", new Wrapper(1)),
"two" -> Zoo("b", new Wrapper(2))
)
)
val db = mongoScalaJack.render(w)
assertEquals(db.asDocument.toJson,
"""{"i": 5, "items": {"one": {"name": "a", "z": 1}, "two": {"name": "b", "z": 2}}}"""
)
assertEquals(mongoScalaJack.read[BagMap[Zoo[Wrapper]]](db), w)
}
test(
"Case class having Option of parameterized value - Foo[A](x:Option[A]) - where A -> value class"
) {
val w = Carry(
"Terri",
Wrap(
"Hobbies",
Some(Zoo("a", new Wrapper(12))).asInstanceOf[Option[Zoo[Wrapper]]],
"all"
)
)
val x =
Carry[Option[Truck[Boolean]]]("Terry", Wrap("Hobbies", None, "all"))
val db = mongoScalaJack.render(w)
val db2 = mongoScalaJack.render(x)
assertEquals(db.asDocument.toJson,
"""{"s": "Terri", "w": {"name": "Hobbies", "data": {"name": "a", "z": 12}, "stuff": "all"}}"""
)
assertEquals(db2.asDocument.toJson,
"""{"s": "Terry", "w": {"name": "Hobbies", "stuff": "all"}}"""
)
assertEquals(mongoScalaJack.read[Carry[Option[Zoo[Wrapper]]]](db), w)
assert(mongoScalaJack.read[Carry[Option[Zoo[Wrapper]]]](db2) == x)
}
  // Trait-typed parameter: the concrete class is recorded via a "_hint" field
  // holding the fully-qualified class name, enabling polymorphic read-back.
  test("Parameter is a simple trait") {
    describe("Basic trait support")
    val w = Carry[Pop]("Surprise", Wrap("Yellow", Wow2("three", 3), "Done"))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Surprise", "w": {"name": "Yellow", "data": {"_hint": "co.blocke.scalajack.mongo.Wow2", "x": "three", "y": 3}, "stuff": "Done"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Pop]](db), w)
  }
  // Hint modifiers shorten the stored hint (class simple name on write,
  // re-qualified with the package prefix on read).
  test("Parameter is a simple trait with hint function value mappings") {
    val w = Carry[Pop]("Surprise", Wrap("Yellow", Wow2("three", 4), "Done"))
    val scalaJack = mongoScalaJack.withHintModifiers(
      RType.of[Pop] -> ClassNameHintModifier(
        hint => s"co.blocke.scalajack.mongo.$hint",
        fullName => fullName.split('.').last
      )
    )
    val db = scalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Surprise", "w": {"name": "Yellow", "data": {"_hint": "Wow2", "x": "three", "y": 4}, "stuff": "Done"}}"""
    )
    assertEquals(scalaJack.read[Carry[Pop]](db), w)
  }
  // A hint modifier that throws during read must surface as a ScalaJackError
  // with a clear "couldn't marshal" message rather than the raw exception.
  test("Hint modifier fails") {
    val w = Carry[Pop]("Surprise", Wrap("Yellow", Wow2("three", 4), "Done"))
    val scalaJack = mongoScalaJack.withHintModifiers(
      RType.of[Pop] -> ClassNameHintModifier(
        hint => throw new Exception("Boom"), // intentional hint mod failure
        fullName => fullName.split('.').last
      )
    )
    val db = scalaJack.render(w)
    interceptMessage[ScalaJackError]("Couldn't marshal class for Wow2"){
      scalaJack.read[Carry[Pop]](db)
    }
  }
  // Type-value modifier: the type member's value ("Giraffe" key) is written as a
  // simple name and re-expanded on read.
  test("Type modifier works") {
    val scalaJack = ScalaJack(MongoFlavor()).withTypeValueModifier(
      ClassNameHintModifier(
        (hint: String) => "co.blocke.scalajack.mongo." + hint,
        (cname: String) => cname.split('.').last
      )
    )
    val value: Envelope[Body] = Envelope("DEF", FancyBody("BOO"))
    val d = scalaJack.render[Envelope[Body]](value)
    assertEquals(d.asDocument.toJson,
      """{"Giraffe": "FancyBody", "id": "DEF", "body": {"message": "BOO"}}"""
    )
    assertEquals(scalaJack.read[Envelope[Body]](d), value)
  }
  // Each element of a List[Pop] carries its own _hint (heterogeneous concrete types).
  test("Parameter is List of trait") {
    val w = Carry[List[Pop]](
      "Surprise",
      Wrap("Yellow", List(Wow1("four", 4), Wow2("three", 3)), "Done")
    )
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Surprise", "w": {"name": "Yellow", "data": [{"_hint": "co.blocke.scalajack.mongo.Wow1", "a": "four", "b": 4}, {"_hint": "co.blocke.scalajack.mongo.Wow2", "x": "three", "y": 3}], "stuff": "Done"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[List[Pop]]](db), w)
  }
  // Map values of trait type likewise hint per-value.
  test("Parameter is Map of String->trait") {
    val w = Carry[Map[String, Pop]](
      "Surprise",
      Wrap(
        "Yellow",
        Map("a" -> Wow1("four", 4), "b" -> Wow2("three", 3)),
        "Done"
      )
    )
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Surprise", "w": {"name": "Yellow", "data": {"a": {"_hint": "co.blocke.scalajack.mongo.Wow1", "a": "four", "b": 4}, "b": {"_hint": "co.blocke.scalajack.mongo.Wow2", "x": "three", "y": 3}}, "stuff": "Done"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Map[String, Pop]]](db), w)
  }
  // Option[Pop]: Some renders the hinted object; None drops the field entirely.
  test("Parameter is an Option of trait") {
    val w = Carry[Option[Pop]](
      "Terri",
      Wrap("Hobbies", Some(Wow1("ok", -99)), "all")
    )
    val x = Carry[Option[Pop]]("Terry", Wrap("Hobbies", None, "all"))
    val db = mongoScalaJack.render(w)
    val db2 = mongoScalaJack.render(x)
    assertEquals(db.asDocument.toJson,
      """{"s": "Terri", "w": {"name": "Hobbies", "data": {"_hint": "co.blocke.scalajack.mongo.Wow1", "a": "ok", "b": -99}, "stuff": "all"}}"""
    )
    assertEquals(db2.asDocument.toJson,
      """{"s": "Terry", "w": {"name": "Hobbies", "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Option[Pop]]](db), w)
    assertEquals(mongoScalaJack.read[Carry[Option[Pop]]](db2), x)
  }
  // Trait used directly as the collection's element type parameter.
  test("List of parameter, where parameter is a trait") {
    val w = BagList[Pop]("list", List(Wow1("A", 1), Wow1("B", 2)))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "list", "many": [{"_hint": "co.blocke.scalajack.mongo.Wow1", "a": "A", "b": 1}, {"_hint": "co.blocke.scalajack.mongo.Wow1", "a": "B", "b": 2}]}"""
    )
    assertEquals(mongoScalaJack.read[BagList[Pop]](db), w)
  }
  test("Map of String->parameter, where parameter is a trait") {
    val w =
      BagMap[Pop](5, Map("one" -> Wow2("q", 7), "two" -> Wow1("r", 3)))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"i": 5, "items": {"one": {"_hint": "co.blocke.scalajack.mongo.Wow2", "x": "q", "y": 7}, "two": {"_hint": "co.blocke.scalajack.mongo.Wow1", "a": "r", "b": 3}}}"""
    )
    assertEquals(mongoScalaJack.read[BagMap[Pop]](db), w)
  }
  test("Option of parameter, where parameter is a trait") {
    val w = Carry[Option[Pop]](
      "Terri",
      Wrap("Hobbies", Some(Wow2("finite", 1000)), "all")
    )
    val x = Carry[Option[Pop]]("Terry", Wrap("Hobbies", None, "all"))
    val db = mongoScalaJack.render(w)
    val db2 = mongoScalaJack.render(x)
    assertEquals(db.asDocument.toJson,
      """{"s": "Terri", "w": {"name": "Hobbies", "data": {"_hint": "co.blocke.scalajack.mongo.Wow2", "x": "finite", "y": 1000}, "stuff": "all"}}"""
    )
    assertEquals(db2.asDocument.toJson,
      """{"s": "Terry", "w": {"name": "Hobbies", "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Option[Pop]]](db), w)
    assertEquals(mongoScalaJack.read[Carry[Option[Pop]]](db2), x)
  }
  // Embedded parameterized trait (Tart[String] realized as Toast): the hint
  // identifies the concrete class; the trait's own type parameter is implied
  // by the read-side type argument (Breakfast[String]).
  test("Case class having an embedded parameterized trait") {
    describe(
      "Advanced trait support -- parameters are traits, themselves having parameters"
    )
    val w = Breakfast(true, Toast(7, "Burnt"))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"y": true, "bread": {"_hint": "co.blocke.scalajack.mongo.Toast", "g": 7, "yum": "Burnt"}}"""
    )
    assertEquals(mongoScalaJack.read[Breakfast[String]](db), w)
  }
  // Same, but the trait's type parameter is a case class...
  test(
    "Case class having an embedded parameterized trait, with the trait's parameter another case class"
  ) {
    val w = Breakfast(true, Toast(7, Two("two", true)))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"y": true, "bread": {"_hint": "co.blocke.scalajack.mongo.Toast", "g": 7, "yum": {"foo": "two", "bar": true}}}"""
    )
    assertEquals(mongoScalaJack.read[Breakfast[Two]](db), w)
  }
  // ...and a value class, which again collapses to its underlying value.
  test(
    "Case class having an embedded parameterized trait, with the trait's parameter a value class"
  ) {
    val w = Breakfast(true, Toast(7, new Wrapper(-100)))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"y": true, "bread": {"_hint": "co.blocke.scalajack.mongo.Toast", "g": 7, "yum": -100}}"""
    )
    assertEquals(mongoScalaJack.read[Breakfast[Wrapper]](db), w)
  }
  // Nested parameterized traits: Tart[Soup[String]] realized as Bun(Cruton(...)).
  // Both layers get their own _hint so the full polymorphic shape round-trips.
  test("Parameter is a parameterized trait") { // I can't believe this one worked!
    val w = Carry[Tart[Soup[String]]](
      "Bill",
      Wrap("Betty", Bun(3, Cruton(8, "eight")), "ok")
    )
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Bill", "w": {"name": "Betty", "data": {"_hint": "co.blocke.scalajack.mongo.Bun", "g": 3, "yum": {"_hint": "co.blocke.scalajack.mongo.Cruton", "i": 8, "sweet": "eight"}}, "stuff": "ok"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Tart[Soup[String]]]](db), w)
  }
  // Mixed concrete types (Bun, Toast) inside a List[Tart[Boolean]].
  test("Parameter is List of parameterized trait") {
    val w = Carry[List[Tart[Boolean]]](
      "Trey",
      Wrap("Hobbies", List(Bun(1, false), Toast(2, true)), "all")
    )
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Trey", "w": {"name": "Hobbies", "data": [{"_hint": "co.blocke.scalajack.mongo.Bun", "g": 1, "yum": false}, {"_hint": "co.blocke.scalajack.mongo.Toast", "g": 2, "yum": true}], "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[List[Tart[Boolean]]]](db), w)
  }
  test("Parameter is Map of String->parameterized trait") {
    val w = Carry[Map[String, Tart[String]]](
      "Troy",
      Wrap("Articles", Map("OK" -> Bun(27, "Hot")), "all")
    )
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Troy", "w": {"name": "Articles", "data": {"OK": {"_hint": "co.blocke.scalajack.mongo.Bun", "g": 27, "yum": "Hot"}}, "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Map[String, Tart[String]]]](db),
      w
    )
  }
  test("Parameter is an Option of parameterized trait") {
    val w = Carry[Option[Tart[Int]]](
      "Terri",
      Wrap("Hobbies", Some(Toast(11, 12)), "all")
    )
    val x = Carry[Option[Tart[Int]]]("Terry", Wrap("Hobbies", None, "all"))
    val db = mongoScalaJack.render(w)
    val db2 = mongoScalaJack.render(x)
    assertEquals(db.asDocument.toJson,
      """{"s": "Terri", "w": {"name": "Hobbies", "data": {"_hint": "co.blocke.scalajack.mongo.Toast", "g": 11, "yum": 12}, "stuff": "all"}}"""
    )
    assertEquals(db2.asDocument.toJson,
      """{"s": "Terry", "w": {"name": "Hobbies", "stuff": "all"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Option[Tart[Int]]]](db), w)
    assertEquals(mongoScalaJack.read[Carry[Option[Tart[Int]]]](db2), x)
  }
  test("List of parameter, where parameter is a parameterized trait") {
    val w =
      BagList[Tart[Boolean]]("list", List(Toast(1, true), Bun(2, false)))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "list", "many": [{"_hint": "co.blocke.scalajack.mongo.Toast", "g": 1, "yum": true}, {"_hint": "co.blocke.scalajack.mongo.Bun", "g": 2, "yum": false}]}"""
    )
    assertEquals(mongoScalaJack.read[BagList[Tart[Boolean]]](db), w)
  }
  test("Map of String->parameter, where parameter is a parameterized trait") {
    val w = BagMap[Tart[Boolean]](
      5,
      Map("one" -> Bun(1, true), "two" -> Toast(2, false))
    )
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"i": 5, "items": {"one": {"_hint": "co.blocke.scalajack.mongo.Bun", "g": 1, "yum": true}, "two": {"_hint": "co.blocke.scalajack.mongo.Toast", "g": 2, "yum": false}}}"""
    )
    assertEquals(mongoScalaJack.read[BagMap[Tart[Boolean]]](db), w)
  }
  // None in an Option field again drops the key ("maybe" absent in db2).
  test("Option of parameter, where parameter is a parameterized trait") {
    val w = BagOpt[Tart[String]](1, Some(Bun(6, "ok")))
    val x = BagOpt[Tart[String]](1, None)
    val db = mongoScalaJack.render(w)
    val db2 = mongoScalaJack.render(x)
    assertEquals(db.asDocument.toJson,
      """{"i": 1, "maybe": {"_hint": "co.blocke.scalajack.mongo.Bun", "g": 6, "yum": "ok"}}"""
    )
    assertEquals(db2.asDocument.toJson, """{"i": 1}""")
    assertEquals(mongoScalaJack.read[BagOpt[Tart[String]]](db), w)
    assertEquals(mongoScalaJack.read[BagOpt[Tart[String]]](db2), x)
  }
  // @DBKey on a single field: that field is written under Mongo's "_id" key
  // instead of its Scala name, and mapped back on read.
  test("DBKey Annotation (_id field generation) - single key") {
    describe("Annotations (e.g. DBKey)")
    val five = Five("Fred", Two("blah", true))
    val dbo = mongoScalaJack.render(five)
    assertEquals(dbo.asDocument.toJson,
      """{"_id": "Fred", "two": {"foo": "blah", "bar": true}}"""
    )
    assertEquals(mongoScalaJack.read[Five](dbo), five)
  }
test(
"DBKey Annotation (_id field generation) - single key -- Missing Non-Key Field"
) {
val dbo = new BsonDocument(
List(
new BsonElement("_id", new BsonString("Fred")),
new BsonElement(
"two",
new BsonDocument(
List(new BsonElement("bar", new BsonBoolean(true))).asJava
)
)
).asJava
)
assertEquals(dbo.toJson, """{"_id": "Fred", "two": {"bar": true}}""")
val msg = """Class co.blocke.scalajack.mongo.Two missing required fields: foo""".stripMargin
interceptMessage[ScalaJackError](msg){
mongoScalaJack.read[Five](dbo)
}
}
  // A document lacking the "_id" key entirely must be rejected with the
  // dedicated missing-key error (distinct from the missing-field error above).
  test(
    "DBKey Annotation (_id field generation) - single key -- Missing Key Field"
  ) {
    // Hand-built document with only the non-key "two" sub-document.
    val dbo = new BsonDocument(
      List(
        new BsonElement(
          "two",
          new BsonDocument(
            List(
              new BsonElement("foo", new BsonString("blah")),
              new BsonElement("bar", new BsonBoolean(true))
            ).asJava
          )
        )
      ).asJava
    )
    assertEquals(dbo.toJson, """{"two": {"foo": "blah", "bar": true}}""")
    val msg =
      """Missing key (_id) field, or a component of a compound key field"""
    interceptMessage[ScalaJackError](msg){
      mongoScalaJack.read[Five](dbo)
    }
  }
  // Multiple @DBKey fields form a compound key: "_id" becomes a sub-document
  // holding each keyed field under its own name.
  test("DBKey Annotation (_id field generation) - compound key") {
    val six = Six("Fred", 12, Two("blah", true))
    val dbo = mongoScalaJack.render(six)
    assertEquals(dbo.asDocument.toJson,
      """{"_id": {"name": "Fred", "num": 12}, "two": {"foo": "blah", "bar": true}}"""
    )
    assertEquals(mongoScalaJack.read[Six](dbo), six)
  }
  // A compound key missing one component ("num") reports it as a missing
  // required field of the target class.
  test(
    "DBKey Annotation (_id field generation) - compound key -- Missing Key Field component"
  ) {
    val js =
      """{"_id": {"name": "Fred"},"two": {"foo": "blah", "bar": true}}"""
    val dbo = BsonDocument.parse(js)
    val msg =
      """Class co.blocke.scalajack.mongo.Six missing required fields: num"""
    interceptMessage[ScalaJackError](msg){
      mongoScalaJack.read[Six](dbo)
    }
  }
  // A Mongo ObjectId field renders in extended-JSON form: {"$oid": "<hex>"}.
  test("ObjectId support -- Mongo") {
    describe("Mongo ObjectID")
    // val oid = (new BsonObjectId()).getValue()
    val oid = new ObjectId()
    val seven = Seven(oid, Two("blah", true))
    val dbo = mongoScalaJack.render(seven)
    assertEquals(dbo.asDocument.toJson,
      s"""{"_id": {"$$oid": "${oid.toString}"}, "two": {"foo": "blah", "bar": true}}"""
    )
    assertEquals(mongoScalaJack.read[Seven](dbo), seven)
  }
  // A null ObjectId must still round-trip (no JSON string asserted here).
  test("ObjectId support (null) -- Mongo") {
    val seven = Seven(null, Two("blah", bar = true))
    val dbo = mongoScalaJack.render(seven)
    assertEquals(mongoScalaJack.read[Seven](dbo), seven)
  }
  // When all values (including defaulted ones) are given explicitly, the
  // explicit values win and appear in the rendered document.
  test("Must handle a case class with default values - defaults specified") {
    describe("Basic Case Class Support")
    val wd = WithDefaults(
      "Greg",
      49,
      Some(5),
      Some(false),
      GrumpyPet(Cat("Fluffy"), "fish")
    )
    val dbo = mongoScalaJack.render(wd)
    assertEquals(dbo.asDocument.toJson,
      """{"name": "Greg", "age": 49, "num": 5, "hasStuff": false, "pet": {"_hint": "co.blocke.scalajack.mongo.GrumpyPet", "kind": {"_hint": "co.blocke.scalajack.mongo.Cat", "name": "Fluffy"}, "food": "fish"}}"""
    )
    val b = mongoScalaJack.read[WithDefaults](dbo)
    assertEquals(b, wd)
  }
  // An Any-typed field serializes the runtime class with a _hint so it can be
  // reconstituted on read.
  test("Case class as value for Any parameter") {
    val f = Flexible("foo", Two("bar", bar = true))
    val d = mongoScalaJack.render(f)
    assertEquals(d.toString,
      """{"name": "foo", "dunno": {"_hint": "co.blocke.scalajack.mongo.Two", "foo": "bar", "bar": true}}"""
    )
    assertEquals(mongoScalaJack.read[Flexible](d), f)
  }
  // Omitted constructor args fall back to their declared defaults, and those
  // defaults are rendered (hasStuff=true, pet=NicePet(Dog("Fido"),"bones")).
  test(
    "Must handle a case class with default values - defaults not specified"
  ) {
    val wd = WithDefaults("Greg", 49, None)
    val dbo = mongoScalaJack.render(wd)
    assertEquals(dbo.asDocument.toJson,
      """{"name": "Greg", "age": 49, "hasStuff": true, "pet": {"_hint": "co.blocke.scalajack.mongo.NicePet", "kind": {"_hint": "co.blocke.scalajack.mongo.Dog", "name": "Fido"}, "food": "bones"}}"""
    )
    val b = mongoScalaJack.read[WithDefaults](dbo)
    assertEquals(b, wd)
  }
  // Same parameterized class rendered with two different concrete type args;
  // the read-side type argument drives how "stuff" is materialized.
  test("Simple parameters - Foo[A](x:A) where A -> simple type") {
    describe("Basic Parameterized Case Class")
    val w = Wrap("number", true, 15)
    val w2 = Wrap("number", true, "wow")
    val db = mongoScalaJack.render(w)
    val db2 = mongoScalaJack.render(w2)
    assertEquals(db.asDocument.toJson,
      """{"name": "number", "data": true, "stuff": 15}"""
    )
    assertEquals(db2.asDocument.toJson,
      """{"name": "number", "data": true, "stuff": "wow"}"""
    )
    assertEquals(mongoScalaJack.read[Wrap[Boolean, Int]](db), w)
    assertEquals(mongoScalaJack.read[Wrap[Boolean, String]](db2), w2)
  }
  // (Test name keeps the original's "clase" spelling -- it is the runtime test id.)
  test(
    "Non-parameter case clase as a field member - Foo[A](x:A, b:Bar) where A -> simple type"
  ) {
    val w = Truck(false, Two("z", true))
    val dbo = mongoScalaJack.render(w)
    assertEquals(dbo.asDocument.toJson,
      """{"s": false, "t": {"foo": "z", "bar": true}}"""
    )
    assertEquals(mongoScalaJack.read[Truck[Boolean]](dbo), w)
  }
  test("Non-parameter case class as a parameter - Foo[A](x:A) where A -> Bar") {
    val w = Wrap("number", true, Two("a", false))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"name": "number", "data": true, "stuff": {"foo": "a", "bar": false}}"""
    )
    assertEquals(mongoScalaJack.read[Wrap[Boolean, Two]](db), w)
  }
  // Nested type parameters: Carry[A]'s A is itself the inner Wrap's parameter,
  // exercised with Int, Boolean, and a case class.
  test(
    "Parameterized case class as parameter - Foo[A](x:A) where A -> Bar[Int]"
  ) {
    describe("Advanced Parameterized Case Class")
    val w = Carry("Bob", Wrap("Mary", 3, "Available"))
    val x = Carry("Mary", Wrap("Greg", false, "Done"))
    val y = Carry("Fred", Wrap("Mike", Two("Steam", true), "OK"))
    val db = mongoScalaJack.render(w)
    val db2 = mongoScalaJack.render(x)
    val db3 = mongoScalaJack.render(y)
    assertEquals(db.asDocument.toJson,
      """{"s": "Bob", "w": {"name": "Mary", "data": 3, "stuff": "Available"}}"""
    )
    assertEquals(db2.asDocument.toJson,
      """{"s": "Mary", "w": {"name": "Greg", "data": false, "stuff": "Done"}}"""
    )
    assertEquals(db3.asDocument.toJson,
      """{"s": "Fred", "w": {"name": "Mike", "data": {"foo": "Steam", "bar": true}, "stuff": "OK"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Int]](db), w)
    assertEquals(mongoScalaJack.read[Carry[Boolean]](db2), x)
    assertEquals(mongoScalaJack.read[Carry[Two]](db3), y)
  }
test(
"Case class having value class parameter - Foo[A](x:A) where A -> value class (no value class handler)"
) {
val w = Carry("Mike", Wrap("Sally", new Wrapper(15), "Fine"))
val db = mongoScalaJack.render(w)
assertEquals(db.asDocument.asDocument.toJson,
"""{"s": "Mike", "w": {"name": "Sally", "data": 15, "stuff": "Fine"}}"""
)
assertEquals(mongoScalaJack.read[Carry[Wrapper]](db), w)
}
  // WrappedOffsetDateTime has a registered value-class handler: instead of the
  // raw epoch number it renders Mongo's extended-JSON date form {"$date": millis}.
  test(
    "Case class having value class parameter - Foo[A](x:A) where A -> value class (WITH value class handler)"
  ) {
    val offsetDateTime =
      OffsetDateTime.of(2015, 7, 1, 0, 0, 0, 0, ZoneOffset.UTC)
    val w = Carry(
      "Mike",
      Wrap("Sally", new WrappedOffsetDateTime(offsetDateTime), "Fine")
    )
    val db = mongoScalaJack.render(w)
    val timeval = offsetDateTime.toInstant.toEpochMilli
    assertEquals(db.asDocument.toJson,
      s"""{"s": "Mike", "w": {"name": "Sally", "data": {"$$date": $timeval}, "stuff": "Fine"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[WrappedOffsetDateTime]](db), w)
  }
  // Deeply nested parameterization: Carry[Zoo[Boolean]] round-trips with the
  // inner parameterized case class rendered inline (no hint needed -- concrete type).
  test(
    "Case class having parameterized case class as a parameter: Foo[A](x:A) where A -> Bar[Blah[Long]]"
  ) {
    val w = Carry("Bill", Wrap("Betty", Zoo("dog", false), "ok"))
    val db = mongoScalaJack.render(w)
    assertEquals(db.asDocument.toJson,
      """{"s": "Bill", "w": {"name": "Betty", "data": {"name": "dog", "z": false}, "stuff": "ok"}}"""
    )
    assertEquals(mongoScalaJack.read[Carry[Zoo[Boolean]]](db), w)
  }
| gzoller/ScalaJack | mongo/src/test/scala/co.blocke.scalajack/mongo/MongoSpec.scala | Scala | mit | 33,803 |
/*
Copyright (c) 2012, The Children's Hospital of Philadelphia All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.chop.cbmi.dataExpress.test.dataModels
import org.scalatest._
import org.scalatest.junit.JUnitRunner
import org.scalatest.{GivenWhenThen, FunSpec}
import org.scalatest.matchers.ShouldMatchers
import edu.chop.cbmi.dataExpress.dataModels.DataRow
import edu.chop.cbmi.dataExpress.exceptions.ColumnDoesNotExist
import collection.mutable.ListBuffer
import scala.language.implicitConversions
/**
* Created by IntelliJ IDEA.
* User: masinoa
* Date: 12/1/11
* Time: 2:05 PM
* To change this template use File | Settings | File Templates.
*/
class DataRowSpec extends FunSpec with GivenWhenThen with ShouldMatchers {
  // Exercises the four access styles of DataRow (Seq iteration, Dynamic dot
  // notation, apply(String), apply(Int)) plus its error and immutability contracts.
  describe("A DataRow object") {
    it("should function as a Seq with dot notation and map like access to elements") {
      Given("a list of column names and a list of data items")
      val names = List("a", "b")
      val values = List(1, 2)
      // Cell values are wrapped in Option (Some) by the caller.
      val row = DataRow(names)(values map (Some(_)))
      When("treated as a normal seq, allow iterations")
      // /: is foldLeft: seeds with the first cell's value, adds the rest.
      val sum = (row.head.get /: row.tail)(_ + _.get)
      sum should equal(3)
      When("accessed using dot notation with column names, enable dynamic method calls")
      (row.a.get + row.b.get) should equal(3)
      When("accessed using map notation with column names, enable the apply(string) call")
      (row("a").get + row("b").get) should equal(3)
      When("accessed using index notation, enable apply(int) call")
      (row(0).get + row(1).get) should equal(3)
      And("should throw a columnDoesNotExist exception if accessed with an invalid column name")
      intercept[ColumnDoesNotExist] {
        row.c
      }
      intercept[ColumnDoesNotExist] {
        row("c")
      }
      Given("variable argument length of (String,Any) should return a DataRow[Any]")
      val m = ListBuffer("fn" -> "Jane", "ln" -> "Doe", "age" -> 10)
      val row2 = DataRow(m: _*)
      row2.fn should equal(Some("Jane"))
      row2.fn.get should equal("Jane")
      And("the DataRow should be immutable even though the map is mutable")
      // Mutating the source buffer after construction must not be visible in row2.
      m += "gender" -> "female"
      m(0) = "fn" -> "Jen"
      row2.fn.get should equal("Jane")
      intercept[ColumnDoesNotExist] {
        row2.gender
      }
    }
  }
} | chop-dbhi/dataexpress | src/test/scala/edu/chop/cbmi/dataExpress/test/dataModels/DataRowSpec.scala | Scala | bsd-2-clause | 3,532 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic._
import org.scalatest.exceptions.TestFailedException
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
class ShouldEqualToleranceSpec extends AnyFunSpec with Tolerance {
  // Fixture values: a positive and a negative "seven" for every numeric type,
  // so each +- pairing below is exercised on both sides of zero.
  val sevenDotOh = 7.0
  val minusSevenDotOh = -7.0
  val sevenDotOhFloat = 7.0f
  val minusSevenDotOhFloat = -7.0f
  val sevenLong = 7L
  val minusSevenLong = -7L
  val sevenInt = 7
  val minusSevenInt = -7
  val sevenShort: Short = 7
  val minusSevenShort: Short = -7
  val sevenByte: Byte = 7
  val minusSevenByte: Byte = -7
/*
I decided that for X +- Y, Y can be any numeric type that's implicitly
convertible to X. So if X is Double, Y could be Double, Float, Long, Int, Short, Byte.
If X is Long, Y could be Long, Int, Short, Byte. If X is Short, Y could be Short or Byte.
And if X is Byte, Y must be Byte.
assert(minusSevenDotOhFloat === (-6.8f +- 0.2d))
*/
/* Chose not to do the symmetry, because no one needs it and implementing it would require an implicit. So these fail:
(7.1 +- 0.2) should equal sevenDotOh
(7.5 +- 0.2) should not equal sevenDotOh
*/
describe("The should equal syntax") {
it("should succeed if the number is within the given interval") {
// Double +- Double
sevenDotOh should equal (7.1 +- 0.2)
sevenDotOh should equal (6.9 +- 0.2)
sevenDotOh should equal (7.0 +- 0.2)
sevenDotOh should equal (7.2 +- 0.2)
sevenDotOh should equal (6.8 +- 0.2)
minusSevenDotOh should equal (-7.1 +- 0.2)
minusSevenDotOh should equal (-6.9 +- 0.2)
minusSevenDotOh should equal (-7.0 +- 0.2)
minusSevenDotOh should equal (-7.2 +- 0.2)
minusSevenDotOh should equal (-6.8 +- 0.2)
// Double +- Float
sevenDotOh should equal (7.1 +- 0.2f)
sevenDotOh should equal (6.9 +- 0.2f)
sevenDotOh should equal (7.0 +- 0.2f)
sevenDotOh should equal (7.2 +- 0.2f)
sevenDotOh should equal (6.8 +- 0.2f)
minusSevenDotOh should equal (-7.1 +- 0.2f)
minusSevenDotOh should equal (-6.9 +- 0.2f)
minusSevenDotOh should equal (-7.0 +- 0.2f)
minusSevenDotOh should equal (-7.2 +- 0.2f)
minusSevenDotOh should equal (-6.8 +- 0.2f)
// Double +- Long
sevenDotOh should equal (7.1 +- 2L)
sevenDotOh should equal (6.9 +- 2L)
sevenDotOh should equal (7.0 +- 2L)
sevenDotOh should equal (7.2 +- 2L)
sevenDotOh should equal (6.8 +- 2L)
minusSevenDotOh should equal (-7.1 +- 2L)
minusSevenDotOh should equal (-6.9 +- 2L)
minusSevenDotOh should equal (-7.0 +- 2L)
minusSevenDotOh should equal (-7.2 +- 2L)
minusSevenDotOh should equal (-6.8 +- 2L)
// Double +- Int
sevenDotOh should equal (7.1 +- 2)
sevenDotOh should equal (6.9 +- 2)
sevenDotOh should equal (7.0 +- 2)
sevenDotOh should equal (7.2 +- 2)
sevenDotOh should equal (6.8 +- 2)
minusSevenDotOh should equal (-7.1 +- 2)
minusSevenDotOh should equal (-6.9 +- 2)
minusSevenDotOh should equal (-7.0 +- 2)
minusSevenDotOh should equal (-7.2 +- 2)
minusSevenDotOh should equal (-6.8 +- 2)
// Double +- Short
sevenDotOh should equal (7.1 +- 2.toShort)
sevenDotOh should equal (6.9 +- 2.toShort)
sevenDotOh should equal (7.0 +- 2.toShort)
sevenDotOh should equal (7.2 +- 2.toShort)
sevenDotOh should equal (6.8 +- 2.toShort)
minusSevenDotOh should equal (-7.1 +- 2.toShort)
minusSevenDotOh should equal (-6.9 +- 2.toShort)
minusSevenDotOh should equal (-7.0 +- 2.toShort)
minusSevenDotOh should equal (-7.2 +- 2.toShort)
minusSevenDotOh should equal (-6.8 +- 2.toShort)
// Double +- Byte
sevenDotOh should equal (7.1 +- 2.toByte)
sevenDotOh should equal (6.9 +- 2.toByte)
sevenDotOh should equal (7.0 +- 2.toByte)
sevenDotOh should equal (7.2 +- 2.toByte)
sevenDotOh should equal (6.8 +- 2.toByte)
minusSevenDotOh should equal (-7.1 +- 2.toByte)
minusSevenDotOh should equal (-6.9 +- 2.toByte)
minusSevenDotOh should equal (-7.0 +- 2.toByte)
minusSevenDotOh should equal (-7.2 +- 2.toByte)
minusSevenDotOh should equal (-6.8 +- 2.toByte)
// Float +- Float
sevenDotOhFloat should equal (7.1f +- 0.2f)
sevenDotOhFloat should equal (6.9f +- 0.2f)
sevenDotOhFloat should equal (7.0f +- 0.2f)
sevenDotOhFloat should equal (7.2f +- 0.2f)
sevenDotOhFloat should equal (6.8f +- 0.2f)
minusSevenDotOhFloat should equal (-7.1f +- 0.2f)
minusSevenDotOhFloat should equal (-6.9f +- 0.2f)
minusSevenDotOhFloat should equal (-7.0f +- 0.2f)
minusSevenDotOhFloat should equal (-7.2f +- 0.2f)
minusSevenDotOhFloat should equal (-6.8f +- 0.2f)
// Float +- Long
sevenDotOhFloat should equal (7.1f +- 2L)
sevenDotOhFloat should equal (6.9f +- 2L)
sevenDotOhFloat should equal (7.0f +- 2L)
sevenDotOhFloat should equal (7.2f +- 2L)
sevenDotOhFloat should equal (6.8f +- 2L)
minusSevenDotOhFloat should equal (-7.1f +- 2L)
minusSevenDotOhFloat should equal (-6.9f +- 2L)
minusSevenDotOhFloat should equal (-7.0f +- 2L)
minusSevenDotOhFloat should equal (-7.2f +- 2L)
minusSevenDotOhFloat should equal (-6.8f +- 2L)
// Float +- Int
sevenDotOhFloat should equal (7.1f +- 2)
sevenDotOhFloat should equal (6.9f +- 2)
sevenDotOhFloat should equal (7.0f +- 2)
sevenDotOhFloat should equal (7.2f +- 2)
sevenDotOhFloat should equal (6.8f +- 2)
minusSevenDotOhFloat should equal (-7.1f +- 2)
minusSevenDotOhFloat should equal (-6.9f +- 2)
minusSevenDotOhFloat should equal (-7.0f +- 2)
minusSevenDotOhFloat should equal (-7.2f +- 2)
minusSevenDotOhFloat should equal (-6.8f +- 2)
// Float +- Short
sevenDotOhFloat should equal (7.1f +- 2.toShort)
sevenDotOhFloat should equal (6.9f +- 2.toShort)
sevenDotOhFloat should equal (7.0f +- 2.toShort)
sevenDotOhFloat should equal (7.2f +- 2.toShort)
sevenDotOhFloat should equal (6.8f +- 2.toShort)
minusSevenDotOhFloat should equal (-7.1f +- 2.toShort)
minusSevenDotOhFloat should equal (-6.9f +- 2.toShort)
minusSevenDotOhFloat should equal (-7.0f +- 2.toShort)
minusSevenDotOhFloat should equal (-7.2f +- 2.toShort)
minusSevenDotOhFloat should equal (-6.8f +- 2.toShort)
// Float +- Byte
sevenDotOhFloat should equal (7.1f +- 2.toByte)
sevenDotOhFloat should equal (6.9f +- 2.toByte)
sevenDotOhFloat should equal (7.0f +- 2.toByte)
sevenDotOhFloat should equal (7.2f +- 2.toByte)
sevenDotOhFloat should equal (6.8f +- 2.toByte)
minusSevenDotOhFloat should equal (-7.1f +- 2.toByte)
minusSevenDotOhFloat should equal (-6.9f +- 2.toByte)
minusSevenDotOhFloat should equal (-7.0f +- 2.toByte)
minusSevenDotOhFloat should equal (-7.2f +- 2.toByte)
minusSevenDotOhFloat should equal (-6.8f +- 2.toByte)
// Long +- Long
sevenLong should equal (9L +- 2L)
sevenLong should equal (8L +- 2L)
sevenLong should equal (7L +- 2L)
sevenLong should equal (6L +- 2L)
sevenLong should equal (5L +- 2L)
minusSevenLong should equal (-9L +- 2L)
minusSevenLong should equal (-8L +- 2L)
minusSevenLong should equal (-7L +- 2L)
minusSevenLong should equal (-6L +- 2L)
minusSevenLong should equal (-5L +- 2L)
// Long +- Int
sevenLong should equal (9L +- 2)
sevenLong should equal (8L +- 2)
sevenLong should equal (7L +- 2)
sevenLong should equal (6L +- 2)
sevenLong should equal (5L +- 2)
minusSevenLong should equal (-9L +- 2)
minusSevenLong should equal (-8L +- 2)
minusSevenLong should equal (-7L +- 2)
minusSevenLong should equal (-6L +- 2)
minusSevenLong should equal (-5L +- 2)
// Long +- Short
sevenLong should equal (9L +- 2.toShort)
sevenLong should equal (8L +- 2.toShort)
sevenLong should equal (7L +- 2.toShort)
sevenLong should equal (6L +- 2.toShort)
sevenLong should equal (5L +- 2.toShort)
minusSevenLong should equal (-9L +- 2.toShort)
minusSevenLong should equal (-8L +- 2.toShort)
minusSevenLong should equal (-7L +- 2.toShort)
minusSevenLong should equal (-6L +- 2.toShort)
minusSevenLong should equal (-5L +- 2.toShort)
// Long +- Byte
sevenLong should equal (9L +- 2.toByte)
sevenLong should equal (8L +- 2.toByte)
sevenLong should equal (7L +- 2.toByte)
sevenLong should equal (6L +- 2.toByte)
sevenLong should equal (5L +- 2.toByte)
minusSevenLong should equal (-9L +- 2.toByte)
minusSevenLong should equal (-8L +- 2.toByte)
minusSevenLong should equal (-7L +- 2.toByte)
minusSevenLong should equal (-6L +- 2.toByte)
minusSevenLong should equal (-5L +- 2.toByte)
// Int +- Int
sevenInt should equal (9 +- 2)
sevenInt should equal (8 +- 2)
sevenInt should equal (7 +- 2)
sevenInt should equal (6 +- 2)
sevenInt should equal (5 +- 2)
minusSevenInt should equal (-9 +- 2)
minusSevenInt should equal (-8 +- 2)
minusSevenInt should equal (-7 +- 2)
minusSevenInt should equal (-6 +- 2)
minusSevenInt should equal (-5 +- 2)
// Int +- Short
sevenInt should equal (9 +- 2.toShort)
sevenInt should equal (8 +- 2.toShort)
sevenInt should equal (7 +- 2.toShort)
sevenInt should equal (6 +- 2.toShort)
sevenInt should equal (5 +- 2.toShort)
minusSevenInt should equal (-9 +- 2.toShort)
minusSevenInt should equal (-8 +- 2.toShort)
minusSevenInt should equal (-7 +- 2.toShort)
minusSevenInt should equal (-6 +- 2.toShort)
minusSevenInt should equal (-5 +- 2.toShort)
// Int +- Byte
sevenInt should equal (9 +- 2.toByte)
sevenInt should equal (8 +- 2.toByte)
sevenInt should equal (7 +- 2.toByte)
sevenInt should equal (6 +- 2.toByte)
sevenInt should equal (5 +- 2.toByte)
minusSevenInt should equal (-9 +- 2.toByte)
minusSevenInt should equal (-8 +- 2.toByte)
minusSevenInt should equal (-7 +- 2.toByte)
minusSevenInt should equal (-6 +- 2.toByte)
minusSevenInt should equal (-5 +- 2.toByte)
// Short +- Short
sevenShort should equal (9.toShort +- 2.toShort)
sevenShort should equal (8.toShort +- 2.toShort)
sevenShort should equal (7.toShort +- 2.toShort)
sevenShort should equal (6.toShort +- 2.toShort)
sevenShort should equal (5.toShort +- 2.toShort)
minusSevenShort should equal ((-9).toShort +- 2.toShort)
minusSevenShort should equal ((-8).toShort +- 2.toShort)
minusSevenShort should equal ((-7).toShort +- 2.toShort)
minusSevenShort should equal ((-6).toShort +- 2.toShort)
minusSevenShort should equal ((-5).toShort +- 2.toShort)
// Short +- Byte
sevenShort should equal (9.toShort +- 2.toByte)
sevenShort should equal (8.toShort +- 2.toByte)
sevenShort should equal (7.toShort +- 2.toByte)
sevenShort should equal (6.toShort +- 2.toByte)
sevenShort should equal (5.toShort +- 2.toByte)
minusSevenShort should equal ((-9).toShort +- 2.toByte)
minusSevenShort should equal ((-8).toShort +- 2.toByte)
minusSevenShort should equal ((-7).toShort +- 2.toByte)
minusSevenShort should equal ((-6).toShort +- 2.toByte)
minusSevenShort should equal ((-5).toShort +- 2.toByte)
// Byte +- Byte
sevenByte should equal (9.toByte +- 2.toByte)
sevenByte should equal (8.toByte +- 2.toByte)
sevenByte should equal (7.toByte +- 2.toByte)
sevenByte should equal (6.toByte +- 2.toByte)
sevenByte should equal (5.toByte +- 2.toByte)
minusSevenByte should equal ((-9).toByte +- 2.toByte)
minusSevenByte should equal ((-8).toByte +- 2.toByte)
minusSevenByte should equal ((-7).toByte +- 2.toByte)
minusSevenByte should equal ((-6).toByte +- 2.toByte)
minusSevenByte should equal ((-5).toByte +- 2.toByte)
}
      // Verifies that `actual should equal (pivot +- tolerance)` throws a
      // TestFailedException when the actual value lies strictly outside the
      // closed interval [pivot - tolerance, pivot + tolerance].  Each section
      // below exercises one (pivot type, tolerance type) overload combination,
      // probing just beyond both the lower and upper bound for positive and
      // negative actual values.
      it("should throw TFE if the number is outside the given interval") {
        // Double +- Double
        // Also pins the exact failure-message text produced for an out-of-range value.
        val caught = intercept[TestFailedException] { sevenDotOh should equal (7.5 +- 0.2) }
        assert(caught.getMessage === sevenDotOh + " did not equal 7.5 plus or minus 0.2")
        intercept[TestFailedException] { sevenDotOh should equal (6.5 +- 0.2) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-7.5 +- 0.2) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-6.5 +- 0.2) }
        // Double +- Float
        intercept[TestFailedException] { sevenDotOh should equal (7.5 +- 0.2f) }
        intercept[TestFailedException] { sevenDotOh should equal (6.5 +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-7.5 +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-6.5 +- 0.2f) }
        // Double +- Long
        intercept[TestFailedException] { sevenDotOh should equal (4.0 +- 2L) }
        intercept[TestFailedException] { sevenDotOh should equal (9.1 +- 2L) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-4.0 +- 2L) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-9.1 +- 2L) }
        // Double +- Int
        intercept[TestFailedException] { sevenDotOh should equal (4.0 +- 2) }
        intercept[TestFailedException] { sevenDotOh should equal (9.1 +- 2) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-4.0 +- 2) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-9.1 +- 2) }
        // Double +- Short
        intercept[TestFailedException] { sevenDotOh should equal (4.0 +- 2.toShort) }
        intercept[TestFailedException] { sevenDotOh should equal (9.1 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-4.0 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-9.1 +- 2.toShort) }
        // Double +- Byte
        intercept[TestFailedException] { sevenDotOh should equal (4.0 +- 2.toByte) }
        intercept[TestFailedException] { sevenDotOh should equal (9.1 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-4.0 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOh should equal (-9.1 +- 2.toByte) }
        // Float +- Float
        intercept[TestFailedException] { sevenDotOhFloat should equal (7.5f +- 0.2f) }
        intercept[TestFailedException] { sevenDotOhFloat should equal (6.5f +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOhFloat should equal (-7.5f +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOhFloat should equal (-6.5f +- 0.2f) }
        // Float +- Long
        intercept[TestFailedException] { sevenDotOhFloat should equal (4.0f +- 2L) }
        intercept[TestFailedException] { sevenDotOhFloat should equal (9.1f +- 2L) }
        intercept[TestFailedException] { minusSevenDotOhFloat should equal (-4.0f +- 2L) }
        intercept[TestFailedException] { minusSevenDotOhFloat should equal (-9.1f +- 2L) }
        // Float +- Int
        intercept[TestFailedException] { sevenDotOhFloat should equal (4.0f +- 2) }
        intercept[TestFailedException] { sevenDotOhFloat should equal (9.1f +- 2) }
        intercept[TestFailedException] { minusSevenDotOhFloat should equal (-4.0f +- 2) }
        intercept[TestFailedException] { minusSevenDotOhFloat should equal (-9.1f +- 2) }
        // Float +- Short
        intercept[TestFailedException] { sevenDotOhFloat should equal (4.0f +- 2.toShort) }
        intercept[TestFailedException] { sevenDotOhFloat should equal (9.1f +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOhFloat should equal (-4.0f +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOhFloat should equal (-9.1f +- 2.toShort) }
        // Float +- Byte
        intercept[TestFailedException] { sevenDotOhFloat should equal (4.0f +- 2.toByte) }
        intercept[TestFailedException] { sevenDotOhFloat should equal (9.1f +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOhFloat should equal (-4.0f +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOhFloat should equal (-9.1f +- 2.toByte) }
        // Long +- Long
        // For integral pivots, 4/10 (and -4/-10) are exactly one unit outside 7 +- 2 (and -7 +- 2).
        intercept[TestFailedException] { sevenLong should equal (4L +- 2L) }
        intercept[TestFailedException] { sevenLong should equal (10L +- 2L) }
        intercept[TestFailedException] { minusSevenLong should equal (-4L +- 2L) }
        intercept[TestFailedException] { minusSevenLong should equal (-10L +- 2L) }
        // Long +- Int
        intercept[TestFailedException] { sevenLong should equal (4L +- 2) }
        intercept[TestFailedException] { sevenLong should equal (10L +- 2) }
        intercept[TestFailedException] { minusSevenLong should equal (-4L +- 2) }
        intercept[TestFailedException] { minusSevenLong should equal (-10L +- 2) }
        // Long +- Short
        intercept[TestFailedException] { sevenLong should equal (4L +- 2.toShort) }
        intercept[TestFailedException] { sevenLong should equal (10L +- 2.toShort) }
        intercept[TestFailedException] { minusSevenLong should equal (-4L +- 2.toShort) }
        intercept[TestFailedException] { minusSevenLong should equal (-10L +- 2.toShort) }
        // Long +- Byte
        intercept[TestFailedException] { sevenLong should equal (4L +- 2.toByte) }
        intercept[TestFailedException] { sevenLong should equal (10L +- 2.toByte) }
        intercept[TestFailedException] { minusSevenLong should equal (-4L +- 2.toByte) }
        intercept[TestFailedException] { minusSevenLong should equal (-10L +- 2.toByte) }
        // Int +- Int
        intercept[TestFailedException] { sevenInt should equal (4 +- 2) }
        intercept[TestFailedException] { sevenInt should equal (10 +- 2) }
        intercept[TestFailedException] { minusSevenInt should equal (-4 +- 2) }
        intercept[TestFailedException] { minusSevenInt should equal (-10 +- 2) }
        // Int +- Short
        intercept[TestFailedException] { sevenInt should equal (4 +- 2.toShort) }
        intercept[TestFailedException] { sevenInt should equal (10 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenInt should equal (-4 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenInt should equal (-10 +- 2.toShort) }
        // Int +- Byte
        intercept[TestFailedException] { sevenInt should equal (4 +- 2.toByte) }
        intercept[TestFailedException] { sevenInt should equal (10 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenInt should equal (-4 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenInt should equal (-10 +- 2.toByte) }
        // Short +- Short
        intercept[TestFailedException] { sevenShort should equal (4.toShort +- 2.toShort) }
        intercept[TestFailedException] { sevenShort should equal (10.toShort +- 2.toShort) }
        intercept[TestFailedException] { minusSevenShort should equal ((-4).toShort +- 2.toShort) }
        intercept[TestFailedException] { minusSevenShort should equal ((-10).toShort +- 2.toShort) }
        // Short +- Byte
        intercept[TestFailedException] { sevenShort should equal (4.toShort +- 2.toByte) }
        intercept[TestFailedException] { sevenShort should equal (10.toShort +- 2.toByte) }
        intercept[TestFailedException] { minusSevenShort should equal ((-4).toShort +- 2.toByte) }
        intercept[TestFailedException] { minusSevenShort should equal ((-10).toShort +- 2.toByte) }
        // Byte +- Byte
        intercept[TestFailedException] { sevenByte should equal (4.toByte +- 2.toByte) }
        intercept[TestFailedException] { sevenByte should equal (10.toByte +- 2.toByte) }
        intercept[TestFailedException] { minusSevenByte should equal ((-4).toByte +- 2.toByte) }
        intercept[TestFailedException] { minusSevenByte should equal ((-10).toByte +- 2.toByte) }
      }
      // Verifies the negated form: `actual should not equal (pivot +- tolerance)`
      // succeeds (no exception) when the actual value lies strictly outside the
      // interval [pivot - tolerance, pivot + tolerance].  Uses the same
      // out-of-range probe values as the positive-form test above, across every
      // supported (pivot type, tolerance type) overload combination.
      it("should succeed if the number is outside the given interval when used with not") {
        // Double +- Double
        sevenDotOh should not equal (7.5 +- 0.2)
        sevenDotOh should not equal (6.5 +- 0.2)
        minusSevenDotOh should not equal (-7.5 +- 0.2)
        minusSevenDotOh should not equal (-6.5 +- 0.2)
        // Double +- Float
        sevenDotOh should not equal (7.5 +- 0.2f)
        sevenDotOh should not equal (6.5 +- 0.2f)
        minusSevenDotOh should not equal (-7.5 +- 0.2f)
        minusSevenDotOh should not equal (-6.5 +- 0.2f)
        // Double +- Long
        sevenDotOh should not equal (4.0 +- 2L)
        sevenDotOh should not equal (9.1 +- 2L)
        minusSevenDotOh should not equal (-4.0 +- 2L)
        minusSevenDotOh should not equal (-9.1 +- 2L)
        // Double +- Int
        sevenDotOh should not equal (4.0 +- 2)
        sevenDotOh should not equal (9.1 +- 2)
        minusSevenDotOh should not equal (-4.0 +- 2)
        minusSevenDotOh should not equal (-9.1 +- 2)
        // Double +- Short
        sevenDotOh should not equal (4.0 +- 2.toShort)
        sevenDotOh should not equal (9.1 +- 2.toShort)
        minusSevenDotOh should not equal (-4.0 +- 2.toShort)
        minusSevenDotOh should not equal (-9.1 +- 2.toShort)
        // Double +- Byte
        sevenDotOh should not equal (4.0 +- 2.toByte)
        sevenDotOh should not equal (9.1 +- 2.toByte)
        minusSevenDotOh should not equal (-4.0 +- 2.toByte)
        minusSevenDotOh should not equal (-9.1 +- 2.toByte)
        // Float +- Float
        sevenDotOhFloat should not equal (7.5f +- 0.2f)
        sevenDotOhFloat should not equal (6.5f +- 0.2f)
        minusSevenDotOhFloat should not equal (-7.5f +- 0.2f)
        minusSevenDotOhFloat should not equal (-6.5f +- 0.2f)
        // Float +- Long
        sevenDotOhFloat should not equal (4.0f +- 2L)
        sevenDotOhFloat should not equal (9.1f +- 2L)
        minusSevenDotOhFloat should not equal (-4.0f +- 2L)
        minusSevenDotOhFloat should not equal (-9.1f +- 2L)
        // Float +- Int
        sevenDotOhFloat should not equal (4.0f +- 2)
        sevenDotOhFloat should not equal (9.1f +- 2)
        minusSevenDotOhFloat should not equal (-4.0f +- 2)
        minusSevenDotOhFloat should not equal (-9.1f +- 2)
        // Float +- Short
        sevenDotOhFloat should not equal (4.0f +- 2.toShort)
        sevenDotOhFloat should not equal (9.1f +- 2.toShort)
        minusSevenDotOhFloat should not equal (-4.0f +- 2.toShort)
        minusSevenDotOhFloat should not equal (-9.1f +- 2.toShort)
        // Float +- Byte
        sevenDotOhFloat should not equal (4.0f +- 2.toByte)
        sevenDotOhFloat should not equal (9.1f +- 2.toByte)
        minusSevenDotOhFloat should not equal (-4.0f +- 2.toByte)
        minusSevenDotOhFloat should not equal (-9.1f +- 2.toByte)
        // Long +- Long
        // For integral pivots, 4/10 (and -4/-10) are exactly one unit outside 7 +- 2 (and -7 +- 2).
        sevenLong should not equal (4L +- 2L)
        sevenLong should not equal (10L +- 2L)
        minusSevenLong should not equal (-4L +- 2L)
        minusSevenLong should not equal (-10L +- 2L)
        // Long +- Int
        sevenLong should not equal (4L +- 2)
        sevenLong should not equal (10L +- 2)
        minusSevenLong should not equal (-4L +- 2)
        minusSevenLong should not equal (-10L +- 2)
        // Long +- Short
        sevenLong should not equal (4L +- 2.toShort)
        sevenLong should not equal (10L +- 2.toShort)
        minusSevenLong should not equal (-4L +- 2.toShort)
        minusSevenLong should not equal (-10L +- 2.toShort)
        // Long +- Byte
        sevenLong should not equal (4L +- 2.toByte)
        sevenLong should not equal (10L +- 2.toByte)
        minusSevenLong should not equal (-4L +- 2.toByte)
        minusSevenLong should not equal (-10L +- 2.toByte)
        // Int +- Int
        sevenInt should not equal (4 +- 2)
        sevenInt should not equal (10 +- 2)
        minusSevenInt should not equal (-4 +- 2)
        minusSevenInt should not equal (-10 +- 2)
        // Int +- Short
        sevenInt should not equal (4 +- 2.toShort)
        sevenInt should not equal (10 +- 2.toShort)
        minusSevenInt should not equal (-4 +- 2.toShort)
        minusSevenInt should not equal (-10 +- 2.toShort)
        // Int +- Byte
        sevenInt should not equal (4 +- 2.toByte)
        sevenInt should not equal (10 +- 2.toByte)
        minusSevenInt should not equal (-4 +- 2.toByte)
        minusSevenInt should not equal (-10 +- 2.toByte)
        // Short +- Short
        sevenShort should not equal (4.toShort +- 2.toShort)
        sevenShort should not equal (10.toShort +- 2.toShort)
        minusSevenShort should not equal ((-4).toShort +- 2.toShort)
        minusSevenShort should not equal ((-10).toShort +- 2.toShort)
        // Short +- Byte
        sevenShort should not equal (4.toShort +- 2.toByte)
        sevenShort should not equal (10.toShort +- 2.toByte)
        minusSevenShort should not equal ((-4).toShort +- 2.toByte)
        minusSevenShort should not equal ((-10).toShort +- 2.toByte)
        // Byte +- Byte
        sevenByte should not equal (4.toByte +- 2.toByte)
        sevenByte should not equal (10.toByte +- 2.toByte)
        minusSevenByte should not equal ((-4).toByte +- 2.toByte)
        minusSevenByte should not equal ((-10).toByte +- 2.toByte)
      }
      // Verifies that the negated form `actual should not equal (pivot +- tolerance)`
      // throws a TestFailedException when the actual value IS within the interval
      // [pivot - tolerance, pivot + tolerance].  Each section probes five in-range
      // pivots (both endpoints, the midpoint, and one value either side of it) for
      // positive and negative actual values, across every supported
      // (pivot type, tolerance type) overload combination.
      it("should throw TFE if the number is within the given interval when used with not") {
        // Double +- Double
        // Also pins the exact failure-message text produced for an in-range value.
        val caught = intercept[TestFailedException] { sevenDotOh should not equal (7.1 +- 0.2) }
        assert(caught.getMessage === sevenDotOh + " equaled 7.1 plus or minus 0.2")
        intercept[TestFailedException] { sevenDotOh should not equal (6.9 +- 0.2) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.0 +- 0.2) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.2 +- 0.2) }
        intercept[TestFailedException] { sevenDotOh should not equal (6.8 +- 0.2) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.1 +- 0.2) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.9 +- 0.2) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.0 +- 0.2) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.2 +- 0.2) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.8 +- 0.2) }
        // Double +- Float
        intercept[TestFailedException] { sevenDotOh should not equal (7.1 +- 0.2f) }
        intercept[TestFailedException] { sevenDotOh should not equal (6.9 +- 0.2f) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.0 +- 0.2f) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.2 +- 0.2f) }
        intercept[TestFailedException] { sevenDotOh should not equal (6.8 +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.1 +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.9 +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.0 +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.2 +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.8 +- 0.2f) }
        // Double +- Long
        intercept[TestFailedException] { sevenDotOh should not equal (7.1 +- 2L) }
        intercept[TestFailedException] { sevenDotOh should not equal (6.9 +- 2L) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.0 +- 2L) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.2 +- 2L) }
        intercept[TestFailedException] { sevenDotOh should not equal (6.8 +- 2L) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.1 +- 2L) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.9 +- 2L) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.0 +- 2L) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.2 +- 2L) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.8 +- 2L) }
        // Double +- Int
        intercept[TestFailedException] { sevenDotOh should not equal (7.1 +- 2) }
        intercept[TestFailedException] { sevenDotOh should not equal (6.9 +- 2) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.0 +- 2) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.2 +- 2) }
        intercept[TestFailedException] { sevenDotOh should not equal (6.8 +- 2) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.1 +- 2) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.9 +- 2) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.0 +- 2) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.2 +- 2) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.8 +- 2) }
        // Double +- Short
        intercept[TestFailedException] { sevenDotOh should not equal (7.1 +- 2.toShort) }
        intercept[TestFailedException] { sevenDotOh should not equal (6.9 +- 2.toShort) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.0 +- 2.toShort) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.2 +- 2.toShort) }
        intercept[TestFailedException] { sevenDotOh should not equal (6.8 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.1 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.9 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.0 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.2 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.8 +- 2.toShort) }
        // Double +- Byte
        intercept[TestFailedException] { sevenDotOh should not equal (7.1 +- 2.toByte) }
        intercept[TestFailedException] { sevenDotOh should not equal (6.9 +- 2.toByte) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.0 +- 2.toByte) }
        intercept[TestFailedException] { sevenDotOh should not equal (7.2 +- 2.toByte) }
        intercept[TestFailedException] { sevenDotOh should not equal (6.8 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.1 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.9 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.0 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-7.2 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOh should not equal (-6.8 +- 2.toByte) }
        // Float +- Float
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.1f +- 0.2f) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (6.9f +- 0.2f) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.0f +- 0.2f) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.2f +- 0.2f) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (6.8f +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.1f +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-6.9f +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.0f +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.2f +- 0.2f) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-6.8f +- 0.2f) }
        // Float +- Long
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.1f +- 2L) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (6.9f +- 2L) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.0f +- 2L) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.2f +- 2L) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (6.8f +- 2L) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.1f +- 2L) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-6.9f +- 2L) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.0f +- 2L) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.2f +- 2L) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-6.8f +- 2L) }
        // Float +- Int
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.1f +- 2) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (6.9f +- 2) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.0f +- 2) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.2f +- 2) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (6.8f +- 2) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.1f +- 2) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-6.9f +- 2) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.0f +- 2) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.2f +- 2) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-6.8f +- 2) }
        // Float +- Short
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.1f +- 2.toShort) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (6.9f +- 2.toShort) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.0f +- 2.toShort) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.2f +- 2.toShort) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (6.8f +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.1f +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-6.9f +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.0f +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.2f +- 2.toShort) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-6.8f +- 2.toShort) }
        // Float +- Byte
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.1f +- 2.toByte) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (6.9f +- 2.toByte) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.0f +- 2.toByte) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (7.2f +- 2.toByte) }
        intercept[TestFailedException] { sevenDotOhFloat should not equal (6.8f +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.1f +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-6.9f +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.0f +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-7.2f +- 2.toByte) }
        intercept[TestFailedException] { minusSevenDotOhFloat should not equal (-6.8f +- 2.toByte) }
        // Long +- Long
        // For integral pivots, 5..9 (and -9..-5) all contain 7 (resp. -7) within +- 2.
        intercept[TestFailedException] { sevenLong should not equal (9L +- 2L) }
        intercept[TestFailedException] { sevenLong should not equal (8L +- 2L) }
        intercept[TestFailedException] { sevenLong should not equal (7L +- 2L) }
        intercept[TestFailedException] { sevenLong should not equal (6L +- 2L) }
        intercept[TestFailedException] { sevenLong should not equal (5L +- 2L) }
        intercept[TestFailedException] { minusSevenLong should not equal (-9L +- 2L) }
        intercept[TestFailedException] { minusSevenLong should not equal (-8L +- 2L) }
        intercept[TestFailedException] { minusSevenLong should not equal (-7L +- 2L) }
        intercept[TestFailedException] { minusSevenLong should not equal (-6L +- 2L) }
        intercept[TestFailedException] { minusSevenLong should not equal (-5L +- 2L) }
        // Long +- Int
        intercept[TestFailedException] { sevenLong should not equal (9L +- 2) }
        intercept[TestFailedException] { sevenLong should not equal (8L +- 2) }
        intercept[TestFailedException] { sevenLong should not equal (7L +- 2) }
        intercept[TestFailedException] { sevenLong should not equal (6L +- 2) }
        intercept[TestFailedException] { sevenLong should not equal (5L +- 2) }
        intercept[TestFailedException] { minusSevenLong should not equal (-9L +- 2) }
        intercept[TestFailedException] { minusSevenLong should not equal (-8L +- 2) }
        intercept[TestFailedException] { minusSevenLong should not equal (-7L +- 2) }
        intercept[TestFailedException] { minusSevenLong should not equal (-6L +- 2) }
        intercept[TestFailedException] { minusSevenLong should not equal (-5L +- 2) }
        // Long +- Short
        intercept[TestFailedException] { sevenLong should not equal (9L +- 2.toShort) }
        intercept[TestFailedException] { sevenLong should not equal (8L +- 2.toShort) }
        intercept[TestFailedException] { sevenLong should not equal (7L +- 2.toShort) }
        intercept[TestFailedException] { sevenLong should not equal (6L +- 2.toShort) }
        intercept[TestFailedException] { sevenLong should not equal (5L +- 2.toShort) }
        intercept[TestFailedException] { minusSevenLong should not equal (-9L +- 2.toShort) }
        intercept[TestFailedException] { minusSevenLong should not equal (-8L +- 2.toShort) }
        intercept[TestFailedException] { minusSevenLong should not equal (-7L +- 2.toShort) }
        intercept[TestFailedException] { minusSevenLong should not equal (-6L +- 2.toShort) }
        intercept[TestFailedException] { minusSevenLong should not equal (-5L +- 2.toShort) }
        // Long +- Byte
        intercept[TestFailedException] { sevenLong should not equal (9L +- 2.toByte) }
        intercept[TestFailedException] { sevenLong should not equal (8L +- 2.toByte) }
        intercept[TestFailedException] { sevenLong should not equal (7L +- 2.toByte) }
        intercept[TestFailedException] { sevenLong should not equal (6L +- 2.toByte) }
        intercept[TestFailedException] { sevenLong should not equal (5L +- 2.toByte) }
        intercept[TestFailedException] { minusSevenLong should not equal (-9L +- 2.toByte) }
        intercept[TestFailedException] { minusSevenLong should not equal (-8L +- 2.toByte) }
        intercept[TestFailedException] { minusSevenLong should not equal (-7L +- 2.toByte) }
        intercept[TestFailedException] { minusSevenLong should not equal (-6L +- 2.toByte) }
        intercept[TestFailedException] { minusSevenLong should not equal (-5L +- 2.toByte) }
        // Int +- Int
        intercept[TestFailedException] { sevenInt should not equal (9 +- 2) }
        intercept[TestFailedException] { sevenInt should not equal (8 +- 2) }
        intercept[TestFailedException] { sevenInt should not equal (7 +- 2) }
        intercept[TestFailedException] { sevenInt should not equal (6 +- 2) }
        intercept[TestFailedException] { sevenInt should not equal (5 +- 2) }
        intercept[TestFailedException] { minusSevenInt should not equal (-9 +- 2) }
        intercept[TestFailedException] { minusSevenInt should not equal (-8 +- 2) }
        intercept[TestFailedException] { minusSevenInt should not equal (-7 +- 2) }
        intercept[TestFailedException] { minusSevenInt should not equal (-6 +- 2) }
        intercept[TestFailedException] { minusSevenInt should not equal (-5 +- 2) }
        // Int +- Short
        intercept[TestFailedException] { sevenInt should not equal (9 +- 2.toShort) }
        intercept[TestFailedException] { sevenInt should not equal (8 +- 2.toShort) }
        intercept[TestFailedException] { sevenInt should not equal (7 +- 2.toShort) }
        intercept[TestFailedException] { sevenInt should not equal (6 +- 2.toShort) }
        intercept[TestFailedException] { sevenInt should not equal (5 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenInt should not equal (-9 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenInt should not equal (-8 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenInt should not equal (-7 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenInt should not equal (-6 +- 2.toShort) }
        intercept[TestFailedException] { minusSevenInt should not equal (-5 +- 2.toShort) }
        // Int +- Byte
        intercept[TestFailedException] { sevenInt should not equal (9 +- 2.toByte) }
        intercept[TestFailedException] { sevenInt should not equal (8 +- 2.toByte) }
        intercept[TestFailedException] { sevenInt should not equal (7 +- 2.toByte) }
        intercept[TestFailedException] { sevenInt should not equal (6 +- 2.toByte) }
        intercept[TestFailedException] { sevenInt should not equal (5 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenInt should not equal (-9 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenInt should not equal (-8 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenInt should not equal (-7 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenInt should not equal (-6 +- 2.toByte) }
        intercept[TestFailedException] { minusSevenInt should not equal (-5 +- 2.toByte) }
        // Short +- Short
        intercept[TestFailedException] { sevenShort should not equal (9.toShort +- 2.toShort) }
        intercept[TestFailedException] { sevenShort should not equal (8.toShort +- 2.toShort) }
        intercept[TestFailedException] { sevenShort should not equal (7.toShort +- 2.toShort) }
        intercept[TestFailedException] { sevenShort should not equal (6.toShort +- 2.toShort) }
        intercept[TestFailedException] { sevenShort should not equal (5.toShort +- 2.toShort) }
        intercept[TestFailedException] { minusSevenShort should not equal ((-9).toShort +- 2.toShort) }
        intercept[TestFailedException] { minusSevenShort should not equal ((-8).toShort +- 2.toShort) }
        intercept[TestFailedException] { minusSevenShort should not equal ((-7).toShort +- 2.toShort) }
        intercept[TestFailedException] { minusSevenShort should not equal ((-6).toShort +- 2.toShort) }
        intercept[TestFailedException] { minusSevenShort should not equal ((-5).toShort +- 2.toShort) }
        // Short +- Byte
        intercept[TestFailedException] { sevenShort should not equal (9.toShort +- 2.toByte) }
        intercept[TestFailedException] { sevenShort should not equal (8.toShort +- 2.toByte) }
        intercept[TestFailedException] { sevenShort should not equal (7.toShort +- 2.toByte) }
        intercept[TestFailedException] { sevenShort should not equal (6.toShort +- 2.toByte) }
        intercept[TestFailedException] { sevenShort should not equal (5.toShort +- 2.toByte) }
        intercept[TestFailedException] { minusSevenShort should not equal ((-9).toShort +- 2.toByte) }
        intercept[TestFailedException] { minusSevenShort should not equal ((-8).toShort +- 2.toByte) }
        intercept[TestFailedException] { minusSevenShort should not equal ((-7).toShort +- 2.toByte) }
        intercept[TestFailedException] { minusSevenShort should not equal ((-6).toShort +- 2.toByte) }
        intercept[TestFailedException] { minusSevenShort should not equal ((-5).toShort +- 2.toByte) }
        // Byte +- Byte
        intercept[TestFailedException] { sevenByte should not equal (9.toByte +- 2.toByte) }
        intercept[TestFailedException] { sevenByte should not equal (8.toByte +- 2.toByte) }
        intercept[TestFailedException] { sevenByte should not equal (7.toByte +- 2.toByte) }
        intercept[TestFailedException] { sevenByte should not equal (6.toByte +- 2.toByte) }
        intercept[TestFailedException] { sevenByte should not equal (5.toByte +- 2.toByte) }
        intercept[TestFailedException] { minusSevenByte should not equal ((-9).toByte +- 2.toByte) }
        intercept[TestFailedException] { minusSevenByte should not equal ((-8).toByte +- 2.toByte) }
        intercept[TestFailedException] { minusSevenByte should not equal ((-7).toByte +- 2.toByte) }
        intercept[TestFailedException] { minusSevenByte should not equal ((-6).toByte +- 2.toByte) }
        intercept[TestFailedException] { minusSevenByte should not equal ((-5).toByte +- 2.toByte) }
      }
      // Two passing tolerance matchers composed with `and` must succeed for every
      // supported combination of value type and tolerance type (Double/Float/Long/
      // Int/Short/Byte). Each group checks the five in-tolerance pivots and the
      // mirrored negative pivots.
      it("should succeed when equal and used in a logical-and expression") {
        // Double +- Double
        sevenDotOh should (equal (7.1 +- 0.2) and equal (7.1 +- 0.2))
        sevenDotOh should (equal (6.9 +- 0.2) and equal (6.9 +- 0.2))
        sevenDotOh should (equal (7.0 +- 0.2) and equal (7.0 +- 0.2))
        sevenDotOh should (equal (7.2 +- 0.2) and equal (7.2 +- 0.2))
        sevenDotOh should (equal (6.8 +- 0.2) and equal (6.8 +- 0.2))
        minusSevenDotOh should (equal (-7.1 +- 0.2) and equal (-7.1 +- 0.2))
        minusSevenDotOh should (equal (-6.9 +- 0.2) and equal (-6.9 +- 0.2))
        minusSevenDotOh should (equal (-7.0 +- 0.2) and equal (-7.0 +- 0.2))
        minusSevenDotOh should (equal (-7.2 +- 0.2) and equal (-7.2 +- 0.2))
        minusSevenDotOh should (equal (-6.8 +- 0.2) and equal (-6.8 +- 0.2))
        // Double +- Float
        sevenDotOh should (equal (7.1 +- 0.2f) and equal (7.1 +- 0.2f))
        sevenDotOh should (equal (6.9 +- 0.2f) and equal (6.9 +- 0.2f))
        sevenDotOh should (equal (7.0 +- 0.2f) and equal (7.0 +- 0.2f))
        sevenDotOh should (equal (7.2 +- 0.2f) and equal (7.2 +- 0.2f))
        sevenDotOh should (equal (6.8 +- 0.2f) and equal (6.8 +- 0.2f))
        minusSevenDotOh should (equal (-7.1 +- 0.2f) and equal (-7.1 +- 0.2f))
        minusSevenDotOh should (equal (-6.9 +- 0.2f) and equal (-6.9 +- 0.2f))
        minusSevenDotOh should (equal (-7.0 +- 0.2f) and equal (-7.0 +- 0.2f))
        minusSevenDotOh should (equal (-7.2 +- 0.2f) and equal (-7.2 +- 0.2f))
        minusSevenDotOh should (equal (-6.8 +- 0.2f) and equal (-6.8 +- 0.2f))
        // Double +- Long
        sevenDotOh should (equal (7.1 +- 2L) and equal (7.1 +- 2L))
        sevenDotOh should (equal (6.9 +- 2L) and equal (6.9 +- 2L))
        sevenDotOh should (equal (7.0 +- 2L) and equal (7.0 +- 2L))
        sevenDotOh should (equal (7.2 +- 2L) and equal (7.2 +- 2L))
        sevenDotOh should (equal (6.8 +- 2L) and equal (6.8 +- 2L))
        minusSevenDotOh should (equal (-7.1 +- 2L) and equal (-7.1 +- 2L))
        minusSevenDotOh should (equal (-6.9 +- 2L) and equal (-6.9 +- 2L))
        minusSevenDotOh should (equal (-7.0 +- 2L) and equal (-7.0 +- 2L))
        minusSevenDotOh should (equal (-7.2 +- 2L) and equal (-7.2 +- 2L))
        minusSevenDotOh should (equal (-6.8 +- 2L) and equal (-6.8 +- 2L))
        // Double +- Int
        sevenDotOh should (equal (7.1 +- 2) and equal (7.1 +- 2))
        sevenDotOh should (equal (6.9 +- 2) and equal (6.9 +- 2))
        sevenDotOh should (equal (7.0 +- 2) and equal (7.0 +- 2))
        sevenDotOh should (equal (7.2 +- 2) and equal (7.2 +- 2))
        sevenDotOh should (equal (6.8 +- 2) and equal (6.8 +- 2))
        minusSevenDotOh should (equal (-7.1 +- 2) and equal (-7.1 +- 2))
        minusSevenDotOh should (equal (-6.9 +- 2) and equal (-6.9 +- 2))
        minusSevenDotOh should (equal (-7.0 +- 2) and equal (-7.0 +- 2))
        minusSevenDotOh should (equal (-7.2 +- 2) and equal (-7.2 +- 2))
        minusSevenDotOh should (equal (-6.8 +- 2) and equal (-6.8 +- 2))
        // Double +- Short
        sevenDotOh should (equal (7.1 +- 2.toShort) and equal (7.1 +- 2.toShort))
        sevenDotOh should (equal (6.9 +- 2.toShort) and equal (6.9 +- 2.toShort))
        sevenDotOh should (equal (7.0 +- 2.toShort) and equal (7.0 +- 2.toShort))
        sevenDotOh should (equal (7.2 +- 2.toShort) and equal (7.2 +- 2.toShort))
        sevenDotOh should (equal (6.8 +- 2.toShort) and equal (6.8 +- 2.toShort))
        minusSevenDotOh should (equal (-7.1 +- 2.toShort) and equal (-7.1 +- 2.toShort))
        minusSevenDotOh should (equal (-6.9 +- 2.toShort) and equal (-6.9 +- 2.toShort))
        minusSevenDotOh should (equal (-7.0 +- 2.toShort) and equal (-7.0 +- 2.toShort))
        minusSevenDotOh should (equal (-7.2 +- 2.toShort) and equal (-7.2 +- 2.toShort))
        minusSevenDotOh should (equal (-6.8 +- 2.toShort) and equal (-6.8 +- 2.toShort))
        // Double +- Byte
        sevenDotOh should (equal (7.1 +- 2.toByte) and equal (7.1 +- 2.toByte))
        sevenDotOh should (equal (6.9 +- 2.toByte) and equal (6.9 +- 2.toByte))
        sevenDotOh should (equal (7.0 +- 2.toByte) and equal (7.0 +- 2.toByte))
        sevenDotOh should (equal (7.2 +- 2.toByte) and equal (7.2 +- 2.toByte))
        sevenDotOh should (equal (6.8 +- 2.toByte) and equal (6.8 +- 2.toByte))
        minusSevenDotOh should (equal (-7.1 +- 2.toByte) and equal (-7.1 +- 2.toByte))
        minusSevenDotOh should (equal (-6.9 +- 2.toByte) and equal (-6.9 +- 2.toByte))
        minusSevenDotOh should (equal (-7.0 +- 2.toByte) and equal (-7.0 +- 2.toByte))
        minusSevenDotOh should (equal (-7.2 +- 2.toByte) and equal (-7.2 +- 2.toByte))
        minusSevenDotOh should (equal (-6.8 +- 2.toByte) and equal (-6.8 +- 2.toByte))
        // Float +- Float
        sevenDotOhFloat should (equal (7.1f +- 0.2f) and equal (7.1f +- 0.2f))
        sevenDotOhFloat should (equal (6.9f +- 0.2f) and equal (6.9f +- 0.2f))
        sevenDotOhFloat should (equal (7.0f +- 0.2f) and equal (7.0f +- 0.2f))
        sevenDotOhFloat should (equal (7.2f +- 0.2f) and equal (7.2f +- 0.2f))
        sevenDotOhFloat should (equal (6.8f +- 0.2f) and equal (6.8f +- 0.2f))
        minusSevenDotOhFloat should (equal (-7.1f +- 0.2f) and equal (-7.1f +- 0.2f))
        minusSevenDotOhFloat should (equal (-6.9f +- 0.2f) and equal (-6.9f +- 0.2f))
        minusSevenDotOhFloat should (equal (-7.0f +- 0.2f) and equal (-7.0f +- 0.2f))
        minusSevenDotOhFloat should (equal (-7.2f +- 0.2f) and equal (-7.2f +- 0.2f))
        minusSevenDotOhFloat should (equal (-6.8f +- 0.2f) and equal (-6.8f +- 0.2f))
        // Float +- Long
        sevenDotOhFloat should (equal (7.1f +- 2L) and equal (7.1f +- 2L))
        sevenDotOhFloat should (equal (6.9f +- 2L) and equal (6.9f +- 2L))
        sevenDotOhFloat should (equal (7.0f +- 2L) and equal (7.0f +- 2L))
        sevenDotOhFloat should (equal (7.2f +- 2L) and equal (7.2f +- 2L))
        sevenDotOhFloat should (equal (6.8f +- 2L) and equal (6.8f +- 2L))
        minusSevenDotOhFloat should (equal (-7.1f +- 2L) and equal (-7.1f +- 2L))
        minusSevenDotOhFloat should (equal (-6.9f +- 2L) and equal (-6.9f +- 2L))
        minusSevenDotOhFloat should (equal (-7.0f +- 2L) and equal (-7.0f +- 2L))
        minusSevenDotOhFloat should (equal (-7.2f +- 2L) and equal (-7.2f +- 2L))
        minusSevenDotOhFloat should (equal (-6.8f +- 2L) and equal (-6.8f +- 2L))
        // Float +- Int
        sevenDotOhFloat should (equal (7.1f +- 2) and equal (7.1f +- 2))
        sevenDotOhFloat should (equal (6.9f +- 2) and equal (6.9f +- 2))
        sevenDotOhFloat should (equal (7.0f +- 2) and equal (7.0f +- 2))
        sevenDotOhFloat should (equal (7.2f +- 2) and equal (7.2f +- 2))
        sevenDotOhFloat should (equal (6.8f +- 2) and equal (6.8f +- 2))
        minusSevenDotOhFloat should (equal (-7.1f +- 2) and equal (-7.1f +- 2))
        minusSevenDotOhFloat should (equal (-6.9f +- 2) and equal (-6.9f +- 2))
        minusSevenDotOhFloat should (equal (-7.0f +- 2) and equal (-7.0f +- 2))
        minusSevenDotOhFloat should (equal (-7.2f +- 2) and equal (-7.2f +- 2))
        minusSevenDotOhFloat should (equal (-6.8f +- 2) and equal (-6.8f +- 2))
        // Float +- Short
        sevenDotOhFloat should (equal (7.1f +- 2.toShort) and equal (7.1f +- 2.toShort))
        sevenDotOhFloat should (equal (6.9f +- 2.toShort) and equal (6.9f +- 2.toShort))
        sevenDotOhFloat should (equal (7.0f +- 2.toShort) and equal (7.0f +- 2.toShort))
        sevenDotOhFloat should (equal (7.2f +- 2.toShort) and equal (7.2f +- 2.toShort))
        sevenDotOhFloat should (equal (6.8f +- 2.toShort) and equal (6.8f +- 2.toShort))
        minusSevenDotOhFloat should (equal (-7.1f +- 2.toShort) and equal (-7.1f +- 2.toShort))
        minusSevenDotOhFloat should (equal (-6.9f +- 2.toShort) and equal (-6.9f +- 2.toShort))
        minusSevenDotOhFloat should (equal (-7.0f +- 2.toShort) and equal (-7.0f +- 2.toShort))
        minusSevenDotOhFloat should (equal (-7.2f +- 2.toShort) and equal (-7.2f +- 2.toShort))
        minusSevenDotOhFloat should (equal (-6.8f +- 2.toShort) and equal (-6.8f +- 2.toShort))
        // Float +- Byte
        sevenDotOhFloat should (equal (7.1f +- 2.toByte) and equal (7.1f +- 2.toByte))
        sevenDotOhFloat should (equal (6.9f +- 2.toByte) and equal (6.9f +- 2.toByte))
        sevenDotOhFloat should (equal (7.0f +- 2.toByte) and equal (7.0f +- 2.toByte))
        sevenDotOhFloat should (equal (7.2f +- 2.toByte) and equal (7.2f +- 2.toByte))
        sevenDotOhFloat should (equal (6.8f +- 2.toByte) and equal (6.8f +- 2.toByte))
        minusSevenDotOhFloat should (equal (-7.1f +- 2.toByte) and equal (-7.1f +- 2.toByte))
        minusSevenDotOhFloat should (equal (-6.9f +- 2.toByte) and equal (-6.9f +- 2.toByte))
        minusSevenDotOhFloat should (equal (-7.0f +- 2.toByte) and equal (-7.0f +- 2.toByte))
        minusSevenDotOhFloat should (equal (-7.2f +- 2.toByte) and equal (-7.2f +- 2.toByte))
        minusSevenDotOhFloat should (equal (-6.8f +- 2.toByte) and equal (-6.8f +- 2.toByte))
        // Long +- Long
        sevenLong should (equal (9L +- 2L) and equal (9L +- 2L))
        sevenLong should (equal (8L +- 2L) and equal (8L +- 2L))
        sevenLong should (equal (7L +- 2L) and equal (7L +- 2L))
        sevenLong should (equal (6L +- 2L) and equal (6L +- 2L))
        sevenLong should (equal (5L +- 2L) and equal (5L +- 2L))
        minusSevenLong should (equal (-9L +- 2L) and equal (-9L +- 2L))
        minusSevenLong should (equal (-8L +- 2L) and equal (-8L +- 2L))
        minusSevenLong should (equal (-7L +- 2L) and equal (-7L +- 2L))
        minusSevenLong should (equal (-6L +- 2L) and equal (-6L +- 2L))
        minusSevenLong should (equal (-5L +- 2L) and equal (-5L +- 2L))
        // Long +- Int
        sevenLong should (equal (9L +- 2) and equal (9L +- 2))
        sevenLong should (equal (8L +- 2) and equal (8L +- 2))
        sevenLong should (equal (7L +- 2) and equal (7L +- 2))
        sevenLong should (equal (6L +- 2) and equal (6L +- 2))
        sevenLong should (equal (5L +- 2) and equal (5L +- 2))
        minusSevenLong should (equal (-9L +- 2) and equal (-9L +- 2))
        minusSevenLong should (equal (-8L +- 2) and equal (-8L +- 2))
        minusSevenLong should (equal (-7L +- 2) and equal (-7L +- 2))
        minusSevenLong should (equal (-6L +- 2) and equal (-6L +- 2))
        minusSevenLong should (equal (-5L +- 2) and equal (-5L +- 2))
        // Long +- Short
        sevenLong should (equal (9L +- 2.toShort) and equal (9L +- 2.toShort))
        sevenLong should (equal (8L +- 2.toShort) and equal (8L +- 2.toShort))
        sevenLong should (equal (7L +- 2.toShort) and equal (7L +- 2.toShort))
        sevenLong should (equal (6L +- 2.toShort) and equal (6L +- 2.toShort))
        sevenLong should (equal (5L +- 2.toShort) and equal (5L +- 2.toShort))
        minusSevenLong should (equal (-9L +- 2.toShort) and equal (-9L +- 2.toShort))
        minusSevenLong should (equal (-8L +- 2.toShort) and equal (-8L +- 2.toShort))
        minusSevenLong should (equal (-7L +- 2.toShort) and equal (-7L +- 2.toShort))
        minusSevenLong should (equal (-6L +- 2.toShort) and equal (-6L +- 2.toShort))
        minusSevenLong should (equal (-5L +- 2.toShort) and equal (-5L +- 2.toShort))
        // Long +- Byte
        sevenLong should (equal (9L +- 2.toByte) and equal (9L +- 2.toByte))
        sevenLong should (equal (8L +- 2.toByte) and equal (8L +- 2.toByte))
        sevenLong should (equal (7L +- 2.toByte) and equal (7L +- 2.toByte))
        sevenLong should (equal (6L +- 2.toByte) and equal (6L +- 2.toByte))
        sevenLong should (equal (5L +- 2.toByte) and equal (5L +- 2.toByte))
        minusSevenLong should (equal (-9L +- 2.toByte) and equal (-9L +- 2.toByte))
        minusSevenLong should (equal (-8L +- 2.toByte) and equal (-8L +- 2.toByte))
        minusSevenLong should (equal (-7L +- 2.toByte) and equal (-7L +- 2.toByte))
        minusSevenLong should (equal (-6L +- 2.toByte) and equal (-6L +- 2.toByte))
        minusSevenLong should (equal (-5L +- 2.toByte) and equal (-5L +- 2.toByte))
        // Int +- Int
        sevenInt should (equal (9 +- 2) and equal (9 +- 2))
        sevenInt should (equal (8 +- 2) and equal (8 +- 2))
        sevenInt should (equal (7 +- 2) and equal (7 +- 2))
        sevenInt should (equal (6 +- 2) and equal (6 +- 2))
        sevenInt should (equal (5 +- 2) and equal (5 +- 2))
        minusSevenInt should (equal (-9 +- 2) and equal (-9 +- 2))
        minusSevenInt should (equal (-8 +- 2) and equal (-8 +- 2))
        minusSevenInt should (equal (-7 +- 2) and equal (-7 +- 2))
        minusSevenInt should (equal (-6 +- 2) and equal (-6 +- 2))
        minusSevenInt should (equal (-5 +- 2) and equal (-5 +- 2))
        // Int +- Short
        sevenInt should (equal (9 +- 2.toShort) and equal (9 +- 2.toShort))
        sevenInt should (equal (8 +- 2.toShort) and equal (8 +- 2.toShort))
        sevenInt should (equal (7 +- 2.toShort) and equal (7 +- 2.toShort))
        sevenInt should (equal (6 +- 2.toShort) and equal (6 +- 2.toShort))
        sevenInt should (equal (5 +- 2.toShort) and equal (5 +- 2.toShort))
        minusSevenInt should (equal (-9 +- 2.toShort) and equal (-9 +- 2.toShort))
        minusSevenInt should (equal (-8 +- 2.toShort) and equal (-8 +- 2.toShort))
        minusSevenInt should (equal (-7 +- 2.toShort) and equal (-7 +- 2.toShort))
        minusSevenInt should (equal (-6 +- 2.toShort) and equal (-6 +- 2.toShort))
        minusSevenInt should (equal (-5 +- 2.toShort) and equal (-5 +- 2.toShort))
        // Int +- Byte
        sevenInt should (equal (9 +- 2.toByte) and equal (9 +- 2.toByte))
        sevenInt should (equal (8 +- 2.toByte) and equal (8 +- 2.toByte))
        sevenInt should (equal (7 +- 2.toByte) and equal (7 +- 2.toByte))
        sevenInt should (equal (6 +- 2.toByte) and equal (6 +- 2.toByte))
        sevenInt should (equal (5 +- 2.toByte) and equal (5 +- 2.toByte))
        minusSevenInt should (equal (-9 +- 2.toByte) and equal (-9 +- 2.toByte))
        minusSevenInt should (equal (-8 +- 2.toByte) and equal (-8 +- 2.toByte))
        minusSevenInt should (equal (-7 +- 2.toByte) and equal (-7 +- 2.toByte))
        minusSevenInt should (equal (-6 +- 2.toByte) and equal (-6 +- 2.toByte))
        minusSevenInt should (equal (-5 +- 2.toByte) and equal (-5 +- 2.toByte))
        // Short +- Short
        sevenShort should (equal (9.toShort +- 2.toShort) and equal (9.toShort +- 2.toShort))
        sevenShort should (equal (8.toShort +- 2.toShort) and equal (8.toShort +- 2.toShort))
        sevenShort should (equal (7.toShort +- 2.toShort) and equal (7.toShort +- 2.toShort))
        sevenShort should (equal (6.toShort +- 2.toShort) and equal (6.toShort +- 2.toShort))
        sevenShort should (equal (5.toShort +- 2.toShort) and equal (5.toShort +- 2.toShort))
        minusSevenShort should (equal ((-9).toShort +- 2.toShort) and equal ((-9).toShort +- 2.toShort))
        minusSevenShort should (equal ((-8).toShort +- 2.toShort) and equal ((-8).toShort +- 2.toShort))
        minusSevenShort should (equal ((-7).toShort +- 2.toShort) and equal ((-7).toShort +- 2.toShort))
        minusSevenShort should (equal ((-6).toShort +- 2.toShort) and equal ((-6).toShort +- 2.toShort))
        minusSevenShort should (equal ((-5).toShort +- 2.toShort) and equal ((-5).toShort +- 2.toShort))
        // Short +- Byte
        sevenShort should (equal (9.toShort +- 2.toByte) and equal (9.toShort +- 2.toByte))
        sevenShort should (equal (8.toShort +- 2.toByte) and equal (8.toShort +- 2.toByte))
        sevenShort should (equal (7.toShort +- 2.toByte) and equal (7.toShort +- 2.toByte))
        sevenShort should (equal (6.toShort +- 2.toByte) and equal (6.toShort +- 2.toByte))
        sevenShort should (equal (5.toShort +- 2.toByte) and equal (5.toShort +- 2.toByte))
        minusSevenShort should (equal ((-9).toShort +- 2.toByte) and equal ((-9).toShort +- 2.toByte))
        minusSevenShort should (equal ((-8).toShort +- 2.toByte) and equal ((-8).toShort +- 2.toByte))
        minusSevenShort should (equal ((-7).toShort +- 2.toByte) and equal ((-7).toShort +- 2.toByte))
        minusSevenShort should (equal ((-6).toShort +- 2.toByte) and equal ((-6).toShort +- 2.toByte))
        minusSevenShort should (equal ((-5).toShort +- 2.toByte) and equal ((-5).toShort +- 2.toByte))
        // Byte +- Byte
        sevenByte should (equal (9.toByte +- 2.toByte) and equal (9.toByte +- 2.toByte))
        sevenByte should (equal (8.toByte +- 2.toByte) and equal (8.toByte +- 2.toByte))
        sevenByte should (equal (7.toByte +- 2.toByte) and equal (7.toByte +- 2.toByte))
        sevenByte should (equal (6.toByte +- 2.toByte) and equal (6.toByte +- 2.toByte))
        sevenByte should (equal (5.toByte +- 2.toByte) and equal (5.toByte +- 2.toByte))
        minusSevenByte should (equal ((-9).toByte +- 2.toByte) and equal ((-9).toByte +- 2.toByte))
        minusSevenByte should (equal ((-8).toByte +- 2.toByte) and equal ((-8).toByte +- 2.toByte))
        minusSevenByte should (equal ((-7).toByte +- 2.toByte) and equal ((-7).toByte +- 2.toByte))
        minusSevenByte should (equal ((-6).toByte +- 2.toByte) and equal ((-6).toByte +- 2.toByte))
        minusSevenByte should (equal ((-5).toByte +- 2.toByte) and equal ((-5).toByte +- 2.toByte))
      }
      // Mirror of the logical-and case above: two passing tolerance matchers
      // composed with `or` must succeed for every supported value/tolerance
      // type combination.
      it("should succeed when equal and used in a logical-or expression") {
        // Double +- Double
        sevenDotOh should (equal (7.1 +- 0.2) or equal (7.1 +- 0.2))
        sevenDotOh should (equal (6.9 +- 0.2) or equal (6.9 +- 0.2))
        sevenDotOh should (equal (7.0 +- 0.2) or equal (7.0 +- 0.2))
        sevenDotOh should (equal (7.2 +- 0.2) or equal (7.2 +- 0.2))
        sevenDotOh should (equal (6.8 +- 0.2) or equal (6.8 +- 0.2))
        minusSevenDotOh should (equal (-7.1 +- 0.2) or equal (-7.1 +- 0.2))
        minusSevenDotOh should (equal (-6.9 +- 0.2) or equal (-6.9 +- 0.2))
        minusSevenDotOh should (equal (-7.0 +- 0.2) or equal (-7.0 +- 0.2))
        minusSevenDotOh should (equal (-7.2 +- 0.2) or equal (-7.2 +- 0.2))
        minusSevenDotOh should (equal (-6.8 +- 0.2) or equal (-6.8 +- 0.2))
        // Double +- Float
        sevenDotOh should (equal (7.1 +- 0.2f) or equal (7.1 +- 0.2f))
        sevenDotOh should (equal (6.9 +- 0.2f) or equal (6.9 +- 0.2f))
        sevenDotOh should (equal (7.0 +- 0.2f) or equal (7.0 +- 0.2f))
        sevenDotOh should (equal (7.2 +- 0.2f) or equal (7.2 +- 0.2f))
        sevenDotOh should (equal (6.8 +- 0.2f) or equal (6.8 +- 0.2f))
        minusSevenDotOh should (equal (-7.1 +- 0.2f) or equal (-7.1 +- 0.2f))
        minusSevenDotOh should (equal (-6.9 +- 0.2f) or equal (-6.9 +- 0.2f))
        minusSevenDotOh should (equal (-7.0 +- 0.2f) or equal (-7.0 +- 0.2f))
        minusSevenDotOh should (equal (-7.2 +- 0.2f) or equal (-7.2 +- 0.2f))
        minusSevenDotOh should (equal (-6.8 +- 0.2f) or equal (-6.8 +- 0.2f))
        // Double +- Long
        sevenDotOh should (equal (7.1 +- 2L) or equal (7.1 +- 2L))
        sevenDotOh should (equal (6.9 +- 2L) or equal (6.9 +- 2L))
        sevenDotOh should (equal (7.0 +- 2L) or equal (7.0 +- 2L))
        sevenDotOh should (equal (7.2 +- 2L) or equal (7.2 +- 2L))
        sevenDotOh should (equal (6.8 +- 2L) or equal (6.8 +- 2L))
        minusSevenDotOh should (equal (-7.1 +- 2L) or equal (-7.1 +- 2L))
        minusSevenDotOh should (equal (-6.9 +- 2L) or equal (-6.9 +- 2L))
        minusSevenDotOh should (equal (-7.0 +- 2L) or equal (-7.0 +- 2L))
        minusSevenDotOh should (equal (-7.2 +- 2L) or equal (-7.2 +- 2L))
        minusSevenDotOh should (equal (-6.8 +- 2L) or equal (-6.8 +- 2L))
        // Double +- Int
        sevenDotOh should (equal (7.1 +- 2) or equal (7.1 +- 2))
        sevenDotOh should (equal (6.9 +- 2) or equal (6.9 +- 2))
        sevenDotOh should (equal (7.0 +- 2) or equal (7.0 +- 2))
        sevenDotOh should (equal (7.2 +- 2) or equal (7.2 +- 2))
        sevenDotOh should (equal (6.8 +- 2) or equal (6.8 +- 2))
        minusSevenDotOh should (equal (-7.1 +- 2) or equal (-7.1 +- 2))
        minusSevenDotOh should (equal (-6.9 +- 2) or equal (-6.9 +- 2))
        minusSevenDotOh should (equal (-7.0 +- 2) or equal (-7.0 +- 2))
        minusSevenDotOh should (equal (-7.2 +- 2) or equal (-7.2 +- 2))
        minusSevenDotOh should (equal (-6.8 +- 2) or equal (-6.8 +- 2))
        // Double +- Short
        sevenDotOh should (equal (7.1 +- 2.toShort) or equal (7.1 +- 2.toShort))
        sevenDotOh should (equal (6.9 +- 2.toShort) or equal (6.9 +- 2.toShort))
        sevenDotOh should (equal (7.0 +- 2.toShort) or equal (7.0 +- 2.toShort))
        sevenDotOh should (equal (7.2 +- 2.toShort) or equal (7.2 +- 2.toShort))
        sevenDotOh should (equal (6.8 +- 2.toShort) or equal (6.8 +- 2.toShort))
        minusSevenDotOh should (equal (-7.1 +- 2.toShort) or equal (-7.1 +- 2.toShort))
        minusSevenDotOh should (equal (-6.9 +- 2.toShort) or equal (-6.9 +- 2.toShort))
        minusSevenDotOh should (equal (-7.0 +- 2.toShort) or equal (-7.0 +- 2.toShort))
        minusSevenDotOh should (equal (-7.2 +- 2.toShort) or equal (-7.2 +- 2.toShort))
        minusSevenDotOh should (equal (-6.8 +- 2.toShort) or equal (-6.8 +- 2.toShort))
        // Double +- Byte
        sevenDotOh should (equal (7.1 +- 2.toByte) or equal (7.1 +- 2.toByte))
        sevenDotOh should (equal (6.9 +- 2.toByte) or equal (6.9 +- 2.toByte))
        sevenDotOh should (equal (7.0 +- 2.toByte) or equal (7.0 +- 2.toByte))
        sevenDotOh should (equal (7.2 +- 2.toByte) or equal (7.2 +- 2.toByte))
        sevenDotOh should (equal (6.8 +- 2.toByte) or equal (6.8 +- 2.toByte))
        minusSevenDotOh should (equal (-7.1 +- 2.toByte) or equal (-7.1 +- 2.toByte))
        minusSevenDotOh should (equal (-6.9 +- 2.toByte) or equal (-6.9 +- 2.toByte))
        minusSevenDotOh should (equal (-7.0 +- 2.toByte) or equal (-7.0 +- 2.toByte))
        minusSevenDotOh should (equal (-7.2 +- 2.toByte) or equal (-7.2 +- 2.toByte))
        minusSevenDotOh should (equal (-6.8 +- 2.toByte) or equal (-6.8 +- 2.toByte))
        // Float +- Float
        sevenDotOhFloat should (equal (7.1f +- 0.2f) or equal (7.1f +- 0.2f))
        sevenDotOhFloat should (equal (6.9f +- 0.2f) or equal (6.9f +- 0.2f))
        sevenDotOhFloat should (equal (7.0f +- 0.2f) or equal (7.0f +- 0.2f))
        sevenDotOhFloat should (equal (7.2f +- 0.2f) or equal (7.2f +- 0.2f))
        sevenDotOhFloat should (equal (6.8f +- 0.2f) or equal (6.8f +- 0.2f))
        minusSevenDotOhFloat should (equal (-7.1f +- 0.2f) or equal (-7.1f +- 0.2f))
        minusSevenDotOhFloat should (equal (-6.9f +- 0.2f) or equal (-6.9f +- 0.2f))
        minusSevenDotOhFloat should (equal (-7.0f +- 0.2f) or equal (-7.0f +- 0.2f))
        minusSevenDotOhFloat should (equal (-7.2f +- 0.2f) or equal (-7.2f +- 0.2f))
        minusSevenDotOhFloat should (equal (-6.8f +- 0.2f) or equal (-6.8f +- 0.2f))
        // Float +- Long
        sevenDotOhFloat should (equal (7.1f +- 2L) or equal (7.1f +- 2L))
        sevenDotOhFloat should (equal (6.9f +- 2L) or equal (6.9f +- 2L))
        sevenDotOhFloat should (equal (7.0f +- 2L) or equal (7.0f +- 2L))
        sevenDotOhFloat should (equal (7.2f +- 2L) or equal (7.2f +- 2L))
        sevenDotOhFloat should (equal (6.8f +- 2L) or equal (6.8f +- 2L))
        minusSevenDotOhFloat should (equal (-7.1f +- 2L) or equal (-7.1f +- 2L))
        minusSevenDotOhFloat should (equal (-6.9f +- 2L) or equal (-6.9f +- 2L))
        minusSevenDotOhFloat should (equal (-7.0f +- 2L) or equal (-7.0f +- 2L))
        minusSevenDotOhFloat should (equal (-7.2f +- 2L) or equal (-7.2f +- 2L))
        minusSevenDotOhFloat should (equal (-6.8f +- 2L) or equal (-6.8f +- 2L))
        // Float +- Int
        sevenDotOhFloat should (equal (7.1f +- 2) or equal (7.1f +- 2))
        sevenDotOhFloat should (equal (6.9f +- 2) or equal (6.9f +- 2))
        sevenDotOhFloat should (equal (7.0f +- 2) or equal (7.0f +- 2))
        sevenDotOhFloat should (equal (7.2f +- 2) or equal (7.2f +- 2))
        sevenDotOhFloat should (equal (6.8f +- 2) or equal (6.8f +- 2))
        minusSevenDotOhFloat should (equal (-7.1f +- 2) or equal (-7.1f +- 2))
        minusSevenDotOhFloat should (equal (-6.9f +- 2) or equal (-6.9f +- 2))
        minusSevenDotOhFloat should (equal (-7.0f +- 2) or equal (-7.0f +- 2))
        minusSevenDotOhFloat should (equal (-7.2f +- 2) or equal (-7.2f +- 2))
        minusSevenDotOhFloat should (equal (-6.8f +- 2) or equal (-6.8f +- 2))
        // Float +- Short
        sevenDotOhFloat should (equal (7.1f +- 2.toShort) or equal (7.1f +- 2.toShort))
        sevenDotOhFloat should (equal (6.9f +- 2.toShort) or equal (6.9f +- 2.toShort))
        sevenDotOhFloat should (equal (7.0f +- 2.toShort) or equal (7.0f +- 2.toShort))
        sevenDotOhFloat should (equal (7.2f +- 2.toShort) or equal (7.2f +- 2.toShort))
        sevenDotOhFloat should (equal (6.8f +- 2.toShort) or equal (6.8f +- 2.toShort))
        minusSevenDotOhFloat should (equal (-7.1f +- 2.toShort) or equal (-7.1f +- 2.toShort))
        minusSevenDotOhFloat should (equal (-6.9f +- 2.toShort) or equal (-6.9f +- 2.toShort))
        minusSevenDotOhFloat should (equal (-7.0f +- 2.toShort) or equal (-7.0f +- 2.toShort))
        minusSevenDotOhFloat should (equal (-7.2f +- 2.toShort) or equal (-7.2f +- 2.toShort))
        minusSevenDotOhFloat should (equal (-6.8f +- 2.toShort) or equal (-6.8f +- 2.toShort))
        // Float +- Byte
        sevenDotOhFloat should (equal (7.1f +- 2.toByte) or equal (7.1f +- 2.toByte))
        sevenDotOhFloat should (equal (6.9f +- 2.toByte) or equal (6.9f +- 2.toByte))
        sevenDotOhFloat should (equal (7.0f +- 2.toByte) or equal (7.0f +- 2.toByte))
        sevenDotOhFloat should (equal (7.2f +- 2.toByte) or equal (7.2f +- 2.toByte))
        sevenDotOhFloat should (equal (6.8f +- 2.toByte) or equal (6.8f +- 2.toByte))
        minusSevenDotOhFloat should (equal (-7.1f +- 2.toByte) or equal (-7.1f +- 2.toByte))
        minusSevenDotOhFloat should (equal (-6.9f +- 2.toByte) or equal (-6.9f +- 2.toByte))
        minusSevenDotOhFloat should (equal (-7.0f +- 2.toByte) or equal (-7.0f +- 2.toByte))
        minusSevenDotOhFloat should (equal (-7.2f +- 2.toByte) or equal (-7.2f +- 2.toByte))
        minusSevenDotOhFloat should (equal (-6.8f +- 2.toByte) or equal (-6.8f +- 2.toByte))
        // Long +- Long
        sevenLong should (equal (9L +- 2L) or equal (9L +- 2L))
        sevenLong should (equal (8L +- 2L) or equal (8L +- 2L))
        sevenLong should (equal (7L +- 2L) or equal (7L +- 2L))
        sevenLong should (equal (6L +- 2L) or equal (6L +- 2L))
        sevenLong should (equal (5L +- 2L) or equal (5L +- 2L))
        minusSevenLong should (equal (-9L +- 2L) or equal (-9L +- 2L))
        minusSevenLong should (equal (-8L +- 2L) or equal (-8L +- 2L))
        minusSevenLong should (equal (-7L +- 2L) or equal (-7L +- 2L))
        minusSevenLong should (equal (-6L +- 2L) or equal (-6L +- 2L))
        minusSevenLong should (equal (-5L +- 2L) or equal (-5L +- 2L))
        // Long +- Int
        sevenLong should (equal (9L +- 2) or equal (9L +- 2))
        sevenLong should (equal (8L +- 2) or equal (8L +- 2))
        sevenLong should (equal (7L +- 2) or equal (7L +- 2))
        sevenLong should (equal (6L +- 2) or equal (6L +- 2))
        sevenLong should (equal (5L +- 2) or equal (5L +- 2))
        minusSevenLong should (equal (-9L +- 2) or equal (-9L +- 2))
        minusSevenLong should (equal (-8L +- 2) or equal (-8L +- 2))
        minusSevenLong should (equal (-7L +- 2) or equal (-7L +- 2))
        minusSevenLong should (equal (-6L +- 2) or equal (-6L +- 2))
        minusSevenLong should (equal (-5L +- 2) or equal (-5L +- 2))
        // Long +- Short
        sevenLong should (equal (9L +- 2.toShort) or equal (9L +- 2.toShort))
        sevenLong should (equal (8L +- 2.toShort) or equal (8L +- 2.toShort))
        sevenLong should (equal (7L +- 2.toShort) or equal (7L +- 2.toShort))
        sevenLong should (equal (6L +- 2.toShort) or equal (6L +- 2.toShort))
        sevenLong should (equal (5L +- 2.toShort) or equal (5L +- 2.toShort))
        minusSevenLong should (equal (-9L +- 2.toShort) or equal (-9L +- 2.toShort))
        minusSevenLong should (equal (-8L +- 2.toShort) or equal (-8L +- 2.toShort))
        minusSevenLong should (equal (-7L +- 2.toShort) or equal (-7L +- 2.toShort))
        minusSevenLong should (equal (-6L +- 2.toShort) or equal (-6L +- 2.toShort))
        minusSevenLong should (equal (-5L +- 2.toShort) or equal (-5L +- 2.toShort))
        // Long +- Byte
        sevenLong should (equal (9L +- 2.toByte) or equal (9L +- 2.toByte))
        sevenLong should (equal (8L +- 2.toByte) or equal (8L +- 2.toByte))
        sevenLong should (equal (7L +- 2.toByte) or equal (7L +- 2.toByte))
        sevenLong should (equal (6L +- 2.toByte) or equal (6L +- 2.toByte))
        sevenLong should (equal (5L +- 2.toByte) or equal (5L +- 2.toByte))
        minusSevenLong should (equal (-9L +- 2.toByte) or equal (-9L +- 2.toByte))
        minusSevenLong should (equal (-8L +- 2.toByte) or equal (-8L +- 2.toByte))
        minusSevenLong should (equal (-7L +- 2.toByte) or equal (-7L +- 2.toByte))
        minusSevenLong should (equal (-6L +- 2.toByte) or equal (-6L +- 2.toByte))
        minusSevenLong should (equal (-5L +- 2.toByte) or equal (-5L +- 2.toByte))
        // Int +- Int
        sevenInt should (equal (9 +- 2) or equal (9 +- 2))
        sevenInt should (equal (8 +- 2) or equal (8 +- 2))
        sevenInt should (equal (7 +- 2) or equal (7 +- 2))
        sevenInt should (equal (6 +- 2) or equal (6 +- 2))
        sevenInt should (equal (5 +- 2) or equal (5 +- 2))
        minusSevenInt should (equal (-9 +- 2) or equal (-9 +- 2))
        minusSevenInt should (equal (-8 +- 2) or equal (-8 +- 2))
        minusSevenInt should (equal (-7 +- 2) or equal (-7 +- 2))
        minusSevenInt should (equal (-6 +- 2) or equal (-6 +- 2))
        minusSevenInt should (equal (-5 +- 2) or equal (-5 +- 2))
        // Int +- Short
        sevenInt should (equal (9 +- 2.toShort) or equal (9 +- 2.toShort))
        sevenInt should (equal (8 +- 2.toShort) or equal (8 +- 2.toShort))
        sevenInt should (equal (7 +- 2.toShort) or equal (7 +- 2.toShort))
        sevenInt should (equal (6 +- 2.toShort) or equal (6 +- 2.toShort))
        sevenInt should (equal (5 +- 2.toShort) or equal (5 +- 2.toShort))
        minusSevenInt should (equal (-9 +- 2.toShort) or equal (-9 +- 2.toShort))
        minusSevenInt should (equal (-8 +- 2.toShort) or equal (-8 +- 2.toShort))
        minusSevenInt should (equal (-7 +- 2.toShort) or equal (-7 +- 2.toShort))
        minusSevenInt should (equal (-6 +- 2.toShort) or equal (-6 +- 2.toShort))
        minusSevenInt should (equal (-5 +- 2.toShort) or equal (-5 +- 2.toShort))
        // Int +- Byte
        sevenInt should (equal (9 +- 2.toByte) or equal (9 +- 2.toByte))
        sevenInt should (equal (8 +- 2.toByte) or equal (8 +- 2.toByte))
        sevenInt should (equal (7 +- 2.toByte) or equal (7 +- 2.toByte))
        sevenInt should (equal (6 +- 2.toByte) or equal (6 +- 2.toByte))
        sevenInt should (equal (5 +- 2.toByte) or equal (5 +- 2.toByte))
        minusSevenInt should (equal (-9 +- 2.toByte) or equal (-9 +- 2.toByte))
        minusSevenInt should (equal (-8 +- 2.toByte) or equal (-8 +- 2.toByte))
        minusSevenInt should (equal (-7 +- 2.toByte) or equal (-7 +- 2.toByte))
        minusSevenInt should (equal (-6 +- 2.toByte) or equal (-6 +- 2.toByte))
        minusSevenInt should (equal (-5 +- 2.toByte) or equal (-5 +- 2.toByte))
        // Short +- Short
        sevenShort should (equal (9.toShort +- 2.toShort) or equal (9.toShort +- 2.toShort))
        sevenShort should (equal (8.toShort +- 2.toShort) or equal (8.toShort +- 2.toShort))
        sevenShort should (equal (7.toShort +- 2.toShort) or equal (7.toShort +- 2.toShort))
        sevenShort should (equal (6.toShort +- 2.toShort) or equal (6.toShort +- 2.toShort))
        sevenShort should (equal (5.toShort +- 2.toShort) or equal (5.toShort +- 2.toShort))
        minusSevenShort should (equal ((-9).toShort +- 2.toShort) or equal ((-9).toShort +- 2.toShort))
        minusSevenShort should (equal ((-8).toShort +- 2.toShort) or equal ((-8).toShort +- 2.toShort))
        minusSevenShort should (equal ((-7).toShort +- 2.toShort) or equal ((-7).toShort +- 2.toShort))
        minusSevenShort should (equal ((-6).toShort +- 2.toShort) or equal ((-6).toShort +- 2.toShort))
        minusSevenShort should (equal ((-5).toShort +- 2.toShort) or equal ((-5).toShort +- 2.toShort))
        // Short +- Byte
        sevenShort should (equal (9.toShort +- 2.toByte) or equal (9.toShort +- 2.toByte))
        sevenShort should (equal (8.toShort +- 2.toByte) or equal (8.toShort +- 2.toByte))
        sevenShort should (equal (7.toShort +- 2.toByte) or equal (7.toShort +- 2.toByte))
        sevenShort should (equal (6.toShort +- 2.toByte) or equal (6.toShort +- 2.toByte))
        sevenShort should (equal (5.toShort +- 2.toByte) or equal (5.toShort +- 2.toByte))
        minusSevenShort should (equal ((-9).toShort +- 2.toByte) or equal ((-9).toShort +- 2.toByte))
        minusSevenShort should (equal ((-8).toShort +- 2.toByte) or equal ((-8).toShort +- 2.toByte))
        minusSevenShort should (equal ((-7).toShort +- 2.toByte) or equal ((-7).toShort +- 2.toByte))
        minusSevenShort should (equal ((-6).toShort +- 2.toByte) or equal ((-6).toShort +- 2.toByte))
        minusSevenShort should (equal ((-5).toShort +- 2.toByte) or equal ((-5).toShort +- 2.toByte))
        // Byte +- Byte
        sevenByte should (equal (9.toByte +- 2.toByte) or equal (9.toByte +- 2.toByte))
        sevenByte should (equal (8.toByte +- 2.toByte) or equal (8.toByte +- 2.toByte))
        sevenByte should (equal (7.toByte +- 2.toByte) or equal (7.toByte +- 2.toByte))
        sevenByte should (equal (6.toByte +- 2.toByte) or equal (6.toByte +- 2.toByte))
        sevenByte should (equal (5.toByte +- 2.toByte) or equal (5.toByte +- 2.toByte))
        minusSevenByte should (equal ((-9).toByte +- 2.toByte) or equal ((-9).toByte +- 2.toByte))
        minusSevenByte should (equal ((-8).toByte +- 2.toByte) or equal ((-8).toByte +- 2.toByte))
        minusSevenByte should (equal ((-7).toByte +- 2.toByte) or equal ((-7).toByte +- 2.toByte))
        minusSevenByte should (equal ((-6).toByte +- 2.toByte) or equal ((-6).toByte +- 2.toByte))
        minusSevenByte should (equal ((-5).toByte +- 2.toByte) or equal ((-5).toByte +- 2.toByte))
      }
      // The three statements deliberately exercise three different syntactic
      // forms of composing `not equal (x +- y)` with `and` (braced matchers,
      // mixed brace/paren, and fully parenthesized); all must pass since
      // |1 - 3| = 2 is outside the tolerance of 1.
      it("should succeed when not equal and used in a logical-and expression with not") {
        1 should { not { equal (3 +- 1) } and not { equal (3 +- 1) }}
        1 should { not equal (3 +- 1) and (not equal (3 +- 1)) }
        1 should (not equal (3 +- 1) and not equal (3 +- 1))
      }
      // Same three syntactic variants as the logical-and case above, but
      // composed with `or`; each clause passes on its own, so the whole
      // expression passes regardless of short-circuiting.
      it("should succeed when not equal and used in a logical-or expression with not") {
        1 should { not { equal (3 +- 1) } or not { equal (3 +- 1) }}
        1 should { not equal (3 +- 1) or (not equal (3 +- 1)) }
        1 should (not equal (3 +- 1) or not equal (3 +- 1))
      }
      // When the left clause of an `and` fails, the failure message mentions
      // only that clause (the asserted message omits the right-hand matcher).
      it("should throw a TFE when not equal and used in a logical-and expression") {
        val caught = intercept[TestFailedException] {
          1 should { equal (5 +- 1) and equal (2 +- 1) }
        }
        assert(caught.getMessage === "1 did not equal 5 plus or minus 1")
      }
      // With `or`, both clauses must fail for the expression to fail, and the
      // resulting message reports both failures joined by ", and".
      it("should throw a TFE when not equal and used in a logical-or expression") {
        val caught = intercept[TestFailedException] {
          1 should { equal (5 +- 1) or equal (4 +- 1) }
        }
        assert(caught.getMessage === "1 did not equal 5 plus or minus 1, and 1 did not equal 4 plus or minus 1")
      }
it("should throw a TFE when equal and used in a logical-and expression with not") {
val caught1 = intercept[TestFailedException] {
1 should { not { equal (2 +- 1) } and not { equal (3 - 1) }}
}
assert(caught1.getMessage === "1 equaled 2 plus or minus 1")
val caught2 = intercept[TestFailedException] {
1 should { not equal (2 +- 1) and (not equal (3 - 1)) }
}
assert(caught2.getMessage === "1 equaled 2 plus or minus 1")
val caught3 = intercept[TestFailedException] {
1 should (not equal (2 +- 1) and not equal (3 - 1))
}
assert(caught3.getMessage === "1 equaled 2 plus or minus 1")
val caught4 = intercept[TestFailedException] {
1 should { not { equal (3 +- 1) } and not { equal (2 +- 1) }}
}
assert(caught4.getMessage === "1 did not equal 3 plus or minus 1, but 1 equaled 2 plus or minus 1")
val caught5 = intercept[TestFailedException] {
1 should { not equal (3 +- 1) and (not equal (2 +- 1)) }
}
assert(caught5.getMessage === "1 did not equal 3 plus or minus 1, but 1 equaled 2 plus or minus 1")
val caught6 = intercept[TestFailedException] {
1 should (not equal (3 +- 1) and not equal (2 +- 1))
}
assert(caught6.getMessage === "1 did not equal 3 plus or minus 1, but 1 equaled 2 plus or minus 1")
}
      // Both negated clauses fail (1 lies within 2 +- 1), so the `or` fails
      // and the message reports both failures joined by ", and". Each of the
      // three statements uses a different brace/paren syntax variant.
      it("should throw a TFE when equal and used in a logical-or expression with not") {
        val caught1 = intercept[TestFailedException] {
          1 should { not { equal (2 +- 1) } or not { equal (2 +- 1) }}
        }
        assert(caught1.getMessage === "1 equaled 2 plus or minus 1, and 1 equaled 2 plus or minus 1")
        val caught2 = intercept[TestFailedException] {
          1 should { not equal (2 +- 1) or { not equal (2 +- 1) }}
        }
        assert(caught2.getMessage === "1 equaled 2 plus or minus 1, and 1 equaled 2 plus or minus 1")
        val caught3 = intercept[TestFailedException] {
          1 should (not equal (2 +- 1) or not equal (2 +- 1))
        }
        assert(caught3.getMessage === "1 equaled 2 plus or minus 1, and 1 equaled 2 plus or minus 1")
      }
}
  describe("The X +- Y syntax") {
    // NOTE(review): the paired "// SKIP-SCALATESTJS,NATIVE-START/END" and
    // "//SCALATESTJS,NATIVE-ONLY" comments below appear to be platform-conditional
    // compilation markers (each JVM-only assert is paired with a JS/Native variant),
    // so do not edit, reorder, or reflow them. They exist because an integral
    // tolerance is rendered widened ("-2.0") in the JVM assertions but unwidened
    // ("-2") in the JS/Native variants.
    it("should throw IllegalArgumentException if the number passed to the right is 0 or negative") {
      // Exhaustively checks every supported (pivot type, tolerance type) pairing:
      // a zero-or-negative tolerance radius must be rejected by `+-` with an
      // IllegalArgumentException carrying an explanatory message.
      // Double +- Double
      val caught1 = intercept[IllegalArgumentException] {
        sevenDotOh should equal (7.1 +- -0.2)
      }
      assert(caught1.getMessage === "-0.2 passed to +- was zero or negative. Must be a positive non-zero number.", caught1.getMessage)
      // Double +- Float
      val caught2 = intercept[IllegalArgumentException] {
        sevenDotOh should equal (7.1 +- -0.2f)
      }
      assert(caught2.getMessage === "-0.20000000298023224 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Double +- Long
      val caught3 = intercept[IllegalArgumentException] {
        sevenDotOh should equal (7.1 +- -2L)
      }
      // SKIP-SCALATESTJS,NATIVE-START
      assert(caught3.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
      // SKIP-SCALATESTJS,NATIVE-END
      //SCALATESTJS,NATIVE-ONLY assert(caught3.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Double +- Int
      val caught4 = intercept[IllegalArgumentException] {
        sevenDotOh should equal (7.1 +- -2)
      }
      // SKIP-SCALATESTJS,NATIVE-START
      assert(caught4.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
      // SKIP-SCALATESTJS,NATIVE-END
      //SCALATESTJS,NATIVE-ONLY assert(caught4.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Double +- Short
      val caught5 = intercept[IllegalArgumentException] {
        sevenDotOh should equal (7.1 +- (-2).toShort)
      }
      // SKIP-SCALATESTJS,NATIVE-START
      assert(caught5.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
      // SKIP-SCALATESTJS,NATIVE-END
      //SCALATESTJS,NATIVE-ONLY assert(caught5.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Double +- Byte
      val caught6 = intercept[IllegalArgumentException] {
        sevenDotOh should equal (7.1 +- (-2).toByte)
      }
      // SKIP-SCALATESTJS,NATIVE-START
      assert(caught6.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
      // SKIP-SCALATESTJS,NATIVE-END
      //SCALATESTJS,NATIVE-ONLY assert(caught6.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Float +- Float
      val caught7 = intercept[IllegalArgumentException] {
        sevenDotOhFloat should equal (7.1f +- -0.2f)
      }
      // Expected message is built from the float literal itself to stay independent
      // of how -0.2f happens to be rendered on this platform.
      assert(caught7.getMessage === -0.2f + " passed to +- was zero or negative. Must be a positive non-zero number.")
      // Float +- Long
      val caught8 = intercept[IllegalArgumentException] {
        sevenDotOhFloat should equal (7.1f +- -2L)
      }
      // SKIP-SCALATESTJS,NATIVE-START
      assert(caught8.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
      // SKIP-SCALATESTJS,NATIVE-END
      //SCALATESTJS,NATIVE-ONLY assert(caught8.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Float +- Int
      val caught9 = intercept[IllegalArgumentException] {
        sevenDotOhFloat should equal (7.1f +- -2)
      }
      // SKIP-SCALATESTJS,NATIVE-START
      assert(caught9.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
      // SKIP-SCALATESTJS,NATIVE-END
      //SCALATESTJS,NATIVE-ONLY assert(caught9.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Float +- Short
      val caught10 = intercept[IllegalArgumentException] {
        sevenDotOhFloat should equal (7.1f +- (-2).toShort)
      }
      // SKIP-SCALATESTJS,NATIVE-START
      assert(caught10.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
      // SKIP-SCALATESTJS,NATIVE-END
      //SCALATESTJS,NATIVE-ONLY assert(caught10.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Float +- Byte
      val caught11 = intercept[IllegalArgumentException] {
        sevenDotOhFloat should equal (7.1f +- (-2).toByte)
      }
      // SKIP-SCALATESTJS,NATIVE-START
      assert(caught11.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
      // SKIP-SCALATESTJS,NATIVE-END
      //SCALATESTJS,NATIVE-ONLY assert(caught11.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Integral pivots below: the message renders the tolerance unwidened ("-2")
      // on all platforms, so no JS/Native variants are needed from here on.
      // Long +- Long
      val caught12 = intercept[IllegalArgumentException] {
        sevenLong should equal (9L +- -2L)
      }
      assert(caught12.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Long +- Int
      val caught13 = intercept[IllegalArgumentException] {
        sevenLong should equal (9L +- -2)
      }
      assert(caught13.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Long +- Short
      val caught14 = intercept[IllegalArgumentException] {
        sevenLong should equal (9L +- (-2).toShort)
      }
      assert(caught14.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Long +- Byte
      val caught15 = intercept[IllegalArgumentException] {
        sevenLong should equal (9L +- (-2).toByte)
      }
      assert(caught15.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Int +- Int
      val caught16 = intercept[IllegalArgumentException] {
        sevenInt should equal (9 +- -2)
      }
      assert(caught16.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Int +- Short
      val caught17 = intercept[IllegalArgumentException] {
        sevenInt should equal (9 +- (-2).toShort)
      }
      assert(caught17.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Int +- Byte
      val caught18 = intercept[IllegalArgumentException] {
        sevenInt should equal (9 +- (-2).toByte)
      }
      assert(caught18.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Short +- Short
      val caught19 = intercept[IllegalArgumentException] {
        sevenShort should equal (9.toShort +- (-2).toShort)
      }
      assert(caught19.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Short +- Byte
      val caught20 = intercept[IllegalArgumentException] {
        sevenShort should equal (9.toShort +- (-2).toByte)
      }
      assert(caught20.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
      // Byte +- Byte
      val caught21 = intercept[IllegalArgumentException] {
        sevenByte should equal (9.toByte +- (-2).toByte)
      }
      assert(caught21.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
    }
  }
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/ShouldEqualToleranceSpec.scala | Scala | apache-2.0 | 89,637 |
package nexus.diff
import nexus._
/**
* A parameter of a model.
* @param value Initial value of this parameter
* @note A `Param` has to be differentiable by providing a `Grad[X]` instance as its type tag.
*/
case class Param[X] private(var value: X, name: String)(implicit grad: Grad[X]) extends Symbolic[X] with Traced[X] {

  /**
   * Overwrites this parameter's value with an untyped one.
   * Circumvents typechecking issues when loading from a parameter map;
   * the caller is responsible for supplying a value of the right runtime type.
   */
  private[nexus] def assign_!(newValue: Any): Unit =
    value = newValue.asInstanceOf[X]

  /** A parameter always requires a gradient — or else, how could it be updated? */
  final def requireGrad: Boolean = true

  /** Runtime tag witnessing that `X` is differentiable (built from the `Grad[X]` evidence). */
  def tag: Tag.Aux[X, Grad] = Tag of grad

  /** Accumulates `g` into this parameter's value, mutating in place when `grad` supports it. */
  def +=(g: X): Unit =
    if (grad.mutable) grad.addInplace(value, g)
    else value = grad.add(value, g)

  /** Subtracts `g` from this parameter's value (implemented as adding its negation). */
  def -=(g: X): Unit = this += grad.neg(g)

  /**
   * Coerces this parameter into one of the execution boxes ([[Symbolic]] / [[Traced]]).
   */
  def as[F[_]](implicit F: Algebra[F]): F[X] = F.fromParam(this)

  override def toString: String = name
}
object Param {

  /**
   * Creates a parameter whose name is captured automatically from the enclosing
   * Scala identifier via `sourcecode.Name`.
   */
  def apply[X](value: X)(implicit name: sourcecode.Name, grad: Grad[X]): Param[X] =
    Param(value, name.value)(grad)

  /** Creates a parameter with an explicitly supplied name. */
  def apply[X](value: X, name: String)(implicit grad: Grad[X]): Param[X] =
    new Param[X](value, name)(grad)
}
| ctongfei/nexus | diff/src/main/scala/nexus/diff/Param.scala | Scala | mit | 1,326 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and returns a sample of Scala code snippets that match specific criteria, giving a quick surface-level overview of the dataset's contents.