| code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5–1M) |
|---|---|---|---|---|---|
package org.dbpedia.lookup.inputformat
/** A source of RDF statements, exposed as a collection of (subject, predicate, object) string triples. */
trait InputFormat extends Traversable[(String, String, String)] {
}
|
dbpedia/lookup
|
src/main/scala/org/dbpedia/lookup/inputformat/InputFormat.scala
|
Scala
|
apache-2.0
| 109
|
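A trait this small is clearest with a concrete instance next to it. Below is a minimal sketch of an implementation backed by an in-memory sequence; the class name and the (subject, predicate, object) reading of the tuple are illustrative assumptions, not part of dbpedia/lookup.

package org.dbpedia.lookup.inputformat

// Hypothetical example: Traversable only requires foreach, so an InputFormat
// implementation just has to push each (subject, predicate, object) triple
// through the supplied function.
class SeqInputFormat(triples: Seq[(String, String, String)]) extends InputFormat {
  override def foreach[U](f: ((String, String, String)) => U): Unit = triples.foreach(f)
}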
package vultura.factor.inference.gbp
import org.specs2.mutable.Specification
import vultura.factor.inference.calibration.{LBP, BPResult}
import vultura.factor._
import vultura.factor.generators._
import vultura.propagation._
class ParentToChildTest extends Specification with FactorMatchers {
val p1: Problem = grid(5,5,4).simplify.toRing(LogD)
"compare PTC on bethe RG with ordinary LBP result" >> {
val regularBPResult: BPResult = LBP.infer(p1, tol = 1e-15)
val (ptcResult, status) = ParentToChild.infer(RegionGraph.betheRG(p1), p1, tol = 1e-15)
status.isConverged and p1.variables.map { vi =>
ptcResult.variableBelief(vi) must beSimilarTo(regularBPResult.variableBelief(vi), 1e-12)
}.reduce(_ and _)
}
}
|
ziggystar/vultura-factor
|
src/test/scala/vultura/factor/inference/gbp/ParentToChildTest.scala
|
Scala
|
mit
| 730
|
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.util
import org.junit.Assert._
import org.junit.Test
import scala.concurrent.Future
/**
* User: apassos
* Date: 6/9/13
* Time: 7:51 AM
*/
class TestHyperParameterSearcher {
@Test def testSimpleParamSearch(): Unit = {
class CmdClass extends CmdOptions {
val value = new CmdOption("value", 0.0, "DOUBLE", "A simple value")
}
val cmds = new CmdClass
val uniformHyper = new UniformDoubleSampler(0.0, 1.0)
def test(args: Array[String]) = {
val t = new CmdClass
t.parse(args)
Future.successful(t.value.value)
}
val optimizer = new HyperParameterSearcher(cmds,
Seq(HyperParameter(cmds.value, uniformHyper)),
test, 10, 10, secondsToSleep = 1)
optimizer.optimize()
assertTrue(cmds.value.hasValue)
assertTrue(cmds.value.value > 0.8)
val seqHyper = new SampleFromSeq(Seq(0.0, 0.1, 0.5, 1.0))
val cmds2 = new CmdClass
val optimizer2 = new HyperParameterSearcher(cmds2,
Seq(HyperParameter(cmds2.value, seqHyper)),
test, 10, 10, secondsToSleep = 1)
optimizer2.optimize()
assertTrue(cmds2.value.hasValue)
assertEquals(1.0, cmds2.value.value, 0.0001) // JUnit order: (expected, actual, delta)
}
}
|
patverga/factorie
|
src/test/scala/cc/factorie/util/TestHyperParameterSearcher.scala
|
Scala
|
apache-2.0
| 1,921
|
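The test above drives factorie's HyperParameterSearcher end to end: it samples the value option from a distribution, evaluates each sample via the test function, and keeps the best-scoring setting. Stripped of the CmdOptions plumbing, the core loop it relies on is plain random search. A minimal, library-agnostic sketch (all names here are illustrative, not factorie API):

import scala.util.Random

// Hypothetical sketch of the sample-evaluate-keep-best loop behind a random
// hyperparameter search; not the factorie implementation.
object RandomSearchSketch {
  def search(sample: () => Double, evaluate: Double => Double, trials: Int): Double =
    Seq.fill(trials)(sample()).maxBy(evaluate)

  def main(args: Array[String]): Unit = {
    val rng = new Random(0)
    // The identity objective peaks at 1.0, mirroring the test's expectation
    // that the searcher drives `value` toward the top of the sampled range.
    val best = search(() => rng.nextDouble(), x => x, trials = 10)
    println(f"best sampled value: $best%.3f")
  }
}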
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.magic.builtin
import java.io.PrintStream
import com.google.common.base.Strings
import org.apache.toree.kernel.protocol.v5.MIMEType
import org.apache.toree.magic._
import org.apache.toree.magic.dependencies.IncludeOutputStream
import org.apache.toree.utils.ArgumentParsingSupport
import org.slf4j.LoggerFactory
class JavaScript extends CellMagic with ArgumentParsingSupport
with IncludeOutputStream {
// Lazy because the outputStream is not provided at construction
private lazy val printStream = new PrintStream(outputStream)
override def execute(code: String): CellMagicOutput = {
def printHelpAndReturn: CellMagicOutput = {
printHelp(printStream, """%JavaScript <string_code>""")
CellMagicOutput()
}
if (Strings.isNullOrEmpty(code)) printHelpAndReturn
else CellMagicOutput(MIMEType.ApplicationJavaScript -> code)
}
}
|
asorianostratio/incubator-toree
|
kernel/src/main/scala/org/apache/toree/magic/builtin/JavaScript.scala
|
Scala
|
apache-2.0
| 1,741
|
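When the Toree kernel registers this class, the magic is invoked from a notebook cell: the cell body is handed to execute and, unless empty, echoed back under the application/javascript MIME type, so the front end (not the kernel) runs it in the browser. A typical invocation might look like this (illustrative):

%%JavaScript
alert("executed by the notebook front end, not the kernel")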
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zouzias.spark.lucenerdd
import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.lucene.index.Term
import org.apache.lucene.search.{FuzzyQuery, PrefixQuery}
import org.apache.spark.sql.{Row, SparkSession}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest._
import matchers.should._
import scala.io.Source
case class Country(name: String)
class LuceneRDDRecordLinkageSpec extends AnyFlatSpec
with Matchers
with BeforeAndAfterEach
with SharedSparkContext {
var luceneRDD: LuceneRDD[_] = _
override def afterEach(): Unit = {
luceneRDD.close()
}
val First = "_1"
"LuceneRDD.linkByQuery" should "correctly link with prefix query" in {
val leftCountries = Array("gree", "germa", "spa", "ita")
implicit val mySC = sc
val leftCountriesRDD = sc.parallelize(leftCountries)
val countries = sc.parallelize(Source.fromFile("src/test/resources/countries.txt").getLines()
.map(_.toLowerCase()).toSeq)
luceneRDD = LuceneRDD(countries)
val linker = (country: String) => {
val term = new Term("_1", country)
new PrefixQuery(term)
}
val linked = luceneRDD.linkByQuery(leftCountriesRDD, linker, 10)
linked.count() should equal(leftCountries.length)
// Greece and Greenland should appear
linked.collect().exists(link => link._1 == "gree" && link._2.length == 2) should equal(true)
// Italy should appear
linked.collect().exists(link => link._1 == "ita" && link._2.length == 1) should equal(true)
}
"LuceneRDD.linkByQuery" should "correctly link with prefix query (query / )" in {
val leftCountries = Array("gree", "germa", "spa", "ita")
implicit val mySC = sc
val leftCountriesRDD = sc.parallelize(leftCountries)
val countries = sc.parallelize(Source.fromFile("src/test/resources/countries.txt").getLines()
.map(_.toLowerCase()).toSeq)
luceneRDD = LuceneRDD(countries)
val linker = (country: String) => {
val term = new Term("_1", country)
new PrefixQuery(term)
}
val linked = luceneRDD.linkByQuery(leftCountriesRDD, linker, 10, "cartesian")
linked.count() should equal(leftCountries.length)
// Greece and Greenland should appear
linked.collect().exists(link => link._1 == "gree" && link._2.length == 2) should equal(true)
// Italy should appear
linked.collect().exists(link => link._1 == "ita" && link._2.length == 1) should equal(true)
}
"LuceneRDD.linkByQuery" should "correctly link with query parser (fuzzy)" in {
val leftCountries = Array("gree", "germa", "spa", "ita")
implicit val mySC = sc
val leftCountriesRDD = sc.parallelize(leftCountries)
val countries = sc.parallelize(Source.fromFile("src/test/resources/countries.txt").getLines()
.map(_.toLowerCase()).toSeq)
luceneRDD = LuceneRDD(countries)
val fuzzyLinker = (country: String) => {
val Fuzziness = 2
val term = new Term("_1", country)
new FuzzyQuery(term, Fuzziness)
}
val linked = luceneRDD.linkByQuery(leftCountriesRDD, fuzzyLinker, 10)
linked.count() should equal(leftCountries.size)
// Greece should appear only
linked.collect().exists(link => link._1 == "gree" && link._2.length == 1) should equal(true)
// Italy, Iraq and Iran should appear
linked.collect().exists(link => link._1 == "ita" && link._2.length >= 3) should equal (true)
}
"LuceneRDD.link" should "correctly link with query parser (prefix)" in {
val leftCountries = Array("gree", "germa", "spa", "ita")
implicit val mySC = sc
val leftCountriesRDD = sc.parallelize(leftCountries)
val countries = sc.parallelize(Source.fromFile("src/test/resources/countries.txt").getLines()
.map(_.toLowerCase()).toSeq)
luceneRDD = LuceneRDD(countries)
def linker(country: String): String = {
s"_1:${country}*"
}
val linked = luceneRDD.link(leftCountriesRDD, linker, 10)
linked.count() should equal(leftCountries.length)
// Greece and Greenland should appear
linked.collect().exists(link => link._1 == "gree" && link._2.length == 2) should equal(true)
// Italy should appear
linked.collect().exists(link => link._1 == "ita" && link._2.length == 1) should equal(true)
}
"LuceneRDD.dedup" should "correctly deduplication top result" in {
implicit val mySC = sc
val countries = sc.parallelize(Source.fromFile("src/test/resources/countries.txt").getLines()
.map(_.toLowerCase()).toSeq)
luceneRDD = LuceneRDD(countries)
def linker(country: String): String = {
s"_1:${country}*"
}
val linked = luceneRDD.dedup(linker, 5)
linked.count() should equal(countries.count)
}
"LuceneRDD.linkDataFrame" should "correctly link with query parser (prefix)" in {
val leftCountries = Array("gree", "germa", "spa", "ita")
implicit val mySC = sc
val leftCountriesRDD = sc.parallelize(leftCountries)
implicit val spark = SparkSession.builder().getOrCreate()
import spark.implicits._
val countriesDF = leftCountriesRDD.map(Country).toDF()
val countries = sc.parallelize(Source.fromFile("src/test/resources/countries.txt").getLines()
.map(_.toLowerCase()).toSeq)
luceneRDD = LuceneRDD(countries)
def linker(row: Row): String = {
val country = Option(row.getAs[String]("name")) // Row.get takes an Int index; getAs accepts a field name
country match {
case Some(c) => s"_1:${c}*"
case None => s"_1:*"
}
}
val linked = luceneRDD.linkDataFrame(countriesDF, linker, 10)
linked.count() should equal(leftCountries.length)
// Greece and Greenland should appear
linked.collect().exists(link => link._1.getAs[String]("name") == "gree"
&& link._2.length == 2) should equal(true)
// Italy should appear
linked.collect().exists(link => link._1.getAs[String]("name") == "ita"
&& link._2.length == 1) should equal(true)
}
"LuceneRDD.link" should "correctly link with query parser (fuzzy)" in {
val leftCountries = Array("gree", "germa", "spa", "ita")
implicit val mySC = sc
val leftCountriesRDD = sc.parallelize(leftCountries)
val countries = sc.parallelize(Source.fromFile("src/test/resources/countries.txt").getLines()
.map(_.toLowerCase()).toSeq)
luceneRDD = LuceneRDD(countries)
def fuzzyLinker(country: String): String = {
val Fuzziness = 2
s"_1:${country}~${Fuzziness}"
}
val linked = luceneRDD.link(leftCountriesRDD, fuzzyLinker, 10)
linked.count() should equal(leftCountries.length)
// Greece should appear only
linked.collect.exists(link => link._1 == "gree" && link._2.length == 1) should equal(true)
// At least Italy, Iraq and Iran should appear
linked.collect.exists(link => link._1 == "ita" && link._2.length >= 3) should equal(true)
}
}
|
zouzias/spark-lucenerdd
|
src/test/scala/org/zouzias/spark/lucenerdd/LuceneRDDRecordLinkageSpec.scala
|
Scala
|
apache-2.0
| 7,598
|
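Two query styles appear in this spec: linkByQuery constructs Lucene Query objects directly (PrefixQuery, FuzzyQuery), while link, dedup and linkDataFrame hand a query string to Lucene's query parser. The string forms used above are standard Lucene syntax:

_1:gree*   prefix query on field _1; matches "greece" and "greenland"
_1:gree~2  fuzzy query, edit distance at most 2; matches only "greece"
_1:*       match-all fallback used when a row's name field is null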
// Compile-only check: the import fails unless the xjc-generated class is on
// the test classpath.
object Simon {
import generated.TestGreetingListType
}
|
sbt/sbt-xjc
|
src/sbt-test/sbt-xjc/compile-and-test/src/test/scala/Simon.scala
|
Scala
|
bsd-3-clause
| 57
|
/*
* -------------------------------------------------------------------------------------------------
* - Project: Objectify -
* - Copyright: ©2014 Matygo Educational Incorporated operating as Learndot -
* - Author: Arthur Gonigberg (arthur@learndot.com) and contributors (see contributors.txt) -
* - License: Licensed under MIT license (see license.txt) -
* -------------------------------------------------------------------------------------------------
*/
package org.objectify.resolvers
import org.objectify.adapters.ObjectifyRequestAdapter
/**
* Resolver for query parameters
*/
class QueryParametersResolver extends Resolver[Map[String, List[String]], ObjectifyRequestAdapter] {
def apply(req: ObjectifyRequestAdapter) = {
req.getQueryParameters
}
}
|
learndot/Objectify.scala
|
src/main/scala/org/objectify/resolvers/QueryParametersResolver.scala
|
Scala
|
mit
| 930
|
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.raster.data
import com.google.common.collect.{ImmutableMap, ImmutableSetMultimap}
import com.typesafe.scalalogging.LazyLogging
import org.junit.runner.RunWith
import org.locationtech.geomesa.raster.RasterTestsUtils._
import org.locationtech.geomesa.raster._
import org.locationtech.geomesa.utils.geohash.BoundingBox
import org.specs2.matcher.MatchResult
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class AccumuloRasterQueryPlannerTest extends Specification with LazyLogging {
sequential
val availableResolutions = List[Double](45.0/256.0, 45.0/1024.0)
val dataMap: ImmutableSetMultimap[Double, Int] = ImmutableSetMultimap.of(45.0/256.0, 1, 45.0/1024.0, 1)
val boundsMap: ImmutableMap[Double, BoundingBox] = ImmutableMap.of(45.0/256.0, wholeworld, 45.0/1024.0, wholeworld)
val arqp = AccumuloRasterQueryPlanner
val testCases = List(
(128, 45.0/256.0),
(156, 45.0/256.0),
(201, 45.0/256.0),
(256, 45.0/256.0),
(257, 45.0/1024.0),
(432, 45.0/1024.0),
(512, 45.0/1024.0),
(1000, 45.0/1024.0),
(1024, 45.0/1024.0),
(1025, 45.0/1024.0),
(2000, 45.0/1024.0)
)
def runTest(size: Int, expectedResolution: Double): MatchResult[Double] = {
val q1 = generateQuery(0, 45, 0, 45, 45.0/size)
val qp = arqp.getQueryPlan(q1, dataMap, boundsMap).get
val rangeString = qp.ranges.head.getStartKey.getRow.toString
val encodedDouble = rangeString.split("~")(0)
val queryResolution = lexiDecodeStringToDouble(encodedDouble)
logger.debug(s"Query pixel size: $size. Expected query resolution: $expectedResolution. " +
s"Returned query resolution $queryResolution.")
val roundedResolution = BigDecimal(expectedResolution).round(mc).toDouble
queryResolution should be equalTo roundedResolution
}
"RasterQueryPlanner" should {
"return a valid resolution by rounding down" in {
testCases.map {
case (size, expected) =>
runTest(size, expected)
}
}
}
}
|
vpipkt/geomesa
|
geomesa-raster/src/test/scala/org/locationtech/geomesa/raster/data/AccumuloRasterQueryPlannerTest.scala
|
Scala
|
apache-2.0
| 2,539
|
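All of the expectations in testCases follow from one rule: the requested resolution is 45.0/size degrees per pixel, and the planner "rounds down" by choosing the coarsest available resolution that is still at least as fine as the request. For size <= 256 that is 45/256; from 257 onward only 45/1024 qualifies; past 1024 nothing qualifies and the finest available resolution is used. A minimal sketch of that selection (illustrative, not the GeoMesa implementation):

// Hypothetical sketch: pick the coarsest available resolution that is
// <= the requested one, falling back to the finest resolution available.
val available = List(45.0 / 256.0, 45.0 / 1024.0)
def pick(requestedPixels: Int): Double = {
  val requested = 45.0 / requestedPixels
  val fineEnough = available.filter(_ <= requested)
  if (fineEnough.nonEmpty) fineEnough.max else available.min
}
assert(pick(256) == 45.0 / 256.0)
assert(pick(257) == 45.0 / 1024.0)
assert(pick(2000) == 45.0 / 1024.0)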
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import java.sql.Timestamp
import org.apache.spark.sql.catalyst.analysis.TypeCoercion._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
class TypeCoercionSuite extends AnalysisTest {
// scalastyle:off line.size.limit
// The following table shows all implicit data type conversions that are not visible to the user.
// +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// | Source Type\\CAST TO | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | BooleanType | StringType | DateType | TimestampType | ArrayType | MapType | StructType | NullType | CalendarIntervalType | DecimalType | NumericType | IntegralType |
// +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// | ByteType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(3, 0) | ByteType | ByteType |
// | ShortType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(5, 0) | ShortType | ShortType |
// | IntegerType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(10, 0) | IntegerType | IntegerType |
// | LongType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(20, 0) | LongType | LongType |
// | DoubleType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(30, 15) | DoubleType | IntegerType |
// | FloatType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(14, 7) | FloatType | IntegerType |
// | Dec(10, 2) | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(10, 2) | Dec(10, 2) | IntegerType |
// | BinaryType | X | X | X | X | X | X | X | BinaryType | X | StringType | X | X | X | X | X | X | X | X | X | X |
// | BooleanType | X | X | X | X | X | X | X | X | BooleanType | StringType | X | X | X | X | X | X | X | X | X | X |
// | StringType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | X | StringType | DateType | TimestampType | X | X | X | X | X | DecimalType(38, 18) | DoubleType | X |
// | DateType | X | X | X | X | X | X | X | X | X | StringType | DateType | TimestampType | X | X | X | X | X | X | X | X |
// | TimestampType | X | X | X | X | X | X | X | X | X | StringType | DateType | TimestampType | X | X | X | X | X | X | X | X |
// | ArrayType | X | X | X | X | X | X | X | X | X | X | X | X | ArrayType* | X | X | X | X | X | X | X |
// | MapType | X | X | X | X | X | X | X | X | X | X | X | X | X | MapType* | X | X | X | X | X | X |
// | StructType | X | X | X | X | X | X | X | X | X | X | X | X | X | X | StructType* | X | X | X | X | X |
// | NullType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | BooleanType | StringType | DateType | TimestampType | ArrayType | MapType | StructType | NullType | CalendarIntervalType | DecimalType(38, 18) | DoubleType | IntegerType |
// | CalendarIntervalType | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | CalendarIntervalType | X | X | X |
// +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// Note: MapType*, StructType* are castable only when the internal child types also match; otherwise, not castable.
// Note: ArrayType* is castable when the element type is castable according to the table.
// scalastyle:on line.size.limit
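// Reading the table: each row is a source type, each column the requested
// type. For example, an IntegerType input implicitly casts to LongType,
// DoubleType or StringType, widens to DecimalType(10, 0) when the abstract
// DecimalType is requested, and can never become BinaryType or BooleanType.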
private def shouldCast(from: DataType, to: AbstractDataType, expected: DataType): Unit = {
// Check default value
val castDefault = TypeCoercion.ImplicitTypeCasts.implicitCast(default(from), to)
assert(DataType.equalsIgnoreCompatibleNullability(
castDefault.map(_.dataType).getOrElse(null), expected),
s"Failed to cast $from to $to")
// Check null value
val castNull = TypeCoercion.ImplicitTypeCasts.implicitCast(createNull(from), to)
assert(DataType.equalsIgnoreCaseAndNullability(
castNull.map(_.dataType).getOrElse(null), expected),
s"Failed to cast $from to $to")
}
private def shouldNotCast(from: DataType, to: AbstractDataType): Unit = {
// Check default value
val castDefault = TypeCoercion.ImplicitTypeCasts.implicitCast(default(from), to)
assert(castDefault.isEmpty, s"Should not be able to cast $from to $to, but got $castDefault")
// Check null value
val castNull = TypeCoercion.ImplicitTypeCasts.implicitCast(createNull(from), to)
assert(castNull.isEmpty, s"Should not be able to cast $from to $to, but got $castNull")
}
private def default(dataType: DataType): Expression = dataType match {
case ArrayType(internalType: DataType, _) =>
CreateArray(Seq(Literal.default(internalType)))
case MapType(keyDataType: DataType, valueDataType: DataType, _) =>
CreateMap(Seq(Literal.default(keyDataType), Literal.default(valueDataType)))
case _ => Literal.default(dataType)
}
private def createNull(dataType: DataType): Expression = dataType match {
case ArrayType(internalType: DataType, _) =>
CreateArray(Seq(Literal.create(null, internalType)))
case MapType(keyDataType: DataType, valueDataType: DataType, _) =>
CreateMap(Seq(Literal.create(null, keyDataType), Literal.create(null, valueDataType)))
case _ => Literal.create(null, dataType)
}
val integralTypes: Seq[DataType] =
Seq(ByteType, ShortType, IntegerType, LongType)
val fractionalTypes: Seq[DataType] =
Seq(DoubleType, FloatType, DecimalType.SYSTEM_DEFAULT, DecimalType(10, 2))
val numericTypes: Seq[DataType] = integralTypes ++ fractionalTypes
val atomicTypes: Seq[DataType] =
numericTypes ++ Seq(BinaryType, BooleanType, StringType, DateType, TimestampType)
val complexTypes: Seq[DataType] =
Seq(ArrayType(IntegerType),
ArrayType(StringType),
MapType(StringType, StringType),
new StructType().add("a1", StringType),
new StructType().add("a1", StringType).add("a2", IntegerType))
val allTypes: Seq[DataType] =
atomicTypes ++ complexTypes ++ Seq(NullType, CalendarIntervalType)
// Check whether the type `checkedType` can be cast to all the types in `castableTypes`,
// but cannot be cast to the other types in `allTypes`.
private def checkTypeCasting(checkedType: DataType, castableTypes: Seq[DataType]): Unit = {
val nonCastableTypes = allTypes.filterNot(castableTypes.contains)
castableTypes.foreach { tpe =>
shouldCast(checkedType, tpe, tpe)
}
nonCastableTypes.foreach { tpe =>
shouldNotCast(checkedType, tpe)
}
}
private def checkWidenType(
widenFunc: (DataType, DataType) => Option[DataType],
t1: DataType,
t2: DataType,
expected: Option[DataType],
isSymmetric: Boolean = true): Unit = {
var found = widenFunc(t1, t2)
assert(found == expected,
s"Expected $expected as wider common type for $t1 and $t2, found $found")
// Test both directions to make sure the widening is symmetric.
if (isSymmetric) {
found = widenFunc(t2, t1)
assert(found == expected,
s"Expected $expected as wider common type for $t2 and $t1, found $found")
}
}
test("implicit type cast - ByteType") {
val checkedType = ByteType
checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
shouldCast(checkedType, DecimalType, DecimalType.ByteDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldCast(checkedType, IntegralType, checkedType)
}
test("implicit type cast - ShortType") {
val checkedType = ShortType
checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
shouldCast(checkedType, DecimalType, DecimalType.ShortDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldCast(checkedType, IntegralType, checkedType)
}
test("implicit type cast - IntegerType") {
val checkedType = IntegerType
checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
shouldCast(IntegerType, DecimalType, DecimalType.IntDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldCast(checkedType, IntegralType, checkedType)
}
test("implicit type cast - LongType") {
val checkedType = LongType
checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
shouldCast(checkedType, DecimalType, DecimalType.LongDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldCast(checkedType, IntegralType, checkedType)
}
test("implicit type cast - FloatType") {
val checkedType = FloatType
checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
shouldCast(checkedType, DecimalType, DecimalType.FloatDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - DoubleType") {
val checkedType = DoubleType
checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
shouldCast(checkedType, DecimalType, DecimalType.DoubleDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - DecimalType(10, 2)") {
val checkedType = DecimalType(10, 2)
checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
shouldCast(checkedType, DecimalType, checkedType)
shouldCast(checkedType, NumericType, checkedType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - BinaryType") {
val checkedType = BinaryType
checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - BooleanType") {
val checkedType = BooleanType
checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - StringType") {
val checkedType = StringType
val nonCastableTypes =
complexTypes ++ Seq(BooleanType, NullType, CalendarIntervalType)
checkTypeCasting(checkedType, castableTypes = allTypes.filterNot(nonCastableTypes.contains))
shouldCast(checkedType, DecimalType, DecimalType.SYSTEM_DEFAULT)
shouldCast(checkedType, NumericType, NumericType.defaultConcreteType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - DateType") {
val checkedType = DateType
checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType, TimestampType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - TimestampType") {
val checkedType = TimestampType
checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType, DateType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - ArrayType(StringType)") {
val checkedType = ArrayType(StringType)
val nonCastableTypes =
complexTypes ++ Seq(BooleanType, NullType, CalendarIntervalType)
checkTypeCasting(checkedType,
castableTypes = allTypes.filterNot(nonCastableTypes.contains).map(ArrayType(_)))
nonCastableTypes.map(ArrayType(_)).foreach(shouldNotCast(checkedType, _))
shouldNotCast(ArrayType(DoubleType, containsNull = false),
ArrayType(LongType, containsNull = false))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - MapType(StringType, StringType)") {
val checkedType = MapType(StringType, StringType)
checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - StructType().add(\\"a1\\", StringType)") {
val checkedType = new StructType().add("a1", StringType)
checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("implicit type cast - NullType") {
val checkedType = NullType
checkTypeCasting(checkedType, castableTypes = allTypes)
shouldCast(checkedType, DecimalType, DecimalType.SYSTEM_DEFAULT)
shouldCast(checkedType, NumericType, NumericType.defaultConcreteType)
shouldCast(checkedType, IntegralType, IntegralType.defaultConcreteType)
}
test("implicit type cast - CalendarIntervalType") {
val checkedType = CalendarIntervalType
checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
shouldNotCast(checkedType, DecimalType)
shouldNotCast(checkedType, NumericType)
shouldNotCast(checkedType, IntegralType)
}
test("eligible implicit type cast - TypeCollection") {
shouldCast(NullType, TypeCollection(StringType, BinaryType), StringType)
shouldCast(StringType, TypeCollection(StringType, BinaryType), StringType)
shouldCast(BinaryType, TypeCollection(StringType, BinaryType), BinaryType)
shouldCast(StringType, TypeCollection(BinaryType, StringType), StringType)
shouldCast(IntegerType, TypeCollection(IntegerType, BinaryType), IntegerType)
shouldCast(IntegerType, TypeCollection(BinaryType, IntegerType), IntegerType)
shouldCast(BinaryType, TypeCollection(BinaryType, IntegerType), BinaryType)
shouldCast(BinaryType, TypeCollection(IntegerType, BinaryType), BinaryType)
shouldCast(IntegerType, TypeCollection(StringType, BinaryType), StringType)
shouldCast(IntegerType, TypeCollection(BinaryType, StringType), StringType)
shouldCast(DecimalType.SYSTEM_DEFAULT,
TypeCollection(IntegerType, DecimalType), DecimalType.SYSTEM_DEFAULT)
shouldCast(DecimalType(10, 2), TypeCollection(IntegerType, DecimalType), DecimalType(10, 2))
shouldCast(DecimalType(10, 2), TypeCollection(DecimalType, IntegerType), DecimalType(10, 2))
shouldCast(IntegerType, TypeCollection(DecimalType(10, 2), StringType), DecimalType(10, 2))
shouldCast(StringType, TypeCollection(NumericType, BinaryType), DoubleType)
shouldCast(
ArrayType(StringType, false),
TypeCollection(ArrayType(StringType), StringType),
ArrayType(StringType, false))
shouldCast(
ArrayType(StringType, true),
TypeCollection(ArrayType(StringType), StringType),
ArrayType(StringType, true))
}
test("ineligible implicit type cast - TypeCollection") {
shouldNotCast(IntegerType, TypeCollection(DateType, TimestampType))
}
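// TypeCollection resolution, as exercised above: an exact match anywhere in
// the collection is kept as-is (BinaryType against TypeCollection(IntegerType,
// BinaryType) stays BinaryType); otherwise the input is cast to the first
// member it can be implicitly converted to (IntegerType against
// TypeCollection(StringType, BinaryType) becomes StringType).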
test("tightest common bound for types") {
def widenTest(t1: DataType, t2: DataType, expected: Option[DataType]): Unit =
checkWidenType(TypeCoercion.findTightestCommonType, t1, t2, expected)
// Null
widenTest(NullType, NullType, Some(NullType))
// Boolean
widenTest(NullType, BooleanType, Some(BooleanType))
widenTest(BooleanType, BooleanType, Some(BooleanType))
widenTest(IntegerType, BooleanType, None)
widenTest(LongType, BooleanType, None)
// Integral
widenTest(NullType, ByteType, Some(ByteType))
widenTest(NullType, IntegerType, Some(IntegerType))
widenTest(NullType, LongType, Some(LongType))
widenTest(ShortType, IntegerType, Some(IntegerType))
widenTest(ShortType, LongType, Some(LongType))
widenTest(IntegerType, LongType, Some(LongType))
widenTest(LongType, LongType, Some(LongType))
// Floating point
widenTest(NullType, FloatType, Some(FloatType))
widenTest(NullType, DoubleType, Some(DoubleType))
widenTest(FloatType, DoubleType, Some(DoubleType))
widenTest(FloatType, FloatType, Some(FloatType))
widenTest(DoubleType, DoubleType, Some(DoubleType))
// Integral mixed with floating point.
widenTest(IntegerType, FloatType, Some(FloatType))
widenTest(IntegerType, DoubleType, Some(DoubleType))
widenTest(IntegerType, DoubleType, Some(DoubleType))
widenTest(LongType, FloatType, Some(FloatType))
widenTest(LongType, DoubleType, Some(DoubleType))
// No up-casting for fixed-precision decimal (this is handled by arithmetic rules)
widenTest(DecimalType(2, 1), DecimalType(3, 2), None)
widenTest(DecimalType(2, 1), DoubleType, None)
widenTest(DecimalType(2, 1), IntegerType, None)
widenTest(DoubleType, DecimalType(2, 1), None)
// StringType
widenTest(NullType, StringType, Some(StringType))
widenTest(StringType, StringType, Some(StringType))
widenTest(IntegerType, StringType, None)
widenTest(LongType, StringType, None)
// TimestampType
widenTest(NullType, TimestampType, Some(TimestampType))
widenTest(TimestampType, TimestampType, Some(TimestampType))
widenTest(DateType, TimestampType, Some(TimestampType))
widenTest(IntegerType, TimestampType, None)
widenTest(StringType, TimestampType, None)
// ComplexType
widenTest(NullType,
MapType(IntegerType, StringType, false),
Some(MapType(IntegerType, StringType, false)))
widenTest(NullType, StructType(Seq()), Some(StructType(Seq())))
widenTest(StringType, MapType(IntegerType, StringType, true), None)
widenTest(ArrayType(IntegerType), StructType(Seq()), None)
widenTest(
StructType(Seq(StructField("a", IntegerType))),
StructType(Seq(StructField("b", IntegerType))),
None)
widenTest(
StructType(Seq(StructField("a", IntegerType, nullable = false))),
StructType(Seq(StructField("a", DoubleType, nullable = false))),
None)
widenTest(
StructType(Seq(StructField("a", IntegerType, nullable = false))),
StructType(Seq(StructField("a", IntegerType, nullable = false))),
Some(StructType(Seq(StructField("a", IntegerType, nullable = false)))))
widenTest(
StructType(Seq(StructField("a", IntegerType, nullable = false))),
StructType(Seq(StructField("a", IntegerType, nullable = true))),
Some(StructType(Seq(StructField("a", IntegerType, nullable = true)))))
widenTest(
StructType(Seq(StructField("a", IntegerType, nullable = true))),
StructType(Seq(StructField("a", IntegerType, nullable = false))),
Some(StructType(Seq(StructField("a", IntegerType, nullable = true)))))
widenTest(
StructType(Seq(StructField("a", IntegerType, nullable = true))),
StructType(Seq(StructField("a", IntegerType, nullable = true))),
Some(StructType(Seq(StructField("a", IntegerType, nullable = true)))))
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
widenTest(
StructType(Seq(StructField("a", IntegerType))),
StructType(Seq(StructField("A", IntegerType))),
None)
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
checkWidenType(
TypeCoercion.findTightestCommonType,
StructType(Seq(StructField("a", IntegerType), StructField("B", IntegerType))),
StructType(Seq(StructField("A", IntegerType), StructField("b", IntegerType))),
Some(StructType(Seq(StructField("a", IntegerType), StructField("B", IntegerType)))),
isSymmetric = false)
}
widenTest(
ArrayType(IntegerType, containsNull = true),
ArrayType(IntegerType, containsNull = false),
Some(ArrayType(IntegerType, containsNull = true)))
widenTest(
MapType(IntegerType, StringType, valueContainsNull = true),
MapType(IntegerType, StringType, valueContainsNull = false),
Some(MapType(IntegerType, StringType, valueContainsNull = true)))
widenTest(
new StructType()
.add("arr", ArrayType(IntegerType, containsNull = true), nullable = false),
new StructType()
.add("arr", ArrayType(IntegerType, containsNull = false), nullable = true),
Some(new StructType()
.add("arr", ArrayType(IntegerType, containsNull = true), nullable = true)))
}
test("wider common type for decimal and array") {
def widenTestWithStringPromotion(
t1: DataType,
t2: DataType,
expected: Option[DataType]): Unit = {
checkWidenType(TypeCoercion.findWiderTypeForTwo, t1, t2, expected)
}
def widenTestWithoutStringPromotion(
t1: DataType,
t2: DataType,
expected: Option[DataType]): Unit = {
checkWidenType(TypeCoercion.findWiderTypeWithoutStringPromotionForTwo, t1, t2, expected)
}
// Decimal
widenTestWithStringPromotion(
DecimalType(2, 1), DecimalType(3, 2), Some(DecimalType(3, 2)))
widenTestWithStringPromotion(
DecimalType(2, 1), DoubleType, Some(DoubleType))
widenTestWithStringPromotion(
DecimalType(2, 1), IntegerType, Some(DecimalType(11, 1)))
widenTestWithStringPromotion(
DecimalType(2, 1), LongType, Some(DecimalType(21, 1)))
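// The widened decimal results above follow from treating IntegerType as
// DecimalType(10, 0) and LongType as DecimalType(20, 0), then taking
// scale = max(s1, s2) and precision = max(p1 - s1, p2 - s2) + scale:
// Dec(2, 1) with IntegerType gives precision max(1, 10) + 1 = 11, scale 1.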
// ArrayType
widenTestWithStringPromotion(
ArrayType(ShortType, containsNull = true),
ArrayType(DoubleType, containsNull = false),
Some(ArrayType(DoubleType, containsNull = true)))
widenTestWithStringPromotion(
ArrayType(TimestampType, containsNull = false),
ArrayType(StringType, containsNull = true),
Some(ArrayType(StringType, containsNull = true)))
widenTestWithStringPromotion(
ArrayType(ArrayType(IntegerType), containsNull = false),
ArrayType(ArrayType(LongType), containsNull = false),
Some(ArrayType(ArrayType(LongType), containsNull = false)))
// Without string promotion
widenTestWithoutStringPromotion(IntegerType, StringType, None)
widenTestWithoutStringPromotion(StringType, TimestampType, None)
widenTestWithoutStringPromotion(ArrayType(LongType), ArrayType(StringType), None)
widenTestWithoutStringPromotion(ArrayType(StringType), ArrayType(TimestampType), None)
// String promotion
widenTestWithStringPromotion(IntegerType, StringType, Some(StringType))
widenTestWithStringPromotion(StringType, TimestampType, Some(StringType))
widenTestWithStringPromotion(
ArrayType(LongType), ArrayType(StringType), Some(ArrayType(StringType)))
widenTestWithStringPromotion(
ArrayType(StringType), ArrayType(TimestampType), Some(ArrayType(StringType)))
}
private def ruleTest(rule: Rule[LogicalPlan], initial: Expression, transformed: Expression): Unit = {
ruleTest(Seq(rule), initial, transformed)
}
private def ruleTest(
rules: Seq[Rule[LogicalPlan]],
initial: Expression,
transformed: Expression): Unit = {
val testRelation = LocalRelation(AttributeReference("a", IntegerType)())
val analyzer = new RuleExecutor[LogicalPlan] {
override val batches = Seq(Batch("Resolution", FixedPoint(3), rules: _*))
}
comparePlans(
analyzer.execute(Project(Seq(Alias(initial, "a")()), testRelation)),
Project(Seq(Alias(transformed, "a")()), testRelation))
}
test("cast NullType for expressions that implement ExpectsInputTypes") {
import TypeCoercionSuite._
ruleTest(TypeCoercion.ImplicitTypeCasts,
AnyTypeUnaryExpression(Literal.create(null, NullType)),
AnyTypeUnaryExpression(Literal.create(null, NullType)))
ruleTest(TypeCoercion.ImplicitTypeCasts,
NumericTypeUnaryExpression(Literal.create(null, NullType)),
NumericTypeUnaryExpression(Literal.create(null, DoubleType)))
}
test("cast NullType for binary operators") {
import TypeCoercionSuite._
ruleTest(TypeCoercion.ImplicitTypeCasts,
AnyTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)),
AnyTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)))
ruleTest(TypeCoercion.ImplicitTypeCasts,
NumericTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)),
NumericTypeBinaryOperator(Literal.create(null, DoubleType), Literal.create(null, DoubleType)))
}
test("coalesce casts") {
val rule = TypeCoercion.FunctionArgumentConversion
val intLit = Literal(1)
val longLit = Literal.create(1L)
val doubleLit = Literal(1.0)
val stringLit = Literal.create("c", StringType)
val nullLit = Literal.create(null, NullType)
val floatNullLit = Literal.create(null, FloatType)
val floatLit = Literal.create(1.0f, FloatType)
val timestampLit = Literal.create(Timestamp.valueOf("2017-04-12 00:00:00"), TimestampType) // value must be a java.sql.Timestamp to match TimestampType
val decimalLit = Literal(new java.math.BigDecimal("1000000000000000000000"))
val tsArrayLit = Literal(Array(new Timestamp(System.currentTimeMillis())))
val strArrayLit = Literal(Array("c"))
val intArrayLit = Literal(Array(1))
ruleTest(rule,
Coalesce(Seq(doubleLit, intLit, floatLit)),
Coalesce(Seq(Cast(doubleLit, DoubleType),
Cast(intLit, DoubleType), Cast(floatLit, DoubleType))))
ruleTest(rule,
Coalesce(Seq(longLit, intLit, decimalLit)),
Coalesce(Seq(Cast(longLit, DecimalType(22, 0)),
Cast(intLit, DecimalType(22, 0)), Cast(decimalLit, DecimalType(22, 0)))))
ruleTest(rule,
Coalesce(Seq(nullLit, intLit)),
Coalesce(Seq(Cast(nullLit, IntegerType), Cast(intLit, IntegerType))))
ruleTest(rule,
Coalesce(Seq(timestampLit, stringLit)),
Coalesce(Seq(Cast(timestampLit, StringType), Cast(stringLit, StringType))))
ruleTest(rule,
Coalesce(Seq(nullLit, floatNullLit, intLit)),
Coalesce(Seq(Cast(nullLit, FloatType), Cast(floatNullLit, FloatType),
Cast(intLit, FloatType))))
ruleTest(rule,
Coalesce(Seq(nullLit, intLit, decimalLit, doubleLit)),
Coalesce(Seq(Cast(nullLit, DoubleType), Cast(intLit, DoubleType),
Cast(decimalLit, DoubleType), Cast(doubleLit, DoubleType))))
ruleTest(rule,
Coalesce(Seq(nullLit, floatNullLit, doubleLit, stringLit)),
Coalesce(Seq(Cast(nullLit, StringType), Cast(floatNullLit, StringType),
Cast(doubleLit, StringType), Cast(stringLit, StringType))))
ruleTest(rule,
Coalesce(Seq(timestampLit, intLit, stringLit)),
Coalesce(Seq(Cast(timestampLit, StringType), Cast(intLit, StringType),
Cast(stringLit, StringType))))
ruleTest(rule,
Coalesce(Seq(tsArrayLit, intArrayLit, strArrayLit)),
Coalesce(Seq(Cast(tsArrayLit, ArrayType(StringType)),
Cast(intArrayLit, ArrayType(StringType)), Cast(strArrayLit, ArrayType(StringType)))))
}
test("CreateArray casts") {
ruleTest(TypeCoercion.FunctionArgumentConversion,
CreateArray(Literal(1.0)
:: Literal(1)
:: Literal.create(1.0, FloatType)
:: Nil),
CreateArray(Cast(Literal(1.0), DoubleType)
:: Cast(Literal(1), DoubleType)
:: Cast(Literal.create(1.0, FloatType), DoubleType)
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
CreateArray(Literal(1.0)
:: Literal(1)
:: Literal("a")
:: Nil),
CreateArray(Cast(Literal(1.0), StringType)
:: Cast(Literal(1), StringType)
:: Cast(Literal("a"), StringType)
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
CreateArray(Literal.create(null, DecimalType(5, 3))
:: Literal(1)
:: Nil),
CreateArray(Literal.create(null, DecimalType(5, 3)).cast(DecimalType(13, 3))
:: Literal(1).cast(DecimalType(13, 3))
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
CreateArray(Literal.create(null, DecimalType(5, 3))
:: Literal.create(null, DecimalType(22, 10))
:: Literal.create(null, DecimalType(38, 38))
:: Nil),
CreateArray(Literal.create(null, DecimalType(5, 3)).cast(DecimalType(38, 38))
:: Literal.create(null, DecimalType(22, 10)).cast(DecimalType(38, 38))
:: Literal.create(null, DecimalType(38, 38)).cast(DecimalType(38, 38))
:: Nil))
}
test("CreateMap casts") {
// type coercion for map keys
ruleTest(TypeCoercion.FunctionArgumentConversion,
CreateMap(Literal(1)
:: Literal("a")
:: Literal.create(2.0, FloatType)
:: Literal("b")
:: Nil),
CreateMap(Cast(Literal(1), FloatType)
:: Literal("a")
:: Cast(Literal.create(2.0, FloatType), FloatType)
:: Literal("b")
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
CreateMap(Literal.create(null, DecimalType(5, 3))
:: Literal("a")
:: Literal.create(2.0, FloatType)
:: Literal("b")
:: Nil),
CreateMap(Literal.create(null, DecimalType(5, 3)).cast(DoubleType)
:: Literal("a")
:: Literal.create(2.0, FloatType).cast(DoubleType)
:: Literal("b")
:: Nil))
// type coercion for map values
ruleTest(TypeCoercion.FunctionArgumentConversion,
CreateMap(Literal(1)
:: Literal("a")
:: Literal(2)
:: Literal(3.0)
:: Nil),
CreateMap(Literal(1)
:: Cast(Literal("a"), StringType)
:: Literal(2)
:: Cast(Literal(3.0), StringType)
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
CreateMap(Literal(1)
:: Literal.create(null, DecimalType(38, 0))
:: Literal(2)
:: Literal.create(null, DecimalType(38, 38))
:: Nil),
CreateMap(Literal(1)
:: Literal.create(null, DecimalType(38, 0)).cast(DecimalType(38, 38))
:: Literal(2)
:: Literal.create(null, DecimalType(38, 38)).cast(DecimalType(38, 38))
:: Nil))
// type coercion for both map keys and values
ruleTest(TypeCoercion.FunctionArgumentConversion,
CreateMap(Literal(1)
:: Literal("a")
:: Literal(2.0)
:: Literal(3.0)
:: Nil),
CreateMap(Cast(Literal(1), DoubleType)
:: Cast(Literal("a"), StringType)
:: Cast(Literal(2.0), DoubleType)
:: Cast(Literal(3.0), StringType)
:: Nil))
}
test("greatest/least cast") {
for (operator <- Seq[(Seq[Expression] => Expression)](Greatest, Least)) {
ruleTest(TypeCoercion.FunctionArgumentConversion,
operator(Literal(1.0)
:: Literal(1)
:: Literal.create(1.0, FloatType)
:: Nil),
operator(Cast(Literal(1.0), DoubleType)
:: Cast(Literal(1), DoubleType)
:: Cast(Literal.create(1.0, FloatType), DoubleType)
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
operator(Literal(1L)
:: Literal(1)
:: Literal(new java.math.BigDecimal("1000000000000000000000"))
:: Nil),
operator(Cast(Literal(1L), DecimalType(22, 0))
:: Cast(Literal(1), DecimalType(22, 0))
:: Cast(Literal(new java.math.BigDecimal("1000000000000000000000")), DecimalType(22, 0))
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
operator(Literal(1.0)
:: Literal.create(null, DecimalType(10, 5))
:: Literal(1)
:: Nil),
operator(Literal(1.0).cast(DoubleType)
:: Literal.create(null, DecimalType(10, 5)).cast(DoubleType)
:: Literal(1).cast(DoubleType)
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
operator(Literal.create(null, DecimalType(15, 0))
:: Literal.create(null, DecimalType(10, 5))
:: Literal(1)
:: Nil),
operator(Literal.create(null, DecimalType(15, 0)).cast(DecimalType(20, 5))
:: Literal.create(null, DecimalType(10, 5)).cast(DecimalType(20, 5))
:: Literal(1).cast(DecimalType(20, 5))
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
operator(Literal.create(2L, LongType)
:: Literal(1)
:: Literal.create(null, DecimalType(10, 5))
:: Nil),
operator(Literal.create(2L, LongType).cast(DecimalType(25, 5))
:: Literal(1).cast(DecimalType(25, 5))
:: Literal.create(null, DecimalType(10, 5)).cast(DecimalType(25, 5))
:: Nil))
}
}
test("nanvl casts") {
ruleTest(TypeCoercion.FunctionArgumentConversion,
NaNvl(Literal.create(1.0f, FloatType), Literal.create(1.0, DoubleType)),
NaNvl(Cast(Literal.create(1.0f, FloatType), DoubleType), Literal.create(1.0, DoubleType)))
ruleTest(TypeCoercion.FunctionArgumentConversion,
NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0f, FloatType)),
NaNvl(Literal.create(1.0, DoubleType), Cast(Literal.create(1.0f, FloatType), DoubleType)))
ruleTest(TypeCoercion.FunctionArgumentConversion,
NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0, DoubleType)),
NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0, DoubleType)))
ruleTest(TypeCoercion.FunctionArgumentConversion,
NaNvl(Literal.create(1.0f, FloatType), Literal.create(null, NullType)),
NaNvl(Literal.create(1.0f, FloatType), Cast(Literal.create(null, NullType), FloatType)))
ruleTest(TypeCoercion.FunctionArgumentConversion,
NaNvl(Literal.create(1.0, DoubleType), Literal.create(null, NullType)),
NaNvl(Literal.create(1.0, DoubleType), Cast(Literal.create(null, NullType), DoubleType)))
}
test("type coercion for If") {
val rule = TypeCoercion.IfCoercion
val intLit = Literal(1)
val doubleLit = Literal(1.0)
val trueLit = Literal.create(true, BooleanType)
val falseLit = Literal.create(false, BooleanType)
val stringLit = Literal.create("c", StringType)
val floatLit = Literal.create(1.0f, FloatType)
val timestampLit = Literal.create(Timestamp.valueOf("2017-04-12 00:00:00"), TimestampType) // value must be a java.sql.Timestamp to match TimestampType
val decimalLit = Literal(new java.math.BigDecimal("1000000000000000000000"))
ruleTest(rule,
If(Literal(true), Literal(1), Literal(1L)),
If(Literal(true), Cast(Literal(1), LongType), Literal(1L)))
ruleTest(rule,
If(Literal.create(null, NullType), Literal(1), Literal(1)),
If(Literal.create(null, BooleanType), Literal(1), Literal(1)))
ruleTest(rule,
If(AssertTrue(trueLit), Literal(1), Literal(2)),
If(Cast(AssertTrue(trueLit), BooleanType), Literal(1), Literal(2)))
ruleTest(rule,
If(AssertTrue(falseLit), Literal(1), Literal(2)),
If(Cast(AssertTrue(falseLit), BooleanType), Literal(1), Literal(2)))
ruleTest(rule,
If(trueLit, intLit, doubleLit),
If(trueLit, Cast(intLit, DoubleType), doubleLit))
ruleTest(rule,
If(trueLit, floatLit, doubleLit),
If(trueLit, Cast(floatLit, DoubleType), doubleLit))
ruleTest(rule,
If(trueLit, floatLit, decimalLit),
If(trueLit, Cast(floatLit, DoubleType), Cast(decimalLit, DoubleType)))
ruleTest(rule,
If(falseLit, stringLit, doubleLit),
If(falseLit, stringLit, Cast(doubleLit, StringType)))
ruleTest(rule,
If(trueLit, timestampLit, stringLit),
If(trueLit, Cast(timestampLit, StringType), stringLit))
}
test("type coercion for CaseKeyWhen") {
ruleTest(TypeCoercion.ImplicitTypeCasts,
CaseKeyWhen(Literal(1.toShort), Seq(Literal(1), Literal("a"))),
CaseKeyWhen(Cast(Literal(1.toShort), IntegerType), Seq(Literal(1), Literal("a")))
)
ruleTest(TypeCoercion.CaseWhenCoercion,
CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a"))),
CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a")))
)
ruleTest(TypeCoercion.CaseWhenCoercion,
CaseWhen(Seq((Literal(true), Literal(1.2))), Literal.create(1, DecimalType(7, 2))),
CaseWhen(Seq((Literal(true), Literal(1.2))),
Cast(Literal.create(1, DecimalType(7, 2)), DoubleType))
)
ruleTest(TypeCoercion.CaseWhenCoercion,
CaseWhen(Seq((Literal(true), Literal(100L))), Literal.create(1, DecimalType(7, 2))),
CaseWhen(Seq((Literal(true), Cast(Literal(100L), DecimalType(22, 2)))),
Cast(Literal.create(1, DecimalType(7, 2)), DecimalType(22, 2)))
)
}
test("type coercion for Stack") {
val rule = TypeCoercion.StackCoercion
ruleTest(rule,
Stack(Seq(Literal(3), Literal(1), Literal(2), Literal(null))),
Stack(Seq(Literal(3), Literal(1), Literal(2), Literal.create(null, IntegerType))))
ruleTest(rule,
Stack(Seq(Literal(3), Literal(1.0), Literal(null), Literal(3.0))),
Stack(Seq(Literal(3), Literal(1.0), Literal.create(null, DoubleType), Literal(3.0))))
ruleTest(rule,
Stack(Seq(Literal(3), Literal(null), Literal("2"), Literal("3"))),
Stack(Seq(Literal(3), Literal.create(null, StringType), Literal("2"), Literal("3"))))
ruleTest(rule,
Stack(Seq(Literal(3), Literal(null), Literal(null), Literal(null))),
Stack(Seq(Literal(3), Literal(null), Literal(null), Literal(null))))
ruleTest(rule,
Stack(Seq(Literal(2),
Literal(1), Literal("2"),
Literal(null), Literal(null))),
Stack(Seq(Literal(2),
Literal(1), Literal("2"),
Literal.create(null, IntegerType), Literal.create(null, StringType))))
ruleTest(rule,
Stack(Seq(Literal(2),
Literal(1), Literal(null),
Literal(null), Literal("2"))),
Stack(Seq(Literal(2),
Literal(1), Literal.create(null, StringType),
Literal.create(null, IntegerType), Literal("2"))))
ruleTest(rule,
Stack(Seq(Literal(2),
Literal(null), Literal(1),
Literal("2"), Literal(null))),
Stack(Seq(Literal(2),
Literal.create(null, StringType), Literal(1),
Literal("2"), Literal.create(null, IntegerType))))
ruleTest(rule,
Stack(Seq(Literal(2),
Literal(null), Literal(null),
Literal(1), Literal("2"))),
Stack(Seq(Literal(2),
Literal.create(null, IntegerType), Literal.create(null, StringType),
Literal(1), Literal("2"))))
ruleTest(rule,
Stack(Seq(Subtract(Literal(3), Literal(1)),
Literal(1), Literal("2"),
Literal(null), Literal(null))),
Stack(Seq(Subtract(Literal(3), Literal(1)),
Literal(1), Literal("2"),
Literal.create(null, IntegerType), Literal.create(null, StringType))))
}
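// Stack's first argument is the row count n; the remaining values fill n rows
// in order, so types are resolved per column. The cases above verify that an
// untyped null picks up its column's type (IntegerType or StringType) and
// that a column containing only nulls is left untouched.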
test("type coercion for Concat") {
val rule = TypeCoercion.ConcatCoercion(conf)
ruleTest(rule,
Concat(Seq(Literal("ab"), Literal("cde"))),
Concat(Seq(Literal("ab"), Literal("cde"))))
ruleTest(rule,
Concat(Seq(Literal(null), Literal("abc"))),
Concat(Seq(Cast(Literal(null), StringType), Literal("abc"))))
ruleTest(rule,
Concat(Seq(Literal(1), Literal("234"))),
Concat(Seq(Cast(Literal(1), StringType), Literal("234"))))
ruleTest(rule,
Concat(Seq(Literal("1"), Literal("234".getBytes()))),
Concat(Seq(Literal("1"), Cast(Literal("234".getBytes()), StringType))))
ruleTest(rule,
Concat(Seq(Literal(1L), Literal(2.toByte), Literal(0.1))),
Concat(Seq(Cast(Literal(1L), StringType), Cast(Literal(2.toByte), StringType),
Cast(Literal(0.1), StringType))))
ruleTest(rule,
Concat(Seq(Literal(true), Literal(0.1f), Literal(3.toShort))),
Concat(Seq(Cast(Literal(true), StringType), Cast(Literal(0.1f), StringType),
Cast(Literal(3.toShort), StringType))))
ruleTest(rule,
Concat(Seq(Literal(1L), Literal(0.1))),
Concat(Seq(Cast(Literal(1L), StringType), Cast(Literal(0.1), StringType))))
ruleTest(rule,
Concat(Seq(Literal(Decimal(10)))),
Concat(Seq(Cast(Literal(Decimal(10)), StringType))))
ruleTest(rule,
Concat(Seq(Literal(BigDecimal.valueOf(10)))),
Concat(Seq(Cast(Literal(BigDecimal.valueOf(10)), StringType))))
ruleTest(rule,
Concat(Seq(Literal(java.math.BigDecimal.valueOf(10)))),
Concat(Seq(Cast(Literal(java.math.BigDecimal.valueOf(10)), StringType))))
ruleTest(rule,
Concat(Seq(Literal(new java.sql.Date(0)), Literal(new Timestamp(0)))),
Concat(Seq(Cast(Literal(new java.sql.Date(0)), StringType),
Cast(Literal(new Timestamp(0)), StringType))))
withSQLConf("spark.sql.function.concatBinaryAsString" -> "true") {
ruleTest(rule,
Concat(Seq(Literal("123".getBytes), Literal("456".getBytes))),
Concat(Seq(Cast(Literal("123".getBytes), StringType),
Cast(Literal("456".getBytes), StringType))))
}
withSQLConf("spark.sql.function.concatBinaryAsString" -> "false") {
ruleTest(rule,
Concat(Seq(Literal("123".getBytes), Literal("456".getBytes))),
Concat(Seq(Literal("123".getBytes), Literal("456".getBytes))))
}
}
test("type coercion for Elt") {
val rule = TypeCoercion.EltCoercion(conf)
ruleTest(rule,
Elt(Seq(Literal(1), Literal("ab"), Literal("cde"))),
Elt(Seq(Literal(1), Literal("ab"), Literal("cde"))))
ruleTest(rule,
Elt(Seq(Literal(1.toShort), Literal("ab"), Literal("cde"))),
Elt(Seq(Cast(Literal(1.toShort), IntegerType), Literal("ab"), Literal("cde"))))
ruleTest(rule,
Elt(Seq(Literal(2), Literal(null), Literal("abc"))),
Elt(Seq(Literal(2), Cast(Literal(null), StringType), Literal("abc"))))
ruleTest(rule,
Elt(Seq(Literal(2), Literal(1), Literal("234"))),
Elt(Seq(Literal(2), Cast(Literal(1), StringType), Literal("234"))))
ruleTest(rule,
Elt(Seq(Literal(3), Literal(1L), Literal(2.toByte), Literal(0.1))),
Elt(Seq(Literal(3), Cast(Literal(1L), StringType), Cast(Literal(2.toByte), StringType),
Cast(Literal(0.1), StringType))))
ruleTest(rule,
Elt(Seq(Literal(2), Literal(true), Literal(0.1f), Literal(3.toShort))),
Elt(Seq(Literal(2), Cast(Literal(true), StringType), Cast(Literal(0.1f), StringType),
Cast(Literal(3.toShort), StringType))))
ruleTest(rule,
Elt(Seq(Literal(1), Literal(1L), Literal(0.1))),
Elt(Seq(Literal(1), Cast(Literal(1L), StringType), Cast(Literal(0.1), StringType))))
ruleTest(rule,
Elt(Seq(Literal(1), Literal(Decimal(10)))),
Elt(Seq(Literal(1), Cast(Literal(Decimal(10)), StringType))))
ruleTest(rule,
Elt(Seq(Literal(1), Literal(BigDecimal.valueOf(10)))),
Elt(Seq(Literal(1), Cast(Literal(BigDecimal.valueOf(10)), StringType))))
ruleTest(rule,
Elt(Seq(Literal(1), Literal(java.math.BigDecimal.valueOf(10)))),
Elt(Seq(Literal(1), Cast(Literal(java.math.BigDecimal.valueOf(10)), StringType))))
ruleTest(rule,
Elt(Seq(Literal(2), Literal(new java.sql.Date(0)), Literal(new Timestamp(0)))),
Elt(Seq(Literal(2), Cast(Literal(new java.sql.Date(0)), StringType),
Cast(Literal(new Timestamp(0)), StringType))))
withSQLConf("spark.sql.function.eltOutputAsString" -> "true") {
ruleTest(rule,
Elt(Seq(Literal(1), Literal("123".getBytes), Literal("456".getBytes))),
Elt(Seq(Literal(1), Cast(Literal("123".getBytes), StringType),
Cast(Literal("456".getBytes), StringType))))
}
withSQLConf("spark.sql.function.eltOutputAsString" -> "false") {
ruleTest(rule,
Elt(Seq(Literal(1), Literal("123".getBytes), Literal("456".getBytes))),
Elt(Seq(Literal(1), Literal("123".getBytes), Literal("456".getBytes))))
}
}
test("BooleanEquality type cast") {
val be = TypeCoercion.BooleanEquality
// Use something more than a literal to avoid triggering the simplification rules.
val one = Add(Literal(Decimal(1)), Literal(Decimal(0)))
ruleTest(be,
EqualTo(Literal(true), one),
EqualTo(Cast(Literal(true), one.dataType), one)
)
ruleTest(be,
EqualTo(one, Literal(true)),
EqualTo(one, Cast(Literal(true), one.dataType))
)
ruleTest(be,
EqualNullSafe(Literal(true), one),
EqualNullSafe(Cast(Literal(true), one.dataType), one)
)
ruleTest(be,
EqualNullSafe(one, Literal(true)),
EqualNullSafe(one, Cast(Literal(true), one.dataType))
)
}
test("BooleanEquality simplification") {
val be = TypeCoercion.BooleanEquality
ruleTest(be,
EqualTo(Literal(true), Literal(1)),
Literal(true)
)
ruleTest(be,
EqualTo(Literal(true), Literal(0)),
Not(Literal(true))
)
ruleTest(be,
EqualNullSafe(Literal(true), Literal(1)),
And(IsNotNull(Literal(true)), Literal(true))
)
ruleTest(be,
EqualNullSafe(Literal(true), Literal(0)),
And(IsNotNull(Literal(true)), Not(Literal(true)))
)
ruleTest(be,
EqualTo(Literal(true), Literal(1L)),
Literal(true)
)
ruleTest(be,
EqualTo(Literal(new java.math.BigDecimal(1)), Literal(true)),
Literal(true)
)
ruleTest(be,
EqualTo(Literal(BigDecimal(0)), Literal(true)),
Not(Literal(true))
)
ruleTest(be,
EqualTo(Literal(Decimal(1)), Literal(true)),
Literal(true)
)
ruleTest(be,
EqualTo(Literal.create(Decimal(1), DecimalType(8, 0)), Literal(true)),
Literal(true)
)
}
private def checkOutput(logical: LogicalPlan, expectTypes: Seq[DataType]): Unit = {
logical.output.zip(expectTypes).foreach { case (attr, dt) =>
assert(attr.dataType === dt)
}
}
private val timeZoneResolver = ResolveTimeZone(new SQLConf)
private def widenSetOperationTypes(plan: LogicalPlan): LogicalPlan = {
timeZoneResolver(TypeCoercion.WidenSetOperationTypes(plan))
}
test("WidenSetOperationTypes for except and intersect") {
val firstTable = LocalRelation(
AttributeReference("i", IntegerType)(),
AttributeReference("u", DecimalType.SYSTEM_DEFAULT)(),
AttributeReference("b", ByteType)(),
AttributeReference("d", DoubleType)())
val secondTable = LocalRelation(
AttributeReference("s", StringType)(),
AttributeReference("d", DecimalType(2, 1))(),
AttributeReference("f", FloatType)(),
AttributeReference("l", LongType)())
val expectedTypes = Seq(StringType, DecimalType.SYSTEM_DEFAULT, FloatType, DoubleType)
val r1 = widenSetOperationTypes(Except(firstTable, secondTable)).asInstanceOf[Except]
val r2 = widenSetOperationTypes(Intersect(firstTable, secondTable)).asInstanceOf[Intersect]
checkOutput(r1.left, expectedTypes)
checkOutput(r1.right, expectedTypes)
checkOutput(r2.left, expectedTypes)
checkOutput(r2.right, expectedTypes)
// Check if a Project is added
assert(r1.left.isInstanceOf[Project])
assert(r1.right.isInstanceOf[Project])
assert(r2.left.isInstanceOf[Project])
assert(r2.right.isInstanceOf[Project])
}
test("WidenSetOperationTypes for union") {
val firstTable = LocalRelation(
AttributeReference("i", IntegerType)(),
AttributeReference("u", DecimalType.SYSTEM_DEFAULT)(),
AttributeReference("b", ByteType)(),
AttributeReference("d", DoubleType)())
val secondTable = LocalRelation(
AttributeReference("s", StringType)(),
AttributeReference("d", DecimalType(2, 1))(),
AttributeReference("f", FloatType)(),
AttributeReference("l", LongType)())
val thirdTable = LocalRelation(
AttributeReference("m", StringType)(),
AttributeReference("n", DecimalType.SYSTEM_DEFAULT)(),
AttributeReference("p", FloatType)(),
AttributeReference("q", DoubleType)())
val forthTable = LocalRelation(
AttributeReference("m", StringType)(),
AttributeReference("n", DecimalType.SYSTEM_DEFAULT)(),
AttributeReference("p", ByteType)(),
AttributeReference("q", DoubleType)())
val expectedTypes = Seq(StringType, DecimalType.SYSTEM_DEFAULT, FloatType, DoubleType)
val unionRelation = widenSetOperationTypes(
Union(firstTable :: secondTable :: thirdTable :: forthTable :: Nil)).asInstanceOf[Union]
assert(unionRelation.children.length == 4)
checkOutput(unionRelation.children.head, expectedTypes)
checkOutput(unionRelation.children(1), expectedTypes)
checkOutput(unionRelation.children(2), expectedTypes)
checkOutput(unionRelation.children(3), expectedTypes)
assert(unionRelation.children.head.isInstanceOf[Project])
assert(unionRelation.children(1).isInstanceOf[Project])
assert(unionRelation.children(2).isInstanceOf[Project])
assert(unionRelation.children(3).isInstanceOf[Project])
}
test("Transform Decimal precision/scale for union except and intersect") {
def checkOutput(logical: LogicalPlan, expectTypes: Seq[DataType]): Unit = {
logical.output.zip(expectTypes).foreach { case (attr, dt) =>
assert(attr.dataType === dt)
}
}
val left1 = LocalRelation(
AttributeReference("l", DecimalType(10, 8))())
val right1 = LocalRelation(
AttributeReference("r", DecimalType(5, 5))())
val expectedType1 = Seq(DecimalType(10, 8))
val r1 = widenSetOperationTypes(Union(left1, right1)).asInstanceOf[Union]
val r2 = widenSetOperationTypes(Except(left1, right1)).asInstanceOf[Except]
val r3 = widenSetOperationTypes(Intersect(left1, right1)).asInstanceOf[Intersect]
checkOutput(r1.children.head, expectedType1)
checkOutput(r1.children.last, expectedType1)
checkOutput(r2.left, expectedType1)
checkOutput(r2.right, expectedType1)
checkOutput(r3.left, expectedType1)
checkOutput(r3.right, expectedType1)
val plan1 = LocalRelation(AttributeReference("l", DecimalType(10, 5))())
val rightTypes = Seq(ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType)
val expectedTypes = Seq(DecimalType(10, 5), DecimalType(10, 5), DecimalType(15, 5),
DecimalType(25, 5), DoubleType, DoubleType)
rightTypes.zip(expectedTypes).foreach { case (rType, expectedType) =>
val plan2 = LocalRelation(
AttributeReference("r", rType)())
val r1 = widenSetOperationTypes(Union(plan1, plan2)).asInstanceOf[Union]
val r2 = widenSetOperationTypes(Except(plan1, plan2)).asInstanceOf[Except]
val r3 = widenSetOperationTypes(Intersect(plan1, plan2)).asInstanceOf[Intersect]
checkOutput(r1.children.last, Seq(expectedType))
checkOutput(r2.right, Seq(expectedType))
checkOutput(r3.right, Seq(expectedType))
val r4 = widenSetOperationTypes(Union(plan2, plan1)).asInstanceOf[Union]
val r5 = widenSetOperationTypes(Except(plan2, plan1)).asInstanceOf[Except]
val r6 = widenSetOperationTypes(Intersect(plan2, plan1)).asInstanceOf[Intersect]
checkOutput(r4.children.last, Seq(expectedType))
checkOutput(r5.left, Seq(expectedType))
checkOutput(r6.left, Seq(expectedType))
}
}
test("rule for date/timestamp operations") {
val dateTimeOperations = TypeCoercion.DateTimeOperations
val date = Literal(new java.sql.Date(0L))
val timestamp = Literal(new Timestamp(0L))
val interval = Literal(new CalendarInterval(0, 0))
val str = Literal("2015-01-01")
ruleTest(dateTimeOperations, Add(date, interval), Cast(TimeAdd(date, interval), DateType))
ruleTest(dateTimeOperations, Add(interval, date), Cast(TimeAdd(date, interval), DateType))
ruleTest(dateTimeOperations, Add(timestamp, interval),
Cast(TimeAdd(timestamp, interval), TimestampType))
ruleTest(dateTimeOperations, Add(interval, timestamp),
Cast(TimeAdd(timestamp, interval), TimestampType))
ruleTest(dateTimeOperations, Add(str, interval), Cast(TimeAdd(str, interval), StringType))
ruleTest(dateTimeOperations, Add(interval, str), Cast(TimeAdd(str, interval), StringType))
ruleTest(dateTimeOperations, Subtract(date, interval), Cast(TimeSub(date, interval), DateType))
ruleTest(dateTimeOperations, Subtract(timestamp, interval),
Cast(TimeSub(timestamp, interval), TimestampType))
ruleTest(dateTimeOperations, Subtract(str, interval), Cast(TimeSub(str, interval), StringType))
    // interval operations should not be affected
ruleTest(dateTimeOperations, Add(interval, interval), Add(interval, interval))
ruleTest(dateTimeOperations, Subtract(interval, interval), Subtract(interval, interval))
}
/**
* There are rules that need to not fire before child expressions get resolved.
* We use this test to make sure those rules do not fire early.
*/
test("make sure rules do not fire early") {
// InConversion
val inConversion = TypeCoercion.InConversion(conf)
ruleTest(inConversion,
In(UnresolvedAttribute("a"), Seq(Literal(1))),
In(UnresolvedAttribute("a"), Seq(Literal(1)))
)
ruleTest(inConversion,
In(Literal("test"), Seq(UnresolvedAttribute("a"), Literal(1))),
In(Literal("test"), Seq(UnresolvedAttribute("a"), Literal(1)))
)
ruleTest(inConversion,
In(Literal("a"), Seq(Literal(1), Literal("b"))),
In(Cast(Literal("a"), StringType),
Seq(Cast(Literal(1), StringType), Cast(Literal("b"), StringType)))
)
}
test("SPARK-15776 Divide expression's dataType should be casted to Double or Decimal " +
"in aggregation function like sum") {
val rules = Seq(FunctionArgumentConversion, Division)
// Casts Integer to Double
ruleTest(rules, sum(Divide(4, 3)), sum(Divide(Cast(4, DoubleType), Cast(3, DoubleType))))
// Left expression is Double, right expression is Int. Another rule ImplicitTypeCasts will
// cast the right expression to Double.
ruleTest(rules, sum(Divide(4.0, 3)), sum(Divide(4.0, 3)))
// Left expression is Int, right expression is Double
ruleTest(rules, sum(Divide(4, 3.0)), sum(Divide(Cast(4, DoubleType), Cast(3.0, DoubleType))))
// Casts Float to Double
ruleTest(
rules,
sum(Divide(4.0f, 3)),
sum(Divide(Cast(4.0f, DoubleType), Cast(3, DoubleType))))
// Left expression is Decimal, right expression is Int. Another rule DecimalPrecision will cast
// the right expression to Decimal.
ruleTest(rules, sum(Divide(Decimal(4.0), 3)), sum(Divide(Decimal(4.0), 3)))
}
test("SPARK-17117 null type coercion in divide") {
val rules = Seq(FunctionArgumentConversion, Division, ImplicitTypeCasts)
val nullLit = Literal.create(null, NullType)
ruleTest(rules, Divide(1L, nullLit), Divide(Cast(1L, DoubleType), Cast(nullLit, DoubleType)))
ruleTest(rules, Divide(nullLit, 1L), Divide(Cast(nullLit, DoubleType), Cast(1L, DoubleType)))
}
test("binary comparison with string promotion") {
val rule = TypeCoercion.PromoteStrings(conf)
ruleTest(rule,
GreaterThan(Literal("123"), Literal(1)),
GreaterThan(Cast(Literal("123"), IntegerType), Literal(1)))
ruleTest(rule,
LessThan(Literal(true), Literal("123")),
LessThan(Literal(true), Cast(Literal("123"), BooleanType)))
ruleTest(rule,
EqualTo(Literal(Array(1, 2)), Literal("123")),
EqualTo(Literal(Array(1, 2)), Literal("123")))
ruleTest(rule,
GreaterThan(Literal("1.5"), Literal(BigDecimal("0.5"))),
GreaterThan(Cast(Literal("1.5"), DoubleType), Cast(Literal(BigDecimal("0.5")),
DoubleType)))
Seq(true, false).foreach { convertToTS =>
withSQLConf(
"spark.sql.typeCoercion.compareDateTimestampInTimestamp" -> convertToTS.toString) {
val date0301 = Literal(java.sql.Date.valueOf("2017-03-01"))
val timestamp0301000000 = Literal(Timestamp.valueOf("2017-03-01 00:00:00"))
val timestamp0301000001 = Literal(Timestamp.valueOf("2017-03-01 00:00:01"))
if (convertToTS) {
// `Date` should be treated as timestamp at 00:00:00 See SPARK-23549
ruleTest(rule, EqualTo(date0301, timestamp0301000000),
EqualTo(Cast(date0301, TimestampType), timestamp0301000000))
ruleTest(rule, LessThan(date0301, timestamp0301000001),
LessThan(Cast(date0301, TimestampType), timestamp0301000001))
} else {
ruleTest(rule, LessThan(date0301, timestamp0301000000),
LessThan(Cast(date0301, StringType), Cast(timestamp0301000000, StringType)))
ruleTest(rule, LessThan(date0301, timestamp0301000001),
LessThan(Cast(date0301, StringType), Cast(timestamp0301000001, StringType)))
}
}
}
}
test("cast WindowFrame boundaries to the type they operate upon") {
// Can cast frame boundaries to order dataType.
ruleTest(WindowFrameCoercion,
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal(1L), Ascending)),
SpecifiedWindowFrame(RangeFrame, Literal(3), Literal(2147483648L))),
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal(1L), Ascending)),
SpecifiedWindowFrame(RangeFrame, Cast(3, LongType), Literal(2147483648L)))
)
// Cannot cast frame boundaries to order dataType.
ruleTest(WindowFrameCoercion,
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal.default(DateType), Ascending)),
SpecifiedWindowFrame(RangeFrame, Literal(10.0), Literal(2147483648L))),
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal.default(DateType), Ascending)),
SpecifiedWindowFrame(RangeFrame, Literal(10.0), Literal(2147483648L)))
)
// Should not cast SpecialFrameBoundary.
ruleTest(WindowFrameCoercion,
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal(1L), Ascending)),
SpecifiedWindowFrame(RangeFrame, CurrentRow, UnboundedFollowing)),
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal(1L), Ascending)),
SpecifiedWindowFrame(RangeFrame, CurrentRow, UnboundedFollowing))
)
}
}
object TypeCoercionSuite {
case class AnyTypeUnaryExpression(child: Expression)
extends UnaryExpression with ExpectsInputTypes with Unevaluable {
override def inputTypes: Seq[AbstractDataType] = Seq(AnyDataType)
override def dataType: DataType = NullType
}
case class NumericTypeUnaryExpression(child: Expression)
extends UnaryExpression with ExpectsInputTypes with Unevaluable {
override def inputTypes: Seq[AbstractDataType] = Seq(NumericType)
override def dataType: DataType = NullType
}
case class AnyTypeBinaryOperator(left: Expression, right: Expression)
extends BinaryOperator with Unevaluable {
override def dataType: DataType = NullType
override def inputType: AbstractDataType = AnyDataType
override def symbol: String = "anytype"
}
case class NumericTypeBinaryOperator(left: Expression, right: Expression)
extends BinaryOperator with Unevaluable {
override def dataType: DataType = NullType
override def inputType: AbstractDataType = NumericType
override def symbol: String = "numerictype"
}
}
|
ddna1021/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
|
Scala
|
apache-2.0
| 63,281
|
import scala.quoted.*
inline def inspect[A]: String =
${ inspect2[A] }
def inspect2[A: Type](using Quotes): Expr[String] = {
import quotes.reflect.*
val ps =
TypeRepr.of[A].typeSymbol.primaryConstructor.tree match
case DefDef(_, List(Nil, ps: TermParamClause), _, _) => ps
case DefDef(_, List(ps: TermParamClause), _, _) => ps
val names = ps.params.map(p => s"${p.name}: ${p.tpt.show}").mkString("(", ", ", ")")
Expr(s"${Type.show[A]}: $names isImplicit=${ps.isImplicit}, isGiven=${ps.isGiven}, isErased=${ps.isErased}")
}
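// Usage sketch (hypothetical class; exact rendering depends on the reflection printers):
// case class P(x: Int, y: String)
// inspect[P] // e.g. "P: (x: scala.Int, y: scala.String) isImplicit=false, isGiven=false, isErased=false"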
|
dotty-staging/dotty
|
tests/run-macros/i12021/Macro_1.scala
|
Scala
|
apache-2.0
| 554
|
package com.github.mnogu.gatling.kafka.test
import io.gatling.core.Predef._
import org.apache.kafka.clients.producer.ProducerConfig
import scala.concurrent.duration._
import com.github.mnogu.gatling.kafka.Predef._
class ThrottledSimulation extends Simulation {
val kafkaConf = kafka
// Kafka topic name
.topic("test")
// Kafka producer configs
.properties(
Map(
ProducerConfig.ACKS_CONFIG -> "1",
// list of Kafka broker hostname and port pairs
ProducerConfig.BOOTSTRAP_SERVERS_CONFIG -> "localhost:9092",
// in most cases, StringSerializer or ByteArraySerializer
ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG ->
"org.apache.kafka.common.serialization.StringSerializer",
ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG ->
"org.apache.kafka.common.serialization.StringSerializer"))
val scn = scenario("Kafka Test")
.forever(
exec(
kafka("request")
// message to send
.send[String]("foo"))
)
setUp(
scn.inject(atOnceUsers(10)))
.throttle(jumpToRps(10), holdFor(30.seconds))
.protocols(kafkaConf)
}
|
mnogu/gatling-kafka
|
src/test/scala/com/github/mnogu/gatling/kafka/test/ThrottledSimulation.scala
|
Scala
|
apache-2.0
| 1,128
|
package lang.lightweightjava.ast
import lang.lightweightjava.ClassInterface
import name.namegraph.{NameGraphExtended, NameGraphModular}
import name._
case class ClassDefinition(className: ClassName, superClass: ClassRef, elements: ClassElement*) extends NominalModular[ClassInterface] {
private var dependencies = Set[ClassInterface]()
override def link(dependencies: Set[ClassInterface]) = {
this.dependencies = dependencies
val newInterface = ClassInterface(className, exportedFields, exportedMethods)
if (_interface != null)
newInterface.original = _interface
_interface = newInterface
_resolved = null
this
}
require(AST.isLegalName(className.name), "Class name '" + className.name + "' is no legal Java class name")
override def allNames = elements.flatMap(_.allNames).toSet ++ superClass.allNames ++ className.allNames
val fields = elements.collect({ case f: FieldDeclaration => f }).toSet
val methods = elements.collect({ case m: MethodDefinition => m }).toSet
private def exportedFields = {
val ownFields = fields.filter(_.accessModifier == AccessModifier.PUBLIC).map(_.fieldName)
val superFields = dependencies.find(_.className.name == superClass.name) match {
case Some(i) => i.exportedFields
case None => Set()
}
superFields ++ ownFields
}
private def exportedMethods = {
val ownMethods = methods.filter(_.signature.accessModifier == AccessModifier.PUBLIC).map(_.signature.methodName)
val superMethods = dependencies.find(_.className.name == superClass.name) match {
case Some(i) => i.exportedMethods
case None => Set()
}
superMethods.filter(m => !ownMethods.exists(_.name == m.name)) ++ ownMethods
}
val moduleID: Identifier = className
protected var _interface: ClassInterface = null
def interface = {
if (_interface == null)
_interface = ClassInterface(className, exportedFields, exportedMethods)
_interface
}
def rename(renaming: Renaming) = {
val cl = ClassDefinition(className.rename(renaming).asInstanceOf[ClassName], superClass.rename(renaming), elements.map(_.rename(renaming)): _*)
cl.link(dependencies)
cl.interface.original = interface.original
cl
}
def typeCheckForProgram(program : Program) = {
val classFields = program.findAllFields(this)
val classMethods = program.findAllMethods(this)
    require(className != superClass, "Class '" + className.name + "' can't be its own super-class")
superClass match {
case superClassName:ClassName =>
val superClassDefinition = program.findClassDefinition(superClassName)
require(superClassDefinition.isDefined, "Super-class '" + superClassName.name + "' of class '" + className.name + "' can't be resolved")
require(fields.map(_.fieldName.name).intersect(program.findAllFields(superClassDefinition.get).map(_.fieldName.name)).size == 0,
"Class '" + className + "' overshadows fields of it's super-classes")
require(classMethods.forall(method => program.findMethod(superClassDefinition.get, method.signature.methodName.name) match {
case Some(superClassMethod) =>
method.signature.accessModifier == superClassMethod.signature.accessModifier &&
method.signature.returnType.name == superClassMethod.signature.returnType.name &&
method.signature.parameters.map(_.variableType.name) == superClassMethod.signature.parameters.map(_.variableType.name)
case None => true
}), "Class '" + className + "' overwrites a super-class method with a different access modifier, return type or different parameter types")
case _ => ; // Skip if super-class is Object class
}
require(fields.size == fields.map(_.fieldName.name).size, "Field names of class '" + className.name + "' are not distinct")
require(methods.size == methods.map(_.signature.methodName.name).size, "Method names of class '" + className.name + "' are not distinct")
require(classFields.map(_.fieldType).forall {
case className:ClassName =>
program.findClassDefinition(className).isDefined
case ObjectClass => true
}, "Could not find definition for some field types of class '" + className.name + "'")
program.findAllMethods(this).foreach(_.typeCheckForClassDefinition(program, this))
}
def resolveNames(nameEnvironment: ClassNameEnvironment) = {
var conflictReferences: Map[Identifier, Set[Identifier]] = Map()
// All classes in the environment that share the same name as this class
val classes = nameEnvironment(className.name)
// Add references for conflicting classes
if (classes.size > 1)
conflictReferences += (className -> (classes.map(_._1.asInstanceOf[Identifier]) - className))
// Find this class by comparing IDs instead of names
val ownClass = classes.find(_._1 == className).get
for (classEnv <- classes) {
// Add references for conflicting fields
for ((fieldName, fields) <- classEnv._2) {
if (ownClass._2.contains(fieldName) && fields.size > 1)
conflictReferences ++= ownClass._2(fieldName).map(f => (f, fields - f))
}
// Add references for conflicting methods
for ((methodName, methods) <- classEnv._3) {
if (ownClass._3.contains(methodName) && methods.size > 1)
conflictReferences ++= ownClass._3(methodName).map(m => (m, methods - m))
}
}
className.resolveNames(nameEnvironment) + superClass.resolveNames(nameEnvironment) +
elements.foldLeft(NameGraphExtended(Set(), conflictReferences))(_ + _.resolveNames(nameEnvironment, this))
}
private var _resolved: NameGraphModular[ClassInterface] = _
def resolveNamesModular: NameGraphModular[ClassInterface] = {
if (_resolved != null)
return _resolved
var environment: ClassNameEnvironment = Map()
// Collect all exported super-class fields/methods (if there is a super-class)
val (superClassFields:Set[Identifier], superClassMethods:Set[Identifier]) = dependencies.find(_.className.name == superClass.name) match {
case Some(superInterface) => (superInterface.exportedFields, superInterface.exportedMethods)
case None => (Set(), Set())
}
// Merge the fields/methods of the super-class and current class together
val ownFieldsMap = superClassFields.groupBy(_.name) ++
fields.map(_.fieldName).groupBy(_.name).map(m => (m._1, superClassFields.groupBy(_.name).getOrElse(m._1, Set()) ++ m._2)).toMap
val ownMethodsMap = superClassMethods.groupBy(_.name) ++
methods.map(_.signature.methodName).groupBy(_.name).map(m => (m._1, superClassMethods.groupBy(_.name).getOrElse(m._1, Set()) ++ m._2)).toMap
// Add the merged sets to the environment
environment += (className.name -> Set((className, ownFieldsMap, ownMethodsMap)))
// Add exported fields/methods for all other, external classes to the environment
for (dependency <- dependencies) {
if (environment.contains(dependency.moduleID.name))
throw new IllegalArgumentException("Multiple instances of class '" + dependency.moduleID.name + "' found!")
else {
val fieldsMap = dependency.exportedFields.groupBy(_.name)
val methodsMap = dependency.exportedMethods.groupBy(_.name)
environment += (dependency.moduleID.name -> Set((dependency.moduleID, fieldsMap, methodsMap)))
}
}
val nameGraph = resolveNames(environment)
// Create the final modular name graph (and filter remaining, empty edge sets)
_resolved = NameGraphModular(nameGraph.V, dependencies, nameGraph.E, interface)
_resolved
}
  override def toString = "class " + className.toString +
    (if (superClass != ObjectClass) " extends " + superClass.toString else "") + " {\n\t" + elements.mkString("\n\t") + "}"
}
|
matthisk/hygienic-transformations
|
scala/src/main/scala/lang/lightweightjava/ast/ClassDefinition.scala
|
Scala
|
lgpl-3.0
| 7,848
|
package edu.gemini.phase2.skeleton.factory
import edu.gemini.model.p1.immutable.{Site, PhoenixBlueprint}
import edu.gemini.model.p1.mutable.{PhoenixFilter, PhoenixFocalPlaneUnit}
import edu.gemini.spModel.core.MagnitudeBand
import edu.gemini.spModel.gemini.phoenix.InstPhoenix
import edu.gemini.spModel.obs.SPObservation
import edu.gemini.spModel.util.SPTreeUtil
import org.specs2.mutable.Specification
import scala.collection.JavaConverters._
object SpPhoenixTemplateSpec extends TemplateSpec("PHOENIX_BP.xml") with Specification {
def test(fpu: PhoenixFocalPlaneUnit, filter: PhoenixFilter) =
expand(proposal(PhoenixBlueprint(Site.GS, fpu, filter), List(1), MagnitudeBand.R)) { (p, sp) =>
s"Phoenic Blueprint Expansion $fpu $filter " >> {
def group = groups(sp).head
def obs = group.getAllObservations.asScala
def insts = obs.map(SPTreeUtil.findInstrument(_).getDataObject.asInstanceOf[InstPhoenix])
def sci = obs.find(_.getDataObject.asInstanceOf[SPObservation].getLibraryId == "1").get
def scii = SPTreeUtil.findInstrument(sci).getDataObject.asInstanceOf[InstPhoenix]
val (exp, coadds) = filter.name.head match {
case 'J' | 'H' | 'K' => (900.0, 1)
case 'L' => (120.0, 3)
case 'M' => ( 30.0, 4)
}
"There should be exactly one template group." in {
groups(sp).size must_== 1
}
"Group should have human-readable name" in {
groups(sp).forall(_.getDataObject.getTitle startsWith "Phoenix")
}
"It should contain all four observations." in {
libs(group) == Set(1, 2, 3, 4)
}
"It should contain the how-to note." in {
existsNote(group, "How to use the observations in this folder")
}
"It should contain the calibration note." in {
existsNote(group, "Darks, Flats, and Arcs")
}
s"All obs should have FPU $fpu" in {
insts.forall(_.getMask.name must_== fpu.name)
}
s"All obs should have filter $filter" in {
insts.forall(_.getFilter.name must_== filter.name)
}
s"Science obs should have exposure $exp" in {
scii.getExposureTime must_== exp
}
s"Science obs should have coadds $coadds" in {
scii.getCoadds must_== coadds
}
}
}
/*
* Test with every filter because exposure and coadds depend on it. We'll just pick an FPU
* because it doesn't enter into any of the configuration decisions.
*/
PhoenixFilter.values.foreach(test(PhoenixFocalPlaneUnit.MASK_2, _))
}
|
arturog8m/ocs
|
bundle/edu.gemini.phase2.skeleton.servlet/src/test/scala/edu/gemini/phase2/skeleton/factory/SpPhoenixTemplateSpec.scala
|
Scala
|
bsd-3-clause
| 2,654
|
// Copyright 2014 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.twofishes.indexer.scalding
import com.twitter.scalding._
import com.twitter.scalding.typed.TypedSink
import io.fsq.twofishes.gen._
import io.fsq.twofishes.indexer.output.PrefixIndexer
import io.fsq.twofishes.indexer.util.SpindleSequenceFileSource
import io.fsq.twofishes.util.NameNormalizer
import org.apache.hadoop.io.{LongWritable, Text}
case class PrefixEntry(
isFull: Boolean,
score: Int,
id: Long,
woeType: YahooWoeType,
cc: String,
name: FeatureName
) {
val bestWoeTypes = Set(
YahooWoeType.POSTAL_CODE,
YahooWoeType.TOWN,
YahooWoeType.SUBURB,
YahooWoeType.ADMIN3,
YahooWoeType.AIRPORT,
YahooWoeType.COUNTRY
)
def isAllowedWoeType(): Boolean = bestWoeTypes.contains(woeType)
}
class BasePrefixIndexBuildIntermediateJob(
name: String,
sources: Seq[String],
args: Args
) extends TwofishesIntermediateJob(name, args) {
val features = getJobOutputsAsTypedPipe[LongWritable, GeocodeServingFeature](sources).group
def isAllDigits(x: String) = x forall Character.isDigit
def shouldExcludeFromPrefixIndex(name: FeatureName, woeType: YahooWoeType): Boolean = {
// exclude because of flags
name.flags.exists(flag => (flag == FeatureNameFlags.NEVER_DISPLAY) || flag == FeatureNameFlags.LOW_QUALITY) ||
      // exclude purely numeric names of postal code features from prefix generation
      // (only their full-length form is indexed below)
woeType == YahooWoeType.POSTAL_CODE && isAllDigits(name.name)
}
def joinAndSortLists(lists: List[PrefixEntry]*): List[PrefixEntry] = {
lists.toList.flatMap(l => {
l.sortBy(_.score)
})
}
def sortRecordsByNames(entries: List[PrefixEntry]) = {
val (prefPureNames, nonPrefPureNames) = entries.partition(
e =>
e.name.flags.exists(flag => flag == FeatureNameFlags.PREFERRED || flag == FeatureNameFlags.ALT_NAME) &&
(e.name.lang == "en" || e.name.flags.contains(FeatureNameFlags.LOCAL_LANG))
)
val (secondBestNames, worstNames) =
nonPrefPureNames.partition(e => e.name.lang == "en" || e.name.flags.contains(FeatureNameFlags.LOCAL_LANG))
(joinAndSortLists(prefPureNames), joinAndSortLists(secondBestNames, worstNames))
}
def roundRobinByCountryCode(entries: List[PrefixEntry]): List[PrefixEntry] = {
// to ensure global distribution of features from all countries, group by cc
// and then pick the top from each group by turn and cycle through
// input: a (US), b (US), c (CN), d (US), e (AU), f (AU), g (CN)
// desired output: a (US), c (CN), e (AU), b (US), g (CN), f (AU), d (US)
entries
.groupBy(_.cc) // (US -> a, b, d), (CN -> c, g), (AU -> e, f)
.values
.toList // (a, b, d), (c, g), (e, f)
.flatMap(_.zipWithIndex) // (a, 0), (b, 1), (d, 2), (c, 0), (g, 1), (e, 0), (f, 1)
.groupBy(_._2)
.toList // (0 -> a, c, e), (1 -> b, g, f), (2 -> d)
.sortBy(_._1)
.flatMap(_._2.map(_._1)) // a, c, e, b, g, f, d
}
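    // Standalone sketch of the same interleave on toy data (hypothetical, not used by the job).
    // Note: groupBy returns an unordered Map, so the country order within each round depends on hashing.
    // case class E(cc: String, id: Long)
    // val in = List(E("US", 1), E("US", 2), E("CN", 3), E("US", 4), E("AU", 5), E("AU", 6), E("CN", 7))
    // in.groupBy(_.cc).values.toList.flatMap(_.zipWithIndex)
    //   .groupBy(_._2).toList.sortBy(_._1).flatMap(_._2.map(_._1))
    // // round 0 yields one entry per country, round 1 the remaining second entries, round 2 only E(US, 4)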
(for {
(featureId, servingFeature) <- features
population = servingFeature.scoringFeatures.populationOption.getOrElse(0)
boost = servingFeature.scoringFeatures.boostOption.getOrElse(0)
score = population + boost
cc = servingFeature.feature.ccOrThrow
woeType = servingFeature.feature.woeTypeOrDefault
// unlike with the name index, we can't just choose distinct normalized names up front because we need
// to know which name each prefix came from
// as a result different names might normalize to the same string, and separately generate the same prefixes
// so we will need to dedupe longIds in the reducer instead
name <- servingFeature.feature.names
shouldExclude = shouldExcludeFromPrefixIndex(name, woeType)
normalizedName = NameNormalizer.normalize(name.name)
// filter out any name that must be excluded unless it's short enough to make it into the prefix index
// as a full name match, in which case, let it through for now and don't generate prefixes for it
if (!shouldExclude || normalizedName.length <= PrefixIndexer.MaxPrefixLength)
fromLength = if (!shouldExclude) {
1
} else {
normalizedName.length
}
toLength = math.min(PrefixIndexer.MaxPrefixLength, normalizedName.length)
length <- fromLength to toLength
prefix = normalizedName.substring(0, length)
if prefix.nonEmpty
isFull = (length == normalizedName.length)
// to prevent too many results for short prefixes, use score threshold
if (isFull || (length > 2) || (length <= 2 && score > 0))
} yield {
(new Text(prefix) -> PrefixEntry(isFull, score * -1, featureId.get, woeType, cc, name))
}).group
.withReducers(1)
.toList
.mapValues({ values: List[PrefixEntry] =>
{
val filtered = values
.sortBy({ case entry: PrefixEntry => (entry.isFull, entry.score, entry.id) })
.take(PrefixIndexer.MaxNamesToConsider)
val woeMatches = filtered.filter({ case entry: PrefixEntry => entry.isAllowedWoeType })
val (prefSortedRecords, unprefSortedRecords) = sortRecordsByNames(woeMatches)
//val preferredIds = roundRobinByCountryCode(prefSortedRecords).map(_.id).distinct.take(PrefixIndexer.MaxFidsToStorePerPrefix)
val preferredIds = prefSortedRecords.map(_.id).distinct.take(PrefixIndexer.MaxFidsToStorePerPrefix)
val nonPreferredIds =
if (preferredIds.size < PrefixIndexer.MaxFidsWithPreferredNamesBeforeConsideringNonPreferred) {
//roundRobinByCountryCode(unprefSortedRecords).map(_.id).distinct.take(PrefixIndexer.MaxFidsToStorePerPrefix - preferredIds.size)
unprefSortedRecords.map(_.id).distinct.take(PrefixIndexer.MaxFidsToStorePerPrefix - preferredIds.size)
} else {
Nil
}
IntermediateDataContainer.newBuilder.longList(preferredIds ++ nonPreferredIds).result
}
})
.filter({ case (k: Text, v: IntermediateDataContainer) => v.longList.nonEmpty })
.write(
TypedSink[(Text, IntermediateDataContainer)](
SpindleSequenceFileSource[Text, IntermediateDataContainer](outputPath)
)
)
}
|
foursquare/fsqio
|
src/jvm/io/fsq/twofishes/indexer/scalding/BasePrefixIndexBuildIntermediateJob.scala
|
Scala
|
apache-2.0
| 6,128
|
/*******************************************************************************
* Copyright 2010 Maxime Lévesque
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************** */
package org.squeryl.customtypes;
import java.util.{Date, UUID}
import org.squeryl.dsl._
import java.sql.Timestamp
import org.squeryl.internals.FieldMapper
trait CustomType[T] extends Product1[T] {
def value: T
def _1 = value
def canEqual(a:Any) = false
}
trait CustomTypesMode extends QueryDsl with FieldMapper {
private val ps = PrimitiveTypeSupport
val stringTEF = new NonPrimitiveJdbcMapper[String,StringField,TString](ps.stringTEF, this) {
def convertFromJdbc(v: String) = new StringField(v)
def convertToJdbc(v: StringField) = v.value
}
val optionStringTEF = new TypedExpressionFactory[Option[StringField],TOptionString] with DeOptionizer[String, StringField, TString, Option[StringField], TOptionString]{
val deOptionizer = stringTEF
}
val dateTEF = new NonPrimitiveJdbcMapper[Date, DateField,TDate](ps.dateTEF, this) {
def convertFromJdbc(v: Date) = new DateField(v)
def convertToJdbc(v: DateField) = v.value
}
val optionDateTEF = new TypedExpressionFactory[Option[DateField],TOptionDate] with DeOptionizer[Date, DateField, TDate, Option[DateField], TOptionDate] {
val deOptionizer = dateTEF
}
val timestampTEF = new NonPrimitiveJdbcMapper[Timestamp, TimestampField,TTimestamp](ps.timestampTEF, this) {
def convertFromJdbc(v: Timestamp) = new TimestampField(v)
def convertToJdbc(v: TimestampField) = v.value
}
val optionTimestampTEF = new TypedExpressionFactory[Option[TimestampField],TOptionTimestamp] with DeOptionizer[Timestamp, TimestampField, TTimestamp, Option[TimestampField], TOptionTimestamp] {
val deOptionizer = timestampTEF
}
val booleanTEF = new NonPrimitiveJdbcMapper[Boolean, BooleanField,TBoolean](ps.booleanTEF, this) {
def convertFromJdbc(v: Boolean) = new BooleanField(v)
def convertToJdbc(v: BooleanField) = v.value
}
val optionBooleanTEF = new TypedExpressionFactory[Option[BooleanField],TOptionBoolean] with DeOptionizer[Boolean, BooleanField, TBoolean, Option[BooleanField], TOptionBoolean] {
val deOptionizer = booleanTEF
}
val uuidTEF = new NonPrimitiveJdbcMapper[UUID,UuidField,TUUID](ps.uuidTEF, this) {
def convertFromJdbc(v: UUID) = new UuidField(v)
def convertToJdbc(v: UuidField) = v.value
}
val optionUUIDTEF = new TypedExpressionFactory[Option[UuidField],TOptionUUID] with DeOptionizer[UUID, UuidField, TUUID, Option[UuidField], TOptionUUID] {
val deOptionizer = uuidTEF
}
// =========================== Numerical Integral ===========================
val byteTEF = new NonPrimitiveJdbcMapper[Byte, ByteField,TByte](ps.byteTEF, this) {
def convertFromJdbc(v: Byte) = new ByteField(v)
def convertToJdbc(v: ByteField) = v.value
}
val optionByteTEF = new IntegralTypedExpressionFactory[Option[ByteField],TOptionByte, Option[FloatField], TOptionFloat] with DeOptionizer[Byte, ByteField, TByte, Option[ByteField], TOptionByte] {
val deOptionizer = byteTEF
val floatifyer = optionFloatTEF
}
val intTEF = new NonPrimitiveJdbcMapper[Int, IntField,TInt](ps.intTEF, this) with IntegralTypedExpressionFactory[IntField,TInt,FloatField,TFloat] {
val floatifyer = floatTEF
def convertFromJdbc(v: Int) = new IntField(v)
def convertToJdbc(v: IntField) = v.value
}
val optionIntTEF = new IntegralTypedExpressionFactory[Option[IntField],TOptionInt,Option[FloatField],TOptionFloat] with DeOptionizer[Int, IntField,TInt,Option[IntField],TOptionInt] {
val deOptionizer = intTEF
val floatifyer = optionFloatTEF
}
val longTEF = new NonPrimitiveJdbcMapper[Long, LongField,TLong](ps.longTEF, this) with IntegralTypedExpressionFactory[LongField,TLong,DoubleField,TDouble] {
val floatifyer = doubleTEF
def convertFromJdbc(v: Long) = new LongField(v)
def convertToJdbc(v: LongField) = v.value
}
val optionLongTEF = new IntegralTypedExpressionFactory[Option[LongField],TOptionLong,Option[DoubleField],TOptionDouble] with DeOptionizer[Long,LongField,TLong,Option[LongField],TOptionLong] {
val deOptionizer = longTEF
val floatifyer = optionDoubleTEF
}
// =========================== Numerical Floating Point ===========================
val floatTEF = new NonPrimitiveJdbcMapper[Float, FloatField,TFloat](ps.floatTEF, this) with FloatTypedExpressionFactory[FloatField,TFloat] {
def convertFromJdbc(v: Float) = new FloatField(v)
def convertToJdbc(v: FloatField) = v.value
}
val optionFloatTEF = new FloatTypedExpressionFactory[Option[FloatField],TOptionFloat] with DeOptionizer[Float,FloatField,TFloat,Option[FloatField],TOptionFloat] {
val deOptionizer = floatTEF
}
val doubleTEF = new NonPrimitiveJdbcMapper[Double, DoubleField,TDouble](ps.doubleTEF, this) with FloatTypedExpressionFactory[DoubleField,TDouble] {
def convertFromJdbc(v: Double) = new DoubleField(v)
def convertToJdbc(v: DoubleField) = v.value
}
val optionDoubleTEF = new FloatTypedExpressionFactory[Option[DoubleField],TOptionDouble] with DeOptionizer[Double,DoubleField,TDouble,Option[DoubleField],TOptionDouble] {
val deOptionizer = doubleTEF
}
val bigDecimalTEF = new NonPrimitiveJdbcMapper[BigDecimal, BigDecimalField,TBigDecimal](ps.bigDecimalTEF, this) with FloatTypedExpressionFactory[BigDecimalField,TBigDecimal] {
def convertFromJdbc(v: BigDecimal) = new BigDecimalField(v)
def convertToJdbc(v: BigDecimalField) = v.value
}
val optionBigDecimalTEF = new FloatTypedExpressionFactory[Option[BigDecimalField],TOptionBigDecimal] with DeOptionizer[BigDecimal,BigDecimalField,TBigDecimal,Option[BigDecimalField],TOptionBigDecimal] {
val deOptionizer = bigDecimalTEF
}
implicit def stringToTE(s: String) = stringTEF.createFromNativeJdbcValue(s)
implicit def optionStringToTE(s: Option[String]) = s.map(new StringField(_))
implicit def dateToTE(s: Date) = dateTEF.createFromNativeJdbcValue(s)
implicit def optionDateToTE(s: Option[Date]) = s.map(new DateField(_))
implicit def timestampToTE(s: Timestamp) = timestampTEF.createFromNativeJdbcValue(s)
implicit def optionTimestampToTE(s: Option[Timestamp]) = s.map(new TimestampField(_))
implicit def booleanToTE(s: Boolean) = booleanTEF.createFromNativeJdbcValue(s)
implicit def optionBooleanToTE(s: Option[Boolean]) = s.map(new BooleanField(_))
implicit def uuidToTE(s: UUID) = uuidTEF.createFromNativeJdbcValue(s)
implicit def optionUUIDToTE(s: Option[UUID]) = s.map(new UuidField(_))
implicit def byteToTE(f: Byte) = byteTEF.createFromNativeJdbcValue(f)
implicit def optionByteToTE(f: Option[Byte]) = f.map(new ByteField(_))
implicit def intToTE(f: IntField) = intTEF.create(f)
implicit def optionIntToTE(f: Option[IntField]) = optionIntTEF.create(f)
//implicit def _intToTE(f: Int) = intTEF.createFromNativeJdbcValue(f)
//implicit def _optionIntToTE(f: Option[Int]) = f.map(new IntField(_))
implicit def longToTE(f: Long) = longTEF.createFromNativeJdbcValue(f)
implicit def optionLongToTE(f: Option[Long]) = f.map(new LongField(_))
implicit def floatToTE(f: Float) = floatTEF.createFromNativeJdbcValue(f)
implicit def optionFloatToTE(f: Option[Float]) = f.map(new FloatField(_))
implicit def doubleToTE(f: Double) = doubleTEF.createFromNativeJdbcValue(f)
implicit def optionDoubleToTE(f: Option[Double]) = f.map(new DoubleField(_))
implicit def bigDecimalToTE(f: BigDecimal) = bigDecimalTEF.createFromNativeJdbcValue(f)
implicit def optionBigDecimalToTE(f: Option[BigDecimal]) = f.map(new BigDecimalField(_))
}
object CustomTypesMode extends CustomTypesMode
class ByteField(val value: Byte) extends CustomType[Byte]
class IntField(val value: Int) extends CustomType[Int]
class StringField(val value: String) extends CustomType[String]
class DoubleField(val value: Double) extends CustomType[Double]
class BigDecimalField(val value: BigDecimal) extends CustomType[BigDecimal]
class FloatField(val value: Float) extends CustomType[Float]
class LongField(val value: Long) extends CustomType[Long]
class BooleanField(val value: Boolean) extends CustomType[Boolean]
class DateField(val value: Date) extends CustomType[Date]
class TimestampField(val value: Timestamp) extends CustomType[Timestamp]
class BinaryField(val value: Array[Byte]) extends CustomType[Array[Byte]]
class UuidField(val value: UUID) extends CustomType[UUID]
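// Sketch: extending the pattern with a new wrapper type (hypothetical name). A matching
// NonPrimitiveJdbcMapper and implicit conversion would also be registered in CustomTypesMode,
// mirroring stringTEF/stringToTE above:
// class EmailField(val value: String) extends CustomType[String]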
|
ccap/Squeryl
|
src/main/scala/org/squeryl/customtypes/CustomTypesMode.scala
|
Scala
|
apache-2.0
| 9,421
|
package codegeneration
import helpers.CodeComparisonSpec
class RelationTraitSpec extends CodeComparisonSpec {
import contextMock.universe._
"simple trait" >> {
generatedContainsCode(
q"object A {@Relation trait T}",
"""trait T[+START <: Node, +END <: Node] extends AbstractRelation[START, END] ;"""
)
}
"with super trait" >> {
generatedContainsCode(
q"object A { @Relation trait K; @Relation trait T extends K}",
"""trait T[+START <: Node, +END <: Node] extends K[START, END] ;"""
)
}
"with external super trait" >> {
generatedContainsCode(
q"object A { @Relation trait K; @Relation trait T extends K with Immutable}",
"""trait T[+START <: Node, +END <: Node] extends K[START, END] with Immutable;"""
)
}
"with properties" >> {
generatedContainsCode(
q"object A {@Relation trait T {val p:Long}}",
q"""trait T[+START <: Node, +END <: Node] extends AbstractRelation[START, END] {
def p: Long = rawItem.properties("p").asInstanceOf[LongPropertyValue]
}"""
)
}
"custom code" >> {
generatedContainsCode(
q"object A {@Node trait T {def custom = 5}}",
q"""trait T extends Node { def custom = 5 }"""
)
}
}
|
renesca/renesca-magic
|
src/test/scala/codegeneration/RelationTraitSpec.scala
|
Scala
|
apache-2.0
| 1,252
|
import scala.quoted._
def test(using QuoteContext) = {
'{ Option(4) match { case Some(a) => a; case None => 1 }}
}
|
som-snytt/dotty
|
tests/pos/i4396a.scala
|
Scala
|
apache-2.0
| 117
|
package scalax.collection.io.json
package error
trait JsonGraphIssue
object JsonGraphError extends Enumeration with JsonGraphIssue {
type JsonGraphError = Value
val NonObjArrValue,
NonArray,
ObjectSizeNEQ1,
InvalidElemTypeId,
EmptyNodeFieldList,
InsufficientNodes,
UnexpectedNodeId,
UnexpectedDescr,
UnknownNode,
NoNodeDescr,
NoEdgeDescr = Value
def err(errType: JsonGraphError, args: String*) = {
val msg = errType match {
case m if m == NonObjArrValue =>
""""{}" is of JSON type {}. Values of nodes/edges JSON fields must be of type JSON object or array."""
case m if m == NonArray =>
""""{}" is of JSON type {}. Nodes and edges must be JSON arrays."""
case m if m == ObjectSizeNEQ1 =>
"""Typed edge JSON objects must contain exactly one field."""
case m if m == InvalidElemTypeId =>
"""The node/edgeTypeId "{}" found in the JSON text is not contained in the descriptor's node/edgeDescriptors."""
      case m if m == EmptyNodeFieldList =>
        """Empty node field list detected. Node field lists must contain at least one field."""
case m if m == InsufficientNodes =>
"""Hyperedge with less than two nodes detected: "{}"."""
case m if m == UnexpectedNodeId =>
"""JSON array of hyperedge node-Ids contains non-string value(s): "{}"."""
case m if m == UnexpectedDescr =>
"""Edge-descriptor "{}" of unexpected type cannot be processed."""
case m if m == UnknownNode =>
"""Edge cannot be created due to missing node with id "{}"."""
case m if m == NoNodeDescr =>
"""No 'NodeDescriptor' capable of processing type "{}" found."""
case m if m == NoEdgeDescr =>
"""No 'EdgeDescriptor' capable of processing type "{}" found."""
}
val replMsg = replacePlaceholders(msg, args)
JsonGraphException(errType, replMsg)
}
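  // replacePlaceholders is defined elsewhere in this package; a minimal sketch of the
  // substitution it is assumed to perform ("{}" markers filled left to right):
  // def replacePlaceholders(msg: String, args: Seq[String]): String =
  //   args.foldLeft(msg)((m, a) => m.replaceFirst("""\{\}""", java.util.regex.Matcher.quoteReplacement(a)))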
case class JsonGraphException(val err: JsonGraphError, val msg: String)
extends Exception("JSON-Graph error: " + msg)
}
object JsonGraphWarning extends Enumeration with JsonGraphIssue {
type JsonGraphWarning = Value
val DuplicateNodeId = Value
def warn(warnType: JsonGraphWarning, args: String*) =
warnType match {
case m if m == DuplicateNodeId =>
"""Duplicate node id "{}" returned for noe "{}". Node "{}" has the same id."""
}
}
|
opyate/scala-graph
|
json/src/main/scala/scalax/collection/io/json/error/Errors.scala
|
Scala
|
bsd-3-clause
| 2,532
|
/*
* Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.persistence.cassandra
import java.io.File
import akka.actor.{ ActorSystem, BootstrapSetup }
import akka.actor.setup.ActorSystemSetup
import akka.cluster.Cluster
import akka.persistence.cassandra.testkit.CassandraLauncher
import com.lightbend.lagom.persistence.{ ActorSystemSpec, PersistenceSpec }
import com.lightbend.lagom.scaladsl.persistence.cassandra.testkit.TestUtil
import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry
import com.typesafe.config.{ Config, ConfigFactory }
class CassandraPersistenceSpec private (system: ActorSystem) extends ActorSystemSpec(system) {
def this(testName: String, config: Config, jsonSerializerRegistry: JsonSerializerRegistry) =
this(ActorSystem(testName, ActorSystemSetup(
BootstrapSetup(config.withFallback(TestUtil.persistenceConfig(
testName,
CassandraLauncher.randomPort
))),
JsonSerializerRegistry.serializationSetupFor(jsonSerializerRegistry)
)))
def this(config: Config, jsonSerializerRegistry: JsonSerializerRegistry) = this(PersistenceSpec.getCallerName(getClass), config, jsonSerializerRegistry)
def this(jsonSerializerRegistry: JsonSerializerRegistry) = this(ConfigFactory.empty(), jsonSerializerRegistry)
override def beforeAll(): Unit = {
super.beforeAll()
val cassandraDirectory = new File("target/" + system.name)
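    // port = 0 asks CassandraLauncher to pick a free port for the embedded instance.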
CassandraLauncher.start(cassandraDirectory, "lagom-test-embedded-cassandra.yaml", clean = true, port = 0)
TestUtil.awaitPersistenceInit(system)
    // Join ourselves - needed because the Cassandra offset store uses a cluster startup task
val cluster = Cluster(system)
cluster.join(cluster.selfAddress)
}
override def afterAll(): Unit = {
CassandraLauncher.stop()
super.afterAll()
}
}
|
edouardKaiser/lagom
|
persistence-cassandra/scaladsl/src/test/scala/com/lightbend/lagom/scaladsl/persistence/cassandra/CassandraPersistenceSpec.scala
|
Scala
|
apache-2.0
| 1,877
|
object Solutionex3_4 extends App {
def extract(r: Array[Int]) = r.filter(_ > 0) ++ r.filter(_ <= 0)
val r = Array(-1, 3, 4, -8, 0, 3)
  println(extract(r).mkString(", "))
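  // expected output: 3, 4, 3, -1, -8, 0 (positives first, relative order preserved within each group)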
}
|
koenighotze/scalafortheimpatient
|
src/main/scala/chapter3/ex3_4.scala
|
Scala
|
apache-2.0
| 155
|
package dk.tennis.dbn.em
import org.junit._
import Assert._
import dk.atp.api.CSVATPMatchesLoader
import dk.atp.api.domain.MatchComposite
import dk.atp.api.domain.SurfaceEnum._
import scala.util.Random
import org.joda.time.DateTime
import org.joda.time.Duration
import dk.tennis.dbn.MatchOutcome
import TennisEM._
import dk.tennis.dbn._
import EMTestUtil._
class GenericTennisEMRealResultsTest {
val ratingSize = 10
val priorProb = getPriorProb(ratingSize)
val emissionProb = getEmissionProb(ratingSize)
val transitionProb = getTransitionProb(ratingSize)
private var llh: List[Double] = Nil
private def progress(currentIter: Int, logLikelihood: Double) = {
llh = logLikelihood :: llh;
println("Log likelihood for iteration %d = %f".format(currentIter, logLikelihood))
}
val iterNum = 10
@Test def emTrain_for_tennis_results_2010_and_2011 {
val atpMatchesLoader = CSVATPMatchesLoader.fromCSVFile("./src/test/resources/match_data_2006_2011.csv")
val matches: Seq[MatchComposite] = (2010 to 2011).flatMap(year => atpMatchesLoader.loadMatches(year))
val filteredMatches = matches.filter(m => m.tournament.surface == HARD && m.tournament.numOfSet == 2)
val rand = new Random(System.currentTimeMillis())
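    // Randomly swap playerA/playerB facts on roughly half the matches, presumably to avoid
    // biasing the data toward the winner always appearing as player A.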
val shuffledMatches = filteredMatches.map { m =>
rand.nextBoolean() match {
case true => {
val newMatchFacts = m.matchFacts.copy(playerAFacts = m.matchFacts.playerBFacts, playerBFacts = m.matchFacts.playerAFacts)
m.copy(matchFacts = newMatchFacts)
}
case false => m
}
}.toList.take(500)
val matchOutcomes = shuffledMatches.map(m => toMatchOutcome(m))
println("Matches size: " + matchOutcomes.size)
val timeSliceInDays = 30
val tennisClusterGraph = GenericTennisDBN(matchOutcomes, priorProb, emissionProb, transitionProb, timeSliceInDays).getTennisClusterGraph()
val trainedParams = GenericTennisEM.train(tennisClusterGraph, iterNum, progress)
println(trainedParams.priorProb.map(e => e.formatted("%.4f")).toList)
println(trainedParams.emissionProb.map(e => e.formatted("%.4f")).toList)
println(trainedParams.transitionProb.map(e => e.formatted("%.4f")).toList)
}
private def toMatchOutcome(m: MatchComposite): MatchOutcome = {
val playerAName = m.matchFacts.playerAFacts.playerName
val playerBName = m.matchFacts.playerBFacts.playerName
val playerAWinner = m.matchFacts.winner.equals(m.matchFacts.playerAFacts.playerName)
MatchOutcome(playerAName, playerBName, playerAWinner, m.tournament.tournamentTime.getTime)
}
}
|
danielkorzekwa/tennis-rating-dbn-em-scala
|
src/test/scala/dk/tennis/dbn/em/GenericTennisEMRealResultsTest.scala
|
Scala
|
bsd-2-clause
| 2,659
|
package org.jetbrains.plugins.scala.lang.completion.weighter
import com.intellij.codeInsight.completion.{CompletionLocation, CompletionWeigher}
import com.intellij.codeInsight.lookup.LookupElement
import org.jetbrains.plugins.scala.lang.completion.lookups.ScalaLookupItem
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScModifierListOwner
/**
* @author Alexander Podkhalyuzin
*/
class ScalaContainingClassWiegher extends CompletionWeigher {
def weigh(element: LookupElement, location: CompletionLocation): Comparable[_] = {
import KindWeights._
ScalaLookupItem.original(element) match {
case si: ScalaLookupItem if si.isLocalVariable => local
case si: ScalaLookupItem if si.isUnderlined => underlined
case si: ScalaLookupItem if si.isDeprecated => deprecated
case p: ScalaLookupItem if p.isNamedParameter => nparam
case sii: ScalaLookupItem if sii.bold => bold
case si: ScalaLookupItem =>
si.element match {
case func: ScFunction if func.getContainingClass == null => localFunc
case withImplicit: ScModifierListOwner if withImplicit.hasModifierPropertyScala("implicit") => underlined
case _ => normal
}
case _ => normal
}
}
object KindWeights extends Enumeration {
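    // Enumeration values compare by declaration order, so 'deprecated' compares lowest and 'local' highest.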
val deprecated, underlined, normal, nparam, bold, localFunc, local = Value
}
}
|
whorbowicz/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/completion/weighter/ScalaContainingClassWiegher.scala
|
Scala
|
apache-2.0
| 1,444
|
package controllers
import play.api._
import play.api.mvc._
import play.api.libs.json.Json
import anorm._
import spray.json._
import structure.ManufactureFormatter._
import util.time._
/**
* Project IntelliJ IDEA
* Module controllers
* User: Gyuhyeon
* Date: 2014. 2. 6.
* Time: 오전 2:13
*/
object Manufacture extends Controller {
def create() = Action(parse.urlFormEncoded)
{
request =>
val body:Map[String, Seq[String]] = request.body
val manufacture_name = body.getOrElse("name", util.dummy.dummyList)(0)
val manufacture_type = body.getOrElse("type", util.dummy.dummyList)(0)
val manufacture_address:String = body.getOrElse("address", util.dummy.dummyList)(0)
val manufacture_phone = body.getOrElse("phone", util.dummy.dummyList)(0)
val manufacture_charger = body.getOrElse("charger", util.dummy.dummyList)(0)
val manufacture_mobile = body.getOrElse("mobile", util.dummy.dummyList)(0)
val manufacture_created = timestamp
val manufacture_updated = timestamp
val manufacture = structure.Manufacture(
NotAssigned,
manufacture_name,
manufacture_type,
manufacture_address,
manufacture_phone,
manufacture_charger,
manufacture_mobile,
manufacture_created,
manufacture_updated)
val dbResult = structure.Manufacture.create(manufacture)
if(dbResult != null)
Ok(Json.obj("result"->"OK", "code"->"200", "data"->dbResult.toJson.toString))
else
Ok(Json.obj("result"->"Fail", "code"->"410", "message"->"DATABASE_EXECUTION_EXCEPTION"))
}
def modify(id:Int) = Action(parse.urlFormEncoded)
{
request =>
val body:Map[String, Seq[String]] = request.body
val manufacture_name = body.getOrElse("name", util.dummy.dummyList)(0)
val manufacture_type = body.getOrElse("type", util.dummy.dummyList)(0)
val manufacture_address:String = body.getOrElse("address", util.dummy.dummyList)(0)
val manufacture_phone = body.getOrElse("phone", util.dummy.dummyList)(0)
val manufacture_charger = body.getOrElse("charger", util.dummy.dummyList)(0)
val manufacture_mobile = body.getOrElse("mobile", util.dummy.dummyList)(0)
val manufacture_updated = timestamp
val manufacture = structure.Manufacture(
new Id(id),
manufacture_name,
manufacture_type,
manufacture_address,
manufacture_phone,
manufacture_charger,
manufacture_mobile,
0,
manufacture_updated)
val dbResult = structure.Manufacture.update(manufacture)
if(dbResult != null)
Ok(Json.obj("result"->"OK", "code"->"200", "data"->dbResult.toJson.toString))
else
Ok(Json.obj("result"->"Fail", "code"->"410", "message"->"DATABASE_EXECUTION_EXCEPTION"))
}
def delete(id:Int) = TODO
def get(id:Int) = Action
{
request =>
val dbResult = structure.Manufacture.findById(new Id(id))
if(dbResult != null)
Ok(Json.obj("result"->"OK", "code"->"200", "data"->dbResult.toJson.toString))
else
Ok(Json.obj("result"->"Fail", "code"->"404", "message"->"NOT_FOUND"))
}
def list(page:Int, count:Int, orderBy:String, orderType:String) = Action
{
request =>
val dbResult = structure.Manufacture.findAll(page, count, orderBy, orderType)
if(dbResult != null)
Ok(Json.obj("result"->"OK", "code"->"200", "data"->dbResult.toJson.toString))
else
Ok(Json.obj("result"->"Fail", "code"->"404", "message"->"NOT_FOUND"))
}
def find(target:String, keyword:String, option:String) = Action
{
request =>
var keywordEscape:String = ""
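      // Whitelist the comparison operator (falling back to "=") rather than interpolating the raw request value.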
val optionEscape:String = option match
{
case "=" => "="
case ">=" => ">="
case "<=" => "<="
case "like" => "like"
case _ => "="
}
if(optionEscape == "like")
keywordEscape = "%" + keyword + "%"
else
keywordEscape = keyword
val dbResult = structure.Manufacture.findByOption("manufacture_" + target, keywordEscape, optionEscape)
if(dbResult != null)
Ok(Json.obj("result"->"OK", "code"->"200", "data"->dbResult.toJson.toString))
else
Ok(Json.obj("result"->"Fail", "code"->"404", "message"->"NOT_FOUND"))
}
}
|
wingleess/EZOne-server
|
app/controllers/Manufacture.scala
|
Scala
|
lgpl-3.0
| 4,504
|
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.library
import scala.scalajs.js
import org.junit.Assert._
import org.junit.Test
import scala.collection.mutable
import scala.reflect.ClassTag
class WrappedDictionaryTest {
// Methods we actually implement
@Test def get(): Unit = {
val map: mutable.Map[String, Any] =
js.Dictionary("a" -> "a", "b" -> 6, "e" -> js.undefined)
assertTrue(map.get("a") == Some("a"))
assertTrue(map.get("b") == Some(6))
assertTrue(map.get("e") == Some(()))
assertTrue(map.get("f") == None)
}
@Test def plusEqualAndMinusEqual(): Unit = {
val dict = js.Dictionary[String]()
val map: mutable.Map[String, String] = dict
assertArrayEquals(Array[AnyRef](), js.Object.properties(dict).toArray[AnyRef])
map += "hello" -> "world"
assertEquals("world", dict("hello"))
map += "foo" -> "bar"
assertEquals("bar", dict("foo"))
map -= "hello"
assertFalse(dict.get("hello").isDefined)
assertArrayEquals(Array[AnyRef]("foo"), js.Object.properties(dict).toArray[AnyRef])
}
@Test def iterator(): Unit = {
val elems = ('a' to 'e').map(_.toString).zip(1 to 5)
val dict = js.Dictionary[Int]()
val map: mutable.Map[String, Int] = dict
dict ++= elems
assertTrue(map.iterator.toList.sorted.sameElements(elems))
}
/* Methods that need to be overloaded in 2.13 collections to get the correct
* result type.
*/
@Test def map(): Unit = {
def ct[A: ClassTag](x: A): ClassTag[A] = implicitly[ClassTag[A]]
val dict = js.Dictionary[Int]("one" -> 1, "two" -> 2, "three" -> 3)
val mapChr = dict.map { case (k, v) => k(0) -> v * 2 }
val mapStr = dict.map { case (k, v) => k(0).toString -> v * 2 }
assertNotSame(classOf[js.WrappedDictionary[_]], ct(mapChr).runtimeClass)
assertSame(classOf[js.WrappedDictionary[_]], ct(mapStr).runtimeClass)
assertEquals(2, mapChr.size)
assertEquals(2, mapStr.size)
}
@Test def flatMap(): Unit = {
def ct[A: ClassTag](x: A): ClassTag[A] = implicitly[ClassTag[A]]
val dict = js.Dictionary[Int]("one" -> 1, "two" -> 2, "three" -> 3)
val flatMapChr = dict.flatMap {
case (k, v) => List(k(0) -> v * 2, k(1) -> v * 3)
}
val flatMapStr = dict.flatMap {
case (k, v) => List(k(0).toString -> v * 2, k(1).toString -> v * 3)
}
assertNotSame(classOf[js.WrappedDictionary[_]], ct(flatMapChr).runtimeClass)
assertSame(classOf[js.WrappedDictionary[_]], ct(flatMapStr).runtimeClass)
assertEquals(5, flatMapChr.size)
assertEquals(5, flatMapStr.size)
}
@Test def collect(): Unit = {
def ct[A: ClassTag](x: A): ClassTag[A] = implicitly[ClassTag[A]]
val dict = js.Dictionary[Int]("one" -> 1, "two" -> 2, "three" -> 3)
val collectChr = dict.collect {
case (k, v) if v > 1 => k(0) -> v * 2
}
val collectStr = dict.collect {
case (k, v) if v > 1 => k(0).toString -> v * 2
}
assertNotSame(classOf[js.WrappedDictionary[_]], ct(collectChr).runtimeClass)
assertSame(classOf[js.WrappedDictionary[_]], ct(collectStr).runtimeClass)
assertEquals(1, collectChr.size)
assertEquals(1, collectStr.size)
}
// Some arbitrary methods to test the builders
@Test def withFilter(): Unit = {
val dict = js.Dictionary[Int]()
val flt = dict.withFilter { case (k, v) => v > 5 || k == "a" }
def size: Int = flt.map(x => x).size
assertEquals(0, size)
dict += "a" -> 1
assertEquals(1, size)
dict += "b" -> 2
assertEquals(1, size)
dict += "c" -> 6
assertEquals(2, size)
dict += "b" -> 7
assertEquals(3, size)
dict -= "a"
assertEquals(2, size)
}
@Test def toList(): Unit = {
val dict = js.Dictionary("a" -> "a", "b" -> 6, "e" -> js.undefined)
val list = dict.toList
assertEquals(3, list.size)
}
}
|
scala-js/scala-js
|
test-suite/js/src/test/scala/org/scalajs/testsuite/library/WrappedDictionaryTest.scala
|
Scala
|
apache-2.0
| 4,103
|
/*
* Copyright (C) 2012 The Regents of The University California.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shark.memstore2
import java.io.{DataInput, DataOutput}
import scala.collection.JavaConversions._
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector
import org.apache.hadoop.io.Writable
import shark.memstore2.column.ColumnBuilder
/**
 * Used by the serializer to convert a partition of data into columnar format
 * and to build a TablePartition.
*/
class TablePartitionBuilder(
ois: Seq[ObjectInspector],
initialColumnSize: Int,
shouldCompress: Boolean)
extends Writable {
def this(oi: StructObjectInspector, initialColumnSize: Int, shouldCompress: Boolean = true) = {
this(oi.getAllStructFieldRefs.map(_.getFieldObjectInspector), initialColumnSize, shouldCompress)
}
private var numRows: Long = 0
private val columnBuilders: Array[ColumnBuilder[_]] = ois.map { oi =>
val columnBuilder = ColumnBuilder.create(oi, shouldCompress)
columnBuilder.initialize(initialColumnSize)
columnBuilder
}.toArray
def incrementRowCount() {
numRows += 1
}
def append(columnIndex: Int, o: Object, oi: ObjectInspector) {
columnBuilders(columnIndex).append(o, oi)
}
def stats: TablePartitionStats = new TablePartitionStats(columnBuilders.map(_.stats), numRows)
def build(): TablePartition = new TablePartition(numRows, columnBuilders.map(_.build()))
// We don't use these, but want to maintain Writable interface for SerDe
override def write(out: DataOutput) {}
override def readFields(in: DataInput) {}
}
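// A minimal usage sketch, kept as a comment (hedged: `oi`, `fieldOis`, and `rows`
// are assumed inputs, not defined in this file):
//   val builder = new TablePartitionBuilder(oi, initialColumnSize = 1024)
//   rows.foreach { row =>
//     row.zipWithIndex.foreach { case (v, i) => builder.append(i, v, fieldOis(i)) }
//     builder.incrementRowCount()
//   }
//   val partition = builder.build()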
|
alabid/blinkdb
|
src/main/scala/shark/memstore2/TablePartitionBuilder.scala
|
Scala
|
apache-2.0
| 2,264
|
/*
* Copyright 2016 Michal Harish, michal.harish@gmail.com
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.codahale.metrics.JmxReporter
import com.typesafe.config._
import io.amient.affinity.core.cluster.Node
import io.amient.affinity.core.util.AffinityMetrics
import scala.util.control.NonFatal
object ExampleGraphMain3 extends App {
try {
val config = ConfigFactory.load("example")
AffinityMetrics.apply(registry => JmxReporter.forRegistry(registry).inDomain("Affinity").build().start())
new Node(ConfigFactory.parseResources("example-node3.conf").withFallback(config)).start()
} catch {
case NonFatal(e) =>
e.printStackTrace()
System.exit(1)
}
}
|
amient/affinity
|
examples/example-distributed-graph/src/main/scala/ExampleGraphMain3.scala
|
Scala
|
apache-2.0
| 1,456
|
package org.jetbrains.plugins.scala.lang.psi.impl.statements
import com.intellij.lang.ASTNode
import com.intellij.openapi.progress.ProgressManager
import com.intellij.psi.scope.PsiScopeProcessor
import com.intellij.psi.tree.IElementType
import com.intellij.psi.{PsiClass, PsiElement, ResolveState}
import org.jetbrains.annotations.Nullable
import org.jetbrains.plugins.scala.extensions.ObjectExt
import org.jetbrains.plugins.scala.icons.Icons
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenType
import org.jetbrains.plugins.scala.lang.parser.ScalaElementType
import org.jetbrains.plugins.scala.lang.parser.ScalaElementType.{EXTENSION_BODY, PARAM_CLAUSES}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.ScBegin
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScParameterClause, ScParameters}
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScExtension, ScExtensionBody, ScFunction, ScFunctionDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.{ScNamedElement, ScTypeParametersOwner}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScMember.WithBaseIconProvider
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaStubBasedElementImpl
import org.jetbrains.plugins.scala.lang.psi.stubs.ScExtensionStub
import javax.swing.Icon
class ScExtensionImpl(@Nullable stub: ScExtensionStub, @Nullable node: ASTNode)
extends ScalaStubBasedElementImpl(stub, ScalaElementType.EXTENSION, node)
with ScExtension
with ScTypeParametersOwner
with WithBaseIconProvider
with ScBegin {
override def toString: String = "Extension on " + targetTypeElement.fold("<unknown>")(_.getText)
override protected final def baseIcon: Icon =
Icons.EXTENSION
override def targetParameter: Option[ScParameter] =
allClauses.find(!_.isUsing).flatMap(_.parameters.headOption)
override def targetTypeElement: Option[ScTypeElement] =
targetParameter.flatMap(_.typeElement)
override def declaredElements: Seq[ScFunction] = extensionMethods
override def extensionMethods: Seq[ScFunction] =
extensionBody.fold(Seq.empty[ScFunction])(_.functions)
override def parameters: Seq[ScParameter] =
clauses.toSeq.flatMap(_.clauses.flatMap(_.parameters))
override def clauses: Option[ScParameters] = getStubOrPsiChild(PARAM_CLAUSES).toOption
override def extensionBody: Option[ScExtensionBody] = getStubOrPsiChild(EXTENSION_BODY).toOption
override def getContainingClass: PsiClass = null
override def hasModifierProperty(name: String): Boolean = false
override def processDeclarations(
processor: PsiScopeProcessor,
state: ResolveState,
lastParent: PsiElement,
place: PsiElement
): Boolean = {
if (!super[ScTypeParametersOwner].processDeclarations(processor, state, lastParent, place))
return false
for {
clause <- effectiveParameterClauses
param <- clause.effectiveParameters
} {
ProgressManager.checkCanceled()
if (!processor.execute(param, state)) return false
}
true
}
override protected def keywordTokenType: IElementType = ScalaTokenType.ExtensionKeyword
override def namedTag: Option[ScNamedElement] = declaredElements.headOption
override protected def endParent: Option[PsiElement] = extensionBody
}
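// For reference, a hedged sketch of the Scala 3 surface syntax this PSI element
// models (example code, not from this repository):
//   extension (s: String)
//     def doubled: String = s + s
// Here targetParameter is `s`, targetTypeElement is `String`, and `doubled` is
// one of the extensionMethods.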
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScExtensionImpl.scala
|
Scala
|
apache-2.0
| 3,453
|
package cromwell.database.sql
/**
* The collection of case classes representing database tables.
*
* It's possible that we may need to swap out the database layers at some point, because:
* - slick upgrades from 3.x to 4.x (see previous 2.x to 3.x migration)
* - we want a new relational mapping layer
* - we simply want a mock database
*
* '''TL;DR Keep the database logic to an absolute minimum!'''
*
* Each case class name should match the table name, replacing capital letters in the class name with an underscore,
* and then converting the entire string to upper case.
*
* The columns in each class should be a primitive type matching the database column.
*
* All column types should match the type in the database, and be one of:
* - `Boolean`
* - `Double`
* - `Int`
* - `Long`
* - `java.sql.Clob`
* - `java.sql.Timestamp`
*
* Nullable columns should be wrapped in an `Option`.
*
* Primary and foreign key columns are the only columns that should be defaulted, as they are to be filled in by the
* database, and cannot and should not be set within the business logic. On the other hand, columns to be filled in by
* the business logic should __not__ have a default in the database package, even if they are nullable.
*
* Example:
*
* {{{
* case class Car
* (
* make: String, // Generic make as a String. No enums, traits, objects, or other business logic.
* model: String, // Same for model. Any tracking of "make must belong to model" isn't done in ORM/FRM.
* year: Int, // Generic year here. Any business logic handled elsewhere.
* lastOwner: Option[String] // No defaults! Please let the business layer populate this value.
* carId: Option[Int] = None // PK, will be automatically populated by the database. Last because it's defaulted.
* )
* }}}
*
* The database(s) will store whatever model passed in. Place the common car types in core, not in the database layer.
*
* {{{
* enum CarModels { Ford, Toyota, Volkswagen, ... } <<-- NOT IN THE DATABASE!
* }}}
*
* Then, to generate a car in the business layer, all values to be passed into the database must be specified,
* including an explicit statement about the last owner:
*
* {{{
* val newCar = Car(Ford.toString, Ford.Fusion.toString, 2010, None)
* }}}
*/
package object tables
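// For example, under the naming convention described above, a case class named
// `CarOwner` maps to the table CAR_OWNER: each interior capital letter is
// prefixed with an underscore and the whole name is upper-cased.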
|
ohsu-comp-bio/cromwell
|
database/sql/src/main/scala/cromwell/database/sql/tables/package.scala
|
Scala
|
bsd-3-clause
| 2,404
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package generators.bankdetails
import generators.BaseGenerator
import models.bankdetails._
import org.scalacheck.Gen
// scalastyle:off magic.number
trait BankDetailsGenerator extends BaseGenerator {
val accountGen: Gen[BankAccount] = for {
accountNumber <- numSequence(10)
sortCode <- numSequence(6)
iban <- numSequence(15)
account <- Gen.oneOf(Seq(
BankAccount(Some(BankAccountIsUk(true)), None, Some(UKAccount(accountNumber, sortCode))),
BankAccount(Some(BankAccountIsUk(false)), Some(BankAccountHasIban(false)), Some(NonUKAccountNumber(accountNumber))),
BankAccount(Some(BankAccountIsUk(false)), Some(BankAccountHasIban(true)), Some( NonUKIBANNumber(iban)))
))
} yield account
val accountTypeGenerator: Gen[BankAccountType] = Gen.oneOf(Seq(PersonalAccount, BelongsToBusiness, BelongsToOtherBusiness, NoBankAccountUsed))
val bankDetailsGen: Gen[BankDetails] = for {
accountType <- accountTypeGenerator
name <- stringOfLengthGen(10)
account <- accountGen
} yield {
BankDetails(Some(accountType), Some(name), Some(account), hasAccepted = true)
}
}
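// Hedged usage sketch inside a class mixing in this trait (ScalaCheck's
// `forAll` assumed in scope via org.scalacheck.Prop):
//   val prop = org.scalacheck.Prop.forAll(bankDetailsGen) { details =>
//     details.hasAccepted
//   }
// Every generated BankDetails is built with hasAccepted = true, so the
// property holds by construction.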
|
hmrc/amls-frontend
|
test/generators/bankdetails/BankDetailsGenerator.scala
|
Scala
|
apache-2.0
| 1,724
|
package de.tuberlin.uebb.sl2.tests.impl
import de.tuberlin.uebb.sl2.modules._
import de.tuberlin.uebb.sl2.impl._
import de.tuberlin.uebb.sl2.tests.specs.ModuleResolverSpec
class ModuleResolverTest
extends ModuleResolverSpec
with AbstractFile
with ModuleResolverImpl
with Syntax
with Errors
with Configs
with SignatureJsonSerializer
{
def testedImplementationName = "Module resolver"
}
|
mzuber/simple-language
|
src/test/scala/impl/ModuleResolverTest.scala
|
Scala
|
bsd-3-clause
| 408
|
/**
* Copyright (C) 2014 Kaj Magnus Lindberg (born 1979)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.debiki.core
case class RawSettings(target: SettingsTarget, valuesBySettingName: Map[String, Any])
/** Identifies a section of all pages, e.g. a forum, a subforum, a blog, or a single page.
* Used to clarify what pages a setting should affect.
*/
sealed abstract class SettingsTarget
object SettingsTarget {
case object WholeSite extends SettingsTarget
case class PageTree(rootPageId: PageId) extends SettingsTarget
case class SinglePage(id: PageId) extends SettingsTarget
}
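// E.g. SettingsTarget.PageTree(rootPageId) scopes a setting to the section
// rooted at that page, while SettingsTarget.WholeSite applies it site-wide.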
|
debiki/debiki-server-old
|
modules/debiki-core/src/main/scala/com/debiki/core/Settings.scala
|
Scala
|
agpl-3.0
| 1,238
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.utils
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.{LocalEnvironment, DataSet => JDataSet}
import org.apache.flink.api.scala.{DataSet, ExecutionEnvironment}
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.environment.LocalStreamEnvironment
import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.bridge.java.internal.{BatchTableEnvironmentImpl => JavaBatchTableEnvironmentImpl, StreamTableEnvironmentImpl => JavaStreamTableEnvironmentImpl}
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.api.bridge.scala.internal.{BatchTableEnvironmentImpl => ScalaBatchTableEnvironmentImpl, StreamTableEnvironmentImpl => ScalaStreamTableEnvironmentImpl}
import org.apache.flink.table.api.internal.{TableEnvImpl, TableEnvironmentImpl, TableImpl, BatchTableEnvImpl => _}
import org.apache.flink.table.api.{ApiExpression, Table, TableConfig, TableSchema}
import org.apache.flink.table.catalog.{CatalogManager, FunctionCatalog}
import org.apache.flink.table.executor.StreamExecutor
import org.apache.flink.table.expressions.Expression
import org.apache.flink.table.functions.{AggregateFunction, ScalarFunction, TableFunction}
import org.apache.flink.table.module.ModuleManager
import org.apache.flink.table.operations.{DataSetQueryOperation, JavaDataStreamQueryOperation, ScalaDataStreamQueryOperation}
import org.apache.flink.table.planner.StreamPlanner
import org.apache.calcite.plan.RelOptUtil
import org.apache.calcite.rel.RelNode
import org.junit.Assert.assertEquals
import org.junit.rules.ExpectedException
import org.junit.{ComparisonFailure, Rule}
import org.mockito.Mockito.{mock, when}
import scala.io.Source
import scala.util.control.Breaks._
/**
* Test base for testing Table API / SQL plans.
*/
class TableTestBase {
// used for accurate exception information checking.
val expectedException = ExpectedException.none()
@Rule
def thrown = expectedException
def batchTestUtil(): BatchTableTestUtil = {
BatchTableTestUtil()
}
def streamTestUtil(): StreamTableTestUtil = {
StreamTableTestUtil()
}
def verifyTableEquals(expected: Table, actual: Table): Unit = {
assertEquals(
"Logical plans do not match",
LogicalPlanFormatUtils.formatTempTableId(RelOptUtil.toString(
TableTestUtil.toRelNode(expected))),
LogicalPlanFormatUtils.formatTempTableId(RelOptUtil.toString(
TableTestUtil.toRelNode(actual))))
}
}
abstract class TableTestUtil(verifyCatalogPath: Boolean = false) {
private var counter = 0
def addTable[T: TypeInformation](fields: Expression*): Table = {
counter += 1
addTable[T](s"Table$counter", fields: _*)
}
def addTable[T: TypeInformation](name: String, fields: Expression*): Table
def addFunction[T: TypeInformation](name: String, function: TableFunction[T]): TableFunction[T]
def addFunction(name: String, function: ScalarFunction): Unit
def verifySql(query: String, expected: String): Unit
def verifyTable(resultTable: Table, expected: String): Unit
def verifySchema(resultTable: Table, fields: Seq[(String, TypeInformation[_])]): Unit = {
val actual = resultTable.getSchema
val expected = new TableSchema(fields.map(_._1).toArray, fields.map(_._2).toArray)
assertEquals(expected, actual)
}
// the print methods are for debugging purposes only
def printTable(resultTable: Table): Unit
def printSql(query: String): Unit
protected def verifyString(expected: String, optimized: RelNode) {
val actual = RelOptUtil.toString(optimized)
// we remove the charset for testing because it
// depends on the native machine (Little/Big Endian)
val actualNoCharset = actual.replace("_UTF-16LE'", "'").replace("_UTF-16BE'", "'")
.replace(" CHARACTER SET \\"UTF-16LE\\"", "").replace(" CHARACTER SET \\"UTF-16BE\\"", "")
val expectedLines = expected.split("\\n").map(_.trim)
val actualLines = actualNoCharset.split("\\n").map(_.trim)
val expectedMessage = expectedLines.mkString("\\n")
val actualMessage = actualLines.mkString("\\n")
breakable {
for ((expectedLine, actualLine) <- expectedLines.zip(actualLines)) {
if (expectedLine == TableTestUtil.ANY_NODE) {
}
else if (expectedLine == TableTestUtil.ANY_SUBTREE) {
break
} else if (expectedLine != actualLine) {
throw new ComparisonFailure(null, expectedMessage, actualMessage)
}
}
}
}
def explain(resultTable: Table): String
}
object TableTestUtil {
val ANY_NODE = "%ANY_NODE%"
val ANY_SUBTREE = "%ANY_SUBTREE%"
private[utils] def toRelNode(expected: Table) = {
expected.asInstanceOf[TableImpl].getTableEnvironment match {
case t: TableEnvImpl => t.getRelBuilder.tableOperation(expected.getQueryOperation).build()
case t: TableEnvironmentImpl =>
t.getPlanner.asInstanceOf[StreamPlanner].getRelBuilder
.tableOperation(expected.getQueryOperation).build()
case _ =>
throw new AssertionError()
}
}
  // these methods currently just simplify string construction;
  // we could replace them with real logic later
def unaryAnyNode(input: String): String = {
s"""$ANY_NODE
|$input
|""".stripMargin.stripLineEnd
}
def anySubtree(): String = {
ANY_SUBTREE
}
def unaryNode(node: String, input: String, term: String*): String = {
s"""$node(${term.mkString(", ")})
|$input
|""".stripMargin.stripLineEnd
}
def binaryNode(node: String, left: String, right: String, term: String*): String = {
s"""$node(${term.mkString(", ")})
|$left
|$right
|""".stripMargin.stripLineEnd
}
def naryNode(node: String, inputs: List[AnyRef], term: String*): String = {
val strInputs = inputs.mkString("\\n")
s"""$node(${term.mkString(", ")})
|$strInputs
|""".stripMargin.stripLineEnd
}
def values(node: String, term: String*): String = {
s"$node(${term.mkString(", ")})"
}
def term(term: AnyRef, value: AnyRef*): String = {
s"$term=[${value.mkString(", ")}]"
}
def tuples(value: List[AnyRef]*): String = {
val listValues = value.map(listValue => s"{ ${listValue.mkString(", ")} }")
term("tuples", "[" + listValues.mkString(", ") + "]")
}
def batchTableNode(table: Table): String = {
val dataSetTable = table.getQueryOperation.asInstanceOf[DataSetQueryOperation[_]]
s"DataSetScan(ref=[${System.identityHashCode(dataSetTable.getDataSet)}], " +
s"fields=[${dataSetTable.getTableSchema.getFieldNames.mkString(", ")}])"
}
def streamTableNode(table: Table): String = {
val (id, fieldNames) = table.getQueryOperation match {
case q: JavaDataStreamQueryOperation[_] =>
(q.getDataStream.getId, q.getTableSchema.getFieldNames)
case q: ScalaDataStreamQueryOperation[_] =>
(q.getDataStream.getId, q.getTableSchema.getFieldNames)
case n => throw new AssertionError(s"Unexpected table node $n")
}
s"DataStreamScan(id=[$id], fields=[${fieldNames.mkString(", ")}])"
}
def readFromResource(file: String): String = {
val source = s"${getClass.getResource("/").getFile}../../src/test/scala/resources/$file"
Source.fromFile(source).mkString
}
def replaceStageId(s: String): String = {
s.replaceAll("\\\\r\\\\n", "\\n").replaceAll("Stage \\\\d+", "")
}
}
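// Hedged sketch of how expected plan strings are typically assembled from the
// helpers above (node and term names are illustrative, not from a real plan):
//   val expected = TableTestUtil.unaryNode(
//     "DataSetCalc",
//     TableTestUtil.batchTableNode(table),
//     TableTestUtil.term("select", "a", "b"))
//   util.verifyTable(resultTable, expected)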
case class BatchTableTestUtil(
catalogManager: Option[CatalogManager] = None)
extends TableTestUtil {
val javaEnv = new LocalEnvironment()
val javaTableEnv = new JavaBatchTableEnvironmentImpl(
javaEnv,
new TableConfig,
catalogManager
.getOrElse(CatalogManagerMocks.createEmptyCatalogManager()),
new ModuleManager)
val env = new ExecutionEnvironment(javaEnv)
val tableEnv = new ScalaBatchTableEnvironmentImpl(
env,
new TableConfig,
catalogManager
.getOrElse(CatalogManagerMocks.createEmptyCatalogManager()),
new ModuleManager)
def addTable[T: TypeInformation](
name: String,
fields: Expression*)
: Table = {
val ds = mock(classOf[DataSet[T]])
val jDs = mock(classOf[JDataSet[T]])
when(ds.javaSet).thenReturn(jDs)
val typeInfo: TypeInformation[T] = implicitly[TypeInformation[T]]
when(jDs.getType).thenReturn(typeInfo)
val t = ds.toTable(tableEnv, fields: _*)
tableEnv.registerTable(name, t)
t
}
def addJavaTable[T](typeInfo: TypeInformation[T], name: String, fields: ApiExpression*): Table = {
val jDs = mock(classOf[JDataSet[T]])
when(jDs.getType).thenReturn(typeInfo)
val t = javaTableEnv.fromDataSet(jDs, fields: _*)
javaTableEnv.registerTable(name, t)
t
}
def addFunction[T: TypeInformation](
name: String,
function: TableFunction[T])
: TableFunction[T] = {
tableEnv.registerFunction(name, function)
function
}
def addFunction(name: String, function: ScalarFunction): Unit = {
tableEnv.registerFunction(name, function)
}
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: AggregateFunction[T, ACC]): Unit = {
tableEnv.registerFunction(name, function)
}
def verifySql(query: String, expected: String): Unit = {
verifyTable(tableEnv.sqlQuery(query), expected)
}
def verifyTable(resultTable: Table, expected: String): Unit = {
val relNode = TableTestUtil.toRelNode(resultTable)
val optimized = tableEnv.optimizer.optimize(relNode)
verifyString(expected, optimized)
}
def verifyJavaSql(query: String, expected: String): Unit = {
verifyJavaTable(javaTableEnv.sqlQuery(query), expected)
}
def verifyJavaTable(resultTable: Table, expected: String): Unit = {
val relNode = TableTestUtil.toRelNode(resultTable)
val optimized = javaTableEnv.optimizer.optimize(relNode)
verifyString(expected, optimized)
}
def printTable(resultTable: Table): Unit = {
val relNode = TableTestUtil.toRelNode(resultTable)
val optimized = tableEnv.optimizer.optimize(relNode)
println(RelOptUtil.toString(optimized))
}
def printSql(query: String): Unit = {
printTable(tableEnv.sqlQuery(query))
}
def explain(resultTable: Table): String = {
tableEnv.explain(resultTable)
}
def toRelNode(table: Table): RelNode = {
tableEnv.getRelBuilder.tableOperation(table.getQueryOperation).build()
}
}
case class StreamTableTestUtil(
catalogManager: Option[CatalogManager] = None)
extends TableTestUtil {
val javaEnv = new LocalStreamEnvironment()
javaEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
private val tableConfig = new TableConfig
private val manager: CatalogManager = catalogManager
.getOrElse(CatalogManagerMocks.createEmptyCatalogManager())
private val moduleManager: ModuleManager = new ModuleManager
private val executor: StreamExecutor = new StreamExecutor(javaEnv)
private val functionCatalog = new FunctionCatalog(tableConfig, manager, moduleManager)
private val streamPlanner = new StreamPlanner(executor, tableConfig, functionCatalog, manager)
val javaTableEnv = new JavaStreamTableEnvironmentImpl(
manager,
moduleManager,
functionCatalog,
tableConfig,
javaEnv,
streamPlanner,
executor,
true)
val env = new StreamExecutionEnvironment(javaEnv)
val tableEnv = new ScalaStreamTableEnvironmentImpl(
manager,
moduleManager,
functionCatalog,
tableConfig,
env,
streamPlanner,
executor,
true)
def addTable[T: TypeInformation](
name: String,
fields: Expression*)
: Table = {
val table = env.fromElements().toTable(tableEnv, fields: _*)
tableEnv.registerTable(name, table)
table
}
def addJavaTable[T](typeInfo: TypeInformation[T], name: String, fields: ApiExpression*): Table = {
val stream = javaEnv.addSource(new EmptySource[T], typeInfo)
val table = javaTableEnv.fromDataStream(stream, fields: _*)
javaTableEnv.registerTable(name, table)
table
}
def addFunction[T: TypeInformation](
name: String,
function: TableFunction[T])
: TableFunction[T] = {
tableEnv.registerFunction(name, function)
function
}
def addFunction(name: String, function: ScalarFunction): Unit = {
tableEnv.registerFunction(name, function)
}
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: AggregateFunction[T, ACC]): Unit = {
tableEnv.registerFunction(name, function)
}
def verifySql(query: String, expected: String): Unit = {
verifyTable(tableEnv.sqlQuery(query), expected)
}
def verifySqlPlansIdentical(query1: String, queries: String*): Unit = {
val resultTable1 = tableEnv.sqlQuery(query1)
queries.foreach(s => verify2Tables(resultTable1, tableEnv.sqlQuery(s)))
}
def verifyTable(resultTable: Table, expected: String): Unit = {
val optimized = optimize(resultTable)
verifyString(expected, optimized)
}
def verify2Tables(resultTable1: Table, resultTable2: Table): Unit = {
val optimized1 = optimize(resultTable1)
val optimized2 = optimize(resultTable2)
assertEquals(RelOptUtil.toString(optimized1), RelOptUtil.toString(optimized2))
}
def verifyJavaSql(query: String, expected: String): Unit = {
verifyJavaTable(javaTableEnv.sqlQuery(query), expected)
}
def verifyJavaTable(resultTable: Table, expected: String): Unit = {
val optimized = optimize(resultTable)
verifyString(expected, optimized)
}
// the print methods are for debugging purposes only
def printTable(resultTable: Table): Unit = {
val optimized = optimize(resultTable)
println(RelOptUtil.toString(optimized))
}
def printSql(query: String): Unit = {
printTable(tableEnv.sqlQuery(query))
}
def explain(resultTable: Table): String = {
tableEnv.explain(resultTable)
}
def toRelNode(table: Table): RelNode = {
tableEnv.getPlanner.asInstanceOf[StreamPlanner]
.getRelBuilder.tableOperation(table.getQueryOperation).build()
}
protected def optimize(resultTable1: Table): RelNode = {
val planner = resultTable1.asInstanceOf[TableImpl]
.getTableEnvironment.asInstanceOf[TableEnvironmentImpl]
.getPlanner.asInstanceOf[StreamPlanner]
val relNode = planner.getRelBuilder.tableOperation(resultTable1.getQueryOperation).build()
val optimized = planner.optimizer
.optimize(relNode, updatesAsRetraction = false, planner.getRelBuilder)
optimized
}
}
class EmptySource[T]() extends SourceFunction[T] {
override def run(ctx: SourceFunction.SourceContext[T]): Unit = {
}
override def cancel(): Unit = {
}
}
|
GJL/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/utils/TableTestBase.scala
|
Scala
|
apache-2.0
| 15,628
|
package circlepuzzles.geometry.spherical
import circlepuzzles.geometry.{Angle, SphericalGeometry}
/**
* Disks on the unit sphere. A disk is defined as the set of points whose distance in radians to a given center point
* is less than or equal to a given radius.
* @param center Center of this disk.
* @param radius Radius of this disk, in radians. Must be in the range (0,pi).
*/
case class Disk(override val center: Point, radius: Angle) extends SphericalGeometry.BaseDisk {
override def circle: Circle = {
new Circle(center, radius)
}
override def rotate(rotationCenter: Point, angle: Angle): Disk = {
// Rotate this by rotating the center, returning a disk with the same radius
Disk(center.rotate(rotationCenter, angle), radius)
}
override def containsCompare(pt: Point): Int = {
// Compute the convex angle between the center and the point
val angle = center.toVector3D.convexAngle(pt.toVector3D)
// Compare the angle to the radius, e.g. if it's less than the radius then it belongs to the interior
angle.radians.compare(radius.radians)
}
}
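// Hedged usage sketch (Point and Angle construction assumed, not defined here):
//   val disk = Disk(somePoint, Angle(math.Pi / 4))
//   disk.containsCompare(pt) < 0   // pt lies strictly inside the disk
//   disk.containsCompare(pt) == 0  // pt lies on the boundary circle
//   disk.containsCompare(pt) > 0   // pt lies strictly outside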
|
wkretschmer/CirclePuzzles
|
src/main/scala/circlepuzzles/geometry/spherical/Disk.scala
|
Scala
|
mit
| 1,099
|
package org.broadinstitute.clio.transfer.model.bam
import java.net.URI
import org.broadinstitute.clio.transfer.model.{DeliverableMetadata, Metadata}
import org.broadinstitute.clio.util.model.DocumentStatus
case class BamMetadata(
documentStatus: Option[DocumentStatus] = None,
bamMd5: Option[Symbol] = None,
bamSize: Option[Long] = None,
bamPath: Option[URI] = None,
baiPath: Option[URI] = None,
insertSizeMetricsPath: Option[URI] = None,
workspaceName: Option[String] = None,
billingProject: Option[String] = None,
notes: Option[String] = None,
) extends Metadata[BamMetadata]
with DeliverableMetadata[BamMetadata] {
override def pathsToDelete: Seq[URI] =
Seq.concat(
bamPath,
baiPath,
      // Delete the bamPath.md5 file only if a workspaceName is defined; otherwise there will be no md5
// (foo.bam.md5 where foo.bam is bamPath)
workspaceName.flatMap(
_ =>
bamPath.map { cp =>
URI.create(s"$cp${BamExtensions.Md5ExtensionAddition}")
}
)
)
override def changeStatus(
documentStatus: DocumentStatus,
changeNote: String
): BamMetadata =
this.copy(
documentStatus = Some(documentStatus),
notes = appendNote(changeNote)
)
override def withWorkspace(name: String, billingProject: String): BamMetadata = {
this.copy(
workspaceName = Some(name),
billingProject = Some(billingProject)
)
}
override def withDocumentStatus(
documentStatus: Option[DocumentStatus]
): BamMetadata =
this.copy(
documentStatus = documentStatus
)
val sampleLevelMetrics = Iterable.empty
}
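// Sketch of the md5 rule in pathsToDelete above (URI illustrative, and assuming
// BamExtensions.Md5ExtensionAddition is ".md5"): with
// bamPath = Some(URI.create("gs://bucket/sample.bam")) and a defined
// workspaceName, the result also contains gs://bucket/sample.bam.md5;
// with workspaceName = None, only the bam and bai paths are returned.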
|
broadinstitute/clio
|
clio-transfer-model/src/main/scala/org/broadinstitute/clio/transfer/model/bam/BamMetadata.scala
|
Scala
|
bsd-3-clause
| 1,647
|
package synereo.client.css
import scala.language.postfixOps
import scalacss.Defaults._
/**
* Created by a4tech on 6/3/2016.
*/
object NewMessageCSS {
object Style extends StyleSheet.Inline {
import dsl._
val textAreaNewMessage = style(
width(100.%%),
marginTop(16.px),
marginBottom(10.px),
border.none.important,
resize.none.important,
fontSize(18.px)
)
val newMessageActionsContainerDiv = style(
// marginTop(30.px),
marginBottom(30.px)
)
val userImage = style(
maxWidth(50.px),
maxHeight(50.px)
)
val createPostBtn = style(
backgroundColor(c"#ff806c"),
color(c"#FFFFFF"),
fontSize(22.px),
fontFamily :=! "karla",
// marginRight(10.px),
&.hover(
backgroundColor(c"#ff806c").important,
color(c"#FFFFFF")
),
&.focus(
backgroundColor(c"#ff806c").important,
color(c"#FFFFFF")
)
)
val changePersonaBtn = style(
backgroundColor.transparent.important,
fontSize(20.px),
fontWeight.bold,
paddingLeft.`0`.important,
&.hover(
backgroundColor.transparent.important
),
&.focus(
backgroundColor.transparent.important
),
media.maxWidth(1024.px)(
fontSize(15.px)
)
)
val PersonaContainerDiv = style(
marginTop.`0`,
marginBottom(20.px),
media.maxWidth(580.px)(
marginTop(-30.px)
)
)
val newMessageCancelBtn = style(
backgroundColor.transparent.important,
color(c"#242D40"),
border.none.important,
fontSize(22.px),
fontWeight._700,
fontFamily :=! "karla",
marginRight(10.px),
marginLeft(10.px),
&.hover(
backgroundColor.transparent.important,
color(c"#242D40"),
border.none.important
),
&.focus(
backgroundColor.transparent.important,
color(c"#242D40"),
border.none.important
)
)
val postingShortHandBtn = style(
backgroundColor.transparent.important,
color(c"#929497"),
border.none.important,
fontSize(14.px),
fontFamily :=! "karla",
marginRight(10.px),
marginLeft(10.px),
&.hover(
backgroundColor.transparent.important,
color(c"#242D40"),
border.none.important
),
&.focus(
backgroundColor.transparent.important,
color(c"#242D40"),
border.none.important
)
)
val userNameOnDilogue = style(
fontSize(16 px),
media.maxWidth(1024.px)(
fontSize(10.px)
)
)
val createPostTagBtn = style(
margin(5.px),
fontFamily :=! "karla",
fontWeight.normal,
fontSize(12.px),
// textTransform.capitalize,
backgroundColor.transparent.important,
height(38.px),
color(c"#000"),
opacity(0.8),
border(1.px, solid, c"#78D3F5"),
borderRadius(20.px),
minWidth(80.px),
// padding.`0`.important,
&.hover(
color(c"#000"),
border(1.px, solid, c"#78D3F5"),
backgroundColor.transparent.important
)
)
    // spinner CSS
val spinner = style(
width(100.%%)
)
val spinnerinput = style(
textAlign.right
)
val inputgroupbtnVertical = style(
position.relative,
whiteSpace.nowrap,
width(1 %%),
verticalAlign.middle,
display.tableCell
)
val spinnerBtn1 = style(
display.block,
float.none,
width(100 %%),
maxWidth(100 %%),
padding(8.px),
marginLeft(-1.px),
position.relative,
borderTopRightRadius(4.px),
borderBottomRightRadius(4.px),
borderBottomLeftRadius(0.px),
borderTopLeftRadius(0.px)
)
val spinnerBtn2 = style(
display.block,
float.none,
width(100 %%),
maxWidth(100 %%),
padding(8.px),
marginLeft(-1.px),
position.relative,
borderBottomRightRadius(4.px),
borderTopRightRadius(0.px),
borderBottomLeftRadius(0.px),
borderTopLeftRadius(0.px),
marginTop(-2.px)
)
val spinnerCaretIcon = style(
position.absolute,
top(0.px),
left(4.px)
)
}
}
|
LivelyGig/ProductWebUI
|
sclient/src/main/scala/synereo/client/css/NewMessageCSS.scala
|
Scala
|
apache-2.0
| 4,285
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.UnresolvedSeed
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, CodeGenerator, ExprCode, FalseLiteral}
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.trees.TreePattern.{EXPRESSION_WITH_RANDOM_SEED, TreePattern}
import org.apache.spark.sql.types._
import org.apache.spark.util.random.XORShiftRandom
/**
* A Random distribution generating expression.
* TODO: This can be made generic to generate any type of random distribution, or any type of
* StructType.
*
* Since this expression is stateful, it cannot be a case object.
*/
abstract class RDG extends UnaryExpression with ExpectsInputTypes with Stateful
with ExpressionWithRandomSeed {
  /**
   * Random number generator, seeded per partition. Because it is transient, the
   * generator is reset every time we serialize, deserialize, and initialize it.
   */
@transient protected var rng: XORShiftRandom = _
override protected def initializeInternal(partitionIndex: Int): Unit = {
rng = new XORShiftRandom(seed + partitionIndex)
}
override def seedExpression: Expression = child
@transient protected lazy val seed: Long = seedExpression match {
case e if e.dataType == IntegerType => e.eval().asInstanceOf[Int]
case e if e.dataType == LongType => e.eval().asInstanceOf[Long]
}
override def nullable: Boolean = false
override def dataType: DataType = DoubleType
override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection(IntegerType, LongType))
}
/**
* Represents the behavior of expressions which have a random seed and can renew the seed.
* Usually the random seed needs to be renewed at each execution under streaming queries.
*/
trait ExpressionWithRandomSeed extends Expression {
override val nodePatterns: Seq[TreePattern] = Seq(EXPRESSION_WITH_RANDOM_SEED)
def seedExpression: Expression
def withNewSeed(seed: Long): Expression
}
/** Generate a random column with i.i.d. uniformly distributed values in [0, 1). */
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_([seed]) - Returns a random value with independent and identically distributed (i.i.d.) uniformly distributed values in [0, 1).",
examples = """
Examples:
> SELECT _FUNC_();
0.9629742951434543
> SELECT _FUNC_(0);
0.7604953758285915
> SELECT _FUNC_(null);
0.7604953758285915
""",
note = """
    The function is non-deterministic in the general case.
""",
since = "1.5.0",
group = "math_funcs")
// scalastyle:on line.size.limit
case class Rand(child: Expression, hideSeed: Boolean = false) extends RDG {
def this() = this(UnresolvedSeed, true)
def this(child: Expression) = this(child, false)
override def withNewSeed(seed: Long): Rand = Rand(Literal(seed, LongType), hideSeed)
override protected def evalInternal(input: InternalRow): Double = rng.nextDouble()
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val className = classOf[XORShiftRandom].getName
val rngTerm = ctx.addMutableState(className, "rng")
ctx.addPartitionInitializationStatement(
s"$rngTerm = new $className(${seed}L + partitionIndex);")
ev.copy(code = code"""
final ${CodeGenerator.javaType(dataType)} ${ev.value} = $rngTerm.nextDouble();""",
isNull = FalseLiteral)
}
override def freshCopy(): Rand = Rand(child, hideSeed)
override def flatArguments: Iterator[Any] = Iterator(child)
override def sql: String = {
s"rand(${if (hideSeed) "" else child.sql})"
}
override protected def withNewChildInternal(newChild: Expression): Rand = copy(child = newChild)
}
object Rand {
def apply(seed: Long): Rand = Rand(Literal(seed, LongType))
}
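// Hedged usage sketch: Rand(42L) builds the expression with a literal seed; at
// partition initialization each task creates XORShiftRandom(42L + partitionIndex),
// so the double stream is reproducible per partition and differs across
// partitions. The SQL form `SELECT rand(42)` resolves to the same generator.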
/** Generate a random column with i.i.d. values drawn from the standard normal distribution. */
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = """_FUNC_([seed]) - Returns a random value with independent and identically distributed (i.i.d.) values drawn from the standard normal distribution.""",
examples = """
Examples:
> SELECT _FUNC_();
-0.3254147983080288
> SELECT _FUNC_(0);
1.6034991609278433
> SELECT _FUNC_(null);
1.6034991609278433
""",
note = """
    The function is non-deterministic in the general case.
""",
since = "1.5.0",
group = "math_funcs")
// scalastyle:on line.size.limit
case class Randn(child: Expression, hideSeed: Boolean = false) extends RDG {
def this() = this(UnresolvedSeed, true)
def this(child: Expression) = this(child, false)
override def withNewSeed(seed: Long): Randn = Randn(Literal(seed, LongType), hideSeed)
override protected def evalInternal(input: InternalRow): Double = rng.nextGaussian()
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val className = classOf[XORShiftRandom].getName
val rngTerm = ctx.addMutableState(className, "rng")
ctx.addPartitionInitializationStatement(
s"$rngTerm = new $className(${seed}L + partitionIndex);")
ev.copy(code = code"""
final ${CodeGenerator.javaType(dataType)} ${ev.value} = $rngTerm.nextGaussian();""",
isNull = FalseLiteral)
}
override def freshCopy(): Randn = Randn(child, hideSeed)
override def flatArguments: Iterator[Any] = Iterator(child)
override def sql: String = {
s"randn(${if (hideSeed) "" else child.sql})"
}
override protected def withNewChildInternal(newChild: Expression): Randn = copy(child = newChild)
}
object Randn {
def apply(seed: Long): Randn = Randn(Literal(seed, LongType))
}
|
ueshin/apache-spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/randomExpressions.scala
|
Scala
|
apache-2.0
| 6,538
|
package com.etsy.sahale
import cascading.flow.{Flow, FlowStep}
import cascading.flow.hadoop.HadoopFlowStep
import cascading.stats.{CascadingStats, FlowStepStats}
import cascading.stats.hadoop.{HadoopStepStats, HadoopSliceStats}
import cascading.tap.Tap
import cascading.util.Util
import java.util.Properties
import spray.json._
import DefaultJsonProtocol._
import org.apache.hadoop.mapred.JobConf
import org.apache.log4j.Logger
import scala.collection.mutable
import scala.collection.JavaConversions._
object StepStatus {
val NO_JOB_ID = "NO_JOB_ID"
val NOT_LAUNCHED = "NOT_LAUNCHED"
val LOG: Logger = Logger.getLogger(classOf[StepStatus])
}
class StepStatus(val flow: Flow[_], val stepNumber: Int, val stepId: String, props: Properties) {
import com.etsy.sahale.StepStatus._
import com.etsy.sahale.JsonUtil
private val state = mutable.Map[String, Any](
"step_number" -> stepNumber,
"sources" -> Map.empty[String, Seq[String]],
"sink" -> Map.empty[String, Seq[String]],
"job_id" -> NO_JOB_ID,
"step_id" -> stepId,
"map_progress" -> 0.0,
"reduce_progress" -> 0.0,
"step_status" -> NOT_LAUNCHED,
"step_priority" -> 5,
"step_running_time" -> 0L,
"step_start_epoch_ms" -> 0L,
"step_submit_epoch_ms" -> 0L,
"step_end_epoch_ms" -> 0L,
"has_reduce_stage" -> false,
"counters" -> Map.empty[String, Map[String, Long]],
"config_props" -> Map.empty[String, String]
)
def send: JsValue = JsonUtil.toJsonMap(state.toMap).toJson
def getStatus: String = stats.getStatus.toString
  // refresh the cached step state from the latest Hadoop step stats
def update = state += (
"has_reduce_stage" -> stepHasReduceStage,
"job_id" -> stats.getJobID,
"step_running_time" -> getStepRunningTime,
"step_status" -> getStatus,
"map_progress" -> getMapProgress,
"reduce_progress" -> getReduceProgress,
"step_priority" -> getStepPriority,
"step_start_epoch_ms" -> stats.getStartTime,
"step_submit_epoch_ms" -> stats.getSubmitTime,
"step_end_epoch_ms" -> stats.getFinishedTime,
"counters" -> updateStepCounters,
"config_props" -> updateHadoopConf
)
// for unit tests
override def equals(other: Any): Boolean = {
if (other != null && other.isInstanceOf[StepStatus]) {
val nss = other.asInstanceOf[StepStatus]
this.state == nss.state
} else {
false
}
}
// let caller pull from cached data when possible
def get[T](key: String): T = state(key).asInstanceOf[T]
// let caller pass in queries for the step encapsulated here
def extract[T](fn: (HadoopFlowStep) => T): T = fn(step)
def captureTaps(sources: Map[String, Seq[String]], sink: Map[String, Seq[String]]): Unit = state += (
"sources" -> sources,
"sink" -> sink
)
// will be reset for us by FlowTrackerStepStrategy
private var stepStartMillis = System.currentTimeMillis
def markStartTime: Unit = stepStartMillis = System.currentTimeMillis
def getStepPriority: Int = step.getSubmitPriority
def getStepRunningTime: Long = (System.currentTimeMillis - stepStartMillis) / 1000L
def getMapProgress: Double = stats.getMapProgress.isNaN match {
case true => 0.0
case _ => JsonUtil.percent(stats.getMapProgress)
}
def getReduceProgress: Double = stats.getReduceProgress.isNaN match {
case true => 0.0
case _ => JsonUtil.percent(stats.getReduceProgress)
}
def stepHasReduceStage: Boolean = {
step.getConfig.asInstanceOf[JobConf].getNumReduceTasks > 0L
}
def aggrFunc(group: String, key: String): Long = {
stats.getCounterValue(group, key)
}
//////////// INTERNALS ////////////
private lazy val step = flow.getFlowSteps.toList.filter {
fs: FlowStep[_] => fs.getID == stepId
}.head.asInstanceOf[HadoopFlowStep]
private def stats = step.getFlowStepStats.asInstanceOf[HadoopStepStats]
private def updateHadoopConf = propertiesToExtract.foldLeft(Map.empty[String, String]) {
(acc, prop) => acc ++ Map(prop -> step.getConfig.asInstanceOf[JobConf].get(prop, ""))
}
private def updateStepCounters: Map[String, Map[String, Long]] = dumpCounters.groupBy[String] {
i: (String, String, Long) => i._1
}.map {
case(k: String, v: Iterable[(String, String, Long)]) =>
k -> v.map { vv => vv._2 -> vv._3 }.toMap
}.toMap
private def dumpCounters: Iterable[(String, String, Long)] = {
for (g <- stats.getCounterGroups ; c <- stats.getCountersFor(g)) yield {
(cleanGroupName(g), c, stats.getCounterValue(g, c))
}
}
// if users want to track additional JobConf values, put the chosen keys in a CSV
// list in flow-tracker.properties entry "sahale.step.selected.configs" at build time
private val propertiesToExtract = Seq("sahale.additional.links", "scalding.step.descriptions") ++ {
props.getProperty("sahale.step.selected.configs", "")
.split(""",""").map { _.trim }.filter { _ != "" }.toSeq
}
private def cleanGroupName(name: String): String = {
name match {
case oah: String if (oah.startsWith("org.apache.hadoop.")) => cleanGroupName(oah.substring(18))
case ic: String if (ic.indexOf("""$""") >= 0) => cleanGroupName(ic.substring(0, ic.indexOf("""$""")))
case _ => name
}
}
}
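// Example flow-tracker.properties entry enabling extra JobConf keys to be
// reported under "config_props" (key names illustrative):
//   sahale.step.selected.configs=mapreduce.job.queuename,mapreduce.job.user.name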
|
etsy/Sahale
|
flowtracker/src/main/scala/StepStatus.scala
|
Scala
|
mit
| 5,600
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
 * under the License.
*
* Contributors:
* Hao Jiang - initial API and implementation
*
*/
package edu.uchicago.cs.encsel.dataset.schema
import java.net.URI
import java.text.NumberFormat
import edu.uchicago.cs.encsel.dataset.parser.ParserFactory
import edu.uchicago.cs.encsel.model.DataType
import org.slf4j.LoggerFactory
import scala.util.Try
class SchemaGuesser {
val logger = LoggerFactory.getLogger(getClass)
def guessSchema(file: URI): Schema = {
val parser = ParserFactory.getParser(file)
if (null == parser) {
if (logger.isDebugEnabled())
logger.debug("No parser available for %s".format(file.toString))
return null
}
val records = parser.parse(file, null)
val guessedHeader = parser.guessHeaderName
val columns = guessedHeader.map(_.replaceAll("[^\\d\\w_]+", "_"))
.map((DataType.BOOLEAN, _))
var malformatCount = 0
records.foreach(record => {
if (record.length == columns.length) {
for (i <- columns.indices) {
var value = record(i)
if (value != null && value.trim().length() != 0) {
value = value.trim()
val expected = testType(value, columns(i)._1)
if (expected != columns(i)._1)
columns(i) = (expected, columns(i)._2)
}
}
} else {
malformatCount += 1
}
})
if (malformatCount > 0) {
logger.warn("Malformatted record counts %d in %s".format(malformatCount, file.toString))
}
new Schema(columns, true)
}
protected val booleanValues = Set("0", "1", "yes", "no", "true", "false")
protected val numberRegex = """[\-]?[\d,]+""".r
protected val floatRegex = """[\-]?[,\d]*(\.\d*)?(E\d*)?""".r
protected val numberParser = NumberFormat.getInstance
def testType(input: String, expected: DataType): DataType = {
expected match {
case DataType.BOOLEAN => {
if (booleanValues.contains(input.toLowerCase()))
DataType.BOOLEAN
else
testType(input, DataType.INTEGER)
}
case DataType.INTEGER => {
input match {
case numberRegex(_*) => {
Try {
val num = numberParser.parse(input)
num match {
case x if x.longValue() != x.doubleValue() => DataType.STRING // Too Long
case x if x.intValue() == x.longValue() => DataType.INTEGER
case _ => DataType.LONG
}
}.getOrElse(DataType.STRING)
}
case floatRegex(_*) => testType(input, DataType.DOUBLE)
case _ => DataType.STRING
}
}
case DataType.LONG => {
input match {
case numberRegex(_*) => {
Try {
val num = numberParser.parse(input)
num match {
case x if x.longValue() != x.doubleValue() => DataType.STRING // Too Long
case _ => DataType.LONG
}
}.getOrElse(DataType.STRING)
}
case floatRegex(_*) => testType(input, DataType.DOUBLE)
case _ => DataType.STRING
}
}
case DataType.FLOAT => {
input match {
case floatRegex(_*) =>
Try {
val num = numberParser.parse(input)
num match {
case x if x.floatValue() == x.doubleValue() => DataType.FLOAT
case _ => DataType.DOUBLE
}
}.getOrElse(DataType.STRING)
case _ => DataType.STRING
}
}
case DataType.DOUBLE => {
input match {
case floatRegex(_*) =>
Try {
val num = numberParser.parse(input)
DataType.DOUBLE
}.getOrElse(DataType.STRING)
case _ => DataType.STRING
}
}
case DataType.STRING => expected
}
}
}
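// Worked examples of the type widening implemented by testType
// (BOOLEAN -> INTEGER -> LONG/DOUBLE -> STRING; inputs illustrative):
//   testType("yes",  DataType.BOOLEAN) == DataType.BOOLEAN
//   testType("42",   DataType.BOOLEAN) == DataType.INTEGER
//   testType("3.14", DataType.INTEGER) == DataType.DOUBLE
//   testType("abc",  DataType.LONG)    == DataType.STRING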
|
harperjiang/enc-selector
|
src/main/scala/edu/uchicago/cs/encsel/dataset/schema/SchemaGuesser.scala
|
Scala
|
apache-2.0
| 4,652
|
package scalax.collection.io.json
import scalax.collection._
import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._
import scalax.collection.generic.GraphCoreCompanion
import descriptor.predefined._
import org.scalatest._
import org.scalatest.refspec.RefSpec
class TDefaultSerializationRootTest
extends Suites(
new TDefaultSerialization[immutable.Graph](immutable.Graph),
new TDefaultSerialization[mutable.Graph](mutable.Graph)
)
class TDefaultSerialization[CC[N, E <: EdgeLike[N]] <: Graph[N, E] with GraphLike[N, E, CC]](
val factory: GraphCoreCompanion[CC]
) extends RefSpec
with Matchers {
object Fixture {
val jsonText = """
{ "nodes" : [
{"i":1, "e":{"jsonClass":"CExt","i":2}}
]
}""".filterNot(_.isWhitespace)
def descriptor(extClasses: List[Class[_]]) =
new Descriptor[Node](
new NodeDescriptor[Node](extraClasses = extClasses) {
def id(node: Any) = node.toString
},
Di.descriptor[Node]()
)
val extClasses = List(classOf[CExt])
val graph = factory[Node, DiEdge](Node(1, CExt(2)))
}
object `JSON import/export of node classes not known at compilation time works fine` {
def `when exporting` {
import Fixture._
graph.toJson(descriptor(extClasses)) should be(jsonText)
}
def `when importing` {
import Fixture._
factory.fromJson[Node, DiEdge](jsonText, descriptor(extClasses)) should be(graph)
}
def `when reimporting` {
import Fixture._
factory.fromJson[Node, DiEdge](graph.toJson(descriptor(extClasses)), descriptor(extClasses)) should be(graph)
}
}
}
trait Ext
case class Node(val i: Int, val e: Ext)
// library user extension
case class CExt(i: Int) extends Ext
|
scala-graph/scala-graph
|
json/src/test/scala/scalax/collection/io/json/TDefaultSerialization.scala
|
Scala
|
apache-2.0
| 1,786
|
/*
* Scala classfile decoder (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package tools.scalap
class ByteArrayReader(content: Array[Byte]) {
/** the buffer containing the file
*/
val buf: Array[Byte] = content
/** the current input pointer
*/
var bp: Int = 0
/** return byte at offset 'pos'
*/
def byteAt(pos: Int): Byte = buf(pos)
/** read a byte
*/
def nextByte: Byte = {
bp += 1
buf(bp - 1)
}
/** read some bytes
*/
def nextBytes(len: Int): Array[Byte] = {
val res = new Array[Byte](len)
System.arraycopy(buf, bp, res, 0, len)
bp += len
res
}
/** read a character
*/
def nextChar: Char = {
bp += 2
getChar(bp - 2)
}
/** read an integer
*/
def nextInt: Int = {
bp += 4
getInt(bp - 4)
}
/** read a long
*/
def nextLong: Long = {
bp += 8
getLong(bp - 8)
}
/** read a float
*/
def nextFloat: Float = java.lang.Float.intBitsToFloat(nextInt)
/** read a double
*/
def nextDouble: Double = java.lang.Double.longBitsToDouble(nextLong)
/** read an UTF8 encoded string
*/
def nextUTF8(len: Int): String = {
val cs = scala.io.Codec.fromUTF8(buf, bp, len)
bp += len
new String(cs)
}
/** extract a character at position bp from buf
*/
def getChar(bp: Int): Char =
(((buf(bp) & 0xff) << 8) + (buf(bp + 1) & 0xff)).asInstanceOf[Char]
/** extract an integer at position bp from buf
*/
def getInt(bp: Int): Int =
((buf(bp ) & 0xff) << 24) +
((buf(bp + 1) & 0xff) << 16) +
((buf(bp + 2) & 0xff) << 8) +
(buf(bp + 3) & 0xff)
/** extract a long integer at position bp from buf
*/
def getLong(bp: Int): Long =
(getInt(bp).toLong << 32) + (getInt(bp + 4).toLong & 0xffffffffL)
/** extract a float at position bp from buf
*/
def getFloat(bp: Int): Float = java.lang.Float.intBitsToFloat(getInt(bp))
/** extract a double at position bp from buf
*/
def getDouble(bp: Int): Double = java.lang.Double.longBitsToDouble(getLong(bp))
/** skip next 'n' bytes
*/
def skip(n: Int): Unit = {
bp += n
}
}
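// Worked example of the big-endian decoding above:
//   val r = new ByteArrayReader(Array[Byte](0x00, 0x00, 0x01, 0x02))
//   r.getInt(0)  // == 258, i.e. (0x01 << 8) + 0x02
//   r.getChar(2) // == '\u0102'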
|
scala/scala
|
src/scalap/scala/tools/scalap/ByteArrayReader.scala
|
Scala
|
apache-2.0
| 2,363
|
package chrome.tabs.bindings
import chrome.windows.bindings.Window
import scala.scalajs.js
object MoveProperties {
def apply(windowId: js.UndefOr[Window.Id] = js.undefined,
index: Int): MoveProperties = {
js.Dynamic
.literal(
windowId = windowId,
index = index
)
.asInstanceOf[MoveProperties]
}
}
@js.native
trait MoveProperties extends js.Object {
def windowId: js.UndefOr[Window.Id] = js.native
def index: Int = js.native
}
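// Hedged usage sketch (per the Chrome tabs API): MoveProperties(index = 0)
// moves a tab to the front of its current window; supply windowId to move it
// into another window instead, e.g. MoveProperties(windowId = someId, index = -1)
// to append it at the end.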
|
lucidd/scala-js-chrome
|
bindings/src/main/scala/chrome/tabs/bindings/MoveProperties.scala
|
Scala
|
mit
| 497
|
package capitulo09
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
import scala.io.Source
@RunWith(classOf[JUnitRunner])
class LendoCaracteresArquivo extends FunSuite{
test("lendo caracteres de um arquivo"){
//para ler caracteres individuais de um arquivo, você
//pode usar um objeto Source diretamente como um Iterator
//porque a classe Source estende de Iterator[Char]
val source = Source.fromFile("src/test/resources/myfile.txt","UTF-8")
var s = ""
for (c <- source) s += c + "."
assert("L.i.n.h.a. .1.\n.L.i.n.h.a. .2.\n.L.i.n.h.a. .3." == s)
source.close;
}
  // If you want to peek at a character without consuming it, call
  // the buffered method on the source object. You can then peek
  // at the next input character with the head method without
  // consuming it
  test("peeking at characters in a file"){
val source = Source.fromFile("src/test/resources/myfile.txt","UTF-8")
val iter = source.buffered
var count = 1
while (iter.hasNext){
if (iter.head == 'i') count += 1
iter.next
}
assert(4 == count)
source.close;
}
  // If your file is not large, you can simply read it into
  // a string and process it
  test("reading the file contents into a string"){
val source = Source.fromFile("src/test/resources/myfile.txt","UTF-8")
val contents = source.mkString
assert("Linha 1\nLinha 2\nLinha 3" == contents)
source.close;
}
}
|
celioeduardo/scala-impatient
|
src/test/scala/capitulo09/LeituraCaracteresArquivo.scala
|
Scala
|
mit
| 1,493
|
trait T extends Array { // error // error
def t1(as: String*): Array[String] = { varargs1(as: _*) } // error
def t2(as: String*): Array[String] = { super.varargs1(as: _*) } // error
}
class C extends Base_1 { // error
def c1(as: String*): Array[String] = { varargs1(as: _*) } // error
def c2(as: String*): Array[String] = { super.varargs1(as: _*) } // error
}
object Test extends App {
val t = new T {} // error
println(t.t1("a", "b").mkString(","))
println(t.t2("a", "b").mkString(","))
val c = new C {}
println(c.c1("a", "b").mkString(","))
println(c.c2("a", "b").mkString(","))
class CC[T]
val x = new CC[_] // error
}
|
som-snytt/dotty
|
tests/neg/i1643.scala
|
Scala
|
apache-2.0
| 649
|
package collins.models
import play.api.Logger
import collins.util.config.NodeclassifierConfig
trait AssetDistanceEval {
/**
   * Calculates the distance between two assets, returning a non-negative integer.
   * A higher value corresponds to a greater distance.
*
* The ordering induced by distance should be a preorder
*/
def distance(a: Asset, b: Asset): Int
}
/**
* Expects asset names to be parsable integers!
*/
class MockAssetNameEval extends AssetDistanceEval {
def distance(a: Asset, b: Asset) = try {
math.abs(Integer.parseInt(a.tag) - Integer.parseInt(b.tag))
} catch {
case n: NumberFormatException =>
throw new NumberFormatException("MockAssetNameEval requires asset tags to be parse-able integers (%s)".format(n.getMessage))
}
}
class PhysicalDistanceEval(sortkeys: Set[String]) extends AssetDistanceEval {
/**
* Calculates physical distance using the configured set of nodeclass.sortKeys
*
* The formula is as follows:
* - sort keys are ordered from least to most significant, let Sn be the nth sort key (starting at 0)
* - let a.Sn be the tag value of Sn for asset a
* - let f(n) = if (a.Sn == b.Sn) 0 else 1
   * - let distance(a,b) = SUM(i = 0 to n-1) f(i) * 2^i
*/
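  /* Worked illustration with hypothetical sort keys (rack, row), ordered least to most
   * significant: if a and b share a rack (f(0) = 0) but sit in different rows (f(1) = 1),
   * distance(a, b) = 0 * 2^0 + 1 * 2^1 = 2.
   */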
def distance(a: Asset, b: Asset): Int = {
sortkeys
.zipWithIndex
.map{ key =>
if ( (a.getMetaAttribute(key._1), b.getMetaAttribute(key._1)) match {
case (None, None) => true
case (None, _) => false
case (_, None) => false
case (Some(x),Some(y)) => x.valueEquals(y) } )
math.pow(2, key._2).toInt
else 0
}
.sum
}
}
import shared.SortDirection
import shared.SortDirection._
object AssetSort extends Enumeration {
type Type = Value
val Name = Value("name")
val Distance = Value("distance")
val Distribution = Value("distribution")
val Arbitrary = Value("arbitrary")
}
import AssetSort._
object AssetDistanceSorter {
def sortKeys = NodeclassifierConfig.sortKeys
def sort(
target: Asset,
similarAssets: Seq[Asset],
sortType: AssetSort.Type,
direction: SortDirection
): Seq[Asset] = sortType match {
case Name => genericSort(target, similarAssets, new MockAssetNameEval, direction)
case Distance => genericSort(target, similarAssets, new PhysicalDistanceEval(sortKeys), direction)
/** Asc means sparse search, Desc means dense search */
case Distribution => distributionSort(target, similarAssets, direction, sortKeys)
case Arbitrary => similarAssets
}
def distributionSort(target: Asset, similar: Seq[Asset], direction: SortDirection, sortKeys: Set[String]) = {
val sort = new PhysicalDistanceEval(sortKeys)
    /** Pulls out assets one at a time based on physical proximity to the
        current group of assets. A sparse search orders by the least physically
        close assets. This could be extended with a flag to also serve as a
        dense search if needed. */
def sortLoop(build: Seq[Asset], remain: Seq[(Asset, Int)]): Seq[Asset] = if (remain == Nil) build else {
val s = remain
.map{case (assetA, sum) => (assetA, sum + sort.distance(assetA, build.headOption.getOrElse(target)))}
.sortWith{(a,b) => op(direction)(a._2,b._2) || (a._2 == b._2 && a._1.tag < b._1.tag)}
sortLoop(s.head._1 +: build, s.tail)
}
sortLoop(Nil, similar.map{x => (x,0)}).reverse
}
def genericSort(
target: Asset,
similarAssets: Seq[Asset],
sorter: AssetDistanceEval,
direction : SortDirection
): Seq[Asset] = {
similarAssets
.map{asset => (asset, sorter.distance(target, asset))}
.sortWith{(a,b) => SortDirection.op(direction)(a._2,b._2)}
.map{_._1}
}
}
|
funzoneq/collins
|
app/collins/models/AssetDistanceSorter.scala
|
Scala
|
apache-2.0
| 3,753
|
package com.nthportal.euler
package maths
import scala.annotation.tailrec
object NumericFormat {
private val big0 = BigInt(0)
private val big10 = BigInt(10)
def twoDigitStringAsNum(s: String): Int = {
require(s.length == 2, "Expected string of length 2 - actual length: " + s.length)
twoDigitStringAsNumImpl(s)
}
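  // e.g. twoDigitStringAsNum("07") == 7 and twoDigitStringAsNum("00") == 0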
@tailrec
private def twoDigitStringAsNumImpl(s: String): Int = {
if (s.startsWith("0")) twoDigitStringAsNumImpl(s.substring(1))
else if (s.isEmpty) 0
else s.toInt
}
  def isPalindrome(num: Long): Boolean = {
    val digits = digitsOf(num)
    digits == digits.reverse
  }
  def isBinaryPalindrome(num: Long): Boolean = {
    val digits = binaryDigitsOf(num)
    digits == digits.reverse
  }
@inline
def isPandigital(num: Long): Boolean = isPandigital(num.toString)
def isPandigital(num: String): Boolean = (1 to num.length).map(_.toString).forall(_ in num)
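  // e.g. isPandigital("2143") == true, while isPandigital("1233") == false (no '4')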
/* digits */
def digitsOf(n: Int): List[Int] = digitsOf(n.toLong)
def digitsOf(n: Long): List[Int] = {
require(n >= 0, "Can only get digits of non-negative number")
carryDigits(n)
}
@tailrec
private def carryDigits(n: Long, carry: List[Int] = Nil): List[Int] = {
if (n < 10) n :: carry
else carryDigits(n / 10, (n % 10) :: carry)
}
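  // e.g. digitsOf(1024L) == List(1, 0, 2, 4)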
def digitsOf(n: BigInt): List[Int] = {
require(n >= big0, "Can only get digits of non-negative number")
carryBigDigits(n)
}
@tailrec
private def carryBigDigits(n: BigInt, carry: List[Int] = Nil): List[Int] = {
if (n < big10) n.toInt :: carry
else {
val (d, r) = n /% big10
carryBigDigits(d, r.toInt :: carry)
}
}
def binaryDigitsOf(n: Long): List[Int] = {
require(n >= 0, "Can only get binary digits of non-negative number")
carryBinaryDigits(n)
}
  @tailrec
  private def carryBinaryDigits(n: Long, carry: List[Int] = Nil): List[Int] = {
    if (n < 2) n.toInt :: carry
    else carryBinaryDigits(n >> 1, (n & 1).toInt :: carry)
  }
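  // e.g. fromDigits(Seq(1, 2, 3)) == 123L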
def fromDigits(digits: Seq[Int]): Long = digits.foldLeft(0L) {_ * 10 + _}
}
|
NthPortal/euler-n-scala
|
src/main/scala/com/nthportal/euler/maths/NumericFormat.scala
|
Scala
|
mit
| 2,025
|
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc
package transform
/**
* An InfoTransform contains a compiler phase that transforms trees and symbol infos -- making sure they stay consistent.
* The symbol info is transformed assuming it is consistent right before this phase.
* The info transformation is triggered by Symbol::rawInfo, which caches the results in the symbol's type history.
* This way sym.info (during an enteringPhase(p)) can look up what the symbol's info should look like at the beginning of phase p.
* (If the transformed info had not been stored yet, rawInfo will compute the info by composing the info-transformers
* of the most recent phase before p, up to the transformer of the phase right before p.)
*
* Concretely, enteringPhase(p) { sym.info } yields the info *before* phase p has transformed it. Imagine you're a phase and it all makes sense.
*/
trait InfoTransform extends Transform {
import global.{Symbol, Type, InfoTransformer, infoTransformers}
def transformInfo(sym: Symbol, tpe: Type): Type
override def newPhase(prev: scala.tools.nsc.Phase): StdPhase =
new Phase(prev)
protected def changesBaseClasses = true
protected def keepsTypeParams = true
class Phase(prev: scala.tools.nsc.Phase) extends super.Phase(prev) {
override val keepsTypeParams = InfoTransform.this.keepsTypeParams
if (infoTransformers.nextFrom(id).pid != id) {
// this phase is not yet in the infoTransformers
val infoTransformer = new InfoTransformer {
val pid = id
val changesBaseClasses = InfoTransform.this.changesBaseClasses
def transform(sym: Symbol, tpe: Type): Type = transformInfo(sym, tpe)
}
infoTransformers insert infoTransformer
}
}
}
|
martijnhoekstra/scala
|
src/compiler/scala/tools/nsc/transform/InfoTransform.scala
|
Scala
|
apache-2.0
| 2,012
|
package org.shade.common.parsers
import java.time.Instant
import org.scalatest.{Matchers, WordSpec}
class ParsersSpec extends WordSpec with Matchers {
import Parsers._
"ParseDecorator" when {
"parsing an Int" should {
"return Some(value) if the string contains a valid Int" in {
"6".parseInt shouldBe Some(6)
"0".parseInt shouldBe Some(0)
"-6".parseInt shouldBe Some(-6)
s"${Int.MinValue}".parseInt shouldBe Some(Int.MinValue)
s"${Int.MaxValue}".parseInt shouldBe Some(Int.MaxValue)
}
"cope with whitespace" in {
" 45 ".parseInt shouldBe Some(45)
"45 ".parseInt shouldBe Some(45)
" 45".parseInt shouldBe Some(45)
}
"return None otherwise" in {
"".parseInt shouldBe None
"abc".parseInt shouldBe None
"1.5".parseInt shouldBe None
(null: String).parseInt shouldBe None
s"${Int.MinValue.toLong - 1L}".parseInt shouldBe None
s"${Int.MaxValue.toLong + 1L}".parseInt shouldBe None
}
}
"parsing a Long" should {
"return Some(value) if the string contains a valid Long" in {
"6".parseLong shouldBe Some(6L)
"0".parseLong shouldBe Some(0L)
"-6".parseLong shouldBe Some(-6L)
s"${Long.MinValue}".parseLong shouldBe Some(Long.MinValue)
s"${Long.MaxValue}".parseLong shouldBe Some(Long.MaxValue)
}
"cope with whitespace" in {
" 45 ".parseLong shouldBe Some(45L)
"45 ".parseLong shouldBe Some(45L)
" 45".parseLong shouldBe Some(45L)
}
"return None otherwise" in {
"".parseLong shouldBe None
"abc".parseLong shouldBe None
"1.5".parseLong shouldBe None
"1L".parseLong shouldBe None
(null: String).parseLong shouldBe None
s"${BigDecimal(Long.MinValue) - BigDecimal(1)}".parseLong shouldBe None
s"${BigDecimal(Long.MaxValue) + BigDecimal(1)}".parseLong shouldBe None
}
}
"parsing an Instant" should {
"return Some(value) if the string contains a valid Instant" in {
"2018-01-04T14:19:28Z".parseInstant shouldBe Some(Instant.ofEpochSecond(1515075568L))
"2018-01-04T14:19:28.837Z".parseInstant shouldBe Some(Instant.ofEpochSecond(1515075568L).plusNanos(837000000L))
"2018-01-04T14:19:28.837345Z".parseInstant shouldBe Some(Instant.ofEpochSecond(1515075568L).plusNanos(837345000L))
"2018-01-04T14:19:28.837543234Z".parseInstant shouldBe Some(Instant.ofEpochSecond(1515075568L).plusNanos(837543234L))
}
"cope with whitespace" in {
" 2018-01-04T14:19:28Z ".parseInstant shouldBe Some(Instant.ofEpochSecond(1515075568L))
}
"return None otherwise" in {
"2018-01-04T14:19:28.837".parseInstant shouldBe None
"2018-01-04 14:19:28.837Z".parseInstant shouldBe None
"2018-01-04X14:19:28.837Z".parseInstant shouldBe None
"2018/01/04T14:19:28.837Z".parseInstant shouldBe None
"2018-02-29T14:19:28.837Z".parseInstant shouldBe None
"other".parseInstant shouldBe None
}
}
}
}
|
jamesshade/common
|
src/test/scala/org/shade/common/parsers/ParsersSpec.scala
|
Scala
|
apache-2.0
| 3,125
|
package edu.msstate.dasi.csb.workload.spark
import edu.msstate.dasi.csb.workload.Workload
import org.apache.spark.graphx.Graph
import scala.reflect.ClassTag
/**
* Computes the in-degree of each vertex in the graph.
*
* @note Vertices with no in-edges are ignored.
*/
class InDegree(engine: SparkEngine) extends Workload {
val name = "In-degree"
/**
* Computes the in-degree of each vertex in the graph.
*
* @note Vertices with no in-edges are ignored.
*/
def run[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED]): Unit = graph.inDegrees.foreach(engine.doNothing)
}
|
msstate-dasi/csb
|
csb/src/main/scala/edu/msstate/dasi/csb/workload/spark/InDegree.scala
|
Scala
|
gpl-3.0
| 591
|
package model.repositories
import java.util.UUID
import _root_.anorm.SqlParser._
import _root_.anorm._
import model.dtos._
import model.repositories.anorm.{ArticleParser, ConsultationParser, _}
import play.api.Play.current
import play.api.db.DB
class ConsultationRepository {
def getConsultationStats():List[ConsultationStats] = {
DB.withConnection { implicit c =>
SQL"""
with organizations as
(
select id as OrganizationId, title,
case when group_title in ('Περιεχόμενο ανά Υπουργείο') then 'Υπουργεία'
when group_title in ('Περιεχόμενο Πρώην Υπουργείων') then 'Πρώην Υπουργεία'
when group_title in ('Περιεχόμενο άλλων Φορέων') then 'Άλλοι φορείς'
else 'n/a'
End as categTitle,
case when group_title in ('Περιεχόμενο ανά Υπουργείο') then 1
when group_title in ('Περιεχόμενο Πρώην Υπουργείων') then 2
when group_title in ('Περιεχόμενο άλλων Φορέων') then 3
else 4
End as order
from public.organization_lkp
),
CommentsPerConsultation as
(
select consultation_id,count(a.consultation_id) as NumberOfArticles,
sum(case when comment_num<0 then 0 else comment_num end) as NumberOfComments
from public.articles a
group by a.consultation_id
)
select c.id,m.NumberOfArticles,m.NumberOfComments, DATE_PART('day',c.end_date -c.start_date) as DaysActive,
case when c.end_date> Now() then true
else false end as isActive,
o.*
from public.consultation c
inner join organizations o on o.OrganizationId =c.organization_id
left outer join CommentsPerConsultation as m on m.consultation_id = c.id
order by o.order, o.title
""".as(ConsultationStatsParser.Parse *)
}
}
def getFinalLawAnnotationsForComment(commentId:Long, finalLawId:Long): List[FinalLawUserAnnotation] ={
DB.withConnection { implicit c =>
val results = SQL"""
select fl_cm.*, fl_cm_areas.ann_id, u.fullname as user_name from final_law_comment_matching fl_cm
inner join final_law_comment_matching_areas fl_cm_areas on fl_cm.id = fl_cm_areas.final_law_ann_id
inner join account.user u on u.id = fl_cm.user_id
where fl_cm.comment_id = $commentId and fl_cm.final_law_id = $finalLawId and fl_cm.active = true;
""".as((FinalLawAnnotationParser.Parse ~ SqlParser.str("ann_id")) *)
val newResults = results.groupBy(z =>{z._1})
// val finalLawUserAnnotation:FinalLawUserAnnotation = results.head._1;
for (tuple <- newResults) // (FinalLawUserAnnotation, List[(FinalLawUserAnnotation ,List[String])])
{
//tuple._1 contains the FinalLawUserAnnotation
//tuple._2 contains the List[(FinalLawUserAnnotation ,List[String])]
tuple._1.annotationIds = tuple._2.map(x=> x._2)
}
newResults.map(x=>x._1).toList
}
}
def saveFinalLawAnnotation(userId: UUID, commentId:Long, finalLawId: Long, annotationIds: List[String]):Option[Long] = {
DB.withTransaction() { implicit c =>
val finalLawAnnId =
SQL"""
INSERT INTO public.final_law_comment_matching
(
user_id,
comment_id,
date_added,
final_law_Id
)
VALUES
(
cast($userId as uuid),
$commentId,
now(),
$finalLawId)
""".executeInsert()
for (annId <- annotationIds) {
SQL"""
INSERT INTO public.final_law_comment_matching_areas
(final_law_ann_id,ann_id)
VALUES
($finalLawAnnId,$annId)
""".execute()
}
finalLawAnnId
}
}
def updateFinalLawAnnotation(userId: UUID, commentId:Long, finalLawId: Long, annotationIds: List[String]):Option[Long] = {
DB.withTransaction() { implicit c =>
val finalLawAnnId =
SQL"""
UPDATE public.final_law_comment_matching SET active = false WHERE user_id = cast($userId AS UUID) and final_law_Id = $finalLawId
""".execute()
saveFinalLawAnnotation(userId, commentId, finalLawId, annotationIds)
}
}
def rateFinalLaw(userId: UUID, consultationId: Long, finalLawId: Long, attitude: Int, liked:Boolean):Unit = {
var column = "num_of_approvals"
if(attitude == 1) {
column = "num_of_dissaprovals"
}
DB.withConnection { implicit c =>
var likedBit = if (liked) 1 else 0
if(likedBit == 1) {
SQL( """update consultation_final_law set """ + column + """ = """ + column + """ + 1 where consultation_id =""" + consultationId + """ and id =""" + finalLawId).execute()
if(likedBit == 1 && attitude == 1) {
SQL"""
UPDATE consultation_final_law
set active = CAST(0 AS BIT)
where id = $finalLawId and consultation_id = $consultationId and num_of_dissaprovals > 4;""".execute()
}
if(attitude == 1) {
likedBit = 0
}
SQL"""
UPDATE consultation_final_law_rating
set liked = CAST($likedBit AS BIT)
where user_id = CAST($userId AS UUID) and consultation_id = $consultationId;
INSERT INTO consultation_final_law_rating (user_id, consultation_id,liked,date_added, final_law_id, is_rating_active)
select CAST($userId AS UUID), $consultationId ,CAST($likedBit AS BIT) , now(), $finalLawId, 1
where not exists (select 1 from consultation_final_law_rating where user_id = CAST($userId AS UUID) and consultation_id = $consultationId );
""".execute()
} else {
SQL( """update consultation_final_law set """ + column + """ = """ + column + """ - 1 where consultation_id =""" + consultationId + """ and id =""" + finalLawId).execute()
// SQL"""
// delete from consultation_final_law_rating
// where user_id = CAST($userId AS UUID) and consultation_id = $consultationId ;
//
// """.execute()
SQL"""
update consultation_final_law_rating set is_rating_active = 0
where user_id = CAST($userId AS UUID) and final_law_id = $finalLawId
""".execute()
}
}
}
def deleteFinalLaw(finalLawId: Long):Unit = {
DB.withConnection { implicit c =>
SQL"""
update consultation_final_law set active = CAST(0 AS BIT) where id = $finalLawId
""".execute()
}
}
def getFinalLawUploader(finalLawId: Long):String = {
DB.withConnection { implicit c =>
val userId:String = SQL"""
select cast(user_id as text) from consultation_final_law where id = $finalLawId
""".as(SqlParser.str("user_id").single)
userId
}
}
def search(searchRequest: ConsultationSearchRequest): List[Consultation] = {
//Retrieving values with string interpolation https://www.playframework.com/documentation/2.3.5/ScalaAnorm
DB.withConnection { implicit c =>
SQL"""
select c.*, o.title as OrganizationTitle from public.consultation c
inner join public.organization_lkp o on c.organization_id = o.id
where
c.title like ${"%"+searchRequest.query+"%"} or
o.title like ${"%"+searchRequest.query+"%"}
order by end_date desc """.as(ConsultationParser.Parse *)
}
}
def latestConsultations(maxConsultationsToReceive:Int) : List[Consultation] = {
DB.withConnection { implicit c =>
SQL"""
select c.*, o.title as OrganizationTitle from public.consultation c
inner join public.organization_lkp o on c.organization_id = o.id
order by end_date desc limit $maxConsultationsToReceive""".as(ConsultationParser.Parse *)
}
}
def getRelevantMaterial (consultationId: Long):Seq[RelevantMaterial] = {
DB.withConnection { implicit c =>
val results = SQL"""
select c.* from public.relevant_mat c where c.consultation_id = $consultationId
""".as(RelevantMaterialParser.Parse *)
results
}
}
def getRelevantLaws (consultationId: Long):Seq[RelevantLaws] = {
DB.withConnection { implicit c =>
val results = SQL"""
select c.*, ar.title from public.article_entities c
inner join articles ar on c.article_id = ar.id
where c.consultation_id = $consultationId
""".as(RelevantLawsParser.Parse *)
results
}
}
def getConsultationFinalLaw (consultationId: Long): Option[ConsultationFinalLaw] = {
DB.withConnection { implicit c =>
SQL"""
select * from public.consultation_final_law where consultation_id = $consultationId and active = CAST(1 AS BIT)
""".as(ConsFinalLawParser.Parse *).headOption
}
}
def getFinalLawRatingUsers(consultationId:Long, finalLawId:BigInt): List[ConsFinalLawRatingUsers] = {
DB.withConnection { implicit c =>
SQL"""
select consultation_final_law_rating.user_id, final_law_id, consultation_final_law_rating.consultation_id, liked from consultation_final_law_rating
inner join consultation_final_law law on law.id = consultation_final_law_rating.final_law_id
where consultation_final_law_rating.consultation_id = $consultationId and law.active = CAST(1 AS BIT) and consultation_final_law_rating.is_rating_active = 1
""".as(ConsFinalLawRatingUsersParser.Parse *)
}
}
def storeFinalLawInDB(consultationId:Long, finalLawPath:String, finalLawText:String, userId:java.util.UUID):Unit = {
DB.withConnection { implicit c =>
SQL"""
insert into consultation_final_law(consultation_id, user_id, date_added, file_text, file_path, active) values
($consultationId, $userId::uuid, now(), $finalLawText, $finalLawPath, CAST(1 AS BIT))
""".execute()
}
}
def setConsultationFinalLawInactive(finalLawId:Long): Unit = {
DB.withConnection { implicit c =>
SQL""" update consultation_final_law set active = 0""".execute()
}
}
def get(consultationId: BigInt): Consultation =
{
DB.withConnection { implicit c =>
val results = SQL"""with openGovcommentsCount as (
select c.article_id, count(*) as comment_num from public.comments c
inner join public.articles a on c.article_id = a.id
where a.consultation_id= $consultationId
and c.source_type_id =2
group by c.article_id
)
select c.*,
o.title as OrganizationTitle,
a.id as article_id,
a.consultation_id,
a.title as article_title,
a.body as article_body,
a.art_order,
openGovCount.comment_num
from public.consultation c
inner join public.organization_lkp o on c.organization_id = o.id
inner join public.articles a on a.consultation_id = c.id
left outer join openGovcommentsCount openGovCount on openGovCount.article_id = a.id
where
c.id =$consultationId
order by end_date, a.art_order
""".as((ConsultationParser.Parse ~ ArticleParser.Parse map(flatten)) *)
//due to the inner join we have tuples of the same consultations and different articles
// val newResults = results.groupBy(z =>{z._1}) //group results by consultation. The results is a tuple with 2 properties. (Consultation, List[(Consultation,Article)]
val consultation:Consultation = results.head._1; //fetch the consultation from the first property of the tuple
for (tuple <- results)
{
consultation.articles =consultation.articles :+ tuple._2
}
consultation
}
}
}
|
scify/DemocracIT-Web
|
app/model/repositories/ConsultationRepository.scala
|
Scala
|
apache-2.0
| 13,023
|
package scorex.transaction
import play.api.libs.json.{JsObject, Json}
import scorex.account.PublicKeyAccount
import scorex.crypto.EllipticCurveImpl
import scorex.crypto.encode.Base58
import scorex.crypto.hash.FastCryptographicHash
trait SignedTransaction extends TypedTransaction {
def toSign: Array[Byte]
val signature: Array[Byte]
val sender: PublicKeyAccount
override lazy val id: Array[Byte] = FastCryptographicHash(toSign)
protected def jsonBase(): JsObject = Json.obj("type" -> transactionType.id,
"id" -> Base58.encode(id),
"sender" -> sender.address,
"senderPublicKey" -> Base58.encode(sender.publicKey),
"fee" -> assetFee._2,
"timestamp" -> timestamp,
"signature" -> Base58.encode(this.signature)
)
}
object SignedTransaction {
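  // Usage sketch (handler names `reject`/`relay` are illustrative, not part of this API):
  //   SignedTransaction.verify(tx).fold(err => reject(err), valid => relay(valid))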
def verify[A <: SignedTransaction](t: A): Either[ValidationError, A] =
{
if (EllipticCurveImpl.verify(t.signature, t.toSign, t.sender.publicKey)) {
Right(t)
} else {
Left(ValidationError.InvalidSignature)
}
}
}
|
B83YPoj/Waves
|
src/main/scala/scorex/transaction/SignedTransaction.scala
|
Scala
|
apache-2.0
| 1,034
|
abstract class BTypes {
trait BType
sealed trait RefBType extends BType {
def classOrArrayType: String = this match {
case ClassBType(internalName) => internalName
case a: ArrayBType => ""
}
}
final class ClassBType(val internalName: String) extends RefBType
class ArrayBType extends RefBType
object ClassBType {
def unapply(x: ClassBType): Option[String] = None
}
}
|
som-snytt/dotty
|
tests/patmat/i2502.scala
|
Scala
|
apache-2.0
| 418
|
package com.github.libsml.math.function
import com.github.libsml.math.linalg
import com.github.libsml.math.linalg.Vector
import com.github.libsml.math.linalg.BLAS
/**
* Created by huangyu on 15/7/25.
*/
class PlusFunction(private val first: Option[Function[Vector]], private var second: Option[Function[Vector]] = None) extends Function[Vector] {
  require(first.map(_.isDerivable).getOrElse(true) ||
    second.map(_.isDerivable).getOrElse(true),
    "PlusFunction requires at least one derivable component")
override def isDerivable: Boolean = {
first.map(_.isDerivable).getOrElse(true) &&
second.map(_.isDerivable).getOrElse(true)
}
override def subGradient(w: linalg.Vector, f: Double, g: linalg.Vector, sg: linalg.Vector): (Vector, Double) = {
val fun = first.filter(!_.isDerivable).map(_.subGradient(w, f, g, sg)._2).getOrElse(0.0) +
second.filter(!_.isDerivable).map(_.subGradient(w, f, g, sg)._2).getOrElse(0.0)
(sg, fun)
}
override def gradient(w: linalg.Vector, g: linalg.Vector, setZero: Boolean = true): (Vector, Double) = {
if (setZero) {
BLAS.zero(g)
}
(g, first.map(_.gradient(w, g, false)._2).getOrElse(0.0) + second.map(_.gradient(w, g, false)._2).getOrElse(0.0))
}
  /**
   * Computes Hessian * d.
   * @param w current point
   * @param d direction vector
   * @param hv output vector receiving Hessian * d
   */
override def hessianVector(w: linalg.Vector, d: linalg.Vector, hv: linalg.Vector, isUpdateHessian: Boolean, setZero: Boolean = true): Unit = {
// require(isSecondDerivable,"Plus function exception!")
if (setZero) {
BLAS.zero(hv)
}
first.foreach(_.hessianVector(w, d, hv, isUpdateHessian, false))
second.foreach(_.hessianVector(w, d, hv, isUpdateHessian, false))
}
override def isSecondDerivable: Boolean = {
{
first.map(_.isSecondDerivable).getOrElse(true) &&
second.map(_.isSecondDerivable).getOrElse(true)
}
}
def +(function: Function[Vector]): PlusFunction = {
    require(this.isDerivable || function.isDerivable, "PlusFunction requires at least one derivable component")
second match {
case None => {
second = Some(function)
this
}
case _ => new PlusFunction(Some(this), Some(function))
}
}
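  // e.g. for p = new PlusFunction(Some(f1)): p + f2 fills the empty `second` slot and
  // returns p itself, while a subsequent p + f3 wraps p in a new PlusFunction,
  // so chaining builds a left-nested sum.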
override def invertHessianVector(w: linalg.Vector, d: linalg.Vector,
hv: linalg.Vector, isUpdateHessian: Boolean, setZero: Boolean): Unit = {
if (setZero) {
BLAS.zero(hv)
}
first.foreach(_.invertHessianVector(w, d, hv, isUpdateHessian, false))
second.foreach(_.invertHessianVector(w, d, hv, isUpdateHessian, false))
}
override def isInBound(w: linalg.Vector): Boolean = {
first.map(_.isInBound(w)).getOrElse(true) && second.map(_.isInBound(w)).getOrElse(true)
}
}
|
libsml/libsml
|
core/src/main/scala/com/github/libsml/math/function/PlusFunction.scala
|
Scala
|
apache-2.0
| 2,728
|
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.frontend.v2_3.ast
import scala.annotation.tailrec
import scala.collection.immutable
/*
* Helper function that finds connected components in patterns.
*/
object connectedComponents {
type ComponentPart = Set[Identifier]
type ConnectedComponent = Set[ComponentPart]
//enable using the companion objects of the type aliases,
//e.g. `ComponentPart(Identifier("a"), Identifier("b"),...)`
val ComponentPart = Set
val ConnectedComponent = Set
def apply(patternParts: Seq[PatternPart]): IndexedSeq[ConnectedComponent] = {
val parts: immutable.IndexedSeq[ComponentPart] = patternParts.map(_.fold(Set.empty[Identifier]) {
case NodePattern(Some(id), _, _, _) => list => list + id
}).toIndexedSeq
this.apply(parts)
}
def apply(parts: IndexedSeq[ComponentPart]): IndexedSeq[ConnectedComponent] = {
@tailrec
def loop(remaining: IndexedSeq[ComponentPart], connectedComponents: IndexedSeq[ConnectedComponent]): IndexedSeq[ConnectedComponent] = {
if (remaining.isEmpty) connectedComponents
else {
val part = remaining.head
val newConnected = connectedComponents.zipWithIndex.collectFirst {
case (cc, index) if cc connectedTo part => connectedComponents.updated(index, cc + part)
} getOrElse connectedComponents :+ ConnectedComponent(part)
loop(remaining.tail, newConnected)
}
}
loop(parts, IndexedSeq.empty)
}
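  // e.g. for identifiers a, b, c, d:
  //   apply(IndexedSeq(ComponentPart(a, b), ComponentPart(b, c), ComponentPart(d)))
  //   == IndexedSeq(ConnectedComponent(Set(a, b), Set(b, c)), ConnectedComponent(Set(d)))
  // since the first two parts share b and merge into a single component.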
implicit class RichConnectedComponent(connectedComponent: ConnectedComponent) {
def connectedTo(part: ComponentPart) = connectedComponent.exists(c => (c intersect part).nonEmpty)
def identifiers: Set[Identifier] = connectedComponent.flatten
}
}
|
HuangLS/neo4j
|
community/cypher/frontend-2.3/src/main/scala/org/neo4j/cypher/internal/frontend/v2_3/ast/connectedComponents.scala
|
Scala
|
apache-2.0
| 2,509
|
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.config
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.play.guice.GuiceOneAppPerTest
import play.api.Environment
import common.CommonPlaySpec
class IhtPropertiesReaderTest extends CommonPlaySpec with GuiceOneAppPerTest with MockitoSugar {
lazy val ihtPropertyRetriever: IhtPropertyRetriever = new IhtPropertyRetriever {
override lazy val environment: Environment = app.environment
}
"IhtPropertiesReaderTest" must {
"should read the key and return appropriate value" in {
val maxExecutors = ihtPropertyRetriever.getPropertyAsInt("maxCoExecutors")
val ukIsoCountryCode = ihtPropertyRetriever.getProperty("ukIsoCountryCode")
val govUkLink = ihtPropertyRetriever.getProperty("linkGovUkIht")
assert(maxExecutors == 3,"Maximum executors value is 3")
ukIsoCountryCode shouldBe "GB"
assert(govUkLink=="https://www.gov.uk/valuing-estate-of-someone-who-died" , "Link value is https://www.gov.uk/valuing-estate-of-someone-who-died")
}
}
"parseAssignmentsToSeqTuples" must {
"parse correctly a valid seq of 2 assignments with spaces before or after key values" in {
val result = ihtPropertyRetriever.parseAssignmentsToSeqTuples(
"aaa =bbb,ccc= ddd"
)
result shouldBe Seq(
("aaa", "bbb"),
("ccc", "ddd")
)
}
"parse correctly a valid seq of 1 assignment with spaces before and after key values" in {
val result = ihtPropertyRetriever.parseAssignmentsToSeqTuples(
"aaa = bbb"
)
result shouldBe Seq(
("aaa", "bbb")
)
}
"parse correctly an empty string" in {
val result = ihtPropertyRetriever.parseAssignmentsToSeqTuples(
""
)
result shouldBe Seq()
}
"throw an exception if invalid assignments are given (no equals symbols)" in {
a[RuntimeException] mustBe thrownBy {
ihtPropertyRetriever.parseAssignmentsToSeqTuples("aaa,bbb")
}
}
"throw an exception if invalid assignments are given (too many equals symbols)" in {
a[RuntimeException] mustBe thrownBy {
ihtPropertyRetriever.parseAssignmentsToSeqTuples("aaa=bbb=ccc")
}
}
}
}
|
hmrc/iht-frontend
|
test/iht/config/IhtPropertiesReaderTest.scala
|
Scala
|
apache-2.0
| 2,819
|
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.controllers.registration.applicant
import iht.config.AppConfig
import iht.connector.CachingConnector
import iht.controllers.ControllerHelper.Mode
import iht.forms.registration.ApplicantForms._
import iht.metrics.IhtMetrics
import iht.models.{ApplicantDetails, RegistrationDetails}
import iht.utils.CommonHelper
import iht.views.html.registration.applicant.probate_location
import javax.inject.Inject
import play.api.data.Form
import play.api.i18n.Messages
import play.api.mvc.{AnyContent, MessagesControllerComponents, Request}
import uk.gov.hmrc.auth.core.AuthConnector
import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController
class ProbateLocationControllerImpl @Inject()(val metrics: IhtMetrics,
val cachingConnector: CachingConnector,
val authConnector: AuthConnector,
val probateLocationView: probate_location,
implicit val appConfig: AppConfig,
val cc: MessagesControllerComponents) extends FrontendController(cc) with ProbateLocationController
trait ProbateLocationController extends RegistrationApplicantControllerWithEditMode {
def form(implicit messages: Messages): Form[ApplicantDetails] = probateLocationForm
def metrics: IhtMetrics
override def guardConditions = guardConditionsApplicantProbateLocation
override def getKickoutReason = kickoutReasonApplicantDetails
override val storageFailureMessage = "Store registration details fails on probate location submission"
lazy val submitRoute = routes.ProbateLocationController.onSubmit
lazy val editSubmitRoute = routes.ProbateLocationController.onEditSubmit
val probateLocationView: probate_location
def okForPageLoad(form: Form[ApplicantDetails], name: Option[String])(implicit request: Request[AnyContent]) =
Ok(probateLocationView(form, submitRoute))
def okForEditPageLoad(form: Form[ApplicantDetails], name: Option[String])(implicit request: Request[AnyContent]) =
Ok(probateLocationView(form, editSubmitRoute, cancelToRegSummary))
def badRequestForSubmit(form: Form[ApplicantDetails], name: Option[String])(implicit request: Request[AnyContent]) =
BadRequest(probateLocationView(form, submitRoute))
def badRequestForEditSubmit(form: Form[ApplicantDetails], name: Option[String])(implicit request: Request[AnyContent]) =
BadRequest(probateLocationView(form, editSubmitRoute, cancelToRegSummary))
def applyChangesToRegistrationDetails(rd: RegistrationDetails, ad: ApplicantDetails, mode: Mode.Value) = {
val x = CommonHelper.getOrException(rd.applicantDetails) copy (country = ad.country)
rd copy (applicantDetails = Some(x))
}
def onwardRoute(rd: RegistrationDetails) = routes.ApplicantTellUsAboutYourselfController.onPageLoad
}
|
hmrc/iht-frontend
|
app/iht/controllers/registration/applicant/ProbateLocationController.scala
|
Scala
|
apache-2.0
| 3,526
|
package controllers
import play.api.mvc._
import com.google.inject.Inject
import be.studiocredo.auth._
import be.studiocredo.{UserContextSupport, NotificationService, UserService}
import play.api.data.Form
import play.api.data.Forms._
import models.admin.UserFormData
import play.api.data.validation.{Valid, Invalid, Constraint}
import be.studiocredo.util.ServiceReturnValues._
import controllers.auth.Mailer
case class EmailSet(email: Option[String], confirmation: Option[String])
class Profile @Inject()(val userService: UserService, val authService: AuthenticatorService, val notificationService: NotificationService) extends Controller with Secure with UserContextSupport {
val defaultAuthorization = Some(Authorization.ANY)
val HomePage = Redirect(routes.Application.index())
  val validEmails: Constraint[EmailSet] = Constraint({ emails =>
    (emails.email, emails.confirmation) match {
      case (None, None) => Valid
      case (Some(e), Some(c)) => if (e == c) Valid else Invalid("E-mail velden zijn verschillend")
      case _ => Invalid("Beide e-mail velden moeten ingevuld zijn")
    }
  })
val profileForm = Form(
mapping(
"name" -> nonEmptyText,
"username" -> nonEmptyText,
"email" -> mapping(
"email" -> optional(email),
"confirmation" -> optional(email)
)(EmailSet.apply)(EmailSet.unapply).verifying(validEmails),
"address" -> optional(text),
"phone" -> optional(text)
)((name, userName, emailSet, address, phone) => UserFormData(name,userName, emailSet.email, address, phone, true))(ufd => Some((ufd.name, ufd.username, EmailSet(ufd.email, None), ufd.address, ufd.phone)))
)
def edit = AuthDBAction { implicit rq =>
rq.currentUser match {
case None => HomePage
case Some(user) => {
userService.getEdit(user.id) match {
case None => HomePage
case Some(userFormData) => Ok(views.html.profileForm(profileForm.fillAndValidate(userFormData), userContext))
}
}
}
}
def update = AuthDBAction { implicit rs =>
rs.currentUser match {
case None => HomePage
case Some(user) => {
val bindedForm = profileForm.bindFromRequest
bindedForm.fold(
formWithErrors => BadRequest(views.html.profileForm(formWithErrors, userContext)),
userFormData => {
userService.update(user.id, userFormData).fold(
failure => BadRequest(views.html.profileForm(bindedForm.withGlobalError(serviceMessage(failure)), userContext)),
success => {
Mailer.sendProfileUpdatedEmail(user.user)
HomePage.flashing("success" -> "Profiel aangepast")
}
)
}
)
}
}
}
}
|
studiocredo/ticket-reservation
|
app/controllers/Profile.scala
|
Scala
|
apache-2.0
| 3,019
|
package testfeature
import feature.{Block, Minus, Plus, Transcript}
import org.scalatest.FunSuite
/**
* Created by prussell on 11/12/16.
*/
class TranscriptMiscSuite extends FunSuite {
test("Illegal orientation") {
intercept[IllegalArgumentException](new Transcript(chr1_1000_2000_both, Some("name"), Some("gene")))
}
}
|
pamelarussell/sgxlib
|
src/test/scala/testfeature/TranscriptMiscSuite.scala
|
Scala
|
mit
| 336
|
package com.lightning.walletapp.helper
import scodec.bits.{BitVector, ByteVector}
import com.lightning.walletapp.ln.Tools.{Bytes, random}
import javax.crypto.spec.{IvParameterSpec, SecretKeySpec}
import com.lightning.walletapp.ln.wire.LightningMessageCodecs.aesZygoteCodec
import com.lightning.walletapp.ln.wire.AESZygote
import javax.crypto.Cipher
import scala.util.Try
object AES {
  def cipher(key: Bytes, initVector: Bytes, mode: Int): Cipher = {
    val aesCipher = Cipher.getInstance("AES/CBC/PKCS5Padding")
    val ivParameterSpec: IvParameterSpec = new IvParameterSpec(initVector)
    aesCipher.init(mode, new SecretKeySpec(key, "AES"), ivParameterSpec)
    aesCipher
  }
private[this] val ivLength = 16
def dec(data: Bytes, key: Bytes, initVector: Bytes) = ByteVector.view(cipher(key, initVector, Cipher.DECRYPT_MODE) doFinal data)
def enc(data: Bytes, key: Bytes, initVector: Bytes) = ByteVector.view(cipher(key, initVector, Cipher.ENCRYPT_MODE) doFinal data)
// Used for Object -> Json -> Encrypted -> Zygote -> BitVector
def encReadable(plain: String, key: Bytes): BitVector = {
val zygote = encBytes(plain getBytes "UTF-8", key)
aesZygoteCodec.encode(zygote).require
}
// Used for Object -> Scodec -> Encrypted -> Zygote
def encBytes(plain: Bytes, key: Bytes) = {
val initialVector = random getBytes ivLength
val cipher = enc(data = plain, key, initialVector)
AESZygote(v = 1, ByteVector(initialVector), cipher)
}
def decBytes(raw: Bytes, key: Bytes): Try[Bytes] = {
val aesz = aesZygoteCodec decode BitVector.view(raw)
decZygote(aesz.require.value, key)
}
def decZygote(aesz: AESZygote, key: Bytes): Try[Bytes] = Try {
dec(aesz.ciphertext.toArray, key, aesz.iv.toArray).toArray
}
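  // Round-trip sketch (assuming a 16-byte key, i.e. AES-128):
  //   val key = random getBytes 16
  //   val iv = random getBytes ivLength
  //   dec(enc(data, key, iv).toArray, key, iv) == ByteVector.view(data)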
}
|
btcontract/lnwallet
|
app/src/main/java/com/lightning/walletapp/helper/AES.scala
|
Scala
|
apache-2.0
| 1,770
|
package com.thetestpeople.trt.service
import com.thetestpeople.trt.model._
import com.thetestpeople.trt.model
import org.joda.time._
import java.net.URI
/**
* Types representing incoming test execution records
*/
object Incoming {
trait AbtractExecution {
def logOpt: Option[String]
def executionTimeOpt: Option[DateTime]
def durationOpt: Option[Duration]
}
case class Batch(
executions: Seq[Execution],
complete: Boolean,
urlOpt: Option[URI],
nameOpt: Option[String],
logOpt: Option[String],
executionTimeOpt: Option[DateTime],
durationOpt: Option[Duration],
configurationOpt: Option[Configuration]) extends AbtractExecution
case class Execution(
test: Test,
passed: Boolean,
summaryOpt: Option[String],
logOpt: Option[String],
executionTimeOpt: Option[DateTime],
durationOpt: Option[Duration],
configurationOpt: Option[Configuration]) extends AbtractExecution {
def failed = !passed
}
case class Test(name: String, groupOpt: Option[String] = None, categories: Seq[String] = Seq()) {
def qualifiedName = QualifiedName(name, groupOpt)
}
case class BatchCompleteMessage(id: Id[model.Batch], durationOpt: Option[Duration])
}
|
thetestpeople/trt
|
app/com/thetestpeople/trt/service/Incoming.scala
|
Scala
|
mit
| 1,249
|
package nat.traversal.upnp.igd
import akka.actor.ActorSystem
import akka.stream.Materializer
import java.net.InetSocketAddress
import scala.concurrent.ExecutionContext
/**
* WAN device.
*/
class WANDevice(
override val desc: DeviceDesc,
override val localAddress: InetSocketAddress
)(implicit system: ActorSystem, executionContext: ExecutionContext, materializer: Materializer)
extends Device(desc, localAddress)
{
/* A WANDevice may have more than one WANConnectionDevice.
* A WANConnectionDevice may have more than one WANIPConnection or
* WANPPPConnection.
*/
val wanConnections: List[WANConnection] =
getDevices("WANConnectionDevice").flatMap { cnxDevice =>
(for {
cnxService <- cnxDevice.getServices("WANIPConnection") :::
cnxDevice.getServices("WANPPPConnection")
} yield {
try {
Some(new WANConnection(this, cnxService))
}
catch {
case _: Throwable =>
None
}
}).flatten
}
}
|
suiryc/nat-traversal
|
src/main/scala/nat/traversal/upnp/igd/WANDevice.scala
|
Scala
|
gpl-3.0
| 1,015
|
package inloopio.math.algebra
/**
 * Constructs and returns a new singular value decomposition object; the
 * decomposed matrices can be retrieved via instance methods of the returned
 * decomposition object.
 * @see http://en.wikipedia.org/wiki/Singular_value_decomposition
 * @see http://en.wikipedia.org/wiki/Eigenvalue,_eigenvector_and_eigenspace
 *
 * @param arg a rectangular matrix
 */
class SingularValueDecomposition(arg: Matrix) extends Serializable {
  /** Handles the case where numRows < numCols, using the fact that if A = U·S·V' then A' = V·S·U',
      so the SVD of A' is the SVD of A with U and V swapped. */
private var transpositionNeeded = false
  if (arg.numRows < arg.numCols) {
    transpositionNeeded = true
  }
  // Derived from LINPACK code.
  // Initialize.
  /** Row and column dimensions. */
  private val (m, n) =
if (transpositionNeeded) {
(arg.numCols, arg.numRows) // use the transpose Matrix
} else {
(arg.numRows, arg.numCols)
}
val a = Array.ofDim[Double](m, n)
/** Array for internal storage of singular values. */
private val s = Array.ofDim[Double](math.min(m + 1, n))
/** Arrays for internal storage of U and V. */
private val u = Array.ofDim[Double](m, math.min(m, n))
private val v = Array.ofDim[Double](n, n)
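  // NOTE: as written, the decomposition is performed by init(arg) below, which must be
  // invoked with the constructor argument before the result accessors are queried.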
def init(arg: Matrix) {
var i = 0
while (i < m) {
var j = 0
while (j < n) {
a(i)(j) = if (transpositionNeeded) arg.get(j, i) else arg.get(i, j)
j += 1
}
i += 1
}
val nu = math.min(m, n)
val e = Array.ofDim[Double](n)
val work = Array.ofDim[Double](m)
var wantu = true
var wantv = true
// Reduce A to bidiagonal form, storing the diagonal elements
// in s and the super-diagonal elements in e.
val nct = math.min(m - 1, n)
val nrt = math.max(0, math.min(n - 2, m))
var k = 0
while (k < math.max(nct, nrt)) {
if (k < nct) {
// Compute the transformation for the k-th column and
// place the k-th diagonal in s(k).
// Compute 2-norm of k-th column without under/overflow.
s(k) = 0
var i = k
while (i < m) {
s(k) = java.lang.Math.hypot(s(k), a(i)(k))
i += 1
}
if (s(k) != 0.0) {
if (a(k)(k) < 0.0) {
s(k) = -s(k)
}
var i = k
while (i < m) {
a(i)(k) /= s(k)
i += 1
}
a(k)(k) += 1.0
}
s(k) = -s(k)
}
var j = k + 1
while (j < n) {
if (k < nct && s(k) != 0.0) {
// Apply the transformation.
var t = 0.0
var i = k
while (i < m) {
t += a(i)(k) * a(i)(j)
i += 1
}
t = -t / a(k)(k)
i = k
while (i < m) {
a(i)(j) += t * a(i)(k)
i += 1
}
}
// Place the k-th row of A into e for the
// subsequent calculation of the row transformation.
e(j) = a(k)(j)
j += 1
}
if (wantu && k < nct) {
// Place the transformation in U for subsequent back multiplication.
var i = k
while (i < m) {
u(i)(k) = a(i)(k)
i += 1
}
}
if (k < nrt) {
// Compute the k-th row transformation and place the
// k-th super-diagonal in e(k).
// Compute 2-norm without under/overflow.
e(k) = 0
var i = k + 1
while (i < n) {
e(k) = java.lang.Math.hypot(e(k), e(i))
i += 1
}
if (e(k) != 0.0) {
if (e(k + 1) < 0.0) {
e(k) = -e(k)
}
var i = k + 1
while (i < n) {
e(i) /= e(k)
i += 1
}
e(k + 1) += 1.0
}
e(k) = -e(k)
if (k + 1 < m && e(k) != 0.0) {
// Apply the transformation.
var i = k + 1
while (i < m) {
work(i) = 0.0
i += 1
}
var j = k + 1
while (j < n) {
var i = k + 1
while (i < m) {
work(i) += e(j) * a(i)(j)
i += 1
}
j += 1
}
j = k + 1
while (j < n) {
val t = -e(j) / e(k + 1)
var i = k + 1
while (i < m) {
a(i)(j) += t * work(i)
i += 1
}
j += 1
}
}
if (wantv) {
// Place the transformation in V for subsequent
// back multiplication.
var i = k + 1
while (i < n) {
v(i)(k) = e(i)
i += 1
}
}
}
k += 1
}
// Set up the final bidiagonal matrix or order p.
var p = math.min(n, m + 1)
if (nct < n) {
s(nct) = a(nct)(nct)
}
if (m < p) {
s(p - 1) = 0.0
}
if (nrt + 1 < p) {
e(nrt) = a(nrt)(p - 1)
}
e(p - 1) = 0.0
// If required, generate U.
if (wantu) {
var j = nct
while (j < nu) {
var i = 0
while (i < m) {
u(i)(j) = 0.0
i += 1
}
u(j)(j) = 1.0
j += 1
}
var k = nct - 1
while (k >= 0) {
if (s(k) != 0.0) {
var j = k + 1
while (j < nu) {
var t = 0.0
var i = k
while (i < m) {
t += u(i)(k) * u(i)(j)
i += 1
}
t = -t / u(k)(k)
i = k
while (i < m) {
u(i)(j) += t * u(i)(k)
i += 1
}
j += 1
}
var i = k
while (i < m) {
u(i)(k) = -u(i)(k)
i += 1
}
u(k)(k) = 1.0 + u(k)(k)
i = 0
while (i < k - 1) {
u(i)(k) = 0.0
i += 1
}
} else {
var i = 0
while (i < m) {
u(i)(k) = 0.0
i += 1
}
u(k)(k) = 1.0
}
k -= 1
}
}
// If required, generate V.
if (wantv) {
var k = n - 1
while (k >= 0) {
if (k < nrt && e(k) != 0.0) {
var j = k + 1
while (j < nu) {
var t = 0.0
var i = k + 1
while (i < n) {
t += v(i)(k) * v(i)(j)
i += 1
}
t = -t / v(k + 1)(k)
i = k + 1
while (i < n) {
v(i)(j) += t * v(i)(k)
i += 1
}
j += 1
}
}
var i = 0
while (i < n) {
v(i)(k) = 0.0
i += 1
}
v(k)(k) = 1.0
k -= 1
}
}
// Main iteration loop for the singular values.
val pp = p - 1
//Int iter = 0
val eps = math.pow(2.0, -52.0)
while (p > 0) {
// Here is where a test for too many iterations would go.
// This section of the program inspects for
// negligible elements in the s and e arrays. On
// completion the variables kase and k are set as follows.
// kase = 1 if s(p) and e[k-1] are negligible and k<p
// kase = 2 if s(k) is negligible and k<p
// kase = 3 if e[k-1] is negligible, k<p, and
// s(k), ..., s(p) are not negligible (qr step).
// kase = 4 if e(p-1) is negligible (convergence).
var k = p - 2
var continue = true
while (k >= -1 && continue) {
if (k == -1) {
continue = false
} else {
if (math.abs(e(k)) <= eps * (math.abs(s(k)) + math.abs(s(k + 1)))) {
e(k) = 0.0
continue = false
} else {
k -= 1
}
}
}
var kase = 0
if (k == p - 2) {
kase = 4
} else {
var ks = p - 1
var continue = true
while (ks >= k && continue) {
if (ks == k) {
continue = false
} else {
val t = (if (ks == p) 0.0 else math.abs(e(ks))) + (if (ks == k + 1) 0.0 else math.abs(e(ks - 1)))
if (math.abs(s(ks)) <= eps * t) {
s(ks) = 0.0
continue = false
} else {
ks -= 1
}
}
}
if (ks == k) {
kase = 3
} else if (ks == p - 1) {
kase = 1
} else {
kase = 2
k = ks
}
}
k += 1
// Perform the task indicated by kase.
kase match {
// Deflate negligible s(p).
case 1 =>
var f = e(p - 2)
e(p - 2) = 0.0
var j = p - 2
while (j >= k) {
var t = java.lang.Math.hypot(s(j), f)
val cs = s(j) / t
val sn = f / t
s(j) = t
if (j != k) {
f = -sn * e(j - 1)
e(j - 1) = cs * e(j - 1)
}
if (wantv) {
var i = 0
while (i < n) {
t = cs * v(i)(j) + sn * v(i)(p - 1)
v(i)(p - 1) = -sn * v(i)(j) + cs * v(i)(p - 1)
v(i)(j) = t
i += 1
}
}
j -= 1
}
// Split at negligible s(k).
case 2 =>
var f = e(k - 1)
e(k - 1) = 0.0
var j = k
while (j < p) {
var t = java.lang.Math.hypot(s(j), f)
val cs = s(j) / t
val sn = f / t
s(j) = t
f = -sn * e(j)
e(j) = cs * e(j)
if (wantu) {
var i = 0
while (i < m) {
t = cs * u(i)(j) + sn * u(i)(k - 1)
u(i)(k - 1) = -sn * u(i)(j) + cs * u(i)(k - 1)
u(i)(j) = t
i += 1
}
}
j += 1
}
// Perform one qr step.
case 3 =>
// Calculate the shift.
val scale = math.max(math.max(math.max(math.max(
math.abs(s(p - 1)), math.abs(s(p - 2))), math.abs(e(p - 2))),
math.abs(s(k))), math.abs(e(k)))
val sp = s(p - 1) / scale
val spm1 = s(p - 2) / scale
val epm1 = e(p - 2) / scale
val sk = s(k) / scale
val ek = e(k) / scale
val b = ((spm1 + sp) * (spm1 - sp) + epm1 * epm1) / 2.0
val c = sp * epm1 * sp * epm1
var shift = 0.0
if (b != 0.0 || c != 0.0) {
shift = math.sqrt(b * b + c)
if (b < 0.0) {
shift = -shift
}
shift = c / (b + shift)
}
var f = (sk + sp) * (sk - sp) + shift
var g = sk * ek
// Chase zeros.
var j = k
while (j < p - 1) {
var t = java.lang.Math.hypot(f, g)
var cs = f / t
var sn = g / t
if (j != k) {
e(j - 1) = t
}
f = cs * s(j) + sn * e(j)
e(j) = cs * e(j) - sn * s(j)
g = sn * s(j + 1)
s(j + 1) = cs * s(j + 1)
if (wantv) {
var i = 0
while (i < n) {
t = cs * v(i)(j) + sn * v(i)(j + 1)
v(i)(j + 1) = -sn * v(i)(j) + cs * v(i)(j + 1)
v(i)(j) = t
i += 1
}
}
t = java.lang.Math.hypot(f, g)
cs = f / t
sn = g / t
s(j) = t
f = cs * e(j) + sn * s(j + 1)
s(j + 1) = -sn * e(j) + cs * s(j + 1)
g = sn * e(j + 1)
e(j + 1) = cs * e(j + 1)
if (wantu && j < m - 1) {
var i = 0
while (i < m) {
t = cs * u(i)(j) + sn * u(i)(j + 1)
u(i)(j + 1) = -sn * u(i)(j) + cs * u(i)(j + 1)
u(i)(j) = t
i += 1
}
}
j += 1
}
e(p - 2) = f
//iter += 1
// Convergence.
case 4 =>
// Make the singular values positive.
if (s(k) <= 0.0) {
s(k) = if (s(k) < 0.0) -s(k) else 0.0
if (wantv) {
var i = 0
while (i <= pp) {
v(i)(k) = -v(i)(k)
i += 1
}
}
}
// Order the singular values.
var continue = true
while (k < pp && continue) {
if (s(k) >= s(k + 1)) {
continue = false
} else {
var t = s(k)
s(k) = s(k + 1)
s(k + 1) = t
if (wantv && k < n - 1) {
var i = 0
while (i < n) {
t = v(i)(k + 1)
v(i)(k + 1) = v(i)(k)
v(i)(k) = t
i += 1
}
}
if (wantu && k < m - 1) {
var i = 0
while (i < m) {
t = u(i)(k + 1)
u(i)(k + 1) = u(i)(k)
u(i)(k) = t
i += 1
}
}
k += 1
}
}
//iter = 0
p -= 1
case _ =>
throw new IllegalStateException()
}
}
}
/**
* Returns the two norm condition number, which is <tt>max(S) / min(S)</tt>.
*/
def cond: Double = {
s(0) / s(math.min(m, n) - 1)
}
/**
* @return the diagonal matrix of singular values.
*/
def getS: Matrix = {
val s = Array.ofDim[Double](n, n)
var i = 0
while (i < n) {
var j = 0
while (j < n) {
s(i)(j) = 0.0
j += 1
}
s(i)(i) = this.s(i)
i += 1
}
DenseMatrix(s)
}
/**
* Returns the diagonal of <tt>S</tt>, which is a one-dimensional array of
* singular values
*
* @return diagonal of <tt>S</tt>.
*/
def getSingularValues: Array[Double] = {
s
}
/**
* Returns the left singular vectors <tt>U</tt>.
*
* @return <tt>U</tt>
*/
def getU: Matrix = {
if (transpositionNeeded) { //case numRows() < numCols()
DenseMatrix(v)
} else {
val numCols = math.min(m + 1, n)
val r = DenseMatrix(m, numCols)
var i = 0
while (i < m) {
var j = 0
while (j < numCols) {
r.set(i, j, u(i)(j))
j += 1
}
i += 1
}
r
}
}
/**
* Returns the right singular vectors <tt>V</tt>.
*
* @return <tt>V</tt>
*/
def getV: Matrix = {
if (transpositionNeeded) { // case numRows < numCols
val numCols = math.min(m + 1, n)
val r = DenseMatrix(m, numCols)
var i = 0
while (i < m) {
var j = 0
while (j < numCols) {
r.set(i, j, u(i)(j))
j += 1
}
i += 1
}
r
} else {
DenseMatrix(v)
}
}
/** Returns the two norm, which is <tt>max(S)</tt>. */
def norm2: Double = {
s(0)
}
/**
* Returns the effective numerical matrix rank, which is the number of
* nonnegligible singular values.
*/
def rank: Int = {
val eps = math.pow(2.0, -52.0)
val tol = math.max(m, n) * s(0) * eps
var r = 0
for (value <- s) {
if (value > tol) {
r += 1
}
}
r
}
/**
   * @param minSingularValue value below which singular values are ignored (a zero or negative
   * value implies all singular values will be used)
   * @return the n × n covariance matrix, computed as V × J × Vt where J is the diagonal
   * matrix of the inverses of the squares of the singular values
*/
def getCovariance(minSingularValue: Double): Matrix = {
val j = DenseMatrix(s.length, s.length)
val vMat = DenseMatrix(this.v)
var i = 0
while (i < s.length) {
j.set(i, i, if (s(i) >= minSingularValue) 1 / (s(i) * s(i)) else 0.0)
i += 1
}
vMat.times(j).times(vMat.transpose)
}
/**
* Returns a String with (propertyName, propertyValue) pairs. Useful for
* debugging or to quickly get the rough picture. For example,
*
* <pre>
* rank : 3
* trace : 0
* </pre>
*/
override def toString = {
val sb = new StringBuilder
sb.append("---------------------------------------------------------------------\\n")
sb.append("SingularValueDecomposition(A) --> cond(A), rank(A), norm2(A), U, S, V\\n")
sb.append("---------------------------------------------------------------------\\n")
sb.append("cond = ")
val unknown = "Illegal operation or error: "
try {
sb.append(String.valueOf(this.cond))
} catch {
case e: IllegalArgumentException => sb.append(unknown).append(e.getMessage)
}
sb.append("\\nrank = ")
try {
sb.append(String.valueOf(this.rank))
} catch {
case e: IllegalArgumentException => sb.append(unknown).append(e.getMessage)
}
sb.append("\\nnorm2 = ")
try {
sb.append(String.valueOf(this.norm2))
} catch {
case e: IllegalArgumentException => sb.append(unknown).append(e.getMessage)
}
sb.append("\\n\\nU = ")
try {
sb.append(String.valueOf(this.getU))
} catch {
case e: IllegalArgumentException => sb.append(unknown).append(e.getMessage)
}
sb.append("\\n\\nS = ")
try {
sb.append(String.valueOf(this.getS))
} catch {
case e: IllegalArgumentException => sb.append(unknown).append(e.getMessage)
}
sb.append("\\n\\nV = ")
try {
sb.append(String.valueOf(this.getV))
} catch {
case e: IllegalArgumentException => sb.append(unknown).append(e.getMessage)
}
sb.toString
}
}
|
dcaoyuan/inloopio-libs
|
inloopio-math/src/main/scala/inloopio/math/algebra/SingularValueDecomposition.scala
|
Scala
|
bsd-3-clause
| 17,819
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import scala.collection.mutable
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.{expressions, InternalRow}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeUtils}
import org.apache.spark.sql.types.StructType
/**
* An abstract class that represents [[FileIndex]]s that are aware of partitioned tables.
* It provides the necessary methods to parse partition data based on a set of files.
*
 * @param parameters a set of options to control partition discovery
 * @param userSpecifiedSchema an optional user specified schema that will be used to provide
* types for the discovered partitions
*/
abstract class PartitioningAwareFileIndex(
sparkSession: SparkSession,
parameters: Map[String, String],
userSpecifiedSchema: Option[StructType],
fileStatusCache: FileStatusCache = NoopCache) extends FileIndex with Logging {
import PartitioningAwareFileIndex.BASE_PATH_PARAM
/** Returns the specification of the partitions inferred from the data. */
def partitionSpec(): PartitionSpec
override def partitionSchema: StructType = partitionSpec().partitionColumns
protected val hadoopConf: Configuration =
sparkSession.sessionState.newHadoopConfWithOptions(parameters)
protected def leafFiles: mutable.LinkedHashMap[Path, FileStatus]
protected def leafDirToChildrenFiles: Map[Path, Array[FileStatus]]
private val caseInsensitiveMap = CaseInsensitiveMap(parameters)
private val pathFilters = PathFilterFactory.create(caseInsensitiveMap)
protected def matchPathPattern(file: FileStatus): Boolean =
pathFilters.forall(_.accept(file))
protected lazy val recursiveFileLookup: Boolean = {
caseInsensitiveMap.getOrElse("recursiveFileLookup", "false").toBoolean
}
override def listFiles(
partitionFilters: Seq[Expression], dataFilters: Seq[Expression]): Seq[PartitionDirectory] = {
def isNonEmptyFile(f: FileStatus): Boolean = {
isDataPath(f.getPath) && f.getLen > 0
}
val selectedPartitions = if (partitionSpec().partitionColumns.isEmpty) {
PartitionDirectory(InternalRow.empty, allFiles().filter(isNonEmptyFile)) :: Nil
} else {
if (recursiveFileLookup) {
        throw new IllegalArgumentException(
          "Datasources with partitions do not allow recursive file loading.")
}
prunePartitions(partitionFilters, partitionSpec()).map {
case PartitionPath(values, path) =>
val files: Seq[FileStatus] = leafDirToChildrenFiles.get(path) match {
case Some(existingDir) =>
// Directory has children files in it, return them
existingDir.filter(f => matchPathPattern(f) && isNonEmptyFile(f))
case None =>
// Directory does not exist, or has no children files
Nil
}
PartitionDirectory(values, files)
}
}
logTrace("Selected files after partition pruning:\n\t" + selectedPartitions.mkString("\n\t"))
selectedPartitions
}
/** Returns the list of files that will be read when scanning this relation. */
override def inputFiles: Array[String] =
allFiles().map(_.getPath.toUri.toString).toArray
override def sizeInBytes: Long = allFiles().map(_.getLen).sum
def allFiles(): Seq[FileStatus] = {
val files = if (partitionSpec().partitionColumns.isEmpty && !recursiveFileLookup) {
// For each of the root input paths, get the list of files inside them
rootPaths.flatMap { path =>
// Make the path qualified (consistent with listLeafFiles and bulkListLeafFiles).
val fs = path.getFileSystem(hadoopConf)
val qualifiedPathPre = fs.makeQualified(path)
val qualifiedPath: Path = if (qualifiedPathPre.isRoot && !qualifiedPathPre.isAbsolute) {
// SPARK-17613: Always append `Path.SEPARATOR` to the end of parent directories,
// because the `leafFile.getParent` would have returned an absolute path with the
// separator at the end.
new Path(qualifiedPathPre, Path.SEPARATOR)
} else {
qualifiedPathPre
}
// There are three cases possible with each path
// 1. The path is a directory and has children files in it. Then it must be present in
// leafDirToChildrenFiles as those children files will have been found as leaf files.
// Find its children files from leafDirToChildrenFiles and include them.
// 2. The path is a file, then it will be present in leafFiles. Include this path.
// 3. The path is a directory, but has no children files. Do not include this path.
leafDirToChildrenFiles.get(qualifiedPath)
.orElse { leafFiles.get(qualifiedPath).map(Array(_)) }
.getOrElse(Array.empty)
}
} else {
leafFiles.values.toSeq
}
files.filter(matchPathPattern)
}
protected def inferPartitioning(): PartitionSpec = {
if (recursiveFileLookup) {
PartitionSpec.emptySpec
} else {
// We use leaf dirs containing data files to discover the schema.
val leafDirs = leafDirToChildrenFiles.filter { case (_, files) =>
files.exists(f => isDataPath(f.getPath))
}.keys.toSeq
val caseInsensitiveOptions = CaseInsensitiveMap(parameters)
val timeZoneId = caseInsensitiveOptions.get(DateTimeUtils.TIMEZONE_OPTION)
.getOrElse(sparkSession.sessionState.conf.sessionLocalTimeZone)
PartitioningUtils.parsePartitions(
leafDirs,
typeInference = sparkSession.sessionState.conf.partitionColumnTypeInferenceEnabled,
basePaths = basePaths,
userSpecifiedSchema = userSpecifiedSchema,
caseSensitive = sparkSession.sqlContext.conf.caseSensitiveAnalysis,
validatePartitionColumns = sparkSession.sqlContext.conf.validatePartitionColumns,
timeZoneId = timeZoneId)
}
}
private def prunePartitions(
predicates: Seq[Expression],
partitionSpec: PartitionSpec): Seq[PartitionPath] = {
val PartitionSpec(partitionColumns, partitions) = partitionSpec
val partitionColumnNames = partitionColumns.map(_.name).toSet
val partitionPruningPredicates = predicates.filter {
_.references.map(_.name).toSet.subsetOf(partitionColumnNames)
}
if (partitionPruningPredicates.nonEmpty) {
val predicate = partitionPruningPredicates.reduce(expressions.And)
val boundPredicate = Predicate.createInterpreted(predicate.transform {
case a: AttributeReference =>
val index = partitionColumns.indexWhere(a.name == _.name)
BoundReference(index, partitionColumns(index).dataType, nullable = true)
})
val selected = partitions.filter {
case PartitionPath(values, _) => boundPredicate.eval(values)
}
logInfo {
val total = partitions.length
val selectedSize = selected.length
val percentPruned = (1 - selectedSize.toDouble / total.toDouble) * 100
s"Selected $selectedSize partitions out of $total, " +
s"pruned ${if (total == 0) "0" else s"$percentPruned%"} partitions."
}
selected
} else {
partitions
}
}
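  // Illustrative walk-through of prunePartitions (not from the original source): with
  // partition columns (year INT, month INT) and filters `year = 2020` and `month > 6`,
  // both predicates reference only partition columns, so they are conjoined, bound to
  // the partition-value schema, and evaluated against each PartitionPath's values;
  // only matching directories are kept and scanned.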
/**
* Contains a set of paths that are considered as the base dirs of the input datasets.
* The partitioning discovery logic will make sure it will stop when it reaches any
* base path.
*
* By default, the paths of the dataset provided by users will be base paths.
 * Below are three typical examples:
 * Case 1) `spark.read.parquet("/path/something=true/")`: the base path will be
 * `/path/something=true/`, and the returned DataFrame will not contain a `something` column.
 * Case 2) `spark.read.parquet("/path/something=true/a.parquet")`: the base path will still be
 * `/path/something=true/`, and the returned DataFrame will likewise not contain a `something`
 * column.
 * Case 3) `spark.read.parquet("/path/")`: the base path will be `/path/`, and the returned
 * DataFrame will have a `something` column.
 *
 * Users can also override the base path by setting `basePath` in the options passed to the
 * data source.
 * For example, with `spark.read.option("basePath", "/path/").parquet("/path/something=true/")`,
 * the returned DataFrame will have a `something` column.
*/
private def basePaths: Set[Path] = {
caseInsensitiveMap.get(BASE_PATH_PARAM).map(new Path(_)) match {
case Some(userDefinedBasePath) =>
val fs = userDefinedBasePath.getFileSystem(hadoopConf)
if (!fs.isDirectory(userDefinedBasePath)) {
throw new IllegalArgumentException(s"Option '$BASE_PATH_PARAM' must be a directory")
}
val qualifiedBasePath = fs.makeQualified(userDefinedBasePath)
val qualifiedBasePathStr = qualifiedBasePath.toString
rootPaths
.find(!fs.makeQualified(_).toString.startsWith(qualifiedBasePathStr))
.foreach { rp =>
throw new IllegalArgumentException(
s"Wrong basePath $userDefinedBasePath for the root path: $rp")
}
Set(qualifiedBasePath)
case None =>
rootPaths.map { path =>
// Make the path qualified (consistent with listLeafFiles and bulkListLeafFiles).
val qualifiedPath = path.getFileSystem(hadoopConf).makeQualified(path)
if (leafFiles.contains(qualifiedPath)) qualifiedPath.getParent else qualifiedPath }.toSet
}
}
  // SPARK-15895: Metadata files (e.g. Parquet summary files) and temporary files should not be
  // counted as data files, so that they don't participate in partition discovery.
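  // For example, "_SUCCESS" and ".part-0.crc" are skipped, while a name containing '='
  // (e.g. "_col=1") still counts as a data path.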
private def isDataPath(path: Path): Boolean = {
val name = path.getName
!((name.startsWith("_") && !name.contains("=")) || name.startsWith("."))
}
}
object PartitioningAwareFileIndex {
val BASE_PATH_PARAM = "basePath"
}
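// A minimal usage sketch (not part of the original file) for the `basePath` option
// documented above; the paths and the local session below are hypothetical.
object BasePathExample {
  import org.apache.spark.sql.SparkSession
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("basePath").getOrCreate()
    // Reading a single partition directory directly drops the partition column...
    val withoutColumn = spark.read.parquet("/data/events/year=2020/")
    withoutColumn.printSchema() // no `year` column
    // ...unless `basePath` tells partition discovery where the dataset root is,
    // in which case `year` is recovered from the directory name as a column.
    val withColumn = spark.read
      .option("basePath", "/data/events/")
      .parquet("/data/events/year=2020/")
    withColumn.printSchema() // schema now includes `year`
    spark.stop()
  }
}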
|
wangmiao1981/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningAwareFileIndex.scala
|
Scala
|
apache-2.0
| 10,908
|
package chemistry
object Chemistry {
var procs = Runtime.getRuntime.availableProcessors
}
|
aturon/ChemistrySet
|
src/main/scala/core/Chemistry.scala
|
Scala
|
bsd-2-clause
| 93
|
package org.jetbrains.plugins.scala.lang
package refactoring.extractMethod
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScTypeParam
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypeParametersOwner
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.project.ProjectContext
import org.jetbrains.plugins.scala.settings.ScalaApplicationSettings
class ScalaExtractMethodSettings(
val methodName: String,
val parameters: Array[ExtractMethodParameter],
val outputs: Array[ExtractMethodOutput],
val visibility: String,
val nextSibling: PsiElement,
val elements: Array[PsiElement],
val returnType: Option[ScType],
val addReturnType: ScalaApplicationSettings.ReturnTypeLevel,
val lastReturn: Boolean,
val lastExprType: Option[ScType],
val innerClassSettings: InnerClassSettings
) {
def projectContext: ProjectContext = nextSibling.getProject
lazy val (calcReturnTypeIsUnit, calcReturnTypeText) = ScalaExtractMethodUtils.calcReturnTypeExt(this)
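  // Type parameters visible at the extraction site: starting from the first extracted
  // element, walk up the PSI tree until reaching a parent whose range properly contains
  // `nextSibling`'s range, collect every ScTypeParametersOwner on the way, and reverse
  // so the outermost owner's parameters come first.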
val typeParameters: Seq[ScTypeParam] = {
val nextRange = nextSibling.getTextRange
val elem: PsiElement = elements.head
elem.parentsInFile
.takeWhile { parent =>
parent != null && ! {
val range = parent.getTextRange
range != null &&
range.contains(nextRange) &&
!range.equalsToRange(nextRange.getStartOffset, nextRange.getEndOffset)
}
}
.collect { case tpo: ScTypeParametersOwner => tpo}
.flatMap(_.typeParameters)
.toSeq
.reverse
}
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ScalaExtractMethodSettings.scala
|
Scala
|
apache-2.0
| 1,669
|
package com.swara.learn.neural
import com.swara.learn.{Model, Supervised}
/**
 * An artificial neural network: a supervised learning technique, inspired by
 * biological neural networks, that is used to approximate functions. This implementation
 * provides a flexible and extensible way to build arbitrarily complex networks (RNN, CNN, etc.).
 *
 * @param loss Loss function s.t. (actual, expected) => error signal fed to backpropagation
 * @param layer The layer (possibly composite) that forms the network.
* @tparam I Type of inputs.
* @tparam O Type of outputs.
*/
class Network[I, O](
loss: ((O, O) => O),
layer: Layer[I, O]
) extends Model[Seq[I], Seq[O]] with Supervised[Seq[I], Seq[O]] {
override def train(inputs: Seq[I], outputs: Seq[O]): Unit = {
val result = this.layer(inputs)
val errors = result.forward.zip(outputs).map(this.loss.tupled)
result.backward(errors)
}
override def predict(input: Seq[I]): Seq[O] = {
this.layer(input).forward
}
}
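// Illustrative only (not part of the original file): a loss of the shape the constructor
// above expects, for Double outputs. It returns the error signal handed to `backward`;
// whether swara expects raw differences or true gradients here is an assumption.
object NetworkLossSketch {
  val l2Gradient: (Double, Double) => Double = (actual, expected) => actual - expected
}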
|
ashwin153/swara
|
swara-learn/src/main/scala/com/swara/learn/neural/Network.scala
|
Scala
|
mit
| 952
|
/*
* Twitter Korean Text - Scala library to process Korean text
*
* Copyright 2014 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.penguin.korean.v1.tokenizer
import com.twitter.penguin.korean.v1.tokenizer.KoreanChunker._
import com.twitter.penguin.korean.v1.tokenizer.KoreanTokenizer.KoreanToken
import com.twitter.penguin.korean.v1.util.KoreanPos._
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class KoreanChunkerTest extends FunSuite {
test("getChunks should correctly split a string into Korean-sensitive chunks") {
assert(
getChunks("안녕? iphone6안녕? 세상아?")
=== "안녕 ? iphone 6 안녕 ? 세상아 ?".split(" ").toSeq
)
assert(
getChunks("This is an 한국어가 섞인 English tweet.")
=== "This is an 한국어가 섞인 English tweet .".split(" ").toSeq
)
assert(
getChunks("이 日本것은 日本語Eng")
=== "이 日本 것은 日本語 Eng".split(" ").toSeq
)
assert(
getChunks("무효이며")
=== Seq("무효이며")
)
assert(
getChunks("#해쉬태그 이라는 것 #hash @hello 123 이런이런 #여자최애캐_5명으로_취향을_드러내자")
=== "#해쉬태그 이라는 것 #hash @hello 123 이런이런 #여자최애캐_5명으로_취향을_드러내자".split(" ").toSeq
)
}
test("getChunkTokens should correctly find chunks with correct POS tags") {
assert(
chunk("한국어와 English와 1234와 pic.twitter.com " +
"http://news.kukinews.com/article/view.asp?" +
"page=1&gCode=soc&arcid=0008599913&code=41121111 " +
"hohyonryu@twitter.com 갤럭시 S5").mkString(" ")
===
"한국어와Korean EnglishAlpha 와Korean 1234Number 와Korean " +
"pic.twitter.comURL " +
"http://news.kukinews.com/article/view.asp?" +
"page=1&gCode=soc&arcid=0008599913&code=41121111URL " +
"hohyonryu@twitter.comEmail 갤럭시Korean SAlpha 5Number"
)
assert(
chunk("우와!!! 완전ㅋㅋㅋㅋ")
=== Seq(
KoreanToken("우와", Korean), KoreanToken("!!!", Punctuation),
KoreanToken("완전", Korean), KoreanToken("ㅋㅋㅋㅋ", KoreanParticle))
)
assert(
chunk("@nlpenguin @edeng #korean_tokenizer_rocks 우하하")
=== Seq(KoreanToken("@nlpenguin", ScreenName), KoreanToken("@edeng", ScreenName),
KoreanToken("#korean_tokenizer_rocks", Hashtag), KoreanToken("우하하", Korean))
)
}
}
|
NamHosung/SE
|
src/test/scala/com/twitter/penguin/korean/v1/tokenizer/KoreanChunkerTest.scala
|
Scala
|
apache-2.0
| 3,171
|
package orientdb.stream.impl
private[stream] object ActorSource {
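  // Vocabulary for an actor-backed stream source: the actor's states (Ready or
  // Completed), the events that drive it (enqueue an element, signal an error, or
  // complete), and its data (a vector of elements buffered for downstream).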
sealed trait State
case object Ready extends State
case object Completed extends State
sealed trait Event
final case class Enqueue[A](x: A) extends Event
final case class ErrorOccurred(t: Throwable) extends Event
case object Complete extends Event
sealed trait Data
final case class Queue[A](xs: Vector[A]) extends Data
}
|
KadekM/orientdb-scala-stream
|
src/main/scala/orientdb/stream/impl/ActorSource.scala
|
Scala
|
apache-2.0
| 411
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataOutput, DataOutputStream, File,
FileOutputStream, PrintStream}
import java.lang.{Double => JDouble, Float => JFloat}
import java.net.{BindException, ServerSocket, URI}
import java.nio.{ByteBuffer, ByteOrder}
import java.nio.charset.StandardCharsets
import java.text.DecimalFormatSymbols
import java.util.Locale
import java.util.concurrent.TimeUnit
import java.util.zip.GZIPOutputStream
import scala.collection.mutable.ListBuffer
import scala.util.Random
import com.google.common.io.Files
import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.SystemUtils
import org.apache.commons.math3.stat.inference.ChiSquareTest
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.{SparkConf, SparkFunSuite, TaskContext}
import org.apache.spark.internal.Logging
import org.apache.spark.network.util.ByteUnit
class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
test("truncatedString") {
assert(Utils.truncatedString(Nil, "[", ", ", "]", 2) == "[]")
assert(Utils.truncatedString(Seq(1, 2), "[", ", ", "]", 2) == "[1, 2]")
assert(Utils.truncatedString(Seq(1, 2, 3), "[", ", ", "]", 2) == "[1, ... 2 more fields]")
assert(Utils.truncatedString(Seq(1, 2, 3), "[", ", ", "]", -5) == "[, ... 3 more fields]")
assert(Utils.truncatedString(Seq(1, 2, 3), ", ") == "1, 2, 3")
}
test("timeConversion") {
// Test -1
assert(Utils.timeStringAsSeconds("-1") === -1)
// Test zero
assert(Utils.timeStringAsSeconds("0") === 0)
assert(Utils.timeStringAsSeconds("1") === 1)
assert(Utils.timeStringAsSeconds("1s") === 1)
assert(Utils.timeStringAsSeconds("1000ms") === 1)
assert(Utils.timeStringAsSeconds("1000000us") === 1)
assert(Utils.timeStringAsSeconds("1m") === TimeUnit.MINUTES.toSeconds(1))
assert(Utils.timeStringAsSeconds("1min") === TimeUnit.MINUTES.toSeconds(1))
assert(Utils.timeStringAsSeconds("1h") === TimeUnit.HOURS.toSeconds(1))
assert(Utils.timeStringAsSeconds("1d") === TimeUnit.DAYS.toSeconds(1))
assert(Utils.timeStringAsMs("1") === 1)
assert(Utils.timeStringAsMs("1ms") === 1)
assert(Utils.timeStringAsMs("1000us") === 1)
assert(Utils.timeStringAsMs("1s") === TimeUnit.SECONDS.toMillis(1))
assert(Utils.timeStringAsMs("1m") === TimeUnit.MINUTES.toMillis(1))
assert(Utils.timeStringAsMs("1min") === TimeUnit.MINUTES.toMillis(1))
assert(Utils.timeStringAsMs("1h") === TimeUnit.HOURS.toMillis(1))
assert(Utils.timeStringAsMs("1d") === TimeUnit.DAYS.toMillis(1))
// Test invalid strings
intercept[NumberFormatException] {
Utils.timeStringAsMs("600l")
}
intercept[NumberFormatException] {
Utils.timeStringAsMs("This breaks 600s")
}
intercept[NumberFormatException] {
Utils.timeStringAsMs("This breaks 600ds")
}
intercept[NumberFormatException] {
Utils.timeStringAsMs("600s This breaks")
}
intercept[NumberFormatException] {
Utils.timeStringAsMs("This 123s breaks")
}
}
test("Test byteString conversion") {
// Test zero
assert(Utils.byteStringAsBytes("0") === 0)
assert(Utils.byteStringAsGb("1") === 1)
assert(Utils.byteStringAsGb("1g") === 1)
assert(Utils.byteStringAsGb("1023m") === 0)
assert(Utils.byteStringAsGb("1024m") === 1)
assert(Utils.byteStringAsGb("1048575k") === 0)
assert(Utils.byteStringAsGb("1048576k") === 1)
assert(Utils.byteStringAsGb("1k") === 0)
assert(Utils.byteStringAsGb("1t") === ByteUnit.TiB.toGiB(1))
assert(Utils.byteStringAsGb("1p") === ByteUnit.PiB.toGiB(1))
assert(Utils.byteStringAsMb("1") === 1)
assert(Utils.byteStringAsMb("1m") === 1)
assert(Utils.byteStringAsMb("1048575b") === 0)
assert(Utils.byteStringAsMb("1048576b") === 1)
assert(Utils.byteStringAsMb("1023k") === 0)
assert(Utils.byteStringAsMb("1024k") === 1)
assert(Utils.byteStringAsMb("3645k") === 3)
assert(Utils.byteStringAsMb("1024gb") === 1048576)
assert(Utils.byteStringAsMb("1g") === ByteUnit.GiB.toMiB(1))
assert(Utils.byteStringAsMb("1t") === ByteUnit.TiB.toMiB(1))
assert(Utils.byteStringAsMb("1p") === ByteUnit.PiB.toMiB(1))
assert(Utils.byteStringAsKb("1") === 1)
assert(Utils.byteStringAsKb("1k") === 1)
assert(Utils.byteStringAsKb("1m") === ByteUnit.MiB.toKiB(1))
assert(Utils.byteStringAsKb("1g") === ByteUnit.GiB.toKiB(1))
assert(Utils.byteStringAsKb("1t") === ByteUnit.TiB.toKiB(1))
assert(Utils.byteStringAsKb("1p") === ByteUnit.PiB.toKiB(1))
assert(Utils.byteStringAsBytes("1") === 1)
assert(Utils.byteStringAsBytes("1k") === ByteUnit.KiB.toBytes(1))
assert(Utils.byteStringAsBytes("1m") === ByteUnit.MiB.toBytes(1))
assert(Utils.byteStringAsBytes("1g") === ByteUnit.GiB.toBytes(1))
assert(Utils.byteStringAsBytes("1t") === ByteUnit.TiB.toBytes(1))
assert(Utils.byteStringAsBytes("1p") === ByteUnit.PiB.toBytes(1))
// Overflow handling, 1073741824p exceeds Long.MAX_VALUE if converted straight to Bytes
    // This demonstrates that we can have, e.g., 1024^3 PB without overflowing.
assert(Utils.byteStringAsGb("1073741824p") === ByteUnit.PiB.toGiB(1073741824))
assert(Utils.byteStringAsMb("1073741824p") === ByteUnit.PiB.toMiB(1073741824))
// Run this to confirm it doesn't throw an exception
assert(Utils.byteStringAsBytes("9223372036854775807") === 9223372036854775807L)
assert(ByteUnit.PiB.toPiB(9223372036854775807L) === 9223372036854775807L)
// Test overflow exception
intercept[IllegalArgumentException] {
// This value exceeds Long.MAX when converted to bytes
Utils.byteStringAsBytes("9223372036854775808")
}
// Test overflow exception
intercept[IllegalArgumentException] {
// This value exceeds Long.MAX when converted to TB
ByteUnit.PiB.toTiB(9223372036854775807L)
}
// Test fractional string
intercept[NumberFormatException] {
Utils.byteStringAsMb("0.064")
}
// Test fractional string
intercept[NumberFormatException] {
Utils.byteStringAsMb("0.064m")
}
// Test invalid strings
intercept[NumberFormatException] {
Utils.byteStringAsBytes("500ub")
}
// Test invalid strings
intercept[NumberFormatException] {
Utils.byteStringAsBytes("This breaks 600b")
}
intercept[NumberFormatException] {
Utils.byteStringAsBytes("This breaks 600")
}
intercept[NumberFormatException] {
Utils.byteStringAsBytes("600gb This breaks")
}
intercept[NumberFormatException] {
Utils.byteStringAsBytes("This 123mb breaks")
}
}
test("bytesToString") {
assert(Utils.bytesToString(10) === "10.0 B")
assert(Utils.bytesToString(1500) === "1500.0 B")
assert(Utils.bytesToString(2000000) === "1953.1 KB")
assert(Utils.bytesToString(2097152) === "2.0 MB")
assert(Utils.bytesToString(2306867) === "2.2 MB")
assert(Utils.bytesToString(5368709120L) === "5.0 GB")
assert(Utils.bytesToString(5L * (1L << 40)) === "5.0 TB")
assert(Utils.bytesToString(5L * (1L << 50)) === "5.0 PB")
assert(Utils.bytesToString(5L * (1L << 60)) === "5.0 EB")
assert(Utils.bytesToString(BigInt(1L << 11) * (1L << 60)) === "2.36E+21 B")
}
test("copyStream") {
// input array initialization
val bytes = Array.ofDim[Byte](9000)
Random.nextBytes(bytes)
val os = new ByteArrayOutputStream()
Utils.copyStream(new ByteArrayInputStream(bytes), os)
assert(os.toByteArray.toList.equals(bytes.toList))
}
test("memoryStringToMb") {
assert(Utils.memoryStringToMb("1") === 0)
assert(Utils.memoryStringToMb("1048575") === 0)
assert(Utils.memoryStringToMb("3145728") === 3)
assert(Utils.memoryStringToMb("1024k") === 1)
assert(Utils.memoryStringToMb("5000k") === 4)
assert(Utils.memoryStringToMb("4024k") === Utils.memoryStringToMb("4024K"))
assert(Utils.memoryStringToMb("1024m") === 1024)
assert(Utils.memoryStringToMb("5000m") === 5000)
assert(Utils.memoryStringToMb("4024m") === Utils.memoryStringToMb("4024M"))
assert(Utils.memoryStringToMb("2g") === 2048)
assert(Utils.memoryStringToMb("3g") === Utils.memoryStringToMb("3G"))
assert(Utils.memoryStringToMb("2t") === 2097152)
assert(Utils.memoryStringToMb("3t") === Utils.memoryStringToMb("3T"))
}
test("splitCommandString") {
assert(Utils.splitCommandString("") === Seq())
assert(Utils.splitCommandString("a") === Seq("a"))
assert(Utils.splitCommandString("aaa") === Seq("aaa"))
assert(Utils.splitCommandString("a b c") === Seq("a", "b", "c"))
assert(Utils.splitCommandString(" a b\\t c ") === Seq("a", "b", "c"))
assert(Utils.splitCommandString("a 'b c'") === Seq("a", "b c"))
assert(Utils.splitCommandString("a 'b c' d") === Seq("a", "b c", "d"))
assert(Utils.splitCommandString("'b c'") === Seq("b c"))
assert(Utils.splitCommandString("a \\"b c\\"") === Seq("a", "b c"))
assert(Utils.splitCommandString("a \\"b c\\" d") === Seq("a", "b c", "d"))
assert(Utils.splitCommandString("\\"b c\\"") === Seq("b c"))
assert(Utils.splitCommandString("a 'b\\" c' \\"d' e\\"") === Seq("a", "b\\" c", "d' e"))
assert(Utils.splitCommandString("a\\t'b\\nc'\\nd") === Seq("a", "b\\nc", "d"))
assert(Utils.splitCommandString("a \\"b\\\\\\\\c\\"") === Seq("a", "b\\\\c"))
assert(Utils.splitCommandString("a \\"b\\\\\\"c\\"") === Seq("a", "b\\"c"))
assert(Utils.splitCommandString("a 'b\\\\\\"c'") === Seq("a", "b\\\\\\"c"))
assert(Utils.splitCommandString("'a'b") === Seq("ab"))
assert(Utils.splitCommandString("'a''b'") === Seq("ab"))
assert(Utils.splitCommandString("\\"a\\"b") === Seq("ab"))
assert(Utils.splitCommandString("\\"a\\"\\"b\\"") === Seq("ab"))
assert(Utils.splitCommandString("''") === Seq(""))
assert(Utils.splitCommandString("\\"\\"") === Seq(""))
}
test("string formatting of time durations") {
val second = 1000
val minute = second * 60
val hour = minute * 60
def str: (Long) => String = Utils.msDurationToString(_)
val sep = new DecimalFormatSymbols(Locale.US).getDecimalSeparator
assert(str(123) === "123 ms")
assert(str(second) === "1" + sep + "0 s")
assert(str(second + 462) === "1" + sep + "5 s")
assert(str(hour) === "1" + sep + "00 h")
assert(str(minute) === "1" + sep + "0 m")
assert(str(minute + 4 * second + 34) === "1" + sep + "1 m")
assert(str(10 * hour + minute + 4 * second) === "10" + sep + "02 h")
assert(str(10 * hour + 59 * minute + 59 * second + 999) === "11" + sep + "00 h")
}
def getSuffix(isCompressed: Boolean): String = {
if (isCompressed) {
".gz"
} else {
""
}
}
def writeLogFile(path: String, content: Array[Byte]): Unit = {
val outputStream = if (path.endsWith(".gz")) {
new GZIPOutputStream(new FileOutputStream(path))
} else {
new FileOutputStream(path)
}
IOUtils.write(content, outputStream)
outputStream.close()
}
private val workerConf = new SparkConf()
def testOffsetBytes(isCompressed: Boolean): Unit = {
val tmpDir2 = Utils.createTempDir()
val suffix = getSuffix(isCompressed)
val f1Path = tmpDir2 + "/f1" + suffix
writeLogFile(f1Path, "1\\n2\\n3\\n4\\n5\\n6\\n7\\n8\\n9\\n".getBytes(StandardCharsets.UTF_8))
val f1Length = Utils.getFileLength(new File(f1Path), workerConf)
// Read first few bytes
assert(Utils.offsetBytes(f1Path, f1Length, 0, 5) === "1\\n2\\n3")
// Read some middle bytes
assert(Utils.offsetBytes(f1Path, f1Length, 4, 11) === "3\\n4\\n5\\n6")
// Read last few bytes
assert(Utils.offsetBytes(f1Path, f1Length, 12, 18) === "7\\n8\\n9\\n")
// Read some nonexistent bytes in the beginning
assert(Utils.offsetBytes(f1Path, f1Length, -5, 5) === "1\\n2\\n3")
// Read some nonexistent bytes at the end
assert(Utils.offsetBytes(f1Path, f1Length, 12, 22) === "7\\n8\\n9\\n")
// Read some nonexistent bytes on both ends
assert(Utils.offsetBytes(f1Path, f1Length, -3, 25) === "1\\n2\\n3\\n4\\n5\\n6\\n7\\n8\\n9\\n")
Utils.deleteRecursively(tmpDir2)
}
test("reading offset bytes of a file") {
testOffsetBytes(isCompressed = false)
}
test("reading offset bytes of a file (compressed)") {
testOffsetBytes(isCompressed = true)
}
def testOffsetBytesMultipleFiles(isCompressed: Boolean): Unit = {
val tmpDir = Utils.createTempDir()
val suffix = getSuffix(isCompressed)
val files = (1 to 3).map(i => new File(tmpDir, i.toString + suffix)) :+ new File(tmpDir, "4")
writeLogFile(files(0).getAbsolutePath, "0123456789".getBytes(StandardCharsets.UTF_8))
writeLogFile(files(1).getAbsolutePath, "abcdefghij".getBytes(StandardCharsets.UTF_8))
writeLogFile(files(2).getAbsolutePath, "ABCDEFGHIJ".getBytes(StandardCharsets.UTF_8))
writeLogFile(files(3).getAbsolutePath, "9876543210".getBytes(StandardCharsets.UTF_8))
val fileLengths = files.map(Utils.getFileLength(_, workerConf))
// Read first few bytes in the 1st file
assert(Utils.offsetBytes(files, fileLengths, 0, 5) === "01234")
// Read bytes within the 1st file
assert(Utils.offsetBytes(files, fileLengths, 5, 8) === "567")
// Read bytes across 1st and 2nd file
assert(Utils.offsetBytes(files, fileLengths, 8, 18) === "89abcdefgh")
// Read bytes across 1st, 2nd and 3rd file
assert(Utils.offsetBytes(files, fileLengths, 5, 24) === "56789abcdefghijABCD")
// Read bytes across 3rd and 4th file
assert(Utils.offsetBytes(files, fileLengths, 25, 35) === "FGHIJ98765")
// Read some nonexistent bytes in the beginning
assert(Utils.offsetBytes(files, fileLengths, -5, 18) === "0123456789abcdefgh")
// Read some nonexistent bytes at the end
assert(Utils.offsetBytes(files, fileLengths, 18, 45) === "ijABCDEFGHIJ9876543210")
// Read some nonexistent bytes on both ends
assert(Utils.offsetBytes(files, fileLengths, -5, 45) ===
"0123456789abcdefghijABCDEFGHIJ9876543210")
Utils.deleteRecursively(tmpDir)
}
test("reading offset bytes across multiple files") {
testOffsetBytesMultipleFiles(isCompressed = false)
}
test("reading offset bytes across multiple files (compressed)") {
testOffsetBytesMultipleFiles(isCompressed = true)
}
test("deserialize long value") {
val testval : Long = 9730889947L
val bbuf = ByteBuffer.allocate(8)
assert(bbuf.hasArray)
bbuf.order(ByteOrder.BIG_ENDIAN)
bbuf.putLong(testval)
assert(bbuf.array.length === 8)
assert(Utils.deserializeLongValue(bbuf.array) === testval)
}
test("writeByteBuffer should not change ByteBuffer position") {
// Test a buffer with an underlying array, for both writeByteBuffer methods.
val testBuffer = ByteBuffer.wrap(Array[Byte](1, 2, 3, 4))
assert(testBuffer.hasArray)
val bytesOut = new ByteBufferOutputStream(4096)
Utils.writeByteBuffer(testBuffer, bytesOut)
assert(testBuffer.position() === 0)
val dataOut = new DataOutputStream(bytesOut)
Utils.writeByteBuffer(testBuffer, dataOut: DataOutput)
assert(testBuffer.position() === 0)
// Test a buffer without an underlying array, for both writeByteBuffer methods.
val testDirectBuffer = ByteBuffer.allocateDirect(8)
assert(!testDirectBuffer.hasArray())
Utils.writeByteBuffer(testDirectBuffer, bytesOut)
assert(testDirectBuffer.position() === 0)
Utils.writeByteBuffer(testDirectBuffer, dataOut: DataOutput)
assert(testDirectBuffer.position() === 0)
}
test("get iterator size") {
val empty = Seq[Int]()
assert(Utils.getIteratorSize(empty.toIterator) === 0L)
val iterator = Iterator.range(0, 5)
assert(Utils.getIteratorSize(iterator) === 5L)
}
test("getIteratorZipWithIndex") {
val iterator = Utils.getIteratorZipWithIndex(Iterator(0, 1, 2), -1L + Int.MaxValue)
assert(iterator.toArray === Array(
(0, -1L + Int.MaxValue), (1, 0L + Int.MaxValue), (2, 1L + Int.MaxValue)
))
intercept[IllegalArgumentException] {
Utils.getIteratorZipWithIndex(Iterator(0, 1, 2), -1L)
}
}
test("doesDirectoryContainFilesNewerThan") {
// create some temporary directories and files
val parent: File = Utils.createTempDir()
// The parent directory has two child directories
val child1: File = Utils.createTempDir(parent.getCanonicalPath)
val child2: File = Utils.createTempDir(parent.getCanonicalPath)
val child3: File = Utils.createTempDir(child1.getCanonicalPath)
// set the last modified time of child1 to 30 secs old
child1.setLastModified(System.currentTimeMillis() - (1000 * 30))
    // although child1 is old, child2 is still new, so this returns true
assert(Utils.doesDirectoryContainAnyNewFiles(parent, 5))
child2.setLastModified(System.currentTimeMillis - (1000 * 30))
assert(Utils.doesDirectoryContainAnyNewFiles(parent, 5))
parent.setLastModified(System.currentTimeMillis - (1000 * 30))
    // although parent and its immediate children are old, child3 is still new;
    // we expect a full recursive search for new files.
assert(Utils.doesDirectoryContainAnyNewFiles(parent, 5))
child3.setLastModified(System.currentTimeMillis - (1000 * 30))
assert(!Utils.doesDirectoryContainAnyNewFiles(parent, 5))
}
test("resolveURI") {
def assertResolves(before: String, after: String): Unit = {
// This should test only single paths
assume(before.split(",").length === 1)
def resolve(uri: String): String = Utils.resolveURI(uri).toString
assert(resolve(before) === after)
assert(resolve(after) === after)
// Repeated invocations of resolveURI should yield the same result
assert(resolve(resolve(after)) === after)
assert(resolve(resolve(resolve(after))) === after)
}
val rawCwd = System.getProperty("user.dir")
val cwd = if (Utils.isWindows) s"/$rawCwd".replace("\\\\", "/") else rawCwd
assertResolves("hdfs:/root/spark.jar", "hdfs:/root/spark.jar")
assertResolves("hdfs:///root/spark.jar#app.jar", "hdfs:///root/spark.jar#app.jar")
assertResolves("spark.jar", s"file:$cwd/spark.jar")
assertResolves("spark.jar#app.jar", s"file:$cwd/spark.jar#app.jar")
assertResolves("path to/file.txt", s"file:$cwd/path%20to/file.txt")
if (Utils.isWindows) {
assertResolves("C:\\\\path\\\\to\\\\file.txt", "file:/C:/path/to/file.txt")
assertResolves("C:\\\\path to\\\\file.txt", "file:/C:/path%20to/file.txt")
}
assertResolves("file:/C:/path/to/file.txt", "file:/C:/path/to/file.txt")
assertResolves("file:///C:/path/to/file.txt", "file:///C:/path/to/file.txt")
assertResolves("file:/C:/file.txt#alias.txt", "file:/C:/file.txt#alias.txt")
assertResolves("file:foo", "file:foo")
assertResolves("file:foo:baby", "file:foo:baby")
}
test("resolveURIs with multiple paths") {
def assertResolves(before: String, after: String): Unit = {
assume(before.split(",").length > 1)
def resolve(uri: String): String = Utils.resolveURIs(uri)
assert(resolve(before) === after)
assert(resolve(after) === after)
// Repeated invocations of resolveURIs should yield the same result
assert(resolve(resolve(after)) === after)
assert(resolve(resolve(resolve(after))) === after)
}
val rawCwd = System.getProperty("user.dir")
val cwd = if (Utils.isWindows) s"/$rawCwd".replace("\\\\", "/") else rawCwd
assertResolves("jar1,jar2", s"file:$cwd/jar1,file:$cwd/jar2")
assertResolves("file:/jar1,file:/jar2", "file:/jar1,file:/jar2")
assertResolves("hdfs:/jar1,file:/jar2,jar3", s"hdfs:/jar1,file:/jar2,file:$cwd/jar3")
assertResolves("hdfs:/jar1,file:/jar2,jar3,jar4#jar5,path to/jar6",
s"hdfs:/jar1,file:/jar2,file:$cwd/jar3,file:$cwd/jar4#jar5,file:$cwd/path%20to/jar6")
if (Utils.isWindows) {
assertResolves("""hdfs:/jar1,file:/jar2,jar3,C:\\pi.py#py.pi,C:\\path to\\jar4""",
s"hdfs:/jar1,file:/jar2,file:$cwd/jar3,file:/C:/pi.py%23py.pi,file:/C:/path%20to/jar4")
}
assertResolves(",jar1,jar2", s"file:$cwd/jar1,file:$cwd/jar2")
// Also test resolveURIs with single paths
assertResolves("hdfs:/root/spark.jar", "hdfs:/root/spark.jar")
}
test("nonLocalPaths") {
assert(Utils.nonLocalPaths("spark.jar") === Array.empty)
assert(Utils.nonLocalPaths("file:/spark.jar") === Array.empty)
assert(Utils.nonLocalPaths("file:///spark.jar") === Array.empty)
assert(Utils.nonLocalPaths("local:/spark.jar") === Array.empty)
assert(Utils.nonLocalPaths("local:///spark.jar") === Array.empty)
assert(Utils.nonLocalPaths("hdfs:/spark.jar") === Array("hdfs:/spark.jar"))
assert(Utils.nonLocalPaths("hdfs:///spark.jar") === Array("hdfs:///spark.jar"))
assert(Utils.nonLocalPaths("file:/spark.jar,local:/smart.jar,family.py") === Array.empty)
assert(Utils.nonLocalPaths("local:/spark.jar,file:/smart.jar,family.py") === Array.empty)
assert(Utils.nonLocalPaths("hdfs:/spark.jar,s3:/smart.jar") ===
Array("hdfs:/spark.jar", "s3:/smart.jar"))
assert(Utils.nonLocalPaths("hdfs:/spark.jar,path to/a.jar,s3:/smart.jar") ===
Array("hdfs:/spark.jar", "s3:/smart.jar"))
assert(Utils.nonLocalPaths("hdfs:/spark.jar,s3:/smart.jar,local.py,file:/hello/pi.py") ===
Array("hdfs:/spark.jar", "s3:/smart.jar"))
assert(Utils.nonLocalPaths("local.py,hdfs:/spark.jar,file:/hello/pi.py,s3:/smart.jar") ===
Array("hdfs:/spark.jar", "s3:/smart.jar"))
// Test Windows paths
assert(Utils.nonLocalPaths("C:/some/path.jar", testWindows = true) === Array.empty)
assert(Utils.nonLocalPaths("file:/C:/some/path.jar", testWindows = true) === Array.empty)
assert(Utils.nonLocalPaths("file:///C:/some/path.jar", testWindows = true) === Array.empty)
assert(Utils.nonLocalPaths("local:/C:/some/path.jar", testWindows = true) === Array.empty)
assert(Utils.nonLocalPaths("local:///C:/some/path.jar", testWindows = true) === Array.empty)
assert(Utils.nonLocalPaths("hdfs:/a.jar,C:/my.jar,s3:/another.jar", testWindows = true) ===
Array("hdfs:/a.jar", "s3:/another.jar"))
assert(Utils.nonLocalPaths("D:/your.jar,hdfs:/a.jar,s3:/another.jar", testWindows = true) ===
Array("hdfs:/a.jar", "s3:/another.jar"))
assert(Utils.nonLocalPaths("hdfs:/a.jar,s3:/another.jar,e:/our.jar", testWindows = true) ===
Array("hdfs:/a.jar", "s3:/another.jar"))
}
test("isBindCollision") {
// Negatives
assert(!Utils.isBindCollision(null))
assert(!Utils.isBindCollision(new Exception))
assert(!Utils.isBindCollision(new Exception(new Exception)))
assert(!Utils.isBindCollision(new Exception(new BindException)))
// Positives
val be = new BindException("Random Message")
val be1 = new Exception(new BindException("Random Message"))
val be2 = new Exception(new Exception(new BindException("Random Message")))
assert(Utils.isBindCollision(be))
assert(Utils.isBindCollision(be1))
assert(Utils.isBindCollision(be2))
// Actual bind exception
var server1: ServerSocket = null
var server2: ServerSocket = null
try {
server1 = new java.net.ServerSocket(0)
server2 = new java.net.ServerSocket(server1.getLocalPort)
} catch {
case e: Exception =>
assert(e.isInstanceOf[java.net.BindException])
assert(Utils.isBindCollision(e))
} finally {
Option(server1).foreach(_.close())
Option(server2).foreach(_.close())
}
}
// Test for using the util function to change our log levels.
test("log4j log level change") {
val current = org.apache.log4j.Logger.getRootLogger().getLevel()
try {
Utils.setLogLevel(org.apache.log4j.Level.ALL)
assert(log.isInfoEnabled())
Utils.setLogLevel(org.apache.log4j.Level.ERROR)
assert(!log.isInfoEnabled())
assert(log.isErrorEnabled())
} finally {
// Best effort at undoing changes this test made.
Utils.setLogLevel(current)
}
}
test("deleteRecursively") {
val tempDir1 = Utils.createTempDir()
assert(tempDir1.exists())
Utils.deleteRecursively(tempDir1)
assert(!tempDir1.exists())
val tempDir2 = Utils.createTempDir()
val sourceFile1 = new File(tempDir2, "foo.txt")
Files.touch(sourceFile1)
assert(sourceFile1.exists())
Utils.deleteRecursively(sourceFile1)
assert(!sourceFile1.exists())
val tempDir3 = new File(tempDir2, "subdir")
assert(tempDir3.mkdir())
val sourceFile2 = new File(tempDir3, "bar.txt")
Files.touch(sourceFile2)
assert(sourceFile2.exists())
Utils.deleteRecursively(tempDir2)
assert(!tempDir2.exists())
assert(!tempDir3.exists())
assert(!sourceFile2.exists())
}
test("loading properties from file") {
val tmpDir = Utils.createTempDir()
val outFile = File.createTempFile("test-load-spark-properties", "test", tmpDir)
try {
System.setProperty("spark.test.fileNameLoadB", "2")
Files.write("spark.test.fileNameLoadA true\\n" +
"spark.test.fileNameLoadB 1\\n", outFile, StandardCharsets.UTF_8)
val properties = Utils.getPropertiesFromFile(outFile.getAbsolutePath)
properties
.filter { case (k, v) => k.startsWith("spark.")}
.foreach { case (k, v) => sys.props.getOrElseUpdate(k, v)}
val sparkConf = new SparkConf
assert(sparkConf.getBoolean("spark.test.fileNameLoadA", false) === true)
assert(sparkConf.getInt("spark.test.fileNameLoadB", 1) === 2)
} finally {
Utils.deleteRecursively(tmpDir)
}
}
test("timeIt with prepare") {
var cnt = 0
val prepare = () => {
cnt += 1
Thread.sleep(1000)
}
val time = Utils.timeIt(2)({}, Some(prepare))
require(cnt === 2, "prepare should be called twice")
require(time < 500, "preparation time should not count")
}
test("fetch hcfs dir") {
val tempDir = Utils.createTempDir()
val sourceDir = new File(tempDir, "source-dir")
val innerSourceDir = Utils.createTempDir(root = sourceDir.getPath)
val sourceFile = File.createTempFile("someprefix", "somesuffix", innerSourceDir)
val targetDir = new File(tempDir, "target-dir")
Files.write("some text", sourceFile, StandardCharsets.UTF_8)
val path =
if (Utils.isWindows) {
new Path("file:/" + sourceDir.getAbsolutePath.replace("\\\\", "/"))
} else {
new Path("file://" + sourceDir.getAbsolutePath)
}
val conf = new Configuration()
val fs = Utils.getHadoopFileSystem(path.toString, conf)
assert(!targetDir.isDirectory())
Utils.fetchHcfsFile(path, targetDir, fs, new SparkConf(), conf, false)
assert(targetDir.isDirectory())
// Copy again to make sure it doesn't error if the dir already exists.
Utils.fetchHcfsFile(path, targetDir, fs, new SparkConf(), conf, false)
val destDir = new File(targetDir, sourceDir.getName())
assert(destDir.isDirectory())
val destInnerDir = new File(destDir, innerSourceDir.getName)
assert(destInnerDir.isDirectory())
val destInnerFile = new File(destInnerDir, sourceFile.getName)
assert(destInnerFile.isFile())
val filePath =
if (Utils.isWindows) {
new Path("file:/" + sourceFile.getAbsolutePath.replace("\\\\", "/"))
} else {
new Path("file://" + sourceFile.getAbsolutePath)
}
val testFileDir = new File(tempDir, "test-filename")
val testFileName = "testFName"
val testFilefs = Utils.getHadoopFileSystem(filePath.toString, conf)
Utils.fetchHcfsFile(filePath, testFileDir, testFilefs, new SparkConf(),
conf, false, Some(testFileName))
val newFileName = new File(testFileDir, testFileName)
assert(newFileName.isFile())
}
test("shutdown hook manager") {
val manager = new SparkShutdownHookManager()
val output = new ListBuffer[Int]()
val hook1 = manager.add(1, () => output += 1)
manager.add(3, () => output += 3)
manager.add(2, () => output += 2)
manager.add(4, () => output += 4)
manager.remove(hook1)
manager.runAll()
assert(output.toList === List(4, 3, 2))
}
test("isInDirectory") {
val tmpDir = new File(sys.props("java.io.tmpdir"))
val parentDir = new File(tmpDir, "parent-dir")
val childDir1 = new File(parentDir, "child-dir-1")
val childDir1b = new File(parentDir, "child-dir-1b")
val childFile1 = new File(parentDir, "child-file-1.txt")
val childDir2 = new File(childDir1, "child-dir-2")
val childDir2b = new File(childDir1, "child-dir-2b")
val childFile2 = new File(childDir1, "child-file-2.txt")
val childFile3 = new File(childDir2, "child-file-3.txt")
val nullFile: File = null
parentDir.mkdir()
childDir1.mkdir()
childDir1b.mkdir()
childDir2.mkdir()
childDir2b.mkdir()
childFile1.createNewFile()
childFile2.createNewFile()
childFile3.createNewFile()
// Identity
assert(Utils.isInDirectory(parentDir, parentDir))
assert(Utils.isInDirectory(childDir1, childDir1))
assert(Utils.isInDirectory(childDir2, childDir2))
// Valid ancestor-descendant pairs
assert(Utils.isInDirectory(parentDir, childDir1))
assert(Utils.isInDirectory(parentDir, childFile1))
assert(Utils.isInDirectory(parentDir, childDir2))
assert(Utils.isInDirectory(parentDir, childFile2))
assert(Utils.isInDirectory(parentDir, childFile3))
assert(Utils.isInDirectory(childDir1, childDir2))
assert(Utils.isInDirectory(childDir1, childFile2))
assert(Utils.isInDirectory(childDir1, childFile3))
assert(Utils.isInDirectory(childDir2, childFile3))
// Inverted ancestor-descendant pairs should fail
assert(!Utils.isInDirectory(childDir1, parentDir))
assert(!Utils.isInDirectory(childDir2, parentDir))
assert(!Utils.isInDirectory(childDir2, childDir1))
assert(!Utils.isInDirectory(childFile1, parentDir))
assert(!Utils.isInDirectory(childFile2, parentDir))
assert(!Utils.isInDirectory(childFile3, parentDir))
assert(!Utils.isInDirectory(childFile2, childDir1))
assert(!Utils.isInDirectory(childFile3, childDir1))
assert(!Utils.isInDirectory(childFile3, childDir2))
// Non-existent files or directories should fail
assert(!Utils.isInDirectory(parentDir, new File(parentDir, "one.txt")))
assert(!Utils.isInDirectory(parentDir, new File(parentDir, "one/two.txt")))
assert(!Utils.isInDirectory(parentDir, new File(parentDir, "one/two/three.txt")))
// Siblings should fail
assert(!Utils.isInDirectory(childDir1, childDir1b))
assert(!Utils.isInDirectory(childDir1, childFile1))
assert(!Utils.isInDirectory(childDir2, childDir2b))
assert(!Utils.isInDirectory(childDir2, childFile2))
// Null files should fail without throwing NPE
assert(!Utils.isInDirectory(parentDir, nullFile))
assert(!Utils.isInDirectory(childFile3, nullFile))
assert(!Utils.isInDirectory(nullFile, parentDir))
assert(!Utils.isInDirectory(nullFile, childFile3))
}
test("circular buffer: if nothing was written to the buffer, display nothing") {
val buffer = new CircularBuffer(4)
assert(buffer.toString === "")
}
test("circular buffer: if the buffer isn't full, print only the contents written") {
val buffer = new CircularBuffer(10)
val stream = new PrintStream(buffer, true, "UTF-8")
stream.print("test")
assert(buffer.toString === "test")
}
test("circular buffer: data written == size of the buffer") {
val buffer = new CircularBuffer(4)
val stream = new PrintStream(buffer, true, "UTF-8")
// fill the buffer to its exact size so that it just hits overflow
stream.print("test")
assert(buffer.toString === "test")
// add more data to the buffer
stream.print("12")
assert(buffer.toString === "st12")
}
test("circular buffer: multiple overflow") {
val buffer = new CircularBuffer(25)
val stream = new PrintStream(buffer, true, "UTF-8")
stream.print("test circular test circular test circular test circular test circular")
assert(buffer.toString === "st circular test circular")
}
test("nanSafeCompareDoubles") {
def shouldMatchDefaultOrder(a: Double, b: Double): Unit = {
assert(Utils.nanSafeCompareDoubles(a, b) === JDouble.compare(a, b))
assert(Utils.nanSafeCompareDoubles(b, a) === JDouble.compare(b, a))
}
shouldMatchDefaultOrder(0d, 0d)
shouldMatchDefaultOrder(0d, 1d)
shouldMatchDefaultOrder(Double.MinValue, Double.MaxValue)
assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.NaN) === 0)
assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.PositiveInfinity) === 1)
assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.NegativeInfinity) === 1)
assert(Utils.nanSafeCompareDoubles(Double.PositiveInfinity, Double.NaN) === -1)
assert(Utils.nanSafeCompareDoubles(Double.NegativeInfinity, Double.NaN) === -1)
}
test("nanSafeCompareFloats") {
def shouldMatchDefaultOrder(a: Float, b: Float): Unit = {
assert(Utils.nanSafeCompareFloats(a, b) === JFloat.compare(a, b))
assert(Utils.nanSafeCompareFloats(b, a) === JFloat.compare(b, a))
}
shouldMatchDefaultOrder(0f, 0f)
shouldMatchDefaultOrder(1f, 1f)
shouldMatchDefaultOrder(Float.MinValue, Float.MaxValue)
assert(Utils.nanSafeCompareFloats(Float.NaN, Float.NaN) === 0)
assert(Utils.nanSafeCompareFloats(Float.NaN, Float.PositiveInfinity) === 1)
assert(Utils.nanSafeCompareFloats(Float.NaN, Float.NegativeInfinity) === 1)
assert(Utils.nanSafeCompareFloats(Float.PositiveInfinity, Float.NaN) === -1)
assert(Utils.nanSafeCompareFloats(Float.NegativeInfinity, Float.NaN) === -1)
}
test("isDynamicAllocationEnabled") {
val conf = new SparkConf()
conf.set("spark.master", "yarn")
conf.set("spark.submit.deployMode", "client")
assert(Utils.isDynamicAllocationEnabled(conf) === false)
assert(Utils.isDynamicAllocationEnabled(
conf.set("spark.dynamicAllocation.enabled", "false")) === false)
assert(Utils.isDynamicAllocationEnabled(
conf.set("spark.dynamicAllocation.enabled", "true")) === true)
assert(Utils.isDynamicAllocationEnabled(
conf.set("spark.executor.instances", "1")) === true)
assert(Utils.isDynamicAllocationEnabled(
conf.set("spark.executor.instances", "0")) === true)
assert(Utils.isDynamicAllocationEnabled(conf.set("spark.master", "local")) === false)
assert(Utils.isDynamicAllocationEnabled(conf.set("spark.dynamicAllocation.testing", "true")))
}
test("getDynamicAllocationInitialExecutors") {
val conf = new SparkConf()
assert(Utils.getDynamicAllocationInitialExecutors(conf) === 0)
assert(Utils.getDynamicAllocationInitialExecutors(
conf.set("spark.dynamicAllocation.minExecutors", "3")) === 3)
assert(Utils.getDynamicAllocationInitialExecutors( // should use minExecutors
conf.set("spark.executor.instances", "2")) === 3)
assert(Utils.getDynamicAllocationInitialExecutors( // should use executor.instances
conf.set("spark.executor.instances", "4")) === 4)
assert(Utils.getDynamicAllocationInitialExecutors( // should use executor.instances
conf.set("spark.dynamicAllocation.initialExecutors", "3")) === 4)
assert(Utils.getDynamicAllocationInitialExecutors( // should use initialExecutors
conf.set("spark.dynamicAllocation.initialExecutors", "5")) === 5)
assert(Utils.getDynamicAllocationInitialExecutors( // should use minExecutors
conf.set("spark.dynamicAllocation.initialExecutors", "2")
.set("spark.executor.instances", "1")) === 3)
}
test("Set Spark CallerContext") {
val context = "test"
new CallerContext(context).setCurrentContext()
if (CallerContext.callerContextSupported) {
val callerContext = Utils.classForName("org.apache.hadoop.ipc.CallerContext")
assert(s"SPARK_$context" ===
callerContext.getMethod("getCurrent").invoke(null).toString)
}
}
test("encodeFileNameToURIRawPath") {
assert(Utils.encodeFileNameToURIRawPath("abc") === "abc")
assert(Utils.encodeFileNameToURIRawPath("abc xyz") === "abc%20xyz")
assert(Utils.encodeFileNameToURIRawPath("abc:xyz") === "abc:xyz")
}
test("decodeFileNameInURI") {
assert(Utils.decodeFileNameInURI(new URI("files:///abc/xyz")) === "xyz")
assert(Utils.decodeFileNameInURI(new URI("files:///abc")) === "abc")
assert(Utils.decodeFileNameInURI(new URI("files:///abc%20xyz")) === "abc xyz")
}
test("Kill process") {
// Verify that we can terminate a process even if it is in a bad state. This is only run
// on UNIX since it does some OS specific things to verify the correct behavior.
if (SystemUtils.IS_OS_UNIX) {
def getPid(p: Process): Int = {
val f = p.getClass().getDeclaredField("pid")
f.setAccessible(true)
f.get(p).asInstanceOf[Int]
}
def pidExists(pid: Int): Boolean = {
val p = Runtime.getRuntime.exec(s"kill -0 $pid")
p.waitFor()
p.exitValue() == 0
}
def signal(pid: Int, s: String): Unit = {
val p = Runtime.getRuntime.exec(s"kill -$s $pid")
p.waitFor()
}
      // Start up a process that runs 'sleep 10'. Terminate it and assert that termination
      // takes less time than the sleep and that the process is no longer there.
val startTimeMs = System.currentTimeMillis()
val process = new ProcessBuilder("sleep", "10").start()
val pid = getPid(process)
try {
assert(pidExists(pid))
val terminated = Utils.terminateProcess(process, 5000)
assert(terminated.isDefined)
process.waitFor(5, TimeUnit.SECONDS)
val durationMs = System.currentTimeMillis() - startTimeMs
assert(durationMs < 5000)
assert(!pidExists(pid))
} finally {
// Forcibly kill the test process just in case.
signal(pid, "SIGKILL")
}
val versionParts = System.getProperty("java.version").split("[+.\\\\-]+", 3)
var majorVersion = versionParts(0).toInt
if (majorVersion == 1) majorVersion = versionParts(1).toInt
if (majorVersion >= 8) {
// We'll make sure that forcibly terminating a process works by
// creating a very misbehaving process. It ignores SIGTERM and has been SIGSTOPed. On
// older versions of java, this will *not* terminate.
val file = File.createTempFile("temp-file-name", ".tmp")
file.deleteOnExit()
val cmd =
s"""
|#!/bin/bash
|trap "" SIGTERM
|sleep 10
""".stripMargin
Files.write(cmd.getBytes(StandardCharsets.UTF_8), file)
file.getAbsoluteFile.setExecutable(true)
val process = new ProcessBuilder(file.getAbsolutePath).start()
val pid = getPid(process)
assert(pidExists(pid))
try {
signal(pid, "SIGSTOP")
val start = System.currentTimeMillis()
val terminated = Utils.terminateProcess(process, 5000)
assert(terminated.isDefined)
process.waitFor(5, TimeUnit.SECONDS)
val duration = System.currentTimeMillis() - start
assert(duration < 6000) // add a little extra time to allow a force kill to finish
assert(!pidExists(pid))
} finally {
signal(pid, "SIGKILL")
}
}
}
}
test("chi square test of randomizeInPlace") {
// Parameters
val arraySize = 10
val numTrials = 1000
val threshold = 0.05
val seed = 1L
// results(i)(j): how many times Utils.randomize moves an element from position j to position i
val results = Array.ofDim[Long](arraySize, arraySize)
// This must be seeded because even a fair random process will fail this test with
// probability equal to the value of `threshold`, which is inconvenient for a unit test.
val rand = new java.util.Random(seed)
val range = 0 until arraySize
for {
_ <- 0 until numTrials
trial = Utils.randomizeInPlace(range.toArray, rand)
i <- range
} results(i)(trial(i)) += 1L
val chi = new ChiSquareTest()
// We expect an even distribution; this array will be rescaled by `chiSquareTest`
val expected = Array.fill(arraySize * arraySize)(1.0)
val observed = results.flatten
    // Performs Pearson's chi-squared test. Using the sum of squares as the test statistic, this
    // gives the probability that a uniform distribution would produce results as extreme as
    // `observed`.
val pValue = chi.chiSquareTest(expected, observed)
assert(pValue > threshold)
}
test("redact sensitive information") {
val sparkConf = new SparkConf
// Set some secret keys
val secretKeys = Seq(
"spark.executorEnv.HADOOP_CREDSTORE_PASSWORD",
"spark.my.password",
"spark.my.sECreT")
secretKeys.foreach { key => sparkConf.set(key, "sensitive_value") }
// Set a non-secret key
sparkConf.set("spark.regular.property", "regular_value")
// Set a property with a regular key but secret in the value
sparkConf.set("spark.sensitive.property", "has_secret_in_value")
// Redact sensitive information
val redactedConf = Utils.redact(sparkConf, sparkConf.getAll).toMap
// Assert that secret information got redacted while the regular property remained the same
secretKeys.foreach { key => assert(redactedConf(key) === Utils.REDACTION_REPLACEMENT_TEXT) }
assert(redactedConf("spark.regular.property") === "regular_value")
assert(redactedConf("spark.sensitive.property") === Utils.REDACTION_REPLACEMENT_TEXT)
}
test("tryWithSafeFinally") {
var e = new Error("Block0")
val finallyBlockError = new Error("Finally Block")
var isErrorOccurred = false
// if the try and finally blocks throw different exception instances
try {
Utils.tryWithSafeFinally { throw e }(finallyBlock = { throw finallyBlockError })
} catch {
case t: Error =>
assert(t.getSuppressed.head == finallyBlockError)
isErrorOccurred = true
}
assert(isErrorOccurred)
// if the try and finally blocks throw the same exception instance then it should not
// try to add to suppressed and get IllegalArgumentException
e = new Error("Block1")
isErrorOccurred = false
try {
Utils.tryWithSafeFinally { throw e }(finallyBlock = { throw e })
} catch {
case t: Error =>
assert(t.getSuppressed.length == 0)
isErrorOccurred = true
}
assert(isErrorOccurred)
// if the try throws the exception and finally doesn't throw exception
e = new Error("Block2")
isErrorOccurred = false
try {
Utils.tryWithSafeFinally { throw e }(finallyBlock = {})
} catch {
case t: Error =>
assert(t.getSuppressed.length == 0)
isErrorOccurred = true
}
assert(isErrorOccurred)
// if the try and finally block don't throw exception
Utils.tryWithSafeFinally {}(finallyBlock = {})
}
test("tryWithSafeFinallyAndFailureCallbacks") {
var e = new Error("Block0")
val catchBlockError = new Error("Catch Block")
val finallyBlockError = new Error("Finally Block")
var isErrorOccurred = false
TaskContext.setTaskContext(TaskContext.empty())
// if the try, catch and finally blocks throw different exception instances
try {
Utils.tryWithSafeFinallyAndFailureCallbacks { throw e }(
catchBlock = { throw catchBlockError }, finallyBlock = { throw finallyBlockError })
} catch {
case t: Error =>
assert(t.getSuppressed.head == catchBlockError)
assert(t.getSuppressed.last == finallyBlockError)
isErrorOccurred = true
}
assert(isErrorOccurred)
// if the try, catch and finally blocks throw the same exception instance then it should not
// try to add to suppressed and get IllegalArgumentException
e = new Error("Block1")
isErrorOccurred = false
try {
Utils.tryWithSafeFinallyAndFailureCallbacks { throw e }(catchBlock = { throw e },
finallyBlock = { throw e })
} catch {
case t: Error =>
assert(t.getSuppressed.length == 0)
isErrorOccurred = true
}
assert(isErrorOccurred)
// if the try throws the exception, catch and finally don't throw exceptions
e = new Error("Block2")
isErrorOccurred = false
try {
Utils.tryWithSafeFinallyAndFailureCallbacks { throw e }(catchBlock = {}, finallyBlock = {})
} catch {
case t: Error =>
assert(t.getSuppressed.length == 0)
isErrorOccurred = true
}
assert(isErrorOccurred)
// if the try, catch and finally blocks don't throw exceptions
Utils.tryWithSafeFinallyAndFailureCallbacks {}(catchBlock = {}, finallyBlock = {})
TaskContext.unset
}
}
|
narahari92/spark
|
core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
|
Scala
|
apache-2.0
| 45,547
|
//
// Copyright 2016 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package commbank.coppersmith.scalding
object TestUtil {
// This is quite general and could probably be lifted up to one of the dependency projects
def withoutLogging[T](loggerNames: String*)(f: =>T): T = {
import org.apache.log4j.{Level, Logger}
val loggerLevels: Iterable[(Logger, Level)] = loggerNames.map { loggerName =>
val logger = Logger.getLogger(loggerName)
val loggerLevel = logger.getLevel
(logger, loggerLevel)
}
loggerLevels.foreach { case (logger, _) => logger.setLevel(Level.OFF) }
try {
f
} finally {
loggerLevels.foreach { case (logger, level) => logger.setLevel(level) }
}
}
}
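// A usage sketch (logger name and job are hypothetical, not part of the original file):
// silence a chatty logger for the duration of one block, with the previous level
// restored afterwards even if the block throws.
object WithoutLoggingExample {
  def main(args: Array[String]): Unit = {
    val result = TestUtil.withoutLogging("org.apache.hadoop") {
      org.apache.log4j.Logger.getLogger("org.apache.hadoop").info("suppressed")
      42
    }
    println(result) // 42; the logger's original level is restored by now
  }
}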
|
CommBank/coppersmith
|
scalding/src/test/scala/commbank/coppersmith/scalding/TestUtil.scala
|
Scala
|
apache-2.0
| 1,297
|
package com.outr.arango.api.model
import io.circe.Json
case class GeneralGraphVertexGetHttpExamplesRc403(error: Boolean,
code: Option[Int] = None,
errorMessage: Option[String] = None,
errorNum: Option[Int] = None)
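// Illustrative construction (values are made up, not part of the original file):
// how a 403 graph-API error response is represented by this generated model.
object Rc403Example {
  val forbidden = GeneralGraphVertexGetHttpExamplesRc403(
    error = true,
    code = Some(403),
    errorMessage = Some("forbidden"),
    errorNum = Some(11))
}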
|
outr/arangodb-scala
|
api/src/main/scala/com/outr/arango/api/model/GeneralGraphVertexGetHttpExamplesRc403.scala
|
Scala
|
mit
| 366
|
package com.sksamuel.elastic4s.handlers.fields
import com.sksamuel.elastic4s.fields.VersionField
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
object VersionFieldBuilderFn {
def toField(name: String, values: Map[String, Any]): VersionField = VersionField(name)
def build(field: VersionField): XContentBuilder = {
val builder = XContentFactory.jsonBuilder()
builder.field("type", field.`type`)
builder.endObject()
}
}
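// Usage sketch (assumes XContentBuilder exposes `string()`, as elsewhere in elastic4s):
// the builder emits only the type, e.g. {"type":"version"} for a field named "v"; the
// field name itself is written by the enclosing mapping builder.
object VersionFieldBuilderExample {
  import com.sksamuel.elastic4s.fields.VersionField
  def main(args: Array[String]): Unit =
    println(VersionFieldBuilderFn.build(VersionField("v")).string())
}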
|
sksamuel/elastic4s
|
elastic4s-handlers/src/main/scala/com/sksamuel/elastic4s/handlers/fields/VersionFieldBuilderFn.scala
|
Scala
|
apache-2.0
| 462
|
package org.openapitools.server.model
/**
* @param links for example: ''null''
* @param author for example: ''null''
* @param id for example: ''null''
* @param title for example: ''null''
* @param url for example: ''null''
* @param `class` for example: ''null''
*/
final case class PipelineBranchesitempullRequest (
links: Option[PipelineBranchesitempullRequestlinks],
author: Option[String],
id: Option[String],
title: Option[String],
url: Option[String],
`class`: Option[String]
)
|
cliffano/swaggy-jenkins
|
clients/scala-akka-http-server/generated/src/main/scala/org/openapitools/server/model/PipelineBranchesitempullRequest.scala
|
Scala
|
mit
| 510
|
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package matryoshka
import scalaz._
/** To avoid diverging implicits with fixed-point types, we need to defer the
* lookup.
*/
trait Delay[F[_], G[_]] {
def apply[A](fa: F[A]): F[G[A]]
}
object Delay {
/** Delay used to be a type alias for a natural transformation:
* type Delay[F[_], G[_]] = F ~> (F ∘ G)#λ
* As an interim measure, this lifts natural transformations of the above
* form into the Delay type class. But the end goal is to be unconnected to
* NaturalTransformation.
*/
def fromNT[F[_], G[_]](nt: F ~> (F ∘ G)#λ): Delay[F, G] =
new Delay[F, G] { def apply[A](fa: F[A]): F[G[A]] = nt(fa) }
def apply[F[_], G[_]](implicit ev: Delay[F, G]): Delay[F, G] = ev
}
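// A small instance sketch (not part of the original file): deriving Show[Option[A]]
// from Show[A] via Delay, using scalaz's Show.shows constructor.
object DelayExample {
  implicit val showOption: Delay[Show, Option] = new Delay[Show, Option] {
    def apply[A](fa: Show[A]): Show[Option[A]] = Show.shows {
      case Some(a) => "Some(" + fa.shows(a) + ")"
      case None    => "None"
    }
  }
}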
|
slamdata/matryoshka
|
core/shared/src/main/scala/matryoshka/Delay.scala
|
Scala
|
apache-2.0
| 1,323
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.batch.sql.join
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.calcite.CalciteConfig
import org.apache.flink.table.plan.optimize.program.FlinkBatchProgram
import org.apache.flink.table.plan.stream.sql.join.TestTemporalTable
import org.apache.flink.table.util.TableTestBase
import org.junit.Assert.{assertTrue, fail}
import org.junit.{Before, Test}
class LookupJoinTest extends TableTestBase {
private val testUtil = batchTestUtil()
@Before
def before(): Unit = {
testUtil.addDataStream[(Int, String, Long)]("T0", 'a, 'b, 'c)
testUtil.addDataStream[(Int, String, Long, Double)]("T1", 'a, 'b, 'c, 'd)
testUtil.addDataStream[(Int, String, Int)]("nonTemporal", 'id, 'name, 'age)
testUtil.tableEnv.registerTableSource("temporalTest", new TestTemporalTable)
val myTable = testUtil.tableEnv.sqlQuery("SELECT *, PROCTIME() as proctime FROM T0")
testUtil.tableEnv.registerTable("MyTable", myTable)
}
@Test
def testJoinInvalidJoinTemporalTable(): Unit = {
    // a temporal table reference must be followed by a period specification (FOR SYSTEM_TIME AS OF ...)
expectExceptionThrown(
"SELECT * FROM MyTable AS T JOIN temporalTest T.proc AS D ON T.a = D.id",
"SQL parse failed",
classOf[SqlParserException])
// can't query a dim table directly
expectExceptionThrown(
"SELECT * FROM temporalTest FOR SYSTEM_TIME AS OF TIMESTAMP '2017-08-09 14:36:11'",
"Cannot generate a valid execution plan for the given query",
classOf[TableException]
)
// only support left or inner join
// Calcite does not allow FOR SYSTEM_TIME AS OF non-nullable left table field to Right Join.
    // There is an exception:
// java.lang.AssertionError
// at SqlToRelConverter.getCorrelationUse(SqlToRelConverter.java:2517)
// at SqlToRelConverter.createJoin(SqlToRelConverter.java:2426)
// at SqlToRelConverter.convertFrom(SqlToRelConverter.java:2071)
// at SqlToRelConverter.convertSelectImpl(SqlToRelConverter.java:646)
// at SqlToRelConverter.convertSelect(SqlToRelConverter.java:627)
// at SqlToRelConverter.convertQueryRecursive(SqlToRelConverter.java:3100)
// at SqlToRelConverter.convertQuery(SqlToRelConverter.java:563)
// at org.apache.flink.table.calcite.FlinkPlannerImpl.rel(FlinkPlannerImpl.scala:125)
expectExceptionThrown(
"SELECT * FROM MyTable AS T RIGHT JOIN temporalTest " +
"FOR SYSTEM_TIME AS OF T.proctime AS D ON T.a = D.id",
null,
classOf[AssertionError]
)
// only support join on raw key of right table
expectExceptionThrown(
"SELECT * FROM MyTable AS T LEFT JOIN temporalTest " +
"FOR SYSTEM_TIME AS OF T.proctime AS D ON T.a + 1 = D.id + 1",
"Temporal table join requires an equality condition on fields of table " +
"[TestTemporalTable(id, name, age)].",
classOf[TableException]
)
}
@Test
def testLogicalPlan(): Unit = {
val sql1 =
"""
|SELECT b, a, sum(c) c, sum(d) d, PROCTIME() as proctime
|FROM T1
|GROUP BY a, b
""".stripMargin
val sql2 =
s"""
|SELECT T.* FROM ($sql1) AS T
|JOIN temporalTest FOR SYSTEM_TIME AS OF T.proctime AS D
|ON T.a = D.id
|WHERE D.age > 10
""".stripMargin
val sql =
s"""
|SELECT b, count(a), sum(c), sum(d)
|FROM ($sql2) AS T
|GROUP BY b
""".stripMargin
val programs = FlinkBatchProgram.buildProgram(testUtil.tableEnv.getConfig.getConf)
programs.remove(FlinkBatchProgram.PHYSICAL)
val calciteConfig = CalciteConfig.createBuilder(testUtil.tableEnv.getConfig.getCalciteConfig)
.replaceBatchProgram(programs).build()
testUtil.tableEnv.getConfig.setCalciteConfig(calciteConfig)
testUtil.verifyPlan(sql)
}
@Test
def testLogicalPlanWithImplicitTypeCast(): Unit = {
val programs = FlinkBatchProgram.buildProgram(testUtil.tableEnv.getConfig.getConf)
programs.remove(FlinkBatchProgram.PHYSICAL)
val calciteConfig = CalciteConfig.createBuilder(testUtil.tableEnv.getConfig.getCalciteConfig)
.replaceBatchProgram(programs).build()
testUtil.tableEnv.getConfig.setCalciteConfig(calciteConfig)
thrown.expect(classOf[TableException])
thrown.expectMessage("VARCHAR(2147483647) and INTEGER does not have common type now")
testUtil.verifyPlan("SELECT * FROM MyTable AS T JOIN temporalTest "
+ "FOR SYSTEM_TIME AS OF T.proctime AS D ON T.b = D.id")
}
@Test
def testJoinInvalidNonTemporalTable(): Unit = {
    // a non-temporal table can't take a period specification
expectExceptionThrown(
"SELECT * FROM MyTable AS T JOIN nonTemporal " +
"FOR SYSTEM_TIME AS OF T.proctime AS D ON T.a = D.id",
"Temporal table join only support join on a LookupableTableSource",
classOf[TableException])
}
@Test
def testJoinTemporalTable(): Unit = {
val sql = "SELECT * FROM MyTable AS T JOIN temporalTest " +
"FOR SYSTEM_TIME AS OF T.proctime AS D ON T.a = D.id"
testUtil.verifyPlan(sql)
}
@Test
def testLeftJoinTemporalTable(): Unit = {
val sql = "SELECT * FROM MyTable AS T LEFT JOIN temporalTest " +
"FOR SYSTEM_TIME AS OF T.proctime AS D ON T.a = D.id"
testUtil.verifyPlan(sql)
}
@Test
def testJoinTemporalTableWithNestedQuery(): Unit = {
val sql = "SELECT * FROM " +
"(SELECT a, b, proctime FROM MyTable WHERE c > 1000) AS T " +
"JOIN temporalTest " +
"FOR SYSTEM_TIME AS OF T.proctime AS D ON T.a = D.id"
testUtil.verifyPlan(sql)
}
@Test
def testJoinTemporalTableWithProjectionPushDown(): Unit = {
val sql =
"""
|SELECT T.*, D.id
|FROM MyTable AS T
|JOIN temporalTest FOR SYSTEM_TIME AS OF T.proctime AS D
|ON T.a = D.id
""".stripMargin
testUtil.verifyPlan(sql)
}
@Test
def testJoinTemporalTableWithFilterPushDown(): Unit = {
val sql =
"""
|SELECT * FROM MyTable AS T
|JOIN temporalTest FOR SYSTEM_TIME AS OF T.proctime AS D
|ON T.a = D.id AND D.age = 10
|WHERE T.c > 1000
""".stripMargin
testUtil.verifyPlan(sql)
}
@Test
def testAvoidAggregatePushDown(): Unit = {
val sql1 =
"""
|SELECT b, a, sum(c) c, sum(d) d, PROCTIME() as proctime
|FROM T1
|GROUP BY a, b
""".stripMargin
val sql2 =
s"""
|SELECT T.* FROM ($sql1) AS T
|JOIN temporalTest FOR SYSTEM_TIME AS OF T.proctime AS D
|ON T.a = D.id
|WHERE D.age > 10
""".stripMargin
val sql =
s"""
|SELECT b, count(a), sum(c), sum(d)
|FROM ($sql2) AS T
|GROUP BY b
""".stripMargin
testUtil.verifyPlan(sql)
}
@Test
def testReusing(): Unit = {
testUtil.tableEnv.getConfig.getConf.setBoolean(
PlannerConfigOptions.SQL_OPTIMIZER_REUSE_SUB_PLAN_ENABLED, true)
val sql1 =
"""
|SELECT b, a, sum(c) c, sum(d) d, PROCTIME() as proctime
|FROM T1
|GROUP BY a, b
""".stripMargin
val sql2 =
s"""
|SELECT * FROM ($sql1) AS T
|JOIN temporalTest FOR SYSTEM_TIME AS OF T.proctime AS D
|ON T.a = D.id
|WHERE D.age > 10
""".stripMargin
val sql3 =
s"""
|SELECT id as a, b FROM ($sql2) AS T
""".stripMargin
val sql =
s"""
|SELECT count(T1.a), count(T1.id), sum(T2.a)
|FROM ($sql2) AS T1, ($sql3) AS T2
|WHERE T1.a = T2.a
|GROUP BY T1.b, T2.b
""".stripMargin
testUtil.verifyPlan(sql)
}
// ==========================================================================================
// ==========================================================================================
private def expectExceptionThrown(
sql: String,
keywords: String,
clazz: Class[_ <: Throwable] = classOf[ValidationException]): Unit = {
try {
testUtil.verifyExplain(sql)
fail(s"Expected a $clazz, but no exception is thrown.")
} catch {
case e if e.getClass == clazz =>
if (keywords != null) {
assertTrue(
s"The actual exception message \n${e.getMessage}\n" +
s"doesn't contain expected keyword \n$keywords\n",
e.getMessage.contains(keywords))
}
case e: Throwable =>
e.printStackTrace()
fail(s"Expected throw ${clazz.getSimpleName}, but is $e.")
}
}
}
|
shaoxuan-wang/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/sql/join/LookupJoinTest.scala
|
Scala
|
apache-2.0
| 9,326
|
/*
Copyright 2016 Tunalytics Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
"http://www.apache.org/licenses/LICENSE-2.0".
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.tunalytics.loader.transformer.topology.signals
import java.util.Map
import backtype.storm.tuple.Tuple
import backtype.storm.task.OutputCollector
import backtype.storm.task.TopologyContext
import backtype.storm.topology.OutputFieldsDeclarer
import backtype.storm.topology.base.BaseRichBolt
import com.typesafe.scalalogging.LazyLogging
import org.tunalytics.loader.logging.LoggerAware
/**
* Signal consuming bolt.
*
* For testing purposes! To be removed in production-ready version.
*/
class SignalBolt extends BaseRichBolt with LazyLogging {
// TODO: remove this class in production-ready version
logger.trace("instance created")
private var configuration: Map[_, _] = _
private var context: TopologyContext = _
private var collector: OutputCollector = _
def execute(tuple: Tuple) {
logger.trace("processing tuple...")
val signal = tuple.getValueByField("signal").asInstanceOf[Signal]
logger.debug("received: " + signal)
collector.ack(tuple)
logger.trace("tuple processed")
}
def prepare(configuration: Map[_, _], context: TopologyContext,
collector: OutputCollector) {
logger.trace("preparing...")
this.configuration = configuration
this.context = context
this.collector = collector
logger.trace("prepared")
}
def declareOutputFields(declarer: OutputFieldsDeclarer) {
}
}
|
tunalytics/loader
|
transformer/src/main/scala/org/tunalytics/loader/transformer/topology/signals/SignalBolt.scala
|
Scala
|
apache-2.0
| 1,983
|
package Yosemite.framework.master
private[Yosemite] object SlaveState extends Enumeration {
type SlaveState = Value
val ALIVE, DEAD, DECOMMISSIONED = Value
}
|
zhanghan1990/Yosemite
|
core/src/main/scala/Yosemite/framework/master/SlaveState.scala
|
Scala
|
apache-2.0
| 164
|
package cgta.oscala
package util
import java.io.InputStream
//////////////////////////////////////////////////////////////
// Copyright (c) 2015 Ben Jackman
// All Rights Reserved
// please contact ben@jackman.biz
// for licensing inquiries
// Created by bjackman @ 8/4/15 3:19 PM
//////////////////////////////////////////////////////////////
class DebugInputStream(that: InputStream, log: (String) => Unit) extends InputStream {
var c = 0L
var t = 0L
override def read(): Int = {
c += 1
if (c >= 30 * Million) {
t += c
c = 0
log(s"Total Uploaded MBytes: ${t / Million}")
}
that.read()
}
override def close() { that.close() }
}
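// A minimal usage sketch, assuming `Million` comes from the cgta.oscala package
// object (as the class itself assumes): wrap any InputStream so that progress
// is logged every 30 million bytes read.
object DebugInputStreamExample extends App {
  import java.io.ByteArrayInputStream
  val in = new DebugInputStream(new ByteArrayInputStream(Array.fill[Byte](1024)(0)), println)
  while (in.read() != -1) () // stream too small to trigger a log line here
  in.close()
}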
|
cgta/open
|
oscala/jvm/src/main/scala/cgta/oscala/util/DebugInputStream.scala
|
Scala
|
mit
| 676
|
package com.stefansavev.randomprojections.utils
import java.io.File
import java.util.Random
import scala.reflect.ClassTag
case class TimedResult[T](result: T, timeMillisecs: Long)
object Utils{
def todo(): Nothing = {
throw new RuntimeException("todo")
}
def internalError(): Nothing = {
throw new RuntimeException("internal error")
}
def failWith(msg: String): Nothing = {
throw new RuntimeException(msg)
}
def timed[R](msg: String, codeBlock: => R): TimedResult[R] = {
val start = System.currentTimeMillis()
val result = codeBlock // call-by-name
val end = System.currentTimeMillis()
val elapsed = end - start
val timeAsStr = if (elapsed >= 1000) (elapsed/1000.0 + " secs.") else (elapsed + " ms.")
println(s"Time for '${msg}' ${timeAsStr}")
TimedResult(result, elapsed)
}
def combinePaths(path1: String, path2: String): String =
{
val file1 = new File(path1)
val file2 = new File(file1, path2)
file2.getPath()
}
}
object ImplicitExtensions{
import scala.reflect.ClassTag
implicit class RichArray(v: Array.type){
def zip3[A: ClassTag, B: ClassTag, C: ClassTag](a: Array[A], b: Array[B], c: Array[C]): Array[(A, B, C)] = {
a.zip(b).zip(c).map({case ((va, vb), vc) => (va, vb, vc)})
}
def unzip3[A: ClassTag, B: ClassTag, C: ClassTag](arr: Array[(A, B, C)]): (Array[A], Array[B], Array[C]) = {
val dim = arr.length
val (a,b,c) = (Array.ofDim[A](dim),Array.ofDim[B](dim),Array.ofDim[C](dim))
var i = 0
while(i < dim){
val (va,vb,vc) = arr(i)
a(i) = va
b(i) = vb
c(i) = vc
i += 1
}
(a,b,c)
}
def init[T : ClassTag](sz: Int, f: () => T ): Array[T] = {
val arr = Array.ofDim[T](sz)
var i = 0
while(i < sz){
arr(i) = f()
i += 1
}
arr
}
def init_i[T : ClassTag](sz: Int, f: Int => T): Array[T] = {
val arr = Array.ofDim[T](sz)
var i = 0
while(i < sz){
arr(i) = f(i)
i += 1
}
arr
}
def init2D[T: ClassTag](rows: Int, cols: Int, f: () => T): Array[Array[T]] = {
init(rows, () => init(cols, f))
}
def init2D_ij[T: ClassTag](rows: Int, cols: Int, f: (Int, Int) => T): Array[Array[T]] = {
init_i(rows, i => init_i(cols, j => f(i,j)))
}
}
}
object RandomExt{
import ImplicitExtensions.RichArray
def shuffleArray[T: ClassTag](rnd: Random, data: Array[T]): Array[T] = {
val randomNumbers = Array.init(data.length, () => rnd.nextDouble())
data.zip(randomNumbers).sortBy(_._2).map(_._1).toArray
}
}
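// A minimal usage sketch: `timed` takes its block by name and measures whatever
// expression it is handed, while RichArray adds zip3/init helpers directly on
// the Array companion object.
object UtilsExample extends App {
  import ImplicitExtensions.RichArray
  val TimedResult(total, millis) = Utils.timed("sum to 1e6", (1L to 1000000L).sum)
  println(s"$total in ${millis}ms")
  val triples = Array.zip3(Array(1, 2), Array("a", "b"), Array(1.0, 2.0))
  // triples == Array((1, "a", 1.0), (2, "b", 2.0))
}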
|
codeaudit/random-projections-at-berlinbuzzwords
|
src/main/scala/com/stefansavev/randomprojections/utils/Utils.scala
|
Scala
|
apache-2.0
| 2,635
|
object Test {
def main(args: Array[String]): Unit = {
val f = 0.0 to 1.0 by 1.0 / 3.0
assert(f.size == 4)
}
}
|
folone/dotty
|
tests/run/t4201.scala
|
Scala
|
bsd-3-clause
| 123
|
package io.vamp.bootstrap
import akka.actor.ActorSystem
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import io.vamp.common.Namespace
import io.vamp.http_api.HttpApiBootstrap
import scala.concurrent.duration.{ FiniteDuration, MILLISECONDS }
trait Vamp extends App {
protected implicit val system: ActorSystem = ActorSystem("vamp")
protected implicit val timeout: Timeout = Timeout(FiniteDuration(ConfigFactory.load().getDuration("vamp.bootstrap.timeout", MILLISECONDS), MILLISECONDS))
protected lazy val bootstrap = {
implicit val namespace: Namespace = Namespace(ConfigFactory.load().getString("vamp.namespace"))
List() :+
new LoggingBootstrap {
lazy val logo =
s"""
|██╗ ██╗ █████╗ ███╗ ███╗██████╗
|██║ ██║██╔══██╗████╗ ████║██╔══██╗
|██║ ██║███████║██╔████╔██║██████╔╝
|╚██╗ ██╔╝██╔══██║██║╚██╔╝██║██╔═══╝
| ╚████╔╝ ██║ ██║██║ ╚═╝ ██║██║
| ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝
| $version
| by magnetic.io
|""".stripMargin
} :+
new KamonBootstrap :+
new ConfigurationBootstrap :+
new ClassProviderActorBootstrap :+
new ActorBootstrap(new HttpApiBootstrap :: Nil)
}
sys.addShutdownHook {
bootstrap.reverse.foreach(_.stop())
system.terminate()
}
bootstrap.foreach(_.start())
}
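// A note on the shutdown hook above: bootstraps start in declaration order and
// stop in reverse, so later components (e.g. the HTTP API) are torn down before
// the ones they depend on. The same pattern in miniature:
object ReverseStopSketch extends App {
  val startOrder = List("logging", "kamon", "config", "http")
  val stopOrder  = startOrder.reverse // http first, logging last
  println(stopOrder.mkString(" -> "))
}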
|
dragoslav/vamp
|
bootstrap/src/main/scala/io/vamp/bootstrap/Vamp.scala
|
Scala
|
apache-2.0
| 1,821
|
trait A[T]:
def f: T
trait B[T: A]:
println(summon[A[T]].f)
trait C[T: A] extends B[T]
given a1: A[Int] with
def f = 1
class D extends C[Int]:
given a2: A[Int] with
def f = 2
@main def Test = D()
|
dotty-staging/dotty
|
tests/run/i11966.scala
|
Scala
|
apache-2.0
| 214
|
package com.xah.chat.utils
import android.content.Context
import android.provider.Settings
import android.telephony.TelephonyManager
/**
* Project: xaHChat
* Created on 2015-03-02 by
* lemonxah -
* https://github.com/lemonxah
* http://stackoverflow.com/users/2919672/lemon-xah
*/
object DeviceUtils {
def hex2bytes(hex: String): Array[Byte] = {
hex.replaceAll("[^0-9A-Fa-f]","").sliding(2,2).toArray.map(Integer.parseInt(_,16).toByte)
}
def bytes2Hex(bytes: Array[Byte], sep: Option[String] = None): String = {
bytes.map("%02x".format(_)) mkString (sep match {
case None => ""
case Some(s) => s
})
}
def getDeviceId(context: Context): String = {
var id: String = getUniqueID(context)
if (id == null) id = Settings.Secure.getString(context.getContentResolver, Settings.Secure.ANDROID_ID)
id
}
  private def getStringIntegerHexBlocks(v: Int): String = v.toString.sliding(4, 4).mkString("-") // sliding on a String yields String chunks, which mkString joins cleanly (sliding on the char array would join Array.toString representations)
private def getUniqueID(context: Context): String = {
var telephonyDeviceId: String = "NoTelephonyId"
var androidDeviceId: String = "NoAndroidId"
try {
val tm: TelephonyManager = context.getSystemService(Context.TELEPHONY_SERVICE).asInstanceOf[TelephonyManager]
telephonyDeviceId = tm.getDeviceId
if (telephonyDeviceId == null) {
telephonyDeviceId = "NoTelephonyId"
}
}
catch {
case e: Exception =>
}
try {
androidDeviceId = android.provider.Settings.Secure.getString(context.getContentResolver, android.provider.Settings.Secure.ANDROID_ID)
if (androidDeviceId == null) {
androidDeviceId = "NoAndroidId"
}
}
catch {
case e: Exception =>
}
try {
getStringIntegerHexBlocks(androidDeviceId.hashCode) + "-" + getStringIntegerHexBlocks(telephonyDeviceId.hashCode)
}
catch {
case e: Exception => "0000-0000-1111-1111"
}
}
}
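// A minimal usage sketch: hex2bytes strips any non-hex characters before pairing
// digits, and bytes2Hex round-trips with an optional separator.
object DeviceUtilsExample extends App {
  val bytes = DeviceUtils.hex2bytes("de:ad:BE:EF")
  println(DeviceUtils.bytes2Hex(bytes))            // deadbeef
  println(DeviceUtils.bytes2Hex(bytes, Some(":"))) // de:ad:be:ef
}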
|
lemonxah/xaHChat
|
src/main/scala/com/xah/chat/utils/DeviceUtils.scala
|
Scala
|
mit
| 1,921
|
/*
* FFTLogicImpl.scala
* (FScape)
*
* Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.fscape.stream.impl
import akka.stream.{Attributes, FanInShape3, FanInShape4}
import de.sciss.fscape.Util
import de.sciss.fscape.stream.impl.Handlers._
import de.sciss.fscape.stream.impl.logic.WindowedInDOutD
import de.sciss.fscape.stream.{Allocator, BufD, BufI, Builder, InD, InI, Layer, OutD, OutI}
import de.sciss.numbers
import de.sciss.transform4s.fft.DoubleFFT_1D
import scala.annotation.switch
import scala.math.max
/** Base class for 1-dimensional FFT transforms. */
trait FFTLogicImpl extends WindowedInDOutD {
this: Handlers[_] =>
// ---- abstract ----
protected def performFFT(): Unit
// ---- impl ----
protected final var fft: DoubleFFT_1D = _
protected final var timeSize : Int = _
protected final var fftSize : Int = -1
protected final var gain : Double = _
protected def gainFor(fftSize: Int): Double
override protected def stopped(): Unit = {
super.stopped()
fft = null
}
protected final def setFFTSize(n: Int): Unit =
if (fftSize != n) {
fftSize = n
fft = DoubleFFT_1D (n)
gain = gainFor(n)
}
override protected final def processWindow(): Unit = {
val fftBuf = winBuf
val offI = readOff.toInt
Util.clear(fftBuf, offI, fftBuf.length - offI)
performFFT()
}
}
abstract class FFTHalfStageImpl(name: String)
extends StageImpl[FanInShape4[BufD, BufI, BufI, BufI, BufD]](name) {
// ---- impl ----
final val shape: Shape = new FanInShape4(
in0 = InD (s"$name.in" ),
in1 = InI (s"$name.size" ),
in2 = InI (s"$name.padding"),
in3 = InI (s"$name.mode" ),
out = OutD(s"$name.out" )
)
final def connect(in: OutD, size: OutI, padding: OutI, mode: OutI)(implicit b: Builder): OutD = {
val stage = b.add(this)
b.connect(in , stage.in0)
b.connect(size , stage.in1)
b.connect(padding , stage.in2)
b.connect(mode , stage.in3)
stage.out
}
}
abstract class FFTFullStageImpl(name: String)
extends StageImpl[FanInShape3[BufD, BufI, BufI, BufD]](name) {
// ---- impl ----
final val shape: Shape = new FanInShape3(
in0 = InD (s"$name.in" ),
in1 = InI (s"$name.size" ),
in2 = InI (s"$name.padding"),
out = OutD(s"$name.out" )
)
final def connect(in: OutD, size: OutI, padding: OutI)(implicit b: Builder): OutD = {
val stage = b.add(this)
b.connect(in , stage.in0)
b.connect(size , stage.in1)
b.connect(padding , stage.in2)
stage.out
}
}
final class Real1FFTStageImpl(layer: Layer)(implicit a: Allocator)
extends FFTHalfStageImpl("Real1FFT") {
def createLogic(attr: Attributes): NodeImpl[Shape] = new Real1FFTLogicImpl(name, shape, layer)
}
final class Real1IFFTStageImpl(layer: Layer)(implicit a: Allocator)
extends FFTHalfStageImpl("Real1IFFT") {
def createLogic(attr: Attributes): NodeImpl[Shape] = new Real1IFFTLogicImpl(name, shape, layer)
}
final class Real1FullFFTStageImpl(layer: Layer)(implicit a: Allocator)
extends FFTFullStageImpl("Real1FullFFT") {
def createLogic(attr: Attributes): NodeImpl[Shape] = new Real1FullFFTLogicImpl(name, shape, layer)
}
final class Real1FullIFFTStageImpl(layer: Layer)(implicit a: Allocator)
extends FFTFullStageImpl("Real1FullIFFT") {
def createLogic(attr: Attributes): NodeImpl[Shape] = new Real1FullIFFTLogicImpl(name, shape, layer)
}
final class Complex1FFTStageImpl(layer: Layer)(implicit a: Allocator)
extends FFTFullStageImpl("ComplexIFFT") {
def createLogic(attr: Attributes): NodeImpl[Shape] = new Complex1FFTLogicImpl(name, shape, layer)
}
final class Complex1IFFTStageImpl(layer: Layer)(implicit a: Allocator)
extends FFTFullStageImpl("Complex1IFFT") {
def createLogic(attr: Attributes): NodeImpl[Shape] = new Complex1IFFTLogicImpl(name, shape, layer)
}
abstract class FFTHalfLogicImpl(name: String, shape: FanInShape4[BufD, BufI, BufI, BufI, BufD], layer: Layer)
(implicit a: Allocator)
extends Handlers(name, layer, shape)
with FFTLogicImpl {
import numbers.Implicits._
protected final val hIn : InDMain = InDMain (this, shape.in0)
protected final val hOut : OutDMain = OutDMain (this, shape.out)
protected final val hSize : InIAux = InIAux (this, shape.in1)(max(0, _))
protected final val hPadding: InIAux = InIAux (this, shape.in2)(max(0, _))
protected final val hMode : InIAux = InIAux (this, shape.in3)(_.clip(0, 2))
protected final var mode : Int = _ // 0 - packed, 1 - unpacked, 2 - discarded
// for half-spectra we add the extra "redundant" complex entry possibly needed for untangling DC and Nyquist
final def winBufSize: Int = fftSize + 2
}
final class Real1FFTLogicImpl(name: String, shape: FanInShape4[BufD, BufI, BufI, BufI, BufD], layer: Layer)
(implicit a: Allocator)
extends FFTHalfLogicImpl(name, shape, layer) {
override protected def readWinSize : Long = timeSize
override protected def writeWinSize : Long = if (mode == 1) fftSize + 2 else fftSize
protected def gainFor(fftSize: Int): Double = 2.0 / fftSize
protected def tryObtainWinParams(): Boolean = {
val ok = hSize.hasNext && hPadding.hasNext && hMode.hasNext
if (ok) {
timeSize = hSize .next()
val padding = hPadding.next()
mode = hMode .next()
setFFTSize(timeSize + padding)
}
ok
}
protected def performFFT(): Unit = {
val fftBuf = winBuf
val _fftSize = fftSize
fft.realForward(fftBuf)
Util.mul(fftBuf, 0, _fftSize, gain) // scale correctly
(mode: @switch) match {
case 0 => // packed
case 1 => // unpacked
// move Re(Nyquist) from Im(DC)
fftBuf(_fftSize) = fftBuf(1)
fftBuf(1) = 0.0
fftBuf(_fftSize + 1) = 0.0
case 2 => // discarded
fftBuf(1) = 0.0
}
}
}
final class Real1IFFTLogicImpl(name: String, shape: FanInShape4[BufD, BufI, BufI, BufI, BufD], layer: Layer)
(implicit a: Allocator)
extends FFTHalfLogicImpl(name, shape, layer) {
override protected def readWinSize : Long = if (mode == 1) fftSize + 2 else fftSize
override protected def writeWinSize : Long = timeSize
protected def gainFor(fftSize: Int): Double = {
import numbers.Implicits._
if (fftSize.isPowerOfTwo) 1.0 else 0.5 // XXX TODO: bug in JTransforms it seems
}
protected def tryObtainWinParams(): Boolean = {
val ok = hSize.hasNext && hPadding.hasNext && hMode.hasNext
if (ok) {
val _fftSize = hSize .next()
val padding = hPadding.next()
mode = hMode .next()
timeSize = _fftSize - padding
setFFTSize(_fftSize)
}
ok
}
protected def performFFT(): Unit = {
val fftBuf = winBuf
(mode: @switch) match {
case 0 => // packed
case 1 => // unpacked
// move Re(Nyquist) to Im(DC)
fftBuf(1) = fftBuf(fftSize)
case 2 => // discarded
fftBuf(1) = 0.0
}
fft.realInverse(fftBuf, scale = false)
if (gain != 1.0) Util.mul(fftBuf, 0, timeSize, gain)
}
}
abstract class FFTFullLogicImpl(name: String, shape: FanInShape3[BufD, BufI, BufI, BufD], layer: Layer)
(implicit a: Allocator)
extends Handlers(name, shape = shape, layer = layer)
with FFTLogicImpl {
protected final val hIn : InDMain = InDMain (this, shape.in0)
protected final val hOut : OutDMain = OutDMain (this, shape.out)
protected final val hSize : InIAux = InIAux (this, shape.in1)(max(0, _))
protected final val hPadding: InIAux = InIAux (this, shape.in2)(max(0, _))
final def winBufSize: Int = fftSize << 1
}
trait FFTFullForwardLogicImpl {
this: FFTFullLogicImpl =>
final protected def tryObtainWinParams(): Boolean = {
val ok = hSize.hasNext && hPadding.hasNext
if (ok) {
timeSize = hSize .next()
val padding = hPadding.next()
val _fftSize = timeSize + padding
setFFTSize(_fftSize)
}
ok
}
}
trait FFTFullBackwardLogicImpl {
this: FFTFullLogicImpl =>
protected def tryObtainWinParams(): Boolean = {
val ok = hSize.hasNext && hPadding.hasNext
if (ok) {
val _fftSize = hSize .next()
val padding = hPadding.next()
timeSize = _fftSize - padding
setFFTSize(_fftSize)
}
ok
}
}
final class Real1FullFFTLogicImpl(name: String, shape: FanInShape3[BufD, BufI, BufI, BufD], layer: Layer)
(implicit a: Allocator)
extends FFTFullLogicImpl(name, shape, layer) with FFTFullForwardLogicImpl {
override protected def readWinSize : Long = timeSize
override protected def writeWinSize : Long = fftSize << 1
protected def gainFor(fftSize: Int): Double = 2.0 / fftSize
protected def performFFT(): Unit = {
val _fftBuf = winBuf
fft.realForwardFull(_fftBuf)
Util.mul(_fftBuf, 0, _fftBuf.length, gain) // scale correctly
}
}
final class Real1FullIFFTLogicImpl(name: String, shape: FanInShape3[BufD, BufI, BufI, BufD], layer: Layer)
(implicit a: Allocator)
extends FFTFullLogicImpl(name, shape, layer) with FFTFullBackwardLogicImpl {
override protected def readWinSize : Long = fftSize << 1
override protected def writeWinSize : Long = timeSize
protected def gainFor(fftSize: Int): Double = 0.5
protected def performFFT(): Unit = {
val _fftBuf = winBuf
fft.complexInverse(_fftBuf, scale = false)
var i = 0
var j = 0
val g = gain
while (j < _fftBuf.length) {
_fftBuf(i) = _fftBuf(j) * g
i += 1
j += 2
}
}
}
final class Complex1FFTLogicImpl(name: String, shape: FanInShape3[BufD, BufI, BufI, BufD], layer: Layer)
(implicit a: Allocator)
extends FFTFullLogicImpl(name, shape, layer) with FFTFullForwardLogicImpl {
override protected def readWinSize : Long = timeSize << 1
override protected def writeWinSize : Long = fftSize << 1
protected def gainFor(fftSize: Int): Double = 1.0 / fftSize
protected def performFFT(): Unit = {
val _fftBuf = winBuf
fft.complexForward(_fftBuf)
Util.mul(_fftBuf, 0, _fftBuf.length, 1.0 / fftSize) // scale correctly
}
}
final class Complex1IFFTLogicImpl(name: String, shape: FanInShape3[BufD, BufI, BufI, BufD], layer: Layer)
(implicit a: Allocator)
extends FFTFullLogicImpl(name, shape, layer) with FFTFullBackwardLogicImpl {
override protected def readWinSize : Long = fftSize << 1
override protected def writeWinSize : Long = timeSize << 1
protected def gainFor(fftSize: Int): Double = 1.0
protected def performFFT(): Unit = {
val _fftBuf = winBuf
fft.complexInverse(_fftBuf, scale = false)
}
}
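// A note on the half-spectrum modes above, with a standalone sketch of the
// mode-1 ("unpacked") move: JTransforms' realForward packs Re(Nyquist) into
// buf(1), the otherwise-unused imaginary slot of DC; unpacking relocates it to
// its own bin and zeroes both imaginary slots, giving n/2+1 full complex bins.
object UnpackSketch {
  def unpack(buf: Array[Double], fftSize: Int): Unit = {
    buf(fftSize)     = buf(1) // Re(Nyquist) moves to its own bin
    buf(1)           = 0.0    // Im(DC)
    buf(fftSize + 1) = 0.0    // Im(Nyquist)
  }
}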
|
Sciss/FScape-next
|
core/shared/src/main/scala/de/sciss/fscape/stream/impl/FFTLogicImpl.scala
|
Scala
|
agpl-3.0
| 11,141
|
package forimpatient.chapter03
import java.awt.datatransfer.{DataFlavor, SystemFlavorMap}
import scala.collection.mutable
import scala.collection.JavaConversions.asScalaBuffer
/**
* Created by Iryna Kharaborkina on 7/28/16.
*
* Solution to the Chapter 03 Exercise 10 'Scala for the Impatient' by Horstmann C.S.
*
* Import java.awt.datatransfer._ and make an object of type SystemFlavorMap with the call
* val flavors = SystemFlavorMap.getDefaultFlavorMap().asInstanceOf[SystemFlavorMap]
* Then call the getNativesForFlavor method with parameter DataFlavor.imageFlavor and get
* the return value as a Scala buffer. (Why this obscure class? It’s hard to find uses of
* java.util.List in the standard Java library.)
*/
object Exercise10 extends App {
println("Chapter 03 Exercise 10")
val flavors = SystemFlavorMap.getDefaultFlavorMap().asInstanceOf[SystemFlavorMap]
val imageFlavors: mutable.Buffer[String] = flavors.getNativesForFlavor(DataFlavor.imageFlavor)
println(imageFlavors)
}
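// A hedged variant: scala.collection.JavaConversions is deprecated since 2.12,
// so the same exercise with explicit JavaConverters looks like this.
object Exercise10Explicit extends App {
  import java.awt.datatransfer.{DataFlavor, SystemFlavorMap}
  import scala.collection.JavaConverters._
  val flavors = SystemFlavorMap.getDefaultFlavorMap().asInstanceOf[SystemFlavorMap]
  println(flavors.getNativesForFlavor(DataFlavor.imageFlavor).asScala)
}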
|
Kiryna/Scala-for-the-Impatient
|
src/forimpatient/chapter03/Exercise10.scala
|
Scala
|
apache-2.0
| 1,024
|
def foo[A <% String : Manifest](x: Int = 45) = x
foo[Int]()(<caret>)
// (x: Int = 45)(implicit ev$1: Int => String, manifest$A: Manifest[Int])
|
jastice/intellij-scala
|
scala/scala-impl/testdata/parameterInfo/functionParameterInfo/simple/SyntheticParameter.scala
|
Scala
|
apache-2.0
| 143
|
package kr.reachlab.scala.data
import java.io.{File, PrintWriter}
import java.nio.file.{Files, Paths}
import scala.io.{Source => sSource}
class IrisSource extends Source with Serializable {
var dataUrl: String = "https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data"
var dataPath: String = "/tmp/iris.data"
def load(): Source = {
if (!Files.exists(Paths.get(dataPath))) {
val data = sSource.fromURL(dataUrl).mkString
val writer = new PrintWriter(new File(dataPath))
writer.write(data)
writer.close()
}
    this
}
  def createFrame(): Frame = new IrisFrame(dataPath)
}
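// A minimal usage sketch: load() is idempotent, fetching the UCI file only when
// /tmp/iris.data is absent. Note it hits the network on first run; Frame and
// IrisFrame are assumed from the same package, as above.
object IrisSourceExample extends App {
  val src = new IrisSource()
  src.load()
  val frame = src.createFrame()
}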
|
reachlab-kr-scala/sopark
|
src/main/scala/kr/reachlab/scala/data/IrisSource.scala
|
Scala
|
mit
| 658
|
package app
import java.awt.Color
import javax.swing.JFrame
import javax.swing.WindowConstants
import data.Cfg
import view.Animation
object Main extends App {
def run() {
val anima = Animation(1 / 20d)
val frame = new JFrame()
frame.getContentPane.add(anima)
frame.setBackground(Color.gray)
frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE)
frame.setSize(Cfg.frameWidth, Cfg.frameHeight)
frame.setVisible(true)
while (true) {
anima.update()
frame.repaint()
}
}
run()
}
|
davips/mucel-scala
|
src/main/scala/app/Main.scala
|
Scala
|
gpl-3.0
| 539
|
package org.bitcoins.core.script.crypto
import org.bitcoins.core.script.ScriptOperationFactory
/**
* Created by chris on 3/24/16.
*/
trait CryptoSignatureEvaluationFactory
extends ScriptOperationFactory[CryptoSignatureEvaluation] {
/** The current [[CryptoSignatureEvaluation]] operations. */
def operations =
Seq(OP_CHECKMULTISIG,
OP_CHECKMULTISIGVERIFY,
OP_CHECKSIG,
OP_CHECKSIGVERIFY)
}
object CryptoSignatureEvaluationFactory extends CryptoSignatureEvaluationFactory
|
bitcoin-s/bitcoin-s-core
|
core/src/main/scala/org/bitcoins/core/script/crypto/CryptoSignatureEvaluationFactory.scala
|
Scala
|
mit
| 516
|
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.doperations.spark.wrappers.evaluators
import io.deepsense.commons.utils.Version
import io.deepsense.deeplang.DOperation.Id
import io.deepsense.deeplang.documentation.OperationDocumentation
import io.deepsense.deeplang.doperables.spark.wrappers.evaluators.BinaryClassificationEvaluator
import io.deepsense.deeplang.doperations.EvaluatorAsFactory
class CreateBinaryClassificationEvaluator
extends EvaluatorAsFactory[BinaryClassificationEvaluator] with OperationDocumentation {
override val id: Id = "464ce3fa-e915-4a5d-a9d1-442c1e4b7aa7"
override val name: String = "Binary Classification Evaluator"
override val description: String = "Creates a binary classification evaluator"
override val since: Version = Version(1, 0, 0)
}
|
deepsense-io/seahorse-workflow-executor
|
deeplang/src/main/scala/io/deepsense/deeplang/doperations/spark/wrappers/evaluators/CreateBinaryClassificationEvaluator.scala
|
Scala
|
apache-2.0
| 1,369
|
package colossus.metrics
import org.scalatest._
import MetricAddress.Root
import MetricValues._
class MetricValueSpec extends WordSpec with MustMatchers with BeforeAndAfterAll {
"WeightedAverageValue" must {
"combine two non-zero values" in {
val a = WeightedAverageValue(value = 2, weight = 10)
val b = WeightedAverageValue(value = 5, weight = 5)
val expected = WeightedAverageValue(value = 3, weight = 15)
a + b must equal(expected)
}
"combine non-zero with zero value" in {
val a = WeightedAverageValue(value = 4, weight = 10)
val b = WeightedAverageValue(value = 0, weight = 0)
val expected = WeightedAverageValue(value = 4, weight = 10)
a + b must equal(expected)
}
"handle two zero weight values" in {
val a = WeightedAverageValue(0, 0)
a + a must equal(a)
}
}
}
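// The invariant these cases pin down: weighted averages combine as
// (v1*w1 + v2*w2) / (w1 + w2), with zero total weight mapping to zero; e.g.
// (2*10 + 5*5) / (10 + 5) = 45 / 15 = 3, matching the first expectation.
// A sketch of the rule (not the library's actual implementation):
object WeightedAverageSketch extends App {
  def combine(v1: Long, w1: Long, v2: Long, w2: Long): (Long, Long) =
    if (w1 + w2 == 0) (0L, 0L) else ((v1 * w1 + v2 * w2) / (w1 + w2), w1 + w2)
  assert(combine(2, 10, 5, 5) == (3L, 15L))
}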
|
zgagnon/colossus
|
colossus-metrics/src/test/scala/colossus/metrics/MetricValueSpec.scala
|
Scala
|
apache-2.0
| 868
|
/*
* Copyright 2017 TabMo http://tabmo.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.tabmo
case class Person(name: String,
price: Option[Double] = None,
aFloat: Float,
cool: Boolean,
aMap: Option[Map[String, Long]],
favoriteNumber: Int,
favoriteColor: String,
seqInt: Seq[Int],
nicknames: List[String],
weddingYear: Option[Int] = None)
|
tabmo/parquet-avro-shapeless
|
src/test/scala/io/tabmo/Models.scala
|
Scala
|
apache-2.0
| 1,031
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views
import javax.inject.Inject
import uk.gov.hmrc.play.views.html.helpers._
import uk.gov.hmrc.play.views.html.layouts._
class ViewHelpers @Inject()(
//copied from uk.gov.hmrc.play.views.html.helpers
val address: Address,
val dateFields: DateFields,
val dateFieldsFreeInline: DateFieldsFreeInline,
val dateFieldsFreeInlineLegend: DateFieldsFreeInlineLegend,
val dateFieldsFreeYearInline: DateFieldsFreeYearInline,
val dateFieldsFreeYear: DateFieldsFreeYear,
val dateFieldsInline: DateFieldsInline,
val dropdown: Dropdown,
val errorInline: ErrorInline,
val errorNotifications: ErrorNotifications,
val errorSummary: ErrorSummary,
val fieldGroup: FieldGroup,
val form: FormWithCSRF,
val input: Input,
val inputRadioGroup: InputRadioGroup,
val reportAProblemLink: ReportAProblemLink,
val singleCheckbox: SingleCheckbox,
val textArea: TextArea,
//copied from uk.gov.hmrc.play.views.html.layouts
val article: Article,
val attorneyBanner: AttorneyBanner,
val betaBanner: BetaBanner,
val footer: Footer,
val euExitLinks: EuExitLinks,
val footerLinks: FooterLinks,
val head: Head,
val headerNav: HeaderNav,
val headWithTrackingConsent: HeadWithTrackingConsent,
val loginStatus: LoginStatus,
val mainContent: MainContent,
val mainContentHeader: MainContentHeader,
val gtmSnippet: GTMSnippet,
val serviceInfo: ServiceInfo,
val sidebar: Sidebar
)
|
hmrc/amls-frontend
|
app/views/ViewHelpers.scala
|
Scala
|
apache-2.0
| 2,995
|
package pw.ian.sysadmincraft.world
object WorldConstants {
val START_HEIGHT = 16
val MAX_HEIGHT = 96 - START_HEIGHT
  val MAX_MEMORY = 2L * 1024 * 1024 // 2 GiB only if the unit is KiB; the raw value is 2,097,152 (2 MiB in bytes)
val PILLAR_WIDTH = 4
val PILLAR_PADDING = 1
val PATHWAY_WIDTH = 4
val PILLAR_DISTANCE = PILLAR_WIDTH + PATHWAY_WIDTH
val MOB_HOUSE_HEIGHT = 4
val MOB_HOUSE_DEPTH = 2
}
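// A sketch of how the layout constants compose: each pillar occupies
// PILLAR_WIDTH blocks followed by PATHWAY_WIDTH of walkway, so pillar i
// begins at x = i * PILLAR_DISTANCE.
object WorldLayoutSketch extends App {
  def pillarOriginX(i: Int): Int = i * WorldConstants.PILLAR_DISTANCE
  assert(pillarOriginX(3) == 24) // 3 * (4 + 4)
}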
|
simplyianm/sysadmincraft
|
src/main/scala/pw/ian/sysadmincraft/world/WorldConstants.scala
|
Scala
|
isc
| 354
|
/*
* Copyright 2017 Zhang Di
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dizhang.seqspark.numerics
import breeze.linalg.max
import breeze.numerics.abs
import org.slf4j.{Logger, LoggerFactory}
import scala.util.control.Breaks._
/**
* Created by zhangdi on 12/13/16.
*/
object Qelg {
val logger: Logger = LoggerFactory.getLogger(getClass)
case class Extrapolation() {
var n: Int = 0
var nRes: Int = 0
val rList2: Array[Double] = Array.fill(52)(0.0)
val res3La: Array[Double] = Array.fill(3)(0.0)
def append(y: Double): Unit = {
rList2(n) = y
n += 1
}
override def toString: String =
s"""
|n: $n
|nRes: $nRes
|rList2: ${rList2.slice(0, n).map(x => "%.4f".format(x)).mkString(",")}
|res3La: ${res3La.map(x => "%.4f".format(x)).mkString(",")}
|""".stripMargin.stripLineEnd
}
def apply(table: Extrapolation): (Double, Double) = {
val epsTab = table.rList2
val res3La = table.res3La
val n = table.n - 1
val current = epsTab(n)
var absolute = MAXVALUE
var relative = 5 * EPSILON * abs(current)
val newElm = n / 2
val nOrig = n
var nFinal = n
val nResOrig = table.nRes
var result = current
var absErr = MAXVALUE
if (n < 2) {
result = current
absErr = max(absolute, relative)
(result, absErr)
} else {
epsTab(n + 2) = epsTab(n)
epsTab(n) = MAXVALUE
breakable{
for (i <- 0 until newElm) {
var res = epsTab(n - 2 * i + 2)
val e0 = epsTab(n - 2 * i - 2)
val e1 = epsTab(n - 2 * i - 1)
val e2 = res
val e1abs = abs(e1)
val delta2 = e2 - e1
val err2 = abs(delta2)
val tol2 = max(abs(e2), e1abs) * EPSILON
val delta3 = e1 - e0
val err3 = abs(delta3)
val tol3 = max(e1abs, abs(e0)) * EPSILON
var (e3, delta1, err1, tol1, ss) = (0.0, 0.0, 0.0, 0.0, 0.0)
if (err2 <= tol2 && err3 <= tol3) {
result = res
absolute = err2 + err3
relative = 5 * EPSILON * abs(res)
absErr = max(absolute, relative)
return (result, absErr)
}
e3 = epsTab( n - 2 * i)
epsTab(n - 2 * i) = e1
delta1 = e1 - e3
err1 = abs(delta1)
tol1 = max(e1abs, abs(e3)) * EPSILON
if (err1 <= tol1 || err2 <= tol2 || err3 <= tol3) {
nFinal = 2 * i
break()
}
ss = (1/delta1 + 1/delta2) - 1/delta3
if (abs(ss * e1) <= 0.0001) {
nFinal = 2 * i
break()
}
res = e1 + 1/ss
epsTab(n - 2 * i) = res
{
val error = err2 + abs(res - e2) + err3
if (error <= absErr) {
absErr = error
result = res
}
}
}
}
val limexp = 50 - 1
if (nFinal == limexp) {
nFinal = 2 * (limexp/2)
}
if (nOrig%2 == 1) {
for (i<-0 to newElm) {
epsTab(i * 2 + 1) = epsTab(i*2 + 3)
}
} else {
for (i<-0 to newElm) {
epsTab(i * 2) = epsTab(i*2 + 2)
}
}
if (nOrig != nFinal) {
for (i <- 0 to nFinal) {
epsTab(i) = epsTab(nOrig - nFinal + i)
}
}
table.n = nFinal + 1
if (nResOrig < 3) {
res3La(nResOrig) = result
absErr = MAXVALUE
} else {
absErr = abs(result - res3La(2)) + abs(result - res3La(1)) + abs(result - res3La(0))
res3La(0) = res3La(1)
res3La(1) = res3La(2)
res3La(2) = result
}
table.nRes = nResOrig + 1
absErr = max(absErr, 5 * EPSILON * abs(result))
(result, absErr)
}
}
}
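// A hedged usage sketch: Qelg is QUADPACK's epsilon-algorithm extrapolation.
// Feeding it partial sums of the slowly converging alternating series
// ln 2 = 1 - 1/2 + 1/3 - ... should return an estimate far closer to ln 2 than
// the last raw partial sum. MAXVALUE and EPSILON are assumed to live in the
// enclosing numerics package, as the object above assumes.
object QelgSketch extends App {
  val table = Qelg.Extrapolation()
  var partial = 0.0
  for (k <- 1 to 12) {
    partial += math.pow(-1.0, k + 1) / k
    table.append(partial)
  }
  val (estimate, absErr) = Qelg(table)
  println(s"estimate=$estimate (ln 2 = ${math.log(2)}), err<=$absErr")
}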
|
statgenetics/seqspark
|
src/main/scala/org/dizhang/seqspark/numerics/Qelg.scala
|
Scala
|
apache-2.0
| 4,338
|
/*
* Copyright (C) 2020 MapRoulette contributors (see CONTRIBUTORS.md).
* Licensed under the Apache License, Version 2.0 (see LICENSE).
*/
package org.maproulette.provider
import play.api.libs.mailer._
import java.io.File
import org.apache.commons.mail.EmailAttachment
import javax.inject.{Inject, Singleton}
import org.maproulette.Config
import org.maproulette.framework.model.{UserNotification, UserNotificationEmail}
import scala.concurrent.{Future}
/**
* @author nrotstan
*
* TODO: internationalize these messages and move them out into templates
*/
@Singleton
class EmailProvider @Inject() (mailerClient: MailerClient, config: Config) {
import scala.concurrent.ExecutionContext.Implicits.global
def emailNotification(toAddress: String, notification: UserNotificationEmail) = {
val notificationName =
UserNotification.notificationTypeMap.get(notification.notificationType).get
val emailSubject = s"New MapRoulette notification: ${notificationName}"
val notificationDetails = notification.extra match {
case Some(details) => s"\n${details}"
case None => ""
}
val emailBody = s"""
|You have received a new MapRoulette notification:
|
|${notificationName}
|${notificationDetails}
|${this.notificationFooter}""".stripMargin
val email =
Email(emailSubject, config.getEmailFrom.get, Seq(toAddress), bodyText = Some(emailBody))
mailerClient.send(email)
}
def emailNotificationDigest(toAddress: String, notifications: List[UserNotificationEmail]) = {
val notificationNames = notifications.map(notification =>
UserNotification.notificationTypeMap.get(notification.notificationType).get
)
val notificationNameCounts = notificationNames.groupBy(identity).view.mapValues(_.size)
val notificationLines = notificationNameCounts.foldLeft("") {
(s: String, pair: (String, Int)) =>
s + pair._1 + " (" + pair._2 + ")\n"
}
val emailSubject = s"MapRoulette Notifications Daily Digest"
val emailBody = s"""
|You have received new MapRoulette notifications over the past day:
|
|${notificationLines}${this.notificationFooter}""".stripMargin
val email =
Email(emailSubject, config.getEmailFrom.get, Seq(toAddress), bodyText = Some(emailBody))
mailerClient.send(email)
}
def emailCountNotification(
toAddress: String,
name: String,
tasks: List[Int],
taskType: String
) = {
val notificationName = s"Task ${taskType.capitalize}s"
val emailSubject = s"New MapRoulette notification: ${notificationName}"
    val notificationDetails = s"${name}, you have ${tasks.length} ${taskType}(s) pending."
var subRoute = "";
if (taskType == UserNotification.TASK_TYPE_REVIEW) {
subRoute = "/review";
}
val emailBody = s"""
|You have received a new MapRoulette notification:
|
|${notificationName}
|${notificationDetails}
|
|${tasks
.map(task => s"${config.getPublicOrigin.get}/task/${task}${subRoute}")
.mkString("\n")}
|
|${this.notificationFooter}
|
|""".stripMargin;
val email =
Email(emailSubject, config.getEmailFrom.get, Seq(toAddress), bodyText = Some(emailBody))
mailerClient.send(email)
}
private def notificationFooter: String = {
val urlPrefix = config.getPublicOrigin.get
s"""
|You can view your notifications by visiting your MapRoulette Inbox at:
|${urlPrefix}/inbox
|
|Happy mapping!
|--The MapRoulette Team
|
|
|P.S. You received this because you asked to be emailed when you
|received this type of notification in MapRoulette. You can manage
|your notification subscriptions and email preferences at:
|${urlPrefix}/profile""".stripMargin
}
}
|
mgcuthbert/maproulette2
|
app/org/maproulette/provider/EmailProvider.scala
|
Scala
|
apache-2.0
| 4,084
|
package com.twitter.finagle.http
import com.twitter.finagle.{Http => FinagleHttp}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class Netty3EndToEndTest extends AbstractHttp1EndToEndTest {
def implName: String = "netty3"
def clientImpl(): FinagleHttp.Client = FinagleHttp.client.configured(FinagleHttp.Netty3Impl)
def serverImpl(): FinagleHttp.Server = FinagleHttp.server.configured(FinagleHttp.Netty3Impl)
def featureImplemented(feature: Feature): Boolean =
feature != TooLongStream // Disabled due to flakiness. see CSL-2946.
}
|
koshelev/finagle
|
finagle-http/src/test/scala/com/twitter/finagle/http/Netty3EndToEndTest.scala
|
Scala
|
apache-2.0
| 600
|
/*
* Copyright (c) 2006-2007, AIOTrade Computing Co. and Contributors
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* o Neither the name of AIOTrade Computing Co. nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.aiotrade.lib.indicator.basic
import org.aiotrade.lib.indicator.Indicator
/**
*
* @author Caoyuan Deng
*/
class MACDIndicator extends Indicator {
sname = "MACD"
lname = "Moving Average Convergence/Divergence"
val periodFast = Factor("Period EMA Fast", 12)
val periodSlow = Factor("Period EMA Slow", 26)
val periodSignal = Factor("Period Signal", 9)
val macd = TVar[Double]("MACD", Plot.Line)
val signal = TVar[Double]("SIGNAL", Plot.Line)
val osc = TVar[Double]("OSC", Plot.Stick)
protected def compute(fromIdx: Int, size: Int) = {
var i = fromIdx
while (i < size) {
macd(i) = macd(i, C, periodSlow, periodFast)
signal(i) = ema (i, macd, periodSignal)
osc(i) = macd(i) - signal(i)
i += 1
}
}
}
|
wandoulabs/wandou-math
|
wandou-indicator-basic/src/main/scala/org/aiotrade/lib/indicator/basic/MACDIndicator.scala
|
Scala
|
apache-2.0
| 2,465
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.aliyun.logservice
/**
* Loghub offset.
* @param shardId Loghub shard id.
 * @param timestamp The offset of each Loghub shard.
*/
case class LoghubOffset(shardId: Int, timestamp: Long)
/**
* Loghub offset range.
* @param shardId Loghub shard id.
* @param beginCursor The start cursor of range.
* @param endCursor The end cursor of range.
*/
case class ShardOffsetRange(shardId: Int, beginCursor: String, endCursor: String)
|
aliyun/aliyun-emapreduce-sdk
|
emr-logservice/src/main/scala/org/apache/spark/streaming/aliyun/logservice/LoghubOffset.scala
|
Scala
|
artistic-2.0
| 1,269
|
package cromwell.util
import akka.actor.{Actor, ActorLogging, ActorRef, ActorSystem, Kill, PoisonPill, Props, SupervisorStrategy}
import akka.testkit.TestProbe
object AkkaTestUtil {
implicit class EnhancedTestProbe(probe: TestProbe) {
def props = Props(new Actor with ActorLogging {
def receive = {
case outbound @ _ if sender == probe.ref =>
val msg = "Unexpected outbound message from Probe. You're doing something wrong!"
log.error(msg)
throw new RuntimeException(msg)
case inbound => probe.ref forward inbound
}
})
}
def actorDeathMethods(system: ActorSystem): List[(String, ActorRef => Unit)] = List(
("external_stop", (a: ActorRef) => system.stop(a)),
("internal_stop", (a: ActorRef) => a ! InternalStop),
("poison_pill", (a: ActorRef) => a ! PoisonPill),
("kill_message", (a: ActorRef) => a ! Kill),
("throw_exception", (a: ActorRef) => a ! ThrowException)
)
case object InternalStop
case object ThrowException
class StoppingSupervisor extends Actor {
override val supervisorStrategy = SupervisorStrategy.stoppingStrategy
def receive = Actor.emptyBehavior
}
class DeathTestActor extends Actor {
private def stoppingReceive: Actor.Receive = {
case InternalStop => context.stop(self)
case ThrowException => throw new Exception("Don't panic, dear debugger! This was a deliberate exception for the test case.")
}
override def receive = stoppingReceive orElse Actor.ignoringBehavior
}
}
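// A hedged usage sketch: exercising an actor's death-watch logic under each
// termination mode the util enumerates. The "throw_exception" mode is excluded
// here because top-level actors live under the /user guardian, whose default
// strategy restarts (rather than stops) on plain exceptions; routing victims
// through the StoppingSupervisor above would cover that mode too.
object AkkaTestUtilExample {
  import akka.actor.{ActorSystem, Props}
  import akka.testkit.TestProbe
  def runAll(): Unit = {
    val system = ActorSystem("death-demo")
    val probe  = TestProbe()(system)
    for ((name, kill) <- AkkaTestUtil.actorDeathMethods(system) if name != "throw_exception") {
      val victim = system.actorOf(Props(new AkkaTestUtil.DeathTestActor), s"victim-$name")
      probe.watch(victim)
      kill(victim)
      probe.expectTerminated(victim)
    }
    system.terminate()
  }
}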
|
ohsu-comp-bio/cromwell
|
core/src/test/scala/cromwell/util/AkkaTestUtil.scala
|
Scala
|
bsd-3-clause
| 1,534
|
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.path
import org.scalatest._
import org.scalatest.path.{ FunSpec => PathFunSpec }
import org.scalatest.events._
import org.scalatest.exceptions.TestFailedException
import org.scalatest.exceptions.DuplicateTestNameException
import org.scalatest.exceptions.TestRegistrationClosedException
class FunSpecSpec extends org.scalatest.FreeSpec with SharedHelpers with GivenWhenThen {
"A path.FunSpec" - {
"should return the test names in registration order from testNames" in {
class AFunSpec extends PathFunSpec {
it("should test this") {}
it("should test that") {}
override def newInstance = new AFunSpec
}
val a = new AFunSpec
assertResult(List("should test this", "should test that")) {
a.testNames.iterator.toList
}
val b = new PathFunSpec {}
assertResult(List[String]()) {
b.testNames.iterator.toList
}
class CFunSpec extends PathFunSpec {
it("should test that") {}
it("should test this") {}
override def newInstance = new CFunSpec
}
val c = new CFunSpec
assertResult(List("should test that", "should test this")) {
c.testNames.iterator.toList
}
class DFunSpec extends PathFunSpec {
describe("A Tester") {
it("should test that") {}
it("should test this") {}
}
override def newInstance = new DFunSpec
}
val d = new DFunSpec
assertResult(List("A Tester should test that", "A Tester should test this")) {
d.testNames.iterator.toList
}
class EFunSpec extends PathFunSpec {
describe("A Tester") {
it("should test this") {}
it("should test that") {}
}
override def newInstance = new EFunSpec
}
val e = new EFunSpec
assertResult(List("A Tester should test this", "A Tester should test that")) {
e.testNames.iterator.toList
}
}
"should throw DuplicateTestNameException if a duplicate test name registration is attempted" in {
intercept[DuplicateTestNameException] {
class AFunSpec extends PathFunSpec {
it("should test this") {}
it("should test this") {}
override def newInstance = new AFunSpec
}
(new AFunSpec).tags // Must call a method to get it to attempt to register the second test
}
intercept[DuplicateTestNameException] {
class AFunSpec extends PathFunSpec {
it("should test this") {}
ignore("should test this") {}
override def newInstance = new AFunSpec
}
(new AFunSpec).tags
}
intercept[DuplicateTestNameException] {
class AFunSpec extends PathFunSpec {
ignore("should test this") {}
ignore("should test this") {}
override def newInstance = new AFunSpec
}
(new AFunSpec).tags
}
intercept[DuplicateTestNameException] {
class AFunSpec extends PathFunSpec {
ignore("should test this") {}
it("should test this") {}
override def newInstance = new AFunSpec
}
(new AFunSpec).tags
}
}
"(with info calls)" - {
class InfoInsideTestSpec extends PathFunSpec {
val msg = "hi there, dude"
val testName = "test name"
it(testName) {
info(msg)
}
override def newInstance = new InfoInsideTestSpec
}
      // In a Spec, any InfoProvided events fired during the test should be cached and sent out after the test has
      // succeeded or failed. This makes the report look nicer, because the info is tucked under the "specifier"
      // text for that test.
"should, when the info appears in the code of a successful test, report the info in the TestSucceeded" in {
val spec = new InfoInsideTestSpec
val (testStartingIndex, testSucceededIndex) =
getIndexesForTestInformerEventOrderTests(spec, spec.testName, spec.msg)
assert(testStartingIndex < testSucceededIndex)
}
class InfoBeforeTestSpec extends PathFunSpec {
val msg = "hi there, dude"
val testName = "test name"
info(msg)
it(testName) {}
}
"should, when the info appears in the body before a test, report the info before the test" in {
val spec = new InfoBeforeTestSpec
val (infoProvidedIndex, testStartingIndex, testSucceededIndex) =
getIndexesForInformerEventOrderTests(spec, spec.testName, spec.msg)
assert(infoProvidedIndex < testStartingIndex)
assert(testStartingIndex < testSucceededIndex)
}
"should, when the info appears in the body after a test, report the info after the test runs" in {
val msg = "hi there, dude"
val testName = "test name"
class MySpec extends PathFunSpec {
it(testName) {}
info(msg)
}
val (infoProvidedIndex, testStartingIndex, testSucceededIndex) =
getIndexesForInformerEventOrderTests(new MySpec, testName, msg)
assert(testStartingIndex < testSucceededIndex)
assert(testSucceededIndex < infoProvidedIndex)
}
"should throw an IllegalStateException when info is called by a method invoked after the suite has been executed" in {
class MySpec extends PathFunSpec {
callInfo() // This should work fine
def callInfo() {
info("howdy")
}
it("howdy also") {
callInfo() // This should work fine
}
override def newInstance = new MySpec
}
val spec = new MySpec
val myRep = new EventRecordingReporter
spec.run(None, Args(myRep))
intercept[IllegalStateException] {
spec.callInfo()
}
}
"should send an InfoProvided with an IndentedText formatter with level 1 when called outside a test" in {
val spec = new InfoBeforeTestSpec
val indentedText = getIndentedTextFromInfoProvided(spec)
assert(indentedText === IndentedText("+ " + spec.msg, spec.msg, 0))
}
"should send an InfoProvided with an IndentedText formatter with level 2 when called within a test" in {
val spec = new InfoInsideTestSpec
val indentedText = getIndentedTextFromTestInfoProvided(spec)
assert(indentedText === IndentedText(" + " + spec.msg, spec.msg, 1))
}
}
"(when a nesting rule has been violated)" - {
"should, if they call a describe from within an it clause, result in a TestFailedException when running the test" in {
class MySpec extends PathFunSpec {
it("should blow up") {
describe("in the wrong place, at the wrong time") {
}
}
override def newInstance = new MySpec
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
"should, if they call a describe with a nested it from within an it clause, result in a TestFailedException when running the test" in {
class MySpec extends PathFunSpec {
it("should blow up") {
describe("in the wrong place, at the wrong time") {
it("should never run") {
assert(1 === 1)
}
}
}
override def newInstance = new MySpec
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
"should, if they call a nested it from within an it clause, result in a TestFailedException when running the test" in {
class MySpec extends PathFunSpec {
it("should blow up") {
it("should never run") {
assert(1 === 1)
}
}
override def newInstance = new MySpec
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
"should, if they call a nested it with tags from within an it clause, result in a TestFailedException when running the test" in {
class MySpec extends PathFunSpec {
it("should blow up") {
it("should never run", mytags.SlowAsMolasses) {
assert(1 === 1)
}
}
override def newInstance = new MySpec
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
"should, if they call a describe with a nested ignore from within an it clause, result in a TestFailedException when running the test" in {
class MySpec extends PathFunSpec {
it("should blow up") {
describe("in the wrong place, at the wrong time") {
ignore("should never run") {
assert(1 === 1)
}
}
}
override def newInstance = new MySpec
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
"should, if they call a nested ignore from within an it clause, result in a TestFailedException when running the test" in {
class MySpec extends PathFunSpec {
it("should blow up") {
ignore("should never run") {
assert(1 === 1)
}
}
override def newInstance = new MySpec
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
"should, if they call a nested ignore with tags from within an it clause, result in a TestFailedException when running the test" in {
class MySpec extends PathFunSpec {
it("should blow up") {
ignore("should never run", mytags.SlowAsMolasses) {
assert(1 === 1)
}
}
override def newInstance = new MySpec
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
}
"should run tests registered via the 'it should behave like' syntax" in {
trait SharedSpecTests { this: PathFunSpec =>
def nonEmptyStack(s: String)(i: Int) {
it("should be that I am shared") {}
}
}
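      // `it should behave like nonEmptyStack("hi")(1)` simply invokes the shared-test
      // function, so its `it` calls register in the enclosing (here top-level) scope
      // under their own names, as the TestStarting assertion below confirms.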
class MySpec extends PathFunSpec with SharedSpecTests {
it should behave like nonEmptyStack("hi")(1)
override def newInstance = new MySpec
}
val suite = new MySpec
val reporter = new EventRecordingReporter
suite.run(None, Args(reporter))
val indexedList = reporter.eventsReceived
val testStartingOption = indexedList.find(_.isInstanceOf[TestStarting])
assert(testStartingOption.isDefined)
assert(testStartingOption.get.asInstanceOf[TestStarting].testName === "should be that I am shared")
}
"should throw NullPointerException if a null test tag is provided" in {
// it
intercept[NullPointerException] {
new PathFunSpec {
it("hi", null) {}
}
}
val caught = intercept[NullPointerException] {
new PathFunSpec {
it("hi", mytags.SlowAsMolasses, null) {}
}
}
assert(caught.getMessage === "a test tag was null")
intercept[NullPointerException] {
new PathFunSpec {
it("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) {}
}
}
// ignore
intercept[NullPointerException] {
new PathFunSpec {
ignore("hi", null) {}
}
}
val caught2 = intercept[NullPointerException] {
new PathFunSpec {
ignore("hi", mytags.SlowAsMolasses, null) {}
}
}
assert(caught2.getMessage === "a test tag was null")
intercept[NullPointerException] {
new PathFunSpec {
ignore("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) {}
}
}
}
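    // A path trait runs each test in a fresh instance created via newInstance, so
    // mutable "was this test called" flags must live in a shared object passed through
    // the constructor; a plain var on the spec would be reset for every new instance.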
case class TestWasCalledCounts(var theTestThisCalled: Boolean, var theTestThatCalled: Boolean)
class TestWasCalledSuite(val counts: TestWasCalledCounts) extends PathFunSpec {
def this() { this(TestWasCalledCounts(false, false)) }
it("should run this") { counts.theTestThisCalled = true }
it("should run that, maybe") { counts.theTestThatCalled = true }
override def newInstance = new TestWasCalledSuite(counts)
}
"should execute all tests when run is called with testName None" in {
val b = new TestWasCalledSuite
b.run(None, Args(SilentReporter))
assert(b.counts.theTestThisCalled)
assert(b.counts.theTestThatCalled)
}
"should execute one test when run is called with a defined testName" in {
val a = new TestWasCalledSuite
val rep = new EventRecordingReporter
a.run(Some("should run this"), Args(rep))
assert(a.counts.theTestThisCalled)
assert(a.counts.theTestThatCalled) // In a path trait, this gets executed, but not reported
val tse = rep.testSucceededEventsReceived
assert(tse.size == 1)
assert(tse(0).testName === "should run this")
val tfe = rep.testFailedEventsReceived
assert(tfe.size === 0)
val tste = rep.testStartingEventsReceived
assert(tste.size === 1)
}
"should report as ignored, and not run, tests marked ignored" in {
class AFunSpec(val counts: TestWasCalledCounts) extends PathFunSpec {
it("test this") { counts.theTestThisCalled = true }
it("test that") { counts.theTestThatCalled = true }
override def newInstance = new AFunSpec(counts)
}
val a = new AFunSpec(TestWasCalledCounts(false, false))
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.counts.theTestThisCalled)
assert(a.counts.theTestThatCalled)
class BFunSpec(val counts: TestWasCalledCounts) extends PathFunSpec {
ignore("test this") { counts.theTestThisCalled = true }
it("test that") { counts.theTestThatCalled = true }
override def newInstance = new BFunSpec(counts)
}
val b = new BFunSpec(TestWasCalledCounts(false, false))
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB))
assert(repB.testIgnoredReceived)
assert(repB.lastEvent.isDefined)
assert(repB.lastEvent.get.testName endsWith "test this")
assert(!b.counts.theTestThisCalled)
assert(b.counts.theTestThatCalled)
class CFunSpec(val counts: TestWasCalledCounts) extends PathFunSpec {
it("test this") { counts.theTestThisCalled = true }
ignore("test that") { counts.theTestThatCalled = true }
override def newInstance = new CFunSpec(counts)
}
val c = new CFunSpec(TestWasCalledCounts(false, false))
val repC = new TestIgnoredTrackingReporter
c.run(None, Args(repC))
assert(repC.testIgnoredReceived)
assert(repC.lastEvent.isDefined)
assert(repC.lastEvent.get.testName endsWith "test that", repC.lastEvent.get.testName)
assert(c.counts.theTestThisCalled)
assert(!c.counts.theTestThatCalled)
      // Ignored tests should be reported in order of appearance in the file, so with
      // both tests ignored the last ignored event should be for "test that".
class DFunSpec(val counts: TestWasCalledCounts) extends PathFunSpec {
ignore("test this") { counts.theTestThisCalled = true }
ignore("test that") { counts.theTestThatCalled = true }
override def newInstance = new DFunSpec(counts)
}
val d = new DFunSpec(TestWasCalledCounts(false, false))
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD))
assert(repD.testIgnoredReceived)
assert(repD.lastEvent.isDefined)
assert(repD.lastEvent.get.testName endsWith "test that") // last because should be in order of appearance
assert(!d.counts.theTestThisCalled)
assert(!d.counts.theTestThatCalled)
}
"should ignore a test marked as ignored if run is invoked with that testName" in {
      // Unlike a regular suite, a path trait does not override an Ignore when a
      // specific testName is passed to run: the test is still reported as ignored
      // and its body is never invoked.
class EFunSpec(val counts: TestWasCalledCounts) extends PathFunSpec {
def this() { this(TestWasCalledCounts(false, false)) }
ignore("test this") { counts.theTestThisCalled = true }
it("test that") { counts.theTestThatCalled = true }
override def newInstance = new EFunSpec(counts)
}
val e = new EFunSpec
val repE = new EventRecordingReporter
e.run(Some("test this"), Args(repE))
assert(repE.testIgnoredEventsReceived.size === 1)
assert(!e.counts.theTestThisCalled)
assert(e.counts.theTestThatCalled) // In a path trait, tests other than the Some(testName) get executed, but not reported
val tste = repE.testStartingEventsReceived
assert(tste.size === 0)
}
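    // Filter(tagsToInclude, tagsToExclude) selects which tests are *reported*: with
    // tagsToInclude defined, only tests bearing one of those tags are reported, minus
    // any bearing an excluded tag. In a path trait the non-ignored test bodies still
    // *execute* (the counts below are set regardless); filtering affects reporting
    // only. A minimal sketch of the invocation pattern used throughout this block:
    //
    //   val filter = Filter(Some(Set("org.scalatest.SlowAsMolasses")),
    //                       Set("org.scalatest.FastAsLight"))
    //   spec.run(None, Args(reporter, Stopper.default, filter, ConfigMap.empty,
    //                       None, new Tracker, Set.empty))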
"should run only those tests selected by the tags to include and exclude sets" in {
// Nothing is excluded
class AFunSpec(val counts: TestWasCalledCounts) extends PathFunSpec {
it("test this", mytags.SlowAsMolasses) { counts.theTestThisCalled = true }
it("test that") { counts.theTestThatCalled = true }
override def newInstance = new AFunSpec(counts)
}
val a = new AFunSpec(TestWasCalledCounts(false, false))
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.counts.theTestThisCalled)
assert(a.counts.theTestThatCalled)
// SlowAsMolasses is included, one test should be excluded
class BFunSpec(val counts: TestWasCalledCounts) extends PathFunSpec {
it("test this", mytags.SlowAsMolasses) { counts.theTestThisCalled = true }
it("test that") { counts.theTestThatCalled = true }
override def newInstance = new BFunSpec(counts)
}
val b = new BFunSpec(TestWasCalledCounts(false, false))
val repB = new EventRecordingReporter
b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repB.testIgnoredEventsReceived.isEmpty)
assert(b.counts.theTestThisCalled)
assert(b.counts.theTestThatCalled)
assert(repB.testStartingEventsReceived.size === 1)
assert(repB.testStartingEventsReceived(0).testName == "test this")
// SlowAsMolasses is included, and both tests should be included
class CFunSpec(val counts: TestWasCalledCounts) extends PathFunSpec {
it("test this", mytags.SlowAsMolasses) { counts.theTestThisCalled = true }
it("test that", mytags.SlowAsMolasses) { counts.theTestThatCalled = true }
override def newInstance = new CFunSpec(counts)
}
val c = new CFunSpec(TestWasCalledCounts(false, false))
val repC = new EventRecordingReporter
c.run(None, Args(repC, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repC.testIgnoredEventsReceived.isEmpty)
assert(c.counts.theTestThisCalled)
assert(c.counts.theTestThatCalled)
assert(repC.testStartingEventsReceived.size === 2)
      // SlowAsMolasses is included; both tests should be included, but one is ignored
class DFunSpec(val counts: TestWasCalledCounts) extends PathFunSpec {
ignore("test this", mytags.SlowAsMolasses) { counts.theTestThisCalled = true }
it("test that", mytags.SlowAsMolasses) { counts.theTestThatCalled = true }
override def newInstance = new DFunSpec(counts)
}
val d = new DFunSpec(TestWasCalledCounts(false, false))
val repD = new EventRecordingReporter
d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repD.testIgnoredEventsReceived.size === 1)
assert(!d.counts.theTestThisCalled)
assert(d.counts.theTestThatCalled)
assert(repD.testStartingEventsReceived.size === 1)
assert(repD.testStartingEventsReceived(0).testName === "test that")
case class ThreeCounts(var theTestThisCalled: Boolean, var theTestThatCalled: Boolean, var theTestTheOtherCalled: Boolean)
// SlowAsMolasses included, FastAsLight excluded
class EFunSpec(val counts: ThreeCounts) extends PathFunSpec {
it("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { counts.theTestThisCalled = true }
it("test that", mytags.SlowAsMolasses) { counts.theTestThatCalled = true }
it("test the other") { counts.theTestTheOtherCalled = true }
override def newInstance = new EFunSpec(counts)
}
val e = new EFunSpec(ThreeCounts(false, false, false))
val repE = new EventRecordingReporter
e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(repE.testIgnoredEventsReceived.isEmpty)
assert(e.counts.theTestThisCalled)
assert(e.counts.theTestThatCalled)
assert(e.counts.theTestTheOtherCalled)
assert(repE.testStartingEventsReceived.size === 1)
assert(repE.testStartingEventsReceived(0).testName === "test that")
// An Ignored test that was both included and excluded should not generate a TestIgnored event
class FFunSpec(val counts: ThreeCounts) extends PathFunSpec {
ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { counts.theTestThisCalled = true }
it("test that", mytags.SlowAsMolasses) { counts.theTestThatCalled = true }
it("test the other") { counts.theTestTheOtherCalled = true }
override def newInstance = new FFunSpec(counts)
}
val f = new FFunSpec(ThreeCounts(false, false, false))
val repF = new EventRecordingReporter
f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(repF.testIgnoredEventsReceived.isEmpty)
assert(!f.counts.theTestThisCalled)
assert(f.counts.theTestThatCalled)
assert(f.counts.theTestTheOtherCalled)
      assert(repF.testStartingEventsReceived.size === 1)
      assert(repF.testStartingEventsReceived(0).testName === "test that")
// An Ignored test that was not included should not generate a TestIgnored event
class GFunSpec(val counts: ThreeCounts) extends PathFunSpec {
it("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { counts.theTestThisCalled = true }
it("test that", mytags.SlowAsMolasses) { counts.theTestThatCalled = true }
ignore("test the other") { counts.theTestTheOtherCalled = true }
override def newInstance = new GFunSpec(counts)
}
val g = new GFunSpec(ThreeCounts(false, false, false))
val repG = new EventRecordingReporter
g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(repG.testIgnoredEventsReceived.isEmpty)
assert(g.counts.theTestThisCalled)
assert(g.counts.theTestThatCalled)
assert(!g.counts.theTestTheOtherCalled)
assert(repG.testStartingEventsReceived.size === 1)
assert(repG.testStartingEventsReceived(0).testName === "test that")
// No tagsToInclude set, FastAsLight excluded
class HFunSpec(val counts: ThreeCounts) extends PathFunSpec {
it("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { counts.theTestThisCalled = true }
it("test that", mytags.SlowAsMolasses) { counts.theTestThatCalled = true }
it("test the other") { counts.theTestTheOtherCalled = true }
override def newInstance = new HFunSpec(counts)
}
val h = new HFunSpec(ThreeCounts(false, false, false))
val repH = new EventRecordingReporter
h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repH.testIgnoredEventsReceived.isEmpty)
assert(h.counts.theTestThisCalled)
assert(h.counts.theTestThatCalled)
assert(h.counts.theTestTheOtherCalled)
assert(repH.testStartingEventsReceived.size === 2)
assert(repH.testStartingEventsReceived.exists(_.testName == "test that"))
assert(repH.testStartingEventsReceived.exists(_.testName == "test the other"))
// No tagsToInclude set, mytags.SlowAsMolasses excluded
class IFunSpec(val counts: ThreeCounts) extends PathFunSpec {
it("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { counts.theTestThisCalled = true }
it("test that", mytags.SlowAsMolasses) { counts.theTestThatCalled = true }
it("test the other") { counts.theTestTheOtherCalled = true }
override def newInstance = new IFunSpec(counts)
}
val i = new IFunSpec(ThreeCounts(false, false, false))
val repI = new EventRecordingReporter
i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repI.testIgnoredEventsReceived.isEmpty)
assert(i.counts.theTestThisCalled)
assert(i.counts.theTestThatCalled)
assert(i.counts.theTestTheOtherCalled)
assert(repI.testStartingEventsReceived.size === 1)
assert(repI.testStartingEventsReceived(0).testName === "test the other")
// No tagsToInclude set, mytags.SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
class JFunSpec(val counts: ThreeCounts) extends PathFunSpec {
ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { counts.theTestThisCalled = true }
ignore("test that", mytags.SlowAsMolasses) { counts.theTestThatCalled = true }
it("test the other") { counts.theTestTheOtherCalled = true }
override def newInstance = new JFunSpec(counts)
}
val j = new JFunSpec(ThreeCounts(false, false, false))
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repJ.testIgnoredReceived)
assert(!j.counts.theTestThisCalled)
assert(!j.counts.theTestThatCalled)
assert(j.counts.theTestTheOtherCalled)
// Same as previous, except Ignore specifically mentioned in excludes set
class KFunSpec(val counts: ThreeCounts) extends PathFunSpec {
ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { counts.theTestThisCalled = true }
ignore("test that", mytags.SlowAsMolasses) { counts.theTestThatCalled = true }
ignore("test the other") { counts.theTestTheOtherCalled = true }
override def newInstance = new KFunSpec(counts)
}
val k = new KFunSpec(ThreeCounts(false, false, false))
val repK = new TestIgnoredTrackingReporter
k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repK.testIgnoredReceived)
assert(!k.counts.theTestThisCalled)
assert(!k.counts.theTestThatCalled)
assert(!k.counts.theTestTheOtherCalled)
}
"should return the correct test count from its expectedTestCount method" in {
class AFunSpec extends PathFunSpec {
it("test this") {}
it("test that") {}
override def newInstance = new AFunSpec
}
val a = new AFunSpec
assert(a.expectedTestCount(Filter()) === 2)
class BFunSpec extends PathFunSpec {
ignore("test this") {}
it("test that") {}
override def newInstance = new BFunSpec
}
val b = new BFunSpec
assert(b.expectedTestCount(Filter()) === 1)
class CFunSpec extends PathFunSpec {
it("test this", mytags.FastAsLight) {}
it("test that") {}
override def newInstance = new CFunSpec
}
val c = new CFunSpec
assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) === 1)
class DFunSpec extends PathFunSpec {
it("test this", mytags.FastAsLight, mytags.SlowAsMolasses) {}
it("test that", mytags.SlowAsMolasses) {}
it("test the other thing") {}
override def newInstance = new DFunSpec
}
val d = new DFunSpec
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 1)
assert(d.expectedTestCount(Filter()) === 3)
class EFunSpec extends PathFunSpec {
it("test this", mytags.FastAsLight, mytags.SlowAsMolasses) {}
it("test that", mytags.SlowAsMolasses) {}
ignore("test the other thing") {}
// ignore("test the other thing") {}
override def newInstance = new EFunSpec
}
val e = new EFunSpec
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 0)
assert(e.expectedTestCount(Filter()) === 2)
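      // With an empty Filter, a Suites instance reports the sum of its nested suites'
      // expected counts: a(2) + b(1) + c(2) + d(3) + e(2) = 10; ignored tests are
      // excluded from the count by default.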
val f = new Suites(a, b, c, d, e)
assert(f.expectedTestCount(Filter()) === 10)
}
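    // `pending` throws TestPendingException, so both a body written as `(pending)` and
    // one that ends with `pending` after passing assertions report TestPending; a body
    // whose assertions pass without reaching `pending` reports TestSucceeded instead.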
"should generate a TestPending message when the test body is (pending)" in {
class AFunSpec extends PathFunSpec {
it("should do this") (pending)
it("should do that") {
assert(2 + 2 === 4)
}
it("should do something else") {
assert(2 + 2 === 4)
pending
}
override def newInstance = new AFunSpec
}
val a = new AFunSpec
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testPendingEventsReceived
assert(tp.size === 2)
}
"should generate a test failure if a Throwable, or an Error other than direct Error subtypes known in JDK 1.5, excluding AssertionError" in {
class AFunSpec extends PathFunSpec {
it("throws AssertionError") { throw new AssertionError }
it("throws plain old Error") { throw new Error }
it("throws Throwable") { throw new Throwable }
override def newInstance = new AFunSpec
}
val a = new AFunSpec
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tf = rep.testFailedEventsReceived
assert(tf.size === 3)
}
"should propagate out Errors that are direct subtypes of Error in JDK 1.5, other than AssertionError, causing Suites and Runs to abort." in {
class AFunSpec extends PathFunSpec {
it("throws AssertionError") { throw new OutOfMemoryError }
override def newInstance = new AFunSpec
}
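      // In a path trait, test bodies execute eagerly while the instance is being
      // constructed, so a fatal error such as OutOfMemoryError propagates out of the
      // constructor itself; hence the intercept wraps `new AFunSpec`, not run().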
// val a = new AFunSpec
intercept[OutOfMemoryError] {
new AFunSpec
// a.run(None, Args(SilentReporter))
}
}
/*
"should send InfoProvided events with aboutAPendingTest set to true for info calls made from a test that is pending" in {
class AFunSpec extends PathFunSpec with GivenWhenThen {
it("should do something else") {
given("two integers")
when("one is subracted from the other")
then("the result is the difference between the two numbers")
pending
}
override def newInstance = new AFunSpec
}
val a = new AFunSpec
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testPending = rep.testPendingEventsReceived
assert(testPending.size === 1)
val recordedEvents = testPending(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && ip.aboutAPendingTest.get)
}
}
"should send InfoProvided events with aboutAPendingTest set to false for info calls made from a test that is not pending" in {
class AFunSpec extends PathFunSpec with GivenWhenThen {
it("should do something else") {
given("two integers")
when("one is subracted from the other")
then("the result is the difference between the two numbers")
assert(1 + 1 === 2)
}
override def newInstance = new AFunSpec
}
val a = new AFunSpec
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testSucceeded = rep.testSucceededEventsReceived
assert(testSucceeded.size === 1)
val recordedEvents = testSucceeded(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && !ip.aboutAPendingTest.get)
}
}
*/
}
"when failure happens" - {
"should fire TestFailed event with correct stack depth info when test failed" in {
class TestSpec extends FunSpec {
it("fail scenario") {
assert(1 === 2)
}
describe("a feature") {
it("nested fail scenario") {
assert(1 === 2)
}
}
override def newInstance = new TestSpec
}
val rep = new EventRecordingReporter
val s1 = new TestSpec
s1.run(None, Args(rep))
assert(rep.testFailedEventsReceived.size === 2)
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FunSpecSpec.scala")
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 14)
assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FunSpecSpec.scala")
assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 12)
}
"should generate TestRegistrationClosedException with correct stack depth info when has a it nested inside a it" in {
class TestSpec extends FunSpec {
describe("a feature") {
it("a scenario") {
it("nested scenario") {
assert(1 === 2)
}
}
}
override def newInstance = new TestSpec
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FunSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 15)
}
}
}
|
svn2github/scalatest
|
src/test/scala/org/scalatest/path/FunSpecSpec.scala
|
Scala
|
apache-2.0
| 36154
|