code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.common
import org.apache.calcite.plan.{RelOptCluster, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.flink.api.dag.Transformation
import org.apache.flink.api.java.typeutils.InputTypeConfigurable
import org.apache.flink.runtime.state.KeyGroupRangeAssignment.DEFAULT_LOWER_BOUND_MAX_PARALLELISM
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.streaming.api.functions.sink.OutputFormatSinkFunction
import org.apache.flink.streaming.api.operators.SimpleOperatorFactory
import org.apache.flink.streaming.api.transformations.{LegacySinkTransformation, PartitionTransformation}
import org.apache.flink.streaming.runtime.partitioner.{KeyGroupStreamPartitioner, StreamPartitioner}
import org.apache.flink.table.api.{TableConfig, TableException}
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.catalog.{CatalogTable, ObjectIdentifier}
import org.apache.flink.table.connector.{ChangelogMode, ParallelismProvider}
import org.apache.flink.table.connector.sink.{DataStreamSinkProvider, DynamicTableSink, OutputFormatProvider, SinkFunctionProvider}
import org.apache.flink.table.data.RowData
import org.apache.flink.types.RowKind
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.plan.nodes.calcite.Sink
import org.apache.flink.table.planner.plan.nodes.physical.FlinkPhysicalRel
import org.apache.flink.table.planner.plan.utils.KeySelectorUtil
import org.apache.flink.table.planner.sinks.TableSinkUtils
import org.apache.flink.table.runtime.connector.sink.SinkRuntimeProviderContext
import org.apache.flink.table.runtime.operators.sink.{SinkNotNullEnforcer, SinkOperator}
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo
import org.apache.flink.table.types.logical.RowType
import org.apache.flink.table.utils.TableSchemaUtils
import scala.collection.JavaConversions._
/**
 * Base physical RelNode to write data to an external sink defined by a [[DynamicTableSink]].
 *
 * Shared base class for the batch and streaming physical sink nodes; the actual
 * translation into a runtime [[Transformation]] happens in [[createSinkTransformation]].
 */
class CommonPhysicalSink (
    cluster: RelOptCluster,
    traitSet: RelTraitSet,
    inputRel: RelNode,
    tableIdentifier: ObjectIdentifier,
    catalogTable: CatalogTable,
    tableSink: DynamicTableSink)
  extends Sink(cluster, traitSet, inputRel, tableIdentifier, catalogTable, tableSink)
  with FlinkPhysicalRel {

  /**
   * Common implementation to create sink transformation for both batch and streaming.
   *
   * @param env execution environment the sink pipeline is built against
   * @param inputTransformation upstream transformation producing the rows to write
   * @param tableConfig table configuration; read for the NOT NULL enforcer option
   * @param rowtimeFieldIndex index of the rowtime field, forwarded to the [[SinkOperator]]
   * @param isBounded whether the input is bounded; forwarded to the sink's
   *                  runtime-provider context
   * @param changelogMode changelog mode of the input; decides whether a key-by
   *                      re-partitioning is required when the sink parallelism differs
   *                      from the input parallelism
   * @return the final sink [[Transformation]]
   */
  protected def createSinkTransformation(
      env: StreamExecutionEnvironment,
      inputTransformation: Transformation[RowData],
      tableConfig: TableConfig,
      rowtimeFieldIndex: Int,
      isBounded: Boolean,
      changelogMode:ChangelogMode): Transformation[Any] = {
    val inputTypeInfo = InternalTypeInfo.of(FlinkTypeFactory.toLogicalRowType(getInput.getRowType))
    val runtimeProvider = tableSink.getSinkRuntimeProvider(
      new SinkRuntimeProviderContext(isBounded))
    // How NOT NULL violations should be handled, taken from the table configuration.
    val notNullEnforcer = tableConfig.getConfiguration
      .get(ExecutionConfigOptions.TABLE_EXEC_SINK_NOT_NULL_ENFORCER)
    val notNullFieldIndices = TableSinkUtils.getNotNullFieldIndices(catalogTable)
    // Physical field names of the table schema, handed to the enforcer
    // (presumably for error reporting — the enforcer implementation is not visible here).
    val fieldNames = catalogTable.getSchema.toPhysicalRowDataType
      .getLogicalType.asInstanceOf[RowType]
      .getFieldNames
      .toList.toArray
    val enforcer = new SinkNotNullEnforcer(notNullEnforcer, notNullFieldIndices, fieldNames)
    runtimeProvider match {
      // The combination of the two provider interfaces is explicitly rejected,
      // as the error message below explains.
      case _: DataStreamSinkProvider with ParallelismProvider =>
        throw new TableException("`DataStreamSinkProvider` is not allowed to work with" +
          " `ParallelismProvider`, " + "please see document of `ParallelismProvider`")
      case provider: DataStreamSinkProvider =>
        // Apply the NOT NULL enforcer as a filter, then let the provider
        // consume the resulting DataStream and build its own pipeline.
        val dataStream = new DataStream(env, inputTransformation).filter(enforcer)
        provider.consumeDataStream(dataStream).getTransformation.asInstanceOf[Transformation[Any]]
      case _ =>
        // SinkFunctionProvider / OutputFormatProvider path: wrap the function
        // into a SinkOperator below.
        val sinkFunction = runtimeProvider match {
          case provider: SinkFunctionProvider => provider.createSinkFunction()
          case provider: OutputFormatProvider =>
            val outputFormat = provider.createOutputFormat()
            new OutputFormatSinkFunction(outputFormat)
        }
        sinkFunction match {
          case itc: InputTypeConfigurable =>
            // configure the type if needed
            itc.setInputType(inputTypeInfo, env.getConfig)
          case _ => // nothing to do
        }
        val operator = new SinkOperator(env.clean(sinkFunction), rowtimeFieldIndex, enforcer)
        // On this branch the provider is required to also implement ParallelismProvider.
        assert(runtimeProvider.isInstanceOf[ParallelismProvider],
          "runtimeProvider with `ParallelismProvider` implementation is required")
        val inputParallelism = inputTransformation.getParallelism
        // Sink parallelism: the provider's configured value if present (must be
        // strictly positive), otherwise inherited from the input transformation.
        val parallelism = {
          val parallelismOptional = runtimeProvider.asInstanceOf[ParallelismProvider].getParallelism
          if (parallelismOptional.isPresent) {
            val parallelismPassedIn = parallelismOptional.get().intValue()
            if (parallelismPassedIn <= 0) {
              throw new TableException(s"Table: $tableIdentifier configured sink parallelism: " +
                s"$parallelismPassedIn should not be less than zero or equal to zero")
            }
            parallelismPassedIn
          } else {
            inputParallelism
          }
        }
        val primaryKeys = TableSchemaUtils.getPrimaryKeyIndices(catalogTable.getSchema)
        val theFinalInputTransformation = if (inputParallelism == parallelism ||
          changelogMode.containsOnly(RowKind.INSERT)) {
          // if the inputParallelism is equals to the parallelism or insert-only mode, do nothing.
          inputTransformation
        } else if (primaryKeys.isEmpty) {
          // A parallelism change on a non-insert-only changelog needs a primary
          // key to key-by on (see the else branch); without one we must fail.
          throw new TableException(s"Table: $tableIdentifier configured sink parallelism is: " +
            s"$parallelism, while the input parallelism is: $inputParallelism. Since " +
            s"configured parallelism is different from input parallelism and the changelog " +
            s"mode contains [${changelogMode.getContainedKinds.toList.mkString(",")}], which " +
            s"is not INSERT_ONLY mode, primary key is required but no primary key is found")
        } else {
          //key by before sink
          //according to [[StreamPhysicalExchange]]
          val selector = KeySelectorUtil.getRowDataSelector(primaryKeys, inputTypeInfo)
          val partitioner = new KeyGroupStreamPartitioner(selector,
            DEFAULT_LOWER_BOUND_MAX_PARALLELISM)
          val transformation = new PartitionTransformation(
            inputTransformation,
            partitioner.asInstanceOf[StreamPartitioner[RowData]])
          transformation.setParallelism(parallelism)
          transformation
        }
        // Wrap everything into a legacy sink transformation running the SinkOperator.
        new LegacySinkTransformation(
          theFinalInputTransformation,
          getRelDetailedDescription,
          SimpleOperatorFactory.of(operator),
          parallelism).asInstanceOf[Transformation[Any]]
    }
  }
}
| aljoscha/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/common/CommonPhysicalSink.scala | Scala | apache-2.0 | 7,952 |
package pl.project13.scala.akka.raft.cluster.clusters
import akka.remote.testkit.MultiNodeConfig
import com.typesafe.config.ConfigFactory
/**
 * Multi-node test topology with four roles, of which only two ("first" and
 * "third") participate in the raft cluster; the other two are given the
 * non-raft role "something".
 */
object FourNodesOnlyTwoRaftNodesCluster extends MultiNodeConfig {

  val first = role("first")
  val second = role("second")
  val third = role("third")
  val fourth = role("fourth")

  // Node index -> role, for convenient lookup from the test code.
  val nodes = Map(1 -> first, 2 -> second, 3 -> third, 4 -> fourth)

  val raftNodes = List(first, third)
  val nonRaftNodes = List(second, fourth)

  // cluster.conf layered over the default application configuration.
  private def clusterWithDefaults =
    ConfigFactory.parseResources("cluster.conf")
      .withFallback(ConfigFactory.load())

  commonConfig(clusterWithDefaults)

  nodeConfig(raftNodes: _*)(clusterWithDefaults)

  nodeConfig(nonRaftNodes: _*)(
    ConfigFactory.parseString(
      """
        |akka {
        | cluster {
        | roles = [ "something" ]
        | }
        |}
""".stripMargin)
  )
}
} | dmitraver/akka-raft | src/multi-jvm/scala/pl/project13/scala/akka/raft/cluster/clusters/FourNodesOnlyTwoRaftNodesCluster.scala | Scala | apache-2.0 | 915 |
package org.bitcoins.core.script.flag
import org.bitcoins.crypto.StringFactory
/** Created by chris on 3/23/16.
  * Factory for [[ScriptFlag]]s, which control how scripts are evaluated.
  */
trait ScriptFlagFactory extends StringFactory[ScriptFlag] {

  /** Every [[ScriptFlag]] known to bitcoin core, see
    * https://github.com/bitcoin/bitcoin/blob/master/src/script/interpreter.h#L31.
    */
  private def flags: Seq[ScriptFlag] =
    Seq(
      ScriptVerifyNone,
      ScriptVerifyP2SH,
      ScriptVerifyStrictEnc,
      ScriptVerifyDerSig,
      ScriptVerifyLowS,
      ScriptVerifySigPushOnly,
      ScriptVerifyMinimalData,
      ScriptVerifyNullDummy,
      ScriptVerifyDiscourageUpgradableNOPs,
      ScriptVerifyCleanStack,
      ScriptVerifyCheckLocktimeVerify,
      ScriptVerifyCheckSequenceVerify,
      ScriptVerifyWitness,
      ScriptVerifyDiscourageUpgradableWitnessProgram,
      ScriptVerifyMinimalIf,
      ScriptVerifyNullFail,
      ScriptVerifyWitnessPubKeyType
    )

  /** Looks up the [[ScriptFlag]] whose name matches `str` exactly. */
  override def fromStringOpt(str: String): Option[ScriptFlag] =
    flags.find(_.name == str)

  /** Like [[fromStringOpt]], but throws if no flag matches. */
  override def fromString(str: String): ScriptFlag =
    fromStringOpt(str).getOrElse(
      sys.error(s"Could not find ScriptFlag for string=${str}"))

  /** Maps each string to its [[ScriptFlag]]; unknown names are silently dropped. */
  def fromList(list: Seq[String]): Seq[ScriptFlag] =
    list.flatMap(s => fromStringOpt(s))

  /** Parses a comma-separated list of flag names. */
  def fromList(str: String): Seq[ScriptFlag] =
    fromList(str.split(",").toList)

  /** Empty script flag. */
  def empty: Seq[ScriptFlag] = Nil
}

object ScriptFlagFactory extends ScriptFlagFactory
| bitcoin-s/bitcoin-s | core/src/main/scala/org/bitcoins/core/script/flag/ScriptFlagFactory.scala | Scala | mit | 1,875 |
package io.github.netvl.picopickle
import scala.annotation.implicitNotFound
import scala.reflect.ClassTag
import scala.{collection => coll}
import scala.collection.{mutable => mut}
import scala.collection.{immutable => imm}
import scala.collection.generic.CanBuildFrom
import scala.reflect.macros.whitebox
import scala.language.higherKinds
import scala.language.experimental.macros
trait MapPicklingComponent {
  /**
   * Implicit evidence that maps with keys of type `T` may be pickled at all.
   * An instance is obtained either from [[allowMapPicklingWithKeysOfType]]
   * (explicit opt-in) or from [[mapPicklingIsAllowedByDefault]], whose
   * behaviour is decided by the mixed-in subtrait.
   */
  @implicitNotFound("Can't [un]pickle a map with keys of type ${T} neither as an object nor as an array of tuples; " +
    "either define an `ObjectKeyReadWriter[${T}]` converter or explicitly allow " +
    "serializing this map as an array of tuples via putting " +
    "an implicit value returned by `allowMapPicklingWithKeyOfType[${T}]` in scope")
  sealed class MapPicklingIsAllowed[T]
  // Single shared instance; allowMapPicklingWithKeysOfType casts it to the requested key type.
  protected object MapPicklingIsAllowed extends MapPicklingIsAllowed[Nothing]

  /** Explicitly opts in to pickling maps keyed by `T`. */
  def allowMapPicklingWithKeysOfType[T]: MapPicklingIsAllowed[T] =
    MapPicklingIsAllowed.asInstanceOf[MapPicklingIsAllowed[T]]

  /** Default allowance policy; provided by the concrete subtrait mixed into the pickler. */
  def mapPicklingIsAllowedByDefault[T]: MapPicklingIsAllowed[T]
}
/** Policy trait that allows pickling maps with any key type out of the box. */
trait MapPicklingEnabledByDefault extends MapPicklingComponent {
  // by default it is enabled for all key types
  override implicit def mapPicklingIsAllowedByDefault[T]: MapPicklingIsAllowed[T] =
    allowMapPicklingWithKeysOfType[T]
}
/**
 * Policy trait that forbids pickling maps by default, except for key types
 * that can be used as object keys (i.e. have both an [[ObjectKeyReader]]
 * and an [[ObjectKeyWriter]]).
 */
trait MapPicklingDisabledByDefault extends MapPicklingComponent {
  this: ObjectKeyTypesComponent =>

  // here it is disabled via an aborting macro
  override implicit def mapPicklingIsAllowedByDefault[T]: MapPicklingIsAllowed[T] =
    macro MapPicklingDisabledByDefaultMacros.killItself[T]

  // but another implicit value is defined for all keys which are readable/writable as object keys
  implicit def mapPicklingIsAllowedForAppropriateKeyTypes[T: ObjectKeyReader: ObjectKeyWriter] =
    allowMapPicklingWithKeysOfType[T]
}
@macrocompat.bundle
class MapPicklingDisabledByDefaultMacros(val c: whitebox.Context) {
  /**
   * Unconditionally aborts macro expansion, so that the implicit it backs
   * can never be materialized; this is how map pickling is "disabled".
   */
  def killItself[T: c.WeakTypeTag]: c.Expr[T] =
    c.abort(c.enclosingPosition, "aborting expansion of an offending implicit")
}
/**
 * Writer instances for the standard Scala collections.
 *
 * Iterables are serialized as backend arrays. Maps are serialized as backend
 * objects when an [[ObjectKeyWriter]] for the key type is available, and as
 * arrays of key/value tuples otherwise — the `kw: ObjectKeyWriter[A] = null`
 * default parameters encode "no object-key writer found" (see the
 * `kw != null` check in [[mkMapWriter]]).
 */
trait CollectionWriters {
  this: ObjectKeyTypesComponent with MapPicklingComponent with BackendComponent with TypesComponent =>

  /** Builds a writer that serializes any iterable of `T` as a backend array. */
  protected final def mkIterableWriter[T, C[_] <: Iterable[_]](implicit w: Writer[T]): Writer[C[T]] =
    Writer { c =>
      backend.makeArray(c.iterator.asInstanceOf[Iterator[T]].map(e => w.write(e)).toVector)
    }

  /**
   * Builds a writer for a map type. With an object-key writer (`kw != null`)
   * the map becomes a backend object (optionally merging into an existing one);
   * without it, a backend array of serialized `(A, B)` tuples.
   */
  protected final def mkMapWriter[A, B, M[K, V] <: coll.Map[K, V] with coll.MapLike[K, V, M[K, V]]]
    (implicit wa: Writer[A], wb: Writer[B], wab: Writer[(A, B)], kw: ObjectKeyWriter[A]): Writer[M[A, B]] =
    if (kw != null) Writer.fromF0[M[A, B]] { (m: coll.MapLike[A, B, M[A, B]]) => {
      case Some(backend.Get.Object(obj)) => m.foldLeft(obj) { (acc, t) =>
        backend.setObjectKey(acc, kw.write(t._1), wb.write(t._2))
      }
      case None => backend.makeObject(m.map { case (k, v) => (kw.write(k), wb.write(v)) }.toMap)
    }}
    else Writer[M[A, B]] { (m: coll.MapLike[A, B, M[A, B]]) =>
      backend.makeArray(m.map(t => wab.write(t)).toVector)
    }

  // --- iterable writers, one per concrete collection type --------------------

  implicit def iterableWriter[T: Writer]: Writer[Iterable[T]] = mkIterableWriter[T, Iterable]

  implicit def seqWriter[T: Writer]: Writer[coll.Seq[T]] = mkIterableWriter[T, coll.Seq]
  implicit def immSeqWriter[T: Writer]: Writer[imm.Seq[T]] = mkIterableWriter[T, imm.Seq]
  implicit def mutSeqWriter[T: Writer]: Writer[mut.Seq[T]] = mkIterableWriter[T, mut.Seq]

  implicit def setWriter[T: Writer]: Writer[coll.Set[T]] = mkIterableWriter[T, coll.Set]
  implicit def immSetWriter[T: Writer]: Writer[imm.Set[T]] = mkIterableWriter[T, imm.Set]
  implicit def mutSetWriter[T: Writer]: Writer[mut.Set[T]] = mkIterableWriter[T, mut.Set]

  implicit def indexedSeqWriter[T: Writer]: Writer[coll.IndexedSeq[T]] = mkIterableWriter[T, coll.IndexedSeq]
  implicit def immIndexedSeqWriter[T: Writer]: Writer[imm.IndexedSeq[T]] = mkIterableWriter[T, imm.IndexedSeq]
  implicit def mutIndexedSeqWriter[T: Writer]: Writer[mut.IndexedSeq[T]] = mkIterableWriter[T, mut.IndexedSeq]

  implicit def linearSeqWriter[T: Writer]: Writer[coll.LinearSeq[T]] = mkIterableWriter[T, coll.LinearSeq]
  implicit def immLinearSeqWriter[T: Writer]: Writer[imm.LinearSeq[T]] = mkIterableWriter[T, imm.LinearSeq]
  implicit def mutLinearSeqWriter[T: Writer]: Writer[mut.LinearSeq[T]] = mkIterableWriter[T, mut.LinearSeq]

  // Sorted sets additionally require an Ordering on the element type.
  implicit def sortedSetWriter[T: Writer: Ordering]: Writer[coll.SortedSet[T]] = mkIterableWriter[T, coll.SortedSet]
  implicit def immSortedSetWriter[T: Writer: Ordering]: Writer[imm.SortedSet[T]] = mkIterableWriter[T, imm.SortedSet]
  implicit def mutSortedSetWriter[T: Writer: Ordering]: Writer[mut.SortedSet[T]] = mkIterableWriter[T, mut.SortedSet]

  implicit def queueWriter[T: Writer]: Writer[imm.Queue[T]] = mkIterableWriter[T, imm.Queue]
  implicit def vectorWriter[T: Writer]: Writer[imm.Vector[T]] = mkIterableWriter[T, imm.Vector]
  implicit def listWriter[T: Writer]: Writer[imm.List[T]] = mkIterableWriter[T, imm.List]
  implicit def streamWriter[T: Writer]: Writer[imm.Stream[T]] = mkIterableWriter[T, imm.Stream]
  implicit def listSetWriter[T: Writer]: Writer[imm.ListSet[T]] = mkIterableWriter[T, imm.ListSet]
  implicit def treeSetWriter[T: Writer: Ordering]: Writer[imm.TreeSet[T]] = mkIterableWriter[T, imm.TreeSet]
  implicit def immHashSetWriter[T: Writer]: Writer[imm.HashSet[T]] = mkIterableWriter[T, imm.HashSet]
  implicit def mutHashSetWriter[T: Writer]: Writer[mut.HashSet[T]] = mkIterableWriter[T, mut.HashSet]
  implicit def bufferWriter[T: Writer]: Writer[mut.Buffer[T]] = mkIterableWriter[T, mut.Buffer]
  implicit def arrayBufferWriter[T: Writer]: Writer[mut.ArrayBuffer[T]] = mkIterableWriter[T, mut.ArrayBuffer]
  implicit def linkedListWriter[T: Writer]: Writer[mut.LinkedList[T]] = mkIterableWriter[T, mut.LinkedList]
  implicit def linkedHashSetWriter[T: Writer]: Writer[mut.LinkedHashSet[T]] = mkIterableWriter[T, mut.LinkedHashSet]

  // --- map writers; all require MapPicklingIsAllowed evidence for the key ----

  implicit def mapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
      kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[coll.Map[A, B]] = mkMapWriter[A, B, coll.Map]

  implicit def immMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
      kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[imm.Map[A, B]] = mkMapWriter[A, B, imm.Map]

  implicit def mutMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
      kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[mut.Map[A, B]] = mkMapWriter[A, B, mut.Map]

  implicit def immHashMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
      kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[imm.HashMap[A, B]] = mkMapWriter[A, B, imm.HashMap]

  implicit def mutHashMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
      kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[mut.HashMap[A, B]] = mkMapWriter[A, B, mut.HashMap]

  implicit def treeMapWriter[A: Writer: Ordering, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
      kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[imm.TreeMap[A, B]] = mkMapWriter[A, B, imm.TreeMap]

  implicit def listMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
      kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[imm.ListMap[A, B]] = mkMapWriter[A, B, imm.ListMap]

  implicit def linkedHashMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
      kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[mut.LinkedHashMap[A, B]] = mkMapWriter[A, B, mut.LinkedHashMap]

  // Arrays are serialized exactly like iterables (delegates to iterableWriter).
  implicit def arrayWriter[T: Writer]: Writer[Array[T]] = Writer {
    case arr => iterableWriter[T].write(arr)
  }
}
/**
 * Reader instances for the standard Scala collections, mirroring
 * [[CollectionWriters]].
 *
 * Iterables are read from backend arrays. Maps are read from backend objects
 * when an [[ObjectKeyReader]] for the key type exists (`kr != null`), and from
 * arrays of `(A, B)` tuples otherwise.
 */
trait CollectionReaders {
  this: ObjectKeyTypesComponent with MapPicklingComponent with BackendComponent with TypesComponent =>

  /** Builds a reader that deserializes a backend array into a collection of `T`. */
  protected final def mkIterableReader[T, C[_] <: Iterable[_]](implicit r: Reader[T],
      cbf: CanBuildFrom[C[T], T, C[T]]): Reader[C[T]] =
    Reader.reading {
      case backend.Extract.Array(arr) => arr.map(r.read).to[C]
    }.orThrowing(whenReading = "iterable", expected = "array")

  /**
   * Builds a reader for a map type: from a backend object when an object-key
   * reader is available (`kr != null`), from an array of tuples otherwise.
   */
  protected final def mkMapReader[A, B, M[_, _] <: coll.Map[_, _]]
    (implicit ra: Reader[A], rb: Reader[B], kr: ObjectKeyReader[A], rab: Reader[(A, B)],
     cbf: CanBuildFrom[M[A, B], (A, B), M[A, B]]) =
    if (kr != null) Reader.reading {
      case backend.Extract.Object(m) =>
        val builder = cbf.apply()
        m.foreach {
          case (k, v) => builder += (kr.read(k) -> rb.read(v))
        }
        builder.result()
    }.orThrowing(whenReading = "map with object keys", expected = "object") else Reader.reading {
      case backend.Extract.Array(arr) =>
        val builder = cbf.apply()
        arr.foreach { e => builder += rab.read(e) }
        builder.result()
    }.orThrowing(whenReading = "map", expected = "array")

  // --- iterable readers, one per concrete collection type --------------------

  implicit def seqReader[T: Reader]: Reader[coll.Seq[T]] = mkIterableReader[T, coll.Seq]
  implicit def immSeqReader[T: Reader]: Reader[imm.Seq[T]] = mkIterableReader[T, imm.Seq]
  implicit def mutSeqReader[T: Reader]: Reader[mut.Seq[T]] = mkIterableReader[T, mut.Seq]

  implicit def setReader[T: Reader]: Reader[coll.Set[T]] = mkIterableReader[T, coll.Set]
  implicit def immSetReader[T: Reader]: Reader[imm.Set[T]] = mkIterableReader[T, imm.Set]
  implicit def mutSetReader[T: Reader]: Reader[mut.Set[T]] = mkIterableReader[T, mut.Set]

  implicit def indexedSeqReader[T: Reader]: Reader[coll.IndexedSeq[T]] = mkIterableReader[T, coll.IndexedSeq]
  implicit def immIndexedSeqReader[T: Reader]: Reader[imm.IndexedSeq[T]] = mkIterableReader[T, imm.IndexedSeq]
  implicit def mutIndexedSeqReader[T: Reader]: Reader[mut.IndexedSeq[T]] = mkIterableReader[T, mut.IndexedSeq]

  implicit def linearSeqReader[T: Reader]: Reader[coll.LinearSeq[T]] = mkIterableReader[T, coll.LinearSeq]
  implicit def immLinearSeqReader[T: Reader]: Reader[imm.LinearSeq[T]] = mkIterableReader[T, imm.LinearSeq]
  implicit def mutLinearSeqReader[T: Reader]: Reader[mut.LinearSeq[T]] = mkIterableReader[T, mut.LinearSeq]

  // Sorted sets additionally require an Ordering on the element type.
  implicit def sortedSetReader[T: Reader: Ordering]: Reader[coll.SortedSet[T]] = mkIterableReader[T, coll.SortedSet]
  implicit def immSortedSetReader[T: Reader: Ordering]: Reader[imm.SortedSet[T]] = mkIterableReader[T, imm.SortedSet]
  implicit def mutSortedSetReader[T: Reader: Ordering]: Reader[mut.SortedSet[T]] = mkIterableReader[T, mut.SortedSet]

  implicit def queueReader[T: Reader]: Reader[imm.Queue[T]] = mkIterableReader[T, imm.Queue]
  implicit def vectorReader[T: Reader]: Reader[imm.Vector[T]] = mkIterableReader[T, imm.Vector]
  implicit def listReader[T: Reader]: Reader[imm.List[T]] = mkIterableReader[T, imm.List]
  implicit def streamReader[T: Reader]: Reader[imm.Stream[T]] = mkIterableReader[T, imm.Stream]
  implicit def listSetReader[T: Reader]: Reader[imm.ListSet[T]] = mkIterableReader[T, imm.ListSet]
  implicit def treeSetReader[T: Reader: Ordering]: Reader[imm.TreeSet[T]] = mkIterableReader[T, imm.TreeSet]
  implicit def immHashSetReader[T: Reader]: Reader[imm.HashSet[T]] = mkIterableReader[T, imm.HashSet]
  implicit def mutHashSetReader[T: Reader]: Reader[mut.HashSet[T]] = mkIterableReader[T, mut.HashSet]
  implicit def bufferReader[T: Reader]: Reader[mut.Buffer[T]] = mkIterableReader[T, mut.Buffer]
  implicit def arrayBufferReader[T: Reader]: Reader[mut.ArrayBuffer[T]] = mkIterableReader[T, mut.ArrayBuffer]
  implicit def linkedListReader[T: Reader]: Reader[mut.LinkedList[T]] = mkIterableReader[T, mut.LinkedList]
  implicit def linkedHashSetReader[T: Reader]: Reader[mut.LinkedHashSet[T]] = mkIterableReader[T, mut.LinkedHashSet]

  // --- map readers; all require MapPicklingIsAllowed evidence for the key ----

  implicit def mapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
      kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[coll.Map[A, B]] = mkMapReader[A, B, coll.Map]

  implicit def immMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
      kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[imm.Map[A, B]] = mkMapReader[A, B, imm.Map]

  implicit def mutMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
      kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[mut.Map[A, B]] = mkMapReader[A, B, mut.Map]

  implicit def immHashMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
      kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[imm.HashMap[A, B]] = mkMapReader[A, B, imm.HashMap]

  implicit def mutHashMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
      kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[mut.HashMap[A, B]] = mkMapReader[A, B, mut.HashMap]

  implicit def treeMapReader[A: Reader: Ordering, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
      kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[imm.TreeMap[A, B]] = mkMapReader[A, B, imm.TreeMap]

  implicit def listMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
      kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[imm.ListMap[A, B]] = mkMapReader[A, B, imm.ListMap]

  implicit def linkedHashMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
      kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[mut.LinkedHashMap[A, B]] = mkMapReader[A, B, mut.LinkedHashMap]

  // Arrays need a ClassTag to build the result array from the decoded elements.
  implicit def arrayReader[T: ClassTag](implicit r: Reader[T]): Reader[Array[T]] = Reader.reading {
    case backend.Extract.Array(arr) => arr.map(r.read).toArray[T]
  }.orThrowing(whenReading = "array", expected = "array")
}
/** Convenience trait bundling [[CollectionReaders]] and [[CollectionWriters]] together. */
trait CollectionReaderWritersComponent extends CollectionReaders with CollectionWriters {
  this: ObjectKeyTypesComponent with MapPicklingComponent with BackendComponent with TypesComponent =>
}
| netvl/picopickle | core/src/main/scala/io/github/netvl/picopickle/collections.scala | Scala | mit | 14,683 |
package org.moe.parser
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.ShouldMatchers
import org.moe.runtime._
import org.moe.interpreter._
import org.moe.ast._
import org.moe.parser._
/** Tests interpretation of single- and double-quoted string literals. */
class StringLiteralTestSuite extends FunSuite with BeforeAndAfter with ParserTestUtils with ShouldMatchers {

  // --- double-quoted strings -------------------------------------------------

  test("... basic test with a simple double-quoted string") {
    interpretCode(""" "hello world" """).unboxToString.get should equal ("hello world")
  }

  test("... basic test with a double-quoted string with control characters") {
    interpretCode(""" "foo\tbar\n" """).unboxToString.get should equal ("foo\tbar\n")
  }

  test("... basic test with a double-quoted string with escaped quotes") {
    interpretCode(""" "foo\"bar\"" """).unboxToString.get should equal ("foo\"bar\"")
  }

  test("... basic test with a double-quoted string with unicode escaped literals ") {
    interpretCode(""" "\x{03a3}" """).unboxToString.get should equal ("\\x{03a3}")
  }

  test("... basic test with double-quoted string with leading whitespace") {
    interpretCode(""" " hello world" """).unboxToString.get should equal (" hello world")
  }

  // --- single-quoted strings -------------------------------------------------

  test("... basic test with a simple single-quoted string") {
    interpretCode("'hello world'").unboxToString.get should equal ("hello world")
  }

  test("... basic test with a single-quoted string with control characters") {
    interpretCode("'foo\\tbar\\n'").unboxToString.get should equal ("foo\\tbar\\n")
  }

  test("... basic test with a single-quoted string with embedded double quotes") {
    interpretCode("'foo\"bar\"'").unboxToString.get should equal ("foo\"bar\"")
  }

  test("... basic test with a single-quoted string with unicode escaped literals ") {
    interpretCode("'\\x{03a3}'").unboxToString.get should equal ("\\x{03a3}")
  }

  test("... basic test with single-quoted string with leading whitespace") {
    interpretCode(""" ' hello world' """).unboxToString.get should equal (" hello world")
  }
}
| MoeOrganization/moe | src/test/scala/org/moe/parser/StringLiteralTestSuite.scala | Scala | mit | 2,310 |
package org.jetbrains.plugins.scala.macroAnnotations
import scala.language.experimental.macros
import scala.reflect.macros.whitebox
/**
* Calling this def macro will generate code that does operation on all maps associated with methodNames passed in
*
* NOTE: methodNames passed in should be in the same class and have @CachedWithoutModificationCount annotation
*
* Usage example:
* {{{
* @CachedWithoutModificationCount(synchronized = false, ValueWrapper.None)
* def foo: PsiClass = ???
*
* LowMemoryWatcher.register(new Runnable {
* def run(): Unit = {
* WorkWithCache.workWithCache("clear", "foo")
* }
* })
* }}}
* Author: Svyatoslav Ilinskiy
* Date: 10/20/15.
*/
object WorkWithCache {
  /**
   * Expands to a block that invokes `operation` on the cache map backing each
   * of the named methods (the map's name is the method name suffixed with
   * [[CachedWithoutModificationCount.cachedMapPostfix]]). Both arguments must
   * be compile-time string literals, since they are evaluated during expansion.
   */
  def workWithCache(operation: String, methodNames: String*): Unit = macro workWithCacheImpl

  def workWithCacheImpl(c: whitebox.Context)(operation: c.Expr[String], methodNames: c.Expr[String]*): c.Expr[Unit] = {
    import c.universe._
    // For every method name, generate `<name><cachedMapPostfix>.<operation>`.
    val statements = methodNames.map { s =>
      // c.eval forces the literal arguments to concrete strings at compile time.
      val name: TermName = TermName(c.eval[String](c.Expr[String](s.tree)))
      val op: TermName = TermName(c.eval[String](c.Expr[String](operation.tree)))
      q"${TermName(name + CachedWithoutModificationCount.cachedMapPostfix)}.$op"
    }
    // Splice all generated statements into one block.
    val res = q"..$statements"
    CachedMacro.println(res)
    c.Expr(res)
  }
}
| LPTK/intellij-scala | macroAnnotations/src/org/jetbrains/plugins/scala/macroAnnotations/WorkWithCache.scala | Scala | apache-2.0 | 1,345 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.exchange
import java.util.UUID
import java.util.concurrent._
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.NANOSECONDS
import scala.util.control.NonFatal
import org.apache.spark.{broadcast, SparkException}
import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.catalyst.plans.physical.{BroadcastMode, BroadcastPartitioning, Partitioning}
import org.apache.spark.sql.execution.{SparkPlan, SQLExecution}
import org.apache.spark.sql.execution.joins.HashedRelation
import org.apache.spark.sql.execution.metric.SQLMetrics
import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}
import org.apache.spark.util.{SparkFatalException, ThreadUtils}
/**
* A [[BroadcastExchangeExec]] collects, transforms and finally broadcasts the result of
* a transformed SparkPlan.
*/
case class BroadcastExchangeExec(
mode: BroadcastMode,
child: SparkPlan) extends Exchange {
private val runId: UUID = UUID.randomUUID
override lazy val metrics = Map(
"dataSize" -> SQLMetrics.createSizeMetric(sparkContext, "data size"),
"collectTime" -> SQLMetrics.createTimingMetric(sparkContext, "time to collect"),
"buildTime" -> SQLMetrics.createTimingMetric(sparkContext, "time to build"),
"broadcastTime" -> SQLMetrics.createTimingMetric(sparkContext, "time to broadcast"))
override def outputPartitioning: Partitioning = BroadcastPartitioning(mode)
override def doCanonicalize(): SparkPlan = {
BroadcastExchangeExec(mode.canonicalized, child.canonicalized)
}
@transient
private val timeout: Long = SQLConf.get.broadcastTimeout
@transient
private lazy val relationFuture: Future[broadcast.Broadcast[Any]] = {
// relationFuture is used in "doExecute". Therefore we can get the execution id correctly here.
val executionId = sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
val task = new Callable[broadcast.Broadcast[Any]]() {
override def call(): broadcast.Broadcast[Any] = {
// This will run in another thread. Set the execution id so that we can connect these jobs
// with the correct execution.
SQLExecution.withExecutionId(sqlContext.sparkSession, executionId) {
try {
// Setup a job group here so later it may get cancelled by groupId if necessary.
sparkContext.setJobGroup(runId.toString, s"broadcast exchange (runId $runId)",
interruptOnCancel = true)
val beforeCollect = System.nanoTime()
// Use executeCollect/executeCollectIterator to avoid conversion to Scala types
val (numRows, input) = child.executeCollectIterator()
if (numRows >= 512000000) {
throw new SparkException(
s"Cannot broadcast the table with 512 million or more rows: $numRows rows")
}
val beforeBuild = System.nanoTime()
longMetric("collectTime") += NANOSECONDS.toMillis(beforeBuild - beforeCollect)
// Construct the relation.
val relation = mode.transform(input, Some(numRows))
val dataSize = relation match {
case map: HashedRelation =>
map.estimatedSize
case arr: Array[InternalRow] =>
arr.map(_.asInstanceOf[UnsafeRow].getSizeInBytes.toLong).sum
case _ =>
throw new SparkException("[BUG] BroadcastMode.transform returned unexpected " +
s"type: ${relation.getClass.getName}")
}
longMetric("dataSize") += dataSize
if (dataSize >= (8L << 30)) {
throw new SparkException(
s"Cannot broadcast the table that is larger than 8GB: ${dataSize >> 30} GB")
}
val beforeBroadcast = System.nanoTime()
longMetric("buildTime") += NANOSECONDS.toMillis(beforeBroadcast - beforeBuild)
// Broadcast the relation
val broadcasted = sparkContext.broadcast(relation)
longMetric("broadcastTime") += NANOSECONDS.toMillis(
System.nanoTime() - beforeBroadcast)
SQLMetrics.postDriverMetricUpdates(sparkContext, executionId, metrics.values.toSeq)
broadcasted
} catch {
// SPARK-24294: To bypass scala bug: https://github.com/scala/bug/issues/9554, we throw
// SparkFatalException, which is a subclass of Exception. ThreadUtils.awaitResult
// will catch this exception and re-throw the wrapped fatal throwable.
case oe: OutOfMemoryError =>
throw new SparkFatalException(
new OutOfMemoryError("Not enough memory to build and broadcast the table to all " +
"worker nodes. As a workaround, you can either disable broadcast by setting " +
s"${SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key} to -1 or increase the spark " +
s"driver memory by setting ${SparkLauncher.DRIVER_MEMORY} to a higher value.")
.initCause(oe.getCause))
case e if !NonFatal(e) =>
throw new SparkFatalException(e)
}
}
}
}
BroadcastExchangeExec.executionContext.submit[broadcast.Broadcast[Any]](task)
}
// Kicks off the asynchronous broadcast by forcing the lazy `relationFuture`,
// so collecting/building/broadcasting the relation overlaps with the rest of
// query preparation instead of blocking at first use.
override protected def doPrepare(): Unit = {
  // Materialize the future.
  relationFuture
}
/** Row-based execution is not supported for a broadcast exchange; callers must
  * use the broadcast code path instead. Always throws. */
override protected def doExecute(): RDD[InternalRow] =
  throw new UnsupportedOperationException(
    "BroadcastExchange does not support the execute() code path.")
/**
 * Returns the broadcast variable produced by the asynchronous job started in
 * doPrepare(), blocking for at most `timeout` seconds.
 *
 * On timeout, the still-running broadcast job group is cancelled (via runId)
 * and the future is interrupted before a SparkException with remediation
 * hints is rethrown to the caller.
 */
override protected[sql] def doExecuteBroadcast[T](): broadcast.Broadcast[T] = {
  try {
    relationFuture.get(timeout, TimeUnit.SECONDS).asInstanceOf[broadcast.Broadcast[T]]
  } catch {
    case ex: TimeoutException =>
      logError(s"Could not execute broadcast in $timeout secs.", ex)
      if (!relationFuture.isDone) {
        // Cancel the in-flight collect/broadcast work before failing the query.
        sparkContext.cancelJobGroup(runId.toString)
        relationFuture.cancel(true)
      }
      throw new SparkException(s"Could not execute broadcast in $timeout secs. " +
        s"You can increase the timeout for broadcasts via ${SQLConf.BROADCAST_TIMEOUT.key} or " +
        s"disable broadcast join by setting ${SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key} to -1",
        ex)
  }
}
}
object BroadcastExchangeExec {
  // Shared daemon thread pool on which broadcast relations are collected and
  // built; its size is read once from the static conf
  // BROADCAST_EXCHANGE_MAX_THREAD_THRESHOLD at class-load time.
  private[execution] val executionContext = ExecutionContext.fromExecutorService(
    ThreadUtils.newDaemonCachedThreadPool("broadcast-exchange",
      SQLConf.get.getConf(StaticSQLConf.BROADCAST_EXCHANGE_MAX_THREAD_THRESHOLD)))
}
| highfei2011/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/BroadcastExchangeExec.scala | Scala | apache-2.0 | 7,535 |
package debop4s.data.slick3.northwind.schema
import java.sql.Blob
import debop4s.data.slick3.northwind.model._
import debop4s.data.slick3.schema.SlickComponent
import org.joda.time.DateTime
import shapeless._
/**
* NorthwindTables
* @author sunghyouk.bae@gmail.com
*/
trait NorthwindTables {self: SlickComponent =>
import driver.api._
private def addressUnapply(addr: AddressComponent) = AddressComponent.unapply(addr).get
/** Slick mapping for the Northwind `Categories` table. */
class Categories(tag: Tag) extends EntityTable[Category](tag, "Categories") {
  def id = column[Int]("CategoryID", O.PrimaryKey, O.AutoInc)
  def name = column[String]("CategoryName", O.Length(15, true))
  def description = column[Option[String]]("Description", O.Length(2000, true))
  // NOTE(review): the accessor is named `price` but maps the `Picture` BLOB
  // column — looks like a naming slip; must stay aligned with Category's fields.
  def price = column[Option[Blob]]("Picture")
  def * = (name, description, price, id.?) <>(Category.tupled, Category.unapply)
}

lazy val categories = EntityTableQuery[Category, Categories](cons = tag => new Categories(tag),
  idLens = lens[Category] >> 'id
)

/** Slick mapping for the Northwind `Customers` table (string primary key). */
class Customers(tag: Tag) extends EntityTable[Customer](tag, "Customers") {
  def id = column[String]("CustomerID", O.PrimaryKey, O.Length(5, false))
  def companyName = column[String]("CompanyName", O.NotNull, O.Length(40, true))
  def contactName = column[Option[String]]("ContactName", O.Length(30, true))
  def contactTitle = column[Option[String]]("ContactTitle", O.Length(30, true))
  def address = column[Option[String]]("Address", O.Length(60, true))
  def city = column[Option[String]]("City", O.Length(15, true))
  def region = column[Option[String]]("Region", O.Length(15, true))
  def postalCode = column[Option[String]]("PostalCode", O.Length(10, true))
  def country = column[Option[String]]("Country", O.Length(15, true))
  def phone = column[Option[String]]("Phone", O.Length(24, true))
  def fax = column[Option[String]]("Fax", O.Length(24, true))

  /** Example of mapping the five address columns into an AddressComponent. */
  def * = (companyName,
    contactName,
    contactTitle,
    (address, city, region, postalCode, country),
    phone,
    fax,
    id.?).shaped <>( {
    // Read side: fold the nested address tuple into Some(AddressComponent).
    case (companyName, contactName, contactTitle, addrComponent, phone, fax, id) =>
      Customer(companyName, contactName, contactTitle, Some(AddressComponent.tupled.apply(addrComponent)), phone, fax, id)
  }, {
    // Write side: unwrap the address; NOTE(review): `c.address.get` throws if
    // the address is None — assumes every persisted Customer has one.
    c: Customer =>
      Some((c.companyName, c.contactName, c.contactTitle, addressUnapply(c.address.get), c.phone, c.fax, c.id))
  })
}

lazy val customers = EntityTableQuery[Customer, Customers](cons = tag => new Customers(tag),
  idLens = lens[Customer] >> 'id)

/** Slick mapping for the Northwind `Employees` table. */
class Employees(tag: Tag) extends EntityTable[Employee](tag, "Employees") {
  def id = column[Int]("EmployeeID", O.PrimaryKey, O.AutoInc)
  def lastname = column[String]("LastName", O.NotNull, O.Length(20, true))
  def firstname = column[String]("FirstName", O.NotNull, O.Length(10, true))
  def title = column[Option[String]]("Title", O.Length(30, true))
  def titleOfCourtesy = column[Option[String]]("TitleOfCourtesy", O.Length(25, true))
  def birthDate = column[Option[DateTime]]("BirthDate")
  def hireDate = column[Option[DateTime]]("HireDate")
  def address = column[Option[String]]("Address", O.Length(60, true))
  def city = column[Option[String]]("City", O.Length(15, true))
  def region = column[Option[String]]("Region", O.Length(15, true))
  def postalCode = column[Option[String]]("PostalCode", O.Length(10, true))
  def country = column[Option[String]]("Country", O.Length(15, true))
  // NOTE(review): column name "homePhone" is lower-cased unlike the others — verify against schema.
  def homePhone = column[Option[String]]("homePhone", O.Length(24, true))
  def extension = column[Option[String]]("Extension", O.Length(4, true))
  def photo = column[Option[Blob]]("Photo")
  def notes = column[String]("Notes", O.NotNull, O.Length(2000, true))
  def reportsTo = column[Option[Int]]("ReportsTo")
  def photoPath = column[Option[String]]("PhotoPath", O.Length(255, true))
  def salary = column[Option[Float]]("Salary")

  def * = (lastname,
    firstname,
    title,
    titleOfCourtesy,
    birthDate,
    hireDate,
    (address, city, region, postalCode, country),
    homePhone,
    extension,
    photo,
    notes,
    reportsTo,
    photoPath,
    salary,
    id.?).shaped <>( {
    case (lastname, firstname, title, titleOfCourtesy, birthDate, hireDate,
    addrComponent,
    homePhone, extension, photo, notes, reportsTo, photoPath, salary, id) =>
      Employee(lastname, firstname, title, titleOfCourtesy, birthDate, hireDate,
        Some(AddressComponent.tupled.apply(addrComponent)),
        homePhone, extension, photo, notes, reportsTo, photoPath, salary, id)
  }, {
    // NOTE(review): `titleOfCoutesy` looks misspelled but must match the
    // Employee model's actual field name — verify against the model class.
    e: Employee =>
      Some((e.lastname, e.firstname, e.title, e.titleOfCoutesy, e.birthDate, e.hireDate,
        addressUnapply(e.address.get),
        e.homePhone, e.extension, e.photo, e.notes, e.reportsTo, e.photoPath, e.salary, e.id))
  })

  // All orders handled by this employee.
  def getOrders = orders.filter(_.employeeId === id)
}

lazy val employees = EntityTableQuery[Employee, Employees](cons = tag => new Employees(tag),
  idLens = lens[Employee] >> 'id)

/** Join table between employees and territories (composite, no entity class). */
class EmployeeTerritories(tag: Tag) extends Table[(Int, String)](tag, "EmployeeTerritories") {
  def employeeId = column[Int]("EmployeeID")
  def territoryId = column[String]("TerritoryID", O.Length(20, true))
  def * = (employeeId, territoryId)

  def employeeFK = foreignKey("FK_EmployeeTerritories_Employees", employeeId, employees)(_.id)
  def territoryFK = foreignKey("FK_EmployeeTerritories_Territory", territoryId, territories)(_.id)
}

lazy val employeeTerritories = TableQuery[EmployeeTerritories]

/** Slick mapping for the `Order Details` table (note the space in the DB name). */
class OrderDetails(tag: Tag) extends Table[OrderDetail](tag, "Order Details") {
  def orderId = column[Int]("OrderID", O.PrimaryKey)
  def productId = column[Int]("ProductID")
  def unitPrice = column[BigDecimal]("UnitPrice", O.SqlType("DECIMAL(10, 4)"), O.Default(0.000))
  def quantity = column[Short]("Quantity", O.Default(1))
  def discount = column[Double]("Discount", O.Default(0.0))
  def * = (orderId, productId, unitPrice, quantity, discount) <>(OrderDetail.tupled, OrderDetail.unapply)

  def orderFK = foreignKey("FK_OrderDetails_Orders", orderId, orders)(_.id)
  def productFK = foreignKey("FK_OrderDetails_Products", productId, products)(_.id)
}

lazy val orderDetails = TableQuery[OrderDetails]
/** Slick mapping for the Northwind `Orders` table, folding the five ship-to
  * columns into an AddressComponent on read/write. */
class Orders(tag: Tag) extends EntityTable[Order](tag, "Orders") {
  def id = column[Int]("orderID", O.PrimaryKey, O.AutoInc)
  def customerId = column[Option[String]]("CustomerID")
  def employeeId = column[Option[Int]]("EmployeeID")
  def orderDate = column[Option[DateTime]]("OrderDate")
  def requiredDate = column[Option[DateTime]]("RequiredDate")
  def shippedDate = column[Option[DateTime]]("ShippedDate")
  def shipVia = column[Option[Int]]("ShipVia")
  // Shippers
  def freight = column[BigDecimal]("Freight", O.Default(0.0))
  def shipName = column[Option[String]]("ShipName")
  def address = column[Option[String]]("ShipAddress", O.Length(60, true))
  def city = column[Option[String]]("ShipCity", O.Length(15, true))
  def region = column[Option[String]]("ShipRegion", O.Length(15, true))
  def postalCode = column[Option[String]]("ShipPostalCode", O.Length(10, true))
  def country = column[Option[String]]("ShipCountry", O.Length(15, true))

  def * = (customerId, employeeId, orderDate, requiredDate, shippedDate, shipVia, freight, shipName,
    (address, city, region, postalCode, country), id.?).shaped <>( {
    case (customerId, employeeId, orderDate, requiredDate, shippedDate, shipVia, freight, shipName, shipAddr, id) =>
      Order(customerId, employeeId, orderDate, requiredDate, shippedDate, shipVia, freight, shipName, Some(AddressComponent.tupled.apply(shipAddr)), id)
  }, {
    o: Order =>
      // Fix: build the tuple explicitly instead of relying on deprecated
      // argument auto-tupling of Some(a, b, ...). Value is identical.
      // NOTE(review): `o.shipAddress.get` assumes every Order has a ship
      // address, consistent with the other table mappings in this trait.
      Some((o.customerId, o.employeeId, o.orderDate, o.requiredDate, o.shippedDate, o.shipVia, o.freight, o.shipName,
        addressUnapply(o.shipAddress.get), o.id))
  })

  def customerFK = foreignKey("FK_Orders_Customers", customerId, customers)(_.id)
  def employeeFK = foreignKey("FK_Orders_Employees", employeeId, employees)(_.id)
  def shipperFK = foreignKey("FK_Orders_Shippers", shipVia, shippers)(_.id)
}

lazy val orders = EntityTableQuery[Order, Orders](cons = tag => new Orders(tag),
  idLens = lens[Order] >> 'id)
/** Slick mapping for the Northwind `Products` table. */
class Products(tag: Tag) extends EntityTable[Product](tag, "Products") {
  def id = column[Int]("ProductID", O.PrimaryKey, O.AutoInc)
  def name = column[String]("ProductName", O.Length(40, true))
  def supplierId = column[Option[Int]]("SupplierID")
  def categoryId = column[Option[Int]]("CategoryID")
  def quantityPerUnit = column[Option[String]]("QuantityPerUnit", O.Length(20, true))
  def unitPrice = column[Option[BigDecimal]]("UnitPrice", O.Default(Some(0)))
  def unitsInStock = column[Option[Short]]("UnitsInStock", O.Default(Some(0)))
  def unitsOnOrder = column[Option[Short]]("UnitsOnOrder", O.Default(Some(0)))
  def reorderLevel = column[Option[Short]]("ReorderLevel", O.Default(Some(0)))
  def discontinued = column[Boolean]("Discontinued", O.Default(false))

  def * = (name, supplierId, categoryId,
    quantityPerUnit, unitPrice, unitsInStock, unitsOnOrder, reorderLevel,
    discontinued, id.?) <>(Product.tupled, Product.unapply)

  def idxName = index("ProductName", name)

  def categoryFK = foreignKey("FK_Products_Categories", categoryId, categories)(_.id, onDelete = ForeignKeyAction.Cascade)
  def supplierFK = foreignKey("FK_Products_Suppliers", supplierId, suppliers)(_.id, onDelete = ForeignKeyAction.Cascade)
}

// NOTE(review): only this query uses `new EntityTableQuery[...]`; every other
// one calls the companion apply — confirm both construct the same thing.
lazy val products = new EntityTableQuery[Product, Products](cons = tag => new Products(tag),
  idLens = lens[Product] >> 'id)

/** Slick mapping for the `Region` table (plain Table, id is not auto-generated). */
class Regions(tag: Tag) extends Table[Region](tag, "Region") {
  def id = column[Int]("RegionID", O.NotNull)
  def description = column[String]("RegionDescription", O.Length(50, true))
  def * = (id, description) <>(Region.tupled, Region.unapply)

  // Territories belonging to this region.
  def getTerritories = territories.filter(_.regionId === id)

  // Employees assigned (via territories) to this region.
  def getEmployees = {
    for {
      t <- territories if t.regionId === id
      et <- employeeTerritories if et.territoryId === t.id
      emp <- employees if emp.id === et.employeeId
    } yield emp
  }
}

lazy val regions = TableQuery[Regions]

/** Slick mapping for the Northwind `Shippers` table. */
class Shippers(tag: Tag) extends EntityTable[Shipper](tag, "Shippers") {
  def id = column[Int]("ShipperID", O.PrimaryKey, O.AutoInc)
  def name = column[String]("CompanyName", O.Length(40, true))
  // NOTE(review): length 254 differs from the usual Phone length of 24 — verify against schema.
  def phone = column[Option[String]]("Phone", O.Length(254, true))
  def * = (name, phone, id.?) <>(Shipper.tupled, Shipper.unapply)
}

lazy val shippers = EntityTableQuery[Shipper, Shippers](cons = tag => new Shippers(tag),
  idLens = lens[Shipper] >> 'id)

/** Slick mapping for the Northwind `Suppliers` table, with the address columns
  * folded into an AddressComponent like Customers/Employees/Orders. */
class Suppliers(tag: Tag) extends EntityTable[Supplier](tag, "Suppliers") {
  def id = column[Int]("SupplierID", O.PrimaryKey, O.AutoInc)
  def companyName = column[String]("CompanyName", O.Length(40, true))
  def contactName = column[Option[String]]("ContactName", O.Length(30, true))
  def contactTitle = column[Option[String]]("ContactTitle", O.Length(30, true))
  def address = column[Option[String]]("Address", O.Length(60, true))
  def city = column[Option[String]]("City", O.Length(15, true))
  def region = column[Option[String]]("Region", O.Length(15, true))
  def postalCode = column[Option[String]]("PostalCode", O.Length(10, true))
  def country = column[Option[String]]("Country", O.Length(15, true))
  def phone = column[Option[String]]("Phone", O.Length(24, true))
  def fax = column[Option[String]]("Fax", O.Length(24, true))
  def homepage = column[Option[String]]("Homepage", O.Length(15, true))

  def * = (
    companyName, contactName, contactTitle,
    (address, city, region, postalCode, country),
    phone, fax, homepage, id.?).shaped <>( {
    case (companyName, contactName, contactTitle, addrComponent, phone, fax, homepage, id) =>
      Supplier(companyName, contactName, contactTitle, Some(AddressComponent.tupled.apply(addrComponent)), phone, fax, homepage, id)
  }, { s: Supplier =>
    // NOTE(review): `s.address.get` assumes a supplier always has an address.
    Some((s.companyName, s.contactName, s.contactTitle, addressUnapply(s.address.get), s.phone, s.fax, s.homepage, s.id))
  })
}

lazy val suppliers = EntityTableQuery[Supplier, Suppliers](cons = tag => new Suppliers(tag),
  idLens = lens[Supplier] >> 'id)

/** Slick mapping for the Northwind `Territories` table (string primary key). */
class Territories(tag: Tag) extends EntityTable[Territory](tag, "Territories") {
  def id = column[String]("TerritoryID", O.PrimaryKey)
  def description = column[String]("TerritoryDescription")
  def regionId = column[Int]("RegionID", O.NotNull)
  def * = (description, regionId, id.?) <>(Territory.tupled, Territory.unapply)

  def regionFK = foreignKey("FK_Territories_Region", regionId, regions)(_.id)

  // Employees assigned to this territory, resolved through the join table.
  def getEmployees = for {
    et <- employeeTerritories if et.territoryId === id
    emp <- et.employeeFK
  } yield emp
}

lazy val territories = EntityTableQuery[Territory, Territories](cons = tag => new Territories(tag),
  idLens = lens[Territory] >> 'id)
}
| debop/debop4s | debop4s-data-slick3-northwind/src/main/scala/debop4s/data/slick3/northwind/schema/NorthwindTables.scala | Scala | apache-2.0 | 13,888 |
package de.frosner.broccoli.templates
import de.frosner.broccoli.signal.SignalManager
import org.mockito.Mockito.{times, verify}
import org.mockito.{ArgumentCaptor, Matchers}
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import sun.misc.{Signal, SignalHandler}
/**
 * Verifies that SignalRefreshedTemplateSource registers a USR2 handler on
 * construction and refreshes the wrapped CachedTemplateSource when the
 * signal fires, while plain loadTemplates() calls never trigger a refresh.
 */
class SignalRefreshedTemplateSourceSpec extends Specification with Mockito {

  "Receiving a SIGUSR2 signal" should {
    "update the cache" in {
      val signalManager = mock[SignalManager]
      val testTemplateSource = mock[CachedTemplateSource]
      val signalRefreshedTemplateSource = new SignalRefreshedTemplateSource(testTemplateSource, signalManager)

      // Capture the handler registered for USR2 so the signal can be simulated below.
      val handler = ArgumentCaptor.forClass(classOf[SignalHandler])
      there was one(signalManager).register(Matchers.eq(new Signal("USR2")), handler.capture())
      there was no(testTemplateSource).refresh()
      there was no(testTemplateSource).loadTemplates()

      // A normal load delegates to the wrapped source without refreshing.
      signalRefreshedTemplateSource.loadTemplates()
      there was no(testTemplateSource).refresh()
      // Consistency fix: the redundant raw Mockito verify(..., times(1)) that
      // duplicated this specs2 assertion has been removed.
      there was one(testTemplateSource).loadTemplates()

      // Simulating USR2 refreshes the cache exactly once; no extra load occurs.
      handler.getValue.handle(new Signal("USR2"))
      there was one(testTemplateSource).refresh()
      there was one(testTemplateSource).loadTemplates()
    }
  }
}
| FRosner/cluster-broccoli | server/src/test/scala/de/frosner/broccoli/templates/SignalRefreshedTemplateSourceSpec.scala | Scala | apache-2.0 | 1,303 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn.mkldnn
import com.intel.analytics.bigdl.mkl.Memory
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.BigDLSpecHelper
/**
 * Tests for ReorderMemory converting between heap- and native-backed MKL-DNN
 * memory in nc format. A reorder is value-preserving, so backward(input,
 * forward(input)) is expected to round-trip back to the original input.
 */
class ReorderMemorySpec extends BigDLSpecHelper {
  "From heap to native" should "be correct" in {
    // NOTE(review): the first argument is built with `new NativeData(...)` while
    // everywhere else the companion apply is used — confirm both are equivalent.
    val layer = ReorderMemory(new NativeData(Array(3, 4), Memory.Format.nc),
      HeapData(Array(3, 4), Memory.Format.nc))
    layer.setRuntime(new MklDnnRuntime())
    layer.initFwdPrimitives(Array(HeapData(Array(3, 4), Memory.Format.nc)), Phase.TrainingPhase)
    layer.initBwdPrimitives(Array(NativeData(Array(3, 4), Memory.Format.nc)), Phase.TrainingPhase)
    val input = Tensor[Float](3, 4).rand()
    val output = layer.forward(input)
    val grad = layer.backward(input, output)
    // Round-trip through the reorder must reproduce the original values.
    grad should be(input)
  }

  "From heap to heap" should "be correct" in {
    val layer = ReorderMemory(
      HeapData(Array(3, 4), Memory.Format.nc),
      HeapData(Array(3, 4), Memory.Format.nc),
      HeapData(Array(3, 4), Memory.Format.nc),
      HeapData(Array(3, 4), Memory.Format.nc)
    )
    layer.setRuntime(new MklDnnRuntime())
    layer.initFwdPrimitives(Array(HeapData(Array(3, 4), Memory.Format.nc)), Phase.TrainingPhase)
    // NOTE(review): a heap-to-heap test initializing the backward primitives
    // with NativeData looks inconsistent with the test name — verify intent.
    layer.initBwdPrimitives(Array(NativeData(Array(3, 4), Memory.Format.nc)), Phase.TrainingPhase)
    val input = Tensor[Float](3, 4).rand()
    val output = layer.forward(input)
    val grad = layer.backward(input, output)
    grad should be(input)
  }
}
| yiheng/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/mkldnn/ReorderMemorySpec.scala | Scala | apache-2.0 | 2,080 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.cassandra.tools.export
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.cassandra.data.CassandraDataStore
import org.locationtech.geomesa.cassandra.tools.export.CassandraExportCommand.CassandraExportParams
import org.locationtech.geomesa.cassandra.tools.{CassandraConnectionParams, CassandraDataStoreCommand}
import org.locationtech.geomesa.tools.export.ExportCommand
import org.locationtech.geomesa.tools.export.ExportCommand.ExportParams
import org.locationtech.geomesa.tools.{CatalogParam, OptionalIndexParam, RequiredTypeNameParam}
/** CLI command that exports features from a GeoMesa Cassandra data store,
  * reusing the generic ExportCommand machinery with Cassandra connectivity. */
class CassandraExportCommand extends ExportCommand[CassandraDataStore] with CassandraDataStoreCommand {
  override val params = new CassandraExportParams
}

object CassandraExportCommand {
  // JCommander parameter bag: generic export options plus the Cassandra
  // connection, catalog table, required type name and optional index selector.
  @Parameters(commandDescription = "Export features from a GeoMesa data store")
  class CassandraExportParams extends ExportParams with CassandraConnectionParams
    with CatalogParam with RequiredTypeNameParam with OptionalIndexParam
}
| aheyne/geomesa | geomesa-cassandra/geomesa-cassandra-tools/src/main/scala/org/locationtech/geomesa/cassandra/tools/export/CassandraExportCommand.scala | Scala | apache-2.0 | 1,486 |
// Copyright 2014 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime
import java.nio.ByteBuffer
import org.apache.thrift.TBase
import org.apache.thrift.protocol.{TField, TProtocol, TStruct, TType}
import org.apache.thrift.transport.TMemoryInputTransport
// Some protocols are "robust", i.e., they have complete field id and type information on the wire.
// Others are not. For example, TBSONProtocol is not robust: it uses field names instead of ids on
// the wire, and it represents an i16 as an i32, because BSON has no 16-bit integer type.
//
// If we read unknown fields using a non-robust protocol then we may not have enough information to
// write them back out in some other protocol.
//
// So, let's say we have some unknown fields that we read from protocol P1. There are two ways to serialize
// them back out to the wire using protocol P2:
//
// - Inline: If P1==P2, or P1 was robust, then we can emit the unknown fields in P2's regular stream of fields.
// The resulting output is as if the fields were known all along. When the P2 stream is later
// read back in, nothing special needs to happen.
//
// - As a blob: We serialize the unknown fields to a byte array using P1, and then emit that blob (together
// with the name of P1, so we know how to interpret the blob) as the value of a "magic" field in P2.
// Since we read the fields using P1, we can safely write them using P1. However when the P2 stream
// is later read back in, we need some special handling to unravel the blob.
object UnknownFieldsBlob {
  // The name and identifier of the magic field. We rely on the following to avoid collisions:
  // - The magic name is obscure, and so unlikely to collide with a real one.
  // - Field ids in thrift IDLs are required to be positive, so this magic id won't
  //   collide. We don't use -1 because that's occasionally used as a sentinel value.
  val magicField = new TField("__spindle_unknown_fields_blob", TType.STRUCT, -2)

  // The value of the magic field is itself a struct containing two fields: the protocol used to
  // serialize the blob, and the blob itself.
  //
  // It would be nice if that struct could be a real spindle struct generated from a .thrift file,
  // but that creates nasty bootstrapping issues, so we read/write it manually instead, using these fields:
  val unknownFieldsProtocol = new TField("protocol", TType.STRING, 1)
  val unknownFieldsContents = new TField("contents", TType.STRING, 2)

  // Turns the unknown fields to a blob. Matches UnknownFieldsBlob.read().
  // The fields are serialized as a nameless struct using the same protocol
  // they were originally read with, so they can always be re-read safely.
  def toBlob(unknownFields: UnknownFields): UnknownFieldsBlob = {
    val protocolName: String = unknownFields.inputProtocolName
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprotFactory = TProtocolInfo.getWriterFactory(protocolName)
    val oprot = oprotFactory.getProtocol(trans)
    oprot.writeStructBegin(new TStruct(""))
    unknownFields.writeInline(oprot)
    oprot.writeFieldStop()
    oprot.writeStructEnd()
    new UnknownFieldsBlob(protocolName, ByteBuffer.wrap(trans.getArray, 0, trans.length))
  }

  // Reads the contents of the magic field from the wire. Assumes the field header has
  // already been consumed. Matches UnknownFieldsBlob.write(): a struct of
  // (protocol name, binary contents) in that field order.
  def fromMagicField(iprot: TProtocol): UnknownFieldsBlob = {
    iprot.readStructBegin()
    iprot.readFieldBegin()
    val protocolName = iprot.readString()
    iprot.readFieldEnd()
    iprot.readFieldBegin()
    val buf: ByteBuffer = iprot.readBinary()
    iprot.readFieldEnd()
    iprot.readFieldBegin() // Consume the field stop.
    iprot.readStructEnd()
    new UnknownFieldsBlob(protocolName, buf)
  }
}
// A set of unknown fields, serialized as a blob using some protocol.
// A set of unknown fields, serialized as a blob using some protocol.
class UnknownFieldsBlob(protocolName: String, contents: ByteBuffer) {

  /**
   * Write this blob out to the magic field.
   * Matches UnknownFieldsBlob.fromMagicField().
   */
  // Explicit ": Unit =" replaces the deprecated procedure syntax "def write(...) {".
  def write(oprot: TProtocol): Unit = {
    oprot.writeFieldBegin(UnknownFieldsBlob.magicField)
    // Write the value struct manually.
    oprot.writeStructBegin(new TStruct(""))
    oprot.writeFieldBegin(UnknownFieldsBlob.unknownFieldsProtocol)
    oprot.writeString(protocolName)
    oprot.writeFieldEnd()
    oprot.writeFieldBegin(UnknownFieldsBlob.unknownFieldsContents)
    oprot.writeBinary(contents)
    oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
    oprot.writeFieldEnd()
  }

  /**
   * Let rec attempt to read out of the blob. Fields that are known to rec will be read in as
   * usual, and the rest will go into rec's unknown fields.
   * Matches UnknownFieldsBlob.toBlob().
   */
  def read(rec: TBase[_, _]): Unit = {
    val iprotFactory = TProtocolInfo.getReaderFactory(protocolName)
    // Respect arrayOffset/position/limit: the blob may be a slice of a larger array.
    val buf = contents.array
    val pos = contents.arrayOffset + contents.position
    val length = contents.limit - contents.position
    val iprot = iprotFactory.getProtocol(new TMemoryInputTransport(buf, pos, length))
    // Note that this call will happen inside an outer call to rec's read().
    rec.read(iprot)
  }
}
| foursquare/fsqio | src/jvm/io/fsq/spindle/codegen/__shaded_for_spindle_bootstrap__/runtime/UnknownFieldsBlob.scala | Scala | apache-2.0 | 5,128 |
package blended.updater.config
import com.typesafe.config.ConfigFactory
import org.scalatest.matchers.should.Matchers
import blended.testsupport.scalatest.LoggingFreeSpecLike
import blended.testsupport.BlendedTestSupport
import java.io.File
/**
 * Specs for ResolvedProfile construction: features must be resolvable, exactly
 * one framework bundle (start level 0) must exist, and feature-reference
 * cycles are rejected.
 *
 * Fix: the descriptions of the "no framework" and "multiple frameworks" tests
 * were swapped relative to their bodies; they now match the exceptions tested.
 */
class ResolvedProfileSpec extends LoggingFreeSpecLike with Matchers {

  private val featureDir : File = new File(BlendedTestSupport.projectTestOutput)

  "A Config with features references" - {

    val config : String = """
      |name = name
      |version = 1
      |bundles = [{url = "mvn:base:bundle1:1"}]
      |startLevel = 10
      |defaultStartLevel = 10
      |features = [
      | { url = "http://foobar.com/featuregrp/featurename/1", names = ["feature1"] }
      | { url = "http://foobar.com/featuregrp/featurename/1", names = ["feature2"] }
      |]
      |""".stripMargin

    val feature1 : String = """
      |repoUrl = "http://foobar.com/featuregrp/featurename/1"
      |name = feature1
      |bundles = [{url = "mvn:feature1:bundle1:1"}]
      |""".stripMargin

    val feature2 : String = """
      |repoUrl = "http://foobar.com/featuregrp/featurename/1"
      |name = feature2
      |bundles = [{url = "mvn:feature2:bundle1:1"}]
      |features = [{ url = "http://foobar.com/featuregrp/featurename/1", names = [ "feature3" ] }]
      |""".stripMargin

    // feature3 carries the framework bundle (the single bundle with startLevel 0).
    val feature3 : String = """
      |repoUrl = "http://foobar.com/featuregrp/featurename/1"
      |name = feature3
      |bundles = [{url = "mvn:feature3:bundle1:1", startLevel = 0}]
      |""".stripMargin

    val f3 : FeatureConfig = FeatureConfigCompanion.read(ConfigFactory.parseString(feature3)).get

    // feature4 and feature5 reference each other to exercise cycle detection.
    val feature4 : String = """
      |repoUrl = "http://foobar.com/featuregrp/featurename/1"
      |name = feature4
      |features = [{ url = "http://foobar.com/featuregrp/featurename/1", names = [ "feature5" ] }]
      |""".stripMargin

    val f4 : FeatureConfig = FeatureConfigCompanion.read(ConfigFactory.parseString(feature4)).get

    val feature5 : String = """
      |repoUrl = "http://foobar.com/featuregrp/featurename/1"
      |name = feature5
      |features = [{ url = "http://foobar.com/featuregrp/featurename/1", names = [ "feature4" ] }]
      |""".stripMargin

    val f5 : FeatureConfig = FeatureConfigCompanion.read(ConfigFactory.parseString(feature5)).get

    val features : List[FeatureConfig] = List(feature1, feature2, feature3).map(f => {
      FeatureConfigCompanion.read(ConfigFactory.parseString(f)).get
    })

    val profile: Profile = ProfileCompanion.read(ConfigFactory.parseString(config)).get

    "should be constructable with extra features" in {
      ResolvedProfile(profile, features)
    }

    "should be constructable with optional resolved features" in {
      ResolvedProfile(profile.copy(resolvedFeatures = features))
    }

    "should not be constructable when some feature refs are not resolved" in {
      intercept[UnresolvedFeatureException] {
        ResolvedProfile(profile)
      }
    }

    // Adding a second startLevel-0 bundle must be rejected as multiple frameworks.
    "should not be constructable with more than one bundle in start level 0" in {
      val anotherBundle : BundleConfig = BundleConfig(url = "mvn:feature3:bundle2:1", startLevel = 0)
      val fs : List[FeatureConfig] = features.filter { _.name != "feature3" } ++ Seq(f3.copy(bundles = anotherBundle :: f3.bundles))
      intercept[MultipleFrameworksException] {
        ResolvedProfile(profile.copy(resolvedFeatures = fs))
      }
    }

    // Removing every startLevel-0 bundle must be rejected as missing a framework.
    "should not be constructable when no bundle with startlevel 0 is present" in {
      val fs : List[FeatureConfig] = features.filter { _.name != "feature3" } ++ Seq(f3.copy(bundles = f3.bundles.map(_.copy(startLevel = None))))
      intercept[NoFrameworkException] {
        ResolvedProfile(profile.copy(resolvedFeatures = fs))
      }
    }

    "should not be constructable when cycles between feature refs exist" in {
      val resolver : FeatureResolver = new FeatureResolver(featureDir, f4 :: f5 :: features)
      intercept[CyclicFeatureRefException] {
        val cyclicProfile : Profile = profile.copy(
          features = FeatureRef(url = "http://foobar.com/featuregrp/featurename/1", names = List("feature4")) :: profile.features
        )
        resolver.resolve(cyclicProfile).get
      }
    }

    "should migrate all known features into RuntimeConfig.resolvedFeatures" in {
      profile.resolvedFeatures shouldBe empty
      features should have size (3)

      val resolver : FeatureResolver = new FeatureResolver(featureDir, features)

      val rrc1 : ResolvedProfile = resolver.resolve(profile).get
      rrc1.profile.resolvedFeatures should have size (3)

      // Resolving again from the already-resolved profile must be a no-op.
      val rrc2 = ResolvedProfile(rrc1.profile)
      rrc1.allReferencedFeatures.get should contain theSameElementsAs (rrc2.allReferencedFeatures.get)
      rrc1 should equal(rrc2)
    }
  }
}
| woq-blended/blended | blended.updater.config/jvm/src/test/scala/blended/updater/config/ResolvedProfileSpec.scala | Scala | apache-2.0 | 5,340 |
// Compiler test fixture (pos/t9369): pattern matching over a sealed hierarchy
// whose case classes also mix in a non-sealed trait. Bodies are intentionally
// unimplemented (???); only successful compilation matters here.
object Test {
  trait Tree

  sealed abstract class Prop
  trait Simple extends Prop
  case class Atom(tree: Tree) extends Prop with Simple
  case class Not(prop: Prop) extends Prop with Simple

  def simplify1(prop: Prop): Prop = prop match {
    case Atom(tree) => ???
    case Not(prop) => ??? // the bound `prop` shadows the parameter
    case _ => ???
  }

  // Nested patterns one level deep; `case _` keeps the match total.
  def simplify2(prop: Prop): Prop = prop match {
    case Not(Atom(tree)) => ???
    case Not(Not(prop)) => ???
    case _ => ???
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, CharArrayWriter, InputStreamReader, StringWriter}
import scala.util.parsing.combinator.RegexParsers
import com.fasterxml.jackson.core._
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.json._
import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, ArrayData, BadRecordException, FailFastMode, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.Utils
private[this] sealed trait PathInstruction
private[this] object PathInstruction {
private[expressions] case object Subscript extends PathInstruction
private[expressions] case object Wildcard extends PathInstruction
private[expressions] case object Key extends PathInstruction
private[expressions] case class Index(index: Long) extends PathInstruction
private[expressions] case class Named(name: String) extends PathInstruction
}
private[this] sealed trait WriteStyle
private[this] object WriteStyle {
private[expressions] case object RawStyle extends WriteStyle
private[expressions] case object QuotedStyle extends WriteStyle
private[expressions] case object FlattenStyle extends WriteStyle
}
/**
 * Parser combinators that translate a JSON path string (e.g. `$.a[0].b`) into a
 * list of [[PathInstruction]]s consumed by GetJsonObject.evaluatePath.
 */
private[this] object JsonPathParser extends RegexParsers {
  import PathInstruction._

  /** Every path must begin at the document root, written `$`. */
  def root: Parser[Char] = '$'

  /** A non-negative decimal literal.
   *  NOTE(review): `toLong` inside `^?` will throw (rather than fail the parse)
   *  on digit runs that overflow Long — assumed not to occur in practice. */
  def long: Parser[Long] = "\\d+".r ^? {
    case x => x.toLong
  }

  // parse `[*]` and `[123]` subscripts
  def subscript: Parser[List[PathInstruction]] =
    for {
      operand <- '[' ~> ('*' ^^^ Wildcard | long ^^ Index) <~ ']'
    } yield {
      Subscript :: operand :: Nil
    }

  // parse `.name` or `['name']` child expressions
  def named: Parser[List[PathInstruction]] =
    for {
      name <- '.' ~> "[^\\.\\[]+".r | "['" ~> "[^\\'\\?]+".r <~ "']"
    } yield {
      Key :: Named(name) :: Nil
    }

  // child wildcards: `..`, `.*` or `['*']`
  def wildcard: Parser[List[PathInstruction]] =
    (".*" | "['*']") ^^^ List(Wildcard)

  // ordering matters: wildcard must be tried before named so `.*` is not
  // consumed as a field called `*`
  def node: Parser[List[PathInstruction]] =
    wildcard |
      named |
      subscript

  /** A complete path: `$` followed by any number of nodes, flattened into one list. */
  val expression: Parser[List[PathInstruction]] = {
    phrase(root ~> rep(node) ^^ (x => x.flatten))
  }

  /** Parses `str`, returning None (instead of throwing) when it is not a valid path. */
  def parse(str: String): Option[List[PathInstruction]] = {
    this.parseAll(expression, str) match {
      case Success(result, _) =>
        Some(result)
      case NoSuccess(msg, next) =>
        None
    }
  }
}
/** A single JsonFactory shared by all JSON expressions in this file, so each
 *  evaluation does not have to construct its own factory. */
private[this] object SharedFactory {
  val jsonFactory = new JsonFactory()

  // Enabled for Hive compatibility
  jsonFactory.enable(JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS)
}
/**
 * Extracts json object from a json string based on json path specified, and returns json string
 * of the extracted json object. It will return null if the input json string is invalid.
 */
@ExpressionDescription(
  usage = "_FUNC_(json_txt, path) - Extracts a json object from `path`.",
  extended = """
    Examples:
      > SELECT _FUNC_('{"a":"b"}', '$.a');
       b
  """)
case class GetJsonObject(json: Expression, path: Expression)
  extends BinaryExpression with ExpectsInputTypes with CodegenFallback {

  import com.fasterxml.jackson.core.JsonToken._
  import PathInstruction._
  import SharedFactory._
  import WriteStyle._

  override def left: Expression = json
  override def right: Expression = path
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
  override def dataType: DataType = StringType
  override def nullable: Boolean = true
  override def prettyName: String = "get_json_object"

  // Cached parse of the path; only safe to use when `path` is foldable, which is
  // checked in eval() before this is read.
  @transient private lazy val parsedPath = parsePath(path.eval().asInstanceOf[UTF8String])

  /**
   * Evaluates the path against the json string, returning the matched fragment as a
   * UTF8String, or null when the json/path is null, the path does not parse, the
   * json is malformed, or nothing matches.
   */
  override def eval(input: InternalRow): Any = {
    val jsonStr = json.eval(input).asInstanceOf[UTF8String]
    if (jsonStr == null) {
      return null
    }

    // re-parse the path per row only when it is not foldable
    val parsed = if (path.foldable) {
      parsedPath
    } else {
      parsePath(path.eval(input).asInstanceOf[UTF8String])
    }

    if (parsed.isDefined) {
      try {
        /* We know the bytes are UTF-8 encoded. Pass a Reader to avoid having Jackson
          detect character encoding which could fail for some malformed strings */
        Utils.tryWithResource(CreateJacksonParser.utf8String(jsonFactory, jsonStr)) { parser =>
          val output = new ByteArrayOutputStream()
          val matched = Utils.tryWithResource(
            jsonFactory.createGenerator(output, JsonEncoding.UTF8)) { generator =>
            parser.nextToken()
            evaluatePath(parser, generator, RawStyle, parsed.get)
          }
          if (matched) {
            UTF8String.fromBytes(output.toByteArray)
          } else {
            null
          }
        }
      } catch {
        // malformed json is treated as "no match", per the function's contract
        case _: JsonProcessingException => null
      }
    } else {
      null
    }
  }

  /** Parses a json path string into instructions; None for null or unparsable paths. */
  private def parsePath(path: UTF8String): Option[List[PathInstruction]] = {
    if (path != null) {
      JsonPathParser.parse(path.toString)
    } else {
      None
    }
  }

  // advance to the desired array index, assumes to start at the START_ARRAY token
  // NOTE(review): indices are non-negative by construction (the path parser only
  // accepts "\d+"), so the missing negative case of this partial function cannot be hit.
  private def arrayIndex(p: JsonParser, f: () => Boolean): Long => Boolean = {
    case _ if p.getCurrentToken == END_ARRAY =>
      // terminate, nothing has been written
      false

    case 0 =>
      // we've reached the desired index
      val dirty = f()

      while (p.nextToken() != END_ARRAY) {
        // advance the token stream to the end of the array
        p.skipChildren()
      }

      dirty

    case i if i > 0 =>
      // skip this token and evaluate the next
      p.skipChildren()
      p.nextToken()
      arrayIndex(p, f)(i - 1)
  }

  /**
   * Evaluate a list of JsonPath instructions, returning a bool that indicates if any leaf nodes
   * have been written to the generator
   */
  private def evaluatePath(
      p: JsonParser,
      g: JsonGenerator,
      style: WriteStyle,
      path: List[PathInstruction]): Boolean = {
    (p.getCurrentToken, path) match {
      case (VALUE_STRING, Nil) if style == RawStyle =>
        // there is no array wildcard or slice parent, emit this string without quotes
        if (p.hasTextCharacters) {
          g.writeRaw(p.getTextCharacters, p.getTextOffset, p.getTextLength)
        } else {
          g.writeRaw(p.getText)
        }
        true

      case (START_ARRAY, Nil) if style == FlattenStyle =>
        // flatten this array into the parent
        var dirty = false
        while (p.nextToken() != END_ARRAY) {
          dirty |= evaluatePath(p, g, style, Nil)
        }
        dirty

      case (_, Nil) =>
        // general case: just copy the child tree verbatim
        g.copyCurrentStructure(p)
        true

      case (START_OBJECT, Key :: xs) =>
        var dirty = false
        while (p.nextToken() != END_OBJECT) {
          if (dirty) {
            // once a match has been found we can skip other fields
            p.skipChildren()
          } else {
            dirty = evaluatePath(p, g, style, xs)
          }
        }
        dirty

      case (START_ARRAY, Subscript :: Wildcard :: Subscript :: Wildcard :: xs) =>
        // special handling for the non-structure preserving double wildcard behavior in Hive
        var dirty = false
        g.writeStartArray()
        while (p.nextToken() != END_ARRAY) {
          dirty |= evaluatePath(p, g, FlattenStyle, xs)
        }
        g.writeEndArray()
        dirty

      case (START_ARRAY, Subscript :: Wildcard :: xs) if style != QuotedStyle =>
        // retain Flatten, otherwise use Quoted... cannot use Raw within an array
        val nextStyle = style match {
          case RawStyle => QuotedStyle
          case FlattenStyle => FlattenStyle
          case QuotedStyle => throw new IllegalStateException()
        }

        // temporarily buffer child matches, the emitted json will need to be
        // modified slightly if there is only a single element written
        val buffer = new StringWriter()

        var dirty = 0
        Utils.tryWithResource(jsonFactory.createGenerator(buffer)) { flattenGenerator =>
          flattenGenerator.writeStartArray()

          while (p.nextToken() != END_ARRAY) {
            // track the number of array elements and only emit an outer array if
            // we've written more than one element, this matches Hive's behavior
            dirty += (if (evaluatePath(p, flattenGenerator, nextStyle, xs)) 1 else 0)
          }
          flattenGenerator.writeEndArray()
        }

        val buf = buffer.getBuffer
        if (dirty > 1) {
          g.writeRawValue(buf.toString)
        } else if (dirty == 1) {
          // remove outer array tokens
          g.writeRawValue(buf.substring(1, buf.length()-1))
        } // else do not write anything

        dirty > 0

      case (START_ARRAY, Subscript :: Wildcard :: xs) =>
        var dirty = false
        g.writeStartArray()
        while (p.nextToken() != END_ARRAY) {
          // wildcards can have multiple matches, continually update the dirty count
          dirty |= evaluatePath(p, g, QuotedStyle, xs)
        }
        g.writeEndArray()
        dirty

      case (START_ARRAY, Subscript :: Index(idx) :: (xs@Subscript :: Wildcard :: _)) =>
        p.nextToken()
        // we're going to have 1 or more results, switch to QuotedStyle
        arrayIndex(p, () => evaluatePath(p, g, QuotedStyle, xs))(idx)

      case (START_ARRAY, Subscript :: Index(idx) :: xs) =>
        p.nextToken()
        arrayIndex(p, () => evaluatePath(p, g, style, xs))(idx)

      case (FIELD_NAME, Named(name) :: xs) if p.getCurrentName == name =>
        // exact field match
        if (p.nextToken() != JsonToken.VALUE_NULL) {
          evaluatePath(p, g, style, xs)
        } else {
          false
        }

      case (FIELD_NAME, Wildcard :: xs) =>
        // wildcard field match
        p.nextToken()
        evaluatePath(p, g, style, xs)

      case _ =>
        // token/instruction mismatch: skip this subtree, nothing written
        p.skipChildren()
        false
    }
  }
}
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(jsonStr, p1, p2, ..., pn) - Returns a tuple like the function get_json_object, but it takes multiple names. All the input parameters and output column types are string.",
  extended = """
    Examples:
      > SELECT _FUNC_('{"a":1, "b":2}', 'a', 'b');
       1  2
  """)
// scalastyle:on line.size.limit
case class JsonTuple(children: Seq[Expression])
  extends Generator with CodegenFallback {

  import SharedFactory._

  override def nullable: Boolean = {
    // a row is always returned
    false
  }

  // if processing fails this shared value will be returned
  @transient private lazy val nullRow: Seq[InternalRow] =
    new GenericInternalRow(Array.ofDim[Any](fieldExpressions.length)) :: Nil

  // the json body is the first child
  @transient private lazy val jsonExpr: Expression = children.head

  // the fields to query are the remaining children
  @transient private lazy val fieldExpressions: Seq[Expression] = children.tail

  // eagerly evaluate any foldable the field names
  // NOTE(review): a foldable field name that evaluates to null would NPE on
  // `.toString` here — assumed not to occur; confirm against the analyzer.
  @transient private lazy val foldableFieldNames: IndexedSeq[String] = {
    fieldExpressions.map {
      case expr if expr.foldable => expr.eval().asInstanceOf[UTF8String].toString
      case _ => null
    }.toIndexedSeq
  }

  // and count the number of foldable fields, we'll use this later to optimize evaluation
  @transient private lazy val constantFields: Int = foldableFieldNames.count(_ != null)

  // output columns are named c0..c(n-1), all nullable strings
  override def elementSchema: StructType = StructType(fieldExpressions.zipWithIndex.map {
    case (_, idx) => StructField(s"c$idx", StringType, nullable = true)
  })

  override def prettyName: String = "json_tuple"

  /** Requires a json expression plus at least one field name, all of string type. */
  override def checkInputDataTypes(): TypeCheckResult = {
    if (children.length < 2) {
      TypeCheckResult.TypeCheckFailure(s"$prettyName requires at least two arguments")
    } else if (children.forall(child => StringType.acceptsType(child.dataType))) {
      TypeCheckResult.TypeCheckSuccess
    } else {
      TypeCheckResult.TypeCheckFailure(s"$prettyName requires that all arguments are strings")
    }
  }

  /** Returns exactly one row: the requested field values, or all nulls on failure. */
  override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
    val json = jsonExpr.eval(input).asInstanceOf[UTF8String]
    if (json == null) {
      return nullRow
    }

    try {
      /* We know the bytes are UTF-8 encoded. Pass a Reader to avoid having Jackson
        detect character encoding which could fail for some malformed strings */
      Utils.tryWithResource(CreateJacksonParser.utf8String(jsonFactory, json)) { parser =>
        parseRow(parser, input)
      }
    } catch {
      // malformed json yields the all-null row rather than an error
      case _: JsonProcessingException =>
        nullRow
    }
  }

  /** Streams through the top-level object once, copying requested fields into the row. */
  private def parseRow(parser: JsonParser, input: InternalRow): Seq[InternalRow] = {
    // only objects are supported
    if (parser.nextToken() != JsonToken.START_OBJECT) {
      return nullRow
    }

    // evaluate the field names as String rather than UTF8String to
    // optimize lookups from the json token, which is also a String
    val fieldNames = if (constantFields == fieldExpressions.length) {
      // typically the user will provide the field names as foldable expressions
      // so we can use the cached copy
      foldableFieldNames
    } else if (constantFields == 0) {
      // none are foldable so all field names need to be evaluated from the input row
      fieldExpressions.map(_.eval(input).asInstanceOf[UTF8String].toString)
    } else {
      // if there is a mix of constant and non-constant expressions
      // prefer the cached copy when available
      foldableFieldNames.zip(fieldExpressions).map {
        case (null, expr) => expr.eval(input).asInstanceOf[UTF8String].toString
        case (fieldName, _) => fieldName
      }
    }

    val row = Array.ofDim[Any](fieldNames.length)

    // start reading through the token stream, looking for any requested field names
    while (parser.nextToken() != JsonToken.END_OBJECT) {
      if (parser.getCurrentToken == JsonToken.FIELD_NAME) {
        // check to see if this field is desired in the output
        val idx = fieldNames.indexOf(parser.getCurrentName)
        if (idx >= 0) {
          // it is, copy the child tree to the correct location in the output row
          val output = new ByteArrayOutputStream()

          // write the output directly to UTF8 encoded byte array
          if (parser.nextToken() != JsonToken.VALUE_NULL) {
            Utils.tryWithResource(jsonFactory.createGenerator(output, JsonEncoding.UTF8)) {
              generator => copyCurrentStructure(generator, parser)
            }

            row(idx) = UTF8String.fromBytes(output.toByteArray)
          }
        }
      }

      // always skip children, it's cheap enough to do even if copyCurrentStructure was called
      parser.skipChildren()
    }

    new GenericInternalRow(row) :: Nil
  }

  /** Copies the parser's current value to the generator, unquoting top-level strings. */
  private def copyCurrentStructure(generator: JsonGenerator, parser: JsonParser): Unit = {
    parser.getCurrentToken match {
      // if the user requests a string field it needs to be returned without enclosing
      // quotes which is accomplished via JsonGenerator.writeRaw instead of JsonGenerator.write
      case JsonToken.VALUE_STRING if parser.hasTextCharacters =>
        // slight optimization to avoid allocating a String instance, though the characters
        // still have to be decoded... Jackson doesn't have a way to access the raw bytes
        generator.writeRaw(parser.getTextCharacters, parser.getTextOffset, parser.getTextLength)

      case JsonToken.VALUE_STRING =>
        // the normal String case, pass it through to the output without enclosing quotes
        generator.writeRaw(parser.getText)

      case JsonToken.VALUE_NULL =>
        // a special case that needs to be handled outside of this method.
        // if a requested field is null, the result must be null. the easiest
        // way to achieve this is just by ignoring null tokens entirely
        throw new IllegalStateException("Do not attempt to copy a null field")

      case _ =>
        // handle other types including objects, arrays, booleans and numbers
        generator.copyCurrentStructure(parser)
    }
  }
}
/**
 * Converts an json input string to a [[StructType]] or [[ArrayType]] of [[StructType]]s
 * with the specified schema.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(jsonStr, schema[, options]) - Returns a struct value with the given `jsonStr` and `schema`.",
  extended = """
    Examples:
      > SELECT _FUNC_('{"a":1, "b":0.8}', 'a INT, b DOUBLE');
       {"a":1, "b":0.8}
      > SELECT _FUNC_('{"time":"26/08/2015"}', 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy'));
       {"time":"2015-08-26 00:00:00.0"}
  """)
// scalastyle:on line.size.limit
case class JsonToStructs(
    schema: DataType,
    options: Map[String, String],
    child: Expression,
    timeZoneId: Option[String] = None)
  extends UnaryExpression with TimeZoneAwareExpression with CodegenFallback with ExpectsInputTypes {

  override def nullable: Boolean = true

  def this(schema: DataType, options: Map[String, String], child: Expression) =
    this(schema, options, child, None)

  // Used in `FunctionRegistry`
  def this(child: Expression, schema: Expression) =
    this(
      schema = JsonExprUtils.validateSchemaLiteral(schema),
      options = Map.empty[String, String],
      child = child,
      timeZoneId = None)

  def this(child: Expression, schema: Expression, options: Expression) =
    this(
      schema = JsonExprUtils.validateSchemaLiteral(schema),
      options = JsonExprUtils.convertToMapData(options),
      child = child,
      timeZoneId = None)

  /** Only structs, or arrays of structs, are valid target schemas. */
  override def checkInputDataTypes(): TypeCheckResult = schema match {
    case _: StructType | ArrayType(_: StructType, _) =>
      super.checkInputDataTypes()
    case _ => TypeCheckResult.TypeCheckFailure(
      s"Input schema ${schema.simpleString} must be a struct or an array of structs.")
  }

  // The struct schema rows are parsed against. Non-exhaustive match is intentional:
  // checkInputDataTypes rejects everything except struct / array-of-struct.
  @transient
  lazy val rowSchema = schema match {
    case st: StructType => st
    case ArrayType(st: StructType, _) => st
  }

  // This converts parsed rows to the desired output by the given schema.
  @transient
  lazy val converter = schema match {
    case _: StructType =>
      // a struct schema yields null when the parser produced anything but one row
      (rows: Seq[InternalRow]) => if (rows.length == 1) rows.head else null
    case ArrayType(_: StructType, _) =>
      (rows: Seq[InternalRow]) => new GenericArrayData(rows)
  }

  // NOTE(review): `timeZoneId.get` assumes the analyzer has resolved the session
  // time zone (via withTimeZone) before first evaluation — throws otherwise; confirm.
  @transient
  lazy val parser =
    new JacksonParser(
      rowSchema,
      new JSONOptions(options + ("mode" -> FailFastMode.name), timeZoneId.get))

  override def dataType: DataType = schema

  override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
    copy(timeZoneId = Option(timeZoneId))

  /** Parses the json string; null for invalid/empty input (child null handled by caller). */
  override def nullSafeEval(json: Any): Any = {
    // When input is,
    // - `null`: `null`.
    // - invalid json: `null`.
    // - empty string: `null`.
    //
    // When the schema is array,
    // - json array: `Array(Row(...), ...)`
    // - json object: `Array(Row(...))`
    // - empty json array: `Array()`.
    // - empty json object: `Array(Row(null))`.
    //
    // When the schema is a struct,
    // - json object/array with single element: `Row(...)`
    // - json array with multiple elements: `null`
    // - empty json array: `null`.
    // - empty json object: `Row(null)`.

    // We need `null` if the input string is an empty string. `JacksonParser` can
    // deal with this but produces `Nil`.
    if (json.toString.trim.isEmpty) return null

    try {
      converter(parser.parse(
        json.asInstanceOf[UTF8String],
        CreateJacksonParser.utf8String,
        identity[UTF8String]))
    } catch {
      // FailFastMode surfaces malformed records as BadRecordException; map to null
      case _: BadRecordException => null
    }
  }

  override def inputTypes: Seq[AbstractDataType] = StringType :: Nil
}
/**
 * Converts a [[StructType]] or [[ArrayType]] of [[StructType]]s to a json output string.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr[, options]) - Returns a json string with a given struct value",
  extended = """
    Examples:
      > SELECT _FUNC_(named_struct('a', 1, 'b', 2));
       {"a":1,"b":2}
      > SELECT _FUNC_(named_struct('time', to_timestamp('2015-08-26', 'yyyy-MM-dd')), map('timestampFormat', 'dd/MM/yyyy'));
       {"time":"26/08/2015"}
      > SELECT _FUNC_(array(named_struct('a', 1, 'b', 2)));
       [{"a":1,"b":2}]
  """)
// scalastyle:on line.size.limit
case class StructsToJson(
    options: Map[String, String],
    child: Expression,
    timeZoneId: Option[String] = None)
  extends UnaryExpression with TimeZoneAwareExpression with CodegenFallback with ExpectsInputTypes {

  override def nullable: Boolean = true

  def this(options: Map[String, String], child: Expression) = this(options, child, None)

  // Used in `FunctionRegistry`
  def this(child: Expression) = this(Map.empty, child, None)

  def this(child: Expression, options: Expression) =
    this(
      options = JsonExprUtils.convertToMapData(options),
      child = child,
      timeZoneId = None)

  // Buffer reused across rows; cleared by `getAndReset` after each conversion.
  @transient
  lazy val writer = new CharArrayWriter()

  // NOTE(review): `timeZoneId.get` assumes the analyzer has resolved the session
  // time zone (via withTimeZone) before first evaluation — throws otherwise; confirm.
  @transient
  lazy val gen = new JacksonGenerator(
    rowSchema, writer, new JSONOptions(options, timeZoneId.get))

  // Non-exhaustive match is intentional: checkInputDataTypes rejects anything
  // that is not a struct or an array of structs.
  @transient
  lazy val rowSchema = child.dataType match {
    case st: StructType => st
    case ArrayType(st: StructType, _) => st
  }

  // This converts rows to the JSON output according to the given schema.
  @transient
  lazy val converter: Any => UTF8String = {
    // flush the generator and hand back the buffered json, resetting the
    // shared writer so it can be reused for the next row
    def getAndReset(): UTF8String = {
      gen.flush()
      val json = writer.toString
      writer.reset()
      UTF8String.fromString(json)
    }

    child.dataType match {
      case _: StructType =>
        (row: Any) => {
          gen.write(row.asInstanceOf[InternalRow])
          getAndReset()
        }
      case ArrayType(_: StructType, _) =>
        (arr: Any) => {
          gen.write(arr.asInstanceOf[ArrayData])
          getAndReset()
        }
    }
  }

  override def dataType: DataType = StringType

  /** Accepts structs / arrays of structs whose field types Jackson can serialize. */
  override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
    case _: StructType | ArrayType(_: StructType, _) =>
      try {
        JacksonUtils.verifySchema(rowSchema)
        TypeCheckResult.TypeCheckSuccess
      } catch {
        case e: UnsupportedOperationException =>
          TypeCheckResult.TypeCheckFailure(e.getMessage)
      }
    case _ => TypeCheckResult.TypeCheckFailure(
      s"Input type ${child.dataType.simpleString} must be a struct or array of structs.")
  }

  override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
    copy(timeZoneId = Option(timeZoneId))

  override def nullSafeEval(value: Any): Any = converter(value)

  override def inputTypes: Seq[AbstractDataType] = TypeCollection(ArrayType, StructType) :: Nil
}
object JsonExprUtils {

  /** Requires `exp` to be a string literal and parses it as a table schema;
   *  anything else is an analysis error. */
  def validateSchemaLiteral(exp: Expression): StructType = exp match {
    case Literal(value, StringType) =>
      CatalystSqlParser.parseTableSchema(value.toString)
    case other =>
      throw new AnalysisException(s"Expected a string literal instead of $other")
  }

  /** Evaluates a `map()` expression whose keys and values are all strings into a
   *  Scala Map; any other expression shape is an analysis error. */
  def convertToMapData(exp: Expression): Map[String, String] = {
    val expectedType = MapType(StringType, StringType, valueContainsNull = false)
    exp match {
      case createMap: CreateMap if createMap.dataType.acceptsType(expectedType) =>
        val evaluated = createMap.eval().asInstanceOf[ArrayBasedMapData]
        ArrayBasedMapData.toScalaMap(evaluated).map {
          case (k, v) => k.toString -> v.toString
        }
      case createMap: CreateMap =>
        throw new AnalysisException(
          s"A type of keys and values in map() must be string, but got ${createMap.dataType}")
      case _ =>
        throw new AnalysisException("Must use a map() function for options")
    }
  }
}
| wangyixiaohuihui/spark2-annotation | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala | Scala | apache-2.0 | 25,396 |
package scalads.mongodb
import scalads.core._
import reactivemongo.bson._
import scalads.core.SortDirection.{ASC, DSC}
import scala.concurrent.ExecutionContext
import scala.Some
import scalads.core.Projection
import scalads.core.CompositeFilter
import reactivemongo.api.collections.default.BSONCollection
import scalads.core.SingleFilter
import scala.concurrent.duration.Duration
import scalads.readers.ObjectReader
/**
* @author Bryce Anderson
* Created on 6/15/13
*/
class MongoQuery[U] private(val ds: MongoDatastore,
                            val transformer: MongoTransformer[U],
                            maxResults: Int,
                            filters: List[Filter],
                            sorts: List[BSONDocument])
                           (implicit ec: ExecutionContext) extends Query[U, ScalaDSObject] { self =>

  type Repr = MongoQuery[U]

  // NOTE(review): currently unused — presumably intended for awaiting query
  // results; confirm before removing.
  private val waitTime = Duration.Inf

  // Generates a fresh query
  def this(ds: MongoDatastore, tpe: MongoTransformer[U])(implicit ec: ExecutionContext) =
    this(ds, tpe, 0, Nil, Nil)(ec)

  /** Deletes the entity with the given BSON object id from the backing collection. */
  def remove(id: String): Unit = {
    ds.delete(new ScalaDSObject(transformer.typeName, BSONDocument("_id" -> BSONObjectID(id))))
  }

  /** Builds a nested BSON document along the path `lst`, applying `lastOp` to the
    * final key, e.g. List("a", "b") => { "a": lastOp("b") }.
    * NOTE(review): throws a MatchError on an empty path — callers are assumed to
    * always pass non-empty filter/sort paths.
    */
  private def makePath(lst: List[String], lastOp: (String) => BSONDocument): BSONDocument = lst match {
    case last :: Nil => lastOp(last)
    case h :: t => BSONDocument(h -> makePath(t, lastOp))
  }

  /** Generated a new query that will filter the results based on the filter
    *
    * @param filter filter to be applied to the query
    * @return new query with the filter applied
    */
  def setFilter(filter: Filter): MongoQuery[U] =
    new MongoQuery[U](ds, transformer, maxResults, filter :: filters, sorts)

  /** Sort the results based on the projection and sorting direction
    *
    * @param field Projection representing the field to sort by
    * @param dir direction with which to sort
    * @return new query which will sort the result by the field specified
    */
  def sortBy(field: Projection, dir: SortDirection): MongoQuery[U] = {
    val order = dir match {
      case ASC => 1
      case DSC => -1
    }
    val obj = makePath(field.path, str => BSONDocument(str -> order))
    new MongoQuery[U](ds, transformer, maxResults, filters, obj :: sorts)
  }

  // generates the BSONDocument for a filter
  private def filterwalk(f: Filter): BSONDocument = f match {
    case f: SingleFilter =>
      val op = f.op match {
        case Operation.EQ => "$eq"
        case Operation.LT => "$lt"
        case Operation.GT => "$gt"
        case Operation.LE => "$lte"
        case Operation.GE => "$gte"
        case Operation.NE => "$ne"
      }
      // equality is expressed by binding the value directly to the key rather
      // than through an operator document
      makePath(f.axis.path, key => BSONDocument(key -> {
        if (op != "$eq") BSONDocument(op -> MongoDatastore.mongoHandle(f.value))
        else MongoDatastore.mongoHandle(f.value)
      }))

    case CompositeFilter(f1, f2, JoinOperation.AND) =>
      val lst = BSONArray(filterwalk(f1), filterwalk(f2))
      BSONDocument("$and" -> lst)

    case CompositeFilter(f1, f2, JoinOperation.OR) =>
      val lst = BSONArray(filterwalk(f1), filterwalk(f2))
      BSONDocument("$or" -> lst)
  }

  /** Builds the mongo projection document ({ field: 1, ... }) for the requested
    * projections, or None when no projection was requested.
    *
    * Fix: the condition was previously inverted (`projs.isEmpty`), which produced
    * an empty projection document when nothing was requested and silently dropped
    * every requested projection otherwise.
    */
  private def buildProjection(projs: List[Projection]): Option[BSONDocument] =
    if (projs.nonEmpty) {
      Some(BSONDocument(projs.map { p => (p.path.head, BSONInteger(1)) }))
    } else None

  /** Assembles the reactivemongo query builder: combined filters, the optional
    * projection, and all sort documents applied in order. */
  private def getQueryBuilder(projection: Option[BSONDocument]) = {
    val grandFilter: BSONDocument = { // Make the filters
      val newFilters = filters.map(filterwalk)
      newFilters match {
        case Nil => BSONDocument()
        case f :: Nil => f
        case _ => // Join all the sub filters with an and operation
          BSONDocument("$and" -> BSONArray(newFilters))
      }
    }

    // Run the query, add the limit, and add the sort directions
    val coll = ds.db[BSONCollection](transformer.typeName)
    val it = sorts.foldRight {
      projection.fold(coll.find(grandFilter))(coll.find(grandFilter, _))
    }((s, it) => it.sort(s))
    it
  }

  /** Runs the query with the given projections, mapping each raw document through `f`. */
  def projectAndMap[T](projs: List[Projection], f: (DS, ObjectReader) => T): MongoIterator[T] = {
    val proj = buildProjection(projs)
    new MongoIterator[T](
      getQueryBuilder(proj).cursor,
      transformer.typeName,
      ec,
      d => f(ds, transformer.newReader(transformer.wrapDocument(d))),
      maxResults
    )
  }

  def runQuery = getIterator().map(_.ds_entity)

  /** Returns an iterator of deserialized entities backed by their datastore documents. */
  override def getIterator(): MongoIterator[U with EntityBacker[U, ScalaDSObject]] = {
    new MongoIterator[U with EntityBacker[U, ScalaDSObject]](
      getQueryBuilder(None).cursor,
      transformer.typeName,
      ec,
      d => transformer.deserialize(ds, transformer.wrapDocument(d)),
      maxResults
    )
  }

  /** Returns a copy of this query limited to `size` results. */
  def limit(size: Int): MongoQuery[U] =
    new MongoQuery[U](ds, transformer, size, filters, sorts)
}
| bryce-anderson/scalads | mongodb/src/main/scala/scalads/mongodb/MongoQuery.scala | Scala | apache-2.0 | 4,841 |
import sbt._
/** Third-party dependency coordinates shared by the sbt build definition. */
object Deps {
  // parsing: scala parser combinators and SQL statement parsing
  val parser = "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.4"
  val jsqlparser = "com.github.jsqlparser" % "jsqlparser" % "0.9.6"
  // CSV reading/writing
  val scalaCsv = "com.github.tototoshi" %% "scala-csv" % "1.3.4"
  // JDBC drivers: PostgreSQL and the in-memory H2 database
  val postgres = "org.postgresql" % "postgresql" % "9.4.1211"
  val h2 = "com.h2database" % "h2" % "1.4.193"
  // AWS SDK modules (S3 storage, STS credentials) — kept at the same version
  val s3 = "com.amazonaws" % "aws-java-sdk-s3" % "1.11.43"
  val sts = "com.amazonaws" % "aws-java-sdk-sts" % "1.11.43"
  // JSON AST
  val jawn = "org.spire-math" %% "jawn-ast" % "0.10.4"
  // testing: scalatest plus an S3-compatible proxy (presumably test-scoped — scope
  // is assigned where these are used, not here)
  val scalatest = "org.scalatest" %% "scalatest" % "3.0.0"
  val s3Proxy = "org.gaul" % "s3proxy" % "1.6.0"
  // archive/compression format support
  val commonsCompress = "org.apache.commons" % "commons-compress" % "1.17"
}
| opt-tech/redshift-fake-driver | project/Deps.scala | Scala | apache-2.0 | 710 |
/*
* PBPTest.scala
* Particle Belief Propagation tests.
*
* Created By: Brian Ruttenberg (bruttenberg@cra.com)
* Creation Date: Jan 15, 2014
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.test.experimental.particlebp
import org.scalatest.WordSpec
import org.scalatest.Matchers
import com.cra.figaro.algorithm.factored._
import com.cra.figaro.algorithm.factored.beliefpropagation._
import com.cra.figaro.language._
import com.cra.figaro.library.compound.If
import com.cra.figaro.library.compound.^^
import com.cra.figaro.library.atomic.discrete.{ Uniform => DUniform }
import com.cra.figaro.library.atomic.continuous.{ Uniform => CUniform }
import com.cra.figaro.library.compound.IntSelector
import com.cra.figaro.algorithm.UnsupportedAlgorithmException
import com.cra.figaro.library.atomic.continuous.Normal
import com.cra.figaro.experimental.particlebp.ParticleBeliefPropagation
import com.cra.figaro.algorithm.factored.ParticleGenerator
import com.cra.figaro.language.Name.stringToName
import com.cra.figaro.library.atomic.continuous.{ Uniform => CUniform }
import com.cra.figaro.library.atomic.discrete.{ Uniform => DUniform }
import com.cra.figaro.library.atomic.discrete.Geometric
import com.cra.figaro.library.atomic.continuous.Beta
import com.cra.figaro.algorithm.sampling.Importance
import com.cra.figaro.experimental.particlebp.AutomaticDensityEstimator
import com.cra.figaro.algorithm.sampling.ProbEvidenceSampler
import com.cra.figaro.ndtest._
import org.apache.commons.math3.distribution.MultivariateNormalDistribution
import akka.util.Timeout
import java.util.concurrent.TimeUnit
class PBPTest extends WordSpec with Matchers {
val globalTol = 0.025
val alpha = 0.05
"Running ParticleBeliefPropagation" should {
"resample after the inner loop" in {
Universe.createNew()
val n = Normal(0.0, 1.0)
val bpb = ParticleBeliefPropagation(1, 1, n)
bpb.runInnerLoop(Set(), Set())
val pbpSampler = ParticleGenerator(Universe.universe)
val samples = pbpSampler(n)
bpb.resample
samples should not be pbpSampler(n)
}
"resample according to the resampler values " in {
def mean(p: List[(Double, Double)]) = (0.0 /: p)((c: Double, n: (Double, Double)) => c + n._1 * n._2)
Universe.createNew()
val n = Normal(0.0, 1.0)
val pbpSampler = ParticleGenerator(Universe.universe, new AutomaticDensityEstimator, 100, 100)
val samples = pbpSampler(n)
pbpSampler.resample(n, samples, List(samples), 0.05)
val newSamples = pbpSampler(n)
mean(newSamples) should be(mean(samples) +- 0.1)
}
"change the factor structure with new samples on the outer loop" in {
Universe.createNew()
val n = Normal(2.0, 2.0)
val number = Apply(n, (d: Double) => d.round.toInt)
val items = Chain(number, (num: Int) => {
val f = for { i <- 0 until num } yield Flip(0.5)
Inject(f: _*)
})
val pbpSampler = ParticleGenerator(Universe.universe)
pbpSampler.update(n, pbpSampler.numArgSamples, List[(Double, _)]((1.0, 2.0)))
val bpb = ParticleBeliefPropagation(1, 1, items)
bpb.runOuterLoop
val fg_2 = bpb.bp.factorGraph.getNodes.filter(p => p.isInstanceOf[VariableNode]).toSet
pbpSampler.update(n, pbpSampler.numArgSamples, List[(Double, _)]((1.0, 3.0)))
val dependentElems = Set[Element[_]](n, number, items)
bpb.runInnerLoop(dependentElems, Set())
// Currently have to subtract 3 since the old factors for n = 2 also get created since they exist in the chain cache
val fg_3 = bpb.bp.factorGraph.getNodes.filter(p => p.isInstanceOf[VariableNode]).toSet
val diff = fg_3 -- fg_2
diff.nonEmpty should equal(true)
}
/* Due to the way that factors are implemented for Chain, all
* models that use chain will result in loops. To test a non-loopy
* graph we have to not use chain, which IntSelector does not.
*/
"with no loops in the factor graph give exact results" in {
Universe.createNew()
//com.cra.figaro.util.setSeed(13)
val e1 = CUniform(1.5, 4.5)
val ep = Apply(e1, (d: Double) => d.round.toInt)
val e2 = IntSelector(ep)
val bp = ParticleBeliefPropagation(5, 30, 100, 100, e2, e1, ep)
bp.start
val e2_0 = 0.33333333 * (0.5 + 0.3333333 + 0.25)
val e2_1 = 0.33333333 * (0.5 + 0.3333333 + 0.25)
val e2_2 = 0.33333333 * (0 + 0.3333333 + 0.25)
val e2_3 = 0.33333333 * (0 + 0 + 0.25)
val tol = 0.025
val a = bp.distribution(e1).toList
val b = bp.distribution(ep).toList
bp.probability(e2, (i: Int) => i == 0) should be(e2_0 +- tol)
bp.probability(e2)(_ == 1) should be(e2_1 +- tol)
bp.probability(e2, (i: Int) => i == 2) should be(e2_2 +- tol)
bp.probability(e2)(_ == 3) should be(e2_3 +- tol)
}
"correctly retrieve the last messages to recompute sample densities" in {
Universe.createNew()
val n = Normal(2.0, 2.0)
val number = Apply(n, (d: Double) => d.round.toInt)
val items = Chain(number, (num: Int) => {
val f = for { i <- 0 until num } yield Flip(0.5)
Inject(f: _*)
})
val bpb = ParticleBeliefPropagation(1, 1, items)
bpb.runInnerLoop(Set(), Set())
val msgs = bpb.getLastMessagesToNode(n)
msgs.size should be > 1
msgs(1).numVars should be(1)
msgs(1).contents.map(_._2).sum should be(0.0 +- .01)
}
"with no conditions or constraints produce the correct result for a one time algorithm" in {
val ndtest = new NDTest {
override def oneTest = {
Universe.createNew()
val u = CUniform(0.3, 0.9)
val f = Flip(u)
val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
val result = test(f, 1, 30, 100, 100, (b: Boolean) => b, 0.6, globalTol)
update(result, NDTest.TTEST, "NoConditionsOrConstraints", 0.6, alpha)
}
}
ndtest.run(10)
}
"with a condition on a dependent element produce the result with the correct probability for a one time algorithm" in {
val ndtest = new NDTest {
override def oneTest = {
Universe.createNew()
val u = CUniform(0.3, 0.9)
val f = Flip(u)
val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
a.setCondition((i: Int) => i == 2)
// U(true) = \\int_{0.2}^{1.0) 0.7 p = 0.35 * 0.96
// U(false) = \\int_{0.2}^{1.0) (1-p)
val u1 = 0.35 * 0.96
val u2 = 0.32
val result = test(f, 10, 30, 100, 100, (b: Boolean) => b, u1 / (u1 + u2), globalTol)
update(result - (u1 / (u1 + u2)), NDTest.TTEST, "ConditionOnDependentElement", 0.0, alpha)
}
}
ndtest.run(10)
}
"with a constraint on a dependent element produce the result with the correct probability for a one time algorithm" in {
val ndtest = new NDTest {
override def oneTest = {
Universe.createNew()
val u = CUniform(0.3, 0.9)
val f = Flip(u)
val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
a.setConstraint((i: Int) => i.toDouble)
// U(true) = \\int_{0.2}^{1.0} (0.3 + 2 * 0.7) p = 0.85 * 0.96
// U(false) = \\int_{0.2}^(1.0) (2 * (1-p)) = 0.64
val u1 = 0.85 * 0.96
val u2 = 0.64
val result = test(f, 10, 30, 100, 100, (b: Boolean) => b, u1 / (u1 + u2), globalTol)
update(result - (u1 / (u1 + u2)), NDTest.TTEST, "ConstraintOnDependentElement", 0.00, alpha)
}
}
ndtest.run(10)
}
// Anytime PBP with no evidence: P(f) should equal E[CUniform(0.3, 0.9)] = 0.6.
// The final `false` argument to `test` selects the anytime algorithm, in which
// case the second parameter (1000) is the running time in milliseconds.
"with no conditions or constraints produce the correct result for a anytime algorithm" in {
  Universe.createNew()
  val u = CUniform(0.3, 0.9)
  val f = Flip(u)
  val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
  test(f, 1000, 500, 100, 100, (b: Boolean) => b, 0.6, globalTol, false) should be(0.6 +- globalTol)
}
// Anytime counterpart of the one-time condition test above; same expected
// ratio u1 / (u1 + u2), verified with a t-test over 10 runs.
"with a condition on a dependent element produce the result with the correct probability for an anytime algorithm" in {
  val ndtest = new NDTest {
    override def oneTest = {
      Universe.createNew()
      val u = CUniform(0.3, 0.9)
      val f = Flip(u)
      val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
      a.setCondition((i: Int) => i == 2)
      // U(true) = \\int_{0.2}^{1.0} 0.7 p = 0.35 * 0.96
      // U(false) = \\int_{0.2}^{1.0} (1-p)
      val u1 = 0.35 * 0.96
      val u2 = 0.32
      // Anytime run: 5000 ms wall-clock time, 500 inner iterations.
      val result = test(f, 5000, 500, 100, 100, (b: Boolean) => b, u1 / (u1 + u2), globalTol, false)
      update(result, NDTest.TTEST, "ConditionOnDependentElement", u1 / (u1 + u2), alpha)
    }
  }
  ndtest.run(10)
}
// Anytime counterpart of the one-time constraint test above.
"with a constraint on a dependent element produce the result with the correct probability for an anytime algorithm" in {
  val ndtest = new NDTest {
    override def oneTest = {
      Universe.createNew()
      val u = CUniform(0.3, 0.9)
      val f = Flip(u)
      val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
      a.setConstraint((i: Int) => i.toDouble)
      // U(true) = \\int_{0.2}^{1.0} (0.3 + 2 * 0.7) p = 0.85 * 0.96
      // U(false) = \\int_{0.2}^{1.0} (2 * (1-p)) = 0.64
      val u1 = 0.85 * 0.96
      val u2 = 0.64
      val result = test(f, 5000, 500, 100, 100, (b: Boolean) => b, u1 / (u1 + u2), globalTol, false)
      update(result, NDTest.TTEST, "ConstraintOnDependentElement", u1 / (u1 + u2), alpha)
    }
  }
  ndtest.run(10)
}
// An element compared with itself (f === f) must always use a single sampled
// value for both occurrences, so the equality is true with probability 1.
"with an element that uses another element multiple times, " +
  "always produce the same value for the different uses" in {
  Universe.createNew()
  val f = CUniform(0.3, 0.9)
  val e = f === f
  test(e, 1, 30, 100, 100, (b: Boolean) => b, 1.0, globalTol) should be(1.0 +- globalTol)
}
// A constraint on f1 must be counted exactly once even though f1 appears in
// both e1 and e2; the hand-computed probabilities below assume single counting.
"with a constraint on an element that is used multiple times, only factor in the constraint once" in {
  Universe.createNew()
  val f1 = Flip(0.5)
  val f2 = Flip(0.3)
  val e1 = f1 === f1
  val e2 = f1 === f2
  val d = Dist(0.5 -> e1, 0.5 -> e2)
  // Soft constraint: weight 3 when f1 is true, 2 when false.
  f1.setConstraint((b: Boolean) => if (b) 3.0; else 2.0)
  // Probability that f1 is true = 0.6
  // Probability that e1 is true = 1.0
  // Probability that e2 is true = 0.6 * 0.3 + 0.4 * 0.7 = 0.46
  // Probability that d is true = 0.5 * 1 + 0.5 * 0.46 = 0.73
  test(d, 1, 30, 100, 100, (b: Boolean) => b, 0.73, globalTol) should be(0.73 +- globalTol)
}
// The algorithm is explicitly given universe u1 even though a different
// universe is current when it is constructed; it must still query f from u1.
// Expected P(f) = E[CUniform(0.3, 0.9)] = 0.6.
"on a different universe from the current universe, produce the correct result" in {
  val ndtest = new NDTest {
    override def oneTest = {
      val u1 = Universe.createNew()
      val u = CUniform(0.3, 0.9)
      val f = Flip(u)
      val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
      // Make a fresh universe current; f lives in u1, passed explicitly below.
      Universe.createNew()
      val algorithm = ParticleBeliefPropagation(1, 20, 100, 100, f)(u1)
      algorithm.start()
      val result = algorithm.probability(f)(b => b)
      algorithm.kill()
      update(result, NDTest.TTEST, "DifferentUniverse", 0.6, alpha)
    }
  }
  ndtest.run(10)
}
/*
"with a posterior different than the prior, converge upon the correct answer on a discrete variable" in {
Universe.createNew()
val fp = Geometric(0.9)
val f = Flip(Apply(fp, (i: Int) => 1.0/(1.0+math.exp(-1*i.toDouble/10))))
val s1 = Select(0.1 -> 1, 0.9 -> 2)
val s2 = Select(0.7 -> 1, 0.2 -> 2, 0.1 -> 3)
val c = Chain(f, (b: Boolean) => if (b) s1; else s2)
c.observe(1)
// ans obtained by importance sampling = 7.49
val algorithm = ParticleBeliefPropagation(10, 40, 50, 50, fp)
algorithm.start()
algorithm.expectation(fp, (i: Int) => i.toDouble) should be(7.49 +- globalTol)
}
*
*/
// Observing c = 1 changes the posterior of the continuous prior fp; the
// expected posterior mean is derived in the comment below (.375).
"with a posterior different than the prior, converge upon the correct posterior distribution of a continuous variable" in {
  val ndtest = new NDTest {
    override def oneTest = {
      Universe.createNew()
      val fp = CUniform(0.0, 1.0)
      val f = Flip(fp)
      val s1 = Select(0.1 -> 1, 0.9 -> 2)
      val s2 = Select(0.7 -> 1, 0.2 -> 2, 0.1 -> 3)
      val c = Chain(f, (b: Boolean) => if (b) s1; else s2)
      c.observe(1)
      // ans = \\int_0_1 [-.6*x + .7 dx]*x / \\int_0_1 [-.6*x + .7 dx] = .15/.4 = .375
      val algorithm = ParticleBeliefPropagation(10, 40, 100, 100, fp)
      algorithm.start()
      // algorithm.expectation(fp, (i: Double) => i) should be(.375 +- globalTol)
      val result = algorithm.expectation(fp, (i: Double) => i)
      algorithm.kill
      update(result, NDTest.TTEST, "PosteriorDifferentThanPrior", 0.375, alpha)
    }
  }
  ndtest.run(10)
}
// With a multivariate-normal constraint coupling X and Y, PBP's resampling
// (10 outer steps of 4 inner iterations) should recover the true covariance
// (origCov = 8.0) better than a plain-BP-like baseline with no resampling
// (1 outer step of 20 inner iterations).
"with a posterior different than the prior, reduce the variance of the posterior as compared to BP" in {
  val origCov = 8.0
  val mvn = new MultivariateNormalDistribution(Array(0.0, 0.0), Array(Array(8.0, origCov), Array(origCov, 16.0)))
  Universe.createNew()
  val locX = CUniform(20, 80)("X", Universe.universe)
  val locY = CUniform(20, 80)("Y", Universe.universe)
  val loc = ^^(locX, locY)
  // Constrain the (X, Y) pair by the MVN density centered at (40, 40).
  loc.addConstraint((l: (Double, Double)) => {
    mvn.density(Array((l._1 - 40.0), (l._2 - 40.0)))
  })
  val algorithm = ParticleBeliefPropagation(10, 4, 15, 15, loc, locX, locY)
  algorithm.start()
  val locE = algorithm.expectation(loc, (d: (Double, Double)) => d._1 * d._2)
  // Cov(X, Y) = E[XY] - E[X] * E[Y]
  val cov = locE - algorithm.mean(locX) * algorithm.mean(locY)
  algorithm.kill()
  ParticleGenerator.clear
  val algorithm2 = ParticleBeliefPropagation(1, 20, 15, 15, loc, locX, locY)
  algorithm2.start()
  val locE2 = algorithm2.expectation(loc, (d: (Double, Double)) => d._1 * d._2)
  // Bug fix: the baseline covariance must use the baseline's own expectation
  // locE2; the original used locE (the PBP expectation), leaving locE2 unused
  // and mixing the two algorithms' estimates.
  val cov2 = locE2 - algorithm2.mean(locX) * algorithm2.mean(locY)
  algorithm2.kill()
  (math.abs(cov - origCov) < math.abs(origCov - cov2)) should be(true)
}
// Evidence in a dependent universe must be folded into the posterior of x1
// via the supplied probability-of-evidence computation (ProbEvidenceSampler).
// The expected value is derived by hand below.
"with a dependent universe, correctly take into account probability of evidence in the dependent universe" in {
  Universe.createNew()
  val x = IntSelector(Constant(10))
  val y = IntSelector(Constant(10))
  val x1 = Apply(x, (i: Int) => i < 1)
  val y1 = Apply(y, (i: Int) => i < 2)
  // The dependent universe's element "a" chooses u1 or u2 based on x1, y1.
  val dependentUniverse = new Universe(List(x1, y1))
  val u1 = CUniform(0.0, 1.0)("", dependentUniverse)
  val u2 = CUniform(0.0, 2.0)("", dependentUniverse)
  val a = CachingChain(x1, y1, (x: Boolean, y: Boolean) => if (x || y) u1; else u2)("a", dependentUniverse)
  val condition = (d: Double) => d < 0.5
  // Probability of evidence in the dependent universe is estimated by
  // sampling (10000 samples per query).
  val ve = ParticleBeliefPropagation(List((dependentUniverse, List(NamedEvidence("a", Condition(condition))))),
    (u: Universe, e: List[NamedEvidence[_]]) => () => ProbEvidenceSampler.computeProbEvidence(10000, e)(u),
    1, 40, x1)
  ve.start()
  // Hand computation of P(x1 = true | evidence):
  val peGivenXTrue = 0.5
  val peGivenXFalse = 0.2 * 0.5 + 0.8 * 0.25
  val unnormalizedPXTrue = 0.1 * peGivenXTrue
  val unnormalizedPXFalse = 0.9 * peGivenXFalse
  val pXTrue = unnormalizedPXTrue / (unnormalizedPXTrue + unnormalizedPXFalse)
  ve.probability(x1, true) should be(pXTrue +- globalTol)
  ve.kill()
}
// A contingent condition (y must be true whenever x is true) rules out the
// joint case (x = true, y = false); the expected posterior of y is computed
// in the comment below.
"with a contingent condition, correctly take into account the contingency" in {
  Universe.createNew()
  val x = Flip(0.1)
  val y = Flip(0.2)
  // Condition on y that only applies in the world where x is true.
  y.setCondition((b: Boolean) => b, List(Element.ElemVal(x, true)))
  // Probability of y should be (0.1 * 0.2 + 0.9 * 0.2) / (0.1 * 0.2 + 0.9 * 0.2 + 0.9 * 0.8) (because the case where x is true and y is false has been ruled out)
  val ve = ParticleBeliefPropagation(3, 50, y)
  ve.start()
  ve.probability(y, true) should be(((0.1 * 0.2 + 0.9 * 0.2) / (0.1 * 0.2 + 0.9 * 0.2 + 0.9 * 0.8)) +- globalTol)
  // Release the algorithm's resources, consistent with the sibling tests
  // (the original leaked the running algorithm by never calling kill()).
  ve.kill()
}
}
/*
"MaxProductBeliefPropagation" should {
"compute the most likely values of all the variables given the conditions and constraints" in {
Universe.createNew()
val e1 = Flip(0.5)
e1.setConstraint((b: Boolean) => if (b) 3.0; else 1.0)
val e2 = If(e1, Flip(0.4), Flip(0.9))
val e3 = If(e1, Flip(0.52), Flip(0.4))
val e4 = e2 === e3
e4.observe(true)
// p(e1=T,e2=T,e3=T) = 0.75 * 0.4 * 0.52
// p(e1=T,e2=F,e3=F) = 0.75 * 0.6 * 0.48
// p(e1=F,e2=T,e3=T) = 0.25 * 0.9 * 0.4
// p(e1=F,e2=F,e3=F) = 0.25 * 0.1 * 0.6
// MPE: e1=T,e2=F,e3=F,e4=T
val alg = MPEBeliefPropagation(20)
alg.start()
alg.mostLikelyValue(e1) should equal(true)
alg.mostLikelyValue(e2) should equal(false)
alg.mostLikelyValue(e3) should equal(false)
alg.mostLikelyValue(e4) should equal(true)
}
}
*
*/
/**
 * Runs Particle Belief Propagation on `target` and returns the estimated
 * probability that `predicate` holds of it.
 *
 * @param target       the element to query
 * @param outer        one-time mode: number of outer resampling steps;
 *                     anytime mode: running time in milliseconds
 * @param inner        number of inner BP iterations per outer step
 * @param argSamples   number of samples for argument elements
 * @param totalSamples total number of samples
 * @param predicate    the event whose probability is estimated
 * @param prob         expected probability (unused here; kept for call-site
 *                     documentation and interface compatibility)
 * @param tol          expected tolerance (unused here, as above)
 * @param oneTime      true to run the one-time algorithm, false for anytime
 */
def test[T](target: Element[T], outer: Int, inner: Int,
  argSamples: Int, totalSamples: Int,
  predicate: T => Boolean, prob: Double, tol: Double, oneTime: Boolean = true): Double = {
  val algorithm = if (oneTime) {
    ParticleBeliefPropagation(outer, inner, argSamples, totalSamples, target)
  } else {
    val alg = ParticleBeliefPropagation(inner.toLong, argSamples, totalSamples, target)
    alg.messageTimeout = Timeout(30000, TimeUnit.MILLISECONDS)
    alg
  }
  algorithm.start()
  // In anytime mode, let the algorithm run for `outer` milliseconds.
  if (!oneTime) Thread.sleep(outer.toLong)
  // stop/kill are side-effecting, so call them with parentheses.
  algorithm.stop()
  val result = algorithm.probability(target, predicate)
  algorithm.kill()
  result
}
} | scottcb/figaro | Figaro/src/test/scala/com/cra/figaro/test/experimental/particlebp/PBPTest.scala | Scala | bsd-3-clause | 17,751 |
package com.twitter.zipkin.redis
import com.twitter.app.App
import com.twitter.conversions.time._
import com.twitter.zipkin.storage.redis.RedisSpanStore
/** Mixin for a twitter-server [[App]] that provides a Redis-backed span store
  * configured through command-line flags. */
trait RedisSpanStoreFactory { self: App =>
  val redisHost = flag("zipkin.storage.redis.host", "0.0.0.0", "Host for Redis")
  val redisPort = flag("zipkin.storage.redis.port", 6379, "Port for Redis")
  val redisTtl = flag("zipkin.storage.redis.ttl", 168, "Redis data TTL in hours")

  /** Builds a [[RedisSpanStore]] from the configured host, port and TTL. */
  def newRedisSpanStore(): RedisSpanStore = {
    val host = redisHost()
    val port = redisPort()
    val ttl = redisTtl().hours
    val storage = StorageBuilder(host, port, ttl)
    val index = IndexBuilder(host, port, ttl)
    new RedisSpanStore(index(), storage())
  }
}
| travisbrown/zipkin | zipkin-redis/src/main/scala/com/twitter/zipkin/redis/RedisSpanStoreFactory.scala | Scala | apache-2.0 | 695 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.charts.template
import io.gatling.charts.report.Container.{ Group, Request }
/**
 * Emits the JavaScript that builds the left-hand navigation menu of the
 * generated report. The interpolated string below is the program's runtime
 * output, so its content must not be edited casually; `$$` escapes produce a
 * literal `$` (jQuery) in the emitted script, and `$Group` / `$Request`
 * interpolate the container type names from [[io.gatling.charts.report.Container]].
 */
private[charts] class MenuTemplate {
  /** Returns the menu-building JavaScript as a single string. */
  def getOutput: String = s"""
function getItemLink(item){
return item.pathFormatted + '.html';
}
function setDetailsLinkUrl(){
$$.each(stats.contents, function (name, data) {
$$('#details_link').attr('href', getItemLink(data));
return false;
});
}
var MENU_ITEM_MAX_LENGTH = 50;
function menuItem(item, level, parent, group) {
if (group)
var style = 'group';
else
var style = '';
if (item.name.length > MENU_ITEM_MAX_LENGTH) {
var title = ' title="' + item.name + '"';
var displayName = item.name.substr(0, MENU_ITEM_MAX_LENGTH) + '...';
}
else {
var title = '';
var displayName = item.name;
}
if (parent) {
if (level == 0)
var childOfRoot = 'child-of-ROOT ';
else
var childOfRoot = '';
var style = ' class="' + childOfRoot + 'child-of-menu-' + parent + '"';
} else
var style = '';
if (group)
var expandButton = '<span id="menu-' + item.pathFormatted + '" style="margin-left: ' + (level * 10) + 'px;" class="expand-button"> </span>';
else
var expandButton = '<span id="menu-' + item.pathFormatted + '" style="margin-left: ' + (level * 10) + 'px;" class="expand-button hidden"> </span>';
return '<li' + style + '><div class="item">' + expandButton + '<a href="' + getItemLink(item) + '"' + title + '>' + displayName + '</a></div></li>';
}
function menuItemsForGroup(group, level, parent) {
var items = '';
if (level > 0)
items += menuItem(group, level - 1, parent, true);
$$.each(group.contents, function (contentName, content) {
if (content.type == '$Group')
items += menuItemsForGroup(content, level + 1, group.pathFormatted);
else if (content.type == '$Request')
items += menuItem(content, level, group.pathFormatted);
});
return items;
}
function setDetailsMenu(){
$$('.nav ul').append(menuItemsForGroup(stats, 0));
$$('.nav').expandable();
}
function setGlobalMenu(){
$$('.nav ul').append('<li><div class="item"><a href="#active_users">Active Users</a></div></li> \\
<li><div class="item"><a href="#requests">Requests / sec</a></div></li> \\
<li><div class="item"><a href="#responses">Responses / sec</a></div></li>');
}
function getLink(link){
var a = link.split('/');
return (a.length<=1)? link : a[a.length-1];
}
function setActiveMenu(){
$$('.nav a').each(function(){
if(!$$(this).hasClass('expand-button') && $$(this).attr('href') == getLink(window.location.pathname)){
$$(this).parents('li').addClass('on');
return false;
}
});
}
"""
}
| gatling/gatling | gatling-charts/src/main/scala/io/gatling/charts/template/MenuTemplate.scala | Scala | apache-2.0 | 3,459 |
/*
* Copyright 2014 The Instalk Project
*
* The Instalk Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package im.instalk.protocol
import play.api.libs.json._
import im.instalk.User
import org.joda.time.DateTime
/** A client request that targets a particular room, identified by `r`. */
sealed trait OperationRequest {
  def r: RoomId
}

/** Marker for operations scoped to a room (typing indicators, messages, ...). */
sealed trait RoomOp extends OperationRequest

/** A plain chat message body. */
case class Message(txt: String)

/** A room-topic change: sequence number, new topic, who set it, and when. */
case class RoomTopicMessage(`#`: Long, topic: String, sender: User, when: DateTime)

/** A message wrapped with its room-local sequence number, sender and time. */
case class SeqEnvelope(seqNr: Long, sender: User, msg: Message, time: DateTime)

/** Request payload asking for history strictly before sequence number `before`. */
case class FetchBefore(before: Long)

/** Records a user-info change: sequence number, old username, new info, time. */
case class UserInfoModification(`#`: Long, originalUsername: String, newUserInfo: User, when: DateTime)

case class Join(r: RoomId) extends OperationRequest
case class Leave(r: RoomId) extends OperationRequest

/** Partial update of an anonymous user's display name and/or color. */
case class AnonymousInfoModification(name: Option[String], color: Option[String])
case class SetUserInfoRequest(r: RoomId, data: AnonymousInfoModification) extends OperationRequest

// Presence / typing indicators within a room.
case class BeginTyping(r: RoomId) extends RoomOp
case class StoppedTyping(r: RoomId) extends RoomOp
case class Away(r: RoomId) extends RoomOp

case class BroadcastMessageRequest(r: RoomId, data: Message) extends RoomOp
case class SetRoomTopicResponse(r: RoomId, data: RoomTopicMessage) extends RoomOp

//coming from user (REQUEST)
case class RoomMessage(r: RoomId, envelope: SeqEnvelope)
case class NewTopic(topic: String)
case class SetRoomTopicRequest(r: RoomId, data: NewTopic) extends RoomOp

//going to room (RESPONSE)
case class Fetch(r: RoomId, data: FetchBefore) extends RoomOp
case class SetUserInfo(r: RoomId, data: UserInfoModification)
/**
 * Play-JSON (de)serializers for the protocol messages above. The explicit
 * `Writes` instances flatten their payloads into the wire shape expected by
 * clients ("r" = room, "o" = operation name, "data" = payload); the string
 * keys are part of the wire protocol and must not be changed.
 */
object DefaultFormats {
  implicit val joinFmt = Json.format[Join]
  implicit val leaveFmt = Json.format[Leave]
  implicit val btFmt = Json.format[BeginTyping]
  implicit val stFmt = Json.format[StoppedTyping]
  implicit val awayFmt = Json.format[Away]
  implicit val anonInfoModFmt = Json.format[AnonymousInfoModification]
  implicit val setUserInfoReqFmt = Json.format[SetUserInfoRequest]
  implicit val userInfoModFmt = Json.format[UserInfoModification]
  implicit val msgFmt = Json.format[Message]
  implicit val rtmFmt = Json.format[RoomTopicMessage]
  implicit val newTopicFmt = Json.format[NewTopic]
  implicit val srtReqFmt = Json.format[SetRoomTopicRequest]

  // Serializes a topic change as { r, o: "set-room-topic", data }.
  implicit val setRoomTopicWrites = new Writes[SetRoomTopicResponse] {
    def writes(resp: SetRoomTopicResponse): JsValue =
      Json.obj(
        "r" -> resp.r,
        "o" -> "set-room-topic",
        "data" -> Json.toJson(resp.data)
      )
  }

  // Serializes an envelope with the message fields merged into the same
  // JSON object as the sequence number, sender and time.
  implicit val seqEnvWrites = new Writes[SeqEnvelope] {
    def writes(env: SeqEnvelope): JsValue =
      Json.obj(
        "#" -> env.seqNr,
        "sender" -> env.sender,
        "time" -> env.time
      ) ++ Json.toJson(env.msg).as[JsObject]
  }

  // Serializes a user-info change as { r, o: "set-user-info", data }.
  implicit val setUserInfoWrites = new Writes[SetUserInfo] {
    def writes(setInfo: SetUserInfo): JsValue =
      Json.obj(
        "r" -> setInfo.r,
        "o" -> "set-user-info",
        "data" -> Json.toJson(setInfo.data)
      )
  }

  implicit val roomWrite = Json.writes[RoomMessage]
  implicit val broadcastMsgFmt = Json.format[BroadcastMessageRequest]
  implicit val fetchBeforeFmt = Json.format[FetchBefore]
  implicit val fetchFmt = Json.format[Fetch]
}
| AhmedSoliman/instalk | app/im/instalk/protocol/ProtocolAST.scala | Scala | apache-2.0 | 3,769 |
package japgolly.microlibs.compiletime
import scala.quoted.*
/**
 * Scala 3 macro helper that synthesises a `new A(...)` expression for a
 * concrete class `A`, filling in constructor type and term arguments via
 * caller-supplied lookup functions and, optionally, implicit search.
 */
object NewInstance {
  import MacroEnv.*

  /** Aborts macro expansion with an error message; never returns. */
  type FailFn = () => Nothing
  /** Resolves a value (term) constructor parameter, or invokes the fail function. */
  type TermLookupFn = (q: Quotes) ?=> (q.reflect.ValDef , FailFn) => q.reflect.Term
  /** Resolves a type constructor parameter, or invokes the fail function. */
  type TypeLookupFn = (q: Quotes) ?=> (q.reflect.TypeDef, FailFn) => q.reflect.TypeTree

  /**
   * Builds `new A[...](...)` as an `Expr[A]`.
   *
   * @param findTermArg           optional resolver for constructor value parameters
   * @param findTypeArg           optional resolver for constructor type parameters
   * @param autoPopulateImplicits when true, implicit/given parameters are first
   *                              filled by summoning before `findTermArg` is tried
   */
  def of[A: Type](findTermArg : Option[TermLookupFn] = None,
                  findTypeArg : Option[TypeLookupFn] = None,
                  autoPopulateImplicits: Boolean = true,
                 )(using Quotes): Expr[A] = {
    import quotes.reflect.*

    val A = TypeRepr.of[A].dealias.typeSymbol

    // Only concrete classes can be instantiated.
    if A.flags.is(Flags.Abstract) then
      fail(s"${Type.show[A]} is abstract. It needs to be a concrete.")
    if A.flags.is(Flags.Trait) then
      fail(s"${Type.show[A]} is a trait. It needs to be a class.")

    val ctor = A.primaryConstructor

    // Produces a type argument for one type parameter, or aborts.
    def generateTypeArg(d: TypeDef): TypeTree = {
      val failFn: FailFn = () => fail(s"Don't know how to populate the type parameter ${d.name} in new ${Type.show[A]}[...]")
      findTypeArg match {
        case Some(f) => f(d, failFn)
        case None => failFn()
      }
    }

    // Produces a term argument for one value parameter: implicit parameters
    // are summoned first (when enabled), then findTermArg is consulted.
    def generateTermArg(d: ValDef, isImplicit: Boolean): Term = {
      val failFn: FailFn = () => {
        if autoPopulateImplicits && isImplicit then {
          // NOTE(review): these println() calls and summonOrError look like
          // leftover debug output for surfacing the implicit-search error.
          println()
          println()
          d.tpt.summonOrError
          println()
          println()
        }
        val pre = if isImplicit then "implicit " else ""
        fail(s"Don't know how to populate the parameter ($pre${d.name}: ${d.tpt.show}) in new ${Type.show[A]}(...)")
      }
      var result = Option.empty[Term]
      if autoPopulateImplicits && isImplicit then
        for (e <- d.tpt.summon)
          result = Some(e.asTerm)
      if result.isEmpty then
        for (f <- findTermArg)
          result = Some(f(d, failFn))
      result getOrElse failFn()
    }

    var classType = TypeRepr.of[A].dealias.asTypeTree
    var typeArgs = Vector.empty[TypeTree]
    var termArgs = List.empty[List[Term]]

    // Extract provided class types
    classType.tpe match {
      // A = F[X, Y, ..]
      case AppliedType(cls, args) =>
        classType = cls.asTypeTree
        typeArgs = args.iterator.map(_.asTypeTree).toVector
      case _ =>
    }

    // Extract args: walk the primary constructor's parameter clauses.
    // Type parameters already supplied by A's own type application are skipped.
    locally {
      var typeParamsToSkip = typeArgs.length
      def generateArgs(clauses: List[ParamClause]): Unit =
        clauses.foreach {
          case c: TermParamClause =>
            val isImplicit = c.isImplicit || c.isGiven
            termArgs = termArgs ::: c.params.map(generateTermArg(_, isImplicit = isImplicit)) :: Nil
          case c: TypeParamClause =>
            for (p <- c.params)
              if typeParamsToSkip > 0
              then typeParamsToSkip -= 1
              else typeArgs :+= generateTypeArg(p)
        }
      ctor.tree match {
        case DefDef(_, p, _, _) => generateArgs(p)
        case t => fail(s"Don't know how to interpret the constructor of ${Type.show[A]}\\n$t")
      }
    }

    // Post-process args (typeArgs is nulled to guard against further use).
    val typeArgList = typeArgs.toList
    typeArgs = null
    locally {
      if typeArgList.nonEmpty then
        classType = Applied(classType, typeArgList)
      if termArgs.isEmpty then
        termArgs = Nil :: Nil // `new X` is translated to `new X()`
    }

    // Build AST: new C, then type application, then one Apply per clause.
    val result: Term = {
      var ast: Term =
        Select(New(classType), ctor)
      if typeArgList.nonEmpty then
        ast = TypeApply(ast, typeArgList)
      for (args <- termArgs)
        ast = Apply(ast, args)
      ast
    }

    result.asExprOf[A]
  }
}
| japgolly/microlibs-scala | compile-time/shared/src/main/scala-3/japgolly/microlibs/compiletime/NewInstance.scala | Scala | apache-2.0 | 3,670 |
package eu.inn.binders.naming
/**
 * Identifier-name converter that parses camelCase input with a
 * [[CamelCaseParser]] and rebuilds each identifier with a
 * [[HyphenCaseBuilder]] (e.g. `fooBar` becomes `foo-bar`, per the
 * parser/builder names — behavior defined in those classes).
 */
class CamelCaseToHyphenCaseConverter extends BaseConverter(new CamelCaseParser) {
  def createBuilder(): IdentifierBuilder = new HyphenCaseBuilder()
}
| InnovaCo/binders | src/main/scala/eu/inn/binders/naming/CamelCaseToHyphenCaseConverter.scala | Scala | bsd-3-clause | 182 |
/*
* Copyright (c) 2013 Jascha Neutelings
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package nl.ru.cs.spl.ast
import scala.collection.mutable
import scala.util.Random
/**
 * A lexically scoped symbol table for variables, functions and type variables.
 * Environments form a tree: lookups fall through to `parent` when a name is
 * not bound locally, and `children` tracks nested scopes for recursive
 * collection of declarations.
 */
abstract class Environment (val parent: Option[Environment] = None) {
  // name -> (declaration carrying the possibly-renamed name, bookkeeping info)
  private val variables = mutable.Map[String, (VarDecl, VarInfo)]()
  // original source name -> unique renamed name, for identifiers that clashed
  private val renamed = mutable.Map[String, String]()
  private val children = mutable.Set[Environment]()

  //private def isVariableShadowing(n: String): Boolean = parent.map(p => p.variables.contains(n) || p.isVariableShadowing(n)).getOrElse(false)

  // def name: String

  /** Looks up a variable here, then in enclosing scopes. */
  def getVariableAndInfo(name: String): Option[(VarDecl, VarInfo)] = variables.get(name).orElse(parent.flatMap(_.getVariableAndInfo(name)))
  def getVariable(n: String) = getVariableAndInfo(n).map(_._1)

  /** Rewrites the info of variable `n` in whichever scope declares it. */
  def updateVariableInfo(n: String)(f: VarInfo => VarInfo) {
    variables.get(n) match {
      case Some((varDecl, varInfo)) => variables(n) = (varDecl, f(varInfo))
      case None => parent.foreach(_.updateVariableInfo(n)(f))
    }
  }

  //def getVariableInfo(n: String) = getVariableEntry(n).map(_._2)

  /** Maps an original name to its unique renamed form, searching outward. */
  def resolveVariableName(n: String): String = renamed.getOrElse(n, parent.map(_.resolveVariableName(n)).getOrElse(n))

  /**
   * Declares a variable in this scope, renaming it if needed for uniqueness.
   * Returns the (possibly renamed) name, or None on a duplicate declaration.
   * NOTE(review): the duplicate check uses resolveVariableName(v.name) while
   * the entry is stored under newName — verify this matches the intended
   * shadowing rules.
   */
  def declareVariable(v: VarDecl): Option[String] = {
    if (variables.contains(resolveVariableName(v.name))) None
    else {
      val newName = getUniqueName(v.name)
      variables(newName) = (v.copy(name = newName), VarInfo(v.name, isGlobal))
      if (newName != v.name) renamed(v.name) = newName
      Some(newName)
    }
  }

  /** Produces a name starting with `prefix` that is unique in this scope. */
  def getUniqueName(prefix: String): String

  // def declareTemporaryVariable(tpe: Type): VarDecl = {
  // val tempName = getUniqueName("$temp")
  // val tempDecl = VarDecl(tempName, tpe)
  // variables(tempName) = (tempDecl, VarInfo("?", temporary = true))
  // tempDecl
  // }

  //def createTemporaryVariable(tpe: Type) = VarDecl(getUniqueName(""), tpe)

  /** All declarations in this scope, optionally including nested scopes. */
  def collectVariables(recursive: Boolean): Iterator[VarDecl] =
    if (recursive)
      variables.valuesIterator.map(_._1) ++ children.flatMap(_.collectVariables(recursive))
    else
      variables.valuesIterator.map(_._1)

  // Function and type-variable handling is delegated to concrete subclasses.
  def getFunction(n: String): Option[FunDecl]
  def declareFunction(f: FunDecl): Boolean
  def updateFunctionInfo(n: String)(f: FunInfo => FunInfo)
  def getFunctionInfo(n: String): Option[FunInfo]

  def isTypeVarDefined(t: TypeParam): Boolean
  def collectTypeVariables: Iterator[TypeParam] = Iterator.empty
  def declareTypeVar(t: TypeParam) { }

  // def create(n: String): Environment = {
  // val env = new Environment.Local(this, n)
  // children(n) = env
  // env
  // }

  //def create(): Environment = create((if (isGlobal) "" else name + "$") + children.size)

  /** Creates and registers a child scope. */
  def create: Environment

  //def getEnv(n: String) = children(n)

  /** True for the root (global) scope. */
  def isGlobal = parent.isEmpty
}
/**
 * Concrete environment implementations. The hierarchy is:
 * Global (root, owns functions and predefined functions) ->
 * Function (per-function scope, owns type variables and name uniquing) ->
 * Local (nested block scope, delegates everything upward).
 */
object Environment {
  /** Entry point: a fresh global scope. */
  def create: Environment = new Global

  // A nested block scope; all function/type/name queries delegate to parent.
  private class Local(parent: Environment) extends Environment(Some(parent)) {
    require(parent != null)

    def declareFunction(f: FunDecl) = parent.declareFunction(f)
    def getFunction(n: String) = parent.getFunction(n)
    def updateFunctionInfo(n: String)(f: FunInfo => FunInfo) {
      parent.updateFunctionInfo(n)(f)
    }
    def getFunctionInfo(n: String)= parent.getFunctionInfo(n)

    def isTypeVarDefined(t: TypeParam) = parent.isTypeVarDefined(t)

    def getUniqueName(prefix: String) = parent.getUniqueName(prefix)

    def create: Environment = {
      val res = new Local(this)
      children += res
      res
    }
  }

  // A function body's scope: owns its type variables and generates unique
  // names by appending "$<counter>" per prefix.
  private class Function(parent: Global) extends Local(parent) {
    private val typeVars = mutable.Set[TypeParam]()
    private val suffixes = mutable.Map("" -> 0)

    // NOTE(review): identical uniquing logic is duplicated in Global below.
    override def getUniqueName(prefix: String) = {
      val uniqueName = suffixes.get(prefix).map(prefix + "$" + _).getOrElse(prefix)
      suffixes(prefix) = suffixes.get(prefix).map(_ + 1).getOrElse(1)
      uniqueName
    }

    override def declareTypeVar(t: TypeParam) {
      typeVars += t
    }

    override def isTypeVarDefined(t: TypeParam) = typeVars(t)

    override def collectTypeVariables = typeVars.iterator
  }

  // The root scope: owns the function table (seeded with the predefined
  // functions) and its own name-uniquing counters.
  private class Global extends Environment {
    private val functions = mutable.Map[String, (FunDecl, FunInfo)]()
    private val suffixes = mutable.Map("" -> 0)

    PredefinedFunctions.register(this)

    //def name = "<isGlobal>"

    def getFunction(n: String) = functions.get(n).map(_._1)
    def updateFunctionInfo(n: String)(f: FunInfo => FunInfo) {
      functions.get(n) match {
        case Some((funDecl, funInfo)) => functions(n) = (funDecl, f(funInfo))
        case None =>
      }
    }
    def getFunctionInfo(n: String) = functions.get(n).map(_._2)

    // Returns false (and leaves the table unchanged) on redeclaration.
    def declareFunction(f: FunDecl) =
      if (functions.contains(f.name)) false
      else {
        functions(f.name) = (f, FunInfo.default); true
      }

    def isTypeVarDefined(t: TypeParam) = false

    def getUniqueName(prefix: String) = {
      val uniqueName = suffixes.get(prefix).map(prefix + "$" + _).getOrElse(prefix)
      suffixes(prefix) = suffixes.get(prefix).map(_ + 1).getOrElse(1)
      uniqueName
    }

    def create: Environment = {
      val res = new Function(this)
      children += res
      res
    }
  }
}
/** Anything with a name that can live in an [[Environment]]. */
trait Symbol {
  def name: String
}

/** Classification of where/how a variable was declared. */
trait VarType
object VarType {
  case object Param extends VarType
  case object Local extends VarType
  case object Global extends VarType
  case object Result extends VarType
}

/** A variable declaration: name, type, and declaration kind. */
case class VarDecl(name: String, tpe: Type, varType: VarType) extends Symbol
/** A function declaration: name, result type, and parameter types. */
case class FunDecl(name: String, resultType: Type, paramTypes: Seq[Type]) extends Symbol

/** Per-variable bookkeeping: the pre-rename source name and global flag. */
case class VarInfo(originalName: String, global: Boolean)
/** Per-function bookkeeping: reference count and side-effect flag. */
case class FunInfo(references: Int, sideEffects: Boolean)

object FunInfo {
  /** Initial info for a newly declared function: unreferenced, assumed pure. */
  def default = FunInfo(0, sideEffects = false)
}
| Jascha-N/SPLScala | src/nl/ru/cs/spl/ast/Environment.scala | Scala | mit | 6,886 |
package com.softwaremill.bootzooka.passwordreset.application
import com.flowy.common.utils.ConfigWithDefault
import com.typesafe.config.Config
/** Configuration for the password-reset feature, read from `rootConfig`
  * with a fallback default via [[ConfigWithDefault]]. */
trait PasswordResetConfig extends ConfigWithDefault {
  def rootConfig: Config

  // Template for the reset link sent to users; the %s placeholder carries
  // the reset code (see the `code=%s` query parameter in the default).
  lazy val resetLinkPattern =
    getString("bootzooka.reset-link-pattern", "http://localhost:8080/#/password-reset?code=%s")
}
| asciiu/fomo | api/src/main/scala/com/softwaremill/bootzooka/passwordreset/application/PasswordResetConfig.scala | Scala | apache-2.0 | 353 |
package dhg.ccg.parse.pcfg.mcmc
import org.junit.Test
import org.junit.Assert._
import dhg.ccg.prob._
import dhg.ccg.cat._
import dhg.ccg.rule._
import dhg.ccg.parse._
import dhg.ccg.parse.pcfg._
import dhg.ccg.tagdict.StartEndTags
import dhg.util._
import dhg.ccg.tagdict._
import dhg.ccg.test.TestUtil.DoubleIteratorRandomGenerator
import org.apache.commons.math3.random.MersenneTwister
import scala.collection.immutable.ListMap
import scalaz._
import Scalaz._
import dhg.ccg.util._
import scala.collection.immutable.BitSet
class PcfgProductionCounterITests {
// Atomic category fixtures shared by the tests in this suite.
val A: Cat = cat"A"
val B: Cat = cat"B"
val C: Cat = cat"C"
val D: Cat = cat"D"
val E: Cat = cat"E"
val F: Cat = cat"F"
val G: Cat = cat"G"
val H: Cat = cat"H"
val S: Cat = cat"S"
val NP: Cat = cat"NP"
val N: Cat = cat"N"
val PP: Cat = cat"PP"
// Sentence start/end marker categories.
val STA: Cat = cat"<S>"
val END: Cat = cat"<E>"
@Test
def i_test_SimplePcfgProductionFinder {
type Word = String
val tagdict = SimpleTagDictionary[Cat](Map(
"a1" -> Set(A),
"a2" -> Set(A),
"a3" -> Set(A),
"b1" -> Set(B),
"c1" -> Set(C),
"d1" -> Set(D),
"e1" -> Set(E)),
"<S>", STA, "<E>", END)
val rules = Set[CcgRule](FA, BA, N2NP)
val catIndexer = SimpleIndexer(CcgRule.allDerivable(rules, tagdict.allTags) -- Set(STA, END))
val wordIndexer = SimpleIndexer(tagdict.allWords)
val allCats = BitSet.empty ++ catIndexer.indices
val numCats = catIndexer.size
val numWords = wordIndexer.size
/*
* A
* / \\
* B C
* | / \\
* A A B
* / \\ / \\ |
* <s> B C B A A <e>
* b1 c1 b1 a1 a2
*
* A
* / \\
* B C
* | / \\
* A C B
* / \\ | |
* <s> B C B A <e>
* b1 c1 b1 a2
*/
val t1 =
CcgBinode(A,
CcgUnode(B,
CcgBinode(A,
CcgLeaf(B, "b1", "FAKEPOS"),
CcgLeaf(C, "c1", "FAKEPOS"))),
CcgBinode(C,
CcgBinode(A,
CcgLeaf(B, "b1", "FAKEPOS"),
CcgLeaf(A, "a1", "FAKEPOS")),
CcgUnode(B,
CcgLeaf(A, "a2", "FAKEPOS"))))
val t2 =
CcgBinode(A,
CcgUnode(B,
CcgBinode(A,
CcgLeaf(B, "b1", "FAKEPOS"),
CcgLeaf(C, "c1", "FAKEPOS"))),
CcgBinode(C,
CcgUnode(C,
CcgLeaf(B, "b1", "FAKEPOS")),
CcgUnode(B,
CcgLeaf(A, "a2", "FAKEPOS"))))
val trees = Array(t1, t2).map(CcgTreeI.to(_, catIndexer, wordIndexer))
val bProdsO: Map[Cat, Set[Prod]] = Map(
A -> Set(BinaryProd(A, C), BinaryProd(B, A), BinaryProd(B, C)),
B -> Set(BinaryProd(A, C)),
C -> Set(BinaryProd(A, B), BinaryProd(C, B)),
D -> Set(BinaryProd(A, B), BinaryProd(C, B)))
val uProdsO: Map[Cat, Set[Prod]] = Map(
B -> Set(UnaryProd(A)),
C -> Set(UnaryProd(A), UnaryProd(B)))
val tProdsO: Map[Cat, Set[Prod]] = Map(
A -> Set(TermProd("a1"), TermProd("a2"), TermProd("a3")),
B -> Set(TermProd("b1"), TermProd("c1")),
C -> Set(TermProd("b1"), TermProd("c1")),
D -> Set(TermProd("d1")))
val knownRoots = Array(catIndexer(A), catIndexer(C))
val knownBinys: Array[IndirectSparseVec[Array[Int]]] = DenseVec(bProdsO.map { case (t, prods) => catIndexer(t) -> IndirectSparseVec(prods.collect { case BinaryProd(u, v) => (catIndexer(u), catIndexer(v)) }.groupByKey.mapVals(_.toArray.sorted), numCats) }, numCats).values
val knownUnrys: IndirectSparseVec[Array[Int]] = IndirectSparseVec(uProdsO.map { case (t, prods) => catIndexer(t) -> prods.collect { case UnaryProd(u) => catIndexer(u) }.toArray.sorted }, numCats)
val knownTerms: Array[Array[Int]] = DenseVec(tProdsO.map { case (t, prods) => catIndexer(t) -> prods.collect { case TermProd(w) => wordIndexer(w) }.toArray.sorted }, numCats).values
val counter = new SimplePcfgProductionCounterI(catIndexer, wordIndexer)
val (rootCounts, binyCounts, unryCounts, termCounts, pmixCounts) =
counter.counts(trees: Array[CcgTreeI], trees.length,
knownRoots: Array[Int], // ts
knownBinys: Array[IndirectSparseVec[Array[Int]]], // t -> u -> vs
knownUnrys: IndirectSparseVec[Array[Int]], // t -> us
knownTerms: Array[Array[Int]], // t -> ws
numCats: Int, numWords: Int) //
// : ( //
// IndirectSparseVec[Int], // t -> c
// Array[IndirectSparseVec[IndirectSparseVec[Int]]], // t -> u -> v -> c
// IndirectSparseVec[IndirectSparseVec[Int]], // t -> u -> c
// Array[IndirectSparseVec[Double]], // t -> w -> c
// Array[Array[Double]]) // t -> c
assertEquals(5, rootCounts.length)
assertEquals(2, rootCounts.activeCount)
assertEquals(2, rootCounts(catIndexer(A)), 1e-9)
assertEquals(0, rootCounts(catIndexer(C)), 1e-9)
assertEquals(5, binyCounts.length)
assertEquals(5, binyCounts(catIndexer(A)).length)
assertEquals(2, binyCounts(catIndexer(A)).activeCount)
assertEquals(5, binyCounts(catIndexer(A))(catIndexer(A)).length)
assertEquals(1, binyCounts(catIndexer(A))(catIndexer(A)).activeCount)
assertEquals(0, binyCounts(catIndexer(A))(catIndexer(A))(catIndexer(C)), 1e-9)
assertEquals(5, binyCounts(catIndexer(A))(catIndexer(B)).length)
assertEquals(2, binyCounts(catIndexer(A))(catIndexer(B)).activeCount)
assertEquals(1, binyCounts(catIndexer(A))(catIndexer(B))(catIndexer(A)), 1e-9)
assertEquals(4, binyCounts(catIndexer(A))(catIndexer(B))(catIndexer(C)), 1e-9)
assertEquals(5, binyCounts(catIndexer(B)).length)
assertEquals(1, binyCounts(catIndexer(B)).activeCount)
assertEquals(5, binyCounts(catIndexer(B))(catIndexer(A)).length)
assertEquals(1, binyCounts(catIndexer(B))(catIndexer(A)).activeCount)
assertEquals(0, binyCounts(catIndexer(B))(catIndexer(A))(catIndexer(C)), 1e-9)
assertEquals(5, binyCounts(catIndexer(C)).length)
assertEquals(2, binyCounts(catIndexer(C)).activeCount)
assertEquals(5, binyCounts(catIndexer(C))(catIndexer(A)).length)
assertEquals(1, binyCounts(catIndexer(C))(catIndexer(A)).activeCount)
assertEquals(1, binyCounts(catIndexer(C))(catIndexer(A))(catIndexer(B)), 1e-9)
assertEquals(5, binyCounts(catIndexer(C))(catIndexer(C)).length)
assertEquals(1, binyCounts(catIndexer(C))(catIndexer(C)).activeCount)
assertEquals(1, binyCounts(catIndexer(C))(catIndexer(C))(catIndexer(B)), 1e-9)
assertEquals(5, binyCounts(catIndexer(D)).length)
assertEquals(2, binyCounts(catIndexer(D)).activeCount)
assertEquals(5, binyCounts(catIndexer(D))(catIndexer(A)).length)
assertEquals(1, binyCounts(catIndexer(D))(catIndexer(A)).activeCount)
assertEquals(0, binyCounts(catIndexer(D))(catIndexer(A))(catIndexer(B)), 1e-9)
assertEquals(5, binyCounts(catIndexer(D))(catIndexer(C)).length)
assertEquals(1, binyCounts(catIndexer(D))(catIndexer(C)).activeCount)
assertEquals(0, binyCounts(catIndexer(D))(catIndexer(C))(catIndexer(B)), 1e-9)
assertNull(binyCounts(catIndexer(E)))
assertEquals(5, unryCounts.length)
assertEquals(2, unryCounts.activeCount)
assertFalse(unryCounts.containsKey(catIndexer(A)))
assertEquals(5, unryCounts(catIndexer(B)).length)
assertEquals(1, unryCounts(catIndexer(B)).activeCount)
assertEquals(4, unryCounts(catIndexer(B))(catIndexer(A)), 1e-9)
assertEquals(5, unryCounts(catIndexer(C)).length)
assertEquals(2, unryCounts(catIndexer(C)).activeCount)
assertEquals(0, unryCounts(catIndexer(C))(catIndexer(A)), 1e-9)
assertEquals(1, unryCounts(catIndexer(C))(catIndexer(B)), 1e-9)
assertFalse(unryCounts.containsKey(catIndexer(D)))
assertFalse(unryCounts.containsKey(catIndexer(E)))
assertEquals(5, termCounts.length)
assertEquals(7, termCounts(catIndexer(A)).length)
assertEquals(3, termCounts(catIndexer(A)).activeCount)
assertEquals(1, termCounts(catIndexer(A))(wordIndexer("a1")), 1e-9)
assertEquals(2, termCounts(catIndexer(A))(wordIndexer("a2")), 1e-9)
assertEquals(0, termCounts(catIndexer(A))(wordIndexer("a3")), 1e-9)
assertEquals(7, termCounts(catIndexer(B)).length)
assertEquals(2, termCounts(catIndexer(B)).activeCount)
assertEquals(4, termCounts(catIndexer(B))(wordIndexer("b1")), 1e-9)
assertEquals(0, termCounts(catIndexer(B))(wordIndexer("c1")), 1e-9)
assertEquals(7, termCounts(catIndexer(C)).length)
assertEquals(2, termCounts(catIndexer(C)).activeCount)
assertEquals(0, termCounts(catIndexer(C))(wordIndexer("b1")), 1e-9)
assertEquals(2, termCounts(catIndexer(C))(wordIndexer("c1")), 1e-9)
assertEquals(7, termCounts(catIndexer(D)).length)
assertEquals(1, termCounts(catIndexer(D)).activeCount)
assertEquals(0, termCounts(catIndexer(D))(wordIndexer("d1")), 1e-9)
assertNull(termCounts(catIndexer(E)))
assertEquals(5, pmixCounts.length)
assertEquals(3, pmixCounts(catIndexer(A)).length)
assertEquals(5, pmixCounts(catIndexer(A))(0), 1e-9)
assertEquals(0, pmixCounts(catIndexer(A))(1), 1e-9)
assertEquals(3, pmixCounts(catIndexer(A))(2), 1e-9)
assertEquals(3, pmixCounts(catIndexer(B)).length)
assertEquals(0, pmixCounts(catIndexer(B))(0), 1e-9)
assertEquals(4, pmixCounts(catIndexer(B))(1), 1e-9)
assertEquals(4, pmixCounts(catIndexer(B))(2), 1e-9)
assertEquals(3, pmixCounts(catIndexer(C)).length)
assertEquals(2, pmixCounts(catIndexer(C))(0), 1e-9)
assertEquals(1, pmixCounts(catIndexer(C))(1), 1e-9)
assertEquals(2, pmixCounts(catIndexer(C))(2), 1e-9)
assertEquals(3, pmixCounts(catIndexer(D)).length)
assertEquals(0, pmixCounts(catIndexer(D))(0), 1e-9)
assertEquals(0, pmixCounts(catIndexer(D))(1), 1e-9)
assertEquals(0, pmixCounts(catIndexer(D))(2), 1e-9)
assertEquals(3, pmixCounts(catIndexer(E)).length)
assertEquals(0, pmixCounts(catIndexer(E))(0), 1e-9)
assertEquals(0, pmixCounts(catIndexer(E))(1), 1e-9)
assertEquals(0, pmixCounts(catIndexer(E))(2), 1e-9)
}
}
| dhgarrette/2015-ccg-parsing | src/test/scala/dhg/ccg/parse/pcfg/mcmc/PcfgProductionCounterITests.scala | Scala | apache-2.0 | 10,401 |
/*
* Copyright 2017 Sumo Logic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ws.epigraph.java.service.projections.req.input
import ws.epigraph.java.GenContext
import ws.epigraph.java.JavaGenNames.ln
import ws.epigraph.java.service.projections.req.input.ReqInputProjectionGen.{classNamePrefix, classNameSuffix}
import ws.epigraph.java.service.projections.req.{BaseNamespaceProvider, ReqModelProjectionGen, ReqProjectionGen, ReqTypeProjectionGenCache}
import ws.epigraph.lang.Qn
import ws.epigraph.projections.op._
import ws.epigraph.types.{DatumTypeApi, TypeKind}
/**
* @author <a href="mailto:konstantin.sobolev@gmail.com">Konstantin Sobolev</a>
*/
abstract class ReqInputModelProjectionGen(
  protected val baseNamespaceProvider: BaseNamespaceProvider,
  op: OpModelProjection[_, _, _ <: DatumTypeApi, _],
  baseNamespaceOpt: Option[Qn],
  _namespaceSuffix: Qn,
  override protected val parentClassGenOpt: Option[ReqInputModelProjectionGen],
  protected val ctx: GenContext) extends ReqInputTypeProjectionGen with ReqModelProjectionGen {

  override type OpProjectionType <: OpModelProjection[_, _, _ <: DatumTypeApi, _]
  override type GenType = ReqInputModelProjectionGen

  // Delegates to ReqProjectionGen.baseNamespace, combining the optional reference
  // name with either the explicitly provided namespace or the inherited one.
  override protected def baseNamespace: Qn = {
    val fallback = baseNamespaceOpt.getOrElse(super.baseNamespace)
    ReqProjectionGen.baseNamespace(referenceNameOpt, fallback)
  }

  // Suffix likewise depends on whether the projection has a reference name.
  override protected def namespaceSuffix: Qn =
    ReqProjectionGen.namespaceSuffix(referenceNameOpt, _namespaceSuffix)

  // e.g. prefix + model type's short name + suffix (prefix/suffix come from the companion).
  override val shortClassName: String = s"$classNamePrefix${ln(cType)}$classNameSuffix"
}
object ReqInputModelProjectionGen {

  /**
   * Creates the request-input projection generator matching the kind of `op`'s
   * model type, going through ReqTypeProjectionGenCache keyed by the projection's
   * optional reference name so shared/recursive projections reuse one generator.
   */
  def dataProjectionGen(
    baseNamespaceProvider: BaseNamespaceProvider,
    op: OpModelProjection[_, _, _ <: DatumTypeApi, _],
    baseNamespaceOpt: Option[Qn],
    namespaceSuffix: Qn,
    parentClassGenOpt: Option[ReqInputModelProjectionGen],
    ctx: GenContext): ReqInputModelProjectionGen = {

    // Builds the kind-specific generator; any kind other than the four below is rejected.
    def newGen(): ReqInputModelProjectionGen = op.`type`().kind() match {
      case TypeKind.RECORD =>
        new ReqInputRecordModelProjectionGen(
          baseNamespaceProvider,
          op.asInstanceOf[OpRecordModelProjection],
          baseNamespaceOpt,
          namespaceSuffix,
          parentClassGenOpt,
          ctx
        )
      case TypeKind.MAP =>
        new ReqInputMapModelProjectionGen(
          baseNamespaceProvider,
          op.asInstanceOf[OpMapModelProjection],
          baseNamespaceOpt,
          namespaceSuffix,
          parentClassGenOpt,
          ctx
        )
      case TypeKind.LIST =>
        new ReqInputListModelProjectionGen(
          baseNamespaceProvider,
          op.asInstanceOf[OpListModelProjection],
          baseNamespaceOpt,
          namespaceSuffix,
          parentClassGenOpt,
          ctx
        )
      case TypeKind.PRIMITIVE =>
        new ReqInputPrimitiveModelProjectionGen(
          baseNamespaceProvider,
          op.asInstanceOf[OpPrimitiveModelProjection],
          baseNamespaceOpt,
          namespaceSuffix,
          parentClassGenOpt,
          ctx
        )
      case x => throw new RuntimeException(s"Unsupported projection kind: $x")
    }

    ReqTypeProjectionGenCache.lookup(
      Option(op.referenceName()),
      ctx.reqInputProjections,
      newGen()
    )
  }
}
| SumoLogic/epigraph | java/codegen/src/main/scala/ws/epigraph/java/service/projections/req/input/ReqInputModelProjectionGen.scala | Scala | apache-2.0 | 3,788 |
/****************************************************************************
* Copyright (C) 2015 Łukasz Szpakowski. *
* *
* This software is licensed under the GNU General Public License *
* v3 or later. See the LICENSE file for the full licensing terms. *
****************************************************************************/
package pl.luckboy.issuenotifier
import android.app.AlertDialog
import android.app.Notification
import android.app.NotificationManager
import android.app.PendingIntent
import android.content.Context
import android.content.DialogInterface
import android.graphics.BitmapFactory
import android.os.Handler
import android.text.Html
import android.text.SpannedString
import android.util.Log
/**
 * Shared helpers: background-thread/UI-handler plumbing, dialog builders,
 * and notification/logging shortcuts.
 */
object AndroidUtils {
  /** Mutable cancellation flag; set `b` to true to suppress a pending UI callback. */
  case class StopFlag(var b: Boolean)

  /**
   * Runs `f` on a new background thread and, when it completes, delivers the
   * result to `g` via `handler`. Delivery is skipped if `stopFlag.b` was set
   * while `f` was running.
   */
  def startThreadAndPost[T](handler: Handler, stopFlag: StopFlag)(f: () => T)(g: T => Unit)
  {
    val worker = new Runnable() {
      override def run()
      {
        val res = f()
        handler.post(new Runnable() {
          override def run()
          {
            // Deliver only if nobody cancelled in the meantime.
            if (!stopFlag.b) g(res)
          }
        })
      }
    }
    new Thread(worker).start()
  }

  /** Posts `f` for asynchronous execution on `handler`'s thread. */
  def post(handler: Handler)(f: () => Unit)
  {
    handler.post(new Runnable() {
      override def run() { f() }
    })
  }

  /** Posts `f` to run on `handler`'s thread after `millis` milliseconds. */
  def postDelayed(handler: Handler, millis: Int)(f: () => Unit)
  {
    handler.postDelayed(new Runnable() {
      override def run() { f() }
    }, millis)
  }

  /**
   * Builds (but does not show) a yes/no dialog; `f` runs when "yes" is chosen,
   * "no" is a no-op. `isWarning` adds the standard alert icon.
   */
  def createQuestionDialog(context: Context, title: String, msg: String, isWarning: Boolean = false)(f: () => Unit) = {
    val b = new AlertDialog.Builder(context)
    if (isWarning) b.setIcon(android.R.drawable.ic_dialog_alert)
    b.setTitle(title)
    b.setMessage(msg)
    b.setPositiveButton(R.string.yes, new DialogInterface.OnClickListener() {
      override def onClick(dialog: DialogInterface, id: Int) = f()
    })
    b.setNegativeButton(R.string.no, new DialogInterface.OnClickListener() {
      override def onClick(dialog: DialogInterface, id: Int) = ()
    })
    b.create()
  }

  /** Builds (but does not show) an error dialog with a single OK button. */
  def createErrorDialog(context: Context, msg: String) = {
    val b = new AlertDialog.Builder(context)
    b.setIcon(android.R.drawable.ic_dialog_alert)
    b.setTitle(R.string.error_title)
    b.setMessage(msg)
    b.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
      override def onClick(dialog: DialogInterface, id: Int) = ()
    })
    b.create()
  }

  /**
   * Issues (or updates) a status-bar notification under `id`. FLAG_NO_CLEAR is
   * always set, so "clear all" does not remove it; `isAutoCancel` additionally
   * lets a tap dismiss it.
   */
  def notify(context: Context, id: Int, smallIconId: Int, largeIconId: Option[Int], title: String, body: String, optPendingIntent: Option[PendingIntent], isTicker: Boolean, isAutoCancel: Boolean, isRingtone: Boolean, isVibration: Boolean)
  {
    val b = new Notification.Builder(context)
    b.setSmallIcon(smallIconId)
    largeIconId.foreach { iconId =>
      b.setLargeIcon(BitmapFactory.decodeResource(context.getResources(), iconId))
    }
    b.setContentTitle(title)
    b.setContentText(body)
    if (isTicker) b.setTicker(body)
    optPendingIntent.foreach { pendingIntent => b.setContentIntent(pendingIntent) }
    val notification = b.getNotification()
    val autoCancelFlag = if (isAutoCancel) Notification.FLAG_AUTO_CANCEL else 0
    notification.flags |= autoCancelFlag | Notification.FLAG_NO_CLEAR
    if (isRingtone) notification.defaults |= Notification.DEFAULT_SOUND
    if (isVibration) notification.defaults |= Notification.DEFAULT_VIBRATE
    val notificationManager = context.getSystemService(Context.NOTIFICATION_SERVICE).asInstanceOf[NotificationManager]
    notificationManager.notify(id, notification)
  }

  /** Removes the notification previously posted under `id`, if any. */
  def cancelNotification(context: Context, id: Int)
  {
    val notificationManager = context.getSystemService(Context.NOTIFICATION_SERVICE).asInstanceOf[NotificationManager]
    notificationManager.cancel(id)
  }

  /** Info-level log; discards Log.i's return value and yields Unit. */
  def log(tag: String, s: String) = {
    Log.i(tag, s)
    ()
  }

  /** Warn-logs the exception of a Left result; returns `res` unchanged either way. */
  def log[T](tag: String, res: Either[Exception, T]) =
    res match {
      case Left(e) =>
        Log.w(tag, e)
        res
      case _ =>
        res
    }

  /** Converts plain text to its HTML form via a SpannedString round-trip. */
  def htmlFromString(s: String) = {
    val spanned = new SpannedString(s)
    Html.toHtml(spanned)
  }
}
| luckboy/IssueNotifier | src/main/scala/pl/luckboy/issuenotifier/AndroidUtils.scala | Scala | gpl-3.0 | 4,278 |
/** Trivial extractor: matching always succeeds and binds the matched Int itself. */
trait Unapply {
  // Option(x) on a boxed Int is never null, so this always yields Some(x).
  def unapply(x: Int): Option[Int] = Option(x)
}
// NOTE(review): IDE parameterInfo test fixture — the /*caret*/ marker and the
// deliberately incomplete pattern are part of the test input; do not "fix" them.
object X {
  Self: Unapply => // self-type annotation: names `this` as `Self` with type Unapply
  22 match {
    case Self(/*caret*/) => // parameter info is requested at the caret (expected: Int)
  }
}
//Int | triggerNZ/intellij-scala | testdata/parameterInfo/patternParameterInfo/unapply/SelfType.scala | Scala | apache-2.0 | 146 |
package mesosphere.raml
import treehugger.forest._
import definitions._
import org.raml.v2.api.RamlModelResult
import org.raml.v2.api.model.v10.api.Library
import org.raml.v2.api.model.v10.datamodel._
import treehuggerDSL._
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.collection.immutable.Seq
object RamlTypeGenerator {
// Field name reserved for the catch-all JsObject of undeclared properties.
val AdditionalProperties = "additionalProperties"

// RAML built-in scalar type name -> treehugger class symbol used in generated code.
val baseTypeTable: Map[String, Symbol] =
  Map(
    "string" -> StringClass,
    "int8" -> ByteClass,
    "int16" -> ShortClass,
    "int32" -> IntClass,
    "integer" -> IntClass,
    "int64" -> LongClass,
    "long" -> LongClass,
    "float" -> FloatClass,
    "double" -> DoubleClass,
    "boolean" -> BooleanClass,
    "date-only" -> RootClass.newClass("java.time.LocalDate"),
    "time-only" -> RootClass.newClass("java.time.LocalTime"),
    "datetime-only" -> RootClass.newClass("java.time.LocalDateTime"),
    "datetime" -> RootClass.newClass("java.time.OffsetDateTime"),
    "RamlGenerated" -> RootClass.newClass("RamlGenerated"),
    "RamlConstraints" -> RootClass.newClass("RamlConstraints")
  )

// Scala-side type names that map to built-ins and need no generated definition.
val builtInTypes = Set(
  "Byte",
  "Short",
  "Int",
  "Long",
  "Float",
  "Double",
  "Boolean",
  "String",
  "java.time.LocalDate",
  "java.time.LocalTime",
  "java.time.LocalDateTime",
  "java.time.OffsetDateTime"
)

// Symbols and helpers used to reference scala / play-json types from generated trees.
val TryClass = RootClass.newClass("scala.util.Try")
val SeqClass = RootClass.newClass("scala.collection.immutable.Seq")
val SetClass = RootClass.newClass("Set")
val IterableClass = RootClass.newClass("Iterable")

// Seq[typ] type constructor shorthand.
def TYPE_SEQ(typ: Type): Type = SeqClass TYPE_OF typ

val PlayJsonFormat = RootClass.newClass("play.api.libs.json.Format")
def PLAY_JSON_FORMAT(typ: Type): Type = PlayJsonFormat TYPE_OF typ
val PlayJsonResult = RootClass.newClass("play.api.libs.json.JsResult")
def PLAY_JSON_RESULT(typ: Type): Type = PlayJsonResult TYPE_OF typ
val PlayJson = RootClass.newClass("play.api.libs.json.Json")
val PlayJsValue = RootClass.newClass("play.api.libs.json.JsValue")
val PlayJsString = RootClass.newClass("play.api.libs.json.JsString")
val PlayJsObject = RootClass.newClass("play.api.libs.json.JsObject")
val PlayJsArray = RootClass.newClass("play.api.libs.json.JsArray")
val PlayValidationError = RootClass.newClass("play.api.libs.json.JsonValidationError")
val PlayJsError = RootClass.newClass("play.api.libs.json.JsError")
val PlayJsSuccess = RootClass.newClass("play.api.libs.json.JsSuccess")
val PlayReads = RootClass.newClass("play.api.libs.json.Reads")
def PLAY_JSON_READS(typ: Type): Type = PlayReads TYPE_OF typ
val PlayWrites = RootClass.newClass("play.api.libs.json.Writes")
def PLAY_JSON_WRITES(typ: Type): Type = PlayWrites TYPE_OF typ
val PlayPath = RootClass.newClass("play.api.libs.json.JsPath")
val PlayJsNull = REF("play.api.libs.json.JsNull")

/**
 * we don't support unit test generation for RAML-generated scala code. to subvert the
 * code coverage calculator this string may be inserted as a comment at the top of a
 * scala file, just before the package declaration(s).
 *
 * order of operations is important: COVERAGE-OFF should appear at the top of the file,
 * before package declarations. so `withComment` should be applied directly to package
 * declarations (`inPackage`)
 */
val NoCodeCoverageReporting = "$COVERAGE-OFF$"

// Formatter-off directive, presumably emitted so formatters skip generated files — TODO confirm consumer.
val NoScalaFormat = "format: OFF"
/** Lower-cases the whole name and upper-cases only its first character. */
def camelify(name: String): String = {
  // Locale.ROOT makes the case mapping locale-independent: under e.g. the Turkish
  // default locale, "I".toLowerCase yields dotless 'ı', which would corrupt the
  // ASCII identifiers this generator emits.
  name.toLowerCase(java.util.Locale.ROOT).capitalize
}
/**
 * Camel-cases `name`: drops each separator matched by the pattern's first group
 * and upper-cases the character that follows it.
 */
def underscoreToCamel(name: String) = {
  val separatorThenChar = "(/|_|\\\\,)([a-z\\\\d])".r
  separatorThenChar.replaceAllIn(name, m => m.group(2).toUpperCase())
}
/**
 * Name of the Scala enum generated for string type `s`: the value of a
 * `(pragma.scalaType)` annotation when present, otherwise the capitalized
 * `default` (falling back to the RAML type name when no default is given).
 */
def enumName(s: StringTypeDeclaration, default: Option[String] = None): String =
  s.annotations().asScala.find(_.name() == "(pragma.scalaType)") match {
    case Some(annotation) => annotation.structuredValue().value().toString
    case None             => default.getOrElse(s.name()).capitalize
  }
/**
 * Resolves the Scala class name for an object declaration, paired with its
 * optional parent type name. Declarations named "items" or "/.*/" carry the
 * meaningful name in their `type` field instead of `name`.
 */
def objectName(o: ObjectTypeDeclaration): (String, Option[String]) = {
  if (o.`type`() == "object") o.name() -> None
  else if (o.name() == "items" || o.name() == "/.*/") o.`type`() -> None
  else o.name() -> Some(o.`type`())
}
/** Wraps names containing '-' in backticks so they are legal Scala identifiers. */
def scalaFieldName(name: String): String =
  if (name.indexOf('-') >= 0) "`" + name + "`"
  else name
/** True when `t` carries an annotation with exactly the given name. */
private def hasRamlAnnotation(t: TypeDeclaration, annotationName: String): Boolean =
  t.annotations().asScala.exists(_.name() == annotationName)

/** An update type: a plain `object` declaration flagged with `(pragma.asUpdateType)`. */
def isUpdateType(o: ObjectTypeDeclaration): Boolean =
  (o.`type`() == "object") && hasRamlAnnotation(o, "(pragma.asUpdateType)")

/** Fields flagged `(pragma.omitEmpty)` are dropped from serialized JSON when empty. */
def isOmitEmpty(field: TypeDeclaration): Boolean =
  hasRamlAnnotation(field, "(pragma.omitEmpty)")

/** `(pragma.forceOptional)` forces Option typing regardless of required/default. */
def pragmaForceOptional(o: TypeDeclaration): Boolean =
  hasRamlAnnotation(o, "(pragma.forceOptional)")

/** `(pragma.serializeOnly)` generates only a JSON Writes (no Reads) for the type. */
def pragmaSerializeOnly(o: TypeDeclaration): Boolean =
  hasRamlAnnotation(o, "(pragma.serializeOnly)")
/**
 * Name of the companion "Update" type requested via `(pragma.generateUpdateType)`,
 * or None when the pragma is absent or `o` is itself already an update type.
 * The pragma's value, when present, overrides the default `<name>Update`.
 */
def generateUpdateTypeName(o: ObjectTypeDeclaration): Option[String] =
  if (o.`type`() == "object" && !isUpdateType(o)) {
    o.annotations().asScala.find(_.name() == "(pragma.generateUpdateType)").map { annotation =>
      Option(annotation.structuredValue().value()) match {
        case Some(value) => value.toString
        case None        => o.name() + "Update"
      }
    }
  } else {
    None
  }
/**
 * Builds the name -> treehugger-symbol table for all declared RAML types,
 * seeded with [[baseTypeTable]] (the RAML built-in scalars).
 *
 * Cases below:
 *  - arrays whose item type is not a plain string reference are rejected — they
 *    must be declared as `ObjectName[]`;
 *  - object types may contribute two entries (the type itself plus a generated
 *    update variant, see [[generateUpdateTypeName]]);
 *  - string types with enum values get a dedicated class symbol, plain strings
 *    map to String;
 *  - number types resolve through their `format` (e.g. "int32" -> Int), which
 *    must already be present in the table.
 */
def buildTypeTable(types: Set[TypeDeclaration]): Map[String, Symbol] = {
  // Tail-recursive worker folding each declaration into the accumulated table.
  @tailrec def build(types: Set[TypeDeclaration], result: Map[String, Symbol]): Map[String, Symbol] = {
    types match {
      case s if s.nonEmpty =>
        s.head match {
          case a: ArrayTypeDeclaration if a.items().`type`() != "string" =>
            sys.error(s"${a.name()} : ${a.items().name()} ${a.items.`type`} ArrayTypes should be declared as ObjectName[]")
          case o: ObjectTypeDeclaration =>
            // Register the object, plus its generated update type when requested.
            val (name, _) = objectName(o)
            val updateTypeName = generateUpdateTypeName(o)
            val normalTypeName = Some(name)
            val next = Seq(updateTypeName, normalTypeName).flatten.map(n => n -> RootClass.newClass(n))
            build(s.tail, result ++ next)
          case u: UnionTypeDeclaration =>
            build(s.tail, result + (u.name() -> RootClass.newClass(u.name)))
          case e: StringTypeDeclaration if e.enumValues().asScala.nonEmpty =>
            // Enum-valued strings become their own generated enum class.
            build(s.tail, result + (e.name -> RootClass.newClass(e.name)))
          case str: StringTypeDeclaration =>
            build(s.tail, result + (str.name -> StringClass))
          case n: NumberTypeDeclaration =>
            // Resolve via the number's declared format symbol.
            build(s.tail, result + (n.name -> result(n.format())))
          case _ =>
            build(s.tail, result)
        }
      case _ =>
        result
    }
  }
  build(types, baseTypeTable)
}
/** A named Scala definition generated from a RAML type; toTree() renders its source trees. */
sealed trait GeneratedClass {
  val name: String
  def toTree(): Seq[Tree]
}
/**
 * Generation model for a RAML string enum: emits a sealed trait `name`, one case
 * object per value, an implicit play-json Format, a string->value lookup, and an
 * optional DefaultValue member when the RAML declares a default.
 */
case class EnumT(name: String, values: Set[String], default: Option[String], comments: Seq[String]) extends GeneratedClass {
  val sortedValues = values.toVector.sorted
  override def toString: String = s"Enum($name, $values)"
  override def toTree(): Seq[Tree] = {
    // sealed trait <name> extends Product with Serializable with RamlGenerated,
    // exposing the raw string as `value` (and via toString).
    val baseTrait = TRAITDEF(name) withParents("Product", "Serializable", "RamlGenerated") withFlags Flags.SEALED := BLOCK(
      VAL("value", StringClass),
      DEF("toString", StringClass) withFlags Flags.OVERRIDE := REF("value")
    )
    // One `case object <CamelizedValue> extends <name>` per enum literal.
    val enumObjects = sortedValues.map { enumValue =>
      CASEOBJECTDEF(underscoreToCamel(camelify(enumValue))) withParents name := BLOCK(
        VAL("value") := LIT(enumValue)
      )
    }
    val patternMatches = sortedValues.map { enumValue =>
      CASE(LIT(enumValue.toLowerCase)) ==> REF(underscoreToCamel(camelify(enumValue)))
    }
    // Fallback case: unknown literal -> JsError listing the accepted values.
    val playWildcard = CASE(WILDCARD) ==>
      (REF(PlayJsError) APPLY (REF(PlayValidationError) APPLY(LIT("error.unknown.enum.literal"), LIT(s"$name (${sortedValues.mkString(", ")})"))))
    val playPatternMatches = sortedValues.map { enumValue =>
      CASE(LIT(enumValue.toLowerCase)) ==> (REF(PlayJsSuccess) APPLY REF(underscoreToCamel(camelify(enumValue))))
    }
    // Implicit Format: reads matches the lower-cased JSON string; writes emits `value`.
    val playJsonFormat = (OBJECTDEF("playJsonFormat") withParents PLAY_JSON_FORMAT(name) withFlags Flags.IMPLICIT) := BLOCK(
      DEF("reads", PLAY_JSON_RESULT(name)) withParams PARAM("json", PlayJsValue) := {
        REF("json") MATCH(
          CASE(REF(PlayJsString) UNAPPLY ID("s")) ==> (REF("s") DOT "toLowerCase" MATCH (playPatternMatches ++ Vector(playWildcard))),
          playWildcard)
      },
      DEF("writes", PlayJsValue) withParams PARAM("o", name) := {
        REF(PlayJsString) APPLY (REF("o") DOT "value")
      }
    )
    // Companion object: case objects, the format, StringToValue map,
    // all/fromString helpers, and DefaultValue when a default exists.
    val obj = OBJECTDEF(name) := BLOCK(
      enumObjects ++ Seq(
        playJsonFormat,
        VAL("StringToValue") withType(TYPE_MAP(StringClass, name)) withFlags(Flags.PRIVATE) := REF("Map") APPLY(sortedValues.map { enumValue =>
          TUPLE(LIT(enumValue), REF(underscoreToCamel(camelify(enumValue))))
        }),
        DEF("all", IterableClass TYPE_OF name) := REF("StringToValue") DOT "values",
        DEF("fromString", TYPE_OPTION(name)) withParams(PARAM("v", StringClass)) := REF("StringToValue") DOT "get" APPLY(REF("v"))
      ) ++ default.map { defaultValue =>
        VAL("DefaultValue") withType(name) := REF(underscoreToCamel(camelify(defaultValue)))
      }
    )
    Seq(baseTrait.withDoc(comments), obj)
  }
}
/**
 * One validation constraint on a generated field, rendered into play-json Reads
 * combinators. `C` is the limit's value type (e.g. Integer for maxLength).
 */
sealed trait Constraint[C] { self =>
  val name: String
  val constraint: C
  val constraintToValue: C => Tree = { (c: C) => LIT(c) } // decent assumption for built-ins, probably not much else
  /** false indicates custom, non-play constraint implementations that we've implemented as part of this generator */
  val builtIn: Boolean
  /** @return a code gen expression that represents a playJS reads validation */
  def validate(): Tree
  /** a code gen expression for a field that represents the constraint limit */
  val limitField: Option[Tree] = None
  /** decorate this constraint with a `limitField` implementation */
  def copyWith(lf: Option[Tree] = None): Constraint[C]
  /** Attaches a limit val named constraint<FieldName><ConstraintName> (dashes camelized away). */
  def withFieldLimit(f: FieldT): Constraint[C] = {
    val fieldName = scalaFieldName(underscoreToCamel(camelify(s"constraint_${f.rawName}_${name}".replace("-", "_"))))
    val limit = (VAL(fieldName) := constraintToValue(constraint))
    copyWith(Option(limit))
  }
}
object Constraint {
  // built-in playJS validators
  def MaxLength(len: Integer) = Constraint("maxLength", len, builtIn = true) { REF(_) APPLYTYPE StringClass APPLY(_) }
  def MinLength(len: Integer) = Constraint("minLength", len, builtIn = true) { REF(_) APPLYTYPE StringClass APPLY(_) }
  def Pattern(p: String) = Constraint("pattern", p, builtIn = true, (c: String) => LIT(c) DOT "r") { REF(_) APPLY(_) }
  // note: collection min/max items reuse play's minLength/maxLength Reads, typed to the collection
  def MaxItems(len: Integer, t: Type) = Constraint("maxLength", len, builtIn = true) { REF(_) APPLYTYPE t APPLY(_) }
  def MinItems(len: Integer, t: Type) = Constraint("minLength", len, builtIn = true) { REF(_) APPLYTYPE t APPLY(_) }
  def Max(v: Number, t: Type) = Constraint("max", v, builtIn = true) { REF(_) APPLYTYPE t APPLY(_) }
  def Min(v: Number, t: Type) = Constraint("min", v, builtIn = true) { REF(_) APPLYTYPE t APPLY(_) }
  // custom validator implementations follow
  def KeyPattern(p: String, mapValType: Type) =
    Constraint("keyPattern", p, builtIn = false, (c: String) => LIT(c) DOT "r") { REF(_) APPLYTYPE mapValType APPLY(_) }

  /** Default [[Constraint]] implementation: `validate()` renders `validateFunc(name, limit)`. */
  case class BasicConstraint[C](
    override val name: String,
    override val constraint: C,
    override val constraintToValue: C => Tree,
    val validateFunc: (String, Tree) => Tree,
    override val builtIn: Boolean,
    override val limitField: Option[Tree] = None) extends Constraint[C] {
    override def validate(): Tree = validateFunc(name, constraintToValue(constraint))
    override def copyWith(lf: Option[Tree] = limitField): Constraint[C] = copy(limitField = lf)
  }

  /** Factory used by the helpers above; `c2v` converts the limit to a literal tree. */
  def apply[C](n: String, c: C, builtIn: Boolean, c2v: C => Tree = { (c: C) => LIT(c) })(f: (String, Tree) => Tree): Constraint[C] =
    new BasicConstraint(n, c, c2v, f, builtIn)

  /** Chains all of a field's constraints into one Reads expression via `keepAnd`. */
  implicit class Constraints(val c: Seq[Constraint[_]]) extends AnyVal {
    def validate(exp: Tree): Tree = {
      if (c.isEmpty) {
        exp
      } else {
        @tailrec
        def buildChain(constraints: List[Constraint[_]], chain: Tree): Tree = constraints match {
          case Nil => chain
          case c :: rs => buildChain(rs, chain INFIX("keepAnd", c.validate()))
        }
        exp APPLY buildChain(c.tail.to[List], c.head.validate())
      }
    }
  }

  /** Computes the import trees the generated validation code needs, if any. */
  implicit class AllConstraints(val c: Seq[Seq[Constraint[_]]]) extends AnyVal {
    def requiredImports: Seq[Tree] = {
      val flattened = c.flatten
      if (flattened.isEmpty) {
        Nil
      } else {
        Seq(
          Option(IMPORT(PlayReads DOT "_")),
          // functional syntax is only needed when constraints get chained with keepAnd
          if (c.exists(_.size > 1)) Option(IMPORT("play.api.libs.functional.syntax._")) else None,
          // any non-built-in constraint pulls in our custom RamlConstraints helpers
          flattened.find(!_.builtIn).map(_ => IMPORT("RamlConstraints._"))
        )
      }.flatten
    }
  }
}
/**
 * Generation model for one field of a generated type.
 *
 * @param rawName       field name as it appears in JSON
 * @param `type`        resolved Scala type of the field
 * @param comments      RAML description lines, rendered into scaladoc
 * @param constraints   validations rendered into the play-json Reads
 * @param required      required fields are read strictly and keep their plain type
 * @param default       RAML default value (as source text), becomes the parameter default
 * @param repeated      collection-typed field (Seq/Set/Map)
 * @param forceOptional force Option typing regardless of required/default
 * @param omitEmpty     drop the field from serialized JSON when empty
 */
case class FieldT(rawName: String, `type`: Type, comments: Seq[String], constraints: Seq[Constraint[_]], required: Boolean,
  default: Option[String], repeated: Boolean = false, forceOptional: Boolean = false, omitEmpty: Boolean = false) {
  // Backtick-escaped name, safe as a Scala identifier.
  val name = scalaFieldName(rawName)
  override def toString: String = s"$name: ${`type`}"

  // Some((paramType, paramDefault)) when the constructor parameter carries a default:
  // required-with-default keeps the plain type; repeated fields default to an empty
  // collection (Map/Set/Seq); everything else becomes Option[...] = None.
  lazy val paramTypeValue: Option[(Type, Tree)] = {
    if ((required || default.isDefined) && !forceOptional) {
      defaultValue.map { d => `type` -> d }
    } else {
      Option(
        if (repeated && !forceOptional) {
          val typeName = `type`.toString()
          if (typeName.startsWith("Map")) {
            `type` -> (REF("Map") DOT "empty")
          } else {
            if (typeName.startsWith("Set")) {
              `type` -> (REF("Set") DOT "empty")
            } else {
              `type` -> NIL
            }
          }
        } else {
          TYPE_OPTION(`type`) -> NONE
        }
      )
    }
  }

  // Constructor-parameter tree, with the default applied when one exists.
  lazy val param: treehugger.forest.ValDef =
    paramTypeValue.fold { PARAM(name, `type`).tree } { case (pType, pValue) => PARAM(name, pType) := pValue }

  // Scaladoc @param line assembled from the RAML description, or "" when undocumented.
  lazy val comment: String = if (comments.nonEmpty) {
    val lines = comments.flatMap(_.lines)
    s"@param $name ${lines.head} ${if (lines.tail.nonEmpty) "\\n " else ""}${lines.tail.mkString("\\n ")}"
  } else {
    ""
  }

  // RAML default literal coerced to the field's Scala type; any non-scalar type is
  // assumed to be a generated enum and referenced as <Type>.<CamelizedValue>.
  val defaultValue: Option[Tree] = default.map { d =>
    `type`.toString() match {
      case "Byte" => LIT(d.toByte)
      case "Short" => LIT(d.toShort)
      case "Int" => LIT(d.toInt)
      case "Long" => LIT(d.toLong)
      case "Float" => LIT(d.toFloat)
      case "Double" => LIT(d.toDouble)
      case "Boolean" => LIT(d.toBoolean)
      case "String" => LIT(d)
      // hopefully this is actually an enum
      case _ => (`type` DOT underscoreToCamel(camelify(d))).tree
    }
  }

  // Reads fragment for the combinator-based reader: required -> strict read;
  // repeated -> read orElse empty collection; has-default -> read orElse default;
  // otherwise -> readNullable.
  val playReader = {
    // required fields never have defaults
    if (required && !forceOptional) {
      TUPLE(REF("__") DOT "\\\\" APPLY LIT(rawName)) DOT "read" APPLYTYPE `type`
    } else if (repeated && !forceOptional) {
      TUPLE(REF("__") DOT "\\\\" APPLY LIT(rawName)) DOT "read" APPLYTYPE `type` DOT "orElse" APPLY(REF(PlayReads) DOT "pure" APPLY(`type` APPLY()))
    } else {
      if (defaultValue.isDefined && !forceOptional) {
        TUPLE((REF("__") DOT "\\\\" APPLY LIT(rawName)) DOT "read" APPLYTYPE `type`) DOT "orElse" APPLY (REF(PlayReads) DOT "pure" APPLY defaultValue.get)
      } else {
        TUPLE((REF("__") DOT "\\\\" APPLY LIT(rawName)) DOT "readNullable" APPLYTYPE `type`)
      }
    }
  }

  // Constraint-aware validation used by the hand-rolled reads(): mirrors the
  // required/repeated/default rules of playReader but applies `constraints`.
  val playValidator = {
    def reads = constraints.validate(PlayPath DOT "read" APPLYTYPE `type`)
    def validate =
      REF("json") DOT "\\\\" APPLY(LIT(rawName)) DOT "validate" APPLYTYPE `type` APPLY(reads)
    def validateOpt =
      REF("json") DOT "\\\\" APPLY(LIT(rawName)) DOT "validateOpt" APPLYTYPE `type` APPLY(reads)
    def validateOptWithDefault(defaultValue: Tree) =
      REF("json") DOT "\\\\" APPLY(LIT(rawName)) DOT "validateOpt" APPLYTYPE `type` APPLY(reads) DOT "map" APPLY (REF("_") DOT "getOrElse" APPLY defaultValue)
    if (required && !forceOptional) {
      validate
    } else if (repeated && !forceOptional) {
      validateOptWithDefault(`type` APPLY())
    } else {
      if (defaultValue.isDefined && !forceOptional) {
        validateOptWithDefault(defaultValue.get)
      } else {
        validateOpt
      }
    }
  }
}
case class ObjectT(name: String, fields: Seq[FieldT], parentType: Option[String], comments: Seq[String], childTypes: Seq[ObjectT] = Nil, discriminator: Option[String] = None, discriminatorValue: Option[String] = None, serializeOnly: Boolean = false) extends GeneratedClass {
override def toString: String = parentType.fold(s"$name(${fields.mkString(", ")})")(parent => s"$name(${fields.mkString(" , ")}) extends $parent")
override def toTree(): Seq[Tree] = {
val actualFields = fields.filter(_.rawName != discriminator.getOrElse(""))
val params = actualFields.map(_.param)
val klass = if (childTypes.nonEmpty) {
if (params.nonEmpty) {
parentType.fold(TRAITDEF(name) withParents("RamlGenerated", "Product", "Serializable") := BLOCK(params))(parent =>
TRAITDEF(name) withParents(parent, "Product", "Serializable") := BLOCK(params)
)
} else {
parentType.fold((TRAITDEF(name) withParents("RamlGenerated", "Product", "Serializable")).tree)(parent =>
(TRAITDEF(name) withParents(parent, "Product", "Serializable")).tree
)
}
} else {
parentType.fold(CASECLASSDEF(name) withParents("RamlGenerated") withParams params)(parent =>
CASECLASSDEF(name) withParams params withParents parent
).tree
}
val playFormat = if (discriminator.isDefined) {
Seq(
IMPORT("play.api.libs.json._"),
OBJECTDEF("playJsonFormat") withParents PLAY_JSON_FORMAT(name) withFlags Flags.IMPLICIT := BLOCK(
DEF("reads", PLAY_JSON_RESULT(name)) withParams PARAM("json", PlayJsValue) := BLOCK(
if (actualFields.size > 1) {
Seq(IMPORT("play.api.libs.functional.syntax._"),
actualFields.map(_.playReader).reduce(_ DOT "and" APPLY _) DOT "apply" APPLY (REF(name) DOT "apply _") DOT "reads" APPLY REF("json"))
} else if (actualFields.size == 1) {
Seq(actualFields.head.playReader DOT "map" APPLY(REF(name) DOT "apply _") DOT "reads" APPLY REF("json"))
} else {
Seq(REF(name))
}
),
DEF("writes", PlayJsObject) withParams PARAM("o", name) := {
REF(PlayJson) DOT "obj" APPLY
fields.map { field =>
if (field.rawName == discriminator.get) {
TUPLE(LIT(field.rawName), REF(PlayJson) DOT "toJsFieldJsValueWrapper" APPLY(PlayJson DOT "toJson" APPLY LIT(discriminatorValue.getOrElse(name))))
} else {
TUPLE(LIT(field.rawName), REF(PlayJson) DOT "toJsFieldJsValueWrapper" APPLY(PlayJson DOT "toJson" APPLY (REF("o") DOT field.rawName)))
}
}
}
)
)
} else if (actualFields.nonEmpty && actualFields.exists(_.default.nonEmpty) && !actualFields.exists(f => f.repeated || f.omitEmpty || f.constraints.nonEmpty)) {
Seq(
IMPORT("play.api.libs.json._"),
IMPORT("play.api.libs.functional.syntax._"),
VAL("playJsonReader") withType PLAY_JSON_READS(name) := TUPLE(
actualFields.map(_.playReader).reduce(_ DOT "and" APPLY _)
) APPLY (REF(name) DOT "apply _"),
VAL("playJsonWriter") withType PLAY_JSON_WRITES(name) := REF(PlayJson) DOT "writes" APPLYTYPE (name),
OBJECTDEF("playJsonFormat") withParents PLAY_JSON_FORMAT(name) withFlags Flags.IMPLICIT := BLOCK(
DEF("reads", PLAY_JSON_RESULT(name)) withParams PARAM("json", PlayJsValue) := BLOCK(
REF("playJsonReader") DOT "reads" APPLY(REF("json"))
),
DEF("writes", PlayJsValue) withParams PARAM("o", name) := BLOCK(
REF("playJsonWriter") DOT "writes" APPLY REF("o")
)
)
)
} else if (actualFields.size > 22 || actualFields.exists(f => f.repeated || f.omitEmpty || f.constraints.nonEmpty) ||
actualFields.map(_.toString).exists(t => t.toString.startsWith(name) || t.toString.contains(s"[$name]"))) {
actualFields.map(_.constraints).requiredImports ++ Seq(
OBJECTDEF("playJsonFormat") withParents (if (serializeOnly) PLAY_JSON_WRITES(name) else PLAY_JSON_FORMAT(name)) withFlags Flags.IMPLICIT := BLOCK(
if (serializeOnly) {
Seq()
} else Seq(DEF("reads", PLAY_JSON_RESULT(name)) withParams PARAM("json", PlayJsValue) := {
BLOCK(
actualFields.map { field =>
VAL(field.name) := field.playValidator
} ++ Seq(
VAL("_errors") := SEQ(actualFields.map(f => TUPLE(LIT(f.rawName), REF(f.name)))) DOT "collect" APPLY BLOCK(
CASE(REF(s"(field, e:$PlayJsError)")) ==> (REF("e") DOT "repath" APPLY (REF(PlayPath) DOT "\\\\" APPLY REF("field"))) DOT s"asInstanceOf[$PlayJsError]"),
IF(REF("_errors") DOT "nonEmpty") THEN (
REF("_errors") DOT "reduceOption" APPLYTYPE PlayJsError APPLY (REF("_") DOT "++" APPLY REF("_")) DOT "getOrElse" APPLY (REF("_errors") DOT "head")
) ELSE (
REF(PlayJsSuccess) APPLY (REF(name) APPLY
actualFields.map { field =>
REF(field.name) := (REF(field.name) DOT "get")
}))
)
)
}) ++ Seq(
DEF("writes", PlayJsValue) withParams PARAM("o", name) := BLOCK(
actualFields.withFilter(_.name != AdditionalProperties).map { field =>
val serialized = REF(PlayJson) DOT "toJson" APPLY (REF("o") DOT field.name)
if (field.omitEmpty && field.repeated && !field.forceOptional) {
VAL(field.name) := IF(REF("o") DOT field.name DOT "nonEmpty") THEN (
serialized
) ELSE (
PlayJsNull
)
} else if(field.omitEmpty && !field.repeated && !builtInTypes.contains(field.`type`.toString())) {
// earlier "require" check ensures that we won't see a field w/ omitEmpty that is not optional.
// see buildTypes
VAL(field.name) := serialized MATCH(
// avoid serializing JS objects w/o any fields
CASE(ID("obj") withType (PlayJsObject),
IF(REF("obj.fields") DOT "isEmpty")) ==> PlayJsNull,
CASE(ID("rs")) ==> REF("rs")
)
} else {
VAL(field.name) := serialized
}
} ++
Seq(
REF(PlayJsObject) APPLY (SEQ(
actualFields.withFilter(_.name != AdditionalProperties).map { field =>
TUPLE(LIT(field.rawName), REF(field.name))
}) DOT "filter" APPLY (REF("_._2") INFIX("!=") APPLY PlayJsNull) DOT("++") APPLY(
actualFields.find(_.name == AdditionalProperties).fold(REF("Seq") DOT "empty") { extraPropertiesField =>
REF("o.additionalProperties") DOT "fields"
})
)
)
)
)
)
)
} else {
Seq(VAL("playJsonFormat") withFlags Flags.IMPLICIT := REF("play.api.libs.json.Json") DOT "format" APPLYTYPE (name))
}
val defaultFields = fields.withFilter(_.paramTypeValue.nonEmpty).flatMap { f =>
val (dType, dValue) = f.paramTypeValue.get
val fieldName = (
if (f.name.contains("-")) underscoreToCamel(f.name.replace('-', '_')) else f.name
).replace("`", "").capitalize
Seq(VAL(s"Default${fieldName}") withType(dType) := dValue)
}
val defaultInstance: Seq[Tree] =
if (fields.forall(f => f.defaultValue.nonEmpty || f.forceOptional || (f.repeated && !f.required))) {
Seq(VAL("Default") withType (name) := REF(name) APPLY())
} else Nil
val obj = if (childTypes.isEmpty || serializeOnly) {
(OBJECTDEF(name)) := BLOCK(
playFormat ++ defaultFields ++ defaultInstance ++ fields.flatMap { f =>
f.constraints.flatMap(_.withFieldLimit(f).limitField)
}
)
} else if (discriminator.isDefined) {
val childDiscriminators: Map[String, ObjectT] = childTypes.map(ct => ct.discriminatorValue.getOrElse(ct.name) -> ct)(collection.breakOut)
OBJECTDEF(name) := BLOCK(
Seq(OBJECTDEF("PlayJsonFormat") withParents PLAY_JSON_FORMAT(name) withFlags Flags.IMPLICIT := BLOCK(
DEF("reads", PLAY_JSON_RESULT(name)) withParams PARAM("json", PlayJsValue) := {
TUPLE(REF("json") DOT "\\\\" APPLY LIT(discriminator.get)) DOT "validate" APPLYTYPE (StringClass) MATCH (
childDiscriminators.map { case (k, v) =>
CASE(PlayJsSuccess APPLY(LIT(k), REF("_"))) ==> (REF("json") DOT "validate" APPLYTYPE (v.name))
} ++
Seq(
CASE(WILDCARD) ==> (REF(PlayJsError) APPLY (REF(PlayValidationError) APPLY(LIT("error.expected.jsstring"), LIT(s"expected one of (${childDiscriminators.keys.mkString(", ")})"))))
)
)
},
DEF("writes", PlayJsValue) withParams PARAM("o", name) := BLOCK(
REF("o") MATCH
childDiscriminators.map { case (k, v) =>
CASE(REF(s"f:${v.name}")) ==> (REF(PlayJson) DOT "toJson" APPLY REF("f") APPLY(REF(v.name) DOT "playJsonFormat"))
}
)
)) ++ defaultFields ++ defaultInstance
)
} else {
System.err.println(s"[WARNING] $name uses subtyping but has no discriminator. If it is not a union type when it is" +
" used, it will not be able to be deserialized at this time")
OBJECTDEF(name) := BLOCK(defaultFields ++ defaultInstance)
}
val commentBlock = comments ++ actualFields.map(_.comment)(collection.breakOut)
Seq(klass.withDoc(commentBlock)) ++ childTypes.flatMap(_.toTree()) ++ Seq(obj)
}
}
// A string-valued RAML type. It maps directly onto String, so it contributes
// no generated source of its own.
case class StringT(name: String, defaultValue: Option[String]) extends GeneratedClass {
  override def toTree(): Seq[treehugger.forest.Tree] = Nil
}
case class UnionT(name: String, childTypes: Seq[GeneratedClass], comments: Seq[String]) extends GeneratedClass {
override def toString: String = s"Union($name, $childTypes)"
override def toTree(): Seq[Tree] = {
val base = (TRAITDEF(name) withParents("RamlGenerated", "Product", "Serializable")).tree.withDoc(comments)
val childJson: Seq[GenericApply] = childTypes.map { child =>
REF("json") DOT s"validate" APPLYTYPE (child.name)
}
val obj = OBJECTDEF(name) := BLOCK(
OBJECTDEF("playJsonFormat") withParents PLAY_JSON_FORMAT(name) withFlags Flags.IMPLICIT := BLOCK(
DEF("reads", PLAY_JSON_RESULT(name)) withParams PARAM("json", PlayJsValue) := BLOCK(
childJson.reduce((acc, next) => acc DOT "orElse" APPLY next)
),
DEF("writes", PlayJsValue) withParams PARAM("o", name) := BLOCK(
REF("o") MATCH
childTypes.map { child =>
CASE(REF(s"f:${child.name}")) ==> (REF(PlayJson) DOT "toJson" APPLY REF("f") APPLY(REF(child.name) DOT "playJsonFormat"))
}
)
)
)
val children = childTypes.flatMap {
case s: StringT =>
Seq[Tree](
CASECLASSDEF(s.name) withParents name withParams s.defaultValue.fold(PARAM("value", StringClass).tree){ defaultValue =>
PARAM("value", StringClass) := LIT(defaultValue)
},
OBJECTDEF(s.name) := BLOCK(
Seq(OBJECTDEF("playJsonFormat") withParents PLAY_JSON_FORMAT(s.name) withFlags Flags.IMPLICIT := BLOCK(
DEF("reads", PLAY_JSON_RESULT(s.name)) withParams PARAM("json", PlayJsValue) := BLOCK(
REF("json") DOT "validate" APPLYTYPE StringClass DOT "map" APPLY (REF(s.name) DOT "apply")
),
DEF("writes", PlayJsValue) withParams PARAM("o", s.name) := BLOCK(
REF(PlayJsString) APPLY (REF("o") DOT "value")
)
)) ++ s.defaultValue.map{ defaultValue =>
VAL("DefaultValue") withType(s.name) := REF(s.name) APPLY()
}
)
)
case t => t.toTree()
}
Seq(base) ++ children ++ Seq(obj)
}
}
/** Transitively collects the type declarations of the given libraries,
  * following each library's own `uses` references.
  */
@tailrec def libraryTypes(libraries: List[Library], result: Set[TypeDeclaration] = Set.empty): Set[TypeDeclaration] = {
  libraries match {
    case Nil =>
      result
    case lib :: remaining =>
      // Enqueue the libraries this one uses, and absorb its own types.
      libraryTypes(lib.uses.asScala.toList ::: remaining, result ++ lib.types().asScala.toSet)
  }
}
/** Walks every RAML model result and gathers all type declarations it
  * contributes, whether from a stand-alone library or the API's used libraries.
  */
@tailrec def allTypes(models: Seq[RamlModelResult], result: Set[TypeDeclaration] = Set.empty): Set[TypeDeclaration] = {
  models match {
    case Nil =>
      result
    case head +: remaining =>
      val contributed = libraryTypes(Option(head.getLibrary).toList) ++
        libraryTypes(Option(head.getApiV10).map(_.uses().asScala.toList).getOrElse(Nil))
      allTypes(remaining, result ++ contributed)
  }
}
// Assembles the doc-comment lines for a RAML type declaration: the escaped
// description plus whichever type-specific facets are present (item counts,
// string length/pattern, numeric bounds, example).
def comment(t: TypeDeclaration): Seq[String] = {
// "$" is doubled in descriptions before they are emitted as docs
// (NOTE(review): presumably to survive later interpolation — confirm).
def escapeDesc(s: Option[String]): Option[String] =
s.map(_.replace("$", "$$"))
t match {
case a: ArrayTypeDeclaration =>
Seq(escapeDesc(Option(a.description()).map(_.value)),
Option(a.minItems()).map(i => s"minItems: $i"),
Option(a.maxItems()).map(i => s"maxItems: $i")).flatten
case o: ObjectTypeDeclaration =>
Seq(escapeDesc(Option(o.description()).map(_.value)),
Option(o.example()).map(e => s"Example: <pre>${e.value}</pre>")).flatten
case s: StringTypeDeclaration =>
Seq(escapeDesc(Option(s.description()).map(_.value)),
Option(s.maxLength()).map(i => s"maxLength: $i"),
Option(s.minLength()).map(i => s"minLength: $i"),
Option(s.pattern()).map(i => s"pattern: <pre>$i</pre>")).flatten
case n: NumberTypeDeclaration =>
Seq(escapeDesc(Option(n.description()).map(_.value)),
Option(n.minimum()).map(i => s"minimum: $i"),
Option(n.maximum()).map(i => s"maximum: $i"),
Option(n.multipleOf()).map(i => s"multipleOf: $i")).flatten
case _ =>
Seq(escapeDesc(Option(t.description()).map(_.value()))).flatten
}
}
/**
 * RAML models map-like objects as an object type whose single property name is
 * a regex pattern delimited by slashes (e.g. "/.*／" style "/.../"). Such
 * declarations are rendered as Map[String, T] instead of generated case classes.
 */
def typeIsActuallyAMap(t: TypeDeclaration): Boolean = t match {
  case o: ObjectTypeDeclaration =>
    o.properties.asScala.toList match {
      // Fix: String#startsWith/endsWith take a String prefix/suffix, not a Char —
      // the previous Char literals ('/') do not type-check.
      case field :: Nil if field.name().startsWith("/") && field.name().endsWith("/") => true
      case _ => false
    }
  case _ => false
}
// Converts the set of RAML type declarations into GeneratedClass models.
// `typeTable` maps RAML type names to Scala type symbols. The result is then
// post-processed so union members and subtypes are reachable only through
// their top-level parent type.
def buildTypes(typeTable: Map[String, Symbol], allTypes: Set[TypeDeclaration]): Set[GeneratedClass] = {
// Tail-recursive worker: peels one declaration at a time off `types`,
// accumulating results and skipping names that were already generated.
@tailrec def buildTypes(types: Set[TypeDeclaration], results: Set[GeneratedClass] = Set.empty[GeneratedClass]): Set[GeneratedClass] = {
// Derives validation constraints from the declaration's facets: string
// length/pattern, item counts, numeric bounds, or map key pattern.
def buildConstraints(field: TypeDeclaration, fieldType: Type): Seq[Constraint[_]] = {
Option(field).collect {
case s: StringTypeDeclaration =>
Seq(
Option(s.maxLength()).map(Constraint.MaxLength),
Option(s.minLength()).map(Constraint.MinLength),
Option(s.pattern()).map(Constraint.Pattern)
).flatten
case a: ArrayTypeDeclaration =>
Seq(
Option(a.maxItems()).map(len => Constraint.MaxItems(len, fieldType)),
Option(a.minItems()).map(len => Constraint.MinItems(len, fieldType))
).flatten
case n: NumberTypeDeclaration =>
// convert numbers so that constraints are appropriately rendered
def toNum(v: Double): Number = fieldType match {
case DoubleClass => v
case FloatClass => v.toFloat
case LongClass => v.toLong
case _ => v.toInt
}
Seq(
Option(n.maximum()).map(v => Constraint.Max(toNum(v), fieldType)),
Option(n.minimum()).map(v => Constraint.Min(toNum(v), fieldType))
).flatten
case o: ObjectTypeDeclaration if typeIsActuallyAMap(o) =>
// last field of map-types has the pattern-matching spec that defines the key space, see typeIsActuallyAMap
val pattern = o.properties.asScala.last.name
val valueType = typeTable(o.properties.asScala.last.`type`)
// The catch-all key patterns add no information, so no constraint is emitted.
if(pattern != "/.*/" && pattern != "/^.*$/") {
Seq(Constraint.KeyPattern(pattern.substring(1, pattern.length() - 1), valueType))
} else Nil
}.getOrElse(Nil)
}
// Builds the field model for one property of `fieldOwner`, enforcing the
// omitEmpty/required invariant relied on by ObjectT.playFormat.
def createField(fieldOwner: String, field: TypeDeclaration): FieldT = {
val comments = comment(field)
val defaultValue = Option(field.defaultValue())
// if a field has a default, its not required.
val required = defaultValue.fold(Option(field.required()).fold(false)(_.booleanValue()))(_ => false)
val forceOptional = pragmaForceOptional(field)
val omitEmpty = isOmitEmpty(field)
// see ObjectT.playFormat
require(!(((required || defaultValue.nonEmpty) && !forceOptional) && omitEmpty),
s"field $fieldOwner.${field.name()} specifies omitEmpty but is required or provides a default value")
// Unique-item arrays become Set, everything else Seq.
def arrayType(a: ArrayTypeDeclaration): Type =
if (scala.util.Try[Boolean](a.uniqueItems()).getOrElse(false)) SetClass else SeqClass
field match {
case a: ArrayTypeDeclaration =>
// Collects the nesting of array element types, innermost last.
@tailrec def arrayTypes(a: ArrayTypeDeclaration, types: List[Type]): List[Type] = {
a.items() match {
case n: ArrayTypeDeclaration =>
arrayTypes(n, arrayType(n) :: types)
case o: ObjectTypeDeclaration =>
objectName(o)._1 :: types
case n: NumberTypeDeclaration =>
typeTable(Option(n.format()).getOrElse("double")) :: types
case t: TypeDeclaration =>
typeTable(t.`type`.replaceAll("\\\\[\\\\]", "")) :: types
}
}
val typeList = arrayTypes(a, List(arrayType(a)))
// reducing with TYPE_OF doesn't work, you'd expect Seq[Seq[X]] but only get Seq[X]
// https://github.com/eed3si9n/treehugger/issues/38
val finalType = typeList.reduce((a, b) => s"$b[$a]")
FieldT(a.name(), finalType, comments, buildConstraints(field, finalType), required, defaultValue, true, forceOptional, omitEmpty = omitEmpty)
case n: NumberTypeDeclaration =>
// Numeric format (int32/int64/...) selects the Scala number type; default is double.
val fieldType = typeTable(Option(n.format()).getOrElse("double"))
FieldT(n.name(), fieldType, comments, buildConstraints(field, fieldType), required, defaultValue, forceOptional = forceOptional, omitEmpty = omitEmpty)
case o: ObjectTypeDeclaration if typeIsActuallyAMap(o) =>
// Pattern-keyed objects become Map[String, V]; V comes from the first property.
val fieldType = o.properties.asScala.head match {
case n: NumberTypeDeclaration =>
TYPE_MAP(StringClass, typeTable(Option(n.format()).getOrElse("double")))
case t =>
TYPE_MAP(StringClass, typeTable(t.`type`()))
}
val constraints = buildConstraints(o, fieldType)
FieldT(o.name(), fieldType, comments, constraints, false, defaultValue, true, forceOptional = forceOptional, omitEmpty = omitEmpty)
case t: TypeDeclaration =>
// Untyped "object" fields are collected into the AdditionalProperties JsObject.
val (name, fieldType) = if (t.`type`() != "object") {
t.name() -> typeTable(t.`type`())
} else {
AdditionalProperties -> PlayJsObject
}
FieldT(name, fieldType, comments, buildConstraints(field, fieldType), required, defaultValue, forceOptional = forceOptional, omitEmpty = omitEmpty)
}
}
// Main dispatch over the next declaration to process.
types match {
case s if s.nonEmpty =>
s.head match {
case a: ArrayTypeDeclaration if a.items().`type`() != "string" =>
sys.error("Can't build array types")
case u: UnionTypeDeclaration =>
// Union: generate the member types and a UnionT wrapper (once per name).
if (!results.exists(_.name == u.name())) {
val subTypeNames = u.`type`().split("\\\\|").map(_.trim)
val subTypeDeclarations = subTypeNames.flatMap { t => allTypes.find(_.name == t) }
val subTypes = subTypeDeclarations.map {
case o: ObjectTypeDeclaration =>
val (name, parent) = objectName(o)
val fields: Seq[FieldT] = o.properties.asScala.withFilter(_.`type`() != "nil").map(f => createField(name, f))(collection.breakOut)
ObjectT(name, fields, parent, comment(o), discriminator = Option(o.discriminator()), discriminatorValue = Option(o.discriminatorValue()), serializeOnly = pragmaSerializeOnly(o))
case s: StringTypeDeclaration =>
StringT(s.name, Option(s.defaultValue()))
case t =>
sys.error(s"Unable to generate union types of non-object/string subtypes: ${u.name()} ${t.name()} ${t.`type`()}")
}
val unionType = UnionT(u.name(), subTypes.toVector, comment(u))
buildTypes(s.tail, results + unionType ++ subTypes)
} else {
buildTypes(s.tail, results)
}
case o: ObjectTypeDeclaration if !typeIsActuallyAMap(o) =>
// Plain object: generate the type, plus (unless it already is one)
// an all-optional "update" variant when one is requested.
if (!results.exists(_.name == o.name())) {
val (name, parent) = objectName(o)
val fields: Seq[FieldT] = o.properties().asScala.withFilter(_.`type`() != "nil").map(f => createField(name, f))(collection.breakOut)
if (isUpdateType(o)) {
val objectType = ObjectT(name, fields.map(_.copy(forceOptional = true)), parent, comment(o), discriminator = Option(o.discriminator()), discriminatorValue = Option(o.discriminatorValue()), serializeOnly = pragmaSerializeOnly(o))
buildTypes(s.tail, results + objectType)
} else {
val objectType = ObjectT(name, fields, parent, comment(o), discriminator = Option(o.discriminator()), discriminatorValue = Option(o.discriminatorValue()), serializeOnly = pragmaSerializeOnly(o))
val updateType = generateUpdateTypeName(o).withFilter(n => !results.exists(_.name == n)).map { updateName =>
objectType.copy(name = updateName, fields = fields.map(_.copy(forceOptional = true)))
}
buildTypes(s.tail, results ++ Seq(Some(objectType), updateType).flatten)
}
} else {
buildTypes(s.tail, results)
}
case o: ObjectTypeDeclaration if typeIsActuallyAMap(o) =>
// Map-like objects are inlined as Map fields; no standalone type is generated.
buildTypes(s.tail, results)
case e: StringTypeDeclaration if e.enumValues().asScala.nonEmpty =>
val enumType = EnumT(e.name(),
e.enumValues().asScala.toSet,
Option(e.defaultValue()),
comment(e))
buildTypes(s.tail, results + enumType)
case _ =>
buildTypes(s.tail, results)
}
case _ =>
results
}
}
val all = buildTypes(allTypes)
// Post-processing: index subtypes by their parent, and note which names are
// union members, so those types are emitted only via their top-level owner.
val childTypes: Map[String, Set[ObjectT]] = all.collect { case obj: ObjectT if obj.parentType.isDefined => obj }.groupBy(_.parentType.get)
val childNames = childTypes.values.flatMap(_.map(_.name)).toSet
val unionTypeNames = all.collect { case u: UnionT => u }.flatMap { t => t.childTypes.map(_.name) }
// Reduce the list so that union types and type hierarchies are now all included from just the top-level type
val filterPhase1 = all.withFilter(t => !unionTypeNames.contains(t.name) && !childNames.contains(t.name) && !t.isInstanceOf[StringT]).map {
case u: UnionT =>
val children = u.childTypes.map {
case o: ObjectT =>
o.copy(parentType = Some(u.name))
case t => t
}
u.copy(childTypes = children)
case obj: ObjectT if childTypes.contains(obj.name) =>
val children = childTypes(obj.name)
obj.copy(childTypes = children.to[Seq])
case t => t
}
// Drop parents whose children are themselves union members (they are emitted
// through the union instead).
filterPhase1.filter {
case o: ObjectT =>
!o.childTypes.map(_.name).exists(unionTypeNames.contains)
case t => true
}
}
// Emits the support types every generated package needs: the RamlGenerated
// marker trait and the RamlConstraints trait/object with a key-pattern
// validating Reads for Map[String, T].
def generateBuiltInTypes(pkg: String): Map[String, Tree] = {
val baseType = TRAITDEF("RamlGenerated").tree.withDoc("Marker trait indicating generated code.")
.inPackage(pkg).withComment(NoCodeCoverageReporting)
// keyPattern wraps an implicit Reads[Map[String,T]]: after the underlying
// reads succeeds, every map key is checked against `regex`; failures are
// collected and folded into a single JsError, otherwise the map passes through.
val ramlConstraints = BLOCK(
(TRAITDEF("RamlConstraints") := BLOCK(
DEF("keyPattern")
withTypeParams(TYPEVAR(RootClass.newAliasType("T")))
withParams(
PARAM("regex", "=> scala.util.matching.Regex"),
PARAM("error", StringClass) := LIT("error.pattern")
)
withParams(
PARAM("reads", PLAY_JSON_READS("Map[String,T]"))
).withFlags(Flags.IMPLICIT) := PLAY_JSON_READS("Map[String,T]").APPLY(LAMBDA(PARAM("js")) ==> BLOCK(
((REF("reads") DOT "reads") APPLY(REF("js")) DOT "flatMap").APPLY(LAMBDA(PARAM("m")) ==> BLOCK(
VAL("errors") := (REF("m") DOT "map" APPLY (BLOCK(
CASE(TUPLE(REF("o"), WILDCARD)) ==>
(((REF("regex") DOT "unapplySeq") APPLY REF("o")) DOT "map" APPLY(
LAMBDA(PARAM(WILDCARD)) ==> (PlayJsSuccess APPLY REF("o"))
)) DOT "getOrElse" APPLY (PlayJsError APPLY(PlayPath DOT "\\\\" APPLY(REF("o")), PlayValidationError APPLY(REF("error"), REF("regex") DOT "regex")))
))) DOT "collect" APPLY(BLOCK(
CASE(ID("err") withType(PlayJsError)) ==> REF("err")
)),
IF(REF("errors") DOT "isEmpty") THEN(PlayJsSuccess APPLY(REF("m")))
ELSE(REF("errors") DOT "fold" APPLY(PlayJsError APPLY(REF("Nil"))) APPLY(WILDCARD DOT "++" APPLY WILDCARD))
))
))
)).withDoc("Validation helpers for generated RAML code."),
CASEOBJECTDEF("RamlConstraints").withParents("RamlConstraints").tree
).inPackage(pkg).withComment(NoCodeCoverageReporting)
// Keyed by type name, matching the per-type output of apply().
Map(
"RamlGenerated" -> baseType,
"RamlConstraints" -> ramlConstraints
)
}
// Entry point: generates one package-qualified compilation unit per generated
// type, plus the built-in RamlGenerated/RamlConstraints support types. Types
// that produce no tree are emitted as an empty block tagged "Unsupported" so
// the gap stays visible in the output.
def apply(models: Seq[RamlModelResult], pkg: String): Map[String, Tree] = {
val typeDeclarations = allTypes(models)
val typeTable = buildTypeTable(typeDeclarations)
val types = buildTypes(typeTable, typeDeclarations)
generateBuiltInTypes(pkg) ++ types.map { tpe =>
val tree = tpe.toTree()
if (tree.nonEmpty) {
tpe.name -> BLOCK(tree).inPackage(pkg)
.withComment(NoCodeCoverageReporting).withComment(NoScalaFormat)
} else {
tpe.name -> BLOCK().withComment(s"Unsupported: $tpe").inPackage(pkg)
.withComment(NoCodeCoverageReporting).withComment(NoScalaFormat)
}
}(collection.breakOut)
}
}
| guenter/marathon | project/src/main/scala/com/mesosphere/RamlTypeGenerator.scala | Scala | apache-2.0 | 44,211 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.keras.nn
import com.intel.analytics.bigdl.keras.KerasBaseSpec
import com.intel.analytics.bigdl.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.nn.keras.{LocallyConnected1D, Sequential => KSequential}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import scala.util.Random
// Compares BigDL's LocallyConnected1D layer against the Keras reference
// implementation (output and gradients) for the configurations below.
class LocallyConnected1DSpec extends KerasBaseSpec {
// Converts Keras weights to the layout BigDL expects: the first (weight)
// tensor has its dimensions 2 and 3 swapped element-by-element (tensor
// indices are 1-based); the bias tensor, if present, is passed through.
def weightConverter(data: Array[Tensor[Float]]): Array[Tensor[Float]] = {
val out = new Array[Tensor[Float]](data.length)
val d1l: Int = data(0).size(1)
val d2l: Int = data(0).size(2)
val d3l: Int = data(0).size(3)
out(0) = Tensor(d1l, d3l, d2l)
// Walk every element by flat index; `page` is the number of elements per
// slice of the first dimension.
val page: Int = d2l * d3l
for (i <- 0 to d1l * d2l * d3l - 1) {
val d1 = i / page + 1
val d2 = (i % page) / (d3l) + 1
val d3 = (i % page) % d3l + 1
val v = data(0).valueAt(d1, d2, d3)
out(0).setValue(d1, d3, d2, v)
}
if (data.length > 1) {
out(1) = data(1)
}
out
}
"LocallyConnected1D" should "be the same as Keras" in {
val kerasCode =
"""
|input_tensor = Input(shape=[12, 24])
|input = np.random.random([3, 12, 24])
|output_tensor = LocallyConnected1D(32, 3, activation="relu")(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val seq = KSequential[Float]()
val layer = LocallyConnected1D[Float](32, 3, activation = "relu",
inputShape = Shape(12, 24))
seq.add(layer)
checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
kerasCode, weightConverter)
}
"LocallyConnected1D without bias" should "be the same as Keras" in {
val kerasCode =
"""
|input_tensor = Input(shape=[32, 32])
|input = np.random.random([2, 32, 32])
|output_tensor = LocallyConnected1D(64, 4, subsample_length=2,
| bias=False)(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val seq = KSequential[Float]()
val layer = LocallyConnected1D[Float](64, 4, subsampleLength = 2,
bias = false, inputShape = Shape(32, 32))
seq.add(layer)
checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
kerasCode, weightConverter)
}
}
// Round-trips a built LocallyConnected1D layer through module serialization
// using a randomly filled input tensor.
class LocallyConnected1DSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    val locallyConnected = LocallyConnected1D[Float](32, 3, inputShape = Shape(12, 24))
    locallyConnected.build(Shape(2, 12, 24))
    val randomInput = Tensor[Float](2, 12, 24).apply1 { _ =>
      Random.nextFloat()
    }
    runSerializationTest(locallyConnected, randomInput)
  }
}
| yiheng/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/keras/nn/LocallyConnected1DSpec.scala | Scala | apache-2.0 | 3,398 |
package io.youi.path
import io.youi.drawable.Context
case class LineTo(x: Double, y: Double) extends PathAction {
  // Draws a straight line to this point, first scaling it and then offsetting
  // it by the supplied origin.
  override def draw(context: Context, x: Double, y: Double, scaleX: Double, scaleY: Double): Unit = {
    val targetX = x + (this.x * scaleX)
    val targetY = y + (this.y * scaleY)
    context.lineTo(targetX, targetY)
  }

  override def toString: String = s"LineTo(x: $x, y: $y)"
}
| outr/youi | gui/src/main/scala/io/youi/path/LineTo.scala | Scala | mit | 348 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package system.basic
import java.io.File
import io.restassured.RestAssured
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import scala.concurrent.duration.DurationInt
import common._
import common.rest.WskRestOperations
import spray.json._
import system.rest.RestUtil
// End-to-end check that UTF-8 text survives the whole OpenWhisk action
// pipeline (source file, input parameters, logs, and results) for every
// non-deprecated runtime kind advertised by the service.
@RunWith(classOf[JUnitRunner])
class WskUnicodeTests extends TestHelpers with WskTestHelpers with JsHelpers with WskActorSystem with RestUtil {
implicit val wskprops: common.WskProps = WskProps()
val wsk: WskOperations = new WskRestOperations
// Upper bound for a single activation, and how long we poll for its result.
val activationMaxDuration = 2.minutes
val activationPollDuration = 3.minutes
import WskUnicodeTests._
// Fetches the runtime manifest from the service and keeps every
// non-deprecated kind listed under "runtimes".
val actionKinds: Iterable[Kind] = {
val response = RestAssured.given.config(sslconfig).get(getServiceURL)
response.statusCode should be(200)
val mf = response.body.asString.parseJson.asJsObject.fields("runtimes").asJsObject
mf.fields.values.map(_.convertTo[Vector[Kind]]).flatten.filter(!_.deprecated)
}
println(s"Kinds to test: ${actionKinds.map(_.kind).mkString(", ")}")
// Explicit entry-point name for runtimes that require one (Java and .NET);
// None lets the runtime use its default.
def main(kind: String): Option[String] = {
if (kind.startsWith("java")) {
Some("Unicode")
} else if (kind.contains("dotnet")) {
Some("Apache.OpenWhisk.UnicodeTests.Dotnet::Apache.OpenWhisk.UnicodeTests.Dotnet.Unicode::Main")
} else None
}
// Locates the per-kind action fixture on disk, or None (with a warning)
// when neither variant exists.
def getFileLocation(kind: String): Option[String] = {
// the test file is either named kind.txt or kind.bin
// one of the two must exist otherwise, fail the test.
val prefix = "unicode.tests" + File.separator + kind.replace(":", "-")
val txt = new File(TestUtils.getTestActionFilename(s"$prefix.txt"))
val bin = new File(TestUtils.getTestActionFilename(s"$prefix.bin"))
if (txt.exists) Some(txt.toString)
else if (bin.exists) Some(bin.toString)
else {
println(s"WARNING: did not find text or binary action for kind $kind, skipping it")
None
}
}
// tolerate missing files rather than throw an exception
// Registers one test per kind with a fixture: create the action, invoke it
// with a snowflake delimiter, then assert the Unicode text round-trips
// through the result and the logs.
actionKinds.map(k => (k.kind, getFileLocation(k.kind))).collect {
case (actionKind, file @ Some(_)) =>
s"$actionKind action" should "Ensure that UTF-8 in supported in source files, input params, logs, and output results" in withAssetCleaner(
wskprops) { (wp, assetHelper) =>
val name = s"unicodeGalore.${actionKind.replace(":", "")}"
assetHelper.withCleaner(wsk.action, name) { (action, _) =>
action
.create(name, file, main = main(actionKind), kind = Some(actionKind), timeout = Some(activationMaxDuration))
}
withActivation(
wsk.activation,
wsk.action.invoke(name, parameters = Map("delimiter" -> JsString("❄"))),
totalWait = activationPollDuration) { activation =>
val response = activation.response
response.result.get.fields.get("error") shouldBe empty
response.result.get.fields.get("winter") should be(Some(JsString("❄ ☃ ❄")))
activation.logs.toList.flatten.mkString(" ") should include("❄ ☃ ❄")
}
}
}
}
// JSON (de)serialization support for runtime-manifest entries: each runtime
// kind has a name and a deprecation flag.
protected[basic] object WskUnicodeTests extends DefaultJsonProtocol {
case class Kind(kind: String, deprecated: Boolean)
implicit val serdes: RootJsonFormat[Kind] = jsonFormat2(Kind)
}
| jasonpet/openwhisk | tests/src/test/scala/system/basic/WskUnicodeTests.scala | Scala | apache-2.0 | 4,058 |
package chess
import chess.format.pgn.San
import format.pgn.{ Parser, Reader, Tag }
import scalaz.Validation.FlatMap._
case class Replay(setup: Game, moves: List[Move], state: Game) {

  // `moves` stores newest-first; this is the played order.
  lazy val chronoMoves = moves.reverse

  // Prepends the move (with variant effects applied) and advances the state.
  def addMove(move: Move) =
    copy(moves = move.applyVariantEffect :: moves, state = state(move))

  def moveAtPly(ply: Int): Option[Move] =
    chronoMoves.lift(ply - 1 - setup.startedAtTurn)
}
object Replay {
// Starts an empty replay positioned at the given game.
def apply(game: Game) = new Replay(game, Nil, game)
// Parses the PGN move list into a Replay, passing the optional starting FEN
// and non-standard variant through as tags; fails when the list is empty.
def apply(
moveStrs: List[String],
initialFen: Option[String],
variant: chess.variant.Variant): Valid[Replay] =
moveStrs.some.filter(_.nonEmpty) toValid "[replay] pgn is empty" flatMap { nonEmptyMoves =>
Reader.moves(
nonEmptyMoves,
List(
initialFen map { fen => Tag(_.FEN, fen) },
variant.some.filterNot(_.standard) map { v => Tag(_.Variant, v.name) }
).flatten)
}
// Applies each san in order, collecting the game after every move; fails on
// the first move that is invalid in its position.
private def recursiveGames(game: Game, sans: List[San]): Valid[List[Game]] =
sans match {
case Nil => success(Nil)
case san :: rest => san(game.situation) flatMap { move =>
val newGame = game(move)
recursiveGames(newGame, rest) map { newGame :: _ }
}
}
// All successive game states for the move list, starting with the initial game.
def games(
moveStrs: List[String],
initialFen: Option[String],
variant: chess.variant.Variant): Valid[List[Game]] =
Parser.moves(moveStrs, variant) flatMap { moves =>
val game = Game(variant.some, initialFen)
recursiveGames(game, moves) map { game :: _ }
}
type ErrorMessage = String
// Like `games`, but instead of failing outright it returns every game reached
// up to the first invalid move, paired with that move's error message (if any).
def gameWhileValid(
moveStrs: List[String],
initialFen: String,
variant: chess.variant.Variant): (List[Game], Option[ErrorMessage]) = {
def mk(g: Game, moves: List[San]): (List[Game], Option[ErrorMessage]) = moves match {
case san :: rest => san(g.situation).fold(
err => (Nil, err.head.some),
move => {
val newGame = g(move)
mk(newGame, rest) match {
case (next, msg) => (newGame :: next, msg)
}
})
case _ => (Nil, None)
}
val init = Game(variant.some, initialFen.some)
Parser.moves(moveStrs, variant).fold(
err => Nil -> err.head.some,
moves => mk(init, moves)
) match {
case (games, err) => (init :: games, err)
}
}
// Board-level analogue of recursiveGames: applies each san, flipping the
// side to move after every step, and keeps the board (with last move) states.
private def recursiveBoards(sit: Situation, sans: List[San]): Valid[List[Board]] =
sans match {
case Nil => success(Nil)
case san :: rest => san(sit) flatMap { move =>
val after = move.afterWithLastMove
recursiveBoards(Situation(after, !sit.color), rest) map { after :: _ }
}
}
// All successive boards for the move list, starting from the given FEN (or
// the standard setup when absent/unparseable) with `color` to move.
def boards(
moveStrs: List[String],
initialFen: Option[String],
variant: chess.variant.Variant,
color: Color = White): Valid[List[Board]] = {
val sit = {
initialFen.flatMap(format.Forsyth.<<) | Situation(chess.variant.Standard)
}.copy(color = color) withVariant variant
Parser.moves(moveStrs, sit.board.variant) flatMap { moves =>
recursiveBoards(sit, moves) map { sit.board :: _ }
}
}
}
| psuter/scalachess | src/main/scala/Replay.scala | Scala | mit | 3,076 |
package uk.gov.hmrc.ct.mocks
import uk.gov.hmrc.ct.accounts.retriever.AccountsBoxRetriever
// Test stub for AccountsBoxRetriever: every accessor is deliberately left
// unimplemented (???) and throws NotImplementedError when called, so a test
// fails fast if it touches a box it did not expect to need.
case class MockAccountsBoxRetriever(baseBoxRetriever: MockFilingAttributesBoxRetriever) extends AccountsBoxRetriever(baseBoxRetriever) {
override def companyAddress() = ???
override def ac1() = ???
override def ac2() = ???
override def ac3() = ???
override def ac4() = ???
override def ac12() = ???
override def ac205() = ???
override def ac206() = ???
override def generateValues = ???
override def companiesHouseAccountsApproval() = ???
override def hmrcAccountsApproval() = ???
override def accountingPeriod() = ???
override def charityAllExempt() = ???
override def charityNoIncome() = ???
}
| liquidarmour/ct-calculations | src/test/scala/uk/gov/hmrc/ct/mocks/MockAccountsBoxRetriever.scala | Scala | apache-2.0 | 730 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.data.storage.jdbc
import grizzled.slf4j.Logging
import org.apache.predictionio.data.storage.App
import org.apache.predictionio.data.storage.Apps
import org.apache.predictionio.data.storage.StorageClientConfig
import scalikejdbc._
/** JDBC implementation of [[Apps]] */
class JDBCApps(client: String, config: StorageClientConfig, prefix: String)
extends Apps with Logging {
/** Database table name for this data access object */
val tableName = JDBCUtils.prefixTableName(prefix, "apps")
// Create the backing table at construction time if it does not exist yet.
DB autoCommit { implicit session =>
sql"""
create table if not exists $tableName (
id serial not null primary key,
name text not null,
description text)""".execute.apply()
}
// Inserts the app; when app.id == 0 the id column is left to the database to
// generate, otherwise the given id is written explicitly. Returns the
// generated key as the new app id.
def insert(app: App): Option[Int] = DB localTx { implicit session =>
val q = if (app.id == 0) {
sql"""
insert into $tableName (name, description) values(${app.name}, ${app.description})
"""
} else {
sql"""
insert into $tableName values(${app.id}, ${app.name}, ${app.description})
"""
}
Some(q.updateAndReturnGeneratedKey().apply().toInt)
}
// Looks an app up by primary key; None when no row matches.
def get(id: Int): Option[App] = DB readOnly { implicit session =>
sql"SELECT id, name, description FROM $tableName WHERE id = ${id}".map(rs =>
App(
id = rs.int("id"),
name = rs.string("name"),
description = rs.stringOpt("description"))
).single().apply()
}
// Looks an app up by its (unique) name; None when no row matches.
def getByName(name: String): Option[App] = DB readOnly { implicit session =>
sql"SELECT id, name, description FROM $tableName WHERE name = ${name}".map(rs =>
App(
id = rs.int("id"),
name = rs.string("name"),
description = rs.stringOpt("description"))
).single().apply()
}
// Returns every stored app.
def getAll(): Seq[App] = DB readOnly { implicit session =>
sql"SELECT id, name, description FROM $tableName".map(rs =>
App(
id = rs.int("id"),
name = rs.string("name"),
description = rs.stringOpt("description"))
).list().apply()
}
// Overwrites name and description of the row matching app.id.
def update(app: App): Unit = DB localTx { implicit session =>
sql"""
update $tableName set name = ${app.name}, description = ${app.description}
where id = ${app.id}""".update().apply()
}
// Deletes the row with the given id (no-op when absent).
def delete(id: Int): Unit = DB localTx { implicit session =>
sql"DELETE FROM $tableName WHERE id = $id".update().apply()
}
}
| alex9311/PredictionIO | data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCApps.scala | Scala | apache-2.0 | 2,968 |
/*
* Copyright 2014 Michał Fijas
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sxxp.xpath1.parser
case class QName(ns: String, prefix: String, localPart: String) {
def getFullName = if (ns.nonEmpty) s"{$ns}$localPart" else localPart
override def toString = if (ns.nonEmpty) s"""QName("$ns","$prefix","$localPart")""" else s"""QName("$localPart")"""
}
object QName {
  // Convenience constructor for a name without namespace or prefix.
  def apply(localPart: String): QName =
    QName(ns = "", prefix = "", localPart = localPart)
}
| mfijas/sxxp | src/main/scala/org/sxxp/xpath1/parser/QName.scala | Scala | apache-2.0 | 971 |
package models
import play.api._
import play.api.db._
import anorm._
import anorm.SqlParser._
import play.api.Play.current
/*
CRUD stands for
C: Create
R: Read
U: Update
D: Delete
I suggest this trait structure for every object that model a table in a DB
*/
/**
 * Base contract for persistence objects that model a single DB table
 * (CRUD = Create / Read / Update / Delete). Implement once per table.
 *
 * NOTE(review): `tCRUD` breaks the UpperCamelCase type-naming convention
 * (`Crud[V]` would be idiomatic), but renaming would break existing subclasses.
 */
abstract class tCRUD[V]{
  /** Name of the backing SQL table. */
  val table:String
  /** Base SQL query text — presumably the SELECT shared by list/detail; verify in implementations. */
  val query:String
  /** @return all rows of the table mapped to the model type `V`. */
  def list:List[V]
  /**
   * @param id unique id of the row
   * @return the matching case class if it exists, otherwise None
   */
  def detail(id: Long):Option[V]
  /**
   * Creates or updates a record:
   * if v.id.isDefined it updates, otherwise it creates.
   * @param v data to be created/updated
   * @return id of the affected row, if available
   */
  def update(v: V):Option[Long]
  /**
   * Deletes the associated record.
   * @param id unique id of the row
   * @return true when successful
   */
  def delete(id: Long):Boolean
  /** Anorm row parser mapping a SQL row to `V`, used by everything SQL-related. */
  val parser:anorm.RowParser[V]
}
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.druid
import java.io.IOException
import java.net.ConnectException
import java.nio.charset.StandardCharsets
import akka.actor.ActorSystem
import akka.http.scaladsl.model.HttpEntity
import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.HttpResponse
import akka.http.scaladsl.model.MediaTypes
import akka.http.scaladsl.model.StatusCodes
import akka.stream.scaladsl.Flow
import akka.stream.scaladsl.Sink
import akka.stream.scaladsl.Source
import akka.util.ByteString
import com.fasterxml.jackson.databind.JsonMappingException
import com.netflix.atlas.akka.AccessLogger
import com.netflix.atlas.json.Json
import com.typesafe.config.ConfigFactory
import munit.FunSuite
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.util.Failure
import scala.util.Success
import scala.util.Try
import scala.util.Using
/**
 * Unit tests for DruidClient. All HTTP interaction is stubbed with a canned
 * response flow, so no network access or running Druid instance is required.
 */
class DruidClientSuite extends FunSuite {

  import DruidClient._

  private val config = ConfigFactory.load().getConfig("atlas.druid")

  private implicit val system: ActorSystem = ActorSystem(getClass.getSimpleName)

  /** Builds a client whose HTTP flow always answers with the supplied canned result. */
  private def newClient(result: Try[HttpResponse]): DruidClient = {
    val client = Flow[(HttpRequest, AccessLogger)]
      .map {
        case (_, logger) => result -> logger
      }
    new DruidClient(config, system, client)
  }

  /** Wraps `data` as a 200 OK response with a JSON-encoded body. */
  private def ok[T: Manifest](data: T): HttpResponse = {
    val json = Json.encode(data).getBytes(StandardCharsets.UTF_8)
    HttpResponse(StatusCodes.OK, entity = json)
  }

  override def afterAll(): Unit = {
    Await.result(system.terminate(), Duration.Inf)
    super.afterAll()
  }

  test("get datasources") {
    val client = newClient(Success(ok(List("a", "b", "c"))))
    val future = client.datasources.runWith(Sink.head)
    val result = Await.result(future, Duration.Inf)
    assertEquals(result, List("a", "b", "c"))
  }

  test("get datasources http error") {
    intercept[IOException] {
      val client = newClient(Success(HttpResponse(StatusCodes.BadRequest)))
      val future = client.datasources.runWith(Sink.head)
      Await.result(future, Duration.Inf)
    }
  }

  test("get datasources connect timeout") {
    intercept[ConnectException] {
      val client = newClient(Failure(new ConnectException("failed")))
      val future = client.datasources.runWith(Sink.head)
      Await.result(future, Duration.Inf)
    }
  }

  test("get datasources read failure") {
    intercept[IOException] {
      // Entity stream fails mid-read; the error must surface through the client.
      val data = Source.failed[ByteString](new IOException("read failed"))
      val entity = HttpEntity(MediaTypes.`application/json`, data)
      val response = HttpResponse(StatusCodes.OK, entity = entity)
      val client = newClient(Success(response))
      val future = client.datasources.runWith(Sink.head)
      Await.result(future, Duration.Inf)
    }
  }

  test("get datasources bad json output") {
    intercept[JsonMappingException] {
      // An object where a list is expected must fail JSON decoding.
      val json = """{"foo":"bar"}"""
      val data = Source.single[ByteString](ByteString(json))
      val entity = HttpEntity(MediaTypes.`application/json`, data)
      val response = HttpResponse(StatusCodes.OK, entity = entity)
      val client = newClient(Success(response))
      val future = client.datasources.runWith(Sink.head)
      Await.result(future, Duration.Inf)
    }
  }

  test("get datasource empty") {
    val client = newClient(Success(ok(Datasource(Nil, Nil))))
    val future = client.datasource("abc").runWith(Sink.head)
    val result = Await.result(future, Duration.Inf)
    assertEquals(result, Datasource(Nil, Nil))
  }

  test("get datasource with data") {
    val ds = Datasource(List("a", "b"), List(Metric("m1", "LONG"), Metric("m2", "LONG")))
    val client = newClient(Success(ok(ds)))
    val future = client.datasource("abc").runWith(Sink.head)
    val result = Await.result(future, Duration.Inf)
    assertEquals(result, ds)
  }

  /** Runs a segment-metadata query against the canned JSON fixture on the classpath. */
  private def executeSegmentMetadataRequest: List[SegmentMetadataResult] = {
    import com.netflix.atlas.core.util.Streams._
    val file = "segmentMetadataResponse.json"
    val payload = Using.resource(resource(file))(byteArray)
    val response = HttpResponse(StatusCodes.OK, entity = payload)
    val client = newClient(Success(response))
    val query = SegmentMetadataQuery("test")
    val future = client.segmentMetadata(query).runWith(Sink.head)
    Await.result(future, Duration.Inf)
  }

  test("segmentMetadata columns") {
    val result = executeSegmentMetadataRequest
    assertEquals(result.size, 1)
    val columns = result.head.columns
    assertEquals(columns.size, 5)
    val expected = Set(
      "__time",
      "test.metric.counter",
      "test.dim.1",
      "test.dim.2",
      "test.metric.histogram"
    )
    assertEquals(columns.keySet, expected)
  }

  test("segmentMetadata column types") {
    val columns = executeSegmentMetadataRequest.head.columns
    assertEquals(columns("__time").`type`, "LONG")
    assertEquals(columns("test.metric.counter").`type`, "LONG")
    assertEquals(columns("test.metric.histogram").`type`, "netflixHistogram")
    assertEquals(columns("test.dim.1").`type`, "STRING")
    // Fixed copy-paste bug: this line previously re-checked test.dim.1,
    // leaving test.dim.2 unverified.
    assertEquals(columns("test.dim.2").`type`, "STRING")
  }

  test("segmentMetadata aggregators") {
    val aggregators = executeSegmentMetadataRequest.head.aggregators
    assertEquals(aggregators.size, 2)
    val expected = Set(
      "test.metric.counter",
      "test.metric.histogram"
    )
    assertEquals(aggregators.keySet, expected)
  }

  /** Runs a groupBy query against the canned JSON fixture on the classpath. */
  private def executeGroupByRequest: List[GroupByDatapoint] = {
    import com.netflix.atlas.core.util.Streams._
    val file = "groupByResponseArray.json"
    val payload = Using.resource(resource(file))(byteArray)
    val response = HttpResponse(StatusCodes.OK, entity = payload)
    val client = newClient(Success(response))
    val query =
      GroupByQuery("test", List(DefaultDimensionSpec("percentile", "percentile")), Nil, Nil)
    val future = client.groupBy(query).runWith(Sink.head)
    Await.result(future, Duration.Inf)
  }

  test("groupBy filter out null dimensions") {
    val datapoints = executeGroupByRequest
    assertEquals(datapoints.count(_.tags.isEmpty), 2)
    assertEquals(datapoints.count(_.tags.nonEmpty), 5)
  }
}
| Netflix-Skunkworks/iep-apps | atlas-druid/src/test/scala/com/netflix/atlas/druid/DruidClientSuite.scala | Scala | apache-2.0 | 6,800 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import Keys._
/** Central declaration of library dependencies for the sbt build. */
object Dependencies {

  // Both slf4j artifacts must stay on the same version.
  private val Slf4jVersion = "1.7.25"

  /** Dependencies shared by every module. */
  val common: Seq[ModuleID] = Seq(
    "org.slf4j"     % "slf4j-api"    % Slf4jVersion,
    "org.slf4j"     % "slf4j-simple" % Slf4jVersion,
    "org.scalatest" %% "scalatest"   % "3.0.1" % "test"
  )

  /** The server additionally needs scala-reflect. */
  val server: Seq[ModuleID] = common ++ Seq(
    "org.scala-lang" % "scala-reflect" % "2.12.2"
  )

  /** The client needs nothing beyond the common set. */
  val client: Seq[ModuleID] = common
}
| chlin501/macros | project/Dependencies.scala | Scala | apache-2.0 | 1,182 |
package japgolly.scalajs.react.internal
import japgolly.scalajs.react._
/**
 * Lower-priority implicit so that, during implicit search, the more specific
 * StateAccess syntax in MonocleExtComponent is preferred when both apply.
 */
trait MonocleExtComponentLowPriorityImplicits {
  /** Adds Monocle lens helpers to any value with a pure state-write accessor. */
  implicit final def MonocleReactExt_StateWritableCB[I, S](i: I)(implicit sa: StateAccessor.WritePure[I, S]) = new MonocleExtComponent.StateWritableCB[I, S](i)(sa)
}

/** Exposes Monocle state-access syntax via implicit conversions. */
trait MonocleExtComponent extends MonocleExtComponentLowPriorityImplicits {
  /** Adds Monocle lens helpers to any StateAccess instance. */
  implicit final def MonocleReactExt_StateAccess[F[_], S](m: StateAccess[F, S]) = new MonocleExtComponent.StateAcc[F, S, m.type](m)
}
object MonocleExtComponent {
  // Keep this import here so that Lens etc take priority over .internal
  import monocle._

  /** Monocle lens syntax over a StateAccess: zoom / modify / set component state through optics. */
  final class StateAcc[F[_], S, M <: StateAccess[F, S]](val self: M) extends AnyVal {
    /** Narrows the state access to the focus of the lens. */
    def zoomStateL[T](l: Lens[S, T]): self.WithMappedState[T] =
      self.zoomState(l.get)(l.set)

    /** Modifies the lens focus with `f`, then runs the callback. */
    def modStateL[A, B](l: PLens[S, S, A, B])(f: A => B, cb: Callback = Callback.empty): F[Unit] =
      self.modState(l.modify(f), cb)

    /** Like modStateL, but a `None` result from `f` leaves the state untouched. */
    def modStateOptionL[A, B](l: PLens[S, S, A, B])(f: A => Option[B], cb: Callback = Callback.empty): F[Unit] =
      self.modStateOption(s => f(l get s).map(l.set(_)(s)), cb)

    /** Sets the focus of any settable optic (evidence: MonocleSetter) to `b`. */
    def setStateL[L[_, _, _, _], B](l: L[S, S, _, B])(b: B, cb: Callback = Callback.empty)(implicit L: MonocleSetter[L]): F[Unit] =
      self.modState(L.set(l)(b), cb)

    /** `Some(b)` sets the focus; `None` clears the whole state via setStateOption. */
    def setStateOptionL[L[_, _, _, _], B](l: L[S, S, _, B])(o: Option[B], cb: Callback = Callback.empty)(implicit L: MonocleSetter[L]): F[Unit] =
      o match {
        case Some(b) => setStateL(l)(b, cb)
        case None => self.setStateOption(None, cb)
      }

    @deprecated("Create a function yourself. If you're using this in Reusable.fn(…), use Reusable.fn.state(this.zoom…).set instead", "1.2.0")
    def setStateFnL[L[_, _, _, _], B](l: L[S, S, _, B], cb: Callback = Callback.empty)(implicit L: MonocleSetter[L]): B => F[Unit] =
      setStateL(l)(_, cb)
  }

  /** Same lens helpers for pure state writers; every operation yields a Callback. */
  final class StateWritableCB[I, S](private val i: I)(implicit sa: StateAccessor.WritePure[I, S]) {
    /** Modifies the lens focus with `f`, then runs the callback. */
    def modStateL[A, B](l: PLens[S, S, A, B])(f: A => B, cb: Callback = Callback.empty): Callback =
      sa(i).modState(l.modify(f), cb)

    /** Like modStateL, but a `None` result from `f` leaves the state untouched. */
    def modStateOptionL[A, B](l: PLens[S, S, A, B])(f: A => Option[B], cb: Callback = Callback.empty): Callback =
      sa(i).modStateOption(s => f(l get s).map(l.set(_)(s)), cb)

    /** Sets the focus of any settable optic to `b`. */
    def setStateL[L[_, _, _, _], B](l: L[S, S, _, B])(b: B, cb: Callback = Callback.empty)(implicit L: MonocleSetter[L]): Callback =
      sa(i).modState(L.set(l)(b), cb)

    /** `Some(b)` sets the focus; `None` clears the whole state via setStateOption. */
    def setStateOptionL[L[_, _, _, _], B](l: L[S, S, _, B])(o: Option[B], cb: Callback = Callback.empty)(implicit L: MonocleSetter[L]): Callback =
      o match {
        case Some(b) => setStateL(l)(b, cb)
        case None => sa(i).setStateOption(None, cb)
      }

    @deprecated("Create a function yourself. If you're using this in Reusable.fn(…), use Reusable.fn.state(this.zoom…).set instead", "1.2.0")
    def setStateFnL[L[_, _, _, _], B](l: L[S, S, _, B], cb: Callback = Callback.empty)(implicit L: MonocleSetter[L]): B => Callback =
      setStateL(l)(_, cb)
  }
}
| matthughes/scalajs-react | monocle/src/main/scala/japgolly/scalajs/react/internal/MonocleExtComponent.scala | Scala | apache-2.0 | 3,045 |
package zzb.shell
import org.slf4j.LoggerFactory
import spray.caching.ExpiringLruCache
import wash.shell.ConsolePrintStream
import java.io._
import scala.concurrent.{Future, Await}
import scala.concurrent.duration._
import zzb.shell.remote.ShellDaemon
import scala.util.Failure
import scala.Some
import scala.util.Success
import java.util.concurrent.TimeUnit
import akka.actor.ActorSystem
import com.typesafe.config.Config
import java.util.concurrent.atomic.AtomicInteger
import com.typesafe.scalalogging.slf4j.Logging
/**
* Created with IntelliJ IDEA.
* User: Simon Xiao
* Date: 13-9-13
* Time: 下午3:36
* Copyright baoxian.com 2012~2020
*/
/**
 * Interactive command shell: reads command lines from `setting.input` (or an
 * internal queue), parses and dispatches them as Tasks, and writes results to
 * `setting.output`.
 *
 * @param appName    banner / application name printed on start-up
 * @param setting    I/O configuration for this shell
 * @param pipeToIn   optional stream through which text can be piped back toward the shell's input
 * @param path       prompt path segments shown when reading a command
 * @param notUseIoIn when true, commands are taken only from the internal queue, never from input
 */
class Shell(val appName: String, val setting: Setting, val pipeToIn: Option[OutputStream],
            val path: List[String], notUseIoIn: Boolean = false) {

  implicit val system = Shell.system

  import system.dispatcher

  val outputConverter = new OutputConversionEngine

  // Printer wrapping pipeToIn, if one was supplied.
  private val pipeToInPrinter: Option[PrintStream] = pipeToIn match {
    case None => None
    case Some(outToIn) => Some(new PrintStream(outToIn))
  }

  /** Writes a line into the pipe-to-input stream, if one is configured. */
  def pipeToIn(text: String) = {
    if (pipeToInPrinter.isDefined)
      pipeToInPrinter.get.println(text)
  }

  def output = setting.output

  def input = setting.input

  // Last exception raised by the command loop; recorded but not currently exposed.
  private var lastException: Throwable = null

  //var remote: RemoteAccess = _

  // Results shared between tasks, keyed by name (see taskResult below).
  private[this] val results = scala.collection.mutable.HashMap[String, Any]()

  // Queue of command lines waiting to be executed by the command loop.
  private val todoCommands = new scala.collection.mutable.SynchronizedQueue[String]()

  /** Whether verbose output is enabled; Tasks can read this flag to decide whether to emit extra information. */
  private[shell] var verbose_ = true

  def verbose = verbose_

  /** Enqueues a command line for execution by the command loop. */
  def requestExeCmd(line: String) {
    todoCommands.enqueue(line)
  }

  /**
   * Runs the command session.
   * Create the Shell, then run this method to listen to the user,
   * and the Shell will invoke Handler's methods.
   * Loops until the "exit" command is read.
   * @throws java.io.IOException when can't readLine() from input.
   */
  private def commandLoop() = {
    output.output(appName, outputConverter)
    var command: String = if (todoCommands.size != 0 || notUseIoIn) todoCommands.dequeue() else input.readCommand(path).trim
    while (!(command == "exit")) {
      try {
        // A trailing '&' requests asynchronous (background) execution.
        val isAsyn = command.endsWith("&")
        val lineText = if (isAsyn) command.substring(0, command.length - 1) else command
        lineText match {
          case "" => ()
          case "daemon-install" => ShellDaemon.install(this.appName)
          case "daemon-remove" => ShellDaemon.remove()
          case _ =>
            val task = processLine(lineText, isAsyn)
            checkTaskResult(task, isAsyn)
        }
      } catch {
        case te: TokenException ⇒
          lastException = te
          output.outputException(command, te)
        case clie: CLIException ⇒
          lastException = clie
          if (!(command.trim == "exit")) {
            output.outputException(clie)
          }
        case ex: TaskNotFoundException ⇒
          output.outputException(s"error: ${ex.getMessage}", ex)
        case ex: TaskDuplicateException ⇒
          output.outputException(s"error: ${ex.getMessage}", ex)
        case ex: Throwable ⇒
          ex.printStackTrace(output.err)
      }
      Thread.sleep(100) // avoid interleaved output
      command = if (todoCommands.size != 0 || notUseIoIn) todoCommands.dequeue() else input.readCommand(path).trim
    }
    output.output(s"$appName shell exit!", outputConverter)
  }

  /**
   * Processes a single command line without entering the command loop.
   * All output is directed to the shell's Output.
   *
   * @param line     full command line
   * @param isAsync  whether the task should run in the background
   * @param verbose  whether the task may print verbose information
   * @param objParam optional object parameter forwarded to the task
   * @throws CLIException this may be a TokenException
   */
  private[shell] def processLine(line: String, isAsync: Boolean, verbose: Boolean = true, objParam: Option[Any] = None): Task = {
    val tokens: List[Token] = Token.tokenize(line)
    val cmdTxt: String = tokens.head.string
    // A command name ending in '*' dispatches to the special "*" task,
    // passing the stripped name as its single argument.
    val task = if (cmdTxt.endsWith("*"))
      Task("*", this, List(cmdTxt.replace("*", "")), isAsync = false)
    else {
      Task(tokens.head.string, this, tokens.tail.map(_.string), isAsync, verbose, objParam)
    }
    task
  }

  // Cache for results fetched from other commands: max 1000 entries,
  // 240s time-to-live, 120s time-to-idle.
  val inCache = new ExpiringLruCache[Any](1000, 20, Duration(240, TimeUnit.SECONDS), Duration(120, TimeUnit.SECONDS))

  /**
   * Fetches the execution result of another command, with caching.
   * @param line        the command line to execute
   * @param forceUpdate when true, evict any cached value for `line` first
   * @param objParam    optional object parameter forwarded to the task
   * @tparam T result type
   */
  private[shell] def fromOther[T](line: String, forceUpdate: Boolean = false, objParam: Option[Any] = None): Future[T] = {
    if (forceUpdate) inCache.remove(line)
    inCache.apply(line, () => {
      // Invoke the other task asynchronously and in non-verbose mode.
      val preTask = processLine(line, isAsync = true, verbose = false, objParam)
      preTask.asInstanceOf[Pipable[T]].resultFuture
    }).asInstanceOf[Future[T]]
  }

  /** Stores a task result under the given key. */
  def taskResult(key: String, value: Any) {
    results += key -> value
  }

  /** Retrieves a previously stored task result; throws if the key is absent. */
  def taskResult(key: String) = {
    results(key)
  }

  /** Removes a stored task result. */
  def taskResultClear(key: String) {
    results.remove(key)
  }

  // Reports task completion and output; for synchronous tasks, blocks until the task finishes.
  private def checkTaskResult(task: Task, isAsyn: Boolean = false) = {
    if (isAsyn)
      output.output(s"[${task.fullName} running background]", outputConverter)
    task.onComplete {
      case _ ⇒
        if (isAsyn)
          output.output(s"[${task.fullName} over]", outputConverter)
        (task.out, task.err) match {
          case (out: ConsolePrintStream, err: ConsolePrintStream) ⇒
            output.output(out.content(), outputConverter)
            output.output(err.content(), outputConverter)
          case _ ⇒ ()
        }
    }
    sumaryTask(task)
    if (!isAsyn)
      Await.result(task.future, Duration.Inf)
  }

  // Writes a per-task timing entry to the monitoring log.
  private def sumaryTask(task: Task) = {
    val log = LoggerFactory.getLogger("shellTask." + task.fullName)
    task.stat.andThen {
      case Success(s) ⇒
        if (s.ex.isDefined) {
          log.error("[ TimeSpan:" + s.timeSpan + " ms]")
        } else {
          log.info("[ TimeSpan:" + s.timeSpan + " ms]")
        }
      case Failure(ex: ActionException) ⇒ log.info("[ TimeSpan:" + ex.getCause.toString + " ms]")
      case Failure(ex) ⇒ log.info("[ TimeSpan:" + ex.toString + " ms]")
    }
  }
}
case class Setting(input: Input, output: Output, displayTime: Boolean)
/** Factory and shared (process-wide) state for Shell instances. Call init before apply. */
object Shell extends Logging {

  /**
   * Creates a console-backed shell, optionally seeding it with initial commands.
   * When `sync` is true the command loop runs on the calling thread, otherwise
   * on a freshly spawned thread.
   */
  def apply(prompt: String, appName: String, initCmds: Seq[String] = Nil, sync: Boolean = true, notUseIoIn: Boolean = false) = {
    require(system_ != null, "Shell must be init .")
    val io = new ConsoleIO()
    val theShell = new Shell(appName, Setting(io, io, displayTime = false), None, List(prompt), notUseIoIn)
    initCmds.foreach(cmd => theShell.requestExeCmd(cmd))
    if (sync) new ShellRunner(appName, theShell).run()
    else new Thread(new ShellRunner(appName, theShell)).start()
    theShell
  }

  /** Creates a shell over the supplied streams; threading semantics as in the other apply. */
  def apply(prompt: String, appName: String,
            in: InputStream, out: PrintStream, err: PrintStream, pipeToIn: Option[OutputStream],
            sync: Boolean, notUseIoIn: Boolean) = {
    require(system_ != null, "Shell must be init .")
    val io = new ConsoleIO(in, out, err)
    val theShell = new Shell(appName, Setting(io, io, displayTime = false), pipeToIn, List(prompt), notUseIoIn)
    if (sync) new ShellRunner(appName, theShell).run()
    else new Thread(new ShellRunner(appName, theShell)).start()
    theShell
  }

  def system: ActorSystem = system_

  def config: Config = config_

  /**
   * Must be called once before any Shell is created.
   * @param sysOnlyForShell when true, the actor system is shut down after the last shell exits
   */
  def init(cfg: Config, sys: ActorSystem, sysOnlyForShell: Boolean = true) = {
    system_ = sys
    config_ = cfg
    sysOnlyForShell_ = sysOnlyForShell
  }

  private var system_ : ActorSystem = _
  private var config_ : Config = _
  private var sysOnlyForShell_ = true

  // Number of shells whose command loop is currently running.
  val counter = new AtomicInteger(0)

  private class ShellRunner(val name: String, val shell: Shell) extends Runnable {
    def run() {
      Thread.sleep(200)
      counter.incrementAndGet()
      logger.info(s" shell [$name] started .")
      shell.commandLoop()
      logger.info(s" shell [$name] exited .")
      val runningCount = counter.decrementAndGet()
      // Shut the actor system down once the last shell exits, if it belongs only to the shells.
      if (runningCount == 0 && sysOnlyForShell_) system_.shutdown()
    }
  }
}
| stepover/zzb | zzb-shell/src/main/scala/zzb/shell/Shell.scala | Scala | mit | 8,200 |
package ooyala.common.akka.web
import akka.actor.ActorSystem
import spray.routing.{Route, SimpleRoutingApp}
/**
* Contains methods for starting an embedded Spray web server.
*/
/** Helpers for starting an embedded Spray web server. */
object WebService extends SimpleRoutingApp {

  /**
   * Starts a web server for the given Route. Call this from an App or other
   * top-level scope, not from inside an actor, because system.actorOf may block.
   *
   * @param route  the spray Route for the service; combine multiple routes with (route1 ~ route2)
   * @param system the ActorSystem to use
   * @param host   the host string to bind to, defaults to "0.0.0.0"
   * @param port   the port number to bind to
   */
  def start(route: Route, system: ActorSystem,
            host: String = "0.0.0.0", port: Int = 8080) {
    implicit val actorSystem: ActorSystem = system
    startServer(interface = host, port = port)(route)
  }
}
| nachiketa-shukla/spark-jobserver | akka-app/src/ooyala.common.akka/web/WebService.scala | Scala | apache-2.0 | 866 |
package sampler.abc
import org.scalatest.FreeSpec
import org.scalatest.mockito.MockitoSugar
import org.mockito.Mockito._
class ScoredTest extends FreeSpec with MockitoSugar {
  "Scored should" - {
    // Pending specs: fail("TODO") marks behaviour that still needs a real test body.
    "Make an ID for you if using the apply buider" in fail("TODO")
    "Throw exception if any of the scores are negative" in fail("TODO")
    "Return number of reps as a double" in fail("TODO")
    "Calculate the mean score" in fail("TODO")
    "Know if it was generated locally" in {
      // Two-arg apply — presumably assigns a locally-generated id; verify against Scored.apply.
      val instanceLocal1 = Scored(null, null)
      val instanceLocal2 = Scored(null, null)
      // Explicit None or a UUID from another node (generatingNodeId = 99) => not local.
      val instanceNonLocal = Scored(null, null, None)
      val mockUUID = mock[UUID]
      when(mockUUID.generatingNodeId).thenReturn(99)
      val instanceRemote = Scored(null, null, Some(mockUUID))
      assert(instanceLocal1.wasLocallyGenerated)
      assert(instanceLocal2.wasLocallyGenerated)
      assert(!instanceNonLocal.wasLocallyGenerated)
      assert(!instanceRemote.wasLocallyGenerated)
    }
  }
}
package shredzzz.kirkwood.cumath.tensor.modules
import shredzzz.kirkwood.cumath.CuValue
import shredzzz.kirkwood.cumath.tensor.{CuMatrix, CuTensor, CuVector}
import shredzzz.kirkwood.driver.{CuKernels, CuModule}
/**
 * GPU element-wise arithmetic (negate, add, subtract, multiply, divide, modulo)
 * over tensors, with both tensor/tensor and tensor/scalar operand overloads.
 * Kernel function names are assembled from the element-type prefix and tensor
 * postfix supplied by the ElementModule/TensorModule mixins.
 */
trait ArithmeticModule[V, TT[X] <: CuTensor[X, _, TT]] extends KernelModule[V] with ElementModule[V] with TensorModule[TT]
{
  protected def module: CuModule

  // Kernel handles: "V" variants take a scalar operand, "T" variants a tensor operand.
  protected def minusFunc = module.function("%sneg%s".format(typePrefix, tensorPostfix))
  protected def addValFunc = module.function("%saddV%s".format(typePrefix, tensorPostfix))
  protected def addMatFunc = module.function("%saddT%s".format(typePrefix, tensorPostfix))
  protected def diffValFunc = module.function("%sdiffV%s".format(typePrefix, tensorPostfix))
  protected def diffMatFunc = module.function("%sdiffT%s".format(typePrefix, tensorPostfix))
  protected def multValFunc = module.function("%smultV%s".format(typePrefix, tensorPostfix))
  protected def multMatFunc = module.function("%smultT%s".format(typePrefix, tensorPostfix))
  protected def divValFunc = module.function("%sdivV%s".format(typePrefix, tensorPostfix))
  protected def divMatFunc = module.function("%sdivT%s".format(typePrefix, tensorPostfix))
  protected def modValFunc = module.function("%smodV%s".format(typePrefix, tensorPostfix))
  protected def modMatFunc = module.function("%smodT%s".format(typePrefix, tensorPostfix))

  /** res = -x */
  def minus(x: TT[V], res: TT[V]) {
    launchKernel(minusFunc, x, res)
  }

  /** res = x + c (scalar) */
  def add(x: TT[V], c: CuValue[V], res: TT[V]) {
    launchKernel(addValFunc, x, c, res)
  }

  /** res = x + y (element-wise) */
  def add(x: TT[V], y: TT[V], res: TT[V]) {
    launchKernel(addMatFunc, x, y, res)
  }

  /** res = x - c (scalar) */
  def diff(x: TT[V], c: CuValue[V], res: TT[V]) {
    launchKernel(diffValFunc, x, c, res)
  }

  /** res = x - y (element-wise) */
  def diff(x: TT[V], y: TT[V], res: TT[V]) {
    launchKernel(diffMatFunc, x, y, res)
  }

  /** res = x * c (scalar) */
  def mult(x: TT[V], c: CuValue[V], res: TT[V]) {
    launchKernel(multValFunc, x, c, res)
  }

  /** res = x * y (element-wise) */
  def mult(x: TT[V], y: TT[V], res: TT[V]) {
    launchKernel(multMatFunc, x, y, res)
  }

  /** res = x / c (scalar) */
  def div(x: TT[V], c: CuValue[V], res: TT[V]) {
    // Fixed copy-paste bug: this overload previously launched divMatFunc
    // (the tensor/tensor kernel) with a scalar operand; divValFunc was
    // declared but never used.
    launchKernel(divValFunc, x, c, res)
  }

  /** res = x / y (element-wise) */
  def div(x: TT[V], y: TT[V], res: TT[V]) {
    launchKernel(divMatFunc, x, y, res)
  }

  /** res = x mod y (element-wise) */
  def mod(x: TT[V], y: TT[V], res: TT[V]) {
    launchKernel(modMatFunc, x, y, res)
  }

  /** res = x mod c (scalar) */
  def mod(x: TT[V], c: CuValue[V], res: TT[V]) {
    launchKernel(modValFunc, x, c, res)
  }
}
/** Float specialisation: modulo kernels are not provided for floating-point elements. */
trait FloatArithmeticModule[TT[X] <: CuTensor[X, _, TT]] extends ArithmeticModule[Float, TT]
{
  override def modMatFunc = throw new UnsupportedOperationException
  override def modValFunc = throw new UnsupportedOperationException
}

/** Double specialisation: modulo kernels are not provided for floating-point elements. */
trait DoubleArithmeticModule[TT[X] <: CuTensor[X, _, TT]] extends ArithmeticModule[Double, TT]
{
  override def modMatFunc = throw new UnsupportedOperationException
  override def modValFunc = throw new UnsupportedOperationException
}
// Concrete element-type/tensor-shape combinations, all backed by the
// "cumath/tensor/TensorArithmeticOps" CUDA module.

class IntVectorArithmeticModule(kernels: CuKernels) extends ArithmeticModule[Int, CuVector] with IntModule with VectorModule
{
  val module = new CuModule("cumath/tensor/TensorArithmeticOps", kernels)
}

class IntMatrixArithmeticModule(kernels: CuKernels) extends ArithmeticModule[Int, CuMatrix] with IntModule with MatrixModule
{
  val module = new CuModule("cumath/tensor/TensorArithmeticOps", kernels)
}

class FloatVectorArithmeticModule(kernels: CuKernels) extends FloatArithmeticModule[CuVector] with FloatModule with VectorModule
{
  val module = new CuModule("cumath/tensor/TensorArithmeticOps", kernels)
}

class FloatMatrixArithmeticModule(kernels: CuKernels) extends FloatArithmeticModule[CuMatrix] with FloatModule with MatrixModule
{
  val module = new CuModule("cumath/tensor/TensorArithmeticOps", kernels)
}

class DoubleVectorArithmeticModule(kernels: CuKernels) extends DoubleArithmeticModule[CuVector] with DoubleModule with VectorModule
{
  val module = new CuModule("cumath/tensor/TensorArithmeticOps", kernels)
}

class DoubleMatrixArithmeticModule(kernels: CuKernels) extends DoubleArithmeticModule[CuMatrix] with DoubleModule with MatrixModule
{
  val module = new CuModule("cumath/tensor/TensorArithmeticOps", kernels)
}
package com.github.mdr.mash.utils
import com.github.mdr.mash.utils.Utils._
import org.scalatest.{ FlatSpec, Matchers }
class UtilsTest extends FlatSpec with Matchers {

  "Common prefix" should "work" in {
    // Empty and fully disjoint inputs yield the empty prefix.
    commonPrefix("", "") should equal("".toSeq)
    commonPrefix("a", "") should equal("".toSeq)
    commonPrefix("", "a") should equal("".toSeq)
    commonPrefix("a", "b") should equal("".toSeq)
    commonPrefix("abc", "abd") should equal("ab".toSeq)
  }

  "Intercalate" should "intersperse sequences within a sequence of sequences" in {
    intercalate(Seq[Seq[Char]]("foo", "bar", "baz"), ":::") should equal("foo:::bar:::baz".toSeq)
  }

  "truncate" should "work" in {
    // Per the expected value, the sequence is cut to length 3 with the last
    // element replaced — presumably 0 is a truncation marker; verify in Utils.truncate.
    Utils.truncate(Seq(1, 2, 3, 4, 5), 3, 0) should equal(Seq(1, 2, 0))
  }
}
object Test {
  def main(args: Array[String]): Unit = {
    println(HNil)
    // Small HLists use the dedicated case-class representations…
    val l1: String :: HNil = HList1("s")
    println(l1)
    val l3: Double :: Double :: Double :: HNil = HList3(1d, 2d, 3d)
    println(l3)
    // …larger ones use the array-backed representation, typed explicitly.
    val l4: String :: Double :: Double :: Double :: HNil = HListN[String, Double :: Double :: Double :: HNil](Array("s", 1d, 2d, 3d))
    println(l4)
  }
}
// HList types ------------------------------------------------------------------------------------
/** Heterogeneous list; every representation is backed by a flat Array[Any]. */
sealed trait HList { def underlying: Array[Any] }
/** Cons cell type: head of type H, tail of type T. */
sealed trait ::[H, T <: HList] extends HList // Should be [+H, +T <: HList], see #1500
/** Type of the empty HList. */
sealed trait HNil extends HList
// HList values -----------------------------------------------------------------------------------
/** The empty HList value; prints as "()" like an empty tuple. */
case object HNil extends HNil {
  val underlying: Array[Any] = Array.empty[Any]
  override def toString(): String = "()"
}
// Case class based HLists for small sizes --------------------------------------------------------
/** One-element HList; the trailing comma in toString mirrors 1-tuples. */
final case class HList1[T1](e1: T1) extends (T1 :: HNil) {
  def underlying: Array[Any] = Array(e1)
  override def toString(): String = s"($e1,)"
}

/** Two-element HList. */
final case class HList2[T1, T2](e1: T1, e2: T2) extends (T1 :: T2 :: HNil) {
  def underlying: Array[Any] = Array(e1, e2)
  override def toString(): String = s"($e1, $e2)"
}

/** Three-element HList. */
final case class HList3[T1, T2, T3](e1: T1, e2: T2, e3: T3) extends (T1 :: T2 :: T3 :: HNil) {
  def underlying: Array[Any] = Array(e1, e2, e3)
  override def toString(): String = s"($e1, $e2, $e3)"
}
// Array based HLists for large sizes -------------------------------------------------------------
/**
 * Array-backed HList for sizes beyond the dedicated case classes.
 * Equality and hashing are defined over the array's elements, since the
 * default case-class semantics would compare the array by reference.
 */
final case class HListN[H, T <: HList](underlying: Array[Any]) extends (H :: T) {
  /** Renders like a tuple, e.g. "(a, b, c)". */
  override def toString() = underlying.mkString("(", ", ", ")")

  override def equals(o: Any): Boolean =
    o match {
      case that: HListN[_, _] => that.underlying.sameElements(underlying)
      case _                  => false
    }

  /** Same accumulation scheme as the original: fold 31 * acc + element hash, seeded with 1. */
  override def hashCode: Int =
    underlying.foldLeft(1)((acc, elem) => 31 * acc + elem.##)
}
/** Extractor exposing any HList's elements as a Seq for pattern matching. */
object HListUnapply {
  def unapplySeq[L <: HList](l: L): Option[Seq[Any]] = Some(l.underlying)
}
// Low level (Array based) HLists Appender --------------------------------------------------------
/** Type class computing the concatenation of two HLists. */
trait Appender[L1 <: HList, L2 <: HList] {
  /** Result type of appending L2 to L1. */
  type Out <: HList
  def apply(l1: L1, l2: L2): Out
}

object Appender {
  /**
   * Sole runtime instance: concatenates the two backing arrays. The erased
   * PhantomAppender evidence fixes the result type O at compile time only;
   * the runtime value is always an HListN cast to O.
   */
  implicit def lowLevelAppender[L1 <: HList, L2 <: HList, O <: HList] (using erased p: PhantomAppender.Aux[L1, L2, O]): Appender[L1, L2] { type Out = O } =
    new Appender[L1, L2] {
      type Out = O
      def apply(l1: L1, l2: L2): Out = HListN(Array.concat(l1.underlying, l2.underlying)).asInstanceOf[O]
    }
}
// Type level "only" computation of type Out ------------------------------------------------------
/**
 * Type-level-only computation of the append result type. All definitions are
 * `erased`, so the ??? bodies are never evaluated at runtime.
 */
object PhantomAppender {
  type Aux[L1 <: HList, L2 <: HList, O <: HList]
  // Appending to HNil yields the other list unchanged.
  implicit erased def caseHNil[L <: HList]: Aux[HNil, L, L] = ???
  // Appending (H :: T) prepends H to the result of appending T.
  implicit erased def caseHCons[H, T <: HList, L <: HList, O <: HList] (using erased p: Aux[T, L, O]): Aux[H :: T, L, H :: O] = ???
}
| som-snytt/dotty | tests/run-custom-args/phantom-OnHList.scala | Scala | apache-2.0 | 3,020 |
// #43896
/**
 * DelayedInit fixture: the compiler passes each subclass constructor body to
 * delayedInit, so it is bracketed by the "hallo"/"bye" prints.
 */
trait M extends DelayedInit {
  def delayedInit(body : => Unit): Unit = {
    println("hallo")
    body
    println("bye")
  }
}

/** Subclass whose constructor statements should run inside delayedInit's wrapper. */
class C(init : Int) extends M {
  def foo = init
  println("constructor")
  var x = init
  println("out:"+x)
}
// #4380
object Main {
  def main(argv: Array[String]): Unit = {
    // Local class with a nested LegacyApp object: exercises delayed
    // initialization of an object nested inside method-local scope.
    class Bip {
      class Foo { override def toString() = "foo" }
      object Main extends dotty.runtime.LegacyApp {
        val cbn = new Foo()
      }
      Main.main(Array())
      println(Main.cbn)
    }
    new Bip
  }
}

/** Entry point: runs both the DelayedInit (#43896) and nested-object (#4380) scenarios. */
object Test extends dotty.runtime.LegacyApp {
  new C(22)
  Main.main(Array())
}
| yusuke2255/dotty | tests/pending/run/t4396.scala | Scala | bsd-3-clause | 652 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.linalg
import org.netlib.util.intW
import org.apache.spark.ml.optim.SingularMatrixException
/**
* Compute Cholesky decomposition.
*/
/**
 * Compute Cholesky decomposition.
 */
private[spark] object CholeskyDecomposition {

  /**
   * Solves a symmetric positive definite linear system via Cholesky factorization.
   * Both input arrays are modified in-place: they end up holding the
   * factorization and the solution respectively.
   *
   * @param A  the upper triangular part of A (packed storage)
   * @param bx right-hand side
   * @return the solution array (the mutated `bx`)
   */
  def solve(A: Array[Double], bx: Array[Double]): Array[Double] = {
    val n = bx.length
    val info = new intW(0)
    LAPACK.nativeLAPACK.dppsv("U", n, 1, A, bx, n, info)
    checkReturnValue(info, "dppsv")
    bx
  }

  /**
   * Computes the inverse of a real symmetric positive definite matrix A
   * using the Cholesky factorization A = U**T*U. The input array is
   * overwritten with the inverse.
   *
   * @param UAi the upper triangular factor U from the Cholesky factorization A = U**T*U
   * @param k   the dimension of A
   * @return the upper triangle of the (symmetric) inverse of A (the mutated `UAi`)
   */
  def inverse(UAi: Array[Double], k: Int): Array[Double] = {
    val info = new intW(0)
    LAPACK.nativeLAPACK.dpptri("U", k, UAi, info)
    checkReturnValue(info, "dpptri")
    UAi
  }

  /** Translates a non-zero LAPACK info code into the appropriate exception. */
  private def checkReturnValue(info: intW, method: String): Unit = {
    val code = info.`val`
    if (code < 0) {
      // Negative info: argument -code passed to the routine was invalid.
      throw new IllegalStateException(s"LAPACK.$method returned $code; arg ${-code} is illegal")
    } else if (code > 0) {
      // Positive info: the matrix is not positive definite.
      throw new SingularMatrixException (
        s"LAPACK.$method returned $code because A is not positive definite. Is A derived from " +
        "a singular matrix (e.g. collinear column values)?")
    }
  }
}
| ueshin/apache-spark | mllib/src/main/scala/org/apache/spark/mllib/linalg/CholeskyDecomposition.scala | Scala | apache-2.0 | 2,624 |
package com.krux.hyperion.activity
import com.krux.hyperion.adt.{ HString, HS3Uri }
import com.krux.hyperion.aws.AdpHadoopActivity
import com.krux.hyperion.expression.RunnableObject
import com.krux.hyperion.common.{ BaseFields, PipelineObjectId }
import com.krux.hyperion.datanode.S3DataNode
import com.krux.hyperion.resource.{ Resource, EmrCluster }
/**
 * Runs a MapReduce job on a cluster. The cluster can be an EMR cluster managed by AWS Data Pipeline
 * or another resource if you use TaskRunner. Use HadoopActivity when you want to run work in parallel.
 * This allows you to use the scheduling resources of the YARN framework or the MapReduce resource
 * negotiator in Hadoop 1. If you would like to run work sequentially using the Amazon EMR Step action,
 * you can still use EmrActivity.
 */
case class HadoopActivity[A <: EmrCluster] private (
  baseFields: BaseFields,
  activityFields: ActivityFields[A],
  emrTaskActivityFields: EmrTaskActivityFields,
  jarUri: HString,
  mainClass: Option[MainClass],
  arguments: Seq[HString],
  hadoopQueue: Option[HString],
  inputs: Seq[S3DataNode],
  outputs: Seq[S3DataNode]
) extends EmrTaskActivity[A] {

  type Self = HadoopActivity[A]

  def updateBaseFields(fields: BaseFields) = copy(baseFields = fields)
  def updateActivityFields(fields: ActivityFields[A]) = copy(activityFields = fields)
  def updateEmrTaskActivityFields(fields: EmrTaskActivityFields) = copy(emrTaskActivityFields = fields)

  /**
   * Appends the given arguments to any already-configured ones.
   *
   * Fix: the vararg parameter shadowed the `arguments` field, so the previous
   * `arguments ++ arguments` duplicated the new values and silently discarded
   * existing ones; qualifying with `this` appends correctly (matching the
   * accumulate-behavior of withInput/withOutput below).
   */
  def withArguments(arguments: HString*) = copy(arguments = this.arguments ++ arguments)
  def withHadoopQueue(queue: HString) = copy(hadoopQueue = Option(queue))
  def withInput(input: S3DataNode*) = copy(inputs = inputs ++ input)
  def withOutput(output: S3DataNode*) = copy(outputs = outputs ++ output)

  // Inputs and outputs are pipeline objects and must be serialized alongside
  // whatever the parent activity contributes.
  override def objects = inputs ++ outputs ++ super.objects

  lazy val serialize = new AdpHadoopActivity(
    id = id,
    name = name,
    jarUri = jarUri.serialize,
    mainClass = mainClass.map(_.toString),
    argument = arguments.map(_.serialize),
    hadoopQueue = hadoopQueue.map(_.serialize),
    preActivityTaskConfig = preActivityTaskConfig.map(_.ref),
    postActivityTaskConfig = postActivityTaskConfig.map(_.ref),
    input = seqToOption(inputs)(_.ref),
    output = seqToOption(outputs)(_.ref),
    workerGroup = runsOn.asWorkerGroup.map(_.ref),
    runsOn = runsOn.asManagedResource.map(_.ref),
    dependsOn = seqToOption(dependsOn)(_.ref),
    precondition = seqToOption(preconditions)(_.ref),
    onFail = seqToOption(onFailAlarms)(_.ref),
    onSuccess = seqToOption(onSuccessAlarms)(_.ref),
    onLateAction = seqToOption(onLateActionAlarms)(_.ref),
    attemptTimeout = attemptTimeout.map(_.serialize),
    lateAfterTimeout = lateAfterTimeout.map(_.serialize),
    maximumRetries = maximumRetries.map(_.serialize),
    retryDelay = retryDelay.map(_.serialize),
    failureAndRerunMode = failureAndRerunMode.map(_.serialize),
    maxActiveInstances = maxActiveInstances.map(_.serialize)
  )
}
object HadoopActivity extends RunnableObject {
  // Convenience overloads. All of them funnel into the final `apply`, which
  // builds the activity with empty arguments/inputs/outputs and no hadoop
  // queue; the HS3Uri variants serialize the URI into an HString first.
  def apply[A <: EmrCluster](jarUri: HS3Uri)(runsOn: Resource[A]): HadoopActivity[A] = apply(jarUri, None)(runsOn)
  def apply[A <: EmrCluster](jarUri: HS3Uri, mainClass: MainClass)(runsOn: Resource[A]): HadoopActivity[A] = apply(jarUri, Option(mainClass))(runsOn)
  def apply[A <: EmrCluster](jarUri: HS3Uri, mainClass: Option[MainClass])(runsOn: Resource[A]): HadoopActivity[A] = apply(jarUri.serialize, mainClass)(runsOn)
  def apply[A <: EmrCluster](jarUri: HString, mainClass: MainClass)(runsOn: Resource[A]): HadoopActivity[A] = apply(jarUri, Option(mainClass))(runsOn)
  // Canonical constructor: fresh pipeline object id, resource supplied by the
  // caller, all optional collections empty until set via the with* methods.
  def apply[A <: EmrCluster](jarUri: HString, mainClass: Option[MainClass] = None)(runsOn: Resource[A]): HadoopActivity[A] = new HadoopActivity(
    baseFields = BaseFields(PipelineObjectId(HadoopActivity.getClass)),
    activityFields = ActivityFields(runsOn),
    emrTaskActivityFields = EmrTaskActivityFields(),
    jarUri = jarUri,
    mainClass = mainClass,
    arguments = Seq.empty,
    hadoopQueue = None,
    inputs = Seq.empty,
    outputs = Seq.empty
  )
}
| hoangelos/hyperion | core/src/main/scala/com/krux/hyperion/activity/HadoopActivity.scala | Scala | apache-2.0 | 4,073 |
package su.t1001.daika
import com.typesafe.config.ConfigFactory
import org.slf4j.LoggerFactory
/** Application entry point: loads the "daika" config section and starts the server. */
object EntryPoint extends App {
  val log = LoggerFactory.getLogger(getClass)
  log.debug("Program started")
  val config = ConfigFactory.load().getConfig("daika")
  new TestServer(config).start()
} | vkrylov/daika | src/main/scala/su/t1001/daika/EntryPoint.scala | Scala | mit | 295 |
package io.getquill.context.finagle.postgres
import com.twitter.util.{ Await, Future, Throw }
import io.getquill.context.sql.ProductSpec
// Verifies nested-transaction semantics of the Finagle/Postgres quill context:
// inner transactions piggy-back on the outer one (single client/connection)
// and a failure anywhere rolls back the outermost transaction.
class TransactionSpec extends ProductSpec {
  val context = testContext
  import context._
  // Blocking await is acceptable in test code.
  def await[T](future: Future[T]) = Await.result(future)
  val p = Product(0L, "Scala Compiler", 1L)
  "If outer transaction fails, inner transactions shouldn't commit" in {
    val id: Long = await {
      context.transaction {
        for {
          id <- context.transaction {
            context.run(productInsert(lift(p)))
          }
          // `.liftToTry` turns the expected failure (re-inserting the same id)
          // into a value; destructuring with `Throw(_)` asserts it failed while
          // letting this for-comprehension complete successfully. The outer
          // transaction has still observed a failed statement.
          Throw(_) <- context.transaction {
            context.run(quote {
              query[Product].insert(lift(p.copy(id = id)))
            }).liftToTry
          }
        } yield id
      }
    }
    // Since a query inside a transaction failed, the outermost transaction had
    // to rollback.
    val res: List[Product] = await { context.run(productById(lift(id))) }
    res mustEqual List()
  }
  "Transaction inside transaction should not open a new client" in {
    val res: Product = await {
      context.transaction {
        for {
          id: Long <- context.run(productInsert(lift(p)))
          // A subtransaction should have access to the previous queries of an
          // outer transaction.
          res: List[Product] <- context.transaction {
            context.run(productById(lift(id)))
          }
        } yield res.head
      }
    }
    res mustEqual p.copy(id = res.id)
  }
  // Start each suite run from an empty Product table.
  override def beforeAll = {
    await(context.run(quote { query[Product].delete }))
    ()
  }
}
| getquill/quill | quill-finagle-postgres/src/test/scala/io/getquill/context/finagle/postgres/TransactionSpec.scala | Scala | apache-2.0 | 1,588 |
package controllers.api
import com.mohiva.play.silhouette.api.{Silhouette, Environment}
import com.mohiva.play.silhouette.impl.authenticators.JWTAuthenticator
import com.mohiva.play.silhouette.impl.providers.SocialProviderRegistry
import models.tenant.AuthCrewUser
import org.postgresql.util.PSQLException
import play.api.db.slick.DatabaseConfigProvider
import play.api.i18n.MessagesApi
import play.api.libs.json.Json
import play.api.mvc.{AnyContent, Action}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}
/**
 * CRUD API controller whose endpoints all require a valid Silhouette JWT
 * (every action is a `SecuredAction`). Persistence goes through Slick; the
 * abstract members (`repo`, `reads`, `convertToPersistable`,
 * `convertToDisplayable`, `Model`, `Id`) come from `GenericApiController`.
 */
abstract class GenericSecureApiController(dbConfigProvider: DatabaseConfigProvider,
                                          messagesApi: MessagesApi,
                                          env: Environment[AuthCrewUser, JWTAuthenticator],
                                          socialProviderRegistry: SocialProviderRegistry)
  extends GenericApiController(dbConfigProvider, messagesApi, env, socialProviderRegistry)
  with Silhouette[AuthCrewUser, JWTAuthenticator]
{
  import dbConfig.driver.api._

  /** Updates the entity parsed from the JSON body and echoes the updated row. */
  override def update: Action[Model] = SecuredAction.async(parse.json(reads)) {
    entity =>
      db.run(convertToPersistable(entity.body).update).map(updated => Ok(Json.toJson(convertToDisplayable(updated))))
  }

  /** Deletes by id; 404 when no row matched. */
  override def delete(id: Id): Action[AnyContent] = SecuredAction.async {
    db.run(repo.filterById(id).delete).map {
      i =>
        if (i == 0) NotFound else Ok
    }
  }

  // NOTE(review): unlike delete, there is no NotFound branch here — behavior
  // for a missing id depends on repo.findById; confirm it fails the action.
  override def getById(id: Id): Action[AnyContent] = SecuredAction.async {
    db.run(repo.findById(id)).map(entity => Ok(Json.toJson(convertToDisplayable(entity))))
  }

  /** Returns every row of the backing table. */
  override def getAll: Action[AnyContent] = SecuredAction.async {
    val all = db.run(repo.tableQuery.result)
    all.map(all => Ok(Json.toJson(all.map(convertToDisplayable))))
  }

  /** Inserts the entity parsed from the JSON body, mapping PSQL errors to 500s. */
  override def create: Action[Model] = SecuredAction.async(parse.json(reads)) {
    parsed =>
      db.run(convertToPersistable(parsed.body).save.asTry).map {
        case Success(created) => Ok(Json.toJson(convertToDisplayable(created)))
        // 23505 is Postgres' unique_violation SQLSTATE.
        case Failure(e: PSQLException) if e.getSQLState == "23505" => InternalServerError("Unique key violation: unique key already exists in the database.")
        case Failure(t: PSQLException) => InternalServerError("PSQL error.")
        // Fix: the match was not exhaustive — any non-PSQL failure escaped as
        // a MatchError that hid the original exception. Rethrow so the future
        // fails with the real cause and Play's error handler reports it.
        case Failure(t) => throw t
      }
  }
}
| thomastoye/speelsysteem | app/controllers/api/GenericSecureApiController.scala | Scala | gpl-2.0 | 2,305 |
package service
import helpers.VersionExtractor
import play.api.mvc.{Result, Request, AnyContent, Results}
import generators.{GenerationFailure, GenerationSuccess, ReportGenerator}
import pdfService.Implicits._
import play.api.Logger
import java.io.OutputStream
import data_sources.InvalidSourceFormatException
import monitoring.Counters
import scala.language.higherKinds
/**
 * Entry point of the service. It consumes the HTTP request received, checks it is an XML request
 * and then ask a [[generators.ReportGenerator]] to generate a report from the XML.
 * @author Jorge Migueis
 */
trait RenderService{
  /** Generator used to produce the report; supplied by the concrete service. */
  protected def reportGenerator: ReportGenerator
  /** Destination stream the generated report is written to. */
  protected val outputStream: OutputStream
  /** Rendered payload: Left is a textual body, Right is raw bytes. */
  protected def content: Either[String,Array[Byte]]

  /**
   * Handles a request: requires an XML body, renders it through the generator
   * and replies with the rendered content. Responds UnsupportedMediaType when
   * the body is not XML, BadRequest on invalid source format, and
   * InternalServerError on any other failure.
   */
  def outputGeneration(request: Request[AnyContent]): Result = {
    request.body.asXml.map {
      xml =>
        val generator = reportGenerator
        // TransactionId is extracted only for log correlation; absence is fine.
        val node = xml \\ "TransactionId"
        val transactionId = if (node.isEmpty) "" else node.text
        try {
          Logger.debug("treating XML received.")
          val print = generator.generateFrom(xml, VersionExtractor.extractVersionFrom(xml))
          generator.exportReportToStream(print, outputStream) match {
            case GenerationSuccess() =>
              Logger.info(s"Generation success for transactionId [${transactionId}] ")//TODO: Fix this with content size: ${content.length}")
              Counters.recordClaimRenderCount()
              // Fix: the former asInstanceOf casts were no-ops — the Either
              // already fixes the static types (String / Array[Byte]).
              content match {
                case Right(bytes) => Results.Ok(bytes)
                case Left(text) => Results.Ok(text)
              }
            case GenerationFailure() =>
              Logger.error(s"Could not render XML for transactionId [${transactionId}]")
              Results.InternalServerError
            case e: Throwable =>
              Logger.error(s"Unexpected result for transactionId [${transactionId}]",e)
              Results.InternalServerError
          }
        }
        catch {
          case e: InvalidSourceFormatException =>
            Logger.error(s"Could not render for transactionId [${transactionId}]",e)
            Results.BadRequest // Error already logged by generator
          case t: Throwable => {
            // NOTE(review): catching Throwable also traps fatal JVM errors;
            // consider scala.util.control.NonFatal. Left as-is to preserve
            // existing behavior.
            Logger.error(s"Could not render for transactionId [${transactionId}]. ${t.getMessage}",t)
            Results.InternalServerError
          }
        }
    }.getOrElse(Results.UnsupportedMediaType)
  }
}
| Department-for-Work-and-Pensions/RenderingService | app/service/RenderService.scala | Scala | mit | 2,483 |
package video
import java.io.File
import org.reactivestreams.api.{Producer, Consumer}
import com.xuggle.xuggler.IRational
import akka.actor.ActorRefFactory
/** Helper for dealing with FFMpeg data. */
object FFMpeg {

  /**
   * Reads a given file and pushes its stream events out.
   * Note: This will not prefetch any data, but only read when requested.
   */
  def readFile(file: File, system: ActorRefFactory): Producer[Frame] = {
    val producer = video.file.FFMpegProducer(system, file)
    producer
  }

  /**
   * Writes a stream of frames to the given file as an FFMpeg.
   */
  def writeFile(file: File, system: ActorRefFactory, width: Int, height: Int, frameRate: IRational = IRational.make(3, 1)): Consumer[Frame] = {
    val consumer = video.file.FFMpegFileConsumerWorker(system, file, width, height, frameRate)
    consumer
  }
} | adilakhter/streams-workshop | src/library/video/FFMpeg.scala | Scala | cc0-1.0 | 775 |
package com.twitter.finagle.stats
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.util.DefaultTimer
import com.twitter.util.{Timer, TimerTask}
import java.util.logging.{Level, Logger}
import scala.collection.mutable
/**
 * A [[StatsReceiverWithCumulativeGauges]] that writes every metric event to a
 * java.util.logging Logger. Intended for debugging only: gauges are polled on
 * a timer that is never released.
 */
class JavaLoggerStatsReceiver(logger: Logger, timer: Timer)
    extends StatsReceiverWithCumulativeGauges {
  val repr = logger

  private val timerTasks = new mutable.HashMap[Seq[String], TimerTask]

  // Timer here will never be released. This is ok since this class
  // is used for debugging only.
  def this(logger: Logger) = this(logger, DefaultTimer)

  // Single mapping from metric verbosity to a JUL level; this conditional was
  // previously duplicated in stat, counter and registerGauge.
  private[this] def levelFor(verbosity: Verbosity): Level =
    if (verbosity == Verbosity.Debug) Level.FINEST else Level.INFO

  /** Returns a stat that logs every added value. */
  def stat(verbosity: Verbosity, name: String*): Stat = new Stat {
    def add(value: Float): Unit = {
      logger.log(levelFor(verbosity), "%s add %f".format(formatName(name), value))
    }
  }

  /** Returns a counter that logs every increment. */
  def counter(verbosity: Verbosity, name: String*): Counter = new Counter {
    def incr(delta: Long): Unit = {
      logger.log(levelFor(verbosity), "%s incr %d".format(formatName(name), delta))
    }
  }

  /** Logs the gauge's current value every 10 seconds until deregistered. */
  protected[this] def registerGauge(verbosity: Verbosity, name: Seq[String], f: => Float): Unit =
    synchronized {
      // Replace any previous task registered under the same name.
      deregisterGauge(name)
      val level = levelFor(verbosity)
      timerTasks(name) = timer.schedule(10.seconds) {
        logger.log(level, "%s %2f".format(formatName(name), f))
      }
    }

  /** Cancels and forgets the polling task for the named gauge, if any. */
  protected[this] def deregisterGauge(name: Seq[String]): Unit = synchronized {
    timerTasks.remove(name) foreach { _.cancel() }
  }

  // Metric name components are joined with '/' for log output.
  private[this] def formatName(description: Seq[String]) = {
    description mkString "/"
  }
}
object JavaLoggerStatsReceiver {
  /** Creates a receiver backed by the JUL logger named "Finagle". */
  def apply(): JavaLoggerStatsReceiver = {
    val finagleLogger = Logger.getLogger("Finagle")
    new JavaLoggerStatsReceiver(finagleLogger)
  }
}
| luciferous/finagle | finagle-core/src/main/scala/com/twitter/finagle/stats/JavaLoggerStatsReceiver.scala | Scala | apache-2.0 | 1,869 |
package com.shorrockin.cascal.model
import org.apache.cassandra.thrift.{ColumnPath, ColumnOrSuperColumn}
/**
* defines an object which can be looked up through a session get
* method.
*
* @author Chris Shorrock
* @param Result determines the type of object returned when this
* column is looked up through the session get method.
*/
trait Gettable[Result, ValueType] extends PathComponent[ValueType] {
  // Key the lookup is addressed under.
  val key:Key[_, _]
  // Keyspace and column family the lookup targets.
  val keyspace:Keyspace
  val family:ColumnFamily[_]
  // Thrift column path used by the session get call.
  val columnPath:ColumnPath
  // Converts the raw thrift result into the typed Result for this component.
  def convertGetResult(colOrSuperCol:ColumnOrSuperColumn):Result
} | Shimi/cascal | src/main/scala/com/shorrockin/cascal/model/Gettable.scala | Scala | apache-2.0 | 581 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command
import java.util.Locale
import java.util.concurrent.TimeUnit._
import scala.collection.{GenMap, GenSeq}
import scala.collection.JavaConverters._
import scala.collection.parallel.ForkJoinTaskSupport
import scala.collection.parallel.immutable.ParVector
import scala.util.control.NonFatal
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._
import org.apache.hadoop.mapred.{FileInputFormat, JobConf}
import org.apache.spark.internal.config.RDD_PARALLEL_LISTING_THRESHOLD
import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.Resolver
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.connector.catalog.SupportsNamespaces._
import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation, PartitioningUtils}
import org.apache.spark.sql.execution.datasources.orc.OrcFileFormat
import org.apache.spark.sql.execution.datasources.parquet.ParquetSchemaConverter
import org.apache.spark.sql.internal.HiveSerDe
import org.apache.spark.sql.types._
import org.apache.spark.util.{SerializableConfiguration, ThreadUtils}
// Note: The definition of these commands are based on the ones described in
// https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL
/**
* A command for users to create a new database.
*
* It will issue an error message when the database with the same name already exists,
* unless 'ifNotExists' is true.
* The syntax of using this command in SQL is:
* {{{
* CREATE (DATABASE|SCHEMA) [IF NOT EXISTS] database_name
* [COMMENT database_comment]
* [LOCATION database_directory]
* [WITH DBPROPERTIES (property_name=property_value, ...)];
* }}}
*/
case class CreateDatabaseCommand(
    databaseName: String,
    ifNotExists: Boolean,
    path: Option[String],
    comment: Option[String],
    props: Map[String, String])
  extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    // Fall back to the session's default database path when none was given.
    val location = path
      .map(CatalogUtils.stringToURI)
      .getOrElse(catalog.getDefaultDBPath(databaseName))
    val database = CatalogDatabase(
      databaseName,
      comment.getOrElse(""),
      location,
      props)
    catalog.createDatabase(database, ifNotExists)
    Seq.empty[Row]
  }
}
/**
* A command for users to remove a database from the system.
*
* 'ifExists':
* - true, if database_name does't exist, no action
* - false (default), if database_name does't exist, a warning message will be issued
* 'cascade':
* - true, the dependent objects are automatically dropped before dropping database.
* - false (default), it is in the Restrict mode. The database cannot be dropped if
* it is not empty. The inclusive tables must be dropped at first.
*
* The syntax of using this command in SQL is:
* {{{
* DROP DATABASE [IF EXISTS] database_name [RESTRICT|CASCADE];
* }}}
*/
case class DropDatabaseCommand(
    databaseName: String,
    ifExists: Boolean,
    cascade: Boolean)
  extends RunnableCommand {

  // Delegates directly to the session catalog, which implements the
  // ifExists / cascade semantics described above.
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    catalog.dropDatabase(databaseName, ifExists, cascade)
    Seq.empty[Row]
  }
}
/**
* A command for users to add new (key, value) pairs into DBPROPERTIES
* If the database does not exist, an error message will be issued to indicate the database
* does not exist.
* The syntax of using this command in SQL is:
* {{{
* ALTER (DATABASE|SCHEMA) database_name SET DBPROPERTIES (property_name=property_value, ...)
* }}}
*/
case class AlterDatabasePropertiesCommand(
    databaseName: String,
    props: Map[String, String])
  extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    val existing = catalog.getDatabaseMetadata(databaseName)
    // New keys override existing ones; everything else is preserved.
    val merged = existing.properties ++ props
    catalog.alterDatabase(existing.copy(properties = merged))
    Seq.empty[Row]
  }
}
/**
* A command for users to set new location path for a database
* If the database does not exist, an error message will be issued to indicate the database
* does not exist.
* The syntax of using this command in SQL is:
* {{{
* ALTER (DATABASE|SCHEMA) database_name SET LOCATION path
* }}}
*/
case class AlterDatabaseSetLocationCommand(databaseName: String, location: String)
  extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    val existing = catalog.getDatabaseMetadata(databaseName)
    // Only the location URI changes; all other database metadata is kept.
    val newLocation = CatalogUtils.stringToURI(location)
    catalog.alterDatabase(existing.copy(locationUri = newLocation))
    Seq.empty[Row]
  }
}
/**
* A command for users to show the name of the database, its comment (if one has been set), and its
* root location on the filesystem. When extended is true, it also shows the database's properties
* If the database does not exist, an error message will be issued to indicate the database
* does not exist.
* The syntax of using this command in SQL is
* {{{
* DESCRIBE DATABASE [EXTENDED] db_name
* }}}
*/
case class DescribeDatabaseCommand(
    databaseName: String,
    extended: Boolean)
  extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val dbMetadata: CatalogDatabase =
      sparkSession.sessionState.catalog.getDatabaseMetadata(databaseName)
    val allDbProperties = dbMetadata.properties
    // Rows always shown, regardless of EXTENDED.
    val base = Seq(
      Row("Database Name", dbMetadata.name),
      Row("Description", dbMetadata.description),
      Row("Location", CatalogUtils.URIToString(dbMetadata.locationUri)),
      Row("Owner Name", allDbProperties.getOrElse(PROP_OWNER_NAME, "")),
      Row("Owner Type", allDbProperties.getOrElse(PROP_OWNER_TYPE, "")))

    if (!extended) {
      base
    } else {
      // EXTENDED additionally lists user properties (owner entries excluded).
      val properties = allDbProperties -- Seq(PROP_OWNER_NAME, PROP_OWNER_TYPE)
      val propertiesStr =
        if (properties.isEmpty) "" else properties.toSeq.mkString("(", ", ", ")")
      base :+ Row("Properties", propertiesStr)
    }
  }

  override val output: Seq[Attribute] = Seq(
    AttributeReference("database_description_item", StringType, nullable = false)(),
    AttributeReference("database_description_value", StringType, nullable = false)())
}
/**
* Drops a table/view from the metastore and removes it if it is cached.
*
* The syntax of this command is:
* {{{
* DROP TABLE [IF EXISTS] table_name;
* DROP VIEW [IF EXISTS] [db_name.]view_name;
* }}}
*/
case class DropTableCommand(
    tableName: TableIdentifier,
    ifExists: Boolean,
    isView: Boolean,
    purge: Boolean) extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    // Temp views skip the metastore type check below: they have no catalog entry.
    val isTempView = catalog.isTemporaryTable(tableName)
    if (!isTempView && catalog.tableExists(tableName)) {
      // If the command DROP VIEW is to drop a table or DROP TABLE is to drop a view
      // issue an exception.
      catalog.getTableMetadata(tableName).tableType match {
        case CatalogTableType.VIEW if !isView =>
          throw new AnalysisException(
            "Cannot drop a view with DROP TABLE. Please use DROP VIEW instead")
        case o if o != CatalogTableType.VIEW && isView =>
          throw new AnalysisException(
            s"Cannot drop a table with DROP VIEW. Please use DROP TABLE instead")
        case _ =>
      }
    }
    if (isTempView || catalog.tableExists(tableName)) {
      // Uncache before dropping; best-effort — an uncache failure must not
      // block the drop, so it is only logged. For persistent tables the
      // cascade flag also uncaches dependent plans.
      try {
        sparkSession.sharedState.cacheManager.uncacheQuery(
          sparkSession.table(tableName), cascade = !isTempView)
      } catch {
        case NonFatal(e) => log.warn(e.toString, e)
      }
      // Invalidate any stale cached metadata before removing the entry.
      catalog.refreshTable(tableName)
      catalog.dropTable(tableName, ifExists, purge)
    } else if (ifExists) {
      // no-op
    } else {
      throw new AnalysisException(s"Table or view not found: ${tableName.identifier}")
    }
    Seq.empty[Row]
  }
}
/**
* A command that sets table/view properties.
*
* The syntax of this command is:
* {{{
* ALTER TABLE table1 SET TBLPROPERTIES ('key1' = 'val1', 'key2' = 'val2', ...);
* ALTER VIEW view1 SET TBLPROPERTIES ('key1' = 'val1', 'key2' = 'val2', ...);
* }}}
*/
case class AlterTableSetPropertiesCommand(
    tableName: TableIdentifier,
    properties: Map[String, String],
    isView: Boolean)
  extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    val table = catalog.getTableMetadata(tableName)
    DDLUtils.verifyAlterTableType(catalog, table, isView)
    // Merge the new properties over the old ones (new keys win). Any
    // "comment" property is mirrored into CatalogTable.comment, which stores
    // the comment as a direct field as well.
    val mergedProps = table.properties ++ properties
    val newComment = properties.get("comment").orElse(table.comment)
    catalog.alterTable(table.copy(properties = mergedProps, comment = newComment))
    Seq.empty[Row]
  }
}
/**
* A command that unsets table/view properties.
*
* The syntax of this command is:
* {{{
* ALTER TABLE table1 UNSET TBLPROPERTIES [IF EXISTS] ('key1', 'key2', ...);
* ALTER VIEW view1 UNSET TBLPROPERTIES [IF EXISTS] ('key1', 'key2', ...);
* }}}
*/
case class AlterTableUnsetPropertiesCommand(
    tableName: TableIdentifier,
    propKeys: Seq[String],
    ifExists: Boolean,
    isView: Boolean)
  extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    val table = catalog.getTableMetadata(tableName)
    DDLUtils.verifyAlterTableType(catalog, table, isView)
    if (!ifExists) {
      // Without IF EXISTS, the first key that is neither present nor the
      // special "comment" key is an error.
      propKeys.find(k => !table.properties.contains(k) && k != "comment").foreach { k =>
        throw new AnalysisException(
          s"Attempted to unset non-existent property '$k' in table '${table.identifier}'")
      }
    }
    // Unsetting "comment" also clears the dedicated comment field.
    val tableComment = if (propKeys.contains("comment")) None else table.comment
    val remaining = table.properties -- propKeys
    catalog.alterTable(table.copy(properties = remaining, comment = tableComment))
    Seq.empty[Row]
  }
}
/**
* A command to change the column for a table, only support changing the comment of a non-partition
* column for now.
*
* The syntax of using this command in SQL is:
* {{{
* ALTER TABLE table_identifier
* CHANGE [COLUMN] column_old_name column_new_name column_dataType [COMMENT column_comment]
* [FIRST | AFTER column_name];
* }}}
*/
case class AlterTableChangeColumnCommand(
    tableName: TableIdentifier,
    columnName: String,
    newColumn: StructField) extends RunnableCommand {

  // TODO: support change column name/dataType/metadata/position.
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    val table = catalog.getTableMetadata(tableName)
    // Session resolver decides case sensitivity of column-name comparisons.
    val resolver = sparkSession.sessionState.conf.resolver
    DDLUtils.verifyAlterTableType(catalog, table, isView = false)

    // Find the origin column from dataSchema by column name.
    val originColumn = findColumnByName(table.dataSchema, columnName, resolver)
    // Throw an AnalysisException if the column name/dataType is changed.
    if (!columnEqual(originColumn, newColumn, resolver)) {
      throw new AnalysisException(
        "ALTER TABLE CHANGE COLUMN is not supported for changing column " +
          s"'${originColumn.name}' with type '${originColumn.dataType}' to " +
          s"'${newColumn.name}' with type '${newColumn.dataType}'")
    }

    // Only the comment may change: rebuild the data schema with the comment
    // applied to the matching column; all other fields pass through untouched.
    val newDataSchema = table.dataSchema.fields.map { field =>
      if (field.name == originColumn.name) {
        // Create a new column from the origin column with the new comment.
        addComment(field, newColumn.getComment)
      } else {
        field
      }
    }
    catalog.alterTableDataSchema(tableName, StructType(newDataSchema))

    Seq.empty[Row]
  }

  // Find the origin column from schema by column name, throw an AnalysisException if the column
  // reference is invalid.
  private def findColumnByName(
      schema: StructType, name: String, resolver: Resolver): StructField = {
    schema.fields.collectFirst {
      case field if resolver(field.name, name) => field
    }.getOrElse(throw new AnalysisException(
      s"Can't find column `$name` given table data columns " +
        s"${schema.fieldNames.mkString("[`", "`, `", "`]")}"))
  }

  // Add the comment to a column, if comment is empty, return the original column.
  private def addComment(column: StructField, comment: Option[String]): StructField =
    comment.map(column.withComment).getOrElse(column)

  // Compare a [[StructField]] to another, return true if they have the same column
  // name(by resolver) and dataType.
  private def columnEqual(
      field: StructField, other: StructField, resolver: Resolver): Boolean = {
    resolver(field.name, other.name) && field.dataType == other.dataType
  }
}
/**
* A command that sets the serde class and/or serde properties of a table/view.
*
* The syntax of this command is:
* {{{
* ALTER TABLE table [PARTITION spec] SET SERDE serde_name [WITH SERDEPROPERTIES props];
* ALTER TABLE table [PARTITION spec] SET SERDEPROPERTIES serde_properties;
* }}}
*/
case class AlterTableSerDePropertiesCommand(
    tableName: TableIdentifier,
    serdeClassName: Option[String],
    serdeProperties: Option[Map[String, String]],
    partSpec: Option[TablePartitionSpec])
  extends RunnableCommand {

  // should never happen if we parsed things correctly
  require(serdeClassName.isDefined || serdeProperties.isDefined,
    "ALTER TABLE attempted to set neither serde class name nor serde properties")

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    val table = catalog.getTableMetadata(tableName)
    DDLUtils.verifyAlterTableType(catalog, table, isView = false)
    // For datasource tables, disallow setting serde or specifying partition
    if (partSpec.isDefined && DDLUtils.isDatasourceTable(table)) {
      throw new AnalysisException("Operation not allowed: ALTER TABLE SET " +
        "[SERDE | SERDEPROPERTIES] for a specific partition is not supported " +
        "for tables created with the datasource API")
    }
    if (serdeClassName.isDefined && DDLUtils.isDatasourceTable(table)) {
      throw new AnalysisException("Operation not allowed: ALTER TABLE SET SERDE is " +
        "not supported for tables created with the datasource API")
    }
    // No partition spec: update the table-level storage descriptor; otherwise
    // update only the storage of the addressed partition. In both cases an
    // absent serde class keeps the existing one, and serde properties are
    // merged with new keys winning.
    if (partSpec.isEmpty) {
      val newTable = table.withNewStorage(
        serde = serdeClassName.orElse(table.storage.serde),
        properties = table.storage.properties ++ serdeProperties.getOrElse(Map()))
      catalog.alterTable(newTable)
    } else {
      val spec = partSpec.get
      val part = catalog.getPartition(table.identifier, spec)
      val newPart = part.copy(storage = part.storage.copy(
        serde = serdeClassName.orElse(part.storage.serde),
        properties = part.storage.properties ++ serdeProperties.getOrElse(Map())))
      catalog.alterPartitions(table.identifier, Seq(newPart))
    }
    Seq.empty[Row]
  }
}
/**
* Add Partition in ALTER TABLE: add the table partitions.
*
* An error message will be issued if the partition exists, unless 'ifNotExists' is true.
*
* The syntax of this command is:
* {{{
* ALTER TABLE table ADD [IF NOT EXISTS] PARTITION spec1 [LOCATION 'loc1']
* PARTITION spec2 [LOCATION 'loc2']
* }}}
*/
case class AlterTableAddPartitionCommand(
    tableName: TableIdentifier,
    partitionSpecsAndLocs: Seq[(TablePartitionSpec, Option[String])],
    ifNotExists: Boolean)
  extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    val table = catalog.getTableMetadata(tableName)
    DDLUtils.verifyAlterTableType(catalog, table, isView = false)
    DDLUtils.verifyPartitionProviderIsHive(sparkSession, table, "ALTER TABLE ADD PARTITION")
    // Validate each spec against the table's partition columns and build the
    // catalog partitions, inheriting the table's storage format.
    val parts = partitionSpecsAndLocs.map { case (spec, location) =>
      val normalizedSpec = PartitioningUtils.normalizePartitionSpec(
        spec,
        table.partitionColumnNames,
        table.identifier.quotedString,
        sparkSession.sessionState.conf.resolver)
      // inherit table storage format (possibly except for location)
      CatalogTablePartition(normalizedSpec, table.storage.copy(
        locationUri = location.map(CatalogUtils.stringToURI)))
    }

    // Hive metastore may not have enough memory to handle millions of partitions in single RPC.
    // Also the request to metastore times out when adding lot of partitions in one shot.
    // we should split them into smaller batches
    val batchSize = 100
    parts.toIterator.grouped(batchSize).foreach { batch =>
      catalog.createPartitions(table.identifier, batch, ignoreIfExists = ifNotExists)
    }

    // Keep table-level size statistics in sync with the new partitions: either
    // compute the added bytes (possibly in parallel when there are many
    // partitions) or, when auto-update is off, invalidate the stats entirely.
    if (table.stats.nonEmpty) {
      if (sparkSession.sessionState.conf.autoSizeUpdateEnabled) {
        def calculatePartSize(part: CatalogTablePartition) = CommandUtils.calculateLocationSize(
          sparkSession.sessionState, table.identifier, part.storage.locationUri)
        val threshold = sparkSession.sparkContext.conf.get(RDD_PARALLEL_LISTING_THRESHOLD)
        val partSizes = if (parts.length > threshold) {
          ThreadUtils.parmap(parts, "gatheringNewPartitionStats", 8)(calculatePartSize)
        } else {
          parts.map(calculatePartSize)
        }
        val addedSize = partSizes.sum
        if (addedSize > 0) {
          val newStats = CatalogStatistics(sizeInBytes = table.stats.get.sizeInBytes + addedSize)
          catalog.alterTableStats(table.identifier, Some(newStats))
        }
      } else {
        catalog.alterTableStats(table.identifier, None)
      }
    }
    Seq.empty[Row]
  }
}
/**
* Alter a table partition's spec.
*
* The syntax of this command is:
* {{{
* ALTER TABLE table PARTITION spec1 RENAME TO PARTITION spec2;
* }}}
*/
case class AlterTableRenamePartitionCommand(
    tableName: TableIdentifier,
    oldPartition: TablePartitionSpec,
    newPartition: TablePartitionSpec)
  extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    val table = catalog.getTableMetadata(tableName)
    DDLUtils.verifyAlterTableType(catalog, table, isView = false)
    DDLUtils.verifyPartitionProviderIsHive(sparkSession, table, "ALTER TABLE RENAME PARTITION")

    // Both source and target specs are validated/normalized against the
    // table's partition columns using the session's resolver.
    def normalize(spec: TablePartitionSpec): TablePartitionSpec =
      PartitioningUtils.normalizePartitionSpec(
        spec,
        table.partitionColumnNames,
        table.identifier.quotedString,
        sparkSession.sessionState.conf.resolver)

    catalog.renamePartitions(
      tableName, Seq(normalize(oldPartition)), Seq(normalize(newPartition)))
    Seq.empty[Row]
  }
}
/**
* Drop Partition in ALTER TABLE: to drop a particular partition for a table.
*
* This removes the data and metadata for this partition.
* The data is actually moved to the .Trash/Current directory if Trash is configured,
* unless 'purge' is true, but the metadata is completely lost.
* An error message will be issued if the partition does not exist, unless 'ifExists' is true.
* Note: purge is always false when the target is a view.
*
* The syntax of this command is:
* {{{
* ALTER TABLE table DROP [IF EXISTS] PARTITION spec1[, PARTITION spec2, ...] [PURGE];
* }}}
*/
case class AlterTableDropPartitionCommand(
    tableName: TableIdentifier,
    specs: Seq[TablePartitionSpec],
    ifExists: Boolean,
    purge: Boolean,
    retainData: Boolean)
  extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    val table = catalog.getTableMetadata(tableName)
    DDLUtils.verifyAlterTableType(catalog, table, isView = false)
    DDLUtils.verifyPartitionProviderIsHive(sparkSession, table, "ALTER TABLE DROP PARTITION")

    // Validate every spec against the table's partition columns before dropping.
    def normalize(spec: TablePartitionSpec): TablePartitionSpec =
      PartitioningUtils.normalizePartitionSpec(
        spec,
        table.partitionColumnNames,
        table.identifier.quotedString,
        sparkSession.sessionState.conf.resolver)

    catalog.dropPartitions(
      table.identifier, specs.map(normalize), ignoreIfNotExists = ifExists, purge = purge,
      retainData = retainData)

    // Table-level size statistics are stale after a drop; recompute/refresh.
    CommandUtils.updateTableStats(sparkSession, table)

    Seq.empty[Row]
  }
}
// Lightweight per-partition statistics: number of files and total size in bytes.
case class PartitionStatistics(numFiles: Int, totalSize: Long)
/**
 * Recover Partitions in ALTER TABLE: recover all the partition in the directory of a table and
 * update the catalog.
 *
 * The syntax of this command is:
 * {{{
 *   ALTER TABLE table RECOVER PARTITIONS;
 *   MSCK REPAIR TABLE table;
 * }}}
 */
case class AlterTableRecoverPartitionsCommand(
    tableName: TableIdentifier,
    cmd: String = "ALTER TABLE RECOVER PARTITIONS") extends RunnableCommand {

  // These are list of statistics that can be collected quickly without requiring a scan of the data
  // see https://github.com/apache/hive/blob/master/
  // common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
  val NUM_FILES = "numFiles"
  val TOTAL_SIZE = "totalSize"
  val DDL_TIME = "transient_lastDdlTime"

  // Builds a PathFilter that skips Hadoop bookkeeping entries (_SUCCESS,
  // _temporary, dot-files) and additionally honours any input-path filter
  // configured on the job.
  private def getPathFilter(hadoopConf: Configuration): PathFilter = {
    // Dummy jobconf to get to the pathFilter defined in configuration
    // It's very expensive to create a JobConf(ClassUtil.findContainingJar() is slow)
    val jobConf = new JobConf(hadoopConf, this.getClass)
    val pathFilter = FileInputFormat.getInputPathFilter(jobConf)
    path: Path => {
      val name = path.getName
      if (name != "_SUCCESS" && name != "_temporary" && !name.startsWith(".")) {
        pathFilter == null || pathFilter.accept(path)
      } else {
        false
      }
    }
  }

  override def run(spark: SparkSession): Seq[Row] = {
    val catalog = spark.sessionState.catalog
    val table = catalog.getTableMetadata(tableName)
    val tableIdentWithDB = table.identifier.quotedString
    DDLUtils.verifyAlterTableType(catalog, table, isView = false)
    // Recovery only makes sense for partitioned tables with a known location.
    if (table.partitionColumnNames.isEmpty) {
      throw new AnalysisException(
        s"Operation not allowed: $cmd only works on partitioned tables: $tableIdentWithDB")
    }
    if (table.storage.locationUri.isEmpty) {
      throw new AnalysisException(s"Operation not allowed: $cmd only works on table with " +
        s"location provided: $tableIdentWithDB")
    }

    val root = new Path(table.location)
    logInfo(s"Recover all the partitions in $root")
    val hadoopConf = spark.sessionState.newHadoopConf()
    val fs = root.getFileSystem(hadoopConf)

    val threshold = spark.sparkContext.conf.get(RDD_PARALLEL_LISTING_THRESHOLD)
    val pathFilter = getPathFilter(hadoopConf)

    // Walk the partition directory tree on a dedicated fork-join pool; the pool
    // is shut down even if scanning fails.
    val evalPool = ThreadUtils.newForkJoinPool("AlterTableRecoverPartitionsCommand", 8)
    val partitionSpecsAndLocs: Seq[(TablePartitionSpec, Path)] =
      try {
        scanPartitions(spark, fs, pathFilter, root, Map(), table.partitionColumnNames, threshold,
          spark.sessionState.conf.resolver, new ForkJoinTaskSupport(evalPool)).seq
      } finally {
        evalPool.shutdown()
      }
    val total = partitionSpecsAndLocs.length
    logInfo(s"Found $total partitions in $root")

    val partitionStats = if (spark.sqlContext.conf.gatherFastStats) {
      gatherPartitionStats(spark, partitionSpecsAndLocs, fs, pathFilter, threshold)
    } else {
      GenMap.empty[String, PartitionStatistics]
    }
    logInfo(s"Finished to gather the fast stats for all $total partitions.")

    addPartitions(spark, table, partitionSpecsAndLocs, partitionStats)
    // Updates the table to indicate that its partition metadata is stored in the Hive metastore.
    // This is always the case for Hive format tables, but is not true for Datasource tables created
    // before Spark 2.1 unless they are converted via `msck repair table`.
    spark.sessionState.catalog.alterTable(table.copy(tracksPartitionsInCatalog = true))
    catalog.refreshTable(tableName)
    logInfo(s"Recovered all partitions ($total).")
    Seq.empty[Row]
  }

  // Recursively walks `path`, matching one partition column per directory level
  // (directory names of the form `col=value`). Directories that do not match
  // the expected partition column are logged and skipped. Switches to a
  // parallel collection once the remaining depth / fan-out make it worthwhile.
  private def scanPartitions(
      spark: SparkSession,
      fs: FileSystem,
      filter: PathFilter,
      path: Path,
      spec: TablePartitionSpec,
      partitionNames: Seq[String],
      threshold: Int,
      resolver: Resolver,
      evalTaskSupport: ForkJoinTaskSupport): GenSeq[(TablePartitionSpec, Path)] = {
    if (partitionNames.isEmpty) {
      // All partition columns consumed: `path` is a complete partition location.
      return Seq(spec -> path)
    }

    val statuses = fs.listStatus(path, filter)
    val statusPar: GenSeq[FileStatus] =
      if (partitionNames.length > 1 && statuses.length > threshold || partitionNames.length > 2) {
        // parallelize the list of partitions here, then we can have better parallelism later.
        val parArray = new ParVector(statuses.toVector)
        parArray.tasksupport = evalTaskSupport
        parArray
      } else {
        statuses
      }
    statusPar.flatMap { st =>
      val name = st.getPath.getName
      if (st.isDirectory && name.contains("=")) {
        val ps = name.split("=", 2)
        val columnName = ExternalCatalogUtils.unescapePathName(ps(0))
        // TODO: Validate the value
        val value = ExternalCatalogUtils.unescapePathName(ps(1))
        if (resolver(columnName, partitionNames.head)) {
          scanPartitions(spark, fs, filter, st.getPath, spec ++ Map(partitionNames.head -> value),
            partitionNames.drop(1), threshold, resolver, evalTaskSupport)
        } else {
          logWarning(
            s"expected partition column ${partitionNames.head}, but got ${ps(0)}, ignoring it")
          Seq.empty
        }
      } else {
        logWarning(s"ignore ${new Path(path, name)}")
        Seq.empty
      }
    }
  }

  // Computes (file count, total byte size) per partition location, keyed by the
  // location string. For many partitions the listing is distributed as a Spark
  // job; otherwise it runs locally on the driver with the given FileSystem.
  private def gatherPartitionStats(
      spark: SparkSession,
      partitionSpecsAndLocs: GenSeq[(TablePartitionSpec, Path)],
      fs: FileSystem,
      pathFilter: PathFilter,
      threshold: Int): GenMap[String, PartitionStatistics] = {
    if (partitionSpecsAndLocs.length > threshold) {
      val hadoopConf = spark.sessionState.newHadoopConf()
      val serializableConfiguration = new SerializableConfiguration(hadoopConf)
      val serializedPaths = partitionSpecsAndLocs.map(_._2.toString).toArray

      // Set the number of parallelism to prevent following file listing from generating many tasks
      // in case of large #defaultParallelism.
      val numParallelism = Math.min(serializedPaths.length,
        Math.min(spark.sparkContext.defaultParallelism, 10000))
      // gather the fast stats for all the partitions otherwise Hive metastore will list all the
      // files for all the new partitions in sequential way, which is super slow.
      logInfo(s"Gather the fast stats in parallel using $numParallelism tasks.")
      spark.sparkContext.parallelize(serializedPaths, numParallelism)
        .mapPartitions { paths =>
          // Executors rebuild the filter/FS from the serialized Hadoop conf.
          val pathFilter = getPathFilter(serializableConfiguration.value)
          paths.map(new Path(_)).map{ path =>
            val fs = path.getFileSystem(serializableConfiguration.value)
            val statuses = fs.listStatus(path, pathFilter)
            (path.toString, PartitionStatistics(statuses.length, statuses.map(_.getLen).sum))
          }
        }.collectAsMap()
    } else {
      partitionSpecsAndLocs.map { case (_, location) =>
        val statuses = fs.listStatus(location, pathFilter)
        (location.toString, PartitionStatistics(statuses.length, statuses.map(_.getLen).sum))
      }.toMap
    }
  }

  // Registers the discovered partitions with the catalog in batches, attaching
  // the fast stats so the Hive metastore does not have to re-list the files.
  private def addPartitions(
      spark: SparkSession,
      table: CatalogTable,
      partitionSpecsAndLocs: GenSeq[(TablePartitionSpec, Path)],
      partitionStats: GenMap[String, PartitionStatistics]): Unit = {
    val total = partitionSpecsAndLocs.length
    var done = 0L
    // Hive metastore may not have enough memory to handle millions of partitions in single RPC,
    // we should split them into smaller batches. Since Hive client is not thread safe, we cannot
    // do this in parallel.
    val batchSize = 100
    partitionSpecsAndLocs.toIterator.grouped(batchSize).foreach { batch =>
      val now = MILLISECONDS.toSeconds(System.currentTimeMillis())
      val parts = batch.map { case (spec, location) =>
        val params = partitionStats.get(location.toString).map {
          case PartitionStatistics(numFiles, totalSize) =>
            // This two fast stat could prevent Hive metastore to list the files again.
            Map(NUM_FILES -> numFiles.toString,
              TOTAL_SIZE -> totalSize.toString,
              // Workaround a bug in HiveMetastore that try to mutate a read-only parameters.
              // see metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
              DDL_TIME -> now.toString)
        }.getOrElse(Map.empty)
        // inherit table storage format (possibly except for location)
        CatalogTablePartition(
          spec,
          table.storage.copy(locationUri = Some(location.toUri)),
          params)
      }
      spark.sessionState.catalog.createPartitions(tableName, parts, ignoreIfExists = true)
      done += parts.length
      logDebug(s"Recovered ${parts.length} partitions ($done/$total so far)")
    }
  }
}
/**
 * Sets the location of a table or of a single partition.
 *
 * For normal tables this just sets the location URI in the table/partition's
 * storage format. For datasource tables this sets a "path" parameter in the
 * table/partition's serde properties.
 *
 * Syntax:
 * {{{
 *   ALTER TABLE table_name [PARTITION partition_spec] SET LOCATION "loc";
 * }}}
 */
case class AlterTableSetLocationCommand(
    tableName: TableIdentifier,
    partitionSpec: Option[TablePartitionSpec],
    location: String)
  extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val sessionCatalog = sparkSession.sessionState.catalog
    val tableMeta = sessionCatalog.getTableMetadata(tableName)
    val newLocation = CatalogUtils.stringToURI(location)
    DDLUtils.verifyAlterTableType(sessionCatalog, tableMeta, isView = false)

    partitionSpec match {
      case Some(spec) =>
        DDLUtils.verifyPartitionProviderIsHive(
          sparkSession, tableMeta, "ALTER TABLE ... SET LOCATION")
        // A partition spec was given: repoint only that partition.
        val oldPart = sessionCatalog.getPartition(tableMeta.identifier, spec)
        val updatedPart = oldPart.copy(
          storage = oldPart.storage.copy(locationUri = Some(newLocation)))
        sessionCatalog.alterPartitions(tableMeta.identifier, Seq(updatedPart))
      case None =>
        // No partition spec: relocate the table itself.
        sessionCatalog.alterTable(tableMeta.withNewStorage(locationUri = Some(newLocation)))
    }

    // Statistics may no longer reflect the data at the new location.
    CommandUtils.updateTableStats(sparkSession, tableMeta)
    Seq.empty[Row]
  }
}
/** Shared predicates and validity checks used by the DDL commands in this file. */
object DDLUtils {
  val HIVE_PROVIDER = "hive"

  /** True if the table's provider is Hive. */
  def isHiveTable(table: CatalogTable): Boolean = {
    isHiveTable(table.provider)
  }

  /** True if the given provider name (case-insensitively) is "hive". */
  def isHiveTable(provider: Option[String]): Boolean = {
    provider.isDefined && provider.get.toLowerCase(Locale.ROOT) == HIVE_PROVIDER
  }

  /** True if the table has a provider and it is not Hive (i.e. a datasource table). */
  def isDatasourceTable(table: CatalogTable): Boolean = {
    table.provider.isDefined && table.provider.get.toLowerCase(Locale.ROOT) != HIVE_PROVIDER
  }

  /** Builds a [[HiveTableRelation]] for reading the given Hive table. */
  def readHiveTable(table: CatalogTable): HiveTableRelation = {
    HiveTableRelation(
      table,
      // Hive table columns are always nullable.
      table.dataSchema.asNullable.toAttributes,
      table.partitionSchema.asNullable.toAttributes)
  }

  /**
   * Throws a standard error for actions that require partitionProvider = hive.
   */
  def verifyPartitionProviderIsHive(
      spark: SparkSession, table: CatalogTable, action: String): Unit = {
    val tableName = table.identifier.table
    if (!spark.sqlContext.conf.manageFilesourcePartitions && isDatasourceTable(table)) {
      throw new AnalysisException(
        s"$action is not allowed on $tableName since filesource partition management is " +
          "disabled (spark.sql.hive.manageFilesourcePartitions = false).")
    }
    if (!table.tracksPartitionsInCatalog && isDatasourceTable(table)) {
      throw new AnalysisException(
        s"$action is not allowed on $tableName since its partition metadata is not stored in " +
          "the Hive metastore. To import this information into the metastore, run " +
          s"`msck repair table $tableName`")
    }
  }

  /**
   * If the command ALTER VIEW is to alter a table or ALTER TABLE is to alter a view,
   * issue an exception [[AnalysisException]].
   *
   * Note: temporary views can be altered by both ALTER VIEW and ALTER TABLE commands,
   * since temporary views can be also created by CREATE TEMPORARY TABLE. In the future,
   * when we decided to drop the support, we should disallow users to alter temporary views
   * by ALTER TABLE.
   */
  def verifyAlterTableType(
      catalog: SessionCatalog,
      tableMetadata: CatalogTable,
      isView: Boolean): Unit = {
    if (!catalog.isTemporaryTable(tableMetadata.identifier)) {
      tableMetadata.tableType match {
        case CatalogTableType.VIEW if !isView =>
          throw new AnalysisException(
            "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead")
        case o if o != CatalogTableType.VIEW && isView =>
          throw new AnalysisException(
            s"Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead")
        case _ =>
      }
    }
  }

  /** Checks all data column names of the table (see the overload below). */
  private[sql] def checkDataColNames(table: CatalogTable): Unit = {
    checkDataColNames(table, table.dataSchema.fieldNames)
  }

  /**
   * Rejects column names that the table's file format cannot represent, by
   * delegating to the ORC / Parquet field-name checks based on the provider
   * (and, for Hive tables, on the configured serde).
   */
  private[sql] def checkDataColNames(table: CatalogTable, colNames: Seq[String]): Unit = {
    table.provider.foreach {
      _.toLowerCase(Locale.ROOT) match {
        case HIVE_PROVIDER =>
          val serde = table.storage.serde
          if (serde == HiveSerDe.sourceToSerDe("orc").get.serde) {
            OrcFileFormat.checkFieldNames(colNames)
          } else if (serde == HiveSerDe.sourceToSerDe("parquet").get.serde ||
              serde == Some("parquet.hive.serde.ParquetHiveSerDe") ||
              serde == Some("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe")) {
            ParquetSchemaConverter.checkFieldNames(colNames)
          }
        case "parquet" => ParquetSchemaConverter.checkFieldNames(colNames)
        case "orc" => OrcFileFormat.checkFieldNames(colNames)
        case _ =>
      }
    }
  }

  /**
   * Throws exception if outputPath tries to overwrite inputpath.
   */
  def verifyNotReadPath(query: LogicalPlan, outputPath: Path) : Unit = {
    val inputPaths = query.collect {
      case LogicalRelation(r: HadoopFsRelation, _, _, _) =>
        r.location.rootPaths
    }.flatten

    if (inputPaths.contains(outputPath)) {
      throw new AnalysisException(
        "Cannot overwrite a path that is also being read from.")
    }
  }
}
| jkbradley/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala | Scala | apache-2.0 | 36,571 |
package gcp
import java.io.InputStream
import java.nio.ByteBuffer
import cats.MonadError
import com.google.cloud.ReadChannel
import com.google.cloud.storage.{BlobId, StorageOptions, Storage => GStorage}
/** Abstraction over reading objects ("blobs") from Google Cloud Storage. */
trait Storage {

  /**
   * Opens the object identified by `bucket` and `name` for reading.
   *
   * @param chunkSize size in bytes of the read buffer used by the returned stream
   * @param options   options forwarded to the underlying storage reader
   * @return the object's content as an [[InputStream]]; failures are captured
   *         in `F` (the companion's implementation uses `F.catchNonFatal`)
   */
  def fetch[F[_]](
      bucket: String,
      name: String,
      chunkSize: Int,
      options: GStorage.BlobSourceOption*)(implicit F: MonadError[F, Throwable]): F[InputStream]
}
object Storage {

  /** Creates a [[Storage]] backed by the given Google Cloud storage service. */
  def apply()(implicit storageService: GStorage): Storage = new Storage {
    def fetch[F[_]](
        bucket: String,
        name: String,
        chunkSize: Int,
        options: GStorage.BlobSourceOption*)(implicit F: MonadError[F, Throwable]): F[InputStream] =
      F.catchNonFatal(
        transform(storageService.reader(BlobId.of(bucket, name), options: _*), chunkSize))
  }

  /**
   * Adapts a [[ReadChannel]] to an [[InputStream]], reading up to `chunkSize`
   * bytes at a time into a direct buffer. Closing the stream closes `ch`.
   */
  def transform(ch: ReadChannel, chunkSize: Int): InputStream =
    new InputStream {
      private[this] var isEof = false
      private[this] val buffer: ByteBuffer =
        ByteBuffer.allocateDirect(chunkSize)

      // Prime the buffer with the first chunk so available()/read() see data.
      ch.read(buffer)
      buffer.flip()

      // Refill the buffer from the channel and return the channel's read count:
      // -1 at end of stream, possibly 0 on a short read.
      private def more: Int = {
        if (isEof) -1
        else {
          buffer.clear()
          val a = ch.read(buffer)
          buffer.flip()
          a
        }
      }

      override def available(): Int =
        if (isEof) 0
        else buffer.remaining()

      override def read(): Int = {
        // Loop until the buffer holds data or the channel reports EOF. The
        // previous single-shot check (`buffer.hasRemaining || more >= 0`)
        // called `buffer.get` after a legal 0-byte channel read, throwing
        // BufferUnderflowException on an empty buffer.
        if (isEof) -1
        else {
          var n = 0
          while (!buffer.hasRemaining && n >= 0) {
            n = more
          }
          if (buffer.hasRemaining) buffer.get & 0xff
          else {
            isEof = true
            -1
          }
        }
      }

      override def close(): Unit = {
        super.close()
        ch.close()
        buffer.clear()
      }
    }

  object Service {
    /** Default storage service built from the environment's default credentials. */
    implicit lazy val default: GStorage =
      StorageOptions.getDefaultInstance.getService
  }
}
| tkrs/gcs-scala | core/src/main/scala/gcp/Storage.scala | Scala | mit | 1,784 |
// code-examples/Rounding/one-plus-two-script.scala
// Minimal script example: a single expression evaluated by the Scala interpreter.
1 + 2 | XClouded/t4f-core | scala/src/tmp/Rounding/one-plus-two-script.scala | Scala | apache-2.0 | 58
package astrac.springy
package api
import org.elasticsearch.index.query.QueryBuilder
// Index document model

// Enumeration of document-version comparison modes for index requests.
sealed trait VersionType
object VersionType {
  case object Internal extends VersionType
  case object External extends VersionType
  case object ExternalGte extends VersionType
  case object Force extends VersionType
}

// Enumeration of write-consistency levels for index operations.
sealed trait ConsistencyLevel
object ConsistencyLevel {
  case object One extends ConsistencyLevel
  case object Quorum extends ConsistencyLevel
  case object All extends ConsistencyLevel
}

// Enumeration of index operation types: plain index vs. create-only.
sealed trait OpType
object OpType {
  case object Index extends OpType
  case object Create extends OpType
}
// Search model

// Minimal query ADT; `Native` is an escape hatch wrapping a raw Elasticsearch
// QueryBuilder directly.
sealed trait Query
object Query {
  case class Native(query: QueryBuilder) extends Query
  // Exact term match on a single field.
  case class Term(field: String, value: String) extends Query
  // Matches every document.
  case object MatchAll extends Query
  // TODO: All the other queries
}

// Shard-level outcome counts attached to a search response.
case class ShardInfo(total: Int, successful: Int, failed: Int)
// A single search result: index/type/id coordinates plus the decoded source document.
case class SearchHit[T](_index: String, _type: String, _id: String, _source: T)
case class SearchHits[T](total: Long, hits: List[SearchHit[T]])

// Bulk model

// One item of a bulk response; the per-operation response classes below extend it.
sealed trait BulkItemResponse
case class BulkFailure(index: String, `type`: String , id: String, t: Throwable) extends BulkItemResponse

// Response model

case class AcknowledgedResponse(acknowledged: Boolean)
case class GetIndexResponse(exists: Boolean) // TODO: Map real response
case class IndexDocumentResponse(_index: String, _type: String, _id: String, _version: Long, created: Boolean) extends BulkItemResponse
// `document` is None when `found` is false.
case class GetDocumentResponse[T](_index: String, _type: String, _id: String, _version: Long, found: Boolean, document: Option[T])
case class DeleteDocumentResponse(_index: String, _type: String, _id: String, _version: Long, found: Boolean) extends BulkItemResponse
case class UpdateDocumentResponse(_index: String, _type: String, _id: String, _version: Long) extends BulkItemResponse
// TODO: case class ScriptUpdateRequest[T](index: String, `type`: String, id: String, script: String, params: T)
case class BulkResponse(items: Seq[BulkItemResponse])
case class SearchResponse[T](_shard: ShardInfo, hits: SearchHits[T])
| Astrac/springy | src/main/scala/astrac/springy/api/models.scala | Scala | mit | 2,134 |
package org.jetbrains.plugins.scala
package caches
package stats
/**
 * Type class describing the operations the cache statistics machinery needs
 * from a concrete cache implementation of type `T` (presumably the caches
 * tracked by the `caches.stats` package — confirm against usages).
 */
trait CacheCapabilities[T] {
  type CacheType = T

  // Number of entries currently held by the cache.
  def cachedEntitiesCount(cache: CacheType): Int

  // Removes all entries from the cache.
  def clear(cache: CacheType): Unit
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/caches/stats/CacheCapabilities.scala | Scala | apache-2.0 | 204 |
package com.artclod.mathml.scalar.apply
import org.junit.runner.RunWith
import play.api.test._
import play.api.test.Helpers._
import org.specs2.mutable._
import com.artclod.mathml._
import com.artclod.mathml.scalar._
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
// LATER try out http://rlegendi.github.io/specs2/runner/ and remove RunWith
// Specs2 tests for ApplyPower: evaluation, variable collection, the `c`
// (constant) and `s` (simplify) helpers, differentiation, and MathJS output.
// The bare identifiers used below (`0`, `1`, `2`, x, y, e, F, Fdx, G, Gdx)
// appear to be fixtures from the com.artclod.mathml imports — confirm there.
@RunWith(classOf[JUnitRunner])
class ApplyPowerSpec extends Specification {

  "eval" should {
    "raise value to power" in {
      ApplyPower(`2`, `3`).eval().get must beEqualTo(8)
    }
    "return 0 if base is 0" in {
      ApplyPower(`0`, e).eval().get must beEqualTo(0)
    }
    "fail if base is nonzero and power results is 0" in {
      ApplyPower(e, `-1000`).eval() must beFailedTry
    }
  }

  "variables" should {
    "be empty if element is constant" in {
      ApplyPower(`1`, `2`).variables must beEmpty
    }
    "be x if element constains an x" in {
      ApplyPower(x, `2`).variables must beEqualTo(Set("x"))
    }
    "be y if element constains a y" in {
      ApplyPower(y, `2`).variables must beEqualTo(Set("y"))
    }
    "be x & y if element constains x & y" in {
      ApplyPower(x, y).variables must beEqualTo(Set("x", "y"))
    }
  }

  "c" should {
    "return 1 if base is 1" in {
      ApplyPower(`1`, x).c.get must beEqualTo(`1`)
    }
    "return base if power is 1" in {
      ApplyPower(`5`, `1`).c.get must beEqualTo(`5`)
    }
    "return 1 if power is 0" in {
      ApplyPower(x, `0`).c.get must beEqualTo(`1`)
    }
    "return None if function is not constant" in {
      ApplyPower(`2`, x).c must beNone
    }
  }

  "s" should {
    "return 1 if base is 1" in {
      ApplyPower(`1`, x).s must beEqualTo(`1`)
    }
    "return 1 if exponent is 0" in {
      ApplyPower(x, `0`).s must beEqualTo(`1`)
    }
    "return base if exponent is 1" in {
      ApplyPower(x, `1`).s must beEqualTo(x)
    }
  }

  "d" should {
    "obey the elementary power rule: (x^n)' = n*x^(n-1)" in {
      ApplyPower(x, `3`).dx must beEqualTo(`3` * (x ^ `2`))
    }
    "obey the chain power rule: (f^n)' = n*f^(n-1)*f'" in {
      ApplyPower(F, `3`).dx must beEqualTo(ApplyTimes(`3`, F ^ `2`, Fdx))
    }
    // (f^g)' = f^(g-1) * (g f'+f log(f) g')
    "obey the generalized power rule: (f^g)' = f^(g-1) * (g * f' + f * log(f) * g')" in {
      ApplyPower(F, G).dx must beEqualTo( ((F^(G-`1`)) * (G * Fdx + F * ApplyLn(F) * Gdx))s )
    }
  }

  "toText" should {
    "handle 3 ^ 5" in {
      ApplyPower(3, 5).toMathJS must beEqualTo("(3 ^ 5)")
    }
  }
} | kristiankime/web-education-games | test/com/artclod/mathml/scalar/apply/ApplyPowerSpec.scala | Scala | mit | 2,435
/*
* Copyright 2012 The SIRIS Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The SIRIS Project is a cooperation between Beuth University, Berlin and the
* HCI Group at the University of Würzburg. The project is funded by the German
* Federal Ministry of Education and Research (grant no. 17N4409).
*/
/*
* Created by IntelliJ IDEA.
* User: martin
* Date: 5/11/11
* Time: 11:39 AM
*/
package simx.components.physics.jbullet
import com.bulletphysics.dynamics.DiscreteDynamicsWorld
import com.bulletphysics.collision.broadphase.{BroadphaseInterface, Dispatcher}
import com.bulletphysics.dynamics.constraintsolver.ConstraintSolver
import com.bulletphysics.collision.dispatch.CollisionConfiguration
import javax.vecmath.Vector3f
/**
 * [[DiscreteDynamicsWorld]] variant adding a convenience accessor that returns
 * the gravity vector directly instead of filling a caller-supplied instance.
 */
class JBulletDiscreteDynamicsWorld(dispatcher: Dispatcher, pairCache: BroadphaseInterface, constraintSolver: ConstraintSolver, collisionConfiguration: CollisionConfiguration)
  extends DiscreteDynamicsWorld(dispatcher, pairCache, constraintSolver, collisionConfiguration) {

  /** Returns the world's current gravity as a freshly allocated vector. */
  def getGravity: Vector3f = {
    val out = new Vector3f
    getGravity(out)
    out
  }
}
| simulator-x/jbullet-physics | src/simx/components/physics/jbullet/JBulletDiscreteDynamicsWorld.scala | Scala | apache-2.0 | 1,652 |
package model.json
/**
 * Request payload for login: the credentials exactly as received in the JSON body.
 *
 * @author Camilo Sampedro <camilo.sampedro@udea.edu.co>
 */
case class LoginJson(username: String, password: String)
| ProjectAton/AtonLab | app/model/json/LoginJson.scala | Scala | gpl-3.0 | 144 |
package edu.chop.cbmi.dataExpress.test.dataModels
import java.util.Calendar
import edu.chop.cbmi.dataExpress.dataModels.{DataTable, DataRow}
import edu.chop.cbmi.dataExpress.dataModels.RichOption._
import edu.chop.cbmi.dataExpress.dataModels.sql.CharacterDataType
import edu.chop.cbmi.dataExpress.dataWriters.DataWriter
import edu.chop.cbmi.dataExpress.dataWriters.sql.SqlTableWriter
import edu.chop.cbmi.dataExpress.test.util.presidents._
import edu.chop.cbmi.dataExpress.test.util.{Functions}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**
 * Integration-style spec exercising SqlTableWriter via [[DataWriter]]:
 * row inserts (by DataRow, DataTable, and name->value function), row updates,
 * and table creation, all against the presidents test schema on the configured
 * source/target SQL backends. Steps build on each other, so order matters.
 */
@RunWith(classOf[JUnitRunner])
class SqlTableWriterSpec extends PresidentsSpecWithSourceTarget{

  //override val backend_test_type : KNOWN_SQL_BACKEND = POSTGRES()
  override val backend_test_type : KNOWN_SQL_BACKEND = MYSQL()

  //will be creating some new tables in the target
  val VICE_PRESIDENTS = "vice_presidents"
  BackendOps.add_table_name(target_backend, VICE_PRESIDENTS)
  val PRESIDENTS_COPY = "pres_copy"
  BackendOps.add_table_name(target_backend, PRESIDENTS_COPY)

  describe("A SqlTableWriter"){
    val dw = DataWriter(target_backend)

    it("should allow the insertion of data into a table in the db"){
      Given("a datarow with the appropriate number of arguments insert it into the table")
      val monroe = SQLStatements.potus_data_row(SQLStatements.MONROE)
      dw.insert_row(PRESIDENTS, monroe).operation_succeeded_? should equal(true)

      val jqa = SQLStatements.potus_data_row(SQLStatements.QUINCY_ADAMS)
      dw.insert_row(PRESIDENTS,jqa).operation_succeeded_? should equal(true)

      //not all columns are required
      val rr = DataRow("id"->7,"first_name"->"Ronald", "last_name"->"Reagan")
      dw.insert_row(PRESIDENTS,rr).operation_succeeded_? should equal(true)

      And("after commiting the backend the rows should appear in a new query")
      query_and_count(PRESIDENTS) should equal(7)

      Given("any DataTable whose DataRows have an appropriate number of arguments, insert all rows into the table")
      val gb = List(8,"George","Bush", 1)
      val bc = List(9,"William","Clinton",2)
      val sdt = DataTable(List("id", "first_name", "last_name", "num_terms"), gb, bc)
      dw.insert_rows(PRESIDENTS,sdt).operation_succeeded_? should equal(true)

      And("after committing the backend the rows of the table should be in a new query")
      query_and_count(PRESIDENTS) should equal(9)

      Given("a function mapping from a String to a T, insert a new row into the tables")
      // Column values are supplied by name; None means "leave unset".
      dw.insert_row(PRESIDENTS,(name:String)=>{
        name match{
          case "id" => Some(10)
          case "first_name" => Some("Barack")
          case "last_name" => Some("Obama")
          case "num_terms" => Some(1)
          case "dob" => Some(Functions.sqlDateFrom("19610804"))
          case _ => None
        }
      })

      And("after comitting the backend the rows of the table should be in a new query")
      query_and_count(PRESIDENTS) should equal(10)

      And("it should allow updates of existing rows")
      Given("a new DataRow and a filter")
      val washington = DataRow("first_name"->"Bob")
      dw.update_row(PRESIDENTS, washington, "id"->1)
      // Update must not change the row count; only listed columns change.
      query_and_count(PRESIDENTS) should equal(10)
      val gw = DataTable(source_backend, """select * from %s where %s='1'""".format(
        source_backend.sqlDialect.quoteIdentifier(PRESIDENTS),source_backend.sqlDialect.quoteIdentifier("id"))).next
      gw.first_name.asu[String] should equal("Bob")
      gw.last_name.asu[String] should equal("Washington")
      gw.id.asu[Int] should equal(1)
      gw.num_terms.asu[Int] should equal(2)

      And("a function mapping from a String to a T, insert a new row into the tables")
      dw.update_row(PRESIDENTS,"id"->2)((name:String)=>{
        name match{
          case "first_name" => Some("Johnie")
          case _ => None
        }
      })
      query_and_count(PRESIDENTS) should equal(10)
      val ja = DataTable(source_backend, """select * from %s where %s='2'""".format(
        source_backend.sqlDialect.quoteIdentifier(PRESIDENTS),source_backend.sqlDialect.quoteIdentifier("id"))).next
      ja.first_name.asu[String] should equal("Johnie")
      ja.last_name.asu[String] should equal("Adams")
      ja.num_terms.asu[Int] should equal(1)
      ja.id.asu[Int] should equal(2)

      And("it should allow creation of new tables")
      val biden = List("Joe","Biden")
      val cheney = List("Dick", "Cheney")
      val vps = DataTable(List("first_name","last_name"), biden, cheney)
      // Create a brand-new table from explicit column types.
      dw.insert_table(VICE_PRESIDENTS, List(CharacterDataType(20, false), CharacterDataType(20,false)),
        vps, SqlTableWriter.OVERWRITE_OPTION_DROP)
      BackendOps.add_table_name(target_backend, VICE_PRESIDENTS)
      query_and_count(VICE_PRESIDENTS) should equal(2)

      // Copy an existing table, reusing its inferred data types.
      val the_pres_table = DataTable(source_backend, "SELECT * FROM %s".format(
        source_backend.sqlDialect.quoteIdentifier(PRESIDENTS)))
      val dts = the_pres_table.dataTypes
      dw.insert_table(PRESIDENTS_COPY, dts, the_pres_table, SqlTableWriter.OVERWRITE_OPTION_DROP)
      BackendOps.add_table_name(target_backend, PRESIDENTS_COPY)
      query_and_count(PRESIDENTS_COPY) should equal(10)
    }
  }
}
| chop-dbhi/dataexpress | src/test/scala/edu/chop/cbmi/dataExpress/test/dataModels/SqlTableWriterSpec.scala | Scala | bsd-2-clause | 5,386 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.integration.torch
import com.intel.analytics.bigdl.dllib.nn.Sum
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.RandomGenerator
@com.intel.analytics.bigdl.tags.Serial
// Compares BigDL's Sum layer against the reference Torch nn.Sum implementation:
// each case runs forward and backward on identical random tensors and requires
// output and gradInput to match. The four cases differ only in the constructor
// arguments passed to Sum (and mirrored in the Lua snippet).
class SumSpec extends TorchSpec {

  // Random scalar drawn via RandomGenerator.RNG.normal(-10, 10), used to fill tensors.
  def randomn(): Double = RandomGenerator.RNG.normal(-10, 10)

  "An Sum()" should "generate correct output and grad" in {
    torchCheck()
    val layer = Sum[Double]()
    val input = Tensor[Double](2, 2, 2)
    input.apply1(x => randomn())
    val gradOutput = Tensor[Double](1, 2, 2)
    gradOutput.apply1(x => randomn())

    val start = System.nanoTime()
    val output = layer.forward(input)
    val gradInput = layer.backward(input, gradOutput)
    val end = System.nanoTime()
    val scalaTime = end - start

    val code = "module = nn.Sum()\\n" +
      "output = module:forward(input)\\n" +
      "gradInput = module:backward(input,gradOutput)"

    val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
      Array("output", "gradInput"))
    val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
    val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]

    output should be (luaOutput)
    gradInput should be (luaGradInput)

    println("Test case : Sum, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
  }

  "An Sum(2)" should "generate correct output and grad" in {
    torchCheck()
    val layer = Sum[Double](2)
    val input = Tensor[Double](2, 2, 2)
    input.apply1(x => randomn())
    val gradOutput = Tensor[Double](1, 2, 2)
    gradOutput.apply1(x => randomn())

    val start = System.nanoTime()
    val output = layer.forward(input)
    val gradInput = layer.backward(input, gradOutput)
    val end = System.nanoTime()
    val scalaTime = end - start

    val code = "module = nn.Sum(2)\\n" +
      "output = module:forward(input)\\n" +
      "gradInput = module:backward(input,gradOutput)"

    val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
      Array("output", "gradInput"))
    val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
    val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]

    output should be (luaOutput)
    gradInput should be (luaGradInput)

    println("Test case : Sum, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
  }

  "An Sum(2,1,true)" should "generate correct output and grad" in {
    torchCheck()
    val layer = Sum[Double](2, 1, true)
    val input = Tensor[Double](2, 2, 2)
    input.apply1(x => randomn())
    val gradOutput = Tensor[Double](1, 2, 2)
    gradOutput.apply1(x => randomn())

    val start = System.nanoTime()
    val output = layer.forward(input)
    val gradInput = layer.backward(input, gradOutput)
    val end = System.nanoTime()
    val scalaTime = end - start

    val code = "module = nn.Sum(2,1,true)\\n" +
      "output = module:forward(input)\\n" +
      "gradInput = module:backward(input,gradOutput)"

    val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
      Array("output", "gradInput"))
    val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
    val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]

    output should be (luaOutput)
    gradInput should be (luaGradInput)

    println("Test case : Sum, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
  }

  "An Sum(-1,1,true)" should "generate correct output and grad" in {
    torchCheck()
    val layer = Sum[Double](-1, 1, true)
    val input = Tensor[Double](2, 2, 2)
    input.apply1(x => randomn())
    val gradOutput = Tensor[Double](1, 2, 2)
    gradOutput.apply1(x => randomn())

    val start = System.nanoTime()
    val output = layer.forward(input)
    val gradInput = layer.backward(input, gradOutput)
    val end = System.nanoTime()
    val scalaTime = end - start

    val code = "module = nn.Sum(-1,1,true)\\n" +
      "output = module:forward(input)\\n" +
      "gradInput = module:backward(input,gradOutput)"

    val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
      Array("output", "gradInput"))
    val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
    val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]

    output should be (luaOutput)
    gradInput should be (luaGradInput)

    println("Test case : Sum, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/integration/torch/SumSpec.scala | Scala | apache-2.0 | 5,199 |
package typeclass.data
import typeclass.{Applicative, Semigroup}
import scalaprops.Gen
/** Either-like result of a validation: a `Failure[E]` or a `Success[A]`. */
sealed trait Validation[E, A] {
  import Validation._

  /** Catamorphism: applies `f` to a failure value or `g` to a success value. */
  def fold[B](f: E => B, g: A => B): B = this match {
    case Success(a) => g(a)
    case Failure(e) => f(e)
  }
}
object Validation {
  /** A failed validation carrying an error of type `E`. */
  case class Failure[E, A](value: E) extends Validation[E, A]
  /** A successful validation carrying a value of type `A`. */
  case class Success[E, A](value: A) extends Validation[E, A]

  // Smart constructors widening to the Validation supertype.
  def failure[E, A](e: E): Validation[E, A] = Failure(e)
  def success[E, A](a: A): Validation[E, A] = Success(a)

  // Variants wrapping the error side in a NonEmptyList (for error accumulation).
  def failureNel[E, A](e: E): Validation[NonEmptyList[E], A] = Failure(NonEmptyList(e))
  def successNel[E, A](a: A): Validation[NonEmptyList[E], A] = Success(a)

  // Applicative that accumulates failures: when both sides fail, the errors
  // are combined via the Semigroup for E (the defining difference from
  // Either's fail-fast monad).
  implicit def validationApplicative[E](implicit E: Semigroup[E]): Applicative[Validation[E, ?]] = new Applicative[Validation[E, ?]] {
    def pure[A](a: A): Validation[E, A] = success(a)
    def ap[A, B](fab: Validation[E, A => B], fa: Validation[E, A]): Validation[E, B] =
      (fab, fa) match {
        case (Success(f), Success(a)) => Success(f(a))
        case (Failure(e), Success(_)) => Failure(e)
        case (Success(_), Failure(e)) => Failure(e)
        case (Failure(x), Failure(y)) => Failure(E.combine(x, y))
      }
  }

  // scalaprops generator: picks a failure or a success with equal weight.
  implicit def gen[E: Gen, A: Gen]: Gen[Validation[E, A]] =
    Gen.oneOf(Gen[E].map(failure), Gen[A].map(success))
} | julien-truffaut/Typeclass | answer/src/main/scala/typeclass/data/Validation.scala | Scala | mit | 1,339
/*
* BinaryOp.scala
* (FScape)
*
* Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.fscape
package graph
import de.sciss.fscape.Graph.{ProductReader, RefMapIn}
import de.sciss.fscape.UGen.Adjunct
import de.sciss.fscape.UGenSource.unwrap
import de.sciss.fscape.stream.{BufD, BufI, BufL, StreamIn, StreamOut}
import de.sciss.numbers.{DoubleFunctions => rd, DoubleFunctions2 => rd2, IntFunctions => ri, IntFunctions2 => ri2, LongFunctions => rl, LongFunctions2 => rl2}
import scala.annotation.switch
import scala.collection.immutable.{IndexedSeq => Vec}
object BinaryOp extends ProductReader[BinaryOp] {
object Op {
  /** Resolves an operator identifier (as stored in a UGen's adjuncts during
    * serialization) back to its singleton `Op` instance.
    *
    * @throws MatchError if `id` does not correspond to a known operator
    */
  def apply(id: Int): Op = (id: @switch) match {
    case Plus               .id => Plus
    case Minus              .id => Minus
    case Times              .id => Times
    case Div                .id => Div
    case Mod                .id => Mod
    case Eq                 .id => Eq
    case Neq                .id => Neq
    case Lt                 .id => Lt
    case Gt                 .id => Gt
    case Leq                .id => Leq
    case Geq                .id => Geq
    case Min                .id => Min
    case Max                .id => Max
    case And                .id => And
    case Or                 .id => Or
    case Xor                .id => Xor
    case Lcm                .id => Lcm
    case Gcd                .id => Gcd
    case RoundTo            .id => RoundTo
    case RoundUpTo          .id => RoundUpTo
    case Trunc              .id => Trunc
    case Atan2              .id => Atan2
    case Hypot              .id => Hypot
    case Hypotx             .id => Hypotx
    case Pow                .id => Pow
    case LeftShift          .id => LeftShift
    case RightShift         .id => RightShift
    case UnsignedRightShift .id => UnsignedRightShift
    case Ring1              .id => Ring1
    case Ring2              .id => Ring2
    case Ring3              .id => Ring3
    case Ring4              .id => Ring4
    case Difsqr             .id => Difsqr
    case Sumsqr             .id => Sumsqr
    case Sqrsum             .id => Sqrsum
    case Sqrdif             .id => Sqrdif
    case Absdif             .id => Absdif
    case Thresh             .id => Thresh
    case Amclip             .id => Amclip
    case Scaleneg           .id => Scaleneg
    case Clip2              .id => Clip2
    case Excess             .id => Excess
    case Fold2              .id => Fold2
    case Wrap2              .id => Wrap2
    case FirstArg           .id => FirstArg
    case SecondArg          .id => SecondArg
    case ModJ               .id => ModJ
  }

  /** Smallest valid operator id (inclusive). */
  final val MinId = Plus.id
  /** Largest valid operator id (inclusive). Changed from `SecondArg.id` (100)
    * to `ModJ.id` (101): `apply` resolves `ModJ`, but the old bound excluded
    * it, so any range check of the form `id <= MaxId` rejected a valid op.
    */
  final val MaxId = ModJ.id
}
sealed trait Op {
  op =>

  /** Unique, stable identifier used for serialization and stream adjuncts. */
  def id: Int

  /** The double-precision implementation; every operator provides this one. */
  def funDD: (Double, Double) => Double

  /** The default converts to `Double`, but specific operators
    * may better preserve semantics and precision for other types such as `Int` and `Long`.
    */
  def apply(a: Constant, b: Constant): Constant =
    ConstantD(funDD(a.doubleValue, b.doubleValue))

  /** Human-readable operator name; symbolic ops ("+", "*", ...) override it. */
  def name: String = plainName.capitalize

  /** Builds the graph element, constant-folding when both operands are constants. */
  def make(a: GE, b: GE): GE = (a, b) match {
    case (av: Constant, bv: Constant) => apply(av, bv)
    case _ => BinaryOp(op.id, a, b)
  }

  // Derives the name from the singleton's class name, e.g.
  // "...BinaryOp$Plus$" -> "Plus": keeps what follows the first '$' and
  // strips the trailing '$' of the module class.
  private def plainName: String = {
    val cn = getClass.getName
    val sz = cn.length
    val i = cn.indexOf('$') + 1
    cn.substring(i, if (cn.charAt(sz - 1) == '$') sz - 1 else sz)
  }
}
// Refinements of `Op` declaring additional, type-specialized implementations.
// Naming scheme: Op<In><Out>, e.g. OpDI takes doubles and yields an Int.
// The `apply` overrides keep constant folding exact for Int/Long constants
// instead of always detouring through Double.

/** Operator with a dedicated `(Int, Int) => Int` implementation. */
sealed trait OpII extends Op {
  def funII: (Int, Int) => Int
  override def apply(a: Constant, b: Constant): Constant = (a, b) match {
    case (ConstantI(av), ConstantI(bv)) => ConstantI(funII(av, bv))
    case _ => ConstantD(funDD(a.doubleValue, b.doubleValue))
  }
}

/** Operator with a dedicated `(Long, Long) => Long` implementation. */
sealed trait OpLL extends Op {
  def funLL: (Long, Long) => Long
  override def apply(a: Constant, b: Constant): Constant = (a, b) match {
    case (ConstantL(av), ConstantL(bv)) => ConstantL(funLL(av, bv))
    case _ => ConstantD(funDD(a.doubleValue, b.doubleValue))
  }
}

/** Double inputs, Int output (e.g. comparisons yielding 0/1). */
sealed trait OpDI extends Op {
  def funDI: (Double, Double) => Int
}

/** Double inputs, Long output. */
sealed trait OpDL extends Op {
  def funDL: (Double, Double) => Long
}

/** Int inputs, Double output. */
sealed trait OpID extends Op {
  def funID: (Int, Int) => Double
}

/** Long inputs, Double output. */
sealed trait OpLD extends Op {
  def funLD: (Long, Long) => Double
}

/** Int inputs, Long output (used where Int results could overflow). */
sealed trait OpIL extends Op {
  def funIL: (Int, Int) => Long
}

/** Long inputs, Int output. */
sealed trait OpLI extends Op {
  def funLI: (Long, Long) => Int
}

/** Operator whose result type matches its operand type (Int->Int, Long->Long).
  * Mixed Int/Long constants are widened to Long before folding.
  */
sealed trait OpSame extends Op with OpII with OpLL {
  override def apply(a: Constant, b: Constant): Constant = (a, b) match {
    case (ConstantI(av), ConstantI(bv)) => ConstantI(funII(av, bv))
    case (ConstantL(av), ConstantL(bv)) => ConstantL(funLL(av, bv))
    case (ConstantL(av), ConstantI(bv)) => ConstantL(funLL(av, bv))
    case (ConstantI(av), ConstantL(bv)) => ConstantL(funLL(av, bv))
    case _ => ConstantD(funDD(a.doubleValue, b.doubleValue))
  }
}
case object Plus extends OpSame {
  final val id = 0
  override val name = "+"
  /** Constant-folds the additive identity: `0 + b` is `b`, `a + 0` is `a`. */
  override def make(a: GE, b: GE): GE =
    (a, b) match {
      case (Constant(0), _) => b
      case (_, Constant(0)) => a
      case _ => super.make(a, b)
    }
  val funDD: (Double, Double) => Double = (a, b) => a + b
  val funII: (Int , Int ) => Int = (a, b) => a + b
  val funLL: (Long , Long ) => Long = (a, b) => a + b
}
case object Minus extends OpSame {
  final val id = 1
  override val name = "-"
  /** Constant-folds neutral operands: `a - 0` reduces to `a`, and `0 - b`
    * reduces to the *negation* of `b`. (The previous shortcut returned `b`
    * unchanged, which is incorrect for subtraction; negation mirrors the
    * `-1` handling in `Times.make`.)
    */
  override def make(a: GE, b: GE): GE =
    (a, b) match {
      case (Constant(0), _) => UnaryOp.Neg.make(b) // 0 - b == -b
      case (_, Constant(0)) => a                   // a - 0 == a
      case _                => super.make(a, b)
    }
  val funDD: (Double, Double) => Double = (a, b) => a - b
  val funII: (Int , Int ) => Int = (a, b) => a - b
  val funLL: (Long , Long ) => Long = (a, b) => a - b
}
case object Times extends OpSame {
  final val id = 2
  override val name = "*"
  /** Constant-folds multiplicative identities: `1 * b` is `b`, `a * 1` is `a`,
    * and a `-1` operand turns into a unary negation of the other side.
    */
  override def make(a: GE, b: GE): GE =
    (a, b) match {
      // N.B. do not replace by Constant(0), because lengths might differ!
      // case (Constant(0), _) => a
      // case (_, Constant(0)) => b
      case (Constant(1), _) => b
      case (_, Constant(1)) => a
      case (Constant(-1), _) => UnaryOp.Neg.make(b) // -b
      case (_, Constant(-1)) => UnaryOp.Neg.make(a) // -a
      case _ => super.make(a, b)
    }
  val funDD: (Double, Double) => Double = (a, b) => a * b
  val funII: (Int , Int ) => Int = (a, b) => a * b
  val funLL: (Long , Long ) => Long = (a, b) => a * b
}
// case object IDiv extends Op( 3 )
case object Div extends OpSame {
  final val id = 4
  override val name = "/"
  val funDD: (Double, Double) => Double = (a, b) => a / b
  // Int/Long variants use truncating integer division; division by an
  // integer zero throws ArithmeticException, whereas the Double variant
  // yields +-Infinity or NaN.
  val funII: (Int , Int ) => Int = (a, b) => a / b
  val funLL: (Long , Long ) => Long = (a, b) => a / b
}
case object Mod extends OpSame {
  final val id = 5
  override val name = "mod"
  // Library modulus from de.sciss.numbers (rd/ri/rl.mod) — differs from
  // Java's `%` operator, which is exposed separately as `ModJ` below.
  val funDD: (Double, Double) => Double = (a, b) => rd.mod(a, b)
  val funII: (Int , Int ) => Int = (a, b) => ri.mod(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl.mod(a, b)
}
/** Comparison-style operator: whatever the operand types, the folded result
  * is an `Int` constant that is always 0 or 1.
  */
sealed trait BoolOp extends Op with OpDI with OpII with OpLI {
  override def apply(a: Constant, b: Constant): Constant = (a, b) match {
    case (ConstantI(av), ConstantI(bv)) => ConstantI(funII(av, bv))
    case (ConstantL(av), ConstantL(bv)) => ConstantI(funLI(av, bv))
    case (ConstantL(av), ConstantI(bv)) => ConstantI(funLI(av, bv)) // Int widens to Long
    case (ConstantI(av), ConstantL(bv)) => ConstantI(funLI(av, bv)) // Int widens to Long
    case _ => ConstantI(funDI(a.doubleValue, b.doubleValue))
  }
}
// Element-wise comparisons, emitting 1 for true and 0 for false. The "sig_"
// name prefix on Eq/Neq distinguishes the signal-level operators from
// structural equality on the graph elements themselves.
case object Eq extends BoolOp {
  final val id = 6
  override val name = "sig_=="
  val funDD: (Double, Double) => Double = (a, b) => if (a == b) 1d else 0d
  val funDI: (Double, Double) => Int = (a, b) => if (a == b) 1 else 0
  val funII: (Int , Int ) => Int = (a, b) => if (a == b) 1 else 0
  val funLI: (Long , Long ) => Int = (a, b) => if (a == b) 1 else 0
}

case object Neq extends BoolOp {
  final val id = 7
  override val name = "sig_!="
  val funDD: (Double, Double) => Double = (a, b) => if (a != b) 1d else 0d
  val funDI: (Double, Double) => Int = (a, b) => if (a != b) 1 else 0
  val funII: (Int , Int ) => Int = (a, b) => if (a != b) 1 else 0
  val funLI: (Long , Long ) => Int = (a, b) => if (a != b) 1 else 0
}

case object Lt extends BoolOp {
  final val id = 8
  override val name = "<"
  val funDD: (Double, Double) => Double = (a, b) => if (a < b) 1d else 0d // NOT rd.< !
  val funDI: (Double, Double) => Int = (a, b) => if (a < b) 1 else 0
  val funII: (Int , Int ) => Int = (a, b) => if (a < b) 1 else 0
  val funLI: (Long , Long ) => Int = (a, b) => if (a < b) 1 else 0
}

case object Gt extends BoolOp {
  final val id = 9
  override val name = ">"
  val funDD: (Double, Double) => Double = (a, b) => if (a > b) 1d else 0d // NOT rd.> !
  val funDI: (Double, Double) => Int = (a, b) => if (a > b) 1 else 0
  val funII: (Int , Int ) => Int = (a, b) => if (a > b) 1 else 0
  val funLI: (Long , Long ) => Int = (a, b) => if (a > b) 1 else 0
}

case object Leq extends BoolOp {
  final val id = 10
  override val name = "<="
  val funDD: (Double, Double) => Double = (a, b) => if (a <= b) 1d else 0d // NOT rd.<= !
  val funDI: (Double, Double) => Int = (a, b) => if (a <= b) 1 else 0
  val funII: (Int , Int ) => Int = (a, b) => if (a <= b) 1 else 0
  val funLI: (Long , Long ) => Int = (a, b) => if (a <= b) 1 else 0
}

case object Geq extends BoolOp {
  final val id = 11
  override val name = ">="
  val funDD: (Double, Double) => Double = (a, b) => if (a >= b) 1d else 0d // NOT rd.>= !
  val funDI: (Double, Double) => Int = (a, b) => if (a >= b) 1 else 0
  val funII: (Int , Int ) => Int = (a, b) => if (a >= b) 1 else 0
  val funLI: (Long , Long ) => Int = (a, b) => if (a >= b) 1 else 0
}
case object Min extends OpSame {
  final val id = 12
  val funDD: (Double, Double) => Double = (a, b) => rd.min(a, b)
  val funII: (Int , Int ) => Int = (a, b) => ri.min(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl.min(a, b)
}

case object Max extends OpSame {
  final val id = 13
  val funDD: (Double, Double) => Double = (a, b) => rd.max(a, b)
  val funII: (Int , Int ) => Int = (a, b) => ri.max(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl.max(a, b)
}

// Bit-wise operators. The Double variants truncate each operand to Long,
// apply the operation, and convert back — fractional parts are discarded.
case object And extends OpSame {
  final val id = 14
  override val name = "&"
  val funDD: (Double, Double) => Double = (a, b) => (a.toLong & b.toLong).toDouble
  val funII: (Int , Int ) => Int = (a, b) => a & b
  val funLL: (Long , Long ) => Long = (a, b) => a & b
}

case object Or extends OpSame {
  final val id = 15
  override val name = "|"
  val funDD: (Double, Double) => Double = (a, b) => (a.toLong | b.toLong).toDouble
  val funII: (Int , Int ) => Int = (a, b) => a | b
  val funLL: (Long , Long ) => Long = (a, b) => a | b
}

case object Xor extends OpSame {
  final val id = 16
  override val name = "^"
  val funDD: (Double, Double) => Double = (a, b) => (a.toLong ^ b.toLong).toDouble
  val funII: (Int , Int ) => Int = (a, b) => a ^ b
  val funLL: (Long , Long ) => Long = (a, b) => a ^ b
}

// Lcm/Gcd are integral notions; the Double variants truncate to Long first.
case object Lcm extends OpSame {
  final val id = 17
  val funDD: (Double, Double) => Double = (a, b) => rl.lcm(a.toLong, b.toLong).toDouble
  val funII: (Int , Int ) => Int = (a, b) => ri.lcm(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl.lcm(a, b)
}

case object Gcd extends OpSame {
  final val id = 18
  val funDD: (Double, Double) => Double = (a, b) => rl.gcd(a.toLong, b.toLong).toDouble
  val funII: (Int , Int ) => Int = (a, b) => ri.gcd(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl.gcd(a, b)
}
/** Rounds `a` to the nearest multiple of `b` (see `de.sciss.numbers` roundTo). */
case object RoundTo extends OpSame {
  final val id = 19
  val funDD: (Double, Double) => Double = (a, b) => rd .roundTo(a, b)
  val funII: (Int , Int ) => Int = (a, b) => ri2.roundTo(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl2.roundTo(a, b)
}

/** Rounds `a` up to the next multiple of `b`. */
case object RoundUpTo extends OpSame {
  final val id = 20
  val funDD: (Double, Double) => Double = (a, b) => rd .roundUpTo(a, b)
  val funII: (Int , Int ) => Int = (a, b) => ri2.roundUpTo(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl2.roundUpTo(a, b)
}

// The following ops have no Int/Long specializations — they fold via the
// default Double path inherited from `Op`.

/** Truncates `a` towards zero to a multiple of `b`. */
case object Trunc extends Op {
  final val id = 21
  val funDD: (Double, Double) => Double = (a, b) => rd.trunc(a, b)
}

case object Atan2 extends Op {
  final val id = 22
  val funDD: (Double, Double) => Double = (a, b) => rd.atan2(a, b)
}

case object Hypot extends Op {
  final val id = 23
  val funDD: (Double, Double) => Double = (a, b) => rd.hypot(a, b)
}

/** Cheap approximation of `hypot` (SC-style `hypotApx`). */
case object Hypotx extends Op {
  final val id = 24
  val funDD: (Double, Double) => Double = (a, b) => rd.hypotApx(a, b)
}

/** '''Warning:''' Unlike a normal power operation, the signum of the
  * left operand is always preserved. I.e. `DC.kr(-0.5).pow(2)` will
  * not output `0.25` but `-0.25`. This is to avoid problems with
  * floating point noise and negative input numbers, so
  * `DC.kr(-0.5).pow(2.001)` does not result in a `NaN`, for example.
  */
case object Pow extends Op {
  final val id = 25
  val funDD: (Double, Double) => Double = (a, b) => rd.pow(a, b)
}
// Bit-shift operators. Double variants truncate both operands to Long first,
// so fractional shift amounts are discarded.
case object LeftShift extends OpSame {
  final val id = 26
  val funDD: (Double, Double) => Double = (a, b) => (a.toLong << b.toLong).toDouble
  val funII: (Int , Int ) => Int = (a, b) => a << b
  val funLL: (Long , Long ) => Long = (a, b) => a << b
}

/** Arithmetic (sign-extending) right shift. */
case object RightShift extends OpSame {
  final val id = 27
  val funDD: (Double, Double) => Double = (a, b) => (a.toLong >> b.toLong).toDouble
  val funII: (Int , Int ) => Int = (a, b) => a >> b
  val funLL: (Long , Long ) => Long = (a, b) => a >> b
}

/** Logical (zero-filling) right shift. Note the Int and Long variants differ
  * for negative inputs because zero-fill happens at bit 31 vs. bit 63.
  */
case object UnsignedRightShift extends OpSame {
  final val id = 28
  val funDD: (Double, Double) => Double = (a, b) => (a.toLong >>> b.toLong).toDouble
  val funII: (Int , Int ) => Int = (a, b) => a >>> b
  val funLL: (Long , Long ) => Long = (a, b) => a >>> b
}
// case object Fill extends Op( 29 )
// SuperCollider-style ring-modulation helpers (ring1..ring4); Double-only.
case object Ring1 extends Op {
  final val id = 30
  val funDD: (Double, Double) => Double = (a, b) => rd2.ring1(a, b)
}

case object Ring2 extends Op {
  final val id = 31
  val funDD: (Double, Double) => Double = (a, b) => rd2.ring2(a, b)
}

case object Ring3 extends Op {
  final val id = 32
  val funDD: (Double, Double) => Double = (a, b) => rd2.ring3(a, b)
}

case object Ring4 extends Op {
  final val id = 33
  val funDD: (Double, Double) => Double = (a, b) => rd2.ring4(a, b)
}
/** Operator whose integer results are always widened to `Long` — used for
  * the squared-sum/difference family, where an `Int * Int` product can
  * overflow the `Int` range.
  */
sealed trait LongOp extends Op with OpIL with OpLL {
  override def apply(a: Constant, b: Constant): Constant = (a, b) match {
    case (ConstantI(av), ConstantI(bv)) => ConstantL(funIL(av, bv))
    case (ConstantL(av), ConstantL(bv)) => ConstantL(funLL(av, bv))
    case (ConstantL(av), ConstantI(bv)) => ConstantL(funLL(av, bv))
    case (ConstantI(av), ConstantL(bv)) => ConstantL(funLL(av, bv))
    case _ => ConstantD(funDD(a.doubleValue, b.doubleValue))
  }
}

/** a*a - b*b */
case object Difsqr extends LongOp {
  final val id = 34
  val funDD: (Double, Double) => Double = (a, b) => rd .difSqr(a, b)
  val funIL: (Int , Int ) => Long = (a, b) => ri2.difSqr(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl2.difSqr(a, b)
}

/** a*a + b*b */
case object Sumsqr extends LongOp {
  final val id = 35
  val funDD: (Double, Double) => Double = (a, b) => rd .sumSqr(a, b)
  val funIL: (Int , Int ) => Long = (a, b) => ri2.sumSqr(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl2.sumSqr(a, b)
}

/** (a + b) squared */
case object Sqrsum extends LongOp {
  final val id = 36
  val funDD: (Double, Double) => Double = (a, b) => rd .sqrSum(a, b)
  val funIL: (Int , Int ) => Long = (a, b) => ri2.sqrSum(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl2.sqrSum(a, b)
}

/** (a - b) squared */
case object Sqrdif extends LongOp {
  final val id = 37
  val funDD: (Double, Double) => Double = (a, b) => rd .sqrDif(a, b)
  val funIL: (Int , Int ) => Long = (a, b) => ri2.sqrDif(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl2.sqrDif(a, b)
}
/** Absolute difference |a - b|. */
case object Absdif extends OpSame {
  final val id = 38
  val funDD: (Double, Double) => Double = (a, b) => rd .absDif(a, b)
  val funII: (Int , Int ) => Int = (a, b) => ri2.absDif(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl2.absDif(a, b)
}

// SuperCollider-style waveshaping helpers; Double-only.
case object Thresh extends Op {
  final val id = 39
  val funDD: (Double, Double) => Double = (a, b) => rd2.thresh(a, b)
}

case object Amclip extends Op {
  final val id = 40
  val funDD: (Double, Double) => Double = (a, b) => rd2.amClip(a, b)
}

case object Scaleneg extends Op {
  final val id = 41
  val funDD: (Double, Double) => Double = (a, b) => rd2.scaleNeg(a, b)
}

/** Clips `a` to the symmetric range [-b, b]. */
case object Clip2 extends OpSame {
  final val id = 42
  val funDD: (Double, Double) => Double = (a, b) => rd.clip2(a, b)
  val funII: (Int , Int ) => Int = (a, b) => ri.clip2(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl.clip2(a, b)
}

/** Residual of clip2: `a - clip2(a, b)`. */
case object Excess extends OpSame {
  final val id = 43
  val funDD: (Double, Double) => Double = (a, b) => rd.excess(a, b)
  val funII: (Int , Int ) => Int = (a, b) => ri.excess(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl.excess(a, b)
}

/** Folds `a` into the symmetric range [-b, b] (mirroring at the bounds). */
case object Fold2 extends OpSame {
  final val id = 44
  val funDD: (Double, Double) => Double = (a, b) => rd.fold2(a, b)
  val funII: (Int , Int ) => Int = (a, b) => ri.fold2(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl.fold2(a, b)
}

/** Wraps `a` into the symmetric range [-b, b] (modulo-style). */
case object Wrap2 extends OpSame {
  final val id = 45
  val funDD: (Double, Double) => Double = (a, b) => rd.wrap2(a, b)
  val funII: (Int , Int ) => Int = (a, b) => ri.wrap2(a, b)
  val funLL: (Long , Long ) => Long = (a, b) => rl.wrap2(a, b)
}
/** Passes the first operand through unchanged; the second operand still
  * participates in stream length / hot-input semantics.
  */
case object FirstArg extends OpSame {
  final val id = 46
  val funDD: (Double, Double) => Double = (a, _) => a
  val funII: (Int , Int ) => Int = (a, _) => a
  val funLL: (Long , Long ) => Long = (a, _) => a
  override def apply(a: Constant, b: Constant): Constant = a
}

// case object Rrand extends Op( 47 )
// case object ExpRRand extends Op( 48 )

/** Passes the second operand through unchanged (counterpart of `FirstArg`). */
case object SecondArg extends OpSame {
  final val id = 100
  val funDD: (Double, Double) => Double = (_, b) => b
  val funII: (Int , Int ) => Int = (_, b) => b
  val funLL: (Long , Long ) => Long = (_, b) => b
  override def apply(a: Constant, b: Constant): Constant = b
}

/** Java-style remainder (`%`, result takes the sign of the dividend),
  * as opposed to the library modulus exposed by `Mod`.
  */
case object ModJ extends OpSame {
  final val id = 101
  override val name = "%"
  val funDD: (Double, Double) => Double = (a, b) => a % b
  val funII: (Int , Int ) => Int = (a, b) => a % b
  val funLL: (Long , Long ) => Long = (a, b) => a % b
}

/** Deserializes a `BinaryOp` element: op id followed by the two operands. */
override def read(in: RefMapIn, key: String, arity: Int): BinaryOp = {
  require (arity == 3)
  val _op = in.readInt()
  val _a = in.readGE()
  val _b = in.readGE()
  new BinaryOp(_op, _a, _b)
}
}
/** A binary operator UGen, for example two sum or multiply two signals.
* The left or `a` input is "hot", i.e. it keeps the UGen running,
* while the right or `b` input may close early, and the last value will
* be remembered.
*
* @param op the identifier of the operator (e.g. `BinaryOp.Times.id`)
* @param a the left operand which determines how long the UGen computes
* @param b the right operand.
*/
final case class BinaryOp(op: Int, a: GE, b: GE) extends UGenSource.SingleOut {
  protected def makeUGens(implicit builder: UGenGraph.Builder): UGenInLike =
    unwrap(this, Vector(a.expand, b.expand))

  // The operator id travels as an integer adjunct so `read`/`Op.apply` can
  // restore the operator on deserialization.
  protected def makeUGen(args: Vec[UGenIn])(implicit builder: UGenGraph.Builder): UGenInLike =
    UGen.SingleOut(this, inputs = args, adjuncts = Adjunct.Int(op) :: Nil)

  // Expands to the concrete stream stage. The element type is chosen by
  // operand-type priority Double > Long > Int, then refined by whichever
  // specialized function the resolved operator offers (e.g. an OpDI yields
  // an Int buffer from Double inputs); everything else falls back to the
  // generic Double implementation `funDD`.
  private[fscape] def makeStream(args: Vec[StreamIn])(implicit b: stream.Builder): StreamOut = {
    val Vec(in1, in2) = args: @unchecked // arity 3 is enforced on construction/read
    val op0 = BinaryOp.Op(op)
    if (in1.isDouble || in2.isDouble) {
      op0 match {
        case opDI: BinaryOp.OpDI =>
          stream.BinaryOp[Double , BufD, Int , BufI](op0.name, opDI.funDI, in1 = in1.toDouble, in2 = in2.toDouble ): StreamOut
        case opDL: BinaryOp.OpDL =>
          stream.BinaryOp[Double , BufD, Long , BufL](op0.name, opDL.funDL, in1 = in1.toDouble , in2 = in2.toDouble): StreamOut
        case _ =>
          stream.BinaryOp[Double , BufD, Double, BufD](op0.name, op0 .funDD, in1 = in1.toDouble , in2 = in2.toDouble): StreamOut
      }
    } else if (in1.isLong || in2.isLong) {
      op0 match {
        case opLI: BinaryOp.OpLI =>
          stream.BinaryOp[Long , BufL, Int , BufI](op0.name, opLI.funLI, in1 = in1.toLong , in2 = in2.toLong ): StreamOut
        case opLL: BinaryOp.OpLL =>
          stream.BinaryOp[Long , BufL, Long , BufL](op0.name, opLL.funLL, in1 = in1.toLong , in2 = in2.toLong ): StreamOut
        case opLD: BinaryOp.OpLD =>
          stream.BinaryOp[Long , BufL, Double, BufD](op0.name, opLD.funLD, in1 = in1.toLong , in2 = in2.toLong ): StreamOut
        case _ =>
          stream.BinaryOp[Double , BufD, Double, BufD](op0.name, op0 .funDD, in1 = in1.toDouble , in2 = in2.toDouble): StreamOut
      }
    } else {
      assert (in1.isInt && in2.isInt)
      op0 match {
        case opII: BinaryOp.OpII =>
          stream.BinaryOp[Int , BufI, Int , BufI](op0.name, opII.funII, in1 = in1.toInt , in2 = in2.toInt ): StreamOut
        case opIL: BinaryOp.OpIL =>
          stream.BinaryOp[Int , BufI, Long , BufL](op0.name, opIL.funIL, in1 = in1.toInt , in2 = in2.toInt ): StreamOut
        case opID: BinaryOp.OpID =>
          stream.BinaryOp[Int , BufI, Double, BufD](op0.name, opID.funID, in1 = in1.toInt , in2 = in2.toInt ): StreamOut
        case _ =>
          stream.BinaryOp[Double , BufD, Double, BufD](op0.name, op0 .funDD, in1 = in1.toDouble , in2 = in2.toDouble): StreamOut
      }
    }
  }
} | Sciss/FScape-next | core/shared/src/main/scala/de/sciss/fscape/graph/BinaryOp.scala | Scala | agpl-3.0 | 23,130 |
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Equality
import org.scalactic.Uniformity
import org.scalactic.Entry
import org.scalactic.StringNormalizations._
import SharedHelpers._
import FailureMessages.decorateToStringValue
import scala.collection.JavaConverters._
import Matchers._
import exceptions.TestFailedException
class ListShouldContainOneElementOfLogicalAndSpec extends Spec {
// Equality that inverts `==` — used to verify the matchers really consult an
// explicitly provided or implicit Equality instead of universal equality.
val invertedStringEquality =
  new Equality[String] {
    def areEqual(a: String, b: Any): Boolean = a != b
  }

val invertedListOfStringEquality =
  new Equality[List[String]] {
    def areEqual(a: List[String], b: Any): Boolean = a != b
  }

// Equality that upper-cases the left-hand side before comparing, so e.g.
// "fum" is considered equal to "FUM".
val upperCaseStringEquality =
  new Equality[String] {
    def areEqual(a: String, b: Any): Boolean = a.toUpperCase == b
  }

val upperCaseListOfStringEquality =
  new Equality[List[String]] {
    def areEqual(a: List[String], b: Any): Boolean = a.map(_.toUpperCase) == b
  }

// Recursively upper-cases the string content of supported containers
// (lists, strings, chars, string pairs, java.util.Map entries); anything
// else is returned unchanged.
private def upperCase(value: Any): Any =
  value match {
    case l: List[_] => l.map(upperCase(_))
    case s: String => s.toUpperCase
    case c: Char => c.toString.toUpperCase.charAt(0)
    case (s1: String, s2: String) => (s1.toUpperCase, s2.toUpperCase)
    case e: java.util.Map.Entry[_, _] =>
      (e.getKey, e.getValue) match {
        case (k: String, v: String) => Entry(k.toUpperCase, v.toUpperCase)
        case _ => value
      }
    case _ => value
  }

//ADDITIONAL//

// Expected source-file name for checkMessageStackDepth assertions.
val fileName: String = "ListShouldContainOneElementOfLogicalAndSpec.scala"
object `a List` {
// Fixtures: a list that should match the "fum"-family element sets, and one
// that should not.
val fumList: List[String] = List("fum")
val toList: List[String] = List("to")

// Tests `contain oneElementOf (..) and contain oneElementOf (..)`: both sides
// must succeed; failure messages must report which side failed (and, when the
// first side passed, say so with ", but ...").
object `when used with (contain oneElementOf (..) and contain oneElementOf (..))` {

  def `should do nothing if valid, else throw a TFE with an appropriate error message` {
    fumList should (contain oneElementOf Seq("fee", "fie", "foe", "fum") and contain oneElementOf Seq("fie", "fee", "fum", "foe"))
    val e1 = intercept[TestFailedException] {
      fumList should (contain oneElementOf Seq("happy", "birthday", "to", "you") and contain oneElementOf Seq("fee", "fie", "foe", "fum"))
    }
    checkMessageStackDepth(e1, FailureMessages.didNotContainOneElementOf(fumList, Seq("happy", "birthday", "to", "you")), fileName, thisLineNumber - 2)
    val e2 = intercept[TestFailedException] {
      fumList should (contain oneElementOf Seq("fee", "fie", "foe", "fum") and contain oneElementOf Seq("happy", "birthday", "to", "you"))
    }
    checkMessageStackDepth(e2, FailureMessages.containedOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")) + ", but " + FailureMessages.didNotContainOneElementOf(fumList, Seq("happy", "birthday", "to", "you")), fileName, thisLineNumber - 2)
  }

  def `should use the implicit Equality in scope` {
    implicit val ise = upperCaseStringEquality
    fumList should (contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM") and contain oneElementOf Seq("FIE", "FEE", "FUM", "FOE"))
    val e1 = intercept[TestFailedException] {
      fumList should (contain oneElementOf Seq("fee", "fie", "foe", "fum") and contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM"))
    }
    checkMessageStackDepth(e1, FailureMessages.didNotContainOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
    val e2 = intercept[TestFailedException] {
      fumList should (contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM") and (contain oneElementOf Seq("fee", "fie", "foe", "fum")))
    }
    checkMessageStackDepth(e2, FailureMessages.containedOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")) + ", but " + FailureMessages.didNotContainOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
  }

  def `should use an explicitly provided Equality` {
    (fumList should (contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM") and contain oneElementOf Seq("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
    val e1 = intercept[TestFailedException] {
      (fumList should (contain oneElementOf Seq("fee", "fie", "foe", "fum") and contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
    }
    checkMessageStackDepth(e1, FailureMessages.didNotContainOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
    val e2 = intercept[TestFailedException] {
      (fumList should (contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM") and contain oneElementOf Seq("fee", "fie", "foe", "fum"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
    }
    checkMessageStackDepth(e2, FailureMessages.containedOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")) + ", but " + FailureMessages.didNotContainOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
    (fumList should (contain oneElementOf Seq(" FEE ", " FIE ", " FOE ", " FUM ") and contain oneElementOf Seq(" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
  }

  // Duplicate elements in the right-hand Seq are rejected eagerly with a
  // NotAllowedException, regardless of which operand carries the duplicate.
  def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
    val e1 = intercept[exceptions.NotAllowedException] {
      fumList should (contain oneElementOf Seq("fee", "fie", "foe", "fie", "fum") and contain oneElementOf Seq("fie", "fee", "fum", "foe"))
    }
    e1.failedCodeFileName.get should be (fileName)
    e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e1.message should be (Some(FailureMessages.oneElementOfDuplicate))
    val e2 = intercept[exceptions.NotAllowedException] {
      fumList should (contain oneElementOf Seq("fie", "fee", "fum", "foe") and contain oneElementOf Seq("fee", "fie", "foe", "fie", "fum"))
    }
    e2.failedCodeFileName.get should be (fileName)
    e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e2.message should be (Some(FailureMessages.oneElementOfDuplicate))
  }
}
// Tests `equal (..) and contain oneElementOf (..)`: the equality clause is
// checked first, then the containment clause; messages mirror that order.
object `when used with (equal (..) and contain oneElementOf (..))` {

  def `should do nothing if valid, else throw a TFE with an appropriate error message` {
    fumList should (equal (fumList) and contain oneElementOf Seq("fie", "fee", "fum", "foe"))
    val e1 = intercept[TestFailedException] {
      fumList should (equal (toList) and contain oneElementOf Seq("fee", "fie", "foe", "fum"))
    }
    checkMessageStackDepth(e1, FailureMessages.didNotEqual(fumList, toList), fileName, thisLineNumber - 2)
    val e2 = intercept[TestFailedException] {
      fumList should (equal (fumList) and contain oneElementOf Seq("happy", "birthday", "to", "you"))
    }
    checkMessageStackDepth(e2, FailureMessages.equaled(fumList, fumList) + ", but " + FailureMessages.didNotContainOneElementOf(fumList, Seq("happy", "birthday", "to", "you")), fileName, thisLineNumber - 2)
  }

  def `should use the implicit Equality in scope` {
    implicit val ise = upperCaseStringEquality
    fumList should (equal (fumList) and contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM"))
    val e1 = intercept[TestFailedException] {
      fumList should (equal (toList) and contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM"))
    }
    checkMessageStackDepth(e1, FailureMessages.didNotEqual(fumList, toList), fileName, thisLineNumber - 2)
    val e2 = intercept[TestFailedException] {
      fumList should (equal (fumList) and (contain oneElementOf Seq("fee", "fie", "foe", "fum")))
    }
    checkMessageStackDepth(e2, FailureMessages.equaled(fumList, fumList) + ", but " + FailureMessages.didNotContainOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
  }

  // The first `decided by` applies to `equal`, the second to `contain` —
  // the inverted list equality makes equal(toList) succeed for fumList.
  def `should use an explicitly provided Equality` {
    (fumList should (equal (toList) and contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
    val e1 = intercept[TestFailedException] {
      (fumList should (equal (fumList) and contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
    }
    checkMessageStackDepth(e1, FailureMessages.didNotEqual(fumList, fumList), fileName, thisLineNumber - 2)
    val e2 = intercept[TestFailedException] {
      (fumList should (equal (toList) and contain oneElementOf Seq("fee", "fie", "foe", "fum"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
    }
    checkMessageStackDepth(e2, FailureMessages.equaled(fumList, toList) + ", but " + FailureMessages.didNotContainOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
    (fumList should (equal (toList) and contain oneElementOf Seq(" FEE ", " FIE ", " FOE ", " FUM "))) (decided by invertedListOfStringEquality, after being lowerCased and trimmed)
  }

  def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
    val e1 = intercept[exceptions.NotAllowedException] {
      fumList should (equal (fumList) and contain oneElementOf Seq("fee", "fie", "foe", "fie", "fum"))
    }
    e1.failedCodeFileName.get should be (fileName)
    e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e1.message should be (Some(FailureMessages.oneElementOfDuplicate))
  }
}
object `when used with (be (..) and contain oneElementOf (..))` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
fumList should (be_== (fumList) and contain oneElementOf Seq("fie", "fee", "fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (be_== (toList) and contain oneElementOf Seq("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e1, FailureMessages.wasNotEqualTo(fumList, toList), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (be_== (fumList) and contain oneElementOf Seq("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e2, FailureMessages.wasEqualTo(fumList, fumList) + ", but " + FailureMessages.didNotContainOneElementOf(fumList, Seq("happy", "birthday", "to", "you")), fileName, thisLineNumber - 2)
}
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
fumList should (be_== (fumList) and contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM"))
val e1 = intercept[TestFailedException] {
fumList should (be_== (toList) and contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM"))
}
checkMessageStackDepth(e1, FailureMessages.wasNotEqualTo(fumList, toList), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (be_== (fumList) and (contain oneElementOf Seq("fee", "fie", "foe", "fum")))
}
checkMessageStackDepth(e2, FailureMessages.wasEqualTo(fumList, fumList) + ", but " + FailureMessages.didNotContainOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
}
def `should use an explicitly provided Equality` {
(fumList should (be_== (fumList) and contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (be_== (fumList) and contain oneElementOf Seq("fee", "fie", "foe", "fum"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, FailureMessages.wasEqualTo(fumList, fumList) + ", but " + FailureMessages.didNotContainOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (be_== (toList) and contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, FailureMessages.wasNotEqualTo(fumList, toList), fileName, thisLineNumber - 2)
(fumList should (be_== (fumList) and contain oneElementOf Seq(" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed)
}
// Duplicate elements in the oneElementOf RHS are rejected eagerly with NotAllowedException.
def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (be_== (fumList) and contain oneElementOf Seq("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(FailureMessages.oneElementOfDuplicate))
}
}
// Mirror of the previous group with the clause order reversed: contain first, equality second.
object `when used with (contain oneElementOf (..) and be (..))` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
fumList should (contain oneElementOf Seq("fie", "fee", "fum", "foe") and be_== (fumList))
// Contain passes, equality fails: message reports both clauses joined with ", but ".
val e1 = intercept[TestFailedException] {
fumList should (contain oneElementOf Seq("fee", "fie", "foe", "fum") and be_== (toList))
}
checkMessageStackDepth(e1, FailureMessages.containedOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")) + ", but " + FailureMessages.wasNotEqualTo(fumList, toList), fileName, thisLineNumber - 2)
// Contain fails first: only the contain failure is reported (short-circuit).
val e2 = intercept[TestFailedException] {
fumList should (contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM") and be_== (fumList))
}
checkMessageStackDepth(e2, FailureMessages.didNotContainOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 2)
}
// Implicit upper-case Equality flips which element sets match.
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
fumList should (contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM") and be_== (fumList))
val e1 = intercept[TestFailedException] {
fumList should (contain oneElementOf Seq("fie", "fee", "fum", "foe") and be_== (toList))
}
checkMessageStackDepth(e1, FailureMessages.didNotContainOneElementOf(fumList, Seq("fie", "fee", "fum", "foe")), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (contain oneElementOf Seq("fie", "fee", "fum", "foe") and (be_== (fumList)))
}
checkMessageStackDepth(e2, FailureMessages.didNotContainOneElementOf(fumList, Seq("fie", "fee", "fum", "foe")), fileName, thisLineNumber - 2)
}
// Explicit Equality via `decided by`; `after being` exercises the normalization path.
def `should use an explicitly provided Equality` {
(fumList should (contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM") and be_== (fumList))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (contain oneElementOf Seq("fie", "fee", "fum", "foe") and be_== (fumList))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, FailureMessages.didNotContainOneElementOf(fumList, Seq("fie", "fee", "fum", "foe")), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (contain oneElementOf Seq("FEE", "FIE", "FOE", "FUM") and be_== (toList))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, FailureMessages.containedOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")) + ", but " + FailureMessages.wasNotEqualTo(fumList, toList), fileName, thisLineNumber - 2)
(fumList should (contain oneElementOf Seq(" FEE ", " FIE ", " FOE ", " FUM ") and be_== (fumList))) (after being lowerCased and trimmed)
}
// Duplicate elements in the RHS are rejected eagerly with NotAllowedException.
def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain oneElementOf Seq("fee", "fie", "foe", "fie", "fum") and be_== (fumList))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(FailureMessages.oneElementOfDuplicate))
}
}
// Both clauses negated: the `and` succeeds only when the list contains NO element of either RHS.
object `when used with (not contain oneElementOf (..) and not contain oneElementOf (..))` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
fumList should (not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")) and not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")))
// First negated clause fails (fumList does contain one of these): only that failure is reported.
val e1 = intercept[TestFailedException] {
fumList should (not contain oneElementOf (Seq("fee", "fie", "foe", "fum")) and not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")))
}
checkMessageStackDepth(e1, FailureMessages.containedOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
// Second negated clause fails: message combines the passing first clause with the failure.
val e2 = intercept[TestFailedException] {
fumList should (not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))
}
checkMessageStackDepth(e2, FailureMessages.didNotContainOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")) + ", but " + FailureMessages.containedOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
}
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
fumList should (not contain oneElementOf (Seq("fee", "fie", "foe", "fum")) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))
val e1 = intercept[TestFailedException] {
fumList should (not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))
}
// Fix: e1 was intercepted but never asserted; verify its message and stack depth
// like every sibling test does (pattern at the explicit-Equality e1 below).
checkMessageStackDepth(e1, FailureMessages.containedOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 4)
val e2 = intercept[TestFailedException] {
fumList should (not contain oneElementOf (Seq("fee", "fie", "foe", "fum")) and (not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM"))))
}
checkMessageStackDepth(e2, FailureMessages.didNotContainOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")) + ", but " + FailureMessages.containedOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 2)
}
def `should use an explicitly provided Equality` {
(fumList should (not contain oneElementOf (Seq("fee", "fie", "foe", "fum")) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, FailureMessages.containedOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (not contain oneElementOf (Seq("fee", "fie", "foe", "fum")) and not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, FailureMessages.didNotContainOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")) + ", but " + FailureMessages.containedOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 2)
// NOTE(review): this final line uses the POSITIVE `contain` form inside the
// "not contain and not contain" group (siblings at the `not equal`/`not be` groups use
// `not contain` with non-matching " FUU " elements) — confirm against upstream whether
// this was intentional; left unchanged here since the assertion does pass as written.
(fumList should (contain oneElementOf Seq(" FEE ", " FIE ", " FOE ", " FUM ") and contain oneElementOf Seq(" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
// Duplicates in EITHER RHS trigger NotAllowedException.
def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not contain oneElementOf (Seq("fee", "fie", "foe", "fie", "fum")) and not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(FailureMessages.oneElementOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")) and not contain oneElementOf (Seq("fee", "fie", "foe", "fie", "fum")))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(FailureMessages.oneElementOfDuplicate))
}
}
// `not equal` composed with a negated contain clause.
object `when used with (not equal (..) and not contain oneElementOf (..))` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
fumList should (not equal (toList) and not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")))
// Equality clause fails first: only `equaled` is reported.
val e1 = intercept[TestFailedException] {
fumList should (not equal (fumList) and not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")))
}
checkMessageStackDepth(e1, FailureMessages.equaled(fumList, fumList), fileName, thisLineNumber - 2)
// Contain clause fails: combined ", but " message.
val e2 = intercept[TestFailedException] {
fumList should (not equal (toList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))
}
checkMessageStackDepth(e2, FailureMessages.didNotEqual(fumList, toList) + ", but " + FailureMessages.containedOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
}
// Implicit Equality applies only to the contain clause, not to `not equal`.
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
fumList should (not equal (toList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))
val e1 = intercept[TestFailedException] {
fumList should (not equal (fumList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))
}
checkMessageStackDepth(e1, FailureMessages.equaled(fumList, fumList), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not equal (toList) and (not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM"))))
}
checkMessageStackDepth(e2, FailureMessages.didNotEqual(fumList, toList) + ", but " + FailureMessages.containedOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 2)
}
// First `decided by` targets the equality (inverted: fumList != fumList holds),
// second targets the contain clause.
def `should use an explicitly provided Equality` {
(fumList should (not equal (fumList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not equal (fumList) and not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, FailureMessages.didNotEqual(fumList, fumList) + ", but " + FailureMessages.containedOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (not equal (toList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, FailureMessages.equaled(fumList, toList), fileName, thisLineNumber - 2)
(fumList should (not contain oneElementOf (Seq(" FEE ", " FIE ", " FOE ", " FUU ")) and not contain oneElementOf (Seq(" FEE ", " FIE ", " FOE ", " FUU ")))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
// Duplicates in the RHS trigger NotAllowedException.
def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not equal (toList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fie", "fum")))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(FailureMessages.oneElementOfDuplicate))
}
}
// `not be_==` composed with a negated contain clause.
object `when used with (not be (..) and not contain oneElementOf (..))` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
fumList should (not be_== (toList) and not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")))
// Identity clause fails first: only `wasEqualTo` is reported.
val e1 = intercept[TestFailedException] {
fumList should (not be_== (fumList) and not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")))
}
checkMessageStackDepth(e1, FailureMessages.wasEqualTo(fumList, fumList), fileName, thisLineNumber - 2)
// Contain clause fails: combined ", but " message.
val e2 = intercept[TestFailedException] {
fumList should (not be_== (toList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))
}
checkMessageStackDepth(e2, FailureMessages.wasNotEqualTo(fumList, toList) + ", but " + FailureMessages.containedOneElementOf(fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
}
// Implicit Equality applies only to the contain clause; `be_==` stays reference/default equality.
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
fumList should (not be_== (toList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))
val e1 = intercept[TestFailedException] {
fumList should (not be_== (fumList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))
}
checkMessageStackDepth(e1, FailureMessages.wasEqualTo(fumList, fumList), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not be_== (toList) and (not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM"))))
}
checkMessageStackDepth(e2, FailureMessages.wasNotEqualTo(fumList, toList) + ", but " + FailureMessages.containedOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 2)
}
// Single `decided by` here: it applies to the contain clause (be_== takes no Equality).
def `should use an explicitly provided Equality` {
(fumList should (not be_== (toList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not be_== (toList) and not contain oneElementOf (Seq("FEE", "FIE", "FOE", "FUM")))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, FailureMessages.wasNotEqualTo(fumList, toList) + ", but " + FailureMessages.containedOneElementOf(fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (not be_== (fumList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fum")))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, FailureMessages.wasEqualTo(fumList, fumList), fileName, thisLineNumber - 2)
(fumList should (not contain oneElementOf (Seq(" FEE ", " FIE ", " FOE ", " FUU ")) and not contain oneElementOf (Seq(" FEE ", " FIE ", " FOE ", " FUU ")))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
// Duplicates in the RHS trigger NotAllowedException.
def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not be_== (toList) and not contain oneElementOf (Seq("fee", "fie", "foe", "fie", "fum")))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(FailureMessages.oneElementOfDuplicate))
}
}
}
object `collection of Lists` {
// Shared fixtures for the `collection of Lists` examples below.
// `list1s`/`hiLists`/`toLists` are homogeneous; `lists` diverges at index 2 (List(2))
// and `listsNil` at index 2 (Nil), so `all`-style inspections can fail at a known index.
val list1s: Vector[List[Int]] = Vector(List(1), List(1), List(1))
val lists: Vector[List[Int]] = Vector(List(1), List(1), List(2))
val nils: Vector[List[Int]] = Vector(Nil, Nil, Nil)
val listsNil: Vector[List[Int]] = Vector(List(1), List(1), Nil)
val hiLists: Vector[List[String]] = Vector(List("hi"), List("hi"), List("hi"))
val toLists: Vector[List[String]] = Vector(List("to"), List("to"), List("to"))
// Builds the expected 'all' inspection failure message: the per-element failure at
// `index` (with file:line provenance) followed by the decorated collection under test.
def allErrMsg(index: Int, message: String, lineNumber: Int, left: Any): String =
"'all' inspection failed, because: \\n" +
" at index " + index + ", " + message + " (" + fileName + ":" + (lineNumber) + ") \\n" +
"in " + decorateToStringValue(left)
// Collection-level inspections (`all`, `atLeast`, `atMost`, `no`) over the fixtures,
// combining two positive contain clauses with `and`.
object `when used with (contain oneElementOf (..) and contain oneElementOf (..))` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
all (list1s) should (contain oneElementOf Seq(3, 2, 1) and contain oneElementOf Seq(1, 3, 4))
atLeast (2, lists) should (contain oneElementOf Seq(3, 1, 5) and contain oneElementOf Seq(1, 3, 4))
atMost (2, lists) should (contain oneElementOf Seq(3, 2, 8) and contain oneElementOf Seq(2, 3, 4))
no (lists) should (contain oneElementOf Seq(3, 6, 9) and contain oneElementOf Seq(3, 4, 5))
no (nils) should (contain oneElementOf Seq(1, 2, 8) and contain oneElementOf Seq(1, 3, 4))
no (listsNil) should (contain oneElementOf Seq(3, 8, 5) and contain oneElementOf Seq(3, 4, 5))
// lists(2) == List(2): first clause fails at index 2.
val e1 = intercept[TestFailedException] {
all (lists) should (contain oneElementOf Seq(1, 6, 8) and contain oneElementOf Seq(1, 3, 4))
}
checkMessageStackDepth(e1, allErrMsg(2, FailureMessages.didNotContainOneElementOf(lists(2), Seq(1, 6, 8)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
// First clause passes at index 2, second fails: combined ", but " message.
val e2 = intercept[TestFailedException] {
all (lists) should (contain oneElementOf Seq(1, 2, 8) and contain oneElementOf Seq(1, 3, 4))
}
checkMessageStackDepth(e2, allErrMsg(2, FailureMessages.containedOneElementOf(lists(2), Seq(1, 2, 8)) + ", but " + FailureMessages.didNotContainOneElementOf(lists(2), Seq(1, 3, 4)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
// Empty lists contain nothing: fails at index 0.
val e3 = intercept[TestFailedException] {
all (nils) should (contain oneElementOf Seq("hi", "hello") and contain oneElementOf Seq("ho", "hey", "howdy"))
}
checkMessageStackDepth(e3, allErrMsg(0, FailureMessages.didNotContainOneElementOf(nils(0), Seq("hi", "hello")), thisLineNumber - 2, nils), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (hiLists) should (contain oneElementOf Seq("hi", "hello") and contain oneElementOf Seq("ho", "hey", "howdy"))
}
checkMessageStackDepth(e4, allErrMsg(0, FailureMessages.containedOneElementOf(hiLists(0), Seq("hi", "hello")) + ", but " + FailureMessages.didNotContainOneElementOf(hiLists(0), Seq("ho", "hey", "howdy")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e5 = intercept[TestFailedException] {
all (listsNil) should (contain oneElementOf Seq(1, 3, 4) and contain oneElementOf Seq(1, 3, Nil))
}
checkMessageStackDepth(e5, allErrMsg(2, FailureMessages.didNotContainOneElementOf(listsNil(2), Seq(1, 3, 4)), thisLineNumber - 2, listsNil), fileName, thisLineNumber - 2)
val e6 = intercept[TestFailedException] {
all (lists) should (contain oneElementOf Seq(1, 2, 8) and contain oneElementOf Seq(1, 3, 4))
}
checkMessageStackDepth(e6, allErrMsg(2, FailureMessages.containedOneElementOf(lists(2), Seq(1, 2, 8)) + ", but " + FailureMessages.didNotContainOneElementOf(lists(2), Seq(1, 3, 4)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
}
// Implicit upper-case Equality applies to every element comparison in both clauses.
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
all (hiLists) should (contain oneElementOf Seq("HI", "HE") and contain oneElementOf Seq("HI", "HE"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (contain oneElementOf Seq("hi", "he") and contain oneElementOf Seq("HI", "HE"))
}
checkMessageStackDepth(e1, allErrMsg(0, FailureMessages.didNotContainOneElementOf(hiLists(0), Seq("hi", "he")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (contain oneElementOf Seq("HI", "HE") and contain oneElementOf Seq("hi", "he"))
}
checkMessageStackDepth(e2, allErrMsg(0, FailureMessages.containedOneElementOf(hiLists(0), Seq("HI", "HE")) + ", but " + FailureMessages.didNotContainOneElementOf(hiLists(0), Seq("hi", "he")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
// Each clause gets its own explicit Equality via the two `decided by` arguments.
def `should use an explicitly provided Equality` {
(all (hiLists) should (contain oneElementOf Seq("HI", "HE") and contain oneElementOf Seq("HI", "HE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (contain oneElementOf Seq("hi", "he") and contain oneElementOf Seq("HI", "HE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, FailureMessages.didNotContainOneElementOf(hiLists(0), Seq("hi", "he")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (contain oneElementOf Seq("HI", "HE") and contain oneElementOf Seq("hi", "he"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, allErrMsg(0, FailureMessages.containedOneElementOf(hiLists(0), Seq("HI", "HE")) + ", but " + FailureMessages.didNotContainOneElementOf(hiLists(0), Seq("hi", "he")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
// Duplicates in EITHER RHS trigger NotAllowedException.
def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (contain oneElementOf Seq(3, 2, 2, 1) and contain oneElementOf Seq(1, 3, 4))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(FailureMessages.oneElementOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
all (list1s) should (contain oneElementOf Seq(1, 3, 4) and contain oneElementOf Seq(3, 2, 2, 1))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(FailureMessages.oneElementOfDuplicate))
}
}
// Collection-level inspections combining `be_==` with a positive contain clause.
object `when used with (be (..) and contain oneElementOf (..))` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
all (list1s) should (be_== (List(1)) and contain oneElementOf Seq(1, 3, 4))
atLeast (2, lists) should (be_== (List(1)) and contain oneElementOf Seq(1, 3, 4))
atMost (2, lists) should (be_== (List(1)) and contain oneElementOf Seq(2, 3, 4))
no (lists) should (be_== (List(8)) and contain oneElementOf Seq(3, 4, 5))
no (nils) should (be_== (List(8)) and contain oneElementOf Seq(1, 3, 4))
no (listsNil) should (be_== (List(8)) and contain oneElementOf Seq(3, 4, 5))
// Equality failure at index 2 (lists(2) == List(2)): decorated values in the message.
val e1 = intercept[TestFailedException] {
all (lists) should (be_== (List(1)) and contain oneElementOf Seq(1, 3, 4))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(List(2)) + " was not equal to " + decorateToStringValue(List(1)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
// Equality passes, contain fails: combined ", but " message.
val e2 = intercept[TestFailedException] {
all (list1s) should (be_== (List(1)) and contain oneElementOf Seq(2, 3, 8))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(List(1)) + " was equal to " + decorateToStringValue(List(1)) + ", but " + FailureMessages.didNotContainOneElementOf(List(1), Seq(2, 3, 8)), thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (nils) should (be_== (List("hey")) and contain oneElementOf Seq("ho", "hey", "howdy"))
}
checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(List()) + " was not equal to " + decorateToStringValue(List("hey")), thisLineNumber - 2, nils), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (hiLists) should (be_== (List("hi")) and contain oneElementOf Seq("ho", "hey", "howdy"))
}
checkMessageStackDepth(e4, allErrMsg(0, decorateToStringValue(List("hi")) + " was equal to " + decorateToStringValue(List("hi")) + ", but " + FailureMessages.didNotContainOneElementOf(List("hi"), Seq("ho", "hey", "howdy")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e5 = intercept[TestFailedException] {
all (listsNil) should (be_== (List(1)) and contain oneElementOf Seq(1, 3, Nil))
}
checkMessageStackDepth(e5, allErrMsg(2, decorateToStringValue(List()) + " was not equal to " + decorateToStringValue(List(1)), thisLineNumber - 2, listsNil), fileName, thisLineNumber - 2)
val e6 = intercept[TestFailedException] {
all (list1s) should (be_== (List(1)) and contain oneElementOf Seq(2, 3, 8))
}
checkMessageStackDepth(e6, allErrMsg(0, decorateToStringValue(List(1)) + " was equal to " + decorateToStringValue(List(1)) + ", but " + FailureMessages.didNotContainOneElementOf(List(1), Seq(2, 3, 8)), thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
}
// Implicit Equality applies only to the contain clause; `be_==` stays default equality.
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
all (hiLists) should (be_== (List("hi")) and contain oneElementOf Seq("HI", "HE"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (be_== (List("ho")) and contain oneElementOf Seq("hi", "he"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(List("hi")) + " was not equal to " + decorateToStringValue(List("ho")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (be_== (List("hi")) and contain oneElementOf Seq("hi", "he"))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(List("hi")) + " was equal to " + decorateToStringValue(List("hi")) + ", but " + FailureMessages.didNotContainOneElementOf(hiLists(0), Seq("hi", "he")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
// Single `decided by` argument: it targets the contain clause (be_== takes no Equality).
def `should use an explicitly provided Equality` {
(all (hiLists) should (be_== (List("hi")) and contain oneElementOf Seq("HI", "HE"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (be_== (List("ho")) and contain oneElementOf Seq("hi", "he"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(List("hi")) + " was not equal to " + decorateToStringValue(List("ho")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (be_== (List("hi")) and contain oneElementOf Seq("hi", "he"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(List("hi")) + " was equal to " + decorateToStringValue(List("hi")) + ", but " + FailureMessages.didNotContainOneElementOf(hiLists(0), Seq("hi", "he")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
// Duplicates in the RHS trigger NotAllowedException.
def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (be_== (List(1)) and contain oneElementOf Seq(3, 2, 2, 1))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(FailureMessages.oneElementOfDuplicate))
}
}
// Collection-level inspections with both contain clauses negated.
object `when used with (not contain oneElementOf (..) and not contain oneElementOf (..))` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
all (list1s) should (not contain oneElementOf (Seq(3, 2, 8)) and not contain oneElementOf (Seq(8, 3, 4)))
atLeast (2, lists) should (not contain oneElementOf (Seq(3, 8, 5)) and not contain oneElementOf (Seq(8, 3, 4)))
// NOTE: this atMost line intentionally mixes a negated and a positive contain clause.
atMost (2, lists) should (not contain oneElementOf (Seq(3, 6, 8)) and contain oneElementOf Seq(5, 3, 4))
no (lists) should (not contain oneElementOf (Seq(1, 2, 9)) and not contain oneElementOf (Seq(2, 1, 5)))
// First negated clause fails at index 2 (lists(2) contains 2).
val e1 = intercept[TestFailedException] {
all (lists) should (not contain oneElementOf (Seq(2, 6, 8)) and not contain oneElementOf (Seq(2, 3, 4)))
}
checkMessageStackDepth(e1, allErrMsg(2, FailureMessages.containedOneElementOf(lists(2), Seq(2, 6, 8)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
// Second negated clause fails: combined ", but " message.
val e2 = intercept[TestFailedException] {
all (lists) should (not contain oneElementOf (Seq(3, 6, 8)) and not contain oneElementOf (Seq(2, 3, 4)))
}
checkMessageStackDepth(e2, allErrMsg(2, FailureMessages.didNotContainOneElementOf(lists(2), Seq(3, 6, 8)) + ", but " + FailureMessages.containedOneElementOf(lists(2), Seq(2, 3, 4)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (hiLists) should (not contain oneElementOf (Seq("hi", "hello")) and not contain oneElementOf (Seq("ho", "hey", "howdy")))
}
checkMessageStackDepth(e3, allErrMsg(0, FailureMessages.containedOneElementOf(hiLists(0), Seq("hi", "hello")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (hiLists) should (not contain oneElementOf (Seq("ho", "hey", "howdy")) and not contain oneElementOf (Seq("hi", "hello")))
}
checkMessageStackDepth(e4, allErrMsg(0, FailureMessages.didNotContainOneElementOf(hiLists(0), Seq("ho", "hey", "howdy")) + ", but " + FailureMessages.containedOneElementOf(hiLists(0), Seq("hi", "hello")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
// Implicit upper-case Equality: lower-case RHS no longer matches, upper-case does.
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
all (hiLists) should (not contain oneElementOf (Seq("hi", "he")) and not contain oneElementOf (Seq("hi", "he")))
val e1 = intercept[TestFailedException] {
all (hiLists) should (not contain oneElementOf (Seq("HI", "HE")) and not contain oneElementOf (Seq("HO", "HEY", "HOWDY")))
}
checkMessageStackDepth(e1, allErrMsg(0, FailureMessages.containedOneElementOf(hiLists(0), Seq("HI", "HE")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (not contain oneElementOf (Seq("HO", "HEY", "HOWDY")) and not contain oneElementOf (Seq("HI", "HE")))
}
checkMessageStackDepth(e2, allErrMsg(0, FailureMessages.didNotContainOneElementOf(hiLists(0), Seq("HO", "HEY", "HOWDY")) + ", but " + FailureMessages.containedOneElementOf(hiLists(0), Seq("HI", "HE")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
// Two `decided by` arguments, one per clause.
def `should use an explicitly provided Equality` {
(all (hiLists) should (not contain oneElementOf (Seq("hi", "he")) and not contain oneElementOf (Seq("hi", "he")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (not contain oneElementOf (Seq("HI", "HE")) and not contain oneElementOf (Seq("HO", "HEY", "HOWDY")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, FailureMessages.containedOneElementOf(hiLists(0), Seq("HI", "HE")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (not contain oneElementOf (Seq("HO", "HEY", "HOWDY")) and not contain oneElementOf (Seq("HI", "HE")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, allErrMsg(0, FailureMessages.didNotContainOneElementOf(hiLists(0), Seq("HO", "HEY", "HOWDY")) + ", but " + FailureMessages.containedOneElementOf(hiLists(0), Seq("HI", "HE")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
// Duplicates in EITHER RHS trigger NotAllowedException.
def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not contain oneElementOf (Seq(3, 2, 2, 1)) and not contain oneElementOf (Seq(8, 3, 4)))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(FailureMessages.oneElementOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not contain oneElementOf (Seq(8, 3, 4)) and not contain oneElementOf (Seq(3, 2, 2, 1)))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(FailureMessages.oneElementOfDuplicate))
}
}
object `when used with (not be (..) and not contain oneElementOf (..))` {
// `not be_==` with a negated contain clause, over the collection fixtures.
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
all (list1s) should (not be_== (List(2)) and not contain oneElementOf (Seq(8, 3, 4)))
atLeast (2, lists) should (not be_== (List(3)) and not contain oneElementOf (Seq(8, 3, 4)))
// NOTE: this atMost line intentionally mixes a negated identity with a positive contain clause.
atMost (2, lists) should (not be_== (List(3)) and contain oneElementOf (Seq(5, 3, 4)))
no (list1s) should (not be_== (List(1)) and not contain oneElementOf (Seq(2, 1, 5)))
// Identity clause fails at index 2 (lists(2) == List(2)).
val e1 = intercept[TestFailedException] {
all (lists) should (not be_== (List(2)) and not contain oneElementOf (Seq(2, 3, 4)))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(List(2)) + " was equal to " + decorateToStringValue(List(2)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
// Identity passes, negated contain fails: combined ", but " message.
val e2 = intercept[TestFailedException] {
all (lists) should (not be_== (List(3)) and not contain oneElementOf (Seq(2, 3, 4)))
}
checkMessageStackDepth(e2, allErrMsg(2, decorateToStringValue(List(2)) + " was not equal to " + decorateToStringValue(List(3)) + ", but " + FailureMessages.containedOneElementOf(lists(2), Seq(2, 3, 4)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (hiLists) should (not be_== (List("hi")) and not contain oneElementOf (Seq("ho", "hey", "howdy")))
}
checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(List("hi")) + " was equal to " + decorateToStringValue(List("hi")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (hiLists) should (not be_== (List("ho")) and not contain oneElementOf (Seq("hi", "hello")))
}
checkMessageStackDepth(e4, allErrMsg(0, decorateToStringValue(List("hi")) + " was not equal to " + decorateToStringValue(List("ho")) + ", but " + FailureMessages.containedOneElementOf(hiLists(0), Seq("hi", "hello")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
all (hiLists) should (not be_== (List("ho")) and not contain oneElementOf (Seq("hi", "he")))
val e1 = intercept[TestFailedException] {
all (hiLists) should (not be_== (List("hi")) and not contain oneElementOf (Seq("HI", "HE")))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(List("hi")) + " was equal to " + decorateToStringValue(List("hi")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (not be_== (List("ho")) and not contain oneElementOf (Seq("HI", "HE")))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(List("hi")) + " was not equal to " + decorateToStringValue(List("ho")) + ", but " + FailureMessages.containedOneElementOf(hiLists(0), Seq("HI", "HE")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
def `should use an explicitly provided Equality` {
(all (hiLists) should (not be_== (List("ho")) and not contain oneElementOf (Seq("hi", "he")))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (not be_== (List("hi")) and not contain oneElementOf (Seq("HI", "HE")))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(List("hi")) + " was equal to " + decorateToStringValue(List("hi")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (not be_== (List("ho")) and not contain oneElementOf (Seq("HI", "HE")))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(List("hi")) + " was not equal to " + decorateToStringValue(List("ho")) + ", but " + FailureMessages.containedOneElementOf(hiLists(0), Seq("HI", "HE")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not be_== (List(2)) and not contain oneElementOf (Seq(3, 2, 2, 1)))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(FailureMessages.oneElementOfDuplicate))
}
}
}
}
| SRGOM/scalatest | scalatest-test/src/test/scala/org/scalatest/ListShouldContainOneElementOfLogicalAndSpec.scala | Scala | apache-2.0 | 51,547 |
import sbt._
import Keys._
import java.util.regex.Pattern
object Status {
lazy val publishStatus = SettingKey[String]("publish-status")
// sbt 0.12-style settings (`<<=` operator): derive snapshot-ness from the
// version string and expose a matching publish-status label.
def settings: Seq[Setting[_]] = Seq(
  // NOTE(review): `v.contains("-")` is redundant — snapshotQualifier's regex
  // already requires a '-'; harmless, but could be dropped.
  isSnapshot <<= version(v => v.contains("-") && snapshotQualifier(v)),
  publishStatus <<= isSnapshot { snap => if (snap) "snapshots" else "releases" },
  commands += stampVersion
)
// Registers the `stamp-version` command: rewrites `version in ThisBuild`,
// replacing a trailing -SNAPSHOT with a build timestamp (see `stamp`).
def stampVersion = Command.command("stamp-version") { state =>
  Project.extract(state).append((version in ThisBuild ~= stamp) :: Nil, state)
}
/**
 * Replaces a trailing `-SNAPSHOT` in `v` with a `-yyyyMMdd-HHmmss` build
 * timestamp; fails fast for non-snapshot versions.
 *
 * Fix: `error(...)` is the long-deprecated `Predef.error`; replaced with the
 * equivalent `sys.error` (same RuntimeException behavior).
 */
def stamp(v: String): String =
  if (v endsWith Snapshot)
    (v stripSuffix Snapshot) + "-" + timestampString(System.currentTimeMillis)
  else
    sys.error("Release version '" + v + "' cannot be stamped")
/** Formats `time` (epoch millis) as `yyyyMMdd-HHmmss` in the default time zone. */
def timestampString(time: Long): String = {
  val stampFormat = new java.text.SimpleDateFormat("yyyyMMdd-HHmmss")
  stampFormat.format(new java.util.Date(time))
}
final val Snapshot = "-SNAPSHOT"
// NOte: This moved into sbt itself... But we need to add semantic knowledge of how
// we stamp our nightlies.
/** True when `v` carries a snapshot qualifier, e.g. `1.0-SNAPSHOT` or `1.0-M1-SNAPSHOT-n`. */
def snapshotQualifier(v: String) = v.matches(""".+-.*SNAPSHOT.*""")
} | niktrop/sbt | project/Status.scala | Scala | bsd-3-clause | 1,135 |
package org.perftester.process.compiler
import scala.reflect.internal.util.Position
import scala.tools.nsc.reporters.Reporter
object Reporters {
  // Despite the name, this reporter does not discard diagnostics: every
  // message is printed to stdout with its severity and position.
  // NOTE(review): the original comment claimed "we are ignoring all", which
  // contradicts the println below — confirm which behavior is intended.
  object noInfo extends Reporter {
    override protected def info0(pos: Position,
                                 msg: String,
                                 severity: this.Severity,
                                 force: Boolean): Unit = {
      println(s"[$severity] $pos: $msg")
    }
  }
}
| rorygraves/perf_tester | src/main/scala/org/perftester/process/compiler/Reporters.scala | Scala | apache-2.0 | 469 |
package collins.models
import org.specs2._
import specification._
import collins.models.shared.SortDirection._
import java.sql.Timestamp
import play.api.test.WithApplication
// Exercises AssetDistanceSorter: fixture creation (assets plus meta values),
// distribution sort in both directions, and plain name-based sorting.
// Runs sequentially because the examples share persisted database state.
class AssetDistanceSorterSpec extends mutable.Specification {

  args(sequential = true)

  "Create a meta" should {
    "for outlets" in new WithApplication {
      val ms = new mocksorter {}
      // Persist each mock asset and its meta values; a failed create is
      // retried once after a pause (works around a racy create failure).
      ms.assetValues.foreach { case(assetTag, metaList) =>
        Asset.findByTag(assetTag.toString.toLowerCase).getOrElse {
          val asset =
            Asset.create(Asset(assetTag.toString.toLowerCase, Status.Unallocated.get, AssetType.ServerNode.get))
          metaList.foreach { case (value, assetMetaTag) =>
            AssetMeta.findOrCreateFromName(assetMetaTag)
            val meta = AssetMeta.findByName(assetMetaTag).get
            val mv = AssetMetaValue(asset, meta.id, value.toString)
            try {
              AssetMetaValue.create(mv)
            } catch {
              case e: RuntimeException =>
                Thread.sleep(1000)
                AssetMetaValue.create(mv)
            }
          }
        }
      }
    }
  }

  "Asset distance sorter" should {
    "during sorting" in new WithApplication {
      // Expected orders below are fixed by the bit patterns in mocksorter.
      "using sparse" in new mocksorter {
        val expected = List("e","b","d","c","a")
        val sortedAssets = AssetDistanceSorter.distributionSort(
          targetAsset,
          similarAssets,
          SortAsc,
          sortConfig)
        sortedAssets.map{_.tag} must_== expected
      }
      "using dense" in new mocksorter {
        val expected = List("a","b","c","d","e")
        val sortedAssets = AssetDistanceSorter.distributionSort(
          targetAsset,
          similarAssets,
          SortDesc,
          sortConfig)
        sortedAssets.map{_.tag} must_== expected
      }
    }
  }

  // Shared fixture: six assets ("t" is the sort target) whose meta values
  // G/H/I form distinct 3-bit patterns used as distance coordinates.
  trait mocksorter extends Scope {
    val sortParams = List("G", "H", "I")
    val sortValues = List(
      ("t",List(0,0,0)),
      ("a",List(0,0,1)),
      ("b",List(0,1,0)),
      ("c",List(0,1,1)),
      ("d",List(1,0,0)),
      ("e",List(1,0,1))
    )
    // NOTE(review): `.reverse.toSet` — a Set has no order, so the reverse
    // appears redundant; confirm distributionSort treats this as unordered.
    val sortConfig = sortParams.reverse.toSet
    val assetValues = sortValues.map{case (assetTag, values) => (assetTag, values.zip(sortParams))}
    def targetAsset = Asset.findByTag(sortValues.head._1).get
    def similarAssets = sortValues.tail.map{t => Asset.findByTag(t._1).get}
  }

  "MockAssetNameEval" should {
    "return correct distance" in {
      val a1 = new Asset("1", 0, 0, new Timestamp(System.currentTimeMillis), None, None)
      val a2 = new Asset("2", 0, 0, new Timestamp(System.currentTimeMillis), None, None)
      val nameeval = new MockAssetNameEval
      nameeval.distance(a1, a2) must_== 1
    }
  }

  "AssetDistanceSorter" should {
    "sort named assets in ascending order" in {
      val assets = (1 to 20).map { i =>
        new Asset(i.toString, 0, 0, new Timestamp(System.currentTimeMillis), None, None)
      }
      assets must_== AssetDistanceSorter.sort(
        new Asset("0", 0, 0, new Timestamp(System.currentTimeMillis), None, None),
        assets,
        AssetSort.Name,
        SortAsc
      )
    }
    "sort permuted named assets in ascending order" in {
      val assets1 = (11 to 20).map { i =>
        new Asset(i.toString, 0, 0, new Timestamp(System.currentTimeMillis), None, None)
      }
      val assets2 = (1 to 10).map { i =>
        new Asset(i.toString, 0, 0, new Timestamp(System.currentTimeMillis), None, None)
      }
      (assets2 ++ assets1) must_== AssetDistanceSorter.sort(
        new Asset("0", 0, 0, new Timestamp(System.currentTimeMillis), None, None),
        (assets1 ++ assets2),
        AssetSort.Name,
        SortAsc)
    }
    "sort named assets in descending order" in {
      val assets = (1 to 20).map { i =>
        new Asset(i.toString, 0, 0, new Timestamp(System.currentTimeMillis), None, None)
      }
      assets.reverse must_== AssetDistanceSorter.sort(
        new Asset("0", 0, 0, new Timestamp(System.currentTimeMillis), None, None),
        assets,
        AssetSort.Name,
        SortDesc)
    }
  } // AssetDistanceSorter should
}
| funzoneq/collins | test/collins/models/AssetDistanceSorterSpec.scala | Scala | apache-2.0 | 4,414 |
// Deliberate minimal example: a case class declared WITHOUT a parameter list.
// This syntax was deprecated (Scala 2.10) in favor of `case class L()`;
// the example exists to show the deprecated form, so do not "fix" it.
case class L
| grzegorzbalcerek/scala-book-examples | examples/CaseClassNoParameterList.scala | Scala | mit | 13 |
/**
* Copyright 2015 ICT.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.ac.ict.acs.netflow.load.master
import java.nio.ByteBuffer
import cn.ac.ict.acs.netflow.JobMessages.{LoadInfo, AllLoadersAvailable, GetAllLoaders}
import cn.ac.ict.acs.netflow.load.master.CommandSet.CmdStruct
import cn.ac.ict.acs.netflow.metrics.MetricsSystem
import org.joda.time.DateTime
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.Await
import scala.concurrent.duration._
import akka.actor._
import akka.pattern.ask
import akka.remote.RemotingLifecycleEvent
import akka.serialization.SerializationExtension
import cn.ac.ict.acs.netflow._
import cn.ac.ict.acs.netflow.load.{ CombineStatus, LoadMessages }
import cn.ac.ict.acs.netflow.util._
import cn.ac.ict.acs.netflow.ha.{ LeaderElectionAgent, MonarchyLeaderAgent, LeaderElectable }
class LoadMaster(masterHost: String, masterPort: Int, webUiPort: Int, val conf: NetFlowConf)
extends Actor with ActorLogReceive with LeaderElectable with Logging {
import DeployMessages._
import LoadMasterMessages._
import LoadMessages._
import MasterMessages._
import ConfigurationMessages._
import context.dispatcher
// the interval for checking workes and receivers
val WORKER_TIMEOUT = conf.getLong("netflow.LoadWorker.timeout", 60) * 1000
// Remove a dead worker after given interval
val REAPER_ITERATIONS = conf.getInt("netflow.dead.worker.persistence", 15)
// Master recovery mode
val RECOVERY_MODE = conf.get("netflow.deploy.recoveryMode", "NONE")
// This may contain dead workers and receivers
val workers = new mutable.HashSet[LoadWorkerInfo]
// Current alive workers and receivers
val idToWorker = new mutable.HashMap[String, LoadWorkerInfo]
// Current alive workers and receivers
val addressToWorker = new mutable.HashMap[Address, LoadWorkerInfo]
Utils.checkHost(masterHost, "Expected hostname")
val masterMetricsSystem = MetricsSystem.createMetricsSystem("master", conf)
val masterSource = new LoadMasterSource(this)
val loadMasterUrl = "netflow-load://" + masterHost + ":" + masterPort
var loadMasterWebUIUrl: String = _
var state = LoadMasterRecoveryState.STANDBY
var persistenceEngine: MasterPersistenceEngine = _
var leaderElectionAgent: LeaderElectionAgent = _
// load master service
val loadServer = new MasterService(self, conf)
/**
* about balance
*/
// workerIP => (IP,port)
val workerToPort = new mutable.HashMap[String, (String, Int)]()
// worker : buffer used rate[0,100]
val workerToBufferRate = new mutable.HashMap[String, Double]()
// worker : receiver => 1 : n
val workerToCollectors = new mutable.HashMap[String, ArrayBuffer[String]]()
// receiver : worker => 1 : n
val collectorToWorkers = new mutable.HashMap[String, ArrayBuffer[String]]()
val workerToStreamingPort = new mutable.HashMap[String, Int]()
private val halfLimit = 0.5
private val warnLimit = 0.7
// combine parquet
private var combineParquetFinished: Boolean = false
// when there is no worker registers in cluster,
// we put the whole request receiver into waitQueue
val waitQueue = new mutable.HashSet[String]()
// Actor startup: subscribe to remoting lifecycle events, schedule the
// periodic dead-worker reaper, start metrics, pick the HA backend
// (ZooKeeper persistence + leader election, or a no-op "monarchy" leader),
// and start the collector-facing TCP service.
override def preStart(): Unit = {
  logInfo(s"Starting NetFlow LoadMaster at $loadMasterUrl")
  logInfo(s"Running NetFlow version ${cn.ac.ict.acs.netflow.NETFLOW_VERSION}")
  // Listen for remote client disconnection events, since they don't go through Akka's watch()
  context.system.eventStream.subscribe(self, classOf[RemotingLifecycleEvent])
  // TODO: a pseudo webuiurl here
  loadMasterWebUIUrl = "http://" + masterHost + ":" + webUiPort
  // Reap workers whose heartbeat has expired, every WORKER_TIMEOUT millis.
  context.system.scheduler.schedule(0.millis, WORKER_TIMEOUT.millis, self, CheckForWorkerTimeOut)
  masterMetricsSystem.registerSource(masterSource)
  masterMetricsSystem.start()
  val (persistenceEngine_, leaderElectionAgent_) =
    RECOVERY_MODE match {
      case "ZOOKEEPER" =>
        logInfo("Persisting recovery state to ZooKeeper")
        val zkFactory =
          new ZKRecoveryModeFactory(conf, SerializationExtension(context.system))
        (zkFactory.createPersistenceEngine(), zkFactory.createLeaderElectionAgent(this))
      case _ =>
        logInfo("No state persisted as a MonarchyLeader")
        (new LoadMasterBHPersistenceEngine(), new MonarchyLeaderAgent(this))
    }
  persistenceEngine = persistenceEngine_
  leaderElectionAgent = leaderElectionAgent_
  // start the receiver master service
  loadServer.start()
}
// Akka restart hook: log the failure; super.preRestart also runs postStop().
override def preRestart(reason: Throwable, message: Option[Any]) {
  super.preRestart(reason, message) // calls postStop()!
  logError("LoadMaster actor restarted due to exception", reason)
}
// Shutdown: flush and stop metrics, then release HA resources.
override def postStop(): Unit = {
  masterMetricsSystem.report()
  masterMetricsSystem.stop()
  persistenceEngine.close()
  leaderElectionAgent.stop()
}
// LeaderElectable callback — forwarded to self so the state change happens
// on the actor's own thread.
override def appointLeader() = {
  self ! AppointedAsLeader
}
// LeaderElectable callback — forwarded to self; the handler exits the JVM.
override def revokeLeadership() = {
  self ! RevokedLeadership
}
// Main message loop: HA transitions, worker lifecycle (register/heartbeat/
// timeout), buffer-pressure reports, combine-job coordination, collector
// management, and forwarding-rule configuration.
override def receiveWithLogging: PartialFunction[Any, Unit] = {
  case AppointedAsLeader =>
    // TODO: dummy placeholder
    state = LoadMasterRecoveryState.ALIVE

  case RevokedLeadership => {
    logError("Leadership has been revoked -- load master shutting down.")
    System.exit(0)
  }

  // Worker registration handshake; STANDBY masters stay silent.
  case RegisterWorker(
    id, workHost, workPort, cores, memory, webUiPort, workerIP, tcpPort, streamingPort) => {
    logInfo("Registering %s %s:%d with %d cores, %s RAM".format(
      id, workHost, workPort, cores, Utils.megabytesToString(memory)))
    if (state == LoadMasterRecoveryState.STANDBY) {
      // ignore, don't send response
    } else if (idToWorker.contains(id)) {
      sender ! RegisterWorkerFailed("Duplicate worker ID")
    } else {
      val worker = new LoadWorkerInfo(id, workHost, workPort, cores, memory, sender(),
        webUiPort, workerIP, tcpPort, streamingPort)
      if (registerWorker(worker)) {
        persistenceEngine.addWorker(worker)
        sender ! RegisteredWorker(loadMasterUrl, loadMasterWebUIUrl)
        pushBGPToWorker(sender())
      } else {
        val workerAddress = worker.actor.path.address
        logWarning("Worker registration failed. Attempted to re-register worker at same " +
          "address: " + workerAddress)
        sender ! RegisterWorkerFailed("Attempted to re-register Component at same address: "
          + workerAddress)
      }
    }
  }

  case Heartbeat(workerId) =>
    idToWorker.get(workerId) match {
      case Some(workerInfo) =>
        workerInfo.lastHeartbeat = System.currentTimeMillis()
      case None =>
        // A known-but-inactive worker is asked to re-register; a worker we
        // have never seen is ignored.
        if (workers.map(_.id).contains(workerId)) {
          logWarning(s"Got heartbeat from unregistered component $workerId." +
            " Asking it to re-register.")
          sender ! ReconnectWorker(loadMasterUrl)
        } else {
          logWarning(s"Got heartbeat from unregistered worker $workerId." +
            " This worker was never registered, so ignoring the heartbeat.")
        }
    }

  case CheckForWorkerTimeOut =>
    timeOutDeadWorkers()

  case BoundPortsRequest =>
    sender ! BoundPortsResponse(masterPort, webUiPort)

  // message about buffer
  case BuffersWarn(workerIp) =>
    logDebug(s"$workerIp send BuffersWarn message to master.")
    // adjustCollectorByBuffer(workerIp, sender())

  // case BufferOverFlow(workerIp) =>
  //   logDebug(s"$workerIp send bufferoverflow message to master.")
  //   adjustCollectorByBuffer(workerIp, sender())

  case BufferSimpleReport(workerIp, usageRate) =>
    logDebug(s"get a simple report $workerIp -> $usageRate.")
    workerToBufferRate.update(workerIp, usageRate)

  case BufferWholeReport(workerIp, usageRate, maxSize, curSize) =>
    logDebug(s"get a whole report $workerIp -> $usageRate ($curSize / $maxSize)")
    workerToBufferRate.update(workerIp, usageRate)
    // TODO save another information (maxSize, curSize)

  // message about combine
  // deal with the combine parquet
  case CloseParquet(fileStamp) =>
    combineParquet(fileStamp)

  case CombineFinished(status) =>
    dealWithCombineMessage(status)

  // message about receiver
  case DeleReceiver(receiverIP) =>
    deleteDeadCollector(receiverIP)

  case DeleWorker(workerIP, port) =>
    deleDeadWorker(workerIP, port, notify = false)

  case RequestWorker(collectorIP) =>
    assignWorker(collectorIP)

  // Forwarding rules and BGP table configuration
  case GetAllRules =>
    sender ! CurrentRules(forwardingRules.iterator.toArray)

  case InsertRules(rule) =>
    sender ! insertForwardingRules(rule)
    notifyRulesToAllCollectors()

  case UpdateSingleRule(ruleId, ruleItem) =>
    sender ! SingleRuleSubstitution(forwardingRules.put(ruleId, ruleItem), ruleItem)
    notifyRulesToAllCollectors()

  case DeleteSingleRule(ruleId) =>
    sender ! DeletedRule(forwardingRules.remove(ruleId))
    notifyRulesToAllCollectors()

  case GetAllLoaders =>
    val infos = ArrayBuffer.empty[LoadInfo]
    workers.foreach(worker => infos += LoadInfo(worker.ip, worker.streamPort))
    sender ! AllLoadersAvailable(infos.toSeq)
}
// **********************************************************************************
// As a Configuration Server
private val forwardingRules = mutable.HashMap.empty[String, RuleItem]
// Timestamp-based id prefix for newly inserted forwarding rules.
// NOTE(review): two inserts within the same formatted instant would produce
// colliding prefixes — verify the granularity of TimeUtils.createFormat.
def generateInsertionId(): String = (new DateTime).toString(TimeUtils.createFormat)
/**
 * Stores each item of `rule` under a fresh timestamp-based id of the form
 * `<prefix>-<index>` and reports how many items were inserted.
 */
def insertForwardingRules(rule: ForwardingRule): ConfigurationMessage = {
  val idPrefix = generateInsertionId()
  for ((item, idx) <- rule.rules.zipWithIndex) {
    forwardingRules(idPrefix + "-" + idx) = item
  }
  InsertionSuccess(rule.rules.size)
}
// Sends the serialized rule set to one collector; no-op when no rules exist.
// NOTE(review): `collector2Socket(collector)` throws if the collector is
// unknown — callers must guarantee the collector has registered.
def pushRuleToCollector(collector: String): Unit = {
  getRuleStr match {
    case Some(cmd) => loadServer.collector2Socket(collector).write(cmd)
    case None =>
  }
}
// Broadcasts the current rule set to every connected collector, reusing one
// ByteBuffer via mark()/reset() between writes.
// NOTE(review): `getRuleStr.get` throws when no rules exist; callers only
// invoke this after a successful insert/update, but a guard would be safer.
private def notifyRulesToAllCollectors(): Unit = {
  val cmd = getRuleStr.get
  cmd.mark()
  loadServer.collector2Socket.foreach(coll => {
    coll._2.write(cmd)
    cmd.reset()
  })
}
/**
 * Serializes the current forwarding rules into a command buffer, or None
 * when no rules are defined. The key section holds
 * "routerId,srcPort,valueIndex" entries; the value section holds the
 * distinct "destIp,rate" strings referenced by index (deduplicated).
 *
 * Bugs fixed: the original never appended anything to `res_val`, and then
 * trimmed it with `res_val.delete(res_val.lastIndexOf(delim), res_key.length)`
 * — `lastIndexOf` on the empty builder returns -1 and `delete` throws
 * StringIndexOutOfBoundsException; it also used `res_key.length` as the end.
 */
private def getRuleStr: Option[ByteBuffer] = {
  if (forwardingRules.isEmpty) return None
  // LinkedHashMap preserves insertion order, so index i in the value section
  // corresponds to the i-th distinct "destIp,rate" string seen below.
  val valueMap = mutable.LinkedHashMap.empty[String, Int]
  val res_key = new StringBuilder()
  forwardingRules.valuesIterator.foreach { rule =>
    val _key = rule.routerId.concat(",").concat(rule.srcPort.toString)
    val _value = rule.destIp.concat(",").concat(rule.rate)
    val pos = valueMap.getOrElseUpdate(_value, valueMap.size)
    res_key.append(_key).append(",").append(pos).append(CmdStruct.inner_delim)
  }
  val res_val = new StringBuilder()
  valueMap.keysIterator.foreach(v => res_val.append(v).append(CmdStruct.inner_delim))
  // Strip the trailing delimiter from each non-empty section.
  res_key.delete(res_key.lastIndexOf(CmdStruct.inner_delim), res_key.length)
  if (res_val.nonEmpty) {
    res_val.delete(res_val.lastIndexOf(CmdStruct.inner_delim), res_val.length)
  }
  Some(CommandSet.resRules(res_key.toString(), res_val.toString()))
}
// **********************************************************************************
// Adds a newly announced worker to all indexes. Returns false when a live
// worker is already registered at the same actor address; an UNKNOWN
// (recovering) worker at that address is replaced instead.
private def registerWorker(worker: LoadWorkerInfo): Boolean = {
  // There may be one or more refs to dead workers on this same node (with different ID's),
  // remove them.
  workers.filter { w =>
    (w.host == worker.host && w.port == worker.port) && (w.state == WorkerState.DEAD)
  }.foreach { w =>
    workers -= w
  }
  val workerAddress = worker.actor.path.address
  if (addressToWorker.contains(workerAddress)) {
    val oldWorker = addressToWorker(workerAddress)
    if (oldWorker.state == WorkerState.UNKNOWN) {
      // A worker registering from UNKNOWN implies that the worker was restarted during recovery.
      // The old worker must thus be dead, so we will remove it and accept the new worker.
      removeWorker(oldWorker)
    } else {
      logInfo("Attempted to re-register worker at same address: " + workerAddress)
      return false
    }
  }
  workers += worker
  idToWorker(worker.id) = worker
  addressToWorker(workerAddress) = worker
  // Drop any stale record for this IP, expose the worker to waiting
  // collectors, and push the cached BGP table to it.
  // NOTE(review): the RegisterWorker handler also calls pushBGPToWorker after
  // this returns true, so the table is pushed twice — confirm intended.
  deleDeadWorker(worker.ip, notify = true)
  addNewWorker(worker.ip, worker.tcpPort)
  pushBGPToWorker(worker.actor)
  true
}
// Marks a worker DEAD, unlinks it from the live indexes and persistence,
// tells affected collectors to switch workers, and re-schedules the combine
// job if this worker was running it.
private def removeWorker(worker: LoadWorkerInfo): Unit = {
  logInfo("Removing worker " + worker.id + " on " + worker.host + ":" + worker.port)
  worker.setState(WorkerState.DEAD)
  idToWorker -= worker.id
  addressToWorker -= worker.actor.path.address
  persistenceEngine.removeWorker(worker)
  // we should tell all the receivers who connected with this
  // dead worker to connect with living worker
  deleDeadWorker(worker.ip, worker.tcpPort, notify = true)
  // redo combine thread on anther worker node.
  dealWithCombineError(worker)
}
/**
 * Check for, and remove, any timed-out workers.
 * A timed-out worker is first transitioned to DEAD (kept visible in the UI),
 * then culled entirely after REAPER_ITERATIONS further timeout periods.
 */
private def timeOutDeadWorkers() {
  // Copy the workers into an array so we don't modify the hashset while iterating through it
  val currentTime = System.currentTimeMillis()
  val toRemove = workers.filter(_.lastHeartbeat < currentTime - WORKER_TIMEOUT).toArray
  for (worker <- toRemove) {
    if (worker.state != WorkerState.DEAD) {
      logWarning("Removing %s because we got no heartbeat in %d seconds".format(
        worker.id, WORKER_TIMEOUT / 1000))
      removeWorker(worker)
    } else {
      if (worker.lastHeartbeat < currentTime - ((REAPER_ITERATIONS + 1) * WORKER_TIMEOUT)) {
        workers -= worker // we've seen this DEAD worker in the UI, etc. for long enough; cull it
      }
    }
  }
}
// ***********************************************************************************
/**
* deal with combine
*/
// only for combine server, since we can not get the exact load worker threads number
// in current time base, so we put the task to load worker by listening HDFS directory
private var curCombieWorker: LoadWorkerInfo = _
private var fileTimeStamp: Long = 0
private var nextCombIdx = 0
/** Kicks off a combine round for a newly observed file timestamp; repeats of the current stamp are ignored. */
private def combineParquet(fileStamp: Long): Unit = {
  if (fileStamp == fileTimeStamp) return
  fileTimeStamp = fileStamp
  sendCombMessage()
  logInfo(s"will combine directory ${load.getPathByTime(fileTimeStamp, conf)}")
}
// Records the outcome of a combine job reported by a worker. FINISH and
// PARTIAL_FINISH both complete the current round; the remaining statuses are
// logged as failures.
private def dealWithCombineMessage(status: CombineStatus.Value): Unit = {
  status match {
    case CombineStatus.FINISH =>
      combineParquetFinished = true
      logInfo(s"Combine the file ${load.getPathByTime(fileTimeStamp, conf)} completely")
    case CombineStatus.PARTIAL_FINISH =>
      combineParquetFinished = true
      logInfo(s"Combine partial files in directory ${load.getPathByTime(fileTimeStamp, conf)}")
    case CombineStatus.DIRECTORY_NOT_EXIST =>
      logWarning(s"Combine directory ${load.getPathByTime(fileTimeStamp, conf)} failed, " +
        s"for there is no such directory.")
    case CombineStatus.UNKNOWN_DIRECTORY =>
      logWarning(s"Combine directory ${load.getPathByTime(fileTimeStamp, conf)} failed, " +
        s"for the directory structure is not parquet structure.")
    case _ => logError(s"Combine error, cannot combine this directory")
  }
}
/**
 * If the worker that died was the one currently running the combine job and
 * that job has not finished, restart the combine on another worker node.
 */
private def dealWithCombineError(deadWorker: LoadWorkerInfo): Unit = {
  if (!combineParquetFinished && deadWorker == curCombieWorker) {
    sendCombMessage()
  }
}
// determine a worker to run combine job by order
// NOTE(review): idToWorker is a HashMap, so `toList` ordering is not stable
// across mutations — the rotation below is only approximately round-robin.
private def sendCombMessage(): Unit = {
  if(idToWorker.isEmpty) return
  nextCombIdx = (nextCombIdx + 1) % idToWorker.size
  curCombieWorker = idToWorker.toList(nextCombIdx)._2
  curCombieWorker.actor ! CombineParquet(fileTimeStamp) // tell it to combine the parquets
  logInfo(s"will restart combine directory ${load.getPathByTime(fileTimeStamp, conf)} " +
    s"on ${curCombieWorker.host}")
}
// ***********************************************************************************
/**
* only for bgp table
*/
private val bgpTable = new scala.collection.mutable.HashMap[Int, Array[Byte]]
/**
 * Merges the given BGP records into the master's cached table and forwards
 * new/changed records to every registered worker.
 *
 * Bug fixed: the original sent `_.actor ! updateBGP(...)` — the unqualified
 * name resolves to this very method (local/member definitions shadow the
 * imported message class), so the empty-table branch recursed unboundedly
 * and the incremental branch sent `Unit` as the message. The message is now
 * constructed via the enclosing object: `LoadMessages.updateBGP(...)`.
 * NOTE(review): confirm the message class name/location in LoadMessages.
 */
private def updateBGP(bgpIds: Array[Int], bgpDatas: Array[Array[Byte]]): Unit = {
  if (bgpTable.isEmpty) {
    // First push: cache everything, then forward the full set.
    var idx = 0
    while (idx != bgpIds.length) {
      bgpTable(bgpIds(idx)) = bgpDatas(idx)
      idx += 1
    }
    idToWorker.valuesIterator
      .foreach(_.actor ! LoadMessages.updateBGP(bgpIds, bgpDatas))
    return
  }
  // Incremental update: forward only records that are new or changed.
  val _bgpIds = new ArrayBuffer[Int]
  val _bgpDatas = new ArrayBuffer[Array[Byte]]
  var idx = 0
  while (idx != bgpIds.length) {
    val changed = bgpTable.get(bgpIds(idx)) match {
      case Some(value) => !(value sameElements bgpDatas(idx))
      case None => true
    }
    if (changed) {
      _bgpIds += bgpIds(idx)
      _bgpDatas += bgpDatas(idx)
      bgpTable(bgpIds(idx)) = bgpDatas(idx)
    }
    idx += 1
  }
  // Skip the broadcast entirely when nothing actually changed.
  if (_bgpIds.nonEmpty) {
    idToWorker.valuesIterator
      .foreach(_.actor ! LoadMessages.updateBGP(_bgpIds.toArray[Int], _bgpDatas.toArray[Array[Byte]]))
  }
}
/**
 * Sends the complete cached BGP table to a single (newly registered) worker;
 * no-op when nothing has been cached yet.
 *
 * Bug fixed: the original sent the unqualified `updateBGP(...)`, which
 * invoked the sibling private method (broadcasting to every worker) and then
 * sent `()` to `workerActor` instead of the intended actor message. The
 * message is now built via `LoadMessages.updateBGP(...)`.
 * NOTE(review): confirm the message class name/location in LoadMessages.
 */
private def pushBGPToWorker(workerActor: ActorRef): Unit = {
  if (bgpTable.isEmpty) return
  val bgpIds = new Array[Int](bgpTable.size)
  val bgpDatas = new Array[Array[Byte]](bgpTable.size)
  var idx = 0
  bgpTable.foreach { case (id, data) =>
    bgpIds(idx) = id
    bgpDatas(idx) = data
    idx += 1
  }
  workerActor ! LoadMessages.updateBGP(bgpIds, bgpDatas)
}
// ***********************************************************************************
/** Records a bidirectional worker <-> collector association in the two mirror maps. */
private def addConnection(worker: String, collector: String): Unit = {
  workerToCollectors.getOrElseUpdate(worker, new ArrayBuffer[String]) += collector
  collectorToWorkers.getOrElseUpdate(collector, new ArrayBuffer[String]) += worker
}
// Removes one worker <-> collector association, asserting that the two
// mirror maps stay mutually consistent.
// NOTE(review): the name's typo ("Connecton") is kept — renaming would break
// callers elsewhere in this file.
private def deleConnecton(worker: String, collector: String): Unit = {
  workerToCollectors.get(worker) match {
    case Some(collectors) =>
      val idx = collectors.indexOf(collector)
      if (idx == -1) {
        // has deleted
        require(collectorToWorkers.get(collector).isEmpty
          || collectorToWorkers.get(collector).get.indexOf(worker) == -1,
          s" Since worker2collector does not contain $worker -> $collector," +
          s"collector2worker should not contain $collector -> $worker")
      } else {
        collectors.remove(idx)
        require(collectorToWorkers.get(collector).isDefined,
          s"Since worker2collector contains $worker -> $collector," +
          s"so collector2workers should also contain $collector -> $worker")
        collectorToWorkers.get(collector).get -= worker
      }
    case None =>
      require(collectorToWorkers.get(collector).isEmpty ||
        !collectorToWorkers.get(collector).get.contains(worker),
        s" Since worker2collector does not contain $worker -> $collector," +
        s"collector2worker should not contain $collector -> $worker")
  }
}
// ***********************************************************************************
/**
* deal with worker
*/
// called after a worker registered successfully, record workerToUdpPort & workerToCollectors
// Then, if any collectors are waiting for a worker, hand this new worker to
// one of them (preferring a collector co-located on the same IP).
private def addNewWorker(workerIP: String, workerPort: Int): Unit = {
  workerToPort += (workerIP -> (workerIP, workerPort))
  workerToBufferRate += (workerIP -> 0)
  _assignWorkerToWaitingCollector()

  // when a worker registered in master, select a receiver to connect with this worker
  def _assignWorkerToWaitingCollector(): Unit = {
    if (waitQueue.isEmpty) {
      logInfo("There is no collector waiting for worker.")
      return
    }
    // selected this current new worker ip
    // NOTE(review): `cmd` is a ByteBuffer written repeatedly across loop
    // iterations without rewinding; if a write consumes it, later iterations
    // may send nothing — verify CommandSet/socket.write semantics.
    val cmd = CommandSet.resWorkerIPs(Some(Array(workerToPort.get(workerIP).get)), None)
    // Loop until one waiting collector is successfully notified (or the
    // queue drains); failures remove the collector and are logged.
    while (true) {
      try {
        if (waitQueue.isEmpty) {
          logInfo("All wait Queue has been deal with, there is no collector waiting for worker.")
          return
        }
        val collectorIP = if (waitQueue.contains(workerIP)) workerIP else waitQueue.head
        loadServer.collector2Socket.get(collectorIP) match {
          case Some(socket) =>
            if (socket.isConnected) {
              socket.write(cmd)
              waitQueue.remove(collectorIP)
              addConnection(workerIP, collectorIP)
              return
            } else {
              waitQueue.remove(collectorIP)
              throw new NetFlowException(s"the $collectorIP's socket is closed!")
            }
          case None =>
            waitQueue.remove(collectorIP)
            throw new NetFlowException(s"There is no $collectorIP collector!")
        }
      } catch {
        case e: NetFlowException =>
          logWarning(e.getMessage)
      }
    }
  }
}
// Called when the heartbeat is timeout (heartbeat mechanism based)
// Or called when a receiver request worker list who also assigns a dead worker
// port == 0 means "match any port"; notify == true additionally tells the
// affected collectors to drop this worker.
private def deleDeadWorker(workerIP: String, port: Int = 0, notify: Boolean = false): Unit = {

  // Unlink the worker from every collector it served, then drop its
  // port/buffer-rate records.
  def _delete(workerIP: String): Unit = {
    workerToCollectors.remove(workerIP) match {
      case Some(collectors) =>
        collectors.foreach(collector => {
          collectorToWorkers(collector) -= workerIP
          if (notify) {
            require(workerToPort.get(workerIP).isDefined,
              s"Now worker2Port should exist deadWorker $workerIP")
            val ip_port = workerToPort.get(workerIP).get
            notifyReceiver(collector, None, Some(Array(ip_port)))
          }
        })
      case None =>
        logInfo(s"The $workerIP worker has been deleted.")
    }
    workerToPort -= workerIP
    workerToBufferRate -= workerIP
  }

  // Since a single worker only has one receiver Server,
  // so 'workerToPort' should at most contain one record about this 'workerIP'.
  workerToPort.get(workerIP) match {
    case Some(deadWorker) =>
      if (port == 0) { // ignore port
        _delete(workerIP)
      } else {
        if (deadWorker.equals((workerIP, port))) {
          _delete(workerIP)
        } else {
          logInfo(s"Expect delete $workerIP:$port worker," +
            s"but now this worker is ${deadWorker._1}:${deadWorker._2}." +
            s"So there mast has something wrong if the load worker does not reboot.")
        }
      }
    case None =>
      logInfo(s"The $workerIP:$port worker has been deleted or does not existed.")
  }
}
// ***********************************************************************************
/**
* deal with receiver
*/
// called when a master receives a message about Delete dead collector
// Removes a lost collector from the wait queue or from the connection maps.
private def deleteDeadCollector(collectorIP: String): Unit = {
  // If the collector is down, the flag can been caught by socketChannel.
  // For workers which is connected with this collector, they also know this flag,
  // so here, we only delete related Struction.
  if (waitQueue.contains(collectorIP)) {
    waitQueue -= collectorIP
    logInfo(s"Remove the collector $collectorIP. ")
    return
  }
  collectorToWorkers.remove(collectorIP) match {
    case Some(relatedWorkers) =>
      // Keep the mirror map consistent: every worker that served this
      // collector must forget it.
      relatedWorkers.foreach(_worker => {
        val _collector = workerToCollectors.get(_worker)
        require(_collector.isDefined &&
          _collector.get.contains(collectorIP),
          s"Since collector2workers contain ${collectorIP} -> ${_worker}, " +
          s"So worker2Collectors should contain ${_worker}, but now is NONE.")
        _collector.get -= collectorIP
      })
    case None =>
      logError(s"'deleDeadCollector' method should be called only when the collector is lost," +
        s"so ,for a determined $collectorIP collector, " +
        s"'collectorToWorkers' should have one record at least" +
        s"about this collector, but now, it is Empty")
  }
}
/**
 * Picks up to `expectWorkerNum` workers for `collector`, ordered by lowest
 * buffer-usage rate and excluding workers already serving this collector.
 * A worker co-located with the collector (same IP) is preferred first.
 * Returns None when no candidate worker is available.
 *
 * Bug fixed: the original filled a fixed-size array by index while skipping
 * entries equal to the collector, which left `null` slots in the returned
 * array (and dropped better candidates) whenever the co-located worker did
 * not sit at index 0 of the sorted candidate list.
 */
private def selectSuitableWorkers(collector: String,
                                  expectWorkerNum: Int): Option[Array[String]] = {
  val availableWorkers = collectorToWorkers.get(collector) match {
    case Some(_workers) =>
      workerToBufferRate.filterNot(x => _workers.contains(x._1)).toList.sortWith(_._2 < _._2)
    case None =>
      workerToBufferRate.toList.sortWith(_._2 < _._2)
  }
  if (availableWorkers.isEmpty) return None
  val actualLen = Math.min(expectWorkerNum, availableWorkers.length)
  // Co-located worker first, then the rest in ascending buffer-usage order,
  // truncated to the requested count — never null, never duplicated.
  val (colocated, others) = availableWorkers.partition(_._1 == collector)
  Some((colocated ++ others).iterator.map(_._1).take(actualLen).toArray)
}
// called when a receiver ask for worker's info
// Picks suitable workers for the collector and writes their (ip, port) pairs
// to the collector's socket; queues the collector when none are available.
private def assignWorker(collector: String, workerNum: Int = 1): Unit = {
  loadServer.collector2Socket.get(collector) match {
    case Some(socket) =>
      if (!socket.isConnected) {
        logDebug(s" Cannot connect with $collector, for socket dese not connected.")
        return
      }
      selectSuitableWorkers(collector, workerNum) match {
        case Some(workers: Array[String]) =>
          val ip_port = new ArrayBuffer[(String, Int)](workers.length)
          workers.foreach(x =>
            workerToPort.get(x) match {
              case Some(_worker) =>
                ip_port += _worker
                addConnection(_worker._1, collector)
              case None =>
                // Inconsistent state: rate map knows the worker but port map
                // does not — abort the whole assignment.
                logError(s"Worker is lost? worker2Port does not contain $x " +
                  s"but worker2Rate does contain?")
                return
            })
          logDebug(s"current selected ip " +
            s"is ${ip_port.map(x => x._1 + ":" + x._2).mkString(" ")}")
          val cmd = CommandSet.resWorkerIPs(Some(ip_port.toArray[(String, Int)]), None)
          logDebug(s"the cmd is ${cmd.array().slice(2, cmd.limit())}")
          val sendSize = socket.write(cmd)
          logDebug(s"the sent size is ${sendSize}")
        case None =>
          // No worker available yet; serve this collector when one registers.
          waitQueue += collector
          logWarning(s"There is no available worker to run in cluster.")
      }
    case None =>
      logWarning(s"The $collector Collector is not a effective collector.")
  }
}
// ***********************************************************************************
/**
* deal with balance
*/
// Tells a receiver to add and/or drop worker endpoints by writing a
// resWorkerIPs command over its control socket.
private def notifyReceiver(receiverHost: String,
    addWorker: Option[Array[(String, Int)]],
    deleWorker: Option[Array[(String, Int)]]): Unit = {
  // Encode the add/remove lists once, then deliver if the socket is alive.
  val command = CommandSet.resWorkerIPs(addWorker, deleWorker)
  loadServer.collector2Socket.get(receiverHost) match {
    case Some(socket) if socket.isConnected =>
      socket.write(command)
    case Some(_) =>
      logError(s"Can not connect with $receiverHost receiver")
    case None =>
      logError(s"There is no $receiverHost Receiver in 'collector2Socket'," +
        s"so something must be wrong.")
  }
}
// Called when a worker needs to adjust the number of receivers feeding it.
// For every collector currently writing to `workerIP`, pick extra workers
// (least loaded first) and tell the collector to also write to them.
// The receiver co-located with the worker is left as the last one to remove.
private def adjustCollectorByBuffer(workerIP: String, workerActor: ActorRef): Unit = {
  // Ask every worker to report fresh buffer statistics before rebalancing.
  idToWorker.values.foreach(x => x.actor ! BufferInfo)

  // A single lightly-loaded worker is enough to absorb the extra load.
  def underHalfRateStrategy(
      availableWorkers: List[(String, Double)],
      collector: String): Unit = {
    // Keep this invariant in sync with the dispatch in selectStrategy; the
    // previous hard-coded 0.5 broke whenever halfLimit was configured
    // to a different value.
    require(availableWorkers.head._2 <= halfLimit)
    logDebug(s"Select underHalfRateStrategy, current header rate is ${availableWorkers.head._2}")
    val assignedWorker = availableWorkers.head._1
    addConnection(assignedWorker, collector)
    require(workerToPort.contains(assignedWorker),
      s"work2Rate contains $assignedWorker, " +
        s"so worker2Port should always contains $assignedWorker")
    val addWorker = Array(workerToPort(assignedWorker))
    logDebug(s"Select a worker ${addWorker.head._1}:${addWorker.head._2}")
    notifyReceiver(collector, Some(addWorker), None)
  }

  // Load is elevated everywhere: spread the collector across up to half the
  // cluster's workers.
  def underWarnRateStrategy(
      availableWorkers: List[(String, Double)],
      collector: String): Unit = {
    // Fixed missing '$' in the interpolation: the rate used to be printed
    // literally as "{availableWorkers.head._2}".
    logDebug(s"Select underWarnRateStrategy, current header rate is ${availableWorkers.head._2}")
    val adjustSize = Math.min(availableWorkers.size, workerToPort.size / 2)
    val addWorker = new Array[(String, Int)](adjustSize)
    for (i <- 0 until adjustSize) {
      val worker = availableWorkers(i)._1
      addConnection(worker, collector)
      require(workerToPort.contains(worker),
        s"work2Rate contains $worker, " +
          s"so worker2Port should always contains $worker")
      addWorker(i) = workerToPort(worker)
    }
    logDebug(
      s"adjust size is $adjustSize, will connect with ${addWorker.map(_._1).mkString(" ")}")
    notifyReceiver(collector, Some(addWorker), None)
  }

  // Dispatches on the lowest buffer rate in the cluster.
  // `connectOneWorker` is currently unused; kept for call-site symmetry.
  def selectStrategy(
      availableWorkers: List[(String, Double)],
      collector: String,
      connectOneWorker: Boolean): Unit = {
    if (availableWorkers.isEmpty) {
      // Guard added: `.head` below would throw on an empty candidate list.
      logWarning(s"There is no available worker to relieve $collector.")
      return
    }
    logDebug(s"Current available worker is ${availableWorkers.head._1}, " +
      s"rate is ${availableWorkers.head._2}")
    availableWorkers.head._2 match {
      case x if x <= halfLimit =>
        underHalfRateStrategy(availableWorkers, collector)
      case x if x <= warnLimit =>
        underWarnRateStrategy(availableWorkers, collector)
      case x => logError(s"Too heavy!")
    }
  }

  // For one collector attached to this worker: find candidate workers the
  // collector is not already using and apply a strategy.
  def Coll2worker(collector: String): Unit = {
    collectorToWorkers.get(collector) match {
      case None => logError(s"coll2Worker empty? ")
      case Some(_workers) =>
        assert(_workers.nonEmpty,
          s"collector should contain $workerIP at least, but know empty")
        val availableWorkers =
          workerToBufferRate.filterNot(x => _workers.contains(x._1)).toList.sortWith(_._2 < _._2)
        if (_workers.length == 1) {
          assert(_workers.head == workerIP,
            s"collector should contain $workerIP at least, but know ${_workers.head}")
          selectStrategy(availableWorkers, collector, connectOneWorker = true)
        } else {
          selectStrategy(availableWorkers, collector, connectOneWorker = false)
        }
    }
  }

  workerToCollectors.get(workerIP) match {
    case Some(colls) =>
      colls.foreach(Coll2worker)
    case None => logError(s"worker2Collectors should not be null! ")
  }

  // NOTE: a large block of commented-out balancing code
  // (dealWithSingleConnection / dealWithMutilConnection) was removed here;
  // recover it from version control if that strategy is ever revived.
}
}
object LoadMaster extends Logging {
  import MasterMessages._

  val systemName = "netflowLoadMaster"
  private val actorName = "LoadMaster"

  /** Standalone entry point: starts the master actor system and blocks until it terminates. */
  def main(argStrings: Array[String]): Unit = {
    SignalLogger.register(log)
    val conf = new NetFlowConf(false)
    val masterArg = new LoadMasterArguments(argStrings, conf)
    val (actorSystem, _, _) =
      startSystemAndActor(masterArg.host, masterArg.port, masterArg.webUiPort, conf)
    actorSystem.awaitTermination()
  }

  /**
   * Start the Master and return a three tuple of:
   * (1) The Master actor system
   * (2) The bound port
   * (3) The web UI bound port
   *
   * Blocks on an ask to the freshly-created actor to learn the web UI port.
   */
  def startSystemAndActor(
      host: String,
      port: Int,
      webUiPort: Int,
      conf: NetFlowConf): (ActorSystem, Int, Int) = {
    val (actorSystem, boundPort) = AkkaUtils.createActorSystem(systemName, host, port, conf)
    val actor = actorSystem.actorOf(
      Props(classOf[LoadMaster], host, boundPort, webUiPort, conf), actorName)
    val timeout = AkkaUtils.askTimeout(conf)
    val portsRequest = actor.ask(BoundPortsRequest)(timeout)
    val portsResponse = Await.result(portsRequest, timeout).asInstanceOf[BoundPortsResponse]
    (actorSystem, boundPort, portsResponse.webUIPort)
  }
}
| DataSysLab/netflow | load/src/main/scala/cn/ac/ict/acs/netflow/load/master/LoadMaster.scala | Scala | apache-2.0 | 37,521 |
/*
* Copyright (c) 2014, Brook 'redattack34' Heisler
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the ModularRayguns team nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.castlebravostudios.rayguns.items.misc
import com.castlebravostudios.rayguns.api.LensGrinderRecipeRegistry
import com.castlebravostudios.rayguns.mod.Config
import com.castlebravostudios.rayguns.mod.ModularRayguns
import net.minecraft.block.Block
import net.minecraft.item.Item
import net.minecraft.item.ItemStack
/** Empty tier-3 raygun chamber item: registers its creative tab, unlocalized name and texture. */
object Tier3EmptyChamber extends Item {
  setCreativeTab(ModularRayguns.raygunsTab)
  setUnlocalizedName("rayguns.Tier3EmptyChamber")
  setTextureName("rayguns:chamber_empty_t3")
}
package com.lambtors.poker_api.module.poker.domain.model
import java.util.UUID
/** A poker player in a given game together with the two hole cards dealt to them. */
case class Player(playerId: PlayerId, gameId: GameId, firstCard: Card, secondCard: Card) {
  // Convenience pair view of the player's two hole cards.
  val cards: (Card, Card) = (firstCard, secondCard)
}
object Player {
  /** Creates a player with a fresh random id and two randomly drawn cards for `gameId`. */
  def newPlayer(gameId: GameId): Player = Player(PlayerId(UUID.randomUUID()), gameId, Card.randomCard, Card.randomCard)

  /** Creates a player with a fresh random id and the two given cards for `gameId`. */
  def newPlayerWithCards(gameId: GameId, firstCard: Card, secondCard: Card): Player =
    Player(PlayerId(UUID.randomUUID()), gameId, firstCard, secondCard)
}
| lambtors/poker-api | src/main/scala/com/lambtors/poker_api/module/poker/domain/model/Player.scala | Scala | mit | 523 |
package com.socrata.soql.collection
import scala.collection.immutable._
import scala.collection.generic.{ImmutableSetFactory, CanBuildFrom, GenericCompanion, GenericSetTemplate}
import scala.collection.SetLike
/**
 * An immutable set that remembers insertion order.
 *
 * `underlying` maps each element to its index in `order`, and `order` lists
 * the elements in first-insertion order. Invariant: underlying(order(i)) == i
 * for every valid index i.
 */
class OrderedSet[A](underlying: Map[A, Int], order: Vector[A])
  extends Set[A] with GenericSetTemplate[A, OrderedSet] with SetLike[A, OrderedSet[A]] with Serializable with IndexedSeqShim[A]
{
  override def companion: GenericCompanion[OrderedSet] = OrderedSet

  override def size: Int = underlying.size

  override def empty = OrderedSet.empty[A]

  // Iteration follows insertion order, not hash order.
  def iterator: Iterator[A] = order.iterator

  override def foreach[U](f: A => U): Unit = order.foreach(f)

  def contains(e: A): Boolean = underlying.contains(e)

  override def + (e: A): OrderedSet[A] =
    underlying.get(e) match {
      case Some(_) =>
        // Element already present: keep it at its original position.
        // The previous implementation remapped the element to index
        // `order.length` — an index that does not exist in `order` — which
        // broke the class invariant and made the element impossible to
        // remove afterwards (`-` would rebuild the ordering without
        // actually dropping it).
        this
      case None =>
        new OrderedSet(underlying + (e -> order.length), order :+ e)
    }

  override def + (elem1: A, elem2: A, elems: A*): OrderedSet[A] =
    this + elem1 + elem2 ++ elems

  def - (e: A): OrderedSet[A] =
    underlying.get(e) match {
      case Some(idx) =>
        // Drop the element and re-number every element after it.
        val newOrdering = order.take(idx) ++ order.drop(idx + 1)
        var i = 0
        var result = new HashMap[A, Int]
        for (elem <- newOrdering) {
          result = result.updated(elem, i)
          i += 1
        }
        new OrderedSet(result, newOrdering)
      case None =>
        this
    }

  override def toSeq = order
}
object OrderedSet extends ImmutableSetFactory[OrderedSet] {
  implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, OrderedSet[A]] = setCanBuildFrom[A]

  override def empty[A]: OrderedSet[A] = EmptyOrderedSet.asInstanceOf[OrderedSet[A]]

  // Required by ImmutableSetFactory; reuses the single shared empty instance.
  def emptyInstance: OrderedSet[Any] = EmptyOrderedSet.asInstanceOf[OrderedSet[Any]]

  // Safe to share across element types because the set is empty and immutable.
  private val EmptyOrderedSet = new OrderedSet[Any](Map.empty, Vector.empty)
}
| socrata-platform/soql-reference | soql-environment/src/main/scala-2.12/com/socrata/soql/collection/OrderedSet.scala | Scala | apache-2.0 | 1,945 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext
import org.codehaus.commons.compiler.CompileException
import org.codehaus.janino.JaninoRuntimeException
import org.apache.spark.{broadcast, SparkEnv}
import org.apache.spark.internal.Logging
import org.apache.spark.io.CompressionCodec
import org.apache.spark.rdd.{RDD, RDDOperationScope}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.{Predicate => GenPredicate, _}
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.physical._
import org.apache.spark.sql.execution.metric.SQLMetric
import org.apache.spark.sql.types.DataType
import org.apache.spark.util.ThreadUtils
/**
* The base class for physical operators.
*
* The naming convention is that physical operators end with "Exec" suffix, e.g. [[ProjectExec]].
*/
abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializable {
/**
* A handle to the SQL Context that was used to create this plan. Since many operators need
* access to the sqlContext for RDD operations or configuration this field is automatically
* populated by the query planning infrastructure.
*/
@transient
final val sqlContext = SparkSession.getActiveSession.map(_.sqlContext).orNull
protected def sparkContext = sqlContext.sparkContext
// sqlContext will be null when SparkPlan nodes are created without the active sessions.
// So far, this only happens in the test cases.
val subexpressionEliminationEnabled: Boolean = if (sqlContext != null) {
sqlContext.conf.subexpressionEliminationEnabled
} else {
false
}
// whether we should fallback when hitting compilation errors caused by codegen
private val codeGenFallBack = (sqlContext == null) || sqlContext.conf.codegenFallback
/** Overridden make copy also propagates sqlContext to copied plan. */
override def makeCopy(newArgs: Array[AnyRef]): SparkPlan = {
SparkSession.setActiveSession(sqlContext.sparkSession)
super.makeCopy(newArgs)
}
/**
* @return All metrics containing metrics of this SparkPlan.
*/
def metrics: Map[String, SQLMetric] = Map.empty
/**
* Resets all the metrics.
*/
def resetMetrics(): Unit = {
metrics.valuesIterator.foreach(_.reset())
}
/**
* @return [[SQLMetric]] for the `name`.
*/
def longMetric(name: String): SQLMetric = metrics(name)
// TODO: Move to `DistributedPlan`
/** Specifies how data is partitioned across different nodes in the cluster. */
def outputPartitioning: Partitioning = UnknownPartitioning(0) // TODO: WRONG WIDTH!
/** Specifies any partition requirements on the input data for this operator. */
def requiredChildDistribution: Seq[Distribution] =
Seq.fill(children.size)(UnspecifiedDistribution)
/** Specifies how data is ordered in each partition. */
def outputOrdering: Seq[SortOrder] = Nil
/** Specifies sort order for each partition requirements on the input data for this operator. */
def requiredChildOrdering: Seq[Seq[SortOrder]] = Seq.fill(children.size)(Nil)
/**
* Returns the result of this query as an RDD[InternalRow] by delegating to `doExecute` after
* preparations.
*
* Concrete implementations of SparkPlan should override `doExecute`.
*/
final def execute(): RDD[InternalRow] = executeQuery {
doExecute()
}
/**
* Returns the result of this query as a broadcast variable by delegating to `doExecuteBroadcast`
* after preparations.
*
* Concrete implementations of SparkPlan should override `doExecuteBroadcast`.
*/
final def executeBroadcast[T](): broadcast.Broadcast[T] = executeQuery {
doExecuteBroadcast()
}
/**
* Executes a query after preparing the query and adding query plan information to created RDDs
* for visualization.
*/
protected final def executeQuery[T](query: => T): T = {
RDDOperationScope.withScope(sparkContext, nodeName, false, true) {
prepare()
waitForSubqueries()
query
}
}
/**
* List of (uncorrelated scalar subquery, future holding the subquery result) for this plan node.
* This list is populated by [[prepareSubqueries]], which is called in [[prepare]].
*/
@transient
private val runningSubqueries = new ArrayBuffer[ExecSubqueryExpression]
/**
* Finds scalar subquery expressions in this plan node and starts evaluating them.
*/
protected def prepareSubqueries(): Unit = {
expressions.foreach {
_.collect {
case e: ExecSubqueryExpression =>
e.plan.prepare()
runningSubqueries += e
}
}
}
/**
* Blocks the thread until all subqueries finish evaluation and update the results.
*/
protected def waitForSubqueries(): Unit = synchronized {
// fill in the result of subqueries
runningSubqueries.foreach { sub =>
sub.updateResult()
}
runningSubqueries.clear()
}
/**
* Whether the "prepare" method is called.
*/
private var prepared = false
/**
* Prepares this SparkPlan for execution. It's idempotent.
*/
final def prepare(): Unit = {
// doPrepare() may depend on it's children, we should call prepare() on all the children first.
children.foreach(_.prepare())
synchronized {
if (!prepared) {
prepareSubqueries()
doPrepare()
prepared = true
}
}
}
/**
* Overridden by concrete implementations of SparkPlan. It is guaranteed to run before any
* `execute` of SparkPlan. This is helpful if we want to set up some state before executing the
* query, e.g., `BroadcastHashJoin` uses it to broadcast asynchronously.
*
* @note `prepare` method has already walked down the tree, so the implementation doesn't have
* to call children's `prepare` methods.
*
* This will only be called once, protected by `this`.
*/
protected def doPrepare(): Unit = {}
/**
* Produces the result of the query as an `RDD[InternalRow]`
*
* Overridden by concrete implementations of SparkPlan.
*/
protected def doExecute(): RDD[InternalRow]
/**
* Produces the result of the query as a broadcast variable.
*
* Overridden by concrete implementations of SparkPlan.
*/
protected[sql] def doExecuteBroadcast[T](): broadcast.Broadcast[T] = {
throw new UnsupportedOperationException(s"$nodeName does not implement doExecuteBroadcast")
}
/**
 * Packing the UnsafeRows into byte array for faster serialization.
 * The byte arrays are in the following format:
 * [size] [bytes of UnsafeRow] [size] [bytes of UnsafeRow] ... [-1]
 *
 * UnsafeRow is highly compressible (at least 8 bytes for any column), the byte array is also
 * compressed.
 *
 * @param n maximum number of rows to pack per partition; -1 (the default) means no limit.
 */
private def getByteArrayRdd(n: Int = -1): RDD[Array[Byte]] = {
  execute().mapPartitionsInternal { iter =>
    var count = 0
    val buffer = new Array[Byte](4 << 10) // 4K
    val codec = CompressionCodec.createCodec(SparkEnv.get.conf)
    val bos = new ByteArrayOutputStream()
    val out = new DataOutputStream(codec.compressedOutputStream(bos))
    // Length-prefix each row; rows produced by execute() are assumed to be
    // UnsafeRow instances here.
    while (iter.hasNext && (n < 0 || count < n)) {
      val row = iter.next().asInstanceOf[UnsafeRow]
      out.writeInt(row.getSizeInBytes)
      row.writeToStream(out, buffer)
      count += 1
    }
    // -1 is the end-of-stream sentinel expected by decodeUnsafeRows.
    out.writeInt(-1)
    out.flush()
    out.close()
    Iterator(bos.toByteArray)
  }
}
/**
 * Decodes the byte arrays back to UnsafeRows and put them into buffer.
 *
 * The stream layout matches getByteArrayRdd: a sequence of
 * [size][row bytes] entries terminated by a -1 size sentinel.
 * The returned iterator is single-pass; each call to next() reads ahead
 * one length prefix so hasNext stays cheap.
 */
private def decodeUnsafeRows(bytes: Array[Byte]): Iterator[InternalRow] = {
  val nFields = schema.length

  val codec = CompressionCodec.createCodec(SparkEnv.get.conf)
  val bis = new ByteArrayInputStream(bytes)
  val ins = new DataInputStream(codec.compressedInputStream(bis))

  new Iterator[InternalRow] {
    // Read the first length eagerly; -1 means the stream is exhausted.
    private var sizeOfNextRow = ins.readInt()
    override def hasNext: Boolean = sizeOfNextRow >= 0
    override def next(): InternalRow = {
      val bs = new Array[Byte](sizeOfNextRow)
      ins.readFully(bs)
      // Each returned row owns its freshly-allocated backing array.
      val row = new UnsafeRow(nFields)
      row.pointTo(bs, sizeOfNextRow)
      sizeOfNextRow = ins.readInt()
      row
    }
  }
}
/**
 * Runs this query returning the result as an array.
 */
def executeCollect(): Array[InternalRow] = {
  // Collect the compressed per-partition blobs to the driver, then decode
  // each blob back into UnsafeRows and flatten them into one array.
  getByteArrayRdd().collect().iterator.flatMap(decodeUnsafeRows).toArray
}
/**
* Runs this query returning the result as an iterator of InternalRow.
*
* @note Triggers multiple jobs (one for each partition).
*/
def executeToIterator(): Iterator[InternalRow] = {
getByteArrayRdd().toLocalIterator.flatMap(decodeUnsafeRows)
}
/**
* Runs this query returning the result as an array, using external Row format.
*/
def executeCollectPublic(): Array[Row] = {
val converter = CatalystTypeConverters.createToScalaConverter(schema)
executeCollect().map(converter(_).asInstanceOf[Row])
}
/**
 * Runs this query returning the first `n` rows as an array.
 *
 * This is modeled after `RDD.take` but never runs any job locally on the driver.
 *
 * Partitions are scanned in waves: start with one partition and grow the wave
 * size based on how many rows the previous waves produced, until `n` rows are
 * collected or all partitions have been scanned.
 */
def executeTake(n: Int): Array[InternalRow] = {
  if (n == 0) {
    return new Array[InternalRow](0)
  }

  val childRDD = getByteArrayRdd(n)

  val buf = new ArrayBuffer[InternalRow]
  val totalParts = childRDD.partitions.length
  var partsScanned = 0
  while (buf.size < n && partsScanned < totalParts) {
    // The number of partitions to try in this iteration. It is ok for this number to be
    // greater than totalParts because we actually cap it at totalParts in runJob.
    var numPartsToTry = 1L
    if (partsScanned > 0) {
      // If we didn't find any rows after the previous iteration, quadruple and retry.
      // Otherwise, interpolate the number of partitions we need to try, but overestimate
      // it by 50%. We also cap the estimation in the end.
      val limitScaleUpFactor = Math.max(sqlContext.conf.limitScaleUpFactor, 2)
      if (buf.isEmpty) {
        numPartsToTry = partsScanned * limitScaleUpFactor
      } else {
        // the left side of max is >=1 whenever partsScanned >= 2
        numPartsToTry = Math.max((1.5 * n * partsScanned / buf.size).toInt - partsScanned, 1)
        numPartsToTry = Math.min(numPartsToTry, partsScanned * limitScaleUpFactor)
      }
    }

    val p = partsScanned.until(math.min(partsScanned + numPartsToTry, totalParts).toInt)
    val sc = sqlContext.sparkContext
    // Each task ships back at most one compressed blob of rows.
    val res = sc.runJob(childRDD,
      (it: Iterator[Array[Byte]]) => if (it.hasNext) it.next() else Array.empty[Byte], p)

    buf ++= res.flatMap(decodeUnsafeRows)

    partsScanned += p.size
  }

  // The last wave may overshoot; trim down to exactly n rows.
  if (buf.size > n) {
    buf.take(n).toArray
  } else {
    buf.toArray
  }
}
protected def newMutableProjection(
expressions: Seq[Expression],
inputSchema: Seq[Attribute],
useSubexprElimination: Boolean = false): MutableProjection = {
log.debug(s"Creating MutableProj: $expressions, inputSchema: $inputSchema")
GenerateMutableProjection.generate(expressions, inputSchema, useSubexprElimination)
}
// Builds an interpreted (non-codegen) predicate as the fallback path when
// code generation fails, logging a warning with a truncated rendering of the
// expression so very large trees don't flood the logs.
private def genInterpretedPredicate(
    expression: Expression, inputSchema: Seq[Attribute]): InterpretedPredicate = {
  val str = expression.toString
  // Keep at most 256 characters, reserving 3 for the ellipsis.
  val logMessage = if (str.length > 256) {
    str.substring(0, 256 - 3) + "..."
  } else {
    str
  }
  logWarning(s"Codegen disabled for this expression:\\n $logMessage")
  InterpretedPredicate.create(expression, inputSchema)
}
// Compiles `expression` into a generated predicate; if Janino compilation
// fails and codegen fallback is enabled, degrades to the interpreted path
// instead of failing the query.
protected def newPredicate(
    expression: Expression, inputSchema: Seq[Attribute]): GenPredicate = {
  try {
    GeneratePredicate.generate(expression, inputSchema)
  } catch {
    // Only compiler errors are caught here; other failures still propagate.
    case _ @ (_: JaninoRuntimeException | _: CompileException) if codeGenFallBack =>
      genInterpretedPredicate(expression, inputSchema)
  }
}
protected def newOrdering(
order: Seq[SortOrder], inputSchema: Seq[Attribute]): Ordering[InternalRow] = {
GenerateOrdering.generate(order, inputSchema)
}
/**
 * Creates a row ordering for the given schema, in natural ascending order.
 */
protected def newNaturalAscendingOrdering(dataTypes: Seq[DataType]): Ordering[InternalRow] = {
  // Bind each column by ordinal position and sort ascending on all of them.
  val sortOrders = for ((dt, index) <- dataTypes.zipWithIndex)
    yield SortOrder(BoundReference(index, dt, nullable = true), Ascending)
  newOrdering(sortOrders, Seq.empty)
}
}
object SparkPlan {
  /** Shared pool of up to 16 daemon threads used to evaluate uncorrelated subqueries. */
  private[execution] val subqueryExecutionContext = ExecutionContext.fromExecutorService(
    ThreadUtils.newDaemonCachedThreadPool("subquery", 16))
}
/** A physical operator with no children; it produces all of its own output attributes. */
trait LeafExecNode extends SparkPlan {
  override final def children: Seq[SparkPlan] = Nil
  override def producedAttributes: AttributeSet = outputSet
}
object UnaryExecNode {
  /** Extractor matching any plan with exactly one child, yielding (plan, child). */
  def unapply(a: Any): Option[(SparkPlan, SparkPlan)] = a match {
    case plan: SparkPlan =>
      plan.children match {
        case Seq(onlyChild) => Some((plan, onlyChild))
        case _ => None
      }
    case _ => None
  }
}
/** A physical operator with exactly one child. */
trait UnaryExecNode extends SparkPlan {
  def child: SparkPlan

  override final def children: Seq[SparkPlan] = child :: Nil
}
/** A physical operator with exactly two children, conventionally `left` and `right`. */
trait BinaryExecNode extends SparkPlan {
  def left: SparkPlan
  def right: SparkPlan

  override final def children: Seq[SparkPlan] = Seq(left, right)
}
| shubhamchopra/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala | Scala | apache-2.0 | 14,476 |
package lore.compiler.poem
import lore.compiler.assembly.PropertyOrder
import lore.compiler.core.CompilationException
import lore.compiler.utils.CollectionExtensions.VectorExtension
/**
 * The ordered property-name layout of a shape.
 *
 * @param names The names must be ordered lexicographically.
 */
case class PoemMetaShape(names: Vector[String]) {
  if (!names.isSorted) {
    throw CompilationException(s"The property names $names of a shape must be sorted lexicographically.")
  }
  // Duplicate property names would make lookup by name ambiguous.
  if (names.length != names.distinct.length) {
    throw CompilationException(s"The property names $names of a shape may not contain duplicates.")
  }
}
object PoemMetaShape {
  /** Builds a meta shape from possibly-unsorted names by sorting them into property order first. */
  def build(names: Vector[String]): PoemMetaShape = PoemMetaShape(PropertyOrder.sort(names)(identity))
}
| marcopennekamp/lore | compiler/src/lore/compiler/poem/PoemMetaShape.scala | Scala | mit | 723 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.scala
import java.io.ByteArrayInputStream
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
import org.apache.avro.scala.test.generated.scala.RecordWithAllTypes
/** Verifies that `copy` on a generated record replaces only the named field. */
@RunWith(classOf[JUnitRunner])
class TestCopy
  extends FunSuite {

  test("copy") {
    val rOrig = Fixtures.recordWithAllTypes("a")
    val rCopy = rOrig.copy(stringField = "b")
    assert(rCopy.stringField === "b")
    assert(rOrig.doubleField === rCopy.doubleField) // check other fields are same
  }
}
| julianpeeters/avro | lang/scala/src/test/java/org/apache/avro/scala/TestCopy.scala | Scala | apache-2.0 | 1,354 |
package lila.common
package paginator
import scalaz.Success
final class Paginator[A] private[paginator] (
    val currentPage: Int,
    val maxPerPage: Int,
    /**
     * Returns the results for the current page.
     * The result is cached.
     */
    val currentPageResults: Seq[A],
    /**
     * Returns the number of results.
     * The result is cached.
     */
    val nbResults: Int) {

  /**
   * Returns the previous page.
   */
  def previousPage: Option[Int] = (currentPage > 1) option (currentPage - 1)

  /**
   * Returns the next page.
   */
  def nextPage: Option[Int] = (currentPage < nbPages) option (currentPage + 1)

  /**
   * Returns the number of pages.
   */
  def nbPages: Int =
    // Integer ceiling division. The previous Float-based version,
    // ceil(nbResults.toFloat / maxPerPage), loses precision once nbResults
    // exceeds ~2^24 and could report a wrong page count; the guard keeps the
    // old behavior of reporting 0 pages for non-positive counts.
    if (nbResults <= 0) 0 else (nbResults + maxPerPage - 1) / maxPerPage

  /**
   * Returns whether we have to paginate or not.
   * This is true if the number of results is higher than the max per page.
   */
  def hasToPaginate: Boolean = nbResults > maxPerPage

  /**
   * Returns whether there is previous page or not.
   */
  def hasPreviousPage: Boolean = previousPage.isDefined

  /**
   * Returns whether there is next page or not.
   */
  def hasNextPage: Boolean = nextPage.isDefined

  /** Returns a paginator for the same page/counts but with the results replaced. */
  def withCurrentPageResults[B](newResults: Seq[B]): Paginator[B] = new Paginator(
    currentPage = currentPage,
    maxPerPage = maxPerPage,
    currentPageResults = newResults,
    nbResults = nbResults)
}
object Paginator {

  /**
   * Builds a paginator, falling back to a safe first page when the requested
   * parameters are invalid.
   */
  def apply[A](
    adapter: AdapterLike[A],
    currentPage: Int = 1,
    maxPerPage: Int = 10): Fu[Paginator[A]] =
    // `maxPerPage max 1` makes the fallback parameters always valid; the
    // previous fallback kept an invalid maxPerPage and recursed forever
    // whenever maxPerPage <= 0.
    validate(adapter, currentPage, maxPerPage) | apply(adapter, 1, maxPerPage max 1)

  /** Validates the paging parameters, returning the paginator computation on success. */
  def validate[A](
    adapter: AdapterLike[A],
    currentPage: Int = 1,
    maxPerPage: Int = 10): Valid[Fu[Paginator[A]]] =
    // The two error messages below used to be swapped.
    if (currentPage < 1) !!("Current page must be greater than zero")
    else if (maxPerPage <= 0) !!("Max per page must be greater than zero")
    else Success(for {
      results ← adapter.slice((currentPage - 1) * maxPerPage, maxPerPage)
      nbResults ← adapter.nbResults
    } yield new Paginator(currentPage, maxPerPage, results, nbResults))
}
| JimmyMow/lila | modules/common/src/main/paginator/Paginator.scala | Scala | mit | 2,095 |
package com.github.slackey.bot
import org.json4s._
import com.github.slackey.codecs.types.{BotMessage, MeMessage, Message, SimpleMessage}
import com.github.slackey.codecs.{extract, isReply}
/**
 * Actor that fans incoming Slack RTM events out to the registered listeners
 * and forwards any outgoing messages the listeners produce back to the sender.
 */
class Worker(listeners: List[RealTimeMessagingListener]) extends SlackeyActor {
  override def receive: Receive = {
    case Connected(state) =>
      dispatchAndReply(_.onConnected(state))
    case Disconnected(state) =>
      dispatchAndReply(_.onDisconnected(state))
    case ReceiveMessage(state, json) =>
      // Protocol replies (acks to our own sends) take a separate path from
      // regular events.
      if (isReply(json)) handleReply(state, json) else handle(state, json)
  }

  // Routes a regular event to the listener callback matching its message
  // subtype, then always gives every listener a chance to see the raw JSON.
  private def handle(state: SlackState, json: JObject) = {
    json \\ "type" match {
      case JString("message") =>
        val message = extract[Message](json)
        message.subtype match {
          case None =>
            val specific = SimpleMessage(message)
            dispatchAndReply(_.onSimpleMessage(state, specific, json))
          case Some("bot_message") =>
            val specific = BotMessage(message)
            dispatchAndReply(_.onBotMessage(state, specific, json))
          case Some("me_message") =>
            val specific = MeMessage(message)
            dispatchAndReply(_.onMeMessage(state, specific, json))
          case _ => // noop
        }
      case _ => // noop
    }
    for (listener <- listeners) {
      listener.onAnyMessage(state, json)
    }
  }

  // Reply acks are currently ignored.
  private def handleReply(state: SlackState, json: JObject) = {}

  // Invokes `f` on every listener and sends each resulting message back to
  // the actor that delivered the triggering event.
  private def dispatchAndReply(f: RealTimeMessagingListener => Seq[SendMessage]) = {
    for {
      listener <- listeners
      msg <- f(listener)
    } sender() ! msg
  }
}
| slackey/slackey | src/main/scala/com/github/slackey/bot/Worker.scala | Scala | mit | 1,621 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package reflect
package internal
import Flags._
import util._
trait ReificationSupport { self: SymbolTable =>
import definitions._
class ReificationSupportImpl extends ReificationSupportApi {
/** Looks up the type member `name` of `owner`; fails with `ScalaReflectionException` if absent. */
def selectType(owner: Symbol, name: String): TypeSymbol =
  select(owner, newTypeName(name)).asType

/** Looks up the term member `name` of `owner`.
  * When the result is overloaded, picks the non-method alternative.
  */
def selectTerm(owner: Symbol, name: String): TermSymbol = {
  val result = select(owner, newTermName(name)).asTerm
  if (result.isOverloaded) result.suchThat(!_.isMethod).asTerm
  else result
}

/** Core lookup: a declared member of `owner`, falling back to the owning mirror's
  * `missingHook`; throws `ScalaReflectionException` when neither finds the name.
  */
protected def select(owner: Symbol, name: Name): Symbol = {
  val result = owner.info decl name
  if (result ne NoSymbol) result
  else
    mirrorThatLoaded(owner).missingHook(owner, name) orElse {
      throw new ScalaReflectionException("%s %s in %s not found".format(if (name.isTermName) "term" else "type", name, owner.fullName))
    }
}
/** Selects the overload alternative of method `name` in `owner` at position `index`.
  *
  * Throws `ScalaReflectionException` when either the method or the requested
  * alternative does not exist. Previously an out-of-range `index` escaped as an
  * `IndexOutOfBoundsException` from `alternatives(index)` before the intended
  * reflection error could be raised; `lift` folds that case into the same error.
  */
def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol = {
  // `lift` turns an out-of-range index into None instead of throwing, so
  // "no such method" and "no such alternative" are reported uniformly.
  val result = owner.info.decl(newTermName(name)).alternatives.lift(index).getOrElse(NoSymbol)
  if (result ne NoSymbol) result.asMethod
  else throw new ScalaReflectionException("overloaded method %s #%d in %s not found".format(name, index, owner.fullName))
}
// Factories for free symbols, nested symbols and scopes. All created symbols
// are marked flag-completed up front so users never trigger lazy completion.
def newFreeTerm(name: String, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
  newFreeTermSymbol(newTermName(name), value, flags, origin).markFlagsCompleted(mask = AllFlags)
def newFreeType(name: String, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
  newFreeTypeSymbol(newTypeName(name), flags, origin).markFlagsCompleted(mask = AllFlags)
def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol =
  owner.newNestedSymbol(name, pos, flags, isClass).markFlagsCompleted(mask = AllFlags)
def newScopeWith(elems: Symbol*): Scope =
  self.newScopeWith(elems: _*)

// Symbol mutators: update in place and return the same symbol.
def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S =
  sym.setAnnotations(annots)
def setInfo[S <: Symbol](sym: S, tpe: Type): S =
  sym.setInfo(tpe).markAllCompleted()

// Tree factories delegating to the enclosing symbol table.
def mkThis(sym: Symbol): Tree = self.This(sym)
def mkSelect(qualifier: Tree, sym: Symbol): Select = self.Select(qualifier, sym)
def mkIdent(sym: Symbol): Ident = self.Ident(sym)
def mkTypeTree(tp: Type): TypeTree = self.TypeTree(tp)

// Type factories: thin wrappers over the internal type constructors.
def ThisType(sym: Symbol): Type = self.ThisType(sym)
def SingleType(pre: Type, sym: Symbol): Type = self.SingleType(pre, sym)
def SuperType(thistpe: Type, supertpe: Type): Type = self.SuperType(thistpe, supertpe)
def ConstantType(value: Constant): ConstantType = self.ConstantType(value)
def TypeRef(pre: Type, sym: Symbol, args: List[Type]): Type = self.TypeRef(pre, sym, args)
def RefinedType(parents: List[Type], decls: Scope, typeSymbol: Symbol): RefinedType = self.RefinedType(parents, decls, typeSymbol)
def ClassInfoType(parents: List[Type], decls: Scope, typeSymbol: Symbol): ClassInfoType = self.ClassInfoType(parents, decls, typeSymbol)
def MethodType(params: List[Symbol], resultType: Type): MethodType = self.MethodType(params, resultType)
def NullaryMethodType(resultType: Type): NullaryMethodType = self.NullaryMethodType(resultType)
def PolyType(typeParams: List[Symbol], resultType: Type): PolyType = self.PolyType(typeParams, resultType)
def ExistentialType(quantified: List[Symbol], underlying: Type): ExistentialType = self.ExistentialType(quantified, underlying)
def AnnotatedType(annotations: List[Annotation], underlying: Type): AnnotatedType = self.AnnotatedType(annotations, underlying)
def TypeBounds(lo: Type, hi: Type): TypeBounds = self.TypeBounds(lo, hi)
def BoundedWildcardType(bounds: TypeBounds): BoundedWildcardType = self.BoundedWildcardType(bounds)

/** The `this`-prefix type of `sym`. */
def thisPrefix(sym: Symbol): Type = sym.thisPrefix

// Tree mutators: set the type/symbol in place and return the same tree.
def setType[T <: Tree](tree: T, tpe: Type): T = { tree.setType(tpe); tree }
def setSymbol[T <: Tree](tree: T, sym: Symbol): T = { tree.setSymbol(sym); tree }
/** Flattens a tree into a list of statements: a block yields its contents,
  * definitions/imports yield themselves, `EmptyTree` yields nothing.
  * Anything else is rejected.
  */
def toStats(tree: Tree): List[Tree] = tree match {
  case EmptyTree => Nil
  case SyntacticBlock(stats) => stats
  case defn if defn.isDef => defn :: Nil
  case imp: Import => imp :: Nil
  case _ => throw new IllegalArgumentException(s"can't flatten $tree")
}

/** Validates that `tree` has the shape of an annotation
  * (`new Name[..targs](...argss)`) and returns it unchanged.
  */
def mkAnnotation(tree: Tree): Tree = tree match {
  case SyntacticNew(Nil, SyntacticApplied(SyntacticAppliedType(_, _), _) :: Nil, noSelfType, Nil) =>
    tree
  case _ =>
    throw new IllegalArgumentException(s"Tree ${showRaw(tree)} isn't a correct representation of annotation." +
      """Consider reformatting it into a q"new $name[..$targs](...$argss)" shape""")
}
def mkAnnotation(trees: List[Tree]): List[Tree] = trees.map(mkAnnotation)

/** Converts nested argument lists into parameter `ValDef`s, adjusting flags. */
def mkParam(argss: List[List[Tree]], extraFlags: FlagSet = NoFlags, excludeFlags: FlagSet = DEFERRED): List[List[ValDef]] =
  argss.map { args => args.map { mkParam(_, extraFlags, excludeFlags) } }

/** Converts a single tree into a parameter `ValDef`.
  * Accepts `name: T` ascriptions and existing `ValDef`s; a non-empty rhs
  * marks the parameter as having a default value.
  */
def mkParam(tree: Tree, extraFlags: FlagSet, excludeFlags: FlagSet): ValDef = tree match {
  case Typed(Ident(name: TermName), tpt) =>
    mkParam(ValDef(NoMods, name, tpt, EmptyTree), extraFlags, excludeFlags)
  case vd: ValDef =>
    var newmods = vd.mods & (~excludeFlags)
    if (vd.rhs.nonEmpty) newmods |= DEFAULTPARAM
    copyValDef(vd)(mods = newmods | extraFlags)
  case _ =>
    throw new IllegalArgumentException(s"$tree is not valid representation of a parameter, " +
      """consider reformatting it into q"val $name: $T = $default" shape""")
}

/** Same as `mkParam`, additionally flagging the parameters IMPLICIT. */
def mkImplicitParam(args: List[Tree]): List[ValDef] = args.map(mkImplicitParam)
def mkImplicitParam(tree: Tree): ValDef = mkParam(tree, IMPLICIT | PARAM, NoFlags)

/** Converts trees into type parameters, flagging them PARAM and clearing DEFERRED. */
def mkTparams(tparams: List[Tree]): List[TypeDef] =
  tparams.map {
    case td: TypeDef => copyTypeDef(td)(mods = (td.mods | PARAM) & (~DEFERRED))
    case other => throw new IllegalArgumentException(s"can't splice $other as type parameter")
  }
/** Validates that `stat` may appear inside a refinement type
  * (abstract def/val, or a type member) and returns it unchanged.
  */
def mkRefineStat(stat: Tree): Tree = {
  stat match {
    case dd: DefDef => require(dd.rhs.isEmpty, "can't use DefDef with non-empty body as refine stat")
    case vd: ValDef => require(vd.rhs.isEmpty, "can't use ValDef with non-empty rhs as refine stat")
    case td: TypeDef =>
    case _ => throw new IllegalArgumentException(s"not legal refine stat: $stat")
  }
  stat
}
def mkRefineStat(stats: List[Tree]): List[Tree] = stats.map(mkRefineStat)

/** Validates that `stat` may appear at package level (class/object/package)
  * and returns it unchanged.
  */
def mkPackageStat(stat: Tree): Tree = {
  stat match {
    case cd: ClassDef =>
    case md: ModuleDef =>
    case pd: PackageDef =>
    case _ => throw new IllegalArgumentException(s"not legal package stat: $stat")
  }
  stat
}
def mkPackageStat(stats: List[Tree]): List[Tree] = stats.map(mkPackageStat)

/** Constructs/matches a `scala.<name>` selection attributed to the scala package. */
object ScalaDot extends ScalaDotExtractor {
  def apply(name: Name): Tree = gen.scalaDot(name)
  def unapply(tree: Tree): Option[Name] = tree match {
    case Select(id @ Ident(nme.scala_), name) if id.symbol == ScalaPackage => Some(name)
    case _ => None
  }
}

/** Flags a concrete val or a type def as an early definition (PRESUPER). */
def mkEarlyDef(defn: Tree): Tree = defn match {
  case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
    copyValDef(vdef)(mods = mods | PRESUPER)
  case tdef @ TypeDef(mods, _, _, _) =>
    copyTypeDef(tdef)(mods = mods | PRESUPER)
  case _ =>
    throw new IllegalArgumentException(s"not legal early def: $defn")
}
def mkEarlyDef(defns: List[Tree]): List[Tree] = defns.map(mkEarlyDef)

/** Builds a `RefTree` with the given qualifier, named and attributed after `sym`. */
def mkRefTree(qual: Tree, sym: Symbol) = self.RefTree(qual, sym.name) setSymbol sym

// Fresh-name generation, backed by the current fresh-name creator.
def freshTermName(prefix: String = nme.FRESH_TERM_NAME_PREFIX): TermName = self.freshTermName(prefix)
def freshTypeName(prefix: String): TypeName = self.freshTypeName(prefix)
protected implicit def fresh: FreshNameCreator = self.currentFreshNameCreator
/** `apply` appends `implparams` as a trailing implicit section (when non-empty);
  * `unapply` splits a trailing implicit section from the explicit ones.
  */
object ImplicitParams extends ImplicitParamsExtractor {
  def apply(paramss: List[List[Tree]], implparams: List[Tree]): List[List[Tree]] =
    if (implparams.nonEmpty) paramss :+ mkImplicitParam(implparams) else paramss
  def unapply(vparamss: List[List[ValDef]]): Some[(List[List[ValDef]], List[ValDef])] = vparamss match {
    case init :+ (last @ (initlast :: _)) if initlast.mods.isImplicit => Some((init, last))
    case _ => Some((vparamss, Nil))
  }
}

/** Round-trips raw flag bits and `FlagSet`. */
object FlagsRepr extends FlagsReprExtractor {
  def apply(bits: Long): FlagSet = bits
  def unapply(flags: Long): Some[Long] = Some(flags)
}

/** Construct/deconstruct type application term trees.
 * Treats other term trees as zero-argument type applications.
 */
object SyntacticTypeApplied extends SyntacticTypeAppliedExtractor {
  def apply(tree: Tree, targs: List[Tree]): Tree =
    if (targs.isEmpty) tree
    else if (tree.isTerm) TypeApply(tree, targs)
    else throw new IllegalArgumentException(s"can't apply type arguments to $tree")
  def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match {
    case TypeApply(fun, targs) => Some((fun, targs))
    case _ if tree.isTerm => Some((tree, Nil))
    case _ => None
  }
}

/** Construct/deconstruct applied type trees.
 * Treats other types as zero-arity applied types.
 */
object SyntacticAppliedType extends SyntacticTypeAppliedExtractor {
  def apply(tree: Tree, targs: List[Tree]): Tree =
    if (targs.isEmpty) tree
    else if (tree.isType) AppliedTypeTree(tree, targs)
    else throw new IllegalArgumentException(s"can't create applied type from non-type $tree")
  def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match {
    case MaybeTypeTreeOriginal(AppliedTypeTree(tpe, targs)) => Some((tpe, targs))
    case _ if tree.isType => Some((tree, Nil))
    case _ => None
  }
}

/** Construct/deconstruct (possibly curried) applications.
  * `unapply` always matches: it peels argument lists off applications (keeping
  * `new`-constructor calls intact as a whole) and treats other trees as
  * zero-argument applications.
  */
object SyntacticApplied extends SyntacticAppliedExtractor {
  def apply(tree: Tree, argss: List[List[Tree]]): Tree =
    argss.foldLeft(tree) { (f, args) => Apply(f, args.map(treeInfo.assignmentToMaybeNamedArg)) }
  def unapply(tree: Tree): Some[(Tree, List[List[Tree]])] = tree match {
    case UnApply(treeInfo.Unapplied(Select(fun, nme.unapply)), pats) =>
      Some((fun, pats :: Nil))
    case treeInfo.Applied(fun, targs, argss) =>
      fun match {
        case Select(_: New, nme.CONSTRUCTOR) =>
          Some((tree, Nil))
        case _ =>
          val callee =
            if (fun.isTerm) SyntacticTypeApplied(fun, targs)
            else SyntacticAppliedType(fun, targs)
          Some((callee, argss))
      }
  }
}
// recover constructor contents generated by gen.mkTemplate
/** Matches a constructor `DefDef` produced by `gen.mkTemplate`, recovering its
  * modifiers, parameter lists, and the local val defs holding early-def values.
  * Trait initializers (MIXIN_CONSTRUCTOR) match with the TRAIT flag re-added.
  */
protected object UnCtor {
  def unapply(tree: Tree): Option[(Modifiers, List[List[ValDef]], List[Tree])] = tree match {
    case DefDef(mods, nme.MIXIN_CONSTRUCTOR, _, _, _, SyntacticBlock(lvdefs :+ _)) =>
      Some((mods | Flag.TRAIT, Nil, lvdefs))
    case DefDef(mods, nme.CONSTRUCTOR, Nil, vparamss, _, SyntacticBlock(lvdefs :+ _ :+ _)) =>
      Some((mods, vparamss, lvdefs))
    case _ => None
  }
}

// undo gen.mkTemplate
/** Decomposes a `Template` back into the pieces `gen.mkTemplate` was built from:
  * parents, self type, constructor modifiers, constructor parameter lists,
  * early definitions, and the remaining body.
  */
protected class UnMkTemplate(isCaseClass: Boolean) {
  def unapply(templ: Template): Option[(List[Tree], ValDef, Modifiers, List[List[ValDef]], List[Tree], List[Tree])] = {
    val Template(parents, selfType, _) = templ
    val tbody = treeInfo.untypecheckedTemplBody(templ)
    def result(ctorMods: Modifiers, vparamss: List[List[ValDef]], edefs: List[Tree], body: List[Tree]) =
      Some((parents, selfType, ctorMods, vparamss, edefs, body))
    def indexOfCtor(trees: List[Tree]) =
      trees.indexWhere { case UnCtor(_, _, _) => true ; case _ => false }
    // A body of pure interface members has no constructor: treat as a trait.
    if (tbody forall treeInfo.isInterfaceMember)
      result(NoMods | Flag.TRAIT, Nil, Nil, tbody)
    else if (indexOfCtor(tbody) == -1)
      None
    else {
      // Split body into early defs, synthetic field defs, the ctor, and the rest.
      val (rawEdefs, rest) = tbody.span(treeInfo.isEarlyDef)
      val (gvdefs, etdefs) = rawEdefs.partition(treeInfo.isEarlyValDef)
      val (fieldDefs, UnCtor(ctorMods, ctorVparamss, lvdefs) :: body) = rest.splitAt(indexOfCtor(rest)): @unchecked
      // Reunite each getter-style early val with the rhs stored in the ctor's local val.
      val evdefs = gvdefs.zip(lvdefs).map {
        // TODO: in traits, early val defs are defdefs
        case (gvdef @ ValDef(_, _, tpt: TypeTree, _), ValDef(_, _, _, rhs)) =>
          copyValDef(gvdef)(tpt = tpt.original, rhs = rhs)
        case (tr1, tr2) =>
          throw new MatchError((tr1, tr2))
      }
      val edefs = evdefs ::: etdefs
      if (ctorMods.isTrait)
        result(ctorMods, Nil, edefs, body)
      else {
        // undo conversion from (implicit ... ) to ()(implicit ... ) when it's the only parameter section
        // except that case classes require the explicit leading empty parameter list
        val vparamssRestoredImplicits = ctorVparamss match {
          case Nil :: (tail @ ((head :: _) :: _)) if head.mods.isImplicit && !isCaseClass => tail
          case other => other
        }
        // undo flag modifications by merging flag info from constructor args and fieldDefs
        val modsMap = fieldDefs.map { case ValDef(mods, name, _, _) => name -> mods case x => throw new MatchError(x) }.toMap
        def ctorArgsCorrespondToFields = vparamssRestoredImplicits.flatten.forall { vd => modsMap.contains(vd.name) }
        if (!ctorArgsCorrespondToFields) None
        else {
          val vparamss = mmap(vparamssRestoredImplicits) { vd =>
            val originalMods = modsMap(vd.name) | (vd.mods.flags & DEFAULTPARAM)
            atPos(vd.pos)(ValDef(originalMods, vd.name, vd.tpt, vd.rhs))
          }
          result(ctorMods, vparamss, edefs, body)
        }
      }
    }
  }
  def asCase = new UnMkTemplate(isCaseClass = true)
}
protected object UnMkTemplate extends UnMkTemplate(isCaseClass = false)
/** Validates/normalizes a self-type `ValDef`: rhs must be empty;
  * the result is flagged PRIVATE with DEFERRED cleared.
  */
protected def mkSelfType(tree: Tree) = tree match {
  case vd: ValDef =>
    require(vd.rhs.isEmpty, "self types must have empty right hand side")
    copyValDef(vd)(mods = (vd.mods | PRIVATE) & (~DEFERRED))
  case _ =>
    throw new IllegalArgumentException(
      s"""$tree is not a valid representation of self type, consider reformatting into q"val $$self: $$T" shape""")
}

/** Constructs/deconstructs class definitions from their syntactic pieces
  * (mods, name, tparams, constructor mods/params, early defs, parents,
  * self type, body), delegating assembly to `gen.mkTemplate`/`gen.mkClassDef`.
  */
object SyntacticClassDef extends SyntacticClassDefExtractor {
  def apply(mods: Modifiers, name: TypeName, tparams: List[Tree],
            constrMods: Modifiers, vparamss: List[List[Tree]],
            earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef = {
    // Constructor params become PARAMACCESSOR fields; case classes add CASEACCESSOR.
    val extraFlags = PARAMACCESSOR | (if (mods.isCase) CASEACCESSOR else 0L)
    val vparamss0 = mkParam(vparamss, extraFlags, excludeFlags = DEFERRED | PARAM)
    val tparams0 = mkTparams(tparams)
    // Case classes drop auto-added Product/Serializable/AnyRef parents.
    val parents0 = gen.mkParents(mods,
      if (mods.isCase) parents.filter {
        case ScalaDot(tpnme.Product | tpnme.Serializable | tpnme.AnyRef) => false
        case _ => true
      } else parents
    )
    val body0 = earlyDefs ::: body
    val selfType0 = mkSelfType(selfType)
    val templ = gen.mkTemplate(parents0, selfType0, constrMods, vparamss0, body0)
    gen.mkClassDef(mods, name, tparams0, templ)
  }
  def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
                                   List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
    case ClassDef(mods, name, tparams, impl) =>
      val X = if (mods.isCase) UnMkTemplate.asCase else UnMkTemplate
      impl match {
        case X(parents, selfType, ctorMods, vparamss, earlyDefs, body)
          if (!ctorMods.isTrait && !ctorMods.hasFlag(JAVA)) =>
          Some((mods, name, tparams, ctorMods, vparamss, earlyDefs, parents, selfType, body))
        case _ =>
          None
      }
    case _ =>
      None
  }
}

/** Constructs/deconstructs trait definitions (a ClassDef with TRAIT | ABSTRACT). */
object SyntacticTraitDef extends SyntacticTraitDefExtractor {
  def apply(mods: Modifiers, name: TypeName, tparams: List[Tree], earlyDefs: List[Tree],
            parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef = {
    val mods0 = mods | TRAIT | ABSTRACT
    val templ = gen.mkTemplate(parents, mkSelfType(selfType), Modifiers(TRAIT), Nil, earlyDefs ::: body)
    gen.mkClassDef(mods0, name, mkTparams(tparams), templ)
  }
  def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef],
                                   List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
    case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfType, ctorMods, vparamss, earlyDefs, body))
      if mods.isTrait =>
      Some((mods, name, tparams, earlyDefs, parents, selfType, body))
    case _ => None
  }
}

/** Constructs/deconstructs object (module) definitions. */
object SyntacticObjectDef extends SyntacticObjectDefExtractor {
  def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
            parents: List[Tree], selfType: Tree, body: List[Tree]): ModuleDef =
    ModuleDef(mods, name, gen.mkTemplate(parents, mkSelfType(selfType), NoMods, Nil, earlyDefs ::: body))
  def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
    case ModuleDef(mods, name, UnMkTemplate(parents, selfType, _, _, earlyDefs, body)) =>
      Some((mods, name, earlyDefs, parents, selfType, body))
    case _ =>
      None
  }
}

/** Constructs/deconstructs `package object` definitions:
  * a PackageDef wrapping a module named `package`.
  */
object SyntacticPackageObjectDef extends SyntacticPackageObjectDefExtractor {
  def apply(name: TermName, earlyDefs: List[Tree],
            parents: List[Tree], selfType: Tree, body: List[Tree]): PackageDef =
    gen.mkPackageObject(SyntacticObjectDef(NoMods, name, earlyDefs, parents, selfType, body))
  def unapply(tree: Tree): Option[(TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
    case PackageDef(Ident(name: TermName), List(SyntacticObjectDef(NoMods, nme.PACKAGEkw, earlyDefs, parents, selfType, body))) =>
      Some((name, earlyDefs, parents, selfType, body))
    case _ =>
      None
  }
}
// match references to `scala.$name`
/** Matches trees that refer to one of the given `scala`-package member symbols,
  * whether via a symbol-attributed bare ident, `scala.name`, or `_root_.scala.name`.
  */
protected class ScalaMemberRef(symbols: Seq[Symbol]) {
  /** The candidate symbol with the given name, if any.
    * (`find` replaces the previous `collect`-then-`headOption`, which built an
    * intermediate collection for the same result.)
    */
  def result(name: Name): Option[Symbol] =
    symbols.find(_.name == name)
  def unapply(tree: Tree): Option[Symbol] = tree match {
    case id @ Ident(name) if symbols.contains(id.symbol) && name == id.symbol.name =>
      Some(id.symbol)
    case Select(scalapkg @ Ident(nme.scala_), name) if scalapkg.symbol == ScalaPackage =>
      result(name)
    case Select(Select(Ident(nme.ROOTPKG), nme.scala_), name) =>
      result(name)
    case _ => None
  }
}
protected object TupleClassRef extends ScalaMemberRef(TupleClass.seq)
protected object TupleCompanionRef extends ScalaMemberRef(TupleClass.seq.map { _.companionModule })
protected object UnitClassRef extends ScalaMemberRef(Seq(UnitClass))
protected object FunctionClassRef extends ScalaMemberRef(FunctionClass.seq)
/** Constructs/deconstructs tuple expressions.
  * `()` matches the empty tuple; a lone term matches as a 1-element "tuple".
  */
object SyntacticTuple extends SyntacticTupleExtractor {
  def apply(args: List[Tree]): Tree = {
    require(args.isEmpty || (TupleClass(args.length) != NoSymbol), s"Tuples with ${args.length} arity aren't supported")
    gen.mkTuple(args)
  }
  def unapply(tree: Tree): Option[List[Tree]] = tree match {
    case Literal(Constant(())) =>
      SomeOfNil
    case Apply(MaybeTypeTreeOriginal(SyntacticTypeApplied(MaybeSelectApply(TupleCompanionRef(sym)), targs)), args)
      if sym == TupleClass(args.length).companionModule
         && (targs.isEmpty || targs.length == args.length) =>
      Some(args)
    case _ if tree.isTerm =>
      Some(tree :: Nil)
    case _ =>
      None
  }
}

/** Constructs/deconstructs tuple types.
  * `Unit` matches the empty tuple type; a lone type matches as 1-element.
  */
object SyntacticTupleType extends SyntacticTupleExtractor {
  def apply(args: List[Tree]): Tree = {
    require(args.isEmpty || (TupleClass(args.length) != NoSymbol), s"Tuples with ${args.length} arity aren't supported")
    gen.mkTupleType(args)
  }
  def unapply(tree: Tree): Option[List[Tree]] = tree match {
    case MaybeTypeTreeOriginal(UnitClassRef(_)) =>
      SomeOfNil
    case MaybeTypeTreeOriginal(AppliedTypeTree(TupleClassRef(sym), args))
      if sym == TupleClass(args.length) =>
      Some(args)
    case _ if tree.isType =>
      Some(tree :: Nil)
    case _ =>
      None
  }
}

/** Constructs/deconstructs function types `(A1, ..., An) => R`. */
object SyntacticFunctionType extends SyntacticFunctionTypeExtractor {
  def apply(argtpes: List[Tree], restpe: Tree): Tree = {
    require(FunctionClass(argtpes.length) != NoSymbol, s"Function types with ${argtpes.length} arity aren't supported")
    gen.mkFunctionTypeTree(argtpes, restpe)
  }
  def unapply(tree: Tree): Option[(List[Tree], Tree)] = tree match {
    case MaybeTypeTreeOriginal(AppliedTypeTree(FunctionClassRef(sym), args @ (argtpes :+ restpe)))
      if sym == FunctionClass(args.length - 1) =>
      Some((argtpes, restpe))
    case _ => None
  }
}

/** Matches a `()` literal that the compiler inserted itself
  * (carries a SyntheticUnitAttachment), as opposed to user-written `()`.
  */
object SyntheticUnit {
  def unapply(tree: Tree): Boolean = tree match {
    case Literal(Constant(())) if tree.hasAttachment[SyntheticUnitAttachment.type] => true
    case _ => false
  }
}
/** Syntactic combinator that abstracts over Block tree.
 *
 * Apart from providing a more straightforward api that exposes
 * block as a list of elements rather than (stats, expr) pair
 * it also:
 *
 * 1. Strips trailing synthetic units which are inserted by the
 * compiler if the block ends with a definition rather
 * than an expression or is empty.
 *
 * 2. Matches non-block term trees and recognizes them as
 * single-element blocks for sake of consistency with
 * compiler's default to treat single-element blocks with
 * expressions as just expressions. The only exception is q""
 * which is not considered to be a block.
 */
object SyntacticBlock extends SyntacticBlockExtractor {
  def apply(stats: List[Tree]): Tree = gen.mkBlock(stats)
  def unapply(tree: Tree): Option[List[Tree]] = tree match {
    case bl @ self.Block(stats, SyntheticUnit()) => Some(treeInfo.untypecheckedBlockBody(bl))
    case bl @ self.Block(stats, expr) => Some(treeInfo.untypecheckedBlockBody(bl) :+ expr)
    case SyntheticUnit() => SomeOfNil
    case _ if tree.isTerm && tree.nonEmpty => Some(tree :: Nil)
    case _ => None
  }
}

/** Constructs/deconstructs anonymous functions.
  * Parameters must not carry default values.
  */
object SyntacticFunction extends SyntacticFunctionExtractor {
  def apply(params: List[Tree], body: Tree): Function = {
    val params0 :: Nil = mkParam(params :: Nil, PARAM): @unchecked
    require(params0.forall { _.rhs.isEmpty }, "anonymous functions don't support parameters with default values")
    Function(params0, body)
  }
  def unapply(tree: Function): Option[(List[ValDef], Tree)] = Function.unapply(tree)
}

/** Constructs/deconstructs `new` expressions, including anonymous-class
  * instantiations (matched back from the `{ class $anon ...; new $anon() }` block).
  */
object SyntacticNew extends SyntacticNewExtractor {
  def apply(earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): Tree =
    gen.mkNew(parents, mkSelfType(selfType), earlyDefs ::: body, NoPosition, NoPosition)
  def unapply(tree: Tree): Option[(List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
    case treeInfo.Applied(Select(New(SyntacticAppliedType(ident, targs)), nme.CONSTRUCTOR), Nil, List(Nil)) =>
      Some((Nil, SyntacticAppliedType(ident, targs) :: Nil, noSelfType, Nil))
    case treeInfo.Applied(Select(New(SyntacticAppliedType(ident, targs)), nme.CONSTRUCTOR), Nil, argss) =>
      Some((Nil, SyntacticApplied(SyntacticAppliedType(ident, targs), argss) :: Nil, noSelfType, Nil))
    case SyntacticBlock(SyntacticClassDef(_, tpnme.ANON_CLASS_NAME, Nil, _, ListOfNil, earlyDefs, parents, selfType, body) ::
                        Apply(Select(New(Ident(tpnme.ANON_CLASS_NAME)), nme.CONSTRUCTOR), Nil) :: Nil) =>
      Some((earlyDefs, parents, selfType, body))
    case _ =>
      None
  }
}
/** Constructs/deconstructs method definitions.
  * Constructor bodies are wrapped in a block ending in `()` on construction
  * and unwrapped again on deconstruction.
  */
object SyntacticDefDef extends SyntacticDefDefExtractor {
  def apply(mods: Modifiers, name: TermName, tparams: List[Tree],
            vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef = {
    val tparams0 = mkTparams(tparams)
    val vparamss0 = mkParam(vparamss, PARAM)
    val rhs0 = {
      if (name != nme.CONSTRUCTOR) rhs
      else rhs match {
        case Block(_, _) => rhs
        case _ => Block(List(rhs), gen.mkSyntheticUnit())
      }
    }
    DefDef(mods, name, tparams0, vparamss0, tpt, rhs0)
  }
  def unapply(tree: Tree): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)] = tree match {
    case DefDef(mods, nme.CONSTRUCTOR, tparams, vparamss, tpt, Block(List(expr), Literal(Constant(())))) =>
      Some((mods, nme.CONSTRUCTOR, tparams, vparamss, tpt, expr))
    case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
      Some((mods, name, tparams, vparamss, tpt, rhs))
    case _ => None
  }
}

/** Shared implementation for val/var definitions; `isMutable` selects
  * whether the MUTABLE flag is required (var) or forbidden (val).
  */
protected class SyntacticValDefBase(isMutable: Boolean) extends SyntacticValDefExtractor {
  def modifiers(mods: Modifiers): Modifiers = if (isMutable) mods | MUTABLE else mods
  def apply(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree): ValDef = ValDef(modifiers(mods), name, tpt, rhs)
  def unapply(tree: Tree): Option[(Modifiers, TermName, Tree, Tree)] = tree match {
    case ValDef(mods, name, tpt, rhs) if mods.hasFlag(MUTABLE) == isMutable =>
      Some((mods, name, tpt, rhs))
    case _ =>
      None
  }
}
object SyntacticValDef extends SyntacticValDefBase(isMutable = false)
object SyntacticVarDef extends SyntacticValDefBase(isMutable = true)

/** Constructs/deconstructs assignments, covering plain `Assign`,
  * named arguments, and desugared `obj.update(args) = rhs` calls.
  */
object SyntacticAssign extends SyntacticAssignExtractor {
  def apply(lhs: Tree, rhs: Tree): Tree = gen.mkAssign(lhs, rhs)
  def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
    case Assign(lhs, rhs) => Some((lhs, rhs))
    case NamedArg(lhs, rhs) => Some((lhs, rhs))
    case Apply(Select(fn, nme.update), args :+ rhs) => Some((atPos(fn.pos)(Apply(fn, args)), rhs))
    case _ => None
  }
}
/** Unlifts every element of a tree list; succeeds only if all elements unlift. */
def UnliftListElementwise[T](unliftable: Unliftable[T]) = new UnliftListElementwise[T] {
  def unapply(lst: List[Tree]): Option[List[T]] = {
    val unlifted = lst.flatMap { unliftable.unapply(_) }
    // Length check detects elements silently dropped by a failed unapply.
    if (unlifted.length == lst.length) Some(unlifted) else None
  }
}

/** Same as `UnliftListElementwise`, but for nested (curried) tree lists. */
def UnliftListOfListsElementwise[T](unliftable: Unliftable[T]) = new UnliftListOfListsElementwise[T] {
  def unapply(lst: List[List[Tree]]): Option[List[List[T]]] = {
    val unlifted = lst.map { l => l.flatMap { unliftable.unapply(_) } }
    if (unlifted.flatten.length == lst.flatten.length) Some(unlifted) else None
  }
}
/** Constructs/deconstructs a `pat <- rhs` for-comprehension generator;
  * strips the refutability-check wrapper added by `gen.mkCheckIfRefutable`.
  */
object SyntacticValFrom extends SyntacticValFromExtractor {
  def apply(pat: Tree, rhs: Tree): Tree = gen.ValFrom(pat, gen.mkCheckIfRefutable(pat, rhs))
  def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
    case gen.ValFrom(pat, UnCheckIfRefutable(pat1, rhs1)) if pat.equalsStructure(pat1) =>
      Some((pat, rhs1))
    case gen.ValFrom(pat, rhs) =>
      Some((pat, rhs))
    case _ => None
  }
}

/** Constructs/deconstructs a `pat = rhs` for-comprehension value definition. */
object SyntacticValEq extends SyntacticValEqExtractor {
  def apply(pat: Tree, rhs: Tree): Tree = gen.ValEq(pat, rhs)
  def unapply(tree: Tree): Option[(Tree, Tree)] = gen.ValEq.unapply(tree)
}

/** Constructs/deconstructs an `if cond` for-comprehension filter. */
object SyntacticFilter extends SyntacticFilterExtractor {
  def apply(tree: Tree): Tree = gen.Filter(tree)
  def unapply(tree: Tree): Option[Tree] = gen.Filter.unapply(tree)
}

// If a tree in type position isn't provided by the user (e.g. `tpt` fields of
// `ValDef` and `DefDef`, function params etc), then it's going to be parsed as
// TypeTree with empty original and empty tpe. This extractor matches such trees
// so that one can write q"val x = 2" to match typecheck(q"val x = 2"). Note that
// TypeTree() is the only possible representation for empty trees in type positions.
// We used to sometimes receive EmptyTree in such cases, but not anymore.
object SyntacticEmptyTypeTree extends SyntacticEmptyTypeTreeExtractor {
  def apply(): TypeTree = self.TypeTree()
  def unapply(tt: TypeTree): Boolean = tt.original == null || tt.original.isEmpty
}
// match a sequence of desugared `val $pat = $value`
/** Matches a statement list consisting solely of desugared pattern definitions,
  * returning each as a (pattern, rhs) pair; plain val defs become bind patterns.
  */
protected object UnPatSeq {
  def unapply(trees: List[Tree]): Option[List[(Tree, Tree)]] = {
    val imploded = implodePatDefs(trees)
    val patvalues = imploded.flatMap {
      case SyntacticPatDef(_, pat, EmptyTree, rhs) => Some((pat, rhs))
      case ValDef(_, name, SyntacticEmptyTypeTree(), rhs) => Some((Bind(name, self.Ident(nme.WILDCARD)), rhs))
      case ValDef(_, name, tpt, rhs) => Some((Bind(name, Typed(self.Ident(nme.WILDCARD), tpt)), rhs))
      case _ => None
    }
    // Succeed only if every statement was recognized as a pattern/value pair.
    if (patvalues.length == imploded.length) Some(patvalues) else None
  }
}

// implode multiple-statement desugaring of pattern definitions
// into single-statement valdefs with nme.QUASIQUOTE_PAT_DEF name
object implodePatDefs extends Transformer {
  override def transform(tree: Tree) = tree match {
    case templ: Template => deriveTemplate(templ)(transformStats)
    case block: Block =>
      val Block(init, last) = block
      Block(transformStats(init), transform(last)).copyAttrs(block)
    // Single-binding desugaring: `val x = expr match { case pat => x }`.
    case ValDef(mods, name1, SyntacticEmptyTypeTree(), Match(MaybeTyped(MaybeUnchecked(value), tpt), CaseDef(pat, EmptyTree, Ident(name2)) :: Nil))
      if name1 == name2 =>
      ValDef(mods, nme.QUASIQUOTE_PAT_DEF, Typed(pat, tpt), transform(value))
    case _ =>
      super.transform(tree)
  }
  def transformStats(trees: List[Tree]): List[Tree] = trees match {
    case Nil => Nil
    // Multi-binding desugaring: a synthetic tuple val followed by one val per bound name.
    case ValDef(mods, _, SyntacticEmptyTypeTree(), Match(MaybeTyped(MaybeUnchecked(value), tpt), CaseDef(pat, EmptyTree, SyntacticTuple(ids)) :: Nil)) :: tail
      if mods.hasAllFlags(SYNTHETIC | ARTIFACT) =>
      ids match {
        case Nil =>
          ValDef(NoMods, nme.QUASIQUOTE_PAT_DEF, Typed(pat, tpt), transform(value)) :: transformStats(tail)
        case _ =>
          // Recover the user-visible modifiers from the first follow-up val,
          // then drop the follow-ups that merely project the tuple.
          val mods = tail.take(1).head.asInstanceOf[ValDef].mods
          ValDef(mods, nme.QUASIQUOTE_PAT_DEF, Typed(pat, tpt), transform(value)) :: transformStats(tail.drop(ids.length))
      }
    case other :: tail =>
      transform(other) :: transformStats(tail)
  }
  def apply(tree: Tree) = transform(tree)
  def apply(trees: List[Tree]) = transformStats(trees)
}

/** Constructs/deconstructs pattern definitions (`val pat: T = rhs`),
  * represented internally as a QUASIQUOTE_PAT_DEF val.
  */
object SyntacticPatDef extends SyntacticPatDefExtractor {
  def apply(mods: Modifiers, pat: Tree, tpt: Tree, rhs: Tree): List[ValDef] = tpt match {
    case SyntacticEmptyTypeTree() => gen.mkPatDef(mods, pat, rhs)
    case _ => gen.mkPatDef(mods, Typed(pat, tpt), rhs)
  }
  def unapply(tree: Tree): Option[(Modifiers, Tree, Tree, Tree)] = tree match {
    case ValDef(mods, nme.QUASIQUOTE_PAT_DEF, Typed(pat, tpt), rhs) => Some((mods, pat, tpt, rhs))
    case _ => None
  }
}
// match a sequence of desugared `val $pat = $value` with a tuple in the end
protected object UnPatSeqWithRes {
  def unapply(tree: Tree): Option[(List[(Tree, Tree)], List[Tree])] = tree match {
    case SyntacticBlock(UnPatSeq(trees) :+ SyntacticTuple(elems)) => Some((trees, elems))
    case _ => None
  }
}

// undo gen.mkSyntheticParam
/** Matches a compiler-generated (SYNTHETIC | PARAM) parameter, yielding its name. */
protected object UnSyntheticParam {
  def unapply(tree: Tree): Option[TermName] = tree match {
    case ValDef(mods, name, _, EmptyTree)
      if mods.hasAllFlags(SYNTHETIC | PARAM) =>
      Some(name)
    case _ => None
  }
}

// undo gen.mkVisitor
/** Matches `x => x match { cases }` visitor functions with a synthetic parameter. */
protected object UnVisitor {
  def unapply(tree: Tree): Option[(TermName, List[CaseDef])] = tree match {
    case Function(UnSyntheticParam(x1) :: Nil, Match(MaybeUnchecked(Ident(x2)), cases))
      if x1 == x2 =>
      Some((x1, cases))
    case _ => None
  }
}

// undo gen.mkFor:makeClosure
/** Matches the closures `gen.mkFor` builds, recovering (pattern, body):
  * a plain one-parameter function or a single-case visitor.
  */
protected object UnClosure {
  def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
    case Function(ValDef(Modifiers(PARAM, _, _), name, tpt, EmptyTree) :: Nil, body) =>
      tpt match {
        case SyntacticEmptyTypeTree() => Some((Bind(name, self.Ident(nme.WILDCARD)), body))
        case _ => Some((Bind(name, Typed(self.Ident(nme.WILDCARD), tpt)), body))
      }
    case UnVisitor(_, CaseDef(pat, EmptyTree, body) :: Nil) =>
      Some((pat, body))
    case _ => None
  }
}

// match call to either withFilter or filter
// TODO: now that we no longer rewrite `filter` to `withFilter`, maybe this extractor should only look for `withFilter`?
protected object FilterCall {
  def unapply(tree: Tree): Option[(Tree,Tree)] = tree match {
    case Apply(Select(obj, nme.withFilter | nme.filter), arg :: Nil) =>
      Some((obj, arg))
    case _ => None
  }
}

// transform a chain of withFilter calls into a sequence of for filters
/** Always matches: unwinds zero or more filter calls into `SyntacticFilter`
  * enumerators, stopping at a refutability check so it isn't misread as a filter.
  */
protected object UnFilter {
  def unapply(tree: Tree): Some[(Tree, List[Tree])] = tree match {
    case UnCheckIfRefutable(_, _) =>
      Some((tree, Nil))
    case FilterCall(UnFilter(rhs, rest), UnClosure(_, test)) =>
      Some((rhs, rest :+ SyntacticFilter(test)))
    case _ =>
      Some((tree, Nil))
  }
}

// undo gen.mkCheckIfRefutable
/** Matches the `withFilter { case pat => true; case _ => false }` wrapper
  * that guards refutable patterns in for-comprehension generators.
  */
protected object UnCheckIfRefutable {
  def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
    case FilterCall(rhs, UnVisitor(name,
                                   CaseDef(pat, EmptyTree, Literal(Constant(true))) ::
                                   CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) :: Nil))
      if name.toString.contains(nme.CHECK_IF_REFUTABLE_STRING) =>
      Some((pat, rhs))
    case _ => None
  }
}
// undo gen.mkFor:makeCombination accounting for possible extra implicit argument
/** Matches a `lhs.<name>(f)` combinator call produced by for-desugaring
  * (identified by the ForAttachment on the Select), tolerating a trailing
  * implicit argument list.
  */
protected class UnForCombination(name: TermName) {
  def unapply(tree: Tree) = tree match {
    case SyntacticApplied(SyntacticTypeApplied(sel @ Select(lhs, meth), _), (f :: Nil) :: Nil)
      if name == meth && sel.hasAttachment[ForAttachment.type] =>
      Some((lhs, f))
    case SyntacticApplied(SyntacticTypeApplied(sel @ Select(lhs, meth), _), (f :: Nil) :: _ :: Nil)
      if name == meth && sel.hasAttachment[ForAttachment.type] =>
      Some((lhs, f))
    case _ => None
  }
}
protected object UnMap extends UnForCombination(nme.map)
protected object UnForeach extends UnForCombination(nme.foreach)
protected object UnFlatMap extends UnForCombination(nme.flatMap)

// undo desugaring done in gen.mkFor
/** Recursively rebuilds the enumerator list and body of a for loop / for-yield
  * from its map/flatMap/foreach/withFilter desugaring, re-expanding the
  * tuple-threading introduced for `pat = rhs` value definitions.
  */
protected object UnFor {
  def unapply(tree: Tree): Option[(List[Tree], Tree)] = {
    val interm = tree match {
      case UnFlatMap(UnFilter(rhs, filters), UnClosure(pat, UnFor(rest, body))) =>
        Some(((pat, rhs), filters ::: rest, body))
      case UnForeach(UnFilter(rhs, filters), UnClosure(pat, UnFor(rest, body))) =>
        Some(((pat, rhs), filters ::: rest, body))
      case UnMap(UnFilter(rhs, filters), UnClosure(pat, cbody)) =>
        Some(((pat, rhs), filters, gen.Yield(cbody)))
      case UnForeach(UnFilter(rhs, filters), UnClosure(pat, cbody)) =>
        Some(((pat, rhs), filters, cbody))
      case _ => None
    }
    interm.flatMap {
      // A tuple pattern over an inner for that yields a pattern-def sequence
      // stems from `pat = rhs` enumerators: restore them as SyntacticValEq.
      case ((Bind(_, SyntacticTuple(_)) | SyntacticTuple(_),
             UnFor(SyntacticValFrom(pat, rhs) :: innerRest, gen.Yield(UnPatSeqWithRes(pats, elems2)))),
            outerRest, fbody) =>
        val valeqs = pats.map { case (pat, rhs) => SyntacticValEq(pat, rhs) }
        Some((SyntacticValFrom(pat, rhs) :: innerRest ::: valeqs ::: outerRest, fbody))
      case ((pat, rhs), filters, body) =>
        Some((SyntacticValFrom(pat, rhs) :: filters, body))
    }
  }
}

// check that enumerators are valid
/** Validates a for-comprehension enumerator list: must be non-empty, start
  * with a generator, and continue with generators, value defs or filters.
  */
protected def mkEnumerators(enums: List[Tree]): List[Tree] = {
  require(enums.nonEmpty, "enumerators can't be empty")
  enums.head match {
    case SyntacticValFrom(_, _) =>
    case t => throw new IllegalArgumentException(s"$t is not a valid first enumerator of for loop")
  }
  enums.tail.foreach {
    case SyntacticValEq(_, _) | SyntacticValFrom(_, _) | SyntacticFilter(_) =>
    case t => throw new IllegalArgumentException(s"$t is not a valid representation of a for loop enumerator")
  }
  enums
}
object SyntacticFor extends SyntacticForExtractor {
def apply(enums: List[Tree], body: Tree): Tree = gen.mkFor(mkEnumerators(enums), body)
def unapply(tree: Tree) = tree match {
case UnFor(enums, gen.Yield(body)) => None
case UnFor(enums, body) => Some((enums, body))
case _ => None
}
}
object SyntacticForYield extends SyntacticForExtractor {
def apply(enums: List[Tree], body: Tree): Tree = gen.mkFor(mkEnumerators(enums), gen.Yield(body))
def unapply(tree: Tree) = tree match {
case UnFor(enums, gen.Yield(body)) => Some((enums, body))
case _ => None
}
}
    // use typetree's original instead of typetree itself
    protected object MaybeTypeTreeOriginal {
      def unapply(tree: Tree): Some[Tree] = tree match {
        case tt: TypeTree => Some(tt.original)
        case _ => Some(tree)
      }
    }
    // drop potential extra call to .apply
    protected object MaybeSelectApply {
      def unapply(tree: Tree): Some[Tree] = tree match {
        case Select(f, nme.apply) => Some(f)
        case other => Some(other)
      }
    }
    // drop potential @scala.unchecked annotation
    // (matches both the bare Annotated form and the Typed-wrapped form that
    // may hide the annotation behind a TypeTree original)
    protected object MaybeUnchecked {
      def unapply(tree: Tree): Some[Tree] = tree match {
        case Annotated(SyntacticNew(Nil, ScalaDot(tpnme.unchecked) :: Nil, noSelfType, Nil), annottee) =>
          Some(annottee)
        case Typed(annottee, MaybeTypeTreeOriginal(
          Annotated(SyntacticNew(Nil, ScalaDot(tpnme.unchecked) :: Nil, noSelfType, Nil), _))) =>
          Some(annottee)
        case annottee => Some(annottee)
      }
    }
    // split a possibly-typed tree into (value, type); an untyped tree is
    // paired with an empty type tree so callers always get both parts
    protected object MaybeTyped {
      def unapply(tree: Tree): Some[(Tree, Tree)] = tree match {
        case Typed(v, tpt) => Some((v, tpt))
        case v => Some((v, SyntacticEmptyTypeTree()))
      }
    }
protected def mkCases(cases: List[Tree]): List[CaseDef] = cases.map {
case c: CaseDef => c
case tree => throw new IllegalArgumentException(s"$tree is not valid representation of pattern match case")
}
    /** Extractor for partial-function literals `{ case ... }`.
     *  Before typechecking these are represented as a Match with an empty
     *  scrutinee; after typechecking the compiler expands them into an
     *  anonymous AbstractPartialFunction subclass — the second case below
     *  recognizes that expanded shape and recovers the original cases
     *  (dropping the synthetic default case appended to applyOrElse).
     */
    object SyntacticPartialFunction extends SyntacticPartialFunctionExtractor {
      def apply(cases: List[Tree]): Match = Match(EmptyTree, mkCases(cases))
      def unapply(tree: Tree): Option[List[CaseDef]] = tree match {
        case Match(EmptyTree, cases) => Some(cases)
        case Typed(
          Block(
            List(ClassDef(clsMods, tpnme.ANON_FUN_NAME, Nil, Template(
              List(abspf: TypeTree, ser: TypeTree), noSelfType, List(
                DefDef(_, nme.CONSTRUCTOR, _, _, _, _),
                DefDef(_, nme.applyOrElse, _, _, _,
                  Match(_, cases :+
                    CaseDef(Bind(nme.DEFAULT_CASE, Ident(nme.WILDCARD)), _, _))),
                DefDef(_, nme.isDefinedAt, _, _, _, _))))),
            Apply(Select(New(Ident(tpnme.ANON_FUN_NAME)), termNames.CONSTRUCTOR), List())),
          pf: TypeTree)
          // guard: only accept the compiler-generated expansion, identified by
          // its types and the FINAL | SYNTHETIC flags on the anonymous class
          if pf.tpe != null && pf.tpe.typeSymbol.eq(PartialFunctionClass) &&
             abspf.tpe != null && abspf.tpe.typeSymbol.eq(AbstractPartialFunctionClass) &&
             ser.tpe != null && ser.tpe.typeSymbol.eq(SerializableClass) &&
             clsMods.hasAllFlags(FINAL | SYNTHETIC) =>
          Some(cases)
        case _ => None
      }
    }
object SyntacticMatch extends SyntacticMatchExtractor {
def apply(scrutinee: Tree, cases: List[Tree]) = {
require(scrutinee.nonEmpty, "match's scrutinee may not be empty")
Match(scrutinee, mkCases(cases))
}
def unapply(tree: Match): Option[(Tree, List[CaseDef])] = tree match {
case Match(scrutinee, cases) if scrutinee.nonEmpty => Some((scrutinee, cases))
case _ => None
}
}
    /** Extractor for `try { block } catch { catches } finally { finalizer }`. */
    object SyntacticTry extends SyntacticTryExtractor {
      def apply(block: Tree, catches: List[Tree], finalizer: Tree) = Try(block, mkCases(catches), finalizer)
      def unapply(tree: Try): Option[(Tree, List[CaseDef], Tree)] = Try.unapply(tree)
    }
    /** Extractor for term identifiers; tracks backquoting (`` `x` ``)
     *  via BackquotedIdentifierAttachment rather than a tree node.
     */
    object SyntacticTermIdent extends SyntacticTermIdentExtractor {
      def apply(name: TermName, isBackquoted: Boolean): Ident = {
        val id = self.Ident(name)
        if (isBackquoted) id updateAttachment BackquotedIdentifierAttachment
        id
      }
      def unapply(id: Ident): Option[(TermName, Boolean)] = id.name match {
        case name: TermName => Some((name, id.hasAttachment[BackquotedIdentifierAttachment.type]))
        case _ => None
      }
    }
    /** Extractor for type identifiers, looking through TypeTree originals. */
    object SyntacticTypeIdent extends SyntacticTypeIdentExtractor {
      def apply(name: TypeName): Ident = self.Ident(name)
      def unapply(tree: Tree): Option[TypeName] = tree match {
        case MaybeTypeTreeOriginal(Ident(name: TypeName)) => Some(name)
        case _ => None
      }
    }
    /** Facade over Imports and ImportSelectors that lets you structurally
     * deconstruct/reconstruct them.
     *
     * Selectors are represented in the following way:
     * 1. q"import foo._" <==> q"import foo.${pq"_"}"
     * 2. q"import foo.bar" <==> q"import foo.${pq"bar"}"
     * 3. q"import foo.{bar => baz}" <==> q"import foo.${pq"bar -> baz"}"
     * 4. q"import foo.{bar => _}" <==> q"import foo.${pq"bar -> _"}"
     *
     * All names in selectors are TermNames, despite the fact that ImportSelector
     * can theoretically contain TypeNames too (but they never do in practice.)
     */
    object SyntacticImport extends SyntacticImportExtractor {
      // construct/deconstruct {_} import selector
      private object WildcardSelector {
        def apply(offset: Int): ImportSelector = ImportSelector.wildAt(offset)
        def unapply(sel: ImportSelector): Option[Int] = if (sel.isWildcard) Some(sel.namePos) else None
      }
      // construct/deconstruct {foo} import selector
      // (a plain import is a self-rename: same name and offset on both sides)
      private object NameSelector {
        def apply(name: TermName, offset: Int): ImportSelector = ImportSelector(name, offset, name, offset)
        def unapply(sel: ImportSelector): Option[(TermName, Int)] = sel match {
          case ImportSelector(name1, offset1, name2, offset2) if name1 == name2 && offset1 == offset2 =>
            Some((name1.toTermName, offset1))
          case _ =>
            None
        }
      }
      // construct/deconstruct {foo => bar} import selector
      private object RenameSelector {
        def apply(name1: TermName, offset1: Int, name2: TermName, offset2: Int): ImportSelector =
          ImportSelector(name1, offset1, name2, offset2)
        def unapply(sel: ImportSelector): Option[(TermName, Int, TermName, Int)] =
          if (sel.isRename) {
            val ImportSelector(name1, offset1, name2, offset2) = sel
            Some((name1.toTermName, offset1, name2.toTermName, offset2))
          } else None
      }
      // construct/deconstruct {foo => _} import selector
      // (an unimport renames to the wildcard; -1 marks the absent position)
      private object UnimportSelector {
        def apply(name: TermName, offset: Int): ImportSelector =
          ImportSelector(name, offset, nme.WILDCARD, -1)
        def unapply(sel: ImportSelector): Option[(TermName, Int)] =
          if (sel.isMask) Some((sel.name.toTermName, sel.namePos))
          else None
      }
      // represent {_} import selector as pq"_"
      private object WildcardSelectorRepr {
        def apply(pos: Position): Tree = atPos(pos)(self.Ident(nme.WILDCARD))
        def unapply(tree: Tree): Option[Position] = tree match {
          case self.Ident(nme.WILDCARD) => Some(tree.pos)
          case _ => None
        }
      }
      // represent {foo} import selector as pq"foo"
      private object NameSelectorRepr {
        def apply(name: TermName, pos: Position): Tree = atPos(pos)(Bind(name, WildcardSelectorRepr(pos)))
        def unapply(tree: Tree): Option[(TermName, Position)] = tree match {
          case Bind(name, WildcardSelectorRepr(_)) => Some((name.toTermName, tree.pos))
          case _ => None
        }
      }
      // pq"left -> right"
      private object Arrow {
        def apply(left: Tree, right: Tree): Apply =
          Apply(self.Ident(nme.MINGT), left :: right :: Nil)
        def unapply(tree: Apply): Option[(Tree, Tree)] = tree match {
          case Apply(self.Ident(nme.MINGT), left :: right :: Nil) => Some((left, right))
          case _ => None
        }
      }
      // represent {foo => bar} import selector as pq"foo -> bar"
      private object RenameSelectorRepr {
        def apply(name1: TermName, pos1: Position, name2: TermName, pos2: Position): Tree = {
          val left = NameSelectorRepr(name1, pos1)
          val right = NameSelectorRepr(name2, pos2)
          atPos(wrappingPos(left :: right :: Nil))(Arrow(left, right))
        }
        def unapply(tree: Tree): Option[(TermName, Position, TermName, Position)] = tree match {
          case Arrow(NameSelectorRepr(name1, pos1), NameSelectorRepr(name2, pos2)) =>
            Some((name1.toTermName, pos1, name2.toTermName, pos2))
          case _ =>
            None
        }
      }
      // represent {foo => _} import selector as pq"foo -> _"
      private object UnimportSelectorRepr {
        def apply(name: TermName, pos: Position): Tree =
          atPos(pos)(Arrow(NameSelectorRepr(name, pos), WildcardSelectorRepr(pos)))
        def unapply(tree: Tree): Option[(TermName, Position)] = tree match {
          case Arrow(NameSelectorRepr(name, pos), WildcardSelectorRepr(_)) =>
            Some((name, pos))
          case _ =>
            None
        }
      }
      // ImportSelector stores Int offsets while the tree representation uses
      // Positions; these two helpers convert in each direction, mapping
      // NoPosition <-> -1.
      private def derivedPos(t: Tree, offset: Int): Position =
        if (t.pos == NoPosition) NoPosition else t.pos.withPoint(offset)
      private def derivedOffset(pos: Position): Int =
        if (pos == NoPosition) -1 else pos.point
      /** Builds an Import from an expression and pattern-encoded selectors. */
      def apply(expr: Tree, selectors: List[Tree]): Import = {
        val importSelectors = selectors.map {
          case WildcardSelectorRepr(pos) => WildcardSelector(derivedOffset(pos))
          case NameSelectorRepr(name, pos) => NameSelector(name, derivedOffset(pos))
          case RenameSelectorRepr(name1, pos1, name2, pos2) => RenameSelector(name1, derivedOffset(pos1), name2, derivedOffset(pos2))
          case UnimportSelectorRepr(name, pos) => UnimportSelector(name, derivedOffset(pos))
          case tree => throw new IllegalArgumentException(s"${showRaw(tree)} doesn't correspond to import selector")
        }
        Import(expr, importSelectors)
      }
      /** Deconstructs an Import into (expr, pattern-encoded selectors). */
      def unapply(imp: Import): Some[(Tree, List[Tree])] = {
        val selectors = imp.selectors.map {
          case WildcardSelector(offset) => WildcardSelectorRepr(derivedPos(imp, offset))
          case NameSelector(name, offset) => NameSelectorRepr(name, derivedPos(imp, offset))
          case RenameSelector(name1, offset1, name2, offset2) => RenameSelectorRepr(name1, derivedPos(imp, offset1), name2, derivedPos(imp, offset2))
          case UnimportSelector(name, offset) => UnimportSelectorRepr(name, derivedPos(imp, offset))
          case x => throw new MatchError(x)
        }
        Some((imp.expr, selectors))
      }
    }
    /** Extractor for type selections `qual.Name`; looks through TypeTree originals. */
    object SyntacticSelectType extends SyntacticSelectTypeExtractor {
      def apply(qual: Tree, name: TypeName): Select = Select(qual, name)
      def unapply(tree: Tree): Option[(Tree, TypeName)] = tree match {
        case MaybeTypeTreeOriginal(Select(qual, name: TypeName)) => Some((qual, name))
        case _ => None
      }
    }
    /** Extractor for term selections `qual.name`. */
    object SyntacticSelectTerm extends SyntacticSelectTermExtractor {
      def apply(qual: Tree, name: TermName): Select = Select(qual, name)
      def unapply(tree: Tree): Option[(Tree, TermName)] = tree match {
        case Select(qual, name: TermName) => Some((qual, name))
        case _ => None
      }
    }
    /** Extractor for compound (refinement) types `A with B { defns }`. */
    object SyntacticCompoundType extends SyntacticCompoundTypeExtractor {
      def apply(parents: List[Tree], defns: List[Tree]) =
        CompoundTypeTree(Template(gen.mkParents(NoMods, parents), noSelfType, defns))
      def unapply(tree: Tree): Option[(List[Tree], List[Tree])] = tree match {
        case MaybeTypeTreeOriginal(CompoundTypeTree(Template(parents, _, defns))) =>
          Some((parents, defns))
        case _ =>
          None
      }
    }
    /** Extractor for singleton types `ref.type`.
     *  (The misspelled base-trait name is part of the public API elsewhere.)
     */
    object SyntacticSingletonType extends SyntacitcSingletonTypeExtractor {
      def apply(ref: Tree): SingletonTypeTree = SingletonTypeTree(ref)
      def unapply(tree: Tree): Option[Tree] = tree match {
        case MaybeTypeTreeOriginal(SingletonTypeTree(ref)) =>
          Some(ref)
        case _ =>
          None
      }
    }
    /** Extractor for type projections `Qual#Name`. */
    object SyntacticTypeProjection extends SyntacticTypeProjectionExtractor {
      def apply(qual: Tree, name: TypeName): SelectFromTypeTree =
        SelectFromTypeTree(qual, name)
      def unapply(tree: Tree): Option[(Tree, TypeName)] = tree match {
        case MaybeTypeTreeOriginal(SelectFromTypeTree(qual, name)) =>
          Some((qual, name))
        case _ =>
          None
      }
    }
    /** Extractor for annotated types `tpt @annot`; note the (tpt, annot)
     *  result order is swapped relative to the underlying Annotated node.
     */
    object SyntacticAnnotatedType extends SyntacticAnnotatedTypeExtractor {
      def apply(tpt: Tree, annot: Tree): Annotated =
        Annotated(annot, tpt)
      def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
        case MaybeTypeTreeOriginal(Annotated(annot, tpt)) =>
          Some((tpt, annot))
        case _ =>
          None
      }
    }
    /** Extractor for existential types `tpt forSome { where }`. */
    object SyntacticExistentialType extends SyntacticExistentialTypeExtractor {
      def apply(tpt: Tree, where: List[Tree]): ExistentialTypeTree =
        ExistentialTypeTree(tpt, where.map {
          case md: MemberDef => md
          case tree => throw new IllegalArgumentException(s"$tree is not legal forSome definition")
        })
      def unapply(tree: Tree): Option[(Tree, List[MemberDef])] = tree match {
        case MaybeTypeTreeOriginal(ExistentialTypeTree(tpt, where)) =>
          Some((tpt, where))
        case _ =>
          None
      }
    }
}
val build = new ReificationSupportImpl
}
| lrytz/scala | src/reflect/scala/reflect/internal/ReificationSupport.scala | Scala | apache-2.0 | 52,210 |
package model
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* DumbPlayer class - rps
*
* Author:
* Ryan Needham
*
* Issues:
*
* Notes:
*
 * An extension of the Player trait that selects its move randomly.
* For more details see "Player.scala"
*
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
import java.util.Random
final class DumbPlayer (id: Int, turns: Int, ref: Referee) extends Player {
  protected val playerID  = id
  protected val referee   = ref
  protected var turnsLeft = turns
  protected val generator = new Random

  /**
   * makeMove
   *
   * Picks one of the three hands uniformly at random.
   *
   * Fixed: the previous implementation drew a float in [0, 1) and tested
   * it against the open intervals (0, 0.3), (0.3, 0.6) and (0.6, 1.0).
   * A draw of exactly 0.0, 0.3 or 0.6 matched no branch and silently left
   * `hand` unchanged, and scissors covered 40% of the range instead of a
   * third. nextInt(3) assigns a hand on every call with equal probability.
   */
  protected def makeMove = {
    hand = generator.nextInt(3) match {
      case 0 => rock
      case 1 => paper
      case _ => scissors
    }
  }
}
| MyForteIsTimeTravel/RockPaperScissors | src/model/DumbPlayer.scala | Scala | gpl-3.0 | 1,152 |
/**
* Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
*/
package akka.actor
import akka.AkkaException
import akka.util._
import ReflectiveAccess._
import Actor._
import java.util.concurrent.{ CopyOnWriteArrayList, ConcurrentHashMap }
import java.net.InetSocketAddress
import akka.config.Supervision._
class SupervisorException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
/**
* Factory object for creating supervisors declarative. It creates instances of the 'Supervisor' class.
* These are not actors, if you need a supervisor that is an Actor then you have to use the 'SupervisorActor'
* factory object.
* <p/>
*
* Here is a sample on how to use it:
* <pre>
* val supervisor = Supervisor(
* SupervisorConfig(
* RestartStrategy(OneForOne, 3, 10, List(classOf[Exception]),
* Supervise(
* myFirstActor,
* Permanent) ::
* Supervise(
* mySecondActor,
* Permanent) ::
* Nil))
* </pre>
*
 * You dynamically link and unlink child actors using the 'link' and 'unlink' methods.
* <pre>
* supervisor.link(child)
* supervisor.unlink(child)
* </pre>
*
* @author <a href="http://jonasboner.com">Jonas Bonér</a>
*/
object Supervisor {
  /** Builds, configures and starts a supervisor from the declarative `config`. */
  def apply(config: SupervisorConfig): Supervisor = SupervisorFactory(config).newInstance.start
}
/**
* Use this factory instead of the Supervisor factory object if you want to control
* instantiation and starting of the Supervisor, if not then it is easier and better
* to use the Supervisor factory object.
* <p>
* Example usage:
* <pre>
* val factory = SupervisorFactory(
* SupervisorConfig(
* RestartStrategy(OneForOne, 3, 10, List(classOf[Exception]),
* Supervise(
* myFirstActor,
* Permanent) ::
* Supervise(
* mySecondActor,
* Permanent) ::
* Nil))
* </pre>
*
* Then create a new Supervisor tree with the concrete Services we have defined.
*
* <pre>
* val supervisor = factory.newInstance
* supervisor.start // start up all managed servers
* </pre>
*
* @author <a href="http://jonasboner.com">Jonas Bonér</a>
*/
case class SupervisorFactory(val config: SupervisorConfig) {
  /** New supervisor built from this factory's own config. */
  def newInstance: Supervisor = newInstanceFor(config)

  /** New supervisor built from the given config: construct, wire up children
   *  per the config, then start it.
   */
  def newInstanceFor(config: SupervisorConfig): Supervisor = {
    val supervisor = new Supervisor(config.restartStrategy, config.maxRestartsHandler)
    supervisor.configure(config)
    supervisor.start
    supervisor
  }
}
/**
* <b>NOTE:</b>
* <p/>
* The supervisor class is only used for the configuration system when configuring supervisor
* hierarchies declaratively. Should not be used as part of the regular programming API. Instead
* wire the children together using 'link', 'spawnLink' etc. and set the 'trapExit' flag in the
* children that should trap error signals and trigger restart.
* <p/>
* See the Scaladoc for the SupervisorFactory for an example on how to declaratively wire up children.
*
* @author <a href="http://jonasboner.com">Jonas Bonér</a>
*/
sealed class Supervisor(handler: FaultHandlingStrategy, maxRestartsHandler: (ActorRef, MaximumNumberOfRestartsWithinTimeRangeReached) => Unit) {
  import Supervisor._

  // Supervised child actors keyed by the actor implementation's class name;
  // a class may have several instances, hence the List values.
  private val _childActors = new ConcurrentHashMap[String, List[ActorRef]]
  // Recursively configured child supervisors (see configure below).
  private val _childSupervisors = new CopyOnWriteArrayList[Supervisor]

  private[akka] val supervisor = actorOf(new SupervisorActor(handler, maxRestartsHandler)).start()

  def uuid = supervisor.uuid

  // The underlying actor is already started on construction; kept for API symmetry.
  def start: Supervisor = {
    this
  }

  def shutdown(): Unit = supervisor.stop()

  def link(child: ActorRef) = supervisor.link(child)

  def unlink(child: ActorRef) = supervisor.unlink(child)

  /** All directly supervised child actors. */
  def children: List[ActorRef] =
    _childActors.values.toArray.toList.asInstanceOf[List[List[ActorRef]]].flatten

  /** All recursively configured child supervisors.
   *  Fixed: this previously read from `_childActors` (a map of ActorRef
   *  lists) and cast the result to List[Supervisor], which could never be
   *  correct at runtime; the child supervisors are tracked in
   *  `_childSupervisors`.
   */
  def childSupervisors: List[Supervisor] =
    _childSupervisors.toArray.toList.asInstanceOf[List[Supervisor]]

  /** Wires up the children declared in `config`: starts and links plain
   *  Supervise entries, and recursively builds nested SupervisorConfigs.
   */
  def configure(config: SupervisorConfig): Unit = config match {
    case SupervisorConfig(_, servers, _) =>
      // foreach, not map: configuration is done purely for its side effects
      servers foreach {
        case Supervise(actorRef, lifeCycle, registerAsRemoteService) =>
          actorRef.start()
          val className = actorRef.actor.getClass.getName
          val currentActors = {
            val list = _childActors.get(className)
            if (list eq null) List[ActorRef]()
            else list
          }
          _childActors.put(className, actorRef :: currentActors)
          actorRef.lifeCycle = lifeCycle
          supervisor.link(actorRef)
          if (registerAsRemoteService)
            Actor.remote.register(actorRef)

        case supervisorConfig@SupervisorConfig(_, _, _) => // recursive supervisor configuration
          val childSupervisor = Supervisor(supervisorConfig)
          supervisor.link(childSupervisor.supervisor)
          _childSupervisors.add(childSupervisor)
      }
  }
}
/**
* For internal use only.
*
* @author <a href="http://jonasboner.com">Jonas Bonér</a>
*/
final class SupervisorActor private[akka] (handler: FaultHandlingStrategy, maxRestartsHandler: (ActorRef, MaximumNumberOfRestartsWithinTimeRangeReached) => Unit) extends Actor {
  self.faultHandler = handler

  // Stop and unlink every linked child when this supervisor actor stops.
  override def postStop(): Unit = {
    val i = self.linkedActors.values.iterator
    while (i.hasNext) {
      val ref = i.next
      ref.stop()
      self.unlink(ref)
    }
  }

  // Only reacts to the max-restarts notification; any other message is a
  // programming error and is rejected loudly.
  def receive = {
    case max@MaximumNumberOfRestartsWithinTimeRangeReached(_, _, _, _) => maxRestartsHandler(self, max)
    case unknown => throw new SupervisorException(
      "SupervisorActor can not respond to messages.\\n\\tUnknown message [" + unknown + "]")
  }
}
| felixmulder/scala | test/disabled/presentation/akka/src/akka/actor/Supervisor.scala | Scala | bsd-3-clause | 5,754 |
/*
* @author Philip Stutz
*
* Copyright 2011 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.factory.storage
import com.signalcollect.interfaces.StorageFactory
import com.signalcollect.interfaces.Storage
import com.signalcollect.interfaces.VertexIdSet
import com.signalcollect.storage.DefaultStorage
import com.signalcollect.storage.AboveAverageVertexIdSet
/**
* The InMemory storage factory creates storage objects that store vertices in memory.
*/
// Storage factory producing in-memory DefaultStorage instances whose
// to-collect/to-signal vertex sets use the AboveAverageVertexIdSet strategy.
object AboveAverage extends StorageFactory {
  def createInstance: Storage = new DefaultStorage {
    // Swap only the vertex-id-set implementation; everything else is DefaultStorage.
    override protected def vertexSetFactory: VertexIdSet = new AboveAverageVertexIdSet(this)
  }
}
} | Tjoene/thesis | Case_Programs/signal-collect/src/main/scala/com/signalcollect/factory/storage/AboveAverageInMemory.scala | Scala | gpl-2.0 | 1,255 |
/*
* Copyright (c) 2013 David Soergel <dev@davidsoergel.com>
* Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0
*/
package worldmake.lib
import worldmake._
import java.util.UUID
import worldmake.WorldMakeConfig._
import worldmake.storage.Identifier
import scalax.file.Path
import edu.umass.cs.iesl.scalacommons.StringUtils._
import scala.concurrent.{ExecutionContext, Future}
import ExecutionContext.Implicits.global
import com.typesafe.scalalogging.slf4j.Logging
import worldmake.cookingstrategy.CookingStrategy
/**
* @author <a href="mailto:dev@davidsoergel.com">David Soergel</a>
*/
/** A Recipe whose output is the string produced by interpolating the outputs
 *  of the argument recipes into `sc`, with `stripMargin` applied after
 *  interpolation. Provenance is tracked through the blocked -> pending ->
 *  running -> completed/failed states.
 */
class StringInterpolationRecipe(sc: StringContext, args: Seq[Recipe[Any]]) extends DerivableRecipe[String] with Logging {

  def deriveFuture(implicit upstreamStrategy: CookingStrategy) = {
    val pr = BlockedProvenance(Identifier[Provenance[String]](UUID.randomUUID().toString), recipeId)
    // Cook all upstream recipes concurrently, then interpolate their results.
    val resolvedProvenancesF: Future[Seq[Successful[Any]]] = Future.sequence(args.map(upstreamStrategy.cookOne))
    val result = for (resolvedProvenances <- resolvedProvenancesF
    ) yield deriveWithArgs(pr.pending(resolvedProvenances.toSet, Map.empty), resolvedProvenances)
    result
  }

  /** Performs the actual interpolation once all argument provenances have
   *  resolved; records success or wraps any failure in FailedRecipeException.
   */
  def deriveWithArgs(pr: PendingProvenance[String], resolvedProvenances: Seq[Successful[Any]]): Provenance[String] with Successful[String] = {
    val prs = pr.running(new MemoryWithinJvmRunningInfo)
    try {
      val result = {
        val resolvedArgs = resolvedProvenances.map(_.output.environmentString) //.value)
        // this stripMargin takes effect after interpolation; see https://github.com/scala/scala/pull/1655 for alternative
        val r = StringArtifact(sc.s(resolvedArgs: _*).stripMargin)
        r
      }
      prs.completed(0, None, Map.empty, result)
    }
    catch {
      case t: Throwable => {
        val prf = prs.failed(1, None, Map.empty)
        logger.debug("Error in StringInterpolationRecipe: ", t) // todo better log message
        throw FailedRecipeException("Failed StringInterpolationRecipe", prf, t)
      }
    }
  }

  // Recipe identity hashes the interpolation template plus all upstream recipe ids.
  private val template: String = sc.parts.mkString("???").stripMargin.maskNewlines

  lazy val recipeId = Identifier[Recipe[String]](WMHashHex(template + args.par.map(_.recipeId).mkString))

  def longDescription = {
    val argIds = args.map(x => "${" + x.shortDesc + "}")
    // this stripMargin takes effect after interpolation; see https://github.com/scala/scala/pull/1655 for alternative
    val r = sc.s(argIds: _*).stripMargin
    "Interpolate: " + r.replace("\\n", "\\\\n") //.limitAtWhitespace(80, "...")
  }

  def dependencies = args.toSet
}
object StringInterpolationRecipe {
  // Note: extends AnyVal to prevent runtime instantiation (value class).
  implicit class StringInterpolationRecipeHelper(val sc: StringContext) extends AnyVal {
    /** `ds"..."`: a string Recipe by interpolating the argument recipes. */
    def ds(args: Recipe[_]*): Recipe[String] = new StringInterpolationRecipe(sc, args)
    // this builds the script as a String derivation first, and then runs it-- as opposed to the raw SystemDerivation where dependencies are passed as environment variables.
    def sys(args: Recipe[_]*): Recipe[ManagedPath] = new SystemRecipe(new StringInterpolationRecipe(sc, args), Map.empty)
  }
}
/*
class StringInterpolationSystemDerivation(sc: StringContext, args: Seq[Derivation[_]]) extends DerivableDerivation[Path] {
private val resolvedProvenances = args.map(_.resolveOne)
def derive: Provenance[TypedPath] with Successful[TypedPath] = {
val startTime = DateTime.now()
val result = {
val resolvedArgs = resolvedProvenances.map(_.artifact.value)
StringArtifact(sc.s(resolvedArgs))
}
val endTime = DateTime.now()
SuccessfulProvenance(Identifier[Provenance[String]](UUID.randomUUID().toString), derivationId, ProvenanceStatus.Success, derivedFromUnnamed = resolvedProvenances.toSet, startTime = startTime, endTime = endTime, output = Some(result))
}
private val template: String = sc.parts.mkString(" ??? ")
def derivationId = Identifier[Derivation[String]](WMHashHex(template + args.map(_.derivationId).mkString))
def description = {
"String Interpolation: " + template.take(80)
}
def dependencies = args.toSet
}
object StringInterpolationSystemDerivation {
// Note: We extends AnyVal to prevent runtime instantiation. See
// value class guide for more info.
implicit class StringInterpolationDerivationHelper(val sc: StringContext) extends AnyVal {
def sys(args: Derivation[_]*): Derivation[Path] = new StringInterpolationSystemDerivation(sc, args)
}
}
*/
| davidsoergel/worldmake | src/main/scala/worldmake/lib/StringDerivations.scala | Scala | apache-2.0 | 4,609 |
/*
* WindowMaxIndex.scala
* (FScape)
*
* Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.fscape
package graph
import de.sciss.fscape.Graph.{ProductReader, RefMapIn}
import de.sciss.fscape.UGenSource.unwrap
import de.sciss.fscape.stream.{StreamIn, StreamOut}
import scala.collection.immutable.{IndexedSeq => Vec}
object WindowMaxIndex extends ProductReader[WindowMaxIndex] {
  /** Deserializes a WindowMaxIndex from its two serialized GE fields (in, size). */
  override def read(in: RefMapIn, key: String, arity: Int): WindowMaxIndex = {
    require (arity == 2)
    val _in   = in.readGE()
    val _size = in.readGE()
    new WindowMaxIndex(_in, _size)
  }
}
/** A UGen that determines for each input window the index of the maximum element.
* It outputs one integer value per window; if multiple elements have the same
* value, the index of the first element is reported (notably zero if the window
* contains only identical elements).
*
* @param in the input signal.
* @param size the window size.
*/
final case class WindowMaxIndex(in: GE, size: GE) extends UGenSource.SingleOut {
  // Expand both inputs into the UGen graph.
  protected def makeUGens(implicit b: UGenGraph.Builder): UGenInLike =
    unwrap(this, Vector(in.expand, size.expand))

  protected def makeUGen(args: Vec[UGenIn])(implicit b: UGenGraph.Builder): UGenInLike =
    UGen.SingleOut(this, args)

  // Bridges to the stream implementation; `size` is truncated to Int per window.
  private[fscape] def makeStream(args: Vec[StreamIn])(implicit b: stream.Builder): StreamOut = {
    val Vec(in, size) = args: @unchecked
    import in.tpe
    stream.WindowMaxIndex[in.A, in.Buf](in = in.toElem, size = size.toInt)
  }
}
| Sciss/FScape-next | core/shared/src/main/scala/de/sciss/fscape/graph/WindowMaxIndex.scala | Scala | agpl-3.0 | 1,721 |
/*
Copyright 2011 the original author or authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.gumbix.dynpro
/**
* @author Markus Gumbel (m.gumbel@hs-mannheim.de)
*/
trait DynProMatrix {

  /**
   * The number of rows of the matrix.
   */
  def n: Int

  /**
   * The number of columns of the matrix.
   */
  def m: Int

  /**
   * Matrix containing the accumulated values (costs).
   * A cell may be empty (=None), i.e. not yet computed.
   */
  val matrix: Array[Array[Option[Double]]]
}
/* Copyright 2009-2021 EPFL, Lausanne */
import stainless.lang._
import stainless.collection._
import stainless._
object ModuloByZero {
  // NOTE(review): `mod` by BigInt(0) is undefined. Given the file's path
  // ("benchmarks/verification/invalid"), this appears to be an intentionally
  // failing example for the Stainless verifier — do not "fix" it.
  def modByZero(x: BigInt): Boolean = {
    (x mod BigInt(0)) == BigInt(10)
  }
}
| epfl-lara/stainless | frontends/benchmarks/verification/invalid/ModuloByZero.scala | Scala | apache-2.0 | 222 |
package demesne.index
import scala.concurrent.duration._
import akka.actor.FSM.{ CurrentState, Transition }
import akka.actor._
import akka.contrib.pattern.ReliableProxy
import akka.contrib.pattern.ReliableProxy.{ Connecting, TargetChanged }
import akka.event.LoggingReceive
import omnibus.akka.envelope.Envelope
object IndexRelay {
  /** Props for a relay that forwards directives extracted by `extractor`
   *  to the index aggregate at `indexAggregatePath`.
   */
  def props( indexAggregatePath: ActorPath, extractor: KeyIdExtractor ): Props = {
    Props( new IndexRelay( indexAggregatePath, extractor ) )
  }
}
/**
* Created by damonrolfs on 10/27/14.
*/
/** Actor that relays index directives to an index aggregate via a ReliableProxy.
 *  State machine: connecting (proxy not yet connected) -> starting (waiting for
 *  the aggregate's Started confirmation) -> active (relaying directives).
 */
class IndexRelay( indexAggregatePath: ActorPath, extractor: KeyIdExtractor )
    extends Actor
    with ActorLogging {

  // Extend the user extractor so it also sees through Envelope-wrapped payloads.
  val fullExtractor: KeyIdExtractor = {
    case m if extractor.isDefinedAt( m ) => extractor( m )
    case Envelope( payload, _ ) if extractor.isDefinedAt( payload ) => extractor( payload )
  }

  //todo move into configuration retry timeout
  val proxy: ActorRef = context.actorOf(
    ReliableProxy.props( targetPath = indexAggregatePath, retryAfter = 100.millis )
  )

  // Subscribe to the proxy's FSM transitions so we notice when it leaves Connecting.
  proxy ! FSM.SubscribeTransitionCallBack( self )

  //todo: move into configuration
  override val receive: Receive = connecting( List() )

  def connecting( waiting: List[ActorRef] ): Receive = LoggingReceive {
    case CurrentState( _, state ) if state != Connecting => {
      log.debug( "Relay connected to index aggregate at {}", indexAggregatePath )
      proxy ! WaitingForStart
      context become starting( waiting )
    }

    case Transition( _, Connecting, _ ) => {
      log.debug( "Relay connected to index aggregate at {}", indexAggregatePath )
      proxy ! WaitingForStart
      context become starting( waiting )
    }

    case WaitingForStart => {
      log.debug( "adding to relay's wait queue: {}", sender() )
      // NOTE(review): this replaces the wait queue with just the latest sender
      // rather than prepending (sender() :: waiting) — confirm whether earlier
      // waiters are intentionally dropped; same pattern in `starting` below.
      context become connecting( List( sender() ) )
    }

    case _: CurrentState[_] => {}
    case _: ActorIdentity => {}
    case _: TargetChanged => {}
  }

  def starting( waiting: List[ActorRef] ): Receive = LoggingReceive {
    case Started if sender().path == indexAggregatePath => {
      log.debug( "relay recd start confirmation from aggregate => activating" )
      // Notify everyone who asked while we were connecting/starting.
      waiting foreach { _ ! Started }
      context become active
    }

    case WaitingForStart => {
      log.debug( "adding to relay's wait queue: {}", sender() )
      context become starting( List( sender() ) )
    }

    case _: Transition[_] => {}
    case _: CurrentState[_] => {}
    case _: ActorIdentity => {}
    case _: TargetChanged => {}
  }

  val active: Receive = LoggingReceive {
    // Main path: extract a directive from the event and forward it via the proxy.
    case event if fullExtractor.isDefinedAt( event ) => {
      val directive = fullExtractor( event )
      proxy ! directive
      log.debug( "relayed to aggregate index: {}", directive )
    }

    case WaitingForStart => {
      log.debug( "received WaitForStart: sending Started to {}", sender() )
      sender() ! Started
    }

    case _: Transition[_] => {}
    case _: CurrentState[_] => {}
    case _: ActorIdentity => {}
    case _: TargetChanged => {}
  }

  override def unhandled( message: Any ): Unit = {
    message match {
      // case _: akka.actor.FSM.CurrentState[_] => ()
      // case _: akka.actor.FSM.Transition[_] => ()
      // case _: akka.contrib.pattern.ReliableProxy.TargetChanged => ()
      // case id: ActorIdentity => log.debug( "received ActorIdentity:[{}]", id )
      case _ =>
        log.warning(
          "RELAY_UNHANDLED [{}]; extractor-defined-at={}",
          message,
          fullExtractor.isDefinedAt( message )
        )
    }
  }
}
| dmrolfs/demesne | core/src/main/scala/demesne/index/IndexRelay.scala | Scala | apache-2.0 | 3,558 |
package scala.scalanative
package runtime
import native.{struct, Ptr}
@struct class Type(val id: Int, val name: String)
| cedricviaccoz/scala-native | nativelib/src/main/scala/scala/scalanative/runtime/Type.scala | Scala | bsd-3-clause | 122 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import java.util.Properties
import kafka.common.StreamEndException
import kafka.common.StreamEndException
/**
* A base consumer used to abstract both old and new consumer
* this class should be removed (along with BaseProducer) be removed
* once we deprecate old consumer
*/
trait BaseConsumer {
  /** Blocks until the next record is available (or the stream/timeout ends). */
  def receive(): BaseConsumerRecord
  def stop()
  def cleanup()
  def commit()
}

// Consumer-agnostic record shape shared by the old and new consumer wrappers.
case class BaseConsumerRecord(topic: String, partition: Int, offset: Long, key: Array[Byte], value: Array[Byte])
// Wrapper over the new (0.9+) KafkaConsumer API for a single topic.
class NewShinyConsumer(topic: String, consumerProps: Properties, val timeoutMs: Long = Long.MaxValue) extends BaseConsumer {
  import org.apache.kafka.clients.consumer.KafkaConsumer
  import scala.collection.JavaConversions._

  val consumer = new KafkaConsumer[Array[Byte], Array[Byte]](consumerProps)
  consumer.subscribe(List(topic))
  // Iterator over the most recent poll's records; refilled lazily in receive().
  var recordIter = consumer.poll(0).iterator

  override def receive(): BaseConsumerRecord = {
    // Poll again only when the previous batch is exhausted; an empty poll
    // after timeoutMs surfaces as ConsumerTimeoutException.
    if (!recordIter.hasNext) {
      recordIter = consumer.poll(timeoutMs).iterator
      if (!recordIter.hasNext)
        throw new ConsumerTimeoutException
    }

    val record = recordIter.next
    BaseConsumerRecord(record.topic, record.partition, record.offset, record.key, record.value)
  }

  override def stop() {
    // wakeup() is the thread-safe way to interrupt a blocked poll.
    this.consumer.wakeup()
  }

  override def cleanup() {
    this.consumer.close()
  }

  override def commit() {
    this.consumer.commitSync()
  }
}
class OldConsumer(topicFilter: TopicFilter, consumerProps: Properties) extends BaseConsumer {
  import kafka.serializer.DefaultDecoder
  // Old high-level consumer API: one stream matching the topic filter, raw byte decoders.
  val consumerConnector = Consumer.create(new ConsumerConfig(consumerProps))
  val stream: KafkaStream[Array[Byte], Array[Byte]] =
    consumerConnector.createMessageStreamsByFilter(topicFilter, 1, new DefaultDecoder(), new DefaultDecoder()).head
  val iter = stream.iterator
  /** Returns the next message, or throws StreamEndException when the stream has ended. */
  override def receive(): BaseConsumerRecord = {
    if (!iter.hasNext())
      throw new StreamEndException
    val messageAndMetadata = iter.next
    BaseConsumerRecord(messageAndMetadata.topic, messageAndMetadata.partition, messageAndMetadata.offset, messageAndMetadata.key, messageAndMetadata.message)
  }
  // For the old consumer, stop and cleanup are equivalent: both shut down the connector.
  override def stop() {
    this.consumerConnector.shutdown()
  }
  override def cleanup() {
    this.consumerConnector.shutdown()
  }
  /** Commits the offsets consumed so far via the connector. */
  override def commit() {
    this.consumerConnector.commitOffsets
  }
}
| OpenPOWER-BigData/HDP-kafka | core/src/main/scala/kafka/consumer/BaseConsumer.scala | Scala | apache-2.0 | 3,168 |
// Sentinel exception: only its identity matters, so a singleton object suffices.
object Expected extends Exception
object Test {
  // Regression test: assigning a `Nothing`-typed expression (a throw) into arrays of
  // different element types used to crash code generation or emit invalid bytecode.
  // The foo* bodies must keep their exact shape — they pin the problematic patterns.
  def throwExpected: Nothing = throw Expected
  def foo0(a: Array[Double]) = { // does compile for Int instead of Double
    val v = 42
    a(0) = throwExpected // was crash in code gen: java.lang.NegativeArraySizeException
  }
  def foo1(a: Array[Double]) = { // does compile for Int instead of Double
    a(0) = throwExpected // was VerifyError at runtime
  }
  def foo2(a: Array[Int]) = { // does compile for Int instead of Double
    a(0) = throwExpected // was VerifyError at runtime
  }
  def foo3(a: Array[String]) = { // does compile for Int instead of Double
    a(0) = throwExpected // was already working
  }
  def main(args: Array[String]): Unit = {
    check(foo0(new Array[Double](1)))
    check(foo1(new Array[Double](1)))
    check(foo2(new Array[Int](1)))
    check(foo3(new Array[String](1)))
  }
  /** Evaluates `f` and verifies that it throws exactly `Expected`. */
  def check(f: => Any): Unit = {
    try {f ; sys.error("no exception thrown")
    } catch {
      case Expected =>
    }
  }
}
| scala/scala | test/files/run/t10069.scala | Scala | apache-2.0 | 1,002 |
package controllers
import javax.inject.Inject
import models.{ModerationQueue, ModerationRequest}
import play.api.Play
import play.api.libs.json._
import play.api.libs.ws._
import play.api.mvc._
class Application @Inject()(ws: WSClient) extends Controller {
  // Execution context used by async actions and WS calls.
  implicit val context = play.api.libs.concurrent.Execution.Implicits.defaultContext
  /** Renders the index page. */
  def index = Action {
    Ok(views.html.index())
  }
  // The endpoints below are disabled; kept for reference until the moderation API is wired up.
  // def queues = Action {
  //
  //   Ok(Json.obj(
  //     "data" -> Json.obj(
  //       "queues" -> ModerationQueue.status
  //     )
  //   ))
  // }
  //
  // def next(queue: String) = Action {
  //
  //   val moderatorId = 437L // FIXME: get moderatorId from request
  //
  //   Ok(Json.obj(
  //     "data" -> Json.obj(
  //       "next" -> ModerationRequest.next(queue, moderatorId)
  //     )
  //   ))
  // }
  //
  // // def comment(commentId: Long, status: String) = Action.async {
  // //   // FIXME - get moderator Id from request
  // //   val moderatorId = 1L
  // //
  // //   val data = Json.obj(
  // //     "status" -> status
  // //   )
  // //
  // //   val url: String = "http://localhost:8080/" // Discussion API
  // //   val request: WSRequest = ws.url(url)
  // //
  // //   for {
  // //     comment <- request.post(data)
  // //     // action <-
  // //   } yield Ok()
  // // }
  //
  // def delete(requestId: String) = Action {
  //
  //   ModerationRequest.delete(requestId)
  //
  //   Ok(Json.obj(
  //     "status" -> "ok"
  //   ))
  // }
}
| satterly/play-material-modtools | app/controllers/Application.scala | Scala | mit | 1,442 |
package com.komanov.mysql.streaming.jmh
import java.util.concurrent.TimeUnit
import com.komanov.mysql.streaming._
import org.openjdk.jmh.annotations._
// JMH configuration: per-benchmark state, average-time mode reported in microseconds,
// single forked JVM with a 1G heap, one thread, short warmup then five 2s measurements.
@State(Scope.Benchmark)
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@Fork(value = 1, jvmArgs = Array("-Xmx1G"))
@Threads(1)
@Measurement(iterations = 5, time = 2, timeUnit = TimeUnit.SECONDS)
@Warmup(iterations = 1, time = 5, timeUnit = TimeUnit.SECONDS)
abstract class BenchmarkBase(driver: MysqlDriver) {
  // One-off database setup shared by every benchmark subclass.
  OneTimeInitialization.initialize(driver)
  // Row limit per query; injected by JMH from the @Param values below.
  @Param(Array(
    "1", "2", "3", "4", "5", "6", "7", "8", "9",
    "10", "20", "30", "40", "50", "60", "70", "80", "90",
    "100", "200", "300", "400", "500", "600", "700", "800", "900",
    "1000"
  ))
  var limit: Int = 0
  /** Fetches all rows in one shot. */
  @Benchmark
  def atOnce(): List[TestTableRow] = {
    Query.selectAtOnce(driver, limit)
  }
  /** Fetches rows via result-set streaming. */
  @Benchmark
  def stream(): List[TestTableRow] = {
    Query.selectViaStreaming(driver, limit)
  }
}
private[jmh] object OneTimeInitialization {
  // Tracks whether the one-off database setup has already run in this JVM.
  private var done = false

  /**
   * Starts MySQL and (re)seeds the benchmark table exactly once per JVM.
   * Guarded by the object's monitor, so concurrent callers are safe.
   */
  def initialize(driver: MysqlDriver): Unit = synchronized {
    if (!done) {
      MysqlRunner.run()
      Query.clearTable(driver)
      Query.prepareTable(driver)
      done = true
    }
  }
}
| dkomanov/stuff | src/com/komanov/mysql/streaming/jmh/BenchmarkBase.scala | Scala | mit | 1,259 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dllib.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.{T, Table}
/**
 * Mask prediction head: extracts per-ROI features, predicts per-class mask
 * logits, and post-processes them into per-instance masks.
 *
 * @param inChannels    number of input feature channels
 * @param resolution    output resolution of the ROI pooler
 * @param scales        spatial scales of the feature maps fed to the pooler
 * @param samplingRatio sampling ratio used by the ROI pooler
 * @param layers        output channel counts of the successive 3x3 conv layers
 * @param dilation      convolution dilation; only 1 is supported
 * @param numClasses    number of classes predicted by the mask logits layer
 * @param useGn         when true, conv layers are created without bias
 *                      (group-norm setup; the norm layers themselves are TODO)
 */
class MaskHead(
  val inChannels: Int,
  val resolution: Int,
  val scales: Array[Float],
  val samplingRatio: Int,
  val layers: Array[Int],
  val dilation: Int,
  val numClasses: Int,
  val useGn: Boolean = false)(implicit ev: TensorNumeric[Float])
  extends BaseModule[Float] {

  override def buildModel(): Module[Float] = {
    val featureExtractor = this.maskFeatureExtractor(
      inChannels, resolution, scales, samplingRatio, layers, dilation, useGn)
    // Predictor input width = output width of the last extractor conv layer.
    val dimReduced = layers(layers.length - 1)
    val predictor = this.maskPredictor(dimReduced, numClasses, dimReduced)
    val postProcessor = new MaskPostProcessor()
    /**
     * input: feature-maps from possibly several levels and proposal boxes
     * return:
     *   first tensor: the result of the feature extractor
     *   second tensor: proposals (list[BoxList]): during training, the original proposals
     *     are returned. During testing, the predicted boxlists are returned
     *     with the `mask` field set
     */
    val features = Input()
    val proposals = Input()
    val labels = Input()
    val maskFeatures = featureExtractor.inputs(features, proposals)
    val maskLogits = predictor.inputs(maskFeatures)
    val result = postProcessor.inputs(maskLogits, labels)
    Graph(Array(features, proposals, labels), Array(maskFeatures, result))
  }

  /**
   * Builds the predictor: a 2x stride-2 deconv upsampling followed by a 1x1
   * conv producing one logit map per class. Weights use MSRA init, biases zero.
   */
  private[nn] def maskPredictor(inChannels: Int,
                                numClasses: Int,
                                dimReduced: Int): Module[Float] = {
    val convMask = SpatialFullConvolution(inChannels, dimReduced,
      kW = 2, kH = 2, dW = 2, dH = 2)
    val maskLogits = SpatialConvolution(nInputPlane = dimReduced,
      nOutputPlane = numClasses, kernelW = 1, kernelH = 1, strideH = 1, strideW = 1)
    // init weight & bias, MSRAFill by default
    convMask.setInitMethod(MsraFiller(false), Zeros)
    maskLogits.setInitMethod(MsraFiller(false), Zeros)
    val model = Sequential[Float]()
    model.add(convMask).add(ReLU[Float]()).add(maskLogits)
    model
  }

  /**
   * Builds the feature extractor: an ROI pooler followed by a stack of
   * 3x3 conv + ReLU layers whose widths come from `layers`.
   */
  private[nn] def maskFeatureExtractor(inChannels: Int,
                                       resolution: Int,
                                       scales: Array[Float],
                                       samplingRatio: Int,
                                       layers: Array[Int],
                                       dilation: Int,
                                       useGn: Boolean = false): Module[Float] = {
    require(dilation == 1, s"Only support dilation = 1, but got ${dilation}")
    val model = Sequential[Float]()
    model.add(Pooler(resolution, scales, samplingRatio))
    var nextFeatures = inChannels
    var i = 0
    while (i < layers.length) {
      val features = layers(i)
      // todo: support dilation convolution with no bias
      val module = SpatialConvolution[Float](
        nextFeatures,
        features,
        kernelW = 3,
        kernelH = 3,
        strideW = 1,
        strideH = 1,
        padW = dilation,
        padH = dilation,
        withBias = if (useGn) false else true
      ).setName(s"mask_fcn${i + 1}")
      // weight init
      module.setInitMethod(MsraFiller(false), Zeros)
      model.add(module).add(ReLU[Float]())
      nextFeatures = features
      i += 1
    }
    model
  }
}
/**
 * Inference-only post-processing: applies a sigmoid to the mask logits and,
 * for each ROI, selects the single mask channel belonging to its predicted label.
 */
private[nn] class MaskPostProcessor()(implicit ev: TensorNumeric[Float])
  extends AbstractModule[Table, Tensor[Float], Float] {

  // Lazily (re)allocated [0 .. numMasks-1] index buffer, reused across calls.
  @transient var rangeBuffer: Tensor[Float] = null
  private val sigmoid = Sigmoid[Float]()

  /**
   * @param input feature-maps from possibly several levels, proposal boxes and labels
   * @return the predicted boxlists are returned with the `mask` field set
   */
  override def updateOutput(input: Table): Tensor[Float] = {
    val maskLogits = input[Tensor[Float]](1)
    val labels = input[Tensor[Float]](2)
    val num_masks = maskLogits.size(1)
    if (rangeBuffer == null || rangeBuffer.nElement() != num_masks) {
      rangeBuffer = Tensor[Float](num_masks)
      rangeBuffer.range(0, num_masks - 1, 1)
    }
    val mask_prob = sigmoid.forward(maskLogits)
    require(labels.nDimension() == 1, s"Labels should be tensor with one dimension," +
      s"but get ${labels.nDimension()}")
    require(rangeBuffer.nElement() == labels.nElement(), s"number of masks should be same" +
      s"with labels, but get ${rangeBuffer.nElement()} ${labels.nElement()}")
    // Output holds one single-channel mask per ROI, at the logits' spatial size.
    output.resize(rangeBuffer.nElement(), 1, mask_prob.size(3), mask_prob.size(4))
    var i = 1
    while (i <= rangeBuffer.nElement()) {
      val dim = rangeBuffer.valueAt(i).toInt + 1
      val index = labels.valueAt(i).toInt // start from 1
      // NOTE(review): narrow uses `index + 1` although the comment above says labels
      // already start from 1 — confirm the label indexing convention (0- vs 1-based).
      output.narrow(1, i, 1).copy(mask_prob.narrow(1, i, 1).narrow(2, index + 1, 1))
      i += 1
    }
    output
  }

  // Backward pass is deliberately unsupported: this module is inference-only.
  override def updateGradInput(input: Table, gradOutput: Tensor[Float]): Table = {
    throw new UnsupportedOperationException("MaskPostProcessor only support inference")
  }
}
object MaskHead {
  /**
   * Factory method with commonly used default hyper-parameters.
   * Note: the parameter name `samplingRratio` (sic) is kept despite the typo,
   * since renaming it would break callers using named arguments.
   */
  def apply(inChannels: Int,
            resolution: Int = 14,
            scales: Array[Float] = Array[Float](0.25f, 0.125f, 0.0625f, 0.03125f),
            samplingRratio: Int = 2,
            layers: Array[Int] = Array[Int](256, 256, 256, 256),
            dilation: Int = 1,
            numClasses: Int = 81,
            useGn: Boolean = false)(implicit ev: TensorNumeric[Float]): Module[Float] = {
    new MaskHead(inChannels, resolution, scales, samplingRratio,
      layers, dilation, numClasses, useGn)
  }
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/nn/MaskHead.scala | Scala | apache-2.0 | 6,343 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst
import java.beans.{Introspector, PropertyDescriptor}
import java.lang.{Iterable => JIterable}
import java.lang.reflect.Type
import java.util.{Iterator => JIterator, List => JList, Map => JMap}
import scala.language.existentials
import com.google.common.reflect.TypeToken
import org.apache.spark.sql.catalyst.DeserializerBuildHelper._
import org.apache.spark.sql.catalyst.SerializerBuildHelper._
import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.objects._
import org.apache.spark.sql.catalyst.util.ArrayBasedMapData
import org.apache.spark.sql.types._
/**
* Type-inference utilities for POJOs and Java collections.
*/
object JavaTypeInference {
  // Pre-resolved TypeTokens and generic return types used below to recover the
  // element/key/value types of Java collections despite erasure.
  private val iterableType = TypeToken.of(classOf[JIterable[_]])
  private val mapType = TypeToken.of(classOf[JMap[_, _]])
  private val listType = TypeToken.of(classOf[JList[_]])
  private val iteratorReturnType = classOf[JIterable[_]].getMethod("iterator").getGenericReturnType
  private val nextReturnType = classOf[JIterator[_]].getMethod("next").getGenericReturnType
  private val keySetReturnType = classOf[JMap[_, _]].getMethod("keySet").getGenericReturnType
  private val valuesReturnType = classOf[JMap[_, _]].getMethod("values").getGenericReturnType
  /**
   * Infers the corresponding SQL data type of a JavaBean class.
   * @param beanClass Java type
   * @return (SQL data type, nullable)
   */
  def inferDataType(beanClass: Class[_]): (DataType, Boolean) = {
    inferDataType(TypeToken.of(beanClass))
  }
  /**
   * Infers the corresponding SQL data type of a Java type.
   * @param beanType Java type
   * @return (SQL data type, nullable)
   */
  private[sql] def inferDataType(beanType: Type): (DataType, Boolean) = {
    inferDataType(TypeToken.of(beanType))
  }
  /**
   * Infers the corresponding SQL data type of a Java type.
   * `seenTypeSet` carries the bean classes already on the recursion path and is
   * used solely to detect (and reject) circular references.
   * @param typeToken Java type
   * @return (SQL data type, nullable)
   */
  private def inferDataType(typeToken: TypeToken[_], seenTypeSet: Set[Class[_]] = Set.empty)
    : (DataType, Boolean) = {
    typeToken.getRawType match {
      // User-defined types: either annotated on the class or registered globally.
      case c: Class[_] if c.isAnnotationPresent(classOf[SQLUserDefinedType]) =>
        (c.getAnnotation(classOf[SQLUserDefinedType]).udt().getConstructor().newInstance(), true)
      case c: Class[_] if UDTRegistration.exists(c.getName) =>
        val udt = UDTRegistration.getUDTFor(c.getName).get.getConstructor().newInstance()
          .asInstanceOf[UserDefinedType[_ >: Null]]
        (udt, true)
      case c: Class[_] if c == classOf[java.lang.String] => (StringType, true)
      case c: Class[_] if c == classOf[Array[Byte]] => (BinaryType, true)
      // Primitive types can never hold null, hence nullable = false below.
      case c: Class[_] if c == java.lang.Short.TYPE => (ShortType, false)
      case c: Class[_] if c == java.lang.Integer.TYPE => (IntegerType, false)
      case c: Class[_] if c == java.lang.Long.TYPE => (LongType, false)
      case c: Class[_] if c == java.lang.Double.TYPE => (DoubleType, false)
      case c: Class[_] if c == java.lang.Byte.TYPE => (ByteType, false)
      case c: Class[_] if c == java.lang.Float.TYPE => (FloatType, false)
      case c: Class[_] if c == java.lang.Boolean.TYPE => (BooleanType, false)
      // Boxed wrappers may be null, hence nullable = true.
      case c: Class[_] if c == classOf[java.lang.Short] => (ShortType, true)
      case c: Class[_] if c == classOf[java.lang.Integer] => (IntegerType, true)
      case c: Class[_] if c == classOf[java.lang.Long] => (LongType, true)
      case c: Class[_] if c == classOf[java.lang.Double] => (DoubleType, true)
      case c: Class[_] if c == classOf[java.lang.Byte] => (ByteType, true)
      case c: Class[_] if c == classOf[java.lang.Float] => (FloatType, true)
      case c: Class[_] if c == classOf[java.lang.Boolean] => (BooleanType, true)
      case c: Class[_] if c == classOf[java.math.BigDecimal] => (DecimalType.SYSTEM_DEFAULT, true)
      case c: Class[_] if c == classOf[java.math.BigInteger] => (DecimalType.BigIntDecimal, true)
      case c: Class[_] if c == classOf[java.sql.Date] => (DateType, true)
      case c: Class[_] if c == classOf[java.sql.Timestamp] => (TimestampType, true)
      case _ if typeToken.isArray =>
        val (dataType, nullable) = inferDataType(typeToken.getComponentType, seenTypeSet)
        (ArrayType(dataType, nullable), true)
      case _ if iterableType.isAssignableFrom(typeToken) =>
        val (dataType, nullable) = inferDataType(elementType(typeToken), seenTypeSet)
        (ArrayType(dataType, nullable), true)
      case _ if mapType.isAssignableFrom(typeToken) =>
        val (keyType, valueType) = mapKeyValueType(typeToken)
        val (keyDataType, _) = inferDataType(keyType, seenTypeSet)
        val (valueDataType, nullable) = inferDataType(valueType, seenTypeSet)
        (MapType(keyDataType, valueDataType, nullable), true)
      // Java enums are represented by their (string) name.
      case other if other.isEnum =>
        (StringType, true)
      // Anything else is treated as a JavaBean and mapped to a struct of its properties.
      case other =>
        if (seenTypeSet.contains(other)) {
          throw new UnsupportedOperationException(
            "Cannot have circular references in bean class, but got the circular reference " +
              s"of class $other")
        }
        // TODO: we should only collect properties that have getter and setter. However, some tests
        // pass in scala case class as java bean class which doesn't have getter and setter.
        val properties = getJavaBeanReadableProperties(other)
        val fields = properties.map { property =>
          val returnType = typeToken.method(property.getReadMethod).getReturnType
          val (dataType, nullable) = inferDataType(returnType, seenTypeSet + other)
          new StructField(property.getName, dataType, nullable)
        }
        (new StructType(fields), true)
    }
  }
  /** Returns the bean properties that have a getter, excluding the synthetic
   *  `class` / `declaringClass` properties every object exposes. */
  def getJavaBeanReadableProperties(beanClass: Class[_]): Array[PropertyDescriptor] = {
    val beanInfo = Introspector.getBeanInfo(beanClass)
    beanInfo.getPropertyDescriptors.filterNot(_.getName == "class")
      .filterNot(_.getName == "declaringClass")
      .filter(_.getReadMethod != null)
  }
  // Properties usable for round-tripping: readable (getter) AND writable (setter).
  private def getJavaBeanReadableAndWritableProperties(
      beanClass: Class[_]): Array[PropertyDescriptor] = {
    getJavaBeanReadableProperties(beanClass).filter(_.getWriteMethod != null)
  }
  /** Resolves the element type of a java.lang.Iterable subtype via the
   *  generic return type of `iterator().next()`. */
  private def elementType(typeToken: TypeToken[_]): TypeToken[_] = {
    val typeToken2 = typeToken.asInstanceOf[TypeToken[_ <: JIterable[_]]]
    val iterableSuperType = typeToken2.getSupertype(classOf[JIterable[_]])
    val iteratorType = iterableSuperType.resolveType(iteratorReturnType)
    iteratorType.resolveType(nextReturnType)
  }
  /** Resolves the (key, value) element types of a java.util.Map subtype
   *  via its `keySet()` and `values()` generic return types. */
  private def mapKeyValueType(typeToken: TypeToken[_]): (TypeToken[_], TypeToken[_]) = {
    val typeToken2 = typeToken.asInstanceOf[TypeToken[_ <: JMap[_, _]]]
    val mapSuperType = typeToken2.getSupertype(classOf[JMap[_, _]])
    val keyType = elementType(mapSuperType.resolveType(keySetReturnType))
    val valueType = elementType(mapSuperType.resolveType(valuesReturnType))
    keyType -> valueType
  }
  /**
   * Returns the Spark SQL DataType for a given java class. Where this is not an exact mapping
   * to a native type, an ObjectType is returned.
   *
   * Unlike `inferDataType`, this function doesn't do any massaging of types into the Spark SQL type
   * system. As a result, ObjectType will be returned for things like boxed Integers.
   */
  private def inferExternalType(cls: Class[_]): DataType = cls match {
    case c if c == java.lang.Boolean.TYPE => BooleanType
    case c if c == java.lang.Byte.TYPE => ByteType
    case c if c == java.lang.Short.TYPE => ShortType
    case c if c == java.lang.Integer.TYPE => IntegerType
    case c if c == java.lang.Long.TYPE => LongType
    case c if c == java.lang.Float.TYPE => FloatType
    case c if c == java.lang.Double.TYPE => DoubleType
    case c if c == classOf[Array[Byte]] => BinaryType
    case _ => ObjectType(cls)
  }
  /**
   * Returns an expression that can be used to deserialize a Spark SQL representation to an object
   * of java bean `T` with a compatible schema. The Spark SQL representation is located at ordinal
   * 0 of a row, i.e., `GetColumnByOrdinal(0, _)`. Nested classes will have their fields accessed
   * using `UnresolvedExtractValue`.
   */
  def deserializerFor(beanClass: Class[_]): Expression = {
    val typeToken = TypeToken.of(beanClass)
    val walkedTypePath = new WalkedTypePath().recordRoot(beanClass.getCanonicalName)
    val (dataType, nullable) = inferDataType(typeToken)
    // Assumes we are deserializing the first column of a row.
    deserializerForWithNullSafetyAndUpcast(GetColumnByOrdinal(0, dataType), dataType,
      nullable = nullable, walkedTypePath, (casted, walkedTypePath) => {
        deserializerFor(typeToken, casted, walkedTypePath)
      })
  }
  /**
   * Builds the deserializer expression for one Java type. `path` is the expression
   * that extracts the corresponding Catalyst value; `walkedTypePath` records the
   * traversal for error messages.
   */
  private def deserializerFor(
      typeToken: TypeToken[_],
      path: Expression,
      walkedTypePath: WalkedTypePath): Expression = {
    typeToken.getRawType match {
      // Natively-representable types need no conversion: the extracted value is used as-is.
      case c if !inferExternalType(c).isInstanceOf[ObjectType] => path
      case c if c == classOf[java.lang.Short] ||
          c == classOf[java.lang.Integer] ||
          c == classOf[java.lang.Long] ||
          c == classOf[java.lang.Double] ||
          c == classOf[java.lang.Float] ||
          c == classOf[java.lang.Byte] ||
          c == classOf[java.lang.Boolean] =>
        createDeserializerForTypesSupportValueOf(path, c)
      case c if c == classOf[java.time.LocalDate] =>
        createDeserializerForLocalDate(path)
      case c if c == classOf[java.sql.Date] =>
        createDeserializerForSqlDate(path)
      case c if c == classOf[java.time.Instant] =>
        createDeserializerForInstant(path)
      case c if c == classOf[java.sql.Timestamp] =>
        createDeserializerForSqlTimestamp(path)
      case c if c == classOf[java.lang.String] =>
        createDeserializerForString(path, returnNullable = true)
      case c if c == classOf[java.math.BigDecimal] =>
        createDeserializerForJavaBigDecimal(path, returnNullable = true)
      case c if c == classOf[java.math.BigInteger] =>
        createDeserializerForJavaBigInteger(path, returnNullable = true)
      case c if c.isArray =>
        val elementType = c.getComponentType
        val newTypePath = walkedTypePath.recordArray(elementType.getCanonicalName)
        val (dataType, elementNullable) = inferDataType(elementType)
        val mapFunction: Expression => Expression = element => {
          // upcast the array element to the data type the encoder expected.
          deserializerForWithNullSafetyAndUpcast(
            element,
            dataType,
            nullable = elementNullable,
            newTypePath,
            (casted, typePath) => deserializerFor(typeToken.getComponentType, casted, typePath))
        }
        val arrayData = UnresolvedMapObjects(mapFunction, path)
        // Primitive component types get a specialized extraction method on ArrayData.
        val methodName = elementType match {
          case c if c == java.lang.Integer.TYPE => "toIntArray"
          case c if c == java.lang.Long.TYPE => "toLongArray"
          case c if c == java.lang.Double.TYPE => "toDoubleArray"
          case c if c == java.lang.Float.TYPE => "toFloatArray"
          case c if c == java.lang.Short.TYPE => "toShortArray"
          case c if c == java.lang.Byte.TYPE => "toByteArray"
          case c if c == java.lang.Boolean.TYPE => "toBooleanArray"
          // non-primitive
          case _ => "array"
        }
        Invoke(arrayData, methodName, ObjectType(c))
      case c if listType.isAssignableFrom(typeToken) =>
        val et = elementType(typeToken)
        val newTypePath = walkedTypePath.recordArray(et.getType.getTypeName)
        val (dataType, elementNullable) = inferDataType(et)
        val mapFunction: Expression => Expression = element => {
          // upcast the array element to the data type the encoder expected.
          deserializerForWithNullSafetyAndUpcast(
            element,
            dataType,
            nullable = elementNullable,
            newTypePath,
            (casted, typePath) => deserializerFor(et, casted, typePath))
        }
        UnresolvedMapObjects(mapFunction, path, customCollectionCls = Some(c))
      case _ if mapType.isAssignableFrom(typeToken) =>
        val (keyType, valueType) = mapKeyValueType(typeToken)
        val newTypePath = walkedTypePath.recordMap(keyType.getType.getTypeName,
          valueType.getType.getTypeName)
        // Keys and values are deserialized into parallel arrays, then zipped into a Java map.
        val keyData =
          Invoke(
            UnresolvedMapObjects(
              p => deserializerFor(keyType, p, newTypePath),
              MapKeys(path)),
            "array",
            ObjectType(classOf[Array[Any]]))
        val valueData =
          Invoke(
            UnresolvedMapObjects(
              p => deserializerFor(valueType, p, newTypePath),
              MapValues(path)),
            "array",
            ObjectType(classOf[Array[Any]]))
        StaticInvoke(
          ArrayBasedMapData.getClass,
          ObjectType(classOf[JMap[_, _]]),
          "toJavaMap",
          keyData :: valueData :: Nil,
          returnNullable = false)
      // Enums are rebuilt from their string name via `valueOf`.
      case other if other.isEnum =>
        createDeserializerForTypesSupportValueOf(
          createDeserializerForString(path, returnNullable = false),
          other)
      // Java beans: instantiate with the no-arg constructor, then invoke each setter.
      case other =>
        val properties = getJavaBeanReadableAndWritableProperties(other)
        val setters = properties.map { p =>
          val fieldName = p.getName
          val fieldType = typeToken.method(p.getReadMethod).getReturnType
          val (dataType, nullable) = inferDataType(fieldType)
          val newTypePath = walkedTypePath.recordField(fieldType.getType.getTypeName, fieldName)
          val setter = expressionWithNullSafety(
            deserializerFor(fieldType, addToPath(path, fieldName, dataType, newTypePath),
              newTypePath),
            nullable = nullable,
            newTypePath)
          p.getWriteMethod.getName -> setter
        }.toMap
        val newInstance = NewInstance(other, Nil, ObjectType(other), propagateNull = false)
        val result = InitializeJavaBean(newInstance, setters)
        // A null input row maps to a null bean rather than an empty one.
        expressions.If(
          IsNull(path),
          expressions.Literal.create(null, ObjectType(other)),
          result
        )
    }
  }
  /**
   * Returns an expression for serializing an object of the given type to a Spark SQL
   * representation. The input object is located at ordinal 0 of a row, i.e.,
   * `BoundReference(0, _)`.
   */
  def serializerFor(beanClass: Class[_]): Expression = {
    val inputObject = BoundReference(0, ObjectType(beanClass), nullable = true)
    val nullSafeInput = AssertNotNull(inputObject, Seq("top level input bean"))
    serializerFor(nullSafeInput, TypeToken.of(beanClass))
  }
  /**
   * Builds the serializer expression converting `inputObject` (of Java type
   * `typeToken`) into its Catalyst representation.
   */
  private def serializerFor(inputObject: Expression, typeToken: TypeToken[_]): Expression = {
    // Serializes an array-like input; primitive arrays take a specialized fast path.
    def toCatalystArray(input: Expression, elementType: TypeToken[_]): Expression = {
      val (dataType, nullable) = inferDataType(elementType)
      if (ScalaReflection.isNativeType(dataType)) {
        val cls = input.dataType.asInstanceOf[ObjectType].cls
        if (cls.isArray && cls.getComponentType.isPrimitive) {
          createSerializerForPrimitiveArray(input, dataType)
        } else {
          createSerializerForGenericArray(input, dataType, nullable = nullable)
        }
      } else {
        createSerializerForMapObjects(input, ObjectType(elementType.getRawType),
          serializerFor(_, elementType))
      }
    }
    if (!inputObject.dataType.isInstanceOf[ObjectType]) {
      // Already a native Catalyst value: no conversion required.
      inputObject
    } else {
      typeToken.getRawType match {
        case c if c == classOf[String] => createSerializerForString(inputObject)
        case c if c == classOf[java.time.Instant] => createSerializerForJavaInstant(inputObject)
        case c if c == classOf[java.sql.Timestamp] => createSerializerForSqlTimestamp(inputObject)
        case c if c == classOf[java.time.LocalDate] => createSerializerForJavaLocalDate(inputObject)
        case c if c == classOf[java.sql.Date] => createSerializerForSqlDate(inputObject)
        case c if c == classOf[java.math.BigDecimal] =>
          createSerializerForJavaBigDecimal(inputObject)
        case c if c == classOf[java.lang.Boolean] => createSerializerForBoolean(inputObject)
        case c if c == classOf[java.lang.Byte] => createSerializerForByte(inputObject)
        case c if c == classOf[java.lang.Short] => createSerializerForShort(inputObject)
        case c if c == classOf[java.lang.Integer] => createSerializerForInteger(inputObject)
        case c if c == classOf[java.lang.Long] => createSerializerForLong(inputObject)
        case c if c == classOf[java.lang.Float] => createSerializerForFloat(inputObject)
        case c if c == classOf[java.lang.Double] => createSerializerForDouble(inputObject)
        case _ if typeToken.isArray =>
          toCatalystArray(inputObject, typeToken.getComponentType)
        case _ if listType.isAssignableFrom(typeToken) =>
          toCatalystArray(inputObject, elementType(typeToken))
        case _ if mapType.isAssignableFrom(typeToken) =>
          val (keyType, valueType) = mapKeyValueType(typeToken)
          createSerializerForMap(
            inputObject,
            MapElementInformation(
              ObjectType(keyType.getRawType),
              nullable = true,
              serializerFor(_, keyType)),
            MapElementInformation(
              ObjectType(valueType.getRawType),
              nullable = true,
              serializerFor(_, valueType))
          )
        // Enums are serialized as their string name.
        case other if other.isEnum =>
          createSerializerForString(
            Invoke(inputObject, "name", ObjectType(classOf[String]), returnNullable = false))
        // Java beans: serialize each readable+writable property via its getter.
        case other =>
          val properties = getJavaBeanReadableAndWritableProperties(other)
          val fields = properties.map { p =>
            val fieldName = p.getName
            val fieldType = typeToken.method(p.getReadMethod).getReturnType
            val fieldValue = Invoke(
              inputObject,
              p.getReadMethod.getName,
              inferExternalType(fieldType.getRawType))
            (fieldName, serializerFor(fieldValue, fieldType))
          }
          createSerializerForObject(inputObject, fields)
      }
    }
  }
}
| Aegeaner/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala | Scala | apache-2.0 | 19,136 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.dstream
import org.apache.spark.streaming.{Duration, DStream, Time}
import org.apache.spark.rdd.RDD
import scala.reflect.ClassTag
private[streaming]
class MappedDStream[T: ClassTag, U: ClassTag] (
    parent: DStream[T],
    mapFunc: T => U
  ) extends DStream[U](parent.ssc) {

  // This stream depends solely on its parent.
  override def dependencies = List(parent)

  // Mapping elements does not change the batching interval.
  override def slideDuration: Duration = parent.slideDuration

  /** Applies `mapFunc` element-wise to the parent's RDD for `validTime`, if one exists. */
  override def compute(validTime: Time): Option[RDD[U]] = {
    for (rdd <- parent.getOrCompute(validTime)) yield rdd.map(mapFunc)
  }
}
| mkolod/incubator-spark | streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala | Scala | apache-2.0 | 1,350 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of Scala code snippets matching specific criteria, giving a quick overview of the dataset's contents.