code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package com.sksamuel.elastic4s
import org.scalatest.{ FlatSpec, Matchers }
class ElasticsearchClientUriTest extends FlatSpec with Matchers {

  "elasticsearch uri" should "parse multiple host/ports" in {
    val uri = ElasticsearchClientUri("elasticsearch://host1:1234,host2:2345")
    uri.hosts shouldBe List("host1" -> 1234, "host2" -> 2345)
  }

  it should "parse single host/ports" in {
    val uri = ElasticsearchClientUri("elasticsearch://host1:1234")
    uri.hosts shouldBe List("host1" -> 1234)
  }

  // Renamed from "errors on trailing commas": the assertion below demonstrates
  // that a trailing comma is tolerated (the host list still parses), not that
  // an error is raised, so the old name contradicted the tested behaviour.
  it should "ignore trailing commas" in {
    val uri = ElasticsearchClientUri("elasticsearch://host1:1234,")
    uri.hosts shouldBe List("host1" -> 1234)
  }

  it should "errors on missing values between commas" in {
    intercept[IllegalArgumentException] {
      ElasticsearchClientUri("elasticsearch://host1:1234,,host2:9999")
    } should not be null
  }
}
| l15k4/elastic4s | elastic4s-core/src/test/scala/com/sksamuel/elastic4s/ElasticsearchClientUriTest.scala | Scala | apache-2.0 | 877 |
/*
* FILE: GeoSparkMetrics.scala
* Copyright (c) 2015 - 2019 GeoSpark Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.datasyslab.geospark.monitoring
import org.apache.spark.SparkContext
/**
 * Factory for GeoSpark metrics that are registered as accumulators on a
 * SparkContext under the "geospark.spatialjoin." namespace.
 */
object GeoSparkMetrics {

  /** Creates a new [[GeoSparkMetric]] and registers it with the given SparkContext. */
  def createMetric(sc: SparkContext, name: String): GeoSparkMetric = {
    val metric = new GeoSparkMetric()
    sc.register(metric, s"geospark.spatialjoin.$name")
    metric
  }
}
| Sarwat/GeoSpark | core/src/main/scala/org/datasyslab/geospark/monitoring/GeoSparkMetrics.scala | Scala | mit | 932 |
/*
* Copyright (C) 2020 MapRoulette contributors (see CONTRIBUTORS.md).
* Licensed under the Apache License, Version 2.0 (see LICENSE).
*/
package org.maproulette.controllers
import java.sql.Connection
import org.maproulette.exception.{NotFoundException, StatusMessage}
import org.maproulette.framework.model.User
import org.maproulette.models.BaseObject
import org.maproulette.models.dal.ParentDAL
import play.api.libs.json._
import play.api.mvc.{AbstractController, Action, AnyContent}
/**
* Base controller for parent objects, namely Projects and Challenges. This controller helps in
* building the children object of the parent. The CRUDController handles all the basic operations
* of the object
*
* @author cuthbertm
*/
trait ParentController[T <: BaseObject[Long], C <: BaseObject[Long]] extends CRUDController[T] {
  this: AbstractController =>

  // The data access layer for the parent
  override protected val dal: ParentDAL[Long, T, C]
  // The CRUD controller of the child, in the case of a Challenge this is technically a ParentController
  protected val childController: CRUDController[C]
  // reads function for the json in the post body to the parent object
  override implicit val tReads: Reads[T]
  // writes function for the parent object to json
  override implicit val tWrites: Writes[T]
  // reads function for the json in the post body to the child object
  protected val cReads: Reads[C]
  // writes function for the child object to json
  protected val cWrites: Writes[C]

  /**
   * Restores a previously (soft-)deleted object.
   * Must be authenticated to perform operation
   *
   * @param id The id of the object to restore
   * @return 200 OK with the undeleted object as json, throws NotFoundException if no object exists
   */
  def undelete(id: Long): Action[AnyContent] = Action.async { implicit request =>
    this.sessionManager.authenticatedRequest { implicit user =>
      this.dal.retrieveById(id) match {
        // Only existence matters here, so the retrieved object is not bound.
        case Some(_) =>
          Ok(Json.toJson(this.dal.undelete(id, user)))
        case None =>
          throw new NotFoundException(
            s"Object with id [$id] was not found, this is most likely because it has been removed from the database and cannot be undeleted."
          )
      }
    }
  }

  /**
   * Passes off all the work to the createChildren function as that function understands how to
   * update objects as well.
   * Must be authenticated to perform operation
   *
   * @param id The id of the parent
   * @return 201 Created with no content
   */
  def updateChildren(implicit id: Long): Action[JsValue] = this.createChildren

  /**
   * This function is very similar to the batch upload, however it implies a object hierarchy by
   * forcing the top level object to be defined. The entire object does not have to be defined if
   * it has already been created.
   * Must be authenticated to perform operation
   *
   * @param id The id of the parent
   * @return 201 Created with no content
   */
  def createChildren(implicit id: Long): Action[JsValue] = Action.async(bodyParsers.json) {
    implicit request =>
      this.sessionManager.authenticatedRequest { implicit user =>
        // retrieveById resolves the parent through the implicit id parameter
        this.dal.retrieveById match {
          case Some(parent) =>
            this.extractAndCreate(Json.obj("children" -> request.body), parent, user)
            Created
          case None =>
            val message = s"Bad id, no parent found with supplied id [$id]"
            logger.error(message)
            NotFound(Json.toJson(StatusMessage("KO", JsString(message))))
        }
      }
  }

  /**
   * Function can be implemented to extract more information than just the default create data,
   * to build other objects with the current object at the core. No data will be returned from this
   * function, it purely does work in the background AFTER creating the current object
   *
   * @param body The Json body of data
   * @param createdObject The object that was created by the create function
   * @param user the user executing the request
   */
  override def extractAndCreate(body: JsValue, createdObject: T, user: User)(
      implicit c: Option[Connection] = None
  ): Unit = {
    implicit val reads: Reads[C] = cReads
    (body \\ "children").asOpt[List[JsValue]] match {
      case Some(children) =>
        children map { child =>
          // add the parent id to the child.
          child.transform(parentAddition(createdObject.id)) match {
            case JsSuccess(value, _) =>
              // A supplied "id" means the child already exists and is updated;
              // otherwise it is validated and created from scratch.
              (value \\ "id").asOpt[String] match {
                case Some(identifier) =>
                  this.childController.internalUpdate(
                    this.childController.updateUpdateBody(value, user),
                    user
                  )(identifier, -1)
                case None =>
                  this.childController
                    .updateCreateBody(value, user)
                    .validate[C]
                    .fold(
                      errors => {
                        throw new Exception(JsError.toJson(errors).toString)
                      },
                      element => {
                        try {
                          this.childController.internalCreate(value, element, user)
                        } catch {
                          case e: Exception =>
                            logger.error(e.getMessage, e)
                            throw e
                        }
                      }
                    )
              }
            case JsError(errors) =>
              logger.error(JsError.toJson(errors).toString)
              throw new Exception(JsError.toJson(errors).toString)
          }
        }
      case None => // ignore
    }
  }

  /**
   * Json transformer that will add the parent id into all the child objects that are being
   * created at the same time. It will also overwrite any parent id's that are already there. The
   * parent is defined by the json structure.
   *
   * @param id The id of the parent
   * @return
   */
  def parentAddition(id: Long): Reads[JsObject] = {
    __.json.update(
      __.read[JsObject] map { o =>
        o ++ Json.obj("parent" -> Json.toJson(id))
      }
    )
  }

  /**
   * Lists all the children of a given parent. This could be very costly, if you are listing all
   * the children of a task with no limit.
   *
   * @param id The parent id
   * @param limit The limit of how many objects to be returned
   * @param page for paging
   * @return 200 OK with json array of children objects
   */
  def listChildren(id: Long, limit: Int, page: Int): Action[AnyContent] = Action.async {
    implicit request =>
      implicit val writes: Writes[C] = this.cWrites
      this.sessionManager.userAwareRequest { implicit user =>
        Ok(Json.toJson(this.dal.listChildren(limit, page)(id).map(this.childController.inject)))
      }
  }

  /**
   * This function will list all the children and then place it in a "children" key under the
   * parent object. Ie. return the parent and it's children. The primary workload is completed
   * by the listChildren function, with this function really just retrieving the information of the
   * parent
   *
   * @param id The parent id
   * @param limit The limit of how many objects to be returned
   * @param page page number starting at 0, used to determine offset
   * @return 200 Ok with parent json object containing children objects
   */
  def expandedList(id: Long, limit: Int, page: Int): Action[AnyContent] = Action.async {
    implicit request =>
      implicit val writes: Writes[C] = cWrites
      this.sessionManager.userAwareRequest { implicit user =>
        val offset = limit * page
        // now replace the parent field in the parent with a children array
        Json
          .toJson(this.dal.retrieveById(id))
          .transform(this.childrenAddition(this.dal.listChildren(limit, offset)(id))) match {
          case JsSuccess(value, _) => Ok(value)
          case JsError(errors) =>
            logger.error(JsError.toJson(errors).toString)
            InternalServerError(Json.toJson(StatusMessage("KO", JsError.toJson(errors))))
        }
      }
  }

  /**
   * Adds the child json array to the parent object
   *
   * @param children The list of children objects to add
   * @return
   */
  def childrenAddition(children: List[C]): Reads[JsObject] = {
    implicit val writes: Writes[C] = cWrites
    __.json.update(
      __.read[JsObject] map { o =>
        o ++ Json.obj("children" -> Json.toJson(children))
      }
    )
  }
}
| mgcuthbert/maproulette2 | app/org/maproulette/controllers/ParentController.scala | Scala | apache-2.0 | 8,367 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.util.{Objects, UUID}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.plans.logical.EventTimeWatermark
import org.apache.spark.sql.catalyst.trees.TreePattern
import org.apache.spark.sql.catalyst.trees.TreePattern._
import org.apache.spark.sql.catalyst.util.quoteIfNeeded
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.types._
import org.apache.spark.util.collection.BitSet
import org.apache.spark.util.collection.ImmutableBitSet
object NamedExpression {
  // Monotonically increasing counter backing newExprId; unique within this JVM only.
  private val currentId = new java.util.concurrent.atomic.AtomicLong()
  // Identifies this JVM so that ExprIds generated in different JVMs never collide.
  private[expressions] val jvmId = UUID.randomUUID()

  /** Allocates a fresh, globally unique expression id. */
  def newExprId: ExprId = ExprId(currentId.getAndIncrement(), jvmId)

  /** Extractor exposing a named expression as its (name, dataType) pair. */
  def unapply(expr: NamedExpression): Option[(String, DataType)] = Some((expr.name, expr.dataType))
}
/**
* A globally unique id for a given named expression.
* Used to identify which attribute output by a relation is being
* referenced in a subsequent computation.
*
* The `id` field is unique within a given JVM, while the `uuid` is used to uniquely identify JVMs.
*/
case class ExprId(id: Long, jvmId: UUID) {

  // Structural equality over both the per-JVM counter and the JVM identifier.
  override def equals(other: Any): Boolean = other match {
    case that: ExprId => id == that.id && jvmId == that.jvmId
    case _ => false
  }

  // Hashes only `id`: equal ExprIds necessarily share an id, so this stays
  // consistent with equals while skipping the UUID.
  override def hashCode(): Int = id.hashCode()
}
object ExprId {
  // Convenience constructor: tags the counter value with this JVM's identifier.
  def apply(id: Long): ExprId = ExprId(id, NamedExpression.jvmId)
}
/**
* An [[Expression]] that is named.
*/
trait NamedExpression extends Expression {
  /** We should never fold named expressions in order to not remove the alias. */
  override def foldable: Boolean = false

  /** The (possibly non-unique) name of this expression, e.g. a column or alias name. */
  def name: String

  /** Globally unique id distinguishing attributes that share the same name. */
  def exprId: ExprId

  /**
   * Returns a dot separated fully qualified name for this attribute. Given that there can be
   * multiple qualifiers, it is possible that there are other possible way to refer to this
   * attribute.
   */
  def qualifiedName: String = (qualifier :+ name).mkString(".")

  /**
   * Optional qualifier for the expression.
   * Qualifier can also contain the fully qualified information, for e.g, Sequence of string
   * containing the database and the table name
   *
   * For now, since we do not allow using original table name to qualify a column name once the
   * table is aliased, this can only be:
   *
   * 1. Empty Seq: when an attribute doesn't have a qualifier,
   *    e.g. top level attributes aliased in the SELECT clause, or column from a LocalRelation.
   * 2. Seq with a Single element: either the table name or the alias name of the table.
   * 3. Seq with 2 elements: database name and table name
   * 4. Seq with 3 elements: catalog name, database name and table name
   */
  def qualifier: Seq[String]

  /** The attribute that downstream operators use to reference this expression's result. */
  def toAttribute: Attribute

  /** Returns the metadata when an expression is a reference to another expression with metadata. */
  def metadata: Metadata = Metadata.empty

  /** Returns a copy of this expression with a new `exprId`. */
  def newInstance(): NamedExpression

  // Debug-string suffix: "L" marks resolved Long-typed expressions, empty otherwise.
  protected def typeSuffix =
    if (resolved) {
      dataType match {
        case LongType => "L"
        case _ => ""
      }
    } else {
      ""
    }
}
abstract class Attribute extends LeafExpression with NamedExpression with NullIntolerant {

  // An attribute references exactly itself.
  @transient
  override lazy val references: AttributeSet = AttributeSet(this)

  /** Returns a copy of this attribute with the given nullability. */
  def withNullability(newNullability: Boolean): Attribute
  /** Returns a copy of this attribute with the given qualifier. */
  def withQualifier(newQualifier: Seq[String]): Attribute
  /** Returns a copy of this attribute with the given name. */
  def withName(newName: String): Attribute
  /** Returns a copy of this attribute with the given metadata. */
  def withMetadata(newMetadata: Metadata): Attribute
  /** Returns a copy of this attribute with the given expression id. */
  def withExprId(newExprId: ExprId): Attribute

  // An attribute is already an attribute; no conversion needed.
  override def toAttribute: Attribute = this
  def newInstance(): Attribute
}
/**
* Used to assign a new name to a computation.
* For example the SQL expression "1 + 1 AS a" could be represented as follows:
* Alias(Add(Literal(1), Literal(1)), "a")()
*
* Note that exprId and qualifiers are in a separate parameter list because
* we only pattern match on child and name.
*
* Note that when creating a new Alias, all the [[AttributeReference]] that refer to
* the original alias should be updated to the new one.
*
* @param child The computation being performed
* @param name The name to be associated with the result of computing [[child]].
* @param exprId A globally unique id used to check if an [[AttributeReference]] refers to this
* alias. Auto-assigned if left blank.
* @param qualifier An optional Seq of string that can be used to refer to this attribute in a
* fully qualified way. Consider the examples tableName.name, subQueryAlias.name.
* tableName and subQueryAlias are possible qualifiers.
* @param explicitMetadata Explicit metadata associated with this alias that overwrites child's.
* @param nonInheritableMetadataKeys Keys of metadata entries that are supposed to be removed when
* inheriting the metadata from the child.
*/
case class Alias(child: Expression, name: String)(
    val exprId: ExprId = NamedExpression.newExprId,
    val qualifier: Seq[String] = Seq.empty,
    val explicitMetadata: Option[Metadata] = None,
    val nonInheritableMetadataKeys: Seq[String] = Seq.empty)
  extends UnaryExpression with NamedExpression {

  final override val nodePatterns: Seq[TreePattern] = Seq(ALIAS)

  // Alias(Generator, xx) need to be transformed into Generate(generator, ...)
  override lazy val resolved =
    childrenResolved && checkInputDataTypes().isSuccess && !child.isInstanceOf[Generator]

  // Evaluation delegates entirely to the aliased child expression.
  override def eval(input: InternalRow): Any = child.eval(input)

  /** Just a simple passthrough for code generation. */
  override def genCode(ctx: CodegenContext): ExprCode = child.genCode(ctx)

  // Never invoked: genCode above bypasses the default UnaryExpression codegen path.
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    throw QueryExecutionErrors.doGenCodeOfAliasShouldNotBeCalledError
  }

  override def dataType: DataType = child.dataType
  override def nullable: Boolean = child.nullable

  // Explicit metadata wins; otherwise inherit the named child's metadata with
  // the non-inheritable keys stripped out.
  override def metadata: Metadata = {
    explicitMetadata.getOrElse {
      child match {
        case named: NamedExpression =>
          val builder = new MetadataBuilder().withMetadata(named.metadata)
          nonInheritableMetadataKeys.foreach(builder.remove)
          builder.build()
        case _ => Metadata.empty
      }
    }
  }

  // Copy of this alias with a freshly auto-assigned exprId (the default argument).
  def newInstance(): NamedExpression =
    Alias(child, name)(
      qualifier = qualifier,
      explicitMetadata = explicitMetadata,
      nonInheritableMetadataKeys = nonInheritableMetadataKeys)

  // Resolved aliases become concrete attribute references that keep this exprId;
  // unresolved ones surface as an UnresolvedAttribute under the alias name.
  override def toAttribute: Attribute = {
    if (resolved) {
      AttributeReference(name, child.dataType, child.nullable, metadata)(exprId, qualifier)
    } else {
      UnresolvedAttribute(name)
    }
  }

  /** Used to signal the column used to calculate an eventTime watermark (e.g. a#1-T{delayMs}) */
  private def delaySuffix = if (metadata.contains(EventTimeWatermark.delayKey)) {
    s"-T${metadata.getLong(EventTimeWatermark.delayKey)}ms"
  } else {
    ""
  }

  override def toString: String = s"$child AS $name#${exprId.id}$typeSuffix$delaySuffix"

  // Values of the second (curried) parameter list, supplied when `copy` rebuilds the node.
  override protected final def otherCopyArgs: Seq[AnyRef] = {
    exprId :: qualifier :: explicitMetadata :: nonInheritableMetadataKeys :: Nil
  }

  // Hashes a subset of the fields compared in equals (nonInheritableMetadataKeys
  // is omitted); weaker than equals but still consistent with it.
  override def hashCode(): Int = {
    val state = Seq(name, exprId, child, qualifier, explicitMetadata)
    state.map(Objects.hashCode).foldLeft(0)((a, b) => 31 * a + b)
  }

  override def equals(other: Any): Boolean = other match {
    case a: Alias =>
      name == a.name && exprId == a.exprId && child == a.child && qualifier == a.qualifier &&
        explicitMetadata == a.explicitMetadata &&
        nonInheritableMetadataKeys == a.nonInheritableMetadataKeys
    case _ => false
  }

  override def sql: String = {
    val qualifierPrefix =
      if (qualifier.nonEmpty) qualifier.map(quoteIfNeeded).mkString(".") + "." else ""
    s"${child.sql} AS $qualifierPrefix${quoteIfNeeded(name)}"
  }

  // `copy` only covers the first parameter list; forward the curried arguments unchanged.
  override protected def withNewChildInternal(newChild: Expression): Alias =
    copy(child = newChild)(exprId, qualifier, explicitMetadata, nonInheritableMetadataKeys)
}
// Singleton tree pattern BitSet for all AttributeReference instances.
object AttributeReferenceTreeBits {
  // Shared immutable bitset: every AttributeReference carries the same single pattern bit.
  val bits: BitSet = new ImmutableBitSet(TreePattern.maxId, ATTRIBUTE_REFERENCE.id)
}
/**
* A reference to an attribute produced by another operator in the tree.
*
* @param name The name of this attribute, should only be used during analysis or for debugging.
* @param dataType The [[DataType]] of this attribute.
* @param nullable True if null is a valid value for this attribute.
* @param metadata The metadata of this attribute.
* @param exprId A globally unique id used to check if different AttributeReferences refer to the
* same attribute.
* @param qualifier An optional string that can be used to referred to this attribute in a fully
* qualified way. Consider the examples tableName.name, subQueryAlias.name.
* tableName and subQueryAlias are possible qualifiers.
*/
case class AttributeReference(
    name: String,
    dataType: DataType,
    nullable: Boolean = true,
    override val metadata: Metadata = Metadata.empty)(
    val exprId: ExprId = NamedExpression.newExprId,
    val qualifier: Seq[String] = Seq.empty[String])
  extends Attribute with Unevaluable {

  // All instances share one precomputed pattern bitset (see AttributeReferenceTreeBits).
  override lazy val treePatternBits: BitSet = AttributeReferenceTreeBits.bits

  /**
   * Returns true iff the expression id is the same for both attributes.
   */
  def sameRef(other: AttributeReference): Boolean = this.exprId == other.exprId

  // Full structural equality over every field, including the curried exprId/qualifier.
  override def equals(other: Any): Boolean = other match {
    case ar: AttributeReference =>
      name == ar.name && dataType == ar.dataType && nullable == ar.nullable &&
        metadata == ar.metadata && exprId == ar.exprId && qualifier == ar.qualifier
    case _ => false
  }

  // Semantic equality/hash consider only the exprId: two references to the same
  // underlying attribute are semantically equal regardless of cosmetic fields.
  override def semanticEquals(other: Expression): Boolean = other match {
    case ar: AttributeReference => sameRef(ar)
    case _ => false
  }

  override def semanticHash(): Int = {
    this.exprId.hashCode()
  }

  override def hashCode: Int = {
    // See http://stackoverflow.com/questions/113511/hash-code-implementation
    var h = 17
    h = h * 37 + name.hashCode()
    h = h * 37 + dataType.hashCode()
    h = h * 37 + nullable.hashCode()
    h = h * 37 + metadata.hashCode()
    h = h * 37 + exprId.hashCode()
    h = h * 37 + qualifier.hashCode()
    h
  }

  // Copy with a freshly auto-assigned exprId (exprId is deliberately not forwarded).
  override def newInstance(): AttributeReference =
    AttributeReference(name, dataType, nullable, metadata)(qualifier = qualifier)

  /**
   * Returns a copy of this [[AttributeReference]] with changed nullability.
   */
  override def withNullability(newNullability: Boolean): AttributeReference = {
    if (nullable == newNullability) {
      this
    } else {
      AttributeReference(name, dataType, newNullability, metadata)(exprId, qualifier)
    }
  }

  // Copy with a new name, preserving the exprId (still the same attribute).
  override def withName(newName: String): AttributeReference = {
    if (name == newName) {
      this
    } else {
      AttributeReference(newName, dataType, nullable, metadata)(exprId, qualifier)
    }
  }

  /**
   * Returns a copy of this [[AttributeReference]] with new qualifier.
   */
  override def withQualifier(newQualifier: Seq[String]): AttributeReference = {
    if (newQualifier == qualifier) {
      this
    } else {
      AttributeReference(name, dataType, nullable, metadata)(exprId, newQualifier)
    }
  }

  override def withExprId(newExprId: ExprId): AttributeReference = {
    if (exprId == newExprId) {
      this
    } else {
      AttributeReference(name, dataType, nullable, metadata)(newExprId, qualifier)
    }
  }

  override def withMetadata(newMetadata: Metadata): AttributeReference = {
    AttributeReference(name, dataType, nullable, newMetadata)(exprId, qualifier)
  }

  // Values of the second (curried) parameter list, supplied when `copy` rebuilds the node.
  override protected final def otherCopyArgs: Seq[AnyRef] = {
    exprId :: qualifier :: Nil
  }

  /** Used to signal the column used to calculate an eventTime watermark (e.g. a#1-T{delayMs}) */
  private def delaySuffix = if (metadata.contains(EventTimeWatermark.delayKey)) {
    s"-T${metadata.getLong(EventTimeWatermark.delayKey)}ms"
  } else {
    ""
  }

  override def toString: String = s"$name#${exprId.id}$typeSuffix$delaySuffix"

  // Since the expression id is not in the first constructor it is missing from the default
  // tree string.
  override def simpleString(maxFields: Int): String = {
    s"$name#${exprId.id}: ${dataType.simpleString(maxFields)}"
  }

  override def sql: String = {
    val qualifierPrefix =
      if (qualifier.nonEmpty) qualifier.map(quoteIfNeeded).mkString(".") + "." else ""
    s"$qualifierPrefix${quoteIfNeeded(name)}"
  }
}
/**
* A place holder used when printing expressions without debugging information such as the
* expression id or the unresolved indicator.
*/
case class PrettyAttribute(
    name: String,
    dataType: DataType = NullType)
  extends Attribute with Unevaluable {

  // Copies just the name and (when known) type of a real attribute; anything
  // other than an AttributeReference or PrettyAttribute falls back to NullType.
  def this(attribute: Attribute) = this(attribute.name, attribute match {
    case a: AttributeReference => a.dataType
    case a: PrettyAttribute => a.dataType
    case _ => NullType
  })

  override def toString: String = name
  override def sql: String = toString

  // This class exists only for printing; all structural attribute operations
  // are intentionally unsupported.
  override def withNullability(newNullability: Boolean): Attribute =
    throw new UnsupportedOperationException
  override def newInstance(): Attribute = throw new UnsupportedOperationException
  override def withQualifier(newQualifier: Seq[String]): Attribute =
    throw new UnsupportedOperationException
  override def withName(newName: String): Attribute = throw new UnsupportedOperationException
  override def withMetadata(newMetadata: Metadata): Attribute =
    throw new UnsupportedOperationException
  override def qualifier: Seq[String] = throw new UnsupportedOperationException
  override def exprId: ExprId = throw new UnsupportedOperationException
  override def withExprId(newExprId: ExprId): Attribute =
    throw new UnsupportedOperationException
  override def nullable: Boolean = true
}
/**
* A place holder used to hold a reference that has been resolved to a field outside of the current
* plan. This is used for correlated subqueries.
*/
case class OuterReference(e: NamedExpression)
  extends LeafExpression with NamedExpression with Unevaluable {

  // Every named-expression facet delegates to the wrapped outer-plan expression.
  override def dataType: DataType = e.dataType
  override def nullable: Boolean = e.nullable
  override def prettyName: String = "outer"
  override def sql: String = s"$prettyName(${e.sql})"
  override def name: String = e.name
  override def qualifier: Seq[String] = e.qualifier
  override def exprId: ExprId = e.exprId
  override def toAttribute: Attribute = e.toAttribute
  override def newInstance(): NamedExpression = OuterReference(e.newInstance())

  final override val nodePatterns: Seq[TreePattern] = Seq(OUTER_REFERENCE)
}
// Names for internal (virtual) columns that do not come from user data.
object VirtualColumn {
  // The attribute name used by Hive, which has different result than Spark, deprecated.
  val hiveGroupingIdName: String = "grouping__id"
  // Column name Spark uses internally for grouping ids (GROUPING SETS / CUBE / ROLLUP).
  val groupingIdName: String = "spark_grouping_id"
  val groupingIdAttribute: UnresolvedAttribute = UnresolvedAttribute(groupingIdName)
}
| cloud-fan/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala | Scala | apache-2.0 | 16,107 |
/*
* Copyright 2011-2012 Christos KK Loverdos
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ckkloverdos.convert
import org.junit.Assert
import org.junit.Test
/**
*
* @author Christos KK Loverdos <loverdos@gmail.com>.
*/
class TypeTest {

  // One row per primitive: (sample value, its expected type descriptor,
  // an empty array of that element type, the expected type of the array).
  val infoList = List(
    (1: Byte, Manifest.Byte, Array[Byte](), typeOf[Array[Byte]]),
    (true, Manifest.Boolean, Array[Boolean](), typeOf[Array[Boolean]]),
    (1: Short, Manifest.Short, Array[Short](), typeOf[Array[Short]]),
    (' ', Manifest.Char, Array[Char](), typeOf[Array[Char]]),
    (1: Int, Manifest.Int, Array[Int](), typeOf[Array[Int]]),
    (1: Long, Manifest.Long, Array[Long](), typeOf[Array[Long]]),
    (1: Float, Manifest.Float, Array[Float](), typeOf[Array[Float]]),
    (1: Double, Manifest.Double, Array[Double](), typeOf[Array[Double]]) //,
    // ((), Manifest.Unit, Array[Unit](), manifest[Array[Unit]]) // we have a bug in scalac here...
  )

  // Scalar (value, expected type) pairs extracted from infoList.
  val valueList = infoList map {
    case (v, m, _, _) ⇒ (v, m)
  }

  // Array (value, expected type) pairs extracted from infoList.
  val arrayList = infoList map {
    case (_, _, av, am) ⇒ (av, am)
  }

  // Identity; used in testValues2 to route values through a generic call site.
  def id[A](a: A) = a

  // Asserts that the library's runtime type computation matches the expectation.
  def _checkValue[T](value: T, expectedType: Type[_], info: String = ""): Unit = {
    val computedType = typeOfAny(value)
    Assert.assertEquals("%sType for value %s".format(info, value), expectedType, computedType)
  }

  @Test
  def testValues: Unit = {
    for((value, tpe) <- valueList) {
      // println("Testing value %s of manifest %s".format(value, manifest))
      _checkValue(value, tpe)
    }
  }

  // Same values as testValues, but passed through a generic identity first.
  @Test
  def testValues2: Unit = {
    val v2 = valueList map {
      case (v, m) => (id(v), m)
    }
    for((value, tpe) <- v2) {
      // println("Testing(2) value %s of manifest %s".format(value, manifest))
      _checkValue(value, tpe)
    }
  }

  @Test
  def testNull: Unit = {
    _checkValue(null, typeOf[Null])
  }

  @Test
  def testArray: Unit = {
    for((av, am) <- arrayList) {
      _checkValue(av, am)
    }
  }
}
package constants
/**
 * Page-size constants used for pagination across the application.
 */
object StaticNumber {
  // Plain vals instead of the original lazy vals: these are cheap Int
  // literals, so lazy initialization's synchronization overhead buys nothing.
  val CONTENT_PER_PAGE = 6
  val COMMENT_PER_PAGE = 200
  val ANSWER_PER_PAGE = 11
}
| lequangdzung/quora-clone | api-app/app/constants/StaticNumber.scala | Scala | gpl-2.0 | 141 |
package models.daos.hashmap
import javax.inject.Inject
import scala.concurrent.Future
import java.util.UUID
import models.Professor
import models.daos.ProfessorDAO
/**
 * In-memory implementation of [[ProfessorDAO]] backed by a Set held in the
 * companion object.
 *
 * NOTE(review): `ProfessorDAOImpl.professors` is a plain `var` replaced
 * without synchronization in `insert`; confirm single-threaded use or add
 * locking before relying on this in production.
 */
class ProfessorDAOImpl @Inject() extends ProfessorDAO {

  /**
   * Finds professors whose `name1` or (optional) `name2` matches the given name.
   *
   * Fix: the seed data constructs professors with `Some("Professor N")`, so
   * `name2` is an `Option[String]`; the original `p.name2 == name` compared an
   * Option against a String and could never be true. `contains` performs the
   * intended comparison against the wrapped value.
   */
  def findByName(name: String): Future[Seq[Professor]] =
    Future.successful(ProfessorDAOImpl.professors.filter(p => p.name1 == name || p.name2.contains(name)).toSeq)

  /** Returns every professor currently stored. */
  def all(): Future[Seq[Professor]] =
    Future.successful(ProfessorDAOImpl.professors.toSeq)

  /** Adds the professor to the shared set and yields its id. */
  def insert(professor: Professor): Future[UUID] =
    Future.successful {
      ProfessorDAOImpl.professors = ProfessorDAOImpl.professors + professor
      professor.id
    }
}
/**
 * Companion holding the shared in-memory professor store, pre-seeded with
 * sample data. (The closing brace lost in the file extraction is restored.)
 */
object ProfessorDAOImpl {
  // Shared mutable state: the whole Set is replaced on insert (no synchronization).
  var professors = Set[Professor](
    Professor(UUID.fromString("13c99d0c-46e2-11e7-8db2-d413de251a16"), "교수님 1", Some("Professor 1")),
    Professor(UUID.fromString("14194feb-46e2-11e7-8db2-d413de251a16"), "교수님 2", Some("Professor 2")),
    Professor(UUID.fromString("1464f8d6-46e2-11e7-8db2-d413de251a16"), "교수님 3", Some("Professor 3")),
    Professor(UUID.fromString("14b207ef-46e2-11e7-8db2-d413de251a16"), "교수님 4", Some("Professor 4")),
    Professor(UUID.fromString("14fda29b-46e2-11e7-8db2-d413de251a16"), "교수님 5", Some("Professor 5")),
    Professor(UUID.fromString("154f242f-46e2-11e7-8db2-d413de251a16"), "교수님 6", Some("Professor 6")),
    Professor(UUID.fromString("159e7537-46e2-11e7-8db2-d413de251a16"), "교수님 7", Some("Professor 7"))
  )
}
| yoo-haemin/hufs-planner | project/app/models/daos/hashmap/ProfessorDAOImpl.scala | Scala | agpl-3.0 | 1,523 |
package models.product
import play.api.libs.json.Json
import models._
import models.AssetSupport._
import org.joda.time.DateTime
/** Persisted size-group asset as read back from the datastore. */
case class SizeGroupIn(_id: IdType,
                       createdAt: DateTime,
                       lastModifiedAt: DateTime,
                       active: Boolean,
                       description: String,
                       from: Int,
                       to: Int) extends AssetIn with AssetUpdateBuilder[SizeGroupUpdate] {
  // Builds the update payload, carrying over all fields except identity/creation info.
  override def fillup(lastModifiedAt: DateTime): SizeGroupUpdate = SizeGroupUpdate(lastModifiedAt, active, description, from, to)
}
object SizeGroupIn extends AssetInCompanion[SizeGroupIn] {
  // Collection name, also reused by the update companion below.
  val collectionName = "sizegroups"
  val format = Json.format[SizeGroupIn]
}
/** Updatable subset of size-group fields applied on modification. */
case class SizeGroupUpdate(lastModifiedAt: DateTime,
                           active: Boolean,
                           description: String,
                           from: Int,
                           to: Int) extends AssetUpdate
object SizeGroupUpdate extends AssetUpdateCompanion[SizeGroupUpdate] {
  val format = Json.format[SizeGroupUpdate]
  // Updates target the same collection as SizeGroupIn.
  val collectionName = SizeGroupIn.collectionName
}
/** Client-supplied payload for creating a new size group. */
case class SizeGroupCreate(active: Boolean,
                           description: String,
                           from: Int,
                           to: Int) extends AssetCreate[SizeGroupIn] {
  // Combines the client payload with server-generated identity and timestamps.
  override def fillup(b: AssetBase) = SizeGroupIn(b.id, b.createdAt, b.lastModifiedAt, active, description, from, to)
}
object SizeGroupCreate {
  // Only a Reads is defined: creation payloads are parsed from incoming JSON.
  implicit val reads = Json.reads[SizeGroupCreate]
}
| tsechov/shoehorn | app/models/product/sizegroup.scala | Scala | apache-2.0 | 1,553 |
package im.actor.server.activation.gate
import akka.http.scaladsl.model.headers.CustomHeader
/** Custom HTTP header carrying the activation-gate authentication token. */
case class `X-Auth-Token`(value: String) extends CustomHeader {
  override def name: String = "X-Auth-Token"
}
/**
* Copyright (c) 2010, Stefan Langer and others
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Element34 nor the names of its contributors may
* be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS ROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package de.element34.sbteclipsify
import scala.xml._
import sbt._
/** Helpers for reading sbt settings/tasks and deriving the Eclipse project nature. */
object Utils {
  // Looks up a setting's value for the given project and configuration, if defined.
  def setting[T](structure: Load.BuildStructure)(ref: ProjectRef, key: SettingKey[T], configuration: Configuration): Option[T] = key in (ref, configuration) get structure.data
  // Evaluates the given task for the project within the given configuration.
  def evaluate[A](state: State, structure: Load.BuildStructure)(ref: ProjectRef, key: TaskKey[A], config: Configuration) =
    EvaluateTask.evaluateTask(structure, key in config, state, ref, false, EvaluateTask.SystemProcessors)
  // Resolves the project nature: the typed `projectNature` setting wins, then the
  // string `nature` setting (parsed via ProjectType), falling back to Scala.
  def nature(ref: ProjectRef, structure: Load.BuildStructure, logger: Logger): ProjectNature = {
    setting[ProjectNature](structure)(ref, Eclipsify.projectNature, Provided).
      getOrElse(
        setting[String](structure)(ref, Eclipsify.nature, Provided).map(ProjectType(_, logger)).
          getOrElse(ProjectType.Scala))
  }
  // Executes `body` only when `predicate` is false (inverse of `if`).
  def unless(predicate: Boolean)(body: => Unit) = if(!predicate) body
}
| musk/SbtEclipsify | src/main/scala/de/element34/sbteclipsify/Utils.scala | Scala | bsd-3-clause | 2,482 |
package scales.component.internal
import scala.scalajs.js
// Contract for component metadata that installs itself onto a custom-element prototype.
trait Metadata {
  // Augments the given prototype object and returns the resulting prototype.
  def define(prototype: js.Dynamic): js.Dynamic
} | greencatsoft/scales | core/src/main/scala/scales/component/internal/Metadata.scala | Scala | apache-2.0 | 127 |
package synereo.client.rootmodels
/**
* Created by bhagyashree.b on 2016-07-28.
*/
/**
 * Root model holding application-wide UI state.
 *
 * @param isServerError               whether the last server interaction failed
 * @param serverErrorMsg              message describing the server error, if any
 * @param showProfileImageUploadModal visibility flag for the profile-image upload dialog
 * @param showNodeSettingModal        visibility flag for the node settings dialog
 * @param showAboutInfoModal          visibility flag for the about dialog
 * @param showNewMessageModal         visibility flag for the new-message dialog
 * @param preventNavigation           when true, navigation away from the current view is blocked
 */
case class AppRootModel(
    isServerError: Boolean = false,
    serverErrorMsg: String = "",
    showProfileImageUploadModal: Boolean = false,
    showNodeSettingModal: Boolean = false,
    showAboutInfoModal: Boolean = false,
    showNewMessageModal: Boolean = false,
    preventNavigation: Boolean = false)
| LivelyGig/ProductWebUI | sclient/src/main/scala/synereo/client/rootmodels/AppRootModel.scala | Scala | apache-2.0 | 514 |
package main.collections
import main.abstraction._
import rescala._
import scala.collection.immutable._
// Reactive wrapper around an immutable Map, backed by a rescala Signal.
class ReactiveListMap[A,B](map: Signal[Map[A,B]]) extends ReactiveMap[A,B, ReactiveListMap] {
  // The Signal holding the current map value, as required by ReactiveMap.
  override protected val internalValue = Var(map)
  // Convenience constructor from a plain ListMap.
  def this(map: ListMap[A,B]) = this(Var(map))
  // Convenience constructor from key/value pairs, preserving insertion order via ListMap.
  def this(pairs: (A,B)*) = this(ListMap(pairs:_*))
}
object ReactiveListMap {
  // Allows the abstraction layer to wrap a Signal[Map[C,D]] back into a ReactiveListMap.
  implicit def wrapping[C,D] = new SignalWrappable[Map[C,D], ReactiveListMap[C,D]] {
    def wrap(unwrapped: Signal[Map[C,D]]): ReactiveListMap[C,D] = new ReactiveListMap(unwrapped)
  }
}
| volkc/REScala | Extensions/Datastructures/src/main/scala/main/collections/ReactiveListMap.scala | Scala | apache-2.0 | 568 |
package org.scalaide.core
package testsetup
import java.io.ByteArrayInputStream
import java.io.File
import java.io.InputStream
import java.util.concurrent.TimeoutException
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.eclipse.core.resources.IContainer
import org.eclipse.core.resources.IFile
import org.eclipse.core.resources.IMarker
import org.eclipse.core.resources.IProject
import org.eclipse.core.resources.IResource
import org.eclipse.core.resources.IncrementalProjectBuilder
import org.eclipse.core.resources.ResourcesPlugin
import org.eclipse.core.runtime.IPath
import org.eclipse.core.runtime.IProgressMonitor
import org.eclipse.core.runtime.NullProgressMonitor
import org.eclipse.core.runtime.Path
import org.eclipse.core.runtime.Platform
import org.eclipse.core.runtime.preferences.ConfigurationScope
import org.eclipse.core.runtime.preferences.InstanceScope
import org.eclipse.jdt.core.IClasspathEntry
import org.eclipse.jdt.core.ICompilationUnit
import org.eclipse.jdt.core.IJavaModelMarker
import org.eclipse.jdt.core.IJavaProject
import org.eclipse.jdt.core.IPackageFragment
import org.eclipse.jdt.core.JavaCore
import org.eclipse.jdt.debug.core.JDIDebugModel
import org.eclipse.jdt.launching.JavaRuntime
import org.scalaide.core.IScalaProject
import org.scalaide.core.compiler.IScalaPresentationCompiler
import org.scalaide.core.internal.ScalaPlugin
import org.scalaide.core.internal.project.ScalaProject
import org.scalaide.logging.HasLogger
import org.scalaide.util.eclipse.EclipseUtils
import org.scalaide.util.eclipse.OSGiUtils
/**
* Utility functions for setting up test projects.
*
*/
object SDTTestUtils extends HasLogger {
  enableAutoBuild(false)
  // Be nice to Mac users and use a default encoding other than MacRoman
  InstanceScope.INSTANCE.getNode(SdtConstants.PluginId).put(ResourcesPlugin.PREF_ENCODING, "UTF-8")
  // The shared Eclipse workspace used by all helpers below.
  lazy val workspace = ResourcesPlugin.getWorkspace
  // Absolute path of the `test-workspace` folder inside the given bundle.
  // Throws if the bundle does not contain such a folder (`.get` on the Option).
  def sourceWorkspaceLoc(bundleName: String): IPath = {
    val bundle = Platform.getBundle(bundleName)
    OSGiUtils.pathInBundle(bundle, File.separatorChar + "test-workspace").get
  }
  // Sets the JDI request timeout and returns the previous value so callers can restore it.
  def setJdiRequestTimeout(timeout: Int): Int = {
    val debugSettings = ConfigurationScope.INSTANCE.getNode(JDIDebugModel.getPluginIdentifier())
    val previousRequestTimeout = debugSettings.getInt(JDIDebugModel.PREF_REQUEST_TIMEOUT, JDIDebugModel.DEF_REQUEST_TIMEOUT)
    debugSettings.putInt(JDIDebugModel.PREF_REQUEST_TIMEOUT, timeout)
    previousRequestTimeout
  }
  /** Enable workspace auto-building */
  def enableAutoBuild(enable: Boolean): Unit = {
    // auto-building is on
    val desc = workspace.getDescription
    desc.setAutoBuilding(enable)
    workspace.setDescription(desc)
  }
  /** Return the Java problem markers corresponding to the given compilation unit. */
  def findProblemMarkers(unit: ICompilationUnit): Array[IMarker] =
    unit.getUnderlyingResource().findMarkers(IJavaModelMarker.JAVA_MODEL_PROBLEM_MARKER, true, IResource.DEPTH_INFINITE)
  // Collects all markers of the given types anywhere in the project tree.
  def findProjectProblemMarkers(project: IProject, types: String*): Seq[IMarker] =
    for {
      typ <- types
      markers <- project.findMarkers(typ, false, IResource.DEPTH_INFINITE)
    } yield markers
  // Extracts the human-readable message attribute from each marker.
  def markersMessages(markers: List[IMarker]): List[String] =
    markers.map(_.getAttribute(IMarker.MESSAGE).asInstanceOf[String])
  /**
   * Setup the project in the target workspace. The 'name' project should
   * exist in the source workspace.
   */
  def setupProject(name: String, bundleName: String): IScalaProject =
    internalSetupProject(name, bundleName)
  // Copies the named project from the bundle's test workspace into the running
  // workspace, opens it, and returns the corresponding ScalaProject.
  private[core] def internalSetupProject(name: String, bundleName: String)(implicit progressMonitor: IProgressMonitor = new NullProgressMonitor): ScalaProject = {
    EclipseUtils.workspaceRunnableIn(workspace) { monitor =>
      val wspaceLoc = workspace.getRoot.getLocation
      val src = new File(sourceWorkspaceLoc(bundleName).toFile().getAbsolutePath + File.separatorChar + name)
      val dst = new File(wspaceLoc.toFile().getAbsolutePath + File.separatorChar + name)
      logger.debug("copying %s to %s".format(src, dst))
      FileUtils.copyDirectory(src, dst)
      val project = workspace.getRoot.getProject(name)
      project.create(progressMonitor)
      project.open(progressMonitor)
      project.setDefaultCharset("UTF-8", progressMonitor)
      JavaCore.create(project)
    }
    ScalaPlugin().getScalaProject(workspace.getRoot.getProject(name))
  }
  /**
   * Return all positions (offsets) of the given str in the given source file.
   */
  def positionsOf(source: Array[Char], str: String): Seq[Int] = {
    val buf = new mutable.ListBuffer[Int]
    var pos = source.indexOfSlice(str)
    while (pos >= 0) {
      buf += pos - 1 // we need the position before the first character of this marker
      pos = source.indexOfSlice(str, pos + 1)
    }
    buf.toList
  }
  /**
   * Return all positions and the number in the given marker. The marker is
   * wrapped by /**/, and the method returns matches for /*[0-9]+*/, as a sequence
   * of pairs (offset, parsedNumber)
   */
  def markersOf(source: Array[Char], prefix: String): Seq[(Int, Int)] = {
    val regex = """\\/\\*%s([0-9]+)\\*/""".format(prefix).r
    val buf = new mutable.ListBuffer[(Int, Int)]
    val it = regex.findAllIn(source)
    for (m <- it) {
      // `it.start` is the offset of the current match within `source`.
      buf += ((it.start, it.group(1).toInt))
    }
    buf.toSeq
  }
  // Recursively deletes a directory (or file) from disk; no-op when it does not exist.
  def deleteRecursive(d: File): Unit = {
    if (d.exists) {
      val filesOpt = Option(d.listFiles)
      for (files <- filesOpt; file <- files)
        if (file.isDirectory)
          deleteRecursive(file)
        else
          file.delete
      d.delete
    }
  }
  // Returns a fresh directory path under ~/SDTCoreTestTempDir, deleting any
  // previous contents. NOTE(review): the directory itself is not re-created
  // after deletion here — callers appear responsible for creating it; confirm.
  def createTempDir(name: String): File = {
    val userHome = new File(System.getProperty("user.home")).getAbsolutePath
    val rootDir = new File(userHome, "SDTCoreTestTempDir")
    val result = new File(rootDir, name)
    if (result.exists)
      deleteRecursive(result)
    result
  }
  // Removes the whole ~/SDTCoreTestTempDir tree, if present.
  def deleteTempDirs(): Unit = {
    val userHome = new File(System.getProperty("user.home")).getAbsolutePath
    val rootDir = new File(userHome, "SDTCoreTestTempDir")
    if (rootDir.exists)
      deleteRecursive(rootDir)
  }
  /**
   * Add a new file to the given project. The given path is relative to the
   * project.
   *
   * The file must not exist.
   */
  def addFileToProject(project: IProject, path: String, content: String): IFile =
    addFileToProject(project, path, content.getBytes(project.getDefaultCharset()))
  // Byte-level overload: creates any missing parent folders, then the file itself.
  def addFileToProject(project: IProject, path: String, content: Array[Byte]): IFile = {
    val filePath = new Path(path)
    val dirNames = filePath.segments.init // last segment is the file
    dirNames.foldLeft(project: IContainer) { (container, segment) =>
      val folder = container.getFolder(new Path(segment))
      if (!folder.exists())
        folder.create(false, true, null)
      folder
    }
    val file = project.getFile(filePath);
    file.create(new ByteArrayInputStream(content), true, null)
    file
  }
  // Overwrites the contents of an existing workspace file.
  def changeContentOfFile(file: IFile, newContent: String, encoding: String = workspace.getRoot.getDefaultCharset()): IFile = {
    file.setContents(new ByteArrayInputStream(newContent.getBytes(encoding)), 0, null)
    file
  }
  // Gathers the Java problem markers of all given compilation units.
  def getProblemMarkers(units: ICompilationUnit*): List[IMarker] = {
    units.flatMap(findProblemMarkers).toList
  }
  // All (severity, message) pairs of Java problem markers in the project.
  def getErrorMessages(project: IProject): Seq[(Int, String)] = {
    for (m <- project.findMarkers(IJavaModelMarker.JAVA_MODEL_PROBLEM_MARKER, true, IResource.DEPTH_INFINITE))
      yield (m.getAttribute(IMarker.SEVERITY).asInstanceOf[Int], m.getAttribute(IMarker.MESSAGE).toString)
  }
  // Messages of all problem markers on the given compilation units.
  def getErrorMessages(units: ICompilationUnit*): List[String] =
    for (p <- getProblemMarkers(units: _*)) yield p.getAttribute(IMarker.MESSAGE).toString
  // Replaces the resource's contents, triggers an incremental build, and
  // returns the resulting problem messages on the watched units.
  def buildWith(resource: IResource, contents: String, unitsToWatch: Seq[ICompilationUnit]): List[String] = {
    SDTTestUtils.changeContentOfFile(resource.asInstanceOf[IFile], contents)
    logger.debug("=== Rebuilding workspace === ")
    SDTTestUtils.workspace.build(IncrementalProjectBuilder.INCREMENTAL_BUILD, null)
    val problems = getProblemMarkers(unitsToWatch: _*)
    for (p <- problems) yield p.getAttribute(IMarker.MESSAGE).toString
  }
  // Creates (or recreates) a workspace project whose on-disk location is under `parentFile`.
  def createProjectInLocalFileSystem(parentFile: File, projectName: String): IProject = {
    val project = workspace.getRoot.getProject(projectName)
    if (project.exists)
      project.delete(true, null)
    val testFile = new File(parentFile, projectName)
    if (testFile.exists)
      deleteRecursive(testFile)
    val desc = workspace.newProjectDescription(projectName)
    desc.setLocation(new Path(new File(parentFile, projectName).getPath))
    project.create(desc, null)
    project.open(null)
    project
  }
  // Reads the whole stream into a String (one char per byte) and closes it.
  def slurpAndClose(inputStream: InputStream): String = {
    val stringBuilder = new StringBuilder
    try {
      var ch: Int = 0
      while ({ ch = inputStream.read; ch } != -1) {
        stringBuilder.append(ch.toChar)
      }
    } finally {
      inputStream.close
    }
    stringBuilder.toString
  }
  // Fluent helper: `findMarker(m) in unit` returns the marker's offsets.
  // NOTE(review): returns a structural type, which relies on reflection at call sites.
  def findMarker(marker: String) = new {
    import org.eclipse.jdt.internal.compiler.env.ICompilationUnit
    def in(unit: ICompilationUnit): Seq[Int] = {
      val contents = unit.getContents()
      SDTTestUtils.positionsOf(contents, marker)
    }
  }
  // Creates a package (e.g. "a.b.c") under the project's /src source folder.
  def createSourcePackage(name: String)(project: IScalaProject): IPackageFragment =
    project.javaProject.getPackageFragmentRoot(project.underlying.getFolder("/src")).createPackageFragment(name, true, null)
  // Creates a compilation unit and blocks until the operation completes.
  def createCompilationUnit(pack: IPackageFragment, name: String, sourceCode: String, force: Boolean = false): ICompilationUnit = {
    BlockingProgressMonitor.waitUntilDone(pack.createCompilationUnit(name, sourceCode, force, _))
  }
  // Appends the given entries to the project's raw classpath.
  def addToClasspath(prj: IScalaProject, entries: IClasspathEntry*): Unit = {
    val existing = prj.javaProject.getRawClasspath
    prj.javaProject.setRawClasspath(existing ++ entries, null)
  }
  /** Create Scala projects, equiped with the Scala nature, Scala library container and a '/src' folder. */
  def createProjects(names: String*): Seq[IScalaProject] =
    names map (n => createProjectInWorkspace(n, true))
  private[core] def internalCreateProjects(names: String*): Seq[ScalaProject] =
    names map (n => internalCreateProjectInWorkspace(n, withSourceRootOnly))
  // Deletes the given projects (including contents) inside a workspace runnable.
  def deleteProjects(projects: IScalaProject*)(implicit progressMonitor: IProgressMonitor = new NullProgressMonitor): Unit = {
    EclipseUtils.workspaceRunnableIn(EclipseUtils.workspaceRoot.getWorkspace) { _ =>
      projects foreach (_.underlying.delete(true, progressMonitor))
    }
  }
  /** Wait until `pred` is true, or timeout (in ms). */
  def waitUntil(timeout: Int, withTimeoutException: Boolean = false)(pred: => Boolean): Unit = {
    val start = System.currentTimeMillis()
    var cond = pred
    while ((System.currentTimeMillis() < start + timeout) && !cond) {
      Thread.sleep(100)
      cond = pred
    }
    if (!cond && withTimeoutException)
      throw new TimeoutException(s"Predicate is not fulfiled after declared time limit ($timeout millis).")
  }
  /**
   * Allows to run code that can access the presentation compiler. The code is
   * executed in a separate project inside of the workspace. The project is created
   * when this method is called and will be removed when it is left.
   *
   * @param testProjectName
   *        The name of the test project the code should be executed in
   * @param f
   *        the function executed inside of the presentation compiler
   *
   * @example {{{
   * testWithCompiler("testproject") { compiler =>
   *   import compiler._
   *   // use compiler member
   * }
   * }}}
   */
  def testWithCompiler[A](testProjectName: String)(f: IScalaPresentationCompiler => A): Unit = {
    var projectSetup: TestProjectSetup = null
    try {
      val scalaProject = createProjectInWorkspace(testProjectName, withSourceRoot = true)
      projectSetup = new TestProjectSetup(testProjectName) {
        override lazy val project = scalaProject
      }
      projectSetup.project.presentationCompiler { c => f(c) }
    } finally deleteProjects(projectSetup.project)
  }
  /**
   * Create a project in the current workspace. If `withSourceRoot` is true,
   * it creates a source folder called `src`.
   */
  def createProjectInWorkspace(projectName: String, withSourceRoot: Boolean = true): IScalaProject =
    internalCreateProjectInWorkspace(projectName, if (withSourceRoot) withSourceRootOnly else withNoSourceRoot)
  def createProjectInWorkspace(projectName: String, withSrcOutputStructure: SrcPathOutputEntry): IScalaProject =
    internalCreateProjectInWorkspace(projectName, withSrcOutputStructure)
  // Strategy producing the source-folder classpath entries for a new project.
  type SrcPathOutputEntry = (IProject, IJavaProject) => Seq[IClasspathEntry]
  private def withNoSourceRoot: SrcPathOutputEntry = (_, _) => Seq.empty[IClasspathEntry]
  // Default strategy: a single '/src' folder registered as the source root.
  private def withSourceRootOnly: SrcPathOutputEntry = (thisProject, correspondingJavaProject) => {
    val sourceFolder = thisProject.getFolder("/src")
    sourceFolder.create( /* force = */ false, /* local = */ true, /* monitor = */ null)
    val root = correspondingJavaProject.getPackageFragmentRoot(sourceFolder)
    Seq(JavaCore.newSourceEntry(root.getPath()))
  }
  private[core] def internalCreateProjectInWorkspace(projectName: String, withSourceRoot: Boolean): ScalaProject =
    internalCreateProjectInWorkspace(projectName, if (withSourceRoot) withSourceRootOnly else withNoSourceRoot)
  // Creates a plain Java project with the given source-folder layout and a /bin output folder.
  final def createJavaProjectInWorkspace(projectName: String, withSourceFolders: SrcPathOutputEntry): IJavaProject = {
    val workspaceRoot = workspace.getRoot()
    val project = workspaceRoot.getProject(projectName)
    project.create(null)
    project.open(null)
    val description = project.getDescription()
    description.setNatureIds(Array(JavaCore.NATURE_ID))
    project.setDescription(description, null)
    val javaProject = JavaCore.create(project)
    javaProject.setOutputLocation(new Path("/" + projectName + "/bin"), null)
    val entries = new ArrayBuffer[IClasspathEntry]()
    entries += JavaRuntime.getDefaultJREContainerEntry()
    entries ++= withSourceFolders(project, javaProject)
    javaProject.setRawClasspath(entries.toArray[IClasspathEntry], null)
    javaProject
  }
  // Builds a Java project, adds the Scala library container and Scala nature,
  // and returns the resulting ScalaProject.
  private[core] def internalCreateProjectInWorkspace(projectName: String, withSourceFolders: SrcPathOutputEntry): ScalaProject = {
    def withScalaFolders(project: IProject, jProject: IJavaProject) =
      withSourceFolders(project, jProject) ++ Seq(JavaCore.newContainerEntry(Path.fromPortableString(SdtConstants.ScalaLibContId)))
    def addScalaNature(project: IProject) = {
      val description = project.getDescription
      description.setNatureIds(SdtConstants.NatureId +: description.getNatureIds)
      project.setDescription(description, null)
      project
    }
    ScalaPlugin().getScalaProject(addScalaNature(createJavaProjectInWorkspace(projectName, withScalaFolders).getProject))
  }
  // Temporarily sets a boolean workspace preference, restoring the old value afterwards.
  def withWorkspacePreference[A](name: String, value: Boolean)(thunk: => A): A = {
    val store = ScalaPlugin().getPreferenceStore
    val old = store.getBoolean(name)
    try {
      store.setValue(name, value)
      thunk
    } finally
      store.setValue(name, old)
  }
  // Triggers an incremental build of the whole workspace.
  def buildWorkspace(): Unit =
    workspace.build(IncrementalProjectBuilder.INCREMENTAL_BUILD, new NullProgressMonitor)
}
| sschaef/scala-ide | org.scala-ide.sdt.core.tests/src/org/scalaide/core/testsetup/SDTTestUtils.scala | Scala | bsd-3-clause | 15,431 |
package dundertext.editor.cmd
import dundertext.editor.RowNode
// Command descriptor / factory for DeleteRow.
object DeleteRow extends CommandDescription {
  def apply() = new DeleteRow
}
/** Removes the cursor's current row, placing the cursor at the end of the previous row. */
class DeleteRow extends SubtitlingCommand {
  /** Applicable only when more than one row exists and a previous row is available. */
  override def applies: Boolean =
    cursor.text.rowCount > 1 && cursor.row.prev != null

  /** Move the cursor onto the previous row's end, then detach the old row. */
  override def execute() = {
    val doomed: RowNode = cursor.row
    val previous: RowNode = doomed.prev
    cursor.moveTo(previous)
    cursor.moveRowEnd()
    doomed.remove()
  }
}
| dundertext/dundertext | editor/src/main/scala/dundertext/editor/cmd/DeleteRow.scala | Scala | gpl-3.0 | 477 |
import sbt._
// Central catalogue of third-party dependency coordinates for the build.
object Deps {
  // Apache Curator: ZooKeeper framework, client and service discovery.
  val curatorFramework = "org.apache.curator" % "curator-framework" % "4.1.0"
  val curatorClient = "org.apache.curator" % "curator-client" % "4.1.0"
  val curatorDiscovery = "org.apache.curator" % "curator-x-discovery" % "4.1.0"
  // process lifecycle
  val twitterServer =
    ("com.twitter" %% "twitter-server" % "21.4.0")
      .exclude("com.twitter", "finagle-zipkin_2.12")
  def twitterUtil(mod: String) =
    "com.twitter" %% s"util-$mod" % "21.4.0"
  // networking
  def finagle(mod: String) =
    "com.twitter" %% s"finagle-$mod" % "21.4.0"
  def netty4(mod: String) =
    "io.netty" % s"netty-$mod" % "4.1.59.Final"
  // Statically linked BoringSSL for Netty's native TLS support.
  val boringssl = "io.netty" % "netty-tcnative-boringssl-static" % "2.0.35.Final"
  // Jackson (parsing)
  val jacksonVersion = "2.11.2"
  val jacksonCore =
    "com.fasterxml.jackson.core" % "jackson-core" % jacksonVersion
  val jacksonAnnotations =
    "com.fasterxml.jackson.core" % "jackson-annotations" % jacksonVersion
  val jacksonDatabind =
    "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion
  val jacksonScala =
    "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion
  // Convenience bundle of the four core Jackson modules.
  val jackson =
    jacksonCore :: jacksonAnnotations :: jacksonDatabind :: jacksonScala :: Nil
  val jacksonYaml =
    "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % jacksonVersion
  // parses a variety of timestamp formats (like RFC3339)
  val jodaTime = Seq(
    "joda-time" % "joda-time" % "2.7",
    "org.joda" % "joda-convert" % "1.7"
  )
  // testing. duh.
  val scalatest = "org.scalatest" %% "scalatest" % "3.0.8"
  // scalacheck for Property-based testing
  val scalacheck = "org.scalacheck" %% "scalacheck" % "1.14.0"
  val scalaCollectionCompat = "org.scala-lang.modules" %% "scala-collection-compat" % "2.1.2"
  // junit
  val junit = "junit" % "junit" % "4.10"
  // guava
  val guava = "com.google.guava" % "guava" % "23.0"
  // jwt for Marathon API
  val jwt = "com.pauldijou" %% "jwt-core" % "0.12.1"
  val protobuf = "com.google.protobuf" % "protobuf-java" % "3.11.4"
  // statsd client
  val statsd = "com.datadoghq" % "java-dogstatsd-client" % "2.3"
  // dnsjava
  val dnsJava = "dnsjava" % "dnsjava" % "2.1.8"
}
| linkerd/linkerd | project/Deps.scala | Scala | apache-2.0 | 2,245 |
package com.twitter.finagle.netty4
import com.twitter.io.ByteReader.UnderflowException
import com.twitter.io.{Buf, ByteReader}
import io.netty.buffer.{ByteBuf, UnpooledByteBufAllocator}
import java.lang.{Double => JDouble, Float => JFloat}
import java.nio.charset.StandardCharsets
import org.scalacheck.Gen
import org.scalatest.FunSuite
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
// Shared factories for building CopyingByteBufByteReader instances under test.
object CopyingByteBufByteReaderTest {
  // Wraps an existing ByteBuf in the copying reader implementation.
  def wrapByteBufInReader(bb: ByteBuf): ByteReader =
    new CopyingByteBufByteReader(bb)
  // Allocates a fresh buffer, lets the caller populate it, then wraps it in a reader.
  def newReader(f: ByteBuf => Unit): ByteReader = {
    val buf = UnpooledByteBufAllocator.DEFAULT.buffer(10, Int.MaxValue)
    f(buf)
    wrapByteBufInReader(buf)
  }
  // Builds a reader over exactly the given bytes.
  def readerWith(bytes: Byte*): ByteReader = newReader { bb =>
    bytes.foreach(bb.writeByte(_))
  }
}
// Runs the shared ByteReader suite against the copying implementation, plus
// one test specific to its copying behavior.
class CopyingByteBufByteReaderTest extends AbstractByteBufByteReaderTest {
  protected def newReader(f: (ByteBuf) => Unit): ByteReader =
    CopyingByteBufByteReaderTest.newReader(f)
  protected def readerWith(bytes: Byte*): ByteReader =
    CopyingByteBufByteReaderTest.readerWith(bytes: _*)
  protected def wrapByteBufInReader(bb: ByteBuf): ByteReader =
    CopyingByteBufByteReaderTest.wrapByteBufInReader(bb)
  test("Buf instances are backed by a precisely sized Buf.ByteArray") {
    val br = readerWith(0x00, 0x01)
    // readBytes should copy exactly one byte into a tight Buf.ByteArray window.
    br.readBytes(1) match {
      case Buf.ByteArray.Owned(data, begin, end) =>
        assert(data.sameElements(Seq(0x00)))
        assert(begin == 0)
        assert(end == 1)
      case other =>
        val name = other.getClass.getSimpleName
        fail(s"Unexpected representation of returned `Buf` instance: $name")
    }
  }
}
// Exercises the Buf.Processor-based `process` API of the copying reader via
// the shared ReadableBufProcessorTest harness.
class CopyingByteBufByteReaderProcessorTest
    extends ReadableBufProcessorTest(
      "CopyingByteBufByteReader", { bytes: Array[Byte] =>
        val br = CopyingByteBufByteReaderTest.readerWith(bytes.toIndexedSeq: _*)
        new ReadableBufProcessorTest.CanProcess {
          def process(from: Int, until: Int, processor: Buf.Processor): Int =
            br.process(from, until, processor)
          def process(processor: Buf.Processor): Int = br.process(processor)
          def readBytes(num: Int): Unit = br.readBytes(num)
          // Bytes consumed so far = total length minus what remains unread.
          def readerIndex(): Int = bytes.length - br.remaining
        }
      }
    )
// Property-based contract tests shared by all ByteBuf-backed ByteReader
// implementations; subclasses supply the three reader factories.
abstract class AbstractByteBufByteReaderTest extends FunSuite with ScalaCheckDrivenPropertyChecks {
  // Smallest 3-byte value with the sign bit set (2^23); used for sign-extension checks.
  private val SignedMediumMax = 0x800000
  // Wraps an existing ByteBuf in the implementation under test.
  protected def wrapByteBufInReader(bb: ByteBuf): ByteReader
  // Allocates a buffer, lets `f` populate it, and wraps it in the reader under test.
  protected def newReader(f: ByteBuf => Unit): ByteReader
  // Builds a reader over exactly the given bytes.
  protected def readerWith(bytes: Byte*): ByteReader
  // Keeps only the low 24 bits, mirroring medium (3-byte) reads.
  private def maskMedium(i: Int) = i & 0x00ffffff
  test("readString")(forAll { (str1: String, str2: String) =>
    val bytes1 = str1.getBytes(StandardCharsets.UTF_8)
    val bytes2 = str2.getBytes(StandardCharsets.UTF_8)
    val all = (bytes1.toIndexedSeq ++ bytes2.toIndexedSeq)
    val br = readerWith(all: _*)
    assert(br.readString(bytes1.length, StandardCharsets.UTF_8) == str1)
    assert(br.readString(bytes2.length, StandardCharsets.UTF_8) == str2)
    intercept[UnderflowException] { br.readByte() }
  })
  test("readByte")(forAll { byte: Byte =>
    val br = readerWith(byte)
    assert(br.readByte() == byte)
    intercept[UnderflowException] { br.readByte() }
  })
  test("readShortBE")(forAll { s: Short =>
    val br = readerWith(
      ((s >> 8) & 0xff).toByte,
      ((s) & 0xff).toByte
    )
    // note, we need to cast here toShort so that the
    // MSB is interpreted as the sign bit.
    assert(br.readShortBE() == s)
    val exc = intercept[UnderflowException] { br.readByte() }
  })
  test("readShortLE")(forAll { s: Short =>
    val br = readerWith(
      ((s) & 0xff).toByte,
      ((s >> 8) & 0xff).toByte
    )
    // note, we need to cast here toShort so that the
    // MSB is interpreted as the sign bit.
    assert(br.readShortLE() == s)
    intercept[UnderflowException] { br.readByte() }
  })
  test("readUnsignedMediumBE")(forAll { m: Int =>
    val br = readerWith(
      ((m >> 16) & 0xff).toByte,
      ((m >> 8) & 0xff).toByte,
      ((m) & 0xff).toByte
    )
    assert(br.readUnsignedMediumBE() == maskMedium(m))
    intercept[UnderflowException] { br.readByte() }
  })
  test("readUnsignedMediumLE")(forAll { m: Int =>
    val br = readerWith(
      ((m) & 0xff).toByte,
      ((m >> 8) & 0xff).toByte,
      ((m >> 16) & 0xff).toByte
    )
    assert(br.readUnsignedMediumLE() == maskMedium(m))
    intercept[UnderflowException] { br.readByte() }
  })
  test("readIntBE")(forAll { i: Int =>
    val br = readerWith(
      ((i >> 24) & 0xff).toByte,
      ((i >> 16) & 0xff).toByte,
      ((i >> 8) & 0xff).toByte,
      ((i) & 0xff).toByte
    )
    assert(br.readIntBE() == i)
    intercept[UnderflowException] { br.readByte() }
  })
  test("readIntLE")(forAll { i: Int =>
    val br = readerWith(
      ((i) & 0xff).toByte,
      ((i >> 8) & 0xff).toByte,
      ((i >> 16) & 0xff).toByte,
      ((i >> 24) & 0xff).toByte
    )
    assert(br.readIntLE() == i)
    intercept[UnderflowException] { br.readByte() }
  })
  test("readLongBE")(forAll { l: Long =>
    val br = readerWith(
      ((l >> 56) & 0xff).toByte,
      ((l >> 48) & 0xff).toByte,
      ((l >> 40) & 0xff).toByte,
      ((l >> 32) & 0xff).toByte,
      ((l >> 24) & 0xff).toByte,
      ((l >> 16) & 0xff).toByte,
      ((l >> 8) & 0xff).toByte,
      ((l) & 0xff).toByte
    )
    assert(br.readLongBE() == l)
    intercept[UnderflowException] { br.readByte() }
  })
  test("readLongLE")(forAll { l: Long =>
    val br = readerWith(
      ((l) & 0xff).toByte,
      ((l >> 8) & 0xff).toByte,
      ((l >> 16) & 0xff).toByte,
      ((l >> 24) & 0xff).toByte,
      ((l >> 32) & 0xff).toByte,
      ((l >> 40) & 0xff).toByte,
      ((l >> 48) & 0xff).toByte,
      ((l >> 56) & 0xff).toByte
    )
    assert(br.readLongLE() == l)
    intercept[UnderflowException] { br.readByte() }
  })
  test("readUnsignedByte")(forAll { b: Byte =>
    val br = newReader(_.writeByte(b))
    assert(br.readUnsignedByte() == (b & 0xff))
  })
  test("readUnsignedShortBE")(forAll { s: Short =>
    val br = newReader(_.writeShort(s))
    assert(br.readUnsignedShortBE() == (s & 0xffff))
  })
  test("readUnsignedShortLE")(forAll { s: Short =>
    val br = newReader(_.writeShortLE(s))
    assert(br.readUnsignedShortLE() == (s & 0xffff))
  })
  test("readMediumBE")(forAll { i: Int =>
    val m = maskMedium(i)
    val br = newReader(_.writeMedium(m))
    // Values above the signed-medium max must be sign-extended to a negative Int.
    val expected = if (m > SignedMediumMax) m | 0xff000000 else m
    assert(br.readMediumBE() == expected)
  })
  test("readMediumLE")(forAll { i: Int =>
    val m = maskMedium(i)
    val br = newReader(_.writeMediumLE(m))
    val expected = if (m > SignedMediumMax) m | 0xff000000 else m
    assert(br.readMediumLE() == expected)
  })
  test("readUnsignedIntBE")(forAll { i: Int =>
    val br = newReader(_.writeInt(i))
    assert(br.readUnsignedIntBE() == (i & 0xffffffffL))
  })
  test("readUnsignedIntLE")(forAll { i: Int =>
    val br = newReader(_.writeIntLE(i))
    assert(br.readUnsignedIntLE() == (i & 0xffffffffL))
  })
  // Generator of full-range unsigned 64-bit values (as BigInt).
  val uInt64s: Gen[BigInt] = Gen
    .chooseNum(Long.MinValue, Long.MaxValue)
    .map(x => BigInt(x) + BigInt(2).pow(63))
  test("readUnsignedLongBE")(forAll(uInt64s) { bi: BigInt =>
    val br = readerWith(
      ((bi >> 56) & 0xff).toByte,
      ((bi >> 48) & 0xff).toByte,
      ((bi >> 40) & 0xff).toByte,
      ((bi >> 32) & 0xff).toByte,
      ((bi >> 24) & 0xff).toByte,
      ((bi >> 16) & 0xff).toByte,
      ((bi >> 8) & 0xff).toByte,
      ((bi) & 0xff).toByte
    )
    assert(br.readUnsignedLongBE() == bi)
    val exc = intercept[UnderflowException] { br.readByte() }
  })
  test("readUnsignedLongLE")(forAll(uInt64s) { bi1: BigInt =>
    val bi = bi1.abs
    val br = readerWith(
      ((bi) & 0xff).toByte,
      ((bi >> 8) & 0xff).toByte,
      ((bi >> 16) & 0xff).toByte,
      ((bi >> 24) & 0xff).toByte,
      ((bi >> 32) & 0xff).toByte,
      ((bi >> 40) & 0xff).toByte,
      ((bi >> 48) & 0xff).toByte,
      ((bi >> 56) & 0xff).toByte
    )
    assert(br.readUnsignedLongLE() == bi)
    val exc = intercept[UnderflowException] { br.readByte() }
  })
  // .equals is required to handle NaN
  test("readFloatBE")(forAll { i: Int =>
    val br = newReader(_.writeInt(i))
    assert(br.readFloatBE().equals(JFloat.intBitsToFloat(i)))
  })
  test("readFloatLE")(forAll { i: Int =>
    val br = newReader(_.writeIntLE(i))
    assert(br.readFloatLE().equals(JFloat.intBitsToFloat(i)))
  })
  test("readDoubleBE")(forAll { l: Long =>
    val br = newReader(_.writeLong(l))
    assert(br.readDoubleBE().equals(JDouble.longBitsToDouble(l)))
  })
  test("readDoubleLE")(forAll { l: Long =>
    val br = newReader(_.writeLongLE(l))
    assert(br.readDoubleLE().equals(JDouble.longBitsToDouble(l)))
  })
  test("readBytes")(forAll { bytes: Array[Byte] =>
    val bs = bytes.toIndexedSeq
    val br = readerWith(bs ++ bs: _*)
    intercept[IllegalArgumentException] { br.readBytes(-1) }
    assert(br.readBytes(bytes.length) == Buf.ByteArray.Owned(bytes))
    assert(br.readBytes(bytes.length) == Buf.ByteArray.Owned(bytes))
    assert(br.readBytes(1) == Buf.Empty)
  })
  test("readAll")(forAll { bytes: Array[Byte] =>
    val bs = bytes.toIndexedSeq
    val br = readerWith(bs ++ bs: _*)
    assert(br.readAll() == Buf.ByteArray.Owned(bytes ++ bytes))
    assert(br.readAll() == Buf.Empty)
  })
  test("underflow if too many bytes are skipped") {
    val br = readerWith(0x0, 0x0)
    br.skip(2)
    intercept[UnderflowException] {
      br.skip(2)
    }
  }
  test("remainingUntil") {
    forAll { (bytes: Array[Byte], byte: Byte) =>
      val bs = bytes.toIndexedSeq
      val buf = Buf.ByteArray.Owned(bytes ++ Array(byte) ++ bytes)
      val br = readerWith(bs ++ Vector(byte) ++ bs: _*)
      // remainingUntil must not consume any bytes.
      val remainingBefore = br.remaining
      val until = br.remainingUntil(byte)
      assert(remainingBefore == br.remaining)
      val before = br.readBytes(until)
      val pivot = br.readByte()
      val after = br.readAll()
      val expected = before.concat(Buf.ByteArray.Owned(Array(pivot))).concat(after)
      assert(pivot == byte && expected == buf)
    }
    assert(readerWith().remainingUntil(0x0) == -1)
    val reader = readerWith(0x1, 0x2, 0x3)
    assert(0 == reader.remainingUntil(0x1))
    assert(1 == reader.remainingUntil(0x2))
    assert(2 == reader.remainingUntil(0x3))
    assert(0x1 == reader.readByte())
    assert(2 == reader.remaining)
    assert(-1 == reader.remainingUntil(0x1))
    assert(0 == reader.remainingUntil(0x2))
    assert(1 == reader.remainingUntil(0x3))
    assert(-1 == reader.remainingUntil(0x4))
    assert(2 == reader.remaining)
    assert(0x2 == reader.readByte())
    assert(0x3 == reader.readByte())
    assert(0 == reader.remaining)
    assert(-1 == reader.remainingUntil(0x3))
  }
  test("close() releases the underlying ByteBuf and is idempotent") {
    val byteBuf = UnpooledByteBufAllocator.DEFAULT.buffer(10, Int.MaxValue)
    val br = wrapByteBufInReader(byteBuf)
    assert(byteBuf.refCnt() == 1)
    br.close()
    assert(byteBuf.refCnt() == 0)
    // idempotency
    br.close() // would throw if not guarded by the ByteReader implementation
    assert(byteBuf.refCnt() == 0)
  }
}
| luciferous/finagle | finagle-netty4/src/test/scala/com/twitter/finagle/netty4/AbstractByteBufByteReaderTest.scala | Scala | apache-2.0 | 11,272 |
/*
* Copyright (c) 2012-2015 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich
package hadoop
package jobs
// Java
import java.io.File
import java.io.BufferedWriter
import java.io.FileWriter
// Apache Commons Codec
import org.apache.commons.codec.binary.Base64
// Scala
import scala.collection.mutable.ListBuffer
// Scalaz
import scalaz._
import Scalaz._
// Scala
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
// Scalding
import com.twitter.scalding._
// Specs2
import org.specs2.matcher.{Matcher, Expectable}
import org.specs2.matcher.Matchers._
/**
* Holds helpers for running integration
* tests on SnowPlow EtlJobs.
*/
object JobSpecHelpers {
  /**
   * A Specs2 matcher to check if a Scalding
   * output sink is empty or not.
   */
  val beEmpty: Matcher[ListBuffer[_]] =
    ((_: ListBuffer[_]).isEmpty, "is not empty")
  /**
   * A Specs2 matcher to check if a directory
   * on disk is empty or not.
   */
  // NOTE(review): the predicate is true for a populated directory (or a
  // non-directory), which looks inverted relative to the name `beEmptyDir`;
  // confirm the intended match semantics against the specs2 tuple conversion.
  val beEmptyDir: Matcher[File] =
    ((f: File) => !f.isDirectory || f.list.length > 0, "is populated directory, or not a directory")
  /**
   * How Scalding represents input lines
   */
  type ScaldingLines = List[(String, String)]
  /**
   * Base64-urlsafe encoded version of this standard
   * Iglu configuration.
   */
  private val IgluConfig = {
    val encoder = new Base64(true) // true means "url safe"
    new String(encoder.encode(SpecHelpers.IgluConfig.getBytes)
    )
  }
  /**
   * A case class to make it easy to write out input
   * lines for Scalding jobs without manually appending
   * line numbers.
   *
   * @param l The repeated String parameters
   */
  case class Lines(l: String*) {
    val lines = l.toList
    // Lines paired with their 0-based line numbers, as Scalding expects.
    val numberedLines = number(lines)
    /**
     * Writes the lines to the given file
     *
     * @param file The file to write the
     *        lines to
     */
    def writeTo(file: File) = {
      val writer = new BufferedWriter(new FileWriter(file))
      for (line <- lines) writer.write(line)
      writer.close()
    }
    /**
     * Numbers the lines in the Scalding format.
     * Converts "My line" to ("0" -> "My line")
     *
     * @param lines The List of lines to number
     * @return the List of ("line number" -> "line")
     *         tuples.
     */
    private def number(lines: List[String]): ScaldingLines =
      for ((l, n) <- lines zip (0 until lines.size)) yield (n.toString -> l)
  }
/**
* Implicit conversion from a Lines object to
* a ScaldingLines, aka List[(String, String)],
* ready for Scalding to use.
*
* @param lines The Lines object
* @return the ScaldingLines ready for Scalding
*/
implicit def Lines2ScaldingLines(lines : Lines): ScaldingLines = lines.numberedLines
// Standard JobSpec definition used by all integration tests
val ShredJobSpec =
JobTest("com.snowplowanalytics.snowplow.enrich.hadoop.ShredJob").
arg("input_folder", "inputFolder").
arg("output_folder", "outputFolder").
arg("bad_rows_folder", "badFolder").
arg("exceptions_folder", "exceptionsFolder").
arg("iglu_config", IgluConfig)
case class Sinks(
val output: File,
val badRows: File,
val exceptions: File) {
def deleteAll() {
for (f <- List(exceptions, badRows, output)) {
f.delete()
}
}
}
/**
* Run the ShredJob using the Scalding Tool.
*
* @param lines The input lines to shred
* @return a Tuple3 containing open File
* objects for the output, bad rows
* and exceptions temporary directories.
*/
def runJobInTool(lines: Lines): Sinks = {
def mkTmpDir(tag: String, createParents: Boolean = false, containing: Option[Lines] = None): File = {
val f = File.createTempFile(s"snowplow-shred-job-${tag}-", "")
if (createParents) f.mkdirs() else f.mkdir()
containing.map(_.writeTo(f))
f
}
val input = mkTmpDir("input", createParents = true, containing = lines.some)
val output = mkTmpDir("output")
val badRows = mkTmpDir("bad-rows")
val exceptions = mkTmpDir("exceptions")
val args = Array[String]("com.snowplowanalytics.snowplow.enrich.hadoop.ShredJob", "--local",
"--input_folder", input.getAbsolutePath,
"--output_folder", output.getAbsolutePath,
"--bad_rows_folder", badRows.getAbsolutePath,
"--exceptions_folder", exceptions.getAbsolutePath,
"--iglu_config", IgluConfig)
// Execute
Tool.main(args)
input.delete()
Sinks(output, badRows, exceptions)
}
/**
* Removes the timestamp from bad rows so that what remains is deterministic
*
* @param badRow
* @return The bad row without the timestamp
*/
def removeTstamp(badRow: String): String = {
val badRowJson = parse(badRow)
val badRowWithoutTimestamp = ("line", (badRowJson \\ "line")) ~ ("errors", (badRowJson \\ "errors"))
compact(badRowWithoutTimestamp)
}
}
| mdavid/lessig-bigdata | lib/snowplow/3-enrich/scala-hadoop-shred/src/test/scala/com.snowplowanalytics.snowplow.enrich.hadoop/jobs/JobSpecHelpers.scala | Scala | mit | 5,636 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.system.chooser
import java.util.concurrent.atomic.AtomicInteger
import org.apache.samza.system.SystemStream
import org.apache.samza.system.SystemStreamPartition
import org.apache.samza.system.IncomingMessageEnvelope
import org.apache.samza.util.Logging
import org.apache.samza.metrics.MetricsHelper
import org.apache.samza.metrics.MetricsRegistryMap
import org.apache.samza.metrics.MetricsRegistry
import org.apache.samza.system.SystemStreamMetadata
import scala.collection.JavaConversions._
import org.apache.samza.SamzaException
import org.apache.samza.system.SystemStreamMetadata.OffsetType
/**
* BootstrappingChooser is a composable MessageChooser that only chooses
* an envelope when it's received at least one envelope for each SystemStream.
* It does this by only allowing wrapped.choose to be called when the wrapped
* MessageChooser has been updated with at least one envelope for every
* SystemStream defined in the latestMessageOffsets map. Thus, the guarantee
* is that the wrapped chooser will have an envelope from each SystemStream
* whenever it has to make a choice about which envelope to process next.
*
* This behavior continues for each SystemStream that has lagging partitions.
* As a SystemStream catches up to head, it is no longer marked as lagging,
* and the requirement that the wrapped chooser have an envelope from the
* SystemStream is dropped. Once all SystemStreams have caught up, this
* MessageChooser just becomes a pass-through that always delegates to the
* wrapped chooser.
*
* If a SystemStream falls behind after the initial catch-up, this chooser
* makes no effort to catch the SystemStream back up, again.
*/
class BootstrappingChooser(
  /**
   * The message chooser that BootstrappingChooser delegates to when it's
   * updating or choosing envelopes.
   */
  wrapped: MessageChooser,
  /**
   * A map from system stream to metadata information, which includes oldest,
   * newest, and upcoming offsets for each partition. If a stream does not need to
   * be guaranteed available to the underlying wrapped chooser, it should not
   * be included in this map.
   */
  var bootstrapStreamMetadata: Map[SystemStream, SystemStreamMetadata] = Map(),
  /**
   * An object that holds all of the metrics related to bootstrapping.
   */
  metrics: BootstrappingChooserMetrics = new BootstrappingChooserMetrics) extends MessageChooser with Logging {
  /**
   * The number of lagging partitions for each SystemStream that's behind.
   */
  var systemStreamLagCounts = bootstrapStreamMetadata
    .mapValues(_.getSystemStreamPartitionMetadata.size)
  /**
   * All SystemStreamPartitions that are lagging.
   */
  var laggingSystemStreamPartitions = bootstrapStreamMetadata
    .flatMap {
      case (systemStream, metadata) =>
        metadata
          .getSystemStreamPartitionMetadata
          .keys
          .map(new SystemStreamPartition(systemStream, _))
    }
    .toSet
  /**
   * Store all the systemStreamPartitions registered
   */
  var registeredSystemStreamPartitions = Set[SystemStreamPartition]()
  /**
   * The number of lagging partitions that the underlying wrapped chooser has
   * been updated with, grouped by SystemStream.
   */
  var updatedSystemStreams = Map[SystemStream, Int]()
  /**
   * Prunes lag bookkeeping down to the SSPs that were actually registered,
   * wires up the gauges, and starts the wrapped chooser.
   */
  def start = {
    // remove the systemStreamPartitions not registered.
    laggingSystemStreamPartitions = laggingSystemStreamPartitions.filter(registeredSystemStreamPartitions.contains(_))
    systemStreamLagCounts = laggingSystemStreamPartitions.groupBy(_.getSystemStream).map {case (systemStream, ssps) => systemStream -> ssps.size}
    debug("Starting bootstrapping chooser with bootstrap metadata: %s" format bootstrapStreamMetadata)
    info("Got lagging partition counts for bootstrap streams: %s" format systemStreamLagCounts)
    metrics.setLaggingSystemStreams(() => laggingSystemStreamPartitions.size)
    systemStreamLagCounts.keys.foreach { (systemStream: SystemStream) =>
      metrics.setLagCount(systemStream, () => systemStreamLagCounts.getOrElse(systemStream, 0))
    }
    wrapped.start
  }
  def stop = wrapped.stop
  override def register(systemStreamPartition: SystemStreamPartition, offset: String) {
    debug("Registering stream partition with offset: %s, %s" format (systemStreamPartition, offset))
    // If the offset we're starting to consume from is the same as the upcoming
    // offset for this system stream partition, then we've already read all
    // messages in the stream, and we're at head for this system stream
    // partition.
    checkOffset(systemStreamPartition, offset, OffsetType.UPCOMING)
    wrapped.register(systemStreamPartition, offset)
    registeredSystemStreamPartitions += systemStreamPartition
  }
  /**
   * Forwards the envelope to the wrapped chooser and, when the envelope
   * belongs to a still-lagging bootstrap SSP, bumps that stream's count of
   * buffered envelopes.
   */
  def update(envelope: IncomingMessageEnvelope) {
    wrapped.update(envelope)
    // If this is an SSP that is still lagging, update the count for the stream.
    if (laggingSystemStreamPartitions.contains(envelope.getSystemStreamPartition)) {
      trace("Bumping available message count for stream partition: %s" format envelope.getSystemStreamPartition)
      val systemStream = envelope.getSystemStreamPartition.getSystemStream
      updatedSystemStreams += systemStream -> (updatedSystemStreams.getOrElse(systemStream, 0) + 1)
    }
  }
  /**
   * If choose is called, and the parent MessageChoser has received an
   * envelope from at least one partition in each lagging SystemStream, then
   * the choose call is forwarded to the wrapped chooser. Otherwise, the
   * BootstrappingChooser simply returns null, and waits for more updates.
   */
  def choose = {
    // If no system streams are behind, then go straight to the wrapped chooser.
    if (laggingSystemStreamPartitions.size == 0) {
      trace("No streams are lagging, so bypassing bootstrap chooser.")
      wrapped.choose
    } else if (okToChoose) {
      trace("Choosing from wrapped chooser, since wrapped choser has an envelope from all bootstrap streams.")
      val envelope = wrapped.choose
      if (envelope != null) {
        trace("Wrapped chooser chose non-null envelope: %s" format envelope)
        val systemStreamPartition = envelope.getSystemStreamPartition
        val offset = envelope.getOffset
        // Chosen envelope was from a bootstrap SSP, so decrement the update map.
        if (laggingSystemStreamPartitions.contains(systemStreamPartition)) {
          val systemStream = systemStreamPartition.getSystemStream
          updatedSystemStreams += systemStream -> (updatedSystemStreams.getOrElse(systemStream, 0) - 1)
        }
        // If the offset we just read is the same as the offset for the last
        // message (newest) in this system stream partition, then we have read
        // all messages, and can mark this SSP as bootstrapped.
        checkOffset(systemStreamPartition, offset, OffsetType.NEWEST)
      }
      envelope
    } else {
      trace("Blocking wrapped.chooser since bootstrapping is not done, but not all streams have messages available.")
      null
    }
  }
  /**
   * Checks to see if a bootstrap stream is fully caught up. If it is, the
   * state of the bootstrap chooser is updated to remove the system stream
   * from the set of lagging system streams.
   *
   * A SystemStreamPartition can be deemed "caught up" in one of two ways.
   * First, if a SystemStreamPartition is registered with a starting offset
   * that's equal to the upcoming offset for the SystemStreamPartition, then
   * it's "caught up". For example, if a SystemStreamPartition were registered
   * to start reading from offset 7, and the upcoming offset for the
   * SystemStreamPartition is also 7, then all prior messages are assumed to
   * already have been chosen, and the stream is marked as bootstrapped.
   * Second, if the offset for a chosen message equals the newest offset for the
   * message's SystemStreamPartition, then that SystemStreamPartition is deemed
   * caught up, because all messages in the stream up to the "newest" message
   * have been chosen.
   *
   * Note that the definition of "caught up" here is defined to be when all
   * messages that existed at container start time have been processed. If a
   * SystemStreamPartition's newest message offset is 8 at the time that a
   * container starts, but two more messages are written to the
   * SystemStreamPartition while the container is bootstrapping, the
   * SystemStreamPartition is marked as bootstrapped when the message with
   * offset 8 is chosen, not when the message with offset 10 is chosen.
   *
   * @param systemStreamPartition The SystemStreamPartition to check.
   * @param offset The offset of the most recently chosen message.
   * @param offsetType Whether to check the offset against the newest or
   *                   upcoming offset for the SystemStreamPartition.
   *                   Upcoming is useful during the registration phase,
   *                   and newest is useful during the choosing phase.
   */
  private def checkOffset(systemStreamPartition: SystemStreamPartition, offset: String, offsetType: OffsetType) {
    val systemStream = systemStreamPartition.getSystemStream
    val systemStreamMetadata = bootstrapStreamMetadata.getOrElse(systemStreamPartition.getSystemStream, null)
    // Metadata for system/stream, and system/stream/partition are allowed to
    // be null since not all streams are bootstrap streams.
    val systemStreamPartitionMetadata = if (systemStreamMetadata != null) {
      systemStreamMetadata
        .getSystemStreamPartitionMetadata
        .get(systemStreamPartition.getPartition)
    } else {
      null
    }
    val offsetToCheck = if (systemStreamPartitionMetadata == null) {
      // Use null for offsetToCheck in cases where the partition metadata was
      // null. A null partition metadata implies that the stream is not a
      // bootstrap stream, and therefore, there is no need to check its offset.
      null
    } else {
      systemStreamPartitionMetadata.getOffset(offsetType)
    }
    trace("Check %s offset %s against %s for %s." format (offsetType, offset, offsetToCheck, systemStreamPartition))
    // The SSP is no longer lagging if the envelope's offset equals the
    // latest offset.
    if (offset != null && offset.equals(offsetToCheck)) {
      laggingSystemStreamPartitions -= systemStreamPartition
      systemStreamLagCounts += systemStream -> (systemStreamLagCounts(systemStream) - 1)
      debug("Bootstrap stream partition is fully caught up: %s" format systemStreamPartition)
      if (systemStreamLagCounts(systemStream) == 0) {
        info("Bootstrap stream is fully caught up: %s" format systemStream)
        // If the lag count is 0, then no partition for this stream is lagging
        // (the stream has been fully caught up).
        systemStreamLagCounts -= systemStream
      }
    }
  }
  /**
   * It's only OK to allow the wrapped MessageChooser to choose if it's been
   * given at least one envelope from each lagging SystemStream.
   */
  private def okToChoose = {
    // NOTE(review): entries for streams that have fully caught up are never
    // removed from updatedSystemStreams; if such a stream retains a positive
    // buffered count, this strict equality can under-report readiness even
    // when every lagging stream has an envelope available — confirm.
    updatedSystemStreams.values.filter(_ > 0).size == laggingSystemStreamPartitions.groupBy(_.getSystemStream).size
  }
}
/** Metrics published by the bootstrapping chooser. */
class BootstrappingChooserMetrics(val registry: MetricsRegistry = new MetricsRegistryMap) extends MetricsHelper {
  // Counter tracking batch resets.
  val batches = newCounter("batch-resets")
  /** Registers a gauge reporting how many bootstrap SSPs are still lagging. */
  def setLaggingSystemStreams(getValue: () => Int): Unit =
    newGauge("lagging-batch-streams", getValue)
  /** Registers a per-stream gauge reporting that stream's lagging partition count. */
  def setLagCount(systemStream: SystemStream, getValue: () => Int): Unit = {
    val gaugeName = "%s-%s-lagging-partitions".format(systemStream.getSystem, systemStream.getStream)
    newGauge(gaugeName, getValue)
  }
}
| InnovaCo/samza | samza-core/src/main/scala/org/apache/samza/system/chooser/BootstrappingChooser.scala | Scala | apache-2.0 | 12,453 |
package akkord.events
import akkord.api.actors.ChannelApiActor.CreateMessage
/**
 * A chat message received from the gateway.
 *
 * Field names use snake_case so they map 1:1 onto the wire JSON payload.
 * NOTE(review): individual field semantics are assumed from their names —
 * confirm against the Discord API Message object documentation.
 */
trait Message {
  val id: String
  val channel_id: String
  val author: UserImpl
  val content: String
  val timestamp: String
  val edited_timestamp: Option[String]
  val tts: Boolean
  val mention_everyone: Boolean
  val mentions: List[UserImpl]
  val mention_roles: List[String]
  val attachments: List[Attachment]
  val embeds: List[Embed]
  val reactions: Option[List[Reaction]]
  val nonce: Option[String]
  val pinned: Boolean
  val webhook: Option[String]
  val `type`: Int
  // Builds a CreateMessage command targeting this message's channel; the
  // caller is responsible for sending it to the channel API actor.
  def reply(replyContent: String) = new CreateMessage(channel_id, replyContent)
}
// A file attached to a message (image dimensions present only for images).
case class Attachment
(
  id: String,
  filename: String,
  size: Int,
  url: String,
  proxy_url: String,
  height: Option[Int],
  width: Option[Int]
)
// Rich embedded content attached to a message; all sub-structures optional.
case class Embed
(
  title: String,
  `type`: String,
  description: Option[String],
  url: String,
  timestamp: Option[String],
  color: Option[Int],
  footer: Option[Footer],
  image: Option[Image],
  thumbnail: Option[Thumbnail],
  video: Option[Video],
  provider: Option[Provider],
  author: Option[Author],
  fields: Option[List[Field]]
)
// Thumbnail image of an embed.
case class Thumbnail
(
  url: String,
  proxy_url: String,
  height: Int,
  width: Int
)
// Video element of an embed.
case class Video
(
  url: String,
  height: Int,
  width: Int
)
// Image element of an embed.
case class Image
(
  url: String,
  proxy_url: String,
  height: Int,
  width: Int
)
// Provider (source site) of an embed.
case class Provider
(
  name: String,
  url: String
)
// Author block of an embed.
case class Author
(
  name: String,
  url: String,
  icon_url: Option[String],
  proxy_icon_url: Option[String]
)
// Footer block of an embed.
case class Footer
(
  text: String,
  icon_url: String,
  proxy_icon_url: String
)
// A single name/value field of an embed; inline controls layout.
case class Field
(
  name: String,
  value: String,
  inline: Boolean
)
// A reaction on a message: the emoji, its count, and whether this user reacted.
case class Reaction
(
  count: Int,
  me: Boolean,
  emoji: Emoji
)
| ryanmiville/akkord | src/main/scala/akkord/events/Message.scala | Scala | mit | 1,781 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.plans.logical.sql
import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
/**
 * A DELETE FROM statement, as parsed from SQL.
 *
 * @param tableName  multi-part identifier of the target table
 * @param tableAlias optional alias for the target table
 * @param condition  optional WHERE predicate; None means no filter was given
 */
case class DeleteFromStatement(
    tableName: Seq[String],
    tableAlias: Option[String],
    condition: Option[Expression])
  extends ParsedStatement
| bdrillard/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/sql/DeleteFromStatement.scala | Scala | apache-2.0 | 1,151 |
package controllers
import play.api.mvc._
import play.api.libs.json._
import javax.inject._
import actors.CreateWorkflowExecutionActor.CreateWorkflowExecution
import actors.DecrementWorkflowExecutionActor.DecrementWorkflowExecution
import actors.IsFinishedWorkflowExecutionActor.IsFinishedWorkflowExecution
import actors.RemoveWorkflowExecutionsActor.RemoveWorkflowExecutions
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import akka.pattern.ask
import akka.actor.{ActorRef, ActorSystem}
import akka.util.Timeout
/**
 * HTTP endpoints for creating, decrementing and querying workflow executions.
 * All work is delegated to the injected actors via the ask pattern.
 */
@Singleton
class WorkflowExecutionController @Inject()(system: ActorSystem,
                                            @Named("create-workflow-execution-actor") createWorkflowExecutionActor: ActorRef,
                                            @Named("decrement-workflow-execution-actor") decrementWorkflowExecutionActor: ActorRef,
                                            @Named("is-finished-workflow-execution-actor") isFinishedWorkflowExecutionActor: ActorRef,
                                            @Named("remove-workflow-executions-actor") removeWorkflowExecutionsActor: ActorRef)
                                           (implicit ec: ExecutionContext) extends Controller {
  // Ask-pattern timeout applied to every actor query below.
  implicit val timeout: Timeout = 5.seconds
  // Periodically ask the cleanup actor to purge executions (every minute, starting immediately).
  system.scheduler.schedule(0.minute, 1.minute, removeWorkflowExecutionsActor, RemoveWorkflowExecutions)
  /** Creates a new execution for the workflow: 201 with its id, or 404 when the workflow is unknown. */
  def addWorkflowExecution(workflowId: Long) = Action.async {
    val reply = (createWorkflowExecutionActor ? CreateWorkflowExecution(workflowId)).mapTo[Option[Long]]
    reply.map {
      case Some(id) => Created(Json.obj("workflow_execution_id" -> id.toString))
      case None     => NotFound
    }
  }
  /** Decrements the execution's counter: 204 when exactly one was updated, 400 otherwise, 404 when unknown. */
  def decrementWorkflowExecution(workflowId: Long, workflowExecutionId: Long) = Action.async {
    val reply = (decrementWorkflowExecutionActor ? DecrementWorkflowExecution(workflowId, workflowExecutionId)).mapTo[Option[Int]]
    reply.map {
      case Some(n) => if (n == 1) NoContent else BadRequest
      case None    => NotFound
    }
  }
  /** Reports whether the execution has finished: 200 with a JSON flag, or 404 when unknown. */
  def isFinishedWorkflowExecution(workflowId: Long, workflowExecutionId: Long) = Action.async {
    val reply = (isFinishedWorkflowExecutionActor ? IsFinishedWorkflowExecution(workflowId, workflowExecutionId)).mapTo[Option[Boolean]]
    reply.map {
      case Some(isFinished) => Ok(Json.obj("finished" -> isFinished))
      case None             => NotFound
    }
  }
} | rmscardoso/workflows | app/controllers/WorkflowExecutionController.scala | Scala | mit | 2,355 |
package com.twitter.finagle.netty4.channel
import com.twitter.conversions.time._
import com.twitter.finagle.Stack.Params
import com.twitter.util.{Await, Promise}
import io.netty.buffer.{ByteBuf, Unpooled}
import io.netty.channel._
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.socket.SocketChannel
import io.netty.channel.socket.nio.NioSocketChannel
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class Netty4ClientChannelInitializerTest extends FunSuite {
  test("raw channel initializer exposes netty pipeline") {
    // Outbound handler that reverses the bytes of every written ByteBuf,
    // used to prove that a user-supplied pipelineInit handler participates
    // in the channel's outbound path.
    val reverser = new ChannelOutboundHandlerAdapter {
      override def write(ctx: ChannelHandlerContext, msg: Any, promise: ChannelPromise): Unit = msg match {
        case b: ByteBuf =>
          val bytes = new Array[Byte](b.readableBytes)
          b.readBytes(bytes)
          val reversed = Unpooled.wrappedBuffer(bytes.reverse)
          super.write(ctx, reversed, promise)
        case _ => fail("expected ByteBuf message")
      }
    }
    // Initializer under test: installs the reverser via the exposed pipeline hook.
    val init =
      new RawNetty4ClientChannelInitializer(
        pipelineInit = _.addLast(reverser),
        params = Params.empty)
    val channel: SocketChannel = new NioSocketChannel()
    val loop = new NioEventLoopGroup()
    // The channel must be registered with an event loop before handlers run.
    loop.register(channel)
    init.initChannel(channel)
    val msgSeen = new Promise[ByteBuf]
    // Capture handler placed FIRST, i.e. last in the outbound direction, so it
    // observes the message after the reverser has transformed it.
    channel.pipeline.addFirst(new ChannelOutboundHandlerAdapter {
      override def write(ctx: ChannelHandlerContext, msg: scala.Any, promise: ChannelPromise): Unit = msg match {
        case b: ByteBuf => msgSeen.setValue(b)
        case _ => fail("expected ByteBuf message")
      }
    })
    val bytes = Array(1.toByte, 2.toByte, 3.toByte)
    channel.write(Unpooled.wrappedBuffer(bytes))
    val seen = new Array[Byte](3)
    Await.result(msgSeen, 5.seconds).readBytes(seen)
    // The bytes observed at the head of the pipeline must be reversed.
    assert(seen.toList == bytes.reverse.toList)
  }
}
| adriancole/finagle | finagle-netty4/src/test/scala/com/twitter/finagle/netty4/channel/Netty4ClientChannelInitializerTest.scala | Scala | apache-2.0 | 1,942 |
/*
* Copyright (c) 2013-2014 Telefónica Investigación y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.api.auth.oauth2.keyrock
import java.util.concurrent.ExecutionException
import scala.collection.JavaConversions
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.language.postfixOps
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfter, FlatSpec}
import org.scalatest.matchers.MustMatchers
import es.tid.cosmos.api.auth.oauth2.OAuthUserProfile
import es.tid.cosmos.api.profile.UserId
import es.tid.cosmos.common.scalatest.matchers.FutureMatchers
/**
 * Integration test for the Keyrock OAuth2 Provider, run against an
 * in-process mocked Keyrock API (no network access to a real server).
 */
class ProviderIT extends FlatSpec
  with MustMatchers
  with BeforeAndAfter
  with BeforeAndAfterAll
  with FutureMatchers {
  val testTimeout = 3 seconds
  val clientId = "client-id-1"
  val clientSecret = "client-s3cr3t"
  val redirectUri = "http://callback"
  // The single user known to the mocked API.
  val userProfile = KeyrockProfile(
    id = 53,
    actorId = 112,
    nickName = "jackie",
    displayName = "John Smith",
    email = "jsmith@tid.es"
  )
  val serverMock = new MockedKeyrockApi(
    port = 2349,
    clientId = clientId,
    clientSecret = clientSecret,
    existingUser = userProfile,
    redirectUri = redirectUri
  )
  var config = serverMock.configurationKeys
  // Re-created before every test so each test gets a fresh Provider.
  var client: Provider = null
  override def beforeAll() {
    serverMock.start()
  }
  before {
    client = new Provider("keyrock", toConfig(config))
  }
  after {
    serverMock.clear()
  }
  override def afterAll() {
    serverMock.stop()
  }
  // Turns the mock's key/value configuration into a Typesafe Config.
  private def toConfig(keys: Map[String, String]) =
    ConfigFactory.parseMap(JavaConversions.mapAsJavaMap(keys))
  "A Keyrock OAuth client" must "link to signup url" in {
    client.newAccountUrl.get must be (config("signup.url"))
  }
  it must "link to an authentication url" in {
    client.authenticationUrl(redirectUri) must be (s"${config("auth.url")}authorize?" + Seq(
      "response_type=code",
      s"client_id=$clientId",
      s"redirect_uri=$redirectUri"
    ).mkString("&"))
  }
  it must "successfully request an access token with a valid code" in {
    val authUrl = client.authenticationUrl(redirectUri)
    val code = serverMock.requestAuthorizationCode(authUrl, userProfile.id)
    val token_> = client.requestAccessToken(code, redirectUri)
    token_> must (runUnder(testTimeout) and eventually(have length 8))
  }
  it must "handle an OAuth error when requesting an access tokens with invalid code" in {
    val token_> = client.requestAccessToken("invalid_code", redirectUri)
    token_> must runUnder(testTimeout)
    token_> must eventuallyFailWith [ExecutionException]
  }
  it must "request the user profile" in {
    val authUrl = client.authenticationUrl(redirectUri)
    val code = serverMock.requestAuthorizationCode(authUrl, userProfile.id)
    // Exchange the code for a token, then fetch the profile with that token.
    val profile_> = for {
      token <- client.requestAccessToken(code, redirectUri)
      profile <- client.requestUserProfile(token)
    } yield profile
    profile_> must runUnder(testTimeout)
    profile_> must eventually(equal(OAuthUserProfile(
      id = UserId("keyrock", "112"),
      name = Some("John Smith"),
      email = Some("jsmith@tid.es")
    )))
  }
}
| telefonicaid/fiware-cosmos-platform | cosmos-api/it/scala/es/tid/cosmos/api/auth/oauth2/keyrock/ProviderIT.scala | Scala | apache-2.0 | 3,757 |
package com.sksamuel.elastic4s.searches.queries
import com.sksamuel.elastic4s.searches.QueryBuilderFn
import org.elasticsearch.index.query.{BoolQueryBuilder, QueryBuilders}
/** Translates a BoolQueryDefinition into the corresponding Elasticsearch BoolQueryBuilder. */
object BoolQueryBuilderFn {
  def apply(q: BoolQueryDefinition): BoolQueryBuilder = {
    val builder = QueryBuilders.boolQuery()
    // Optional top-level settings are applied only when defined.
    q.adjustPureNegative.foreach(builder.adjustPureNegative)
    q.minimumShouldMatch.foreach(builder.minimumNumberShouldMatch)
    q.disableCoord.foreach(builder.disableCoord)
    q.queryName.foreach(builder.queryName)
    q.boost.foreach(b => builder.boost(b.toFloat))
    // Each clause is converted to its Lucene builder and attached to its slot.
    q.must.foreach(d => builder.must(QueryBuilderFn(d)))
    q.filters.foreach(d => builder.filter(QueryBuilderFn(d)))
    q.not.foreach(d => builder.mustNot(QueryBuilderFn(d)))
    q.should.foreach(d => builder.should(QueryBuilderFn(d)))
    builder
  }
}
| aroundus-inc/elastic4s | elastic4s-tcp/src/main/scala/com/sksamuel/elastic4s/searches/queries/BoolQueryBuilderFn.scala | Scala | apache-2.0 | 840 |
import core.{ HubModule, SystemField }
import java.io.File
import models.MetadataCache
import org.apache.solr.client.solrj.SolrQuery
import org.scalatest.{ Ignore, FlatSpec }
import org.scalatest.matchers.ShouldMatchers
import play.api.libs.json.JsArray
import play.api.libs.json.JsObject
import play.api.libs.json.JsString
import play.api.mvc.AnyContentAsJson
import play.api.test.Helpers._
import play.api.test.{ FakeHeaders, FakeRequest }
import plugins.SimpleDocumentUploadPlugin
import test.TestContext
import util.OrganizationConfigurationHandler
import xml.XML
import services.search.SolrQueryService
/**
*
* @author Manuel Bernhardt <bernhardt.manuel@gmail.com>
*/
@Ignore class SimpleDocumentUploadSpec extends FlatSpec with ShouldMatchers with TestContext {
  val controller = new controllers.organizations.SimpleDocumentUpload()(HubModule)
  // Submits fakeRequest and checks that the document lands in the MetadataCache
  // with the expected system fields, schema version and serialized XML.
  "The SimpleDocumentUpload" should "submit and store a document" in {
    withTestData() {
      implicit val configuration = OrganizationConfigurationHandler.getByOrgId("delving")
      val result = controller.submit(fakeRequest)
      status(result) should equal(OK)
      val maybeDoc = MetadataCache.get("delving", "uploadDocuments", "uploadDocument").findOne("delving_uploadDocuments_503e203903643da47461306e")
      maybeDoc should not equal (None)
      val doc = maybeDoc.get
      doc.getSystemFieldValues(SystemField.TITLE).headOption should equal(Some("Sample title"))
      doc.index should equal(0)
      doc.schemaVersions.get("tib") should equal(Some("1.0.1"))
      doc.xml("tib") should equal("""<tib:record xmlns:europeana="http://www.europeana.eu/schemas/ese/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:tib="http://www.tib.nl/schemas/tib/" xmlns:delving="http://schemas.delving.eu/"><dc:title>Sample title</dc:title><dc:subject>Random subject</dc:subject><delving:title>Sample title</delving:title></tib:record>""")
    }
  }
  // Submits fakeRequest and checks that the document becomes findable in Solr.
  "The SimpleDocumentUpload" should "submit and index a document" in {
    withTestData() {
      implicit val configuration = OrganizationConfigurationHandler.getByOrgId("delving")
      val result = controller.submit(fakeRequest)
      status(result) should equal(OK)
      val queryById = SolrQueryService.getSolrResponseFromServer(new SolrQuery("delving_orgId:delving id:delving_uploadDocuments_503e203903643da47461306e"))
      queryById.getResults.size() should equal(1)
    }
  }
  // Uploads a PDF and an image before submitting, then verifies that the file
  // and image URLs are woven into both the stored XML and the Solr document.
  // NOTE(review): "propertly" is a typo in the test name; left as-is because
  // the name string is part of the test's identity at runtime.
  "The SimpleDocumentUpload" should "propertly integrate uploaded files into a document" in {
    withTestData() {
      import play.api.Play.current
      implicit val configuration = OrganizationConfigurationHandler.getByOrgId("delving")
      // Start from a clean index so the count assertions below are meaningful.
      indexingServiceLocator.byDomain(configuration).deleteByQuery("*:*")
      val pdf = new File(current.path, "/modules/simple-document-upload/conf/sample.pdf")
      val png = new File(current.path, "public/images/dummy-object.png")
      val uid = "123456"
      val (f, thumbnailUrl) = controllers.dos.FileUpload.storeFile(pdf, "sample.pdf", "application/pdf", uid).get
      val (f1, thumbnailUrl1) = controllers.dos.FileUpload.storeFile(png, "delving-team.jpg", "image/jpg", uid).get
      // Attach the uploaded files to the document before submitting it.
      val simulatedAttachment = controller.upload(uid, "delving_uploadDocuments_503e203903643da47461306e")(
        FakeRequest().withSession(
          ("userName" -> "bob")
        ))
      status(simulatedAttachment) should equal(OK)
      val result = controller.submit(fakeRequest)
      status(result) should equal(OK)
      val maybeDoc = MetadataCache.get("delving", "uploadDocuments", SimpleDocumentUploadPlugin.ITEM_TYPE).findOne("delving_uploadDocuments_503e203903643da47461306e")
      maybeDoc should not equal (None)
      val doc = maybeDoc.get
      doc.xml.get("tib") should not equal (None)
      val content = doc.xml("tib")
      val parsed = XML.loadString(content)
      (parsed \\ "title").find(_.prefix == "delving").map(_.text) should equal(Some("Sample title"))
      (parsed \\ "title").find(_.prefix == "dc").map(_.text) should equal(Some("Sample title"))
      (parsed \\ "subject").text should equal("Random subject")
      (parsed \\ "imageUrl").text should equal("http:///image/" + f1.id.toString)
      (parsed \\ "thumbnail").map(_.text + "/80") should equal(Seq("http://" + thumbnailUrl1, "http://" + thumbnailUrl))
      (parsed \\ "fullTextObjectUrl").text should equal("http:///file/" + f.id.toString)
      val queryById = SolrQueryService.getSolrResponseFromServer(new SolrQuery("delving_orgId:delving id:delving_uploadDocuments_503e203903643da47461306e"))
      queryById.getResults.size() should equal(1)
      val solrDocument = queryById.getResults.get(0)
      solrDocument.getFirstValue("delving_fullTextObjectUrl_link") should equal("http:///file/" + f.id.toString)
      solrDocument.getFirstValue("delving_imageUrl") should equal("http:///image/" + f1.id.toString)
      solrDocument.getFirstValue("delving_thumbnail") should equal("http:///thumbnail/" + f1.id.toString)
      solrDocument.getFirstValue("delving_hasDigitalObject") should equal(true)
    }
  }
  // re-enable this if this plugin gets ever used seriously
  //
  //  "The SimpleDocumentUpload" should "index a PDF as full text with Tika" in {
  //    withTestData() {
  //
  //      import play.api.Play.current
  //
  //      implicit val configuration = OrganizationConfigurationHandler.getByOrgId("delving")
  //
  //      val pdf = new File(current.path, "../modules/simple-document-upload/conf/sample.pdf")
  //      val uid = "123456"
  //
  //      val (f, thumbnailUrl) = controllers.dos.FileUpload.storeFile(pdf, "sample.pdf", "application/pdf", uid).get
  //
  //      val simulatedAttachment = controller.upload("delving", uid, "delving_uploadDocuments_503e203903643da47461306e")(
  //        FakeRequest().withSession(
  //          ("userName" -> "bob")
  //        ))
  //
  //      status(simulatedAttachment) should equal(OK)
  //
  //      val result = controller.submit("delving")(fakeRequest)
  //      status(result) should equal(OK)
  //
  //      val queryFullText = SolrQueryService.getSolrResponseFromServer(new SolrQuery("Anticonstitutionellement"))
  //      // FIXME this does not work yet because due to how the Tika indexing is implemented, the remote fetching in the test scope without real URL does not work
  //      // queryFullText.getResults.size() should equal(1)
  //    }
  //  }
  // A canned JSON POST describing one document with a dc:title and dc:subject,
  // authenticated as user "bob" via the session.
  def fakeRequest = FakeRequest(
    method = "POST",
    uri = "http://delving.localhost:9000",
    headers = FakeHeaders(Seq(CONTENT_TYPE -> Seq("application/json"))),
    body = AnyContentAsJson(
      JsObject(
        Seq(
          "id" -> JsString("delving_uploadDocuments_503e203903643da47461306e"),
          "fields" -> JsArray(
            Seq(
              JsObject(Seq("key" -> JsString("dc:title"), "fieldType" -> JsString("text"), "label" -> JsString("Title"), "value" -> JsString("Sample title"))),
              JsObject(Seq("key" -> JsString("dc:subject"), "fieldType" -> JsString("text"), "label" -> JsString("metadata.dc.subject"), "value" -> JsString("Random subject")))
            )
          ),
          "files" -> JsArray(Seq.empty)
        )
      )
    )
  ).withSession(
    ("userName" -> "bob")
  )
} | delving/culture-hub | modules/simple-document-upload/test/SimpleDocumentUploadSpec.scala | Scala | apache-2.0 | 7,271 |
package com.arcusys.learn.liferay.services
import com.liferay.portal.kernel.dao.orm._
import com.liferay.portal.kernel.workflow.WorkflowConstants
import com.liferay.portal.model.User
import com.liferay.portal.service.{ ServiceContext, UserLocalServiceUtil }
import java.util.Locale
import com.liferay.portal.webserver.WebServerServletTokenUtil
import com.liferay.portal.kernel.util.DigesterUtil
import com.liferay.portal.kernel.util.HttpUtil
import scala.collection.JavaConverters._
object UserLocalServiceHelper {
def apply() = new UserLocalServiceHelper {}
}
trait UserLocalServiceHelper {
def getCompanyUsers(companyId: Long, start: Int, end: Int): java.util.List[User] =
UserLocalServiceUtil.getCompanyUsers(companyId, start, end)
def getOrganizationUsers(organizationId: Long): java.util.List[User] =
UserLocalServiceUtil.getOrganizationUsers(organizationId)
def getUsers(start: Int, end: Int): java.util.List[User] = UserLocalServiceUtil.getUsers(start, end)
def getUser(userId: Long): User = UserLocalServiceUtil.getUser(userId)
def getUserById(companyId: Long, userId: Long): User = UserLocalServiceUtil.getUserById(companyId, userId)
def getRoleUsersCount(roleId: Long): Int = UserLocalServiceUtil.getRoleUsersCount(roleId)
def getUsersByRoleId(liferayRoleId: Long): java.util.List[User] = UserLocalServiceUtil.getRoleUsers(liferayRoleId)
def addGroupUsers(groupId: Long, userIds: Array[Long]) {
UserLocalServiceUtil.addGroupUsers(groupId, userIds)
}
def getGroupUsers(groupId: Long): java.util.List[User] =
UserLocalServiceUtil.getGroupUsers(groupId)
def getGroupUserIds(groupId: Long): Seq[Long] = {
val userIdsInGroup = UserLocalServiceUtil.getGroupUserIds(groupId).toSeq.asJavaCollection
if (userIdsInGroup.isEmpty) Seq()
else {
val userQuery = UserLocalServiceUtil.dynamicQuery()
userQuery
.add(RestrictionsFactoryUtil.in("userId", userIdsInGroup))
.add(RestrictionsFactoryUtil.eq("status", WorkflowConstants.STATUS_APPROVED))
.add(RestrictionsFactoryUtil
.or(RestrictionsFactoryUtil.ne("firstName", ""),
RestrictionsFactoryUtil.ne("lastName", "")))
.addOrder(OrderFactoryUtil.asc("lastName"))
.addOrder(OrderFactoryUtil.asc("firstName"))
userQuery.setProjection(ProjectionFactoryUtil.projectionList()
.add(ProjectionFactoryUtil.property("userId")))
UserLocalServiceUtil.dynamicQuery(userQuery).asScala.map(_.asInstanceOf[Long])
}
}
def getDefaultUserId(companyId: Long): Long = UserLocalServiceUtil.getDefaultUserId(companyId)
def unsetOrganizationUsers(organizationId: Long, userIds: Array[Long]) {
UserLocalServiceUtil.unsetOrganizationUsers(organizationId, userIds)
}
def addUser(creatorUserId: Long, companyId: Long, autoPassword: Boolean,
password1: String, password2: String,
autoScreenName: Boolean, screenName: String, emailAddress: String,
facebookId: Long, openId: String, locale: Locale,
firstName: String, middleName: String, lastName: String,
prefixId: Int, suffixId: Int, male: Boolean,
birthdayMonth: Int, birthdayDay: Int, birthdayYear: Int,
jobTitle: String, groupIds: Array[Long], organizationIds: Array[Long],
roleIds: Array[Long], userGroupIds: Array[Long], sendEmail: Boolean,
serviceContext: ServiceContext): User =
UserLocalServiceUtil.addUser(creatorUserId, companyId, autoPassword, password1, password2,
autoScreenName, screenName, emailAddress, facebookId, openId, locale,
firstName, middleName, lastName, prefixId, suffixId, male,
birthdayMonth, birthdayDay, birthdayYear, jobTitle, groupIds, organizationIds,
roleIds, userGroupIds, sendEmail, serviceContext)
def updatePortrait(userId: Long, bytes: Array[Byte]): User = UserLocalServiceUtil.updatePortrait(userId, bytes)
def updateReminderQuery(userId: Long, question: String, answer: String): User =
UserLocalServiceUtil.updateReminderQuery(userId, question, answer)
def getPortraitTime(portraitId: Long) = {
WebServerServletTokenUtil.getToken(portraitId)
}
def getPortraitToken(user: User) = {
HttpUtil.encodeURL(DigesterUtil.digest(user.getUserUuid))
}
}
| ViLPy/Valamis | learn-liferay620-services/src/main/scala/com/arcusys/learn/liferay/services/UserLocalServiceHelper.scala | Scala | lgpl-3.0 | 4,226 |
package at.bioinform.core.alphabet.dna
import at.bioinform.core.alphabet.{Alphabet, BitUtil}
import at.bioinform.seq.Chain
import scala.annotation.switch
object DNA4 extends Alphabet {
type elemType = Nuc4
override val size = 4
val elements = List(A, C, G, T)
/** For performance reasons */
private[this] val nucleotides = Array(A, C, G, T)
/** Converts a symbol to an `Int` */
override def toInt(symbol: Nuc4@switch): Int = symbol match {
case A => 0
case C => 1
case G => 2
case T => 3
}
override def toInt(char: Char@switch): Int = char match {
case 'A' | 'a' => 0
case 'C' | 'c' => 1
case 'G' | 'g' => 2
case 'T' | 't' => 3
case _ => 0
}
override def fromInt(index: Int): Nuc4 = if (0 <= index && index < 4) nucleotides(index) else A
override def fromChar(char: Char@switch): Nuc4 = char match {
case 'A' | 'a' => A
case 'C' | 'c' => C
case 'G' | 'g' => G
case 'T' | 't' => T
case _ => A
}
override def isCaseSensitive: Boolean = false
override def bitUtil = Dna4BitUtil
/** Custom string interpolation */
implicit class Dna4Helper(val sc: StringContext) extends AnyVal {
def dna4(args: Any*): Chain[DNA4.type] = Chain(sc.parts.mkString)(DNA4)
}
implicit def dna4Implicit = DNA4
object Dna4BitUtil extends BitUtil(DNA4) {
@inline def positionInChunk(pos: Int) = pos % 32
@inline def indexOfChunk(pos: Int) = pos / 32
@inline def symbolsPerChunk() = 32
@inline override def bitsPerSymbol: Int = 2
}
}
| peri4n/bIO | subprojects/core/src/main/scala/at/bioinform/core/alphabet/dna/DNA4.scala | Scala | apache-2.0 | 1,545 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.uuid
import java.util.UUID
import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class TimeSortedUuidGeneratorTest extends Specification {
val time = 1435598908099L // System.currentTimeMillis()
"TimeSortedUuidGenerator" should {
"create uuids with correct formats" >> {
val id = TimeSortedUuidGenerator.createUuid(time).toString
id.substring(0, 18) mustEqual "000014e4-05ce-4ac3"
val uuid = UUID.fromString(id)
uuid.version() mustEqual 4
uuid.variant() mustEqual 2
}
"create uuids with time as the msb" >> {
val ids = Seq(time - 1, time, time + 1, time + 1000)
.map(TimeSortedUuidGenerator.createUuid).map(_.toString)
ids.sorted mustEqual ids
}
}
}
| aheyne/geomesa | geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/uuid/TimeSortedUuidGeneratorTest.scala | Scala | apache-2.0 | 1,319 |
/*
* Copyright (c) 2014-2015 by its authors. Some rights reserved.
* See the project homepage at: http://www.monifu.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monifu.reactive.observables
import minitest.SimpleTestSuite
import monifu.concurrent.Cancelable
import monifu.concurrent.schedulers.TestScheduler
import monifu.reactive.OverflowStrategy.Unbounded
import monifu.reactive.channels.{ObservableChannel, PublishChannel}
import monifu.reactive.{Subject, Observable}
import monifu.reactive.subjects.PublishSubject
import scala.util.Success
object LiftOperatorsSuite extends SimpleTestSuite {
test("ConnectableObservable should work") {
implicit val s = TestScheduler()
val o = Observable.unit(1).publish
val result: ConnectableObservable[Int] = o.sum
val f = result.asFuture
result.connect()
s.tick()
assertEquals(f.value, Some(Success(Some(1))))
}
test("Subject should work") {
implicit val s = TestScheduler()
val result: Subject[Int, Int] = PublishSubject[Int]().sum
val f = result.asFuture
result.onNext(1)
result.onComplete()
s.tick()
assertEquals(f.value, Some(Success(Some(1))))
}
test("ObservableChannel should work") {
implicit val s = TestScheduler()
val result: ObservableChannel[Int, Int] = PublishChannel[Int](Unbounded).sum
val f = result.asFuture
result.pushNext(1)
result.pushComplete()
s.tick()
assertEquals(f.value, Some(Success(Some(1))))
}
test("GroupedObservables should work") {
implicit val s = TestScheduler()
val (in,out) = GroupedObservable.broadcast[Int,Int](10, Cancelable())
val result: GroupedObservable[Int, Int] = out.sum
val f = result.map(_ + result.key).asFuture
in.onNext(11)
in.onComplete()
s.tick()
assertEquals(f.value, Some(Success(Some(21))))
}
}
| sergius/monifu | monifu/shared/src/test/scala/monifu/reactive/observables/LiftOperatorsSuite.scala | Scala | apache-2.0 | 2,365 |
/*
* Copyright 2017-2022 John Snow Labs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.johnsnowlabs.nlp.pretrained
import com.johnsnowlabs.util.Version
import org.scalatest.flatspec.AnyFlatSpec
import java.sql.Timestamp
import java.text.SimpleDateFormat
import java.util.TimeZone
class ResourceMedataTest extends AnyFlatSpec {
"ResourceMetadata" should "get model with spark version number 3.0 even if version 2.0 has the latest trained date" in {
val resourcePath = "src/test/resources/resource-downloader/test_v2_latest_date.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("explain_document_dl", libVersion = Version(List(3, 3, 2)),
sparkVersion = Version(List(3, 0, 2)))
val expectedSparkNLPVersion = Version(List(3, 1, 3))
val expectedSparkVersion = Version(List(3, 0))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "take version 3.1.1 when SparkNLP is 3.1.1 and Spark is 2.x" in {
val resourcePath = "src/test/resources/resource-downloader/test_v2_latest_date.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("explain_document_dl", libVersion = Version(List(3, 1, 1)),
sparkVersion = Version(List(2, 4, 5)))
val expectedSparkNLPVersion = Version(List(3, 1, 1))
val expectedSparkVersion = Version(List(2, 4))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get model with spark version number 3.0 when spark version is 3.0.1" in {
val resourcePath = "src/test/resources/resource-downloader/test_with_spark_3.2.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("explain_document_dl", libVersion = Version(List(3, 3, 2)),
sparkVersion = Version(List(3, 0, 1)))
val expectedSparkNLPVersion = Version(List(3, 1, 3))
val expectedSparkVersion = Version(List(3, 0))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get model with spark version number 3.0 when spark version is 3.1.0" in {
val resourcePath = "src/test/resources/resource-downloader/test_with_spark_3.2.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("explain_document_dl", libVersion = Version(List(3, 3, 2)),
sparkVersion = Version(List(3, 1, 0)))
val expectedSparkNLPVersion = Version(List(3, 1, 3))
val expectedSparkVersion = Version(List(3, 0))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get model with spark version 3.2 when spark spark version is >= 3.2 " in {
val resourcePath = "src/test/resources/resource-downloader/test_with_spark_3.2.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("explain_document_dl", libVersion = Version(List(3, 4, 0)),
sparkVersion = Version(List(3, 2, 0)))
val expectedSparkNLPVersion = Version(List(3, 4, 0))
val expectedSparkVersion = Version(List(3, 2))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get model with spark==2.4 and spark-nlp==2.4.0 when spark==2.4 and spark-nlp==2.4.5" in {
val resourcePath = "src/test/resources/resource-downloader/test_example1.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("bert_base_cased", libVersion = Version(List(2, 4, 5)),
sparkVersion = Version(List(2, 4)))
val expectedSparkNLPVersion = Version(List(2, 4, 0))
val expectedSparkVersion = Version(List(2, 4))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get model with spark==3.0 and sparknlp==2.4.5 when spark==3.0 and spark-nlp==2.4.5" in {
val resourcePath = "src/test/resources/resource-downloader/test_example1.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("bert_base_cased", libVersion = Version(List(2, 4, 5)),
sparkVersion = Version(List(3, 0)))
val expectedSparkNLPVersion = Version(List(2, 4, 5))
val expectedSparkVersion = Version(List(3, 0))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get model with spark==2.4 and spark-nlp==3.3.0 when spark==2.4 and spark-nlp==3.3.0" in {
val resourcePath = "src/test/resources/resource-downloader/test_models_same_time.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("bert_base_cased", libVersion = Version(List(3, 3, 0)),
sparkVersion = Version(List(2, 4)))
val expectedSparkNLPVersion = Version(List(3, 3, 0))
val expectedSparkVersion = Version(List(2, 4))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get model with spark==3.0 and spark-nlp==3.3.0 when spark==3.0 and spark-nlp==3.3.0" in {
val resourcePath = "src/test/resources/resource-downloader/test_models_same_time.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("bert_base_cased", libVersion = Version(List(3, 3, 0)),
sparkVersion = Version(List(3, 0)))
val expectedSparkNLPVersion = Version(List(3, 3, 0))
val expectedSparkVersion = Version(List(3, 0))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get model with spark==3.0 and spark-nlp==3.3.0 when spark==3.0 and spark-nlp==3.3.0 and newest model version is 3.0" in {
val resourcePath = "src/test/resources/resource-downloader/test_bert_v3_newest.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("bert_base_cased", libVersion = Version(List(3, 3, 0)),
sparkVersion = Version(List(3, 0)))
val expectedSparkNLPVersion = Version(List(3, 3, 0))
val expectedSparkVersion = Version(List(3, 0))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get model with spark==2.4 and spark-nlp==3.3.0 when spark==2.4 and spark-nlp==3.3.0 and newest model version is 3.0" in {
val resourcePath = "src/test/resources/resource-downloader/test_bert_v3_newest.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("bert_base_cased", libVersion = Version(List(3, 3, 0)),
sparkVersion = Version(List(2, 4)))
val expectedSparkNLPVersion = Version(List(3, 3, 0))
val expectedSparkVersion = Version(List(2, 4))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get model with spark==3.0 and spark-nlp==3.3.0 when spark==3.0 and spark-nlp==3.3.0 and newest model version is 2.4" in {
val resourcePath = "src/test/resources/resource-downloader/test_bert_v2_newest.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("bert_base_cased", libVersion = Version(List(3, 3, 0)),
sparkVersion = Version(List(3, 0)))
val expectedSparkNLPVersion = Version(List(3, 3, 0))
val expectedSparkVersion = Version(List(3, 0))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get model with spark==2.4 and spark-nlp==3.3.0 when spark==2.4 and spark-nlp==3.3.0 and newest model version is 2.4" in {
val resourcePath = "src/test/resources/resource-downloader/test_bert_v2_newest.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("bert_base_cased", libVersion = Version(List(3, 3, 0)),
sparkVersion = Version(List(2, 4)))
val expectedSparkNLPVersion = Version(List(3, 3, 0))
val expectedSparkVersion = Version(List(2, 4))
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
}
it should "get most recent model when spark and spark-nlp versions are the same" in {
val resourcePath = "src/test/resources/resource-downloader/test_bert_v2_newest.json"
val mockResourceDownloader: MockResourceDownloader = new MockResourceDownloader(resourcePath)
val resourceMetadata = mockResourceDownloader.resources
val resourceRequest = ResourceRequest("tfhub_use_multi", libVersion = Version(List(3, 3, 4)),
sparkVersion = Version(List(3, 0)))
val expectedSparkNLPVersion = Version(List(3, 3, 0))
val expectedSparkVersion = Version(List(3, 0))
val expectedTimestamp = getTimestamp("2021-05-06T17:52:37.778Z")
val versions = ResourceMetadata.resolveResource(resourceMetadata, resourceRequest)
assert(versions.get.sparkVersion.get == expectedSparkVersion)
assert(versions.get.libVersion.get == expectedSparkNLPVersion)
assert(versions.get.time == expectedTimestamp)
}
private def getTimestamp(date: String): Timestamp = {
val UTC = TimeZone.getTimeZone("UTC")
val dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
dateFormat.setTimeZone(UTC)
val parsedDate = dateFormat.parse(date)
new Timestamp(parsedDate.getTime)
}
}
| JohnSnowLabs/spark-nlp | src/test/scala/com/johnsnowlabs/nlp/pretrained/ResourceMedataTest.scala | Scala | apache-2.0 | 12,933 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.serializer
import java.io._
import java.nio.ByteBuffer
import java.util.Locale
import javax.annotation.Nullable
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
import scala.util.control.NonFatal
import com.esotericsoftware.kryo.{Kryo, KryoException, Serializer => KryoClassSerializer}
import com.esotericsoftware.kryo.io.{Input => KryoInput, Output => KryoOutput}
import com.esotericsoftware.kryo.io.{UnsafeInput => KryoUnsafeInput, UnsafeOutput => KryoUnsafeOutput}
import com.esotericsoftware.kryo.pool.{KryoCallback, KryoFactory, KryoPool}
import com.esotericsoftware.kryo.serializers.{JavaSerializer => KryoJavaSerializer}
import com.twitter.chill.{AllScalaRegistrar, EmptyScalaKryoInstantiator}
import org.apache.avro.generic.{GenericData, GenericRecord}
import org.roaringbitmap.RoaringBitmap
import org.apache.spark._
import org.apache.spark.api.python.PythonBroadcast
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.Kryo._
import org.apache.spark.network.util.ByteUnit
import org.apache.spark.scheduler.{CompressedMapStatus, HighlyCompressedMapStatus}
import org.apache.spark.storage._
import org.apache.spark.util.{BoundedPriorityQueue, ByteBufferInputStream, SerializableConfiguration, SerializableJobConf, Utils}
import org.apache.spark.util.collection.CompactBuffer
/**
* A Spark serializer that uses the <a href="https://code.google.com/p/kryo/">
* Kryo serialization library</a>.
*
* @note This serializer is not guaranteed to be wire-compatible across different versions of
* Spark. It is intended to be used to serialize/de-serialize data within a single
* Spark application.
*/
class KryoSerializer(conf: SparkConf)
extends org.apache.spark.serializer.Serializer
with Logging
with Serializable {
private val bufferSizeKb = conf.get(KRYO_SERIALIZER_BUFFER_SIZE)
if (bufferSizeKb >= ByteUnit.GiB.toKiB(2)) {
throw new IllegalArgumentException(s"${KRYO_SERIALIZER_BUFFER_SIZE.key} must be less than " +
s"2048 MiB, got: + ${ByteUnit.KiB.toMiB(bufferSizeKb)} MiB.")
}
private val bufferSize = ByteUnit.KiB.toBytes(bufferSizeKb).toInt
val maxBufferSizeMb = conf.get(KRYO_SERIALIZER_MAX_BUFFER_SIZE).toInt
if (maxBufferSizeMb >= ByteUnit.GiB.toMiB(2)) {
throw new IllegalArgumentException(s"${KRYO_SERIALIZER_MAX_BUFFER_SIZE.key} must be less " +
s"than 2048 MiB, got: $maxBufferSizeMb MiB.")
}
private val maxBufferSize = ByteUnit.MiB.toBytes(maxBufferSizeMb).toInt
private val referenceTracking = conf.get(KRYO_REFERENCE_TRACKING)
private val registrationRequired = conf.get(KRYO_REGISTRATION_REQUIRED)
private val userRegistrators = conf.get(KRYO_USER_REGISTRATORS)
.map(_.trim)
.filter(!_.isEmpty)
private val classesToRegister = conf.get(KRYO_CLASSES_TO_REGISTER)
.map(_.trim)
.filter(!_.isEmpty)
private val avroSchemas = conf.getAvroSchema
// whether to use unsafe based IO for serialization
private val useUnsafe = conf.get(KRYO_USE_UNSAFE)
private val usePool = conf.get(KRYO_USE_POOL)
def newKryoOutput(): KryoOutput =
if (useUnsafe) {
new KryoUnsafeOutput(bufferSize, math.max(bufferSize, maxBufferSize))
} else {
new KryoOutput(bufferSize, math.max(bufferSize, maxBufferSize))
}
@transient
private lazy val factory: KryoFactory = new KryoFactory() {
override def create: Kryo = {
newKryo()
}
}
private class PoolWrapper extends KryoPool {
private var pool: KryoPool = getPool
override def borrow(): Kryo = pool.borrow()
override def release(kryo: Kryo): Unit = pool.release(kryo)
override def run[T](kryoCallback: KryoCallback[T]): T = pool.run(kryoCallback)
def reset(): Unit = {
pool = getPool
}
private def getPool: KryoPool = {
new KryoPool.Builder(factory).softReferences.build
}
}
@transient
private lazy val internalPool = new PoolWrapper
def pool: KryoPool = internalPool
def newKryo(): Kryo = {
val instantiator = new EmptyScalaKryoInstantiator
val kryo = instantiator.newKryo()
kryo.setRegistrationRequired(registrationRequired)
val classLoader = defaultClassLoader.getOrElse(Thread.currentThread.getContextClassLoader)
// Allow disabling Kryo reference tracking if user knows their object graphs don't have loops.
// Do this before we invoke the user registrator so the user registrator can override this.
kryo.setReferences(referenceTracking)
for (cls <- KryoSerializer.toRegister) {
kryo.register(cls)
}
for ((cls, ser) <- KryoSerializer.toRegisterSerializer) {
kryo.register(cls, ser)
}
// For results returned by asJavaIterable. See JavaIterableWrapperSerializer.
kryo.register(JavaIterableWrapperSerializer.wrapperClass, new JavaIterableWrapperSerializer)
// Allow sending classes with custom Java serializers
kryo.register(classOf[SerializableWritable[_]], new KryoJavaSerializer())
kryo.register(classOf[SerializableConfiguration], new KryoJavaSerializer())
kryo.register(classOf[SerializableJobConf], new KryoJavaSerializer())
kryo.register(classOf[PythonBroadcast], new KryoJavaSerializer())
kryo.register(classOf[GenericRecord], new GenericAvroSerializer(avroSchemas))
kryo.register(classOf[GenericData.Record], new GenericAvroSerializer(avroSchemas))
// Use the default classloader when calling the user registrator.
Utils.withContextClassLoader(classLoader) {
try {
// Register classes given through spark.kryo.classesToRegister.
classesToRegister.foreach { className =>
kryo.register(Utils.classForName(className, noSparkClassLoader = true))
}
// Allow the user to register their own classes by setting spark.kryo.registrator.
userRegistrators
.map(Utils.classForName[KryoRegistrator](_, noSparkClassLoader = true).
getConstructor().newInstance())
.foreach { reg => reg.registerClasses(kryo) }
} catch {
case e: Exception =>
throw new SparkException(s"Failed to register classes with Kryo", e)
}
}
// Register Chill's classes; we do this after our ranges and the user's own classes to let
// our code override the generic serializers in Chill for things like Seq
new AllScalaRegistrar().apply(kryo)
// Register types missed by Chill.
// scalastyle:off
kryo.register(classOf[Array[Tuple1[Any]]])
kryo.register(classOf[Array[Tuple2[Any, Any]]])
kryo.register(classOf[Array[Tuple3[Any, Any, Any]]])
kryo.register(classOf[Array[Tuple4[Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple5[Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple6[Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple7[Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple8[Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple9[Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple10[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple11[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple12[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple13[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple14[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple15[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple16[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple17[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple18[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple19[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple20[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple21[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
kryo.register(classOf[Array[Tuple22[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]]])
// scalastyle:on
kryo.register(None.getClass)
kryo.register(Nil.getClass)
kryo.register(Utils.classForName("scala.collection.immutable.$colon$colon"))
kryo.register(Utils.classForName("scala.collection.immutable.Map$EmptyMap$"))
kryo.register(classOf[ArrayBuffer[Any]])
// We can't load those class directly in order to avoid unnecessary jar dependencies.
// We load them safely, ignore it if the class not found.
KryoSerializer.loadableSparkClasses.foreach { clazz =>
try {
kryo.register(clazz)
} catch {
case NonFatal(_) => // do nothing
case _: NoClassDefFoundError if Utils.isTesting => // See SPARK-23422.
}
}
kryo.setClassLoader(classLoader)
kryo
}
  /**
   * Sets the default class loader used when deserializing and clears the internal
   * Kryo pool, since pooled instances were built against the previous loader.
   */
  override def setDefaultClassLoader(classLoader: ClassLoader): Serializer = {
    super.setDefaultClassLoader(classLoader)
    // Drop pooled Kryo instances so future borrows pick up the new class loader.
    internalPool.reset()
    this
  }
  /** Creates a new serializer instance backed by this serializer's Kryo configuration. */
  override def newInstance(): SerializerInstance = {
    new KryoSerializerInstance(this, useUnsafe, usePool)
  }
  // Serialized-object relocation is only safe when Kryo's auto-reset is on; probe a
  // fresh instance once (lazy) since the answer depends only on configuration.
  private[spark] override lazy val supportsRelocationOfSerializedObjects: Boolean = {
    // If auto-reset is disabled, then Kryo may store references to duplicate occurrences of objects
    // in the stream rather than writing those objects' serialized bytes, breaking relocation. See
    // https://groups.google.com/d/msg/kryo-users/6ZUSyfjjtdo/FhGG1KHDXPgJ for more details.
    newInstance().asInstanceOf[KryoSerializerInstance].getAutoReset()
  }
}
private[spark]
class KryoSerializationStream(
    serInstance: KryoSerializerInstance,
    outStream: OutputStream,
    useUnsafe: Boolean) extends SerializationStream {

  // Kryo sink wrapping outStream; nulled out once the stream is closed so that a
  // second close() is a no-op and a flush() after close fails loudly.
  private[this] var sink: KryoOutput =
    if (useUnsafe) new KryoUnsafeOutput(outStream) else new KryoOutput(outStream)

  // Kryo instance borrowed from the owning serializer instance; handed back on close().
  private[this] var borrowedKryo: Kryo = serInstance.borrowKryo()

  /** Writes `t` (class tag included) to the underlying stream and returns this stream. */
  override def writeObject[T: ClassTag](t: T): SerializationStream = {
    borrowedKryo.writeClassAndObject(sink, t)
    this
  }

  /** Flushes buffered bytes to `outStream`; throws if the stream was already closed. */
  override def flush(): Unit = {
    if (sink == null) {
      throw new IOException("Stream is closed")
    }
    sink.flush()
  }

  /** Closes the sink and releases the borrowed Kryo instance; safe to call repeatedly. */
  override def close(): Unit = {
    if (sink != null) {
      try {
        sink.close()
      } finally {
        serInstance.releaseKryo(borrowedKryo)
        borrowedKryo = null
        sink = null
      }
    }
  }
}
private[spark]
class KryoDeserializationStream(
    serInstance: KryoSerializerInstance,
    inStream: InputStream,
    useUnsafe: Boolean) extends DeserializationStream {

  // Kryo source wrapping inStream; nulled out once the stream is closed.
  private[this] var source: KryoInput =
    if (useUnsafe) new KryoUnsafeInput(inStream) else new KryoInput(inStream)

  // Kryo instance borrowed from the owning serializer instance; handed back on close().
  private[this] var borrowedKryo: Kryo = serInstance.borrowKryo()

  /**
   * Reads the next object from the stream. A Kryo "buffer underflow" error is
   * translated into [[EOFException]], which DeserializationStream callers use as
   * the end-of-stream signal.
   */
  override def readObject[T: ClassTag](): T = {
    try {
      borrowedKryo.readClassAndObject(source).asInstanceOf[T]
    } catch {
      case e: KryoException
        if e.getMessage.toLowerCase(Locale.ROOT).contains("buffer underflow") =>
        throw new EOFException
    }
  }

  /** Closes the source and releases the borrowed Kryo instance; safe to call repeatedly. */
  override def close(): Unit = {
    if (source != null) {
      try {
        // Kryo's Input automatically closes the input stream it is using.
        source.close()
      } finally {
        serInstance.releaseKryo(borrowedKryo)
        borrowedKryo = null
        source = null
      }
    }
  }
}
private[spark] class KryoSerializerInstance(
    ks: KryoSerializer, useUnsafe: Boolean, usePool: Boolean)
  extends SerializerInstance {
  /**
   * A re-used [[Kryo]] instance. Methods will borrow this instance by calling `borrowKryo()`, do
   * their work, then release the instance by calling `releaseKryo()`. Logically, this is a caching
   * pool of size one. SerializerInstances are not thread-safe, hence accesses to this field are
   * not synchronized.
   */
  @Nullable private[this] var cachedKryo: Kryo = if (usePool) null else borrowKryo()
  /**
   * Borrows a [[Kryo]] instance. If possible, this tries to re-use a cached Kryo instance;
   * otherwise, it allocates a new instance.
   */
  private[serializer] def borrowKryo(): Kryo = {
    if (usePool) {
      val kryo = ks.pool.borrow()
      kryo.reset()
      kryo
    } else {
      if (cachedKryo != null) {
        val kryo = cachedKryo
        // As a defensive measure, call reset() to clear any Kryo state that might have
        // been modified by the last operation to borrow this instance
        // (see SPARK-7766 for discussion of this issue)
        kryo.reset()
        cachedKryo = null
        kryo
      } else {
        ks.newKryo()
      }
    }
  }
  /**
   * Release a borrowed [[Kryo]] instance. If this serializer instance already has a cached Kryo
   * instance, then the given Kryo instance is discarded; otherwise, the Kryo is stored for later
   * re-use.
   */
  private[serializer] def releaseKryo(kryo: Kryo): Unit = {
    if (usePool) {
      ks.pool.release(kryo)
    } else {
      if (cachedKryo == null) {
        cachedKryo = kryo
      }
    }
  }
  // Make these lazy vals to avoid creating a buffer unless we use them.
  private lazy val output = ks.newKryoOutput()
  private lazy val input = if (useUnsafe) new KryoUnsafeInput() else new KryoInput()
  /**
   * Serializes `t` into a fresh ByteBuffer. A Kryo "Buffer overflow" is rethrown as
   * a SparkException that points the user at the max-buffer-size config knob.
   */
  override def serialize[T: ClassTag](t: T): ByteBuffer = {
    output.clear()
    val kryo = borrowKryo()
    try {
      kryo.writeClassAndObject(output, t)
    } catch {
      case e: KryoException if e.getMessage.startsWith("Buffer overflow") =>
        throw new SparkException(s"Kryo serialization failed: ${e.getMessage}. To avoid this, " +
          s"increase ${KRYO_SERIALIZER_MAX_BUFFER_SIZE.key} value.", e)
    } finally {
      releaseKryo(kryo)
    }
    ByteBuffer.wrap(output.toBytes)
  }
  /**
   * Deserializes an object from `bytes`. Array-backed buffers are read in place;
   * direct buffers are streamed through a small (4096-byte) staging buffer.
   */
  override def deserialize[T: ClassTag](bytes: ByteBuffer): T = {
    val kryo = borrowKryo()
    try {
      if (bytes.hasArray) {
        input.setBuffer(bytes.array(), bytes.arrayOffset() + bytes.position(), bytes.remaining())
      } else {
        input.setBuffer(new Array[Byte](4096))
        input.setInputStream(new ByteBufferInputStream(bytes))
      }
      kryo.readClassAndObject(input).asInstanceOf[T]
    } finally {
      releaseKryo(kryo)
    }
  }
  /**
   * Deserializes an object from `bytes` using `loader` to resolve classes; the
   * Kryo instance's original class loader is restored afterwards.
   */
  override def deserialize[T: ClassTag](bytes: ByteBuffer, loader: ClassLoader): T = {
    val kryo = borrowKryo()
    val oldClassLoader = kryo.getClassLoader
    try {
      kryo.setClassLoader(loader)
      if (bytes.hasArray) {
        input.setBuffer(bytes.array(), bytes.arrayOffset() + bytes.position(), bytes.remaining())
      } else {
        input.setBuffer(new Array[Byte](4096))
        input.setInputStream(new ByteBufferInputStream(bytes))
      }
      kryo.readClassAndObject(input).asInstanceOf[T]
    } finally {
      kryo.setClassLoader(oldClassLoader)
      releaseKryo(kryo)
    }
  }
  /** Wraps `s` in a Kryo-backed serialization stream. */
  override def serializeStream(s: OutputStream): SerializationStream = {
    new KryoSerializationStream(this, s, useUnsafe)
  }
  /** Wraps `s` in a Kryo-backed deserialization stream. */
  override def deserializeStream(s: InputStream): DeserializationStream = {
    new KryoDeserializationStream(this, s, useUnsafe)
  }
  /**
   * Returns true if auto-reset is on. The only reason this would be false is if the user-supplied
   * registrator explicitly turns auto-reset off.
   */
  def getAutoReset(): Boolean = {
    // Kryo exposes no getter for autoReset, so read the private field reflectively.
    val field = classOf[Kryo].getDeclaredField("autoReset")
    field.setAccessible(true)
    val kryo = borrowKryo()
    try {
      field.get(kryo).asInstanceOf[Boolean]
    } finally {
      releaseKryo(kryo)
    }
  }
}
/**
 * Interface implemented by clients to register their classes with Kryo when using Kryo
 * serialization.
 */
trait KryoRegistrator {
  /** Invoked for each newly created Kryo instance; register application classes on `kryo`. */
  def registerClasses(kryo: Kryo): Unit
}
private[serializer] object KryoSerializer {
  // Commonly used classes.
  private val toRegister: Seq[Class[_]] = Seq(
    ByteBuffer.allocate(1).getClass,
    classOf[StorageLevel],
    classOf[CompressedMapStatus],
    classOf[HighlyCompressedMapStatus],
    classOf[CompactBuffer[_]],
    classOf[BlockManagerId],
    classOf[Array[Boolean]],
    classOf[Array[Byte]],
    classOf[Array[Short]],
    classOf[Array[Int]],
    classOf[Array[Long]],
    classOf[Array[Float]],
    classOf[Array[Double]],
    classOf[Array[Char]],
    classOf[Array[String]],
    classOf[Array[Array[String]]],
    classOf[BoundedPriorityQueue[_]],
    classOf[SparkConf]
  )
  // Custom serializers keyed by the class they handle. RoaringBitmap uses its own
  // (de)serialize methods via the Kryo <-> ObjectInput/ObjectOutput bridge classes.
  private val toRegisterSerializer = Map[Class[_], KryoClassSerializer[_]](
    classOf[RoaringBitmap] -> new KryoClassSerializer[RoaringBitmap]() {
      override def write(kryo: Kryo, output: KryoOutput, bitmap: RoaringBitmap): Unit = {
        bitmap.serialize(new KryoOutputObjectOutputBridge(kryo, output))
      }
      override def read(kryo: Kryo, input: KryoInput, cls: Class[RoaringBitmap]): RoaringBitmap = {
        val ret = new RoaringBitmap
        ret.deserialize(new KryoInputObjectInputBridge(kryo, input))
        ret
      }
    }
  )
  // classForName() is expensive in case the class is not found, so we filter the list of
  // SQL / ML / MLlib classes once and then re-use that filtered list in newInstance() calls.
  private lazy val loadableSparkClasses: Seq[Class[_]] = {
    Seq(
      "org.apache.spark.sql.catalyst.expressions.UnsafeRow",
      "org.apache.spark.sql.catalyst.expressions.UnsafeArrayData",
      "org.apache.spark.sql.catalyst.expressions.UnsafeMapData",
      "org.apache.spark.ml.attribute.Attribute",
      "org.apache.spark.ml.attribute.AttributeGroup",
      "org.apache.spark.ml.attribute.BinaryAttribute",
      "org.apache.spark.ml.attribute.NominalAttribute",
      "org.apache.spark.ml.attribute.NumericAttribute",
      "org.apache.spark.ml.feature.Instance",
      "org.apache.spark.ml.feature.LabeledPoint",
      "org.apache.spark.ml.feature.OffsetInstance",
      "org.apache.spark.ml.linalg.DenseMatrix",
      "org.apache.spark.ml.linalg.DenseVector",
      "org.apache.spark.ml.linalg.Matrix",
      "org.apache.spark.ml.linalg.SparseMatrix",
      "org.apache.spark.ml.linalg.SparseVector",
      "org.apache.spark.ml.linalg.Vector",
      "org.apache.spark.ml.stat.distribution.MultivariateGaussian",
      "org.apache.spark.ml.tree.impl.TreePoint",
      "org.apache.spark.mllib.clustering.VectorWithNorm",
      "org.apache.spark.mllib.linalg.DenseMatrix",
      "org.apache.spark.mllib.linalg.DenseVector",
      "org.apache.spark.mllib.linalg.Matrix",
      "org.apache.spark.mllib.linalg.SparseMatrix",
      "org.apache.spark.mllib.linalg.SparseVector",
      "org.apache.spark.mllib.linalg.Vector",
      "org.apache.spark.mllib.regression.LabeledPoint",
      "org.apache.spark.mllib.stat.distribution.MultivariateGaussian"
    ).flatMap { name =>
      // Classes from optional modules may be absent at runtime; silently skip those.
      try {
        Some[Class[_]](Utils.classForName(name))
      } catch {
        case NonFatal(_) => None // do nothing
        case _: NoClassDefFoundError if Utils.isTesting => None // See SPARK-23422.
      }
    }
  }
}
/**
 * This is a bridge class to wrap KryoInput as an InputStream and ObjectInput. It forwards all
 * methods of InputStream and ObjectInput to KryoInput. It's usually helpful when an API expects
 * an InputStream or ObjectInput but you want to use Kryo.
 */
private[spark] class KryoInputObjectInputBridge(
    kryo: Kryo, input: KryoInput) extends FilterInputStream(input) with ObjectInput {
  // Primitive reads, delegated straight to Kryo's Input, ordered by width.
  override def readBoolean(): Boolean = input.readBoolean()
  override def readByte(): Byte = input.readByte()
  override def readUnsignedByte(): Int = input.readByteUnsigned()
  override def readChar(): Char = input.readChar()
  override def readShort(): Short = input.readShort()
  override def readUnsignedShort(): Int = input.readShortUnsigned()
  override def readInt(): Int = input.readInt()
  override def readLong(): Long = input.readLong()
  override def readFloat(): Float = input.readFloat()
  override def readDouble(): Double = input.readDouble()
  // Kryo's readString reads UTF-8, mirroring what writeUTF produced on the other side.
  override def readUTF(): String = input.readString()
  // Bulk reads. NOTE(review): these delegate to Kryo's read(), which is assumed to
  // fill the whole buffer here -- confirm against the Kryo Input contract.
  override def readFully(b: Array[Byte]): Unit = input.read(b)
  override def readFully(b: Array[Byte], off: Int, len: Int): Unit = input.read(b, off, len)
  override def skipBytes(n: Int): Int = {
    input.skip(n)
    n
  }
  // Line-oriented input has no meaning on a Kryo stream.
  override def readLine(): String = throw new UnsupportedOperationException("readLine")
  // Full object-graph read, including the class tag written by writeClassAndObject.
  override def readObject(): AnyRef = kryo.readClassAndObject(input)
}
/**
 * This is a bridge class to wrap KryoOutput as an OutputStream and ObjectOutput. It forwards all
 * methods of OutputStream and ObjectOutput to KryoOutput. It's usually helpful when an API expects
 * an OutputStream or ObjectOutput but you want to use Kryo.
 */
private[spark] class KryoOutputObjectOutputBridge(
    kryo: Kryo, output: KryoOutput) extends FilterOutputStream(output) with ObjectOutput {
  // Primitive writes, delegated straight to Kryo's Output, ordered by width.
  override def writeBoolean(v: Boolean): Unit = output.writeBoolean(v)
  override def writeByte(v: Int): Unit = output.writeByte(v)
  override def writeChar(v: Int): Unit = output.writeChar(v.toChar)
  override def writeShort(v: Int): Unit = output.writeShort(v)
  override def writeInt(v: Int): Unit = output.writeInt(v)
  override def writeLong(v: Long): Unit = output.writeLong(v)
  override def writeFloat(v: Float): Unit = output.writeFloat(v)
  override def writeDouble(v: Double): Unit = output.writeDouble(v)
  // Kryo's writeString emits UTF-8, which is what writeUTF's callers expect.
  override def writeUTF(s: String): Unit = output.writeString(s)
  override def writeBytes(s: String): Unit = output.writeString(s)
  // There is no "readChars" counterpart, except maybe "readLine", which is not supported
  override def writeChars(s: String): Unit = throw new UnsupportedOperationException("writeChars")
  // Raw byte writes.
  override def write(b: Int): Unit = output.write(b)
  override def write(b: Array[Byte]): Unit = output.write(b)
  override def write(b: Array[Byte], off: Int, len: Int): Unit = output.write(b, off, len)
  // Full object-graph write, including the class tag.
  override def writeObject(obj: AnyRef): Unit = kryo.writeClassAndObject(output, obj)
}
/**
 * A Kryo serializer for serializing results returned by asJavaIterable.
 *
 * The underlying object is scala.collection.convert.Wrappers$IterableWrapper.
 * Kryo deserializes this into an AbstractCollection, which unfortunately doesn't work.
 */
private class JavaIterableWrapperSerializer
  extends com.esotericsoftware.kryo.Serializer[java.lang.Iterable[_]] {
  import JavaIterableWrapperSerializer._
  override def write(kryo: Kryo, out: KryoOutput, obj: java.lang.Iterable[_]): Unit = {
    // If the object is the wrapper, simply serialize the underlying Scala Iterable object.
    // Otherwise, serialize the object itself.
    if (obj.getClass == wrapperClass && underlyingMethodOpt.isDefined) {
      kryo.writeClassAndObject(out, underlyingMethodOpt.get.invoke(obj))
    } else {
      kryo.writeClassAndObject(out, obj)
    }
  }
  // Reads back what write() produced: either the unwrapped Scala Iterable (re-wrapped
  // here via asJava) or the original java.lang.Iterable. write() only emits these two
  // shapes, so the match is exhaustive in practice.
  override def read(kryo: Kryo, in: KryoInput, clz: Class[java.lang.Iterable[_]])
    : java.lang.Iterable[_] = {
    kryo.readClassAndObject(in) match {
      case scalaIterable: Iterable[_] => scalaIterable.asJava
      case javaIterable: java.lang.Iterable[_] => javaIterable
    }
  }
}
private object JavaIterableWrapperSerializer extends Logging {
  // The class returned by JavaConverters.asJava
  // (scala.collection.convert.Wrappers$IterableWrapper).
  import scala.collection.JavaConverters._
  val wrapperClass = Seq(1).asJava.getClass
  // Get the underlying method so we can use it to get the Scala collection for serialization.
  // If reflection fails (e.g. the wrapper's internals changed), we log and fall back to
  // None, which makes the companion serializer write the wrapper object as-is.
  private val underlyingMethodOpt = {
    try Some(wrapperClass.getDeclaredMethod("underlying")) catch {
      case e: Exception =>
        logError("Failed to find the underlying field in " + wrapperClass, e)
        None
    }
  }
}
| rezasafi/spark | core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala | Scala | apache-2.0 | 25,049 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600e.v3
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600e.v3.retriever.CT600EBoxRetriever
/**
 * CT600E v3 box E190: number of subsidiary or associated companies the charity
 * controls at the end of the period (companies dormant throughout the period are
 * excluded). Optional input; when present it must lie in the range 0 to 999.
 */
case class E190(value: Option[Int]) extends CtBoxIdentifier("Number of subsidiary or associated companies the charity controls at the end of the period. Exclude companies that were dormant throughout the period") with CtOptionalInteger with Input with ValidatableBox[CT600EBoxRetriever]{
  // Range validation only; a missing value is allowed by validateIntegerRange.
  override def validate(boxRetriever: CT600EBoxRetriever): Set[CtValidation] = validateIntegerRange("E190", this, 0, 999)
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600e/v3/E190.scala | Scala | apache-2.0 | 1,143 |
package org.apache.spark
import org.apache.hadoop.mapreduce.SparkHadoopMapReduceUtil
// Empty marker trait mixing in SparkHadoopMapReduceUtil. NOTE(review): declared in the
// org.apache.spark package, presumably to make a package-restricted Spark utility
// reachable from this project's code -- verify against the trait's visibility.
trait SparkHadoopMapReduceUtilExtended extends SparkHadoopMapReduceUtil{
}
| mrsqueeze/SparkOnHBase | src/main/scala/org/apache/spark/SparkHadoopMapReduceUtilExtended.scala | Scala | apache-2.0 | 163 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command.v2
import org.apache.spark.sql.execution.command
/**
 * The class contains tests for the `CREATE NAMESPACE` command to check V2 table catalogs.
 */
class CreateNamespaceSuite extends command.CreateNamespaceSuiteBase with CommandSuiteBase {
  // Use a multi-level namespace so the shared suite exercises nested namespace creation.
  override def namespace: String = "ns1.ns2"
}
| ueshin/apache-spark | sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/CreateNamespaceSuite.scala | Scala | apache-2.0 | 1,138 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.kafka010
import java.security.PrivilegedExceptionAction
import org.apache.hadoop.io.Text
import org.apache.hadoop.security.UserGroupInformation
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.common.config.{SaslConfigs, SslConfigs}
import org.apache.kafka.common.security.auth.SecurityProtocol.{SASL_PLAINTEXT, SASL_SSL, SSL}
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.internal.config._
/**
 * Tests for KafkaTokenUtil: proxy-user rejection, admin-client property construction
 * for the supported security protocols, dynamic JAAS config generation, and delegation
 * token lookup against the current UGI.
 */
class KafkaTokenUtilSuite extends SparkFunSuite with KafkaDelegationTokenTest {
  private var sparkConf: SparkConf = null

  override def beforeEach(): Unit = {
    super.beforeEach()
    // Fresh conf per test so cluster configurations never leak between tests.
    sparkConf = new SparkConf()
  }

  test("checkProxyUser with proxy current user should throw exception") {
    val realUser = UserGroupInformation.createUserForTesting("realUser", Array())
    UserGroupInformation.createProxyUserForTesting("proxyUser", realUser, Array()).doAs(
      new PrivilegedExceptionAction[Unit]() {
        override def run(): Unit = {
          val thrown = intercept[IllegalArgumentException] {
            KafkaTokenUtil.checkProxyUser()
          }
          assert(thrown.getMessage contains
            "Obtaining delegation token for proxy user is not yet supported.")
        }
      }
    )
  }

  test("createAdminClientProperties with SASL_PLAINTEXT protocol should not include " +
      "keystore and truststore config") {
    val clusterConf = createClusterConf(identifier1, SASL_PLAINTEXT.name)

    val adminClientProperties = KafkaTokenUtil.createAdminClientProperties(sparkConf, clusterConf)

    assert(adminClientProperties.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)
      === bootStrapServers)
    assert(adminClientProperties.get(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG)
      === SASL_PLAINTEXT.name)
    assert(!adminClientProperties.containsKey(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG))
    assert(!adminClientProperties.containsKey(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG))
    assert(!adminClientProperties.containsKey(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG))
    assert(!adminClientProperties.containsKey(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG))
    assert(!adminClientProperties.containsKey(SslConfigs.SSL_KEY_PASSWORD_CONFIG))
  }

  test("createAdminClientProperties with SASL_SSL protocol should include truststore config") {
    val clusterConf = createClusterConf(identifier1, SASL_SSL.name)

    val adminClientProperties = KafkaTokenUtil.createAdminClientProperties(sparkConf, clusterConf)

    assert(adminClientProperties.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)
      === bootStrapServers)
    assert(adminClientProperties.get(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG)
      === SASL_SSL.name)
    assert(adminClientProperties.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG)
      === trustStoreLocation)
    assert(adminClientProperties.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)
      === trustStorePassword)
    assert(!adminClientProperties.containsKey(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG))
    assert(!adminClientProperties.containsKey(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG))
    assert(!adminClientProperties.containsKey(SslConfigs.SSL_KEY_PASSWORD_CONFIG))
  }

  test("createAdminClientProperties with SSL protocol should include keystore and truststore " +
      "config") {
    val clusterConf = createClusterConf(identifier1, SSL.name)

    val adminClientProperties = KafkaTokenUtil.createAdminClientProperties(sparkConf, clusterConf)

    assert(adminClientProperties.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)
      === bootStrapServers)
    assert(adminClientProperties.get(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG)
      === SSL.name)
    assert(adminClientProperties.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG)
      === trustStoreLocation)
    assert(adminClientProperties.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)
      === trustStorePassword)
    assert(adminClientProperties.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG) === keyStoreLocation)
    assert(adminClientProperties.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG) === keyStorePassword)
    assert(adminClientProperties.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG) === keyPassword)
  }

  test("createAdminClientProperties with global config should not set dynamic jaas config") {
    val clusterConf = createClusterConf(identifier1, SASL_SSL.name)
    setGlobalKafkaClientConfig()

    val adminClientProperties = KafkaTokenUtil.createAdminClientProperties(sparkConf, clusterConf)

    assert(adminClientProperties.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)
      === bootStrapServers)
    assert(adminClientProperties.get(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG)
      === SASL_SSL.name)
    assert(!adminClientProperties.containsKey(SaslConfigs.SASL_MECHANISM))
    assert(!adminClientProperties.containsKey(SaslConfigs.SASL_JAAS_CONFIG))
  }

  test("createAdminClientProperties with keytab should set keytab dynamic jaas config") {
    sparkConf.set(KEYTAB, keytab)
    sparkConf.set(PRINCIPAL, principal)
    val clusterConf = createClusterConf(identifier1, SASL_SSL.name)

    val adminClientProperties = KafkaTokenUtil.createAdminClientProperties(sparkConf, clusterConf)

    assert(adminClientProperties.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)
      === bootStrapServers)
    assert(adminClientProperties.get(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG)
      === SASL_SSL.name)
    assert(adminClientProperties.containsKey(SaslConfigs.SASL_MECHANISM))
    val saslJaasConfig = adminClientProperties.getProperty(SaslConfigs.SASL_JAAS_CONFIG)
    assert(saslJaasConfig.contains("Krb5LoginModule required"))
    assert(saslJaasConfig.contains(s"debug="))
    assert(saslJaasConfig.contains("useKeyTab=true"))
    assert(saslJaasConfig.contains(s"""keyTab="$keytab""""))
    assert(saslJaasConfig.contains(s"""principal="$principal""""))
  }

  test("createAdminClientProperties without keytab should set ticket cache dynamic jaas config") {
    val clusterConf = createClusterConf(identifier1, SASL_SSL.name)

    val adminClientProperties = KafkaTokenUtil.createAdminClientProperties(sparkConf, clusterConf)

    assert(adminClientProperties.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)
      === bootStrapServers)
    assert(adminClientProperties.get(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG)
      === SASL_SSL.name)
    assert(adminClientProperties.containsKey(SaslConfigs.SASL_MECHANISM))
    val saslJaasConfig = adminClientProperties.getProperty(SaslConfigs.SASL_JAAS_CONFIG)
    assert(saslJaasConfig.contains("Krb5LoginModule required"))
    assert(saslJaasConfig.contains(s"debug="))
    assert(saslJaasConfig.contains("useTicketCache=true"))
  }

  test("createAdminClientProperties with specified params should include it") {
    val clusterConf = createClusterConf(identifier1, SASL_SSL.name,
      Map("customKey" -> "customValue"))

    val adminClientProperties = KafkaTokenUtil.createAdminClientProperties(sparkConf, clusterConf)

    assert(adminClientProperties.get("customKey") === "customValue")
  }

  test("isGlobalJaasConfigurationProvided without global config should return false") {
    assert(!KafkaTokenUtil.isGlobalJaasConfigurationProvided)
  }

  // Fixed test name: the assertion checks that the global JAAS config IS detected.
  test("isGlobalJaasConfigurationProvided with global config should return true") {
    setGlobalKafkaClientConfig()

    assert(KafkaTokenUtil.isGlobalJaasConfigurationProvided)
  }

  test("findMatchingToken without token should return None") {
    assert(KafkaTokenUtil.findMatchingToken(sparkConf, bootStrapServers) === None)
  }

  test("findMatchingToken with non-matching tokens should return None") {
    sparkConf.set(s"spark.kafka.clusters.$identifier1.auth.bootstrap.servers", bootStrapServers)
    sparkConf.set(s"spark.kafka.clusters.$identifier1.target.bootstrap.servers.regex",
      nonMatchingTargetServersRegex)
    // Fixed key: use ".auth.bootstrap.servers" like every other test in this suite.
    sparkConf.set(s"spark.kafka.clusters.$identifier2.auth.bootstrap.servers", bootStrapServers)
    sparkConf.set(s"spark.kafka.clusters.$identifier2.target.bootstrap.servers.regex",
      matchingTargetServersRegex)
    addTokenToUGI(tokenService1)
    addTokenToUGI(new Text("intentionally_garbage"))

    assert(KafkaTokenUtil.findMatchingToken(sparkConf, bootStrapServers) === None)
  }

  test("findMatchingToken with one matching token should return cluster configuration") {
    sparkConf.set(s"spark.kafka.clusters.$identifier1.auth.bootstrap.servers", bootStrapServers)
    sparkConf.set(s"spark.kafka.clusters.$identifier1.target.bootstrap.servers.regex",
      matchingTargetServersRegex)
    addTokenToUGI(tokenService1)

    assert(KafkaTokenUtil.findMatchingToken(sparkConf, bootStrapServers) ===
      Some(KafkaTokenSparkConf.getClusterConfig(sparkConf, identifier1)))
  }

  test("findMatchingToken with multiple matching tokens should throw exception") {
    sparkConf.set(s"spark.kafka.clusters.$identifier1.auth.bootstrap.servers", bootStrapServers)
    sparkConf.set(s"spark.kafka.clusters.$identifier1.target.bootstrap.servers.regex",
      matchingTargetServersRegex)
    sparkConf.set(s"spark.kafka.clusters.$identifier2.auth.bootstrap.servers", bootStrapServers)
    sparkConf.set(s"spark.kafka.clusters.$identifier2.target.bootstrap.servers.regex",
      matchingTargetServersRegex)
    addTokenToUGI(tokenService1)
    addTokenToUGI(tokenService2)

    val thrown = intercept[IllegalArgumentException] {
      KafkaTokenUtil.findMatchingToken(sparkConf, bootStrapServers)
    }
    assert(thrown.getMessage.contains("More than one delegation token matches"))
  }

  test("getTokenJaasParams with token should return scram module") {
    addTokenToUGI(tokenService1)
    val clusterConf = createClusterConf(identifier1, SASL_SSL.name)

    val jaasParams = KafkaTokenUtil.getTokenJaasParams(clusterConf)

    assert(jaasParams.contains("ScramLoginModule required"))
    assert(jaasParams.contains("tokenauth=true"))
    assert(jaasParams.contains(tokenId))
    assert(jaasParams.contains(tokenPassword))
  }
}
| pgandhi999/spark | external/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaTokenUtilSuite.scala | Scala | apache-2.0 | 10,838 |
/*******************************************************************************
* Copyright (c) 2013 Guillaume DUBUISSON DUPLESSIS <guillaume.dubuisson_duplessis@insa-rouen.fr>.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Public License v3.0
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/gpl.html
*
* Contributors:
* Guillaume DUBUISSON DUPLESSIS <guillaume.dubuisson_duplessis@insa-rouen.fr> - initial API and implementation
******************************************************************************/
package list.P03
import util.ExerciseTemplate
trait P03 extends ExerciseTemplate {
  /*
   P03 (*) Find the Kth element of a list.
   By convention, the first element in the list is element 0.
   Example:
   scala> nth(2, List(1, 1, 2, 3, 5, 8))
   res0: Int = 2
   */
  val name = "P03 (Find the Kth element of a list)"
  // Returns the element at 0-based position n; implementations must throw
  // NoSuchElementException when n is out of range (see the tests below).
  def nth[A](n: Int, ls: List[A]): A

  test("Invoking nth on a non-empty list should return the nth element") {
    assert(nth(2, List(1, 1, 2, 3, 5, 8)) == 2)
    assert(nth(5, List(1, 1, 2, 3, 5, 8)) == 8)
  }

  // Edge case: no element exists at any index in an empty list.
  test("Invoking nth on an empty list should produce NoSuchElementException") {
    intercept[NoSuchElementException] {
      nth(0, List())
    }
  }

  // Edge case: index beyond the end of the list.
  test("Invoking nth on a k-size list (k <= n) should produce NoSuchElementException") {
    intercept[NoSuchElementException] {
      nth(42, List(1, 2, 3, 4, 4))
    }
  }
}
| GuillaumeDD/scala99problems | src/main/scala/list/P03/P03.scala | Scala | gpl-3.0 | 1,504 |
package com.twitter.zipkin.storage.cassandra
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.twitter.cassie._
import com.twitter.conversions.time._
import com.twitter.ostrich.stats.Stats
import com.twitter.zipkin.common.{Annotation, Span}
import com.twitter.zipkin.storage.{IndexedTraceId, TraceIdDuration, Index}
import com.twitter.zipkin.util.Util
import com.twitter.util.{Duration, Future}
import com.twitter.zipkin.Constants
import java.nio.ByteBuffer
import java.util.{Map => JMap}
import scala.collection.JavaConverters._
import scala.collection.Set
/**
* An index for the spans and traces using Cassandra with the Cassie client.
*/
case class CassandraIndex(
keyspace: Keyspace,
serviceNames: ColumnFamily[String, String, String],
spanNames: ColumnFamily[String, String, String],
serviceNameIndex: ColumnFamily[String, Long, String],
serviceSpanNameIndex: ColumnFamily[String, Long, String],
annotationsIndex: ColumnFamily[ByteBuffer, Long, String],
durationIndex: ColumnFamily[String, Long, String],
dataTimeToLive: Duration = 3.days
) extends Index {
  /** Releases the underlying Cassandra keyspace connection. */
  def close() {
    keyspace.close()
  }
  // store the span name used in this service
  private val CASSANDRA_STORE_SPAN_NAME = Stats.getCounter("cassandra_storespanname")
  private val CASSANDRA_STORE_SPAN_NAME_NO_SPAN_NAME = Stats.getCounter("cassandra_storespanname_nospanname")
  // store the service names
  private val CASSANDRA_STORE_SERVICE_NAME = Stats.getCounter("cassandra_storeservicename")
  private val CASSANDRA_STORE_SERVICE_NAME_NO_SERVICE_NAME = Stats.getCounter("cassandra_storeservicename_noservicename")
  // index the span by service name and span name
  private val CASSANDRA_INDEX_SPAN_BY_NAMES = Stats.getCounter("cassandra_indexspanbynames")
  // no annotations on the span being indexed
  private val CASSANDRA_INDEX_SPAN_BY_NAME_NO_LAST_ANNOTATION = Stats.getCounter("cassandra_indexspanbynames_nolastannotation")
  // index the span by annotations (both time and kv based)
  private val CASSANDRA_INDEX_SPAN_BY_ANNOTATIONS = Stats.getCounter("cassandra_indexspanbyannotations")
  // no annotations on the span being indexed
  private val CASSANDRA_INDEX_SPAN_BY_ANNOTATIONS_NO_LAST_ANNOTATION = Stats.getCounter("cassandra_indexspanbyannotations_nolastannotation")
  // find trace ids in the index by name
  private val CASSANDRA_GET_TRACE_IDS_BY_NAME = Stats.getCounter("cassandra_gettraceidsbyname")
  // find trace ids by annotation in the index
  private val CASSANDRA_GET_TRACE_IDS_BY_ANN = Stats.getCounter("cassandra_gettraceidsbyannotation")
  // get service names
  private val CASSANDRA_GET_SERVICE_NAMES = Stats.getCounter("cassandra_getservicenames")
  // get span names for a service
  private val CASSANDRA_GET_SPAN_NAMES = Stats.getCounter("cassandra_getspannames")
  // total Cassandra write requests issued by this index
  private val WRITE_REQUEST_COUNTER = Stats.getCounter("cassandra.write_request_counter")
  // single row key under which all known service names are stored
  private val SERVICE_NAMES_KEY = "servicenames"
  // used to delimit the key value annotation parts in the index
  private val INDEX_DELIMITER = ":"
  // expose the configured TTL (in days) as a gauge
  Stats.addGauge("cassandra_ttl_days") { dataTimeToLive.inDays }
private def encode(serviceName: String, index: String) = {
Array(serviceName, index).mkString(INDEX_DELIMITER)
}
/** Returns all known service names, read from the single well-known row. */
def getServiceNames: Future[Set[String]] = {
  CASSANDRA_GET_SERVICE_NAMES.incr
  serviceNames.getRow(SERVICE_NAMES_KEY).map(_.values.asScala.map(v => v.name).toSet)
}

/** Returns all span names recorded for the given service (row key = service name). */
def getSpanNames(service: String): Future[Set[String]] = {
  CASSANDRA_GET_SPAN_NAMES.incr
  spanNames.getRow(service).map(_.values.asScala.map(v => v.name).toSet)
}
/*
* Storage write methods
* ---------------------
*/
/**
 * Records every non-empty service name of the span under the well-known
 * service-names row, lowercased, with the configured TTL. Empty names are
 * counted and skipped.
 */
def indexServiceName(span: Span) : Future[Unit] = {
  CASSANDRA_STORE_SERVICE_NAME.incr
  val writes = span.serviceNames.map {
    case "" =>
      // Nothing useful to store; just account for the omission.
      CASSANDRA_STORE_SERVICE_NAME_NO_SERVICE_NAME.incr()
      Future.Unit
    case name =>
      WRITE_REQUEST_COUNTER.incr()
      val column = Column[String, String](name.toLowerCase, "").ttl(dataTimeToLive)
      serviceNames.insert(SERVICE_NAMES_KEY, column)
  }
  Future.join(writes.toSeq)
}
/**
 * Records the span's name (lowercased) under each of its service names,
 * with the configured TTL. Spans without a name are counted and skipped.
 */
def indexSpanNameByService(span: Span) : Future[Unit] = {
  CASSANDRA_STORE_SPAN_NAME.incr
  if (span.name == "") {
    CASSANDRA_STORE_SPAN_NAME_NO_SPAN_NAME.incr()
    Future.Unit
  } else {
    val spanNameCol = Column[String, String](span.name.toLowerCase, "").ttl(dataTimeToLive)
    Future.join {
      // Fix: the previous form `map { WRITE_REQUEST_COUNTER.incr(); spanNames.insert(_, col) }`
      // evaluated the incr() only once, when the block producing the lambda was
      // built, so the write counter recorded one write per span instead of one
      // per service name. An explicit lambda increments once per actual insert.
      span.serviceNames.map { serviceName =>
        WRITE_REQUEST_COUNTER.incr()
        spanNames.insert(serviceName, spanNameCol)
      }.toSeq
    }
  }
}
/*
 * Index read methods
 * ------------------
 */

/**
 * Finds trace ids for a service (and optionally a specific span name),
 * scanning the relevant index newest-first starting from `endTs`.
 *
 * NOTE(review): both keys are lowercased here, but the write path
 * (indexTraceIdByServiceAndName) stores `serviceName` without lowercasing —
 * confirm service names are normalized before they reach the index.
 */
def getTraceIdsByName(serviceName: String, spanName: Option[String],
  endTs: Long, limit: Int): Future[Seq[IndexedTraceId]] = {
  CASSANDRA_GET_TRACE_IDS_BY_NAME.incr
  // if we have a span name, look up in the service + span name index
  // if not, look up by service name only
  val row = spanName match {
    case Some(span) =>
      val key = serviceName.toLowerCase + "." + span.toLowerCase
      serviceSpanNameIndex.getRowSlice(key, Some(endTs), None, limit, Order.Reversed)
    case None =>
      val key = serviceName.toLowerCase
      serviceNameIndex.getRowSlice(key, Some(endTs), None, limit, Order.Reversed)
  }
  // Each returned column is (timestamp -> traceId):
  // Future[Seq[Column[Long, Long]]] => Future[Seq[IndexedTraceId]]
  row map {
    _.map { column =>
      IndexedTraceId(traceId = column.value, timestamp = column.name)
    }
  }
}
/**
 * Finds trace ids by a (service, annotation) pair, optionally narrowed by a
 * binary-annotation value, scanning the annotation index newest-first from `endTs`.
 *
 * NOTE(review): the `Some(v)` branch does not lowercase `annotation` while the
 * `None` branch does, and neither branch lowercases `service`. The write path
 * (indexSpanByAnnotations) lowercases service/value for timed annotations but
 * not for binary-annotation keys — confirm the intended case-sensitivity contract.
 */
def getTraceIdsByAnnotation(service: String, annotation: String, value: Option[ByteBuffer],
  endTs: Long, limit: Int): Future[Seq[IndexedTraceId]] = {
  CASSANDRA_GET_TRACE_IDS_BY_ANN.incr
  val row = value match {
    case Some(v) => {
      // Key layout: service:annotation:<raw value bytes>
      val key = ByteBuffer.wrap(encode(service, annotation).getBytes ++ INDEX_DELIMITER.getBytes ++ Util.getArrayFromBuffer(v))
      annotationsIndex.getRowSlice(key, Some(endTs), None, limit, Order.Reversed)
    }
    case None =>
      // Key layout: service:annotation
      val key = ByteBuffer.wrap(encode(service, annotation.toLowerCase).getBytes)
      annotationsIndex.getRowSlice(key, Some(endTs), None, limit, Order.Reversed)
  }
  // Each column is (timestamp -> traceId).
  row map {
    _.map { column =>
      IndexedTraceId(traceId = column.value, timestamp = column.name)
    }
  }
}
// Pairs a trace id with the (optional) timestamp of its first or last column.
case class TraceIdTimestamp(traceId: String, timestamp: Option[Long])

/**
 * Fetch the duration or an estimate thereof from the traces.
 *
 * Reads the first (Order.Normal) and last (Order.Reversed) duration-index column
 * of each trace and subtracts the timestamps.
 *
 * NOTE(review): `start.zip(end)` assumes both multiget results iterate in the
 * same trace-id order; the `startId == endId` guard silently drops mismatched
 * pairs instead of re-aligning them — confirm this is acceptable.
 */
def getTracesDuration(traceIds: Seq[String]): Future[Seq[TraceIdDuration]] = {
  val startRows = durationIndex.multigetRows(traceIds.toSet.asJava, None, None, Order.Normal, 1)
  val traceStartTimestamp = getTraceIdTimestamp(startRows)
  val endRows = durationIndex.multigetRows(traceIds.toSet.asJava, None, None, Order.Reversed, 1)
  val traceEndTimestamp = getTraceIdTimestamp(endRows)
  traceStartTimestamp.join(traceEndTimestamp).map { case (start, end) =>
    start.zip(end).collect {
      case (TraceIdTimestamp(startId, Some(startTs)), TraceIdTimestamp(endId, Some(endTs))) if (startId == endId) =>
        TraceIdDuration(endId, endTs - startTs, startTs)
    }.toSeq
  }
}
/**
 * Extracts, for each returned row, the row key and the column key of its single
 * column. Note: the bound variable `ts` is actually the row key (a trace id),
 * not a timestamp; the column key is the timestamp.
 */
private def getTraceIdTimestamp(rowsFuture: Future[JMap[String, JMap[Long, Column[Long, String]]]]):
  Future[Iterable[TraceIdTimestamp]] = {
  rowsFuture.map { rows =>
    rows.asScala.map { case (ts, cols) =>
      // should only be one column per row (the multiget uses a count of 1)
      TraceIdTimestamp(ts, cols.entrySet().asScala.headOption.map(_.getKey))
    }
  }
}
/*
 * Index write methods
 * -------------------
 */

/**
 * Indexes the span's trace id under both the "service.spanName" key and the
 * bare service-name key, using the last annotation's timestamp as column name.
 *
 * NOTE(review): `serviceName` is stored as-is here while the read path
 * (getTraceIdsByName) lowercases it — confirm names are normalized upstream.
 *
 * The early `return` below executes inside the by-name argument of `getOrElse`
 * and unwinds via a control-flow exception; it works, but is fragile under
 * refactoring (e.g. moving the block into a lambda).
 */
def indexTraceIdByServiceAndName(span: Span) : Future[Unit] = {
  CASSANDRA_INDEX_SPAN_BY_NAMES.incr
  val lastAnnotation = span.lastAnnotation getOrElse {
    // Without an annotation there is no timestamp to index under.
    CASSANDRA_INDEX_SPAN_BY_NAME_NO_LAST_ANNOTATION.incr
    return Future.Unit
  }
  val timestamp = lastAnnotation.timestamp
  val serviceNames = span.serviceNames
  val futures = serviceNames.map(serviceName => {
    WRITE_REQUEST_COUNTER.incr()
    // "service.spanName" -> (timestamp, traceId)
    val serviceSpanIndexKey = serviceName + "." + span.name.toLowerCase
    val serviceSpanIndexCol = Column[Long, String](timestamp, span.traceId).ttl(dataTimeToLive)
    val serviceSpanNameFuture = serviceSpanNameIndex.insert(serviceSpanIndexKey, serviceSpanIndexCol)
    WRITE_REQUEST_COUNTER.incr()
    // "service" -> (timestamp, traceId)
    val serviceIndexCol = Column[Long, String](timestamp, span.traceId).ttl(dataTimeToLive)
    val serviceNameFuture = serviceNameIndex.insert(serviceName, serviceIndexCol)
    List(serviceSpanNameFuture, serviceNameFuture)
  }).toList.flatten
  Future.join(futures)
}
/**
 * Indexes the span's trace id by its annotations, in one Cassandra batch:
 *  - for each non-core timed annotation value: key "service:value" (lowercased);
 *  - for each binary annotation: both "service:key:value" and "service:key".
 *
 * Uses the same early-return-in-by-name-argument pattern as
 * indexTraceIdByServiceAndName (see the note there).
 *
 * NOTE(review): binary-annotation keys are not lowercased here while timed
 * annotation keys are — confirm this matches the read side's expectations.
 */
def indexSpanByAnnotations(span: Span) : Future[Unit] = {
  CASSANDRA_INDEX_SPAN_BY_ANNOTATIONS.incr
  val lastAnnotation = span.lastAnnotation getOrElse {
    CASSANDRA_INDEX_SPAN_BY_ANNOTATIONS_NO_LAST_ANNOTATION.incr
    return Future.Unit
  }
  val timestamp = lastAnnotation.timestamp
  val batch = annotationsIndex.batch
  span.annotations.filter { a =>
    // skip core annotations since that query can be done by service name/span name anyway
    !Constants.CoreAnnotations.contains(a.value)
  } groupBy {
    _.value
  } foreach { m: (String, List[Annotation]) =>
    // Index only the earliest occurrence of each distinct annotation value.
    val a = m._2.min
    a.host match {
      case Some(endpoint) => {
        WRITE_REQUEST_COUNTER.incr()
        val col = Column[Long, String](a.timestamp, span.traceId).ttl(dataTimeToLive)
        batch.insert(ByteBuffer.wrap(encode(endpoint.serviceName.toLowerCase, a.value.toLowerCase).getBytes), col)
      }
      case None => // without a host there is no service name to key by
    }
  }
  span.binaryAnnotations foreach { ba =>
    ba.host match {
      case Some(endpoint) => {
        // Two writes per binary annotation: with and without the value suffix.
        WRITE_REQUEST_COUNTER.incr(2)
        val key = encode(endpoint.serviceName, ba.key).getBytes
        val col = Column[Long, String](timestamp, span.traceId).ttl(dataTimeToLive)
        batch.insert(ByteBuffer.wrap(key ++ INDEX_DELIMITER.getBytes ++ Util.getArrayFromBuffer(ba.value)), col)
        batch.insert(ByteBuffer.wrap(key), col)
      }
      case None =>
    }
  }
  val annFuture = batch.execute()
  annFuture.unit
}
/**
 * Writes the span's first and last annotation timestamps into the duration
 * index (row key = traceId, column key = timestamp), in a single batch.
 */
def indexSpanDuration(span: Span): Future[Unit] = {
  val first = span.firstAnnotation.map(_.timestamp)
  val last = span.lastAnnotation.map(_.timestamp)
  val batch = durationIndex.batch()
  // Fix: the previous form `foreach { WRITE_REQUEST_COUNTER.incr(); t => ... }`
  // evaluated the incr() once while constructing the lambda — i.e. even when the
  // Option was empty and nothing was written. Incrementing inside the lambda
  // counts exactly one write per inserted column.
  first foreach { t =>
    WRITE_REQUEST_COUNTER.incr()
    batch.insert(span.traceId, Column[Long, String](t, "").ttl(dataTimeToLive))
  }
  last foreach { t =>
    WRITE_REQUEST_COUNTER.incr()
    batch.insert(span.traceId, Column[Long, String](t, "").ttl(dataTimeToLive))
  }
  batch.execute().unit
}
}
| cogitate/twitter-zipkin-uuid | zipkin-cassandra/src/main/scala/com/twitter/zipkin/storage/cassandra/CassandraIndex.scala | Scala | apache-2.0 | 11,338 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.catalog
import org.apache.flink.table.descriptors.DescriptorProperties.toScala
import org.apache.flink.table.descriptors.StatisticsValidator.{STATISTICS_COLUMNS, STATISTICS_ROW_COUNT, readColumnStats}
import org.apache.flink.table.descriptors.StreamTableDescriptorValidator.{UPDATE_MODE, UPDATE_MODE_VALUE_APPEND, UPDATE_MODE_VALUE_RETRACT, UPDATE_MODE_VALUE_UPSERT}
import org.apache.flink.table.descriptors._
import org.apache.flink.table.factories.TableFactory
import org.apache.flink.table.plan.stats.TableStats
import scala.collection.JavaConverters._
/**
 * Defines a table in an [[ExternalCatalog]]. External catalog tables describe table sources
 * and/or sinks for both batch and stream environments.
 *
 * See also [[TableFactory]] for more information about how to target suitable factories.
 *
 * Use [[ExternalCatalogTableBuilder]] to integrate with the normalized descriptor-based API.
 *
 * @param isBatch Flag whether this external table is intended for batch environments.
 * @param isStreaming Flag whether this external table is intended for streaming environments.
 * @param isSource Flag whether this external table is declared as table source.
 * @param isSink Flag whether this external table is declared as table sink.
 * @param properties Properties that describe the table and should match with a [[TableFactory]].
 */
class ExternalCatalogTable(
    private val isBatch: Boolean,
    private val isStreaming: Boolean,
    private val isSource: Boolean,
    private val isSink: Boolean,
    private val properties: java.util.Map[String, String])
  extends TableDescriptor {

  // ----------------------------------------------------------------------------------------------
  // Legacy code
  // ----------------------------------------------------------------------------------------------

  /**
   * Reads table statistics from the descriptor's properties. Statistics are only
   * present if a row count has been stored.
   *
   * @deprecated This method exists for backwards-compatibility only.
   */
  @Deprecated
  @deprecated
  def getTableStats: Option[TableStats] = {
    val normalizedProps = new DescriptorProperties()
    addProperties(normalizedProps)
    toScala(normalizedProps.getOptionalLong(STATISTICS_ROW_COUNT)).map { rowCount =>
      TableStats(rowCount, readColumnStats(normalizedProps, STATISTICS_COLUMNS).asJava)
    }
  }

  // ----------------------------------------------------------------------------------------------
  // Getters
  // ----------------------------------------------------------------------------------------------

  /** Returns whether this external table is declared as table source. */
  def isTableSource: Boolean = isSource

  /** Returns whether this external table is declared as table sink. */
  def isTableSink: Boolean = isSink

  /** Returns whether this external table is intended for batch environments. */
  def isBatchTable: Boolean = isBatch

  /** Returns whether this external table is intended for stream environments. */
  def isStreamTable: Boolean = isStreaming

  // ----------------------------------------------------------------------------------------------

  /** Internal method for properties conversion. */
  override private[flink] def addProperties(descriptorProperties: DescriptorProperties): Unit =
    descriptorProperties.putProperties(properties)
}
object ExternalCatalogTable {

  /**
   * Creates a builder for creating an [[ExternalCatalogTable]].
   *
   * It takes [[Descriptor]]s which allow for declaring the communication to external
   * systems in an implementation-agnostic way. The classpath is scanned for suitable table
   * factories that match the desired configuration.
   *
   * Use the provided builder methods to configure the external catalog table accordingly.
   *
   * The following example shows how to read from a connector using a JSON format and
   * declaring it as a table source:
   *
   * {{{
   *   ExternalCatalogTable.builder(
   *       new ExternalSystemXYZ()
   *         .version("0.11"))
   *     .withFormat(
   *       new Json()
   *         .jsonSchema("{...}")
   *         .failOnMissingField(false))
   *     .withSchema(
   *       new Schema()
   *         .field("user-name", "VARCHAR").from("u_name")
   *         .field("count", "DECIMAL"))
   *     .supportsStreaming()
   *     .asTableSource()
   * }}}
   *
   * @param connectorDescriptor Connector descriptor describing the external system
   * @return External catalog builder
   */
  def builder(connectorDescriptor: ConnectorDescriptor): ExternalCatalogTableBuilder = {
    new ExternalCatalogTableBuilder(connectorDescriptor)
  }
}
/**
 * Builder for an [[ExternalCatalogTable]].
 *
 * @param connectorDescriptor Connector descriptor describing the external system
 */
class ExternalCatalogTableBuilder(private val connectorDescriptor: ConnectorDescriptor)
  extends TableDescriptor
  with SchematicDescriptor[ExternalCatalogTableBuilder]
  with StreamableDescriptor[ExternalCatalogTableBuilder] {

  // By default the table supports both batch and streaming environments;
  // supportsBatch()/supportsStreaming() narrow this down.
  private var isBatch: Boolean = true
  private var isStreaming: Boolean = true

  private var formatDescriptor: Option[FormatDescriptor] = None
  private var schemaDescriptor: Option[Schema] = None
  private var statisticsDescriptor: Option[Statistics] = None
  private var metadataDescriptor: Option[Metadata] = None
  private var updateMode: Option[String] = None

  /**
   * Specifies the format that defines how to read data from a connector.
   */
  override def withFormat(format: FormatDescriptor): ExternalCatalogTableBuilder = {
    formatDescriptor = Some(format)
    this
  }

  /**
   * Specifies the resulting table schema.
   */
  override def withSchema(schema: Schema): ExternalCatalogTableBuilder = {
    schemaDescriptor = Some(schema)
    this
  }

  /**
   * Declares how to perform the conversion between a dynamic table and an external connector.
   *
   * In append mode, a dynamic table and an external connector only exchange INSERT messages.
   *
   * @see See also [[inRetractMode()]] and [[inUpsertMode()]].
   */
  override def inAppendMode(): ExternalCatalogTableBuilder = {
    updateMode = Some(UPDATE_MODE_VALUE_APPEND)
    this
  }

  /**
   * Declares how to perform the conversion between a dynamic table and an external connector.
   *
   * In retract mode, a dynamic table and an external connector exchange ADD and RETRACT messages.
   *
   * An INSERT change is encoded as an ADD message, a DELETE change as a RETRACT message, and an
   * UPDATE change as a RETRACT message for the updated (previous) row and an ADD message for
   * the updating (new) row.
   *
   * In this mode, a key must not be defined as opposed to upsert mode. However, every update
   * consists of two messages which is less efficient.
   *
   * @see See also [[inAppendMode()]] and [[inUpsertMode()]].
   */
  override def inRetractMode(): ExternalCatalogTableBuilder = {
    updateMode = Some(UPDATE_MODE_VALUE_RETRACT)
    this
  }

  /**
   * Declares how to perform the conversion between a dynamic table and an external connector.
   *
   * In upsert mode, a dynamic table and an external connector exchange UPSERT and DELETE messages.
   *
   * This mode requires a (possibly composite) unique key by which updates can be propagated. The
   * external connector needs to be aware of the unique key attribute in order to apply messages
   * correctly. INSERT and UPDATE changes are encoded as UPSERT messages. DELETE changes as
   * DELETE messages.
   *
   * The main difference to a retract stream is that UPDATE changes are encoded with a single
   * message and are therefore more efficient.
   *
   * @see See also [[inAppendMode()]] and [[inRetractMode()]].
   */
  override def inUpsertMode(): ExternalCatalogTableBuilder = {
    updateMode = Some(UPDATE_MODE_VALUE_UPSERT)
    this
  }

  /**
   * Specifies the statistics for this external table.
   */
  def withStatistics(statistics: Statistics): ExternalCatalogTableBuilder = {
    statisticsDescriptor = Some(statistics)
    this
  }

  /**
   * Specifies the metadata for this external table.
   */
  def withMetadata(metadata: Metadata): ExternalCatalogTableBuilder = {
    metadataDescriptor = Some(metadata)
    this
  }

  /**
   * Explicitly declares this external table for supporting only stream environments.
   */
  def supportsStreaming(): ExternalCatalogTableBuilder = {
    isBatch = false
    isStreaming = true
    this
  }

  /**
   * Explicitly declares this external table for supporting only batch environments.
   */
  def supportsBatch(): ExternalCatalogTableBuilder = {
    // Fix: this previously set isBatch = false / isStreaming = true — an exact
    // copy of supportsStreaming() — which made it impossible to declare a
    // batch-only table. Batch-only means isBatch on and isStreaming off.
    isBatch = true
    isStreaming = false
    this
  }

  /**
   * Explicitly declares this external table for supporting both batch and stream environments.
   */
  def supportsBatchAndStreaming(): ExternalCatalogTableBuilder = {
    isBatch = true
    isStreaming = true
    this
  }

  /**
   * Declares this external table as a table source and returns the
   * configured [[ExternalCatalogTable]].
   *
   * @return External catalog table
   */
  def asTableSource(): ExternalCatalogTable = {
    new ExternalCatalogTable(
      isBatch,
      isStreaming,
      isSource = true,
      isSink = false,
      DescriptorProperties.toJavaMap(this))
  }

  /**
   * Declares this external table as a table sink and returns the
   * configured [[ExternalCatalogTable]].
   *
   * @return External catalog table
   */
  def asTableSink(): ExternalCatalogTable = {
    new ExternalCatalogTable(
      isBatch,
      isStreaming,
      isSource = false,
      isSink = true,
      DescriptorProperties.toJavaMap(this))
  }

  /**
   * Declares this external table as both a table source and sink. It returns the
   * configured [[ExternalCatalogTable]].
   *
   * @return External catalog table
   */
  def asTableSourceAndSink(): ExternalCatalogTable = {
    new ExternalCatalogTable(
      isBatch,
      isStreaming,
      isSource = true,
      isSink = true,
      DescriptorProperties.toJavaMap(this))
  }

  // ----------------------------------------------------------------------------------------------

  /**
   * Internal method for properties conversion. Merges all configured descriptors
   * into the given property map.
   */
  override private[flink] def addProperties(properties: DescriptorProperties): Unit = {
    connectorDescriptor.addProperties(properties)
    formatDescriptor.foreach(_.addProperties(properties))
    schemaDescriptor.foreach(_.addProperties(properties))
    statisticsDescriptor.foreach(_.addProperties(properties))
    metadataDescriptor.foreach(_.addProperties(properties))
    updateMode.foreach(mode => properties.putString(UPDATE_MODE, mode))
  }
}
| yew1eb/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/catalog/ExternalCatalogTable.scala | Scala | apache-2.0 | 11,696 |
package org.broadinstitute.dsde.vault.datamanagement.services
import com.wordnik.swagger.annotations._
import org.broadinstitute.dsde.vault.common.directives.OpenAMDirectives._
import org.broadinstitute.dsde.vault.common.directives.VersioningDirectives._
import org.broadinstitute.dsde.vault.datamanagement.controller.DataManagementController
import org.broadinstitute.dsde.vault.datamanagement.model.UnmappedBAM
import org.broadinstitute.dsde.vault.datamanagement.services.JsonImplicits._
import spray.http.MediaTypes._
import spray.httpx.SprayJsonSupport._
import spray.json._
import spray.routing._
@Api(value = "/ubams", description = "uBAM Service", produces = "application/json")
trait UnmappedBAMService extends HttpService {

  // Execution context for Future transformations performed inside the routes.
  private implicit val ec = actorRefFactory.dispatcher

  private final val ApiPrefix = "ubams"
  private final val ApiVersions = "v1,v2"

  // Composite route exposed by this service.
  val routes = describeRoute ~ ingestRoute ~ describeRouteList

  @ApiOperation(value = "Describes a uBAM's metadata and associated files.",
    nickname = "ubam_describe",
    httpMethod = "GET",
    produces = "application/json",
    response = classOf[UnmappedBAM],
    notes = "Supports arbitrary metadata keys, but this is not represented well in Swagger (see the 'additionalMetadata' note below)"
  )
  @ApiImplicitParams(Array(
    new ApiImplicitParam(name = "version", required = true, dataType = "string", paramType = "path", value = "API version", allowableValues = ApiVersions),
    new ApiImplicitParam(name = "id", required = true, dataType = "string", paramType = "path", value = "uBAM Vault ID")
  ))
  @ApiResponses(Array(
    new ApiResponse(code = 200, message = "Successful Request"),
    new ApiResponse(code = 404, message = "Vault ID Not Found"),
    new ApiResponse(code = 500, message = "Vault Internal Error")
  ))
  // GET /ubams/v{1,2}/{id}: look up a single uBAM; v2+ includes extra fields.
  def describeRoute = {
    pathVersion(ApiPrefix, 1, Segment) { (version, id) =>
      get {
        // rejectEmptyResponse turns a None lookup result into a 404.
        rejectEmptyResponse {
          respondWithMediaType(`application/json`) {
            complete {
              DataManagementController.getUnmappedBAM(id, version > 1).map(_.toJson.prettyPrint)
            }
          }
        }
      }
    }
  }

  @ApiOperation(value = "Creates uBAM objects", nickname = "ubam_ingest", httpMethod = "POST",
    produces = "application/json", consumes = "application/json", response = classOf[UnmappedBAM],
    notes = "Accepts a json packet as POST. Creates a Vault object with the supplied metadata.")
  @ApiImplicitParams(Array(
    new ApiImplicitParam(name = "version", required = true, dataType = "string", paramType = "path", value = "API version", allowableValues = ApiVersions),
    new ApiImplicitParam(name = "body", required = true, dataType = "org.broadinstitute.dsde.vault.datamanagement.model.UnmappedBAM", paramType = "body", value = "uBAM to create")
  ))
  @ApiResponses(Array(
    new ApiResponse(code = 200, message = "Successful"),
    new ApiResponse(code = 500, message = "Vault Internal Error")
  ))
  // POST /ubams/v{1,2}: create a uBAM, attributing it to the caller's OpenAM identity.
  def ingestRoute = {
    pathVersion(ApiPrefix, 1) { version =>
      post {
        commonNameFromCookie() { commonName =>
          entity(as[UnmappedBAM]) { unmappedBAM =>
            respondWithMediaType(`application/json`) {
              complete {
                DataManagementController.createUnmappedBAM(unmappedBAM, commonName, version > 1).toJson.prettyPrint
              }
            }
          }
        }
      }
    }
  }

  @ApiOperation(value = "Describes a list of uBAM's metadata and associated files.",
    nickname = "ubam_describe_list",
    httpMethod = "GET",
    produces = "application/json",
    response = classOf[UnmappedBAM],
    responseContainer = "List",
    notes = "Supports arbitrary metadata keys, but this is not represented well in Swagger (see the 'additionalMetadata' note below)"
  )
  @ApiResponses(Array(
    new ApiResponse(code = 200, message = "Successful Request"),
    new ApiResponse(code = 500, message = "Vault Internal Error")
  ))
  @ApiImplicitParams(Array(
    new ApiImplicitParam(name = "version", required = true, dataType = "string", paramType = "path", value = "API version", allowableValues = ApiVersions),
    new ApiImplicitParam(name = "page[limit]", required = false, dataType = "integer", paramType = "query", value = "uBAM limit", allowableValues = "range[0, 2147483647]")
  ))
  // GET /ubams/v{N}: list uBAMs with an optional size cap.
  // NOTE(review): this route matches the path literally ("ubams" / "v" ~ IntNumber)
  // instead of using pathVersion(ApiPrefix, ...) like the routes above — confirm
  // whether the divergence is intentional.
  def describeRouteList = {
    path("ubams" / "v" ~ IntNumber) { version =>
      get {
        // Before full pagination, allow limiting the response size.
        // `page[limit]` based off http://jsonapi.org/format/#fetching-pagination
        // In url to be spray (and RFC3986) compliant, must be encoded as `page%5Blimit%5D=123`
        // Could have also used param name `size` from https://github.com/Jarlakxen/spray-extensions#pagination-support
        parameter("page[limit]".as[Int].?) { pageLimit =>
          rejectEmptyResponse {
            respondWithMediaType(`application/json`) {
              complete {
                DataManagementController.getUnmappedBAMList(version > 1, pageLimit).toJson.prettyPrint
              }
            }
          }
        }
      }
    }
  }
}
| broadinstitute/vault-datamanagement | src/main/scala/org/broadinstitute/dsde/vault/datamanagement/services/UnmappedBAMService.scala | Scala | bsd-3-clause | 5,132 |
import javax.servlet.ServletContext
import org.scalatra._
import org.scalatra.metrics.MetricsSupportExtensions._
import org.scalatra.metrics._
import com.socrata.spandex.common.SpandexConfig
import com.socrata.spandex.common.client.SpandexElasticSearchClient
import com.socrata.spandex.http.SpandexResult.Fields._
import com.socrata.spandex.http.SpandexServlet
/**
 * Scalatra lifecycle hook: mounts the metrics admin endpoints and the Spandex
 * servlet on startup, and releases the shared Elasticsearch client on shutdown.
 */
class ScalatraBootstrap extends LifeCycle with MetricsBootstrap {
  // Configuration and Elasticsearch client shared for the whole servlet lifetime.
  val conf = new SpandexConfig
  val client = new SpandexElasticSearchClient(conf.es)
  override def init(context: ServletContext): Unit = {
    // Metrics admin endpoints.
    context.mountHealthCheckServlet("/health")
    context.mountThreadDumpServlet("/thread-dump")
    // Collect request metrics for the sample and suggest routes.
    context.installInstrumentedFilter(s"/$routeSample/*")
    context.installInstrumentedFilter(s"/$routeSuggest/*")
    // Main application servlet handles everything else.
    context.mount(new SpandexServlet(conf, client), "/*")
  }
  override def destroy(context: ServletContext): Unit = {
    // Release the Elasticsearch client's resources.
    client.close()
  }
}
| socrata-platform/spandex | spandex-http/src/main/scala/ScalatraBootstrap.scala | Scala | apache-2.0 | 932 |
package net.katsstuff.chitchat.helper
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import org.spongepowered.api.text.Text
import org.spongepowered.api.text.format.{TextColor, TextColors, TextFormat, TextStyle, TextStyles}
object TextHelper {

  /** Returns the [[TextFormat]] in effect at the end of `text`, if any is set. */
  def getFormatAtEnd(text: Text): Option[TextFormat] =
    getTextAtEnd(text, _.getFormat != TextFormat.NONE).map(_.getFormat)

  /** Returns the [[TextColor]] in effect at the end of `text`, if any is set. */
  def getColorAtEnd(text: Text): Option[TextColor] = getTextAtEnd(text, _.getColor != TextColors.NONE).map(_.getColor)

  /** Returns the [[TextStyle]] in effect at the end of `text`, if any is set. */
  def getStyleAtEnd(text: Text): Option[TextStyle] = getTextAtEnd(text, _.getStyle != TextStyles.NONE).map(_.getStyle)

  /**
   * Walks the chain of last children of `text` and returns the first node
   * satisfying `predicate`, preferring the deepest (end-most) node.
   *
   * @return The text if it was found.
   */
  def getTextAtEnd(text: Text, predicate: Text => Boolean): Option[Text] = {
    // Descend to the last child first and prefer its result; fall back to the
    // current node on the way back up. Recursion depth equals the nesting depth
    // of the Text value, so plain (non-tail) recursion is acceptable here.
    def search(current: Text): Option[Text] = {
      val children = current.getChildren.asScala
      val fromDeeper = if (children.isEmpty) None else search(children.last)
      fromDeeper.orElse(Some(current).filter(predicate))
    }
    search(text)
  }
}
| Katrix-/ChitChat | shared/src/main/scala/net/katsstuff/chitchat/helper/TextHelper.scala | Scala | mit | 1,297 |
/*
* Copyright 2014-2016 Panavista Technologies, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deftrade
import java.net.{ Socket, ServerSocket }
import java.io.DataInputStream
import java.util.Random
import scala.language.postfixOps
import scala.util.control.NonFatal
import scala.concurrent.{ promise, Promise, Future, Await, duration }
import duration._
import akka.util.{ ByteString }
import akka.actor.ActorSystem
import akka.event.Logging
/**
* A test fixture which represents the TWS server side. Note that this is purely a protocol testing
* oriented fixture; no functional testing is done - the only goal is to ensure that the message
* fields defined by the com.ib.client API are conformed to by the ib-scala messages.
*
* For `OutgoingMessages`, the random messages sent by the ib-scala and com.ib.client libraries
* are compared.
*
* For `IncomingMessages`, the following observation is exploited: if the random fields are
* restricted to be integers, they will be parsed correctly no matter if the field type is
* String, Double, Boolean or Int. Combine this with the lenient error handling for the
* `Enumeration` based ib-scala message types, and it's possibly to generate random valued
* fields and have them parsed correctly (enough) by the clients.
*
* One nuance is that the random values must themselves not correspond to valid API message
* numbers: (FIXME: why?)
*/
class TestServer(system: ActorSystem) {
  import TestServer._
  val log = Logging(system, this.getClass)
  // Listening socket is opened eagerly at construction time.
  val ss = new ServerSocket(TEST_PORT)
  // val executor = new misc.SingleThreadPool()
  // need to be careful on each release to adjust these.
  /**
   * Starts a one-shot server thread that accepts a single client connection,
   * performs the TWS handshake, then either emits a message (params.incoming)
   * or records the client's raw bytes into TestServer.recMap.
   *
   * Blocks (up to 1s) until the server thread has actually started.
   */
  def startTestServer(params: Params): Unit = {
    import params._
    // Completed by the server thread as soon as it is running.
    val okToGo = Promise[String]
    class Server extends Runnable {
      override def run: Unit = {
        import OutgoingMessages.wrz
        // import IncomingMessages.rdz
        // commented out - rdz() is cut/pasted to avoid interfering with nFields counter
        import ImplicitConversions.{ i2s, s2i }
        val threadName = Thread.currentThread().getName()
        okToGo.success(threadName)
        log.debug("Test server waiting for connection on port {} on thread {}",
          ss.getLocalPort, threadName)
        // Blocks until the client under test connects.
        val socket = ss.accept()
        log.debug("connected on for server version {}", serverVersion)
        implicit val is = new DataInputStream(socket.getInputStream())
        implicit val os = socket.getOutputStream()
        // Handshake: read client version, answer with server version + timestamp.
        val clientVersion: Int = rdz
        log.debug("clientVersion is {}", clientVersion)
        wrz(serverVersion)
        wrz("TWS date/time field")
        val clientId: Int = rdz
        log.debug(params.toString)
        if (incoming) {
          // Emit an incoming message: api code, version, then either the fixed
          // field list or nFields random fields that can never be mistaken for
          // a valid message code.
          val random = new Random(seed)
          def nextRandomField = {
            val index = random.nextInt(nics)
            illegalCodes(index)
          }
          try {
            wrz(apiCode)
            wrz(apiVers)
            fields match {
              case Some(fs) => fs foreach (wrz(_))
              case None => misc.repeat(nFields) { wrz(nextRandomField) }
            }
            // Optionally wait for the test to signal before terminating.
            params.connected foreach { Await.ready(_, 1000 millis) }
            wrz("-1")
            os.close()
          } catch {
            case NonFatal(ex) => log.info("caught exception: {}", ex)
          }
        } else {
          // Outgoing-message test: drain everything the client sends and hand
          // the raw bytes to the waiting test via recMap.
          log.debug("TestServer: reading bytes")
          val bsb = ByteString.newBuilder
          while ({
            val b = is.read()
            if (b != -1) { bsb += b.asInstanceOf[Byte]; true }
            else { false }
          }) {}
          val bs = bsb.result
          log.debug("raw fields: {}EOF", bs map { b =>
            val c = b.asInstanceOf[Char]; if (c == 0) '|' else c
          } mkString)
          recMap(clientId) success bs
        }
        socket.close()
      }
    }
    new Thread(new Server()).start
    // executor.execute(new Server())
    val threadName = Await.result(okToGo.future, 1000 millis)
    log.info("startServer: Thread {} started", threadName)
  }
  def close() = ss.close()
}
object TestServer {
/**
 * Parameters for one test-server run.
 *
 * @param serverVersion TWS server version announced during the handshake
 * @param incoming      true: the server emits an incoming message for the client
 *                      to parse; false: the server records the client's raw bytes
 * @param apiCode       API message code to send
 * @param apiVers       API message version to send
 * @param nFields       number of random fields to emit (must be 0 when `fields` is given)
 * @param fields        explicit field values to emit instead of random ones
 * @param connected     optional future awaited before the terminating "-1" is sent
 * @param seed          RNG seed so client and server see the same random sequence
 */
case class Params(serverVersion: Int,
  incoming: Boolean,
  apiCode: Int,
  apiVers: Int,
  nFields: Int = 8192,
  fields: Option[List[String]] = None,
  connected: Option[Future[Unit]] = None,
  seed: Long = SEED) {
  require(nFields == 0 || fields == None)
  def hasList: Boolean = fields.isDefined
  override def toString = NonDefaultNamedValues.nonDefaultNamedValues
}
object Params {
  /** Convenience factory for an incoming-message run with an explicit field list. */
  def list(serverVersion: Int, apiCode: Int, apiVers: Int, fields: String*): Params =
    Params(serverVersion, true, apiCode, apiVers, nFields = 0, fields = Some(List(fields: _*)))
}
// Port the test server listens on.
val TEST_PORT = 8888
// Candidate random field values: ten "0"s, ten empty strings, and the numeric
// ranges 22-44 and 60-98 — chosen so they never collide with a valid message code.
val illegalCodes = Vector.fill(10)("0") ++ Vector.fill(10)("") ++
  ((22 until 45) map (_.toString)) ++
  ((60 until 99) map (_.toString))
// Number of candidate random field values.
val nics = illegalCodes.size
// Fixed seed so client and server generate identical random field sequences.
val SEED = 0x4DBABEBEABAD8F00L
/**
 * Reads one NUL-terminated ASCII field from the stream and returns it without
 * the terminator. Note: DataInputStream.readByte signals end-of-stream by
 * throwing EOFException, so an unterminated trailing field raises rather than
 * returning a partial result.
 */
def rdz(implicit input: DataInputStream): String = {
  val field = new StringBuilder
  var b = input.readByte()
  while (b != 0) {
    field += b.asInstanceOf[Char]
    b = input.readByte()
  }
  field.result
}
/*
 * Map from clientId to the raw bytes received from that client.
 *
 * NOTE(review): this mutable map is written by the test (via future()) and
 * completed from the server thread without synchronization — confirm the
 * happens-before relationship is guaranteed elsewhere (e.g. by starting the
 * server only after registering the promise).
 */
import collection.mutable
val recMap = mutable.Map.empty[Int, Promise[ByteString]]
/** Registers interest in the bytes sent by `cid` and returns a future for them. */
def future(cid: Int): Future[ByteString] = {
  val p = Promise[ByteString]
  recMap.put(cid, p)
  p.future
}
/**
 * Splits a NUL-delimited byte string into its constituent API fields,
 * preserving their original order. A trailing field without a terminator
 * is also returned.
 */
def toFieldList(bs: ByteString): List[String] = {
  @annotation.tailrec
  def loop(remaining: ByteString, acc: List[String]): List[String] =
    if (remaining.isEmpty) acc.reverse
    else {
      val (field, tail) = remaining span { _ != 0 }
      loop(tail drop 1, new String(field.toArray, "US-ASCII") :: acc)
    }
  loop(bs, List.empty[String])
}
} | ndwade/def-trade | ib-client/src/test/scala/io/deftrade/TestServer.scala | Scala | apache-2.0 | 6,599 |
package cspom.compiler
import cspom.{CSPOM, CSPOMConstraint}
import cspom.variable.CSPOMSeq
/**
 * Converts a reified conjunction to CNF:
 *
 * a = b ^ c ^ d...
 *
 * <=>
 *
 * (a v -b v -c v -d...) ^ (-a v b) ^ (-a v c) ^ (-a v d) ^ ...
 */
object ReifiedConj extends ConstraintCompilerNoData {

  def functions = Functions("and")

  override def matchBool(c: CSPOMConstraint[_], p: CSPOM): Boolean = true

  def compile(fc: CSPOMConstraint[_], problem: CSPOM): Delta = {
    val reified = fc.result
    val operands = fc.arguments
    // The conjunction of all operands implies the reified variable…
    val forward = CSPOMConstraint("clause")(CSPOMSeq(reified), CSPOMSeq(operands: _*))
    // …and the reified variable implies each operand individually.
    val backward = operands.map(op => CSPOMConstraint("clause")(CSPOMSeq(op), CSPOMSeq(reified)))
    ConstraintCompiler.replaceCtr(fc, forward +: backward, problem)
  }
}
| concrete-cp/cspom | src/test/scala/cspom/compiler/ReifiedConj.scala | Scala | lgpl-2.1 | 742 |
/*
* Copyright 2010 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.conversions
import java.util.concurrent.Callable
import scala.language.implicitConversions
/**
 * Implicits for turning a block of code into a Runnable or Callable.
 */
object thread {

  /** Wraps a by-name block as a [[Runnable]]; the block is re-evaluated on each `run()`. */
  implicit def makeRunnable(f: => Unit): Runnable =
    new Runnable {
      override def run(): Unit = f
    }

  /** Wraps a by-name block as a [[Callable]]; the block is re-evaluated on each `call()`. */
  implicit def makeCallable[T](f: => T): Callable[T] =
    new Callable[T] {
      override def call(): T = f
    }
}
| edombowsky/util | util-core/src/main/scala/com/twitter/conversions/thread.scala | Scala | apache-2.0 | 993 |
package pipelines.speech
import breeze.stats.distributions.{CauchyDistribution, RandBasis, ThreadLocalRandomGenerator}
import breeze.linalg.DenseVector
import org.apache.commons.math3.random.MersenneTwister
import scopt.OptionParser
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import evaluation.MulticlassClassifierEvaluator
import loaders.TimitFeaturesDataLoader
import nodes.learning.{BlockLinearMapper, BlockLeastSquaresEstimator}
import nodes.stats.{CosineRandomFeatures, StandardScaler}
import nodes.util.{VectorCombiner, ClassLabelIndicatorsFromIntLabels, MaxClassifier}
import pipelines._
import workflow.Pipeline
object TimitPipeline extends Logging {
  val appName = "Timit"

  /**
   * Command-line configuration for the TIMIT speech pipeline.
   *
   * @param trainDataLocation   path to the training feature data
   * @param trainLabelsLocation path to the training labels
   * @param testDataLocation    path to the test feature data
   * @param testLabelsLocation  path to the test labels
   * @param numParts            number of partitions to load the data into
   * @param numCosines          number of random-feature batches to generate
   * @param gamma               kernel bandwidth for the random features
   * @param rfType              random-feature distribution (Gaussian or Cauchy)
   * @param lambda              regularization for the least-squares solver
   * @param numEpochs           solver epochs
   * @param checkpointDir       optional Spark checkpoint directory
   */
  case class TimitConfig(
    trainDataLocation: String = "",
    trainLabelsLocation: String = "",
    testDataLocation: String = "",
    testLabelsLocation: String = "",
    numParts: Int = 512,
    numCosines: Int = 50,
    gamma: Double = 0.05555,
    rfType: Distributions.Value = Distributions.Gaussian,
    lambda: Double = 0.0,
    numEpochs: Int = 5,
    checkpointDir: Option[String] = None)

  /**
   * Trains a cosine-random-feature + block least-squares phone classifier on
   * the TIMIT dataset, logs the test error, and returns the trained pipeline.
   *
   * @param sc   the SparkContext to run on
   * @param conf pipeline configuration
   * @return the trained classification pipeline
   */
  def run(sc: SparkContext, conf: TimitConfig): Pipeline[DenseVector[Double], Int] = {

    // Bug fix: the original `foreach(_ => sc.setCheckpointDir(_))` constructed
    // the function value `sc.setCheckpointDir _` inside the lambda and threw it
    // away, so the checkpoint directory was never actually set.
    conf.checkpointDir.foreach(sc.setCheckpointDir)

    // NOTE(review): presumably gives executors time to register before the
    // first job — confirm whether this delay is still needed.
    Thread.sleep(5000)

    // Set the constants
    val seed = 123L
    val random = new java.util.Random(seed)
    val randomSource = new RandBasis(new ThreadLocalRandomGenerator(new MersenneTwister(random.nextLong())))

    val numCosineFeatures = 4096
    val numCosineBatches = conf.numCosines

    // Load the data
    val timitFeaturesData = TimitFeaturesDataLoader(
      sc,
      conf.trainDataLocation,
      conf.trainLabelsLocation,
      conf.testDataLocation,
      conf.testLabelsLocation,
      conf.numParts)

    // Build the pipeline; count() forces caching before training starts.
    val trainData = timitFeaturesData.train.data.cache().setName("trainRaw")
    trainData.count()

    val labels = ClassLabelIndicatorsFromIntLabels(TimitFeaturesDataLoader.numClasses).apply(
      timitFeaturesData.train.labels
    ).cache().setName("trainLabels")

    // Featurizer: a gather of `numCosineBatches` independent random-feature
    // transformers whose outputs are concatenated into one vector.
    val featurizer = Pipeline.gather {
      Seq.fill(numCosineBatches) {
        if (conf.rfType == Distributions.Cauchy) {
          // TODO: Once https://github.com/scalanlp/breeze/issues/398 is released,
          // use a RandBasis for cauchy
          CosineRandomFeatures(
            TimitFeaturesDataLoader.timitDimension,
            numCosineFeatures,
            conf.gamma,
            new CauchyDistribution(0, 1),
            randomSource.uniform).toPipeline
        } else {
          CosineRandomFeatures(
            TimitFeaturesDataLoader.timitDimension,
            numCosineFeatures,
            conf.gamma,
            randomSource.gaussian,
            randomSource.uniform).toPipeline
        }
      }
    } andThen VectorCombiner()

    // Train the model
    val predictor = featurizer andThen
      (new BlockLeastSquaresEstimator(numCosineFeatures, conf.numEpochs, conf.lambda),
        trainData, labels) andThen MaxClassifier

    val testData = timitFeaturesData.test.data.cache().setName("testRaw")
    testData.count() // force caching of the test set
    val actual = timitFeaturesData.test.labels.cache().setName("actual")

    // Calculate test error
    val testEval = MulticlassClassifierEvaluator(
      predictor(testData),
      actual,
      TimitFeaturesDataLoader.numClasses)

    logInfo("TEST Error is " + (100 * testEval.totalError) + "%")
    predictor
  }

  /** Supported random-feature distributions. */
  object Distributions extends Enumeration {
    type Distributions = Value
    val Gaussian, Cauchy = Value
  }

  /**
   * Parses command-line arguments into a [[TimitConfig]]. Exits with a usage
   * message on bad input (`.get` on the parse result throws if parsing failed,
   * after scopt has already printed the error).
   */
  def parse(args: Array[String]): TimitConfig = new OptionParser[TimitConfig](appName) {
    head(appName, "0.1")
    help("help") text("prints this usage text")
    opt[String]("trainDataLocation") required() action { (x,c) => c.copy(trainDataLocation=x) }
    opt[String]("trainLabelsLocation") required() action { (x,c) => c.copy(trainLabelsLocation=x) }
    opt[String]("testDataLocation") required() action { (x,c) => c.copy(testDataLocation=x) }
    opt[String]("testLabelsLocation") required() action { (x,c) => c.copy(testLabelsLocation=x) }
    opt[String]("checkpointDir") action { (x,c) => c.copy(checkpointDir=Some(x)) }
    opt[Int]("numParts") action { (x,c) => c.copy(numParts=x) }
    opt[Int]("numCosines") action { (x,c) => c.copy(numCosines=x) }
    opt[Int]("numEpochs") action { (x,c) => c.copy(numEpochs=x) }
    opt[Double]("gamma") action { (x,c) => c.copy(gamma=x) }
    opt[Double]("lambda") action { (x,c) => c.copy(lambda=x) }
    opt("rfType")(scopt.Read.reads(Distributions withName _)) action { (x,c) => c.copy(rfType = x)}
  }.parse(args, TimitConfig()).get

  /**
   * The actual driver receives its configuration parameters from spark-submit usually.
   * @param args
   */
  def main(args: Array[String]) = {
    val appConfig = parse(args)
    val conf = new SparkConf().setAppName(appName)
    conf.setIfMissing("spark.master", "local[2]")
    val sc = new SparkContext(conf)
    run(sc, appConfig)
    sc.stop()
  }
}
| tomerk/keystone | src/main/scala/pipelines/speech/TimitPipeline.scala | Scala | apache-2.0 | 5,119 |
package chandu0101.scalajs.facades.examples.pages.components.leaflet
import chandu0101.scalajs.facades.examples.pages.common.CodeExample
import chandu0101.scalajs.facades.leaflet._
import japgolly.scalajs.react.ReactComponentB
import japgolly.scalajs.react.vdom.all._
import scala.scalajs.js.Dynamic.{global => g, literal => json}
/**
* Created by chandrasekharkode on 3/3/15.
*/
object LBasicMap {
  // Source snippet rendered next to the live demo. It is a hand-written copy
  // of the code in componentDidMount below (not generated from it) — the two
  // must be kept in sync manually.
  val code =
    """
      | div( id := "map", width := "600px", height := "285px")
      |
      | // define map
      | val map = LMap("map").setView(LLatLng(16.1833, 80.8000), 8.0)
      | // val map = LeafLeft.map("map").setView(LLatLng(16.1833, 80.8000), 8.0) // defining map using Factory LeafLet
      | // get tiles from mapbox /openstreet/..
      | val mapId = "chandu0101.lbkd0e8e"
      | val token = "pk.eyJ1IjoiY2hhbmR1MDEwMSIsImEiOiJfTWJXQWdVIn0.P_YR-BkVn3AkUdsPgNh0Gw"
      | LTileLayer("https://{s}.tiles.mapbox.com/v4/{mapId}/{z}/{x}/{y}.png?access_token={token}",
      |   LTileLayerOptions(maxZoom = 18.0,
      |    token = token,
      |    mapId = mapId,
      |     attribution = "Map data ©<a href=\\"http://openstreetmap.org\\">OpenStreetMap</a> contributors, <a href=\\"http://creativecommons.org/licenses/by-sa/2.0/\\">CC-BY-SA</a>, Imagery © <a href=\\"http://mapbox.com\\">Mapbox</a>',")
      |  ).addTo(map) // add tiles to MAP
      |
    """.stripMargin
  // React component: render emits a fixed-size container div; the Leaflet map
  // is attached to it after mounting, since Leaflet needs a real DOM node.
  val component = ReactComponentB[Unit]("LBasicMap")
    .render(P => {
    div(
      h3("Basic Map"),
      CodeExample(code)(
        div(key := "map", id := "map", width := "600px", height := "285px")
      )
    )
  })
    .componentDidMount(scope => {
    // define map
    val map = LMap("map").setView(LLatLng(16.1833, 80.8000), 8.0)
    //    val map = LeafLeft.map("map").setView(LLatLng(16.1833, 80.8000), 8.0) // defining map using Factory LeafLet
    // get tiles from mapbox /openstreet/..
    val mapId = "chandu0101.lbkd0e8e"
    val token = "pk.eyJ1IjoiY2hhbmR1MDEwMSIsImEiOiJfTWJXQWdVIn0.P_YR-BkVn3AkUdsPgNh0Gw"
    LTileLayer("https://{s}.tiles.mapbox.com/v4/{mapId}/{z}/{x}/{y}.png?access_token={token}",
      LTileLayerOptions.maxZoom (18.0)
        .token(token)
        .mapId(mapId)
        .attribution("Map data &copy;<a href=\\"http://openstreetmap.org\\">OpenStreetMap</a> contributors, <a href=\\"http://creativecommons.org/licenses/by-sa/2.0/\\">CC-BY-SA</a>, Imagery © <a href=\\"http://mapbox.com\\">Mapbox</a>',").result
    ).addTo(map) // add tiles to MAP
  })
    .buildU
  def apply() = component()
}
| CapeSepias/scalajs-facades | examples/src/main/scala/chandu0101/scalajs/facades/examples/pages/components/leaflet/LBasicMap.scala | Scala | mit | 2,591 |
package at.logic.gapt.proofs.lk
import at.logic.gapt.expr._
import at.logic.gapt.proofs.HOLSequent
import at.logic.gapt.proofs.lk._
import at.logic.gapt.proofs.lk.base._
import org.specs2.mutable._
class SubstitutionTest extends Specification {
  "Substitutions" should {
    // Fixture: a cut between two copies of the axiom P(x) |- P(x), plus the
    // substitution [x -> f(a)] to be applied to the proof.
    object proof1 {
      val x = Var( "x", Ti )
      val p = Const( "P", Ti -> To )
      val px = HOLAtom( p, x :: Nil )
      val ax1 = Axiom( px :: Nil, px :: Nil )
      val ax2 = Axiom( px :: Nil, px :: Nil )
      // cut on P(x): right occurrence of ax1 against left occurrence of ax2
      val proof = CutRule( ax1, ax2, ax1.root.succedent.toList.head, ax2.root.antecedent.toList.head )
      val a = Const( "a", Ti )
      val f = Const( "f", Ti -> Ti )
      val fa = App( f, a )
      val subst = Substitution( x, fa )
    }
    // Fixture: forall-left then forall-right over P(x, y); the substitution
    // replaces the free variable y, while x is bound by the quantifier.
    object proof2 {
      val x = Var( "x", Ti )
      val y = Var( "y", Ti )
      val p = Const( "P", Ti -> ( Ti -> To ) )
      val pxy = HOLAtom( p, List( x, y ) )
      val allxpx = All( x, pxy )
      val ax1 = Axiom( pxy :: Nil, pxy :: Nil )
      val r1 = ForallLeftRule( ax1, ax1.root.antecedent( 0 ), allxpx, x )
      val proof = ForallRightRule( r1, r1.root.succedent( 0 ), allxpx, x )
      val a = Const( "a", Ti )
      val f = Const( "f", Ti -> Ti )
      val fa = App( f, a )
      val subst = Substitution( y, fa )
      val subst2 = Substitution( y, x ) //test for overbinding
    }
    "apply correctly to a simple proof" in {
      // Expect the quantifier-free cut proof's end-sequent to become P(f(a)) |- P(f(a)).
      val p_s = applySubstitution( proof1.proof, proof1.subst )
      val pfa = HOLAtom( proof1.p, proof1.fa :: Nil )
      val new_seq = HOLSequent( pfa :: Nil, pfa :: Nil )
      val seq = p_s._1.root.toHOLSequent
      seq must beEqualTo( new_seq )
    }
    "apply correctly to a proof with quantifiers" in {
      // Only the free y is replaced; the bound x must stay untouched.
      val p_s = applySubstitution( proof2.proof, proof2.subst )
      val pfa = All( proof2.x, HOLAtom( proof2.p, List( proof2.x, proof2.fa ) ) )
      val new_seq = HOLSequent( pfa :: Nil, pfa :: Nil )
      val seq = p_s._1.root.toHOLSequent
      seq must beEqualTo( new_seq )
    }
  }
}
| loewenheim/gapt | src/test/scala/at/logic/gapt/proofs/lk/SubstitutionTest.scala | Scala | gpl-3.0 | 1,992 |
package org.ensime.sexp
import org.parboiled.scala._
import org.ensime.sexp.util.ParboiledParser
/**
* Parse Emacs Lisp into an `Sexp`. Other lisp variants may
* require tweaking, e.g. Scheme's nil, infinity, NaN, etc.
*/
object SexpParser extends ParboiledParser[Sexp] {
  // Entry rule required by ParboiledParser.
  protected val Top = SexpP
  // e.g. for .el files: wraps the file contents in one list so a sequence of
  // top-level forms parses as a single Sexp.
  def flatParse(el: String): Sexp = parse("(" + el + "\\n)")
  // Any S-expression. Order matters: atoms first, then the list forms, so the
  // cheaper alternatives are tried before the recursive ones.
  private lazy val SexpP: Rule1[Sexp] = rule("Sexp") {
    SexpAtomP | SexpListP | SexpEmptyList | SexpConsP | SexpQuotedP
  }
  // Atoms. NaN/infinity literals must be tried before the generic number rule.
  private lazy val SexpAtomP: Rule1[SexpAtom] = rule("Atom") {
    SexpCharP | SexpStringP | SexpNaNP | SexpNumberP | SexpSymbolP
  }
  // Character literal: `?c` (elisp syntax).
  private lazy val SexpCharP: Rule1[SexpChar] = rule("Char") {
    ch('?') ~ NormalChar
  } ~~> { c => SexpChar(c.head) }
  // Double-quoted string with backslash escapes.
  private lazy val SexpStringP: Rule1[SexpString] = rule("String") {
    ch('"') ~ zeroOrMore(Character) ~~> (chars => SexpString(chars.mkString(""))) ~ ch('"')
  }
  // Number with optional fraction and exponent, parsed via BigDecimal.
  private lazy val SexpNumberP: Rule1[SexpNumber] = rule("Number") {
    group(Integer ~ optional(Frac) ~ optional(Exp))
  } ~> {
    value => SexpNumber(BigDecimal(value))
  }
  // Elisp's special IEEE literals; "-1.0e+INF" must precede "1.0e+INF".
  private lazy val SexpNaNP: Rule1[SexpAtom] = rule("NaN") {
    ("-1.0e+INF" ~> { _ => SexpNegInf }) |
      ("1.0e+INF" ~> { _ => SexpPosInf }) |
      (optional("-") ~ "0.0e+NaN" ~> { _ => SexpNaN })
  }
  // Symbol name; the literal symbol "nil" is normalised to SexpNil.
  private lazy val SexpSymbolP: Rule1[SexpAtom] = rule("Symbol") {
    // ? allowed at the end of symbol names
    oneOrMore(Alpha | Digit | SymbolSpecial) ~ zeroOrMore(Alpha | Digit | SymbolSpecial | ".") ~ optional("?")
  } ~> { sym =>
    if (sym == "nil") SexpNil
    else SexpSymbol(sym)
  }
  // "()" is nil in elisp.
  private lazy val SexpEmptyList: Rule1[SexpNil.type] = rule("List(empty)") {
    (LeftBrace ~ RightBrace)
  } ~> { _ => SexpNil }
  // Proper list: one or more whitespace-separated Sexps in parentheses.
  private lazy val SexpListP: Rule1[Sexp] = rule("List") {
    LeftBrace ~ (SexpP ~ zeroOrMore(Whitespace ~ SexpP)) ~ RightBrace
  } ~~> {
    (head, tail) => SexpList(head :: tail)
  }
  // Dotted pair: "(a . b)".
  private lazy val SexpConsP: Rule1[SexpCons] = rule("Cons") {
    LeftBrace ~
      SexpP ~ Whitespace ~ "." ~ Whitespace ~ SexpP ~ RightBrace
  } ~~> {
    (x, y) => SexpCons(x, y)
  }
  private val SexpQuote = SexpSymbol("quote")
  // 'x desugars to (quote . x).
  private lazy val SexpQuotedP: Rule1[Sexp] = rule("Quoted") {
    "'" ~ SexpP
  } ~~> { v => SexpCons(SexpQuote, v) }
  private lazy val Character: Rule1[String] = rule("Character") { EscapedChar | NormalChar }
  // Backslash escape inside a string; translated via `unescape` below.
  private lazy val EscapedChar: Rule1[String] = rule("EscapedChar") {
    "\\\\" ~ ANY ~> { s => unescape(s) }
  }
  private lazy val NormalChar: Rule1[String] = rule("NormalChar") { !anyOf("\\"\\\\") ~ ANY ~> identity }
  // Rule0 primitives and helpers...
  private lazy val Alpha = rule("Alpha") { "a" - "z" | "A" - "Z" }
  private lazy val Integer = rule("Integer") { optional("-") ~ (("1" - "9") ~ Digits | Digit) }
  private lazy val Digits = rule("Digits") { oneOrMore(Digit) }
  private lazy val Digit = rule("Digit") { "0" - "9" }
  private lazy val Frac = rule("Frac") { "." ~ Digits }
  private lazy val Exp = rule("Exp") { ignoreCase("e") ~ optional(anyOf("+-")) ~ Digits }
  private lazy val SymbolSpecial = rule("SymbolSpecial") { anyOf("+-*/_~!@$%^&=:<>{}") }
  // Whitespace also swallows ";" line comments.
  private lazy val Whitespace = rule("Whitespace") { zeroOrMore(Comment | anyOf(" \\n\\r\\t\\f")) }
  private lazy val Comment = rule("Comment") { ";" ~ zeroOrMore(noneOf("\\n")) ~ ("\\n" | EOI) }
  private lazy val LeftBrace = rule("(") { Whitespace ~ "(" ~ Whitespace }
  private lazy val RightBrace = rule(")") { Whitespace ~ ")" ~ Whitespace }
  // https://www.gnu.org/software/emacs/manual/html_node/elisp/Basic-Char-Syntax.html
  // https://www.gnu.org/software/emacs/manual/html_node/elisp/Syntax-for-Strings.html
  // Not supported: https://www.gnu.org/software/emacs/manual/html_node/elisp/Non_002dASCII-in-Strings.html
  // Mapping from the character following a backslash to its expansion.
  private[sexp] val specialChars = Map[String, String](
    "\\"" -> "\\"",
    "a" -> 7.toChar.toString,
    "b" -> "\\b",
    "t" -> "\\t",
    "n" -> "\\n",
    "v" -> 11.toChar.toString,
    "f" -> "\\f",
    "r" -> "\\r",
    "e" -> 27.toChar.toString,
    "s" -> " ",
    "d" -> 127.toChar.toString,
    "\\\\" -> "\\\\"
  )
  // An escaped newline or space is simply dropped (elisp line-continuation).
  private val ignore = Set("\\n", " ")
  private def unescape(c: String): String = {
    if (ignore(c)) ""
    else {
      val unescaped = specialChars.get(c)
      require(unescaped.isDefined, c + " is not a valid escaped character")
      unescaped.get
    }
  }
}
| jacobono/ensime-server | sexpress/src/main/scala/org/ensime/sexp/SexpParser.scala | Scala | gpl-3.0 | 4,380 |
/*
* Copyright 2020 ABSA Group Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package za.co.absa.spline.persistence.migration
/** Strategy for obtaining the set of database migration scripts. */
trait MigrationScriptLoader {
  // Loads every available migration script. Ordering semantics are left to
  // the implementation — presumably callers sort by version; TODO confirm.
  def loadAll(): Seq[MigrationScript]
}
| AbsaOSS/spline | persistence/src/main/scala/za/co/absa/spline/persistence/migration/MigrationScriptLoader.scala | Scala | apache-2.0 | 722 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.pipeline.api.keras.layers
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.{nn => bnn}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.layers.utils.KerasUtils
import scala.reflect.ClassTag
/**
* Takes {mean, log_variance} as input and samples from the Gaussian distribution
*
* When you use this layer as the first layer of a model, you need to provide the argument
* inputShape (a Single Shape, does not include the batch dimension).
*
* Remark: This layer is from Torch and wrapped in Keras style.
*
* @param inputShape A Single Shape, does not include the batch dimension.
* @tparam T Numeric type of parameter(e.g. weight, bias). Only support float/double now
*/
class GaussianSampler[T: ClassTag]
  (val inputShape: Shape = null)(implicit ev: TensorNumeric[T])
  extends LayerWrapperByForward[T](KerasUtils.addBatch(inputShape)) {

  // Delegates to the underlying BigDL GaussianSampler layer; this Keras-style
  // class only adds shape handling via LayerWrapperByForward.
  override def doBuild(inputShape: Shape): AbstractModule[Activity, Activity, T] = {
    bnn.GaussianSampler[T]()
  }
}

object GaussianSampler {
  // Convenience factory mirroring the Keras-layer construction convention.
  def apply[@specialized(Float, Double) T: ClassTag](inputShape: Shape = null)(
    implicit ev: TensorNumeric[T]) : GaussianSampler[T] = {
    new GaussianSampler[T](inputShape)
  }
}
| intel-analytics/analytics-zoo | zoo/src/main/scala/com/intel/analytics/zoo/pipeline/api/keras/layers/GaussianSampler.scala | Scala | apache-2.0 | 2,002 |
package antonkulaga.projects.repressilator
import org.denigma.binding.binders.{Events, GeneralBinder}
import org.denigma.binding.extensions._
import org.denigma.controls.charts._
import org.scalajs.dom.Element
import org.scalajs.dom.raw.MouseEvent
import rx.core.{Rx, Var}
import rx.ops._
import scala.collection.immutable._
trait Smoother {

  /** Component-wise arithmetic mean of `points`; assumes the list is non-empty. */
  protected def avg(points: List[Point]): Point = {
    val sum = points.foldLeft(Point(0, 0)) {
      case (acc, el) => acc.copy(acc.x + el.x, acc.y + el.y)
    }
    Point(sum.x / points.length, sum.y / points.length)
  }

  /**
   * Chunked average: replaces each run of `k` consecutive points by its mean.
   * (`sliding(k, k)` keeps a shorter trailing chunk, which `avg` handles.)
   */
  protected def glidingAverage(points: List[Point], k: Int) =
    points.sliding(k, k).map(avg).toList

  /**
   * Downsamples `points` to roughly `maxPoints` entries by chunked averaging;
   * returns the input unchanged when it is already small enough.
   */
  protected def smoothPoints(points: collection.immutable.List[Point], maxPoints: Int): collection.immutable.List[Point] =
    // Bug fix: the original `Math.round(points.length / maxPoints)` performed
    // *integer* division before rounding, making the round a no-op — e.g. a
    // ratio of 1.9 truncated to 1 and smoothing was skipped entirely.
    Math.round(points.length.toDouble / maxPoints) match {
      case small if small <= 1 => points // already few enough points
      case large => glidingAverage(points, large.toInt) // take chunked averages
    }
}
// Time-series plot of the repressilator ODE solution: three mRNA and three
// protein concentration curves over time. Solving is triggered by the `solve`
// mouse event; the resulting traces are downsampled via Smoother before display.
class RepressilatorTimePlot(val elem: Element, val odes: Rx[RepressilatorODEs], val time: Double, val tick: Double, val initialConditions: Rx[Array[Double]])
  extends LinesPlot with Smoother {
  val scaleX: rx.Var[Scale] = Var(LinearScale("Time", 0.0, time, tick, 1000))
  val scaleY: rx.Var[Scale] = Var(LinearScale("Concentration", 0.0, 2000, 500, 500, inverted = true))
  // Upper bound on points per displayed series (input to smoothPoints).
  val maxPoints:Var[Int] = Var(1000)
  //val coords = odes.now.computeAll(initialConditions.now, 2, 3)
  // One reactive series per species; colors distinguish mRNA from protein.
  val cI_mRNA = Var(new StaticSeries("cI mRNA", List.empty).withStrokeColor("aquamarine"))
  val lacI_mRNA = Var(new StaticSeries("LacI mRNA", List.empty).withStrokeColor("pink"))
  val tetR_mRNA = Var(new StaticSeries("TetR mRNA", List.empty).withStrokeColor("cyan"))
  val cI = Var(new StaticSeries("cI", List.empty).withStrokeColor("green"))
  val lacI = Var(new StaticSeries("LacI", List.empty).withStrokeColor("red"))
  val tetR = Var(new StaticSeries("TetR", List.empty).withStrokeColor("blue"))
  // Mouse-event variable the UI fires to request a new solve.
  lazy val solve = Var(Events.createMouseEvent)
  // Recomputes all six traces from the current ODEs and initial conditions.
  // computeAll is expected to return the series in the order: cI/LacI/TetR
  // mRNA, then cI/LacI/TetR protein — hence the `length >= 6` requirement.
  protected def onSolve() = {
    val init: Array[Double] = initialConditions.now
    val coords: Array[Array[Point]] = odes.now.computeAll(init)
    require(coords.length >= 6, "odes should include 6 elements")
    val max = maxPoints.now
    cI_mRNA() = cI_mRNA.now.copy(points = smoothPoints(coords(0).toList, max))
    lacI_mRNA() = lacI_mRNA.now.copy(points = smoothPoints(coords(1).toList, max))
    tetR_mRNA() = tetR_mRNA.now.copy(points = smoothPoints(coords(2).toList, max))
    cI() = cI.now.copy(points = smoothPoints(coords(3).toList,max))
    lacI() = lacI.now.copy(points = smoothPoints(coords(4).toList,max))
    tetR() = tetR.now.copy(points = smoothPoints(coords(5).toList,max))
  }
  solve.handler{
    onSolve()
  }
  // Binds each series to its DOM view.
  override def newItemView(item: Item): SeriesView = constructItemView(item){
    case (el, mp) => new SeriesView(el, item, transform).withBinder(new GeneralBinder(_))
  }
  val items: Var[Seq[Item]] = Var(Seq(cI_mRNA, lacI_mRNA, tetR_mRNA, cI, lacI, tetR))
}
/*
class RepressilatorProteinsPlot(val elem: Element, val odes: Rx[RepressilatorODEs], val conditionSource: RepressilatorInit) extends LinesPlot {
override type ItemView = SeriesView
lazy val initialConditions: Rx[Array[Double]] = conditionSource.initialConditions
val scaleX: rx.Var[Scale] = Var(LinearScale("LacI", 0.0, 2000.0, 500.0, 400.0))
val scaleY: rx.Var[Scale] = Var(LinearScale("TetR", 0.0, 2000.0, 500.0, 400.0, inverted = true))
val xy = Var(new StaticSeries("LacI | TetR", List.empty))
override val items = Var(Seq(xy))
chartClick.onChange("OnChartClick", uniqueValue = false, skipInitial = true){
event=> onChartClick(event)
}
def onChartClick(event: MouseEvent): Unit = if (event.currentTarget == event.target)
{
event.target match {
case el: Element =>
//println("HTML =" +elem.outerHTML)
val rect = el.getBoundingClientRect()
val x = event.clientX - rect.left - left.now
val y = event.clientY - rect.top - top.now
val sx = scaleX.now.chartCoord(x)
val sy = scaleY.now.chartCoord(y)
//println(s"chart click works! with [$x ; $y] coords [$sx : $sy]")
conditionSource.lacI_start() = sx
conditionSource.tetR_start() = sy
event.preventDefault()
case _ =>
}
}
lazy val solve = Var(Events.createMouseEvent)
solve.handler{
xy() = xy.now.copy(points = odes.now.computeXY(initial = initialConditions.now, 2, 3))
}
}
*/
| antonkulaga/personal | app/js/src/main/scala/antonkulaga/projects/repressilator/RepressilatorTimePlot.scala | Scala | mpl-2.0 | 4,486 |
package scadla.backends.obj
import scadla._
import dzufferey.utils._
import dzufferey.utils.LogLevel._
import java.io._
import squants.space.{Length, Millimeters, LengthUnit}
//TODO make parametric in terms of unit
// Default OBJ printer instance emitting coordinates in millimeters.
object Printer extends Printer(Millimeters) {
}
/**
 * Serializes a Polyhedron to a Wavefront OBJ file.
 *
 * @param unit the length unit used when emitting vertex coordinates
 */
class Printer(unit: LengthUnit = Millimeters) {

  // Writes `obj` to `fileName`: a "g" group line, one "v" line per vertex and
  // one "f" triangle line per face. The writer is always closed via finally.
  // NOTE(review): OBJ face indices are 1-based by specification; this writes
  // the indices from `obj.indexed` verbatim, so it assumes they are already
  // 1-based — TODO confirm against Polyhedron.indexed.
  def store(obj: Polyhedron, fileName: String) = {
    val writer = new BufferedWriter(new FileWriter(fileName))
    try {
      val (points, faces) = obj.indexed
      writer.write("g ScadlaObject")
      writer.newLine
      points.foreach{ p =>
        writer.write("v " + p.x.to(unit) + " " + p.y.to(unit) + " " + p.z.to(unit))
        writer.newLine
      }
      writer.newLine
      faces.foreach{ case (a,b,c) =>
        writer.write("f " + a + " " + b + " " + c)
        writer.newLine
      }
      writer.newLine
    } finally writer.close
  }
}
| dzufferey/scadla | src/main/scala/scadla/backends/obj/Printer.scala | Scala | apache-2.0 | 876 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.ui
import java.util.concurrent.atomic.AtomicLong
import scala.collection.mutable
import org.apache.commons.text.StringEscapeUtils
import org.apache.spark.sql.execution.{SparkPlanInfo, WholeStageCodegenExec}
/**
* A graph used for storing information of an executionPlan of DataFrame.
*
* Each graph is defined with a set of nodes and a set of edges. Each node represents a node in the
* SparkPlan tree, and each edge represents a parent-child relationship between two nodes.
*/
/**
 * A graph used for storing information of an executionPlan of DataFrame.
 *
 * Each node represents an operator in the SparkPlan tree and each edge a
 * child -> parent relationship between two operators.
 */
case class SparkPlanGraph(
    nodes: Seq[SparkPlanGraphNode], edges: Seq[SparkPlanGraphEdge]) {

  /** Renders the whole graph as a Graphviz "dot" document. */
  def makeDotFile(metrics: Map[Long, String]): String = {
    val nodeLines = nodes.map(node => node.makeDotNode(metrics) + "\\n")
    val edgeLines = edges.map(edge => edge.makeDotEdge + "\\n")
    (("digraph G {\\n" +: nodeLines) ++ edgeLines :+ "}").mkString
  }

  /**
   * Every SparkPlanGraphNode in the graph, with the contents of each
   * WholeStageCodegen cluster flattened out (each cluster contributes its
   * children followed by itself).
   */
  val allNodes: Seq[SparkPlanGraphNode] = nodes.flatMap {
    case cluster: SparkPlanGraphCluster => cluster.nodes :+ cluster
    case other => Seq(other)
  }
}
object SparkPlanGraph {

  /**
   * Build a SparkPlanGraph from the root of a SparkPlan tree.
   */
  def apply(planInfo: SparkPlanInfo): SparkPlanGraph = {
    val nodeIdGenerator = new AtomicLong(0)
    val nodes = mutable.ArrayBuffer[SparkPlanGraphNode]()
    val edges = mutable.ArrayBuffer[SparkPlanGraphEdge]()
    // Tracks exchange/subquery nodes so re-used ones map to a single node.
    val exchanges = mutable.HashMap[SparkPlanInfo, SparkPlanGraphNode]()
    buildSparkPlanGraphNode(planInfo, nodeIdGenerator, nodes, edges, null, null, exchanges)
    new SparkPlanGraph(nodes, edges)
  }

  // Recursively converts one SparkPlanInfo subtree into graph nodes/edges.
  // `parent` is the graph node of the parent operator (null at the root);
  // `subgraph` is the enclosing WholeStageCodegen cluster, if any;
  // `exchanges` deduplicates exchange and subquery subtrees that are re-used.
  private def buildSparkPlanGraphNode(
      planInfo: SparkPlanInfo,
      nodeIdGenerator: AtomicLong,
      nodes: mutable.ArrayBuffer[SparkPlanGraphNode],
      edges: mutable.ArrayBuffer[SparkPlanGraphEdge],
      parent: SparkPlanGraphNode,
      subgraph: SparkPlanGraphCluster,
      exchanges: mutable.HashMap[SparkPlanInfo, SparkPlanGraphNode]): Unit = {
    planInfo.nodeName match {
      // WholeStageCodegen becomes a cluster; its single child tree is placed inside it.
      case name if name.startsWith("WholeStageCodegen") =>
        val metrics = planInfo.metrics.map { metric =>
          SQLPlanMetric(metric.name, metric.accumulatorId, metric.metricType)
        }

        val cluster = new SparkPlanGraphCluster(
          nodeIdGenerator.getAndIncrement(),
          planInfo.nodeName,
          planInfo.simpleString,
          mutable.ArrayBuffer[SparkPlanGraphNode](),
          metrics)
        nodes += cluster

        buildSparkPlanGraphNode(
          planInfo.children.head, nodeIdGenerator, nodes, edges, parent, cluster, exchanges)

      // InputAdapter is invisible in the graph; recurse into its child outside any cluster.
      case "InputAdapter" =>
        buildSparkPlanGraphNode(
          planInfo.children.head, nodeIdGenerator, nodes, edges, parent, null, exchanges)

      case "BroadcastQueryStage" | "ShuffleQueryStage" =>
        if (exchanges.contains(planInfo.children.head)) {
          // Point to the re-used exchange
          val node = exchanges(planInfo.children.head)
          edges += SparkPlanGraphEdge(node.id, parent.id)
        } else {
          buildSparkPlanGraphNode(
            planInfo.children.head, nodeIdGenerator, nodes, edges, parent, null, exchanges)
        }

      case "Subquery" if subgraph != null =>
        // Subquery should not be included in WholeStageCodegen
        buildSparkPlanGraphNode(planInfo, nodeIdGenerator, nodes, edges, parent, null, exchanges)

      case "Subquery" if exchanges.contains(planInfo) =>
        // Point to the re-used subquery
        val node = exchanges(planInfo)
        edges += SparkPlanGraphEdge(node.id, parent.id)

      case "ReusedSubquery" =>
        // Re-used subquery might appear before the original subquery, so skip this node and let
        // the previous `case` make sure the re-used and the original point to the same node.
        buildSparkPlanGraphNode(
          planInfo.children.head, nodeIdGenerator, nodes, edges, parent, subgraph, exchanges)

      case "ReusedExchange" if exchanges.contains(planInfo.children.head) =>
        // Point to the re-used exchange
        val node = exchanges(planInfo.children.head)
        edges += SparkPlanGraphEdge(node.id, parent.id)

      // Default: emit a regular node (into the current cluster if inside one),
      // register exchanges/subqueries for reuse, link to parent, and recurse.
      case name =>
        val metrics = planInfo.metrics.map { metric =>
          SQLPlanMetric(metric.name, metric.accumulatorId, metric.metricType)
        }
        val node = new SparkPlanGraphNode(
          nodeIdGenerator.getAndIncrement(), planInfo.nodeName,
          planInfo.simpleString, metrics)
        if (subgraph == null) {
          nodes += node
        } else {
          subgraph.nodes += node
        }
        if (name.contains("Exchange") || name == "Subquery") {
          exchanges += planInfo -> node
        }

        if (parent != null) {
          edges += SparkPlanGraphEdge(node.id, parent.id)
        }
        planInfo.children.foreach(
          buildSparkPlanGraphNode(_, nodeIdGenerator, nodes, edges, node, subgraph, exchanges))
    }
  }
}
/**
* Represent a node in the SparkPlan tree, along with its metrics.
*
* @param id generated by "SparkPlanGraph". There is no duplicate id in a graph
* @param name the name of this SparkPlan node
* @param metrics metrics that this SparkPlan node will track
*/
/**
 * Represent a node in the SparkPlan tree, along with its metrics.
 *
 * @param id generated by "SparkPlanGraph". There is no duplicate id in a graph
 * @param name the name of this SparkPlan node
 * @param metrics metrics that this SparkPlan node will track
 */
private[ui] class SparkPlanGraphNode(
    val id: Long,
    val name: String,
    val desc: String,
    val metrics: Seq[SQLPlanMetric]) {

  /** Builds the dot statement for this node; `metricsValue` maps accumulator id to its rendered value. */
  def makeDotNode(metricsValue: Map[Long, String]): String = {
    // Only metrics that actually have a reported value are rendered.
    val reported = metrics.flatMap { metric =>
      metricsValue.get(metric.accumulatorId).map(value => metric.name + ": " + value)
    }
    val label = new mutable.StringBuilder(name)
    if (reported.nonEmpty) {
      // Display each metric on its own line. The whitespace between the two
      // "\\n"s creates a visible empty line between the name and the metrics.
      label ++= "\\n \\n"
      label ++= reported.mkString("\\n")
    }
    s""" $id [label="${StringEscapeUtils.escapeJava(label.toString())}"];"""
  }
}
/**
* Represent a tree of SparkPlan for WholeStageCodegen.
*/
/**
 * Represent a tree of SparkPlan for WholeStageCodegen, rendered as a dot
 * subgraph containing its child nodes.
 */
private[ui] class SparkPlanGraphCluster(
    id: Long,
    name: String,
    desc: String,
    val nodes: mutable.ArrayBuffer[SparkPlanGraphNode],
    metrics: Seq[SQLPlanMetric])
  extends SparkPlanGraphNode(id, name, desc, metrics) {

  override def makeDotNode(metricsValue: Map[Long, String]): String = {
    // A WholeStageCodegen cluster carries at most one pipeline-duration
    // metric; when it has a value, append it to the cluster label.
    val duration = metrics.filter(_.name.startsWith(WholeStageCodegenExec.PIPELINE_DURATION_METRIC))
    val labelStr = if (duration.nonEmpty) {
      require(duration.length == 1)
      val id = duration(0).accumulatorId
      if (metricsValue.contains(duration(0).accumulatorId)) {
        name + "\\n\\n" + metricsValue(id)
      } else {
        name
      }
    } else {
      name
    }
    s"""
      |  subgraph cluster${id} {
      |    label="${StringEscapeUtils.escapeJava(labelStr)}";
      |    ${nodes.map(_.makeDotNode(metricsValue)).mkString("    \\n")}
      |  }
    """.stripMargin
  }
}
/**
* Represent an edge in the SparkPlan tree. `fromId` is the child node id, and `toId` is the parent
* node id.
*/
private[ui] case class SparkPlanGraphEdge(fromId: Long, toId: Long) {
  // Graphviz statement "fromId->toId;" — drawn from child to parent.
  def makeDotEdge: String = s""" $fromId->$toId;\\n"""
}
| jkbradley/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SparkPlanGraph.scala | Scala | apache-2.0 | 8,202 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.akkastream.example
import akka.actor.{Actor, ActorSystem, Props}
import akka.stream.scaladsl.Sink
import akka.stream.{ActorMaterializer, ActorMaterializerSettings}
import org.apache.gearpump.akkastream.GearpumpMaterializer
import org.apache.gearpump.akkastream.scaladsl.GearSource
import org.apache.gearpump.cluster.main.{ArgumentsParser, CLIOption}
import org.apache.gearpump.streaming.dsl.scalaapi.CollectionDataSource
import org.apache.gearpump.util.AkkaApp
import scala.concurrent.Await
import scala.concurrent.duration._
/**
* WordCount example
* Test GroupBy2 (groupBy which uses SubFlow is not implemented yet)
*/
import org.apache.gearpump.akkastream.scaladsl.Implicits._
// Word-count example exercising the groupBy2 operator on either the Gearpump
// materializer or the plain Akka Streams one (chosen by the --gearpump flag).
object Test6 extends AkkaApp with ArgumentsParser {
  // scalastyle:off println
  override val options: Array[(String, CLIOption[Any])] = Array(
    "gearpump" -> CLIOption[Boolean]("<boolean>", required = false, defaultValue = Some(false))
  )

  override def main(akkaConf: Config, args: Array[String]): Unit = {
    val config = parse(args)
    implicit val system = ActorSystem("Test6", akkaConf)
    // Select the materializer: Gearpump's distributed one, or local Akka
    // Streams with operator fusion disabled.
    implicit val materializer: ActorMaterializer = config.getBoolean("gearpump") match {
      case true =>
        GearpumpMaterializer()
      case false =>
        ActorMaterializer(
          ActorMaterializerSettings(system).withAutoFusing(false)
        )
    }
    // Sink: forward every element to the Echo actor, which prints it.
    val echo = system.actorOf(Props(Echo()))
    val sink = Sink.actorRef(echo, "COMPLETE")
    val sourceData = new CollectionDataSource(
      List(
        "this is a good start",
        "this is a good time",
        "time to start",
        "congratulations",
        "green plant",
        "blue sky")
    )
    val source = GearSource.from[String](sourceData)
    // Split lines into words, group by word, count each group, log and emit.
    source.mapConcat({line =>
      line.split(" ").toList
    }).groupBy2(x => x)
      .map(word => (word, 1))
      .reduce({(a, b) =>
        (a._1, a._2 + b._2)
      })
      .log("word-count")
      .runWith(sink)

    // Blocks until the actor system terminates (or the timeout elapses).
    Await.result(system.whenTerminated, 60.minutes)
  }

  // Simple sink actor that prints everything it receives.
  case class Echo() extends Actor {
    def receive: Receive = {
      case any: AnyRef =>
        println("Confirm received: " + any)
    }
  }
  // scalastyle:on println
}
| manuzhang/incubator-gearpump | experiments/akkastream/src/main/scala/org/apache/gearpump/akkastream/example/Test6.scala | Scala | apache-2.0 | 3,016 |
package com.cloudant.clouseau
import java.io.File
import java.io.IOException
import java.nio.file.StandardCopyOption.ATOMIC_MOVE
import java.nio.file.Files
import java.util.List
import java.util.UUID
import org.apache.lucene.index.IndexCommit
import org.apache.lucene.index.IndexDeletionPolicy
import org.apache.lucene.store.FSDirectory
import scala.collection.JavaConversions._
/**
 * Lucene [[IndexDeletionPolicy]] that keeps only the most recent index commit
 * and lets callers snapshot that commit into an external directory.
 */
class ExternalSnapshotDeletionPolicy(dir: FSDirectory) extends IndexDeletionPolicy {

  // Directory holding the live index files.
  val originDir: File = dir.getDirectory

  // Latest commit observed via onInit/onCommit; None until the first commit.
  var lastCommit: Option[IndexCommit] = None

  /** Snapshot the most recent commit into snapshotDir; fails if none exists yet. */
  def snapshot(snapshotDir: File): Unit = synchronized {
    val commit = lastCommit.getOrElse(
      throw new IllegalStateException("No index commit to snapshot"))
    ExternalSnapshotDeletionPolicy.snapshot(originDir, snapshotDir, commit.getFileNames)
  }

  def onInit(commits: List[_ <: IndexCommit]): Unit = keepOnlyLastCommit(commits)

  def onCommit(commits: List[_ <: IndexCommit]): Unit = keepOnlyLastCommit(commits)

  /** Delete every commit except the newest one and remember the survivor. */
  private def keepOnlyLastCommit(commits: List[_ <: IndexCommit]): Unit = synchronized {
    // Walk from the second-newest commit down to the oldest, preserving the
    // reverse deletion order of the previous implementation.
    var i = commits.size - 2
    while (i >= 0) {
      commits.get(i).delete
      i -= 1
    }
    lastCommit = if (commits.isEmpty) None else Some(commits.get(commits.size - 1))
  }
}
object ExternalSnapshotDeletionPolicy {

  /**
   * Hard-link the named index files from originDir into snapshotDir.
   *
   * The links are created in a temporary sibling directory first, which is
   * then atomically renamed to snapshotDir, so a snapshot is either complete
   * or absent — never partial.
   *
   * @param originDir   absolute path of the live index directory
   * @param snapshotDir absolute path where the snapshot must appear; must not exist yet
   * @param files       names of the index files belonging to the commit
   * @throws IOException if a path is relative, originDir is not a directory,
   *                     no files were given, or linking/renaming fails
   * @throws IllegalStateException if snapshotDir already exists
   */
  def snapshot(originDir: File, snapshotDir: File, files: java.util.Collection[String]): Unit = {
    if (!originDir.isAbsolute) {
      throw new IOException(originDir + " is not an absolute path")
    }
    if (!snapshotDir.isAbsolute) {
      throw new IOException(snapshotDir + " is not an absolute path")
    }
    if (!originDir.isDirectory) {
      throw new IOException(originDir + " is not a directory")
    }
    if (snapshotDir.exists) {
      throw new IllegalStateException("Snapshot directory already exists")
    }
    if (files == null) {
      throw new IOException("No files selected for snapshot")
    }
    /* Prepare the snapshot directory in a temporary location so we can atomically
       rename it into place at successful completion. */
    val tmpDir = new File(snapshotDir.getParentFile, UUID.randomUUID.toString)
    if (!tmpDir.mkdir) {
      throw new IOException("Failed to make temporary directory for snapshot")
    }
    var success = false
    try {
      // Iterate the Java collection explicitly rather than relying on the
      // deprecated scala.collection.JavaConversions implicits.
      val it = files.iterator()
      while (it.hasNext) {
        val filename = it.next()
        Files.createLink(new File(tmpDir, filename).toPath, new File(originDir, filename).toPath)
      }
      Files.move(tmpDir.toPath, snapshotDir.toPath, ATOMIC_MOVE)
      success = true
    } finally {
      // Try to clean up if unsuccessful; listFiles may return null if tmpDir
      // disappeared out from under us.
      if (!success) {
        val leftovers = tmpDir.listFiles
        if (leftovers != null) {
          leftovers.foreach(_.delete)
        }
        tmpDir.delete
      }
    }
  }
}
| cloudant-labs/clouseau | src/main/scala/com/cloudant/clouseau/ExternalSnapshotDeletionPolicy.scala | Scala | apache-2.0 | 2,720 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.SparkException
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.types.DataType
/**
* User-defined function.
* @param function The user defined scala function to run.
* Note that if you use primitive parameters, you are not able to check if it is
* null or not, and the UDF will return null for you if the primitive input is
 *                 null. Use boxed type or [[Option]] if you want to do the null-handling yourself.
* @param dataType Return type of function.
* @param children The input expressions of this UDF.
* @param inputTypes The expected input types of this UDF, used to perform type coercion. If we do
* not want to perform coercion, simply use "Nil". Note that it would've been
* better to use Option of Seq[DataType] so we can use "None" as the case for no
* type coercion. However, that would require more refactoring of the codebase.
* @param udfName The user-specified name of this UDF.
* @param nullable True if the UDF can return null value.
* @param udfDeterministic True if the UDF is deterministic. Deterministic UDF returns same result
* each time it is invoked with a particular input.
*/
case class ScalaUDF(
function: AnyRef,
dataType: DataType,
children: Seq[Expression],
inputTypes: Seq[DataType] = Nil,
udfName: Option[String] = None,
nullable: Boolean = true,
udfDeterministic: Boolean = true)
extends Expression with ImplicitCastInputTypes with NonSQLExpression with UserDefinedExpression {
// The constructor for SPARK 2.1 and 2.2
/**
 * Auxiliary constructor matching the ScalaUDF signature used by Spark 2.1
 * and 2.2 (see the comment above); the fields added later default to
 * nullable = true and udfDeterministic = true.
 */
def this(
    function: AnyRef,
    dataType: DataType,
    children: Seq[Expression],
    inputTypes: Seq[DataType],
    udfName: Option[String]) = {
  this(
    function, dataType, children, inputTypes, udfName, nullable = true, udfDeterministic = true)
}
override lazy val deterministic: Boolean = udfDeterministic && children.forall(_.deterministic)
/** Renders as "UDF:name(arg1, ...)" when named, or "UDF(arg1, ...)" otherwise. */
override def toString: String = {
  val label = udfName.fold("UDF")(name => s"UDF:$name")
  s"$label(${children.mkString(", ")})"
}
// scalastyle:off line.size.limit
/** This method has been generated by this script
(1 to 22).map { x =>
val anys = (1 to x).map(x => "Any").reduce(_ + ", " + _)
val childs = (0 to x - 1).map(x => s"val child$x = children($x)").reduce(_ + "\\n " + _)
val converters = (0 to x - 1).map(x => s"lazy val converter$x = CatalystTypeConverters.createToScalaConverter(child$x.dataType)").reduce(_ + "\\n " + _)
val evals = (0 to x - 1).map(x => s"converter$x(child$x.eval(input))").reduce(_ + ",\\n " + _)
s"""case $x =>
val func = function.asInstanceOf[($anys) => Any]
$childs
$converters
(input: InternalRow) => {
func(
$evals)
}
"""
}.foreach(println)
*/
private[this] val f = children.size match {
case 0 =>
val func = function.asInstanceOf[() => Any]
(input: InternalRow) => {
func()
}
case 1 =>
val func = function.asInstanceOf[(Any) => Any]
val child0 = children(0)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)))
}
case 2 =>
val func = function.asInstanceOf[(Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)))
}
case 3 =>
val func = function.asInstanceOf[(Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)))
}
case 4 =>
val func = function.asInstanceOf[(Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)))
}
case 5 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)))
}
case 6 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)))
}
case 7 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)))
}
case 8 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)))
}
case 9 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)))
}
case 10 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)))
}
case 11 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)))
}
case 12 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)))
}
case 13 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)))
}
case 14 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)))
}
case 15 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)))
}
case 16 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)))
}
case 17 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)))
}
case 18 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)))
}
case 19 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
val child18 = children(18)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)),
converter18(child18.eval(input)))
}
case 20 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
val child18 = children(18)
val child19 = children(19)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
lazy val converter19 = CatalystTypeConverters.createToScalaConverter(child19.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)),
converter18(child18.eval(input)),
converter19(child19.eval(input)))
}
case 21 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
val child18 = children(18)
val child19 = children(19)
val child20 = children(20)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
lazy val converter19 = CatalystTypeConverters.createToScalaConverter(child19.dataType)
lazy val converter20 = CatalystTypeConverters.createToScalaConverter(child20.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)),
converter18(child18.eval(input)),
converter19(child19.eval(input)),
converter20(child20.eval(input)))
}
case 22 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
val child18 = children(18)
val child19 = children(19)
val child20 = children(20)
val child21 = children(21)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
lazy val converter19 = CatalystTypeConverters.createToScalaConverter(child19.dataType)
lazy val converter20 = CatalystTypeConverters.createToScalaConverter(child20.dataType)
lazy val converter21 = CatalystTypeConverters.createToScalaConverter(child21.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)),
converter18(child18.eval(input)),
converter19(child19.eval(input)),
converter20(child20.eval(input)),
converter21(child21.eval(input)))
}
}
// scalastyle:on line.size.limit
// Emits the Java source that evaluates this UDF in whole-stage codegen:
// evaluate each child expression, convert each value to its Scala
// representation, invoke the user function, and convert the result back to
// Catalyst's internal representation.
override def doGenCode(
ctx: CodegenContext,
ev: ExprCode): ExprCode = {
val converterClassName = classOf[Any => Any].getName
// The type converters for inputs and the result.
val converters: Array[Any => Any] = children.map { c =>
CatalystTypeConverters.createToScalaConverter(c.dataType)
}.toArray :+ CatalystTypeConverters.createToCatalystConverter(dataType)
// Register reference objects with the codegen context so the generated Java
// can reach the converter array and the precomputed error message.
val convertersTerm = ctx.addReferenceObj("converters", converters, s"$converterClassName[]")
val errorMsgTerm = ctx.addReferenceObj("errMsg", udfErrorMessage)
val resultTerm = ctx.freshName("result")
// codegen for children expressions
val evals = children.map(_.genCode(ctx))
// Generate the codes for expressions and calling user-defined function
// We need to get the boxedType of dataType's javaType here. Because for the dataType
// such as IntegerType, its javaType is `int` and the returned type of user-defined
// function is Object. Trying to convert an Object to `int` will cause casting exception.
val evalCode = evals.map(_.code).mkString("\\n")
// For each child: a fresh Java local that holds either null (when the child
// evaluated to null) or the converted Scala value handed to the function.
val (funcArgs, initArgs) = evals.zipWithIndex.map { case (eval, i) =>
val argTerm = ctx.freshName("arg")
val convert = s"$convertersTerm[$i].apply(${eval.value})"
val initArg = s"Object $argTerm = ${eval.isNull} ? null : $convert;"
(argTerm, initArg)
}.unzip
val udf = ctx.addReferenceObj("udf", function, s"scala.Function${children.length}")
val getFuncResult = s"$udf.apply(${funcArgs.mkString(", ")})"
// The last slot of the converter array maps the Scala result back to Catalyst.
val resultConverter = s"$convertersTerm[${children.length}]"
val boxedType = CodeGenerator.boxedType(dataType)
// Any exception thrown by the user function is rethrown as a SparkException
// carrying the descriptive udfErrorMessage.
val callFunc =
s"""
|$boxedType $resultTerm = null;
|try {
| $resultTerm = ($boxedType)$resultConverter.apply($getFuncResult);
|} catch (Exception e) {
| throw new org.apache.spark.SparkException($errorMsgTerm, e);
|}
""".stripMargin
// A null result marks the output as null; otherwise unbox into ev.value.
ev.copy(code =
code"""
|$evalCode
|${initArgs.mkString("\\n")}
|$callFunc
|
|boolean ${ev.isNull} = $resultTerm == null;
|${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
|if (!${ev.isNull}) {
| ${ev.value} = $resultTerm;
|}
""".stripMargin)
}
// Converts the Scala value returned by the UDF back to Catalyst's internal
// representation (used by the interpreted eval path).
private[this] val resultConverter = CatalystTypeConverters.createToCatalystConverter(dataType)
// Human-readable description of this UDF used when evaluation fails, of the
// form: "Failed to execute user defined function(<class>: (<input types>) => <output type>)".
lazy val udfErrorMessage = {
  val functionName = function.getClass.getSimpleName
  val argumentTypes = children.map(_.dataType.catalogString)
  s"Failed to execute user defined function($functionName: (${argumentTypes.mkString(", ")}) => ${dataType.catalogString})"
}
// Interpreted evaluation path: run the wrapped function `f` on the input row
// and convert its result to Catalyst's internal representation. Any exception
// raised by the user code is rethrown as a SparkException whose message
// identifies the UDF and its signature.
override def eval(input: InternalRow): Any = {
  val udfOutput =
    try f(input)
    catch {
      case e: Exception => throw new SparkException(udfErrorMessage, e)
    }
  resultConverter(udfOutput)
}
}
| tejasapatil/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDF.scala | Scala | apache-2.0 | 52,937 |
trait T {
def apply(x: Double): Double
}
trait Base {
def foo: T
/*resolved: true: applicable: true*/ foo(1d)
}
class Derived1 extends Base {
val foo: T = null
/*resolved: true: applicable: true*/ foo(1d)
}
() | LPTK/intellij-scala | testdata/resolve2/bug3/SCL2073.scala | Scala | apache-2.0 | 222 |
package org.glavo.dotty {
import scala.collection.mutable
// A node of the lightweight XML-like tree built in this test file: either a
// Tag with children or a plain Text leaf.
sealed trait Node {
// Renders this node as a string, indented by `n` characters.
def mkString(n: Int): String
}
// A mutable element node: renders as <name attrs>children</name> and supports
// a builder DSL through the two `apply` overloads below.
class Tag(val name: String,
val attributes: mutable.LinkedHashMap[Symbol, String] = mutable.LinkedHashMap(),
val children: mutable.Buffer[Node] = mutable.Buffer()) extends Node {
// Renders the open tag with quoted attributes, the children indented by four
// extra characters (or a newline when there are none), then the close tag.
override def mkString(n: Int): String = {
Tag.spaces(n) + s"<$name ${attributes.map { case (k,v) => k.name + "=" + Tag.unescape(v) }.mkString(" ")}>" +
(if(children.isEmpty) "\\n"
else children.map(_.mkString(n + 4)).mkString("\\n", "\\n", "\\n")) +
Tag.spaces(n) + s"</$name>"
}
// Adds attribute pairs to this tag and returns it for chaining.
def apply(attrs: (Symbol, String)*): this.type = {
attributes ++= attrs
this
}
// Runs the body `f` with this tag as the context function argument; when an
// enclosing tag is implicitly available (i.e. `t` is not the null default),
// this tag registers itself as a child of that enclosing tag first.
def apply[U](f: Tag ?=> U)(implicit t: Tag = null): this.type = {
if(t != null) t.children += this
f(using this)
this
}
}
/** Helpers for building [[Tag]]s: indentation, attribute quoting, and the
  * implicit conversions that let a Symbol act as a tag name.
  */
object Tag {
  /** Returns a string of `n` space characters (empty when `n == 0`).
    * Fix: the previous implementation filled the char array with the integer
    * literal 0 — i.e. the NUL character — rather than actual spaces, so the
    * "indentation" it produced was a run of '\\u0000' characters.
    */
  def spaces(n: Int = 0): String = " " * n
  /** Wraps an attribute value in double quotes (no escaping of the content). */
  def unescape(str: String): String = {
    "\\"" + str + "\\""
  }
  /** Allows a plain Symbol to be used where a Tag is expected. */
  implicit def symbolToTag(symbol: Symbol): Tag =
    new Tag(symbol.name)
  /** Enables the `'key := "value"` syntax for attribute pairs. */
  implicit class PairMaker(val tag: Symbol) extends AnyVal {
    def :=(value: String): (Symbol, String) = (tag, value)
  }
}
// A leaf node holding raw text; rendered as the text preceded by `n`
// indentation characters.
class Text(val value: String) extends Node {
override def mkString(n: Int): String = {
Tag.spaces(n) + value
}
}
}
// This file lives under tests/neg: the expression below is expected to FAIL
// compilation at the position marked `// error`, which is what the test checks.
object Test {
import org.glavo.dotty._
import org.glavo.dotty.Tag._
'html{} // error
}
| som-snytt/dotty | tests/neg/i2960.scala | Scala | apache-2.0 | 1,648 |
// Copyright (C) 2014 Fehmi Can Saglam (@fehmicans) and contributors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package reactivemongo.extensions.dao
import org.joda.time.DateTime
import reactivemongo.bson._
// BSON (de)serialization handlers shared by the DAO layer: a Joda DateTime
// codec plus several Map codecs for documents with dynamic string keys.
object Handlers {
// Maps BSONDateTime <-> Joda DateTime via the underlying epoch milliseconds.
implicit object BSONDateTimeHandler
extends BSONReader[BSONDateTime, DateTime]
with BSONWriter[DateTime, BSONDateTime] {
def read(bson: BSONDateTime): DateTime = new DateTime(bson.value)
def write(date: DateTime) = BSONDateTime(date.getMillis)
}
/* Source: https://github.com/hmrc/simple-reactivemongo/blob/master/src/main/scala/uk/gov/hmrc/mongo/ExtraBSONHandlers.scala */
// Reads a document into Map[String, T]. Elements whose value cannot be seen
// as T (seeAsOpt returns None) are silently dropped by the flatten.
implicit def MapBSONReader[T](implicit reader: BSONReader[_ <: BSONValue, T]): BSONDocumentReader[Map[String, T]] =
new BSONDocumentReader[Map[String, T]] {
def read(doc: BSONDocument): Map[String, T] = {
doc.elements.collect {
case (key, value) => value.seeAsOpt[T](reader) map {
ov => (key, ov)
}
}.flatten.toMap
}
}
/* Source: https://github.com/hmrc/simple-reactivemongo/blob/master/src/main/scala/uk/gov/hmrc/mongo/ExtraBSONHandlers.scala */
// Writes a Map[String, T] as a document, one element per map entry.
implicit def MapBSONWriter[T](implicit writer: BSONWriter[T, _ <: BSONValue]): BSONDocumentWriter[Map[String, T]] = new BSONDocumentWriter[Map[String, T]] {
def write(doc: Map[String, T]): BSONDocument = {
BSONDocument(doc.toTraversable map (t => (t._1, writer.write(t._2))))
}
}
/* Source: https://github.com/ReactiveMongo/ReactiveMongo/blob/master/driver/samples/BSON.scala */
// Reads a document of nested documents into Map[String, V].
// NOTE(review): seeAsTry(...).get throws when a value is not a BSONDocument —
// the inline comment below records that assumption; there is no fallback.
implicit def MapReader[V](implicit vr: BSONDocumentReader[V]): BSONDocumentReader[Map[String, V]] = new BSONDocumentReader[Map[String, V]] {
def read(bson: BSONDocument): Map[String, V] = {
val elements = bson.elements.map { tuple =>
// assume that all values in the document are BSONDocuments
tuple._1 -> vr.read(tuple._2.seeAsTry[BSONDocument].get)
}
elements.toMap
}
}
/* Source: https://github.com/ReactiveMongo/ReactiveMongo/blob/master/driver/samples/BSON.scala */
// Writes a Map of values that serialize to documents; entries are mapped
// lazily via toStream before the enclosing document is built.
implicit def MapWriter[V](implicit vw: BSONDocumentWriter[V]): BSONDocumentWriter[Map[String, V]] = new BSONDocumentWriter[Map[String, V]] {
def write(map: Map[String, V]): BSONDocument = {
val elements = map.toStream.map { tuple =>
tuple._1 -> vw.write(tuple._2)
}
BSONDocument(elements)
}
}
}
| fehmicansaglam/reactivemongo-extensions | core/src/main/scala/dao/Handlers.scala | Scala | apache-2.0 | 3,034 |
/*
* Copyright 2015 – 2016 Martin Seeler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.oanda.prices
import akka.stream.testkit.scaladsl.TestSink
import cats.data.Xor
import org.scalatest._
import org.scalatest.prop.PropertyChecks
import rx.oanda.OandaEnvironment
import rx.oanda.utils.Heartbeat
// Verifies that PricesClient.livePrices emits a stream of Xor values — Price
// on the left, Heartbeat on the right — in the order served by the endpoint.
// NOTE(review): FakePriceStreamingEndpoints presumably stubs the HTTP layer
// with canned responses; confirm against that trait.
class PricesClientStreamingSpec extends FlatSpec with PropertyChecks with Matchers with FakePriceStreamingEndpoints {
behavior of "The PricesClient"
// Client under test, pointed at the trade-practice environment with a dummy
// token (no real network traffic is expected in this spec).
val testClient = new PricesClient(OandaEnvironment.TradePracticeEnvironment("token"))
it must "stream prices and heartbeats with authentication" in {
// Drive the stream with a TestSink probe: each requestNext pulls exactly
// one element and asserts its value, then completion is expected.
testClient.livePrices(8954946L, "AUD_CAD" :: "AUD_CHF" :: Nil)
.runWith(TestSink.probe[Xor[Price, Heartbeat]])
.requestNext(Xor.left(Price("AUD_CAD", 1391114828000000L, 0.98114, 0.98139)))
.requestNext(Xor.left(Price("AUD_CHF", 1391114828000000L, 0.79353, 0.79382)))
.requestNext(Xor.left(Price("AUD_CHF", 1391114831000000L, 0.79355, 0.79387)))
.requestNext(Xor.right(Heartbeat(1391114831000000L)))
.requestNext(Xor.left(Price("AUD_CHF", 1391114831000000L, 0.79357, 0.79390)))
.requestNext(Xor.left(Price("AUD_CAD", 1391114834000000L, 0.98112, 0.98138)))
.expectComplete()
}
}
| MartinSeeler/rx-oanda | src/test/scala/rx/oanda/prices/PricesClientStreamingSpec.scala | Scala | apache-2.0 | 1,768 |
package org.littlewings.javaee7.cdi
import javax.inject.Inject
import org.apache.deltaspike.testcontrol.api.junit.CdiTestRunner
import org.apache.deltaspike.testcontrol.api.mock.DynamicMockManager
import org.junit.Test
import org.junit.runner.RunWith
import org.mockito.Mockito._
import org.scalatest.Matchers
import org.scalatest.junit.JUnitSuite
@RunWith(classOf[CdiTestRunner])
// Verifies DeltaSpike's DynamicMockManager: a Mockito mock or spy registered
// with the manager is transparently used in place of the injected
// MessageService CDI bean.
class SimpleCdiDeltaSpikeMockWithMockitoTest extends JUnitSuite with Matchers {
// Bean under test, resolved by the CDI container (CdiTestRunner).
@Inject
var messageService: MessageService = _
// DeltaSpike hook for substituting mocks for CDI beans at runtime.
@Inject
var mockManager: DynamicMockManager = _
@Test
def withMock(): Unit = {
// Full mock: only the stubbed calls below return non-default answers.
val messageServiceMock = mock(classOf[MessageService])
when(messageServiceMock.message).thenReturn("Hello Mock!!")
when(messageServiceMock.message("[", "]")).thenReturn("Hello prefix & suffix Mock!!")
mockManager.addMock(messageServiceMock)
messageService.message should be("Hello Mock!!")
messageService.message("[", "]") should be("Hello prefix & suffix Mock!!")
}
@Test
def withSpy(): Unit = {
// Spy: `message` is stubbed, but the two-argument overload still runs the
// real implementation — hence the bracketed result asserted below.
val messageServiceSpy = spy(classOf[MessageService])
when(messageServiceSpy.message).thenReturn("Hello Mock!!")
mockManager.addMock(messageServiceSpy)
messageService.message should be("Hello Mock!!")
messageService.message("[", "]") should be("[Hello Mock!!]")
}
}
| kazuhira-r/javaee7-scala-examples | cdi-testing-deltaspike-with-mock/src/test/scala/org/littlewings/javaee7/cdi/SimpleCdiDeltaSpikeMockWithMockitoTest.scala | Scala | mit | 1,323 |
/** Entry point for module 12: prints a greeting, then delegates to the leaf
  * module's entry point.
  */
object Main12 {
  // Explicit `: Unit` result type: previously the result type of this public
  // entry point was inferred from the last expression, and a JVM entry point
  // must have the signature (Array[String])Unit.
  def main(args: Array[String]): Unit = {
    println("Hi 12!")
    MainLeaf.main(Array.empty)
  }
}
| darkocerdic/sbt-multiproject-resolving | module12/src/main/scala/Main12.scala | Scala | apache-2.0 | 111 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: blah@cliffano.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
*/
package org.openapitools.server.model
// OpenAPI-generated model of a Jenkins freestyle build record. Every field is
// optional because the upstream API may omit any of them.
case class FreeStyleBuild(
// Backticked: the JSON payload's field is the reserved word "class".
`class`: Option[String],
number: Option[Int],
url: Option[String],
actions: Option[List[CauseAction]],
building: Option[Boolean],
description: Option[String],
displayName: Option[String],
duration: Option[Int],
estimatedDuration: Option[Int],
executor: Option[String],
fullDisplayName: Option[String],
id: Option[String],
keepLog: Option[Boolean],
queueId: Option[Int],
result: Option[String],
timestamp: Option[Int],
builtOn: Option[String],
changeSet: Option[EmptyChangeLogSet]
)
| cliffano/swaggy-jenkins | clients/scalatra/generated/src/main/scala/org/openapitools/server/model/FreeStyleBuild.scala | Scala | mit | 916 |
package io.udash.demos.rest.views.book
import io.udash._
import io.udash.core.Presenter
import io.udash.demos.rest.model.{ContactId, PhoneBookId, PhoneBookInfo}
import io.udash.demos.rest.{ApplicationContext, IndexState, PhoneBookFormState}
import org.scalajs.dom
import scala.util.{Failure, Success}
import scala.concurrent.ExecutionContext.Implicits.global
// Presenter for the phone-book create/edit form. Drives the shared
// PhoneBookEditorModel from REST calls and routes back to IndexState after a
// successful create/update. Failures surface either in the model's
// loadingText or via a browser alert.
class PhoneBookFormPresenter(model: ModelProperty[PhoneBookEditorModel]) extends Presenter[PhoneBookFormState] {
import ApplicationContext._
// Initializes the form: no id means a blank "new book" form; an id triggers
// loading of the book's details, its contacts and the full contact list.
override def handleState(state: PhoneBookFormState): Unit = {
state match {
case PhoneBookFormState(None) =>
model.subProp(_.loaded).set(true)
model.subProp(_.loadingText).set("")
model.subProp(_.isNewBook).set(true)
model.subProp(_.name).set("")
model.subProp(_.description).set("")
case PhoneBookFormState(Some(id)) =>
model.subProp(_.loaded).set(false)
model.subProp(_.loadingText).set("Loading phone book data...")
model.subProp(_.isNewBook).set(false)
loadPhoneBookInfo(id)
loadSelectedContacts(id)
loadContacts()
}
}
// Loads the book's name/description into the model; on failure the error is
// shown in loadingText and `loaded` stays false.
def loadPhoneBookInfo(id: PhoneBookId): Unit = {
restServer.phoneBooks(id).load() onComplete {
case Success(book) =>
model.subProp(_.loaded).set(true)
model.subProp(_.id).set(id)
model.subProp(_.name).set(book.name)
model.subProp(_.description).set(book.description)
case Failure(ex) =>
model.subProp(_.loadingText).set(s"Problem with phone book details loading: $ex")
}
}
// Loads all known contacts (for the selection list); failures alert only.
def loadContacts(): Unit = {
restServer.contacts().load() onComplete {
case Success(contacts) =>
model.subProp(_.allContacts).set(contacts)
case Failure(ex) =>
dom.window.alert(s"Problem with contacts loading: $ex")
}
}
// Loads the book's current contacts, then wires a structure listener that
// mirrors additions/removals in the model back to the REST API.
// NOTE(review): listenStructure registers a NEW listener on every successful
// load — confirm this method cannot run twice for one presenter instance, or
// duplicate REST calls will be issued per change.
def loadSelectedContacts(id: PhoneBookId): Unit = {
restServer.phoneBooks(id).contacts().load() onComplete {
case Success(contacts) =>
model.subProp(_.selectedContacts).set(contacts)
model.subSeq(_.selectedContacts).listenStructure(patch => {
patch.added.foreach(item => addContactToBook(id, item.get))
patch.removed.foreach(item => removeContactFromBook(id, item.get))
})
case Failure(ex) =>
dom.window.alert(s"Problem with selected contacts loading: $ex")
}
}
// Persists an added contact; on REST failure the optimistic model change is
// rolled back (contact removed again) and the user alerted.
def addContactToBook(id: PhoneBookId, contactId: ContactId): Unit = {
restServer.phoneBooks(id).contacts().add(contactId).failed.foreach { ex =>
model.subSeq(_.selectedContacts).remove(contactId)
dom.window.alert(s"Contact adding failed: $ex")
}
}
// Persists a removed contact; on REST failure the contact is re-appended to
// the model and the user alerted.
def removeContactFromBook(id: PhoneBookId, contactId: ContactId): Unit = {
restServer.phoneBooks(id).contacts().remove(contactId).failed.foreach { ex =>
model.subSeq(_.selectedContacts).append(contactId)
dom.window.alert(s"Contact remove failed: $ex")
}
}
// Creates a new book from the form fields. The id -1 is a placeholder;
// presumably the server assigns the real id — confirm against the API.
def createPhoneBook(): Unit = {
restServer.phoneBooks().create(PhoneBookInfo(
PhoneBookId(-1),
model.subProp(_.name).get,
model.subProp(_.description).get
)) onComplete {
case Success(_) =>
applicationInstance.goTo(IndexState)
case Failure(ex) =>
dom.window.alert(s"Phone Book creation failed: $ex")
}
}
// Updates the currently-loaded book from the form fields.
def updatePhoneBook(): Unit = {
restServer.phoneBooks(model.subProp(_.id).get).update(PhoneBookInfo(
model.subProp(_.id).get,
model.subProp(_.name).get,
model.subProp(_.description).get
)) onComplete {
case Success(_) =>
applicationInstance.goTo(IndexState)
case Failure(ex) =>
dom.window.alert(s"Phone Book update failed: $ex")
}
}
}
| UdashFramework/udash-demos | rest-akka-http/frontend/src/main/scala/io/udash/demos/rest/views/book/PhoneBookFormPresenter.scala | Scala | gpl-3.0 | 3,700 |
import ru.biocad.ig.alicont.algorithms.AlgorithmType._
import ru.biocad.ig.alicont.common.Scoring
import com.typesafe.scalalogging.LazyLogging
import java.io._
import ru.biocad.ig.regions.RegionsRunner
/**
* Created with IntelliJ IDEA.
* User: mactep
* Date: 31.10.13
* Time: 9:59
*/
// Command-line entry point for ig-regions: parses the arguments with scopt
// and hands the resulting configuration to RegionsRunner.
object Main extends LazyLogging {
// All CLI options with their defaults. File fields default to null and are
// only populated from options scopt has validated as readable.
private case class Config(amino : Boolean = false, fasta : File = null, kabat : File = null, source : File = null,
count : Int = 3, gap_open : Double = -10, gap_ext : Double = -1, gap : Double = -5,
scoring : Array[Array[Double]] = null, align : AlgorithmType = SEMIGLOBAL,
marking : Boolean = false, filter : Boolean = false, outdir : File = null,
add_group : Boolean = false, par : Boolean = false, debug : Boolean = false)
// Parses args; on success runs the annotation pipeline, logging any fatal
// exception (and printing its stack trace when --debug is set). On parse
// failure, usage is shown.
def main(args : Array[String]) = {
val parser = getParser
parser.parse(args, Config()) match {
case Some(config) =>
try {
RegionsRunner.run(config.amino, config.fasta, config.kabat, config.source,
config.count, config.gap_open, config.gap_ext, config.gap,
config.scoring, config.align, config.marking, config.filter,
config.outdir, config.par, config.add_group)
} catch {
case e : Exception =>
logger.error(s"Fatal error: ${e.getMessage}")
if (config.debug) {
e.printStackTrace()
}
}
case None =>
parser.showUsage
}
}
// Builds the scopt parser: required inputs, optional flags, alignment
// parameters, algorithm selection, and debug/help switches. Each File option
// is validated for readability at parse time.
private def getParser : scopt.OptionParser[Config] = new scopt.OptionParser[Config]("ig-regions") {
head("ig-regions", "1.0-SNAPSHOT")
note("Required:")
opt[File]('s', "source") required() action {(s, c) => c.copy(source = s)} validate {x =>
if (x.canRead) success else failure("Source file does not exists")
} text "file to annotate [fasta]"
opt[File]('r', "reference") required() action {(s, c) => c.copy(fasta = s)} validate {x =>
if (x.canRead) success else failure("Reference file does not exists")
} text "reference file [fasta]"
opt[File]('m', "marking") required() action {(s, c) => c.copy(kabat = s)} validate {x =>
if (x.canRead) success else failure("Marking file does not exists")
} text "reference marking [igblast marking format]"
note("Optional:")
opt[File]("outdir") action {(x, c) => c.copy(outdir = x)} validate {x =>
if (x.canRead) success else failure("Output directory does not exists")
} text "output directory"
opt[Unit]("par") action {(_, c) => c.copy(par = true)} text "Use parallel mode (highly experimental)"
opt[Unit]("group") action {(_, c) => c.copy(add_group = true)} text "Add germline group to name"
opt[Unit]('a', "amino") action {(_, c) => c.copy(amino = true)} text "use amino acid data"
opt[Unit]('l', "igblast-like") action {(_, c) => c.copy(marking = true)} text "output as igblast marking"
opt[Unit]("filter") action {(s, c) => c.copy(filter = true)} text "enable simple filtration (default: disabled)"
opt[Int]('n', "alignments") action {(s, c) => c.copy(count = s)} text "number of using alignments for annotation (default: 3)"
note("\\n alignment parameters\\n")
opt[File]('x', "matrix") action {(x, c) => c.copy(scoring = Scoring.loadMatrix(x))} validate {x =>
if (x.canRead) success else failure("Scoring matrix file does not exists")
} text "use external alignment matrix [txt]"
opt[Double]('g', "gap") action {(s, c) => c.copy(gap = s)} text "simple gap score (default: -5)"
opt[Double]('o', "gap-open") action {(s, c) => c.copy(gap_open = s)} text "affine open gap score (default: -10)"
opt[Double]('e', "gap-ext") action {(s, c) => c.copy(gap_ext = s)} text "affine extension gap score (default: -1)"
note("\\n alignment algorithms\\n")
opt[Unit]("global") action {(_, c) => c.copy(align = GLOBAL)} text "use global alignment"
opt[Unit]("local") action {(_, c) => c.copy(align = LOCAL)} text "use local alignment"
opt[Unit]("semiglobal") action {(_, c) => c.copy(align = SEMIGLOBAL)} text "use semiglobal alignment (default)"
opt[Unit]("affine-global") action {(_, c) => c.copy(align = AFFINE_GLOBAL)} text "use global alignment"
opt[Unit]("affine-local") action {(_, c) => c.copy(align = AFFINE_LOCAL)} text "use local alignment"
opt[Unit]("affine-semiglobal") action {(_, c) => c.copy(align = AFFINE_SEMIGLOBAL)} text "use semiglobal alignment"
note("Debug:")
opt[Unit]("debug") action {(_, c) => c.copy(debug = true)} text "show exceptions on fail"
note("Help:")
help("help") text "this message"
}
}
| zmactep/igcat | ig-regions/src/main/scala/Main.scala | Scala | bsd-2-clause | 4,675 |
package org.casualmiracles.utilities
import org.casualmiracles.finance.contracts.Contract
import org.casualmiracles.finance.contracts.PR
import org.casualmiracles.finance.contracts.PR._
/**
* @author yl
*/
class ContractsTracer extends Tracer {

  /** Number of time steps of a process value to render when tracing. */
  val tracehorizon: Int = 7

  /** Prints contract `c` indented by `i` tab stops, if tracing is enabled. */
  def trace(i: Int, c: Contract): Unit =
    if (tracing) {
      output("\\t" * i)
      outputln(c)
    }

  /**
   * When tracing is enabled, prints the label `s` and the process value `pr`
   * (formatted up to `tracehorizon` steps) indented by `i` tab stops.
   * Always returns `pr` unchanged so calls can be threaded through expressions.
   */
  def trace[T](i: Int, s: String, pr: PR[T]): PR[T] = {
    if (tracing) {
      val indent = "\\t" * i
      output(indent)
      outputln(i, s)
      outputln(formatPr(pr, tracehorizon, indent))
    }
    pr
  }
}
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.box.ValidatableBox._
import uk.gov.hmrc.ct.box._
/**
 * Box AC5076C: optional free-text "Additional information" note.
 * Validation checks the standard Companies House length limit and the
 * CoHo illegal-character restrictions.
 */
case class AC5076C(value: Option[String]) extends CtBoxIdentifier(name = "Additional information (optional)")
  with CtOptionalString
  with Input
  with ValidatableBox[Frs102AccountsBoxRetriever] {

  override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] = {
    // Absent text is validated as the empty string, matching the original behaviour.
    val text = value.getOrElse("")
    collectErrors(
      validateStringMaxLength("AC5076C", text, StandardCohoTextFieldLimit),
      validateCoHoStringReturnIllegalChars("AC5076C", this)
    )
  }
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC5076C.scala | Scala | apache-2.0 | 1,391 |
package me.gregd.cineworld.frontend.styles
/**
 * CSS class-name constants for the films view. Each token is prefixed with
 * "FilmsStyle-" and (with one exception) carries a trailing space so that
 * tokens can be concatenated directly into a `className` attribute.
 */
object FilmsStyle {
  private val prefix = "FilmsStyle"

  // Builds the single-class token "<prefix>-<name> " (note the trailing space).
  private def css(name: String): String = prefix + "-" + name + " "

  val filmListContainer = css("filmListContainer")
  val container = css("container")
  val attribution = css("attribution")
  val header = css("header")
  val menuGroup = css("menuGroup")
  val select = css("select")
  val label = css("label")
  val filmCard = css("filmCard")
  val filmTitle = css("filmTitle")
  // Composite tokens append another token's full value.
  val longFilmTitle = css("longFilmTitle") + filmTitle
  val filmPosition = css("filmPosition")
  val filmInfo = css("filmInfo") + filmPosition
  // NB: unlike every other token, this one has no trailing space.
  val threedee = prefix + "-threedee"
  val filmBackground = css("filmBackground") + filmPosition
  val ratings = css("ratings")
  val rating = css("rating")
  val tmdb = css("tmdb") + rating
  val rtAudience = css("rtAudience") + rating
  val imdb = css("imdb") + rating
  val times = css("times")
  val time = css("time")
}
| Grogs/cinema-service | client/src/main/scala/me/gregd/cineworld/frontend/styles/FilmsStyle.scala | Scala | gpl-3.0 | 977 |
package com.seanshubin.http.values.domain
import java.io.ByteArrayOutputStream
import java.nio.charset.StandardCharsets
import javax.servlet.{ServletOutputStream, WriteListener}
/**
 * Test double for [[ServletOutputStream]] that captures every byte written
 * into an in-memory buffer so tests can inspect the produced response body.
 */
class ServletOutputStreamStub extends ServletOutputStream {
  // Accumulates all bytes passed to write(Int).
  val byteArrayOutputStream = new ByteArrayOutputStream()
  // Decodes the captured bytes as UTF-8 for convenient string assertions.
  def asUtf8:String = new String(byteArrayOutputStream.toByteArray, StandardCharsets.UTF_8)
  // Unimplemented on purpose (???): not exercised by the tests using this stub.
  override def isReady: Boolean = ???
  override def setWriteListener(writeListener: WriteListener): Unit = ???
  override def write(b: Int): Unit = byteArrayOutputStream.write(b)
}
| SeanShubin/http-values | domain/src/test/scala/com/seanshubin/http/values/domain/ServletOutputStreamStub.scala | Scala | unlicense | 574 |
package com.github.mrpowers.spark.daria.sql
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.sql.types.{DataType, StructField, StructType}
object SparkSessionExt {

  implicit class SparkSessionMethods(spark: SparkSession) {

    /**
     * Normalises heterogeneous row data to `Row`s: accepts ready-made `Row`
     * instances, tuples (or any other `Product`), or single bare values.
     * The `Row` case must stay before the `Product` case so rows are passed
     * through unchanged. (The redundant `asInstanceOf` casts on the typed
     * match bindings have been removed — the binding already has the type.)
     */
    private def asRows[U](values: List[U]): List[Row] = {
      values.map {
        case row: Row => row
        case product: Product => Row(product.productIterator.toList: _*)
        case single => Row(single)
      }
    }

    /**
     * Normalises schema descriptions: accepts ready-made `StructField`s or
     * `(name, dataType, nullable)` tuples.
     */
    private def asSchema[U <: Product](fields: List[U]): List[StructField] = {
      fields.map {
        case field: StructField => field
        case (name: String, dataType: DataType, nullable: Boolean) =>
          StructField(
            name,
            dataType,
            nullable
          )
      }
    }

    /**
     * Creates a DataFrame, similar to createDataFrame, but with better syntax
     * spark-daria defined a createDF method that allows for the terse syntax of `toDF` and the control of `createDataFrame`.
     *
     * spark.createDF(
     *   List(
     *     ("bob", 45),
     *     ("liz", 25),
     *     ("freeman", 32)
     *   ), List(
     *     ("name", StringType, true),
     *     ("age", IntegerType, false)
     *   )
     * )
     *
     * The `createDF` method can also be used with lists of `Row` and `StructField` objects.
     *
     * spark.createDF(
     *   List(
     *     Row("bob", 45),
     *     Row("liz", 25),
     *     Row("freeman", 32)
     *   ), List(
     *     StructField("name", StringType, true),
     *     StructField("age", IntegerType, false)
     *   )
     * )
     */
    def createDF[U, T <: Product](rowData: List[U], fields: List[T]): DataFrame = {
      spark.createDataFrame(
        spark.sparkContext.parallelize(asRows(rowData)),
        StructType(asSchema(fields))
      )
    }

    /**
     * Creates an empty DataFrame given schema fields
     *
     * This is a handy fallback when you fail to read from a data source
     *
     * val schema = List(StructField("col1", IntegerType))
     * val df = Try {
     *   spark.read.parquet("non-existent-path")
     * }.getOrElse(spark.createEmptyDf(schema))
     */
    def createEmptyDF[T <: Product](fields: List[T]): DataFrame = {
      spark.createDataFrame(
        spark.sparkContext.emptyRDD[Row],
        StructType(asSchema(fields))
      )
    }
  }
}
| MrPowers/spark-daria | src/main/scala/com/github/mrpowers/spark/daria/sql/SparkSessionExt.scala | Scala | mit | 2,413 |
// Compiler (pos) test: type-level Peano naturals driving lookups into a
// type-level heterogeneous list. Only types matter here — all values are null.
class Test {
  def test = {
    // An HList type with element types Int, String, Boolean, String.
    val l1 = null: Int #: String #: Boolean #: String #: HNil.type
    // The type-level number 2.
    type _2 = Succ[Succ[Zero.type]]
    // Dropping 2 elements leaves Boolean #: String #: HNil, whose Head is Boolean.
    val t1: Boolean = null.asInstanceOf[ l1.type#Drop[_2]#Head ]
    // Apply[N] is defined as Drop[N]#Head, so this is the same lookup.
    val t2: Boolean = null.asInstanceOf[ l1.type#Apply[_2] ]
  }
}
// Peano naturals encoded at the type level. Fold is a type-level catamorphism:
// it applies F to Z as many times as the numeral's value.
sealed trait Nat {
  type Fold[U, F[_ <: U] <: U, Z <: U] <: U
}
final object Zero extends Nat {
  type Fold[U, F[_ <: U] <: U, Z <: U] = Z
}
final class Succ[N <: Nat] extends Nat {
  type Fold[U, F[_ <: U] <: U, Z <: U] = F[N#Fold[U, F, Z]]
}
// Type-level list: Drop[N] folds the Tail projection N times over this.type;
// Apply[N] is the Head of the list after dropping N elements.
trait HList {
  type Head
  type Tail <: HList
  type Drop[N <: Nat] = N#Fold[HList, ({ type L[X <: HList] = X#Tail })#L, this.type]
  type Apply[N <: Nat] = Drop[N]#Head
}
// Infix cons cell (H #: T) and the empty list terminator.
class #: [H, T <: HList] extends HList { type Head = H; type Tail = T }
object HNil extends HList { type Head = Nothing; type Tail = Nothing }
| loskutov/intellij-scala | testdata/scalacTests/pos/t5294b.scala | Scala | apache-2.0 | 836 |
package net.liftmodules.fobobs4.snippet.FoBo
import scala.xml.{NodeSeq, Text}
import net.liftweb.util._
import net.liftweb.common._
import net.liftweb.http._
import net.liftweb._
import Helpers._
import net.liftweb.http.js._
import net.liftweb.http.js.JsCmds._
import net.liftmodules.fobobs4.lib.{ScriptHelper => sch}
/**
* ==Script Helper Snippet Bootstrap v4.x==
*
* This snippet class contains functions for common transform operations useful when working
* with the any toolkit, for convenience included as a Bootstrap v3.x snippet.
* '''Example''' Most of the functions in this class can be invoked using the following pattern.
* {{{ data-lift="FoBo.Bs4ScriptHelper.functionName?paramName=paramValue&...." }}}
* For more examples see the individual transform functions.
* @since v1.1
*/
class Bs4ScriptHelper extends StatefulSnippet with Loggable {
  // Renamed from `sch` to `scriptHelper`: the old name shadowed the
  // `ScriptHelper => sch` type alias imported at the top of this file.
  private lazy val scriptHelper = new sch()

  // Maps snippet invocation names to the corresponding render functions.
  def dispatch = {
    case "registerLoadEventFactory" => registerLoadEventFactory
    case "registerLoadEventFactoryAppendGlobalJs" =>
      registerLoadEventFactoryAppendGlobalJs
  }

  /**
   * This function register a load event factory script
   *
   * '''Example'''
   * {{{
   *   <head>
   *     :
   *     <script data-lift="FoBo.Bs4Comp.activateDropdown?on=.dropdown-toggle"></script>
   *     <script data-lift="FoBo.Bs4ScriptHelper.registerLoadEventFactory"></script>
   *   </head>
   * }}}
   * This load event factory script has to be registered ones before any activation can be loaded.
   *
   * '''Result:''' This example snippet invocation will result in the following script:
   * {{{
   *   <script type="text/javascript">// drop down activation that uses the load factory </script>
   *   <script type="text/javascript">
   *   // <![CDATA[
   *     function addLoadEvent(func) {
   *       var oldonload = window.onload;
   *       if (typeof window.onload != 'function') {
   *         window.onload = func;
   *       } else {
   *         window.onload = function() {
   *           if (oldonload) {
   *             oldonload();
   *           }
   *           func();
   *         }
   *       }
   *     }
   *   // ]]>
   *   </script>
   * }}}
   *
   */
  def registerLoadEventFactory: CssSel = {
    " *" #> JsCmds.Script(scriptHelper.registerLoadEventFactoryScript())
  }

  /**
   * '''Lift 3 alternativ''' -- This function creates a register load event factory function and
   * appends it to lift's page-script global space.
   * @since v1.4
   */
  def registerLoadEventFactoryAppendGlobalJs: CssSel = {
    // val (was var): the script command is never reassigned.
    val js = scriptHelper.registerLoadEventFactoryScript()
    S.appendGlobalJs(js)
    " *" #> ""
  }
}
| karma4u101/FoBo | Bootstrap/Bootstrap4/TwBs-Bootstrap4-API/src/main/scala/net/liftmodules/fobobs4/snippet/FoBo/Bs4ScriptHelper.scala | Scala | apache-2.0 | 2,774 |
package org.jetbrains.plugins.scala.lang.psi.controlFlow
import org.jetbrains.annotations.Nullable
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement
/**
* @author ilyas
*/
/**
 * A node in a control-flow graph: a numbered instruction with mutable links
 * to its successor and predecessor instructions.
 *
 * @author ilyas
 */
trait Instruction {
  /** Instructions control may flow to after this one. */
  def succ( /*put call env here*/ ): Iterable[Instruction]
  /** Instructions control may arrive from. */
  def pred( /*put call env here*/ ): Iterable[Instruction]
  // Explicit `: Unit` result types added: procedure-style abstract defs
  // (no result type) are deprecated in later Scala versions.
  def addSucc(s: Instruction): Unit
  def addPred(p: Instruction): Unit
  /** Position of this instruction in the graph's numbering. */
  val num: Int
  // NOTE(review): @Nullable on an Option-returning member looks redundant —
  // kept as-is for compatibility; confirm whether callers rely on it.
  @Nullable
  def element: Option[ScalaPsiElement]
}
package com.gilt.opm.storage
import com.gilt.gfc.id.Guid
import com.gilt.gfc.time.Timestamp
import com.gilt.opm.OpmFactory._
import com.gilt.opm.{OpmMongoStorage, CollectionHelper, OpmObject}
import org.scalatest.FlatSpec
import org.scalatest.Matchers
object OpmMongoGiltTypeSupportSpec {
  // OPM model exercising the Gilt-specific field types (typed Guids and
  // Timestamps, plain and wrapped in Seq/Option) whose Mongo round-tripping
  // is under test in the spec below.
  trait GiltTypes extends OpmObject {
    def typedGuid: Guid[GiltTypes]
    def seqTypedGuid: Seq[Guid[GiltTypes]]
    def timestamp: Timestamp
    def optTimestamp: Option[Timestamp]
  }
}
import OpmMongoGiltTypeSupportSpec._
class OpmMongoGiltTypeSupportSpec
  extends FlatSpec with Matchers with OpmMongoStorage[GiltTypes] with OpmMongoGiltTypeSupport with CollectionHelper {

  // Mongo collection backing this spec (consumed by CollectionHelper).
  val collectionName = "gilt_types"

  "OpmMongoGiltTypeSupport" should "allow extra gilt types to be stored and loaded" in {
    // Build an instance populating every Gilt-specific field type.
    val original = instance[GiltTypes]("key")
      .set(_.typedGuid).to(Guid.randomGuid[GiltTypes]())
      .set(_.seqTypedGuid).to(Seq(Guid.randomGuid[GiltTypes]()))
      .set(_.timestamp).to(new Timestamp())
      .set(_.optTimestamp).to(Some(new Timestamp()))

    put(original)

    // Round-trip through storage and verify equality with what was stored.
    val roundTripped = get("key")
    assert(roundTripped.isDefined)
    assert(original === roundTripped.get)
  }
}
| gilt/opm | src/test/scala/com/gilt/opm/storage/OpmMongoGiltTypeSupportSpec.scala | Scala | mit | 1,160 |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
import java.io.{ PrintStream, PrintWriter }
import java.lang.StringBuilder
import java.nio.channels.ClosedChannelException
import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger }
import org.apache.logging.log4j.core.appender.AbstractAppender
import org.apache.logging.log4j.core.{ Appender => XAppender, LogEvent => XLogEvent }
import org.apache.logging.log4j.message.{ Message, ObjectMessage, ReusableObjectMessage }
import org.apache.logging.log4j.{ Level => XLevel }
import sbt.internal.util.ConsoleAppender._
import sbt.util._
import org.apache.logging.log4j.core.AbstractLogEvent
import org.apache.logging.log4j.message.StringFormatterMessageFactory
import java.util.concurrent.atomic.AtomicReference
/**
 * Factory for [[ConsoleLogger]] instances, plus deprecated forwarders kept so
 * older modules that referenced these helpers here keep compiling.
 */
object ConsoleLogger {
  // These are provided so other modules do not break immediately.
  @deprecated("Use EscHelpers.ESC instead", "0.13.x")
  final val ESC = EscHelpers.ESC
  @deprecated("Use EscHelpers.isEscapeTerminator instead", "0.13.x")
  private[sbt] def isEscapeTerminator(c: Char): Boolean = EscHelpers.isEscapeTerminator(c)
  @deprecated("Use EscHelpers.hasEscapeSequence instead", "0.13.x")
  def hasEscapeSequence(s: String): Boolean = EscHelpers.hasEscapeSequence(s)
  @deprecated("Use EscHelpers.removeEscapeSequences instead", "0.13.x")
  def removeEscapeSequences(s: String): String = EscHelpers.removeEscapeSequences(s)
  @deprecated("Use ConsoleAppender.formatEnabledInEnv instead", "0.13.x")
  lazy val formatEnabled = ConsoleAppender.formatEnabledInEnv
  @deprecated("Use ConsoleAppender.noSuppressedMessage instead", "0.13.x")
  val noSuppressedMessage = ConsoleAppender.noSuppressedMessage
  /**
   * A new `ConsoleLogger` that logs to `out`.
   *
   * @param out Where to log the messages.
   * @return A new `ConsoleLogger` that logs to `out`.
   */
  def apply(out: PrintStream): ConsoleLogger = apply(ConsoleOut.printStreamOut(out))
  /**
   * A new `ConsoleLogger` that logs to `out`.
   *
   * @param out Where to log the messages.
   * @return A new `ConsoleLogger` that logs to `out`.
   */
  def apply(out: PrintWriter): ConsoleLogger = apply(ConsoleOut.printWriterOut(out))
  /**
   * A new `ConsoleLogger` that logs to `out`.
   *
   * @param out Where to log the messages.
   * @param ansiCodesSupported `true` if `out` supported ansi codes, `false` otherwise.
   * @param useFormat `true` to show formatting, `false` to remove it from messages.
   * @param suppressedMessage How to show suppressed stack traces.
   * @return A new `ConsoleLogger` that logs to `out`.
   */
  def apply(
      out: ConsoleOut = ConsoleOut.systemOut,
      ansiCodesSupported: Boolean = Terminal.isAnsiSupported,
      useFormat: Boolean = Terminal.isColorEnabled,
      suppressedMessage: SuppressedTraceContext => Option[String] =
        ConsoleAppender.noSuppressedMessage
  ): ConsoleLogger =
    new ConsoleLogger(out, ansiCodesSupported, useFormat, suppressedMessage)
}
/**
* A logger that logs to the console. On supported systems, the level labels are
* colored.
*/
class ConsoleLogger private[ConsoleLogger] (
    out: ConsoleOut,
    override val ansiCodesSupported: Boolean,
    useFormat: Boolean,
    suppressedMessage: SuppressedTraceContext => Option[String]
) extends BasicLogger {
  // All actual output goes through an Appender configured with the same settings.
  private[sbt] val appender: Appender =
    ConsoleAppender(generateName(), out, ansiCodesSupported, useFormat, suppressedMessage)
  override def control(event: ControlEvent.Value, message: => String): Unit =
    appender.control(event, message)
  // Only forwards to the appender when the message's level passes this
  // logger's threshold (atLevel comes from BasicLogger).
  override def log(level: Level.Value, message: => String): Unit =
    if (atLevel(level)) {
      appender.appendLog(level, message)
    }
  // Success messages are gated separately via successEnabled.
  override def success(message: => String): Unit =
    if (successEnabled) {
      appender.success(message)
    }
  // getTrace (from BasicLogger) controls how much of the stack trace is shown.
  override def trace(t: => Throwable): Unit =
    appender.trace(t, getTrace)
  override def logAll(events: Seq[LogEvent]) = events.foreach(log)
}
object ConsoleAppender {
  // ANSI escape sequences for cursor movement and screen/line clearing,
  // used by the terminal-rendering machinery.
  private[sbt] def cursorLeft(n: Int): String = s"\\u001B[${n}D"
  private[sbt] def cursorUp(n: Int): String = s"\\u001B[${n}A"
  private[sbt] def cursorDown(n: Int): String = s"\\u001B[${n}B"
  private[sbt] def scrollUp(n: Int): String = s"\\u001B[${n}S"
  private[sbt] def clearScreen(n: Int): String = s"\\u001B[${n}J"
  private[sbt] def clearLine(n: Int): String = s"\\u001B[${n}K"
  private[sbt] final val DeleteLine = "\\u001B[2K"
  private[sbt] final val ClearScreenAfterCursor = clearScreen(0)
  private[sbt] final val CursorLeft1000 = cursorLeft(1000)
  private[sbt] final val CursorDown1 = cursorDown(1)
  private[sbt] final val ClearPromptLine = CursorLeft1000 + ClearScreenAfterCursor
  // Global flag read via showProgress; defaults to off.
  private[this] val showProgressHolder: AtomicBoolean = new AtomicBoolean(false)
  def setShowProgress(b: Boolean): Unit = showProgressHolder.set(b)
  def showProgress: Boolean = showProgressHolder.get
  // The three terminal capabilities an Appender needs, abstracted so they can
  // come either from a live Terminal or from explicit values.
  private[sbt] trait Properties {
    def isAnsiSupported: Boolean
    def isColorEnabled: Boolean
    def out: ConsoleOut
  }
  private[sbt] object Properties {
    def from(terminal: Terminal): Properties = new Properties {
      override def isAnsiSupported: Boolean = terminal.isAnsiSupported
      override def isColorEnabled: Boolean = terminal.isColorEnabled
      override def out = ConsoleOut.terminalOut(terminal)
    }
    def from(o: ConsoleOut, ansi: Boolean, color: Boolean): Properties = new Properties {
      override def isAnsiSupported: Boolean = ansi
      override def isColorEnabled: Boolean = color
      override def out = o
    }
  }
  /** Hide stack trace altogether. */
  val noSuppressedMessage = (_: SuppressedTraceContext) => None
  /**
   * Indicates whether formatting has been disabled in environment variables.
   * 1. -Dsbt.log.noformat=true means no formatting.
   * 2. -Dsbt.color=always/auto/never/true/false
   * 3. -Dsbt.colour=always/auto/never/true/false
   * 4. -Dsbt.log.format=always/auto/never/true/false
   */
  @deprecated("Use Terminal.isAnsiSupported or Terminal.isColorEnabled", "1.4.0")
  lazy val formatEnabledInEnv: Boolean = Terminal.isAnsiSupported
  // Maps Terminal's tri-state answer onto the LogOption ADT.
  private[sbt] def parseLogOption(s: String): LogOption = Terminal.parseLogOption(s) match {
    case Some(true)  => LogOption.Always
    case Some(false) => LogOption.Never
    case _           => LogOption.Auto
  }
  // Source of unique suffixes for generateName().
  private[this] val generateId: AtomicInteger = new AtomicInteger
  /**
   * A new `ConsoleAppender` that writes to standard output.
   *
   * @return A new `ConsoleAppender` that writes to standard output.
   */
  def apply(): Appender = apply(ConsoleOut.systemOut)
  /**
   * A new `ConsoleAppender` that appends log message to `out`.
   *
   * @param out Where to write messages.
   * @return A new `ConsoleAppender`.
   */
  def apply(out: PrintStream): Appender = apply(ConsoleOut.printStreamOut(out))
  /**
   * A new `ConsoleAppender` that appends log messages to `out`.
   *
   * @param out Where to write messages.
   * @return A new `ConsoleAppender`.
   */
  def apply(out: PrintWriter): Appender = apply(ConsoleOut.printWriterOut(out))
  /**
   * A new `ConsoleAppender` that writes to `out`.
   *
   * @param out Where to write messages.
   * @return A new `ConsoleAppender` that writes to `out`.
   */
  def apply(out: ConsoleOut): Appender = apply(generateName(), out)
  /**
   * A new `ConsoleAppender` identified by `name`, and that writes to standard output.
   *
   * @param name An identifier for the `ConsoleAppender`.
   * @return A new `ConsoleAppender` that writes to standard output.
   */
  def apply(name: String): Appender = apply(name, ConsoleOut.systemOut)
  /**
   * A new `ConsoleAppender` identified by `name`, and that writes to `out`.
   *
   * @param name An identifier for the `ConsoleAppender`.
   * @param out Where to write messages.
   * @return A new `ConsoleAppender` that writes to `out`.
   */
  def apply(name: String, out: ConsoleOut): Appender = apply(name, out, Terminal.isAnsiSupported)
  /**
   * A new `ConsoleAppender` identified by `name`, and that writes to `out`.
   *
   * @param name An identifier for the `ConsoleAppender`.
   * @param out Where to write messages.
   * @param suppressedMessage How to handle stack traces.
   * @return A new `ConsoleAppender` that writes to `out`.
   */
  def apply(
      name: String,
      out: ConsoleOut,
      suppressedMessage: SuppressedTraceContext => Option[String]
  ): Appender = {
    val ansi = Terminal.isAnsiSupported
    apply(name, out, ansi, ansi, suppressedMessage)
  }
  /**
   * A new `ConsoleAppender` identified by `name`, and that writes to `out`.
   *
   * @param name An identifier for the `ConsoleAppender`.
   * @param out Where to write messages.
   * @param useFormat `true` to enable format (color, bold, etc.), `false` to remove formatting.
   * @return A new `ConsoleAppender` that writes to `out`.
   */
  def apply(name: String, out: ConsoleOut, useFormat: Boolean): Appender =
    apply(name, out, useFormat || Terminal.isAnsiSupported, useFormat, noSuppressedMessage)
  /**
   * A new `ConsoleAppender` identified by `name`, and that writes to `out`.
   *
   * @param name An identifier for the `ConsoleAppender`.
   * @param terminal The terminal to which this appender corresponds
   * @return A new `ConsoleAppender` that writes to `out`.
   */
  def apply(name: String, terminal: Terminal): Appender = {
    new ConsoleAppender(name, Properties.from(terminal), noSuppressedMessage)
  }
  /**
   * A new `ConsoleAppender` identified by `name`, and that writes to `out`.
   *
   * @param name An identifier for the `ConsoleAppender`.
   * @param terminal The terminal to which this appender corresponds
   * @param suppressedMessage How to handle stack traces.
   * @return A new `ConsoleAppender` that writes to `out`.
   */
  def apply(
      name: String,
      terminal: Terminal,
      suppressedMessage: SuppressedTraceContext => Option[String]
  ): Appender = {
    new ConsoleAppender(name, Properties.from(terminal), suppressedMessage)
  }
  /**
   * A new `ConsoleAppender` identified by `name`, and that writes to `out`.
   *
   * @param name An identifier for the `ConsoleAppender`.
   * @param out Where to write messages.
   * @param ansiCodesSupported `true` if the output stream supports ansi codes, `false` otherwise.
   * @param useFormat `true` to enable format (color, bold, etc.), `false` to remove
   *                  formatting.
   * @return A new `ConsoleAppender` that writes to `out`.
   */
  def apply(
      name: String,
      out: ConsoleOut,
      ansiCodesSupported: Boolean,
      useFormat: Boolean,
      suppressedMessage: SuppressedTraceContext => Option[String]
  ): Appender = {
    new ConsoleAppender(
      name,
      Properties.from(out, ansiCodesSupported, useFormat),
      suppressedMessage
    )
  }
  /**
   * Converts the Log4J `level` to the corresponding sbt level.
   *
   * @param level A level, as represented by Log4J.
   * @return The corresponding level in sbt's world.
   */
  def toLevel(level: XLevel): Level.Value =
    level match {
      case XLevel.OFF   => Level.Debug
      case XLevel.FATAL => Level.Error
      case XLevel.ERROR => Level.Error
      case XLevel.WARN  => Level.Warn
      case XLevel.INFO  => Level.Info
      case XLevel.DEBUG => Level.Debug
      case _            => Level.Debug
    }
  /**
   * Converts the sbt `level` to the corresponding Log4J level.
   *
   * @param level A level, as represented by sbt.
   * @return The corresponding level in Log4J's world.
   */
  def toXLevel(level: Level.Value): XLevel =
    level match {
      case Level.Error => XLevel.ERROR
      case Level.Warn  => XLevel.WARN
      case Level.Info  => XLevel.INFO
      case Level.Debug => XLevel.DEBUG
    }
  private[sbt] def generateName(): String = "out-" + generateId.incrementAndGet
  // NOTE: a dead `private[this] def ansiSupported` (unreferenced anywhere in
  // this object, its only possible scope) was removed.
}
// See http://stackoverflow.com/questions/24205093/how-to-create-a-custom-appender-in-log4j2
// for custom appender using Java.
// http://logging.apache.org/log4j/2.x/manual/customconfig.html
// https://logging.apache.org/log4j/2.x/log4j-core/apidocs/index.html
/**
* A logger that logs to the console. On supported systems, the level labels are
* colored.
*
* This logger is not thread-safe.
*/
class ConsoleAppender(
    override private[sbt] val name: String,
    override private[sbt] val properties: Properties,
    override private[sbt] val suppressedMessage: SuppressedTraceContext => Option[String]
) extends Appender {
  // Holds the lazily-created log4j bridge; null until first requested.
  private[this] val log4j = new AtomicReference[XAppender](null)
  // Lazily builds the Log4JConsoleAppender the first time toLog4J is read,
  // using double-checked locking on the AtomicReference (in addition to the
  // lazy val) so only one bridge instance is ever created. The bridge routes
  // each log4j event back into this appender via appendMessage.
  override private[sbt] lazy val toLog4J = log4j.get match {
    case null =>
      log4j.synchronized {
        log4j.get match {
          case null =>
            val l = new Log4JConsoleAppender(name, properties, suppressedMessage, { event =>
              val level = ConsoleAppender.toLevel(event.getLevel)
              val message = event.getMessage
              // A closed output channel just drops the message.
              try appendMessage(level, message)
              catch { case _: ClosedChannelException => }
            })
            log4j.set(l)
            l
          case l => l
        }
      }
  }
  // Stops the log4j bridge if it was ever created; no-op otherwise.
  override def close(): Unit = log4j.get match {
    case null =>
    case a    => a.stop()
  }
}
trait Appender extends AutoCloseable {
  private[sbt] def name: String
  private[sbt] def properties: Properties
  private[sbt] def suppressedMessage: SuppressedTraceContext => Option[String]
  import scala.Console.{ BLUE, GREEN, RED, YELLOW }
  // Capabilities derived from the supplied Properties.
  private[util] def out: ConsoleOut = properties.out
  private[util] def ansiCodesSupported: Boolean = properties.isAnsiSupported
  private[util] def useFormat: Boolean = properties.isColorEnabled
  private def reset: String = scala.Console.RESET
  private val SUCCESS_LABEL_COLOR = GREEN
  private val SUCCESS_MESSAGE_COLOR = reset
  private val NO_COLOR = reset
  // Stack-trace line budget; reads/writes are guarded by synchronized.
  private var traceEnabledVar: Int = Int.MaxValue
  def setTrace(level: Int): Unit = synchronized { traceEnabledVar = level }
  /**
   * Returns the number of lines for stacktrace.
   */
  def getTrace: Int = synchronized { traceEnabledVar }
  // Bridge to log4j; implemented by subclasses.
  private[sbt] def toLog4J: XAppender
  /**
   * Logs the stack trace of `t`, possibly shortening it.
   *
   * The `traceLevel` parameter configures how the stack trace will be shortened.
   * See `StackTrace.trimmed`.
   *
   * @param t The `Throwable` whose stack trace to log.
   * @param traceLevel How to shorten the stack trace.
   */
  def trace(t: => Throwable, traceLevel: Int): Unit = {
    if (traceLevel >= 0)
      write(StackTrace.trimmed(t, traceLevel))
    // For short traces, optionally append a hint from suppressedMessage.
    if (traceLevel <= 2) {
      val ctx = new SuppressedTraceContext(traceLevel, ansiCodesSupported && useFormat)
      for (msg <- suppressedMessage(ctx))
        appendLog(NO_COLOR, "trace", NO_COLOR, msg)
    }
  }
  /**
   * Logs a `ControlEvent` to the log.
   *
   * @param event The kind of `ControlEvent`.
   * @param message The message to log.
   */
  def control(event: ControlEvent.Value, message: => String): Unit =
    appendLog(labelColor(Level.Info), Level.Info.toString, BLUE, message)
  /**
   * Appends the message `message` to the to the log at level `level`.
   *
   * @param level The importance level of the message.
   * @param message The message to log.
   */
  def appendLog(level: Level.Value, message: => String): Unit = {
    appendLog(labelColor(level), level.toString, NO_COLOR, message)
  }
  /**
   * Select the right color for the label given `level`.
   *
   * @param level The label to consider to select the color.
   * @return The color to use to color the label.
   */
  private def labelColor(level: Level.Value): String =
    level match {
      case Level.Error => RED
      case Level.Warn  => YELLOW
      case _           => NO_COLOR
    }
  /**
   * Appends a full message to the log. Each line is prefixed with `[$label]`, written in
   * `labelColor` if formatting is enabled. The lines of the messages are colored with
   * `messageColor` if formatting is enabled.
   *
   * @param labelColor The color to use to format the label.
   * @param label The label to prefix each line with. The label is shown between square
   *              brackets.
   * @param messageColor The color to use to format the message.
   * @param message The message to write.
   */
  private def appendLog(
      labelColor: String,
      label: String,
      messageColor: String,
      message: String
  ): Unit =
    try {
      // according to https://github.com/sbt/sbt/issues/5608, sometimes we get a null message
      if (message == null) ()
      else {
        // One shared builder, resized and reset per line, to avoid reallocating.
        val len = labelColor.length + label.length + messageColor.length + reset.length * 3
        val builder: StringBuilder = new StringBuilder(len)
        message.linesIterator.foreach { line =>
          builder.ensureCapacity(len + line.length + 4)
          builder.setLength(0)
          // Wraps `b` in reset+color...reset when formatting is on; plain text otherwise.
          def fmted(a: String, b: String) = {
            if (useFormat) builder.append(reset).append(a).append(b).append(reset)
            else builder.append(b)
          }
          if (useFormat) builder.append(reset)
          builder.append('[')
          fmted(labelColor, label)
          builder.append("] ")
          fmted(messageColor, line)
          write(builder.toString)
        }
      }
    } catch { case _: InterruptedException => }
  // success is called by ConsoleLogger.
  private[sbt] def success(message: => String): Unit = {
    appendLog(SUCCESS_LABEL_COLOR, Level.SuccessLabel, SUCCESS_MESSAGE_COLOR, message)
  }
  // Writes one line, stripping ANSI/color escapes first when the terminal
  // configuration says they are unsupported/disabled.
  private def write(msg: String): Unit = {
    // There is no api for removing only colors but not other ansi escape sequences
    // so we do nothing if useFormat is false but ansiCodesSupported is true which is
    // a rare use case but if ansiCodesSupported is true, color codes should work so
    // the output may have unwanted colors but it would still be legible. This should
    // only be relevant if the log message string itself contains ansi escape sequences
    // other than color codes which is very unlikely.
    val toWrite = if ((!ansiCodesSupported || !useFormat) && msg.getBytes.contains(27.toByte)) {
      val (bytes, len) =
        EscHelpers.strip(msg.getBytes, stripAnsi = !ansiCodesSupported, stripColor = !useFormat)
      new String(bytes, 0, len)
    } else msg
    out.println(toWrite)
  }
  // Unwraps log4j Message payloads: object messages carry structured sbt events,
  // anything else is logged as formatted text.
  private[util] def appendMessage(level: Level.Value, msg: Message): Unit =
    msg match {
      case o: ObjectMessage         => appendMessageContent(level, o.getParameter)
      case o: ReusableObjectMessage => appendMessageContent(level, o.getParameter)
      case _                        => appendLog(level, msg.getFormattedMessage)
    }
  // Renders a TraceEvent: the trimmed stack trace (per getTrace), then an
  // optional suppressed-trace hint, each at Error level.
  private def appendTraceEvent(te: TraceEvent): Unit = {
    val traceLevel = getTrace
    if (traceLevel >= 0) {
      val throwableShowLines: ShowLines[Throwable] =
        ShowLines[Throwable]((t: Throwable) => {
          List(StackTrace.trimmed(t, traceLevel))
        })
      val codec: ShowLines[TraceEvent] =
        ShowLines[TraceEvent]((t: TraceEvent) => {
          throwableShowLines.showLines(t.message)
        })
      codec.showLines(te).toVector foreach { appendLog(Level.Error, _) }
    }
    if (traceLevel <= 2) {
      suppressedMessage(new SuppressedTraceContext(traceLevel, ansiCodesSupported && useFormat)) foreach {
        appendLog(Level.Error, _)
      }
    }
  }
  // Dispatches structured event payloads: trace events, progress events
  // (ignored here), events with a registered string codec, or plain toString.
  private def appendMessageContent(level: Level.Value, o: AnyRef): Unit = {
    def appendEvent(oe: ObjectEvent[_]): Unit = {
      val contentType = oe.contentType
      contentType match {
        case "sbt.internal.util.TraceEvent" => appendTraceEvent(oe.message.asInstanceOf[TraceEvent])
        case "sbt.internal.util.ProgressEvent" =>
        case _ =>
          LogExchange.stringCodec[AnyRef](contentType) match {
            case Some(codec) if contentType == "sbt.internal.util.SuccessEvent" =>
              codec.showLines(oe.message.asInstanceOf[AnyRef]).toVector foreach { success(_) }
            case Some(codec) =>
              codec.showLines(oe.message.asInstanceOf[AnyRef]).toVector foreach (appendLog(
                level,
                _
              ))
            case _ => appendLog(level, oe.message.toString)
          }
      }
    }
    o match {
      case x: StringEvent    => Vector(x.message) foreach { appendLog(level, _) }
      case x: ObjectEvent[_] => appendEvent(x)
      case _                 => Vector(o.toString) foreach { appendLog(level, _) }
    }
  }
  private[sbt] def appendObjectEvent[T](level: Level.Value, message: => ObjectEvent[T]): Unit =
    appendMessageContent(level, message)
}
// Adapter exposing an sbt Appender as a log4j AbstractAppender: every log4j
// event is forwarded to the `appendEvent` callback supplied at construction.
private[internal] class Log4JConsoleAppender(
    override private[sbt] val name: String,
    override private[sbt] val properties: Properties,
    override private[sbt] val suppressedMessage: SuppressedTraceContext => Option[String],
    appendEvent: XLogEvent => Unit,
) extends AbstractAppender(name, null, LogExchange.dummyLayout, true, Array.empty)
    with Appender {
  // log4j appenders must be started before use.
  start()
  override def close(): Unit = stop()
  // This class IS the log4j appender, so the bridge is itself.
  override private[sbt] def toLog4J: XAppender = this
  override def append(event: XLogEvent): Unit = appendEvent(event)
}
// The reverse adapter: wraps an existing log4j appender so it can be used
// where an sbt Appender is expected; appendLog synthesizes a log4j event.
private[sbt] class ConsoleAppenderFromLog4J(
    override private[sbt] val name: String,
    override private[sbt] val properties: Properties,
    override private[sbt] val suppressedMessage: SuppressedTraceContext => Option[String],
    val delegate: XAppender,
) extends Appender {
  def this(name: String, delegate: XAppender) =
    this(name, Properties.from(Terminal.get), _ => None, delegate)
  override def close(): Unit = delegate.stop()
  private[sbt] def toLog4J: XAppender = delegate
  override def appendLog(level: sbt.util.Level.Value, message: => String): Unit = {
    // Builds a minimal log4j event carrying the level and formatted message.
    delegate.append(new AbstractLogEvent {
      override def getLevel(): XLevel = ConsoleAppender.toXLevel(level)
      override def getMessage(): Message =
        StringFormatterMessageFactory.INSTANCE.newMessage(message.toString, Array.empty)
    })
  }
}
/** Context handed to `suppressedMessage` callbacks: the active trace level and
  * whether ANSI formatting is in effect for the output. */
final class SuppressedTraceContext(val traceLevel: Int, val useFormat: Boolean)
| sbt/sbt | internal/util-logging/src/main/scala/sbt/internal/util/ConsoleAppender.scala | Scala | apache-2.0 | 22,362 |
package services.actor
import akka.actor.{ActorLogging, Actor}
import common.LogHelper.LogHelper
import services.actor.LogActor.{Info, Err, Warn}
import scala.collection.mutable.ListBuffer
/**
* Created by horatio on 10/28/15.
*/
/**
 * Buffers log messages per severity and flushes each buffer to LogHelper once
 * it reaches `size` entries. Every message is also echoed immediately to the
 * actor's own log at the matching severity.
 */
class LogActor(size: Int) extends Actor with ActorLogging {
  // Short actor name (last path segment), used to tag diagnostic output.
  val name = context.self.path.toString.split("/").last
  val errBuffer = ListBuffer[String]()
  val warnBuffer = ListBuffer[String]()
  val infoBuffer = ListBuffer[String]()

  def receive = {
    case Err(msgs)  => handle(errBuffer, msgs, "Err", m => log.error(m), m => LogHelper.err(m))
    case Warn(msgs) => handle(warnBuffer, msgs, "Warn", m => log.warning(m), m => LogHelper.warn(m))
    case Info(msgs) => handle(infoBuffer, msgs, "Info", m => log.info(m), m => LogHelper.info(m))
  }

  /**
   * Shared handling for all three severities (was copy-pasted three times):
   * append `msgs` to `buf`, report the buffer's fill level, echo the message
   * via `actorLog`, and when the buffer reaches `size`, drain it into `sink`.
   * (Also removes the original lambda parameter named `log`, which shadowed
   * the actor's `log` member.)
   */
  private def handle(
      buf: ListBuffer[String],
      msgs: String,
      label: String,
      actorLog: String => Unit,
      sink: String => Unit
  ): Unit = {
    buf += msgs
    log.info(s"$name: $label: buffer size: $size, now: ${buf.length}")
    actorLog(msgs)
    if (buf.length >= size) {
      buf.foreach(sink)
      buf.clear()
    }
  }
}
// Message protocol for LogActor.
object LogActor {
  // Buffered-and-forwarded severities (handled in LogActor.receive).
  case class Err(msgs: String)
  case class Warn(msgs: String)
  case class Info(msgs: String)
  // NOTE(review): Debug and Write are declared but LogActor.receive has no
  // case for them — confirm whether they are intended for future use.
  case class Debug(msgs: String)
  case class Write(buf: ListBuffer[String])
}
/*
* Copyright 2014–2020 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.connector.datasource
import quasar.qscript.QScriptEducated
/**
 * A Datasource capable of executing QScript.
 *
 * @tparam T the fixed-point type constructor of the QScript expression
 * @tparam F effect type of datasource operations
 * @tparam G effect type of result streams
 * @tparam R the type of results produced by evaluation
 */
abstract class HeavyweightDatasource[T[_[_]], F[_], G[_], R]
    extends PhysicalDatasource[F, G, T[QScriptEducated[T, ?]], R]
| slamdata/quasar | connector/src/main/scala/quasar/connector/datasource/HeavyweightDatasource.scala | Scala | apache-2.0 | 858 |
package chapter.nine
// Placeholder entry point for chapter nine exercises; no solutions added yet.
object ExerciseNine extends App {
}
| deekim/impatient-scala | src/main/scala/chapter/nine/ExerciseNine.scala | Scala | apache-2.0 | 60 |
package sbtmarathon
import java.io._
import java.nio.file.Path
import java.nio.charset.Charset
/** Small file/path helpers used by the template generator. */
object TemplateUtils {

  /**
   * Joins path segments with the platform separator, ensuring exactly one
   * separator appears between adjacent parts regardless of whether either
   * side already ends/starts with one.
   *
   * Throws if `parts` is empty (reduceLeft on an empty sequence).
   */
  def path(parts: String*): String =
    parts.reduceLeft { (left: String, right: String) =>
      val leftEnds = left.endsWith(File.separator)
      val rightStarts = right.startsWith(File.separator)
      if (leftEnds && rightStarts) left + right.drop(1)
      else if (leftEnds || rightStarts) left + right
      else left + File.separator + right
    }

  /** Unwraps `optionalValue`, aborting with `errMsg` when it is absent. */
  def valueOrErr(optionalValue: Option[String], errMsg: => String): String =
    optionalValue.getOrElse(sys.error(errMsg))

  /**
   * Writes `content` to `file` in the given `charset`, creating parent
   * directories as needed. Fails fast if the charset cannot encode the
   * content. Set `append` to add to an existing file instead of replacing it.
   */
  def write(file: File, content: String, charset: Charset = Charset.forName("UTF-8"), append: Boolean = false): Unit = {
    if (!charset.newEncoder.canEncode(content)) {
      sys.error("string cannot be encoded by charset " + charset.name)
    }
    try {
      // A null parent means the file lives in the current directory.
      Option(file.getParentFile).foreach(_.mkdirs())
    } catch {
      case e: IOException => sys.error(s"Could not create parent directories for $file")
    }
    val writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file, append), charset))
    try {
      writer.write(content)
    } catch {
      case e: IOException => sys.error(s"error writing to $file: ${e.getMessage}")
    } finally {
      writer.close()
    }
  }
}
| Tapad/sbt-marathon | templating-lib/src/main/scala/sbtmarathon/TemplateUtils.scala | Scala | bsd-3-clause | 1,471 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming.test
import java.io.File
import java.util
import org.scalatest.BeforeAndAfter
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.catalyst.streaming.StreamingRelationV2
import org.apache.spark.sql.connector.{FakeV2Provider, InMemoryTableSessionCatalog}
import org.apache.spark.sql.connector.catalog.{Identifier, InMemoryTableCatalog, SupportsRead, Table, TableCapability, V2TableWithV1Fallback}
import org.apache.spark.sql.connector.expressions.Transform
import org.apache.spark.sql.connector.read.ScanBuilder
import org.apache.spark.sql.execution.streaming.{MemoryStream, MemoryStreamScanBuilder}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.StreamTest
import org.apache.spark.sql.streaming.sources.FakeScanBuilder
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.spark.util.Utils
/**
 * End-to-end tests for the table-based streaming APIs
 * (`DataStreamReader.table` and `DataStreamWriter.toTable`), covering v2
 * catalogs, custom namespaces, the V1 fallback path, and streaming writes to
 * new, existing, managed and external tables.
 */
class DataStreamTableAPISuite extends StreamTest with BeforeAndAfter {
  import testImplicits._
  import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
  // Register the two test catalogs used throughout the suite: a plain
  // in-memory catalog and one whose tables are backed by MemoryStreams.
  before {
    spark.conf.set("spark.sql.catalog.testcat", classOf[InMemoryTableCatalog].getName)
    spark.conf.set("spark.sql.catalog.teststream", classOf[InMemoryStreamTableCatalog].getName)
  }
  // Reset catalog/conf state and stop any leftover streaming queries so tests
  // stay isolated from each other.
  after {
    spark.sessionState.catalogManager.reset()
    spark.sessionState.conf.clear()
    sqlContext.streams.active.foreach(_.stop())
  }
  // ---------------------------------------------------------------- reads --
  test("read: table API with file source") {
    // Exercise both the V1 file source path and the default V2 path.
    Seq("parquet", "").foreach { source =>
      withSQLConf(SQLConf.USE_V1_SOURCE_LIST.key -> source) {
        withTempDir { tempDir =>
          val tblName = "my_table"
          val dir = tempDir.getAbsolutePath
          withTable(tblName) {
            spark.range(3).write.format("parquet").option("path", dir).saveAsTable(tblName)
            testStream(spark.readStream.table(tblName))(
              ProcessAllAvailable(),
              CheckAnswer(Row(0), Row(1), Row(2))
            )
          }
        }
      }
    }
  }
  test("read: read non-exist table") {
    intercept[AnalysisException] {
      spark.readStream.table("non_exist_table")
    }.message.contains("Table not found")
  }
  test("read: stream table API with temp view") {
    val tblName = "my_table"
    val stream = MemoryStream[Int]
    withTable(tblName) {
      stream.toDF().createOrReplaceTempView(tblName)
      testStream(spark.readStream.table(tblName)) (
        AddData(stream, 1, 2, 3),
        CheckLastBatch(1, 2, 3),
        AddData(stream, 4, 5),
        CheckLastBatch(4, 5)
      )
    }
  }
  test("read: stream table API with non-streaming temp view") {
    val tblName = "my_table"
    withTable(tblName) {
      spark.range(3).createOrReplaceTempView(tblName)
      intercept[AnalysisException] {
        spark.readStream.table(tblName)
      }.message.contains("is not a temp view of streaming logical plan")
    }
  }
  test("read: read table without streaming capability support") {
    val tableIdentifier = "testcat.table_name"
    spark.sql(s"CREATE TABLE $tableIdentifier (id bigint, data string) USING foo")
    intercept[AnalysisException] {
      spark.readStream.table(tableIdentifier)
    }.message.contains("does not support either micro-batch or continuous scan")
  }
  test("read: read table with custom catalog") {
    val tblName = "teststream.table_name"
    withTable(tblName) {
      spark.sql(s"CREATE TABLE $tblName (data int) USING foo")
      val stream = MemoryStream[Int]
      // Inject the MemoryStream into the catalog-created table so reads are
      // served from it.
      val testCatalog = spark.sessionState.catalogManager.catalog("teststream").asTableCatalog
      val table = testCatalog.loadTable(Identifier.of(Array(), "table_name"))
      table.asInstanceOf[InMemoryStreamTable].setStream(stream)
      testStream(spark.readStream.table(tblName)) (
        AddData(stream, 1, 2, 3),
        CheckLastBatch(1, 2, 3),
        AddData(stream, 4, 5),
        CheckLastBatch(4, 5)
      )
    }
  }
  test("read: read table with custom catalog & namespace") {
    spark.sql("CREATE NAMESPACE teststream.ns")
    val tblName = "teststream.ns.table_name"
    withTable(tblName) {
      spark.sql(s"CREATE TABLE $tblName (data int) USING foo")
      val stream = MemoryStream[Int]
      val testCatalog = spark.sessionState.catalogManager.catalog("teststream").asTableCatalog
      val table = testCatalog.loadTable(Identifier.of(Array("ns"), "table_name"))
      table.asInstanceOf[InMemoryStreamTable].setStream(stream)
      testStream(spark.readStream.table(tblName)) (
        AddData(stream, 1, 2, 3),
        CheckLastBatch(1, 2, 3),
        AddData(stream, 4, 5),
        CheckLastBatch(4, 5)
      )
    }
  }
  test("read: fallback to V1 relation") {
    // The reserved table name makes InMemoryStreamTableCatalog create a
    // V2TableWithV1Fallback, which the analyzer must turn into a V1 relation.
    val tblName = DataStreamTableAPISuite.V1FallbackTestTableName
    spark.conf.set(SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION.key,
      classOf[InMemoryStreamTableCatalog].getName)
    val v2Source = classOf[FakeV2Provider].getName
    withTempDir { tempDir =>
      withTable(tblName) {
        spark.sql(s"CREATE TABLE $tblName (data int) USING $v2Source")
        // Check the StreamingRelationV2 has been replaced by StreamingRelation
        val plan = spark.readStream.option("path", tempDir.getCanonicalPath).table(tblName)
          .queryExecution.analyzed.collectFirst {
            case d: StreamingRelationV2 => d
          }
        assert(plan.isEmpty)
      }
    }
  }
  // --------------------------------------------------------------- writes --
  test("write: write to table with custom catalog & no namespace") {
    val tableIdentifier = "testcat.table_name"
    withTable(tableIdentifier) {
      spark.sql(s"CREATE TABLE $tableIdentifier (id bigint, data string) USING foo")
      checkAnswer(spark.table(tableIdentifier), Seq.empty)
      runTestWithStreamAppend(tableIdentifier)
    }
  }
  test("write: write to table with custom catalog & namespace") {
    spark.sql("CREATE NAMESPACE testcat.ns")
    val tableIdentifier = "testcat.ns.table_name"
    withTable(tableIdentifier) {
      spark.sql(s"CREATE TABLE $tableIdentifier (id bigint, data string) USING foo")
      checkAnswer(spark.table(tableIdentifier), Seq.empty)
      runTestWithStreamAppend(tableIdentifier)
    }
  }
  test("write: write to table with default session catalog") {
    val v2Source = classOf[FakeV2Provider].getName
    spark.conf.set(SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION.key,
      classOf[InMemoryTableSessionCatalog].getName)
    spark.sql("CREATE NAMESPACE ns")
    val tableIdentifier = "ns.table_name"
    withTable(tableIdentifier) {
      spark.sql(s"CREATE TABLE $tableIdentifier (id bigint, data string) USING $v2Source")
      checkAnswer(spark.table(tableIdentifier), Seq.empty)
      runTestWithStreamAppend(tableIdentifier)
    }
  }
  test("write: write to non-exist table with custom catalog") {
    val tableIdentifier = "testcat.nonexistenttable"
    withTable(tableIdentifier) {
      runTestWithStreamAppend(tableIdentifier)
    }
  }
  test("write: write to temporary view isn't allowed yet") {
    val tableIdentifier = "testcat.table_name"
    val tempViewIdentifier = "temp_view"
    spark.sql(s"CREATE TABLE $tableIdentifier (id bigint, data string) USING foo")
    checkAnswer(spark.table(tableIdentifier), Seq.empty)
    spark.table(tableIdentifier).createOrReplaceTempView(tempViewIdentifier)
    withTempDir { checkpointDir =>
      val exc = intercept[AnalysisException] {
        runStreamQueryAppendMode(tempViewIdentifier, checkpointDir, Seq.empty, Seq.empty)
      }
      assert(exc.getMessage.contains("doesn't support streaming write"))
    }
  }
  test("write: write to view shouldn't be allowed") {
    val tableIdentifier = "testcat.table_name"
    val viewIdentifier = "table_view"
    spark.sql(s"CREATE TABLE $tableIdentifier (id bigint, data string) USING foo")
    checkAnswer(spark.table(tableIdentifier), Seq.empty)
    spark.sql(s"CREATE VIEW $viewIdentifier AS SELECT id, data FROM $tableIdentifier")
    withTempDir { checkpointDir =>
      val exc = intercept[AnalysisException] {
        runStreamQueryAppendMode(viewIdentifier, checkpointDir, Seq.empty, Seq.empty)
      }
      assert(exc.getMessage.contains(s"Streaming into views $viewIdentifier is not supported"))
    }
  }
  test("write: write to an external table") {
    withTempDir { dir =>
      val tableName = "stream_test"
      withTable(tableName) {
        checkForStreamTable(Some(dir), tableName)
      }
    }
  }
  test("write: write to a managed table") {
    val tableName = "stream_test"
    withTable(tableName) {
      checkForStreamTable(None, tableName)
    }
  }
  test("write: write to an external table with existing path") {
    withTempDir { dir =>
      val tableName = "stream_test"
      withTable(tableName) {
        // The file written by batch will not be seen after the table was written by a streaming
        // query. This is because we load files from the metadata log instead of listing them
        // using HDFS API.
        Seq(4, 5, 6).toDF("value").write.format("parquet")
          .option("path", dir.getCanonicalPath).saveAsTable(tableName)
        checkForStreamTable(Some(dir), tableName)
      }
    }
  }
  test("write: write to a managed table with existing path") {
    val tableName = "stream_test"
    withTable(tableName) {
      // The file written by batch will not be seen after the table was written by a streaming
      // query. This is because we load files from the metadata log instead of listing them
      // using HDFS API.
      Seq(4, 5, 6).toDF("value").write.format("parquet").saveAsTable(tableName)
      checkForStreamTable(None, tableName)
    }
  }
  test("write: write to an external path and create table") {
    withTempDir { dir =>
      val tableName = "stream_test"
      withTable(tableName) {
        // The file written by batch will not be seen after the table was written by a streaming
        // query. This is because we load files from the metadata log instead of listing them
        // using HDFS API.
        Seq(4, 5, 6).toDF("value").write
          .mode("append").format("parquet").save(dir.getCanonicalPath)
        checkForStreamTable(Some(dir), tableName)
      }
    }
  }
  test("write: write to table with different format shouldn't be allowed") {
    val tableName = "stream_test"
    spark.sql(s"CREATE TABLE $tableName (id bigint, data string) USING json")
    checkAnswer(spark.table(tableName), Seq.empty)
    withTempDir { checkpointDir =>
      val exc = intercept[AnalysisException] {
        runStreamQueryAppendMode(tableName, checkpointDir, Seq.empty, Seq.empty)
      }
      assert(exc.getMessage.contains("The input source(parquet) is different from the table " +
        s"$tableName's data source provider(json)"))
    }
  }
  // -------------------------------------------------------------- helpers --
  // Streams 1,2,3 into `tableName` (externally located at `dir` when given)
  // and verifies the rows are visible both via the table and via a direct
  // parquet read of the underlying location.
  private def checkForStreamTable(dir: Option[File], tableName: String): Unit = {
    val memory = MemoryStream[Int]
    val dsw = memory.toDS().writeStream.format("parquet")
    dir.foreach { output =>
      dsw.option("path", output.getCanonicalPath)
    }
    val sq = dsw
      .option("checkpointLocation", Utils.createTempDir().getCanonicalPath)
      .toTable(tableName)
    memory.addData(1, 2, 3)
    sq.processAllAvailable()
    checkDataset(
      spark.table(tableName).as[Int],
      1, 2, 3)
    val catalogTable = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tableName))
    val path = if (dir.nonEmpty) {
      dir.get
    } else {
      // Managed table: resolve the location from the catalog metadata.
      new File(catalogTable.location)
    }
    checkDataset(
      spark.read.format("parquet").load(path.getCanonicalPath).as[Int],
      1, 2, 3)
  }
  // Appends two batches to `tableIdentifier`, re-using the same checkpoint so
  // the second run resumes from the first.
  private def runTestWithStreamAppend(tableIdentifier: String) = {
    withTempDir { checkpointDir =>
      val input1 = Seq((1L, "a"), (2L, "b"), (3L, "c"))
      verifyStreamAppend(tableIdentifier, checkpointDir, Seq.empty, input1, input1)
      val input2 = Seq((4L, "d"), (5L, "e"), (6L, "f"))
      verifyStreamAppend(tableIdentifier, checkpointDir, Seq(input1), input2, input1 ++ input2)
    }
  }
  // Runs one append-mode streaming query to `tableIdentifier`: `prevInputs`
  // are pre-loaded into the MemoryStream (already committed in the given
  // checkpoint), then `newInputs` are processed.
  private def runStreamQueryAppendMode(
      tableIdentifier: String,
      checkpointDir: File,
      prevInputs: Seq[Seq[(Long, String)]],
      newInputs: Seq[(Long, String)]): Unit = {
    val inputData = MemoryStream[(Long, String)]
    val inputDF = inputData.toDF().toDF("id", "data")
    prevInputs.foreach { inputsPerBatch =>
      inputData.addData(inputsPerBatch: _*)
    }
    val query = inputDF
      .writeStream
      .option("checkpointLocation", checkpointDir.getAbsolutePath)
      .toTable(tableIdentifier)
    inputData.addData(newInputs: _*)
    query.processAllAvailable()
    query.stop()
  }
  // Runs an append and asserts the table's full contents equal
  // `expectedOutputs`.
  private def verifyStreamAppend(
      tableIdentifier: String,
      checkpointDir: File,
      prevInputs: Seq[Seq[(Long, String)]],
      newInputs: Seq[(Long, String)],
      expectedOutputs: Seq[(Long, String)]): Unit = {
    runStreamQueryAppendMode(tableIdentifier, checkpointDir, prevInputs, newInputs)
    checkAnswer(
      spark.table(tableIdentifier),
      expectedOutputs.map { case (id, data) => Row(id, data) }
    )
  }
}
// Shared constants for the suite and the test catalog below.
object DataStreamTableAPISuite {
  // Table name that makes InMemoryStreamTableCatalog create a table taking
  // the V1-fallback path instead of a streaming V2 table.
  val V1FallbackTestTableName = "fallbackV1Test"
}
/**
 * Test table whose scans are served from a [[MemoryStream]] injected via
 * [[setStream]]; advertises both micro-batch and continuous read capability.
 */
class InMemoryStreamTable(override val name: String) extends Table with SupportsRead {
  // Backing stream; must be injected via setStream before schema()/scan use.
  var stream: MemoryStream[Int] = _
  def setStream(inputData: MemoryStream[Int]): Unit = stream = inputData
  override def schema(): StructType = stream.fullSchema()
  override def capabilities(): util.Set[TableCapability] = {
    util.EnumSet.of(TableCapability.MICRO_BATCH_READ, TableCapability.CONTINUOUS_READ)
  }
  override def newScanBuilder(options: CaseInsensitiveStringMap): ScanBuilder = {
    new MemoryStreamScanBuilder(stream)
  }
}
/**
 * A batch-only V2 table that also exposes a V1 [[CatalogTable]], used to
 * exercise the analyzer's fallback from StreamingRelationV2 to the V1
 * streaming relation.
 */
class NonStreamV2Table(override val name: String)
    extends Table with SupportsRead with V2TableWithV1Fallback {
  override def schema(): StructType = StructType(Nil)
  // Deliberately only BATCH_READ: no streaming capability, forcing fallback.
  override def capabilities(): util.Set[TableCapability] =
    util.EnumSet.of(TableCapability.BATCH_READ)
  override def newScanBuilder(options: CaseInsensitiveStringMap): ScanBuilder = new FakeScanBuilder
  // Minimal V1 metadata for the fallback path; backed by parquet.
  override def v1Table: CatalogTable = {
    CatalogTable(
      identifier =
        TableIdentifier(DataStreamTableAPISuite.V1FallbackTestTableName, Some("default")),
      tableType = CatalogTableType.MANAGED,
      storage = CatalogStorageFormat.empty,
      owner = null,
      schema = schema(),
      provider = Some("parquet"))
  }
}
/**
 * In-memory test catalog whose tables are [[InMemoryStreamTable]]s, except
 * for the reserved fallback name which yields a [[NonStreamV2Table]].
 */
class InMemoryStreamTableCatalog extends InMemoryTableCatalog {
  import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
  override def createTable(
      ident: Identifier,
      schema: StructType,
      partitions: Array[Transform],
      properties: util.Map[String, String]): Table = {
    // Reject duplicate registrations up front.
    if (tables.containsKey(ident)) {
      throw new TableAlreadyExistsException(ident)
    }
    val qualifiedName = s"$name.${ident.quoted}"
    // The reserved name triggers the V1-fallback table variant.
    val table: Table =
      if (ident.name() == DataStreamTableAPISuite.V1FallbackTestTableName) {
        new NonStreamV2Table(qualifiedName)
      } else {
        new InMemoryStreamTable(qualifiedName)
      }
    tables.put(ident, table)
    namespaces.putIfAbsent(ident.namespace.toList, Map())
    table
  }
}
| ueshin/apache-spark | sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala | Scala | apache-2.0 | 16,132 |
package org.jetbrains.plugins.scala
package annotator
import com.intellij.lang.annotation.AnnotationHolder
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScClass
import scala.collection.mutable.ArrayBuffer
import com.intellij.psi.search.GlobalSearchScope
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
import org.jetbrains.plugins.scala.lang.psi.types.ScDesignatorType
import org.jetbrains.plugins.scala.lang.psi.types.ScAbstractType
import org.jetbrains.plugins.scala.lang.psi.types.ScTypeParameterType
import org.jetbrains.plugins.scala.lang.psi.types.ComparingUtil._
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScCompoundTypeElement
import scala.collection.immutable.HashSet
/**
* Jason Zaugg
*/
/**
 * Annotator mix-in that validates Scala patterns, delegating the actual
 * checks to [[PatternAnnotator.checkPattern]] when error highlighting is on.
 */
trait PatternAnnotator {
  def annotatePattern(pattern: ScPattern, holder: AnnotationHolder, highlightErrors: Boolean) {
    if (highlightErrors) {
      PatternAnnotator.checkPattern(pattern, holder)
    }
  }
}
object PatternAnnotator {
  // Entry point: only checks a pattern when both its own type and the
  // expected (scrutinee) type can be computed.
  def checkPattern(pattern: ScPattern, holder: AnnotationHolder) = {
    for {
      pType <- patternType(pattern)
      eType <- pattern.expectedType
    } {
      checkPatternType(pType, eType, pattern, holder)
    }
  }
  /**
   * Logic in this method is mimicked from compiler sources:
   * scala.tools.nsc.typechecker.Infer and scala.tools.nsc.typechecker.Checkable
   *
   * "pattern type is uncompatible with expected type" error is not handled
   * */
  private def checkPatternType(patType: ScType, exprType: ScType, pattern: ScPattern, holder: AnnotationHolder) {
    val exTp = widen(exprType)
    val freeTypeParams = freeTypeParamsOfTerms(exTp)
    def exTpMatchesPattp = matchesPattern(exTp, widen(patType))
    // "Never matches" when neither direction of the match can succeed;
    // AnyVal patterns are exempted from the never-subtype check.
    val neverMatches = !matchesPattern(exprType, patType) && (patType match {
      case StdType(_, Some(AnyVal)) => false
      case _ => isNeverSubType(exprType, patType)
    })
    pattern match {
      // A type pattern on a final scrutinee type with no free type params
      // that cannot match is a hard error.
      case _: ScTypedPattern if exTp.isFinalType && freeTypeParams.isEmpty && !exTpMatchesPattp =>
        val message = ScalaBundle.message("scrutinee.incompatible.pattern.type", exTp.presentableText, patType.presentableText)
        holder.createErrorAnnotation(pattern, message)
        return
      // Nothing/Null/AnyVal may not be used in a type pattern at all.
      case _: ScTypedPattern if Seq(Nothing, Null, AnyVal) contains patType =>
        val message = ScalaBundle.message("type.cannot.be.used.in.type.pattern", patType.presentableText)
        holder.createErrorAnnotation(pattern, message)
        return
      // Matching against a refinement type is unchecked at runtime — warn.
      case ScTypedPattern(typeElem @ ScCompoundTypeElement(_, Some(refinement))) =>
        val message = ScalaBundle.message("pattern.on.refinement.unchecked")
        holder.createWarningAnnotation(typeElem, message)
        return
      case _ =>
    }
    if (neverMatches) {
      // Add an erasure caveat unless the classes are provably unrelated.
      val erasureWarn = (ScType.extractClass(exprType), ScType.extractClass(patType)) match {
        case (Some(cl1), Some(cl2)) if pattern.isInstanceOf[ScTypedPattern] =>
          if (isNeverSubClass(cl1, cl2)) "" else ScalaBundle.message("erasure.warning")
        case _ => ""
      }
      val message = ScalaBundle.message("fruitless.type.test", exprType.presentableText, patType.presentableText) + erasureWarn
      holder.createWarningAnnotation(pattern, message)
    }
  }
  // Computes the type that `pattern` matches against, per pattern kind.
  // Returns None when the type cannot be determined.
  private def patternType(pattern: ScPattern): Option[ScType] = {
    // For constructor/infix patterns: the single parameter type of the
    // resolved extractor function, with the resolve substitutor applied.
    def constrPatternType(patternRef: ScStableCodeReferenceElement): Option[ScType] = {
      patternRef.advancedResolve match {
        case Some(srr) =>
          srr.getElement match {
            case fun: ScFunction if fun.parameters.size == 1 =>
              Some(srr.substitutor.subst(fun.paramTypes(0)))
            case _ => None
          }
        case None => None
      }
    }
    pattern match {
      case c: ScConstructorPattern =>
        constrPatternType(c.ref)
      case inf: ScInfixPattern =>
        // NOTE: `refernece` is the (misspelled) accessor of ScInfixPattern's API.
        constrPatternType(inf.refernece)
      case tuple: ScTuplePattern =>
        // Tuple pattern types are only known when every sub-pattern's type is.
        val project = pattern.getProject
        val subPat = tuple.subpatterns
        val subTypes = subPat.flatMap(patternType)
        if (subTypes.size == subPat.size) Some(ScTupleType(subTypes)(project, GlobalSearchScope.allScope(project)))
        else None
      case typed: ScTypedPattern =>
        typed.typePattern.map(_.typeElement.calcType)
      case patt @ (_: ScStableReferenceElementPattern | _: ScLiteralPattern) =>
        // Literal null matches any reference type, so treat it as AnyRef.
        val result = patt.getType(TypingContext.empty).toOption
        if (result == Some(Null)) Some(AnyRef) else result
      case naming: ScNamingPattern =>
        patternType(naming.named)
      case parenth: ScParenthesisedPattern =>
        // A missing sub-pattern falls into the `case null => None` branch below.
        patternType(parenth.subpattern.getOrElse(null))
      case null => None
      case _ => pattern.getType(TypingContext.empty).toOption
    }
  }
  // Replaces each type-parameter type with an abstract type bounded by its
  // (recursively abstracted) bounds; `visited` guards against cyclic bounds.
  private def abstraction(scType: ScType, visited: HashSet[ScType] = HashSet.empty): ScType = {
    if (visited.contains(scType)) {
      return scType
    }
    val newVisited = visited + scType
    scType.recursiveUpdate {
      case tp: ScTypeParameterType => (true, ScAbstractType(tp, abstraction(tp.lower.v, newVisited), abstraction(tp.upper.v, newVisited)))
      case tpe => (false, tpe)
    }
  }
  // Widens singleton types to their designator type and type parameters to
  // their upper bounds.
  private def widen(scType: ScType): ScType = scType match {
    case _ if ScType.isSingletonType(scType) => ScType.extractDesignatorSingletonType(scType).getOrElse(scType)
    case _ =>
      scType.recursiveUpdate {
        case ScTypeParameterType(_, _, _, upper, _) => (true, upper.v)
        case tp => (false, tp)
      }
  }
  // Collects every type-parameter type occurring anywhere inside `tp`.
  private def freeTypeParamsOfTerms(tp: ScType): Seq[ScType] = {
    val buffer = ArrayBuffer[ScType]()
    tp.recursiveUpdate {
      case tp: ScTypeParameterType =>
        buffer += tp
        (false, tp)
      case _ => (false, tp)
    }
    buffer.toSeq
  }
  // Whether a value of type `matching` could conform to pattern type
  // `matched`, including Array-element and abstracted-parameterized cases.
  private def matchesPattern(matching: ScType, matched: ScType): Boolean = {
    object arrayType {
      def unapply(scType: ScType): Option[ScType] = scType match {
        case ScParameterizedType(ScDesignatorType(elem: ScClass), Seq(arg))
          if elem.qualifiedName == "scala.Array" => Some(arg)
        case _ => None
      }
    }
    matching.weakConforms(matched) || ((matching, matched) match {
      case (arrayType(arg1), arrayType(arg2)) => matchesPattern(arg1, arg2)
      case (_, parameterized: ScParameterizedType) =>
        // Retry against the pattern type with its parameters abstracted.
        val newtp = abstraction(parameterized)
        !matched.equiv(newtp) && matching.weakConforms(newtp)
      case _ => false
    })
  }
}
| consulo/consulo-scala | src/org/jetbrains/plugins/scala/annotator/PatternAnnotator.scala | Scala | apache-2.0 | 6,697 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.classes.HeroicCharacterClass
import io.truthencode.ddo.model.classes.HeroicCharacterClass.Alchemist
import io.truthencode.ddo.support.requisite.{ClassRequisiteImpl, FeatRequisiteImpl, RequiresAllOfClass}
/**
* You can use Intelligence for Will saves. Mutually exclusive with
* [[https://ddowiki.com/page/Liquid_Courage]]
*
* @see
* [[https://ddowiki.com/page/Tough_Tincture]]
*/
protected[feats] trait ToughTincture
    extends FeatRequisiteImpl with ClassRequisiteImpl with RequiresAllOfClass with AlchemistBonusFeat
    with Passive {
  self: ClassFeat =>
  // Requires at least 8 Alchemist levels.
  private[this] val cls = (Alchemist, 8)
  // Stackable trait: append this feat's class requirement to whatever the
  // other mixed-in requisites already demand.
  abstract override def allOfClass: Seq[(HeroicCharacterClass, Int)] = super.allOfClass :+ cls
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/ToughTincture.scala | Scala | apache-2.0 | 1,418 |
package org.littlewings.infinispan.lucene
import org.infinispan.commons.api.Lifecycle
/**
 * Enriches resource-like types with a `foreach` that guarantees cleanup,
 * enabling for-comprehension style resource management.
 */
object ResourceWrappers {
  // NOTE(review): "AutoClosebleWrapper" below is misspelled, but renaming it
  // would break external references to the implicit class.
  implicit class LifecycleWrapper[A <: Lifecycle](val underlying: A) extends AnyVal {
    // Runs `f` on the resource, always calling stop() afterwards.
    def foreach(f: A => Unit): Unit =
      try {
        f(underlying)
      } finally {
        underlying.stop()
      }
  }
  implicit class AutoClosebleWrapper[A <: AutoCloseable](val underlying: A) extends AnyVal {
    // Runs `f` on the resource, always calling close() afterwards.
    def foreach(f: A => Unit): Unit =
      try {
        f(underlying)
      } finally {
        underlying.close()
      }
  }
}
| kazuhira-r/infinispan-getting-started | embedded-lucene-distributed-directory/src/main/scala/org/littlewings/infinispan/lucene/ResourceWrappers.scala | Scala | mit | 553 |
package org.apache.spark.sql.cassandra
import com.datastax.spark.connector
import com.datastax.spark.connector.cql.{ColumnDef, TableDef}
import com.datastax.spark.connector.types.FieldDef
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.catalyst.plans.logical.{Statistics, LeafNode}
import org.apache.spark.sql.{StructField, catalyst}
/**
 * Catalyst leaf node describing a Cassandra table: exposes the table's
 * partition-key, clustering and regular columns as Catalyst attributes.
 */
private[cassandra] case class CassandraRelation
  (tableDef: TableDef, alias: Option[String])(@transient val cc: CassandraSQLContext)
  extends LeafNode {
  val keyspaceName = tableDef.keyspaceName
  val regularColumns = tableDef.regularColumns.toList.map(columnToAttribute)
  // Only the indexed subset of the regular columns.
  val indexedColumns = tableDef.regularColumns.filter(_.isIndexedColumn).map(columnToAttribute)
  val partitionColumns = tableDef.partitionKey.map(columnToAttribute)
  val clusterColumns = tableDef.clusteringColumns.map(columnToAttribute)
  val allColumns = tableDef.regularColumns ++ tableDef.partitionKey ++ tableDef.clusteringColumns
  // Lookup from lower-cased column name back to the exact Cassandra name.
  val columnNameByLowercase = allColumns.map(c => (c.columnName.toLowerCase, c.columnName)).toMap
  var projectAttributes = tableDef.allColumns.map(columnToAttribute)
  def columnToAttribute(column: ColumnDef): AttributeReference = {
    // Since data can be dumped in randomly with no validation, everything is nullable.
    val catalystType = ColumnDataType.catalystDataType(column.columnType, nullable = true)
    val qualifiers = tableDef.tableName +: alias.toSeq
    new AttributeReference(column.columnName, catalystType, nullable = true)(qualifiers = qualifiers)
  }
  override def output: Seq[Attribute] = projectAttributes
  // Size estimate for join planning: per-table override from config, else the
  // context-wide default.
  @transient override lazy val statistics = Statistics(
    sizeInBytes = {
      BigInt(cc.conf.getLong(keyspaceName + "." + tableName + ".size.in.bytes", cc.defaultSizeInBytes))
    }
  )
  def tableName = tableDef.tableName
}
/** Maps Cassandra connector column types to Catalyst data types. */
object ColumnDataType {
  // Primitive (non-collection) type mappings. Types with no exact Catalyst
  // counterpart (inet, uuid, timeuuid) are represented as strings.
  private val primitiveTypeMap = Map[connector.types.ColumnType[_], catalyst.types.DataType](
    connector.types.TextType -> catalyst.types.StringType,
    connector.types.AsciiType -> catalyst.types.StringType,
    connector.types.VarCharType -> catalyst.types.StringType,
    connector.types.BooleanType -> catalyst.types.BooleanType,
    connector.types.IntType -> catalyst.types.IntegerType,
    connector.types.BigIntType -> catalyst.types.LongType,
    connector.types.CounterType -> catalyst.types.LongType,
    connector.types.FloatType -> catalyst.types.FloatType,
    connector.types.DoubleType -> catalyst.types.DoubleType,
    connector.types.VarIntType -> catalyst.types.DecimalType(), // no native arbitrary-size integer type
    connector.types.DecimalType -> catalyst.types.DecimalType(),
    connector.types.TimestampType -> catalyst.types.TimestampType,
    connector.types.InetType -> catalyst.types.StringType,
    connector.types.UUIDType -> catalyst.types.StringType,
    connector.types.TimeUUIDType -> catalyst.types.StringType,
    connector.types.BlobType -> catalyst.types.ByteType
  )
  /**
   * Converts a Cassandra column type to a Catalyst type. Collections map to
   * ArrayType/MapType, UDTs to StructType (with always-nullable fields), and
   * everything else through `primitiveTypeMap`.
   */
  def catalystDataType(cassandraType: connector.types.ColumnType[_], nullable: Boolean): catalyst.types.DataType = {
    def catalystStructField(field: FieldDef): StructField =
      StructField(field.fieldName, catalystDataType(field.fieldType, nullable = true), nullable = true)
    cassandraType match {
      case connector.types.SetType(et) => catalyst.types.ArrayType(primitiveTypeMap(et), nullable)
      case connector.types.ListType(et) => catalyst.types.ArrayType(primitiveTypeMap(et), nullable)
      case connector.types.MapType(kt, vt) => catalyst.types.MapType(primitiveTypeMap(kt), primitiveTypeMap(vt), nullable)
      case connector.types.UserDefinedType(_, fields) => catalyst.types.StructType(fields.map(catalystStructField))
      case _ => primitiveTypeMap(cassandraType)
    }
  }
}
| brkyvz/spark-cassandra-connector | spark-cassandra-connector/src/main/scala/org/apache/spark/sql/cassandra/CassandraRelation.scala | Scala | apache-2.0 | 4,030 |
package org.fusesource.scalate.support
/** Character classification and escape tables used by the lexer. */
object CharData {

  /** Maps the character following a backslash in a simple escape sequence
   *  (e.g. the 'n' of "\n") to the character it denotes. A Map is itself a
   *  PartialFunction, so `isDefinedAt`/`orElse` behave exactly as before. */
  val simpleEscape: PartialFunction[Char, Char] = Map(
    'b' -> '\b',
    't' -> '\t',
    'n' -> '\n',
    'f' -> '\f',
    'r' -> '\r',
    '"' -> '"',
    '\'' -> '\'',
    '\\' -> '\\')

  /** Identity function defined only for '0'. */
  val zeroDigit: PartialFunction[Char, Char] = {
    case '0' => '0'
  }

  /** Identity on '1'..'9'. */
  val isNonZeroDigit: PartialFunction[Char, Char] = {
    case c if c >= '1' && c <= '9' => c
  }

  /** Identity on '0'..'9'. */
  val isDigit: PartialFunction[Char, Char] = zeroDigit orElse isNonZeroDigit

  /** Identity on '0'..'7'. */
  val isOctalDigit: PartialFunction[Char, Char] = {
    case c if c >= '0' && c <= '7' => c
  }

  /** Identity on '0'..'9', 'a'..'f' and 'A'..'F'. */
  val isHexDigit: PartialFunction[Char, Char] = isDigit orElse {
    case c if ('a' <= c && c <= 'f') || ('A' <= c && c <= 'F') => c
  }

  /** True when `c` is an ISO control character. */
  def isControl(c: Char) = Character.isISOControl(c)

  /** True when the Unicode code point is an ISO control character. */
  def isControl(codepoint: Int) = Character.isISOControl(codepoint)
}
| scalate/scalate | scalate-core/src/main/scala/org/fusesource/scalate/support/CharData.scala | Scala | apache-2.0 | 1,326 |
package experiments.esperakka
/**
 * Installs a simple moving-average trading module into the Esper engine:
 *  - Delayed:  per-symbol prices pushed out of a length-(windowSize-1) window,
 *              i.e. the price from windowSize ticks ago;
 *  - Averages: per-symbol batch average over the last windowSize prices;
 *  - Buy:      emitted (for `orderSize` units) when a symbol's average
 *              exceeds its delayed price.
 * Events inserted into Buy are re-published through the event bus.
 */
trait ExampleEsperModule extends EsperModule {
  self: EsperClassification =>
  // Number of prices in the moving-average window.
  val windowSize = 4
  // Quantity attached to each generated Buy order.
  val orderSize = 1000
  installModule(
    s"""
        module SimpleAverageTrader;

        @Name("Delayed")
        insert rstream into Delayed
        select rstream symbol,price
        from Price.std:groupwin(symbol).win:length(${windowSize-1});

        @Name("Averages")
        insert into Averages
        select symbol,avg(price) as price
        from Price.std:groupwin(symbol).win:length_batch($windowSize) group by symbol;

        @Name("Buy")
        insert into Buy
        select p.symbol, p.price, $orderSize as amount
        from Price.std:unique(symbol) p
        join Delayed.std:unique(symbol) d on d.symbol = p.symbol
        join Averages a unidirectional on a.symbol = p.symbol
        where a.price > d.price;
      """) { evt => publish(evt)
  }
}
| fsauer65/akka-esper-integration | src/main/scala/experiments/esperakka/ExampleEsperModule.scala | Scala | mit | 877 |
package com.eharmony.aloha.semantics.compiled.plugin.schemabased.schema
import com.eharmony.aloha.semantics.compiled.plugin.schemabased.schema.Schema.FieldRetrievalError
/**
 * Abstraction over a record schema supporting field lookup by name.
 *
 * Created by ryan.
 */
trait Schema {
  // Right: the field's descriptor; Left: why the lookup failed.
  def field(name: String): Either[FieldRetrievalError, FieldDesc]
}
object Schema {
  // Describes a failed field lookup (e.g. an unknown field name).
  case class FieldRetrievalError(error: String)
}
| eHarmony/aloha | aloha-core/src/main/scala/com/eharmony/aloha/semantics/compiled/plugin/schemabased/schema/Schema.scala | Scala | mit | 352 |
package geotrellis.raster.op.local
import geotrellis._
import geotrellis._
import geotrellis.process._
/**
 * Negate (multiply by -1) each value in a raster.
 *
 * Wraps the input raster operation and yields a raster whose cells are the
 * integer negation of the input cells.
 */
case class Negate(r:Op[Raster]) extends Op1(r)({
  // NOTE(review): uses the integer `map`; confirm that double-valued rasters
  // and NoData cells are handled as intended by Raster.map.
  (r) => Result(r.map( z => -z ))
})
| Tjoene/thesis | Case_Programs/geotrellis-0.7.0/src/main/scala/geotrellis/raster/op/local/Negate.scala | Scala | gpl-2.0 | 250 |
// Negative compilation test: a value class (one extending AnyVal) must be
// top-level or a member of a statically accessible object. Both nestings
// below are expected to be rejected; see the inline `// error` markers.
class A1 {
  class A2(x: Int) extends AnyVal // error: value class may not be a member of another class
}
class B1 {
  def test = {
    class B2(x: Int) extends AnyVal // error: value class may not be a local class
  }
}
| som-snytt/dotty | tests/neg/valueClasses.scala | Scala | apache-2.0 | 221 |
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.backpressure.{MasterActor, TestWorkerActor}
import akka.testkit.{ImplicitSender, TestKit}
import org.scalatest.{BeforeAndAfterAll, FlatSpecLike, Matchers}
class WorkerSpec extends TestKit(ActorSystem("WorkerSpec"))
  with Matchers
  with FlatSpecLike
  with BeforeAndAfterAll
  with ImplicitSender {
  // Tear down the test ActorSystem once all tests have run.
  // NOTE(review): `system.shutdown()` is deprecated in later Akka versions;
  // prefer TestKit.shutdownActorSystem(system) when upgrading.
  override def afterAll(): Unit ={
    system.shutdown()
  }
  // Spawns a TestWorkerActor wired to the given master.
  // NOTE(review): the `name` parameter is currently ignored — the actor is
  // created unnamed; using it would require unique names per worker.
  def worker(name: String, master:ActorRef) = {
    system.actorOf(TestWorkerActor.props(master))
  }
  "Worker" should "work" in {
    // Spin up the master
    val m = system.actorOf(Props[MasterActor], "master")
    // Create three workers
    val w1 = worker("master",m)
    val w2 = worker("master",m)
    val w3 = worker("master",m)
    // Send some work to the master
    m ! "Hithere"
    m ! "Guys"
    m ! "So"
    m ! "What's"
    m ! "Up?"
    // We should get it all back
    expectMsgAllOf("Hithere", "Guys", "So", "What's", "Up?")
  }
}
| djoudjou/Quiz | test/WorkerSpec.scala | Scala | cc0-1.0 | 1,001 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.index
import org.geotools.feature.simple.SimpleFeatureTypeBuilder
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo._
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.SimpleFeatureType
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
// Verifies TemporalIndexCheck: detection and validation of the default date
// (DTG) attribute on feature types with zero, one or two Date attributes.
@RunWith(classOf[JUnitRunner])
class TemporalIndexCheckTest extends Specification {
  // setup the basic types
  def noDTGType = SimpleFeatureTypes.createType("noDTGType", s"foo:String,bar:Geometry,baz:String,$DEFAULT_GEOMETRY_PROPERTY_NAME:Geometry")
  def oneDTGType = SimpleFeatureTypes.createType("oneDTGType", s"foo:String,bar:Geometry,baz:String,$DEFAULT_GEOMETRY_PROPERTY_NAME:Geometry,$DEFAULT_DTG_PROPERTY_NAME:Date")
  def twoDTGType = SimpleFeatureTypes.createType("twoDTGType", s"foo:String,bar:Geometry,baz:String,$DEFAULT_GEOMETRY_PROPERTY_NAME:Geometry,$DEFAULT_DTG_PROPERTY_NAME:Date,dtg_end_time:Date")
  val DEFAULT_DATE_KEY = org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.DEFAULT_DATE_KEY
  // Builds a fresh copy of the feature type so each example can mutate its
  // user data without affecting the others.
  def copy(sft: SimpleFeatureType) = {
    val b = new SimpleFeatureTypeBuilder()
    b.init(sft)
    b.buildFeatureType()
  }
  "TemporalIndexCheck" should {
    "detect no valid DTG" in {
      val testType = copy(noDTGType)
      TemporalIndexCheck.validateDtgField(testType)
      testType.getDtgField must beNone
    }
    "detect no valid DTG even if SF_PROPERTY_START_TIME is set incorrectly" in {
      val testType = copy(noDTGType)
      testType.getUserData.put(DEFAULT_DATE_KEY, DEFAULT_DTG_PROPERTY_NAME)
      TemporalIndexCheck.validateDtgField(testType)
      testType.getDtgField must beNone
    }
    "detect a valid DTG if SF_PROPERTY_START_TIME is not set" in {
      val testType = copy(oneDTGType)
      testType.getUserData.remove(DEFAULT_DATE_KEY)
      TemporalIndexCheck.validateDtgField(testType)
      testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
    }
    "detect a valid DTG if SF_PROPERTY_START_TIME is not properly set" in {
      val testType = copy(oneDTGType)
      testType.getUserData.put(DEFAULT_DATE_KEY, "no_such_dtg")
      TemporalIndexCheck.validateDtgField(testType)
      testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
    }
    "present no DTG candidate if SF_PROPERTY_START_TIME is set properly" in {
      val testType = copy(oneDTGType)
      testType.setDtgField(DEFAULT_DTG_PROPERTY_NAME)
      TemporalIndexCheck.validateDtgField(testType)
      testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
    }
    "detect valid DTG candidates and select the first if SF_PROPERTY_START_TIME is not set correctly" in {
      val testType = copy(twoDTGType)
      testType.getUserData.put(DEFAULT_DATE_KEY, "no_such_dtg")
      TemporalIndexCheck.validateDtgField(testType)
      testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
    }
    "present no DTG candidate if SF_PROPERTY_START_TIME is set properly and there are multiple Date attributes" in {
      val testType = copy(twoDTGType)
      testType.getUserData.put(DEFAULT_DATE_KEY, DEFAULT_DTG_PROPERTY_NAME)
      TemporalIndexCheck.validateDtgField(testType)
      testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
    }
  }
  "getDTGFieldName" should {
    "return a dtg field name if SF_PROPERTY_START_TIME is set properly" in {
      val testType = copy(oneDTGType)
      testType.setDtgField(DEFAULT_DTG_PROPERTY_NAME)
      testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
    }
    "not return a dtg field name if SF_PROPERTY_START_TIME is not set correctly" in {
      val testType = copy(noDTGType)
      // setDtgField rejects a name that is not a Date attribute of the type.
      testType.setDtgField(DEFAULT_DTG_PROPERTY_NAME) must throwAn[IllegalArgumentException]
      testType.getDtgField must beNone
    }
  }
  "getDTGDescriptor" should {
    "return a dtg attribute descriptor if SF_PROPERTY_START_TIME is set properly" in {
      val testType = copy(oneDTGType)
      testType.setDtgField(DEFAULT_DTG_PROPERTY_NAME)
      testType.getDtgDescriptor must beSome(oneDTGType.getDescriptor(DEFAULT_DTG_PROPERTY_NAME))
    }
    "not return a dtg attribute descriptor if SF_PROPERTY_START_TIME is not set correctly" in {
      val testType = copy(noDTGType)
      testType.setDtgField(DEFAULT_DTG_PROPERTY_NAME) must throwAn[IllegalArgumentException]
      testType.getDtgDescriptor must beNone
    }
  }
}
| drackaer/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/index/TemporalIndexCheckTest.scala | Scala | apache-2.0 | 4,982 |
package org.http4s
package server
package middleware
import cats.effect._
import fs2.Stream._
import org.http4s.dsl.io._
import org.http4s.Uri.uri
/**
 * Tests for the DefaultHead middleware: a HEAD request is served by an
 * explicit HEAD route when one exists, otherwise it falls through to the
 * matching GET route with the response body truncated (headers retained).
 */
class DefaultHeadSpec extends Http4sSpec {
  val app = DefaultHead(HttpRoutes.of[IO] {
    case GET -> Root / "hello" =>
      Ok("hello")
    case GET -> Root / "special" =>
      Ok(Header("X-Handled-By", "GET"))
    case HEAD -> Root / "special" =>
      Ok(Header("X-Handled-By", "HEAD"))
  }).orNotFound
  "DefaultHead" should {
    "honor HEAD routes" in {
      val req = Request[IO](Method.HEAD, uri = uri("/special"))
      app(req).map(_.headers.get("X-Handled-By".ci).map(_.value)) must returnValue(Some("HEAD"))
    }
    "return truncated body of corresponding GET on fallthrough" in {
      val req = Request[IO](Method.HEAD, uri = uri("/hello"))
      app(req) must returnBody("")
    }
    "retain all headers of corresponding GET on fallthrough" in {
      val get = Request[IO](Method.GET, uri = uri("/hello"))
      val head = get.withMethod(Method.HEAD)
      app(get).map(_.headers).unsafeRunSync() must_== app(head)
        .map(_.headers)
        .unsafeRunSync()
    }
    "allow GET body to clean up on fallthrough" in {
      // The body stream's effect flips this flag; it must still run even
      // though the HEAD response discards the body.
      var cleanedUp = false
      val app = DefaultHead(HttpRoutes.of[IO] {
        case GET -> _ =>
          val body: EntityBody[IO] = eval_(IO({ cleanedUp = true }))
          Ok(body)
      }).orNotFound
      app(Request[IO](Method.HEAD)).flatMap(_.as[String]).unsafeRunSync()
      cleanedUp must beTrue
    }
  }
}
| ChristopherDavenport/http4s | server/src/test/scala/org/http4s/server/middleware/DefaultHeadSpec.scala | Scala | apache-2.0 | 1,524 |
import java.text.NumberFormat
import bloomfilter.mutable.{CuckooFilter, UnsafeTable8Bit}
import com.google.monitoring.runtime.instrumentation.{AllocationRecorder, Sampler}
import com.twitter.algebird.{BloomFilter => AlgebirdBloomFilter}
import scala.util.Random
object SandboxApp {

  /**
   * Prints a snapshot of current JVM heap statistics (free, allocated, max
   * and total free memory, in KiB) to stdout.
   */
  def checkMemory(): Unit = {
    val runtime = Runtime.getRuntime
    val format = NumberFormat.getInstance()
    val sb = new StringBuilder()
    val maxMemory = runtime.maxMemory()
    val allocatedMemory = runtime.totalMemory()
    val freeMemory = runtime.freeMemory()
    sb.append("free memory: " + format.format(freeMemory / 1024) + "\n")
    sb.append("allocated memory: " + format.format(allocatedMemory / 1024) + "\n")
    sb.append("max memory: " + format.format(maxMemory / 1024) + "\n")
    sb.append("total free memory: " + format.format((freeMemory + (maxMemory - allocatedMemory)) / 1024) + "\n")
    System.out.println(sb.toString())
  }

  /** Sandbox entry point: smoke-tests CuckooFilter insertion and membership. */
  def main(args: Array[String]): Unit = {
    val sut = CuckooFilter[Long](1000)
    sut.add(8)
    assert(sut.mightContain(8))
    sut.add(10)
    assert(sut.mightContain(10))
    sut.add(8)
    assert(sut.mightContain(8))
    sut.add(10000)
    assert(sut.mightContain(10000))
  }

  /**
   * Empirically compares false-positive behaviour of the Algebird Bloom
   * filter against this project's mutable BloomFilter: both are loaded with
   * the same random strings, then probed with fresh random strings forever,
   * reporting hit counts once every 1000 probes. Runs until interrupted
   * (by design — this is a sandbox experiment).
   */
  def compareAlgebirdFPR(): Unit = {
    val random: Random = new Random()
    val itemsExpected = 10000L
    val falsePositiveRate = 0.1
    var bf = AlgebirdBloomFilter(itemsExpected.toInt, falsePositiveRate, 0).create("")
    val bf2 = bloomfilter.mutable.BloomFilter[String](itemsExpected, falsePositiveRate)
    var i = 0
    while (i < itemsExpected) {
      val str: String = random.nextString(1000)
      bf = bf.+(str)
      bf2.add(str)
      i += 1
    }
    // Reuse `i` as the probe counter for progress reporting.
    i = 0
    var in, in2 = 0
    while (true) {
      val str = random.nextString(1000)
      if (bf.contains(str).isTrue) {
        in += 1
      }
      if (bf2.mightContain(str)) {
        in2 += 1
      }
      if (i % 1000 == 0) {
        println(s"in: $in; in2: $in2")
      }
      // Bug fix: the probe counter was never advanced here, so the progress
      // line above used to print on every iteration instead of once per 1000.
      i += 1
    }
  }

  /**
   * Demonstrates the Google allocation-instrumentation agent: registers a
   * sampler that logs every allocation, then immediately unregisters it.
   * Any allocations to be observed must occur between the add/remove calls.
   */
  def checkAllocations(): Unit = {
    val sampler: Sampler = new Sampler() {
      def sampleAllocation(count: Int, desc: String, newObj: Object, size: Long) {
        System.out.println("I just allocated the object " + newObj +
          " of type " + desc + " whose size is " + size)
        // count == -1 signals a non-array allocation.
        if (count != -1) {
          System.out.println("It's an array of size " + count)
        }
      }
    }
    AllocationRecorder.addSampler(sampler)
    AllocationRecorder.removeSampler(sampler)
  }
}
package capitulo10
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
/**
 * You can add multiple traits, to a class or to an object, that call one
 * another starting with the last one. This is useful when you need to
 * transform a value in stages.
 */
@RunWith(classOf[JUnitRunner])
class TraitsEmCamadas extends FunSuite{
  trait Logged {
    def log(msg: String) = {""}
  }
  trait ConsoleLogger extends Logged {
    override def log(msg: String) = { "LOG: " + msg }
  }
  trait TimestampLogger extends Logged {
    override def log(msg: String) = {
      super.log("02/09/2014 13:59 " + msg)
      // A fixed timestamp is used to make the test assertion easier.
      // Ideally we would use:
      //super.log(new java.util.Date() + " " + msg)
    }
  }
  trait ShortLogger extends Logged {
    val maxLength = 15
    override def log(msg: String) = {
      super.log(
        if (msg.length <= maxLength) msg else msg.substring(0, maxLength - 3) + "..."
      )
    }
  }
  // Note that each of the log methods passes a modified message to
  // super.log.
  // With traits, super.log does not mean the same thing as with classes.
  // (If it did, these traits would be useless — they would extend Logged,
  // whose methods do nothing.)
  // Instead, super.log calls the next trait in the trait hierarchy, which
  // depends on the order in which the traits were added. Generally, traits
  // are processed starting with the last one. (Later sections cover the
  // details for when the traits form an arbitrary tree rather than just a
  // chain.)
  class Account
  class SavingsAccount extends Account with Logged {
    var balance = 10;
    def withdraw(amount: Double) = {
      if (amount > balance) log("Insufficient funds")
      else log("OK")
    }
  }
  test("traits em camadas"){
    val acct1 = new SavingsAccount with ConsoleLogger with
      TimestampLogger with ShortLogger
    // ShortLogger's log method was called first, and its call to
    // super.log invoked TimestampLogger.
    assert("LOG: 02/09/2014 13:59 Insufficient..." == acct1.withdraw(25))
    val acct2 = new SavingsAccount with ConsoleLogger with
      ShortLogger with TimestampLogger
    // In this other case, TimestampLogger appears last in the trait list.
    // Its log message was produced first, and the result was then
    // shortened.
    assert("LOG: 02/09/2014 1..." == acct2.withdraw(25))
    // With traits, you cannot tell from the source code which method is
    // called by super.someMethod. The exact method depends on the order
    // of the traits in the object or class that uses them. This makes
    // super much more flexible than with traditional inheritance.
    // If you need to control which trait method is called, you can
    // specify it in brackets: super[ConsoleLogger].log(...). The
    // specified type must be an immediate supertype; you cannot access
    // traits or classes that are further up the inheritance hierarchy.
  }
}
package cmwell.analytics.main
import cmwell.analytics.data.InfotonWithDuplicatedSystemFields
import cmwell.analytics.util.CmwellConnector
import org.apache.log4j.LogManager
import org.rogach.scallop.{ScallopConf, ScallopOption}
/**
 * Command-line Spark job that connects to a CM-Well instance, finds infotons
 * whose system fields are duplicated, logs their count and saves them as CSV.
 */
object FindDuplicatedSystemFields {
  def main(args: Array[String]): Unit = {
    val logger = LogManager.getLogger(FindDuplicatedSystemFields.getClass)
    try {
      // --out/-o: output path; trailing argument: the CM-Well URL.
      object Opts extends ScallopConf(args) {
        val out: ScallopOption[String] = opt[String]("out", short = 'o', descr = "The path to save the output to", required = true)
        val url: ScallopOption[String] = trailArg[String]("url", descr = "A CM-Well URL", required = true)
        verify()
      }
      CmwellConnector(
        cmwellUrl = Opts.url(),
        appName = "Find infotons with duplicated system fields"
      ).withSparkSessionDo { spark =>
        import spark.implicits._
        val ds = InfotonWithDuplicatedSystemFields()(spark)
          .toDF()
          .repartition(1) // expect a small number, so make the output easier to deal with.
          .cache()
        logger.info(s"There are ${ds.count()} infotons with duplicated system fields.")
        ds.write.csv(Opts.out())
      }
    }
    catch {
      case ex: Throwable =>
        logger.error(ex.getMessage, ex)
        // Exit non-zero so calling scripts can detect the failure.
        System.exit(1)
    }
  }
}
| bryaakov/CM-Well | tools/dataConsistencyTool/cmwell-spark-analysis/src/main/scala/cmwell/analytics/main/FindDuplicatedSystemFields.scala | Scala | apache-2.0 | 1,343 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.crossdata.connector.mongodb
import org.apache.spark.sql.crossdata.ExecutionType
import org.apache.spark.sql.crossdata.exceptions.CrossdataException
import org.apache.spark.sql.crossdata.test.SharedXDContextTypesTest
import org.apache.spark.sql.types.StructField
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
// Integration tests for dot-notation access to nested fields (e.g.
// structofstruct.field1) on MongoDB-backed tables: projections and filters
// using dots work via Spark execution (and the default execution path) but
// are expected to raise CrossdataException under Native execution.
@RunWith(classOf[JUnitRunner])
class MongoDotsNotationIT extends MongoDataTypesCollection {
  it should "supports Projection with DOT notation using Spark" in {
    assumeEnvironmentIsUpAndRunning
    val sparkRow = sql(s"SELECT bigint, structofstruct.field1 FROM ${SharedXDContextTypesTest.dataTypesTableName}").collect(ExecutionType.Spark)
    sparkRow.head.schema.size should be (2)
    sparkRow.head.schema.head.isInstanceOf[StructField] should be (true)
  }
  it should "supports Projection with DOT notation with no ExecutionType defined" in {
    assumeEnvironmentIsUpAndRunning
    val sparkRow = sql(s"SELECT bigint, structofstruct.field1 FROM ${SharedXDContextTypesTest.dataTypesTableName}").collect()
    sparkRow.head.schema.size should be (2)
    sparkRow.head.schema.head.isInstanceOf[StructField] should be (true)
  }
  it should "Does not supports Projection with DOT notation in Native" in {
    assumeEnvironmentIsUpAndRunning
    val df = sql(s"SELECT bigint, structofstruct.field1 FROM ${SharedXDContextTypesTest.dataTypesTableName}")
    an [CrossdataException] should be thrownBy df.collect(ExecutionType.Native)
  }
  it should "supports Filters with DOT notation with no ExecutionType defined" in {
    assumeEnvironmentIsUpAndRunning
    val sparkRow = sql(s"SELECT int FROM ${SharedXDContextTypesTest.dataTypesTableName} WHERE struct.field2=3").collect()
    sparkRow.length should be (10)
  }
  it should "Does not supports Filters with DOT notation in Native" in {
    assumeEnvironmentIsUpAndRunning
    val df = sql(s"SELECT int FROM ${SharedXDContextTypesTest.dataTypesTableName} WHERE struct.field2=3")
    an [CrossdataException] should be thrownBy df.collect(ExecutionType.Native)
  }
}
| darroyocazorla/crossdata | mongodb/src/test/scala/com/stratio/crossdata/connector/mongodb/MongoDotsNotationIT.scala | Scala | apache-2.0 | 2,712 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.util
import org.slf4j.LoggerFactory
import scala.reflect.internal.util.Position
import scala.tools.nsc.reporters.Reporter
import org.ensime.api._
import org.ensime.core.PositionBackCompat
/**
 * Callback interface through which compiler diagnostics are surfaced (used
 * by PresentationReporter). All methods are no-ops by default so
 * implementors override only what they need.
 */
trait ReportHandler {
  def messageUser(str: String): Unit = {}
  def clearAllScalaNotes(): Unit = {}
  def reportScalaNotes(notes: List[Note]): Unit = {}
  def clearAllJavaNotes(): Unit = {}
  def reportJavaNotes(notes: List[Note]): Unit = {}
}
/**
 * scalac Reporter that forwards presentation-compiler diagnostics to a
 * ReportHandler as Notes. Reporting can be toggled via enable()/disable();
 * severity-0 messages are only logged locally, never forwarded.
 */
class PresentationReporter(handler: ReportHandler) extends Reporter with PositionBackCompat {
  val log = LoggerFactory.getLogger(classOf[PresentationReporter])
  // When false, diagnostics are still counted but not forwarded.
  private var enabled = true
  def enable(): Unit = { enabled = true }
  def disable(): Unit = { enabled = false }
  override def reset(): Unit = {
    super.reset()
    if (enabled) {
      handler.clearAllScalaNotes()
    }
  }
  override def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = {
    // Keep the compiler's per-severity counters accurate.
    severity.count += 1
    try {
      if (severity.id == 0) {
        // Severity id 0: informational — log locally only.
        log.info(msg)
      } else {
        if (enabled) {
          if (pos.isDefined) {
            val source = pos.source
            val f = source.file.absolute.path
            // pos.point == -1 means there is no usable column information.
            val posColumn = if (pos.point == -1) {
              0
            } else {
              pos.column
            }
            val crash = msg.contains("Presentation compiler crashed")
            val note = new Note(
              f,
              msg,
              NoteSeverity(severity.id),
              pos.startOrCursor,
              // sometimes the compiler flags the entire file
              if (crash) pos.startOrCursor else pos.endOrCursor,
              pos.line,
              posColumn
            )
            handler.reportScalaNotes(List(note))
          }
        }
      }
    } catch {
      case ex: UnsupportedOperationException =>
        log.warn("Unsupported operation during reporting", ex)
    }
  }
}
| pascr/ensime-server | core/src/main/scala/org/ensime/util/Reporter.scala | Scala | gpl-3.0 | 2,059 |
package java.util
// Scala re-implementations of the java.util exception/error hierarchy
// (part of a JDK-less runtime library, per the repository path). Each class
// mirrors the corresponding JDK API, including throwing NullPointerException
// when a required constructor argument is null — hence the recurring pattern
// of a private primary constructor plus an explicit null check in the
// public auxiliary constructor.
class ServiceConfigurationError(s: String, e: Throwable) extends Error(s, e) {
  def this(s: String) = this(s, null)
}
class ConcurrentModificationException(s: String) extends RuntimeException(s) {
  def this() = this(null)
}
class DuplicateFormatFlagsException private() extends IllegalFormatException {
  private var flags: String = null
  def this(f: String) {
    this()
    if (f == null)
      throw new NullPointerException()
    flags = f
  }
  def getFlags(): String = flags
  override def getMessage(): String = s"Flags = '$flags'"
}
class EmptyStackException extends RuntimeException
class FormatFlagsConversionMismatchException private(private val c: Char) extends IllegalFormatException {
  private var f: String = null
  def this(f: String, c: Char) {
    this(c)
    if (f == null)
      throw new NullPointerException()
    this.f = f
  }
  def getFlags(): String = f
  def getConversion(): Char = c
  override def getMessage(): String = "Conversion = " + c + ", Flags = " + f
}
class FormatterClosedException extends IllegalStateException
class IllegalFormatCodePointException(private val c: Int) extends IllegalFormatException {
  def getCodePoint(): Int = c
  override def getMessage(): String = s"Code point = $c"
}
class IllegalFormatConversionException private(private val c: Char) extends IllegalFormatException {
  private var arg: Class[_] = null
  def this(c: Char, arg: Class[_]) {
    this(c)
    if (arg == null)
      throw new NullPointerException()
    this.arg = arg
  }
  def getConversion(): Char = c
  def getArgumentClass(): Class[_] = arg
  override def getMessage(): String = s"$c != ${arg.getName()}"
}
// Package-private constructor: only the format-exception subclasses above
// and below may instantiate/extend this base type, as in the JDK.
class IllegalFormatException private[util] () extends IllegalArgumentException
class IllegalFormatFlagsException private() extends IllegalFormatException {
  private var flags: String = null
  def this(f: String) {
    this()
    if (f == null)
      throw new NullPointerException()
    this.flags = f
  }
  def getFlags(): String = flags
  override def getMessage(): String = "Flags = '" + flags + "'"
}
class IllegalFormatPrecisionException(private val p: Int) extends IllegalFormatException {
  def getPrecision(): Int = p
  override def getMessage(): String = Integer.toString(p)
}
class IllegalFormatWidthException(private val w: Int) extends IllegalFormatException {
  def getWidth(): Int = w
  override def getMessage(): String = Integer.toString(w)
}
class IllformedLocaleException(s: String, errorIndex: Int)
    extends RuntimeException(s + (if(errorIndex < 0) "" else " [at index " + errorIndex + "]")) {
  def this() = this(null, -1)
  def this(s: String) = this(s, -1)
  def getErrorIndex(): Int = errorIndex
}
class InputMismatchException(s: String) extends NoSuchElementException(s) {
  def this() = this(null)
}
class InvalidPropertiesFormatException(s: String) extends java.io.IOException(s) {
  def this(e: Throwable) {
    this(if(e == null) null.asInstanceOf[String] else e.toString())
    this.initCause(e)
  }
  // Serialization is deliberately unsupported, mirroring the JDK contract:
  // private def writeObject(out: java.io.ObjectOutputStream) =
  //  throw new java.io.NotSerializableException("Not serializable.")
  // private def readObject(in: java.io.ObjectInputStream) =
  //  throw new java.io.NotSerializableException("Not serializable.")
}
class MissingFormatArgumentException private() extends IllegalFormatException {
  private var s: String = null
  def this(s: String) {
    this()
    if (s == null)
      throw new NullPointerException()
    this.s = s
  }
  def getFormatSpecifier(): String = s
  override def getMessage(): String = "Format specifier '" + s + "'"
}
class MissingFormatWidthException private() extends IllegalFormatException {
  private var s: String = null
  def this(s: String) {
    this()
    if (s == null)
      throw new NullPointerException()
    this.s = s
  }
  def getFormatSpecifier(): String = s
  override def getMessage(): String = s
}
class MissingResourceException private[util](
    s: String, private var className: String, private var key: String, e: Throwable)
    extends RuntimeException(s, e) {
  def this(s: String, className: String, key: String) = this(s, className, key, null)
  def getClassName(): String = className
  def getKey(): String = key
}
class NoSuchElementException(s: String) extends RuntimeException(s) {
  def this() = this(null)
}
class TooManyListenersException(s: String) extends Exception(s) {
  def this() = this(null)
}
class UnknownFormatConversionException private () extends IllegalFormatException {
  private var s: String = null
  def this(s: String) {
    this()
    if (s == null)
      throw new NullPointerException()
    this.s = s
  }
  def getConversion(): String = s
  override def getMessage(): String = s"Conversion = '$s'"
}
class UnknownFormatFlagsException private() extends IllegalFormatException {
  private var flags: String = null
  def this(f: String) {
    this()
    if (f == null)
      throw new NullPointerException()
    this.flags = f
  }
  def getFlags(): String = flags
  override def getMessage(): String = "Flags = " + flags
}
| swhgoon/scala-js | javalib/source/src/java/util/Throwables.scala | Scala | bsd-3-clause | 5,079 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.h2o.backends.internal
import java.net.InetAddress
import ai.h2o.sparkling.H2OConf
import ai.h2o.sparkling.backend.SharedBackendConf
import ai.h2o.sparkling.backend.utils.{ArgumentBuilder, ReflectionUtils, SharedBackendUtils}
import org.apache.spark.scheduler.local.LocalSchedulerBackend
import org.apache.spark.{SparkContext, SparkEnv}
/**
 * Utilities specific to the Sparkling Water internal backend, where H2O
 * nodes run inside Spark executors.
 */
private[backends] trait InternalBackendUtils extends SharedBackendUtils {
  // Warns about Spark options unsupported by this backend and, when
  // configured to fail on them, aborts with IllegalArgumentException.
  // Each entry pairs an option name with a value; "" means any value of
  // that option is unsupported.
  def checkUnsupportedSparkOptions(unsupportedSparkOptions: Seq[(String, String)], conf: H2OConf): Unit = {
    unsupportedSparkOptions.foreach(opt =>
      if (conf.contains(opt._1) && (opt._2 == "" || conf.get(opt._1) == opt._2)) {
        logWarning(s"Unsupported options ${opt._1} detected!")
        if (conf.isFailOnUnsupportedSparkParamEnabled) {
          logWarning(s"""
               |The application is going down, since the parameter ${SharedBackendConf.PROP_FAIL_ON_UNSUPPORTED_SPARK_PARAM} is true!
               |If you would like to skip the fail call, please, specify the value of the parameter to false.
          """.stripMargin)
          throw new IllegalArgumentException(s"Unsupported argument: $opt")
        }
      })
  }
  // Scans consecutive argument pairs for "-ip <address>" and records the
  // resolved address as H2O's self address.
  def setSelfAddressToH2ONode(h2oArgs: Seq[String]): Unit = {
    for (Seq(first, second) <- h2oArgs.sliding(2) if first == "-ip") {
      water.H2O.SELF_ADDRESS = InetAddress.getByName(second)
    }
  }
  /**
   * Produce arguments for H2O node based on provided configuration and environment
   *
   * It is expected to run on the executor machine
   *
   * @return array of H2O launcher command line arguments
   */
  def getH2OWorkerArgs(conf: H2OConf): Seq[String] = {
    val ip = {
      val hostname = getHostname(SparkEnv.get)
      translateHostnameToIp(hostname)
    }
    // "-network" and "-ip" are mutually exclusive: when a network mask is
    // configured it takes precedence over the explicit IP.
    new ArgumentBuilder()
      .add(getH2OCommonArgs(conf))
      .add(getH2OSecurityArgs(conf))
      .addIf("-network", conf.nodeNetworkMask, conf.nodeNetworkMask.isDefined)
      .addIf("-ip", ip, conf.nodeNetworkMask.isEmpty)
      .buildArgs()
  }
  // Best-effort guess of the total executor count: explicit
  // spark.executor.instances, then the --num-executors launch argument,
  // then scheduler-backend internals (local mode => 1; YARN via reflection).
  private[spark] def guessTotalExecutorSize(sc: SparkContext): Option[Int] = {
    sc.conf
      .getOption("spark.executor.instances")
      .map(_.toInt)
      .orElse(getCommandArg("--num-executors").map(_.toInt))
      .orElse({
        val sb = sc.schedulerBackend
        val num = sb match {
          case _: LocalSchedulerBackend => Some(1)
          // Use text reference to yarn backend to avoid having dependency on Spark's Yarn module
          case b if b.getClass.getSimpleName == "YarnSchedulerBackend" =>
            Some(ReflectionUtils.reflector(b).getV[Int]("totalExpectedExecutors"))
          //case b: CoarseGrainedSchedulerBackend => b.numExistingExecutors
          case _ => None
        }
        num
      })
  }
  // Extracts the value following `argName` from the JVM launch command line
  // (system property sun.java.command), if present.
  private def getCommandArg(argName: String): Option[String] = {
    val cmdLine = System.getProperty("sun.java.command", "").split(" ").map(_.trim)
    val valueIdx = (for (i <- cmdLine.indices; if cmdLine(i).equals(argName)) yield i + 1).headOption
    valueIdx.filter(i => i < cmdLine.length).map(i => cmdLine(i))
  }
}
| h2oai/sparkling-water | core/src/main/scala/org/apache/spark/h2o/backends/internal/InternalBackendUtils.scala | Scala | apache-2.0 | 3,895 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.