| code (string, 5–1M) | repo_name (string, 5–109) | path (string, 6–208) | language (1 class) | license (15 classes) | size (int64, 5–1M) |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.descriptors
import java.util
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.table.api.ValidationException
import org.apache.flink.table.utils.TypeStringUtils
import org.apache.flink.util.Preconditions
/**
* Descriptor for a literal value. A literal value consists of a type and the actual value.
* Expression values are not allowed.
*
* If no type is set, the type is automatically derived from the value. Currently,
* this is supported for: BOOLEAN, INT, DOUBLE, and VARCHAR.
*
* Examples:
* - "true", "false" -> BOOLEAN
* - "42", "-5" -> INT
* - "2.0", "1234.222" -> DOUBLE
* - VARCHAR otherwise
*/
class LiteralValue extends HierarchyDescriptor {
var typeInfo: Option[String] = None
var value: Option[Any] = None
/**
* Type information of the literal value. E.g. Types.BOOLEAN.
*
* @param typeInfo type information describing the value
*/
def of(typeInfo: TypeInformation[_]): LiteralValue = {
Preconditions.checkNotNull(typeInfo, "Type information must not be null.")
this.typeInfo = Option(TypeStringUtils.writeTypeInfo(typeInfo))
this
}
/**
* Type string of the literal value. E.g. "BOOLEAN".
*
* @param typeString type string describing the value
*/
def of(typeString: String): LiteralValue = {
this.typeInfo = Option(typeString)
this
}
/**
* Literal BOOLEAN value.
*
* @param value literal BOOLEAN value
*/
def value(value: Boolean): LiteralValue = {
this.value = Option(value)
this
}
/**
* Literal INT value.
*
* @param value literal INT value
*/
def value(value: Int): LiteralValue = {
this.value = Option(value)
this
}
/**
* Literal DOUBLE value.
*
* @param value literal DOUBLE value
*/
def value(value: Double): LiteralValue = {
this.value = Option(value)
this
}
/**
* Literal FLOAT value.
*
* @param value literal FLOAT value
*/
def value(value: Float): LiteralValue = {
this.value = Option(value)
this
}
/**
* Literal value either for an explicit VARCHAR type or automatically derived type.
*
* If no type is set, the type is automatically derived from the value. Currently,
* this is supported for: BOOLEAN, INT, DOUBLE, and VARCHAR.
*
* @param value literal value
*/
def value(value: String): LiteralValue = {
this.value = Option(value)
this
}
/**
* Literal BIGINT value.
*
* @param value literal BIGINT value
*/
def value(value: Long): LiteralValue = {
this.value = Option(value)
this
}
/**
* Literal TINYINT value.
*
* @param value literal TINYINT value
*/
def value(value: Byte): LiteralValue = {
this.value = Option(value)
this
}
/**
* Literal SMALLINT value.
*
* @param value literal SMALLINT value
*/
def value(value: Short): LiteralValue = {
this.value = Option(value)
this
}
/**
* Literal DECIMAL value.
*
* @param value literal DECIMAL value
*/
def value(value: java.math.BigDecimal): LiteralValue = {
this.value = Option(value)
this
}
/**
* Converts this descriptor into a set of properties.
*/
override def toProperties: util.Map[String, String] = {
val properties = new DescriptorProperties()
addPropertiesWithPrefix(HierarchyDescriptorValidator.EMPTY_PREFIX, properties)
properties.asMap()
}
/**
* Internal method for properties conversion.
*/
override private[flink] def addPropertiesWithPrefix(
keyPrefix: String,
properties: DescriptorProperties)
: Unit = {
typeInfo match {
// explicit type
case Some(ti) =>
properties.putString(keyPrefix + "type", ti)
value.foreach(v => properties.putString(keyPrefix + "value", String.valueOf(v)))
// implicit type
case None =>
// do not allow values in top-level
if (keyPrefix == HierarchyDescriptorValidator.EMPTY_PREFIX) {
throw new ValidationException(
"Literal values with implicit type must not exist in the top level of a hierarchy.")
}
value.foreach { v =>
properties.putString(keyPrefix.substring(0, keyPrefix.length - 1), String.valueOf(v))
}
}
}
}
/**
* Descriptor for a literal value. A literal value consists of a type and the actual value.
* Expression values are not allowed.
*
* If no type is set, the type is automatically derived from the value. Currently,
* this is supported for: BOOLEAN, INT, DOUBLE, and VARCHAR.
*
* Examples:
* - "true", "false" -> BOOLEAN
* - "42", "-5" -> INT
* - "2.0", "1234.222" -> DOUBLE
* - VARCHAR otherwise
*/
object LiteralValue {
/**
* Descriptor for a literal value. A literal value consists of a type and the actual value.
* Expression values are not allowed.
*
* If no type is set, the type is automatically derived from the value. Currently,
* this is supported for: BOOLEAN, INT, DOUBLE, and VARCHAR.
*
* Examples:
* - "true", "false" -> BOOLEAN
* - "42", "-5" -> INT
* - "2.0", "1234.222" -> DOUBLE
* - VARCHAR otherwise
*
* @deprecated Use `new LiteralValue()`.
*/
@deprecated
def apply() = new LiteralValue()
}
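// Usage sketch (illustrative, not part of the original file; assumes the
// surrounding flink-table-planner descriptor classes are on the classpath):
// an explicitly typed literal serializes both its type and its value.
object LiteralValueExample {
  def main(args: Array[String]): Unit = {
    val literal = new LiteralValue().of("INT").value(42)
    // With the empty top-level prefix an explicit type is mandatory, so this
    // yields the entries "type" -> "INT" and "value" -> "42".
    println(literal.toProperties)
  }
}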
| ueshin/apache-flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/descriptors/LiteralValue.scala | Scala | apache-2.0 | 6,182 |
/** MACHINE-GENERATED FROM AVRO SCHEMA. DO NOT EDIT DIRECTLY */
package test
final case class Up(value: Int)
final case class Down(value: Int)
| julianpeeters/avrohugger | avrohugger-core/src/test/expected/standard/test/Joystick.scala | Scala | apache-2.0 | 144 |
package synahive.restapi.utils
import synahive.restapi.models._
import spray.json.DefaultJsonProtocol
trait Protocol extends DefaultJsonProtocol {
implicit val usersFormat = jsonFormat3(UserEntity)
implicit val tokenFormat = jsonFormat3(TokenEntity)
}
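// Usage sketch (illustrative, not part of the original file): mixing in
// Protocol brings the implicit formats into scope, so the entities round-trip
// through spray-json. UserEntity's fields are not shown here, hence the
// generic helper.
object ProtocolExample extends Protocol {
  import spray.json._
  // Serialize an entity to JSON and parse it back via the implicit format.
  def roundTripUser(user: UserEntity): UserEntity =
    user.toJson.convertTo[UserEntity]
}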
| synahive/synahive-server | src/main/scala/synahive/restapi/utils/Protocol.scala | Scala | mit | 258 |
package org.scalatra
import scala.util.DynamicVariable
/**
* The core Scalatra DSL.
*/
trait CoreDsl extends Handler with Control {
/**
* The type of application context supported by this application. Made
* abstract for compatibility with the servlet implementation.
*/
type ApplicationContextT <: ApplicationContext
/**
* The application context. It is shared among all handlers within this
* application.
*/
implicit def applicationContext: ApplicationContextT
@deprecated("Use applicationContext instead", "2.1.0")
def servletContext: ApplicationContextT = applicationContext
/**
* The current request
*/
implicit def request: RequestT
/**
* A map of the current parameters. The map contains the head of every
* non-empty value in `multiParams`.
*/
def params: Map[String, String]
/**
* A multi-map of the current parameters. Parameters may come from:
* - the query string
* - the POST body
* - the route matchers of the currently executing route
*
* The map has a default value of `Seq.empty`.
*/
def multiParams: MultiParams
/**
* The current response.
*/
implicit def response: ResponseT
/**
* Gets the content type of the current response.
*/
def contentType: String = response.contentType getOrElse null
/**
* Sets the content type of the current response.
*/
def contentType_=(contentType: String): Unit =
response.contentType = Option(contentType)
@deprecated("Use status_=(Int) instead", "2.1.0")
def status(code: Int) = status_=(code)
/**
* Sets the status code of the current response.
*/
def status_=(code: Int): Unit = response.status = ResponseStatus(code)
/**
* Gets the status code of the current response.
*/
def status: Int = response.status.code
/**
* Sends a redirect response and immediately halts the current action.
*/
def redirect(uri: String): Unit = {
response.redirect(uri)
halt()
}
/**
* Adds a filter to run before the route. The filter only runs if each
* routeMatcher returns Some. If the routeMatchers list is empty, the
* filter runs for all routes.
*/
def before(transformers: RouteTransformer*)(block: => Any): Unit
@deprecated("Use before() { ... }", "2.0.0")
final def beforeAll(block: => Any): Unit = before()(block)
@deprecated("Use before(RouteTransformer*) { ... }", "2.0.0")
final def beforeSome(transformers: RouteTransformer*)(block: => Any): Unit =
before(transformers: _*)(block)
/**
* Adds a filter to run after the route. The filter only runs if each
* routeMatcher returns Some. If the routeMatchers list is empty, the
* filter runs for all routes.
*/
def after(transformers: RouteTransformer*)(block: => Any): Unit
@deprecated("Use after() { ... }", "2.0.0")
final def afterAll(block: => Any): Unit = after()(block)
@deprecated("Use after(RouteTransformer*) { ... }", "2.0.0")
final def afterSome(transformers: RouteTransformer*)(block: => Any): Unit =
after(transformers: _*)(block)
/**
* Defines a block to run if no matching routes are found, or if all
* matching routes pass.
*/
def notFound(block: => Any): Unit
/**
* Defines a block to run if matching routes are found only for other
* methods. The set of matching methods is passed to the block.
*/
def methodNotAllowed(block: Set[HttpMethod] => Any): Unit
/**
* Defines an error handler for exceptions thrown in either the before
* block or a route action.
*
* If the error handler does not match, the result falls through to the
* previously defined error handler. The default error handler simply
* rethrows the exception.
*
* The error handler is run before the after filters, and the result is
* rendered like a standard response. It is the error handler's
* responsibility to set any appropriate status code.
*/
def error(handler: ErrorHandler): Unit
/**
* The Scalatra DSL core methods take a list of [[org.scalatra.RouteMatcher]]
* and a block as the action body. The return value of the block is
* rendered through the pipeline and sent to the client as the response body.
*
* See [[org.scalatra.ScalatraKernel.renderResponseBody]] for the detailed
* behaviour and how to handle your response body more explicitly, and see
* how different return types are handled.
*
* The block is executed in the context of a CoreDsl instance, so all the
* methods defined in this trait are also available inside the block.
*
* {{{
* get("/") {
* <form action="/echo">
* <label>Enter your name</label>
* <input type="text" name="name"/>
* </form>
* }
*
* post("/echo") {
* "hello {params('name)}!"
* }
* }}}
*
* ScalatraKernel provides implicit transformation from boolean blocks,
* strings and regular expressions to [[org.scalatra.RouteMatcher]], so
* you can write code naturally.
* {{{
* get("/", request.getRemoteHost == "127.0.0.1") { "Hello localhost!" }
* }}}
*
*/
def get(transformers: RouteTransformer*)(block: => Any): Route
/**
* @see get
*/
def post(transformers: RouteTransformer*)(block: => Any): Route
/**
* @see get
*/
def put(transformers: RouteTransformer*)(block: => Any): Route
/**
* @see get
*/
def delete(transformers: RouteTransformer*)(block: => Any): Route
/**
* @see get
*/
def options(transformers: RouteTransformer*)(block: => Any): Route
/**
* @see get
*/
def patch(transformers: RouteTransformer*)(block: => Any): Route
/**
* Defines an error handler for a range of HTTP response status codes. Use it to intercept any
* response code previously set with #status, including a generic 404.
* {{{
* trap(403) {
*   "You are not authorized"
* }
* }}}
*/
def trap(codes: Range)(block: => Any): Unit
/**
* @see trap
*/
def trap(code: Int)(block: => Any): Unit =
trap(Range(code, code+1))(block)
}
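// Usage sketch (illustrative, not part of the original file; assumes the
// standard ScalatraServlet implementation of CoreDsl): filters, routes, and
// handlers declared through the DSL above.
class EchoApp extends ScalatraServlet {
  before() {
    contentType = "text/plain" // runs before every matching route
  }
  get("/hello/:name") {
    s"hello ${params("name")}!" // the block's result becomes the response body
  }
  trap(403) {
    "You are not authorized" // intercepts responses with status 403
  }
  notFound {
    status = 404
    "no such route"
  }
}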
| louk/scalatra | core/src/main/scala/org/scalatra/CoreDsl.scala | Scala | bsd-2-clause | 6,076 |
package pl.touk.nussknacker.engine.flink.util.source
import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext
import org.apache.flink.streaming.api.watermark
object StaticSource extends SourceFunction[String] {
@volatile var buffer = List[Timer]()
@volatile var running = true
override def cancel(): Unit = {
buffer = List()
running = false
}
override def run(ctx: SourceContext[String]) = {
while (running) {
synchronized {
buffer.reverse.foreach {
case Watermark(time) =>
ctx.emitWatermark(new watermark.Watermark(time))
case a: Data =>
ctx.collectWithTimestamp(a.value, a.time)
}
buffer = List()
}
Thread.sleep(100)
}
}
def add(timer: Timer) = {
synchronized {
buffer = timer :: buffer
}
}
sealed trait Timer
case class Watermark(time: Long) extends Timer
case class Data(time: Long, value: String) extends Timer
}
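// Usage sketch (illustrative, not part of the original file; assumes the
// Flink 1.10+ Scala streaming API on the classpath): the job is started
// asynchronously and elements/watermarks are pushed in from the test thread.
object StaticSourceExample {
  import org.apache.flink.streaming.api.scala._
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.addSource(StaticSource).print()
    val client = env.executeAsync("static-source-demo")
    StaticSource.add(StaticSource.Data(1000L, "first")) // element with timestamp 1000
    StaticSource.add(StaticSource.Watermark(2000L))     // advance event time to 2000
    Thread.sleep(500)                                   // let the source drain the buffer
    StaticSource.cancel()                               // stops the polling loop
    client.cancel()
  }
}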
| TouK/nussknacker | engine/flink/components-utils/src/main/scala/pl/touk/nussknacker/engine/flink/util/source/StaticSource.scala | Scala | apache-2.0 | 1,063 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala
package reflect
package internal
import scala.collection.{ mutable, immutable }
import scala.ref.WeakReference
import mutable.ListBuffer
import Flags._
import scala.util.control.ControlThrowable
import scala.annotation.tailrec
import util.Statistics
import util.ThreeValues._
import Variance._
import Depth._
import TypeConstants._
/* A standard type pattern match:
case ErrorType =>
// internal: error
case WildcardType =>
// internal: unknown
case BoundedWildcardType(bounds) =>
// internal: unknown
case NoType =>
case NoPrefix =>
case ThisType(sym) =>
// sym.this.type
case SuperType(thistpe, supertpe) =>
// super references
case SingleType(pre, sym) =>
// pre.sym.type
case ConstantType(value) =>
// Int(2)
case TypeRef(pre, sym, args) =>
// pre.sym[targs]
// Outer.this.C would be represented as TypeRef(ThisType(Outer), C, List())
case RefinedType(parents, defs) =>
// parent1 with ... with parentn { defs }
case ExistentialType(tparams, result) =>
// result forSome { tparams }
case AnnotatedType(annots, tp) =>
// tp @annots
// the following are non-value types; you cannot write them down in Scala source.
case TypeBounds(lo, hi) =>
// >: lo <: hi
case ClassInfoType(parents, defs, clazz) =>
// same as RefinedType except as body of class
case MethodType(paramtypes, result) =>
// (paramtypes)result
// For instance def m(): T is represented as MethodType(List(), T)
case NullaryMethodType(result) => // eliminated by uncurry
// an eval-by-name type
// For instance def m: T is represented as NullaryMethodType(T)
case PolyType(tparams, result) =>
// [tparams]result where result is a (Nullary)MethodType or ClassInfoType
// The remaining types are not used after phase `typer`.
case OverloadedType(pre, tparams, alts) =>
// all alternatives of an overloaded ident
case AntiPolyType(pre, targs) =>
// rarely used, disappears when combined with a PolyType
case TypeVar(inst, constr) =>
// a type variable
// Replace occurrences of type parameters with type vars, where
// inst is the instantiation and constr is a list of bounds.
case ErasedValueType(clazz, underlying) =>
// only used during erasure of derived value classes.
*/
trait Types
extends api.Types
with tpe.TypeComparers
with tpe.TypeToStrings
with tpe.CommonOwners
with tpe.GlbLubs
with tpe.TypeMaps
with tpe.TypeConstraints
with tpe.FindMembers
with util.Collections { self: SymbolTable =>
import definitions._
import TypesStats._
private var explainSwitch = false
private final val emptySymbolSet = immutable.Set.empty[Symbol]
private final val traceTypeVars = sys.props contains "scalac.debug.tvar"
private final val breakCycles = settings.breakCycles.value
/** In case anyone wants to turn on type parameter bounds being used
* to seed type constraints.
*/
private final val propagateParameterBoundsToTypeVars = sys.props contains "scalac.debug.prop-constraints"
private final val sharperSkolems = sys.props contains "scalac.experimental.sharper-skolems"
protected val enableTypeVarExperimentals = settings.Xexperimental.value
/** Caching the most recent map has a 75-90% hit rate. */
private object substTypeMapCache {
private[this] var cached: SubstTypeMap = new SubstTypeMap(Nil, Nil)
def apply(from: List[Symbol], to: List[Type]): SubstTypeMap = {
if ((cached.from ne from) || (cached.to ne to))
cached = new SubstTypeMap(from, to)
cached
}
}
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix.
*/
private var _skolemizationLevel = 0
def skolemizationLevel = _skolemizationLevel
def skolemizationLevel_=(value: Int) = _skolemizationLevel = value
/** A map from lists to compound types that have the given list as parents.
* This is used to avoid duplication in the computation of base type sequences and baseClasses.
* It makes use of the fact that these two operations depend only on the parents,
* not on the refinement.
*/
private val _intersectionWitness = perRunCaches.newWeakMap[List[Type], WeakReference[Type]]()
def intersectionWitness = _intersectionWitness
/** A proxy for a type (identified by field `underlying`) that forwards most
* operations to it (for exceptions, see WrappingProxy, which forwards even more operations).
* every operation that is overridden for some kind of types should be forwarded.
*/
trait SimpleTypeProxy extends Type {
def underlying: Type
// the following operations + those in RewrappingTypeProxy are all operations
// in class Type that are overridden in some subclass
// Important to keep this up-to-date when new operations are added!
override def isTrivial = underlying.isTrivial
override def isHigherKinded: Boolean = underlying.isHigherKinded
override def typeConstructor: Type = underlying.typeConstructor
override def isError = underlying.isError
override def isErroneous = underlying.isErroneous
override def paramSectionCount = underlying.paramSectionCount
override def paramss = underlying.paramss
override def params = underlying.params
override def paramTypes = underlying.paramTypes
override def termSymbol = underlying.termSymbol
override def termSymbolDirect = underlying.termSymbolDirect
override def typeParams = underlying.typeParams
override def boundSyms = underlying.boundSyms
override def typeSymbol = underlying.typeSymbol
override def typeSymbolDirect = underlying.typeSymbolDirect
override def widen = underlying.widen
override def typeOfThis = underlying.typeOfThis
override def bounds = underlying.bounds
override def parents = underlying.parents
override def prefix = underlying.prefix
override def decls = underlying.decls
override def baseType(clazz: Symbol) = underlying.baseType(clazz)
override def baseTypeSeq = underlying.baseTypeSeq
override def baseTypeSeqDepth = underlying.baseTypeSeqDepth
override def baseClasses = underlying.baseClasses
}
/** A proxy for a type (identified by field `underlying`) that forwards most
* operations to it. Every operation that is overridden for some kind of types is
* forwarded here. Some operations are rewrapped again.
*/
trait RewrappingTypeProxy extends SimpleTypeProxy {
protected def maybeRewrap(newtp: Type) = (
if (newtp eq underlying) this
else {
// - BoundedWildcardTypes reach here during erroneous compilation: neg/t6258
// - Higher-kinded exclusion is because [x]CC[x] compares =:= to CC: pos/t3800
// - Avoid reusing the existing Wrapped(RefinedType) when we've been asked to wrap an =:= RefinementTypeRef, the
// distinction is important in base type sequences. See TypesTest.testExistentialRefinement
// - Otherwise, if newtp =:= underlying, don't rewrap it.
val hasSpecialMeaningBeyond_=:= = newtp.isWildcard || newtp.isHigherKinded || newtp.isInstanceOf[RefinementTypeRef]
if (!hasSpecialMeaningBeyond_=:= && (newtp =:= underlying)) this
else rewrap(newtp)
}
)
protected def rewrap(newtp: Type): Type
// the following are all operations in class Type that are overridden in some subclass
// Important to keep this up-to-date when new operations are added!
override def widen = maybeRewrap(underlying.widen)
override def narrow = underlying.narrow
override def deconst = maybeRewrap(underlying.deconst)
override def resultType = maybeRewrap(underlying.resultType)
override def resultType(actuals: List[Type]) = maybeRewrap(underlying.resultType(actuals))
override def paramSectionCount = 0
override def paramss: List[List[Symbol]] = List()
override def params: List[Symbol] = List()
override def paramTypes: List[Type] = List()
override def typeArgs = underlying.typeArgs
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = underlying.instantiateTypeParams(formals, actuals)
override def skolemizeExistential(owner: Symbol, origin: AnyRef) = underlying.skolemizeExistential(owner, origin)
override def normalize = maybeRewrap(underlying.normalize)
override def etaExpand = maybeRewrap(underlying.etaExpand)
override def dealias = maybeRewrap(underlying.dealias)
override def cloneInfo(owner: Symbol) = maybeRewrap(underlying.cloneInfo(owner))
override def atOwner(owner: Symbol) = maybeRewrap(underlying.atOwner(owner))
override def prefixString = underlying.prefixString
override def isComplete = underlying.isComplete
override def complete(sym: Symbol) = underlying.complete(sym)
override def load(sym: Symbol) { underlying.load(sym) }
override def withAnnotations(annots: List[AnnotationInfo]) = maybeRewrap(underlying.withAnnotations(annots))
override def withoutAnnotations = maybeRewrap(underlying.withoutAnnotations)
}
case object UnmappableTree extends TermTree {
override def toString = "<unmappable>"
super.setType(NoType)
override def tpe_=(t: Type) = if (t != NoType) {
throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
}
}
abstract class TypeApiImpl extends TypeApi { this: Type =>
def declaration(name: Name): Symbol = decl(name)
def declarations = decls
def typeArguments = typeArgs
def erasure = this match {
case ConstantType(value) => widen.erasure
case _ =>
var result: Type = transformedType(this)
result = result.normalize match { // necessary to deal with erasures of HK types, typeConstructor won't work
case PolyType(undets, underlying) => existentialAbstraction(undets, underlying) // we don't want undets in the result
case _ => result
}
// erasure screws up all ThisTypes for modules into PackageTypeRefs
// we need to unscrew them, or certain typechecks will fail mysteriously
// http://groups.google.com/group/scala-internals/browse_thread/thread/6d3277ae21b6d581
result = result.map(tpe => tpe match {
case tpe: PackageTypeRef => ThisType(tpe.sym)
case _ => tpe
})
result
}
def substituteSymbols(from: List[Symbol], to: List[Symbol]): Type = substSym(from, to)
def substituteTypes(from: List[Symbol], to: List[Type]): Type = subst(from, to)
// the only thingies that we want to splice are: 1) type parameters, 2) abstract type members
// the thingies that we don't want to splice are: 1) concrete types (obviously), 2) existential skolems
def isSpliceable = {
this.isInstanceOf[TypeRef] && typeSymbol.isAbstractType && !typeSymbol.isExistential
}
def companion = {
val sym = typeSymbolDirect
if (sym.isModule && !sym.hasPackageFlag) sym.companionSymbol.tpe
else if (sym.isModuleClass && !sym.isPackageClass) sym.sourceModule.companionSymbol.tpe
else if (sym.isClass && !sym.isModuleClass && !sym.isPackageClass) sym.companionSymbol.info
else NoType
}
def paramLists: List[List[Symbol]] = paramss
}
/** The base class for all types */
abstract class Type extends TypeApiImpl with Annotatable[Type] {
/** Types for which asSeenFrom always is the identity, no matter what
* prefix or owner.
*/
def isTrivial: Boolean = false
/** Is this type higher-kinded, i.e., is it a type constructor @M */
def isHigherKinded: Boolean = false
def takesTypeArgs: Boolean = this.isHigherKinded
/** Does this type denote a stable reference (i.e. singleton type)? */
final def isStable: Boolean = definitions isStable this
/** Is this type dangerous (i.e. it might contain conflicting
* type information when empty, so that it can be constructed
* in a way that results in type unsoundness)? A dangerous type has an underlying
* type of the form T_1 with ... with T_n { decls }, where one of the
* T_i (i > 1) is an abstract type.
*/
final def isVolatile: Boolean = definitions isVolatile this
/** Is this type a structural refinement type (it ''refines'' members that have not been inherited) */
def isStructuralRefinement: Boolean = false
/** Does this type depend immediately on an enclosing method parameter?
* I.e., is it a singleton type whose termSymbol refers to an argument of the symbol's owner (which is a method)?
*/
def isImmediatelyDependent: Boolean = false
/** Is this type a dependent method type? */
def isDependentMethodType: Boolean = false
/** True for WildcardType or BoundedWildcardType. */
def isWildcard = false
/** Is this type produced as a repair for an error? */
def isError: Boolean = typeSymbol.isError || termSymbol.isError
/** Does this type or any of its parts refer to an error type? */
def isErroneous: Boolean = ErroneousCollector.collect(this)
/** Can this type only be subtyped by bottom types?
* This is assessed to be the case if the class is final,
* and all type parameters (if any) are invariant.
*/
def isFinalType = typeSymbol.hasOnlyBottomSubclasses && prefix.isStable
/** Is this type completed (i.e. not a lazy type)? */
def isComplete: Boolean = true
/** If this is a lazy type, assign a new type to `sym`. */
def complete(sym: Symbol) {}
/** The term symbol associated with the type
* Note that the symbol of the normalized type is returned (@see normalize)
*/
def termSymbol: Symbol = NoSymbol
/** The type symbol associated with the type
* Note that the symbol of the normalized type is returned (@see normalize)
* A type's typeSymbol should if possible not be inspected directly, due to
* the likelihood that what is true for tp.typeSymbol is not true for
* tp.sym, due to normalization.
*/
def typeSymbol: Symbol = NoSymbol
/** The term symbol ''directly'' associated with the type.
*/
def termSymbolDirect: Symbol = termSymbol
/** The type symbol ''directly'' associated with the type.
* In other words, no normalization is performed: if this is an alias type,
* the symbol returned is that of the alias, not the underlying type.
*/
def typeSymbolDirect: Symbol = typeSymbol
/** The base type underlying a type proxy, identity on all other types */
def underlying: Type = this
/** Widen from singleton type to its underlying non-singleton
* base type by applying one or more `underlying` dereferences,
* identity for all other types.
*
* class Outer { class C ; val x: C }
* val o: Outer
* <o.x.type>.widen = o.C
*/
def widen: Type = this
/** Map a constant type or not-null-type to its underlying base type,
* identity for all other types.
*/
def deconst: Type = this
/** The type of `this` of a class type or reference type. */
def typeOfThis: Type = typeSymbol.typeOfThis
/** Map to a singleton type which is a subtype of this type.
* The fallback implemented here gives
* T.narrow = T' forSome { type T' <: T with Singleton }
* Overridden where we know more about where types come from.
*/
/*
Note: this implementation of narrow is theoretically superior to the one
in use below, but imposed a significant performance penalty. It was in trunk
from svn r24960 through r25080.
*/
/*
def narrow: Type =
if (phase.erasedTypes) this
else commonOwner(this) freshExistential ".type" setInfo singletonBounds(this) tpe
*/
/** Map to a singleton type which is a subtype of this type.
* The fallback implemented here gives:
* {{{
* T.narrow = (T {}).this.type
* }}}
* Overridden where we know more about where types come from.
*/
def narrow: Type =
if (phase.erasedTypes) this
else {
val cowner = commonOwner(this)
refinedType(this :: Nil, cowner, EmptyScope, cowner.pos).narrow
}
/** For a TypeBounds type, itself;
* for a reference denoting an abstract type, its bounds,
* for all other types, a TypeBounds type all of whose bounds are this type.
*/
def bounds: TypeBounds = TypeBounds(this, this)
/** For a class or intersection type, its parents.
* For a TypeBounds type, the parents of its hi bound.
* inherited by typerefs, singleton types, and refinement types,
* The empty list for all other types */
def parents: List[Type] = List()
/** For a class with nonEmpty parents, the first parent.
* Otherwise some specific fixed top type.
*/
def firstParent = if (parents.nonEmpty) parents.head else ObjectTpe
/** For a typeref or single-type, the prefix of the normalized type (@see normalize).
* NoType for all other types. */
def prefix: Type = NoType
/** A chain of all typeref or singletype prefixes of this type, longest first.
* (Only used from safeToString.)
*/
def prefixChain: List[Type] = this match {
case TypeRef(pre, _, _) => pre :: pre.prefixChain
case SingleType(pre, _) => pre :: pre.prefixChain
case _ => List()
}
/** This type, without its type arguments @M */
def typeConstructor: Type = this
/** For a typeref, its arguments. The empty list for all other types */
def typeArgs: List[Type] = List()
/** A list of placeholder types derived from the type parameters.
* Used by RefinedType and TypeRef.
*/
protected def dummyArgs: List[Type] = typeParams map (_.typeConstructor)
/** For a (nullary) method or poly type, its direct result type,
* the type itself for all other types. */
def resultType: Type = this
def resultType(actuals: List[Type]) = this
/** Only used for dependent method types. */
def resultApprox: Type = ApproximateDependentMap(resultType)
/** For a curried/nullary method or poly type its non-method result type,
* the type itself for all other types */
final def finalResultType: Type = definitions finalResultType this
/** For a method type, the number of its value parameter sections,
* 0 for all other types */
def paramSectionCount: Int = 0
/** For a method or poly type, a list of its value parameter sections,
* the empty list for all other types */
def paramss: List[List[Symbol]] = List()
/** For a method or poly type, its first value parameter section,
* the empty list for all other types */
def params: List[Symbol] = List()
/** For a method or poly type, the types of its first value parameter section,
* the empty list for all other types */
def paramTypes: List[Type] = List()
/** For a (potentially wrapped) poly type, its type parameters,
* the empty list for all other types */
def typeParams: List[Symbol] = List()
/** For a (potentially wrapped) poly or existential type, its bound symbols,
* the empty list for all other types */
def boundSyms: immutable.Set[Symbol] = emptySymbolSet
/** Replace formal type parameter symbols with actual type arguments. ErrorType on arity mismatch.
*
* Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M
*/
def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
if (sameLength(formals, actuals)) this.subst(formals, actuals) else ErrorType
/** If this type is an existential, turn all existentially bound variables to type skolems.
* @param owner The owner of the created type skolems
* @param origin The tree whose type was an existential for which the skolem was created.
*/
def skolemizeExistential(owner: Symbol, origin: AnyRef): Type = this
/** A simple version of skolemizeExistential for situations where
* owner or unpack location do not matter (typically used in subtype tests)
*/
def skolemizeExistential: Type = skolemizeExistential(NoSymbol, null)
/** Reduce to beta eta-long normal form.
* Expands type aliases and converts higher-kinded TypeRefs to PolyTypes.
* Functions on types are also implemented as PolyTypes.
*
* Example: (in the below, `<List>` is the type constructor of List)
* TypeRef(pre, `<List>`, List()) is replaced by
* PolyType(X, TypeRef(pre, `<List>`, List(X)))
*
* Discussion: normalize is NOT usually what you want to be calling.
* The (very real) danger with normalize is that it will force types
* which would not otherwise have been forced, leading to mysterious
* behavioral differences, cycles, and other elements of mysteries.
* Under most conditions the method you should be calling is `dealiasWiden`
* (see that method for more info.)
*
* Here are a few of the side-effect-trail-leaving methods called
* by various implementations of normalize:
*
* - sym.info
* - tpe.etaExpand
* - tpe.betaReduce
* - tpe.memberType
* - sym.nextOverriddenSymbol
* - constraint.inst
*
* If you've been around the compiler a while that list must fill
* your heart with fear.
*/
def normalize = this // @MAT
def etaExpand = this
/** Expands type aliases. */
def dealias = this
/** Repeatedly apply widen and dealias until they have no effect.
* This compensates for the fact that type aliases can hide beneath
* singleton types and singleton types can hide inside type aliases.
* !!! - and yet it is still inadequate, because aliases and singletons
* might lurk in the upper bounds of an abstract type. See SI-7051.
*/
def dealiasWiden: Type = (
if (this ne widen) widen.dealiasWiden
else if (this ne dealias) dealias.dealiasWiden
else this
)
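// Illustrative example (not in the original): given `type S = String` and a
// stable `val x: S`, the singleton x.type widens to the alias S and S dealiases
// to String; a single widen or dealias alone would stop at S, but dealiasWiden
// keeps alternating until it reaches String.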
/** All the types encountered in the course of dealiasing/widening,
* including each intermediate beta reduction step (whereas calling
* dealias applies as many as possible.)
*/
def dealiasWidenChain: List[Type] = this :: (
if (this ne widen) widen.dealiasWidenChain
else if (this ne betaReduce) betaReduce.dealiasWidenChain
else Nil
)
/** Performs a single step of beta-reduction on types.
* Given:
*
* type C[T] = B[T]
* type B[T] = A
* class A
*
* The following will happen after `betaReduce` is invoked:
* TypeRef(pre, <C>, List(Int)) is replaced by
* TypeRef(pre, <B>, List(Int))
*
* Unlike `dealias`, which recursively applies beta reduction, until it's stuck,
* `betaReduce` performs exactly one step and then returns.
*/
def betaReduce: Type = this
/** For a classtype or refined type, its defined or declared members;
* inherited by subtypes and typerefs.
* The empty scope for all other types.
*/
def decls: Scope = EmptyScope
/** The defined or declared members with name `name` in this type;
* an OverloadedSymbol if several exist, NoSymbol if none exist.
* Alternatives of overloaded symbol appear in the order they are declared.
*/
def decl(name: Name): Symbol = findDecl(name, 0)
/** A list of all non-private members defined or declared in this type. */
def nonPrivateDecls: List[Symbol] = decls.filterNot(_.isPrivate).toList
/** The non-private defined or declared members with name `name` in this type;
* an OverloadedSymbol if several exist, NoSymbol if none exist.
* Alternatives of overloaded symbol appear in the order they are declared.
*/
def nonPrivateDecl(name: Name): Symbol = findDecl(name, PRIVATE)
/** A list of all members of this type (defined or inherited)
* Members appear in linearization order of their owners.
* Members with the same owner appear in reverse order of their declarations.
*/
def members: Scope = membersBasedOnFlags(0, 0)
/** A list of all non-private members of this type (defined or inherited) */
def nonPrivateMembers: Scope = membersBasedOnFlags(BridgeAndPrivateFlags, 0)
/** A list of all non-private members of this type (defined or inherited),
* admitting members with given flags `admit`
*/
def nonPrivateMembersAdmitting(admit: Long): Scope = membersBasedOnFlags(BridgeAndPrivateFlags & ~admit, 0)
/** A list of all implicit symbols of this type (defined or inherited) */
def implicitMembers: Scope = {
typeSymbolDirect match {
case sym: ModuleClassSymbol => sym.implicitMembers
case _ => membersBasedOnFlags(BridgeFlags, IMPLICIT)
}
}
/** A list of all deferred symbols of this type (defined or inherited) */
def deferredMembers: Scope = membersBasedOnFlags(BridgeFlags, DEFERRED)
/** The member with given name,
* an OverloadedSymbol if several exist, NoSymbol if none exist */
def member(name: Name): Symbol =
memberBasedOnName(name, BridgeFlags)
/** The non-private member with given name,
* an OverloadedSymbol if several exist, NoSymbol if none exist.
* Bridges are excluded from the result
*/
def nonPrivateMember(name: Name): Symbol =
memberBasedOnName(name, BridgeAndPrivateFlags)
def packageObject: Symbol = member(nme.PACKAGE)
/** The non-private member with given name, admitting members with given flags `admit`.
* "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE
* flag are usually excluded from findMember results, but supplying any of those flags
* to this method disables that exclusion.
*
* An OverloadedSymbol if several exist, NoSymbol if none exists.
*/
def nonPrivateMemberAdmitting(name: Name, admit: Long): Symbol =
memberBasedOnName(name, BridgeAndPrivateFlags & ~admit)
/** The non-local member with given name,
* an OverloadedSymbol if several exist, NoSymbol if none exist */
def nonLocalMember(name: Name): Symbol =
memberBasedOnName(name, BridgeFlags | LOCAL)
/** Members excluding and requiring the given flags.
* Note: unfortunately it doesn't work to exclude DEFERRED this way.
*/
def membersBasedOnFlags(excludedFlags: Long, requiredFlags: Long): Scope =
findMembers(excludedFlags, requiredFlags)
def memberBasedOnName(name: Name, excludedFlags: Long): Symbol =
findMember(name, excludedFlags, 0, stableOnly = false)
/** The least type instance of given class which is a supertype
* of this type. Example:
* class D[T]
* class C extends p.D[Int]
* ThisType(C).baseType(D) = p.D[Int]
*/
def baseType(clazz: Symbol): Type = NoType
/** This type as seen from prefix `pre` and class `clazz`. This means:
* Replace all thistypes of `clazz` or one of its subclasses
* by `pre` and instantiate all parameters by arguments of `pre`.
* Proceed analogously for thistypes referring to outer classes.
*
* Example:
* class D[T] { def m: T }
* class C extends p.D[Int]
* T.asSeenFrom(ThisType(C), D) (where D is owner of m)
* = Int
*/
def asSeenFrom(pre: Type, clazz: Symbol): Type = {
val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null
try {
val trivial = (
this.isTrivial
|| phase.erasedTypes && pre.typeSymbol != ArrayClass
|| skipPrefixOf(pre, clazz)
)
if (trivial) this
else {
val m = new AsSeenFromMap(pre.normalize, clazz)
val tp = m(this)
val tp1 = existentialAbstraction(m.capturedParams, tp)
if (m.capturedSkolems.isEmpty) tp1
else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1)
}
} finally if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
/** The info of `sym`, seen as a member of this type.
*
* Example:
* {{{
* class D[T] { def m: T }
* class C extends p.D[Int]
* ThisType(C).memberType(m) = Int
* }}}
*/
def memberInfo(sym: Symbol): Type = {
// assert(sym ne NoSymbol, this)
sym.info.asSeenFrom(this, sym.owner)
}
/** The type of `sym`, seen as a member of this type. */
def memberType(sym: Symbol): Type = sym.tpeHK match {
case OverloadedType(_, alts) => OverloadedType(this, alts)
case tp =>
// Correct caching is nearly impossible because `sym.tpeHK.asSeenFrom(pre, sym.owner)`
// may have different results even for reference-identical `sym.tpeHK` and `pre` (even in the same period).
// For example, `pre` could be a `ThisType`. For such a type, `tpThen eq tpNow` does not imply
// `tpThen` and `tpNow` mean the same thing, because `tpThen.typeSymbol.info` could have been different
// from what it is now, and the cache won't know simply by looking at `pre`.
if (sym eq NoSymbol) NoType
else tp.asSeenFrom(this, sym.owner)
}
/** Substitute types `to` for occurrences of references to
* symbols `from` in this type.
*/
def subst(from: List[Symbol], to: List[Type]): Type =
if (from.isEmpty) this else substTypeMapCache(from, to)(this)
/** Substitute symbols `to` for occurrences of symbols `from` in this type.
*
* !!! NOTE !!!: If you need to do a substThis and a substSym, the substThis has to come
* first, as otherwise symbols will immediately get rebound in typeRef to the old
* symbol.
*/
def substSym(from: List[Symbol], to: List[Symbol]): Type =
if ((from eq to) || from.isEmpty) this
else new SubstSymMap(from, to) apply this
/** Substitute all occurrences of `ThisType(from)` in this type by `to`.
*
* !!! NOTE !!!: If you need to do a substThis and a substSym, the substThis has to come
* first, as otherwise symbols will immediately get rebound in typeRef to the old
* symbol.
*/
def substThis(from: Symbol, to: Type): Type =
new SubstThisMap(from, to) apply this
def substThis(from: Symbol, to: Symbol): Type =
substThis(from, to.thisType)
/** Performs both substThis and substSym, in that order.
*
* [JZ] Reverted `SubstThisAndSymMap` from 334872, which was not the same as
* `substThis(from, to).substSym(symsFrom, symsTo)`.
*
* `SubstThisAndSymMap` performs a breadth-first map over this type, which meant that
* symbol substitution occurred before `ThisType` substitution. Consequently, in substitution
* of a `SingleType(ThisType(from), sym)`, symbols were rebound to `from` rather than `to`.
*/
def substThisAndSym(from: Symbol, to: Type, symsFrom: List[Symbol], symsTo: List[Symbol]): Type =
if (symsFrom eq symsTo) substThis(from, to)
else substThis(from, to).substSym(symsFrom, symsTo)
/** Returns all parts of this type which satisfy predicate `p` */
def withFilter(p: Type => Boolean) = new FilterMapForeach(p)
class FilterMapForeach(p: Type => Boolean) extends FilterTypeCollector(p){
def foreach[U](f: Type => U): Unit = collect(Type.this) foreach f
def map[T](f: Type => T): List[T] = collect(Type.this) map f
}
@inline final def orElse(alt: => Type): Type = if (this ne NoType) this else alt
/** Returns optionally first type (in a preorder traversal) which satisfies predicate `p`,
* or None if none exists.
*/
def find(p: Type => Boolean): Option[Type] = new FindTypeCollector(p).collect(this)
/** Apply `f` to each part of this type */
def foreach(f: Type => Unit) { new ForEachTypeTraverser(f).traverse(this) }
/** Apply `pf` to each part of this type on which the function is defined */
def collect[T](pf: PartialFunction[Type, T]): List[T] = new CollectTypeCollector(pf).collect(this)
/** Apply `f` to each part of this type; children get mapped before their parents */
def map(f: Type => Type): Type = new TypeMap {
def apply(x: Type) = f(mapOver(x))
} apply this
/** Is there part of this type which satisfies predicate `p`? */
def exists(p: Type => Boolean): Boolean = !find(p).isEmpty
/** Does this type contain a reference to this symbol? */
def contains(sym: Symbol): Boolean = new ContainsCollector(sym).collect(this)
/** Is this type a subtype of that type? */
def <:<(that: Type): Boolean = {
if (Statistics.canEnable) stat_<:<(that)
else {
(this eq that) ||
(if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that)
else isSubType(this, that))
}
}
/** Is this type a subtype of that type in a pattern context?
* Dummy type arguments on the right hand side are replaced with
* fresh existentials, except for Arrays.
*
* See bug1434.scala for an example of code which would fail
* if only a <:< test were applied.
*/
def matchesPattern(that: Type): Boolean = (this <:< that) || (that match {
case ArrayTypeRef(elem2) if elem2.typeConstructor.isHigherKinded =>
this match {
case ArrayTypeRef(elem1) => elem1 matchesPattern elem2
case _ => false
}
case TypeRef(_, sym, args) =>
val that1 = existentialAbstraction(args map (_.typeSymbol), that)
(that ne that1) && (this <:< that1) && {
debuglog(s"$this.matchesPattern($that) depended on discarding args and testing <:< $that1")
true
}
case _ =>
false
})
def stat_<:<(that: Type): Boolean = {
if (Statistics.canEnable) Statistics.incCounter(subtypeCount)
val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, subtypeNanos) else null
val result =
(this eq that) ||
(if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that)
else isSubType(this, that))
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
result
}
/** Is this type a weak subtype of that type? True also for numeric types, e.g. Int weak_<:< Long.
*/
def weak_<:<(that: Type): Boolean = {
if (Statistics.canEnable) Statistics.incCounter(subtypeCount)
val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, subtypeNanos) else null
val result =
((this eq that) ||
(if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
else isWeakSubType(this, that)))
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
result
}
/** Is this type equivalent to that type? */
def =:=(that: Type): Boolean = (
(this eq that) ||
(if (explainSwitch) explain("=", isSameType, this, that)
else isSameType(this, that))
)
/** Is this type close enough to that type so that members
* with the two types would override each other?
* This means:
* - Either both types are polytypes with the same number of
* type parameters and their result types match after renaming
* corresponding type parameters
* - Or both types are (nullary) method types with equivalent type parameter types
* and matching result types
* - Or both types are equivalent
* - Or phase.erasedTypes is false and both types are neither method nor
* poly types.
*/
def matches(that: Type): Boolean = matchesType(this, that, !phase.erasedTypes)
/** Same as matches, except that non-method types are always assumed to match. */
def looselyMatches(that: Type): Boolean = matchesType(this, that, alwaysMatchSimple = true)
/** The shortest sorted upwards closed array of types that contains
* this type as first element.
*
* A list or array of types ts is upwards closed if
*
* for all t in ts:
* for all typerefs p.s[args] such that t <: p.s[args]
* there exists a typeref p'.s[args'] in ts such that
* t <: p'.s[args'] <: p.s[args],
*
* and
*
* for all singleton types p.s such that t <: p.s
* there exists a singleton type p'.s in ts such that
* t <: p'.s <: p.s
*
* Sorting is with respect to Symbol.isLess() on type symbols.
*/
def baseTypeSeq: BaseTypeSeq = baseTypeSingletonSeq(this)
/** The maximum depth (@see typeDepth)
* of each type in the BaseTypeSeq of this type except the first.
*/
def baseTypeSeqDepth: Depth = Depth(1)
/** The list of all baseclasses of this type (including its own typeSymbol)
* in linearization order, starting with the class itself and ending
* in class Any.
*/
def baseClasses: List[Symbol] = List()
/**
* @param sym the class symbol
* @return the index of given class symbol in the BaseTypeSeq of this type,
* or -1 if no base type with given class symbol exists.
*/
def baseTypeIndex(sym: Symbol): Int = {
val bts = baseTypeSeq
var lo = 0
var hi = bts.length - 1
while (lo <= hi) {
val mid = (lo + hi) / 2
val btssym = bts.typeSymbol(mid)
if (sym == btssym) return mid
else if (sym isLess btssym) hi = mid - 1
else if (btssym isLess sym) lo = mid + 1
else abort("sym is neither `sym == btssym`, `sym isLess btssym` nor `btssym isLess sym`")
}
-1
}
/** If this is an ExistentialType, PolyType or MethodType, a copy with cloned type / value parameters
* owned by `owner`. Identity for all other types.
*/
def cloneInfo(owner: Symbol) = this
/** Make sure this type is correct as the info of given owner; clone it if not. */
def atOwner(owner: Symbol) = this
protected def objectPrefix = "object "
protected def packagePrefix = "package "
def trimPrefix(str: String) = str stripPrefix objectPrefix stripPrefix packagePrefix
/** The string representation of this type used as a prefix */
def prefixString = trimPrefix(toString) + "#"
/** Convert toString avoiding infinite recursions by cutting off
* after `maxToStringRecursions` recursion levels. Uses `safeToString`
* to produce a string on each level.
*/
override final def toString: String = {
// see comments to internal#Symbol.typeSignature for an explanation why this initializes
if (!isCompilerUniverse) fullyInitializeType(this)
typeToString(this)
}
/** Method to be implemented in subclasses.
* Converts this type to a string by calling toString for its parts.
*/
def safeToString: String = super.toString
/** The string representation of this type, with singletypes explained. */
def toLongString = {
val str = toString
if (str == "type") widen.toString
else if ((str endsWith ".type") && !typeSymbol.isModuleClass)
widen match {
case RefinedType(_, _) => "" + widen
case _ =>
if (widen.toString.trim == "") str
else s"$str (with underlying type $widen)"
}
else str
}
/** The string representation of this type when the direct object in a sentence.
* Normally this is no different from the regular representation, but modules
* read better as "object Foo" here and "Foo.type" the rest of the time.
*/
def directObjectString = safeToString
/** A test whether a type contains any unification type variables.
* Overridden with custom logic except where trivially true.
*/
def isGround: Boolean = this match {
case ThisType(_) | NoPrefix | WildcardType | NoType | ErrorType | ConstantType(_) =>
true
case _ =>
typeVarToOriginMap(this) eq this
}
/** If this is a symbol loader type, load and assign a new type to `sym`. */
def load(sym: Symbol) {}
private def findDecl(name: Name, excludedFlags: Int): Symbol = {
var alts: List[Symbol] = List()
var sym: Symbol = NoSymbol
var e: ScopeEntry = decls.lookupEntry(name)
while (e ne null) {
if (!e.sym.hasFlag(excludedFlags.toLong)) {
if (sym == NoSymbol) sym = e.sym
else {
if (alts.isEmpty) alts = sym :: Nil
alts = e.sym :: alts
}
}
e = decls.lookupNextEntry(e)
}
if (alts.isEmpty) sym
else (baseClasses.head.newOverloaded(this, alts))
}
/** Find all members meeting the flag requirements.
*
* If you require a DEFERRED member, you will get it if it exists -- even if there's an overriding concrete member.
* If you exclude DEFERRED members, or don't specify any requirements,
* you won't get deferred members (whether they have an overriding concrete member or not)
*
* Thus, findMember requiring DEFERRED flags yields deferred members,
* while `findMember(excludedFlags = 0, requiredFlags = 0).filter(_.isDeferred)` may not (if there's a corresponding concrete member)
*
* Requirements take precedence over exclusions, so requiring and excluding DEFERRED will yield a DEFERRED member (if there is one).
*
*/
def findMembers(excludedFlags: Long, requiredFlags: Long): Scope = {
def findMembersInternal = new FindMembers(this, excludedFlags, requiredFlags).apply()
if (this.isGround) findMembersInternal
else suspendingTypeVars(typeVarsInType(this))(findMembersInternal)
}
/**
* Find member(s) in this type. If several members matching criteria are found, they are
* returned in an OverloadedSymbol
*
* @param name The member's name
* @param excludedFlags Returned members do not have these flags
* @param requiredFlags Returned members do have these flags
* @param stableOnly If set, return only members that are types or stable values
*/
def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
def findMemberInternal = new FindMember(this, name, excludedFlags, requiredFlags, stableOnly).apply()
if (this.isGround) findMemberInternal
else suspendingTypeVars(typeVarsInType(this))(findMemberInternal)
}
/** The (existential or otherwise) skolems and existentially quantified variables which are free in this type */
def skolemsExceptMethodTypeParams: List[Symbol] = {
var boundSyms: List[Symbol] = List()
var skolems: List[Symbol] = List()
for (t <- this) {
t match {
case ExistentialType(quantified, qtpe) =>
boundSyms = boundSyms ::: quantified
case TypeRef(_, sym, _) =>
if ((sym.isExistentialSkolem || sym.isGADTSkolem) && // treat GADT skolems like existential skolems
!((boundSyms contains sym) || (skolems contains sym)))
skolems = sym :: skolems
case _ =>
}
}
skolems
}
// Implementation of Annotatable for all types but AnnotatedType, which
// overrides these.
def annotations: List[AnnotationInfo] = Nil
def withoutAnnotations: Type = this
def filterAnnotations(p: AnnotationInfo => Boolean): Type = this
def setAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this)
def withAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this)
/** The kind of this type; used for debugging */
def kind: String = "unknown type of class "+getClass()
}
// Subclasses ------------------------------------------------------------
/**
* A type that can be passed to unique(..) and be stored in the uniques map.
*/
abstract class UniqueType extends Type with Product {
final override val hashCode = computeHashCode
protected def computeHashCode = scala.runtime.ScalaRunTime._hashCode(this)
}
/** A base class for types that defer some operations
* to their immediate supertype.
*/
abstract class SubType extends UniqueType {
def supertype: Type
override def parents: List[Type] = supertype.parents
override def decls: Scope = supertype.decls
override def baseType(clazz: Symbol): Type = supertype.baseType(clazz)
override def baseTypeSeq: BaseTypeSeq = supertype.baseTypeSeq
override def baseTypeSeqDepth: Depth = supertype.baseTypeSeqDepth
override def baseClasses: List[Symbol] = supertype.baseClasses
}
/** A base class for types that represent a single value
* (single-types and this-types).
*/
abstract class SingletonType extends SubType with SimpleTypeProxy with SingletonTypeApi {
def supertype = underlying
override def isTrivial = false
override def widen: Type = underlying.widen
override def baseTypeSeq: BaseTypeSeq = {
if (Statistics.canEnable) Statistics.incCounter(singletonBaseTypeSeqCount)
underlying.baseTypeSeq prepend this
}
override def isHigherKinded = false // singleton type classifies objects, thus must be kind *
override def safeToString: String = {
// Avoiding printing Predef.type and scala.package.type as "type",
// since in all other cases we omit those prefixes.
val pre = underlying.typeSymbol.skipPackageObject
if (pre.isOmittablePrefix) pre.fullName + ".type"
else prefixString + "type"
}
/*
override def typeOfThis: Type = typeSymbol.typeOfThis
override def bounds: TypeBounds = TypeBounds(this, this)
override def prefix: Type = NoType
override def typeArgs: List[Type] = List()
override def typeParams: List[Symbol] = List()
*/
}
/** An object representing an erroneous type */
case object ErrorType extends Type {
// todo see whether we can do without
override def isError: Boolean = true
override def decls: Scope = new ErrorScope(NoSymbol)
override def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
var sym = decls lookup name
if (sym == NoSymbol) {
sym = NoSymbol.newErrorSymbol(name)
decls enter sym
}
sym
}
override def baseType(clazz: Symbol): Type = this
override def safeToString: String = "<error>"
override def narrow: Type = this
override def kind = "ErrorType"
}
/** An object representing an unknown type, used during type inference.
* If you see WildcardType outside of inference it is almost certainly a bug.
*/
case object WildcardType extends Type {
override def isWildcard = true
override def safeToString: String = "?"
override def kind = "WildcardType"
}
/** BoundedWildcardTypes, used only during type inference, are created in
* two places that I can find:
*
* 1. If the expected type of an expression is an existential type,
* its hidden symbols are replaced with bounded wildcards.
* 2. When an implicit conversion is being sought based in part on
* the name of a method in the converted type, a HasMethodMatching
* type is created: a MethodType with parameters typed as
* BoundedWildcardTypes.
*/
case class BoundedWildcardType(override val bounds: TypeBounds) extends Type with BoundedWildcardTypeApi {
override def isWildcard = true
override def safeToString: String = "?" + bounds
override def kind = "BoundedWildcardType"
}
object BoundedWildcardType extends BoundedWildcardTypeExtractor
/** An object representing a non-existing type */
case object NoType extends Type {
override def isTrivial: Boolean = true
override def safeToString: String = "<notype>"
override def kind = "NoType"
}
/** An object representing a non-existing prefix */
case object NoPrefix extends Type {
override def isTrivial: Boolean = true
override def prefixString = ""
override def safeToString: String = "<noprefix>"
override def kind = "NoPrefixType"
}
/** A class for this-types of the form <sym>.this.type
*/
abstract case class ThisType(sym: Symbol) extends SingletonType with ThisTypeApi {
if (!sym.isClass && !sym.isFreeType) {
// SI-6640 allow StubSymbols to reveal what's missing from the classpath before we trip the assertion.
sym.failIfStub()
abort(s"ThisType($sym) for sym which is not a class")
}
override def isTrivial: Boolean = sym.isPackageClass
override def typeSymbol = sym
override def underlying: Type = sym.typeOfThis
override def isHigherKinded = sym.isRefinementClass && underlying.isHigherKinded
override def prefixString =
if (settings.debug) sym.nameString + ".this."
else if (sym.isAnonOrRefinementClass) "this."
else if (sym.isOmittablePrefix) ""
else if (sym.isModuleClass) sym.fullNameString + "."
else sym.nameString + ".this."
override def safeToString: String =
if (sym.isEffectiveRoot) "" + sym.name
else super.safeToString
override def narrow: Type = this
override def kind = "ThisType"
}
final class UniqueThisType(sym: Symbol) extends ThisType(sym) { }
object ThisType extends ThisTypeExtractor {
def apply(sym: Symbol): Type = (
if (!phase.erasedTypes) unique(new UniqueThisType(sym))
else sym.tpe_*
)
}
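  /* A minimal sketch of observing a ThisType through the public runtime mirror
   * (illustrative only; not exercised by the compiler itself):
   *
   *   import scala.reflect.runtime.universe._
   *   class C { def self: this.type = this }
   *   val tpe = typeOf[C].member(TermName("self")).asMethod.returnType
   *   // tpe is a ThisType and prints as "C.this.type"
   */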
/** A class for singleton types of the form `<prefix>.<sym.name>.type`.
* Cannot be created directly; one should always use `singleType` for creation.
*/
abstract case class SingleType(pre: Type, sym: Symbol) extends SingletonType with SingleTypeApi {
private var trivial: ThreeValue = UNKNOWN
override def isTrivial: Boolean = {
if (trivial == UNKNOWN) trivial = fromBoolean(pre.isTrivial)
toBoolean(trivial)
}
override def isGround = sym.isPackageClass || pre.isGround
private[reflect] var underlyingCache: Type = NoType
private[reflect] var underlyingPeriod = NoPeriod
private[Types] def invalidateSingleTypeCaches(): Unit = {
underlyingCache = NoType
underlyingPeriod = NoPeriod
}
override def underlying: Type = {
val cache = underlyingCache
if (underlyingPeriod == currentPeriod && cache != null) cache
else {
defineUnderlyingOfSingleType(this)
underlyingCache
}
}
// more precise conceptually, but causes cyclic errors: (paramss exists (_ contains sym))
override def isImmediatelyDependent = (sym ne NoSymbol) && (sym.owner.isMethod && sym.isValueParameter)
/*
override def narrow: Type = {
if (phase.erasedTypes) this
else {
val thissym = refinedType(List(this), sym.owner, EmptyScope).typeSymbol
if (sym.owner != NoSymbol) {
//Console.println("narrowing module " + sym + thissym.owner);
thissym.typeOfThis = this
}
thissym.thisType
}
}
*/
override def narrow: Type = this
override def termSymbol = sym
override def prefix: Type = pre
override def prefixString = (
if (sym.skipPackageObject.isOmittablePrefix) ""
else if (sym.isPackageObjectOrClass) pre.prefixString
else pre.prefixString + sym.nameString + "."
)
override def kind = "SingleType"
}
final class UniqueSingleType(pre: Type, sym: Symbol) extends SingleType(pre, sym)
object SingleType extends SingleTypeExtractor {
def apply(pre: Type, sym: Symbol): Type = {
unique(new UniqueSingleType(pre, sym))
}
}
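  /* A minimal sketch via the public extractor (runtime mirror assumed):
   *
   *   import scala.reflect.runtime.universe._
   *   object O
   *   val SingleType(pre, sym) = typeOf[O.type]
   *   // sym is the module symbol of O; the type prints as "O.type"
   */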
protected def defineUnderlyingOfSingleType(tpe: SingleType) = {
val period = tpe.underlyingPeriod
if (period != currentPeriod) {
tpe.underlyingPeriod = currentPeriod
if (!isValid(period)) {
// [Eugene to Paul] needs review
tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType
assert(tpe.underlyingCache ne tpe, tpe)
}
}
}
abstract case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType with SuperTypeApi {
private var trivial: ThreeValue = UNKNOWN
override def isTrivial: Boolean = {
if (trivial == UNKNOWN) trivial = fromBoolean(thistpe.isTrivial && supertpe.isTrivial)
toBoolean(trivial)
}
override def typeSymbol = thistpe.typeSymbol
override def underlying = supertpe
override def prefix: Type = supertpe.prefix
    override def prefixString = thistpe.prefixString.replaceAll("""\bthis\.$""", "super.")
override def narrow: Type = thistpe.narrow
override def kind = "SuperType"
}
final class UniqueSuperType(thistp: Type, supertp: Type) extends SuperType(thistp, supertp)
object SuperType extends SuperTypeExtractor {
def apply(thistp: Type, supertp: Type): Type = {
if (phase.erasedTypes) supertp
else unique(new UniqueSuperType(thistp, supertp))
}
}
/** A class for the bounds of abstract types and type parameters
*/
abstract case class TypeBounds(lo: Type, hi: Type) extends SubType with TypeBoundsApi {
def supertype = hi
override def isTrivial: Boolean = lo.isTrivial && hi.isTrivial
override def bounds: TypeBounds = this
def containsType(that: Type) = that match {
case TypeBounds(_, _) => that <:< this
case _ => lo <:< that && that <:< hi
}
private def emptyLowerBound = typeIsNothing(lo) || lo.isWildcard
private def emptyUpperBound = typeIsAny(hi) || hi.isWildcard
def isEmptyBounds = emptyLowerBound && emptyUpperBound
override def safeToString = scalaNotation(_.toString)
/** Bounds notation used in Scala syntax.
* For example +This <: scala.collection.generic.Sorted[K,This].
*/
private[internal] def scalaNotation(typeString: Type => String): String = {
(if (emptyLowerBound) "" else " >: " + typeString(lo)) +
(if (emptyUpperBound) "" else " <: " + typeString(hi))
}
/** Bounds notation used in http://adriaanm.github.com/files/higher.pdf.
* For example *(scala.collection.generic.Sorted[K,This]).
*/
private[internal] def starNotation(typeString: Type => String): String = {
if (emptyLowerBound && emptyUpperBound) ""
else if (emptyLowerBound) "(" + typeString(hi) + ")"
else "(%s, %s)" format (typeString(lo), typeString(hi))
}
override def kind = "TypeBoundsType"
}
final class UniqueTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi)
object TypeBounds extends TypeBoundsExtractor {
def empty: TypeBounds = apply(NothingTpe, AnyTpe)
def upper(hi: Type): TypeBounds = apply(NothingTpe, hi)
def lower(lo: Type): TypeBounds = apply(lo, AnyTpe)
def apply(lo: Type, hi: Type): TypeBounds = {
unique(new UniqueTypeBounds(lo, hi)).asInstanceOf[TypeBounds]
}
}
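  /* A small sketch (runtime mirror assumed; Symbol#info is 2.11+ public API):
   *
   *   import scala.reflect.runtime.universe._
   *   val tparam = typeOf[List[Int]].typeSymbol.asType.typeParams.head
   *   val TypeBounds(lo, hi) = tparam.info
   *   // lo is Nothing and hi is Any: empty bounds, rendered as "" by scalaNotation
   */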
object CompoundType {
def unapply(tp: Type): Option[(List[Type], Scope, Symbol)] = tp match {
case ClassInfoType(parents, decls, clazz) => Some((parents, decls, clazz))
case RefinedType(parents, decls) => Some((parents, decls, tp.typeSymbol))
case _ => None
}
}
/** A common base class for intersection types and class types
*/
abstract class CompoundType extends Type with CompoundTypeApi {
private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
private[reflect] var baseTypeSeqPeriod = NoPeriod
private[reflect] var baseClassesCache: List[Symbol] = _
private[reflect] var baseClassesPeriod = NoPeriod
private[Types] def invalidatedCompoundTypeCaches() {
baseTypeSeqCache = null
baseTypeSeqPeriod = NoPeriod
baseClassesCache = null
baseClassesPeriod = NoPeriod
}
override def baseTypeSeq: BaseTypeSeq = {
val cached = baseTypeSeqCache
if (baseTypeSeqPeriod == currentPeriod && cached != null && cached != undetBaseTypeSeq)
cached
else {
defineBaseTypeSeqOfCompoundType(this)
if (baseTypeSeqCache eq undetBaseTypeSeq)
throw new RecoverableCyclicReference(typeSymbol)
baseTypeSeqCache
}
}
override def baseTypeSeqDepth: Depth = baseTypeSeq.maxDepth
override def baseClasses: List[Symbol] = {
val cached = baseClassesCache
if (baseClassesPeriod == currentPeriod && cached != null) cached
else {
defineBaseClassesOfCompoundType(this)
if (baseClassesCache eq null)
throw new RecoverableCyclicReference(typeSymbol)
baseClassesCache
}
}
/** The slightly less idiomatic use of Options is due to
* performance considerations. A version using for comprehensions
* might be too slow (this is deemed a hotspot of the type checker).
*
     * Check with Martin before changing this method.
*/
def memo[A](op1: => A)(op2: Type => A): A = {
def updateCache(): A = {
intersectionWitness(parents) = new WeakReference(this)
op1
}
intersectionWitness get parents match {
case Some(ref) =>
ref.get match {
case Some(w) => if (w eq this) op1 else op2(w)
case None => updateCache()
}
case None => updateCache()
}
}
override def baseType(sym: Symbol): Type = {
val index = baseTypeIndex(sym)
if (index >= 0) baseTypeSeq(index) else NoType
}
override def narrow: Type = typeSymbol.thisType
override def isStructuralRefinement: Boolean =
typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement)
protected def shouldForceScope = settings.debug || parents.isEmpty || !decls.isEmpty
protected def initDecls = fullyInitializeScope(decls)
protected def scopeString = if (shouldForceScope) initDecls.mkString("{", "; ", "}") else ""
override def safeToString = parentsString(parents) + scopeString
}
protected def computeBaseClasses(tpe: Type): List[Symbol] = {
val parents = tpe.parents // adriaan says tpe.parents does work sometimes, so call it only once
val baseTail = (
if (parents.isEmpty || parents.head.isInstanceOf[PackageTypeRef]) Nil
else {
//Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG
// optimized, since this seems to be performance critical
val superclazz = parents.head // parents.isEmpty was already excluded
var mixins = parents.tail
val sbcs = superclazz.baseClasses
var bcs = sbcs
def isNew(clazz: Symbol): Boolean = (
superclazz.baseTypeIndex(clazz) < 0 &&
{ var p = bcs
while ((p ne sbcs) && (p.head != clazz)) p = p.tail
p eq sbcs
}
)
while (!mixins.isEmpty) {
def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] =
if (mbcs.isEmpty) bcs
else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail)
else addMixinBaseClasses(mbcs.tail)
bcs = addMixinBaseClasses(mixins.head.baseClasses)
mixins = mixins.tail
}
bcs
}
)
tpe.typeSymbol :: baseTail
}
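  /* Rough shape of the result, for  class C extends A with B :
   *
   *   C :: (classes contributed by the mixin B, in B's own base-class order,
   *         excluding anything already reachable through A)
   *     ::: baseClasses(A)
   *
   * A sketch of the ordering maintained by isNew above, not a formal spec.
   */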
protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = {
val period = tpe.baseTypeSeqPeriod
if (period != currentPeriod) {
tpe.baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
if (tpe.parents exists typeContainsTypeVar) {
// rename type vars to fresh type params, take base type sequence of
// resulting type, and rename back all the entries in that sequence
var tvs = Set[TypeVar]()
for (p <- tpe.parents)
for (t <- p) t match {
case tv: TypeVar => tvs += tv
case _ =>
}
val varToParamMap: Map[Type, Symbol] =
mapFrom[TypeVar, Type, Symbol](tvs.toList)(_.origin.typeSymbol.cloneSymbol)
val paramToVarMap = varToParamMap map (_.swap)
val varToParam = new TypeMap {
def apply(tp: Type) = varToParamMap get tp match {
case Some(sym) => sym.tpe_*
case _ => mapOver(tp)
}
}
val paramToVar = new TypeMap {
def apply(tp: Type) = tp match {
case TypeRef(_, tsym, _) if paramToVarMap.isDefinedAt(tsym) => paramToVarMap(tsym)
case _ => mapOver(tp)
}
}
val bts = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls).baseTypeSeq
tpe.baseTypeSeqCache = bts lateMap paramToVar
} else {
if (Statistics.canEnable) Statistics.incCounter(compoundBaseTypeSeqCount)
val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null
try {
tpe.baseTypeSeqCache = undetBaseTypeSeq
tpe.baseTypeSeqCache =
if (tpe.typeSymbol.isRefinementClass)
tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe_*)
else
compoundBaseTypeSeq(tpe)
} finally {
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
// [Martin] suppressing memoization solves the problem with "same type after erasure" errors
// when compiling with
// scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala
// I have not yet figured out precisely why this is the case.
// My current assumption is that taking memos forces baseTypeSeqs to be computed
// at stale types (i.e. the underlying typeSymbol has already another type).
// I do not yet see precisely why this would cause a problem, but it looks
// fishy in any case.
}
}
}
//Console.println("baseTypeSeq(" + typeSymbol + ") = " + baseTypeSeqCache.toList);//DEBUG
if (tpe.baseTypeSeqCache eq undetBaseTypeSeq)
throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol)
}
object baseClassesCycleMonitor {
private var open: List[Symbol] = Nil
@inline private def cycleLog(msg: => String) {
if (settings.debug)
Console.err.println(msg)
}
def size = open.size
def push(clazz: Symbol) {
cycleLog("+ " + (" " * size) + clazz.fullNameString)
open ::= clazz
}
def pop(clazz: Symbol) {
assert(open.head eq clazz, (clazz, open))
open = open.tail
}
def isOpen(clazz: Symbol) = open contains clazz
}
protected def defineBaseClassesOfCompoundType(tpe: CompoundType) {
def define() = defineBaseClassesOfCompoundType(tpe, force = false)
if (!breakCycles || isPastTyper) define()
else tpe match {
// non-empty parents helpfully excludes all package classes
case tpe @ ClassInfoType(_ :: _, _, clazz) if !clazz.isAnonOrRefinementClass =>
// Cycle: force update
if (baseClassesCycleMonitor isOpen clazz)
defineBaseClassesOfCompoundType(tpe, force = true)
else {
baseClassesCycleMonitor push clazz
try define()
finally baseClassesCycleMonitor pop clazz
}
case _ =>
define()
}
}
private def defineBaseClassesOfCompoundType(tpe: CompoundType, force: Boolean) {
val period = tpe.baseClassesPeriod
if (period == currentPeriod) {
if (force && breakCycles) {
def what = tpe.typeSymbol + " in " + tpe.typeSymbol.owner.fullNameString
val bcs = computeBaseClasses(tpe)
tpe.baseClassesCache = bcs
warning(s"Breaking cycle in base class computation of $what ($bcs)")
}
}
else {
tpe.baseClassesPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseClassesNanos) else null
try {
tpe.baseClassesCache = null
tpe.baseClassesCache = tpe.memo(computeBaseClasses(tpe))(tpe.typeSymbol :: _.baseClasses.tail)
}
finally {
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
}
}
if (tpe.baseClassesCache eq null)
throw new TypeError("illegal cyclic reference involving " + tpe.typeSymbol)
}
/** A class representing intersection types with refinements of the form
* `<parents_0> with ... with <parents_n> { decls }`
* Cannot be created directly;
* one should always use `refinedType` for creation.
*/
case class RefinedType(override val parents: List[Type],
override val decls: Scope) extends CompoundType with RefinedTypeApi {
override def isHigherKinded = (
parents.nonEmpty &&
(parents forall typeIsHigherKinded) &&
!phase.erasedTypes
)
override def typeParams =
if (isHigherKinded) firstParent.typeParams
else super.typeParams
    //@M may result in an invalid type (references to higher-order args become dangling)
override def typeConstructor =
copyRefinedType(this, parents map (_.typeConstructor), decls)
final override def normalize: Type =
if (phase.erasedTypes) normalizeImpl
else {
if (normalized eq null) normalized = normalizeImpl
normalized
}
private var normalized: Type = _
private def normalizeImpl = {
// TODO see comments around def intersectionType and def merge
// SI-8575 The dealias is needed here to keep subtyping transitive, example in run/t8575b.scala
def flatten(tps: List[Type]): List[Type] = {
def dealiasRefinement(tp: Type) = if (tp.dealias.isInstanceOf[RefinedType]) tp.dealias else tp
tps map dealiasRefinement flatMap {
case RefinedType(parents, ds) if ds.isEmpty => flatten(parents)
case tp => List(tp)
}
}
val flattened = flatten(parents).distinct
if (decls.isEmpty && hasLength(flattened, 1)) {
flattened.head
} else if (flattened != parents) {
refinedType(flattened, if (typeSymbol eq NoSymbol) NoSymbol else typeSymbol.owner, decls, NoPosition)
} else if (isHigherKinded) {
etaExpand
} else super.normalize
}
final override def etaExpand: Type = {
// MO to AM: This is probably not correct
// If they are several higher-kinded parents with different bounds we need
// to take the intersection of their bounds
// !!! inconsistent with TypeRef.etaExpand that uses initializedTypeParams
if (!isHigherKinded) this
else typeFun(
typeParams,
RefinedType(
parents map {
case TypeRef(pre, sym, List()) => TypeRef(pre, sym, dummyArgs)
case p => p
},
decls,
typeSymbol))
}
override def kind = "RefinedType"
}
final class RefinedType0(parents: List[Type], decls: Scope, clazz: Symbol) extends RefinedType(parents, decls) {
override def typeSymbol = clazz
}
object RefinedType extends RefinedTypeExtractor {
def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType =
new RefinedType0(parents, decls, clazz)
}
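  /* A minimal sketch using the public extractor (runtime mirror assumed):
   *
   *   import scala.reflect.runtime.universe._
   *   trait A; trait B
   *   val RefinedType(parents, decls) = typeOf[A with B { def f: Int }]
   *   // parents lists A and B; decls contains the declaration of f
   */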
/** Overridden in reflection compiler */
def validateClassInfo(tp: ClassInfoType) {}
/** A class representing a class info
*/
case class ClassInfoType(
override val parents: List[Type],
override val decls: Scope,
override val typeSymbol: Symbol) extends CompoundType with ClassInfoTypeApi
{
validateClassInfo(this)
/** refs indices */
private final val NonExpansive = 0
private final val Expansive = 1
/** initialization states */
private final val UnInitialized = 0
private final val Initializing = 1
private final val Initialized = 2
private type RefMap = Map[Symbol, immutable.Set[Symbol]]
/** All type parameters reachable from given type parameter
* by a path which contains at least one expansive reference.
     * @see Kennedy, Pierce: On Decidability of Nominal Subtyping with Variance
*/
private[scala] def expansiveRefs(tparam: Symbol) = {
if (state == UnInitialized) {
computeRefs()
while (state != Initialized) propagate()
}
getRefs(Expansive, tparam)
}
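    /* A hedged illustration of expansive vs. non-expansive references
     * (terminology from the Kennedy/Pierce paper cited above):
     *
     *   class NonExp[A] extends Ordered[A]        // A maps directly to Ordered's
     *                                             // parameter: non-expansive
     *   class Exp[A]    extends Ordered[List[A]]  // A occurs nested inside the
     *                                             // argument: expansive
     *
     * Expansive cycles are what threaten termination of subtype checking, hence
     * the fixed-point computation in computeRefs/propagate below.
     */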
/* The rest of this class is auxiliary code for `expansiveRefs`
*/
/** The type parameters which are referenced type parameters of this class.
* Two entries: refs(0): Non-expansive references
* refs(1): Expansive references
* Syncnote: This var need not be protected with synchronized, because
* it is accessed only from expansiveRefs, which is called only from
* Typer.
*/
private var refs: Array[RefMap] = _
/** The initialization state of the class: UnInitialized --> Initializing --> Initialized
* Syncnote: This var need not be protected with synchronized, because
* it is accessed only from expansiveRefs, which is called only from
* Typer.
*/
private var state = UnInitialized
    /** Get references for the given type parameter.
* @param which in {NonExpansive, Expansive}
* @param from The type parameter from which references originate.
*/
private def getRefs(which: Int, from: Symbol): Set[Symbol] = refs(which) get from match {
case Some(set) => set
case none => Set()
}
/** Augment existing refs map with reference <pre>from -> to</pre>
* @param which <- {NonExpansive, Expansive}
*/
private def addRef(which: Int, from: Symbol, to: Symbol) {
refs(which) = refs(which) + (from -> (getRefs(which, from) + to))
}
/** Augment existing refs map with references <pre>from -> sym</pre>, for
* all elements <pre>sym</pre> of set `to`.
* @param which <- {NonExpansive, Expansive}
*/
private def addRefs(which: Int, from: Symbol, to: Set[Symbol]) {
refs(which) = refs(which) + (from -> (getRefs(which, from) ++ to))
}
    /** The ClassInfoType which belongs to the class containing the given type parameter.
*/
private def classInfo(tparam: Symbol): ClassInfoType =
tparam.owner.info.resultType match {
case ci: ClassInfoType => ci
case _ => classInfo(ObjectClass) // something's wrong; fall back to safe value
// (this can happen only for erroneous programs).
}
private object enterRefs extends TypeMap {
private var tparam: Symbol = _
def apply(tp: Type): Type = {
tp match {
case tr @ TypeRef(_, sym, args) if args.nonEmpty =>
val tparams = tr.initializedTypeParams
if (settings.debug && !sameLength(tparams, args))
devWarning(s"Mismatched zip in computeRefs(): ${sym.info.typeParams}, $args")
foreach2(tparams, args) { (tparam1, arg) =>
if (arg contains tparam) {
addRef(NonExpansive, tparam, tparam1)
if (arg.typeSymbol != tparam)
addRef(Expansive, tparam, tparam1)
}
}
case _ =>
}
mapOver(tp)
}
def enter(tparam0: Symbol, parent: Type) {
this.tparam = tparam0
this(parent)
}
}
/** Compute initial (one-step) references and set state to `Initializing`.
*/
private def computeRefs() {
refs = Array(Map(), Map())
typeSymbol.typeParams foreach { tparam =>
parents foreach { p =>
enterRefs.enter(tparam, p)
}
}
state = Initializing
}
/** Propagate to form transitive closure.
* Set state to Initialized if no change resulted from propagation.
     * @return true iff there was a change in the last iteration
*/
private def propagate(): Boolean = {
if (state == UnInitialized) computeRefs()
//Console.println("Propagate "+symbol+", initial expansive = "+refs(Expansive)+", nonexpansive = "+refs(NonExpansive))//DEBUG
val lastRefs = Array(refs(0), refs(1))
state = Initialized
var change = false
for ((from, targets) <- refs(NonExpansive).iterator)
for (target <- targets) {
val thatInfo = classInfo(target)
if (thatInfo.state != Initialized)
change = change | thatInfo.propagate()
addRefs(NonExpansive, from, thatInfo.getRefs(NonExpansive, target))
addRefs(Expansive, from, thatInfo.getRefs(Expansive, target))
}
for ((from, targets) <- refs(Expansive).iterator)
for (target <- targets) {
val thatInfo = classInfo(target)
if (thatInfo.state != Initialized)
change = change | thatInfo.propagate()
addRefs(Expansive, from, thatInfo.getRefs(NonExpansive, target))
}
change = change || refs(0) != lastRefs(0) || refs(1) != lastRefs(1)
if (change) state = Initializing
//else Console.println("Propagate "+symbol+", final expansive = "+refs(Expansive)+", nonexpansive = "+refs(NonExpansive))//DEBUG
change
}
override def kind = "ClassInfoType"
/** A nicely formatted string with newlines and such.
*/
    def formattedToString = parents.mkString("\n with ") + scopeString
override protected def shouldForceScope = settings.debug || decls.size > 1
    override protected def scopeString = initDecls.mkString(" {\n ", "\n ", "\n}")
override def safeToString = if (shouldForceScope) formattedToString else super.safeToString
}
object ClassInfoType extends ClassInfoTypeExtractor
class PackageClassInfoType(decls: Scope, clazz: Symbol)
extends ClassInfoType(List(), decls, clazz)
/** A class representing a constant type.
*/
abstract case class ConstantType(value: Constant) extends SingletonType with ConstantTypeApi {
override def underlying: Type = value.tpe
assert(underlying.typeSymbol != UnitClass)
override def isTrivial: Boolean = true
override def deconst: Type = underlying.deconst
override def safeToString: String =
underlying.toString + "(" + value.escapedStringValue + ")"
override def kind = "ConstantType"
}
final class UniqueConstantType(value: Constant) extends ConstantType(value)
object ConstantType extends ConstantTypeExtractor {
def apply(value: Constant) = unique(new UniqueConstantType(value))
}
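  /* A minimal sketch (assumes the public `internal` API of the runtime mirror):
   *
   *   import scala.reflect.runtime.universe._
   *   val tpe = internal.constantType(Constant(42))
   *   // prints as "Int(42)"; tpe.widen is plain Int
   */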
/* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected
* with synchronized, because they are accessed only from isVolatile, which is called only from
* Typer.
*/
private var volatileRecursions: Int = 0
private val pendingVolatiles = new mutable.HashSet[Symbol]
class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) {
require(args0 ne Nil, this)
    /** No unapplied type params since it has (should have) equally as many args. */
override def isHigherKinded = false
override def typeParams = Nil
// note: does not go through typeRef. There's no need to because
    // neither `pre` nor `sym` changes. And there's a performance
    // advantage to calling TypeRef directly.
override def typeConstructor = TypeRef(pre, sym, Nil)
}
class ModuleTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) {
require(sym.isModuleClass, sym)
private[this] var narrowedCache: Type = _
override def narrow = {
if (narrowedCache eq null)
narrowedCache = singleType(pre, sym.sourceModule)
narrowedCache
}
override private[Types] def invalidateTypeRefCaches(): Unit = {
super.invalidateTypeRefCaches()
narrowedCache = null
}
override protected def finishPrefix(rest: String) = objectPrefix + rest
override def directObjectString = super.safeToString
override def toLongString = toString
override def safeToString = prefixString + "type"
override def prefixString = if (sym.isOmittablePrefix) "" else prefix.prefixString + sym.nameString + "."
}
class PackageTypeRef(pre0: Type, sym0: Symbol) extends ModuleTypeRef(pre0, sym0) {
require(sym.isPackageClass, sym)
override protected def finishPrefix(rest: String) = packagePrefix + rest
}
class RefinementTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) {
require(sym.isRefinementClass, sym)
// I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
override protected def normalizeImpl: Type = pre.memberInfo(sym).normalize
override protected def finishPrefix(rest: String) = "" + sym.info
}
class NoArgsTypeRef(pre0: Type, sym0: Symbol) extends TypeRef(pre0, sym0, Nil) {
// A reference (in a Scala program) to a type that has type parameters, but where the reference
// does not include type arguments. Note that it doesn't matter whether the symbol refers
// to a java or scala symbol, but it does matter whether it occurs in java or scala code.
// TypeRefs w/o type params that occur in java signatures/code are considered raw types, and are
// represented as existential types.
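    // For example, a reference to java.util.List written without type arguments
    // in Java source is a raw type, modeled as the existential java.util.List[_].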
override def isHigherKinded = (typeParams ne Nil)
override def typeParams = if (isDefinitionsInitialized) sym.typeParams else sym.unsafeTypeParams
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
if (isHigherKinded) {
if (sameLength(formals intersect typeParams, typeParams))
copyTypeRef(this, pre, sym, actuals)
// partial application (needed in infer when bunching type arguments from classes and methods together)
else
copyTypeRef(this, pre, sym, dummyArgs).instantiateTypeParams(formals, actuals)
}
else
super.instantiateTypeParams(formals, actuals)
override def narrow =
if (sym.isModuleClass) singleType(pre, sym.sourceModule)
else super.narrow
override def typeConstructor = this
// eta-expand, subtyping relies on eta-expansion of higher-kinded types
override protected def normalizeImpl: Type =
if (isHigherKinded) etaExpand else super.normalizeImpl
}
trait NonClassTypeRef extends TypeRef {
require(sym.isNonClassType, sym)
/** Syncnote: These are pure caches for performance; no problem to evaluate these
     * several times. Hence, no need to protect them with synchronized in a multi-threaded
* usage scenario.
*/
private var relativeInfoCache: Type = _
private var relativeInfoCacheValidForPeriod: Period = NoPeriod
private var relativeInfoCacheValidForSymInfo: Type = _
override private[Types] def invalidateTypeRefCaches(): Unit = {
super.invalidateTypeRefCaches()
relativeInfoCache = NoType
relativeInfoCacheValidForPeriod = NoPeriod
relativeInfoCacheValidForSymInfo = null
}
final override protected def relativeInfo = {
val symInfo = sym.info
if ((relativeInfoCache eq null) || (relativeInfoCacheValidForSymInfo ne symInfo) || (relativeInfoCacheValidForPeriod != currentPeriod)) {
relativeInfoCache = super.relativeInfo
if (this.isInstanceOf[AbstractTypeRef]) validateRelativeInfo()
relativeInfoCacheValidForSymInfo = symInfo
relativeInfoCacheValidForPeriod = currentPeriod
}
relativeInfoCache
}
private def validateRelativeInfo(): Unit = relativeInfoCache match {
// If a subtyping cycle is not detected here, we'll likely enter an infinite
// loop before a sensible error can be issued. SI-5093 is one example.
case x: SubType if x.supertype eq this =>
relativeInfoCache = null
throw new RecoverableCyclicReference(sym)
case _ =>
}
}
trait AliasTypeRef extends NonClassTypeRef {
require(sym.isAliasType, sym)
override def dealias = if (typeParamsMatchArgs) betaReduce.dealias else super.dealias
override def narrow = normalize.narrow
override def prefix = if (this ne normalize) normalize.prefix else pre
override def termSymbol = if (this ne normalize) normalize.termSymbol else super.termSymbol
override def typeSymbol = if (this ne normalize) normalize.typeSymbol else sym
override protected[Types] def parentsImpl: List[Type] = normalize.parents map relativize
// `baseClasses` is sensitive to type args when referencing type members
// consider `type foo[x] = x`, `typeOf[foo[String]].baseClasses` should be the same as `typeOf[String].baseClasses`,
// which would be lost by looking at `sym.info` without propagating args
// since classes cannot be overridden, the prefix can be ignored
// (in fact, taking the prefix into account by replacing `normalize`
    // with `relativeInfo` breaks pos/t8177g.scala, which is probably a bug, but a tricky one...)
override def baseClasses = normalize.baseClasses
// similar reasoning holds here as for baseClasses
// as another example, consider the type alias `Foo` in `class O { o => type Foo = X { val bla: o.Bar }; type Bar }`
// o1.Foo and o2.Foo have different decls `val bla: o1.Bar` versus `val bla: o2.Bar`
// In principle, you should only call `sym.info.decls` when you know `sym.isClass`,
// and you should `relativize` the infos of the resulting members.
// The latter is certainly violated in multiple spots in the codebase (the members are usually transformed correctly, though).
override def decls: Scope = normalize.decls
// beta-reduce, but don't do partial application -- cycles have been checked in typeRef
override protected def normalizeImpl =
if (typeParamsMatchArgs) betaReduce.normalize
else if (isHigherKinded) super.normalizeImpl
else {
// if we are overriding a type alias in an erroneous way, don't just
// return an ErrorType since that will result in useless error msg.
// Instead let's try to recover from it and rely on refcheck reporting the correct error,
// if that fails fallback to the old behaviour.
val overriddenSym = sym.nextOverriddenSymbol
if (overriddenSym != NoSymbol) pre.memberType(overriddenSym).normalize
else ErrorType
}
// isHKSubType introduces synthetic type params so that
// betaReduce can first apply sym.info to typeArgs before calling
// asSeenFrom. asSeenFrom then skips synthetic type params, which
// are used to reduce HO subtyping to first-order subtyping, but
// which can't be instantiated from the given prefix and class.
//
// this crashes pos/depmet_implicit_tpbetareduce.scala
// appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
override def betaReduce = relativize(sym.info.resultType)
/** SI-3731, SI-8177: when prefix is changed to `newPre`, maintain consistency of prefix and sym
* (where the symbol refers to a declaration "embedded" in the prefix).
*
* @return newSym so that `newPre` binds `sym.name` to `newSym`,
* to remain consistent with `pre` previously binding `sym.name` to `sym`.
*
* `newSym` and `sym` are conceptually the same symbols, but some change to our `prefix`
* got them out of whack. (Usually triggered by substitution or `asSeenFrom`.)
* The only kind of "binds" we consider is where `prefix` (or its underlying type)
* is a refined type that declares `sym` (since the old prefix was discarded,
* the old symbol is now stale and we should update it, like in `def rebind`,
* except this is not for overriding symbols -- a vertical move -- but a "lateral" change.)
*
* The reason for this hack is that substitution and asSeenFrom clone RefinedTypes and
* their members, without updating the potential references to those members -- here, we aim to patch
* this up, so that: when changing a TypeRef(pre, sym, args) to a TypeRef(pre', sym', args'), and pre
* embeds a symbol sym (pre is a RefinedType(_, Scope(..., sym,...)) or a SingleType with such an
* underlying type), make sure that we update sym' to compensate for the change of pre -> pre' (which may
* have created a new symbol for the one the original sym referred to)
*/
override def coevolveSym(newPre: Type): Symbol =
if ((pre ne newPre) && embeddedSymbol(pre, sym.name) == sym) {
val newSym = embeddedSymbol(newPre, sym.name)
debuglog(s"co-evolve: ${pre} -> ${newPre}, $sym : ${sym.info} -> $newSym : ${newSym.info}")
// To deal with erroneous `preNew`, fallback via `orElse sym`, in case `preNew` does not have a decl named `sym.name`.
newSym orElse sym
} else sym
override def kind = "AliasTypeRef"
}
// Return the symbol named `name` that's "embedded" in tp
// This is the case if `tp` is a `T{...; type/val $name ; ...}`,
// or a singleton type with such an underlying type.
private def embeddedSymbol(tp: Type, name: Name): Symbol =
// normalize to flatten nested RefinedTypes
// don't check whether tp is a RefinedType -- it may be a ThisType of one, for example
// TODO: check the resulting symbol is owned by the refinement class? likely an invariant...
if (tp.typeSymbol.isRefinementClass) tp.normalize.decls lookup name
else {
debuglog(s"no embedded symbol $name found in ${showRaw(tp)} --> ${tp.normalize.decls lookup name}")
NoSymbol
}
trait AbstractTypeRef extends NonClassTypeRef {
require(sym.isAbstractType, sym)
override def baseClasses = relativeInfo.baseClasses
override def decls = relativeInfo.decls
override def bounds = relativeInfo.bounds
override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = bounds.hi.baseTypeSeq prepend this
override protected[Types] def parentsImpl: List[Type] = relativeInfo.parents
override def kind = "AbstractTypeRef"
}
/** A class for named types of the form
* `<prefix>.<sym.name>[args]`
* Cannot be created directly; one should always use `typeRef`
* for creation. (@M: Otherwise hashing breaks)
*
* @M: a higher-kinded type is represented as a TypeRef with sym.typeParams.nonEmpty, but args.isEmpty
*/
abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends UniqueType with TypeRefApi {
private var trivial: ThreeValue = UNKNOWN
override def isTrivial: Boolean = {
if (trivial == UNKNOWN)
trivial = fromBoolean(!sym.isTypeParameter && pre.isTrivial && areTrivialTypes(args))
toBoolean(trivial)
}
private[Types] def invalidateTypeRefCaches(): Unit = {
parentsCache = null
parentsPeriod = NoPeriod
baseTypeSeqCache = null
baseTypeSeqPeriod = NoPeriod
normalized = null
}
private[reflect] var parentsCache: List[Type] = _
private[reflect] var parentsPeriod = NoPeriod
private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
private[reflect] var baseTypeSeqPeriod = NoPeriod
private var normalized: Type = _
//OPT specialize hashCode
override final def computeHashCode = {
import scala.util.hashing.MurmurHash3._
val hasArgs = args ne Nil
var h = productSeed
h = mix(h, pre.hashCode)
h = mix(h, sym.hashCode)
if (hasArgs)
finalizeHash(mix(h, args.hashCode()), 3)
else
finalizeHash(h, 2)
}
// interpret symbol's info in terms of the type's prefix and type args
protected def relativeInfo: Type = appliedType(sym.info.asSeenFrom(pre, sym.owner), argsOrDummies)
// @M: propagate actual type params (args) to `tp`, by replacing
// formal type parameters with actual ones. If tp is higher kinded,
// the "actual" type arguments are types that simply reference the
// corresponding type parameters (unbound type variables)
//
// NOTE: for performance, as well as correctness, we do not attempt
// to reframe trivial types in terms of our prefix and args.
// asSeenFrom, by construction, is the identity for trivial types,
// and substitution cannot change them either (abstract types are non-trivial, specifically because they may need to be replaced)
// For correctness, the result for `tp == NoType` must be `NoType`,
// if we don't shield against this, and apply instantiateTypeParams to it,
// this would result in an ErrorType, which behaves differently during subtyping
// (and thus on recursion, subtyping would go from false -- since a NoType is involved --
    // to true, as ErrorType is always a sub/super type...)
final def relativize(tp: Type): Type =
if (tp.isTrivial) tp
else if (args.isEmpty && (phase.erasedTypes || !isHigherKinded || isRawIfWithoutArgs(sym))) tp.asSeenFrom(pre, sym.owner)
else {
// The type params and type args should always match in length,
// though a mismatch can arise when a typevar is encountered for which
// too little information is known to determine its kind, and
// it later turns out not to have kind *. See SI-4070.
val formals = sym.typeParams
// If we're called with a poly type, and we were to run the `asSeenFrom`, over the entire
// type, we can end up with new symbols for the type parameters (clones from TypeMap).
// The subsequent substitution of type arguments would fail. This problem showed up during
// the fix for SI-8046, however the solution taken there wasn't quite right, and led to
// SI-8170.
//
// Now, we detect the PolyType before both the ASF *and* the substitution, and just operate
// on the result type.
//
// TODO: Revisit this and explore the questions raised:
//
// AM: I like this better than the old code, but is there any way the tparams would need the ASF treatment as well?
// JZ: I think its largely irrelevant, as they are no longer referred to in the result type.
// In fact, you can get away with returning a type of kind * here and the sky doesn't fall:
// `case PolyType(`tparams`, result) => asSeenFromInstantiated(result)`
// But I thought it was better to retain the kind.
// AM: I've been experimenting with apply-type-args-then-ASF, but running into cycles.
      // In general, it seems iffy; if the tparams can never occur in the result,
      // then we might as well represent the type as a no-arg typeref.
// AM: I've also been trying to track down uses of transform (pretty generic name for something that
// does not seem that widely applicable).
// It's kind of a helper for computing baseType (since it tries to propagate our type args to some
// other type, which has to be related to this type for that to make sense).
//
def seenFromOwnerInstantiated(tp: Type): Type =
tp.asSeenFrom(pre, sym.owner).instantiateTypeParams(formals, argsOrDummies)
tp match {
case PolyType(`formals`, result) => PolyType(formals, seenFromOwnerInstantiated(result))
case _ => seenFromOwnerInstantiated(tp)
}
}
private def argsOrDummies = if (args.isEmpty) dummyArgs else args
final override def baseType(clazz: Symbol): Type =
if (clazz eq sym) this
// NOTE: this first goes to requested base type, *then* does asSeenFrom prefix & instantiates args
else if (sym.isClass) relativize(sym.info.baseType(clazz))
else baseTypeOfNonClassTypeRef(clazz)
// two differences with class type basetype:
// (1) first relativize the type, then go to the requested base type
// (2) cache for cycle robustness
private def baseTypeOfNonClassTypeRef(clazz: Symbol) =
try {
basetypeRecursions += 1
if (basetypeRecursions >= LogPendingBaseTypesThreshold) baseTypeOfNonClassTypeRefLogged(clazz)
else relativeInfo.baseType(clazz)
} finally basetypeRecursions -= 1
private def baseTypeOfNonClassTypeRefLogged(clazz: Symbol) =
if (pendingBaseTypes add this) try relativeInfo.baseType(clazz) finally { pendingBaseTypes remove this }
// TODO: is this optimization for AnyClass worth it? (or is it playing last-ditch cycle defense?)
// NOTE: for correctness, it only applies for non-class types
// (e.g., a package class should not get AnyTpe as its supertype, ever)
else if (clazz eq AnyClass) AnyTpe
else NoType
// eta-expand, subtyping relies on eta-expansion of higher-kinded types
protected def normalizeImpl: Type = if (isHigherKinded) etaExpand else super.normalize
// TODO: test case that is compiled in a specific order and in different runs
final override def normalize: Type = {
// arises when argument-dependent types are approximated (see def depoly in implicits)
if (pre eq WildcardType) WildcardType
else if (phase.erasedTypes) normalizeImpl
else {
if (normalized eq null)
normalized = normalizeImpl
normalized
}
}
override def isGround = (
sym.isPackageClass
|| pre.isGround && args.forall(_.isGround)
)
final override def etaExpand: Type = {
// must initialise symbol, see test/files/pos/ticket0137.scala
val tpars = initializedTypeParams
if (tpars.isEmpty) this
else typeFunAnon(tpars, copyTypeRef(this, pre, sym, tpars map (_.tpeHK))) // todo: also beta-reduce?
}
// only need to rebind type aliases, as typeRef already handles abstract types
// (they are allowed to be rebound more liberally)
def coevolveSym(pre1: Type): Symbol = sym
def initializedTypeParams = sym.info.typeParams
def typeParamsMatchArgs = sameLength(initializedTypeParams, args)
override def baseTypeSeqDepth = baseTypeSeq.maxDepth
override def prefix = pre
override def termSymbol = super.termSymbol
override def termSymbolDirect = super.termSymbol
override def typeArgs = args
override def typeOfThis = relativize(sym.typeOfThis)
override def typeSymbol = sym
override def typeSymbolDirect = sym
override def parents: List[Type] = {
val cache = parentsCache
if (parentsPeriod == currentPeriod && cache != null) cache
else {
defineParentsOfTypeRef(this)
parentsCache
}
}
protected[Types] def parentsImpl: List[Type] = sym.info.parents map relativize
// Since type parameters cannot occur in super types, no need to relativize before looking at base *classes*.
// Similarly, our prefix can occur in super class types, but it cannot influence which classes those types resolve to.
// For example, `class Outer { outer => class Inner extends outer.Foo; class Foo }`
// `outer`'s value has no impact on which `Foo` is selected, since classes cannot be overridden.
// besides being faster, we can't use relativeInfo because it causes cycles
override def baseClasses = sym.info.baseClasses
// in principle, we should use `relativeInfo.decls`, but I believe all uses of `decls` will correctly `relativize` the individual members
override def decls: Scope = sym.info.decls
protected[Types] def baseTypeSeqImpl: BaseTypeSeq =
if (sym.info.baseTypeSeq exists (_.typeSymbolDirect.isAbstractType))
// SI-8046 base type sequence might have more elements in a subclass, we can't map it element wise.
relativize(sym.info).baseTypeSeq
else
// Optimization: no abstract types, we can compute the BTS of this TypeRef as an element-wise map
// of the BTS of the referenced symbol.
sym.info.baseTypeSeq map relativize
override def baseTypeSeq: BaseTypeSeq = {
val cache = baseTypeSeqCache
if (baseTypeSeqPeriod == currentPeriod && cache != null && cache != undetBaseTypeSeq)
cache
else {
defineBaseTypeSeqOfTypeRef(this)
if (baseTypeSeqCache == undetBaseTypeSeq)
throw new RecoverableCyclicReference(sym)
baseTypeSeqCache
}
}
// ensure that symbol is not a local copy with a name coincidence
private def needsPreString = (
settings.debug
|| !shorthands(sym.fullName)
|| (sym.ownersIterator exists (s => !s.isClass))
)
private def preString = if (needsPreString) pre.prefixString else ""
private def argsString = if (args.isEmpty) "" else args.mkString("[", ",", "]")
private def refinementDecls = fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic)
private def refinementString = (
if (sym.isStructuralRefinement)
refinementDecls map (_.defString) mkString("{", "; ", "}")
else ""
)
protected def finishPrefix(rest: String) = (
if (sym.isInitialized && sym.isAnonymousClass && !phase.erasedTypes)
parentsString(sym.info.parents) + refinementString
else rest
)
private def noArgsString = finishPrefix(preString + sym.nameString)
private def tupleTypeString: String = args match {
case Nil => noArgsString
case arg :: Nil => s"($arg,)"
case _ => args.mkString("(", ", ", ")")
}
private def customToString = sym match {
case RepeatedParamClass | JavaRepeatedParamClass => args.head + "*"
case ByNameParamClass => "=> " + args.head
case _ =>
if (isFunctionTypeDirect(this)) {
// Aesthetics: printing Function1 as T => R rather than (T) => R
// ...but only if it's not a tuple, so ((T1, T2)) => R is distinguishable
// from (T1, T2) => R.
unspecializedTypeArgs(this) match {
// See neg/t588 for an example which arrives here - printing
// the type of a Function1 after erasure.
case Nil => noArgsString
case in :: out :: Nil if !isTupleTypeDirect(in) =>
// A => B => C should be (A => B) => C or A => (B => C).
// Also if A is byname, then we want (=> A) => B because => is right associative and => A => B
// would mean => (A => B) which is a different type
val in_s = if (isFunctionTypeDirect(in) || isByNameParamType(in)) "(" + in + ")" else "" + in
val out_s = if (isFunctionTypeDirect(out)) "(" + out + ")" else "" + out
in_s + " => " + out_s
case xs =>
xs.init.mkString("(", ", ", ")") + " => " + xs.last
}
}
else if (isTupleTypeDirect(this))
tupleTypeString
else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic) && (this ne dealias))
"" + dealias
else
""
}
override def safeToString = {
val custom = if (settings.debug) "" else customToString
if (custom != "") custom
else finishPrefix(preString + sym.nameString + argsString)
}
override def prefixString = "" + (
if (settings.debug)
super.prefixString
else if (sym.isOmittablePrefix)
""
else if (sym.isPackageClass || sym.isPackageObjectOrClass)
sym.skipPackageObject.fullName + "."
else if (isStable && nme.isSingletonName(sym.name))
tpnme.dropSingletonName(sym.name) + "."
else
super.prefixString
)
// Suppressing case class copy method which risks subverting our single point of creation.
private def copy = null
override def kind = "TypeRef"
}
// No longer defined as anonymous classes in `object TypeRef` to avoid an unnecessary outer pointer.
private final class AliasArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) with AliasTypeRef
private final class AbstractArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) with AbstractTypeRef
private final class ClassArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args)
private final class AliasNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with AliasTypeRef
private final class AbstractNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with AbstractTypeRef
private final class ClassNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym)
object TypeRef extends TypeRefExtractor {
def apply(pre: Type, sym: Symbol, args: List[Type]): Type = unique({
if (args ne Nil) {
if (sym.isAliasType) new AliasArgsTypeRef(pre, sym, args)
else if (sym.isAbstractType) new AbstractArgsTypeRef(pre, sym, args)
else new ClassArgsTypeRef(pre, sym, args)
}
else {
if (sym.isAliasType) new AliasNoArgsTypeRef(pre, sym)
else if (sym.isAbstractType) new AbstractNoArgsTypeRef(pre, sym)
else if (sym.isRefinementClass) new RefinementTypeRef(pre, sym)
else if (sym.isPackageClass) new PackageTypeRef(pre, sym)
else if (sym.isModuleClass) new ModuleTypeRef(pre, sym)
else new ClassNoArgsTypeRef(pre, sym)
}
})
}
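  /* A minimal sketch of the shape produced above (runtime mirror assumed):
   *
   *   import scala.reflect.runtime.universe._
   *   val TypeRef(pre, sym, args) = typeOf[List[Int]]
   *   // pre is the prefix (scala.collection.immutable), sym is class List,
   *   // args is List(Int)
   */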
protected def defineParentsOfTypeRef(tpe: TypeRef) = {
val period = tpe.parentsPeriod
if (period != currentPeriod) {
tpe.parentsPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
tpe.parentsCache = tpe.parentsImpl
} else if (tpe.parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641
tpe.parentsCache = List(AnyTpe)
}
}
}
protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) = {
val period = tpe.baseTypeSeqPeriod
if (period != currentPeriod) {
tpe.baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
if (Statistics.canEnable) Statistics.incCounter(typerefBaseTypeSeqCount)
val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null
try {
tpe.baseTypeSeqCache = undetBaseTypeSeq
tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
} finally {
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
}
}
if (tpe.baseTypeSeqCache == undetBaseTypeSeq)
throw new TypeError("illegal cyclic inheritance involving " + tpe.sym)
}
/** A class representing a method type with parameters.
* Note that a parameterless method is represented by a NullaryMethodType:
*
* def m(): Int MethodType(Nil, Int)
* def m: Int NullaryMethodType(Int)
*/
case class MethodType(override val params: List[Symbol],
override val resultType: Type) extends Type with MethodTypeApi {
private var trivial: ThreeValue = UNKNOWN
override def isTrivial: Boolean = {
if (trivial == UNKNOWN) trivial = fromBoolean(isTrivialResult && areTrivialParams(params))
toBoolean(trivial)
}
private def isTrivialResult =
resultType.isTrivial && (resultType eq resultType.withoutAnnotations)
private def areTrivialParams(ps: List[Symbol]): Boolean = ps match {
case p :: rest =>
p.tpe.isTrivial && !typesContain(paramTypes, p) && !(resultType contains p) &&
areTrivialParams(rest)
case _ =>
true
}
def isImplicit = (params ne Nil) && params.head.isImplicit
    def isJava = false // can we do something like we do for implicits? I.e. do Java methods without parameters need to be recognized?
override def paramSectionCount: Int = resultType.paramSectionCount + 1
override def paramss: List[List[Symbol]] = params :: resultType.paramss
override def paramTypes = mapList(params)(symTpe) // OPT use mapList rather than .map
override def boundSyms = resultType.boundSyms ++ params
override def resultType(actuals: List[Type]) =
if (isTrivial || phase.erasedTypes) resultType
else if (/*isDependentMethodType &&*/ sameLength(actuals, params)) {
val idm = new InstantiateDependentMap(params, actuals)
val res = idm(resultType)
existentialAbstraction(idm.existentialsNeeded, res)
}
else existentialAbstraction(params, resultType)
private var isdepmeth: ThreeValue = UNKNOWN
override def isDependentMethodType: Boolean = {
if (isdepmeth == UNKNOWN) isdepmeth = fromBoolean(IsDependentCollector.collect(resultType.dealias))
toBoolean(isdepmeth)
}
// implicit args can only be depended on in result type:
    // TODO: this may be generalised so that the only constraint is that dependencies are acyclic
def approximate: MethodType = MethodType(params, resultApprox)
override def safeToString = paramString(this) + resultType
override def cloneInfo(owner: Symbol) = {
val vparams = cloneSymbolsAtOwner(params, owner)
copyMethodType(this, vparams, resultType.substSym(params, vparams).cloneInfo(owner))
}
override def atOwner(owner: Symbol) =
if (!allSymbolsHaveOwner(params, owner) || (resultType.atOwner(owner) ne resultType))
cloneInfo(owner)
else
this
override def kind = "MethodType"
}
object MethodType extends MethodTypeExtractor
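  /* The two shapes from the doc comment above, observed through the public
   * mirror (a sketch; runtime mirror assumed):
   *
   *   import scala.reflect.runtime.universe._
   *   class C { def m(x: Int): Int = x; def n: Int = 0 }
   *   typeOf[C].member(TermName("m")).asMethod.info  // MethodType(List(x), Int)
   *   typeOf[C].member(TermName("n")).asMethod.info  // NullaryMethodType(Int)
   */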
class JavaMethodType(ps: List[Symbol], rt: Type) extends MethodType(ps, rt) {
override def isJava = true
}
  // TODO: rename to something more appropriate for the type of a method without argument lists
  // ("nullary" erroneously implies it has an argument list with zero arguments, when it actually has zero argument lists)
case class NullaryMethodType(override val resultType: Type) extends Type with NullaryMethodTypeApi {
override def isTrivial = resultType.isTrivial && (resultType eq resultType.withoutAnnotations)
override def prefix: Type = resultType.prefix
override def narrow: Type = resultType.narrow
override def termSymbol: Symbol = resultType.termSymbol
override def typeSymbol: Symbol = resultType.typeSymbol
override def parents: List[Type] = resultType.parents
override def decls: Scope = resultType.decls
override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq
override def baseTypeSeqDepth: Depth = resultType.baseTypeSeqDepth
override def baseClasses: List[Symbol] = resultType.baseClasses
override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
override def boundSyms = resultType.boundSyms
override def safeToString: String = "=> "+ resultType
override def kind = "NullaryMethodType"
}
object NullaryMethodType extends NullaryMethodTypeExtractor
/** A type function or the type of a polymorphic value (and thus of kind *).
*
   * Before the introduction of NullaryMethodType, a polymorphic nullary method (e.g., def isInstanceOf[T]: Boolean)
* used to be typed as PolyType(tps, restpe), and a monomorphic one as PolyType(Nil, restpe)
* This is now: PolyType(tps, NullaryMethodType(restpe)) and NullaryMethodType(restpe)
* by symmetry to MethodTypes: PolyType(tps, MethodType(params, restpe)) and MethodType(params, restpe)
*
* Thus, a PolyType(tps, TypeRef(...)) unambiguously indicates a type function (which results from eta-expanding a type constructor alias).
* Similarly, PolyType(tps, ClassInfoType(...)) is a type constructor.
*
* A polytype is of kind * iff its resultType is a (nullary) method type.
*/
case class PolyType(override val typeParams: List[Symbol], override val resultType: Type)
extends Type with PolyTypeApi {
//assert(!(typeParams contains NoSymbol), this)
assert(typeParams.nonEmpty, this) // used to be a marker for nullary method type, illegal now (see @NullaryMethodType)
override def paramSectionCount: Int = resultType.paramSectionCount
override def paramss: List[List[Symbol]] = resultType.paramss
override def params: List[Symbol] = resultType.params
override def paramTypes: List[Type] = resultType.paramTypes
override def parents: List[Type] = resultType.parents
override def decls: Scope = resultType.decls
override def termSymbol: Symbol = resultType.termSymbol
override def typeSymbol: Symbol = resultType.typeSymbol
override def boundSyms = immutable.Set[Symbol](typeParams ++ resultType.boundSyms: _*)
override def prefix: Type = resultType.prefix
override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq
override def baseTypeSeqDepth: Depth = resultType.baseTypeSeqDepth
override def baseClasses: List[Symbol] = resultType.baseClasses
override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
override def narrow: Type = resultType.narrow
// SI-9475: PolyTypes with dependent method types are still dependent
override def isDependentMethodType = resultType.isDependentMethodType
/** @M: typeDefSig wraps a TypeBounds in a PolyType
* to represent a higher-kinded type parameter
* wrap lo&hi in polytypes to bind variables
*/
override def bounds: TypeBounds =
TypeBounds(typeFun(typeParams, resultType.bounds.lo),
typeFun(typeParams, resultType.bounds.hi))
override def isHigherKinded = !typeParams.isEmpty
override def safeToString = typeParamsString(this) + resultType
override def cloneInfo(owner: Symbol) = {
val tparams = cloneSymbolsAtOwner(typeParams, owner)
PolyType(tparams, resultType.substSym(typeParams, tparams).cloneInfo(owner))
}
override def atOwner(owner: Symbol) =
if (!allSymbolsHaveOwner(typeParams, owner) || (resultType.atOwner(owner) ne resultType))
cloneInfo(owner)
else
this
override def kind = "PolyType"
}
object PolyType extends PolyTypeExtractor
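  /* A sketch of the representation described above (runtime mirror assumed):
   *
   *   import scala.reflect.runtime.universe._
   *   class C { def first[T](xs: List[T]): T = xs.head }
   *   val PolyType(tparams, MethodType(params, _)) =
   *     typeOf[C].member(TermName("first")).asMethod.info
   *   // tparams binds T, params binds xs
   */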
/** A creator for existential types which flattens nested existentials.
*/
def newExistentialType(quantified: List[Symbol], underlying: Type): Type =
if (quantified.isEmpty) underlying
else underlying match {
case ExistentialType(qs, restpe) => newExistentialType(quantified ::: qs, restpe)
case _ => ExistentialType(quantified, underlying)
}
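  /* Flattening sketch: nesting existentials collapses into one quantifier list:
   *
   *   newExistentialType(List(q1), ExistentialType(List(q2), tpe))
   *   // == ExistentialType(List(q1, q2), tpe)
   */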
case class ExistentialType(quantified: List[Symbol],
override val underlying: Type) extends RewrappingTypeProxy with ExistentialTypeApi
{
override protected def rewrap(newtp: Type) = existentialAbstraction(quantified, newtp)
override def isTrivial = false
override def bounds = TypeBounds(maybeRewrap(underlying.bounds.lo), maybeRewrap(underlying.bounds.hi))
override def parents = underlying.parents map maybeRewrap
override def boundSyms = quantified.toSet
override def prefix = maybeRewrap(underlying.prefix)
override def typeArgs = underlying.typeArgs map maybeRewrap
override def params = underlying.params mapConserve { param =>
val tpe1 = rewrap(param.tpeHK)
if (tpe1 eq param.tpeHK) param else param.cloneSymbol.setInfo(tpe1)
}
override def paramTypes = underlying.paramTypes map maybeRewrap
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = {
// maybeRewrap(underlying.instantiateTypeParams(formals, actuals))
val quantified1 = new SubstTypeMap(formals, actuals) mapOver quantified
val underlying1 = underlying.instantiateTypeParams(formals, actuals)
if ((quantified1 eq quantified) && (underlying1 eq underlying)) this
else existentialAbstraction(quantified1, underlying1.substSym(quantified, quantified1))
}
override def baseType(clazz: Symbol) = maybeRewrap(underlying.baseType(clazz))
override def baseTypeSeq = underlying.baseTypeSeq map maybeRewrap
override def isHigherKinded = false
// TODO: check invariant that all quantifiers have the same (existing) owner
private def quantifierOwner = quantified collectFirst { case q if q.owner.exists => q.owner } getOrElse NoSymbol
    // Is this existential of the form: T[Q1, ..., QN] forSome { type Q1 >: L1 <: U1; ...; type QN >: LN <: UN }
private def isStraightApplication = (quantified corresponds underlying.typeArgs){ (q, a) => q.tpe =:= a }
/** [SI-6169, SI-8197 -- companion to SI-1786]
*
* Approximation to improve the bounds of a Java-defined existential type,
* based on the bounds of the type parameters of the quantified type
* In Scala syntax, given a java-defined class C[T <: String], the existential type C[_]
* is improved to C[_ <: String] before skolemization, which captures (get it?) what Java does:
* enter the type parameters' bounds into the context when checking subtyping/type equality of existential types
*
* Also tried doing this once during class file parsing or when creating the existential type,
* but that causes cyclic errors because it happens too early.
*
* NOTE: we're only modifying the skolems to avoid leaking the sharper bounds to `quantified` (SI-8283)
*
* TODO: figure out how to do this earlier without running into cycles, so this can subsume the fix for SI-1786
*/
override def skolemizeExistential(owner0: Symbol, origin: AnyRef) = {
val owner = owner0 orElse quantifierOwner
// do this here because it's quite close to what Java does:
// when checking subtyping/type equality, enter constraints
// derived from the existentially quantified type into the typing environment
      // (aka \Gamma, which tracks types for variables and constraints/kinds for types)
// as a nice bonus, delaying this until we need it avoids cyclic errors
def tpars = underlying.typeSymbolDirect.initialize.typeParams
def newSkolem(quant: Symbol) = owner.newExistentialSkolem(quant, origin)
def newSharpenedSkolem(quant: Symbol, tparam: Symbol): Symbol = {
def emptyBounds(sym: Symbol) = sym.info.bounds.isEmptyBounds
        // avoid creating cycles [pos/t2940] that arise when an existential quantifier is
        // bounded by an existential type that unhygienically has that quantifier as its own quantifier
// (TODO: clone latter existential with fresh quantifiers -- not covering this case for now)
val canSharpen = (
emptyBounds(quant) && !emptyBounds(tparam)
&& (existentialsInType(tparam.info) intersect quantified).isEmpty
)
val skolemInfo = if (!canSharpen) quant.info else tparam.info.substSym(tpars, quantified)
owner.newExistentialSkolem(quant.name.toTypeName, skolemInfo, quant.flags, quant.pos, origin)
}
val canSharpenBounds = (underlying.typeSymbol.isJavaDefined || sharperSkolems) && isStraightApplication
if (canSharpenBounds) deriveType2(quantified, tpars, newSharpenedSkolem)(underlying)
else deriveType(quantified, newSkolem)(underlying)
}
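    // Sketch of the sharpening above (hypothetical Java class `C`):
    //   given  class C<T extends String> {}  on the Java side, skolemizing the
    //   existential C[_] (i.e. C[Q] forSome { type Q }) produces a skolem whose
    //   info is `<: String` (the tparam's bounds substituted into the quantifier)
    //   rather than Q's own empty bounds, matching Java's treatment of the wildcard.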
private def wildcardArgsString(qset: Set[Symbol], args: List[Type]): List[String] = args map {
case TypeRef(_, sym, _) if (qset contains sym) =>
"_"+sym.infoString(sym.info)
case arg =>
arg.toString
}
/** An existential can only be printed with wildcards if:
* - the underlying type is a typeref
* - every quantified variable appears at most once as a type argument and
* nowhere inside a type argument
* - no quantified type argument contains a quantified variable in its bound
* - the typeref's symbol is not itself quantified
* - the prefix is not quantified
*/
def isRepresentableWithWildcards = {
val qset = quantified.toSet
underlying match {
case _: RefinementTypeRef => false
case TypeRef(pre, sym, args) =>
def isQuantified(tpe: Type): Boolean = {
(tpe exists (t => qset contains t.typeSymbol)) ||
tpe.typeSymbol.isRefinementClass && (tpe.parents exists isQuantified)
}
val (wildcardArgs, otherArgs) = args partition (arg => qset contains arg.typeSymbol)
wildcardArgs.distinct == wildcardArgs &&
!(otherArgs exists (arg => isQuantified(arg))) &&
!(wildcardArgs exists (arg => isQuantified(arg.typeSymbol.info.bounds))) &&
!(qset contains sym) &&
!isQuantified(pre)
case _ => false
}
}
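    // Examples (sketch):
    //   Map[A, B] forSome { type A; type B }   -- representable, prints as Map[_, _]
    //   Map[A, A] forSome { type A }           -- not representable: A appears twice
    //   List[List[A]] forSome { type A }       -- not representable: A occurs inside an argument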
override def safeToString: String = {
def clauses = {
val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }")
if (settings.explaintypes) "(" + str + ")" else str
}
underlying match {
case TypeRef(pre, sym, args) if !settings.debug && isRepresentableWithWildcards =>
"" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]")
case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
"(" + underlying + ")" + clauses
case _ =>
"" + underlying + clauses
}
}
override def cloneInfo(owner: Symbol) =
createFromClonedSymbolsAtOwner(quantified, owner, underlying)(newExistentialType)
override def atOwner(owner: Symbol) =
if (!allSymbolsHaveOwner(quantified, owner)) cloneInfo(owner) else this
override def kind = "ExistentialType"
def withTypeVars(op: Type => Boolean): Boolean = withTypeVars(op, AnyDepth)
def withTypeVars(op: Type => Boolean, depth: Depth): Boolean = {
val quantifiedFresh = cloneSymbols(quantified)
val tvars = quantifiedFresh map (tparam => TypeVar(tparam))
val underlying1 = underlying.instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars
op(underlying1) && {
solve(tvars, quantifiedFresh, quantifiedFresh map (_ => Invariant), upper = false, depth) &&
isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.inst))
}
}
}
object ExistentialType extends ExistentialTypeExtractor
/** A class containing the alternatives and type prefix of an overloaded symbol.
* Not used after phase `typer`.
*/
case class OverloadedType(pre: Type, alternatives: List[Symbol]) extends Type {
override def prefix: Type = pre
override def safeToString =
(alternatives map pre.memberType).mkString("", " <and> ", "")
override def kind = "OverloadedType"
}
/** The canonical creator for OverloadedTypes.
*/
def overloadedType(pre: Type, alternatives: List[Symbol]): Type = alternatives match {
case Nil => NoType
case alt :: Nil => pre memberType alt
case _ => OverloadedType(pre, alternatives)
}
case class ImportType(expr: Tree) extends Type {
override def safeToString = "ImportType("+expr+")"
}
  /** A class remembering a type instantiation for a set of overloaded
* polymorphic symbols.
* Not used after phase `typer`.
*/
case class AntiPolyType(pre: Type, targs: List[Type]) extends Type {
override def safeToString =
pre.toString + targs.mkString("(with type arguments ", ", ", ")")
override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs)
override def kind = "AntiPolyType"
}
object HasTypeMember {
def apply(name: TypeName, tp: Type): Type = {
val bound = refinedType(List(WildcardType), NoSymbol)
val bsym = bound.typeSymbol.newAliasType(name)
bsym setInfo tp
bound.decls enter bsym
bound
}
def unapply(tp: Type): Option[(TypeName, Type)] = tp match {
case RefinedType(List(WildcardType), Scope(sym)) => Some((sym.name.toTypeName, sym.info))
case _ => None
}
}
object ArrayTypeRef {
def unapply(tp: Type) = tp match {
case TypeRef(_, ArrayClass, arg :: Nil) => Some(arg)
case _ => None
}
}
//@M
// a TypeVar used to be a case class with only an origin and a constr
// then, constr became mutable (to support UndoLog, I guess),
// but pattern-matching returned the original constr0 (a bug)
// now, pattern-matching returns the most recent constr
object TypeVar {
@inline final def trace[T](action: String, msg: => String)(value: T): T = {
if (traceTypeVars) {
val s = msg match {
case "" => ""
case str => "( " + str + " )"
}
Console.err.println("[%10s] %-25s%s".format(action, value, s))
}
value
}
/** Create a new TypeConstraint based on the given symbol.
*/
private def deriveConstraint(tparam: Symbol): TypeConstraint = {
/** Must force the type parameter's info at this point
* or things don't end well for higher-order type params.
* See SI-5359.
*/
val bounds = tparam.info.bounds
/* We can seed the type constraint with the type parameter
* bounds as long as the types are concrete. This should lower
* the complexity of the search even if it doesn't improve
* any results.
*/
if (propagateParameterBoundsToTypeVars) {
val exclude = bounds.isEmptyBounds || (bounds exists typeIsNonClassType)
if (exclude) new TypeConstraint
else TypeVar.trace("constraint", "For " + tparam.fullLocationString)(new TypeConstraint(bounds))
}
else new TypeConstraint
}
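    // Sketch: for a type parameter `T <: String` (a concrete class bound), with
    // propagateParameterBoundsToTypeVars enabled, the derived constraint starts out
    // as TypeConstraint(>: Nothing <: String) rather than empty, narrowing the
    // search before any bounds are registered.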
def untouchable(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = true)
def apply(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = false)
def apply(origin: Type, constr: TypeConstraint): TypeVar = apply(origin, constr, Nil, Nil)
def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]): TypeVar =
createTypeVar(origin, constr, args, params, untouchable = false)
/** This is the only place TypeVars should be instantiated.
*/
private def createTypeVar(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol], untouchable: Boolean): TypeVar = {
val tv = (
if (args.isEmpty && params.isEmpty) {
if (untouchable) new TypeVar(origin, constr) with UntouchableTypeVar
else new TypeVar(origin, constr) {}
}
else if (args.size == params.size) {
if (untouchable) new AppliedTypeVar(origin, constr, params zip args) with UntouchableTypeVar
else new AppliedTypeVar(origin, constr, params zip args)
}
else if (args.isEmpty) {
if (untouchable) new HKTypeVar(origin, constr, params) with UntouchableTypeVar
else new HKTypeVar(origin, constr, params)
}
else throw new Error("Invalid TypeVar construction: " + ((origin, constr, args, params)))
)
trace("create", "In " + tv.originLocation)(tv)
}
private def createTypeVar(tparam: Symbol, untouchable: Boolean): TypeVar =
createTypeVar(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams, untouchable)
}
  /** Precondition: params.nonEmpty. (args.isEmpty is enforced structurally:
   *  this class adds no args.)
   */
class HKTypeVar(
_origin: Type,
_constr: TypeConstraint,
override val params: List[Symbol]
) extends TypeVar(_origin, _constr) {
require(params.nonEmpty, this)
override def isHigherKinded = true
}
/** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.)
*/
class AppliedTypeVar(
_origin: Type,
_constr: TypeConstraint,
zippedArgs: List[(Symbol, Type)]
) extends TypeVar(_origin, _constr) {
require(zippedArgs.nonEmpty, this)
override def params: List[Symbol] = zippedArgs map (_._1)
override def typeArgs: List[Type] = zippedArgs map (_._2)
override def safeToString: String = super.safeToString + typeArgs.map(_.safeToString).mkString("[", ", ", "]")
}
trait UntouchableTypeVar extends TypeVar {
override def untouchable = true
override def isGround = true
override def registerTypeEquality(tp: Type, typeVarLHS: Boolean) = tp match {
case t: TypeVar if !t.untouchable =>
t.registerTypeEquality(this, !typeVarLHS)
case _ =>
super.registerTypeEquality(tp, typeVarLHS)
}
override def registerBound(tp: Type, isLowerBound: Boolean, isNumericBound: Boolean = false): Boolean = tp match {
case t: TypeVar if !t.untouchable =>
t.registerBound(this, !isLowerBound, isNumericBound)
case _ =>
super.registerBound(tp, isLowerBound, isNumericBound)
}
}
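  // Sketch: an untouchable typevar deflects constraints onto a touchable counterpart.
  // E.g. for untouchable ?U and ordinary ?T, ?U.registerBound(?T, isLowerBound = true)
  // becomes ?T.registerBound(?U, isLowerBound = false): ?T records ?U as an upper
  // bound, while ?U itself stays unconstrained.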
/** A class representing a type variable: not used after phase `typer`.
*
* A higher-kinded TypeVar has params (Symbols) and typeArgs (Types).
* A TypeVar with nonEmpty typeArgs can only be instantiated by a higher-kinded
* type that can be applied to those args. A TypeVar is much like a TypeRef,
* except it has special logic for equality and subtyping.
*
* Precondition for this class, enforced structurally: args.isEmpty && params.isEmpty.
*/
abstract case class TypeVar(
origin: Type,
var constr: TypeConstraint
) extends Type {
// We don't want case class equality/hashing as TypeVar-s are mutable,
// and TypeRefs based on them get wrongly `uniqued` otherwise. See SI-7226.
override def hashCode(): Int = System.identityHashCode(this)
override def equals(other: Any): Boolean = this eq other.asInstanceOf[AnyRef]
def untouchable = false // by other typevars
override def params: List[Symbol] = Nil
override def typeArgs: List[Type] = Nil
override def isHigherKinded = false
/** The constraint associated with the variable
* Syncnote: Type variables are assumed to be used from only one
* thread. They are not exposed in api.Types and are used only locally
* in operations that are exposed from types. Hence, no syncing of `constr`
* or `encounteredHigherLevel` or `suspended` accesses should be necessary.
*/
def instValid = constr.instValid
def inst = constr.inst
def instWithinBounds = constr.instWithinBounds
override def isGround = instValid && inst.isGround
/** The variable's skolemization level */
val level = skolemizationLevel
/** Applies this TypeVar to type arguments, if arity matches.
*
* Different applications of the same type constructor variable `?CC`,
* e.g. `?CC[Int]` and `?CC[String]`, are modeled as distinct instances of `TypeVar`
* that share a `TypeConstraint`, so that the comparisons `?CC[Int] <:< List[Int]`
* and `?CC[String] <:< Iterable[String]` result in `?CC` being upper-bounded by `List` and `Iterable`.
*
* Applying the wrong number of type args results in a TypeVar whose instance is set to `ErrorType`.
*/
def applyArgs(newArgs: List[Type]): TypeVar = (
if (newArgs.isEmpty && typeArgs.isEmpty)
this
else if (newArgs.size == params.size) {
val tv = TypeVar(origin, constr, newArgs, params)
tv.linkSuspended(this)
TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv)
}
else
TypeVar(typeSymbol).setInst(ErrorType)
)
// newArgs.length may differ from args.length (could've been empty before)
//
// !!! @PP - I need an example of this, since this exception never triggers
// even though I am requiring the size match.
//
// example: when making new typevars, you start out with C[A], then you replace C by ?C, which should yield ?C[A], then A by ?A, ?C[?A]
// we need to track a TypeVar's arguments, and map over them (see TypeMap::mapOver)
// TypeVars get applied to different arguments over time (in asSeenFrom)
// -- see pos/tcpoly_infer_implicit_tuplewrapper.scala
// thus: make new TypeVar's for every application of a TV to args,
// inference may generate several TypeVar's for a single type parameter that must be inferred,
// only one of them is in the set of tvars that need to be solved, but
// they share the same TypeConstraint instance
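    // Sketch of applyArgs above (IntTpe/StringTpe from Definitions; `cc` a hypothetical ?CC
    // with params of length 1):
    //   val cc1 = cc.applyArgs(List(IntTpe))     // models ?CC[Int]
    //   val cc2 = cc.applyArgs(List(StringTpe))  // models ?CC[String]
    //   cc1 and cc2 are distinct TypeVars, but both share cc's TypeConstraint, so
    //   bounds registered through either application constrain the same ?CC.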
// When comparing to types containing skolems, remember the highest level
// of skolemization. If that highest level is higher than our initial
// skolemizationLevel, we can't re-use those skolems as the solution of this
// typevar, which means we'll need to repack our inst into a fresh existential.
// were we compared to skolems at a higher skolemizationLevel?
// EXPERIMENTAL: value will not be considered unless enableTypeVarExperimentals is true
// see SI-5729 for why this is still experimental
private var encounteredHigherLevel = false
private def shouldRepackType = enableTypeVarExperimentals && encounteredHigherLevel
// <region name="constraint mutators + undoLog">
// invariant: before mutating constr, save old state in undoLog
// (undoLog is used to reset constraints to avoid piling up unrelated ones)
def setInst(tp: Type): this.type = {
if (tp eq this) {
log(s"TypeVar cycle: called setInst passing $this to itself.")
return this
}
undoLog record this
// if we were compared against later typeskolems, repack the existential,
// because skolems are only compatible if they were created at the same level
val res = if (shouldRepackType) repackExistential(tp) else tp
constr.inst = TypeVar.trace("setInst", "In " + originLocation + ", " + originName + "=" + res)(res)
this
}
def addLoBound(tp: Type, isNumericBound: Boolean = false) {
assert(tp != this, tp) // implies there is a cycle somewhere (?)
//println("addLoBound: "+(safeToString, debugString(tp))) //DEBUG
if (!sharesConstraints(tp)) {
undoLog record this
constr.addLoBound(tp, isNumericBound)
}
}
def addHiBound(tp: Type, isNumericBound: Boolean = false) {
// assert(tp != this)
//println("addHiBound: "+(safeToString, debugString(tp))) //DEBUG
if (!sharesConstraints(tp)) {
undoLog record this
constr.addHiBound(tp, isNumericBound)
}
}
// </region>
// ignore subtyping&equality checks while true -- see findMember
// OPT: This could be Either[TypeVar, Boolean], but this encoding was chosen instead to save allocations.
private var _suspended: Type = ConstantFalse
private[Types] def suspended: Boolean = (_suspended: @unchecked) match {
case ConstantFalse => false
case ConstantTrue => true
case tv: TypeVar => tv.suspended
}
    /** An `AppliedTypeVar` shares its `TypeConstraint` with the `HKTypeVar` it was spawned from.
     * A type inference session can also have more than one ATV.
     * If we don't detect that, we end up with a "cyclic constraint" when we try to instantiate
     * type parameters after solving (see pos/t8237).
     */
protected final def sharesConstraints(other: Type): Boolean = other match {
case other: TypeVar => constr == other.constr // SI-8237 avoid cycles. Details in pos/t8237.scala
case _ => false
}
private[Types] def suspended_=(b: Boolean): Unit = _suspended = if (b) ConstantTrue else ConstantFalse
// SI-7785 Link the suspended attribute of a TypeVar created in, say, a TypeMap (e.g. AsSeenFrom) to its originator
private[Types] def linkSuspended(origin: TypeVar): Unit = _suspended = origin
/** Called when a TypeVar is involved in a subtyping check. Result is whether
* this TypeVar could plausibly be a [super/sub]type of argument `tp` and if so,
* tracks tp as a [lower/upper] bound of this TypeVar.
*
* if (isLowerBound) this typevar could be a subtype, track tp as a lower bound
* if (!isLowerBound) this typevar could be a supertype, track tp as an upper bound
*
* If isNumericBound is true, the subtype check is performed with weak_<:< instead of <:<.
*/
def registerBound(tp: Type, isLowerBound: Boolean, isNumericBound: Boolean = false): Boolean = {
// println("regBound: "+(safeToString, debugString(tp), isLowerBound)) //@MDEBUG
if (isLowerBound)
assert(tp != this)
// side effect: adds the type to upper or lower bounds
def addBound(tp: Type) {
if (isLowerBound) addLoBound(tp, isNumericBound)
else addHiBound(tp, isNumericBound)
}
// swaps the arguments if it's an upper bound
def checkSubtype(tp1: Type, tp2: Type) = {
val lhs = if (isLowerBound) tp1 else tp2
val rhs = if (isLowerBound) tp2 else tp1
if (isNumericBound) lhs weak_<:< rhs
else lhs <:< rhs
}
/* Simple case: type arguments can be ignored, because either this typevar has
* no type parameters, or we are comparing to Any/Nothing.
*
* The latter condition is needed because HK unification is limited to constraints of the shape
* {{{
* TC1[T1,..., TN] <: TC2[T'1,...,T'N]
* }}}
* which would preclude the following important constraints:
* {{{
* Nothing <: ?TC[?T]
* ?TC[?T] <: Any
* }}}
*/
def unifySimple = {
val sym = tp.typeSymbol
if (sym == NothingClass || sym == AnyClass) { // kind-polymorphic
// SI-7126 if we register some type alias `T=Any`, we can later end
// with malformed types like `T[T]` during type inference in
// `handlePolymorphicCall`. No such problem if we register `Any`.
addBound(sym.tpe)
true
} else if (params.isEmpty) {
addBound(tp)
true
} else false
}
/* Full case: involving a check of the form
* {{{
* TC1[T1,..., TN] <: TC2[T'1,...,T'N]
* }}}
* Checks subtyping of higher-order type vars, and uses variances as defined in the
* type parameter we're trying to infer (the result will be sanity-checked later).
*/
def unifyFull(tpe: Type): Boolean = {
def unifySpecific(tp: Type) = {
val tpTypeArgs = tp.typeArgs
val arityDelta = compareLengths(typeArgs, tpTypeArgs)
if (arityDelta == 0) {
val lhs = if (isLowerBound) tpTypeArgs else typeArgs
val rhs = if (isLowerBound) typeArgs else tpTypeArgs
          // This is a higher-kinded type var with the same arity as tp.
          // If the args conform (see SI-7517), side effect: adds the type constructor itself as a bound.
isSubArgs(lhs, rhs, params, AnyDepth) && {addBound(tp.typeConstructor); true}
} else if (settings.YpartialUnification && arityDelta < 0 && typeArgs.nonEmpty) {
// Simple algorithm as suggested by Paul Chiusano in the comments on SI-2712
//
// https://issues.scala-lang.org/browse/SI-2712?focusedCommentId=61270
//
// Treat the type constructor as curried and partially applied, we treat a prefix
// as constants and solve for the suffix. For the example in the ticket, unifying
// M[A] with Int => Int this unifies as,
//
// M[t] = [t][Int => t] --> abstract on the right to match the expected arity
// A = Int --> capture the remainder on the left
//
// A more "natural" unifier might be M[t] = [t][t => t]. There's lots of scope for
// experimenting with alternatives here.
val numCaptured = tpTypeArgs.length - typeArgs.length
val (captured, abstractedArgs) = tpTypeArgs.splitAt(numCaptured)
val (lhs, rhs) =
if (isLowerBound) (abstractedArgs, typeArgs)
else (typeArgs, abstractedArgs)
isSubArgs(lhs, rhs, params, AnyDepth) && {
val tpSym = tp.typeSymbolDirect
val abstractedTypeParams = tpSym.typeParams.drop(numCaptured).map(_.cloneSymbol(tpSym))
addBound(PolyType(abstractedTypeParams, appliedType(tp.typeConstructor, captured ++ abstractedTypeParams.map(_.tpeHK))))
true
}
} else false
}
// The type with which we can successfully unify can be hidden
// behind singleton types and type aliases.
tpe.dealiasWidenChain exists unifySpecific
}
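      // Worked example of the partial-unification branch above (sketch):
      //   unifying ?M[?A] (typeArgs of length 1) with Function1[Int, Int] (length 2):
      //   numCaptured = 1, captured = List(Int), abstractedArgs = List(Int);
      //   isSubArgs relates ?A to Int, and the bound added for ?M is
      //   PolyType(List(t), Function1[Int, t]), i.e. [t]Int => t.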
// There's a <: test taking place right now, where tp is a concrete type and this is a typevar
// attempting to satisfy that test. Either the test will be unsatisfiable, in which case
// registerBound will return false; or the upper or lower bounds of this type var will be
// supplemented with the type being tested against.
//
// Eventually the types which have accumulated in the upper and lower bounds will be lubbed
// (resp. glbbed) to instantiate the typevar.
//
// The only types which are eligible for unification are those with the same number of
// typeArgs as this typevar, or Any/Nothing, which are kind-polymorphic. For the upper bound,
// any parent or base type of `tp` may be tested here (leading to a corresponding relaxation
// in the upper bound.) The universe of possible glbs, being somewhat more infinite, is not
// addressed here: all lower bounds are retained and their intersection calculated when the
// bounds are solved.
//
// In a side-effect free universe, checking tp and tp.parents before checking tp.baseTypeSeq
// would be pointless. In this case, each check we perform causes us to lose specificity: in
// the end the best we'll do is the least specific type we tested against, since the typevar
// does not see these checks as "probes" but as requirements to fulfill.
// TODO: can the `suspended` flag be used to poke around without leaving a trace?
//
// So the strategy used here is to test first the type, then the direct parents, and finally
// to fall back on the individual base types. This warrants eventual re-examination.
// AM: I think we could use the `suspended` flag to avoid side-effecting during unification
if (suspended) // constraint accumulation is disabled
checkSubtype(tp, origin)
else if (instValid) // type var is already set
checkSubtype(tp, inst)
else isRelatable(tp) && {
unifySimple || unifyFull(tp) || (
// only look harder if our gaze is oriented toward Any
isLowerBound && (
(tp.parents exists unifyFull) || (
// @PP: Is it going to be faster to filter out the parents we just checked?
// That's what's done here but I'm not sure it matters.
tp.baseTypeSeq.toList.tail filterNot (tp.parents contains _) exists unifyFull
)
)
)
}
}
def registerTypeEquality(tp: Type, typeVarLHS: Boolean): Boolean = {
// println("regTypeEq: "+(safeToString, debugString(tp), tp.getClass, if (typeVarLHS) "in LHS" else "in RHS", if (suspended) "ZZ" else if (instValid) "IV" else "")) //@MDEBUG
def checkIsSameType(tp: Type) = (
if (typeVarLHS) inst =:= tp
else tp =:= inst
)
if (suspended) tp =:= origin
else if (instValid) checkIsSameType(tp)
else isRelatable(tp) && {
val newInst = wildcardToTypeVarMap(tp)
(constr isWithinBounds newInst) && {
setInst(newInst)
true
}
}
}
/**
* `?A.T =:= tp` is rewritten as the constraint `?A <: {type T = tp}`
*
* TODO: make these constraints count (incorporate them into implicit search in `applyImplicitArgs`)
* (`T` corresponds to @param sym)
*/
def registerTypeSelection(sym: Symbol, tp: Type): Boolean = {
registerBound(HasTypeMember(sym.name.toTypeName, tp), isLowerBound = false)
}
private def isSkolemAboveLevel(tp: Type) = tp.typeSymbol match {
case ts: TypeSkolem => ts.level > level
case _ => false
}
// side-effects encounteredHigherLevel
private def containsSkolemAboveLevel(tp: Type) =
(tp exists isSkolemAboveLevel) && { encounteredHigherLevel = true ; true }
/** Can this variable be related in a constraint to type `tp`?
* This is not the case if `tp` contains type skolems whose
* skolemization level is higher than the level of this variable.
*/
def isRelatable(tp: Type) = (
shouldRepackType // short circuit if we already know we've seen higher levels
|| !containsSkolemAboveLevel(tp) // side-effects tracking boolean
|| enableTypeVarExperimentals // -Xexperimental: always say we're relatable, track consequences
)
override def normalize: Type = (
if (instValid) inst
// get here when checking higher-order subtyping of the typevar by itself
// TODO: check whether this ever happens?
else if (isHigherKinded) etaExpand
else super.normalize
)
override def etaExpand: Type = (
if (!isHigherKinded) this
else logResult("Normalizing HK $this")(typeFun(params, applyArgs(params map (_.typeConstructor))))
)
override def typeSymbol = origin.typeSymbol
private def tparamsOfSym(sym: Symbol) = sym.info match {
case PolyType(tparams, _) if tparams.nonEmpty =>
tparams map (_.defString) mkString("[", ",", "]")
case _ => ""
}
def originName = origin.typeSymbolDirect.decodedName
def originLocation = {
val sym = origin.typeSymbolDirect
val encl = sym.owner.logicallyEnclosingMember
      // This should display somewhere between one and three
      // things which enclose the origin: at most, a class,
      // a method, and a term. At least, a class.
List(
Some(encl.enclClass),
if (encl.isMethod) Some(encl) else None,
if (sym.owner.isTerm && (sym.owner != encl)) Some(sym.owner) else None
).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#"
}
private def levelString = if (settings.explaintypes) level else ""
override def safeToString = (
if ((constr eq null) || (inst eq null)) "TVar<" + originName + "=null>"
else if (inst ne NoType) "=?" + inst
else (if(untouchable) "!?" else "?") + levelString + originName
)
def originString = s"$originName in $originLocation"
override def kind = "TypeVar"
def cloneInternal = {
      // cloning a type variable while it is suspended will cause the clone
      // never to be resumed with the current implementation
assert(!suspended, this)
TypeVar.trace("clone", originLocation)(
TypeVar(origin, constr.cloneInternal, typeArgs, params) // @M TODO: clone args/params?
)
}
}
/** A type carrying some annotations. Created by the typechecker
* when eliminating ''Annotated'' trees (see typedAnnotated).
*
* @param annotations the list of annotations on the type
* @param underlying the type without the annotation
*/
case class AnnotatedType(override val annotations: List[AnnotationInfo],
override val underlying: Type)
extends RewrappingTypeProxy with AnnotatedTypeApi {
assert(!annotations.isEmpty, "" + underlying)
override protected def rewrap(tp: Type) = copy(underlying = tp)
override def isTrivial: Boolean = underlying.isTrivial && annotations.forall(_.isTrivial)
override def safeToString = annotations.mkString(underlying + " @", " @", "")
override def filterAnnotations(p: AnnotationInfo => Boolean): Type = {
val (yes, no) = annotations partition p
if (yes.isEmpty) underlying
else if (no.isEmpty) this
else copy(annotations = yes)
}
override def setAnnotations(annots: List[AnnotationInfo]): Type =
if (annots.isEmpty) underlying
else copy(annotations = annots)
/** Add a number of annotations to this type */
override def withAnnotations(annots: List[AnnotationInfo]): Type =
if (annots.isEmpty) this
else copy(annots ::: this.annotations)
/** Remove any annotations from this type.
* TODO - is it allowed to nest AnnotatedTypes? If not then let's enforce
* that at creation. At the moment if they do ever turn up nested this
* recursively calls withoutAnnotations.
*/
override def withoutAnnotations = underlying.withoutAnnotations
/** Drop the annotations on the bounds, unless the low and high
* bounds are exactly tp.
*/
override def bounds: TypeBounds = underlying.bounds match {
case TypeBounds(_: this.type, _: this.type) => TypeBounds(this, this)
case oftp => oftp
}
    /** Replace formal type parameter symbols with actual type arguments. */
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = {
val annotations1 = annotations.map(info => AnnotationInfo(info.atp.instantiateTypeParams(
formals, actuals), info.args, info.assocs).setPos(info.pos))
val underlying1 = underlying.instantiateTypeParams(formals, actuals)
if ((annotations1 eq annotations) && (underlying1 eq underlying)) this
else AnnotatedType(annotations1, underlying1)
}
/** Return the base type sequence of tp, dropping the annotations, unless the base type sequence of tp
* is precisely tp itself. */
override def baseTypeSeq: BaseTypeSeq = {
val oftp = underlying.baseTypeSeq
if ((oftp.length == 1) && (oftp(0) eq underlying))
baseTypeSingletonSeq(this)
else
oftp
}
override def kind = "AnnotatedType"
}
/** Creator for AnnotatedTypes. It returns the underlying type if annotations.isEmpty
* rather than walking into the assertion.
*/
def annotatedType(annots: List[AnnotationInfo], underlying: Type): Type =
if (annots.isEmpty) underlying
else AnnotatedType(annots, underlying)
object AnnotatedType extends AnnotatedTypeExtractor
object StaticallyAnnotatedType {
def unapply(tp: Type): Option[(List[AnnotationInfo], Type)] = tp.staticAnnotations match {
case Nil => None
case annots => Some((annots, tp.withoutAnnotations))
}
}
/** A class representing types with a name. When an application uses
* named arguments, the named argument types for calling isApplicable
* are represented as NamedType.
*/
case class NamedType(name: Name, tp: Type) extends Type {
override def safeToString: String = name.toString +": "+ tp
}
/** As with NamedType, used only when calling isApplicable.
* Records that the application has a wildcard star (aka _*)
* at the end of it.
*/
case class RepeatedType(tp: Type) extends Type {
override def safeToString: String = tp + ": _*"
}
/** A temporary type representing the erasure of a user-defined value type.
* Created during phase erasure, eliminated again in posterasure.
*
* SI-6385 Erasure's creation of bridges considers method signatures `exitingErasure`,
* which contain `ErasedValueType`-s. In order to correctly consider the overriding
* and overridden signatures as equivalent in `run/t6385.scala`, it is critical that
* this type contains the erasure of the wrapped type, rather than the unerased type
* of the value class itself, as was originally done.
*
* @param valueClazz The value class symbol
* @param erasedUnderlying The erased type of the unboxed value
*/
abstract case class ErasedValueType(valueClazz: Symbol, erasedUnderlying: Type) extends UniqueType {
override def safeToString = s"ErasedValueType($valueClazz, $erasedUnderlying)"
}
final class UniqueErasedValueType(valueClazz: Symbol, erasedUnderlying: Type) extends ErasedValueType(valueClazz, erasedUnderlying)
object ErasedValueType {
def apply(valueClazz: Symbol, erasedUnderlying: Type): Type = {
assert(valueClazz ne NoSymbol, "ErasedValueType over NoSymbol")
unique(new UniqueErasedValueType(valueClazz, erasedUnderlying))
}
}
/** A class representing an as-yet unevaluated type.
*/
abstract class LazyType extends Type {
override def isComplete: Boolean = false
override def complete(sym: Symbol)
override def safeToString = "<?>"
override def kind = "LazyType"
}
/** A marker trait representing an as-yet unevaluated type
* which doesn't assign flags to the underlying symbol.
*/
trait FlagAgnosticCompleter extends LazyType
/** A marker trait representing an as-yet unevaluated type
* which assigns flags to the underlying symbol.
*/
trait FlagAssigningCompleter extends LazyType
abstract class LazyPolyType(override val typeParams: List[Symbol]) extends LazyType {
override def safeToString =
(if (typeParams.isEmpty) "" else typeParamsString(this)) + super.safeToString
}
// Creators ---------------------------------------------------------------
/** Rebind symbol `sym` to an overriding member in type `pre`. */
private def rebind(pre: Type, sym: Symbol): Symbol = {
if (!sym.isOverridableMember || sym.owner == pre.typeSymbol) sym
else pre.nonPrivateMember(sym.name).suchThat { sym =>
// SI-7928 `isModuleNotMethod` is here to avoid crashing with spuriously "overloaded" module accessor and module symbols.
// These appear after the fields phase eliminates ModuleDefs that implement an interface.
// Here, we exclude the module symbol, which allows us to bind to the accessor.
// SI-8054 We must only do this after fields, otherwise we exclude the module symbol which does not yet have an accessor!
val isModuleWithAccessor = phase.assignsFields && sym.isModuleNotMethod
sym.isType || (!isModuleWithAccessor && sym.isStable && !sym.hasVolatileType)
} orElse sym
}
/** Convert a `super` prefix to a this-type if `sym` is abstract or final. */
private def removeSuper(tp: Type, sym: Symbol): Type = tp match {
case SuperType(thistp, _) =>
if (sym.isEffectivelyFinal || sym.isDeferred) thistp
else tp
case _ =>
tp
}
/** The canonical creator for single-types */
def singleType(pre: Type, sym: Symbol): Type = {
if (phase.erasedTypes)
sym.tpe.resultType
else if (sym.isRootPackage)
ThisType(sym.moduleClass)
else {
var sym1 = rebind(pre, sym)
val pre1 = removeSuper(pre, sym1)
if (pre1 ne pre) sym1 = rebind(pre1, sym1)
SingleType(pre1, sym1)
}
}
/** the canonical creator for a refined type with a given scope */
def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = {
if (phase.erasedTypes)
if (parents.isEmpty) ObjectTpe else parents.head
else {
val clazz = owner.newRefinementClass(pos)
val result = RefinedType(parents, decls, clazz)
clazz.setInfo(result)
result
}
}
/** The canonical creator for a refined type with an initially empty scope.
*/
def refinedType(parents: List[Type], owner: Symbol): Type =
refinedType(parents, owner, newScope, owner.pos)
def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) =
if ((parents eq original.parents) && (decls eq original.decls)) original
else {
val owner = original.typeSymbol.owner
val result =
if (isIntersectionTypeForLazyBaseType(original)) intersectionTypeForLazyBaseType(parents)
else refinedType(parents, owner)
val syms1 = decls.toList
for (sym <- syms1)
result.decls.enter(sym.cloneSymbol(result.typeSymbol))
val syms2 = result.decls.toList
val resultThis = result.typeSymbol.thisType
for (sym <- syms2)
sym modifyInfo (_ substThisAndSym(original.typeSymbol, resultThis, syms1, syms2))
result
}
/** The canonical creator for typerefs
* todo: see how we can clean this up a bit
*/
def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = {
// type alias selections are rebound in TypeMap ("coevolved",
// actually -- see #3731) e.g., when type parameters that are
// referenced by the alias are instantiated in the prefix. See
// pos/depmet_rebind_typealias.
val sym1 = if (sym.isAbstractType) rebind(pre, sym) else sym
// don't expand cyclical type alias
// we require that object is initialized, thus info.typeParams instead of typeParams.
if (sym1.isAliasType && sameLength(sym1.info.typeParams, args) && !sym1.lockOK)
throw new RecoverableCyclicReference(sym1)
val pre1 = pre match {
case x: SuperType if sym1.isEffectivelyFinal || sym1.isDeferred =>
x.thistpe
case _ => pre
}
if (pre eq pre1) TypeRef(pre, sym1, args)
else if (sym1.isAbstractType && !sym1.isClass) typeRef(pre1, rebind(pre1, sym1), args)
else typeRef(pre1, sym1, args)
}
// Optimization to avoid creating unnecessary new typerefs.
def copyTypeRef(tp: Type, pre: Type, sym: Symbol, args: List[Type]): Type = tp match {
case TypeRef(pre0, sym0, _) if pre == pre0 && sym0.name == sym.name =>
if (sym.isAliasType && sameLength(sym.info.typeParams, args) && !sym.lockOK)
throw new RecoverableCyclicReference(sym)
TypeRef(pre, sym, args)
case _ =>
typeRef(pre, sym, args)
}
/** The canonical creator for implicit method types */
def JavaMethodType(params: List[Symbol], resultType: Type): JavaMethodType =
new JavaMethodType(params, resultType) // don't unique this!
/** Create a new MethodType of the same class as tp, i.e. keep JavaMethodType */
def copyMethodType(tp: Type, params: List[Symbol], restpe: Type): Type = tp match {
case _: JavaMethodType => JavaMethodType(params, restpe)
case _ => MethodType(params, restpe)
}
  /** A creator for intersection types where an intersection of a single type is
   *  replaced by the type itself. Repeated parent classes are NOT merged;
   *  a `merge` implementation that would do so is kept in commented-out form below.
   */
def intersectionType(tps: List[Type], owner: Symbol): Type = tps match {
case tp :: Nil => tp
case _ => refinedType(tps, owner)
}
/** A creator for intersection type where intersections of a single type are
* replaced by the type itself.
*/
def intersectionType(tps: List[Type]): Type = tps match {
case tp :: Nil => tp
case _ => refinedType(tps, commonOwner(tps))
}
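  // e.g. (sketch): intersectionType(List(A, B)) is the refined type `A with B {}`,
  // while intersectionType(List(A)) is just `A`, with no refinement class created.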
def intersectionTypeForLazyBaseType(tps: List[Type]) = tps match {
case tp :: Nil => tp
case _ => RefinedType(tps, newScope, tps.head.typeSymbolDirect)
}
def isIntersectionTypeForLazyBaseType(tp: RefinedType) = tp.parents match {
case head :: _ => tp.typeSymbolDirect eq head.typeSymbolDirect
case _ => false
}
/**** This implementation to merge parents was checked in in commented-out
form and has languished unaltered for five years. I think we should
use it or lose it.
def merge(tps: List[Type]): List[Type] = tps match {
case tp :: tps1 =>
val tps1a = tps1 filter (_.typeSymbol.==(tp.typeSymbol))
val tps1b = tps1 filter (_.typeSymbol.!=(tp.typeSymbol))
mergePrefixAndArgs(tps1a, -1) match {
case Some(tp1) => tp1 :: merge(tps1b)
case None => throw new MalformedType(
"malformed type: "+refinedType(tps, owner)+" has repeated parent class "+
tp.typeSymbol+" with incompatible prefixes or type arguments")
}
case _ => tps
}
refinedType(merge(tps), owner)
*/
/** A creator for type applications */
def appliedType(tycon: Type, args: List[Type]): Type = {
if (args.isEmpty)
return tycon //@M! `if (args.isEmpty) tycon' is crucial (otherwise we create new types in phases after typer and then they don't get adapted (??))
/* Disabled - causes cycles in tcpoly tests. */
if (false && isDefinitionsInitialized) {
assert(isUseableAsTypeArgs(args), {
val tapp_s = s"""$tycon[${args mkString ", "}]"""
val arg_s = args filterNot isUseableAsTypeArg map (t => t + "/" + t.getClass) mkString ", "
s"$tapp_s includes illegal type argument $arg_s"
})
}
tycon match {
case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => copyTypeRef(tycon, pre, sym, Nil) //@M drop type args to Any/Nothing
case TypeRef(pre, sym, Nil) => copyTypeRef(tycon, pre, sym, args)
case TypeRef(pre, sym, bogons) => devWarning(s"Dropping $bogons from $tycon in appliedType.") ; copyTypeRef(tycon, pre, sym, args)
case PolyType(tparams, restpe) => restpe.instantiateTypeParams(tparams, args)
case ExistentialType(tparams, restpe) => newExistentialType(tparams, appliedType(restpe, args))
case st: SingletonType => appliedType(st.widen, args) // @M TODO: what to do? see bug1
case RefinedType(parents, decls) => RefinedType(parents map (appliedType(_, args)), decls) // @PP: Can this be right?
case TypeBounds(lo, hi) => TypeBounds(appliedType(lo, args), appliedType(hi, args)) // @PP: Can this be right?
case tv@TypeVar(_, _) => tv.applyArgs(args)
case AnnotatedType(annots, underlying) => AnnotatedType(annots, appliedType(underlying, args))
case ErrorType | WildcardType => tycon
case _ => abort(debugString(tycon))
}
}
def appliedType(tycon: Type, args: Type*): Type =
appliedType(tycon, args.toList)
def appliedType(tyconSym: Symbol, args: List[Type]): Type =
appliedType(tyconSym.typeConstructor, args)
/** Very convenient. */
def appliedType(tyconSym: Symbol, args: Type*): Type =
appliedType(tyconSym.typeConstructor, args.toList)
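  // e.g. (sketch): appliedType(ListClass, IntTpe) yields the TypeRef for List[Int];
  // appliedType(tycon, Nil) returns tycon unchanged; and applying a PolyType
  // instantiates its type params rather than building a TypeRef.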
/** A creator and extractor for type parameterizations that strips empty type parameter lists.
* Use this factory method to indicate the type has kind * (it's a polymorphic value)
* until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty).
*
* PP to AM: I've co-opted this for where I know tparams may well be empty, and
* expecting to get back `tpe` in such cases. Re being "forgiving" below,
* can we instead say this is the canonical creator for polyTypes which
* may or may not be poly? (It filched the standard "canonical creator" name.)
*/
object GenPolyType {
def apply(tparams: List[Symbol], tpe: Type): Type = {
tpe match {
case MethodType(_, _) =>
assert(tparams forall (_.isInvariant), "Trying to create a method with variant type parameters: " + ((tparams, tpe)))
case _ =>
}
if (tparams.nonEmpty) typeFun(tparams, tpe)
else tpe // it's okay to be forgiving here
}
def unapply(tpe: Type): Option[(List[Symbol], Type)] = tpe match {
case PolyType(tparams, restpe) => Some((tparams, restpe))
case _ => Some((Nil, tpe))
}
}
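  // Sketch: GenPolyType(Nil, tpe) == tpe, and GenPolyType(tparams, tpe) == typeFun(tparams, tpe)
  // for nonempty tparams; unapply never fails, yielding (Nil, tpe) for non-poly types.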
def genPolyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe)
@deprecated("use genPolyType(...) instead", "2.10.0") // Used in reflection API
def polyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe)
/** A creator for anonymous type functions, where the symbol for the type function still needs to be created.
*
   * TODO:
   *  - type params of anonymous type functions, which currently can only arise from
   *    normalising type aliases, are owned by the type alias of which they are the eta-expansion
   *  - higher-order subtyping expects eta-expansion of type constructors that arise from a class;
   *    here, the type params are owned by that class, but is that the right thing to do?
*/
def typeFunAnon(tps: List[Symbol], body: Type): Type = typeFun(tps, body)
/** A creator for a type functions, assuming the type parameters tps already have the right owner. */
def typeFun(tps: List[Symbol], body: Type): Type = PolyType(tps, body)
/** A creator for existential types. This generates:
*
* tpe1 where { tparams }
*
* where `tpe1` is the result of extrapolating `tpe` with respect to `tparams`.
   * Extrapolating means that type variables in `tparams` occurring
   * in covariant positions are replaced by their upper bounds (minus any
   * SingletonClass markers), and type variables in `tparams` occurring in
   * contravariant positions are replaced by their lower bounds, provided the
   * resulting type is legal with regard to stability and does not contain any type
   * variable in `tparams`.
*
* The abstraction drops all type parameters that are not directly or
* indirectly referenced by type `tpe1`. If there are no remaining type
* parameters, simply returns result type `tpe`.
*/
def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type =
if (tparams.isEmpty) tpe0
else {
val tpe = normalizeAliases(tpe0)
val tpe1 = new ExistentialExtrapolation(tparams) extrapolate tpe
var tparams0 = tparams
var tparams1 = tparams0 filter tpe1.contains
while (tparams1 != tparams0) {
tparams0 = tparams1
tparams1 = tparams filter { p =>
tparams1 exists { p1 => p1 == p || (p1.info contains p) }
}
}
newExistentialType(tparams1, tpe1)
}
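  // Extrapolation example (sketch, assuming a quantifier T <: String):
  //   existentialAbstraction(List(T), List[T]) == List[String]
  //     (T occurs once, covariantly; the quantifier is dropped since T no longer occurs)
  //   existentialAbstraction(List(T), T => T) == (T => T) forSome { type T <: String }
  //     (T occurs twice, so it is not extrapolated and the quantifier is kept)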
// Hash consing --------------------------------------------------------------
private val initialUniquesCapacity = 4096
private var uniques: util.WeakHashSet[Type] = _
private var uniqueRunId = NoRunId
protected def unique[T <: Type](tp: T): T = {
if (Statistics.canEnable) Statistics.incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
uniques = util.WeakHashSet[Type](initialUniquesCapacity)
// JZ: We used to register this as a perRunCache so it would be cleared eagerly at
// the end of the compilation run. But, that facility didn't actually clear this map (SI-8129)!
      // When I fixed that bug, run/tpeCache-tyconCache.scala started failing. Why was that?
// I've removed the registration for now. I don't think it's particularly harmful anymore
// as a) this is now a weak set, and b) it is discarded completely before the next run.
uniqueRunId = currentRunId
}
(uniques findEntryOrUpdate tp).asInstanceOf[T]
}
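  // Interning sketch (`clazz`, `erasedTp` hypothetical): structurally equal types
  // collapse to a single canonical instance, i.e.
  //   unique(new UniqueErasedValueType(clazz, erasedTp)) eq
  //     unique(new UniqueErasedValueType(clazz, erasedTp))
  // holds, since the second call finds the entry the first call cached in `uniques`.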
// Helper Classes ---------------------------------------------------------
class TypeUnwrapper(poly: Boolean, existential: Boolean, annotated: Boolean, nullary: Boolean) extends (Type => Type) {
def apply(tp: Type): Type = tp match {
case AnnotatedType(_, underlying) if annotated => apply(underlying)
case ExistentialType(_, underlying) if existential => apply(underlying)
case PolyType(_, underlying) if poly => apply(underlying)
case NullaryMethodType(underlying) if nullary => apply(underlying)
case tp => tp
}
}
class ClassUnwrapper(existential: Boolean) extends TypeUnwrapper(poly = true, existential, annotated = true, nullary = false) {
override def apply(tp: Type) = super.apply(tp.normalize) // normalize is required here
}
object unwrapToClass extends ClassUnwrapper(existential = true) { }
object unwrapToStableClass extends ClassUnwrapper(existential = false) { }
object unwrapWrapperTypes extends TypeUnwrapper(true, true, true, true) { }
def elementExtract(container: Symbol, tp: Type): Type = {
assert(!container.isAliasType, container)
unwrapWrapperTypes(tp baseType container).dealiasWiden match {
case TypeRef(_, `container`, arg :: Nil) => arg
case _ => NoType
}
}
def elementExtractOption(container: Symbol, tp: Type): Option[Type] = {
elementExtract(container, tp) match {
case NoType => None
case tp => Some(tp)
}
}
def elementTest(container: Symbol, tp: Type)(f: Type => Boolean): Boolean = {
elementExtract(container, tp) match {
case NoType => false
case tp => f(tp)
}
}
def elementTransform(container: Symbol, tp: Type)(f: Type => Type): Type = {
elementExtract(container, tp) match {
case NoType => NoType
case tp => f(tp)
}
}
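  // e.g. (sketch): with container = ListClass,
  //   elementExtract(ListClass, <List[Int]>) == Int
  //   elementExtract(ListClass, <String>)    == NoType  (String has no List base type)
  //   elementTest(ListClass, <List[Int]>)(_ =:= IntTpe) == true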
def transparentShallowTransform(container: Symbol, tp: Type)(f: Type => Type): Type = {
def loop(tp: Type): Type = tp match {
case tp @ AnnotatedType(_, underlying) => tp.copy(underlying = loop(underlying))
case tp @ ExistentialType(_, underlying) => tp.copy(underlying = loop(underlying))
case tp @ PolyType(_, resultType) => tp.copy(resultType = loop(resultType))
case tp @ NullaryMethodType(resultType) => tp.copy(resultType = loop(resultType))
case tp => elementTransform(container, tp)(el => appliedType(container, f(el))).orElse(f(tp))
}
loop(tp)
}
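  // Sketch: transparentShallowTransform(OptionClass, tp)(f) rewrites the element
  // beneath transparent wrappers: for tp = Option[Int] @ann it yields Option[f(Int)] @ann,
  // while for a bare Int (no Option base type) it falls back to f(Int).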
/** Repack existential types, otherwise they sometimes get unpacked in the
* wrong location (type inference comes up with an unexpected skolem)
*/
def repackExistential(tp: Type): Type = (
if (tp == NoType) tp
else existentialAbstraction(existentialsInType(tp), tp)
)
def containsExistential(tpe: Type) = tpe exists typeIsExistentiallyBound
def existentialsInType(tpe: Type) = tpe withFilter typeIsExistentiallyBound map (_.typeSymbol)
private def isDummyOf(tpe: Type)(targ: Type) = {
val sym = targ.typeSymbol
sym.isTypeParameter && sym.owner == tpe.typeSymbol
}
def isDummyAppliedType(tp: Type) = tp.dealias match {
case tr @ TypeRef(_, _, args) => args exists isDummyOf(tr)
case _ => false
}
def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = {
val eparams = mapWithIndex(tparams)((tparam, i) =>
clazz.newExistential(newTypeName("?"+i), clazz.pos) setInfo tparam.info.bounds)
eparams map (_ substInfo (tparams, eparams))
}
def typeParamsToExistentials(clazz: Symbol): List[Symbol] =
typeParamsToExistentials(clazz, clazz.typeParams)
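  // e.g. (sketch): for class Map[K, V], typeParamsToExistentials(MapClass) yields
  // fresh existentials ?0 and ?1 carrying K's and V's bounds, with references to
  // K/V inside those bounds rewritten to ?0/?1 by the substInfo step.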
def isRawIfWithoutArgs(sym: Symbol) = sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined
/** Is type tp a ''raw type''? */
// note: it's important to write the two tests in this order,
// as only typeParams forces the classfile to be read. See #400
def isRawType(tp: Type) = !phase.erasedTypes && (tp match {
case TypeRef(_, sym, Nil) => isRawIfWithoutArgs(sym)
case _ => false
})
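  // e.g. (sketch): before erasure, a no-argument reference to a Java generic class
  // such as java.util.List is raw; List[Int], or any Scala-defined class, is not.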
@deprecated("use isRawType", "2.10.1") // presently used by sbt
def isRaw(sym: Symbol, args: List[Type]) = (
!phase.erasedTypes
&& args.isEmpty
&& isRawIfWithoutArgs(sym)
)
def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
/**
* A more persistent version of `Type#memberType` which does not require
* that the symbol is a direct member of the prefix.
*
* For instance:
*
* {{{
* class C[T] {
* sealed trait F[A]
* object X {
* object S1 extends F[T]
* }
* class S2 extends F[T]
* }
* object O extends C[Int] {
* def foo(f: F[Int]) = f match {...} // need to enumerate sealed subtypes of the scrutinee here.
* }
* class S3 extends O.F[String]
*
* nestedMemberType(<S1>, <O.type>, <C>) = O.X.S1.type
* nestedMemberType(<S2>, <O.type>, <C>) = O.S2.type
* nestedMemberType(<S3>, <O.type>, <C>) = S3.type
* }}}
*
* @param sym The symbol of the subtype
* @param pre The prefix from which the symbol is seen
* @param owner
*/
def nestedMemberType(sym: Symbol, pre: Type, owner: Symbol): Type = {
def loop(tp: Type): Type =
if (tp.isTrivial) tp
else if (tp.prefix.typeSymbol isNonBottomSubClass owner) {
val widened = tp match {
case _: ConstantType => tp // Java enum constants: don't widen to the enum type!
case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect.
}
val memType = widened asSeenFrom (pre, tp.typeSymbol.owner)
if (tp eq widened) memType else memType.narrow
}
else loop(tp.prefix) memberType tp.typeSymbol
val result = loop(sym.tpeHK)
assert(sym.isTerm || result.typeSymbol == sym, s"($result).typeSymbol = ${result.typeSymbol}; expected ${sym}")
result
}
class MissingAliasControl extends ControlThrowable
val missingAliasException = new MissingAliasControl
class MissingTypeControl extends ControlThrowable
// Helper Methods -------------------------------------------------------------
/** The maximum allowable depth of lubs or glbs over types `ts`.
*/
def lubDepth(ts: List[Type]): Depth = {
val td = typeDepth(ts)
val bd = baseTypeSeqDepth(ts)
lubDepthAdjust(td, td max bd)
}
/** The maximum allowable depth of lubs or glbs over given types,
* as a function over the maximum depth `td` of these types, and
* the maximum depth `bd` of all types in the base type sequences of these types.
*/
private def lubDepthAdjust(td: Depth, bd: Depth): Depth = (
if (settings.XfullLubs) bd
else if (bd <= Depth(3)) bd
else if (bd <= Depth(5)) td max bd.decr
else if (bd <= Depth(7)) td max (bd decr 2)
else td.decr max (bd decr 3)
)
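  // Worked example (sketch): for td = Depth(4) and bd = Depth(6),
  //   lubDepthAdjust(td, td max bd) = td max (Depth(6) decr 2) = Depth(4),
  // i.e. deep base type sequences get their allowance trimmed unless XfullLubs is set.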
private def symTypeDepth(syms: List[Symbol]): Depth = typeDepth(syms map (_.info))
private def typeDepth(tps: List[Type]): Depth = maxDepth(tps)
private def baseTypeSeqDepth(tps: List[Type]): Depth = maxbaseTypeSeqDepth(tps)
/** Is intersection of given types populated? That is,
* for all types tp1, tp2 in intersection
* for all common base classes bc of tp1 and tp2
* let bt1, bt2 be the base types of tp1, tp2 relative to class bc
* Then:
* bt1 and bt2 have the same prefix, and
* any corresponding non-variant type arguments of bt1 and bt2 are the same
*/
def isPopulated(tp1: Type, tp2: Type): Boolean = {
def isConsistent(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
assert(sym1 == sym2, (sym1, sym2))
( pre1 =:= pre2
&& forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
// if left-hand argument is a typevar, make it compatible with variance
// this is for more precise pattern matching
// todo: work this in the spec of this method
// also: think what happens if there are embedded typevars?
if (tparam.variance.isInvariant)
arg1 =:= arg2
else !arg1.isInstanceOf[TypeVar] || {
if (tparam.variance.isContravariant) arg1 <:< arg2
else arg2 <:< arg1
}
}
)
case (et: ExistentialType, _) =>
et.withTypeVars(isConsistent(_, tp2))
case (_, et: ExistentialType) =>
et.withTypeVars(isConsistent(tp1, _))
case (_, _) =>
throw new MatchError((tp1, tp2))
}
def check(tp1: Type, tp2: Type) = (
if (tp1.typeSymbol.isClass && tp1.typeSymbol.hasFlag(FINAL))
tp1 <:< tp2 || isNumericValueClass(tp1.typeSymbol) && isNumericValueClass(tp2.typeSymbol)
else tp1.baseClasses forall (bc =>
tp2.baseTypeIndex(bc) < 0 || isConsistent(tp1.baseType(bc), tp2.baseType(bc)))
)
check(tp1, tp2) && check(tp2, tp1)
}
def normalizePlus(tp: Type): Type = {
if (isRawType(tp)) rawToExistential(tp)
else tp.normalize match {
// Unify the representations of module classes
case st@SingleType(_, sym) if sym.isModule => st.underlying.normalize
case st@ThisType(sym) if sym.isModuleClass => normalizePlus(st.underlying)
case _ => tp.normalize
}
}
/*
todo: change to:
def normalizePlus(tp: Type) = tp match {
case TypeRef(pre, sym, List()) =>
if (!sym.isInitialized) sym.rawInfo.load(sym)
if (sym.isJavaDefined && !sym.typeParams.isEmpty) rawToExistential(tp)
else tp.normalize
case _ => tp.normalize
}
*/
/** Are `tps1` and `tps2` lists of pairwise equivalent types? */
def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _)
/** True if two lists have the same length. Since calling length on linear sequences
* is O(n), it is an inadvisable way to test length equality.
*/
final def sameLength(xs1: List[_], xs2: List[_]) = compareLengths(xs1, xs2) == 0
@tailrec final def compareLengths(xs1: List[_], xs2: List[_]): Int =
if (xs1.isEmpty) { if (xs2.isEmpty) 0 else -1 }
else if (xs2.isEmpty) 1
else compareLengths(xs1.tail, xs2.tail)
/** Again avoiding calling length, but the lengthCompare interface is clunky.
*/
final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0
private var _basetypeRecursions: Int = 0
def basetypeRecursions = _basetypeRecursions
def basetypeRecursions_=(value: Int) = _basetypeRecursions = value
private val _pendingBaseTypes = new mutable.HashSet[Type]
def pendingBaseTypes = _pendingBaseTypes
/** Does this type have a prefix that begins with a type variable,
* or is it a refinement type? For type prefixes that fulfil this condition,
* type selections with the same name of equal (as determined by `=:=`) prefixes are
* considered equal in regard to `=:=`.
*/
def isEligibleForPrefixUnification(tp: Type): Boolean = tp match {
case SingleType(pre, sym) => !(sym hasFlag PACKAGE) && isEligibleForPrefixUnification(pre)
case tv@TypeVar(_, constr) => !tv.instValid || isEligibleForPrefixUnification(constr.inst)
case RefinedType(_, _) => true
case _ => false
}
def isErrorOrWildcard(tp: Type) = (tp eq ErrorType) || (tp eq WildcardType)
  /** This appears to be equivalent to tp.isInstanceOf[SingletonType],
   *  except that it excludes ConstantTypes.
   */
def isSingleType(tp: Type) = tp match {
case ThisType(_) | SuperType(_, _) | SingleType(_, _) => true
case _ => false
}
def isConstantType(tp: Type) = tp match {
case ConstantType(_) => true
case _ => false
}
def isExistentialType(tp: Type): Boolean = tp match {
case _: ExistentialType => true
case tp: Type if tp.dealias ne tp => isExistentialType(tp.dealias)
case _ => false
}
def isImplicitMethodType(tp: Type) = tp match {
case mt: MethodType => mt.isImplicit
case _ => false
}
/** This is defined and named as it is because the goal is to exclude source
* level types which are not value types (e.g. MethodType) without excluding
* necessary internal types such as WildcardType. There are also non-value
* types which can be used as type arguments (e.g. type constructors.)
*/
def isUseableAsTypeArg(tp: Type) = (
isInternalTypeUsedAsTypeArg(tp) // the subset of internal types which can be type args
|| isHKTypeRef(tp) // not a value type, but ok as a type arg
|| isValueElseNonValue(tp) // otherwise only value types
)
private def isHKTypeRef(tp: Type) = tp match {
case TypeRef(_, sym, Nil) => tp.isHigherKinded
case _ => false
}
@tailrec final def isUseableAsTypeArgs(tps: List[Type]): Boolean = tps match {
case Nil => true
case x :: xs => isUseableAsTypeArg(x) && isUseableAsTypeArgs(xs)
}
/** The "third way", types which are neither value types nor
* non-value types as defined in the SLS, further divided into
* types which are used internally in type applications and
* types which are not.
*/
/**** Not used right now, but kept around to document which Types
* land in which bucket.
private def isInternalTypeNotUsedAsTypeArg(tp: Type): Boolean = tp match {
case AntiPolyType(pre, targs) => true
case ClassInfoType(parents, defs, clazz) => true
case ErasedValueType(tref) => true
case NoPrefix => true
case NoType => true
case SuperType(thistpe, supertpe) => true
case TypeBounds(lo, hi) => true
case _ => false
}
****/
private def isInternalTypeUsedAsTypeArg(tp: Type): Boolean = tp match {
case WildcardType => true
case BoundedWildcardType(_) => true
case ErrorType => true
case _: TypeVar => true
case _ => false
}
private def isAlwaysValueType(tp: Type) = tp match {
case RefinedType(_, _) => true
case ExistentialType(_, _) => true
case ConstantType(_) => true
case _ => false
}
private def isAlwaysNonValueType(tp: Type) = tp match {
case OverloadedType(_, _) => true
case NullaryMethodType(_) => true
case MethodType(_, _) => true
case PolyType(_, MethodType(_, _)) => true
case _ => false
}
/** Should be called only with types for which a clear true/false answer
* can be given: true == value type, false == non-value type. Otherwise,
* an exception is thrown.
*/
private def isValueElseNonValue(tp: Type): Boolean = tp match {
case tp if isAlwaysValueType(tp) => true
case tp if isAlwaysNonValueType(tp) => false
case AnnotatedType(_, underlying) => isValueElseNonValue(underlying)
case SingleType(_, sym) => sym.isValue // excludes packages and statics
case TypeRef(_, _, _) if tp.isHigherKinded => false // excludes type constructors
case ThisType(sym) => !sym.isPackageClass // excludes packages
case TypeRef(_, sym, _) => !sym.isPackageClass // excludes packages
case PolyType(_, _) => true // poly-methods excluded earlier
case tp => sys.error("isValueElseNonValue called with third-way type " + tp)
}
/** SLS 3.2, Value Types
* Is the given type definitely a value type? A true result means
* it verifiably is, but a false result does not mean it is not,
* only that it cannot be assured. To avoid false positives, this
* defaults to false, but since Type is not sealed, one should take
* a false answer with a grain of salt. This method may be primarily
* useful as documentation; it is likely that !isNonValueType(tp)
* will serve better than isValueType(tp).
*/
/** def isValueType(tp: Type) = isValueElseNonValue(tp) */
/** SLS 3.3, Non-Value Types
* Is the given type definitely a non-value type, as defined in SLS 3.3?
* The specification-enumerated non-value types are method types, polymorphic
* method types, and type constructors. Supplements to the specified set of
* non-value types include: types which wrap non-value symbols (packages
 * and statics), overloaded types. Varargs and by-name types T* and (=>T) are
* not designated non-value types because there is code which depends on using
* them as type arguments, but their precise status is unclear.
*/
/** def isNonValueType(tp: Type) = !isValueElseNonValue(tp) */
def isNonRefinementClassType(tpe: Type) = tpe match {
case SingleType(_, sym) => sym.isModuleClass
case TypeRef(_, sym, _) => sym.isClass && !sym.isRefinementClass
case ErrorType => true
case _ => false
}
def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Depth): Boolean = {
def isSubArg(t1: Type, t2: Type, variance: Variance) = (
(variance.isCovariant || isSubType(t2, t1, depth)) // The order of these two checks can be material for performance (SI-8478)
&& (variance.isContravariant || isSubType(t1, t2, depth))
)
corresponds3(tps1, tps2, mapList(tparams)(_.variance))(isSubArg)
}
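/* Editorial sketch of what variance buys in isSubArg: a covariant parameter
 * needs only the t1 <:< t2 direction, a contravariant one only t2 <:< t1,
 * and an invariant parameter needs both. In user-level terms:
 *   List[Int]    <:< List[Any]    // true:  List's A is covariant
 *   (Any => Int) <:< (Int => Int) // true:  Function1's argument is contravariant
 *   Array[Int]   <:< Array[Any]   // false: Array's T is invariant
 */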
def specializesSym(tp: Type, sym: Symbol, depth: Depth): Boolean = {
def directlySpecializedBy(member: Symbol): Boolean = (
member == sym
|| specializesSym(tp.narrow, member, sym.owner.thisType, sym, depth)
)
// Closure reduction, else this would be simply `member exists directlySpecializedBy`
def specializedBy(member: Symbol): Boolean = (
if (member eq NoSymbol) false
else if (member.isOverloaded) member.alternatives exists directlySpecializedBy
else directlySpecializedBy(member)
)
( (tp.typeSymbol isBottomSubClass sym.owner)
|| specializedBy(tp nonPrivateMember sym.name)
)
}
/** Does member `symLo` of `tpLo` have a stronger type
* than member `symHi` of `tpHi`?
*/
protected[internal] def specializesSym(preLo: Type, symLo: Symbol, preHi: Type, symHi: Symbol, depth: Depth): Boolean =
(symHi.isAliasType || symHi.isTerm || symHi.isAbstractType) && {
// only now that we know symHi is a viable candidate ^^^^^^^, do the expensive checks: ----V
require((symLo ne NoSymbol) && (symHi ne NoSymbol), ((preLo, symLo, preHi, symHi, depth)))
val tpHi = preHi.memberInfo(symHi).substThis(preHi.typeSymbol, preLo)
// Should we use memberType or memberInfo?
// memberType transforms (using `asSeenFrom`) `sym.tpe`,
// whereas memberInfo performs the same transform on `sym.info`.
// For term symbols, this ends up being the same thing (`sym.tpe == sym.info`).
// For type symbols, however, the `.info` of an abstract type member
// is defined by its bounds, whereas its `.tpe` is a `TypeRef` to that type symbol,
// so that `sym.tpe <:< sym.info`, but not the other way around.
//
// Thus, for the strongest (correct) result,
// we should use `memberType` on the low side.
//
// On the high side, we should use the result appropriate
// for the right side of the `<:<` above (`memberInfo`).
val tpLo = preLo.memberType(symLo)
debuglog(s"specializesSymHi: $preHi . $symHi : $tpHi")
debuglog(s"specializesSymLo: $preLo . $symLo : $tpLo")
if (symHi.isTerm)
(isSubType(tpLo, tpHi, depth) &&
(!symHi.isStable || symLo.isStable) && // sub-member must remain stable
(!symLo.hasVolatileType || symHi.hasVolatileType || tpHi.isWildcard)) // sub-member must not introduce volatility
else if (symHi.isAbstractType)
((tpHi.bounds containsType tpLo) &&
kindsConform(symHi :: Nil, tpLo :: Nil, preLo, symLo.owner))
else // we know `symHi.isAliasType` (see above)
tpLo =:= tpHi
}
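/* Editorial sketch of the memberType/memberInfo asymmetry discussed above,
 * for a hypothetical abstract type member:
 *   trait Container { type Elem <: AnyRef }
 * For the symbol of Elem, `info` is its bounds (>: Nothing <: AnyRef), while
 * `tpe` is the TypeRef Container#Elem, so tpe <:< info holds but not the
 * converse; hence memberType on the low side and memberInfo on the high side
 * give the strongest correct comparison.
 */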
/** A function implementing `tp1` matches `tp2`. */
final def matchesType(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
def matchesQuantified(tparams1: List[Symbol], tparams2: List[Symbol], res1: Type, res2: Type): Boolean = (
sameLength(tparams1, tparams2) &&
matchesType(res1, res2.substSym(tparams2, tparams1), alwaysMatchSimple)
)
def lastTry =
tp2 match {
case ExistentialType(_, res2) if alwaysMatchSimple =>
matchesType(tp1, res2, alwaysMatchSimple = true)
case MethodType(_, _) =>
false
case PolyType(_, _) =>
false
case _ =>
alwaysMatchSimple || tp1 =:= tp2
}
tp1 match {
case mt1 @ MethodType(params1, res1) =>
tp2 match {
case mt2 @ MethodType(params2, res2) =>
// sameLength(params1, params2) was used directly as pre-screening optimization (now done by matchesQuantified -- is that ok, performancewise?)
mt1.isImplicit == mt2.isImplicit &&
matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
matchesQuantified(params1, params2, res1, res2)
case NullaryMethodType(res2) =>
if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple)
else matchesType(tp1, res2, alwaysMatchSimple)
case ExistentialType(_, res2) =>
alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
case TypeRef(_, sym, Nil) =>
params1.isEmpty && sym.isModuleClass && matchesType(res1, tp2, alwaysMatchSimple)
case _ =>
false
}
case mt1 @ NullaryMethodType(res1) =>
tp2 match {
case mt2 @ MethodType(Nil, res2) => // could never match if params nonEmpty, and !mt2.isImplicit is implied by empty param list
matchesType(res1, res2, alwaysMatchSimple)
case NullaryMethodType(res2) =>
matchesType(res1, res2, alwaysMatchSimple)
case ExistentialType(_, res2) =>
alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
case TypeRef(_, sym, Nil) if sym.isModuleClass =>
matchesType(res1, tp2, alwaysMatchSimple)
case _ =>
matchesType(res1, tp2, alwaysMatchSimple)
}
case PolyType(tparams1, res1) =>
tp2 match {
case PolyType(tparams2, res2) =>
if ((tparams1 corresponds tparams2)(_ eq _))
matchesType(res1, res2, alwaysMatchSimple)
else
matchesQuantified(tparams1, tparams2, res1, res2)
case ExistentialType(_, res2) =>
alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
case _ =>
false // remember that tparams1.nonEmpty is now an invariant of PolyType
}
case ExistentialType(tparams1, res1) =>
tp2 match {
case ExistentialType(tparams2, res2) =>
matchesQuantified(tparams1, tparams2, res1, res2)
case _ =>
if (alwaysMatchSimple) matchesType(res1, tp2, alwaysMatchSimple = true)
else lastTry
}
case TypeRef(_, sym, Nil) if sym.isModuleClass =>
tp2 match {
case MethodType(Nil, res2) => matchesType(tp1, res2, alwaysMatchSimple)
case NullaryMethodType(res2) => matchesType(tp1, res2, alwaysMatchSimple)
case _ => lastTry
}
case _ =>
lastTry
}
}
/** matchesType above is an optimized version of the following implementation:
def matchesType2(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
def matchesQuantified(tparams1: List[Symbol], tparams2: List[Symbol], res1: Type, res2: Type): Boolean =
tparams1.length == tparams2.length &&
matchesType(res1, res2.substSym(tparams2, tparams1), alwaysMatchSimple)
(tp1, tp2) match {
case (MethodType(params1, res1), MethodType(params2, res2)) =>
params1.length == params2.length && // useful pre-screening optimization
matchingParams(params1, params2, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
matchesType(res1, res2, alwaysMatchSimple) &&
tp1.isImplicit == tp2.isImplicit
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
matchesQuantified(tparams1, tparams2, res1, res2)
case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
matchesType(rtp1, rtp2, alwaysMatchSimple)
case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
matchesType(rtp1, rtp2, alwaysMatchSimple)
case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
matchesQuantified(tparams1, tparams2, res1, res2)
case (ExistentialType(_, res1), _) if alwaysMatchSimple =>
matchesType(res1, tp2, alwaysMatchSimple)
case (_, ExistentialType(_, res2)) if alwaysMatchSimple =>
matchesType(tp1, res2, alwaysMatchSimple)
case (NullaryMethodType(rtp1), _) =>
matchesType(rtp1, tp2, alwaysMatchSimple)
case (_, NullaryMethodType(rtp2)) =>
matchesType(tp1, rtp2, alwaysMatchSimple)
case (MethodType(_, _), _) => false
case (PolyType(_, _), _) => false
case (_, MethodType(_, _)) => false
case (_, PolyType(_, _)) => false
case _ =>
alwaysMatchSimple || tp1 =:= tp2
}
}
*/
/** Are `syms1` and `syms2` parameter lists with pairwise equivalent types? */
protected[internal] def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
case Nil =>
syms2.isEmpty
case sym1 :: rest1 =>
syms2 match {
case Nil =>
false
case sym2 :: rest2 =>
val tp1 = sym1.tpe
val tp2 = sym2.tpe
(tp1 =:= tp2 ||
syms1isJava && tp2.typeSymbol == ObjectClass && tp1.typeSymbol == AnyClass ||
syms2isJava && tp1.typeSymbol == ObjectClass && tp2.typeSymbol == AnyClass) &&
matchingParams(rest1, rest2, syms1isJava, syms2isJava)
}
}
/** Do type arguments `targs` conform to formal parameters `tparams`?
*/
def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
var bounds = instantiatedBounds(pre, owner, tparams, targs)
if (targs exists typeHasAnnotations)
bounds = adaptBoundsToAnnotations(bounds, tparams, targs)
(bounds corresponds targs)(boundsContainType)
}
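/* Editorial sketch (Box is a hypothetical class): given
 *   class Box[A <: AnyRef]
 * instantiating A's bounds with targ = String yields >: Nothing <: AnyRef,
 * which contains String, so isWithinBounds succeeds; with targ = Int it
 * fails, since Int does not conform to the upper bound AnyRef.
 */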
def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
mapList(tparams)(_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
def elimAnonymousClass(t: Type) = t match {
case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass =>
clazz.classBound.asSeenFrom(pre, clazz.owner)
case _ =>
t
}
/** A list of the typevars in a type. */
def typeVarsInType(tp: Type): List[TypeVar] = {
var tvs: List[TypeVar] = Nil
tp foreach {
case t: TypeVar => tvs ::= t
case _ =>
}
tvs.reverse
}
// If this type contains type variables, put them to sleep for a while.
// Don't just wipe them out by replacing them by the corresponding type
// parameter, as that messes up (e.g.) type variables in type refinements.
// Without this, the matchesType call would lead to type variables on both
// sides of a subtyping/equality judgement, which can lead to recursive types
// being constructed. See pos/t0851 for a situation where this happens.
@inline final def suspendingTypeVars[T](tvs: List[TypeVar])(op: => T): T = {
val saved = tvs map (_.suspended)
tvs foreach (_.suspended = true)
try op
finally foreach2(tvs, saved)(_.suspended = _)
}
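/* Typical usage (editorial sketch, mirroring callers in this file): judge a
 * subtype relation without letting the comparison instantiate type variables.
 *   val tvs = typeVarsInType(tp1)
 *   suspendingTypeVars(tvs) { tp1 <:< tp2 }
 */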
final def stripExistentialsAndTypeVars(ts: List[Type], expandLazyBaseType: Boolean = false): (List[Type], List[Symbol]) = {
val needsStripping = ts.exists {
case _: RefinedType | _: TypeVar | _: ExistentialType => true
case _ => false
}
if (!needsStripping) (ts, Nil) // fast path for common case
else {
val tparams = mutable.ListBuffer[Symbol]()
val stripped = mutable.ListBuffer[Type]()
def stripType(tp: Type): Unit = tp match {
case rt: RefinedType if isIntersectionTypeForLazyBaseType(rt) =>
if (expandLazyBaseType)
rt.parents foreach stripType
else {
devWarning(s"Unexpected RefinedType in stripExistentialsAndTypeVars $ts, not expanding")
stripped += tp
}
case ExistentialType(qs, underlying) =>
tparams ++= qs
stripType(underlying)
case tv@TypeVar(_, constr) =>
if (tv.instValid) stripType(constr.inst)
else if (tv.untouchable) stripped += tv
else abort("trying to do lub/glb of typevar " + tv)
case tp => stripped += tp
}
ts foreach stripType
(stripped.toList, tparams.toList)
}
}
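/* Editorial sketch: for ts = List(List[x] forSome { type x }), the result is
 * (List(List[x]), List(x)) -- the existential is unwrapped and its quantified
 * symbols collected, so that a caller such as mergePrefixAndArgs can re-wrap
 * the merged result with existentialAbstraction.
 */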
/** Compute lub (if `variance == Covariant`) or glb (if `variance == Contravariant`) of given list
* of types `tps`. All types in `tps` are typerefs or singletypes
* with the same symbol.
* Return `x` if the computation succeeds with result `x`.
* Return `NoType` if the computation fails.
*/
def mergePrefixAndArgs(tps0: List[Type], variance: Variance, depth: Depth): Type = {
val (tps, tparams) = stripExistentialsAndTypeVars(tps0, expandLazyBaseType = true)
val merged = tps match {
case tp :: Nil => tp
case TypeRef(_, sym, _) :: rest =>
val pres = tps map (_.prefix) // prefix normalizes automatically
val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so should normalize before retrieving arguments
val capturedParams = new ListBuffer[Symbol]
try {
if (sym == ArrayClass && phase.erasedTypes) {
// special treatment for lubs of array types after erasure:
// if argss contain one value type and some other type, the lub is Object
// if argss contain several reference types, the lub is an array over lub of argtypes
if (argss exists typeListIsEmpty) {
NoType // something is wrong: an array without a type arg.
}
else {
val args = argss map (_.head)
if (args.tail forall (_ =:= args.head)) typeRef(pre, sym, List(args.head))
else if (args exists (arg => isPrimitiveValueClass(arg.typeSymbol))) ObjectTpe
else typeRef(pre, sym, List(lub(args)))
}
}
else transposeSafe(argss) match {
case None =>
// transpose freaked out because of irregular argss
// catching just in case (shouldn't happen, but also doesn't cost us)
// [JZ] It happens: see SI-5683.
debuglog(s"transposed irregular matrix!? tps=$tps argss=$argss")
NoType
case Some(argsst) =>
val args = map2(sym.typeParams, argsst) { (tparam, as0) =>
val as = as0.distinct
if (as.size == 1) as.head
else if (depth.isZero) {
log("Giving up merging args: can't unify %s under %s".format(as.mkString(", "), tparam.fullLocationString))
// Don't return "Any" (or "Nothing") when we have to give up due to
// recursion depth. Return NoType, which prevents us from poisoning
// lublist's results. It can recognize the recursion and deal with it, but
// only if we aren't returning invalid types.
NoType
}
else {
if (tparam.variance == variance) lub(as, depth.decr)
else if (tparam.variance == variance.flip) glb(as, depth.decr)
else {
val l = lub(as, depth.decr)
val g = glb(as, depth.decr)
if (l <:< g) l
else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
// just err on the conservative side, i.e. with a bound that is too high.
// if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251
val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
capturedParams += qvar
qvar.tpe
}
}
}
}
if (args contains NoType) NoType
else existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args))
}
} catch {
case ex: MalformedType => NoType
}
case SingleType(_, sym) :: rest =>
val pres = tps map (_.prefix)
val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
try singleType(pre, sym)
catch { case ex: MalformedType => NoType }
case _ =>
abort(s"mergePrefixAndArgs($tps, $variance, $depth): unsupported tps")
}
existentialAbstraction(tparams, merged)
}
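/* Editorial sketch via the public reflection API (which bottoms out in this
 * machinery); exact results may include additional refinements:
 *   import scala.reflect.runtime.universe._
 *   lub(List(typeOf[List[Int]], typeOf[List[String]]))  // ~ List[Any]
 *   glb(List(typeOf[List[Int]], typeOf[List[String]]))  // ~ List[Int with String]
 */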
def addMember(thistp: Type, tp: Type, sym: Symbol): Unit = addMember(thistp, tp, sym, AnyDepth)
/** Make symbol `sym` a member of scope `tp.decls`
* where `thistp` is the narrowed owner type of the scope.
*/
def addMember(thistp: Type, tp: Type, sym: Symbol, depth: Depth) {
assert(sym != NoSymbol)
// debuglog("add member " + sym+":"+sym.info+" to "+thistp) //DEBUG
if (!specializesSym(thistp, sym, depth)) {
if (sym.isTerm)
for (alt <- tp.nonPrivateDecl(sym.name).alternatives)
if (specializesSym(thistp, sym, thistp, alt, depth))
tp.decls unlink alt
tp.decls enter sym
}
}
def isJavaVarargsAncestor(clazz: Symbol) = (
clazz.isClass
&& clazz.isJavaDefined
&& (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)
)
def inheritsJavaVarArgsMethod(clazz: Symbol) =
clazz.thisType.baseClasses exists isJavaVarargsAncestor
// Errors and Diagnostics -----------------------------------------------------
/** A throwable signalling a type error */
class TypeError(var pos: Position, val msg: String) extends Throwable(msg) {
def this(msg: String) = this(NoPosition, msg)
}
// TODO: RecoverableCyclicReference should be separated from TypeError,
// but that would be a big change. Left for further refactoring.
/** An exception for cyclic references from which we can recover */
case class RecoverableCyclicReference(sym: Symbol)
extends TypeError("illegal cyclic reference involving " + sym) {
if (settings.debug) printStackTrace()
}
class NoCommonType(tps: List[Type]) extends Throwable(
"lub/glb of incompatible types: " + tps.mkString("", " and ", "")) with ControlThrowable
/** A throwable signalling a malformed type */
class MalformedType(msg: String) extends TypeError(msg) {
def this(pre: Type, tp: String) = this("malformed type: " + pre + "#" + tp)
}
/** The current indentation string for traces */
private var _indent: String = ""
protected def indent = _indent
protected def indent_=(value: String) = _indent = value
/** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. */
protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
inform(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.getClass+")"*/)
indent = indent + " "
val result = p(tp1, arg2)
indent = indent stripSuffix " "
inform(indent + result)
result
}
/** If option `explaintypes` is set, print a subtype trace for `found <:< required`. */
def explainTypes(found: Type, required: Type) {
if (settings.explaintypes) withTypesExplained(found <:< required)
}
/** If option `explaintypes` is set, print a subtype trace for `op(found, required)`. */
def explainTypes(op: (Type, Type) => Any, found: Type, required: Type) {
if (settings.explaintypes) withTypesExplained(op(found, required))
}
/** Execute `op` while printing a trace of the operations on types executed. */
def withTypesExplained[A](op: => A): A = {
val s = explainSwitch
try { explainSwitch = true; op } finally { explainSwitch = s }
}
def isUnboundedGeneric(tp: Type) = tp match {
case t @ TypeRef(_, sym, _) => sym.isAbstractType && !(t <:< AnyRefTpe)
case _ => false
}
def isBoundedGeneric(tp: Type) = tp match {
case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefTpe)
case TypeRef(_, sym, _) => !isPrimitiveValueClass(sym)
case _ => false
}
// Add serializable to a list of parents, unless one of them already is
def addSerializable(ps: Type*): List[Type] = (
if (ps exists typeIsSubTypeOfSerializable) ps.toList
else (ps :+ SerializableTpe).toList
)
/** Adds the @uncheckedBound annotation if the given `tp` has type arguments */
final def uncheckedBounds(tp: Type): Type = {
if (tp.typeArgs.isEmpty || UncheckedBoundsClass == NoSymbol) tp // second condition for backwards compatibility with older scala-reflect.jar
else tp.withAnnotation(AnnotationInfo marker UncheckedBoundsClass.tpe)
}
/** Members of the given class, other than those inherited
* from Any or AnyRef.
*/
def nonTrivialMembers(clazz: Symbol): Scope = clazz.info.members filterNot isUniversalMember
/** Members which can be imported into other scopes.
*/
def importableMembers(pre: Type): Scope = pre.members filter isImportable
def objToAny(tp: Type): Type =
if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyTpe
else tp
def invalidateTreeTpeCaches(tree: Tree, updatedSyms: List[Symbol]) = if (updatedSyms.nonEmpty)
for (t <- tree if t.tpe != null)
for (tp <- t.tpe) {
invalidateCaches(tp, updatedSyms)
}
def invalidateCaches(t: Type, updatedSyms: List[Symbol]) =
t match {
case st: SingleType if updatedSyms.contains(st.sym) => st.invalidateSingleTypeCaches()
case tr: TypeRef if updatedSyms.contains(tr.sym) => tr.invalidateTypeRefCaches()
case ct: CompoundType if ct.baseClasses.exists(updatedSyms.contains) => ct.invalidatedCompoundTypeCaches()
case _ =>
}
val shorthands = Set(
"scala.collection.immutable.List",
"scala.collection.immutable.Nil",
"scala.collection.Seq",
"scala.collection.Traversable",
"scala.collection.Iterable",
"scala.collection.mutable.StringBuilder",
"scala.collection.IndexedSeq",
"scala.collection.Iterator")
// ----- Hoisted closures and convenience methods, for compile time reductions -------
private[scala] val isTypeVar = (tp: Type) => tp.isInstanceOf[TypeVar]
private[scala] val typeContainsTypeVar = (tp: Type) => tp exists isTypeVar
private[scala] val typeIsNonClassType = (tp: Type) => tp.typeSymbolDirect.isNonClassType
private[scala] val typeIsExistentiallyBound = (tp: Type) => tp.typeSymbol.isExistentiallyBound
private[scala] val typeIsErroneous = (tp: Type) => tp.isErroneous
private[scala] val symTypeIsError = (sym: Symbol) => sym.tpe.isError
private[scala] val treeTpe = (t: Tree) => t.tpe
private[scala] val symTpe = (sym: Symbol) => sym.tpe
private[scala] val symInfo = (sym: Symbol) => sym.info
private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations ne Nil
private[scala] val boundsContainType = (bounds: TypeBounds, tp: Type) => bounds containsType tp
private[scala] val typeListIsEmpty = (ts: List[Type]) => ts.isEmpty
private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableTpe
private[scala] val typeIsNothing = (tp: Type) => tp.typeSymbolDirect eq NothingClass
private[scala] val typeIsAny = (tp: Type) => tp.typeSymbolDirect eq AnyClass
private[scala] val typeIsHigherKinded = (tp: Type) => tp.isHigherKinded
/** The maximum depth of type `tp` */
def typeDepth(tp: Type): Depth = tp match {
case TypeRef(pre, sym, args) => typeDepth(pre) max typeDepth(args).incr
case RefinedType(parents, decls) => typeDepth(parents) max symTypeDepth(decls.toList).incr
case TypeBounds(lo, hi) => typeDepth(lo) max typeDepth(hi)
case MethodType(paramtypes, result) => typeDepth(result)
case NullaryMethodType(result) => typeDepth(result)
case PolyType(tparams, result) => typeDepth(result) max symTypeDepth(tparams).incr
case ExistentialType(tparams, result) => typeDepth(result) max symTypeDepth(tparams).incr
case _ => Depth(1)
}
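/* Editorial sketch: depth grows by one per layer of type application
 * (prefix depth permitting), e.g. typeDepth(Int) == Depth(1),
 * typeDepth(List[Int]) == Depth(2), typeDepth(List[List[Int]]) == Depth(3).
 */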
//OPT replaced with tailrecursive function to save on #closures
// was:
// var d = 0
// for (tp <- tps) d = d max by(tp) //!!!OPT!!!
// d
private[scala] def maxDepth(tps: List[Type]): Depth = {
@tailrec def loop(tps: List[Type], acc: Depth): Depth = tps match {
case tp :: rest => loop(rest, acc max typeDepth(tp))
case _ => acc
}
loop(tps, Depth.Zero)
}
private[scala] def maxbaseTypeSeqDepth(tps: List[Type]): Depth = {
@tailrec def loop(tps: List[Type], acc: Depth): Depth = tps match {
case tp :: rest => loop(rest, acc max tp.baseTypeSeqDepth)
case _ => acc
}
loop(tps, Depth.Zero)
}
@tailrec private def typesContain(tps: List[Type], sym: Symbol): Boolean = tps match {
case tp :: rest => (tp contains sym) || typesContain(rest, sym)
case _ => false
}
@tailrec private def areTrivialTypes(tps: List[Type]): Boolean = tps match {
case tp :: rest => tp.isTrivial && areTrivialTypes(rest)
case _ => true
}
// -------------- Classtags --------------------------------------------------------
implicit val AnnotatedTypeTag = ClassTag[AnnotatedType](classOf[AnnotatedType])
implicit val BoundedWildcardTypeTag = ClassTag[BoundedWildcardType](classOf[BoundedWildcardType])
implicit val ClassInfoTypeTag = ClassTag[ClassInfoType](classOf[ClassInfoType])
implicit val CompoundTypeTag = ClassTag[CompoundType](classOf[CompoundType])
implicit val ConstantTypeTag = ClassTag[ConstantType](classOf[ConstantType])
implicit val ExistentialTypeTag = ClassTag[ExistentialType](classOf[ExistentialType])
implicit val MethodTypeTag = ClassTag[MethodType](classOf[MethodType])
implicit val NullaryMethodTypeTag = ClassTag[NullaryMethodType](classOf[NullaryMethodType])
implicit val PolyTypeTag = ClassTag[PolyType](classOf[PolyType])
implicit val RefinedTypeTag = ClassTag[RefinedType](classOf[RefinedType])
implicit val SingletonTypeTag = ClassTag[SingletonType](classOf[SingletonType])
implicit val SingleTypeTag = ClassTag[SingleType](classOf[SingleType])
implicit val SuperTypeTag = ClassTag[SuperType](classOf[SuperType])
implicit val ThisTypeTag = ClassTag[ThisType](classOf[ThisType])
implicit val TypeBoundsTag = ClassTag[TypeBounds](classOf[TypeBounds])
implicit val TypeRefTag = ClassTag[TypeRef](classOf[TypeRef])
implicit val TypeTagg = ClassTag[Type](classOf[Type])
// -------------- Statistics --------------------------------------------------------
Statistics.newView("#unique types") { if (uniques == null) 0 else uniques.size }
}
object TypeConstants {
final val DefaultLogThreshhold = 50
final val LogPendingBaseTypesThreshold = DefaultLogThreshhold
final val LogVolatileThreshold = DefaultLogThreshhold
}
object TypesStats {
import BaseTypeSeqsStats._
val rawTypeCount = Statistics.newCounter ("#raw type creations")
val subtypeCount = Statistics.newCounter ("#subtype ops")
val sametypeCount = Statistics.newCounter ("#sametype ops")
val lubCount = Statistics.newCounter ("#toplevel lubs/glbs")
val nestedLubCount = Statistics.newCounter ("#all lubs/glbs")
val findMemberCount = Statistics.newCounter ("#findMember ops")
val findMembersCount = Statistics.newCounter ("#findMembers ops")
val noMemberCount = Statistics.newSubCounter(" of which not found", findMemberCount)
val multMemberCount = Statistics.newSubCounter(" of which multiple overloaded", findMemberCount)
val typerNanos = Statistics.newTimer ("time spent typechecking", "typer")
val lubNanos = Statistics.newStackableTimer("time spent in lubs", typerNanos)
val subtypeNanos = Statistics.newStackableTimer("time spent in <:<", typerNanos)
val findMemberNanos = Statistics.newStackableTimer("time spent in findmember", typerNanos)
val findMembersNanos = Statistics.newStackableTimer("time spent in findmembers", typerNanos)
val asSeenFromNanos = Statistics.newStackableTimer("time spent in asSeenFrom", typerNanos)
val baseTypeSeqNanos = Statistics.newStackableTimer("time spent in baseTypeSeq", typerNanos)
val baseClassesNanos = Statistics.newStackableTimer("time spent in baseClasses", typerNanos)
val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
val typeOpsStack = Statistics.newTimerStack()
/* Commented out, because right now this does not inline, so creates a closure which will distort statistics
@inline final def timedTypeOp[T](c: Statistics.StackableTimer)(op: => T): T = {
val start = Statistics.pushTimer(typeOpsStack, c)
try op
    finally Statistics.popTimer(typeOpsStack, start)
}
*/
}
|
slothspot/scala
|
src/reflect/scala/reflect/internal/Types.scala
|
Scala
|
bsd-3-clause
| 210,001
|
package net.paploo.diestats.statistics.util
/**
* Marker trait for Monoids that obey commutativity:
* concat(x, y) === concat(y, x)
* @tparam A
*/
trait CommutativeMonoid[A] extends Monoid[A]
object CommutativeMonoid {
def apply[A](emptyValue: => A)(concatFunction: (A, A) => A): CommutativeMonoid[A] = new CommutativeMonoid[A] {
override def concat(x: A, y: A): A = concatFunction(x, y)
override def empty: A = emptyValue
}
class AdditiveMonoid[N](implicit num: Numeric[N]) extends CommutativeMonoid[N] {
override def concat(x: N, y: N): N = num.plus(x, y)
override def empty: N = num.zero
}
def AdditiveMonoid[N](implicit num: Numeric[N]): CommutativeMonoid[N] = new AdditiveMonoid[N]()
implicit val AdditiveIntMonoid: CommutativeMonoid[Int] = new AdditiveMonoid[Int]()
implicit val AdditiveLongMonoid: CommutativeMonoid[Long] = new AdditiveMonoid[Long]()
implicit val AdditiveDoubleMonoid: CommutativeMonoid[Double] = new AdditiveMonoid[Double]()
implicit val AdditiveProbabilityMonoid: CommutativeMonoid[Probability] = new AdditiveMonoid[Probability]()(Probability.ProbabilityIsConflicted)
class MultiplicativeMonoid[N](implicit num: Numeric[N]) extends CommutativeMonoid[N] {
override def concat(x: N, y: N): N = num.times(x, y)
override def empty: N = num.one
}
def MultiplicativeMonoid[N](implicit num: Numeric[N]): CommutativeMonoid[N] = new MultiplicativeMonoid[N]()
val MultiplicativeIntMonoid: CommutativeMonoid[Int] = new MultiplicativeMonoid[Int]()
val MultiplicativeLongMonoid: CommutativeMonoid[Long] = new MultiplicativeMonoid[Long]()
val MultiplicativeDoubleMonoid: CommutativeMonoid[Double] = new MultiplicativeMonoid[Double]()
val MultiplicativeProbabilityMonoid: Monoid[Probability] = new MultiplicativeMonoid[Probability]()(Probability.ProbabilityIsConflicted)
/**
* Often used with convolve where the order doesn't matter, but the counts do;
* in most cases, it is clearer to convolve over a Map[A, Long] instead.
*
* Example:
 * convolving a coin, Seq(Tails) and Seq(Heads), with itself combines the two
 * orderings of a heads and a tails into a single sorted value, Seq(Tails, Heads),
 * instead of counting Seq(Tails, Heads) and Seq(Heads, Tails) separately.
* @param ord
* @tparam A
*/
class SeqMonoid[A](implicit ord: Ordering[A]) extends CommutativeMonoid[Seq[A]] {
override def concat(x: Seq[A], y: Seq[A]): Seq[A] = (x ++ y).sorted
override def empty: Seq[A] = Vector.empty[A]
}
def SeqMonoid[A](implicit ord: Ordering[A]): CommutativeMonoid[Seq[A]] = new SeqMonoid[A]()
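  /* Illustrative usage (editorial sketch):
   *   val add = CommutativeMonoid.AdditiveMonoid[Int]
   *   add.concat(2, 3) == add.concat(3, 2)   // commutativity: both are 5
   *   val seqs = CommutativeMonoid.SeqMonoid[Int]
   *   seqs.concat(Seq(2, 1), Seq(3))         // Seq(1, 2, 3): order-normalized
   */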
}
|
paploo/DieStats
|
src/main/scala/net/paploo/diestats/statistics/util/CommutativeMonoid.scala
|
Scala
|
bsd-3-clause
| 2,622
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.sources
import javax.annotation.concurrent.GuardedBy
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal
import org.apache.spark.internal.Logging
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, Statistics}
import org.apache.spark.sql.catalyst.plans.logical.statsEstimation.EstimationUtils
import org.apache.spark.sql.catalyst.streaming.InternalOutputModes.{Append, Complete, Update}
import org.apache.spark.sql.execution.streaming.{MemorySinkBase, Sink}
import org.apache.spark.sql.sources.v2.{DataSourceOptions, DataSourceV2, StreamingWriteSupportProvider}
import org.apache.spark.sql.sources.v2.writer._
import org.apache.spark.sql.sources.v2.writer.streaming.{StreamingDataWriterFactory, StreamingWriteSupport}
import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.sql.types.StructType
/**
* A sink that stores the results in memory. This [[Sink]] is primarily intended for use in unit
* tests and does not provide durability.
*/
class MemorySinkV2 extends DataSourceV2 with StreamingWriteSupportProvider
with MemorySinkBase with Logging {
override def createStreamingWriteSupport(
queryId: String,
schema: StructType,
mode: OutputMode,
options: DataSourceOptions): StreamingWriteSupport = {
new MemoryStreamingWriteSupport(this, mode, schema)
}
private case class AddedData(batchId: Long, data: Array[Row])
/** An ordered list of batches that have been written to this [[Sink]]. */
@GuardedBy("this")
private val batches = new ArrayBuffer[AddedData]()
/** Returns all rows that are stored in this [[Sink]]. */
def allData: Seq[Row] = synchronized {
batches.flatMap(_.data)
}
def latestBatchId: Option[Long] = synchronized {
batches.lastOption.map(_.batchId)
}
def latestBatchData: Seq[Row] = synchronized {
batches.lastOption.toSeq.flatten(_.data)
}
def dataSinceBatch(sinceBatchId: Long): Seq[Row] = synchronized {
batches.filter(_.batchId > sinceBatchId).flatMap(_.data)
}
def toDebugString: String = synchronized {
batches.map { case AddedData(batchId, data) =>
val dataStr = try data.mkString(" ") catch {
case NonFatal(e) => "[Error converting to string]"
}
s"$batchId: $dataStr"
}.mkString("\n")
}
def write(batchId: Long, outputMode: OutputMode, newRows: Array[Row]): Unit = {
val notCommitted = synchronized {
latestBatchId.isEmpty || batchId > latestBatchId.get
}
if (notCommitted) {
logDebug(s"Committing batch $batchId to $this")
outputMode match {
case Append | Update =>
val rows = AddedData(batchId, newRows)
synchronized { batches += rows }
case Complete =>
val rows = AddedData(batchId, newRows)
synchronized {
batches.clear()
batches += rows
}
case _ =>
throw new IllegalArgumentException(
s"Output mode $outputMode is not supported by MemorySinkV2")
}
} else {
logDebug(s"Skipping already committed batch: $batchId")
}
}
def clear(): Unit = synchronized {
batches.clear()
}
override def toString(): String = "MemorySinkV2"
}
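/* Editorial sketch of the output-mode semantics above (`sink` and r1..r4 are
 * hypothetical):
 *   sink.write(0, Append, Array(r1))    // batches: [r1]
 *   sink.write(1, Update, Array(r2))    // batches: [r1, r2] (Update appends here too)
 *   sink.write(2, Complete, Array(r3))  // batches: [r3], earlier batches cleared
 *   sink.write(2, Append, Array(r4))    // skipped: batch 2 is already committed
 */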
case class MemoryWriterCommitMessage(partition: Int, data: Seq[Row])
extends WriterCommitMessage {}
class MemoryStreamingWriteSupport(
val sink: MemorySinkV2, outputMode: OutputMode, schema: StructType)
extends StreamingWriteSupport {
override def createStreamingWriterFactory: MemoryWriterFactory = {
MemoryWriterFactory(outputMode, schema)
}
override def commit(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {
val newRows = messages.flatMap {
case message: MemoryWriterCommitMessage => message.data
}
sink.write(epochId, outputMode, newRows)
}
override def abort(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {
// Don't accept any of the new input.
}
}
case class MemoryWriterFactory(outputMode: OutputMode, schema: StructType)
extends DataWriterFactory with StreamingDataWriterFactory {
override def createWriter(
partitionId: Int,
taskId: Long): DataWriter[InternalRow] = {
new MemoryDataWriter(partitionId, outputMode, schema)
}
override def createWriter(
partitionId: Int,
taskId: Long,
epochId: Long): DataWriter[InternalRow] = {
createWriter(partitionId, taskId)
}
}
class MemoryDataWriter(partition: Int, outputMode: OutputMode, schema: StructType)
extends DataWriter[InternalRow] with Logging {
private val data = mutable.Buffer[Row]()
private val encoder = RowEncoder(schema).resolveAndBind()
override def write(row: InternalRow): Unit = {
data.append(encoder.fromRow(row))
}
override def commit(): MemoryWriterCommitMessage = {
val msg = MemoryWriterCommitMessage(partition, data.clone())
data.clear()
msg
}
override def abort(): Unit = {}
}
/**
* Used to query the data that has been written into a [[MemorySinkV2]].
*/
case class MemoryPlanV2(sink: MemorySinkV2, override val output: Seq[Attribute]) extends LeafNode {
private val sizePerRow = EstimationUtils.getSizePerRow(output)
override def computeStats(): Statistics = Statistics(sizePerRow * sink.allData.size)
}
|
michalsenkyr/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/sources/memoryV2.scala
|
Scala
|
apache-2.0
| 6,366
|
package org.deeplearning4j.ui.views.html.training
import play.twirl.api._
import play.twirl.api.TemplateMagic._
object TrainingHelp_Scope0 {
import models._
import controllers._
import play.api.i18n._
import views.html._
import play.api.templates.PlayMagic._
import play.api.mvc._
import play.api.data._
class TrainingHelp extends BaseScalaTemplate[play.twirl.api.HtmlFormat.Appendable,Format[play.twirl.api.HtmlFormat.Appendable]](play.twirl.api.HtmlFormat) with play.twirl.api.Template1[org.deeplearning4j.ui.api.I18N,play.twirl.api.HtmlFormat.Appendable] {
/**/
def apply/*1.2*/(i18n: org.deeplearning4j.ui.api.I18N):play.twirl.api.HtmlFormat.Appendable = {
_display_ {
{
Seq[Any](format.raw/*1.40*/("""
"""),format.raw/*2.1*/("""<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>"""),_display_(/*7.17*/i18n/*7.21*/.getMessage("train.pagetitle")),format.raw/*7.51*/("""</title>
<!-- Start Mobile Specific -->
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- End Mobile Specific -->
<link id="bootstrap-style" href="/assets/css/bootstrap.min.css" rel="stylesheet">
<link href="/assets/css/bootstrap-responsive.min.css" rel="stylesheet">
<link id="base-style" href="/assets/css/style.css" rel="stylesheet">
<link id="base-style-responsive" href="/assets/css/style-responsive.css" rel="stylesheet">
<link href='http://fonts.googleapis.com/css?family=Open+Sans:300italic,400italic,600italic,700italic,800italic,400,300,600,700,800&subset=latin,cyrillic-ext,latin-ext' rel='stylesheet' type='text/css'>
<link rel="shortcut icon" href="/assets/img/favicon.ico">
<!-- The HTML5 shim, for IE6-8 support of HTML5 elements -->
<!--[if lt IE 9]>
<script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script>
<link id="ie-style" href="/assets/css/ie.css" rel="stylesheet"/>
<![endif]-->
<!--[if IE 9]>
<link id="ie9style" href="/assets/css/ie9.css" rel="stylesheet"/>
<![endif]-->
</head>
<body>
<!-- Start Header -->
<div class="navbar">
<div class="navbar-inner">
<div class="container-fluid">
<a class="btn btn-navbar" data-toggle="collapse" data-target=".top-nav.nav-collapse,.sidebar-nav.nav-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</a>
<a class="brand" href="#"><span>"""),_display_(/*41.54*/i18n/*41.58*/.getMessage("train.pagetitle")),format.raw/*41.88*/("""</span></a>
</div>
</div>
</div>
<!-- End Header -->
<div class="container-fluid-full">
<div class="row-fluid">
<!-- Start Main Menu -->
<div id="sidebar-left" class="span2">
<div class="nav-collapse sidebar-nav">
<ul class="nav nav-tabs nav-stacked main-menu">
<li><a href="overview"><i class="icon-bar-chart"></i><span class="hidden-tablet"> """),_display_(/*54.112*/i18n/*54.116*/.getMessage("train.nav.overview")),format.raw/*54.149*/("""</span></a></li>
<li><a href="model"><i class="icon-tasks"></i><span class="hidden-tablet"> """),_display_(/*55.105*/i18n/*55.109*/.getMessage("train.nav.model")),format.raw/*55.139*/("""</span></a></li>
<li><a href="system"><i class="icon-dashboard"></i><span class="hidden-tablet"> """),_display_(/*56.110*/i18n/*56.114*/.getMessage("train.nav.system")),format.raw/*56.145*/("""</span></a></li>
<li class="active"><a href="javascript:void(0);"><i class="icon-star"></i><span class="hidden-tablet"> """),_display_(/*57.133*/i18n/*57.137*/.getMessage("train.nav.userguide")),format.raw/*57.171*/("""</span></a></li>
<li>
<a class="dropmenu" href="javascript:void(0);"><i class="icon-folder-close-alt"></i><span class="hidden-tablet">"""),_display_(/*59.146*/i18n/*59.150*/.getMessage("train.nav.language")),format.raw/*59.183*/("""</span></a>
<ul>
<li><a class="submenu" href="javascript:void(0);" onclick="languageSelect('en', 'help')"><i class="icon-file-alt"></i> <span class="hidden-tablet"> English</span></a></li>
<li><a class="submenu" href="javascript:void(0);" onclick="languageSelect('ja', 'help')"><i class="icon-file-alt"></i> <span class="hidden-tablet"> 日本語</span></a></li>
<li><a class="submenu" href="javascript:void(0);" onclick="languageSelect('zh', 'help')"><i class="icon-file-alt"></i> <span class="hidden-tablet"> 中文</span></a></li>
<li><a class="submenu" href="javascript:void(0);" onclick="languageSelect('ko', 'help')"><i class="icon-file-alt"></i> <span class="hidden-tablet"> 한글</span></a></li>
<li><a class="submenu" href="javascript:void(0);" onclick="languageSelect('ru', 'help')"><i class="icon-file-alt"></i> <span class="hidden-tablet"> русский</span></a></li>
<li><a class="submenu" href="javascript:void(0);" onclick="languageSelect('uk', 'help')"><i class="icon-file-alt"></i> <span class="hidden-tablet"> український</span></a></li>
</ul>
</li>
</ul>
</div>
</div>
<!-- End Main Menu -->
<noscript>
<div class="alert alert-block span10">
<h4 class="alert-heading">Warning!</h4>
<p>You need to have <a href="http://en.wikipedia.org/wiki/JavaScript" target="_blank">
JavaScript</a> enabled to use this site.</p>
</div>
</noscript>
<!-- Start Content -->
<div id="content" class="span10">
<!-- Begin User Guide -->
<div class="row-fluid">
<div class="box span9">
<div class="box-header">
<h2><b>User Guide</b></h2>
</div>
<div class="box-content">
<div class="page-header">
<h1>Deeplearning4j <small>Training UI</small></h1>
</div>
<div class="row-fluid">
<div class="span8">
<h1><small>Welcome!</small></h1>
<p>
                                Welcome to the Deeplearning4j Training UI! DL4J provides the HistogramIterationListener as a way to visualize the progress of network training in your browser, in real time. Here’s an excellent <a href="https://cs231n.github.io/neural-networks-3/#baby" target="_blank">
web page by Andrej Karpathy</a>
about visualizing neural net training. It is worth reading that page first.
</p>
</div>
<div class="span4">
<div class="well">
<h1><small>
If there's any confusion, please ask our engineers in <a href="https://gitter.im/deeplearning4j/deeplearning4j" target="_blank">
Gitter</a>.</small></h1>
</div>
</div>
</div>
<h1><small>Overview Tab</small></h1>
<div class="row-fluid">
<div class="span6">
<h2>Score vs Iteration: Snapshot</h2>
<ul>
<li>Score vs. iteration should (overall) go down over time.</li>
<ul>
<li>If the score increases consistently, your learning rate is likely set too high. Try reducing it until scores become more stable.</li>
<li>Increasing scores can also be indicative of other network issues, such as incorrect data normalization.</li>
                                    <li>If the score is flat or decreases very slowly (over a few hundred iterations), then (a) your learning rate may be too low, or (b) you might be having difficulties with optimization. In the latter case, if you are using the SGD updater, try a different updater such as momentum, RMSProp or Adagrad.</li>
<li>Note that data that isn’t shuffled (i.e., each minibatch contains only one class, for classification) can result in very rough or abnormal-looking score vs. iteration graphs.</li>
</ul>
                                    <li>Some noise in this line chart is expected (i.e., the line will go up and down within a small range). However, if the variation between runs is very large, this can be a problem.</li>
</ul>
</div>
<div class="span6">
<h2>Model Performance</h2>
<p>
The table contains basic model performance metrics.<br><br>
<b>Model Type</b> - MultiLayerNetwork or...<br>
<b>nLayers</b> - Number of layers.<br>
<b>nParams</b> - Number of parameters.<br>
<b>Total Runtime</b> - Explain importance<br>
<b>Last Update</b> - Explain importance<br>
<b>Total Parameter Updates</b> - Explain importance<br>
<b>Updates Per Second</b> - Explain importance<br>
<b>Examples Per Second</b> - Explain importance
</p>
</div>
</div>
<div class="row-fluid">
<div class="span6">
<h2>Ratio of Updates to Parameters: All Layers</h2>
<p>
Need Explanation Here.
</p>
</div>
<div class="span6">
<h2>Variances</h2>
<p>
Need Explanation Here.
</p>
</div>
</div>
<h1><small>Model Tab</small></h1>
<div class="row-fluid">
<div class="span6">
<h2>Layer Visualization UI</h2>
<p>
                                        The layer visualization UI renders the network structure dynamically. Users can inspect the layer and node parameters by clicking on the various elements of the GUI to see general information about layers/nodes, as well as overall network information such as performance.
</p>
</div>
<div class="span6">
<h2>Layer Information</h2>
<p>
The table contains basic layer information.<br><br>
                                        <b>Name</b> - The layer name.<br>
                                        <b>Type</b> - The layer type.<br>
                                        <b>Inputs</b> - Number of inputs to the layer.<br>
                                        <b>Outputs</b> - Number of outputs from the layer.<br>
<b>Activation Function</b> - Explain importance<br>
<b>Learning Rate</b> - Explain importance
</p>
</div>
</div>
<div class="row-fluid">
<div class="span6">
<h2>Mean Magnitudes</h2>
<p>
<ul>
<li>At the right is a line chart of the mean magnitude of both the parameters and the updates in the neural network.</li>
<ul>
<li>“Mean magnitude” = the average of the absolute value of the parameters or updates.</li>
</ul>
<li>For tuning the learning rate, the ratio of parameters to updates for a layer should be somewhere in the order of 1000:1 - but note that is a rough guide only, and may not be appropriate for all networks. It’s often a good starting point, however.</li>
<ul>
                                    <li>If the ratio diverges significantly from this, your parameters may be too unstable to learn useful features, or may change too slowly to learn them.</li>
<li>To change this ratio, adjust your learning rate (or sometimes, parameter initialization). In some networks, you may need to set the learning rate differently for different layers.</li>
</ul>
                                    <li>Keep an eye out for unusually large spikes in the updates: this may indicate exploding gradients (see the discussion in the “Updates Histogram” section below)</li>
                                    </ul>
                                    </p>
</div>
<div class="span6">
<h2>Activations</h2>
<p>
Need Explanation Here.
</p>
</div>
</div>
<div class="row-fluid">
<div class="span6">
<h2>Parameters Histogram</h2>
<p>
<ul>
<li>At the top right is a histogram of the weights in the neural network (at the last iteration), split up by layer and the type of parameter. For example, “param_0_W” refers to the weight parameters for the first layer.</li>
<li>For weights, these histograms should have an approximately Gaussian (normal) distribution, after some time.</li>
<li>For biases, these histograms will generally start at 0, and will usually end up being approximately Gaussian.</li>
<ul>
                                    <li>One exception to this is for LSTM recurrent neural network layers: the biases for one gate (the forget gate) are set to 1.0 by default (though this is configurable), to help in learning dependencies across long time periods. This results in the bias graphs initially having many biases around 0.0, with another set of biases around 1.0.</li>
</ul>
<li>Keep an eye out for parameters that are diverging to +/- infinity: this may be due to too high a learning rate, or insufficient regularization (try adding some L2 regularization to your network).</li>
<li>Keep an eye out for biases that become very large. This can sometimes occur in the output layer for classification, if the distribution of classes is very imbalanced</li>
</ul>
</p>
</div>
<div class="span6">
<h2>Updates Histogram</h2>
<p>
<ul>
<li>At the bottom left is the histogram of updates for the neural network (at the last iteration), also split up by layer and type of parameter.</li>
<ul>
                                    <li>Note that these are the updates - i.e., the gradients after applying the learning rate, momentum, regularization etc.</li>
</ul>
<li>As with the parameter graphs, these should have an approximately Gaussian (normal) distribution.</li>
<li>Keep an eye out for very large values: this can indicate exploding gradients in your network.</li>
<ul>
<li>Exploding gradients are problematic as they can ‘mess up’ the parameters of your network.</li>
<li>In this case, it may indicate a weight initialization, learning rate or input/labels data normalization issue.</li>
<li>In the case of recurrent neural networks, adding some gradient normalization or gradient clipping can frequently help.</li>
</ul>
</ul>
</p>
</div>
</div>
<h1><small>System Tab</small></h1>
<div class="row-fluid">
<div class="span6">
<h2>JVM Memory Utilization</h2>
<p>
Need Explanation Here.
</p>
</div>
<div class="span6">
<h2>Off-Heap Memory Utilization</h2>
<p>
Need Explanation Here.
</p>
</div>
</div>
<div class="row-fluid">
<div class="span6">
<h2>Hardware Information</h2>
<p>
The table contains basic hardware metrics.<br><br>
<b>JVM Current Memory</b> - What this means.<br>
<b>JVM Max Memory</b> - What this means.<br>
<b>Off-Heap Current Memory</b> - What this means.<br>
<b>Off-Heap Current Memory</b> - What this means.<br>
<b>JVM Available Processors</b> - What this means.<br>
<b>Number Compute Devices</b> - What this means.
</p>
</div>
<div class="span6">
<h2>Software Information</h2>
<p>
The table contains basic software information.<br><br>
<b>OS</b> - What this means.<br>
<b>Host Name</b> - What this means.<br>
<b>OS Architecture</b> - What this means.<br>
<b>JVM Name</b> - What this means.<br>
<b>JVM Version</b> - What this means.<br>
<b>ND4J Backend</b> - What this means.<br>
<b>ND4J Datatype</b> - What this means.
</p>
</div>
</div>
<div class="row-fluid">
<div class="span6">
<h2>GPU Specific Graph?</h2>
<p>
Need Explanation Here.
</p>
</div>
<div class="span6">
<h2>GPU Specific Table?</h2>
<p>
Need Explanation Here.
</p>
</div>
</div>
</div>
</div>
<!-- End User Guide -->
<!-- Begin Table of Contents -->
<div class="box span3">
<div class="box-header">
<h2><b>Table of Contents</b></h2>
</div>
<div class="box-content">
<dl>
<dt>Overview</dt>
<dd>Snapshot of your model performance.</dd>
<dt>Model</dt>
<dd>Layer by layer inspection tool.</dd>
<dt>System</dt>
<dd>Memory utilization dashboard as well as system configurations across multiple machines.</dd>
<dt>Language</dt>
                                <dd>Switch between English, Japanese, Chinese, Korean, Ukrainian and Russian.</dd>
</dl>
</div>
</div><!-- End Table of Contents -->
</div><!-- End Row Fluid -->
</div>
"""),
format.raw(""" <!-- End Content -->
</div><!--End Row Fluid -->
</div><!-- End Container Fluid Full-->
<!-- Start JavaScript-->
<script src="/assets/js/jquery-1.9.1.min.js"></script>
<script src="/assets/js/jquery-migrate-1.0.0.min.js"></script>
<script src="/assets/js/jquery-ui-1.10.0.custom.min.js"></script>
<script src="/assets/js/jquery.ui.touch-punch.js"></script>
<script src="/assets/js/modernizr.js"></script>
<script src="/assets/js/bootstrap.min.js"></script>
<script src="/assets/js/jquery.cookie.js"></script>
<script src="/assets/js/fullcalendar.min.js"></script>
<script src="/assets/js/jquery.dataTables.min.js"></script>
<script src="/assets/js/excanvas.js"></script>
<script src="/assets/js/jquery.flot.js"></script>
<script src="/assets/js/jquery.flot.pie.js"></script>
<script src="/assets/js/jquery.flot.stack.js"></script>
<script src="/assets/js/jquery.flot.resize.min.js"></script>
<script src="/assets/js/jquery.chosen.min.js"></script>
<script src="/assets/js/jquery.uniform.min.js"></script>
<script src="/assets/js/jquery.cleditor.min.js"></script>
<script src="/assets/js/jquery.noty.js"></script>
<script src="/assets/js/jquery.elfinder.min.js"></script>
<script src="/assets/js/jquery.raty.min.js"></script>
<script src="/assets/js/jquery.iphone.toggle.js"></script>
<script src="/assets/js/jquery.uploadify-3.1.min.js"></script>
<script src="/assets/js/jquery.gritter.min.js"></script>
<script src="/assets/js/jquery.imagesloaded.js"></script>
<script src="/assets/js/jquery.masonry.min.js"></script>
<script src="/assets/js/jquery.knob.modified.js"></script>
<script src="/assets/js/jquery.sparkline.min.js"></script>
<script src="/assets/js/counter.js"></script>
<script src="/assets/js/retina.js"></script>
<script src="/assets/js/train/train.js"></script> <!-- Common (lang selection, etc) -->
<!-- End JavaScript-->
</body>
</html>
"""))
}
}
}
def render(i18n:org.deeplearning4j.ui.api.I18N): play.twirl.api.HtmlFormat.Appendable = apply(i18n)
def f:((org.deeplearning4j.ui.api.I18N) => play.twirl.api.HtmlFormat.Appendable) = (i18n) => apply(i18n)
def ref: this.type = this
}
}
/**/
object TrainingHelp extends TrainingHelp_Scope0.TrainingHelp
/*
-- GENERATED --
DATE: Sun Nov 27 14:24:01 AEDT 2016
SOURCE: C:/DL4J/Git/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-play/src/main/views/org/deeplearning4j/ui/views/training/TrainingHelp.scala.html
HASH: f6899558de3ff2ca1c02dc5fccdb266b29b2a6dc
MATRIX: 596->1|729->39|757->41|880->138|892->142|942->172|2718->1921|2731->1925|2782->1955|3346->2491|3360->2495|3415->2528|3565->2650|3579->2654|3631->2684|3786->2811|3800->2815|3853->2846|4031->2996|4045->3000|4101->3034|4326->3231|4340->3235|4395->3268
LINES: 20->1|25->1|26->2|31->7|31->7|31->7|65->41|65->41|65->41|78->54|78->54|78->54|79->55|79->55|79->55|80->56|80->56|80->56|81->57|81->57|81->57|83->59|83->59|83->59
-- GENERATED --
*/
|
xuzhongxing/deeplearning4j
|
deeplearning4j-ui-parent/deeplearning4j-play/src/main/scala/org/deeplearning4j/ui/views/html/training/TrainingHelp.template.scala
|
Scala
|
apache-2.0
| 27,503
|
/*
Copyright 2013, 2014 NICTA
This file is part of t3as (Text Analysis As A Service).
t3as is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
t3as is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with t3as. If not, see <http://www.gnu.org/licenses/>.
*/
package org.t3as.patClas.common.search
import java.io.Reader
import scala.collection.JavaConversions.mapAsJavaMap
import scala.language.implicitConversions
import org.apache.lucene.analysis.Analyzer
import org.apache.lucene.analysis.Analyzer.TokenStreamComponents
import org.apache.lucene.analysis.core.{KeywordAnalyzer, KeywordTokenizer, LowerCaseFilter, StopFilter}
import org.apache.lucene.analysis.en.EnglishAnalyzer
import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper
import org.apache.lucene.analysis.standard.{StandardAnalyzer, StandardFilter, StandardTokenizer}
import org.apache.lucene.util.Version
/** Constants shared by indexer and searcher.
*/
object Constants {
val version = Version.LUCENE_48
/** Like EnglishAnalyzer, but without the EnglishPossessiveFilter and PorterStemFilter.
* EnglishAnalyzer uses the following Lucene components:
* StandardTokenizer, StandardFilter, EnglishPossessiveFilter, LowerCaseFilter, StopFilter, PorterStemFilter.
*/
class EnglishUnstemmedAnalyzer(version: Version) extends Analyzer {
protected def createComponents(fieldName: String, reader: Reader) = {
val source = new StandardTokenizer(version, reader)
new TokenStreamComponents(
source,
new StopFilter(
version,
new LowerCaseFilter(version, new StandardFilter(version, source)),
StandardAnalyzer.STOP_WORDS_SET))
}
}
class LowercaseAnalyzer(version: Version) extends Analyzer {
protected def createComponents(fieldName: String, reader: Reader) = {
val source = new KeywordTokenizer(reader)
new TokenStreamComponents(
source,
new LowerCaseFilter(version, source))
}
}
// the analyzer is used with TextFields (textFieldType), but not with StringFields (keywordFieldType).
// It uses the following Lucene components:
// StandardTokenizer, StandardFilter, EnglishPossessiveFilter, LowerCaseFilter, StopFilter, PorterStemFilter.
val stemmedAnalyzer = new EnglishAnalyzer(version)
// as above, but without stemming
val unstemmedAnalyzer: Analyzer = new EnglishUnstemmedAnalyzer(version)
val lowercaseAnalyzer = new LowercaseAnalyzer(version)
private def mkAnalyzer(textFields: List[String], unstemmedFields: List[String]) = {
val fieldToAnalyzer = (textFields.iterator.map((_, stemmedAnalyzer)) ++ unstemmedFields.iterator.map((_, unstemmedAnalyzer))).toMap
new PerFieldAnalyzerWrapper(lowercaseAnalyzer, fieldToAnalyzer)
}
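/* Editorial sketch: PerFieldAnalyzerWrapper routes by field name, falling back
 * to the lowercase keyword analyzer for unlisted fields. For a hypothetical
 * mkAnalyzer(List("title"), List("titleUnstemmed")):
 *   "title"          -> stemmedAnalyzer   (EnglishAnalyzer, with Porter stemming)
 *   "titleUnstemmed" -> unstemmedAnalyzer (no possessive filter or stemming)
 *   anything else    -> lowercaseAnalyzer (whole value as one token, lowercased)
 */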
val cpcAnalyzer = {
import org.t3as.patClas.common.CPCUtil._
mkAnalyzer(analyzerTextFields, analyzerUnstemmedTextFields)
}
val ipcAnalyzer = {
import org.t3as.patClas.common.IPCUtil._
mkAnalyzer(analyzerTextFields, analyzerUnstemmedTextFields)
}
val uspcAnalyzer = {
import org.t3as.patClas.common.USPCUtil._
mkAnalyzer(textFields, unstemmedTextFields)
}
}
|
NICTA/t3as-pat-clas
|
pat-clas-common/src/main/scala/org/t3as/patClas/common/search/Constants.scala
|
Scala
|
gpl-3.0
| 3,635
|
package org.bfn.ninetynineprobs
import org.scalatest._
class P38Spec extends UnitSpec {
// TODO
}
|
bfontaine/99Scala
|
src/test/scala/P38Spec.scala
|
Scala
|
mit
| 105
|
package com.github.morikuni.locest.frequency.test.helper
import com.github.morikuni.locest.frequency.domain.model.Word
import com.github.morikuni.locest.frequency.domain.repository.{WordRepository, WordRepositorySession}
import com.github.morikuni.locest.util.Transaction
import org.specs2.mock.Mockito
object WordRepositoryHelper extends Mockito {
def create(create: Transaction[WordRepositorySession, Word]): WordRepository = {
val repo = mock[WordRepository]
repo.create(any) returns create
repo
}
}
|
morikuni/locest
|
frequency/test/com/github/morikuni/locest/frequency/test/helper/WordRepositoryHelper.scala
|
Scala
|
mit
| 521
|
/**
* Copyright (C) 2014-2015 Really Inc. <http://really.io>
*/
package io.really.token
package object generator {
object AuthType extends Enumeration {
type AuthType = Value
val Anonymous, Password = Value
def Custom(provider: String) = Value(provider)
}
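  /* Illustrative usage (editorial sketch): predefined values plus ad-hoc
   * provider values created on demand.
   *   AuthType.Password           // predefined Value
   *   AuthType.Custom("github")   // new Value named after the provider
   */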
}
|
reallylabs/really
|
modules/really-simple-auth/app/lib/io/really/token/generator/package.scala
|
Scala
|
apache-2.0
| 280
|
package com.optrak.testakka.simple.impl
import com.optrak.testakka.api.GreetingMessage
import com.optrak.testakka.simple.api.SimpleService
import org.scalatest.{AsyncWordSpec, Matchers}
trait SimpleServiceTestBase extends AsyncWordSpec with Matchers {
def simpleService: SimpleService
"test-akka-integration service" should {
"say hello" in {
simpleService.hello("Alice").invoke().map { answer =>
answer should ===("Hello, Alice!")
}
}
"allow responding with a custom message" in {
for {
_ <- simpleService.useGreeting("Bob").invoke(GreetingMessage("Hi"))
answer <- simpleService.hello("Bob").invoke()
} yield {
answer should ===("Hi, Bob!")
}
}
}
}
|
Optrak/lagom-testbed
|
test-akka-integration/akka-model/simple/impl/src/test/scala/com/optrak/testakka/simple/impl/SimpleServiceTestBase.scala
|
Scala
|
apache-2.0
| 740
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.loadBalancer.test
import scala.collection.mutable
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.concurrent.Future
import org.apache.kafka.clients.producer.RecordMetadata
import org.apache.kafka.common.TopicPartition
import org.junit.runner.RunWith
import org.scalamock.scalatest.MockFactory
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FlatSpecLike
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner
import akka.actor.ActorRef
import akka.actor.ActorRefFactory
import akka.actor.ActorSystem
import akka.actor.FSM
import akka.actor.FSM.CurrentState
import akka.actor.FSM.SubscribeTransitionCallBack
import akka.actor.FSM.Transition
import akka.pattern.ask
import akka.testkit.ImplicitSender
import akka.testkit.TestFSMRef
import akka.testkit.TestKit
import akka.testkit.TestProbe
import akka.util.Timeout
import common.{LoggedFunction, StreamLogging}
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.WhiskConfig
import org.apache.openwhisk.core.connector.ActivationMessage
import org.apache.openwhisk.core.connector.PingMessage
import org.apache.openwhisk.core.entity.ActivationId.ActivationIdGenerator
import org.apache.openwhisk.core.entity._
import org.apache.openwhisk.core.entity.size._
import org.apache.openwhisk.core.loadBalancer.ActivationRequest
import org.apache.openwhisk.core.loadBalancer.GetStatus
import org.apache.openwhisk.core.loadBalancer.InvokerState._
import org.apache.openwhisk.core.loadBalancer.InvocationFinishedResult
import org.apache.openwhisk.core.loadBalancer.InvocationFinishedMessage
import org.apache.openwhisk.core.loadBalancer.InvokerActor
import org.apache.openwhisk.core.loadBalancer.InvokerPool
import org.apache.openwhisk.core.loadBalancer.InvokerState
import org.apache.openwhisk.core.loadBalancer.InvokerHealth
import org.apache.openwhisk.utils.retry
import org.apache.openwhisk.core.connector.test.TestConnector
import org.apache.openwhisk.core.entity.ControllerInstanceId
@RunWith(classOf[JUnitRunner])
class InvokerSupervisionTests
extends TestKit(ActorSystem("InvokerSupervision"))
with ImplicitSender
with FlatSpecLike
with Matchers
with BeforeAndAfterAll
with MockFactory
with StreamLogging {
val config = new WhiskConfig(ExecManifest.requiredProperties)
val defaultUserMemory: ByteSize = 1024.MB
ExecManifest.initialize(config)
override def afterAll {
TestKit.shutdownActorSystem(system)
}
implicit val timeout = Timeout(5.seconds)
/** Imitates a StateTimeout in the FSM */
def timeout(actor: ActorRef) = actor ! FSM.StateTimeout
/** Queries all invokers for their state */
def allStates(pool: ActorRef) =
Await.result(pool.ask(GetStatus).mapTo[IndexedSeq[(InvokerInstanceId, InvokerState)]], timeout.duration)
/** Helper to generate a list of (InstanceId, InvokerState) */
def zipWithInstance(list: IndexedSeq[InvokerState]) = list.zipWithIndex.map {
case (state, index) => new InvokerHealth(InvokerInstanceId(index, userMemory = defaultUserMemory), state)
}
  val pC = new TestConnector("pingFeedTest", 4, false) {}
behavior of "InvokerPool"
it should "successfully create invokers in its pool on ping and keep track of statechanges" in {
val invoker5 = TestProbe()
val invoker2 = TestProbe()
val invoker5Instance = InvokerInstanceId(5, userMemory = defaultUserMemory)
val invoker2Instance = InvokerInstanceId(2, userMemory = defaultUserMemory)
val children = mutable.Queue(invoker5.ref, invoker2.ref)
val childFactory = (f: ActorRefFactory, instance: InvokerInstanceId) => children.dequeue()
val sendActivationToInvoker = stubFunction[ActivationMessage, InvokerInstanceId, Future[RecordMetadata]]
val supervisor = system.actorOf(InvokerPool.props(childFactory, sendActivationToInvoker, pC))
within(timeout.duration) {
// create first invoker
val ping0 = PingMessage(invoker5Instance)
supervisor ! ping0
invoker5.expectMsgType[SubscribeTransitionCallBack] // subscribe to the actor
invoker5.expectMsg(ping0)
invoker5.send(supervisor, CurrentState(invoker5.ref, Healthy))
allStates(supervisor) shouldBe zipWithInstance(IndexedSeq(Offline, Offline, Offline, Offline, Offline, Healthy))
// create second invoker
val ping1 = PingMessage(invoker2Instance)
supervisor ! ping1
invoker2.expectMsgType[SubscribeTransitionCallBack]
invoker2.expectMsg(ping1)
invoker2.send(supervisor, CurrentState(invoker2.ref, Healthy))
allStates(supervisor) shouldBe zipWithInstance(IndexedSeq(Offline, Offline, Healthy, Offline, Offline, Healthy))
// ping the first invoker again
supervisor ! ping0
invoker5.expectMsg(ping0)
allStates(supervisor) shouldBe zipWithInstance(IndexedSeq(Offline, Offline, Healthy, Offline, Offline, Healthy))
// one invoker goes offline
invoker2.send(supervisor, Transition(invoker2.ref, Healthy, Offline))
allStates(supervisor) shouldBe zipWithInstance(IndexedSeq(Offline, Offline, Offline, Offline, Offline, Healthy))
}
}
it should "forward the ActivationResult to the appropriate invoker" in {
val invoker = TestProbe()
val invokerInstance = InvokerInstanceId(0, userMemory = defaultUserMemory)
val invokerName = s"invoker${invokerInstance.toInt}"
val childFactory = (f: ActorRefFactory, instance: InvokerInstanceId) => invoker.ref
val sendActivationToInvoker = stubFunction[ActivationMessage, InvokerInstanceId, Future[RecordMetadata]]
val supervisor = system.actorOf(InvokerPool.props(childFactory, sendActivationToInvoker, pC))
within(timeout.duration) {
// Create one invoker
val ping0 = PingMessage(invokerInstance)
supervisor ! ping0
invoker.expectMsgType[SubscribeTransitionCallBack] // subscribe to the actor
invoker.expectMsg(ping0)
invoker.send(supervisor, CurrentState(invoker.ref, Healthy))
allStates(supervisor) shouldBe zipWithInstance(IndexedSeq(Healthy))
// Send message and expect receive in invoker
val msg = InvocationFinishedMessage(invokerInstance, InvocationFinishedResult.Success)
supervisor ! msg
invoker.expectMsg(msg)
}
}
it should "forward an ActivationMessage to the sendActivation-Method" in {
val invoker = TestProbe()
val invokerInstance = InvokerInstanceId(0, userMemory = defaultUserMemory)
val invokerName = s"invoker${invokerInstance.toInt}"
val childFactory = (f: ActorRefFactory, instance: InvokerInstanceId) => invoker.ref
val sendActivationToInvoker = LoggedFunction { (a: ActivationMessage, b: InvokerInstanceId) =>
Future.successful(new RecordMetadata(new TopicPartition(invokerName, 0), 0L, 0L, 0L, Long.box(0L), 0, 0))
}
val supervisor = system.actorOf(InvokerPool.props(childFactory, sendActivationToInvoker, pC))
// Send ActivationMessage to InvokerPool
val uuid = UUID()
val activationMessage = ActivationMessage(
transid = TransactionId.invokerHealth,
action = FullyQualifiedEntityName(EntityPath("whisk.system/utils"), EntityName("date")),
revision = DocRevision.empty,
user = Identity(
Subject("unhealthyInvokerCheck"),
Namespace(EntityName("unhealthyInvokerCheck"), uuid),
BasicAuthenticationAuthKey(uuid, Secret())),
activationId = new ActivationIdGenerator {}.make(),
rootControllerIndex = ControllerInstanceId("0"),
blocking = false,
content = None,
initArgs = Set.empty)
val msg = ActivationRequest(activationMessage, invokerInstance)
supervisor ! msg
// Verify, that MessageProducer will receive a call to send the message
retry(sendActivationToInvoker.calls should have size 1, N = 3, waitBeforeRetry = Some(500.milliseconds))
}
behavior of "InvokerActor"
  // unhealthy -> offline
  // offline -> unhealthy
  it should "start unhealthy, go offline if the state times out and go unhealthy again on a successful ping" in {
val pool = TestProbe()
val invoker =
pool.system.actorOf(
InvokerActor.props(InvokerInstanceId(0, userMemory = defaultUserMemory), ControllerInstanceId("0")))
within(timeout.duration) {
pool.send(invoker, SubscribeTransitionCallBack(pool.ref))
pool.expectMsg(CurrentState(invoker, Unhealthy))
timeout(invoker)
pool.expectMsg(Transition(invoker, Unhealthy, Offline))
invoker ! PingMessage(InvokerInstanceId(0, userMemory = defaultUserMemory))
pool.expectMsg(Transition(invoker, Offline, Unhealthy))
}
}
// unhealthy -> healthy -> unhealthy -> healthy
it should "goto healthy again, if unhealthy and error buffer has enough successful invocations" in {
val pool = TestProbe()
val invoker =
pool.system.actorOf(
InvokerActor.props(InvokerInstanceId(0, userMemory = defaultUserMemory), ControllerInstanceId("0")))
within(timeout.duration) {
pool.send(invoker, SubscribeTransitionCallBack(pool.ref))
pool.expectMsg(CurrentState(invoker, Unhealthy))
(1 to InvokerActor.bufferSize).foreach { _ =>
invoker ! InvocationFinishedMessage(
InvokerInstanceId(0, userMemory = defaultUserMemory),
InvocationFinishedResult.Success)
}
pool.expectMsg(Transition(invoker, Unhealthy, Healthy))
// Fill buffer with errors
(1 to InvokerActor.bufferSize).foreach { _ =>
invoker ! InvocationFinishedMessage(
InvokerInstanceId(0, userMemory = defaultUserMemory),
InvocationFinishedResult.SystemError)
}
pool.expectMsg(Transition(invoker, Healthy, Unhealthy))
// Fill buffer with successful invocations to become healthy again (one below errorTolerance)
(1 to InvokerActor.bufferSize - InvokerActor.bufferErrorTolerance).foreach { _ =>
invoker ! InvocationFinishedMessage(
InvokerInstanceId(0, userMemory = defaultUserMemory),
InvocationFinishedResult.Success)
}
pool.expectMsg(Transition(invoker, Unhealthy, Healthy))
}
}
  // unhealthy -> healthy -> unresponsive -> healthy
  it should "go to healthy again, if unresponsive and the error buffer has enough successful invocations" in {
val pool = TestProbe()
val invoker =
pool.system.actorOf(
InvokerActor.props(InvokerInstanceId(0, userMemory = defaultUserMemory), ControllerInstanceId("0")))
within(timeout.duration) {
pool.send(invoker, SubscribeTransitionCallBack(pool.ref))
pool.expectMsg(CurrentState(invoker, Unhealthy))
(1 to InvokerActor.bufferSize).foreach { _ =>
invoker ! InvocationFinishedMessage(
InvokerInstanceId(0, userMemory = defaultUserMemory),
InvocationFinishedResult.Success)
}
pool.expectMsg(Transition(invoker, Unhealthy, Healthy))
// Fill buffer with timeouts
(1 to InvokerActor.bufferSize).foreach { _ =>
invoker ! InvocationFinishedMessage(
InvokerInstanceId(0, userMemory = defaultUserMemory),
InvocationFinishedResult.Timeout)
}
pool.expectMsg(Transition(invoker, Healthy, Unresponsive))
// Fill buffer with successful invocations to become healthy again (one below errorTolerance)
(1 to InvokerActor.bufferSize - InvokerActor.bufferErrorTolerance).foreach { _ =>
invoker ! InvocationFinishedMessage(
InvokerInstanceId(0, userMemory = defaultUserMemory),
InvocationFinishedResult.Success)
}
pool.expectMsg(Transition(invoker, Unresponsive, Healthy))
}
}
it should "start timer to send testactions when unhealthy" in {
val invoker =
TestFSMRef(new InvokerActor(InvokerInstanceId(0, userMemory = defaultUserMemory), ControllerInstanceId("0")))
invoker.stateName shouldBe Unhealthy
invoker.isTimerActive(InvokerActor.timerName) shouldBe true
// Fill buffer with successful invocations to become healthy again (one below errorTolerance)
(1 to InvokerActor.bufferSize - InvokerActor.bufferErrorTolerance).foreach { _ =>
invoker ! InvocationFinishedMessage(
InvokerInstanceId(0, userMemory = defaultUserMemory),
InvocationFinishedResult.Success)
}
invoker.stateName shouldBe Healthy
invoker.isTimerActive(InvokerActor.timerName) shouldBe false
}
it should "initially store invoker status with its full id - instance/uniqueName/displayedName" in {
val invoker0 = TestProbe()
val children = mutable.Queue(invoker0.ref)
val childFactory = (f: ActorRefFactory, instance: InvokerInstanceId) => children.dequeue()
val sendActivationToInvoker = stubFunction[ActivationMessage, InvokerInstanceId, Future[RecordMetadata]]
val supervisor = system.actorOf(InvokerPool.props(childFactory, sendActivationToInvoker, pC))
val invokerInstance = InvokerInstanceId(0, Some("10.x.x.x"), Some("invoker-xyz"), userMemory = defaultUserMemory)
within(timeout.duration) {
val ping = PingMessage(invokerInstance)
supervisor ! ping
invoker0.expectMsgType[SubscribeTransitionCallBack]
invoker0.expectMsg(ping)
allStates(supervisor) shouldBe IndexedSeq(new InvokerHealth(invokerInstance, Offline))
}
}
it should "update the invoker instance id after it was restarted" in {
val invoker0 = TestProbe()
val children = mutable.Queue(invoker0.ref)
val childFactory = (f: ActorRefFactory, instance: InvokerInstanceId) => children.dequeue()
val sendActivationToInvoker = stubFunction[ActivationMessage, InvokerInstanceId, Future[RecordMetadata]]
val supervisor = system.actorOf(InvokerPool.props(childFactory, sendActivationToInvoker, pC))
val invokerInstance = InvokerInstanceId(0, Some("10.x.x.x"), Some("invoker-xyz"), userMemory = defaultUserMemory)
val invokerAfterRestart =
InvokerInstanceId(0, Some("10.x.x.x"), Some("invoker-zyx"), userMemory = defaultUserMemory)
within(timeout.duration) {
val ping = PingMessage(invokerInstance)
supervisor ! ping
invoker0.expectMsgType[SubscribeTransitionCallBack]
invoker0.expectMsg(ping)
invoker0.send(supervisor, CurrentState(invoker0.ref, Unhealthy))
val newPing = PingMessage(invokerAfterRestart)
supervisor ! newPing
allStates(supervisor) shouldBe IndexedSeq(new InvokerHealth(invokerAfterRestart, Unhealthy))
}
}
}
|
markusthoemmes/openwhisk
|
tests/src/test/scala/org/apache/openwhisk/core/loadBalancer/test/InvokerSupervisionTests.scala
|
Scala
|
apache-2.0
| 16,023
|
package org.hotsextra.matchmaking
package rating
case class NormalDistribution(mu: Double, sig: Double) extends Function1[Double, Double] {
def apply(x: Double) = {
val exp = -(((x - mu) * (x - mu)) / (2 * sig * sig))
val denom = sig * math.sqrt(2 * math.Pi)
math.exp(exp) / denom
}
def convolution(that: NormalDistribution) = {
val sprime = math.sqrt((sig * sig) + (that.sig * that.sig))
NormalDistribution(mu + that.mu, sprime)
}
def difference(that: NormalDistribution) = {
val sprime = math.sqrt((sig * sig) + (that.sig * that.sig))
NormalDistribution(mu - that.mu, sprime)
}
}
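// Illustrative sketch (not part of the original file): evaluating the density
// and combining two rating distributions; the numbers are made up.
object NormalDistributionSketch {
  def demo(): Unit = {
    val a = NormalDistribution(mu = 25.0, sig = 8.0)
    val b = NormalDistribution(mu = 30.0, sig = 4.0)
    println(a(25.0)) // density at the mean: 1 / (sig * sqrt(2 * pi))
    // Sum of independent normals: means add, variances add.
    println(a.convolution(b)) // NormalDistribution(55.0, sqrt(64 + 16))
  }
}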
|
martijnhoekstra/hotsextra-matchmaker
|
src/main/scala/org/hotsextra/matchmaking/rating/NormalDistribution.scala
|
Scala
|
agpl-3.0
| 667
|
/*
* Copyright (c) 2021, salesforce.com, inc.
* All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
package com.krux.hyperion.contrib.activity.notification
import java.net.{ HttpURLConnection, URL }
import org.json4s.JsonAST.{ JString, JObject }
import org.json4s.jackson.JsonMethods._
import scopt.OptionParser
object SendSlackMessage {
case class Options(
failOnError: Boolean = false,
webhookUrl: String = "",
user: Option[String] = None,
message: Seq[String] = Seq.empty,
iconEmoji: Option[String] = None,
channel: Option[String] = None
)
def apply(options: Options): Boolean = try {
// Setup the connection
val connection = new URL(options.webhookUrl).openConnection().asInstanceOf[HttpURLConnection]
connection.setDoOutput(true)
connection.setRequestProperty("Content-Type", "application/json")
connection.setRequestProperty("Accept", "application/json")
// Write the message
val output = connection.getOutputStream
try {
val message = Seq(
"icon_emoji" -> options.iconEmoji,
"channel" -> options.channel,
"username" -> options.user,
"text" -> Option(options.message.mkString("\\n"))
).flatMap {
case (k, None) => None
case (k, Some(v)) => Option(k -> JString(v))
}
output.write(compact(render(JObject(message: _*))).getBytes)
} finally {
output.close()
}
// Check the response code
connection.getResponseCode == 200 || !options.failOnError
} catch {
case e: Throwable =>
System.err.println(e.toString)
!options.failOnError
}
def main(args: Array[String]): Unit = {
val parser = new OptionParser[Options](s"hyperion-notification-slack-activity") {
override def showUsageOnError = Option(true)
note("Sends a notification message to a Slack incoming webhook.")
help("help").text("prints this usage text")
opt[Unit]("fail-on-error").optional().action((_, c) => c.copy(failOnError = true))
.text("Causes the activity to fail if any error received from the webhook")
opt[String]("webhook-url").valueName("WEBHOOK").required().action((x, c) => c.copy(webhookUrl = x))
.text("Sends the message to the given WEBHOOK url")
opt[String]("user").valueName("NAME").optional().action((x, c) => c.copy(user = Option(x)))
.text("Sends the message as the user with NAME")
opt[String]("emoji").valueName("EMOJI").optional().action((x, c) => c.copy(iconEmoji = Option(x)))
.text("Use EMOJI for the icon")
opt[String]("to").valueName("CHANNEL or USERNAME").optional().action((x, c) => c.copy(channel = Option(x)))
.text("Sends the message to #CHANNEL or @USERNAME")
arg[String]("MESSAGE").required().unbounded().action((x, c) => c.copy(message = c.message :+ x))
.text("Sends the given MESSAGE")
}
if (!parser.parse(args, Options()).exists(apply)) {
System.exit(3)
}
}
}
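// Illustrative sketch (not part of the original file): invoking the activity
// from a shell, using the options defined by the parser above; the webhook
// URL is a placeholder.
//
//   hyperion-notification-slack-activity \
//     --webhook-url https://hooks.slack.com/services/T000/B000/XXXX \
//     --to "#alerts" --user hyperion "Pipeline finished"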
|
realstraw/hyperion
|
contrib/activity/notification/src/main/scala/com/krux/hyperion/contrib/activity/notification/SendSlackMessage.scala
|
Scala
|
bsd-3-clause
| 3,099
|
package breeze.linalg
import breeze.linalg.operators.{OpMulScalar, OpSet}
import breeze.linalg.support.CanTraverseKeyValuePairs.KeyValuePairsVisitor
import breeze.linalg.support.CanTraverseValues.ValuesVisitor
import breeze.linalg.support._
import breeze.math.Semiring
import breeze.storage.Zero
import scala.reflect.ClassTag
import scala.{specialized => spec}
/**
* A SliceVector is a vector that is a view of another underlying tensor. For instance:
* {{{
* val m = DenseMatrix(...)
* m( (1,2), (3,4), (4,5))
* }}}
*
* will give a SliceVector such that apply/update at index 0 will map to m(1,2), index 1 to m(3,4), etc.
*
* @author dlwh
*/
class SliceVector[@spec(Int) K, @spec(Double, Int, Float, Long) V:ClassTag](val tensor: Tensor[K,V],
val slices: IndexedSeq[K])
extends Vector[V] with VectorLike[V, SliceVector[K, V]] {
def apply(i: Int): V = tensor(slices(i))
def update(i: Int, v: V) {tensor(slices(i)) = v}
def copy: DenseVector[V] = DenseVector((slices map (tensor.apply _)): _*)
def length: Int = slices.length
def activeSize: Int = slices.length
def repr: SliceVector[K, V] = this
def activeKeysIterator: Iterator[Int] = keysIterator
def activeIterator: Iterator[(Int, V)] = iterator
def activeValuesIterator: Iterator[V] = valuesIterator
override def toString = {
valuesIterator.mkString("SliceVector(", ", ", ")")
}
}
object SliceVector extends SliceVectorOps {
implicit def scalarOf[K, T]: ScalarOf[SliceVector[K, T], T] = ScalarOf.dummy
implicit def canMapKeyValuePairs[K, V, V2: ClassTag]: CanMapKeyValuePairs[SliceVector[K, V], Int, V, V2, DenseVector[V2]] = {
new CanMapKeyValuePairs[SliceVector[K, V], Int, V, V2, DenseVector[V2]] {
override def map(from: SliceVector[K, V], fn: (Int, V) => V2): DenseVector[V2] = {
DenseVector.tabulate(from.length)(i => fn(i, from(i)))
}
override def mapActive(from: SliceVector[K, V], fn: (Int, V) => V2): DenseVector[V2] = {
map(from, fn)
}
}
}
implicit def canMapValues[K, V, V2: ClassTag]: CanMapValues[SliceVector[K, V], V, V2, DenseVector[V2]] = {
new CanMapValues[SliceVector[K, V], V, V2, DenseVector[V2]] {
override def apply(from: SliceVector[K, V], fn: (V) => V2): DenseVector[V2] = {
DenseVector.tabulate(from.length)(i => fn(from(i)))
}
}
}
implicit def canCreateZerosLike[K, V: ClassTag : Zero]: CanCreateZerosLike[SliceVector[K, V], DenseVector[V]] = {
new CanCreateZerosLike[SliceVector[K, V], DenseVector[V]] {
def apply(v1: SliceVector[K, V]): DenseVector[V] = {
DenseVector.zeros[V](v1.length)
}
}
}
implicit def canIterateValues[K, V]: CanTraverseValues[SliceVector[K, V], V] =
new CanTraverseValues[SliceVector[K, V], V] {
def isTraversableAgain(from: SliceVector[K, V]): Boolean = true
      /** Traverses all values from the given collection. */
def traverse(from: SliceVector[K, V], fn: ValuesVisitor[V]): Unit = {
from.valuesIterator foreach {
fn.visit(_)
}
}
}
implicit def canIterateKeyValuePairs[K, V]: CanTraverseKeyValuePairs[SliceVector[K, V], Int, V] = {
new CanTraverseKeyValuePairs[SliceVector[K, V], Int, V] {
      /** Traverses all key-value pairs from the given collection. */
override def traverse(from: SliceVector[K, V], fn: KeyValuePairsVisitor[Int, V]): Unit = {
from.iterator foreach {
case (k, v) => fn.visit(k, v)
}
}
def isTraversableAgain(from: SliceVector[K, V]): Boolean = true
}
}
implicit def canTransformValues[K, V]: CanTransformValues[SliceVector[K, V], V] = {
new CanTransformValues[SliceVector[K, V], V] {
def transform(from: SliceVector[K, V], fn: (V) => V) {
for (i <- 0 until from.length) {
from(i) = fn(from(i))
}
}
def transformActive(from: SliceVector[K, V], fn: (V) => V) {
transform(from, fn)
}
}
}
}
trait SliceVectorOps {
  // TODO: add all the other ops (can this be done with some macro magic?)
implicit def opSetInPlace[K, V]: OpSet.InPlaceImpl2[SliceVector[K, V], V] = new SVOpSetInPlace[K, V]
implicit def opMulScalar[K, V : ClassTag : Semiring]: OpMulScalar.Impl2[SliceVector[K, V], V, DenseVector[V]] = new SVOpMulScalar[K, V]
implicit def opMulScalarInPlace[K, V : ClassTag : Semiring]: OpMulScalar.InPlaceImpl2[SliceVector[K, V], V] = new SVOpMulScalarInPlace[K, V]
class SVOpSetInPlace[@specialized(Int) K, @specialized(Double, Int, Float, Long) V] extends OpSet.InPlaceImpl2[SliceVector[K, V], V] {
def apply(a: SliceVector[K, V], b: V): Unit = a.keysIterator.foreach(k => a.update(k, b))
}
class SVOpMulScalar[@specialized(Int) K, @specialized(Double, Int, Float, Long) V: ClassTag : Semiring] extends OpMulScalar.Impl2[SliceVector[K, V], V, DenseVector[V]] {
val semiring = implicitly[Semiring[V]]
def apply(a: SliceVector[K, V], b: V): DenseVector[V] = a.iterator.foldLeft(new VectorBuilder[V](a.length))({
case (builder, (k, v)) =>
builder.add(k, semiring.*(v, b))
builder
}).toDenseVector
}
class SVOpMulScalarInPlace[@specialized(Int) K, @specialized(Double, Int, Float, Long) V: ClassTag : Semiring] extends OpMulScalar.InPlaceImpl2[SliceVector[K, V], V] {
val semiring = implicitly[Semiring[V]]
def apply(a: SliceVector[K, V], b: V): Unit = a.iterator.foreach({
case (k, v) => a(k) = semiring.*(v, b)
})
}
}
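// Illustrative sketch (not part of the original file): a SliceVector viewing
// three matrix entries, as in the scaladoc above. Writing through the view
// mutates the underlying matrix.
object SliceVectorSketch {
  def demo(): Unit = {
    val m = DenseMatrix.zeros[Double](5, 5)
    val view = m((0, 0), (1, 2), (3, 4)) // SliceVector[(Int, Int), Double]
    view(1) = 42.0
    assert(m(1, 2) == 42.0)
  }
}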
|
claydonkey/breeze
|
math/src/main/scala/breeze/linalg/SliceVector.scala
|
Scala
|
apache-2.0
| 5,580
|
package simplez
import scala.language.{ higherKinds, implicitConversions }
/**
* This package provides implicit functions, so that we can for example access
* {{{
* val x : List[Int] = List(1,2,3)
 * x.append(List(4,5,6))
 * }}} instead of
 * {{{
 * Monoid[List].append(x, List(4,5,6))
* }}}
*
*/
package object syntax {
trait SemigroupSyntax[A] {
def self: A
def F: Semigroup[A]
def |+|(b: A): A = append(b)
def append(b: A): A = F.append(self, b)
}
implicit def ToSemigroupOps[A: Semigroup](a: A): SemigroupSyntax[A] = new SemigroupSyntax[A] {
def self: A = a
def F: Semigroup[A] = implicitly[Semigroup[A]]
}
/**
*
* @tparam A
*/
trait MonoidSyntax[A] {
def self: A
def F: Monoid[A]
def mzero(): A = F.zero
}
/**
*
* @param a
* @tparam A
* @return
*/
implicit def ToMonoidOps[A: Monoid](a: A): MonoidSyntax[A] = new MonoidSyntax[A] {
def self: A = a
def F: Monoid[A] = implicitly[Monoid[A]]
}
/**
*
* @tparam F
* @tparam A
*/
trait FunctorSyntax[F[_], A] {
def self: F[A]
def F: Functor[F]
def map[B](f: A => B): F[B] = F.map(self)(f)
def void: F[Unit] = F.void(self)
def as[B](b: => B) = F.as(self)(b)
}
/**
*
* @param a
* @tparam F
* @tparam A
* @return
*/
implicit def ToFunctorOps[F[_]: Functor, A](a: F[A]): FunctorSyntax[F, A] = new FunctorSyntax[F, A] {
def self = a
def F: Functor[F] = implicitly[Functor[F]]
}
/**
*
* @tparam F
* @tparam A
*/
trait MonadSyntax[F[_], A] {
def self: F[A]
def F: Monad[F]
def flatMap[B](f: A => F[B]): F[B] = F.flatMap(self)(f)
def pure[A](a: => A): F[A] = F.pure(a)
}
/**
*
* @param a
* @tparam F
* @tparam A
* @return
*/
implicit def ToMonadOps[F[_]: Monad, A](a: F[A]): MonadSyntax[F, A] = new MonadSyntax[F, A] {
def self = a
def F: Monad[F] = implicitly[Monad[F]]
}
/**
*
* @tparam W
* @tparam A
*/
trait WriterSyntax[W, A] {
def self: A
def set(w: W): Writer[W, A] = Writer(w -> self)
}
/**
*
* @param a
* @tparam W
* @tparam A
* @return
*/
implicit def ToWriterOps[W, A](a: A) = new WriterSyntax[W, A] {
def self: A = a
}
trait FoldableSyntax[F[_], A] {
def self: F[A]
def F: Foldable[F]
def foldRight[B](z: B)(f: (A, B) => B): B = F.foldRight(self, z)(f)
def foldMap[B](f: A => B)(implicit ev: Monoid[B]): B = F.foldMap(self)(f)
def fold(implicit ev: Monoid[A]): A = F.foldMap(self)(identity _)
}
  implicit def ToFoldableOps[F[_]: Foldable, A](t: F[A]) = new FoldableSyntax[F, A] {
    def self: F[A] = t
    def F: Foldable[F] = implicitly[Foldable[F]]
  }
/**
*
* @param w
* @param W
* @tparam W
* @tparam A
* @return
*/
implicit def writerToMonad[W, A](w: Writer[W, A])(implicit W: Monoid[W]) = new Monad[({ type λ[α] = Writer[W, α] })#λ] {
override def flatMap[A, B](F: Writer[W, A])(f: (A) => Writer[W, B]): Writer[W, B] = F.flatMap(f)
override def pure[A](a: => A): Writer[W, A] = Writer(W.zero -> a)
}
implicit def ToTraverseOps[F[_]: Traverse, A](a: F[A]) = new TraverseSyntax[F, A] {
val self = a
val F = Traverse[F]
}
trait TraverseSyntax[F[_], A] {
def self: F[A]
def F: Traverse[F]
def traverse[G[_]: Applicative, B](f: A => G[B]): G[F[B]] = F.traverse(self)(f)
def contents(): List[A] = F.contents(self)
def count(): Int = F.count(self)
def shape(): F[Unit] = F.shape(self)
def decompose(): (F[Unit], List[A]) = F.decompose(self)
def reassemble[B](elements: List[B])(implicit ev: A =:= Unit): Option[F[B]] =
F.reassemble(self)(elements)
def collect[G[_]: Applicative, B](f: A => G[Unit])(g: A => B): G[F[B]] =
F.collect(self)(f, g)
def collectS[S, B](f: A => State[S, Unit])(g: A => B): State[S, F[B]] = {
F.collect[Lambda[a => State[S, a]], A, B](self)(f, g)
}
def disperse[G[_]: Applicative, B, C](fb: G[B], g: A => B => C): G[F[C]] = {
F.disperse(self)(fb, g)
}
def disperseS[S, C](fb: State[S, S], g: A => S => C) = {
F.disperse[Lambda[a => State[S, a]], A, S, C](self)(fb, g)
}
}
/**
*
* @param a
* @tparam F
* @tparam A
* @return
*/
implicit def ToApplicativeOps[F[_]: Applicative, A](a: F[A]) = new ApplicativeSyntax[F, A] {
val self = a
val F = Applicative[F]
}
/**
*
* @tparam F
* @tparam A
*/
trait ApplicativeSyntax[F[_], A] {
def self: F[A]
def F: Applicative[F]
final def <*>[B](f: F[A => B]): F[B] = F.ap(self)(f)
/** Combine `self` and `fb` according to `Applicative[F]` with a function that discards the `A`s */
final def *>[B](fb: F[B]): F[B] = F.apply2(self, fb)((_, b) => b)
/** Combine `self` and `fb` according to `Applicative[F]` with a function that discards the `B`s */
final def <*[B](fb: F[B]): F[A] = F.apply2(self, fb)((a, _) => a)
def |@|[B](b1: F[B]) = new ApplicativeBuilder[F, A, B] {
val a = self
val b = b1
}
def compose[G[_]: Applicative]: Applicative[Lambda[a => F[G[a]]]] = F.compose[G]
def product[G[_]: Applicative]: Applicative[Lambda[a => (F[a], G[a])]] = F.product[G]
}
/**
*
* @tparam F
* @tparam A
* @tparam B
*/
trait ApplicativeBuilder[F[_], A, B] {
val a: F[A]
val b: F[B]
def apply[C](f: (A, B) => C)(implicit ap: Applicative[F]): F[C] = ap.apply2(a, b)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B)] = apply(Tuple2.apply)
def |@|[C](cc: F[C]) = new ApplicativeBuilder3[C] {
val c = cc
}
sealed trait ApplicativeBuilder3[C] {
val c: F[C]
def apply[D](f: (A, B, C) => D)(implicit ap: Applicative[F]): F[D] = ap.apply3(a, b, c)(f)
      def tupled(implicit ap: Applicative[F]): F[(A, B, C)] = apply(Tuple3.apply)
      def |@|[D](dd: F[D]) = new ApplicativeBuilder4[D] {
        val d = dd
      }
      sealed trait ApplicativeBuilder4[D] {
val d: F[D]
def apply[E](f: (A, B, C, D) => E)(implicit ap: Applicative[F]): F[E] = ap.apply4(a, b, c, d)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D)] = apply(Tuple4.apply)
def |@|[E](ee: F[E]) = new ApplicativeBuilder5[E] {
val e = ee
}
sealed trait ApplicativeBuilder5[E] {
val e: F[E]
def apply[G](f: (A, B, C, D, E) => G)(implicit ap: Applicative[F]): F[G] = ap.apply5(a, b, c, d, e)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E)] = apply(Tuple5.apply)
def |@|[G](gg: F[G]) = new ApplicativeBuilder6[G] {
val g = gg
}
sealed trait ApplicativeBuilder6[G] {
val g: F[G]
def apply[H](f: (A, B, C, D, E, G) => H)(implicit ap: Applicative[F]): F[H] = ap.apply6(a, b, c, d, e, g)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G)] = apply(Tuple6.apply)
def |@|[H](hh: F[H]) = new ApplicativeBuilder7[H] {
val h = hh
}
sealed trait ApplicativeBuilder7[H] {
val h: F[H]
def apply[I](f: (A, B, C, D, E, G, H) => I)(implicit ap: Applicative[F]): F[I] = ap.apply7(a, b, c, d, e, g, h)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H)] = apply(Tuple7.apply)
def |@|[I](ii: F[I]) = new ApplicativeBuilder8[I] {
val i = ii
}
sealed trait ApplicativeBuilder8[I] {
val i: F[I]
def apply[J](f: (A, B, C, D, E, G, H, I) => J)(implicit ap: Applicative[F]): F[J] = ap.apply8(a, b, c, d, e, g, h, i)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I)] = apply(Tuple8.apply)
def |@|[J](jj: F[J]) = new ApplicativeBuilder9[J] {
val j = jj
}
sealed trait ApplicativeBuilder9[J] {
val j: F[J]
def apply[K](f: (A, B, C, D, E, G, H, I, J) => K)(implicit ap: Applicative[F]): F[K] = ap.apply9(a, b, c, d, e, g, h, i, j)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J)] = apply(Tuple9.apply)
def |@|[K](kk: F[K]) = new ApplicativeBuilder10[K] {
val k = kk
}
sealed trait ApplicativeBuilder10[K] {
val k: F[K]
def apply[L](f: (A, B, C, D, E, G, H, I, J, K) => L)(implicit ap: Applicative[F]): F[L] = ap.apply10(a, b, c, d, e, g, h, i, j, k)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K)] = apply(Tuple10.apply)
def |@|[L](ll: F[L]) = new ApplicativeBuilder11[L] {
val l = ll
}
sealed trait ApplicativeBuilder11[L] {
val l: F[L]
def apply[M](f: (A, B, C, D, E, G, H, I, J, K, L) => M)(implicit ap: Applicative[F]): F[M] = ap.apply11(a, b, c, d, e, g, h, i, j, k, l)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L)] = apply(Tuple11.apply)
def |@|[M](mm: F[M]) = new ApplicativeBuilder12[M] {
val m = mm
}
sealed trait ApplicativeBuilder12[M] {
val m: F[M]
def apply[N](f: (A, B, C, D, E, G, H, I, J, K, L, M) => N)(implicit ap: Applicative[F]): F[N] = ap.apply12(a, b, c, d, e, g, h, i, j, k, l, m)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L, M)] = apply(Tuple12.apply)
def |@|[N](nn: F[N]) = new ApplicativeBuilder13[N] {
val n = nn
}
sealed trait ApplicativeBuilder13[N] {
val n: F[N]
def apply[O](f: (A, B, C, D, E, G, H, I, J, K, L, M, N) => O)(implicit ap: Applicative[F]): F[O] = ap.apply13(a, b, c, d, e, g, h, i, j, k, l, m, n)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L, M, N)] = apply(Tuple13.apply)
def |@|[O](oo: F[O]) = new ApplicativeBuilder14[O] {
val o = oo
}
sealed trait ApplicativeBuilder14[O] {
val o: F[O]
def apply[P](f: (A, B, C, D, E, G, H, I, J, K, L, M, N, O) => P)(implicit ap: Applicative[F]): F[P] = ap.apply14(a, b, c, d, e, g, h, i, j, k, l, m, n, o)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L, M, N, O)] = apply(Tuple14.apply)
def |@|[P](pp: F[P]) = new ApplicativeBuilder15[P] {
val p = pp
}
sealed trait ApplicativeBuilder15[P] {
val p: F[P]
def apply[Q](f: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P) => Q)(implicit ap: Applicative[F]): F[Q] = ap.apply15(a, b, c, d, e, g, h, i, j, k, l, m, n, o, p)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P)] = apply(Tuple15.apply)
def |@|[Q](qq: F[Q]) = new ApplicativeBuilder16[Q] {
val q = qq
}
sealed trait ApplicativeBuilder16[Q] {
val q: F[Q]
def apply[R](f: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q) => R)(implicit ap: Applicative[F]): F[R] = ap.apply16(a, b, c, d, e, g, h, i, j, k, l, m, n, o, p, q)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q)] = apply(Tuple16.apply)
def |@|[R](rr: F[R]) = new ApplicativeBuilder17[R] {
val r = rr
}
sealed trait ApplicativeBuilder17[R] {
val r: F[R]
def apply[S](f: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R) => S)(implicit ap: Applicative[F]): F[S] = ap.apply17(a, b, c, d, e, g, h, i, j, k, l, m, n, o, p, q, r)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R)] = apply(Tuple17.apply)
def |@|[S](ss: F[S]) = new ApplicativeBuilder18[S] {
val s = ss
}
sealed trait ApplicativeBuilder18[S] {
val s: F[S]
def apply[T](f: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S) => T)(implicit ap: Applicative[F]): F[T] = ap.apply18(a, b, c, d, e, g, h, i, j, k, l, m, n, o, p, q, r, s)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S)] = apply(Tuple18.apply)
def |@|[T](tt: F[T]) = new ApplicativeBuilder19[T] {
val t = tt
}
sealed trait ApplicativeBuilder19[T] {
val t: F[T]
def apply[U](f: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => U)(implicit ap: Applicative[F]): F[U] = ap.apply19(a, b, c, d, e, g, h, i, j, k, l, m, n, o, p, q, r, s, t)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] = apply(Tuple19.apply)
def |@|[U](uu: F[U]) = new ApplicativeBuilder20[U] {
val u = uu
}
sealed trait ApplicativeBuilder20[U] {
val u: F[U]
def apply[V](f: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => V)(implicit ap: Applicative[F]): F[V] = ap.apply20(a, b, c, d, e, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] = apply(Tuple20.apply)
def |@|[V](vv: F[V]) = new ApplicativeBuilder21[V] {
val v = vv
}
sealed trait ApplicativeBuilder21[V] {
val v: F[V]
def apply[W](f: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => W)(implicit ap: Applicative[F]): F[W] = ap.apply21(a, b, c, d, e, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] = apply(Tuple21.apply)
def |@|[W](ww: F[W]) = new ApplicativeBuilder22[W] {
val w = ww
}
sealed trait ApplicativeBuilder22[W] {
val w: F[W]
def apply[X](f: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W) => X)(implicit ap: Applicative[F]): F[X] = ap.apply22(a, b, c, d, e, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w)(f)
def tupled(implicit ap: Applicative[F]): F[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W)] = apply(Tuple22.apply)
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
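// Illustrative sketch (not part of the original file): combining two Options
// with the applicative builder syntax above, assuming an Applicative[Option]
// instance is available in simplez.
object ApplicativeBuilderSketch {
  import simplez.syntax._
  def demo(implicit ap: Applicative[Option]): Option[Int] =
    (Option(1) |@| Option(2))(_ + _) // Some(3)
}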
|
inoio/simplez
|
main/src/main/scala/simplez/syntax/package.scala
|
Scala
|
bsd-2-clause
| 16,884
|
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.frontend.v2_3.ast.functions
import org.neo4j.cypher.internal.frontend.v2_3.ast.{Function, SimpleTypedFunction}
import org.neo4j.cypher.internal.frontend.v2_3.symbols._
case object Left extends Function with SimpleTypedFunction {
def name = "left"
val signatures = Vector(
Signature(argumentTypes = Vector(CTString, CTInteger), outputType = CTString)
)
}
|
HuangLS/neo4j
|
community/cypher/frontend-2.3/src/main/scala/org/neo4j/cypher/internal/frontend/v2_3/ast/functions/Left.scala
|
Scala
|
apache-2.0
| 1,197
|
package views.html
package userTournament
import lila.api.Context
import lila.app.templating.Environment._
import lila.app.ui.ScalatagsTemplate._
import lila.common.paginator.Paginator
import lila.user.User
object upcoming {
def apply(u: User, pager: Paginator[lila.tournament.Tournament])(implicit ctx: Context) =
bits.layout(
u = u,
title = s"${u.username} upcoming tournaments",
path = "upcoming"
) {
if (pager.nbResults == 0)
div(cls := "box-pad")(u.username, " hasn't joined any tournament yet!")
else
div(cls := "tournament-list")(
table(cls := "slist")(
thead(
tr(
th(cls := "count")(pager.nbResults),
th(colspan := 2)(h1(userLink(u, withOnline = true), " upcoming tournaments")),
th(trans.players())
)
),
tbody(
pager.currentPageResults.map { t =>
tr(
td(cls := "icon")(iconTag(tournamentIconChar(t))),
views.html.tournament.finishedList.header(t),
td(momentFromNow(t.startsAt)),
td(cls := "text", dataIcon := "")(t.nbPlayers.localize)
)
}
)
)
)
}
}
|
luanlv/lila
|
app/views/userTournament/upcoming.scala
|
Scala
|
mit
| 1,308
|
package com.spann.models
case class Speed(speed: Float) {
def inMeterPerSecond = {
(speed * 1000 ) / 3600
}
}
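// Illustrative sketch (not part of the original file): checking the km/h to
// m/s conversion above on a round number.
object SpeedSketch {
  def demo(): Unit =
    assert(Speed(36).inMeterPerSecond == 10.0f) // 36 km/h = 10 m/s
}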
|
sunil-123/SpannRace
|
src/main/scala/com/spann/models/Speed.scala
|
Scala
|
mit
| 119
|
/*
** Copyright [2013-2016] [Megam Systems]
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
package models.json.analytics
import scalaz._
import scalaz.NonEmptyList._
import scalaz.Validation
import scalaz.Validation._
import Scalaz._
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._
import java.util.Date
import java.nio.charset.Charset
import io.megam.auth.funnel.FunnelErrors._
import controllers.Constants._
import models.analytics.KeyValueField
/**
* @author rajthilak
*
*/
class KeyValueFieldSerialization(charset: Charset = UTF8Charset) extends io.megam.json.SerializationBase[KeyValueField] {
protected val NameKey = "key"
protected val ValueKey = "value"
override implicit val writer = new JSONW[KeyValueField] {
override def write(h: KeyValueField): JValue = {
JObject(
JField(NameKey, toJSON(h.key)) ::
JField(ValueKey, toJSON(h.value)) ::
Nil)
}
}
override implicit val reader = new JSONR[KeyValueField] {
override def read(json: JValue): Result[KeyValueField] = {
val nameField = field[String](NameKey)(json)
val valueField = field[String](ValueKey)(json)
(nameField |@| valueField ) {
(name: String, value: String) =>
new KeyValueField(name, value)
}
}
}
}
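// Illustrative sketch (not part of the original file): a JSON round trip
// through the serialization above; the key/value strings are made up.
object KeyValueFieldSerializationSketch {
  def demo(): Unit = {
    val ser = new KeyValueFieldSerialization()
    import ser.{reader, writer}
    val json = toJSON(new KeyValueField("region", "us-east-1"))
    println(fromJSON[KeyValueField](json)) // Success(KeyValueField(region, us-east-1))
  }
}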
|
meglytics/bidi
|
app/models/json/analytics/KeyValueFieldSerialization.scala
|
Scala
|
mit
| 1,823
|
package work.martins.simon.expect.fluent
import work.martins.simon.expect.{Settings, core}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
class EmptySpec extends AnyFlatSpec with Matchers {
"An Expect without a command" should "throw IllegalArgumentException" in {
an [IllegalArgumentException] should be thrownBy new Expect("", defaultValue = ())
}
"An Expect with an empty expect block" should "fail when generating the core.Expect" in {
val fe = new Expect(Seq("ls"), defaultValue = (), Settings()) {
expect
}
an [IllegalArgumentException] should be thrownBy fe.toCore
}
"Invoking expect.expect" should "fail when generating the core.Expect" in {
val fe = new Expect(Seq("ls"), defaultValue = (), Settings.fromConfig()) {
expect.expect
}
an [IllegalArgumentException] should be thrownBy fe.toCore
}
"An Expect without expect blocks" should "generate the correct core.Expect" in {
val fe = new Expect("ls", defaultValue = (), Settings())
fe.toCore shouldEqual new core.Expect("ls", defaultValue = ())()
}
}
|
Lasering/scala-expect
|
src/test/scala/work/martins/simon/expect/fluent/EmptySpec.scala
|
Scala
|
mit
| 1,119
|
package com.mesosphere.cosmos.finch
import com.mesosphere.cosmos.http.RequestSession
import com.twitter.finagle.http.Fields
import com.twitter.finagle.http.Status
import com.twitter.util.Future
import com.twitter.util.Throw
import io.circe.Json
import io.circe.syntax._
import io.finch._
abstract class EndpointHandler[Request, Response](successStatus: Status = Status.Ok) {
private[this] val logger = org.slf4j.LoggerFactory.getLogger(getClass)
final def apply(context: EndpointContext[Request, Response]): Future[Output[Json]] = {
apply(context.requestBody)(context.session)
.respond {
case Throw(e) =>
logger.warn(s"Processing [${context.requestBody}] resulted in : ${e.getMessage}")
case _ => ()
}
.map { response =>
Output
.payload(response.asJson(context.responseEncoder.encoder), successStatus)
.withHeader(Fields.ContentType -> context.responseEncoder.mediaType(response).show)
}
}
def apply(request: Request)(implicit session: RequestSession): Future[Response]
}
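// Illustrative sketch (not part of the original file): a minimal concrete
// handler; the String request/response types and the echo behavior are made up.
final class EchoHandler extends EndpointHandler[String, String](successStatus = Status.Ok) {
  override def apply(request: String)(implicit session: RequestSession): Future[String] =
    Future.value(request)
}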
|
dcos/cosmos
|
cosmos-common/src/main/scala/com/mesosphere/cosmos/finch/EndpointHandler.scala
|
Scala
|
apache-2.0
| 1,068
|
/*
* Copyright 2020 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.extra.hll.zetasketch
import java.lang
import org.apache.beam.sdk.extensions.zetasketch.HllCount
import org.apache.beam.sdk.extensions.zetasketch.HllCount.Init
sealed trait ZetaSketchable[T] {
type IN
def init(p: Int): Init.Builder[IN]
}
object ZetaSketchable {
implicit val intZetaSketchable: ZetaSketchable[Int] = new ZetaSketchable[Int] {
type IN = lang.Integer
override def init(p: Int): Init.Builder[lang.Integer] =
HllCount.Init.forIntegers().withPrecision(p)
}
implicit val longZetaSketchable: ZetaSketchable[Long] = new ZetaSketchable[Long] {
type IN = lang.Long
override def init(p: Int): Init.Builder[lang.Long] = HllCount.Init.forLongs().withPrecision(p)
}
implicit val stringZetaSketchable: ZetaSketchable[String] = new ZetaSketchable[String] {
override type IN = String
override def init(p: Int): Init.Builder[String] =
HllCount.Init.forStrings().withPrecision(p)
}
implicit val byteArrayZetaSketchable: ZetaSketchable[Array[Byte]] =
new ZetaSketchable[Array[Byte]] {
override type IN = Array[Byte]
override def init(p: Int): Init.Builder[Array[Byte]] =
HllCount.Init.forBytes().withPrecision(p)
}
}
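// Illustrative sketch (not part of the original file): summoning an instance
// to build an HLL++ initializer; precision 15 is an arbitrary choice.
object ZetaSketchableSketch {
  def initFor[T](p: Int)(implicit z: ZetaSketchable[T]): Init.Builder[z.IN] = z.init(p)
  val longInit = initFor[Long](15) // same as HllCount.Init.forLongs().withPrecision(15)
}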
|
spotify/scio
|
scio-extra/src/main/scala/com/spotify/scio/extra/hll/zetasketch/ZetaSketchable.scala
|
Scala
|
apache-2.0
| 1,830
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.enhancement.enhancements
import io.truthencode.ddo.model.enhancement.enhancements.classbased.BombardierTierFour
trait EfficientHeighten extends BombardierTierFour with ClassEnhancementImpl {
/**
* Some enhancements can be taken multiple times (generally up to three)
*/
override val ranks: Int = 2
/**
* Some enhancements have multiple ranks. This is the cost for each rank. Older versions had
* increasing costs which has been streamlined to a linear progression.
*
* @return
*/
override def apCostPerRank: Int = 1
}
|
adarro/ddo-calc
|
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/enhancement/enhancements/EfficientHeighten.scala
|
Scala
|
apache-2.0
| 1,227
|
package org.biosys.pubmed.models
object SummaryParts {
def from(doc:scala.xml.Node) = {
SummaryPart(
label = doc.attribute("Label").map(_.text),
text = doc.text
)
}
}
case class SummaryPart(label: Option[String], text: String)
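// Illustrative sketch (not part of the original file): building a SummaryPart
// from a labelled abstract-section node; the XML content is made up.
object SummaryPartSketch {
  val node = <AbstractText Label="METHODS">We sequenced 1,000 genomes.</AbstractText>
  val part = SummaryParts.from(node) // SummaryPart(Some(METHODS), We sequenced 1,000 genomes.)
}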
|
sdor/biosys
|
pubmed_common/src/main/scala/org/biosys/pubmed/models/SummaryPart.scala
|
Scala
|
gpl-2.0
| 255
|
package argonaut
package benchmark
import scala.io.Source
import Argonaut._
object ParserBench {
def parse(json: String, count: Int) = {
val elapsed = Benchmark.time(() => json.parse, count) / 1000.0
val bytes = json.getBytes("UTF-8").length
val rate = (count / elapsed).toInt
(bytes, rate)
}
def parseAndPrint(name: String, json: String, count: Int = 10000) = {
parse(json, count) match {
case (bytes, rate) =>
printf("%s, %.5gKB @ %dmsg/s (%.2gMB/s)%n", name, (bytes / 1024.0), rate, rate / ((1024 * 1024) / (bytes / 1.0)))
}
}
}
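// Illustrative sketch (not part of the original file): running the benchmark
// on a tiny inline document; real runs would feed larger fixtures.
object ParserBenchSketch {
  def main(args: Array[String]): Unit =
    ParserBench.parseAndPrint("tiny-object", """{"a":1,"b":[1,2,3]}""", count = 1000)
}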
|
etorreborre/argonaut
|
benchmark/src/main/scala/argonaut/benchmark/ParserBench.scala
|
Scala
|
bsd-3-clause
| 582
|
/* Copyright (C) 2008-2010 Univ of Massachusetts Amherst, Computer Science Dept
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://code.google.com/p/factorie/
This software is provided under the terms of the Eclipse Public License 1.0
as published by http://www.opensource.org. For further information,
see the file `LICENSE.txt' included with this distribution. */
package cc.factorie.example
import cc.factorie._
//import cc.factorie.DenseProportionsMaximumLikelihoodEstimator._
import cc.factorie.DenseProportions._
import cc.factorie.DenseCountsProportions._
object MultinomialDemo {
val numSides = 6
class Roll(die:Proportions, value:Int) extends Discrete(die, value)
Domain[Roll].size = () => numSides // TODO Make this unnecessary
def main(args:Array[String]) : Unit = {
    val die = new DenseProportions(List(.1, .2, .3, .2, .1, .1)) // six entries, matching numSides
println("True distribution "+die)
val rolls = for (i <- 1 to 1000) yield new Roll(die, die.sampleInt)
rolls.foreach(_.sample(null))
die.estimate()
println("Est distribution "+die)
val r = new scala.util.Random
val die2 = new GrowableDenseCountsProportions
val rolls2 = for (i <- 1 to 1000) yield new Roll(die2, r.nextInt(6))
die2.estimate()
println("Die2 "+die2)
}
}
|
andrewmilkowski/factorie
|
src/main/scala/cc/factorie/example/MultinomialDemo.scala
|
Scala
|
epl-1.0
| 1,336
|
package de.sciss.fscape
import de.sciss.kollflitz.Vec
import scala.annotation.tailrec
import scala.concurrent.Promise
import scala.util.Success
class ResizeWindowSpec extends UGenSpec {
"The ResizeWindow UGen" should "work as intended" in {
// var count = 0
// showControlLog = true
// showStreamLog = true
def variant(inLen: Int, winInSz: Int, start: Int, stop: Int): Unit = {
val p = Promise[Vec[Int]]()
val inDataP = if (inLen < 1) Vector.empty else 1 to inLen
val inDataSq = inDataP.grouped(winInSz)
val expected: Vector[Int] = inDataSq.flatMap { in0 =>
val in1 = if (in0.size >= winInSz) in0 else in0.padTo(winInSz, 0)
val in2 = if (start < 0) Vector.fill(-start)(0) ++ in1 else in1 // in1.drop (start)
val in3 = if (stop > 0) in2 ++ Vector.fill(stop)(0) else in2.dropRight(-stop)
val in4 = if (start < 0) in3 else in3.drop(start)
in4
} .toVector
val g = Graph {
import graph._
val oneTwo = ArithmSeq(start = 1, length = inLen)
val r = ResizeWindow(oneTwo, size = winInSz, start = start, stop = stop)
DebugIntPromise(r, p)
}
runGraph(g, 128)
val info = s"for inLen = $inLen, winInSz = $winInSz, start = $start, stop = $stop"
assert(p.isCompleted)
val res = getPromiseVec(p)
assert (res === expected, info)
}
for {
inLen <- List(0, 1, 2, 10, 200)
winInSz <- List(1, 2, 9)
start <- -3 to +3
stop <- -3 to +3
} {
variant(inLen = inLen, winInSz = winInSz, start = start, stop = stop)
}
}
it should "work with varying window parameters" in {
val p = Promise[Vec[Int]]()
val inLen = 385
val winSzInSq = List(56, 36, 59, 26, 18, 49, 55, 41, 45)
assert (winSzInSq.sum == inLen)
val inData: List[Int] = List.tabulate(inLen)(i => i)
val maxWinSz = 56
val winSzOutSq: List[Int] = winSzInSq.map(_ min maxWinSz)
val inDataSq: List[List[Int]] = {
@tailrec
def loop(rem: List[Int], sz: List[Int], res: List[List[Int]]): List[List[Int]] =
sz match {
case head :: tail =>
val (remHd, remTl) = rem.splitAt(head)
loop(remTl, tail, remHd :: res)
case Nil => res.reverse
}
loop(inData, winSzInSq, Nil)
}
val expected: Vector[Int] = (inDataSq zip winSzOutSq).iterator.flatMap { case (w, sz) => w take sz }.toVector
val g = Graph {
import graph._
import de.sciss.fscape.Ops._
val in = ArithmSeq().take(inLen)
val winSzIn: GE = winSzInSq.map(i => i: GE).reduce(_ ++ _)
val winSzOut = winSzIn.min(maxWinSz)
val dStop = winSzOut - winSzIn
// dStop.poll(1, "dStop") // (0, 0, -3, 0, 0, 0, 0, 0, 0) -- correct
// winSzIn.poll(1, "winSzIn")
val out = ResizeWindow(in, winSzIn, stop = dStop)
// Length(in ).poll("in .length") // 385 -- correct
Length(out) // .poll("out.length") // 392 -- wrong
DebugIntPromise(out, p)
}
runGraph(g, 128)
assert(p.isCompleted)
val res = p.future.value.get
assert (res === Success(expected))
}
}
|
Sciss/FScape-next
|
core/jvm/src/test/scala/de/sciss/fscape/ResizeWindowSpec.scala
|
Scala
|
agpl-3.0
| 3,199
|
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: blah@cliffano.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package org.openapitools.client.model
case class InputStepImpllinks(
self: Option[Link] = None,
`class`: Option[String] = None
)
|
cliffano/swaggy-jenkins
|
clients/scala-sttp/generated/src/main/scala/org/openapitools/client/model/InputStepImpllinks.scala
|
Scala
|
mit
| 483
|
package io.dmjb.riscv.instructions
import io.dmjb.riscv.model.State
sealed trait Instruction {
def execute(state: State): State
}
case class LoadByte(rs1: Int, imm: Int, rd: Int) extends IFormat with Instruction {
  override def execute(state: State): State = ???
}
case class LoadHalfword() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class LoadWord() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class LoadByteUnsigned() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class LoadHalfUnsigned() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class StoreByte() extends SFormat with Instruction {
override def execute(state: State): State = ???
}
case class StoreHalfword() extends SFormat with Instruction {
override def execute(state: State): State = ???
}
case class StoreWord() extends SFormat with Instruction {
override def execute(state: State): State = ???
}
case class ShiftLeft() extends RFormat with Instruction {
override def execute(state: State): State = ???
}
case class ShiftLeftImmediate() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class ShiftRight() extends RFormat with Instruction {
override def execute(state: State): State = ???
}
case class ShiftRightImmediate() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class ShiftRightArithmetic() extends RFormat with Instruction {
override def execute(state: State): State = ???
}
case class ShiftRightArithImm() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class Add() extends RFormat with Instruction {
override def execute(state: State): State = ???
}
case class AddImmediate() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class Subtract() extends RFormat with Instruction {
override def execute(state: State): State = ???
}
case class LoadUpperImm() extends UFormat with Instruction {
override def execute(state: State): State = ???
}
case class AddUpperImmToPc() extends UFormat with Instruction {
override def execute(state: State): State = ???
}
case class SetLt() extends RFormat with Instruction {
override def execute(state: State): State = ???
}
case class SetLtImmediate() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class SetLtUnsigned() extends RFormat with Instruction {
override def execute(state: State): State = ???
}
case class SetLtImmUnsigned() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class BranchEq() extends SBFormat with Instruction {
override def execute(state: State): State = ???
}
case class BranchNe() extends SBFormat with Instruction {
override def execute(state: State): State = ???
}
case class BranchLt() extends SBFormat with Instruction {
override def execute(state: State): State = ???
}
case class BranchGte() extends SBFormat with Instruction {
override def execute(state: State): State = ???
}
case class BranchLtUnsigned() extends SBFormat with Instruction {
override def execute(state: State): State = ???
}
case class BranchGteUnsigned() extends SBFormat with Instruction {
override def execute(state: State): State = ???
}
case class JumpLink() extends UJFormat with Instruction {
override def execute(state: State): State = ???
}
case class JumpLinkRegister() extends UJFormat with Instruction {
override def execute(state: State): State = ???
}
case class SynchThread() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class SynchInstrData() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class SystemCall() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class SystemBreak() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class ReadCycle() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class ReadCycleUpperHalf() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class ReadTime() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class ReadTimeUpperHalf() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class ReadInstrRetired() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
case class ReadInstrUpperHalf() extends IFormat with Instruction {
override def execute(state: State): State = ???
}
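Every `execute` above is stubbed with `???`. As a minimal sketch of what one implementation could look like, assuming a hypothetical register-file state (the project's real `State`, `RFormat`, and `Instruction` definitions are not shown in this file):
object AddSketch {
  // Hypothetical state shape for illustration: a register vector plus a program counter.
  final case class RegState(regs: Vector[Long], pc: Long)
  // ADD: rd <- rs1 + rs2. In RISC-V, x0 is hard-wired to zero, so writes to
  // register 0 are dropped; the PC advances past the 4-byte instruction.
  def execute(rd: Int, rs1: Int, rs2: Int, state: RegState): RegState = {
    val sum  = state.regs(rs1) + state.regs(rs2)
    val regs = if (rd == 0) state.regs else state.regs.updated(rd, sum)
    RegState(regs, state.pc + 4)
  }
}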
|
dmjb/risc-v
|
src/main/scala/io/dmjb/riscv/instructions/Instruction.scala
|
Scala
|
apache-2.0
| 4,863
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming
import org.apache.spark.annotation.{Experimental, InterfaceStability}
/**
* :: Experimental ::
* Exception that stopped a [[StreamingQuery]]. Use `cause` to get the actual exception
* that caused the failure.
* @param message Message of this exception
* @param cause Internal cause of this exception
* @param startOffset Starting offset in json of the range of data in which the exception occurred
* @param endOffset Ending offset in json of the range of data in which the exception occurred
* @since 2.0.0
*/
@Experimental
@InterfaceStability.Evolving
class StreamingQueryException private[sql](
private val queryDebugString: String,
val message: String,
val cause: Throwable,
val startOffset: String,
val endOffset: String)
extends Exception(message, cause) {
/** Time when the exception occurred */
val time: Long = System.currentTimeMillis
override def toString(): String =
s"""${classOf[StreamingQueryException].getName}: ${cause.getMessage}
|$queryDebugString""".stripMargin
}
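A brief usage sketch, assuming an already-started `query: StreamingQuery` is in scope; awaitTermination rethrows the failure that stopped the query:
try {
  query.awaitTermination()
} catch {
  case e: StreamingQueryException =>
    // `cause` is the actual exception; startOffset/endOffset bound the failing data range.
    println(s"Failed between offsets ${e.startOffset} and ${e.endOffset}: ${e.cause}")
}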
|
spark0001/spark2.1.1
|
sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryException.scala
|
Scala
|
apache-2.0
| 1,871
|
/* sbt -- Simple Build Tool
* Copyright 2009, 2010, 2011, 2012 Mark Harrah
*/
package sbt
import java.io.File
import scala.collection.mutable.Map
// Assumes exclusive ownership of the file.
private[sbt] class TestStatusReporter(f: File) extends TestsListener {
private lazy val succeeded = TestStatus.read(f)
def doInit {}
def startGroup(name: String): Unit = { succeeded remove name }
def testEvent(event: TestEvent): Unit = ()
def endGroup(name: String, t: Throwable): Unit = ()
def endGroup(name: String, result: TestResult.Value): Unit = {
if (result == TestResult.Passed)
succeeded(name) = System.currentTimeMillis
}
def doComplete(finalResult: TestResult.Value): Unit = {
TestStatus.write(succeeded, "Successful Tests", f)
}
}
private[sbt] object TestStatus {
import java.util.Properties
def read(f: File): Map[String, Long] =
{
import scala.collection.JavaConversions.{ enumerationAsScalaIterator, propertiesAsScalaMap }
val properties = new Properties
IO.load(properties, f)
properties map { case (k, v) => (k, v.toLong) }
}
def write(map: Map[String, Long], label: String, f: File): Unit = {
val properties = new Properties
for ((test, lastSuccessTime) <- map)
properties.setProperty(test, lastSuccessTime.toString)
IO.write(properties, label, f)
}
}
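A round-trip sketch, usable from inside the sbt package (both members are private[sbt]); the file simply stores "test name -> epoch millis of last success" as Java properties, and the path below is hypothetical:
import java.io.File
import scala.collection.mutable.Map

val f: File = new File("target/streams/test/succeeded")  // hypothetical location
val succeeded: Map[String, Long] = TestStatus.read(f)
succeeded("sbt.SomeSpec") = System.currentTimeMillis
TestStatus.write(succeeded, "Successful Tests", f)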
|
jasonchaffee/sbt
|
testing/src/main/scala/sbt/TestStatusReporter.scala
|
Scala
|
bsd-3-clause
| 1,357
|
package japgolly.scalajs.react
import scalaz.{Optional => _, _}
import extra._
import router.{BaseUrl, AbsUrl, Path}
import router.StaticDsl.Rule
private[react] object ScalazReactExtra {
final class ScalazReusability$Ops(private val ε: Reusability.type) extends AnyVal {
/** Compare using Scalaz equality. */
def byEqual[A](implicit e: Equal[A]): Reusability[A] =
new Reusability(e.equal)
/** Compare by reference and if different, compare using Scalaz equality. */
def byRefOrEqual[A <: AnyRef : Equal]: Reusability[A] =
Reusability.byRef[A] || byEqual[A]
}
final class ScalazListenable$Ops(private val ε: Listenable.type) extends AnyVal {
import Listenable._
import ScalazReact._
def installS[P, S, B <: OnUnmount, N <: TopNode, M[_], A](f: P => Listenable[A], g: A => ReactST[M, S, Unit])(implicit M: M ~> CallbackTo, N: Monad[M]) =
install[P, S, B, N, A](f, $ => a => $.runState(g(a)))
def installSF[P, S, B <: OnUnmount, N <: TopNode, M[_], A](f: P => Listenable[A], g: A => ReactST[M, S, Unit])(implicit M: M ~> CallbackTo, N: Monad[M], F: ChangeFilter[S]) =
install[P, S, B, N, A](f, $ => a => $.runStateF(g(a)))
}
}
trait ScalazReactExtra {
import ScalazReactExtra._
implicit def routerEqualBaseUrl: Equal[BaseUrl] = Equal.equalA
implicit def routerEqualPath : Equal[Path] = Equal.equalA
implicit def routerEqualAbsUrl : Equal[AbsUrl] = Equal.equalA
implicit def routerRuleMonoid[P]: Monoid[Rule[P]] =
new Monoid[Rule[P]] {
override def zero = Rule.empty
override def append(a: Rule[P], b: => Rule[P]) = a | b
}
@inline implicit def ScalazReusability$Ops(a: Reusability.type) = new ScalazReusability$Ops(a)
@inline implicit def ScalazListenable$Ops(a: Listenable.type) = new ScalazListenable$Ops(a)
implicit def reusabilityDisjunction[A: Reusability, B: Reusability]: Reusability[A \/ B] =
Reusability.fn((x, y) =>
x.fold[Boolean](
a => y.fold(a ~=~ _, _ => false),
b => y.fold(_ => false, b ~=~ _)))
implicit def reusabilityThese[A: Reusability, B: Reusability]: Reusability[A \&/ B] = {
import \&/._
Reusability.fn {
case (Both(a, b), Both(c, d)) => (a ~=~ c) && (b ~=~ d)
case (This(a), This(b)) => a ~=~ b
case (That(a), That(b)) => a ~=~ b
case _ => false
}
}
}
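A usage sketch of the Reusability extension above, assuming these implicits are in scope (e.g. via the ScalazReact object that mixes in this trait) and a hypothetical Foo type:
import japgolly.scalajs.react.extra.Reusability
import scalaz.Equal

final case class Foo(n: Int)
object Foo {
  implicit val equal: Equal[Foo] = Equal.equalA
  // From ScalazReusability$Ops: cheap reference check first, falling back
  // to Scalaz equality when the references differ.
  implicit val reuse: Reusability[Foo] = Reusability.byRefOrEqual[Foo]
}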
|
zlangbert/scalajs-react
|
scalaz-7.2/src/main/scala/japgolly/scalajs/react/Extra.scala
|
Scala
|
apache-2.0
| 2,374
|
package TAPL
import TAPL.Lib._
object RecType {
trait Alg[T] {
def TyRec(x: String, t: T): T
}
trait Print extends Alg[String] {
def TyRec(x: String, t: String): String = "Rec " + x + "." + t
}
trait Parse[T] extends TParser[T] {
lexical.reserved += "Rec"
lexical.delimiters += "."
val alg: Alg[T]
val pRecTypeT: Parser[T] = "Rec" ~> ucid ~ ("." ~> pT) ^^ { case x ~ ty => alg.TyRec(x, ty) }
}
}
object FullEquiRec {
trait Alg[E, T] extends FullSimple.Alg[E, T] with RecType.Alg[T]
trait Print extends Alg[String, String] with FullSimple.Print with RecType.Print
trait Parse[E, T] extends FullSimple.Parse[E, T] with RecType.Parse[T] {
override val alg: Alg[E, T]
val pFullEquiRecE: Parser[E] = pFullSimpleE
val pFullEquiRecT: Parser[T] = pFullSimpleT ||| pRecTypeT
override val pE: Parser[E] = pFullEquiRecE
override val pT: Parser[T] = pFullEquiRecT
}
}
object TestFullEquiRec {
def parseWithAlg[E, T](inp: String)(a: FullEquiRec.Alg[E, T]): E = {
val p = new FullEquiRec.Parse[E, T] {
override val alg: FullEquiRec.Alg[E, T] = a
}
parse(p.pE)(inp)
}
def parseAndPrint(inp: String): Unit = println(parseWithAlg(inp)(new FullEquiRec.Print {}))
}
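A small sketch exercising the Print algebra on its own (grounded only in RecType.Print above; the concrete term syntax accepted by the full parser is not shown here):
val printAlg = new RecType.Print {}
// TyRec concatenates "Rec ", the variable, ".", and the body:
println(printAlg.TyRec("A", "A -> A"))  // prints: Rec A.A -> A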
|
hy-zhang/parser
|
Scala/Parser/src/TAPL/FullEquiRec.scala
|
Scala
|
bsd-3-clause
| 1,254
|
package jp.ne.opt.redshiftfake.parse.compat
import Ops._
import net.sf.jsqlparser.expression.operators.arithmetic._
import net.sf.jsqlparser.expression._
import net.sf.jsqlparser.expression.operators.conditional.{AndExpression, OrExpression}
import net.sf.jsqlparser.expression.operators.relational._
import net.sf.jsqlparser.parser.CCJSqlParserUtil
import net.sf.jsqlparser.schema.{Table, Column}
import net.sf.jsqlparser.statement.create.index.CreateIndex
import net.sf.jsqlparser.statement.create.table.CreateTable
import net.sf.jsqlparser.statement.delete.Delete
import net.sf.jsqlparser.statement.drop.Drop
import net.sf.jsqlparser.statement.execute.Execute
import net.sf.jsqlparser.statement.insert.Insert
import net.sf.jsqlparser.statement.replace.Replace
import net.sf.jsqlparser.statement.truncate.Truncate
import net.sf.jsqlparser.statement.update.Update
import net.sf.jsqlparser.statement.{Statements, SetStatement, StatementVisitor}
import net.sf.jsqlparser.statement.alter.Alter
import net.sf.jsqlparser.statement.create.view.{CreateView, AlterView}
import net.sf.jsqlparser.statement.merge.Merge
import net.sf.jsqlparser.statement.select._
import scala.collection.JavaConverters._
class CompatibilityHandler extends SelectVisitor
with FromItemVisitor
with ExpressionVisitor
with ItemsListVisitor
with SelectItemVisitor
with StatementVisitor {
def visit(withItem: WithItem): Unit = withItem.getSelectBody.accept(this)
def visit(setOpList: SetOperationList): Unit = {
setOpList.getSelects.asScala.foreach(_.accept(this))
}
def visit(plainSelect: PlainSelect): Unit = {
Option(plainSelect.getSelectItems).foreach { items =>
items.asScala.foreach(_.accept(this))
}
Option(plainSelect.getFromItem).foreach(_.accept(this))
Option(plainSelect.getJoins).foreach { joins =>
joins.asScala.foreach(_.getRightItem.accept(this))
}
Option(plainSelect.getWhere).foreach(_.accept(this))
Option(plainSelect.getOracleHierarchical).foreach(_.accept(this))
}
def visit(tableFunction: TableFunction): Unit = {
}
def visit(valuesList: ValuesList): Unit = {
}
def visit(lateralSubSelect: LateralSubSelect): Unit = {
lateralSubSelect.getSubSelect.getSelectBody.accept(this)
}
def visit(subjoin: SubJoin): Unit = {
subjoin.getLeft.accept(this)
subjoin.getJoin.getRightItem.accept(this)
}
def visit(tableName: Table): Unit = {
}
def visit(literal: DateTimeLiteralExpression): Unit = {
}
def visit(timeKeyExpression: TimeKeyExpression): Unit = {
}
def visit(hint: OracleHint): Unit = {
}
def visit(doubleValue: DoubleValue): Unit = {
}
def visit(longValue: LongValue): Unit = {
}
def visit(hexValue: HexValue): Unit = {
}
def visit(dateValue: DateValue): Unit = {
}
def visit(timeValue: TimeValue): Unit = {
}
def visit(timestampValue: TimestampValue): Unit = {
}
def visit(stringValue: StringValue): Unit = {
}
def visit(parenthesis: Parenthesis): Unit = {
parenthesis.getExpression.accept(this)
}
def visit(likeExpression: LikeExpression): Unit = {
visitBinaryExpression(likeExpression)
}
def visit(minorThan: MinorThan): Unit = {
visitBinaryExpression(minorThan)
}
def visit(minorThanEquals: MinorThanEquals): Unit = {
visitBinaryExpression(minorThanEquals)
}
def visit(notEqualsTo: NotEqualsTo): Unit = {
visitBinaryExpression(notEqualsTo)
}
def visit(tableColumn: Column): Unit = {
}
def visit(castExpression: CastExpression): Unit = {
castExpression.getLeftExpression.accept(this)
}
def visit(caseExpression: CaseExpression): Unit = {
Option(caseExpression.getElseExpression).foreach(_.accept(this))
Option(caseExpression.getSwitchExpression).foreach(_.accept(this))
Option(caseExpression.getWhenClauses).foreach { clauses =>
clauses.asScala.foreach(_.accept(this))
}
}
def visit(wgexpr: WithinGroupExpression): Unit = {
}
def visit(eexpr: ExtractExpression): Unit = {
}
def visit(iexpr: IntervalExpression): Unit = {
}
def visit(oexpr: OracleHierarchicalExpression): Unit = {
Option(oexpr.getStartExpression).foreach(_.accept(this))
Option(oexpr.getConnectExpression).foreach(_.accept(this))
}
def visit(rexpr: RegExpMatchOperator): Unit = {
visitBinaryExpression(rexpr)
}
def visit(aexpr: AnalyticExpression): Unit = {
}
def visit(modulo: Modulo): Unit = {
visitBinaryExpression(modulo)
}
def visit(rowConstructor: RowConstructor): Unit = {
rowConstructor.getExprList.getExpressions.asScala.foreach(_.accept(this))
}
def visit(groupConcat: MySQLGroupConcat): Unit = {
}
def visit(aexpr: KeepExpression): Unit = {
}
def visit(bind: NumericBind): Unit = {
}
def visit(`var`: UserVariable): Unit = {
}
def visit(regExpMySQLOperator: RegExpMySQLOperator): Unit = {
visitBinaryExpression(regExpMySQLOperator)
}
def visit(jsonExpr: JsonExpression): Unit = {
}
def visit(isNullExpression: IsNullExpression): Unit = {
}
def visit(inExpression: InExpression): Unit = {
Option(inExpression.getLeftExpression).fold {
Option(inExpression.getLeftItemsList).foreach(_.accept(this))
} { _.accept(this) }
inExpression.getRightItemsList.accept(this)
}
def visit(greaterThanEquals: GreaterThanEquals): Unit = {
visitBinaryExpression(greaterThanEquals)
}
def visit(greaterThan: GreaterThan): Unit = {
visitBinaryExpression(greaterThan)
}
def visit(jdbcNamedParameter: JdbcNamedParameter): Unit = {
}
def visit(jdbcParameter: JdbcParameter): Unit = {
}
def visit(signedExpression: SignedExpression): Unit = {
signedExpression.getExpression.accept(this)
}
def visit(function: Function): Unit = {
function.getName.toLowerCase match {
case "getdate" =>
function.setName("now")
//https://www.postgresql.org/docs/9.5/static/functions-conditional.html
case "nvl" =>
function.setName("coalesce")
//https://docs.aws.amazon.com/redshift/latest/dg/r_LISTAGG.html
case "listagg" =>
//https://www.postgresql.org/docs/current/static/functions-aggregate.html
function.setName("string_agg")
function.setDistinct(false)
case _ =>
}
Option(function.getParameters).foreach(visit)
}
def visit(nullValue: NullValue): Unit = {
}
def visit(addition: Addition): Unit = {
visitBinaryExpression(addition)
}
def visit(division: Division): Unit = {
visitBinaryExpression(division)
}
def visit(multiplication: Multiplication): Unit = {
visitBinaryExpression(multiplication)
}
def visit(subtraction: Subtraction): Unit = {
visitBinaryExpression(subtraction)
}
def visit(andExpression: AndExpression): Unit = {
visitBinaryExpression(andExpression)
}
def visit(orExpression: OrExpression): Unit = {
visitBinaryExpression(orExpression)
}
def visit(between: Between): Unit = {
between.getLeftExpression.accept(this)
between.getBetweenExpressionStart.accept(this)
between.getBetweenExpressionEnd.accept(this)
}
def visit(equalsTo: EqualsTo): Unit = {
visitBinaryExpression(equalsTo)
}
def visit(whenClause: WhenClause): Unit = {
Option(whenClause.getThenExpression).foreach(_.accept(this))
Option(whenClause.getWhenExpression).foreach(_.accept(this))
}
def visit(existsExpression: ExistsExpression): Unit = {
existsExpression.getRightExpression.accept(this)
}
def visit(allComparisonExpression: AllComparisonExpression): Unit = {
allComparisonExpression.getSubSelect.getSelectBody.accept(this)
}
def visit(anyComparisonExpression: AnyComparisonExpression): Unit = {
anyComparisonExpression.getSubSelect.getSelectBody.accept(this)
}
def visit(concat: Concat): Unit = {
visitBinaryExpression(concat)
}
def visit(matches: Matches): Unit = {
visitBinaryExpression(matches)
}
def visit(bitwiseAnd: BitwiseAnd): Unit = {
visitBinaryExpression(bitwiseAnd)
}
def visit(bitwiseOr: BitwiseOr): Unit = {
visitBinaryExpression(bitwiseOr)
}
def visit(bitwiseXor: BitwiseXor): Unit = {
visitBinaryExpression(bitwiseXor)
}
def visit(multiExprList: MultiExpressionList): Unit = {
multiExprList.getExprList.asScala.foreach(_.accept(this))
}
def visit(expressionList: ExpressionList): Unit = {
expressionList.getExpressions.asScala.foreach(_.accept(this))
}
def visit(subSelect: SubSelect): Unit = {
Option(subSelect.getWithItemsList).foreach { itemList =>
itemList.asScala.foreach(_.accept(this))
}
subSelect.getSelectBody.accept(this)
}
def visit(selectExpressionItem: SelectExpressionItem): Unit = {
selectExpressionItem.getExpression match {
case expression: WithinGroupExpression =>
if (expression.getName.equalsIgnoreCase("listagg")){
val asFunction = new Function()
asFunction.setName(expression.getName)
asFunction.setParameters(expression.getExprList)
selectExpressionItem.setExpression(asFunction)
}
case expression: Function =>
expression.getName.toLowerCase match {
case "median" => {
//https://docs.aws.amazon.com/redshift/latest/dg/r_MEDIAN.html
val asPercentileCont = new WithinGroupExpression()
asPercentileCont.setName("percentile_cont")
val parameters = new ExpressionList
parameters.setExpressions(List(new DoubleValue("0.5").asInstanceOf[Expression]).asJava)
asPercentileCont.setExprList(parameters)
val orderBy = new OrderByElement()
orderBy.setExpression(expression.getParameters.getExpressions.get(0))
asPercentileCont.setOrderByElements(List(orderBy).asJava)
selectExpressionItem.setExpression(asPercentileCont)
}
case "nvl2" => {
//https://docs.aws.amazon.com/redshift/latest/dg/r_NVL2.html
val functionsArguments = expression.getParameters.getExpressions
val asCaseStatement = new CaseExpression
val isNullExpression = new IsNullExpression
isNullExpression.setNot(true)
isNullExpression.setLeftExpression(functionsArguments.get(0))
asCaseStatement.setSwitchExpression(isNullExpression)
asCaseStatement.setWhenClauses(List(functionsArguments.get(1)).asJava)
asCaseStatement.setElseExpression(functionsArguments.get(2))
selectExpressionItem.setExpression(asCaseStatement)
}
// The simplest way to achieve this in PostgreSQL is as follows:
// `case when extract(epoch from age(a, b)) > 0 then 1 when extract(epoch from age(a, b)) < 0 then -1 else 0 end`
case "timestamp_cmp" => {
val functionsArguments = expression.getParameters.getExpressions
val asCaseStatement = new CaseExpression
val ageFunction = new Function()
ageFunction.setName("age")
val ageFunctionParameters = new ExpressionList()
ageFunctionParameters.setExpressions(functionsArguments)
ageFunction.setParameters(ageFunctionParameters)
val extractSecondsFromAge = new ExtractExpression()
extractSecondsFromAge.setName("epoch")
extractSecondsFromAge.setExpression(ageFunction)
val greaterThan = new GreaterThan()
greaterThan.setLeftExpression(extractSecondsFromAge)
greaterThan.setRightExpression(new LongValue(0))
val greaterThanWhenClause = new WhenClause
greaterThanWhenClause.setWhenExpression(greaterThan)
greaterThanWhenClause.setThenExpression(new LongValue(1))
val lessThan = new MinorThan()
lessThan.setLeftExpression(extractSecondsFromAge)
lessThan.setRightExpression(new LongValue(0))
val lessThanWhenClause = new WhenClause
lessThanWhenClause.setWhenExpression(lessThan)
lessThanWhenClause.setThenExpression(new LongValue(-1))
asCaseStatement.setWhenClauses(List(greaterThanWhenClause.asInstanceOf[Expression], lessThanWhenClause.asInstanceOf[Expression]).asJava)
asCaseStatement.setElseExpression(new LongValue(0))
selectExpressionItem.setExpression(asCaseStatement)
}
case _ =>
}
case _ =>
}
selectExpressionItem.getExpression.accept(this)
}
def visit(allTableColumns: AllTableColumns): Unit = {
}
def visit(allColumns: AllColumns): Unit = {
}
def visit(stmts: Statements): Unit = {
stmts.getStatements.asScala.foreach(_.accept(this))
}
def visit(createView: CreateView): Unit = {
}
def visit(createTable: CreateTable): Unit = {
Option(createTable.getSelect).foreach(_.accept(this))
}
def visit(createIndex: CreateIndex): Unit = {
}
def visit(truncate: Truncate): Unit = {
}
def visit(drop: Drop): Unit = {
}
def visit(replace: Replace): Unit = {
Option(replace.getExpressions).foreach { expressions =>
expressions.asScala.foreach(_.accept(this))
}
Option(replace.getItemsList).foreach(_.accept(this))
}
def visit(insert: Insert): Unit = {
Option(insert.getItemsList).foreach(_.accept(this))
Option(insert.getSelect).foreach(_.accept(this))
}
def visit(update: Update): Unit = {
Option(update.getExpressions).foreach { expressions =>
expressions.asScala.foreach(_.accept(this))
}
Option(update.getFromItem).foreach(_.accept(this))
Option(update.getJoins).foreach { joins =>
joins.asScala.foreach(_.getRightItem.accept(this))
}
Option(update.getWhere).foreach(_.accept(this))
}
def visit(delete: Delete): Unit = {
Option(delete.getWhere).foreach(_.accept(this))
}
def visit(select: Select): Unit = {
Option(select.getWithItemsList).foreach { itemList =>
itemList.asScala.foreach(_.accept(this))
}
select.getSelectBody match {
case plainSelect: PlainSelect =>
val joins = Option(plainSelect.getJoins).map(_.asScala).getOrElse(Nil)
def mkSelect: PlainSelect = {
val select = new PlainSelect
select.setSelectItems(plainSelect.getSelectItems)
select.setFromItem(plainSelect.getFromItem)
select
}
//================================================
// Redshift supports FULL JOINs with non-merge-joinable or non-hash-joinable join conditions, but PostgreSQL does not.
// The following code tries to get rid of FULL JOINs (up to three tables) by replacing them with a union of LEFT/RIGHT JOINs.
//================================================
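// For example (illustrative SQL, single FULL JOIN case):
//   SELECT a.x, b.y FROM a FULL JOIN b ON a.id = b.id
// is rewritten to:
//   SELECT a.x, b.y FROM a LEFT JOIN b ON a.id = b.id
//   UNION
//   SELECT a.x, b.y FROM a RIGHT JOIN b ON a.id = b.id
// UNION (rather than UNION ALL) is what keeps rows matched by both joins
// from appearing twice in the result.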
joins match {
case join +: Nil if join.isFull =>
val expr1 = mkSelect
.withJoins(mkLeftJoin
.withRightItem(join.getRightItem)
.on(join.getOnExpression))
val expr2 = mkSelect
.withJoins(mkRightJoin
.withRightItem(join.getRightItem)
.on(join.getOnExpression))
val statement = CCJSqlParserUtil.parse(s"$expr1 UNION $expr2")
select.setSelectBody(statement.asInstanceOf[Select].getSelectBody)
case join1 +: join2 +: Nil if join1.isFull && join2.isLeft =>
val expr1 = mkSelect
.withJoins(
mkLeftJoin
.withRightItem(join1.getRightItem)
.on(join1.getOnExpression),
mkLeftJoin
.withRightItem(join2.getRightItem)
.on(join2.getOnExpression))
val expr2 = mkSelect
.withJoins(
mkRightJoin
.withRightItem(join1.getRightItem)
.on(join1.getOnExpression),
mkLeftJoin
.withRightItem(join2.getRightItem)
.on(join2.getOnExpression))
val statement = CCJSqlParserUtil.parse(s"$expr1 UNION $expr2")
select.setSelectBody(statement.asInstanceOf[Select].getSelectBody)
case join1 +: join2 +: Nil if join1.isFull && join2.isRight =>
val expr1 = mkSelect
.withJoins(
mkLeftJoin
.withRightItem(join1.getRightItem)
.on(join1.getOnExpression),
mkLeftJoin
.withRightItem(join2.getRightItem)
.on(join2.getOnExpression))
val expr2 = mkSelect
.withJoins(
mkRightJoin
.withRightItem(join1.getRightItem)
.on(join1.getOnExpression),
mkRightJoin
.withRightItem(join2.getRightItem)
.on(join2.getOnExpression))
val statement = CCJSqlParserUtil.parse(s"$expr1 UNION $expr2")
select.setSelectBody(statement.asInstanceOf[Select].getSelectBody)
case join1 +: join2 +: Nil if join1.isLeft && join2.isFull =>
val expr1 = mkSelect
.withJoins(
mkLeftJoin
.withRightItem(join1.getRightItem)
.on(join1.getOnExpression),
mkRightJoin
.withRightItem(join2.getRightItem)
.on(join2.getOnExpression))
val expr2 = mkSelect
.withJoins(
mkLeftJoin
.withRightItem(join1.getRightItem)
.on(join1.getOnExpression),
mkLeftJoin
.withRightItem(join2.getRightItem)
.on(join2.getOnExpression))
val statement = CCJSqlParserUtil.parse(s"$expr1 UNION $expr2")
select.setSelectBody(statement.asInstanceOf[Select].getSelectBody)
case join1 +: join2 +: Nil if join1.isRight && join2.isFull =>
val expr1 = mkSelect
.withJoins(
mkRightJoin
.withRightItem(join1.getRightItem)
.on(join1.getOnExpression),
mkLeftJoin
.withRightItem(join2.getRightItem)
.on(join2.getOnExpression))
val expr2 = mkSelect
.withJoins(
mkRightJoin
.withRightItem(join1.getRightItem)
.on(join1.getOnExpression),
mkRightJoin
.withRightItem(join2.getRightItem)
.on(join2.getOnExpression))
val statement = CCJSqlParserUtil.parse(s"$expr1 UNION $expr2")
select.setSelectBody(statement.asInstanceOf[Select].getSelectBody)
case join1 +: join2 +: Nil if join1.isFull && join2.isFull =>
val expr1 = mkSelect
.withJoins(
mkLeftJoin
.withRightItem(join1.getRightItem)
.on(join1.getOnExpression),
mkLeftJoin
.withRightItem(join2.getRightItem)
.on(join2.getOnExpression))
val expr2 = mkSelect
.withJoins(
mkRightJoin
.withRightItem(join1.getRightItem)
.on(join1.getOnExpression),
mkLeftJoin
.withRightItem(join2.getRightItem)
.on(join2.getOnExpression))
val expr3 = mkSelect
.withJoins(
mkRightJoin
.withRightItem(join1.getRightItem)
.on(join1.getOnExpression),
mkRightJoin
.withRightItem(join2.getRightItem)
.on(join2.getOnExpression))
val statement = CCJSqlParserUtil.parse(s"$expr1 UNION $expr2 UNION $expr3")
select.setSelectBody(statement.asInstanceOf[Select].getSelectBody)
case _ => // do nothing
}
case _ =>
}
select.getSelectBody.accept(this)
}
def visit(execute: Execute): Unit = {
}
def visit(set: SetStatement): Unit = {
set.getExpression.accept(this)
}
def visit(merge: Merge): Unit = {
Option(merge.getUsingTable).fold {
Option(merge.getUsingSelect).foreach(_.accept(this.asInstanceOf[FromItemVisitor]))
} { _.accept(this) }
}
def visit(alterView: AlterView): Unit = {
}
def visit(alter: Alter): Unit = {
}
private[this] def visitBinaryExpression(expr: BinaryExpression): Unit = {
expr.getLeftExpression.accept(this)
expr.getRightExpression.accept(this)
}
}
|
opt-tech/redshift-fake-driver
|
src/main/scala/jp/ne/opt/redshiftfake/parse/compat/CompatibilityHandler.scala
|
Scala
|
apache-2.0
| 20,632
|
/**
* Copyright (c) 2013, The National Archives <digitalpreservation@nationalarchives.gov.uk>
* https://www.nationalarchives.gov.uk
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package uk.gov.nationalarchives.csv.validator
package object schema {
val Uuid4Regex = "[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89ab][a-f0-9]{3}-[a-f0-9]{12}"
//val UriRegex = """([A-Za-z0-9]+:\/\/)?([a-zA-Z0-9]+(\.[a-zA-Z0-9]+)*)?(\/|(\/([A-Za-z0-9\:@!\$&'\(\}\*\+\-_,;=~\.]+|(%[A-F0-9]{2})+))*)(\?[A-Za-z0-9]+=[A-Za-z0-9]+(&[A-Za-z0-9]+=[A-Za-z0-9]+)*)?"""
val XsdDateTimeRegex = xsdDateNoTzComponentRegex + "T" + XsdTimeOptionalTimeZoneRegex
val XsdDateTimeWithTimeZoneRegex = xsdDateNoTzComponentRegex + "T" + XsdTimeWithTimeZoneRegex
val XsdDateRegex = xsdDateNoTzComponentRegex + xsdOptionalTzComponentRegex
lazy val XsdTimeOptionalTimeZoneRegex = xsdTimeNoTzComponentRegex + xsdOptionalTzComponentRegex
lazy val XsdTimeWithTimeZoneRegex = xsdTimeNoTzComponentRegex + xsdTzComponentRegex
private lazy val xsdDateNoTzComponentRegex = """-?[0-9]{4}-(((0(1|3|5|7|8)|1(0|2))-(0[1-9]|(1|2)[0-9]|3[0-1]))|((0(4|6|9)|11)-(0[1-9]|(1|2)[0-9]|30))|(02-(0[1-9]|(1|2)[0-9])))"""
private lazy val xsdTimeNoTzComponentRegex = """([0-1][0-9]|2[0-4]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9])(\.[0-9]{3})?"""
private lazy val xsdOptionalTzComponentRegex = """((\+|-)([0-1][0-9]|2[0-4]):(0[0-9]|[1-5][0-9])|Z)?"""
private lazy val xsdTzComponentRegex = """((\+|-)([0-1][0-9]|2[0-4]):(0[0-9]|[1-5][0-9])|Z)"""
val UkDateRegex = """(((0[1-9]|(1|2)[0-9]|3[0-1])\/(0(1|3|5|7|8)|1(0|2)))|((0[1-9]|(1|2)[0-9]|30)\/(0(4|6|9)|11))|((0[1-9]|(1|2)[0-9])\/02))\/[0-9]{4}"""
val PositiveIntegerRegex = "[0-9]+"
val UpperCaseRegex = "^[\\p{Lu}\\p{N}\\p{P}\\s]*$"
val LowerCaseRegex = "^[\\p{Ll}\\p{N}\\p{P}\\s]*$"
val UkDateFormat = "dd/MM/YYYY"
val PartUkDateRegex = """(([0\?][1-9\?])|([1-2\?][0-9\?])|([3\?][0-1\?])|\*)\/(January|February|March|April|May|June|July|August|September|October|November|December|\?|\*)\/([0-9\?]{4}|\*)"""
type FilePathBase = String
type FileName = String
}
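A quick sanity-check sketch for two of the patterns above (String.matches anchors the regex to the whole input):
import uk.gov.nationalarchives.csv.validator.schema._

val isUuid4 = "123e4567-e89b-42d3-a456-426614174000".matches(Uuid4Regex)  // true: version 4, variant [89ab]
val isUkDate = "31/01/2013".matches(UkDateRegex)                          // true: day/month range checks enforced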
|
adamretter/csv-validator
|
csv-validator-core/src/main/scala/uk/gov/nationalarchives/csv/validator/schema/package.scala
|
Scala
|
mpl-2.0
| 2,249
|
/*
* Copyright (C) 2011 Mathias Doenitz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spray.json
import org.specs2.mutable.Specification
class CustomFormatSpec extends Specification with DefaultJsonProtocol {
case class MyType(name: String, value: Int)
implicit val MyTypeProtocol: JsonFormat[MyType] = new JsonFormat[MyType] {
def read(json: JsValue) = {
json.asJsObject.getFields("name", "value") match {
case Seq(JsString(name), JsNumber(value)) => MyType(name, value.toInt)
case _ => deserializationError("Expected fields: 'name' (JSON string) and 'value' (JSON number)")
}
}
def write(obj: MyType) = JsObject("name" -> JsString(obj.name), "value" -> JsNumber(obj.value))
}
"A custom JsonFormat built with 'asJsonObject'" should {
val value = MyType("bob", 42)
"correctly deserialize valid JSON content" in {
"""{ "name": "bob", "value": 42 }""".parseJson.convertTo[MyType] mustEqual value
}
"support full round-trip (de)serialization" in {
value.toJson.convertTo[MyType] mustEqual value
}
}
}
|
spray/spray-json
|
src/test/scala/spray/json/CustomFormatSpec.scala
|
Scala
|
apache-2.0
| 1,607
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
/** Support for interacting with different versions of the HiveMetastoreClient */
package object client {
private[hive] sealed abstract class HiveVersion(
val fullVersion: String,
val extraDeps: Seq[String] = Nil,
val exclusions: Seq[String] = Nil)
// scalastyle:off
private[hive] object hive {
case object v12 extends HiveVersion("0.12.0")
case object v13 extends HiveVersion("0.13.1")
// Do not need Calcite because we disabled hive.cbo.enable.
//
// The other excluded dependencies are nowhere to be found, so exclude them explicitly. If
// they're needed by the metastore client, users will have to dig them out of somewhere and use
// configuration to point Spark at the correct jars.
case object v14 extends HiveVersion("0.14.0",
exclusions = Seq("org.apache.calcite:calcite-core",
"org.apache.calcite:calcite-avatica",
"org.pentaho:pentaho-aggdesigner-algorithm"))
case object v1_0 extends HiveVersion("1.0.1",
exclusions = Seq("eigenbase:eigenbase-properties",
"org.apache.calcite:calcite-core",
"org.apache.calcite:calcite-avatica",
"org.pentaho:pentaho-aggdesigner-algorithm",
"net.hydromatic:linq4j",
"net.hydromatic:quidem"))
// The curator dependency was added to the exclusions here because it seems to confuse the ivy
// library. org.apache.curator:curator is a pom dependency but ivy tries to find the jar for it,
// and fails.
case object v1_1 extends HiveVersion("1.1.1",
exclusions = Seq("eigenbase:eigenbase-properties",
"org.apache.calcite:calcite-core",
"org.apache.calcite:calcite-avatica",
"org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm",
"net.hydromatic:linq4j",
"net.hydromatic:quidem"))
case object v1_2 extends HiveVersion("1.2.2",
exclusions = Seq("eigenbase:eigenbase-properties",
"org.apache.calcite:calcite-core",
"org.apache.calcite:calcite-avatica",
"org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm",
"net.hydromatic:linq4j",
"net.hydromatic:quidem"))
case object v2_0 extends HiveVersion("2.0.1",
exclusions = Seq("org.apache.calcite:calcite-core",
"org.apache.calcite:calcite-avatica",
"org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm"))
case object v2_1 extends HiveVersion("2.1.1",
exclusions = Seq("org.apache.calcite:calcite-core",
"org.apache.calcite:calcite-avatica",
"org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm"))
case object v2_2 extends HiveVersion("2.2.0",
exclusions = Seq("org.apache.calcite:calcite-core",
"org.apache.calcite:calcite-druid",
"org.apache.calcite.avatica:avatica",
"org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm"))
// Since HIVE-14496, Hive materialized views need calcite-core.
// For Spark, only VersionsSuite currently creates a Hive materialized view for testing.
case object v2_3 extends HiveVersion("2.3.6",
exclusions = Seq("org.apache.calcite:calcite-druid",
"org.apache.calcite.avatica:avatica",
"org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm"))
// Since Hive 3.0, HookUtils uses org.apache.logging.log4j.util.Strings
// Since HIVE-14496, Hive.java uses calcite-core
case object v3_0 extends HiveVersion("3.0.0",
extraDeps = Seq("org.apache.logging.log4j:log4j-api:2.10.0",
"org.apache.derby:derby:10.14.1.0"),
exclusions = Seq("org.apache.calcite:calcite-druid",
"org.apache.calcite.avatica:avatica",
"org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm"))
// Since Hive 3.0, HookUtils uses org.apache.logging.log4j.util.Strings
// Since HIVE-14496, Hive.java uses calcite-core
case object v3_1 extends HiveVersion("3.1.2",
extraDeps = Seq("org.apache.logging.log4j:log4j-api:2.10.0",
"org.apache.derby:derby:10.14.1.0"),
exclusions = Seq("org.apache.calcite:calcite-druid",
"org.apache.calcite.avatica:avatica",
"org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm"))
val allSupportedHiveVersions =
Set(v12, v13, v14, v1_0, v1_1, v1_2, v2_0, v2_1, v2_2, v2_3, v3_0, v3_1)
}
// scalastyle:on
}
|
pgandhi999/spark
|
sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala
|
Scala
|
apache-2.0
| 5,298
|
package org.jetbrains.plugins.scala
package codeInspection
package shadow
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.openapi.project.Project
import com.intellij.psi.{PsiElement, PsiNamedElement, ResolveResult}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScCaseClause, ScReferencePattern}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.resolve.processor.ResolveProcessor
import org.jetbrains.plugins.scala.lang.resolve.{ResolvableStableCodeReferenceElement, StdKinds}
class VariablePatternShadowInspection extends AbstractInspection("VariablePatternShadow", "Suspicious shadowing by a Variable Pattern") {
def actionFor(holder: ProblemsHolder): PartialFunction[PsiElement, Any] = {
case refPat: ScReferencePattern => check(refPat, holder)
}
private def check(refPat: ScReferencePattern, holder: ProblemsHolder) {
val isInCaseClause = ScalaPsiUtil.nameContext(refPat).isInstanceOf[ScCaseClause]
if (isInCaseClause) {
val dummyRef: ScStableCodeReferenceElement = ScalaPsiElementFactory.createReferenceFromText(refPat.name, refPat.getContext.getContext, refPat)
val proc = new ResolveProcessor(StdKinds.valuesRef, dummyRef, refPat.name)
val results = dummyRef.asInstanceOf[ResolvableStableCodeReferenceElement].doResolve(dummyRef, proc)
def isAccessible(rr: ResolveResult): Boolean = rr.getElement match {
case named: PsiNamedElement => proc.isAccessible(named, refPat)
case _ => false
}
if (results.exists(isAccessible)) {
holder.registerProblem(refPat.nameId, getDisplayName, new ConvertToStableIdentifierPatternFix(refPat), new RenameVariablePatternFix(refPat))
}
}
}
}
class ConvertToStableIdentifierPatternFix(r: ScReferencePattern)
extends AbstractFixOnPsiElement("Convert to Stable Identifier Pattern `%s`".format(r.getText), r) {
def doApplyFix(project: Project) {
val ref = getElement
val stableIdPattern = ScalaPsiElementFactory.createPatternFromText("`%s`".format(ref.getText), ref.getManager)
ref.replace(stableIdPattern)
}
}
class RenameVariablePatternFix(ref: ScReferencePattern) extends RenameElementQuickfix(ref, "Rename Variable Pattern")
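A sketch of the code shape this inspection flags (hypothetical snippet):
val status = "OK"
"FAIL" match {
  // `status` is a fresh variable pattern: it matches anything and shadows the
  // outer val, which is rarely intended. This prints "matched FAIL".
  case status => println(s"matched $status")
}
// ConvertToStableIdentifierPatternFix rewrites the clause to: case `status` =>
// which compares against the outer val instead of binding a new variable.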
|
triggerNZ/intellij-scala
|
src/org/jetbrains/plugins/scala/codeInspection/shadow/VariablePatternShadowInspection.scala
|
Scala
|
apache-2.0
| 2,421
|
/**
* Copyright 2015 ICT.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.ac.ict.acs.netflow.load.worker
trait Writer {
def id: Int
def timeBase(): Long
def init(): Unit
def write(flowSet: DataFlowSet): Unit
def close(): Unit
}
trait WriterWrapper {
def init()
def write(flowSet: DataFlowSet, packetTime: Long)
def close()
}
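A minimal no-op sketch of the Writer contract (DataFlowSet is defined elsewhere in this package; a real implementation would persist each flow set):
class NoopWriter(val id: Int, base: Long) extends Writer {
  def timeBase(): Long = base
  def init(): Unit = ()                       // e.g. open files or connections
  def write(flowSet: DataFlowSet): Unit = ()  // discard; a real writer persists the flow set
  def close(): Unit = ()                      // release resources
}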
|
DataSysLab/netflow
|
load/src/main/scala/cn/ac/ict/acs/netflow/load/worker/Writer.scala
|
Scala
|
apache-2.0
| 1,108
|
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.common.sink
import com.datamountaineer.streamreactor.common.rowkeys.StringGenericRowKeyBuilder
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
class StringGenericRowKeyBuilderTest extends AnyWordSpec with Matchers {
"StringGenericRowKeyBuilder" should {
"use the topic, partition and offset to make the key" in {
val topic = "sometopic"
val partition = 2
val offset = 1243L
val sinkRecord = new SinkRecord(topic, partition, Schema.INT32_SCHEMA, 345, Schema.STRING_SCHEMA, "", offset)
val keyBuilder = new StringGenericRowKeyBuilder()
val expected = Seq(topic, partition, offset).mkString("|")
keyBuilder.build(sinkRecord) shouldBe expected
}
}
}
|
datamountaineer/stream-reactor
|
kafka-connect-common/src/test/scala/com/datamountaineer/streamreactor/common/sink/StringGenericRowKeyBuilderTest.scala
|
Scala
|
apache-2.0
| 1,486
|
package edu.mit.cryptdb.user
import edu.mit.cryptdb.SqlExpr
// implementations should be completely stateless
trait Translator {
def translateTableName(
plainTableName: String): String
def translateColumnName(
plainTableName: String, plainColumnName: String, encOnion: Int): String
def translatePrecomputedExprName(
exprId: String, plainTableName: String, expr: SqlExpr, encOnion: Int): String
def filenameForHomAggGroup(
aggId: Int, plainDbName: String, plainTableName: String, aggs: Seq[SqlExpr]): String
def preferredHomAggGroup(
plainTableName: String, group: Seq[SqlExpr]): Seq[SqlExpr]
def sizeInfoForHomAggGroup(
plainTableName: String, group: Seq[SqlExpr]): Int
}
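A minimal sketch of a stateless implementation with a made-up naming scheme (the scheme is illustrative, not the project's real one):
object SuffixTranslator extends Translator {
  def translateTableName(plainTableName: String): String =
    "enc_" + plainTableName
  def translateColumnName(plainTableName: String, plainColumnName: String, encOnion: Int): String =
    s"${plainColumnName}_o$encOnion"
  def translatePrecomputedExprName(exprId: String, plainTableName: String, expr: SqlExpr, encOnion: Int): String =
    s"expr_${exprId}_o$encOnion"
  def filenameForHomAggGroup(aggId: Int, plainDbName: String, plainTableName: String, aggs: Seq[SqlExpr]): String =
    s"$plainDbName.$plainTableName.agg$aggId"
  def preferredHomAggGroup(plainTableName: String, group: Seq[SqlExpr]): Seq[SqlExpr] =
    group  // keep the caller's ordering
  def sizeInfoForHomAggGroup(plainTableName: String, group: Seq[SqlExpr]): Int =
    group.size  // placeholder metric; real implementations report actual size info
}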
|
tristartom/monomi-optimizer
|
src/main/scala/user/translator.scala
|
Scala
|
mit
| 717
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.yarn
import java.io.File
import java.net.URL
import java.nio.charset.StandardCharsets
import java.util.{HashMap => JHashMap}
import scala.collection.mutable
import scala.concurrent.duration._
import scala.io.Source
import scala.language.postfixOps
import com.google.common.io.{ByteStreams, Files}
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually._
import org.apache.spark._
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.deploy.yarn.config._
import org.apache.spark.internal.Logging
import org.apache.spark.launcher._
import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationStart,
SparkListenerExecutorAdded}
import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.tags.ExtendedYarnTest
import org.apache.spark.util.Utils
/**
* Integration tests for YARN; these tests use a mini Yarn cluster to run Spark-on-YARN
* applications, and require the Spark assembly to be built before they can be successfully
* run.
*/
@ExtendedYarnTest
class YarnClusterSuite extends BaseYarnClusterSuite {
override def newYarnConfig(): YarnConfiguration = new YarnConfiguration()
private val TEST_PYFILE = """
|import mod1, mod2
|import sys
|from operator import add
|
|from pyspark import SparkConf, SparkContext
|if __name__ == "__main__":
| if len(sys.argv) != 2:
| print >> sys.stderr, "Usage: test.py [result file]"
| exit(-1)
| sc = SparkContext(conf=SparkConf())
| status = open(sys.argv[1],'w')
| result = "failure"
| rdd = sc.parallelize(range(10)).map(lambda x: x * mod1.func() * mod2.func())
| cnt = rdd.count()
| if cnt == 10:
| result = "success"
| status.write(result)
| status.close()
| sc.stop()
""".stripMargin
private val TEST_PYMODULE = """
|def func():
| return 42
""".stripMargin
test("run Spark in yarn-client mode") {
testBasicYarnApp(true)
}
test("run Spark in yarn-cluster mode") {
testBasicYarnApp(false)
}
test("run Spark in yarn-client mode with different configurations, ensuring redaction") {
testBasicYarnApp(true,
Map(
"spark.driver.memory" -> "512m",
"spark.executor.cores" -> "1",
"spark.executor.memory" -> "512m",
"spark.executor.instances" -> "2",
// Sending some sensitive information, which we'll make sure gets redacted
"spark.executorEnv.HADOOP_CREDSTORE_PASSWORD" -> YarnClusterDriver.SECRET_PASSWORD,
"spark.yarn.appMasterEnv.HADOOP_CREDSTORE_PASSWORD" -> YarnClusterDriver.SECRET_PASSWORD
))
}
test("run Spark in yarn-cluster mode with different configurations, ensuring redaction") {
testBasicYarnApp(false,
Map(
"spark.driver.memory" -> "512m",
"spark.driver.cores" -> "1",
"spark.executor.cores" -> "1",
"spark.executor.memory" -> "512m",
"spark.executor.instances" -> "2",
// Sending some sensitive information, which we'll make sure gets redacted
"spark.executorEnv.HADOOP_CREDSTORE_PASSWORD" -> YarnClusterDriver.SECRET_PASSWORD,
"spark.yarn.appMasterEnv.HADOOP_CREDSTORE_PASSWORD" -> YarnClusterDriver.SECRET_PASSWORD
))
}
test("run Spark in yarn-cluster mode with using SparkHadoopUtil.conf") {
testYarnAppUseSparkHadoopUtilConf()
}
test("run Spark in yarn-client mode with additional jar") {
testWithAddJar(true)
}
test("run Spark in yarn-cluster mode with additional jar") {
testWithAddJar(false)
}
test("run Spark in yarn-cluster mode unsuccessfully") {
// Don't provide arguments so the driver will fail.
val finalState = runSpark(false, mainClassName(YarnClusterDriver.getClass))
finalState should be (SparkAppHandle.State.FAILED)
}
test("run Spark in yarn-cluster mode failure after sc initialized") {
val finalState = runSpark(false, mainClassName(YarnClusterDriverWithFailure.getClass))
finalState should be (SparkAppHandle.State.FAILED)
}
test("run Python application in yarn-client mode") {
testPySpark(true)
}
test("run Python application in yarn-cluster mode") {
testPySpark(false)
}
test("run Python application in yarn-cluster mode using " +
" spark.yarn.appMasterEnv to override local envvar") {
testPySpark(
clientMode = false,
extraConf = Map(
"spark.yarn.appMasterEnv.PYSPARK_DRIVER_PYTHON"
-> sys.env.getOrElse("PYSPARK_DRIVER_PYTHON", "python"),
"spark.yarn.appMasterEnv.PYSPARK_PYTHON"
-> sys.env.getOrElse("PYSPARK_PYTHON", "python")),
extraEnv = Map(
"PYSPARK_DRIVER_PYTHON" -> "not python",
"PYSPARK_PYTHON" -> "not python"))
}
test("user class path first in client mode") {
testUseClassPathFirst(true)
}
test("user class path first in cluster mode") {
testUseClassPathFirst(false)
}
test("monitor app using launcher library") {
val env = new JHashMap[String, String]()
env.put("YARN_CONF_DIR", hadoopConfDir.getAbsolutePath())
val propsFile = createConfFile()
val handle = new SparkLauncher(env)
.setSparkHome(sys.props("spark.test.home"))
.setConf("spark.ui.enabled", "false")
.setPropertiesFile(propsFile)
.setMaster("yarn")
.setDeployMode("client")
.setAppResource(SparkLauncher.NO_RESOURCE)
.setMainClass(mainClassName(YarnLauncherTestApp.getClass))
.startApplication()
try {
eventually(timeout(30 seconds), interval(100 millis)) {
handle.getState() should be (SparkAppHandle.State.RUNNING)
}
handle.getAppId() should not be (null)
handle.getAppId() should startWith ("application_")
handle.stop()
eventually(timeout(30 seconds), interval(100 millis)) {
handle.getState() should be (SparkAppHandle.State.KILLED)
}
} finally {
handle.kill()
}
}
test("timeout to get SparkContext in cluster mode triggers failure") {
val timeout = 2000
val finalState = runSpark(false, mainClassName(SparkContextTimeoutApp.getClass),
appArgs = Seq((timeout * 4).toString),
extraConf = Map(AM_MAX_WAIT_TIME.key -> timeout.toString))
finalState should be (SparkAppHandle.State.FAILED)
}
private def testBasicYarnApp(clientMode: Boolean, conf: Map[String, String] = Map()): Unit = {
val result = File.createTempFile("result", null, tempDir)
val finalState = runSpark(clientMode, mainClassName(YarnClusterDriver.getClass),
appArgs = Seq(result.getAbsolutePath()),
extraConf = conf)
checkResult(finalState, result)
}
private def testYarnAppUseSparkHadoopUtilConf(): Unit = {
val result = File.createTempFile("result", null, tempDir)
val finalState = runSpark(false,
mainClassName(YarnClusterDriverUseSparkHadoopUtilConf.getClass),
appArgs = Seq("key=value", result.getAbsolutePath()),
extraConf = Map("spark.hadoop.key" -> "value"))
checkResult(finalState, result)
}
private def testWithAddJar(clientMode: Boolean): Unit = {
val originalJar = TestUtils.createJarWithFiles(Map("test.resource" -> "ORIGINAL"), tempDir)
val driverResult = File.createTempFile("driver", null, tempDir)
val executorResult = File.createTempFile("executor", null, tempDir)
val finalState = runSpark(clientMode, mainClassName(YarnClasspathTest.getClass),
appArgs = Seq(driverResult.getAbsolutePath(), executorResult.getAbsolutePath()),
extraClassPath = Seq(originalJar.getPath()),
extraJars = Seq("local:" + originalJar.getPath()))
checkResult(finalState, driverResult, "ORIGINAL")
checkResult(finalState, executorResult, "ORIGINAL")
}
private def testPySpark(
clientMode: Boolean,
extraConf: Map[String, String] = Map(),
extraEnv: Map[String, String] = Map()): Unit = {
val primaryPyFile = new File(tempDir, "test.py")
Files.write(TEST_PYFILE, primaryPyFile, StandardCharsets.UTF_8)
// When running tests, let's not assume the user has built the assembly module, which also
// creates the pyspark archive. Instead, let's use PYSPARK_ARCHIVES_PATH to point at the
// needed locations.
val sparkHome = sys.props("spark.test.home")
val pythonPath = Seq(
s"$sparkHome/python/lib/py4j-0.10.6-src.zip",
s"$sparkHome/python")
val extraEnvVars = Map(
"PYSPARK_ARCHIVES_PATH" -> pythonPath.map("local:" + _).mkString(File.pathSeparator),
"PYTHONPATH" -> pythonPath.mkString(File.pathSeparator)) ++ extraEnv
val moduleDir =
if (clientMode) {
// In client-mode, .py files added with --py-files are not visible in the driver.
// This is something that the launcher library would have to handle.
tempDir
} else {
val subdir = new File(tempDir, "pyModules")
subdir.mkdir()
subdir
}
val pyModule = new File(moduleDir, "mod1.py")
Files.write(TEST_PYMODULE, pyModule, StandardCharsets.UTF_8)
val mod2Archive = TestUtils.createJarWithFiles(Map("mod2.py" -> TEST_PYMODULE), moduleDir)
val pyFiles = Seq(pyModule.getAbsolutePath(), mod2Archive.getPath()).mkString(",")
val result = File.createTempFile("result", null, tempDir)
val finalState = runSpark(clientMode, primaryPyFile.getAbsolutePath(),
sparkArgs = Seq("--py-files" -> pyFiles),
appArgs = Seq(result.getAbsolutePath()),
extraEnv = extraEnvVars,
extraConf = extraConf)
checkResult(finalState, result)
}
private def testUseClassPathFirst(clientMode: Boolean): Unit = {
// Create a jar file that contains a different version of "test.resource".
val originalJar = TestUtils.createJarWithFiles(Map("test.resource" -> "ORIGINAL"), tempDir)
val userJar = TestUtils.createJarWithFiles(Map("test.resource" -> "OVERRIDDEN"), tempDir)
val driverResult = File.createTempFile("driver", null, tempDir)
val executorResult = File.createTempFile("executor", null, tempDir)
val finalState = runSpark(clientMode, mainClassName(YarnClasspathTest.getClass),
appArgs = Seq(driverResult.getAbsolutePath(), executorResult.getAbsolutePath()),
extraClassPath = Seq(originalJar.getPath()),
extraJars = Seq("local:" + userJar.getPath()),
extraConf = Map(
"spark.driver.userClassPathFirst" -> "true",
"spark.executor.userClassPathFirst" -> "true"))
checkResult(finalState, driverResult, "OVERRIDDEN")
checkResult(finalState, executorResult, "OVERRIDDEN")
}
}
private[spark] class SaveExecutorInfo extends SparkListener {
val addedExecutorInfos = mutable.Map[String, ExecutorInfo]()
var driverLogs: Option[collection.Map[String, String]] = None
override def onExecutorAdded(executor: SparkListenerExecutorAdded) {
addedExecutorInfos(executor.executorId) = executor.executorInfo
}
override def onApplicationStart(appStart: SparkListenerApplicationStart): Unit = {
driverLogs = appStart.driverLogs
}
}
private object YarnClusterDriverWithFailure extends Logging with Matchers {
def main(args: Array[String]): Unit = {
val sc = new SparkContext(new SparkConf()
.set("spark.extraListeners", classOf[SaveExecutorInfo].getName)
.setAppName("yarn test with failure"))
throw new Exception("exception after sc initialized")
}
}
private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matchers {
def main(args: Array[String]): Unit = {
if (args.length != 2) {
// scalastyle:off println
System.err.println(
s"""
|Invalid command line: ${args.mkString(" ")}
|
|Usage: YarnClusterDriverUseSparkHadoopUtilConf [hadoopConfKey=value] [result file]
""".stripMargin)
// scalastyle:on println
System.exit(1)
}
val sc = new SparkContext(new SparkConf()
.set("spark.extraListeners", classOf[SaveExecutorInfo].getName)
.setAppName("yarn test using SparkHadoopUtil's conf"))
val kv = args(0).split("=")
val status = new File(args(1))
var result = "failure"
try {
SparkHadoopUtil.get.conf.get(kv(0)) should be (kv(1))
result = "success"
} finally {
Files.write(result, status, StandardCharsets.UTF_8)
sc.stop()
}
}
}
private object YarnClusterDriver extends Logging with Matchers {
val WAIT_TIMEOUT_MILLIS = 10000
val SECRET_PASSWORD = "secret_password"
def main(args: Array[String]): Unit = {
if (args.length != 1) {
// scalastyle:off println
System.err.println(
s"""
|Invalid command line: ${args.mkString(" ")}
|
|Usage: YarnClusterDriver [result file]
""".stripMargin)
// scalastyle:on println
System.exit(1)
}
val sc = new SparkContext(new SparkConf()
.set("spark.extraListeners", classOf[SaveExecutorInfo].getName)
.setAppName("yarn \\"test app\\" 'with quotes' and \\\\back\\\\slashes and $dollarSigns"))
val conf = sc.getConf
val status = new File(args(0))
var result = "failure"
try {
val data = sc.parallelize(1 to 4, 4).collect().toSet
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
data should be (Set(1, 2, 3, 4))
result = "success"
// Verify that the config archive is correctly placed in the classpath of all containers.
val confFile = "/" + Client.SPARK_CONF_FILE
assert(getClass().getResource(confFile) != null)
val configFromExecutors = sc.parallelize(1 to 4, 4)
.map { _ => Option(getClass().getResource(confFile)).map(_.toString).orNull }
.collect()
assert(configFromExecutors.find(_ == null) === None)
} finally {
Files.write(result, status, StandardCharsets.UTF_8)
sc.stop()
}
// verify log urls are present
val listeners = sc.listenerBus.findListenersByClass[SaveExecutorInfo]
assert(listeners.size === 1)
val listener = listeners(0)
val executorInfos = listener.addedExecutorInfos.values
assert(executorInfos.nonEmpty)
executorInfos.foreach { info =>
assert(info.logUrlMap.nonEmpty)
info.logUrlMap.values.foreach { url =>
val log = Source.fromURL(url).mkString
assert(
!log.contains(SECRET_PASSWORD),
s"Executor logs contain sensitive info (${SECRET_PASSWORD}): \\n${log} "
)
}
}
// If we are running in yarn-cluster mode, verify that driver log links are present and
// are in the expected format.
if (conf.get("spark.submit.deployMode") == "cluster") {
assert(listener.driverLogs.nonEmpty)
val driverLogs = listener.driverLogs.get
assert(driverLogs.size === 2)
assert(driverLogs.contains("stderr"))
assert(driverLogs.contains("stdout"))
val urlStr = driverLogs("stderr")
driverLogs.foreach { kv =>
val log = Source.fromURL(kv._2).mkString
assert(
!log.contains(SECRET_PASSWORD),
s"Driver logs contain sensitive info (${SECRET_PASSWORD}): \\n${log} "
)
}
val containerId = YarnSparkHadoopUtil.get.getContainerId
val user = Utils.getCurrentUserName()
assert(urlStr.endsWith(s"/node/containerlogs/$containerId/$user/stderr?start=-4096"))
}
}
}
private object YarnClasspathTest extends Logging {
def error(m: String, ex: Throwable = null): Unit = {
logError(m, ex)
// scalastyle:off println
System.out.println(m)
if (ex != null) {
ex.printStackTrace(System.out)
}
// scalastyle:on println
}
def main(args: Array[String]): Unit = {
if (args.length != 2) {
error(
s"""
|Invalid command line: ${args.mkString(" ")}
|
|Usage: YarnClasspathTest [driver result file] [executor result file]
""".stripMargin)
// scalastyle:on println
}
readResource(args(0))
val sc = new SparkContext(new SparkConf())
try {
sc.parallelize(Seq(1)).foreach { x => readResource(args(1)) }
} finally {
sc.stop()
}
}
private def readResource(resultPath: String): Unit = {
var result = "failure"
try {
val ccl = Thread.currentThread().getContextClassLoader()
val resource = ccl.getResourceAsStream("test.resource")
val bytes = ByteStreams.toByteArray(resource)
result = new String(bytes, 0, bytes.length, StandardCharsets.UTF_8)
} catch {
case t: Throwable =>
error(s"loading test.resource to $resultPath", t)
} finally {
Files.write(result, new File(resultPath), StandardCharsets.UTF_8)
}
}
}
private object YarnLauncherTestApp {
def main(args: Array[String]): Unit = {
// Do not stop the application; the test will stop it using the launcher lib. Just run a task
// that will prevent the process from exiting.
val sc = new SparkContext(new SparkConf())
sc.parallelize(Seq(1)).foreach { i =>
this.synchronized {
wait()
}
}
}
}
/**
* Used to test code in the AM that detects the SparkContext instance. Expects a single argument
* with the duration to sleep for, in ms.
*/
private object SparkContextTimeoutApp {
def main(args: Array[String]): Unit = {
val Array(sleepTime) = args
Thread.sleep(java.lang.Long.parseLong(sleepTime))
}
}
|
aokolnychyi/spark
|
resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
|
Scala
|
apache-2.0
| 18,205
|
package com.cloudera.hue.livy.server
import java.net.URL
import com.cloudera.hue.livy.Logging
import com.cloudera.hue.livy.msgs.ExecuteRequest
import com.cloudera.hue.livy.server.sessions.Session
import com.cloudera.hue.livy.server.sessions.Session.SessionFailedToStart
import com.fasterxml.jackson.core.JsonParseException
import org.json4s.{DefaultFormats, Formats, MappingException}
import org.scalatra._
import org.scalatra.json.JacksonJsonSupport
import scala.concurrent._
import scala.concurrent.duration._
object WebApp extends Logging {
case class CreateSessionRequest(lang: String)
}
case class CallbackRequest(url: String)
class WebApp(sessionManager: SessionManager)
extends ScalatraServlet
with FutureSupport
with MethodOverride
with JacksonJsonSupport
with UrlGeneratorSupport {
import WebApp._
override protected implicit def executor: ExecutionContextExecutor = ExecutionContext.global
override protected implicit def jsonFormats: Formats = DefaultFormats
before() {
contentType = formats("json")
}
get("/sessions") {
sessionManager.getSessionIds
}
val getSession = get("/sessions/:sessionId") {
sessionManager.get(params("sessionId")) match {
case Some(session) => formatSession(session)
case None => NotFound("Session not found")
}
}
post("/sessions") {
val createSessionRequest = parsedBody.extract[CreateSessionRequest]
val sessionFuture = createSessionRequest.lang match {
case lang @ ("scala" | "spark" | "pyspark" | "python") => sessionManager.createSession(lang)
case lang => halt(400, "unsupported language: " + lang)
}
val rep = sessionFuture.map {
case session =>
Created(formatSession(session),
headers = Map("Location" -> url(getSession, "sessionId" -> session.id.toString)))
}
new AsyncResult { val is = rep }
}
post("/sessions/:sessionId/callback") {
val callback = parsedBody.extract[CallbackRequest]
sessionManager.get(params("sessionId")) match {
case Some(session) =>
if (session.state == Session.Starting()) {
session.url = new URL(callback.url)
Accepted()
} else {
BadRequest("Session is in wrong state")
}
case None => NotFound("Session not found")
}
}
post("/sessions/:sessionId/stop") {
sessionManager.get(params("sessionId")) match {
case Some(session) =>
val future = session.stop()
new AsyncResult() { val is = for { _ <- future } yield NoContent() }
case None => NotFound("Session not found")
}
}
post("/sessions/:sessionId/interrupt") {
sessionManager.get(params("sessionId")) match {
case Some(session) =>
val future = for {
_ <- session.interrupt()
} yield Accepted()
// FIXME: this is silently eating exceptions.
new AsyncResult() { val is = for { _ <- future } yield NoContent() }
case None => NotFound("Session not found")
}
}
delete("/sessions/:sessionId") {
val future = for {
_ <- sessionManager.delete(params("sessionId"))
} yield Accepted()
new AsyncResult() { val is = for { _ <- future } yield NoContent() }
}
get("/sessions/:sessionId/statements") {
sessionManager.get(params("sessionId")) match {
case Some(session: Session) => session.statements().map(formatStatement)
case None => NotFound("Session not found")
}
}
val getStatement = get("/sessions/:sessionId/statements/:statementId") {
sessionManager.get(params("sessionId")) match {
case Some(session) =>
session.statement(params("statementId").toInt) match {
case Some(statement) => formatStatement(statement)
case None => NotFound("Statement not found")
}
case None => NotFound("Session not found")
}
}
post("/sessions/:sessionId/statements") {
val req = parsedBody.extract[ExecuteRequest]
sessionManager.get(params("sessionId")) match {
case Some(session) =>
val statement = session.executeStatement(req)
Created(formatStatement(statement),
headers = Map(
"Location" -> url(getStatement,
"sessionId" -> session.id.toString,
"statementId" -> statement.id.toString)))
case None => NotFound("Session not found")
}
}
error {
case e: JsonParseException => BadRequest(e.getMessage)
case e: MappingException => BadRequest(e.getMessage)
case e: SessionFailedToStart => InternalServerError(e.getMessage)
case e: dispatch.StatusCode => ActionResult(ResponseStatus(e.code), e.getMessage, Map.empty)
case e =>
WebApp.error("internal error", e)
InternalServerError(e.toString)
}
private def formatSession(session: Session) = {
Map(
"id" -> session.id,
"state" -> session.state.getClass.getSimpleName.toLowerCase
)
}
private def formatStatement(statement: Statement) = {
// Wait briefly (up to 100 ms) to see if the statement has finished.
val output = try {
Await.result(statement.output, 100 milliseconds)
} catch {
case _: TimeoutException => null
}
Map(
"id" -> statement.id,
"state" -> statement.state.getClass.getSimpleName.toLowerCase,
"output" -> output
)
}
}
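// Usage sketch (illustrative, not part of the original file): the servlet exposes a
// small JSON API over sessions and statements. A typical exchange looks like:
//
//   POST /sessions                {"lang": "scala"}      -> 201 Created, Location: /sessions/0
//   GET  /sessions/0                                     -> {"id": 0, "state": "starting"}
//   POST /sessions/0/statements   {...ExecuteRequest...} -> 201 Created, Location header set
//
// The session id and the exact ExecuteRequest JSON shape are assumptions here.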
|
erickt/hue
|
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/WebApp.scala
|
Scala
|
apache-2.0
| 5,544
|
package im.actor.server.api.rpc.service.profile
import akka.actor.ActorSystem
import akka.util.Timeout
import im.actor.api.rpc._
import im.actor.api.rpc.files.ApiFileLocation
import im.actor.api.rpc.misc.{ ResponseBool, ResponseSeq }
import im.actor.api.rpc.profile.{ ProfileService, ResponseEditAvatar }
import im.actor.server.db.DbExtension
import im.actor.server.file.{ FileStorageExtension, FileErrors, FileStorageAdapter, ImageUtils }
import im.actor.server.persist.UserRepo
import im.actor.server.sequence.{ SequenceErrors, SeqState }
import im.actor.server.social.{ SocialExtension, SocialManagerRegion }
import im.actor.server.user._
import im.actor.util.ThreadLocalSecureRandom
import im.actor.util.misc.StringUtils
import slick.driver.PostgresDriver.api._
import scala.concurrent.duration._
import scala.concurrent.forkjoin.ThreadLocalRandom
import scala.concurrent.{ ExecutionContext, Future }
object ProfileRpcErrors {
val NicknameInvalid = RpcError(400, "NICKNAME_INVALID",
"Invalid nickname. A valid nickname should contain from 5 to 32 characters and may consist of Latin characters, numbers and underscores", false, None)
val NameInvalid = RpcError(400, "NAME_INVALID", "Invalid name. A valid name should not be empty or contain bad symbols", false, None)
val NicknameBusy = RpcError(400, "NICKNAME_BUSY", "This nickname already belongs to some other user, we are sorry!", false, None)
val AboutTooLong = RpcError(400, "ABOUT_TOO_LONG",
"About is too long. It should be no longer than 255 characters", false, None)
}
final class ProfileServiceImpl()(implicit system: ActorSystem) extends ProfileService {
import FileHelpers._
import ImageUtils._
import FutureResultRpc._
override implicit val ec: ExecutionContext = system.dispatcher
private implicit val timeout = Timeout(5.seconds)
// TODO: configurable
private val db: Database = DbExtension(system).db
private val userExt = UserExtension(system)
private implicit val socialRegion: SocialManagerRegion = SocialExtension(system).region
private implicit val fsAdapter: FileStorageAdapter = FileStorageExtension(system).fsAdapter
// TODO: flatten
override def doHandleEditAvatar(fileLocation: ApiFileLocation, clientData: ClientData): Future[HandlerResult[ResponseEditAvatar]] =
authorized(clientData) { implicit client ⇒
val action = withFileLocation(fileLocation, AvatarSizeLimit) {
scaleAvatar(fileLocation.fileId, ThreadLocalSecureRandom.current()) flatMap {
case Right(avatar) ⇒
for {
UserCommands.UpdateAvatarAck(avatar, SeqState(seq, state)) ← DBIO.from(userExt.updateAvatar(client.userId, Some(avatar)))
} yield Ok(ResponseEditAvatar(
avatar.get,
seq,
state.toByteArray
))
case Left(e) ⇒
throw FileErrors.LocationInvalid
}
}
db.run(action)
}
override def doHandleRemoveAvatar(clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
authorized(clientData) { implicit client ⇒
val action = for {
UserCommands.UpdateAvatarAck(_, SeqState(seq, state)) ← DBIO.from(userExt.updateAvatar(client.userId, None))
} yield Ok(ResponseSeq(seq, state.toByteArray))
db.run(action)
}
override def doHandleEditName(name: String, clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
authorized(clientData) { implicit client ⇒
for {
SeqState(seq, state) ← userExt.changeName(client.userId, name)
} yield Ok(ResponseSeq(seq, state.toByteArray))
}
override def doHandleEditNickName(nickname: Option[String], clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
authorized(clientData) { implicit client ⇒
for {
SeqState(seq, state) ← userExt.changeNickname(client.userId, nickname)
} yield Ok(ResponseSeq(seq, state.toByteArray))
}
override def doHandleCheckNickName(nickname: String, clientData: ClientData): Future[HandlerResult[ResponseBool]] =
authorized(clientData) { implicit client ⇒
(for {
_ ← fromBoolean(ProfileRpcErrors.NicknameInvalid)(StringUtils.validUsername(nickname))
exists ← fromFuture(db.run(UserRepo.nicknameExists(nickname.trim)))
} yield ResponseBool(!exists)).value
}
//todo: move validation inside of UserOffice
override def doHandleEditAbout(about: Option[String], clientData: ClientData): Future[HandlerResult[ResponseSeq]] = {
authorized(clientData) { implicit client ⇒
(for {
trimmed ← point(about.map(_.trim))
_ ← fromBoolean(ProfileRpcErrors.AboutTooLong)(trimmed.map(s ⇒ s.nonEmpty && s.length <= 255).getOrElse(true))
s ← fromFuture(userExt.changeAbout(client.userId, trimmed))
} yield ResponseSeq(s.seq, s.state.toByteArray)).value
}
}
override def doHandleEditMyTimeZone(tz: String, clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
authorized(clientData) { implicit client ⇒
(for {
s ← fromFuture(produceError)(userExt.changeTimeZone(client.userId, tz))
} yield ResponseSeq(s.seq, s.state.toByteArray)).value
}
override def doHandleEditMyPreferredLanguages(preferredLanguages: IndexedSeq[String], clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
authorized(clientData) { implicit client ⇒
(for {
s ← fromFuture(produceError)(userExt.changePreferredLanguages(client.userId, preferredLanguages))
} yield ResponseSeq(s.seq, s.state.toByteArray)).value
}
override def onFailure: PartialFunction[Throwable, RpcError] = {
case FileErrors.LocationInvalid ⇒ FileRpcErrors.LocationInvalid
case UserErrors.InvalidName ⇒ ProfileRpcErrors.NameInvalid
case UserErrors.NicknameTaken ⇒ ProfileRpcErrors.NicknameBusy
case UserErrors.InvalidNickname ⇒ ProfileRpcErrors.NicknameInvalid
}
private def produceError: PartialFunction[Throwable, RpcError] = {
case SequenceErrors.UpdateAlreadyApplied(field) ⇒ RpcError(400, "UPDATE_ALREADY_APPLIED", s"$field already updated.", canTryAgain = false, data = None)
case UserErrors.InvalidLocale(locale) ⇒ RpcError(400, "INVALID_LOCALE", s"Invalid language: $locale.", canTryAgain = false, data = None)
case UserErrors.InvalidTimeZone(tz) ⇒ RpcError(400, "INVALID_TIME_ZONE", s"Invalid time zone: $tz.", canTryAgain = false, data = None)
case UserErrors.EmptyLocalesList ⇒ RpcError(400, "EMPTY_LOCALES_LIST", s"Empty languages list.", canTryAgain = false, data = None)
case e ⇒ throw e
}
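// Example (illustrative): with the recovery above, a repeated update surfaces to the
// client as e.g.
//   RpcError(400, "UPDATE_ALREADY_APPLIED", "TimeZone already updated.", canTryAgain = false, data = None)
// while any unmatched throwable is rethrown and handled by onFailure.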
}
|
ljshj/actor-platform
|
actor-server/actor-rpc-api/src/main/scala/im/actor/server/api/rpc/service/profile/ProfileServiceImpl.scala
|
Scala
|
mit
| 6,620
|
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.tube
import java.util.Date
import java.util.concurrent.atomic.AtomicInteger
import com.typesafe.scalalogging.LazyLogging
import org.locationtech.jts.geom._
import org.locationtech.jts.geom.impl.CoordinateArraySequence
import org.geotools.data.simple.SimpleFeatureCollection
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.geotools.referencing.GeodeticCalculator
import org.locationtech.geomesa.features.ScalaSimpleFeatureFactory
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType._
import org.locationtech.geomesa.utils.geotools.{GeometryUtils, SimpleFeatureTypes}
import org.locationtech.geomesa.utils.geotools.converters.FastConverter
import org.locationtech.geomesa.utils.text.WKTUtils
import org.locationtech.geomesa.utils.date.DateUtils.toInstant
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import scala.collection.immutable.NumericRange
object TubeBuilder {
val DefaultDtgField = "dtg"
}
/**
* Build a tube for input to a TubeSelect by buffering and binning the input
* tubeFeatures into SimpleFeatures that can be used as inputs to Geomesa queries
*/
abstract class TubeBuilder(val tubeFeatures: SimpleFeatureCollection,
val bufferDistance: Double,
val maxBins: Int) extends LazyLogging {
val calc = new GeodeticCalculator()
val dtgField: String = tubeFeatures.getSchema.getDtgField.getOrElse(TubeBuilder.DefaultDtgField)
val geoFac = new GeometryFactory
val GEOM_PROP = "geom"
val tubeType: SimpleFeatureType = SimpleFeatureTypes.createType("tubeType", s"$GEOM_PROP:Geometry:srid=4326,start:Date,end:Date")
val builder: SimpleFeatureBuilder = ScalaSimpleFeatureFactory.featureBuilder(tubeType)
def getGeom(sf: SimpleFeature): Geometry = sf.getAttribute(0).asInstanceOf[Geometry]
def getStartTime(sf: SimpleFeature): Date = sf.getAttribute(1).asInstanceOf[Date]
def getEndTime(sf: SimpleFeature): Date = sf.getAttribute(2).asInstanceOf[Date]
def bufferGeom(geom: Geometry, meters: Double): Geometry = {
import org.locationtech.geomesa.utils.geotools.Conversions.RichGeometry
geom.buffer(metersToDegrees(meters, geom.safeCentroid()))
}
def metersToDegrees(meters: Double, point: Point): Double = {
logger.debug("Buffering: "+meters.toString + " "+WKTUtils.write(point))
calc.setStartingGeographicPoint(point.getX, point.getY)
calc.setDirection(0, meters)
val dest2D = calc.getDestinationGeographicPoint
val destPoint = geoFac.createPoint(new Coordinate(dest2D.getX, dest2D.getY))
point.distance(destPoint)
}
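// Worked example (illustrative): the conversion walks `meters` due north from the
// point and measures the degree delta, so metersToDegrees(1000, point) is roughly
// 0.009 degrees anywhere on Earth (one degree of latitude is ~111 km).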
def buffer(simpleFeatures: Iterator[SimpleFeature], meters:Double): Iterator[SimpleFeature] =
simpleFeatures.map { sf =>
val bufferedGeom = bufferGeom(getGeom(sf), meters)
builder.reset()
builder.init(sf)
builder.set(GEOM_PROP, bufferedGeom)
builder.buildFeature(sf.getID)
}
// transform the input tubeFeatures into the intermediate SF used by the
// tubing code consisting of three attributes (geom, startTime, endTime)
// handle date parsing from input -> TODO revisit date parsing...
def transform(tubeFeatures: SimpleFeatureCollection, dtgField: String): Iterator[SimpleFeature] = {
SelfClosingIterator(tubeFeatures.features).map { sf =>
val date = FastConverter.convert(sf.getAttribute(dtgField), classOf[Date])
if (date == null) {
logger.error("Unable to retrieve date field from input tubeFeatures...ensure there a field named " + dtgField)
throw new IllegalArgumentException("Unable to retrieve date field from input tubeFeatures...ensure there a field named \"" + dtgField + "\"")
}
builder.reset()
builder.buildFeature(sf.getID, Array(sf.getDefaultGeometry, date, null))
}
}
/**
* Return an Array containing either 1 or 2 LineStrings that straddle but
* do not cross the IDL.
* @param input1 The first point in the segment
* @param input2 The second point in the segment
* @return an array of LineString containing either 1 or 2 LineStrings that do not
* span the IDL.
*/
def makeIDLSafeLineString(input1:Coordinate, input2:Coordinate): Geometry = {
//If the points cross the IDL we must generate two line segments
if (GeometryUtils.crossesIDL(input1, input2)) {
//Find the latitude where the segment intercepts the IDL
val latIntercept = GeometryUtils.calcIDLIntercept(input1, input2)
val p1 = new Coordinate(-180, latIntercept)
val p2 = new Coordinate(180, latIntercept)
//This orders the points so that point1 is always the east-most point
val (point1, point2) = if (input1.x > 0) (input1, input2) else (input2, input1)
val westLine = new LineString(new CoordinateArraySequence(Array(p1, point2)), geoFac)
val eastLine = new LineString(new CoordinateArraySequence(Array(point1, p2)), geoFac)
new MultiLineString(Array[LineString](westLine,eastLine), geoFac)
} else {
new LineString(new CoordinateArraySequence(Array(input1, input2)), geoFac)
}
}
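// Example (illustrative): a segment from (179, 0) to (-179, 2) crosses the IDL at
// latitude 1, so this returns a MultiLineString of (179,0)->(180,1) and
// (-180,1)->(-179,2); a segment that stays on one side comes back as a single LineString.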
def createTube: Iterator[SimpleFeature]
}
/**
* Build a tube with no gap filling - only buffering and binning
*/
class NoGapFill(tubeFeatures: SimpleFeatureCollection,
bufferDistance: Double,
maxBins: Int) extends TubeBuilder(tubeFeatures, bufferDistance, maxBins) with LazyLogging {
// Bin ordered features into maxBins that retain order by date then union by geometry
def timeBinAndUnion(features: Iterable[SimpleFeature], maxBins: Int): Iterator[SimpleFeature] = {
val numFeatures = features.size
if (numFeatures == 0) { Iterator.empty } else {
//If 0 is passed in then don't bin the features, if 1 then make one bin, otherwise calculate number
//of bins based on numFeatures and maxBins
val binSize = maxBins match {
case 0 => 1
case 1 => numFeatures
case _ => numFeatures / maxBins + (if (numFeatures % maxBins == 0) 0 else 1)
}
features.grouped(binSize).zipWithIndex.map { case(bin, idx) => unionFeatures(bin.toSeq, idx.toString) }
}
}
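// Example (illustrative): 10 features with maxBins = 3 give binSize = 10/3 + 1 = 4,
// so grouped(4) yields bins of 4, 4 and 2 features, each unioned into one geometry.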
// Union features to create a single geometry and single combined time range
def unionFeatures(orderedFeatures: Seq[SimpleFeature], id: String): SimpleFeature = {
import scala.collection.JavaConversions._
val geoms = orderedFeatures.map { sf => getGeom(sf) }
val unionGeom = geoFac.buildGeometry(geoms).union
val min = getStartTime(orderedFeatures.head)
val max = getStartTime(orderedFeatures.last)
builder.reset()
builder.buildFeature(id, Array(unionGeom, min, max))
}
override def createTube: Iterator[SimpleFeature] = {
logger.debug("Creating tube with no gap filling")
val transformed = transform(tubeFeatures, dtgField)
val buffered = buffer(transformed, bufferDistance)
val sortedTube = buffered.toSeq.sortBy { sf => getStartTime(sf).getTime }
logger.debug(s"sorted tube size: ${sortedTube.size}")
timeBinAndUnion(sortedTube, maxBins)
}
}
/**
* Build a tube with gap filling that draws a line between time-ordered features
* from the given tubeFeatures
*/
class LineGapFill(tubeFeatures: SimpleFeatureCollection,
bufferDistance: Double,
maxBins: Int) extends TubeBuilder(tubeFeatures, bufferDistance, maxBins) with LazyLogging {
val id = new AtomicInteger(0)
def nextId: String = id.getAndIncrement.toString
override def createTube: Iterator[SimpleFeature] = {
import org.locationtech.geomesa.utils.geotools.Conversions.RichGeometry
logger.debug("Creating tube with line gap fill")
val transformed = transform(tubeFeatures, dtgField)
val sortedTube = transformed.toSeq.sortBy { sf => getStartTime(sf).getTime }
val pointsAndTimes = sortedTube.map(sf => (getGeom(sf).safeCentroid(), getStartTime(sf)))
val lineFeatures = if (pointsAndTimes.lengthCompare(1) == 0) {
val (p1, t1) = pointsAndTimes.head
logger.debug("Only a single result - can't create a line")
Iterator(builder.buildFeature(nextId, Array(p1, t1, t1)))
} else {
pointsAndTimes.sliding(2).map { case Seq((p1, t1), (p2, t2)) =>
val geo = if (p1.equals(p2)) p1 else makeIDLSafeLineString(p1.getCoordinate,p2.getCoordinate)
logger.debug(s"Created Line-filled Geometry: ${WKTUtils.write(geo)} From ${WKTUtils.write(p1)} and ${WKTUtils.write(p2)}")
builder.buildFeature(nextId, Array(geo, t1, t2))
}
}
buffer(lineFeatures, bufferDistance)
}
}
/**
* Class to create an interpolated line-gap filled tube
* @param tubeFeatures features
* @param bufferDistance distance
* @param maxBins max bins
*/
class InterpolatedGapFill(tubeFeatures: SimpleFeatureCollection,
bufferDistance: Double,
maxBins: Int) extends TubeBuilder(tubeFeatures, bufferDistance, maxBins) with LazyLogging {
val id = new AtomicInteger(0)
def nextId: String = id.getAndIncrement.toString
override def createTube: Iterator[SimpleFeature] = {
import org.locationtech.geomesa.utils.geotools.Conversions.RichGeometry
logger.debug("Creating tube with line interpolated line gap fill")
val transformed = transform(tubeFeatures, dtgField)
val sortedTube = transformed.toSeq.sortBy(sf => getStartTime(sf).getTime)
val pointsAndTimes = sortedTube.map(sf => (getGeom(sf).safeCentroid(), getStartTime(sf)))
val lineFeatures = if (pointsAndTimes.lengthCompare(1) == 0) {
val (p1, t1) = pointsAndTimes.head
logger.debug("Only a single result - can't create a line")
Iterator(builder.buildFeature(nextId, Array(p1, t1, t1)))
} else {
pointsAndTimes.sliding(2).flatMap { case Seq((p1, t1), (p2, t2)) =>
calc.setStartingGeographicPoint(p1.getX, p1.getY)
calc.setDestinationGeographicPoint(p2.getX, p2.getY)
val dist = calc.getOrthodromicDistance
// If the distance between points is greater than the buffer distance, segment the line
// so that no segment is larger than the buffer. This ensures that each segment has its
// own time range and distance. Also ensure that features do not share a time value.
val timeDiffMillis = toInstant(t2).toEpochMilli - toInstant(t1).toEpochMilli
// Clamp both values to at least 1: segCount would be 0 (divide-by-zero) when the points
// lie within one buffer distance, and segDuration would be 0 (an illegal range step) when
// the two features share a timestamp; both cases fall through to the unsegmented branch.
val segCount = math.max(1, (dist / bufferDistance).toInt)
val segDuration = math.max(1, timeDiffMillis / segCount)
val timeSteps = NumericRange.inclusive(toInstant(t1).toEpochMilli, toInstant(t2).toEpochMilli, segDuration)
if (dist > bufferDistance && timeSteps.lengthCompare(1) > 0) {
val heading = calc.getAzimuth
var segStep = new Coordinate(p1.getX, p1.getY, 0)
timeSteps.sliding(2).map { case Seq(time0, time1) =>
val segP1 = segStep
calc.setStartingGeographicPoint(segP1.x, segP1.y)
calc.setDirection(heading, bufferDistance)
val destPoint = calc.getDestinationGeographicPoint
segStep = new Coordinate(destPoint.getX, destPoint.getY, 0)
val geo = makeIDLSafeLineString(segP1, segStep)
builder.buildFeature(nextId, Array(geo, new Date(time0), new Date(time1)))
}
} else {
val geo = if (p1.equals(p2)) { p1 } else { makeIDLSafeLineString(p1.getCoordinate, p2.getCoordinate) }
logger.debug(s"Created line-filled geometry: ${WKTUtils.write(geo)} " +
s"from ${WKTUtils.write(p1)} and ${WKTUtils.write(p2)}")
Seq(builder.buildFeature(nextId, Array(geo, t1, t2)))
}
}
}
buffer(lineFeatures, bufferDistance)
}
}
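/**
 * Usage sketch (illustrative, not part of the original file): the three builders are
 * interchangeable inputs to a TubeSelect; `features`, the 500m buffer and the 6-bin
 * cap are placeholder values.
 */
object TubeBuilderExample {
  def noGapTube(features: SimpleFeatureCollection): Iterator[SimpleFeature] =
    new NoGapFill(features, bufferDistance = 500d, maxBins = 6).createTube

  def interpolatedTube(features: SimpleFeatureCollection): Iterator[SimpleFeature] =
    new InterpolatedGapFill(features, bufferDistance = 500d, maxBins = 6).createTube
}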
|
elahrvivaz/geomesa
|
geomesa-process/geomesa-process-vector/src/main/scala/org/locationtech/geomesa/process/tube/TubeBuilder.scala
|
Scala
|
apache-2.0
| 12,217
|
package org.broadinstitute.dsde.vault.services.uBAM
import org.broadinstitute.dsde.vault.model.uBAMJsonProtocol._
import org.broadinstitute.dsde.vault.model.{UBamIngest, UBamIngestResponse}
import org.broadinstitute.dsde.vault.{VaultConfig, VaultFreeSpec}
import org.scalatest.{DoNotDiscover, Suite}
import org.broadinstitute.dsde.vault.model.Properties._
import spray.http.StatusCodes._
import spray.http.{ContentType, HttpEntity, MediaTypes}
import spray.httpx.SprayJsonSupport._
@DoNotDiscover
class UBamIngestServiceSpec extends VaultFreeSpec with UBamIngestService {
def actorRefFactory = system
val versions = Table(
"version",
None,
Some(1)
)
val ubamIngest = new UBamIngest(
files = Map(("bam", "vault/test/test.bam"), ("bai", "vault/test/test.bai")),
metadata = Map(("testAttr", "testValue"), ("randomData", "7"))
)
"UBamIngestServiceSpec" - {
forAll(versions) { (version: Option[Int]) =>
s"when accessing version = '${v(version)}'" - {
"when calling POST to the UBam Ingest path with a UBamIngest object" - {
"should return a valid response" in {
// As designed, the API returns an object that only contains an id and files, but not the supplied metadata
Post(VaultConfig.Vault.ubamIngestPath.versioned(version), ubamIngest) ~> addOpenAmCookie ~> uBamIngestRoute ~> check {
status should equal(OK)
responseAs[String] should include("bam")
responseAs[String] should include("bai")
responseAs[String] shouldNot include("randomData")
responseAs[String] shouldNot include("testUser")
val properties = responseAs[UBamIngestResponse].properties
version match {
case Some(x) if x > 1 =>
properties shouldNot be(empty)
properties.get.get(CreatedBy) shouldNot be(empty)
properties.get.get(CreatedDate) shouldNot be(empty)
properties.get.get(ModifiedBy) should be(empty)
properties.get.get(ModifiedDate) should be(empty)
case _ =>
properties shouldBe empty
}
}
}
}
"when calling POST to the UBam Ingest path with a UBamIngest object and 'X-Force-Location' header" - {
"should return a valid response with the provided file paths" in {
Post(VaultConfig.Vault.ubamIngestPath.versioned(version), ubamIngest) ~> addHeader("X-Force-Location", "true") ~> addOpenAmCookie ~> uBamIngestRoute ~> check {
status should equal(OK)
val response = responseAs[UBamIngestResponse]
val files = response.files
files.get("bam").get should equal("vault/test/test.bam")
files.get("bai").get should equal("vault/test/test.bai")
version match {
case Some(x) if x > 1 =>
response.properties shouldNot be(empty)
val properties = response.properties.get
properties.get(CreatedBy) shouldNot be(empty)
properties.get(CreatedDate) shouldNot be(empty)
properties.get(ModifiedBy) should be(empty)
properties.get(ModifiedDate) should be(empty)
case _ =>
response.properties shouldBe empty
}
}
}
}
"when calling GET to the UBam Ingest path" - {
"should return a MethodNotAllowed error" in {
Get(VaultConfig.Vault.ubamIngestPath.versioned(version)) ~> sealRoute(uBamIngestRoute) ~> check {
status should equal(MethodNotAllowed)
entity.toString should include("HTTP method not allowed, supported methods: POST")
}
}
}
"when calling PUT to the UBam Ingest path" - {
"should return a MethodNotAllowed error" in {
Put(VaultConfig.Vault.ubamIngestPath.versioned(version)) ~> sealRoute(uBamIngestRoute) ~> check {
status should equal(MethodNotAllowed)
entity.toString should include("HTTP method not allowed, supported methods: POST")
}
}
}
"when calling POST to the UBam Ingest path with a malformed UBamIngest object" - {
"should return an invalid response" in {
val malformedEntity = HttpEntity(ContentType(MediaTypes.`application/json`), """{"random":"data"}""")
Post(VaultConfig.Vault.ubamIngestPath.versioned(version), malformedEntity) ~> addOpenAmCookie ~> sealRoute(uBamIngestRoute) ~> check {
status should equal(BadRequest)
}
}
}
}
}
}
}
|
broadinstitute/vault-api
|
src/test/scala/org/broadinstitute/dsde/vault/services/uBAM/UBamIngestServiceSpec.scala
|
Scala
|
bsd-3-clause
| 4,783
|
//package rat.client.services
//
//import autowire._
//import boopickle.Default._
//import rat.client.ukko.Actor
//import rat.shared.{User, UserName, Api2}
//import rx._
//import rat.client.ukko._
//import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
//
//case class UpdateUser(user: User)
//
//trait MainHub extends Actor{
// private val theUser = Var(User("invalid", "invalid", valid = false))
// override val name = "MainHub"
//
// private def updateUser(newUser: User): Unit = {
// println(s"let's update the user $newUser")
// println(s"last user ${theUser.now}")
// if (newUser != theUser.now) {
// theUser() = newUser
// println(s"last user ${theUser.now}")
// }
// }
//
// override def receive = {
// case UpdateUser(usr) => updateUser(usr)
// case _ =>
// }
// def user:Rx[User] = theUser
//
//}
//
//
//
//object MainHub extends MainHub{
// MainDispatcher.register(this)
//}
//
//object UserAction {
// def signIn(userName: UserName) = {
// println(userName)
// AjaxClient[Api2].signIn(userName).call().foreach { users =>
// MainDispatcher.dispatch(UpdateUser(users))
// }
// }
//}
|
omidb/rat
|
client/src/main/scala/rat/client/services/MainHub.scala
|
Scala
|
apache-2.0
| 1,159
|
package crm
import scala.collection.JavaConverters._
import scala.language._
import scala.util.control.Exception._
import scopt._
import org.w3c.dom._
import dispatch._
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.async.Async._
import scala.util._
import org.log4s._
import com.lucidchart.open.xtract.{ XmlReader, _ }
import XmlReader._
import play.api.libs.functional.syntax._
import cats._
import cats.data._
import cats.implicits._
import cats.instances._
import cats.syntax._
import better.files._
import java.io.{ File => JFile }
import fs2._
import scala.concurrent.ExecutionContext
import sdk.CrmAuth._
import sdk.httphelpers._
import sdk.streamhelpers._
import scala.util.matching._
import sdk.soapnamespaces.implicits._
import sdk.messages.soaprequestwriters._
import sdk.soapreaders._
import sdk.metadata.xmlreaders._
import sdk._
import sdk.discovery._
import sdk.metadata.soapwriters._
import sdk.discovery.soapwriters._
import sdk.metadata._
import scala.xml
import org.apache.ddlutils._
import org.apache.ddlutils.model._
import org.apache.ddlutils.io._
import _root_.io.circe._
import _root_.io.circe.syntax._
object Copy {
private[this] implicit val logger = getLogger
def apply(config: Config): Unit = {
config.copyAction match {
case "listTargets" =>
PlatformFactory.getSupportedPlatforms.toSeq.sorted.foreach(println)
case "ddl" =>
val metadatafile = config.copyMetadataFile.getOrElse("metadata.xml")
val schema =
try {
val mxml = xml.XML.loadFile(metadatafile)
schemaReader.read(mxml) match {
case ParseSuccess(s) => s
case PartialParseSuccess(s, msg) =>
logger.warn(s"Partial parse of CRM XML metadata: $msg")
s
case ParseFailure(msg) => throw new RuntimeException(s"Error while parsing CRM XML metadata: $msg")
}
} catch {
case scala.util.control.NonFatal(e) =>
logger.error(e)("Error obtaining metadata.")
throw new RuntimeException(s"Unable to open metadata file $metadatafile: ${e.getMessage}")
}
val (db, json, mapping) = createDdl(schema)
println("rvalue: " + db)
val platform = PlatformFactory.createNewPlatformInstance(config.copyDatabaseType)
new DatabaseIO().write(db, config.copyDatabaseType + ".xml")
val sql = platform.getCreateTablesSql(db, true, true)
val enhancedJson = json.add("dbTarget", Json.fromString(config.copyDatabaseType))
import better.files._
(config.copyDatabaseType + ".ddl").toFile < sql
(config.copyDatabaseType + ".mapping").toFile < enhancedJson.asJson.toString
println(mapping)
}
}
/** If GUIDs become strings, this is the column size. */
val guidSizeStr = "36"
case class GenerationConfig(
tablename_prefix: Option[String] = None,
tablename_postfix: Option[String] = None,
schema: Option[String] = None)
/**
* Wide range of possible column types for internal processing. These may
* be translated into a RDBMS specific type depending on the database
* capabilities. Some of the ColTypes' are really just aliases for each other.
* The column types mostly assume a RDBMS like target and are not designed
* to represent targets such as document databases.
*/
object AttrsType {
// BIT, TINYINT, SMALLINT, INTEGER, BIGINT, FLOAT, REAL, NUMERIC, DECIMAL,
// CHAR, VARCHAR, LONGVARCHAR, DATE,
// TIME, TIMESTAMP, BINARY, VARBINARY, LONGVARBINARY, NULL, OTHER, JAVA_OBJECT,
// DISTINCT, STRUCT, ARRAY, BLOB, CLOB, REF, BOOLEANINT, BOOLEANCHAR, DOUBLE
sealed trait Type
case class Char(size: Int, unicode: Boolean = false) extends Type
case class String(maxLength: Int, minLength: Option[Int] = None, info: Map[String, String] = Map(), unicode: Boolean = false) extends Type
case class Decimal(precision: Int = 0, scale: Option[Int] = None) extends Type
case class Binary(maxLength: Option[Int]) extends Type
case class VarBinary(maxLength: Option[Int]) extends Type
/** Physical representation will depend on the target. */
case object Auto extends Type
/** Physical representation will depend on the target e.g. GUID or Long. */
case object UniqueIdentifier extends Type
case object Float extends Type
case object Double extends Type
case object Bit extends Type
case object SmallInt extends Type
case object Integer extends Type
case object Long extends Type
case object BigInt extends Type
case object Money extends Type
case object Clob extends Type
case object Blob extends Type
case object Timestamp extends Type
case object Time extends Type
case object Date extends Type
case object Boolean extends Type
case object BooleanChar extends Type
case object BooleanInt extends Type
/**
* Within the target "system," a reference to another object e.g. a FK.
* Generally, Ref will be target specific type that the rendering system
* will infer based on defaults or user input. Ref is generally only used
* on the target side.
*/
case object Ref extends Type
/** Extensible type that is not represented by `Type`. */
case class Other(name: String, info: Map[String, String] = Map()) extends Type
/** Embedded structure. */
case class Object(name: String, attrs: Seq[Type]) extends Type
}
/** A slot is a source or target description. */
sealed trait Slot
/** An attribute specification: name and type. */
case class Spec(name: String, atype: AttrsType.Type, required: Boolean = false, info: Map[String, String] = Map()) extends Slot
/** Source that's a constant value. The constant value is expressed as a string. */
case class Literal(value: String, atype: AttrsType.Type) extends Slot
/**
* A computed value of a specific type and a possible list of
* attributes that may be input into that computation.
*/
case class Computed(atype: AttrsType.Type, inputs: Seq[String] = Nil) extends Slot
/** No slot. */
case object EmptySlot extends Slot
/**
* Mapping from one slot to another. Only certain types of slots are
* allowed to be a target while a source can be any type of slot.
*/
case class Mapping(target: Spec, source: Slot, info: Map[String, String] = Map.empty)
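// Example (illustrative): a CRM lookup attribute fans out into two mappings, the GUID
// column plus the referenced entity's logical name; with a single allowed target the
// name is a Literal, otherwise it is read from a companion attribute:
//
//   Mapping(Spec("ownerid", AttrsType.UniqueIdentifier), Spec("ownerid", AttrsType.UniqueIdentifier))
//   Mapping(Spec("ownerid_target", AttrsType.String(100)), Literal("systemuser", AttrsType.String(100)))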
/**
* Given a CRM schema, output a mapping objects.
*/
def createDdl(schema: CRMSchema) = {
//import scala.collection.mutable._
println(s"CRM schema has ${schema.entities.size} entity descriptions.")
val gconfig = GenerationConfig(schema = Option("crmuser"))
val root = JsonObject.fromIterable(Seq(
"verson" -> Json.fromLong(1),
"description" -> Json.fromString("Mapping from CRM to RDBMS"),
"createdOn" -> Json.fromString(httphelpers.instantString)))
val jtables = scala.collection.mutable.ListBuffer[Json]()
val mappings = scala.collection.mutable.Map[String, Seq[Mapping]]()
val db = new Database()
db.setName("crm")
schema.entities.foreach { ent =>
val attributes = scala.collection.mutable.ListBuffer[Json]()
val emapping = scala.collection.mutable.ListBuffer[Mapping]()
val table = new Table()
table.setName(ent.logicalName)
table.setSchema("crmuser")
table.setDescription("MS CRM entity " + ent.logicalName)
ent.retrievableAttributes.sortBy(_.logicalName).foreach { attr =>
logger.info(s"${ent.logicalName}.${attr.logicalName} => ${attr.attributeType}")
val cols = toColumn(attr)
cols.foreach(c => table.addColumn(c))
val as = cols.map(c => Json.obj(
"target" -> Json.fromString(c.getName),
"source" -> Json.fromString(attr.logicalName),
"sourcetype" -> Json.fromString(attr.attributeType)))
def direct(a: Attribute, at: AttrsType.Type) = Mapping(Spec(a.logicalName, at), Spec(a.logicalName, at))
emapping ++= (attr match {
case a: StringAttribute =>
Seq(Mapping(Spec(attr.logicalName, AttrsType.String(a.maxLength)), Spec(attr.logicalName, AttrsType.String(a.maxLength))))
case a: LookupAttribute =>
val col1 = Mapping(Spec(attr.logicalName, AttrsType.UniqueIdentifier), Spec(attr.logicalName, AttrsType.UniqueIdentifier))
val col2source =
if (a.targets.size == 1)
Literal(a.targets(0), AttrsType.String(100))
else
Spec(attr.logicalName + "_target", AttrsType.String(100))
val col2 = Mapping(Spec(attr.logicalName + "_target", AttrsType.String(100)), col2source,
Map("description" -> "Entity reference target entity name."))
Seq(col1, col2)
case a @ (_: PicklistAttribute | _: StatusAttribute | _: StateAttribute) =>
// Option-set style attributes map to the raw integer plus a formatted-value label.
val col1 = Mapping(Spec(a.logicalName, AttrsType.Integer), Spec(a.logicalName, AttrsType.Integer))
val col2 = Mapping(Spec(a.logicalName + "_label", AttrsType.String(100)), Spec(a.logicalName + "_fv", AttrsType.String(100)))
Seq(col1, col2)
case a: IntegerAttribute =>
Seq(direct(a, AttrsType.Integer))
case a: DateTimeAttribute =>
Seq(direct(a, AttrsType.Timestamp))
case a: EntityNameAttribute =>
Seq(direct(a, AttrsType.String(100)))
case a: MemoAttribute =>
Seq(direct(a, AttrsType.String(a.maxLength)))
case a: BooleanAttribute =>
Seq(direct(a, AttrsType.Boolean))
case a: DecimalAttribute =>
Seq(direct(a, AttrsType.Decimal(a.precision.getOrElse(0))))
case a: MoneyAttribute =>
Seq(direct(a, AttrsType.Money))
case a: DoubleAttribute =>
Seq(direct(a, AttrsType.Double))
case a: BigIntAttribute =>
Seq(direct(a, AttrsType.BigInt))
case a: BasicAttribute =>
a.attributeType match {
case "Uniqueidentifier" =>
Seq(direct(a, AttrsType.UniqueIdentifier))
case "ManagedProperty" =>
Seq(direct(a, AttrsType.String(40)))
case "Customer" =>
Seq(direct(a, AttrsType.UniqueIdentifier))
case "Owner" =>
Seq(direct(a, AttrsType.UniqueIdentifier))
case "Virtual" =>
Nil
case x@_ =>
throw new RuntimeException(s"Attribute ${a.logicalName}: Unhandled attribute type $x")
}
})
attributes ++= as
}
val col = new Column()
col.setName("auto_id")
col.setType(TypeMap.INTEGER)
col.setAutoIncrement(true)
table.addColumn(col)
db.addTable(table)
emapping += Mapping(Spec("auto_inc", AttrsType.Auto), EmptySlot)
mappings += ent.logicalName -> emapping
val anames = ent.retrievableAttributes.map(_.logicalName.toLowerCase)
val hasCreatedOn = anames.contains("createdon")
val hasModifiedOn = anames.contains("modifiedon")
// Only advertise the timestamp attributes that the entity actually carries.
val timestampAttrs =
(if (hasCreatedOn) Seq(Json.fromString("createdOn")) else Nil) ++
(if (hasModifiedOn) Seq(Json.fromString("modifiedOn")) else Nil)
jtables += Json.obj(
"entityname" -> Json.fromString(ent.logicalName),
"tablename" -> Json.fromString(ent.logicalName),
"mappings" -> Json.fromValues(attributes),
"timestampAttributes" -> Json.fromValues(timestampAttrs))
}
(db, root.add("tables", Json.fromValues(jtables)), mappings)
}
def toColumn(spec: Spec): Validated[String, Column] = {
import AttrsType._
// Build a column of the given JDBC type with an optional size. Most conversions
// are straightforward TypeMap lookups; a few types have no column equivalent.
def mk(jdbcType: java.lang.String, size: Option[java.lang.String] = None): Validated[java.lang.String, Column] = {
val col = new Column()
col.setName(spec.name)
col.setType(jdbcType)
size.foreach(s => col.setSize(s))
col.valid
}
spec.atype match {
case Char(size, _) => mk(TypeMap.CHAR, Some(size.toString))
case String(maxLength, _, _, _) => mk(TypeMap.VARCHAR, Some(maxLength.toString))
case Decimal(precision, scale) =>
val col = new Column()
col.setName(spec.name)
col.setType(TypeMap.DECIMAL)
col.setPrecisionRadix(precision)
scale.foreach(s => col.setScale(s))
col.valid
case Binary(maxLength) => mk(TypeMap.BINARY, maxLength.map(_.toString))
case VarBinary(maxLength) => mk(TypeMap.VARBINARY, maxLength.map(_.toString))
case Auto => mk(TypeMap.INTEGER)
case UniqueIdentifier => mk(TypeMap.CHAR, Some(guidSizeStr))
case Float => mk(TypeMap.FLOAT)
case Double => mk(TypeMap.DOUBLE)
case Bit => mk(TypeMap.BIT)
case SmallInt => mk(TypeMap.SMALLINT)
case Integer => mk(TypeMap.INTEGER)
case Long | BigInt => mk(TypeMap.BIGINT)
case Money => mk(TypeMap.DECIMAL)
case Clob => mk(TypeMap.CLOB)
case Blob => mk(TypeMap.BLOB)
case Timestamp => mk(TypeMap.TIMESTAMP)
case Time => mk(TypeMap.TIME)
case Date => mk(TypeMap.DATE)
case Boolean => mk(TypeMap.BOOLEAN)
case BooleanChar => mk(TypeMap.BOOLEANCHAR)
case BooleanInt => mk(TypeMap.BOOLEANINT)
case _: Ref.type => "No conversion from Ref".invalid
case _: Other => "No conversion from Other".invalid
case _: Object => "No conversion from Object".invalid
}
}
def toColumn(attr: Attribute): Seq[Column] = {
def c(attr: Attribute) = {
val c = new Column()
c.setName(attr.logicalName)
c.setDescription(attr.logicalName)
c
}
attr match {
case a: LookupAttribute =>
val col = c(a)
col.setType(TypeMap.CHAR)
col.setSize(guidSizeStr)
val col2 = new Column()
col2.setName(a.logicalName + "_tgt")
col2.setDescription("Entity reference target.")
col2.setType(TypeMap.VARCHAR)
col2.setSize("100")
Seq(col, col2)
case a @ (_: PicklistAttribute | _: StatusAttribute | _: StateAttribute) =>
// Option-set style attributes get the raw integer value plus a label column.
val col = c(a)
col.setType(TypeMap.INTEGER)
val col2 = new Column()
col2.setName(a.logicalName + "_label")
col2.setType(TypeMap.VARCHAR)
col2.setSize("100")
Seq(col, col2)
case a: StringAttribute =>
val col = c(a)
col.setType(TypeMap.VARCHAR)
col.setSize(a.maxLength.toString)
Seq(col)
case a: IntegerAttribute =>
val col = c(a)
col.setType(TypeMap.INTEGER)
Seq(col)
case a: DateTimeAttribute =>
val col = c(a)
col.setType(TypeMap.TIMESTAMP)
Seq(col)
case a: EntityNameAttribute =>
val col = c(a)
col.setType(TypeMap.VARCHAR)
col.setSize("100")
Seq(col)
case a: MemoAttribute =>
val col = c(a)
col.setType(TypeMap.VARCHAR)
col.setSize(a.maxLength.toString)
Seq(col)
case a: BooleanAttribute =>
val col = c(a)
col.setType(TypeMap.BOOLEAN)
Seq(col)
case a: DecimalAttribute =>
val col = c(a)
col.setType(TypeMap.DECIMAL)
a.precision.foreach(p => col.setPrecisionRadix(p))
Seq(col)
case a: MoneyAttribute =>
// TODO handle precision better
val col = c(a)
col.setType(TypeMap.DECIMAL)
col.setPrecisionRadix(2)
Seq(col)
case a: DoubleAttribute =>
val col = c(a)
col.setType(TypeMap.DOUBLE)
Seq(col)
case a: BigIntAttribute =>
val col = c(a)
col.setType(TypeMap.BIGINT)
Seq(col)
case a: BasicAttribute if a.attributeType == "Virtual" =>
// Virtual attributes have no physical column (mirrors the mapping logic above).
Nil
case a: BasicAttribute =>
// bit of a catch-all...
val col = c(a)
a.attributeType match {
case "BigInt" =>
col.setType(TypeMap.BIGINT)
case "Uniqueidentifier" =>
col.setType(TypeMap.CHAR)
col.setSize(guidSizeStr)
case "Boolean" =>
col.setType(TypeMap.BOOLEAN)
case "ManagedProperty" =>
col.setType(TypeMap.VARCHAR)
col.setSize("40")
case "Customer" =>
col.setType(TypeMap.CHAR)
col.setSize(guidSizeStr)
case "Owner" =>
col.setType(TypeMap.CHAR)
col.setSize(guidSizeStr)
case x@_ =>
throw new RuntimeException(s"Attribute ${a.logicalName}: Unhandled attribute type $x")
}
Seq(col)
case x@_ =>
throw new RuntimeException(s"Unhandled attribute metadata $x")
}
}
}
|
aappddeevv/mscrm-soap-auth
|
src/main/scala/crm/copy.scala
|
Scala
|
apache-2.0
| 17,139
|
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.stats.writer
import io.gatling.commons.stats.Status
import io.gatling.commons.stats.assertion.Assertion
import io.gatling.core.config.GatlingConfiguration
import io.gatling.core.session.Session
import io.gatling.core.stats.message.{ ResponseTimings, MessageEvent }
case class ShortScenarioDescription(name: String, userCount: Int)
case class RunMessage(
simulationClassName: String,
userDefinedSimulationId: Option[String],
defaultSimulationId: String,
start: Long,
runDescription: String
) {
val simulationId = userDefinedSimulationId.getOrElse(defaultSimulationId)
val runId = simulationId + "-" + start
}
sealed trait DataWriterMessage
case class Init(configuration: GatlingConfiguration, assertions: Seq[Assertion], runMessage: RunMessage, scenarios: Seq[ShortScenarioDescription]) extends DataWriterMessage
case object Flush extends DataWriterMessage
case class Crash(cause: String) extends DataWriterMessage
case object Stop extends DataWriterMessage
sealed trait LoadEventMessage extends DataWriterMessage
case class UserMessage(
session: Session,
event: MessageEvent,
timestamp: Long
) extends LoadEventMessage
case class ResponseMessage(
scenario: String,
userId: Long,
groupHierarchy: List[String],
name: String,
timings: ResponseTimings,
status: Status,
responseCode: Option[String],
message: Option[String],
extraInfo: List[Any]
) extends LoadEventMessage
case class GroupMessage(
scenario: String,
userId: Long,
groupHierarchy: List[String],
startTimestamp: Long,
endTimestamp: Long,
cumulatedResponseTime: Int,
status: Status
) extends LoadEventMessage {
val duration = (endTimestamp - startTimestamp).toInt
}
case class ErrorMessage(message: String, date: Long) extends LoadEventMessage
|
GabrielPlassard/gatling
|
gatling-core/src/main/scala/io/gatling/core/stats/writer/DataWriterMessage.scala
|
Scala
|
apache-2.0
| 2,592
|
import org.scalajs.jsenv.nodejs.NodeJSEnv
import org.scalajs.jsenv.phantomjs.PhantomJS2Env
import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
import sbt.Keys._
import sbt._
object Dependencies {
object Version {
val scala211 = "2.11.8"
val scala212 = "2.12.1"
val scalaAsync = "0.9.6"
val scalatest = "3.0.0"
val utest = "0.3.0"
val scalaJSDOM = "0.9.0"
val chandu0101Macros = "2016.5.0"
val sriCore = "0.6.0"
}
val scalatestJS = libraryDependencies += "org.scalatest" %%% "scalatest" % Version.scalatest % Test
val scalaAsync = libraryDependencies += "org.scala-lang.modules" %% "scala-async" % Version.scalaAsync
val scalaJSDOM = libraryDependencies += "org.scala-js" %%% "scalajs-dom" % Version.scalaJSDOM
val chandu0101Macros = libraryDependencies += "com.github.chandu0101" %%% "macros" % Version.chandu0101Macros
val sriUniversal = libraryDependencies += "com.github.chandu0101" %%% "sri-universal" % Version.sriCore
val sriMobile = libraryDependencies += "com.github.chandu0101" %%% "sri-mobile" % Version.sriCore
val sriWeb = libraryDependencies += "com.github.chandu0101" %%% "sri-web" % Version.sriCore
val scalaReflect = libraryDependencies += "org.scala-lang" % "scala-reflect" % scalaVersion.value % Provided
val relayModuleDeps = Seq(scalaReflect, sriUniversal)
val relayWebExamplesModuleDeps = Seq(sriWeb)
val relayMobileExamplesModuleDeps = Seq(sriMobile)
val scalatestJSSettings = Seq(
scalatestJS,
scalaJSStage in Global := FastOptStage,
// scalaJSStage in Global := FullOptStage,
jsDependencies += RuntimeDOM,
jsDependencies += ProvidedJS / "test-bundle.js" % Test,
jsEnv in Test := new PhantomJS2Env(scalaJSPhantomJSClassLoader.value, addArgs = Seq("--web-security=no"))
// jsEnv in Test := new NodeJSEnv()
)
}
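// Usage sketch (illustrative): a Scala.js test module would combine these as
//   project.settings(scalatestJSSettings: _*).settings(relayModuleDeps: _*)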
|
chandu0101/sri-relay
|
project/Dependencies.scala
|
Scala
|
apache-2.0
| 1,802
|
package org.labrad.crypto
import org.scalatest.FunSuite
class CertsTest extends FunSuite {
/**
* Verify that fingerprint matches what we get from openssl.
*
* To get the fingerprint of a certificate with openssl, you can do:
* $ openssl x509 -fingerprint -in <certfile>
*/
test("fingerprintSHA1") {
val cert = """
|-----BEGIN CERTIFICATE-----
|MIICqjCCAZKgAwIBAgIILDVF+J1HIWowDQYJKoZIhvcNAQELBQAwFDESMBAGA1UE
|AwwJbG9jYWxob3N0MCAXDTE0MDgyODE3MTUyM1oYDzk5OTkxMjMxMjM1OTU5WjAU
|MRIwEAYDVQQDDAlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
|AoIBAQCVPL4WfLACl/oR+RNolkwxfyxcj8IdTrzrUXmY1zBBncdHhHOPijJZHdP5
|vmD4HO2kb793V4uT02gnMq+MiYr8fNui7yRYnInG3GsgwJKQTH/5objHN6JZl505
|82r/aas0rDjRRc25WXGSCuPuW+h1u0f2N4PuBwN6iCmv0FDhEz7C3FgDWDSk4heY
|xLglX2wnkrD4nob4NXWqX7VZ5A9KYBKQPhpOczg+vtv1nd3CnU5JN9GbC3AZ+4wF
|Zb0xMpLDgn4T5z0TjOhJOwCbDOSsJWdC0q6UQe4ln5GubzKjPaJWVR4VXOsVuqoU
|q4fLed5vHSsQUc0C6qElvak0xULbAgMBAAEwDQYJKoZIhvcNAQELBQADggEBACVZ
|H5UJfpvujJ5tV0Mj9Jho0f5Wf/KARTa/5NL6lbM3KPSrrOZGdnH2Hag6me3JMJ+y
|kpxCc5HQnhF+Ep21Y5tFo0Ex2/FRGSfxZZVL0ERjMYnJpzjbnB4S5VPYW1LB+ctL
|+kwNc+sc8up986zNZnzxRY5hllvmC82Bn24dCVECzy3fgczVpOSh4pLeF+sXYOA1
|2ZT081GtWsEjebCndRoTtInTkqdtsSLHvznAi8YQ7lhtow/sAr5hbUaGwdrROUaq
|6Z+dzh7LHrTmfqmefNGGqi+hWKSU+fxGYQ+QDUjCD5J1dfnnueeHgYIeiQ9ZjnyE
|f3brW92sWUeqydhqD5A=
|-----END CERTIFICATE-----
|""".stripMargin
val expectedFingerprint = "5E:DF:14:6E:87:0F:2D:15:FD:B2:07:4E:6B:43:9A:21:84:58:F7:70"
assert(Certs.fingerprintSHA1(cert) == expectedFingerprint)
}
}
|
labrad/scalabrad
|
core/src/test/scala/org/labrad/crypto/CertsTest.scala
|
Scala
|
mit
| 1,665
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes.datastream
import com.google.common.collect.ImmutableList
import org.apache.calcite.plan._
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.core.Values
import org.apache.calcite.rex.RexLiteral
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.table.api.{StreamQueryConfig, StreamTableEnvironment}
import org.apache.flink.table.codegen.InputFormatCodeGenerator
import org.apache.flink.table.plan.schema.RowSchema
import org.apache.flink.table.runtime.io.CRowValuesInputFormat
import org.apache.flink.table.runtime.types.{CRow, CRowTypeInfo}
import scala.collection.JavaConverters._
/**
* DataStream RelNode for LogicalValues.
*/
class DataStreamValues(
cluster: RelOptCluster,
traitSet: RelTraitSet,
schema: RowSchema,
tuples: ImmutableList[ImmutableList[RexLiteral]],
ruleDescription: String)
extends Values(cluster, schema.logicalType, tuples, traitSet)
with DataStreamRel {
override def deriveRowType() = schema.logicalType
override def copy(traitSet: RelTraitSet, inputs: java.util.List[RelNode]): RelNode = {
new DataStreamValues(
cluster,
traitSet,
schema,
getTuples,
ruleDescription
)
}
override def translateToPlan(
tableEnv: StreamTableEnvironment,
queryConfig: StreamQueryConfig): DataStream[CRow] = {
val config = tableEnv.getConfig
val returnType = CRowTypeInfo(schema.physicalTypeInfo)
val generator = new InputFormatCodeGenerator(config)
// generate code for every record
val generatedRecords = getTuples.asScala.map { r =>
generator.generateResultExpression(
schema.physicalTypeInfo,
schema.physicalFieldNames,
r.asScala)
}
// generate input format
val generatedFunction = generator.generateValuesInputFormat(
ruleDescription,
generatedRecords.map(_.code),
schema.physicalTypeInfo)
val inputFormat = new CRowValuesInputFormat(
generatedFunction.name,
generatedFunction.code,
returnType)
tableEnv.execEnv.createInput(inputFormat, returnType)
}
}
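// Example (illustrative): a query such as
//   SELECT * FROM (VALUES (1, 'Hello'), (2, 'World')) AS T(a, b)
// reaches this node as two ImmutableLists of RexLiterals; one result expression is
// generated per tuple and the CRowValuesInputFormat replays them as a bounded stream.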
|
mtunique/flink
|
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/datastream/DataStreamValues.scala
|
Scala
|
apache-2.0
| 2,968
|
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.resolver
import java.net.InetAddress
import java.util.{ Collections => JCollections, List => JList }
import io.netty.resolver.NameResolver
import io.netty.util.concurrent.{ Future, ImmediateEventExecutor, Promise }
class AliasesAwareNameResolver(aliases: Map[String, InetAddress], wrapped: NameResolver[InetAddress]) extends NameResolver[InetAddress] {
override def resolve(s: String): Future[InetAddress] =
aliases.get(s) match {
case Some(address) => ImmediateEventExecutor.INSTANCE.newPromise[InetAddress].setSuccess(address)
case _ => wrapped.resolve(s)
}
override def resolve(s: String, promise: Promise[InetAddress]): Future[InetAddress] =
aliases.get(s) match {
case Some(address) => promise.setSuccess(address)
case _ => wrapped.resolve(s, promise)
}
override def resolveAll(s: String): Future[JList[InetAddress]] =
aliases.get(s) match {
case Some(address) => ImmediateEventExecutor.INSTANCE.newPromise[JList[InetAddress]].setSuccess(JCollections.singletonList(address))
case _ => wrapped.resolveAll(s)
}
override def resolveAll(s: String, promise: Promise[JList[InetAddress]]): Future[JList[InetAddress]] =
aliases.get(s) match {
case Some(address) => promise.setSuccess(JCollections.singletonList(address))
case _ => wrapped.resolveAll(s, promise)
}
override def close(): Unit = wrapped.close()
}
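// Usage sketch (illustrative): route "myhost" to a fixed address while delegating
// everything else; DefaultNameResolver here stands in for whatever resolver Gatling
// would otherwise use.
//
//   val wrapped = new io.netty.resolver.DefaultNameResolver(ImmediateEventExecutor.INSTANCE)
//   val resolver = new AliasesAwareNameResolver(
//     Map("myhost" -> InetAddress.getByName("127.0.0.1")), wrapped)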
|
GabrielPlassard/gatling
|
gatling-http/src/main/scala/io/gatling/http/resolver/AliasesAwareNameResolver.scala
|
Scala
|
apache-2.0
| 2,096
|
/*
* Copyright 2017 Sumo Logic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ws.epigraph.compiler
import com.intellij.psi.PsiElement
import ws.epigraph.lang.TextLocation
import ws.epigraph.psi.EpigraphPsiUtil
/**
* @author <a href="mailto:konstantin.sobolev@gmail.com">Konstantin Sobolev</a>
*/
trait ErrorReporter {
def message(message: String, location: TextLocation, level: CMessageLevel): Unit
def error(message: String, location: TextLocation): Unit = this.message(message, location, CMessageLevel.Error)
def error(message: String, psi: PsiElement): Unit = error(message, EpigraphPsiUtil.getLocation(psi))
def warning(message: String, location: TextLocation): Unit = this.message(message, location, CMessageLevel.Warning)
def warning(message: String, psi: PsiElement): Unit = warning(message, EpigraphPsiUtil.getLocation(psi))
}
object ErrorReporter {
private def cErrorPosition(csf: CSchemaFile, location: TextLocation): CMessagePosition =
if (location == TextLocation.UNKNOWN)
CMessagePosition.NA
else if (location.startLine() == location.endLine())
csf.lnu.pos(location.startOffset(), location.endOffset() - location.startOffset())
else
csf.lnu.pos(location.startOffset())
def reporter(csf: CSchemaFile)(implicit ctx: CContext): ErrorReporter =
new ErrorReporter {
override def message(msg: String, location: TextLocation, level: CMessageLevel): Unit = {
val errorPosition = cErrorPosition(csf, location)
ctx.errors.add(new CMessage(location.fileName(), errorPosition, msg, level))
}
}
def reporter(csfm: Map[String, CSchemaFile])(implicit ctx: CContext): ErrorReporter =
new ErrorReporter {
override def message(msg: String, location: TextLocation, level: CMessageLevel): Unit = {
val errorPosition: CMessagePosition =
if (location == TextLocation.UNKNOWN)
CMessagePosition.NA
else
csfm.get(location.fileName()).map(csf => cErrorPosition(csf, location)).getOrElse(CMessagePosition.NA)
ctx.errors.add(new CMessage(location.fileName(), errorPosition, msg, level))
}
}
}
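// Usage sketch (illustrative): given an implicit CContext and a parsed schema file,
// errors and warnings are accumulated on the context rather than thrown:
//
//   val r = ErrorReporter.reporter(csf)
//   r.error("duplicate type name", someTextLocation)
//   r.warning("deprecated construct", somePsiElement)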
|
SumoLogic/epigraph
|
compiler/src/main/scala/ws/epigraph/compiler/ErrorReporter.scala
|
Scala
|
apache-2.0
| 2,671
|
package com.rest.akka.api.routes
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.server.Directives._
import akka.stream.ActorMaterializer
import akka.http.scaladsl.marshallers.xml.ScalaXmlSupport._
import com.rest.akka.api.controller.AkkaHttpController
import com.rest.akka.api.domain.WelcomeMessage
import com.rest.akka.api.serialization.JsonSerialization
import org.joda.time.DateTime
class MainRouter(private implicit val materializer: ActorMaterializer) extends JsonSerialization {
def apply(): Route = {
// handleExceptions(exceptionHandler) {
pathPrefix("api") { routes() }
// }
}
private def routes(): Route =
path("test") { get { complete(WelcomeMessage("Test message")) } } ~
path("welcome") {
get { complete { AkkaHttpController.welcome() } } ~
(post & entity(as[WelcomeMessage])) { messageRequest => complete(AkkaHttpController.welcome(Some(messageRequest.message))) }
} ~
path("status") { get { complete(Status("OK", DateTime.now().toString("HH:mm dd MMM yyyy"))) } }
}
case class Status(status: String, time: String)
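// Usage sketch (illustrative; the WelcomeMessage field name is assumed):
//   GET  /api/test                        -> WelcomeMessage("Test message")
//   GET  /api/welcome                     -> controller-provided welcome
//   POST /api/welcome  {"message": "hi"}  -> echoes via AkkaHttpController.welcome
//   GET  /api/status                      -> {"status": "OK", "time": "..."}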
|
mgobec/rest-akka-api
|
src/main/scala/com/rest/akka/api/routes/MainRouter.scala
|
Scala
|
apache-2.0
| 1,116
|
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wsutil
import javax.inject._
import cmwell.domain.{Formattable, Infoton}
import cmwell.formats._
import cmwell.fts.{FieldFilter, FieldOperator}
import cmwell.web.ld.cmw.CMWellRDFHelper
import com.typesafe.scalalogging.LazyLogging
import markdown.PrettyCsvFormatter
object FormatterManager {
val prettyMangledField: String => String = {
case s if s.length > 1 && s(1) == '$' =>
s.head match {
case 'i' => s.drop(2) + ": Int"
case 'l' => s.drop(2) + ": Long/BigInt"
case 'w' => s.drop(2) + ": Double/BigDecimal"
case 'b' => s.drop(2) + ": Boolean"
case 'd' => s.drop(2) + ": Date"
case 'f' => s.drop(2) + ": Float"
}
case s => s
}
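// Example (illustrative): a '$' in the second position marks a mangled name whose
// first character encodes the type, so prettyMangledField("i$age") == "age: Int"
// while unmangled names pass through: prettyMangledField("name") == "name".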
def multiFormattableToSeq(formattable: Formattable, formatter: Formatter): String = {
val infotons: Seq[Infoton] = formattable match {
case _ => ???
}
formatFormattableSeq(infotons, formatter)
}
def formatFormattableSeq[T <: Formattable](infotons: Seq[T], formatter: Formatter): String = {
val sb = new StringBuilder()
infotons.foreach { i =>
val formatted = formatter.render(i) + "\n"
sb.append(formatted)
}
sb.mkString
}
private def getKeyForRdfFormatterMap(rdfFlavor: RdfFlavor,
host: String,
withoutMeta: Boolean,
filterOutBlanks: Boolean,
forceUniqueness: Boolean,
pretty: Boolean,
raw: Boolean,
callback: Option[String]): String = {
def bool2string(b: Boolean): String = if (b) "T" else "F"
if (Set[RdfFlavor](JsonLDFlavor, JsonLDQFlavor)(rdfFlavor)) {
s"${rdfFlavor.key}\t$host\t${bool2string(withoutMeta)}\t${bool2string(filterOutBlanks)}\t${bool2string(forceUniqueness)}\t${bool2string(pretty)}\t${bool2string(raw)}\t${callback.getOrElse("")}"
} else {
s"${rdfFlavor.key}\t$host\t${bool2string(withoutMeta)}\t${bool2string(filterOutBlanks)}\t${bool2string(raw)}\t${bool2string(forceUniqueness)}\t\t"
}
}
}
@Singleton
class FormatterManager @Inject()(C: CMWellRDFHelper) extends LazyLogging {
import FormatterManager._
//var is OK as not volatile, cache, frequent reads + rare writes of immutable object pattern (Gilad + Dudi)
private[this] var rdfFormatterMap = Map[String, RDFFormatter]()
def innerToSimpleFieldName(fieldName: String): String = {
fieldName.lastIndexOf('.') match {
case -1 => fieldName
case i => {
val (first, dotLast) = fieldName.splitAt(i)
val last = dotLast.tail
C.hashToUrlAndPrefix(last, None) match {
case None => fieldName
case Some((_, prefix)) => s"$first.$prefix"
}
}
}
}
lazy val jsonFormatter = new JsonFormatter(innerToSimpleFieldName)
lazy val prettyJsonFormatter = new PrettyJsonFormatter(innerToSimpleFieldName)
lazy val yamlFormatter = new YamlFormatter(innerToSimpleFieldName)
lazy val csvFormatter = CSVFormatter(prettyMangledField.compose(innerToSimpleFieldName))
lazy val prettyCsvFormatter = new PrettyCsvFormatter(innerToSimpleFieldName)
val fieldTranslatorForRichRDF: Option[Long] => String => Option[(String, Option[String])] = (t: Option[Long]) =>
(s: String) => C.hashToUrlAndPrefix(s, t).map { case (url, prefix) => url -> Option(prefix) }
val fieldTranslatorForPrefixlessRDF: Option[Long] => String => Option[(String, Option[String])] = (t: Option[Long]) =>
(s: String) => C.hashToUrl(s, t).map { case url => url -> None }
def getFormatter(
format: FormatType,
timeContext: Option[Long],
host: String = "http://cm-well",
uri: String = "http://cm-well",
pretty: Boolean = false,
raw: Boolean = false,
callback: Option[String] = None,
fieldFilters: Option[FieldFilter] = None,
offset: Option[Long] = None,
length: Option[Long] = None,
withData: Option[String] = None,
withoutMeta: Boolean = false,
forceUniqueness: Boolean = false, //if you want histories to not collide, e.g. searching with-history and output RDF (RDF only flag)
filterOutBlanks: Boolean = false
): Formatter = {
format match {
case TextType => PathFormatter
case TsvType => TsvFormatter
case CsvType if pretty => prettyCsvFormatter
case CsvType => csvFormatter
case JsonType if pretty && callback.isDefined => new PrettyJsonFormatter(innerToSimpleFieldName, callback)
case JsonType if pretty => prettyJsonFormatter
case JsonType if callback.isDefined => new JsonFormatter(innerToSimpleFieldName, callback)
case JsonType => jsonFormatter
case JsonlType if pretty =>
new PrettyJsonlFormatter(C.hashToUrlAndPrefix(_, timeContext), { quadUrl =>
C.getAliasForQuadUrl(quadUrl) match {
case opt @ Some(alias) => opt
case None => Some(quadUrl)
}
}, callback)
case JsonlType => new JsonlFormatter(C.hashToUrlAndPrefix(_, timeContext), Some.apply, callback)
case YamlType => yamlFormatter
case RdfType(rdfFlavor) => {
val key =
getKeyForRdfFormatterMap(rdfFlavor, host, withoutMeta, filterOutBlanks, forceUniqueness, pretty, raw, callback)
if (rdfFormatterMap.contains(key)) rdfFormatterMap(key)
else {
val newFormatter = rdfFlavor match {
case RdfXmlFlavor =>
new RDFXmlFormatter(host,
fieldTranslatorForRichRDF(timeContext),
withoutMeta,
filterOutBlanks,
raw,
forceUniqueness)
case TurtleFlavor =>
new TurtleFormatter(host,
fieldTranslatorForRichRDF(timeContext),
withoutMeta,
filterOutBlanks,
raw,
forceUniqueness)
case N3Flavor =>
new N3Formatter(host,
fieldTranslatorForRichRDF(timeContext),
withoutMeta,
filterOutBlanks,
raw,
forceUniqueness)
case NTriplesFlavor =>
new NTriplesFormatter(host,
fieldTranslatorForPrefixlessRDF(timeContext),
withoutMeta,
filterOutBlanks,
raw,
forceUniqueness)
case JsonLDFlavor =>
JsonLDFormatter(host,
fieldTranslatorForRichRDF(timeContext),
withoutMeta,
filterOutBlanks,
forceUniqueness,
pretty,
raw,
callback)
case NquadsFlavor =>
new NQuadsFormatter(host,
fieldTranslatorForPrefixlessRDF(timeContext),
withoutMeta,
filterOutBlanks,
raw,
forceUniqueness)
case TriGFlavor =>
new TriGFormatter(host,
fieldTranslatorForRichRDF(timeContext),
C.getAliasForQuadUrl,
withoutMeta,
filterOutBlanks,
raw,
forceUniqueness)
case TriXFlavor =>
new TriXFormatter(host,
fieldTranslatorForRichRDF(timeContext),
C.getAliasForQuadUrl,
withoutMeta,
filterOutBlanks,
raw,
forceUniqueness)
case JsonLDQFlavor =>
JsonLDQFormatter(host,
fieldTranslatorForRichRDF(timeContext),
C.getAliasForQuadUrl,
withoutMeta,
filterOutBlanks,
forceUniqueness,
pretty,
raw,
callback)
}
rdfFormatterMap = rdfFormatterMap.updated(key, newFormatter)
newFormatter
}
}
case AtomType => {
val innerFormatterOpt = withData.map(ft => FormatExtractor.withDefault(ft, RdfType(TriGFlavor))).map { ft =>
if (ft eq AtomType) throw new IllegalArgumentException("you can't have atom format with inline atom data!")
else getFormatter(ft, timeContext, host, uri, pretty, raw, callback, fieldFilters, offset, length, None)
}
(offset, length) match {
case (Some(o), Some(l)) => AtomFormatter(host, uri, fieldFilters, o, l, innerFormatterOpt)
case (None, None) => AtomFormatter(host, uri, innerFormatterOpt)
case _ => {
logger.warn(s"Atom formatter: case that was un-thought of reached with: $fieldFilters , $offset , $length")
AtomFormatter(host, uri, innerFormatterOpt)
}
}
}
}
}
}
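// --- Illustrative usage sketch (not part of the original file; `fm` and `infotons` are hypothetical) ---
// val formatter = fm.getFormatter(JsonType, timeContext = None, pretty = true)
// val body = FormatterManager.formatFormattableSeq(infotons, formatter)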
|
thomsonreuters/CM-Well
|
server/cmwell-ws/app/wsutil/FormatterManager.scala
|
Scala
|
apache-2.0
| 10,558
|
package fpinscala.gettingstarted.samples
import scala.annotation.tailrec
/**
* Created by younggi on 4/14/17.
*/
object FormatResults {
def abs(n: Int): Int = {
    if (n < 0) -n
else n
}
def factorial(n: Int): Int = {
@tailrec
def fac(n: Int, acc: Int): Int = {
if (n == 0) acc
else fac(n-1, n*acc)
}
fac(n, 1)
}
def fibonacci(n: Int): Int = {
@tailrec
def fib(n: Int, a: Int, b: Int): Int = {
if (n == 0) a
else fib(n-1, b, a+b)
}
fib(n, 0, 1)
}
def formatResult(name: String, x: Int, f: Int => Int) = {
val msg = "The %s of %d is %d"
msg.format(name, x, f(x))
}
def main(args: Array[String]): Unit = {
println(formatResult("absolure value", -42, abs))
println(formatResult("factorial", 7, factorial))
println(formatResult("fibonnaci", 7, fibonacci))
println(formatResult("increment", 7, (x: Int) => x + 1))
println(formatResult("increment2", 7, (x) => x + 1))
println(formatResult("increment3", 7, x => x + 1))
println(formatResult("increment2", 7, _ + 1))
println(formatResult("increment2", 7, x => { val r = x + 1; r}))
}
}
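// Illustrative trace of the tail-recursive accumulator for fibonacci(5):
// fib(5, 0, 1) -> fib(4, 1, 1) -> fib(3, 1, 2) -> fib(2, 2, 3) -> fib(1, 3, 5) -> fib(0, 5, 8) = 5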
|
younggi/books
|
functional_programming_in_scala/funpro_scala/src/main/scala/fpinscala/gettingstarted/samples/FormatResults.scala
|
Scala
|
mit
| 1,160
|
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.impl.storage
import slamdata.Predef._
import quasar.impl.cluster.{Timestamped, Cluster, Message}, Timestamped._, Message._
import cats.~>
import cats.effect._
import cats.effect.concurrent.{Deferred, Semaphore}
import cats.instances.list._
import cats.syntax.applicative._
import cats.syntax.apply._
import cats.syntax.flatMap._
import cats.syntax.foldable._
import cats.syntax.functor._
import fs2.Stream
import scodec.Codec
import scodec.codecs.{listOfN, int32}
import scodec.codecs.implicits._
import scala.concurrent.duration._
import AntiEntropyStore._
final class AntiEntropyStore[F[_]: ConcurrentEffect: ContextShift: Timer, K: Codec, V: Codec](
name: String,
cluster: Cluster[F, Message],
store: TimestampedStore[F, K, V],
gates: Gates[F],
config: AntiEntropyStoreConfig)
extends IndexedStore[F, K, V] {
private val F = Sync[F]
private val underlying: IndexedStore[F, K, Timestamped[V]] = store.underlying
private def ms(inp: Long): FiniteDuration = new FiniteDuration(inp, MILLISECONDS)
def entries: Stream[F, (K, V)] =
store.entries
def lookup(k: K): F[Option[V]] =
store.lookup(k)
def insert(k: K, v: V): F[Unit] =
gates.in(store.insert(k, v))
def delete(k: K): F[Boolean] =
gates.in(store.delete(k))
// SENDING ADS
def sendingAdStream: Stream[F, Unit] =
(Stream.eval(sendAd) *> Stream.sleep(ms(config.adTimeoutMillis))).repeat
private def sendAd: F[Unit] =
store.timestamps.flatMap { (ts: Map[K, Long]) =>
cluster.gossip(Advertisement(name), ts).unlessA(ts.isEmpty)
}
// RECEIVING ADS
private def handleAdvertisement(id: cluster.Id, ad: Map[K, Long]): F[Unit] = {
type Accum = (List[K], Map[K, Timestamped[V]])
    // Note that keys absent from this advertisement are not handled here;
    // they are handled when this node sends its own advertisement instead.
    // Fold over every advertised key, comparing its timestamp against our local one.
def result(init: Accum, timestamps: Map[K, Long]) = ad.toList.foldM[F, Accum](init){
case (acc @ (requesting, returning), (k, incoming)) => for {
// when this key was modified last time
current <- timestamps.get(k).getOrElse(0L).pure[F]
res <- if (current < incoming) {
// current value is older than incoming, add key to update request
((k :: requesting, returning)).pure[F]
} else underlying.lookup(k).map {
case None =>
acc // impossible in theory, but can't figure out how to handle this gracefully
case Some(having) =>
// Value we have is newer than incoming, create update message
(requesting, returning.updated(k, having))
}
} yield res
}
for {
timestamps <- store.timestamps
(requesting, returning) <- result((List(), Map.empty), timestamps)
_ <- cluster.unicast(RequestUpdate(name), requesting, id).unlessA(requesting.isEmpty)
_ <- cluster.unicast(Update(name), returning, id).unlessA(returning.isEmpty)
} yield ()
}
def advertisementHandled: F[Stream[F, Unit]] =
cluster.subscribe[Map[K, Long]](Advertisement(name), config.adLimit)
.map(_.evalMap(Function.tupled(handleAdvertisement(_, _))(_)))
// TOMBSTONES PURGING
def purgeTombstones: Stream[F, Unit] =
(Stream.sleep(ms(config.purgeTimeoutMillis)) *> Stream.eval(purge)).repeat
private def purge: F[Unit] = {
val purgingStream: Stream[F, Unit] = underlying.entries.evalMap { case (k, v) => for {
now <- Timer[F].clock.realTime(MILLISECONDS)
_ <- underlying.delete(k).whenA(raw(v).isEmpty && now - timestamp(v) > config.tombstoneLiveForMillis)
} yield () }
gates.strict(purgingStream.compile.drain)
}
// RECEIVING UPDATES
private def updateHandler(id: cluster.Id, mp: Map[K, Timestamped[V]]): F[Unit] = mp.toList.traverse_ {
case (k, newVal) => for {
v <- underlying.lookup(k)
ts = v.fold(0L)(timestamp(_))
_ <- gates.in(underlying.insert(k, newVal).whenA(ts < timestamp(newVal)))
} yield (())
}
def updateHandled: F[Stream[F, Unit]] =
cluster.subscribe[Map[K, Timestamped[V]]](Update(name), config.updateLimit)
.map(_.evalMap(Function.tupled(updateHandler(_, _))(_)))
// REQUESTING FOR UPDATES
private def updateRequestedHandler(id: cluster.Id, req: List[K]): F[Unit] = for {
payload <- subMap(req)
_ <- cluster.unicast(Update(name), payload, id).unlessA(payload.isEmpty)
} yield (())
private def subMap(req: List[K]): F[Map[K, Timestamped[V]]] =
req.foldM(Map[K, Timestamped[V]]())((acc: Map[K, Timestamped[V]], k: K) => for {
uv <- underlying.lookup(k)
      // we've been asked for an update; if we have no value for this key, return a tombstone
toInsert <- uv.fold(tombstone[F, V])(_.pure[F])
} yield acc.updated(k, toInsert))
def updateRequestHandled: F[Stream[F, Unit]] =
cluster.subscribe[List[K]](RequestUpdate(name), config.updateRequestLimit)
.map(_.evalMap(Function.tupled(updateRequestedHandler(_, _))(_)))
// INITIALIZATION
def requestInitialization: F[Unit] =
cluster.random(RequestInit(name), ())
def initialization: Stream[F, Unit] =
(Stream.eval(requestInitialization) *> Stream.sleep(ms(config.adTimeoutMillis))).repeat
def initRequestHandled: F[Stream[F, Unit]] =
cluster.subscribe[Unit](RequestInit(name), config.updateRequestLimit)
.map(_.evalMap(x => initRequestHandler(x._1)))
def initRequestHandler(id: cluster.Id): F[Unit] =
underlying.entries.compile.toList.flatMap { (lst: List[(K, Timestamped[V])]) =>
cluster.unicast(Init(name), lst.toMap, id)
}
def initHandled(stopper: Deferred[F, Either[Throwable, Unit]]): F[Stream[F, Unit]] =
cluster.subscribe[Map[K, Timestamped[V]]](Init(name), config.updateLimit)
.map(_.evalMap {
case (id, mp) => for {
_ <- updateHandler(id, mp)
_ <- Sync[F].suspend(stopper.complete(Right(())))
} yield ()
})
// BROADCASTING UPDATES
def broadcastUpdates: Stream[F, Unit] =
store.updates
.groupWithin(config.updateBroadcastBatch, config.updateBroadcastMillis.milliseconds)
.map(_.toList.toMap)
.evalMap(cluster.broadcast(Update(name), _))
}
object AntiEntropyStore {
final class Gates[F[_]: Bracket[?[_], Throwable]](semaphore: Semaphore[F]) {
def in[A](fa: F[A]): F[A] =
Bracket[F, Throwable].guarantee(semaphore.acquire *> fa)(semaphore.release)
def strict[A](fa: F[A]): F[A] =
Bracket[F, Throwable].guarantee(semaphore.acquireN(Int.MaxValue) *> fa)(semaphore.releaseN(Int.MaxValue))
}
object Gates {
def apply[F[_]: Concurrent: Bracket[?[_], Throwable]]: F[Gates[F]] =
Semaphore[F](Int.MaxValue) map (new Gates(_))
}
implicit def mapCodec[K, V](implicit k: Codec[K], v: Codec[V]): Codec[Map[K, V]] =
listOfN(int32, k ~ v).xmap(_.toMap, _.toList)
def apply[F[_]: ConcurrentEffect: ContextShift, K: Codec, V: Codec](
config: AntiEntropyStoreConfig,
name: String,
cluster: Cluster[F, Message],
underlying: TimestampedStore[F, K, V],
blocker: Blocker)(
implicit timer: Timer[F])
: Resource[F, IndexedStore[F, K, V]] = {
val res: F[Resource[F, IndexedStore[F, K, V]]] = for {
gates <- Gates[F]
store <- Sync[F].delay(new AntiEntropyStore[F, K, V](
name,
cluster,
underlying,
gates,
config))
stopper <- Deferred[F, Either[Throwable, Unit]]
empty <- cluster.isEmpty
_ <- stopper.complete(Right(())).whenA(empty)
initRequest <- store.initRequestHandled
init <- store.initHandled(stopper)
adReceiver <- store.advertisementHandled
updates <- store.updateHandled
updateRequester <- store.updateRequestHandled
} yield {
val merged = Stream.emits(List(
adReceiver,
store.sendingAdStream,
store.purgeTombstones,
store.broadcastUpdates,
initRequest,
updates,
updateRequester,
store.initialization.interruptWhen(stopper),
init.interruptWhen(stopper))).parJoinUnbounded
val storeStream = Stream.emit[F, IndexedStore[F, K, V]](store)
for {
resource <- storeStream.concurrently(merged).compile.resource.lastOrError
_ <- Resource.liftF(stopper.get)
} yield resource
}
Resource.suspend(res).mapK(λ[F ~> F](ContextShift[F].blockOn(blocker)(_)))
}
def default[F[_]: ConcurrentEffect: ContextShift, K: Codec, V: Codec](
name: String,
cluster: Cluster[F, Message],
underlying: TimestampedStore[F, K, V],
blocker: Blocker)(
implicit timer: Timer[F])
: Resource[F, IndexedStore[F, K, V]] =
apply[F, K, V](AntiEntropyStoreConfig.default, name, cluster, underlying, blocker)
}
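// --- Illustrative wiring sketch (not part of the original file; `cluster`, `timestampedStore`,
// and `blocker` are hypothetical values of the corresponding parameter types) ---
// AntiEntropyStore.default[IO, String, Int]("configs", cluster, timestampedStore, blocker)
//   .use(store => store.insert("k", 42) *> store.lookup("k"))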
|
djspiewak/quasar
|
impl/src/main/scala/quasar/impl/storage/AntiEntropyStore.scala
|
Scala
|
apache-2.0
| 9,461
|
package moe.brianhsu.easytaipei
import android.app.Activity
import android.os.Bundle
import android.support.v7.app.AppCompatActivity
import android.support.v4.app.FragmentPagerAdapter
import android.support.v4.app.FragmentManager
import android.view.Menu
import android.view.View
import android.support.design.widget.Snackbar
import android.graphics.Color
import android.view.LayoutInflater
import android.view.ViewGroup
import android.support.v4.app.Fragment
import android.support.design.widget.TabLayout.TabLayoutOnPageChangeListener
import scala.collection.JavaConversions._
class MainActivity extends AppCompatActivity with TypedFindView
{
private lazy val tabPagerAdapter = new TabPagerAdapter(getSupportFragmentManager)
override def onCreate(savedInstanceState: Bundle) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
val actionBar = findView(TR.toolbar)
val viewPager = findView(TR.container)
val tabLayout = findView(TR.tabs)
val fab = findView(TR.fab)
viewPager.setAdapter(tabPagerAdapter)
viewPager.setOffscreenPageLimit(10)
tabLayout.setupWithViewPager(viewPager)
setSupportActionBar(actionBar)
fab.setOnClickListener(new View.OnClickListener {
override def onClick(view: View) {
val fragments = getSupportFragmentManager().getFragments
val visibleFragments = fragments.filter(f => f != null && f.getUserVisibleHint)
visibleFragments.foreach { f => f.asInstanceOf[BaseFragment].updateMarkers() }
}
})
}
class TabPagerAdapter(fm: FragmentManager) extends FragmentPagerAdapter(fm) {
val tolietFragment = new TolietFragment
val trashCanFragment = new TrashCanFragment
val drinkingStationFragment = new DrinkingStationFragment
val breastfeedingRoomFragment = new BreastfeedingRoomFragment
override def getCount = 4
override def getPageTitle(position: Int): CharSequence = position match {
case 0 => "公共廁所"
case 1 => "行人垃圾筒"
case 2 => "飲水台"
case 3 => "哺集乳室"
}
override def getItem(position: Int): Fragment = position match {
case 0 => tolietFragment
case 1 => trashCanFragment
case 2 => drinkingStationFragment
case 3 => breastfeedingRoomFragment
}
}
}
|
brianhsu/EasyTaipei
|
src/main/scala/MainActivity.scala
|
Scala
|
gpl-2.0
| 2,315
|
package streamz.akka.stream
import java.util.concurrent.{TimeUnit, CountDownLatch}
import org.reactivestreams.Publisher
import scala.collection.immutable
import scala.reflect._
import scala.util.Random
import scala.util.control.NoStackTrace
import scala.concurrent.{Promise, ExecutionContext, Future, Await}
import scala.concurrent.duration._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import akka.actor._
import akka.pattern.{AskTimeoutException, ask}
import akka.stream.impl2.ActorBasedFlowMaterializer
import akka.stream.scaladsl2.{FoldSink => _, _}
import akka.stream.{Transformer, MaterializerSettings}
import akka.testkit._
import scalaz.concurrent.Task
import scalaz.stream.Process
import scalaz.syntax.either._
import streamz.akka.stream.TestAdapter.GetInFlight
class AkkaStreamSpec
extends TestKit(ActorSystem(classOf[AkkaStreamSpec].getSimpleName)) with ImplicitSender with DefaultTimeout
with WordSpecLike with Matchers with BeforeAndAfterAll {
implicit val materializer = FlowMaterializer(MaterializerSettings(system))
import system.dispatcher
"Process.publisher" when {
"invoked on a normal input-Process" must {
"return a Publisher and a Process that publishes the elements of the input-Process in a normal Flow" in {
val input: Process[Task, Int] = Process(1 to 50: _*)
val (process, publisher) = input.publisher()
val published = toList(FlowFrom(publisher))
process.run.run
result(published) should be (input.runLog.run)
}
}
"invoked with an actor-name" must {
"create an actor under this name that is stopped after the stream is finished" in {
val actorName = "publisher"
val input: Process[Task, Int] = Process(1 to 50: _*)
val (process, publisher) = input.publisher(name = Some(actorName))
identify(actorName) should not be None
val published = toList(FlowFrom(publisher))
process.run.run
result(published)
identify(actorName) should be (None)
}
}
"invoked on an erroneous input-Process" must {
"return a Publisher and a Process that publishes the valid elements of the input-Process in a normal Flow" in {
val failAfter = 20
val input: Process[Task, Int] = Process(1 to 50: _*).map(exceptionOn(expectedException)(_ > failAfter))
val (process, publisher) = input.publisher()
val published = toList(FlowFrom(publisher).take(failAfter))
process.take(failAfter).run.run
result(published) should be (input.take(failAfter).runLog.run)
}
}
"invoked on an erroneous input-Process" must {
"return a Publisher and a Process that publishes the exception of the input-Process in a normal Flow" in {
val input: Process[Task, Int] = Process.fail(expectedException)
val (process, publisher) = input.publisher()
val published = toList(FlowFrom(publisher))
process.run.attemptRun
the[Exception] thrownBy result(published) should be (expectedException)
}
}
"invoked on a slow input-Process" must {
"return a Publisher and a Process that publishes the elements of the input-Process in a normal Flow" in {
val input: Process[Task, Int] = Process(1 to 50: _*)
val (process, publisher) = input.map(sleep()).publisher()
val published = toList(FlowFrom(publisher))
process.run.run
result(published) should be (input.runLog.run)
}
}
"invoked on a normal input-Process" must {
"return a Publisher and a Process that publishes the elements of the input-Process in a slow Flow" in {
val input: Process[Task, Int] = Process(1 to 50: _*)
val (process, publisher) = input.publisher()
val published = toList(FlowFrom(publisher).map(sleep()))
process.run.run
result(published) should be (input.runLog.run)
}
}
"invoked on a normal input-Process with a MaxInFlightRequestStrategy" must {
"return a Publisher and a Process that publishes the elements of the input-Process in a slow Flow with the given max elements in flight" in {
val input: Process[Task, Int] = Process(1 to 50: _*)
val maxInFlight = Random.nextInt(4) + 1
val mockActorFactory = new MockActorRefFactory(Map(classOf[AdapterPublisher[Int]] -> TestAdapterPublisher.props))
val (process, publisher) = input.publisher(maxInFlightStrategyFactory(maxInFlight))(mockActorFactory)
val published = toList(FlowFrom(publisher)
.map(sleep())
.map(_ => currentInFlight[AdapterPublisher[Int]](mockActorFactory)))
process.run.run
result(published).map(result).foreach(_ should be <= maxInFlight)
}
}
}
"stream.subscribe" when {
"invoked on a normal Flow" must {
"return a Process that produces elements of the Flow" in {
val input = FlowFrom(1 to 50)
val process = input.toProcess()
process.runLog.run should be(result(toList(input)))
}
}
"invoked with an actor-name" must {
"create an actor under this name that is stopped after the stream is finished" in {
val actorName = "subscriber"
val finished = new CountDownLatch(1)
val input = FlowFrom(1 to 50)
val process = input.toProcess(name = Some(actorName)).runLog.runAsync(_ => finished.countDown())
identify(actorName) should not be None
waitFor(finished)
identify(actorName) should be (None)
}
}
"invoked on a erroneous Flow" must {
"return a Process that fails with the same exception as the Flow" in {
val input = FlowFrom[Int](() => throw expectedException)
val process = input.toProcess()
process.run.attemptRun should be (expectedException.left)
}
}
"invoked on a erroneous Flow" must {
"return a Process that when slowed down produces all valid elements of the Flow and fails afterwards" in {
val failAfter = 20
val input = FlowFrom(1 to 50).map(exceptionOn(expectedException)(_ > failAfter))
val process: Process[Task, Int] = input.toProcess().map(sleep()).map(effect(testActor ! _))
process.run.attemptRun should be (expectedException.left)
(1 to failAfter).foreach(i => expectMsg(i))
}
}
"invoked with a normal Flow and a MaxInFlightRequestStrategy" must {
"return a Process that when slowed has the given max elements in flight" in {
val input = FlowFrom(1 to 50)
val maxInFlight = Random.nextInt(4) + 1
val mockActorFactory = new MockActorRefFactory(Map(classOf[AdapterSubscriber[Int]] -> TestAdapterSubscriber.props))
val process = input.toProcess(maxInFlightStrategyFactory(maxInFlight))(mockActorFactory, materializer)
val slowProcess = process
.map(sleep())
.map(_ => currentInFlight[AdapterSubscriber[Int]](mockActorFactory))
slowProcess.runLog.run.map(result).foreach(_ should be <= maxInFlight)
}
}
}
"stream.publish" must {
"publish to a managed flow" in {
val process: Process[Task, Unit] = Process.emitAll(1 to 3).publish()(_.withSink(ForeachSink(testActor ! _)))()
process.run.run
expectMsg(1)
expectMsg(2)
expectMsg(3)
}
}
"stream.subscribe" must {
"subscribe to a flow" in {
val process = FlowFrom(1 to 3).toProcess()
process.runLog.run should be(Seq(1, 2, 3))
}
}
private def toList[I, O](flow: FlowWithSource[I, O])(implicit executionContext: ExecutionContext): Future[List[O]] = {
val sink = FoldSink[List[O], O](Nil)(_ :+ _)
val materializedFlow = flow.withSink(sink).run()
sink.future(materializedFlow)
}
private def currentInFlight[A : ClassTag](mockActorFactory: MockActorRefFactory): Future[Int] =
(mockActorFactory.createdActor[A] ? GetInFlight).mapTo[Int]
private def exceptionOn[A](ex: Exception)(condition: A => Boolean): A => A = effect[A](a => if(condition(a)) throw ex)
private def sleep[A](pause: FiniteDuration = 10.millis): A => A = effect(_ => Thread.sleep(pause.toMillis))
private def effect[A](eff: A => Unit): A => A = { a =>
eff(a)
a
}
private def result[A](future: Future[A]): A = Await.result(future, timeout.duration * 0.9)
private def waitFor(latch: CountDownLatch) = latch.await(timeout.duration.toMillis, TimeUnit.MILLISECONDS)
private def identify(name: String): Option[ActorRef] = result {
(system.actorSelection(s"/user/$name") ? Identify(None)).mapTo[ActorIdentity]
.map(id => Option(id.getRef))
.recover { case ex: AskTimeoutException => None}
}
private val expectedException = new Exception with NoStackTrace
override protected def afterAll() = shutdown(system)
}
final case class FoldSink[U, Out](zero: U)(f: (U, Out) ⇒ U) extends SinkWithKey[Out, Future[U]] {
override def attach(flowPublisher: Publisher[Out], materializer: ActorBasedFlowMaterializer, flowName: String): Future[U] = {
val promise = Promise[U]()
FlowFrom(flowPublisher).transform("fold", () ⇒ new Transformer[Out, U] {
var state: U = zero
override def onNext(in: Out): immutable.Seq[U] = {
state = f(state, in)
Nil
}
override def onTermination(end: Option[Throwable]) = {
end match {
case None ⇒ promise.success(state)
case Some(e) ⇒ promise.failure(e)
}
Nil
}
override def onError(cause: Throwable) = ()
}).consume()(materializer.withNamePrefix(flowName))
promise.future
}
def future(m: MaterializedSink): Future[U] = m.getSinkFor(this)
}
|
Astrac/streamz
|
streamz-akka-stream/src/test/scala/streamz/akka/stream/AkkaStreamSpec.scala
|
Scala
|
apache-2.0
| 9,718
|
/**
* Copyright 2017-2020 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package brave.play.filter
import akka.stream.Materializer
import brave.play.ZipkinTraceServiceLike
import javax.inject.Inject
import play.api.mvc.{Filter, Headers, RequestHeader, Result}
import play.api.routing.Router
import scala.concurrent.Future
import scala.util.Failure
/**
* A Zipkin filter.
*
 * This filter reports how long a request takes to execute in Play, as a server span.
 * Use it as follows:
* {{{
* class Filters @Inject() (
* zipkin: ZipkinTraceFilter
* ) extends DefaultHttpFilters(zipkin)
* }}}
*
* @param tracer a Zipkin tracer
* @param mat a materializer
*/
class ZipkinTraceFilter @Inject() (tracer: ZipkinTraceServiceLike)(implicit val mat: Materializer) extends Filter {
import tracer.executionContext
private val reqHeaderToSpanName: RequestHeader => String = ZipkinTraceFilter.ParamAwareRequestNamer
def apply(nextFilter: RequestHeader => Future[Result])(req: RequestHeader): Future[Result] = {
val serverSpan = tracer.serverReceived(
spanName = reqHeaderToSpanName(req),
span = tracer.newSpan(req.headers)((headers, key) => headers.get(key))
)
val result = nextFilter(req.withHeaders(new Headers(
(req.headers.toMap.mapValues(_.headOption getOrElse "") ++ tracer.toMap(serverSpan)).toSeq
)))
result.onComplete {
case Failure(t) => tracer.serverSend(serverSpan, "failed" -> s"Finished with exception: ${t.getMessage}")
case _ => tracer.serverSend(serverSpan)
}
result
}
}
object ZipkinTraceFilter {
val ParamAwareRequestNamer: RequestHeader => String = { reqHeader =>
import org.apache.commons.lang3.StringUtils
val pathPattern = StringUtils.replace(
reqHeader.attrs.get(Router.Attrs.HandlerDef).map(_.path).getOrElse(reqHeader.path),
"<[^/]+>", ""
)
s"${reqHeader.method} - $pathPattern"
}
}
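// Worked example (illustrative): Play stores the route pattern in HandlerDef.path,
// e.g. "/users/$id<[^/]+>". StringUtils.replace removes the literal "<[^/]+>"
// fragment, so a GET to that route is named "GET - /users/$id".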
|
bizreach/play-zipkin-tracing
|
play/src/main/scala/brave/play/filter/ZipkinTraceFilter.scala
|
Scala
|
apache-2.0
| 2,468
|
package suggestions
import java.net.URL
/**
* Created by alex on 17/05/15.
*/
trait LookupProvider {
def lookupUrl(name: String): Option[URL]
}
|
unclealex72/ripper
|
src/main/scala/suggestions/LookupProvider.scala
|
Scala
|
mit
| 151
|
import scala.collection.JavaConversions._
// you can use println for debugging purposes, e.g.
// println("this is a debug message")
object Solution {
def solution(A: Array[Int]): Int = {
// write your code in Scala 2.10
val n = A.length
var best_ind = 0
var best_sum: Long = A.sum
var best_len = n
var j = n
var s: Long = 0
for (i <- n-1 to 0 by -1) {
s += A(i)
while ((i+3 <= j) && (s*(j-i-1) > (s-A(i))*(j-i))) {
s -= A(j-1)
j -= 1
if ((s*best_len) <= best_sum*(j-i)) {
best_sum = s
best_len = j-i
best_ind = i
}
}
if ((i+2 <= j) && ((s*best_len) <= best_sum*(j-i))) {
best_sum = s
best_len = j-i
best_ind = i
}
}
best_ind
}
}
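// Illustrative check against the Codility task statement: for
// A = Array(4, 2, 2, 5, 1, 5, 8) the minimal-average slice starts at index 1
// (slice (1, 2) has average 2.0), so solution(A) is expected to return 1.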
|
sfindeisen/prgctst
|
codility/lesson/MinAvgTwoSlice.scala
|
Scala
|
gpl-3.0
| 1,028
|
package com.redislabs.provider.redis.df.cluster
import com.redislabs.provider.redis.df.DataframeSuite
import com.redislabs.provider.redis.env.RedisClusterEnv
class DataframeClusterSuite extends DataframeSuite with RedisClusterEnv
|
RedisLabs/spark-redis
|
src/test/scala/com/redislabs/provider/redis/df/cluster/DataframeClusterSuite.scala
|
Scala
|
bsd-3-clause
| 232
|
package com.nidkil.downloader.actor
import scala.concurrent.Await
import scala.concurrent.Future
import scala.concurrent.duration.DurationInt
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Matchers
import org.scalatest.WordSpecLike
import com.typesafe.config.ConfigFactory
import akka.actor.ActorPath
import akka.actor.ActorRef
import akka.actor.ActorSystem
import akka.actor.Props
import akka.actor.actorRef2Scala
import akka.pattern.pipe
import akka.testkit.DefaultTimeout
import akka.testkit.ImplicitSender
import akka.testkit.TestKit
import akka.util.Timeout
import com.nidkil.downloader.protocol.MasterWorkerProtocol
class WorkerSpec extends TestKit(ActorSystem("WorkerSpec", ConfigFactory.parseString(WorkerSpec.config)))
with DefaultTimeout with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll {
import WorkerSpec._
import org.scalatest.Matchers._
import scala.concurrent.duration._
import MasterWorkerProtocol._
implicit val askTimeout = Timeout(1 second)
override def afterAll {
shutdown()
}
def worker(name: String) = system.actorOf(Props(
new TestWorker(ActorPath.fromString(s"akka://${system.name}/user/$name"))))
def badWorker(name: String) = system.actorOf(Props(
new BadTestWorker(ActorPath.fromString(s"akka://${system.name}/user/$name"))))
"Worker" should {
"work" in {
val master = system.actorOf(Props[Master], "master-1")
val worker1 = worker("master-1")
val worker2 = worker("master-1")
val worker3 = worker("master-1")
master ! "Hi there"
master ! "Guys"
master ! "So"
master ! "What's"
master ! "Up?"
expectMsgAllOf("Hi there", "Guys", "So", "What's", "Up?")
}
"still work if one dies" in {
val master = system.actorOf(Props[Master], "master-2")
val worker1 = worker("master-2")
val worker2 = badWorker("master-2")
master ! "Hi there"
master ! "Guys"
master ! "So"
master ! "What's"
master ! "Up?"
expectMsgAllOf("Hi there", "Guys", "So", "What's", "Up?")
}
"work with Futures" in {
val master = system.actorOf(Props[Master], "master-3")
val worker1 = worker("master-3")
val worker2 = worker("master-3")
val worker3 = worker("master-3")
val listOfMessages = List("Hi there", "Guys", "So", "What's", "Up?")
val listOfFutures = listOfMessages.map(akka.pattern.ask(master, _).mapTo[String])
implicit val ec = system.dispatcher
val futureList = Future.sequence(listOfFutures)
val result = Await.result(futureList, 1 second)
assert(listOfMessages == result)
}
/**"work with funneled master" in {
val funneledMaster = system.actorOf(Props[FunneledMaster], "master-4")
val worker1 = worker("master-1")
val worker2 = worker("master-1")
val worker3 = worker("master-1")
funneledMaster ! FunnelWork("test-1", "Hi there")
funneledMaster ! FunnelWork("test-1", "Guys")
funneledMaster ! FunnelWork("test-1", "So")
funneledMaster ! FunnelWork("test-1", "What's")
funneledMaster ! FunnelWork("test-1", "Up?")
funneledMaster ! FunnelWork("test-2", "This")
funneledMaster ! FunnelWork("test-2", "is")
funneledMaster ! FunnelWork("test-2", "my")
funneledMaster ! FunnelWork("test-2", "second")
funneledMaster ! FunnelWork("test-2", "batch")
funneledMaster ! FunnelWork("test-2", "of")
funneledMaster ! FunnelWork("test-2", "messages")
expectMsgAllOf("Hi there", "Guys", "So", "What's", "Up?", "This", "is", "my", "second", "batch", "of", "messages")
}*/
}
}
object WorkerSpec {
// Define the test specific configuration
val config = """
akka {
loglevel = "WARNING"
}"""
class TestWorker(masterLocation: ActorPath) extends Worker(masterLocation) {
// Set an execution context
implicit val ec = context.dispatcher
def doWork(workSender: ActorRef, msg: Any): Unit = {
Future {
workSender ! msg
WorkComplete("done")
} pipeTo self
}
}
class BadTestWorker(masterLocation: ActorPath) extends Worker(masterLocation) {
// Set an execution context
implicit val ec = context.dispatcher
def doWork(workSender: ActorRef, msg: Any): Unit = context.stop(self)
}
}
|
nidkil/akka-downloader
|
src/test/scala/com/nidkil/downloader/actor/WorkerSpec.scala
|
Scala
|
apache-2.0
| 4,361
|
package spire
package math
import spire.algebra._
import org.scalacheck.Arbitrary._
import org.scalacheck._
import Arbitrary.arbitrary
object ArbitrarySupport {
object Ordinal {
trait _0
trait _1
trait _2
trait _3
trait _4
trait _5
trait _6
trait _7
trait _8
trait _9
trait _10
trait _20
trait _50
trait _100
}
abstract class Size[A](val value: Int)
object Size {
import Ordinal._
implicit object Size0SpireImplicit extends Size[_0](0)
implicit object Size1SpireImplicit extends Size[_1](1)
implicit object Size2SpireImplicit extends Size[_2](2)
implicit object Size3SpireImplicit extends Size[_3](3)
implicit object Size4SpireImplicit extends Size[_4](4)
implicit object Size5SpireImplicit extends Size[_5](5)
implicit object Size6SpireImplicit extends Size[_6](6)
    implicit object Size7SpireImplicit extends Size[_7](7)
    implicit object Size8SpireImplicit extends Size[_8](8)
    implicit object Size9SpireImplicit extends Size[_9](9)
implicit object Size10SpireImplicit extends Size[_10](10)
implicit object Size20SpireImplicit extends Size[_20](20)
implicit object Size50SpireImplicit extends Size[_50](50)
implicit object Size100SpireImplicit extends Size[_100](100)
def apply[A](implicit sz: Size[A]): Int = sz.value
}
case class Sized[A, L, U](num: A)
case class Positive[A](num: A)
case class Negative[A](num: A)
case class NonZero[A](num: A)
case class NonPositive[A](num: A)
case class NonNegative[A](num: A)
import spire.syntax.all._
implicit def sizedSpireImplicit[A: EuclideanRing: Signed: Arbitrary, L: Size, U: Size]: Arbitrary[Sized[A, L, U]] =
Arbitrary(arbitrary[A].map(a => Sized((a emod (Size[U] - Size[L])).abs + Size[L])))
implicit def positiveSpireImplicit[A: Signed: Arbitrary]: Arbitrary[Positive[A]] =
Arbitrary(arbitrary[A].map(_.abs).filter(_.signum > 0).map(Positive(_)))
implicit def negativeSpireImplicit[A: Signed: AdditiveGroup: Arbitrary]: Arbitrary[Negative[A]] =
Arbitrary(arbitrary[A].map(-_.abs).filter(_.signum < 0).map(Negative(_)))
implicit def nonZeroSpireImplicit[A: Signed: AdditiveGroup: Arbitrary]: Arbitrary[NonZero[A]] =
Arbitrary(arbitrary[A].filter(_.signum != 0).map(NonZero(_)))
implicit def nonPositiveSpireImplicit[A: Signed: AdditiveGroup: Arbitrary]: Arbitrary[NonPositive[A]] =
Arbitrary(arbitrary[A].map(-_.abs).filter(_.signum < 1).map(NonPositive(_)))
implicit def nonNegativeSpireImplicit[A: Signed: AdditiveGroup: Arbitrary]: Arbitrary[NonNegative[A]] =
Arbitrary(arbitrary[A].map(_.abs).filter(_.signum > -1).map(NonNegative(_)))
}
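// --- Illustrative use in a ScalaCheck property (not part of the original file) ---
// Prop.forAll { (p: Positive[Int]) => p.num > 0 }
// Prop.forAll { (s: Sized[Int, Ordinal._3, Ordinal._10]) => s.num >= 3 && s.num < 10 }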
|
non/spire
|
tests/src/test/scala/spire/math/ArbitrarySupport.scala
|
Scala
|
mit
| 2,682
|
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic.anyvals
import reflect.macros.Context
import org.scalactic.Resources
private[scalactic] object PosZFloatMacro extends CompileTimeAssertions {
def apply(c: Context)(value: c.Expr[Float]): c.Expr[PosZFloat] = {
val notValidMsg = Resources.notValidPosZFloat
val notLiteralMsg = Resources.notLiteralPosZFloat
import c.universe._
ensureValidFloatLiteral(c)(value, notValidMsg, notLiteralMsg) { i => i >= 0.0F }
reify { PosZFloat.from(value.splice).get }
}
}
|
SRGOM/scalatest
|
scalactic-macro/src/main/scala/org/scalactic/anyvals/PosZFloatMacro.scala
|
Scala
|
apache-2.0
| 1,106
|
package util
import collection.immutable.SortedSet
package object power {
type PowerComponents = Set[PowerComponent]
type PowerUnits = SortedSet[PowerUnit]
}
|
Shopify/collins
|
app/util/power/package.scala
|
Scala
|
apache-2.0
| 164
|
/*
* Copyright (C) 2013 Alcatel-Lucent.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* Licensed to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package molecule
package stream
package ochan
package immutable
case class MapperT[A, B: Message](complexity: Int, f: B => A) extends Transformer[A, B] { outer =>
def apply(ochan: OChan[A]): OChan[B] = new OChan[B] {
def write(t: UThread, seg: Seg[B], sigOpt: Option[Signal], k: OChan[B] => Unit): Unit =
ochan.write(t, seg.map(f), sigOpt, ochan => k(ochan.add(outer)))
def close(signal: Signal): Unit =
ochan.close(signal)
def add[C: Message](transformer: Transformer[B, C]): OChan[C] =
transformer match {
case MapperT(c, e) =>
new MapperT[A, C](complexity + c, e andThen f).apply(ochan)
case FlatMapperT(c, e) =>
new FlatMapperT[A, C](complexity + c, e andThen (_.map(f))).apply(ochan)
case pars: ParserT[_, _] => // p:ParserT[B, C] => unchecked warning
val p = pars.asInstanceOf[ParserT[B, C]]
ParserT[A, C](outer.complexity + p.complexity,
p.reset.map(f),
p.parser.map(f)).apply(ochan)
case _ =>
transformer(this)
}
}
}
case class PartialMapperT[A, B: Message](val complexity: Int, f: PartialFunction[B, A])
extends Transformer[A, B] { outer =>
def apply(ochan: OChan[A]): OChan[B] = new OChan[B] {
def write(t: UThread, seg: Seg[B], sigOpt: Option[Signal], k: OChan[B] => Unit): Unit =
ochan.write(t, seg.map(f), sigOpt, ochan => k(ochan.add(outer)))
def close(signal: Signal): Unit =
ochan.close(signal)
def add[C: Message](transformer: Transformer[B, C]): OChan[C] =
transformer match {
case pars: ParserT[_, _] => // pars:ParserT[B, C] => unchecked warning
val p = pars.asInstanceOf[ParserT[B, C]]
ParserT[A, C](outer.complexity + p.complexity,
p.reset.collect(f),
p.parser.collect(f)).apply(ochan)
case _ =>
transformer(this)
}
}
}
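// Note (illustrative): stacking a MapperT[B, C] onto a MapperT[A, B] fuses the two
// into one MapperT[A, C] whose function is `e andThen f` and whose complexity is the
// sum of both, so each write traverses a single transformer instead of two.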
|
molecule-labs/molecule
|
molecule-core/src/main/scala/molecule/stream/ochan/immutable/MapperT.scala
|
Scala
|
apache-2.0
| 2,633
|
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer (t3l@threelights.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe.blaze.modules.jvm
import edu.latrobe._
import edu.latrobe.blaze._
import edu.latrobe.blaze.modules._
final class MultiplyFilter_JVM_Baseline(override val builder: MultiplyFilterBuilder,
override val inputHints: BuildHints,
override val seed: InstanceSeed,
override val weightBufferBuilder: ValueTensorBufferBuilder)
extends MultiplyFilter_JVM {
// ---------------------------------------------------------------------------
// Forward propagation related.
// ---------------------------------------------------------------------------
override protected def doPredictPerValue(output: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// filter: RGB RGB RGB | RGB RGB RGB
output :*= filter
}
override protected def doPredictPerUnit(output: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// filter: RGB RGB RGB
output.foreachSample((off, length) => {
ArrayEx.multiply(
output.values, off, 1,
filter.values, 0, 1,
length
)
})
}
override protected def doPredictPerChannel(output: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// filter: RGB
output.foreachChannel((off, stride, length) => {
ArrayEx.multiply(
output.values, off, stride,
filter.values(off),
length
)
})
/*
// TODO: Parallelize!
val out = output.values
val w = filter.values
var off = 0
while (off < out.length) {
ArrayEx.multiply(
out, off, 1,
w, 0, 1,
w.length
)
off += w.length
}
assume(off == out.length)
*/
}
override protected def doPredictPerSample(output: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// filter: R | R
output.foreachSamplePair((i, off, length) => {
ArrayEx.multiply(
output.values, off, 1,
filter.values(i),
length
)
})
}
override protected def doPredictPerBatch(output: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// bias: R
output *= filter.values(0)
}
override protected def doPredictInvPerValue(input: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// filter: RGB RGB RGB | RGB RGB RGB
input :/= filter
}
override protected def doPredictInvPerUnit(input: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// filter: RGB RGB RGB
input.foreachUnit((off, stride, length) => {
ArrayEx.multiply(
input.values, off, stride,
Real.one / filter.values(off),
length
)
})
}
override protected def doPredictInvPerChannel(input: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// filter: RGB
input.foreachChannel((off, stride, length) => {
ArrayEx.multiply(
input.values, off, stride,
Real.one / filter.values(off),
length
)
})
/*
// TODO: Parallelize!
val inp = input.values
val w = filter.values
var off = 0
while (off < inp.length) {
ArrayEx.divide(
inp, off, 1,
w, 0, 0,
w.length
)
off += w.length
}
assume(off == inp.length)
*/
}
override protected def doPredictInvPerSample(input: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// filter: R | R
input.foreachSamplePair((i, off, length) => {
ArrayEx.multiply(
input.values, off, 1,
Real.one / filter.values(i),
length
)
})
}
override protected def doPredictInvPerBatch(input: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// filter: R
input *= Real.one / filter.values(0)
}
// ---------------------------------------------------------------------------
// Back propagation related.
// ---------------------------------------------------------------------------
override protected def doDeriveFilterGradientsPerValue(input: RealArrayTensor,
error: RealArrayTensor,
sink: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// error: RGB RGB RGB | RGB RGB RGB
// sink: RGB RGB RGB | RGB RGB RGB
sink.add(
error,
input
)
}
override protected def doDeriveFilterGradientsPerUnit(input: RealArrayTensor,
error: RealArrayTensor,
sink: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// error: RGB RGB RGB | RGB RGB RGB
// sink: RGB RGB RGB
error.foreachUnit((off, stride, length) => {
val tmp = ArrayEx.dot(
error.values, off, stride,
input.values, off, stride,
length
)
sink.values(off) += tmp
})
}
override protected def doDeriveFilterGradientsPerChannel(input: RealArrayTensor,
error: RealArrayTensor,
sink: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// error: RGB RGB RGB | RGB RGB RGB
// sink: RGB
error.foreachChannel((off, stride, length) => {
val tmp = ArrayEx.dot(
error.values, off, stride,
input.values, off, stride,
length
)
sink.values(off) += tmp
})
/*
val inp = input.values
val err = error.values
val dst = sink.values
var off = 0
while (off < err.length) {
ArrayEx.transform(
dst, 0, 1,
inp, off, 1,
err, off, 1,
dst.length
)(_ + _ * _)
off += dst.length
}
assume(off == err.length)
*/
}
override protected def doDeriveFilterGradientsPerSample(input: RealArrayTensor,
error: RealArrayTensor,
sink: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// error: RGB RGB RGB | RGB RGB RGB
// sink: R | R
error.foreachSamplePair((i, off, length) => {
val tmp = ArrayEx.dot(
error.values, off, 1,
input.values, off, 1,
length
)
sink.values(i) += tmp
})
}
override protected def doDeriveFilterGradientsPerBatch(input: RealArrayTensor,
error: RealArrayTensor,
sink: RealArrayTensor)
: Unit = {
// input: RGB RGB RGB | RGB RGB RGB
// error: RGB RGB RGB | RGB RGB RGB
// sink: R
sink.values(0) += error.dot(input)
}
override protected def doDeriveInputErrorPerValue(error: RealArrayTensor)
: Unit = {
// error: RGB RGB RGB | RGB RGB RGB
// filter: RGB RGB RGB | RGB RGB RGB
error :*= filter
}
override protected def doDeriveInputErrorPerUnit(error: RealArrayTensor)
: Unit = {
// error: RGB RGB RGB | RGB RGB RGB
// filter: RGB RGB RGB
error.foreachSample((off, length) => {
ArrayEx.multiply(
error.values, off, 1,
filter.values, 0, 1,
length
)
})
}
override protected def doDeriveInputErrorPerChannel(error: RealArrayTensor)
: Unit = {
// error: RGB RGB RGB | RGB RGB RGB
// filter: RGB
error.foreachChannel((off, stride, length) => {
ArrayEx.multiply(
error.values, off, stride,
filter.values(off),
length
)
})
/*
// TODO: Parallelize!
val err = error.values
val w = filter.values
var off = 0
while (off < err.length) {
ArrayEx.multiply(
err, off, 1,
w, 0, 1,
w.length
)
off += w.length
}
assume(off == err.length)
*/
}
override protected def doDeriveInputErrorPerSample(error: RealArrayTensor)
: Unit = {
// error: RGB RGB RGB | RGB RGB RGB
// filter: R | R
error.foreachSamplePair((i, off, length) => {
ArrayEx.multiply(
error.values, off, 1,
filter.values(i),
length
)
})
}
override protected def doDeriveInputErrorPerBatch(error: RealArrayTensor)
: Unit = {
// error: RGB RGB RGB | RGB RGB RGB
// filter: R
error *= filter.values(0)
}
}
object MultiplyFilter_JVM_Baseline_Description
extends ModuleVariant_JVM_Description[MultiplyFilterBuilder] {
override def build(builder: MultiplyFilterBuilder,
hints: BuildHints,
seed: InstanceSeed,
weightsBuilder: ValueTensorBufferBuilder)
: MultiplyFilter_JVM_Baseline = new MultiplyFilter_JVM_Baseline(
builder, hints, seed, weightsBuilder
)
}
|
bashimao/ltudl
|
blaze/src/main/scala/edu/latrobe/blaze/modules/jvm/MultiplyFilter_JVM_Baseline.scala
|
Scala
|
apache-2.0
| 9,824
|
package se.sics.caracaldb.driver
import se.sics.caracaldb.driver.experiments._
import se.sics.kompics.network.Transport
object Experiments {
val registered = Map(
0 -> new SimpleTransfer(Transport.TCP),
1 -> new SimpleTransfer(Transport.UDT),
2 -> new SimpleTransfer(Transport.DATA),
3 -> new SimplePings(Transport.TCP, 100),
4 -> new SimplePings(Transport.UDT, 100),
5 -> new PingTransfer(Transport.TCP, Transport.TCP),
6 -> new PingTransfer(Transport.TCP, Transport.UDT),
7 -> new PingTransfer(Transport.TCP, Transport.DATA),
8 -> new TorrentTransfer()
)
}
|
CaracalDB/CaracalDB
|
experiments/driver/src/main/scala/se/sics/caracaldb/driver/Experiments.scala
|
Scala
|
gpl-2.0
| 640
|
/*
* Copyright (c) 2014-2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.hadoop.scalding
import scala.util.parsing.json._
trait Preprocessor {
def preprocess(line: String): (String, Seq[String])
}
object RawLinePreprocessor extends Preprocessor {
def preprocess(line: String): (String, Seq[String]) = (line, Nil)
}
object BadRowReprocessor extends Preprocessor {
def preprocess(line: String): (String, Seq[String]) = {
val parsedJson = JSON.parseFull(line).get.asInstanceOf[Map[String, Object]]
val inputTsv = parsedJson("line").asInstanceOf[String]
val errs = parsedJson("errors").asInstanceOf[Seq[Object]]
// We need to determine whether these are old-style errors of the form ["errorString1", ...]
// or new-style ones of the form [{"level": "..", "message": "errorString1"}]
val errorStrings = if (errs.isEmpty) {
Nil
} else {
errs(0) match {
case s: String => errs.asInstanceOf[Seq[String]]
case _ => errs.asInstanceOf[Seq[Map[String, Object]]].map(_("message").asInstanceOf[String])
}
}
(inputTsv, errorStrings)
}
}
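// Worked examples (illustrative): both bad-row styles yield the same result.
// BadRowReprocessor.preprocess("""{"line": "a\tb", "errors": ["oops"]}""")
//   == ("a\tb", Seq("oops"))
// BadRowReprocessor.preprocess("""{"line": "a\tb", "errors": [{"level": "error", "message": "oops"}]}""")
//   == ("a\tb", Seq("oops"))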
|
haensel-ams/snowplow
|
3-enrich/hadoop-event-recovery/src/main/scala/com/snowplowanalytics/hadoop/scalding/preprocessors.scala
|
Scala
|
apache-2.0
| 1,767
|
package io.youi.component
import io.youi.component.support.FontSupport
import io.youi.component.types.Prop
import io.youi.dom
import org.scalajs.dom.{Event, html}
class TextInput(element: html.Input = dom.create.input,
password: Boolean = false) extends Component(element) with FontSupport {
lazy val name: Prop[String] = new Prop[String](element.name, element.name_=)
lazy val value: Prop[String] = new Prop[String](element.value, element.value_=)
lazy val placeholder: Prop[String] = new Prop[String](element.placeholder, element.placeholder_=)
lazy val disabled: Prop[Boolean] = new Prop[Boolean](element.disabled, element.disabled_=)
lazy val spellCheck: Prop[Boolean] = new Prop[Boolean](element.spellcheck, element.spellcheck_=)
if (password) element.`type` = "password"
element.addEventListener("input", (_: Event) => {
Prop.changing(value) {
value @= element.value
}
})
def focus(): Unit = element.focus()
def blur(): Unit = element.blur()
}
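// --- Illustrative usage sketch (not part of the original file) ---
// val password = new TextInput(password = true)
// password.placeholder @= "Password"   // Prop supports `@=` for assignment, as used above
// password.focus()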
|
outr/youi
|
gui/src/main/scala/io/youi/component/TextInput.scala
|
Scala
|
mit
| 1,005
|
package jsky.app.ot.viewer.action
import edu.gemini.pot.sp.{Conflicts, ISPNode}
import jsky.app.ot.vcs.{VcsStateEvent, VcsIcon, ConflictNavigator}
import jsky.app.ot.viewer.SPViewer
import javax.swing.{KeyStroke, Action, Icon}
import java.awt.event.{InputEvent, ActionEvent, KeyEvent}
import scala.swing.{Component, Dialog, Reactor}
object VcsShowConflictAction {
sealed trait Direction {
def name: String
def icon: Icon
def key: Int
def preposition: String
def find: (ISPNode, ISPNode) => Option[ISPNode]
def title: String = "%s Conflict".format(name.capitalize)
}
case object Prev extends Direction {
val name = "prev"
val icon = VcsIcon.ConflictPrev
val key = KeyEvent.VK_P
val preposition = "before"
val find = ConflictNavigator.prev _
}
case object Next extends Direction {
val name = "next"
val icon = VcsIcon.ConflictNext
val key = KeyEvent.VK_N
val preposition = "after"
val find = ConflictNavigator.next _
}
}
import VcsShowConflictAction._
abstract class VcsShowConflictAction(dir: Direction, viewer: SPViewer) extends AbstractViewerAction(viewer, dir.title, dir.icon) with Reactor {
putValue(Action.SHORT_DESCRIPTION, "Show the %s conflict, if any, %s the selected node.".format(dir.name, dir.preposition))
putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(dir.key, AbstractViewerAction.platformEventMask() | InputEvent.SHIFT_DOWN_MASK))
putValue(AbstractViewerAction.SHORT_NAME, "Conflict")
listenTo(viewer.getVcsStateTracker)
reactions += {
case VcsStateEvent(_,_,_,conflicts) => setEnabled(isEnabledFor(conflicts))
}
def actionPerformed(evt: ActionEvent) {
def warnAndFixInvisibleNode(invisibles: List[ISPNode]): Boolean = {
val parent = Component.wrap(viewer)
val message = "You have conflicting changes in a node that you are not allowed to edit. Click OK to accept the database version."
val title = "Edit Permission Issue"
if (Dialog.showConfirmation(parent, message, title, Dialog.Options.OkCancel, Dialog.Message.Warning) == Dialog.Result.Ok) {
invisibles.foreach(_.setConflicts(Conflicts.EMPTY))
true
} else false
}
Option(viewer.getRoot) foreach { root =>
val tree = viewer.getTree
// Warn and fix conflicts for any invisible nodes or stop. :-/
val continue = ConflictNavigator.allConflictNodes(root).filter(n => Option(tree.getTreeNode(n)).isEmpty) match {
case Nil => true
case ns => warnAndFixInvisibleNode(ns)
}
if (continue) {
val selected = Option(viewer.getNode).getOrElse(root)
dir.find(root, selected) foreach { conflictNode =>
val treeNode = Option(tree.getTreeNode(conflictNode))
treeNode.foreach { tn => tree.setSelectedNode(tn) }
}
}
}
}
// OCSINF-360. I'm not a fan of this change, but it was requested to make
// this button enabled always when there is a conflict, even if there is no
// "next" conflict. That way it shows up red as an indicator that there are
// conflicts. Unfortunately it also means you can click on it and it takes
// you back to the same node, which seems like nothing happens. :-/
private def isEnabledFor(conflicts: List[ISPNode]): Boolean =
    conflicts.nonEmpty
// (conflicts.size > 1) || !conflicts.filterNot(_ == viewer.getTree.getSelectedNode).isEmpty
override def computeEnabledState: Boolean =
Option(viewer).exists(v => isEnabledFor(v.getVcsStateTracker.conflicts))
}
final class VcsShowPrevConflictAction(viewer: SPViewer) extends VcsShowConflictAction(Prev, viewer)
final class VcsShowNextConflictAction(viewer: SPViewer) extends VcsShowConflictAction(Next, viewer)
|
arturog8m/ocs
|
bundle/jsky.app.ot/src/main/scala/jsky/app/ot/viewer/action/VcsShowConflictAction.scala
|
Scala
|
bsd-3-clause
| 3,744
|
package de.m7w3.signal.controller
import de.m7w3.signal.ApplicationContext
import scala.reflect.runtime.universe.{Type, typeOf}
import scalafx.Includes._
import scalafx.scene.Parent
import scalafxml.core.{DependenciesByType, FXMLView}
import scalafxml.core.macros.{nested, sfxml}
@sfxml
class MainViewController(@nested[EditMenuController] editMenuController: MenuController,
@nested[FileMenuController] fileMenuController: MenuController,
@nested[HelpMenuController] helpMenuController: MenuController,
applicationContext: ApplicationContext) {
}
object MainView {
def load(context: ApplicationContext): Parent = {
val dependencies = Map[Type, Any](
typeOf[ApplicationContext] -> context
)
val resourceUri = "/de/m7w3/signal/main_view.fxml"
val fxmlUri = getClass.getResource(resourceUri)
require(fxmlUri != null, s"$resourceUri not found")
FXMLView(fxmlUri, new DependenciesByType(dependencies))
}
}
|
ayoub-benali/signal-desktop-client
|
src/main/scala/de/m7w3/signal/controller/MainViewController.scala
|
Scala
|
apache-2.0
| 1,019
|
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package synthesis
import purescala.Expressions.Choose
import purescala.Definitions._
import purescala.ExprOps._
import purescala.DefOps._
import purescala.ScalaPrinter
import solvers._
import leon.utils._
import scala.concurrent.duration._
import synthesis.strategies._
class Synthesizer(val context : LeonContext,
val program: Program,
val ci: SourceInfo,
val settings: SynthesisSettings) {
val reporter = context.reporter
lazy val sctx = new SynthesisContext(context, settings, ci.fd, program)
implicit val debugSection = leon.utils.DebugSectionSynthesis
def getSearch: Search = {
val strat0 = new CostBasedStrategy(context, settings.costModel)
val strat1 = if (settings.manualSearch.isDefined) {
new ManualStrategy(context, settings.manualSearch, strat0)
} else {
strat0
}
val strat2 = settings.searchBound match {
case Some(b) =>
BoundedStrategy(strat1, b)
case None =>
strat1
}
new Search(context, ci, strat2)
}
private var lastTime: Long = 0
def synthesize(): (Search, Stream[Solution]) = {
reporter.ifDebug { printer =>
printer(ci.problem.eb.asString("Tests available for synthesis")(context))
}
val s = getSearch
reporter.info(ASCIIHelpers.title(s"Synthesizing '${ci.fd.id}'"))
val t = context.timers.synthesis.search.start()
val sols = s.search(sctx)
val diff = t.stop()
lastTime = diff
reporter.info("Finished in "+diff+"ms")
(s, sols)
}
def validate(results: (Search, Stream[Solution]), allowPartial: Boolean): (Search, Stream[(Solution, Boolean)]) = {
val (s, sols) = results
val result = sols.map {
case sol if sol.isTrusted =>
(sol, Some(true))
case sol =>
validateSolution(s, sol, 5.seconds)
}
// Print out report for synthesis, if necessary
reporter.ifDebug { printer =>
import java.text.SimpleDateFormat
import java.util.Date
val categoryName = ci.fd.getPos.file.toString.split("/").dropRight(1).lastOption.getOrElse("?")
val benchName = categoryName+"."+ci.fd.id.name
val time = lastTime/1000.0
val defs = visibleDefsFrom(ci.fd)(program).collect {
case cd: ClassDef => 1 + cd.fields.size
case fd: FunDef => 1 + fd.params.size + formulaSize(fd.fullBody)
}
val psize = defs.sum
val (size, calls, proof) = result.headOption match {
case Some((sol, trusted)) =>
val expr = sol.toSimplifiedExpr(context, program, ci.problem.pc)
val pr = trusted match {
case Some(true) => "✓"
case Some(false) => "✗"
case None => "?"
}
(formulaSize(expr), functionCallsOf(expr).size, pr)
case _ =>
(0, 0, "F")
}
val date = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())
val fw = new java.io.FileWriter("synthesis-report.txt", true)
try {
fw.write(f"$date: $benchName%-50s | $psize%4d | $size%4d | $calls%4d | $proof%7s | $time%2.1f \n")
} finally {
        fw.close()
}
}(DebugSectionReport)
(s, if (result.isEmpty && allowPartial) {
Stream((new PartialSolution(s.strat, true)(context).getSolutionFor(s.g.root), false))
} else {
// Discard invalid solutions
result collect {
case (sol, Some(true)) => (sol, true)
case (sol, None) => (sol, false)
}
})
}
def validateSolution(search: Search, sol: Solution, timeout: Duration): (Solution, Option[Boolean]) = {
import verification.VerificationPhase._
import verification.VerificationContext
val timer = context.timers.synthesis.validation
timer.start()
reporter.info("Solution requires validation")
val (npr, fd) = solutionToProgram(sol)
val solverf = SolverFactory.getFromSettings(context, npr).withTimeout(timeout)
try {
val vctx = new VerificationContext(context, npr, solverf)
val vcs = generateVCs(vctx, List(fd))
val vcreport = checkVCs(vctx, vcs, stopWhen = _.isInvalid)
if (vcreport.totalValid == vcreport.totalConditions) {
(sol, Some(true))
} else if (vcreport.totalValid + vcreport.totalUnknown == vcreport.totalConditions) {
reporter.warning("Solution may be invalid:")
(sol, None)
} else {
reporter.error("Solution was invalid:")
reporter.error(ScalaPrinter(fd))
reporter.error(vcreport.summaryString)
(new PartialSolution(search.strat, false)(context).getSolutionFor(search.g.root), Some(false))
}
} finally {
timer.stop()
solverf.shutdown()
}
}
  // Returns the new program and the new function generated for this solution
def solutionToProgram(sol: Solution): (Program, FunDef) = {
// We replace the choose with the body of the synthesized solution
val solutionExpr = sol.toSimplifiedExpr(context, program, ci.problem.pc)
val transformer = funDefReplacer {
case fd if fd eq ci.fd =>
val nfd = fd.duplicate()
nfd.fullBody = replace(Map(ci.source -> solutionExpr), nfd.fullBody)
(fd.body, fd.postcondition) match {
case (Some(Choose(pred)), None) =>
nfd.postcondition = Some(pred)
case _ =>
}
Some(nfd)
case _ => None
}
val npr = transformProgram(transformer, program)
(npr, transformer.transform(ci.fd))
}
def shutdown(): Unit = {
sctx.solverFactory.shutdown()
}
}
|
regb/leon
|
src/main/scala/leon/synthesis/Synthesizer.scala
|
Scala
|
gpl-3.0
| 5,624
|
/**
* Copyright (C) 2012-2013 Vadim Bartko (vadim.bartko@nevilon.com).
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* See file LICENSE.txt for License information.
*/
package com.nevilon.nomad.storage.graph
import com.nevilon.nomad._
import crawler._
import logs.Logs
import scala.collection.mutable
class TitanDBService extends TransactionSupport with Logs {
private val connectorSelector = new ConnectorSelector
private implicit val graph = connectorSelector.graph
val urlService = new UrlService
val domainService = new DomainService
domainService.createSuperDomainIfNeeded
  def shutdown() = connectorSelector.shutdown()
  def linkUrls(relations: List[Relation]) {
    withTransaction {
      implicit tx => {
        val urlCache = new Cache[Url]((url) => {
          urlService.getUrlInTx(url.location)
        }, (url) => {
          Some(urlService.saveOrUpdateUrlInTx(url))
        })
        val domainCache = new Cache[Domain]((domain) => {
          domainService.getDomainInTx(domain)
        }, (domain) => Some(domainService.createDomainIfNeededInTx(domain)))
        val isLinkedCache = new mutable.HashSet[String]
        implicit val superNode = domainService.getSuperDomainNodeInTx
        relations.foreach(relation => {
          // get or create both endpoints of the relation
          val parentPage = urlCache.getOrElse(relation.from).get
          val childPage = urlCache.getOrElse(relation.to).get
          tx.addEdge("", parentPage, childPage, "relation")
          val newChildUrl: Url = Transformers.vertex2Url(childPage)
          val domainName = URLUtils.getDomainName(URLUtils.normalize(URLUtils.getDomainName(newChildUrl.location)))
          val domain = new Domain(domainName, DomainStatus.NEW)
          domainCache.getOrElse(domain).get // get or create the domain
          if (newChildUrl.status == UrlStatus.NEW && !isLinkedCache.contains(newChildUrl.location) &&
            !domainService.isUrlLinkedToDomainInTx(newChildUrl)) {
            domainService.addUrlToDomainInTx(domain, childPage)
            isLinkedCache.add(newChildUrl.location)
          }
        })
      }
    }
  }
def removeUrlFromDomain(url: Url) {
withTransaction {
implicit tx => {
domainService.removeUrlFromDomainInTx(url)
}
}
}
def addUrlToDomain(url: Url) {
withTransaction {
implicit tx => {
val domainName = URLUtils.getDomainName(URLUtils.normalize(url.location))
val domain = new Domain(domainName, DomainStatus.NEW)
domainService.addUrlToDomainInTx(domain, urlService.getUrlInTx(url.location).get)
}
}
}
}
|
hudvin/nomad
|
src/main/scala/com/nevilon/nomad/storage/graph/TitanDBService.scala
|
Scala
|
gpl-2.0
| 3,387
|
/**
* Copyright (C) 2013 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.client
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit
trait ClientFormRunnerSummaryTest extends AssertionsForJUnit with FormRunnerOps {
@Test def navigateSummary(): Unit = {
Summary.navigate("orbeon", "bookshelf")
assert("1 to 10 of 12" === Summary.paging)
Summary.nextPage()
assert("11 to 12 of 12" === Summary.paging)
Summary.firstPage()
assert("1 to 10 of 12" === Summary.paging)
}
}
|
brunobuzzi/orbeon-forms
|
xforms/jvm/src/test/scala/org/orbeon/oxf/client/ClientFormRunnerSummaryTest.scala
|
Scala
|
lgpl-2.1
| 1,118
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.collection
import java.io.OutputStream
import scala.collection.mutable.ArrayBuffer
/**
* A logical byte buffer that wraps a list of byte arrays. All the byte arrays have equal size. The
* advantage of this over a standard ArrayBuffer is that it can grow without claiming large amounts
* of memory and needing to copy the full contents. The disadvantage is that the contents don't
* occupy a contiguous segment of memory.
*/
private[spark] class ChainedBuffer(chunkSize: Int) {
private val chunkSizeLog2: Int = java.lang.Long.numberOfTrailingZeros(
java.lang.Long.highestOneBit(chunkSize))
assert((1 << chunkSizeLog2) == chunkSize,
s"ChainedBuffer chunk size $chunkSize must be a power of two")
private val chunks: ArrayBuffer[Array[Byte]] = new ArrayBuffer[Array[Byte]]()
private var _size: Long = 0
  /**
   * Feed bytes from this buffer into an OutputStream (such as a DiskBlockObjectWriter).
   *
   * @param pos Offset in the buffer to read from.
   * @param os OutputStream to write the bytes to.
   * @param len Number of bytes to read.
   */
def read(pos: Long, os: OutputStream, len: Int): Unit = {
if (pos + len > _size) {
throw new IndexOutOfBoundsException(
s"Read of $len bytes at position $pos would go past size ${_size} of buffer")
}
var chunkIndex: Int = (pos >> chunkSizeLog2).toInt
var posInChunk: Int = (pos - (chunkIndex.toLong << chunkSizeLog2)).toInt
var written: Int = 0
while (written < len) {
val toRead: Int = math.min(len - written, chunkSize - posInChunk)
os.write(chunks(chunkIndex), posInChunk, toRead)
written += toRead
chunkIndex += 1
posInChunk = 0
}
}
/**
* Read bytes from this buffer into a byte array.
*
* @param pos Offset in the buffer to read from.
* @param bytes Byte array to read into.
* @param offs Offset in the byte array to read to.
* @param len Number of bytes to read.
*/
def read(pos: Long, bytes: Array[Byte], offs: Int, len: Int): Unit = {
if (pos + len > _size) {
throw new IndexOutOfBoundsException(
s"Read of $len bytes at position $pos would go past size of buffer")
}
var chunkIndex: Int = (pos >> chunkSizeLog2).toInt
var posInChunk: Int = (pos - (chunkIndex.toLong << chunkSizeLog2)).toInt
var written: Int = 0
while (written < len) {
val toRead: Int = math.min(len - written, chunkSize - posInChunk)
System.arraycopy(chunks(chunkIndex), posInChunk, bytes, offs + written, toRead)
written += toRead
chunkIndex += 1
posInChunk = 0
}
}
/**
* Write bytes from a byte array into this buffer.
*
* @param pos Offset in the buffer to write to.
* @param bytes Byte array to write from.
* @param offs Offset in the byte array to write from.
* @param len Number of bytes to write.
*/
def write(pos: Long, bytes: Array[Byte], offs: Int, len: Int): Unit = {
if (pos > _size) {
throw new IndexOutOfBoundsException(
s"Write at position $pos starts after end of buffer ${_size}")
}
// Grow if needed
val endChunkIndex: Int = ((pos + len - 1) >> chunkSizeLog2).toInt
while (endChunkIndex >= chunks.length) {
chunks += new Array[Byte](chunkSize)
}
var chunkIndex: Int = (pos >> chunkSizeLog2).toInt
var posInChunk: Int = (pos - (chunkIndex.toLong << chunkSizeLog2)).toInt
var written: Int = 0
while (written < len) {
val toWrite: Int = math.min(len - written, chunkSize - posInChunk)
System.arraycopy(bytes, offs + written, chunks(chunkIndex), posInChunk, toWrite)
written += toWrite
chunkIndex += 1
posInChunk = 0
}
_size = math.max(_size, pos + len)
}
/**
* Total size of buffer that can be written to without allocating additional memory.
*/
def capacity: Long = chunks.size.toLong * chunkSize
/**
* Size of the logical buffer.
*/
def size: Long = _size
}
/**
* Output stream that writes to a ChainedBuffer.
*/
private[spark] class ChainedBufferOutputStream(chainedBuffer: ChainedBuffer) extends OutputStream {
private var pos: Long = 0
override def write(b: Int): Unit = {
throw new UnsupportedOperationException()
}
override def write(bytes: Array[Byte], offs: Int, len: Int): Unit = {
chainedBuffer.write(pos, bytes, offs, len)
pos += len
}
}
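// A minimal usage sketch (illustrative, not from the Spark sources): round-trip
// bytes through a ChainedBuffer via ChainedBufferOutputStream; the chunk size
// must be a power of two.
private[spark] object ChainedBufferExample {
  def main(args: Array[String]): Unit = {
    val buffer = new ChainedBuffer(chunkSize = 8)
    val out = new ChainedBufferOutputStream(buffer)
    val data = "hello, chained buffer".getBytes("UTF-8")
    out.write(data, 0, data.length) // spans several 8-byte chunks
    val readBack = new Array[Byte](data.length)
    buffer.read(0L, readBack, 0, data.length)
    assert(new String(readBack, "UTF-8") == "hello, chained buffer")
    assert(buffer.size == data.length && buffer.capacity >= buffer.size)
  }
}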
|
ArvinDevel/onlineAggregationOnSparkV2
|
core/src/main/scala/org/apache/spark/util/collection/ChainedBuffer.scala
|
Scala
|
apache-2.0
| 5,167
|
/*
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.rewrite
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import laika.io.DocumentType.Markup
import laika.tree.helper.ModelBuilder
import laika.tree.Elements._
import laika.tree.Documents._
import laika.tree.helper.DocumentViewBuilder.{Documents => Docs}
import laika.tree.helper.DocumentViewBuilder._
import laika.tree.Paths._
class CrossReferenceSpec extends FlatSpec
with Matchers
with ModelBuilder {
trait TreeModel {
def rootWithLink (id: String, text: String, path: PathInfo): RootElement = rootElement(p(CrossLink(List(Text(text)), id, path)))
def rootWithRef (id: String, text: String): RootElement = rootElement(p(LinkReference(List(Text(text)), id, s"[$id]")))
def rootWithTarget (id: String): RootElement = rootElement(InternalLinkTarget(Id(id)))
def rootElement (b: Block): RootElement = root(p("A"), b, p("B"))
def treeWithDocs (path: Path, name: String, root1: RootElement, root2: RootElement): DocumentTree =
DocumentTree(path, List(Document(path / (name+"1"), root1), Document(path / (name+"2"), root2)))
def treeWithDoc (path: Path, name: String, root: RootElement, subtrees: List[DocumentTree] = Nil): DocumentTree =
DocumentTree(path, List(Document(path / name, root)) ++ subtrees)
def treeWithSubtrees (path: Path, trees: DocumentTree*): DocumentTree =
DocumentTree(path, trees)
def treeViewWithDocs (path: Path, name: String, root1: RootElement, root2: RootElement): TreeView =
TreeView(path, List(Docs(Markup, List(
DocumentView(path / (name+"1"), List(Content(root1.content))),
DocumentView(path / (name+"2"), List(Content(root2.content)))
))))
def treeViewWithDoc (path: Path, name: String, root: RootElement, subtree: Option[TreeView] = None): TreeView =
TreeView(path, List(Docs(Markup, List(DocumentView(path / name, List(Content(root.content)))))) ::: (subtree map (t => Subtrees(List(t)))).toList)
def treeViewWithSubtrees (path: Path, trees: TreeView*): TreeView =
TreeView(path, List(Subtrees(trees)))
def rewrite (tree: DocumentTree): DocumentTree = tree.rewrite(RewriteRules.defaults)
}
"The reference resolver" should "resolve a cross reference to a target in another document in the same directory" in {
new TreeModel {
val tree = treeWithDocs(Root, "doc", rootWithRef("ref", "text"), rootWithTarget("ref"))
val treeResult = treeViewWithDocs(Root, "doc", rootWithLink("ref", "text", PathInfo(Root / "doc2",Current / "doc2")), rootWithTarget("ref"))
viewOf(rewrite(tree)) should be (treeResult)
}
}
it should "resolve a cross reference to a target in a document in a parent directory" in {
new TreeModel {
val subtree = treeWithDoc(Root / "sub", "doc1", rootWithRef("ref", "text"))
val rootTree = treeWithDoc(Root, "doc2", rootWithTarget("ref"), List(subtree))
val subtreeResult = treeViewWithDoc(Root / "sub", "doc1", rootWithLink("ref", "text", PathInfo(Root / "doc2", Parent(1) / "doc2")))
val treeResult = treeViewWithDoc(Root, "doc2", rootWithTarget("ref"), Some(subtreeResult))
viewOf(rewrite(rootTree)) should be (treeResult)
}
}
it should "resolve a cross reference to a target in a document in a child directory" in {
new TreeModel {
val subtree = treeWithDoc(Root / "sub", "doc2", rootWithTarget("ref"))
val rootTree = treeWithDoc(Root, "doc1", rootWithRef("ref", "text"), List(subtree))
val subtreeResult = treeViewWithDoc(Root / "sub", "doc2", rootWithTarget("ref"))
val treeResult = treeViewWithDoc(Root, "doc1", rootWithLink("ref", "text", PathInfo(Root / "sub" / "doc2", Current / "sub" / "doc2")), Some(subtreeResult))
viewOf(rewrite(rootTree)) should be (treeResult)
}
}
it should "resolve a cross reference to a target in a document in a sibling directory" in {
new TreeModel {
val subtree1 = treeWithDoc(Root / "sub1", "doc1", rootWithRef("ref", "text"))
val subtree2 = treeWithDoc(Root / "sub2", "doc2", rootWithTarget("ref"))
val rootTree = treeWithSubtrees(Root, subtree1, subtree2)
val subtreeResult1 = treeViewWithDoc(Root / "sub1", "doc1", rootWithLink("ref", "text", PathInfo(Root / "sub2" / "doc2", Parent(1) / "sub2" / "doc2")))
val subtreeResult2 = treeViewWithDoc(Root / "sub2", "doc2", rootWithTarget("ref"))
val treeResult = treeViewWithSubtrees(Root, subtreeResult1, subtreeResult2)
viewOf(rewrite(rootTree)) should be (treeResult)
}
}
it should "resolve a cross reference to a target in another document in the same directory using an explicit path" in {
new TreeModel {
val tree = treeWithDocs(Root, "doc", rootWithRef("doc2:ref", "text"), rootWithTarget("ref"))
val treeResult = treeViewWithDocs(Root, "doc", rootWithLink("ref", "text", PathInfo(Root / "doc2",Current / "doc2")), rootWithTarget("ref"))
viewOf(rewrite(tree)) should be (treeResult)
}
}
it should "resolve a cross reference to a target in a document in a parent directory using an explicit path" in {
new TreeModel {
val subtree = treeWithDoc(Root / "sub", "doc1", rootWithRef("../doc2:ref", "text"))
val rootTree = treeWithDoc(Root, "doc2", rootWithTarget("ref"), List(subtree))
val subtreeResult = treeViewWithDoc(Root / "sub", "doc1", rootWithLink("ref", "text", PathInfo(Root / "doc2", Parent(1) / "doc2")))
val treeResult = treeViewWithDoc(Root, "doc2", rootWithTarget("ref"), Some(subtreeResult))
viewOf(rewrite(rootTree)) should be (treeResult)
}
}
it should "resolve a cross reference to a target in a document in a child directory using an explicit path" in {
new TreeModel {
val subtree = treeWithDoc(Root / "sub", "doc2", rootWithTarget("ref"))
val rootTree = treeWithDoc(Root, "doc1", rootWithRef("sub/doc2:ref", "text"), List(subtree))
val subtreeResult = treeViewWithDoc(Root / "sub", "doc2", rootWithTarget("ref"))
val treeResult = treeViewWithDoc(Root, "doc1", rootWithLink("ref", "text", PathInfo(Root / "sub" / "doc2", Current / "sub" / "doc2")), Some(subtreeResult))
viewOf(rewrite(rootTree)) should be (treeResult)
}
}
it should "resolve a cross reference to a target in a document in a sibling directory using an explicit path" in {
new TreeModel {
val subtree1 = treeWithDoc(Root / "sub1", "doc1", rootWithRef("../sub2/doc2:ref", "text"))
val subtree2 = treeWithDoc(Root / "sub2", "doc2", rootWithTarget("ref"))
val rootTree = treeWithSubtrees(Root, subtree1, subtree2)
val subtreeResult1 = treeViewWithDoc(Root / "sub1", "doc1", rootWithLink("ref", "text", PathInfo(Root / "sub2" / "doc2", Parent(1) / "sub2" / "doc2")))
val subtreeResult2 = treeViewWithDoc(Root / "sub2", "doc2", rootWithTarget("ref"))
val treeResult = treeViewWithSubtrees(Root, subtreeResult1, subtreeResult2)
viewOf(rewrite(rootTree)) should be (treeResult)
}
}
}
|
amuramatsu/Laika
|
core/src/test/scala/laika/rewrite/CrossReferenceSpec.scala
|
Scala
|
apache-2.0
| 7,742
|
package org.jetbrains.plugins.scala
package testingSupport.test.utest
import com.intellij.testIntegration.TestFramework
import org.jetbrains.plugins.scala.extensions.IteratorExt
import org.jetbrains.plugins.scala.lang.psi.ElementScope
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil.isInheritorDeep
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScTemplateDefinition}
import org.jetbrains.plugins.scala.testingSupport.test.AbstractTestFramework.TestFrameworkSetupInfo
import org.jetbrains.plugins.scala.testingSupport.test.{AbstractTestFramework, TestFrameworkSetupSupportBase}
final class UTestTestFramework extends AbstractTestFramework with TestFrameworkSetupSupportBase {
override def getName: String = "uTest"
override def testFileTemplateName = "uTest Object"
override def getMarkerClassFQName: String = "utest.TestSuite"
override def getDefaultSuperClass: String = "utest.TestSuite"
override def baseSuitePaths: Seq[String] = Seq("utest.framework.TestSuite", "utest.TestSuite")
  // overridden because uTest now has two marker classes, which are the same as the base suite paths
override protected def isTestClass(definition: ScTemplateDefinition): Boolean = {
if (!definition.isInstanceOf[ScObject]) return false
val elementScope = ElementScope(definition.getProject)
val cachedClass = baseSuitePaths.iterator.flatMap(elementScope.getCachedClass).headOption
cachedClass.exists(isInheritorDeep(definition, _))
}
override def frameworkSetupInfo(scalaVersion: Option[String]): TestFrameworkSetupInfo =
TestFrameworkSetupInfo(Seq(""""com.lihaoyi" %% "utest" % "latest.integration""""), Seq())
}
object UTestTestFramework {
@deprecated("use `apply` instead", "2020.3")
def instance: UTestTestFramework = apply()
def apply(): UTestTestFramework =
TestFramework.EXTENSION_NAME.findExtension(classOf[UTestTestFramework])
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/testingSupport/test/utest/UTestTestFramework.scala
|
Scala
|
apache-2.0
| 1,902
|
package monocle
import monocle.function.Plated
/**
 * Shows how we could use optics to manipulate a JSON AST.
 */
class JsonExample extends MonocleSuite {
sealed trait Json
case class JsString(s: String) extends Json
case class JsNumber(n: Int) extends Json
case class JsArray(l: List[Json]) extends Json
case class JsObject(m: Map[String, Json]) extends Json
val jsString = Prism[Json, String]{ case JsString(s) => Some(s); case _ => None}(JsString.apply)
val jsNumber = Prism[Json, Int]{ case JsNumber(n) => Some(n); case _ => None}(JsNumber.apply)
val jsArray = Prism[Json, List[Json]]{ case JsArray(a) => Some(a); case _ => None}(JsArray.apply)
val jsObject = Prism[Json, Map[String, Json]]{ case JsObject(m) => Some(m); case _ => None}(JsObject.apply)
val json: Json = JsObject(Map(
"first_name" -> JsString("John"),
"last_name" -> JsString("Doe"),
"age" -> JsNumber(28),
"siblings" -> JsArray(List(
JsObject(Map(
"first_name" -> JsString("Elia"),
"age" -> JsNumber(23)
)),
JsObject(Map(
"first_name" -> JsString("Robert"),
"age" -> JsNumber(25)
))
))
))
test("Json Prism") {
jsNumber.getOption(JsString("plop")) shouldEqual None
jsNumber.getOption(JsNumber(2)) shouldEqual Some(2)
}
test("Use index to go into an JsObject or JsArray") {
(jsObject composeOptional index("age") composePrism jsNumber).getOption(json) shouldEqual Some(28)
(jsObject composeOptional index("siblings")
composePrism jsArray
composeOptional index(1)
composePrism jsObject
composeOptional index("first_name")
composePrism jsString
).set("Robert Jr.")(json) shouldEqual JsObject(Map(
"first_name" -> JsString("John"),
"last_name" -> JsString("Doe"),
"age" -> JsNumber(28),
"siblings" -> JsArray(List(
JsObject(Map(
"first_name" -> JsString("Elia"),
"age" -> JsNumber(23)
)),
JsObject(Map(
"first_name" -> JsString("Robert Jr."), // name is updated
"age" -> JsNumber(25)
))
))
))
}
test("Use at to add delete fields") {
(jsObject composeLens at("nick_name")).set(Some(JsString("Jojo")))(json) shouldEqual JsObject(Map(
"first_name" -> JsString("John"),
"nick_name" -> JsString("Jojo"), // new field
"last_name" -> JsString("Doe"),
"age" -> JsNumber(28),
"siblings" -> JsArray(List(
JsObject(Map(
"first_name" -> JsString("Elia"),
"age" -> JsNumber(23)
)),
JsObject(Map(
"first_name" -> JsString("Robert"),
"age" -> JsNumber(25)
))
))
))
(jsObject composeLens at("age")).set(None)(json) shouldEqual JsObject(Map(
"first_name" -> JsString("John"),
"last_name" -> JsString("Doe"), // John is ageless now
"siblings" -> JsArray(List(
JsObject(Map(
"first_name" -> JsString("Elia"),
"age" -> JsNumber(23)
)),
JsObject(Map(
"first_name" -> JsString("Robert"),
"age" -> JsNumber(25)
))
))
))
}
test("Use each and filterIndex to modify several fields at a time") {
(jsObject composeTraversal filterIndex((_: String).contains("name"))
composePrism jsString
composeOptional headOption
).modify(_.toLower)(json) shouldEqual JsObject(Map(
"first_name" -> JsString("john"), // starts with lower case
"last_name" -> JsString("doe"), // starts with lower case
"age" -> JsNumber(28),
"siblings" -> JsArray(List(
JsObject(Map(
"first_name" -> JsString("Elia"),
"age" -> JsNumber(23)
)),
JsObject(Map(
"first_name" -> JsString("Robert"),
"age" -> JsNumber(25)
))
))
))
(jsObject composeOptional index("siblings")
composePrism jsArray
composeTraversal each
composePrism jsObject
composeOptional index("age")
composePrism jsNumber
).modify(_ + 1)(json) shouldEqual JsObject(Map(
"first_name" -> JsString("John"),
"last_name" -> JsString("Doe"),
"age" -> JsNumber(28),
"siblings" -> JsArray(List(
JsObject(Map(
"first_name" -> JsString("Elia"),
"age" -> JsNumber(24) // Elia is older
)),
JsObject(Map(
"first_name" -> JsString("Robert"),
"age" -> JsNumber(26) // Robert is older
))
))
))
}
implicit val jsonPlated: Plated[Json] = new Plated[Json] {
import scalaz.{Applicative, Traverse}
import scalaz.std.list._
import scalaz.std.map._
import scalaz.syntax.traverse._
val plate: Traversal[Json, Json] = new Traversal[Json, Json] {
def modifyF[F[_]: Applicative](f: Json => F[Json])(a: Json): F[Json] =
a match {
case j@(JsString(_) | JsNumber(_)) => Applicative[F].point(j)
case JsArray(l) => l.traverse(f).map(JsArray)
case JsObject(m) => m.traverse(f).map(JsObject)
}
}
}
test("Plated instance to rewrite any matching elements") {
Plated.rewrite[Json] {
case JsString(s) =>
val u = s.toUpperCase
if (s != u) Some(JsString(u)) else None
case _ => None
}(json) shouldEqual JsObject(Map(
"first_name" -> JsString("JOHN"),
"last_name" -> JsString("DOE"),
"age" -> JsNumber(28),
"siblings" -> JsArray(List(
JsObject(Map(
"first_name" -> JsString("ELIA"),
"age" -> JsNumber(23)
)),
JsObject(Map(
"first_name" -> JsString("ROBERT"),
"age" -> JsNumber(25)
))
))
))
}
}
|
rperry/Monocle
|
example/src/test/scala/monocle/JsonExample.scala
|
Scala
|
mit
| 6,023
|
package com.nitin.nizhawan.decompiler.structures.constantpool
import com.nitin.nizhawan.decompiler.main.ByteReader
/**
* Created by nitin on 13/12/15.
*/
class MethodTypeConstPoolEntry(tag:Int,br:ByteReader,pool:ConstantPool) extends ConstPoolEntry(tag,br,pool){
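  // Per the JVM class-file spec, a CONSTANT_MethodType entry (tag 16) holds a
  // two-byte descriptor_index into the constant pool; readChar() presumably
  // reads that unsigned 16-bit index.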
val index = br.readChar()
override lazy val info = pool.poolEntries(index).info
}
|
nitin-nizhawan/jedi
|
src/com/nitin/nizhawan/decompiler/structures/constantpool/MethodTypeConstPoolEntry.scala
|
Scala
|
artistic-2.0
| 353
|
package ionroller
import play.api.libs.json._
final case class ReleaseVersion(tag: String)
object ReleaseVersion {
implicit object JsonFormat extends Format[ReleaseVersion] {
def reads(json: JsValue): JsResult[ReleaseVersion] =
json.validate[String] map ReleaseVersion.apply
def writes(o: ReleaseVersion): JsValue =
JsString(o.tag)
}
}
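// A quick round-trip sketch (illustrative only) exercising the Format above.
object ReleaseVersionExample {
  def main(args: Array[String]): Unit = {
    val v = ReleaseVersion("v1.2.3")
    val js = Json.toJson(v)
    assert(js == JsString("v1.2.3"))
    assert(js.validate[ReleaseVersion].get == v)
  }
}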
|
browngeek666/ionroller
|
core/src/main/scala/ionroller/ReleaseVersion.scala
|
Scala
|
mit
| 366
|
package concrete.constraint.semantic
import com.typesafe.scalalogging.LazyLogging
import concrete._
import concrete.constraint.AdviseCount
import cspom._
import cspom.variable.IntVariable
import org.scalatest.{FlatSpec, Matchers, OptionValues}
import scala.util.Random
class DiffNTest extends FlatSpec with Matchers with OptionValues with LazyLogging {
"diffN" should "filter" in {
val xs = Array(Singleton(1), Singleton(2), IntDomain(0 until 15)).zipWithIndex.map {
case (d, i) => new Variable(s"x$i", d)
}
val ys = Array(Singleton(0), Singleton(1), IntDomain(0 until 2)).zipWithIndex.map {
case (d, i) => new Variable(s"y$i", d)
}
val dxs = Array(Singleton(1), Singleton(1), Singleton(3)).zipWithIndex.map {
case (d, i) => new Variable(s"dx$i", d)
}
val dys = Array(Singleton(1), Singleton(2), Singleton(2)).zipWithIndex.map {
case (d, i) => new Variable(s"dy$i", d)
}
val problem = new Problem(xs ++ ys ++ dxs ++ dys)
val diffn = new DiffN(xs.reverse, ys.reverse, dxs.reverse, dys.reverse)
problem.addConstraint(diffn)
diffn.register(new AdviseCount)
val mod = problem.initState
.andThen { ps =>
diffn.eventAll(ps)
diffn.revise(ps)
}
.toState
mod.dom(xs(2)).head shouldBe 3
}
it should "run" in {
val r = new Random(0)
import CSPOM._
import CSPOMDriver._
val n = 20
val sizes = IndexedSeq.fill(n)(
Seq(1 + r.nextInt(5), 1 + r.nextInt(5)))
val cspom = CSPOM { implicit problem =>
val max = Seq(IntVariable(0 until 100) as "maxX",
IntVariable(0 until 100) as "maxY")
ctr(max(0) === max(1))
// val obj = (maxX * maxY) as "obj"
val coordinates = Seq.tabulate(n)(i =>
Seq(IntVariable(0 until 100) as s"x$i",
IntVariable(0 until 100) as s"y$i"))
for (i <- 0 until n; dim <- 0 until 2) {
ctr(coordinates(i)(dim) + sizes(i)(dim) <= max(dim))
}
ctr(CSPOMConstraint("diffn")(coordinates.map(seq2CSPOMSeq(_)), sizes.map(seq2CSPOMSeq(_))))
goal(CSPOMGoal.Minimize(max(0)))
}
val pm = new ParameterManager().updated("heuristic.value", Seq())
val solver = Solver(cspom, pm).get
val stats = new StatisticsManager
stats.register("solver", solver.solver)
// for (sol <- solver.toIterable) {
// for (i <- 0 until n) println((sol.get(s"x$i").get, sol.get(s"y$i").get, dxs(i), dys(i)))
// println(sol.get("maxX"))
// println("------")
// }
solver.reduceLeftOption((_, next) => next).value("maxX") shouldBe 14
logger.info(stats.toString)
}
}
|
concrete-cp/concrete
|
src/test/scala/concrete/constraint/semantic/DiffNTest.scala
|
Scala
|
lgpl-2.1
| 2,663
|
package models.storage.nodes
import models.storage.Interval
import no.uio.musit.formatters.StrictFormatters._
import play.api.libs.functional.syntax._
import play.api.libs.json.Reads._
import play.api.libs.json._
case class EnvironmentRequirement(
temperature: Option[Interval[Double]],
relativeHumidity: Option[Interval[Double]],
hypoxicAir: Option[Interval[Double]],
cleaning: Option[String],
lightingCondition: Option[String],
comment: Option[String]
)
object EnvironmentRequirement {
implicit val format: Format[EnvironmentRequirement] = (
(__ \ "temperature").formatNullable[Interval[Double]] and
(__ \ "relativeHumidity").formatNullable[Interval[Double]] and
(__ \ "hypoxicAir").formatNullable[Interval[Double]] and
(__ \ "cleaning").formatNullable[String](maxCharsFormat(100)) and
(__ \ "lightingCondition").formatNullable[String](maxCharsFormat(100)) and
(__ \ "comment").formatNullable[String](maxCharsFormat(250))
)(EnvironmentRequirement.apply, unlift(EnvironmentRequirement.unapply))
}
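// Illustrative round-trip (not from the original sources): absent optional
// fields read as None, and the string fields go through the strict max-chars
// formats declared above (assumed here to simply enforce the max length).
object EnvironmentRequirementExample {
  def main(args: Array[String]): Unit = {
    val parsed = Json.parse("""{"cleaning": "weekly", "comment": "keep dry"}""")
      .validate[EnvironmentRequirement]
    assert(parsed.get == EnvironmentRequirement(None, None, None, Some("weekly"), None, Some("keep dry")))
  }
}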
|
MUSIT-Norway/musit
|
service_backend/app/models/storage/nodes/EnvironmentRequirement.scala
|
Scala
|
gpl-2.0
| 1,065
|
package spire
package algebra
/**
 * Rig is a ring whose additive structure doesn't have an inverse (i.e. it is a
 * monoid, not a group). Put another way, a Rig is a Ring without negation.
 */
trait Rig[@sp(Byte, Short, Int, Long, Float, Double) A] extends Any with Semiring[A] with AdditiveMonoid[A] with MultiplicativeMonoid[A] {
/**
* This is similar to `Semigroup#pow`, except that `a pow 0` is defined to be
* the multiplicative identity.
*/
override def pow(a:A, n:Int):A =
if (n >= 0) prodn(a, n)
else throw new IllegalArgumentException(s"Illegal negative exponent $n to Monoid#pow")
}
object Rig {
@inline final def apply[A](implicit r:Rig[A]): Rig[A] = r
}
/**
* CRig is a Rig that is commutative under multiplication.
*/
trait CRig[@sp(Byte, Short, Int, Long, Float, Double) A] extends Any with Rig[A] with MultiplicativeCMonoid[A]
object CRig {
@inline final def apply[A](implicit r: CRig[A]): CRig[A] = r
}
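// A minimal sketch (not part of the original file): a Rig instance for Int,
// assuming the parent traits only require `zero`/`plus` (AdditiveMonoid) and
// `one`/`times` (MultiplicativeMonoid).
object IntRigExample {
  implicit val intRig: Rig[Int] = new Rig[Int] {
    def zero: Int = 0
    def plus(x: Int, y: Int): Int = x + y
    def one: Int = 1
    def times(x: Int, y: Int): Int = x * y
  }
  def main(args: Array[String]): Unit = {
    assert(Rig[Int].pow(3, 0) == 1)  // pow(_, 0) is the multiplicative identity
    assert(Rig[Int].pow(3, 4) == 81) // repeated multiplication otherwise
  }
}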
|
tixxit/spire
|
core/shared/src/main/scala/spire/algebra/Rig.scala
|
Scala
|
mit
| 950
|
import scala.annotation.tailrec
object OcrNumbers {
def convert(grid: List[String]): String = {
    val invalidGridSize = grid.length % 4 != 0 || !grid.forall(_.length % 3 == 0)
    if (invalidGridSize)
"?"
else {
ocrReadoutLines(grid).map(ocrLine => {
val iterator = ocrLine.iterator
val line0 = iterator.next()
val line1 = iterator.next()
val line2 = iterator.next()
val line3 = iterator.next()
toDigits(line0, line1, line2, line3, "")
}).mkString(",")
}
}
private def ocrReadoutLines(grid: List[String]) = grid.grouped(4)
@tailrec
private def toDigits(line0: String, line1: String,
line2: String, line3: String,
acc: String): String = {
if (line0.isEmpty || line1.isEmpty || line2.isEmpty || line3.isEmpty) acc
else {
val (l0Head, l0Tail) = line0.splitAt(3)
val (l1Head, l1Tail) = line1.splitAt(3)
val (l2Head, l2Tail) = line2.splitAt(3)
val (l3Head, l3Tail) = line3.splitAt(3)
val ocrDigit = List(l0Head, l1Head, l2Head, l3Head)
val digit = fontToDigit.getOrElse(ocrDigit, '?')
toDigits(l0Tail, l1Tail, l2Tail, l3Tail, acc + digit)
}
}
  private lazy val fontToDigit = Map(
    List(" _ ",
         "| |",
         "|_|",
         "   ") -> '0',
    List("   ",
         "  |",
         "  |",
         "   ") -> '1',
    List(" _ ",
         " _|",
         "|_ ",
         "   ") -> '2',
    List(" _ ",
         " _|",
         " _|",
         "   ") -> '3',
    List("   ",
         "|_|",
         "  |",
         "   ") -> '4',
    List(" _ ",
         "|_ ",
         " _|",
         "   ") -> '5',
    List(" _ ",
         "|_ ",
         "|_|",
         "   ") -> '6',
    List(" _ ",
         "  |",
         "  |",
         "   ") -> '7',
    List(" _ ",
         "|_|",
         "|_|",
         "   ") -> '8',
    List(" _ ",
         "|_|",
         " _|",
         "   ") -> '9')
}
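// A small usage sketch (not part of the exercise sources): a 4-row, 6-column
// grid encoding the digits "12" in the three-by-four OCR font above.
object OcrNumbersExample {
  def main(args: Array[String]): Unit = {
    val grid = List(
      "    _ ",
      "  | _|",
      "  ||_ ",
      "      ")
    assert(OcrNumbers.convert(grid) == "12")
  }
}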
|
exercism/xscala
|
exercises/practice/ocr-numbers/.meta/Example.scala
|
Scala
|
mit
| 1,919
|
/*
* The Reactive Summit Austin talk
* Copyright (C) 2016 Jan Machacek
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package org.eigengo.rsa.v100
import java.io.File
import scala.io.Source
object LocalMain {
def main(args: Array[String]): Unit = {
System.setProperty("KAFKA_BOOTSTRAP_SERVERS", "localhost:9092")
System.setProperty("CASSANDRA_JOURNAL_CPS", "localhost:9042")
System.setProperty("CASSANDRA_SNAPSHOT_CPS", "localhost:9042")
Source.fromFile(new File(System.getProperty("user.home"), ".env/twitter-rsa"))
.getLines()
.foreach { line ⇒
        val Array(k, v) = line.split("=", 2) // split on the first '=' only, so values may contain '='
System.setProperty(k, v)
}
org.eigengo.rsa.ingest.v100.Main.main(args)
org.eigengo.rsa.dashboard.v100.Main.main(args)
org.eigengo.rsa.scene.v100.Main.main(args)
org.eigengo.rsa.identity.v100.Main.main(args)
Thread.sleep(30000)
org.eigengo.rsa.it.v100.Main.main(args)
}
}
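// Illustrative note (not part of the original sources): the loader above expects
// ~/.env/twitter-rsa to hold plain KEY=value lines, one property per line. The
// key names below are placeholders, not necessarily the ones the talk's code uses:
//
//   TWITTER_CONSUMER_KEY=<your-key>
//   TWITTER_CONSUMER_SECRET=<your-secret>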
|
eigengo/reactive-summit-2016
|
fat-it/src/main/scala/org/eigengo/rsa/v100/LocalMain.scala
|
Scala
|
gpl-3.0
| 1,550
|
package im.mange.acceptance.driveby.scalatest.browser
import im.mange.acceptance.driveby.scalatest.WebSpecification
import im.mange.driveby.conditions._
import org.scalatest.Matchers
class TitleSpec extends WebSpecification with Matchers {
def `title must be the page title` {
given.page(<title>page title</title>)
.assert(TitleEquals("page title"))
}
}
|
alltonp/driveby
|
src/test/scala/im/mange/acceptance/driveby/scalatest/browser/TitleSpec.scala
|
Scala
|
apache-2.0
| 369
|
package com.twitter.finagle.stats
/**
 * Interface used via the LoadService mechanism to obtain an
 * efficient way to sample stats.
 */
private[twitter] trait StatsRegistry {
/** Whether or not the counters are latched. */
val latched: Boolean
def apply(): Map[String, StatEntry]
}
private[twitter] trait StatEntry {
  /**
   * The delta since the entry was last sampled.
   * Note that this field is identical to `value` for
   * instantaneous entries (e.g. gauges).
   */
val delta: Double
/** The instantaneous value of the entry. */
val value: Double
/** The type of the metric backing this StatEntry ("counter", "gauge", or "histogram"). */
val metricType: String
}
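// A minimal illustrative implementation (not from the original sources): an
// in-memory registry exposing a single unlatched counter entry.
private[twitter] object StatsRegistryExample {
  final case class SimpleEntry(delta: Double, value: Double, metricType: String) extends StatEntry
  val registry: StatsRegistry = new StatsRegistry {
    val latched: Boolean = false
    def apply(): Map[String, StatEntry] =
      Map("requests" -> SimpleEntry(delta = 5.0, value = 105.0, metricType = "counter"))
  }
}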
|
twitter/util
|
util-stats/src/main/scala/com/twitter/finagle/stats/StatsRegistry.scala
|
Scala
|
apache-2.0
| 696
|
/**
* Copyright (C) 2010 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.control
import org.orbeon.oxf.util.CoreUtils._
import org.orbeon.oxf.xforms._
import org.orbeon.oxf.xforms.analysis.controls.{LHHAAnalysis, StaticLHHASupport}
import org.orbeon.xforms.analysis.model.ValidationLevel
import org.orbeon.oxf.xforms.control.LHHASupport._
import org.orbeon.oxf.xforms.control.XFormsControl._
import org.orbeon.oxf.xforms.analysis.controls.LHHA
import scala.collection.compat._
trait ControlLHHASupport {
self: XFormsControl =>
// Label, help, hint and alert (evaluated lazily)
// 2013-06-19: We support multiple alerts, but only one client-facing alert value at this point.
// NOTE: var because of cloning
private[ControlLHHASupport] var lhhaArray = new Array[LHHAProperty](LHHA.size)
// XBL Container in which dynamic LHHA elements like `xf:output` and AVTs evaluate
def lhhaContainer = container
def markLHHADirty(): Unit =
for (currentLHHA <- lhhaArray)
if (currentLHHA ne null)
currentLHHA.handleMarkDirty()
// This is needed because, unlike the other LHH, the alert doesn't only depend on its expressions: it also depends
// on the control's current validity and validations. Because we don't have yet a way of taking those in as
// dependencies, we force dirty alerts whenever such validations change upon refresh.
def forceDirtyAlert(): Unit = {
val alert = lhhaArray(LHHA.valuesToIndex(LHHA.Alert))
if (alert ne null)
alert.handleMarkDirty(force = true)
}
def evaluateNonRelevantLHHA(): Unit =
for (i <- lhhaArray.indices)
lhhaArray(i) = null
// Copy LHHA if not null
def updateLHHACopy(copy: XFormsControl): Unit = {
copy.lhhaArray = new Array[LHHAProperty](LHHA.size)
for {
i <- lhhaArray.indices
currentLHHA = lhhaArray(i)
if currentLHHA ne null
} yield {
// Evaluate lazy value before copying
currentLHHA.value()
// Copy
copy.lhhaArray(i) = currentLHHA.copy.asInstanceOf[LHHAProperty]
}
}
def lhhaProperty(lhha: LHHA): LHHAProperty = {
// TODO: Not great to work by index.
val index = LHHA.valuesToIndex(lhha)
// Evaluate lazily
Option(lhhaArray(index)) getOrElse {
// NOTE: Ugly because of imbalanced hierarchy between static/runtime controls
val property =
if (part.hasLHHA(prefixedId, lhha) && self.staticControl.isInstanceOf[StaticLHHASupport])
self match {
case singleNodeControl: XFormsSingleNodeControl if lhha == LHHA.Alert =>
new MutableAlertProperty(singleNodeControl, lhha, htmlLhhaSupport(lhha))
case control: XFormsControl if lhha != LHHA.Alert =>
new MutableLHHProperty(control, lhha, htmlLhhaSupport(lhha))
case _ =>
NullLHHA
}
else
NullLHHA
lhhaArray(index) = property
property
}
}
def htmlLhhaSupport: Set[LHHA] = LHHA.DefaultLHHAHTMLSupport
def ajaxLhhaSupport: Seq[LHHA] = LHHA.values
def compareLHHA(other: XFormsControl) =
ajaxLhhaSupport forall (lhha => lhhaProperty(lhha).value() == other.lhhaProperty(lhha).value())
// Convenience accessors
final def getLabel = lhhaProperty(LHHA.Label).value()
final def getEscapedLabel = lhhaProperty(LHHA.Label).escapedValue()
final def isHTMLLabel = lhhaProperty(LHHA.Label).isHTML
final def getHelp = lhhaProperty(LHHA.Help).value()
final def getEscapedHelp = lhhaProperty(LHHA.Help).escapedValue()
final def isHTMLHelp = lhhaProperty(LHHA.Help).isHTML
final def getHint = lhhaProperty(LHHA.Hint).value()
final def getEscapedHint = lhhaProperty(LHHA.Hint).escapedValue()
final def isHTMLHint = lhhaProperty(LHHA.Hint).isHTML
final def getAlert = lhhaProperty(LHHA.Alert).value()
final def isHTMLAlert = lhhaProperty(LHHA.Alert).isHTML
final def getEscapedAlert = lhhaProperty(LHHA.Alert).escapedValue()
}
// NOTE: Use name different from trait so that the Java compiler is happy
object LHHASupport {
val NullLHHA = new NullLHHAProperty
// Control property for LHHA
trait LHHAProperty extends ControlProperty[String] {
def escapedValue(): String
def isHTML: Boolean
}
// Immutable null LHHA property
class NullLHHAProperty extends ImmutableControlProperty(null: String) with LHHAProperty {
def escapedValue(): String = null
def isHTML = false
}
// Whether a given control has an associated xf:label element.
// TODO: Handle https://github.com/orbeon/orbeon-forms/issues/3853.
def hasLabel(containingDocument: XFormsContainingDocument, prefixedId: String) =
containingDocument.staticOps.hasLHHA(prefixedId, LHHA.Label)
// Gather all active alerts for the given control following a selection algorithm
//
// - This depends on
// - the control validity
// - failed validations
// - alerts in the UI matching validations or not
// - If no alert is active for the control, return None.
// - Only alerts for the highest ValidationLevel are returned.
//
def gatherActiveAlerts(control: XFormsSingleNodeControl): Option[(ValidationLevel, List[LHHAAnalysis])] =
if (control.isRelevant) {
val staticAlerts = control.staticControl.asInstanceOf[StaticLHHASupport].alerts
def nonEmptyOption[T](l: List[T]) = l.nonEmpty option l
def alertsMatchingValidations = {
val failedValidationsIds = control.failedValidations.map(_.id).to(Set)
nonEmptyOption(staticAlerts filter (_.forValidations intersect failedValidationsIds nonEmpty))
}
// Find all alerts which match the given level, if there are any failed validations for that level
// NOTE: ErrorLevel is handled specially: in addition to failed validations, the level matches if the
// control is not valid for any reason including failed schema validation.
def alertsMatchingLevel(level: ValidationLevel) =
nonEmptyOption(staticAlerts filter (_.forLevels(level)))
// Alerts that specify neither a validations nor a level
def alertsMatchingAny =
nonEmptyOption(staticAlerts filter (a => a.forValidations.isEmpty && a.forLevels.isEmpty))
// For that given level, identify all matching alerts if any, whether they match by validations or by level.
// Alerts that specify neither a validation nor a level are considered a default, that is they are not added
// if other alerts have already been matched.
// Alerts are returned in document order
control.alertLevel flatMap { level =>
val alerts =
alertsMatchingValidations orElse
alertsMatchingLevel(level) orElse
alertsMatchingAny getOrElse
Nil
val matchingAlertIds = alerts map (_.staticId) toSet
val matchingAlerts = staticAlerts filter (a => matchingAlertIds(a.staticId))
matchingAlerts.nonEmpty option (level, matchingAlerts)
}
} else
None
}
|
orbeon/orbeon-forms
|
xforms-runtime/shared/src/main/scala/org/orbeon/oxf/xforms/control/ControlLHHASupport.scala
|
Scala
|
lgpl-2.1
| 7,625
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.repl
import org.apache.spark.util.Utils
import org.apache.spark._
import org.apache.spark.sql.SQLContext
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.SparkILoop
object Main extends Logging {
val conf = new SparkConf()
val tmp = System.getProperty("java.io.tmpdir")
val rootDir = conf.get("spark.repl.classdir", tmp)
val outputDir = Utils.createTempDir(rootDir)
val s = new Settings()
s.processArguments(List("-Yrepl-class-based",
"-Yrepl-outdir", s"${outputDir.getAbsolutePath}", "-Yrepl-sync"), true)
val classServer = new HttpServer(conf, outputDir, new SecurityManager(conf))
var sparkContext: SparkContext = _
var sqlContext: SQLContext = _
var interp = new SparkILoop // this is a public var because tests reset it.
def main(args: Array[String]) {
if (getMaster == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
// Start the classServer and store its URI in a spark system property
// (which will be passed to executors so that they can connect to it)
classServer.start()
    interp.process(s) // the REPL starts and enters its read-eval-print loop
classServer.stop()
Option(sparkContext).map(_.stop)
}
def getAddedJars: Array[String] = {
val envJars = sys.env.get("ADD_JARS")
if (envJars.isDefined) {
logWarning("ADD_JARS environment variable is deprecated, use --jar spark submit argument instead")
}
val propJars = sys.props.get("spark.jars").flatMap { p => if (p == "") None else Some(p) }
val jars = propJars.orElse(envJars).getOrElse("")
Utils.resolveURIs(jars).split(",").filter(_.nonEmpty)
}
def createSparkContext(): SparkContext = {
val execUri = System.getenv("SPARK_EXECUTOR_URI")
val jars = getAddedJars
val conf = new SparkConf()
.setMaster(getMaster)
.setAppName("Spark shell")
.setJars(jars)
.set("spark.repl.class.uri", classServer.uri)
logInfo("Spark class server started at " + classServer.uri)
if (execUri != null) {
conf.set("spark.executor.uri", execUri)
}
if (System.getenv("SPARK_HOME") != null) {
conf.setSparkHome(System.getenv("SPARK_HOME"))
}
sparkContext = new SparkContext(conf)
logInfo("Created spark context..")
sparkContext
}
def createSQLContext(): SQLContext = {
val name = "org.apache.spark.sql.hive.HiveContext"
val loader = Utils.getContextOrSparkClassLoader
try {
sqlContext = loader.loadClass(name).getConstructor(classOf[SparkContext])
.newInstance(sparkContext).asInstanceOf[SQLContext]
logInfo("Created sql context (with Hive support)..")
}
catch {
case _: java.lang.ClassNotFoundException | _: java.lang.NoClassDefFoundError =>
sqlContext = new SQLContext(sparkContext)
logInfo("Created sql context..")
}
sqlContext
}
private def getMaster: String = {
val master = {
val envMaster = sys.env.get("MASTER")
val propMaster = sys.props.get("spark.master")
propMaster.orElse(envMaster).getOrElse("local[*]")
}
master
}
}
|
andrewor14/iolap
|
repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
|
Scala
|
apache-2.0
| 3,905
|
/* ----------------- sse-breaker ----------------- *\\
* Licensed under the Apache License, Version 2.0. *
* Author: Spiros Tzavellas *
\\* ----------------------------------------------- */
package com.tzavellas.sse.breaker
import org.junit.Assert._
class TestListener extends CircuitStateListener {
var opened, closed: Boolean = false
var error: Throwable = _
def onOpen(circuit: CircuitBreaker, error: Throwable): Unit = {
opened = true
this.error = error
assertNotNull(error)
assertTrue(circuit.isOpen)
}
def onClose(circuit: CircuitBreaker): Unit = {
closed = true
assertTrue(circuit.isClosed)
}
def assertCalledOnOpen(): Unit = {
assertTrue(opened)
}
def assertCalledOnClose(): Unit = {
assertTrue(closed)
}
}
|
sptz45/sse-breaker
|
src/test/scala/com/tzavellas/sse/breaker/TestListener.scala
|
Scala
|
apache-2.0
| 797
|
///*
//
//active-learning-scala: Active Learning library for Scala
//Copyright (c) 2014 Davi Pereira dos Santos
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//*/
//
//package clean.tex
//
//import java.io.PrintWriter
//
//import al.strategies.Passive
//import clean.lib._
//import ml.classifiers.NoLearner
//import util.Stat
//
//import scala.collection.mutable
//
//object newplot extends AppWithUsage with LearnerTrait with StratsTrait with RangeGenerator with Rank {
// lazy val arguments = superArguments ++ List("learners:nb,5nn,c45,vfdt,ci,...|eci|i|ei|in|svm", "porRank:r", "porRisco:r", "dist:euc,man,mah")
// val context = "newplot"
// val tipoLearner = "all"
// val tipoSumariz = "media"
// val strats = stratsTexRedux(dist)
// val pioresAignorar = 0
// val measure = Kappa
// run()
//
// override def run() = {
// super.run()
// val arq = s"/home/davi/wcs/tese/$measure$pioresAignorar$dist$tipoSumariz$tipoLearner" + (if (porRank) "Rank" else "") + (if (porRisco) "Risco" else "") + ".plot"
// println(s"$arq")
// val dss = datasets.take(765675).filter { d =>
// val ds = Ds(d, readOnly = true)
// ds.open()
// val U = ds.poolSize.toInt
// ds.close()
// U > 200
// }
// val tests = for {
// dataset <- dss
// les = if (pioresAignorar == 0) learners(learnersStr)
// else dispensaMelhores(learners(learnersStr).map { l =>
// val ds = Ds(dataset, readOnly = true)
// ds.open()
// val vs = for (r <- 0 until runs; f <- 0 until folds) yield measure(ds, Passive(Seq()), l, r, f)(-1).read(ds).getOrElse(ds.quit("Kappa passiva não encontrada"))
// ds.close()
// l -> Stat.media_desvioPadrao(vs.toVector)._1
// }, pioresAignorar)(-_._2).map(_._1).par
// le <- les
// } yield {
// println(les.map(_.limp).mkString(" "))
// val ds = Ds(dataset, readOnly = true)
// println(s"$ds")
// ds.open()
// val (stras, curvas) = (for {
// s0 <- strats
// } yield {
// val s = s0(le)
// val v25 = for {
// r <- 0 until runs
// f <- 0 until folds
// } yield measure(ds, s, le, r, f)(0).readAll(ds).getOrElse(throw new Error(s"NA: ${(ds, s, le.abr)}"))
// val plotmedio = v25.transpose.map { v =>
// if (porRisco) Stat.media_desvioPadrao(v.toVector)._2 * (if (porRank) -1 else 1)
// else Stat.media_desvioPadrao(v.toVector)._1
// }
// val fst = plotmedio.head
//      s.limp -> plotmedio.reverse.padTo(200, fst).reverse.toList // to avoid steps at the different starting points of each dataset
// }).unzip
// ds.close()
// lazy val rank = stras zip (curvas.transpose map ranqueia).transpose
//
//      // one curve per strategy
// if (porRank) rank else stras zip curvas
// }
//
//  // stitch together the curve sets coming from several datasets, group by strategy, and build the mean curve of each
// // println(tests.flatten.groupBy(_._1))
// val (sts, plots) = tests.flatten.groupBy(_._1).map { case (st, v) =>
// val curvas = v.map(_._2)
// val curvamedia = curvas.transpose.map(x => Stat.media_desvioPadrao(x.toVector)._2)
// val curvasuave = curvamedia.sliding(20).map(y => y.sum / y.size).toList
// st -> curvasuave
// }.unzip
// val plot = plots.transpose
// val fw = new PrintWriter(arq, "ISO-8859-1")
// fw.write("budget " + sts.mkString(" ") + "\\n")
// plot.zipWithIndex foreach { case (momento, i) =>
// fw.write((i + 10) + " " + momento.mkString(" ") + "\\n")
// }
// fw.close()
// println(s"$arq " + plots.size + " strats.")
// }
//}
|
active-learning/active-learning-scala
|
src/main/scala/clean/tex/newplot.scala
|
Scala
|
gpl-2.0
| 4,457
|
package intellijhocon.psi
import com.intellij.psi.impl.source.PsiFileImpl
import com.intellij.psi.{PsiElementVisitor, FileViewProvider}
import com.intellij.openapi.fileTypes.FileType
import intellijhocon.parser.HoconElementType
import HoconElementType.HoconFileElementType
import intellijhocon.lang.HoconLanguageFileType
class HoconPsiFile(provider: FileViewProvider) extends PsiFileImpl(HoconFileElementType, HoconFileElementType, provider) {
def accept(visitor: PsiElementVisitor): Unit =
visitor.visitFile(this)
def getFileType: FileType =
HoconLanguageFileType
}
|
consulo/consulo-scala
|
intellij-hocon/src/main/scala/intellijhocon/psi/HoconPsiFile.scala
|
Scala
|
apache-2.0
| 582